AudioSource.PlayScheduled

function PlayScheduled (time : double) : void

Parameters

Name	Description
time	Time in seconds on the absolute timeline that AudioSettings.dspTime refers to, at which the sound should start playing.

Description

Plays the clip at a specific time on the absolute timeline that AudioSettings.dspTime reads from. This is the preferred way to stitch AudioClips in music players because it is independent of the frame rate and gives the audio system enough time to prepare playback. For media where opening and buffering take a long time (such as streams), this preparation can happen ahead of the scheduled start without causing sudden CPU spikes.

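As a minimal sketch of the basic call (C#; the class name is hypothetical and an AudioClip is assumed to be assigned to the AudioSource in the Inspector), a clip can be scheduled a fixed delay ahead of the current DSP time:

using UnityEngine;

// Hypothetical illustration only; assumes a clip is assigned to the AudioSource in the Inspector.
[RequireComponent(typeof(AudioSource))]
public class ScheduledPlayExample : MonoBehaviour
{
    void Start()
    {
        AudioSource source = GetComponent<AudioSource>();

        // Schedule the assigned clip to start exactly one second from the current DSP time.
        // Because dspTime comes from the audio system, this is independent of the frame rate.
        source.PlayScheduled(AudioSettings.dspTime + 1.0);
    }
}

The full examples below show how to alternate two AudioSources to stitch clips back to back.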
JavaScript
// Basic demonstration of a music system that uses PlayScheduled to preload and
// sample-accurately stitch two AudioClips in an alternating fashion.
// The code assumes that the music pieces are each 16 bars (4 beats per bar) at a tempo of 140 beats per minute.
// To make it stitch arbitrary clips, just replace the line
//     nextEventTime += (60.0 / bpm) * numBeatsPerSegment;
// with
//     nextEventTime += clips[flip].length;

@script RequireComponent(AudioSource)

public var bpm = 140.0;
public var numBeatsPerSegment = 16;
public var clips = new AudioClip[2];

private var nextEventTime : double;
private var flip = 0;
private var audioSources = new AudioSource[2];
private var running = false;

function Start()
{
    // Create two child GameObjects, each with its own AudioSource, so that one
    // source can be prepared while the other is playing.
    for (var i = 0; i < 2; i++)
    {
        var child : GameObject = new GameObject("Player");
        child.transform.parent = gameObject.transform;
        audioSources[i] = child.AddComponent(AudioSource);
    }

    // Schedule the first clip 2 seconds from now so the system has time to prepare it.
    nextEventTime = AudioSettings.dspTime + 2.0;
    running = true;
}

function Update()
{
    if (!running)
        return;

    var time = AudioSettings.dspTime;
    if (time + 1.0 > nextEventTime)
    {
        // We are now approximately 1 second before the time at which the sound should play,
        // so we schedule it now in order for the system to have enough time to prepare the
        // playback at the specified time. This may involve opening and buffering a streamed
        // file and should therefore take any worst-case delay into account.
        audioSources[flip].clip = clips[flip];
        audioSources[flip].PlayScheduled(nextEventTime);

        Debug.Log("Scheduled source " + flip + " to start at time " + nextEventTime);

        // Place the next event 16 beats from here at a rate of 140 beats per minute.
        nextEventTime += (60.0 / bpm) * numBeatsPerSegment;

        // Flip between the two audio sources so that the loading process of one does not
        // interfere with the one that is playing.
        flip = 1 - flip;
    }
}

C#

using UnityEngine;
using System.Collections;

[RequireComponent(typeof(AudioSource))]
public class example : MonoBehaviour
{
    public float bpm = 140.0f;
    public int numBeatsPerSegment = 16;
    public AudioClip[] clips = new AudioClip[2];

    private double nextEventTime;
    private int flip = 0;
    private AudioSource[] audioSources = new AudioSource[2];
    private bool running = false;

    void Start()
    {
        // Create two child GameObjects, each with its own AudioSource, so that one
        // source can be prepared while the other is playing.
        for (int i = 0; i < 2; i++)
        {
            GameObject child = new GameObject("Player");
            child.transform.parent = gameObject.transform;
            audioSources[i] = child.AddComponent<AudioSource>();
        }

        // Schedule the first clip 2 seconds from now so the system has time to prepare it.
        nextEventTime = AudioSettings.dspTime + 2.0;
        running = true;
    }

    void Update()
    {
        if (!running)
            return;

        double time = AudioSettings.dspTime;
        if (time + 1.0 > nextEventTime)
        {
            // We are now approximately 1 second before the scheduled start time, so assign
            // the clip and schedule it now. This gives the system enough time to prepare
            // playback, which may involve opening and buffering a streamed file.
            audioSources[flip].clip = clips[flip];
            audioSources[flip].PlayScheduled(nextEventTime);

            Debug.Log("Scheduled source " + flip + " to start at time " + nextEventTime);

            // Place the next event 16 beats from here at a rate of 140 beats per minute.
            nextEventTime += 60.0 / bpm * numBeatsPerSegment;

            // Flip between the two audio sources so that preparing one does not
            // interfere with the one that is playing.
            flip = 1 - flip;
        }
    }
}

Boo

import UnityEngine
import System.Collections

[RequireComponent(AudioSource)]
class example(MonoBehaviour):

    public bpm as single = 140.0F
    public numBeatsPerSegment as int = 16
    public clips as (AudioClip) = array[of AudioClip](2)

    private nextEventTime as double
    private flip as int = 0
    private audioSources as (AudioSource) = array[of AudioSource](2)
    private running as bool = false

    def Start():
        # Create two child GameObjects, each with its own AudioSource, so that one
        # source can be prepared while the other is playing.
        i as int = 0
        while i < 2:
            child as GameObject = GameObject('Player')
            child.transform.parent = gameObject.transform
            audioSources[i] = (child.AddComponent('AudioSource') as AudioSource)
            i += 1
        # Schedule the first clip 2 seconds from now so the system has time to prepare it.
        nextEventTime = (AudioSettings.dspTime + 2.0)
        running = true

    def Update():
        if not running:
            return
        time as double = AudioSettings.dspTime
        if (time + 1.0) > nextEventTime:
            # We are now approximately 1 second before the scheduled start time, so assign
            # the clip and schedule it now to give the system time to prepare playback.
            audioSources[flip].clip = clips[flip]
            audioSources[flip].PlayScheduled(nextEventTime)
            Debug.Log((('Scheduled source ' + flip) + ' to start at time ') + nextEventTime)
            # Place the next event 16 beats from here at a rate of 140 beats per minute.
            nextEventTime += ((60.0 / bpm) * numBeatsPerSegment)
            # Flip between the two audio sources so that preparing one does not
            # interfere with the one that is playing.
            flip = (1 - flip)
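
As the comments in the JavaScript example note, to stitch clips of arbitrary length you can replace the tempo-based increment with the length of the clip that was just scheduled. In the C# version above this would be a one-line change (a sketch, using the same field names as the example):

// Instead of advancing by a fixed number of beats:
//     nextEventTime += 60.0 / bpm * numBeatsPerSegment;
// advance by the duration of the clip that was just scheduled:
nextEventTime += clips[flip].length;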