2025-01-04 00:34:03 +01:00
parent 41829408dc
commit 0ca14bbc19
18111 changed files with 1871397 additions and 0 deletions


@@ -0,0 +1,7 @@
export * as types from "./_types.mjs";
export {default as AudioBufferCache} from "./cache.mjs";
export {default as AudioHelper} from "./helper.mjs";
export {default as AudioTimeout} from "./timeout.mjs";
export {default as Sound} from "./sound.mjs";
export {default as BiquadFilterEffect} from "./biquad.mjs";
export {default as ConvolverEffect} from "./convolver.mjs";


@@ -0,0 +1,41 @@
/**
* @typedef {Object} AudioBufferCacheEntry
* @property {string} src The audio buffer source path
* @property {AudioBuffer} buffer The cached AudioBuffer instance
* @property {number} size The size of the buffer in bytes
* @property {boolean} [locked] Is the buffer locked, preventing its expiration?
* @property {AudioBufferCacheEntry} [next] The next entry in the doubly-linked list
* @property {AudioBufferCacheEntry} [previous] The previous entry in the doubly-linked list
*/
/**
* @typedef {Object} SoundCreationOptions
* @property {string} src The source URL for the audio file
* @property {AudioContext} [context] A specific AudioContext to attach the sound to
* @property {boolean} [singleton=true] Reuse an existing Sound for this source?
* @property {boolean} [preload=false] Begin loading the audio immediately?
* @property {boolean} [autoplay=false] Begin playing the audio as soon as it is ready?
* @property {SoundPlaybackOptions} [autoplayOptions={}] Options passed to the play method if autoplay is true
*/
/**
* @typedef {Object} SoundPlaybackOptions
* @property {number} [delay=0] A delay in seconds by which to delay playback
* @property {number} [duration] A limited duration in seconds for which to play
* @property {number} [fade=0] A duration in milliseconds over which to fade in playback
* @property {boolean} [loop=false] Should sound playback loop?
* @property {number} [loopStart=0] Seconds of the AudioBuffer when looped playback should start.
* Only works for AudioBufferSourceNode.
* @property {number} [loopEnd] Seconds of the Audio buffer when looped playback should restart.
* Only works for AudioBufferSourceNode.
* @property {number} [offset=0] An offset in seconds at which to start playback
* @property {Function|null} [onended] A callback function attached to the source node
* @property {number} [volume=1.0] The volume at which to play the sound
*/
/**
* @callback SoundScheduleCallback
* @param {Sound} sound The Sound instance being scheduled
* @returns {any} A return value of the callback is returned as the resolved value of the
* Sound#schedule promise
*/


@@ -0,0 +1,74 @@
/**
* A sound effect which applies a biquad filter.
* @see {@link https://developer.mozilla.org/en-US/docs/Web/API/BiquadFilterNode}
* @alias foundry.audio.BiquadFilterEffect
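* @example Apply a lowpass filter to a Sound (an illustrative sketch; the loaded Sound instance and source path are assumed)
* ```js
* const filter = new BiquadFilterEffect(sound.context, {type: "lowpass", intensity: 7});
* sound.applyEffects([filter]);
* ```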
*/
export default class BiquadFilterEffect extends BiquadFilterNode {
/**
* A BiquadFilterEffect is constructed by passing the following parameters.
* @param {AudioContext} context The audio context required by the BiquadFilterNode
* @param {object} [options] Additional options which modify the BiquadFilterEffect behavior
* @param {BiquadFilterType} [options.type=lowpass] The filter type to apply
* @param {number} [options.intensity=5] The initial intensity of the effect
*/
constructor(context, {type="lowpass", intensity=5, ...options}={}) {
if ( !BiquadFilterEffect.#ALLOWED_TYPES.includes(type) ) {
throw new Error(`Invalid BiquadFilterEffect type "${type}" provided`);
}
super(context, options);
this.#type = this.type = type;
this.#intensity = intensity;
this.update();
}
/**
* The allowed filter types supported by this effect class.
*/
static #ALLOWED_TYPES = ["lowpass", "highpass", "bandpass", "lowshelf", "highshelf", "peaking", "notch"];
/**
* The original configured type of the effect.
* @type {BiquadFilterType}
*/
#type;
/* -------------------------------------------- */
/**
* Adjust the intensity of the effect on a scale of 1 to 10.
* @type {number}
*/
get intensity() {
return this.#intensity;
}
set intensity(intensity) {
this.update({intensity});
}
#intensity;
/* -------------------------------------------- */
/**
* Update the state of the effect node given a new filter type and/or numeric intensity.
* @param {object} options Options which are updated
* @param {number} [options.intensity] A new effect intensity
* @param {BiquadFilterType} [options.type] A new filter type
*/
update({intensity, type} = {}) {
if ( Number.isFinite(intensity) ) this.#intensity = Math.clamp(intensity, 1, 10);
if ( BiquadFilterEffect.#ALLOWED_TYPES.includes(type) ) this.#type = type;
this.type = this.#type;
switch ( this.#type ) {
case "lowpass":
this.frequency.value = 1100 - (100 * this.#intensity); // More intensity cuts at a lower frequency
break;
case "highpass":
this.frequency.value = 100 * this.#intensity; // More intensity cuts at higher frequency
break;
default:
throw new Error(`BiquadFilterEffect type "${this.#type}" not yet configured`);
}
}
}


@@ -0,0 +1,190 @@
/** @typedef {import("./_types.mjs").AudioBufferCacheEntry} AudioBufferCacheEntry
/**
* A specialized cache used for audio buffers.
* This is an LRU cache which expires buffers from the cache once the maximum cache size is exceeded.
* @extends {Map<string, AudioBufferCacheEntry>}
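* @example Cache and retrieve a decoded buffer (an illustrative sketch; `buffer` is assumed to be an existing AudioBuffer)
* ```js
* const cache = new AudioBufferCache();
* cache.setBuffer("sounds/door.wav", buffer);
* const cached = cache.getBuffer("sounds/door.wav"); // Marks the entry as most-recently-used
* ```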
*/
export default class AudioBufferCache extends Map {
/**
* Construct an AudioBufferCache providing a maximum memory size beyond which entries are expired.
* @param {number} [cacheSize] The maximum cache size in bytes. 1GB by default.
*/
constructor(cacheSize=Math.pow(1024, 3)) {
super();
this.#maxSize = cacheSize;
}
/**
* The maximum cache size in bytes.
* @type {number}
*/
#maxSize;
/**
* The current memory utilization in bytes.
* @type {number}
*/
#memorySize = 0;
/**
* The head of the doubly-linked list.
* @type {AudioBufferCacheEntry}
*/
#head;
/**
* The tail of the doubly-linked list
* @type {AudioBufferCacheEntry}
*/
#tail;
/**
* A summary of the current cache utilization.
* @type {{current: number, max: number, pct: number, currentString: string, maxString: string, pctString: string}}
*/
get usage() {
return {
current: this.#memorySize,
max: this.#maxSize,
pct: this.#memorySize / this.#maxSize,
currentString: foundry.utils.formatFileSize(this.#memorySize),
maxString: foundry.utils.formatFileSize(this.#maxSize),
pctString: `${(this.#memorySize * 100 / this.#maxSize).toFixed(2)}%`
};
}
/* -------------------------------------------- */
/* Cache Methods */
/* -------------------------------------------- */
/**
* Retrieve an AudioBuffer from the cache.
* @param {string} src The audio buffer source path
* @returns {AudioBuffer} The cached audio buffer, or undefined
*/
getBuffer(src) {
const node = super.get(src);
let buffer;
if ( node ) {
buffer = node.buffer;
if ( this.#head !== node ) this.#shift(node);
}
return buffer;
}
/* -------------------------------------------- */
/**
* Insert an AudioBuffer into the buffers cache.
* @param {string} src The audio buffer source path
* @param {AudioBuffer} buffer The audio buffer to insert
* @returns {AudioBufferCache}
*/
setBuffer(src, buffer) {
if ( !(buffer instanceof AudioBuffer) ) {
throw new Error("The AudioBufferCache is only used to store AudioBuffer instances");
}
let node = super.get(src);
if ( node ) this.#remove(node);
node = {src, buffer, size: buffer.length * buffer.numberOfChannels * 4, next: this.#head};
super.set(src, node);
this.#insert(node);
game.audio.debug(`Cached audio buffer "${src}" | ${this}`);
this.#expire();
return this;
}
/* -------------------------------------------- */
/**
* Delete an entry from the cache.
* @param {string} src The audio buffer source path
* @returns {boolean} Was the buffer deleted from the cache?
*/
delete(src) {
const node = super.get(src);
if ( node ) this.#remove(node);
return super.delete(src);
}
/* -------------------------------------------- */
/**
* Lock a buffer, preventing it from being expired even if it is least-recently-used.
* @param {string} src The audio buffer source path
* @param {boolean} [locked=true] Lock the buffer, preventing its expiration?
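* @example Protect a frequently used buffer from expiration (an illustrative sketch; the source path is assumed)
* ```js
* game.audio.buffers.lock("sounds/notify.wav");
* ```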
*/
lock(src, locked=true) {
const node = super.get(src);
if ( !node ) return;
node.locked = locked;
}
/* -------------------------------------------- */
/**
* Insert a new node into the cache, updating the linked list and cache size.
* @param {AudioBufferCacheEntry} node The node to insert
*/
#insert(node) {
if ( this.#head ) {
this.#head.previous = node;
this.#head = node;
}
else this.#head = this.#tail = node;
this.#memorySize += node.size;
}
/* -------------------------------------------- */
/**
* Remove a node from the cache, updating the linked list and cache size.
* @param {AudioBufferCacheEntry} node The node to remove
*/
#remove(node) {
if ( node.previous ) node.previous.next = node.next;
else this.#head = node.next;
if ( node.next ) node.next.previous = node.previous;
else this.#tail = node.previous;
this.#memorySize -= node.size;
}
/* -------------------------------------------- */
/**
* Shift an accessed node to the head of the linked list.
* @param {AudioBufferCacheEntry} node The node to shift
*/
#shift(node) {
// Unlink the node from its current position, updating the tail reference if needed
if ( node.previous ) node.previous.next = node.next;
if ( node.next ) node.next.previous = node.previous;
else this.#tail = node.previous;
// Re-insert the node at the head of the list
node.previous = undefined;
node.next = this.#head;
this.#head.previous = node;
this.#head = node;
}
/* -------------------------------------------- */
/**
* Recursively expire entries from the cache in least-recently used order.
* Skip expiration of any entries which are locked.
* @param {AudioBufferCacheEntry} [node] A node from which to start expiring. Otherwise, starts from the tail.
*/
#expire(node) {
if ( this.#memorySize < this.#maxSize ) return;
node ||= this.#tail;
if ( !node.locked ) {
this.#remove(node);
super.delete(node.src); // Also remove the expired entry from the Map itself
game.audio.debug(`Expired audio buffer ${node.src} | ${this}`);
}
if ( node.previous ) this.#expire(node.previous);
}
/* -------------------------------------------- */
/** @override */
toString() {
const {currentString, maxString, pctString} = this.usage;
return `AudioBufferCache: ${currentString} / ${maxString} (${pctString})`;
}
}


@@ -0,0 +1,121 @@
import Sound from "./sound.mjs";
/**
* A sound effect which applies a convolver filter.
* The convolver effect splits the input sound into two separate paths:
* 1. A "dry" node which is the original sound
* 2. A "wet" node which contains the result of the convolution
* This effect mixes between the dry and wet channels based on the intensity of the reverb effect.
* @see {@link https://developer.mozilla.org/en-US/docs/Web/API/ConvolverNode}
* @alias foundry.audio.ConvolverEffect
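* @example Add reverb to a Sound (an illustrative sketch; the loaded Sound instance is assumed)
* ```js
* const reverb = new ConvolverEffect(sound.context, {intensity: 8});
* sound.applyEffects([reverb]);
* ```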
*/
export default class ConvolverEffect extends ConvolverNode {
/**
* A ConvolverEffect is constructed by passing the following parameters.
* @param {AudioContext} context The audio context required by the ConvolverNode
* @param {object} [options] Additional options which modify the ConvolverEffect behavior
* @param {string} [options.impulseResponsePath] The file path to the impulse response buffer to use
* @param {number} [options.intensity] The initial intensity of the effect
*/
constructor(context, {impulseResponsePath="sounds/impulse-responses/ir-full.wav", intensity=5, ...options}={}) {
super(context, options);
this.#impulseResponsePath = impulseResponsePath;
this.#intensity = intensity;
this.#dryGain = context.createGain();
this.#wetGain = context.createGain();
this.update();
}
/**
* The file path of the impulse response buffer currently used.
* The default impulse response function was generated using https://aldel.com/reverbgen/.
* @type {string}
*/
#impulseResponsePath;
/**
* A GainNode which mixes base, non-convolved, audio playback into the final result.
* @type {GainNode}
*/
#dryGain;
/**
* A GainNode which mixes convolved audio playback into the final result.
* @type {GainNode}
*/
#wetGain;
/**
* Flag whether the impulse response buffer has been loaded to prevent duplicate load requests.
* @type {boolean}
*/
#loaded = false;
/* -------------------------------------------- */
/**
* Adjust the intensity of the effect on a scale of 1 to 10.
* @type {number}
*/
get intensity() {
return this.#intensity;
}
set intensity(value) {
this.update({intensity: value});
}
#intensity;
/* -------------------------------------------- */
/**
* Update the state of the effect node given a new numeric intensity.
* @param {object} options Options which are updated
* @param {number} [options.intensity] A new effect intensity
*/
update({intensity} = {}) {
if ( Number.isFinite(intensity) ) this.#intensity = Math.clamp(intensity, 1, 10);
// Load an impulse response buffer
if ( !this.#loaded ) {
const irSound = new Sound(this.#impulseResponsePath, {context: this.context});
this.#loaded = true;
irSound.load().then(s => this.buffer = s.buffer);
}
// Set mix of wet and dry gain based on reverb intensity
this.#wetGain.gain.value = 0.2 + Math.sqrt(this.#intensity / 10); // [0.2, 1.2]
this.#dryGain.gain.value = Math.sqrt((11 - this.#intensity) / 10);
}
/* -------------------------------------------- */
/** @override */
disconnect(...args) {
this.#wetGain.disconnect();
this.#dryGain.disconnect();
return super.disconnect(...args);
}
/* -------------------------------------------- */
/** @override */
connect(destinationNode, ...args) {
super.connect(this.#wetGain, ...args);
this.#dryGain.connect(destinationNode);
this.#wetGain.connect(destinationNode);
return destinationNode;
}
/* -------------------------------------------- */
/**
* Additional side effects performed when some other AudioNode connects to this one.
* This behavior is not supported by the base WebAudioAPI but is needed here for more complex effects.
* @param {AudioNode} sourceNode An upstream source node that is connecting to this one
*/
onConnectFrom(sourceNode) {
sourceNode.connect(this.#dryGain);
}
}


@@ -0,0 +1,635 @@
import AudioBufferCache from "./cache.mjs";
import Sound from "./sound.mjs";
/**
* @typedef {import("./_types.mjs").SoundCreationOptions} SoundCreationOptions
*/
/**
* A helper class to provide common functionality for working with the Web Audio API.
* https://developer.mozilla.org/en-US/docs/Web/API/Web_Audio_API
* A singleton instance of this class is available as game#audio.
* @see Game#audio
* @alias game.audio
*/
export default class AudioHelper {
constructor() {
if ( game.audio instanceof this.constructor ) {
throw new Error("You may not re-initialize the singleton AudioHelper. Use game.audio instead.");
}
this.unlock = this.awaitFirstGesture();
}
/**
* The native interval, in milliseconds, at which the AudioHelper analyses audio levels from streams.
* Any interval passed to startLevelReports() must be a multiple of this value.
* @type {number}
*/
static levelAnalyserNativeInterval = 50;
/**
* The cache size threshold after which audio buffers will be expired from the cache to make more room.
* 1 gigabyte, by default.
* @type {number}
*/
static THRESHOLD_CACHE_SIZE_BYTES = Math.pow(1024, 3);
/**
* Audio Context singleton used for analysing audio levels of each stream
* Only created if necessary to listen to audio streams.
* @type {AudioContext}
*/
static #analyzerContext;
/**
* The set of singleton Sound instances which are shared across multiple uses of the same sound path.
* @type {Map<string,WeakRef<Sound>>}
*/
sounds = new Map();
/**
* Get a map of the Sound objects which are currently playing.
* @type {Map<number,Sound>}
*/
playing = new Map();
/**
* A user gesture must be registered before audio can be played.
* This Array contains callback functions for playback which was requested prior to a gesture.
* Once a gesture is observed, every callback in this Array is invoked.
* @type {Function[]}
* @see Sound
*/
pending = [];
/**
* A Promise which resolves once the game audio API is unlocked and ready to use.
* @type {Promise<void>}
*/
unlock;
/**
* A flag for whether audio playback is currently locked by awaiting a user gesture
* @type {boolean}
*/
locked = true;
/**
* A singleton audio context used for playback of music.
* @type {AudioContext}
*/
music;
/**
* A singleton audio context used for playback of environmental audio.
* @type {AudioContext}
*/
environment;
/**
* A singleton audio context used for playback of interface sounds and effects.
* @type {AudioContext}
*/
interface;
/**
* For backwards compatibility, AudioHelper#context refers to the context used for music playback.
* @type {AudioContext}
*/
get context() {
return this.music;
}
/**
* Interval ID as returned by setInterval for analysing the volume of streams.
* When set to 0, no timer is set.
* @type {number}
*/
#analyserInterval;
/**
* A singleton cache used for audio buffers.
* @type {AudioBufferCache}
*/
buffers = new AudioBufferCache(AudioHelper.THRESHOLD_CACHE_SIZE_BYTES);
/**
* Map of all streams that we listen to for determining the decibel levels.
* Used for analyzing audio levels of each stream.
* @type {Record<string, {stream: MediaStream, analyser: AnalyserNode, interval: number, callback: Function}>}
*/
#analyserStreams = {};
/**
* Fast Fourier Transform Array.
* Used for analysing the decibel level of streams. The array is allocated only once
* then filled by the analyser repeatedly. We only generate it when we need to listen to
* a stream's level, so we initialize it to null.
* @type {Float32Array}
*/
#fftArray = null;
/* -------------------------------------------- */
/**
* Create a Sound instance for a given audio source URL
* @param {SoundCreationOptions} options Sound creation options
* @returns {Sound}
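* @example Create and preload a shared Sound instance (an illustrative sketch; the source path is assumed)
* ```js
* const sound = game.audio.create({src: "sounds/door.wav", preload: true});
* ```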
*/
create({src, context, singleton=true, preload=false, autoplay=false, autoplayOptions={}}) {
let sound;
// Share singleton sounds across multiple use cases
if ( singleton ) {
const ref = this.sounds.get(src);
sound = ref?.deref();
if ( !sound ) {
sound = new Sound(src, {context});
this.sounds.set(src, new WeakRef(sound));
}
}
// Create an independent sound instance
else sound = new Sound(src, {context});
// Preload or autoplay
if ( preload && !sound.loaded ) sound.load({autoplay, autoplayOptions});
else if ( autoplay ) sound.play(autoplayOptions);
return sound;
}
/* -------------------------------------------- */
/**
* Test whether a source file has a supported audio extension type
* @param {string} src A requested audio source path
* @returns {boolean} Does the filename end with a valid audio extension?
*/
static hasAudioExtension(src) {
let rgx = new RegExp(`(\\.${Object.keys(CONST.AUDIO_FILE_EXTENSIONS).join("|\\.")})(\\?.*)?$`, "i"); // Anchor to the end so the extension must terminate the path
return rgx.test(src);
}
/* -------------------------------------------- */
/**
* Given an input file path, determine a default name for the sound based on the filename
* @param {string} src An input file path
* @returns {string} A default sound name for the path
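* @example Derive a display name from an illustrative file path
* ```js
* AudioHelper.getDefaultSoundName("sounds/ambient/forest-night.ogg"); // "Forest Night"
* ```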
*/
static getDefaultSoundName(src) {
const parts = src.split("/").pop().split(".");
parts.pop();
let name = decodeURIComponent(parts.join("."));
return name.replace(/[-_.]/g, " ").titleCase();
}
/* -------------------------------------------- */
/**
* Play a single Sound by providing its source.
* @param {string} src The file path to the audio source being played
* @param {object} [options] Additional options which configure playback
* @param {AudioContext} [options.context] A specific AudioContext within which to play
* @returns {Promise<Sound>} The created Sound which is now playing
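* @example Play a one-off sound in the interface context (an illustrative sketch; the source path is assumed)
* ```js
* const sound = await game.audio.play("sounds/chime.wav", {volume: 0.5, context: game.audio.interface});
* ```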
*/
async play(src, {context, ...options}={}) {
const sound = new Sound(src, {context});
await sound.load();
sound.play(options);
return sound;
}
/* -------------------------------------------- */
/**
* Register event listeners to await the first user gesture and begin playback once observed.
* @returns {Promise<void>} A Promise which resolves once the first user gesture is observed
*/
async awaitFirstGesture() {
if ( !this.locked ) return;
await new Promise(resolve => {
for ( let eventName of ["contextmenu", "auxclick", "pointerdown", "pointerup", "keydown"] ) {
document.addEventListener(eventName, event => this._onFirstGesture(event, resolve), {once: true});
}
});
}
/* -------------------------------------------- */
/**
* Request that other connected clients begin preloading a certain sound path.
* @param {string} src The source file path requested for preload
* @returns {Promise<Sound>} A Promise which resolves once the preload is complete
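* @example Request that all clients preload a sound (an illustrative sketch; the source path is assumed)
* ```js
* await game.audio.preload("sounds/combat-start.ogg");
* ```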
*/
preload(src) {
if ( !src || !AudioHelper.hasAudioExtension(src) ) {
throw new Error(`Invalid audio source path ${src} provided for preload request`);
}
game.socket.emit("preloadAudio", src);
return this.constructor.preloadSound(src);
}
/* -------------------------------------------- */
/* Settings and Volume Controls */
/* -------------------------------------------- */
/**
* Register client-level settings for global volume controls.
*/
static registerSettings() {
// Playlist Volume
game.settings.register("core", "globalPlaylistVolume", {
name: "Global Playlist Volume",
hint: "Define a global playlist volume modifier",
scope: "client",
config: false,
type: new foundry.data.fields.AlphaField({required: true, initial: 0.5}),
onChange: AudioHelper.#onChangeMusicVolume
});
// Ambient Volume
game.settings.register("core", "globalAmbientVolume", {
name: "Global Ambient Volume",
hint: "Define a global ambient volume modifier",
scope: "client",
config: false,
type: new foundry.data.fields.AlphaField({required: true, initial: 0.5}),
onChange: AudioHelper.#onChangeEnvironmentVolume
});
// Interface Volume
game.settings.register("core", "globalInterfaceVolume", {
name: "Global Interface Volume",
hint: "Define a global interface volume modifier",
scope: "client",
config: false,
type: new foundry.data.fields.AlphaField({required: true, initial: 0.5}),
onChange: AudioHelper.#onChangeInterfaceVolume
});
}
/* -------------------------------------------- */
/**
* Handle changes to the global music volume slider.
* @param {number} volume
*/
static #onChangeMusicVolume(volume) {
volume = Math.clamp(volume, 0, 1);
const ctx = game.audio.music;
if ( !ctx ) return;
ctx.gainNode.gain.setValueAtTime(volume, ctx.currentTime);
ui.playlists?.render();
Hooks.callAll("globalPlaylistVolumeChanged", volume);
}
/* -------------------------------------------- */
/**
* Handle changes to the global environment volume slider.
* @param {number} volume
*/
static #onChangeEnvironmentVolume(volume) {
volume = Math.clamp(volume, 0, 1);
const ctx = game.audio.environment;
if ( !ctx ) return;
ctx.gainNode.gain.setValueAtTime(volume, ctx.currentTime);
if ( canvas.ready ) {
for ( const mesh of canvas.primary.videoMeshes ) {
mesh.sourceElement.volume = mesh.object instanceof Tile ? mesh.object.volume : volume;
}
}
ui.playlists?.render();
Hooks.callAll("globalAmbientVolumeChanged", volume);
}
/* -------------------------------------------- */
/**
* Handle changes to the global interface volume slider.
* @param {number} volume
*/
static #onChangeInterfaceVolume(volume) {
volume = Math.clamp(volume, 0, 1);
const ctx = game.audio.interface;
if ( !ctx ) return;
ctx.gainNode.gain.setValueAtTime(volume, ctx.currentTime);
ui.playlists?.render();
Hooks.callAll("globalInterfaceVolumeChanged", volume);
}
/* -------------------------------------------- */
/* Socket Listeners and Handlers */
/* -------------------------------------------- */
/**
* Open socket listeners which transact audio playback data
* @param socket The open websocket
*/
static _activateSocketListeners(socket) {
socket.on("playAudio", audioData => this.play(audioData, false));
socket.on("playAudioPosition", args => canvas.sounds.playAtPosition(...args));
socket.on("preloadAudio", src => this.preloadSound(src));
}
/* -------------------------------------------- */
/**
* Play a one-off sound effect which is not part of a Playlist
*
* @param {Object} data An object configuring the audio data to play
* @param {string} data.src The audio source file path, either a public URL or a local path relative to the public directory
* @param {string} [data.channel] An audio channel in CONST.AUDIO_CHANNELS where the sound should play
* @param {number} data.volume The volume level at which to play the audio, between 0 and 1.
* @param {boolean} data.autoplay Begin playback of the audio effect immediately once it is loaded.
* @param {boolean} data.loop Loop the audio effect and continue playing it until it is manually stopped.
* @param {object|boolean} socketOptions Options which only apply when emitting playback over websocket.
* As a boolean, emits (true) or does not emit (false) playback to all other clients
* As an object, can configure which recipients should receive the event.
* @param {string[]} [socketOptions.recipients] An array of user IDs to push audio playback to. All users by default.
*
* @returns {Sound} A Sound instance which controls audio playback.
*
* @example Play the sound of a locked door for all players
* ```js
* AudioHelper.play({src: "sounds/lock.wav", volume: 0.8, loop: false}, true);
* ```
*/
static play(data, socketOptions) {
const audioData = foundry.utils.mergeObject({
src: null,
volume: 1.0,
loop: false,
channel: "interface"
}, data, {insertKeys: true});
// Push the sound to other clients
const push = socketOptions && (socketOptions !== false);
if ( push ) {
socketOptions = foundry.utils.getType(socketOptions) === "Object" ? socketOptions : {};
if ( "recipients" in socketOptions && !Array.isArray(socketOptions.recipients)) {
throw new Error("Socket recipients must be an array of User IDs");
}
game.socket.emit("playAudio", audioData, socketOptions);
}
// Backwards compatibility, if autoplay was passed as false take no further action
if ( audioData.autoplay === false ) return;
// Play the sound locally
return game.audio.play(audioData.src, {
volume: audioData.volume ?? 1.0,
loop: audioData.loop,
context: game.audio[audioData.channel]
});
}
/* -------------------------------------------- */
/**
* Begin loading the sound for a provided source URL, adding it to the singleton sounds collection.
* @param {string} src The audio source path to preload
* @returns {Promise<Sound>} The created and loaded Sound ready for playback
*/
static async preloadSound(src) {
const sound = game.audio.create({src: src, preload: false, singleton: true});
await sound.load();
return sound;
}
/* -------------------------------------------- */
/**
* Returns the volume value based on a range input volume control's position.
* This uses an exponential approximation of the logarithmic nature of audio level perception.
* @param {number|string} value Value between [0, 1] of the range input
* @param {number} [order=1.5] The exponent of the curve
* @returns {number}
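* @example Convert a slider position to a volume level and back
* ```js
* AudioHelper.inputToVolume(0.5);    // 0.5 ** 1.5 ≈ 0.354
* AudioHelper.volumeToInput(0.354);  // ≈ 0.5
* ```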
*/
static inputToVolume(value, order=1.5) {
if ( typeof value === "string" ) value = parseFloat(value);
return Math.pow(value, order);
}
/* -------------------------------------------- */
/**
* Counterpart to inputToVolume()
* Returns the input range value based on a volume
* @param {number} volume Value between [0, 1] of the volume level
* @param {number} [order=1.5] The exponent of the curve
* @returns {number}
*/
static volumeToInput(volume, order=1.5) {
return Math.pow(volume, 1 / order);
}
/* -------------------------------------------- */
/* Audio Stream Analysis */
/* -------------------------------------------- */
/**
* Returns a singleton AudioContext if one can be created.
* An audio context may not be available due to limited resources or browser compatibility
* in which case null will be returned
*
* @returns {AudioContext|null} A singleton AudioContext, or null if one is not available
*/
getAnalyzerContext() {
if ( !AudioHelper.#analyzerContext ) AudioHelper.#analyzerContext = new AudioContext();
return AudioHelper.#analyzerContext;
}
/* -------------------------------------------- */
/**
* Registers a stream for periodic reports of audio levels.
* Once added, the callback will be called with the maximum decibel level of
* the audio tracks in that stream since the last time the event was fired.
* The interval needs to be a multiple of AudioHelper.levelAnalyserNativeInterval which defaults to 50ms
*
* @param {string} id An id to assign to this report. Can be used to stop reports
* @param {MediaStream} stream The MediaStream instance to report activity on.
* @param {Function} callback The callback function to call with the decibel level. `callback(dbLevel)`
* @param {number} [interval] The interval at which to produce reports.
* @param {number} [smoothing] The smoothingTimeConstant to set on the audio analyser.
* @returns {boolean} Returns whether listening to the stream was successful
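* @example Report microphone levels every 100ms (an illustrative sketch; `stream` is assumed to be an existing MediaStream)
* ```js
* game.audio.startLevelReports("mic", stream, db => console.log(`Peak level: ${db} dB`), 100);
* game.audio.stopLevelReports("mic"); // Later, stop the reports
* ```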
*/
startLevelReports(id, stream, callback, interval=50, smoothing=0.1) {
if ( !stream || !id ) return false;
let audioContext = this.getAnalyzerContext();
if (audioContext === null) return false;
// Clean up any existing report with the same ID
this.stopLevelReports(id);
// Make sure this stream has audio tracks, otherwise we can't connect the analyser to it
if (stream.getAudioTracks().length === 0) return false;
// Create the analyser
let analyser = audioContext.createAnalyser();
analyser.fftSize = 512;
analyser.smoothingTimeConstant = smoothing;
// Connect the analyser to the MediaStreamSource
audioContext.createMediaStreamSource(stream).connect(analyser);
this.#analyserStreams[id] = {stream, analyser, interval, callback, _lastEmit: 0};
// Ensure the analyser timer is started as we have at least one valid stream to listen to
this.#ensureAnalyserTimer();
return true;
}
/* -------------------------------------------- */
/**
* Stop sending audio level reports
* This stops listening to a stream and stops sending reports.
* If we aren't listening to any more streams, cancel the global analyser timer.
* @param {string} id The id of the report which was passed to startLevelReports.
*/
stopLevelReports(id) {
delete this.#analyserStreams[id];
if ( foundry.utils.isEmpty(this.#analyserStreams) ) this.#cancelAnalyserTimer();
}
/* -------------------------------------------- */
/**
* Ensures the global analyser timer is started.
*
* We create a single timer that runs every 50ms, and only when needed, so that analysing multiple streams
* at the same time does not spawn multiple timers. Limiting the number of concurrent timers is good practice
* and avoids potential timer congestion.
*/
#ensureAnalyserTimer() {
if ( !this.#analyserInterval ) {
this.#analyserInterval = setInterval(this.#emitVolumes.bind(this), AudioHelper.levelAnalyserNativeInterval);
}
}
/* -------------------------------------------- */
/**
* Cancel the global analyser timer
* If the timer is running and has become unnecessary, stops it.
*/
#cancelAnalyserTimer() {
if ( this.#analyserInterval ) {
clearInterval(this.#analyserInterval);
this.#analyserInterval = undefined;
}
}
/* -------------------------------------------- */
/**
* Capture the audio level for each registered stream and invoke its callback with the maximum decibel
* level detected since the last report.
*/
#emitVolumes() {
for ( const stream of Object.values(this.#analyserStreams) ) {
if ( ++stream._lastEmit < (stream.interval / AudioHelper.levelAnalyserNativeInterval) ) continue;
// Create the Fast Fourier Transform Array only once. Assume all analysers use the same fftSize
if ( this.#fftArray === null ) this.#fftArray = new Float32Array(stream.analyser.frequencyBinCount);
// Fill the array
stream.analyser.getFloatFrequencyData(this.#fftArray);
const maxDecibel = Math.max(...this.#fftArray);
stream.callback(maxDecibel, this.#fftArray);
stream._lastEmit = 0;
}
}
/* -------------------------------------------- */
/* Event Handlers */
/* -------------------------------------------- */
/**
* Handle the first observed user gesture
* @param {Event} event The user gesture event which enables playback
* @param {Function} resolve The Promise resolution function
* @private
*/
_onFirstGesture(event, resolve) {
if ( !this.locked ) return resolve();
// Create audio contexts
this.music = AudioHelper.#createContext("globalPlaylistVolume");
this.environment = AudioHelper.#createContext("globalAmbientVolume");
this.interface = AudioHelper.#createContext("globalInterfaceVolume");
// Unlock and evaluate pending playbacks
this.locked = false;
if ( this.pending.length ) {
console.log(`${vtt} | Activating pending audio playback with user gesture.`);
this.pending.forEach(fn => fn());
this.pending = [];
}
return resolve();
}
/* -------------------------------------------- */
/**
* Create an AudioContext with an attached GainNode for master volume control.
* @param {string} volumeSetting The name of the core volume setting which controls this context
* @returns {AudioContext}
*/
static #createContext(volumeSetting) {
const ctx = new AudioContext();
ctx.gainNode = ctx.createGain();
ctx.gainNode.connect(ctx.destination);
const volume = game.settings.get("core", volumeSetting);
ctx.gainNode.gain.setValueAtTime(volume, ctx.currentTime);
return ctx;
}
/* -------------------------------------------- */
/**
* Log a debugging message if the audio debugging flag is enabled.
* @param {string} message The message to log
*/
debug(message) {
if ( CONFIG.debug.audio ) console.debug(`${vtt} | ${message}`);
}
/* -------------------------------------------- */
/* Deprecations and Compatibility */
/* -------------------------------------------- */
/**
* @deprecated since v12
* @ignore
*/
getCache(src) {
foundry.utils.logCompatibilityWarning("AudioHelper#getCache is deprecated in favor of AudioHelper#buffers#get",
{since: 12, until: 14});
return this.buffers.getBuffer(src);
}
/* -------------------------------------------- */
/**
* @deprecated since v12
* @ignore
*/
updateCache(src, playing=false) {
foundry.utils.logCompatibilityWarning("AudioHelper#updateCache is deprecated without replacement");
}
/* -------------------------------------------- */
/**
* @deprecated since v12
* @ignore
*/
setCache(src, buffer) {
foundry.utils.logCompatibilityWarning("AudioHelper#setCache is deprecated in favor of AudioHelper#buffers#set");
this.buffers.setBuffer(src, buffer);
}
}


@@ -0,0 +1,990 @@
import AudioTimeout from "./timeout.mjs";
import EventEmitterMixin from "../../common/utils/event-emitter.mjs";
/**
* @typedef {import("./_types.mjs").SoundPlaybackOptions} SoundPlaybackOptions
* @typedef {import("./_types.mjs").SoundScheduleCallback} SoundScheduleCallback
*/
/**
* A container around an AudioNode which manages sound playback in Foundry Virtual Tabletop.
* Each Sound is either an AudioBufferSourceNode (for short sources) or a MediaElementAudioSourceNode (for long ones).
* This class provides an interface around both types which allows standardized control over playback.
* @alias foundry.audio.Sound
* @see {EventEmitterMixin}
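* @example Load and play a sound (an illustrative sketch; the source path is assumed)
* ```js
* const sound = new Sound("sounds/door.wav", {context: game.audio.interface});
* await sound.load();
* await sound.play({volume: 0.8, fade: 500});
* ```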
*/
export default class Sound extends EventEmitterMixin(Object) {
/**
* Construct a Sound by providing the source URL and other options.
* @param {string} src The audio source path, either a relative path or a remote URL
* @param {object} [options] Additional options which configure the Sound
* @param {AudioContext} [options.context] A non-default audio context within which the sound should play
* @param {boolean} [options.forceBuffer] Force use of an AudioBufferSourceNode even if the audio duration is long
*/
constructor(src, {context, forceBuffer=false}={}) {
super();
Object.defineProperties(this, {
id: {value: Sound.#nodeId++, writable: false, enumerable: true, configurable: false},
src: {value: src, writable: false, enumerable: true, configurable: false}
});
this.#context = context || game.audio.music;
this.#forceBuffer = forceBuffer;
}
/**
* The sequence of Sound life-cycle states.
* @enum {Readonly<number>}
*/
static STATES = Object.freeze({
FAILED: -1,
NONE: 0,
LOADING: 1,
LOADED: 2,
STARTING: 3,
PLAYING: 4,
PAUSED: 5,
STOPPING: 6,
STOPPED: 7
});
/**
* The maximum duration, in seconds, for which an AudioBufferSourceNode will be used.
* Otherwise, a MediaElementAudioSourceNode will be used.
* @type {number}
*/
static MAX_BUFFER_DURATION = 10 * 60; // 10 Minutes
/**
* An incrementing counter used to assign each Sound a unique id.
* @type {number}
*/
static #nodeId = 0;
/** @override */
static emittedEvents = ["load", "play", "pause", "end", "stop"];
/**
* A unique integer identifier for this sound.
* @type {number}
*/
id;
/**
* The audio source path.
* Either a relative path served by the running Foundry VTT game server or a remote URL.
* @type {string}
*/
src;
/**
* The audio context within which this Sound is played.
* @type {AudioContext}
*/
get context() {
return this.#context;
}
#context;
/**
* When this Sound uses an AudioBuffer, this is an AudioBufferSourceNode.
* @type {AudioBufferSourceNode}
*/
#bufferNode;
/**
* When this Sound uses an HTML Audio stream, this is a MediaElementAudioSourceNode.
* @type {MediaElementAudioSourceNode}
*/
#mediaNode;
/**
* The AudioSourceNode used to control sound playback.
* @type {AudioBufferSourceNode|MediaElementAudioSourceNode}
*/
get sourceNode() {
return this.#bufferNode || this.#mediaNode;
}
/**
* The GainNode used to control volume for this sound.
* @type {GainNode}
*/
gainNode;
/**
* An AudioBuffer instance, if this Sound uses an AudioBufferSourceNode for playback.
* @type {AudioBuffer|null}
*/
buffer = null;
/**
* An HTMLAudioElement, if this Sound uses a MediaElementAudioSourceNode for playback.
* @type {HTMLAudioElement|null}
*/
element = null;
/**
* Playback configuration options specified at the time that Sound#play is called.
* @type {SoundPlaybackOptions}
*/
#playback = {
delay: 0,
duration: undefined,
fade: 0,
loop: false,
loopStart: 0,
loopEnd: undefined,
offset: 0,
onended: null,
volume: 1.0
};
/**
* Force usage of an AudioBufferSourceNode regardless of audio duration?
* @type {boolean}
*/
#forceBuffer = false;
/**
* The life-cycle state of the sound.
* @see {Sound.STATES}
* @type {number}
* @protected
*/
_state = Sound.STATES.NONE;
/**
* Has the audio file been loaded either fully or for streaming.
* @type {boolean}
*/
get loaded() {
if ( this._state < Sound.STATES.LOADED ) return false;
return !!(this.buffer || this.element);
}
/**
* Did the audio file fail to load.
* @type {boolean}
*/
get failed() {
return this._state === Sound.STATES.FAILED;
}
/**
* Is this sound currently playing?
* @type {boolean}
*/
get playing() {
return (this._state === Sound.STATES.STARTING) || (this._state === Sound.STATES.PLAYING);
}
/**
* Does this Sound use an AudioBufferSourceNode?
* Otherwise, the Sound uses a streamed MediaElementAudioSourceNode.
* @type {boolean}
*/
get isBuffer() {
return !!this.buffer && (this.sourceNode instanceof AudioBufferSourceNode);
}
/**
* A convenience reference to the GainNode gain audio parameter.
* @type {AudioParam}
*/
get gain() {
return this.gainNode?.gain;
}
/**
* The AudioNode destination which is the output target for the Sound.
* @type {AudioNode}
*/
destination;
/**
* Record the pipeline of nodes currently used by this Sound.
* @type {AudioNode[]}
*/
#pipeline = [];
/**
* A pipeline of AudioNode instances to be applied to Sound playback.
* @type {AudioNode[]}
*/
effects = [];
/**
* The currently playing volume of the sound.
* Undefined until playback has started for the first time.
* @type {number}
*/
get volume() {
return this.gain?.value;
}
set volume(value) {
if ( !this.gainNode || !Number.isFinite(value) ) return;
const ct = this.#context.currentTime;
this.gain.cancelScheduledValues(ct);
this.gain.value = value;
this.gain.setValueAtTime(value, ct); // Immediately schedule the new value
}
/**
* The time in seconds at which playback was started.
* @type {number}
*/
startTime;
/**
* The time in seconds at which playback was paused.
* @type {number}
*/
pausedTime;
/**
* The total duration of the audio source in seconds.
* @type {number}
*/
get duration() {
if ( this._state < Sound.STATES.LOADED ) return undefined;
if ( this.buffer ) {
const {loop, loopStart, loopEnd} = this.#playback;
if ( loop && Number.isFinite(loopStart) && Number.isFinite(loopEnd) ) return loopEnd - loopStart;
return this.buffer.duration;
}
return this.element?.duration;
}
/**
* The current playback time of the sound.
* @type {number}
*/
get currentTime() {
if ( !this.playing ) return undefined;
if ( this.pausedTime ) return this.pausedTime;
let time = this.#context.currentTime - this.startTime;
if ( Number.isFinite(this.duration) ) time %= this.duration;
return time;
}
/**
* Is the sound looping?
* @type {boolean}
*/
get loop() {
return this.#playback.loop;
}
set loop(value) {
const loop = this.#playback.loop = Boolean(value);
if ( this.#bufferNode ) this.#bufferNode.loop = loop;
else if ( this.element ) this.element.loop = loop;
}
/**
* A set of scheduled events orchestrated using the Sound#schedule function.
* @type {Set<AudioTimeout>}
*/
#scheduledEvents = new Set();
/**
* An operation in progress on the sound which must be queued.
* @type {Promise}
*/
#operation;
/**
* A delay timeout before the sound starts or stops.
* @type {AudioTimeout}
*/
#delay;
/**
* An internal reference to some object which is managing this Sound instance.
* @type {Object|null}
* @internal
*/
_manager = null;
/* -------------------------------------------- */
/* Life-Cycle Methods */
/* -------------------------------------------- */
/**
* Load the audio source and prepare it for playback, either using an AudioBuffer or a streamed HTMLAudioElement.
* @param {object} [options={}] Additional options which affect resource loading
* @param {boolean} [options.autoplay=false] Automatically begin playback of the sound once loaded
* @param {SoundPlaybackOptions} [options.autoplayOptions] Playback options passed to Sound#play, if autoplay
* @returns {Promise<Sound>} A Promise which resolves to the Sound once it is loaded
*/
async load({autoplay=false, autoplayOptions={}}={}) {
const {STATES} = Sound;
// Await audio unlock
if ( game.audio.locked ) {
game.audio.debug(`Delaying load of sound "${this.src}" until after first user gesture`);
await game.audio.unlock;
}
// Wait for another ongoing operation
if ( this.#operation ) {
await this.#operation;
return this.load({autoplay, autoplayOptions});
}
// Queue loading
if ( !this.loaded ) {
this._state = STATES.LOADING;
this.#context ||= game.audio.music;
try {
this.#operation = this._load();
await this.#operation;
this._state = STATES.LOADED;
this.dispatchEvent(new Event("load"));
} catch(err) {
console.error(err);
this._state = STATES.FAILED;
}
finally {
this.#operation = undefined;
}
}
// Autoplay after load
if ( autoplay && !this.failed && !this.playing ) {
// noinspection ES6MissingAwait
this.play(autoplayOptions);
}
return this;
}
/* -------------------------------------------- */
/**
* An inner method which handles loading so that it can be de-duplicated under a single shared Promise resolution.
* This method is factored out to allow for subclasses to override loading behavior.
* @returns {Promise<void>} A Promise which resolves once the sound is loaded
* @throws {Error} An error if loading failed for any reason
* @protected
*/
async _load() {
// Attempt to load a cached AudioBuffer
this.buffer = game.audio.buffers.getBuffer(this.src) || null;
this.element = null;
// Otherwise, load the audio as an HTML5 audio element to learn its playback duration
if ( !this.buffer ) {
const element = await this.#createAudioElement();
const isShort = (element?.duration || Infinity) <= Sound.MAX_BUFFER_DURATION;
// For short sounds create and cache the audio buffer and use an AudioBufferSourceNode
if ( isShort || this.#forceBuffer ) {
this.buffer = await this.#createAudioBuffer();
game.audio.buffers.setBuffer(this.src, this.buffer);
Sound.#unloadAudioElement(element);
}
else this.element = element;
}
}
/* -------------------------------------------- */
/**
* Begin playback for the Sound.
* This method is asynchronous because playback may not start until after an initially provided delay.
* The Promise resolves *before* the fade-in of any configured volume transition.
* @param {SoundPlaybackOptions} [options] Options which configure the beginning of sound playback
* @returns {Promise<Sound>} A Promise which resolves once playback has started (excluding fade)
*/
async play(options={}) {
// Signal our intention to start immediately
const {STATES} = Sound;
if ( ![STATES.LOADED, STATES.PAUSED, STATES.STOPPED].includes(this._state) ) return this;
this._state = STATES.STARTING;
// Wait for another ongoing operation
if ( this.#operation ) {
await this.#operation;
return this.play(options);
}
// Configure options
if ( typeof options === "number" ) {
options = {offset: options};
if ( arguments[1] instanceof Function ) options.onended = arguments[1];
foundry.utils.logCompatibilityWarning("Sound#play now takes an object of playback options instead of "
+ "positional arguments.", {since: 12, until: 14});
}
// Queue playback
try {
this.#operation = this.#queuePlay(options);
await this.#operation;
this._state = STATES.PLAYING;
} finally {
this.#operation = undefined;
}
return this;
}
/* -------------------------------------------- */
/**
* An inner method that is wrapped in an enqueued promise. See {@link Sound#play}.
*/
async #queuePlay(options={}) {
// Configure playback
this.#configurePlayback(options);
const {delay, fade, offset, volume} = this.#playback;
// Create the audio pipeline including gainNode and sourceNode used for playback
this._createNodes();
this._connectPipeline();
// Delay playback start
if ( delay ) {
await this.wait(delay * 1000);
if ( this._state !== Sound.STATES.STARTING ) return; // We may no longer be starting if the delay was cancelled
}
// Begin playback
this._play();
// Record state change
this.startTime = this.#context.currentTime - offset;
this.pausedTime = undefined;
// Set initial volume
this.volume = fade ? 0 : volume;
if ( fade ) this.fade(volume, {duration: fade});
this.#onStart();
}
/* -------------------------------------------- */
/**
* Begin playback for the configured pipeline and playback options.
* This method is factored out so that subclass implementations of Sound can implement alternative behavior.
* @protected
*/
_play() {
const {loop, loopStart, loopEnd, offset, duration} = this.#playback;
if ( this.buffer ) {
this.#bufferNode.loop = loop;
if ( loop && Number.isFinite(loopStart) && Number.isFinite(loopEnd) ) {
this.#bufferNode.loopStart = loopStart;
this.#bufferNode.loopEnd = loopEnd;
}
this.#bufferNode.onended = this.#onEnd.bind(this);
this.#bufferNode.start(0, offset, duration);
}
else if ( this.element ) {
this.element.loop = loop;
this.element.currentTime = offset;
this.element.onended = this.#onEnd.bind(this);
this.element.play();
}
game.audio.debug(`Beginning playback of Sound "${this.src}"`);
}
/* -------------------------------------------- */
/**
* Pause playback of the Sound.
* For AudioBufferSourceNode this stops playback after recording the current time.
* Calling Sound#play will resume playback from the pausedTime unless some other offset is passed.
* For a MediaElementAudioSourceNode this simply calls the HTMLAudioElement#pause method directly.
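* @example Pause and later resume playback (an illustrative sketch; a playing Sound instance is assumed)
* ```js
* sound.pause();       // Records pausedTime and halts output
* await sound.play();  // Resumes playback from the recorded pausedTime
* ```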
*/
pause() {
const {STATES} = Sound;
if ( this._state !== STATES.PLAYING ) {
throw new Error("You may only call Sound#pause for a Sound which is PLAYING");
}
this._pause();
this.pausedTime = this.currentTime;
this._state = STATES.PAUSED;
this.#onPause();
}
/* -------------------------------------------- */
/**
* Pause playback of the Sound.
* This method is factored out so that subclass implementations of Sound can implement alternative behavior.
* @protected
*/
_pause() {
if ( this.isBuffer ) {
this.#bufferNode.onended = undefined;
this.#bufferNode.stop(0);
}
else this.element.pause();
game.audio.debug(`Pausing playback of Sound "${this.src}"`);
}
/* -------------------------------------------- */
/**
* Stop playback for the Sound.
* This method is asynchronous because playback may not stop until after an initially provided delay.
* The Promise resolves *after* the fade-out of any configured volume transition.
* @param {SoundPlaybackOptions} [options] Options which configure the stopping of sound playback
* @returns {Promise<Sound>} A Promise which resolves once playback is fully stopped (including fade)
*/
async stop(options={}) {
// Signal our intention to stop immediately
if ( !this.playing ) return this;
this._state = Sound.STATES.STOPPING;
this.#delay?.cancel();
// Wait for another operation to conclude
if ( this.#operation ) {
await this.#operation;
return this.stop(options);
}
// Queue stop
try {
this.#operation = this.#queueStop(options);
await this.#operation;
this._state = Sound.STATES.STOPPED;
} finally {
this.#operation = undefined;
}
return this;
}
/* -------------------------------------------- */
/**
* An inner method that is wrapped in an enqueued promise. See {@link Sound#stop}.
*/
async #queueStop(options) {
// Immediately disconnect the onended callback
if ( this.#bufferNode ) this.#bufferNode.onended = undefined;
if ( this.#mediaNode ) this.element.onended = undefined;
// Configure playback settings
this.#configurePlayback(options);
const {delay, fade, volume} = this.#playback;
// Fade out
if ( fade ) await this.fade(volume, {duration: fade});
else this.volume = volume;
// Stop playback
if ( delay ) {
await this.wait(delay * 1000);
if ( this._state !== Sound.STATES.STOPPING ) return; // We may no longer be stopping if the delay was cancelled
}
this._stop();
// Disconnect the audio pipeline
this._disconnectPipeline();
// Record state changes
this.#bufferNode = this.#mediaNode = undefined;
this.startTime = this.pausedTime = undefined;
this.#onStop();
}
/* -------------------------------------------- */
/**
* Stop playback of the Sound.
* This method is factored out so that subclass implementations of Sound can implement alternative behavior.
* @protected
*/
_stop() {
this.gain.cancelScheduledValues(this.context.currentTime);
if ( this.buffer && this.sourceNode && (this._state === Sound.STATES.PLAYING) ) this.#bufferNode.stop(0);
else if ( this.element ) {
Sound.#unloadAudioElement(this.element);
this.element = null;
}
game.audio.debug(`Stopping playback of Sound "${this.src}"`);
}
/* -------------------------------------------- */
/**
* Fade the volume for this sound between its current level and a desired target volume.
* @param {number} volume The desired target volume level between 0 and 1
* @param {object} [options={}] Additional options that configure the fade operation
* @param {number} [options.duration=1000] The duration of the fade effect in milliseconds
* @param {number} [options.from] A volume level to start from, the current volume by default
* @param {string} [options.type=linear] The type of fade easing, "linear" or "exponential"
* @returns {Promise<void>} A Promise that resolves after the requested fade duration
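* @example Fade a playing sound to half volume over two seconds (an illustrative sketch)
* ```js
* await sound.fade(0.5, {duration: 2000, type: "linear"});
* ```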
*/
async fade(volume, {duration=1000, from, type="linear"}={}) {
if ( !this.gain ) return;
const ramp = this.gain[`${type}RampToValueAtTime`];
if ( !ramp ) throw new Error(`Invalid fade type ${type} requested`);
// Cancel any other ongoing transitions
const startTime = this.#context.currentTime;
this.gain.cancelScheduledValues(startTime);
// Immediately schedule the starting volume
from ??= this.gain.value;
this.gain.setValueAtTime(from, startTime);
// Ramp to target volume
ramp.call(this.gain, volume, startTime + (duration / 1000));
// Wait for the transition
if ( volume !== from ) await this.wait(duration);
}
/* -------------------------------------------- */
/**
* Wait a certain scheduled duration within this sound's own AudioContext.
* @param {number} duration The duration to wait in milliseconds
* @returns {Promise<void>} A promise which resolves after the waited duration
*/
async wait(duration) {
this.#delay = new AudioTimeout(duration, {context: this.#context});
await this.#delay.complete;
this.#delay = undefined;
}
/* -------------------------------------------- */
/**
* Schedule a function to occur at the next occurrence of a specific playbackTime for this Sound.
* @param {SoundScheduleCallback} fn A function that will be called with this Sound as its single argument
* @param {number} playbackTime The desired playback time at which the function should be called
* @returns {Promise<any>} A Promise which resolves to the returned value of the provided function once
* it has been evaluated.
*
* @example Schedule audio playback changes
* ```js
* sound.schedule(() => console.log("Do something exactly 30 seconds into the track"), 30);
* sound.schedule(() => console.log("Do something next time the track loops back to the beginning"), 0);
* sound.schedule(() => console.log("Do something 5 seconds before the end of the track"), sound.duration - 5);
* ```
*/
async schedule(fn, playbackTime) {
// Determine the amount of time until the next occurrence of playbackTime
const {currentTime, duration} = this;
playbackTime = Math.clamp(playbackTime, 0, duration);
if ( this.#playback.loop && Number.isFinite(duration) ) {
while ( playbackTime < currentTime ) playbackTime += duration;
}
const deltaMS = Math.max(0, (playbackTime - currentTime) * 1000);
// Wait for an AudioTimeout completion then invoke the scheduled function
const timeout = new AudioTimeout(deltaMS, {context: this.#context});
this.#scheduledEvents.add(timeout);
try {
await timeout.complete;
return fn(this);
}
catch {}
finally {
this.#scheduledEvents.delete(timeout);
}
}
/* -------------------------------------------- */
/**
* Update the array of effects applied to a Sound instance.
* Optionally a new array of effects can be assigned. If no effects are passed, the current effects are re-applied.
* @param {AudioNode[]} [effects] An array of AudioNode effects to apply
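* @example Apply a highpass filter, then clear all effects (an illustrative sketch; the playing Sound instance is assumed)
* ```js
* sound.applyEffects([new foundry.audio.BiquadFilterEffect(sound.context, {type: "highpass"})]);
* sound.applyEffects([]); // Remove all effects
* ```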
*/
applyEffects(effects) {
if ( Array.isArray(effects) ) this.effects = effects;
this._disconnectPipeline();
this._connectPipeline();
game.audio.debug(`Applied effects to Sound "${this.src}": ${this.effects.map(e => e.constructor.name)}`);
}
/* -------------------------------------------- */
/* Playback Events */
/* -------------------------------------------- */
/**
* Additional workflows when playback of the Sound begins.
*/
#onStart() {
game.audio.playing.set(this.id, this); // Track playing sounds
this.dispatchEvent(new Event("play"));
}
/* -------------------------------------------- */
/**
* Additional workflows when playback of the Sound is paused.
*/
#onPause() {
this.#cancelScheduledEvents();
this.dispatchEvent(new Event("pause"));
}
/* -------------------------------------------- */
/**
* Additional workflows when playback of the Sound concludes.
* This is called by the AudioNode#onended callback.
*/
async #onEnd() {
await this.stop();
this.#playback.onended?.(this);
this.dispatchEvent(new Event("end"));
}
/* -------------------------------------------- */
/**
* Additional workflows when playback of the Sound is stopped, either manually or by concluding its playback.
*/
#onStop() {
game.audio.playing.delete(this.id);
this.#cancelScheduledEvents();
this.dispatchEvent(new Event("stop"));
}
/* -------------------------------------------- */
/* Helper Methods */
/* -------------------------------------------- */
/**
* Create an HTML5 Audio element which has loaded the metadata for the provided source.
* @returns {Promise<HTMLAudioElement>} A created HTML Audio element
* @throws {Error} An error if audio element creation failed
*/
async #createAudioElement() {
game.audio.debug(`Loading audio element "${this.src}"`);
return new Promise((resolve, reject) => {
const element = new Audio();
element.autoplay = false;
element.crossOrigin = "anonymous";
element.preload = "metadata";
element.onloadedmetadata = () => resolve(element);
element.onerror = () => reject(new Error(`Failed to load audio element "${this.src}"`));
element.src = this.src;
});
}
/* -------------------------------------------- */
/**
* Safely unload an HTML audio element, releasing its media resources.
* @param {HTMLAudioElement} element The audio element to unload
*/
static #unloadAudioElement(element) {
element.onended = undefined;
element.pause();
element.src = "";
element.remove();
}
/* -------------------------------------------- */
/**
* Load an audio file and decode it to create an AudioBuffer.
* @returns {Promise<AudioBuffer>} A created AudioBuffer
* @throws {Error} An error if buffer creation failed
*/
async #createAudioBuffer() {
game.audio.debug(`Loading audio buffer "${this.src}"`);
try {
const response = await foundry.utils.fetchWithTimeout(this.src);
const arrayBuffer = await response.arrayBuffer();
return this.#context.decodeAudioData(arrayBuffer);
} catch(err) {
err.message = `Failed to load audio buffer "${this.src}"`;
throw err;
}
}
/* -------------------------------------------- */
/**
* Create any AudioNode instances required for playback of this Sound.
* @protected
*/
_createNodes() {
this.gainNode ||= this.#context.createGain();
this.destination ||= (this.#context.gainNode ?? this.#context.destination); // Prefer a context gain if present
const {buffer, element: mediaElement} = this;
if ( buffer ) this.#bufferNode = new AudioBufferSourceNode(this.#context, {buffer});
else if ( mediaElement ) this.#mediaNode = new MediaElementAudioSourceNode(this.#context, {mediaElement});
}
/* -------------------------------------------- */
/**
* Create the audio pipeline used to play this Sound.
* The GainNode is reused each time to link volume changes across multiple playbacks.
* The AudioSourceNode is re-created every time that Sound#play is called.
* @protected
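* @example A sketch of the resulting chain, assuming the effect-applying method whose tail appears earlier in
* this class and an illustrative filter instance
* ```js
* const filter = new foundry.audio.BiquadFilterEffect(sound.context, {type: "lowpass"});
* sound.applyEffects([filter]);
* // Resulting chain: sourceNode -> BiquadFilterEffect -> gainNode -> destination
* ```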
*/
_connectPipeline() {
if ( !this.sourceNode ) return;
this.#pipeline.length = 0;
// Start with the sourceNode
let node = this.sourceNode;
this.#pipeline.push(node);
// Connect effect nodes
for ( const effect of this.effects ) {
node.connect(effect);
effect.onConnectFrom?.(node); // Special behavior to inform the effect node it has been connected
node = effect;
this.#pipeline.push(effect);
}
// End with the gainNode
node.connect(this.gainNode);
this.#pipeline.push(this.gainNode);
this.gainNode.connect(this.destination);
}
/* -------------------------------------------- */
/**
* Disconnect the audio pipeline once playback is stopped.
* Walk backwards along the Sound##pipeline from the Sound#destination, disconnecting each node.
* @protected
*/
_disconnectPipeline() {
for ( let i=this.#pipeline.length-1; i>=0; i-- ) {
const node = this.#pipeline[i];
node.disconnect();
}
}
/* -------------------------------------------- */
/**
* Configure playback parameters for the Sound.
* @param {SoundPlaybackOptions} [options] Provided playback options
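* @example How options persist or reset across calls (a sketch of the merge semantics implemented below)
* ```js
* await sound.play({loop: true, volume: 0.5, fade: 500});
* // On a later call: loop and volume persist, while delay, offset, duration, and fade reset
* await sound.play({offset: 10});
* ```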
*/
#configurePlayback({delay, duration, fade, loop, loopStart, loopEnd, offset, onended, volume}={}) {
// Some playback options only update if they are explicitly passed
this.#playback.loop = loop ?? this.#playback.loop;
this.#playback.loopStart = loopStart ?? this.#playback.loopStart;
this.#playback.loopEnd = loopEnd ?? this.#playback.loopEnd;
this.#playback.volume = volume ?? this.#playback.volume;
this.#playback.onended = onended !== undefined ? onended : this.#playback.onended;
// Determine playback offset and duration timing
const loopTime = (this.#playback.loopEnd ?? Infinity) - this.#playback.loopStart;
// Starting offset
offset ??= this.#playback.loopStart;
if ( Number.isFinite(this.pausedTime) ) offset += this.pausedTime;
// Loop forever
if ( this.#playback.loop ) duration ??= undefined;
// Play once
else if ( Number.isFinite(loopTime) ) {
offset = Math.clamp(offset, this.#playback.loopStart, this.#playback.loopEnd);
duration ??= loopTime;
duration = Math.min(duration, loopTime);
}
// Some playback options reset unless they are explicitly passed
this.#playback.delay = delay ?? 0;
this.#playback.offset = offset;
this.#playback.duration = duration;
this.#playback.fade = fade ?? 0;
}
/* -------------------------------------------- */
/**
* Cancel any scheduled events which have not yet occurred.
*/
#cancelScheduledEvents() {
for ( const timeout of this.#scheduledEvents ) timeout.cancel();
this.#scheduledEvents.clear();
}
/* -------------------------------------------- */
/* Deprecations and Compatibility */
/* -------------------------------------------- */
/**
* @deprecated since v12
* @ignore
*/
static get LOAD_STATES() {
foundry.utils.logCompatibilityWarning("AudioContainer.LOAD_STATES is deprecated in favor of Sound.STATES",
{since: 12, until: 14});
return this.STATES;
}
/**
* @deprecated since v12
* @ignore
*/
get loadState() {
foundry.utils.logCompatibilityWarning("AudioContainer#loadState is deprecated in favor of Sound#_state",
{since: 12, until: 14});
return this._state;
}
/**
* @deprecated since v12
* @ignore
*/
get container() {
foundry.utils.logCompatibilityWarning("Sound#container is deprecated without replacement because the Sound and "
+ "AudioContainer classes are now merged", {since: 12, until: 14});
return this;
}
/**
* @deprecated since v12
* @ignore
*/
get node() {
foundry.utils.logCompatibilityWarning("Sound#node is renamed Sound#sourceNode", {since: 12, until: 14});
return this.sourceNode;
}
/**
* @deprecated since v12
* @ignore
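* @example Migrating from the deprecated API to the standard EventTarget methods (a sketch)
* ```js
* sound.on("end", onEnd); // Deprecated since v12
* sound.addEventListener("end", onEnd); // Preferred replacement
* ```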
*/
on(eventName, fn, {once=false}={}) {
foundry.utils.logCompatibilityWarning("Sound#on is deprecated in favor of Sound#addEventListener",
{since: 12, until: 14});
return this.addEventListener(eventName, fn, {once});
}
/**
* @deprecated since v12
* @ignore
*/
off(eventName, fn) {
foundry.utils.logCompatibilityWarning("Sound#off is deprecated in favor of Sound#removeEventListener",
{since: 12, until: 14});
return this.removeEventListener(eventName, fn);
}
/**
* @deprecated since v12
* @ignore
*/
emit(eventName) {
foundry.utils.logCompatibilityWarning("Sound#emit is deprecated in favor of Sound#dispatchEvent",
{since: 12, until: 14});
const event = new Event(eventName, {cancelable: true});
return this.dispatchEvent(event);
}
}

View File

@@ -0,0 +1,147 @@
/**
* @typedef {Object} AudioTimeoutOptions
* @property {AudioContext} [context]
* @property {function(): any} [callback]
*/
/**
* A special error class used for cancellation.
*/
class AudioTimeoutCancellation extends Error {}
/**
* A framework for scheduled audio events with more precise and synchronized timing than using window.setTimeout.
* This approach creates an empty audio buffer of the desired duration played using the shared game audio context.
* The onended event of the AudioBufferSourceNode provides a very precise way to synchronize audio events.
* For audio timing, this is preferable because it avoids the throttling and timing drift that affect window.setTimeout.
*
* @example Using a callback function
* ```js
* function playForDuration(sound, duration) {
* sound.play();
* const wait = new AudioTimeout(duration, {callback: () => sound.stop()});
* }
* ```
*
* @example Using an awaited Promise
* ```js
* async function playForDuration(sound, duration) {
* sound.play();
* const timeout = new AudioTimeout(duration);
* await timeout.complete;
* sound.stop();
* }
* ```
*
* @example Using the wait helper
* ```js
* async function playForDuration(sound, duration) {
* sound.play();
* await AudioTimeout.wait(duration);
* sound.stop();
* }
* ```
*/
export default class AudioTimeout {
/**
* Create an AudioTimeout by providing a delay and callback.
* @param {number} delayMS A desired delay timing in milliseconds
* @param {AudioTimeoutOptions} [options] Additional options which modify timeout behavior
*/
constructor(delayMS, {callback, context}={}) {
if ( typeof delayMS !== "number" ) throw new Error("Numeric timeout duration must be provided");
this.#callback = callback;
this.complete = new Promise((resolve, reject) => {
this.#resolve = resolve;
this.#reject = reject;
// Immediately evaluated
if ( delayMS <= 0 ) return this.end();
// Create and play a blank AudioBuffer of the desired delay duration
context ||= game.audio.music;
const seconds = delayMS / 1000;
const sampleRate = context.sampleRate;
const buffer = new AudioBuffer({length: seconds * sampleRate, sampleRate});
this.#sourceNode = new AudioBufferSourceNode(context, {buffer});
this.#sourceNode.onended = this.end.bind(this);
this.#sourceNode.start();
})
// The promise may get cancelled
.catch(err => {
if ( err instanceof AudioTimeoutCancellation ) return;
throw err;
});
}
/**
* Is the timeout complete?
* This can be used to await the completion of the AudioTimeout if necessary.
* The Promise resolves to the returned value of the provided callback function.
* @type {Promise<*>}
*/
complete;
/**
* The resolution function for the wrapping Promise.
* @type {Function}
*/
#resolve;
/**
* The rejection function for the wrapping Promise.
* @type {Function}
*/
#reject;
/**
* A scheduled callback function
* @type {Function}
*/
#callback;
/**
* The source node used to maintain the timeout
* @type {AudioBufferSourceNode}
*/
#sourceNode;
/* -------------------------------------------- */
/**
* Cancel an AudioTimeout by ending it early, rejecting its completion promise, and skipping any callback function.
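* @example Cancellation resolves the completion Promise to undefined rather than rejecting (a sketch)
* ```js
* const timeout = new AudioTimeout(1000, {callback: () => "done"});
* timeout.cancel();
* const result = await timeout.complete; // undefined; the callback never runs
* ```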
*/
cancel() {
if ( !this.#reject ) return;
const reject = this.#reject;
this.#resolve = this.#reject = undefined;
reject(new AudioTimeoutCancellation("AudioTimeout cancelled"));
this.#sourceNode.onended = null;
this.#sourceNode.stop();
}
/* -------------------------------------------- */
/**
* End the timeout, either on schedule or prematurely, executing any scheduled callback function.
*/
end() {
const resolve = this.#resolve;
this.#resolve = this.#reject = undefined;
resolve(this.#callback?.());
}
/* -------------------------------------------- */
/**
* Schedule a task according to some audio timeout.
* @param {number} delayMS A desired delay timing in milliseconds
* @param {AudioTimeoutOptions} [options] Additional options which modify timeout behavior
* @returns {Promise<void|any>} A promise which resolves to the return value of the callback, or void if no
*                              callback was provided
*/
static async wait(delayMS, options) {
const timeout = new this(delayMS, options);
return timeout.complete;
}
}