Initial
This commit is contained in:
264
resources/app/client/av/client.js
Normal file
264
resources/app/client/av/client.js
Normal file
@@ -0,0 +1,264 @@
|
||||
/**
 * An interface for an Audio/Video client which is extended to provide broadcasting functionality.
 * @interface
 * @param {AVMaster} master           The master orchestration instance
 * @param {AVSettings} settings       The audio/video settings being used
 */
class AVClient {
  constructor(master, settings) {

    /**
     * The master orchestration instance
     * @type {AVMaster}
     */
    this.master = master;

    /**
     * The active audio/video settings being used
     * @type {AVSettings}
     */
    this.settings = settings;
  }

  /* -------------------------------------------- */

  /**
   * Is audio broadcasting push-to-talk enabled?
   * @returns {boolean}
   */
  get isVoicePTT() {
    return this.settings.client.voice.mode === "ptt";
  }

  /**
   * Is audio broadcasting always enabled?
   * @returns {boolean}
   */
  get isVoiceAlways() {
    return this.settings.client.voice.mode === "always";
  }

  /**
   * Is audio broadcasting voice-activation enabled?
   * @returns {boolean}
   */
  get isVoiceActivated() {
    return this.settings.client.voice.mode === "activity";
  }

  /**
   * Is the current user muted?
   * Note: yields undefined (falsy) when the current user has no per-user settings entry yet.
   * @returns {boolean}
   */
  get isMuted() {
    return this.settings.client.users[game.user.id]?.muted;
  }

  /* -------------------------------------------- */
  /*  Connection                                  */
  /* -------------------------------------------- */

  /**
   * One-time initialization actions that should be performed for this client implementation.
   * This will be called only once when the Game object is first set-up.
   * @returns {Promise<void>}
   */
  async initialize() {
    throw Error("The initialize() method must be defined by an AVClient subclass.");
  }

  /* -------------------------------------------- */

  /**
   * Connect to any servers or services needed in order to provide audio/video functionality.
   * Any parameters needed in order to establish the connection should be drawn from the settings object.
   * This function should return a boolean for whether the connection attempt was successful.
   * @returns {Promise<boolean>}      Was the connection attempt successful?
   */
  async connect() {
    throw Error("The connect() method must be defined by an AVClient subclass.");
  }

  /* -------------------------------------------- */

  /**
   * Disconnect from any servers or services which are used to provide audio/video functionality.
   * This function should return a boolean for whether a valid disconnection occurred.
   * @returns {Promise<boolean>}      Did a disconnection occur?
   */
  async disconnect() {
    throw Error("The disconnect() method must be defined by an AVClient subclass.");
  }

  /* -------------------------------------------- */
  /*  Device Discovery                            */
  /* -------------------------------------------- */

  /**
   * Provide an Object of available audio sinks (output devices) which can be used by this implementation.
   * Each object key should be a device id and the value should be a human-readable label.
   * @returns {Promise<object>}
   */
  async getAudioSinks() {
    return this._getSourcesOfType("audiooutput");
  }

  /* -------------------------------------------- */

  /**
   * Provide an Object of available audio sources which can be used by this implementation.
   * Each object key should be a device id and the value should be a human-readable label.
   * @returns {Promise<object>}
   */
  async getAudioSources() {
    return this._getSourcesOfType("audioinput");
  }

  /* -------------------------------------------- */

  /**
   * Provide an Object of available video sources which can be used by this implementation.
   * Each object key should be a device id and the value should be a human-readable label.
   * @returns {Promise<object>}
   */
  async getVideoSources() {
    return this._getSourcesOfType("videoinput");
  }

  /* -------------------------------------------- */

  /**
   * Obtain a mapping of available device sources for a given type.
   * Returns an empty object when the browser does not expose mediaDevices (e.g. insecure context).
   * @param {string} kind       The type of device source being requested
   * @returns {Promise<object>}
   * @private
   */
  async _getSourcesOfType(kind) {
    if ( !("mediaDevices" in navigator) ) return {};
    const devices = await navigator.mediaDevices.enumerateDevices();
    return devices.reduce((obj, device) => {
      if ( device.kind === kind ) {
        // Device labels are blank until the user has granted media permission; fall back to a localized placeholder
        obj[device.deviceId] = device.label || game.i18n.localize("WEBRTC.UnknownDevice");
      }
      return obj;
    }, {});
  }

  /* -------------------------------------------- */
  /*  Track Manipulation                          */
  /* -------------------- */

  /**
   * Return an array of Foundry User IDs which are currently connected to A/V.
   * The current user should also be included as a connected user in addition to all peers.
   * @returns {string[]}        The connected User IDs
   */
  getConnectedUsers() {
    throw Error("The getConnectedUsers() method must be defined by an AVClient subclass.");
  }

  /* -------------------------------------------- */

  /**
   * Provide a MediaStream instance for a given user ID
   * @param {string} userId     The User id
   * @returns {MediaStream|null} The MediaStream for the user, or null if the user does not have one
   */
  getMediaStreamForUser(userId) {
    throw Error("The getMediaStreamForUser() method must be defined by an AVClient subclass.");
  }

  /* -------------------------------------------- */

  /**
   * Provide a MediaStream for monitoring a given user's voice volume levels.
   * @param {string} userId     The User ID.
   * @returns {MediaStream|null} The MediaStream for the user, or null if the user does not have one.
   */
  getLevelsStreamForUser(userId) {
    // Error message normalized to match the style used by every other abstract stub in this class
    throw Error("The getLevelsStreamForUser() method must be defined by an AVClient subclass.");
  }

  /* -------------------------------------------- */

  /**
   * Is outbound audio enabled for the current user?
   * @returns {boolean}
   */
  isAudioEnabled() {
    throw Error("The isAudioEnabled() method must be defined by an AVClient subclass.");
  }

  /* -------------------------------------------- */

  /**
   * Is outbound video enabled for the current user?
   * @returns {boolean}
   */
  isVideoEnabled() {
    throw Error("The isVideoEnabled() method must be defined by an AVClient subclass.");
  }

  /* -------------------------------------------- */

  /**
   * Set whether the outbound audio feed for the current game user is enabled.
   * This method should be used when the user marks themselves as muted or if the gamemaster globally mutes them.
   * @param {boolean} enable    Whether the outbound audio track should be enabled (true) or disabled (false)
   */
  toggleAudio(enable) {
    throw Error("The toggleAudio() method must be defined by an AVClient subclass.");
  }

  /* -------------------------------------------- */

  /**
   * Set whether the outbound audio feed for the current game user is actively broadcasting.
   * This can only be true if audio is enabled, but may be false if using push-to-talk or voice activation modes.
   * @param {boolean} broadcast  Whether outbound audio should be sent to connected peers or not?
   */
  toggleBroadcast(broadcast) {
    throw Error("The toggleBroadcast() method must be defined by an AVClient subclass.");
  }

  /* -------------------------------------------- */

  /**
   * Set whether the outbound video feed for the current game user is enabled.
   * This method should be used when the user marks themselves as hidden or if the gamemaster globally hides them.
   * @param {boolean} enable    Whether the outbound video track should be enabled (true) or disabled (false)
   */
  toggleVideo(enable) {
    throw Error("The toggleVideo() method must be defined by an AVClient subclass.");
  }

  /* -------------------------------------------- */

  /**
   * Set the Video Track for a given User ID to a provided VideoElement
   * @param {string} userId                   The User ID to set to the element
   * @param {HTMLVideoElement} videoElement   The HTMLVideoElement to which the video should be set
   */
  async setUserVideo(userId, videoElement) {
    throw Error("The setUserVideo() method must be defined by an AVClient subclass.");
  }

  /* -------------------------------------------- */
  /*  Settings and Configuration                  */
  /* -------------------------------------------- */

  /**
   * Handle changes to A/V configuration settings.
   * The base implementation is a no-op; subclasses may react to relevant setting keys.
   * @param {object} changed    The settings which have changed
   */
  onSettingsChanged(changed) {}

  /* -------------------------------------------- */

  /**
   * Replace the local stream for each connected peer with a re-generated MediaStream.
   */
  async updateLocalStream() {
    throw Error("The updateLocalStream() method must be defined by an AVClient subclass.");
  }
}
|
||||
499
resources/app/client/av/clients/simplepeer.js
Normal file
499
resources/app/client/av/clients/simplepeer.js
Normal file
@@ -0,0 +1,499 @@
|
||||
/**
 * An implementation of the AVClient which uses the simple-peer library and the Foundry socket server for signaling.
 * Credit to bekit#4213 for identifying simple-peer as a viable technology and providing a POC implementation.
 * @extends {AVClient}
 */
class SimplePeerAVClient extends AVClient {

  /**
   * The local Stream which captures input video and audio
   * @type {MediaStream}
   */
  localStream = null;

  /**
   * The dedicated audio stream used to measure volume levels for voice activity detection.
   * @type {MediaStream}
   */
  levelsStream = null;

  /**
   * A mapping of connected peers, keyed by Foundry User ID
   * @type {Map}
   */
  peers = new Map();

  /**
   * A mapping of connected remote streams, keyed by Foundry User ID
   * @type {Map}
   */
  remoteStreams = new Map();

  /**
   * Has the client been successfully initialized?
   * @type {boolean}
   * @private
   */
  _initialized = false;

  /**
   * Is outbound broadcast of local audio enabled?
   * @type {boolean}
   */
  audioBroadcastEnabled = false;

  /**
   * The polling interval ID for connected users that might have unexpectedly dropped out of our peer network.
   * @type {number|null}
   */
  _connectionPoll = null;

  /* -------------------------------------------- */
  /*  Required AVClient Methods                   */
  /* -------------------------------------------- */

  /** @override */
  async connect() {
    await this._connect();
    // Clear any prior poller before registering a new one so only a single interval is ever active
    clearInterval(this._connectionPoll);
    this._connectionPoll = setInterval(this._connect.bind(this), CONFIG.WebRTC.connectedUserPollIntervalS * 1000);
    return true;
  }

  /* -------------------------------------------- */

  /**
   * Try to establish a peer connection with each user connected to the server.
   * @private
   */
  _connect() {
    const promises = [];
    for ( let user of game.users ) {
      if ( user.isSelf || !user.active ) continue;
      promises.push(this.initializePeerStream(user.id));
    }
    return Promise.all(promises);
  }

  /* -------------------------------------------- */

  /** @override */
  async disconnect() {
    // Stop polling for dropped peers before tearing down existing connections
    clearInterval(this._connectionPoll);
    this._connectionPoll = null;
    await this.disconnectAll();
    return true;
  }

  /* -------------------------------------------- */

  /** @override */
  async initialize() {
    if ( this._initialized ) return;
    console.debug(`Initializing SimplePeer client connection`);

    // Initialize the local stream
    await this.initializeLocalStream();

    // Set up socket listeners
    this.activateSocketListeners();

    // Register callback to close peer connections when the window is closed
    window.addEventListener("beforeunload", ev => this.disconnectAll());

    // Flag the client as initialized
    this._initialized = true;
  }

  /* -------------------------------------------- */

  /** @override */
  getConnectedUsers() {
    // Map keys are already iterable; spread them directly rather than via Array.from
    return [...this.peers.keys(), game.userId];
  }

  /* -------------------------------------------- */

  /** @override */
  getMediaStreamForUser(userId) {
    return userId === game.user.id ? this.localStream : this.remoteStreams.get(userId);
  }

  /* -------------------------------------------- */

  /** @override */
  getLevelsStreamForUser(userId) {
    // Remote users are monitored via their main stream; only the local user has a dedicated levels stream
    return userId === game.userId ? this.levelsStream : this.getMediaStreamForUser(userId);
  }

  /* -------------------------------------------- */

  /** @override */
  isAudioEnabled() {
    return !!this.localStream?.getAudioTracks().length;
  }

  /* -------------------------------------------- */

  /** @override */
  isVideoEnabled() {
    return !!this.localStream?.getVideoTracks().length;
  }

  /* -------------------------------------------- */

  /** @override */
  toggleAudio(enabled) {
    const stream = this.localStream;
    if ( !stream ) return;

    // If "always on" broadcasting is not enabled, don't proceed
    if ( !this.audioBroadcastEnabled || this.isVoicePTT ) return;

    // Enable active broadcasting
    return this.toggleBroadcast(enabled);
  }

  /* -------------------------------------------- */

  /** @override */
  toggleBroadcast(enabled) {
    const stream = this.localStream;
    if ( !stream ) return;
    console.debug(`[SimplePeer] Toggling broadcast of outbound audio: ${enabled}`);
    this.audioBroadcastEnabled = enabled;
    for ( let t of stream.getAudioTracks() ) {
      t.enabled = enabled;
    }
  }

  /* -------------------------------------------- */

  /** @override */
  toggleVideo(enabled) {
    const stream = this.localStream;
    if ( !stream ) return;
    console.debug(`[SimplePeer] Toggling broadcast of outbound video: ${enabled}`);
    for ( let t of stream.getVideoTracks() ) {
      t.enabled = enabled;
    }
  }

  /* -------------------------------------------- */

  /** @override */
  async setUserVideo(userId, videoElement) {
    const stream = this.getMediaStreamForUser(userId);

    // Set the stream as the video element source
    if ("srcObject" in videoElement) videoElement.srcObject = stream;
    else videoElement.src = window.URL.createObjectURL(stream); // for older browsers

    // Forward volume to the configured audio sink
    if ( videoElement.sinkId === undefined ) {
      return console.warn(`[SimplePeer] Your web browser does not support output audio sink selection`);
    }
    const requestedSink = this.settings.get("client", "audioSink");
    await videoElement.setSinkId(requestedSink).catch(err => {
      // Include the underlying error for diagnosability, but do not rethrow: a failed
      // audio sink selection should not prevent the video assignment from succeeding
      console.warn(`[SimplePeer] An error occurred when requesting the output audio device: ${requestedSink}`, err);
    });
  }

  /* -------------------------------------------- */
  /*  Local Stream Management                     */
  /* -------------------------------------------- */

  /**
   * Initialize a local media stream for the current user
   * @returns {Promise<MediaStream>}
   */
  async initializeLocalStream() {
    console.debug(`[SimplePeer] Initializing local media stream for current User`);

    // If there is already an existing local media stream, terminate it
    if ( this.localStream ) this.localStream.getTracks().forEach(t => t.stop());
    this.localStream = null;

    if ( this.levelsStream ) this.levelsStream.getTracks().forEach(t => t.stop());
    this.levelsStream = null;

    // Determine whether the user can send audio
    const audioSrc = this.settings.get("client", "audioSrc");
    const canBroadcastAudio = this.master.canUserBroadcastAudio(game.user.id);
    const audioParams = (audioSrc && (audioSrc !== "disabled") && canBroadcastAudio) ? {
      deviceId: { ideal: audioSrc }
    } : false;

    // Configure whether the user can send video
    const videoSrc = this.settings.get("client", "videoSrc");
    const canBroadcastVideo = this.master.canUserBroadcastVideo(game.user.id);
    const videoParams = (videoSrc && (videoSrc !== "disabled") && canBroadcastVideo) ? {
      deviceId: { ideal: videoSrc },
      width: { ideal: 320 },
      height: { ideal: 240 }
    } : false;

    // FIXME: Firefox does not allow you to request a specific device, you can only use whatever the browser allows
    // https://bugzilla.mozilla.org/show_bug.cgi?id=1443294#c7
    // Guard against videoParams being false (video disabled) before deleting its constraint
    if ( videoParams && navigator.userAgent.match(/Firefox/) ) {
      delete videoParams.deviceId;
    }

    if ( !videoParams && !audioParams ) return null;
    let stream = await this._createMediaStream({video: videoParams, audio: audioParams});
    if ( (videoParams && audioParams) && (stream instanceof Error) ) {
      // Even if the game is set to both audio and video, the user may not have one of those devices, or they might have
      // blocked access to one of them. In those cases we do not want to prevent A/V loading entirely, so we must try
      // each of them separately to see what is available.
      if ( audioParams ) stream = await this._createMediaStream({video: false, audio: audioParams});
      if ( (stream instanceof Error) && videoParams ) {
        stream = await this._createMediaStream({video: videoParams, audio: false});
      }
    }

    if ( stream instanceof Error ) {
      const error = new Error(`[SimplePeer] Unable to acquire user media stream: ${stream.message}`);
      error.stack = stream.stack;
      console.error(error);
      return null;
    }

    // Clone the stream (audio tracks only) for voice activity monitoring so that muting the
    // broadcast tracks does not silence the levels meter
    this.localStream = stream;
    this.levelsStream = stream.clone();
    this.levelsStream.getVideoTracks().forEach(t => this.levelsStream.removeTrack(t));
    return stream;
  }

  /* -------------------------------------------- */

  /**
   * Attempt to create local media streams.
   * @param {{video: object, audio: object}} params       Parameters for the getUserMedia request.
   * @returns {Promise<MediaStream|Error>}                The created MediaStream or an error.
   * @private
   */
  async _createMediaStream(params) {
    try {
      return await navigator.mediaDevices.getUserMedia(params);
    } catch(err) {
      // Return (not throw) the error so callers can retry with relaxed constraints
      return err;
    }
  }

  /* -------------------------------------------- */
  /*  Peer Stream Management                      */
  /* -------------------------------------------- */

  /**
   * Listen for Audio/Video updates on the av socket to broker connections between peers
   */
  activateSocketListeners() {
    game.socket.on("av", (request, userId) => {
      if ( request.userId !== game.user.id ) return; // The request is not for us, this shouldn't happen
      switch ( request.action ) {
        case "peer-signal":
          if ( request.activity ) this.master.settings.handleUserActivity(userId, request.activity);
          return this.receiveSignal(userId, request.data);
        case "peer-close":
          return this.disconnectPeer(userId);
      }
    });
  }

  /* -------------------------------------------- */

  /**
   * Initialize a stream connection with a new peer
   * @param {string} userId           The Foundry user ID for which the peer stream should be established
   * @returns {Promise<SimplePeer>}   A Promise which resolves once the peer stream is initialized
   */
  async initializePeerStream(userId) {
    const peer = this.peers.get(userId);
    // NOTE: _connecting is a simple-peer internal flag — confirm against the pinned library version
    if ( peer?.connected || peer?._connecting ) return peer;
    return this.connectPeer(userId, true);
  }

  /* -------------------------------------------- */

  /**
   * Receive a request to establish a peer signal with some other User id
   * @param {string} userId           The Foundry user ID who is requesting to establish a connection
   * @param {object} data             The connection details provided by SimplePeer
   */
  receiveSignal(userId, data) {
    console.debug(`[SimplePeer] Receiving signal from User [${userId}] to establish initial connection`);
    let peer = this.peers.get(userId);
    // We are the responder here, so connect without the initiator flag
    if ( !peer ) peer = this.connectPeer(userId, false);
    peer.signal(data);
  }

  /* -------------------------------------------- */

  /**
   * Connect to a peer directly, either as the initiator or as the receiver
   * @param {string} userId           The Foundry user ID with whom we are connecting
   * @param {boolean} isInitiator     Is the current user initiating the connection, or responding to it?
   * @returns {SimplePeer}            The constructed and configured SimplePeer instance
   */
  connectPeer(userId, isInitiator=false) {

    // Create the SimplePeer instance for this connection
    const peer = this._createPeerConnection(userId, isInitiator);
    this.peers.set(userId, peer);

    // Signal to request that a remote user establish a connection with us
    peer.on("signal", data => {
      console.debug(`[SimplePeer] Sending signal to User [${userId}] to establish initial connection`);
      game.socket.emit("av", {
        action: "peer-signal",
        userId: userId,
        data: data,
        activity: this.master.settings.getUser(game.userId)
      }, {recipients: [userId]});
    });

    // Receive a stream provided by a peer
    peer.on("stream", stream => {
      console.debug(`[SimplePeer] Received media stream from User [${userId}]`);
      this.remoteStreams.set(userId, stream);
      this.master.render();
    });

    // Close a connection with a current peer
    peer.on("close", () => {
      console.debug(`[SimplePeer] Closed connection with remote User [${userId}]`);
      return this.disconnectPeer(userId);
    });

    // Handle errors
    peer.on("error", err => {
      // ERR_DATA_CHANNEL errors are routine during teardown and are not reported
      if ( err.code !== "ERR_DATA_CHANNEL" ) {
        const error = new Error(`[SimplePeer] An unexpected error occurred with User [${userId}]: ${err.message}`);
        error.stack = err.stack;
        console.error(error);
      }
      if ( peer.connected ) return this.disconnectPeer(userId);
    });

    this.master.render();
    return peer;
  }

  /* -------------------------------------------- */

  /**
   * Create the SimplePeer instance for the desired peer connection.
   * Modules may implement more advanced connection strategies by overriding this method.
   * @param {string} userId           The Foundry user ID with whom we are connecting
   * @param {boolean} isInitiator     Is the current user initiating the connection, or responding to it?
   * @private
   */
  _createPeerConnection(userId, isInitiator) {
    const options = {
      initiator: isInitiator,
      stream: this.localStream
    };

    this._setupCustomTURN(options);
    return new SimplePeer(options);
  }

  /* -------------------------------------------- */

  /**
   * Setup the custom TURN relay to be used in subsequent calls if there is one configured.
   * TURN credentials are mandatory in WebRTC.
   * @param {object} options          The SimplePeer configuration object.
   * @private
   */
  _setupCustomTURN(options) {
    const { url, type, username, password } = this.settings.world.turn;
    if ( (type !== "custom") || !url || !username || !password ) return;
    const iceServer = { username, urls: url, credential: password };
    options.config = { iceServers: [iceServer] };
  }

  /* -------------------------------------------- */

  /**
   * Disconnect from a peer by stopping current stream tracks and destroying the SimplePeer instance
   * @param {string} userId           The Foundry user ID from whom we are disconnecting
   * @returns {Promise<void>}         A Promise which resolves once the disconnection is complete
   */
  async disconnectPeer(userId) {

    // Stop audio and video tracks from the remote stream
    const remoteStream = this.remoteStreams.get(userId);
    if ( remoteStream ) {
      this.remoteStreams.delete(userId);
      for ( let track of remoteStream.getTracks() ) {
        track.stop();  // MediaStreamTrack.stop() is synchronous and returns void; no await needed
      }
    }

    // Remove the peer
    const peer = this.peers.get(userId);
    if ( peer ) {
      this.peers.delete(userId);
      await peer.destroy();
    }

    // Re-render the UI on disconnection
    this.master.render();
  }

  /* -------------------------------------------- */

  /**
   * Disconnect from all current peer streams
   * @returns {Promise<Array>}        A Promise which resolves once all peers have been disconnected
   */
  async disconnectAll() {
    const promises = [];
    for ( let userId of this.peers.keys() ) {
      promises.push(this.disconnectPeer(userId));
    }
    return Promise.all(promises);
  }

  /* -------------------------------------------- */
  /*  Settings and Configuration                  */
  /* -------------------------------------------- */

  /** @override */
  async onSettingsChanged(changed) {
    const keys = new Set(Object.keys(foundry.utils.flattenObject(changed)));

    // Change audio or video sources
    const sourceChange = ["client.videoSrc", "client.audioSrc"].some(k => keys.has(k));
    if ( sourceChange ) await this.updateLocalStream();

    // Change voice broadcasting mode
    const modeChange = ["client.voice.mode", `client.users.${game.user.id}.muted`].some(k => keys.has(k));
    if ( modeChange ) {
      const isAlways = this.settings.client.voice.mode === "always";
      this.toggleAudio(isAlways && this.master.canUserShareAudio(game.user.id));
      this.master.broadcast(isAlways);
      this.master._initializeUserVoiceDetection(changed.client.voice?.mode);
      ui.webrtc.setUserIsSpeaking(game.user.id, this.master.broadcasting);
    }

    // Re-render the AV camera view
    const renderChange = ["client.audioSink", "client.muteAll", "client.disableVideo"].some(k => keys.has(k));
    if ( sourceChange || renderChange ) this.master.render();
  }

  /* -------------------------------------------- */

  /** @inheritdoc */
  async updateLocalStream() {
    const oldStream = this.localStream;
    await this.initializeLocalStream();
    for ( let peer of this.peers.values() ) {
      if ( oldStream ) peer.removeStream(oldStream);
      if ( this.localStream ) peer.addStream(this.localStream);
    }
    // FIXME: This is a cheat, should be handled elsewhere
    this.master._initializeUserVoiceDetection(this.settings.client.voice.mode);
  }
}
|
||||
467
resources/app/client/av/master.js
Normal file
467
resources/app/client/av/master.js
Normal file
@@ -0,0 +1,467 @@
|
||||
/**
|
||||
* The master Audio/Video controller instance.
|
||||
* This is available as the singleton game.webrtc
|
||||
*
|
||||
* @param {AVSettings} settings The Audio/Video settings to use
|
||||
*/
|
||||
class AVMaster {
|
||||
constructor() {
  /**
   * The Audio/Video settings in effect for this session
   * @type {AVSettings}
   */
  this.settings = new AVSettings();

  /**
   * The A/V configuration application bound to this controller
   * @type {AVConfig}
   */
  this.config = new AVConfig(this);

  /**
   * The Audio/Video client class
   * @type {AVClient}
   */
  this.client = new CONFIG.WebRTC.clientClass(this, this.settings);

  /**
   * A flag to track whether the current user is actively broadcasting their microphone.
   * @type {boolean}
   */
  this.broadcasting = false;

  /**
   * Flag to determine if we are connected to the signalling server or not.
   * This is required for synchronization between connection and reconnection attempts.
   * @type {boolean}
   */
  this._connected = false;

  /**
   * The cached connection promise.
   * This is required to prevent re-triggering a connection while one is already in progress.
   * @type {Promise<boolean>|null}
   * @private
   */
  this._connecting = null;

  /**
   * A flag to track whether the A/V system is currently in the process of reconnecting.
   * This occurs if the connection is lost or interrupted.
   * @type {boolean}
   * @private
   */
  this._reconnecting = false;

  // Other internal flags
  // _speakingData: accumulator state used by voice-activity detection (speaking flag + recent volume samples)
  this._speakingData = {speaking: false, volumeHistories: []};
  // _pttMuteTimeout: timer handle used for push-to-talk release debouncing — presumably set elsewhere; confirm
  this._pttMuteTimeout = 0;
}
|
||||
|
||||
/* -------------------------------------------- */
|
||||
|
||||
/**
 * The currently configured world-level audio/video mode.
 * Compared against AVSettings.AV_MODES elsewhere (e.g. AV_MODES.DISABLED) — presumably a numeric enum; confirm.
 * @returns {number}
 */
get mode() {
  return this.settings.world.mode;
}
|
||||
|
||||
/* -------------------------------------------- */
|
||||
/* Initialization */
|
||||
/* -------------------------------------------- */
|
||||
|
||||
/**
 * Connect to the Audio/Video client.
 * If a connection attempt is already underway, the cached in-flight promise is returned
 * instead of starting a second attempt.
 * @return {Promise<boolean>}       Was the connection attempt successful?
 */
async connect() {
  // Re-use the in-flight attempt, if any, so concurrent callers share one connection
  if ( this._connecting ) return this._connecting;
  const connect = async () => {
    // Disconnect from any existing session
    await this.disconnect();

    // Activate the connection
    if ( this.mode === AVSettings.AV_MODES.DISABLED ) return false;

    // Initialize Client state
    await this.client.initialize();

    // Connect to the client
    const connected = await this.client.connect();
    if ( !connected ) return false;
    console.log(`${vtt} | Connected to the ${this.client.constructor.name} Audio/Video client.`);

    // Initialize local broadcasting
    this._initialize();
    return this._connected = connected;
  };

  // Cache the promise for the duration of the attempt, clearing it on settle (success or failure)
  return this._connecting = connect().finally(() => this._connecting = null);
}
|
||||
|
||||
/* -------------------------------------------- */
|
||||
|
||||
/**
|
||||
* Disconnect from the Audio/Video client.
|
||||
* @return {Promise<boolean>} Whether an existing connection was terminated?
|
||||
*/
|
||||
async disconnect() {
|
||||
if ( !this._connected ) return false;
|
||||
this._connected = this._reconnecting = false;
|
||||
await this.client.disconnect();
|
||||
console.log(`${vtt} | Disconnected from the ${this.client.constructor.name} Audio/Video client.`);
|
||||
return true;
|
||||
}
|
||||
|
||||
/* -------------------------------------------- */
|
||||
|
||||
/**
|
||||
* Callback actions to take when the user becomes disconnected from the server.
|
||||
* @return {Promise<void>}
|
||||
*/
|
||||
async reestablish() {
|
||||
if ( !this._connected ) return;
|
||||
ui.notifications.warn("WEBRTC.ConnectionLostWarning", {localize: true});
|
||||
await this.disconnect();
|
||||
|
||||
// Attempt to reconnect
|
||||
while ( this._reconnecting ) {
|
||||
await this.connect();
|
||||
if ( this._connected ) {
|
||||
this._reconnecting = true;
|
||||
break;
|
||||
}
|
||||
await new Promise(resolve => setTimeout(resolve, this._reconnectPeriodMS));
|
||||
}
|
||||
}
|
||||
|
||||
/* -------------------------------------------- */
|
||||
|
||||
/**
 * Initialize the local broadcast state.
 * Configures voice detection, resets the local speaking history, and sets the
 * initial enabled state of the outbound audio and video streams before
 * re-rendering the camera views UI.
 * @private
 */
_initialize() {
  const client = this.settings.client;
  const voiceMode = client.voice.mode;

  // Initialize voice detection
  this._initializeUserVoiceDetection(voiceMode);

  // Reset the speaking history for the user
  // (the argument is ignored; _resetSpeakingHistory always targets the local user)
  this._resetSpeakingHistory(game.user.id);

  // Set the initial state of outbound audio and video streams.
  // Audio starts enabled only in "always" mode; PTT/activity toggle it later.
  // audioSrc/videoSrc are treated as truthy device identifiers here.
  const isAlways = voiceMode === "always";
  this.client.toggleAudio(isAlways && client.audioSrc && this.canUserShareAudio(game.user.id));
  this.client.toggleVideo(client.videoSrc && this.canUserShareVideo(game.user.id));
  this.broadcast(isAlways);

  // Update the display of connected A/V
  ui.webrtc.render();
}
|
||||
|
||||
/* -------------------------------------------- */
|
||||
/* Permissions */
|
||||
/* -------------------------------------------- */
|
||||
|
||||
/**
|
||||
* A user can broadcast audio if the AV mode is compatible and if they are allowed to broadcast.
|
||||
* @param {string} userId
|
||||
* @return {boolean}
|
||||
*/
|
||||
canUserBroadcastAudio(userId) {
|
||||
if ( [AVSettings.AV_MODES.DISABLED, AVSettings.AV_MODES.VIDEO].includes(this.mode) ) return false;
|
||||
const user = this.settings.getUser(userId);
|
||||
return user && user.canBroadcastAudio;
|
||||
}
|
||||
|
||||
/* -------------------------------------------- */
|
||||
|
||||
/**
|
||||
* A user can share audio if they are allowed to broadcast and if they have not muted themselves or been blocked.
|
||||
* @param {string} userId
|
||||
* @return {boolean}
|
||||
*/
|
||||
canUserShareAudio(userId) {
|
||||
if ( [AVSettings.AV_MODES.DISABLED, AVSettings.AV_MODES.VIDEO].includes(this.mode) ) return false;
|
||||
const user = this.settings.getUser(userId);
|
||||
return user && user.canBroadcastAudio && !(user.muted || user.blocked);
|
||||
}
|
||||
|
||||
/* -------------------------------------------- */
|
||||
|
||||
/**
|
||||
* A user can broadcast video if the AV mode is compatible and if they are allowed to broadcast.
|
||||
* @param {string} userId
|
||||
* @return {boolean}
|
||||
*/
|
||||
canUserBroadcastVideo(userId) {
|
||||
if ( [AVSettings.AV_MODES.DISABLED, AVSettings.AV_MODES.AUDIO].includes(this.mode) ) return false;
|
||||
const user = this.settings.getUser(userId);
|
||||
return user && user.canBroadcastVideo;
|
||||
}
|
||||
|
||||
/* -------------------------------------------- */
|
||||
|
||||
/**
|
||||
* A user can share video if they are allowed to broadcast and if they have not hidden themselves or been blocked.
|
||||
* @param {string} userId
|
||||
* @return {boolean}
|
||||
*/
|
||||
canUserShareVideo(userId) {
|
||||
if ( [AVSettings.AV_MODES.DISABLED, AVSettings.AV_MODES.AUDIO].includes(this.mode) ) return false;
|
||||
const user = this.settings.getUser(userId);
|
||||
return user && user.canBroadcastVideo && !(user.hidden || user.blocked);
|
||||
}
|
||||
|
||||
/* -------------------------------------------- */
|
||||
/* Broadcasting */
|
||||
/* -------------------------------------------- */
|
||||
|
||||
/**
|
||||
* Trigger a change in the audio broadcasting state when using a push-to-talk workflow.
|
||||
* @param {boolean} intent The user's intent to broadcast. Whether an actual broadcast occurs will depend
|
||||
* on whether or not the user has muted their audio feed.
|
||||
*/
|
||||
broadcast(intent) {
|
||||
this.broadcasting = intent && this.canUserShareAudio(game.user.id);
|
||||
this.client.toggleBroadcast(this.broadcasting);
|
||||
const activity = this.settings.activity[game.user.id];
|
||||
if ( activity.speaking !== this.broadcasting ) game.user.broadcastActivity({av: {speaking: this.broadcasting}});
|
||||
activity.speaking = this.broadcasting;
|
||||
return ui.webrtc.setUserIsSpeaking(game.user.id, this.broadcasting);
|
||||
}
|
||||
|
||||
/* -------------------------------------------- */
|
||||
|
||||
/**
|
||||
* Set up audio level listeners to handle voice activation detection workflow.
|
||||
* @param {string} mode The currently selected voice broadcasting mode
|
||||
* @private
|
||||
*/
|
||||
_initializeUserVoiceDetection(mode) {
|
||||
|
||||
// Deactivate prior detection
|
||||
game.audio.stopLevelReports(game.user.id);
|
||||
if ( !["always", "activity"].includes(mode) ) return;
|
||||
|
||||
// Activate voice level detection for always-on and activity-based broadcasting
|
||||
const stream = this.client.getLevelsStreamForUser(game.user.id);
|
||||
const ms = mode === "activity" ? CONFIG.WebRTC.detectSelfVolumeInterval : CONFIG.WebRTC.detectPeerVolumeInterval;
|
||||
this.activateVoiceDetection(stream, ms);
|
||||
}
|
||||
|
||||
/* -------------------------------------------- */
|
||||
|
||||
/**
|
||||
* Activate voice detection tracking for a userId on a provided MediaStream.
|
||||
* Currently only a MediaStream is supported because MediaStreamTrack processing is not yet supported cross-browser.
|
||||
* @param {MediaStream} stream The MediaStream which corresponds to that User
|
||||
* @param {number} [ms] A number of milliseconds which represents the voice activation volume interval
|
||||
*/
|
||||
activateVoiceDetection(stream, ms) {
|
||||
this.deactivateVoiceDetection();
|
||||
if ( !stream || !stream.getAudioTracks().some(t => t.enabled) ) return;
|
||||
ms = ms || CONFIG.WebRTC.detectPeerVolumeInterval;
|
||||
const handler = this._onAudioLevel.bind(this);
|
||||
game.audio.startLevelReports(game.userId, stream, handler, ms);
|
||||
}
|
||||
|
||||
/* -------------------------------------------- */
|
||||
|
||||
/**
|
||||
* Actions which the orchestration layer should take when a peer user disconnects from the audio/video service.
|
||||
*/
|
||||
deactivateVoiceDetection() {
|
||||
this._resetSpeakingHistory();
|
||||
game.audio.stopLevelReports(game.userId);
|
||||
}
|
||||
|
||||
/* -------------------------------------------- */
|
||||
|
||||
/**
|
||||
* Periodic notification of user audio level
|
||||
*
|
||||
* This function uses the audio level (in dB) of the audio stream to determine if the user is speaking or not and
|
||||
* notifies the UI of such changes.
|
||||
*
|
||||
* The User is considered speaking if they are above the decibel threshold in any of the history values.
|
||||
* This marks them as speaking as soon as they have a high enough volume, and marks them as not speaking only after
|
||||
* they drop below the threshold in all histories (last 4 volumes = for 200 ms).
|
||||
*
|
||||
* There can be more optimal ways to do this and which uses whether the user was already considered speaking before
|
||||
* or not, in order to eliminate short bursts of audio (coughing for example).
|
||||
*
|
||||
* @param {number} dbLevel The audio level in decibels of the user within the last 50ms
|
||||
* @private
|
||||
*/
|
||||
_onAudioLevel(dbLevel) {
|
||||
const voice = this.settings.client.voice;
|
||||
const speakingData = this._speakingData;
|
||||
const wasSpeaking = speakingData.speaking;
|
||||
|
||||
// Add the current volume to the history of the user and keep the list below the history length config.
|
||||
if (speakingData.volumeHistories.push(dbLevel) > CONFIG.WebRTC.speakingHistoryLength) {
|
||||
speakingData.volumeHistories.shift();
|
||||
}
|
||||
|
||||
// Count the number and total decibels of speaking events which exceed an activity threshold
|
||||
const [count, max, total] = speakingData.volumeHistories.reduce((totals, vol) => {
|
||||
if ( vol >= voice.activityThreshold ) {
|
||||
totals[0] += 1;
|
||||
totals[1] = Math.min(totals[1], vol);
|
||||
totals[2] += vol;
|
||||
}
|
||||
return totals;
|
||||
}, [0, 0, 0]);
|
||||
|
||||
// The user is classified as currently speaking if they exceed a certain threshold of speaking events
|
||||
const isSpeaking = (count > (wasSpeaking ? 0 : CONFIG.WebRTC.speakingThresholdEvents)) && !this.client.isMuted;
|
||||
speakingData.speaking = isSpeaking;
|
||||
|
||||
// Take further action when a change in the speaking state has occurred
|
||||
if ( isSpeaking === wasSpeaking ) return;
|
||||
if ( this.client.isVoiceActivated ) return this.broadcast(isSpeaking); // Declare broadcast intent
|
||||
}
|
||||
|
||||
/* -------------------------------------------- */
|
||||
/* Push-To-Talk Controls */
|
||||
/* -------------------------------------------- */
|
||||
|
||||
/**
|
||||
* Resets the speaking history of a user
|
||||
* If the user was considered speaking, then mark them as not speaking
|
||||
*/
|
||||
_resetSpeakingHistory() {
|
||||
if ( ui.webrtc ) ui.webrtc.setUserIsSpeaking(game.userId, false);
|
||||
this._speakingData.speaking = false;
|
||||
this._speakingData.volumeHistories = [];
|
||||
}
|
||||
|
||||
/* -------------------------------------------- */
|
||||
|
||||
/**
|
||||
* Handle activation of a push-to-talk key or button.
|
||||
* @param {KeyboardEventContext} context The context data of the event
|
||||
*/
|
||||
_onPTTStart(context) {
|
||||
if ( !this._connected ) return false;
|
||||
const voice = this.settings.client.voice;
|
||||
|
||||
// Case 1: Push-to-Talk (begin broadcasting immediately)
|
||||
if ( voice.mode === "ptt" ) {
|
||||
if (this._pttMuteTimeout > 0) clearTimeout(this._pttMuteTimeout);
|
||||
this._pttMuteTimeout = 0;
|
||||
this.broadcast(true);
|
||||
}
|
||||
|
||||
// Case 2: Push-to-Mute (disable broadcasting on a timeout)
|
||||
else this._pttMuteTimeout = setTimeout(() => this.broadcast(false), voice.pttDelay);
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
/* -------------------------------------------- */
|
||||
|
||||
/**
|
||||
* Handle deactivation of a push-to-talk key or button.
|
||||
* @param {KeyboardEventContext} context The context data of the event
|
||||
*/
|
||||
_onPTTEnd(context) {
|
||||
if ( !this._connected ) return false;
|
||||
const voice = this.settings.client.voice;
|
||||
|
||||
// Case 1: Push-to-Talk (disable broadcasting on a timeout)
|
||||
if ( voice.mode === "ptt" ) {
|
||||
this._pttMuteTimeout = setTimeout(() => this.broadcast(false), voice.pttDelay);
|
||||
}
|
||||
|
||||
// Case 2: Push-to-Mute (re-enable broadcasting immediately)
|
||||
else {
|
||||
if (this._pttMuteTimeout > 0) clearTimeout(this._pttMuteTimeout);
|
||||
this._pttMuteTimeout = 0;
|
||||
this.broadcast(true);
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
/* -------------------------------------------- */
|
||||
/* User Interface Controls */
|
||||
/* -------------------------------------------- */
|
||||
|
||||
/**
 * Re-render the CameraViews UI which displays connected audio/video users.
 * @returns {*} The result of the CameraViews render call
 */
render() {
  return ui.webrtc.render();
}
|
||||
|
||||
/* -------------------------------------------- */
|
||||
|
||||
/**
 * Render the audio/video streams to the CameraViews UI.
 * Assign each connected user to the correct video frame element, then position
 * and size the camera dock relative to the #interface element according to the
 * user's dock settings.
 */
onRender() {
  // Bind each connected user's stream to their video element and restore speaking state
  const users = this.client.getConnectedUsers();
  for ( let u of users ) {
    const videoElement = ui.webrtc.getUserVideoElement(u);
    if ( !videoElement ) continue;
    const isSpeaking = this.settings.activity[u]?.speaking || false;
    this.client.setUserVideo(u, videoElement);
    ui.webrtc.setUserIsSpeaking(u, isSpeaking);
  }

  // Determine the players list position based on the user's settings.
  const dockPositions = AVSettings.DOCK_POSITIONS;
  const isAfter = [dockPositions.RIGHT, dockPositions.BOTTOM].includes(this.settings.client.dockPosition);
  const iface = document.getElementById("interface");
  const cameraViews = ui.webrtc.element[0];
  ui.players.render(true);

  // When the dock is hidden, clear any explicit sizing so it does not reserve space
  if ( this.settings.client.hideDock || ui.webrtc.hidden ) {
    cameraViews?.style.removeProperty("width");
    cameraViews?.style.removeProperty("height");
  }

  // Toggle the horizontal layout class when the dock is in a top/bottom position
  document.body.classList.toggle("av-horizontal-dock", !this.settings.verticalDock);

  // Change the dock position based on the user's settings by reordering the
  // camera views element relative to the #interface element in the body.
  if ( cameraViews ) {
    if ( isAfter && (iface.nextElementSibling !== cameraViews) ) document.body.insertBefore(iface, cameraViews);
    else if ( !isAfter && (cameraViews.nextElementSibling !== iface) ) document.body.insertBefore(cameraViews, iface);
  }
}
|
||||
|
||||
/* -------------------------------------------- */
|
||||
/* Events Handlers and Callbacks */
|
||||
/* -------------------------------------------- */
|
||||
|
||||
/**
 * Respond to changes which occur to AV Settings.
 * Changes are handled in descending order of impact.
 * @param {object} changed  The object of changed AV settings
 * @returns {Promise<boolean>|void} A connection Promise when a full re-connection is required
 */
onSettingsChanged(changed) {
  const keys = Object.keys(foundry.utils.flattenObject(changed));

  // Change the server configuration (full AV re-connection)
  if ( keys.includes("world.turn") ) return this.connect();

  // Change audio and video visibility at a user level
  const sharing = foundry.utils.getProperty(changed, `client.users.${game.userId}`) || {};
  if ( "hidden" in sharing ) this.client.toggleVideo(this.canUserShareVideo(game.userId));
  if ( "muted" in sharing ) this.client.toggleAudio(this.canUserShareAudio(game.userId));

  // Restore stored dock width when switching to a vertical dock position.
  const isVertical =
    [AVSettings.DOCK_POSITIONS.LEFT, AVSettings.DOCK_POSITIONS.RIGHT].includes(changed.client?.dockPosition);
  const dockWidth = changed.client?.dockWidth ?? this.settings.client.dockWidth ?? 240;
  if ( isVertical ) ui.webrtc.position.width = dockWidth;

  // Switch resize direction if docked to the right.
  if ( keys.includes("client.dockPosition") ) {
    ui.webrtc.options.resizable.rtl = changed.client.dockPosition === AVSettings.DOCK_POSITIONS.RIGHT;
  }

  // Requires re-render.
  const rerender = ["client.borderColors", "client.dockPosition", "client.nameplates"].some(k => keys.includes(k));
  if ( rerender ) ui.webrtc.render(true);

  // Call client specific setting handling
  this.client.onSettingsChanged(changed);
}
|
||||
|
||||
/* -------------------------------------------- */
|
||||
|
||||
debug(message) {
|
||||
if ( this.settings.debug ) console.debug(message);
|
||||
}
|
||||
}
|
||||
/* -------------------------------------------- */
/* resources/app/client/av/settings.js          */
/* -------------------------------------------- */
|
||||
/**
|
||||
* @typedef {object} AVSettingsData
|
||||
* @property {boolean} [muted] Whether this user has muted themselves.
|
||||
* @property {boolean} [hidden] Whether this user has hidden their video.
|
||||
* @property {boolean} [speaking] Whether the user is broadcasting audio.
|
||||
*/
|
||||
|
||||
class AVSettings {
  constructor() {
    // Load the current client and world settings and snapshot them for diffing
    this.initialize();
    // Debounced persistence so rapid writes collapse into a single settings update
    this._set = foundry.utils.debounce((key, value) => game.settings.set("core", key, value), 100);
    // Debounced change notification so bursts of updates fire one handler call
    this._change = foundry.utils.debounce(this._onSettingsChanged.bind(this), 100);
    // Ensure an activity record exists for the local user
    this.activity[game.userId] = {};
  }

  /* -------------------------------------------- */

  /**
   * WebRTC Mode, Disabled, Audio only, Video only, Audio & Video
   * @enum {number}
   */
  static AV_MODES = {
    DISABLED: 0,
    AUDIO: 1,
    VIDEO: 2,
    AUDIO_VIDEO: 3
  };

  /* -------------------------------------------- */

  /**
   * Voice modes: Always-broadcasting, voice-level triggered, push-to-talk.
   * @enum {string}
   */
  static VOICE_MODES = {
    ALWAYS: "always",
    ACTIVITY: "activity",
    PTT: "ptt"
  };

  /* -------------------------------------------- */

  /**
   * Displayed nameplate options: Off entirely, animate between player and character name, player name only, character
   * name only.
   * @enum {number}
   */
  static NAMEPLATE_MODES = {
    OFF: 0,
    BOTH: 1,
    PLAYER_ONLY: 2,
    CHAR_ONLY: 3
  };

  /* -------------------------------------------- */

  /**
   * AV dock positions.
   * @enum {string}
   */
  static DOCK_POSITIONS = {
    TOP: "top",
    RIGHT: "right",
    BOTTOM: "bottom",
    LEFT: "left"
  };

  /* -------------------------------------------- */

  /**
   * Default client AV settings.
   * @type {object}
   */
  static DEFAULT_CLIENT_SETTINGS = {
    videoSrc: "default",
    audioSrc: "default",
    audioSink: "default",
    dockPosition: AVSettings.DOCK_POSITIONS.LEFT,
    hidePlayerList: false,
    hideDock: false,
    muteAll: false,
    disableVideo: false,
    borderColors: false,
    dockWidth: 240,
    nameplates: AVSettings.NAMEPLATE_MODES.BOTH,
    voice: {
      mode: AVSettings.VOICE_MODES.PTT,
      pttName: "`",
      pttDelay: 100,
      activityThreshold: -45
    },
    users: {}
  };

  /* -------------------------------------------- */

  /**
   * Default world-level AV settings.
   * @type {object}
   */
  static DEFAULT_WORLD_SETTINGS = {
    mode: AVSettings.AV_MODES.DISABLED,
    turn: {
      type: "server",
      url: "",
      username: "",
      password: ""
    }
  };

  /* -------------------------------------------- */

  /**
   * Default client settings for each connected user.
   * @type {object}
   */
  static DEFAULT_USER_SETTINGS = {
    popout: false,
    x: 100,
    y: 100,
    z: 0,
    width: 320,
    volume: 1.0,
    muted: false,
    hidden: false,
    blocked: false
  };

  /* -------------------------------------------- */

  /**
   * Stores the transient AV activity data received from other users.
   * @type {Record<string, AVSettingsData>}
   */
  activity = {};

  /* -------------------------------------------- */

  /**
   * (Re)load the client and world AV settings from core game settings and snapshot
   * them for later diffing. Also broadcasts the local user's muted/hidden state.
   */
  initialize() {
    this.client = game.settings.get("core", "rtcClientSettings");
    this.world = game.settings.get("core", "rtcWorldSettings");
    this._original = foundry.utils.deepClone({client: this.client, world: this.world});
    const {muted, hidden} = this._getUserSettings(game.user);
    game.user.broadcastActivity({av: {muted, hidden}});
  }

  /* -------------------------------------------- */

  /**
   * Notify (debounced) that AV settings have changed.
   */
  changed() {
    return this._change();
  }

  /* -------------------------------------------- */

  /**
   * Get a setting value by dot-delimited path within a settings scope.
   * @param {string} scope    The settings scope, "client" or "world"
   * @param {string} setting  The dot-delimited property path
   * @returns {*} The setting value
   */
  get(scope, setting) {
    return foundry.utils.getProperty(this[scope], setting);
  }

  /* -------------------------------------------- */

  /**
   * Get the standardized AV settings for a single User by id.
   * @param {string} userId  The id of the User to look up
   * @returns {object|null} The user settings, or null if no such User exists
   */
  getUser(userId) {
    const user = game.users.get(userId);
    if ( !user ) return null;
    return this._getUserSettings(user);
  }

  /* -------------------------------------------- */

  /**
   * Set a setting value by dot-delimited path and persist it (debounced).
   * @param {string} scope    The settings scope, "client" or "world"
   * @param {string} setting  The dot-delimited property path
   * @param {*} value         The value to assign
   */
  set(scope, setting, value) {
    foundry.utils.setProperty(this[scope], setting, value);
    this._set(`rtc${scope.titleCase()}Settings`, this[scope]);
  }

  /* -------------------------------------------- */

  /**
   * Return a mapping of AV settings for each game User.
   * @type {object}
   */
  get users() {
    const users = {};
    for ( let u of game.users ) {
      users[u.id] = this._getUserSettings(u);
    }
    return users;
  }

  /* -------------------------------------------- */

  /**
   * A helper to determine if the dock is configured in a vertical position.
   */
  get verticalDock() {
    const positions = this.constructor.DOCK_POSITIONS;
    return [positions.LEFT, positions.RIGHT].includes(this.client.dockPosition ?? positions.LEFT);
  }

  /* -------------------------------------------- */

  /**
   * Prepare a standardized object of user settings data for a single User,
   * merging stored client settings over the defaults and layering in permission
   * flags and transient activity state.
   * @private
   */
  _getUserSettings(user) {
    const clientSettings = this.client.users[user.id] || {};
    const activity = this.activity[user.id] || {};
    const settings = foundry.utils.mergeObject(AVSettings.DEFAULT_USER_SETTINGS, clientSettings, {inplace: false});
    settings.canBroadcastAudio = user.can("BROADCAST_AUDIO");
    settings.canBroadcastVideo = user.can("BROADCAST_VIDEO");

    if ( user.isSelf ) {
      // The local user is also muted/hidden when their tracks are disabled in the client
      settings.muted ||= !game.webrtc?.client.isAudioEnabled();
      settings.hidden ||= !game.webrtc?.client.isVideoEnabled();
    } else {
      // Either we have muted or hidden them, or they have muted or hidden themselves.
      settings.muted ||= !!activity.muted;
      settings.hidden ||= !!activity.hidden;
    }

    settings.speaking = activity.speaking;
    return settings;
  }

  /* -------------------------------------------- */

  /**
   * Handle setting changes to either rtcClientSettings or rtcWorldSettings.
   * Re-initializes local state and notifies the AV master and hooks with the diff.
   * @private
   */
  _onSettingsChanged() {
    const original = this._original;
    this.initialize();
    const changed = foundry.utils.diffObject(original, this._original);
    game.webrtc.onSettingsChanged(changed);
    Hooks.callAll("rtcSettingsChanged", this, changed);
  }

  /* -------------------------------------------- */

  /**
   * Handle another connected user changing their AV settings.
   * @param {string} userId
   * @param {AVSettingsData} settings
   */
  handleUserActivity(userId, settings) {
    const current = this.activity[userId] || {};
    this.activity[userId] = foundry.utils.mergeObject(current, settings, {inplace: false});
    if ( !ui.webrtc ) return;
    // Refresh the user's camera view only when their visible state actually changed
    const hiddenChanged = ("hidden" in settings) && (current.hidden !== settings.hidden);
    const mutedChanged = ("muted" in settings) && (current.muted !== settings.muted);
    if ( (hiddenChanged || mutedChanged) && ui.webrtc.getUserVideoElement(userId) ) ui.webrtc._refreshView(userId);
    if ( "speaking" in settings ) ui.webrtc.setUserIsSpeaking(userId, settings.speaking);
  }
}
|
||||
Reference in New Issue
Block a user