@4players/odin
Version:
A cross-platform SDK enabling developers to integrate real-time VoIP chat technology into their projects
196 lines (195 loc) • 8.23 kB
JavaScript
"use strict";
var __classPrivateFieldSet = (this && this.__classPrivateFieldSet) || function (receiver, state, value, kind, f) {
    // TypeScript downlevel helper: write a `#private` member on `receiver`.
    // `kind` is "m" (method), "a" (accessor) or "f" (field); `state` is the
    // WeakMap/WeakSet backing store (or a brand function for static members).
    if (kind === "m") throw new TypeError("Private method is not writable");
    if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a setter");
    // Brand check: only instances registered in the backing store may be written.
    var declared = typeof state === "function" ? (receiver === state && !!f) : state.has(receiver);
    if (!declared) throw new TypeError("Cannot write private member to an object whose class did not declare it");
    if (kind === "a") {
        f.call(receiver, value);
    } else if (f) {
        f.value = value;
    } else {
        state.set(receiver, value);
    }
    return value;
};
var __classPrivateFieldGet = (this && this.__classPrivateFieldGet) || function (receiver, state, kind, f) {
    // TypeScript downlevel helper: read a `#private` member from `receiver`.
    // `kind` is "m" (method), "a" (accessor) or "f" (field); `state` is the
    // WeakMap/WeakSet backing store (or a brand function for static members).
    if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a getter");
    // Brand check: only instances registered in the backing store may be read.
    var declared = typeof state === "function" ? (receiver === state && !!f) : state.has(receiver);
    if (!declared) throw new TypeError("Cannot read private member from an object whose class did not declare it");
    if (kind === "m") {
        return f;               // private method: the function itself
    }
    if (kind === "a") {
        return f.call(receiver); // private accessor: invoke the getter
    }
    return f ? f.value : state.get(receiver); // private field: descriptor or WeakMap slot
};
var _AudioOutput_instances, _AudioOutput_lastActive, _AudioOutput_paused, _AudioOutput_volume, _AudioOutput_dispatchActivity, _AudioOutput_dispatchPowerLvl, _AudioOutput_dispatchJitterStats;
Object.defineProperty(exports, "__esModule", { value: true });
exports.AudioOutput = void 0;
const odin_event_target_1 = require("../../utils/odin-event-target");
const helpers_1 = require("../../utils/helpers");
const media_service_1 = require("../../media-service");
/**
 * Represents a remote peer's audio stream being played back locally.
 *
 * Wraps the plugin-provided `AudioPlayback`, registers itself with the media
 * service to receive 'PowerLevel' / 'Activity' / 'JitterStats' notifications,
 * and re-dispatches them as events on itself (it extends OdinEventTarget).
 */
class AudioOutput extends odin_event_target_1.OdinEventTarget {
    constructor(
    /**
     * The AudioPlayback that is provided by the underlying plugin.
     *
     * @return {AudioPlayback} The AudioPlayback of the underlying plugin.
     */
    playback, _mediaData, peer, room) {
        super();
        _AudioOutput_instances.add(this);
        this.playback = playback;
        this._mediaData = _mediaData;
        this.peer = peer;
        this.room = room;
        // Discriminator so consumers can distinguish output media from other kinds.
        this.kind = 'audio-output';
        // Last activity state that was dispatched; used to suppress duplicate events.
        _AudioOutput_lastActive.set(this, false);
        _AudioOutput_paused.set(this, void 0);
        // Local volume as a two-channel pair; defaults to unity gain on both channels.
        _AudioOutput_volume.set(this, [1, 1]);
        // Take the initial paused state from the media data provided by the caller.
        __classPrivateFieldSet(this, _AudioOutput_paused, _mediaData.paused, "f");
        // Register with the media service; the callback routes plugin notifications
        // to the matching private dispatch helper on this instance.
        (0, media_service_1.registerAudioOutput)(this, (method) => {
            switch (method) {
                case 'PowerLevel':
                    __classPrivateFieldGet(this, _AudioOutput_instances, "m", _AudioOutput_dispatchPowerLvl).call(this);
                    break;
                case 'Activity':
                    __classPrivateFieldGet(this, _AudioOutput_instances, "m", _AudioOutput_dispatchActivity).call(this);
                    break;
                case 'JitterStats':
                    __classPrivateFieldGet(this, _AudioOutput_instances, "m", _AudioOutput_dispatchJitterStats).call(this);
                    break;
            }
        }, room);
    }
    /**
     * Retrieves the unique identifier (UID) associated with the playback.
     *
     * @return {string} The UID of the playback.
     */
    get uid() {
        return this.playback.uid;
    }
    /**
     * Retrieves the media ID associated with this instance.
     *
     * @return {number} The unique identifier of the media.
     */
    get mediaId() {
        return this._mediaData.id;
    }
    /**
     * Whether the AudioOutput is currently active (i.e. not silent) or not.
     * Only meaningful when voice activity detection was enabled.
     *
     * @return {boolean} True if the playback reports non-silent activity, false otherwise.
     */
    get isActive() {
        return !this.playback.activity.isSilent;
    }
    /**
     * The custom type helps to identify the purpose of this AudioOutput as
     * defined by the remote AudioInput.
     *
     * @return {string | undefined} The custom type value if available; otherwise, undefined.
     */
    get customType() {
        return this.playback.customType;
    }
    /**
     * Retrieves the current local volume of this AudioOutput.
     *
     * @return {PlaybackVolume} The per-channel volume pair, as set via setVolume().
     */
    get volume() {
        return __classPrivateFieldGet(this, _AudioOutput_volume, "f");
    }
    /**
     * Gets the aggregated volume (room * peer * audioOutput) that is currently in use.
     *
     * Reads the first channel of the playback's volume pair.
     *
     * @return {number} The current aggregated volume level.
     */
    get volumeAggregated() {
        return this.playback.volume[0];
    }
    /**
     * Sets the volume of the AudioOutput.
     *
     * When called without a value, only re-applies the aggregated volume
     * (useful after peer or room volume changed). Out-of-range values are
     * presumably clamped by calcPlaybackVolume — not visible here.
     *
     * @param {PlaybackVolume | number} [value] - The desired volume setting; a single
     *        number is applied to both channels. Can also be 'muted'.
     * @return {Promise<void>} A promise that resolves when the volume adjustment is complete.
     */
    async setVolume(value) {
        if (typeof value !== 'undefined') {
            __classPrivateFieldSet(this, _AudioOutput_volume, typeof value === 'number' ? [value, value] : value, "f");
        }
        // Combine local, peer and room volumes into the effective playback volume.
        const volume = (0, helpers_1.calcPlaybackVolume)([
            __classPrivateFieldGet(this, _AudioOutput_volume, "f"),
            this.peer.volume,
            this.room.volume,
        ]);
        this.playback.setVolume(volume);
    }
    /**
     * Retrieves the paused state of the AudioOutput.
     *
     * @return {boolean} Returns true if the media is paused at the SFU, otherwise false.
     */
    get isPaused() {
        return __classPrivateFieldGet(this, _AudioOutput_paused, "f");
    }
    /**
     * Retrieves the Root Mean Square (RMS) in decibels relative to full scale (dBFS)
     * for the captured audio activity. This is a measure of the average power level
     * of the audio signal.
     *
     * @return {number} The RMS value expressed in dBFS.
     */
    get powerLevel() {
        return this.playback.activity.rmsDBFS;
    }
    /**
     * Retrieves the jitter statistics for the playback.
     *
     * @return {JitterStats} The jitter statistics data associated with the playback.
     */
    get jitterStats() {
        return this.playback.jitterStats;
    }
    /**
     * Pauses the AudioOutput at the SFU and prevents receiving audio packets.
     *
     * The local paused flag is only updated after the SFU call succeeds.
     *
     * @return {Promise<void>} A promise that resolves when the media is successfully paused.
     */
    async pause() {
        await this.room.pauseMedia(this.mediaId);
        __classPrivateFieldSet(this, _AudioOutput_paused, true, "f");
    }
    /**
     * Resumes the AudioOutput at the SFU so audio packets are received again.
     *
     * @return {Promise<void>} A promise that resolves when the media is successfully resumed.
     */
    async resume() {
        await this.room.resumeMedia(this.mediaId);
        __classPrivateFieldSet(this, _AudioOutput_paused, false, "f");
    }
    /**
     * Closes the AudioOutput: shuts down the underlying playback, unregisters
     * from the media service, and dispatches a final 'Activity' event so
     * listeners can observe the transition to inactive.
     */
    close() {
        this.playback.close();
        (0, media_service_1.unregisterAudioOutput)(this);
        __classPrivateFieldGet(this, _AudioOutput_instances, "m", _AudioOutput_dispatchActivity).call(this);
    }
}
exports.AudioOutput = AudioOutput;
// Backing stores and implementations for the class's downlevelled private members.
_AudioOutput_lastActive = new WeakMap();
_AudioOutput_paused = new WeakMap();
_AudioOutput_volume = new WeakMap();
_AudioOutput_instances = new WeakSet();
_AudioOutput_dispatchActivity = function _AudioOutput_dispatchActivity() {
    // Suppress the event when the activity state did not actually change.
    if (__classPrivateFieldGet(this, _AudioOutput_lastActive, "f") === this.isActive) {
        return;
    }
    __classPrivateFieldSet(this, _AudioOutput_lastActive, this.isActive, "f");
    this.dispatchEvent(new odin_event_target_1.OdinEvent('Activity', { media: this }));
    if (this.onAudioActivity) {
        this.onAudioActivity(this.isActive);
    }
};
_AudioOutput_dispatchPowerLvl = function _AudioOutput_dispatchPowerLvl() {
    // Notify listeners about the current RMS power level of the playback.
    const payload = { media: this, peer: this.peer };
    this.dispatchEvent(new odin_event_target_1.OdinEvent('PowerLevel', payload));
    this.onPowerLevel?.(this.powerLevel);
};
_AudioOutput_dispatchJitterStats = function _AudioOutput_dispatchJitterStats() {
    // Forward the playback's jitter statistics to event listeners and the callback.
    this.dispatchEvent(new odin_event_target_1.OdinEvent('JitterStats', this.jitterStats));
    this.onJitterStats?.(this.jitterStats);
};