/*
 * podchat-browser
 * Javascript SDK to use POD's Chat Service - Browser Only
 * (package metadata: 423 lines / 386 loc, 17.9 kB, JavaScript)
 */
import '../constants.js'
import {errorList} from "../errorHandler.js";
/**
 * MediaStreamManager — private registry for the locally acquired MediaStream
 * objects (microphone, camera, screen share), with setters/getters and
 * stop helpers that release every track and clear the cached reference.
 *
 * @returns {Object} accessor/stop API over the internal stream registry.
 */
function MediaStreamManager() {
    // Internal registry; null means "not currently acquired".
    const deviceStreams = {
        videoIn: null,
        audioIn: null,
        audioOut: null,
        screenShare: null,
    };

    // Stops every track of a stream (tolerates sparse/odd track lists).
    const stopAllTracks = (stream) => {
        stream.getTracks().forEach(track => track?.stop());
    };

    return {
        setAudioInput(stream) {
            deviceStreams.audioIn = stream;
        },
        setVideoInput(stream) {
            deviceStreams.videoIn = stream;
        },
        setScreenShareInput(stream) {
            deviceStreams.screenShare = stream;
        },
        getVideoInput() {
            return deviceStreams.videoIn;
        },
        getAudioInput() {
            return deviceStreams.audioIn;
        },
        getScreenShareInput() {
            return deviceStreams.screenShare;
        },
        /** Stops the microphone stream and forgets it. */
        async stopAudioInput() {
            if (!deviceStreams.audioIn)
                return;
            stopAllTracks(deviceStreams.audioIn);
            // Clear AFTER stopping (the original nulled this inside the track
            // loop, so a zero-track stream was never cleared and the dead
            // stream stayed cached for reuse).
            deviceStreams.audioIn = null;
        },
        /** Stops the camera stream and forgets it. */
        async stopVideoInput() {
            if (!deviceStreams.videoIn)
                return;
            stopAllTracks(deviceStreams.videoIn);
            // Same fix as stopAudioInput: clear outside the loop.
            deviceStreams.videoIn = null;
        },
        /** Stops the screen-share stream and forgets it. */
        async stopScreenShareInput() {
            if (!deviceStreams.screenShare)
                return;
            stopAllTracks(deviceStreams.screenShare);
            deviceStreams.screenShare = null;
        }
    };
}
/**
 * DeviceManager — acquires, tracks and releases the user's media devices
 * (microphone, camera, speakers, screen share) for a call, and serializes
 * permission prompts through an internal FIFO queue so concurrent
 * getUserMedia requests do not race each other.
 *
 * @param {Object} app - SDK container. This block reads app.chatEvents,
 *   app.errorHandler, app.call, app.store and app.preferredMediaDevices.
 * @returns {Object} the deviceManager API object.
 */
function DeviceManager(app) {
    const config = {
        // Registry of the currently acquired MediaStream objects.
        mediaStreams: new MediaStreamManager(),
        streamsMetada: {
            // setInterval id of the microphone health watcher (watchAudioInputStream).
            audioInWatcherId: null
        },
        // Pending {video, audio, closeStream, callback} permission requests (FIFO).
        permissionRequests: [],
        // True while a queued permission request is being processed.
        isProcessingPermissionRequests: false,
    }
    const deviceManager = {
        // Debounce flag for 'devicechange' events. The original read this
        // while it was still implicitly undefined; initialized here (same
        // falsy behavior, explicit state).
        deviceChangeFlag: false,
        /**
         * Resolves with a (possibly cached) input stream, prompting the user
         * via getUserMedia when none is cached. On failure fires a CALL_ERROR
         * event (code 12401 for audio, 12400 for video) and rejects with the
         * SDK error object.
         *
         * @param {{audio?: boolean|Object, video?: boolean|Object}} kinds
         * @returns {Promise<MediaStream>}
         */
        getInputDevicePermission({
            audio = false,
            video = false,
        }) {
            return new Promise((resolve, reject) => {
                // Reuse already-acquired streams instead of re-prompting the user.
                if (video && config.mediaStreams.getVideoInput()) {
                    resolve(config.mediaStreams.getVideoInput());
                    return;
                }
                if (audio && config.mediaStreams.getAudioInput()) {
                    resolve(config.mediaStreams.getAudioInput());
                    return;
                }
                navigator.mediaDevices.getUserMedia({audio, video}).then(stream => {
                    if (audio)
                        config.mediaStreams.setAudioInput(stream);
                    if (video)
                        config.mediaStreams.setVideoInput(stream);
                    resolve(stream);
                }).catch(error => {
                    app.chatEvents.fireEvent('callEvents', {
                        type: 'CALL_ERROR',
                        code: (audio ? 12401 : 12400),
                        message: error,
                    });
                    reject(app.errorHandler.handleError((audio ? 12401 : 12400)))
                });
            });
        },
        /**
         * @returns {boolean} true when the browser exposes
         *   MediaDevices.selectAudioOutput().
         */
        canChooseAudioOutputDevice() {
            return !!navigator.mediaDevices.selectAudioOutput;
        },
        /**
         * Opens the browser's audio-output picker and logs the selection.
         * No-op (with a console warning) when the API is unsupported.
         */
        changeAudioOutputDevice() {
            if (!navigator.mediaDevices.selectAudioOutput) {
                console.warn("selectAudioOutput() not supported.");
                return;
            }
            //Display prompt and log selected device or error
            navigator.mediaDevices.selectAudioOutput()
                .then((device) => {
                    console.log(device.kind + ": " + device.label + " id = " + device.deviceId);
                })
                .catch(function (err) {
                    console.log(err.name + ": " + err.message);
                });
        },
        /**
         * During a call, replaces the local user's outgoing video stream;
         * outside a call, only caches the stream for later use.
         */
        replaceVideoStream(newStream, callback) {
            let call = app.call.currentCall();
            if (call) {
                let user = app.call.currentCall().users().get(app.store.user.get().id);
                user.replaceVideoStream(newStream, callback);
                return;
            } else {
                config.mediaStreams.setVideoInput(newStream);
            }
        },
        /**
         * Switches the active device of the given type for the current call.
         * Completion is reported through the per-type change methods' callback.
         *
         * @param {{deviceType: ('audioIn'|'audioOut'|'videoIn'), deviceId: string}} params
         * @param {Function} [callback]
         */
        changeMediaDevice(params, callback) {
            let user = app.call.currentCall().users().get(app.store.user.get().id);
            // NOTE(review): the original built a local constraints object here
            // ({deviceId: {exact: ...}, width: 320, framerate: 10}) that
            // shadowed the outer `config` and was never passed to anything;
            // removed as dead code.
            if (params.deviceType === 'audioIn') {
                user.changeAudioStream(params.deviceId, callback);
            }
            else if (params.deviceType === 'audioOut') {
                app.call.currentCall().users().switchSpeakers(params.deviceId, callback);
            }
            else if (params.deviceType === 'videoIn') {
                user.changeVideoStream(params.deviceId, callback);
            }
        },
        /**
         * Enumerates media devices grouped by kind, marking the preferred
         * device of each kind as `active`, and reports them via callback.
         *
         * A temporary audio stream is requested first so that labels/deviceIds
         * are populated (browsers hide them without permission), and is stopped
         * again before the callback fires. Also installs the devicechange
         * listener once.
         *
         * Bug fix: both error paths previously reported {hasError: false};
         * they now report {hasError: true}.
         *
         * @param {Function} [callback] - receives {hasError, devices|message}.
         */
        getMediaDevices(callback) {
            if (!navigator.mediaDevices?.enumerateDevices) {
                callback && callback({hasError: true});
            } else {
                navigator.mediaDevices.getUserMedia({audio: true}).then(stream => {
                    navigator.mediaDevices
                        .enumerateDevices()
                        .then((devices) => {
                            let my_devices = {audioinput: [], videoinput: [], audiooutput: []};
                            devices.forEach((device) => {
                                const {kind, label, deviceId, groupId} = device;
                                let active;
                                if(kind == 'videoinput') {
                                    active = deviceId == app.preferredMediaDevices.get('videoIn');
                                } else if(kind == 'audioinput') {
                                    active = deviceId == app.preferredMediaDevices.get('audioIn');
                                } else if(kind == 'audiooutput') {
                                    active = deviceId == app.preferredMediaDevices.get('audioOut');
                                }
                                my_devices[kind]?.push({label, deviceId, groupId, active});
                            });
                            // On Safari, add a synthetic "system default speaker"
                            // entry (presumably because Safari does not list audio
                            // outputs — TODO confirm). Label is user-facing Farsi.
                            if(/^((?!chrome|android).)*safari/i.test(navigator.userAgent)){
                                my_devices['audiooutput']?.push({label:'اسپیکر پیش فرض سیستم', deviceId:'', groupId:'', active:true})
                            }
                            // Release the temporary permission stream.
                            stream.getTracks().forEach(item => item.stop());
                            callback && callback({hasError: false, devices: my_devices})
                        })
                        .catch((err) => {
                            // FIX: was {hasError: false} on this error path.
                            callback && callback({hasError: true, message: err.message, devices: []})
                            console.error(`${err.name}: ${err.message}`);
                        });
                }).catch((err) => {
                    // FIX: was {hasError: false} on this error path.
                    callback && callback({hasError: true, message: err.message, devices: []})
                    console.error(`${err.name}: ${err.message}`);
                });
                // Start watching for device plug/unplug events once.
                if (!navigator.mediaDevices.ondevicechange) {
                    navigator.mediaDevices.ondevicechange = deviceManager.handleDeviceChange;
                }
            }
        },
        /**
         * Collapses devices that share a groupId within each kind (keeping the
         * LAST occurrence, since the list is iterated reversed) and flags one
         * surviving entry per kind with `default: true`.
         *
         * Fixes vs. original: `repeated_group_id` is reset per kind — it used
         * to leak across kinds, which can share groupIds for the same physical
         * device — and empty kinds no longer crash when marking a default.
         *
         * @param {Object<string, Array>} input - device lists keyed by kind.
         * @returns {Object<string, Array>} deduplicated lists.
         */
        removeDuplicateGroupIds(input) {
            let hasDefault = {audioinput: false, videoinput: false, audiooutput: false};
            const result = {};
            const uniqueGroupIds = {};
            for (const key in input) {
                let repeated_group_id = '';  // FIX: reset per kind, not once globally
                uniqueGroupIds[key] = new Set();
                if (input.hasOwnProperty(key)) {
                    result[key] = [...input[key]].reverse().filter(item => {
                        if (uniqueGroupIds[key].has(item.groupId)) {
                            repeated_group_id = item.groupId
                            return false;
                        } else {
                            uniqueGroupIds[key].add(item.groupId);
                            return true;
                        }
                    });
                    // change default value of object if groupId repeated
                    const index = result[key].findIndex(data => data.groupId === repeated_group_id)
                    if (index > -1) {
                        hasDefault[key] = true
                        result[key][index]['default'] = true;
                    }
                }
                // if no repeated groupId, fall back to the last entry of the
                // kind (FIX: guard against an empty list, which used to throw).
                if (!hasDefault[key] && result[key] && result[key].length) {
                    result[key][result[key].length - 1]['default'] = true;
                }
            }
            return result;
        },
        /**
         * 'devicechange' handler: re-enumerates devices and fires
         * USER_MEDIA_DEVICES_CHANGED. Debounced with a 1-second flag because
         * a single physical change can fire several events.
         */
        handleDeviceChange(event) {
            if (!deviceManager.deviceChangeFlag) {
                deviceManager.deviceChangeFlag = true;
                deviceManager.getMediaDevices(function ({hasError, devices}) {
                    app.chatEvents.fireEvent('callEvents', {
                        type: 'USER_MEDIA_DEVICES_CHANGED',
                        result: devices
                    });
                })
                setTimeout(() => {
                    deviceManager.deviceChangeFlag = false;
                }, 1000);
            }
        },
        /**
         * Ensures screen-share permission: reuses an active cached stream, or
         * prompts via getDisplayMedia (video only). An inactive cached stream
         * is dropped and re-prompted.
         *
         * @param {{closeStream?: boolean}} opts - when true, stop the stream
         *   right after acquiring it (permission pre-flight).
         * @param {Function|null} [callback]
         * @returns {Promise<MediaStream>}
         */
        grantScreenSharePermission({closeStream = false}, callback = null) {
            return new Promise((resolve, reject) => {
                if (config.mediaStreams.getScreenShareInput()) {
                    if (!config.mediaStreams.getScreenShareInput().active) {
                        // Cached stream went inactive; drop it and re-prompt below.
                        config.mediaStreams.stopScreenShareInput();
                    } else {
                        resolve(config.mediaStreams.getScreenShareInput());
                        return;
                    }
                }
                navigator.mediaDevices.getDisplayMedia({
                    audio: false,
                    video: true
                }).then(stream => {
                    config.mediaStreams.setScreenShareInput(stream);
                    if (closeStream) {
                        config.mediaStreams.stopScreenShareInput();
                    }
                    callback && callback({
                        hasError: false
                    })
                    resolve(stream);
                }).catch(e => {
                    // raiseError also invokes the callback / fires the event.
                    let error = app.errorHandler.raiseError(errorList.SCREENSHARE_PERMISSION_ERROR, callback, true, {
                        eventName: 'callEvents',
                        eventType: 'CALL_ERROR'
                    });
                    reject(error);
                });
            });
        },
        /**
         * Processes one queued permission request: acquires the requested
         * streams, optionally stops them again (closeStream), reports through
         * the request's callback, then releases the lock and drains the queue.
         * The promise settles before the queue advances (intentional tail work).
         */
        executePermissionRequest({video , audio , closeStream, callback}){
            config.isProcessingPermissionRequests = true;
            return new Promise(async (resolve, reject) => {
                try {
                    if (audio)
                        await deviceManager.getInputDevicePermission({audio: audio});
                    if (video)
                        await deviceManager.getInputDevicePermission({
                            video: video
                        });
                    if (closeStream) {
                        if (audio)
                            config.mediaStreams.stopAudioInput();
                        if (video)
                            config.mediaStreams.stopVideoInput();
                    }
                    if (callback) {
                        let res = {hasError: false};
                        if(video) {
                            res.videoStream = config.mediaStreams.getVideoInput();
                        }
                        if(audio) {
                            res.audioStream = config.mediaStreams.getAudioInput();
                        }
                        callback(res);
                    }
                    resolve({hasError: false});
                } catch (error) {
                    let parsedError = {
                        hasError: true,
                        errorCode: error.code,
                        errorMessage: error.message
                    }
                    if (callback)
                        callback(parsedError);
                    reject(parsedError);
                }
                // Always release the lock and process the next queued request,
                // even after a failure.
                config.isProcessingPermissionRequests = false;
                return deviceManager.maybeGoToNext();
            });
        },
        /**
         * Starts the next queued permission request when idle. The rejection
         * is swallowed here because the error has already been delivered via
         * the request's callback (fixes an unhandled promise rejection).
         */
        maybeGoToNext(){
            if(!config.isProcessingPermissionRequests && config.permissionRequests.length) {
                deviceManager.executePermissionRequest(config.permissionRequests.shift())
                    .catch(() => { /* already reported via the request callback */ });
            }
        },
        /**
         * Public entry point: queues a permission request for the given media
         * kinds. When a preferred device is stored, the request is pinned to
         * it via an `exact` deviceId constraint (unless the caller already
         * supplied constraints with a deviceId).
         *
         * @param {{video?: boolean|Object, audio?: boolean|Object, closeStream?: boolean}} params
         * @param {Function|null} [callback] - receives {hasError, ...streams}.
         */
        grantUserMediaDevicesPermissions({video = false, audio = false, closeStream = false}, callback = null) {
            if(app.preferredMediaDevices.get('videoIn')) {
                if (video) {
                    if (typeof video == 'object') {
                        if(!video.deviceId)
                            video.deviceId = {exact: app.preferredMediaDevices.get('videoIn')};
                    } else {
                        video = {
                            deviceId: {exact: app.preferredMediaDevices.get('videoIn')}
                        };
                    }
                }
            }
            if(app.preferredMediaDevices.get('audioIn')) {
                if (audio) {
                    if (typeof audio == 'object') {
                        if(!audio.deviceId)
                            audio.deviceId = {exact: app.preferredMediaDevices.get('audioIn')};
                    } else {
                        audio = {
                            deviceId: {exact: app.preferredMediaDevices.get('audioIn')}
                        };
                    }
                }
            }
            config.permissionRequests.push({video , audio , closeStream, callback});
            deviceManager.maybeGoToNext();
        },
        // Expose the stream registry to call internals.
        mediaStreams: config.mediaStreams,
        /**
         * Polls the microphone track every 10s and raises SDK errors when it
         * disappears, is muted by the browser/OS, is disabled, or has ended.
         * Calling it again restarts the watcher (previous interval is cleared).
         *
         * @param {Function} callErrorHandler - (errorListEntry, callback, fireEvent, meta).
         */
        watchAudioInputStream(callErrorHandler) {
            config.streamsMetada.audioInWatcherId && clearInterval(config.streamsMetada.audioInWatcherId)
            config.streamsMetada.audioInWatcherId = setInterval(() => {
                if (!config.mediaStreams.getAudioInput()) {
                    clearInterval(config.streamsMetada.audioInWatcherId);
                    return;
                }
                const audioTracks = config.mediaStreams.getAudioInput()?.getAudioTracks();
                if (audioTracks.length === 0) {
                    // No audio from microphone has been captured
                    callErrorHandler(errorList.NO_AUDIO_TRACKS_AVAILABLE, null, true, {});
                    clearInterval(config.streamsMetada.audioInWatcherId);
                    return;
                }
                // We asked for the microphone so one track
                const track = audioTracks[0];
                if (track.muted) {
                    // Muted means the track is unable to provide media data and
                    // can't be unmuted programmatically — no more data will come.
                    callErrorHandler(errorList.AUDIO_TRACK_MUTED, null, true, {});
                    clearInterval(config.streamsMetada.audioInWatcherId);
                }
                if (!track.enabled) {
                    // Disabled: the track provides silence instead of real data,
                    // but can be enabled again later.
                    callErrorHandler(errorList.AUDIO_TRACK_DISABLED, null, true, {});
                }
                if (track.readyState === "ended") {
                    // Ended (possibly a device disconnection): the track can
                    // never become active again.
                    callErrorHandler(errorList.AUDIO_TRACK_ENDED, null, true, {});
                    clearInterval(config.streamsMetada.audioInWatcherId);
                }
            }, 10000)
        },
        /**
         * Call teardown: drops queued permission requests, stops every
         * acquired stream and detaches the devicechange listener.
         */
        async onCallEnd(){
            config.permissionRequests = [];
            config.isProcessingPermissionRequests = false;
            await config.mediaStreams.stopAudioInput();
            await config.mediaStreams.stopVideoInput();
            await config.mediaStreams.stopScreenShareInput();
            navigator.mediaDevices.ondevicechange = null;
        }
    }
    return deviceManager;
};
export {DeviceManager}