/* global NAF */
const NoOpAdapter = require('./NoOpAdapter');
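
// Networked-Aframe adapter built on top of EasyRTC: NAF messages are sent over
// WebRTC data channels with a websocket fallback, and per-client audio/video
// media streams are exposed through getMediaStream()/setMediaStream().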
class EasyRtcAdapter extends NoOpAdapter {
  constructor(easyrtc) {
    super();

    this.easyrtc = easyrtc || window.easyrtc;
    this.app = "default";
    this.room = "default";

    this.destination = { targetRoom: this.room };

    this.mediaStreams = {};
    this.remoteClients = {};
    this.pendingMediaRequests = new Map();

    this.serverTimeRequests = 0;
    this.timeOffsets = [];
    this.avgTimeOffset = 0;

    this.easyrtc.setPeerOpenListener((clientId) => {
      const clientConnection = this.easyrtc.getPeerConnectionByUserId(clientId);
      this.remoteClients[clientId] = clientConnection;
    });

    this.easyrtc.setPeerClosedListener((clientId) => {
      delete this.remoteClients[clientId];

      const pendingMediaRequests = this.pendingMediaRequests.get(clientId);
      if (pendingMediaRequests) {
        const msg = "The user disconnected before the media stream was resolved.";
        Object.keys(pendingMediaRequests).forEach((streamName) => {
          pendingMediaRequests[streamName].reject(msg);
        });
        this.pendingMediaRequests.delete(clientId);
      }
    });
  }

  setServerUrl(url) {
    this.easyrtc.setSocketUrl(url);
  }

  setApp(appName) {
    this.app = appName;
  }

  setRoom(roomName) {
    this.room = roomName;
    this.destination.targetRoom = this.room;
    this.easyrtc.joinRoom(roomName, null);
  }

  // options: { datachannel: bool, audio: bool, video: bool }
  setWebRtcOptions(options) {
    // this.easyrtc.enableDebug(true);
    this.easyrtc.enableDataChannels(options.datachannel);
    this.easyrtc.enableVideo(options.video);
    this.easyrtc.enableAudio(options.audio);
    // TODO: receive(audio|video) options?
    this.easyrtc.enableVideoReceive(true);
    this.easyrtc.enableAudioReceive(true);
  }

  setServerConnectListeners(successListener, failureListener) {
    this.connectSuccess = successListener;
    this.connectFailure = failureListener;
  }

  setRoomOccupantListener(occupantListener) {
    this.easyrtc.setRoomOccupantListener(function(roomName, occupants, primary) {
      occupantListener(occupants);
    });
  }

  setDataChannelListeners(openListener, closedListener, messageListener) {
    this.easyrtc.setDataChannelOpenListener(openListener);
    this.easyrtc.setDataChannelCloseListener(closedListener);
    this.easyrtc.setPeerListener(messageListener);
  }
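
  // Estimate the offset between the local clock and the server clock by issuing
  // a HEAD request to the current page and reading the Date response header.
  // The first 10 samples are taken back to back; after that the clock is
  // re-sampled every five minutes and the oldest sample is overwritten. The
  // averaged offset is exposed through getServerTime().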
  updateTimeOffset() {
    const clientSentTime = Date.now() + this.avgTimeOffset;

    return fetch(document.location.href, { method: "HEAD", cache: "no-cache" })
      .then(res => {
        const precision = 1000;
        const serverReceivedTime = new Date(res.headers.get("Date")).getTime() + (precision / 2);
        const clientReceivedTime = Date.now();
        const serverTime = serverReceivedTime + ((clientReceivedTime - clientSentTime) / 2);
        const timeOffset = serverTime - clientReceivedTime;

        this.serverTimeRequests++;

        if (this.serverTimeRequests <= 10) {
          this.timeOffsets.push(timeOffset);
        } else {
          this.timeOffsets[this.serverTimeRequests % 10] = timeOffset;
        }

        this.avgTimeOffset = this.timeOffsets.reduce((acc, offset) => acc + offset, 0) / this.timeOffsets.length;

        if (this.serverTimeRequests > 10) {
          setTimeout(() => this.updateTimeOffset(), 5 * 60 * 1000); // Sync clock every 5 minutes.
        } else {
          this.updateTimeOffset();
        }
      });
  }

  connect() {
    Promise.all([
      this.updateTimeOffset(),
      new Promise((resolve, reject) => {
        this._connect(resolve, reject);
      })
    ]).then(([_, clientId]) => {
      this._myRoomJoinTime = this._getRoomJoinTime(clientId);
      this.connectSuccess(clientId);
    }).catch(this.connectFailure);
  }
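
  // Only one side of each peer pair should initiate the WebRTC call: the peer
  // that joined the room earlier (or at the same time) starts the connection,
  // based on the roomJoinTime reported by EasyRTC.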
  shouldStartConnectionTo(client) {
    return this._myRoomJoinTime <= client.roomJoinTime;
  }

  startStreamConnection(clientId) {
    this.easyrtc.call(
      clientId,
      function(caller, media) {
        if (media === "datachannel") {
          NAF.log.write("Successfully started datachannel to ", caller);
        }
      },
      function(errorCode, errorText) {
        NAF.log.error(errorCode, errorText);
      },
      function(wasAccepted) {
        // console.log("was accepted=" + wasAccepted);
      }
    );
  }

  closeStreamConnection(clientId) {
    this.easyrtc.hangup(clientId);
  }

  sendData(clientId, dataType, data) {
    // Send via the WebRTC data channel; EasyRTC falls back to the websocket
    // connection if the channel is not available.
    this.easyrtc.sendData(clientId, dataType, data);
  }

  sendDataGuaranteed(clientId, dataType, data) {
    this.easyrtc.sendDataWS(clientId, dataType, data);
  }

  broadcastData(dataType, data) {
    const roomOccupants = this.easyrtc.getRoomOccupantsAsMap(this.room);

    // Iterate over the keys of the EasyRTC room occupants map directly;
    // getRoomOccupantsAsArray uses Object.keys, which allocates memory.
    for (const roomOccupant in roomOccupants) {
      if (
        roomOccupants[roomOccupant] &&
        roomOccupant !== this.easyrtc.myEasyrtcid
      ) {
        // Send via the WebRTC data channel, falling back to the websocket.
        this.easyrtc.sendData(roomOccupant, dataType, data);
      }
    }
  }

  broadcastDataGuaranteed(dataType, data) {
    this.easyrtc.sendDataWS(this.destination, dataType, data);
  }

  getConnectStatus(clientId) {
    const status = this.easyrtc.getConnectStatus(clientId);

    if (status === this.easyrtc.IS_CONNECTED) {
      return NAF.adapters.IS_CONNECTED;
    } else if (status === this.easyrtc.NOT_CONNECTED) {
      return NAF.adapters.NOT_CONNECTED;
    } else {
      return NAF.adapters.CONNECTING;
    }
  }
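
  // Returns a promise for the given client's media stream. If the stream has
  // already been received it resolves immediately; otherwise the resolve/reject
  // handlers are stored in pendingMediaRequests and fulfilled later by
  // setMediaStream() (or rejected if the peer disconnects first).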
  getMediaStream(clientId, streamName = "audio") {
    if (this.mediaStreams[clientId] && this.mediaStreams[clientId][streamName]) {
      NAF.log.write(`Already had ${streamName} for ${clientId}`);
      return Promise.resolve(this.mediaStreams[clientId][streamName]);
    } else {
      NAF.log.write(`Waiting on ${streamName} for ${clientId}`);

      // Create the initial pendingMediaRequests entry with audio/video aliases.
      if (!this.pendingMediaRequests.has(clientId)) {
        const pendingMediaRequests = {};

        const audioPromise = new Promise((resolve, reject) => {
          pendingMediaRequests.audio = { resolve, reject };
        }).catch(e => NAF.log.warn(`${clientId} getMediaStream Audio Error`, e));
        pendingMediaRequests.audio.promise = audioPromise;

        const videoPromise = new Promise((resolve, reject) => {
          pendingMediaRequests.video = { resolve, reject };
        }).catch(e => NAF.log.warn(`${clientId} getMediaStream Video Error`, e));
        pendingMediaRequests.video.promise = videoPromise;

        this.pendingMediaRequests.set(clientId, pendingMediaRequests);
      }

      const pendingMediaRequests = this.pendingMediaRequests.get(clientId);

      // Add a pending request for this streamName if it isn't one of the aliases.
      if (!pendingMediaRequests[streamName]) {
        const streamPromise = new Promise((resolve, reject) => {
          pendingMediaRequests[streamName] = { resolve, reject };
        }).catch(e => NAF.log.warn(`${clientId} getMediaStream "${streamName}" Error`, e));
        pendingMediaRequests[streamName].promise = streamPromise;
      }

      return this.pendingMediaRequests.get(clientId)[streamName].promise;
    }
  }
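
  // Stores an incoming stream for a client and resolves any pending request for
  // it. A "default" stream is split into separate audio and video streams so the
  // audio/video aliases used by getMediaStream() resolve independently.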
  setMediaStream(clientId, stream, streamName) {
    const pendingMediaRequests = this.pendingMediaRequests.get(clientId); // undefined if there is no entry in the Map
    const clientMediaStreams = this.mediaStreams[clientId] = this.mediaStreams[clientId] || {};

    if (streamName === "default") {
      // Safari doesn't like it when you use a mixed media stream where one of the
      // tracks is inactive, so we split the tracks into two streams.

      // Add the mediaStreams audio streamName alias.
      const audioTracks = stream.getAudioTracks();
      if (audioTracks.length > 0) {
        const audioStream = new MediaStream();
        try {
          audioTracks.forEach(track => audioStream.addTrack(track));
          clientMediaStreams.audio = audioStream;
        } catch (e) {
          NAF.log.warn(`${clientId} setMediaStream "audio" alias Error`, e);
        }
        // Resolve the promise for the user's audio stream alias if it exists.
        if (pendingMediaRequests) pendingMediaRequests.audio.resolve(audioStream);
      }

      // Add the mediaStreams video streamName alias.
      const videoTracks = stream.getVideoTracks();
      if (videoTracks.length > 0) {
        const videoStream = new MediaStream();
        try {
          videoTracks.forEach(track => videoStream.addTrack(track));
          clientMediaStreams.video = videoStream;
        } catch (e) {
          NAF.log.warn(`${clientId} setMediaStream "video" alias Error`, e);
        }
        // Resolve the promise for the user's video stream alias if it exists.
        if (pendingMediaRequests) pendingMediaRequests.video.resolve(videoStream);
      }
    } else {
      clientMediaStreams[streamName] = stream;

      // Resolve the promise for the user's media stream by streamName if it exists.
      if (pendingMediaRequests && pendingMediaRequests[streamName]) {
        pendingMediaRequests[streamName].resolve(stream);
        delete pendingMediaRequests[streamName];
      }
    }
  }

  addLocalMediaStream(stream, streamName) {
    const easyrtc = this.easyrtc;
    streamName = streamName || stream.id;

    this.setMediaStream("local", stream, streamName);
    easyrtc.register3rdPartyLocalMediaStream(stream, streamName);

    // Add the local stream to existing connections.
    Object.keys(this.remoteClients).forEach((clientId) => {
      if (easyrtc.getConnectStatus(clientId) !== easyrtc.NOT_CONNECTED) {
        easyrtc.addStreamToCall(clientId, streamName);
      }
    });
  }

  removeLocalMediaStream(streamName) {
    this.easyrtc.closeLocalMediaStream(streamName);
    delete this.mediaStreams["local"][streamName];
  }

  enableMicrophone(enabled) {
    this.easyrtc.enableMicrophone(enabled);
  }

  enableCamera(enabled) {
    this.easyrtc.enableCamera(enabled);
  }

  disconnect() {
    this.easyrtc.disconnect();
  }

  /**
   * Privates
   */

  _connect(connectSuccess, connectFailure) {
    const that = this;

    this.easyrtc.setStreamAcceptor(this.setMediaStream.bind(this));

    this.easyrtc.setOnStreamClosed(function(clientId, stream, streamName) {
      if (streamName === "default") {
        delete that.mediaStreams[clientId].audio;
        delete that.mediaStreams[clientId].video;
      } else {
        delete that.mediaStreams[clientId][streamName];
      }
      if (Object.keys(that.mediaStreams[clientId]).length === 0) {
        delete that.mediaStreams[clientId];
      }
    });

    if (that.easyrtc.audioEnabled || that.easyrtc.videoEnabled) {
      navigator.mediaDevices.getUserMedia({
        video: that.easyrtc.videoEnabled,
        audio: that.easyrtc.audioEnabled
      }).then(
        function(stream) {
          that.addLocalMediaStream(stream, "default");
          that.easyrtc.connect(that.app, connectSuccess, connectFailure);
        },
        function(error) {
          // getUserMedia rejects with a single error object.
          NAF.log.error(error);
        }
      );
    } else {
      that.easyrtc.connect(that.app, connectSuccess, connectFailure);
    }
  }

  _getRoomJoinTime(clientId) {
    const myRoomId = NAF.room;
    const joinTime = this.easyrtc.getRoomOccupantsAsMap(myRoomId)[clientId].roomJoinTime;
    return joinTime;
  }

  getServerTime() {
    return Date.now() + this.avgTimeOffset;
  }
}
module.exports = EasyRtcAdapter;
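
// Example registration (a minimal sketch, not part of this file): assumes NAF's
// adapter registry (NAF.adapters.register) and that open-easyrtc has been loaded
// globally as window.easyrtc before networked-scene connects.
//
//   NAF.adapters.register("easyrtc", EasyRtcAdapter);
//   // <a-scene networked-scene="adapter: easyrtc; ..."> will then use this adapter.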