// homebridge-eufy-security
// Version:
// Control Eufy Security from homebridge.
// 286 lines • 13.3 kB
// JavaScript
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.StreamingDelegate = void 0;
const dgram_1 = require("dgram");
const ffmpeg_1 = require("../utils/ffmpeg");
const eufy_security_client_1 = require("eufy-security-client");
const LocalLivestreamManager_1 = require("./LocalLivestreamManager");
const SnapshotManager_1 = require("./SnapshotManager");
const Talkback_1 = require("../utils/Talkback");
const utils_1 = require("../utils/utils");
const camera_utils_1 = require("@homebridge/camera-utils");
/**
 * HomeKit camera streaming delegate for a Eufy camera accessory.
 *
 * Implements the HAP CameraStreamingDelegate surface (snapshot requests,
 * prepareStream, start/reconfigure/stop) by spawning ffmpeg processes that
 * transcode the camera's source — either an RTSP URL or the station's P2P
 * livestream (via LocalLivestreamManager) — into the SRTP streams HomeKit
 * expects, plus an optional talkback return-audio channel.
 */
class StreamingDelegate {
    camera;
    videoConfig;
    controller;
    platform;
    device;
    log;
    localLivestreamManager;
    snapshotManager;
    // keep track of sessions
    pendingSessions = new Map(); // sessionID -> SessionInfo (prepared, not yet started)
    ongoingSessions = new Map(); // sessionID -> active session (ffmpeg processes, socket, timeout)
    timeouts = new Map(); // NOTE(review): never read in this file; kept for interface compatibility
    /**
     * @param camera - accessory wrapper; must expose platform, device,
     *                 cameraConfig (with videoConfig) and log.
     */
    constructor(camera) {
        this.camera = camera;
        this.platform = camera.platform;
        this.device = camera.device;
        this.videoConfig = camera.cameraConfig.videoConfig;
        this.log = camera.log;
        this.localLivestreamManager = new LocalLivestreamManager_1.LocalLivestreamManager(camera);
        this.snapshotManager = new SnapshotManager_1.SnapshotManager(this.camera, this.localLivestreamManager);
    }
    /** Stores the HAP CameraController so inactive streams can be force-stopped. */
    setController(controller) {
        this.controller = controller;
    }
    /** @returns the LocalLivestreamManager driving the P2P livestream. */
    getLivestreamManager() {
        return this.localLivestreamManager;
    }
    /**
     * Handles a HomeKit snapshot request.
     *
     * Fetches a snapshot buffer resized to the requested dimensions and hands
     * it to the callback. On failure the error is forwarded to HomeKit.
     *
     * @param request - HAP SnapshotRequest (width/height).
     * @param callback - HAP snapshot callback (error?, buffer?).
     */
    async handleSnapshotRequest(request, callback) {
        this.log.debug('handleSnapshotRequest');
        try {
            this.log.debug('Snapshot requested: ' + request.width + ' x ' + request.height, this.videoConfig.debug);
            const snapshot = await this.snapshotManager.getSnapshotBufferResized(request);
            this.log.debug('snapshot byte length: ' + snapshot?.byteLength);
            callback(undefined, snapshot);
        }
        catch (error) {
            this.log.error(error);
            // FIX: previously called callback() with no arguments, which signals
            // success-with-no-buffer to HAP; forward the error instead.
            callback(error);
        }
    }
    /**
     * Phase 1 of a HomeKit stream: reserves local UDP return ports, generates
     * SSRCs, records the session parameters in pendingSessions, and answers
     * HomeKit with the ports/keys it should target.
     *
     * @param request - HAP PrepareStreamRequest.
     * @param callback - HAP PrepareStreamCallback.
     */
    async prepareStream(request, callback) {
        const ipv6 = request.addressVersion === 'ipv6';
        this.log.debug(`stream prepare request with session id ${request.sessionID} was received.`);
        // Reserve two consecutive UDP ports: one for video RTCP return, one for audio.
        const [videoReturnPort, audioReturnPort] = await (0, camera_utils_1.reservePorts)({ type: 'udp', count: 2 });
        const videoSSRC = utils_1.HAP.CameraController.generateSynchronisationSource();
        const audioSSRC = utils_1.HAP.CameraController.generateSynchronisationSource();
        const sessionInfo = {
            address: request.targetAddress,
            ipv6: ipv6,
            videoPort: request.video.port,
            videoReturnPort: videoReturnPort,
            videoCryptoSuite: request.video.srtpCryptoSuite,
            videoSRTP: Buffer.concat([request.video.srtp_key, request.video.srtp_salt]),
            videoSSRC: videoSSRC,
            audioPort: request.audio.port,
            audioReturnPort: audioReturnPort,
            audioCryptoSuite: request.audio.srtpCryptoSuite,
            audioSRTP: Buffer.concat([request.audio.srtp_key, request.audio.srtp_salt]),
            audioSSRC: audioSSRC,
        };
        const response = {
            video: {
                port: videoReturnPort,
                ssrc: videoSSRC,
                srtp_key: request.video.srtp_key,
                srtp_salt: request.video.srtp_salt,
            },
            audio: {
                port: audioReturnPort,
                ssrc: audioSSRC,
                srtp_key: request.audio.srtp_key,
                srtp_salt: request.audio.srtp_salt,
            },
        };
        this.pendingSessions.set(request.sessionID, sessionInfo);
        callback(undefined, response);
    }
    /**
     * Phase 2 of a HomeKit stream: wires up the return-traffic socket with an
     * inactivity watchdog, builds ffmpeg parameters for video (and audio, if a
     * supported codec was negotiated), selects the input source (RTSP URL or
     * local P2P livestream), starts the ffmpeg process(es) and, when enabled,
     * the talkback return-audio pipeline. Moves the session from pending to
     * ongoing on success.
     *
     * @param request - HAP StartStreamRequest.
     * @param callback - HAP StreamRequestCallback; invoked once ffmpeg started
     *                   or with an Error on failure.
     */
    async startStream(request, callback) {
        const sessionInfo = this.pendingSessions.get(request.sessionID);
        if (!sessionInfo) {
            this.log.error('Error finding session information.');
            callback(new Error('Error finding session information'));
            // FIX: previously fell through and dereferenced the missing
            // sessionInfo below, crashing with a TypeError.
            return;
        }
        this.log.debug('VIDEOCONFIG: ', this.videoConfig);
        try {
            const activeSession = {};
            activeSession.socket = (0, dgram_1.createSocket)(sessionInfo.ipv6 ? 'udp6' : 'udp4');
            activeSession.socket.on('error', (err) => {
                this.log.error('Socket error: ' + err.message);
                this.stopStream(request.sessionID);
            });
            // Inactivity watchdog: every RTCP packet from the device resets the
            // timer; five missed RTCP intervals means the stream is dead.
            activeSession.socket.on('message', () => {
                if (activeSession.timeout) {
                    clearTimeout(activeSession.timeout);
                }
                activeSession.timeout = setTimeout(() => {
                    this.log.debug('Device appears to be inactive. Stopping video stream.');
                    this.controller?.forceStopStreamingSession(request.sessionID);
                    this.stopStream(request.sessionID);
                }, request.video.rtcp_interval * 5 * 1000);
            });
            activeSession.socket.bind(sessionInfo.videoReturnPort);
            // get streams
            const videoParams = await ffmpeg_1.FFmpegParameters.forVideo(this.videoConfig.debug);
            videoParams.setup(this.camera.cameraConfig, request);
            videoParams.setRTPTarget(sessionInfo, request);
            // Audio is only streamed for codecs ffmpeg can produce here.
            const useAudio = (request.audio.codec === "OPUS" /* AudioStreamingCodecType.OPUS */
                || request.audio.codec === "AAC-eld" /* AudioStreamingCodecType.AAC_ELD */)
                && this.videoConfig.audio;
            if (!useAudio && this.videoConfig.audio) {
                this.log.warn(`An unsupported audio codec (type: ${request.audio.codec}) was requested. Audio streaming will be omitted.`);
            }
            let audioParams = undefined;
            if (useAudio) {
                audioParams = await ffmpeg_1.FFmpegParameters.forAudio(this.videoConfig.debug);
                audioParams.setup(this.camera.cameraConfig, request);
                audioParams.setRTPTarget(sessionInfo, request);
            }
            const rtsp = (0, utils_1.is_rtsp_ready)(this.device, this.camera.cameraConfig);
            if (rtsp) {
                // Device exposes an RTSP URL; feed it to ffmpeg directly.
                const url = this.device.getPropertyValue(eufy_security_client_1.PropertyName.DeviceRTSPStreamUrl);
                this.log.debug('RTSP URL: ' + url);
                videoParams.setInputSource(url);
                audioParams?.setInputSource(url);
            }
            else {
                // No RTSP: pull the P2P livestream from the station and pipe it in.
                try {
                    const streamData = await this.localLivestreamManager.getLocalLivestream().catch((error) => {
                        throw error;
                    });
                    await videoParams.setInputStream(streamData.videostream);
                    await audioParams?.setInputStream(streamData.audiostream);
                }
                catch (error) {
                    this.log.error(('Unable to start the livestream: ' + error));
                    callback(error);
                    this.pendingSessions.delete(request.sessionID);
                    return;
                }
            }
            // Note: ??= also persists the default back into videoConfig.
            const useSeparateProcesses = this.videoConfig.useSeparateProcesses ??= false;
            const videoProcess = new ffmpeg_1.FFmpeg(`[Video Process]`, !useSeparateProcesses && audioParams ? [videoParams, audioParams] : videoParams);
            videoProcess.on('started', () => {
                // Tell HomeKit the stream is live only once ffmpeg actually started.
                callback();
            });
            videoProcess.on('error', (error) => {
                this.log.error('Video process ended with error: ' + error);
                this.stopStream(request.sessionID);
            });
            activeSession.videoProcess = videoProcess;
            activeSession.videoProcess.start();
            if (useSeparateProcesses && audioParams) {
                const audioProcess = new ffmpeg_1.FFmpeg(`[Audio Process]`, audioParams);
                audioProcess.on('error', (error) => {
                    this.log.error('Audio process ended with error: ' + error);
                    this.stopStream(request.sessionID);
                });
                activeSession.audioProcess = audioProcess;
                activeSession.audioProcess.start();
            }
            if (this.camera.cameraConfig.talkback) {
                // Return-audio path: ffmpeg decodes HomeKit's SRTP audio and pipes
                // it into the TalkbackStream towards the device's speaker.
                const talkbackParameters = await ffmpeg_1.FFmpegParameters.forAudio(this.videoConfig.debug);
                await talkbackParameters.setTalkbackInput(sessionInfo);
                if (this.camera.cameraConfig.talkbackChannels) {
                    talkbackParameters.setTalkbackChannels(this.camera.cameraConfig.talkbackChannels);
                }
                activeSession.talkbackStream = new Talkback_1.TalkbackStream(this.platform, this.device);
                activeSession.returnProcess = new ffmpeg_1.FFmpeg(`[Talkback Process]`, talkbackParameters);
                activeSession.returnProcess.on('error', (error) => {
                    this.log.error('Talkback process ended with error: ' + error);
                });
                activeSession.returnProcess.start();
                activeSession.returnProcess.stdout?.pipe(activeSession.talkbackStream);
            }
            // Check if the pendingSession has been stopped before it was successfully started.
            const pendingSession = this.pendingSessions.get(request.sessionID);
            // pendingSession has not been deleted. Transfer it to ongoingSessions.
            if (pendingSession) {
                this.ongoingSessions.set(request.sessionID, activeSession);
                this.pendingSessions.delete(request.sessionID);
            }
            else { // pendingSession has been deleted. Add it to ongoingSession and end it immediately.
                this.ongoingSessions.set(request.sessionID, activeSession);
                this.log.info('pendingSession has been deleted. Add it to ongoingSession and end it immediately.');
                this.stopStream(request.sessionID);
            }
        }
        catch (error) {
            this.log.error('Stream could not be started: ' + error);
            callback(error);
            this.pendingSessions.delete(request.sessionID);
        }
    }
    /**
     * HAP entry point dispatching start / reconfigure / stop stream requests.
     * Reconfigure requests are acknowledged but intentionally ignored.
     *
     * @param request - HAP StreamingRequest.
     * @param callback - HAP StreamRequestCallback.
     */
    handleStreamRequest(request, callback) {
        switch (request.type) {
            case "start" /* StreamRequestTypes.START */:
                this.log.debug(`Received request to start stream with id ${request.sessionID}`);
                this.log.debug(`request data:`, request);
                this.startStream(request, callback);
                break;
            case "reconfigure" /* StreamRequestTypes.RECONFIGURE */:
                this.log.debug('Received request to reconfigure: ' +
                    request.video.width +
                    ' x ' +
                    request.video.height +
                    ', ' +
                    request.video.fps +
                    ' fps, ' +
                    request.video.max_bit_rate +
                    ' kbps (Ignored)', this.videoConfig.debug);
                callback();
                break;
            case "stop" /* StreamRequestTypes.STOP */:
                this.log.debug('Receive Apple HK Stop request', request);
                this.stopStream(request.sessionID);
                callback();
                break;
        }
    }
    /**
     * Tears down a session: clears the watchdog, stops talkback, video and
     * audio ffmpeg processes, closes the return socket and — for P2P sources —
     * releases the station livestream. Each step is individually guarded so a
     * failure in one cleanup step never prevents the others.
     *
     * @param sessionId - HAP session identifier.
     */
    stopStream(sessionId) {
        this.log.debug('Stopping session with id: ' + sessionId);
        const pendingSession = this.pendingSessions.get(sessionId);
        if (pendingSession) {
            this.pendingSessions.delete(sessionId);
        }
        const session = this.ongoingSessions.get(sessionId);
        if (session) {
            if (session.timeout) {
                clearTimeout(session.timeout);
            }
            try {
                session.talkbackStream?.stopTalkbackStream();
                session.returnProcess?.stdout?.unpipe();
                session.returnProcess?.stop();
            }
            catch (error) {
                this.log.error('Error occurred terminating returnAudio FFmpeg process: ' + error);
            }
            try {
                session.videoProcess?.stop();
            }
            catch (error) {
                this.log.error('Error occurred terminating video FFmpeg process: ' + error);
            }
            try {
                session.audioProcess?.stop();
            }
            catch (error) {
                this.log.error('Error occurred terminating audio FFmpeg process: ' + error);
            }
            try {
                session.socket?.close();
            }
            catch (error) {
                this.log.error('Error occurred closing socket: ' + error);
            }
            try {
                if (!(0, utils_1.is_rtsp_ready)(this.device, this.camera.cameraConfig)) {
                    this.localLivestreamManager.stopLocalLiveStream();
                }
            }
            catch (error) {
                this.log.error('Error occurred terminating Eufy Station livestream: ' + error);
            }
            this.ongoingSessions.delete(sessionId);
            this.log.info('Stopped video stream.');
        }
        else {
            this.log.debug('No session to stop.');
        }
    }
}
// CommonJS export (replaces the `void 0` placeholder set at the top of the file).
exports.StreamingDelegate = StreamingDelegate;
//# sourceMappingURL=streamingDelegate.js.map