// navflow-browser-server — compiled JavaScript output (ScreenShareService)
// Version: (unspecified)
// Standalone Playwright browser server for NavFlow - enables browser automation
// with API key authentication, workspace device management, session sync,
// LLM discovery tools, and requires Node.js v22+
// 295 lines • 13.1 kB • JavaScript
;
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.ScreenShareService = void 0;
const events_1 = require("events");
// @ts-ignore - wrtc doesn't have complete types
const wrtc_1 = __importDefault(require("@roamhq/wrtc"));
const canvas_1 = require("canvas");
/**
 * Manages WebRTC-based screen sharing of Playwright browser sessions.
 *
 * Frames are captured with the Chrome DevTools Protocol (Page.startScreencast),
 * decoded onto a node-canvas, and pushed as RGBA frames into a wrtc
 * RTCVideoSource track. Signalling (offer / answer / ICE candidates) is relayed
 * over a tunnel WebSocket set via setTunnelWebSocket().
 *
 * Emits: 'iceCandidate', 'connectionStateChange', 'screenShareStarted',
 * 'screenShareStopped'.
 */
class ScreenShareService extends events_1.EventEmitter {
    constructor() {
        super();
        // sessionId -> { sessionId, peerConnection, stream, cdpSession, isActive, createdAt }
        this.screenSessions = new Map();
        // Public Google STUN servers used for ICE candidate gathering.
        this.iceServers = [
            { urls: 'stun:stun.l.google.com:19302' },
            { urls: 'stun:stun1.l.google.com:19302' }
        ];
    }
    /**
     * Sets the WebSocket used to relay WebRTC signalling messages
     * (offers and ICE candidates) to the remote peer.
     * @param ws - Open tunnel WebSocket connection.
     */
    setTunnelWebSocket(ws) {
        this.tunnelWebSocket = ws;
    }
    /**
     * Starts a screen share for a browser session: creates an RTCPeerConnection,
     * attaches a CDP-screencast-backed video track, and immediately sends a
     * WebRTC offer through the tunnel WebSocket.
     *
     * Any existing share for the same sessionId is stopped first.
     * @param sessionId - Identifier of the browser session.
     * @param browserSession - Object exposing a Playwright `page`.
     * @throws Rethrows any failure after logging it.
     */
    async startScreenShare(sessionId, browserSession) {
        try {
            // Close existing screen share if active
            if (this.screenSessions.has(sessionId)) {
                await this.stopScreenShare(sessionId);
            }
            console.log(`Starting screen share for session: ${sessionId}`);
            // Create WebRTC peer connection
            const peerConnection = new wrtc_1.default.RTCPeerConnection({
                iceServers: this.iceServers
            });
            // Relay locally-gathered ICE candidates to the remote peer as they arrive.
            peerConnection.onicecandidate = (event) => {
                if (event.candidate && this.tunnelWebSocket) {
                    const message = {
                        type: 'webrtc_ice_candidate',
                        sessionId,
                        candidate: event.candidate
                    };
                    this.tunnelWebSocket.send(JSON.stringify(message));
                    this.emit('iceCandidate', sessionId, event.candidate);
                }
            };
            peerConnection.onconnectionstatechange = () => {
                console.log(`WebRTC connection state for ${sessionId}:`, peerConnection.connectionState);
                this.emit('connectionStateChange', sessionId, peerConnection.connectionState);
            };
            // Get video stream from Playwright page
            const { stream, cdpSession } = await this.capturePageStream(browserSession);
            if (stream) {
                // Add video track(s) to peer connection
                stream.getTracks().forEach((track) => {
                    peerConnection.addTrack(track, stream);
                });
            }
            const screenSession = {
                sessionId,
                peerConnection,
                stream: stream || undefined,
                cdpSession,
                isActive: true,
                createdAt: new Date()
            };
            this.screenSessions.set(sessionId, screenSession);
            this.emit('screenShareStarted', sessionId);
            // Automatically create and send WebRTC offer
            console.log(`Creating WebRTC offer for session: ${sessionId}`);
            await this.createOffer(sessionId);
        }
        catch (error) {
            console.error(`Failed to start screen share for ${sessionId}:`, error);
            throw error;
        }
    }
    /**
     * Stops and tears down a screen share: stops the CDP screencast, stops all
     * media tracks, closes the peer connection, and removes the session.
     * No-op if the sessionId has no active share.
     * @param sessionId - Identifier of the browser session.
     * @throws Rethrows teardown failures (except CDP stop errors, which are
     *         only warned — the CDP session may already be gone).
     */
    async stopScreenShare(sessionId) {
        const screenSession = this.screenSessions.get(sessionId);
        if (!screenSession)
            return;
        try {
            console.log(`Stopping screen share for session: ${sessionId}`);
            // Stop CDP screencast; best-effort, the target may already be closed.
            if (screenSession.cdpSession) {
                try {
                    await screenSession.cdpSession.send('Page.stopScreencast');
                    await screenSession.cdpSession.detach();
                }
                catch (cdpError) {
                    console.warn('Failed to stop CDP screencast:', cdpError);
                }
            }
            // Stop all tracks
            if (screenSession.stream) {
                screenSession.stream.getTracks().forEach((track) => {
                    track.stop();
                });
            }
            // Close peer connection
            screenSession.peerConnection.close();
            screenSession.isActive = false;
            this.screenSessions.delete(sessionId);
            this.emit('screenShareStopped', sessionId);
        }
        catch (error) {
            console.error(`Failed to stop screen share for ${sessionId}:`, error);
            throw error;
        }
    }
    /**
     * Creates an SDP offer for the session, sets it as the local description,
     * and sends it through the tunnel WebSocket if one is attached.
     * @param sessionId - Identifier of an active screen share session.
     * @returns The created RTCSessionDescriptionInit offer.
     * @throws If no session exists for sessionId or offer creation fails.
     */
    async createOffer(sessionId) {
        const screenSession = this.screenSessions.get(sessionId);
        if (!screenSession) {
            throw new Error(`No screen share session found for ${sessionId}`);
        }
        try {
            // Send-only: we stream video out and receive nothing back.
            const offer = await screenSession.peerConnection.createOffer({
                offerToReceiveAudio: false,
                offerToReceiveVideo: false
            });
            await screenSession.peerConnection.setLocalDescription(offer);
            // Send offer through tunnel WebSocket
            if (this.tunnelWebSocket) {
                const message = {
                    type: 'webrtc_offer',
                    sessionId,
                    offer
                };
                this.tunnelWebSocket.send(JSON.stringify(message));
            }
            return offer;
        }
        catch (error) {
            console.error(`Failed to create offer for ${sessionId}:`, error);
            throw error;
        }
    }
    /**
     * Applies the remote peer's SDP answer to the session's peer connection.
     * @param sessionId - Identifier of an active screen share session.
     * @param answer - RTCSessionDescriptionInit answer from the remote peer.
     * @throws If no session exists for sessionId or the description is rejected.
     */
    async handleAnswer(sessionId, answer) {
        const screenSession = this.screenSessions.get(sessionId);
        if (!screenSession) {
            throw new Error(`No screen share session found for ${sessionId}`);
        }
        try {
            await screenSession.peerConnection.setRemoteDescription(answer);
        }
        catch (error) {
            console.error(`Failed to handle answer for ${sessionId}:`, error);
            throw error;
        }
    }
    /**
     * Adds a remote ICE candidate to the session's peer connection.
     * @param sessionId - Identifier of an active screen share session.
     * @param candidate - RTCIceCandidateInit from the remote peer.
     * @throws If no session exists for sessionId or the candidate is rejected.
     */
    async addIceCandidate(sessionId, candidate) {
        const screenSession = this.screenSessions.get(sessionId);
        if (!screenSession) {
            throw new Error(`No screen share session found for ${sessionId}`);
        }
        try {
            await screenSession.peerConnection.addIceCandidate(candidate);
        }
        catch (error) {
            console.error(`Failed to add ICE candidate for ${sessionId}:`, error);
            throw error;
        }
    }
    /**
     * Reports whether a session is sharing and its WebRTC connection state.
     * @param sessionId - Identifier of a browser session.
     * @returns `{ isActive }` (false if unknown), plus `connectionState`
     *          when the session exists.
     */
    getScreenShareStatus(sessionId) {
        const screenSession = this.screenSessions.get(sessionId);
        if (!screenSession) {
            return { isActive: false };
        }
        return {
            isActive: screenSession.isActive,
            connectionState: screenSession.peerConnection.connectionState
        };
    }
    /**
     * Builds a MediaStream backed by CDP Page.startScreencast frames.
     *
     * Each JPEG frame is decoded via node-canvas, converted to RGBA, and fed to
     * an RTCVideoSource. If CDP capture fails, a static 1920x1080 placeholder
     * stream is returned instead; if even that fails, both results are null and
     * the caller proceeds without video.
     * @param browserSession - Object exposing a Playwright `page`.
     * @returns `{ stream, cdpSession }` — either may be null on failure.
     */
    async capturePageStream(browserSession) {
        try {
            console.log('Creating video stream from browser session');
            // Use Chrome DevTools Protocol to start screen cast
            const page = browserSession.page;
            const cdpSession = await page.context().newCDPSession(page);
            // Get actual viewport dimensions from page
            const viewport = page.viewportSize() || { width: 1920, height: 1080 };
            // Start screencast with CDP using actual dimensions
            await cdpSession.send('Page.startScreencast', {
                format: 'jpeg',
                quality: 80,
                maxWidth: viewport.width,
                maxHeight: viewport.height,
                everyNthFrame: 1
            });
            // Create a canvas for frame processing using actual dimensions
            const canvas = (0, canvas_1.createCanvas)(viewport.width, viewport.height);
            const ctx = canvas.getContext('2d');
            // node-canvas has no captureStream(), so frames are pushed manually
            // into a wrtc RTCVideoSource instead.
            const videoTrack = new wrtc_1.default.nonstandard.RTCVideoSource();
            const stream = new wrtc_1.default.MediaStream([videoTrack.createTrack()]);
            // Handle screencast frames from CDP
            cdpSession.on('Page.screencastFrame', async (params) => {
                try {
                    // Convert base64 to buffer
                    const imageBuffer = Buffer.from(params.data, 'base64');
                    const img = new canvas_1.Image();
                    img.onload = () => {
                        try {
                            // Adjust canvas size to match image dimensions to avoid buffer mismatch
                            const imgWidth = img.width || viewport.width;
                            const imgHeight = img.height || viewport.height;
                            // Update canvas size if needed
                            if (canvas.width !== imgWidth || canvas.height !== imgHeight) {
                                canvas.width = imgWidth;
                                canvas.height = imgHeight;
                                console.log(`Adjusted canvas size to ${imgWidth}x${imgHeight}`);
                            }
                            ctx.clearRect(0, 0, canvas.width, canvas.height);
                            ctx.drawImage(img, 0, 0, canvas.width, canvas.height);
                            // Convert canvas to RGBA ImageData for WebRTC
                            const imageData = ctx.getImageData(0, 0, canvas.width, canvas.height);
                            const rgbaFrame = {
                                width: canvas.width,
                                height: canvas.height,
                                data: new Uint8Array(imageData.data)
                            };
                            // Send frame to WebRTC video track
                            videoTrack.onFrame(rgbaFrame);
                        }
                        catch (frameError) {
                            console.error('Error processing frame in onload:', frameError);
                        }
                    };
                    img.onerror = (error) => {
                        console.error('Failed to load image:', error);
                    };
                    // Decode directly from the Buffer — node-canvas Image.src accepts
                    // Buffers, which avoids re-wrapping the payload as a data URL and
                    // base64-decoding every frame twice. (node-canvas decodes
                    // synchronously, so onload fires before the ack below.)
                    img.src = imageBuffer;
                    // Acknowledge the frame so CDP keeps sending more.
                    await cdpSession.send('Page.screencastFrameAck', {
                        sessionId: params.sessionId
                    });
                }
                catch (error) {
                    console.error('Error processing screencast frame:', error);
                }
            });
            return { stream, cdpSession };
        }
        catch (error) {
            console.error('Failed to capture page stream:', error);
            // Fallback: Create a static placeholder stream if CDP fails
            try {
                const canvas = (0, canvas_1.createCanvas)(1920, 1080);
                const ctx = canvas.getContext('2d');
                // Draw a placeholder frame
                ctx.fillStyle = '#000000';
                ctx.fillRect(0, 0, canvas.width, canvas.height);
                ctx.fillStyle = '#ffffff';
                ctx.font = '48px Arial';
                ctx.textAlign = 'center';
                ctx.fillText('Screen Share Starting...', canvas.width / 2, canvas.height / 2);
                // Create a simple fallback stream
                const videoTrack = new wrtc_1.default.nonstandard.RTCVideoSource();
                const stream = new wrtc_1.default.MediaStream([videoTrack.createTrack()]);
                // Send the single static frame
                const imageData = ctx.getImageData(0, 0, canvas.width, canvas.height);
                const rgbaFrame = {
                    width: canvas.width,
                    height: canvas.height,
                    data: new Uint8Array(imageData.data)
                };
                videoTrack.onFrame(rgbaFrame);
                return { stream, cdpSession: null };
            }
            catch (fallbackError) {
                console.error('Fallback stream creation failed:', fallbackError);
            }
            // Last resort: caller continues without video.
            return { stream: null, cdpSession: null };
        }
    }
    /**
     * Lists sessionIds of all currently active screen shares.
     * @returns Array of active session identifiers.
     */
    getActiveSessions() {
        return Array.from(this.screenSessions.entries())
            .filter(([, session]) => session.isActive)
            .map(([sessionId]) => sessionId);
    }
    /**
     * @returns Number of currently active screen share sessions.
     */
    getActiveSessionCount() {
        return this.getActiveSessions().length;
    }
    /**
     * Stops every screen share session. Uses Promise.allSettled so that one
     * failing teardown does not abort cleanup of the remaining sessions
     * (stopScreenShare rethrows on failure); failures are logged individually.
     */
    async cleanup() {
        const sessionIds = Array.from(this.screenSessions.keys());
        console.log(`[ScreenShareService] Cleaning up ${sessionIds.length} screen share sessions`);
        const results = await Promise.allSettled(sessionIds.map(id => this.stopScreenShare(id)));
        for (const result of results) {
            if (result.status === 'rejected') {
                console.error('[ScreenShareService] Failed to stop a session during cleanup:', result.reason);
            }
        }
    }
}
exports.ScreenShareService = ScreenShareService;
//# sourceMappingURL=ScreenShareService.js.map