facezkp
Face analysis library with liveness detection, biometric template extraction, and fuzzy hashing for privacy-preserving identity verification.
603 lines (517 loc) • 17.6 kB
JavaScript
import {
HumanIntegration,
generateFuzzyHash,
compareFuzzyHashes,
defaultFuzzyConfig,
} from '../dist/facezk.esm.js';
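// API surface exercised by this demo:
// - HumanIntegration: wraps the Human face pipeline; used below via
//   initialize(), detectFaces(video), drawOverlay(ctx, face),
//   convertToBiometricTemplate(face), getHuman(), and dispose()
// - generateFuzzyHash(template, config): derives a fuzzy hash from a biometric template
// - compareFuzzyHashes(a, b, config): returns { match, similarity }
// - defaultFuzzyConfig: baseline hashing/comparison configuration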
// DOM Elements
const video = document.getElementById('video');
const canvas = document.getElementById('overlay');
const ctx = canvas.getContext('2d');
const startBtn = document.getElementById('startBtn');
const verifyBtn = document.getElementById('verifyBtn');
const debugBtn = document.getElementById('debugBtn');
const resultDiv = document.getElementById('result');
const instructions = document.getElementById('instructions');
const spinner = document.getElementById('spinner');
const videoContainer = document.querySelector('.video-container');
const debugPanel = document.getElementById('debugPanel');
const debugInfo = document.getElementById('debugInfo');
// Status elements
const cameraStatus = document.getElementById('cameraStatus');
const modelStatus = document.getElementById('modelStatus');
const detectionStatus = document.getElementById('detectionStatus');
const performanceStatus = document.getElementById('performanceStatus');
// Performance elements
const detectFPS = document.getElementById('detectFPS');
const drawFPS = document.getElementById('drawFPS');
const memoryUsage = document.getElementById('memoryUsage');
const backendInfo = document.getElementById('backendInfo');
// State variables
let humanIntegration;
let isHumanReady = false;
let isCameraReady = false;
let isDetectionRunning = false;
let lastFaceResult = null;
let lastFuzzyHash = null;
let previousFuzzyHash = null;
let animationId = null;
let debugMode = false;
// Performance tracking
let frameCount = 0;
let detectionCount = 0;
const detectFPSHistory = [];
const drawFPSHistory = [];
let lastDrawTime = 0;
let lastDetectTime = 0;
const maxFPSHistory = 30;
// Debug information
const debugData = {
camera: { width: 0, height: 0, fps: 0 },
models: { loaded: false, backend: '', error: null },
detection: { frames: 0, faces: 0, avgTime: 0, lastResult: null },
performance: { fps: 0, memory: 0 },
};
// Utility functions
function setLoading(loading) {
spinner.style.display = loading ? 'block' : 'none';
startBtn.disabled = loading;
verifyBtn.disabled = loading || !isHumanReady || !isCameraReady || !lastFaceResult;
}
function updateStatus(element, status, icon, text) {
const statusText = element.querySelector('.status-text');
const statusIcon = element.querySelector('.status-icon');
statusText.textContent = text;
statusIcon.textContent = icon;
element.className = `status-item ${status}`;
}
function setFaceDetected(detected) {
if (detected) {
videoContainer.classList.add('face-detected');
verifyBtn.disabled = false;
} else {
videoContainer.classList.remove('face-detected');
verifyBtn.disabled = !isHumanReady || !isCameraReady || !lastFaceResult;
}
}
function showResult(msg, type = 'info') {
resultDiv.textContent = msg;
resultDiv.className = 'result ' + type;
}
function updatePerformance() {
const now = performance.now();
  // Update detection FPS; 'instantDetectFPS' avoids shadowing the detectFPS DOM element
  if (lastDetectTime > 0) {
    const instantDetectFPS = 1000 / (now - lastDetectTime);
    detectFPSHistory.push(instantDetectFPS);
    if (detectFPSHistory.length > maxFPSHistory) detectFPSHistory.shift();
  }
  lastDetectTime = now;
  // Update draw FPS
  if (lastDrawTime > 0) {
    const instantDrawFPS = 1000 / (now - lastDrawTime);
    drawFPSHistory.push(instantDrawFPS);
    if (drawFPSHistory.length > maxFPSHistory) drawFPSHistory.shift();
  }
  lastDrawTime = now;
// Calculate averages
const avgDetectFPS =
detectFPSHistory.length > 0
? Math.round((detectFPSHistory.reduce((a, b) => a + b, 0) / detectFPSHistory.length) * 10) /
10
: 0;
const avgDrawFPS =
drawFPSHistory.length > 0
? Math.round((drawFPSHistory.reduce((a, b) => a + b, 0) / drawFPSHistory.length) * 10) / 10
: 0;
// Update UI
detectFPS.textContent = `${avgDetectFPS}`;
drawFPS.textContent = `${avgDrawFPS}`;
performanceStatus.querySelector('.status-text').textContent = `FPS: ${avgDetectFPS}`;
  // Memory usage (performance.memory is non-standard and Chrome-only)
  if (performance.memory) {
    const memMB = Math.round(performance.memory.usedJSHeapSize / 1024 / 1024);
    memoryUsage.textContent = `${memMB} MB`;
  }
// Backend info
if (humanIntegration && humanIntegration.getHuman()) {
const backend = humanIntegration.getHuman().tf.getBackend();
backendInfo.textContent = backend.toUpperCase();
}
}
function updateDebugInfo() {
if (!debugMode) return;
const info = [];
// Camera info
info.push(
`<div class="debug-item"><span class="debug-label">Camera:</span><span class="debug-value">${debugData.camera.width}x${debugData.camera.height} @ ${debugData.camera.fps.toFixed(1)}fps</span></div>`
);
// Model info
info.push(
`<div class="debug-item"><span class="debug-label">Models:</span><span class="debug-value">${debugData.models.loaded ? 'Loaded' : 'Loading...'} (${debugData.models.backend})</span></div>`
);
if (debugData.models.error) {
info.push(
`<div class="debug-item"><span class="debug-label">Error:</span><span class="debug-value">${debugData.models.error}</span></div>`
);
}
// Detection info
info.push(
`<div class="debug-item"><span class="debug-label">Detection:</span><span class="debug-value">${debugData.detection.faces} faces in ${debugData.detection.frames} frames</span></div>`
);
info.push(
`<div class="debug-item"><span class="debug-label">Avg Time:</span><span class="debug-value">${debugData.detection.avgTime.toFixed(2)}ms</span></div>`
);
// Performance info
info.push(
`<div class="debug-item"><span class="debug-label">FPS:</span><span class="debug-value">${debugData.performance.fps.toFixed(1)}</span></div>`
);
// Last detection result
if (debugData.detection.lastResult) {
const result = debugData.detection.lastResult;
info.push(
`<div class="debug-item"><span class="debug-label">Confidence:</span><span class="debug-value">${(result.confidence * 100).toFixed(1)}%</span></div>`
);
if (result.emotion) {
info.push(
`<div class="debug-item"><span class="debug-label">Emotion:</span><span class="debug-value">${result.emotion.emotion} (${(result.emotion.confidence * 100).toFixed(0)}%)</span></div>`
);
}
if (result.ageGender) {
info.push(
`<div class="debug-item"><span class="debug-label">Age/Gender:</span><span class="debug-value">${result.ageGender.age}yo ${result.ageGender.gender} (${(result.ageGender.ageConfidence * 100).toFixed(0)}%)</span></div>`
);
}
}
debugInfo.innerHTML = info.join('');
}
// Camera setup
async function setupCamera() {
try {
updateStatus(cameraStatus, 'loading', '📷', 'Camera: Initializing...');
setLoading(true);
const stream = await navigator.mediaDevices.getUserMedia({
video: {
facingMode: 'user',
width: { ideal: 800 },
height: { ideal: 600 },
},
});
video.srcObject = stream;
// Wait for video metadata to load
await new Promise((resolve, reject) => {
const timeout = setTimeout(() => {
reject(new Error('Camera metadata loading timeout'));
}, 10000); // 10 second timeout
video.onloadedmetadata = () => {
clearTimeout(timeout);
resolve();
};
video.onerror = () => {
clearTimeout(timeout);
reject(new Error('Video loading error'));
};
});
await video.play();
// Set canvas dimensions
canvas.width = video.videoWidth;
canvas.height = video.videoHeight;
    debugData.camera.width = video.videoWidth;
    debugData.camera.height = video.videoHeight;
    // Record the negotiated frame rate for the debug panel
    const trackSettings = stream.getVideoTracks()[0].getSettings();
    debugData.camera.fps = trackSettings.frameRate || 0;
isCameraReady = true;
updateStatus(cameraStatus, 'success', '📷', 'Camera: Ready');
setLoading(false);
console.log('Camera setup complete:', {
width: video.videoWidth,
height: video.videoHeight,
readyState: video.readyState,
});
} catch (error) {
console.error('Camera setup failed:', error);
updateStatus(cameraStatus, 'error', '📷', 'Camera: Failed');
showResult(`Camera error: ${error.message}`, 'error');
setLoading(false);
}
}
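// Sketch: if the ideal constraints above are rejected (OverconstrainedError),
// a common pattern is to retry with bare defaults. This helper is illustrative
// and is not wired into setupCamera().
async function getCameraStreamWithFallback() {
  try {
    return await navigator.mediaDevices.getUserMedia({
      video: { facingMode: 'user', width: { ideal: 800 }, height: { ideal: 600 } },
    });
  } catch (error) {
    if (error.name === 'OverconstrainedError') {
      // Let the browser pick whatever the hardware supports
      return navigator.mediaDevices.getUserMedia({ video: true });
    }
    throw error;
  }
}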
// Human library setup
async function setupHuman() {
try {
updateStatus(modelStatus, 'loading', '🧠', 'Models: Loading...');
setLoading(true);
humanIntegration = new HumanIntegration({
backend: 'webgl',
modelBasePath: '/models',
debug: debugMode,
face: {
enabled: true,
detector: {
rotation: true,
return: true,
maxDetected: 1,
minConfidence: 0.5,
iouThreshold: 0.1,
skipFrames: 0,
skipTime: 0,
},
mesh: {
enabled: true,
maxDetected: 1,
minConfidence: 0.5,
return: true,
},
iris: {
enabled: true,
maxDetected: 1,
minConfidence: 0.5,
return: true,
},
emotion: {
enabled: true,
maxDetected: 1,
minConfidence: 0.5,
return: true,
},
antispoof: {
enabled: true,
maxDetected: 1,
minConfidence: 0.5,
return: true,
},
ageGender: {
enabled: true,
maxDetected: 1,
minConfidence: 0.5,
return: true,
},
},
body: { enabled: false },
hand: { enabled: false },
gesture: { enabled: false },
});
await humanIntegration.initialize();
isHumanReady = true;
debugData.models.loaded = true;
debugData.models.backend = humanIntegration.getHuman().config.backend || 'webgl';
updateStatus(modelStatus, 'success', '🧠', 'Models: Ready');
setLoading(false);
} catch (error) {
console.error('Human setup failed:', error);
debugData.models.error = error.message;
updateStatus(modelStatus, 'error', '🧠', 'Models: Failed');
showResult(`Model loading error: ${error.message}`, 'error');
setLoading(false);
}
}
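// Sketch of a backend fallback. Assumes HumanIntegration accepts the same
// options with backend 'wasm' (a standard TensorFlow.js backend; whether this
// wrapper supports it is an assumption, not something this demo confirms).
async function setupHumanWithFallback(options) {
  try {
    const integration = new HumanIntegration({ ...options, backend: 'webgl' });
    await integration.initialize();
    return integration;
  } catch (webglError) {
    console.warn('WebGL backend failed, retrying with wasm:', webglError);
    const integration = new HumanIntegration({ ...options, backend: 'wasm' });
    await integration.initialize();
    return integration;
  }
}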
// Face detection processing
async function processFrame() {
if (!isHumanReady || !isCameraReady || !isDetectionRunning) {
animationId = requestAnimationFrame(processFrame);
return;
}
const startTime = performance.now();
frameCount++;
try {
// Clear canvas and draw video frame
ctx.clearRect(0, 0, canvas.width, canvas.height);
ctx.drawImage(video, 0, 0, canvas.width, canvas.height);
// Detect faces
const faces = await humanIntegration.detectFaces(video);
const processingTime = performance.now() - startTime;
debugData.detection.avgTime =
(debugData.detection.avgTime * (frameCount - 1) + processingTime) / frameCount;
if (faces && faces.length > 0) {
lastFaceResult = faces[0];
detectionCount++;
debugData.detection.faces = detectionCount;
debugData.detection.lastResult = lastFaceResult;
setFaceDetected(true);
updateStatus(detectionStatus, 'success', '👁️', 'Detection: Face Found');
// Draw overlay
humanIntegration.drawOverlay(ctx, lastFaceResult);
drawFaceAttrOverlay(lastFaceResult);
instructions.textContent = 'Face detected. Click Verify Identity.';
} else {
lastFaceResult = null;
setFaceDetected(false);
updateStatus(detectionStatus, 'waiting', '👁️', 'Detection: Searching...');
instructions.textContent = 'Position your face in the camera view';
}
} catch (error) {
console.error('Frame processing error:', error);
updateStatus(detectionStatus, 'error', '👁️', 'Detection: Error');
instructions.textContent = 'Detection error occurred';
}
// Update performance and debug info
updatePerformance();
if (debugMode) {
updateDebugInfo();
}
animationId = requestAnimationFrame(processFrame);
}
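// Sketch: inference need not run on every animation frame. A common variant
// throttles detectFaces() to every Nth frame and redraws the last result in
// between. DETECT_EVERY_N is a hypothetical tuning constant; this function is
// illustrative and is not called by the demo.
const DETECT_EVERY_N = 3;
let throttleFrame = 0;
async function processFrameThrottled() {
  ctx.clearRect(0, 0, canvas.width, canvas.height);
  ctx.drawImage(video, 0, 0, canvas.width, canvas.height);
  if (throttleFrame % DETECT_EVERY_N === 0) {
    const faces = await humanIntegration.detectFaces(video);
    lastFaceResult = faces && faces.length > 0 ? faces[0] : null;
  }
  if (lastFaceResult) {
    // Redraw the most recent detection on skipped frames
    humanIntegration.drawOverlay(ctx, lastFaceResult);
    drawFaceAttrOverlay(lastFaceResult);
  }
  throttleFrame++;
  animationId = requestAnimationFrame(processFrameThrottled);
}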
// Draw face attributes overlay
function drawFaceAttrOverlay(faceResult) {
  if (!faceResult) return;
  const x = faceResult.box ? faceResult.box[0] : 12;
  const y = faceResult.box ? Math.max(faceResult.box[1] - 10, 12) : 12;
  // Size the background box to the number of attribute lines so none overflow
  const lineCount =
    1 + // confidence
    (faceResult.ageGender ? 2 : 0) +
    (faceResult.emotion ? 1 : 0) +
    (faceResult.angles ? 3 : 0) +
    (faceResult.iris ? 1 : 0) +
    (faceResult.antispoof ? 1 : 0);
  const boxHeight = lineCount * 20 + 8;
  ctx.save();
  ctx.globalAlpha = 0.92;
  ctx.fillStyle = 'rgba(0,0,0,0.7)';
  ctx.strokeStyle = 'rgba(241,94,65,0.8)';
  ctx.lineWidth = 1.5;
  // Draw rounded rectangle background
  ctx.beginPath();
  ctx.roundRect(x, y, 210, boxHeight, 10);
  ctx.fill();
  ctx.stroke();
ctx.globalAlpha = 1;
ctx.font = '15px Segoe UI, Arial, sans-serif';
ctx.fillStyle = '#fff';
let attrY = y + 18;
// Confidence
ctx.fillText('✔️ Confidence: ' + (faceResult.confidence * 100).toFixed(1) + '%', x + 12, attrY);
attrY += 20;
// Age & Gender
if (faceResult.ageGender) {
ctx.fillText(
'🎂 Age: ' +
faceResult.ageGender.age +
' (' +
(faceResult.ageGender.ageConfidence * 100).toFixed(0) +
'%)',
x + 12,
attrY
);
attrY += 20;
ctx.fillText(
'🚻 Gender: ' +
faceResult.ageGender.gender +
' (' +
(faceResult.ageGender.genderConfidence * 100).toFixed(0) +
'%)',
x + 12,
attrY
);
attrY += 20;
}
// Emotion
if (faceResult.emotion) {
ctx.fillText(
'😊 Emotion: ' +
faceResult.emotion.emotion +
' (' +
(faceResult.emotion.confidence * 100).toFixed(0) +
'%)',
x + 12,
attrY
);
attrY += 20;
}
// Pose
if (faceResult.angles) {
ctx.fillText('🧭 Yaw: ' + faceResult.angles.yaw.toFixed(1) + '°', x + 12, attrY);
attrY += 20;
ctx.fillText('🧭 Pitch: ' + faceResult.angles.pitch.toFixed(1) + '°', x + 12, attrY);
attrY += 20;
ctx.fillText('🧭 Roll: ' + faceResult.angles.roll.toFixed(1) + '°', x + 12, attrY);
attrY += 20;
}
// Iris
if (faceResult.iris) {
ctx.fillText('👁️ Iris: ' + (faceResult.iris.confidence * 100).toFixed(0) + '%', x + 12, attrY);
attrY += 20;
}
// Anti-spoof
if (faceResult.antispoof) {
ctx.fillText(
'🛡️ Real: ' +
(faceResult.antispoof.isReal ? 'Yes' : 'No') +
' (' +
(faceResult.antispoof.confidence * 100).toFixed(0) +
'%)',
x + 12,
attrY
);
attrY += 20;
}
ctx.restore();
}
// Event handlers
startBtn.onclick = async () => {
if (isDetectionRunning) {
// Stop detection
isDetectionRunning = false;
startBtn.textContent = 'Start Detection';
startBtn.className = 'btn-primary';
updateStatus(detectionStatus, 'waiting', '👁️', 'Detection: Stopped');
instructions.textContent = 'Detection stopped. Click Start Detection to resume.';
} else {
// Start detection
if (!isHumanReady || !isCameraReady) {
showResult('Camera and models must be ready before starting detection.', 'error');
return;
}
isDetectionRunning = true;
startBtn.textContent = 'Stop Detection';
startBtn.className = 'btn-primary';
showResult('Detection started. Position your face in the camera.', 'info');
// Start frame processing if not already running
if (!animationId) {
processFrame();
}
}
};
verifyBtn.onclick = async () => {
if (!lastFaceResult) {
showResult('No face detected. Please position your face in the camera.', 'error');
return;
}
setLoading(true);
try {
// Generate fuzzy hash for current face
const template = humanIntegration.convertToBiometricTemplate(lastFaceResult);
const fuzzyConfig = defaultFuzzyConfig;
lastFuzzyHash = generateFuzzyHash(template, fuzzyConfig);
if (previousFuzzyHash) {
// Compare with previous
const comparison = compareFuzzyHashes(lastFuzzyHash, previousFuzzyHash, fuzzyConfig);
showResult(
comparison.match
? `✅ Match! Similarity: ${(comparison.similarity * 100).toFixed(1)}%`
: `❌ Not a match. Similarity: ${(comparison.similarity * 100).toFixed(1)}%`,
comparison.match ? 'success' : 'error'
);
} else {
showResult('✅ Template captured. Please verify again to compare.', 'success');
}
previousFuzzyHash = lastFuzzyHash;
} catch (error) {
console.error('Verification error:', error);
showResult(`Verification error: ${error.message}`, 'error');
}
setLoading(false);
};
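// Sketch of persisting the enrollment hash across sessions, assuming the
// value returned by generateFuzzyHash is JSON-serializable (an assumption;
// this demo only ever holds it in memory). ENROLLMENT_KEY is a hypothetical
// storage key.
const ENROLLMENT_KEY = 'facezkp-enrollment';
function saveEnrollment(hash) {
  localStorage.setItem(ENROLLMENT_KEY, JSON.stringify(hash));
}
function loadEnrollment() {
  const stored = localStorage.getItem(ENROLLMENT_KEY);
  return stored ? JSON.parse(stored) : null;
}
// Usage sketch: previousFuzzyHash = loadEnrollment(); saveEnrollment(lastFuzzyHash);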
debugBtn.onclick = () => {
debugMode = !debugMode;
debugPanel.style.display = debugMode ? 'block' : 'none';
debugBtn.textContent = debugMode ? 'Hide Debug' : 'Debug Mode';
if (debugMode) {
updateDebugInfo();
}
};
// Performance monitoring
setInterval(() => {
if (debugMode) {
debugData.performance.fps =
detectFPSHistory.length > 0 ? detectFPSHistory[detectFPSHistory.length - 1] : 0;
    if (performance.memory) {
      debugData.performance.memory = performance.memory.usedJSHeapSize / 1024 / 1024;
    }
}
}, 1000);
// Initialize application
window.onload = async () => {
try {
setLoading(true);
// Setup camera and models in parallel
await Promise.all([setupCamera(), setupHuman()]);
// Start frame processing (but not detection yet)
processFrame();
showResult('System ready. Click Start Detection to begin face analysis.', 'info');
} catch (error) {
console.error('Initialization error:', error);
showResult(`Initialization error: ${error.message}`, 'error');
} finally {
setLoading(false);
}
};
// Cleanup on page unload
window.onbeforeunload = () => {
  if (animationId) {
    cancelAnimationFrame(animationId);
  }
  // Release the camera so the device indicator turns off promptly
  if (video.srcObject) {
    video.srcObject.getTracks().forEach((track) => track.stop());
  }
  if (humanIntegration) {
    humanIntegration.dispose();
  }
};