@privateid/cryptonets-web-sdk-alpha
CryptoNets WebAssembly SDK
/**
* @module Face module
*/
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
return new (P || (P = Promise))(function (resolve, reject) {
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
step((generator = generator.apply(thisArg, _arguments || [])).next());
});
};
import { detect } from 'detect-browser';
import { proxy } from 'comlink';
import { createImages, deleteUUID, FHE_enrollOnefa, FHE_predictOnefa, getDebugType, getDefaultCameraDeviceId, getIsSIMD, getPortraitBase64, iOS, isMobileFunc, isValidBarCode, isValidInternal, printLogs, prividAgePredict, prividDocumentMugshotFaceCompare, prividFaceCompareLocal, prividFaceISO, scanDocument, scanDocumentNoFace, setDefaultCameraDeviceId, } from './utils';
import { CameraFaceMode, ImageType, LOGTYPE } from './types';
import { facingMode } from './constants';
import { getCameraList, getVideoConstraints, isFaceTimeCamera, isMobileBackCameraPortrait, isMobileDevice, } from './cameraUtils';
import { callbakTypeEnum, createCallback } from './createCallback';
let videoElement = null;
let faceMode = CameraFaceMode.front;
let mediaDevice = null;
let mediaDevices = [];
let mediaStream = null;
let isSimd = false;
const debugType = getDebugType();
// Declaring reusable values
const cameraHeight = 1440;
const cameraWidth = 2560;
const cameraLowResHeight = 1080;
const cameraLowResWidth = 1920;
const debugTypes = ['900', '901', '902', '903'];
const isDebugWithImages = debugTypes.includes(debugType);
getIsSIMD().then((simd) => (isSimd = simd));
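// Default no-op WASM result callback; every exported operation replaces it via createCallback before calling into the worker.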
let privid_wasm_result = (operation, id, response_str) => { };
export const deleteUser = ({ uuid, callback }) => __awaiter(void 0, void 0, void 0, function* () {
printLogs('----1-----', '', debugType);
privid_wasm_result = createCallback({ type: callbakTypeEnum.delete, callbackFunction: callback });
printLogs('----2-----', '', debugType);
const cb = proxy(privid_wasm_result);
printLogs('----3-----', '', debugType);
deleteUUID(uuid, cb);
printLogs('----4-----', '', debugType);
});
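/*
 * Illustrative usage sketch (not part of the SDK source): removing an enrolled user.
 * The uuid value is a placeholder; the callback receives the delete result produced
 * by the WASM module through the comlink proxy created above.
 *
 *   await deleteUser({
 *     uuid: 'previously-enrolled-uuid', // placeholder
 *     callback: (result) => console.log('delete result:', result),
 *   });
 */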
// Common helper functions shared by the camera entry points below
function startVideo(videoElement, stream) {
return __awaiter(this, void 0, void 0, function* () {
const element = document.getElementById(videoElement);
element.srcObject = stream;
element.play();
yield new Promise((resolve) => (element.onplaying = resolve));
enableMutationObserver(element, stream);
});
}
function getVideoDevices(filterVirtual = true) {
return __awaiter(this, void 0, void 0, function* () {
let devices = yield navigator.mediaDevices.enumerateDevices();
devices = devices.filter((d) => d.kind === 'videoinput');
if (filterVirtual) {
devices = devices.filter((d) => !isVirtualCamera(d.label));
}
return devices;
});
}
function getCameraStream(videoConstraints) {
return __awaiter(this, void 0, void 0, function* () {
let hasError = true;
let stream;
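// Retry getUserMedia until a stream is obtained; each failed attempt is logged and the request is repeated.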
while (hasError) {
try {
stream = yield navigator.mediaDevices.getUserMedia(videoConstraints);
hasError = false;
}
catch (e) {
printLogs('Error opening camera', e, debugType, LOGTYPE.ERROR);
}
}
return stream;
});
}
let videoMutationObserver = null;
function enableMutationObserver(videoElement, stream) {
videoMutationObserver = new MutationObserver(() => {
if (videoElement.srcObject !== stream) {
console.warn('Unauthorized video source change detected! Resetting to correct camera.');
videoElement.srcObject = stream;
}
});
videoMutationObserver.observe(videoElement, { attributes: true, attributeFilter: ['srcObject'] });
}
function openCameraGeneric(domElement, constraints, requestFaceMode = CameraFaceMode.front) {
return __awaiter(this, void 0, void 0, function* () {
try {
videoElement = domElement;
faceMode = requestFaceMode;
printLogs(`Opening Camera: ${domElement}`, '', debugType);
const devices = yield getVideoDevices();
const stream = yield getCameraStream(constraints);
const track = stream.getVideoTracks()[0];
yield startVideo(domElement, stream);
mediaStream = stream;
return {
status: true,
stream,
devices,
settings: track.getSettings(),
capabilities: track.getCapabilities(),
faceMode,
};
}
catch (e) {
printLogs('Error opening camera', e, debugType, LOGTYPE.ERROR);
return null;
}
});
}
const isVirtualCamera = (deviceLabel) => {
const virtualCameraKeywords = ['virtual', 'obs', 'software'];
return virtualCameraKeywords.some((keyword) => deviceLabel.toLowerCase().includes(keyword));
};
const openCameraMobile = (domElement, requestFaceMode = null, canvasResolution, requireHD = false, isDocumentScan) => __awaiter(void 0, void 0, void 0, function* () {
try {
videoElement = domElement;
printLogs('Opening Camera on Mobile', '', debugType);
const devices = yield getVideoDevices();
const videoConstraint = getVideoConstraints(yield getCameraList(faceMode === CameraFaceMode.back),
// @ts-ignore
facingMode[faceMode], isDocumentScan, canvasResolution);
return yield openCameraGeneric(domElement, videoConstraint, requestFaceMode || faceMode);
}
catch (e) {
printLogs('Error:', e, debugType, LOGTYPE.ERROR);
return null;
}
});
/**
* @ignore
*/
const openCameraMacSafari = (domElement, deviceId = null, canvasResolution = { width: cameraWidth, height: cameraHeight }, requireHD = false) => __awaiter(void 0, void 0, void 0, function* () {
var _a, _b, _c, _d;
try {
videoElement = domElement;
printLogs('Opening Camera on Mac Safari', '', debugType);
const devices = yield getVideoDevices();
const defaultDeviceId = getDefaultCameraDeviceId();
const externalDeviceId = devices.length > 1 ? devices.find((device) => !device.label.includes('FaceTime')) : devices[0];
const isDefaultDeviceAvailable = devices.find((device) => defaultDeviceId === device.deviceId);
mediaDevice = deviceId || (isDefaultDeviceAvailable ? defaultDeviceId : externalDeviceId.deviceId);
setDefaultCameraDeviceId(mediaDevice);
const deviceCapabilites = getDefaultDevice(devices, mediaDevice);
const resolution = requireHD
? (canvasResolution === null || canvasResolution === void 0 ? void 0 : canvasResolution.width) || Math.min(((_b = (_a = deviceCapabilites[0]) === null || _a === void 0 ? void 0 : _a.width) === null || _b === void 0 ? void 0 : _b.max) || cameraWidth, cameraWidth)
: (canvasResolution === null || canvasResolution === void 0 ? void 0 : canvasResolution.width) || Math.min(((_d = (_c = deviceCapabilites[0]) === null || _c === void 0 ? void 0 : _c.width) === null || _d === void 0 ? void 0 : _d.max) || cameraLowResWidth, cameraLowResWidth);
const constraints = {
audio: false,
video: {
deviceId: deviceCapabilites[0].deviceId,
width: { ideal: resolution },
height: (canvasResolution === null || canvasResolution === void 0 ? void 0 : canvasResolution.height) ? { ideal: canvasResolution === null || canvasResolution === void 0 ? void 0 : canvasResolution.height } : undefined,
},
};
return yield openCameraGeneric(domElement, constraints);
}
catch (e) {
printLogs('Error while getAccessToCamera', e, debugType, LOGTYPE.ERROR);
return null;
}
});
const openCameraFirefox = (domElement, canvasResolution = { width: cameraWidth, height: cameraHeight }, requireHD = false) => __awaiter(void 0, void 0, void 0, function* () {
try {
videoElement = domElement;
yield navigator.mediaDevices.getUserMedia({ audio: false, video: true });
const devices = yield getVideoDevices();
printLogs('=====> MY DEVICES??? ', devices, debugType);
if (devices.length === 0) {
printLogs('NO_CAMERA', '', debugType, LOGTYPE.ERROR);
throw new Error('NO_CAMERA');
}
const constraints = {
audio: false,
video: {
width: { ideal: (canvasResolution === null || canvasResolution === void 0 ? void 0 : canvasResolution.width) || (requireHD ? cameraWidth : cameraLowResWidth) },
height: { ideal: (canvasResolution === null || canvasResolution === void 0 ? void 0 : canvasResolution.height) || cameraHeight },
aspectRatio: 1.7777777778,
focusMode: 'continuous',
facingMode: facingMode[faceMode] || 'user',
},
};
return yield openCameraGeneric(domElement, constraints);
}
catch (e) {
printLogs('Error while getAccessToCamera', e, debugType, LOGTYPE.ERROR);
return null;
}
});
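/**
* This function opens the camera and returns the stream, the current faceMode and the list of available media devices
* @category Face
* @param videoElementId id of the video tag
* @param deviceId Optional camera device ID
* @param requestFaceMode Requested camera facing mode (front or back)
* @param canvasResolution Optional target resolution ({ width, height })
* @param isDocumentScan Whether the camera is opened for a document scan
*/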
export const openCamera = ({ videoElementId, deviceId, requestFaceMode, canvasResolution, isDocumentScan, }) => __awaiter(void 0, void 0, void 0, function* () {
var _e;
videoElement = videoElementId;
faceMode = requestFaceMode;
const { name: browserName, os } = detect();
yield navigator.mediaDevices.getUserMedia({ video: true, audio: false }).then((mediaStream) => {
const stream = mediaStream;
const tracks = stream.getTracks();
tracks.forEach((track) => track.stop());
});
// Firefox
if (browserName === 'firefox') {
printLogs('==== Firefox open camera', '', debugType);
return openCameraFirefox(videoElementId, canvasResolution);
}
// Mobile
if (iOS() || ['iOS', 'android', 'Android OS'].includes(os)) {
printLogs('==== Mobile open camera!', '', debugType);
return openCameraMobile(videoElementId, requestFaceMode, canvasResolution, null, isDocumentScan);
}
// Mac Safari Browser
if (os === 'Mac OS' && browserName === 'safari') {
printLogs('==== Mac safari open camera!', '', debugType);
return openCameraMacSafari(videoElementId, deviceId, canvasResolution, null);
}
const isWindowsDevice = () => ['windows', 'win16', 'win32', 'wince'].includes(navigator.platform.toLowerCase());
const analyzeFrames = () => __awaiter(void 0, void 0, void 0, function* () {
// Placeholder for frame analysis logic
return false;
});
const frameSuspicious = yield analyzeFrames();
if (frameSuspicious) {
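// Intentionally empty: analyzeFrames is a placeholder and currently always returns false.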
}
try {
const devices = yield getVideoDevices(false);
printLogs('=====> MY DEVICES??? ', devices, debugType);
if (devices.length === 0) {
printLogs('NO_CAMERA', '', debugType, LOGTYPE.ERROR);
throw new Error('NO_CAMERA');
}
let defaultDeviceId = getDefaultCameraDeviceId();
if (deviceId) {
defaultDeviceId = deviceId;
}
if (!mediaDevice) {
if (deviceId) {
mediaDevice = deviceId;
}
else {
const isDefaultDeviceAvailable = devices.find((device) => defaultDeviceId === device.deviceId);
mediaDevice = isDefaultDeviceAvailable ? defaultDeviceId : devices[0].deviceId;
}
}
const deviceCapabilites = getDefaultDevice(devices, mediaDevice);
const isMacFaceTimeCamera = isFaceTimeCamera((_e = deviceCapabilites[0]) === null || _e === void 0 ? void 0 : _e.label, isDocumentScan);
printLogs('========= Opens Camera ======== ', '', debugType);
const getBestResolution = () => __awaiter(void 0, void 0, void 0, function* () {
var _f, _g;
const resolution = (canvasResolution === null || canvasResolution === void 0 ? void 0 : canvasResolution.width) || Math.min(((_g = (_f = deviceCapabilites[0]) === null || _f === void 0 ? void 0 : _f.width) === null || _g === void 0 ? void 0 : _g.max) || cameraWidth, cameraWidth);
const constraints = {
audio: false,
video: {
deviceId: deviceCapabilites[0].deviceId,
width: { ideal: resolution },
height: (canvasResolution === null || canvasResolution === void 0 ? void 0 : canvasResolution.height) ? { ideal: canvasResolution === null || canvasResolution === void 0 ? void 0 : canvasResolution.height } : { ideal: cameraHeight },
aspectRatio: isMacFaceTimeCamera ? 1 : 1.7777777778,
resizeMode: 'none',
facingMode: isWindowsDevice() ? 'user' : undefined,
},
};
const stream = yield getCameraStream(constraints);
printLogs('bestconstraints: ', constraints, debugType);
return stream;
});
const stream = yield getBestResolution();
const track = stream.getVideoTracks()[0];
const capabilities = track.getCapabilities();
const settings = track.getSettings();
setDefaultCameraDeviceId(settings === null || settings === void 0 ? void 0 : settings.deviceId);
printLogs('capabilities: ', capabilities, debugType);
printLogs('settings: ', settings, debugType);
const constraints = getVideoConstraints(yield getCameraList(faceMode === CameraFaceMode.back),
// @ts-ignore
facingMode[faceMode], isDocumentScan, canvasResolution);
return yield openCameraGeneric(videoElementId, constraints);
}
catch (e) {
printLogs('Error while getAccessToCamera', e, debugType, LOGTYPE.ERROR);
return null;
}
});
/**
* This function stops the camera stream and detaches it from the video element
* @category Face
* @param element id of the video tag (optional; defaults to the element used when the camera was opened)
*/
export const closeCamera = (element) => __awaiter(void 0, void 0, void 0, function* () {
try {
const video = element || videoElement;
const videoEl = document.getElementById(video);
if (!videoEl)
return;
const stream = videoEl === null || videoEl === void 0 ? void 0 : videoEl.srcObject;
if (stream) {
const tracks = stream === null || stream === void 0 ? void 0 : stream.getTracks();
tracks === null || tracks === void 0 ? void 0 : tracks.forEach((track) => {
track === null || track === void 0 ? void 0 : track.stop();
});
if (videoEl) {
videoEl.srcObject = null;
}
}
}
catch (err) {
printLogs('Close Camera', err, debugType, LOGTYPE.ERROR);
}
});
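/*
 * Illustrative usage sketch (not part of the SDK source): opening and closing the camera.
 * The element id 'userVideo' is a placeholder; the returned object mirrors the shape built
 * in openCameraGeneric above (status, stream, devices, settings, capabilities, faceMode).
 *
 *   const camera = await openCamera({ videoElementId: 'userVideo', requestFaceMode: CameraFaceMode.front });
 *   if (camera && camera.status) {
 *     console.log('camera settings:', camera.settings);
 *   }
 *   await closeCamera('userVideo');
 */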
const getTheNextResolutionAvailable = (currentResolution) => {
printLogs('getTheNextResolutionAvailable', currentResolution, debugType);
const resolutions = [2560, 1920, 1600, 1552, 1440, 1280, 1024, 960, 800, 720, 704, 640].sort((a, b) => b - a);
return resolutions.find((e) => e < currentResolution) || 640;
};
/**
* This function switches the camera from front to back on mobile, and to another device on desktop
* @category Face
* @param selectedCamera Selected camera, either front or back
* @param device Selected camera ID
* @param canvasResolution Optional target resolution ({ width, height })
*/
export const switchCamera = (selectedCamera, device, canvasResolution) => __awaiter(void 0, void 0, void 0, function* () {
if (!videoElement)
return;
if (mediaStream) {
mediaStream.getTracks().forEach((track) => track.stop());
}
let devices = yield getVideoDevices(false);
devices = getDevicesWithCapabilities(devices);
const deviceCapabilites = getDefaultDevice(devices, device);
if (selectedCamera)
faceMode = selectedCamera;
const getBestResolution = () => __awaiter(void 0, void 0, void 0, function* () {
var _h, _j, _k, _l;
const resolution = (canvasResolution === null || canvasResolution === void 0 ? void 0 : canvasResolution.width) || Math.min(((_j = (_h = deviceCapabilites[0]) === null || _h === void 0 ? void 0 : _h.width) === null || _j === void 0 ? void 0 : _j.max) || cameraWidth, cameraWidth);
const isPortraitMobileCamera = isMobileBackCameraPortrait(deviceCapabilites[0]);
const videoConstraints = {
deviceId: deviceCapabilites[0].deviceId,
width: { ideal: calculateWidth() },
height: { ideal: calculateHeight() },
facingMode: ((_l = (_k = deviceCapabilites[0]) === null || _k === void 0 ? void 0 : _k.facingMode) === null || _l === void 0 ? void 0 : _l[0]) || undefined,
focusMode: 'continuous',
aspectRatio: calculateAspectRatio(),
resizeMode: 'none',
};
const constraints = {
audio: false,
video: videoConstraints, // Apply video constraints
};
function calculateWidth() {
if (canvasResolution === null || canvasResolution === void 0 ? void 0 : canvasResolution.width) {
return resolution; // Use the specified resolution if canvas width is available
}
else if (isPortraitMobileCamera) {
return cameraHeight; // Use camera height for portrait mobile camera
}
else {
return resolution; // Use the specified resolution for other cases
}
}
function calculateHeight() {
if (canvasResolution === null || canvasResolution === void 0 ? void 0 : canvasResolution.height) {
return canvasResolution.height; // Use the specified canvas height
}
else {
return cameraHeight; // Use camera height if canvas height is not available
}
}
function calculateAspectRatio() {
if (canvasResolution === null || canvasResolution === void 0 ? void 0 : canvasResolution.width) {
return 1.7777777778; // 16:9 aspect ratio if canvas width is available
}
else if (isPortraitMobileCamera) {
return 1; // 1:1 aspect ratio for portrait mobile camera
}
else {
return 1.7777777778; // 16:9 aspect ratio for other cases
}
}
const stream = yield getCameraStream(constraints);
printLogs('bestconstraints: ', constraints, debugType);
return stream;
});
if (device) {
mediaDevice = device;
setDefaultCameraDeviceId(device);
}
if (selectedCamera && !device) {
const regex = selectedCamera === CameraFaceMode.back ? /back/gi : /front/gi;
devices = devices.filter((d) => regex.test(d.label));
const getBestResolutionWithFacingMode = () => __awaiter(void 0, void 0, void 0, function* () {
var _m, _o;
const deviceCapabilites = getDefaultDevice(devices, devices[0].deviceId);
const { name: browserName, os } = detect();
const resolutionWidth = (canvasResolution === null || canvasResolution === void 0 ? void 0 : canvasResolution.width) || (iOS() || ['iOS'].includes(os) ? cameraLowResWidth : cameraWidth);
const resolutionHeight = (canvasResolution === null || canvasResolution === void 0 ? void 0 : canvasResolution.height) || (iOS() || ['iOS'].includes(os) ? cameraLowResHeight : cameraHeight);
let hasError = true;
const constraints = {
audio: false,
video: {
width: { ideal: resolutionWidth },
facingMode: faceMode ? facingMode[faceMode] : 'user',
height: { ideal: resolutionHeight },
resizeMode: 'none',
advanced: [
{
focusMode: 'continuous',
resizeMode: 'none',
focusDistance: Math.min(((_o = (_m = deviceCapabilites[0]) === null || _m === void 0 ? void 0 : _m.focusDistance) === null || _o === void 0 ? void 0 : _o.max) || 100, 100),
aspectRatio: 1.7777777778,
},
],
},
};
let stream;
while (hasError) {
try {
stream = yield navigator.mediaDevices.getUserMedia(constraints);
hasError = false;
}
catch (e) {
printLogs('Error:', e, debugType, LOGTYPE.ERROR);
}
}
printLogs('bestconstraints: ', constraints, debugType);
return stream;
});
try {
const stream = yield getBestResolutionWithFacingMode();
const element = document.getElementById(videoElement);
element.srcObject = stream;
mediaStream = stream;
const track = stream.getVideoTracks()[0];
const capabilities = (track === null || track === void 0 ? void 0 : track.getCapabilities) ? track.getCapabilities() : null;
const settings = track.getSettings();
printLogs('switch camera capabilities:', capabilities, debugType);
printLogs('switch camera settings:', settings, debugType);
enableMutationObserver(element, stream);
return { capabilities, settings };
}
catch (e) {
printLogs('Error while getAccessToCamera', e, debugType, LOGTYPE.ERROR);
yield switchCamera(null, devices[0].deviceId, canvasResolution);
}
}
else {
try {
if (videoMutationObserver) {
videoMutationObserver.disconnect();
}
const stream = yield getBestResolution();
const element = document.getElementById(videoElement);
element.srcObject = stream;
mediaStream = stream;
const track = stream.getVideoTracks()[0];
const capabilities = (track === null || track === void 0 ? void 0 : track.getCapabilities) ? track.getCapabilities() : null;
const settings = track.getSettings();
printLogs('switch camera capabilities:', capabilities, debugType);
printLogs('switch camera settings:', settings, debugType);
return { capabilities, settings };
}
catch (e) {
printLogs('Error while getAccessToCamera', e, debugType, LOGTYPE.ERROR);
yield switchCamera(null, devices[0].deviceId, canvasResolution);
}
}
});
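/*
 * Illustrative usage sketch (not part of the SDK source): switching cameras while a stream is open.
 * Pass a CameraFaceMode to flip front/back on mobile, or a deviceId (placeholder below) to switch
 * devices on desktop; the resolved track capabilities and settings are returned.
 *
 *   const { settings } = await switchCamera(CameraFaceMode.back);
 *   await switchCamera(null, 'some-device-id', { width: 1920, height: 1080 });
 */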
export const isValid = ({ callback, image, config, }) => __awaiter(void 0, void 0, void 0, function* () {
let isValidConfig = {
input_image_format: 'rgba',
};
if (config) {
isValidConfig = Object.assign(Object.assign({}, isValidConfig), config);
}
const configData = JSON.stringify(isValidConfig);
privid_wasm_result = createCallback({ type: callbakTypeEnum.isValid, callbackFunction: callback });
let isValidResult;
if (image) {
isValidResult = yield isValidInternal(image.data, image.width, image.height, isSimd, configData, proxy(privid_wasm_result));
printLogs('===> isValid with image result JSON', '', debugType);
return isValidResult;
}
if (!videoElement) {
printLogs('capture need the video element id', '', debugType, LOGTYPE.ERROR);
return { result: 'error', message: 'video element not available' };
}
const videoEl = document.getElementById(videoElement);
printLogs('video element', videoEl, debugType);
if (!videoEl) {
printLogs('no video element found', '', debugType, LOGTYPE.ERROR);
return { result: 'error' };
}
const height = videoEl.videoHeight;
const width = videoEl.videoWidth;
if (width === 0)
return { result: 'error' };
const canvas = document.createElement('canvas');
canvas.setAttribute('id', 'test-canvas');
canvas.setAttribute('height', `${height}`);
canvas.setAttribute('width', `${width}`);
const context = canvas.getContext('2d', { willReadFrequently: true });
context.drawImage(videoEl, 0, 0);
if ((image === null || image === void 0 ? void 0 : image.width) === 0)
return { result: 'error' };
const imageData = context.getImageData(0, 0, width, height);
yield isValidInternal(imageData.data, imageData.width, imageData.height, isSimd, configData, proxy(privid_wasm_result));
releaseCanvas(canvas);
return imageData;
});
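/*
 * Illustrative usage sketch (not part of the SDK source): validating the current video frame.
 * The camera must already be open via openCamera(); the callback receives the validation
 * result produced by the WASM module.
 *
 *   await isValid({
 *     callback: (result) => console.log('isValid:', result),
 *     config: { input_image_format: 'rgba' },
 *   });
 */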
export const predictAge = ({ callback, config, element, image, }) => __awaiter(void 0, void 0, void 0, function* () {
if (element) {
videoElement = element;
}
let finalConfig = {
input_image_format: 'rgba',
single_face_age_result: true,
};
const { os } = detect();
if (['iOS', 'android', 'Android OS'].includes(os)) {
finalConfig = Object.assign(Object.assign({}, finalConfig), { threshold_user_too_far: 0.1, threshold_user_right: 0.1, threshold_user_left: 0.9 });
}
if (config) {
finalConfig = Object.assign(Object.assign({}, finalConfig), config);
}
privid_wasm_result = createCallback({ type: callbakTypeEnum.ageEstimation, callbackFunction: callback });
if (image) {
return yield prividAgePredict(image.data, image.width, image.height, isSimd, JSON.stringify(finalConfig), proxy(privid_wasm_result));
}
if (!videoElement) {
printLogs('capture need the video element id', '', debugType, LOGTYPE.ERROR);
return { result: 'error' };
}
const videoEl = document.getElementById(videoElement);
const height = (videoEl === null || videoEl === void 0 ? void 0 : videoEl.videoHeight) || 0;
const width = (videoEl === null || videoEl === void 0 ? void 0 : videoEl.videoWidth) || 0;
if (width === 0)
return { result: 'error' };
printLogs('------ isValid', `${height} ${width}`, debugType);
const canvas = document.createElement('canvas');
canvas.setAttribute('id', 'test-canvas');
canvas.setAttribute('height', `${height}`);
canvas.setAttribute('width', `${width}`);
const context = canvas.getContext('2d', { willReadFrequently: true });
if (faceMode === CameraFaceMode.front) {
context.translate(width, 0);
context.scale(-1, 1);
}
context.drawImage(videoEl, 0, 0);
if ((image === null || image === void 0 ? void 0 : image.width) === 0)
return { result: 'error' };
const imageData = context.getImageData(0, 0, width, height);
if (debugTypes.includes(debugType)) {
yield createImages([imageData], ImageType.original, true);
}
privid_wasm_result = createCallback({ type: callbakTypeEnum.ageEstimation, callbackFunction: callback });
yield prividAgePredict(imageData.data, imageData.width, imageData.height, isSimd, JSON.stringify(finalConfig), proxy(privid_wasm_result));
releaseCanvas(canvas);
});
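/*
 * Illustrative usage sketch (not part of the SDK source): estimating age from the open camera.
 * The element id is optional and falls back to the one used when the camera was opened.
 *
 *   await predictAge({
 *     callback: (result) => console.log('age estimation:', result),
 *     config: { input_image_format: 'rgba' },
 *   });
 */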
export const predictImageAge = (image, callback, config) => __awaiter(void 0, void 0, void 0, function* () {
let finalConfig = {
input_image_format: 'rgba',
angle_rotation_left_threshold: 20.0,
angle_rotation_right_threshold: 20.0,
preprocessing_margin_factor: 4,
antispoof_face_margin: 1.0,
gray_scale_threshold: 25.0,
gray_scale_variance_threshold: 100.0,
anti_spoofing_threshold: 0.9,
enroll_embeddings_compare: 1.9,
threshold_high_vertical_predict: -0.1,
threshold_down_vertical_predict: 0.1,
threshold_user_too_far: 0.1,
threshold_user_right: 0.1,
threshold_user_left: 0.9,
single_face_age_result: true,
skip_antispoof: true,
};
privid_wasm_result = createCallback({ type: callbakTypeEnum.ageEstimation, callbackFunction: callback });
if (config) {
finalConfig = Object.assign(Object.assign({}, finalConfig), config);
}
yield prividAgePredict(image.data, image.width, image.height, isSimd, JSON.stringify(finalConfig), proxy(privid_wasm_result));
});
const frontScanCanvas = document.createElement('canvas');
export const scanFrontDocument = ({ callback, image, config }) => __awaiter(void 0, void 0, void 0, function* () {
printLogs('________________ FRONT DOCUMENT SCAN ______________', '', debugType);
const videoEl = document.getElementById(videoElement);
let configuration = {
input_image_format: 'rgba',
};
if (config) {
configuration = Object.assign(Object.assign({}, configuration), config);
}
privid_wasm_result = createCallback({ type: callbakTypeEnum.frontScan, callbackFunction: callback });
if (image) {
const result = yield scanDocument(image, isSimd, proxy(privid_wasm_result), undefined, JSON.stringify(configuration), debugType);
return Object.assign({ imageData: image }, result);
}
if (!videoElement || !videoEl) {
printLogs('capture need the video element id', '', debugType, LOGTYPE.ERROR);
return { result: 'error' };
}
let height = videoEl.videoHeight;
let width = videoEl.videoWidth;
frontScanCanvas.setAttribute('id', 'test-canvas');
frontScanCanvas.setAttribute('height', `${height}`);
frontScanCanvas.setAttribute('width', `${width}`);
const context = frontScanCanvas.getContext('2d', { willReadFrequently: true });
context.drawImage(videoEl, 0, 0);
if ((image === null || image === void 0 ? void 0 : image.width) === 0 || width === 0 || height === 0)
return { result: 'error' };
let imageData = context.getImageData(0, 0, width, height);
let result = null;
const cb = proxy(privid_wasm_result);
if (debugTypes.includes(debugType)) {
yield createImages([imageData], ImageType.original, true);
}
if (image) {
imageData = image;
}
try {
configuration = JSON.stringify(configuration);
result = yield scanDocument(imageData, isSimd, cb, undefined, configuration, debugType);
imageData = null;
return Object.assign({}, result);
}
catch (e) {
printLogs('--------isValidPhotoID error', e, debugType);
return {
result: -1,
};
}
});
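/*
 * Illustrative usage sketch (not part of the SDK source): scanning the front of a photo ID
 * from the open camera stream. Passing an ImageData via `image` skips the video capture path.
 *
 *   await scanFrontDocument({
 *     callback: (result) => console.log('front scan:', result),
 *   });
 */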
const frontScanOcrCanvas = document.createElement('canvas');
export const scanFrontDocumentOcrAge = ({ callback, image, config }) => __awaiter(void 0, void 0, void 0, function* () {
printLogs('________________ FRONT DOCUMENT SCAN OCR ______________', '', debugType);
const videoEl = document.getElementById(videoElement);
let configuration = {
input_image_format: 'rgba',
calculate_age_from_ocr_text: true,
};
if (config) {
configuration = Object.assign(Object.assign({}, configuration), config);
}
privid_wasm_result = createCallback({ type: callbakTypeEnum.frontScan, callbackFunction: callback });
if (image) {
const result = yield scanDocument(image, isSimd, proxy(privid_wasm_result), undefined, JSON.stringify(configuration), debugType);
return Object.assign({ imageData: image }, result);
}
if (!videoElement || !videoEl) {
printLogs('capture need the video element id', '', debugType, LOGTYPE.ERROR);
return { result: 'error' };
}
let height = videoEl.videoHeight;
let width = videoEl.videoWidth;
frontScanOcrCanvas.setAttribute('id', 'test-canvas');
frontScanOcrCanvas.setAttribute('height', `${height}`);
frontScanOcrCanvas.setAttribute('width', `${width}`);
const context = frontScanOcrCanvas.getContext('2d', { willReadFrequently: true });
context.drawImage(videoEl, 0, 0);
if ((image === null || image === void 0 ? void 0 : image.width) === 0 || width === 0 || height === 0)
return { result: 'error' };
let imageData = context.getImageData(0, 0, width, height);
let result = null;
const cb = proxy(privid_wasm_result);
if (debugTypes.includes(debugType)) {
yield createImages([imageData], ImageType.original, true);
}
if (image) {
imageData = image;
}
try {
configuration = JSON.stringify(configuration);
result = yield scanDocument(imageData, isSimd, cb, undefined, configuration, debugType);
imageData = null;
return Object.assign({}, result);
}
catch (e) {
printLogs('--------isValidPhotoID error', e, debugType);
return {
result: -1,
};
}
});
const backScanCanvas = document.createElement('canvas');
export const backScanDocument = ({ callback, image, config, zoomFactor }) => __awaiter(void 0, void 0, void 0, function* () {
printLogs('________________ DOCUMENT SCAN ______________', '', debugType);
const videoEl = document.getElementById(videoElement);
let configuration = {
document_auto_rotation: false,
input_image_format: 'rgba',
};
if (config) {
configuration = Object.assign(Object.assign({}, configuration), config);
}
privid_wasm_result = createCallback({ type: callbakTypeEnum.backScan, callbackFunction: callback });
configuration = JSON.stringify(configuration);
if (image) {
const result = yield isValidBarCode(image, isSimd, proxy(privid_wasm_result), configuration, debugType);
return Object.assign({ imageData: image }, result);
}
if (!videoElement || !videoEl) {
printLogs('capture need the video element id', '', debugType, LOGTYPE.ERROR);
return { result: 'error' };
}
let height = videoEl.videoHeight;
let width = videoEl.videoWidth;
backScanCanvas.setAttribute('id', 'test-canvas');
backScanCanvas.setAttribute('height', `${height}`);
backScanCanvas.setAttribute('width', `${width}`);
const context = backScanCanvas.getContext('2d', { willReadFrequently: true });
context.drawImage(videoEl, 0, 0);
if ((image === null || image === void 0 ? void 0 : image.width) === 0 || width === 0 || height === 0)
return { result: 'error' };
let imageData = context.getImageData(0, 0, width, height);
let result = null;
if (debugTypes.includes(debugType)) {
yield createImages([imageData], ImageType.original, true);
}
try {
if (isMobileDevice()) {
privid_wasm_result = createCallback({ type: callbakTypeEnum.backScan, callbackFunction: callback });
result = yield isValidBarCode(imageData, isSimd, proxy(privid_wasm_result), configuration, '3');
}
else {
privid_wasm_result = createCallback({ type: callbakTypeEnum.backScan, callbackFunction: callback });
result = yield scanDocumentNoFace(imageData, isSimd, proxy(privid_wasm_result), configuration, '3');
}
imageData = null;
releaseCanvas(backScanCanvas);
return Object.assign({}, result);
}
catch (e) {
printLogs('--------isValidPhotoID error', e, debugType);
return {
result: -1,
};
}
});
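/*
 * Illustrative usage sketch (not part of the SDK source): scanning the back of a document.
 * On mobile the barcode path (isValidBarCode) is used, otherwise the faceless document
 * scan (scanDocumentNoFace), as implemented above.
 *
 *   await backScanDocument({
 *     callback: (result) => console.log('back scan:', result),
 *   });
 */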
// Get Cropped Document Only
const scanHealthcareCardCanvas = document.createElement('canvas');
export const scanHealthcareCard = (callback = () => { }, config = null, image, canvasSize = {}) => __awaiter(void 0, void 0, void 0, function* () {
printLogs('________________ DOCUMENT SCAN ______________', '', debugType);
const videoEl = document.getElementById(videoElement);
let configuration;
if (!config) {
configuration = { input_image_format: 'rgba' };
}
else {
configuration = config;
}
privid_wasm_result = createCallback({ type: callbakTypeEnum.documentScanNoFace, callbackFunction: callback });
if (image) {
configuration = JSON.stringify(Object.assign(Object.assign({}, configuration), { conf_score_thr_doc: 0.1 }));
const result = yield scanDocumentNoFace(image, isSimd, proxy(privid_wasm_result), configuration, debugType);
return Object.assign({ imageData: image }, result);
}
if (!videoElement || !videoEl) {
printLogs('capture need the video element id', '', debugType, LOGTYPE.ERROR);
return { result: 'error' };
}
let height = (canvasSize === null || canvasSize === void 0 ? void 0 : canvasSize.height) || videoEl.videoHeight;
let width = (canvasSize === null || canvasSize === void 0 ? void 0 : canvasSize.width) || videoEl.videoWidth;
if (isMobileFunc()) {
height = videoEl.videoHeight;
width = videoEl.videoWidth;
}
scanHealthcareCardCanvas.setAttribute('id', 'test-canvas');
scanHealthcareCardCanvas.setAttribute('height', `${height}`);
scanHealthcareCardCanvas.setAttribute('width', `${width}`);
const context = scanHealthcareCardCanvas.getContext('2d', { willReadFrequently: true });
context.drawImage(videoEl, 0, 0);
if ((image === null || image === void 0 ? void 0 : image.width) === 0 || width === 0 || height === 0)
return { result: 'error' };
let imageData = context.getImageData(0, 0, width, height);
let result = null;
const cb = proxy(privid_wasm_result);
if (debugTypes.includes(debugType)) {
yield createImages([imageData], ImageType.original, true);
}
if (image) {
imageData = image;
}
try {
configuration = JSON.stringify(Object.assign(Object.assign({}, configuration), { conf_score_thr_doc: 0.1 }));
result = yield scanDocumentNoFace(imageData, isSimd, cb, configuration, debugType);
imageData = null;
return Object.assign({}, result);
}
catch (e) {
printLogs('--------isValidPhotoID error', e, debugType);
return {
result: -1,
};
}
});
export function enroll({ callback, config, element }) {
return __awaiter(this, void 0, void 0, function* () {
printLogs('--------------------ENROLL_ONE_FA-----------------------', '', debugType);
let configJSON = {
input_image_format: 'rgba',
angle_rotation_left_threshold: 20.0,
angle_rotation_right_threshold: 20.0,
threshold_high_vertical_enroll: -0.1,
threshold_down_vertical_enroll: 0.1,
threshold_user_too_far: 0.1,
threshold_user_right: 0.1,
threshold_user_left: 0.9,
anti_spoofing_threshold: 0.75,
};
const { os } = detect();
if (['iOS', 'android', 'Android OS'].includes(os)) {
configJSON = Object.assign(Object.assign({}, configJSON), { threshold_user_too_far: 0.1, threshold_user_right: 0, threshold_user_left: 0.8 });
}
if (config) {
configJSON = Object.assign(Object.assign({}, configJSON), config);
}
if (element) {
videoElement = element;
}
privid_wasm_result = createCallback({ type: callbakTypeEnum.enroll, callbackFunction: callback });
const videoEl = document.getElementById(videoElement);
const height = (videoEl === null || videoEl === void 0 ? void 0 : videoEl.videoHeight) || 0;
const width = (videoEl === null || videoEl === void 0 ? void 0 : videoEl.videoWidth) || 0;
if (width === 0)
return { result: 'error' };
printLogs('------ isValid', `${height} ${width}`, debugType);
const canvas = document.createElement('canvas');
canvas.setAttribute('id', 'test-canvas');
canvas.setAttribute('height', `${height}`);
canvas.setAttribute('width', `${width}`);
const context = canvas.getContext('2d', { willReadFrequently: true });
if (faceMode === CameraFaceMode.front) {
context.translate(width, 0);
context.scale(-1, 1);
}
context.drawImage(videoEl, 0, 0);
const imageData = context.getImageData(0, 0, width, height);
const bestImage = yield FHE_enrollOnefa(imageData, isSimd, JSON.stringify(configJSON), proxy(privid_wasm_result));
if (isDebugWithImages) {
yield createImages([imageData], ImageType.original, true);
}
return bestImage;
});
}
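/*
 * Illustrative usage sketch (not part of the SDK source): enrolling the face currently in
 * front of the open camera. The element id 'userVideo' is a placeholder and is optional
 * when the camera was opened through openCamera().
 *
 *   const bestImage = await enroll({
 *     element: 'userVideo',
 *     callback: (result) => console.log('enroll:', result),
 *   });
 */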
const faceLoginCanvas = document.createElement('canvas');
export const faceLogin = ({ callback, config, element, returnPortrait }) => __awaiter(void 0, void 0, void 0, function* () {
let configJSON = {
input_image_format: 'rgba',
angle_rotation_left_threshold: 20.0,
angle_rotation_right_threshold: 20.0,
// default faceLogin threshold
threshold_user_right: 0.3,
threshold_user_left: 0.7,
threshold_high_vertical_predict: -0.1,
threshold_down_vertical_predict: 0.1,
threshold_profile_predict: 0.65,
threshold_user_too_close: 0.8,
threshold_user_too_far: 0.15,
anti_spoofing_threshold: 0.9,
};
const { os } = detect();
if (['iOS', 'android', 'Android OS'].includes(os)) {
configJSON = Object.assign(Object.assign({}, configJSON), { threshold_user_too_far: 0.1, threshold_user_right: 0.2, threshold_user_left: 0.8 });
}
if (config) {
if ((config === null || config === void 0 ? void 0 : config.context_string) === 'oscar_login') {
configJSON = config;
}
else {
configJSON = Object.assign(Object.assign({}, configJSON), config);
}
}
if (element) {
videoElement = element;
}
const videoEl = document.getElementById(videoElement);
const height = (videoEl === null || videoEl === void 0 ? void 0 : videoEl.videoHeight) || 0;
const width = (videoEl === null || videoEl === void 0 ? void 0 : videoEl.videoWidth) || 0;
if (width === 0)
return { result: 'error' };
printLogs('------ isValid', `${height} ${width}`, debugType);
faceLoginCanvas.setAttribute('id', 'test-canvas');
faceLoginCanvas.setAttribute('height', `${height}`);
faceLoginCanvas.setAttribute('width', `${width}`);
const context = faceLoginCanvas.getContext('2d', { willReadFrequently: true });
if (faceMode === CameraFaceMode.front) {
context.translate(width, 0);
context.scale(-1, 1);
}
context.drawImage(videoEl, 0, 0);
const imageData = context.getImageData(0, 0, width, height);
if (config === null || config === void 0 ? void 0 : config.sendImageToJS) {
privid_wasm_result = createCallback({
type: callbakTypeEnum.predict,
callbackFunction: callback,
imageData,
});
config === null || config === void 0 ? true : delete config.sendImageToJS;
configJSON === null || configJSON === void 0 ? true : delete configJSON.sendImageToJS;
}
else {
privid_wasm_result = createCallback({ type: callbakTypeEnum.predict, callbackFunction: callback });
}
if (debugTypes.includes(debugType)) {
yield createImages([imageData], ImageType.original, true);
}
yield FHE_predictOnefa([imageData], isSimd, JSON.stringify(configJSON), proxy(privid_wasm_result));
if (returnPortrait) {
return imageData;
}
releaseCanvas(faceLoginCanvas);
});
export const predict = ({ callback, image, config, element, returnPortrait }) => __awaiter(void 0, void 0, void 0, function* () {
printLogs('---------------------PREDICT_ONE_FA----------------------', '', debugType);
let configJSON = {
input_image_format: 'rgba',
angle_rotation_left_threshold: 20.0,
angle_rotation_right_threshold: 20.0,
anti_spoofing_detect_document: true,
anti_spoofing_threshold: 0.9,
};
if (config) {
configJSON = Object.assign(Object.assign({}, configJSON), config);
}
const originalImages = [];
privid_wasm_result = createCallback({ type: callbakTypeEnum.predict, callbackFunction: callback });
if (image) {
return FHE_predictOnefa([image], isSimd, JSON.stringify(configJSON), proxy(privid_wasm_result));
}
if (element) {
videoElement = element;
}
const videoEl = document.getElementById(videoElement);
const height = (videoEl === null || videoEl === void 0 ? void 0 : videoEl.videoHeight) || 0;
const width = (videoEl === null || videoEl === void 0 ? void 0 : videoEl.videoWidth) || 0;
if (width === 0)
return { result: 'error' };
printLogs('------ isValid', `${height} ${width}`, debugType);
const canvas = document.createElement('canvas');
canvas.setAttribute('id', 'test-canvas');
canvas.setAttribute('height', `${height}`);
canvas.setAttribute('width', `${width}`);
const context = canvas.getContext('2d', { willReadFrequently: true });
if (faceMode === CameraFaceMode.front) {
context.translate(width, 0);
context.scale(-1, 1);
}
context.drawImage(videoEl, 0, 0);
const imageData = context.getImageData(0, 0, width, height);
originalImages.push(imageData);
if (debugTypes.includes(debugType)) {
yield createImages([imageData], ImageType.original, true);
}
yield FHE_predictOnefa(originalImages, isSimd, JSON.stringify(configJSON), proxy(privid_wasm_result));
if (returnPortrait) {
return imageData;
}
releaseCanvas(canvas);
});
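/*
 * Illustrative usage sketch (not part of the SDK source): running predict against the open
 * camera. Set returnPortrait to also receive the captured frame as ImageData.
 *
 *   const portrait = await predict({
 *     callback: (result) => console.log('predict:', result),
 *     returnPortrait: true,
 *   });
 */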
let stopContinuousAuth = false;
// /**
// * This function is a utility for continuousAuthentication. To stop continuousAuthentication pass `true`; to start the continuous predict/enroll again, call it with `false`.
// * @category Face
// * @param loop The loop flag: boolean `true` stops continuous predict/enroll, `false` lets continuous predict/enroll start again.
// */
export const setStopLoopContinuousAuthentication = (loop) => {
stopContinuousAuth = loop;
};
// /**
// * This function performs continuous predict; the camera should already be open. It runs the identification, returns the result, then starts again.
// * @category Face
// * @param callback Callbacks triggered on the `predict` operation
// * @param config Configuration, recommended default: { input_image_format: "rgba" }
// * @param element Element id of the video tag. (Optional) By default it uses the element id used when opening the camera via openCamera()
// */
export function continuousAuthentication(callback, config, element) {
return __awaiter(this, void 0, void 0, function* () {
printLogs('---------------------continuousPredict----------------------', '', debugType);
if (element) {
videoElement = element;
}
let configJSON = {
input_image_format: 'rgba',
antispoof_face_margin: 2,
angle_rotation_left_threshold: 8.0,
angle_rotation_right_threshold: 8.0,
threshold_user_right: 0.01,
threshold_user_left: 0.99,
threshold_high_vertical_predict: -0.1,
threshold_down_vertical_predict: 0.1,
threshold_profile_predict: 0.65,
threshold_user_too_close: 0.8,
threshold_user_too_far: 0.1,
skip_antispoof: true,
gray_scale_variance_threshold: 100.0,
anti_spoofing_detect_document: false,
anti_spoofing_threshold: 0.9,
};
if (config) {
configJSON = Object.assign(Object.assign({}, configJSON), config);
}
const originalImages = [];
const videoEl = document.getElementById(videoElement);
const height = (videoEl === null || videoEl === void 0 ? void 0 : videoEl.videoHeight) || 0;
const width = (videoEl === null || videoEl === void 0 ? void 0 : videoEl.videoWidth) || 0;
if (width === 0)
return { result: 'error' };
printLogs('------ isValid', `${height} ${width}`, debugType);
const canvas = document.createElement('canvas');
canvas.setAttribute('id', 'test-canvas');
canvas.setAttribute('height', `${height}`);
canvas.setAttribute('width', `${width}`);
const context = canvas.getContext('2d', { willReadFrequently: true });
if (faceMode === CameraFaceMode.front) {
context.translate(width, 0);
context.scale(-1, 1);
}
context.drawImage(videoEl, 0, 0);
const imageData = context.getImageData(0, 0, width, height);
originalImages.push(imageData);
const internalCallback = (response) => {
callback(response);
if (!stopContinuousAuth) {
continuousAuthentication(callback, config, videoElement);
}
};
privid_wasm_result = createCallback({ type: callbakTypeEnum.predict, callbackFunction: internalCallback });
yield FHE_predictOnefa(originalImages, isSimd, JSON.stringify(configJSON), proxy(privid_wasm_result));
releaseCanvas(canvas);
});
}
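/*
 * Illustrative usage sketch (not part of the SDK source): starting a continuous predict loop
 * and stopping it later via setStopLoopContinuousAuthentication.
 *
 *   setStopLoopContinuousAuthentication(false);
 *   await continuousAuthentication((result) => console.log('continuous predict:', result));
 *   // ...later, to stop the loop:
 *   setStopLoopContinuousAuthentication(true);
 */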
export function continuousPredictWithoutRestrictions(callback, config, element) {
return __awaiter(this, void 0, void 0, function* () {
printLogs('---------------------continuousPredictWithoutRestriction---