UNPKG

@microblink/blinkid-in-browser-sdk

Version:

A simple ID scanning library for WebAssembly-enabled browsers.

1,107 lines (1,091 loc) 162 kB
/*! **************************************************************************** Copyright (c) Microblink. All rights reserved. THIS CODE IS PROVIDED ON AN *AS IS* BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION ANY IMPLIED WARRANTIES OR CONDITIONS OF TITLE, FITNESS FOR A PARTICULAR PURPOSE, MERCHANTABLITY OR NON-INFRINGEMENT. ***************************************************************************** */ 'use strict'; Object.defineProperty(exports, '__esModule', { value: true }); /*! ***************************************************************************** Copyright (c) Microsoft Corporation. Permission to use, copy, modify, and/or distribute this software for any purpose with or without fee is hereby granted. THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. ***************************************************************************** */ function __awaiter(thisArg, _arguments, P, generator) { function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } return new (P || (P = Promise))(function (resolve, reject) { function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } step((generator = generator.apply(thisArg, _arguments || [])).next()); }); } /** * Copyright (c) Microblink Ltd. 
All rights reserved. */ let nextMessageID = 0; function getNextMessageID() { const msgId = nextMessageID; nextMessageID = nextMessageID + 1; return msgId; } class BaseRequestMessage { constructor(action) { this.action = action; this.messageID = getNextMessageID(); } } class InitMessage extends BaseRequestMessage { constructor(wasmLoadSettings, userId) { super(InitMessage.action); this.wasmModuleName = wasmLoadSettings.wasmModuleName; this.licenseKey = wasmLoadSettings.licenseKey; this.userId = userId; this.registerLoadCallback = wasmLoadSettings.loadProgressCallback !== null; this.allowHelloMessage = wasmLoadSettings.allowHelloMessage; this.engineLocation = wasmLoadSettings.engineLocation; } } InitMessage.action = "init"; var ParameterType; (function (ParameterType) { ParameterType[ParameterType["Any"] = 0] = "Any"; ParameterType[ParameterType["Recognizer"] = 1] = "Recognizer"; ParameterType[ParameterType["RecognizerSettings"] = 2] = "RecognizerSettings"; ParameterType[ParameterType["Callback"] = 3] = "Callback"; })(ParameterType || (ParameterType = {})); class CreateNewRecognizer extends BaseRequestMessage { constructor(className, params) { super(CreateNewRecognizer.action); this.className = className; this.params = params; } } CreateNewRecognizer.action = "createNewNativeObject"; class CreateRecognizerRunner extends BaseRequestMessage { constructor(recognizerHandles, allowMultipleResults, registeredMetadataCallbacks) { super(CreateRecognizerRunner.action); this.recognizerHandles = recognizerHandles; this.allowMultipleResults = allowMultipleResults; this.registeredMetadataCallbacks = registeredMetadataCallbacks; } } CreateRecognizerRunner.action = "createRecognizerRunner"; class ReconfigureRecognizerRunner extends BaseRequestMessage { constructor(recognizerHandles, allowMultipleResults) { super(ReconfigureRecognizerRunner.action); this.recognizerHandles = recognizerHandles; this.allowMultipleResults = allowMultipleResults; } } ReconfigureRecognizerRunner.action = 
"reconfigureRecognizerRunner"; class DeleteRecognizerRunner extends BaseRequestMessage { constructor() { super(DeleteRecognizerRunner.action); } } DeleteRecognizerRunner.action = "deleteRecognizerRunner"; class InvokeObjectMethod extends BaseRequestMessage { constructor(objectHandle, methodName, params) { super(InvokeObjectMethod.action); this.objectHandle = objectHandle; this.methodName = methodName; this.params = params; } } InvokeObjectMethod.action = "invokeObject"; class ProcessImage extends BaseRequestMessage { constructor(image) { super(ProcessImage.action); this.frame = image; } getTransferrables() { return [this.frame.imageData.data.buffer]; } } ProcessImage.action = "processImage"; class ResetRecognizers extends BaseRequestMessage { constructor(hardReset) { super(ResetRecognizers.action); this.hardReset = hardReset; } } ResetRecognizers.action = "resetRecognizers"; class RegisteredMetadataCallbacks { constructor() { this.onDebugText = false; this.onDetectionFailed = false; this.onQuadDetection = false; this.onPointsDetection = false; this.onFirstSideResult = false; this.onGlare = false; } } class RegisterMetadataCallbacks extends BaseRequestMessage { constructor(registeredMetadataCallbacks) { super(RegisterMetadataCallbacks.action); this.registeredMetadataCallbacks = registeredMetadataCallbacks; } } RegisterMetadataCallbacks.action = "registerMetadataCallbacks"; class SetDetectionOnly extends BaseRequestMessage { constructor(detectionOnlyMode) { super(SetDetectionOnly.action); this.detectionOnlyMode = detectionOnlyMode; } } SetDetectionOnly.action = "setDetectionOnly"; class SetClearTimeoutCallback extends BaseRequestMessage { constructor(callbackNonEmpty) { super(SetClearTimeoutCallback.action); this.callbackNonEmpty = callbackNonEmpty; } } SetClearTimeoutCallback.action = "setClearTimeoutCallback"; class SetCameraPreviewMirrored extends BaseRequestMessage { constructor(cameraPreviewMirrored) { super(SetCameraPreviewMirrored.action); 
this.cameraPreviewMirrored = cameraPreviewMirrored; } } SetCameraPreviewMirrored.action = "setCameraPreviewMirrored"; // ===================================== / // Metadata callback messages // ===================================== / var MetadataCallback; (function (MetadataCallback) { MetadataCallback[MetadataCallback["onDebugText"] = 0] = "onDebugText"; MetadataCallback[MetadataCallback["onDetectionFailed"] = 1] = "onDetectionFailed"; MetadataCallback[MetadataCallback["onQuadDetection"] = 2] = "onQuadDetection"; MetadataCallback[MetadataCallback["onPointsDetection"] = 3] = "onPointsDetection"; MetadataCallback[MetadataCallback["onFirstSideResult"] = 4] = "onFirstSideResult"; MetadataCallback[MetadataCallback["clearTimeoutCallback"] = 5] = "clearTimeoutCallback"; MetadataCallback[MetadataCallback["onGlare"] = 6] = "onGlare"; MetadataCallback[MetadataCallback["recognizerCallback"] = 7] = "recognizerCallback"; })(MetadataCallback || (MetadataCallback = {})); /* eslint-enable @typescript-eslint/no-explicit-any */ /** * Copyright (c) Microblink Ltd. All rights reserved. 
*/ function defaultEventHandler(resolve, reject) { return (msg) => { const resultMsg = msg; if (resultMsg.success) { resolve(); } else { reject(resultMsg.error); } }; } function defaultResultEventHandler(successResolver, reject) { return (msg) => { const resultMsg = msg; if (resultMsg.success) { successResolver(msg); } else { reject(resultMsg.error); } }; } /* eslint-disable @typescript-eslint/no-explicit-any, @typescript-eslint/no-unsafe-assignment */ function wrapParameters(params) { // convert params const wrappedPrameters = []; for (let param of params) { let paramType = ParameterType.Any; if (param instanceof RemoteRecognizer) { paramType = ParameterType.Recognizer; param = param.getRemoteObjectHandle(); } wrappedPrameters.push({ parameter: param, type: paramType }); } return wrappedPrameters; } /* eslint-enable @typescript-eslint/no-explicit-any, @typescript-eslint/no-unsafe-assignment */ class RemoteRecognizer { /* eslint-enable lines-between-class-members */ constructor(wasmWorker, recognizerName, remoteObjHandle) { this.wasmSDKWorker = wasmWorker; this.objectHandle = remoteObjHandle; this.recognizerName = recognizerName; this.callbacks = new Map(); } /* eslint-enable @typescript-eslint/ban-types */ getRemoteObjectHandle() { return this.objectHandle; } currentSettings() { return new Promise((resolve, reject) => { if (this.objectHandle < 0) { reject("Invalid object handle: " + this.objectHandle.toString()); return; } const msg = new InvokeObjectMethod(this.objectHandle, "currentSettings", []); const handler = defaultResultEventHandler((msg) => { resolve(msg.result); }, reject); this.wasmSDKWorker.postMessage(msg, handler); }); } clearAllCallbacks() { this.callbacks.clear(); this.wasmSDKWorker.unregisterRecognizerCallbacks(this.objectHandle); } /* eslint-disable @typescript-eslint/no-explicit-any, @typescript-eslint/no-unsafe-assignment, @typescript-eslint/no-unsafe-member-access, @typescript-eslint/no-unsafe-return */ // convert each function member into 
wrapped parameter, containing address where callback needs to be delivered removeFunctions(settings) { // clear any existing callbacks this.clearAllCallbacks(); const keys = Object.keys(settings); let needsRegistering = false; for (const key of keys) { const data = settings[key]; if (typeof data === "function") { this.callbacks.set(key, data); const wrappedFunction = { parameter: { recognizerHandle: this.objectHandle, callbackName: key }, type: ParameterType.Callback }; settings[key] = wrappedFunction; needsRegistering = true; } } if (needsRegistering) { this.wasmSDKWorker.registerRecognizerCallbacks(this.objectHandle, this); } return settings; } /* eslint-enable @typescript-eslint/no-explicit-any, @typescript-eslint/no-unsafe-assignment, @typescript-eslint/no-unsafe-member-access, @typescript-eslint/no-unsafe-return */ updateSettings(newSettings) { return new Promise((resolve, reject) => { if (this.objectHandle < 0) { reject("Invalid object handle: " + this.objectHandle.toString()); return; } /* eslint-disable @typescript-eslint/no-unsafe-assignment */ const msg = new InvokeObjectMethod(this.objectHandle, "updateSettings", [ { parameter: this.removeFunctions(newSettings), type: ParameterType.RecognizerSettings } ]); /* eslint-enable @typescript-eslint/no-unsafe-assignment */ const handler = defaultEventHandler(resolve, reject); this.wasmSDKWorker.postMessage(msg, handler); }); } /* eslint-disable @typescript-eslint/no-explicit-any */ invokeCallback(callbackName, args) { const callback = this.callbacks.get(callbackName); if (callback !== undefined) { callback(...args); } else { console.warn("Cannot find callback", callbackName); } } /* eslint-enable @typescript-eslint/no-explicit-any */ getResult() { return new Promise((resolve, reject) => { if (this.objectHandle < 0) { reject("Invalid object handle: " + this.objectHandle.toString()); return; } const msg = new InvokeObjectMethod(this.objectHandle, "getResult", []); const handler = defaultResultEventHandler((msg) => 
{ resolve(msg.result); }, reject); this.wasmSDKWorker.postMessage(msg, handler); }); } delete() { return new Promise((resolve, reject) => { if (this.objectHandle < 0) { reject("Invalid object handle: " + this.objectHandle.toString()); return; } this.clearAllCallbacks(); const msg = new InvokeObjectMethod(this.objectHandle, "delete", []); const handler = defaultEventHandler(() => { this.objectHandle = -1; resolve(); }, reject); this.wasmSDKWorker.postMessage(msg, handler); }); } } function createRegisteredCallbacks(metadataCallbacks) { const msg = new RegisteredMetadataCallbacks(); // https://stackoverflow.com/a/20093686/213057 msg.onDebugText = !!metadataCallbacks.onDebugText; msg.onDetectionFailed = !!metadataCallbacks.onDetectionFailed; msg.onPointsDetection = !!metadataCallbacks.onPointsDetection; msg.onQuadDetection = !!metadataCallbacks.onQuadDetection; msg.onFirstSideResult = !!metadataCallbacks.onFirstSideResult; msg.onGlare = !!metadataCallbacks.onGlare; return msg; } class RemoteRecognizerRunner { constructor(wasmWorker) { this.deleted = false; this.wasmSDKWorker = wasmWorker; } processImage(image) { return new Promise((resolve, reject) => { if (this.deleted) { reject("Recognizer runner is deleted. It cannot be used anymore!"); return; } const msg = new ProcessImage(image); const handler = defaultResultEventHandler((response) => { const state = response.recognitionState; resolve(state); }, reject); this.wasmSDKWorker.postTransferrableMessage(msg, handler); }); } reconfigureRecognizers(recognizers, allowMultipleResults) { return new Promise((resolve, reject) => { if (this.deleted) { reject("Recognizer runner is deleted. 
It cannot be used anymore!"); return; } const recognizerHandles = getRecognizerHandles(recognizers); const msg = new ReconfigureRecognizerRunner(recognizerHandles, allowMultipleResults); const handler = defaultEventHandler(resolve, reject); this.wasmSDKWorker.postMessage(msg, handler); }); } setMetadataCallbacks(metadataCallbacks) { return new Promise((resolve, reject) => { const msg = new RegisterMetadataCallbacks(createRegisteredCallbacks(metadataCallbacks)); const handler = defaultEventHandler(resolve, reject); this.wasmSDKWorker.postMessageAndRegisterCallbacks(msg, metadataCallbacks, handler); }); } resetRecognizers(hardReset) { return new Promise((resolve, reject) => { const msg = new ResetRecognizers(hardReset); const handler = defaultEventHandler(resolve, reject); this.wasmSDKWorker.postMessage(msg, handler); }); } setDetectionOnlyMode(detectionOnly) { return new Promise((resolve, reject) => { const msg = new SetDetectionOnly(detectionOnly); const handler = defaultEventHandler(resolve, reject); this.wasmSDKWorker.postMessage(msg, handler); }); } setClearTimeoutCallback(clearTimeoutCallback) { return new Promise((resolve, reject) => { const msg = new SetClearTimeoutCallback(clearTimeoutCallback !== null); const handler = defaultEventHandler(resolve, reject); this.wasmSDKWorker.registerClearTimeoutCallback(clearTimeoutCallback); this.wasmSDKWorker.postMessage(msg, handler); }); } setCameraPreviewMirrored(mirrored) { return new Promise((resolve, reject) => { const msg = new SetCameraPreviewMirrored(mirrored); const handler = defaultEventHandler(resolve, reject); this.wasmSDKWorker.postMessage(msg, handler); }); } delete() { if (this.deleted) { return Promise.reject("Recognizer runner is already deleted."); } return new Promise((resolve, reject) => { const msg = new DeleteRecognizerRunner(); const handler = defaultEventHandler(() => { this.deleted = true; resolve(); }, reject); this.wasmSDKWorker.postMessage(msg, handler); }); } } function 
getRecognizerHandles(remoteRecognizers) { const recognizerHandles = []; for (const remoteRecognizer of remoteRecognizers) { recognizerHandles.push(remoteRecognizer.getRemoteObjectHandle()); } return recognizerHandles; } class WasmModuleWorkerProxy { constructor(wasmSDKWorker) { this.wasmSDKWorker = wasmSDKWorker; } createRecognizerRunner(recognizers, allowMultipleResults = false, metadataCallbacks = {}) { return new Promise((resolve, reject) => { const recognizerHandles = getRecognizerHandles(recognizers); const msg = new CreateRecognizerRunner(recognizerHandles, allowMultipleResults, createRegisteredCallbacks(metadataCallbacks)); const handler = defaultEventHandler(() => { resolve(new RemoteRecognizerRunner(this.wasmSDKWorker)); }, reject); this.wasmSDKWorker.postMessageAndRegisterCallbacks(msg, metadataCallbacks, handler); }); } /* eslint-disable @typescript-eslint/no-explicit-any */ newRecognizer(className, ...constructorArgs) { return new Promise((resolve, reject) => { const msg = new CreateNewRecognizer(className, wrapParameters(constructorArgs)); const handler = defaultResultEventHandler((msg) => { const remoteRecognizer = new RemoteRecognizer(this.wasmSDKWorker, className, msg.objectHandle); resolve(remoteRecognizer); }, reject); this.wasmSDKWorker.postMessage(msg, handler); }); } } class WasmSDKWorker { /* eslint-enable lines-between-class-members */ constructor(worker, loadProgressCallback, rejectHandler) { this.eventHandlers = {}; this.metadataCallbacks = {}; this.clearTimeoutCallback = null; this.mbWasmWorker = worker; this.mbWasmWorker.onmessage = (event) => { this.handleWorkerEvent(event); }; this.mbWasmWorker.onerror = () => { rejectHandler("Problem during initialization of worker file!"); return; }; this.mbWasmModule = new WasmModuleWorkerProxy(this); this.loadCallback = loadProgressCallback; this.recognizersWithCallbacks = new Map(); this.showOverlay = false; } postMessage(message, eventHandler) { this.eventHandlers[message.messageID] = 
eventHandler; this.mbWasmWorker.postMessage(message); } postTransferrableMessage(message, eventHandler) { this.eventHandlers[message.messageID] = eventHandler; this.mbWasmWorker.postMessage(message, message.getTransferrables()); } postMessageAndRegisterCallbacks(message, metadataCallbacks, eventHandler) { this.eventHandlers[message.messageID] = eventHandler; this.metadataCallbacks = metadataCallbacks; this.mbWasmWorker.postMessage(message); } registerClearTimeoutCallback(callback) { this.clearTimeoutCallback = callback; } registerRecognizerCallbacks(remoteRecognizerHandle, recognizer) { this.recognizersWithCallbacks.set(remoteRecognizerHandle, recognizer); } unregisterRecognizerCallbacks(remoteRecognizerHandle) { this.recognizersWithCallbacks.delete(remoteRecognizerHandle); } handleWorkerEvent(event) { if ("isCallbackMessage" in event.data) { const msg = event.data; switch (msg.callbackType) { case MetadataCallback.onDebugText: if (typeof this.metadataCallbacks.onDebugText === "function") { this.metadataCallbacks.onDebugText(msg.callbackParameters[0]); } break; case MetadataCallback.onDetectionFailed: if (typeof this.metadataCallbacks.onDetectionFailed === "function") { this.metadataCallbacks.onDetectionFailed(); } break; case MetadataCallback.onPointsDetection: if (typeof this.metadataCallbacks.onPointsDetection === "function") { this.metadataCallbacks.onPointsDetection(msg.callbackParameters[0]); } break; case MetadataCallback.onQuadDetection: if (typeof this.metadataCallbacks.onQuadDetection === "function") { this.metadataCallbacks.onQuadDetection(msg.callbackParameters[0]); } break; case MetadataCallback.onFirstSideResult: if (typeof this.metadataCallbacks.onFirstSideResult === "function") { this.metadataCallbacks.onFirstSideResult(); } break; case MetadataCallback.clearTimeoutCallback: if (this.clearTimeoutCallback && typeof this.clearTimeoutCallback.onClearTimeout === "function") { this.clearTimeoutCallback.onClearTimeout(); } break; case 
MetadataCallback.onGlare: if (typeof this.metadataCallbacks.onGlare === "function") { this.metadataCallbacks.onGlare(msg.callbackParameters[0]); } break; case MetadataCallback.recognizerCallback: { // first parameter is address, other parameters are callback parameters const address = msg.callbackParameters.shift(); const recognizer = this.recognizersWithCallbacks.get(address.recognizerHandle); if (recognizer !== undefined) { recognizer.invokeCallback(address.callbackName, msg.callbackParameters); } else { console.warn("Cannot find recognizer to deliver callback message. Maybe it's destroyed?", address); } break; } default: throw new Error(`Unknown callback type: ${MetadataCallback[msg.callbackType]}`); } } else if ("isLoadProgressMessage" in event.data) { const msg = event.data; if (typeof this.loadCallback === "function") { this.loadCallback(msg.progress); } } else { const msg = event.data; const eventHandler = this.eventHandlers[msg.messageID]; delete this.eventHandlers[msg.messageID]; eventHandler(msg); } } static createWasmWorker(worker, wasmLoadSettings, userId) { return __awaiter(this, void 0, void 0, function* () { return new Promise((resolve, reject) => { const wasmWorker = new WasmSDKWorker(worker, wasmLoadSettings.loadProgressCallback, reject); const initMessage = new InitMessage(wasmLoadSettings, userId); const initEventHandler = defaultResultEventHandler((msg) => { wasmWorker.showOverlay = msg.showOverlay; resolve(wasmWorker); }, reject); wasmWorker.postMessage(initMessage, initEventHandler); }); }); } } /** * Copyright (c) Microblink Ltd. All rights reserved. */ // ============================================ / // DATA STRUCTURES / // ============================================ / /** * Specifies the orientation of the contents of the image. * This is important for some recognizers, especially when * performing recognition on the mobile device. 
*/ exports.ImageOrientation = void 0; (function (ImageOrientation) { /** * Image contents are rotated 90 degrees left. * This usually happens on mobile devices when capturing image while * device is held in "portrait" orientation, while device camera sensor * is mounted horizontally (i.e. produced image is in "landscape" orienation). */ ImageOrientation[ImageOrientation["RotatedLeft90"] = 0] = "RotatedLeft90"; /** * Image contents are not rotated in any manner. * This is the default for images captured using HTML canvas, as * used in FrameCapture class. * This orientation also usually happens on mobile devices when capturing * image while device is held in "landscape" orientation, while device * camera sensor is mounted horizontally (i.e. also in same orientation). */ ImageOrientation[ImageOrientation["NoRotation"] = 1] = "NoRotation"; /** * Image contents are rotated 90 degrees right. * This usually happens on mobile devices when capturing image while * device is held in "reverse-portrait" orientation, while device camera sensor * is mounted horizontally (i.e. produced image is in "landscape" orienation). */ ImageOrientation[ImageOrientation["RotatedRight90"] = 2] = "RotatedRight90"; /** * Image contents are rotated 180 degrees, i.e. image contents are "upside down". * This usually happens on mobile devices when capturing image while * device is held in "reverse-landscape" orientation, while device camera sensor * is mounted horizontally (i.e. produced image is in "landscape" orienation). */ ImageOrientation[ImageOrientation["Rotated180"] = 3] = "Rotated180"; })(exports.ImageOrientation || (exports.ImageOrientation = {})); /** * Specifies the state of the recognition result. */ exports.RecognizerResultState = void 0; (function (RecognizerResultState) { /** Nothing has been recognized. */ RecognizerResultState[RecognizerResultState["Empty"] = 0] = "Empty"; /** Something has been recognized, but some mandatory data is still missing. 
*/ RecognizerResultState[RecognizerResultState["Uncertain"] = 1] = "Uncertain"; /** All required data has been recognized. */ RecognizerResultState[RecognizerResultState["Valid"] = 2] = "Valid"; /** Single stage of a multi-stage recognition is finished. */ RecognizerResultState[RecognizerResultState["StageValid"] = 3] = "StageValid"; })(exports.RecognizerResultState || (exports.RecognizerResultState = {})); /** * Copyright (c) Microblink Ltd. All rights reserved. */ /** * Detection status of the specific detected object. */ exports.DetectionStatus = void 0; (function (DetectionStatus) { /** Detection failed, form not detected */ DetectionStatus[DetectionStatus["Fail"] = 0] = "Fail"; /** Object was successfully detected */ DetectionStatus[DetectionStatus["Success"] = 1] = "Success"; /** Object detected, but the camera is too far above it */ DetectionStatus[DetectionStatus["CameraTooHigh"] = 2] = "CameraTooHigh"; /** Fallback detection of an object was successful */ DetectionStatus[DetectionStatus["FallbackSuccess"] = 3] = "FallbackSuccess"; /** Object is detected, but parts of it are not in image */ DetectionStatus[DetectionStatus["Partial"] = 4] = "Partial"; /** Object detected, but camera is at too big angle */ DetectionStatus[DetectionStatus["CameraAtAngle"] = 5] = "CameraAtAngle"; /** Object detected, but the camera is too near to it */ DetectionStatus[DetectionStatus["CameraTooNear"] = 6] = "CameraTooNear"; /** Document detected, but document is too close to the edge of the frame */ DetectionStatus[DetectionStatus["DocumentTooCloseToEdge"] = 7] = "DocumentTooCloseToEdge"; })(exports.DetectionStatus || (exports.DetectionStatus = {})); /** * Copyright (c) Microblink Ltd. All rights reserved. */ // ============================================ / // Frame capture and camera management support. / // ============================================ / let canvas; /** * Represents a captured frame from HTMLVideoElement. 
*/ class CapturedFrame { constructor(imageData, orientation, videoFrame) { this.imageData = imageData; this.orientation = orientation; this.videoFrame = videoFrame; } } /** * Captures a frame from any CanvasImageSource, such as HTMLVideoElement or HTMLImageElement. * @param imageSource image source from which frame should be captured * @returns instance of CapturedFrame */ function captureFrame(imageSource) { let imageWidth; let imageHeight; let videoFrame = false; if (imageSource instanceof HTMLVideoElement) { imageWidth = imageSource.videoWidth; imageHeight = imageSource.videoHeight; videoFrame = true; } else if (imageSource instanceof HTMLImageElement) { imageWidth = imageSource.naturalWidth; imageHeight = imageSource.naturalHeight; } else if (imageSource instanceof SVGImageElement) { throw new Error("Recognition of SVG elements not supported!"); } else { imageWidth = imageSource.width; imageHeight = imageSource.height; } canvas = canvas || document.createElement("canvas"); canvas.width = imageWidth; canvas.height = imageHeight; const ctx = canvas.getContext("2d"); if (!ctx) { throw new Error("Could not get canvas 2d context!"); } ctx.drawImage(imageSource, 0, 0, canvas.width, canvas.height); const pixelData = ctx.getImageData(0, 0, canvas.width, canvas.height); return new CapturedFrame(pixelData, // TODO: https://developer.mozilla.org/en-US/docs/Web/API/Screen/orientation // or https://developer.mozilla.org/en-US/docs/Web/API/Window/orientation exports.ImageOrientation.NoRotation, videoFrame); } /** * Copyright (c) Microblink Ltd. All rights reserved. */ /** * Preferred type of camera to be used when opening the camera feed. 
*/ exports.PreferredCameraType = void 0; (function (PreferredCameraType) { /** Prefer back facing camera */ PreferredCameraType[PreferredCameraType["BackFacingCamera"] = 0] = "BackFacingCamera"; /** Prefer front facing camera */ PreferredCameraType[PreferredCameraType["FrontFacingCamera"] = 1] = "FrontFacingCamera"; })(exports.PreferredCameraType || (exports.PreferredCameraType = {})); /** * Explanation why VideoRecognizer has failed to open the camera feed. */ exports.NotSupportedReason = void 0; (function (NotSupportedReason) { /** navigator.mediaDevices.getUserMedia is not supported by current browser for current context. */ NotSupportedReason["MediaDevicesNotSupported"] = "MediaDevicesNotSupported"; /** Camera with requested features is not available on current device. */ NotSupportedReason["CameraNotFound"] = "CameraNotFound"; /** Camera access was not granted by the user. */ NotSupportedReason["CameraNotAllowed"] = "CameraNotAllowed"; /** Unable to start playing because camera is already in use. */ NotSupportedReason["CameraInUse"] = "CameraInUse"; /** Camera is currently not available due to a OS or hardware error. */ NotSupportedReason["CameraNotAvailable"] = "CameraNotAvailable"; /** There is no provided video element to which the camera feed should be redirected. */ NotSupportedReason["VideoElementNotProvided"] = "VideoElementNotProvided"; })(exports.NotSupportedReason || (exports.NotSupportedReason = {})); /** * The error object thrown when VideoRecognizer fails to open the camera feed. */ class VideoRecognizerError extends Error { /* eslint-disable @typescript-eslint/no-explicit-any */ constructor(reason, ...params) { super(...params); this.reason = reason; this.name = "VideoRecognizerError"; } } /** * Indicates mode of recognition in VideoRecognizer. */ exports.VideoRecognitionMode = void 0; (function (VideoRecognitionMode) { /** Normal recognition */ VideoRecognitionMode[VideoRecognitionMode["Recognition"] = 0] = "Recognition"; /** Indefinite scan. 
Useful for profiling the performance of scan (using onDebugText metadata callback) */ VideoRecognitionMode[VideoRecognitionMode["RecognitionTest"] = 1] = "RecognitionTest"; /** Only detection. Useful for profiling the performance of detection (using onDebugText metadata callback) */ VideoRecognitionMode[VideoRecognitionMode["DetectionTest"] = 2] = "DetectionTest"; })(exports.VideoRecognitionMode || (exports.VideoRecognitionMode = {})); /** * A wrapper around RecognizerRunner that can use it to perform recognition of video feeds - either from live camera or * from predefined video file. */ class VideoRecognizer { constructor(videoFeed, recognizerRunner, cameraFlipped = false, allowManualVideoPlayout = false) { /** ********************************************************************************************* * PRIVATE AREA */ this.videoFeed = null; this.cancelled = false; this.timedOut = false; this.recognitionPaused = false; this.recognitionTimeoutMs = 15000; this.timeoutID = 0; this.videoRecognitionMode = exports.VideoRecognitionMode.Recognition; this.onScanningDone = null; this.allowManualVideoPlayout = false; this.cameraFlipped = false; this.videoFeed = videoFeed; this.recognizerRunner = recognizerRunner; this.cameraFlipped = cameraFlipped; if (allowManualVideoPlayout) { this.allowManualVideoPlayout = allowManualVideoPlayout; } } /** * Creates a new VideoRecognizer by opening a camera stream and attaching it to given HTMLVideoElement. If camera * cannot be accessed, the returned promise will be rejected. * * @param cameraFeed HTMLVideoELement to which camera stream should be attached * @param recognizerRunner RecognizerRunner that should be used for video stream recognition * @param cameraId User can provide specific camera ID to be selected and used * @param preferredCameraType Whether back facing or front facing camera is preferred. Obeyed only if there is * a choice (i.e. 
if device has only front-facing camera, the opened camera will be a front-facing camera, * regardless of preference) */ static createVideoRecognizerFromCameraStream(cameraFeed, recognizerRunner, cameraId = null, preferredCameraType = exports.PreferredCameraType.BackFacingCamera) { return __awaiter(this, void 0, void 0, function* () { // TODO: refactor this function into async/await syntax, instead of reject use throw /* eslint-disable */ return new Promise((resolve, reject) => __awaiter(this, void 0, void 0, function* () { // Check for tag name intentionally left out, so it's possible to use VideoRecognizer with custom elements. if (!cameraFeed || !(cameraFeed instanceof Element)) { const errorMessage = "Video element, i.e. camera feed is not provided!"; reject(new VideoRecognizerError(exports.NotSupportedReason.VideoElementNotProvided, errorMessage)); return; } if (navigator.mediaDevices && navigator.mediaDevices.getUserMedia) { try { const selectedCamera = yield selectCamera(cameraId, preferredCameraType); if (selectedCamera === null) { reject(new VideoRecognizerError(exports.NotSupportedReason.CameraNotFound)); return; } const constraints = { audio: false, video: { width: { min: 640, ideal: 1920, max: 1920 }, height: { min: 480, ideal: 1080, max: 1080 } } }; if (selectedCamera.deviceId === "") { const isPreferredBackFacing = preferredCameraType === exports.PreferredCameraType.BackFacingCamera; constraints.video.facingMode = { ideal: isPreferredBackFacing ? 
"environment" : "user" }; } else { constraints.video.deviceId = { exact: selectedCamera.deviceId }; } const stream = yield navigator.mediaDevices.getUserMedia(constraints); cameraFeed.controls = false; cameraFeed.srcObject = stream; let cameraFlipped = false; // mirror the camera view for front-facing camera if (selectedCamera.facing === exports.PreferredCameraType.FrontFacingCamera) { cameraFeed.style.transform = "scaleX(-1)"; cameraFlipped = true; } // TODO: await maybe not needed here yield recognizerRunner.setCameraPreviewMirrored(cameraFlipped); resolve(new VideoRecognizer(cameraFeed, recognizerRunner, cameraFlipped)); } catch (error) { let errorReason = exports.NotSupportedReason.CameraInUse; switch (error.name) { case "NotFoundError": case "OverconstrainedError": errorReason = exports.NotSupportedReason.CameraNotFound; break; case "NotAllowedError": case "SecurityError": errorReason = exports.NotSupportedReason.CameraNotAllowed; break; case "AbortError": case "NotReadableError": errorReason = exports.NotSupportedReason.CameraNotAvailable; break; case "TypeError": // this should never happen. If it does, rethrow it throw error; } reject(new VideoRecognizerError(errorReason, error.message)); } } else { reject(new VideoRecognizerError(exports.NotSupportedReason.MediaDevicesNotSupported)); } })); /* eslint-enable */ }); } /** * Creates a new VideoRecognizer by attaching the given URL to video to given HTMLVideoElement and using it to * display video frames while processing them. * * @param videoPath URL of the video file that should be recognized. * @param videoFeed HTMLVideoElement to which video file will be attached * @param recognizerRunner RecognizerRunner that should be used for video stream recognition. 
*/ static createVideoRecognizerFromVideoPath(videoPath, videoFeed, recognizerRunner) { return __awaiter(this, void 0, void 0, function* () { return new Promise((resolve) => { videoFeed.src = videoPath; videoFeed.currentTime = 0; videoFeed.onended = () => { videoRecognizer.cancelRecognition(); }; const videoRecognizer = new VideoRecognizer(videoFeed, recognizerRunner); resolve(videoRecognizer); }); }); } flipCamera() { return __awaiter(this, void 0, void 0, function* () { if (this.videoFeed) { if (!this.cameraFlipped) { this.videoFeed.style.transform = "scaleX(-1)"; this.cameraFlipped = true; } else { this.videoFeed.style.transform = "scaleX(1)"; this.cameraFlipped = false; } yield this.recognizerRunner.setCameraPreviewMirrored(this.cameraFlipped); } }); } /** * Sets the video recognition mode to be used. * * @param videoRecognitionMode the video recognition mode to be used. */ setVideoRecognitionMode(videoRecognitionMode) { return __awaiter(this, void 0, void 0, function* () { this.videoRecognitionMode = videoRecognitionMode; const isDetectionMode = this.videoRecognitionMode === exports.VideoRecognitionMode.DetectionTest; yield this.recognizerRunner.setDetectionOnlyMode(isDetectionMode); }); } /** * Starts the recognition of the video stream associated with this VideoRecognizer. The stream will be unpaused and * recognition loop will start. After recognition completes, a onScanningDone callback will be invoked with state of * the recognition. * * NOTE: As soon as the execution of the callback completes, the recognition loop will continue and recognition * state will be retained. To clear the recognition state, use resetRecognizers (within your callback). To * pause the recognition loop, use pauseRecognition (within your callback) - to resume it later use * resumeRecognition. To completely stop the recognition and video feed, while keeping the ability to use this * VideoRecognizer later, use pauseVideoFeed. 
To completely stop the recognition and video feed and release
 * all the resources involved with video stream, use releaseVideoFeed.
 *
 * @param onScanningDone Callback that will be invoked when recognition completes.
 * @param recognitionTimeoutMs Amount of time before returned promise will be resolved regardless of whether
 *        recognition was successful or not.
 */
startRecognition(onScanningDone, recognitionTimeoutMs = 15000) {
    // Guard: the feed must still exist and must currently be paused; resuming
    // an already-running feed goes through resumeRecognition instead.
    if (this.videoFeed === null) {
        throw new Error("The associated video feed has been released!");
    }
    if (!this.videoFeed.paused) {
        throw new Error("The associated video feed is not paused. Use resumeRecognition instead!");
    }
    // Reset per-session state before (re)starting the loop.
    this.cancelled = false;
    this.recognitionPaused = false;
    this.clearTimeout();
    this.recognitionTimeoutMs = recognitionTimeoutMs;
    this.onScanningDone = onScanningDone;
    // Fire-and-forget: let the runner clear the timeout when it deems appropriate.
    void this.recognizerRunner.setClearTimeoutCallback({ onClearTimeout: () => this.clearTimeout() });
    this.videoFeed.play().then(() => this.playPauseEvent(),
    /* eslint-disable @typescript-eslint/no-explicit-any */
    (nativeError) => {
        if (!this.allowManualVideoPlayout) {
            console.warn("Native error", nativeError);
            // NOTE(review): throwing inside this rejection handler surfaces as an
            // unhandled promise rejection, not a synchronous error to the caller —
            // confirm this is the intended way to report playback failures.
            throw new Error("The play() request was interrupted or prevented by browser security rules!");
        }
        if (!this.videoFeed) {
            return;
        }
        // Autoplay was blocked: show native controls and let the user start
        // playback manually; recognition follows the play/pause events.
        this.videoFeed.controls = true;
        this.videoFeed.addEventListener("play", () => this.playPauseEvent());
        this.videoFeed.addEventListener("pause", () => this.playPauseEvent());
    }
    /* eslint-enable @typescript-eslint/no-explicit-any */
    );
}
/**
 * Performs the recognition of the video stream associated with this VideoRecognizer. The stream will be
 * unpaused, recognition will be performed and promise will be resolved with recognition status. After
 * the resolution of returned promise, the video stream will be paused, but not released. To release the
 * stream, use function releaseVideoFeed.
 * This is a simple version of startRecognition that should be used for most cases, like when you only need
 * to perform one scan per video session.
* * @param recognitionTimeoutMs Amount of time before returned promise will be resolved regardless of whether * recognition was successful or not. */ recognize(recognitionTimeoutMs = 15000) { return __awaiter(this, void 0, void 0, function* () { return new Promise((resolve, reject) => { try { this.startRecognition((recognitionState) => { this.pauseVideoFeed(); resolve(recognitionState); }, recognitionTimeoutMs); } catch (error) { reject(error); } }); }); } /** * Cancels current ongoing recognition. Note that after cancelling the recognition, the callback given to * startRecognition will be immediately called. This also means that the promise returned from method * recognize will be resolved immediately. */ cancelRecognition() { this.cancelled = true; } /** * Pauses the video feed. You can resume the feed by calling recognize or startRecognition. * Note that this pauses both the camera feed and recognition. If you just want to pause * recognition, while keeping the camera feed active, call method pauseRecognition. */ pauseVideoFeed() { this.pauseRecognition(); if (this.videoFeed) { this.videoFeed.pause(); } } /** * Pauses the recognition. This means that video frames that arrive from given video source * will not be recognized. To resume recognition, call resumeRecognition(boolean). * Unlike cancelRecognition, the callback given to startRecognition will not be invoked after pausing * the recognition (unless there is already processing in-flight that may call the callback just before * pausing the actual recognition loop). */ pauseRecognition() { this.recognitionPaused = true; } /** * Convenience method for invoking resetRecognizers on associated RecognizerRunner. * @param hardReset Same as in RecognizerRunner.resetRecognizers. */ resetRecognizers(hardReset) { return __awaiter(this, void 0, void 0, function* () { yield this.recognizerRunner.resetRecognizers(hardReset); }); } /** * Convenience method for accessing RecognizerRunner associated with this VideoRecognizer. 
 * Sometimes it's useful to reconfigure RecognizerRunner while handling onScanningDone callback
 * and this method makes that much more convenient.
 *
 * @returns the RecognizerRunner this VideoRecognizer was constructed with.
 */
getRecognizerRunner() {
    return this.recognizerRunner;
}
/**
 * Resumes the recognition. The video feed must not be paused. If it is, an error will be thrown.
 * If video feed is paused, you should use recognize or startRecognition methods.