UNPKG

@vladmandic/face-api

Version:

JavaScript module for face detection and face recognition using TensorFlow.js

1,506 lines (1,445 loc) 169 kB
var __defineProperty = Object.defineProperty; var __hasOwnProperty = Object.prototype.hasOwnProperty; var __commonJS = (callback, module2) => () => { if (!module2) { module2 = {exports: {}}; callback(module2.exports, module2); } return module2.exports; }; var __markAsModule = (target) => { return __defineProperty(target, "__esModule", {value: true}); }; var __export = (target, all) => { __markAsModule(target); for (var name in all) __defineProperty(target, name, {get: all[name], enumerable: true}); }; var __exportStar = (target, module2) => { __markAsModule(target); if (typeof module2 === "object" || typeof module2 === "function") { for (let key in module2) if (!__hasOwnProperty.call(target, key) && key !== "default") __defineProperty(target, key, {get: () => module2[key], enumerable: true}); } return target; }; var __toModule = (module2) => { if (module2 && module2.__esModule) return module2; return __exportStar(__defineProperty({}, "default", {value: module2, enumerable: true}), module2); }; // src/env/isNodejs.ts var require_isNodejs = __commonJS((exports2, module2) => { __export(exports2, { isNodejs: () => isNodejs3 }); function isNodejs3() { return typeof global === "object" && true && typeof module2 !== "undefined" && typeof process !== "undefined" && !!process.version; } }); // src/draw/drawContour.ts function drawContour(ctx, points, isClosed = false) { ctx.beginPath(); points.slice(1).forEach(({x, y}, prevIdx) => { const from = points[prevIdx]; ctx.moveTo(from.x, from.y); ctx.lineTo(x, y); }); if (isClosed) { const from = points[points.length - 1]; const to = points[0]; if (!from || !to) { return; } ctx.moveTo(from.x, from.y); ctx.lineTo(to.x, to.y); } ctx.stroke(); } // src/classes/Dimensions.ts class Dimensions { constructor(width, height) { if (!isValidNumber(width) || !isValidNumber(height)) { throw new Error(`Dimensions.constructor - expected width and height to be valid numbers, instead have ${JSON.stringify({width, height})}`); } this._width = 
width; this._height = height; } get width() { return this._width; } get height() { return this._height; } reverse() { return new Dimensions(1 / this.width, 1 / this.height); } } // src/utils/index.ts const utils_exports = {}; __export(utils_exports, { computeReshapedDimensions: () => computeReshapedDimensions, getCenterPoint: () => getCenterPoint, isDimensions: () => isDimensions, isEven: () => isEven, isFloat: () => isFloat, isTensor: () => isTensor, isTensor1D: () => isTensor1D, isTensor2D: () => isTensor2D, isTensor3D: () => isTensor3D, isTensor4D: () => isTensor4D, isValidNumber: () => isValidNumber, isValidProbablitiy: () => isValidProbablitiy, range: () => range, round: () => round }); const tf = __toModule(require("@tensorflow/tfjs/dist/tf.es2017.js")); function isTensor(tensor2, dim) { return tensor2 instanceof tf.Tensor && tensor2.shape.length === dim; } function isTensor1D(tensor2) { return isTensor(tensor2, 1); } function isTensor2D(tensor2) { return isTensor(tensor2, 2); } function isTensor3D(tensor2) { return isTensor(tensor2, 3); } function isTensor4D(tensor2) { return isTensor(tensor2, 4); } function isFloat(num) { return num % 1 !== 0; } function isEven(num) { return num % 2 === 0; } function round(num, prec = 2) { const f = Math.pow(10, prec); return Math.floor(num * f) / f; } function isDimensions(obj) { return obj && obj.width && obj.height; } function computeReshapedDimensions({width, height}, inputSize) { const scale2 = inputSize / Math.max(height, width); return new Dimensions(Math.round(width * scale2), Math.round(height * scale2)); } function getCenterPoint(pts) { return pts.reduce((sum, pt) => sum.add(pt), new Point(0, 0)).div(new Point(pts.length, pts.length)); } function range(num, start, step) { return Array(num).fill(0).map((_, i) => start + i * step); } function isValidNumber(num) { return !!num && num !== Infinity && num !== -Infinity && !isNaN(num) || num === 0; } function isValidProbablitiy(num) { return isValidNumber(num) && 0 <= 
num && num <= 1; } // src/classes/Point.ts class Point { constructor(x, y) { this._x = x; this._y = y; } get x() { return this._x; } get y() { return this._y; } add(pt) { return new Point(this.x + pt.x, this.y + pt.y); } sub(pt) { return new Point(this.x - pt.x, this.y - pt.y); } mul(pt) { return new Point(this.x * pt.x, this.y * pt.y); } div(pt) { return new Point(this.x / pt.x, this.y / pt.y); } abs() { return new Point(Math.abs(this.x), Math.abs(this.y)); } magnitude() { return Math.sqrt(Math.pow(this.x, 2) + Math.pow(this.y, 2)); } floor() { return new Point(Math.floor(this.x), Math.floor(this.y)); } } // src/classes/Box.ts class Box { static isRect(rect) { return !!rect && [rect.x, rect.y, rect.width, rect.height].every(isValidNumber); } static assertIsValidBox(box, callee, allowNegativeDimensions = false) { if (!Box.isRect(box)) { throw new Error(`${callee} - invalid box: ${JSON.stringify(box)}, expected object with properties x, y, width, height`); } if (!allowNegativeDimensions && (box.width < 0 || box.height < 0)) { throw new Error(`${callee} - width (${box.width}) and height (${box.height}) must be positive numbers`); } } constructor(_box, allowNegativeDimensions = true) { const box = _box || {}; const isBbox = [box.left, box.top, box.right, box.bottom].every(isValidNumber); const isRect = [box.x, box.y, box.width, box.height].every(isValidNumber); if (!isRect && !isBbox) { throw new Error(`Box.constructor - expected box to be IBoundingBox | IRect, instead have ${JSON.stringify(box)}`); } const [x, y, width, height] = isRect ? 
[box.x, box.y, box.width, box.height] : [box.left, box.top, box.right - box.left, box.bottom - box.top]; Box.assertIsValidBox({x, y, width, height}, "Box.constructor", allowNegativeDimensions); this._x = x; this._y = y; this._width = width; this._height = height; } get x() { return this._x; } get y() { return this._y; } get width() { return this._width; } get height() { return this._height; } get left() { return this.x; } get top() { return this.y; } get right() { return this.x + this.width; } get bottom() { return this.y + this.height; } get area() { return this.width * this.height; } get topLeft() { return new Point(this.left, this.top); } get topRight() { return new Point(this.right, this.top); } get bottomLeft() { return new Point(this.left, this.bottom); } get bottomRight() { return new Point(this.right, this.bottom); } round() { const [x, y, width, height] = [this.x, this.y, this.width, this.height].map((val) => Math.round(val)); return new Box({x, y, width, height}); } floor() { const [x, y, width, height] = [this.x, this.y, this.width, this.height].map((val) => Math.floor(val)); return new Box({x, y, width, height}); } toSquare() { let {x, y, width, height} = this; const diff = Math.abs(width - height); if (width < height) { x -= diff / 2; width += diff; } if (height < width) { y -= diff / 2; height += diff; } return new Box({x, y, width, height}); } rescale(s) { const scaleX = isDimensions(s) ? s.width : s; const scaleY = isDimensions(s) ? 
s.height : s; return new Box({ x: this.x * scaleX, y: this.y * scaleY, width: this.width * scaleX, height: this.height * scaleY }); } pad(padX, padY) { let [x, y, width, height] = [ this.x - padX / 2, this.y - padY / 2, this.width + padX, this.height + padY ]; return new Box({x, y, width, height}); } clipAtImageBorders(imgWidth, imgHeight) { const {x, y, right, bottom} = this; const clippedX = Math.max(x, 0); const clippedY = Math.max(y, 0); const newWidth = right - clippedX; const newHeight = bottom - clippedY; const clippedWidth = Math.min(newWidth, imgWidth - clippedX); const clippedHeight = Math.min(newHeight, imgHeight - clippedY); return new Box({x: clippedX, y: clippedY, width: clippedWidth, height: clippedHeight}).floor(); } shift(sx, sy) { const {width, height} = this; const x = this.x + sx; const y = this.y + sy; return new Box({x, y, width, height}); } padAtBorders(imageHeight, imageWidth) { const w = this.width + 1; const h = this.height + 1; let dx = 1; let dy = 1; let edx = w; let edy = h; let x = this.left; let y = this.top; let ex = this.right; let ey = this.bottom; if (ex > imageWidth) { edx = -ex + imageWidth + w; ex = imageWidth; } if (ey > imageHeight) { edy = -ey + imageHeight + h; ey = imageHeight; } if (x < 1) { edy = 2 - x; x = 1; } if (y < 1) { edy = 2 - y; y = 1; } return {dy, edy, dx, edx, y, ey, x, ex, w, h}; } calibrate(region) { return new Box({ left: this.left + region.left * this.width, top: this.top + region.top * this.height, right: this.right + region.right * this.width, bottom: this.bottom + region.bottom * this.height }).toSquare().round(); } } // src/classes/BoundingBox.ts class BoundingBox extends Box { constructor(left, top, right, bottom, allowNegativeDimensions = false) { super({left, top, right, bottom}, allowNegativeDimensions); } } // src/classes/ObjectDetection.ts class ObjectDetection { constructor(score, classScore, className, relativeBox, imageDims) { this._imageDims = new Dimensions(imageDims.width, 
imageDims.height); this._score = score; this._classScore = classScore; this._className = className; this._box = new Box(relativeBox).rescale(this._imageDims); } get score() { return this._score; } get classScore() { return this._classScore; } get className() { return this._className; } get box() { return this._box; } get imageDims() { return this._imageDims; } get imageWidth() { return this.imageDims.width; } get imageHeight() { return this.imageDims.height; } get relativeBox() { return new Box(this._box).rescale(this.imageDims.reverse()); } forSize(width, height) { return new ObjectDetection(this.score, this.classScore, this.className, this.relativeBox, {width, height}); } } // src/classes/FaceDetection.ts class FaceDetection extends ObjectDetection { constructor(score, relativeBox, imageDims) { super(score, score, "", relativeBox, imageDims); } forSize(width, height) { const {score, relativeBox, imageDims} = super.forSize(width, height); return new FaceDetection(score, relativeBox, imageDims); } } // src/ops/iou.ts function iou(box1, box2, isIOU = true) { const width = Math.max(0, Math.min(box1.right, box2.right) - Math.max(box1.left, box2.left)); const height = Math.max(0, Math.min(box1.bottom, box2.bottom) - Math.max(box1.top, box2.top)); const interSection = width * height; return isIOU ? interSection / (box1.area + box2.area - interSection) : interSection / Math.min(box1.area, box2.area); } // src/ops/minBbox.ts function minBbox(pts) { const xs = pts.map((pt) => pt.x); const ys = pts.map((pt) => pt.y); const minX = xs.reduce((min, x) => x < min ? x : min, Infinity); const minY = ys.reduce((min, y) => y < min ? y : min, Infinity); const maxX = xs.reduce((max, x) => max < x ? x : max, 0); const maxY = ys.reduce((max, y) => max < y ? 
y : max, 0); return new BoundingBox(minX, minY, maxX, maxY); } // src/ops/nonMaxSuppression.ts function nonMaxSuppression(boxes, scores, iouThreshold, isIOU = true) { let indicesSortedByScore = scores.map((score, boxIndex) => ({score, boxIndex})).sort((c1, c2) => c1.score - c2.score).map((c) => c.boxIndex); const pick = []; while (indicesSortedByScore.length > 0) { const curr = indicesSortedByScore.pop(); pick.push(curr); const indices = indicesSortedByScore; const outputs = []; for (let i = 0; i < indices.length; i++) { const idx = indices[i]; const currBox = boxes[curr]; const idxBox = boxes[idx]; outputs.push(iou(currBox, idxBox, isIOU)); } indicesSortedByScore = indicesSortedByScore.filter((_, j) => outputs[j] <= iouThreshold); } return pick; } // src/ops/normalize.ts const tf2 = __toModule(require("@tensorflow/tfjs/dist/tf.es2017.js")); function normalize(x, meanRgb) { return tf2.tidy(() => { const [r, g, b] = meanRgb; const avg_r = tf2.fill([...x.shape.slice(0, 3), 1], r, "float32"); const avg_g = tf2.fill([...x.shape.slice(0, 3), 1], g, "float32"); const avg_b = tf2.fill([...x.shape.slice(0, 3), 1], b, "float32"); const avg_rgb = tf2.concat([avg_r, avg_g, avg_b], 3); return tf2.sub(x, avg_rgb); }); } // src/ops/padToSquare.ts const tf3 = __toModule(require("@tensorflow/tfjs/dist/tf.es2017.js")); function padToSquare(imgTensor, isCenterImage = false) { return tf3.tidy(() => { const [height, width] = imgTensor.shape.slice(1); if (height === width) { return imgTensor; } const dimDiff = Math.abs(height - width); const paddingAmount = Math.round(dimDiff * (isCenterImage ? 0.5 : 1)); const paddingAxis = height > width ? 
2 : 1; const createPaddingTensor = (paddingAmount2) => { const paddingTensorShape = imgTensor.shape.slice(); paddingTensorShape[paddingAxis] = paddingAmount2; return tf3.fill(paddingTensorShape, 0, "float32"); }; const paddingTensorAppend = createPaddingTensor(paddingAmount); const remainingPaddingAmount = dimDiff - paddingTensorAppend.shape[paddingAxis]; const paddingTensorPrepend = isCenterImage && remainingPaddingAmount ? createPaddingTensor(remainingPaddingAmount) : null; const tensorsToStack = [ paddingTensorPrepend, imgTensor, paddingTensorAppend ].filter((t) => !!t).map((t) => tf3.cast(t, "float32")); return tf3.concat(tensorsToStack, paddingAxis); }); } // src/ops/shuffleArray.ts function shuffleArray(inputArray) { const array = inputArray.slice(); for (let i = array.length - 1; i > 0; i--) { const j = Math.floor(Math.random() * (i + 1)); const x = array[i]; array[i] = array[j]; array[j] = x; } return array; } // src/ops/index.ts function sigmoid(x) { return 1 / (1 + Math.exp(-x)); } function inverseSigmoid(x) { return Math.log(x / (1 - x)); } // src/classes/Rect.ts class Rect extends Box { constructor(x, y, width, height, allowNegativeDimensions = false) { super({x, y, width, height}, allowNegativeDimensions); } } // src/classes/FaceLandmarks.ts const relX = 0.5; const relY = 0.43; const relScale = 0.45; class FaceLandmarks { constructor(relativeFaceLandmarkPositions, imgDims, shift = new Point(0, 0)) { const {width, height} = imgDims; this._imgDims = new Dimensions(width, height); this._shift = shift; this._positions = relativeFaceLandmarkPositions.map((pt) => pt.mul(new Point(width, height)).add(shift)); } get shift() { return new Point(this._shift.x, this._shift.y); } get imageWidth() { return this._imgDims.width; } get imageHeight() { return this._imgDims.height; } get positions() { return this._positions; } get relativePositions() { return this._positions.map((pt) => pt.sub(this._shift).div(new Point(this.imageWidth, this.imageHeight))); } 
forSize(width, height) { return new this.constructor(this.relativePositions, {width, height}); } shiftBy(x, y) { return new this.constructor(this.relativePositions, this._imgDims, new Point(x, y)); } shiftByPoint(pt) { return this.shiftBy(pt.x, pt.y); } align(detection, options = {}) { if (detection) { const box = detection instanceof FaceDetection ? detection.box.floor() : new Box(detection); return this.shiftBy(box.x, box.y).align(null, options); } const {useDlibAlignment, minBoxPadding} = Object.assign({}, {useDlibAlignment: false, minBoxPadding: 0.2}, options); if (useDlibAlignment) { return this.alignDlib(); } return this.alignMinBbox(minBoxPadding); } alignDlib() { const centers = this.getRefPointsForAlignment(); const [leftEyeCenter, rightEyeCenter, mouthCenter] = centers; const distToMouth = (pt) => mouthCenter.sub(pt).magnitude(); const eyeToMouthDist = (distToMouth(leftEyeCenter) + distToMouth(rightEyeCenter)) / 2; const size = Math.floor(eyeToMouthDist / relScale); const refPoint = getCenterPoint(centers); const x = Math.floor(Math.max(0, refPoint.x - relX * size)); const y = Math.floor(Math.max(0, refPoint.y - relY * size)); return new Rect(x, y, Math.min(size, this.imageWidth + x), Math.min(size, this.imageHeight + y)); } alignMinBbox(padding) { const box = minBbox(this.positions); return box.pad(box.width * padding, box.height * padding); } getRefPointsForAlignment() { throw new Error("getRefPointsForAlignment not implemented by base class"); } } // src/classes/FaceLandmarks5.ts class FaceLandmarks5 extends FaceLandmarks { getRefPointsForAlignment() { const pts = this.positions; return [ pts[0], pts[1], getCenterPoint([pts[3], pts[4]]) ]; } } // src/classes/FaceLandmarks68.ts class FaceLandmarks68 extends FaceLandmarks { getJawOutline() { return this.positions.slice(0, 17); } getLeftEyeBrow() { return this.positions.slice(17, 22); } getRightEyeBrow() { return this.positions.slice(22, 27); } getNose() { return this.positions.slice(27, 36); } 
getLeftEye() { return this.positions.slice(36, 42); } getRightEye() { return this.positions.slice(42, 48); } getMouth() { return this.positions.slice(48, 68); } getRefPointsForAlignment() { return [ this.getLeftEye(), this.getRightEye(), this.getMouth() ].map(getCenterPoint); } } // src/classes/FaceMatch.ts class FaceMatch { constructor(label, distance) { this._label = label; this._distance = distance; } get label() { return this._label; } get distance() { return this._distance; } toString(withDistance = true) { return `${this.label}${withDistance ? ` (${round(this.distance)})` : ""}`; } } // src/classes/LabeledBox.ts class LabeledBox extends Box { static assertIsValidLabeledBox(box, callee) { Box.assertIsValidBox(box, callee); if (!isValidNumber(box.label)) { throw new Error(`${callee} - expected property label (${box.label}) to be a number`); } } constructor(box, label) { super(box); this._label = label; } get label() { return this._label; } } // src/classes/LabeledFaceDescriptors.ts class LabeledFaceDescriptors { constructor(label, descriptors) { if (!(typeof label === "string")) { throw new Error("LabeledFaceDescriptors - constructor expected label to be a string"); } if (!Array.isArray(descriptors) || descriptors.some((desc) => !(desc instanceof Float32Array))) { throw new Error("LabeledFaceDescriptors - constructor expected descriptors to be an array of Float32Array"); } this._label = label; this._descriptors = descriptors; } get label() { return this._label; } get descriptors() { return this._descriptors; } toJSON() { return { label: this.label, descriptors: this.descriptors.map((d) => Array.from(d)) }; } static fromJSON(json) { const descriptors = json.descriptors.map((d) => { return new Float32Array(d); }); return new LabeledFaceDescriptors(json.label, descriptors); } } // src/classes/PredictedBox.ts class PredictedBox extends LabeledBox { static assertIsValidPredictedBox(box, callee) { LabeledBox.assertIsValidLabeledBox(box, callee); if 
(!isValidProbablitiy(box.score) || !isValidProbablitiy(box.classScore)) { throw new Error(`${callee} - expected properties score (${box.score}) and (${box.classScore}) to be a number between [0, 1]`); } } constructor(box, label, score, classScore) { super(box, label); this._score = score; this._classScore = classScore; } get score() { return this._score; } get classScore() { return this._classScore; } } // src/classes/index.ts // src/factories/WithFaceDetection.ts function isWithFaceDetection(obj) { return obj["detection"] instanceof FaceDetection; } function extendWithFaceDetection(sourceObj, detection) { const extension = {detection}; return Object.assign({}, sourceObj, extension); } // src/env/createBrowserEnv.ts function createBrowserEnv() { const fetch = window["fetch"] || function() { throw new Error("fetch - missing fetch implementation for browser environment"); }; const readFile = function() { throw new Error("readFile - filesystem not available for browser environment"); }; return { Canvas: HTMLCanvasElement, CanvasRenderingContext2D, Image: HTMLImageElement, ImageData, Video: HTMLVideoElement, createCanvasElement: () => document.createElement("canvas"), createImageElement: () => document.createElement("img"), fetch, readFile }; } // src/env/createFileSystem.ts function createFileSystem(fs) { let requireFsError = ""; if (!fs) { try { fs = require("fs"); } catch (err) { requireFsError = err.toString(); } } const readFile = fs ? function(filePath) { return new Promise((res, rej) => { fs.readFile(filePath, function(err, buffer) { return err ? 
rej(err) : res(buffer); }); }); } : function() { throw new Error(`readFile - failed to require fs in nodejs environment with error: ${requireFsError}`); }; return { readFile }; } // src/env/createNodejsEnv.ts function createNodejsEnv() { const Canvas = global["Canvas"] || global["HTMLCanvasElement"]; const Image = global["Image"] || global["HTMLImageElement"]; const createCanvasElement = function() { if (Canvas) { return new Canvas(); } throw new Error("createCanvasElement - missing Canvas implementation for nodejs environment"); }; const createImageElement = function() { if (Image) { return new Image(); } throw new Error("createImageElement - missing Image implementation for nodejs environment"); }; const fetch = global["fetch"] || function() { throw new Error("fetch - missing fetch implementation for nodejs environment"); }; const fileSystem = createFileSystem(); return { Canvas: Canvas || class { }, CanvasRenderingContext2D: global["CanvasRenderingContext2D"] || class { }, Image: Image || class { }, ImageData: global["ImageData"] || class { }, Video: global["HTMLVideoElement"] || class { }, createCanvasElement, createImageElement, fetch, ...fileSystem }; } // src/env/isBrowser.ts function isBrowser() { return typeof window === "object" && typeof document !== "undefined" && typeof HTMLImageElement !== "undefined" && typeof HTMLCanvasElement !== "undefined" && typeof HTMLVideoElement !== "undefined" && typeof ImageData !== "undefined" && typeof CanvasRenderingContext2D !== "undefined"; } // src/env/types.ts // src/env/index.ts const isNodejs = __toModule(require_isNodejs()); let environment; function getEnv() { if (!environment) { throw new Error("getEnv - environment is not defined, check isNodejs() and isBrowser()"); } return environment; } function setEnv(env16) { environment = env16; } function initialize() { if (isBrowser()) { return setEnv(createBrowserEnv()); } if (isNodejs.isNodejs()) { return setEnv(createNodejsEnv()); } } function monkeyPatch(env16) { if 
(!environment) { initialize(); } if (!environment) { throw new Error("monkeyPatch - environment is not defined, check isNodejs() and isBrowser()"); } const {Canvas = environment.Canvas, Image = environment.Image} = env16; environment.Canvas = Canvas; environment.Image = Image; environment.createCanvasElement = env16.createCanvasElement || (() => new Canvas()); environment.createImageElement = env16.createImageElement || (() => new Image()); environment.ImageData = env16.ImageData || environment.ImageData; environment.Video = env16.Video || environment.Video; environment.fetch = env16.fetch || environment.fetch; environment.readFile = env16.readFile || environment.readFile; } const env = { getEnv, setEnv, initialize, createBrowserEnv, createFileSystem, createNodejsEnv, monkeyPatch, isBrowser, isNodejs: isNodejs.isNodejs }; initialize(); // src/dom/resolveInput.ts function resolveInput(arg) { if (!env.isNodejs() && typeof arg === "string") { return document.getElementById(arg); } return arg; } // src/dom/getContext2dOrThrow.ts function getContext2dOrThrow(canvasArg) { const {Canvas, CanvasRenderingContext2D: CanvasRenderingContext2D2} = env.getEnv(); if (canvasArg instanceof CanvasRenderingContext2D2) { return canvasArg; } const canvas = resolveInput(canvasArg); if (!(canvas instanceof Canvas)) { throw new Error("resolveContext2d - expected canvas to be of instance of Canvas"); } const ctx = canvas.getContext("2d"); if (!ctx) { throw new Error("resolveContext2d - canvas 2d context is null"); } return ctx; } // src/draw/DrawTextField.ts var AnchorPosition; (function(AnchorPosition2) { AnchorPosition2["TOP_LEFT"] = "TOP_LEFT"; AnchorPosition2["TOP_RIGHT"] = "TOP_RIGHT"; AnchorPosition2["BOTTOM_LEFT"] = "BOTTOM_LEFT"; AnchorPosition2["BOTTOM_RIGHT"] = "BOTTOM_RIGHT"; })(AnchorPosition || (AnchorPosition = {})); class DrawTextFieldOptions { constructor(options = {}) { const {anchorPosition, backgroundColor, fontColor, fontSize, fontStyle, padding} = options; 
this.anchorPosition = anchorPosition || AnchorPosition.TOP_LEFT; this.backgroundColor = backgroundColor || "rgba(0, 0, 0, 0.5)"; this.fontColor = fontColor || "rgba(255, 255, 255, 1)"; this.fontSize = fontSize || 14; this.fontStyle = fontStyle || "Georgia"; this.padding = padding || 4; } } class DrawTextField { constructor(text, anchor, options = {}) { this.text = typeof text === "string" ? [text] : text instanceof DrawTextField ? text.text : text; this.anchor = anchor; this.options = new DrawTextFieldOptions(options); } measureWidth(ctx) { const {padding} = this.options; return this.text.map((l) => ctx.measureText(l).width).reduce((w0, w1) => w0 < w1 ? w1 : w0, 0) + 2 * padding; } measureHeight() { const {fontSize, padding} = this.options; return this.text.length * fontSize + 2 * padding; } getUpperLeft(ctx, canvasDims) { const {anchorPosition} = this.options; const isShiftLeft = anchorPosition === AnchorPosition.BOTTOM_RIGHT || anchorPosition === AnchorPosition.TOP_RIGHT; const isShiftTop = anchorPosition === AnchorPosition.BOTTOM_LEFT || anchorPosition === AnchorPosition.BOTTOM_RIGHT; const textFieldWidth = this.measureWidth(ctx); const textFieldHeight = this.measureHeight(); const x = isShiftLeft ? this.anchor.x - textFieldWidth : this.anchor.x; const y = isShiftTop ? 
this.anchor.y - textFieldHeight : this.anchor.y; if (canvasDims) { const {width, height} = canvasDims; const newX = Math.max(Math.min(x, width - textFieldWidth), 0); const newY = Math.max(Math.min(y, height - textFieldHeight), 0); return {x: newX, y: newY}; } return {x, y}; } draw(canvasArg) { const canvas = resolveInput(canvasArg); const ctx = getContext2dOrThrow(canvas); const {backgroundColor, fontColor, fontSize, fontStyle, padding} = this.options; ctx.font = `${fontSize}px ${fontStyle}`; const maxTextWidth = this.measureWidth(ctx); const textHeight = this.measureHeight(); ctx.fillStyle = backgroundColor; const upperLeft = this.getUpperLeft(ctx, canvas); ctx.fillRect(upperLeft.x, upperLeft.y, maxTextWidth, textHeight); ctx.fillStyle = fontColor; this.text.forEach((textLine, i) => { const x = padding + upperLeft.x; const y = padding + upperLeft.y + (i + 1) * fontSize; ctx.fillText(textLine, x, y); }); } } // src/draw/DrawBox.ts class DrawBoxOptions { constructor(options = {}) { const {boxColor, lineWidth, label, drawLabelOptions} = options; this.boxColor = boxColor || "rgba(0, 0, 255, 1)"; this.lineWidth = lineWidth || 2; this.label = label; const defaultDrawLabelOptions = { anchorPosition: AnchorPosition.BOTTOM_LEFT, backgroundColor: this.boxColor }; this.drawLabelOptions = new DrawTextFieldOptions(Object.assign({}, defaultDrawLabelOptions, drawLabelOptions)); } } class DrawBox { constructor(box, options = {}) { this.box = new Box(box); this.options = new DrawBoxOptions(options); } draw(canvasArg) { const ctx = getContext2dOrThrow(canvasArg); const {boxColor, lineWidth} = this.options; const {x, y, width, height} = this.box; ctx.strokeStyle = boxColor; ctx.lineWidth = lineWidth; ctx.strokeRect(x, y, width, height); const {label} = this.options; if (label) { new DrawTextField([label], {x: x - lineWidth / 2, y}, this.options.drawLabelOptions).draw(canvasArg); } } } // src/draw/drawDetections.ts function drawDetections(canvasArg, detections) { const 
detectionsArray = Array.isArray(detections) ? detections : [detections]; detectionsArray.forEach((det) => { const score = det instanceof FaceDetection ? det.score : isWithFaceDetection(det) ? det.detection.score : void 0; const box = det instanceof FaceDetection ? det.box : isWithFaceDetection(det) ? det.detection.box : new Box(det); const label = score ? `${round(score)}` : void 0; new DrawBox(box, {label}).draw(canvasArg); }); } // src/dom/isMediaLoaded.ts function isMediaLoaded(media) { const {Image, Video} = env.getEnv(); return media instanceof Image && media.complete || media instanceof Video && media.readyState >= 3; } // src/dom/awaitMediaLoaded.ts function awaitMediaLoaded(media) { return new Promise((resolve, reject) => { if (media instanceof env.getEnv().Canvas || isMediaLoaded(media)) { return resolve(null); } function onLoad(e) { if (!e.currentTarget) return; e.currentTarget.removeEventListener("load", onLoad); e.currentTarget.removeEventListener("error", onError); resolve(e); } function onError(e) { if (!e.currentTarget) return; e.currentTarget.removeEventListener("load", onLoad); e.currentTarget.removeEventListener("error", onError); reject(e); } media.addEventListener("load", onLoad); media.addEventListener("error", onError); }); } // src/dom/bufferToImage.ts function bufferToImage(buf) { return new Promise((resolve, reject) => { if (!(buf instanceof Blob)) { return reject("bufferToImage - expected buf to be of type: Blob"); } const reader = new FileReader(); reader.onload = () => { if (typeof reader.result !== "string") { return reject("bufferToImage - expected reader.result to be a string, in onload"); } const img = env.getEnv().createImageElement(); img.onload = () => resolve(img); img.onerror = reject; img.src = reader.result; }; reader.onerror = reject; reader.readAsDataURL(buf); }); } // src/dom/getMediaDimensions.ts function getMediaDimensions(input) { const {Image, Video} = env.getEnv(); if (input instanceof Image) { return new 
Dimensions(input.naturalWidth, input.naturalHeight); } if (input instanceof Video) { return new Dimensions(input.videoWidth, input.videoHeight); } return new Dimensions(input.width, input.height); } // src/dom/createCanvas.ts function createCanvas({width, height}) { const {createCanvasElement} = env.getEnv(); const canvas = createCanvasElement(); canvas.width = width; canvas.height = height; return canvas; } function createCanvasFromMedia(media, dims) { const {ImageData: ImageData2} = env.getEnv(); if (!(media instanceof ImageData2) && !isMediaLoaded(media)) { throw new Error("createCanvasFromMedia - media has not finished loading yet"); } const {width, height} = dims || getMediaDimensions(media); const canvas = createCanvas({width, height}); if (media instanceof ImageData2) { getContext2dOrThrow(canvas).putImageData(media, 0, 0); } else { getContext2dOrThrow(canvas).drawImage(media, 0, 0, width, height); } return canvas; } // src/dom/imageTensorToCanvas.ts const tf4 = __toModule(require("@tensorflow/tfjs/dist/tf.es2017.js")); async function imageTensorToCanvas(imgTensor, canvas) { const targetCanvas = canvas || env.getEnv().createCanvasElement(); const [height, width, numChannels] = imgTensor.shape.slice(isTensor4D(imgTensor) ? 
1 : 0); const imgTensor3D = tf4.tidy(() => imgTensor.as3D(height, width, numChannels).toInt()); await tf4.browser.toPixels(imgTensor3D, targetCanvas); imgTensor3D.dispose(); return targetCanvas; } // src/dom/isMediaElement.ts function isMediaElement(input) { const {Image, Canvas, Video} = env.getEnv(); return input instanceof Image || input instanceof Canvas || input instanceof Video; } // src/dom/imageToSquare.ts function imageToSquare(input, inputSize, centerImage = false) { const {Image, Canvas} = env.getEnv(); if (!(input instanceof Image || input instanceof Canvas)) { throw new Error("imageToSquare - expected arg0 to be HTMLImageElement | HTMLCanvasElement"); } const dims = getMediaDimensions(input); const scale2 = inputSize / Math.max(dims.height, dims.width); const width = scale2 * dims.width; const height = scale2 * dims.height; const targetCanvas = createCanvas({width: inputSize, height: inputSize}); const inputCanvas = input instanceof Canvas ? input : createCanvasFromMedia(input); const offset = Math.abs(width - height) / 2; const dx = centerImage && width < height ? offset : 0; const dy = centerImage && height < width ? 
offset : 0; getContext2dOrThrow(targetCanvas).drawImage(inputCanvas, dx, dy, width, height); return targetCanvas; } // src/dom/NetInput.ts const tf5 = __toModule(require("@tensorflow/tfjs-core")); class NetInput { constructor(inputs, treatAsBatchInput = false) { this._imageTensors = []; this._canvases = []; this._treatAsBatchInput = false; this._inputDimensions = []; if (!Array.isArray(inputs)) { throw new Error(`NetInput.constructor - expected inputs to be an Array of TResolvedNetInput or to be instanceof tf.Tensor4D, instead have ${inputs}`); } this._treatAsBatchInput = treatAsBatchInput; this._batchSize = inputs.length; inputs.forEach((input, idx) => { if (isTensor3D(input)) { this._imageTensors[idx] = input; this._inputDimensions[idx] = input.shape; return; } if (isTensor4D(input)) { const batchSize = input.shape[0]; if (batchSize !== 1) { throw new Error(`NetInput - tf.Tensor4D with batchSize ${batchSize} passed, but not supported in input array`); } this._imageTensors[idx] = input; this._inputDimensions[idx] = input.shape.slice(1); return; } const canvas = input instanceof env.getEnv().Canvas ? 
input : createCanvasFromMedia(input); // (completes the canvas selection begun on the previous line)
      this._canvases[idx] = canvas;
      this._inputDimensions[idx] = [canvas.height, canvas.width, 3]; // channel count for canvases is hard-coded to 3
    });
  }
  get imageTensors() {
    return this._imageTensors;
  }
  get canvases() {
    return this._canvases;
  }
  // A batch is either more than one input, or a single input explicitly
  // flagged as a batch by the caller.
  get isBatchInput() {
    return this.batchSize > 1 || this._treatAsBatchInput;
  }
  get batchSize() {
    return this._batchSize;
  }
  get inputDimensions() {
    return this._inputDimensions;
  }
  // Set by toBatchTensor(); undefined until that has been called.
  get inputSize() {
    return this._inputSize;
  }
  get reshapedInputDimensions() {
    return range(this.batchSize, 0, 1).map((_, batchIdx) => this.getReshapedInputDimensions(batchIdx));
  }
  // Prefers the canvas for this index when one exists, otherwise the tensor.
  getInput(batchIdx) {
    return this.canvases[batchIdx] || this.imageTensors[batchIdx];
  }
  getInputDimensions(batchIdx) {
    return this._inputDimensions[batchIdx];
  }
  getInputHeight(batchIdx) {
    return this._inputDimensions[batchIdx][0];
  }
  getInputWidth(batchIdx) {
    return this._inputDimensions[batchIdx][1];
  }
  getReshapedInputDimensions(batchIdx) {
    if (typeof this.inputSize !== "number") {
      throw new Error("getReshapedInputDimensions - inputSize not set, toBatchTensor has not been called yet");
    }
    const width = this.getInputWidth(batchIdx);
    const height = this.getInputHeight(batchIdx);
    return computeReshapedDimensions({width, height}, this.inputSize);
  }
  // Builds a single [batchSize, inputSize, inputSize, 3] float32 tensor from
  // all inputs, padding each to a square (centered when isCenterInputs) and
  // resizing to inputSize where necessary.
  toBatchTensor(inputSize, isCenterInputs = true) {
    this._inputSize = inputSize;
    return tf5.tidy(() => {
      const inputTensors = range(this.batchSize, 0, 1).map((batchIdx) => {
        const input = this.getInput(batchIdx);
        if (input instanceof tf5.Tensor) {
          let imgTensor = isTensor4D(input) ?
input : input.expandDims(); // (ensure a 4D tensor before padToSquare/resize; completes the ternary begun above)
          imgTensor = padToSquare(imgTensor, isCenterInputs);
          if (imgTensor.shape[1] !== inputSize || imgTensor.shape[2] !== inputSize) {
            imgTensor = tf5.image.resizeBilinear(imgTensor, [inputSize, inputSize]);
          }
          return imgTensor.as3D(inputSize, inputSize, 3);
        }
        if (input instanceof env.getEnv().Canvas) {
          return tf5.browser.fromPixels(imageToSquare(input, inputSize, isCenterInputs));
        }
        throw new Error(`toBatchTensor - at batchIdx ${batchIdx}, expected input to be instanceof tf.Tensor or instanceof HTMLCanvasElement, instead have ${input}`);
      });
      // Stack the per-image 3D tensors into one float32 4D batch tensor.
      const batchTensor = tf5.stack(inputTensors.map((t) => tf5.cast(t, "float32"))).as4D(this.batchSize, inputSize, inputSize, 3);
      return batchTensor;
    });
  }
}

// src/dom/toNetInput.ts
// Validates and resolves arbitrary user input(s) into a NetInput. Accepts an
// existing NetInput (returned unchanged), a single input, or an input array.
async function toNetInput(inputs) {
  if (inputs instanceof NetInput) {
    return inputs;
  }
  let inputArgArray = Array.isArray(inputs) ? inputs : [inputs];
  if (!inputArgArray.length) {
    throw new Error("toNetInput - empty array passed as input");
  }
  // Index hint for error messages — only meaningful when the caller passed an array.
  const getIdxHint = (idx) => Array.isArray(inputs) ?
` at input index ${idx}:` : ""; // (completes getIdxHint begun on the previous line)
  const inputArray = inputArgArray.map(resolveInput);
  inputArray.forEach((input, i) => {
    if (!isMediaElement(input) && !isTensor3D(input) && !isTensor4D(input)) {
      // A string argument means an element id that resolveInput failed to find.
      if (typeof inputArgArray[i] === "string") {
        throw new Error(`toNetInput -${getIdxHint(i)} string passed, but could not resolve HTMLElement for element id ${inputArgArray[i]}`);
      }
      throw new Error(`toNetInput -${getIdxHint(i)} expected media to be of type HTMLImageElement | HTMLVideoElement | HTMLCanvasElement | tf.Tensor3D, or to be an element id`);
    }
    if (isTensor4D(input)) {
      // Only singleton-batch 4D tensors are supported inside an input array.
      const batchSize = input.shape[0];
      if (batchSize !== 1) {
        throw new Error(`toNetInput -${getIdxHint(i)} tf.Tensor4D with batchSize ${batchSize} passed, but not supported in input array`);
      }
    }
  });
  // Wait for any media elements to finish loading before wrapping them.
  await Promise.all(inputArray.map((input) => isMediaElement(input) && awaitMediaLoaded(input)));
  // Treat as a batch exactly when the caller passed an array.
  return new NetInput(inputArray, Array.isArray(inputs));
}

// src/dom/extractFaces.ts
// Crops each detected face region out of `input` into its own canvas.
// `detections` may hold FaceDetection objects (rescaled to the canvas size
// and floored to pixel coordinates) or plain boxes.
async function extractFaces(input, detections) {
  const {Canvas} = env.getEnv();
  let canvas = input;
  if (!(input instanceof Canvas)) {
    // Non-canvas input: resolve it and render it to a canvas first.
    const netInput = await toNetInput(input);
    if (netInput.batchSize > 1) {
      throw new Error("extractFaces - batchSize > 1 not supported");
    }
    const tensorOrCanvas = netInput.getInput(0);
    canvas = tensorOrCanvas instanceof Canvas ? tensorOrCanvas : await imageTensorToCanvas(tensorOrCanvas);
  }
  const ctx = getContext2dOrThrow(canvas);
  const boxes = detections.map((det) => det instanceof FaceDetection ?
det.forSize(canvas.width, canvas.height).box.floor() : det).map((box) => box.clipAtImageBorders(canvas.width, canvas.height)); // (completes the detections mapping begun above; boxes are clipped to the canvas)
  // Copy each clipped box region into its own canvas via ImageData.
  return boxes.map(({x, y, width, height}) => {
    const faceImg = createCanvas({width, height});
    getContext2dOrThrow(faceImg).putImageData(ctx.getImageData(x, y, width, height), 0, 0);
    return faceImg;
  });
}

// src/dom/extractFaceTensors.ts
const tf6 = __toModule(require("@tensorflow/tfjs/dist/tf.es2017.js"));
// Tensor counterpart of extractFaces: slices each detection box out of a 3D
// (or singleton-batch 4D) image tensor. Caller owns the returned tensors.
async function extractFaceTensors(imageTensor, detections) {
  if (!isTensor3D(imageTensor) && !isTensor4D(imageTensor)) {
    throw new Error("extractFaceTensors - expected image tensor to be 3D or 4D");
  }
  if (isTensor4D(imageTensor) && imageTensor.shape[0] > 1) {
    throw new Error("extractFaceTensors - batchSize > 1 not supported");
  }
  return tf6.tidy(() => {
    // Skip the batch dimension when the input is 4D.
    const [imgHeight, imgWidth, numChannels] = imageTensor.shape.slice(isTensor4D(imageTensor) ? 1 : 0);
    const boxes = detections.map((det) => det instanceof FaceDetection ? det.forSize(imgWidth, imgHeight).box : det).map((box) => box.clipAtImageBorders(imgWidth, imgHeight));
    const faceTensors = boxes.map(({x, y, width, height}) => tf6.slice3d(imageTensor.as3D(imgHeight, imgWidth, numChannels), [y, x, 0], [height, width, numChannels]));
    return faceTensors;
  });
}

// src/dom/fetchOrThrow.ts
// fetch() wrapper that rejects on any HTTP status >= 400.
async function fetchOrThrow(url, init) {
  const fetch = env.getEnv().fetch;
  const res = await fetch(url, init);
  if (!(res.status < 400)) {
    throw new Error(`failed to fetch: (${res.status}) ${res.statusText}, from url: ${res.url}`);
  }
  return res;
}

// src/dom/fetchImage.ts
// Fetches `uri` and decodes the response blob into an image; rejects when the
// response is not an image/* blob.
async function fetchImage(uri) {
  const res = await fetchOrThrow(uri);
  const blob = await res.blob();
  if (!blob.type.startsWith("image/")) {
    throw new Error(`fetchImage - expected blob type to be of type image/*, instead have: ${blob.type}, for url: ${res.url}`);
  }
  return bufferToImage(blob);
}

// src/dom/fetchJson.ts
// Fetches `uri` and parses the response body as JSON.
async function fetchJson(uri) {
  return (await fetchOrThrow(uri)).json();
}

// src/dom/fetchNetWeights.ts
// Fetches raw model weights as a Float32Array (name/body continue below).
async function
fetchNetWeights(uri) { // (name and body of the async function declared at the end of the previous line)
  // Expose the raw response bytes as float32 weight data.
  return new Float32Array(await (await fetchOrThrow(uri)).arrayBuffer());
}

// src/common/getModelUris.ts
// Derives {modelBaseUri, manifestUri} from a user-supplied uri, which may be
// empty (use defaults), "/", a directory, or a direct path to a *.json
// manifest file (absolute, relative, or http(s)://).
function getModelUris(uri, defaultModelName) {
  const defaultManifestFilename = `${defaultModelName}-weights_manifest.json`;
  if (!uri) {
    return {
      modelBaseUri: "",
      manifestUri: defaultManifestFilename
    };
  }
  if (uri === "/") {
    return {
      modelBaseUri: "/",
      manifestUri: `/${defaultManifestFilename}`
    };
  }
  // Strip the protocol so the path splitting below only sees path segments.
  const protocol = uri.startsWith("http://") ? "http://" : uri.startsWith("https://") ? "https://" : "";
  uri = uri.replace(protocol, "");
  const parts = uri.split("/").filter((s) => s);
  const manifestFile = uri.endsWith(".json") ? parts[parts.length - 1] : defaultManifestFilename;
  // When uri points at a manifest file, its parent directory is the base uri.
  let modelBaseUri = protocol + (uri.endsWith(".json") ? parts.slice(0, parts.length - 1) : parts).join("/");
  modelBaseUri = uri.startsWith("/") ? `/${modelBaseUri}` : modelBaseUri;
  return {
    modelBaseUri,
    manifestUri: modelBaseUri === "/" ? `/${manifestFile}` : `${modelBaseUri}/${manifestFile}`
  };
}

// src/dom/loadWeightMap.ts
const tf7 = __toModule(require("@tensorflow/tfjs/dist/tf.es2017.js"));
// Loads a tf.js weight map for a model given its uri (or the default name).
async function loadWeightMap(uri, defaultModelName) {
  const {manifestUri, modelBaseUri} = getModelUris(uri, defaultModelName);
  let manifest = await fetchJson(manifestUri);
  return tf7.io.loadWeights(manifest, modelBaseUri);
}

// src/dom/matchDimensions.ts
// Copies the reference's dimensions onto `input` (e.g. sizing an overlay
// canvas to a video); reads media dimensions when useMediaDimensions is set.
function matchDimensions(input, reference, useMediaDimensions = false) {
  const {width, height} = useMediaDimensions ?
getMediaDimensions(reference) : reference; // (completes the dimension-source selection begun on the previous line)
  input.width = width;
  input.height = height;
  return {width, height};
}

// src/dom/types.ts
// src/dom/index.ts
// src/NeuralNetwork.ts
const tf8 = __toModule(require("@tensorflow/tfjs/dist/tf.es2017.js"));
// Base class for all models in this bundle: owns the parameter tensors, a
// path -> tensor mapping, and the load/dispose/serialize plumbing.
class NeuralNetwork {
  constructor(_name) {
    this._name = _name;
    this._params = void 0;
    this._paramMappings = [];
  }
  get params() {
    return this._params;
  }
  get paramMappings() {
    return this._paramMappings;
  }
  get isLoaded() {
    return !!this.params;
  }
  getParamFromPath(paramPath) {
    const {obj, objProp} = this.traversePropertyPath(paramPath);
    return obj[objProp];
  }
  // Disposes the tensor currently stored at paramPath and replaces it.
  reassignParamFromPath(paramPath, tensor2) {
    const {obj, objProp} = this.traversePropertyPath(paramPath);
    obj[objProp].dispose();
    obj[objProp] = tensor2;
  }
  getParamList() {
    return this._paramMappings.map(({paramPath}) => ({
      path: paramPath,
      tensor: this.getParamFromPath(paramPath)
    }));
  }
  getTrainableParams() {
    return this.getParamList().filter((param) => param.tensor instanceof tf8.Variable);
  }
  getFrozenParams() {
    return this.getParamList().filter((param) => !(param.tensor instanceof tf8.Variable));
  }
  // Converts all frozen params into tf.Variables (makes them trainable).
  variable() {
    this.getFrozenParams().forEach(({path, tensor: tensor2}) => {
      this.reassignParamFromPath(path, tensor2.variable());
    });
  }
  // Converts all trainable variables back into plain tensors.
  freeze() {
    this.getTrainableParams().forEach(({path, tensor: variable}) => {
      const tensor2 = tf8.tensor(variable.dataSync());
      variable.dispose();
      this.reassignParamFromPath(path, tensor2);
    });
  }
  // Disposes all parameter tensors; throwOnRedispose guards double-dispose.
  dispose(throwOnRedispose = true) {
    this.getParamList().forEach((param) => {
      if (throwOnRedispose && param.tensor.isDisposed) {
        throw new Error(`param tensor has already been disposed for path ${param.path}`);
      }
      param.tensor.dispose();
    });
    this._params = void 0;
  }
  // Flattens every parameter tensor's data into one Float32Array.
  serializeParams() {
    return new Float32Array(this.getParamList().map(({tensor: tensor2}) => Array.from(tensor2.dataSync())).reduce((flat, arr) => flat.concat(arr)));
  }
  // Loads either from a raw Float32Array of weights or from a uri.
  async load(weightsOrUrl) {
    if (weightsOrUrl instanceof Float32Array) {
      this.extractWeights(weightsOrUrl);
      return;
    }
    await
this.loadFromUri(weightsOrUrl); // (operand of the `await` ending the previous line)
  }
  async loadFromUri(uri) {
    if (uri && typeof uri !== "string") {
      throw new Error(`${this._name}.loadFromUri - expected model uri`);
    }
    const weightMap = await loadWeightMap(uri, this.getDefaultModelName());
    this.loadFromWeightMap(weightMap);
  }
  // Disk-based loading: reads manifest + weight shards via the env-provided
  // readFile (presumably the Node.js path — confirm against env setup).
  async loadFromDisk(filePath) {
    if (filePath && typeof filePath !== "string") {
      throw new Error(`${this._name}.loadFromDisk - expected model file path`);
    }
    const {readFile} = env.getEnv();
    const {manifestUri, modelBaseUri} = getModelUris(filePath, this.getDefaultModelName());
    const fetchWeightsFromDisk = (filePaths) => Promise.all(filePaths.map((filePath2) => readFile(filePath2).then((buf) => buf.buffer)));
    const loadWeights = tf8.io.weightsLoaderFactory(fetchWeightsFromDisk);
    const manifest = JSON.parse((await readFile(manifestUri)).toString());
    const weightMap = await loadWeights(manifest, modelBaseUri);
    this.loadFromWeightMap(weightMap);
  }
  // NOTE(review): extractParamsFromWeigthMap is misspelled ("Weigth") but is a
  // public identifier implemented by subclasses — do not rename in the bundle.
  loadFromWeightMap(weightMap) {
    const {
      paramMappings,
      params
    } = this.extractParamsFromWeigthMap(weightMap);
    this._paramMappings = paramMappings;
    this._params = params;
  }
  extractWeights(weights) {
    const {
      paramMappings,
      params
    } = this.extractParams(weights);
    this._paramMappings = paramMappings;
    this._params = params;
  }
  // Walks an "a/b/c" path through this.params, returning the parent object and
  // final property name; throws when the path is missing or the final value is
  // not a tensor.
  traversePropertyPath(paramPath) {
    if (!this.params) {
      throw new Error(`traversePropertyPath - model has no loaded params`);
    }
    const result = paramPath.split("/").reduce((res, objProp2) => {
      if (!res.nextObj.hasOwnProperty(objProp2)) {
        throw new Error(`traversePropertyPath - object does not have property ${objProp2}, for path ${paramPath}`);
      }
      return {obj: res.nextObj, objProp: objProp2, nextObj: res.nextObj[objProp2]};
    }, {nextObj: this.params});
    const {obj, objProp} = result;
    if (!obj || !objProp || !(obj[objProp] instanceof tf8.Tensor)) {
      throw new Error(`traversePropertyPath - parameter is not a tensor, for path ${paramPath}`);
    }
    return {obj, objProp};
  }
}

// src/common/depthwiseSeparableConv.ts
const tf9 =
__toModule(require("@tensorflow/tfjs/dist/tf.es2017.js")); function depthwiseSeparableConv(x, params, stride) { return tf9.tidy(() => { let out = tf9.separableConv2d(x, params.depthwise_filter, params.pointwise_filter, stride, "same"); out = tf9.add(out, params.bias); return out; }); } // src/faceFeatureExtractor/denseBlock.ts const tf10 = __toModule(require("@tensorflow/tfjs/dist/tf.es2017.js")); function denseBlock3(x, denseBlockParams, isFirstLayer = false) { return tf10.tidy(() => { const out1 = tf10.relu(isFirstLayer ? tf10.add(tf10.conv2d(x, denseBlockParams.conv0.filters, [2, 2], "same"), denseBlockParams.conv0.bias) : depthwiseSeparableConv(x, denseBlockParams.conv0, [2, 2])); const out2 = depthwiseSeparableConv(out1, denseBlockParams.conv1, [1, 1]); const in3 = tf10.relu(tf10.add(out1, out2)); const out3 = depthwiseSeparableConv(in3, denseBlockParams.conv2, [1, 1]); return tf10.relu(tf10.add(out1, tf10.add(out2, out3))); }); } function denseBlock4(x, denseBlockParams, isFirstLayer = false, isScaleDown = true) { return tf10.tidy(() => { const out1 = tf10.relu(isFirstLayer ? tf10.add(tf10.conv2d(x, denseBlockParams.conv0.filters, isScaleDown ? [2, 2] : [1, 1], "same"), denseBlockParams.conv0.bias) : depthwiseSeparableConv(x, denseBlockParams.conv0, isScaleDown ? [2, 2] : [1, 1])); const out2 = depthwiseSeparableConv(out1, denseBlockParams.conv1, [1, 1]); const in3 = tf10.relu(tf10.add(out1, out2)); const out3 = depthwiseSeparableConv(in3, denseBlockParams.conv2, [1, 1]); const in4 = tf10.relu(tf10.add(out1, tf10.add(out2, out3))); const out4 = depthwiseSeparableConv(in4, denseBlockParams.conv3,