@vladmandic/face-api
Version:
FaceAPI: AI-powered Face Detection & Rotation Tracking, Face Description & Recognition, Age & Gender & Emotion Prediction for Browser and NodeJS using TensorFlow/JS
1,528 lines (1,471 loc) • 175 kB
JavaScript
/*
Face-API
homepage: <https://github.com/vladmandic/face-api>
author: <https://github.com/vladmandic>
*/
// Cached Object intrinsics used by the esbuild-generated module interop
// helpers below (__commonJS / __export / __reExport / __toModule).
var __create = Object.create;
var __defProp = Object.defineProperty;
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
var __getOwnPropNames = Object.getOwnPropertyNames;
var __getProtoOf = Object.getPrototypeOf;
var __hasOwnProp = Object.prototype.hasOwnProperty;
// Brands `target` as a transpiled ES module for downstream interop checks.
var __markAsModule = (target) => {
  return __defProp(target, "__esModule", { value: true });
};
// Wraps a CommonJS module factory so it is evaluated lazily and exactly
// once: the first call invokes the single factory in `cb`, which populates
// `mod.exports`; subsequent calls return the cached `mod.exports`.
var __commonJS = (cb, mod) => function __require() {
  return mod || (0, cb[Object.keys(cb)[0]])((mod = { exports: {} }).exports, mod), mod.exports;
};
// Brands `target` as an ES module, then exposes every entry of `all` on it
// as a lazy, enumerable getter.
var __export = (target, all) => {
  __markAsModule(target);
  for (var name in all) {
    __defProp(target, name, { enumerable: true, get: all[name] });
  }
};
// Copies every own property of `module2` onto `target` as a getter,
// skipping `default` and keys already present on `target`. The third
// parameter `desc` is only a scratch slot: it is (re)assigned inline with
// the source property descriptor to preserve enumerability.
var __reExport = (target, module2, desc) => {
  if (module2 && typeof module2 === "object" || typeof module2 === "function") {
    for (let key of __getOwnPropNames(module2))
      if (!__hasOwnProp.call(target, key) && key !== "default")
        __defProp(target, key, { get: () => module2[key], enumerable: !(desc = __getOwnPropDesc(module2, key)) || desc.enumerable });
  }
  return target;
};
// Converts a CommonJS exports object into an ES-module namespace object:
// all named properties are re-exported, and a `default` binding is
// synthesized — a getter for `module2.default` when the source is already
// an ES module, otherwise the CommonJS exports object itself.
var __toModule = (module2) => {
  return __reExport(__markAsModule(__defProp(module2 != null ? __create(__getProtoOf(module2)) : {}, "default", module2 && module2.__esModule && "default" in module2 ? { get: () => module2.default, enumerable: true } : { value: module2, enumerable: true })), module2);
};
// dist/tfjs.esm.js
// Lazily-evaluated CommonJS wrapper around the TensorFlow.js dependency.
// The inner helper functions duplicate the top-level interop shims because
// this sub-bundle (dist/tfjs.esm.js) was generated independently.
var require_tfjs_esm = __commonJS({
  "dist/tfjs.esm.js"(exports) {
    var __create2 = Object.create;
    var __defProp2 = Object.defineProperty;
    var __getOwnPropDesc2 = Object.getOwnPropertyDescriptor;
    var __getOwnPropNames2 = Object.getOwnPropertyNames;
    var __getProtoOf2 = Object.getPrototypeOf;
    var __hasOwnProp2 = Object.prototype.hasOwnProperty;
    var __markAsModule2 = (target) => __defProp2(target, "__esModule", { value: true });
    var __reExport2 = (target, module22, desc) => {
      if (module22 && typeof module22 === "object" || typeof module22 === "function") {
        for (let key of __getOwnPropNames2(module22))
          if (!__hasOwnProp2.call(target, key) && key !== "default")
            __defProp2(target, key, { get: () => module22[key], enumerable: !(desc = __getOwnPropDesc2(module22, key)) || desc.enumerable });
      }
      return target;
    };
    var __toModule2 = (module22) => {
      return __reExport2(__markAsModule2(__defProp2(module22 != null ? __create2(__getProtoOf2(module22)) : {}, "default", module22 && module22.__esModule && "default" in module22 ? { get: () => module22.default, enumerable: true } : { value: module22, enumerable: true })), module22);
    };
    // Re-export the entire @tensorflow/tfjs namespace through `exports`.
    __markAsModule2(exports);
    __reExport2(exports, __toModule2(require("@tensorflow/tfjs")));
  }
});
// src/index.ts
// Public API surface of the bundle: every export is installed as a lazy
// getter that resolves against the definitions appearing later in the file.
__export(exports, {
  AgeGenderNet: () => AgeGenderNet,
  BoundingBox: () => BoundingBox,
  Box: () => Box,
  ComposableTask: () => ComposableTask,
  ComputeAllFaceDescriptorsTask: () => ComputeAllFaceDescriptorsTask,
  ComputeFaceDescriptorsTaskBase: () => ComputeFaceDescriptorsTaskBase,
  ComputeSingleFaceDescriptorTask: () => ComputeSingleFaceDescriptorTask,
  DetectAllFaceLandmarksTask: () => DetectAllFaceLandmarksTask,
  DetectAllFacesTask: () => DetectAllFacesTask,
  DetectFaceLandmarksTaskBase: () => DetectFaceLandmarksTaskBase,
  DetectFacesTaskBase: () => DetectFacesTaskBase,
  DetectSingleFaceLandmarksTask: () => DetectSingleFaceLandmarksTask,
  DetectSingleFaceTask: () => DetectSingleFaceTask,
  Dimensions: () => Dimensions,
  FACE_EXPRESSION_LABELS: () => FACE_EXPRESSION_LABELS,
  FaceDetection: () => FaceDetection,
  FaceDetectionNet: () => FaceDetectionNet,
  FaceExpressionNet: () => FaceExpressionNet,
  FaceExpressions: () => FaceExpressions,
  FaceLandmark68Net: () => FaceLandmark68Net,
  FaceLandmark68TinyNet: () => FaceLandmark68TinyNet,
  FaceLandmarkNet: () => FaceLandmarkNet,
  FaceLandmarks: () => FaceLandmarks,
  FaceLandmarks5: () => FaceLandmarks5,
  FaceLandmarks68: () => FaceLandmarks68,
  FaceMatch: () => FaceMatch,
  FaceMatcher: () => FaceMatcher,
  FaceRecognitionNet: () => FaceRecognitionNet,
  Gender: () => Gender,
  LabeledBox: () => LabeledBox,
  LabeledFaceDescriptors: () => LabeledFaceDescriptors,
  NetInput: () => NetInput,
  NeuralNetwork: () => NeuralNetwork,
  ObjectDetection: () => ObjectDetection,
  Point: () => Point,
  PredictedBox: () => PredictedBox,
  Rect: () => Rect,
  SsdMobilenetv1: () => SsdMobilenetv1,
  SsdMobilenetv1Options: () => SsdMobilenetv1Options,
  TinyFaceDetector: () => TinyFaceDetector,
  TinyFaceDetectorOptions: () => TinyFaceDetectorOptions,
  TinyYolov2: () => TinyYolov2,
  TinyYolov2Options: () => TinyYolov2Options,
  allFaces: () => allFaces,
  allFacesSsdMobilenetv1: () => allFacesSsdMobilenetv1,
  allFacesTinyYolov2: () => allFacesTinyYolov2,
  awaitMediaLoaded: () => awaitMediaLoaded,
  bufferToImage: () => bufferToImage,
  computeFaceDescriptor: () => computeFaceDescriptor,
  createCanvas: () => createCanvas,
  createCanvasFromMedia: () => createCanvasFromMedia,
  createFaceDetectionNet: () => createFaceDetectionNet,
  createFaceRecognitionNet: () => createFaceRecognitionNet,
  createSsdMobilenetv1: () => createSsdMobilenetv1,
  createTinyFaceDetector: () => createTinyFaceDetector,
  createTinyYolov2: () => createTinyYolov2,
  detectAllFaces: () => detectAllFaces,
  detectFaceLandmarks: () => detectFaceLandmarks,
  detectFaceLandmarksTiny: () => detectFaceLandmarksTiny,
  detectLandmarks: () => detectLandmarks,
  detectSingleFace: () => detectSingleFace,
  draw: () => draw_exports,
  env: () => env,
  euclideanDistance: () => euclideanDistance,
  extendWithAge: () => extendWithAge,
  extendWithFaceDescriptor: () => extendWithFaceDescriptor,
  extendWithFaceDetection: () => extendWithFaceDetection,
  extendWithFaceExpressions: () => extendWithFaceExpressions,
  extendWithFaceLandmarks: () => extendWithFaceLandmarks,
  extendWithGender: () => extendWithGender,
  extractFaceTensors: () => extractFaceTensors,
  extractFaces: () => extractFaces,
  fetchImage: () => fetchImage,
  fetchJson: () => fetchJson,
  fetchNetWeights: () => fetchNetWeights,
  fetchOrThrow: () => fetchOrThrow,
  fetchVideo: () => fetchVideo,
  getContext2dOrThrow: () => getContext2dOrThrow,
  getMediaDimensions: () => getMediaDimensions,
  imageTensorToCanvas: () => imageTensorToCanvas,
  imageToSquare: () => imageToSquare,
  inverseSigmoid: () => inverseSigmoid,
  iou: () => iou,
  isMediaElement: () => isMediaElement,
  isMediaLoaded: () => isMediaLoaded,
  isWithAge: () => isWithAge,
  isWithFaceDetection: () => isWithFaceDetection,
  isWithFaceExpressions: () => isWithFaceExpressions,
  isWithFaceLandmarks: () => isWithFaceLandmarks,
  isWithGender: () => isWithGender,
  loadAgeGenderModel: () => loadAgeGenderModel,
  loadFaceDetectionModel: () => loadFaceDetectionModel,
  loadFaceExpressionModel: () => loadFaceExpressionModel,
  loadFaceLandmarkModel: () => loadFaceLandmarkModel,
  loadFaceLandmarkTinyModel: () => loadFaceLandmarkTinyModel,
  loadFaceRecognitionModel: () => loadFaceRecognitionModel,
  loadSsdMobilenetv1Model: () => loadSsdMobilenetv1Model,
  loadTinyFaceDetectorModel: () => loadTinyFaceDetectorModel,
  loadTinyYolov2Model: () => loadTinyYolov2Model,
  loadWeightMap: () => loadWeightMap,
  locateFaces: () => locateFaces,
  matchDimensions: () => matchDimensions,
  minBbox: () => minBbox,
  nets: () => nets,
  nonMaxSuppression: () => nonMaxSuppression,
  normalize: () => normalize,
  padToSquare: () => padToSquare,
  predictAgeAndGender: () => predictAgeAndGender,
  recognizeFaceExpressions: () => recognizeFaceExpressions,
  resizeResults: () => resizeResults,
  resolveInput: () => resolveInput,
  shuffleArray: () => shuffleArray,
  sigmoid: () => sigmoid,
  ssdMobilenetv1: () => ssdMobilenetv1,
  tf: () => tf42,
  tinyFaceDetector: () => tinyFaceDetector,
  tinyYolov2: () => tinyYolov2,
  toNetInput: () => toNetInput,
  utils: () => utils_exports,
  validateConfig: () => validateConfig,
  version: () => version2
});
// TensorFlow.js namespace shared by all nets in this bundle (exported as `tf`).
var tf42 = __toModule(require_tfjs_esm());
// src/draw/index.ts
// Namespace object exposed as `faceapi.draw`; entries resolve lazily to the
// drawing classes and helpers defined later in the file.
var draw_exports = {};
__export(draw_exports, {
  AnchorPosition: () => AnchorPosition,
  DrawBox: () => DrawBox,
  DrawBoxOptions: () => DrawBoxOptions,
  DrawFaceLandmarks: () => DrawFaceLandmarks,
  DrawFaceLandmarksOptions: () => DrawFaceLandmarksOptions,
  DrawTextField: () => DrawTextField,
  DrawTextFieldOptions: () => DrawTextFieldOptions,
  drawContour: () => drawContour,
  drawDetections: () => drawDetections,
  drawFaceExpressions: () => drawFaceExpressions,
  drawFaceLandmarks: () => drawFaceLandmarks
});
// src/draw/drawContour.ts
// Strokes the polyline through `points` on a 2d canvas context; with
// isClosed=true the last point is additionally connected back to the first.
function drawContour(ctx, points, isClosed = false) {
  ctx.beginPath();
  for (let i = 1; i < points.length; i++) {
    const prev = points[i - 1];
    const curr = points[i];
    ctx.moveTo(prev.x, prev.y);
    ctx.lineTo(curr.x, curr.y);
  }
  if (isClosed) {
    const last = points[points.length - 1];
    const first = points[0];
    // Nothing to close on an empty point list; bail without stroking.
    if (!last || !first) {
      return;
    }
    ctx.moveTo(last.x, last.y);
    ctx.lineTo(first.x, first.y);
  }
  ctx.stroke();
}
// src/utils/index.ts
// Namespace object exposed as `faceapi.utils`.
var utils_exports = {};
__export(utils_exports, {
  computeReshapedDimensions: () => computeReshapedDimensions,
  getCenterPoint: () => getCenterPoint,
  isDimensions: () => isDimensions,
  isEven: () => isEven,
  isFloat: () => isFloat,
  isTensor: () => isTensor,
  isTensor1D: () => isTensor1D,
  isTensor2D: () => isTensor2D,
  isTensor3D: () => isTensor3D,
  isTensor4D: () => isTensor4D,
  isValidNumber: () => isValidNumber,
  isValidProbablitiy: () => isValidProbablitiy,
  range: () => range,
  round: () => round
});
// Local handle on the TensorFlow.js namespace used by the tensor helpers below.
var tf = __toModule(require_tfjs_esm());
// src/classes/Dimensions.ts
// Immutable width/height pair; both values must pass isValidNumber.
var Dimensions = class {
  constructor(width, height) {
    const bothValid = isValidNumber(width) && isValidNumber(height);
    if (!bothValid) {
      throw new Error(`Dimensions.constructor - expected width and height to be valid numbers, instead have ${JSON.stringify({ width, height })}`);
    }
    this._width = width;
    this._height = height;
  }
  get width() {
    return this._width;
  }
  get height() {
    return this._height;
  }
  // Reciprocal dimensions, used to map pixel coordinates back to [0, 1].
  reverse() {
    return new Dimensions(1 / this.width, 1 / this.height);
  }
};
// src/utils/index.ts
// True when `tensor2` is a tf.Tensor of exactly rank `dim`.
function isTensor(tensor2, dim) {
  if (!(tensor2 instanceof tf.Tensor)) {
    return false;
  }
  return tensor2.shape.length === dim;
}
// Rank-specific convenience wrappers around isTensor.
function isTensor1D(tensor2) {
  return isTensor(tensor2, 1);
}
function isTensor2D(tensor2) {
  return isTensor(tensor2, 2);
}
function isTensor3D(tensor2) {
  return isTensor(tensor2, 3);
}
function isTensor4D(tensor2) {
  return isTensor(tensor2, 4);
}
// True when `num` has a non-zero fractional part (also true for NaN).
function isFloat(num) {
  const fraction = num % 1;
  return fraction !== 0;
}
// True when `num` is evenly divisible by 2.
function isEven(num) {
  const remainder = num % 2;
  return remainder === 0;
}
// Truncates `num` to `prec` decimal places. Note: despite the name this
// floors rather than rounds (round(1.999) === 1.99), matching the original.
function round(num, prec = 2) {
  const scale = 10 ** prec;
  return Math.floor(num * scale) / scale;
}
// Truthy when `obj` has truthy `width` and `height` properties.
// NOTE(review): a width or height of 0 makes this falsy, so 0-sized dims are
// treated as "not dimensions" by callers such as Box.rescale — confirm this
// is intentional before tightening to a typeof/number check.
function isDimensions(obj) {
  return obj && obj.width && obj.height;
}
// Scales width x height so the longer side equals `inputSize`, preserving
// aspect ratio; the result is rounded to whole pixels.
function computeReshapedDimensions({ width, height }, inputSize) {
  const longerSide = Math.max(height, width);
  const scale2 = inputSize / longerSide;
  return new Dimensions(Math.round(width * scale2), Math.round(height * scale2));
}
// Centroid (arithmetic mean) of a list of points.
function getCenterPoint(pts) {
  const total = pts.reduce((sum, pt) => sum.add(pt), new Point(0, 0));
  return total.div(new Point(pts.length, pts.length));
}
// Arithmetic sequence of `num` values: start, start + step, start + 2*step, ...
function range(num, start, step) {
  return Array.from({ length: num }, (_, i) => start + i * step);
}
// True for finite numeric values including 0; false for NaN, +/-Infinity
// and nullish values.
function isValidNumber(num) {
  if (num === 0) {
    return true;
  }
  return !!num && num !== Infinity && num !== -Infinity && !Number.isNaN(num);
}
// A valid probability is a valid number within [0, 1].
// (The misspelled name is part of the public API and must remain.)
function isValidProbablitiy(num) {
  if (!isValidNumber(num)) {
    return false;
  }
  return num >= 0 && num <= 1;
}
// src/classes/Point.ts
// Immutable 2D point with elementwise arithmetic; every operation returns
// a new Point and leaves the operands untouched.
var Point = class {
  constructor(x, y) {
    this._x = x;
    this._y = y;
  }
  get x() { return this._x; }
  get y() { return this._y; }
  add(pt) { return new Point(this.x + pt.x, this.y + pt.y); }
  sub(pt) { return new Point(this.x - pt.x, this.y - pt.y); }
  // Elementwise (Hadamard) product / quotient, not dot product.
  mul(pt) { return new Point(this.x * pt.x, this.y * pt.y); }
  div(pt) { return new Point(this.x / pt.x, this.y / pt.y); }
  abs() { return new Point(Math.abs(this.x), Math.abs(this.y)); }
  // Euclidean length of the vector from the origin.
  magnitude() { return Math.sqrt(this.x ** 2 + this.y ** 2); }
  floor() { return new Point(Math.floor(this.x), Math.floor(this.y)); }
};
// src/classes/Box.ts
// Axis-aligned rectangle. Accepts either an IRect ({x, y, width, height})
// or an IBoundingBox ({left, top, right, bottom}) and normalizes to
// x/y/width/height internally. All operations return new Box instances.
var Box = class {
  // True when `rect` has numerically valid x/y/width/height.
  static isRect(rect) {
    return !!rect && [rect.x, rect.y, rect.width, rect.height].every(isValidNumber);
  }
  // Throws when `box` is not a valid rect, or (unless explicitly allowed)
  // when its width/height are negative.
  static assertIsValidBox(box, callee, allowNegativeDimensions = false) {
    if (!Box.isRect(box)) {
      throw new Error(`${callee} - invalid box: ${JSON.stringify(box)}, expected object with properties x, y, width, height`);
    }
    if (!allowNegativeDimensions && (box.width < 0 || box.height < 0)) {
      throw new Error(`${callee} - width (${box.width}) and height (${box.height}) must be positive numbers`);
    }
  }
  constructor(_box, allowNegativeDimensions = true) {
    const box = _box || {};
    // Detect which of the two supported shapes was passed.
    const isBbox = [box.left, box.top, box.right, box.bottom].every(isValidNumber);
    const isRect = [box.x, box.y, box.width, box.height].every(isValidNumber);
    if (!isRect && !isBbox) {
      throw new Error(`Box.constructor - expected box to be IBoundingBox | IRect, instead have ${JSON.stringify(box)}`);
    }
    const [x, y, width, height] = isRect ? [box.x, box.y, box.width, box.height] : [box.left, box.top, box.right - box.left, box.bottom - box.top];
    Box.assertIsValidBox({
      x,
      y,
      width,
      height
    }, "Box.constructor", allowNegativeDimensions);
    this._x = x;
    this._y = y;
    this._width = width;
    this._height = height;
  }
  get x() {
    return this._x;
  }
  get y() {
    return this._y;
  }
  get width() {
    return this._width;
  }
  get height() {
    return this._height;
  }
  get left() {
    return this.x;
  }
  get top() {
    return this.y;
  }
  get right() {
    return this.x + this.width;
  }
  get bottom() {
    return this.y + this.height;
  }
  get area() {
    return this.width * this.height;
  }
  get topLeft() {
    return new Point(this.left, this.top);
  }
  get topRight() {
    return new Point(this.right, this.top);
  }
  get bottomLeft() {
    return new Point(this.left, this.bottom);
  }
  get bottomRight() {
    return new Point(this.right, this.bottom);
  }
  // New box with all four components rounded to the nearest integer.
  round() {
    const [x, y, width, height] = [this.x, this.y, this.width, this.height].map((val) => Math.round(val));
    return new Box({
      x,
      y,
      width,
      height
    });
  }
  // New box with all four components floored.
  floor() {
    const [x, y, width, height] = [this.x, this.y, this.width, this.height].map((val) => Math.floor(val));
    return new Box({
      x,
      y,
      width,
      height
    });
  }
  // Expands the shorter dimension symmetrically so the box becomes square,
  // keeping the same center.
  toSquare() {
    let {
      x,
      y,
      width,
      height
    } = this;
    const diff = Math.abs(width - height);
    if (width < height) {
      x -= diff / 2;
      width += diff;
    }
    if (height < width) {
      y -= diff / 2;
      height += diff;
    }
    return new Box({ x, y, width, height });
  }
  // Scales by a Dimensions-like object (per-axis) or a single scalar.
  rescale(s) {
    const scaleX = isDimensions(s) ? s.width : s;
    const scaleY = isDimensions(s) ? s.height : s;
    return new Box({
      x: this.x * scaleX,
      y: this.y * scaleY,
      width: this.width * scaleX,
      height: this.height * scaleY
    });
  }
  // Grows the box by padX/padY in total, split evenly on both sides.
  pad(padX, padY) {
    const [x, y, width, height] = [
      this.x - padX / 2,
      this.y - padY / 2,
      this.width + padX,
      this.height + padY
    ];
    return new Box({
      x,
      y,
      width,
      height
    });
  }
  // Intersects the box with the image rectangle [0, imgWidth) x [0, imgHeight)
  // and floors the result.
  clipAtImageBorders(imgWidth, imgHeight) {
    const { x, y, right, bottom } = this;
    const clippedX = Math.max(x, 0);
    const clippedY = Math.max(y, 0);
    const newWidth = right - clippedX;
    const newHeight = bottom - clippedY;
    const clippedWidth = Math.min(newWidth, imgWidth - clippedX);
    const clippedHeight = Math.min(newHeight, imgHeight - clippedY);
    return new Box({
      x: clippedX,
      y: clippedY,
      width: clippedWidth,
      height: clippedHeight
    }).floor();
  }
  // Translates the box by (sx, sy) without changing its size.
  shift(sx, sy) {
    const { width, height } = this;
    const x = this.x + sx;
    const y = this.y + sy;
    return new Box({
      x,
      y,
      width,
      height
    });
  }
  // MTCNN-style padding bookkeeping: returns source (x..ex, y..ey) and
  // destination (dx..edx, dy..edy) crop ranges, 1-based, for extracting this
  // box from an imageWidth x imageHeight image.
  padAtBorders(imageHeight, imageWidth) {
    const w = this.width + 1;
    const h = this.height + 1;
    const dx = 1;
    const dy = 1;
    let edx = w;
    let edy = h;
    let x = this.left;
    let y = this.top;
    let ex = this.right;
    let ey = this.bottom;
    if (ex > imageWidth) {
      edx = -ex + imageWidth + w;
      ex = imageWidth;
    }
    if (ey > imageHeight) {
      edy = -ey + imageHeight + h;
      ey = imageHeight;
    }
    if (x < 1) {
      // NOTE(review): assigning `edy` here (and again in the y < 1 branch
      // below) looks like a copy/paste slip — the reference MTCNN `pad`
      // adjusts the x-related destination offset when x < 1. Preserved
      // as-is; confirm against the MTCNN reference before changing.
      edy = 2 - x;
      x = 1;
    }
    if (y < 1) {
      edy = 2 - y;
      y = 1;
    }
    return {
      dy,
      edy,
      dx,
      edx,
      y,
      ey,
      x,
      ex,
      w,
      h
    };
  }
  // Shifts each edge by a regression offset expressed relative to the box's
  // own width/height, then squares and rounds the result (MTCNN calibration).
  calibrate(region) {
    return new Box({
      left: this.left + region.left * this.width,
      top: this.top + region.top * this.height,
      right: this.right + region.right * this.width,
      bottom: this.bottom + region.bottom * this.height
    }).toSquare().round();
  }
};
// src/classes/BoundingBox.ts
// A Box expressed via its left/top/right/bottom edges.
var BoundingBox = class extends Box {
  constructor(left, top, right, bottom, allowNegativeDimensions = false) {
    super({ left, top, right, bottom }, allowNegativeDimensions);
  }
};
// src/classes/ObjectDetection.ts
// Detection result: confidence scores, class name, and a box stored in
// absolute pixel coordinates for the given image dimensions.
var ObjectDetection = class {
  constructor(score, classScore, className, relativeBox, imageDims) {
    this._imageDims = new Dimensions(imageDims.width, imageDims.height);
    this._score = score;
    this._classScore = classScore;
    this._className = className;
    // `relativeBox` arrives in [0, 1] coordinates; store it scaled to pixels.
    this._box = new Box(relativeBox).rescale(this._imageDims);
  }
  get score() { return this._score; }
  get classScore() { return this._classScore; }
  get className() { return this._className; }
  get box() { return this._box; }
  get imageDims() { return this._imageDims; }
  get imageWidth() { return this.imageDims.width; }
  get imageHeight() { return this.imageDims.height; }
  // Box mapped back to [0, 1] coordinates.
  get relativeBox() { return new Box(this._box).rescale(this.imageDims.reverse()); }
  // Same detection re-expressed for an image of a different size.
  forSize(width, height) {
    return new ObjectDetection(this.score, this.classScore, this.className, this.relativeBox, { width, height });
  }
};
// src/classes/FaceDetection.ts
// A face detection is an ObjectDetection with an empty class name and its
// classScore equal to the detection score.
var FaceDetection = class extends ObjectDetection {
  constructor(score, relativeBox, imageDims) {
    super(score, score, "", relativeBox, imageDims);
  }
  forSize(width, height) {
    const resized = super.forSize(width, height);
    return new FaceDetection(resized.score, resized.relativeBox, resized.imageDims);
  }
};
// src/ops/iou.ts
// Intersection-over-union of two boxes. With isIOU=false the intersection is
// instead divided by the smaller box's area (overlap coefficient).
function iou(box1, box2, isIOU = true) {
  const overlapW = Math.max(0, Math.min(box1.right, box2.right) - Math.max(box1.left, box2.left));
  const overlapH = Math.max(0, Math.min(box1.bottom, box2.bottom) - Math.max(box1.top, box2.top));
  const interSection = overlapW * overlapH;
  const denominator = isIOU ? box1.area + box2.area - interSection : Math.min(box1.area, box2.area);
  return interSection / denominator;
}
// src/ops/minBbox.ts
// Tightest axis-aligned bounding box containing all points in `pts`.
function minBbox(pts) {
  const xs = pts.map((pt) => pt.x);
  const ys = pts.map((pt) => pt.y);
  const minX = xs.reduce((min, x) => x < min ? x : min, Infinity);
  const minY = ys.reduce((min, y) => y < min ? y : min, Infinity);
  // Seed the max reductions with -Infinity (not 0): a 0 seed silently
  // clamped maxX/maxY to 0 for point sets with all-negative coordinates.
  const maxX = xs.reduce((max, x) => max < x ? x : max, -Infinity);
  const maxY = ys.reduce((max, y) => max < y ? y : max, -Infinity);
  return new BoundingBox(minX, minY, maxX, maxY);
}
// src/ops/nonMaxSuppression.ts
// Greedy non-max suppression: repeatedly keeps the highest-scoring remaining
// box and discards every other box whose iou/overlap with it exceeds
// `iouThreshold`. Returns the kept box indices in descending score order.
function nonMaxSuppression(boxes, scores, iouThreshold, isIOU = true) {
  let candidates = scores
    .map((score, boxIndex) => ({ score, boxIndex }))
    .sort((c1, c2) => c1.score - c2.score)
    .map((c) => c.boxIndex);
  const pick = [];
  while (candidates.length > 0) {
    // Highest score sits at the end of the ascending-sorted list.
    const best = candidates.pop();
    pick.push(best);
    const overlaps = candidates.map((idx) => iou(boxes[best], boxes[idx], isIOU));
    candidates = candidates.filter((_, i) => overlaps[i] <= iouThreshold);
  }
  return pick;
}
// src/ops/normalize.ts
var tf2 = __toModule(require_tfjs_esm());
// Subtracts the per-channel means in `meanRgb` ([r, g, b]) from tensor `x`,
// broadcasting each mean over the corresponding channel (axis 3).
function normalize(x, meanRgb) {
  return tf2.tidy(() => {
    const [r, g, b] = meanRgb;
    const channelShape = [...x.shape.slice(0, 3), 1];
    const channelMeans = [r, g, b].map((mean) => tf2.fill(channelShape, mean, "float32"));
    const avg_rgb = tf2.concat(channelMeans, 3);
    return tf2.sub(x, avg_rgb);
  });
}
// src/ops/padToSquare.ts
var tf3 = __toModule(require_tfjs_esm());
// Zero-pads an image tensor along its shorter spatial axis until
// height === width. Axis 1 is treated as height and axis 2 as width
// (assumes a batched NHWC-style layout — TODO confirm at call sites).
// With isCenterImage=true the padding is split before and after the image;
// otherwise it is all appended after.
function padToSquare(imgTensor, isCenterImage = false) {
  return tf3.tidy(() => {
    const [height, width] = imgTensor.shape.slice(1);
    if (height === width) {
      return imgTensor;
    }
    const dimDiff = Math.abs(height - width);
    // Pixels to append (half of the difference when centering).
    const paddingAmount = Math.round(dimDiff * (isCenterImage ? 0.5 : 1));
    // Pad the smaller of the two spatial axes.
    const paddingAxis = height > width ? 2 : 1;
    const createPaddingTensor = (paddingAmountLocal) => {
      const paddingTensorShape = imgTensor.shape.slice();
      paddingTensorShape[paddingAxis] = paddingAmountLocal;
      return tf3.fill(paddingTensorShape, 0, "float32");
    };
    const paddingTensorAppend = createPaddingTensor(paddingAmount);
    // Any remainder (odd diffs when centering) is prepended before the image.
    const remainingPaddingAmount = dimDiff - paddingTensorAppend.shape[paddingAxis];
    const paddingTensorPrepend = isCenterImage && remainingPaddingAmount ? createPaddingTensor(remainingPaddingAmount) : null;
    const tensorsToStack = [
      paddingTensorPrepend,
      imgTensor,
      paddingTensorAppend
    ].filter((t) => !!t).map((t) => tf3.cast(t, "float32"));
    return tf3.concat(tensorsToStack, paddingAxis);
  });
}
// src/ops/shuffleArray.ts
// Fisher-Yates shuffle over a shallow copy; the input array is not mutated.
function shuffleArray(inputArray) {
  const array = inputArray.slice();
  for (let i = array.length - 1; i > 0; i--) {
    const j = Math.floor(Math.random() * (i + 1));
    [array[i], array[j]] = [array[j], array[i]];
  }
  return array;
}
// src/ops/index.ts
// Logistic function: maps any real number into (0, 1).
function sigmoid(x) {
  const expNegX = Math.exp(-x);
  return 1 / (1 + expNegX);
}
// Logit: inverse of sigmoid, mapping a probability in (0, 1) to the reals.
function inverseSigmoid(x) {
  const odds = x / (1 - x);
  return Math.log(odds);
}
// src/classes/Rect.ts
// Convenience Box constructed from x/y/width/height scalars.
var Rect = class extends Box {
  constructor(x, y, width, height, allowNegativeDimensions = false) {
    super({ x, y, width, height }, allowNegativeDimensions);
  }
};
// src/classes/FaceLandmarks.ts
// dlib-style alignment constants: relative x/y position and scale of the
// face inside the aligned crop (used by alignDlib below).
var relX = 0.5;
var relY = 0.43;
var relScale = 0.45;
// Base class for landmark sets. Positions are stored in absolute image
// coordinates: the relative [0, 1] inputs are scaled by imgDims and offset
// by `shift`.
var FaceLandmarks = class {
  constructor(relativeFaceLandmarkPositions, imgDims, shift = new Point(0, 0)) {
    const { width, height } = imgDims;
    this._imgDims = new Dimensions(width, height);
    this._shift = shift;
    this._positions = relativeFaceLandmarkPositions.map((pt) => pt.mul(new Point(width, height)).add(shift));
  }
  get shift() {
    return new Point(this._shift.x, this._shift.y);
  }
  get imageWidth() {
    return this._imgDims.width;
  }
  get imageHeight() {
    return this._imgDims.height;
  }
  get positions() {
    return this._positions;
  }
  // Positions mapped back to [0, 1] coordinates (shift removed).
  get relativePositions() {
    return this._positions.map((pt) => pt.sub(this._shift).div(new Point(this.imageWidth, this.imageHeight)));
  }
  // `new this.constructor(...)` keeps the concrete subclass (5/68 points).
  forSize(width, height) {
    return new this.constructor(this.relativePositions, { width, height });
  }
  shiftBy(x, y) {
    return new this.constructor(this.relativePositions, this._imgDims, new Point(x, y));
  }
  shiftByPoint(pt) {
    return this.shiftBy(pt.x, pt.y);
  }
  // Computes an aligned face crop box. When `detection` is given, the
  // landmarks are first shifted into the detection box's frame, then aligned.
  align(detection, options = {}) {
    if (detection) {
      const box = detection instanceof FaceDetection ? detection.box.floor() : new Box(detection);
      return this.shiftBy(box.x, box.y).align(null, options);
    }
    const { useDlibAlignment, minBoxPadding } = { useDlibAlignment: false, minBoxPadding: 0.2, ...options };
    if (useDlibAlignment) {
      return this.alignDlib();
    }
    return this.alignMinBbox(minBoxPadding);
  }
  // dlib-style square crop centered on the eye/mouth reference points.
  alignDlib() {
    const centers = this.getRefPointsForAlignment();
    const [leftEyeCenter, rightEyeCenter, mouthCenter] = centers;
    const distToMouth = (pt) => mouthCenter.sub(pt).magnitude();
    const eyeToMouthDist = (distToMouth(leftEyeCenter) + distToMouth(rightEyeCenter)) / 2;
    const size = Math.floor(eyeToMouthDist / relScale);
    const refPoint = getCenterPoint(centers);
    const x = Math.floor(Math.max(0, refPoint.x - relX * size));
    const y = Math.floor(Math.max(0, refPoint.y - relY * size));
    // NOTE(review): clamping width/height with `imageWidth + x` /
    // `imageHeight + y` looks suspicious (a clamp to the image would be
    // `imageWidth - x`); preserved as-is — confirm intent before changing.
    return new Rect(x, y, Math.min(size, this.imageWidth + x), Math.min(size, this.imageHeight + y));
  }
  // Minimal bounding box of all landmarks, padded by `padding` * box size.
  alignMinBbox(padding) {
    const box = minBbox(this.positions);
    return box.pad(box.width * padding, box.height * padding);
  }
  // Subclasses return [leftEyeCenter, rightEyeCenter, mouthCenter].
  getRefPointsForAlignment() {
    throw new Error("getRefPointsForAlignment not implemented by base class");
  }
};
// src/classes/FaceLandmarks5.ts
// 5-point landmark set: reference points are the two eyes plus the midpoint
// of the two mouth-corner points (indices 3 and 4).
var FaceLandmarks5 = class extends FaceLandmarks {
  getRefPointsForAlignment() {
    const [leftEye, rightEye, , mouthLeft, mouthRight] = this.positions;
    return [leftEye, rightEye, getCenterPoint([mouthLeft, mouthRight])];
  }
};
// src/classes/FaceLandmarks68.ts
// 68-point landmark set with accessors for the standard landmark regions
// (index ranges follow the common 68-point annotation layout).
var FaceLandmarks68 = class extends FaceLandmarks {
  getJawOutline() { return this.positions.slice(0, 17); }
  getLeftEyeBrow() { return this.positions.slice(17, 22); }
  getRightEyeBrow() { return this.positions.slice(22, 27); }
  getNose() { return this.positions.slice(27, 36); }
  getLeftEye() { return this.positions.slice(36, 42); }
  getRightEye() { return this.positions.slice(42, 48); }
  getMouth() { return this.positions.slice(48, 68); }
  // Alignment reference points: centers of both eyes and of the mouth.
  getRefPointsForAlignment() {
    const regions = [this.getLeftEye(), this.getRightEye(), this.getMouth()];
    return regions.map(getCenterPoint);
  }
};
// src/classes/FaceMatch.ts
// Pairs a recognition label with the descriptor distance of the match.
var FaceMatch = class {
  constructor(label, distance) {
    this._label = label;
    this._distance = distance;
  }
  get label() { return this._label; }
  get distance() { return this._distance; }
  // e.g. "person1 (0.34)"; the distance suffix can be disabled.
  toString(withDistance = true) {
    const suffix = withDistance ? ` (${round(this.distance)})` : "";
    return `${this.label}${suffix}`;
  }
};
// src/classes/LabeledBox.ts
// A Box carrying a numeric class label.
var LabeledBox = class extends Box {
  static assertIsValidLabeledBox(box, callee) {
    Box.assertIsValidBox(box, callee);
    const labelIsNumber = isValidNumber(box.label);
    if (!labelIsNumber) {
      throw new Error(`${callee} - expected property label (${box.label}) to be a number`);
    }
  }
  constructor(box, label) {
    super(box);
    this._label = label;
  }
  get label() { return this._label; }
};
// src/classes/LabeledFaceDescriptors.ts
// Associates a person label with one or more face descriptor vectors.
var LabeledFaceDescriptors = class {
  constructor(label, descriptors) {
    if (!(typeof label === "string")) {
      throw new Error("LabeledFaceDescriptors - constructor expected label to be a string");
    }
    const allFloat32 = Array.isArray(descriptors) && descriptors.every((desc) => desc instanceof Float32Array);
    if (!allFloat32) {
      throw new Error("LabeledFaceDescriptors - constructor expected descriptors to be an array of Float32Array");
    }
    this._label = label;
    this._descriptors = descriptors;
  }
  get label() { return this._label; }
  get descriptors() { return this._descriptors; }
  // Plain-JSON form: descriptors become plain number arrays.
  toJSON() {
    const descriptors = this.descriptors.map((d) => Array.from(d));
    return { label: this.label, descriptors };
  }
  // Inverse of toJSON: rebuilds Float32Array descriptors.
  static fromJSON(json) {
    const descriptors = json.descriptors.map((d) => new Float32Array(d));
    return new LabeledFaceDescriptors(json.label, descriptors);
  }
};
// src/classes/PredictedBox.ts
// A LabeledBox that additionally carries detection and class scores.
var PredictedBox = class extends LabeledBox {
  static assertIsValidPredictedBox(box, callee) {
    LabeledBox.assertIsValidLabeledBox(box, callee);
    if (!isValidProbablitiy(box.score) || !isValidProbablitiy(box.classScore)) {
      // Fixed message: the second property was previously printed without
      // its name ("... and (<value>) ...").
      throw new Error(`${callee} - expected properties score (${box.score}) and classScore (${box.classScore}) to be a number between [0, 1]`);
    }
  }
  constructor(box, label, score, classScore) {
    super(box, label);
    this._score = score;
    this._classScore = classScore;
  }
  get score() { return this._score; }
  get classScore() { return this._classScore; }
};
// src/factories/WithFaceDetection.ts
// True when `obj` carries a FaceDetection under its `detection` key.
function isWithFaceDetection(obj) {
  const { detection } = obj;
  return detection instanceof FaceDetection;
}
// Returns a shallow copy of `sourceObj` with `detection` attached;
// the source object is left unmodified.
function extendWithFaceDetection(sourceObj, detection) {
  return { ...sourceObj, detection };
}
// src/env/createBrowserEnv.ts
// Browser environment: DOM constructors plus window.fetch; there is no
// filesystem, so readFile always throws.
function createBrowserEnv() {
  const fetch = window.fetch;
  if (!fetch) {
    throw new Error("fetch - missing fetch implementation for browser environment");
  }
  const readFile = () => {
    throw new Error("readFile - filesystem not available for browser environment");
  };
  return {
    Canvas: HTMLCanvasElement,
    CanvasRenderingContext2D,
    Image: HTMLImageElement,
    ImageData,
    Video: HTMLVideoElement,
    createCanvasElement: () => document.createElement("canvas"),
    createImageElement: () => document.createElement("img"),
    createVideoElement: () => document.createElement("video"),
    fetch,
    readFile
  };
}
// src/env/createFileSystem.ts
// Builds a { readFile } facade. Uses the injected `fs` when provided,
// otherwise tries to require node's fs; when neither is available the
// returned readFile throws with the captured require error.
function createFileSystem(fs) {
  let requireFsError = "";
  if (!fs) {
    try {
      fs = require("fs");
    } catch (err) {
      requireFsError = err.toString();
    }
  }
  const promisedReadFile = (filePath) => new Promise((resolve, reject) => {
    fs.readFile(filePath, (err, buffer) => {
      if (err) reject(err);
      else resolve(buffer);
    });
  });
  const failingReadFile = () => {
    throw new Error(`readFile - failed to require fs in nodejs environment with error: ${requireFsError}`);
  };
  return { readFile: fs ? promisedReadFile : failingReadFile };
}
// src/env/createNodejsEnv.ts
// Node.js environment: element constructors must be supplied via globals
// (e.g. monkey-patched from the `canvas` package); missing ones fall back
// to empty stub classes, and the create*Element factories throw instead.
function createNodejsEnv() {
  const Canvas = global["Canvas"] || global.HTMLCanvasElement;
  const Image = global.Image || global.HTMLImageElement;
  const Video = global["Video"] || global.HTMLVideoElement;
  const createCanvasElement = () => {
    if (Canvas)
      return new Canvas();
    throw new Error("createCanvasElement - missing Canvas implementation for nodejs environment");
  };
  const createImageElement = () => {
    if (Image)
      return new Image();
    throw new Error("createImageElement - missing Image implementation for nodejs environment");
  };
  const createVideoElement = () => {
    if (Video)
      return new Video();
    throw new Error("createVideoElement - missing Video implementation for nodejs environment");
  };
  const fetch = global.fetch;
  const fileSystem = createFileSystem();
  return {
    Canvas: Canvas || class {
    },
    CanvasRenderingContext2D: global.CanvasRenderingContext2D || class {
    },
    Image: Image || class {
    },
    ImageData: global.ImageData || class {
    },
    // Consistency fix: use the resolved `Video` (global.Video falling back
    // to global.HTMLVideoElement) so env.Video matches createVideoElement,
    // which already honors a patched global.Video.
    Video: Video || class {
    },
    createCanvasElement,
    createImageElement,
    createVideoElement,
    fetch,
    ...fileSystem
  };
}
// src/env/isBrowser.ts
// True only when all DOM globals required by the library are present.
function isBrowser() {
  const domGlobalsPresent = [
    typeof document,
    typeof HTMLImageElement,
    typeof HTMLCanvasElement,
    typeof HTMLVideoElement,
    typeof ImageData,
    typeof CanvasRenderingContext2D
  ].every((t) => t !== "undefined");
  return typeof window === "object" && domGlobalsPresent;
}
// src/env/isNodejs.ts
// True when running under Node.js with CommonJS globals available
// (note: a `require`/`module`-free ESM context reports false).
function isNodejs() {
  const hasCommonJsGlobals = typeof require === "function" && typeof module !== "undefined";
  const hasProcess = typeof process !== "undefined" && !!process.version;
  return typeof global === "object" && hasCommonJsGlobals && hasProcess;
}
// src/env/index.ts
// Module-level singleton holding the active environment implementation.
var environment;
// Returns the active environment; throws when none has been installed yet.
function getEnv() {
  if (!environment) {
    throw new Error("getEnv - environment is not defined, check isNodejs() and isBrowser()");
  }
  return environment;
}
// Installs `env2` as the active environment.
function setEnv(env2) {
  environment = env2;
}
// Auto-detects browser vs Node.js and installs the matching environment;
// returns null when neither runtime is recognized.
function initialize() {
  if (isBrowser()) {
    return setEnv(createBrowserEnv());
  }
  if (isNodejs()) {
    return setEnv(createNodejsEnv());
  }
  return null;
}
// Overrides selected members of the active environment with those from
// `env2`; members not supplied keep their current values. Initializes the
// default environment first when none is installed yet.
function monkeyPatch(env2) {
  if (!environment) {
    initialize();
  }
  if (!environment) {
    throw new Error("monkeyPatch - environment is not defined, check isNodejs() and isBrowser()");
  }
  const Canvas = env2.Canvas === undefined ? environment.Canvas : env2.Canvas;
  const Image = env2.Image === undefined ? environment.Image : env2.Image;
  environment.Canvas = Canvas;
  environment.Image = Image;
  // Default factories construct via the (possibly patched) constructors.
  environment.createCanvasElement = env2.createCanvasElement || (() => new Canvas());
  environment.createImageElement = env2.createImageElement || (() => new Image());
  environment.ImageData = env2.ImageData || environment.ImageData;
  environment.Video = env2.Video || environment.Video;
  environment.fetch = env2.fetch || environment.fetch;
  environment.readFile = env2.readFile || environment.readFile;
}
// Environment facade exposed as `faceapi.env`.
var env = {
  getEnv,
  setEnv,
  initialize,
  createBrowserEnv,
  createFileSystem,
  createNodejsEnv,
  monkeyPatch,
  isBrowser,
  isNodejs
};
// Detect and install the default environment at module load time.
initialize();
// src/dom/resolveInput.ts
// In the browser, a string argument is treated as a DOM element id and
// resolved via document.getElementById; anything else passes through.
function resolveInput(arg) {
  const looksLikeElementId = !env.isNodejs() && typeof arg === "string";
  return looksLikeElementId ? document.getElementById(arg) : arg;
}
// src/dom/getContext2dOrThrow.ts
// Resolves a 2d rendering context from a canvas element, a canvas element
// id, or an already-created 2d context; throws on anything else.
function getContext2dOrThrow(canvasArg) {
  const { Canvas, CanvasRenderingContext2D: Context2D } = env.getEnv();
  if (canvasArg instanceof Context2D) {
    return canvasArg;
  }
  const canvas = resolveInput(canvasArg);
  if (!(canvas instanceof Canvas)) {
    throw new Error("resolveContext2d - expected canvas to be of instance of Canvas");
  }
  const ctx = canvas.getContext("2d");
  if (ctx) {
    return ctx;
  }
  throw new Error("resolveContext2d - canvas 2d context is null");
}
// src/draw/DrawTextField.ts
// String enum (TypeScript-enum shape) for text-field anchor corners.
var AnchorPosition;
(function(AnchorPosition2) {
  for (const key of ["TOP_LEFT", "TOP_RIGHT", "BOTTOM_LEFT", "BOTTOM_RIGHT"]) {
    AnchorPosition2[key] = key;
  }
})(AnchorPosition || (AnchorPosition = {}));
// Options for DrawTextField; any omitted (or falsy) value falls back to the
// documented default.
var DrawTextFieldOptions = class {
  constructor(options = {}) {
    this.anchorPosition = options.anchorPosition || AnchorPosition.TOP_LEFT;
    this.backgroundColor = options.backgroundColor || "rgba(0, 0, 0, 0.5)";
    this.fontColor = options.fontColor || "rgba(255, 255, 255, 1)";
    this.fontSize = options.fontSize || 14;
    this.fontStyle = options.fontStyle || "Georgia";
    this.padding = options.padding || 4;
  }
};
// Multi-line text label drawn onto a canvas at an anchor point, with a
// filled background rectangle sized to the text.
var DrawTextField = class {
  constructor(text, anchor, options = {}) {
    // Normalize to an array of lines (string, string[], or another DrawTextField).
    this.text = typeof text === "string" ? [text] : text instanceof DrawTextField ? text.text : text;
    this.anchor = anchor;
    this.options = new DrawTextFieldOptions(options);
  }
  // Widest line (per ctx.measureText) plus horizontal padding on both sides.
  measureWidth(ctx) {
    const { padding } = this.options;
    return this.text.map((l) => ctx.measureText(l).width).reduce((w0, w1) => w0 < w1 ? w1 : w0, 0) + 2 * padding;
  }
  measureHeight() {
    const { fontSize, padding } = this.options;
    return this.text.length * fontSize + 2 * padding;
  }
  // Top-left corner of the text field, derived from the anchor position and
  // clamped into the canvas when `canvasDims` is provided.
  getUpperLeft(ctx, canvasDims) {
    const { anchorPosition } = this.options;
    const isShiftLeft = anchorPosition === AnchorPosition.BOTTOM_RIGHT || anchorPosition === AnchorPosition.TOP_RIGHT;
    const isShiftTop = anchorPosition === AnchorPosition.BOTTOM_LEFT || anchorPosition === AnchorPosition.BOTTOM_RIGHT;
    const textFieldWidth = this.measureWidth(ctx);
    const textFieldHeight = this.measureHeight();
    const x = isShiftLeft ? this.anchor.x - textFieldWidth : this.anchor.x;
    const y = isShiftTop ? this.anchor.y - textFieldHeight : this.anchor.y;
    if (canvasDims) {
      const { width, height } = canvasDims;
      const newX = Math.max(Math.min(x, width - textFieldWidth), 0);
      const newY = Math.max(Math.min(y, height - textFieldHeight), 0);
      return { x: newX, y: newY };
    }
    return { x, y };
  }
  draw(canvasArg) {
    const canvas = resolveInput(canvasArg);
    const ctx = getContext2dOrThrow(canvas);
    const {
      backgroundColor,
      fontColor,
      fontSize,
      fontStyle,
      padding
    } = this.options;
    // Font must be set before measuring, since measureText depends on it.
    ctx.font = `${fontSize}px ${fontStyle}`;
    const maxTextWidth = this.measureWidth(ctx);
    const textHeight = this.measureHeight();
    ctx.fillStyle = backgroundColor;
    const upperLeft = this.getUpperLeft(ctx, canvas);
    ctx.fillRect(upperLeft.x, upperLeft.y, maxTextWidth, textHeight);
    ctx.fillStyle = fontColor;
    // Draw each line one font-size below the previous.
    this.text.forEach((textLine, i) => {
      const x = padding + upperLeft.x;
      const y = padding + upperLeft.y + (i + 1) * fontSize;
      ctx.fillText(textLine, x, y);
    });
  }
};
// src/draw/DrawBox.ts
// Options for drawing a bounding box (stroke color/width) plus the options
// used for its optional label text field.
var DrawBoxOptions = class {
  constructor(options = {}) {
    const { boxColor, lineWidth, label, drawLabelOptions } = options;
    this.boxColor = boxColor || "rgba(0, 0, 255, 1)";
    this.lineWidth = lineWidth || 2;
    this.label = label;
    // label defaults: anchored below the box corner, tinted like the box;
    // caller-supplied drawLabelOptions override these
    this.drawLabelOptions = new DrawTextFieldOptions({
      anchorPosition: AnchorPosition.BOTTOM_LEFT,
      backgroundColor: this.boxColor,
      ...drawLabelOptions
    });
  }
};
// Draws a bounding box (and an optional label) onto a canvas.
var DrawBox = class {
  constructor(box, options = {}) {
    this.box = new Box(box);
    this.options = new DrawBoxOptions(options);
  }
  // Strokes the rectangle; when a label is configured, renders it anchored
  // at the box's top-left corner (offset by half the stroke width).
  draw(canvasArg) {
    const ctx = getContext2dOrThrow(canvasArg);
    const { boxColor, lineWidth, label, drawLabelOptions } = this.options;
    const { x, y, width, height } = this.box;
    ctx.strokeStyle = boxColor;
    ctx.lineWidth = lineWidth;
    ctx.strokeRect(x, y, width, height);
    if (label) {
      const labelAnchor = { x: x - lineWidth / 2, y };
      new DrawTextField([label], labelAnchor, drawLabelOptions).draw(canvasArg);
    }
  }
};
// src/draw/drawDetections.ts
// Draws one or many detections (FaceDetection, WithFaceDetection, or raw
// box-like objects) onto the given canvas, labelling each with its score
// when one is available.
function drawDetections(canvasArg, detections) {
  const all = Array.isArray(detections) ? detections : [detections];
  for (const det of all) {
    let score;
    let box;
    if (det instanceof FaceDetection) {
      score = det.score;
      box = det.box;
    } else if (isWithFaceDetection(det)) {
      score = det.detection.score;
      box = det.detection.box;
    } else {
      score = void 0;
      box = new Box(det);
    }
    const label = score ? `${round(score)}` : void 0;
    new DrawBox(box, { label }).draw(canvasArg);
  }
}
// src/faceExpressionNet/FaceExpressionNet.ts
var tf18 = __toModule(require_tfjs_esm());
// src/dom/isMediaLoaded.ts
// True when an image has finished loading, or a video has buffered enough
// data to play (readyState >= 3); false for anything else.
function isMediaLoaded(media) {
  const { Image, Video } = env.getEnv();
  if (media instanceof Image) return media.complete;
  if (media instanceof Video) return media.readyState >= 3;
  return false;
}
// src/dom/awaitMediaLoaded.ts
// Resolves once the given media element has loaded; resolves immediately for
// canvases or media that is already loaded, rejects on a load error.
function awaitMediaLoaded(media) {
  return new Promise((resolve, reject) => {
    const alreadyUsable = media instanceof env.getEnv().Canvas || isMediaLoaded(media);
    if (alreadyUsable) {
      resolve(null);
      return;
    }
    // remove both handlers so a settled promise leaves no listeners behind
    const detach = (target) => {
      target.removeEventListener("load", onLoad);
      target.removeEventListener("error", onError);
    };
    function onLoad(e) {
      if (!e.currentTarget) return;
      detach(e.currentTarget);
      resolve(e);
    }
    function onError(e) {
      if (!e.currentTarget) return;
      detach(e.currentTarget);
      reject(e);
    }
    media.addEventListener("load", onLoad);
    media.addEventListener("error", onError);
  });
}
// src/dom/bufferToImage.ts
// Decodes a Blob into an image element via a data url.
// Resolves with the loaded image; rejects when the argument is not a Blob,
// when reading fails, or when the image cannot load.
function bufferToImage(buf) {
  return new Promise((resolve, reject) => {
    if (!(buf instanceof Blob)) {
      // return after reject: the original fell through and kept executing,
      // constructing a FileReader and reading the invalid argument anyway
      reject(new Error("bufferToImage - expected buf to be of type: Blob"));
      return;
    }
    const reader = new FileReader();
    reader.onload = () => {
      if (typeof reader.result !== "string") {
        // same fall-through bug: without the return, a non-string result was
        // still assigned to img.src below
        reject(new Error("bufferToImage - expected reader.result to be a string, in onload"));
        return;
      }
      const img = env.getEnv().createImageElement();
      img.onload = () => resolve(img);
      img.onerror = reject;
      img.src = reader.result;
    };
    reader.onerror = reject;
    reader.readAsDataURL(buf);
  });
}
// src/dom/getMediaDimensions.ts
// Returns the intrinsic dimensions of an image (natural size), a video
// (video size), or any other object exposing width/height.
function getMediaDimensions(input) {
  const { Image, Video } = env.getEnv();
  let width;
  let height;
  if (input instanceof Image) {
    width = input.naturalWidth;
    height = input.naturalHeight;
  } else if (input instanceof Video) {
    width = input.videoWidth;
    height = input.videoHeight;
  } else {
    ({ width, height } = input);
  }
  return new Dimensions(width, height);
}
// src/dom/createCanvas.ts
// Creates a canvas element (via the environment factory) with the requested
// dimensions.
function createCanvas({ width, height }) {
  const canvas = env.getEnv().createCanvasElement();
  Object.assign(canvas, { width, height });
  return canvas;
}
// Renders loaded media (or raw ImageData) into a freshly created canvas at
// the given dimensions (defaulting to the media's own size).
// Throws when a media element has not finished loading yet.
function createCanvasFromMedia(media, dims) {
  const { ImageData: ImageData2 } = env.getEnv();
  const isRawImageData = media instanceof ImageData2;
  if (!isRawImageData && !isMediaLoaded(media)) {
    throw new Error("createCanvasFromMedia - media has not finished loading yet");
  }
  const { width, height } = dims || getMediaDimensions(media);
  const canvas = createCanvas({ width, height });
  if (isRawImageData) {
    getContext2dOrThrow(canvas).putImageData(media, 0, 0);
  } else {
    getContext2dOrThrow(canvas).drawImage(media, 0, 0, width, height);
  }
  return canvas;
}
// src/dom/imageTensorToCanvas.ts
var tf4 = __toModule(require_tfjs_esm());
// Renders a 3D or 4D (leading batch dim) image tensor into a canvas, creating
// one when none is supplied; values are cast to int before drawing.
async function imageTensorToCanvas(imgTensor, canvas) {
  const targetCanvas = canvas || env.getEnv().createCanvasElement();
  // skip the batch dimension for 4D tensors
  const shapeOffset = isTensor4D(imgTensor) ? 1 : 0;
  const [height, width, numChannels] = imgTensor.shape.slice(shapeOffset);
  const imgTensor3D = tf4.tidy(() => imgTensor.as3D(height, width, numChannels).toInt());
  await tf4.browser.toPixels(imgTensor3D, targetCanvas);
  imgTensor3D.dispose();
  return targetCanvas;
}
// src/dom/isMediaElement.ts
// True when input is one of the supported media element types for the
// current environment (image, canvas or video).
function isMediaElement(input) {
  const { Image, Canvas, Video } = env.getEnv();
  return [Image, Canvas, Video].some((mediaClass) => input instanceof mediaClass);
}
// src/dom/NetInput.ts
var tf5 = __toModule(require_tfjs_esm());
// src/dom/imageToSquare.ts
// Scales an image/canvas to fit inside an inputSize x inputSize square
// canvas, preserving aspect ratio; the uncovered area stays blank. When
// centerImage is set, the image is centered along its shorter axis.
function imageToSquare(input, inputSize, centerImage = false) {
  const { Image, Canvas } = env.getEnv();
  const isUsableSource = input instanceof Image || input instanceof Canvas;
  if (!isUsableSource) {
    throw new Error("imageToSquare - expected arg0 to be HTMLImageElement | HTMLCanvasElement");
  }
  // degenerate target size: return a minimal 1x1 canvas
  if (inputSize <= 0) {
    return createCanvas({ width: 1, height: 1 });
  }
  const dims = getMediaDimensions(input);
  const scale2 = inputSize / Math.max(dims.height, dims.width);
  const scaledWidth = scale2 * dims.width;
  const scaledHeight = scale2 * dims.height;
  const targetCanvas = createCanvas({ width: inputSize, height: inputSize });
  const inputCanvas = input instanceof Canvas ? input : createCanvasFromMedia(input);
  let dx = 0;
  let dy = 0;
  if (centerImage) {
    const offset = Math.abs(scaledWidth - scaledHeight) / 2;
    if (scaledWidth < scaledHeight) dx = offset;
    if (scaledHeight < scaledWidth) dy = offset;
  }
  if (inputCanvas.width > 0 && inputCanvas.height > 0) {
    getContext2dOrThrow(targetCanvas).drawImage(inputCanvas, dx, dy, scaledWidth, scaledHeight);
  }
  return targetCanvas;
}
// src/dom/NetInput.ts
// Wraps a batch of resolved inputs (image tensors and/or canvases) and
// provides utilities to convert them into a single square batch tensor.
var NetInput = class {
// inputs: array of tf.Tensor3D | tf.Tensor4D (batchSize 1) | loaded media;
// treatAsBatchInput: report isBatchInput even when only one input is given
constructor(inputs, treatAsBatchInput = false) {
this._imageTensors = [];
this._canvases = [];
this._treatAsBatchInput = false;
this._inputDimensions = [];
if (!Array.isArray(inputs)) {
throw new Error(`NetInput.constructor - expected inputs to be an Array of TResolvedNetInput or to be instanceof tf.Tensor4D, instead have ${inputs}`);
}
this._treatAsBatchInput = treatAsBatchInput;
this._batchSize = inputs.length;
inputs.forEach((input, idx) => {
// 3D tensor: stored as-is, dimensions are the tensor's [h, w, c] shape
if (isTensor3D(input)) {
this._imageTensors[idx] = input;
this._inputDimensions[idx] = input.shape;
return;
}
// 4D tensor: only single-image batches are allowed inside an input array
if (isTensor4D(input)) {
const batchSize = input.shape[0];
if (batchSize !== 1) {
throw new Error(`NetInput - tf.Tensor4D with batchSize ${batchSize} passed, but not supported in input array`);
}
this._imageTensors[idx] = input;
this._inputDimensions[idx] = input.shape.slice(1);
return;
}
// any other media is rendered onto a canvas; 3 channels are assumed here
const canvas = input instanceof env.getEnv().Canvas ? input : createCanvasFromMedia(input);
this._canvases[idx] = canvas;
this._inputDimensions[idx] = [canvas.height, canvas.width, 3];
});
}
get imageTensors() {
return this._imageTensors;
}
get canvases() {
return this._canvases;
}
// batch semantics apply with more than one input or when explicitly requested
get isBatchInput() {
return this.batchSize > 1 || this._treatAsBatchInput;
}
get batchSize() {
return this._batchSize;
}
get inputDimensions() {
return this._inputDimensions;
}
// square side length used by toBatchTensor; undefined until it is called
get inputSize() {
return this._inputSize;
}
get reshapedInputDimensions() {
return range(this.batchSize, 0, 1).map((_, batchIdx) => this.getReshapedInputDimensions(batchIdx));
}
// returns the canvas at this index if one exists, otherwise the tensor
getInput(batchIdx) {
return this.canvases[batchIdx] || this.imageTensors[batchIdx];
}
getInputDimensions(batchIdx) {
return this._inputDimensions[batchIdx];
}
getInputHeight(batchIdx) {
return this._inputDimensions[batchIdx][0];
}
getInputWidth(batchIdx) {
return this._inputDimensions[batchIdx][1];
}
// dimensions of input batchIdx after rescaling to the configured inputSize;
// throws when toBatchTensor has not been called yet
getReshapedInputDimensions(batchIdx) {
if (typeof this.inputSize !== "number") {
throw new Error("getReshapedInputDimensions - inputSize not set, toBatchTensor has not been called yet");
}
const width = this.getInputWidth(batchIdx);
const height = this.getInputHeight(batchIdx);
return computeReshapedDimensions({ width, height }, this.inputSize);
}
// Converts all inputs into one float32 tensor with shape
// [batchSize, inputSize, inputSize, 3]: each input is padded to a square
// (optionally centered) and bilinearly resized to inputSize if needed.
// Also records inputSize for later getReshapedInputDimensions calls.
toBatchTensor(inputSize, isCenterInputs = true) {
this._inputSize = inputSize;
return tf5.tidy(() => {
const inputTensors = range(this.batchSize, 0, 1).map((batchIdx) => {
const input = this.getInput(batchIdx);
if (input instanceof tf5.Tensor) {
let imgTensor = isTensor4D(input) ? input : tf5.expandDims(input);
imgTensor = padToSquare(imgTensor, isCenterInputs);
// resize only when padding did not already yield the target size
if (imgTensor.shape[1] !== inputSize || imgTensor.shape[2] !== inputSize) {
imgTensor = tf5.image.resizeBilinear(imgTensor, [inputSize, inputSize], false, false);
}
return imgTensor.as3D(inputSize, inputSize, 3);
}
if (input instanceof env.getEnv().Canvas) {
return tf5.browser.fromPixels(imageToSquare(input, inputSize, isCenterInputs));
}
throw new Error(`toBatchTensor - at batchIdx ${batchIdx}, expected input to be instanceof tf.Tensor or instanceof HTMLCanvasElement, instead have ${input}`);
});
const batchTensor = tf5.stack(inputTensors.map((t) => tf5.cast(t, "float32"))).as4D(this.batchSize, inputSize, inputSize, 3);
return batchTensor;
});
}
};
// src/dom/toNetInput.ts
// Normalizes the many accepted input forms (NetInput, media elements, element
// ids, tf tensors, or arrays thereof) into a single NetInput instance,
// waiting for any media elements to finish loading first.
async function toNetInput(inputs) {
  if (inputs instanceof NetInput) return inputs;
  const rawInputs = Array.isArray(inputs) ? inputs : [inputs];
  if (rawInputs.length === 0) {
    throw new Error("toNetInput - empty array passed as input");
  }
  // only mention the index in error messages for array inputs
  const getIdxHint = (idx) => Array.isArray(inputs) ? ` at input index ${idx}:` : "";
  const resolved = rawInputs.map(resolveInput);
  resolved.forEach((input, i) => {
    const isSupported = isMediaElement(input) || isTensor3D(input) || isTensor4D(input);
    if (!isSupported) {
      if (typeof rawInputs[i] === "string") {
        throw new Error(`toNetInput -${getIdxHint(i)} string passed, but could not resolve HTMLElement for element id ${rawInputs[i]}`);
      }
      throw new Error(`toNetInput -${getIdxHint(i)} expected media to be of type HTMLImageElement | HTMLVideoElement | HTMLCanvasElement | tf.Tensor3D, or to be an element id`);
    }
    if (isTensor4D(input)) {
      const batchSize = input.shape[0];
      if (batchSize !== 1) {
        throw new Error(`toNetInput -${getIdxHint(i)} tf.Tensor4D with batchSize ${batchSize} passed, but not supported in input array`);
      }
    }
  });
  // wait for every media element to finish loading before wrapping them
  await Promise.all(resolved.map((input) => isMediaElement(input) && awaitMediaLoaded(input)));
  return new NetInput(resolved, Array.isArray(inputs));
}
// src/dom/extractFaces.ts
// Crops the given detection boxes out of the input image and returns one
// canvas per box. Batched inputs (batchSize > 1) are not supported.
async function extractFaces(input, detections) {
  const { Canvas } = env.getEnv();
  let canvas = input;
  if (!(input instanceof Canvas)) {
    // normalize non-canvas inputs (tensors, media, ids) to a single canvas
    const netInput = await toNetInput(input);
    if (netInput.batchSize > 1) {
      throw new Error("extractFaces - batchSize > 1 not supported");
    }
    const tensorOrCanvas = netInput.getInput(0);
    canvas = tensorOrCanvas instanceof Canvas ? tensorOrCanvas : await imageTensorToCanvas(tensorOrCanvas);
  }
  const ctx = getContext2dOrThrow(canvas);
  const boxes = detections.map((det) => {
    // rescale FaceDetection boxes to the canvas size; raw boxes pass through
    const box = det instanceof FaceDetection ? det.forSize(canvas.width, canvas.height).box.floor() : det;
    return box.clipAtImageBorders(canvas.width, canvas.height);
  });
  return boxes.map(({ x, y, width, height }) => {
    const faceImg = createCanvas({ width, height });
    if (width > 0 && height > 0) {
      const pixels = ctx.getImageData(x, y, width, height);
      getContext2dOrThrow(faceImg).putImageData(pixels, 0, 0);
    }
    return faceImg;
  });
}
// src/dom/extractFaceTensors.ts
var tf6 = __toModule(require_tfjs_esm());
// Crops the given detection boxes out of an image tensor and returns one
// 3D tensor per box. Accepts a 3D tensor or a 4D tensor with batchSize 1;
// throws for anything else.
async function extractFaceTensors(imageTensor, detections) {
  if (!isTensor3D(imageTensor) && !isTensor4D(imageTensor)) {
    throw new Error("extractFaceTensors - expected image tensor to be 3D or 4D");
  }
  if (isTensor4D(imageTensor) && imageTensor.shape[0] > 1) {
    throw new Error("extractFaceTensors - batchSize > 1 not supported");
  }
  return tf6.tidy(() => {
    const [imgHeight, imgWidth, numChannels] = imageTensor.shape.slice(isTensor4D(imageTensor) ? 1 : 0);
    const boxes = detections
      .map((det) => det instanceof FaceDetection ? det.forSize(imgWidth, imgHeight).box : det)
      .map((box) => box.clipAtImageBorders(imgWidth, imgHeight));
    // hoist the loop-invariant 3D reshape out of the map: the original
    // recreated imageTensor.as3D(...) once per box; tidy() disposes the
    // intermediate either way
    const image3D = imageTensor.as3D(imgHeight, imgWidth, numChannels);
    return boxes.map(({ x, y, width, height }) => tf6.slice3d(image3D, [y, x, 0], [height, width, numChannels]));
  });
}
// src/dom/fetchOrThrow.ts
async function fetchOrThrow(url, init) {
const { fetch } = env.getEnv();
const res = await fetch(url, init);
if (!(res.status < 400)) {