@ashleysmart/react-native-vision-camera-face-detector
Frame Processor Plugin to detect faces using MLKit Vision Face Detector for React Native Vision Camera!
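A minimal usage sketch for the wrapped Camera component defined below. The faceDetectionOptions and faceDetectionCallback props come from this file; the useCameraDevice hook, the package-root Camera export, the option values, and the callback's argument shape are assumptions, not documented API.

import React from 'react';
import { StyleSheet } from 'react-native';
import { useCameraDevice } from '@ashleysmart/react-native-vision-camera';
import { Camera } from '@ashleysmart/react-native-vision-camera-face-detector';

export function FaceCameraScreen() {
  const device = useCameraDevice('front');
  if (device == null) return null;
  return (
    <Camera
      style={StyleSheet.absoluteFill}
      device={device}
      isActive={true}
      // forwarded to the MLKit detector; values here are illustrative
      faceDetectionOptions={{ performanceMode: 'fast' }}
      // called from the async worklet context with the detection result
      faceDetectionCallback={(faces) => {
        console.log('detected faces:', faces);
      }}
    />
  );
}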
JavaScript
function _extends() { _extends = Object.assign ? Object.assign.bind() : function (target) { for (var i = 1; i < arguments.length; i++) { var source = arguments[i]; for (var key in source) { if (Object.prototype.hasOwnProperty.call(source, key)) { target[key] = source[key]; } } } return target; }; return _extends.apply(this, arguments); }
import React from 'react';
import { Camera as VisionCamera, useFrameProcessor } from '@ashleysmart/react-native-vision-camera';
import { useSharedValue, Worklets } from 'react-native-worklets-core';
import { detectFaces } from './FaceDetector';
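// detectFaces wraps the MLKit face detection frame processor plugin (see ./FaceDetector)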
// types
/**
* Create a Worklet function that persists between re-renders.
* The returned function can be called from both a Worklet context and the JS context, but will execute on a Worklet context.
*
* @param {function} func The Worklet. Must be marked with the `'worklet'` directive.
* @param {DependencyList} dependencyList The React dependencies of this Worklet.
* @returns {WorkletType} A memoized Worklet
*/
function useWorklet(func, dependencyList) {
  const worklet = React.useMemo(() => {
    // run `func` on the dedicated 'VisionCamera.async' worklet context instead of the JS thread
    const context = Worklets.createContext('VisionCamera.async');
    return Worklets.createRunInContextFn(func, context);
  }, dependencyList);
  return worklet;
}
/**
* Vision camera wrapper
*
* @param {ComponentType} props Camera + face detection props
* @returns
*/
export const Camera = /*#__PURE__*/React.forwardRef(({
  faceDetectionOptions,
  faceDetectionCallback,
  ...props
}, ref) => {
  /**
   * Is there an async task already running?
   */
  const isAsyncContextBusy = useSharedValue(false);
  /**
   * Sends logs/errors back to the JS thread
   */
  const logOnJs = Worklets.createRunInJsFn((log, error) => {
    if (error) {
      console.error(log, error.message ?? JSON.stringify(error));
    } else {
      console.log(log);
    }
  });
  /**
   * Async context that will handle face detection
   */
  const runOnAsyncContext = useWorklet(frame => {
    'worklet';
    try {
      detectFaces({
        frame,
        callback: faceDetectionCallback,
        options: faceDetectionOptions
      });
    } catch (error) {
      logOnJs('Execution error:', error);
    } finally {
      frame.decrementRefCount();
      isAsyncContextBusy.value = false;
    }
  }, [faceDetectionOptions, faceDetectionCallback]);
  /**
   * Detect faces in the current frame on an async context without blocking the camera preview
   *
   * @param {Frame} frame Current frame
   */
  function runAsync(frame) {
    'worklet';
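    // drop this frame if the previous detection run has not finished yet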
    if (isAsyncContextBusy.value) return;
    // set async context as busy
    isAsyncContextBusy.value = true;
    // cast to internal frame and increment ref count
    const internal = frame;
    internal.incrementRefCount();
    // detect faces in async context
    runOnAsyncContext(internal);
  }
  /**
   * Camera frame processor
   */
  const cameraFrameProcessor = useFrameProcessor(frame => {
    'worklet';
    runAsync(frame);
  }, [runOnAsyncContext]);
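  // pixelFormat is pinned to 'yuv' for the face detection plugin (placed after {...props}, so it overrides any pixelFormat prop)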
  return /*#__PURE__*/React.createElement(VisionCamera, _extends({}, props, {
    ref: ref,
    frameProcessor: cameraFrameProcessor,
    pixelFormat: "yuv"
  }));
});
//# sourceMappingURL=Camera.js.map