/**
 * @ashleysmart/react-native-vision-camera-face-detector
 * Frame Processor Plugin to detect faces using MLKit Vision Face Detector
 * for React Native Vision Camera!
 * (Transpiled package output — 107 lines / 100 loc / 3.5 kB, JavaScript.)
 */
// NOTE(review): lone ';' below — presumably a stripped `"use strict";`
// directive from the original Babel CommonJS output; harmless as-is.
;
// Mark this module as transpiled-ES-module for interop consumers.
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.Camera = void 0;
// Runtime dependencies: React, the VisionCamera fork, the worklets runtime,
// and the local MLKit face-detector frame-processor plugin.
var _react = _interopRequireDefault(require("react"));
var _reactNativeVisionCamera = require("@ashleysmart/react-native-vision-camera");
var _reactNativeWorkletsCore = require("react-native-worklets-core");
var _FaceDetector = require("./FaceDetector");
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
function _extends() { _extends = Object.assign ? Object.assign.bind() : function (target) { for (var i = 1; i < arguments.length; i++) { var source = arguments[i]; for (var key in source) { if (Object.prototype.hasOwnProperty.call(source, key)) { target[key] = source[key]; } } } return target; }; return _extends.apply(this, arguments); }
// types
/**
 * Create a Worklet function that persists between re-renders.
 * The returned function can be called from both a Worklet context and the JS context, but will execute on a Worklet context.
 *
 * @param {function} func The Worklet. Must be marked with the `'worklet'` directive.
 * @param {DependencyList} dependencyList The React dependencies of this Worklet.
 * @returns {WorkletType} A memoized Worklet
 */
function useWorklet(func, dependencyList) {
  const worklet = _react.default.useMemo(() => {
    // Run on VisionCamera's dedicated async context so heavy detection work
    // does not block the frame-processor (camera preview) thread.
    const context = 'VisionCamera.async';
    // Use the imported Worklets API rather than an undeclared `Worklets`
    // global — consistent with `(0, _reactNativeWorkletsCore.useSharedValue)`
    // elsewhere in this file, and safe where no global is installed.
    return _reactNativeWorkletsCore.Worklets.createRunInContextFn(func, context);
  }, dependencyList);
  return worklet;
}
/**
 * Vision camera wrapper
 *
 * Renders the underlying VisionCamera `Camera` with a frame processor that
 * forwards every frame to MLKit face detection on a separate worklet context,
 * so detection never blocks the camera preview.
 *
 * @param {ComponentType} props Camera + face detection props
 * @returns Camera element with `frameProcessor` and `pixelFormat: "yuv"` preset
 */
const Camera = exports.Camera = /*#__PURE__*/_react.default.forwardRef(({
  faceDetectionOptions,
  faceDetectionCallback,
  ...props
}, ref) => {
  /**
   * Is there an async task already running?
   * Shared value so both the frame-processor worklet and the async worklet
   * can read/write it.
   */
  const isAsyncContextBusy = (0, _reactNativeWorkletsCore.useSharedValue)(false);
  /**
   * Throws logs/errors back on js thread
   *
   * Uses the imported `Worklets` API (consistent with the `useSharedValue`
   * import pattern above) instead of relying on an undeclared global.
   */
  const logOnJs = _reactNativeWorkletsCore.Worklets.createRunInJsFn((log, error) => {
    if (error) {
      console.error(log, error.message ?? JSON.stringify(error));
    } else {
      console.log(log);
    }
  });
  /**
   * Async context that will handle face detection
   */
  const runOnAsyncContext = useWorklet(frame => {
    'worklet';

    try {
      (0, _FaceDetector.detectFaces)({
        frame,
        callback: faceDetectionCallback,
        options: faceDetectionOptions
      });
    } catch (error) {
      logOnJs('Execution error:', error);
    } finally {
      // Always release the frame and mark the context free, even on error —
      // otherwise frames would leak and detection would stall forever.
      frame.decrementRefCount();
      isAsyncContextBusy.value = false;
    }
  }, [faceDetectionOptions, faceDetectionCallback]);
  /**
   * Detect faces on frame on an async context without blocking camera preview
   *
   * Frames that arrive while a detection is still in flight are dropped.
   *
   * @param {Frame} frame Current frame
   */
  function runAsync(frame) {
    'worklet';

    if (isAsyncContextBusy.value) return;
    // set async context as busy
    isAsyncContextBusy.value = true;
    // cast to internal frame and increment ref count so the frame survives
    // until the async worklet releases it in its `finally` block
    const internal = frame;
    internal.incrementRefCount();
    // detect faces in async context
    runOnAsyncContext(internal);
  }
  /**
   * Camera frame processor
   */
  const cameraFrameProcessor = (0, _reactNativeVisionCamera.useFrameProcessor)(frame => {
    'worklet';

    runAsync(frame);
  }, [runOnAsyncContext]);
  // NOTE(review): pixelFormat is forced to "yuv" — presumably required by the
  // MLKit detector plugin; confirm against ./FaceDetector before changing.
  return /*#__PURE__*/_react.default.createElement(_reactNativeVisionCamera.Camera, _extends({}, props, {
    ref: ref,
    frameProcessor: cameraFrameProcessor,
    pixelFormat: "yuv"
  }));
});
//# sourceMappingURL=Camera.js.map