react-native-executorch
An easy way to run AI models in React Native with ExecuTorch
import { Platform } from 'react-native';
const LINKING_ERROR =
  `The package 'react-native-executorch' doesn't seem to be linked. Make sure: \n\n` +
  Platform.select({ ios: "- You have run 'pod install'\n", default: '' }) +
  '- You rebuilt the app after installing the package\n' +
  '- You are not using Expo Go\n';
// Returns the native module spec when it is available; otherwise returns a Proxy
// that throws LINKING_ERROR on any property access, so a missing native build
// fails with an actionable message instead of an undefined-module crash.
function returnSpecOrThrowLinkingError(spec) {
  return spec
    ? spec
    : new Proxy({}, {
        get() {
          throw new Error(LINKING_ERROR);
        },
      });
}
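// Resolve each native feature spec; an unlinked module is replaced by the
// throwing Proxy above, so any use surfaces LINKING_ERROR at the call site.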
const LLMNativeModule = returnSpecOrThrowLinkingError(require('./NativeLLM').default);
const ETModuleNativeModule = returnSpecOrThrowLinkingError(require('./NativeETModule').default);
const ClassificationNativeModule = returnSpecOrThrowLinkingError(require('./NativeClassification').default);
const ImageSegmentationNativeModule = returnSpecOrThrowLinkingError(require('./NativeImageSegmentation').default);
const ObjectDetectionNativeModule = returnSpecOrThrowLinkingError(require('./NativeObjectDetection').default);
const StyleTransferNativeModule = returnSpecOrThrowLinkingError(require('./NativeStyleTransfer').default);
const SpeechToTextNativeModule = returnSpecOrThrowLinkingError(require('./NativeSpeechToText').default);
const OCRNativeModule = returnSpecOrThrowLinkingError(require('./NativeOCR').default);
const VerticalOCRNativeModule = returnSpecOrThrowLinkingError(require('./NativeVerticalOCR').default);
const TokenizerNativeModule = returnSpecOrThrowLinkingError(require('./NativeTokenizer').default);
const TextEmbeddingsNativeModule = returnSpecOrThrowLinkingError(require('./NativeTextEmbeddings').default);
export { LLMNativeModule, ETModuleNativeModule, ClassificationNativeModule, ObjectDetectionNativeModule, StyleTransferNativeModule, ImageSegmentationNativeModule, SpeechToTextNativeModule, OCRNativeModule, VerticalOCRNativeModule, TextEmbeddingsNativeModule, TokenizerNativeModule };
//# sourceMappingURL=RnExecutorchModules.js.map
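For illustration, a minimal sketch of a call site for one of the exported wrappers. The relative import path follows this file's name; the method name is a hypothetical placeholder, not a documented API.

import { ClassificationNativeModule } from './RnExecutorchModules';

// 'runClassification' is a placeholder method name used only for illustration.
// If the native module is not linked, the Proxy's get trap fires on this
// property access and throws LINKING_ERROR with the setup hints above.
export async function classifyImage(imageUri) {
  return ClassificationNativeModule.runClassification(imageUri);
}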