// @fugood/onnxruntime-react-native
// ONNX Runtime bridge for React Native — TurboModule type declarations.
import type { TurboModule } from 'react-native/Libraries/TurboModule/RCTExport';
/**
 * Options forwarded to the native InferenceSession when a model is loaded.
 * NOTE(review): currently an empty placeholder — `{}` accepts any non-nullish
 * value, so nothing is validated here; tighten once the native option set is
 * known (confirm against the native implementation).
 */
type SessionOptions = {};
/**
 * Options forwarded to the native run() call.
 * NOTE(review): empty placeholder, same caveat as SessionOptions.
 */
type RunOptions = {};
/**
 * Information returned by the native side after a model has been loaded.
 */
export type ModelLoadInfo = {
  /**
   * Key identifying the loaded InferenceSession instance. Pass this key to
   * run() (and presumably dispose()) to address the session.
   */
  readonly key: string;
  /**
   * Input names of the loaded model.
   */
  readonly inputNames: string[];
  /**
   * Output names of the loaded model.
   */
  readonly outputNames: string[];
};
/**
 * JSIBlob is a blob object that exchanges ArrayBuffer data via
 * OnnxruntimeJSIHelper, since the React Native bridge cannot pass an
 * ArrayBuffer directly.
 */
export type JSIBlob = {
  // Identifier of the blob on the native side.
  blobId: string;
  // Offset into the blob where the data begins — presumably in bytes;
  // confirm against OnnxruntimeJSIHelper.
  offset: number;
  // Size of the data — presumably in bytes; confirm against OnnxruntimeJSIHelper.
  size: number;
};
/**
 * Tensor representation for React Native, which doesn't allow ArrayBuffer in
 * the native bridge, so binary tensor data is stored as a JSIBlob instead.
 */
export type EncodedTensor = {
  /**
   * The dimensions of the tensor.
   */
  readonly dims: readonly number[];
  /**
   * The data type of the tensor.
   * NOTE(review): the set of valid type strings is not visible here —
   * presumably the ONNX tensor element type names; confirm with the native side.
   */
  readonly type: string;
  /**
   * The JSIBlob holding the tensor's buffer data.
   * If the data is a string array, it is passed as-is rather than as a JSIBlob.
   */
  readonly data: JSIBlob | string[];
};
/**
 * Input feeds for run(): maps each model input name to its encoded tensor.
 */
export type Feeds = {
  [name: string]: EncodedTensor;
};
export type Fetches = string[];
/**
 * Result of run(): maps each fetched output name to its encoded tensor.
 */
export type Return = {
  [name: string]: EncodedTensor;
};
/**
 * TurboModule spec for the native ONNX Runtime module. Method signatures must
 * stay exactly in sync with the native implementation (react-native codegen
 * parses this interface).
 */
export interface Spec extends TurboModule {
  /**
   * Loads a model from a file path.
   *
   * @param modelPath path to the model file — format (absolute path vs URI)
   *   is not visible here; confirm against the native implementation.
   * @param options session options (currently an empty placeholder type).
   * @returns promise resolving to the session key and the model's input/output names.
   */
  loadModel(modelPath: string, options: SessionOptions): Promise<ModelLoadInfo>;
  /**
   * Loads a model from in-memory data held as a JSIBlob.
   * Optional: may be absent on platforms/builds that don't support blob loading.
   */
  loadModelFromBlob?(blob: JSIBlob, options: SessionOptions): Promise<ModelLoadInfo>;
  /**
   * Releases the native InferenceSession identified by `key`.
   */
  dispose(key: string): Promise<void>;
  /**
   * Runs inference on the session identified by `key`.
   *
   * @param key session key returned by loadModel/loadModelFromBlob.
   * @param feeds input name → encoded tensor map.
   * @param fetches output names to compute.
   * @returns promise resolving to an output name → encoded tensor map.
   */
  run(key: string, feeds: Feeds, fetches: Fetches, options: RunOptions): Promise<Return>;
}
// The native module binding. NOTE(review): the `| null` presumably signals
// that the native module may be unavailable (not linked) — callers should
// null-check before use; confirm against the module registration code.
declare const _default: Spec | null;
export default _default;