node-llama-cpp
Version: (not specified)
Run AI models locally on your machine with Node.js bindings for llama.cpp. Enforces a JSON schema on the model output at the generation level.
14 lines (13 loc) • 496 B
TypeScript
/**
 * Coordinates disposal across dependent resources.
 *
 * A guard can be linked to parent guards (hierarchy supplied at construction
 * or mutated later), and hands out {@link DisposalPreventionHandle}s that —
 * judging by the name — block disposal while held.
 * NOTE(review): this is an ambient declaration; the runtime semantics below
 * are inferred from naming and signatures — confirm against the implementation.
 */
export declare class DisposeGuard {
    /** @param parentDisposeGuards - optional guards this one is initially linked under. */
    constructor(parentDisposeGuards?: DisposeGuard[]);
    /** Links `parent` so it participates in this guard's dispose coordination. */
    addParentDisposeGuard(parent: DisposeGuard): void;
    /** Unlinks a previously added parent guard. */
    removeParentDisposeGuard(parent: DisposeGuard): void;
    /**
     * Resolves when disposal may proceed — presumably once all outstanding
     * prevention handles are released (TODO confirm).
     */
    acquireDisposeLock(): Promise<void>;
    /**
     * Creates a handle that prevents disposal until it is disposed.
     * @param ignoreAwaitingDispose - effect not visible from this declaration;
     *   presumably allows acquiring a handle even while a dispose is pending — verify.
     */
    createPreventDisposalHandle(ignoreAwaitingDispose?: boolean): DisposalPreventionHandle;
}
/**
 * Handle that keeps a {@link DisposeGuard} from disposing while it is held.
 *
 * Not directly constructible (private constructor) — obtained via
 * `DisposeGuard.createPreventDisposalHandle()`. Implements the explicit
 * resource management protocol, so it works with the `using` declaration
 * (TypeScript 5.2+ / ES `Symbol.dispose`).
 */
export declare class DisposalPreventionHandle {
    private constructor();
    /** Releases this handle, allowing the guard's disposal to proceed. */
    dispose(): void;
    /** Alias for {@link dispose}; enables `using handle = ...` syntax. */
    [Symbol.dispose](): void;
    /** Whether this handle has already been released. */
    get disposed(): boolean;
}