node-llama-cpp
Run AI models locally on your machine with Node.js bindings for llama.cpp. Enforce a JSON schema on the model output at the generation level.
JavaScript
// Tracks the current byte offset while reading a GGUF file.
export class GgufReadOffset {
    offset;

    constructor(offset) {
        // Constructing from an existing GgufReadOffset copies its current position
        if (offset instanceof GgufReadOffset)
            this.offset = offset.offset;
        else
            this.offset = offset;
    }

    // Advance the offset by the given number of bytes
    moveBy(amount) {
        this.offset += amount;
    }

    // Accept either a number or an existing GgufReadOffset and
    // always return a GgufReadOffset instance
    static resolveReadOffset(offset) {
        if (offset instanceof GgufReadOffset)
            return offset;
        return new GgufReadOffset(offset);
    }
}
//# sourceMappingURL=GgufReadOffset.js.map
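A minimal usage sketch (not part of the original file) showing how this class can be exercised: the offset is advanced as bytes are consumed, and resolveReadOffset normalizes either a number or an existing instance into a GgufReadOffset.

JavaScript
// Start reading at the beginning of a buffer
const readOffset = new GgufReadOffset(0);

// Pretend a 4-byte field was just read, so advance the offset
readOffset.moveBy(4);
console.log(readOffset.offset); // 4

// resolveReadOffset returns the same instance when given a GgufReadOffset,
// and wraps a plain number in a new instance otherwise
const sameInstance = GgufReadOffset.resolveReadOffset(readOffset);
const fromNumber = GgufReadOffset.resolveReadOffset(16);
console.log(sameInstance === readOffset); // true
console.log(fromNumber.offset); // 16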