node-llama-cpp
Version:
Run AI models locally on your machine with Node.js bindings for llama.cpp. Enforces a JSON schema on the model output at the generation level.
12 lines • 409 B
JavaScript
/**
 * All segment types a chat model response segment may carry.
 * Currently only `"thought"` is defined (used by `isChatModelResponseSegment`
 * consumers to narrow `segmentType`).
 */
export const allSegmentTypes = ["thought"];
/**
 * Type guard: is the given chat model response item a function-call item?
 *
 * @param {unknown} item - A response item; may be a plain text string,
 *   `null`/`undefined`, or a structured item object.
 * @returns {boolean} `true` only for non-string items whose `type` is `"functionCall"`.
 */
export function isChatModelResponseFunctionCall(item) {
    // Strings are plain text chunks, never structured items; `?.` handles
    // null/undefined (and primitives without a `type` property) in one step.
    return typeof item !== "string" && item?.type === "functionCall";
}
/**
 * Type guard: is the given chat model response item a segment item?
 *
 * @param {unknown} item - A response item; may be a plain text string,
 *   `null`/`undefined`, or a structured item object.
 * @returns {boolean} `true` only for non-string items whose `type` is `"segment"`.
 */
export function isChatModelResponseSegment(item) {
    // Strings are plain text chunks, never structured items; `?.` handles
    // null/undefined (and primitives without a `type` property) in one step.
    return typeof item !== "string" && item?.type === "segment";
}
//# sourceMappingURL=types.js.map