node-llama-cpp
Run AI models locally on your machine with Node.js bindings for llama.cpp. Enforce a JSON schema on the model output at the generation level.
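
To give a sense of how schema enforcement is used, here is a minimal sketch against the v3 API (the model path and schema are placeholders, and exact option names may vary between releases): the JSON schema is compiled into a grammar that constrains token generation, so the returned text is guaranteed to match it.

JavaScript
import {getLlama, LlamaChatSession} from "node-llama-cpp";

// Load a local GGUF model (the path is a placeholder).
const llama = await getLlama();
const model = await llama.loadModel({modelPath: "./models/model.gguf"});
const context = await model.createContext();
const session = new LlamaChatSession({contextSequence: context.getSequence()});

// Compile a JSON schema into a grammar that constrains generation.
const grammar = await llama.createGrammarForJsonSchema({
    type: "object",
    properties: {
        answer: {type: "string"},
        confidence: {type: "number"}
    }
});

const response = await session.prompt("Is the sky blue?", {grammar});
const parsed = grammar.parse(response); // parsed, schema-conforming JSON object
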
JavaScript
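// Maps a segment type to the matching segment definition (if any)
// declared in a chat wrapper's settings.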
export function getChatWrapperSegmentDefinition(chatWrapperSetting, segmentType) {
if (segmentType === "thought")
return chatWrapperSetting.segments?.thought;
else if (segmentType === "comment")
return chatWrapperSetting.segments?.comment;
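    // No-op; likely the compiled remnant of a TypeScript exhaustiveness check
    // (e.g. `segmentType satisfies never`) rather than meaningful runtime logic.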
void segmentType;
return undefined;
}
//# sourceMappingURL=getChatWrapperSegmentDefinition.js.map
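
For illustration, calling the helper with an invented settings object (real chat wrappers define their own segments; the prefix/suffix values here are made up):

JavaScript
const settings = {segments: {thought: {prefix: "<think>", suffix: "</think>"}}};

getChatWrapperSegmentDefinition(settings, "thought"); // {prefix: "<think>", suffix: "</think>"}
getChatWrapperSegmentDefinition(settings, "comment"); // undefined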