@llamaindex/core
Version:
LlamaIndex Core Module
47 lines (44 loc) • 1.24 kB
JavaScript
import { streamConverter, extractText } from '../../utils/dist/index.js';
class BaseLLM {
    /**
     * Complete a plain-text prompt by delegating to the chat interface.
     *
     * @param params - `{ prompt, stream?, responseFormat? }`. When `stream` is
     *   truthy, resolves to an async iterable of `{ raw: null, text }` chunks;
     *   otherwise resolves to a single `{ text, raw }` completion response.
     */
    async complete(params) {
        const { prompt, stream, responseFormat } = params;
        // The prompt is forwarded as a single user-role chat message.
        const messages = [
            {
                content: prompt,
                role: "user"
            }
        ];
        // Only forward responseFormat when the caller actually supplied one,
        // so the chat payload stays free of an explicit `undefined` key.
        const formatOptions = responseFormat ? { responseFormat } : {};
        if (stream) {
            const chatStream = await this.chat({
                messages,
                stream: true,
                ...formatOptions
            });
            // Re-shape each chat delta into a completion-style chunk.
            return streamConverter(chatStream, (chunk)=>({
                    raw: null,
                    text: chunk.delta
                }));
        }
        const chatResponse = await this.chat({
            messages,
            ...formatOptions
        });
        return {
            text: extractText(chatResponse.message.content),
            raw: chatResponse.raw
        };
    }
}
/**
 * Marker base class for LLMs that support tool/function calling.
 * NOTE(review): adds no behavior of its own in this file — it only inherits
 * `complete` from BaseLLM. Presumably concrete subclasses implement the
 * tool-call handling; confirm against the implementations elsewhere in the
 * package.
 */
class ToolCallLLM extends BaseLLM {
}
export { BaseLLM, ToolCallLLM };