@xsai/generate-text

extra-small AI SDK.

import { trampoline, responseJSON } from '@xsai/shared';
import { chat, determineStepType, executeTool } from '@xsai/shared-chat';

// Runs one non-streaming chat completion. If the model finishes normally, the
// accumulated result is returned directly; if it requests tool calls, the tools
// are executed, their results appended to the message list, and a thunk for the
// next round is returned so the outer trampoline can continue without recursion.
const rawGenerateText = async (options) =>
  chat({
    ...options,
    maxSteps: void 0,
    messages: options.messages,
    steps: void 0,
    stream: false
  })
    .then(responseJSON)
    .then(async (res) => {
      const { choices, usage } = res;
      if (!choices?.length)
        throw new Error(`No choices returned, response body: ${JSON.stringify(res)}`);

      const messages = structuredClone(options.messages);
      const steps = options.steps ? structuredClone(options.steps) : [];
      const toolCalls = [];
      const toolResults = [];

      const { finish_reason: finishReason, message } = choices[0];
      const msgToolCalls = message?.tool_calls ?? [];
      const stepType = determineStepType({
        finishReason,
        maxSteps: options.maxSteps ?? 1,
        stepsLength: steps.length,
        toolCallsLength: msgToolCalls.length
      });

      messages.push(message);

      // Terminal step: nothing left to execute, return the final result.
      if (finishReason === "stop" || stepType === "done") {
        const step2 = { finishReason, stepType, text: message.content, toolCalls, toolResults, usage };
        steps.push(step2);
        if (options.onStepFinish)
          await options.onStepFinish(step2);
        return { finishReason, messages, steps, text: message.content, toolCalls, toolResults, usage };
      }

      // Execute each requested tool call and append its result message.
      for (const toolCall of msgToolCalls) {
        const { completionToolCall, completionToolResult, message: message2 } = await executeTool({
          abortSignal: options.abortSignal,
          messages,
          toolCall,
          tools: options.tools
        });
        toolCalls.push(completionToolCall);
        toolResults.push(completionToolResult);
        messages.push(message2);
      }

      const step = { finishReason, stepType, text: message.content, toolCalls, toolResults, usage };
      steps.push(step);
      if (options.onStepFinish)
        await options.onStepFinish(step);

      // Return a thunk instead of recursing so trampoline() drives the next step.
      return async () => rawGenerateText({ ...options, messages, steps });
    });

const generateText = async (options) =>
  trampoline(async () => rawGenerateText(options));

export { generateText };
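
For reference, a minimal usage sketch follows. The code above only reads messages, maxSteps, steps, tools, abortSignal, and onStepFinish directly; the remaining fields shown here (apiKey, baseURL, model) are assumed to be passed straight through to chat() from @xsai/shared-chat, and the endpoint and model names are placeholders, not values taken from this file.

import { generateText } from '@xsai/generate-text';

// Hypothetical endpoint/model values; apiKey, baseURL, and model are assumed to
// be forwarded unchanged to chat() from @xsai/shared-chat.
const { text, steps, finishReason, usage } = await generateText({
  apiKey: 'sk-...',
  baseURL: 'https://api.openai.com/v1/',
  model: 'gpt-4o-mini',
  maxSteps: 3, // allow up to three tool-calling rounds
  messages: [
    { role: 'system', content: 'You are a helpful assistant.' },
    { role: 'user', content: 'Hello!' }
  ],
  onStepFinish: (step) => console.log(step.stepType, step.toolCalls.length)
});

console.log(text, finishReason, usage, steps.length);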