// @ai2070/l0
// L0: The Missing Reliability Substrate for AI
import { registerAdapter } from "./registry";
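/**
 * Wrap a raw OpenAI chat-completions stream as an async generator of
 * normalized events:
 * - `token`: content deltas (prefixed with `[choice:<i>]` when `choiceIndex`
 *   is "all"), plus raw function/tool-call argument deltas when
 *   `emitFunctionCallsAsTokens` is set;
 * - `message`: accumulated function_call / tool_calls, flushed as JSON once
 *   a choice reports a finish_reason (unless `includeToolCalls` is false);
 * - `complete`: end of stream, carrying `usage` when seen and requested;
 * - `error`: thrown errors are yielded as a terminal event, not re-thrown.
 */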
async function* wrapOpenAIStream(stream, options = {}) {
const {
includeUsage = true,
includeToolCalls = true,
emitFunctionCallsAsTokens = false,
choiceIndex = 0
} = options;
let usage;
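// Per-choice accumulators for function/tool-call fragments, created lazily.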
const choiceState = new Map();
const getChoiceState = (index) => {
if (!choiceState.has(index)) {
choiceState.set(index, {
functionCallAccumulator: null,
toolCallsAccumulator: new Map(),
finished: false
});
}
return choiceState.get(index);
};
try {
for await (const chunk of stream) {
const choices = chunk.choices;
if (!choices || choices.length === 0) {
continue;
}
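// usage arrives only on the final chunk, and only when the request sets
// stream_options: { include_usage: true }.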
if (chunk.usage) {
usage = chunk.usage;
}
for (const choice of choices) {
if (!choice) continue;
const idx = choice.index;
if (choiceIndex !== "all" && idx !== choiceIndex) {
continue;
}
const state = getChoiceState(idx);
const delta = choice.delta;
if (!delta) continue;
const choicePrefix = choiceIndex === "all" ? `[choice:${idx}]` : "";
if (delta.content) {
yield {
type: "token",
value: choicePrefix ? `${choicePrefix}${delta.content}` : delta.content,
timestamp: Date.now()
};
}
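// Legacy function_call streaming: the name arrives once, then the
// argument text streams in fragments.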
if (delta.function_call) {
if (delta.function_call.name) {
state.functionCallAccumulator = {
name: delta.function_call.name,
arguments: delta.function_call.arguments || ""
};
} else if (delta.function_call.arguments && state.functionCallAccumulator) {
state.functionCallAccumulator.arguments += delta.function_call.arguments;
}
if (emitFunctionCallsAsTokens && delta.function_call.arguments) {
yield {
type: "token",
value: delta.function_call.arguments,
timestamp: Date.now()
};
}
}
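// Parallel tool calls stream as fragments keyed by tool-call index:
// id/name arrive first, then argument text accumulates.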
if (delta.tool_calls) {
for (const toolCall of delta.tool_calls) {
const existing = state.toolCallsAccumulator.get(toolCall.index);
if (toolCall.id || toolCall.function?.name) {
// First fragment for this call (or a provider re-sending id/name): merge
// with any existing fragments so accumulated arguments are never wiped.
state.toolCallsAccumulator.set(toolCall.index, {
id: toolCall.id || existing?.id || "",
name: toolCall.function?.name || existing?.name || "",
arguments: (existing?.arguments || "") + (toolCall.function?.arguments || "")
});
} else if (toolCall.function?.arguments && existing) {
existing.arguments += toolCall.function.arguments;
}
if (emitFunctionCallsAsTokens && toolCall.function?.arguments) {
yield {
type: "token",
value: toolCall.function.arguments,
timestamp: Date.now()
};
}
}
}
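// First finish_reason for this choice: flush any accumulated
// function/tool calls as message events.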
if (choice.finish_reason && !state.finished) {
state.finished = true;
if (state.functionCallAccumulator && includeToolCalls) {
yield {
type: "message",
value: JSON.stringify({
type: "function_call",
function_call: state.functionCallAccumulator,
...choiceIndex === "all" ? { choiceIndex: idx } : {}
}),
role: "assistant",
timestamp: Date.now()
};
}
if (state.toolCallsAccumulator.size > 0 && includeToolCalls) {
const toolCalls = Array.from(state.toolCallsAccumulator.values());
yield {
type: "message",
value: JSON.stringify({
type: "tool_calls",
tool_calls: toolCalls,
...choiceIndex === "all" ? { choiceIndex: idx } : {}
}),
role: "assistant",
timestamp: Date.now()
};
}
}
}
}
yield {
type: "complete",
timestamp: Date.now(),
...includeUsage && usage ? { usage } : {}
};
} catch (error) {
yield {
type: "error",
error: error instanceof Error ? error : new Error(String(error)),
timestamp: Date.now()
};
}
}
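/**
 * Build a lazy stream factory: each call starts a fresh streaming
 * chat.completions request and wraps it with wrapOpenAIStream, so callers
 * can invoke it again for a new attempt (e.g. retries).
 */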
function openaiStream(client, params, options) {
return async () => {
const stream = await client.chat.completions.create({
...params,
stream: true
});
return wrapOpenAIStream(stream, options);
};
}
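/**
 * Text-generation convenience: a string prompt becomes a single user
 * message; wrapper-specific options are split off from the chat params
 * that are passed through to the API.
 */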
function openaiText(client, model, prompt, options) {
const messages = typeof prompt === "string" ? [{ role: "user", content: prompt }] : prompt;
const {
includeUsage,
includeToolCalls,
emitFunctionCallsAsTokens,
...chatParams
} = options || {};
return openaiStream(
client,
{ model, messages, ...chatParams },
{ includeUsage, includeToolCalls, emitFunctionCallsAsTokens }
);
}
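/**
 * Like openaiText, but requests JSON output via
 * response_format: { type: "json_object" }.
 */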
function openaiJSON(client, model, prompt, options) {
const messages = typeof prompt === "string" ? [{ role: "user", content: prompt }] : prompt;
const {
includeUsage,
includeToolCalls,
emitFunctionCallsAsTokens,
...chatParams
} = options || {};
return openaiStream(
client,
{
model,
messages,
response_format: { type: "json_object" },
...chatParams
},
{ includeUsage, includeToolCalls, emitFunctionCallsAsTokens }
);
}
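/**
 * Streaming completion with tool definitions; includeToolCalls defaults to
 * true so accumulated tool calls are surfaced as message events.
 */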
function openaiWithTools(client, model, messages, tools, options) {
const {
includeUsage,
includeToolCalls,
emitFunctionCallsAsTokens,
...chatParams
} = options || {};
return openaiStream(
client,
{ model, messages, tools, ...chatParams },
{
includeUsage,
includeToolCalls: includeToolCalls ?? true,
emitFunctionCallsAsTokens
}
);
}
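/**
 * Structural check for an OpenAI streaming chunk: an object with a
 * non-empty choices array whose first entry has a delta.
 */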
function isOpenAIChunk(obj) {
if (!obj || typeof obj !== "object" || !("choices" in obj)) {
return false;
}
if (!Array.isArray(obj.choices) || obj.choices.length === 0) {
return false;
}
const firstChoice = obj.choices[0];
return firstChoice !== undefined && "delta" in firstChoice;
}
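/**
 * Drain a raw OpenAI stream and concatenate the text content of the first
 * choice; tool calls and usage are ignored.
 */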
async function extractOpenAIText(stream) {
let text = "";
for await (const chunk of stream) {
const content = chunk.choices?.[0]?.delta?.content;
if (content) {
text += content;
}
}
return text;
}
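/**
 * Heuristic detection of the OpenAI SDK's Stream class: an async iterable
 * that exposes toReadableStream() plus a controller or response property.
 */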
function isOpenAIStream(input) {
if (!input || typeof input !== "object") return false;
if (!(Symbol.asyncIterator in input)) return false;
if (typeof input.toReadableStream !== "function") return false;
return "controller" in input || "response" in input;
}
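// Adapter descriptor consumed by the shared registry in ./registry.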
const openaiAdapter = {
name: "openai",
detect: isOpenAIStream,
wrap: wrapOpenAIStream
};
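// Best-effort self-registration on import; registry errors are swallowed
// so that importing this module never throws.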
try {
registerAdapter(openaiAdapter, { silent: true });
} catch {
}
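// Usage sketch (not part of this module). Assumes the official OpenAI SDK;
// the client and model name below are placeholders:
//
//   import OpenAI from "openai";
//   const client = new OpenAI();
//   const run = openaiText(client, "gpt-4o-mini", "Say hello");
//   for await (const event of await run()) {
//     if (event.type === "token") process.stdout.write(event.value);
//     if (event.type === "error") throw event.error;
//   }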
export {
extractOpenAIText,
isOpenAIChunk,
isOpenAIStream,
openaiAdapter,
openaiJSON,
openaiStream,
openaiText,
openaiWithTools,
wrapOpenAIStream
};