@ai2070/l0
L0: The Missing Reliability Substrate for AI
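/**
 * Type guard: returns true when `event` is an object whose `type` matches one
 * of Anthropic's six streaming event types.
 */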
export function isAnthropicStreamEvent(event) {
if (!event || typeof event !== "object")
return false;
const e = event;
if (typeof e.type !== "string")
return false;
return [
"message_start",
"content_block_start",
"content_block_delta",
"content_block_stop",
"message_delta",
"message_stop",
].includes(e.type);
}
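/**
 * Detects an Anthropic stream by duck typing: the value must be async-iterable
 * and either expose the SDK MessageStream helpers (`on`, `finalMessage`) or
 * carry the `controller`/`body` fields of a raw streaming response.
 */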
export function isAnthropicStream(input) {
if (!input || typeof input !== "object")
return false;
if (!(Symbol.asyncIterator in input))
return false;
const stream = input;
if (typeof stream.on === "function" &&
typeof stream.finalMessage === "function") {
return true;
}
if ("controller" in stream && "body" in stream) {
return true;
}
return false;
}
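/**
 * Async generator that normalizes an Anthropic stream into L0 events:
 * - "token" for each text delta
 * - "message" (JSON-encoded tool_use) when a tool-use block completes
 * - "complete" exactly once at the end, with token usage when requested
 * - "error" if the underlying stream throws
 */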
export async function* wrapAnthropicStream(stream, options = {}) {
const { includeUsage = true, includeToolUse = true } = options;
let usage = {}; // token counts collected from message_start / message_delta
let emittedDone = false; // ensures a single "complete" event is emitted
const toolUseAccumulator = new Map(); // content block index -> partially built tool_use
try {
for await (const event of stream) {
const eventType = event.type;
switch (eventType) {
case "message_start": {
const e = event;
if (e.message?.usage) {
usage.input_tokens = e.message.usage.input_tokens;
usage.output_tokens = e.message.usage.output_tokens;
}
break;
}
case "content_block_start": {
const e = event;
if (e.content_block?.type === "tool_use" && includeToolUse) {
toolUseAccumulator.set(e.index, {
id: e.content_block.id || "",
name: e.content_block.name || "",
input: "",
});
}
break;
}
case "content_block_delta": {
const e = event;
if (e.delta?.type === "text_delta" && e.delta.text != null) {
yield {
type: "token",
value: e.delta.text,
timestamp: Date.now(),
};
}
else if (e.delta?.type === "input_json_delta" &&
e.delta.partial_json != null) {
const toolUse = toolUseAccumulator.get(e.index);
if (toolUse) {
toolUse.input += e.delta.partial_json;
}
}
break;
}
case "content_block_stop": {
const e = event;
if (includeToolUse) {
const toolUse = toolUseAccumulator.get(e.index);
if (toolUse) {
yield {
type: "message",
value: JSON.stringify({
type: "tool_use",
tool_use: {
id: toolUse.id,
name: toolUse.name,
input: toolUse.input,
},
}),
role: "assistant",
timestamp: Date.now(),
};
toolUseAccumulator.delete(e.index);
}
}
break;
}
case "message_delta": {
const e = event;
if (e.usage?.output_tokens != null) {
usage.output_tokens = e.usage.output_tokens;
}
break;
}
case "message_stop": {
if (!emittedDone) {
emittedDone = true;
yield {
type: "complete",
timestamp: Date.now(),
...(includeUsage && (usage.input_tokens || usage.output_tokens)
? { usage }
: {}),
};
}
break;
}
}
}
// Fallback: if the stream ended without a message_stop, still emit "complete".
if (!emittedDone) {
emittedDone = true;
yield {
type: "complete",
timestamp: Date.now(),
...(includeUsage && (usage.input_tokens || usage.output_tokens)
? { usage }
: {}),
};
}
}
catch (err) {
// Surface stream failures as a structured "error" event instead of throwing.
yield {
type: "error",
error: err instanceof Error ? err : new Error(String(err)),
timestamp: Date.now(),
};
}
}
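// Adapter descriptor: `detect` recognizes Anthropic streams, `wrap` normalizes them.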
export const anthropicAdapter = {
name: "anthropic",
detect: isAnthropicStream,
wrap: wrapAnthropicStream,
};
import { registerAdapter } from "./registry";
try {
registerAdapter(anthropicAdapter, { silent: true });
}
catch {
}
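/**
 * Returns a lazy stream factory: calling the returned async function starts
 * `client.messages.stream(params)` and yields normalized events via
 * `wrapAnthropicStream`.
 */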
export function anthropicStream(client, params, options) {
return async () => {
const stream = client.messages.stream(params);
return wrapAnthropicStream(stream, options);
};
}
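/**
 * Convenience wrapper around `anthropicStream` for a single user prompt.
 * Defaults to 1024 max output tokens and forwards `system` only when provided.
 */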
export function anthropicText(client, model, prompt, options) {
const { maxTokens = 1024, system, includeUsage, includeToolUse } = options || {};
return anthropicStream(client, {
model,
max_tokens: maxTokens,
messages: [{ role: "user", content: prompt }],
...(system ? { system } : {}),
}, { includeUsage, includeToolUse });
}
//# sourceMappingURL=anthropic.js.map