aiwrapper
Version:
A Universal AI Wrapper for JavaScript & TypeScript
282 lines (281 loc) • 9.81 kB
JavaScript
// Bundler runtime helpers (esbuild-style) that down-level object spread
// and class-field initialization for older runtime targets.
var __defProp = Object.defineProperty;
var __getOwnPropSymbols = Object.getOwnPropertySymbols;
var __hasOwnProp = Object.prototype.hasOwnProperty;
var __propIsEnum = Object.prototype.propertyIsEnumerable;
// Define `key` on `obj`. An already-present key is redefined via
// defineProperty so it stays enumerable/configurable/writable; a new key
// is installed with plain assignment.
var __defNormalProp = (obj, key, value) => {
  if (key in obj) {
    return __defProp(obj, key, { enumerable: true, configurable: true, writable: true, value });
  }
  return obj[key] = value;
};
// Spread-like merge: copy `source`'s own enumerable properties (string
// and symbol keyed) onto `target`; a nullish source is treated as empty.
var __spreadValues = (target, source) => {
  const from = source || {};
  for (const key of Object.keys(from)) {
    __defNormalProp(target, key, from[key]);
  }
  if (__getOwnPropSymbols) {
    for (const sym of __getOwnPropSymbols(from)) {
      if (__propIsEnum.call(from, sym)) {
        __defNormalProp(target, sym, from[sym]);
      }
    }
  }
  return target;
};
// Class-field initializer: symbols pass through untouched, every other
// key is coerced to a string before being defined.
var __publicField = (obj, key, value) => {
  const normalized = typeof key === "symbol" ? key : key + "";
  return __defNormalProp(obj, normalized, value);
};
import {
httpRequestWithRetry as fetch
} from "../../http-request.js";
import { processServerEvents } from "../../process-server-events.js";
import {
LanguageProvider
} from "../language-provider.js";
import { models } from "aimodels";
import { calculateModelResponseTokens } from "../utils/token-calculator.js";
import {
LangMessages
} from "../messages.js";
import { addInstructionAboutSchema } from "../prompt-for-json.js";
import { AnthropicStreamHandler } from "./anthropic-stream-handler.js";
/**
 * LanguageProvider implementation for the Anthropic Messages API
 * (Claude models). Responses are always streamed internally and folded
 * into a LangMessages result via AnthropicStreamHandler.
 */
class AnthropicLang extends LanguageProvider {
  /**
   * @param {object} options
   * @param {string} options.apiKey - Anthropic API key.
   * @param {string} [options.model] - Model id; defaults to "claude-3-7-sonnet-20250219".
   * @param {string} [options.systemPrompt] - Default system prompt used by ask()/chat().
   * @param {number} [options.maxTokens] - Cap on response tokens.
   * @param {*} [options.extendedThinking] - Stored in config; not read in this file.
   */
  constructor(options) {
    const modelName = options.model || "claude-3-7-sonnet-20250219";
    super(modelName);
    __publicField(this, "_config");
    const modelInfo = models.id(modelName);
    if (!modelInfo) {
      // Non-fatal: the request is still attempted; token budgeting falls
      // back to defaults in prepareRequest().
      console.error(`Invalid Anthropic model: ${modelName}. Model not found in aimodels database.`);
    }
    this._config = {
      apiKey: options.apiKey,
      model: modelName,
      systemPrompt: options.systemPrompt,
      maxTokens: options.maxTokens,
      extendedThinking: options.extendedThinking
    };
  }
  /**
   * Convenience wrapper: send a single user prompt (plus the configured
   * system prompt, if any) through chat().
   */
  async ask(prompt, options) {
    const messages = new LangMessages();
    if (this._config.systemPrompt) {
      messages.instructions = this._config.systemPrompt;
    }
    messages.addUserMessage(prompt);
    return await this.chat(messages, options);
  }
  /**
   * Run one chat turn against the Anthropic Messages API.
   * Streams the response, executes any tool calls the model requested,
   * and returns the populated LangMessages collection.
   *
   * On abort, the result is flagged `aborted` and the thrown AbortError
   * is annotated with `partialResult` before being rethrown.
   */
  async chat(messages, options) {
    const abortSignal = options == null ? void 0 : options.signal;
    const messageCollection = messages instanceof LangMessages ? messages : new LangMessages(messages);
    // Collection-level instructions win over the constructor default.
    let instructions = messageCollection.instructions || "";
    if (!instructions && this._config.systemPrompt) {
      instructions = this._config.systemPrompt;
    }
    if (options == null ? void 0 : options.schema) {
      // Append a directive asking the model to answer in the schema's shape.
      const baseInstruction = instructions !== "" ? instructions + "\n\n" : "";
      instructions = baseInstruction + addInstructionAboutSchema(
        options.schema
      );
    }
    const { providerMessages, requestMaxTokens, tools } = this.prepareRequest(messageCollection);
    const result = messageCollection;
    const requestBody = __spreadValues({
      model: this._config.model,
      messages: providerMessages,
      max_tokens: requestMaxTokens,
      system: instructions,
      // Always stream internally to unify the code path
      stream: true
    }, tools ? { tools } : {});
    try {
      // BUGFIX: the rejection was previously re-wrapped with
      // `new Error(err)`, which stringified the original error, lost its
      // stack, and renamed AbortError to "Error" — so the AbortError
      // branch below could never match fetch-level aborts. Let the
      // original rejection propagate to the catch block instead.
      const response = await fetch("https://api.anthropic.com/v1/messages", {
        method: "POST",
        headers: {
          "Content-Type": "application/json",
          "anthropic-version": "2023-06-01",
          "anthropic-dangerous-direct-browser-access": "true",
          "x-api-key": this._config.apiKey
        },
        body: JSON.stringify(requestBody),
        signal: abortSignal
      });
      const streamHandler = new AnthropicStreamHandler(result, options == null ? void 0 : options.onResult);
      await processServerEvents(response, (data) => {
        streamHandler.handleEvent(data);
      }, abortSignal);
    } catch (error) {
      if ((error == null ? void 0 : error.name) === "AbortError") {
        result.aborted = true;
        error.partialResult = result;
      }
      throw error;
    }
    result.finished = true;
    // Run any tool calls the model requested and notify the caller once
    // more, mirroring the streaming onResult callbacks.
    const toolResults = await result.executeRequestedTools();
    if ((options == null ? void 0 : options.onResult) && toolResults) options.onResult(toolResults);
    return result;
  }
  /**
   * Build the provider-specific pieces of the request: the transformed
   * message array, the response-token budget, and the tool declarations
   * (Anthropic `tools` format), if any structured tools are available.
   */
  prepareRequest(messageCollection) {
    var _a;
    const providerMessages = this.transformMessagesForProvider(messageCollection);
    const modelInfo = models.id(this._config.model);
    if (!modelInfo) {
      console.warn(`Model info not found for ${this._config.model}`);
    }
    // Without model metadata, fall back to the configured cap or 16k tokens.
    const requestMaxTokens = modelInfo ? calculateModelResponseTokens(
      modelInfo,
      messageCollection,
      this._config.maxTokens
    ) : this._config.maxTokens || 16e3;
    let tools;
    if ((_a = messageCollection.availableTools) == null ? void 0 : _a.length) {
      // Only tools with an object-valued parameter schema can be expressed
      // as Anthropic `input_schema` entries.
      const structuredTools = messageCollection.availableTools.filter(
        (tool) => typeof tool.parameters === "object" && tool.parameters !== null
      );
      if (structuredTools.length > 0) {
        tools = structuredTools.map((tool) => ({
          name: tool.name,
          description: tool.description || "",
          input_schema: tool.parameters
        }));
      }
    }
    return { providerMessages, requestMaxTokens, tools };
  }
  /**
   * Convert the LangMessages collection into Anthropic's message format.
   * Assistant-produced images cannot appear in assistant turns, so they
   * are buffered and forwarded into the NEXT user message; tool-results
   * messages become user messages carrying `tool_result` blocks.
   */
  transformMessagesForProvider(messages) {
    const out = [];
    const pendingAssistantImages = [];
    for (const message of messages) {
      if (message.role === "user") {
        const content = [];
        const forwardedImages = pendingAssistantImages.length > 0;
        for (const image of pendingAssistantImages) {
          this.appendImageBlocks(content, image);
        }
        pendingAssistantImages.length = 0;
        const { blocks: userBlocks, hasImages: userHasImages } = this.mapUserMessageItems(message);
        content.push(...userBlocks);
        if (forwardedImages || userHasImages) {
          // Nudge the model to actually attend to the attached images.
          content.push({ type: "text", text: this.getVisionHintText() });
        }
        if (content.length > 0) {
          out.push({ role: "user", content });
        }
      } else if (message.role === "assistant") {
        const { content, imagesForNextUser } = this.mapAssistantMessageItems(message);
        if (content.length > 0) {
          out.push({ role: "assistant", content });
        }
        if (imagesForNextUser.length > 0) {
          pendingAssistantImages.push(...imagesForNextUser);
        }
      } else if (message.role === "tool-results") {
        const content = this.mapToolResultItems(message);
        if (content.length > 0) {
          out.push({ role: "user", content });
        }
      }
    }
    return out;
  }
  /**
   * Map a user message's items to Anthropic content blocks.
   * Returns the blocks plus a flag indicating whether any image was present.
   */
  mapUserMessageItems(message) {
    const blocks = [];
    let hasImages = false;
    for (const item of message.items) {
      if (item.type === "text") {
        const textItem = item;
        if (textItem.text.length > 0) {
          blocks.push({ type: "text", text: textItem.text });
        }
      } else if (item.type === "image") {
        hasImages = true;
        this.appendImageBlocks(blocks, item);
      }
    }
    return { blocks, hasImages };
  }
  /**
   * Map an assistant message's items to Anthropic content blocks.
   * Images are not emitted here (invalid in assistant turns) — they are
   * returned separately to be forwarded into the next user message.
   * Reasoning items are intentionally dropped.
   */
  mapAssistantMessageItems(message) {
    var _a;
    const blocks = [];
    const imagesForNextUser = [];
    for (const item of message.items) {
      switch (item.type) {
        case "text": {
          const textItem = item;
          if (textItem.text.length > 0) {
            blocks.push({ type: "text", text: textItem.text });
          }
          break;
        }
        case "image": {
          imagesForNextUser.push(item);
          break;
        }
        case "tool": {
          const toolItem = item;
          blocks.push({
            type: "tool_use",
            id: toolItem.callId,
            name: toolItem.name,
            input: (_a = toolItem.arguments) != null ? _a : {}
          });
          break;
        }
        case "reasoning":
          break;
      }
    }
    return { content: blocks, imagesForNextUser };
  }
  /**
   * Map tool-result items to Anthropic `tool_result` blocks.
   * Non-string results are JSON-stringified (nullish results become "{}").
   */
  mapToolResultItems(message) {
    const blocks = [];
    for (const item of message.items) {
      if (item.type !== "tool-result") continue;
      const resultItem = item;
      let content = resultItem.result;
      if (typeof content !== "string") {
        content = JSON.stringify(content != null ? content : {});
      }
      blocks.push({
        type: "tool_result",
        tool_use_id: resultItem.callId,
        content
      });
    }
    return blocks;
  }
  /**
   * Push the Anthropic image block for `image` onto `target`, skipping
   * images that cannot be converted (see mapImageItemToAnthropicImageBlock).
   */
  appendImageBlocks(target, image) {
    const imageBlock = this.mapImageItemToAnthropicImageBlock(image);
    if (imageBlock) {
      target.push(imageBlock);
    }
  }
  /**
   * Convert an image item to an Anthropic image source block.
   * Precedence: inline base64 (mimeType defaults to image/png), then a
   * `data:` URL (decoded into base64), then a plain URL. Returns null
   * for unconvertible images (and warns on malformed data URLs).
   */
  mapImageItemToAnthropicImageBlock(image) {
    if (typeof image.base64 === "string" && image.base64.length > 0) {
      const mediaType = image.mimeType || "image/png";
      return {
        type: "image",
        source: { type: "base64", media_type: mediaType, data: image.base64 }
      };
    }
    if (typeof image.url === "string" && image.url.length > 0) {
      if (image.url.startsWith("data:")) {
        const match = image.url.match(/^data:([^;]+);base64,(.*)$/);
        if (!match) {
          console.warn("Invalid data URL for Anthropic image.");
          return null;
        }
        const media_type = match[1];
        const data = match[2];
        return {
          type: "image",
          source: { type: "base64", media_type, data }
        };
      }
      return {
        type: "image",
        source: { type: "url", url: image.url }
      };
    }
    return null;
  }
  /**
   * Hint text appended after image content in user messages.
   * FIXME(review): this string is suspiciously specific ("fur color",
   * "a table") — it reads like leftover test-fixture text rather than a
   * general-purpose vision hint. Kept byte-identical to avoid changing
   * runtime prompts; consider replacing with a generic instruction.
   */
  getVisionHintText() {
    return "Describe the visual details of the image, including the subject's fur color and explicitly name the surface or object it is on (for example, a table).";
  }
}
// Public module surface: named export only (no default export).
export {
  AnthropicLang
};
//# sourceMappingURL=anthropic-lang.js.map