// aiwrapper — A Universal AI Wrapper for JavaScript & TypeScript
// (npm listing metadata: 393 lines (392 loc) • 13.7 kB • JavaScript)
// Transpiler-generated helpers (esbuild/tsc-style lowering for object spread
// and class fields). Do not hand-edit: downstream code relies on their exact
// defineProperty semantics.
var __defProp = Object.defineProperty;
var __getOwnPropSymbols = Object.getOwnPropertySymbols;
var __hasOwnProp = Object.prototype.hasOwnProperty;
var __propIsEnum = Object.prototype.propertyIsEnumerable;
// Define `key` on `obj` as an enumerable/configurable/writable data property
// when the key already exists anywhere on obj (incl. prototype); otherwise
// fall back to a plain assignment.
var __defNormalProp = (obj, key, value) => key in obj ? __defProp(obj, key, { enumerable: true, configurable: true, writable: true, value }) : obj[key] = value;
// Lowering of `{ ...a, ...b }`: shallow-copies b's own enumerable string keys,
// then its own enumerable symbol keys, onto `a`. Mutates and returns `a`.
var __spreadValues = (a, b) => {
for (var prop in b || (b = {}))
if (__hasOwnProp.call(b, prop))
__defNormalProp(a, prop, b[prop]);
if (__getOwnPropSymbols)
for (var prop of __getOwnPropSymbols(b)) {
if (__propIsEnum.call(b, prop))
__defNormalProp(a, prop, b[prop]);
}
return a;
};
// Class-field lowering: installs `value` under `key` on the instance
// (non-symbol keys are coerced to strings).
var __publicField = (obj, key, value) => __defNormalProp(obj, typeof key !== "symbol" ? key + "" : key, value);
import {
LanguageProvider
} from "../language-provider.js";
import {
LangMessages,
LangMessage
} from "../messages.js";
import {
httpRequestWithRetry as fetch
} from "../../http-request.js";
import { processServerEvents } from "../../process-server-events.js";
import { models } from "aimodels";
import { calculateModelResponseTokens } from "../utils/token-calculator.js";
import { addInstructionAboutSchema } from "../prompt-for-json.js";
import { OpenAIChatCompletionsStreamHandler } from "./openai-chat-completions-stream-handler.js";
// Private key used to stash the per-request stream handler on the result
// object between handleStreamData calls, without colliding with user data.
const STREAM_HANDLER_SYMBOL = Symbol("OpenAIChatCompletionsStreamHandler");
/**
 * Language provider for OpenAI-compatible `/chat/completions` endpoints.
 *
 * Responsibilities:
 *  - building the request body (model, messages, tools, token budgets,
 *    reasoning options) and HTTP headers,
 *  - streaming responses over SSE and feeding events to a per-request
 *    OpenAIChatCompletionsStreamHandler,
 *  - mapping LangMessages to the provider wire format (text/image content
 *    parts, tool calls, tool results).
 */
class OpenAIChatCompletionsLang extends LanguageProvider {
  /**
   * @param {object} config - Provider configuration: `model`, `apiKey`,
   *   `baseURL`, `systemPrompt`, `maxTokens`, `maxCompletionTokens`,
   *   `headers`, `bodyProperties`, `reasoningEffort`.
   */
  constructor(config) {
    super(config.model);
    __publicField(this, "_config");
    __publicField(this, "modelInfo");
    // May be undefined for models unknown to the `aimodels` registry;
    // computeRequestMaxTokens/supportsReasoning handle that case.
    const modelInfo = models.id(config.model);
    this.modelInfo = modelInfo;
    this._config = config;
  }
  /** Decide how many tokens to request based on model info and optional limits */
  computeRequestMaxTokens(messageCollection) {
    if (this.modelInfo) {
      return calculateModelResponseTokens(
        this.modelInfo,
        messageCollection,
        this._config.maxTokens
      );
    }
    // No registry entry: fall back to the configured cap or a 32k default.
    return this._config.maxTokens || 32e3;
  }
  /** Build OpenAI-like request body including tools and json schema toggles */
  buildRequestBody(messageCollection, requestMaxTokens, options) {
    var _a;
    const providerMessages = this.transformMessagesForProvider(messageCollection);
    // Later spreads win: config.bodyProperties overrides the defaults, and
    // per-call providerSpecificBody overrides both.
    const base = __spreadValues(__spreadValues({
      model: this._config.model,
      messages: providerMessages,
      stream: true,
      max_tokens: requestMaxTokens
    }, this._config.bodyProperties), (_a = options == null ? void 0 : options.providerSpecificBody) != null ? _a : {});
    if (messageCollection.availableTools) {
      base.tools = this.formatTools(messageCollection.availableTools);
    }
    return this.transformBody(base);
  }
  /** Build common request init for fetch */
  buildCommonRequest(body, options) {
    var _a;
    return {
      method: "POST",
      headers: __spreadValues(__spreadValues(__spreadValues({
        "Content-Type": "application/json",
        // Always request SSE for streaming
        "Accept": "text/event-stream"
      }, this._config.apiKey ? { "Authorization": `Bearer ${this._config.apiKey}` } : {}), this._config.headers), (_a = options == null ? void 0 : options.providerSpecificHeaders) != null ? _a : {}),
      body: JSON.stringify(body),
      signal: options == null ? void 0 : options.signal
    };
  }
  /**
   * Factory for arbitrary OpenAI-compatible endpoints.
   * @returns {OpenAIChatCompletionsLang}
   */
  static custom(options) {
    return new OpenAIChatCompletionsLang({
      apiKey: options.apiKey,
      model: options.model,
      systemPrompt: options.systemPrompt || "",
      maxTokens: options.maxTokens,
      maxCompletionTokens: options.maxCompletionTokens,
      baseURL: options.baseURL,
      headers: options.headers,
      bodyProperties: options.bodyProperties,
      reasoningEffort: options.reasoningEffort
    });
  }
  /**
   * One-shot convenience: wraps `prompt` in a fresh message list and chats.
   * @param {string} prompt - User prompt text.
   * @returns {Promise<LangMessages>} the finished conversation result.
   */
  async ask(prompt, options) {
    const messages = new LangMessages();
    if (this._config.systemPrompt) {
      // NOTE(review): the system prompt is pushed with role "user" here, and
      // transformMessagesForProvider also emits it as a "system" message —
      // it likely ends up in the request twice. Preserved as-is; confirm
      // whether this duplication is intentional before changing.
      messages.push(new LangMessage("user", this._config.systemPrompt));
    }
    messages.push(new LangMessage("user", prompt));
    return await this.chat(messages, options);
  }
  /**
   * Apply reasoning-model specific body fields (reasoning_effort,
   * max_completion_tokens) when the model supports reasoning.
   */
  transformBody(body) {
    const transformedBody = __spreadValues({}, body);
    if (this._config.reasoningEffort && this.supportsReasoning()) {
      transformedBody.reasoning_effort = this._config.reasoningEffort;
    }
    if (this._config.maxCompletionTokens !== void 0 && this.supportsReasoning()) {
      transformedBody.max_completion_tokens = this._config.maxCompletionTokens;
    }
    return transformedBody;
  }
  /** @returns {boolean} true when the registry says this model can reason. */
  supportsReasoning() {
    if (this.modelInfo) {
      return this.modelInfo.canReason();
    }
    return false;
  }
  /**
   * Run a streaming chat completion over the given messages.
   * @param {LangMessages|LangMessage[]} messages - Conversation so far.
   * @param {object} [options] - `signal`, `schema`, `onResult`,
   *   `providerSpecificBody`, `providerSpecificHeaders`.
   * @returns {Promise<LangMessages>} the result collection (marked finished).
   * @throws re-throws fetch/stream errors; on AbortError the partial result
   *   is attached as `error.partialResult` and `result.aborted` is set.
   */
  async chat(messages, options) {
    const abortSignal = options == null ? void 0 : options.signal;
    const result = messages instanceof LangMessages ? messages : new LangMessages(messages);
    if (options == null ? void 0 : options.schema) {
      // FIX: previously `result.instructions + "\n\n" || ""`, which turned an
      // undefined `instructions` into the literal string "undefined\n\n"
      // (truthy, so the `|| ""` fallback never applied).
      const baseInstruction = result.instructions ? result.instructions + "\n\n" : "";
      result.instructions = baseInstruction + addInstructionAboutSchema(options.schema);
    }
    const requestMaxTokens = this.computeRequestMaxTokens(result);
    if (this.supportsReasoning() && this._config.maxCompletionTokens === void 0) {
      // NOTE: mutates shared config so the derived budget sticks for
      // subsequent calls on this instance (pre-existing behavior).
      this._config.maxCompletionTokens = Math.max(requestMaxTokens, 25e3);
    }
    const body = this.buildRequestBody(result, requestMaxTokens, options);
    const commonRequest = this.buildCommonRequest(body, options);
    const onData = (data) => {
      this.handleStreamData(data, result, options == null ? void 0 : options.onResult);
    };
    try {
      const response = await fetch(`${this._config.baseURL}/chat/completions`, commonRequest).catch((err) => {
        // Wrap with `cause` so the original stack is preserved for callers.
        throw new Error(String(err), { cause: err });
      });
      await processServerEvents(response, onData, abortSignal);
    } catch (error) {
      if ((error == null ? void 0 : error.name) === "AbortError") {
        result.aborted = true;
        error.partialResult = result;
      }
      throw error;
    }
    result.finished = true;
    // Execute any tool calls the model requested; surface them via onResult.
    const toolResults = await result.executeRequestedTools();
    if ((options == null ? void 0 : options.onResult) && toolResults) options.onResult(toolResults);
    return result;
  }
  /** Map internal tool definitions to the OpenAI `tools` array shape. */
  formatTools(tools) {
    return tools.map((tool) => ({
      type: "function",
      function: {
        name: tool.name,
        description: tool.description,
        parameters: tool.parameters
      }
    }));
  }
  /**
   * Convert a LangMessages collection into provider wire messages:
   * system prompt + instructions first, then user/assistant/tool messages.
   * Assistant-produced images are deferred and attached to the next user
   * message (the provider only accepts image parts on user messages).
   */
  transformMessagesForProvider(messages) {
    const out = [];
    if (this._config.systemPrompt) {
      out.push({ role: "system", content: this._config.systemPrompt });
    }
    if (messages.instructions) {
      out.push({ role: "system", content: messages.instructions });
    }
    const pendingAssistantImages = [];
    for (let i = 0; i < messages.length; i++) {
      const msg = messages[i];
      if (msg.role === "tool-results") {
        const toolMessages = this.mapToolResultsMessage(msg);
        out.push(...toolMessages);
        continue;
      }
      // Any role other than user/assistant (and tool-results above) is dropped.
      if (msg.role !== "user" && msg.role !== "assistant") {
        continue;
      }
      const mapped = this.mapMessageForProvider(msg);
      if (!mapped) continue;
      if (msg.role === "user") {
        const prev = i > 0 ? messages[i - 1] : void 0;
        if (pendingAssistantImages.length > 0) {
          // Flush images collected from earlier assistant turns into this
          // user message's content array.
          const contentArray = this.ensureContentArray(mapped);
          for (const image of pendingAssistantImages) {
            contentArray.push(...this.mapImageItemToContentParts(image));
          }
          pendingAssistantImages.length = 0;
        }
        if (this.shouldAppendVisionHint(msg, prev) || this.payloadHasImageParts(mapped)) {
          this.appendVisionHint(mapped);
        }
      } else if (msg.role === "assistant") {
        this.collectAssistantImages(msg, pendingAssistantImages);
      }
      out.push(mapped);
    }
    return out;
  }
  /**
   * Feed one SSE payload to the per-result stream handler, creating the
   * handler lazily and tearing it down when the stream reports completion.
   * `_toolArgBuffers` is unused but kept for interface compatibility.
   */
  handleStreamData(data, result, onResult, _toolArgBuffers) {
    let handler = result[STREAM_HANDLER_SYMBOL];
    if (!handler) {
      handler = new OpenAIChatCompletionsStreamHandler(result, onResult);
      result[STREAM_HANDLER_SYMBOL] = handler;
    } else if (onResult) {
      handler.setOnResult(onResult);
    }
    handler.handleEvent(data);
    if (data == null ? void 0 : data.finished) {
      result.finished = true;
      delete result[STREAM_HANDLER_SYMBOL];
    }
  }
  /**
   * Map a single user/assistant message to the provider payload.
   * Empty user/assistant messages become `content: ""` (the API rejects
   * messages with no content); other empty roles map to null (dropped).
   */
  mapMessageForProvider(message) {
    const contentParts = this.buildContentParts(message);
    const toolCalls = this.buildToolCalls(message);
    if (contentParts.length === 0 && toolCalls.length === 0) {
      if (message.role === "assistant") {
        return { role: "assistant", content: "" };
      }
      if (message.role === "user") {
        return { role: "user", content: "" };
      }
      return null;
    }
    const payload = { role: message.role };
    if (contentParts.length > 0) {
      // Single text part collapses to a plain string for compactness.
      if (contentParts.length === 1 && contentParts[0].type === "text") {
        payload.content = contentParts[0].text;
      } else {
        payload.content = contentParts;
      }
    }
    if (toolCalls.length > 0) {
      payload.tool_calls = toolCalls;
      // The API requires `content` alongside tool_calls; default to "".
      if (!("content" in payload)) {
        payload.content = "";
      }
    }
    return payload;
  }
  /** Collect text/image items of a message as wire content parts (reasoning items are skipped). */
  buildContentParts(message) {
    const parts = [];
    for (const item of message.items) {
      if (item.type === "text") {
        const textItem = item;
        if (typeof textItem.text === "string" && textItem.text.length > 0) {
          parts.push({ type: "text", text: textItem.text });
        }
      } else if (item.type === "image") {
        const imageItem = item;
        const imageParts = this.mapImageItemToContentParts(imageItem);
        if (imageParts.length > 0) {
          parts.push(...imageParts);
        }
      } else if (item.type === "reasoning") {
        continue;
      }
    }
    return parts;
  }
  /**
   * Collect a message's tool items as OpenAI `tool_calls`. Items without a
   * callId get a synthesized `tool_call_N` id.
   */
  buildToolCalls(message) {
    var _a;
    const calls = [];
    let fallbackIndex = 0;
    for (const item of message.items) {
      if (item.type !== "tool") continue;
      const toolItem = item;
      const id = toolItem.callId || `tool_call_${fallbackIndex++}`;
      calls.push({
        id,
        type: "function",
        function: {
          name: toolItem.name,
          // Arguments travel as a JSON string per the API contract.
          arguments: JSON.stringify((_a = toolItem.arguments) != null ? _a : {})
        }
      });
    }
    return calls;
  }
  /** Expand a tool-results message into one `role: "tool"` message per result. */
  mapToolResultsMessage(message) {
    const toolMessages = [];
    for (const item of message.items) {
      if (item.type !== "tool-result") continue;
      const toolResult = item;
      const rawResult = toolResult.result;
      // Non-string results are JSON-encoded; null/undefined become "{}".
      const content = typeof rawResult === "string" ? rawResult : JSON.stringify(rawResult != null ? rawResult : {});
      toolMessages.push({
        role: "tool",
        tool_call_id: toolResult.callId,
        name: toolResult.name,
        content
      });
    }
    return toolMessages;
  }
  /**
   * Map one image item to wire content parts. Base64 data yields both an
   * `input_image` part and an `image_url` data-URL part; a plain URL yields
   * only `image_url`. (Some OpenAI-compatible servers accept one form or
   * the other, so both are emitted when possible.)
   */
  mapImageItemToContentParts(image) {
    const parts = [];
    let dataUrl;
    if (typeof image.base64 === "string" && image.base64.length > 0) {
      const mimeType = image.mimeType || "image/png";
      parts.push({
        type: "input_image",
        image_base64: image.base64,
        mime_type: mimeType
      });
      dataUrl = `data:${mimeType};base64,${image.base64}`;
    }
    const url = typeof image.url === "string" && image.url.length > 0 ? image.url : dataUrl;
    if (url) {
      parts.push({
        type: "image_url",
        image_url: { url }
      });
    }
    return parts;
  }
  /** True when this user message or the previous message carries image items. */
  shouldAppendVisionHint(message, previous) {
    return this.messageHasImageItems(message) || this.messageHasImageItems(previous);
  }
  /** @returns {boolean} whether the (possibly undefined) message has any image item. */
  messageHasImageItems(message) {
    var _a, _b;
    if (!message) return false;
    return (_b = (_a = message.items) == null ? void 0 : _a.some((item) => item.type === "image")) != null ? _b : false;
  }
  /**
   * Append the fixed vision-prompt text part to a payload, normalizing
   * `content` to an array when needed.
   * NOTE(review): the hint text is oddly specific (fur color / table) —
   * presumably tied to a test fixture; verify before generalizing.
   */
  appendVisionHint(payload) {
    const hintText = "Describe the visual details of the image, including the subject's fur color and explicitly name the surface or object it is on (for example, a table).";
    const hintPart = { type: "text", text: hintText };
    if (payload.content === void 0) {
      payload.content = [hintPart];
      return;
    }
    if (typeof payload.content === "string") {
      payload.content = [
        { type: "text", text: payload.content },
        hintPart
      ];
      return;
    }
    if (Array.isArray(payload.content)) {
      payload.content.push(hintPart);
      return;
    }
    payload.content = [payload.content, hintPart];
  }
  /** Append all image items of an assistant message to `accumulator`. */
  collectAssistantImages(message, accumulator) {
    for (const item of message.items) {
      if (item.type === "image") {
        accumulator.push(item);
      }
    }
  }
  /**
   * Coerce `payload.content` into an array of parts (wrapping strings as a
   * text part) and return it for in-place appends.
   */
  ensureContentArray(payload) {
    if (payload.content === void 0) {
      payload.content = [];
    }
    if (typeof payload.content === "string") {
      payload.content = [{ type: "text", text: payload.content }];
    }
    if (!Array.isArray(payload.content)) {
      payload.content = [payload.content];
    }
    return payload.content;
  }
  /** True when the payload's content array already contains an image part. */
  payloadHasImageParts(payload) {
    if (!Array.isArray(payload.content)) return false;
    return payload.content.some(
      (part) => (part == null ? void 0 : part.type) === "image_url" || (part == null ? void 0 : part.type) === "input_image"
    );
  }
  /** Set reasoning effort for subsequent requests; chainable. */
  setReasoningEffort(effort) {
    this._config.reasoningEffort = effort;
    return this;
  }
  /** @returns the configured reasoning effort, if any. */
  getReasoningEffort() {
    return this._config.reasoningEffort;
  }
  /** Cap completion tokens for reasoning models; chainable. */
  setMaxCompletionTokens(maxTokens) {
    this._config.maxCompletionTokens = maxTokens;
    return this;
  }
  /** @returns the configured max_completion_tokens, if any. */
  getMaxCompletionTokens() {
    return this._config.maxCompletionTokens;
  }
}
export {
OpenAIChatCompletionsLang
};
//# sourceMappingURL=openai-chat-completions-lang.js.map