call-ai
Version:
Lightweight library for making AI API calls with streaming support
206 lines • 8.06 kB
JavaScript
import { globalDebug, keyStore, initKeyStore } from "./key-management.js";
import { handleApiError, checkForInvalidModelError } from "./error-handling.js";
import { responseMetadata, boxString } from "./response-metadata.js";
import { PACKAGE_VERSION } from "./version.js";
import { callAiFetch } from "./utils.js";
// Model used for an automatic one-shot retry when the requested model is rejected as invalid.
const FALLBACK_MODEL = "openrouter/auto";
/**
 * Make a non-streaming chat-completions request.
 *
 * @param {string|Array<object>} prompt - A plain user prompt, or a full messages array.
 * @param {object} [options] - Call options: `apiKey`, `model`, `endpoint`, `schemaStrategy`
 *   (required), `schema`, `maxTokens`, `temperature`, `topP`, `responseFormat`, `referer`,
 *   `title`, `headers`, `skipRefresh`, `skipRetry`, `debug`. Any other key is forwarded
 *   verbatim into the request body.
 * @param {boolean} [isRetry] - Internal flag: set on the fallback-model retry to prevent loops.
 * @returns {Promise<string>} The extracted response content (stringified if not a string).
 * @throws {Error} When `schemaStrategy` is missing, on HTTP errors, or on unparseable responses.
 */
async function callAINonStreaming(prompt, options = {}, isRetry = false) {
  initKeyStore();
  // Normalize a bare string prompt into a chat-messages array.
  const messages = Array.isArray(prompt) ? prompt : [{ role: "user", content: prompt }];
  const apiKey = options.apiKey;
  const model = options.model || "openai/gpt-3.5-turbo";
  const endpoint = options.endpoint || "https://openrouter.ai/api/v1";
  const url = `${endpoint}/chat/completions`;
  const schemaStrategy = options.schemaStrategy;
  if (!schemaStrategy) {
    throw new Error("Schema strategy is required for non-streaming calls");
  }
  // BUG FIX: the original `options.responseFormat || a || b ? "json" : undefined`
  // parsed as `(options.responseFormat || a || b) ? "json" : undefined`, which
  // collapsed ANY caller-supplied responseFormat to "json". Honor the caller's
  // explicit value first; only default to JSON mode for GPT models.
  const responseFormat = options.responseFormat || (/gpt-4/.test(model) || /gpt-3.5/.test(model) ? "json" : undefined);
  const debug = options.debug === undefined ? globalDebug : options.debug;
  if (debug) {
    console.log(`[callAi:${PACKAGE_VERSION}] Making non-streaming request to: ${url}`);
    console.log(`[callAi:${PACKAGE_VERSION}] With model: ${model}`);
  }
  const requestBody = {
    model,
    messages,
    max_tokens: options.maxTokens || 2048,
    temperature: options.temperature !== undefined ? options.temperature : 0.7,
    top_p: options.topP !== undefined ? options.topP : 1,
    stream: false,
  };
  if (responseFormat === "json") {
    requestBody.response_format = { type: "json_object" };
  }
  if (options.schema) {
    Object.assign(requestBody, schemaStrategy.prepareRequest(options.schema, messages));
  }
  const headers = {
    Authorization: `Bearer ${apiKey}`,
    "HTTP-Referer": options.referer || "https://vibes.diy",
    "X-Title": options.title || "Vibes",
    "Content-Type": "application/json",
  };
  if (options.headers) {
    Object.assign(headers, options.headers);
  }
  // Forward any unrecognized option straight through to the provider API.
  // BUG FIX: "schemaStrategy" and "skipRetry" were missing from the original
  // reserved list, so the internal strategy object and retry flag leaked into
  // the JSON request body sent over the wire.
  const reservedKeys = new Set([
    "apiKey",
    "model",
    "endpoint",
    "stream",
    "schema",
    "schemaStrategy",
    "maxTokens",
    "temperature",
    "topP",
    "responseFormat",
    "referer",
    "title",
    "headers",
    "skipRefresh",
    "skipRetry",
    "debug",
  ]);
  for (const [key, value] of Object.entries(options)) {
    if (!reservedKeys.has(key)) {
      requestBody[key] = value;
    }
  }
  if (debug) {
    console.log(`[callAi:${PACKAGE_VERSION}] Request headers:`, headers);
    console.log(`[callAi:${PACKAGE_VERSION}] Request body:`, requestBody);
  }
  const meta = {
    model,
    endpoint,
    timing: {
      startTime: Date.now(),
      endTime: 0,
      duration: 0,
    },
  };
  try {
    const response = await callAiFetch(options)(url, {
      method: "POST",
      headers,
      body: JSON.stringify(requestBody),
    });
    if (!response.ok) {
      const { isInvalidModel, errorData } = await checkForInvalidModelError(response, model, debug);
      // One-shot fallback to the auto-router when the model id is rejected.
      if (isInvalidModel && !isRetry && !options.skipRetry) {
        if (debug) {
          console.log(`[callAi:${PACKAGE_VERSION}] Invalid model "${model}", falling back to "${FALLBACK_MODEL}"`);
        }
        return callAINonStreaming(prompt, {
          ...options,
          model: FALLBACK_MODEL,
        }, true);
      }
      const errorText = errorData ? JSON.stringify(errorData) : `HTTP error! Status: ${response.status}`;
      throw new Error(errorText);
    }
    let result;
    try {
      if (/claude/.test(model)) {
        result = await extractClaudeResponse(response);
      }
      else {
        const json = await response.json();
        result = extractContent(json, schemaStrategy);
      }
    }
    catch (parseError) {
      throw new Error(`Failed to parse API response: ${parseError instanceof Error ? parseError.message : String(parseError)}`);
    }
    const endTime = Date.now();
    meta.timing.endTime = endTime;
    meta.timing.duration = endTime - meta.timing.startTime;
    const resultString = typeof result === "string" ? result : JSON.stringify(result);
    // NOTE(review): metadata is keyed on the boxed String object while the
    // primitive is returned — presumably responseMetadata is looked up via
    // boxString elsewhere; confirm before changing.
    const boxed = boxString(resultString);
    responseMetadata.set(boxed, meta);
    return resultString;
  }
  catch (error) {
    // Network-level failures are surfaced as-is; no key-refresh retry applies.
    const isNetworkError = error instanceof Error && (error.message.includes("Network") || error.name === "TypeError");
    if (isNetworkError) {
      if (debug) {
        console.error(`[callAi:${PACKAGE_VERSION}] Network error during fetch:`, error);
      }
      throw error;
    }
    // May refresh the stored API key (e.g. on auth failures); if a new key
    // appeared, retry once with it.
    await handleApiError(error, "Non-streaming API call", options.debug, {
      apiKey: apiKey || undefined,
      endpoint: options.endpoint || undefined,
      skipRefresh: options.skipRefresh,
    });
    if (keyStore().current && keyStore().current !== apiKey) {
      if (debug) {
        console.log(`[callAi:${PACKAGE_VERSION}] Retrying with refreshed API key`);
      }
      return callAINonStreaming(prompt, {
        ...options,
        apiKey: keyStore().current,
      }, isRetry);
    }
    throw error;
  }
}
/**
 * Extract the assistant payload from a chat-completions JSON response and run
 * it through the schema strategy's post-processing.
 *
 * @param {object|string} result - Parsed response JSON, or an already-extracted string.
 * @param {{processResponse: Function}} schemaStrategy - Strategy whose
 *   `processResponse` is applied to the raw content/function_call/tool payload.
 * @returns {*} The processed content, or `""` for a falsy result.
 * @throws {Error} When no recognizable content can be found in a non-string result.
 */
function extractContent(result, schemaStrategy) {
  if (!result) {
    return "";
  }
  if (result.choices && result.choices.length > 0) {
    const choice = result.choices[0];
    const message = choice.message;
    if (message) {
      // BUG FIX: this array branch must run BEFORE the generic truthy-content
      // check — arrays are truthy, so in the original the generic check always
      // won and the text-block/tool_use handling below was unreachable.
      if (Array.isArray(message.content)) {
        // Claude-style content blocks: concatenate text, but a tool_use block
        // takes precedence over accumulated text.
        let textContent = "";
        let toolUse = null;
        for (const block of message.content) {
          if (block.type === "text") {
            textContent += block.text || "";
          }
          else if (block.type === "tool_use") {
            toolUse = block;
            break;
          }
        }
        if (toolUse) {
          return schemaStrategy.processResponse(toolUse);
        }
        return schemaStrategy.processResponse(textContent);
      }
      if (message.content) {
        return schemaStrategy.processResponse(message.content);
      }
      if (message.function_call) {
        return schemaStrategy.processResponse(message.function_call);
      }
      if (message.tool_calls) {
        return schemaStrategy.processResponse(message.tool_calls);
      }
    }
    // Legacy completions shape: content lives directly on the choice.
    if (choice.text) {
      return schemaStrategy.processResponse(choice.text);
    }
  }
  if (typeof result !== "string") {
    throw new Error(`Failed to extract content from API response: ${JSON.stringify(result)}`);
  }
  return result;
}
/**
 * Read a Claude-routed response body with a 5 s safety timeout.
 *
 * @param {{json: () => Promise<object>}} response - Fetch-style response object.
 * @returns {Promise<*>} The message content when the payload has the standard
 *   `choices[0].message.content` shape, otherwise the parsed JSON as-is.
 * @throws {Error} Wrapping the parse/timeout failure message.
 */
async function extractClaudeResponse(response) {
  let timer;
  try {
    const timeoutPromise = new Promise((_, reject) => {
      timer = setTimeout(() => {
        reject(new Error("Timeout extracting Claude response"));
      }, 5000);
    });
    const json = await Promise.race([response.json(), timeoutPromise]);
    if (json.choices && json.choices.length > 0 && json.choices[0].message && json.choices[0].message.content) {
      return json.choices[0].message.content;
    }
    return json;
  }
  catch (error) {
    throw new Error(`Failed to extract Claude response: ${error instanceof Error ? error.message : String(error)}`);
  }
  finally {
    // BUG FIX: the original never cleared the timer, so every call kept the
    // event loop alive for 5 s and the losing race promise later rejected
    // with no handler (unhandled rejection).
    clearTimeout(timer);
  }
}
// Public surface of the non-streaming module (PACKAGE_VERSION is re-exported from version.js).
export { callAINonStreaming, extractContent, extractClaudeResponse, PACKAGE_VERSION, FALLBACK_MODEL };
//# sourceMappingURL=non-streaming.js.map