UNPKG

aiwrapper

Version:

A Universal AI Wrapper for JavaScript & TypeScript

106 lines (105 loc) 3.81 kB
var __defProp = Object.defineProperty;
var __defNormalProp = (obj, key, value) => key in obj ? __defProp(obj, key, { enumerable: true, configurable: true, writable: true, value }) : obj[key] = value;
var __publicField = (obj, key, value) => __defNormalProp(obj, typeof key !== "symbol" ? key + "" : key, value);
import { httpRequestWithRetry as fetch } from "../../http-request.js";
import { processServerEvents } from "../../process-server-events.js";
import { LangResult, LanguageProvider } from "../language-provider.js";
import { LangMessages, LangMessage as ConversationMessage } from "../messages.js";
import { models } from "aimodels";
import { calculateModelResponseTokens } from "../utils/token-calculator.js";
/**
 * CohereLang — LanguageProvider implementation backed by Cohere's v2 Chat
 * endpoint (https://api.cohere.com/v2/chat) using server-sent-event streaming.
 *
 * Options: { apiKey, model?, systemPrompt?, maxTokens? }.
 */
class CohereLang extends LanguageProvider {
  constructor(options) {
    const modelName = options.model || "command-r-plus-08-2024";
    super(modelName);
    __publicField(this, "_apiKey");
    __publicField(this, "_model");
    __publicField(this, "_systemPrompt");
    __publicField(this, "_maxTokens");
    __publicField(this, "modelInfo");
    const modelInfo = models.id(modelName);
    if (!modelInfo) {
      // Soft failure: unknown models still work, but modelInfo stays
      // undefined so response-token calculation is skipped in chat().
      console.error(`Invalid Cohere model: ${modelName}. Model not found in aimodels database.`);
    }
    this.modelInfo = modelInfo;
    this._apiKey = options.apiKey;
    this._model = modelName;
    this._systemPrompt = options.systemPrompt || "";
    this._maxTokens = options.maxTokens;
  }
  /**
   * Convenience wrapper: packs `prompt` into a message list and delegates
   * to chat().
   */
  async ask(prompt, options) {
    const messages = new LangMessages();
    if (this._systemPrompt) {
      // NOTE(review): the system prompt is sent here as a "user" message
      // AND again as `preamble_override` in chat()'s request body — this
      // looks duplicated; verify the intended Cohere v2 contract.
      messages.push(new ConversationMessage("user", this._systemPrompt));
    }
    messages.push(new ConversationMessage("user", prompt));
    return await this.chat(messages, options);
  }
  /**
   * Streams a chat completion from Cohere v2 and returns the accumulated
   * LangResult. `options.onResult` is invoked with the growing assistant
   * message as deltas arrive, and once more on "message-end".
   * Throws on transport errors; on abort, marks `result.aborted` and
   * attaches the partial result to the error before rethrowing.
   */
  async chat(messages, options) {
    const abortSignal = options == null ? void 0 : options.signal;
    const result = new LangResult(messages);
    // Cohere v2 accepts "user"/"assistant" roles here; every non-assistant
    // message is folded into "user".
    const transformedMessages = messages.map((msg) => ({
      role: msg.role === "assistant" ? "assistant" : "user",
      content: msg.text
    }));
    let maxTokens = this._maxTokens;
    if (this.modelInfo && !maxTokens) {
      maxTokens = calculateModelResponseTokens(
        this.modelInfo,
        messages,
        this._maxTokens
      );
    }
    // NOTE(review): `preamble_override` is a v1 Chat parameter; the v2
    // endpoint expects a "system"-role message instead — confirm against
    // the Cohere v2 API reference. Left as-is to avoid a payload change.
    const requestBody = {
      messages: transformedMessages,
      model: this._model,
      stream: true,
      max_tokens: maxTokens,
      temperature: 0.7,
      preamble_override: this._systemPrompt || void 0
    };
    const onResult = options == null ? void 0 : options.onResult;
    const onData = (data) => {
      if (data.type === "message-end") {
        result.finished = true;
        const last = result.length > 0 ? result[result.length - 1] : void 0;
        if (last && onResult) onResult(last);
        return;
      }
      if (data.type === "content-delta") {
        const delta = data.delta;
        const message = delta == null ? void 0 : delta.message;
        const content = message == null ? void 0 : message.content;
        const text = content == null ? void 0 : content.text;
        if (!text) return;
        // FIX: the original extracted `text` here and discarded it, so the
        // streamed completion never reached `result`. Append each delta to
        // the trailing assistant message, creating one on the first delta.
        // Assumes LangResult is array-like over messages — the message-end
        // branch above already indexes it that way — and that a message's
        // `text` is writable; TODO confirm against LangResult/LangMessage.
        const last = result.length > 0 ? result[result.length - 1] : void 0;
        if (last && last.role === "assistant") {
          last.text += text;
        } else {
          result.push(new ConversationMessage("assistant", text));
        }
        if (onResult) onResult(result[result.length - 1]);
      }
    };
    try {
      const response = await fetch(`https://api.cohere.com/v2/chat?alt=sse`, {
        method: "POST",
        headers: {
          "Content-Type": "application/json",
          "Authorization": `Bearer ${this._apiKey}`,
          "Accept": "text/event-stream"
        },
        body: JSON.stringify(requestBody),
        signal: abortSignal
      });
      // FIX: the original wrapped transport errors via
      // `.catch((err) => { throw new Error(err); })`, which renamed an
      // AbortError to "Error" (making the abort branch below unreachable
      // for fetch-level aborts) and lost the original stack. Let the
      // original error propagate to the catch below instead.
      await processServerEvents(response, onData, abortSignal);
    } catch (error) {
      if ((error == null ? void 0 : error.name) === "AbortError") {
        result.aborted = true;
        error.partialResult = result;
      }
      throw error;
    }
    return result;
  }
}
export {
  CohereLang
};
//# sourceMappingURL=cohere-lang.js.map