// aiwrapper — A Universal AI Wrapper for JavaScript & TypeScript
// (build artifact; original source: mock-response-stream-lang)
// esbuild-generated helpers for down-leveled class fields: define a normal
// (enumerable, writable, configurable) own property, stringifying non-symbol keys.
var __defProp = Object.defineProperty;
var __defNormalProp = (obj, key, value) => {
  if (key in obj) {
    return __defProp(obj, key, { enumerable: true, configurable: true, writable: true, value });
  }
  return obj[key] = value;
};
var __publicField = (obj, key, value) => {
  const normalizedKey = typeof key === "symbol" ? key : key + "";
  return __defNormalProp(obj, normalizedKey, value);
};
import {
LanguageProvider
} from "../language-provider.js";
import {
LangMessages
} from "../messages.js";
import { OpenAIResponseStreamHandler } from "../openai/responses/openai-responses-stream-handler.js";
// Fallback replies served round-robin when no message is configured.
const DEFAULT_MESSAGES = [
  "Mini reply: hi there!",
  "Small mock message incoming.",
  "Little response from the mock provider."
];
// Builds a loosely-unique id like "prefix_<time36>_<rand36>".
const randomId = (prefix) => {
  const timePart = Date.now().toString(36);
  const randPart = Math.random().toString(36).slice(2, 8);
  return `${prefix}_${timePart}_${randPart}`;
};
// Resolves after `ms` milliseconds; resolves immediately for falsy/non-positive delays.
const sleep = (ms) => {
  if (!ms || ms <= 0) {
    return Promise.resolve();
  }
  return new Promise((resolve) => setTimeout(resolve, ms));
};
// True for any non-null object (candidate overrides bag).
const isOverrides = (value) => !!value && typeof value === "object";
class MockResponseStreamLang extends LanguageProvider {
  /**
   * Mock provider that replays canned text through the OpenAI Responses
   * streaming handler, emitting deltas chunk by chunk so callers can
   * exercise streaming code paths without a real API.
   *
   * @param {object} [options] - Configuration; also stored as `this.config`.
   * @param {string} [options.name] - Provider display name.
   * @param {string|(() => string)} [options.message] - Fixed reply or factory.
   * @param {string[]} [options.messages] - Replies served round-robin.
   * @param {number} [options.chunkSize] - Characters per streamed delta (default 12).
   * @param {number} [options.speedMs] - Delay between deltas in ms (default 0).
   */
  constructor(options = {}) {
    var _a;
    super((_a = options.name) != null ? _a : "Mock Response Stream");
    __publicField(this, "config");
    // Round-robin cursor into the preset message list.
    __publicField(this, "presetIndex", 0);
    this.config = options;
  }
  /**
   * Convenience wrapper: wraps `prompt` as a single user message and chats.
   * @param {string} prompt - User prompt text.
   * @param {object} [options] - Per-call options (see `chat`).
   * @returns {Promise<LangMessages>} Collection with the mock reply streamed in.
   */
  async ask(prompt, options) {
    const messages = new LangMessages();
    messages.addUserMessage(prompt);
    return this.chat(messages, options);
  }
  /**
   * Streams a mock assistant reply onto the given conversation.
   * @param {LangMessages|Array} messages - Existing conversation (wrapped in
   *   a LangMessages if a plain array is given).
   * @param {object} [options] - May carry `signal`, `onResult`, and
   *   `providerSpecificBody.mockResponseStream` overrides.
   * @returns {Promise<LangMessages>} The same (or wrapped) collection.
   * @throws {Error} AbortError when `options.signal` fires mid-stream; the
   *   partial result is attached as `err.partialResult`.
   */
  async chat(messages, options) {
    const messageCollection = messages instanceof LangMessages ? messages : new LangMessages(messages);
    await this.streamMockResponse(messageCollection, options);
    return messageCollection;
  }
  /**
   * Extracts per-call overrides from `options.providerSpecificBody.mockResponseStream`.
   * @returns {object} Overrides bag, or `{}` when none is present.
   */
  resolveOverrides(options) {
    const providerBody = options == null ? void 0 : options.providerSpecificBody;
    // Truthiness already excludes null, so no separate `!== null` check is needed.
    if (providerBody && typeof providerBody === "object") {
      const overrides = providerBody.mockResponseStream;
      if (isOverrides(overrides)) {
        return overrides;
      }
    }
    return {};
  }
  /**
   * Chooses the reply text. Priority: override/config `message` (factory or
   * non-empty string) > round-robin preset list > literal "Mock response".
   * Advances `presetIndex` only when a preset is consumed.
   * @returns {string} The reply to stream.
   */
  pickMessage(overrides) {
    var _a;
    const messageSource = (_a = overrides.message) != null ? _a : this.config.message;
    if (typeof messageSource === "function") {
      return messageSource();
    }
    if (typeof messageSource === "string" && messageSource.length > 0) {
      return messageSource;
    }
    const presetList = this.resolveMessageList(overrides);
    if (presetList.length === 0) {
      return "Mock response";
    }
    const idx = this.presetIndex % presetList.length;
    const selected = presetList[idx];
    this.presetIndex = (this.presetIndex + 1) % presetList.length;
    return selected;
  }
  /**
   * Resolves the preset list: overrides win over config, defaults last.
   * @returns {string[]} A non-empty list unless both sources are empty arrays.
   */
  resolveMessageList(overrides) {
    if (overrides.messages && overrides.messages.length > 0) {
      return overrides.messages;
    }
    if (this.config.messages && this.config.messages.length > 0) {
      return this.config.messages;
    }
    return DEFAULT_MESSAGES;
  }
  /**
   * Resolves delta chunk size (override > config > 12), clamping
   * non-positive values back to the default.
   * @returns {number} A positive chunk size.
   */
  resolveChunkSize(overrides) {
    var _a, _b;
    const size = (_b = (_a = overrides.chunkSize) != null ? _a : this.config.chunkSize) != null ? _b : 12;
    return size > 0 ? size : 12;
  }
  /**
   * Resolves the inter-chunk delay in ms (override > config > 0).
   * @returns {number} Delay in milliseconds.
   */
  resolveSpeed(overrides) {
    var _a, _b;
    return (_b = (_a = overrides.speedMs) != null ? _a : this.config.speedMs) != null ? _b : 0;
  }
  /**
   * Splits `text` into fixed-size slices. Always returns at least one
   * (possibly empty) element so the streaming loop has something to iterate.
   * @returns {string[]} Chunks of up to `chunkSize` characters.
   */
  chunkText(text, chunkSize) {
    if (chunkSize <= 0 || text.length === 0) return [text];
    const chunks = [];
    for (let i = 0; i < text.length; i += chunkSize) {
      chunks.push(text.slice(i, i + chunkSize));
    }
    return chunks.length > 0 ? chunks : [text];
  }
  /**
   * Emits the mock reply through an OpenAIResponseStreamHandler as a
   * Responses-API event sequence: created -> output_item.added -> one
   * output_text.delta per chunk -> output_item.done -> completed.
   * Honors `options.signal` for cancellation between chunks.
   * @throws {Error} AbortError (with `partialResult` set) if aborted.
   */
  async streamMockResponse(messages, options) {
    const overrides = this.resolveOverrides(options);
    const responseText = this.pickMessage(overrides);
    const chunkSize = this.resolveChunkSize(overrides);
    const delay = this.resolveSpeed(overrides);
    const signal = options == null ? void 0 : options.signal;
    let aborted = false;
    let abortError = null;
    const abortHandler = () => {
      aborted = true;
      abortError = new Error("The operation was aborted");
      abortError.name = "AbortError";
    };
    if (signal) {
      if (signal.aborted) {
        abortHandler();
      } else {
        signal.addEventListener("abort", abortHandler, { once: true });
      }
    }
    const handler = new OpenAIResponseStreamHandler(messages, options == null ? void 0 : options.onResult);
    const responseId = randomId("resp_mock");
    const itemId = randomId("msg_mock");
    handler.handleEvent({
      type: "response.created",
      response: { id: responseId }
    });
    handler.handleEvent({
      type: "response.output_item.added",
      item: {
        id: itemId,
        type: "message",
        status: "in_progress",
        role: "assistant",
        content: [],
        text: ""
      }
    });
    const chunks = this.chunkText(responseText, chunkSize);
    try {
      for (const chunk of chunks) {
        if (aborted) break;
        await sleep(delay);
        // Re-check: the signal may have fired while we were sleeping.
        if (aborted) break;
        if (chunk.length > 0) {
          handler.handleEvent({
            type: "response.output_text.delta",
            item_id: itemId,
            delta: chunk
          });
        }
      }
      if (aborted) {
        messages.aborted = true;
        // Defensive fallback: abortHandler always sets abortError, but if it
        // ever did not, synthesize a proper AbortError. The previous
        // `err.name = err.name || "AbortError"` never applied, because a
        // fresh Error already has the truthy name "Error".
        let err = abortError;
        if (err == null) {
          err = new Error("AbortError");
          err.name = "AbortError";
        }
        err.partialResult = messages;
        throw err;
      }
      handler.handleEvent({
        type: "response.output_item.done",
        item: {
          id: itemId,
          type: "message",
          status: "completed",
          role: "assistant",
          content: [
            {
              type: "output_text",
              text: responseText,
              annotations: [],
              logprobs: []
            }
          ]
        }
      });
      handler.handleEvent({
        type: "response.completed",
        response: {
          id: responseId,
          status: "completed"
        }
      });
      messages.finished = true;
    } finally {
      // Always detach the listener so the signal does not retain this closure
      // (abortHandler is always defined here, so no extra truthiness check).
      if (signal) {
        signal.removeEventListener("abort", abortHandler);
      }
    }
  }
}
export {
MockResponseStreamLang
};
//# sourceMappingURL=mock-response-stream-lang.js.map