@axflow/models
Version: (not captured)
Zero-dependency, modular SDK for building robust natural-language applications
107 lines (104 loc) • 3.06 kB
JavaScript
// src/openai/chat.ts
import { POST, HttpError } from "@axflow/models/shared";
// src/openai/shared.ts
// Build the HTTP headers for an OpenAI API request.
// Caller-supplied headers may override the JSON defaults, but the
// authorization header derived from `apiKey` always wins; it is only
// set when `apiKey` is actually a string.
function headers(apiKey, customHeaders) {
  const merged = {
    accept: "application/json",
    "content-type": "application/json",
    ...customHeaders
  };
  if (typeof apiKey === "string") {
    merged.authorization = `Bearer ${apiKey}`;
  }
  return merged;
}
// Returns a TransformStream `transform` callback that re-assembles the byte
// stream into "\n\n"-delimited frames, parses each frame with `parseChunk`,
// and enqueues `map(parsedChunk)` for every non-null result. Characters of a
// partial frame are carried in `buffer` across calls.
function streamTransformer(map) {
  let buffer = [];
  const decoder = new TextDecoder();
  return (bytes, controller) => {
    // FIX: pass { stream: true } so a multi-byte UTF-8 sequence split across
    // two network chunks stays buffered inside the decoder instead of being
    // emitted as U+FFFD replacement characters.
    const chunk = decoder.decode(bytes, { stream: true });
    for (let i = 0, len = chunk.length; i < len; ++i) {
      // A frame ends when we see a "\n" immediately after a buffered "\n".
      const isChunkSeparator = chunk[i] === "\n" && buffer[buffer.length - 1] === "\n";
      if (!isChunkSeparator) {
        buffer.push(chunk[i]);
        continue;
      }
      const parsedChunk = parseChunk(buffer.join(""));
      if (parsedChunk) {
        controller.enqueue(map(parsedChunk));
      }
      buffer = [];
    }
    // NOTE(review): a final frame without a trailing "\n\n" is never flushed;
    // presumably the upstream SSE stream always terminates frames — confirm.
  };
}
// Matches an SSE-style "data: <payload>" line and captures the payload.
var DATA_RE = /data:\s*(.+)/;
// Parse one buffered frame from the streaming response.
// Returns null for empty frames and for the "[DONE]" sentinel; otherwise the
// JSON-decoded payload. Throws on frames that are not "data:" lines or whose
// payload is not valid JSON.
function parseChunk(chunk) {
  chunk = chunk.trim();
  if (chunk.length === 0) {
    return null;
  }
  const match = chunk.match(DATA_RE);
  // FIX: previously a non-matching frame caused a TypeError (`match[1]` on
  // null) that was only rescued by the try/catch below. Fail explicitly with
  // the same error message instead of relying on exception control flow.
  if (!match) {
    throw new Error(
      `Encountered unexpected chunk while parsing OpenAI streaming response: ${JSON.stringify(
        chunk
      )}`
    );
  }
  try {
    const data = match[1];
    return data === "[DONE]" ? null : JSON.parse(data);
  } catch (error) {
    throw new Error(
      `Encountered unexpected chunk while parsing OpenAI streaming response: ${JSON.stringify(
        chunk
      )}`
    );
  }
}
// src/openai/chat.ts
var OPENAI_CHAT_COMPLETIONS_API_URL = "https://api.openai.com/v1/chat/completions";
// Execute a single non-streaming chat completion request and return the
// parsed JSON response body. `options.apiUrl` overrides the default endpoint;
// `stream: false` is forced regardless of what `request` carries.
async function run(request, options) {
  const endpoint = options.apiUrl || OPENAI_CHAT_COMPLETIONS_API_URL;
  const payload = JSON.stringify({ ...request, stream: false });
  const response = await POST(endpoint, {
    headers: headers(options.apiKey, options.headers),
    body: payload,
    fetch: options.fetch,
    signal: options.signal
  });
  return response.json();
}
// Issue the streaming chat request and hand back the raw response byte
// stream. Forces `stream: true` on the request body and throws an HttpError
// when the response has no body to read from.
async function streamBytes(request, options) {
  const endpoint = options.apiUrl || OPENAI_CHAT_COMPLETIONS_API_URL;
  const payload = JSON.stringify({ ...request, stream: true });
  const response = await POST(endpoint, {
    headers: headers(options.apiKey, options.headers),
    body: payload,
    fetch: options.fetch,
    signal: options.signal
  });
  const body = response.body;
  if (body) {
    return body;
  }
  throw new HttpError("Expected response body to be a ReadableStream", response);
}
// Identity mapper: lets `stream` hand each parsed chunk through unchanged.
const noop = (chunk) => chunk;
// Stream the response as fully parsed chunk objects (JSON already decoded),
// by piping the raw byte stream through the decoder with the identity map.
async function stream(request, options) {
  const bytes = await streamBytes(request, options);
  return bytes.pipeThrough(new OpenAIChatDecoderStream(noop));
}
// Extract just the text delta from a parsed streaming chunk, or "" when the
// chunk carries no content.
// FIX: guard with optional chaining — `chunk.choices[0].delta.content`
// previously threw a TypeError on chunks whose `choices` array is empty
// (NOTE(review): e.g. trailing usage-only chunks — confirm against the
// OpenAI streaming response format). The `|| ""` fallback is preserved.
function chunkToToken(chunk) {
  return chunk.choices[0]?.delta?.content || "";
}
// Stream only the text tokens: pipes the raw byte stream through the decoder
// with `chunkToToken`, so consumers receive plain strings.
async function streamTokens(request, options) {
  const bytes = await streamBytes(request, options);
  return bytes.pipeThrough(new OpenAIChatDecoderStream(chunkToToken));
}
// Namespace-style facade over the module's functions, so callers can write
// `OpenAIChat.run(...)`, `OpenAIChat.streamTokens(...)`, etc. This class is
// the module's only export.
var OpenAIChat = class {
  static run = run;
  static stream = stream;
  static streamBytes = streamBytes;
  static streamTokens = streamTokens;
};
// TransformStream that turns the raw response bytes into the values produced
// by `map` — whole parsed chunk objects (via `noop`) or their token strings
// (via `chunkToToken`). All decoding/framing logic lives in
// `streamTransformer`.
var OpenAIChatDecoderStream = class extends TransformStream {
  constructor(map) {
    super({ transform: streamTransformer(map) });
  }
};
export {
OpenAIChat
};