@axflow/models
Version:
Zero-dependency, modular SDK for building robust natural language applications
130 lines (128 loc) • 4.38 kB
JavaScript
// Bundler (esbuild) CommonJS interop helpers: exports are installed as live
// getters so consumers always observe the current binding, and the namespace
// object is tagged for ESM import from Node.
var __defProp = Object.defineProperty;
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
var __getOwnPropNames = Object.getOwnPropertyNames;
var __hasOwnProp = Object.prototype.hasOwnProperty;
// Define every entry of `all` on `target` as an enumerable getter.
var __export = (target, all) => {
  for (var name in all) {
    __defProp(target, name, { get: all[name], enumerable: true });
  }
};
// Copy own properties of `from` onto `to` as getters, skipping `except` and
// any key `to` already owns; source enumerability is preserved.
var __copyProps = (to, from, except, desc) => {
  if ((from && typeof from === "object") || typeof from === "function") {
    for (let key of __getOwnPropNames(from)) {
      if (__hasOwnProp.call(to, key) || key === except) continue;
      const srcDesc = __getOwnPropDesc(from, key);
      __defProp(to, key, {
        get: () => from[key],
        enumerable: !srcDesc || srcDesc.enumerable
      });
    }
  }
  return to;
};
// Tag the namespace as an ES module, then mirror all exports onto it.
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
// src/togetherai/inference.ts
// Module wiring (esbuild CJS output): declare the export namespace, register
// the public export as a live getter, and expose it through module.exports.
var inference_exports = {};
__export(inference_exports, {
  TogetherAIInference: () => TogetherAIInference
});
module.exports = __toCommonJS(inference_exports);
// Shared SDK HTTP utilities (POST helper and HttpError).
var import_shared = require("@axflow/models/shared");
// Default TogetherAI inference endpoint; each call may override it via
// options.apiUrl.
var TOGETHERAI_INFERENCE_ENDPOINT = "https://api.together.xyz/inference";
/**
 * Builds the HTTP headers for a TogetherAI request.
 *
 * JSON accept/content-type defaults come first, caller-supplied headers may
 * override them, and when `apiKey` is a string a bearer authorization header
 * is applied last (overriding any caller-supplied authorization).
 *
 * @param {string} [apiKey] - TogetherAI API key; omitted header when not a string.
 * @param {Record<string, string>} [customHeaders] - Extra headers to merge in.
 * @returns {Record<string, string>} The merged header map.
 */
function headers(apiKey, customHeaders) {
  const auth =
    typeof apiKey === "string" ? { authorization: `Bearer ${apiKey}` } : {};
  return {
    accept: "application/json",
    "content-type": "application/json",
    ...customHeaders,
    ...auth
  };
}
/**
 * Executes a non-streaming TogetherAI inference request.
 *
 * `stream_tokens` is forced to false regardless of what `request` contains.
 *
 * @param {object} request - TogetherAI inference request body.
 * @param {object} options - apiKey, apiUrl, headers, fetch, and signal overrides.
 * @returns {Promise<object>} The parsed JSON response body.
 */
async function run(request, options) {
  const endpoint = options.apiUrl || TOGETHERAI_INFERENCE_ENDPOINT;
  const body = JSON.stringify({ ...request, stream_tokens: false });
  const response = await (0, import_shared.POST)(endpoint, {
    headers: headers(options.apiKey, options.headers),
    body,
    fetch: options.fetch,
    signal: options.signal
  });
  return response.json();
}
/**
 * Executes a streaming TogetherAI inference request and returns the raw
 * byte stream of the response (server-sent events).
 *
 * `stream_tokens` is forced to true regardless of what `request` contains.
 *
 * @param {object} request - TogetherAI inference request body.
 * @param {object} options - apiKey, apiUrl, headers, fetch, and signal overrides.
 * @returns {Promise<ReadableStream<Uint8Array>>} The response body stream.
 * @throws {import_shared.HttpError} When the response has no body stream.
 */
async function streamBytes(request, options) {
  const endpoint = options.apiUrl || TOGETHERAI_INFERENCE_ENDPOINT;
  const response = await (0, import_shared.POST)(endpoint, {
    headers: headers(options.apiKey, options.headers),
    body: JSON.stringify({ ...request, stream_tokens: true }),
    fetch: options.fetch,
    signal: options.signal
  });
  const bodyStream = response.body;
  if (!bodyStream) {
    throw new import_shared.HttpError("Expected response body to be a ReadableStream", response);
  }
  return bodyStream;
}
// Identity mapping: passes each parsed chunk through unchanged.
var noop = (chunk) => chunk;
/**
 * Streams the TogetherAI response as parsed JSON chunk objects.
 *
 * @param {object} request - TogetherAI inference request body.
 * @param {object} options - apiKey, apiUrl, headers, fetch, and signal overrides.
 * @returns {Promise<ReadableStream<object>>} Stream of parsed chunks.
 */
async function stream(request, options) {
  const bytes = await streamBytes(request, options);
  const decoder = new TogetherAIInferenceDecoderStream(noop);
  return bytes.pipeThrough(decoder);
}
// Extracts the text token from a parsed streaming chunk; yields "" when the
// first choice has no text.
function chunkToToken(chunk) {
  const [firstChoice] = chunk.choices;
  return firstChoice.text || "";
}
/**
 * Streams the TogetherAI response as plain text tokens.
 *
 * @param {object} request - TogetherAI inference request body.
 * @param {object} options - apiKey, apiUrl, headers, fetch, and signal overrides.
 * @returns {Promise<ReadableStream<string>>} Stream of token strings.
 */
async function streamTokens(request, options) {
  const bytes = await streamBytes(request, options);
  const decoder = new TogetherAIInferenceDecoderStream(chunkToToken);
  return bytes.pipeThrough(decoder);
}
// Public facade: groups the four inference entry points as static members so
// callers use TogetherAIInference.run(...), .stream(...), etc.
var TogetherAIInference = class {
  // Non-streaming request; resolves to the parsed JSON response body.
  static run = run;
  // Stream of parsed JSON chunk objects.
  static stream = stream;
  // Raw ReadableStream of response bytes (server-sent events).
  static streamBytes = streamBytes;
  // Stream of plain text tokens extracted from each chunk.
  static streamTokens = streamTokens;
};
/**
 * TransformStream that decodes TogetherAI's server-sent-event byte stream
 * into mapped values: bytes in, `map(parsedJsonChunk)` out.
 */
var TogetherAIInferenceDecoderStream = class _TogetherAIInferenceDecoderStream extends TransformStream {
  // Captures the payload of a server-sent-event "data:" line.
  static DATA_RE = /data:\s*(.+)/;
  /**
   * Parses one SSE event (the text between blank-line separators).
   *
   * @param {string} chunk - Raw event text.
   * @returns {object|null} The parsed JSON payload, or null for empty events
   *   and the terminal "[DONE]" sentinel.
   * @throws {Error} When the event is neither empty, "[DONE]", nor valid
   *   "data: <json>" (a null regex match and a JSON.parse failure both land
   *   in the catch below).
   */
  static parseChunk(chunk) {
    chunk = chunk.trim();
    if (chunk.length === 0) {
      return null;
    }
    const match = chunk.match(_TogetherAIInferenceDecoderStream.DATA_RE);
    try {
      const data = match[1];
      return data === "[DONE]" ? null : JSON.parse(data);
    } catch (error) {
      throw new Error(
        `Encountered unexpected chunk while parsing TogetherAI streaming response: ${JSON.stringify(
          chunk
        )}`
      );
    }
  }
  /**
   * Builds the transform() callback: decodes UTF-8 bytes, splits the text on
   * blank-line ("\n\n") event boundaries, parses each event, and enqueues
   * `map(parsedEvent)` for every non-null result.
   *
   * @param {(chunk: object) => any} map - Applied to each parsed event.
   */
  static streamTransformer(map) {
    let buffer = [];
    const decoder = new TextDecoder();
    return (bytes, controller) => {
      // { stream: true } makes the decoder retain the trailing bytes of a
      // multi-byte UTF-8 sequence that is split across network chunks,
      // instead of emitting U+FFFD replacement characters (which previously
      // corrupted non-ASCII tokens).
      const chunk = decoder.decode(bytes, { stream: true });
      for (let i = 0, len = chunk.length; i < len; ++i) {
        // An event ends at the second "\n" of a "\n\n" pair; the first "\n"
        // is already the last character sitting in the buffer.
        const isChunkSeparator = chunk[i] === "\n" && buffer[buffer.length - 1] === "\n";
        if (!isChunkSeparator) {
          buffer.push(chunk[i]);
          continue;
        }
        const parsedChunk = _TogetherAIInferenceDecoderStream.parseChunk(
          buffer.join("")
        );
        if (parsedChunk) {
          controller.enqueue(map(parsedChunk));
        }
        buffer = [];
      }
    };
  }
  /**
   * @param {(chunk: object) => any} map - Mapping applied to each parsed
   *   SSE event before it is enqueued downstream.
   */
  constructor(map) {
    super({ transform: _TogetherAIInferenceDecoderStream.streamTransformer(map) });
  }
};
// Annotate the CommonJS export names for ESM import in node:
// (dead code — `0 && ...` never executes; Node's cjs-module-lexer detects
// this pattern statically so named ESM imports of this CJS module resolve.)
0 && (module.exports = {
  TogetherAIInference
});
;