@durable-streams/tanstack-ai-transport
Version:
TanStack AI adapters for Durable Streams
388 lines (385 loc) • 12.9 kB
JavaScript
import { DurableStream, DurableStreamError, stream } from "@durable-streams/client";
//#region src/client.ts
/**
 * Normalizes any HeadersInit-like value (Headers instance, array of
 * [name, value] pairs, plain object, or undefined) into a plain record.
 */
function mergeHeaders(headers) {
  // Nothing supplied → empty record.
  if (!headers) return {};
  // Headers instances and entry arrays both yield [name, value] pairs.
  const entryPairs = headers instanceof Headers ? [...headers.entries()] : Array.isArray(headers) ? headers : null;
  return entryPairs ? Object.fromEntries(entryPairs) : { ...headers };
}
/**
 * Attempts to parse a Response body as JSON. Returns undefined when the
 * content-type is not JSON or the body fails to parse — never throws.
 */
async function parseJsonSafely(response) {
  const contentType = response.headers.get(`content-type`);
  const looksLikeJson = typeof contentType === `string` && contentType.includes(`application/json`);
  if (!looksLikeJson) return undefined;
  // Swallow parse failures: a malformed body is treated as "no payload".
  return response.json().catch(() => undefined);
}
/**
 * True when the URL begins with an RFC 3986 scheme: a letter followed by
 * letters, digits, '+', '-' or '.', terminated by a colon.
 */
function isAbsoluteUrl(url) {
  return /^[A-Za-z][A-Za-z0-9+.-]*:/.test(url);
}
/**
 * Resolves a possibly-relative stream URL to an absolute one. Prefers the
 * browser's current location as the base when available, then the supplied
 * baseUrl; falls back to returning the input unchanged.
 */
function resolveUrl$1(streamUrl, baseUrl) {
  // Already absolute (has a URL scheme) — nothing to resolve.
  if (/^[A-Za-z][A-Za-z0-9+.-]*:/.test(streamUrl)) return streamUrl;
  const bases = typeof window === `undefined` ? [baseUrl] : [window.location.href, baseUrl];
  for (const base of bases) {
    if (!base) continue;
    try {
      return new URL(streamUrl, base).toString();
    } catch {
      // Base was not a valid absolute URL — try the next candidate.
    }
  }
  // No usable base; hand the relative URL back as-is.
  return streamUrl;
}
/**
 * Builds a TanStack-AI-style transport connection backed by a Durable Stream.
 *
 * Returns an object with:
 * - subscribe(abortSignal): async generator reading JSON batches from the
 *   stream (SSE live mode). On first subscribe without a known offset it
 *   accumulates chunks into a MESSAGES_SNAPSHOT and yields that once the
 *   stream reports upToDate; afterwards it yields raw chunks one by one.
 * - send(messages, data, abortSignal): POSTs { messages, data } as JSON to
 *   options.sendUrl and throws on non-2xx responses.
 */
function durableStreamConnection(options) {
const sendUrl = options.sendUrl;
// readUrl defaults to sendUrl; both are resolved relative to the browser
// location (when present) or to sendUrl itself.
const state = {
streamUrl: options.readUrl ? resolveUrl$1(options.readUrl, sendUrl) : resolveUrl$1(sendUrl, sendUrl),
offset: options.initialOffset
};
return {
async *subscribe(abortSignal) {
const streamResponse = await stream({
url: state.streamUrl,
live: `sse`,
json: true,
offset: state.offset,
headers: mergeHeaders(options.headers),
signal: abortSignal
});
// Snapshot emission defaults to true and only applies when we start
// reading from the beginning (no resume offset).
const emitSnapshot = options.emitSnapshotOnSubscribe !== false;
const shouldEmitSnapshot = emitSnapshot && state.offset === void 0;
let hasEmittedSnapshot = !shouldEmitSnapshot;
let snapshotMessages = [];
for await (const batch of readJsonBatchesFromStream(streamResponse, abortSignal)) {
// Track the offset even for batches we skip, so a re-subscribe resumes
// after everything already consumed.
state.offset = batch.offset;
if (abortSignal?.aborted) break;
if (!hasEmittedSnapshot) {
// Fold chunks into the pending snapshot until the server says the
// reader has caught up, then emit the snapshot as a single chunk.
snapshotMessages = applyChunksToMessages(snapshotMessages, batch.items);
if (!batch.upToDate) continue;
hasEmittedSnapshot = true;
yield {
type: `MESSAGES_SNAPSHOT`,
messages: snapshotMessages
};
continue;
}
for (const chunk of batch.items) {
if (abortSignal?.aborted) break;
yield chunk;
}
}
},
async send(messages, data, abortSignal) {
const fetchClient = options.fetchClient ?? fetch;
const response = await fetchClient(sendUrl, {
method: `POST`,
headers: {
"Content-Type": `application/json`,
...mergeHeaders(options.headers)
},
body: JSON.stringify({
messages,
data
}),
signal: abortSignal
});
if (!response.ok) {
// Prefer a structured { error } payload when the server provides one.
const body = await parseJsonSafely(response);
if (body && typeof body === `object` && `error` in body) throw new Error(String(body.error));
throw new Error(`HTTP error! status: ${response.status} ${response.statusText}`);
}
}
};
}
/**
 * Adapts the push-based subscribeJson callback API into a pull-based async
 * iterator of { items, offset, upToDate } batches.
 *
 * Batches are buffered in a queue; the consumer parks on a promise when the
 * queue is empty and is woken by either a new batch or stream closure. A
 * close failure is rethrown after the queue drains, unless the consumer
 * aborted first.
 */
async function* readJsonBatchesFromStream(streamResponse, abortSignal) {
  const pending = [];
  const wakeups = [];
  let finished = false;
  let failure;
  const wakeOne = () => {
    const wake = wakeups.shift();
    if (wake) wake();
  };
  const enqueue = (batch) => {
    pending.push(batch);
    wakeOne();
  };
  const finish = () => {
    finished = true;
    // Release every parked consumer so the loop can observe completion.
    while (wakeups.length > 0) wakeOne();
  };
  const unsubscribe = streamResponse.subscribeJson((batch) => {
    // Copy only the fields we expose; drop anything else on the batch.
    enqueue({
      items: batch.items,
      offset: batch.offset,
      upToDate: batch.upToDate
    });
  });
  streamResponse.closed.then(() => finish()).catch((closeError) => {
    failure = closeError;
    finish();
  });
  try {
    while (!abortSignal?.aborted) {
      const next = pending.shift();
      if (next !== undefined) {
        yield next;
        continue;
      }
      if (finished) break;
      // Queue empty and stream still open: park until woken.
      await new Promise((resolve) => {
        wakeups.push(resolve);
      });
    }
  } finally {
    unsubscribe();
  }
  if (failure !== undefined && !abortSignal?.aborted) throw failure;
}
/**
 * Concatenates the text of a message's `text` parts, reading `content` first
 * and falling back to `text`. Returns '' for anything that is not a message
 * object with an array of parts.
 */
function textContentFromMessage(message) {
  if (!message || typeof message !== `object`) return ``;
  const parts = message.parts;
  if (!Array.isArray(parts)) return ``;
  let combined = ``;
  for (const part of parts) {
    if (part?.type !== `text`) continue;
    if (typeof part.content === `string`) combined += part.content;
    else if (typeof part.text === `string`) combined += part.text;
  }
  return combined;
}
/**
 * Index of the last part whose type is `text`, or -1 when none exists.
 */
function findLastTextPartIndex(parts) {
  let lastIndex = -1;
  parts.forEach((part, index) => {
    if (part?.type === `text`) lastIndex = index;
  });
  return lastIndex;
}
/**
 * Folds a list of protocol chunks onto an existing message array and returns
 * the updated array (inputs are never mutated).
 *
 * Handled chunk types:
 * - MESSAGES_SNAPSHOT: replaces all messages (messages and parts copied).
 * - TEXT_MESSAGE_START: ensures a message with that id exists; a `tool` role
 *   is stored as `assistant`.
 * - TEXT_MESSAGE_CONTENT: appends `delta` (or cumulative `content`) to the
 *   message's last text part, creating message and part as needed.
 * Anything malformed or of another type is ignored.
 */
function applyChunksToMessages(currentMessages, chunks) {
  // Concatenated text of a message's text parts (`content` preferred, `text`
  // as fallback). Empty string for non-message inputs.
  const textOf = (message) => {
    if (!message || typeof message !== `object` || !Array.isArray(message.parts)) return ``;
    let combined = ``;
    for (const part of message.parts) {
      if (part?.type !== `text`) continue;
      combined += typeof part?.content === `string` ? part.content : typeof part?.text === `string` ? part.text : ``;
    }
    return combined;
  };
  let messages = [...currentMessages];
  // Index of the message with `messageId`, creating it at the end when absent.
  const indexOfOrCreate = (messageId, role) => {
    const existing = messages.findIndex((message) => message?.id === messageId);
    if (existing !== -1) return existing;
    messages = [...messages, {
      id: messageId,
      role: role === `tool` ? `assistant` : role,
      parts: []
    }];
    return messages.length - 1;
  };
  for (const chunk of chunks) {
    if (!chunk || typeof chunk !== `object` || typeof chunk.type !== `string`) continue;
    switch (chunk.type) {
      case `MESSAGES_SNAPSHOT`: {
        const snapshot = chunk.messages;
        messages = Array.isArray(snapshot) ? snapshot.map((message) => ({
          ...message,
          parts: Array.isArray(message?.parts) ? [...message.parts] : []
        })) : [];
        break;
      }
      case `TEXT_MESSAGE_START`: {
        if (typeof chunk.messageId === `string` && typeof chunk.role === `string`) indexOfOrCreate(chunk.messageId, chunk.role);
        break;
      }
      case `TEXT_MESSAGE_CONTENT`: {
        if (typeof chunk.messageId !== `string`) break;
        const index = indexOfOrCreate(chunk.messageId, `assistant`);
        const message = messages[index];
        const parts = Array.isArray(message.parts) ? [...message.parts] : [];
        const delta = typeof chunk.delta === `string` ? chunk.delta : typeof chunk.content === `string` ? chunk.content : ``;
        const combined = textOf(message) + delta;
        // Rewrite the last text part in place, or append a fresh one.
        let lastTextIndex = -1;
        for (let i = parts.length - 1; i >= 0; i -= 1) {
          if (parts[i]?.type === `text`) {
            lastTextIndex = i;
            break;
          }
        }
        if (lastTextIndex === -1) parts.push({ type: `text`, content: combined });
        else parts[lastTextIndex] = { ...parts[lastTextIndex], content: combined };
        messages[index] = { ...message, parts };
        break;
      }
    }
  }
  return messages;
}
/**
 * Strips the cumulative `content` field from TEXT_MESSAGE_CONTENT chunks
 * before storage (the `delta` field is sufficient to replay them). All other
 * chunks pass through unchanged.
 */
function sanitizeChunkForStorage(chunk) {
  const isContentChunk = !!chunk && typeof chunk === `object` && chunk.type === `TEXT_MESSAGE_CONTENT`;
  if (!isContentChunk) return chunk;
  const { content: _dropped, ...rest } = chunk;
  return rest;
}
/**
 * Reads the stored contents of a durable stream in one non-live pass
 * (starting at options.offset when given) and folds the chunks into a
 * messages array. Returns the materialized messages plus the stream offset
 * reached, which callers can use to resume a live read.
 */
async function materializeSnapshotFromDurableStream(options) {
const streamResponse = await stream({
url: options.readUrl,
json: true,
live: false,
offset: options.offset,
headers: mergeHeaders(options.headers)
});
const chunks = await streamResponse.json();
return {
messages: applyChunksToMessages([], chunks),
offset: streamResponse.offset
};
}
//#endregion
//#region src/server.ts
// Default content type for durable stream writes; chat session streams require it.
const DEFAULT_CONTENT_TYPE = `application/json`;
/**
 * Accepts either a URL instance or a string and always returns a string.
 */
function resolveUrl(url) {
  if (url instanceof URL) return url.toString();
  return url;
}
/**
 * Resolves a header map whose values may be plain strings or (async)
 * functions producing strings. Function values are invoked in parallel;
 * key order of the input is preserved in the result.
 */
async function resolveHeaders(headers) {
  if (!headers) return {};
  const names = Object.keys(headers);
  const values = await Promise.all(names.map((name) => {
    const value = headers[name];
    // Promise.all unwraps both plain values and returned promises.
    return typeof value === `function` ? value() : value;
  }));
  const resolved = {};
  names.forEach((name, index) => {
    resolved[name] = values[index];
  });
  return resolved;
}
/**
 * Creates the stream when createIfMissing is set. A 409 conflict meaning the
 * stream already exists (CONFLICT_EXISTS / CONFLICT_SEQ) is treated as
 * success; any other failure propagates.
 */
async function ensureStreamExists(stream$1, contentType, createIfMissing) {
  if (!createIfMissing) return;
  try {
    await stream$1.create({ contentType });
  } catch (error) {
    const isAlreadyCreated =
      error instanceof DurableStreamError &&
      error.status === 409 &&
      (error.code === `CONFLICT_EXISTS` || error.code === `CONFLICT_SEQ`);
    if (!isAlreadyCreated) throw error;
  }
}
/**
 * Builds a DurableStream client for the target's write URL (resolving any
 * async header values) and creates the stream unless creation is disabled.
 * createIfMissing defaults to true.
 */
async function ensureDurableStreamWithContentType(streamTarget, contentType) {
  const writeUrl = resolveUrl(streamTarget.writeUrl);
  const resolvedHeaders = await resolveHeaders(streamTarget.headers);
  const stream$1 = new DurableStream({
    url: writeUrl,
    headers: resolvedHeaders,
    contentType
  });
  await ensureStreamExists(stream$1, contentType, streamTarget.createIfMissing ?? true);
  return stream$1;
}
/**
 * Like ensureDurableStreamWithContentType, but enforces that chat session
 * streams are JSON: an explicit contentType other than application/json is
 * rejected.
 */
async function ensureDurableChatSessionStream(streamTarget) {
  const requested = streamTarget.contentType;
  const acceptable = requested === void 0 || requested === DEFAULT_CONTENT_TYPE;
  if (!acceptable) throw new Error(`Chat session streams must use content type "${DEFAULT_CONTENT_TYPE}"`);
  return ensureDurableStreamWithContentType(streamTarget, DEFAULT_CONTENT_TYPE);
}
/**
 * Appends every chunk from `source` to the stream as JSON, then closes the
 * stream. The stream is always closed, even when the source fails; a
 * STREAM_CLOSED error from close() is ignored, and a close failure never
 * masks an earlier source failure. Returns the final offset reported by
 * close() ('' when close did not succeed).
 */
async function writeSourceToStream(source, stream$1, contentType) {
  let finalOffset = ``;
  let sourceError;
  try {
    for await (const chunk of source) {
      await stream$1.append(JSON.stringify(chunk), { contentType });
    }
  } catch (error) {
    // Remember the failure; the stream is still closed below.
    sourceError = error;
  } finally {
    try {
      finalOffset = (await stream$1.close()).finalOffset;
    } catch (error) {
      const alreadyClosed = error instanceof DurableStreamError && error.code === `STREAM_CLOSED`;
      if (!alreadyClosed && sourceError === void 0) sourceError = error;
    }
  }
  if (sourceError !== void 0) throw sourceError;
  return finalOffset;
}
/**
 * Concatenated text of a message's `text` parts; `content` is preferred over
 * the legacy `text` field. Returns '' when parts is not an array.
 */
function messageText(message) {
  const parts = message.parts;
  if (!Array.isArray(parts)) return ``;
  const pieces = [];
  for (const part of parts) {
    if (part.type !== `text`) continue;
    if (typeof part.content === `string`) pieces.push(part.content);
    else if (typeof part.text === `string`) pieces.push(part.text);
  }
  return pieces.join(``);
}
/**
 * Keeps the known non-user roles; anything else collapses to `user`.
 */
function normalizeRole(role) {
  switch (role) {
    case `assistant`:
    case `system`:
    case `tool`:
      return role;
    default:
      return `user`;
  }
}
/**
 * Echoes a client-submitted message back as protocol chunks: a START chunk,
 * a CONTENT chunk when the message has text, and an END chunk. All chunks
 * share the message id (minted when absent), a normalized role, the `client`
 * model marker, and a single timestamp.
 */
function toMessageEchoChunks(message) {
  // Reuse the supplied id when it is a non-empty string; otherwise mint one.
  const messageId = typeof message.id === `string` && message.id.length > 0 ? message.id : crypto.randomUUID();
  // Known non-user roles pass through; anything else collapses to `user`.
  const role = message.role === `assistant` || message.role === `system` || message.role === `tool` ? message.role : `user`;
  // Concatenated text of the message's text parts (`content` over `text`).
  let text = ``;
  if (Array.isArray(message.parts)) {
    for (const part of message.parts) {
      if (part.type !== `text`) continue;
      text += typeof part.content === `string` ? part.content : typeof part.text === `string` ? part.text : ``;
    }
  }
  const timestamp = Date.now();
  const model = `client`;
  const chunks = [{
    type: `TEXT_MESSAGE_START`,
    messageId,
    role,
    model,
    timestamp
  }];
  if (text.length > 0) chunks.push({
    type: `TEXT_MESSAGE_CONTENT`,
    messageId,
    delta: text,
    model,
    timestamp
  });
  chunks.push({
    type: `TEXT_MESSAGE_END`,
    messageId,
    model,
    timestamp
  });
  return chunks;
}
/**
 * Writes each chunk to the stream as JSON, sanitized for storage. Appends
 * sequentially so the durable log order matches the input order.
 */
async function appendSanitizedChunksToStream(stream$1, chunks, contentType = DEFAULT_CONTENT_TYPE) {
  for (const chunk of chunks) {
    const sanitized = sanitizeChunkForStorage(chunk);
    await stream$1.append(JSON.stringify(sanitized), { contentType });
  }
}
/**
 * Drains an async chunk source into the stream, sanitizing each chunk and
 * awaiting every append so writes are back-pressured.
 */
async function pipeSanitizedChunksToStream(source, stream$1, contentType = DEFAULT_CONTENT_TYPE) {
  for await (const chunk of source) {
    const sanitized = sanitizeChunkForStorage(chunk);
    await stream$1.append(JSON.stringify(sanitized), { contentType });
  }
}
/**
 * Persists an async source of chunks into a durable stream and returns an
 * HTTP response pointing clients at the read URL.
 *
 * mode `await`: waits for the full write and responds 200 with
 * { streamUrl, finalOffset }. mode `immediate` (default): starts the write,
 * hands the background promise to options.waitUntil when provided, and
 * responds 201 with { streamUrl } right away. Both modes set a Location
 * header with the read URL.
 */
async function toDurableStreamResponse(source, options) {
const mode = options.mode ?? `immediate`;
const contentType = options.stream.contentType ?? DEFAULT_CONTENT_TYPE;
// Clients read from readUrl when configured, otherwise from the write URL.
const readUrl = resolveUrl(options.stream.readUrl ?? options.stream.writeUrl);
const stream$1 = await ensureDurableStreamWithContentType(options.stream, contentType);
// Start writing immediately; both modes consume this same promise.
const writer = writeSourceToStream(source, stream$1, contentType);
if (mode === `await`) {
const finalOffset = await writer;
return Response.json({
streamUrl: readUrl,
finalOffset
}, {
status: 200,
headers: { Location: readUrl }
});
}
// Log (rather than rethrow) background failures so the rejection is handled.
const backgroundTask = writer.catch((error) => {
console.error(`Durable stream write failed`, error);
});
options.waitUntil?.(backgroundTask);
const responseHeaders = new Headers({
Location: readUrl,
"Cache-Control": `no-store`
});
// Expose Location to cross-origin browser callers unless explicitly disabled.
if (options.exposeLocationHeader !== false) responseHeaders.set(`Access-Control-Expose-Headers`, `Location`);
return Response.json({ streamUrl: readUrl }, {
status: 201,
headers: responseHeaders
});
}
/**
 * Appends a chat turn to a durable chat session stream: first echoes the
 * client's new messages as chunks, then pipes the assistant response stream
 * in after them.
 *
 * mode `await`: waits for the assistant stream to finish and responds 204-like
 * 200 with an empty body. mode `immediate` (default): pipes the assistant
 * stream in the background (handed to options.waitUntil when provided) and
 * responds 202 right away.
 */
async function toDurableChatSessionResponse(options) {
const mode = options.mode ?? `immediate`;
const contentType = DEFAULT_CONTENT_TYPE;
const stream$1 = await ensureDurableChatSessionStream(options.stream);
// Echo the user's new messages into the log before the assistant output,
// so readers replay the conversation in order.
const newMessageChunks = options.newMessages.flatMap((message) => toMessageEchoChunks(message));
await appendSanitizedChunksToStream(stream$1, newMessageChunks, contentType);
const writeAssistant = pipeSanitizedChunksToStream(options.responseStream, stream$1, contentType);
if (mode === `await`) {
await writeAssistant;
return new Response(null, {
status: 200,
headers: { "Cache-Control": `no-store` }
});
}
// Log (rather than rethrow) background failures so the rejection is handled.
const backgroundTask = writeAssistant.catch((error) => {
console.error(`Durable chat session write failed`, error);
});
options.waitUntil?.(backgroundTask);
return new Response(null, {
status: 202,
headers: { "Cache-Control": `no-store` }
});
}
//#endregion
export { appendSanitizedChunksToStream, durableStreamConnection, ensureDurableChatSessionStream, materializeSnapshotFromDurableStream, pipeSanitizedChunksToStream, sanitizeChunkForStorage, toDurableChatSessionResponse, toDurableStreamResponse, toMessageEchoChunks };