/**
 * @tanstack/ai
 * Version: (unspecified)
 * Core TanStack AI library - Open source AI SDK
 * 103 lines (102 loc) • 4.22 kB
 * TypeScript
 *
 * NOTE(review): the lines above were npm registry page metadata pasted as
 * bare text; they are not valid TypeScript, so they are preserved here as a
 * comment to keep the declaration file parseable.
 */
import { StreamChunk } from './types.js';
/**
 * Collect all text content from a StreamChunk async iterable and return it as a single string.
 *
 * Fully consumes the stream before resolving: content from TEXT_MESSAGE_CONTENT
 * events is accumulated in order, and the concatenated text is returned once the
 * iterable is exhausted. Non-text events are ignored for the purpose of the result.
 *
 * @param stream - AsyncIterable of StreamChunks from chat()
 * @returns Promise<string> - The accumulated text content (empty string if the
 *   stream produced no text events)
 *
 * @example
 * ```typescript
 * const stream = chat({
 *   adapter: openaiText(),
 *   model: 'gpt-4o',
 *   messages: [{ role: 'user', content: 'Hello!' }]
 * });
 * const text = await streamToText(stream);
 * console.log(text); // "Hello! How can I help you today?"
 * ```
 */
export declare function streamToText(stream: AsyncIterable<StreamChunk>): Promise<string>;
/**
 * Convert a StreamChunk async iterable to a ReadableStream in Server-Sent Events format.
 *
 * The resulting stream emits encoded bytes (`Uint8Array`) in SSE wire format:
 * - Each chunk is prefixed with "data: "
 * - Each chunk is followed by "\n\n"
 * - Stream ends with "data: [DONE]\n\n"
 *
 * @param stream - AsyncIterable of StreamChunks from chat()
 * @param abortController - Optional AbortController to abort when the stream is
 *   cancelled by the consumer (presumably so the upstream model request is torn
 *   down early — confirm against the implementation)
 * @returns ReadableStream in Server-Sent Events format
 *
 * @example
 * ```typescript
 * const stream = chat({ adapter: openaiText(), model: "gpt-4o", messages: [...] });
 * const sse = toServerSentEventsStream(stream, abortController);
 * // NOTE(review): when wrapping this in a Response manually, remember the
 * // 'Content-Type: text/event-stream' header — or use toServerSentEventsResponse().
 * ```
 */
export declare function toServerSentEventsStream(stream: AsyncIterable<StreamChunk>, abortController?: AbortController): ReadableStream<Uint8Array>;
/**
 * Convert a StreamChunk async iterable to a Response in Server-Sent Events format.
 *
 * Convenience wrapper over the SSE stream form: the Response body emits chunks
 * in SSE wire format:
 * - Each chunk is prefixed with "data: "
 * - Each chunk is followed by "\n\n"
 * - Stream ends with "data: [DONE]\n\n"
 *
 * NOTE(review): presumably the Response carries SSE headers such as
 * 'Content-Type: text/event-stream' by default, with `init` merged on top —
 * confirm against the implementation.
 *
 * @param stream - AsyncIterable of StreamChunks from chat()
 * @param init - Optional Response initialization options (status, headers, …)
 *   extended with an optional `abortController` to abort when the client
 *   disconnects / the body stream is cancelled
 * @returns Response in Server-Sent Events format
 *
 * @example
 * ```typescript
 * const stream = chat({ adapter: openaiText(), model: "gpt-4o", messages: [...] });
 * return toServerSentEventsResponse(stream, { abortController });
 * ```
 */
export declare function toServerSentEventsResponse(stream: AsyncIterable<StreamChunk>, init?: ResponseInit & {
abortController?: AbortController;
}): Response;
/**
 * Convert a StreamChunk async iterable to a ReadableStream in HTTP stream format
 * (newline-delimited JSON, a.k.a. NDJSON).
 *
 * The resulting stream emits encoded bytes (`Uint8Array`) where:
 * - Each chunk is JSON.stringify'd and followed by "\n"
 * - No SSE formatting is applied (no "data: " prefix, no "[DONE]" terminator)
 *
 * This format is compatible with the `fetchHttpStream` connection adapter.
 *
 * @param stream - AsyncIterable of StreamChunks from chat()
 * @param abortController - Optional AbortController to abort when the stream is
 *   cancelled by the consumer
 * @returns ReadableStream in HTTP stream format (newline-delimited JSON)
 *
 * @example
 * ```typescript
 * const stream = chat({ adapter: openaiText(), model: "gpt-4o", messages: [...] });
 * const readableStream = toHttpStream(stream);
 * // Use with Response for HTTP streaming (not SSE)
 * return new Response(readableStream, {
 *   headers: { 'Content-Type': 'application/x-ndjson' }
 * });
 * ```
 */
export declare function toHttpStream(stream: AsyncIterable<StreamChunk>, abortController?: AbortController): ReadableStream<Uint8Array>;
/**
 * Convert a StreamChunk async iterable to a Response in HTTP stream format
 * (newline-delimited JSON, a.k.a. NDJSON).
 *
 * Convenience wrapper over the NDJSON stream form: the Response body emits
 * chunks where:
 * - Each chunk is JSON.stringify'd and followed by "\n"
 * - No SSE formatting is applied (no "data: " prefix, no "[DONE]" terminator)
 *
 * This format is compatible with the `fetchHttpStream` connection adapter.
 *
 * NOTE(review): presumably the Response defaults to an NDJSON content type
 * (e.g. 'application/x-ndjson'), with `init` merged on top — confirm against
 * the implementation.
 *
 * @param stream - AsyncIterable of StreamChunks from chat()
 * @param init - Optional Response initialization options (status, headers, …)
 *   extended with an optional `abortController` to abort when the client
 *   disconnects / the body stream is cancelled
 * @returns Response in HTTP stream format (newline-delimited JSON)
 *
 * @example
 * ```typescript
 * const stream = chat({ adapter: openaiText(), model: "gpt-4o", messages: [...] });
 * return toHttpResponse(stream, { abortController });
 * ```
 */
export declare function toHttpResponse(stream: AsyncIterable<StreamChunk>, init?: ResponseInit & {
abortController?: AbortController;
}): Response;