@juspay/neurolink
Universal AI Development Platform with working MCP integration, multi-provider support, and a professional CLI. Built-in tools are operational and 58+ external MCP servers are discoverable. Connect to filesystem, GitHub, database operations, and more.
/**
* Streaming response handling for Amazon SageMaker Provider (Phase 2)
*
* This module provides full streaming support with automatic protocol detection
* and model-specific parsing for various SageMaker deployment patterns.
*/
import { ReadableStream } from "stream/web";
import type { SageMakerStreamChunk, SageMakerUsage, SageMakerConfig } from "./types.js";
/**
* Create a SageMaker streaming response with automatic protocol detection
*
* @param responseStream - Raw response stream from SageMaker endpoint
* @param endpointName - SageMaker endpoint name for capability detection
* @param config - SageMaker configuration
* @param options - Stream options and metadata
* @returns Promise resolving to ReadableStream compatible with AI SDK
*/
export declare function createSageMakerStream(responseStream: AsyncIterable<Uint8Array>, endpointName: string, config: SageMakerConfig, options?: {
prompt?: string;
abortSignal?: AbortSignal;
onChunk?: (chunk: SageMakerStreamChunk) => void;
onComplete?: (usage: SageMakerUsage) => void;
onError?: (error: Error) => void;
}): Promise<ReadableStream<unknown>>;
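/*
 * Usage sketch (illustrative only, not part of the generated declarations):
 * one way to consume the ReadableStream returned by createSageMakerStream.
 * The endpoint name "my-llm-endpoint" is a placeholder, and treating chunk
 * values as plain strings is an assumption — the actual payload shape depends
 * on the protocol detected for the endpoint.
 *
 *   async function streamToText(
 *     rawStream: AsyncIterable<Uint8Array>,
 *     config: SageMakerConfig,
 *   ): Promise<string> {
 *     const stream = await createSageMakerStream(rawStream, "my-llm-endpoint", config, {
 *       prompt: "Hello",
 *       onChunk: (chunk) => console.debug("chunk", chunk),
 *       onError: (error) => console.error("stream error", error),
 *     });
 *     let text = "";
 *     const reader = stream.getReader();
 *     for (;;) {
 *       const { done, value } = await reader.read();
 *       if (done) break;
 *       if (typeof value === "string") text += value;
 *     }
 *     return text;
 *   }
 */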
/**
* Create a synthetic stream from complete text (for backward compatibility)
*/
export declare function createSyntheticStream(text: string, usage: SageMakerUsage, options?: {
onChunk?: (chunk: SageMakerStreamChunk) => void;
onComplete?: (usage: SageMakerUsage) => void;
}): Promise<ReadableStream<unknown>>;
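/*
 * Usage sketch (illustrative only): wrapping an already-complete completion in
 * a synthetic stream so callers that expect the streaming interface keep
 * working against non-streaming endpoints. The logging callbacks are purely
 * for demonstration.
 *
 *   async function wrapCompletion(
 *     completion: string,
 *     usage: SageMakerUsage,
 *   ): Promise<ReadableStream<unknown>> {
 *     return createSyntheticStream(completion, usage, {
 *       onChunk: (chunk) => console.debug("synthetic chunk", chunk),
 *       onComplete: (finalUsage) => console.debug("synthetic stream done", finalUsage),
 *     });
 *   }
 */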
/**
* Estimate token usage from text content
*
* @param prompt - Input prompt text
* @param completion - Generated completion text
* @returns Estimated usage information
*/
export declare function estimateTokenUsage(prompt: string, completion: string): SageMakerUsage;
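/*
 * Usage sketch (illustrative only): when an endpoint returns no token
 * metadata, derive an approximate usage record from the raw text and feed it
 * into the synthetic-stream path above. The estimation heuristic is internal
 * to the provider, so treat the resulting counts as approximate.
 *
 *   async function syntheticStreamWithEstimatedUsage(
 *     prompt: string,
 *     completion: string,
 *   ): Promise<ReadableStream<unknown>> {
 *     const usage = estimateTokenUsage(prompt, completion);
 *     return createSyntheticStream(completion, usage);
 *   }
 */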