/**
 * @baseai/core — type declarations
 * The Web AI Framework's core — https://BaseAI.dev
 * (269 lines / 263 loc, 9.34 kB)
 */
import { ChatCompletionStream } from 'openai/lib/ChatCompletionStream';
/** Model identifiers routable to OpenAI (prefixed with `openai:`). */
type OpenAIModels =
	| 'openai:gpt-4o'
	| 'openai:gpt-4o-2024-08-06'
	| 'openai:gpt-4o-mini'
	| 'openai:gpt-4-turbo'
	| 'openai:gpt-4-turbo-preview'
	| 'openai:gpt-4-0125-preview'
	| 'openai:gpt-4-1106-preview'
	| 'openai:gpt-4'
	| 'openai:gpt-4-0613'
	| 'openai:gpt-4-32k'
	| 'openai:gpt-3.5-turbo'
	| 'openai:gpt-3.5-turbo-0125'
	| 'openai:gpt-3.5-turbo-1106'
	| 'openai:gpt-3.5-turbo-16k';
/** Model identifiers routable to Together AI (prefixed with `together:`). */
type TogetherModels =
	| 'together:meta-llama/Meta-Llama-3.1-405B-Instruct-Turbo'
	| 'together:meta-llama/Meta-Llama-3.1-70B-Instruct-Turbo'
	| 'together:meta-llama/Meta-Llama-3.1-8B-Instruct-Turbo'
	| 'together:meta-llama/Llama-3-70b-chat-hf'
	| 'together:meta-llama/Llama-3-8b-chat-hf'
	| 'together:togethercomputer/Llama-2-7B-32K-Instruct'
	| 'together:meta-llama/Llama-2-13b-chat-hf'
	| 'together:meta-llama/Llama-2-70b-chat-hf'
	| 'together:google/gemma-7b-it'
	| 'together:google/gemma-2b-it'
	| 'together:mistralai/Mistral-7B-Instruct-v0.1'
	| 'together:mistralai/Mistral-7B-Instruct-v0.2'
	| 'together:mistralai/Mixtral-8x7B-Instruct-v0.1'
	| 'together:mistralai/Mixtral-8x22B-Instruct-v0.1'
	| 'together:databricks/dbrx-instruct'
	| 'together:meta-llama/Llama-3.3-70B-Instruct-Turbo';
/** Model identifiers routable to Anthropic (prefixed with `anthropic:`). */
type AnthropicModels =
	| 'anthropic:claude-3-5-sonnet-latest'
	| 'anthropic:claude-3-5-sonnet-20240620'
	| 'anthropic:claude-3-opus-20240229'
	| 'anthropic:claude-3-sonnet-20240229'
	| 'anthropic:claude-3-haiku-20240307'
	| 'anthropic:claude-3-5-haiku-20241022';
/** Model identifiers routable to Groq (prefixed with `groq:`). */
type GroqModels =
	| 'groq:llama-3.1-70b-versatile'
	| 'groq:llama-3.1-8b-instant'
	| 'groq:llama3-70b-8192'
	| 'groq:llama3-8b-8192'
	| 'groq:mixtral-8x7b-32768'
	| 'groq:gemma2-9b-it'
	| 'groq:gemma-7b-it'
	| 'groq:llama-3.3-70b-versatile';
/** Model identifiers routable to Google (prefixed with `google:`). */
type GoogleModels =
	| 'google:gemini-1.5-pro-latest'
	| 'google:gemini-1.5-flash-latest'
	| 'google:gemini-1.5-flash-8b-latest'
	| 'google:gemini-pro';
/** Model identifiers routable to Cohere (prefixed with `cohere:`). */
type CohereModels = 'cohere:command-r' | 'cohere:command-r-plus';
/** Model identifiers routable to Fireworks AI (prefixed with `fireworks:`). */
type FireworksAIModels =
	| 'fireworks:llama-v3p1-405b-instruct'
	| 'fireworks:llama-v3p1-8b-instruct'
	| 'fireworks:llama-v3p1-70b-instruct'
	| 'fireworks:llama-v3-70b-instruct'
	| 'fireworks:yi-large'
	| 'fireworks:llama-v3p3-70b-instruct';
/** Model identifiers routable to Perplexity (prefixed with `perplexity:`). */
type PerplexityModels =
	| 'perplexity:llama-3.1-sonar-huge-128k-online'
	| 'perplexity:llama-3.1-sonar-large-128k-online'
	| 'perplexity:llama-3.1-sonar-small-128k-online'
	| 'perplexity:llama-3.1-sonar-large-128k-chat'
	| 'perplexity:llama-3.1-sonar-small-128k-chat';
/** Model identifiers routable to Mistral AI (prefixed with `mistral:`). */
type MistralAIModels =
	| 'mistral:mistral-large-latest'
	| 'mistral:open-mistral-nemo'
	| 'mistral:codestral-latest';
/** Model identifiers routable to xAI (prefixed with `xai:`). */
type XAIModels = 'xai:grok-beta';
/** Any Ollama model: the `ollama:` prefix followed by an arbitrary model name. */
type OllamaModels = `ollama:${string}`;
/** Role attached to a chat message. */
type MessageRole = 'function' | 'assistant' | 'system' | 'user' | 'tool';
/**
 * A function invocation produced by the model: the target function's name
 * plus its arguments as a JSON-encoded string (parse before use).
 *
 * Renamed from `Function` — the original name shadowed the global `Function`
 * type. The interface is not exported and is structurally identical, so the
 * public surface is unchanged.
 */
interface FunctionCall {
	name: string;
	arguments: string;
}
/** A single tool call emitted by the LLM. */
interface ToolCallResult {
	id: string;
	/** Only function-type tool calls are modeled here. */
	type: 'function';
	function: FunctionCall;
}
/** A single chat message exchanged with the model. */
interface Message {
role: MessageRole;
/** Text content; null is allowed — presumably when the assistant replies with tool calls only; verify. */
content: string | null;
name?: string;
/** Presumably the id of the tool call a 'tool' message responds to — TODO confirm. */
tool_call_id?: string;
tool_calls?: ToolCallResult[];
}
/** Minimal function descriptor used when pinning a specific tool. */
interface ToolFunction {
name: string;
}
/** Tool-choice variant that forces the named function to be called. */
interface ToolChoiceFunction {
type: 'function';
function: ToolFunction;
}
/** How tools are selected: model decides ('auto'), some tool is required ('required'), or a specific function is forced. */
type ToolChoice = 'auto' | 'required' | ToolChoiceFunction;
/** Declaration of a callable tool exposed to the model. */
interface Tools {
type: 'function';
function: {
name: string;
description?: string;
/** Parameter schema — presumably JSON Schema, matching provider conventions; verify. */
parameters?: Record<string, any>;
};
}
/** Every model identifier BaseAI can route to, namespaced by provider prefix. */
type Model =
	| OpenAIModels
	| TogetherModels
	| AnthropicModels
	| GroqModels
	| GoogleModels
	| CohereModels
	| FireworksAIModels
	| PerplexityModels
	| MistralAIModels
	| XAIModels
	| OllamaModels;
/**
 * Configuration of a pipe: model selection, sampling parameters, tool wiring,
 * seed messages, and attached memory.
 */
interface Pipe$1 {
apiKey?: string;
name: string;
description?: string;
/** Visibility of the pipe. */
status: 'public' | 'private';
model: Model;
/** Stream responses chunk-by-chunk when true. */
stream?: boolean;
/** Presumably enables provider JSON output mode — TODO confirm. */
json?: boolean;
store?: boolean;
moderate?: boolean;
/** Nucleus-sampling cutoff. */
top_p: number;
max_tokens: number;
temperature: number;
presence_penalty: number;
frequency_penalty: number;
/** Sequences that terminate generation. */
stop: string[];
tool_choice: ToolChoice;
parallel_tool_calls: boolean;
/** Seed conversation messages for the pipe. */
messages: Message[];
variables: any[];
tools: any[];
/** Memories attached to this pipe, referenced by name. */
memory: {
name: string;
}[];
}
/** Streaming runner — a thin alias over openai's ChatCompletionStream helper. */
interface Runner extends ChatCompletionStream<null> {
}
/** A named value substituted into the pipe's prompt/messages. */
interface Variable {
name: string;
value: string;
}
/** Per-call options for Pipe.run(). */
interface RunOptions {
messages?: Message[];
variables?: Variable[];
/** Presumably a server-side conversation id to continue — TODO confirm. */
threadId?: string;
/** When true, the response includes raw HTTP headers (see RunResponse.rawResponse). */
rawResponse?: boolean;
runTools?: boolean;
tools?: Tools[];
name?: string;
apiKey?: string;
llmKey?: string;
}
/** RunOptions with streaming explicitly requested — selects the streaming overload of run(). */
interface RunOptionsStream extends RunOptions {
stream: boolean;
}
/** Token accounting returned by the provider. */
interface Usage {
prompt_tokens: number;
completion_tokens: number;
total_tokens: number;
}
/** Non-streaming result of a pipe run: chat-completion fields plus a convenience `completion` string. */
interface RunResponse {
/** The generated text. */
completion: string;
threadId?: string;
id: string;
object: string;
/** Creation timestamp — presumably Unix seconds, as in provider responses; verify. */
created: number;
model: string;
choices: ChoiceGenerate[];
usage: Usage;
system_fingerprint: string | null;
/** Present when RunOptions.rawResponse was set. */
rawResponse?: {
headers: Record<string, string>;
};
}
/** Streaming result of a pipe run. */
interface RunResponseStream {
stream: ReadableStream<any>;
threadId: string | null;
/** Present when RunOptions.rawResponse was set. */
rawResponse?: {
headers: Record<string, string>;
};
}
/** Options accepted by the Pipe constructor. */
interface PipeOptions extends Pipe$1 {
/** Presumably caps sequential tool-call round-trips — TODO confirm. */
maxCalls?: number;
/** Presumably targets the production deployment — TODO confirm. */
prod?: boolean;
}
/** One completion choice in a non-streaming response. */
interface ChoiceGenerate {
index: number;
message: Message;
logprobs: boolean | null;
finish_reason: string;
}
/**
 * Client for executing a configured pipe, with optional tool-call resolution
 * and streaming. Construct with PipeOptions, then call run().
 */
declare class Pipe {
private request;
private pipe;
private tools;
private maxCalls;
private hasTools;
private prod;
private baseUrl;
private entityApiKey?;
constructor(options: PipeOptions);
private getToolsFromPipe;
private runTools;
private hasNoToolCalls;
private getMessagesToSend;
private isStreamRequested;
private warnIfToolsWithStream;
private handleStreamResponse;
/** Streaming overload — resolves to a RunResponseStream. */
run(options: RunOptionsStream): Promise<RunResponseStream>;
/** Non-streaming overload — resolves to a complete RunResponse. */
run(options: RunOptions): Promise<RunResponse>;
private createRequest;
}
/**
 * Generates text by running the given pipe to completion (non-streaming).
 *
 * @param options - RunOptions plus the Pipe instance to execute.
 * @returns A promise that resolves to the full RunResponse.
 */
declare const generateText: (options: RunOptions & {
pipe: Pipe;
}) => Promise<RunResponse>;
/**
 * Streams text by running the given pipe in streaming mode.
 *
 * @param options - RunOptions plus the Pipe instance to execute.
 * @returns A promise that resolves to a RunResponseStream.
 */
declare const streamText: (options: RunOptions & {
pipe: Pipe;
}) => Promise<RunResponseStream>;
/** Chunk narrowed by isContent(): carries generated text. */
interface ContentChunk {
type: 'content';
content: string;
}
/** Chunk narrowed by isToolCall(): carries a tool call. */
interface ToolCallChunk {
type: 'toolCall';
toolCall: ToolCallResult;
}
/** One choice inside a raw streaming chunk. */
interface ChoiceStream {
index: number;
delta: Delta;
logprobs: boolean | null;
finish_reason: string;
}
/** Incremental update within a streamed choice. */
interface Delta {
role?: MessageRole;
content?: string;
tool_calls?: ToolCallResult[];
}
/** Chunk that processChunk() could not classify; carries the raw payload. */
interface UnknownChunk {
type: 'unknown';
rawChunk: ChunkStream;
}
/** Raw streaming chunk (chat-completion chunk shape). */
interface ChunkStream {
id: string;
object: string;
created: number;
model: string;
choices: ChoiceStream[];
}
/**
 * Result of processChunk().
 * NOTE(review): this is an optional-fields bag rather than the discriminated
 * union ContentChunk | ToolCallChunk | UnknownChunk that the is* guards imply;
 * a union would be stricter, but changing it would alter the public type.
 */
interface Chunk {
type: 'content' | 'toolCall' | 'unknown';
content?: string;
toolCall?: ToolCallResult;
rawChunk?: ChunkStream;
}
/**
 * Processes a raw streaming chunk and returns a classified Chunk object.
 *
 * ```ts
 * for await (const chunk of runner) {
 * const processedChunk = processChunk({rawChunk: chunk});
 * if (isContent(processedChunk)) {
 * process.stdout.write(processedChunk.content);
 * }
 * }
 * ```
 *
 * @param rawChunk - The raw chunk to process; untyped (`any`) — presumably a ChunkStream; verify.
 * @returns The processed Chunk object.
 */
declare const processChunk: ({ rawChunk }: {
rawChunk: any;
}) => Chunk;
/**
 * Type guard: checks if the given chunk is a ContentChunk.
 *
 * @param chunk - The chunk to check.
 * @returns True if the chunk is a ContentChunk, false otherwise.
 */
declare const isContent: (chunk: Chunk) => chunk is ContentChunk;
/**
 * Type guard: checks if the given chunk is a ToolCallChunk.
 *
 * @param chunk - The chunk to check.
 * @returns True if the chunk is of type 'toolCall', otherwise false.
 */
declare const isToolCall: (chunk: Chunk) => chunk is ToolCallChunk;
/**
 * Type guard: checks if the given chunk is an UnknownChunk.
 *
 * @param chunk - The chunk to check.
 * @returns True if the chunk is of type 'unknown', false otherwise.
 */
declare const isUnknown: (chunk: Chunk) => chunk is UnknownChunk;
/**
 * Retrieves the text content from a given chunk.
 *
 * @param chunk - The chunk object; untyped (`any`) — presumably a ChunkStream; verify.
 * @returns The text content from the chunk.
 */
declare const getTextContent: (chunk: any) => string;
/**
 * Retrieves the text delta from a given streaming chunk.
 *
 * @param chunk - The chunk stream to extract the text delta from.
 * @returns The text delta content, or an empty string if it is not available.
 */
declare const getTextDelta: (chunk: ChunkStream) => string;
/**
 * Writes the streamed content of a Runner to the standard output.
 *
 * @param runner - The Runner whose streamed content is printed.
 * @returns A Promise that resolves when the printing is complete.
 */
declare const printStreamToStdout: (runner: Runner) => Promise<void>;
// Public API surface of @baseai/core: the Pipe client, text generation/streaming
// helpers, and chunk-processing utilities with their supporting types.
export { type Chunk, type ChunkStream, Pipe, type PipeOptions, type RunOptions, type RunOptionsStream, type RunResponse, type RunResponseStream, type Usage, type Variable, generateText, getTextContent, getTextDelta, isContent, isToolCall, isUnknown, printStreamToStdout, processChunk, streamText };