@copilotkit/runtime

````ts
import { b as CopilotServiceAdapter, C as CopilotRuntimeChatCompletionRequest, a as CopilotRuntimeChatCompletionResponse } from '../langserve-4a5c9217.js';
export { c as RemoteChain, R as RemoteChainParameters } from '../langserve-4a5c9217.js';
import { L as LangChainAdapter } from '../groq-adapter-25a2bd35.js';
export { G as GoogleGenerativeAIAdapter, f as GroqAdapter, e as GroqAdapterParams, a as OpenAIAdapter, O as OpenAIAdapterParams, c as OpenAIAssistantAdapter, b as OpenAIAssistantAdapterParams, d as UnifyAdapter, U as UnifyAdapterParams } from '../groq-adapter-25a2bd35.js';
import Anthropic from '@anthropic-ai/sdk';
import '../index-d4614f9b.js';
import '../graphql/types/base/index.js';
import 'rxjs';
import '@copilotkit/shared';
import 'openai';
import '@langchain/core/messages';
import '@langchain/core/tools';
import '@langchain/core/utils/stream';
import 'groq-sdk';

/**
 * Copilot Runtime adapter for Anthropic.
 *
 * ## Example
 *
 * ```ts
 * import { CopilotRuntime, AnthropicAdapter } from "@copilotkit/runtime";
 * import Anthropic from "@anthropic-ai/sdk";
 *
 * const copilotKit = new CopilotRuntime();
 *
 * const anthropic = new Anthropic({
 *   apiKey: "<your-api-key>",
 * });
 *
 * return new AnthropicAdapter({ anthropic });
 * ```
 */
interface AnthropicAdapterParams {
    /**
     * An optional Anthropic instance to use. If not provided, a new instance will be
     * created.
     */
    anthropic?: Anthropic;
    /**
     * The model to use.
     */
    model?: string;
}
declare class AnthropicAdapter implements CopilotServiceAdapter {
    private model;
    private _anthropic;
    get anthropic(): Anthropic;
    constructor(params?: AnthropicAdapterParams);
    process(request: CopilotRuntimeChatCompletionRequest): Promise<CopilotRuntimeChatCompletionResponse>;
}

interface OllamaAdapterOptions {
    model?: string;
}
declare class ExperimentalOllamaAdapter implements CopilotServiceAdapter {
    private model;
    constructor(options?: OllamaAdapterOptions);
    process(request: CopilotRuntimeChatCompletionRequest): Promise<CopilotRuntimeChatCompletionResponse>;
}

/**
 * Copilot Runtime adapter for AWS Bedrock.
 *
 * ## Example
 *
 * ```ts
 * import { CopilotRuntime, BedrockAdapter } from "@copilotkit/runtime";
 *
 * const copilotKit = new CopilotRuntime();
 *
 * return new BedrockAdapter({
 *   model: "amazon.nova-lite-v1:0",
 *   region: "us-east-1",
 *   credentials: {
 *     accessKeyId: process.env.AWS_ACCESS_KEY_ID,
 *     secretAccessKey: process.env.AWS_SECRET_ACCESS_KEY
 *   }
 * });
 * ```
 */
interface BedrockAdapterParams {
    /**
     * AWS Bedrock model ID to use.
     * @default "amazon.nova-lite-v1:0"
     */
    model?: string;
    /**
     * AWS region where Bedrock is available.
     * @default "us-east-1"
     */
    region?: string;
    /**
     * AWS credentials for Bedrock access.
     */
    credentials?: {
        accessKeyId?: string;
        secretAccessKey?: string;
    };
}
declare class BedrockAdapter extends LangChainAdapter {
    constructor(options?: BedrockAdapterParams);
}

/**
 * CopilotKit Empty Adapter
 *
 * This adapter is meant to preserve adherence to runtime requirements, while doing nothing.
 * Ideal if you don't want to connect an LLM to the runtime and only use your LangGraph agent.
 * Be aware that Copilot Suggestions will not work if you use this adapter.
 *
 * ## Example
 *
 * ```ts
 * import { CopilotRuntime, EmptyAdapter } from "@copilotkit/runtime";
 *
 * const copilotKit = new CopilotRuntime();
 *
 * return new EmptyAdapter();
 * ```
 */
declare class EmptyAdapter implements CopilotServiceAdapter {
    process(request: CopilotRuntimeChatCompletionRequest): Promise<CopilotRuntimeChatCompletionResponse>;
}
declare const ExperimentalEmptyAdapter: typeof EmptyAdapter;

export { AnthropicAdapter, AnthropicAdapterParams, BedrockAdapter, BedrockAdapterParams, CopilotRuntimeChatCompletionRequest, CopilotRuntimeChatCompletionResponse, CopilotServiceAdapter, EmptyAdapter, ExperimentalEmptyAdapter, ExperimentalOllamaAdapter, LangChainAdapter };
````
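Unlike the other adapters in this file, `ExperimentalOllamaAdapter` ships without a doc-comment example. Below is a minimal sketch that follows the same pattern as the examples above, based only on the declared `OllamaAdapterOptions`; the model name `"llama3"` is an assumption, not something this file specifies.

```ts
import { CopilotRuntime, ExperimentalOllamaAdapter } from "@copilotkit/runtime";

const copilotKit = new CopilotRuntime();

// Model name is an assumption; pass whichever model your local Ollama instance serves.
return new ExperimentalOllamaAdapter({ model: "llama3" });
```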
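For context, an adapter constructed this way is typically handed to one of the runtime's endpoint helpers rather than used on its own. The sketch below wires an `AnthropicAdapter` into an Express route via `copilotRuntimeNodeHttpEndpoint`; that helper and its option names come from the package's main entry point (not this file), so treat its exact signature as an assumption.

```ts
import express from "express";
import Anthropic from "@anthropic-ai/sdk";
import {
  CopilotRuntime,
  AnthropicAdapter,
  // Assumption: exported from the package's main entry point, not from this adapter file.
  copilotRuntimeNodeHttpEndpoint,
} from "@copilotkit/runtime";

const app = express();

app.use("/copilotkit", (req, res, next) => {
  // Same adapter construction pattern as the AnthropicAdapter doc comment above.
  const serviceAdapter = new AnthropicAdapter({
    anthropic: new Anthropic({ apiKey: process.env.ANTHROPIC_API_KEY }),
  });

  const handler = copilotRuntimeNodeHttpEndpoint({
    endpoint: "/copilotkit", // option names follow the CopilotKit docs; treat as an assumption
    runtime: new CopilotRuntime(),
    serviceAdapter,
  });

  return handler(req, res, next);
});

app.listen(4000);
```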