UNPKG

@gguf/claw

Version:

Multi-channel AI gateway with extensible messaging integrations

55 lines (54 loc) 1.75 kB
/**
 * Wire API dialect used to talk to a model provider.
 */
export type ModelApi =
  | "openai-completions"
  | "openai-responses"
  | "anthropic-messages"
  | "google-generative-ai"
  | "github-copilot"
  | "bedrock-converse-stream"
  | "ollama";

/**
 * Per-model compatibility flags.
 *
 * All fields are optional; absent flags presumably fall back to
 * provider defaults elsewhere in the codebase — not visible here.
 */
export type ModelCompatConfig = {
  supportsStore?: boolean;
  supportsDeveloperRole?: boolean;
  supportsReasoningEffort?: boolean;
  supportsUsageInStreaming?: boolean;
  supportsStrictMode?: boolean;
  /** Which request field carries the output-token limit. */
  maxTokensField?: "max_completion_tokens" | "max_tokens";
  /** Formatting convention for "thinking"/reasoning content. */
  thinkingFormat?: "openai" | "zai" | "qwen";
  requiresToolResultName?: boolean;
  requiresAssistantAfterToolResult?: boolean;
  requiresThinkingAsText?: boolean;
  requiresMistralToolIds?: boolean;
};

/**
 * How credentials are supplied to a provider.
 */
export type ModelProviderAuthMode = "api-key" | "aws-sdk" | "oauth" | "token";

/**
 * Static definition of a single model offered by a provider.
 */
export type ModelDefinitionConfig = {
  /** Provider-scoped model identifier. */
  id: string;
  /** Human-readable display name. */
  name: string;
  /** Overrides the provider-level API dialect when set. */
  api?: ModelApi;
  /** Whether the model emits reasoning/thinking output. */
  reasoning: boolean;
  /** Input modalities the model accepts. */
  input: Array<"text" | "image">;
  /** Pricing figures — units (e.g. per-token vs per-million) are not visible here. */
  cost: {
    input: number;
    output: number;
    cacheRead: number;
    cacheWrite: number;
  };
  contextWindow: number;
  maxTokens: number;
  /** Extra HTTP headers sent for this model only. */
  headers?: Record<string, string>;
  compat?: ModelCompatConfig;
};

/**
 * Configuration for one model provider endpoint and its models.
 */
export type ModelProviderConfig = {
  baseUrl: string;
  apiKey?: string;
  auth?: ModelProviderAuthMode;
  /** Default API dialect for every model under this provider. */
  api?: ModelApi;
  headers?: Record<string, string>;
  authHeader?: boolean;
  models: ModelDefinitionConfig[];
};

/**
 * Settings for automatic model discovery on AWS Bedrock.
 */
export type BedrockDiscoveryConfig = {
  enabled?: boolean;
  region?: string;
  providerFilter?: string[];
  refreshInterval?: number;
  /** Fallbacks applied when a discovered model reports no limits. */
  defaultContextWindow?: number;
  defaultMaxTokens?: number;
};

/**
 * Top-level models configuration.
 */
export type ModelsConfig = {
  /** Whether user-supplied providers merge with or replace the built-in set. */
  mode?: "merge" | "replace";
  /** Providers keyed by provider name. */
  providers?: Record<string, ModelProviderConfig>;
  bedrockDiscovery?: BedrockDiscoveryConfig;
};