@kdump/code-cli-any-llm
Version: (not captured in this extract)
> A unified gateway for the Gemini, opencode, crush, and Qwen Code AI CLIs
35 lines (34 loc) • 1.34 kB
TypeScript
import { OnApplicationBootstrap } from '@nestjs/common';
import { ConfigService } from '@nestjs/config';
import type { Response } from 'express';
import { LlmProviderResolverService } from '../services/llm-provider-resolver.service';
/**
 * Declaration of the gateway's OpenAI-compatible HTTP controller.
 *
 * Exposes OpenAI-style endpoints (models listing, chat completions, and the
 * Responses API) and forwards them to a configured backing LLM provider.
 * Implements `OnApplicationBootstrap` so provider setup can run once Nest
 * has finished wiring the application.
 *
 * NOTE(review): this is a `.d.ts` — only the shape is visible here; the
 * behavioral comments below are inferred from member names and should be
 * confirmed against the implementation file.
 */
export declare class OpenAIController implements OnApplicationBootstrap {
    // Injected Nest config source (see constructor).
    private readonly configService;
    // Injected resolver that selects the active LLM provider (see constructor).
    private readonly providerResolver;
    private readonly logger;
    // Guard flag — presumably set once provider initialization completes; confirm in impl.
    private initialized;
    // Gateway API mode switch; exact values not visible from this declaration.
    private gatewayApiMode;
    private aiProvider;
    // Provider-selection flags; likely mutually exclusive — TODO confirm in impl.
    private useCodexProvider;
    private useClaudeCodeProvider;
    private llmProvider;
    // Optional — may be undefined until a provider is resolved.
    private providerContext?;
    constructor(configService: ConfigService, providerResolver: LlmProviderResolverService);
    /** Nest lifecycle hook; runs after the application context is fully initialized. */
    onApplicationBootstrap(): void;
    /** Handles the OpenAI-compatible model-listing endpoint; writes directly to `res`. */
    listModels(res: Response): Promise<void>;
    /**
     * Handles an OpenAI-style chat-completion request.
     * `body` is untyped (`any`) here — request validation presumably happens
     * in the implementation; verify before relying on its shape.
     */
    createChatCompletion(body: any, res: Response): Promise<void>;
    /** Handles an OpenAI Responses-API-style request; writes directly to `res`. */
    createResponse(body: any, res: Response): Promise<void>;
    private initializeProvider;
    private ensureApiEnabled;
    private ensureProvider;
    private getActiveProviderConfig;
    private resolveDefaultModel;
    private normalizeChatCompletionRequest;
    private normalizeResponsesRequest;
    // Streaming helpers — names suggest SSE-style incremental responses; confirm in impl.
    private isStreaming;
    private streamChatCompletion;
    private streamResponse;
    private extractDeltaContent;
    private mergeUsage;
    private transformToResponsesPayload;
}