taskflow-ai
TaskFlow AI: an intelligent PRD document parsing and task management assistant with multi-model AI collaboration and MCP editor integration; a CLI tool built for development teams
TypeScript
/**
 * DeepSeek model provider
 * Supports API calls to the DeepSeek family of models
 */
import { ChineseLLMProvider, ChineseLLMType, ModelConfig, ChatRequest, ChatResponse, StreamResponse, ModelInfo } from '../chinese-llm-provider';
import { Logger } from '../../../infra/logger';
import { ModelDetails } from '../../../types/model';
/**
 * DeepSeek model list
 */
export declare enum DeepSeekModel {
    DEEPSEEK_CHAT = "deepseek-chat", // DeepSeek Chat
    DEEPSEEK_CODER = "deepseek-coder", // DeepSeek Coder
    DEEPSEEK_V2 = "deepseek-v2", // DeepSeek V2
    DEEPSEEK_V2_CHAT = "deepseek-v2-chat", // DeepSeek V2 Chat
    DEEPSEEK_V2_CODER = "deepseek-v2-coder"
}
/**
 * DeepSeek API configuration
 */
export interface DeepSeekConfig extends ModelConfig {
    endpoint?: string;
    modelVersion?: DeepSeekModel;
}
/**
 * DeepSeek model provider implementation
 */
export declare class DeepSeekProvider extends ChineseLLMProvider {
    readonly type = ChineseLLMType.DEEPSEEK;
    readonly name = "DeepSeek";
    readonly description = "DeepSeek\u5927\u6A21\u578BAPI\u670D\u52A1";
    private client;
    protected config: DeepSeekConfig;
    constructor(config: DeepSeekConfig, logger: Logger);
    /**
     * Chat completion
     */
    chat(request: ChatRequest): Promise<ChatResponse>;
    /**
     * Streaming chat completion (invokes a callback per chunk)
     */
    chatStream(request: ChatRequest, onChunk: (chunk: StreamResponse) => void): Promise<void>;
    /**
     * Streaming chat completion (returns an async iterator)
     */
    streamChat(request: ChatRequest): Promise<AsyncIterable<StreamResponse>>;
    /**
     * Parse a streaming response
     */
    private parseStreamResponse;
    /**
     * Validate the API key
     */
    validateApiKey(): Promise<boolean>;
    /**
     * Get the list of supported models
     */
    getSupportedModels(): string[];
    /**
     * Get model information
     */
    getModelInfo(): Promise<ModelInfo>;
    /**
     * Get information about a specific model
     */
    getSpecificModelInfo(model: string): ModelDetails;
    /**
     * Update the provider configuration
     */
    updateConfig(newConfig: Partial<DeepSeekConfig>): void;
}
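/**
 * Usage sketch (not part of the published typings): a minimal example of how the
 * DeepSeekProvider API declared above might be driven. Only DeepSeekConfig's own
 * fields (`endpoint`, `modelVersion`) are visible in this file; the `apiKey` field,
 * the `messages` shape of ChatRequest, the Logger constructor, and the
 * './deepseek-provider' import path are assumptions made for illustration.
 */
import { DeepSeekProvider, DeepSeekModel, DeepSeekConfig } from './deepseek-provider';
import { ChatRequest } from '../chinese-llm-provider';
import { Logger } from '../../../infra/logger';

async function demo(): Promise<void> {
    // Casts are used because the full ModelConfig/ChatRequest shapes are not shown here.
    const config = {
        apiKey: process.env.DEEPSEEK_API_KEY,      // assumed to be inherited from ModelConfig
        endpoint: 'https://api.deepseek.com',      // optional endpoint override
        modelVersion: DeepSeekModel.DEEPSEEK_CHAT, // pick a model from the enum above
    } as DeepSeekConfig;

    const provider = new DeepSeekProvider(config, new Logger());

    // Check the key before issuing requests.
    if (!(await provider.validateApiKey())) {
        throw new Error('Invalid DeepSeek API key');
    }

    const request = {
        messages: [{ role: 'user', content: 'Summarize this PRD section.' }],
    } as ChatRequest;

    // One-shot completion.
    const response = await provider.chat(request);
    console.log(response);

    // Streaming via the async-iterator variant.
    for await (const chunk of await provider.streamChat(request)) {
        console.log(chunk);
    }
}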