taskflow-ai
Version: (not captured)
TaskFlow AI — intelligent PRD document parsing and task-management assistant; supports multi-model AI collaboration and MCP editor integration; a CLI tool built for development teams
79 lines (78 loc) • 2.19 kB
TypeScript
/**
* 智谱AI模型提供商
* 支持GLM系列模型的API调用
*/
import { ChineseLLMProvider, ChineseLLMType, ModelConfig, ChatRequest, ChatResponse, StreamResponse, ModelInfo } from '../chinese-llm-provider';
import { Logger } from '../../../infra/logger';
/**
* 智谱AI模型列表
*/
export declare enum ZhipuModel {
    GLM_4 = "glm-4",// GLM-4 flagship chat model
    GLM_4V = "glm-4v",// GLM-4V (multimodal / vision)
    GLM_3_TURBO = "glm-3-turbo",// GLM-3-Turbo
    CHATGLM_6B = "chatglm_6b",// ChatGLM-6B (open-source base)
    CHATGLM_STD = "chatglm_std",// ChatGLM standard edition
    CHATGLM_LITE = "chatglm_lite",// ChatGLM lite edition
    CHATGLM_PRO = "chatglm_pro"// ChatGLM pro edition
}
/**
* 智谱AI API配置
*/
export interface ZhipuConfig extends ModelConfig {
    /** Override for the Zhipu API base URL; falls back to the provider default when omitted. */
    endpoint?: string;
    /** Which GLM model variant to call; presumably defaults to a provider-chosen model — confirm in implementation. */
    modelVersion?: ZhipuModel;
}
/**
* 智谱AI模型提供商实现
*/
export declare class ZhipuProvider extends ChineseLLMProvider {
    readonly type = ChineseLLMType.ZHIPU_GLM;
    readonly name = "Zhipu AI";
    readonly description = "\u667A\u8C31AI GLM\u7CFB\u5217\u6A21\u578BAPI\u670D\u52A1";
    // Underlying HTTP/SDK client; type not visible in this declaration file.
    private client;
    protected config: ZhipuConfig;
    /**
     * @param config Provider configuration (API key, optional endpoint and model version).
     * @param logger Logger used for diagnostics.
     */
    constructor(config: ZhipuConfig, logger: Logger);
    /**
     * Sends a chat request and resolves with the complete (non-streaming) response.
     */
    chat(request: ChatRequest): Promise<ChatResponse>;
    /**
     * Streaming chat; invokes `onChunk` for each partial response and
     * resolves once the stream ends.
     */
    chatStream(request: ChatRequest, onChunk: (chunk: StreamResponse) => void): Promise<void>;
    /**
     * Streaming chat variant that returns an async iterator of chunks
     * instead of taking a callback.
     */
    streamChat(request: ChatRequest): Promise<AsyncIterable<StreamResponse>>;
    /**
     * Parses the raw streaming API payload into StreamResponse chunks.
     */
    private parseStreamResponse;
    /**
     * Validates the configured API key, resolving `true` when it is accepted.
     */
    validateApiKey(): Promise<boolean>;
    /**
     * Returns the list of model identifiers this provider supports.
     */
    getSupportedModels(): string[];
    /**
     * Returns information about the currently configured model.
     */
    getModelInfo(): Promise<ModelInfo>;
    /**
     * Returns static metadata for a specific model identifier.
     * NOTE(review): synchronous, unlike getModelInfo — presumably a local lookup table; confirm in implementation.
     */
    getSpecificModelInfo(model: string): {
        name: string;
        description: string;
        maxTokens: number;
        supportsFunctions?: boolean;
        supportsVision?: boolean;
    };
    /**
     * Merges `newConfig` into the current provider configuration.
     */
    updateConfig(newConfig: Partial<ZhipuConfig>): void;
}