flux-agent
FluxAgent - a flexible, pluggable AI Agent framework built with TypeScript, supporting streaming execution, an event system, a plugin system, knowledge-base management, and more (Protected Release)
TypeScript
import OpenAI from 'openai';
import { PhaseType } from './Phases';
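/**
 * A chat message in the OpenAI-compatible shape used throughout FluxAgent.
 * `content` is either plain text or an array of multimodal parts (text, or
 * image URLs with optional expiry metadata).
 *
 * @example
 * // Illustrative only: a minimal text-only user message.
 * const msg: Message = { role: 'user', content: 'Summarize this file.' };
 */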
export interface Message {
role: 'system' | 'user' | 'assistant' | 'tool';
content: string | Array<{
type: string;
text?: string;
image_url?: {
url: string;
expires?: number;
expiresAt?: number;
};
}>;
name?: string;
function_call?: {
name: string;
arguments: string;
};
phaseDeclear?: PhaseType;
tool_call_id?: string;
system?: boolean;
}
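/**
 * A function tool the model may call. `parameters` is a JSON Schema object,
 * matching the OpenAI tools API.
 *
 * @example
 * // Illustrative only: the tool name and schema are placeholders.
 * const lookup: Tool = {
 *   type: 'function',
 *   function: {
 *     name: 'get_weather',
 *     description: 'Look up the current weather for a city',
 *     parameters: {
 *       type: 'object',
 *       properties: { city: { type: 'string' } },
 *       required: ['city'],
 *     },
 *   },
 * };
 */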
export interface Tool {
type: 'function';
function: {
name: string;
description: string;
parameters: Record<string, any>;
};
}
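/**
 * Connection and sampling settings for a model. `baseURL` can point the
 * client at any OpenAI-compatible endpoint; `response_format` requests JSON
 * output, optionally constrained by a named JSON Schema.
 *
 * @example
 * // Illustrative only: the model name and key source are placeholders.
 * const config: LLMConfig = {
 *   modelName: 'gpt-4o-mini',
 *   apiKey: process.env.OPENAI_API_KEY ?? '',
 *   temperature: 0.2,
 *   maxTokens: 1024,
 * };
 */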
export interface LLMConfig {
modelName: string;
apiKey: string;
temperature?: number;
maxTokens?: number;
baseURL?: string;
frequency_penalty?: number;
presence_penalty?: number;
top_p?: number;
response_format?: {
type: 'json_object';
} | {
type: 'json_schema';
json_schema: {
name: string;
description?: string;
schema: Record<string, any>;
strict?: boolean;
};
};
}
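/**
 * Normalized result of a chat call: the assistant text (`null` typically
 * means the model answered with tool calls instead), any parsed tool calls,
 * and token usage as reported by the OpenAI SDK.
 */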
export interface LLMResponse {
content: string | null;
streamId?: string;
toolCalls?: Array<{
name: string;
arguments: Record<string, any>;
}>;
usage?: OpenAI.CompletionUsage;
}
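/**
 * Hooks fired while a streamed response is being produced. The optional `id`
 * argument appears to correspond to `LLMResponse.streamId`, letting
 * concurrent streams be told apart.
 *
 * @example
 * // Illustrative only: echo tokens to stdout as they arrive.
 * const callbacks: LLMStreamCallbacks = {
 *   onToken: (token) => process.stdout.write(token),
 *   onError: (err) => console.error(err.message),
 * };
 */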
export interface LLMStreamCallbacks {
onToken?: (token: string, id?: string) => void;
onPartialResponse?: (partial: string, id?: string) => void;
onToolCallStart?: (toolName: string, id?: string) => void;
onToolCallComplete?: (toolCall: {
name: string;
arguments: Record<string, any>;
}, id?: string) => void;
onComplete?: (response: LLMResponse, id?: string) => void;
onError?: (error: Error, id?: string) => void;
}
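/**
 * Minimal contract an LLM backend must satisfy. `streamChat` is optional,
 * so callers should feature-detect it before streaming (see the usage
 * sketch at the end of this file).
 */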
export interface ILLM {
modelName: string;
chat(messages: Message[], tools?: Tool[]): Promise<LLMResponse>;
streamChat?(messages: Message[], tools?: Tool[], callbacks?: LLMStreamCallbacks): Promise<LLMResponse>;
}
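/**
 * OpenAI-backed `ILLM` implementation. Judging by its signature, the
 * optional `onUsage` callback receives token usage after completed calls,
 * keyed by the `llmName` given at construction.
 */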
export declare class OpenAILLM implements ILLM {
private client;
private llmName;
modelName: string;
private temperature;
private maxTokens?;
private options;
private onUsage?;
constructor(config: LLMConfig, llmName: string, onUsage?: (llmName: string, usage: OpenAI.CompletionUsage) => void);
chat(messages: Message[], tools?: Tool[], options?: {
response_format?: {
type: 'json_object';
} | {
type: 'json_schema';
json_schema: {
name: string;
description?: string;
schema: Record<string, any>;
strict?: boolean;
};
};
}): Promise<LLMResponse>;
streamChat(messages: Message[], tools?: Tool[], callbacks?: LLMStreamCallbacks, toolChoice?: 'auto' | 'required' | 'none'): Promise<LLMResponse>;
}
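A minimal usage sketch, assuming the package's public entry point re-exports these symbols; the import path, model name, and environment variable below are placeholders, not taken from the package docs:

import { OpenAILLM, ILLM, Message, LLMConfig } from 'flux-agent';

async function main(): Promise<void> {
  const config: LLMConfig = {
    modelName: 'gpt-4o-mini', // placeholder model name
    apiKey: process.env.OPENAI_API_KEY ?? '',
  };

  // The optional third argument receives token usage per completed call.
  const llm: ILLM = new OpenAILLM(config, 'main', (llmName, usage) =>
    console.log(`[${llmName}] total tokens:`, usage.total_tokens));

  const messages: Message[] = [
    { role: 'system', content: 'You are a concise assistant.' },
    { role: 'user', content: 'Explain streaming in one sentence.' },
  ];

  // streamChat is optional on ILLM, so feature-detect before streaming.
  const response = llm.streamChat
    ? await llm.streamChat(messages, undefined, {
        onToken: (token) => process.stdout.write(token),
      })
    : await llm.chat(messages);

  console.log('\nContent:', response.content);
}

main().catch(console.error);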