taskforce-aiagent
Version:
TaskForce is a modular, open-source, production-ready TypeScript agent framework for orchestrating AI agents, LLM-powered autonomous agents, task pipelines, dynamic toolchains, RAG workflows, and memory/retrieval systems.
54 lines (53 loc) • 1.9 kB
TypeScript
import { Tool } from "../tools/base/baseTool.js";
import { SupportedModel } from "../configs/enum.js";
/**
 * Tuning knobs that shape LLM text generation.
 * Individual models honor only a subset of these options.
 */
export interface GenerationOptions {
  /**
   * Sampling randomness. Smaller values (e.g. 0.2) make output more
   * deterministic; larger values (e.g. 0.8) make it more creative.
   *
   * @default 0.7
   * @supportedBy GPT-3.5, GPT-4, GPT-4o, Claude 3, Gemini 1.5 Pro/Flash, Mistral, Local LLaMA, DeepSeek
   */
  temperature?: number;
  /**
   * Nucleus (top-p) sampling cutoff, between 0 and 1. Typical values are
   * 0.9 or 0.95.
   *
   * @default 0.95
   * @supportedBy GPT-3.5, GPT-4, GPT-4o, Claude 3, Gemini 1.5 Pro/Flash, Mistral, Local LLaMA, DeepSeek
   */
  top_p?: number;
  /**
   * Upper bound on the number of tokens the model may emit.
   *
   * @default Varies by model
   * @supportedBy All models (with model-specific limits)
   */
  max_tokens?: number;
  /**
   * Penalizes tokens in proportion to how often they have already appeared,
   * reducing verbatim repetition. Positive values reduce repetition.
   *
   * @default 0
   * @supportedBy GPT-3.5, GPT-4, GPT-4o, DeepSeek, Mistral, Local LLaMA
   * @notSupportedBy Claude 3, Gemini
   */
  frequency_penalty?: number;
  /**
   * Penalizes any token that has appeared at least once, nudging the model
   * toward new content. Positive values (0.1–1.0) encourage diversity.
   *
   * @default 0
   * @supportedBy GPT-3.5, GPT-4, GPT-4o, DeepSeek, Mistral, Local LLaMA
   * @notSupportedBy Claude 3, Gemini
   */
  presence_penalty?: number;
}
/**
 * One turn of a conversation passed to the model.
 */
export type ChatMessage = {
  /** Originator of the message. */
  role: "system" | "user" | "assistant";
  /** Plain-text body of the message. */
  content: string;
};
/**
 * Invokes the named AI model with a chat transcript and resolves with the
 * model's textual reply. Telemetry is recorded when TELEMETRY=true.
 *
 * @param agentName - Identifier of the calling agent (presumably used for
 *   telemetry attribution — confirm in the implementation).
 * @param modelName - A {@link SupportedModel} value, or a free-form model
 *   identifier string.
 * @param messages - Ordered conversation history to send to the model.
 * @param verbose - Optional verbosity flag; exact effect is defined by the
 *   implementation.
 * @param tools - Optional tools made available to the model.
 * @param modelOptions - Optional generation tuning; see {@link GenerationOptions}.
 * @returns The model's response text.
 */
export declare function callAIModel(
  agentName: string,
  modelName: SupportedModel | string,
  messages: ChatMessage[],
  verbose?: boolean,
  tools?: Tool[],
  modelOptions?: GenerationOptions
): Promise<string>;