openagentic
Version:
A TypeScript framework for building AI agents with self-contained tool orchestration capabilities
1,329 lines (1,324 loc) • 453 kB
JavaScript
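The bundle below shows the framework's main pieces: an Orchestrator class that wraps the AI SDK's generateText with tool support, a ProviderManager that resolves models and API keys, LangChain tool conversion helpers, and an orchestrator registry. A minimal usage sketch, assuming Orchestrator is exported from the package root (the export name is inferred from this bundle, not from published docs):

  import { Orchestrator } from "openagentic";

  // Hypothetical: "gpt-4o-mini" is auto-detected as an OpenAI model and
  // OPENAI_API_KEY is read from the environment by ProviderManager.
  const agent = new Orchestrator({
    model: "gpt-4o-mini",
    systemPrompt: "You are a concise research assistant.",
    maxIterations: 5
  });

  const result = await agent.execute("Summarize what tool orchestration means.");
  if (result.success) {
    console.log(result.result);        // generated text
    console.log(result.toolCallsUsed); // names of any tools that were invoked
  }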
var __defProp = Object.defineProperty;
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
var __getOwnPropNames = Object.getOwnPropertyNames;
var __hasOwnProp = Object.prototype.hasOwnProperty;
var __esm = (fn, res) => function __init() {
return fn && (res = (0, fn[__getOwnPropNames(fn)[0]])(fn = 0)), res;
};
var __export = (target, all) => {
for (var name in all)
__defProp(target, name, { get: all[name], enumerable: true });
};
var __copyProps = (to, from, except, desc) => {
if (from && typeof from === "object" || typeof from === "function") {
for (let key of __getOwnPropNames(from))
if (!__hasOwnProp.call(to, key) && key !== except)
__defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
}
return to;
};
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
// src/types.ts
import { z } from "zod";
import "ai";
var AIModelSchema, MessageSchema, ExecutionResultSchema;
var init_types = __esm({
"src/types.ts"() {
"use strict";
AIModelSchema = z.object({
provider: z.enum(["openai", "anthropic", "google", "google-vertex", "perplexity", "xai", "custom"]),
model: z.string(),
apiKey: z.string().optional(),
baseURL: z.string().optional(),
temperature: z.number().min(0).max(2).optional().default(0.7),
maxTokens: z.number().positive().optional(),
topP: z.number().min(0).max(1).optional(),
project: z.string().optional(),
location: z.string().optional()
});
MessageSchema = z.object({
role: z.enum(["system", "user", "assistant", "tool"]),
content: z.string(),
toolCallId: z.string().optional(),
toolCalls: z.array(z.object({
toolCallId: z.string(),
toolName: z.string(),
args: z.record(z.any())
})).optional()
});
ExecutionResultSchema = z.object({
success: z.boolean(),
result: z.any().optional(),
error: z.string().optional(),
messages: z.array(MessageSchema),
iterations: z.number(),
toolCallsUsed: z.array(z.string()),
executionStats: z.object({
totalDuration: z.number(),
stepsExecuted: z.number(),
toolCallsExecuted: z.number(),
tokensUsed: z.number().optional(),
averageStepDuration: z.number(),
averageToolCallDuration: z.number()
}).optional(),
usage: z.object({
totalTokens: z.number(),
promptTokens: z.number(),
completionTokens: z.number()
}).optional()
});
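// Illustrative only: a message that satisfies MessageSchema, including the
// optional toolCalls shape an assistant message carries when it requests a
// tool invocation (the tool name and args here are made up).
//
//   MessageSchema.parse({
//     role: "assistant",
//     content: "",
//     toolCalls: [{ toolCallId: "call_1", toolName: "web_search", args: { query: "openagentic" } }]
//   });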
}
});
// src/tools/utils.ts
var utils_exports = {};
__export(utils_exports, {
autoConvertLangChainTools: () => autoConvertLangChainTools,
convertLangchainTool: () => convertLangchainTool,
convertLangchainToolSync: () => convertLangchainToolSync,
getAnthropicModelInstance: () => getAnthropicModelInstance,
getModel: () => getModel,
hasLangChainTools: () => hasLangChainTools,
toOpenAgenticTool: () => toOpenAgenticTool
});
import "ai";
import { tool } from "ai";
import { z as z2 } from "zod";
import { createAnthropic } from "@ai-sdk/anthropic";
import { createAmazonBedrock } from "@ai-sdk/amazon-bedrock";
function isLangChainTool(tool26) {
return tool26 && typeof tool26 === "object" && typeof tool26.name === "string" && typeof tool26.description === "string" && (typeof tool26.call === "function" || typeof tool26.invoke === "function");
}
function convertLangChainSchema(schema) {
if (!schema) {
return z2.object({});
}
if (schema._def) {
return schema;
}
if (typeof schema === "object" && schema.type) {
return convertJsonSchemaToZod(schema);
}
return z2.any();
}
function convertJsonSchemaToZod(jsonSchema) {
if (jsonSchema.type === "object") {
const shape = {};
const properties = jsonSchema.properties || {};
for (const [key, prop] of Object.entries(properties)) {
shape[key] = convertJsonSchemaToZod(prop);
}
return z2.object(shape);
}
if (jsonSchema.type === "string") {
let schema = z2.string();
if (jsonSchema.description) {
schema = schema.describe(jsonSchema.description);
}
return schema;
}
if (jsonSchema.type === "number") {
return z2.number();
}
if (jsonSchema.type === "boolean") {
return z2.boolean();
}
if (jsonSchema.type === "array") {
const itemSchema = jsonSchema.items ? convertJsonSchemaToZod(jsonSchema.items) : z2.any();
return z2.array(itemSchema);
}
return z2.any();
}
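// Illustrative only: convertJsonSchemaToZod recursively maps a JSON Schema
// fragment to a Zod schema. String descriptions are preserved via .describe(),
// nested objects and arrays are converted recursively, and unknown types fall
// back to z.any().
//
//   const paramsSchema = convertJsonSchemaToZod({
//     type: "object",
//     properties: {
//       query: { type: "string", description: "Search query" },
//       limit: { type: "number" }
//     }
//   });
//   paramsSchema.parse({ query: "hello", limit: 3 }); // passes validation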
async function convertLangchainTool(lcTool, opts) {
if (!isLangChainTool(lcTool)) {
throw new Error("Provided tool is not a valid LangChain Tool or StructuredTool");
}
let schema;
if (opts?.paramsSchema) {
schema = opts.paramsSchema;
} else if (lcTool.schema) {
schema = convertLangChainSchema(lcTool.schema);
} else {
schema = z2.object({
input: z2.string().describe("Input text for the tool")
});
}
const aiTool = tool({
description: lcTool.description,
parameters: schema,
execute: async (args) => {
try {
let result;
if (typeof lcTool.invoke === "function") {
result = await lcTool.invoke(args);
} else if (typeof lcTool.call === "function") {
const input = typeof args === "object" && args.input !== void 0 ? args.input : Object.keys(args).length === 1 ? Object.values(args)[0] : JSON.stringify(args);
result = await lcTool.call(input);
} else {
throw new Error("LangChain tool has no callable invoke or call method");
}
if (typeof result === "object") {
return JSON.stringify(result);
}
return String(result);
} catch (error) {
const errorMessage = error instanceof Error ? error.message : String(error);
console.error(`LangChain tool "${lcTool.name}" execution failed:`, errorMessage);
throw new Error(`Tool execution failed: ${errorMessage}`);
}
}
});
const toolDetails21 = {
toolId: opts?.toolId || `langchain_${lcTool.name.toLowerCase().replace(/[^a-z0-9]/g, "_")}`,
name: lcTool.name,
useCases: opts?.useCases || [
`LangChain ${lcTool.name} integration`,
lcTool.description || "Imported from LangChain"
],
logo: opts?.logo || "\u{1F517}"
// LangChain emoji
};
return {
...aiTool,
...toolDetails21
};
}
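// Illustrative only: any object with a string name, a string description, and
// an invoke() or call() method passes isLangChainTool and can be converted.
// The echo tool below is made up; the result combines the AI SDK tool fields
// with OpenAgentic metadata (toolId, name, useCases, logo).
//
//   const echoTool = {
//     name: "echo",
//     description: "Echoes its input",
//     invoke: async ({ input }) => input
//   };
//   const agenticTool = await convertLangchainTool(echoTool, { toolId: "echo_tool" });
//   // agenticTool.toolId === "echo_tool"
//   // await agenticTool.execute({ input: "hi" }) → "hi"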
async function autoConvertLangChainTools(tools) {
if (!Array.isArray(tools)) {
return tools;
}
const convertedTools = await Promise.all(
tools.map(async (tool26) => {
if (isLangChainTool(tool26)) {
console.log(`\u{1F517} Auto-converting LangChain tool: ${tool26.name}`);
return await convertLangchainTool(tool26);
}
return tool26;
})
);
return convertedTools;
}
function hasLangChainTools(tools) {
if (!Array.isArray(tools)) {
return false;
}
return tools.some((tool26) => isLangChainTool(tool26));
}
function convertLangchainToolSync(lcTool, opts) {
if (!isLangChainTool(lcTool)) {
throw new Error("Provided tool is not a valid LangChain Tool or StructuredTool");
}
let schema;
if (opts?.paramsSchema) {
schema = opts.paramsSchema;
} else if (lcTool.schema) {
schema = convertLangChainSchema(lcTool.schema);
} else {
schema = z2.object({
input: z2.string().describe("Input text for the tool")
});
}
const aiTool = tool({
description: lcTool.description,
parameters: schema,
execute: async (args) => {
try {
let result;
if (typeof lcTool.invoke === "function") {
result = await lcTool.invoke(args);
} else if (typeof lcTool.call === "function") {
const input = typeof args === "object" && args.input !== void 0 ? args.input : Object.keys(args).length === 1 ? Object.values(args)[0] : JSON.stringify(args);
result = await lcTool.call(input);
} else {
throw new Error("LangChain tool has no callable invoke or call method");
}
if (typeof result === "object") {
return JSON.stringify(result);
}
return String(result);
} catch (error) {
const errorMessage = error instanceof Error ? error.message : String(error);
console.error(`LangChain tool "${lcTool.name}" execution failed:`, errorMessage);
throw new Error(`Tool execution failed: ${errorMessage}`);
}
}
});
const toolDetails21 = {
toolId: opts?.toolId || `langchain_${lcTool.name.toLowerCase().replace(/[^a-z0-9]/g, "_")}`,
name: lcTool.name,
useCases: opts?.useCases || [
`LangChain ${lcTool.name} integration`,
lcTool.description || "Imported from LangChain"
],
logo: opts?.logo || "\u{1F517}"
// LangChain emoji
};
return {
...aiTool,
...toolDetails21
};
}
function toOpenAgenticTool(tool26, details) {
return {
...tool26,
...details
};
}
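// Illustrative only: toOpenAgenticTool simply merges an AI SDK tool with
// OpenAgentic metadata. The weather tool below is a made-up example.
//
//   const weatherTool = toOpenAgenticTool(
//     tool({
//       description: "Look up the current weather for a city",
//       parameters: z2.object({ city: z2.string() }),
//       execute: async ({ city }) => `Sunny in ${city}`
//     }),
//     {
//       toolId: "weather_lookup",
//       name: "Weather Lookup",
//       useCases: ["Answering weather questions"],
//       logo: "\u2600\uFE0F"
//     }
//   );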
function getModel(model) {
const bedrockCredentials = ProviderManager.getBedrockCredentials();
if (bedrockCredentials.accessKeyId && bedrockCredentials.secretAccessKey) {
if (model.includes("sonnet")) {
return "us.anthropic.claude-sonnet-4-20250514-v1:0";
} else if (model.includes("opus")) {
return "us.anthropic.claude-4-opus-20250514-v1:0";
} else {
return model;
}
} else {
return model;
}
}
function getAnthropicModelInstance(model) {
const bedrockCredentials = ProviderManager.getBedrockCredentials();
let modelInstance;
let provider;
if (bedrockCredentials.accessKeyId && bedrockCredentials.secretAccessKey) {
console.log("Using Bedrock");
provider = createAmazonBedrock({
region: bedrockCredentials.region,
accessKeyId: bedrockCredentials.accessKeyId,
secretAccessKey: bedrockCredentials.secretAccessKey
});
if (model.includes("sonnet")) {
modelInstance = provider("us.anthropic.claude-sonnet-4-20250514-v1:0");
console.log("Model: Claude Sonnet 4");
} else if (model.includes("opus")) {
modelInstance = provider("us.anthropic.claude-4-opus-20250514-v1:0");
console.log("Model: Claude Opus 4");
} else {
throw new Error(`Model "${model}" not supported`);
}
} else {
console.log("Using Anthropic");
const apiKey = process.env.ANTHROPIC_API_KEY;
if (!apiKey) {
throw new Error("ANTHROPIC_API_KEY environment variable is required");
}
provider = createAnthropic({
apiKey
});
modelInstance = provider(model);
console.log("Model:", model);
}
return { provider, modelInstance };
}
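// Illustrative only: getAnthropicModelInstance prefers AWS Bedrock when
// BEDROCK_ACCESS_KEY_ID / BEDROCK_SECRET_ACCESS_KEY (or user-supplied Bedrock
// keys) are configured, mapping "sonnet"/"opus" names to Bedrock model IDs;
// otherwise it falls back to the Anthropic API and requires ANTHROPIC_API_KEY.
//
//   const { modelInstance } = getAnthropicModelInstance("claude-sonnet-4-20250514");
//   // With Bedrock credentials → "us.anthropic.claude-sonnet-4-20250514-v1:0" via Bedrock
//   // Without them             → "claude-sonnet-4-20250514" via the Anthropic API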
var init_utils = __esm({
"src/tools/utils.ts"() {
"use strict";
init_types();
init_manager();
}
});
// src/providers/manager.ts
var providerConfigs, ProviderManager;
var init_manager = __esm({
"src/providers/manager.ts"() {
"use strict";
init_utils();
providerConfigs = {
openai: {
baseURL: "https://api.openai.com/v1",
models: {
"gpt-4": {
contextWindow: 8192,
cost: { input: 0.03, output: 0.06 },
description: "Most capable GPT-4 model"
},
"gpt-4-turbo": {
contextWindow: 128e3,
cost: { input: 0.01, output: 0.03 },
description: "GPT-4 Turbo with larger context window"
},
"gpt-4o": {
contextWindow: 128e3,
cost: { input: 5e-3, output: 0.015 },
description: "GPT-4 Omni - fastest and most cost-effective"
},
"gpt-4o-mini": {
contextWindow: 128e3,
cost: { input: 15e-5, output: 6e-4 },
description: "Smaller, faster GPT-4o variant"
},
"o3": {
contextWindow: 2e5,
cost: { input: 0.06, output: 0.24 },
description: "Latest reasoning model"
},
"o3-mini": {
contextWindow: 2e5,
cost: { input: 0.015, output: 0.06 },
description: "Smaller o3 variant with faster inference"
}
}
},
anthropic: {
baseURL: "https://api.anthropic.com",
models: {
"claude-opus-4-20250514": {
contextWindow: 2e5,
cost: { input: 0.015, output: 0.075 },
description: "Most capable Claude 4 model"
},
"claude-sonnet-4-20250514": {
contextWindow: 2e5,
cost: { input: 3e-3, output: 0.015 },
description: "Balanced Claude 4 model for most use cases"
},
"claude-3-7-sonnet-latest": {
contextWindow: 2e5,
cost: { input: 3e-3, output: 0.015 },
description: "Latest Claude 3.7 Sonnet model"
},
"claude-3-5-sonnet-latest": {
contextWindow: 2e5,
cost: { input: 3e-3, output: 0.015 },
description: "Latest Claude 3.5 Sonnet model"
}
}
},
google: {
baseURL: "https://generativelanguage.googleapis.com/v1beta",
models: {
"gemini-2.5-pro": {
contextWindow: 2e6,
cost: { input: 1e-3, output: 2e-3 },
description: "Latest Gemini 2.5 Pro preview model"
},
"gemini-2.5-flash": {
contextWindow: 1e6,
cost: { input: 5e-4, output: 1e-3 },
description: "Fast Gemini 2.5 Flash preview model"
},
"gemini-1.5-pro": {
contextWindow: 2e6,
cost: { input: 125e-5, output: 5e-3 },
description: "Gemini 1.5 Pro with large context window"
},
"gemini-1.5-flash": {
contextWindow: 1e6,
cost: { input: 75e-6, output: 3e-4 },
description: "Fast and efficient Gemini 1.5 model"
},
"gemini-2.5-flash-lite-preview-06-17": {
contextWindow: 1e6,
cost: { input: 75e-6, output: 3e-4 },
description: "Fast and efficient Gemini 2.5 Flash Lite preview model"
}
}
},
"google-vertex": {
baseURL: "https://us-central1-aiplatform.googleapis.com",
models: {
"gemini-2.5-pro": {
contextWindow: 2e6,
cost: { input: 1e-3, output: 2e-3 },
description: "Latest Gemini 2.5 Pro preview model via Vertex AI"
},
"gemini-2.5-flash": {
contextWindow: 1e6,
cost: { input: 5e-4, output: 1e-3 },
description: "Fast Gemini 2.5 Flash preview model via Vertex AI"
},
"gemini-1.5-pro": {
contextWindow: 2e6,
cost: { input: 125e-5, output: 5e-3 },
description: "Gemini 1.5 Pro via Vertex AI"
},
"gemini-1.5-flash": {
contextWindow: 1e6,
cost: { input: 75e-6, output: 3e-4 },
description: "Fast Gemini 1.5 model via Vertex AI"
}
}
},
perplexity: {
baseURL: "https://api.perplexity.ai",
models: {
"llama-3.1-sonar-small-128k-online": {
contextWindow: 127072,
cost: { input: 2e-4, output: 2e-4 },
description: "Small Llama 3.1 Sonar with online search"
},
"llama-3.1-sonar-large-128k-online": {
contextWindow: 127072,
cost: { input: 1e-3, output: 1e-3 },
description: "Large Llama 3.1 Sonar with online search"
},
"llama-3.1-sonar-huge-128k-online": {
contextWindow: 127072,
cost: { input: 5e-3, output: 5e-3 },
description: "Huge Llama 3.1 Sonar with online search"
}
}
},
xai: {
baseURL: "https://api.x.ai/v1",
models: {
"grok-beta": {
contextWindow: 131072,
cost: { input: 5e-3, output: 0.015 },
description: "Grok conversational AI model"
}
}
}
};
ProviderManager = class {
static userApiKeys;
/**
* Set user-provided API keys that take precedence over environment variables
*/
static setUserApiKeys(apiKeys) {
this.userApiKeys = apiKeys;
if (apiKeys && Object.keys(apiKeys).length > 0) {
console.log("\u{1F511} User API keys configured for providers:", Object.keys(apiKeys).length);
}
}
/**
* Get AWS credentials from user API keys or environment variables
*/
static getAwsCredentials() {
const userKeys = this.userApiKeys;
return {
accessKeyId: userKeys?.awsAccessKeyId || process.env.MY_AWS_ACCESS_KEY_ID || process.env.AWS_ACCESS_KEY_ID,
secretAccessKey: userKeys?.awsSecretAccessKey || process.env.MY_AWS_SECRET_ACCESS_KEY || process.env.AWS_SECRET_ACCESS_KEY,
region: userKeys?.awsRegion || process.env.MY_AWS_REGION || process.env.AWS_REGION,
bucketName: userKeys?.awsS3Bucket || process.env.MY_S3_BUCKET_NAME || process.env.S3_BUCKET_NAME
};
}
/**
* Get AWS Bedrock credentials from user API keys or environment variables
*/
static getBedrockCredentials() {
const userKeys = this.userApiKeys;
return {
accessKeyId: userKeys?.bedrockAccessKeyId || process.env.BEDROCK_ACCESS_KEY_ID,
secretAccessKey: userKeys?.bedrockSecretAccessKey || process.env.BEDROCK_SECRET_ACCESS_KEY,
region: userKeys?.bedrockRegion || process.env.BEDROCK_REGION
};
}
/**
* Create a model configuration from a string or AIModel object
* Automatically detects provider from model name if string is provided
*/
static createModel(input) {
if (typeof input === "string") {
return this.autoDetectProvider(input);
}
return this.validateAndNormalizeModel(input);
}
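// Illustrative only: a bare model name is auto-detected and normalized; an
// unknown name falls back to the openai provider and then throws if the model
// is not listed in providerConfigs.
//
//   const model = ProviderManager.createModel("gpt-4o");
//   // → { provider: "openai", model: "gpt-4o", apiKey: <OPENAI_API_KEY or user key>, temperature: 0.7 }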
/**
* Create an AI SDK provider instance for the given model
*/
static async createProvider(model) {
const apiKey = model.apiKey ?? this.getDefaultApiKey(model.provider);
switch (model.provider) {
case "openai": {
const { createOpenAI: createOpenAI4 } = await import("@ai-sdk/openai");
const config = {};
if (apiKey !== void 0) config.apiKey = apiKey;
if (model.baseURL !== void 0) config.baseURL = model.baseURL;
return createOpenAI4(config);
}
case "anthropic": {
const { provider } = getAnthropicModelInstance(model.model);
return provider;
}
case "google": {
const { createGoogleGenerativeAI: createGoogleGenerativeAI3 } = await import("@ai-sdk/google");
const config = {};
if (apiKey !== void 0) config.apiKey = apiKey;
return createGoogleGenerativeAI3(config);
}
case "google-vertex": {
const { createVertex } = await import("@ai-sdk/google-vertex");
const config = {};
if (model.project !== void 0) config.project = model.project;
if (model.location !== void 0) config.location = model.location;
return createVertex(config);
}
case "perplexity": {
const { createPerplexity: createPerplexity2 } = await import("@ai-sdk/perplexity");
const config = {};
if (apiKey !== void 0) config.apiKey = apiKey;
return createPerplexity2(config);
}
case "xai": {
const { createXai: createXai2 } = await import("@ai-sdk/xai");
const config = {};
if (apiKey !== void 0) config.apiKey = apiKey;
return createXai2(config);
}
case "custom": {
if (!model.baseURL) {
throw new Error("Custom provider requires baseURL");
}
const { createOpenAI: createOpenAI4 } = await import("@ai-sdk/openai");
const config = {
baseURL: model.baseURL
};
if (apiKey !== void 0) config.apiKey = apiKey;
return createOpenAI4(config);
}
default:
throw new Error(`Unsupported provider: ${model.provider}`);
}
}
/**
* Create a provider for a specific provider name (for tool context)
*/
static async createProviderByName(providerName, apiKey) {
const key = apiKey ?? this.getDefaultApiKey(providerName);
switch (providerName) {
case "openai": {
const { createOpenAI: createOpenAI4 } = await import("@ai-sdk/openai");
if (!key) throw new Error("OpenAI API key not found");
return createOpenAI4({ apiKey: key });
}
case "anthropic": {
const { createAnthropic: createAnthropic3 } = await import("@ai-sdk/anthropic");
if (!key) throw new Error("Anthropic API key not found");
return createAnthropic3({ apiKey: key });
}
case "google": {
const { createGoogleGenerativeAI: createGoogleGenerativeAI3 } = await import("@ai-sdk/google");
if (!key) throw new Error("Google API key not found");
return createGoogleGenerativeAI3({ apiKey: key });
}
case "perplexity": {
const { createPerplexity: createPerplexity2 } = await import("@ai-sdk/perplexity");
if (!key) throw new Error("Perplexity API key not found");
return createPerplexity2({ apiKey: key });
}
case "xai": {
const { createXai: createXai2 } = await import("@ai-sdk/xai");
if (!key) throw new Error("xAI API key not found");
return createXai2({ apiKey: key });
}
default:
throw new Error(`Unsupported provider: ${providerName}`);
}
}
/**
* Get all available providers and their models
*/
static getAllProviders() {
return Object.entries(providerConfigs).map(([provider, config]) => ({
provider,
models: Object.keys(config.models)
}));
}
/**
* Get supported models for a provider
*/
static getProviderModels(provider) {
const config = providerConfigs[provider];
return config ? Object.keys(config.models) : [];
}
/**
* Check if a model is supported by a provider
*/
static isModelSupported(provider, model) {
const models = this.getProviderModels(provider);
return models.includes(model);
}
/**
* Get model information (context window, cost, description)
*/
static getModelInfo(provider, model) {
const config = providerConfigs[provider];
if (!config) {
throw new Error(`Unknown provider: ${provider}`);
}
const modelInfo = config.models[model];
if (!modelInfo) {
throw new Error(`Unknown model: ${model} for provider: ${provider}`);
}
return modelInfo;
}
// Private methods
static autoDetectProvider(modelName) {
let provider;
let apiKey;
if (modelName.includes("gpt") || modelName.includes("o3")) {
provider = "openai";
apiKey = this.getDefaultApiKey("openai");
} else if (modelName.includes("claude")) {
provider = "anthropic";
apiKey = this.getDefaultApiKey("anthropic");
} else if (modelName.includes("gemini")) {
provider = "google";
apiKey = this.getDefaultApiKey("google");
} else if (modelName.includes("grok")) {
provider = "xai";
apiKey = this.getDefaultApiKey("xai");
} else if (modelName.includes("llama") && modelName.includes("sonar")) {
provider = "perplexity";
apiKey = this.getDefaultApiKey("perplexity");
} else {
provider = "openai";
apiKey = this.getDefaultApiKey("openai");
console.warn(`Unknown model "${modelName}", defaulting to OpenAI provider`);
}
if (!this.isModelSupported(provider, modelName)) {
throw new Error(`Model "${modelName}" not found in ${provider} configuration`);
}
return {
provider,
model: modelName,
apiKey,
temperature: 0.7
};
}
static validateAndNormalizeModel(model) {
if (!model.provider || !model.model) {
throw new Error("AIModel must have provider and model fields");
}
const normalizedModel = { ...model };
if (normalizedModel.apiKey === void 0) {
normalizedModel.apiKey = this.getDefaultApiKey(normalizedModel.provider);
}
if (normalizedModel.temperature === void 0) {
normalizedModel.temperature = 0.7;
}
return normalizedModel;
}
static getDefaultApiKey(provider) {
if (this.userApiKeys && provider in this.userApiKeys) {
return this.userApiKeys[provider];
}
switch (provider) {
case "openai":
return process.env.OPENAI_API_KEY;
case "anthropic":
return process.env.ANTHROPIC_API_KEY;
case "google":
return process.env.GOOGLE_API_KEY;
case "google-vertex":
return void 0;
// Vertex uses service account auth
case "perplexity":
return process.env.PERPLEXITY_API_KEY;
case "xai":
return process.env.XAI_API_KEY;
case "custom":
return void 0;
// Custom providers handle their own auth
default:
return void 0;
}
}
};
}
});
// src/orchestrators/registry.ts
function registerOrchestrator(orchestrator) {
if (!orchestrator || !orchestrator.id) {
throw new Error("Orchestrator must have a valid ID");
}
if (orchestratorRegistry.has(orchestrator.id)) {
throw new Error(`Orchestrator with ID "${orchestrator.id}" already exists`);
}
validateOrchestrator(orchestrator);
orchestratorRegistry.set(orchestrator.id, orchestrator);
console.log(`\u{1F3AD} Orchestrator registered: ${orchestrator.id} (${orchestrator.type})`);
}
function getOrchestrator(id) {
if (!id || typeof id !== "string") {
return void 0;
}
return orchestratorRegistry.get(id);
}
function listOrchestrators() {
return Array.from(orchestratorRegistry.values());
}
function getOrchestratorsByType(type) {
return Array.from(orchestratorRegistry.values()).filter((o) => o.type === type);
}
function hasOrchestrator(id) {
return orchestratorRegistry.has(id);
}
function unregisterOrchestrator(id) {
const removed = orchestratorRegistry.delete(id);
if (removed) {
console.log(`\u{1F3AD} Orchestrator unregistered: ${id}`);
}
return removed;
}
function clearOrchestratorRegistry() {
const count = orchestratorRegistry.size;
orchestratorRegistry.clear();
console.log(`\u{1F3AD} Orchestrator registry cleared: ${count} orchestrators removed`);
}
function getRegistryStats() {
const orchestrators = Array.from(orchestratorRegistry.values());
const byType = {
"prompt-based": 0,
"custom-logic": 0
};
orchestrators.forEach((o) => {
byType[o.type]++;
});
return {
total: orchestrators.length,
byType,
orchestratorIds: orchestrators.map((o) => o.id)
};
}
function validateOrchestrator(orchestrator) {
if (!orchestrator.id || typeof orchestrator.id !== "string") {
throw new Error("Orchestrator must have a valid string ID");
}
if (!orchestrator.name || typeof orchestrator.name !== "string") {
throw new Error("Orchestrator must have a valid string name");
}
if (!orchestrator.description || typeof orchestrator.description !== "string") {
throw new Error("Orchestrator must have a valid string description");
}
if (!orchestrator.type || !["prompt-based", "custom-logic"].includes(orchestrator.type)) {
throw new Error("Orchestrator must have a valid type (prompt-based or custom-logic)");
}
if (typeof orchestrator.execute !== "function") {
throw new Error("Orchestrator must implement execute method");
}
if (typeof orchestrator.getName !== "function") {
throw new Error("Orchestrator must implement getName method");
}
if (typeof orchestrator.getDescription !== "function") {
throw new Error("Orchestrator must implement getDescription method");
}
if (typeof orchestrator.getType !== "function") {
throw new Error("Orchestrator must implement getType method");
}
if (orchestrator.type === "prompt-based") {
const promptOrchestrator = orchestrator;
if (!promptOrchestrator.systemPrompt || typeof promptOrchestrator.systemPrompt !== "string") {
throw new Error("Prompt-based orchestrator must have a valid systemPrompt property");
}
if (typeof promptOrchestrator.getSystemPrompt !== "function") {
throw new Error("Prompt-based orchestrator must implement getSystemPrompt method");
}
}
if (orchestrator.type === "custom-logic") {
const customOrchestrator = orchestrator;
if (typeof customOrchestrator.customLogic !== "function") {
throw new Error("Custom-logic orchestrator must implement customLogic method");
}
}
if (orchestrator.validate && typeof orchestrator.validate !== "function") {
throw new Error("Orchestrator validate property must be a function if provided");
}
if (orchestrator.initialize && typeof orchestrator.initialize !== "function") {
throw new Error("Orchestrator initialize property must be a function if provided");
}
if (orchestrator.cleanup && typeof orchestrator.cleanup !== "function") {
throw new Error("Orchestrator cleanup property must be a function if provided");
}
}
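// Illustrative only: the minimal shape a prompt-based orchestrator needs in
// order to pass validateOrchestrator and be registered. Every value below is
// made up for the example.
//
//   registerOrchestrator({
//     id: "summarizer",
//     name: "Summarizer",
//     description: "Summarizes conversations",
//     type: "prompt-based",
//     systemPrompt: "You are a concise summarizer.",
//     execute: async (input, context) => ({ success: true, result: "", messages: [], iterations: 1, toolCallsUsed: [] }),
//     getName: () => "Summarizer",
//     getDescription: () => "Summarizes conversations",
//     getType: () => "prompt-based",
//     getSystemPrompt: () => "You are a concise summarizer."
//   });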
function resolveOrchestrator(orchestratorInput) {
if (!orchestratorInput) {
return void 0;
}
if (typeof orchestratorInput === "string") {
return getOrchestrator(orchestratorInput);
}
if (typeof orchestratorInput === "object" && orchestratorInput.id) {
try {
validateOrchestrator(orchestratorInput);
return orchestratorInput;
} catch (error) {
console.warn(`\u{1F3AD} Invalid orchestrator object: ${error instanceof Error ? error.message : String(error)}`);
return void 0;
}
}
return void 0;
}
var orchestratorRegistry;
var init_registry = __esm({
"src/orchestrators/registry.ts"() {
"use strict";
orchestratorRegistry = /* @__PURE__ */ new Map();
}
});
// src/orchestrator.ts
import { generateText } from "ai";
var Orchestrator;
var init_orchestrator = __esm({
"src/orchestrator.ts"() {
"use strict";
init_manager();
init_registry();
init_utils();
Orchestrator = class {
model;
tools = {};
messages = [];
iterations = 0;
maxIterations;
customLogic;
// Orchestrator support
orchestrator;
orchestratorOptions;
// Logging configuration
loggingConfig;
executionStartTime = 0;
stepTimings = [];
toolCallTimings = [];
stepsExecuted = 0;
toolCallsExecuted = 0;
constructor(options) {
this.model = ProviderManager.createModel(options.model);
this.maxIterations = options.maxIterations || 10;
this.customLogic = options.customLogic;
this.orchestratorOptions = {
orchestrator: options.orchestrator,
orchestratorId: options.orchestratorId,
orchestratorParams: options.orchestratorParams,
allowOrchestratorPromptOverride: options.allowOrchestratorPromptOverride ?? true,
allowOrchestratorToolControl: options.allowOrchestratorToolControl ?? true
};
this.orchestrator = resolveOrchestrator(
options.orchestrator || options.orchestratorId
);
this.loggingConfig = {
enableDebugLogging: options.enableDebugLogging ?? false,
logLevel: options.logLevel ?? "basic",
enableStepLogging: options.enableStepLogging ?? false,
enableToolLogging: options.enableToolLogging ?? false,
enableTimingLogging: options.enableTimingLogging ?? false,
enableStatisticsLogging: options.enableStatisticsLogging ?? false
};
if (options.tools) {
let processedTools = options.tools;
try {
const { hasLangChainTools: hasLangChainTools2, autoConvertLangChainTools: autoConvertLangChainTools2 } = (init_utils(), __toCommonJS(utils_exports));
if (hasLangChainTools2(options.tools)) {
console.log("\u{1F517} Detected LangChain tools, converting...");
processedTools = options.tools.map((tool26) => {
if (tool26 && typeof tool26 === "object" && typeof tool26.name === "string" && typeof tool26.description === "string" && (typeof tool26.call === "function" || typeof tool26.invoke === "function")) {
const { convertLangchainToolSync: convertLangchainToolSync2 } = (init_utils(), __toCommonJS(utils_exports));
return convertLangchainToolSync2(tool26);
}
return tool26;
});
}
} catch (error) {
console.warn("\u26A0\uFE0F LangChain tool conversion failed, using original tools:", error);
processedTools = options.tools;
}
processedTools.forEach((tool26, index) => {
const toolName = tool26.toolId;
if (this.tools[toolName]) {
throw new Error(`Tool with name ${toolName} already exists`);
}
this.tools[toolName] = tool26;
});
}
if (options.systemPrompt) {
this.messages.push({
role: "system",
content: options.systemPrompt
});
}
this.log("\u{1F527}", "Orchestrator initialized", {
model: `${this.model.provider}/${this.model.model}`,
toolsCount: Object.keys(this.tools).length,
maxIterations: this.maxIterations,
loggingLevel: this.loggingConfig.logLevel,
hasCustomLogic: !!this.customLogic,
hasOrchestrator: !!this.orchestrator,
orchestratorId: this.orchestrator?.id,
orchestratorType: this.orchestrator?.type
});
}
async execute(input) {
this.executionStartTime = Date.now();
this.resetExecutionStats();
try {
const inputType = typeof input === "string" ? "string" : Array.isArray(input) ? "message_array" : "invalid";
const inputLength = typeof input === "string" ? input.length : Array.isArray(input) ? input.length : "unknown";
this.log("\u{1F680}", "Execution starting", {
inputType,
inputLength,
modelInfo: `${this.model.provider}/${this.model.model}`,
toolsAvailable: Object.keys(this.tools).length,
maxSteps: this.maxIterations,
hasCustomLogic: !!this.customLogic,
hasOrchestrator: !!this.orchestrator,
orchestratorType: this.orchestrator?.type
});
if (this.customLogic) {
return await this.executeWithCustomLogic(input);
}
if (this.orchestrator) {
return await this.executeWithOrchestrator(input);
}
let result;
if (typeof input === "string") {
result = await this.executeWithString(input);
} else if (Array.isArray(input)) {
result = await this.executeWithMessages(input);
} else {
throw new Error("Input must be either a string or an array of messages");
}
const executionStats = this.calculateExecutionStats();
result.executionStats = executionStats;
this.log("\u2705", "Execution completed successfully", {
totalDuration: executionStats.totalDuration,
stepsExecuted: executionStats.stepsExecuted,
toolCallsExecuted: executionStats.toolCallsExecuted,
averageStepDuration: executionStats.averageStepDuration,
resultLength: result.result?.length || 0
});
return result;
} catch (error) {
const executionStats = this.calculateExecutionStats();
const errorMessage = error instanceof Error ? error.message : JSON.stringify(error);
this.log("\u274C", "Execution failed", {
error: errorMessage,
totalDuration: executionStats.totalDuration,
stepsExecuted: executionStats.stepsExecuted,
toolCallsExecuted: executionStats.toolCallsExecuted,
stackTrace: error instanceof Error ? error.stack : void 0
});
const errorResult = {
success: false,
error: errorMessage,
messages: this.messages,
iterations: this.iterations,
toolCallsUsed: [],
executionStats
};
return errorResult;
}
}
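// Illustrative only: execute() accepts either a prompt string or a message
// array (assuming agent is an Orchestrator instance); both paths resolve to an
// ExecutionResult.
//
//   const chat = await agent.execute([
//     { role: "system", content: "Answer in one sentence." },
//     { role: "user", content: "What does this framework do?" }
//   ]);
//   // chat.success, chat.result, chat.iterations, chat.toolCallsUsed, chat.executionStats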
// Execute with orchestrator delegation
async executeWithOrchestrator(input) {
if (!this.orchestrator) {
throw new Error("Orchestrator not available");
}
this.log("\u{1F3AD}", "Delegating to orchestrator", {
orchestratorId: this.orchestrator.id,
orchestratorType: this.orchestrator.type,
orchestratorName: this.orchestrator.name
});
try {
if (this.orchestrator.type === "prompt-based") {
return await this.executeWithPromptBasedOrchestrator(input, this.orchestrator);
}
if (this.orchestrator.type === "custom-logic") {
return await this.executeWithCustomLogicOrchestrator(input, this.orchestrator);
}
throw new Error(`Unknown orchestrator type: ${this.orchestrator.type}`);
} catch (error) {
this.log("\u274C", "Orchestrator execution failed", {
orchestratorId: this.orchestrator.id,
error: error instanceof Error ? error.message : String(error),
stackTrace: error instanceof Error ? error.stack : void 0
});
throw error;
}
}
// Execute with custom logic orchestrator
async executeWithCustomLogicOrchestrator(input, orchestrator) {
this.log("\u{1F3AD}", "Executing with custom logic orchestrator", {
orchestratorId: orchestrator.id,
orchestratorName: orchestrator.name
});
try {
const context = {
model: this.model,
tools: Object.values(this.tools),
messages: this.messages,
iterations: this.iterations,
maxIterations: this.maxIterations,
loggingConfig: this.loggingConfig,
orchestratorParams: this.orchestratorOptions.orchestratorParams
};
if (orchestrator.initialize) {
await orchestrator.initialize(context);
}
if (orchestrator.validate) {
const isValid = await orchestrator.validate(input, context);
if (!isValid) {
throw new Error("Orchestrator validation failed");
}
}
const result = await orchestrator.execute(input, context);
this.iterations = result.iterations || 0;
this.stepsExecuted = result.iterations || 0;
if (orchestrator.cleanup) {
await orchestrator.cleanup(context);
}
this.log("\u2705", "Custom logic orchestrator execution completed", {
orchestratorId: orchestrator.id,
success: result.success,
iterations: result.iterations,
toolCallsUsed: result.toolCallsUsed?.length || 0
});
return result;
} catch (error) {
this.log("\u274C", "Custom logic orchestrator execution failed", {
orchestratorId: orchestrator.id,
error: error instanceof Error ? error.message : String(error),
stackTrace: error instanceof Error ? error.stack : void 0
});
throw error;
}
}
// Execute with prompt-based orchestrator (optimized path)
async executeWithPromptBasedOrchestrator(input, orchestrator) {
this.log("\u{1F3AD}", "Executing with prompt-based orchestrator", {
orchestratorId: orchestrator.id,
orchestratorName: orchestrator.name,
allowPromptOverride: this.orchestratorOptions.allowOrchestratorPromptOverride,
allowToolControl: this.orchestratorOptions.allowOrchestratorToolControl
});
const context = {
model: this.model,
tools: Object.values(this.tools),
messages: this.messages,
iterations: this.iterations,
maxIterations: this.maxIterations,
loggingConfig: this.loggingConfig,
orchestratorParams: this.orchestratorOptions.orchestratorParams
};
return await orchestrator.execute(input, context);
}
// Execute with string input (original behavior)
async executeWithString(input) {
const provider = await ProviderManager.createProvider(this.model);
const generateConfig = {
model: provider(getModel(this.model.model)),
prompt: input,
maxSteps: this.maxIterations,
onStepFinish: this.createStepFinishCallback()
};
const systemMessage = this.messages.find((m) => m.role === "system");
if (systemMessage) {
generateConfig.system = systemMessage.content;
}
if (Object.keys(this.tools).length > 0) {
generateConfig.tools = this.convertToAISDKTools();
}
if (this.model.temperature !== void 0) {
generateConfig.temperature = this.model.temperature;
}
if (this.model.maxTokens !== void 0) {
generateConfig.maxTokens = this.model.maxTokens;
}
if (this.model.topP !== void 0) {
generateConfig.topP = this.model.topP;
}
this.log("\u{1F4DD}", "Starting text generation", {
prompt: this.sanitizeForLogging(input),
systemMessage: systemMessage ? "present" : "none",
toolsEnabled: Object.keys(this.tools).length > 0
});
const result = await generateText(generateConfig);
this.iterations = result.steps?.length || 1;
this.stepsExecuted = this.iterations;
this.messages = [
...this.messages.filter((m) => m.role === "system"),
{ role: "user", content: input },
{ role: "assistant", content: result.text || "" }
];
const toolCallsUsed = [];
if (result.steps) {
result.steps.forEach((step) => {
if (step.toolCalls) {
step.toolCalls.forEach((toolCall) => {
toolCallsUsed.push(toolCall.toolName || toolCall.toolCallId || "unknown");
});
}
});
}
this.log("\u{1F4CA}", "Text generation completed", {
resultLength: result.text?.length || 0,
stepsExecuted: this.iterations,
toolCallsUsed: toolCallsUsed.length,
tokensUsed: result.usage?.totalTokens,
finishReason: result.finishReason,
usage: result.usage
});
const executionResult = {
success: true,
result: result.text,
messages: this.messages,
iterations: this.iterations,
usage: result.usage,
toolCallsUsed
};
return executionResult;
}
// Execute with message array (new behavior)
async executeWithMessages(inputMessages) {
if (inputMessages.length === 0) {
const errorResult = {
success: false,
error: "Empty message array provided. At least one message is required.",
messages: this.messages,
iterations: this.iterations,
toolCallsUsed: []
};
return errorResult;
}
const provider = await ProviderManager.createProvider(this.model);
const convertedMessages = this.convertCoresToAISDK(inputMessages);
this.log("\u{1F4DD}", "Processing message array", {
messageCount: inputMessages.length,
messageTypes: inputMessages.map((m) => m.role),
hasSystemMessage: inputMessages.some((m) => m.role === "system"),
lastMessageRole: inputMessages[inputMessages.length - 1]?.role
});
const generateConfig = {
model: provider(getModel(this.model.model)),
messages: convertedMessages,
maxSteps: this.maxIterations,
onStepFinish: this.createStepFinishCallback()
};
const systemMessage = inputMessages.find((m) => m.role === "system");
if (systemMessage) {
generateConfig.system = typeof systemMessage.content === "string" ? systemMessage.content : JSON.stringify(systemMessage.content);
} else {
const existingSystemMessage = this.messages.find((m) => m.role === "system");
if (existingSystemMessage) {
generateConfig.system = existingSystemMessage.content;
}
}
if (Object.keys(this.tools).length > 0) {
generateConfig.tools = this.convertToAISDKTools();
}
if (this.model.temperature !== void 0) {
generateConfig.temperature = this.model.temperature;
}
if (this.model.maxTokens !== void 0) {
generateConfig.maxTokens = this.model.maxTokens;
}
if (this.model.topP !== void 0) {
generateConfig.topP = this.model.topP;
}
const result = await generateText(generateConfig);
this.iterations = result.steps?.length || 1;
this.stepsExecuted = this.iterations;
this.messages = [
...this.messages.filter((m) => m.role === "system"),
...this.convertCoreToInternal(inputMessages.filter((m) => m.role !== "system")),
{ role: "assistant", content: result.text || "" }
];
const toolCallsUsed = [];
if (result.steps) {
result.steps.forEach((step) => {
if (step.toolCalls) {
step.toolCalls.forEach((toolCall) => {
toolCallsUsed.push(toolCall.toolName || toolCall.toolCallId || "unknown");
});
}
});
}
this.log("\u{1F4CA}", "Message array processing completed", {
resultLength: result.text?.length || 0,
stepsExecuted: this.iterations,
toolCallsUsed: toolCallsUsed.length,
messagesInHistory: this.messages.length,
tokensUsed: result.usage?.totalTokens,
finishReason: result.finishReason,
usage: result.usage
});
const executionResult = {
success: true,
result: result.text,
messages: this.messages,
iterations: this.iterations,
usage: result.usage,
toolCallsUsed
};
return executionResult;
}
// Tool management methods
addTool(tool26) {
const toolName = tool26.toolId;
if (this.tools[toolName]) {
throw new Error(`Tool with name ${toolName} already exists`);
}
this.tools[toolName] = tool26;
this.log("\u{1F527}", "Tool added", { toolId: tool26.toolId, toolName: tool26.name });
}
removeTool(toolName) {
if (this.tools[toolName]) {
delete this.tools[toolName];
this.log("\u{1F5D1}\uFE0F", "Tool removed", { toolId: toolName });
}
}
getTool(toolName) {
return this.tools[toolName];
}
getAllTools() {
return Object.values(this.tools);
}
// Model switching using ProviderManager
switchModel(model) {
const oldModel = `${this.model.provider}/${this.model.model}`;
this.model = ProviderManager.createModel(model);
const newModel = `${this.model.provider}/${this.model.model}`;
this.log("\u{1F504}", "Model switched", {
from: oldModel,
to: newModel,
temperature: this.model.temperature,
maxTokens: this.model.maxTokens
});
}
// Get model information
getModelInfo() {
try {
const modelInfo = ProviderManager.getModelInfo(this.model.provider, this.model.model);
return {
provider: this.model.provider,
model: this.model.model,
contextWindow: modelInfo?.contextWindow,
cost: modelInfo?.cost,
description: modelInfo?.description
};
} catch (error) {
return {
provider: this.model.provider,
model: this.model.model,
error: "Model info not available"
};
}
}
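// Illustrative only: tools and models can be swapped on a live instance
// (weatherTool refers to the made-up tool sketched near toOpenAgenticTool).
//
//   agent.addTool(weatherTool);
//   agent.switchModel("claude-sonnet-4-20250514");
//   agent.getModelInfo();
//   // → { provider: "anthropic", model: "claude-sonnet-4-20250514", contextWindow: 200000, cost: {...}, description: "..." }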
// Orchestrator management methods
getOrchestrator() {
return this.orchestrator;
}
setOrchestrator(orchestrator) {
const resolvedOrchestrator = resolveOrchestrator(orchestrator);
if (orchestrator && !resolvedOrchestrator) {
throw new Error(`Failed to resolve orchestrator: ${typeof orchestrator === "string" ? orchestrator : "invalid orchestrator object"}`);
}
const oldId = this.orchestrator?.id;
this.orchestrator = resolvedOrchestrator;
this.log("\u{1F3AD}", "Orchestrator changed", {
from: oldId || "none",
to: this.orchestrator?.id || "none",
type: this.orchestrator?.type
});
}
hasOrchestrator() {
return !!this.orchestrator;
}
// Utility methods
getMessages() {
return [...this.messages];
}
addMessage(message) {
this.messages.push(message);
this.log("\u{1F4AC}",