metis-code
Version:
Metis Code - multi-model coding CLI agent
1,334 lines (1,317 loc) • 931 kB
JavaScript
#!/usr/bin/env node
"use strict";
// esbuild CommonJS-bundle runtime helpers.
// Object intrinsics are cached up front so the helpers below are unaffected
// by later prototype tampering and minify better.
var __create = Object.create;
var __defProp = Object.defineProperty;
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
var __getOwnPropNames = Object.getOwnPropertyNames;
var __getProtoOf = Object.getPrototypeOf;
var __hasOwnProp = Object.prototype.hasOwnProperty;
// Lazy one-shot ESM module initializer. `fn` is an object whose single key
// maps to the init function; on first call the function runs (receiving 0),
// `fn` is zeroed so it can never run again, and the result is cached in
// `res`. The `(0, fn[...])` comma expression strips `this` from the call.
var __esm = (fn, res) => function __init() {
  return fn && (res = (0, fn[__getOwnPropNames(fn)[0]])(fn = 0)), res;
};
// Lazy one-shot CommonJS module wrapper: builds `module.exports` on first
// require and returns the cached exports thereafter.
var __commonJS = (cb, mod) => function __require() {
  return mod || (0, cb[__getOwnPropNames(cb)[0]])((mod = { exports: {} }).exports, mod), mod.exports;
};
// Install live getters on `target` for every key in `all` (ESM named exports).
var __export = (target, all) => {
  for (var name in all)
    __defProp(target, name, { get: all[name], enumerable: true });
};
// Copy own properties of `from` onto `to` as live getters, skipping keys that
// already exist on `to` and the optional `except` key; preserves enumerability.
var __copyProps = (to, from, except, desc) => {
  if (from && typeof from === "object" || typeof from === "function") {
    for (let key of __getOwnPropNames(from))
      if (!__hasOwnProp.call(to, key) && key !== except)
        __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
  }
  return to;
};
// Wrap a CommonJS module so it can be consumed as an ES module namespace.
var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps(
  // If the importer is in node compatibility mode or this is not an ESM
  // file that has been converted to a CommonJS file using a Babel-
  // compatible transform (i.e. "__esModule" has not been set), then set
  // "default" to the CommonJS "module.exports" for node compatibility.
  isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: true }) : target,
  mod
));
// Expose an ESM namespace object as CommonJS exports (tags it __esModule).
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
// src/tools/files.ts
/**
 * Ensure the parent directory of `filePath` exists, creating any missing
 * ancestors.
 */
function ensureDirFor(filePath) {
  // mkdirSync with { recursive: true } is a no-op for an existing directory,
  // so the previous existsSync pre-check was redundant and a TOCTOU race.
  import_fs.default.mkdirSync(import_path.default.dirname(filePath), { recursive: true });
}
/**
 * Write `content` to `filePath`, creating parent directories as needed.
 */
function writeText(filePath, content) {
  ensureDirFor(filePath);
  const fs = import_fs.default;
  fs.writeFileSync(filePath, content);
}
/**
 * Recursively list files under `root` as paths relative to `root`.
 * Directories named in the built-in skip list, or matching the directory
 * part of an `ignore` glob (its "/**" suffix stripped), are not descended.
 */
function listFiles(root, ignore = []) {
  const baseDir = import_path.default.resolve(root);
  const skip = new Set(["node_modules", ".git", "dist", ".metis", ...ignore.map((g) => g.replace("/**", ""))]);
  const results = [];
  const visit = (dir) => {
    for (const entry of import_fs.default.readdirSync(dir, { withFileTypes: true })) {
      const absolute = import_path.default.join(dir, entry.name);
      if (entry.isDirectory()) {
        if (!skip.has(entry.name)) visit(absolute);
      } else if (entry.isFile()) {
        results.push(import_path.default.relative(baseDir, absolute));
      }
    }
  };
  visit(baseDir);
  return results;
}
/**
 * Return true when `targetPath` (resolved against `cwd`) stays inside `cwd`.
 *
 * Uses path.relative instead of a string-prefix check: the previous
 * `abs.startsWith(root + sep)` broke when `cwd` was the filesystem root
 * (root + sep === "//"), rejecting every path, and a relative-path check
 * also handles Windows cross-drive targets (relative yields an absolute path).
 */
function withinCwdSafe(targetPath, cwd = process.cwd()) {
  const root = import_path.default.resolve(cwd);
  const abs = import_path.default.resolve(root, targetPath);
  const rel = import_path.default.relative(root, abs);
  // Inside when rel is "" (same directory) or neither escapes upward ("..")
  // nor lands on another drive (absolute rel on Windows).
  return rel === "" || (!rel.startsWith("..") && !import_path.default.isAbsolute(rel));
}
// Module-scope bindings for the lazily-loaded fs/path modules used by the
// file helpers above; populated by init_files on first use.
var import_fs, import_path;
// Lazy one-shot initializer for src/tools/files.ts.
var init_files = __esm({
  "src/tools/files.ts"() {
    "use strict";
    import_fs = __toESM(require("fs"));
    import_path = __toESM(require("path"));
  }
});
// src/config/index.ts
/**
 * Absolute path of the per-user Metis config directory (~/.metis).
 */
function getGlobalConfigDir() {
  return import_path2.default.join(import_os.default.homedir(), ".metis");
}
/**
 * Absolute path of the global secrets file (~/.metis/secrets.json).
 */
function getGlobalSecretsPath() {
  const configDir = getGlobalConfigDir();
  return import_path2.default.join(configDir, "secrets.json");
}
/**
 * Absolute path of the global config file (~/.metis/config.json).
 */
function getGlobalConfigPath() {
  const configDir = getGlobalConfigDir();
  return import_path2.default.join(configDir, "config.json");
}
/**
 * Build the effective configuration: built-in defaults overlaid by
 * environment variables, then overlaid by the on-disk global config
 * (when present and parseable).
 *
 * @returns {object} config with provider, model, temperature, safety, ignore
 */
function loadConfig() {
  const globalConfigPath = getGlobalConfigPath();
  // Guard against non-numeric METIS_TEMPERATURE: Number("abc") is NaN and
  // previously leaked straight into the config. Fall back to the 0.2 default
  // for anything non-finite; an explicit "0" is still honored.
  const envTemp = process.env.METIS_TEMPERATURE ? Number(process.env.METIS_TEMPERATURE) : NaN;
  let base = {
    provider: process.env.METIS_PROVIDER || "",
    model: process.env.METIS_MODEL || "",
    temperature: Number.isFinite(envTemp) ? envTemp : 0.2,
    safety: { dryRun: false, requireExecApproval: true },
    ignore: ["node_modules/**", ".git/**", "dist/**", ".metis/sessions/**"]
  };
  if (import_fs2.default.existsSync(globalConfigPath)) {
    try {
      const disk = JSON.parse(import_fs2.default.readFileSync(globalConfigPath, "utf8"));
      // Shallow merge: top-level keys from disk win over env/defaults.
      base = { ...base, ...disk };
    } catch (e) {
      console.warn("Failed to parse global config; using defaults.");
    }
  }
  return base;
}
/**
 * Persist `config` as pretty-printed JSON to the global config file,
 * creating ~/.metis first when missing.
 */
function saveGlobalConfig(config) {
  // recursive mkdir is idempotent, so the previous existsSync pre-check was
  // redundant and a TOCTOU race.
  import_fs2.default.mkdirSync(getGlobalConfigDir(), { recursive: true });
  import_fs2.default.writeFileSync(getGlobalConfigPath(), JSON.stringify(config, null, 2));
}
/**
 * Public accessor for the global config file location.
 */
function getGlobalConfigLocation() {
  const location = getGlobalConfigPath();
  return location;
}
/**
 * Collect provider API keys, highest precedence first:
 * environment variables, then the global secrets file (~/.metis/secrets.json),
 * then the project-local .metis/secrets.json. Earlier sources win.
 *
 * @param {string} cwd - project directory for the local secrets lookup
 * @returns {object} map of provider name -> API key
 */
function loadSecrets(cwd = process.cwd()) {
  const out = {};
  if (process.env.OPENAI_API_KEY) out.openai = process.env.OPENAI_API_KEY;
  if (process.env.ANTHROPIC_API_KEY) out.anthropic = process.env.ANTHROPIC_API_KEY;
  if (process.env.GROQ_API_KEY) out.groq = process.env.GROQ_API_KEY;
  // Merge keys from a JSON secrets file without overriding anything already
  // set (deduplicates the previously copy-pasted global/local merge blocks).
  const mergeFile = (file) => {
    if (!import_fs2.default.existsSync(file)) return;
    try {
      const parsed = JSON.parse(import_fs2.default.readFileSync(file, "utf8"));
      for (const [key, value] of Object.entries(parsed)) {
        if (!(key in out)) {
          out[key] = value;
        }
      }
    } catch {
      // Best-effort: a malformed secrets file is ignored rather than fatal.
    }
  };
  mergeFile(getGlobalSecretsPath());
  mergeFile(import_path2.default.join(cwd, ".metis", "secrets.json"));
  return out;
}
/**
 * Merge `secrets` into the global secrets file (new values win over existing
 * ones) and restrict the file to owner-only permissions.
 */
function saveGlobalSecrets(secrets) {
  const globalSecretsPath = getGlobalSecretsPath();
  // recursive mkdir is idempotent; no existsSync pre-check needed (was racy).
  import_fs2.default.mkdirSync(getGlobalConfigDir(), { recursive: true });
  let existingSecrets = {};
  if (import_fs2.default.existsSync(globalSecretsPath)) {
    try {
      existingSecrets = JSON.parse(import_fs2.default.readFileSync(globalSecretsPath, "utf8"));
    } catch {
      // Corrupt existing file: start fresh rather than failing the save.
    }
  }
  const mergedSecrets = { ...existingSecrets, ...secrets };
  import_fs2.default.writeFileSync(globalSecretsPath, JSON.stringify(mergedSecrets, null, 2) + "\n");
  try {
    // 0o600 (decimal 384, as before): readable/writable by the owner only.
    import_fs2.default.chmodSync(globalSecretsPath, 0o600);
  } catch {
    // chmod can fail on platforms without POSIX permissions (e.g. Windows).
  }
}
/**
 * Public accessor for the global secrets file location.
 */
function getGlobalSecretsLocation() {
  const location = getGlobalSecretsPath();
  return location;
}
// Module-scope bindings for the lazily-loaded fs/path/os modules used by the
// config helpers above; populated by init_config on first use.
var import_fs2, import_path2, import_os;
// Lazy one-shot initializer for src/config/index.ts.
var init_config = __esm({
  "src/config/index.ts"() {
    "use strict";
    import_fs2 = __toESM(require("fs"));
    import_path2 = __toESM(require("path"));
    import_os = __toESM(require("os"));
  }
});
// src/tools/repo.ts
/**
 * Scan the repository rooted at `cwd`: list files (honoring the configured
 * ignore globs), tally them by extension, and capture package.json scripts
 * when a parseable package.json exists.
 *
 * @returns {{root: string, files: string[], counts: {total: number, byExt: object}, scripts: object|undefined}}
 */
function scanRepo(cwd = process.cwd()) {
  const cfg = loadConfig(cwd);
  const files = listFiles(cwd, cfg.ignore);
  const byExt = {};
  for (const file of files) {
    const ext = import_path3.default.extname(file) || "(noext)";
    byExt[ext] = (byExt[ext] || 0) + 1;
  }
  let scripts;
  const pkgPath = import_path3.default.join(cwd, "package.json");
  if (import_fs3.default.existsSync(pkgPath)) {
    try {
      const pkg = JSON.parse(import_fs3.default.readFileSync(pkgPath, "utf8"));
      if (pkg?.scripts) scripts = pkg.scripts;
    } catch {
      // Unparseable package.json: leave scripts undefined.
    }
  }
  return { root: cwd, files, counts: { total: files.length, byExt }, scripts };
}
/**
 * Render a short human-readable repo summary: file total, the ten most
 * common extensions, up to ten package.json script names, and a sample of
 * at most `maxFiles` file paths.
 */
function summarizeRepo(maxFiles = 60, cwd = process.cwd()) {
  const repo = scanRepo(cwd);
  const sample = repo.files.slice(0, maxFiles);
  const sortedExts = Object.entries(repo.counts.byExt).sort((a, b) => b[1] - a[1]);
  const extCounts = sortedExts.slice(0, 10).map(([ext, n]) => `${ext}:${n}`).join(", ");
  let scripts = "none";
  if (repo.scripts) {
    scripts = Object.keys(repo.scripts).slice(0, 10).join(", ");
  }
  const lines = [
    `Files: ${repo.counts.total} total; top extensions: ${extCounts}`,
    `package.json scripts: ${scripts}`,
    `Sample files (${sample.length}):`
  ];
  for (const f of sample) {
    lines.push(`- ${f}`);
  }
  return lines.join("\n");
}
// Module-scope bindings for the repo-scanning helpers above; populated by
// init_repo on first use.
var import_fs3, import_path3;
// Lazy one-shot initializer for src/tools/repo.ts; also runs the files and
// config module initializers this module depends on.
var init_repo = __esm({
  "src/tools/repo.ts"() {
    "use strict";
    import_fs3 = __toESM(require("fs"));
    import_path3 = __toESM(require("path"));
    init_files();
    init_config();
  }
});
// src/errors/MetisError.ts
// Binding assigned when init_MetisError runs (lazy ESM init pattern).
var MetisError;
var init_MetisError = __esm({
  "src/errors/MetisError.ts"() {
    "use strict";
    // Application-wide error type carrying a machine-readable `code`, a
    // `category` (config | tool | provider | user | agent), a `recoverable`
    // flag, and user-facing `suggestions`. The static factories below build
    // the well-known error shapes used across the CLI.
    MetisError = class _MetisError extends Error {
      constructor(message, code, category, recoverable = true, suggestions = []) {
        super(message);
        this.name = "MetisError";
        this.code = code;
        this.category = category;
        this.recoverable = recoverable;
        this.suggestions = suggestions;
      }
      // Configuration file missing at `configPath`.
      static configMissing(configPath) {
        return new _MetisError(
          `Configuration file not found: ${configPath}`,
          "CONFIG_MISSING",
          "config",
          true,
          [
            'Run "metiscode init" to create initial configuration',
            "Create metis.config.json manually in your project root"
          ]
        );
      }
      // No API key configured for `provider`.
      static apiKeyMissing(provider) {
        return new _MetisError(
          `API key missing for provider: ${provider}`,
          "API_KEY_MISSING",
          "config",
          true,
          [
            `Run "metiscode auth set --provider ${provider} --key YOUR_API_KEY"`,
            `Set ${provider.toUpperCase()}_API_KEY environment variable`,
            "Check that .metis/secrets.json exists and contains your API key"
          ]
        );
      }
      // A tool invocation failed for the given reason.
      static toolExecutionFailed(toolName, reason) {
        return new _MetisError(
          `Tool '${toolName}' execution failed: ${reason}`,
          "TOOL_EXECUTION_FAILED",
          "tool",
          true,
          [
            "Try running the tool individually to debug the issue",
            "Check that required files and directories exist",
            "Verify tool parameters are correct"
          ]
        );
      }
      // Upstream LLM API call failed; `httpCode` is optional.
      static providerRequestFailed(provider, httpCode) {
        const codeMsg = httpCode ? ` (HTTP ${httpCode})` : "";
        return new _MetisError(
          `${provider} API request failed${codeMsg}`,
          "PROVIDER_REQUEST_FAILED",
          "provider",
          true,
          [
            "Check your internet connection",
            "Verify your API key is valid and has sufficient credits",
            "Try again in a few moments - the service may be temporarily unavailable",
            // Extra hint only for rate-limit responses; filter(Boolean)
            // drops the empty string otherwise.
            httpCode === 429 ? "You may have hit rate limits - wait before retrying" : ""
          ].filter(Boolean)
        );
      }
      // A referenced file does not exist.
      static fileNotFound(path44) {
        return new _MetisError(
          `File not found: ${path44}`,
          "FILE_NOT_FOUND",
          "user",
          true,
          [
            "Check that the file path is correct",
            "Ensure the file exists in your project",
            "Use relative paths from your project root"
          ]
        );
      }
      // The agent hit its iteration cap without finishing the task.
      static taskTooComplex() {
        return new _MetisError(
          "Task did not complete within maximum iterations",
          "TASK_TOO_COMPLEX",
          "agent",
          true,
          [
            "Break your task into smaller, more specific steps",
            "Try being more explicit about what files to modify",
            "Run individual operations separately"
          ]
        );
      }
      // Configured provider is not a supported backend; marked
      // non-recoverable (recoverable = false), unlike the other factories.
      static unsupportedProvider(provider) {
        return new _MetisError(
          `Unsupported provider: ${provider}`,
          "UNSUPPORTED_PROVIDER",
          "config",
          false,
          [
            "Supported providers: openai, anthropic, groq",
            "Check your metis.config.json file",
            "Update to a supported provider"
          ]
        );
      }
      // Selected provider/model cannot do function calling.
      static toolNotSupported(provider, details) {
        return new _MetisError(
          `Tool functionality not supported by ${provider}${details ? ": " + details : ""}`,
          "TOOL_NOT_SUPPORTED",
          "provider",
          true,
          [
            `Switch to a different ${provider} model that supports function calling`,
            // Groq-specific model hint; filter(Boolean) drops it otherwise.
            provider === "groq" ? "Try llama-3.1-70b-versatile or mixtral-8x7b-32768" : "",
            "Use text-only mode if function calling is not needed",
            "Check provider documentation for supported models"
          ].filter(Boolean)
        );
      }
      // Payload exceeded the provider's size limits.
      static requestTooLarge(provider) {
        return new _MetisError(
          `Request too large for ${provider} provider`,
          "REQUEST_TOO_LARGE",
          "provider",
          true,
          [
            "Reduce the length of your message or context",
            "Break your task into smaller parts",
            "Use a model with larger context window",
            "Remove unnecessary details from your request"
          ]
        );
      }
      // Short display form: the message plus the first suggestion, if any.
      toUserFriendlyString() {
        let msg = `Error: ${this.message}`;
        if (this.suggestions.length > 0 && this.suggestions[0]) {
          // The template literal spans a real newline; the continuation line
          // stays flush-left so no extra spaces leak into the output.
          msg += `
Try: ${this.suggestions[0]}`;
        }
        return msg;
      }
    };
  }
});
// src/providers/openai.ts
// Module-scope bindings populated by init_openai.
var import_openai, OpenAIProvider;
var init_openai = __esm({
  "src/providers/openai.ts"() {
    "use strict";
    import_openai = __toESM(require("openai"));
    init_MetisError();
    // Chat provider backed by the official OpenAI SDK. Implements the shared
    // provider interface: send(), sendWithTools(), supportsTools().
    OpenAIProvider = class {
      constructor(init) {
        this.name = "openai";
        if (!init.apiKey) {
          // Soft-fail when no key is configured: warn, install a dummy
          // client, and let the first real request error out instead.
          console.error("\u274C OpenAI API key not configured");
          console.error("To fix this, run: metiscode config set apikey your-api-key");
          this.client = {};
          this.model = init.model || "gpt-4o";
          // FIX: use ?? instead of || so an explicit temperature of 0 is
          // respected rather than silently replaced by the 0.2 default.
          this.temperature = init.temperature ?? 0.2;
          return;
        }
        try {
          this.client = new import_openai.default({ apiKey: init.apiKey });
          this.model = init.model;
          this.temperature = init.temperature;
        } catch (error) {
          throw MetisError.providerRequestFailed("openai");
        }
      }
      /**
       * Plain chat completion; resolves to the first choice's text content
       * ("" when the response carries none).
       */
      async send(messages, opts) {
        const temperature = opts?.temperature ?? this.temperature;
        try {
          const requestConfig = {
            model: this.model,
            temperature,
            messages: messages.map((m) => ({
              role: m.role,
              content: m.content,
              tool_calls: m.tool_calls,
              tool_call_id: m.tool_call_id,
              name: m.name
            }))
          };
          const res = await this.client.chat.completions.create(requestConfig);
          const choice = res.choices?.[0]?.message?.content ?? "";
          return typeof choice === "string" ? choice : JSON.stringify(choice);
        } catch (error) {
          // SDK errors expose an HTTP `status`; forward it when present.
          if (error.status) {
            throw MetisError.providerRequestFailed("openai", error.status);
          }
          throw MetisError.providerRequestFailed("openai");
        }
      }
      /**
       * Chat completion with function calling enabled. Resolves to either
       * { type: "tool_call", content, tool_calls, usage? } or
       * { type: "text", content, usage? }.
       */
      async sendWithTools(messages, tools, opts) {
        const temperature = opts?.temperature ?? this.temperature;
        try {
          const requestConfig = {
            model: this.model,
            temperature,
            max_tokens: opts?.max_tokens,
            messages: messages.map((m) => ({
              role: m.role,
              content: m.content,
              tool_calls: m.tool_calls,
              tool_call_id: m.tool_call_id,
              name: m.name
            })),
            tools: tools.map((tool) => ({
              type: "function",
              function: {
                name: tool.name,
                description: tool.description,
                parameters: tool.parameters
              }
            })),
            tool_choice: "auto"
          };
          const res = await this.client.chat.completions.create(requestConfig);
          const message = res.choices?.[0]?.message;
          if (!message) {
            throw MetisError.providerRequestFailed("openai");
          }
          if (message.tool_calls && message.tool_calls.length > 0) {
            return {
              type: "tool_call",
              content: message.content || "",
              tool_calls: message.tool_calls.map((tc) => ({
                id: tc.id,
                type: "function",
                function: {
                  name: tc.function.name,
                  arguments: tc.function.arguments
                }
              })),
              usage: res.usage ? {
                prompt_tokens: res.usage.prompt_tokens,
                completion_tokens: res.usage.completion_tokens,
                total_tokens: res.usage.total_tokens
              } : void 0
            };
          }
          return {
            type: "text",
            content: message.content || "",
            usage: res.usage ? {
              prompt_tokens: res.usage.prompt_tokens,
              completion_tokens: res.usage.completion_tokens,
              total_tokens: res.usage.total_tokens
            } : void 0
          };
        } catch (error) {
          if (error.status) {
            throw MetisError.providerRequestFailed("openai", error.status);
          }
          throw MetisError.providerRequestFailed("openai");
        }
      }
      // gpt-3.5-turbo-instruct is a completions-only model without tool support.
      supportsTools() {
        return !this.model.includes("gpt-3.5-turbo-instruct");
      }
    };
  }
});
// src/providers/anthropic.ts
// Module-scope bindings populated by init_anthropic.
var import_sdk, AnthropicProvider;
var init_anthropic = __esm({
  "src/providers/anthropic.ts"() {
    "use strict";
    import_sdk = __toESM(require("@anthropic-ai/sdk"));
    // Chat provider backed by the Anthropic SDK. Anthropic takes the system
    // prompt as a separate parameter and has no native tool-result message
    // role, so both send paths reshape the shared message format first.
    AnthropicProvider = class {
      constructor(init) {
        this.name = "anthropic";
        // NOTE(review): throws a plain Error here while the other providers
        // use MetisError.apiKeyMissing — confirm this inconsistency is intended.
        if (!init.apiKey) throw new Error("ANTHROPIC_API_KEY missing");
        this.client = new import_sdk.default({ apiKey: init.apiKey });
        this.model = init.model;
        this.temperature = init.temperature;
      }
      // Plain chat completion; resolves to the concatenated text blocks.
      async send(messages, opts) {
        const temperature = opts?.temperature ?? this.temperature;
        // Only the first system message becomes the system prompt.
        const sys = messages.find((m) => m.role === "system")?.content;
        const userAssistantPairs = messages.filter((m) => m.role !== "system");
        // Every non-assistant role (including "tool") is sent as "user".
        const content = userAssistantPairs.map((m) => ({
          role: m.role === "assistant" ? "assistant" : "user",
          content: m.content
        }));
        const res = await this.client.messages.create({
          model: this.model,
          temperature,
          system: sys,
          max_tokens: 1024,
          messages: content
        });
        const text = res.content.map((c) => c.type === "text" ? c.text : "").join("").trim();
        return text;
      }
      // Chat completion with tools; resolves to the shared
      // { type: "tool_call" | "text", content, tool_calls?, usage? } shape.
      async sendWithTools(messages, tools, opts) {
        const temperature = opts?.temperature ?? this.temperature;
        const sys = messages.find((m) => m.role === "system")?.content;
        const userAssistantPairs = messages.filter((m) => m.role !== "system");
        const content = userAssistantPairs.map((m) => {
          // Tool results are flattened into a plain user message because the
          // Messages API used here has no tool-result role in this mapping.
          if (m.role === "tool") {
            return {
              role: "user",
              content: `Tool ${m.name || "unknown"} result: ${m.content}`
            };
          }
          return {
            role: m.role === "assistant" ? "assistant" : "user",
            content: m.content
          };
        });
        // Convert the OpenAI-style tool schema to Anthropic's input_schema form.
        const anthropicTools = tools.map((tool) => ({
          name: tool.name,
          description: tool.description,
          input_schema: tool.parameters
        }));
        const res = await this.client.messages.create({
          model: this.model,
          temperature,
          system: sys,
          max_tokens: opts?.max_tokens || 1024,
          messages: content,
          tools: anthropicTools
        });
        const toolUseBlocks = res.content.filter((c) => c.type === "tool_use");
        const textBlocks = res.content.filter((c) => c.type === "text");
        if (toolUseBlocks.length > 0) {
          // Normalize tool_use blocks to the OpenAI-style tool_call shape the
          // agent loop consumes (arguments serialized to a JSON string).
          const toolCalls = toolUseBlocks.map((block) => ({
            id: block.id,
            type: "function",
            function: {
              name: block.name,
              arguments: JSON.stringify(block.input)
            }
          }));
          return {
            type: "tool_call",
            content: textBlocks.map((c) => c.text).join("").trim(),
            tool_calls: toolCalls,
            usage: res.usage ? {
              prompt_tokens: res.usage.input_tokens,
              completion_tokens: res.usage.output_tokens,
              total_tokens: res.usage.input_tokens + res.usage.output_tokens
            } : void 0
          };
        }
        return {
          type: "text",
          content: textBlocks.map((c) => c.text).join("").trim(),
          usage: res.usage ? {
            prompt_tokens: res.usage.input_tokens,
            completion_tokens: res.usage.output_tokens,
            total_tokens: res.usage.input_tokens + res.usage.output_tokens
          } : void 0
        };
      }
      // Claude 3+ model families support tool use.
      supportsTools() {
        return this.model.includes("claude-3") || this.model.includes("claude-sonnet") || this.model.includes("claude-haiku");
      }
    };
  }
});
// src/providers/rateLimiter.ts
// Module-scope bindings populated by init_rateLimiter.
var import_kleur, GROQ_RATE_LIMITS, GroqRateLimiter;
var init_rateLimiter = __esm({
  "src/providers/rateLimiter.ts"() {
    "use strict";
    import_kleur = __toESM(require("kleur"));
    // Per-model client-side limits. Values presumably mirror Groq's published
    // per-minute quotas — TODO confirm against current Groq documentation.
    GROQ_RATE_LIMITS = {
      "openai/gpt-oss-20b": {
        tokensPerMinute: 25e4,
        requestsPerMinute: 30,
        burstAllowance: 0.8
        // Use 80% of limit to be safe
      },
      "llama-3.1-70b-versatile": {
        tokensPerMinute: 6e3,
        requestsPerMinute: 30,
        burstAllowance: 0.8
      },
      "llama-3.1-8b-instant": {
        tokensPerMinute: 3e4,
        requestsPerMinute: 30,
        burstAllowance: 0.8
      },
      "mixtral-8x7b-32768": {
        tokensPerMinute: 5e3,
        requestsPerMinute: 30,
        burstAllowance: 0.8
      },
      "gemma-7b-it": {
        tokensPerMinute: 3e4,
        requestsPerMinute: 30,
        burstAllowance: 0.8
      }
    };
    // Sliding one-minute window limiter: tracks {timestamp, tokens} per
    // request and suggests delays before the client would exceed the
    // configured token or request budget.
    GroqRateLimiter = class {
      constructor(modelName) {
        // Requests made in (roughly) the last minute; pruned in shouldDelay.
        this.requestHistory = [];
        this.modelName = modelName;
        this.config = GROQ_RATE_LIMITS[modelName] || {
          tokensPerMinute: 5e3,
          // Conservative default
          requestsPerMinute: 30,
          burstAllowance: 0.8
        };
      }
      /**
       * Check if we should delay before making a request
       * Returns delay in milliseconds (0 if no delay needed)
       */
      async shouldDelay(estimatedTokens) {
        const now = Date.now();
        const oneMinuteAgo = now - 6e4;
        // Prune records older than the one-minute window.
        this.requestHistory = this.requestHistory.filter(
          (record) => record.timestamp > oneMinuteAgo
        );
        const recentTokens = this.requestHistory.reduce((sum, record) => sum + record.tokens, 0);
        const recentRequests = this.requestHistory.length;
        // Effective budgets after the burst-allowance safety margin.
        const tokenLimit = this.config.tokensPerMinute * this.config.burstAllowance;
        const requestLimit = this.config.requestsPerMinute * this.config.burstAllowance;
        const wouldExceedTokens = recentTokens + estimatedTokens > tokenLimit;
        const wouldExceedRequests = recentRequests + 1 > requestLimit;
        if (wouldExceedTokens || wouldExceedRequests) {
          let delay = 0;
          // Wait until the oldest in-window record ages out of the window.
          if (wouldExceedTokens && this.requestHistory.length > 0) {
            const oldestRelevantRequest = this.requestHistory[0];
            delay = Math.max(delay, oldestRelevantRequest.timestamp + 6e4 - now);
          }
          if (wouldExceedRequests && this.requestHistory.length > 0) {
            const oldestRequest = this.requestHistory[0];
            delay = Math.max(delay, oldestRequest.timestamp + 6e4 - now);
          }
          // Pad by 1s and enforce a 2s floor to stay safely under the limit.
          delay = Math.max(delay + 1e3, 2e3);
          return delay;
        }
        // Light pacing: keep at least 100ms between consecutive requests.
        const lastRequest = this.requestHistory[this.requestHistory.length - 1];
        if (lastRequest && now - lastRequest.timestamp < 100) {
          return 100;
        }
        return 0;
      }
      /**
       * Record a completed request
       */
      recordRequest(actualTokens) {
        this.requestHistory.push({
          timestamp: Date.now(),
          tokens: actualTokens
        });
      }
      /**
       * Display a user-friendly delay message
       */
      async delayWithMessage(delayMs) {
        if (delayMs <= 0) return;
        const seconds = Math.ceil(delayMs / 1e3);
        const reason = this.getDelayReason();
        // \r (no newline) so the message can be overwritten afterwards.
        process.stdout.write(import_kleur.default.yellow(`\u23F3 Rate limiting: waiting ${seconds}s ${reason}\r`));
        await new Promise((resolve) => setTimeout(resolve, delayMs));
        // Blank out the transient status line.
        process.stdout.write(" ".repeat(80) + "\r");
      }
      // Human-readable percent-of-token-budget note for the delay message.
      getDelayReason() {
        const oneMinuteAgo = Date.now() - 6e4;
        const recentTokens = this.requestHistory.filter((r) => r.timestamp > oneMinuteAgo).reduce((sum, record) => sum + record.tokens, 0);
        const tokenUsagePercent = Math.round(recentTokens / this.config.tokensPerMinute * 100);
        return `(${tokenUsagePercent}% of token limit used)`;
      }
      /**
       * Get current usage statistics
       */
      getUsageStats() {
        const oneMinuteAgo = Date.now() - 6e4;
        const recentRequests = this.requestHistory.filter((r) => r.timestamp > oneMinuteAgo);
        const tokens = recentRequests.reduce((sum, record) => sum + record.tokens, 0);
        const requests = recentRequests.length;
        return {
          tokens,
          requests,
          tokensPercent: Math.round(tokens / this.config.tokensPerMinute * 100),
          requestsPercent: Math.round(requests / this.config.requestsPerMinute * 100)
        };
      }
      /**
       * Estimate tokens from message content (rough approximation)
       */
      static estimateTokens(messages, tools) {
        let estimate = 0;
        // ~4 characters per token heuristic.
        for (const message of messages) {
          if (message.content) {
            estimate += Math.ceil(message.content.length / 4);
          }
        }
        if (tools && tools.length > 0) {
          const toolsText = JSON.stringify(tools);
          estimate += Math.ceil(toolsText.length / 4);
        }
        // Fixed headroom for the response and message framing overhead.
        estimate += 1e3;
        return estimate;
      }
    };
  }
});
// src/providers/groq.ts
// Module-scope binding populated by init_groq.
var GroqProvider;
var init_groq = __esm({
  "src/providers/groq.ts"() {
    "use strict";
    init_MetisError();
    init_rateLimiter();
    // Chat provider for Groq's OpenAI-compatible REST API, with client-side
    // rate limiting and workarounds for malformed tool-call JSON from some
    // Groq models.
    GroqProvider = class {
      constructor(init) {
        this.name = "groq";
        this.baseURL = "https://api.groq.com/openai/v1";
        if (!init.apiKey) {
          throw MetisError.apiKeyMissing("groq");
        }
        this.apiKey = init.apiKey;
        this.model = init.model;
        this.temperature = init.temperature;
        this.rateLimiter = new GroqRateLimiter(this.model);
      }
      /**
       * Plain chat completion; resolves to the first choice's text content.
       * Applies client-side rate limiting before issuing the request.
       */
      async send(messages, opts) {
        const temperature = opts?.temperature ?? this.temperature;
        const estimatedTokens = GroqRateLimiter.estimateTokens(messages);
        const delay = await this.rateLimiter.shouldDelay(estimatedTokens);
        if (delay > 0) {
          await this.rateLimiter.delayWithMessage(delay);
        }
        try {
          const response = await this.makeRequest("/chat/completions", {
            model: this.model,
            temperature,
            messages: messages.map((m) => {
              const groqMessage = {
                role: m.role,
                content: m.content || null
                // Groq might prefer null over empty string
              };
              if (m.role === "assistant" && m.tool_calls) {
                groqMessage.tool_calls = m.tool_calls;
                if (!m.content) {
                  groqMessage.content = null;
                }
              }
              if (m.role === "tool") {
                groqMessage.tool_call_id = m.tool_call_id;
                if (m.name) {
                  groqMessage.name = m.name;
                }
                // Tool messages must carry a string content, never null.
                if (!groqMessage.content) {
                  groqMessage.content = "";
                }
              }
              return groqMessage;
            })
          });
          const choice = response.choices?.[0]?.message?.content ?? "";
          // Feed actual usage back into the limiter's sliding window.
          if (response.usage) {
            this.rateLimiter.recordRequest(response.usage.total_tokens);
          }
          return typeof choice === "string" ? choice : JSON.stringify(choice);
        } catch (error) {
          if (error.status) {
            throw MetisError.providerRequestFailed("groq", error.status);
          }
          throw MetisError.providerRequestFailed("groq");
        }
      }
      /**
       * Chat completion with function calling. Resolves to either
       * { type: "tool_call", content, tool_calls, usage? } or
       * { type: "text", content, usage? }. Throws early when the configured
       * model cannot do function calling.
       */
      async sendWithTools(messages, tools, opts) {
        const temperature = opts?.temperature ?? this.temperature;
        if (!this.supportsTools()) {
          throw new Error(`Model ${this.model} does not support function calling. Please use a compatible model like llama-3.1-70b-versatile or mixtral-8x7b-32768`);
        }
        const estimatedTokens = GroqRateLimiter.estimateTokens(messages, tools);
        const delay = await this.rateLimiter.shouldDelay(estimatedTokens);
        if (delay > 0) {
          await this.rateLimiter.delayWithMessage(delay);
        }
        try {
          const requestConfig = {
            model: this.model,
            temperature,
            max_tokens: opts?.max_tokens,
            messages: messages.map((m) => {
              const groqMessage = {
                role: m.role,
                content: m.content || null
                // Groq might prefer null over empty string
              };
              if (m.role === "assistant" && m.tool_calls) {
                groqMessage.tool_calls = m.tool_calls;
                if (!m.content) {
                  groqMessage.content = null;
                }
              }
              if (m.role === "tool") {
                groqMessage.tool_call_id = m.tool_call_id;
                if (m.name) {
                  groqMessage.name = m.name;
                }
                if (!groqMessage.content) {
                  groqMessage.content = "";
                }
              }
              return groqMessage;
            }),
            tools: tools.map((tool) => ({
              type: "function",
              function: {
                name: tool.name,
                description: tool.description,
                parameters: tool.parameters
              }
            })),
            tool_choice: "auto"
          };
          if (this.supportsAdvancedFeatures()) {
            requestConfig.service_tier = "on_demand";
          }
          // Opt-in request tracing for debugging malformed requests/responses.
          if (process.env.METIS_VERBOSE === "true" || process.env.METIS_TRACE === "true") {
            console.log("[Groq] Sending request with", requestConfig.messages.length, "messages");
            console.log("[Groq] Message roles:", requestConfig.messages.map((m) => m.role).join(", "));
            console.log("[Groq] Model:", this.model);
            console.log("[Groq] Tools:", requestConfig.tools.map((t) => t.function.name).join(", "));
            if (process.env.METIS_TRACE === "true") {
              console.log("[Groq] Full request config:");
              console.log(JSON.stringify(requestConfig, null, 2));
              console.log("[Groq] Tool schemas:");
              requestConfig.tools.forEach((tool) => {
                console.log(`[Groq] ${tool.function.name}:`, JSON.stringify(tool.function.parameters, null, 2));
              });
            }
          }
          const response = await this.makeRequest("/chat/completions", requestConfig);
          const message = response.choices?.[0]?.message;
          if (!message) {
            throw MetisError.providerRequestFailed("groq");
          }
          if (response.usage) {
            this.rateLimiter.recordRequest(response.usage.total_tokens);
          }
          if (message.tool_calls && message.tool_calls.length > 0) {
            return {
              type: "tool_call",
              content: message.content || "",
              tool_calls: message.tool_calls.map((tc) => {
                // Some Groq models emit trailing garbage after the JSON
                // arguments; attempt a best-effort repair before handing
                // them to the agent loop.
                let cleanedArguments = tc.function.arguments;
                if (typeof cleanedArguments === "string") {
                  cleanedArguments = cleanedArguments.replace(/}\s*"?\s*$/, "}");
                  cleanedArguments = cleanedArguments.replace(/}"\s*}/, "}}");
                  try {
                    JSON.parse(cleanedArguments);
                  } catch (e) {
                    // Truncate anything after the last closing brace and retry.
                    const lastBrace = cleanedArguments.lastIndexOf("}");
                    if (lastBrace !== -1 && lastBrace < cleanedArguments.length - 1) {
                      cleanedArguments = cleanedArguments.substring(0, lastBrace + 1);
                    }
                    try {
                      JSON.parse(cleanedArguments);
                    } catch (e2) {
                      // Still broken: log both forms; the caller will surface
                      // the parse failure when it consumes the arguments.
                      console.error("[Groq] Failed to parse tool arguments after repair:", cleanedArguments);
                      console.error("[Groq] Original arguments:", tc.function.arguments);
                    }
                  }
                }
                return {
                  id: tc.id,
                  type: "function",
                  function: {
                    name: tc.function.name,
                    arguments: cleanedArguments
                  }
                };
              }),
              usage: response.usage ? {
                prompt_tokens: response.usage.prompt_tokens,
                completion_tokens: response.usage.completion_tokens,
                total_tokens: response.usage.total_tokens
              } : void 0
            };
          }
          return {
            type: "text",
            content: message.content || "",
            usage: response.usage ? {
              prompt_tokens: response.usage.prompt_tokens,
              completion_tokens: response.usage.completion_tokens,
              total_tokens: response.usage.total_tokens
            } : void 0
          };
        } catch (error) {
          // Surface 429s with the server-suggested wait time so the caller
          // can retry after `waitTime` milliseconds.
          if (error.status === 429) {
            const waitMatch = error.message?.match(/Please try again in (\d+\.?\d*)/);
            const waitTime = waitMatch ? Math.ceil(parseFloat(waitMatch[1])) : 2;
            const rateLimitError = new Error(`API rate limit reached. Waiting ${waitTime} seconds before retry...`);
            rateLimitError.status = 429;
            rateLimitError.waitTime = waitTime * 1e3;
            throw rateLimitError;
          }
          if (error.status) {
            throw MetisError.providerRequestFailed("groq", error.status);
          }
          throw MetisError.providerRequestFailed("groq");
        }
      }
      // Whitelist of model families known to support function calling.
      supportsTools() {
        const toolSupportedModels = [
          "llama-3.1-70b-versatile",
          "llama-3.1-8b-instant",
          "llama-3.3-70b-versatile",
          "mixtral-8x7b-32768",
          "gemma2-9b-it",
          "gemma-7b-it",
          "openai/gpt-oss-20b"
          // Added support for this model
        ];
        return toolSupportedModels.some((model) => this.model.includes(model)) || this.model.includes("tool-use") || this.model.includes("function");
      }
      // Get current rate limit status (for debugging)
      getRateLimitStatus() {
        return this.rateLimiter.getUsageStats();
      }
      // Models eligible for the service_tier request option.
      supportsAdvancedFeatures() {
        return this.model.includes("llama-4") || this.model.includes("mixtral") || this.model.includes("gemma2");
      }
      /**
       * POST `body` to the Groq API. Resolves to the parsed JSON response.
       * On HTTP errors throws an Error carrying `status`, `details`, and the
       * raw `response` text, with user-friendly messages for common 400s.
       */
      async makeRequest(endpoint, body) {
        const url = `${this.baseURL}${endpoint}`;
        try {
          const response = await fetch(url, {
            method: "POST",
            headers: {
              "Authorization": `Bearer ${this.apiKey}`,
              "Content-Type": "application/json"
            },
            body: JSON.stringify(body)
          });
          if (!response.ok) {
            const errorText = await response.text();
            let errorMessage = `HTTP ${response.status}`;
            let errorDetails = "";
            let fullErrorJson = null;
            try {
              fullErrorJson = JSON.parse(errorText);
              errorMessage = fullErrorJson.error?.message || errorMessage;
              errorDetails = fullErrorJson.error?.type || "";
              // Translate common 400 causes into actionable messages.
              if (response.status === 400) {
                if (fullErrorJson.error?.code === "tool_use_failed" && fullErrorJson.error?.failed_generation) {
                  console.error("[Groq] Tool parsing error. Failed generation:", fullErrorJson.error.failed_generation);
                  errorMessage = "Groq generated malformed tool call JSON. This is a known issue with some models. Please try again or use a different model.";
                  console.error("[Groq] Malformed JSON from model:", fullErrorJson.error.failed_generation);
                } else if (errorMessage.includes("tool") || errorMessage.includes("function")) {
                  errorMessage = "Model does not support function calling. Try a different model or use text-only mode.";
                } else if (errorMessage.includes("max_tokens")) {
                  errorMessage = "Request too large. Try reducing message length or context.";
                } else if (errorMessage.includes("invalid_request_error")) {
                  errorMessage = "Invalid request format. Check your input parameters.";
                } else if (errorMessage.includes("content")) {
                  errorMessage = "Invalid message content format. Ensure all messages have valid content.";
                } else {
                  errorMessage = `Bad request: ${errorMessage}. Check message format and model compatibility.`;
                }
              }
            } catch {
              // Non-JSON error body: keep the generic HTTP-status message.
            }
            console.error("[Groq] API Error Details:");
            console.error("[Groq] Status:", response.status);
            console.error("[Groq] Error Message:", errorMessage);
            console.error("[Groq] Error Type:", errorDetails);
            console.error("[Groq] Raw Response:", errorText);
            const error = new Error(`groq API request failed (${errorMessage})`);
            error.status = response.status;
            error.details = errorDetails;
            error.response = errorText;
            throw error;
          }
          return await response.json();
        } catch (error) {
          if (error.status) {
            throw error;
          }
          // FIX: throw a real Error instead of a plain object literal so the
          // failure has a stack trace and passes `instanceof Error` checks;
          // callers only read `.message` (and `.status`, absent in both
          // cases), so behavior upstream is unchanged.
          throw new Error(error.message || "Network error");
        }
      }
      // Helper method to get available models
      static getAvailableModels() {
        return [
          // Llama models
          "llama3-groq-70b-8192-tool-use-preview",
          "llama3-groq-8b-8192-tool-use-preview",
          "meta-llama/llama-4-scout-17b-16e-instruct",
          "llama-3.3-70b-versatile",
          "llama-3.1-70b-versatile",
          "llama-3.1-8b-instant",
          // Mixtral models
          "mixtral-8x7b-32768",
          // Gemma models
          "gemma2-9b-it",
          "gemma-7b-it",
          // Qwen models
          "qwen2.5-72b-instruct",
          // DeepSeek models
          "deepseek-r1-distill-llama-70b"
        ];
      }
    };
  }
});
// src/assets/loader.ts
// Export object for the asset-loader module; __export installs a live getter
// for AssetLoader, which is assigned later when init_loader runs.
var loader_exports = {};
__export(loader_exports, {
  AssetLoader: () => AssetLoader
});
// Module-scope bindings populated by init_loader below.
var import_fs4, import_path4, import_js_yaml, AssetLoader;
// Lazy initializer for src/assets/loader.ts. Defines AssetLoader, which reads
// persona/workflow/skill YAML assets from the project's `.metis` directory
// (with a bundled fallback directory for personas).
var init_loader = __esm({
  "src/assets/loader.ts"() {
    "use strict";
    import_fs4 = __toESM(require("fs"));
    import_path4 = __toESM(require("path"));
    import_js_yaml = __toESM(require("js-yaml"));
    AssetLoader = class {
      // basePath defaults to the current working directory; assets live
      // under <basePath>/.metis.
      constructor(basePath = process.cwd()) {
        this.basePath = import_path4.default.join(basePath, ".metis");
      }
      // Persona loading with project-specific support.
      // Resolution order: .metis/persona.yaml (only when name === "project"),
      // then .metis/personas/<name>.yaml, then the bundled assets directory.
      // Throws Error when no file is found or the file fails validation.
      async loadPersona(name) {
        const projectPersonaPath = import_path4.default.join(this.basePath, "persona.yaml");
        if (name === "project" && import_fs4.default.existsSync(projectPersonaPath)) {
          return this.parsePersonaFile(projectPersonaPath);
        }
        const personaPath = import_path4.default.join(this.basePath, "personas", `${name}.yaml`);
        if (import_fs4.default.existsSync(personaPath)) {
          return this.parsePersonaFile(personaPath);
        }
        const builtinPath = import_path4.default.join(__dirname, "..", "..", "assets", "personas", `${name}.yaml`);
        if (!import_fs4.default.existsSync(builtinPath)) {
          throw new Error(`Persona not found: ${name}`);
        }
        return this.parsePersonaFile(builtinPath);
      }
      // Load the project-specific persona (.metis/persona.yaml) if it exists;
      // returns null when the file is absent.
      async loadProjectPersona() {
        const projectPersonaPath = import_path4.default.join(this.basePath, "persona.yaml");
        if (import_fs4.default.existsSync(projectPersonaPath)) {
          return this.parsePersonaFile(projectPersonaPath);
        }
        return null;
      }
      // List persona base names: workspace personas first, then bundled ones,
      // with duplicates (already present in the workspace) filtered out.
      async listPersonas() {
        const personas = [];
        const workspaceDir = import_path4.default.join(this.basePath, "personas");
        if (import_fs4.default.existsSync(workspaceDir)) {
          const files = import_fs4.default.readdirSync(workspaceDir).filter((f) => f.endsWith(".yaml") || f.endsWith(".yml")).map((f) => import_path4.default.basename(f, import_path4.default.extname(f)));
          personas.push(...files);
        }
        const builtinDir = import_path4.default.join(__dirname, "..", "..", "assets", "personas");
        if (import_fs4.default.existsSync(builtinDir)) {
          const files = import_fs4.default.readdirSync(builtinDir).filter((f) => f.endsWith(".yaml") || f.endsWith(".yml")).map((f) => import_path4.default.basename(f, import_path4.default.extname(f)));
          personas.push(...files.filter((f) => !personas.includes(f)));
        }
        return personas;
      }
      // Workflow loading. Throws when the file is missing or is not a YAML
      // mapping carrying `name` and `steps`.
      async loadWorkflow(name) {
        const workflowPath = import_path4.default.join(this.basePath, "workflows", `${name}.yaml`);
        if (!import_fs4.default.existsSync(workflowPath)) {
          throw new Error(`Workflow not found: ${name}`);
        }
        const content = import_fs4.default.readFileSync(workflowPath, "utf8");
        const workflow = import_js_yaml.default.load(content);
        // js-yaml load() returns null/undefined (or a scalar) for empty or
        // scalar documents; guard so that case throws the intended Error
        // instead of a TypeError on property access.
        if (!workflow || !workflow.name || !workflow.steps) {
          throw new Error(`Invalid workflow format: ${name}`);
        }
        return workflow;
      }
      // List workflow base names from .metis/workflows (empty if absent).
      async listWorkflows() {
        const workflowsDir = import_path4.default.join(this.basePath, "workflows");
        if (!import_fs4.default.existsSync(workflowsDir)) {
          return [];
        }
        return import_fs4.default.readdirSync(workflowsDir).filter((f) => f.endsWith(".yaml") || f.endsWith(".yml")).map((f) => import_path4.default.basename(f, import_path4.default.extname(f)));
      }
      // Skill loading. Throws when the file is missing or is not a YAML
      // mapping carrying `name` and `tools`.
      async loadSkill(name) {
        const skillPath = import_path4.default.join(this.basePath, "skills", `${name}.yaml`);
        if (!import_fs4.default.existsSync(skillPath)) {
          throw new Error(`Skill not found: ${name}`);
        }
        const content = import_fs4.default.readFileSync(skillPath, "utf8");
        const skill = import_js_yaml.default.load(content);
        // Same null/scalar guard as loadWorkflow.
        if (!skill || !skill.name || !skill.tools) {
          throw new Error(`Invalid skill format: ${name}`);
        }
        return skill;
      }
      // List skill base names from .metis/skills (empty if absent).
      async listSkills() {
        const skillsDir = import_path4.default.join(this.basePath, "skills");
        if (!import_fs4.default.existsSync(skillsDir)) {
          return [];
        }
        return import_fs4.default.readdirSync(skillsDir).filter((f) => f.endsWith(".yaml") || f.endsWith(".yml")).map((f) => import_path4.default.basename(f, import_path4.default.extname(f)));
      }
      // Parse and validate a persona YAML file; requires `name` and
      // `system_prompt`.
      parsePersonaFile(filePath) {
        const content = import_fs4.default.readFileSync(filePath, "utf8");
        const persona = import_js_yaml.default.load(content);
        // Guard against null/scalar YAML documents (see loadWorkflow).
        if (!persona || !persona.name || !persona.system_prompt) {
          throw new Error(`Invalid persona format: ${filePath}`);
        }
        return persona;
      }
      // Utility methods
      // Returns true only when `type` is a known asset kind AND the named
      // asset loads and validates. Previously an unrecognized `type` fell
      // through the switch and incorrectly reported the asset as valid.
      async validateAsset(type, name) {
        try {
          switch (type) {
            case "persona":
              await this.loadPersona(name);
              break;
            case "workflow":
              await this.loadWorkflow(name);
              break;
            case "skill":
              await this.loadSkill(name);
              break;
            default:
              return false;
          }
          return true;
        } catch {
          return false;
        }
      }
      // Serialize a persona to .metis/personas/<name>.yaml, creating the
      // directory if needed. Throws if the file exists and overwrite is false.
      async createPersona(persona, overwrite = false) {
        const personaPath = import_path4.default.join(this.basePath, "personas", `${persona.name}.yaml`);
        if (import_fs4.default.existsSync(personaPath) && !overwrite) {
          throw new Error(`Persona already exists: ${persona.name}`);
        }
        const dir = import_path4.default.dirname(personaPath);
        if (!import_fs4.default.existsSync(dir)) {
          import_fs4.default.mkdirSync(dir, { recursive: true });
        }
        const yamlContent = import_js_yaml.default.dump(persona, { indent: 2 });
        import_fs4.default.writeFileSync(personaPath, yamlContent);
      }
    };
  }
});
// src/agent/simpleAgent.ts
// Export map for the lazily initialized src/agent/simpleAgent.ts module.
var simpleAgent_exports = {};
__export(simpleAgent_exports, {
  makeProvider: () => makeProvider,
  runSimpleAgent: () => runSimpleAgent
});
// Construct the LLM provider selected by the loaded config.
// API keys come from stored secrets first, then the matching environment
// variable. Throws for an unrecognized cfg.provider value.
function makeProvider() {
  const cfg = loadConfig();
  const secrets = loadSecrets();
  const base = {
    model: cfg.model,
    temperature: cfg.temperature
  };
  switch (cfg.provider) {
    case "openai":
      return new OpenAIProvider({
        ...base,
        apiKey: secrets.openai || process.env.OPENAI_API_KEY
      });
    case "anthropic":
      return new AnthropicProvider({
        ...base,
        apiKey: secrets.anthropic || process.env.ANTHROPIC_API_KEY
      });
    case "groq":
      return new GroqProvider({
        ...base,
        apiKey: secrets.groq || process.env.GROQ_API_KEY
      });
    default:
      throw new Error(`Unknown provider in config: ${cfg.provider}`);
  }
}
// Run one agent round-trip: load config and persona, build the system prompt
// for `mode` ("plan" or build), send the task to the provider, and return the
// model's trimmed reply.
async function runSimpleAgent(mode, task) {
  const cfg = loadConfig();
  const provider = makeProvider();
  const repoSummary = summarizeRepo(60);
  const personaName = process.env.METIS_PERSONA || "default";
  const assetLoader = new AssetLoader();
  let persona;
  try {
    persona = await assetLoader.loadPersona(personaName);
    if (process.env.METIS_VERBOSE === "true") {
      console.log(`Using persona: ${persona.name} - ${persona.description}`);
    }
  } catch (error) {
    // A persona that fails to load degrades to a built-in fallback rather
    // than aborting the run.
    console.warn(`Failed to load persona '${personaName}': ${error.message}`);
    console.warn("Falling back to default behavior");
    persona = {
      name: "fallback",
      version: "1.0",
      description: "Fallback persona",
      system_prompt: "You are Metis, a helpful coding assistant.",
      temperature: cfg.temperature
    };
  }
  const systemPrompt = buildSystemPrompt(mode, persona, repoSummary);
  const userContent = task.trim() || "Plan repository changes for the task.";
  const messages = [
    { role: "system", content: systemPrompt },
    { role: "user", content: userContent }
  ];
  // A persona temperature that is set (anything other than undefined)
  // overrides the configured one.
  const temperature = persona.temperature === void 0 ? cfg.temperature : persona.temperature;
  const reply = await provider.send(messages, { temperature });
  return reply.trim();
}
// Compose the system prompt for a run.
// "plan" mode asks the model for a minimal step plan; any other mode asks for
// file-level changes formatted as a strict Metis Patch envelope. The persona's
// system_prompt leads the message and the repository summary closes it.
function buildSystemPrompt(mode, persona, repoSummary) {
  if (mode === "plan") {
    return `${persona.system_prompt}
Your task is to propose a clear, minimal plan of steps to implement the user's request in this repository. Prefer diffs and focused changes.
Repository summary:
${repoSummary}`;
  }
  return `${persona.system_prompt}
Your task is to produce specific, minimal file-level changes and a patch.
Format strictly as a Metis Patch:
*** Begin Patch
*** Add File: path/relative/to/repo.ext
<full new file content>
*** Update File: another/path.ext
<full updated file content>
*** Delete File: another/path.ext
*** End Patch
Rules:
- For Add/Update, include the FULL file content exactly as it should be saved.
- Do not include code fences or explanations outside the patch envelope.
- Only touch files needed for the task.
- Use POSIX newlines.
Repository summary:
${repoSummary}`;
}
// Lazy initializer for src/agent/simpleAgent.ts: on first call (via __esm in
// the bundler preamble) it initializes the modules this one depends on.
var init_simpleAgent = __esm({
  "src/agent/simpleAgent.ts"() {
    "use strict";
    init_config();
    init_openai();
    init_anthropic();
    init_groq();
    init_repo();
    init_loader();
  }
});
// src/cli/dropdowns/DropdownHelpers.ts
// Export map for the lazily initialized DropdownHelpers module.
var DropdownHelpers_exports = {};
__export(DropdownHelpers_exports, {
  DropdownHelpers: () => DropdownHelpers
});
// Module-level bindings; assigned when init_DropdownHelpers() first runs.
var import_inquirer, import_kleur3, DropdownHelpers;
var init_DropdownHelpers = __esm({
"src/cli/dropdowns/DropdownHelpers.ts"() {
"use strict";
import_inquirer = __toESM(require("inquirer"));
import_kleur3 = __toESM(require("kleur"));
DropdownHelpers = class {
/**
* Show a single-selection dropdown menu
*/
static async selectOne(options) {
const { selection } = await import_inquirer.default.prompt([
{
type: "list",
name: "selection",
message: options.message,
choices: options.choices,
pageSize: options.pageSize || 10,
default: options.defaultValue
}
]);
return selection;
}
/**
* Show a multi-selection checkbox menu
*/
static async selectMultiple(options) {
const { selections } = await import_inquirer.default.prompt([
{
type: "checkbox",
name: "selections",
message: options.message,
choices: options.choices,
pageSize: options.pageSize || 10,
validate: options.validate
}
]);
return selections;
}
/**
* Prompt for text input with validation
*/
stati