@stackmemoryai/stackmemory
Version:
Project-scoped memory for AI coding tools. Durable context across sessions with MCP integration, frames, smart retrieval, Claude Code skills, and automatic hooks.
152 lines (151 loc) • 4.76 kB
JavaScript
// Bundler-injected CommonJS-compat shim: reconstructs __filename/__dirname,
// which do not exist natively in ES modules. The aliased import names avoid
// clashing with other injected helpers. Not referenced by the visible code
// below — presumably kept for code elsewhere in the bundle; do not remove.
import { fileURLToPath as __fileURLToPath } from 'url';
import { dirname as __pathDirname } from 'path';
const __filename = __fileURLToPath(import.meta.url);
const __dirname = __pathDirname(__filename);
import Anthropic from "@anthropic-ai/sdk";
import { logger } from "../monitoring/logger.js";
/**
 * LLM provider backed by the Anthropic Messages API.
 *
 * Wraps a single-turn `messages.create` call with:
 *  - per-request timeout enforced via AbortController,
 *  - exponential-backoff retries for transient failures
 *    (rate limits, connection errors, 5xx, timeouts).
 */
class AnthropicLLMProvider {
  client;
  model;
  temperature;
  maxRetries;
  timeout;

  /**
   * @param {{apiKey: string, model?: string, temperature?: number,
   *          maxRetries?: number, timeout?: number}} config
   *   `temperature` defaults to 0.3, `maxRetries` to 2 (i.e. up to 3 attempts),
   *   `timeout` to 30000 ms.
   */
  constructor(config) {
    this.client = new Anthropic({
      apiKey: config.apiKey
    });
    this.model = config.model || "claude-3-5-haiku-20241022";
    // `??` (not `||`) so explicit 0 values are honored.
    this.temperature = config.temperature ?? 0.3;
    this.maxRetries = config.maxRetries ?? 2;
    this.timeout = config.timeout ?? 3e4;
    logger.info("AnthropicLLMProvider initialized", {
      model: this.model,
      temperature: this.temperature
    });
  }

  /**
   * Analyze a prompt using the Anthropic API.
   *
   * Retries transient failures up to `maxRetries` times with exponential
   * backoff (1s, 2s, 4s, ...). Non-retryable errors, or exhaustion of
   * retries, rethrow the last error.
   *
   * @param {string} prompt - User-role message content.
   * @param {number} maxTokens - max_tokens for the completion.
   * @returns {Promise<string>} The text content of the model's reply.
   * @throws The last API error when all attempts fail.
   */
  async analyze(prompt, maxTokens) {
    const startTime = Date.now();
    let lastError = null;
    for (let attempt = 0; attempt <= this.maxRetries; attempt++) {
      try {
        const response = await this.makeRequest(prompt, maxTokens);
        logger.debug("LLM analysis completed", {
          model: this.model,
          promptLength: prompt.length,
          responseLength: response.length,
          durationMs: Date.now() - startTime,
          attempt
        });
        return response;
      } catch (error) {
        lastError = error instanceof Error ? error : new Error(String(error));
        if (this.isRetryableError(error) && attempt < this.maxRetries) {
          const backoffMs = Math.pow(2, attempt) * 1e3;
          logger.warn("LLM request failed, retrying", {
            attempt,
            backoffMs,
            error: lastError.message
          });
          await this.sleep(backoffMs);
          continue;
        }
        break;
      }
    }
    logger.error("LLM analysis failed after retries", lastError);
    throw lastError;
  }

  /**
   * Make a single API request with a hard timeout.
   *
   * @param {string} prompt
   * @param {number} maxTokens
   * @returns {Promise<string>} Text of the first text-type content block.
   * @throws {Error} If the response contains no text content, or on API /
   *   abort (timeout) errors.
   */
  async makeRequest(prompt, maxTokens) {
    const controller = new AbortController();
    const timeoutId = setTimeout(() => controller.abort(), this.timeout);
    try {
      const response = await this.client.messages.create(
        {
          model: this.model,
          max_tokens: maxTokens,
          temperature: this.temperature,
          messages: [
            {
              role: "user",
              content: prompt
            }
          ]
        },
        // BUG FIX: the controller's signal was never passed to the SDK, so
        // the timeout fired but could not cancel the in-flight request —
        // the timeout was a no-op. Wiring the signal makes it effective.
        { signal: controller.signal }
      );
      const textContent = response.content.find((c) => c.type === "text");
      if (!textContent || textContent.type !== "text") {
        throw new Error("No text content in response");
      }
      return textContent.text;
    } finally {
      clearTimeout(timeoutId);
    }
  }

  /**
   * Check if an error is transient and worth retrying.
   * Rate limits, connection failures, server-side 5xx errors, and our own
   * timeout aborts are retryable; everything else (auth, bad request) is not.
   */
  isRetryableError(error) {
    if (error instanceof Anthropic.RateLimitError) {
      return true;
    }
    if (error instanceof Anthropic.APIConnectionError) {
      return true;
    }
    if (error instanceof Anthropic.InternalServerError) {
      return true;
    }
    // The SDK surfaces an aborted request (our timeout) as APIUserAbortError;
    // a raw fetch abort carries name "AbortError". Treat both as timeouts.
    if (error instanceof Anthropic.APIUserAbortError) {
      return true;
    }
    if (error instanceof Error && error.name === "AbortError") {
      return true;
    }
    return false;
  }

  /** Promise-based delay used between retry attempts. */
  sleep(ms) {
    return new Promise((resolve) => setTimeout(resolve, ms));
  }
}
/**
 * Offline, heuristic stand-in for the LLM provider.
 * Produces a crude extractive summary — no API calls, deterministic output.
 */
class LocalFallbackProvider {
  /**
   * Extract up to five early sentences from the text following a
   * "Content:" marker line, bounded by a rough character budget of
   * `maxTokens * 4` (~4 chars per token). Sentence terminators are
   * normalized to ".". Returns a placeholder string when no content
   * marker is present or nothing fits the budget.
   *
   * @param {string} prompt
   * @param {number} maxTokens
   * @returns {Promise<string>}
   */
  async analyze(prompt, maxTokens) {
    const nonEmptyLines = prompt.split("\n").filter((line) => line.trim());
    const markerIndex = nonEmptyLines.findIndex((line) => line.includes("Content:"));
    if (markerIndex === -1 || nonEmptyLines.length < 3) {
      return "Context summary not available (local mode)";
    }

    const body = nonEmptyLines.slice(markerIndex + 1).join("\n");
    const candidates = body
      .split(/[.!?]+/)
      .filter((piece) => piece.trim().length > 10)
      .slice(0, 5);

    const charBudget = maxTokens * 4;
    let summary = "";
    for (const candidate of candidates) {
      if (summary.length + candidate.length > charBudget) {
        break;
      }
      summary = `${summary}${candidate.trim()}. `;
    }

    const trimmed = summary.trim();
    return trimmed || "Context available (use LLM API for detailed analysis)";
  }
}
/**
 * Factory selecting the LLM provider from environment variables.
 *
 * Returns a LocalFallbackProvider when local mode is forced
 * (STACKMEMORY_LOCAL or LOCAL_ONLY set to "true") or when no
 * ANTHROPIC_API_KEY is available; otherwise an AnthropicLLMProvider
 * configured from ANTHROPIC_MODEL / ANTHROPIC_TEMPERATURE.
 *
 * @returns {AnthropicLLMProvider | LocalFallbackProvider}
 */
function createLLMProvider() {
  if (process.env["STACKMEMORY_LOCAL"] === "true" || process.env["LOCAL_ONLY"] === "true") {
    logger.info("LOCAL mode - using heuristic summarization");
    return new LocalFallbackProvider();
  }
  const apiKey = process.env["ANTHROPIC_API_KEY"];
  if (!apiKey) {
    logger.info(
      "No ANTHROPIC_API_KEY found, LLM retrieval will use heuristics"
    );
    return new LocalFallbackProvider();
  }
  // BUG FIX: a malformed ANTHROPIC_TEMPERATURE used to produce NaN, which
  // the constructor's `?? 0.3` default does NOT catch (NaN is not nullish)
  // and would have been sent to the API. Fall back to 0.3 explicitly.
  const parsedTemperature = Number.parseFloat(process.env["ANTHROPIC_TEMPERATURE"] || "0.3");
  return new AnthropicLLMProvider({
    apiKey,
    model: process.env["ANTHROPIC_MODEL"] || "claude-3-5-haiku-20241022",
    temperature: Number.isFinite(parsedTemperature) ? parsedTemperature : 0.3
  });
}
export {
AnthropicLLMProvider,
LocalFallbackProvider,
createLLMProvider
};
//# sourceMappingURL=llm-provider.js.map