@stackmemoryai/stackmemory
Version:
Lossless, project-scoped memory for AI coding tools. Durable context across sessions with 56 MCP tools, FTS5 search, conductor orchestrator, loop/watch monitoring, snapshot capture, pre-flight overlap checks, Claude/Codex/OpenCode wrappers, Linear sync, and more.
399 lines (398 loc) • 11.3 kB
JavaScript
// CommonJS-style __filename/__dirname shims for this ES module (typically
// injected by a bundler for code that expects the CJS globals).
// NOTE(review): neither appears to be used in this chunk — confirm against
// the rest of the bundle before removing.
import { fileURLToPath as __fileURLToPath } from 'url';
import { dirname as __pathDirname } from 'path';
const __filename = __fileURLToPath(import.meta.url);
const __dirname = __pathDirname(__filename);
/**
 * Provider adapter for the Anthropic Claude Messages API.
 *
 * Translates the host application's provider-neutral message/options shape
 * into Anthropic `/v1/messages` requests, for both SSE streaming and
 * single-shot completion.
 */
class ClaudeAdapter {
  id = "claude";
  name = "Anthropic Claude";
  version = "1.0.0";
  // Claude-specific capability flags this adapter advertises to callers.
  extensions = {
    claude: {
      extendedThinking: { enabled: true, budgetTokens: 1e4 },
      xmlOutput: { enabled: true },
      computerUse: { enabled: true },
      documentSupport: { enabled: true, maxPages: 100 }
    }
  };
  apiKey;
  baseUrl;
  /**
   * @param {{ apiKey: string, baseUrl?: string }} config - API credentials;
   *   `baseUrl` defaults to the public Anthropic endpoint.
   */
  constructor(config) {
    this.apiKey = config.apiKey;
    this.baseUrl = config.baseUrl || "https://api.anthropic.com";
  }
  /** @returns {boolean} true only for the "claude" extension namespace. */
  supportsExtension(extension) {
    return extension === "claude";
  }
  /**
   * Stream a completion as parsed server-sent events.
   *
   * Yields an `{ type: "error" }` event instead of throwing on HTTP/stream
   * failures so consumers handle both paths uniformly.
   *
   * @param {Array<{role: string, content: any}>} messages
   * @param {object} options - model, maxTokens, etc. (see buildRequestBody)
   */
  async *stream(messages, options) {
    const body = this.buildRequestBody(messages, options);
    const response = await fetch(`${this.baseUrl}/v1/messages`, {
      method: "POST",
      headers: this.getHeaders(options.extensions),
      body: JSON.stringify({ ...body, stream: true })
    });
    if (!response.ok) {
      yield {
        type: "error",
        error: {
          type: "api_error",
          message: `Claude API error: ${response.status} ${response.statusText}`
        }
      };
      return;
    }
    const reader = response.body?.getReader();
    if (!reader) {
      yield {
        type: "error",
        error: { type: "stream_error", message: "No response body" }
      };
      return;
    }
    const decoder = new TextDecoder();
    let buffer = "";
    try {
      while (true) {
        const { done, value } = await reader.read();
        if (done) break;
        buffer += decoder.decode(value, { stream: true });
        // SSE frames are newline-delimited; keep the trailing partial line
        // in the buffer until the next chunk completes it.
        const lines = buffer.split("\n");
        buffer = lines.pop() || "";
        for (const line of lines) {
          if (!line.startsWith("data: ")) continue;
          const data = line.slice(6);
          if (data === "[DONE]") return;
          try {
            yield this.normalizeEvent(JSON.parse(data));
          } catch {
            // Ignore unparseable fragments (keep-alives, partial writes).
          }
        }
      }
      // BUGFIX: flush a final event left in the buffer when the stream ends
      // without a trailing newline — previously it was silently dropped.
      const tail = buffer + decoder.decode();
      if (tail.startsWith("data: ")) {
        const data = tail.slice(6);
        if (data !== "[DONE]") {
          try {
            yield this.normalizeEvent(JSON.parse(data));
          } catch {
          }
        }
      }
    } finally {
      // BUGFIX: always release the reader lock (even on early return or a
      // consumer-side break) so the underlying connection can be reclaimed.
      reader.releaseLock();
    }
  }
  /**
   * Single-shot (non-streaming) completion.
   *
   * @returns {Promise<{content: any, usage: {inputTokens: number, outputTokens: number}, stopReason: string}>}
   * @throws {Error} on a non-2xx HTTP response.
   */
  async complete(messages, options) {
    const body = this.buildRequestBody(messages, options);
    const response = await fetch(`${this.baseUrl}/v1/messages`, {
      method: "POST",
      headers: this.getHeaders(options.extensions),
      body: JSON.stringify(body)
    });
    if (!response.ok) {
      throw new Error(
        `Claude API error: ${response.status} ${response.statusText}`
      );
    }
    const data = await response.json();
    return {
      content: data.content,
      usage: {
        inputTokens: data.usage?.input_tokens ?? 0,
        outputTokens: data.usage?.output_tokens ?? 0
      },
      stopReason: data.stop_reason ?? "end_turn"
    };
  }
  /**
   * Probe the API with a minimal 1-token request.
   * A 400 also counts as "connected" — the key was accepted and the request
   * reached request validation.
   */
  async validateConnection() {
    try {
      const response = await fetch(`${this.baseUrl}/v1/messages`, {
        method: "POST",
        headers: {
          "Content-Type": "application/json",
          "x-api-key": this.apiKey,
          "anthropic-version": "2023-06-01"
        },
        body: JSON.stringify({
          model: "claude-3-5-haiku-20241022",
          max_tokens: 1,
          messages: [{ role: "user", content: "hi" }]
        })
      });
      return response.ok || response.status === 400;
    } catch {
      return false;
    }
  }
  /** Static list of known model ids (no models endpoint is queried). */
  async listModels() {
    return [
      "claude-opus-4-6",
      "claude-sonnet-4-5-20250929",
      "claude-haiku-4-5-20251001",
      "claude-sonnet-4-20250514",
      "claude-3-5-haiku-20241022"
    ];
  }
  /**
   * Map the neutral options object onto an Anthropic request body.
   * Optional fields are only set when present so the API's own defaults apply.
   */
  buildRequestBody(messages, options) {
    const body = {
      model: options.model,
      max_tokens: options.maxTokens,
      messages: messages.map((m) => ({
        role: m.role,
        content: m.content
      }))
    };
    if (options.temperature !== void 0) {
      body.temperature = options.temperature;
    }
    if (options.topP !== void 0) {
      body.top_p = options.topP;
    }
    if (options.stopSequences?.length) {
      body.stop_sequences = options.stopSequences;
    }
    if (options.system) {
      body.system = options.system;
    }
    if (options.tools?.length) {
      body.tools = options.tools.map((t) => ({
        name: t.name,
        description: t.description,
        input_schema: t.inputSchema
      }));
    }
    if (options.extensions?.extendedThinking?.enabled) {
      body.thinking = {
        type: "enabled",
        budget_tokens: options.extensions.extendedThinking.budgetTokens || 1e4
      };
      // The Messages API requires temperature = 1 when extended thinking is
      // enabled; this deliberately overrides any caller-supplied value.
      body.temperature = 1;
    }
    return body;
  }
  /**
   * Build auth/version headers, opting into beta features via the
   * `anthropic-beta` header when the matching extension is enabled.
   */
  getHeaders(extensions) {
    const headers = {
      "Content-Type": "application/json",
      "x-api-key": this.apiKey,
      "anthropic-version": "2023-06-01"
    };
    const flags = [];
    if (extensions?.extendedThinking?.enabled) {
      flags.push("interleaved-thinking-2025-05-14");
    }
    if (extensions?.computerUse?.enabled) {
      flags.push("computer-use-2024-10-22");
    }
    if (extensions?.documentSupport?.enabled) {
      flags.push("pdfs-2024-09-25");
    }
    if (flags.length > 0) {
      headers["anthropic-beta"] = flags.join(",");
    }
    return headers;
  }
  /** Claude events already match the neutral event shape; pass through. */
  normalizeEvent(event) {
    return event;
  }
}
/**
 * Provider adapter for OpenAI-compatible chat-completions APIs.
 *
 * Translates the neutral message/options shape into `/v1/chat/completions`
 * requests and re-emits OpenAI's streaming deltas as Claude-style events
 * (message_start, content_block_*, message_stop) so consumers only handle
 * one event vocabulary.
 */
class GPTAdapter {
  id = "gpt";
  name = "OpenAI GPT";
  version = "1.0.0";
  // GPT-specific capability flags this adapter advertises to callers.
  extensions = {
    gpt: {
      codeInterpreter: { enabled: true },
      browsing: { enabled: true },
      imageGeneration: {
        enabled: true,
        size: "1024x1024",
        quality: "standard"
      },
      functionCalling: { mode: "auto" },
      jsonMode: { enabled: true }
    }
  };
  apiKey;
  baseUrl;
  /**
   * @param {{ apiKey: string, baseUrl?: string }} config - API credentials;
   *   `baseUrl` defaults to the public OpenAI endpoint.
   */
  constructor(config) {
    this.apiKey = config.apiKey;
    this.baseUrl = config.baseUrl || "https://api.openai.com";
  }
  /** @returns {boolean} true only for the "gpt" extension namespace. */
  supportsExtension(extension) {
    return extension === "gpt";
  }
  /**
   * Flatten structured content blocks into the plain string form the
   * chat-completions API expects (non-text blocks are discarded).
   */
  #flattenMessages(messages) {
    return messages.map((m) => ({
      role: m.role,
      content: typeof m.content === "string" ? m.content : m.content.filter((c) => c.type === "text").map((c) => c.text).join("")
    }));
  }
  /**
   * Stream a completion, re-emitting OpenAI deltas as Claude-style events.
   * Yields an `{ type: "error" }` event instead of throwing on HTTP failure.
   */
  async *stream(messages, options) {
    const body = {
      model: options.model,
      max_tokens: options.maxTokens,
      messages: this.#flattenMessages(messages),
      stream: true
    };
    if (options.temperature !== void 0) {
      body.temperature = options.temperature;
    }
    if (options.topP !== void 0) {
      body.top_p = options.topP;
    }
    if (options.extensions?.jsonMode?.enabled) {
      body.response_format = { type: "json_object" };
    }
    if (options.tools?.length) {
      body.tools = options.tools.map((t) => ({
        type: "function",
        function: {
          name: t.name,
          description: t.description,
          parameters: t.inputSchema
        }
      }));
      if (options.extensions?.functionCalling?.mode) {
        body.tool_choice = options.extensions.functionCalling.mode;
      }
    }
    const response = await fetch(`${this.baseUrl}/v1/chat/completions`, {
      method: "POST",
      headers: {
        "Content-Type": "application/json",
        Authorization: `Bearer ${this.apiKey}`
      },
      body: JSON.stringify(body)
    });
    if (!response.ok) {
      yield {
        type: "error",
        error: {
          type: "api_error",
          message: `GPT API error: ${response.status} ${response.statusText}`
        }
      };
      return;
    }
    // OpenAI has no message_start event; synthesize one for parity with Claude.
    yield {
      type: "message_start",
      message: {
        id: `msg_${Date.now()}`,
        model: options.model,
        role: "assistant"
      }
    };
    const reader = response.body?.getReader();
    if (!reader) return;
    const decoder = new TextDecoder();
    let buffer = "";
    // Single text block: OpenAI deltas carry no block structure.
    const blockIndex = 0;
    let blockStarted = false;
    try {
      while (true) {
        const { done, value } = await reader.read();
        if (done) break;
        buffer += decoder.decode(value, { stream: true });
        // SSE frames are newline-delimited; keep the trailing partial line.
        const lines = buffer.split("\n");
        buffer = lines.pop() || "";
        for (const line of lines) {
          if (!line.startsWith("data: ")) continue;
          const data = line.slice(6);
          if (data === "[DONE]") {
            if (blockStarted) {
              yield { type: "content_block_stop", index: blockIndex };
            }
            yield { type: "message_stop" };
            return;
          }
          try {
            const delta = JSON.parse(data).choices?.[0]?.delta;
            if (delta?.content) {
              if (!blockStarted) {
                blockStarted = true;
                yield {
                  type: "content_block_start",
                  index: blockIndex,
                  contentBlock: { type: "text", text: "" }
                };
              }
              yield {
                type: "content_block_delta",
                index: blockIndex,
                delta: { type: "text_delta", text: delta.content }
              };
            }
          } catch {
            // Ignore unparseable fragments (keep-alives, partial writes).
          }
        }
      }
      // BUGFIX: a stream that ended without a [DONE] sentinel previously
      // never emitted the terminal content_block_stop / message_stop events,
      // leaving consumers waiting on an unterminated message.
      if (blockStarted) {
        yield { type: "content_block_stop", index: blockIndex };
      }
      yield { type: "message_stop" };
    } finally {
      // BUGFIX: always release the reader lock so the connection is reclaimed.
      reader.releaseLock();
    }
  }
  /**
   * Single-shot (non-streaming) completion.
   *
   * @returns {Promise<{content: Array, usage: {inputTokens: number, outputTokens: number}, stopReason: string}>}
   * @throws {Error} on a non-2xx HTTP response.
   */
  async complete(messages, options) {
    const body = {
      model: options.model,
      max_tokens: options.maxTokens,
      messages: this.#flattenMessages(messages)
    };
    if (options.temperature !== void 0) {
      body.temperature = options.temperature;
    }
    // BUGFIX: topP was honored by stream() but silently dropped here.
    if (options.topP !== void 0) {
      body.top_p = options.topP;
    }
    const response = await fetch(`${this.baseUrl}/v1/chat/completions`, {
      method: "POST",
      headers: {
        "Content-Type": "application/json",
        Authorization: `Bearer ${this.apiKey}`
      },
      body: JSON.stringify(body)
    });
    if (!response.ok) {
      throw new Error(
        `GPT API error: ${response.status} ${response.statusText}`
      );
    }
    const data = await response.json();
    const choice = data.choices?.[0];
    return {
      content: [{ type: "text", text: choice?.message?.content ?? "" }],
      usage: {
        inputTokens: data.usage?.prompt_tokens ?? 0,
        outputTokens: data.usage?.completion_tokens ?? 0
      },
      stopReason: choice?.finish_reason ?? "stop"
    };
  }
  /** Probe the API key against the (cheap, GET-only) models endpoint. */
  async validateConnection() {
    try {
      const response = await fetch(`${this.baseUrl}/v1/models`, {
        headers: { Authorization: `Bearer ${this.apiKey}` }
      });
      return response.ok;
    } catch {
      return false;
    }
  }
  /** Static list of known model ids (the models endpoint is not queried). */
  async listModels() {
    return ["gpt-4o", "gpt-4o-mini", "o3-mini", "o4-mini"];
  }
}
/**
 * Instantiate the provider adapter for a provider id.
 *
 * "anthropic" maps to ClaudeAdapter; all other known ids map to GPTAdapter
 * (OpenAI-compatible APIs), some with a provider-specific default base URL.
 *
 * @param {string} id - provider identifier (e.g. "anthropic", "openai").
 * @param {{ apiKey: string, baseUrl?: string }} config - adapter config;
 *   an explicit `baseUrl` always wins over the per-provider default.
 * @returns {ClaudeAdapter | GPTAdapter}
 * @throws {Error} when `id` is not a known provider.
 */
function createProvider(id, config) {
  // OpenAI-compatible providers that need a non-default base URL.
  const compatBaseUrls = {
    cerebras: "https://api.cerebras.ai/v1",
    deepinfra: "https://api.deepinfra.com/v1/openai",
    openrouter: "https://openrouter.ai/api"
  };
  switch (id) {
    case "anthropic":
      return new ClaudeAdapter(config);
    case "openai":
    case "qwen":
    case "ollama":
      return new GPTAdapter(config);
    case "cerebras":
    case "deepinfra":
    case "openrouter":
      // BUGFIX: spread the full config so fields other than apiKey/baseUrl
      // are no longer silently dropped for these providers.
      return new GPTAdapter({
        ...config,
        baseUrl: config.baseUrl || compatBaseUrls[id]
      });
    default:
      throw new Error(`No adapter for provider: ${id}`);
  }
}
// Public module surface.
export {
ClaudeAdapter,
GPTAdapter,
createProvider
};