/**
 * @stackmemoryai/stackmemory
 * Project-scoped memory for AI coding tools. Durable context across sessions
 * with MCP integration, frames, smart retrieval, Claude Code skills, and
 * automatic hooks.
 */
import { fileURLToPath as __fileURLToPath } from 'url';
import { dirname as __pathDirname } from 'path';
// CommonJS-style __filename/__dirname shims for this ES-module build
// (emitted by the bundler; not referenced in the visible code below).
const __filename = __fileURLToPath(import.meta.url);
const __dirname = __pathDirname(__filename);
/**
 * Provider adapter for the Anthropic Claude Messages API
 * (POST {baseUrl}/v1/messages). Supports SSE streaming, blocking
 * completion, and Claude-specific extensions (extended thinking,
 * computer use, PDF documents) enabled via beta header flags.
 */
class ClaudeAdapter {
  id = "claude";
  name = "Anthropic Claude";
  version = "1.0.0";
  // Capabilities advertised under the "claude" extension namespace.
  extensions = {
    claude: {
      extendedThinking: { enabled: true, budgetTokens: 10000 },
      xmlOutput: { enabled: true },
      computerUse: { enabled: true },
      documentSupport: { enabled: true, maxPages: 100 }
    }
  };
  apiKey;
  baseUrl;
  /**
   * @param {{ apiKey: string, baseUrl?: string }} config - apiKey is
   *   required; baseUrl defaults to the public Anthropic endpoint.
   */
  constructor(config) {
    this.apiKey = config.apiKey;
    this.baseUrl = config.baseUrl || "https://api.anthropic.com";
  }
  /** Only the "claude" extension namespace is handled by this adapter. */
  supportsExtension(extension) {
    return extension === "claude";
  }
  /**
   * Builds the common request headers for the Messages API.
   * BUGFIX: the `anthropic-beta` header is now attached only when at
   * least one beta flag is active — previously an empty string was
   * always sent, which some HTTP gateways reject as a malformed header.
   * @param {object|undefined} extensions - options.extensions, may be undefined.
   * @returns {Record<string, string>} headers for fetch().
   */
  buildHeaders(extensions) {
    const headers = {
      "Content-Type": "application/json",
      "x-api-key": this.apiKey,
      "anthropic-version": "2023-06-01"
    };
    const beta = this.getBetaFlags(extensions);
    if (beta) {
      headers["anthropic-beta"] = beta;
    }
    return headers;
  }
  /**
   * Streams a completion as normalized events. On HTTP or body failure
   * this yields a single `error` event instead of throwing.
   */
  async *stream(messages, options) {
    const body = this.buildRequestBody(messages, options);
    const response = await fetch(`${this.baseUrl}/v1/messages`, {
      method: "POST",
      headers: this.buildHeaders(options.extensions),
      body: JSON.stringify({ ...body, stream: true })
    });
    if (!response.ok) {
      yield {
        type: "error",
        error: {
          type: "api_error",
          message: `Claude API error: ${response.status} ${response.statusText}`
        }
      };
      return;
    }
    const reader = response.body?.getReader();
    if (!reader) {
      yield {
        type: "error",
        error: { type: "stream_error", message: "No response body" }
      };
      return;
    }
    const decoder = new TextDecoder();
    let buffer = "";
    try {
      while (true) {
        const { done, value } = await reader.read();
        if (done) break;
        buffer += decoder.decode(value, { stream: true });
        const lines = buffer.split("\n");
        buffer = lines.pop() || ""; // keep the trailing partial line for the next chunk
        for (const line of lines) {
          if (!line.startsWith("data: ")) continue;
          const data = line.slice(6);
          // Defensive: Anthropic signals end via a message_stop event,
          // not an OpenAI-style "[DONE]" sentinel, but accept it anyway.
          if (data === "[DONE]") return;
          try {
            const event = JSON.parse(data);
            yield this.normalizeEvent(event);
          } catch {
            // Ignore malformed/partial SSE payloads.
          }
        }
      }
    } finally {
      // Release the reader lock so the body can be cancelled/collected
      // even if the consumer abandons this generator early.
      reader.releaseLock();
    }
  }
  /**
   * Non-streaming completion.
   * @returns {{ content: any, usage: { inputTokens: number, outputTokens: number }, stopReason: string }}
   * @throws {Error} on any non-2xx HTTP response.
   */
  async complete(messages, options) {
    const body = this.buildRequestBody(messages, options);
    const response = await fetch(`${this.baseUrl}/v1/messages`, {
      method: "POST",
      headers: this.buildHeaders(options.extensions),
      body: JSON.stringify(body)
    });
    if (!response.ok) {
      throw new Error(
        `Claude API error: ${response.status} ${response.statusText}`
      );
    }
    const data = await response.json();
    return {
      content: data.content,
      usage: {
        inputTokens: data.usage?.input_tokens ?? 0,
        outputTokens: data.usage?.output_tokens ?? 0
      },
      stopReason: data.stop_reason ?? "end_turn"
    };
  }
  /**
   * Cheap reachability/credential probe using a 1-token request.
   * A 400 also counts as "connected" (the key was accepted; the request
   * itself was rejected). Returns false on network errors.
   */
  async validateConnection() {
    try {
      const response = await fetch(`${this.baseUrl}/v1/messages`, {
        method: "POST",
        headers: {
          "Content-Type": "application/json",
          "x-api-key": this.apiKey,
          "anthropic-version": "2023-06-01"
        },
        body: JSON.stringify({
          model: "claude-3-5-haiku-20241022",
          max_tokens: 1,
          messages: [{ role: "user", content: "hi" }]
        })
      });
      return response.ok || response.status === 400;
    } catch {
      return false;
    }
  }
  /** Static list of known model ids (no models endpoint is queried). */
  async listModels() {
    return [
      "claude-opus-4-20250514",
      "claude-sonnet-4-20250514",
      "claude-3-5-haiku-20241022",
      "claude-3-opus-20240229"
    ];
  }
  /**
   * Maps provider-agnostic options onto the Messages API body shape
   * (snake_case fields, `input_schema` tools, optional `thinking`).
   */
  buildRequestBody(messages, options) {
    const body = {
      model: options.model,
      max_tokens: options.maxTokens,
      messages: messages.map((m) => ({
        role: m.role,
        content: m.content
      }))
    };
    if (options.temperature !== void 0) {
      body.temperature = options.temperature;
    }
    if (options.topP !== void 0) {
      body.top_p = options.topP;
    }
    if (options.stopSequences?.length) {
      body.stop_sequences = options.stopSequences;
    }
    if (options.system) {
      body.system = options.system;
    }
    if (options.tools?.length) {
      body.tools = options.tools.map((t) => ({
        name: t.name,
        description: t.description,
        input_schema: t.inputSchema
      }));
    }
    if (options.extensions?.extendedThinking?.enabled) {
      body.thinking = {
        type: "enabled",
        budget_tokens: options.extensions.extendedThinking.budgetTokens || 10000
      };
      // The API requires temperature = 1 when extended thinking is on;
      // this deliberately overrides any caller-supplied temperature.
      body.temperature = 1;
    }
    return body;
  }
  /**
   * Comma-joined beta flag list for the `anthropic-beta` header;
   * empty string when no extension needs a beta flag.
   */
  getBetaFlags(extensions) {
    const flags = [];
    if (extensions?.extendedThinking?.enabled) {
      flags.push("interleaved-thinking-2025-05-14");
    }
    if (extensions?.computerUse?.enabled) {
      flags.push("computer-use-2024-10-22");
    }
    if (extensions?.documentSupport?.enabled) {
      flags.push("pdfs-2024-09-25");
    }
    return flags.join(",");
  }
  /** Claude SSE events already match the normalized shape; pass through. */
  normalizeEvent(event) {
    return event;
  }
}
/**
 * Provider adapter for the OpenAI Chat Completions API
 * (POST {baseUrl}/v1/chat/completions). Normalizes OpenAI SSE chunks
 * into the event shapes shared by the other adapters
 * (message_start / content_block_* / message_stop).
 */
class GPTAdapter {
  id = "gpt";
  name = "OpenAI GPT";
  version = "1.0.0";
  // Capabilities advertised under the "gpt" extension namespace.
  extensions = {
    gpt: {
      codeInterpreter: { enabled: true },
      browsing: { enabled: true },
      imageGeneration: {
        enabled: true,
        size: "1024x1024",
        quality: "standard"
      },
      functionCalling: { mode: "auto" },
      jsonMode: { enabled: true }
    }
  };
  apiKey;
  baseUrl;
  /**
   * @param {{ apiKey: string, baseUrl?: string }} config - apiKey is
   *   required; baseUrl defaults to the public OpenAI endpoint.
   */
  constructor(config) {
    this.apiKey = config.apiKey;
    this.baseUrl = config.baseUrl || "https://api.openai.com";
  }
  /** Only the "gpt" extension namespace is handled by this adapter. */
  supportsExtension(extension) {
    return extension === "gpt";
  }
  /**
   * Flattens provider-agnostic messages into OpenAI chat messages.
   * Non-text content blocks are dropped; text blocks are concatenated.
   */
  toOpenAIMessages(messages) {
    return messages.map((m) => ({
      role: m.role,
      content: typeof m.content === "string" ? m.content : m.content.filter((c) => c.type === "text").map((c) => c.text).join("")
    }));
  }
  /**
   * Builds the Chat Completions body shared by stream() and complete().
   * BUGFIX/consistency: complete() previously ignored options.system,
   * topP, stopSequences, jsonMode and tools that stream() honored; both
   * paths now go through this single builder.
   */
  buildRequestBody(messages, options) {
    const openaiMessages = this.toOpenAIMessages(messages);
    if (options.system) {
      // Chat Completions has no top-level system field; prepend a
      // system-role message instead.
      openaiMessages.unshift({ role: "system", content: options.system });
    }
    const body = {
      model: options.model,
      max_tokens: options.maxTokens,
      messages: openaiMessages
    };
    if (options.temperature !== void 0) {
      body.temperature = options.temperature;
    }
    if (options.topP !== void 0) {
      body.top_p = options.topP;
    }
    if (options.stopSequences?.length) {
      body.stop = options.stopSequences;
    }
    if (options.extensions?.jsonMode?.enabled) {
      body.response_format = { type: "json_object" };
    }
    if (options.tools?.length) {
      body.tools = options.tools.map((t) => ({
        type: "function",
        function: {
          name: t.name,
          description: t.description,
          parameters: t.inputSchema
        }
      }));
      if (options.extensions?.functionCalling?.mode) {
        body.tool_choice = options.extensions.functionCalling.mode;
      }
    }
    return body;
  }
  /**
   * Streams a completion as normalized events. On HTTP failure this
   * yields a single `error` event instead of throwing.
   */
  async *stream(messages, options) {
    const body = { ...this.buildRequestBody(messages, options), stream: true };
    const response = await fetch(`${this.baseUrl}/v1/chat/completions`, {
      method: "POST",
      headers: {
        "Content-Type": "application/json",
        Authorization: `Bearer ${this.apiKey}`
      },
      body: JSON.stringify(body)
    });
    if (!response.ok) {
      yield {
        type: "error",
        error: {
          type: "api_error",
          message: `GPT API error: ${response.status} ${response.statusText}`
        }
      };
      return;
    }
    yield {
      type: "message_start",
      message: {
        id: `msg_${Date.now()}`,
        model: options.model,
        role: "assistant"
      }
    };
    const reader = response.body?.getReader();
    if (!reader) return;
    const decoder = new TextDecoder();
    let buffer = "";
    const blockIndex = 0;
    let blockStarted = false;
    while (true) {
      const { done, value } = await reader.read();
      if (done) break;
      buffer += decoder.decode(value, { stream: true });
      const lines = buffer.split("\n");
      buffer = lines.pop() || ""; // keep the trailing partial line for the next chunk
      for (const line of lines) {
        if (!line.startsWith("data: ")) continue;
        const data = line.slice(6);
        if (data === "[DONE]") {
          if (blockStarted) {
            yield { type: "content_block_stop", index: blockIndex };
          }
          yield { type: "message_stop" };
          return;
        }
        try {
          const parsed = JSON.parse(data);
          const delta = parsed.choices?.[0]?.delta;
          if (delta?.content && !blockStarted) {
            blockStarted = true;
            yield {
              type: "content_block_start",
              index: blockIndex,
              contentBlock: { type: "text", text: "" }
            };
          }
          if (delta?.content) {
            yield {
              type: "content_block_delta",
              index: blockIndex,
              delta: { type: "text_delta", text: delta.content }
            };
          }
        } catch {
          // Ignore malformed/partial SSE payloads.
        }
      }
    }
    // BUGFIX: if the connection closes without a terminal "[DONE]"
    // sentinel, still close any open block and end the message so
    // consumers are not left hanging (matches GeminiAdapter behavior).
    if (blockStarted) {
      yield { type: "content_block_stop", index: blockIndex };
    }
    yield { type: "message_stop" };
  }
  /**
   * Non-streaming completion.
   * @returns {{ content: Array, usage: { inputTokens: number, outputTokens: number }, stopReason: string }}
   * @throws {Error} on any non-2xx HTTP response.
   */
  async complete(messages, options) {
    const body = this.buildRequestBody(messages, options);
    const response = await fetch(`${this.baseUrl}/v1/chat/completions`, {
      method: "POST",
      headers: {
        "Content-Type": "application/json",
        Authorization: `Bearer ${this.apiKey}`
      },
      body: JSON.stringify(body)
    });
    if (!response.ok) {
      throw new Error(
        `GPT API error: ${response.status} ${response.statusText}`
      );
    }
    const data = await response.json();
    const choice = data.choices?.[0];
    return {
      content: [{ type: "text", text: choice?.message?.content ?? "" }],
      usage: {
        inputTokens: data.usage?.prompt_tokens ?? 0,
        outputTokens: data.usage?.completion_tokens ?? 0
      },
      stopReason: choice?.finish_reason ?? "stop"
    };
  }
  /** Credential probe against the models endpoint; false on network error. */
  async validateConnection() {
    try {
      const response = await fetch(`${this.baseUrl}/v1/models`, {
        headers: { Authorization: `Bearer ${this.apiKey}` }
      });
      return response.ok;
    } catch {
      return false;
    }
  }
  /** Static list of known model ids (no models endpoint is queried). */
  async listModels() {
    return [
      "gpt-4o",
      "gpt-4o-mini",
      "gpt-4-turbo",
      "gpt-4",
      "gpt-3.5-turbo",
      "o1",
      "o1-mini",
      "o1-preview"
    ];
  }
}
/**
 * Provider adapter for the Google Gemini generateContent API.
 * SECURITY: the API key is sent via the `x-goog-api-key` header instead
 * of the former `?key=` query parameter, so it no longer leaks into
 * URLs, proxy logs, or browser history.
 */
class GeminiAdapter {
  id = "gemini";
  name = "Google Gemini";
  version = "1.0.0";
  // Capabilities advertised under the "gemini" extension namespace.
  extensions = {
    gemini: {
      grounding: { enabled: true, dynamicThreshold: 0.3 },
      multimodal: {
        videoEnabled: true,
        audioEnabled: true,
        maxVideoDurationSec: 60
      },
      codeExecution: { enabled: true }
    }
  };
  apiKey;
  baseUrl;
  /**
   * @param {{ apiKey: string, baseUrl?: string }} config - apiKey is
   *   required; baseUrl defaults to the public v1beta endpoint.
   */
  constructor(config) {
    this.apiKey = config.apiKey;
    this.baseUrl = config.baseUrl || "https://generativelanguage.googleapis.com/v1beta";
  }
  /** Only the "gemini" extension namespace is handled by this adapter. */
  supportsExtension(extension) {
    return extension === "gemini";
  }
  /**
   * Maps provider-agnostic messages to Gemini `contents`. System
   * messages are excluded here (they belong in `systemInstruction`);
   * "assistant" maps to role "model", everything else to "user".
   * Non-text content blocks are dropped; text blocks are concatenated.
   */
  buildContents(messages) {
    return messages.filter((m) => m.role !== "system").map((m) => ({
      role: m.role === "assistant" ? "model" : "user",
      parts: [
        {
          text: typeof m.content === "string" ? m.content : m.content.filter((c) => c.type === "text").map((c) => c.text).join("")
        }
      ]
    }));
  }
  /**
   * Resolves the system instruction from options.system (preferred) or
   * the first system-role message; undefined when neither is present.
   */
  buildSystemInstruction(messages, options) {
    const systemMsg = messages.find((m) => m.role === "system");
    if (!systemMsg && !options.system) return undefined;
    return {
      parts: [
        {
          text: options.system || (typeof systemMsg?.content === "string" ? systemMsg.content : "")
        }
      ]
    };
  }
  /**
   * Streams a completion as normalized events. On HTTP failure this
   * yields a single `error` event instead of throwing.
   */
  async *stream(messages, options) {
    const body = {
      contents: this.buildContents(messages),
      generationConfig: {
        maxOutputTokens: options.maxTokens,
        temperature: options.temperature,
        topP: options.topP
      }
    };
    const systemInstruction = this.buildSystemInstruction(messages, options);
    if (systemInstruction) {
      body.systemInstruction = systemInstruction;
    }
    if (options.extensions?.grounding?.enabled) {
      body.tools = [
        {
          googleSearchRetrieval: {
            dynamicRetrievalConfig: {
              mode: "MODE_DYNAMIC",
              dynamicThreshold: options.extensions.grounding.dynamicThreshold
            }
          }
        }
      ];
    }
    if (options.extensions?.codeExecution?.enabled) {
      body.tools = [
        ...body.tools || [],
        { codeExecution: {} }
      ];
    }
    if (options.extensions?.safetySettings) {
      body.safetySettings = options.extensions.safetySettings;
    }
    const url = `${this.baseUrl}/models/${options.model}:streamGenerateContent?alt=sse`;
    const response = await fetch(url, {
      method: "POST",
      headers: {
        "Content-Type": "application/json",
        "x-goog-api-key": this.apiKey
      },
      body: JSON.stringify(body)
    });
    if (!response.ok) {
      yield {
        type: "error",
        error: {
          type: "api_error",
          message: `Gemini API error: ${response.status} ${response.statusText}`
        }
      };
      return;
    }
    yield {
      type: "message_start",
      message: {
        id: `msg_${Date.now()}`,
        model: options.model,
        role: "assistant"
      }
    };
    const reader = response.body?.getReader();
    if (!reader) return;
    const decoder = new TextDecoder();
    let buffer = "";
    const blockIndex = 0;
    let blockStarted = false;
    while (true) {
      const { done, value } = await reader.read();
      if (done) break;
      buffer += decoder.decode(value, { stream: true });
      const lines = buffer.split("\n");
      buffer = lines.pop() || ""; // keep the trailing partial line for the next chunk
      for (const line of lines) {
        if (!line.startsWith("data: ")) continue;
        try {
          const parsed = JSON.parse(line.slice(6));
          const text = parsed.candidates?.[0]?.content?.parts?.[0]?.text;
          if (text && !blockStarted) {
            blockStarted = true;
            yield {
              type: "content_block_start",
              index: blockIndex,
              contentBlock: { type: "text", text: "" }
            };
          }
          if (text) {
            yield {
              type: "content_block_delta",
              index: blockIndex,
              delta: { type: "text_delta", text }
            };
          }
        } catch {
          // Ignore malformed/partial SSE payloads.
        }
      }
    }
    if (blockStarted) {
      yield { type: "content_block_stop", index: blockIndex };
    }
    yield { type: "message_stop" };
  }
  /**
   * Non-streaming completion.
   * BUGFIX: system messages were previously filtered out of `contents`
   * and then dropped entirely; they are now applied as
   * `systemInstruction`, matching stream(). topP is also honored here.
   * @returns {{ content: Array, usage: { inputTokens: number, outputTokens: number }, stopReason: string }}
   * @throws {Error} on any non-2xx HTTP response.
   */
  async complete(messages, options) {
    const body = {
      contents: this.buildContents(messages),
      generationConfig: {
        maxOutputTokens: options.maxTokens,
        temperature: options.temperature,
        topP: options.topP
      }
    };
    const systemInstruction = this.buildSystemInstruction(messages, options);
    if (systemInstruction) {
      body.systemInstruction = systemInstruction;
    }
    const url = `${this.baseUrl}/models/${options.model}:generateContent`;
    const response = await fetch(url, {
      method: "POST",
      headers: {
        "Content-Type": "application/json",
        "x-goog-api-key": this.apiKey
      },
      body: JSON.stringify(body)
    });
    if (!response.ok) {
      throw new Error(
        `Gemini API error: ${response.status} ${response.statusText}`
      );
    }
    const data = await response.json();
    const candidate = data.candidates?.[0];
    const text = candidate?.content?.parts?.[0]?.text ?? "";
    return {
      content: [{ type: "text", text }],
      usage: {
        inputTokens: data.usageMetadata?.promptTokenCount ?? 0,
        outputTokens: data.usageMetadata?.candidatesTokenCount ?? 0
      },
      stopReason: candidate?.finishReason ?? "STOP"
    };
  }
  /** Credential probe against the models endpoint; false on network error. */
  async validateConnection() {
    try {
      const response = await fetch(`${this.baseUrl}/models`, {
        headers: { "x-goog-api-key": this.apiKey }
      });
      return response.ok;
    } catch {
      return false;
    }
  }
  /** Static list of known model ids (no models endpoint is queried). */
  async listModels() {
    return [
      "gemini-2.0-flash-exp",
      "gemini-1.5-pro",
      "gemini-1.5-flash",
      "gemini-1.5-flash-8b",
      "gemini-1.0-pro"
    ];
  }
}
/**
 * Factory for provider adapters.
 * @param {"claude"|"gpt"|"gemini"} id - adapter identifier.
 * @param {{ apiKey: string, baseUrl?: string }} config - adapter config.
 * @returns {ClaudeAdapter|GPTAdapter|GeminiAdapter}
 * @throws {Error} when `id` is not a known provider id.
 */
function createProvider(id, config) {
  const factories = {
    claude: () => new ClaudeAdapter(config),
    gpt: () => new GPTAdapter(config),
    gemini: () => new GeminiAdapter(config)
  };
  // Object.hasOwn guards against inherited keys like "toString".
  if (!Object.hasOwn(factories, id)) {
    throw new Error(`Unknown provider: ${id}`);
  }
  return factories[id]();
}
/**
 * In-memory registry of provider adapters, keyed by adapter `id`.
 * Registering under an existing id replaces the previous adapter.
 */
class ProviderRegistry {
  providers = new Map();
  /** Add (or replace) an adapter under its own `id`. */
  register(adapter) {
    this.providers.set(adapter.id, adapter);
  }
  /** Look up an adapter by id; `undefined` when not registered. */
  get(id) {
    return this.providers.get(id);
  }
  /** All registered adapters, in insertion order. */
  list() {
    return [...this.providers.values()];
  }
  /** Whether an adapter is registered under `id`. */
  has(id) {
    return this.providers.has(id);
  }
  /**
   * Find providers that support a specific extension
   */
  findByExtension(extension) {
    const matches = [];
    for (const adapter of this.providers.values()) {
      if (adapter.supportsExtension(extension)) {
        matches.push(adapter);
      }
    }
    return matches;
  }
}
// Shared default registry instance. Note: no adapters are registered
// here — callers are expected to register/create adapters themselves.
const providerRegistry = new ProviderRegistry();
export {
ClaudeAdapter,
GPTAdapter,
GeminiAdapter,
ProviderRegistry,
createProvider,
providerRegistry
};
//# sourceMappingURL=provider-adapter.js.map