@aliyun-rds/supabase-mcp-server
Version:
MCP (Model Context Protocol) server for self-hosted Supabase instances, allowing AI assistants to interact with your Supabase database.
1,308 lines (1,285 loc) • 110 kB
JavaScript
#!/usr/bin/env node
// src/index.ts
import { Command } from "commander";
import { Server } from "@modelcontextprotocol/sdk/server/index.js";
import { StdioServerTransport } from "@modelcontextprotocol/sdk/server/stdio.js";
import {
CallToolRequestSchema,
ErrorCode,
ListToolsRequestSchema,
McpError
} from "@modelcontextprotocol/sdk/types.js";
// src/client/index.ts
import { createClient } from "@supabase/supabase-js";
import { Pool } from "pg";
var SelfhostedSupabaseClient = class _SelfhostedSupabaseClient {
options;
supabase;
pgPool = null;
// Lazily initialized pg pool for direct DB access
rpcFunctionExists = false;
// SQL definition for the helper function
static CREATE_EXECUTE_SQL_FUNCTION = `
CREATE OR REPLACE FUNCTION public.execute_sql(query text, read_only boolean DEFAULT false)
RETURNS jsonb -- Using jsonb is generally preferred over json
LANGUAGE plpgsql
AS $$
DECLARE
result jsonb;
BEGIN
-- Note: SET TRANSACTION READ ONLY might not behave as expected within a function
-- depending on the outer transaction state. Handle read-only logic outside if needed.
-- Execute the dynamic query and aggregate results into a JSONB array
EXECUTE 'SELECT COALESCE(jsonb_agg(t), ''[]''::jsonb) FROM (' || query || ') t' INTO result;
RETURN result;
EXCEPTION
WHEN others THEN
-- Rethrow the error with context, including the original SQLSTATE
RAISE EXCEPTION 'Error executing SQL (SQLSTATE: %): % ', SQLSTATE, SQLERRM;
END;
$$;
`;
// SQL to grant permissions
static GRANT_EXECUTE_SQL_FUNCTION = `
GRANT EXECUTE ON FUNCTION public.execute_sql(text, boolean) TO authenticated;
-- Optionally grant to anon if needed (uncomment if required):
-- GRANT EXECUTE ON FUNCTION public.execute_sql(text, boolean) TO anon;
`;
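// Once the helper exists, PostgREST exposes it as an RPC endpoint
// (typically POST /rest/v1/rpc/execute_sql on a standard Supabase setup).
// Illustrative call, mirroring executeSqlViaRpc() below:
//   const { data, error } = await supabase.rpc("execute_sql", {
//     query: "SELECT 1 AS one",
//     read_only: true
//   });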
/**
* Creates an instance of SelfhostedSupabaseClient.
* Note: Call initialize() after creating the instance to check for RPC functions.
* @param options - Configuration options for the client.
*/
constructor(options) {
if (!options.supabaseUrl || !options.supabaseAnonKey) {
throw new Error("Supabase URL and Anon Key are required.");
}
this.options = options;
const apiKey = options.supabaseServiceRoleKey || options.supabaseAnonKey;
this.supabase = createClient(options.supabaseUrl, apiKey, options.supabaseClientOptions);
}
/**
* Factory function to create and asynchronously initialize the client.
* Checks for the existence of the helper RPC function.
*/
static async create(options) {
const client = new _SelfhostedSupabaseClient(options);
await client.initialize();
return client;
}
/**
* Initializes the client by checking for the required RPC function.
* Attempts to create the function if it doesn't exist and a service role key is provided.
*/
async initialize() {
console.error("Initializing SelfhostedSupabaseClient...");
try {
await this.checkAndCreateRpcFunction();
console.error(`RPC function 'public.execute_sql' status: ${this.rpcFunctionExists ? "Available" : "Unavailable"}`);
} catch (error) {
console.error("Error during client initialization:", error);
}
console.error("Initialization complete.");
}
// --- Public Methods ---
/**
* Executes SQL using the preferred RPC method.
*/
async executeSqlViaRpc(query, readOnly = false) {
if (!this.rpcFunctionExists) {
console.error("Attempted to call executeSqlViaRpc, but RPC function is not available.");
return {
error: {
message: "execute_sql RPC function not found or client not properly initialized.",
code: "MCP_CLIENT_ERROR"
}
};
}
console.error(`Executing via RPC (readOnly: ${readOnly}): ${query.substring(0, 100)}...`);
try {
const { data, error } = await this.supabase.rpc("execute_sql", {
query,
read_only: readOnly
});
if (error) {
console.error("Error executing SQL via RPC:", error);
return {
error: {
message: error.message,
code: error.code,
// Propagate Supabase/PostgREST error code
details: error.details,
hint: error.hint
}
};
}
if (Array.isArray(data)) {
return data;
}
console.error("Unexpected response format from execute_sql RPC:", data);
return {
error: {
message: "Unexpected response format from execute_sql RPC. Expected JSON array.",
code: "MCP_RPC_FORMAT_ERROR"
}
};
} catch (rpcError) {
const errorMessage = rpcError instanceof Error ? rpcError.message : String(rpcError);
console.error("Exception during executeSqlViaRpc call:", rpcError);
return {
error: {
message: `Exception during RPC call: ${errorMessage}`,
code: "MCP_RPC_EXCEPTION"
}
};
}
}
/**
* Executes SQL directly against the database using the pg library.
* Requires DATABASE_URL to be configured.
* Useful for simple queries when RPC is unavailable or direct access is preferred.
* NOTE: Does not support transactions or parameterization directly.
* Consider executeTransactionWithPg for more complex operations.
*/
async executeSqlWithPg(query) {
if (!this.options.databaseUrl) {
return { error: { message: "DATABASE_URL is not configured. Cannot execute SQL directly.", code: "MCP_CONFIG_ERROR" } };
}
await this.ensurePgPool();
if (!this.pgPool) {
return { error: { message: "pg Pool not available after initialization attempt.", code: "MCP_POOL_ERROR" } };
}
let client;
try {
client = await this.pgPool.connect();
console.error(`Executing via pg: ${query.substring(0, 100)}...`);
const result = await client.query(query);
return result.rows;
} catch (dbError) {
const error = dbError instanceof Error ? dbError : new Error(String(dbError));
console.error("Error executing SQL with pg:", error);
const code = dbError?.code || "PG_ERROR";
return { error: { message: error.message, code } };
} finally {
client?.release();
}
}
/**
* Encodes special characters in the database URL to prevent parsing errors.
* Handles characters like # and $ that can cause issues with the pg library.
*/
encodeDatabaseUrl(url) {
try {
const parsedUrl = new URL(url);
if (parsedUrl.username) {
parsedUrl.username = encodeURIComponent(parsedUrl.username);
}
if (parsedUrl.password) {
parsedUrl.password = encodeURIComponent(parsedUrl.password);
}
return parsedUrl.toString();
} catch (error) {
console.error("Database URL contains special characters. Applying basic encoding (excluding @).");
return url.replace(/#/g, "%23").replace(/\$/g, "%24");
}
}
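// Example: a password containing "#" or "$" would otherwise break URL parsing.
// Illustrative result of the fallback encoding:
//   encodeDatabaseUrl("postgresql://postgres:p#ss$word@db:5432/postgres")
//   // -> "postgresql://postgres:p%23ss%24word@db:5432/postgres"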
/**
* Ensures the pg connection pool is initialized.
* Should be called before accessing this.pgPool.
*/
async ensurePgPool() {
if (this.pgPool) return;
if (!this.options.databaseUrl) {
throw new Error("DATABASE_URL is not configured. Cannot initialize pg pool.");
}
console.error("Initializing pg pool...");
const encodedDbUrl = this.encodeDatabaseUrl(this.options.databaseUrl);
this.pgPool = new Pool({ connectionString: encodedDbUrl });
this.pgPool.on("error", (err, client) => {
console.error("PG Pool Error: Unexpected error on idle client", err);
});
try {
const client = await this.pgPool.connect();
console.error("pg pool connected successfully.");
client.release();
} catch (err) {
console.error("Failed to connect pg pool:", err);
await this.pgPool.end();
this.pgPool = null;
throw new Error(`Failed to connect pg pool: ${err instanceof Error ? err.message : String(err)}`);
}
}
/**
* Executes a series of operations within a single database transaction using the pg library.
* Requires DATABASE_URL to be configured.
* @param callback A function that receives a connected pg client and performs queries.
* It should return a promise that resolves on success or rejects on failure.
* The transaction will be committed if the promise resolves,
* and rolled back if it rejects.
*/
async executeTransactionWithPg(callback) {
if (!this.options.databaseUrl) {
throw new Error("DATABASE_URL is not configured. Cannot execute transaction directly.");
}
await this.ensurePgPool();
if (!this.pgPool) {
throw new Error("pg Pool not available for transaction.");
}
const client = await this.pgPool.connect();
try {
await client.query("BEGIN");
console.error("BEGIN transaction");
const result = await callback(client);
await client.query("COMMIT");
console.error("COMMIT transaction");
return result;
} catch (error) {
console.error("Transaction Error - Rolling back:", error);
await client.query("ROLLBACK");
console.error("ROLLBACK transaction");
throw error;
} finally {
client.release();
}
}
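// Illustrative usage from a caller (a sketch; applyMigrationTool below uses this same
// pattern against a created client instance):
//   await client.executeTransactionWithPg(async (pgClient) => {
//     await pgClient.query("CREATE TABLE IF NOT EXISTS public.example (id int)");
//     await pgClient.query("INSERT INTO public.example (id) VALUES ($1)", [1]);
//   }); // commits if the callback resolves, rolls back if it rejects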
// --- Helper/Private Methods ---
/**
* Executes SQL using the postgres-meta API (/pg/query endpoint).
* This endpoint is exposed by self-hosted Supabase instances and doesn't require a direct database connection.
*/
async executeSqlViaPostgresMeta(query) {
const url = `${this.options.supabaseUrl}/pg/query`;
const apiKey = this.options.supabaseServiceRoleKey || this.options.supabaseAnonKey;
const response = await fetch(url, {
method: "POST",
headers: {
"Content-Type": "application/json",
"apikey": apiKey
},
body: JSON.stringify({ query })
});
if (!response.ok) {
const errorText = await response.text();
throw new Error(`postgres-meta API error: ${response.status} ${errorText}`);
}
return await response.json();
}
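// Illustrative request shape (assumes the self-hosted gateway routes /pg/query to
// postgres-meta, as in the default self-hosted docker setup):
//   POST {supabaseUrl}/pg/query
//   headers: { "Content-Type": "application/json", "apikey": "<service role or anon key>" }
//   body:    { "query": "SELECT 1" }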
async checkAndCreateRpcFunction() {
console.error("Checking for public.execute_sql RPC function...");
if (!this.options.supabaseServiceRoleKey) {
console.error("Cannot check/create 'public.execute_sql': supabaseServiceRoleKey not provided.");
this.rpcFunctionExists = false;
return;
}
try {
console.error("Checking if execute_sql function exists using postgres-meta API...");
const checkQuery = `
SELECT EXISTS (
SELECT 1
FROM pg_proc p
JOIN pg_namespace n ON p.pronamespace = n.oid
WHERE n.nspname = 'public'
AND p.proname = 'execute_sql'
) as function_exists;
`;
const checkResult = await this.executeSqlViaPostgresMeta(checkQuery);
if (checkResult && checkResult.length > 0 && checkResult[0].function_exists) {
console.error("'public.execute_sql' function found.");
this.rpcFunctionExists = true;
return;
}
console.error("'public.execute_sql' function not found. Creating...");
try {
console.error("Creating 'public.execute_sql' function using postgres-meta API...");
await this.executeSqlViaPostgresMeta(_SelfhostedSupabaseClient.CREATE_EXECUTE_SQL_FUNCTION);
await this.executeSqlViaPostgresMeta(_SelfhostedSupabaseClient.GRANT_EXECUTE_SQL_FUNCTION);
console.error("'public.execute_sql' function created and permissions granted successfully.");
console.error("Notifying PostgREST to reload schema cache...");
await this.executeSqlViaPostgresMeta("NOTIFY pgrst, 'reload schema'");
console.error("PostgREST schema reload notification sent.");
this.rpcFunctionExists = true;
} catch (postgresMetaError) {
console.error("Failed to create function via postgres-meta API:", postgresMetaError);
if (this.options.databaseUrl) {
try {
console.error("Falling back to direct DB connection...");
await this.executeSqlWithPg(_SelfhostedSupabaseClient.CREATE_EXECUTE_SQL_FUNCTION);
await this.executeSqlWithPg(_SelfhostedSupabaseClient.GRANT_EXECUTE_SQL_FUNCTION);
await this.executeSqlWithPg("NOTIFY pgrst, 'reload schema'");
console.error("'public.execute_sql' function created via direct DB connection.");
this.rpcFunctionExists = true;
} catch (pgError) {
const errorMessage = pgError instanceof Error ? pgError.message : String(pgError);
console.error("Failed to create function via direct DB connection:", pgError);
this.rpcFunctionExists = false;
console.error("RPC function creation failed. You can manually install using the install_execute_sql_function tool.");
}
} else {
const errorMessage = postgresMetaError instanceof Error ? postgresMetaError.message : String(postgresMetaError);
console.error("No fallback available (databaseUrl not provided)");
console.error("RPC function creation failed. You can manually install using the install_execute_sql_function tool.");
this.rpcFunctionExists = false;
}
}
} catch (err) {
const errorMessage = err instanceof Error ? err.message : String(err);
console.error("Exception during RPC function check/creation:", err);
console.error("RPC function check failed, but continuing initialization...");
console.error("You can manually install the execute_sql function using the install_execute_sql_function tool.");
this.rpcFunctionExists = false;
}
}
// --- Getters ---
getSupabaseUrl() {
return this.options.supabaseUrl;
}
getAnonKey() {
return this.options.supabaseAnonKey;
}
getServiceRoleKey() {
return this.options.supabaseServiceRoleKey;
}
/**
* Gets the configured JWT secret, if provided.
*/
getJwtSecret() {
return this.options.jwtSecret;
}
/**
* Gets the configured direct database connection URL, if provided.
*/
getDbUrl() {
return this.options.databaseUrl;
}
/**
* Checks if the direct database connection (pg) is configured.
*/
isPgAvailable() {
return !!this.options.databaseUrl;
}
/**
* Checks if the execute_sql RPC function is available.
*/
isRpcAvailable() {
return this.rpcFunctionExists;
}
};
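// Illustrative client construction (a sketch; all values are placeholders):
//   const client = await SelfhostedSupabaseClient.create({
//     supabaseUrl: "http://localhost:8000",
//     supabaseAnonKey: "<anon key>",
//     supabaseServiceRoleKey: "<service role key>", // optional, needed for admin features
//     databaseUrl: "postgresql://postgres:postgres@localhost:5432/postgres", // optional, enables direct pg access
//     jwtSecret: "<jwt secret>" // optional
//   });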
// src/tools/list_tables.ts
import { z as z2 } from "zod";
// src/tools/utils.ts
import { z } from "zod";
import { exec } from "node:child_process";
import { promisify } from "node:util";
var execAsync = promisify(exec);
function handleSqlResponse(result, schema) {
if ("error" in result) {
throw new Error(`SQL Error (${result.error.code}): ${result.error.message}`);
}
try {
return schema.parse(result);
} catch (validationError) {
if (validationError instanceof z.ZodError) {
throw new Error(`Schema validation failed: ${validationError.errors.map((e) => `${e.path.join(".")}: ${e.message}`).join(", ")}`);
}
throw new Error(`Unexpected validation error: ${validationError}`);
}
}
async function runExternalCommand(command) {
try {
const { stdout, stderr } = await execAsync(command);
return { stdout, stderr, error: null };
} catch (error) {
const execError = error;
return {
stdout: execError.stdout || "",
stderr: execError.stderr || execError.message,
// Use message if stderr is empty
error: execError
};
}
}
async function executeSqlWithFallback(client, sql, readOnly = true) {
if (client.isPgAvailable()) {
console.info("Using direct database connection (bypassing JWT)...");
return await client.executeSqlWithPg(sql);
}
console.info("Falling back to RPC method...");
return await client.executeSqlViaRpc(sql, readOnly);
}
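// Resolution order used by the tools below: prefer the direct pg connection when
// DATABASE_URL is configured, otherwise fall back to the execute_sql RPC function.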
// src/tools/list_tables.ts
var ListTablesOutputSchema = z2.array(z2.object({
schema: z2.string(),
name: z2.string(),
comment: z2.string().nullable().optional()
// Add comment if available
}));
var ListTablesInputSchema = z2.object({
// No specific input needed for listing tables
// Optional: add schema filter later if needed
// schema: z.string().optional().describe('Filter tables by schema name.'),
});
var mcpInputSchema = {
type: "object",
properties: {},
required: []
};
var listTablesTool = {
name: "list_tables",
description: "Lists all accessible tables in the connected database, grouped by schema.",
inputSchema: ListTablesInputSchema,
// Use defined schema
mcpInputSchema,
// Add the static JSON schema for MCP
outputSchema: ListTablesOutputSchema,
// Use explicit types for input and context
execute: async (input, context) => {
const client = context.selfhostedClient;
console.error("Listing tables using Supabase REST API introspection...");
try {
if (client.isPgAvailable() || client.isRpcAvailable()) {
const listTablesSql = `
SELECT
table_schema as schema,
table_name as name,
NULL as comment
FROM
information_schema.tables
WHERE
table_type = 'BASE TABLE'
AND table_schema NOT IN ('pg_catalog', 'information_schema', 'pg_toast')
AND table_schema NOT LIKE 'pg_temp_%'
AND table_schema NOT LIKE 'pg_toast_temp_%'
AND table_schema NOT IN ('auth', 'storage', 'extensions', 'graphql', 'graphql_public', 'pgbouncer', 'realtime', 'supabase_functions', 'supabase_migrations', '_realtime')
ORDER BY
table_schema,
table_name
`;
const result = await executeSqlWithFallback(client, listTablesSql, true);
return handleSqlResponse(result, ListTablesOutputSchema);
}
context.log("Cannot list tables: requires either direct database access (DATABASE_URL) or execute_sql RPC function", "error");
throw new Error("Cannot list tables: requires either direct database access (DATABASE_URL) or execute_sql RPC function. Please provide DATABASE_URL or install the execute_sql RPC function in your Supabase instance.");
} catch (error) {
const errorMessage = error instanceof Error ? error.message : String(error);
context.log(`Error listing tables: ${errorMessage}`, "error");
throw error;
}
}
};
// src/tools/list_extensions.ts
import { z as z3 } from "zod";
var ListExtensionsOutputSchema = z3.array(z3.object({
name: z3.string(),
schema: z3.string(),
version: z3.string(),
description: z3.string().nullable().optional()
}));
var ListExtensionsInputSchema = z3.object({});
var mcpInputSchema2 = {
type: "object",
properties: {},
required: []
};
var listExtensionsTool = {
name: "list_extensions",
description: "Lists all installed PostgreSQL extensions in the database.",
inputSchema: ListExtensionsInputSchema,
mcpInputSchema: mcpInputSchema2,
outputSchema: ListExtensionsOutputSchema,
execute: async (input, context) => {
const client = context.selfhostedClient;
const listExtensionsSql = `
SELECT
pe.extname AS name,
pn.nspname AS schema,
pe.extversion AS version,
pd.description
FROM
pg_catalog.pg_extension pe
LEFT JOIN
pg_catalog.pg_namespace pn ON pn.oid = pe.extnamespace
LEFT JOIN
pg_catalog.pg_description pd ON pd.objoid = pe.oid AND pd.classoid = 'pg_catalog.pg_extension'::regclass
WHERE
pe.extname != 'plpgsql' -- Exclude the default plpgsql extension
ORDER BY
pe.extname
`;
const result = await executeSqlWithFallback(client, listExtensionsSql, true);
return handleSqlResponse(result, ListExtensionsOutputSchema);
}
};
// src/tools/list_migrations.ts
import { z as z4 } from "zod";
var ListMigrationsOutputSchema = z4.array(z4.object({
version: z4.string(),
name: z4.string(),
inserted_at: z4.string()
// Keep as string from DB
}));
var ListMigrationsInputSchema = z4.object({});
var mcpInputSchema3 = {
type: "object",
properties: {},
required: []
};
var listMigrationsTool = {
name: "list_migrations",
description: "Lists applied database migrations recorded in supabase_migrations.schema_migrations table.",
inputSchema: ListMigrationsInputSchema,
mcpInputSchema: mcpInputSchema3,
outputSchema: ListMigrationsOutputSchema,
execute: async (input, context) => {
const client = context.selfhostedClient;
const listMigrationsSql = `
SELECT
version,
name,
inserted_at
FROM
supabase_migrations.schema_migrations
ORDER BY
version
`;
const result = await executeSqlWithFallback(client, listMigrationsSql, true);
return handleSqlResponse(result, ListMigrationsOutputSchema);
}
};
// src/tools/apply_migration.ts
import { z as z5 } from "zod";
var ApplyMigrationInputSchema = z5.object({
version: z5.string().describe("The migration version string (e.g., '20240101120000')."),
name: z5.string().optional().describe("An optional descriptive name for the migration."),
sql: z5.string().describe("The SQL DDL content of the migration.")
});
var ApplyMigrationOutputSchema = z5.object({
success: z5.boolean(),
version: z5.string(),
message: z5.string().optional()
});
var mcpInputSchema4 = {
type: "object",
properties: {
version: { type: "string", description: "The migration version string (e.g., '20240101120000')." },
name: { type: "string", description: "An optional descriptive name for the migration." },
sql: { type: "string", description: "The SQL DDL content of the migration." }
},
required: ["version", "sql"]
};
var applyMigrationTool = {
name: "apply_migration",
description: "Applies a SQL migration script and records it in the supabase_migrations.schema_migrations table within a transaction. Requires direct database connection (DATABASE_URL).",
inputSchema: ApplyMigrationInputSchema,
mcpInputSchema: mcpInputSchema4,
outputSchema: ApplyMigrationOutputSchema,
execute: async (input, context) => {
const client = context.selfhostedClient;
try {
if (!client.isPgAvailable()) {
throw new Error("Direct database connection (DATABASE_URL) is required for applying migrations but is not configured or available.");
}
await client.executeTransactionWithPg(async (pgClient) => {
console.error(`Executing migration SQL for version ${input.version}...`);
await pgClient.query(input.sql);
console.error("Migration SQL executed successfully.");
console.error(`Recording migration version ${input.version} in schema_migrations...`);
await pgClient.query(
"INSERT INTO supabase_migrations.schema_migrations (version, name) VALUES ($1, $2);",
[input.version, input.name ?? ""]
);
console.error(`Migration version ${input.version} recorded.`);
});
return {
success: true,
version: input.version,
message: `Migration ${input.version} applied successfully.`
};
} catch (error) {
const errorMessage = error instanceof Error ? error.message : String(error);
console.error(`Failed to apply migration ${input.version}:`, errorMessage);
throw new Error(`Failed to apply migration ${input.version}: ${errorMessage}`);
}
}
};
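// Illustrative MCP tool arguments for apply_migration (all values are placeholders):
//   {
//     "version": "20240101120000",
//     "name": "create_profiles_table",
//     "sql": "CREATE TABLE public.profiles (id uuid PRIMARY KEY, username text);"
//   }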
// src/tools/execute_sql.ts
import { z as z6 } from "zod";
var ExecuteSqlInputSchema = z6.object({
sql: z6.string().describe("The SQL query to execute."),
read_only: z6.boolean().optional().default(false).describe("Hint for the RPC function indicating whether the query is read-only (best effort).")
// Future enhancement: Add option to force direct connection?
// use_direct_connection: z.boolean().optional().default(false).describe('Attempt to use direct DB connection instead of RPC.'),
});
var ExecuteSqlOutputSchema = z6.array(z6.unknown()).describe("The array of rows returned by the SQL query.");
var mcpInputSchema5 = {
type: "object",
properties: {
sql: { type: "string", description: "The SQL query to execute." },
read_only: { type: "boolean", default: false, description: "Hint for the RPC function indicating whether the query is read-only (best effort)." }
},
required: ["sql"]
};
var executeSqlTool = {
name: "execute_sql",
description: "Executes an arbitrary SQL query against the database, using direct database connection when available or RPC function as fallback.",
inputSchema: ExecuteSqlInputSchema,
mcpInputSchema: mcpInputSchema5,
outputSchema: ExecuteSqlOutputSchema,
execute: async (input, context) => {
const client = context.selfhostedClient;
console.error(`Executing SQL (readOnly: ${input.read_only}): ${input.sql.substring(0, 100)}...`);
const result = await executeSqlWithFallback(client, input.sql, input.read_only);
return handleSqlResponse(result, ExecuteSqlOutputSchema);
}
};
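// Illustrative MCP tool arguments for execute_sql:
//   { "sql": "SELECT id, email FROM auth.users LIMIT 5", "read_only": true }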
// src/tools/get_database_connections.ts
import { z as z7 } from "zod";
var GetDbConnectionsOutputSchema = z7.array(z7.object({
datname: z7.string().nullable().describe("Database name"),
usename: z7.string().nullable().describe("User name"),
application_name: z7.string().nullable().describe("Application name (e.g., PostgREST, psql)"),
client_addr: z7.string().nullable().describe("Client IP address"),
backend_start: z7.string().nullable().describe("Time when the backend process started"),
state: z7.string().nullable().describe("Current connection state (e.g., active, idle)"),
query: z7.string().nullable().describe("Last or current query being executed"),
pid: z7.number().describe("Process ID of the backend")
}));
var GetDbConnectionsInputSchema = z7.object({});
var mcpInputSchema6 = {
type: "object",
properties: {},
required: []
};
var getDatabaseConnectionsTool = {
name: "get_database_connections",
description: "Retrieves information about active database connections from pg_stat_activity.",
inputSchema: GetDbConnectionsInputSchema,
mcpInputSchema: mcpInputSchema6,
outputSchema: GetDbConnectionsOutputSchema,
execute: async (input, context) => {
const client = context.selfhostedClient;
const getConnectionsSql = `
SELECT
pid,
datname,
usename,
application_name,
client_addr::text, -- Cast inet to text
backend_start::text, -- Cast timestamp to text
state,
query
FROM
pg_stat_activity
WHERE
backend_type = 'client backend' -- Exclude background workers, etc.
-- Optionally filter out self?
-- AND pid != pg_backend_pid()
ORDER BY
backend_start
`;
const result = await executeSqlWithFallback(client, getConnectionsSql, true);
return handleSqlResponse(result, GetDbConnectionsOutputSchema);
}
};
// src/tools/get_database_stats.ts
import { z as z8 } from "zod";
var GetDbStatsOutputSchema = z8.object({
database_stats: z8.array(z8.object({
datname: z8.string().nullable(),
numbackends: z8.number().nullable(),
xact_commit: z8.string().nullable(),
// bigint as string
xact_rollback: z8.string().nullable(),
// bigint as string
blks_read: z8.string().nullable(),
// bigint as string
blks_hit: z8.string().nullable(),
// bigint as string
tup_returned: z8.string().nullable(),
// bigint as string
tup_fetched: z8.string().nullable(),
// bigint as string
tup_inserted: z8.string().nullable(),
// bigint as string
tup_updated: z8.string().nullable(),
// bigint as string
tup_deleted: z8.string().nullable(),
// bigint as string
conflicts: z8.string().nullable(),
// bigint as string
temp_files: z8.string().nullable(),
// bigint as string
temp_bytes: z8.string().nullable(),
// bigint as string
deadlocks: z8.string().nullable(),
// bigint as string
checksum_failures: z8.string().nullable(),
// bigint as string
checksum_last_failure: z8.string().nullable(),
// timestamp as string
blk_read_time: z8.number().nullable(),
// double precision
blk_write_time: z8.number().nullable(),
// double precision
stats_reset: z8.string().nullable()
// timestamp as string
})).describe("Statistics per database from pg_stat_database"),
bgwriter_stats: z8.array(z8.object({
// Usually a single row
checkpoints_timed: z8.string().nullable(),
checkpoints_req: z8.string().nullable(),
checkpoint_write_time: z8.number().nullable(),
checkpoint_sync_time: z8.number().nullable(),
buffers_checkpoint: z8.string().nullable(),
buffers_clean: z8.string().nullable(),
maxwritten_clean: z8.string().nullable(),
buffers_backend: z8.string().nullable(),
buffers_backend_fsync: z8.string().nullable(),
buffers_alloc: z8.string().nullable(),
stats_reset: z8.string().nullable()
})).describe("Statistics from the background writer process from pg_stat_bgwriter")
});
var GetDbStatsInputSchema = z8.object({});
var mcpInputSchema7 = {
type: "object",
properties: {},
required: []
};
var getDatabaseStatsTool = {
name: "get_database_stats",
description: "Retrieves statistics about database activity and the background writer from pg_stat_database and pg_stat_bgwriter.",
inputSchema: GetDbStatsInputSchema,
mcpInputSchema: mcpInputSchema7,
outputSchema: GetDbStatsOutputSchema,
execute: async (input, context) => {
const client = context.selfhostedClient;
const getDbStatsSql = `
SELECT
datname,
numbackends,
xact_commit::text,
xact_rollback::text,
blks_read::text,
blks_hit::text,
tup_returned::text,
tup_fetched::text,
tup_inserted::text,
tup_updated::text,
tup_deleted::text,
conflicts::text,
temp_files::text,
temp_bytes::text,
deadlocks::text,
checksum_failures::text,
checksum_last_failure::text,
blk_read_time,
blk_write_time,
stats_reset::text
FROM pg_stat_database
`;
const getBgWriterStatsSql = `
SELECT
checkpoints_timed::text,
checkpoints_req::text,
checkpoint_write_time,
checkpoint_sync_time,
buffers_checkpoint::text,
buffers_clean::text,
maxwritten_clean::text,
buffers_backend::text,
buffers_backend_fsync::text,
buffers_alloc::text,
stats_reset::text
FROM pg_stat_bgwriter
`;
const [dbStatsResult, bgWriterStatsResult] = await Promise.all([
executeSqlWithFallback(client, getDbStatsSql, true),
executeSqlWithFallback(client, getBgWriterStatsSql, true)
]);
const dbStats = handleSqlResponse(dbStatsResult, GetDbStatsOutputSchema.shape.database_stats);
const bgWriterStats = handleSqlResponse(bgWriterStatsResult, GetDbStatsOutputSchema.shape.bgwriter_stats);
return {
database_stats: dbStats,
bgwriter_stats: bgWriterStats
};
}
};
// src/tools/get_project_url.ts
import { z as z9 } from "zod";
var GetProjectUrlInputSchema = z9.object({});
var GetProjectUrlOutputSchema = z9.object({
project_url: z9.string().url()
});
var mcpInputSchema8 = {
type: "object",
properties: {},
required: []
};
var getProjectUrlTool = {
name: "get_project_url",
description: "Returns the configured Supabase project URL for this server.",
inputSchema: GetProjectUrlInputSchema,
mcpInputSchema: mcpInputSchema8,
// Add static JSON schema
outputSchema: GetProjectUrlOutputSchema,
execute: async (input, context) => {
const client = context.selfhostedClient;
const url = client.getSupabaseUrl();
return { project_url: url };
}
};
// src/tools/get_anon_key.ts
import { z as z10 } from "zod";
var GetAnonKeyInputSchema = z10.object({});
var GetAnonKeyOutputSchema = z10.object({
anon_key: z10.string()
});
var mcpInputSchema9 = {
type: "object",
properties: {},
required: []
};
var getAnonKeyTool = {
name: "get_anon_key",
description: "Returns the configured Supabase anon key for this server.",
inputSchema: GetAnonKeyInputSchema,
mcpInputSchema: mcpInputSchema9,
outputSchema: GetAnonKeyOutputSchema,
execute: async (input, context) => {
const client = context.selfhostedClient;
const key = client.getAnonKey();
return { anon_key: key };
}
};
// src/tools/get_service_key.ts
import { z as z11 } from "zod";
var GetServiceKeyInputSchema = z11.object({});
var GetServiceKeyOutputSchema = z11.object({
service_key_status: z11.enum(["found", "not_configured"]).describe("Whether the service key was provided to the server."),
service_key: z11.string().optional().describe("The configured Supabase service role key (if configured).")
});
var mcpInputSchema10 = {
type: "object",
properties: {},
required: []
};
var getServiceKeyTool = {
name: "get_service_key",
description: "Returns the configured Supabase service role key for this server, if available.",
inputSchema: GetServiceKeyInputSchema,
mcpInputSchema: mcpInputSchema10,
outputSchema: GetServiceKeyOutputSchema,
execute: async (input, context) => {
const client = context.selfhostedClient;
const key = client.getServiceRoleKey();
if (key) {
return { service_key_status: "found", service_key: key };
}
return { service_key_status: "not_configured" };
}
};
// src/tools/generate_typescript_types.ts
import { z as z12 } from "zod";
import { writeFileSync } from "fs";
import { resolve, dirname } from "path";
import { mkdirSync } from "fs";
function normalizeOutputPath(inputPath) {
if (process.platform === "win32" && inputPath.match(/^\/[a-zA-Z]:/)) {
inputPath = inputPath.substring(1);
inputPath = inputPath.charAt(0).toUpperCase() + inputPath.slice(1);
}
return resolve(inputPath);
}
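// Example (Windows only): a leading-slash drive path such as "/c:/work/db.types.ts"
// is rewritten to "C:/work/db.types.ts" before being resolved to an absolute path.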
var GenerateTypesInputSchema = z12.object({
included_schemas: z12.array(z12.string()).optional().default(["public"]).describe("Database schemas to include in type generation."),
output_filename: z12.string().optional().default("database.types.ts").describe("Filename to save the generated types to in the workspace root."),
output_path: z12.string().describe("Absolute path where to save the file. If provided, output_filename will be ignored.")
});
var GenerateTypesOutputSchema = z12.object({
success: z12.boolean(),
message: z12.string().describe("Output message from the generation process."),
types: z12.string().optional().describe("The generated TypeScript types, if successful."),
file_path: z12.string().optional().describe("The absolute path to the saved types file, if successful."),
platform: z12.string().describe("Operating system platform (win32, darwin, linux).")
});
var mcpInputSchema11 = {
type: "object",
properties: {
included_schemas: {
type: "array",
items: { type: "string" },
default: ["public"],
description: "Database schemas to include in type generation."
},
output_filename: {
type: "string",
default: "database.types.ts",
description: "Filename to save the generated types to in the workspace root."
},
output_path: {
type: "string",
description: 'Absolute path where to download the generated TypeScript file. Examples: Windows: "C:\\\\path\\\\to\\\\project\\\\database.types.ts", macOS/Linux: "/path/to/project/database.types.ts". This parameter is required.'
}
},
required: ["output_path"]
// output_path is required for file download
};
var generateTypesTool = {
name: "generate_typescript_types",
description: "Generates TypeScript types from the database schema using the Supabase CLI (`supabase gen types`) and downloads the file to the specified absolute path. The tool returns the current platform (win32, darwin, linux) to help with path formatting. Requires DATABASE_URL configuration and Supabase CLI installed.",
inputSchema: GenerateTypesInputSchema,
mcpInputSchema: mcpInputSchema11,
// Add static JSON schema
outputSchema: GenerateTypesOutputSchema,
execute: async (input, context) => {
const client = context.selfhostedClient;
const dbUrl = client.getDbUrl();
if (!dbUrl) {
return {
success: false,
message: "Error: DATABASE_URL is not configured. Cannot generate types.",
platform: process.platform
};
}
const schemas = input.included_schemas.join(",");
const command = `supabase gen types typescript --db-url "${dbUrl}" --schema "${schemas}"`;
console.error(`Running command: ${command}`);
try {
const { stdout, stderr, error } = await runExternalCommand(command);
if (error) {
console.error(`Error executing supabase gen types: ${stderr || error.message}`);
return {
success: false,
message: `Command failed: ${stderr || error.message}`,
platform: process.platform
};
}
if (stderr) {
console.error(`supabase gen types produced stderr output: ${stderr}`);
}
let outputPath;
try {
outputPath = normalizeOutputPath(input.output_path);
console.error(`Normalized output path: ${outputPath}`);
} catch (pathError) {
const pathErrorMessage = pathError instanceof Error ? pathError.message : String(pathError);
console.error(`Invalid output path: ${pathErrorMessage}`);
return {
success: false,
message: `Invalid output path "${input.output_path}": ${pathErrorMessage}`,
platform: process.platform
};
}
try {
const outputDir = dirname(outputPath);
try {
mkdirSync(outputDir, { recursive: true });
} catch (dirError) {
if (dirError.code !== "EEXIST") {
throw dirError;
}
}
writeFileSync(outputPath, stdout, "utf8");
console.error(`Types saved to: ${outputPath}`);
} catch (writeError) {
const writeErrorMessage = writeError instanceof Error ? writeError.message : String(writeError);
console.error(`Failed to write types file: ${writeErrorMessage}`);
return {
success: false,
message: `Type generation succeeded but failed to save file: ${writeErrorMessage}. Platform: ${process.platform}. Attempted path: ${outputPath}`,
types: stdout,
platform: process.platform
};
}
console.error("Type generation and file save successful.");
return {
success: true,
message: `Types generated successfully and saved to ${outputPath}.${stderr ? `
Warnings:
${stderr}` : ""}`,
types: stdout,
file_path: outputPath,
platform: process.platform
};
} catch (err) {
const errorMessage = err instanceof Error ? err.message : String(err);
console.error(`Exception during type generation: ${errorMessage}`);
return {
success: false,
message: `Exception during type generation: ${errorMessage}. Platform: ${process.platform}`,
platform: process.platform
};
}
}
};
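// Illustrative MCP tool arguments for generate_typescript_types (path is a placeholder):
//   { "included_schemas": ["public"], "output_path": "/path/to/project/database.types.ts" }
// The underlying command it runs is roughly:
//   supabase gen types typescript --db-url "<DATABASE_URL>" --schema "public"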
// src/tools/rebuild_hooks.ts
import { z as z13 } from "zod";
var RebuildHooksInputSchema = z13.object({});
var RebuildHooksOutputSchema = z13.object({
success: z13.boolean(),
message: z13.string()
});
var mcpInputSchema12 = {
type: "object",
properties: {},
required: []
};
var rebuildHooksTool = {
name: "rebuild_hooks",
description: "Attempts to restart the pg_net worker. Requires the pg_net extension to be installed and available.",
inputSchema: RebuildHooksInputSchema,
mcpInputSchema: mcpInputSchema12,
outputSchema: RebuildHooksOutputSchema,
execute: async (input, context) => {
const client = context.selfhostedClient;
const restartSql = "SELECT net.worker_restart()";
try {
console.error("Attempting to restart pg_net worker...");
const result = await executeSqlWithFallback(client, restartSql, false);
if ("error" in result) {
const notFound = result.error.code === "42883";
const message = `Failed to restart pg_net worker: ${result.error.message}${notFound ? " (Is pg_net installed and enabled?)" : ""}`;
console.error(message);
return { success: false, message };
}
console.error("pg_net worker restart requested successfully.");
return { success: true, message: "pg_net worker restart requested successfully." };
} catch (error) {
const errorMessage = error instanceof Error ? error.message : String(error);
console.error(`Exception attempting to restart pg_net worker: ${errorMessage}`);
return { success: false, message: `Exception attempting to restart pg_net worker: ${errorMessage}` };
}
}
};
// src/tools/verify_jwt_secret.ts
import { z as z14 } from "zod";
var VerifyJwtInputSchema = z14.object({});
var VerifyJwtOutputSchema = z14.object({
jwt_secret_status: z14.enum(["found", "not_configured"]).describe("Whether the JWT secret was provided to the server."),
jwt_secret_preview: z14.string().optional().describe("A preview of the JWT secret (first few characters) if configured.")
});
var mcpInputSchema13 = {
type: "object",
properties: {},
required: []
};
var verifyJwtSecretTool = {
name: "verify_jwt_secret",
description: "Checks if the Supabase JWT secret is configured for this server and returns a preview.",
inputSchema: VerifyJwtInputSchema,
mcpInputSchema: mcpInputSchema13,
outputSchema: VerifyJwtOutputSchema,
execute: async (input, context) => {
const client = context.selfhostedClient;
const secret = client.getJwtSecret();
if (secret) {
const preview = `${secret.substring(0, Math.min(secret.length, 5))}...`;
return {
jwt_secret_status: "found",
jwt_secret_preview: preview
};
}
return { jwt_secret_status: "not_configured" };
}
};
// src/tools/list_auth_users.ts
import { z as z15 } from "zod";
var ListAuthUsersInputSchema = z15.object({
limit: z15.number().int().positive().optional().default(50).describe("Max number of users to return"),
offset: z15.number().int().nonnegative().optional().default(0).describe("Number of users to skip")
// Add filters later (e.g., by email pattern, role)
});
var AuthUserZodSchema = z15.object({
id: z15.string().uuid(),
email: z15.string().email().nullable(),
role: z15.string().nullable(),
// Timestamps returned as text from DB might not strictly be ISO 8601 / Zod datetime compliant
created_at: z15.string().nullable(),
last_sign_in_at: z15.string().nullable(),
raw_app_meta_data: z15.record(z15.unknown()).nullable(),
raw_user_meta_data: z15.record(z15.unknown()).nullable()
// Add more fields as needed (e.g., email_confirmed_at, phone)
});
var ListAuthUsersOutputSchema = z15.array(AuthUserZodSchema);
var mcpInputSchema14 = {
type: "object",
properties: {
limit: {
type: "number",
description: "Max number of users to return",
default: 50
},
offset: {
type: "number",
description: "Number of users to skip",
default: 0
}
},
required: []
};
var listAuthUsersTool = {
name: "list_auth_users",
description: "Lists users from the auth.users table.",
inputSchema: ListAuthUsersInputSchema,
mcpInputSchema: mcpInputSchema14,
outputSchema: ListAuthUsersOutputSchema,
execute: async (input, context) => {
const client = context.selfhostedClient;
const { limit, offset } = input;
console.error("Listing auth users using Supabase Admin API...");
try {
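// The Admin API paginates by page/perPage, so the offset/limit input is converted here
// (e.g., limit 50, offset 100 -> page 3, perPage 50).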
const { data, error } = await client.supabase.auth.admin.listUsers({
page: Math.floor(offset / limit) + 1,
perPage: limit
});
if (error) {
context.log(`Error listing users: ${error.message}`, "error");
throw new Error(`Failed to list users: ${error.message}`);
}
if (!data || !data.users) {
context.log("No users found or invalid response", "warning");
return [];
}
const users = data.users.map((user) => ({
id: user.id,
email: user.email || null,
role: user.role || null,
created_at: user.created_at || null,
last_sign_in_at: user.last_sign_in_at || null,
raw_app_meta_data: user.app_metadata || null,
raw_user_meta_data: user.user_metadata || null
}));
console.error(`Found ${users.length} users.`);
context.log(`Found ${users.length} users.`);
return ListAuthUsersOutputSchema.parse(users);
} catch (error) {
const errorMessage = error instanceof Error ? error.message : String(error);
context.log(`Error listing users: ${errorMessage}`, "error");
throw error;
}
}
};
// src/tools/get_auth_user.ts
import { z as z16 } from "zod";
var GetAuthUserInputSchema = z16.object({
user_id: z16.string().uuid().describe("The UUID of the user to retrieve.")
});
var AuthUserZodSchema2 = z16.object({
id: z16.string().uuid(),
email: z16.string().email().nullable(),
role: z16.string().nullable(),
created_at: z16.string().nullable(),
last_sign_in_at: z16.string().nullable(),
raw_app_meta_data: z16.record(z16.unknown()).nullable(),
raw_user_meta_data: z16.record(z16.unknown()).nullable()
// Add more fields as needed
});
var mcpInputSchema15 = {
type: "object",
properties: {
user_id: {
type: "string",
description: "The UUID of the user to retrieve.",
format: "uuid"
// Hint format if possible
}
},
required: ["user_id"]
};
var getAuthUserTool = {
name: "get_auth_user",
description: "Retrieves details for a specific user from auth.users by their ID.",
inputSchema: GetAuthUserInputSchema,
mcpInputSchema: mcpInputSchema15,
outputSchema: AuthUserZodSchema2,
// Use the single user Zod schema
execute: async (input, context) => {
const client = context.selfhostedClient;
const { user_id } = input;
console.error(`Getting auth user ${user_id} using Supabase Admin API...`);
try {
const { data, error } = await client.supabase.auth.admin.getUserById(user_id);
if (error) {
context.log(`Error getting user: ${error.message}`, "error");
throw new Error(`Failed to get user: ${error.message}`);
}
if (!data || !data.user) {
throw new Error(`User with ID ${user_id} not found.`);
}
const user = {
id: data.user.id,
email: data.user.email || null,
role: data.user.role || null,
created_at: data.user.created_at || null,
last_sign_in_at: data.user.last_sign_in_at || null,
raw_app_meta_data: data.user.app_metadata || null,
raw_user_meta_data: data.user.user_metadata || null
};
console.error(`Found user ${user_id}.`);
context.log(`Found user ${user_id}.`);
return AuthUserZodSchema2.parse(user);
} catch (error) {
const errorMessage = error instanceof Error ? error.message : String(error);
context.log(`Error getting user: ${errorMessage}`, "error");
throw error;
}
}
};
// src/tools/delete_auth_user.ts
import { z as z17 } from "zod";
var DeleteAuthUserInputSchema = z17.object({
user_id: z17.string().uuid().describe("The UUID of the user to delete.")
});
var DeleteAuthUserOutputSchema = z17.object({
success: z17.boolean(),
message: z17.string()
});
var mcpInputSchema16 = {
type: "object",
properties: {
user_id: {
type: "string",
format: "uuid",
description: "The UUID of the user to delete."
}
},
required: ["user_id"]
};
var deleteAuthUserTool = {
name: "delete_auth_user",
description: "Deletes a user using Supabase Admin API. Requires service role key.",
inputSchema: DeleteAuthUserInputSchema,
mcpInputSchema: mcpInputSchema16,
outputSchema: DeleteAuthUserOutputSchema,
execute: async (input, context) => {
const client = context.selfhostedClient;
const { user_id } = input;
if (!client) {
throw new Error("Supabase client is not initialized.");
}
context.log(`Deleting user ${user_id} using Supabase Admin API...`, "info");
try {
const { error } = await client.supabase.auth.admin.deleteUser(user_id);
if (error) {
context.log(`Error deleting user: ${error.message}`, "error");
return {
success: false,
message: `Failed to delete user: ${error.message}`
};
}
context.log(`User ${user_id} deleted successfully`, "info");
return {
success: true,
message: `Successfully deleted user with ID: ${user_id}`
};
} catch (error) {
const errorMessage = error instanceof Error ? error.message : String(error);
context.log(`Error deleting user ${user_id}: ${errorMessage}`, "error");
throw new Error(`Failed to delete user ${user_id}: ${errorMessage}`);
}
}
};
// src/tools/create_auth_user.ts
import { z as z18 } from "zod";
var CreateAuthUserInputSchema = z18.object({
email: z18.string().email().describe("The email address for the new user."),
passwo