elasticsearch-mcp

Secure MCP server for Elasticsearch integration with comprehensive tools and Elastic Cloud support

"use strict"; var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { if (k2 === undefined) k2 = k; var desc = Object.getOwnPropertyDescriptor(m, k); if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { desc = { enumerable: true, get: function() { return m[k]; } }; } Object.defineProperty(o, k2, desc); }) : (function(o, m, k, k2) { if (k2 === undefined) k2 = k; o[k2] = m[k]; })); var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { Object.defineProperty(o, "default", { enumerable: true, value: v }); }) : function(o, v) { o["default"] = v; }); var __importStar = (this && this.__importStar) || (function () { var ownKeys = function(o) { ownKeys = Object.getOwnPropertyNames || function (o) { var ar = []; for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k; return ar; }; return ownKeys(o); }; return function (mod) { if (mod && mod.__esModule) return mod; var result = {}; if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]); __setModuleDefault(result, mod); return result; }; })(); Object.defineProperty(exports, "__esModule", { value: true }); exports.ExportToCSVTool = void 0; const schemas_js_1 = require("../validation/schemas.js"); const handlers_js_1 = require("../errors/handlers.js"); const csv_writer_1 = require("csv-writer"); const fs = __importStar(require("fs/promises")); const path = __importStar(require("path")); const zlib = __importStar(require("zlib")); const util_1 = require("util"); const gzip = (0, util_1.promisify)(zlib.gzip); class ExportToCSVTool { elasticsearch; logger; constructor(elasticsearch, logger) { this.elasticsearch = elasticsearch; this.logger = logger.child({ tool: 'export-to-csv' }); } async execute(args) { try { const validatedArgs = schemas_js_1.ExportToCSVArgsSchema.parse(args); this.logger.info('Starting CSV export', { index: validatedArgs.index, hasQuery: !!validatedArgs.query, fieldCount: validatedArgs.fields?.length, maxRows: validatedArgs.maxRows, compress: validatedArgs.compress, }); const client = this.elasticsearch.getClient(); // Check if index exists const indexExists = await client.indices.exists({ index: validatedArgs.index, }); if (!indexExists) { throw new handlers_js_1.NotFoundError(`Index '${validatedArgs.index}' does not exist`); } // Generate filename const filename = this.generateFilename(validatedArgs); const tempFilename = `${filename}.tmp`; // Determine fields to export const fieldsToExport = await this.determineFields(validatedArgs); // Setup CSV writer const writer = this.createCSVWriter(tempFilename, fieldsToExport, validatedArgs.format); // Export data with scroll API for large datasets const rowsExported = await this.exportDataWithScroll(validatedArgs, writer, fieldsToExport); // Get file size const stats = await fs.stat(tempFilename); let finalFilename = filename; let fileSize = this.formatFileSize(stats.size); // Compress if requested if (validatedArgs.compress) { finalFilename = `${filename}.gz`; await this.compressFile(tempFilename, finalFilename); await fs.unlink(tempFilename); // Remove temp file const compressedStats = await fs.stat(finalFilename); fileSize = this.formatFileSize(compressedStats.size); } else { await fs.rename(tempFilename, finalFilename); } this.logger.info('CSV export completed successfully', { filename: finalFilename, rowsExported, fileSize, }); return { filename: finalFilename, 
rowsExported, fileSize, downloadUrl: `file://${path.resolve(finalFilename)}`, }; } catch (error) { if (error instanceof Error && error.name === 'ZodError') { throw new handlers_js_1.ValidationError('Invalid arguments for export_to_csv', { details: error.message, }); } if (error instanceof handlers_js_1.ValidationError || error instanceof handlers_js_1.NotFoundError) { throw error; } this.logger.error('Failed to export to CSV', {}, error); throw new handlers_js_1.ElasticsearchError('Failed to export data to CSV', error, { args }); } } generateFilename(args) { if (args.filename) { // Sanitize provided filename const sanitized = args.filename .replace(/[^a-zA-Z0-9._-]/g, '_') .replace(/_{2,}/g, '_'); return sanitized.endsWith('.csv') ? sanitized : `${sanitized}.csv`; } // Generate filename based on index and timestamp const timestamp = new Date().toISOString().replace(/[:.]/g, '-').split('T')[0]; return `${args.index}_export_${timestamp}.csv`; } async determineFields(args) { if (args.fields && args.fields.length > 0) { return args.fields; } // Get mapping to determine available fields const client = this.elasticsearch.getClient(); try { const mappingResponse = await client.indices.getMapping({ index: args.index, }); const indexMapping = Object.values(mappingResponse)[0]; const properties = indexMapping?.mappings?.properties || {}; // Extract field names from mapping const fields = this.extractFieldNames(properties); if (fields.length === 0) { // Fallback: get a sample document to determine fields const sampleResponse = await client.search({ index: args.index, body: { size: 1 }, }); if (sampleResponse.hits.hits.length > 0) { const sampleDoc = sampleResponse.hits.hits[0]._source; return Object.keys(sampleDoc || {}); } } return fields; } catch (error) { this.logger.warn('Failed to get field mapping, using sample document', { error: error.message }); // Fallback: try to get fields from a sample document const sampleResponse = await client.search({ index: args.index, body: { size: 1 }, }); if (sampleResponse.hits.hits.length > 0) { const sampleDoc = sampleResponse.hits.hits[0]._source; return Object.keys(sampleDoc || {}); } throw new handlers_js_1.ValidationError('Could not determine fields to export and no fields specified'); } } extractFieldNames(properties, prefix = '') { const fields = []; for (const [key, value] of Object.entries(properties)) { const fieldName = prefix ? 
`${prefix}.${key}` : key; if (typeof value === 'object' && value !== null) { const fieldConfig = value; if (fieldConfig.type) { // It's a field with a type fields.push(fieldName); } else if (fieldConfig.properties) { // It's an object with nested properties fields.push(...this.extractFieldNames(fieldConfig.properties, fieldName)); } } } return fields; } createCSVWriter(filename, fields, format) { const headers = fields.map(field => ({ id: field, title: field })); return (0, csv_writer_1.createObjectCsvWriter)({ path: filename, header: headers, fieldDelimiter: format?.delimiter || ',', recordDelimiter: '\n', append: false, }); } async exportDataWithScroll(args, writer, fields) { const client = this.elasticsearch.getClient(); const scrollSize = 1000; // Documents per scroll const maxRows = args.maxRows || 1000000; // Default 1M limit let totalExported = 0; // Build search query const searchBody = { size: Math.min(scrollSize, maxRows), _source: fields, }; if (args.query) { const sanitizedQuery = (0, schemas_js_1.sanitizeQuery)(args.query); searchBody.query = sanitizedQuery || { match_all: {} }; } else { searchBody.query = { match_all: {} }; } // Initial search with scroll let response = await client.search({ index: args.index, body: searchBody, scroll: '5m', }); while (response.hits.hits.length > 0 && totalExported < maxRows) { // Convert hits to CSV records const records = response.hits.hits.map((hit) => { const record = {}; for (const field of fields) { record[field] = this.extractFieldValue(hit._source, field); } return record; }); // Write to CSV await writer.writeRecords(records); totalExported += records.length; this.logger.debug('Exported batch', { batchSize: records.length, totalExported, }); // Check if we've reached the limit if (totalExported >= maxRows) { break; } // Continue scrolling if (response._scroll_id) { response = await client.scroll({ scroll_id: response._scroll_id, scroll: '5m', }); } else { break; } } // Clear scroll if (response._scroll_id) { try { await client.clearScroll({ scroll_id: response._scroll_id, }); } catch (error) { this.logger.warn('Failed to clear scroll', { error: error.message }); } } return totalExported; } extractFieldValue(source, fieldPath) { const parts = fieldPath.split('.'); let value = source; for (const part of parts) { if (value === null || value === undefined) { return null; } if (typeof value === 'object' && !Array.isArray(value)) { value = value[part]; } else { return null; } } // Convert complex objects to strings if (typeof value === 'object' && value !== null) { return JSON.stringify(value); } return value; } async compressFile(inputPath, outputPath) { const inputData = await fs.readFile(inputPath); const compressedData = await gzip(inputData); await fs.writeFile(outputPath, compressedData); } formatFileSize(bytes) { const units = ['B', 'KB', 'MB', 'GB']; let size = bytes; let unitIndex = 0; while (size >= 1024 && unitIndex < units.length - 1) { size /= 1024; unitIndex++; } return `${size.toFixed(2)} ${units[unitIndex]}`; } } exports.ExportToCSVTool = ExportToCSVTool; //# sourceMappingURL=export-to-csv.js.map
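
Below is a minimal usage sketch, not part of the published file. It assumes an Elasticsearch wrapper object exposing getClient() and a logger exposing child(), info(), debug(), warn(), and error(), since those are the only methods the class above calls; the index name, field list, and query values are hypothetical, and the argument keys (index, query, fields, maxRows, compress, plus optional filename and format) mirror the ones the code reads from the validated args. ExportToCSVArgsSchema ultimately decides which shapes are accepted.

// Hypothetical wiring: `elasticsearch` and `logger` stand in for whatever the
// MCP server normally passes to this tool.
const { ExportToCSVTool } = require('./export-to-csv.js');

async function runExport(elasticsearch, logger) {
    const tool = new ExportToCSVTool(elasticsearch, logger);
    const result = await tool.execute({
        index: 'logs-2024',                        // hypothetical index; must exist
        fields: ['timestamp', 'level', 'message'], // omit to derive fields from the mapping
        query: { term: { level: 'error' } },       // optional; defaults to match_all
        maxRows: 50000,                            // optional; the code defaults to 1,000,000
        compress: true,                            // gzip the CSV and append .gz
    });
    // result: { filename, rowsExported, fileSize, downloadUrl }
    return result;
}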