UNPKG

gmail-mcp-server

Version:

Gmail MCP Server with on-demand authentication for SIYA/Claude Desktop. Complete Gmail integration with multi-user support and OAuth2 security.

429 lines 15.1 kB
import { promises as fs, createWriteStream, statSync } from 'fs';
import { pipeline } from 'stream/promises';
import { Transform, Readable } from 'stream';
import * as path from 'path';
import * as os from 'os';
import { lookup } from 'mime-types';

/**
 * Handles Gmail attachments for the MCP server, including files too large
 * to keep in memory comfortably: stages large payloads in a temp directory,
 * builds MIME attachment parts, downloads attachments (optionally streamed
 * with progress callbacks), manages a local downloads folder, and validates
 * attachments against Gmail's 25MB limit.
 */
export class LargeAttachmentHandler {
    /** Resolved configuration: maxInlineSize, chunkSize, tempDir, compressionEnabled. */
    config;
    /** Authenticated googleapis Gmail client (`gmail.users.messages...`). */
    gmail;

    /**
     * @param {object} gmail  - Authenticated Gmail API client.
     * @param {object} [config] - Partial overrides merged over the defaults below.
     */
    constructor(gmail, config) {
        this.gmail = gmail;
        this.config = {
            maxInlineSize: 25 * 1024 * 1024, // 25MB - Gmail's attachment limit
            chunkSize: 1024 * 1024, // 1MB chunks
            tempDir: path.join(os.tmpdir(), 'gmail-mcp-attachments'),
            compressionEnabled: true,
            ...config
        };
        // Fire-and-forget: a constructor cannot await. A failure here is only
        // logged; processAttachment surfaces a real error on the first write.
        this.ensureTempDir().catch((err) => {
            console.warn('Failed to prepare temp directory:', err);
        });
    }

    /** Create the temp staging directory if it does not already exist. */
    async ensureTempDir() {
        try {
            await fs.access(this.config.tempDir);
        }
        catch {
            await fs.mkdir(this.config.tempDir, { recursive: true });
        }
    }

    /**
     * Process attachments for sending, handling large files appropriately.
     * Processes sequentially to avoid holding many large buffers at once.
     *
     * @param {Array<{filename: string, content: string, contentType?: string}>} attachments
     * @returns {Promise<Array<object>>} One result descriptor per attachment.
     */
    async processAttachmentsForSending(attachments) {
        const results = [];
        for (const attachment of attachments) {
            const result = await this.processAttachment(attachment);
            results.push(result);
        }
        return results;
    }

    /**
     * Classify and stage a single attachment.
     * `content` may be either a readable file path or base64 data; a file
     * path is detected by attempting `fs.access` (anything inaccessible is
     * treated as base64). Large attachments (> maxInlineSize) are written
     * to the temp directory and returned with a `tempPath`.
     *
     * @returns {Promise<object>} Descriptor with attachmentId, filename,
     *   size, contentType, isLarge, and (for large files) chunks/tempPath.
     */
    async processAttachment(attachment) {
        const { filename, content, contentType } = attachment;
        let buffer;
        let isFilePath = false;
        try {
            // Check if it's a file path
            await fs.access(content);
            isFilePath = true;
            buffer = await fs.readFile(content);
        }
        catch {
            // Assume it's base64 content
            buffer = Buffer.from(content, 'base64');
        }
        const size = buffer.length;
        // mime-types `lookup` returns false for unknown extensions.
        const detectedContentType = contentType || lookup(filename) || 'application/octet-stream';
        const isLarge = size > this.config.maxInlineSize;
        const attachmentId = this.generateAttachmentId();
        if (isLarge) {
            // Stage large attachment on disk. basename() keeps the temp file
            // inside tempDir even if `filename` contains path separators.
            const tempPath = path.join(this.config.tempDir, `${attachmentId}_${path.basename(filename)}`);
            await fs.writeFile(tempPath, buffer);
            return {
                attachmentId,
                filename,
                size,
                contentType: detectedContentType,
                isLarge: true,
                chunks: Math.ceil(size / this.config.chunkSize),
                tempPath
            };
        }
        else {
            // Small attachment - can be handled inline
            return {
                attachmentId,
                filename,
                size,
                contentType: detectedContentType,
                isLarge: false
            };
        }
    }

    /**
     * Create Gmail attachment parts for a batch of processed attachments.
     * Large files get a base64-encoded part built from their temp file.
     */
    async createAttachmentParts(results) {
        const parts = [];
        for (const result of results) {
            if (result.isLarge && result.tempPath) {
                const part = await this.createLargeAttachmentPart(result);
                parts.push(part);
            }
            else {
                const part = this.createSmallAttachmentPart(result);
                parts.push(part);
            }
        }
        return parts;
    }

    /**
     * Build a MIME part for a large staged attachment.
     * NOTE(review): this reads the whole temp file into memory before
     * base64-encoding it — not truly streamed; acceptable up to Gmail's
     * 25MB cap but worth revisiting for resumable uploads.
     *
     * @throws {Error} If the result has no tempPath.
     */
    async createLargeAttachmentPart(result) {
        if (!result.tempPath) {
            throw new Error('Temp path required for large attachment');
        }
        const fileContent = await fs.readFile(result.tempPath);
        const base64Content = fileContent.toString('base64');
        return {
            headers: [
                { name: 'Content-Type', value: result.contentType },
                { name: 'Content-Disposition', value: `attachment; filename="${result.filename}"` },
                { name: 'Content-Transfer-Encoding', value: 'base64' }
            ],
            body: {
                attachmentId: result.attachmentId,
                size: result.size,
                data: base64Content
            }
        };
    }

    /** Build a metadata-only MIME part for a small (inline-capable) attachment. */
    createSmallAttachmentPart(result) {
        return {
            headers: [
                { name: 'Content-Type', value: result.contentType },
                { name: 'Content-Disposition', value: `attachment; filename="${result.filename}"` }
            ],
            body: {
                attachmentId: result.attachmentId,
                size: result.size
            }
        };
    }

    /**
     * Download an attachment, optionally streaming it to disk with progress.
     *
     * @param {object} options
     * @param {string} options.messageId - Gmail message ID.
     * @param {string} options.attachmentId - Gmail attachment ID.
     * @param {string} [options.outputPath] - If set, write to this file.
     * @param {boolean} [options.streaming] - Stream-to-file with progress.
     * @param {Function} [options.chunkCallback] - (chunk, percent) progress hook.
     * @returns {Promise<string|Buffer>} Output path when written to disk,
     *   otherwise the raw attachment Buffer.
     * @throws {Error} Wrapping any API or filesystem failure (with `cause`).
     */
    async downloadAttachment(options) {
        const { messageId, attachmentId, outputPath, streaming, chunkCallback } = options;
        try {
            const attachment = await this.gmail.users.messages.attachments.get({
                userId: 'me',
                messageId,
                id: attachmentId
            });
            if (!attachment.data.data) {
                throw new Error('No attachment data found');
            }
            // Gmail returns URL-safe base64; Node's 'base64' decoder accepts
            // both standard and URL-safe alphabets.
            const data = Buffer.from(attachment.data.data, 'base64');
            if (streaming && outputPath) {
                await this.streamToFile(data, outputPath, chunkCallback);
                return outputPath;
            }
            else if (outputPath) {
                await fs.writeFile(outputPath, data);
                return outputPath;
            }
            else {
                return data;
            }
        }
        catch (error) {
            throw new Error(`Failed to download attachment: ${error}`, { cause: error });
        }
    }

    /**
     * Download an attachment into a local downloads directory with a
     * timestamped, collision-free filename.
     *
     * @param {string} messageId
     * @param {string} attachmentId
     * @param {string} [customPath] - Alternative downloads directory.
     * @returns {Promise<{filePath: string, filename: string, size: number, contentType: string}>}
     */
    async downloadAttachmentToLocal(messageId, attachmentId, customPath) {
        try {
            const info = await this.getAttachmentInfo(messageId, attachmentId);
            const downloadsDir = customPath || path.join(process.cwd(), 'downloads');
            await this.ensureDirectory(downloadsDir);
            // Prefix with an ISO timestamp (':' and '.' are not portable in
            // filenames) so repeated downloads never collide.
            const timestamp = new Date().toISOString().replace(/[:.]/g, '-');
            const uniqueFilename = `${timestamp}_${info.filename}`;
            const filePath = path.join(downloadsDir, uniqueFilename);
            await this.downloadAttachment({
                messageId,
                attachmentId,
                outputPath: filePath,
                streaming: info.size > 10 * 1024 * 1024 // Use streaming for files > 10MB
            });
            return {
                filePath: path.resolve(filePath), // Return absolute path
                filename: info.filename,
                size: info.size,
                contentType: info.contentType
            };
        }
        catch (error) {
            throw new Error(`Failed to download attachment to local: ${error}`, { cause: error });
        }
    }

    /** Ensure a directory exists, creating it (recursively) if needed. */
    async ensureDirectory(dirPath) {
        try {
            await fs.access(dirPath);
        }
        catch {
            await fs.mkdir(dirPath, { recursive: true });
        }
    }

    /**
     * Write a buffer to disk through a Transform that reports progress.
     *
     * @param {Buffer} data
     * @param {string} outputPath
     * @param {Function} [chunkCallback] - Called with (chunk, percentComplete).
     */
    async streamToFile(data, outputPath, chunkCallback) {
        const totalSize = data.length;
        let bytesWritten = 0;
        const transform = new Transform({
            transform(chunk, encoding, callback) {
                bytesWritten += chunk.length;
                const progress = (bytesWritten / totalSize) * 100;
                if (chunkCallback) {
                    chunkCallback(chunk, progress);
                }
                callback(null, chunk);
            }
        });
        const readable = Readable.from(data);
        const writable = createWriteStream(outputPath);
        await pipeline(readable, transform, writable);
    }

    /**
     * Delete the temp files staged by processAttachment.
     * Best-effort: individual failures are logged, never thrown.
     */
    async cleanup(results) {
        for (const result of results) {
            if (result.tempPath) {
                try {
                    await fs.unlink(result.tempPath);
                }
                catch (error) {
                    console.warn(`Failed to cleanup temp file ${result.tempPath}:`, error);
                }
            }
        }
    }

    /**
     * Fetch filename, size, and content type of an attachment without
     * downloading its body.
     *
     * @throws {Error} If the message or attachment cannot be resolved.
     */
    async getAttachmentInfo(messageId, attachmentId) {
        try {
            const message = await this.gmail.users.messages.get({
                userId: 'me',
                id: messageId,
                format: 'full'
            });
            const attachment = this.findAttachmentInMessage(message.data, attachmentId);
            if (!attachment) {
                throw new Error('Attachment not found');
            }
            return {
                filename: this.extractFilename(attachment),
                size: attachment.body?.size || 0,
                contentType: this.extractContentType(attachment)
            };
        }
        catch (error) {
            throw new Error(`Failed to get attachment info: ${error}`, { cause: error });
        }
    }

    /**
     * Depth-first search for the MIME part carrying `attachmentId`.
     * Also checks the top-level payload itself, for non-multipart messages
     * whose single part is the attachment.
     *
     * @returns {object|null} The matching part, or null.
     */
    findAttachmentInMessage(message, attachmentId) {
        if (message.payload?.body?.attachmentId === attachmentId) {
            return message.payload;
        }
        const findInParts = (parts) => {
            for (const part of parts) {
                if (part.body?.attachmentId === attachmentId) {
                    return part;
                }
                if (part.parts) {
                    const found = findInParts(part.parts);
                    if (found)
                        return found;
                }
            }
            return null;
        };
        if (message.payload?.parts) {
            return findInParts(message.payload.parts);
        }
        return null;
    }

    /**
     * Pull the filename out of a part's Content-Disposition header,
     * falling back to 'attachment' when absent or unparsable.
     */
    extractFilename(part) {
        const contentDisposition = part.headers?.find(h => h.name?.toLowerCase() === 'content-disposition');
        if (contentDisposition?.value) {
            const match = contentDisposition.value.match(/filename[^;=\n]*=((['"]).*?\2|[^;\n]*)/);
            if (match) {
                return match[1].replace(/['"]/g, '');
            }
        }
        return 'attachment';
    }

    /** Pull a part's Content-Type header, defaulting to octet-stream. */
    extractContentType(part) {
        const contentType = part.headers?.find(h => h.name?.toLowerCase() === 'content-type');
        return contentType?.value || 'application/octet-stream';
    }

    /**
     * Generate a collision-resistant (not cryptographic) attachment ID:
     * timestamp plus 9 base-36 random characters.
     */
    generateAttachmentId() {
        return `att_${Date.now()}_${Math.random().toString(36).slice(2, 11)}`;
    }

    /**
     * Validate an attachment's size and filename before sending.
     * `content` is treated as a file path when it looks path-like
     * (leading '/' or containing '\') — base64 data never contains those
     * characters; NOTE(review): Windows paths like 'C:/x' slip through as
     * base64 — confirm against callers.
     *
     * @returns {{valid: boolean, error?: string}}
     */
    validateAttachment(attachment) {
        // Check file size (Gmail limit is 25MB)
        let size = 0;
        try {
            if (attachment.content.startsWith('/') || attachment.content.includes('\\')) {
                // File path
                const stats = statSync(attachment.content);
                size = stats.size;
            }
            else {
                // Base64 content
                size = Buffer.from(attachment.content, 'base64').length;
            }
        }
        catch (error) {
            return { valid: false, error: 'Invalid attachment content or file path' };
        }
        if (size > 25 * 1024 * 1024) {
            return { valid: false, error: 'Attachment exceeds 25MB Gmail limit' };
        }
        if (!attachment.filename || attachment.filename.trim() === '') {
            return { valid: false, error: 'Attachment filename is required' };
        }
        return { valid: true };
    }

    /**
     * List files in the downloads directory, newest first.
     * NOTE(review): stats.birthtime is not reliable on all filesystems
     * (may equal ctime/epoch) — ordering is best-effort.
     *
     * @returns {Promise<Array<{filename, filePath, size, downloadDate, fileUrl}>>}
     */
    async listDownloadedAttachments(downloadsDir) {
        const dir = downloadsDir || path.join(process.cwd(), 'downloads');
        try {
            await fs.access(dir);
        }
        catch {
            return []; // Directory doesn't exist, no downloads
        }
        const files = await fs.readdir(dir);
        const attachments = [];
        for (const file of files) {
            try {
                const filePath = path.join(dir, file);
                const stats = await fs.stat(filePath);
                if (stats.isFile()) {
                    attachments.push({
                        filename: file,
                        filePath: path.resolve(filePath),
                        size: stats.size,
                        downloadDate: stats.birthtime,
                        fileUrl: `file://${path.resolve(filePath)}`
                    });
                }
            }
            catch (error) {
                console.warn(`Error reading file ${file}:`, error);
            }
        }
        // Sort by download date (newest first)
        return attachments.sort((a, b) => b.downloadDate.getTime() - a.downloadDate.getTime());
    }

    /**
     * Delete downloads older than `maxAgeMs` (default 7 days).
     * Per-file failures are collected, not thrown.
     *
     * @returns {Promise<{cleaned: number, errors: string[]}>}
     */
    async cleanupOldDownloads(maxAgeMs = 7 * 24 * 60 * 60 * 1000, downloadsDir) {
        const dir = downloadsDir || path.join(process.cwd(), 'downloads');
        const now = Date.now();
        let cleaned = 0;
        const errors = [];
        try {
            await fs.access(dir);
        }
        catch {
            return { cleaned: 0, errors: ['Downloads directory does not exist'] };
        }
        const files = await fs.readdir(dir);
        for (const file of files) {
            try {
                const filePath = path.join(dir, file);
                const stats = await fs.stat(filePath);
                if (stats.isFile() && (now - stats.birthtime.getTime()) > maxAgeMs) {
                    await fs.unlink(filePath);
                    cleaned++;
                    console.log(`Cleaned up old download: ${file}`);
                }
            }
            catch (error) {
                errors.push(`Failed to cleanup ${file}: ${error}`);
            }
        }
        return { cleaned, errors };
    }

    /** Snapshot of this process's heap and external memory usage. */
    getMemoryStats() {
        const memUsage = process.memoryUsage();
        return {
            heapUsed: memUsage.heapUsed,
            heapTotal: memUsage.heapTotal,
            external: memUsage.external
        };
    }
}