@gohcltech/bitbucket-mcp
Bitbucket integration for Claude via Model Context Protocol
import pino from 'pino';
import fs from 'fs';
import path from 'path';
/**
* Simplified logging system with console and file output support.
*
* Provides structured, high-performance logging built on Pino. Console and
* file destinations are configured through a single set of Pino transport
* targets, with no separate transport-management layer.
*
* Features:
* - Console logging with pretty formatting
* - File logging in JSON format
* - Structured logging with metadata support
* - Automatic sensitive data redaction
* - Tool and operation-specific logging methods
* - Performance-optimized non-blocking writes
*
* @example
* ```typescript
* const logger = createLogger({
* level: 'info',
* service: 'bitbucket-mcp',
* logging: {
* enableConsoleLogging: true,
* enableFileLogging: true,
* logFilePath: './logs/app.log'
* }
* });
*
* logger.info('Server started', { port: 3000 });
* logger.toolStart('list_repositories', { workspace: 'myorg' });
* ```
*/
class Logger {
logger; // Underlying Pino logger instance
config; // Logger configuration used to build transports
fileStream; // Optional file stream handle; ended in close() when present
constructor(config) {
this.config = config;
// Build transports array for Pino
const targets = [];
// Console transport
if (config.logging?.enableConsoleLogging !== false) {
targets.push({
target: 'pino-pretty',
level: config.level,
options: {
colorize: true,
translateTime: 'HH:MM:ss',
ignore: 'pid,hostname',
},
});
}
// File transport
if (config.logging?.enableFileLogging && config.logging.logFilePath) {
// Ensure log directory exists
const logDir = path.dirname(config.logging.logFilePath);
if (!fs.existsSync(logDir)) {
fs.mkdirSync(logDir, { recursive: true });
}
targets.push({
target: 'pino/file',
level: config.level,
options: {
destination: config.logging.logFilePath,
mkdir: true,
},
});
}
// Create the Pino logger with transport
this.logger = pino({
level: config.level,
base: {
service: config.service,
version: config.version || '1.0.0',
},
timestamp: pino.stdTimeFunctions.isoTime,
redact: {
paths: [
'accessToken',
'refreshToken',
'clientSecret',
'password',
'authorization',
'cookie',
'encryptionKey',
'apiToken',
'repositoryToken',
],
censor: '[REDACTED]',
},
transport: targets.length > 0 ? {
targets,
} : undefined,
});
}
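// Standard level methods. The argument order is (message, meta); meta is
// forwarded to Pino as the merging object, so its fields become structured
// properties on the emitted log line.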
error(message, error, meta) {
const errorInfo = this.formatError(error);
const combinedMeta = { ...meta, ...errorInfo };
this.logger.error(combinedMeta, message);
}
warn(message, meta) {
// Default to an empty merging object so Pino treats the string as the log
// message even when no metadata is supplied.
this.logger.warn(meta ?? {}, message);
}
info(message, meta) {
this.logger.info(meta ?? {}, message);
}
debug(message, meta) {
this.logger.debug(meta ?? {}, message);
}
// Tool operation logging
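/**
* Typical call pattern for the tool-logging helpers below (a sketch: the
* Date.now() timing and millisecond durations are assumptions of the caller,
* not something this class enforces).
*
* @example
* ```typescript
* const started = Date.now();
* logger.toolStart('list_repositories', { workspace: 'myorg' });
* try {
*   // ... run the tool ...
*   logger.toolSuccess('list_repositories', Date.now() - started);
* } catch (err) {
*   logger.toolError('list_repositories', err, Date.now() - started);
* }
* ```
*/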
toolStart(toolName, params) {
const meta = {
tool: toolName,
operation: 'start',
params: this.sanitizeParams(params),
};
this.info(`Tool started: ${toolName}`, meta);
}
toolSuccess(toolName, duration, meta) {
const combinedMeta = {
tool: toolName,
operation: 'success',
duration,
...meta,
};
this.info(`Tool completed: ${toolName}`, combinedMeta);
}
toolError(toolName, error, duration) {
const errorInfo = this.formatError(error);
const meta = {
tool: toolName,
operation: 'error',
duration,
...errorInfo,
};
this.error(`Tool failed: ${toolName}`, error, meta);
}
// API operation logging
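/**
* Request/response logging sketch for the API helpers below, assuming a
* fetch-based HTTP client; the URL and status handling shown here are
* illustrative and not part of this module.
*
* @example
* ```typescript
* const started = Date.now();
* logger.apiRequest('GET', url);
* try {
*   const response = await fetch(url);
*   logger.apiResponse('GET', url, response.status, Date.now() - started);
* } catch (err) {
*   logger.apiError('GET', url, err, Date.now() - started);
* }
* ```
*/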
apiRequest(method, url, meta) {
const combinedMeta = {
http: {
method,
url: this.sanitizeUrl(url),
},
operation: 'api_request',
...meta,
};
this.debug('API request', combinedMeta);
}
apiResponse(method, url, statusCode, duration) {
const meta = {
http: {
method,
url: this.sanitizeUrl(url),
statusCode,
},
operation: 'api_response',
duration,
};
this.debug('API response', meta);
}
apiError(method, url, error, duration) {
const errorInfo = this.formatError(error);
const meta = {
http: {
method,
url: this.sanitizeUrl(url),
},
operation: 'api_error',
duration,
...errorInfo,
};
this.error('API error', error, meta);
}
// Rate limiting logging
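/**
* Sketch of reacting to an HTTP 429 with the rate-limit helpers below;
* interpreting Retry-After as seconds is an assumption made by the caller,
* this class only records whatever value it is given.
*
* @example
* ```typescript
* if (response.status === 429) {
*   const retryAfter = Number(response.headers.get('Retry-After')) || undefined;
*   logger.rateLimitHit('list_repositories', retryAfter);
* }
* ```
*/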
rateLimitHit(toolName, retryAfter) {
const meta = {
tool: toolName,
operation: 'rate_limit',
retryAfter,
};
this.warn('Rate limit hit', meta);
}
rateLimitRecovered(toolName) {
const meta = {
tool: toolName,
operation: 'rate_limit_recovered',
};
this.info('Rate limit recovered', meta);
}
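/**
* Normalizes a thrown value into a structured object; non-Error values are
* stringified so they still appear under the `error` key.
*/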
formatError(error) {
if (error instanceof Error) {
return {
error: {
name: error.name,
message: error.message,
stack: error.stack,
},
};
}
return {
error: {
message: String(error),
},
};
}
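/**
* Shallow redaction of tool parameters: any top-level key whose name contains
* a sensitive substring is replaced with '[REDACTED]'. Nested objects are not
* inspected.
*/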
sanitizeParams(params) {
const sanitized = { ...params };
const sensitiveKeys = ['token', 'password', 'secret', 'key', 'authorization'];
for (const key of Object.keys(sanitized)) {
if (sensitiveKeys.some(sensitive => key.toLowerCase().includes(sensitive))) {
sanitized[key] = '[REDACTED]';
}
}
return sanitized;
}
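/**
* Strips known sensitive query parameters from a URL before logging. Strings
* that cannot be parsed as a URL are returned unchanged.
*/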
sanitizeUrl(url) {
try {
const urlObj = new URL(url);
// Remove any sensitive query parameters
const sensitiveParams = ['token', 'access_token', 'refresh_token', 'client_secret'];
for (const param of sensitiveParams) {
urlObj.searchParams.delete(param);
}
return urlObj.toString();
}
catch {
return url;
}
}
// Create child logger with additional context
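/**
* @example
* ```typescript
* // Hypothetical bindings; any JSON-serializable context can be attached.
* const repoLogger = logger.child({ workspace: 'myorg', repo: 'api' });
* repoLogger.info('Repository selected', { branch: 'main' });
* ```
*/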
child(bindings) {
// Reuse this instance's configuration and wrap a Pino child logger directly,
// rather than constructing a second Logger (which would spin up a duplicate
// set of transports pointed at the same destinations).
const childLogger = Object.create(Logger.prototype);
childLogger.config = this.config;
childLogger.logger = this.logger.child(bindings);
return childLogger;
}
/**
* Close logger and cleanup resources.
*/
async close() {
if (this.fileStream) {
this.fileStream.end();
}
await this.logger.flush();
}
}
// Singleton logger instance
let globalLogger;
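/**
* Create (or replace) the global logger. Calling this again overwrites the
* previous singleton; see the class-level example above for the expected
* config shape.
*/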
export function createLogger(config) {
globalLogger = new Logger(config);
return globalLogger;
}
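/**
* Retrieve the global logger created by createLogger(); throws if it has not
* been initialized yet.
*/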
export function getLogger() {
if (!globalLogger) {
throw new Error('Logger not initialized. Call createLogger() first.');
}
return globalLogger;
}
/**
* Close the global logger and cleanup resources.
*/
export async function closeLogger() {
if (globalLogger) {
await globalLogger.close();
}
}
export { Logger };
/**
* Graceful shutdown handler for logging system.
*/
export async function gracefulShutdown() {
try {
await closeLogger();
}
catch (error) {
console.error('Error during logger shutdown:', error);
}
}
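/**
* Sketch of wiring gracefulShutdown() into a process signal handler from the
* server entry point; this wiring is an assumption about the caller and is
* not performed by this module.
*
* @example
* ```typescript
* process.on('SIGINT', async () => {
*   await gracefulShutdown();
*   process.exit(0);
* });
* ```
*/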
//# sourceMappingURL=logger.js.map