// This file is part of HFS - Copyright 2021-2023, Massimo Melina <a@rejetto.com> - License https://www.gnu.org/licenses/gpl-3.0.txt
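// Note: this is the compiled CommonJS output of the TypeScript source; the helpers below
// (__createBinding, __setModuleDefault, __importStar, __importDefault) are emitted by the
// TypeScript compiler for ES-module interop and are not hand-written.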
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
var desc = Object.getOwnPropertyDescriptor(m, k);
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
desc = { enumerable: true, get: function() { return m[k]; } };
}
Object.defineProperty(o, k2, desc);
}) : (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
o[k2] = m[k];
}));
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
o["default"] = v;
});
var __importStar = (this && this.__importStar) || (function () {
var ownKeys = function(o) {
ownKeys = Object.getOwnPropertyNames || function (o) {
var ar = [];
for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
return ar;
};
return ownKeys(o);
};
return function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
__setModuleDefault(result, mod);
return result;
};
})();
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.logMw = exports.loggers = void 0;
exports.getRotatedFiles = getRotatedFiles;
const stream_1 = require("stream");
const config_1 = require("./config");
const fs_1 = require("fs");
const util = __importStar(require("util"));
const promises_1 = require("fs/promises");
const lodash_1 = __importDefault(require("lodash"));
const util_files_1 = require("./util-files");
const auth_1 = require("./auth");
const misc_1 = require("./misc");
const path_1 = require("path");
const events_1 = __importDefault(require("./events"));
const connections_1 = require("./connections");
const index_1 = require("./index");
const serveGuiFiles_1 = require("./serveGuiFiles");
const fast_glob_1 = __importDefault(require("fast-glob"));
class Logger {
constructor(name) {
this.name = name;
this.path = '';
}
async setPath(path) {
var _a;
this.path = path;
(_a = this.stream) === null || _a === void 0 ? void 0 : _a.end();
this.last = undefined;
if (!path)
return this.stream = undefined;
try {
const stats = await (0, promises_1.stat)(path);
this.last = stats.mtime;
}
catch (_b) {
if (await (0, util_files_1.prepareFolder)(path) === false)
console.log("cannot create folder for", path);
}
this.reopen();
}
reopen() {
var _a;
return this.stream = (_a = (0, util_files_1.createFileWithPath)(this.path, { flags: 'a' })) === null || _a === void 0 ? void 0 : _a.on('error', () => this.stream = undefined);
}
}
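// Usage sketch (illustrative only, not executed here): a Logger is bound to a path and keeps
// an append stream plus the time of the last write, which later drives rotation.
//   const l = new Logger('my_log')      // 'my_log' is a hypothetical config key
//   await l.setPath('logs/my.log')      // opens/creates the file in append mode
//   l.stream?.write('one line\n')       // callers write directly to the stream
//   await l.setPath('')                 // an empty path disables the logger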
// logger names match their config keys; they are also used by the get_log API
const accessLogger = new Logger(misc_1.CFG.log);
const accessErrorLog = new Logger(misc_1.CFG.error_log);
exports.loggers = [accessLogger, accessErrorLog];
(0, config_1.defineConfig)(accessLogger.name, 'logs/access.log').sub(path => {
console.debug('access log file: ' + (path || 'disabled'));
accessLogger.setPath(path);
});
const errorLogFile = (0, config_1.defineConfig)(accessErrorLog.name, 'logs/access-error.log');
errorLogFile.sub(path => {
console.debug('access error log: ' + (path || 'disabled'));
accessErrorLog.setPath(path);
});
const logRotation = (0, config_1.defineConfig)(misc_1.CFG.log_rotation, 'weekly');
const dontLogNet = (0, config_1.defineConfig)(misc_1.CFG.dont_log_net, '127.0.0.1|::1', v => (0, misc_1.makeNetMatcher)(v));
const logUA = (0, config_1.defineConfig)(misc_1.CFG.log_ua, false);
const logSpam = (0, config_1.defineConfig)(misc_1.CFG.log_spam, false);
const debounce = lodash_1.default.debounce(cb => cb(), 1000); // passing the callback at call time lets us debounce some code while respecting the references in its closure
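// Illustration of the technique above (comment only): each call passes a fresh closure, so the
// shared 1-second timer debounces the work while the latest captured references are the ones used,
// e.g. debounce(() => stat(logger.path).catch(() => logger.reopen()))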
const logMw = async (ctx, next) => {
const now = new Date(); // request start
// set it now so it's already available to plugins when control returns to them
ctx.state.completed = Promise.race([(0, stream_1.once)(ctx.res, 'finish'), (0, stream_1.once)(ctx.res, 'close')]);
await next();
console.debug(ctx.status, ctx.method, ctx.originalUrl);
if (!logSpam.get()
&& (ctx.querystring.includes('{.exec|')
|| ctx.status === misc_1.HTTP_NOT_FOUND && /wlwmanifest.xml$|robots.txt$|\.(php)$|cgi/.test(ctx.path))) {
events_1.default.emit('spam', ctx);
return;
}
const conn = (0, connections_1.getConnection)(ctx); // collect reference before close
// don't await, as we don't want to hold up the middleware chain
ctx.state.completed.then(() => {
var _a, _b, _c, _d;
if (ctx.state.dontLog || ctx.state.considerAsGui && !serveGuiFiles_1.logGui.get())
return;
if (dontLogNet.compiled()(ctx.ip))
return;
const isError = ctx.status >= 400;
const logger = isError && accessErrorLog || accessLogger;
const rotate = (_a = logRotation.get()) === null || _a === void 0 ? void 0 : _a[0];
let { stream, last, path } = logger;
if (!stream)
return;
logger.last = now;
if (rotate && last) { // rotation enabled and a file exists?
const passed = Number(now) - Number(last)
- 3600000; // be pessimistic and count a possible DST change
if (rotate === 'm' && (passed >= 31 * misc_1.DAY || now.getMonth() !== last.getMonth())
|| rotate === 'd' && (passed >= misc_1.DAY || now.getDate() !== last.getDate()) // checking passed covers the case where the day of the month is the same but a whole month has gone by
|| rotate === 'w' && (passed >= 7 * misc_1.DAY || now.getDay() < last.getDay())) {
stream.end();
const suffix = '-' + last.getFullYear() + '-' + doubleDigit(last.getMonth() + 1) + '-' + doubleDigit(last.getDate());
const newPath = (0, misc_1.strinsert)(path, path.length - (0, path_1.extname)(path).length, suffix);
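// e.g. (hypothetical date) 'logs/access.log' is renamed to 'logs/access-2024-02-12.log',
// with the date taken from the last write before rotation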
try { // no other log writes should happen while we rename; rotation is infrequent, so doing it synchronously is acceptable
(0, fs_1.renameSync)(path, newPath);
}
catch (e) { // rename failed, but we can still keep logging
console.error(e.message || String(e));
}
stream = logger.reopen(); // keep variable updated
if (!stream)
return;
}
}
const format = '%s - %s [%s] "%s %s HTTP/%s" %d %s %s\n'; // Apache's Common Log Format
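// Example of a resulting line (hypothetical values):
//   127.0.0.1 - admin [12/Feb/2024:10:00:00 +0100] "GET /some/file HTTP/1.1" 200 1234 "{\"dl\":1}"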
const a = now.toString().split(' '); // like nginx, our default log contains the time of log writing
const date = a[2] + '/' + a[1] + '/' + a[3] + ':' + a[4] + ' ' + ((_b = a[5]) === null || _b === void 0 ? void 0 : _b.slice(3));
const user = (0, auth_1.getCurrentUsername)(ctx);
const length = (_c = ctx.state.length) !== null && _c !== void 0 ? _c : ctx.length;
const uri = ctx.originalUrl;
const duration = (Date.now() - Number(now)) / 1000;
ctx.logExtra(ctx.vfsNode && {
speed: Math.round(length / duration),
...ctx.state.includesLastByte && ctx.res.finished && { dl: 1 }
} || ctx.state.uploadSize !== undefined && {
ul: ctx.state.uploads,
size: ctx.state.uploadSize,
speed: Math.round(ctx.state.uploadSize / duration),
});
if (conn === null || conn === void 0 ? void 0 : conn.country)
ctx.logExtra({ country: conn.country });
if (logUA.get())
ctx.logExtra({ ua: ctx.get('user-agent') || undefined });
const extra = ctx.state.logExtra;
if (events_1.default.anyListener(logger.name)) // small optimization: this event fires often but usually has no listener, so we avoid building the parameters object pointlessly; a benchmark measured this line ~20% faster, though it may not have been strictly necessary
events_1.default.emit(logger.name, { ctx, length, user, ts: now, uri, extra });
debounce(() => // once in a while we check if the file is still good (not deleted, etc), or we'll reopen it
(0, promises_1.stat)(logger.path).catch(() => logger.reopen())); // async = smoother but we may lose some entries
stream.write(util.format(format, ctx.ip, user || '-', date, ctx.method, uri, ctx.req.httpVersion, ctx.status, (_d = length === null || length === void 0 ? void 0 : length.toString()) !== null && _d !== void 0 ? _d : '-', lodash_1.default.isEmpty(extra) ? '' : JSON.stringify(JSON.stringify(extra))));
});
};
exports.logMw = logMw;
events_1.default.once('app', () => {
index_1.app.context.logExtra = function (anything, params) {
lodash_1.default.merge(this.state, { logExtra: { ...anything, params } }); // params are treated as the parameters of the API call
};
});
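// Usage sketch (hypothetical fields): anywhere a ctx is available,
//   ctx.logExtra({ country: 'IT' })                  // merged into ctx.state.logExtra
//   ctx.logExtra({ cmd: 'delete' }, { path: '/x' })  // second argument is logged as API params
// and the accumulated object ends up JSON-encoded in the last column of the access log.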
function doubleDigit(n) {
return n > 9 ? n : '0' + n;
}
async function getRotatedFiles() {
return Object.fromEntries(await Promise.all(exports.loggers.map(async (x) => {
const mask = (0, misc_1.strinsert)(x.path, x.path.length - (0, path_1.extname)(x.path).length, '-2*'); // including '2', the first digit of the year, matches only rotated files and not the "-error" log
return [x.name, (await (0, fast_glob_1.default)(mask, { stats: true })).map(x => { var _a; return ({ path: x.path, size: (_a = x.stats) === null || _a === void 0 ? void 0 : _a.size }); })];
})));
}
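// Example return shape (hypothetical paths and sizes; the actual key strings come from CFG.log / CFG.error_log):
//   { log: [{ path: 'logs/access-2024-01-01.log', size: 12345 }], error_log: [] }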
// dump console.error to file
let debugLogFile = (0, fs_1.createWriteStream)('debug.log', { flags: 'a' });
debugLogFile.once('open', () => {
const was = console.error;
console.error = function (...args) {
was.apply(this, args);
args = args.map(x => { var _a; return typeof x === 'string' ? x : ((_a = (0, misc_1.tryJson)(x)) !== null && _a !== void 0 ? _a : String(x)); });
debugLogFile.write((0, misc_1.formatTimestamp)(new Date) + ' - ' + args.join(' ') + '\n');
};
// limit log size
const LIMIT = 1000000;
const { path } = debugLogFile;
(0, misc_1.repeat)(misc_1.DAY, () => {
if ((0, fs_1.statSync)(path).size < LIMIT)
return; // no need
(0, fs_1.renameSync)(path, 'old-' + path);
debugLogFile = (0, fs_1.createWriteStream)(path); // new file
});
}).on('error', () => console.log("cannot create debug.log"));