// hfs — HTTP File Server (listing header: 271 lines, 13.9 kB, JavaScript)
// This file is part of HFS - Copyright 2021-2023, Massimo Melina <a@rejetto.com> - License https://www.gnu.org/licenses/gpl-3.0.txt
// CommonJS interop helper emitted by the TypeScript compiler: real ES modules
// pass through unchanged, anything else is wrapped so it is reachable as `.default`.
var __importDefault = (this && this.__importDefault) || function (mod) {
    if (mod && mod.__esModule)
        return mod;
    return { "default": mod };
};
// CommonJS export bindings (this file is transpiled from TypeScript).
// Mutable exports are pre-declared as void 0 and assigned further down.
Object.defineProperty(exports, "__esModule", { value: true });
exports.getProjectInfo = exports.blacklistedInstalledPlugins = exports.alerts = exports.downloading = void 0;
exports.downloadPlugin = downloadPlugin;
exports.getRepoInfo = getRepoInfo;
exports.readGithubFile = readGithubFile;
exports.readOnlinePlugin = readOnlinePlugin;
exports.readOnlineCompatiblePlugin = readOnlineCompatiblePlugin;
exports.getFolder2repo = getFolder2repo;
exports.apiGithubPaginated = apiGithubPaginated;
exports.searchPlugins = searchPlugins;
const events_1 = __importDefault(require("./events"));
const misc_1 = require("./misc");
const plugins_1 = require("./plugins");
const apiMiddleware_1 = require("./apiMiddleware");
const lodash_1 = __importDefault(require("lodash"));
const const_1 = require("./const");
const promises_1 = require("fs/promises");
const path_1 = require("path");
const fs_1 = require("fs");
const persistence_1 = require("./persistence");
const argv_1 = require("./argv");
// folder of the repo where plugins publish their built files
const DIST_ROOT = 'dist';
// repo -> current download status; entries are removed when a download ends
exports.downloading = {};
// Record the download status of a repo in `exports.downloading` and notify
// listeners. A status of `undefined` clears the entry (download over).
function downloadProgress(repo, status) {
    if (status !== undefined)
        exports.downloading[repo] = status;
    else
        delete exports.downloading[repo];
    events_1.default.emit('pluginDownload', { repo, status });
}
// determine default branch, possibly without consuming api quota
// Determine the default branch of a github repo, possibly without consuming
// api quota: first probe for 'main' with a cheap HEAD request, and only fall
// back to the repos api when that returns 404.
async function getGithubDefaultBranch(repo) {
    if (!repo.includes('/'))
        throw 'malformed repo';
    const hasMain = await (0, misc_1.httpString)(`https://github.com/${repo}/archive/refs/heads/main.zip`, { method: 'HEAD' })
        .then(() => true, err => {
            const code = err && err.cause && err.cause.statusCode;
            if (code !== 404)
                throw err; // anything but "not found" is a real failure
            return false;
        });
    if (hasMain)
        return 'main';
    const info = await getRepoInfo(repo);
    return info === null || info === void 0 ? void 0 : info.default_branch;
}
// Download a plugin and install it under the plugins folder.
// `repo`: github "owner/name" string, or a custom-repo object (its `main` is used as key).
// `branch`: github branch to download ('' = detect default); `overwrite`: reuse the existing folder.
// Returns the destination folder name (also the plugin id client-side).
// Throws ApiError on concurrent download, blacklisting, malformed repo, or a zip missing its main file.
async function downloadPlugin(repo, { branch = '', overwrite = false } = {}) {
    var _a, _b, _c;
    if (typeof repo !== 'string')
        repo = repo.main;
    if (exports.downloading[repo])
        throw new apiMiddleware_1.ApiError(const_1.HTTP_CONFLICT, "already downloading");
    const msg = await isPluginBlacklisted(repo);
    if (msg)
        throw new apiMiddleware_1.ApiError(const_1.HTTP_FORBIDDEN, "blacklisted: " + msg);
    console.log('downloading plugin', repo);
    downloadProgress(repo, true);
    try {
        const pl = (0, plugins_1.findPluginByRepo)(repo);
        if (repo.includes('//')) { // custom repo
            // a custom repo can only update an installed plugin: the zip url comes from the plugin itself
            if (!pl)
                throw new apiMiddleware_1.ApiError(const_1.HTTP_BAD_REQUEST, "bad repo");
            const customRepo = (((_b = (_a = pl).getData) === null || _b === void 0 ? void 0 : _b.call(_a)) || pl).repo;
            let url = customRepo === null || customRepo === void 0 ? void 0 : customRepo.zip;
            if (!url)
                throw new apiMiddleware_1.ApiError(const_1.HTTP_SERVER_ERROR, "bad plugin");
            if (!url.includes('//'))
                url = customRepo.web + url; // relative zip url: resolve against the repo's web address
            return await go(url, pl === null || pl === void 0 ? void 0 : pl.id, (_c = customRepo.zipRoot) !== null && _c !== void 0 ? _c : DIST_ROOT);
        }
        branch || (branch = await getGithubDefaultBranch(repo));
        const short = repo.split('/')[1]; // second part, repo without the owner
        if (!short)
            throw new apiMiddleware_1.ApiError(const_1.HTTP_BAD_REQUEST, "bad repo");
        const folder = overwrite && (pl === null || pl === void 0 ? void 0 : pl.id) // use existing folder
            || (getFolder2repo().hasOwnProperty(short) ? repo.replace('/', '-') // longer form only if another plugin is using short form, to avoid overwriting
                : short.replace(/^hfs-/, ''));
        const GITHUB_ZIP_ROOT = short + '-' + branch; // GitHub puts everything within this folder
        return await go(`https://github.com/${repo}/archive/refs/heads/${branch}.zip`, folder, GITHUB_ZIP_ROOT + '/' + DIST_ROOT);
        // Download the zip at `url`, extract the relevant entries into a temp
        // folder, then atomically swap it in place of the old installation.
        async function go(url, folder, zipRoot) {
            const installPath = plugins_1.PATH + '/' + folder;
            const tempInstallPath = installPath + '-installing' + plugins_1.DISABLING_SUFFIX;
            // platform-specific zip folders (most specific first) take precedence over the generic root
            const foldersToCopy = [
                zipRoot + '-' + process.platform + '-' + process.arch,
                zipRoot + '-' + process.platform,
                zipRoot,
            ].map(x => x + '/');
            // github zip doesn't have content-length, so we cannot produce progress event
            const stream = await (0, misc_1.httpStream)(url);
            await (0, misc_1.unzip)(stream, async (path) => {
                const folder = foldersToCopy.find(x => path.startsWith(x));
                if (!folder || path.endsWith('/'))
                    return false; // skip entries outside the folders of interest, and directory entries
                let dest = path.slice(folder.length);
                dest = (0, path_1.join)(tempInstallPath, dest);
                // remove any previous copy of the file first; returning the path tells unzip where to extract
                return (0, promises_1.rm)(dest, { force: true }).then(() => dest, () => false);
            });
            // ready to replace
            const wasRunning = (0, plugins_1.isPluginRunning)(folder);
            if (wasRunning)
                await (0, plugins_1.stopPlugin)(folder); // stop old
            let retry = 3;
            while (retry--) { // move data, and consider late release of the resource, up to a few seconds
                const res = (0, promises_1.rename)((0, path_1.join)(installPath, plugins_1.STORAGE_FOLDER), (0, path_1.join)(tempInstallPath, plugins_1.STORAGE_FOLDER));
                // stop retrying on success, or when there is no storage folder to move (ENOENT)
                if (await res.then(() => true, e => e.code === 'ENOENT'))
                    break;
                await (0, misc_1.wait)(1000);
            }
            // delete old folder, but it may fail in the presence of .node files, so we rename it first as a precaution (clearing require.cache doesn't help)
            const deleteMe = installPath + plugins_1.DELETE_ME_SUFFIX;
            await (0, promises_1.rename)(installPath, deleteMe).catch(() => { });
            await (0, promises_1.rm)(deleteMe, { recursive: true, force: true }).catch(e => console.warn(String(e)));
            // final replace
            await (0, promises_1.rename)(tempInstallPath, installPath)
                .catch(e => { throw e.code !== 'ENOENT' ? e : new apiMiddleware_1.ApiError(const_1.HTTP_NOT_ACCEPTABLE, "missing main file"); });
            if (wasRunning)
                void (0, plugins_1.startPlugin)(folder) // don't wait, in case it fails to start. We still use startPlugin instead of enablePlugin, as it will take care of disabling other themes.
                    .catch(() => { }); // it will possibly fail (with 'miss') because the plugin has probably not been loaded yet.
            events_1.default.emit('pluginDownloaded', { id: folder, repo });
            return folder;
        }
    }
    finally {
        // always clear the downloading flag, even on failure
        downloadProgress(repo, undefined);
    }
}
// Fetch github metadata for a repository given as "owner/name".
function getRepoInfo(id) {
    return apiGithub(`repos/${id}`);
}
// Read a raw file from github; `uri` is "owner/repo/branch/path".
function readGithubFile(uri) {
    return (0, misc_1.httpString)(`https://raw.githubusercontent.com/${uri}`);
}
// Fetch and parse the online plugin.js of `repo`.
// `repo` is a github "owner/name" string, or — for non-github plugins — the
// repo object of an installed plugin, whose `main` url points at the source.
// For github repos, the parsed plugin gets a `branch` property.
async function readOnlinePlugin(repo, branch = '') {
    if (typeof repo !== 'string') { // non-github plugin
        // locate the installed folder bound to this very repo object
        const folder = lodash_1.default.findKey(getFolder2repo(), x => x === repo);
        if (!folder)
            throw Error();
        const pl = (0, plugins_1.getPluginInfo)(folder);
        let { main } = pl.repo;
        if (!main)
            throw Error("missing repo.main");
        if (!main.includes('//'))
            main = pl.repo.web + main; // relative url: resolve against the repo's web address
        return (0, plugins_1.parsePluginSource)(main, await (0, misc_1.httpString)(main)); // use 'repo' as 'id' client-side
    }
    branch || (branch = await getGithubDefaultBranch(repo));
    const res = await readGithubFile(`${repo}/${branch}/${DIST_ROOT}/plugin.js`);
    const pl = (0, plugins_1.parsePluginSource)(repo, res); // use 'repo' as 'id' client-side
    pl.branch = branch;
    return pl;
}
// Like readOnlinePlugin, but only returns a plugin compatible with this hfs:
// if the requested/default branch reports `badApi`, branches named "api*"
// are scanned (sorted, newest first). Returns undefined when none fits.
async function readOnlineCompatiblePlugin(repo, branch = '') {
    const pl = await readOnlinePlugin(repo, branch);
    if (!(pl === null || pl === void 0 ? void 0 : pl.apiRequired))
        return; // mandatory field
    if (!pl.badApi)
        return pl;
    // we try other branches (starting with 'api')
    const res = await apiGithub('repos/' + repo + '/branches');
    const branches = res.map((x) => x === null || x === void 0 ? void 0 : x.name)
        .filter((x) => typeof x === 'string' && x.startsWith('api'))
        .sort().reverse();
    for (const branch of branches) {
        const pl = await readOnlinePlugin(repo, branch);
        if (!pl)
            continue;
        if (!pl.apiRequired)
            pl.badApi = '-'; // a branch without the mandatory field counts as incompatible
        if (!pl.badApi)
            return pl;
    }
}
// Build a map of plugin folder (id) -> repo, covering both available and
// running plugins; for running plugins the live data takes precedence.
function getFolder2repo() {
    const ret = {};
    for (const plugin of (0, plugins_1.getAvailablePlugins)())
        ret[plugin.id] = plugin.repo;
    for (const [id, repo] of (0, plugins_1.mapPlugins)(x => [x.id, x.getData().repo]))
        ret[id] = repo;
    return ret;
}
// Call the github REST api and parse the JSON response.
// A 403 from github is translated to Error('github_quota').
async function apiGithub(uri) {
    const body = await (0, misc_1.httpString)('https://api.github.com/' + uri, {
        headers: {
            'User-Agent': 'HFS',
            Accept: 'application/vnd.github.v3+json',
        }
    }).catch(e => {
        // https://docs.github.com/en/rest/overview/resources-in-the-rest-api?apiVersion=2022-11-28#rate-limiting
        if (e.message === '403')
            throw Error('github_quota');
        throw e;
    });
    return JSON.parse(body);
}
// Iterate all results of a paginated github api, yielding one item at a time.
// Works both with endpoints returning a bare array and with "search/*"
// endpoints, which wrap the array in { items, total_count }.
async function* apiGithubPaginated(uri) {
    const sep = uri.includes('?') ? '&' : '?';
    const PAGE_SIZE = 100;
    let count = 0;
    try {
        for (let page = 1; ; page++) {
            const res = await apiGithub(`${uri}${sep}page=${page}&per_page=${PAGE_SIZE}`);
            const items = res.items || res; // "search/repositories" returns an object, while "releases" returns simply an array
            yield* items;
            count += items.length;
            // stop on an empty page, or once we got everything the search reported
            if (!items.length || count >= res.total_count)
                break;
        }
    }
    catch (e) {
        if (e.message !== '422') // for some strange reason github api is returning this error if we search repos for a missing user, instead of empty set
            throw e;
    }
}
// Look up `repo` in the central blacklist: resolves to its message, '' when
// not blacklisted, or undefined when the central info could not be fetched.
async function isPluginBlacklisted(repo) {
    return (0, exports.getProjectInfo)().then(info => {
        const blacklist = info && info.repo_blacklist;
        const entry = blacklist && blacklist[repo];
        return (entry && entry.message) || '';
    }, () => undefined);
}
// Search github for plugin repos (topic "hfs-plugin") matching `text`, also
// trying the first 2 words as author names. Returns an AsapStream of parsed
// compatible plugins, skipping `skipRepos` and blacklisted ones; incompatible
// or unreadable entries resolve to undefined.
async function searchPlugins(text = '', { skipRepos = [''] } = {}) {
    const searches = [encodeURI(text), ...text.split(' ').filter(Boolean).slice(0, 2).map(x => 'user:' + encodeURI(x))]; // first 2 words can be the author
    const list = await Promise.all(searches.map(x => (0, misc_1.asyncGeneratorToArray)(apiGithubPaginated(`search/repositories?q=topic:hfs-plugin+${x}`))))
        .then(all => all.flat()); // make it a single array
    return new misc_1.AsapStream(list.map(async (it) => {
        var _a;
        const repo = it.full_name;
        if (skipRepos.includes(repo) || await isPluginBlacklisted(repo))
            return;
        const pl = await readOnlineCompatiblePlugin(repo, it.default_branch).catch(() => undefined);
        if (!pl)
            return;
        // enrich the plugin with github metadata useful to the client
        Object.assign(pl, {
            downloading: exports.downloading[repo],
            license: (_a = it.license) === null || _a === void 0 ? void 0 : _a.spdx_id,
        }, lodash_1.default.pick(it, ['pushed_at', 'stargazers_count', 'default_branch']));
        return pl;
    }));
}
// alert messages coming from the central info, persisted across restarts
exports.alerts = persistence_1.storedMap.singleSync('alerts', []);
const cachedCentralInfo = persistence_1.storedMap.singleSync('cachedCentralInfo', ''); // persisting it could also be useful for no-internet instances, so that you can provide a fresher copy
// repos of installed plugins found in the central blacklist (refreshed by getProjectInfo)
exports.blacklistedInstalledPlugins = [];
// centralized hosted information, to be used as little as possible
const FN = 'central.json';
// copy of central.json bundled with the installation, used as last-resort fallback
let builtIn = JSON.parse((0, fs_1.readFileSync)((0, path_1.join)(__dirname, '..', FN), 'utf8'));
// Fetch central.json from the HFS repo, debounced (result retained for an
// hour, 1 minute on failure). Falls back to the cached copy, then to the
// bundled one. Side effects: persists the fresh copy, updates `alerts`, and
// disables installed plugins that appear in the blacklist.
exports.getProjectInfo = (0, misc_1.debounceAsync)(() => argv_1.argv.central === false ? Promise.resolve(builtIn) : readGithubFile(`${const_1.HFS_REPO}/${const_1.HFS_REPO_BRANCH}/${FN}`)
    .then(JSON.parse, () => null)
    .then(o => {
    if (o)
        cachedCentralInfo.set(o);
    o || (o = { ...cachedCentralInfo.get() || builtIn }); // fall back to built-in
    // merge byVersions info in the main object, but collect alerts separately, to preserve multiple instances
    const allAlerts = [o.alert];
    for (const [ver, more] of Object.entries((0, misc_1.popKey)(o, 'byVersion') || {}))
        if (const_1.VERSION.match(new RegExp(ver))) {
            allAlerts.push(more.alert);
            Object.assign(o, more);
        }
    lodash_1.default.remove(allAlerts, x => !x); // drop empty entries
    exports.alerts.set(was => {
        // log alerts only when the set actually changed
        if (!lodash_1.default.isEqual(was, allAlerts))
            for (const a of allAlerts)
                console.log("ALERT:", a);
        return allAlerts;
    });
    // disable any installed plugin that appears in the central blacklist
    const black = (0, misc_1.onlyTruthy)(Object.keys(o.repo_blacklist || {}).map(plugins_1.findPluginByRepo));
    exports.blacklistedInstalledPlugins = (0, misc_1.onlyTruthy)(black.map(x => lodash_1.default.isString(x.repo) && x.repo));
    if (black.length) {
        console.log("blacklisted plugins found:", black.join(', '));
        for (const p of black)
            (0, plugins_1.enablePlugin)(p.id, false);
    }
    return o;
}), { retain: misc_1.HOUR, retainFailure: 60000 });
;