gqlcheck
Version:
Performs additional checks on your GraphQL documents and operations to ensure they conform to your rules, whilst allow-listing existing operations and their constituent parts (and allowing overrides on a per-field basis). Rules include max selection set depth.
231 lines (230 loc) • 8.36 kB
JavaScript
Object.defineProperty(exports, "__esModule", { value: true });
exports.checkOperations = checkOperations;
const tslib_1 = require("tslib");
const promises_1 = require("node:fs/promises");
const os = tslib_1.__importStar(require("node:os"));
const node_worker_threads_1 = require("node:worker_threads");
const graphile_config_1 = require("graphile-config");
const load_1 = require("graphile-config/load");
const json5_1 = tslib_1.__importDefault(require("json5"));
const baseline_js_1 = require("./baseline.js");
/**
 * Creates a "deferred": a Promise augmented with its own `resolve` and
 * `reject` functions so it can be settled from outside the executor.
 *
 * @returns {Promise & { resolve: Function, reject: Function }}
 */
function defer() {
    let resolve;
    // eslint-disable-next-line @typescript-eslint/no-explicit-any
    let reject;
    const promise = new Promise((res, rej) => {
        resolve = res;
        reject = rej;
    });
    return Object.assign(promise, { resolve, reject });
}
/**
 * Loads and parses the JSON5 baseline file at `baselinePath`.
 *
 * @param baselinePath - path to the baseline file, or null/undefined when no
 *   baseline is configured.
 * @returns the parsed baseline, or `null` when no path is configured or the
 *   file does not exist.
 * @throws a wrapping Error (with `cause`) for any other read/parse failure.
 */
async function loadBaseline(baselinePath) {
    if (baselinePath == null) {
        return null;
    }
    try {
        const raw = await (0, promises_1.readFile)(baselinePath, "utf8");
        // TODO: safer casting
        return json5_1.default.parse(raw);
    }
    catch (e) {
        const fileIsAbsent = typeof e === "object" &&
            e != null &&
            "code" in e &&
            e.code === "ENOENT";
        if (fileIsAbsent) {
            // A missing baseline file is equivalent to "no baseline".
            return null;
        }
        throw new Error(`Failed to load baseline from configured '${baselinePath}'`, { cause: e });
    }
}
// Byte-size helpers.
const KB = 1024;
const MB = KB * 1024;
const GB = MB * 1024;
/**
 * Chooses a default worker count: one per CPU core, capped so that each
 * worker has roughly 1.5GB of free memory, and never fewer than one.
 *
 * @returns {number} worker count, >= 1.
 */
function defaultCount() {
    const coreCount = os.cpus().length;
    // Allow for 1.5GB per process. This should be overkill
    const freeMemory = os.freemem();
    const workersThatFitInMemory = Math.floor(freeMemory / (1.5 * GB));
    // TODO: we should never run more workers than total number of documents
    // divided by ~500 otherwise we get diminishing returns (and even slowdown on
    // high core machines!). Need to have an idea of how many documents to
    // expect.
    // At least 1 worker, and no more than either cap allows.
    return Math.max(1, Math.min(workersThatFitInMemory, coreCount));
}
/**
 * Checks every GraphQL document yielded by `getDocuments` using a pool of
 * worker threads, then aggregates the per-source results.
 *
 * @param getDocuments - factory returning an (async) iterable of sources;
 *   each source is either a string (named by its index) or an object with
 *   `name` and `body`.
 * @param configPath - path handed to graphile-config's `loadConfig`.
 * @param overrideConfig - overrides layered on top of the loaded config.
 * @returns the aggregated result object; when a baseline is configured the
 *   result is first filtered through `filterBaseline`.
 * @throws on duplicate source names, on request/result mismatches, and when
 *   one operation name is reused with a different operation kind.
 */
async function checkOperations(getDocuments, configPath, overrideConfig) {
    // Resolve configuration: loaded config first, then caller overrides
    // (later presets win in graphile-config's resolvePresets).
    const rawConfig = await (0, load_1.loadConfig)(configPath);
    const config = (0, graphile_config_1.resolvePresets)([
        rawConfig ?? {},
        { gqlcheck: overrideConfig },
    ]);
    const { gqlcheck: { baselinePath, workerCount = defaultCount() } = {} } = config;
    const baseline = await loadBaseline(baselinePath);
    const workerPromises = [];
    // A worker failing is treated as unrecoverable: log and abort the process.
    const handleError = (_worker, error) => {
        console.error(`Worker exited with error: ${error}`);
        process.exit(2);
    };
    // Spawn `workerCount` workers; each deferred resolves with its worker
    // once that worker reports "READY".
    for (let i = 0; i < workerCount; i++) {
        const deferred = defer();
        const workerNumber = i;
        const workerData = {
            configPath,
            overrideConfig,
        };
        const worker = new node_worker_threads_1.Worker(`${__dirname}/worker.js`, {
            workerData,
        });
        worker.on("error", (error) => {
            worker.terminate();
            handleError(worker, error);
        });
        worker.on("exit", (code) => {
            // Exit code 0 is the expected shutdown after a "STOP" message;
            // anything else is fatal.
            if (code !== 0) {
                handleError(worker, new Error(`Worker ${workerNumber} stopped with exit code ${code}`));
            }
        });
        workerPromises[i] = deferred;
        worker.once("message", (msg) => {
            if (msg === "READY") {
                deferred.resolve(worker);
            }
            else {
                console.error(`Received unexpected response: %O`, msg);
                process.exit(3);
            }
        });
    }
    // Wait until every worker has signalled readiness.
    const workers = await Promise.all(workerPromises);
    // Simple worker pool: `freeWorkers` holds idle workers, `queue` holds
    // deferreds for tasks currently waiting on a worker.
    const freeWorkers = [...workers];
    const queue = [];
    let workersActive = true;
    // Permanently removes `worker` from the pool and tells it to shut down.
    function releaseWorker(worker) {
        workers.splice(workers.indexOf(worker), 1);
        worker.postMessage("STOP");
    }
    // Returns an idle worker immediately, or (if all are busy) a promise
    // that resolves with one when it becomes available.
    function _getFreeWorker() {
        const worker = freeWorkers.pop();
        if (worker) {
            return worker;
        }
        else {
            const promise = defer();
            queue.push(promise);
            return promise;
        }
    }
    // Hands a finished worker to the next queued task; once the pool has
    // been deactivated, releases it instead of re-idling it.
    function _returnWorker(worker) {
        const waiting = queue.pop();
        if (waiting) {
            waiting.resolve(worker);
        }
        else if (!workersActive) {
            releaseWorker(worker);
        }
        else {
            freeWorkers.push(worker);
        }
    }
    // Deactivates the pool and releases all currently-idle workers; busy
    // workers are released by `_returnWorker` as they finish.
    function releaseWorkers() {
        workersActive = false;
        const workersToRelease = freeWorkers.splice(0, freeWorkers.length);
        for (const worker of workersToRelease) {
            releaseWorker(worker);
        }
    }
    // Sends `request` to the next free worker; `resultPromise` resolves with
    // the worker's reply, and the worker is recycled back into the pool.
    async function startWorkerTask(request) {
        const resultPromise = defer();
        const worker = await _getFreeWorker();
        const handleResponse = (message) => {
            worker.off("message", handleResponse);
            _returnWorker(worker);
            resultPromise.resolve(message);
        };
        worker.on("message", handleResponse);
        worker.postMessage(request);
        return { request, resultPromise };
    }
    const startedTasks = [];
    let index = -1;
    const sourceNames = new Set();
    // In this loop the `await` is just for _starting_ a task, this gives us flow
    // control (so we don't request more data than we have workers to handle, but
    // we still make sure that all workers are busy). This is why we push a
    // promise onto the list.
    for await (const source of getDocuments()) {
        index++;
        // String sources are named by their position; object sources carry
        // their own name.
        const sourceName = typeof source === "string" ? String(index) : source.name;
        if (sourceNames.has(sourceName)) {
            throw new Error(`Source name '${sourceName}' has been used more than once, source names must be unique.`);
        }
        else {
            sourceNames.add(sourceName);
        }
        const sourceString = typeof source === "string" ? source : source.body;
        const task = await startWorkerTask({
            sourceName,
            sourceString,
        });
        startedTasks.push(task);
    }
    // Now that all the tasks have been started, we wait for them all to
    // complete.
    const allResults = [];
    for (const task of startedTasks) {
        const request = task.request;
        const result = await task.resultPromise;
        // Sanity check that the reply we received is for the request we sent.
        if (result.sourceName !== request.sourceName) {
            throw new Error(`Internal consistency error: the result we received from the worker was for source '${result.sourceName}', but the request was for '${request.sourceName}'`);
        }
        allResults.push({ request, result });
    }
    // All work is done; shut the pool down.
    releaseWorkers();
    // Null-prototype objects so keys coming from source names / count names
    // cannot collide with Object.prototype properties.
    const results = Object.create(null);
    const operationKindByOperationName = new Map();
    const counts = Object.create(null);
    for (const { request, result } of allResults) {
        const { sourceName } = request;
        const { operations } = result;
        // Enforce that a given operation name always has the same operation
        // kind across all sources.
        for (const operation of operations) {
            const { operationName, operationKind } = operation;
            if (!operationName)
                continue;
            const expectedOperationKind = operationKindByOperationName.get(operationName);
            if (!expectedOperationKind) {
                operationKindByOperationName.set(operationName, operationKind);
            }
            else if (expectedOperationKind !== operationKind) {
                throw new Error(`Named operation '${operationName}' previously existed with operation type '${expectedOperationKind}', but another operation with the same name now has type '${operationKind}'. This is forbidden.`);
            }
            else {
                // All good
            }
        }
        results[sourceName] = {
            output: result,
        };
        // Fold this source's counts into the run-wide totals.
        if (result.meta.count) {
            for (const [key, value] of Object.entries(result.meta.count)) {
                if (counts[key] === undefined) {
                    counts[key] = value;
                }
                else {
                    counts[key] += value;
                }
            }
        }
    }
    let result = {
        rawResultsBySourceName: results,
        resultsBySourceName: results,
        baseline,
        resolvedPreset: config,
        counts,
        filtered: 0,
    };
    // When a baseline is configured, filter allow-listed issues out of the
    // result before returning it.
    if (baseline) {
        result = (0, baseline_js_1.filterBaseline)(baseline, result);
    }
    return result;
}
;