@graphql-codegen/cli
import { DetailedError, normalizeOutputParam, normalizeInstanceOrArray, normalizeConfig, getCachedDocumentNodeFromSchema, isDetailedError, } from '@graphql-codegen/plugin-helpers';
import { codegen } from '@graphql-codegen/core';
import { AggregateError } from '@graphql-tools/utils';
import { GraphQLError } from 'graphql';
import { getPluginByName } from './plugins.js';
import { getPresetByName } from './presets.js';
import { debugLog, printLogs } from './utils/debugging.js';
import { ensureContext, shouldEmitLegacyCommonJSImports } from './config.js';
import fs from 'fs';
import path from 'path';
import { cpus } from 'os';
import { createRequire } from 'module';
import { Listr } from 'listr2';
/**
* Poor man's ESM detection.
* Looking at this and you have a better method?
* Send a PR.
*/
const isESMModule = (typeof __dirname === 'string') === false;
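/**
* Builds a loader that resolves module ids relative to `from` (typically the
* config's cwd) via `createRequire`, then dynamically imports them. Under ESM
* the id is imported as-is, since no CommonJS-style resolution is done there.
*/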
const makeDefaultLoader = (from) => {
if (fs.statSync(from).isDirectory()) {
from = path.join(from, '__fake.js');
}
const relativeRequire = createRequire(from);
return async (mod) => {
return import(isESMModule
? /**
* For ESM we currently have no "resolve path" solution,
* as `import.meta` is unavailable when compiled to CommonJS
* and `import.meta.resolve` is not available in stable Node.js.
**/
mod
: relativeRequire.resolve(mod));
};
};
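/**
* A tiny namespaced memoization cache shared by all outputs of a single run,
* so identical schema/document pointer sets are only loaded and parsed once.
*/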
function createCache() {
const cache = new Map();
return function ensure(namespace, key, factory) {
const cacheKey = `${namespace}:${key}`;
const cachedValue = cache.get(cacheKey);
if (cachedValue) {
return cachedValue;
}
const value = factory();
cache.set(cacheKey, value);
return value;
};
}
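/**
* Runs the full codegen pipeline for the given config (or codegen context) and
* returns the generated files; it does not write anything to disk.
*
* A minimal usage sketch (assuming a local `schema.graphql` and the
* `typescript` plugin are installed; adjust to your own config):
*
*   const files = await executeCodegen({
*     schema: 'schema.graphql',
*     generates: { 'types.ts': { plugins: ['typescript'] } },
*   });
*   // files -> [{ filename: 'types.ts', content: '...', hooks: {} }]
*/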
export async function executeCodegen(input) {
const context = ensureContext(input);
const config = context.getConfig();
const pluginContext = context.getPluginContext();
const result = [];
let rootConfig = {};
let rootSchemas;
let rootDocuments;
const generates = {};
const cache = createCache();
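// Wraps a task body so it runs under the profiler, tags any error with the
// output filename as `source`, and collects it on the Listr context before
// re-throwing, so a failed output is reported without hiding the others.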
function wrapTask(task, source, taskName, ctx) {
return () => {
return context.profiler.run(async () => {
try {
await Promise.resolve().then(() => task());
}
catch (error) {
if (source && !(error instanceof GraphQLError)) {
error.source = source;
}
ctx.errors.push(error);
throw error;
}
}, taskName);
};
}
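// Loads any `require` extensions, normalizes the root `config`, `schema` and
// `documents` fields, and validates that every entry in `generates` has a
// plugins list or a preset, plus a schema at the root or output level.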
async function normalize() {
/* Load Require extensions */
const requireExtensions = normalizeInstanceOrArray(config.require);
const loader = makeDefaultLoader(context.cwd);
for (const mod of requireExtensions) {
await loader(mod);
}
/* Root plugin config */
rootConfig = config.config || {};
/* Normalize root "schema" field */
rootSchemas = normalizeInstanceOrArray(config.schema);
/* Normalize root "documents" field */
rootDocuments = normalizeInstanceOrArray(config.documents);
/* Normalize "generators" field */
const generateKeys = Object.keys(config.generates || {});
if (generateKeys.length === 0) {
throw new DetailedError('Invalid Codegen Configuration!', `
Please make sure that your codegen config file contains the "generates" field, with a specification for the plugins you need.
It should look like this:
schema:
- my-schema.graphql
generates:
my-file.ts:
- plugin1
- plugin2
- plugin3
`);
}
for (const filename of generateKeys) {
const output = (generates[filename] = normalizeOutputParam(config.generates[filename]));
if (!output.preset && (!output.plugins || output.plugins.length === 0)) {
throw new DetailedError('Invalid Codegen Configuration!', `
Please make sure that your codegen config file defines a plugins list for the output "${filename}".
It should look like this:
schema:
- my-schema.graphql
generates:
my-file.ts:
- plugin1
- plugin2
- plugin3
`);
}
}
if (rootSchemas.length === 0 &&
Object.keys(generates).some(filename => !generates[filename].schema ||
(Array.isArray(generates[filename].schema) &&
generates[filename].schema.length === 0))) {
throw new DetailedError('Invalid Codegen Configuration!', `
Please make sure that your codegen config file contains either the "schema" field
or every generated file has its own "schema" field.
It should look like this:
schema:
- my-schema.graphql
or:
generates:
path/to/output:
schema: my-schema.graphql
`);
}
}
const isTest = process.env.NODE_ENV === 'test';
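// The run is modeled as a Listr task tree: "Parse Configuration" first, then
// "Generate outputs", which fans out into one sub-tree per output file,
// executed concurrently (up to the number of CPU cores).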
const tasks = new Listr([
{
title: 'Parse Configuration',
task: () => normalize(),
},
{
title: 'Generate outputs',
task: (ctx, task) => {
const generateTasks = Object.keys(generates).map(filename => {
const outputConfig = generates[filename];
const hasPreset = !!outputConfig.preset;
const title = `Generate to ${filename}`;
return {
title,
task: async (_, subTask) => {
let outputSchemaAst;
let outputSchema;
const outputFileTemplateConfig = outputConfig.config || {};
let outputDocuments = [];
const outputSpecificSchemas = normalizeInstanceOrArray(outputConfig.schema);
let outputSpecificDocuments = normalizeInstanceOrArray(outputConfig.documents);
const preset = hasPreset
? typeof outputConfig.preset === 'string'
? await getPresetByName(outputConfig.preset, makeDefaultLoader(context.cwd))
: outputConfig.preset
: null;
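// Give the preset a chance to rewrite this output's document pointers
// before they are loaded.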
if (preset) {
if (preset.prepareDocuments) {
outputSpecificDocuments = await preset.prepareDocuments(filename, outputSpecificDocuments);
}
}
return subTask.newListr([
{
title: 'Load GraphQL schemas',
task: wrapTask(async () => {
debugLog(`[CLI] Loading Schemas`);
const schemaPointerMap = {};
const allSchemaDenormalizedPointers = [...rootSchemas, ...outputSpecificSchemas];
for (const denormalizedPtr of allSchemaDenormalizedPointers) {
if (typeof denormalizedPtr === 'string') {
schemaPointerMap[denormalizedPtr] = {};
}
else if (typeof denormalizedPtr === 'object') {
Object.assign(schemaPointerMap, denormalizedPtr);
}
}
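// The merged pointer map (string pointers plus per-pointer options) is
// serialized to form the cache key, so outputs sharing the same schema
// sources are loaded only once per run.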
const hash = JSON.stringify(schemaPointerMap);
const result = await cache('schema', hash, async () => {
const outputSchemaAst = await context.loadSchema(schemaPointerMap);
const outputSchema = getCachedDocumentNodeFromSchema(outputSchemaAst);
return {
outputSchemaAst,
outputSchema,
};
});
outputSchemaAst = result.outputSchemaAst;
outputSchema = result.outputSchema;
}, filename, `Load GraphQL schemas: ${filename}`, ctx),
},
{
title: 'Load GraphQL documents',
task: wrapTask(async () => {
debugLog(`[CLI] Loading Documents`);
const documentPointerMap = {};
const allDocumentsDenormalizedPointers = [...rootDocuments, ...outputSpecificDocuments];
for (const denormalizedPtr of allDocumentsDenormalizedPointers) {
if (typeof denormalizedPtr === 'string') {
documentPointerMap[denormalizedPtr] = {};
}
else if (typeof denormalizedPtr === 'object') {
Object.assign(documentPointerMap, denormalizedPtr);
}
}
const hash = JSON.stringify(documentPointerMap);
const result = await cache('documents', hash, async () => {
const documents = await context.loadDocuments(documentPointerMap);
return {
documents,
};
});
outputDocuments = result.documents;
}, filename, `Load GraphQL documents: ${filename}`, ctx),
},
{
title: 'Generate',
task: wrapTask(async () => {
debugLog(`[CLI] Generating output`);
const normalizedPluginsArray = normalizeConfig(outputConfig.plugins);
const pluginLoader = config.pluginLoader || makeDefaultLoader(context.cwd);
const pluginPackages = await Promise.all(normalizedPluginsArray.map(plugin => getPluginByName(Object.keys(plugin)[0], pluginLoader)));
const pluginMap = Object.fromEntries(pluginPackages.map((pkg, i) => {
const plugin = normalizedPluginsArray[i];
const name = Object.keys(plugin)[0];
return [name, pkg];
}));
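// Merge root-level config with this output's config; a plain string config
// is wrapped as `{ value: ... }`, and `emitLegacyCommonJSImports` is
// resolved per output.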
const mergedConfig = {
...rootConfig,
...(typeof outputFileTemplateConfig === 'string'
? { value: outputFileTemplateConfig }
: outputFileTemplateConfig),
emitLegacyCommonJSImports: shouldEmitLegacyCommonJSImports(config, filename),
};
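// With a preset, buildGeneratesSection may fan a single entry out into many
// generated files; without one there is exactly one output for `filename`.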
const outputs = preset
? await context.profiler.run(async () => preset.buildGeneratesSection({
baseOutputDir: filename,
presetConfig: outputConfig.presetConfig || {},
plugins: normalizedPluginsArray,
schema: outputSchema,
schemaAst: outputSchemaAst,
documents: outputDocuments,
config: mergedConfig,
pluginMap,
pluginContext,
profiler: context.profiler,
}), `Build Generates Section: ${filename}`)
: [
{
filename,
plugins: normalizedPluginsArray,
schema: outputSchema,
schemaAst: outputSchemaAst,
documents: outputDocuments,
config: mergedConfig,
pluginMap,
pluginContext,
profiler: context.profiler,
},
];
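// Note: this local `process` shadows Node's global inside this callback. It
// runs `codegen` for a single output and records the generated content
// together with the output's lifecycle hooks.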
const process = async (outputArgs) => {
const output = await codegen({
...{
...outputArgs,
emitLegacyCommonJSImports: shouldEmitLegacyCommonJSImports(config, outputArgs.filename),
},
cache,
});
result.push({
filename: outputArgs.filename,
content: output,
hooks: outputConfig.hooks || {},
});
};
await context.profiler.run(() => Promise.all(outputs.map(process)), `Codegen: ${filename}`);
}, filename, `Generate: ${filename}`, ctx),
},
], {
// Stop the remaining steps for this output when one of them fails
exitOnError: true,
});
},
// Don't stop when one of the outputs fails, so the remaining outputs can still finish
exitOnError: false,
concurrent: cpus().length,
};
});
return task.newListr(generateTasks);
},
},
], {
rendererOptions: {
clearOutput: false,
collapse: true,
},
renderer: config.verbose ? 'verbose' : 'default',
ctx: { errors: [] },
rendererSilent: isTest || config.silent,
exitOnError: true,
});
// All errors thrown inside the `listr2` tasks are collected on the context;
// running the tasks themselves doesn't throw.
const executedContext = await tasks.run();
if (config.debug) {
// if we have debug logs, make sure to print them before throwing the errors
printLogs();
}
if (executedContext.errors.length > 0) {
const errors = executedContext.errors.map(subErr => isDetailedError(subErr)
? `${subErr.message} for "${subErr.source}"${subErr.details}`
: subErr.message || subErr.toString());
const newErr = new AggregateError(executedContext.errors, `${errors.join('\n\n')}`);
// Best-effort attempt to surface all stack traces for debugging
newErr.stack = `${newErr.stack}\n\n${executedContext.errors.map(subErr => subErr.stack).join('\n\n')}`;
throw newErr;
}
return result;
}