@graphql-codegen/hasura-allow-list
Version:
GraphQL Code Generator plugin to generate Hasura allow list metadata from GraphQL files
118 lines (117 loc) • 6.63 kB
JavaScript
import { Kind, print, visit, } from 'graphql';
import yaml from 'yaml';
/**
 * Returns an array of fragments required for a given operation, recursively.
 * Will throw an error if it cannot find one of the fragments required for the operation.
 * @param operationDefinition the operation we want to find fragments for.
 * @param fragmentDefinitions a list of fragments from the same document, some of which may be required by the operation.
 * @param documentLocation location of the document the operation is sourced from. Only used to improve error messages.
 * @param config plugin configuration; only `fragmentsOrder` ('document' | 'global', default 'global') is read here.
 * @returns an array of fragments required for the operation.
 */
function getOperationFragmentsRecursively(operationDefinition, fragmentDefinitions, documentLocation, config) {
    // Index fragments by name once so each lookup during recursion is O(1)
    // instead of scanning the whole fragment list (previously O(n^2)).
    const fragmentsByName = new Map(fragmentDefinitions.map(definition => [definition.name.value, definition]));
    const requiredFragmentNames = new Set();
    getRequiredFragments(operationDefinition);
    // note: we should choose fragmentsOrder config that is compatible with other graphql-codegen plugins we use.
    const order = config.fragmentsOrder ?? 'global';
    if (order === 'document') {
        // Set insertion order = the order fragments were first encountered while
        // traversing the operation, i.e. their first-usage order in the document.
        return Array.from(requiredFragmentNames, name => fragmentsByName.get(name));
    }
    // 'global': order is determined by the fragment definition order in fragmentDefinitions.
    return fragmentDefinitions.filter(definition => requiredFragmentNames.has(definition.name.value));
    /**
     * Given a definition adds required fragments to requiredFragmentNames, recursively.
     * @param definition either an operation definition or a fragment definition.
     */
    function getRequiredFragments(definition) {
        visit(definition, {
            FragmentSpread(fragmentSpreadNode) {
                const fragmentName = fragmentSpreadNode.name.value;
                // the Set check also prevents infinite recursion on a (illegal) recursive
                // fragment definition: each fragment is expanded at most once.
                if (!requiredFragmentNames.has(fragmentName)) {
                    requiredFragmentNames.add(fragmentName);
                    const fragmentDefinition = fragmentsByName.get(fragmentName);
                    if (!fragmentDefinition) {
                        throw new Error(`Missing fragment ${fragmentSpreadNode.name.value} for ${definition.kind === Kind.FRAGMENT_DEFINITION ? 'fragment' : 'operation'} ${definition.name.value} in file ${documentLocation}`);
                    }
                    getRequiredFragments(fragmentDefinition);
                }
                return fragmentSpreadNode;
            },
        });
    }
}
/**
 * Gets a list of fragments from all documents. Will enforce fragment name uniqueness.
 * @param documents All the documents from which fragments will be extracted
 * @returns global fragment definitions, guaranteed to have unique names
 * @throws if two documents (or one document) define two fragments with the same name
 */
function getGlobalFragments(documents) {
    // keep a dictionary of each fragment's location for better error messages
    const fragmentLocations = new Map();
    // collect fragments while checking uniqueness — a single pass over all
    // documents (previously the list was rebuilt by a second flatMap pass)
    const globalFragments = [];
    for (const document of documents) {
        for (const fragment of document.document.definitions.filter(namedFragmentDefinitionFilter)) {
            const fragmentName = fragment.name.value;
            // if the map already has a fragment by that name, report both locations
            if (fragmentLocations.has(fragmentName)) {
                const locationA = document.location;
                const locationB = fragmentLocations.get(fragmentName);
                throw new Error(`Duplicate fragment definitions for ${fragmentName} in files ${locationA}, ${locationB}`);
            }
            fragmentLocations.set(fragmentName, document.location);
            globalFragments.push(fragment);
        }
    }
    return globalFragments;
}
/**
 * Extracts the named fragment definitions contained in a single document.
 * @param document a graphql-codegen document wrapper.
 * @returns the document's own named fragment definitions.
 */
function getDocumentFragments(document) {
    const { definitions } = document.document;
    return definitions.filter(namedFragmentDefinitionFilter);
}
/**
 * Predicate: true only for operation definitions that carry a name
 * (anonymous operations cannot be referenced in an allow list).
 * @param definition any GraphQL definition node.
 * @returns whether the node is a named operation definition.
 */
function namedOperationDefinitionFilter(definition) {
    if (definition.kind !== Kind.OPERATION_DEFINITION) {
        return false;
    }
    return Boolean(definition.name);
}
/**
 * Predicate: true only for fragment definitions that carry a name.
 * @param definition any GraphQL definition node.
 * @returns whether the node is a named fragment definition.
 */
function namedFragmentDefinitionFilter(definition) {
    if (definition.kind !== Kind.FRAGMENT_DEFINITION) {
        return false;
    }
    return Boolean(definition.name);
}
/**
 * graphql-codegen plugin entry point: renders every named operation (plus the
 * fragments it transitively needs) as a Hasura allow-list query collection in YAML.
 * @param schema the GraphQL schema (not used by this plugin).
 * @param documents all loaded GraphQL documents.
 * @param config plugin configuration: collectionName, globalFragments, fragmentsOrder.
 * @returns the YAML metadata string for one query collection.
 * @throws on legacy snake_case configuration keys.
 */
export const plugin = async (schema, documents, config) => {
    // Fail fast with actionable messages when the old snake_case keys are used.
    if ('config_version' in config) {
        throw new Error(`[hasura allow list plugin] Configuration error: configuration property config_version has been renamed configVersion. Please update your configuration accordingly.`);
    }
    if ('collection_name' in config) {
        throw new Error(`[hasura allow list plugin] Configuration error: configuration property collection_name has been renamed collectionName. Please update your configuration accordingly.`);
    }
    // With globalFragments enabled every operation may use fragments from any
    // document; otherwise each document only sees its own fragments.
    const globalFragments = config.globalFragments ? getGlobalFragments(documents) : false;
    const queries = [];
    for (const document of documents) {
        // anonymous operations cannot be allow-listed by name, so skip them
        const namedOperations = document.document.definitions.filter(namedOperationDefinitionFilter);
        const availableFragments = globalFragments || getDocumentFragments(document);
        for (const operation of namedOperations) {
            const requiredFragments = getOperationFragmentsRecursively(operation, availableFragments, document.location, config);
            // the operation comes first, followed by its fragments in order,
            // each printed definition separated by a newline
            queries.push({
                name: operation.name.value,
                query: [operation, ...requiredFragments].map(print).join('\n'),
            });
        }
    }
    return yaml.stringify([
        {
            name: config.collectionName ?? 'allowed-queries',
            definition: {
                queries,
            },
        },
    ]);
};
export default { plugin };