graphql-transformer-core
Version:
A framework to transform GraphQL SDL into AWS CloudFormation.
537 lines • 26.1 kB
JavaScript
"use strict";
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
var desc = Object.getOwnPropertyDescriptor(m, k);
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
desc = { enumerable: true, get: function() { return m[k]; } };
}
Object.defineProperty(o, k2, desc);
}) : (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
o[k2] = m[k];
}));
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
o["default"] = v;
});
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
__setModuleDefault(result, mod);
return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.getSanityCheckRules = exports.makeTransformConfigFromOldProject = exports.readV1ProjectConfiguration = exports.revertAPIMigration = exports.migrateAPIProject = exports.getS3KeyNamesFromDirectory = exports.uploadDeployment = exports.buildProject = exports.PARAMETERS_FILE_NAME = exports.CLOUDFORMATION_FILE_NAME = void 0;
const path = __importStar(require("path"));
const fs = __importStar(require("fs-extra"));
const glob = __importStar(require("glob"));
const cloudform_types_1 = require("cloudform-types");
const graphql_transformer_common_1 = require("graphql-transformer-common");
const GraphQLTransform_1 = require("../GraphQLTransform");
const fileUtils_1 = require("./fileUtils");
const transformConfig_1 = require("./transformConfig");
const sanity_check_1 = require("./sanity-check");
exports.CLOUDFORMATION_FILE_NAME = 'cloudformation-template.json';
exports.PARAMETERS_FILE_NAME = 'parameters.json';
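/**
 * Builds the transformer project: backfills stack mappings that are missing from the transform
 * config, runs the GraphQL Transform, and, unless this is a dry run, writes the deployment
 * artifacts to <projectDirectory>/build and sanity-checks them against the last deployed build
 * under currentCloudBackendDirectory.
 *
 * Minimal usage sketch; the transformer instances, feature-flag provider, and paths below are
 * caller-supplied placeholders, not values defined in this file:
 *
 *   const deployment = await buildProject({
 *     projectDirectory: '/path/to/api',
 *     currentCloudBackendDirectory: '/path/to/current-cloud-backend/api',
 *     rootStackFileName: 'cloudformation-template.json',
 *     transformersFactory: () => transformers, // hypothetical list of transformer instances
 *     transformersFactoryArgs: [],
 *     buildParameters: {},
 *     featureFlags, // hypothetical feature-flag provider
 *     sanityCheckRules: getSanityCheckRules(false, featureFlags),
 *     dryRun: false,
 *   });
 */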
async function buildProject(opts) {
await ensureMissingStackMappings(opts);
const builtProject = await _buildProject(opts);
if (opts.projectDirectory && !opts.dryRun) {
await writeDeploymentToDisk(builtProject, path.join(opts.projectDirectory, 'build'), opts.rootStackFileName, opts.buildParameters);
const lastBuildPath = opts.currentCloudBackendDirectory !== undefined ? path.join(opts.currentCloudBackendDirectory, 'build') : undefined;
const thisBuildPath = opts.projectDirectory !== undefined ? path.join(opts.projectDirectory, 'build') : undefined;
await (0, sanity_check_1.sanityCheckProject)(lastBuildPath, thisBuildPath, opts.rootStackFileName, opts.sanityCheckRules.diffRules, opts.sanityCheckRules.projectRules);
}
return builtProject;
}
exports.buildProject = buildProject;
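/**
 * Loads the project (schema, transform config, and user overrides), runs the GraphQL Transform
 * with the configured stack mapping, applies V1 migration adjustments when a Migration block is
 * present, and merges user-provided resolvers, functions, and stacks into the transform output.
 */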
async function _buildProject(opts) {
const userProjectConfig = await (0, transformConfig_1.loadProject)(opts.projectDirectory, opts);
const stackMapping = getStackMappingFromProjectConfig(userProjectConfig.config);
const transformers = await opts.transformersFactory(...opts.transformersFactoryArgs);
const transform = new GraphQLTransform_1.GraphQLTransform({
transformers,
stackMapping,
transformConfig: userProjectConfig.config,
featureFlags: opts.featureFlags,
});
let transformOutput = transform.transform(userProjectConfig.schema.toString());
if (userProjectConfig.config && userProjectConfig.config.Migration) {
transformOutput = adjustBuildForMigration(transformOutput, userProjectConfig.config.Migration);
}
const merged = mergeUserConfigWithTransformOutput(userProjectConfig, transformOutput);
return merged;
}
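/**
 * Reads the StackMapping from the transform config and additionally pins every resource listed
 * under Migration.V1.Resources to the 'root' stack so migrated resources stay out of nested stacks.
 */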
function getStackMappingFromProjectConfig(config) {
const stackMapping = getOrDefault(config, 'StackMapping', {});
const migrationConfig = config.Migration;
if (migrationConfig && migrationConfig.V1) {
const resourceIdsToHoist = migrationConfig.V1.Resources || [];
for (const idToHoist of resourceIdsToHoist) {
stackMapping[idToHoist] = 'root';
}
}
return stackMapping;
}
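/**
 * For projects migrated from the V1 transformer, rewrites the hoisted resources in every nested
 * stack and in the root stack via formatMigratedResource so the generated templates stay
 * compatible with the already-deployed resources.
 */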
function adjustBuildForMigration(resources, migrationConfig) {
if (migrationConfig && migrationConfig.V1) {
const resourceIdsToHoist = migrationConfig.V1.Resources || [];
if (resourceIdsToHoist.length === 0) {
return resources;
}
const resourceIdMap = resourceIdsToHoist.reduce((acc, k) => ({ ...acc, [k]: true }), {});
for (const stackKey of Object.keys(resources.stacks)) {
const template = resources.stacks[stackKey];
for (const resourceKey of Object.keys(template.Resources)) {
if (resourceIdMap[resourceKey]) {
const resource = template.Resources[resourceKey];
template.Resources[resourceKey] = formatMigratedResource(resource);
}
}
}
const rootStack = resources.rootStack;
for (const resourceKey of Object.keys(rootStack.Resources)) {
if (resourceIdMap[resourceKey]) {
const resource = rootStack.Resources[resourceKey];
rootStack.Resources[resourceKey] = formatMigratedResource(resource);
}
}
}
return resources;
}
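/**
 * Runs a fresh build and compares its stack mapping against the stacks in the current cloud
 * backend. Any mapped resource or output that the new build would place in a different stack
 * than the one it was last deployed in is pinned to the deployed stack (or to 'root' for root
 * stack members) by writing an updated StackMapping back into the project's transform config.
 */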
async function ensureMissingStackMappings(config) {
const { currentCloudBackendDirectory } = config;
let transformOutput = undefined;
if (currentCloudBackendDirectory) {
const missingStackMappings = {};
transformOutput = await _buildProject(config);
const copyOfCloudBackend = await (0, fileUtils_1.readFromPath)(currentCloudBackendDirectory);
const stackMapping = transformOutput.stackMapping;
if (copyOfCloudBackend && copyOfCloudBackend.build && copyOfCloudBackend.build.stacks) {
const customStacks = Object.keys(copyOfCloudBackend.stacks || {});
const stackNames = Object.keys(copyOfCloudBackend.build.stacks).filter((stack) => !customStacks.includes(stack));
for (const stackFileName of stackNames) {
const stackName = stackFileName.slice(0, stackFileName.length - path.extname(stackFileName).length);
const lastDeployedStack = JSON.parse(copyOfCloudBackend.build.stacks[stackFileName]);
if (lastDeployedStack) {
const resourceIdsInStack = Object.keys(lastDeployedStack.Resources);
for (const resourceId of resourceIdsInStack) {
if (stackMapping[resourceId] && stackName !== stackMapping[resourceId]) {
missingStackMappings[resourceId] = stackName;
}
}
const outputIdsInStack = Object.keys(lastDeployedStack.Outputs || {});
for (const outputId of outputIdsInStack) {
if (stackMapping[outputId] && stackName !== stackMapping[outputId]) {
missingStackMappings[outputId] = stackName;
}
}
}
}
const lastDeployedStack = JSON.parse(copyOfCloudBackend.build[config.rootStackFileName]);
const resourceIdsInStack = Object.keys(lastDeployedStack.Resources);
for (const resourceId of resourceIdsInStack) {
if (stackMapping[resourceId] && stackMapping[resourceId] !== 'root') {
missingStackMappings[resourceId] = 'root';
}
}
const outputIdsInStack = Object.keys(lastDeployedStack.Outputs || {});
for (const outputId of outputIdsInStack) {
if (stackMapping[outputId] && stackMapping[outputId] !== 'root') {
missingStackMappings[outputId] = 'root';
}
}
if (Object.keys(missingStackMappings).length) {
let conf = await (0, transformConfig_1.loadConfig)(config.projectDirectory);
conf = { ...conf, StackMapping: { ...getOrDefault(conf, 'StackMapping', {}), ...missingStackMappings } };
await (0, transformConfig_1.writeConfig)(config.projectDirectory, conf);
}
}
}
return transformOutput;
}
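/**
 * Merges user overrides into the transform output: user-provided resolvers, functions, and
 * pipeline functions replace generated ones by file name, and each user-defined stack is added
 * to the root stack as a nested AWS::CloudFormation::Stack that receives the root stack's
 * parameters (plus the AppSync API id) and depends on the API, schema, and generated stacks.
 */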
function mergeUserConfigWithTransformOutput(userConfig, transformOutput) {
const userFunctions = userConfig.functions || {};
const transformFunctions = transformOutput.functions;
for (const userFunction of Object.keys(userFunctions)) {
transformFunctions[userFunction] = userFunctions[userFunction];
}
const userPipelineFunctions = userConfig.pipelineFunctions || {};
const pipelineFunctions = transformOutput.pipelineFunctions;
for (const pipelineFunction of Object.keys(userPipelineFunctions)) {
pipelineFunctions[pipelineFunction] = userPipelineFunctions[pipelineFunction];
}
const userResolvers = userConfig.resolvers || {};
const transformResolvers = transformOutput.resolvers;
for (const userResolver of Object.keys(userResolvers)) {
transformResolvers[userResolver] = userConfig.resolvers[userResolver];
}
const userStacks = userConfig.stacks || {};
const transformStacks = transformOutput.stacks;
const rootStack = transformOutput.rootStack;
const resourceTypesToDependOn = {
'AWS::CloudFormation::Stack': true,
'AWS::AppSync::GraphQLApi': true,
'AWS::AppSync::GraphQLSchema': true,
};
const allResourceIds = Object.keys(rootStack.Resources).filter((k) => {
const resource = rootStack.Resources[k];
return resourceTypesToDependOn[resource.Type];
});
const parametersKeys = Object.keys(rootStack.Parameters);
const customStackParams = parametersKeys.reduce((acc, k) => ({
...acc,
[k]: cloudform_types_1.Fn.Ref(k),
}), {});
customStackParams[graphql_transformer_common_1.ResourceConstants.PARAMETERS.AppSyncApiId] = cloudform_types_1.Fn.GetAtt(graphql_transformer_common_1.ResourceConstants.RESOURCES.GraphQLAPILogicalID, 'ApiId');
let updatedParameters = rootStack.Parameters;
for (const userStack of Object.keys(userStacks)) {
if (transformOutput.stacks[userStack]) {
throw new Error(`You cannot provide a stack named ${userStack} as it \
will be overwritten by a stack generated by the GraphQL Transform.`);
}
const userDefinedStack = userConfig.stacks[userStack];
for (const key of Object.keys(userDefinedStack.Parameters)) {
if (customStackParams[key] == null) {
customStackParams[key] = cloudform_types_1.Fn.Ref(key);
if (updatedParameters[key]) {
throw new Error(`Cannot redefine CloudFormation parameter ${key} in stack ${userStack}.`);
}
else {
updatedParameters[key] = userDefinedStack.Parameters[key];
}
}
}
const parametersForStack = Object.keys(userDefinedStack.Parameters).reduce((acc, k) => ({
...acc,
[k]: customStackParams[k],
}), {});
transformStacks[userStack] = userDefinedStack;
const stackResourceId = userStack.split(/[^A-Za-z]/).join('');
const customNestedStack = new cloudform_types_1.CloudFormation.Stack({
Parameters: parametersForStack,
TemplateURL: cloudform_types_1.Fn.Join('/', [
'https://s3.amazonaws.com',
cloudform_types_1.Fn.Ref(graphql_transformer_common_1.ResourceConstants.PARAMETERS.S3DeploymentBucket),
cloudform_types_1.Fn.Ref(graphql_transformer_common_1.ResourceConstants.PARAMETERS.S3DeploymentRootKey),
'stacks',
userStack,
]),
}).dependsOn(allResourceIds);
rootStack.Resources[stackResourceId] = customNestedStack;
}
rootStack.Parameters = updatedParameters;
return {
...transformOutput,
resolvers: transformResolvers,
stacks: transformStacks,
};
}
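/**
 * Uploads every file beneath opts.directory by passing its relative key and a read stream to
 * the caller-supplied upload function via handleFile.
 */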
const uploadDeployment = async (opts) => {
if (!opts.directory) {
throw new Error("You must provide a 'directory'");
}
if (!fs.existsSync(opts.directory)) {
throw new Error(`Invalid 'directory': directory does not exist at ${opts.directory}`);
}
if (!opts.upload || typeof opts.upload !== 'function') {
throw new Error("You must provide an 'upload' function");
}
const { directory, upload } = opts;
const fileNames = (0, exports.getS3KeyNamesFromDirectory)(directory);
const uploadPromises = fileNames.map(async (fileName) => {
const resourceContent = fs.createReadStream(path.join(directory, fileName));
await (0, fileUtils_1.handleFile)(upload, fileName, resourceContent);
});
await Promise.all(uploadPromises);
};
exports.uploadDeployment = uploadDeployment;
const getS3KeyNamesFromDirectory = (directory) => {
const fileNames = glob.sync('**/*', {
cwd: directory,
nodir: true,
posix: true,
});
return fileNames;
};
exports.getS3KeyNamesFromDirectory = getS3KeyNamesFromDirectory;
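/**
 * Writes a built deployment to disk: empties the target directory, then writes schema.graphql,
 * the resolvers/, pipelineFunctions/, stacks/, and functions/ directories, the root stack
 * template, and the build parameters file.
 */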
async function writeDeploymentToDisk(deployment, directory, rootStackFileName = 'rootStack.json', buildParameters) {
await (0, fileUtils_1.emptyDirectory)(directory);
const schema = deployment.schema;
const fullSchemaPath = path.normalize(directory + `/schema.graphql`);
fs.writeFileSync(fullSchemaPath, schema);
initStacksAndResolversDirectories(directory);
const resolverFileNames = Object.keys(deployment.resolvers);
const resolverRootPath = resolverDirectoryPath(directory);
for (const resolverFileName of resolverFileNames) {
const fullResolverPath = path.normalize(resolverRootPath + '/' + resolverFileName);
fs.writeFileSync(fullResolverPath, deployment.resolvers[resolverFileName]);
}
const pipelineFunctions = Object.keys(deployment.pipelineFunctions);
const pipelineFunctionRootPath = pipelineFunctionDirectoryPath(directory);
for (const functionFileName of pipelineFunctions) {
const fullTemplatePath = path.normalize(pipelineFunctionRootPath + '/' + functionFileName);
fs.writeFileSync(fullTemplatePath, deployment.pipelineFunctions[functionFileName]);
}
const stackNames = Object.keys(deployment.stacks);
const stackRootPath = stacksDirectoryPath(directory);
for (const stackFileName of stackNames) {
const fileNameParts = stackFileName.split('.');
if (fileNameParts.length === 1) {
fileNameParts.push('json');
}
const fullFileName = fileNameParts.join('.');
(0, fileUtils_1.throwIfNotJSONExt)(fullFileName);
const fullStackPath = path.normalize(stackRootPath + '/' + fullFileName);
const stackContent = deployment.stacks[stackFileName];
const stackString = typeof stackContent === 'string' ? stackContent : JSON.stringify(stackContent, null, 4);
fs.writeFileSync(fullStackPath, stackString);
}
const functionNames = Object.keys(deployment.functions);
const functionRootPath = path.normalize(directory + `/functions`);
if (!fs.existsSync(functionRootPath)) {
fs.mkdirSync(functionRootPath);
}
for (const functionName of functionNames) {
const fullFunctionPath = path.normalize(functionRootPath + '/' + functionName);
const zipContents = fs.readFileSync(deployment.functions[functionName]);
fs.writeFileSync(fullFunctionPath, zipContents);
}
const rootStack = deployment.rootStack;
const rootStackPath = path.normalize(directory + `/${rootStackFileName}`);
const rootStackString = JSON.stringify(rootStack, null, 4);
fs.writeFileSync(rootStackPath, rootStackString);
const jsonString = JSON.stringify(buildParameters, null, 4);
const parametersOutputFilePath = path.join(directory, exports.PARAMETERS_FILE_NAME);
fs.writeFileSync(parametersOutputFilePath, jsonString);
}
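/**
 * Migrates a V1 transformer project in place: reads the old project and cloud backend, derives
 * a transform config that hoists the previously deployed tables, search domain, and search IAM
 * role into the root stack, and rewrites the project into the intermediate layout expected by
 * the new transformer. Returns copies of the original project and cloud backend so the
 * migration can be reverted with revertAPIMigration.
 */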
async function migrateAPIProject(opts) {
const projectDirectory = opts.projectDirectory;
const cloudBackendDirectory = opts.cloudBackendDirectory || projectDirectory;
const copyOfCloudBackend = await (0, fileUtils_1.readFromPath)(cloudBackendDirectory);
if (copyOfCloudBackend.build && !copyOfCloudBackend.build[exports.CLOUDFORMATION_FILE_NAME]) {
copyOfCloudBackend.build[exports.CLOUDFORMATION_FILE_NAME] = copyOfCloudBackend[exports.CLOUDFORMATION_FILE_NAME];
}
const projectConfig = await (0, fileUtils_1.readFromPath)(projectDirectory);
const cloudBackendConfig = await readV1ProjectConfiguration(cloudBackendDirectory);
const transformConfig = makeTransformConfigFromOldProject(cloudBackendConfig);
await updateToIntermediateProject(projectDirectory, cloudBackendConfig, transformConfig);
return {
project: projectConfig,
cloudBackend: copyOfCloudBackend,
};
}
exports.migrateAPIProject = migrateAPIProject;
async function revertAPIMigration(directory, oldProject) {
await fs.remove(directory);
await (0, fileUtils_1.writeToPath)(directory, oldProject);
}
exports.revertAPIMigration = revertAPIMigration;
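/**
 * Reads the schema, CloudFormation template, and parameters.json of a V1 project directory and
 * returns them as a single configuration object.
 */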
async function readV1ProjectConfiguration(projectDirectory) {
const { schema } = await (0, transformConfig_1.readSchema)(projectDirectory);
const cloudFormationTemplatePath = path.join(projectDirectory, exports.CLOUDFORMATION_FILE_NAME);
if (!fs.existsSync(cloudFormationTemplatePath)) {
throw new Error(`Could not find cloudformation template at ${cloudFormationTemplatePath}`);
}
const cloudFormationTemplateStr = await fs.readFile(cloudFormationTemplatePath);
const cloudFormationTemplate = JSON.parse(cloudFormationTemplateStr.toString());
const parametersFilePath = path.join(projectDirectory, 'parameters.json');
if (!fs.existsSync(parametersFilePath)) {
throw new Error(`Could not find parameters.json at ${parametersFilePath}`);
}
const parametersFileStr = await fs.readFile(parametersFilePath);
const parametersFile = JSON.parse(parametersFileStr.toString());
return {
template: cloudFormationTemplate,
parameters: parametersFile,
schema,
};
}
exports.readV1ProjectConfiguration = readV1ProjectConfiguration;
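/**
 * Builds a transform config whose Migration.V1.Resources list contains every DynamoDB table,
 * every Elasticsearch domain, and the ElasticSearchAccessIAMRole from the old template so those
 * resources are kept in the root stack after migration.
 */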
function makeTransformConfigFromOldProject(project) {
const migrationResourceIds = [];
for (const key of Object.keys(project.template.Resources)) {
const resource = project.template.Resources[key];
switch (resource.Type) {
case 'AWS::DynamoDB::Table': {
migrationResourceIds.push(key);
break;
}
case 'AWS::Elasticsearch::Domain': {
migrationResourceIds.push(key);
break;
}
case 'AWS::IAM::Role': {
if (key === 'ElasticSearchAccessIAMRole') {
migrationResourceIds.push(key);
}
break;
}
default: {
break;
}
}
}
return {
Migration: {
V1: {
Resources: migrationResourceIds,
},
},
};
}
exports.makeTransformConfigFromOldProject = makeTransformConfigFromOldProject;
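/**
 * Normalizes a migrated resource to plain JSON (via toJSON when available) and strips its
 * SSESpecification so the template does not alter the encryption settings of the resource that
 * was already deployed by the V1 transformer.
 */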
function formatMigratedResource(obj) {
const jsonNode = obj && typeof obj.toJSON === 'function' ? obj.toJSON() : obj;
const withoutEncryption = removeSSE(jsonNode);
return withoutEncryption;
}
function removeSSE(resource) {
if (resource && resource.Properties && resource.Properties.SSESpecification) {
delete resource.Properties.SSESpecification;
}
return resource;
}
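/**
 * Writes the intermediate migration project: persists the transform config; copies the surviving
 * resources (tables, search domain, AppSync API and key, Cognito user pool and client, the
 * search IAM role, and the schema resource repointed at the S3 deployment bucket) into a new
 * template under build/; rewrites parameters.json with the remaining parameter values; and
 * creates the resolvers/, pipelineFunctions/, and stacks/ directories.
 */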
async function updateToIntermediateProject(projectDirectory, project, config) {
await (0, transformConfig_1.writeConfig)(projectDirectory, config);
const filteredResources = {};
for (const key of Object.keys(project.template.Resources)) {
const resource = project.template.Resources[key];
switch (resource.Type) {
case 'AWS::DynamoDB::Table':
case 'AWS::Elasticsearch::Domain':
case 'AWS::AppSync::GraphQLApi':
case 'AWS::AppSync::ApiKey':
case 'AWS::Cognito::UserPool':
case 'AWS::Cognito::UserPoolClient':
filteredResources[key] = formatMigratedResource(resource);
break;
case 'AWS::IAM::Role': {
if (key === 'ElasticSearchAccessIAMRole') {
filteredResources[key] = resource;
}
break;
}
case 'AWS::AppSync::GraphQLSchema': {
const alteredResource = { ...resource };
alteredResource.Properties.DefinitionS3Location = {
'Fn::Sub': [
's3://${S3DeploymentBucket}/${S3DeploymentRootKey}/schema.graphql',
{
S3DeploymentBucket: {
Ref: 'S3DeploymentBucket',
},
S3DeploymentRootKey: {
Ref: 'S3DeploymentRootKey',
},
},
],
};
filteredResources[key] = alteredResource;
break;
}
default:
break;
}
}
const filteredParameterValues = {
DynamoDBBillingMode: 'PROVISIONED',
};
const filteredTemplateParameters = {
env: {
Type: 'String',
Description: 'The environment name. e.g. Dev, Test, or Production',
Default: 'NONE',
},
S3DeploymentBucket: {
Type: 'String',
Description: 'The S3 bucket containing all deployment assets for the project.',
},
S3DeploymentRootKey: {
Type: 'String',
Description: 'An S3 key relative to the S3DeploymentBucket that points to the root of the deployment directory.',
},
};
for (const key of Object.keys(project.template.Parameters)) {
switch (key) {
case 'ResolverBucket':
case 'ResolverRootKey':
case 'DeploymentTimestamp':
case 'schemaGraphql':
break;
default: {
const param = project.template.Parameters[key];
filteredTemplateParameters[key] = param;
if (project.parameters[key]) {
filteredParameterValues[key] = project.parameters[key];
}
break;
}
}
}
const templateCopy = {
...project.template,
Resources: filteredResources,
Parameters: filteredTemplateParameters,
};
const oldCloudFormationTemplatePath = path.join(projectDirectory, exports.CLOUDFORMATION_FILE_NAME);
if (fs.existsSync(oldCloudFormationTemplatePath)) {
fs.unlinkSync(oldCloudFormationTemplatePath);
}
const cloudFormationTemplateOutputPath = path.join(projectDirectory, 'build', exports.CLOUDFORMATION_FILE_NAME);
fs.writeFileSync(cloudFormationTemplateOutputPath, JSON.stringify(templateCopy, null, 4));
const parametersInputPath = path.join(projectDirectory, exports.PARAMETERS_FILE_NAME);
fs.writeFileSync(parametersInputPath, JSON.stringify(filteredParameterValues, null, 4));
initStacksAndResolversDirectories(projectDirectory);
}
function initStacksAndResolversDirectories(directory) {
const resolverRootPath = resolverDirectoryPath(directory);
if (!fs.existsSync(resolverRootPath)) {
fs.mkdirSync(resolverRootPath);
}
const pipelineFunctionRootPath = pipelineFunctionDirectoryPath(directory);
if (!fs.existsSync(pipelineFunctionRootPath)) {
fs.mkdirSync(pipelineFunctionRootPath);
}
const stackRootPath = stacksDirectoryPath(directory);
if (!fs.existsSync(stackRootPath)) {
fs.mkdirSync(stackRootPath);
}
}
function pipelineFunctionDirectoryPath(rootPath) {
return path.normalize(rootPath + `/pipelineFunctions`);
}
function resolverDirectoryPath(rootPath) {
return path.normalize(rootPath + `/resolvers`);
}
function stacksDirectoryPath(rootPath) {
return path.normalize(rootPath + `/stacks`);
}
function getOrDefault(o, k, d) {
return o[k] || d;
}
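/**
 * Returns the diff and project sanity-check rules to run before a deployment. New APIs get no
 * rules. Existing APIs get rules that block unsupported DynamoDB key schema, LSI, and GSI
 * changes (relaxed when the enableIterativeGSIUpdates feature flag is set), enforce the
 * 500-resource stack limit, and, unless allowDestructiveUpdates is true, forbid removing tables.
 */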
function getSanityCheckRules(isNewAppSyncAPI, ff, allowDestructiveUpdates = false) {
let diffRules = [];
let projectRules = [];
if (!isNewAppSyncAPI) {
const iterativeUpdatesEnabled = ff.getBoolean('enableIterativeGSIUpdates');
if (iterativeUpdatesEnabled) {
if (!allowDestructiveUpdates) {
diffRules.push((0, sanity_check_1.getCantEditKeySchemaRule)(iterativeUpdatesEnabled), (0, sanity_check_1.getCantAddLSILaterRule)(iterativeUpdatesEnabled), (0, sanity_check_1.getCantRemoveLSILater)(iterativeUpdatesEnabled), (0, sanity_check_1.getCantEditLSIKeySchemaRule)(iterativeUpdatesEnabled), sanity_check_1.cantRemoveTableAfterCreation);
}
projectRules.push(sanity_check_1.cantHaveMoreThan500ResourcesRule);
}
else {
diffRules.push((0, sanity_check_1.getCantEditKeySchemaRule)(), (0, sanity_check_1.getCantAddLSILaterRule)(), (0, sanity_check_1.getCantRemoveLSILater)(), (0, sanity_check_1.getCantEditLSIKeySchemaRule)(), sanity_check_1.cantEditGSIKeySchemaRule, sanity_check_1.cantAddAndRemoveGSIAtSameTimeRule);
if (!allowDestructiveUpdates) {
diffRules.push(sanity_check_1.cantRemoveTableAfterCreation);
}
projectRules.push(sanity_check_1.cantHaveMoreThan500ResourcesRule, sanity_check_1.cantMutateMultipleGSIAtUpdateTimeRule);
}
}
return { diffRules, projectRules };
}
exports.getSanityCheckRules = getSanityCheckRules;
//# sourceMappingURL=amplifyUtils.js.map