graphql-transformer-core
A framework for transforming GraphQL SDL into AWS CloudFormation.
"use strict";
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
var desc = Object.getOwnPropertyDescriptor(m, k);
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
desc = { enumerable: true, get: function() { return m[k]; } };
}
Object.defineProperty(o, k2, desc);
}) : (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
o[k2] = m[k];
}));
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
o["default"] = v;
});
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
__setModuleDefault(result, mod);
return result;
};
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.DynamoDBProvisionStrategy = exports.removeAmplifyInput = exports.readSchema = exports.loadProject = exports.isDataStoreEnabled = exports.writeConfig = exports.loadConfig = exports.ConflictHandlerType = exports.TRANSFORM_CURRENT_VERSION = exports.TRANSFORM_BASE_VERSION = exports.TRANSFORM_CONFIG_FILE_NAME = void 0;
const path = __importStar(require("path"));
const lodash_1 = __importDefault(require("lodash"));
const graphql_1 = require("graphql");
const errors_1 = require("../errors");
const fileUtils_1 = require("./fileUtils");
const fs = require('fs-extra');
exports.TRANSFORM_CONFIG_FILE_NAME = `transform.conf.json`;
exports.TRANSFORM_BASE_VERSION = 4;
exports.TRANSFORM_CURRENT_VERSION = 5;
const MODEL_DIRECTIVE_NAME = 'model';
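// Conflict resolution strategies that can be configured per project or per model
// in the ResolverConfig section of transform.conf.json.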
var ConflictHandlerType;
(function (ConflictHandlerType) {
ConflictHandlerType["OPTIMISTIC"] = "OPTIMISTIC_CONCURRENCY";
ConflictHandlerType["AUTOMERGE"] = "AUTOMERGE";
ConflictHandlerType["LAMBDA"] = "LAMBDA";
})(ConflictHandlerType = exports.ConflictHandlerType || (exports.ConflictHandlerType = {}));
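// Loads transform.conf.json from the project directory. Returns a default config
// pinned to TRANSFORM_CURRENT_VERSION if the file is missing or cannot be parsed.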
async function loadConfig(projectDir) {
let config = {
Version: exports.TRANSFORM_CURRENT_VERSION,
};
try {
const configPath = path.join(projectDir, exports.TRANSFORM_CONFIG_FILE_NAME);
const configExists = await fs.exists(configPath);
if (configExists) {
const configStr = await fs.readFile(configPath);
config = JSON.parse(configStr.toString());
}
return config;
}
catch (err) {
return config;
}
}
exports.loadConfig = loadConfig;
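// Writes the given config back to transform.conf.json (pretty-printed) and returns it.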
async function writeConfig(projectDir, config) {
const configFilePath = path.join(projectDir, exports.TRANSFORM_CONFIG_FILE_NAME);
await fs.writeFile(configFilePath, JSON.stringify(config, null, 4));
return config;
}
exports.writeConfig = writeConfig;
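// DataStore/conflict resolution is considered enabled when the loaded config declares
// either a project-wide or a per-model ResolverConfig.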
const isDataStoreEnabled = async (projectDir) => {
var _a, _b;
const transformerConfig = await loadConfig(projectDir);
return ((_a = transformerConfig === null || transformerConfig === void 0 ? void 0 : transformerConfig.ResolverConfig) === null || _a === void 0 ? void 0 : _a.project) !== undefined || ((_b = transformerConfig === null || transformerConfig === void 0 ? void 0 : transformerConfig.ResolverConfig) === null || _b === void 0 ? void 0 : _b.models) !== undefined;
};
exports.isDataStoreEnabled = isDataStoreEnabled;
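// Loads the full transformer project from disk: the combined schema and its
// model-to-datasource map, custom SQL statements from sql-statements/, and any
// function, pipeline-function, resolver, and stack overrides (unless disabled via opts).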
const loadProject = async (projectDirectory, opts) => {
const { schema, modelToDatasourceMap } = await (0, exports.readSchema)(projectDirectory);
const functions = {};
if (!(opts && opts.disableFunctionOverrides === true)) {
const functionDirectory = path.join(projectDirectory, 'functions');
const functionDirectoryExists = await fs.exists(functionDirectory);
if (functionDirectoryExists) {
const functionFiles = await fs.readdir(functionDirectory);
for (const functionFile of functionFiles) {
if (functionFile.indexOf('.') === 0) {
continue;
}
const functionFilePath = path.join(functionDirectory, functionFile);
functions[functionFile] = functionFilePath;
}
}
}
const pipelineFunctions = {};
if (!(opts && opts.disablePipelineFunctionOverrides === true)) {
const pipelineFunctionDirectory = path.join(projectDirectory, 'pipelineFunctions');
const pipelineFunctionDirectoryExists = await fs.exists(pipelineFunctionDirectory);
if (pipelineFunctionDirectoryExists) {
const pipelineFunctionFiles = await fs.readdir(pipelineFunctionDirectory);
for (const pipelineFunctionFile of pipelineFunctionFiles) {
if (pipelineFunctionFile.indexOf('.') === 0) {
continue;
}
const pipelineFunctionPath = path.join(pipelineFunctionDirectory, pipelineFunctionFile);
pipelineFunctions[pipelineFunctionFile] = await fs.readFile(pipelineFunctionPath, 'utf8');
}
}
}
const customQueries = new Map();
const customQueriesDirectoryName = 'sql-statements';
const customQueriesDirectory = path.join(projectDirectory, customQueriesDirectoryName);
const customQueriesDirExists = await fs.exists(customQueriesDirectory);
if (customQueriesDirExists) {
const queryFiles = await fs.readdir(customQueriesDirectory);
for (const queryFile of queryFiles) {
if (!queryFile.endsWith('.sql')) {
continue;
}
const queryFileName = path.parse(queryFile).name;
const queryFilePath = path.join(customQueriesDirectory, queryFile);
customQueries.set(queryFileName, await fs.readFile(queryFilePath, 'utf8'));
}
}
const resolvers = {};
if (!(opts && opts.disableResolverOverrides === true)) {
const resolverDirectory = path.join(projectDirectory, 'resolvers');
const resolverDirExists = await fs.exists(resolverDirectory);
if (resolverDirExists) {
const resolverFiles = await fs.readdir(resolverDirectory);
for (const resolverFile of resolverFiles) {
if (resolverFile.indexOf('.') === 0) {
continue;
}
const resolverFilePath = path.join(resolverDirectory, resolverFile);
resolvers[resolverFile] = await fs.readFile(resolverFilePath, 'utf8');
}
}
}
const stacksDirectory = path.join(projectDirectory, 'stacks');
const stacksDirExists = await fs.exists(stacksDirectory);
const stacks = {};
if (stacksDirExists) {
const stackFiles = await fs.readdir(stacksDirectory);
for (const stackFile of stackFiles) {
if (stackFile.indexOf('.') === 0) {
continue;
}
const stackFilePath = path.join(stacksDirectory, stackFile);
(0, fileUtils_1.throwIfNotJSONExt)(stackFile);
const stackBuffer = await fs.readFile(stackFilePath);
try {
stacks[stackFile] = JSON.parse(stackBuffer.toString());
}
catch (e) {
throw new Error(`The CloudFormation template ${stackFile} does not contain valid JSON.`);
}
}
}
const config = await loadConfig(projectDirectory);
return {
functions,
pipelineFunctions,
stacks,
resolvers,
schema,
config,
modelToDatasourceMap,
customQueries,
};
};
exports.loadProject = loadProject;
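// Reads the project schema from schema.graphql and/or schema.sql.graphql, or from a
// schema/ directory if neither file exists. Strips the AMPLIFY input type, maps each
// @model type to its datasource (DynamoDB, or the RDS engine declared in AMPLIFY), and
// throws ApiCategorySchemaNotFoundError when no schema is found.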
const readSchema = async (projectDirectory) => {
let modelToDatasourceMap = new Map();
const schemaFilePaths = [path.join(projectDirectory, 'schema.graphql'), path.join(projectDirectory, 'schema.sql.graphql')];
const existingSchemaFiles = schemaFilePaths.filter((p) => fs.existsSync(p));
const schemaDirectoryPath = path.join(projectDirectory, 'schema');
let amplifyInputType;
let schema = '';
if (!lodash_1.default.isEmpty(existingSchemaFiles)) {
for (const file of existingSchemaFiles) {
const fileSchema = (await fs.readFile(file)).toString();
const { amplifyType, schema: fileSchemaWithoutAmplifyInput } = (0, exports.removeAmplifyInput)(fileSchema);
const datasourceType = file.endsWith('.sql.graphql')
? constructDataSourceType(getRDSDBTypeFromInput(amplifyType), false)
: constructDataSourceType('DYNAMODB');
modelToDatasourceMap = new Map([...modelToDatasourceMap.entries(), ...constructDataSourceMap(fileSchema, datasourceType).entries()]);
if (amplifyType) {
amplifyInputType = mergeTypeFields(amplifyInputType, amplifyType);
}
schema += fileSchemaWithoutAmplifyInput;
}
if (amplifyInputType) {
schema = (0, graphql_1.print)(amplifyInputType) + schema;
}
}
else if (fs.existsSync(schemaDirectoryPath)) {
const datasourceType = constructDataSourceType('DYNAMODB');
const schemaInDirectory = (await readSchemaDocuments(schemaDirectoryPath)).join('\n');
modelToDatasourceMap = new Map([
...modelToDatasourceMap.entries(),
...constructDataSourceMap(schemaInDirectory, datasourceType).entries(),
]);
schema += schemaInDirectory;
}
else {
throw new errors_1.ApiCategorySchemaNotFoundError(schemaFilePaths[0]);
}
return {
schema,
modelToDatasourceMap,
};
};
exports.readSchema = readSchema;
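// Derives the RDS database type from the default value of the `engine` field on the
// AMPLIFY input type; only mysql and postgres are supported.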
const getRDSDBTypeFromInput = (amplifyType) => {
var _a;
const engineInput = amplifyType.fields.find((f) => f.name.value === 'engine');
if (!engineInput) {
throw new Error('engine is not defined in the RDS schema file');
}
const engine = (_a = engineInput === null || engineInput === void 0 ? void 0 : engineInput.defaultValue) === null || _a === void 0 ? void 0 : _a.value;
switch (engine) {
case 'mysql':
return 'MYSQL';
case 'postgres':
return 'POSTGRES';
default:
throw new Error(`engine ${engine} specified in the RDS schema file is not supported`);
}
};
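// Splits the `input AMPLIFY { ... }` definition out of the schema, returning the
// definition (if present) and the remaining SDL.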
const removeAmplifyInput = (schema) => {
const parsedSchema = (0, graphql_1.parse)(schema);
const amplifyType = parsedSchema.definitions.find((obj) => obj.kind === graphql_1.Kind.INPUT_OBJECT_TYPE_DEFINITION && obj.name.value === 'AMPLIFY');
const schemaWithoutAmplifyInput = parsedSchema.definitions.filter((obj) => obj.kind !== graphql_1.Kind.INPUT_OBJECT_TYPE_DEFINITION || obj.name.value !== 'AMPLIFY');
parsedSchema.definitions = schemaWithoutAmplifyInput;
return {
amplifyType,
schema: (0, graphql_1.print)(parsedSchema),
};
};
exports.removeAmplifyInput = removeAmplifyInput;
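// Merges the fields of two AMPLIFY input definitions, keeping typeA's copy when a
// field name appears in both.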
const mergeTypeFields = (typeA, typeB) => {
if (!typeA && !typeB) {
return undefined;
}
if (!typeA || !typeB) {
return typeA || typeB;
}
const type = typeA;
typeB.fields.forEach((field) => {
if (!type.fields.find((f) => f.name.value === field.name.value)) {
type.fields.push(field);
}
});
return type;
};
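// Recursively reads every non-hidden file under the schema/ directory and returns
// their contents.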
async function readSchemaDocuments(schemaDirectoryPath) {
const files = await fs.readdir(schemaDirectoryPath);
let schemaDocuments = [];
for (const fileName of files) {
if (fileName.indexOf('.') === 0) {
continue;
}
const fullPath = `${schemaDirectoryPath}/${fileName}`;
const stats = await fs.lstat(fullPath);
if (stats.isDirectory()) {
const childDocs = await readSchemaDocuments(fullPath);
schemaDocuments = schemaDocuments.concat(childDocs);
}
else if (stats.isFile()) {
const schemaDoc = await fs.readFile(fullPath);
schemaDocuments.push(schemaDoc);
}
}
return schemaDocuments;
}
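// Provisioning strategies for the DynamoDB tables that back @model types.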
var DynamoDBProvisionStrategy;
(function (DynamoDBProvisionStrategy) {
DynamoDBProvisionStrategy["DEFAULT"] = "DEFAULT";
DynamoDBProvisionStrategy["AMPLIFY_TABLE"] = "AMPLIFY_TABLE";
})(DynamoDBProvisionStrategy = exports.DynamoDBProvisionStrategy || (exports.DynamoDBProvisionStrategy = {}));
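// Builds the datasource descriptor recorded for each @model type.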
const constructDataSourceType = (dbType, provisionDB = true, provisionStrategy = "DEFAULT") => {
return {
dbType,
provisionDB,
provisionStrategy,
};
};
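// Maps every object type carrying the @model directive to the given datasource descriptor.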
const constructDataSourceMap = (schema, datasourceType) => {
const parsedSchema = (0, graphql_1.parse)(schema);
const result = new Map();
parsedSchema.definitions
.filter((obj) => obj.kind === graphql_1.Kind.OBJECT_TYPE_DEFINITION && obj.directives.some((dir) => dir.name.value === MODEL_DIRECTIVE_NAME))
.forEach((type) => {
result.set(type.name.value, datasourceType);
});
return result;
};
//# sourceMappingURL=transformConfig.js.map
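
For orientation, a minimal usage sketch (not taken from the package's documentation): it assumes these helpers are re-exported from the package entry point and uses a hypothetical Amplify API project directory.

const { loadProject, isDataStoreEnabled } = require('graphql-transformer-core');

(async () => {
  // Hypothetical project directory containing schema.graphql, resolvers/, stacks/, etc.
  const projectDir = '/path/to/amplify/backend/api/myapi';
  const project = await loadProject(projectDir);
  console.log('Transform config version:', project.config.Version);
  // modelToDatasourceMap is a Map of @model type name -> datasource descriptor.
  for (const [typeName, datasource] of project.modelToDatasourceMap) {
    console.log(`@model ${typeName} -> ${datasource.dbType}`);
  }
  console.log('DataStore enabled:', await isDataStoreEnabled(projectDir));
})();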