UNPKG

@sap/cds-dk

Version:

Command line client and development toolkit for the SAP Cloud Application Programming Model

635 lines (574 loc) 30.2 kB
const fs = require('fs')
const cds = require('../../../cds')
const { exists, isdir, path } = cds.utils
const { compileToHdbMigration, getJournalEntities, getMigrationTableName } = require('./2migration')
const { find } = require('../../../util/fs')
const { BuildError, relativePaths, BuildMessage, hasOptionValue, getWorkspacePaths, normalizePath } = require('../../util')
const { CONTENT_PACKAGE_JSON, CONTENT_HDBTABLEDATA, CSV_FILE_DETECTION, CONTENT_ENV, CONTENT_DEFAULT_ENV_JSON, CONTENT_NODE_MODULES, CONTINUE_UNRESOLVED_SCHEMA_CHANGES, CSV_FILE_TARGET, OPTION_WS, SEVERITY_WARNING } = require('../../constants')

// Default output folder for generated HANA artifacts, resolved against the task's dest folder in init().
const DEFAULT_COMPILE_DEST_FOLDER = path.normalize("src/gen")
const FILE_EXT_CSV = ".csv"
const FILE_EXT_HDBTABLEDATA = ".hdbtabledata"
const FILE_EXT_HDBTABLE = ".hdbtable"
const FILE_EXT_HDBMIGRATIONTABLE = ".hdbmigrationtable"
const FILE_NAME_HDICONFIG = ".hdiconfig"
const FILE_NAME_HDINAMESPACE = ".hdinamespace"
const FILE_NAME_PACKAGE_JSON = "package.json"
const PATH_LAST_DEV_CSN = "last-dev/csn.json"
const FILE_NAME_UNDEPLOY_JSON = "undeploy.json"
const DEPLOY_FORMAT = "deploy-format"
const DEBUG = cds.debug('cli|build')

// Some HDI artifacts do not require explicit plugin specification in _.hdiconfig_.
// This list contains exceptions which will be ignored during the consistency check.
const PLUGINLESS_HDB_FILES = new Set([".hdbgrants", ".hdbrevokes"])

/**
 * Build plugin that produces HDI deployment artifacts for SAP HANA
 * (.hdbtable/.hdbview/.hdbmigrationtable, .hdbtabledata, .hdiconfig, ...) from the CDS model.
 */
module.exports = class HanaBuildPlugin extends require('../internal') {
  static get taskDefaults() {
    return { src: normalizePath(cds.env.folders.db) }
  }
  // A HANA build task applies only if the db folder exists and the required db service is HANA.
  static hasTask() {
    if (cds.env.extends) return false
    const src = path.join(cds.root, this.taskDefaults?.src ??
'db')
    if (!exists(src)) return false
    const db = cds.env.requires.db
    return db?.kind === 'hana' || db?.dialect === 'hana'
  }

  // Resolve the compile destination folder ('src/gen' by default) against the task's dest folder.
  init() {
    this.task.options.compileDest = path.resolve(this.task.dest, this.task.options.compileDest || DEFAULT_COMPILE_DEST_FOLDER)
  }

  /**
   * Main build entry point: compiles the model to HANA artifacts, copies resources,
   * and creates .hdiconfig/.hdinamespace/package.json metadata in the dest folder.
   */
  async build() {
    const { src, dest } = this.task
    const model = await this.model()
    if (!model) { return }
    let wsPaths = []
    if (getJournalEntities(model).length || cds.cli.options?.[OPTION_WS] || hasOptionValue(this.task.options[OPTION_WS], true)) {
      // workspace paths don't include the current project root
      wsPaths = await getWorkspacePaths()
    }
    if (cds.cli.options.mocked) {
      cds.deploy?.include_external_entities_in?.(model)
    }
    // the order of 1 and 2 is important
    // 1. compile
    const hdiPlugins = await this._compileToHana(model, wsPaths)
    if (!cds.cli.options.dry) {
      // 2. copy resources
      if (this.hasBuildOption(CSV_FILE_DETECTION, false)) {
        await this._copyResources(src, dest)
      } else {
        await this._copyResourcesExt(src, dest, model)
      }
      // create additional stuff in dest
      await this._writeHdiConfig(hdiPlugins)
      await this._writeHdiNamespace()
      // TODO disabled as this contradicts the MTX domain concept which allows partial app deployments
      //await this._writeUndeployJson()
      if (!this.hasBuildOption(CONTENT_HDBTABLEDATA, false)) {
        await this._compileToHdbtabledata(model)
      }
      if (!this.hasBuildOption(CONTENT_PACKAGE_JSON, false)) {
        await this._writePackageJson()
      }
      // copy native hana artifacts from other workspaces including .hdbmigrationtable files defined for
      // common model artifacts that are not part of the current project
      if (wsPaths.length || cds.cli.options?.[OPTION_WS] || hasOptionValue(this.task.options[OPTION_WS], true)) {
        // first is winning, e.g. a .hdiconfig file might exist in multiple workspaces
        const seen = new Set()
        const wsFiles = (await this._getWorkspaceFiles(wsPaths)).filter(src => {
          if (seen.has(src.filePath)) return false
          seen.add(src.filePath)
          return true
        });
        // copy artifacts to the src/gen folder in inplace mode
        const dest = path.join(this.task.dest, this.isStagingBuild() ? 'src' : 'src/gen')
        await Promise.all(
          wsFiles.map((src) => {
            const { dir, filePath } = src
            return this.copy(path.join(dir, filePath)).to(path.join(dest, filePath))
          })
        )
      }
    }
  }

  /**
   * Deletes any content that has been created in folder '#this.task.dest/src/gen' by inplace mode.
   * <br>
   * Note: Content created in staging build will be deleted by default.
   */
  async clean() {
    if (this.isStagingBuild()) {
      return super.clean()
    }
    return fs.promises.rm(this.task.options.compileDest, { force: true, recursive: true })
  }

  /**
   * Copies all files located at <src> (except HANA artifacts not contained in <db>/src/**) to the folder <dest>.
   * '*.csv' files are read based on the corresponding CDS model file location and copied as flat list into folder '<dest>/src/gen>'.
   *
   * @param {string} src
   * @param {string} dest
   * @param {Object} model
   */
  async _copyResourcesExt(src, dest, model) {
    const resources = Object.keys(await cds.deploy.resources(model)).reverse() // reverse to get reuse resources first and app resources last
    const dbSrc = path.join(src, 'src')
    // 1. copy csv files into 'src/gen/data' or 'src/gen/csv' subfolder
    if (resources.length) {
      // determine the CSV folder from the location of the CSVs defined in this build task db folder, use subfolder data by default
      const dbCSVs = resources.filter(res => res.startsWith(src) && !res.startsWith(dbSrc))
      let csvFolder = this.getBuildOption(CSV_FILE_TARGET)
      if (csvFolder && !["data", "csv"].includes(csvFolder)) {
        throw new BuildError("Invalid value for option 'csvFileTarget'. Valid values are 'data' or 'csv'.")
      }
      if (!csvFolder) {
        const csvPath = path.join(src, "data")
        // determine subfolder name used by the application for backward compatibility
        csvFolder = !dbCSVs.length || dbCSVs.some(res => res && res.startsWith(csvPath)) ? "data" : "csv"
      }
      // preserve last-write-wins semantics for duplicate basenames while parallelizing distinct file copies
      const csvResourcesByName = new Map()
      for (const res of resources) {
        // do not duplicate resources that are already contained in db/src/**
        if (res && /\.csv$/.test(res) && !res.startsWith(dbSrc)) {
          csvResourcesByName.set(path.basename(res), res)
        }
      }
      await Promise.all(Array.from(csvResourcesByName.entries()).map(([fileName, res]) => {
        return this.copy(res).to(path.join(this.task.options.compileDest, csvFolder, fileName))
      }))
    }
    if (this.isStagingBuild()) {
      let blockList = "\\.cds$|\\.csv$|\\.hdbtabledata$"
      blockList += !this.hasBuildOption(CONTENT_ENV, true) ? "|\\.env($|\\..*$)" : ""
      blockList += !this.hasBuildOption(CONTENT_DEFAULT_ENV_JSON, true) ? "|default-env\\.json$" : ""
      blockList = new RegExp(blockList)
      // 2. staging build: copy files except *.cds, .env, default-env.json, ./node_modules/**
      await this.copyNativeContent(src, dest, (entry) => {
        if (entry.startsWith(dbSrc)) {
          return true
        }
        if (isdir(entry)) {
          return !/(\/|\\)node_modules(\/|\\)?$/.test(entry) || (/(\/|\\)node_modules(\/|\\)?$/.test(entry) && this.hasBuildOption(CONTENT_NODE_MODULES, true))
        }
        return !blockList.test(entry)
      })
    }
  }

  /**
   * Copies the entire content of the db module located in the given <src> folder to the folder <dest>.
   * '*.csv' and '*.hdbtabledata' files located in a subfolder 'data' or 'csv' will be copied to '<dest>/src/gen/data>'||'<dest>/src/gen/csv>'
   *
   * @param {string} src
   * @param {string} dest
   */
  async _copyResources(src, dest) {
    const dbCsvDir = path.join(src, "csv")
    const dbDataDir = path.join(src, "data")
    const csvDirs = [dbCsvDir, dbDataDir]
    const regexData = RegExp('\\.csv$|\\.hdbtabledata$')
    if (this.isStagingBuild()) {
      const regex = RegExp('\\.cds$|\\.csv$|\\.hdbtabledata$')
      await this.copyNativeContent(src, dest, (entry) => {
        if (isdir(entry)) {
          return !/(\/|\\)node_modules(\/|\\)?$/.test(entry)
        }
        return (!regex.test(entry) && entry !== cds.env.build.outputfile) || (regexData.test(entry) && !entry.startsWith(dbCsvDir) && !entry.startsWith(dbDataDir))
      })
    }
    // handle *.csv and *.hdbtabledata located in '<dbSrc>/data' and '<dbSrc>/csv' folder,
    // subfolders are not supported
    const allFiles = []
    for (const csvDir of csvDirs) {
      allFiles.push(...await find(csvDir, { filter: entry => regexData.test(entry) && !isdir(entry) }))
    }
    return Promise.all(allFiles.map((file) => {
      return this.copy(file).to(path.join(this.task.options.compileDest, path.relative(src, file)))
    }))
  }

  /**
   * Generates *.hdbtabledata files in folder '#this.task.dest/src/gen' from *.csv files located in '#this.task.dest/src/**' folder.
   * The generated *.hdbtabledata files will link to their *.csv counterparts using relative links. The *.csv files have either
   * already been defined in the 'src' folder or they have been copied to '#this.task.dest/src/gen/**' folder if they have been
   * created outside 'src' folder. If custom *.hdbtabledata files are found nothing is generated for this particular folder.
   * <br>
   * Note: *.csv and *.hdbtabledata need to be copied to '#this.task.dest/src/gen**' if required before this method is called.
   * In inplace mode dest folder is referring to src folder.
   *
   * @param {object} model compiled csn
   */
  async _compileToHdbtabledata(model) {
    const tableDataDirs = new Set()
    const destSrcDir = path.join(this.task.dest, "src")
    const allCsvFiles = await find(destSrcDir, { filter: (entry) => {
      if (isdir(entry)) { return true }
      if (/\.hdbtabledata$/.test(entry)) {
        tableDataDirs.add(path.dirname(entry))
      }
      return /\.csv$/.test(entry)
    }})
    if (allCsvFiles.length > 0) {
      const csvDirs = allCsvFiles.map(path.dirname).reduce((dirs, dir) => {
        if (!tableDataDirs.has(dir) && !dirs.includes(dir)) { // exclude any dir where a tabledata is present
          dirs.push(dir)
        }
        return dirs
      }, [])
      // ODM csv data comes with license comments, so strip these
      if (!this.hasBuildOption("stripCsvComments", false)) {
        await this._stripCsvComments(allCsvFiles)
      }
      const promises = []
      const options = { ...this.options(), messages: this.messages, dirs: csvDirs, baseDir: this.task.options.compileDest }
      const toHdbtabledata = cds.compile.to.hdbtabledata
      // NOTE(review): throws a plain string, not an Error instance
      if (!toHdbtabledata) throw `hdbtabledata plugin not found – make sure to install package '@cap-js/hana'`
      const tableDataResult = await toHdbtabledata(model, options)
      for (let [tableData, { file, csvFolder }] of tableDataResult) {
        // create .hdbtabledata side-by-side if .csv is contained in 'src/gen/**' subfolder
        // otherwise create in 'src/gen'
        let tableDataPath = csvFolder.startsWith(this.task.options.compileDest) ?
csvFolder : this.task.options.compileDest
        tableDataPath = path.join(tableDataPath, file)
        promises.push(this.write(tableData).to(tableDataPath))
      }
      await Promise.all(promises)
    }
  }

  // Strips license/comment lines from the given csv files in place (delegated to cds.utils.csv).
  async _stripCsvComments(csvFiles) {
    // Note: modification of csv files is only allowed for files located in the compile destination folder,
    // meaning having their origin location at db/data/* or db/csv/*
    await Promise.all(csvFiles.map(file => {
      if (this.isStagingBuild() || file.startsWith(this.task.options.compileDest)) {
        return cds.utils.csv?.stripComments(file, null, this.task.options.trimCsvWhitespaces)
      }
    }))
  }

  /**
   * Creates the hana artifacts from the given csn model and writes the files to the folder '<dest>/src/gen'.
   *
   * @param {object} model The compiled csn model
   */
  async _compileToHana(model, wsPaths) {
    // compile to old format (.hdbcds) or new format (.hdbtable / .hdbview)
    const format = this.getBuildOption(DEPLOY_FORMAT) || cds.env.requires.db?.[DEPLOY_FORMAT] || cds.env.hana?.[DEPLOY_FORMAT]
    if (!cds.compile.to[format]) {
      return Promise.reject(new Error(`Invalid deploy-format defined: ${format}`))
    }
    if (cds.env.features.journal === false || format === 'hdbcds') {
      // compatibility with existing cds.compile.to.hdbtable plugins cds < 8.0.0
      return await this._compileToHdb(model, format)
    } else {
      return await this._compileToHdbmigration(model, format, wsPaths)
    }
  }

  // Plain compilation without schema evolution - every generated artifact goes to the compile destination.
  // Returns the set of file suffixes produced (used later for the .hdiconfig consistency check).
  async _compileToHdb(model, format) {
    const hdiPlugins = new Set()
    // enforces sqlNames option for compiler in tests
    const options = { ...this.options(), sql_mapping: cds.env.sql.names }
    const result = cds.compile.to[format](model, options)
    const promises = []
    for (const [content, key] of result) {
      const suffix = key.suffix || path.extname(key.file)
      const file = key.file ?
        key.file : key.name + key.suffix
      hdiPlugins.add(suffix)
      promises.push(this.write(content).to(path.join(this.task.options.compileDest, file)))
      if (suffix === FILE_EXT_HDBTABLE) {
        const name = key.name || path.parse(key.file).name
        const dbSrcDir = path.join(this.task.src, "src")
        // issue an error in case a .hdbmigrationtable file already exists
        if (fs.existsSync(path.join(dbSrcDir, name + FILE_EXT_HDBMIGRATIONTABLE))) {
          const relDbSrcDir = path.relative(cds.root, dbSrcDir)
          const relDbDestDir = path.relative(cds.root, this.task.options.compileDest)
          throw new BuildError(`Multiple files exist defining the same HANA artifact - [${path.join(relDbSrcDir, name + FILE_EXT_HDBMIGRATIONTABLE)}, ${path.join(relDbDestDir, file)}].\nEither annotate the model entity using @cds.persistence.journal or undeploy the file [${path.join('src', name + FILE_EXT_HDBMIGRATIONTABLE)}] using an undeploy.json file.`)
        }
      }
    }
    await Promise.all(promises)
    return hdiPlugins
  }

  // Compilation with schema evolution - @cds.persistence.journal entities become .hdbmigrationtable
  // files in db/src (diffed against the last-dev CSN), everything else goes to the compile destination.
  async _compileToHdbmigration(model, format, wsPaths) {
    const hdiPlugins = new Set()
    const relDbDestDir = path.relative(cds.root, this.task.options.compileDest)
    const dbSrcDir = path.join(this.task.src, "src")
    const relDbSrcDir = path.relative(cds.root, dbSrcDir)
    const lastDevCsnFolder = PATH_LAST_DEV_CSN
    const lastDevCsnDir = path.join(this.task.src, lastDevCsnFolder)
    let lastDev = null
    const promises = []
    const migrationTableFiles = []
    if (fs.existsSync(lastDevCsnDir)) {
      lastDev = JSON.parse((await fs.promises.readFile(lastDevCsnDir, 'utf-8')).toString())
    }
    // enforces sqlNames option for compiler in tests, pass options from cds env, ensures that the correct format is taken
    const options = { ...this.options(), messages: this.messages, sql_mapping: cds.env.sql.names, "deploy-format": format }
    const { definitions, afterImage } = await compileToHdbMigration(model, lastDev, dbSrcDir, options)
    let validationError
    // determine the migration tables originating from different workspaces
    const journalEntities =
getJournalEntities(model)
    const wsMigrationTables = new Set()
    if (journalEntities.length) {
      for (const entity of journalEntities) {
        if (entity['$location']?.file && wsPaths.some(wsPath => path.resolve(cds.root, entity['$location'].file).startsWith(wsPath))) {
          wsMigrationTables.add(getMigrationTableName(entity, model))
        }
      }
    }
    for (const { name, suffix, content, changed } of definitions) {
      hdiPlugins.add(suffix)
      const fileName = name + suffix
      if (suffix === FILE_EXT_HDBMIGRATIONTABLE) {
        // do not create .hdbmigrationtable files originating from a different workspace
        // the --ws build option will copy these artifacts to either gen/db/src or db/src/gen
        if (!wsMigrationTables.has(name)) {
          migrationTableFiles.push(path.join(relDbSrcDir, fileName))
          if (changed) {
            promises.push(this.write(content).to(path.join(dbSrcDir, fileName)))
          } else {
            DEBUG?.(`no change, keep existing ${fileName}`)
          }
        } else {
          DEBUG?.(`skip generation of migration table [${fileName}] as it originates from a different workspace`)
        }
      } else {
        promises.push(this.write(content).to(path.join(this.task.options.compileDest, fileName)))
        if (suffix === FILE_EXT_HDBTABLE) {
          // issue an error in case a .hdbmigrationtable file already exists
          if (fs.existsSync(path.join(dbSrcDir, name + FILE_EXT_HDBMIGRATIONTABLE))) {
            validationError = new BuildError(`Multiple files exist defining the same HANA artifact - [${path.join(relDbSrcDir, name + FILE_EXT_HDBMIGRATIONTABLE)}, ${path.join(relDbDestDir, fileName)}].\nEither annotate the model entity using @cds.persistence.journal or undeploy the file [${path.join('src', name + FILE_EXT_HDBMIGRATIONTABLE)}] using an undeploy.json file.`)
          }
        }
      }
    }
    // The last-dev CSN shall only be updated, if all .hdbmigrationtable files could be successfully updated.
    // Of course, .hdbmigrationtable files already written would need to be manually reverted before the
    // cds build command is again executed.
    await Promise.all(promises)
    try {
      if (validationError) {
        throw validationError
      }
      await this._validateMigrationTableFiles()
    } finally {
      // update last-dev CSN version
      if (afterImage) {
        if (!_toEqualIgnoreMeta(lastDev, afterImage)) {
          await this.write(afterImage).to(lastDevCsnDir)
        }
        // add src/.hdiconfig if not existing
        if (migrationTableFiles.length > 0 && !fs.existsSync(path.join(dbSrcDir, FILE_NAME_HDICONFIG))) {
          const template = await _readTemplateAsJson('.hdiconfig-cloud')
          await this.write(template).to(path.join(dbSrcDir, FILE_NAME_HDICONFIG))
        }
      }
    }
    return hdiPlugins
  }

  // Creates <dest>/package.json from the template unless the db module already ships its own.
  async _writePackageJson() {
    const pkgJson = path.join(this.task.src, "package.json")
    const exists = fs.existsSync(pkgJson)
    if (exists) {
      DEBUG?.(`skip create [${relativePaths(cds.root, pkgJson)}], already existing`)
    }
    if (this.isStagingBuild() && !exists) {
      const content = await _readTemplateAsJson(FILE_NAME_PACKAGE_JSON)
      await this.write(content).to(path.join(this.task.dest, FILE_NAME_PACKAGE_JSON))
    }
  }

  /**
   * Create .hdiconfig file in <dest>src/gen folder of db module.
   */
  async _writeHdiConfig(compileHdiPlugins) {
    const DEFAULT_HDI_PLUGINS = [FILE_EXT_CSV, FILE_EXT_HDBTABLEDATA, FILE_EXT_HDBTABLE, ".hdbview", ".hdbindex", ".hdbconstraint", ".hdbcalculationview"]
    const undeployHdiPlugins = await this._readTypesFromUndeployJson() // see CAP issue #6222
    const hdiPlugins = new Set([...DEFAULT_HDI_PLUGINS, ...undeployHdiPlugins, ...compileHdiPlugins])
    let hdiConfigPath
    // do not create the .hdiconfig file in 'gen/db' root folder for now
    // if (this.isStagingBuild()
    //   && !fs.existsSync(path.join(this.task.dest, FILE_NAME_HDICONFIG))
    //   && !fs.existsSync(path.join(this.task.dest, 'cfg', FILE_NAME_HDICONFIG))) {
    //   // in 'gen/db' if none exists
    //   // ensures correct deployment of all artifacts including HANA native artifacts
    //   // without the need of creating a static .hdiconfig file in the 'db' folder
    //   // add all known plugins
    //   const defaultHdiConfig = await this._readTemplateAsJson('.hdiconfig-cloud')
    //   for (const plugin in defaultHdiConfig['file_suffixes']) {
    //     hdiPlugins.add('.' + plugin)
    //   }
    //   hdiConfigPath = path.join(this.task.dest, FILE_NAME_HDICONFIG)
    // } else {
    // in 'gen/db/src/gen'
    // ensures correct deployment of generated and undeploy.json artifacts
    // a static .hdiconfig file in 'db' folder is required for any HANA native artifacts
    hdiConfigPath = path.join(this.task.options.compileDest, FILE_NAME_HDICONFIG)
    // }
    const template = await _readTemplateAsJson('.hdiconfig-all')
    let content = { 'file_suffixes': {} }
    // only use the required subset as SAP HANA cloud does not support all hdiPlugins
    for (const key in template['file_suffixes']) {
      if (hdiPlugins.has('.' + key)) {
        content['file_suffixes'][key] = template['file_suffixes'][key]
      }
    }
    if (Object.keys(content['file_suffixes']).length < hdiPlugins.size) {
      const missingPlugins = Array.from(hdiPlugins)
        .filter(key => !content['file_suffixes'][key.slice(1)])
        .filter(key => !PLUGINLESS_HDB_FILES.has(key))
      if (missingPlugins.length) {
        // NOTE(review): the message text starts with a stray apostrophe - looks unintentional, confirm before changing
        this.pushMessage(`'HANA database plugin not found for following file suffix [${missingPlugins.join(', ')}]`)
      }
    }
    await this.write(content).to(hdiConfigPath)
  }

  /**
   * Create .hdinamespace file in <dest>src/gen folder of db module.
   */
  async _writeHdiNamespace() {
    // see issue #64 - add .hdinamespace file to prevent HDI from adding gen/ folder to the namespace.
    const hdiNamespace = path.join(this.task.options.compileDest, FILE_NAME_HDINAMESPACE)
    const content = await _readTemplateAsJson(FILE_NAME_HDINAMESPACE)
    return await this.write(content).to(hdiNamespace)
  }

  /**
   * Create undeploy.json file in <dest> folder of db module.
   */
  async _writeUndeployJson() {
    if (this.isStagingBuild()) {
      // see issue #64 - add .hdinamespace file to prevent HDI from adding gen/ folder to the namespace.
      const undeployJsonDest = path.join(this.task.dest, FILE_NAME_UNDEPLOY_JSON)
      const undeployJsonSrc = path.join(this.task.src, FILE_NAME_UNDEPLOY_JSON)
      const templateEntries = await _readTemplateAsJson(FILE_NAME_UNDEPLOY_JSON)
      let newEntries = []
      if (fs.existsSync(undeployJsonSrc)) {
        // NOTE(review): the outer 'await' on the synchronous JSON.parse result is a no-op
        newEntries = await JSON.parse((await fs.promises.readFile(undeployJsonSrc, 'utf-8')).toString())
        newEntries = Array.isArray(newEntries) ? newEntries : []
        templateEntries.forEach(entry => {
          if (!newEntries.includes(entry)) {
            newEntries.push(entry)
          }
        })
      } else {
        newEntries = templateEntries
      }
      // formatted output
      let content = '[\n'
      for (let i = 0; i < newEntries.length; i++) {
        content += ` "${newEntries[i]}"${i + 1 < newEntries.length ?
',' : ''}\n`
      }
      content += ']'
      await this.write(content).to(undeployJsonDest)
    }
  }

  // Collects the file suffixes referenced by entries in db/undeploy.json,
  // validated against the known suffixes from the '.hdiconfig-all' template.
  async _readTypesFromUndeployJson() {
    const hdiPlugins = new Set()
    const file = path.join(this.task.src, "undeploy.json")
    if (fs.existsSync(file)) {
      // NOTE(review): 'utf-8' is passed in JSON.parse's reviver position and is ignored
      const undeployList = JSON.parse((await fs.promises.readFile(file)).toString(), 'utf-8')
      if (Array.isArray(undeployList)) {
        const hdiconfig = await _readTemplateAsJson('.hdiconfig-all')
        const keys = new Set(Object.keys(hdiconfig['file_suffixes']).map(key => '.' + key))
        undeployList.forEach(entry => {
          const extName = path.extname(entry)
          if (extName && !hdiPlugins.has(extName)) {
            if (keys.has(extName)) {
              hdiPlugins.add(extName)
            } else {
              this.pushMessage(`Ignoring invalid entry '${entry}' in undeploy.json file`, SEVERITY_WARNING)
            }
          }
        })
      }
    }
    return hdiPlugins
  }

  // Parses all .hdbmigrationtable files in db/src and reports unresolved ('>>>>>') migration steps.
  async _validateMigrationTableFiles() {
    const dbSrcDir = path.join(this.task.src, "src")
    const migrationTableFiles = await find(dbSrcDir, { filter: (res) => {
      return fs.statSync(res).isFile() && path.extname(res) === FILE_EXT_HDBMIGRATIONTABLE
    }})
    if (migrationTableFiles.length > 0) {
      const parser = require('./migrationtable')
      const resolutionMessages = []
      await Promise.all(migrationTableFiles.map(async file => {
        try {
          const tableModel = await parser.read(file)
          if (/^>>>>>/m.test(tableModel.migrations.toString())) {
            // as this is not a build error, we do not abort cds build, instead only log as error
            resolutionMessages.push(new BuildMessage(`Manual resolution required for file ${path.relative(cds.root, file)}. Check migration version content for further details.`))
          }
        } catch (e) {
          throw new Error(`${path.relative(cds.root, file)}: ${e.message}`, { cause: e })
        }
      }))
      if (resolutionMessages.length) {
        // REVISIT: undocumented, we may want to delete in future versions
        if (!this.hasBuildOption(CONTINUE_UNRESOLVED_SCHEMA_CHANGES, true)) {
          throw new BuildError('Current model changes require manual resolution', resolutionMessages)
        }
        // adding to the task's message list
        resolutionMessages.forEach(m => this._messages.push(m))
      }
    }
  }

  /**
   * Returns all hana native artifacts located in db/src/** workspace folders.
   * @param {Array} workspacePaths Array of workspace paths, 'cds.root' is excluded from the list
   * @returns {Promise<Array>} Array of workspace files, each entry contains the db src directory,
   * the relative file path and the file name.
   */
  async _getWorkspaceFiles(workspacePaths) {
    if (!workspacePaths || !workspacePaths.length) {
      return []
    }
    const dbPaths = new Set()
    workspacePaths.forEach(workspacePath => {
      // skip workspace matching the current project if cds build is executed in a workspace folder
      if (cds.root !== workspacePath) {
        const dbSrc = path.join(workspacePath, 'db/src')
        const dbCfg = path.join(workspacePath, 'db/cfg')
        if (fs.existsSync(dbSrc)) {
          dbPaths.add(dbSrc)
        }
        if (fs.existsSync(dbCfg)) {
          dbPaths.add(dbCfg)
        }
      }
    })
    const workspaceFiles = []
    for (const src of dbPaths) {
      const files = await find(src, { filter: (entry) => {
        if (entry.match(/(\/|\\)gen(\/|\\)?$/)) { // skip gen folder content
          return false
        }
        return true
      }})
      files.forEach(file => workspaceFiles.push({ dir: src, filePath: path.relative(src, file), fileName: path.basename(file) }))
    }
    return workspaceFiles
  }
}

// Reads a JSON template shipped next to this module (template/<name>) and returns the parsed object.
async function _readTemplateAsJson(template) {
  const content = await fs.promises.readFile(path.join(__dirname, 'template', template), 'utf-8')
  return JSON.parse(content.toString())
}

// Structural CSN comparison that ignores the compiler-generated 'meta' tag.
function _toEqualIgnoreMeta(csn1, csn2) {
  function toString(csn) {
    return JSON.stringify(csn, (k, v) => {
      if (v?.creator) { // make sure it's the compiler meta tag
        if (k === 'meta' && v.creator?.startsWith('CDS Compiler')) return
      }
      return v
    })
  }
  if (csn1 === csn2) { return true }
  if (!csn1 || !csn2) { return false }
  return toString(csn1) === toString(csn2)
}