UNPKG

@universis/candidates

Version:

Universis API server plugin for study program candidates, internship selection, etc.

380 lines (370 loc) 15.2 kB
import fs from 'fs';
import { Workbook } from 'exceljs';
import { DataConfigurationStrategy } from '@themost/data';
import { cloneDeep, merge } from 'lodash';
import { Args, TraceUtils } from '@themost/common';
import moment from 'moment';

// MIME type of an .xlsx workbook
const XlsxContentType = 'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet';
// use this value as a fallback if it hasn't been configured in the schema
// nor under settings/universis/candidates, since this is already widely used
const DEFAULT_HEADER_ROW = 3;
// module-level cache of the DataConfigurationStrategy; assigned by
// xlsPostParserWithConfig and read by the model-inspection helpers below
let dataConfiguration;

/**
 * Creates an express-style middleware that parses an uploaded xls(x) file
 * together with a JSON schema (configuration) file and assigns the parsed
 * rows to `req.body` as an array of nested objects.
 *
 * The schema must define `model` (target data model name) and `columns`
 * (one entry per spreadsheet header, each either `virtual` or carrying a
 * `property.modelAttribute` path). Optional `extraAttributes` append fixed
 * `defaultValue`s to every row.
 *
 * @param {*} context The application context (must expose getApplication())
 * @param {{fileProperty?: string, schemaProperty?: string}} [opts] Overrides
 *   for the multipart field names of the source file and the schema file
 * @returns {function(*, *, function): Promise<void>} Middleware; on any
 *   validation or parse failure the error is traced and forwarded to next(err)
 */
function xlsPostParserWithConfig(context, opts = {}) {
    return async function (req, _, next) {
        try {
            Args.check(!!context, 'The application context cannot be empty');
            const options = Object.assign(
                {
                    fileProperty: 'file',
                    schemaProperty: 'schema'
                },
                opts
            );
            // get source file definition
            const sourceFileDefinition = req && req.files && req.files[options.fileProperty];
            Args.check(!!sourceFileDefinition, 'The source xls(x) file cannot be empty at this context');
            // validate source file content type (also accept octet-stream uploads
            // whose file name ends in .xlsx)
            Args.check(
                sourceFileDefinition.contentType === XlsxContentType ||
                    (/\.(xlsx)$/i.test(sourceFileDefinition.contentFileName) &&
                        sourceFileDefinition.contentType === 'application/octet-stream'),
                'The source file should be a valid xls(x) file'
            );
            // get config file definition
            const schemaDefinition = req.files[options.schemaProperty];
            Args.check(!!schemaDefinition, 'The schema (configuration) file cannot be empty at this context');
            // read config
            const schema = JSON.parse(fs.readFileSync(schemaDefinition.path));
            // validate basic required attributes
            Args.check(
                schema && Array.isArray(schema.columns) && typeof schema.model === 'string',
                `The required "model" and/or "columns" properties are missing from the configuration file`
            );
            // get data configuration strategy (cached at module level for the helpers)
            const configuration = context.getApplication().getConfiguration();
            dataConfiguration = configuration.getStrategy(DataConfigurationStrategy);
            Args.check(!!dataConfiguration, 'The DataConfigurationStrategy may not be empty in this context');
            // get target model definition (clone: populateFieldsFromInheritance mutates it)
            const model = cloneDeep(dataConfiguration.getModelDefinition(schema.model));
            Args.check(!!model, `The target model ${schema.model} cannot be found in the current application context`);
            // validate every non-virtual model attribute in the schema
            for (const column of schema.columns) {
                // if column is virtual, continue
                if (column.virtual) {
                    continue;
                }
                // get model attribute
                const modelAttribute = column.property && column.property.modelAttribute;
                // and validate it
                // (fixed typo: message previously read "modelAtrribute")
                Args.check(
                    !!modelAttribute,
                    `Missing "modelAttribute" property for the non-virtual column ${(column.property && column.property.title) || 'empty'}`
                );
                Args.check(
                    attributeIsValid(modelAttribute, model),
                    `The modelAttribute ${modelAttribute} of the column ${column.property.title || 'empty'} is invalid in the current context of the ${model.name} entity`
                );
            }
            // validate every extra attribute also
            for (const extraAttribute of schema.extraAttributes || []) {
                // get model attribute
                const modelAttribute = extraAttribute.modelAttribute;
                // and validate it
                Args.check(!!modelAttribute, `The modelAttribute property is missing from the ${extraAttribute.title || 'empty'} extraAttribute`);
                Args.check(
                    attributeIsValid(modelAttribute, model),
                    `The modelAttribute ${modelAttribute} of the extra attribute ${extraAttribute.title || 'empty'} is invalid in the current context of the ${model.name} entity`
                );
                Args.check(
                    Object.prototype.hasOwnProperty.call(extraAttribute, 'defaultValue'),
                    `The defaultValue property is missing for the ${
                        extraAttribute.title || 'empty'
                    } extraAttribute, where modelAttribute is ${modelAttribute}`
                );
            }
            // read source file
            // NOTE(review): exceljs `xlsx.read` expects a readable stream; every other
            // access to a file definition here uses `.path` — confirm this should not
            // be `workbook.xlsx.readFile(sourceFileDefinition.path)`
            const workbook = new Workbook();
            await workbook.xlsx.read(sourceFileDefinition);
            // get and validate header row; coerce the configuration value with
            // Number() so that a string setting (e.g. "5") is accepted instead of
            // failing the integer check below (NaN falls through to the default)
            const headerRow =
                Number(schema.headerRow) ||
                Number(configuration.getSourceAt('settings/universis/candidates/headerRow')) ||
                DEFAULT_HEADER_ROW;
            Args.check(Number.isInteger(headerRow) && headerRow > 0, 'The header row is invalid or cannot be determined');
            const body = [];
            // enumerate worksheets
            workbook.worksheets.forEach((sheet) => {
                let headers = [],
                    headersToSchemaColumns = new Map();
                // enumerate rows
                sheet.eachRow((row, rowNumber) => {
                    // do nothing until the headerRow
                    if (rowNumber < headerRow) {
                        return;
                    }
                    if (rowNumber === headerRow) {
                        // get headers
                        headers = row.values;
                        // validate that all headers are configured
                        const missingHeaders = [];
                        headers.forEach((header, index) => {
                            // find related column (titles are compared after Unicode
                            // normalization and whitespace collapsing)
                            const schemaColumn = schema.columns.find((column) => {
                                return column.property && normalizeValue(column.property.title) === normalizeValue(header);
                            });
                            if (!schemaColumn) {
                                missingHeaders.push(header);
                            } else {
                                // add it to the map
                                headersToSchemaColumns.set(index, schemaColumn);
                            }
                        });
                        Args.check(
                            missingHeaders.length === 0,
                            `Based on the specified header row ${headerRow}, the header(s) "${missingHeaders.join(
                                ', '
                            )}" is/are missing from the configuration file. All headers must be configured (even if they are virtual)`
                        );
                        return;
                    }
                    let rowData = {};
                    // enumerate row values
                    row.values.forEach((value, index) => {
                        // note: header indexes in exceljs are >= 1
                        if (index <= 0) {
                            return;
                        }
                        // get the schema column by header
                        const schemaColumn = headersToSchemaColumns.get(index);
                        // a data row may be wider than the header row; fail with a
                        // descriptive error instead of a TypeError on undefined
                        Args.check(
                            !!schemaColumn,
                            `The cell at column index ${index} of row ${rowNumber} has no matching header in row ${headerRow}`
                        );
                        // if the column is virtual, do nothing
                        if (schemaColumn.virtual) {
                            return;
                        }
                        // get modelAttribute
                        const modelAttribute = schemaColumn.property.modelAttribute;
                        // flatten the value (rich text, formula results, hyperlinks)
                        value = flattenValue(value);
                        // start handling the data
                        if (!Array.isArray(schemaColumn.dataMappings)) {
                            // check dateFormat
                            if (typeof schemaColumn.property.dateFormat === 'string' && typeof value === 'string') {
                                const dateFormat = schemaColumn.property.dateFormat;
                                // try to convert date strictly
                                const converted = moment(value, dateFormat, true);
                                Args.check(converted.isValid(), `The date ${value} cannot be strictly converted to the format ${dateFormat}`);
                                value = converted.toDate();
                            }
                            // if no dataMappings are set, just format the value
                            const formattedValue = formatValue(modelAttribute, value);
                            // and merge it with the existing row data
                            merge(rowData, formattedValue);
                        } else {
                            // find source value (loose equality is intentional here:
                            // cell values may be numbers while mapping sources are strings)
                            const mapping = schemaColumn.dataMappings.find((mapping) => {
                                return normalizeValue(mapping.from) == normalizeValue(value);
                            });
                            // and validate it
                            Args.check(
                                !!mapping,
                                `The source ("from") value ${value || 'empty'} is missing from the dataMappings of the column ${
                                    schemaColumn.property.title
                                }`
                            );
                            // validate target value
                            Args.check(
                                Object.prototype.hasOwnProperty.call(mapping, 'to'),
                                `The target ("to") value is missing from the dataMappings of the column ${schemaColumn.property.title}, where "from" is ${mapping.from}`
                            );
                            // format the value
                            const formattedValue = formatValue(modelAttribute, mapping.to);
                            // and merge it with the existing row data
                            merge(rowData, formattedValue);
                        }
                    });
                    // enumerate extra attributes
                    if (Array.isArray(schema.extraAttributes)) {
                        schema.extraAttributes.forEach((attribute) => {
                            // get model attribute
                            const modelAttribute = attribute.modelAttribute;
                            // format the value
                            const formattedValue = formatValue(modelAttribute, attribute.defaultValue);
                            // and merge it with the row data
                            merge(rowData, formattedValue);
                        });
                    }
                    // push the row data
                    body.push(rowData);
                });
            });
            // assign body to request
            req.body = body;
            // and exit
            return next();
        } catch (err) {
            TraceUtils.error(err);
            return next(err);
        }
    };
}

/**
 * Normalizes a string for comparison: Unicode NFKC normalization, removal of
 * control/format/zero-width characters, whitespace collapsing and trimming.
 * Non-string values are returned unchanged.
 * @param {*} value
 * @returns {*}
 */
function normalizeValue(value) {
    if (typeof value !== 'string') {
        return value;
    }
    return value
        .normalize('NFKC')
        .replace(/[\p{Cc}\p{Cf}\u200B-\u200D\uFEFF]/gu, '')
        .replace(/\s+/g, ' ')
        .trim();
}

/**
 * Mutates the given model definition by prepending any fields inherited
 * (via `inherits`) or implemented (via `implements`) models define and the
 * model does not already declare. Walks the full ancestor chain. Relies on
 * the module-level `dataConfiguration` being set.
 * @param {*} model A (cloned) model definition, or null
 * @returns {*} The same model, with fields populated
 */
function populateFieldsFromInheritance(model) {
    // validate model
    if (model == null || dataConfiguration == null) {
        return model;
    }
    // ensure model fields
    model.fields = model.fields || [];
    if (model.inherits) {
        // walk the inheritance chain; guard each step so that a missing model
        // definition ends the walk instead of throwing on undefined
        let nextInheritedModel = cloneDeep(dataConfiguration.getModelDefinition(model.inherits));
        while (nextInheritedModel) {
            // transfer non existing fields
            nextInheritedModel.fields = nextInheritedModel.fields || [];
            nextInheritedModel.fields.forEach((field) => {
                const exists = model.fields.find((baseField) => {
                    return baseField.name === field.name;
                });
                if (!exists) {
                    model.fields.unshift(field);
                }
            });
            // fetch next model, if any
            nextInheritedModel = cloneDeep(dataConfiguration.getModelDefinition(nextInheritedModel.inherits));
        }
    }
    if (model.implements) {
        // walk the implementation chain with the same guard
        let nextImplementedModel = cloneDeep(dataConfiguration.getModelDefinition(model.implements));
        while (nextImplementedModel) {
            // ensure fields
            nextImplementedModel.fields = nextImplementedModel.fields || [];
            // transfer non existing fields
            nextImplementedModel.fields.forEach((field) => {
                const exists = model.fields.find((baseField) => {
                    return baseField.name === field.name;
                });
                if (!exists) {
                    model.fields.unshift(field);
                }
            });
            // get next model, if any
            nextImplementedModel = cloneDeep(dataConfiguration.getModelDefinition(nextImplementedModel.implements));
        }
    }
    return model;
}

/**
 * Validates a slash-separated attribute path (e.g. "person/address/city")
 * against a model definition, following field types through nested models
 * and taking inheritance into account.
 * @param {string} attribute The attribute path to validate
 * @param {*} model The root model definition
 * @returns {boolean} true if every segment resolves to a field; false on any
 *   missing segment or internal error (errors are traced, never thrown)
 */
function attributeIsValid(attribute, model) {
    try {
        Args.check(!!model && typeof attribute === 'string');
        // populate model fields
        populateFieldsFromInheritance(model);
        // initiate current model as the parent model
        let currentModel = model;
        // split attribute into a stack
        const attributesStack = attribute.split('/');
        // while the stack is not empty
        while (attributesStack.length > 0) {
            // validate current model (important for 2nd+ iterations)
            if (currentModel == null) {
                return false;
            }
            // shift an element
            const fieldName = attributesStack.shift();
            // find field in the current model
            const field = currentModel.fields.find((field) => {
                return field.name === fieldName;
            });
            // if it's not found, exit
            if (field == null) {
                return false;
            }
            // refresh current model and continue with the stack
            currentModel = cloneDeep(dataConfiguration.getModelDefinition(field.type));
            populateFieldsFromInheritance(currentModel);
        }
        return true;
    } catch (err) {
        TraceUtils.error(err);
        return false;
    }
}

/**
 * Flattens an exceljs cell value to a primitive: unwraps hyperlink `.text`,
 * formula `.result`, and joins `.richText` runs. Whitespace-only strings
 * become null.
 * @param {*} value Raw exceljs cell value
 * @returns {*} The flattened value, or null for blank strings
 */
function flattenValue(value) {
    if (value !== null && typeof value === 'object' && Object.prototype.hasOwnProperty.call(value, 'text')) {
        value = value.text;
    }
    if (value !== null && typeof value === 'object' && Object.prototype.hasOwnProperty.call(value, 'result')) {
        value = value.result;
    }
    if (value !== null && typeof value === 'object' && Array.isArray(value.richText)) {
        value = value.richText.map((richText) => richText.text).join('');
    }
    if (typeof value === 'string' && value.trim().length === 0) {
        return null;
    }
    return value;
}

/**
 * Builds a nested object from a delimited path, e.g.
 * formatValue('a/b/c', 1) -> { a: { b: { c: 1 } } }.
 * @param {string} path The delimited attribute path
 * @param {*} value The leaf value
 * @param {string} [delimiter='/'] Path delimiter
 * @returns {*} The nested object (or the value itself for an empty path)
 */
function formatValue(path, value, delimiter = '/') {
    const parts = path.split(delimiter).filter(Boolean);
    return parts.reduceRight((acc, key) => ({ [key]: acc }), value);
}

/**
 * Allows data conversion to xls, independent from res. Similar to xlsParser function
 * @param {Array<object>} data The data to be converted
 * @returns {Promise<Buffer>} Xlsx buffer
 * @throws {Error} rejects with Error('Unprocessable entity') when data is not
 *   an array (previously rejected with a bare string)
 */
async function toXlsx(data) {
    if (!Array.isArray(data)) {
        // reject with a proper Error so callers get a stack trace
        throw new Error('Unprocessable entity');
    }
    const workbook = new Workbook();
    const sheet = workbook.addWorksheet('Sheet1');
    const rows = data.map((row) => {
        const res = {};
        Object.keys(row).forEach((key) => {
            // if attribute is an object with a "name" property, use that name;
            // otherwise keep the value as-is
            if (typeof row[key] === 'object' && row[key] !== null && Object.prototype.hasOwnProperty.call(row[key], 'name')) {
                res[key] = row[key]['name'];
            } else {
                res[key] = row[key];
            }
        });
        return res;
    });
    // add columns (derived from the keys of the first row)
    if (rows.length > 0) {
        sheet.columns = Object.keys(rows[0]).map((key) => {
            return { header: key, key: key };
        });
    }
    rows.forEach((row) => {
        sheet.addRow(row);
    });
    // write to a new buffer
    return workbook.xlsx.writeBuffer();
}

module.exports.xlsPostParserWithConfig = xlsPostParserWithConfig;
module.exports.XlsxContentType = XlsxContentType;
module.exports.toXlsx = toXlsx;