@vulcan-sql/core
Core package of VulcanSQL
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.CacheLayerLoader = void 0;
const tslib_1 = require("tslib");
const path = require("path");
const fs = require("fs");
const moment = require("moment");
const inversify_1 = require("inversify");
const types_1 = require("../../containers/types");
const models_1 = require("../../models/index");
const options_1 = require("../../options/index");
const utils_1 = require("../../lib/utils");
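/**
 * Descriptive note (added for readability): this loader exports query results from a
 * source profile into files under the configured cache folder, then imports those
 * files into the cache storage data source (the "vulcan.cache" profile).
 */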
let CacheLayerLoader = class CacheLayerLoader {
    constructor(options, dataSourceFactory) {
        this.logger = (0, utils_1.getLogger)({ scopeName: 'CORE' });
        this.dataSourceFactory = dataSourceFactory;
        this.options = options;
        // prepare the cache data source
        this.cacheStorage = this.dataSourceFactory(models_1.cacheProfileName);
    }
    /**
     * Load the data into the cache storage according to the cache settings.
     * @param templateName template source name
     * @param cache the cache layer settings from the API schema
     */
    load(templateName, cache) {
        return tslib_1.__awaiter(this, void 0, void 0, function* () {
            const { cacheTableName, sql, profile, indexes, folderSubpath, options: cacheOptions, } = cache;
            const type = this.options.type;
            const dataSource = this.dataSourceFactory(profile);
            // generate the directory that the cache files are exported to
            // format => [folderPath]/[schema.templateSource]/[profileName]/[cacheTableName]/[timestamp]
            const subpath = folderSubpath || moment.utc().format('YYYYMMDDHHmmss');
            const directory = path.resolve(this.options.folderPath, templateName, profile, cacheTableName, subpath);
            const parquetFiles = this.getParquetFiles(directory);
            if (!parquetFiles.length) {
                if (!fs.existsSync(directory)) {
                    fs.mkdirSync(directory, { recursive: true });
                }
                // remove the files in other subfolders before exporting, because the old cache files will not be reused
                const folderPath = path.resolve(this.options.folderPath, templateName, profile, cacheTableName);
                const folders = fs
                    .readdirSync(folderPath)
                    .filter((file) => fs.statSync(path.resolve(folderPath, file)).isDirectory());
                this.removeParquetFiles(folders, folderPath);
                // 1. export to cache files according to the cache settings of each schema
                this.logger.debug(`Start to export to ${type} file in "${directory}"`);
                yield dataSource.export({
                    sql,
                    directory,
                    profileName: profile,
                    type,
                    options: cacheOptions,
                });
            }
            else {
                this.logger.debug(`Parquet file \n ${parquetFiles.join('\n ')} found in ${directory}, skip export`);
            }
            this.logger.debug(`Start to load ${cacheTableName} in "${directory}"`);
            // 2. load the exported files into the cache data source
            yield this.cacheStorage.import({
                tableName: cacheTableName,
                directory,
                // use the "vulcan.cache" profile to import the cache data
                profileName: models_1.cacheProfileName,
                // default schema name for the cache layer
                schema: models_1.vulcanCacheSchemaName,
                type,
                indexes,
            });
        });
    }
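    // list the parquet files in the given directory; returns an empty array when the directory does not exist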
    getParquetFiles(directory) {
        if (!directory || !fs.existsSync(directory))
            return [];
        const files = fs.readdirSync(directory);
        const parquetFiles = files.filter((file) => /\.parquet$/.test(file));
        return parquetFiles;
    }
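    // delete every parquet file inside each of the given subfolders under folderPath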
    removeParquetFiles(folders, folderPath) {
        folders.forEach((folder) => {
            const directory = path.resolve(folderPath, folder);
            const parquetFiles = this.getParquetFiles(directory);
            parquetFiles.forEach((file) => {
                fs.unlinkSync(path.resolve(directory, file));
            });
        });
    }
};
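// inversify wiring: mark the class as injectable and inject the cache layer options and the data source factory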
CacheLayerLoader = tslib_1.__decorate([
    (0, inversify_1.injectable)(),
    tslib_1.__param(0, (0, inversify_1.inject)(types_1.TYPES.CacheLayerOptions)),
    tslib_1.__param(1, (0, inversify_1.inject)(types_1.TYPES.Factory_DataSource)),
    tslib_1.__metadata("design:paramtypes", [options_1.CacheLayerOptions, Function])
], CacheLayerLoader);
exports.CacheLayerLoader = CacheLayerLoader;
//# sourceMappingURL=cacheLayerLoader.js.map
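For reference, a minimal usage sketch. It constructs the loader by hand instead of resolving it from the inversify container, and the option values, profile names, SQL, and the getDataSource() helper are illustrative assumptions rather than APIs defined in this file.

// Usage sketch (assumptions: CacheLayerLoader is re-exported from the package root,
// and getDataSource() is a hypothetical helper returning a DataSource with
// export()/import() for a given profile name, including the "vulcan.cache" profile).
const { CacheLayerLoader } = require('@vulcan-sql/core');

const loader = new CacheLayerLoader(
    // the loader reads only `type` and `folderPath` from its options
    { type: 'parquet', folderPath: '/tmp/vulcan-cache' },
    (profileName) => getDataSource(profileName)
);

loader
    .load('orders', {
        cacheTableName: 'orders_cache', // table created in the cache storage
        sql: 'SELECT * FROM orders',    // query exported from the source profile
        profile: 'pg',                  // source data source profile name
        indexes: {},                    // optional index definitions for the imported table
        folderSubpath: undefined,       // defaults to a UTC timestamp subfolder
        options: {},                    // extra export options passed through to the data source
    })
    .then(() => console.log('cache refreshed'));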