@mindconnect/mindconnect-nodejs
Version:
NodeJS Library for Siemens Insights Hub Connectivity - TypeScript SDK for Insights Hub and Industrial IoT - Command Line Interface - Insights Hub Development Proxy (Siemens Insights Hub was formerly known as MindSphere)
383 lines • 20.1 kB
JavaScript
;
// TypeScript-emitted helper: drives a generator as an async function and
// resolves the returned promise when the generator completes (standard
// __awaiter shim; reuses an existing definition if one is already in scope).
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
    // wrap non-promise yielded values so every step can be awaited uniformly
    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
    return new (P || (P = Promise))(function (resolve, reject) {
        // resume the generator with the awaited value / rethrow into it on rejection
        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
        step((generator = generator.apply(thisArg, _arguments || [])).next());
    });
};
Object.defineProperty(exports, "__esModule", { value: true });
const console_1 = require("console");
const csv = require("csvtojson");
const fs = require("fs");
const _ = require("lodash");
const path = require("path");
const test_utils_1 = require("../../../test/test-utils");
const sdk_1 = require("../../api/sdk");
const utils_1 = require("../../api/utils");
const command_utils_1 = require("./command-utils");
const ora = require("ora-classic");
// CLI colorizers; `color` is a `let` because adjustColor() replaces it at
// runtime depending on the command line options.
let color = (0, command_utils_1.getColor)("magenta");
const warn = (0, command_utils_1.getColor)("yellow");
exports.default = (program) => {
program
.command("run-bulk")
.alias("rb")
.option("-d, --dir <directoryname>", "config file with agent configuration", "bulkupload")
.option("-y, --retry <number>", "retry attempts before giving up", "3")
.option("-l, --parallel <number>", "parallel chunk uploads", "3")
.option("-s, --size <size>", "entries per file ", `${Number.MAX_SAFE_INTEGER}`)
.option("-f, --force", "force generation of json files, file upload and creation of jobs")
.option("-k, --passkey <passkey>", "passkey")
.option("-i, --timeseries", `use ${warn("(deprecated)")} timeseries upload`)
.option("-v, --verbose", "verbose output")
.option("-t, --start", "start sending data to mindsphere")
.description(color("runs the timeseries (bulk) upload job from <directoryname> directory *"))
.action((options) => {
(() => __awaiter(void 0, void 0, void 0, function* () {
try {
checkRequiredParamaters(options);
const sdk = (0, command_utils_1.getSdk)(options);
color = (0, command_utils_1.adjustColor)(color, options);
(0, command_utils_1.homeDirLog)(options.verbose, color);
(0, command_utils_1.proxyLog)(options.verbose, color);
const asset = (yield createOrReadAsset(sdk, options));
(0, command_utils_1.modeInformation)(asset, options, color);
const aspects = getAspectsFromDirNames(options);
const spinner = ora("creating files");
!options.verbose && spinner.start("");
(0, command_utils_1.verboseLog)(`Starting bulk-import of timeseries data for twintype : ${asset.twinType}`, options.verbose, spinner);
const startDate = new Date();
let jobstate = {
options: {
size: options.size,
twintype: asset.twinType,
asset: asset,
},
uploadFiles: [],
bulkImports: [],
timeSeriesFiles: [],
};
if (fs.existsSync(path.resolve(`${options.dir}/jobstate.json`))) {
jobstate = require(path.resolve(`${options.dir}/jobstate.json`));
}
options.force &&
(0, command_utils_1.verboseLog)(`${warn("\nWARNING")} forcing the generation of json files can lead to conflicts if files have been already uploaded.\n`, options.verbose, spinner);
if (!jobstate.uploadFiles || jobstate.uploadFiles.length === 0 || options.force) {
const files = yield createJsonFilesForUpload({
aspects,
options,
spinner,
asset,
});
jobstate.uploadFiles = files;
saveJobState(options, jobstate);
}
else {
(0, command_utils_1.verboseLog)(`${color("skipping")} generation of json files..`, options.verbose, spinner);
yield (0, test_utils_1.sleep)(500);
}
asset.twinType === sdk_1.AssetManagementModels.TwinType.Simulation &&
verifySimulationFiles(jobstate.uploadFiles) &&
(0, command_utils_1.verboseLog)("All files verified", options.verbose, spinner);
yield (0, test_utils_1.sleep)(500);
!options.verbose && spinner.succeed("Done converting files to json.");
!options.start &&
console.log(`\nrun mdsp run-bulk with ${color("--start")} option to start sending data to mindsphere\n`);
// *
// * this is called only with the start - option
// *
if (options.start) {
const spinner = ora("running");
!options.verbose && spinner.start("");
if (asset.twinType === sdk_1.AssetManagementModels.TwinType.Simulation) {
yield runSimulationUpload(sdk, options, jobstate, spinner);
}
else if (asset.twinType === sdk_1.AssetManagementModels.TwinType.Performance &&
!options.timeseries) {
yield runSimulationUpload(sdk, options, jobstate, spinner);
}
else {
yield runTimeSeriesUpload(sdk, options, jobstate, spinner);
}
!options.verbose && spinner.succeed("Done");
const endDate = new Date();
(0, console_1.log)(`Run time: ${(endDate.getTime() - startDate.getTime()) / 1000} seconds`);
asset.twinType === sdk_1.AssetManagementModels.TwinType.Simulation &&
console.log(`\t run mdsp ${color("check-bulk")} command to check the progress of the job`);
}
}
catch (err) {
(0, command_utils_1.errorLog)(err, options.verbose);
}
}))();
})
.on("--help", () => {
(0, console_1.log)("\n Examples:\n");
(0, console_1.log)(` mdsp run-bulk runs the upload job from the ${color("bulkimport")} directory`);
(0, console_1.log)(` mdsp run-bulk --dir asset1 --verbose runs the upload job from the ${color("asset1")} with verbose output`);
});
};
/**
 * Posts the generated JSON files via the (deprecated) direct timeseries API,
 * one PutTimeSeries call per file, skipping files already recorded in
 * jobstate.timeSeriesFiles (unless --force) so interrupted runs can resume.
 */
function runTimeSeriesUpload(sdk, options, jobstate, spinner) {
    return __awaiter(this, void 0, void 0, function* () {
        const SLEEP_ON_429 = 5000; // back off 5s when the service rate-limits (HTTP 429)
        const SLEEP_BETWEEN = 2000; // base pause between posts (ms)
        // throttle: 2 seconds per 500 records in a file, but at least 2 seconds
        const pause = Math.max((options.size / 500) * SLEEP_BETWEEN, 2000);
        const tsClient = sdk.GetTimeSeriesClient();
        for (const file of jobstate.uploadFiles) {
            // require() parses the json chunk file into an array of records
            const timeSeries = require(path.resolve(file.path));
            if (jobstate.timeSeriesFiles.indexOf(file.path) >= 0 && !options.force) {
                // already posted in a previous run - skip unless --force was given
                (0, command_utils_1.verboseLog)(`${color(timeSeries.length)} records from ${formatDate(file.mintime)} to ${formatDate(file.maxtime)} were already posted`, options.verbose, spinner);
                yield (0, test_utils_1.sleep)(100);
                continue;
            }
            yield (0, utils_1.retry)(options.retry, () => tsClient.PutTimeSeries(file.entity, file.propertyset, timeSeries), SLEEP_ON_429);
            (0, command_utils_1.verboseLog)(`posted ${color(timeSeries.length)} records from ${formatDate(file.mintime)} to ${formatDate(file.maxtime)} with pause of ${(pause / 1000).toFixed(2)}s between records`, options.verbose, spinner);
            // persist progress after every successful post so a crash can resume here
            jobstate.timeSeriesFiles.push(file.path);
            saveJobState(options, jobstate);
            yield (0, test_utils_1.sleep)(pause); // throttle before posting the next file
        }
    });
}
/**
 * Colorizes a timestamp for log output. Date instances are rendered as
 * ISO-8601 strings; any other value is stringified as-is.
 */
function formatDate(date) {
    const text = date instanceof Date ? date.toISOString() : date;
    return `${color(text)}`;
}
/**
 * Bulk-import path: uploads the generated JSON files to IoT file storage and
 * then creates the bulk-import jobs, unless a previous run already created
 * them (jobstate.bulkImports is non-empty). Job state is persisted after
 * every phase so interrupted runs can resume.
 */
function runSimulationUpload(sdk, options, jobstate, spinner) {
    return __awaiter(this, void 0, void 0, function* () {
        yield uploadFiles(sdk, options, jobstate, spinner);
        saveJobState(options, jobstate);
        const jobsAlreadyCreated = jobstate.bulkImports && jobstate.bulkImports.length > 0;
        if (jobsAlreadyCreated) {
            (0, command_utils_1.verboseLog)(`the jobs for ${options.dir} have already been created.`, options.verbose, spinner);
            yield (0, test_utils_1.sleep)(2000);
        }
        else {
            yield createUploadJobs(sdk, jobstate, options, spinner);
            saveJobState(options, jobstate);
        }
    });
}
/**
 * Groups the upload files by (property set, full hour of mintime) and creates
 * one bulk-import job per group via the TimeSeriesBulk API. The created job
 * ids are stored on jobstate.bulkImports for later progress checking.
 */
function createUploadJobs(sdk, jobstate, options, spinner) {
    return __awaiter(this, void 0, void 0, function* () {
        // group by property set + hour-truncated start time; _.map() without an
        // iteratee turns the groupBy dictionary into an array of file groups
        const results = _(jobstate.uploadFiles)
            .groupBy((x) => {
            const date = new Date(x.mintime);
            date.setMinutes(0, 0, 0);
            return JSON.stringify({ propertySet: x.propertyset, fullHourDate: date });
        })
            .map()
            .value();
        for (const fileInfos of results) {
            const first = _(fileInfos).first() || (0, utils_1.throwError)("no data in results");
            const data = {
                entity: first.entity,
                propertySetName: first.propertyset,
                timeseriesFiles: fileInfos.map((x) => {
                    return { filepath: x.filepath, from: x.mintime, to: x.maxtime };
                }),
            };
            jobstate.bulkImports.push({ data: [data] });
        }
        const bulkupload = sdk.GetTimeSeriesBulkClient();
        for (const bulkImport of jobstate.bulkImports) {
            const job = yield bulkupload.PostImportJob(bulkImport);
            // NOTE(review): the id is stored from job.id but logged as job.jobId below -
            // one of the two is likely undefined; confirm which property the bulk
            // import job model actually exposes before relying on bulkImport.jobid
            bulkImport.jobid = job.id;
            (0, command_utils_1.verboseLog)(`Job with ${color("" + job.jobId)} is in status : ${job.status} [${job.message}]`, options.verbose, spinner);
        }
    });
}
/**
 * Persists the current job state to <dir>/jobstate.json (pretty-printed) so
 * that an interrupted run can be resumed without redoing completed work.
 */
function saveJobState(options, jobstate) {
    const target = `${options.dir}/jobstate.json`;
    const serialized = JSON.stringify(jobstate, null, 2);
    fs.writeFileSync(target, serialized);
}
/**
 * Uploads every generated JSON file to IoT file storage (chunked, with
 * parallel chunk uploads and retries), skipping entries that already carry an
 * etag from a previous run unless --force is given. After each upload the
 * entry's etag is refreshed from the service and the job state is persisted.
 */
function uploadFiles(sdk, options, jobstate, spinner) {
    return __awaiter(this, void 0, void 0, function* () {
        var _a, _b;
        const fileUploadClient = sdk.GetIoTFileClient();
        for (const entry of jobstate.uploadFiles) {
            let etag;
            if (entry.etag === undefined || options.force) {
                // force upload of files: fetch the current etag so the upload can use If-Match
                // NOTE(review): this lookup uses entry.entity while the upload below uses
                // jobstate.options.asset.assetId - confirm both always refer to the same asset
                const fileInfo = yield fileUploadClient.GetFiles(entry.entity, {
                    filter: `name eq ${path.basename(entry.filepath)} and path eq ${path.dirname(entry.filepath)}/`,
                });
                if (fileInfo.length === 1) {
                    etag = fileInfo[0].etag;
                }
            }
            else {
                // etag already recorded and not forcing - nothing to do for this entry
                (0, command_utils_1.verboseLog)(`The file ${color(entry.filepath)} was already uploaded`, options.verbose, spinner);
                yield (0, test_utils_1.sleep)(500);
                continue;
            }
            const result = yield (0, utils_1.retry)(options.retry, () => fileUploadClient.UploadFile(`${jobstate.options.asset.assetId}`, entry.filepath, entry.path, {
                type: "application/json",
                timestamp: fs.statSync(entry.path).mtime,
                description: "bulk upload",
                chunk: true,
                retry: options.retry,
                parallelUploads: options.parallel,
                logFunction: (p) => {
                    (0, command_utils_1.verboseLog)(p, options.verbose, spinner);
                },
                verboseFunction: (p) => {
                    (0, command_utils_1.verboseLog)(p, options.verbose, spinner);
                },
                ifMatch: etag,
            }));
            // NOTE(review): `${etag + 1}` concatenates when etag is a string ("1" -> "11");
            // the value is overwritten from GetFiles below, so this is only transient state
            entry.etag = etag === undefined ? "0" : `${etag + 1}`;
            yield saveJobState(options, jobstate);
            (0, command_utils_1.verboseLog)(`uploaded ${entry.filepath} with md5 checksum: ${result}`, options.verbose, spinner);
            const fileInfo = yield fileUploadClient.GetFiles(`${jobstate.options.asset.assetId}`, {
                filter: `name eq ${path.basename(entry.filepath)} and path eq ${path.dirname(entry.filepath)}/`,
            });
            // record the authoritative etag reported by the service ("0" when absent)
            entry.etag = `${(_b = (_a = fileInfo[0]) === null || _a === void 0 ? void 0 : _a.etag) !== null && _b !== void 0 ? _b : "0"}`;
            (0, command_utils_1.verboseLog)(`Entry etag: ${entry.etag}`, options.verbose, spinner);
        }
    });
}
/**
 * Converts every CSV file under <dir>/csv/<aspect>/ into JSON chunk files of
 * at most options.size records each and returns the list of upload job
 * descriptors (entity, propertyset, file paths, min/max timestamp per chunk).
 */
function createJsonFilesForUpload(_a) {
    return __awaiter(this, arguments, void 0, function* ({ aspects, options, spinner, asset, }) {
        const uploadJobs = [];
        for (const aspect of aspects) {
            const files = getFiles(options, aspect);
            for (const file of files) {
                let data = [];
                let recordCount = 0;
                let mintime;
                let maxtime;
                const maxSize = options.size;
                maxSize > 0 || (0, utils_1.throwError)("the size must be greater than 0");
                yield (0, command_utils_1.verboseLog)(`reading file: ${options.dir}/csv/${aspect}/${color(file)}`, options.verbose, spinner);
                // stream the csv row by row; flush a json chunk whenever maxSize records accumulated
                yield csv()
                    .fromFile(`${options.dir}/csv/${aspect}/${file}`)
                    .subscribe((json) => __awaiter(this, void 0, void 0, function* () {
                    data.push(json);
                    // the _time column carries the record timestamp
                    const timestamp = new Date(json._time);
                    ({ mintime, maxtime } = determineMinAndMax(mintime, timestamp, maxtime));
                    if (data.length >= maxSize) {
                        // NOTE: `path` here shadows the node path module inside this callback
                        const [path, newname] = writeDataAsJson({
                            mintime,
                            maxtime,
                            options,
                            aspect,
                            data,
                        });
                        uploadJobs.push({
                            entity: asset.assetId,
                            propertyset: aspect,
                            filepath: `bulk/${newname}.json`,
                            path: path,
                            mintime: mintime,
                            maxtime: maxtime,
                        });
                        // reset the accumulator for the next chunk
                        data = [];
                        mintime = maxtime = undefined;
                    }
                    recordCount++;
                }));
                // flush the remaining records that did not fill a whole chunk
                if (data.length > 0) {
                    const [path, newname] = writeDataAsJson({
                        mintime,
                        maxtime,
                        options,
                        aspect,
                        data,
                    });
                    uploadJobs.push({
                        entity: asset.assetId,
                        propertyset: aspect,
                        filepath: `bulk/${newname}.json`,
                        path: path,
                        mintime: mintime,
                        maxtime: maxtime,
                    });
                }
                (0, command_utils_1.verboseLog)(`total record count in ${file}: ${color(recordCount.toString())}`, options.verbose, spinner);
            }
        }
        return uploadJobs;
    });
}
/**
 * Ensures every upload file contains data from one full hour only (a
 * requirement for simulation bulk imports): the hour-truncated mintime and
 * maxtime of each file must match. Prints the offending paths and throws when
 * any file spans more than one hour; returns true otherwise.
 */
function verifySimulationFiles(uploadJobs) {
    const truncateToHour = (value) => {
        const d = new Date(value);
        d.setMinutes(0, 0, 0);
        return d.valueOf();
    };
    const offenders = uploadJobs.filter((info) => truncateToHour(info.mintime) !== truncateToHour(info.maxtime));
    if (offenders.length > 0) {
        offenders.forEach((f) => console.log(f.path));
        (0, utils_1.throwError)(`there are ${offenders.length} files which contain data which are not from the same hour!`);
    }
    return true;
}
/**
 * Widens the [mintime, maxtime] interval so it contains `timestamp`; a
 * missing (falsy) bound is replaced outright. Returns the updated bounds.
 */
function determineMinAndMax(mintime, timestamp, maxtime) {
    const lowerBound = !mintime || timestamp < mintime ? timestamp : mintime;
    const upperBound = !maxtime || timestamp > maxtime ? timestamp : maxtime;
    return { mintime: lowerBound, maxtime: upperBound };
}
/**
 * Writes one chunk of records to <dir>/json/<aspect>/<aspect>_<mintime>.json
 * and returns [filePath, fileNameWithoutExtension].
 * Throws (via throwError) when neither mintime nor maxtime was determined
 * while reading the CSV, which indicates corrupted timestamps.
 */
function writeDataAsJson({ mintime, maxtime, options, aspect, data, }) {
    // fixed typo in the error message ("ivalid" -> "invalid")
    mintime || maxtime || (0, utils_1.throwError)("the data is invalid, the timestamps are corrupted");
    // derive a filesystem-safe name from the aspect and the chunk's first timestamp
    const newFileName = `${aspect}_${mintime && mintime.toISOString()}`.replace(/[^a-z0-9]/gi, "_");
    (0, command_utils_1.verboseLog)(`writing ${options.dir}/json/${aspect}/${color(newFileName + ".json")}`, options.verbose);
    const newPath = `${options.dir}/json/${aspect}/${newFileName}.json`;
    fs.writeFileSync(newPath, JSON.stringify(data));
    return [newPath, newFileName];
}
/**
 * Lists the plain files (subdirectories excluded) in the directory for one
 * aspect, i.e. <dir>/<csvorjson>/<aspect>/. Defaults to the csv input folder.
 */
function getFiles(options, aspect, csvorjson = "csv") {
    (0, command_utils_1.verboseLog)(`reading directory ${options.dir}/${csvorjson}/${color(aspect)}`, options.verbose);
    const base = `${options.dir}/${csvorjson}/${aspect}`;
    return fs.readdirSync(base).filter((entry) => fs.statSync(`${base}/${entry}`).isFile());
}
/**
 * Derives the aspect (property set) names from the directory layout: every
 * subdirectory of <dir>/csv/ is treated as one aspect name.
 */
function getAspectsFromDirNames(options) {
    const csvRoot = `${options.dir}/csv/`;
    const aspects = fs.readdirSync(csvRoot).filter((candidate) => fs.statSync(`${csvRoot}${candidate}`).isDirectory());
    (0, command_utils_1.verboseLog)(`aspect directories ${JSON.stringify(aspects)} in csv directory`, options.verbose);
    return aspects;
}
/**
 * Reads <dir>/asset.json and either creates a new asset (when no assetId is
 * present yet) or fetches the current asset state from asset management.
 * The resulting asset is written back to <dir>/asset.json so subsequent runs
 * reuse the same assetId.
 */
function createOrReadAsset(sdk, options) {
    return __awaiter(this, void 0, void 0, function* () {
        let asset = require(path.resolve(`${options.dir}/asset.json`));
        const assetMgmt = sdk.GetAssetManagementClient();
        if (!asset.assetId) {
            (0, command_utils_1.verboseLog)(`creating new asset ${color(asset.name)}`, options.verbose);
            asset = yield assetMgmt.PostAsset(asset);
            // fixed: the message previously printed a literal "$asset" prefix
            (0, command_utils_1.verboseLog)(`asset ${color(asset.name)} with id ${color(asset.assetId)} created`, options.verbose);
        }
        else {
            (0, command_utils_1.verboseLog)(`reading asset ${color(asset.name)} ${color(asset.assetId)}`, options.verbose);
            asset = yield assetMgmt.GetAsset(asset.assetId);
            (0, command_utils_1.verboseLog)(`asset ${color(asset.name)} ${color(asset.assetId)} was read from the MindSphere`, options.verbose);
        }
        // persist the (possibly newly created) asset for the next run
        fs.writeFileSync(`${options.dir}/asset.json`, JSON.stringify(asset, null, 2));
        return asset;
    });
}
/**
 * Validates the --dir option: normalizes trailing path separators and checks
 * that the directory exists and contains the artifacts produced by
 * `mdsp prepare-bulk` (asset.json plus the json/ and csv/ folders).
 * Throws (via throwError) with a user-facing message when a check fails.
 */
function checkRequiredParamaters(options) {
    // normalize: strip trailing "/" or "\" separators; the loop also handles
    // repeated separators such as "bulkupload//" (the original stripped only one)
    options.dir = `${options.dir}`;
    while (options.dir.endsWith("/") || options.dir.endsWith("\\")) {
        options.dir = options.dir.slice(0, -1);
    }
    (0, command_utils_1.verboseLog)(`reading directory: ${color(options.dir)}`, options.verbose);
    !fs.existsSync(options.dir) && (0, utils_1.throwError)(`the directory ${color(options.dir)} doesn't exist!`);
    !fs.existsSync(`${options.dir}/asset.json`) &&
        (0, utils_1.throwError)(`the directory ${color(options.dir)} must contain the asset.json file. run mdsp prepare-bulk command first!`);
    !fs.existsSync(`${options.dir}/json/`) &&
        (0, utils_1.throwError)(`the directory ${color(options.dir)} must contain the json/ folder. run mdsp prepare-bulk command first!`);
    !fs.existsSync(`${options.dir}/csv/`) &&
        (0, utils_1.throwError)(`the directory ${color(options.dir)} must contain the csv/ folder. run mdsp prepare-bulk command first!`);
}
//# sourceMappingURL=iot-bulk-run.js.map