@mindconnect/mindconnect-nodejs
NodeJS Library for Siemens Insights Hub Connectivity - TypeScript SDK for Insights Hub and Industrial IoT - Command Line Interface - Insights Hub Development Proxy (Siemens Insights Hub was formerly known as MindSphere)
"use strict";
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
return new (P || (P = Promise))(function (resolve, reject) {
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
step((generator = generator.apply(thisArg, _arguments || [])).next());
});
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.MultipartUploader = void 0;
const crypto = require("crypto");
const debug = require("debug");
const fs = require("fs");
const http = require("http");
const path = require("path");
const stream = require("stream");
const url_1 = require("url");
const utils_1 = require("../../utils");
const _ = require("lodash");
const mime = require("mime-types");
const log = debug("multipart-uploader");
/**
 * The multipart uploader handles the upload of files to MindSphere (Insights Hub).
* This class is shared between the MindConnectAgent and the IotFileClient
*
* @export
* @class MultipartUploader
* @extends {MindConnectBase}
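 *
 * @example
 * // minimal sketch (assumed setup): pass either an agent or an SDK client;
 * // `sdkClient` here stands for any client exposing GetToken, GetGateway and HttpAction
 * const uploader = new MultipartUploader(undefined, sdkClient);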
*/
class MultipartUploader {
getTotalChunks(fileLength, chunkSize, optional) {
let totalChunks = Math.ceil(fileLength / chunkSize);
if (totalChunks > 1) {
totalChunks = Math.ceil(fileLength / (Math.floor(chunkSize / this.highWatermark) * this.highWatermark));
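            // example: a 20 MB file with an 8.5 MB chunkSize and the 1 MB highWatermark
            // gives an effective chunk size of floor(8.5 / 1) * 1 MB = 8 MB,
            // hence totalChunks = ceil(20 / 8) = 3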
}
!optional.chunk &&
totalChunks > 1 &&
(0, utils_1.throwError)("File is too big. Enable chunked/multipart upload (CLI: --chunked) to upload it.");
optional.chunk && totalChunks > 1 && log("WARN: Chunking is experimental!");
return totalChunks;
}
getTimeStamp(optional, file) {
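        // prefer an explicit timestamp; otherwise use the file's ctime, or "now" for in-memory buffers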
return optional.timestamp || (file instanceof Buffer ? new Date() : fs.statSync(file).ctime);
}
    getFileType(optional, file) {
        // mime.lookup returns false for unknown extensions, so fall back to application/octet-stream
        return (optional.type ||
            (file instanceof Buffer ? "application/octet-stream" : mime.lookup(file) || "application/octet-stream"));
    }
getStreamFromFile(file, chunksize) {
return file instanceof Buffer
? (() => {
const bufferStream = new stream.PassThrough({ highWaterMark: this.highWatermark });
for (let index = 0; index < file.length;) {
const end = Math.min(index + chunksize, file.length);
bufferStream.write(file.slice(index, end));
index = end;
}
bufferStream.end();
return bufferStream;
})()
: fs.createReadStream(path.resolve(file), { highWaterMark: this.highWatermark });
}
    addDataToBuffer(current, data) {
        // concatenate two byte arrays into a freshly allocated buffer
        const newLength = current.byteLength + data.byteLength;
        const newBuffer = new Uint8Array(newLength);
        newBuffer.set(current, 0);
        newBuffer.set(data, current.byteLength);
        return newBuffer;
    }
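    // strips the query string, e.g. ".../files/id/path?part=3" -> ".../files/id/path"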
getBareUrl(url) {
const parsedUrl = new url_1.URL(url);
const bareUrl = url.replace(parsedUrl.search, "");
return bareUrl;
}
fix_iotFileUpload_3_2_0(result, previousEtag) {
        // ! guess the eTag for the upload
        // ! in May 2019 MindSphere was not returning eTags for multipart uploads in the header
        // ! but was still expecting them for the next upload
// ! this fix guesses the new value of the eTag
return typeof result === "boolean"
? previousEtag !== undefined
? (previousEtag + 1).toString()
: "0"
: result;
}
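    // agents keep the last seen eTag per bare URL in their configuration:
    // setIfMatch replays it as an If-Match header, addUrl records the value returned by the service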
setIfMatch(url, headers) {
let result;
const bareUrl = this.getBareUrl(url);
if (this.agent) {
const config = this.GetConfiguration();
if (config.urls && config.urls[bareUrl]) {
const eTag = config.urls[bareUrl];
const etagNumber = parseInt(eTag);
result = etagNumber;
headers["If-Match"] = etagNumber;
}
}
return result;
}
addUrl(url, result) {
if (!this.agent)
return;
const config = this.GetConfiguration();
if (!config.urls) {
config.urls = {};
}
const entry = this.getBareUrl(url);
config.urls[entry] = result;
}
MultipartOperation(_a) {
return __awaiter(this, arguments, void 0, function* ({ mode, entityId, uploadPath, ifMatch, description, fileType, timeStamp, }) {
const url = `/api/iotfile/v3/files/${entityId}/${uploadPath}?upload=${mode}`;
const token = yield this.GetToken();
const headers = {
description: description,
type: fileType,
};
timeStamp && (headers.timeStamp = timeStamp.toISOString());
ifMatch !== undefined && (headers["If-Match"] = ifMatch);
this.setIfMatch(`${this.GetGateway()}${url}`, headers);
const result = yield this.GetAuthorizer().HttpAction({
verb: "PUT",
authorization: token,
gateway: this.GetGateway(),
baseUrl: url,
octetStream: true,
additionalHeaders: headers,
noResponse: true,
returnHeaders: true,
body: Buffer.alloc(0),
});
return result;
});
}
UploadChunk(_a) {
return __awaiter(this, arguments, void 0, function* ({ description, chunks, totalChunks, fileType, timeStamp, uploadPath, entityId, buffer, ifMatch, }) {
if (buffer.length <= 0)
return false;
const headers = {
description: description,
type: fileType,
timestamp: timeStamp.toISOString(),
};
ifMatch !== undefined && (headers["If-Match"] = ifMatch);
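            // parts 1..totalChunks-1 are addressed as ?part=<n>; the final part is sent
            // with ?upload=complete, which closes the multipart transaction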
let part = totalChunks === 1 ? "" : `?part=${chunks}`;
if (part === `?part=${totalChunks}`) {
part = `?upload=complete`;
}
const url = `/api/iotfile/v3/files/${entityId}/${uploadPath}${part}`;
const previousEtag = this.setIfMatch(`${this.GetGateway()}${url}`, headers);
const token = yield this.GetToken();
const gateway = this.GetGateway();
const resultHeaders = (yield this.GetAuthorizer().HttpAction({
verb: "PUT",
baseUrl: url,
gateway: gateway,
authorization: token,
body: buffer,
octetStream: true,
additionalHeaders: headers,
noResponse: true,
returnHeaders: true,
}));
const result = resultHeaders.get("ETag") || true;
// * only set the eTag after the upload is complete
if (totalChunks > 1 && !url.endsWith(`upload=complete`)) {
return true;
}
const newEtag = this.fix_iotFileUpload_3_2_0(result, previousEtag);
this.addUrl(`${gateway}${url}`, newEtag);
return true;
});
}
/**
* Abort the multipart operation.
*
* @param {string} entityId
* @param {string} filePath
*
* @memberOf MultipartUploader
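 *
 * @example
 * // hedged sketch: `uploader` and `assetId` are placeholders for a configured
 * // MultipartUploader instance and a real asset or agent id
 * await uploader.AbortUpload(assetId, "uploads/big.bin");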
*/
AbortUpload(entityId, filePath) {
return __awaiter(this, void 0, void 0, function* () {
yield this.MultipartOperation({ mode: "abort", entityId: entityId, uploadPath: filePath });
});
}
/**
* Upload file to MindSphere IOTFileService
*
* @param {string} entityId - asset id or agent.ClientId() for agent
 * @param {string} filepath - MindSphere file path
* @param {(string | Buffer)} file - local path or Buffer
* @param {fileUploadOptionalParameters} [optional] - optional parameters: enable chunking, define retries etc.
* @returns {Promise<string>} - md5 hash of the file
*
* @memberOf MultipartUploader
*
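 * @example
 * // hedged sketch: `uploader`, `assetId` and the paths are placeholders; the options
 * // shown are the ones this class reads (chunk, chunkSize, retry, parallelUploads, verboseFunction)
 * const md5 = await uploader.UploadFile(assetId, "uploads/big.bin", "./big.bin", {
 *     chunk: true,
 *     chunkSize: 8 * 1024 * 1024,
 *     retry: 5,
 *     parallelUploads: 3,
 *     verboseFunction: (msg) => console.log(msg),
 * });
 *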
*/
UploadFile(entityId, filepath, file, optional) {
return __awaiter(this, void 0, void 0, function* () {
let storedError;
let aborted = false;
optional = optional || {};
const verboseFunction = optional.verboseFunction;
try {
const result = yield this._UploadFile(entityId, filepath, file, optional);
return result;
}
catch (error) {
try {
storedError = error;
                    verboseFunction && verboseFunction("Aborting previous upload...");
                    yield this.AbortUpload(entityId, filepath);
                    aborted = true;
}
catch (_a) { }
}
storedError &&
aborted &&
(0, utils_1.throwError)(`Error occurred uploading the file. (Multipart upload was automatically aborted).\n Previous error: ${storedError.message} `);
storedError && !aborted && (0, utils_1.throwError)(storedError.message);
// typescript issue: https://github.com/microsoft/TypeScript/issues/13958
return "";
});
}
_UploadFile(entityId, filepath, file, optional) {
return __awaiter(this, void 0, void 0, function* () {
optional = optional || {};
const chunkSize = optional.chunkSize || 8 * 1024 * 1024;
optional.chunk &&
chunkSize < 5 * 1024 * 1024 &&
(0, utils_1.throwError)("The chunk size must be at least 5 MB for multipart upload.");
const fileLength = file instanceof Buffer ? file.length : fs.statSync(file).size;
const totalChunks = this.getTotalChunks(fileLength, chunkSize, optional);
const shortFileName = path.basename(filepath);
const fileInfo = {
description: optional.description || shortFileName,
timeStamp: this.getTimeStamp(optional, file),
fileType: this.getFileType(optional, file),
uploadPath: filepath,
totalChunks: totalChunks,
entityId: entityId,
ifMatch: optional.ifMatch,
};
const RETRIES = optional.retry || 1;
const logFunction = optional.logFunction;
const verboseFunction = optional.verboseFunction;
const mystream = this.getStreamFromFile(file, chunkSize);
const hash = crypto.createHash("md5");
const promises = [];
let current = new Uint8Array(0);
let chunks = 0;
if (verboseFunction)
verboseFunction(`file upload started for ${file}`);
const multipartLog = logFunction ? logFunction("multipart") : undefined;
if (verboseFunction)
verboseFunction(totalChunks > 1
? `starting multipart upload: There are: ${totalChunks} total parts.`
: `the file is small enough for normal upload`);
let startMultipart;
if (optional.chunk && totalChunks > 1) {
startMultipart = () => (0, utils_1.retry)(RETRIES, () => this.MultipartOperation(Object.assign({ mode: "start" }, fileInfo)), 300, multipartLog);
}
return new Promise((resolve, reject) => {
mystream
.on("error", (err) => reject(err))
.on("data", (data) => __awaiter(this, void 0, void 0, function* () {
if (current.byteLength + data.byteLength <= chunkSize) {
current = this.addDataToBuffer(current, data);
}
else {
if (current.byteLength > 0) {
const currentBuffer = Buffer.from(current);
                            chunks++;
                            const uploadLog = logFunction ? logFunction(`part upload (part ${chunks})`) : undefined;
                            // capture the chunk number by value; `chunks` keeps changing
                            // while the deferred upload closures below are still queued
                            const currentChunk = chunks;
                            verboseFunction &&
                                verboseFunction(`reading chunk number ${chunks} with buffersize: ${(currentBuffer.length /
                                    (1024 * 1024)).toFixed(2)} MB`);
promises.push(() => (0, utils_1.retry)(RETRIES, () => this.UploadChunk(Object.assign(Object.assign({}, fileInfo), { chunks: currentChunk, buffer: currentBuffer })), 300, uploadLog));
}
current = new Uint8Array(data.byteLength);
current.set(data, 0);
}
}))
.on("end", () => {
const currentBuffer = Buffer.from(current);
const uploadLog = logFunction ? logFunction(`part upload (last part)`) : undefined;
chunks++;
verboseFunction &&
                        verboseFunction(`reading chunk number ${chunks} with buffersize: ${currentBuffer.length} bytes`);
                    const currentChunk = chunks;
promises.push(() => (0, utils_1.retry)(RETRIES, () => this.UploadChunk(Object.assign(Object.assign({}, fileInfo), { chunks: currentChunk, buffer: currentBuffer })), 300, uploadLog));
})
.pipe(hash)
.on("finish", () => __awaiter(this, void 0, void 0, function* () {
try {
// * this is the last promise (for multipart) the one which completes the upload
// * this has to be awaited last.
startMultipart &&
(yield startMultipart()) &&
verboseFunction &&
verboseFunction("starting multipart upload");
                    const lastPromise = promises.pop();
                    // * the chunks before the last one can be uploaded to MindSphere in parallel
                    const maxParallelUploads = (optional && optional.parallelUploads) || 3;
                    http.globalAgent.maxSockets = 50;
                    const splitPromises = _.chunk(promises, maxParallelUploads);
                    if (verboseFunction)
                        verboseFunction(`max parallel uploads ${maxParallelUploads}`);
                    for (const partPromises of splitPromises) {
                        // start this batch of part uploads; Promise.all below awaits them together
                        const uploadParts = partPromises.map((f) => f());
if (verboseFunction)
verboseFunction(`uploading next ${uploadParts.length} part(s)`);
yield Promise.all(uploadParts);
if (verboseFunction)
verboseFunction(`uploaded ${uploadParts.length} part(s)`);
}
// * for non-multipart-upload this is the only promise which is ever resolved
                    // ! don't retry here: each promise already wraps a retry operation (from the UploadChunk push above)
if (verboseFunction)
verboseFunction(totalChunks > 1 ? `uploading last chunk of ${totalChunks} parts.` : `uploading file`);
yield lastPromise();
const md5 = hash.read().toString("hex");
if (verboseFunction)
verboseFunction(`uploaded file. md5 hash: ${md5}`);
resolve(md5);
}
catch (err) {
reject(new Error("upload failed: " + err));
}
}));
});
});
}
GetToken() {
return __awaiter(this, void 0, void 0, function* () {
            !this.agent && !this.sdkClient && (0, utils_1.throwError)("invalid configuration for multipart upload");
if (this.agent) {
return yield this.agent.GetAgentToken();
}
if (this.sdkClient) {
return yield this.sdkClient.GetToken();
}
return "";
});
}
GetGateway() {
        !this.agent && !this.sdkClient && (0, utils_1.throwError)("invalid configuration for multipart upload");
if (this.agent) {
return `${this.agent.GetMindConnectConfiguration().content.baseUrl}`;
}
if (this.sdkClient) {
return this.sdkClient.GetGateway();
}
return "";
}
GetConfiguration() {
        !this.agent && !this.sdkClient && (0, utils_1.throwError)("invalid configuration for multipart upload");
if (this.agent) {
return this.agent.GetMindConnectConfiguration();
}
}
GetAuthorizer() {
return (this.agent || this.sdkClient);
}
constructor(agent, sdkClient) {
this.agent = agent;
this.sdkClient = sdkClient;
this.highWatermark = 1 * 1024 * 1024;
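        // 1 MB stream high-water mark; getTotalChunks aligns the effective chunk size to a multiple of this value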
!agent && !sdkClient && (0, utils_1.throwError)("you have to specify either agent or sdkclient");
}
}
exports.MultipartUploader = MultipartUploader;
//# sourceMappingURL=multipart-uploader.js.map