@dbp-topics/signature

[GitLab Repository](https://gitlab.tugraz.at/dbp/esign/signature) | [npmjs package](https://www.npmjs.com/package/@dbp-topics/signature) | [Unpkg CDN](https://unpkg.com/browse/@dbp-topics/signature/) | [Esign Bundle](https://gitlab.tugraz.at/dbp/esign/dbp
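
Everything published in this package is served directly by the CDN, so the bundle can be pulled into a page without a build step. A minimal sketch of doing that; the entry path `dist/dbp-signature.js` and the element name `dbp-signature` are assumptions here, not verified against the package, so browse the Unpkg file listing above for the real paths:

```js
// Load the signature web component straight from the CDN.
// NOTE: the path below is an assumed example, not a verified entry point.
// Check https://unpkg.com/browse/@dbp-topics/signature/ for the actual file.
const url = "https://unpkg.com/@dbp-topics/signature/dist/dbp-signature.js";

import(url)
    .then(() => {
        // Once the module has registered its custom element, it can be
        // created like any other tag (tag name assumed from the package name).
        const el = document.createElement("dbp-signature");
        document.body.appendChild(el);
    })
    .catch((err) => console.error("Failed to load bundle:", err));
```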

jszip.0c9bbbca.es.js.map — 1 line, 536 kB
{"version":3,"file":"jszip.0c9bbbca.es.js","sources":["../../node_modules/jszip/dist/jszip.js"],"sourcesContent":["/*!\n\nJSZip v3.10.1 - A JavaScript class for generating and reading zip files\n<http://stuartk.com/jszip>\n\n(c) 2009-2016 Stuart Knightley <stuart [at] stuartk.com>\nDual licenced under the MIT license or GPLv3. See https://raw.github.com/Stuk/jszip/main/LICENSE.markdown.\n\nJSZip uses the library pako released under the MIT license :\nhttps://github.com/nodeca/pako/blob/main/LICENSE\n*/\n\n(function(f){if(typeof exports===\"object\"&&typeof module!==\"undefined\"){module.exports=f()}else if(typeof define===\"function\"&&define.amd){define([],f)}else{var g;if(typeof window!==\"undefined\"){g=window}else if(typeof global!==\"undefined\"){g=global}else if(typeof self!==\"undefined\"){g=self}else{g=this}g.JSZip = f()}})(function(){var define,module,exports;return (function e(t,n,r){function s(o,u){if(!n[o]){if(!t[o]){var a=typeof require==\"function\"&&require;if(!u&&a)return a(o,!0);if(i)return i(o,!0);var f=new Error(\"Cannot find module '\"+o+\"'\");throw f.code=\"MODULE_NOT_FOUND\",f}var l=n[o]={exports:{}};t[o][0].call(l.exports,function(e){var n=t[o][1][e];return s(n?n:e)},l,l.exports,e,t,n,r)}return n[o].exports}var i=typeof require==\"function\"&&require;for(var o=0;o<r.length;o++)s(r[o]);return s})({1:[function(require,module,exports){\n\"use strict\";\nvar utils = require(\"./utils\");\nvar support = require(\"./support\");\n// private property\nvar _keyStr = \"ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/=\";\n\n\n// public method for encoding\nexports.encode = function(input) {\n var output = [];\n var chr1, chr2, chr3, enc1, enc2, enc3, enc4;\n var i = 0, len = input.length, remainingBytes = len;\n\n var isArray = utils.getTypeOf(input) !== \"string\";\n while (i < input.length) {\n remainingBytes = len - i;\n\n if (!isArray) {\n chr1 = input.charCodeAt(i++);\n chr2 = i < len ? input.charCodeAt(i++) : 0;\n chr3 = i < len ? input.charCodeAt(i++) : 0;\n } else {\n chr1 = input[i++];\n chr2 = i < len ? input[i++] : 0;\n chr3 = i < len ? input[i++] : 0;\n }\n\n enc1 = chr1 >> 2;\n enc2 = ((chr1 & 3) << 4) | (chr2 >> 4);\n enc3 = remainingBytes > 1 ? (((chr2 & 15) << 2) | (chr3 >> 6)) : 64;\n enc4 = remainingBytes > 2 ? (chr3 & 63) : 64;\n\n output.push(_keyStr.charAt(enc1) + _keyStr.charAt(enc2) + _keyStr.charAt(enc3) + _keyStr.charAt(enc4));\n\n }\n\n return output.join(\"\");\n};\n\n// public method for decoding\nexports.decode = function(input) {\n var chr1, chr2, chr3;\n var enc1, enc2, enc3, enc4;\n var i = 0, resultIndex = 0;\n\n var dataUrlPrefix = \"data:\";\n\n if (input.substr(0, dataUrlPrefix.length) === dataUrlPrefix) {\n // This is a common error: people give a data url\n // (data:image/png;base64,iVBOR...) with a {base64: true} and\n // wonders why things don't work.\n // We can detect that the string input looks like a data url but we\n // *can't* be sure it is one: removing everything up to the comma would\n // be too dangerous.\n throw new Error(\"Invalid base64 input, it looks like a data url.\");\n }\n\n input = input.replace(/[^A-Za-z0-9+/=]/g, \"\");\n\n var totalLength = input.length * 3 / 4;\n if(input.charAt(input.length - 1) === _keyStr.charAt(64)) {\n totalLength--;\n }\n if(input.charAt(input.length - 2) === _keyStr.charAt(64)) {\n totalLength--;\n }\n if (totalLength % 1 !== 0) {\n // totalLength is not an integer, the length does not match a valid\n // base64 content. 
That can happen if:\n // - the input is not a base64 content\n // - the input is *almost* a base64 content, with a extra chars at the\n // beginning or at the end\n // - the input uses a base64 variant (base64url for example)\n throw new Error(\"Invalid base64 input, bad content length.\");\n }\n var output;\n if (support.uint8array) {\n output = new Uint8Array(totalLength|0);\n } else {\n output = new Array(totalLength|0);\n }\n\n while (i < input.length) {\n\n enc1 = _keyStr.indexOf(input.charAt(i++));\n enc2 = _keyStr.indexOf(input.charAt(i++));\n enc3 = _keyStr.indexOf(input.charAt(i++));\n enc4 = _keyStr.indexOf(input.charAt(i++));\n\n chr1 = (enc1 << 2) | (enc2 >> 4);\n chr2 = ((enc2 & 15) << 4) | (enc3 >> 2);\n chr3 = ((enc3 & 3) << 6) | enc4;\n\n output[resultIndex++] = chr1;\n\n if (enc3 !== 64) {\n output[resultIndex++] = chr2;\n }\n if (enc4 !== 64) {\n output[resultIndex++] = chr3;\n }\n\n }\n\n return output;\n};\n\n},{\"./support\":30,\"./utils\":32}],2:[function(require,module,exports){\n\"use strict\";\n\nvar external = require(\"./external\");\nvar DataWorker = require(\"./stream/DataWorker\");\nvar Crc32Probe = require(\"./stream/Crc32Probe\");\nvar DataLengthProbe = require(\"./stream/DataLengthProbe\");\n\n/**\n * Represent a compressed object, with everything needed to decompress it.\n * @constructor\n * @param {number} compressedSize the size of the data compressed.\n * @param {number} uncompressedSize the size of the data after decompression.\n * @param {number} crc32 the crc32 of the decompressed file.\n * @param {object} compression the type of compression, see lib/compressions.js.\n * @param {String|ArrayBuffer|Uint8Array|Buffer} data the compressed data.\n */\nfunction CompressedObject(compressedSize, uncompressedSize, crc32, compression, data) {\n this.compressedSize = compressedSize;\n this.uncompressedSize = uncompressedSize;\n this.crc32 = crc32;\n this.compression = compression;\n this.compressedContent = data;\n}\n\nCompressedObject.prototype = {\n /**\n * Create a worker to get the uncompressed content.\n * @return {GenericWorker} the worker.\n */\n getContentWorker: function () {\n var worker = new DataWorker(external.Promise.resolve(this.compressedContent))\n .pipe(this.compression.uncompressWorker())\n .pipe(new DataLengthProbe(\"data_length\"));\n\n var that = this;\n worker.on(\"end\", function () {\n if (this.streamInfo[\"data_length\"] !== that.uncompressedSize) {\n throw new Error(\"Bug : uncompressed data size mismatch\");\n }\n });\n return worker;\n },\n /**\n * Create a worker to get the compressed content.\n * @return {GenericWorker} the worker.\n */\n getCompressedWorker: function () {\n return new DataWorker(external.Promise.resolve(this.compressedContent))\n .withStreamInfo(\"compressedSize\", this.compressedSize)\n .withStreamInfo(\"uncompressedSize\", this.uncompressedSize)\n .withStreamInfo(\"crc32\", this.crc32)\n .withStreamInfo(\"compression\", this.compression)\n ;\n }\n};\n\n/**\n * Chain the given worker with other workers to compress the content with the\n * given compression.\n * @param {GenericWorker} uncompressedWorker the worker to pipe.\n * @param {Object} compression the compression object.\n * @param {Object} compressionOptions the options to use when compressing.\n * @return {GenericWorker} the new worker compressing the content.\n */\nCompressedObject.createWorkerFrom = function (uncompressedWorker, compression, compressionOptions) {\n return uncompressedWorker\n .pipe(new Crc32Probe())\n .pipe(new 
DataLengthProbe(\"uncompressedSize\"))\n .pipe(compression.compressWorker(compressionOptions))\n .pipe(new DataLengthProbe(\"compressedSize\"))\n .withStreamInfo(\"compression\", compression);\n};\n\nmodule.exports = CompressedObject;\n\n},{\"./external\":6,\"./stream/Crc32Probe\":25,\"./stream/DataLengthProbe\":26,\"./stream/DataWorker\":27}],3:[function(require,module,exports){\n\"use strict\";\n\nvar GenericWorker = require(\"./stream/GenericWorker\");\n\nexports.STORE = {\n magic: \"\\x00\\x00\",\n compressWorker : function () {\n return new GenericWorker(\"STORE compression\");\n },\n uncompressWorker : function () {\n return new GenericWorker(\"STORE decompression\");\n }\n};\nexports.DEFLATE = require(\"./flate\");\n\n},{\"./flate\":7,\"./stream/GenericWorker\":28}],4:[function(require,module,exports){\n\"use strict\";\n\nvar utils = require(\"./utils\");\n\n/**\n * The following functions come from pako, from pako/lib/zlib/crc32.js\n * released under the MIT license, see pako https://github.com/nodeca/pako/\n */\n\n// Use ordinary array, since untyped makes no boost here\nfunction makeTable() {\n var c, table = [];\n\n for(var n =0; n < 256; n++){\n c = n;\n for(var k =0; k < 8; k++){\n c = ((c&1) ? (0xEDB88320 ^ (c >>> 1)) : (c >>> 1));\n }\n table[n] = c;\n }\n\n return table;\n}\n\n// Create table on load. Just 255 signed longs. Not a problem.\nvar crcTable = makeTable();\n\n\nfunction crc32(crc, buf, len, pos) {\n var t = crcTable, end = pos + len;\n\n crc = crc ^ (-1);\n\n for (var i = pos; i < end; i++ ) {\n crc = (crc >>> 8) ^ t[(crc ^ buf[i]) & 0xFF];\n }\n\n return (crc ^ (-1)); // >>> 0;\n}\n\n// That's all for the pako functions.\n\n/**\n * Compute the crc32 of a string.\n * This is almost the same as the function crc32, but for strings. 
Using the\n * same function for the two use cases leads to horrible performances.\n * @param {Number} crc the starting value of the crc.\n * @param {String} str the string to use.\n * @param {Number} len the length of the string.\n * @param {Number} pos the starting position for the crc32 computation.\n * @return {Number} the computed crc32.\n */\nfunction crc32str(crc, str, len, pos) {\n var t = crcTable, end = pos + len;\n\n crc = crc ^ (-1);\n\n for (var i = pos; i < end; i++ ) {\n crc = (crc >>> 8) ^ t[(crc ^ str.charCodeAt(i)) & 0xFF];\n }\n\n return (crc ^ (-1)); // >>> 0;\n}\n\nmodule.exports = function crc32wrapper(input, crc) {\n if (typeof input === \"undefined\" || !input.length) {\n return 0;\n }\n\n var isArray = utils.getTypeOf(input) !== \"string\";\n\n if(isArray) {\n return crc32(crc|0, input, input.length, 0);\n } else {\n return crc32str(crc|0, input, input.length, 0);\n }\n};\n\n},{\"./utils\":32}],5:[function(require,module,exports){\n\"use strict\";\nexports.base64 = false;\nexports.binary = false;\nexports.dir = false;\nexports.createFolders = true;\nexports.date = null;\nexports.compression = null;\nexports.compressionOptions = null;\nexports.comment = null;\nexports.unixPermissions = null;\nexports.dosPermissions = null;\n\n},{}],6:[function(require,module,exports){\n\"use strict\";\n\n// load the global object first:\n// - it should be better integrated in the system (unhandledRejection in node)\n// - the environment may have a custom Promise implementation (see zone.js)\nvar ES6Promise = null;\nif (typeof Promise !== \"undefined\") {\n ES6Promise = Promise;\n} else {\n ES6Promise = require(\"lie\");\n}\n\n/**\n * Let the user use/change some implementations.\n */\nmodule.exports = {\n Promise: ES6Promise\n};\n\n},{\"lie\":37}],7:[function(require,module,exports){\n\"use strict\";\nvar USE_TYPEDARRAY = (typeof Uint8Array !== \"undefined\") && (typeof Uint16Array !== \"undefined\") && (typeof Uint32Array !== \"undefined\");\n\nvar pako = require(\"pako\");\nvar utils = require(\"./utils\");\nvar GenericWorker = require(\"./stream/GenericWorker\");\n\nvar ARRAY_TYPE = USE_TYPEDARRAY ? 
\"uint8array\" : \"array\";\n\nexports.magic = \"\\x08\\x00\";\n\n/**\n * Create a worker that uses pako to inflate/deflate.\n * @constructor\n * @param {String} action the name of the pako function to call : either \"Deflate\" or \"Inflate\".\n * @param {Object} options the options to use when (de)compressing.\n */\nfunction FlateWorker(action, options) {\n GenericWorker.call(this, \"FlateWorker/\" + action);\n\n this._pako = null;\n this._pakoAction = action;\n this._pakoOptions = options;\n // the `meta` object from the last chunk received\n // this allow this worker to pass around metadata\n this.meta = {};\n}\n\nutils.inherits(FlateWorker, GenericWorker);\n\n/**\n * @see GenericWorker.processChunk\n */\nFlateWorker.prototype.processChunk = function (chunk) {\n this.meta = chunk.meta;\n if (this._pako === null) {\n this._createPako();\n }\n this._pako.push(utils.transformTo(ARRAY_TYPE, chunk.data), false);\n};\n\n/**\n * @see GenericWorker.flush\n */\nFlateWorker.prototype.flush = function () {\n GenericWorker.prototype.flush.call(this);\n if (this._pako === null) {\n this._createPako();\n }\n this._pako.push([], true);\n};\n/**\n * @see GenericWorker.cleanUp\n */\nFlateWorker.prototype.cleanUp = function () {\n GenericWorker.prototype.cleanUp.call(this);\n this._pako = null;\n};\n\n/**\n * Create the _pako object.\n * TODO: lazy-loading this object isn't the best solution but it's the\n * quickest. The best solution is to lazy-load the worker list. See also the\n * issue #446.\n */\nFlateWorker.prototype._createPako = function () {\n this._pako = new pako[this._pakoAction]({\n raw: true,\n level: this._pakoOptions.level || -1 // default compression\n });\n var self = this;\n this._pako.onData = function(data) {\n self.push({\n data : data,\n meta : self.meta\n });\n };\n};\n\nexports.compressWorker = function (compressionOptions) {\n return new FlateWorker(\"Deflate\", compressionOptions);\n};\nexports.uncompressWorker = function () {\n return new FlateWorker(\"Inflate\", {});\n};\n\n},{\"./stream/GenericWorker\":28,\"./utils\":32,\"pako\":38}],8:[function(require,module,exports){\n\"use strict\";\n\nvar utils = require(\"../utils\");\nvar GenericWorker = require(\"../stream/GenericWorker\");\nvar utf8 = require(\"../utf8\");\nvar crc32 = require(\"../crc32\");\nvar signature = require(\"../signature\");\n\n/**\n * Transform an integer into a string in hexadecimal.\n * @private\n * @param {number} dec the number to convert.\n * @param {number} bytes the number of bytes to generate.\n * @returns {string} the result.\n */\nvar decToHex = function(dec, bytes) {\n var hex = \"\", i;\n for (i = 0; i < bytes; i++) {\n hex += String.fromCharCode(dec & 0xff);\n dec = dec >>> 8;\n }\n return hex;\n};\n\n/**\n * Generate the UNIX part of the external file attributes.\n * @param {Object} unixPermissions the unix permissions or null.\n * @param {Boolean} isDir true if the entry is a directory, false otherwise.\n * @return {Number} a 32 bit integer.\n *\n * adapted from http://unix.stackexchange.com/questions/14705/the-zip-formats-external-file-attribute :\n *\n * TTTTsstrwxrwxrwx0000000000ADVSHR\n * ^^^^____________________________ file type, see zipinfo.c (UNX_*)\n * ^^^_________________________ setuid, setgid, sticky\n * ^^^^^^^^^________________ permissions\n * ^^^^^^^^^^______ not used ?\n * ^^^^^^ DOS attribute bits : Archive, Directory, Volume label, System file, Hidden, Read only\n */\nvar generateUnixExternalFileAttr = function (unixPermissions, isDir) {\n\n var result = 
unixPermissions;\n if (!unixPermissions) {\n // I can't use octal values in strict mode, hence the hexa.\n // 040775 => 0x41fd\n // 0100664 => 0x81b4\n result = isDir ? 0x41fd : 0x81b4;\n }\n return (result & 0xFFFF) << 16;\n};\n\n/**\n * Generate the DOS part of the external file attributes.\n * @param {Object} dosPermissions the dos permissions or null.\n * @param {Boolean} isDir true if the entry is a directory, false otherwise.\n * @return {Number} a 32 bit integer.\n *\n * Bit 0 Read-Only\n * Bit 1 Hidden\n * Bit 2 System\n * Bit 3 Volume Label\n * Bit 4 Directory\n * Bit 5 Archive\n */\nvar generateDosExternalFileAttr = function (dosPermissions) {\n // the dir flag is already set for compatibility\n return (dosPermissions || 0) & 0x3F;\n};\n\n/**\n * Generate the various parts used in the construction of the final zip file.\n * @param {Object} streamInfo the hash with information about the compressed file.\n * @param {Boolean} streamedContent is the content streamed ?\n * @param {Boolean} streamingEnded is the stream finished ?\n * @param {number} offset the current offset from the start of the zip file.\n * @param {String} platform let's pretend we are this platform (change platform dependents fields)\n * @param {Function} encodeFileName the function to encode the file name / comment.\n * @return {Object} the zip parts.\n */\nvar generateZipParts = function(streamInfo, streamedContent, streamingEnded, offset, platform, encodeFileName) {\n var file = streamInfo[\"file\"],\n compression = streamInfo[\"compression\"],\n useCustomEncoding = encodeFileName !== utf8.utf8encode,\n encodedFileName = utils.transformTo(\"string\", encodeFileName(file.name)),\n utfEncodedFileName = utils.transformTo(\"string\", utf8.utf8encode(file.name)),\n comment = file.comment,\n encodedComment = utils.transformTo(\"string\", encodeFileName(comment)),\n utfEncodedComment = utils.transformTo(\"string\", utf8.utf8encode(comment)),\n useUTF8ForFileName = utfEncodedFileName.length !== file.name.length,\n useUTF8ForComment = utfEncodedComment.length !== comment.length,\n dosTime,\n dosDate,\n extraFields = \"\",\n unicodePathExtraField = \"\",\n unicodeCommentExtraField = \"\",\n dir = file.dir,\n date = file.date;\n\n\n var dataInfo = {\n crc32 : 0,\n compressedSize : 0,\n uncompressedSize : 0\n };\n\n // if the content is streamed, the sizes/crc32 are only available AFTER\n // the end of the stream.\n if (!streamedContent || streamingEnded) {\n dataInfo.crc32 = streamInfo[\"crc32\"];\n dataInfo.compressedSize = streamInfo[\"compressedSize\"];\n dataInfo.uncompressedSize = streamInfo[\"uncompressedSize\"];\n }\n\n var bitflag = 0;\n if (streamedContent) {\n // Bit 3: the sizes/crc32 are set to zero in the local header.\n // The correct values are put in the data descriptor immediately\n // following the compressed data.\n bitflag |= 0x0008;\n }\n if (!useCustomEncoding && (useUTF8ForFileName || useUTF8ForComment)) {\n // Bit 11: Language encoding flag (EFS).\n bitflag |= 0x0800;\n }\n\n\n var extFileAttr = 0;\n var versionMadeBy = 0;\n if (dir) {\n // dos or unix, we set the dos dir flag\n extFileAttr |= 0x00010;\n }\n if(platform === \"UNIX\") {\n versionMadeBy = 0x031E; // UNIX, version 3.0\n extFileAttr |= generateUnixExternalFileAttr(file.unixPermissions, dir);\n } else { // DOS or other, fallback to DOS\n versionMadeBy = 0x0014; // DOS, version 2.0\n extFileAttr |= generateDosExternalFileAttr(file.dosPermissions, dir);\n }\n\n // date\n // @see http://www.delorie.com/djgpp/doc/rbinter/it/52/13.html\n // 
@see http://www.delorie.com/djgpp/doc/rbinter/it/65/16.html\n // @see http://www.delorie.com/djgpp/doc/rbinter/it/66/16.html\n\n dosTime = date.getUTCHours();\n dosTime = dosTime << 6;\n dosTime = dosTime | date.getUTCMinutes();\n dosTime = dosTime << 5;\n dosTime = dosTime | date.getUTCSeconds() / 2;\n\n dosDate = date.getUTCFullYear() - 1980;\n dosDate = dosDate << 4;\n dosDate = dosDate | (date.getUTCMonth() + 1);\n dosDate = dosDate << 5;\n dosDate = dosDate | date.getUTCDate();\n\n if (useUTF8ForFileName) {\n // set the unicode path extra field. unzip needs at least one extra\n // field to correctly handle unicode path, so using the path is as good\n // as any other information. This could improve the situation with\n // other archive managers too.\n // This field is usually used without the utf8 flag, with a non\n // unicode path in the header (winrar, winzip). This helps (a bit)\n // with the messy Windows' default compressed folders feature but\n // breaks on p7zip which doesn't seek the unicode path extra field.\n // So for now, UTF-8 everywhere !\n unicodePathExtraField =\n // Version\n decToHex(1, 1) +\n // NameCRC32\n decToHex(crc32(encodedFileName), 4) +\n // UnicodeName\n utfEncodedFileName;\n\n extraFields +=\n // Info-ZIP Unicode Path Extra Field\n \"\\x75\\x70\" +\n // size\n decToHex(unicodePathExtraField.length, 2) +\n // content\n unicodePathExtraField;\n }\n\n if(useUTF8ForComment) {\n\n unicodeCommentExtraField =\n // Version\n decToHex(1, 1) +\n // CommentCRC32\n decToHex(crc32(encodedComment), 4) +\n // UnicodeName\n utfEncodedComment;\n\n extraFields +=\n // Info-ZIP Unicode Path Extra Field\n \"\\x75\\x63\" +\n // size\n decToHex(unicodeCommentExtraField.length, 2) +\n // content\n unicodeCommentExtraField;\n }\n\n var header = \"\";\n\n // version needed to extract\n header += \"\\x0A\\x00\";\n // general purpose bit flag\n header += decToHex(bitflag, 2);\n // compression method\n header += compression.magic;\n // last mod file time\n header += decToHex(dosTime, 2);\n // last mod file date\n header += decToHex(dosDate, 2);\n // crc-32\n header += decToHex(dataInfo.crc32, 4);\n // compressed size\n header += decToHex(dataInfo.compressedSize, 4);\n // uncompressed size\n header += decToHex(dataInfo.uncompressedSize, 4);\n // file name length\n header += decToHex(encodedFileName.length, 2);\n // extra field length\n header += decToHex(extraFields.length, 2);\n\n\n var fileRecord = signature.LOCAL_FILE_HEADER + header + encodedFileName + extraFields;\n\n var dirRecord = signature.CENTRAL_FILE_HEADER +\n // version made by (00: DOS)\n decToHex(versionMadeBy, 2) +\n // file header (common to file and central directory)\n header +\n // file comment length\n decToHex(encodedComment.length, 2) +\n // disk number start\n \"\\x00\\x00\" +\n // internal file attributes TODO\n \"\\x00\\x00\" +\n // external file attributes\n decToHex(extFileAttr, 4) +\n // relative offset of local header\n decToHex(offset, 4) +\n // file name\n encodedFileName +\n // extra field\n extraFields +\n // file comment\n encodedComment;\n\n return {\n fileRecord: fileRecord,\n dirRecord: dirRecord\n };\n};\n\n/**\n * Generate the EOCD record.\n * @param {Number} entriesCount the number of entries in the zip file.\n * @param {Number} centralDirLength the length (in bytes) of the central dir.\n * @param {Number} localDirLength the length (in bytes) of the local dir.\n * @param {String} comment the zip file comment as a binary string.\n * @param {Function} encodeFileName the function to encode the 
comment.\n * @return {String} the EOCD record.\n */\nvar generateCentralDirectoryEnd = function (entriesCount, centralDirLength, localDirLength, comment, encodeFileName) {\n var dirEnd = \"\";\n var encodedComment = utils.transformTo(\"string\", encodeFileName(comment));\n\n // end of central dir signature\n dirEnd = signature.CENTRAL_DIRECTORY_END +\n // number of this disk\n \"\\x00\\x00\" +\n // number of the disk with the start of the central directory\n \"\\x00\\x00\" +\n // total number of entries in the central directory on this disk\n decToHex(entriesCount, 2) +\n // total number of entries in the central directory\n decToHex(entriesCount, 2) +\n // size of the central directory 4 bytes\n decToHex(centralDirLength, 4) +\n // offset of start of central directory with respect to the starting disk number\n decToHex(localDirLength, 4) +\n // .ZIP file comment length\n decToHex(encodedComment.length, 2) +\n // .ZIP file comment\n encodedComment;\n\n return dirEnd;\n};\n\n/**\n * Generate data descriptors for a file entry.\n * @param {Object} streamInfo the hash generated by a worker, containing information\n * on the file entry.\n * @return {String} the data descriptors.\n */\nvar generateDataDescriptors = function (streamInfo) {\n var descriptor = \"\";\n descriptor = signature.DATA_DESCRIPTOR +\n // crc-32 4 bytes\n decToHex(streamInfo[\"crc32\"], 4) +\n // compressed size 4 bytes\n decToHex(streamInfo[\"compressedSize\"], 4) +\n // uncompressed size 4 bytes\n decToHex(streamInfo[\"uncompressedSize\"], 4);\n\n return descriptor;\n};\n\n\n/**\n * A worker to concatenate other workers to create a zip file.\n * @param {Boolean} streamFiles `true` to stream the content of the files,\n * `false` to accumulate it.\n * @param {String} comment the comment to use.\n * @param {String} platform the platform to use, \"UNIX\" or \"DOS\".\n * @param {Function} encodeFileName the function to encode file names and comments.\n */\nfunction ZipFileWorker(streamFiles, comment, platform, encodeFileName) {\n GenericWorker.call(this, \"ZipFileWorker\");\n // The number of bytes written so far. 
This doesn't count accumulated chunks.\n this.bytesWritten = 0;\n // The comment of the zip file\n this.zipComment = comment;\n // The platform \"generating\" the zip file.\n this.zipPlatform = platform;\n // the function to encode file names and comments.\n this.encodeFileName = encodeFileName;\n // Should we stream the content of the files ?\n this.streamFiles = streamFiles;\n // If `streamFiles` is false, we will need to accumulate the content of the\n // files to calculate sizes / crc32 (and write them *before* the content).\n // This boolean indicates if we are accumulating chunks (it will change a lot\n // during the lifetime of this worker).\n this.accumulate = false;\n // The buffer receiving chunks when accumulating content.\n this.contentBuffer = [];\n // The list of generated directory records.\n this.dirRecords = [];\n // The offset (in bytes) from the beginning of the zip file for the current source.\n this.currentSourceOffset = 0;\n // The total number of entries in this zip file.\n this.entriesCount = 0;\n // the name of the file currently being added, null when handling the end of the zip file.\n // Used for the emitted metadata.\n this.currentFile = null;\n\n\n\n this._sources = [];\n}\nutils.inherits(ZipFileWorker, GenericWorker);\n\n/**\n * @see GenericWorker.push\n */\nZipFileWorker.prototype.push = function (chunk) {\n\n var currentFilePercent = chunk.meta.percent || 0;\n var entriesCount = this.entriesCount;\n var remainingFiles = this._sources.length;\n\n if(this.accumulate) {\n this.contentBuffer.push(chunk);\n } else {\n this.bytesWritten += chunk.data.length;\n\n GenericWorker.prototype.push.call(this, {\n data : chunk.data,\n meta : {\n currentFile : this.currentFile,\n percent : entriesCount ? (currentFilePercent + 100 * (entriesCount - remainingFiles - 1)) / entriesCount : 100\n }\n });\n }\n};\n\n/**\n * The worker started a new source (an other worker).\n * @param {Object} streamInfo the streamInfo object from the new source.\n */\nZipFileWorker.prototype.openedSource = function (streamInfo) {\n this.currentSourceOffset = this.bytesWritten;\n this.currentFile = streamInfo[\"file\"].name;\n\n var streamedContent = this.streamFiles && !streamInfo[\"file\"].dir;\n\n // don't stream folders (because they don't have any content)\n if(streamedContent) {\n var record = generateZipParts(streamInfo, streamedContent, false, this.currentSourceOffset, this.zipPlatform, this.encodeFileName);\n this.push({\n data : record.fileRecord,\n meta : {percent:0}\n });\n } else {\n // we need to wait for the whole file before pushing anything\n this.accumulate = true;\n }\n};\n\n/**\n * The worker finished a source (an other worker).\n * @param {Object} streamInfo the streamInfo object from the finished source.\n */\nZipFileWorker.prototype.closedSource = function (streamInfo) {\n this.accumulate = false;\n var streamedContent = this.streamFiles && !streamInfo[\"file\"].dir;\n var record = generateZipParts(streamInfo, streamedContent, true, this.currentSourceOffset, this.zipPlatform, this.encodeFileName);\n\n this.dirRecords.push(record.dirRecord);\n if(streamedContent) {\n // after the streamed file, we put data descriptors\n this.push({\n data : generateDataDescriptors(streamInfo),\n meta : {percent:100}\n });\n } else {\n // the content wasn't streamed, we need to push everything now\n // first the file record, then the content\n this.push({\n data : record.fileRecord,\n meta : {percent:0}\n });\n while(this.contentBuffer.length) {\n this.push(this.contentBuffer.shift());\n }\n 
}\n this.currentFile = null;\n};\n\n/**\n * @see GenericWorker.flush\n */\nZipFileWorker.prototype.flush = function () {\n\n var localDirLength = this.bytesWritten;\n for(var i = 0; i < this.dirRecords.length; i++) {\n this.push({\n data : this.dirRecords[i],\n meta : {percent:100}\n });\n }\n var centralDirLength = this.bytesWritten - localDirLength;\n\n var dirEnd = generateCentralDirectoryEnd(this.dirRecords.length, centralDirLength, localDirLength, this.zipComment, this.encodeFileName);\n\n this.push({\n data : dirEnd,\n meta : {percent:100}\n });\n};\n\n/**\n * Prepare the next source to be read.\n */\nZipFileWorker.prototype.prepareNextSource = function () {\n this.previous = this._sources.shift();\n this.openedSource(this.previous.streamInfo);\n if (this.isPaused) {\n this.previous.pause();\n } else {\n this.previous.resume();\n }\n};\n\n/**\n * @see GenericWorker.registerPrevious\n */\nZipFileWorker.prototype.registerPrevious = function (previous) {\n this._sources.push(previous);\n var self = this;\n\n previous.on(\"data\", function (chunk) {\n self.processChunk(chunk);\n });\n previous.on(\"end\", function () {\n self.closedSource(self.previous.streamInfo);\n if(self._sources.length) {\n self.prepareNextSource();\n } else {\n self.end();\n }\n });\n previous.on(\"error\", function (e) {\n self.error(e);\n });\n return this;\n};\n\n/**\n * @see GenericWorker.resume\n */\nZipFileWorker.prototype.resume = function () {\n if(!GenericWorker.prototype.resume.call(this)) {\n return false;\n }\n\n if (!this.previous && this._sources.length) {\n this.prepareNextSource();\n return true;\n }\n if (!this.previous && !this._sources.length && !this.generatedError) {\n this.end();\n return true;\n }\n};\n\n/**\n * @see GenericWorker.error\n */\nZipFileWorker.prototype.error = function (e) {\n var sources = this._sources;\n if(!GenericWorker.prototype.error.call(this, e)) {\n return false;\n }\n for(var i = 0; i < sources.length; i++) {\n try {\n sources[i].error(e);\n } catch(e) {\n // the `error` exploded, nothing to do\n }\n }\n return true;\n};\n\n/**\n * @see GenericWorker.lock\n */\nZipFileWorker.prototype.lock = function () {\n GenericWorker.prototype.lock.call(this);\n var sources = this._sources;\n for(var i = 0; i < sources.length; i++) {\n sources[i].lock();\n }\n};\n\nmodule.exports = ZipFileWorker;\n\n},{\"../crc32\":4,\"../signature\":23,\"../stream/GenericWorker\":28,\"../utf8\":31,\"../utils\":32}],9:[function(require,module,exports){\n\"use strict\";\n\nvar compressions = require(\"../compressions\");\nvar ZipFileWorker = require(\"./ZipFileWorker\");\n\n/**\n * Find the compression to use.\n * @param {String} fileCompression the compression defined at the file level, if any.\n * @param {String} zipCompression the compression defined at the load() level.\n * @return {Object} the compression object to use.\n */\nvar getCompression = function (fileCompression, zipCompression) {\n\n var compressionName = fileCompression || zipCompression;\n var compression = compressions[compressionName];\n if (!compression) {\n throw new Error(compressionName + \" is not a valid compression method !\");\n }\n return compression;\n};\n\n/**\n * Create a worker to generate a zip file.\n * @param {JSZip} zip the JSZip instance at the right root level.\n * @param {Object} options to generate the zip file.\n * @param {String} comment the comment to use.\n */\nexports.generateWorker = function (zip, options, comment) {\n\n var zipFileWorker = new ZipFileWorker(options.streamFiles, comment, 
options.platform, options.encodeFileName);\n var entriesCount = 0;\n try {\n\n zip.forEach(function (relativePath, file) {\n entriesCount++;\n var compression = getCompression(file.options.compression, options.compression);\n var compressionOptions = file.options.compressionOptions || options.compressionOptions || {};\n var dir = file.dir, date = file.date;\n\n file._compressWorker(compression, compressionOptions)\n .withStreamInfo(\"file\", {\n name : relativePath,\n dir : dir,\n date : date,\n comment : file.comment || \"\",\n unixPermissions : file.unixPermissions,\n dosPermissions : file.dosPermissions\n })\n .pipe(zipFileWorker);\n });\n zipFileWorker.entriesCount = entriesCount;\n } catch (e) {\n zipFileWorker.error(e);\n }\n\n return zipFileWorker;\n};\n\n},{\"../compressions\":3,\"./ZipFileWorker\":8}],10:[function(require,module,exports){\n\"use strict\";\n\n/**\n * Representation a of zip file in js\n * @constructor\n */\nfunction JSZip() {\n // if this constructor is used without `new`, it adds `new` before itself:\n if(!(this instanceof JSZip)) {\n return new JSZip();\n }\n\n if(arguments.length) {\n throw new Error(\"The constructor with parameters has been removed in JSZip 3.0, please check the upgrade guide.\");\n }\n\n // object containing the files :\n // {\n // \"folder/\" : {...},\n // \"folder/data.txt\" : {...}\n // }\n // NOTE: we use a null prototype because we do not\n // want filenames like \"toString\" coming from a zip file\n // to overwrite methods and attributes in a normal Object.\n this.files = Object.create(null);\n\n this.comment = null;\n\n // Where we are in the hierarchy\n this.root = \"\";\n this.clone = function() {\n var newObj = new JSZip();\n for (var i in this) {\n if (typeof this[i] !== \"function\") {\n newObj[i] = this[i];\n }\n }\n return newObj;\n };\n}\nJSZip.prototype = require(\"./object\");\nJSZip.prototype.loadAsync = require(\"./load\");\nJSZip.support = require(\"./support\");\nJSZip.defaults = require(\"./defaults\");\n\n// TODO find a better way to handle this version,\n// a require('package.json').version doesn't work with webpack, see #327\nJSZip.version = \"3.10.1\";\n\nJSZip.loadAsync = function (content, options) {\n return new JSZip().loadAsync(content, options);\n};\n\nJSZip.external = require(\"./external\");\nmodule.exports = JSZip;\n\n},{\"./defaults\":5,\"./external\":6,\"./load\":11,\"./object\":15,\"./support\":30}],11:[function(require,module,exports){\n\"use strict\";\nvar utils = require(\"./utils\");\nvar external = require(\"./external\");\nvar utf8 = require(\"./utf8\");\nvar ZipEntries = require(\"./zipEntries\");\nvar Crc32Probe = require(\"./stream/Crc32Probe\");\nvar nodejsUtils = require(\"./nodejsUtils\");\n\n/**\n * Check the CRC32 of an entry.\n * @param {ZipEntry} zipEntry the zip entry to check.\n * @return {Promise} the result.\n */\nfunction checkEntryCRC32(zipEntry) {\n return new external.Promise(function (resolve, reject) {\n var worker = zipEntry.decompressed.getContentWorker().pipe(new Crc32Probe());\n worker.on(\"error\", function (e) {\n reject(e);\n })\n .on(\"end\", function () {\n if (worker.streamInfo.crc32 !== zipEntry.decompressed.crc32) {\n reject(new Error(\"Corrupted zip : CRC32 mismatch\"));\n } else {\n resolve();\n }\n })\n .resume();\n });\n}\n\nmodule.exports = function (data, options) {\n var zip = this;\n options = utils.extend(options || {}, {\n base64: false,\n checkCRC32: false,\n optimizedBinaryString: false,\n createFolders: false,\n decodeFileName: utf8.utf8decode\n });\n\n 
if (nodejsUtils.isNode && nodejsUtils.isStream(data)) {\n return external.Promise.reject(new Error(\"JSZip can't accept a stream when loading a zip file.\"));\n }\n\n return utils.prepareContent(\"the loaded zip file\", data, true, options.optimizedBinaryString, options.base64)\n .then(function (data) {\n var zipEntries = new ZipEntries(options);\n zipEntries.load(data);\n return zipEntries;\n }).then(function checkCRC32(zipEntries) {\n var promises = [external.Promise.resolve(zipEntries)];\n var files = zipEntries.files;\n if (options.checkCRC32) {\n for (var i = 0; i < files.length; i++) {\n promises.push(checkEntryCRC32(files[i]));\n }\n }\n return external.Promise.all(promises);\n }).then(function addFiles(results) {\n var zipEntries = results.shift();\n var files = zipEntries.files;\n for (var i = 0; i < files.length; i++) {\n var input = files[i];\n\n var unsafeName = input.fileNameStr;\n var safeName = utils.resolve(input.fileNameStr);\n\n zip.file(safeName, input.decompressed, {\n binary: true,\n optimizedBinaryString: true,\n date: input.date,\n dir: input.dir,\n comment: input.fileCommentStr.length ? input.fileCommentStr : null,\n unixPermissions: input.unixPermissions,\n dosPermissions: input.dosPermissions,\n createFolders: options.createFolders\n });\n if (!input.dir) {\n zip.file(safeName).unsafeOriginalName = unsafeName;\n }\n }\n if (zipEntries.zipComment.length) {\n zip.comment = zipEntries.zipComment;\n }\n\n return zip;\n });\n};\n\n},{\"./external\":6,\"./nodejsUtils\":14,\"./stream/Crc32Probe\":25,\"./utf8\":31,\"./utils\":32,\"./zipEntries\":33}],12:[function(require,module,exports){\n\"use strict\";\n\nvar utils = require(\"../utils\");\nvar GenericWorker = require(\"../stream/GenericWorker\");\n\n/**\n * A worker that use a nodejs stream as source.\n * @constructor\n * @param {String} filename the name of the file entry for this stream.\n * @param {Readable} stream the nodejs stream.\n */\nfunction NodejsStreamInputAdapter(filename, stream) {\n GenericWorker.call(this, \"Nodejs stream input adapter for \" + filename);\n this._upstreamEnded = false;\n this._bindStream(stream);\n}\n\nutils.inherits(NodejsStreamInputAdapter, GenericWorker);\n\n/**\n * Prepare the stream and bind the callbacks on it.\n * Do this ASAP on node 0.10 ! 
A lazy binding doesn't always work.\n * @param {Stream} stream the nodejs stream to use.\n */\nNodejsStreamInputAdapter.prototype._bindStream = function (stream) {\n var self = this;\n this._stream = stream;\n stream.pause();\n stream\n .on(\"data\", function (chunk) {\n self.push({\n data: chunk,\n meta : {\n percent : 0\n }\n });\n })\n .on(\"error\", function (e) {\n if(self.isPaused) {\n this.generatedError = e;\n } else {\n self.error(e);\n }\n })\n .on(\"end\", function () {\n if(self.isPaused) {\n self._upstreamEnded = true;\n } else {\n self.end();\n }\n });\n};\nNodejsStreamInputAdapter.prototype.pause = function () {\n if(!GenericWorker.prototype.pause.call(this)) {\n return false;\n }\n this._stream.pause();\n return true;\n};\nNodejsStreamInputAdapter.prototype.resume = function () {\n if(!GenericWorker.prototype.resume.call(this)) {\n return false;\n }\n\n if(this._upstreamEnded) {\n this.end();\n } else {\n this._stream.resume();\n }\n\n return true;\n};\n\nmodule.exports = NodejsStreamInputAdapter;\n\n},{\"../stream/GenericWorker\":28,\"../utils\":32}],13:[function(require,module,exports){\n\"use strict\";\n\nvar Readable = require(\"readable-stream\").Readable;\n\nvar utils = require(\"../utils\");\nutils.inherits(NodejsStreamOutputAdapter, Readable);\n\n/**\n* A nodejs stream using a worker as source.\n* @see the SourceWrapper in http://nodejs.org/api/stream.html\n* @constructor\n* @param {StreamHelper} helper the helper wrapping the worker\n* @param {Object} options the nodejs stream options\n* @param {Function} updateCb the update callback.\n*/\nfunction NodejsStreamOutputAdapter(helper, options, updateCb) {\n Readable.call(this, options);\n this._helper = helper;\n\n var self = this;\n helper.on(\"data\", function (data, meta) {\n if (!self.push(data)) {\n self._helper.pause();\n }\n if(updateCb) {\n updateCb(meta);\n }\n })\n .on(\"error\", function(e) {\n self.emit(\"error\", e);\n })\n .on(\"end\", function () {\n self.push(null);\n });\n}\n\n\nNodejsStreamOutputAdapter.prototype._read = function() {\n this._helper.resume();\n};\n\nmodule.exports = NodejsStreamOutputAdapter;\n\n},{\"../utils\":32,\"readable-stream\":16}],14:[function(require,module,exports){\n\"use strict\";\n\nmodule.exports = {\n /**\n * True if this is running in Nodejs, will be undefined in a browser.\n * In a browser, browserify won't include this file and the whole module\n * will be resolved an empty object.\n */\n isNode : typeof Buffer !== \"undefined\",\n /**\n * Create a new nodejs Buffer from an existing content.\n * @param {Object} data the data to pass to the constructor.\n * @param {String} encoding the encoding to use.\n * @return {Buffer} a new Buffer.\n */\n newBufferFrom: function(data, encoding) {\n if (Buffer.from && Buffer.from !== Uint8Array.from) {\n return Buffer.from(data, encoding);\n } else {\n if (typeof data === \"number\") {\n // Safeguard for old Node.js versions. 
On newer versions,\n // Buffer.from(number) / Buffer(number, encoding) already throw.\n throw new Error(\"The \\\"data\\\" argument must not be a number\");\n }\n return new Buffer(data, encoding);\n }\n },\n /**\n * Create a new nodejs Buffer with the specified size.\n * @param {Integer} size the size of the buffer.\n * @return {Buffer} a new Buffer.\n */\n allocBuffer: function (size) {\n if (Buffer.alloc) {\n return Buffer.alloc(size);\n } else {\n var buf = new Buffer(size);\n buf.fill(0);\n return buf;\n }\n },\n /**\n * Find out if an object is a Buffer.\n * @param {Object} b the object to test.\n * @return {Boolean} true if the object is a Buffer, false otherwise.\n */\n isBuffer : function(b){\n return Buffer.isBuffer(b);\n },\n\n isStream : function (obj) {\n return obj &&\n typeof obj.on === \"function\" &&\n typeof obj.pause === \"function\" &&\n typeof obj.resume === \"function\";\n }\n};\n\n},{}],15:[function(require,module,exports){\n\"use strict\";\nvar utf8 = require(\"./utf8\");\nvar utils = require(\"./utils\");\nvar GenericWorker = require(\"./stream/GenericWorker\");\nvar StreamHelper = require(\"./stream/StreamHelper\");\nvar defaults = require(\"./defaults\");\nvar CompressedObject = require(\"./compressedObject\");\nvar ZipObject = require(\"./zipObject\");\nvar generate = require(\"./generate\");\nvar nodejsUtils = require(\"./nodejsUtils\");\nvar NodejsStreamInputAdapter = require(\"./nodejs/NodejsStreamInputAdapter\");\n\n\n/**\n * Add a file in the current folder.\n * @private\n * @param {string} name the name of the file\n * @param {String|ArrayBuffer|Uint8Array|Buffer} data the data of the file\n * @param {Object} originalOptions the options of the file\n * @return {Object} the new file.\n */\nvar fileAdd = function(name, data, originalOptions) {\n // be sure sub folders exist\n var dataType = utils.getTypeOf(data),\n parent;\n\n\n /*\n * Correct options.\n */\n\n var o = utils.extend(originalOptions || {}, defaults);\n o.date = o.date || new Date();\n if (o.compression !== null) {\n o.compression = o.compression.toUpperCase();\n }\n\n if (typeof o.unixPermissions === \"string\") {\n o.unixPermissions = parseInt(o.unixPermissions, 8);\n }\n\n // UNX_IFDIR 0040000 see zipinfo.c\n if (o.unixPermissions && (o.unixPermissions & 0x4000)) {\n o.dir = true;\n }\n // Bit 4 Directory\n if (o.dosPermissions && (o.dosPermissions & 0x0010)) {\n o.dir = true;\n }\n\n if (o.dir) {\n name = forceTrailingSlash(name);\n }\n if (o.createFolders && (parent = parentFolder(name))) {\n folderAdd.call(this, parent, true);\n }\n\n var isUnicodeString = dataType === \"string\" && o.binary === false && o.base64 === false;\n if (!originalOptions || typeof originalOptions.binary === \"undefined\") {\n o.binary = !isUnicodeString;\n }\n\n\n var isCompressedEmpty = (data instanceof CompressedObject) && data.uncompressedSize === 0;\n\n if (isCompressedEmpty || o.dir || !data || data.length === 0) {\n o.base64 = false;\n o.binary = true;\n data = \"\";\n o.compression = \"STORE\";\n dataType = \"string\";\n }\n\n /*\n * Convert content to fit.\n */\n\n var zipObjectContent = null;\n if (data instanceof CompressedObject || data instanceof GenericWorker) {\n zipObjectContent = data;\n } else if (nodejsUtils.isNode && nodejsUtils.isStream(data)) {\n zipObjectContent = new NodejsStreamInputAdapter(name, data);\n } else {\n zipObjectContent = utils.prepareContent(name, data, o.binary, o.optimizedBinaryString, o.base64);\n }\n\n var object = new ZipObject(name, zipObjectContent, o);\n 
this.files[name] = object;\n /*\n TODO: we can't throw an exception because we have async promises\n (we can have a promise of a Date() for example) but returning a\n promise is useless because file(name, data) returns the JSZip\n object for chaining. Should we break that to allow the user\n to catch the error ?\n\n return external.Promise.resolve(zipObjectContent)\n .then(function () {\n return object;\n });\n */\n};\n\n/**\n * Find the parent folder of the path.\n * @private\n * @param {string} path the path to use\n * @return {string} the parent folder, or \"\"\n */\nvar parentFolder = function (path) {\n if (path.slice(-1) === \"/\") {\n path = path.substring(0, path.length - 1);\n }\n var lastSlash = path.lastIndexOf(\"/\");\n return (lastSlash > 0) ? path.substring(0, lastSlash) : \"\";\n};\n\n/**\n * Returns the path with a slash at the end.\n * @private\n * @param {String} path the path to check.\n * @return {String} the path with a trailing slash.\n */\nvar forceTrailingSlash = function(path) {\n // Check the name ends with a /\n if (path.slice(-1) !== \"/\") {\n path += \"/\"; // IE doesn't like substr(-1)\n }\n return path;\n};\n\n/**\n * Add a (sub) folder in the current folder.\n * @private\n * @param {string} name the folder's name\n * @param {boolean=} [createFolders] If true, automatically create sub\n * folders. Defaults to false.\n * @return {Object} the new folder.\n */\nvar folderAdd = function(name, createFolders) {\n createFolders = (typeof createFolders !== \"undefined\") ? createFolders
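
The file above is the source map for the package's `jszip.0c9bbbca.es.js` chunk; its `sourcesContent` embeds JSZip 3.10.1 verbatim. For reference, the public API this bundle carries looks like the sketch below. It is drawn from JSZip's own documentation rather than from the truncated excerpt (the excerpt shows `loadAsync` and `file`, but `generateAsync` and the `import` specifier are assumptions based on the documented 3.x API and a bundler resolving the `jszip` package):

```js
// Sketch of the JSZip 3.x API: build an archive, serialize it, read it back.
import JSZip from "jszip";

async function roundTrip() {
    // Build an archive in memory.
    const zip = new JSZip();
    zip.file("hello.txt", "Hello, world!");
    zip.folder("docs").file("readme.md", "# notes");

    // DEFLATE-compress and serialize; "blob" suits browsers,
    // "nodebuffer" suits Node.js.
    const blob = await zip.generateAsync({
        type: "blob",
        compression: "DEFLATE",
    });

    // Reopen the archive and extract one entry as text.
    const reopened = await JSZip.loadAsync(blob);
    return reopened.file("hello.txt").async("string");
}

roundTrip().then((text) => console.log(text)); // "Hello, world!"
```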