
webgme-rosmod


This repository contains ROSMOD, developed for WebGME. ROSMOD is a web-based, collaborative modeling and execution environment for distributed embedded applications built using ROS. See http://rosmod.rcps.isis.vanderbilt.edu

/*!

JSZip - A Javascript class for generating and reading zip files
<http://stuartk.com/jszip>

(c) 2009-2014 Stuart Knightley <stuart [at] stuartk.com>
Dual licenced under the MIT license or GPLv3. See https://raw.github.com/Stuk/jszip/master/LICENSE.markdown.

JSZip uses the library pako released under the MIT license :
https://github.com/nodeca/pako/blob/master/LICENSE

*/
(function(f){if(typeof exports==="object"&&typeof module!=="undefined"){module.exports=f()}else if(typeof define==="function"&&define.amd){define([],f)}else{var g;if(typeof window!=="undefined"){g=window}else if(typeof global!=="undefined"){g=global}else if(typeof self!=="undefined"){g=self}else{g=this}g.JSZip = f()}})(function(){var define,module,exports;return (function e(t,n,r){function s(o,u){if(!n[o]){if(!t[o]){var a=typeof require=="function"&&require;if(!u&&a)return a(o,!0);if(i)return i(o,!0);var f=new Error("Cannot find module '"+o+"'");throw f.code="MODULE_NOT_FOUND",f}var l=n[o]={exports:{}};t[o][0].call(l.exports,function(e){var n=t[o][1][e];return s(n?n:e)},l,l.exports,e,t,n,r)}return n[o].exports}var i=typeof require=="function"&&require;for(var o=0;o<r.length;o++)s(r[o]);return s})({1:[function(require,module,exports){
'use strict';
var utils = require('./utils');
var support = require('./support');
// private property
var _keyStr = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/=";

// public method for encoding
exports.encode = function(input) {
    var output = [];
    var chr1, chr2, chr3, enc1, enc2, enc3, enc4;
    var i = 0, len = input.length, remainingBytes = len;

    var isArray = utils.getTypeOf(input) !== "string";
    while (i < input.length) {
        remainingBytes = len - i;

        if (!isArray) {
            chr1 = input.charCodeAt(i++);
            chr2 = i < len ? input.charCodeAt(i++) : 0;
            chr3 = i < len ? input.charCodeAt(i++) : 0;
        } else {
            chr1 = input[i++];
            chr2 = i < len ? input[i++] : 0;
            chr3 = i < len ? input[i++] : 0;
        }

        enc1 = chr1 >> 2;
        enc2 = ((chr1 & 3) << 4) | (chr2 >> 4);
        enc3 = remainingBytes > 1 ? (((chr2 & 15) << 2) | (chr3 >> 6)) : 64;
        enc4 = remainingBytes > 2 ? (chr3 & 63) : 64;

        output.push(_keyStr.charAt(enc1) + _keyStr.charAt(enc2) + _keyStr.charAt(enc3) + _keyStr.charAt(enc4));
    }

    return output.join("");
};

// public method for decoding
exports.decode = function(input) {
    var chr1, chr2, chr3;
    var enc1, enc2, enc3, enc4;
    var i = 0, resultIndex = 0;

    input = input.replace(/[^A-Za-z0-9\+\/\=]/g, "");

    var totalLength = input.length * 3 / 4;
    if(input.charAt(input.length - 1) === _keyStr.charAt(64)) {
        totalLength--;
    }
    if(input.charAt(input.length - 2) === _keyStr.charAt(64)) {
        totalLength--;
    }

    var output;
    if (support.uint8array) {
        output = new Uint8Array(totalLength);
    } else {
        output = new Array(totalLength);
    }

    while (i < input.length) {
        enc1 = _keyStr.indexOf(input.charAt(i++));
        enc2 = _keyStr.indexOf(input.charAt(i++));
        enc3 = _keyStr.indexOf(input.charAt(i++));
        enc4 = _keyStr.indexOf(input.charAt(i++));

        chr1 = (enc1 << 2) | (enc2 >> 4);
        chr2 = ((enc2 & 15) << 4) | (enc3 >> 2);
        chr3 = ((enc3 & 3) << 6) | enc4;

        output[resultIndex++] = chr1;
        if (enc3 !== 64) {
            output[resultIndex++] = chr2;
        }
        if (enc4 !== 64) {
            output[resultIndex++] = chr3;
        }
    }

    return output;
};

},{"./support":27,"./utils":29}],
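/*
 * Illustrative sketch (not part of the bundle): the base64 module above is an
 * internal helper, reachable only through the bundled require(). Assuming it
 * is loaded as `base64`, encode/decode round-trip a binary string:
 *
 *     var base64 = require('./base64');          // inside the bundle only
 *     var b64 = base64.encode("\x01\x02\x03");   // "AQID"
 *     var bytes = base64.decode(b64);            // Uint8Array [1, 2, 3] when supported
 *
 * Application code should prefer the public API, e.g.
 * zip.file("f.bin", b64, { base64: true }).
 */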
2:[function(require,module,exports){
'use strict';
var external = require("./external");
var DataWorker = require('./stream/DataWorker');
var DataLengthProbe = require('./stream/DataLengthProbe');
var Crc32Probe = require('./stream/Crc32Probe');

/**
 * Represent a compressed object, with everything needed to decompress it.
 * @constructor
 * @param {number} compressedSize the size of the data compressed.
 * @param {number} uncompressedSize the size of the data after decompression.
 * @param {number} crc32 the crc32 of the decompressed file.
 * @param {object} compression the type of compression, see lib/compressions.js.
 * @param {String|ArrayBuffer|Uint8Array|Buffer} data the compressed data.
 */
function CompressedObject(compressedSize, uncompressedSize, crc32, compression, data) {
    this.compressedSize = compressedSize;
    this.uncompressedSize = uncompressedSize;
    this.crc32 = crc32;
    this.compression = compression;
    this.compressedContent = data;
}

CompressedObject.prototype = {
    /**
     * Create a worker to get the uncompressed content.
     * @return {GenericWorker} the worker.
     */
    getContentWorker : function () {
        var worker = new DataWorker(external.Promise.resolve(this.compressedContent))
            .pipe(this.compression.uncompressWorker())
            .pipe(new DataLengthProbe("data_length"));

        var that = this;
        worker.on("end", function () {
            if(this.streamInfo['data_length'] !== that.uncompressedSize) {
                throw new Error("Bug : uncompressed data size mismatch");
            }
        });
        return worker;
    },
    /**
     * Create a worker to get the compressed content.
     * @return {GenericWorker} the worker.
     */
    getCompressedWorker : function () {
        return new DataWorker(external.Promise.resolve(this.compressedContent))
            .withStreamInfo("compressedSize", this.compressedSize)
            .withStreamInfo("uncompressedSize", this.uncompressedSize)
            .withStreamInfo("crc32", this.crc32)
            .withStreamInfo("compression", this.compression);
    }
};

/**
 * Chain the given worker with other workers to compress the content with the
 * given compression.
 * @param {GenericWorker} uncompressedWorker the worker to pipe.
 * @param {Object} compression the compression object.
 * @param {Object} compressionOptions the options to use when compressing.
 * @return {GenericWorker} the new worker compressing the content.
 */
CompressedObject.createWorkerFrom = function (uncompressedWorker, compression, compressionOptions) {
    return uncompressedWorker
        .pipe(new Crc32Probe())
        .pipe(new DataLengthProbe("uncompressedSize"))
        .pipe(compression.compressWorker(compressionOptions))
        .pipe(new DataLengthProbe("compressedSize"))
        .withStreamInfo("compression", compression);
};

module.exports = CompressedObject;

},{"./external":6,"./stream/Crc32Probe":22,"./stream/DataLengthProbe":23,"./stream/DataWorker":24}],
3:[function(require,module,exports){
'use strict';
var GenericWorker = require("./stream/GenericWorker");

exports.STORE = {
    magic: "\x00\x00",
    compressWorker : function (compressionOptions) {
        return new GenericWorker("STORE compression");
    },
    uncompressWorker : function () {
        return new GenericWorker("STORE decompression");
    }
};
exports.DEFLATE = require('./flate');

},{"./flate":7,"./stream/GenericWorker":25}],
4:[function(require,module,exports){
'use strict';
var utils = require('./utils');

/**
 * The following functions come from pako, from pako/lib/zlib/crc32.js
 * released under the MIT license, see pako https://github.com/nodeca/pako/
 */

// Use ordinary array, since untyped makes no boost here
function makeTable() {
    var c, table = [];

    for(var n =0; n < 256; n++){
        c = n;
        for(var k =0; k < 8; k++){
            c = ((c&1) ? (0xEDB88320 ^ (c >>> 1)) : (c >>> 1));
        }
        table[n] = c;
    }

    return table;
}

// Create table on load. Just 256 signed longs. Not a problem.
var crcTable = makeTable();

function crc32(crc, buf, len, pos) {
    var t = crcTable, end = pos + len;

    crc = crc ^ (-1);

    for (var i = pos; i < end; i++ ) {
        crc = (crc >>> 8) ^ t[(crc ^ buf[i]) & 0xFF];
    }

    return (crc ^ (-1)); // >>> 0;
}

// That's all for the pako functions.

/**
 * Compute the crc32 of a string.
 * This is almost the same as the function crc32, but for strings. Using the
 * same function for the two use cases leads to horrible performances.
 * @param {Number} crc the starting value of the crc.
 * @param {String} str the string to use.
 * @param {Number} len the length of the string.
 * @param {Number} pos the starting position for the crc32 computation.
 * @return {Number} the computed crc32.
 */
function crc32str(crc, str, len, pos) {
    var t = crcTable, end = pos + len;

    crc = crc ^ (-1);

    for (var i = pos; i < end; i++ ) {
        crc = (crc >>> 8) ^ t[(crc ^ str.charCodeAt(i)) & 0xFF];
    }

    return (crc ^ (-1)); // >>> 0;
}

module.exports = function crc32wrapper(input, crc) {
    if (typeof input === "undefined" || !input.length) {
        return 0;
    }

    var isArray = utils.getTypeOf(input) !== "string";

    if(isArray) {
        return crc32(crc|0, input, input.length, 0);
    } else {
        return crc32str(crc|0, input, input.length, 0);
    }
};
// vim: set shiftwidth=4 softtabstop=4:

},{"./utils":29}],
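/*
 * Usage sketch (not part of the bundle): the wrapper above dispatches on the
 * input type and, because the final `>>> 0` is commented out, returns a
 * *signed* 32-bit integer:
 *
 *     var crc32 = require('./crc32');   // inside the bundle only
 *     crc32("hello");                   // CRC-32 of the string, may be negative
 *     crc32([0x68, 0x65]);              // array/typed-array path
 *     crc32("");                        // 0 (empty input short-circuits)
 */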
5:[function(require,module,exports){
'use strict';
exports.base64 = false;
exports.binary = false;
exports.dir = false;
exports.createFolders = true;
exports.date = null;
exports.compression = null;
exports.compressionOptions = null;
exports.comment = null;
exports.unixPermissions = null;
exports.dosPermissions = null;

},{}],
6:[function(require,module,exports){
'use strict';

var ES6Promise = require("es6-promise").Promise;

/**
 * Let the user use/change some implementations.
 */
module.exports = {
    Promise: ES6Promise
};

},{"es6-promise":37}],
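/*
 * Usage sketch (public JSZip API, not part of the bundle): JSZip.external
 * re-exports this object, so the Promise implementation can be swapped, e.g.
 * for a library like Bluebird (hypothetical choice):
 *
 *     JSZip.external.Promise = require("bluebird");
 *     // every promise JSZip returns is now a Bluebird promise
 */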
7:[function(require,module,exports){
'use strict';
var USE_TYPEDARRAY = (typeof Uint8Array !== 'undefined') && (typeof Uint16Array !== 'undefined') && (typeof Uint32Array !== 'undefined');

var pako = require("pako");
var utils = require("./utils");
var GenericWorker = require("./stream/GenericWorker");

var ARRAY_TYPE = USE_TYPEDARRAY ? "uint8array" : "array";

exports.magic = "\x08\x00";

/**
 * Create a worker that uses pako to inflate/deflate.
 * @constructor
 * @param {String} action the name of the pako function to call : either "Deflate" or "Inflate".
 * @param {Object} options the options to use when (de)compressing.
 */
function FlateWorker(action, options) {
    GenericWorker.call(this, "FlateWorker/" + action);

    this._pako = new pako[action]({
        raw:true,
        level : options.level || -1 // default compression
    });
    // the `meta` object from the last chunk received
    // this allows this worker to pass around metadata
    this.meta = {};

    var self = this;
    this._pako.onData = function(data) {
        self.push({
            data : data,
            meta : self.meta
        });
    };
}

utils.inherits(FlateWorker, GenericWorker);

/**
 * @see GenericWorker.processChunk
 */
FlateWorker.prototype.processChunk = function (chunk) {
    this.meta = chunk.meta;
    this._pako.push(utils.transformTo(ARRAY_TYPE, chunk.data), false);
};

/**
 * @see GenericWorker.flush
 */
FlateWorker.prototype.flush = function () {
    GenericWorker.prototype.flush.call(this);
    this._pako.push([], true);
};

/**
 * @see GenericWorker.cleanUp
 */
FlateWorker.prototype.cleanUp = function () {
    GenericWorker.prototype.cleanUp.call(this);
    this._pako = null;
};

exports.compressWorker = function (compressionOptions) {
    return new FlateWorker("Deflate", compressionOptions);
};
exports.uncompressWorker = function () {
    return new FlateWorker("Inflate", {});
};

},{"./stream/GenericWorker":25,"./utils":29,"pako":38}],
8:[function(require,module,exports){
'use strict';

var utils = require('../utils');
var GenericWorker = require('../stream/GenericWorker');
var utf8 = require('../utf8');
var crc32 = require('../crc32');
var signature = require('../signature');

/**
 * Transform an integer into a string in hexadecimal.
 * @private
 * @param {number} dec the number to convert.
 * @param {number} bytes the number of bytes to generate.
 * @returns {string} the result.
 */
var decToHex = function(dec, bytes) {
    var hex = "", i;
    for (i = 0; i < bytes; i++) {
        hex += String.fromCharCode(dec & 0xff);
        dec = dec >>> 8;
    }
    return hex;
};

/**
 * Generate the UNIX part of the external file attributes.
 * @param {Object} unixPermissions the unix permissions or null.
 * @param {Boolean} isDir true if the entry is a directory, false otherwise.
 * @return {Number} a 32 bit integer.
 *
 * adapted from http://unix.stackexchange.com/questions/14705/the-zip-formats-external-file-attribute :
 *
 * TTTTsstrwxrwxrwx0000000000ADVSHR
 * ^^^^____________________________ file type, see zipinfo.c (UNX_*)
 *     ^^^_________________________ setuid, setgid, sticky
 *        ^^^^^^^^^________________ permissions
 *                 ^^^^^^^^^^______ not used ?
 *                           ^^^^^^ DOS attribute bits : Archive, Directory, Volume label, System file, Hidden, Read only
 */
var generateUnixExternalFileAttr = function (unixPermissions, isDir) {

    var result = unixPermissions;
    if (!unixPermissions) {
        // I can't use octal values in strict mode, hence the hexa.
        //  040775 => 0x41fd
        // 0100664 => 0x81b4
        result = isDir ? 0x41fd : 0x81b4;
    }
    return (result & 0xFFFF) << 16;
};

/**
 * Generate the DOS part of the external file attributes.
 * @param {Object} dosPermissions the dos permissions or null.
 * @param {Boolean} isDir true if the entry is a directory, false otherwise.
 * @return {Number} a 32 bit integer.
 *
 * Bit 0     Read-Only
 * Bit 1     Hidden
 * Bit 2     System
 * Bit 3     Volume Label
 * Bit 4     Directory
 * Bit 5     Archive
 */
var generateDosExternalFileAttr = function (dosPermissions, isDir) {
    // the dir flag is already set for compatibility
    return (dosPermissions || 0) & 0x3F;
};

/**
 * Generate the various parts used in the construction of the final zip file.
 * @param {Object} streamInfo the hash with information about the compressed file.
 * @param {Boolean} streamedContent is the content streamed ?
 * @param {Boolean} streamingEnded is the stream finished ?
 * @param {number} offset the current offset from the start of the zip file.
 * @param {String} platform let's pretend we are this platform (change platform dependent fields)
 * @param {Function} encodeFileName the function to encode the file name / comment.
 * @return {Object} the zip parts.
 */
var generateZipParts = function(streamInfo, streamedContent, streamingEnded, offset, platform, encodeFileName) {
    var file = streamInfo['file'],
        compression = streamInfo['compression'],
        useCustomEncoding = encodeFileName !== utf8.utf8encode,
        encodedFileName = utils.transformTo("string", encodeFileName(file.name)),
        utfEncodedFileName = utils.transformTo("string", utf8.utf8encode(file.name)),
        comment = file.comment,
        encodedComment = utils.transformTo("string", encodeFileName(comment)),
        utfEncodedComment = utils.transformTo("string", utf8.utf8encode(comment)),
        useUTF8ForFileName = utfEncodedFileName.length !== file.name.length,
        useUTF8ForComment = utfEncodedComment.length !== comment.length,
        dosTime,
        dosDate,
        extraFields = "",
        unicodePathExtraField = "",
        unicodeCommentExtraField = "",
        dir = file.dir,
        date = file.date;

    var dataInfo = {
        crc32 : 0,
        compressedSize : 0,
        uncompressedSize : 0
    };

    // if the content is streamed, the sizes/crc32 are only available AFTER
    // the end of the stream.
    if (!streamedContent || streamingEnded) {
        dataInfo.crc32 = streamInfo['crc32'];
        dataInfo.compressedSize = streamInfo['compressedSize'];
        dataInfo.uncompressedSize = streamInfo['uncompressedSize'];
    }

    var bitflag = 0;
    if (streamedContent) {
        bitflag |= 0x0008;
    }
    if (!useCustomEncoding && (useUTF8ForFileName || useUTF8ForComment)) {
        bitflag |= 0x0800;
    }

    var extFileAttr = 0;
    var versionMadeBy = 0;
    if (dir) {
        // dos or unix, we set the dos dir flag
        extFileAttr |= 0x00010;
    }
    if(platform === "UNIX") {
        versionMadeBy = 0x031E; // UNIX, version 3.0
        extFileAttr |= generateUnixExternalFileAttr(file.unixPermissions, dir);
    } else { // DOS or other, fallback to DOS
        versionMadeBy = 0x0014; // DOS, version 2.0
        extFileAttr |= generateDosExternalFileAttr(file.dosPermissions, dir);
    }

    // date
    // @see http://www.delorie.com/djgpp/doc/rbinter/it/52/13.html
    // @see http://www.delorie.com/djgpp/doc/rbinter/it/65/16.html
    // @see http://www.delorie.com/djgpp/doc/rbinter/it/66/16.html
    dosTime = date.getUTCHours();
    dosTime = dosTime << 6;
    dosTime = dosTime | date.getUTCMinutes();
    dosTime = dosTime << 5;
    dosTime = dosTime | date.getUTCSeconds() / 2;
    dosDate = date.getUTCFullYear() - 1980;
    dosDate = dosDate << 4;
    dosDate = dosDate | (date.getUTCMonth() + 1);
    dosDate = dosDate << 5;
    dosDate = dosDate | date.getUTCDate();
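    /*
     * Worked example (illustrative, derived from the shifts above): for the
     * UTC date 2017-03-15 12:30:24,
     *   dosTime = 12<<11 | 30<<5 | 24/2      = 25548 (0x63CC)
     *   dosDate = (2017-1980)<<9 | 3<<5 | 15 = 19055 (0x4A6F)
     * i.e. hours/minutes/halved-seconds packed into 5+6+5 bits, and
     * years-since-1980/month/day packed into 7+4+5 bits.
     */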
    if (useUTF8ForFileName) {
        // set the unicode path extra field. unzip needs at least one extra
        // field to correctly handle unicode path, so using the path is as good
        // as any other information. This could improve the situation with
        // other archive managers too.
        // This field is usually used without the utf8 flag, with a non
        // unicode path in the header (winrar, winzip). This helps (a bit)
        // with the messy Windows' default compressed folders feature but
        // breaks on p7zip which doesn't seek the unicode path extra field.
        // So for now, UTF-8 everywhere !
        unicodePathExtraField =
            // Version
            decToHex(1, 1) +
            // NameCRC32
            decToHex(crc32(encodedFileName), 4) +
            // UnicodeName
            utfEncodedFileName;

        extraFields +=
            // Info-ZIP Unicode Path Extra Field
            "\x75\x70" +
            // size
            decToHex(unicodePathExtraField.length, 2) +
            // content
            unicodePathExtraField;
    }

    if(useUTF8ForComment) {
        unicodeCommentExtraField =
            // Version
            decToHex(1, 1) +
            // CommentCRC32
            decToHex(crc32(encodedComment), 4) +
            // UnicodeName
            utfEncodedComment;

        extraFields +=
            // Info-ZIP Unicode Comment Extra Field
            "\x75\x63" +
            // size
            decToHex(unicodeCommentExtraField.length, 2) +
            // content
            unicodeCommentExtraField;
    }

    var header = "";

    // version needed to extract
    header += "\x0A\x00";
    // general purpose bit flag
    // set bit 11 if utf8
    header += decToHex(bitflag, 2);
    // compression method
    header += compression.magic;
    // last mod file time
    header += decToHex(dosTime, 2);
    // last mod file date
    header += decToHex(dosDate, 2);
    // crc-32
    header += decToHex(dataInfo.crc32, 4);
    // compressed size
    header += decToHex(dataInfo.compressedSize, 4);
    // uncompressed size
    header += decToHex(dataInfo.uncompressedSize, 4);
    // file name length
    header += decToHex(encodedFileName.length, 2);
    // extra field length
    header += decToHex(extraFields.length, 2);

    var fileRecord = signature.LOCAL_FILE_HEADER + header + encodedFileName + extraFields;

    var dirRecord = signature.CENTRAL_FILE_HEADER +
        // version made by (00: DOS)
        decToHex(versionMadeBy, 2) +
        // file header (common to file and central directory)
        header +
        // file comment length
        decToHex(encodedComment.length, 2) +
        // disk number start
        "\x00\x00" +
        // internal file attributes TODO
        "\x00\x00" +
        // external file attributes
        decToHex(extFileAttr, 4) +
        // relative offset of local header
        decToHex(offset, 4) +
        // file name
        encodedFileName +
        // extra field
        extraFields +
        // file comment
        encodedComment;

    return {
        fileRecord: fileRecord,
        dirRecord: dirRecord
    };
};
/**
 * Generate the EOCD record.
 * @param {Number} entriesCount the number of entries in the zip file.
 * @param {Number} centralDirLength the length (in bytes) of the central dir.
 * @param {Number} localDirLength the length (in bytes) of the local dir.
 * @param {String} comment the zip file comment as a binary string.
 * @param {Function} encodeFileName the function to encode the comment.
 * @return {String} the EOCD record.
 */
var generateCentralDirectoryEnd = function (entriesCount, centralDirLength, localDirLength, comment, encodeFileName) {
    var dirEnd = "";
    var encodedComment = utils.transformTo("string", encodeFileName(comment));

    // end of central dir signature
    dirEnd = signature.CENTRAL_DIRECTORY_END +
        // number of this disk
        "\x00\x00" +
        // number of the disk with the start of the central directory
        "\x00\x00" +
        // total number of entries in the central directory on this disk
        decToHex(entriesCount, 2) +
        // total number of entries in the central directory
        decToHex(entriesCount, 2) +
        // size of the central directory, 4 bytes
        decToHex(centralDirLength, 4) +
        // offset of start of central directory with respect to the starting disk number
        decToHex(localDirLength, 4) +
        // .ZIP file comment length
        decToHex(encodedComment.length, 2) +
        // .ZIP file comment
        encodedComment;

    return dirEnd;
};

/**
 * Generate data descriptors for a file entry.
 * @param {Object} streamInfo the hash generated by a worker, containing information
 * on the file entry.
 * @return {String} the data descriptors.
 */
var generateDataDescriptors = function (streamInfo) {
    var descriptor = "";
    descriptor = signature.DATA_DESCRIPTOR +
        // crc-32, 4 bytes
        decToHex(streamInfo['crc32'], 4) +
        // compressed size, 4 bytes
        decToHex(streamInfo['compressedSize'], 4) +
        // uncompressed size, 4 bytes
        decToHex(streamInfo['uncompressedSize'], 4);

    return descriptor;
};
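/*
 * Illustrative layout (not in the original source): with crc32 = 0x12345678,
 * compressedSize = 10 and uncompressedSize = 20, the 16-byte descriptor is
 * (assuming signature.DATA_DESCRIPTOR === "PK\x07\x08", fields little-endian):
 *
 *     "PK\x07\x08" + "\x78\x56\x34\x12" + "\x0A\x00\x00\x00" + "\x14\x00\x00\x00"
 */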
/**
 * A worker to concatenate other workers to create a zip file.
 * @param {Boolean} streamFiles `true` to stream the content of the files,
 * `false` to accumulate it.
 * @param {String} comment the comment to use.
 * @param {String} platform the platform to use, "UNIX" or "DOS".
 * @param {Function} encodeFileName the function to encode file names and comments.
 */
function ZipFileWorker(streamFiles, comment, platform, encodeFileName) {
    GenericWorker.call(this, "ZipFileWorker");
    // The number of bytes written so far. This doesn't count accumulated chunks.
    this.bytesWritten = 0;
    // The comment of the zip file
    this.zipComment = comment;
    // The platform "generating" the zip file.
    this.zipPlatform = platform;
    // the function to encode file names and comments.
    this.encodeFileName = encodeFileName;
    // Should we stream the content of the files ?
    this.streamFiles = streamFiles;
    // If `streamFiles` is false, we will need to accumulate the content of the
    // files to calculate sizes / crc32 (and write them *before* the content).
    // This boolean indicates if we are accumulating chunks (it will change a lot
    // during the lifetime of this worker).
    this.accumulate = false;
    // The buffer receiving chunks when accumulating content.
    this.contentBuffer = [];
    // The list of generated directory records.
    this.dirRecords = [];
    // The offset (in bytes) from the beginning of the zip file for the current source.
    this.currentSourceOffset = 0;
    // The total number of entries in this zip file.
    this.entriesCount = 0;
    // the name of the file currently being added, null when handling the end of the zip file.
    // Used for the emitted metadata.
    this.currentFile = null;

    this._sources = [];
}
utils.inherits(ZipFileWorker, GenericWorker);

/**
 * @see GenericWorker.push
 */
ZipFileWorker.prototype.push = function (chunk) {

    var currentFilePercent = chunk.meta.percent || 0;
    var entriesCount = this.entriesCount;
    var remainingFiles = this._sources.length;

    if(this.accumulate) {
        this.contentBuffer.push(chunk);
    } else {
        this.bytesWritten += chunk.data.length;

        GenericWorker.prototype.push.call(this, {
            data : chunk.data,
            meta : {
                currentFile : this.currentFile,
                percent : entriesCount ? (currentFilePercent + 100 * (entriesCount - remainingFiles - 1)) / entriesCount : 100
            }
        });
    }
};

/**
 * The worker started a new source (another worker).
 * @param {Object} streamInfo the streamInfo object from the new source.
 */
ZipFileWorker.prototype.openedSource = function (streamInfo) {

    this.currentSourceOffset = this.bytesWritten;
    this.currentFile = streamInfo['file'].name;

    // don't stream folders (because they don't have any content)
    if(this.streamFiles && !streamInfo['file'].dir) {
        var record = generateZipParts(streamInfo, this.streamFiles, false, this.currentSourceOffset, this.zipPlatform, this.encodeFileName);
        this.push({
            data : record.fileRecord,
            meta : {percent:0}
        });
    } else {
        // we need to wait for the whole file before pushing anything
        this.accumulate = true;
    }
};

/**
 * The worker finished a source (another worker).
 * @param {Object} streamInfo the streamInfo object from the finished source.
 */
ZipFileWorker.prototype.closedSource = function (streamInfo) {

    this.accumulate = false;
    var record = generateZipParts(streamInfo, this.streamFiles, true, this.currentSourceOffset, this.zipPlatform, this.encodeFileName);

    this.dirRecords.push(record.dirRecord);
    if(this.streamFiles && !streamInfo['file'].dir) {
        // after the streamed file, we put data descriptors
        this.push({
            data : generateDataDescriptors(streamInfo),
            meta : {percent:100}
        });
    } else {
        // the content wasn't streamed, we need to push everything now
        // first the file record, then the content
        this.push({
            data : record.fileRecord,
            meta : {percent:0}
        });
        while(this.contentBuffer.length) {
            this.push(this.contentBuffer.shift());
        }
    }
    this.currentFile = null;
};

/**
 * @see GenericWorker.flush
 */
ZipFileWorker.prototype.flush = function () {

    var localDirLength = this.bytesWritten;
    for(var i = 0; i < this.dirRecords.length; i++) {
        this.push({
            data : this.dirRecords[i],
            meta : {percent:100}
        });
    }
    var centralDirLength = this.bytesWritten - localDirLength;

    var dirEnd = generateCentralDirectoryEnd(this.dirRecords.length, centralDirLength, localDirLength, this.zipComment, this.encodeFileName);

    this.push({
        data : dirEnd,
        meta : {percent:100}
    });
};
/**
 * Prepare the next source to be read.
 */
ZipFileWorker.prototype.prepareNextSource = function () {
    this.previous = this._sources.shift();
    this.openedSource(this.previous.streamInfo);
    if (this.isPaused) {
        this.previous.pause();
    } else {
        this.previous.resume();
    }
};

/**
 * @see GenericWorker.registerPrevious
 */
ZipFileWorker.prototype.registerPrevious = function (previous) {
    this._sources.push(previous);
    var self = this;

    previous.on('data', function (chunk) {
        self.processChunk(chunk);
    });
    previous.on('end', function () {
        self.closedSource(self.previous.streamInfo);
        if(self._sources.length) {
            self.prepareNextSource();
        } else {
            self.end();
        }
    });
    previous.on('error', function (e) {
        self.error(e);
    });
    return this;
};

/**
 * @see GenericWorker.resume
 */
ZipFileWorker.prototype.resume = function () {
    if(!GenericWorker.prototype.resume.call(this)) {
        return false;
    }

    if (!this.previous && this._sources.length) {
        this.prepareNextSource();
        return true;
    }
    if (!this.previous && !this._sources.length && !this.generatedError) {
        this.end();
        return true;
    }
};

/**
 * @see GenericWorker.error
 */
ZipFileWorker.prototype.error = function (e) {
    var sources = this._sources;
    if(!GenericWorker.prototype.error.call(this, e)) {
        return false;
    }
    for(var i = 0; i < sources.length; i++) {
        try {
            sources[i].error(e);
        } catch(e) {
            // the `error` exploded, nothing to do
        }
    }
    return true;
};

/**
 * @see GenericWorker.lock
 */
ZipFileWorker.prototype.lock = function () {
    GenericWorker.prototype.lock.call(this);
    var sources = this._sources;
    for(var i = 0; i < sources.length; i++) {
        sources[i].lock();
    }
};

module.exports = ZipFileWorker;

},{"../crc32":4,"../signature":20,"../stream/GenericWorker":25,"../utf8":28,"../utils":29}],
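/*
 * Usage sketch (public JSZip API, not part of the bundle): the percent values
 * computed by ZipFileWorker.push surface through generateAsync's onUpdate
 * callback; streamFiles:true writes local headers with bit 3 set and a data
 * descriptor after each entry, as implemented above:
 *
 *     zip.generateAsync({ type: "blob", streamFiles: true }, function (meta) {
 *         console.log(meta.percent.toFixed(2) + "% - " + meta.currentFile);
 *     }).then(function (blob) { ... });
 */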
9:[function(require,module,exports){
'use strict';

var compressions = require('../compressions');
var ZipFileWorker = require('./ZipFileWorker');

/**
 * Find the compression to use.
 * @param {String} fileCompression the compression defined at the file level, if any.
 * @param {String} zipCompression the compression defined at the load() level.
 * @return {Object} the compression object to use.
 */
var getCompression = function (fileCompression, zipCompression) {

    var compressionName = fileCompression || zipCompression;
    var compression = compressions[compressionName];
    if (!compression) {
        throw new Error(compressionName + " is not a valid compression method !");
    }
    return compression;
};

/**
 * Create a worker to generate a zip file.
 * @param {JSZip} zip the JSZip instance at the right root level.
 * @param {Object} options to generate the zip file.
 * @param {String} comment the comment to use.
 */
exports.generateWorker = function (zip, options, comment) {

    var zipFileWorker = new ZipFileWorker(options.streamFiles, comment, options.platform, options.encodeFileName);
    var entriesCount = 0;
    try {

        zip.forEach(function (relativePath, file) {
            entriesCount++;
            var compression = getCompression(file.options.compression, options.compression);
            var compressionOptions = file.options.compressionOptions || options.compressionOptions || {};
            var dir = file.dir, date = file.date;

            file._compressWorker(compression, compressionOptions)
                .withStreamInfo("file", {
                    name : relativePath,
                    dir : dir,
                    date : date,
                    comment : file.comment || "",
                    unixPermissions : file.unixPermissions,
                    dosPermissions : file.dosPermissions
                })
                .pipe(zipFileWorker);
        });
        zipFileWorker.entriesCount = entriesCount;
    } catch (e) {
        zipFileWorker.error(e);
    }

    return zipFileWorker;
};

},{"../compressions":3,"./ZipFileWorker":8}],
10:[function(require,module,exports){
'use strict';

/**
 * Representation of a zip file in js
 * @constructor
 */
function JSZip() {
    // if this constructor is used without `new`, it adds `new` before itself:
    if(!(this instanceof JSZip)) {
        return new JSZip();
    }

    if(arguments.length) {
        throw new Error("The constructor with parameters has been removed in JSZip 3.0, please check the upgrade guide.");
    }

    // object containing the files :
    // {
    //   "folder/" : {...},
    //   "folder/data.txt" : {...}
    // }
    this.files = {};

    this.comment = null;

    // Where we are in the hierarchy
    this.root = "";
    this.clone = function() {
        var newObj = new JSZip();
        for (var i in this) {
            if (typeof this[i] !== "function") {
                newObj[i] = this[i];
            }
        }
        return newObj;
    };
}
JSZip.prototype = require('./object');
JSZip.prototype.loadAsync = require('./load');
JSZip.support = require('./support');
JSZip.defaults = require('./defaults');
JSZip.loadAsync = function (content, options) {
    return new JSZip().loadAsync(content, options);
};
JSZip.external = require("./external");
module.exports = JSZip;

},{"./defaults":5,"./external":6,"./load":11,"./object":13,"./support":27}],
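/*
 * Usage sketch (public JSZip API, not part of the bundle): the constructor
 * takes no arguments and works with or without `new`:
 *
 *     var zip = new JSZip();        // or: var zip = JSZip();
 *     zip.file("hello.txt", "Hello World\n");
 *     // new JSZip(data) throws in JSZip 3.x: use JSZip.loadAsync(data) instead.
 */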
11:[function(require,module,exports){
'use strict';
var utils = require('./utils');
var external = require("./external");
var utf8 = require('./utf8');
var ZipEntries = require('./zipEntries');
var Crc32Probe = require('./stream/Crc32Probe');
var nodejsUtils = require("./nodejsUtils");

/**
 * Check the CRC32 of an entry.
 * @param {ZipEntry} zipEntry the zip entry to check.
 * @return {Promise} the result.
 */
function checkEntryCRC32(zipEntry) {
    return new external.Promise(function (resolve, reject) {
        var worker = zipEntry.decompressed.getContentWorker().pipe(new Crc32Probe());
        worker.on("error", function (e) {
            reject(e);
        })
        .on("end", function () {
            if (worker.streamInfo.crc32 !== zipEntry.decompressed.crc32) {
                reject(new Error("Corrupted zip : CRC32 mismatch"));
            } else {
                resolve();
            }
        })
        .resume();
    });
}

module.exports = function(data, options) {
    var zip = this;
    options = utils.extend(options || {}, {
        base64: false,
        checkCRC32: false,
        optimizedBinaryString: false,
        createFolders: false,
        decodeFileName: utf8.utf8decode
    });

    if (nodejsUtils.isNode && nodejsUtils.isStream(data)) {
        return external.Promise.reject(new Error("JSZip can't accept a stream when loading a zip file."));
    }

    return utils.prepareContent("the loaded zip file", data, true, options.optimizedBinaryString, options.base64)
    .then(function(data) {
        var zipEntries = new ZipEntries(options);
        zipEntries.load(data);
        return zipEntries;
    }).then(function checkCRC32(zipEntries) {
        var promises = [external.Promise.resolve(zipEntries)];
        var files = zipEntries.files;
        if (options.checkCRC32) {
            for (var i = 0; i < files.length; i++) {
                promises.push(checkEntryCRC32(files[i]));
            }
        }
        return external.Promise.all(promises);
    }).then(function addFiles(results) {
        var zipEntries = results.shift();
        var files = zipEntries.files;
        for (var i = 0; i < files.length; i++) {
            var input = files[i];
            zip.file(input.fileNameStr, input.decompressed, {
                binary: true,
                optimizedBinaryString: true,
                date: input.date,
                dir: input.dir,
                comment : input.fileCommentStr.length ? input.fileCommentStr : null,
                unixPermissions : input.unixPermissions,
                dosPermissions : input.dosPermissions,
                createFolders: options.createFolders
            });
        }
        if (zipEntries.zipComment.length) {
            zip.comment = zipEntries.zipComment;
        }

        return zip;
    });
};

},{"./external":6,"./nodejsUtils":12,"./stream/Crc32Probe":22,"./utf8":28,"./utils":29,"./zipEntries":30}],
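/*
 * Usage sketch (public JSZip API, not part of the bundle): loadAsync accepts
 * a binary string, ArrayBuffer, Uint8Array, Buffer or base64 string, but not
 * a Node stream (rejected above). checkCRC32:true wires in checkEntryCRC32:
 *
 *     JSZip.loadAsync(arrayBuffer, { checkCRC32: true })
 *         .then(function (zip) { return zip.file("readme.txt").async("string"); })
 *         .catch(function (e) { console.error(e.message); }); // e.g. "Corrupted zip : CRC32 mismatch"
 */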
12:[function(require,module,exports){
(function (Buffer){
'use strict';
module.exports = {
    /**
     * True if this is running in Nodejs, will be undefined in a browser.
     * In a browser, browserify won't include this file and the whole module
     * will be resolved as an empty object.
     */
    isNode : typeof Buffer !== "undefined",
    /**
     * Create a new nodejs Buffer.
     * @param {Object} data the data to pass to the constructor.
     * @param {String} encoding the encoding to use.
     * @return {Buffer} a new Buffer.
     */
    newBuffer : function(data, encoding){
        return new Buffer(data, encoding);
    },
    /**
     * Find out if an object is a Buffer.
     * @param {Object} b the object to test.
     * @return {Boolean} true if the object is a Buffer, false otherwise.
     */
    isBuffer : function(b){
        return Buffer.isBuffer(b);
    },

    isStream : function (obj) {
        return obj &&
            typeof obj.on === "function" &&
            typeof obj.pause === "function" &&
            typeof obj.resume === "function";
    }
};
}).call(this,(typeof Buffer !== "undefined" ? Buffer : undefined))
},{}],
13:[function(require,module,exports){
'use strict';
var utf8 = require('./utf8');
var utils = require('./utils');
var GenericWorker = require('./stream/GenericWorker');
var StreamHelper = require('./stream/StreamHelper');
var defaults = require('./defaults');
var CompressedObject = require('./compressedObject');
var ZipObject = require('./zipObject');
var generate = require("./generate");
var nodejsUtils = require("./nodejsUtils");
var NodejsStreamInputAdapter = require("./nodejs/NodejsStreamInputAdapter");

/**
 * Add a file in the current folder.
 * @private
 * @param {string} name the name of the file
 * @param {String|ArrayBuffer|Uint8Array|Buffer} data the data of the file
 * @param {Object} o the options of the file
 * @return {Object} the new file.
 */
var fileAdd = function(name, data, o) {
    // be sure sub folders exist
    var dataType = utils.getTypeOf(data),
        parent;

    /*
     * Correct options.
     */
    o = utils.extend(o || {}, defaults);
    o.date = o.date || new Date();
    if (o.compression !== null) {
        o.compression = o.compression.toUpperCase();
    }

    if (typeof o.unixPermissions === "string") {
        o.unixPermissions = parseInt(o.unixPermissions, 8);
    }

    // UNX_IFDIR  0040000 see zipinfo.c
    if (o.unixPermissions && (o.unixPermissions & 0x4000)) {
        o.dir = true;
    }
    // Bit 4    Directory
    if (o.dosPermissions && (o.dosPermissions & 0x0010)) {
        o.dir = true;
    }

    if (o.dir) {
        name = forceTrailingSlash(name);
    }
    if (o.createFolders && (parent = parentFolder(name))) {
        folderAdd.call(this, parent, true);
    }

    var isUnicodeString = dataType === "string" && o.binary === false && o.base64 === false;
    o.binary = !isUnicodeString;

    var isCompressedEmpty = (data instanceof CompressedObject) && data.uncompressedSize === 0;

    if (isCompressedEmpty || o.dir || !data || data.length === 0) {
        o.base64 = false;
        o.binary = true;
        data = "";
        o.compression = "STORE";
        dataType = "string";
    }

    /*
     * Convert content to fit.
     */
    var zipObjectContent = null;

    if (data instanceof CompressedObject || data instanceof GenericWorker) {
        zipObjectContent = data;
    } else if (nodejsUtils.isNode && nodejsUtils.isStream(data)) {
        zipObjectContent = new NodejsStreamInputAdapter(name, data);
    } else {
        zipObjectContent = utils.prepareContent(name, data, o.binary, o.optimizedBinaryString, o.base64);
    }

    var object = new ZipObject(name, zipObjectContent, o);
    this.files[name] = object;
    /*
    TODO: we can't throw an exception because we have async promises
    (we can have a promise of a Date() for example) but returning a
    promise is useless because file(name, data) returns the JSZip
    object for chaining. Should we break that to allow the user
    to catch the error ?

    return external.Promise.resolve(zipObjectContent)
    .then(function () {
        return object;
    });
    */
};
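/*
 * Usage sketch (public JSZip API, not part of the bundle): the option fix-ups
 * above mean octal permission strings are parsed, and a set directory bit
 * implies dir:true :
 *
 *     zip.file("bin/run.sh", "#!/bin/sh\n", { unixPermissions: "755" });
 *     zip.file("logs/", null, { unixPermissions: 0x41fd }); // 040775 => treated as a folder
 */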
/**
 * Find the parent folder of the path.
 * @private
 * @param {string} path the path to use
 * @return {string} the parent folder, or ""
 */
var parentFolder = function (path) {
    if (path.slice(-1) === '/') {
        path = path.substring(0, path.length - 1);
    }
    var lastSlash = path.lastIndexOf('/');
    return (lastSlash > 0) ? path.substring(0, lastSlash) : "";
};

/**
 * Returns the path with a slash at the end.
 * @private
 * @param {String} path the path to check.
 * @return {String} the path with a trailing slash.
 */
var forceTrailingSlash = function(path) {
    // Check the name ends with a /
    if (path.slice(-1) !== "/") {
        path += "/"; // IE doesn't like substr(-1)
    }
    return path;
};

/**
 * Add a (sub) folder in the current folder.
 * @private
 * @param {string} name the folder's name
 * @param {boolean=} [createFolders] If true, automatically create sub
 *  folders. Defaults to false.
 * @return {Object} the new folder.
 */
var folderAdd = function(name, createFolders) {
    createFolders = (typeof createFolders !== 'undefined') ? createFolders : defaults.createFolders;

    name = forceTrailingSlash(name);

    // Does this folder already exist?
    if (!this.files[name]) {
        fileAdd.call(this, name, null, {
            dir: true,
            createFolders: createFolders
        });
    }
    return this.files[name];
};

/**
 * Cross-window, cross-Node-context regular expression detection
 * @param {Object} object Anything
 * @return {Boolean} true if the object is a regular expression,
 * false otherwise
 */
function isRegExp(object) {
    return Object.prototype.toString.call(object) === "[object RegExp]";
}

// return the actual prototype of JSZip
var out = {
    /**
     * @see loadAsync
     */
    load: function() {
        throw new Error("This method has been removed in JSZip 3.0, please check the upgrade guide.");
    },

    /**
     * Call a callback function for each entry at this folder level.
     * @param {Function} cb the callback function:
     * function (relativePath, file) {...}
     * It takes 2 arguments : the relative path and the file.
     */
    forEach: function(cb) {
        var filename, relativePath, file;
        for (filename in this.files) {
            if (!this.files.hasOwnProperty(filename)) {
                continue;
            }
            file = this.files[filename];
            relativePath = filename.slice(this.root.length, filename.length);
            if (relativePath && filename.slice(0, this.root.length) === this.root) {
                // the file is in the current root
                cb(relativePath, file); // TODO reverse the parameters ? need to be clean AND consistent with the filter search fn...
            }
        }
    },

    /**
     * Filter nested files/folders with the specified function.
     * @param {Function} search the predicate to use :
     * function (relativePath, file) {...}
     * It takes 2 arguments : the relative path and the file.
     * @return {Array} An array of matching elements.
     */
    filter: function(search) {
        var result = [];
        this.forEach(function (relativePath, entry) {
            if (search(relativePath, entry)) { // the file matches the function
                result.push(entry);
            }
        });
        return result;
    },

    /**
     * Add a file to the zip file, or search a file.
     * @param {string|RegExp} name The name of the file to add (if data is defined),
     * the name of the file to find (if no data) or a regex to match files.
     * @param {String|ArrayBuffer|Uint8Array|Buffer} data The file data, either raw or base64 encoded
     * @param {Object} o File options
     * @return {JSZip|Object|Array} this JSZip object (when adding a file),
     * a file (when searching by string) or an array of files (when searching by regex).
     */
    file: function(name, data, o) {
        if (arguments.length === 1) {
            if (isRegExp(name)) {
                var regexp = name;
                return this.filter(function(relativePath, file) {
                    return !file.dir && regexp.test(relativePath);
                });
            } else { // text
                var obj = this.files[this.root + name];
                if (obj && !obj.dir) {
                    return obj;
                } else {
                    return null;
                }
            }
        } else { // more than one argument : we have data !
            name = this.root + name;
            fileAdd.call(this, name, data, o);
        }
        return this;
    },
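    /*
     * Usage sketch (public JSZip API, not part of the bundle): one argument
     * searches, two or more adds, as implemented directly above:
     *
     *     zip.file("amount.txt");          // ZipObject or null
     *     zip.file(/\.txt$/);              // array of matching ZipObjects
     *     zip.file("amount.txt", "123");   // adds the file, returns `zip` for chaining
     */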
    /**
     * Add a directory to the zip file, or search.
     * @param {String|RegExp} arg The name of the directory to add, or a regex to search folders.
     * @return {JSZip} an object with the new directory as the root, or an array containing matching folders.
     */
    folder: function(arg) {
        if (!arg) {
            return this;
        }

        if (isRegExp(arg)) {
            return this.filter(function(relativePath, file) {
                return file.dir && arg.test(relativePath);
            });
        }

        // else, name is a new folder
        var name = this.root + arg;
        var newFolder = folderAdd.call(this, name);

        // Allow chaining by returning a new object with this folder as the root
        var ret = this.clone();
        ret.root = newFolder.name;
        return ret;
    },

    /**
     * Delete a file, or a directory and all sub-files, from the zip
     * @param {string} name the name of the file to delete
     * @return {JSZip} this JSZip object
     */
    remove: function(name) {
        name = this.root + name;

        var file = this.files[name];
        if (!file) {
            // Look for any folders
            if (name.slice(-1) !== "/") {
                name += "/";
            }
            file = this.files[name];
        }

        if (file && !file.dir) {
            // file
            delete this.files[name];
        } else {
            // maybe a folder, delete recursively
            var kids = this.filter(function(relativePath, file) {
                return file.name.slice(0, name.length) === name;
            });
            for (var i = 0; i < kids.length; i++) {
                delete this.files[kids[i].name];
            }
        }

        return this;
    },

    /**
     * Generate the complete zip file
     * @param {Object} options the options to generate the zip file :
     * - compression, "STORE" by default.
     * - type, "base64" by default. Values are : string, base64, uint8array, arraybuffer, blob.
     * @return {String|Uint8Array|ArrayBuffer|Buffer|Blob} the zip file
     */
    generate: function(options) {
        throw new Error("This method has been removed in JSZip 3.0, please check the upgrade guide.");
    },

    /**
     * Generate the complete zip file as an internal stream.
     * @param {Object} options the options to generate the zip file :
     * - compression, "STORE" by default.
     * - type, "base64" by default. Values are : string, base64, uint8array, arraybuffer, blob.
     * @return {StreamHelper} the streamed zip file.
     */
    generateInternalStream: function(options) {
        var worker, opts = {};
        try {
            opts = utils.extend(options || {}, {
                streamFiles: false,
                compression: "STORE",
                compressionOptions : null,
                type: "",
                platform: "DOS",
                comment: null,
                mimeType: 'application/zip',
                encodeFileName: utf8.utf8encode
            });

            opts.type = opts.type.toLowerCase();
            opts.compression = opts.compression.toUpperCase();

            // "binarystring" is preferred but the internals use "string".
            if(opts.type === "binarystring") {
                opts.type = "string";
            }

            if (!opts.type) {
                throw new Error("No output type specified.");
            }

            utils.checkSupport(opts.type);

            // accept nodejs `process.platform`
            if(
                opts.platform === 'darwin' ||
                opts.platform === 'freebsd' ||
                opts.platform === 'linux' ||
                opts.platform === 'sunos'
            ) {
                opts.platform = "UNIX";
            }
            if (opts.platform === 'win32') {
                opts.platform = "DOS";
            }

            var comment = opts.comment || this.comment || "";
            worker = generate.generateWorker(this, opts, comment);
        } catch (e) {
            worker = new GenericWorker("error");
            worker.error(e);
        }
        return new StreamHelper(worker, opts.type || "string", opts.mimeType);
    },

    /**
     * Generate the complete zip file asynchronously.
     * @see generateInternalStream
     */
    generateAsync: function(options, onUpdate) {
        return this.generateInternalStream(options).accumulate(onUpdate);
    },
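    /*
     * Usage sketch (public JSZip API, not part of the bundle): folder() moves
     * the root and remove() deletes recursively, so:
     *
     *     var img = zip.folder("images");   // clone with root "images/"
     *     img.file("logo.png", pngData, { base64: true });
     *     zip.remove("images");             // drops the folder and its children
     */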
* @see generateInternalStream */ generateNodeStream: function(options, onUpdate) { options = options || {}; if (!options.type) { options.type = "nodebuffer"; } return this.generateInternalStream(options).toNodejsStream(onUpdate); } }; module.exports = out; },{"./compressedObject":2,"./defaults":5,"./generate":9,"./nodejs/NodejsStreamInputAdapter":35,"./nodejsUtils":12,"./stream/GenericWorker":25,"./stream/StreamHelper":26,"./utf8":28,"./utils":29,"./zipObject":32}],14:[function(require,module,exports){ 'use strict'; var DataReader = require('./DataReader'); var utils = require('../utils'); function ArrayReader(data) { DataReader.call(this, data); for(var i = 0; i < this.data.length; i++) { data[i] = data[i] & 0xFF; } } utils.inherits(ArrayReader, DataReader); /** * @see DataReader.byteAt */ ArrayReader.prototype.byteAt = function(i) {