tiny-crypto-suite

Tiny tools, big crypto — seamless encryption and certificate handling for modern web and Node apps.

Bundled output: 1,663 lines (1,395 loc), 2.35 MB
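
The file below is the package's webpack-bundled output. It inlines vendored copies of several well-known libraries (node-forge's SSH and SHA-256 code, what appears to be asn1.js's DER encoder, browserify's cipher and signing shims, and readable-stream). For orientation, here is a minimal sketch of the streaming sign/verify interface that the first bundled module mirrors, written against Node's built-in crypto module; the key size and message are illustrative and nothing below is part of tiny-crypto-suite's own API:

// Sketch of the createSign/createVerify streaming interface mirrored by the
// first bundled module. Uses Node's built-in crypto module only; the key
// parameters and message are illustrative, not taken from tiny-crypto-suite.
const { generateKeyPairSync, createSign, createVerify } = require('crypto');

const { publicKey, privateKey } = generateKeyPairSync('rsa', { modulusLength: 2048 });

// Hash the message incrementally, then sign the digest with the private key.
const signer = createSign('RSA-SHA256');
signer.update('hello world');
const signature = signer.sign(privateKey, 'base64');

// Re-hash the same message and check it against the signature and public key.
const verifier = createVerify('RSA-SHA256');
verifier.update('hello world');
console.log(verifier.verify(publicKey, signature, 'base64')); // true
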
/******/ (() => { // webpackBootstrap /******/ var __webpack_modules__ = ({ /***/ 20: /***/ ((module, __unused_webpack_exports, __webpack_require__) => { "use strict"; var Buffer = (__webpack_require__(2861).Buffer); var createHash = __webpack_require__(7108); var stream = __webpack_require__(6737); var inherits = __webpack_require__(6698); var sign = __webpack_require__(5359); var verify = __webpack_require__(4847); var algorithms = __webpack_require__(2951); Object.keys(algorithms).forEach(function (key) { algorithms[key].id = Buffer.from(algorithms[key].id, 'hex'); algorithms[key.toLowerCase()] = algorithms[key]; }); function Sign(algorithm) { stream.Writable.call(this); var data = algorithms[algorithm]; if (!data) { throw new Error('Unknown message digest'); } this._hashType = data.hash; this._hash = createHash(data.hash); this._tag = data.id; this._signType = data.sign; } inherits(Sign, stream.Writable); Sign.prototype._write = function _write(data, _, done) { this._hash.update(data); done(); }; Sign.prototype.update = function update(data, enc) { this._hash.update(typeof data === 'string' ? Buffer.from(data, enc) : data); return this; }; Sign.prototype.sign = function signMethod(key, enc) { this.end(); var hash = this._hash.digest(); var sig = sign(hash, key, this._hashType, this._signType, this._tag); return enc ? sig.toString(enc) : sig; }; function Verify(algorithm) { stream.Writable.call(this); var data = algorithms[algorithm]; if (!data) { throw new Error('Unknown message digest'); } this._hash = createHash(data.hash); this._tag = data.id; this._signType = data.sign; } inherits(Verify, stream.Writable); Verify.prototype._write = function _write(data, _, done) { this._hash.update(data); done(); }; Verify.prototype.update = function update(data, enc) { this._hash.update(typeof data === 'string' ? Buffer.from(data, enc) : data); return this; }; Verify.prototype.verify = function verifyMethod(key, sig, enc) { var sigBuffer = typeof sig === 'string' ? 
Buffer.from(sig, enc) : sig; this.end(); var hash = this._hash.digest(); return verify(sigBuffer, hash, key, this._signType, this._tag); }; function createSign(algorithm) { return new Sign(algorithm); } function createVerify(algorithm) { return new Verify(algorithm); } module.exports = { Sign: createSign, Verify: createVerify, createSign: createSign, createVerify: createVerify }; /***/ }), /***/ 41: /***/ ((module, __unused_webpack_exports, __webpack_require__) => { "use strict"; var $defineProperty = __webpack_require__(655); var $SyntaxError = __webpack_require__(8068); var $TypeError = __webpack_require__(9675); var gopd = __webpack_require__(5795); /** @type {import('.')} */ module.exports = function defineDataProperty( obj, property, value ) { if (!obj || (typeof obj !== 'object' && typeof obj !== 'function')) { throw new $TypeError('`obj` must be an object or a function`'); } if (typeof property !== 'string' && typeof property !== 'symbol') { throw new $TypeError('`property` must be a string or a symbol`'); } if (arguments.length > 3 && typeof arguments[3] !== 'boolean' && arguments[3] !== null) { throw new $TypeError('`nonEnumerable`, if provided, must be a boolean or null'); } if (arguments.length > 4 && typeof arguments[4] !== 'boolean' && arguments[4] !== null) { throw new $TypeError('`nonWritable`, if provided, must be a boolean or null'); } if (arguments.length > 5 && typeof arguments[5] !== 'boolean' && arguments[5] !== null) { throw new $TypeError('`nonConfigurable`, if provided, must be a boolean or null'); } if (arguments.length > 6 && typeof arguments[6] !== 'boolean') { throw new $TypeError('`loose`, if provided, must be a boolean'); } var nonEnumerable = arguments.length > 3 ? arguments[3] : null; var nonWritable = arguments.length > 4 ? arguments[4] : null; var nonConfigurable = arguments.length > 5 ? arguments[5] : null; var loose = arguments.length > 6 ? arguments[6] : false; /* @type {false | TypedPropertyDescriptor<unknown>} */ var desc = !!gopd && gopd(obj, property); if ($defineProperty) { $defineProperty(obj, property, { configurable: nonConfigurable === null && desc ? desc.configurable : !nonConfigurable, enumerable: nonEnumerable === null && desc ? desc.enumerable : !nonEnumerable, value: value, writable: nonWritable === null && desc ? 
desc.writable : !nonWritable }); } else if (loose || (!nonEnumerable && !nonWritable && !nonConfigurable)) { // must fall back to [[Set]], and was not explicitly asked to make non-enumerable, non-writable, or non-configurable obj[property] = value; // eslint-disable-line no-param-reassign } else { throw new $SyntaxError('This environment does not support defining a property as non-configurable, non-writable, or non-enumerable.'); } }; /***/ }), /***/ 76: /***/ ((module) => { "use strict"; /** @type {import('./functionCall')} */ module.exports = Function.prototype.call; /***/ }), /***/ 82: /***/ ((module, __unused_webpack_exports, __webpack_require__) => { var inherits = __webpack_require__(6698); var Buffer = (__webpack_require__(8287).Buffer); var asn1 = __webpack_require__(7568); var base = asn1.base; // Import DER constants var der = asn1.constants.der; function DEREncoder(entity) { this.enc = 'der'; this.name = entity.name; this.entity = entity; // Construct base tree this.tree = new DERNode(); this.tree._init(entity.body); }; module.exports = DEREncoder; DEREncoder.prototype.encode = function encode(data, reporter) { return this.tree._encode(data, reporter).join(); }; // Tree methods function DERNode(parent) { base.Node.call(this, 'der', parent); } inherits(DERNode, base.Node); DERNode.prototype._encodeComposite = function encodeComposite(tag, primitive, cls, content) { var encodedTag = encodeTag(tag, primitive, cls, this.reporter); // Short form if (content.length < 0x80) { var header = new Buffer(2); header[0] = encodedTag; header[1] = content.length; return this._createEncoderBuffer([ header, content ]); } // Long form // Count octets required to store length var lenOctets = 1; for (var i = content.length; i >= 0x100; i >>= 8) lenOctets++; var header = new Buffer(1 + 1 + lenOctets); header[0] = encodedTag; header[1] = 0x80 | lenOctets; for (var i = 1 + lenOctets, j = content.length; j > 0; i--, j >>= 8) header[i] = j & 0xff; return this._createEncoderBuffer([ header, content ]); }; DERNode.prototype._encodeStr = function encodeStr(str, tag) { if (tag === 'bitstr') { return this._createEncoderBuffer([ str.unused | 0, str.data ]); } else if (tag === 'bmpstr') { var buf = new Buffer(str.length * 2); for (var i = 0; i < str.length; i++) { buf.writeUInt16BE(str.charCodeAt(i), i * 2); } return this._createEncoderBuffer(buf); } else if (tag === 'numstr') { if (!this._isNumstr(str)) { return this.reporter.error('Encoding of string type: numstr supports ' + 'only digits and space'); } return this._createEncoderBuffer(str); } else if (tag === 'printstr') { if (!this._isPrintstr(str)) { return this.reporter.error('Encoding of string type: printstr supports ' + 'only latin upper and lower case letters, ' + 'digits, space, apostrophe, left and rigth ' + 'parenthesis, plus sign, comma, hyphen, ' + 'dot, slash, colon, equal sign, ' + 'question mark'); } return this._createEncoderBuffer(str); } else if (/str$/.test(tag)) { return this._createEncoderBuffer(str); } else if (tag === 'objDesc') { return this._createEncoderBuffer(str); } else { return this.reporter.error('Encoding of string type: ' + tag + ' unsupported'); } }; DERNode.prototype._encodeObjid = function encodeObjid(id, values, relative) { if (typeof id === 'string') { if (!values) return this.reporter.error('string objid given, but no values map found'); if (!values.hasOwnProperty(id)) return this.reporter.error('objid not found in values map'); id = values[id].split(/[\s\.]+/g); for (var i = 0; i < id.length; i++) id[i] |= 0; } else if 
(Array.isArray(id)) { id = id.slice(); for (var i = 0; i < id.length; i++) id[i] |= 0; } if (!Array.isArray(id)) { return this.reporter.error('objid() should be either array or string, ' + 'got: ' + JSON.stringify(id)); } if (!relative) { if (id[1] >= 40) return this.reporter.error('Second objid identifier OOB'); id.splice(0, 2, id[0] * 40 + id[1]); } // Count number of octets var size = 0; for (var i = 0; i < id.length; i++) { var ident = id[i]; for (size++; ident >= 0x80; ident >>= 7) size++; } var objid = new Buffer(size); var offset = objid.length - 1; for (var i = id.length - 1; i >= 0; i--) { var ident = id[i]; objid[offset--] = ident & 0x7f; while ((ident >>= 7) > 0) objid[offset--] = 0x80 | (ident & 0x7f); } return this._createEncoderBuffer(objid); }; function two(num) { if (num < 10) return '0' + num; else return num; } DERNode.prototype._encodeTime = function encodeTime(time, tag) { var str; var date = new Date(time); if (tag === 'gentime') { str = [ two(date.getFullYear()), two(date.getUTCMonth() + 1), two(date.getUTCDate()), two(date.getUTCHours()), two(date.getUTCMinutes()), two(date.getUTCSeconds()), 'Z' ].join(''); } else if (tag === 'utctime') { str = [ two(date.getFullYear() % 100), two(date.getUTCMonth() + 1), two(date.getUTCDate()), two(date.getUTCHours()), two(date.getUTCMinutes()), two(date.getUTCSeconds()), 'Z' ].join(''); } else { this.reporter.error('Encoding ' + tag + ' time is not supported yet'); } return this._encodeStr(str, 'octstr'); }; DERNode.prototype._encodeNull = function encodeNull() { return this._createEncoderBuffer(''); }; DERNode.prototype._encodeInt = function encodeInt(num, values) { if (typeof num === 'string') { if (!values) return this.reporter.error('String int or enum given, but no values map'); if (!values.hasOwnProperty(num)) { return this.reporter.error('Values map doesn\'t contain: ' + JSON.stringify(num)); } num = values[num]; } // Bignum, assume big endian if (typeof num !== 'number' && !Buffer.isBuffer(num)) { var numArray = num.toArray(); if (!num.sign && numArray[0] & 0x80) { numArray.unshift(0); } num = new Buffer(numArray); } if (Buffer.isBuffer(num)) { var size = num.length; if (num.length === 0) size++; var out = new Buffer(size); num.copy(out); if (num.length === 0) out[0] = 0 return this._createEncoderBuffer(out); } if (num < 0x80) return this._createEncoderBuffer(num); if (num < 0x100) return this._createEncoderBuffer([0, num]); var size = 1; for (var i = num; i >= 0x100; i >>= 8) size++; var out = new Array(size); for (var i = out.length - 1; i >= 0; i--) { out[i] = num & 0xff; num >>= 8; } if(out[0] & 0x80) { out.unshift(0); } return this._createEncoderBuffer(new Buffer(out)); }; DERNode.prototype._encodeBool = function encodeBool(value) { return this._createEncoderBuffer(value ? 
0xff : 0); }; DERNode.prototype._use = function use(entity, obj) { if (typeof entity === 'function') entity = entity(obj); return entity._getEncoder('der').tree; }; DERNode.prototype._skipDefault = function skipDefault(dataBuffer, reporter, parent) { var state = this._baseState; var i; if (state['default'] === null) return false; var data = dataBuffer.join(); if (state.defaultBuffer === undefined) state.defaultBuffer = this._encodeValue(state['default'], reporter, parent).join(); if (data.length !== state.defaultBuffer.length) return false; for (i=0; i < data.length; i++) if (data[i] !== state.defaultBuffer[i]) return false; return true; }; // Utility methods function encodeTag(tag, primitive, cls, reporter) { var res; if (tag === 'seqof') tag = 'seq'; else if (tag === 'setof') tag = 'set'; if (der.tagByName.hasOwnProperty(tag)) res = der.tagByName[tag]; else if (typeof tag === 'number' && (tag | 0) === tag) res = tag; else return reporter.error('Unknown tag: ' + tag); if (res >= 0x1f) return reporter.error('Multi-octet tag encoding unsupported'); if (!primitive) res |= 0x20; res |= (der.tagClassByName[cls || 'universal'] << 6); return res; } /***/ }), /***/ 125: /***/ ((__unused_webpack_module, exports, __webpack_require__) => { var DES = __webpack_require__(4050) var aes = __webpack_require__(1241) var aesModes = __webpack_require__(530) var desModes = __webpack_require__(2438) var ebtk = __webpack_require__(8078) function createCipher (suite, password) { suite = suite.toLowerCase() var keyLen, ivLen if (aesModes[suite]) { keyLen = aesModes[suite].key ivLen = aesModes[suite].iv } else if (desModes[suite]) { keyLen = desModes[suite].key * 8 ivLen = desModes[suite].iv } else { throw new TypeError('invalid suite type') } var keys = ebtk(password, false, keyLen, ivLen) return createCipheriv(suite, keys.key, keys.iv) } function createDecipher (suite, password) { suite = suite.toLowerCase() var keyLen, ivLen if (aesModes[suite]) { keyLen = aesModes[suite].key ivLen = aesModes[suite].iv } else if (desModes[suite]) { keyLen = desModes[suite].key * 8 ivLen = desModes[suite].iv } else { throw new TypeError('invalid suite type') } var keys = ebtk(password, false, keyLen, ivLen) return createDecipheriv(suite, keys.key, keys.iv) } function createCipheriv (suite, key, iv) { suite = suite.toLowerCase() if (aesModes[suite]) return aes.createCipheriv(suite, key, iv) if (desModes[suite]) return new DES({ key: key, iv: iv, mode: suite }) throw new TypeError('invalid suite type') } function createDecipheriv (suite, key, iv) { suite = suite.toLowerCase() if (aesModes[suite]) return aes.createDecipheriv(suite, key, iv) if (desModes[suite]) return new DES({ key: key, iv: iv, mode: suite, decrypt: true }) throw new TypeError('invalid suite type') } function getCiphers () { return Object.keys(desModes).concat(aes.getCiphers()) } exports.createCipher = exports.Cipher = createCipher exports.createCipheriv = exports.Cipheriv = createCipheriv exports.createDecipher = exports.Decipher = createDecipher exports.createDecipheriv = exports.Decipheriv = createDecipheriv exports.listCiphers = exports.getCiphers = getCiphers /***/ }), /***/ 149: /***/ ((module, __unused_webpack_exports, __webpack_require__) => { /** * Functions to output keys in SSH-friendly formats. * * This is part of the Forge project which may be used under the terms of * either the BSD License or the GNU General Public License (GPL) Version 2. 
* * See: https://github.com/digitalbazaar/forge/blob/cbebca3780658703d925b61b2caffb1d263a6c1d/LICENSE * * @author https://github.com/shellac */ var forge = __webpack_require__(276); __webpack_require__(9504); __webpack_require__(1696); __webpack_require__(1267); __webpack_require__(1598); __webpack_require__(7619); var ssh = module.exports = forge.ssh = forge.ssh || {}; /** * Encodes (and optionally encrypts) a private RSA key as a Putty PPK file. * * @param privateKey the key. * @param passphrase a passphrase to protect the key (falsy for no encryption). * @param comment a comment to include in the key file. * * @return the PPK file as a string. */ ssh.privateKeyToPutty = function(privateKey, passphrase, comment) { comment = comment || ''; passphrase = passphrase || ''; var algorithm = 'ssh-rsa'; var encryptionAlgorithm = (passphrase === '') ? 'none' : 'aes256-cbc'; var ppk = 'PuTTY-User-Key-File-2: ' + algorithm + '\r\n'; ppk += 'Encryption: ' + encryptionAlgorithm + '\r\n'; ppk += 'Comment: ' + comment + '\r\n'; // public key into buffer for ppk var pubbuffer = forge.util.createBuffer(); _addStringToBuffer(pubbuffer, algorithm); _addBigIntegerToBuffer(pubbuffer, privateKey.e); _addBigIntegerToBuffer(pubbuffer, privateKey.n); // write public key var pub = forge.util.encode64(pubbuffer.bytes(), 64); var length = Math.floor(pub.length / 66) + 1; // 66 = 64 + \r\n ppk += 'Public-Lines: ' + length + '\r\n'; ppk += pub; // private key into a buffer var privbuffer = forge.util.createBuffer(); _addBigIntegerToBuffer(privbuffer, privateKey.d); _addBigIntegerToBuffer(privbuffer, privateKey.p); _addBigIntegerToBuffer(privbuffer, privateKey.q); _addBigIntegerToBuffer(privbuffer, privateKey.qInv); // optionally encrypt the private key var priv; if(!passphrase) { // use the unencrypted buffer priv = forge.util.encode64(privbuffer.bytes(), 64); } else { // encrypt RSA key using passphrase var encLen = privbuffer.length() + 16 - 1; encLen -= encLen % 16; // pad private key with sha1-d data -- needs to be a multiple of 16 var padding = _sha1(privbuffer.bytes()); padding.truncate(padding.length() - encLen + privbuffer.length()); privbuffer.putBuffer(padding); var aeskey = forge.util.createBuffer(); aeskey.putBuffer(_sha1('\x00\x00\x00\x00', passphrase)); aeskey.putBuffer(_sha1('\x00\x00\x00\x01', passphrase)); // encrypt some bytes using CBC mode // key is 40 bytes, so truncate *by* 8 bytes var cipher = forge.aes.createEncryptionCipher(aeskey.truncate(8), 'CBC'); cipher.start(forge.util.createBuffer().fillWithByte(0, 16)); cipher.update(privbuffer.copy()); cipher.finish(); var encrypted = cipher.output; // Note: this appears to differ from Putty -- is forge wrong, or putty? 
// due to padding we finish as an exact multiple of 16 encrypted.truncate(16); // all padding priv = forge.util.encode64(encrypted.bytes(), 64); } // output private key length = Math.floor(priv.length / 66) + 1; // 64 + \r\n ppk += '\r\nPrivate-Lines: ' + length + '\r\n'; ppk += priv; // MAC var mackey = _sha1('putty-private-key-file-mac-key', passphrase); var macbuffer = forge.util.createBuffer(); _addStringToBuffer(macbuffer, algorithm); _addStringToBuffer(macbuffer, encryptionAlgorithm); _addStringToBuffer(macbuffer, comment); macbuffer.putInt32(pubbuffer.length()); macbuffer.putBuffer(pubbuffer); macbuffer.putInt32(privbuffer.length()); macbuffer.putBuffer(privbuffer); var hmac = forge.hmac.create(); hmac.start('sha1', mackey); hmac.update(macbuffer.bytes()); ppk += '\r\nPrivate-MAC: ' + hmac.digest().toHex() + '\r\n'; return ppk; }; /** * Encodes a public RSA key as an OpenSSH file. * * @param key the key. * @param comment a comment. * * @return the public key in OpenSSH format. */ ssh.publicKeyToOpenSSH = function(key, comment) { var type = 'ssh-rsa'; comment = comment || ''; var buffer = forge.util.createBuffer(); _addStringToBuffer(buffer, type); _addBigIntegerToBuffer(buffer, key.e); _addBigIntegerToBuffer(buffer, key.n); return type + ' ' + forge.util.encode64(buffer.bytes()) + ' ' + comment; }; /** * Encodes a private RSA key as an OpenSSH file. * * @param key the key. * @param passphrase a passphrase to protect the key (falsy for no encryption). * * @return the public key in OpenSSH format. */ ssh.privateKeyToOpenSSH = function(privateKey, passphrase) { if(!passphrase) { return forge.pki.privateKeyToPem(privateKey); } // OpenSSH private key is just a legacy format, it seems return forge.pki.encryptRsaPrivateKey(privateKey, passphrase, {legacy: true, algorithm: 'aes128'}); }; /** * Gets the SSH fingerprint for the given public key. * * @param options the options to use. * [md] the message digest object to use (defaults to forge.md.md5). * [encoding] an alternative output encoding, such as 'hex' * (defaults to none, outputs a byte buffer). * [delimiter] the delimiter to use between bytes for 'hex' encoded * output, eg: ':' (defaults to none). * * @return the fingerprint as a byte buffer or other encoding based on options. */ ssh.getPublicKeyFingerprint = function(key, options) { options = options || {}; var md = options.md || forge.md.md5.create(); var type = 'ssh-rsa'; var buffer = forge.util.createBuffer(); _addStringToBuffer(buffer, type); _addBigIntegerToBuffer(buffer, key.e); _addBigIntegerToBuffer(buffer, key.n); // hash public key bytes md.start(); md.update(buffer.getBytes()); var digest = md.digest(); if(options.encoding === 'hex') { var hex = digest.toHex(); if(options.delimiter) { return hex.match(/.{2}/g).join(options.delimiter); } return hex; } else if(options.encoding === 'binary') { return digest.getBytes(); } else if(options.encoding) { throw new Error('Unknown encoding "' + options.encoding + '".'); } return digest; }; /** * Adds len(val) then val to a buffer. * * @param buffer the buffer to add to. * @param val a big integer. */ function _addBigIntegerToBuffer(buffer, val) { var hexVal = val.toString(16); // ensure 2s complement +ve if(hexVal[0] >= '8') { hexVal = '00' + hexVal; } var bytes = forge.util.hexToBytes(hexVal); buffer.putInt32(bytes.length); buffer.putBytes(bytes); } /** * Adds len(val) then val to a buffer. * * @param buffer the buffer to add to. * @param val a string. 
*/ function _addStringToBuffer(buffer, val) { buffer.putInt32(val.length); buffer.putString(val); } /** * Hashes the arguments into one value using SHA-1. * * @return the sha1 hash of the provided arguments. */ function _sha1() { var sha = forge.md.sha1.create(); var num = arguments.length; for (var i = 0; i < num; ++i) { sha.update(arguments[i]); } return sha.digest(); } /***/ }), /***/ 172: /***/ ((module, __unused_webpack_exports, __webpack_require__) => { /** * Secure Hash Algorithm with 256-bit digest (SHA-256) implementation. * * See FIPS 180-2 for details. * * @author Dave Longley * * Copyright (c) 2010-2015 Digital Bazaar, Inc. */ var forge = __webpack_require__(276); __webpack_require__(8106); __webpack_require__(7619); var sha256 = module.exports = forge.sha256 = forge.sha256 || {}; forge.md.sha256 = forge.md.algorithms.sha256 = sha256; /** * Creates a SHA-256 message digest object. * * @return a message digest object. */ sha256.create = function() { // do initialization as necessary if(!_initialized) { _init(); } // SHA-256 state contains eight 32-bit integers var _state = null; // input buffer var _input = forge.util.createBuffer(); // used for word storage var _w = new Array(64); // message digest object var md = { algorithm: 'sha256', blockLength: 64, digestLength: 32, // 56-bit length of message so far (does not including padding) messageLength: 0, // true message length fullMessageLength: null, // size of message length in bytes messageLengthSize: 8 }; /** * Starts the digest. * * @return this digest object. */ md.start = function() { // up to 56-bit message length for convenience md.messageLength = 0; // full message length (set md.messageLength64 for backwards-compatibility) md.fullMessageLength = md.messageLength64 = []; var int32s = md.messageLengthSize / 4; for(var i = 0; i < int32s; ++i) { md.fullMessageLength.push(0); } _input = forge.util.createBuffer(); _state = { h0: 0x6A09E667, h1: 0xBB67AE85, h2: 0x3C6EF372, h3: 0xA54FF53A, h4: 0x510E527F, h5: 0x9B05688C, h6: 0x1F83D9AB, h7: 0x5BE0CD19 }; return md; }; // start digest automatically for first time md.start(); /** * Updates the digest with the given message input. The given input can * treated as raw input (no encoding will be applied) or an encoding of * 'utf8' maybe given to encode the input using UTF-8. * * @param msg the message input to update with. * @param encoding the encoding to use (default: 'raw', other: 'utf8'). * * @return this digest object. */ md.update = function(msg, encoding) { if(encoding === 'utf8') { msg = forge.util.encodeUtf8(msg); } // update message length var len = msg.length; md.messageLength += len; len = [(len / 0x100000000) >>> 0, len >>> 0]; for(var i = md.fullMessageLength.length - 1; i >= 0; --i) { md.fullMessageLength[i] += len[1]; len[1] = len[0] + ((md.fullMessageLength[i] / 0x100000000) >>> 0); md.fullMessageLength[i] = md.fullMessageLength[i] >>> 0; len[0] = ((len[1] / 0x100000000) >>> 0); } // add bytes to input buffer _input.putBytes(msg); // process bytes _update(_state, _w, _input); // compact input buffer every 2K or if empty if(_input.read > 2048 || _input.length() === 0) { _input.compact(); } return md; }; /** * Produces the digest. * * @return a byte buffer containing the digest value. */ md.digest = function() { /* Note: Here we copy the remaining bytes in the input buffer and add the appropriate SHA-256 padding. Then we do the final update on a copy of the state so that if the user wants to get intermediate digests they can do so. 
*/ /* Determine the number of bytes that must be added to the message to ensure its length is congruent to 448 mod 512. In other words, the data to be digested must be a multiple of 512 bits (or 128 bytes). This data includes the message, some padding, and the length of the message. Since the length of the message will be encoded as 8 bytes (64 bits), that means that the last segment of the data must have 56 bytes (448 bits) of message and padding. Therefore, the length of the message plus the padding must be congruent to 448 mod 512 because 512 - 128 = 448. In order to fill up the message length it must be filled with padding that begins with 1 bit followed by all 0 bits. Padding must *always* be present, so if the message length is already congruent to 448 mod 512, then 512 padding bits must be added. */ var finalBlock = forge.util.createBuffer(); finalBlock.putBytes(_input.bytes()); // compute remaining size to be digested (include message length size) var remaining = ( md.fullMessageLength[md.fullMessageLength.length - 1] + md.messageLengthSize); // add padding for overflow blockSize - overflow // _padding starts with 1 byte with first bit is set (byte value 128), then // there may be up to (blockSize - 1) other pad bytes var overflow = remaining & (md.blockLength - 1); finalBlock.putBytes(_padding.substr(0, md.blockLength - overflow)); // serialize message length in bits in big-endian order; since length // is stored in bytes we multiply by 8 and add carry from next int var next, carry; var bits = md.fullMessageLength[0] * 8; for(var i = 0; i < md.fullMessageLength.length - 1; ++i) { next = md.fullMessageLength[i + 1] * 8; carry = (next / 0x100000000) >>> 0; bits += carry; finalBlock.putInt32(bits >>> 0); bits = next >>> 0; } finalBlock.putInt32(bits); var s2 = { h0: _state.h0, h1: _state.h1, h2: _state.h2, h3: _state.h3, h4: _state.h4, h5: _state.h5, h6: _state.h6, h7: _state.h7 }; _update(s2, _w, finalBlock); var rval = forge.util.createBuffer(); rval.putInt32(s2.h0); rval.putInt32(s2.h1); rval.putInt32(s2.h2); rval.putInt32(s2.h3); rval.putInt32(s2.h4); rval.putInt32(s2.h5); rval.putInt32(s2.h6); rval.putInt32(s2.h7); return rval; }; return md; }; // sha-256 padding bytes not initialized yet var _padding = null; var _initialized = false; // table of constants var _k = null; /** * Initializes the constant tables. */ function _init() { // create padding _padding = String.fromCharCode(128); _padding += forge.util.fillString(String.fromCharCode(0x00), 64); // create K table for SHA-256 _k = [ 0x428a2f98, 0x71374491, 0xb5c0fbcf, 0xe9b5dba5, 0x3956c25b, 0x59f111f1, 0x923f82a4, 0xab1c5ed5, 0xd807aa98, 0x12835b01, 0x243185be, 0x550c7dc3, 0x72be5d74, 0x80deb1fe, 0x9bdc06a7, 0xc19bf174, 0xe49b69c1, 0xefbe4786, 0x0fc19dc6, 0x240ca1cc, 0x2de92c6f, 0x4a7484aa, 0x5cb0a9dc, 0x76f988da, 0x983e5152, 0xa831c66d, 0xb00327c8, 0xbf597fc7, 0xc6e00bf3, 0xd5a79147, 0x06ca6351, 0x14292967, 0x27b70a85, 0x2e1b2138, 0x4d2c6dfc, 0x53380d13, 0x650a7354, 0x766a0abb, 0x81c2c92e, 0x92722c85, 0xa2bfe8a1, 0xa81a664b, 0xc24b8b70, 0xc76c51a3, 0xd192e819, 0xd6990624, 0xf40e3585, 0x106aa070, 0x19a4c116, 0x1e376c08, 0x2748774c, 0x34b0bcb5, 0x391c0cb3, 0x4ed8aa4a, 0x5b9cca4f, 0x682e6ff3, 0x748f82ee, 0x78a5636f, 0x84c87814, 0x8cc70208, 0x90befffa, 0xa4506ceb, 0xbef9a3f7, 0xc67178f2]; // now initialized _initialized = true; } /** * Updates a SHA-256 state with the given byte buffer. * * @param s the SHA-256 state to update. * @param w the array to use to store words. * @param bytes the byte buffer to update with. 
*/ function _update(s, w, bytes) { // consume 512 bit (64 byte) chunks var t1, t2, s0, s1, ch, maj, i, a, b, c, d, e, f, g, h; var len = bytes.length(); while(len >= 64) { // the w array will be populated with sixteen 32-bit big-endian words // and then extended into 64 32-bit words according to SHA-256 for(i = 0; i < 16; ++i) { w[i] = bytes.getInt32(); } for(; i < 64; ++i) { // XOR word 2 words ago rot right 17, rot right 19, shft right 10 t1 = w[i - 2]; t1 = ((t1 >>> 17) | (t1 << 15)) ^ ((t1 >>> 19) | (t1 << 13)) ^ (t1 >>> 10); // XOR word 15 words ago rot right 7, rot right 18, shft right 3 t2 = w[i - 15]; t2 = ((t2 >>> 7) | (t2 << 25)) ^ ((t2 >>> 18) | (t2 << 14)) ^ (t2 >>> 3); // sum(t1, word 7 ago, t2, word 16 ago) modulo 2^32 w[i] = (t1 + w[i - 7] + t2 + w[i - 16]) | 0; } // initialize hash value for this chunk a = s.h0; b = s.h1; c = s.h2; d = s.h3; e = s.h4; f = s.h5; g = s.h6; h = s.h7; // round function for(i = 0; i < 64; ++i) { // Sum1(e) s1 = ((e >>> 6) | (e << 26)) ^ ((e >>> 11) | (e << 21)) ^ ((e >>> 25) | (e << 7)); // Ch(e, f, g) (optimized the same way as SHA-1) ch = g ^ (e & (f ^ g)); // Sum0(a) s0 = ((a >>> 2) | (a << 30)) ^ ((a >>> 13) | (a << 19)) ^ ((a >>> 22) | (a << 10)); // Maj(a, b, c) (optimized the same way as SHA-1) maj = (a & b) | (c & (a ^ b)); // main algorithm t1 = h + s1 + ch + _k[i] + w[i]; t2 = s0 + maj; h = g; g = f; f = e; // `>>> 0` necessary to avoid iOS/Safari 10 optimization bug // can't truncate with `| 0` e = (d + t1) >>> 0; d = c; c = b; b = a; // `>>> 0` necessary to avoid iOS/Safari 10 optimization bug // can't truncate with `| 0` a = (t1 + t2) >>> 0; } // update hash state s.h0 = (s.h0 + a) | 0; s.h1 = (s.h1 + b) | 0; s.h2 = (s.h2 + c) | 0; s.h3 = (s.h3 + d) | 0; s.h4 = (s.h4 + e) | 0; s.h5 = (s.h5 + f) | 0; s.h6 = (s.h6 + g) | 0; s.h7 = (s.h7 + h) | 0; len -= 64; } } /***/ }), /***/ 206: /***/ ((module, __unused_webpack_exports, __webpack_require__) => { "use strict"; /* provided dependency */ var process = __webpack_require__(5606); // Copyright Joyent, Inc. and other Node contributors. // // Permission is hereby granted, free of charge, to any person obtaining a // copy of this software and associated documentation files (the // "Software"), to deal in the Software without restriction, including // without limitation the rights to use, copy, modify, merge, publish, // distribute, sublicense, and/or sell copies of the Software, and to permit // persons to whom the Software is furnished to do so, subject to the // following conditions: // // The above copyright notice and this permission notice shall be included // in all copies or substantial portions of the Software. // // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS // OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF // MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN // NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, // DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR // OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE // USE OR OTHER DEALINGS IN THE SOFTWARE. 
/*<replacement>*/ var pna = __webpack_require__(3225); /*</replacement>*/ module.exports = Readable; /*<replacement>*/ var isArray = __webpack_require__(2240); /*</replacement>*/ /*<replacement>*/ var Duplex; /*</replacement>*/ Readable.ReadableState = ReadableState; /*<replacement>*/ var EE = (__webpack_require__(7007).EventEmitter); var EElistenerCount = function (emitter, type) { return emitter.listeners(type).length; }; /*</replacement>*/ /*<replacement>*/ var Stream = __webpack_require__(5567); /*</replacement>*/ /*<replacement>*/ var Buffer = (__webpack_require__(4116).Buffer); var OurUint8Array = (typeof __webpack_require__.g !== 'undefined' ? __webpack_require__.g : typeof window !== 'undefined' ? window : typeof self !== 'undefined' ? self : {}).Uint8Array || function () {}; function _uint8ArrayToBuffer(chunk) { return Buffer.from(chunk); } function _isUint8Array(obj) { return Buffer.isBuffer(obj) || obj instanceof OurUint8Array; } /*</replacement>*/ /*<replacement>*/ var util = Object.create(__webpack_require__(5622)); util.inherits = __webpack_require__(6698); /*</replacement>*/ /*<replacement>*/ var debugUtil = __webpack_require__(2668); var debug = void 0; if (debugUtil && debugUtil.debuglog) { debug = debugUtil.debuglog('stream'); } else { debug = function () {}; } /*</replacement>*/ var BufferList = __webpack_require__(672); var destroyImpl = __webpack_require__(6278); var StringDecoder; util.inherits(Readable, Stream); var kProxyEvents = ['error', 'close', 'destroy', 'pause', 'resume']; function prependListener(emitter, event, fn) { // Sadly this is not cacheable as some libraries bundle their own // event emitter implementation with them. if (typeof emitter.prependListener === 'function') return emitter.prependListener(event, fn); // This is a hack to make sure that our error handler is attached before any // userland ones. NEVER DO THIS. This is here only because this code needs // to continue to work with older versions of Node.js that do not include // the prependListener() method. The goal is to eventually remove this hack. if (!emitter._events || !emitter._events[event]) emitter.on(event, fn);else if (isArray(emitter._events[event])) emitter._events[event].unshift(fn);else emitter._events[event] = [fn, emitter._events[event]]; } function ReadableState(options, stream) { Duplex = Duplex || __webpack_require__(6248); options = options || {}; // Duplex streams are both readable and writable, but share // the same options object. // However, some cases require setting options to different // values for the readable and the writable sides of the duplex stream. // These options can be provided separately as readableXXX and writableXXX. var isDuplex = stream instanceof Duplex; // object stream flag. Used to make read(n) ignore n and to // make all the buffer merging and length checks go away this.objectMode = !!options.objectMode; if (isDuplex) this.objectMode = this.objectMode || !!options.readableObjectMode; // the point at which it stops calling _read() to fill the buffer // Note: 0 is a valid value, means "don't call _read preemptively ever" var hwm = options.highWaterMark; var readableHwm = options.readableHighWaterMark; var defaultHwm = this.objectMode ? 16 : 16 * 1024; if (hwm || hwm === 0) this.highWaterMark = hwm;else if (isDuplex && (readableHwm || readableHwm === 0)) this.highWaterMark = readableHwm;else this.highWaterMark = defaultHwm; // cast to ints. 
this.highWaterMark = Math.floor(this.highWaterMark); // A linked list is used to store data chunks instead of an array because the // linked list can remove elements from the beginning faster than // array.shift() this.buffer = new BufferList(); this.length = 0; this.pipes = null; this.pipesCount = 0; this.flowing = null; this.ended = false; this.endEmitted = false; this.reading = false; // a flag to be able to tell if the event 'readable'/'data' is emitted // immediately, or on a later tick. We set this to true at first, because // any actions that shouldn't happen until "later" should generally also // not happen before the first read call. this.sync = true; // whenever we return null, then we set a flag to say // that we're awaiting a 'readable' event emission. this.needReadable = false; this.emittedReadable = false; this.readableListening = false; this.resumeScheduled = false; // has it been destroyed this.destroyed = false; // Crypto is kind of old and crusty. Historically, its default string // encoding is 'binary' so we have to make this configurable. // Everything else in the universe uses 'utf8', though. this.defaultEncoding = options.defaultEncoding || 'utf8'; // the number of writers that are awaiting a drain event in .pipe()s this.awaitDrain = 0; // if true, a maybeReadMore has been scheduled this.readingMore = false; this.decoder = null; this.encoding = null; if (options.encoding) { if (!StringDecoder) StringDecoder = (__webpack_require__(6427)/* .StringDecoder */ .I); this.decoder = new StringDecoder(options.encoding); this.encoding = options.encoding; } } function Readable(options) { Duplex = Duplex || __webpack_require__(6248); if (!(this instanceof Readable)) return new Readable(options); this._readableState = new ReadableState(options, this); // legacy this.readable = true; if (options) { if (typeof options.read === 'function') this._read = options.read; if (typeof options.destroy === 'function') this._destroy = options.destroy; } Stream.call(this); } Object.defineProperty(Readable.prototype, 'destroyed', { get: function () { if (this._readableState === undefined) { return false; } return this._readableState.destroyed; }, set: function (value) { // we ignore the value if the stream // has not been initialized yet if (!this._readableState) { return; } // backward compatibility, the user is explicitly // managing destroyed this._readableState.destroyed = value; } }); Readable.prototype.destroy = destroyImpl.destroy; Readable.prototype._undestroy = destroyImpl.undestroy; Readable.prototype._destroy = function (err, cb) { this.push(null); cb(err); }; // Manually shove something into the read() buffer. // This returns true if the highWaterMark has not been hit yet, // similar to how Writable.write() returns true if you should // write() some more. 
Readable.prototype.push = function (chunk, encoding) { var state = this._readableState; var skipChunkCheck; if (!state.objectMode) { if (typeof chunk === 'string') { encoding = encoding || state.defaultEncoding; if (encoding !== state.encoding) { chunk = Buffer.from(chunk, encoding); encoding = ''; } skipChunkCheck = true; } } else { skipChunkCheck = true; } return readableAddChunk(this, chunk, encoding, false, skipChunkCheck); }; // Unshift should *always* be something directly out of read() Readable.prototype.unshift = function (chunk) { return readableAddChunk(this, chunk, null, true, false); }; function readableAddChunk(stream, chunk, encoding, addToFront, skipChunkCheck) { var state = stream._readableState; if (chunk === null) { state.reading = false; onEofChunk(stream, state); } else { var er; if (!skipChunkCheck) er = chunkInvalid(state, chunk); if (er) { stream.emit('error', er); } else if (state.objectMode || chunk && chunk.length > 0) { if (typeof chunk !== 'string' && !state.objectMode && Object.getPrototypeOf(chunk) !== Buffer.prototype) { chunk = _uint8ArrayToBuffer(chunk); } if (addToFront) { if (state.endEmitted) stream.emit('error', new Error('stream.unshift() after end event'));else addChunk(stream, state, chunk, true); } else if (state.ended) { stream.emit('error', new Error('stream.push() after EOF')); } else { state.reading = false; if (state.decoder && !encoding) { chunk = state.decoder.write(chunk); if (state.objectMode || chunk.length !== 0) addChunk(stream, state, chunk, false);else maybeReadMore(stream, state); } else { addChunk(stream, state, chunk, false); } } } else if (!addToFront) { state.reading = false; } } return needMoreData(state); } function addChunk(stream, state, chunk, addToFront) { if (state.flowing && state.length === 0 && !state.sync) { stream.emit('data', chunk); stream.read(0); } else { // update the buffer info. state.length += state.objectMode ? 1 : chunk.length; if (addToFront) state.buffer.unshift(chunk);else state.buffer.push(chunk); if (state.needReadable) emitReadable(stream); } maybeReadMore(stream, state); } function chunkInvalid(state, chunk) { var er; if (!_isUint8Array(chunk) && typeof chunk !== 'string' && chunk !== undefined && !state.objectMode) { er = new TypeError('Invalid non-string/buffer chunk'); } return er; } // if it's past the high water mark, we can push in some more. // Also, if we have no data yet, we can stand some // more bytes. This is to work around cases where hwm=0, // such as the repl. Also, if the push() triggered a // readable event, and the user called read(largeNumber) such that // needReadable was set, then we ought to push more, so that another // 'readable' event will be triggered. function needMoreData(state) { return !state.ended && (state.needReadable || state.length < state.highWaterMark || state.length === 0); } Readable.prototype.isPaused = function () { return this._readableState.flowing === false; }; // backwards compatibility. 
Readable.prototype.setEncoding = function (enc) { if (!StringDecoder) StringDecoder = (__webpack_require__(6427)/* .StringDecoder */ .I); this._readableState.decoder = new StringDecoder(enc); this._readableState.encoding = enc; return this; }; // Don't raise the hwm > 8MB var MAX_HWM = 0x800000; function computeNewHighWaterMark(n) { if (n >= MAX_HWM) { n = MAX_HWM; } else { // Get the next highest power of 2 to prevent increasing hwm excessively in // tiny amounts n--; n |= n >>> 1; n |= n >>> 2; n |= n >>> 4; n |= n >>> 8; n |= n >>> 16; n++; } return n; } // This function is designed to be inlinable, so please take care when making // changes to the function body. function howMuchToRead(n, state) { if (n <= 0 || state.length === 0 && state.ended) return 0; if (state.objectMode) return 1; if (n !== n) { // Only flow one buffer at a time if (state.flowing && state.length) return state.buffer.head.data.length;else return state.length; } // If we're asking for more than the current hwm, then raise the hwm. if (n > state.highWaterMark) state.highWaterMark = computeNewHighWaterMark(n); if (n <= state.length) return n; // Don't have enough if (!state.ended) { state.needReadable = true; return 0; } return state.length; } // you can override either this method, or the async _read(n) below. Readable.prototype.read = function (n) { debug('read', n); n = parseInt(n, 10); var state = this._readableState; var nOrig = n; if (n !== 0) state.emittedReadable = false; // if we're doing read(0) to trigger a readable event, but we // already have a bunch of data in the buffer, then just trigger // the 'readable' event and move on. if (n === 0 && state.needReadable && (state.length >= state.highWaterMark || state.ended)) { debug('read: emitReadable', state.length, state.ended); if (state.length === 0 && state.ended) endReadable(this);else emitReadable(this); return null; } n = howMuchToRead(n, state); // if we've ended, and we're now clear, then finish it up. if (n === 0 && state.ended) { if (state.length === 0) endReadable(this); return null; } // All the actual chunk generation logic needs to be // *below* the call to _read. The reason is that in certain // synthetic stream cases, such as passthrough streams, _read // may be a completely synchronous operation which may change // the state of the read buffer, providing enough data when // before there was *not* enough. // // So, the steps are: // 1. Figure out what the state of things will be after we do // a read from the buffer. // // 2. If that resulting state will trigger a _read, then call _read. // Note that this may be asynchronous, or synchronous. Yes, it is // deeply ugly to write APIs this way, but that still doesn't mean // that the Readable class should behave improperly, as streams are // designed to be sync/async agnostic. // Take note if the _read call is sync or async (ie, if the read call // has returned yet), so that we know whether or not it's safe to emit // 'readable' etc. // // 3. Actually pull the requested chunks out of the buffer and return. // if we need a readable event, then we need to do some reading. var doRead = state.needReadable; debug('need readable', doRead); // if we currently have less than the highWaterMark, then also read some if (state.length === 0 || state.length - n < state.highWaterMark) { doRead = true; debug('length less than watermark', doRead); } // however, if we've ended, then there's no point, and if we're already // reading, then it's unnecessary. 
if (state.ended || state.reading) { doRead = false; debug('reading or ended', doRead); } else if (doRead) { debug('do read'); state.reading = true; state.sync = true; // if the length is currently zero, then we *need* a readable event. if (state.length === 0) state.needReadable = true; // call internal read method this._read(state.highWaterMark); state.sync = false; // If _read pushed data synchronously, then `reading` will be false, // and we need to re-evaluate how much data we can return to the user. if (!state.reading) n = howMuchToRead(nOrig, state); } var ret; if (n > 0) ret = fromList(n, state);else ret = null; if (ret === null) { state.needReadable = true; n = 0; } else { state.length -= n; } if (state.length === 0) { // If we have nothing in the buffer, then we want to know // as soon as we *do* get something into the buffer. if (!state.ended) state.needReadable = true; // If we tried to read() past the EOF, then emit end on the next tick. if (nOrig !== n && state.ended) endReadable(this); } if (ret !== null) this.emit('data', ret); return ret; }; function onEofChunk(stream, state) { if (state.ended) return; if (state.decoder) { var chunk = state.decoder.end(); if (chunk && chunk.length) { state.buffer.push(chunk); state.length += state.objectMode ? 1 : chunk.length; } } state.ended = true; // emit 'readable' now to make sure it gets picked up. emitReadable(stream); } // Don't emit readable right away in sync mode, because this can trigger // another read() call => stack overflow. This way, it might trigger // a nextTick recursion warning, but that's not so bad. function emitReadable(stream) { var state = stream._readableState; state.needReadable = false; if (!state.emittedReadable) { debug('emitReadable', state.flowing); state.emittedReadable = true; if (state.sync) pna.nextTick(emitReadable_, stream);else emitReadable_(stream); } } function emitReadable_(stream) { debug('emit readable'); stream.emit('readable'); flow(stream); } // at this point, the user has presumably seen the 'readable' event, // and called read() to consume some data. that may have triggered // in turn another _read(n) call, in which case reading = true if // it's in progress. // However, if we're not ended, or reading, and the length < hwm, // then go ahead and try to read some more preemptively. function maybeReadMore(stream, state) { if (!state.readingMore) { state.readingMore = true; pna.nextTick(maybeReadMore_, stream, state); } } function maybeReadMore_(stream, state) { var len = state.length; while (!state.reading && !state.flowing && !state.ended && state.length < state.highWaterMark) { debug('maybeReadMore read 0'); stream.read(0); if (len === state.