snyk
Version:
snyk library and cli utility
1,627 lines (1,417 loc) • 351 kB
JavaScript
exports.id = 923;
exports.ids = [923];
exports.modules = {
/***/ 87315:
/***/ ((__unused_webpack_module, exports, __webpack_require__) => {
"use strict";
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.checkSupport = void 0;
const contentTypes = __webpack_require__(42625);
const registry_call_1 = __webpack_require__(15271);
/**
 * Calls the root V2 endpoint; useful to check V2 support and to validate
 * the supplied credentials.
 * @returns the registry response, or an empty object for registries (e.g.
 * ECR) that answer with an empty body.
 */
async function checkSupport(registryBase, username, password, options = {}) {
    // The full URL path should be "/v2/" as per the Docker Registry V2 spec.
    const response = await registry_call_1.registryV2Call(registryBase, "/", contentTypes.JSON, username, password, options);
    // Always hand back a truthy object, even for ECR.
    return response ? response : {};
}
exports.checkSupport = checkSupport;
//# sourceMappingURL=check-support.js.map
/***/ }),
/***/ 42625:
/***/ ((__unused_webpack_module, exports) => {
"use strict";
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.JSON = exports.LAYER = exports.IMAGE_CONFIG = exports.MANIFEST_LIST_V2 = exports.MANIFEST_V2 = void 0;
// Docker Registry V2 media types, used as Accept/Content-Type header values.
// A single-image (schema 2) manifest.
exports.MANIFEST_V2 = "application/vnd.docker.distribution.manifest.v2+json";
// A multi-architecture manifest list ("fat manifest").
exports.MANIFEST_LIST_V2 = "application/vnd.docker.distribution.manifest.list.v2+json";
// The image configuration JSON blob.
exports.IMAGE_CONFIG = "application/vnd.docker.container.image.v1+json";
// A gzipped image layer tarball.
exports.LAYER = "application/vnd.docker.image.rootfs.diff.tar.gzip";
// Plain JSON, used for non-manifest API calls.
exports.JSON = "application/json";
//# sourceMappingURL=content-types.js.map
/***/ }),
/***/ 80290:
/***/ ((__unused_webpack_module, exports, __webpack_require__) => {
"use strict";
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.getAuthTokenForEndpoint = void 0;
const contentTypes = __webpack_require__(42625);
const registry_call_1 = __webpack_require__(15271);
const needle_1 = __webpack_require__(48360);
/**
 * getAuthTokenForEndpoint attempts to retrieve a token for an unauthorized endpoint.
 * It makes a request to the endpoint and, if that returns an Unauthorized (401)
 * response with a 'www-authenticate' header, makes a consecutive call according
 * to the details within that header.
 * More info: https://docs.docker.com/registry/spec/auth/token/#how-to-authenticate
 * @param registryBase - the container registry base URL
 * @param endpoint - the endpoint to which we would like to make the request
 * @param username - the username for authentication
 * @param password - the password for authentication
 * @param reqOptions - http request options
 * @returns an auth token string if authentication succeeded, or null if
 * authorization is not required / is not based on a bearer token
 * @throws an error if the http error status code is not 401
 */
async function getAuthTokenForEndpoint(registryBase, endpoint, username, password, reqOptions = {}) {
    const reqConfig = registry_call_1.buildUnauthenticatedV2RequestConfig(registryBase, endpoint, contentTypes.JSON, reqOptions);
    try {
        await needle_1.needleWrapper(reqConfig);
        // The endpoint is reachable without credentials: no token needed.
        return null;
    }
    catch (err) {
        if (err.statusCode !== 401) {
            throw err;
        }
        const challengeHeaders = err.headers["www-authenticate"];
        if (!challengeHeaders) {
            return null;
        }
        const [authBase, service, scope] = registry_call_1.parseChallengeHeaders(challengeHeaders);
        if (!authBase) {
            // No realm in the challenge: not a bearer-token flow.
            return null;
        }
        // Bearer token flow.
        return registry_call_1.getToken(registryBase, authBase, service, scope, username, password, reqOptions);
    }
}
exports.getAuthTokenForEndpoint = getAuthTokenForEndpoint;
//# sourceMappingURL=get-auth-token-for-endpoint.js.map
/***/ }),
/***/ 50702:
/***/ ((__unused_webpack_module, exports, __webpack_require__) => {
"use strict";
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.getImageConfig = void 0;
const registry_call_1 = __webpack_require__(15271);
const contentTypes = __webpack_require__(42625);
const needle_1 = __webpack_require__(48360);
/**
 * Fetches and parses the image configuration blob for the given digest.
 */
async function getImageConfig(registryBase, repo, digest, username, password, options = {}) {
    const response = await registry_call_1.registryV2Call(registryBase, `/${repo}/blobs/${digest}`, contentTypes.IMAGE_CONFIG, username, password, options);
    return needle_1.parseResponseBody(response);
}
exports.getImageConfig = getImageConfig;
//# sourceMappingURL=get-image-config.js.map
/***/ }),
/***/ 70395:
/***/ ((__unused_webpack_module, exports, __webpack_require__) => {
"use strict";
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.getImageSize = void 0;
const get_manifest_1 = __webpack_require__(32603);
/**
 * Computes the total compressed image size as the sum of its layer sizes,
 * as reported by the image manifest.
 */
async function getImageSize(registryBase, repo, tag, username, password, options = {}) {
    const manifest = await get_manifest_1.getManifest(registryBase, repo, tag, username, password, options);
    let totalSize = 0;
    for (const layerConfig of manifest.layers || []) {
        totalSize += layerConfig.size;
    }
    return totalSize;
}
exports.getImageSize = getImageSize;
//# sourceMappingURL=get-image-size.js.map
/***/ }),
/***/ 78924:
/***/ ((__unused_webpack_module, exports, __webpack_require__) => {
"use strict";
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.getLayer = void 0;
const registry_call_1 = __webpack_require__(15271);
const contentTypes = __webpack_require__(42625);
/**
 * Downloads a layer blob and returns it as a Buffer.
 */
async function getLayer(registryBase, repo, digest, username, password, options = {}) {
    const acceptLayer = options.acceptLayer;
    const accept = `${acceptLayer !== null && acceptLayer !== void 0 ? acceptLayer : contentTypes.LAYER}`;
    const requestOptions = Object.assign({ json: false, encoding: null }, options);
    const layerResponse = await registry_call_1.registryV2Call(registryBase, `/${repo}/blobs/${digest}`, accept, username, password, requestOptions);
    if (layerResponse.body instanceof Buffer) {
        return layerResponse.body;
    }
    // When the content type is wrongly set to text instead of binary, the
    // response body is returned as a string instead of a buffer; force a
    // buffer response in that case.
    return Buffer.from(layerResponse.raw);
}
exports.getLayer = getLayer;
//# sourceMappingURL=get-layer.js.map
/***/ }),
/***/ 32603:
/***/ ((__unused_webpack_module, exports, __webpack_require__) => {
"use strict";
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.computeDigest = exports.getManifest = void 0;
const registry_call_1 = __webpack_require__(15271);
const contentTypes = __webpack_require__(42625);
const needle_1 = __webpack_require__(48360);
const crypto_1 = __webpack_require__(76417);
const digestRegex = /^sha256:[a-f0-9]{64}$/i;
/**
 * A request to get an image manifest by tag or digest.
 * @param registryBase the hostname (and optionally base path) from which to get the manifest.
 * @param repo the requested image repository.
 * @param imageReference the manifest tag or digest value.
 * @param username optional, username to be used for basic auth.
 * @param password optional, password to be used for basic auth.
 * @param options optional, request options.
 * @param platform optional, the manifest platform (os and architecture), applicable only for
 * multi-architecture manifests. Defaults to linux/amd64. In case the requested manifest
 * platform is not found an error is thrown.
 * @returns the parsed manifest, extended with `indexDigest` (set only for
 * manifest lists) and `manifestDigest`.
 */
async function getManifest(registryBase, repo, imageReference, username, password, options = {}, platform) {
    var _a;
    const accept = (_a = options.acceptManifest) !== null && _a !== void 0 ? _a : `${contentTypes.MANIFEST_V2}, ${contentTypes.MANIFEST_LIST_V2}`;
    const endpoint = `/${repo}/manifests/${imageReference}`;
    let manifestResponse = await registry_call_1.registryV2Call(registryBase, endpoint, accept, username, password, options);
    const contentType = manifestResponse.headers["content-type"];
    let indexDigest;
    let manifestDigest;
    if (contentType === contentTypes.MANIFEST_LIST_V2) {
        indexDigest = computeDigest(manifestResponse.body);
        manifestDigest = getManifestDigestByPlatform(manifestResponse, platform);
        // need to call again with the actual manifest (and not a list of manifest-lists)
        const platformEndpoint = `/${repo}/manifests/${manifestDigest}`;
        manifestResponse = await registry_call_1.registryV2Call(registryBase, platformEndpoint, accept, username, password, options);
    }
    // BUGFIX: Node.js lower-cases all incoming header names, so the previous
    // lookup of "Docker-Content-Digest" always yielded undefined (note the
    // lower-case lookups of "content-type"/"www-authenticate" elsewhere in
    // this file). Use the lower-case key, keeping the original spelling as a
    // defensive fallback.
    const dockerContentDigest = manifestResponse.headers["docker-content-digest"] ||
        manifestResponse.headers["Docker-Content-Digest"];
    switch (true) {
        case manifestDigest != undefined:
            // Already resolved from the manifest list above.
            break;
        case isManifestDigest(imageReference):
            // The caller asked for a digest directly: reuse it.
            manifestDigest = imageReference;
            break;
        case isManifestDigest(dockerContentDigest):
            manifestDigest = dockerContentDigest;
            break;
        default:
            // No digest available from the registry: compute it from the body.
            manifestDigest = computeDigest(manifestResponse.body);
    }
    const parsedBody = needle_1.parseResponseBody(manifestResponse);
    return Object.assign(Object.assign({}, parsedBody), { indexDigest, manifestDigest });
}
exports.getManifest = getManifest;
// True when the image reference is a content digest ("sha256:<64 hex chars>")
// rather than a tag name.
function isManifestDigest(imageReference) {
    return /^sha256:[a-f0-9]{64}$/i.test(imageReference);
}
/**
 * Computes the canonical content digest ("sha256:<hex>") of a manifest body.
 * Returns undefined for an empty/missing body; non-string bodies are
 * serialized to JSON before hashing.
 */
function computeDigest(body) {
    if (!body) {
        return undefined;
    }
    const payload = typeof body === "string" ? body : JSON.stringify(body);
    const hexDigest = crypto_1
        .createHash("sha256")
        .update(payload)
        .digest("hex")
        .toLowerCase();
    return `sha256:${hexDigest}`;
}
exports.computeDigest = computeDigest;
// Resolves the digest of the platform-specific manifest inside a manifest
// list response; defaults to linux/amd64 and throws when no entry matches.
function getManifestDigestByPlatform(manifestResponse, platform) {
    const platformFilter = platform || { os: "linux", architecture: "amd64" };
    const manifestList = needle_1.parseResponseBody(manifestResponse);
    const matching = getManifestByOsAndArch(manifestList.manifests, platformFilter.os, platformFilter.architecture);
    if (!matching) {
        throw new Error(`no supported manifest found for platform: ${JSON.stringify(platformFilter)}`);
    }
    return matching.digest;
}
// Finds the manifest-list entry whose platform matches the given os and
// architecture, or undefined when none does.
function getManifestByOsAndArch(platformManifest, os, architecture) {
    return platformManifest.find(entry => {
        const p = entry.platform;
        return p.os === os && p.architecture === architecture;
    });
}
//# sourceMappingURL=get-manifest.js.map
/***/ }),
/***/ 73905:
/***/ ((__unused_webpack_module, exports, __webpack_require__) => {
"use strict";
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.getRepos = void 0;
const contentTypes = __webpack_require__(42625);
const registry_call_1 = __webpack_require__(15271);
/**
 * Lists repositories via the paginated /v2/_catalog endpoint.
 */
async function getRepos(registryBase, username, password, pageSize = 100, maxPages = Number.MAX_SAFE_INTEGER, options = {}) {
    return registry_call_1.paginatedV2Call(registryBase, contentTypes.JSON, username, password, "/_catalog", "repositories", pageSize, maxPages, options);
}
exports.getRepos = getRepos;
//# sourceMappingURL=get-repos.js.map
/***/ }),
/***/ 80353:
/***/ ((__unused_webpack_module, exports, __webpack_require__) => {
"use strict";
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.getTags = void 0;
const registry_call_1 = __webpack_require__(15271);
const contentTypes = __webpack_require__(42625);
/**
 * Lists the tags of a repository via the paginated tags/list endpoint.
 */
async function getTags(registryBase, repo, username, password, pageSize = 1000, maxPages = Number.MAX_SAFE_INTEGER, options = {}) {
    return registry_call_1.paginatedV2Call(registryBase, contentTypes.JSON, username, password, `/${repo}/tags/list`, "tags", pageSize, maxPages, options);
}
exports.getTags = getTags;
//# sourceMappingURL=get-tags.js.map
/***/ }),
/***/ 28310:
/***/ ((__unused_webpack_module, exports, __webpack_require__) => {
"use strict";
Object.defineProperty(exports, "__esModule", ({ value: true }));
// Public API surface: re-exports the per-endpoint modules and shared types.
exports.types = exports.registryCall = exports.getTags = exports.getRepos = exports.getManifest = exports.getLayer = exports.getImageSize = exports.getImageConfig = exports.getAuthTokenForEndpoint = exports.checkSupport = void 0;
const check_support_1 = __webpack_require__(87315);
Object.defineProperty(exports, "checkSupport", ({ enumerable: true, get: function () { return check_support_1.checkSupport; } }));
const get_auth_token_for_endpoint_1 = __webpack_require__(80290);
Object.defineProperty(exports, "getAuthTokenForEndpoint", ({ enumerable: true, get: function () { return get_auth_token_for_endpoint_1.getAuthTokenForEndpoint; } }));
const get_image_config_1 = __webpack_require__(50702);
Object.defineProperty(exports, "getImageConfig", ({ enumerable: true, get: function () { return get_image_config_1.getImageConfig; } }));
const get_image_size_1 = __webpack_require__(70395);
Object.defineProperty(exports, "getImageSize", ({ enumerable: true, get: function () { return get_image_size_1.getImageSize; } }));
const get_layer_1 = __webpack_require__(78924);
Object.defineProperty(exports, "getLayer", ({ enumerable: true, get: function () { return get_layer_1.getLayer; } }));
const get_manifest_1 = __webpack_require__(32603);
Object.defineProperty(exports, "getManifest", ({ enumerable: true, get: function () { return get_manifest_1.getManifest; } }));
const get_repos_1 = __webpack_require__(73905);
Object.defineProperty(exports, "getRepos", ({ enumerable: true, get: function () { return get_repos_1.getRepos; } }));
const get_tags_1 = __webpack_require__(80353);
Object.defineProperty(exports, "getTags", ({ enumerable: true, get: function () { return get_tags_1.getTags; } }));
const registry_call_1 = __webpack_require__(15271);
Object.defineProperty(exports, "registryCall", ({ enumerable: true, get: function () { return registry_call_1.registryCall; } }));
const types = __webpack_require__(47235);
exports.types = types;
//# sourceMappingURL=index.js.map
/***/ }),
/***/ 48360:
/***/ ((__unused_webpack_module, exports, __webpack_require__) => {
"use strict";
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.parseResponseBody = exports.needleWrapper = void 0;
const needle = __webpack_require__(64484);
// TODO: this is a temporary code that allows setting needle default timeout (alias for
// open_timeout) to check how it affects the stability of our system, and specifically
// if it helps reducing 'socket hang up' errors.
// More information: https://www.npmjs.com/package/needle
if (process.env.DOCKER_REGISTRY_V2_CLIENT_NEEDLE_OPEN_TIMEOUT) {
    // Always pass an explicit radix to parseInt; non-integer values are
    // rejected by the Number.isInteger guard below.
    const openTimeout = parseInt(process.env.DOCKER_REGISTRY_V2_CLIENT_NEEDLE_OPEN_TIMEOUT, 10);
    if (Number.isInteger(openTimeout)) {
        needle.defaults({
            timeout: openTimeout,
        });
    }
}
/**
 * A wrapper that uses `needle` for making HTTP GET requests and returns a
 * response shaped like the one previously produced by the `request` library.
 * @param options request options: `uri`, optional `qs` query params, plus any
 * needle options (headers, json, encoding, ...)
 * @param maxRetries optional number of extra attempts after a transport-level
 * failure (defaults to 0, i.e. a single attempt); non-2xx responses are NOT
 * retried
 * @throws NeedleWrapperException when the response status code is >= 300
 */
async function needleWrapper(options, maxRetries) {
    var _a, _b;
    let uri = options.uri;
    // append query parameters
    if (options.qs) {
        for (const key in options.qs) {
            if (options.qs[key] !== undefined) {
                // NOTE(review): values are appended without URL-encoding;
                // assumes callers pass URL-safe values (registry service/scope
                // strings) — TODO confirm.
                uri += `&${key}=${options.qs[key]}`;
            }
        }
        // The first appended separator must be "?" unless the uri already had
        // a query string; replace() rewrites only the first "&".
        if (!uri.includes("?")) {
            uri = uri.replace("&", "?");
        }
    }
    let response;
    let lastError;
    let retries = maxRetries !== null && maxRetries !== void 0 ? maxRetries : 0;
    // Retry only thrown (transport) errors until the budget is spent.
    while (!response && retries >= 0) {
        retries--;
        try {
            response = await needle("get", uri, options);
        }
        catch (err) {
            lastError = err;
        }
    }
    if (!response) {
        throw lastError;
    }
    // throw an error in case status code is not 2xx
    if (response && response.statusCode >= 300) {
        let message;
        // Prefer the first structured registry error; fall back to the raw body.
        if (((_b = (_a = response.body) === null || _a === void 0 ? void 0 : _a.errors) === null || _b === void 0 ? void 0 : _b.length) > 0) {
            message = response.body.errors[0].message;
        }
        else {
            message = response.body;
        }
        // Best-effort stringification; an unserializable body is silently kept.
        if (typeof message !== "string") {
            try {
                message = JSON.stringify(message);
                // eslint-disable-next-line no-empty
            }
            catch (_) { }
        }
        throw new NeedleWrapperException(message, response.statusCode, response.headers);
    }
    return response;
}
exports.needleWrapper = needleWrapper;
/**
 * Parses a response body as JSON, returning the body unchanged when it is
 * not valid JSON (e.g. already parsed by needle, or plain text).
 */
function parseResponseBody(response) {
    try {
        return JSON.parse(response.body);
    }
    catch (_err) {
        return response.body;
    }
}
exports.parseResponseBody = parseResponseBody;
/**
 * Error thrown for non-2xx responses; carries the HTTP status code and the
 * response headers so callers can inspect auth challenges and redirects.
 */
class NeedleWrapperException extends Error {
    constructor(message, statusCode, headers) {
        super(message);
        Object.assign(this, { statusCode, headers });
    }
}
//# sourceMappingURL=needle.js.map
/***/ }),
/***/ 15271:
/***/ ((__unused_webpack_module, exports, __webpack_require__) => {
"use strict";
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.buildUnauthenticatedV2RequestConfig = exports.parseChallengeHeaders = exports.getToken = exports.paginatedV2Call = exports.registryCall = exports.registryV2Call = void 0;
const parseLink = __webpack_require__(54336);
const url = __webpack_require__(78835);
const needle_1 = __webpack_require__(48360);
const BEARER_REALM = "Bearer realm";
const MAX_RETRIES = 1;
/**
 * Makes an authenticated-as-needed call to a Docker Registry V2 endpoint.
 * On 401, builds credentials/bearer-token config from the challenge header
 * and retries; 302/307 responses are followed manually (see handleRedirect).
 */
async function registryV2Call(registryBase, endpoint, accept, username, password, reqOptions = {}) {
    const reqConfig = buildUnauthenticatedV2RequestConfig(registryBase, endpoint, accept, reqOptions);
    try {
        return await needle_1.needleWrapper(reqConfig, MAX_RETRIES);
    }
    catch (err) {
        if (err.statusCode === 401) {
            // Supply an empty username/password when no credentials are
            // provided; these might be added later by a broker client.
            const authConfig = await setAuthConfig(registryBase, err, reqConfig, username || "", password || "", reqOptions);
            try {
                return await needle_1.needleWrapper(authConfig, MAX_RETRIES);
            }
            catch (authErr) {
                if (authErr.statusCode === 307 || authErr.statusCode === 302) {
                    return await handleRedirect(authErr, reqConfig);
                }
                throw authErr;
            }
        }
        if (err.statusCode === 307 || err.statusCode === 302) {
            return await handleRedirect(err, reqConfig);
        }
        throw err;
    }
}
exports.registryV2Call = registryV2Call;
/**
 * WARNING!!!
 *
 * This function was created for a very specific usecase (https://snyksec.atlassian.net/browse/MAGMA-1262)
 * It uses the existing mechanism of obtaining a token for authenticating, but can be used to hit any API endpoint,
 * and not necessarily a Docker V2 endpoint.
 * This is clearly an abuse of a library that's named after the v2 API, and this function should be considered a tech debt.
 * Once it's no longer necessary, it is advised that this function is removed.
 *
 */
async function registryCall(uri, username, password, reqOptions = {}) {
    const reqConfig = applyRequestOptions({ uri: `https://${uri}` }, reqOptions);
    try {
        return await needle_1.needleWrapper(reqConfig, MAX_RETRIES);
    }
    catch (err) {
        if (err.statusCode === 401) {
            // Supply an empty username/password when no credentials are
            // provided; these might be added later by a broker client.
            const authConfig = await setAuthConfig("", err, reqConfig, username || "", password || "", reqOptions);
            try {
                return await needle_1.needleWrapper(authConfig, MAX_RETRIES);
            }
            catch (authErr) {
                if (authErr.statusCode === 307 || authErr.statusCode === 302) {
                    return await handleRedirect(authErr, reqConfig);
                }
                throw authErr;
            }
        }
        if (err.statusCode === 307 || err.statusCode === 302) {
            return await handleRedirect(err, reqConfig);
        }
        throw err;
    }
}
exports.registryCall = registryCall;
/**
 * Repeatedly calls a paginated V2 endpoint, accumulating `body[key]` from
 * each page until there is no "link" header or `maxPages` is reached.
 */
async function paginatedV2Call(registryBase, accept, username, password, endpoint, key, pageSize = 1000, maxPages = Number.MAX_SAFE_INTEGER, reqOptions = {}) {
    const collected = [];
    let pageEndpoint = `${endpoint}?n=${pageSize}`;
    for (let page = 0; page < maxPages; page++) {
        const response = await registryV2Call(registryBase, pageEndpoint, accept, username, password, reqOptions);
        const body = needle_1.parseResponseBody(response);
        if (body && body[key]) {
            collected.push(...body[key]);
        }
        const link = response.headers.link;
        if (!link) {
            break;
        }
        pageEndpoint = pageEndpointForLink(endpoint, link);
    }
    return collected;
}
exports.paginatedV2Call = paginatedV2Call;
/**
 * Exchanges (optional) basic-auth credentials for a bearer token at the
 * auth server advertised in the registry's challenge header.
 */
async function getToken(registryBase, authBase, service, scope, username, password, reqOptions = {}) {
    const reqConfig = applyRequestOptions({
        uri: authBase,
        qs: {
            service,
            scope,
        },
    }, Object.assign({}, reqOptions));
    // Only attach basic-auth credentials when both parts are truthy; they are
    // expected to be undefined for anonymous token requests.
    if (username && password) {
        reqConfig.username = username;
        reqConfig.password = password;
    }
    const response = await needle_1.needleWrapper(reqConfig);
    const body = needle_1.parseResponseBody(response);
    // Some token servers answer with "token", others with "access_token".
    return body.token || body.access_token;
}
exports.getToken = getToken;
/**
 * Parses a `www-authenticate` bearer challenge header of the form
 * `Bearer realm="...",service="...",scope="..."` into a
 * [realm, service, scope] tuple (entries are undefined when absent).
 */
function parseChallengeHeaders(challengeHeaders) {
    const headersMap = {};
    const headerSplit = challengeHeaders.split(",");
    for (let i = 0; i < headerSplit.length; i++) {
        const entry = headerSplit[i];
        if (!entry.includes("=") && i > 0) {
            // we'll get here in case a value includes a comma. we want to concat what's after the comma to the previous value
            headerSplit[i - 1] += `,${entry}`;
            headerSplit.splice(i, 1);
            i--;
        }
    }
    headerSplit.forEach(entry => {
        // BUGFIX: split only at the FIRST "=", so values that themselves
        // contain "=" (e.g. realm URLs with query parameters) stay intact;
        // the previous entry.split("=") truncated such values.
        const separatorIndex = entry.indexOf("=");
        const key = entry.slice(0, separatorIndex);
        const value = entry.slice(separatorIndex + 1);
        headersMap[key] = JSON.parse(value);
    });
    // "Bearer realm" is the map key produced by the `Bearer realm="..."` part.
    return [headersMap["Bearer realm"], headersMap.service, headersMap.scope];
}
exports.parseChallengeHeaders = parseChallengeHeaders;
/**
 * Builds a request config for an unauthenticated Docker Registry V2 call
 * against `https://<registryBase>/v2<endpoint>`.
 */
function buildUnauthenticatedV2RequestConfig(registryBase, endpoint, accept, reqOptions = {}) {
    const baseConfig = {
        headers: { Accept: accept },
        uri: `https://${registryBase}/v2${endpoint}`,
    };
    return applyRequestOptions(baseConfig, reqOptions);
}
exports.buildUnauthenticatedV2RequestConfig = buildUnauthenticatedV2RequestConfig;
// Builds the next-page endpoint from a response "link" header, keeping the
// original endpoint path and adopting the link's query string.
function pageEndpointForLink(endpoint, link) {
    const nextUrl = parseLink(link).next.url;
    const [, queryString] = nextUrl.split("?");
    return `${endpoint}?${queryString}`;
}
// Builds an authenticated request config from a 401 challenge response.
// See: https://docs.docker.com/registry/spec/auth/token/#how-to-authenticate
async function setAuthConfig(registryBase, err, reqConfig, username, password, reqOptions) {
    const challengeHeaders = err.headers["www-authenticate"];
    if (!challengeHeaders) {
        // Cannot authenticate without a challenge; surface the original error.
        throw err;
    }
    const [authBase, service, scope] = parseChallengeHeaders(challengeHeaders);
    if (!authBase) {
        // No realm advertised: the registry expects plain basic auth.
        return Object.assign(Object.assign({}, reqConfig), { username, password });
    }
    // Bearer flow: exchange credentials for a token and attach it.
    const token = await getToken(registryBase, authBase, service, scope, username, password, reqOptions);
    const headers = Object.assign(Object.assign({}, reqConfig.headers), { Authorization: `Bearer ${token}` });
    return Object.assign(Object.assign({}, reqConfig), { headers });
}
// ACR does not handle redirects well: automatic redirects fail due to an
// unexpected authorization header. The workaround is to follow the redirect
// manually, discarding the token when targeting azure container registries.
async function handleRedirect(err, config) {
    const location = err.headers.location;
    if (!location) {
        throw err;
    }
    // Only clear the Authorization header for azure container registries.
    if (location.includes("azurecr.io")) {
        delete config.headers.Authorization;
    }
    config.uri = assertFullUrl(location, config.uri);
    return await needle_1.needleWrapper(config, MAX_RETRIES);
}
/*
 * Takes a request config and applies the allowed options to it.
 * @param reqConfig - request config that is passed to the request library.
 * @param reqOptions - options passed in from outside of the v2 client library.
 */
function applyRequestOptions(reqConfig, reqOptions) {
    const options = Object.assign({}, reqOptions);
    const uri = applyUriHostMappings(applyUriProtocol(reqConfig.uri, options.protocol), options.hostMappings);
    const headers = applyHeaders(reqConfig.headers, options.headers);
    // These keys were consumed above and must not leak into the raw request.
    delete options.protocol;
    delete options.hostMappings;
    delete options.headers;
    return Object.assign(Object.assign(Object.assign({}, reqConfig), options), { uri, headers });
}
// Rewrites the URI with the given protocol (e.g. "http:"); a missing
// protocol leaves the URI untouched.
function applyUriProtocol(uri, protocol) {
    if (!protocol) {
        return uri;
    }
    const parsed = Object.assign(url.parse(uri), { protocol });
    return url.format(parsed);
}
/**
 * Applies host mappings to the given uri.
 *
 * @param uri
 * @param mappings - Array of mappings. Each mapping is represented as an
 * array tuple: [host_regex_matcher, new_host]. The first matching mapping
 * wins; when nothing matches the uri is returned unchanged.
 */
function applyUriHostMappings(uri, mappings) {
    if (!mappings) {
        return uri;
    }
    const parsed = url.parse(uri);
    for (const [matcher, newHost] of mappings) {
        if (parsed.host.match(matcher)) {
            parsed.host = newHost;
            return url.format(parsed);
        }
    }
    return uri;
}
// Merges the two header maps into a new object; entries from `addHeaders`
// win on conflict. Either argument may be null/undefined.
function applyHeaders(currentHeaders, addHeaders) {
    const merged = Object.assign({}, currentHeaders || {});
    return Object.assign(merged, addHeaders || {});
}
// Normalizes a redirect location to an absolute URL; a path-only location is
// resolved against the origin of the originally-requested URL.
function assertFullUrl(redirectLocation, originalLocation) {
    try {
        return new URL(redirectLocation).href;
    }
    catch (_a) {
        const originalOrigin = new URL(originalLocation).origin;
        return new URL(redirectLocation, originalOrigin).href;
    }
}
//# sourceMappingURL=registry-call.js.map
/***/ }),
/***/ 47235:
/***/ ((__unused_webpack_module, exports) => {
"use strict";
Object.defineProperty(exports, "__esModule", ({ value: true }));
//# sourceMappingURL=types.js.map
/***/ }),
/***/ 54336:
/***/ ((module, __unused_webpack_exports, __webpack_require__) => {
"use strict";
var qs = __webpack_require__(71191)
, url = __webpack_require__(78835)
, xtend = __webpack_require__(47529);
// Maximum accepted Link header length (ReDoS/abuse guard); explicit radix
// added to parseInt, and the missing statement terminator restored (the
// original relied on ASI for the second declaration).
const PARSE_LINK_HEADER_MAXLEN = parseInt(process.env.PARSE_LINK_HEADER_MAXLEN, 10) || 2000;
// When set (to anything non-null), over-long headers throw instead of being ignored.
const PARSE_LINK_HEADER_THROW_ON_MAXLEN_EXCEEDED = process.env.PARSE_LINK_HEADER_THROW_ON_MAXLEN_EXCEEDED != null;
// Truthy when the parsed link object exists and carries a `rel` attribute
// (returns the falsy input or the rel value itself, matching && semantics).
function hasRel(x) {
    if (!x) {
        return x;
    }
    return x.rel;
}
// Folds a parsed link object into the accumulator, keyed by each of its
// (possibly space-separated) rel values.
function intoRels(acc, x) {
    x.rel.split(/\s+/).forEach(rel => {
        acc[rel] = xtend(x, { rel: rel });
    });
    return acc;
}
// Parses a single `key="value"` (or key=value) attribute into the accumulator.
// rel="next" => 1: rel 2: next
function createObjects(acc, p) {
    const m = p.match(/\s*(.+)\s*=\s*"?([^"]+)"?/);
    if (m) {
        acc[m[1]] = m[2];
    }
    return acc;
}
// Parses a single link entry (`<url>; attr=val; ...`) into an object holding
// its query params, its attributes, and the `url` itself; returns null on
// any parse failure.
function parseLink(link) {
    try {
        const m = link.match(/<?([^>]*)>(.*)/);
        const linkUrl = m[1];
        const parts = m[2].split(';');
        const qry = qs.parse(url.parse(linkUrl).query);
        parts.shift();
        let info = parts.reduce(createObjects, {});
        info = xtend(qry, info);
        info.url = linkUrl;
        return info;
    } catch (e) {
        return null;
    }
}
// Validates the raw header: present and within the configured length cap.
// Over-long headers either throw or are ignored, depending on configuration.
function checkHeader(linkHeader) {
    if (!linkHeader) {
        return false;
    }
    if (linkHeader.length <= PARSE_LINK_HEADER_MAXLEN) {
        return true;
    }
    if (PARSE_LINK_HEADER_THROW_ON_MAXLEN_EXCEEDED) {
        throw new Error('Input string too long, it should be under ' + PARSE_LINK_HEADER_MAXLEN + ' characters.');
    }
    return false;
}
module.exports = function (linkHeader) {
if (!checkHeader(linkHeader)) return null;
return linkHeader.split(/,\s*</)
.map(parseLink)
.filter(hasRel)
.reduce(intoRels, {});
};
/***/ }),
/***/ 5623:
/***/ ((module) => {
"use strict";
module.exports = balanced;
/**
 * Finds the first balanced `a`...`b` pair in `str` and returns its pieces
 * (pre / body / post plus start/end indices), or a falsy value when no
 * balanced pair exists. Regex delimiters are first resolved to their literal
 * first match.
 */
function balanced(a, b, str) {
    if (a instanceof RegExp) {
        a = maybeMatch(a, str);
    }
    if (b instanceof RegExp) {
        b = maybeMatch(b, str);
    }
    var r = range(a, b, str);
    if (!r) {
        return r;
    }
    return {
        start: r[0],
        end: r[1],
        pre: str.slice(0, r[0]),
        body: str.slice(r[0] + a.length, r[1]),
        post: str.slice(r[1] + b.length)
    };
}
// Returns the first match of `reg` in `str`, or null when there is none.
function maybeMatch(reg, str) {
    var found = str.match(reg);
    return found === null ? null : found[0];
}
balanced.range = range;
/**
 * Returns the [start, end] indices of the first balanced `a`...`b` pair in
 * `str`, or undefined when no balanced pair exists.
 */
function range(a, b, str) {
    var begs, beg, left, right, result;
    var ai = str.indexOf(a);
    var bi = str.indexOf(b, ai + 1);
    var i = ai;
    if (ai >= 0 && bi > 0) {
        if(a===b) {
            // Identical delimiters: the first two occurrences bound the range.
            return [ai, bi];
        }
        begs = [];
        left = str.length;
        // Scan delimiters left-to-right, tracking unmatched opens in `begs`
        // and remembering the leftmost completed pair in left/right.
        while (i >= 0 && !result) {
            if (i == ai) {
                // Current position is an opening delimiter.
                begs.push(i);
                ai = str.indexOf(a, i + 1);
            } else if (begs.length == 1) {
                // Closing delimiter matches the only outstanding open: done.
                result = [ begs.pop(), bi ];
            } else {
                // Closing delimiter of a nested pair; record it if it is the
                // leftmost pair completed so far.
                beg = begs.pop();
                if (beg < left) {
                    left = beg;
                    right = bi;
                }
                bi = str.indexOf(b, i + 1);
            }
            // Advance to whichever delimiter occurs next in the string.
            i = ai < bi && ai >= 0 ? ai : bi;
        }
        if (begs.length) {
            // Unmatched opens remain: fall back to the leftmost completed pair.
            result = [ left, right ];
        }
    }
    return result;
}
/***/ }),
/***/ 3644:
/***/ ((module, __unused_webpack_exports, __webpack_require__) => {
var concatMap = __webpack_require__(11048);
var balanced = __webpack_require__(5623);
module.exports = expandTop;
// Unique sentinel strings used to protect backslash-escaped characters from
// the brace parser; randomized so they cannot collide with user input.
var escSlash = '\0SLASH'+Math.random()+'\0';
var escOpen = '\0OPEN'+Math.random()+'\0';
var escClose = '\0CLOSE'+Math.random()+'\0';
var escComma = '\0COMMA'+Math.random()+'\0';
var escPeriod = '\0PERIOD'+Math.random()+'\0';
// Converts a sequence endpoint to a number: numeric strings parse to their
// value, anything else maps to its first character's char code.
function numeric(str) {
    var parsed = parseInt(str, 10);
    // eslint-disable-next-line eqeqeq -- loose compare intentionally accepts "010" etc.
    if (parsed == str) {
        return parsed;
    }
    return str.charCodeAt(0);
}
// Replaces backslash-escaped specials with their sentinel placeholders so
// the brace parser never sees them as syntax.
function escapeBraces(str) {
    var replacements = [
        ['\\\\', escSlash],
        ['\\{', escOpen],
        ['\\}', escClose],
        ['\\,', escComma],
        ['\\.', escPeriod]
    ];
    return replacements.reduce(function (acc, pair) {
        return acc.split(pair[0]).join(pair[1]);
    }, str);
}
// Restores sentinel placeholders back to their literal characters.
function unescapeBraces(str) {
    var replacements = [
        [escSlash, '\\'],
        [escOpen, '{'],
        [escClose, '}'],
        [escComma, ','],
        [escPeriod, '.']
    ];
    return replacements.reduce(function (acc, pair) {
        return acc.split(pair[0]).join(pair[1]);
    }, str);
}
// Basically just str.split(","), but handling cases where we have nested
// braced sections, which should be treated as individual members, like
// {a,{b,c},d}.
function parseCommaParts(str) {
    if (!str) {
        return [''];
    }
    var m = balanced('{', '}', str);
    if (!m) {
        // No braces: plain comma split.
        return str.split(',');
    }
    // Split the prefix on commas and glue the braced body onto its last piece.
    var p = m.pre.split(',');
    p[p.length - 1] += '{' + m.body + '}';
    // Recurse into the suffix and merge its first piece into our last.
    var postParts = parseCommaParts(m.post);
    if (m.post.length) {
        p[p.length - 1] += postParts.shift();
        p.push.apply(p, postParts);
    }
    return p;
}
// Entry point: escape backslashed specials, expand, then unescape.
function expandTop(str) {
    if (!str) {
        return [];
    }
    var input = str;
    // I don't know why Bash 4.3 does this, but it does.
    // Anything starting with {} will have the first two bytes preserved
    // but *only* at the top level, so {},a}b will not expand to anything,
    // but a{},b}c will be expanded to [a}c,abc].
    // One could argue that this is a bug in Bash, but since the goal of
    // this module is to match Bash's rules, we escape a leading {}.
    if (input.substr(0, 2) === '{}') {
        input = '\\{\\}' + input.substr(2);
    }
    return expand(escapeBraces(input), true).map(unescapeBraces);
}
// Returns its argument unchanged.
function identity(e) {
    return e;
}
// Wraps a string in literal braces.
function embrace(str) {
    return '{' + str + '}';
}
// True for zero-padded (possibly negative) numeric strings like "01"/"-05".
function isPadded(el) {
    return /^-?0\d/.test(el);
}
// Comparators used to walk a sequence upward (lte) or downward (gte).
function lte(i, y) {
    return i <= y;
}
function gte(i, y) {
    return i >= y;
}
// Recursively expands one level of brace expression in `str`. `isTop` marks
// the outermost call, which filters out empty expansions (except for
// sequences, which may legitimately produce the empty string).
function expand(str, isTop) {
    var expansions = [];
    var m = balanced('{', '}', str);
    // No balanced braces, or the brace is preceded by `$` (shell variable
    // syntax like ${x}): treat the whole string as a literal.
    if (!m || /\$$/.test(m.pre)) return [str];
    var isNumericSequence = /^-?\d+\.\.-?\d+(?:\.\.-?\d+)?$/.test(m.body);
    var isAlphaSequence = /^[a-zA-Z]\.\.[a-zA-Z](?:\.\.-?\d+)?$/.test(m.body);
    var isSequence = isNumericSequence || isAlphaSequence;
    var isOptions = m.body.indexOf(',') >= 0;
    if (!isSequence && !isOptions) {
        // {a},b}
        if (m.post.match(/,.*\}/)) {
            // The closing brace we matched actually belongs to a later comma
            // set (e.g. "{a},b}"): escape it and re-parse.
            str = m.pre + '{' + m.body + escClose + m.post;
            return expand(str);
        }
        return [str];
    }
    var n;
    if (isSequence) {
        n = m.body.split(/\.\./);
    } else {
        n = parseCommaParts(m.body);
        if (n.length === 1) {
            // x{{a,b}}y ==> x{a}y x{b}y
            n = expand(n[0], false).map(embrace);
            if (n.length === 1) {
                var post = m.post.length
                    ? expand(m.post, false)
                    : [''];
                return post.map(function(p) {
                    return m.pre + n[0] + p;
                });
            }
        }
    }
    // at this point, n is the parts, and we know it's not a comma set
    // with a single entry.
    // no need to expand pre, since it is guaranteed to be free of brace-sets
    var pre = m.pre;
    var post = m.post.length
        ? expand(m.post, false)
        : [''];
    var N;
    if (isSequence) {
        // Numeric or alphabetic range: walk from x to y by `incr`.
        var x = numeric(n[0]);
        var y = numeric(n[1]);
        var width = Math.max(n[0].length, n[1].length)
        var incr = n.length == 3
            ? Math.abs(numeric(n[2]))
            : 1;
        var test = lte;
        var reverse = y < x;
        if (reverse) {
            incr *= -1;
            test = gte;
        }
        var pad = n.some(isPadded);
        N = [];
        for (var i = x; test(i, y); i += incr) {
            var c;
            if (isAlphaSequence) {
                c = String.fromCharCode(i);
                // A literal backslash in an alpha range is dropped (bash compat).
                if (c === '\\')
                    c = '';
            } else {
                c = String(i);
                if (pad) {
                    // Zero-pad to the widest endpoint, keeping a leading "-".
                    var need = width - c.length;
                    if (need > 0) {
                        var z = new Array(need + 1).join('0');
                        if (i < 0)
                            c = '-' + z + c.slice(1);
                        else
                            c = z + c;
                    }
                }
            }
            N.push(c);
        }
    } else {
        // Comma set: expand every member recursively.
        N = concatMap(n, function(el) { return expand(el, false) });
    }
    // Cross product of every expanded member with every expanded suffix.
    for (var j = 0; j < N.length; j++) {
        for (var k = 0; k < post.length; k++) {
            var expansion = pre + N[j] + post[k];
            if (!isTop || isSequence || expansion)
                expansions.push(expansion);
        }
    }
    return expansions;
}
/***/ }),
/***/ 11048:
/***/ ((module) => {
module.exports = function (xs, fn) {
var res = [];
for (var i = 0; i < xs.length; i++) {
var x = fn(xs[i], i);
if (isArray(x)) res.push.apply(res, x);
else res.push(x);
}
return res;
};
var isArray = Array.isArray || function (xs) {
return Object.prototype.toString.call(xs) === '[object Array]';
};
/***/ }),
/***/ 10292:
/***/ ((__unused_webpack_module, exports, __webpack_require__) => {
"use strict";
const MiniPass = __webpack_require__(84075)
const EE = __webpack_require__(28614).EventEmitter
const fs = __webpack_require__(35747)
// prefer the native fs.writev (added in Node.js 12.9.0) for writing
// several buffers in one syscall
let writev = fs.writev
/* istanbul ignore next */
if (!writev) {
// This entire block can be removed if support for earlier than Node.js
// 12.9.0 is not needed.
// fall back to the internal fs binding on older runtimes;
// process.binding is deprecated but still available there
const binding = process.binding('fs')
const FSReqWrap = binding.FSReqWrap || binding.FSReqCallback
// mirror fs.writev's callback shape: (er, bytesWritten, buffers)
writev = (fd, iovec, pos, cb) => {
const done = (er, bw) => cb(er, bw, iovec)
const req = new FSReqWrap()
req.oncomplete = done
binding.writeBuffers(fd, iovec, pos, req)
}
}
// Symbol keys for private stream state, so the internals cannot collide
// with (or be enumerated alongside) userland properties on the streams.
const _autoClose = Symbol('_autoClose')
const _close = Symbol('_close')
const _ended = Symbol('_ended')
const _fd = Symbol('_fd')
const _finished = Symbol('_finished')
const _flags = Symbol('_flags')
const _flush = Symbol('_flush')
const _handleChunk = Symbol('_handleChunk')
const _makeBuf = Symbol('_makeBuf')
const _mode = Symbol('_mode')
const _needDrain = Symbol('_needDrain')
const _onerror = Symbol('_onerror')
const _onopen = Symbol('_onopen')
const _onread = Symbol('_onread')
const _onwrite = Symbol('_onwrite')
const _open = Symbol('_open')
const _path = Symbol('_path')
const _pos = Symbol('_pos')
const _queue = Symbol('_queue')
const _read = Symbol('_read')
const _readSize = Symbol('_readSize')
const _reading = Symbol('_reading')
const _remain = Symbol('_remain')
const _size = Symbol('_size')
const _write = Symbol('_write')
const _writing = Symbol('_writing')
const _defaultFlag = Symbol('_defaultFlag')
const _errored = Symbol('_errored')
// ReadStream: file read stream built on MiniPass.  Internal state lives
// under the Symbol keys declared above.  Emits 'open', 'close', and
// 'error', plus whatever MiniPass emits for data flow.
class ReadStream extends MiniPass {
// path: file path to read (must be a string).
// opt options (plus anything MiniPass accepts):
//   fd: read from an already-open descriptor instead of opening `path`
//   readSize: bytes per read call (default 16*1024*1024)
//   size: stop after this many bytes (default Infinity)
//   autoClose: close the fd once done or on error (default true)
constructor (path, opt) {
opt = opt || {}
super(opt)
this.readable = true
this.writable = false
if (typeof path !== 'string')
throw new TypeError('path must be a string')
this[_errored] = false
this[_fd] = typeof opt.fd === 'number' ? opt.fd : null
this[_path] = path
this[_readSize] = opt.readSize || 16*1024*1024
this[_reading] = false
this[_size] = typeof opt.size === 'number' ? opt.size : Infinity
this[_remain] = this[_size]
this[_autoClose] = typeof opt.autoClose === 'boolean' ?
opt.autoClose : true
// if a descriptor was supplied, start reading immediately; otherwise
// open the file and continue from the open callback
if (typeof this[_fd] === 'number')
this[_read]()
else
this[_open]()
}
get fd () { return this[_fd] }
get path () { return this[_path] }
// this stream is read-only: reject the writable-side API
write () {
throw new TypeError('this is a readable stream')
}
end () {
throw new TypeError('this is a readable stream')
}
[_open] () {
fs.open(this[_path], 'r', (er, fd) => this[_onopen](er, fd))
}
[_onopen] (er, fd) {
if (er)
this[_onerror](er)
else {
this[_fd] = fd
this.emit('open', fd)
this[_read]()
}
}
// allocate a buffer no larger than readSize or the bytes remaining
[_makeBuf] () {
return Buffer.allocUnsafe(Math.min(this[_readSize], this[_remain]))
}
[_read] () {
// _reading flag ensures only one fs.read is in flight at a time
if (!this[_reading]) {
this[_reading] = true
const buf = this[_makeBuf]()
/* istanbul ignore if */
if (buf.length === 0)
return process.nextTick(() => this[_onread](null, 0, buf))
fs.read(this[_fd], buf, 0, buf.length, null, (er, br, buf) =>
this[_onread](er, br, buf))
}
}
[_onread] (er, br, buf) {
this[_reading] = false
if (er)
this[_onerror](er)
else if (this[_handleChunk](br, buf))
this[_read]()
}
[_close] () {
if (this[_autoClose] && typeof this[_fd] === 'number') {
// null the fd first so re-entrant calls see the stream as closed
const fd = this[_fd]
this[_fd] = null
fs.close(fd, er => er ? this.emit('error', er) : this.emit('close'))
}
}
[_onerror] (er) {
// leave _reading set so no further reads are scheduled after failure
this[_reading] = true
this[_close]()
this.emit('error', er)
}
// push `br` bytes from `buf` downstream; returns true when another
// read should be scheduled
[_handleChunk] (br, buf) {
let ret = false
// no effect if infinite
this[_remain] -= br
if (br > 0)
ret = super.write(br < buf.length ? buf.slice(0, br) : buf)
// zero-byte read (EOF) or requested size fully delivered: finish up
if (br === 0 || this[_remain] <= 0) {
ret = false
this[_close]()
super.end()
}
return ret
}
emit (ev, data) {
switch (ev) {
// suppress the writable-side events from MiniPass; meaningless here
case 'prefinish':
case 'finish':
break
// the consumer caught up: resume reading if the fd is still open
case 'drain':
if (typeof this[_fd] === 'number')
this[_read]()
break
// emit 'error' at most once
case 'error':
if (this[_errored])
return
this[_errored] = true
return super.emit(ev, data)
default:
return super.emit(ev, data)
}
}
}
// Blocking variant of ReadStream: performs the open, the read loop, and
// the close with the synchronous fs calls.
class ReadStreamSync extends ReadStream {
  [_open] () {
    let sawError = true
    try {
      this[_onopen](null, fs.openSync(this[_path], 'r'))
      sawError = false
    } finally {
      // release the fd if openSync or the open handlers threw
      if (sawError)
        this[_close]()
    }
  }
  [_read] () {
    let sawError = true
    try {
      if (!this[_reading]) {
        this[_reading] = true
        for (;;) {
          const buf = this[_makeBuf]()
          /* istanbul ignore next */
          const bytesRead = buf.length === 0
            ? 0
            : fs.readSync(this[_fd], buf, 0, buf.length, null)
          if (!this[_handleChunk](bytesRead, buf))
            break
        }
        this[_reading] = false
      }
      sawError = false
    } finally {
      if (sawError)
        this[_close]()
    }
  }
  [_close] () {
    // guard clauses instead of the nested-if shape of the async version
    if (!this[_autoClose])
      return
    if (typeof this[_fd] !== 'number')
      return
    const fd = this[_fd]
    this[_fd] = null
    fs.closeSync(fd)
    this.emit('close')
  }
}
// WriteStream: file write stream on a plain EventEmitter.  Writes are
// serialized: while one write is in flight, later chunks collect in the
// queue and are flushed together (via writev) when it completes.
class WriteStream extends EE {
// path: file path to write.
// opt options:
//   fd: write to an already-open descriptor instead of opening `path`
//   mode: file mode when opening (default 0o666)
//   start: byte offset to begin writing at (enables positional writes)
//   flags: open(2) flags; default 'r+' when `start` is set, else 'w'
//   autoClose: close the fd on finish or error (default true)
constructor (path, opt) {
opt = opt || {}
super(opt)
this.readable = false
this.writable = true
this[_errored] = false
this[_writing] = false
this[_ended] = false
this[_needDrain] = false
this[_queue] = []
this[_path] = path
this[_fd] = typeof opt.fd === 'number' ? opt.fd : null
this[_mode] = opt.mode === undefined ? 0o666 : opt.mode
this[_pos] = typeof opt.start === 'number' ? opt.start : null
this[_autoClose] = typeof opt.autoClose === 'boolean' ?
opt.autoClose : true
// truncating makes no sense when writing into the middle
const defaultFlag = this[_pos] !== null ? 'r+' : 'w'
this[_defaultFlag] = opt.flags === undefined
this[_flags] = this[_defaultFlag] ? defaultFlag : opt.flags
if (this[_fd] === null)
this[_open]()
}
// emit 'error' at most once; all other events pass straight through
emit (ev, data) {
if (ev === 'error') {
if (this[_errored])
return
this[_errored] = true
}
return super.emit(ev, data)
}
get fd () { return this[_fd] }
get path () { return this[_path] }
[_onerror] (er) {
this[_close]()
// mark as writing so no further writes start after a failure
this[_writing] = true
this.emit('error', er)
}
[_open] () {
fs.open(this[_path], this[_flags], this[_mode],
(er, fd) => this[_onopen](er, fd))
}
[_onopen] (er, fd) {
// the default 'r+' (used when `start` is set) fails with ENOENT if
// the file does not exist yet: retry with 'w' to create it
if (this[_defaultFlag] &&
this[_flags] === 'r+' &&
er && er.code === 'ENOENT') {
this[_flags] = 'w'
this[_open]()
} else if (er)
this[_onerror](er)
else {
this[_fd] = fd
this.emit('open', fd)
// write out anything queued while the open was in flight
this[_flush]()
}
}
// optionally write a final chunk, then finish once the queue drains
end (buf, enc) {
if (buf)
this.write(buf, enc)
this[_ended] = true
// synthetic after-write logic, where drain/finish live
if (!this[_writing] && !this[_queue].length &&
typeof this[_fd] === 'number')
this[_onwrite](null, 0)
return this
}
// returns false when the chunk had to be queued (caller should wait
// for 'drain'), true when it was handed to fs.write immediately
write (buf, enc) {
if (typeof buf === 'string')
buf = Buffer.from(buf, enc)
if (this[_ended]) {
this.emit('error', new Error('write() after end()'))
return false
}
// queue if not yet open, mid-write, or behind earlier queued chunks
if (this[_fd] === null || this[_writing] || this[_queue].length) {
this[_queue].push(buf)
this[_needDrain] = true
return false
}
this[_writing] = true
this[_write](buf)
return true
}
[_write] (buf) {
fs.write(this[_fd], buf, 0, buf.length, this[_pos], (er, bw) =>
this[_onwrite](er, bw))
}
[_onwrite] (er, bw) {
if (er)
this[_onerror](er)
else {
// advance the offset only in positional-write mode
if (this[_pos] !== null)
this[_pos] += bw
if (this[_queue].length)
this[_flush]()
else {
this[_writing] = false
// _finished latches so 'finish' fires exactly once
if (this[_ended] && !this[_finished]) {
this[_finished] = true
this[_close]()
this.emit('finish')
} else if (this[_needDrain]) {
this[_needDrain] = false
this.emit('drain')
}
}
}
}
// drain the queue: a single chunk goes through fs.write, several are
// gathered into one writev call
[_flush] () {
if (this[_queue].length === 0) {
if (this[_ended])
this[_onwrite](null, 0)
} else if (this[_queue].length === 1)
this[_write](this[_queue].pop())
else {
const iovec = this[_queue]
this[_queue] = []
writev(this[_fd], iovec, this[_pos],
(er, bw) => this[_onwrite](er, bw))
}
}
[_close] () {
if (this[_autoClose] && typeof this[_fd] === 'number') {
// null the fd first so re-entrant calls see the stream as closed
const fd = this[_fd]
this[_fd] = null
fs.close(fd, er => er ? this.emit('error', er) : this.emit('close'))
}
}
}
// Blocking variant of WriteStream: open, write, and close use the
// synchronous fs calls.
class WriteStreamSync extends WriteStream {
  [_open] () {
    let fd
    // only wrap in a try{} block if we know we'll retry, to avoid
    // the rethrow obscuring the error's source frame in most cases.
    if (this[_defaultFlag] && this[_flags] === 'r+') {
      try {
        fd = fs.openSync(this[_path], this[_flags], this[_mode])
      } catch (er) {
        if (er.code !== 'ENOENT')
          throw er
        // 'r+' on a missing file: retry with 'w' to create it
        this[_flags] = 'w'
        return this[_open]()
      }
    } else {
      fd = fs.openSync(this[_path], this[_flags], this[_mode])
    }
    this[_onopen](null, fd)
  }
  [_close] () {
    // guard clauses instead of the nested-if shape of the async version
    if (!this[_autoClose])
      return
    if (typeof this[_fd] !== 'number')
      return
    const fd = this[_fd]
    this[_fd] = null
    fs.closeSync(fd)
    this.emit('close')
  }
  [_write] (buf) {
    // throw the original error, but make a best-effort close first
    let sawError = true
    try {
      const bytesWritten = fs.writeSync(
        this[_fd], buf, 0, buf.length, this[_pos])
      this[_onwrite](null, bytesWritten)
      sawError = false
    } finally {
      if (sawError) {
        try { this[_close]() } catch (_) {}
      }
    }
  }
}
// public API of this module: async and sync file stream pairs
exports.ReadStream = ReadStream
exports.ReadStreamSync = ReadStreamSync
exports.WriteStream = WriteStream
exports.WriteStreamSync = WriteStreamSync
/***/ }),
/***/ 84075:
/***/ ((module, __unused_webpack_exports, __webpack_require__) => {
"use strict";
const EE = __webpack_require__(28614)
const Stream = __webpack_require__(92413)
const Yallist = __webpack_require__(20315)
const SD = __webpack_require__(24304).StringDecoder
// Symbol keys for Minipass internal state, kept off the public surface
const EOF = Symbol('EOF')
const MAYBE_EMIT_END = Symbol('maybeEmitEnd')
const EMITTED_END = Symbol('emittedEnd')
const EMITTING_END = Symbol('emittingEnd')
const CLOSED = Symbol('closed')
const READ = Symbol('read')
const FLUSH = Symbol('flush')
const FLUSHCHUNK = Symbol('flushChunk')
const ENCODING = Symbol('encoding')
const DECODER = Symbol('decoder')
const FLOWING = Symbol('flowing')
const PAUSED = Symbol('paused')
const RESUME = Symbol('resume')
const BUFFERLENGTH = Symbol('bufferLength')
const BUFFERPUSH = Symbol('bufferPush')
const BUFFERSHIFT = Symbol('bufferShift')
const OBJECTMODE = Symbol('objectMode')
const DESTROYED = Symbol('destroyed')
// TODO remove when Node v8 support drops
// iterator support can be disabled via the _MP_NO_ITERATOR_SYMBOLS_
// global escape hatch; fall back to inert placeholder symbols then
const doIter = global._MP_NO_ITERATOR_SYMBOLS_ !== '1'
const ASYNCITERATOR = doIter && Symbol.asyncIterator
|| Symbol('asyncIterator not implemented')
const ITERATOR = doIter && Symbol.iterator
|| Symbol('iterator not implemented')
// events that mean 'the stream is over'
// these are treated specially, and re-emitted
// if they are listened for after emitting.
// true for the trio of terminal events: 'end', 'finish', 'prefinish'
const isEndish = ev => ['end', 'finish', 'prefinish'].includes(ev)
// ArrayBuffer check that also matches instances from other realms
// (matched by constructor name plus a numeric byteLength)
const isArrayBuffer = b => {
  if (b instanceof ArrayBuffer)
    return true
  return typeof b === 'object' &&
    b.constructor &&
    b.constructor.name === 'ArrayBuffer' &&
    b.byteLength >= 0
}
// any typed array or DataView, but not a Node Buffer
const isArrayBufferView = b => !Buffer.isBuffer(b) && ArrayBuffer.isView(b)
module.exports = class Minipass extends Stream {
constructor (options) {
super()
this[FLOWING] = false
// whether we're explicitly paused
this[PAUSED] = false
this.pipes = new Yallist()
this.buffer = new Yallist()
this[OBJECTMODE] = options && options.objectMode || false
if (this[OBJECTMODE])
this[ENCODING] = null
else
this[ENCODING] = options && options.encoding || null
if (this[ENCODING] === 'buffer')
this[ENCODING] = null
this[DECODER] = this[ENCODING] ? new SD(this[ENCODING]) : null
this[EOF] = false
this[EMITTED_END] = false
this[EMITTING_END] = false
this[CLOSED] = false
this.writable = true
this.readable = true
this[BUFFERLENGTH] = 0
this[DESTROYED] = false
}
get bufferLength () { return this[BUFFERLENGTH] }
get encoding () { return this[ENCODING] }
set encoding (enc) {
if (this[OBJECTMODE])
throw new Error('cannot set encoding in objectMode')
if (this[ENCODING] && enc !== this[ENCODING] &&
(this[DECODER] && this[DECODER].lastNeed || this[BUFFERLENGTH]))
throw new Error('cannot change encoding')
if (this[ENCODING] !== enc) {
this[DECODER] = enc ? new SD(enc) : null
if (this.buffer.length)
this.buffer = this.buffer.map(chunk => this[DECODER].write(chunk))
}
this[ENCODING] = enc
}
setEncoding (enc) {
this.encoding = enc
}
get objectMode () { return this[OBJECTMODE] }
set objectMode (om) { this[OBJECTMODE] = this[OBJECTMODE] || !!om }
write (chunk, encoding, cb) {
if (this[EOF])
throw new Error('write after end')
if (this[DESTROYED]) {
this.emit('error', Object.assign(
new Error('Cannot call write after a stream was destroyed'),
{ code: 'ERR_STREAM_DESTROYED' }
))
return true
}
if (typeof encoding === 'function')
cb = encoding, encoding = 'utf8'
if (!encoding)
encoding = 'utf8'
// convert array buffers and typed array views into buffers
// at some point in the future, we may want to do the opposite!
// leave strings and buffers as-is
// anything else switches us into object mode
if (!this[OBJECTMODE] && !Buffer.isBuffer(chunk)) {
if (isArrayBufferView(chunk))
chunk = Buffer.from(chunk.buffer, chunk.byteOffset, chunk.byteLength)
else if (isArrayBuffer(chunk))
chunk = Buffer.from(chunk)
else if (typeof chunk !== 'string')
// use the setter so we throw if we have encoding set
this.objectMode = true
}
// this ensures at this point that the chunk is a buffer or string
// don't buffer it up or send it to the decod