@crawlee/core
The scalable web crawling and scraping library for JavaScript/Node.js. Enables development of data extraction and web automation jobs with headless Chrome and Puppeteer, among other tools.
JavaScript
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.processHttpRequestOptions = processHttpRequestOptions;
const utils_1 = require("@crawlee/utils");
/**
* Converts {@link HttpRequestOptions} to a {@link HttpRequest}.
*/
function processHttpRequestOptions({ searchParams, form, json, username, password, ...request }) {
    const url = new URL(request.url);
    const headers = { ...request.headers };
    // Merge the `searchParams` option into the URL's query string.
    (0, utils_1.applySearchParams)(url, searchParams);
    // `body`, `form` and `json` are mutually exclusive ways to specify the payload.
    if ([request.body, form, json].filter((value) => value !== undefined).length > 1) {
        throw new Error('At most one of `body`, `form` and `json` may be specified in sendRequest arguments');
    }
    // Serialize the payload: form data as URL-encoded key/value pairs, JSON as a string,
    // otherwise pass the raw `body` through unchanged.
    const body = (() => {
        if (form !== undefined) {
            return new URLSearchParams(form).toString();
        }
        if (json !== undefined) {
            return JSON.stringify(json);
        }
        return request.body;
    })();
    // Set a default Content-Type matching the payload type, unless the caller already provided one.
    if (form !== undefined) {
        headers['content-type'] ?? (headers['content-type'] = 'application/x-www-form-urlencoded');
    }
    if (json !== undefined) {
        headers['content-type'] ?? (headers['content-type'] = 'application/json');
    }
    // Translate `username`/`password` into an HTTP Basic `Authorization` header.
    if (username !== undefined || password !== undefined) {
        const encodedAuth = Buffer.from(`${username ?? ''}:${password ?? ''}`).toString('base64');
        headers.authorization = `Basic ${encodedAuth}`;
    }
    return { ...request, body, url, headers };
}
//# sourceMappingURL=base-http-client.js.map
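For context, here is a minimal usage sketch of what the helper produces for a typical set of options. It assumes `processHttpRequestOptions` is reachable through the `@crawlee/core` package export (if not, it can be required from this compiled file directly), and the exact query-string behaviour of `applySearchParams` from `@crawlee/utils` is summarised rather than guaranteed; all input values are hypothetical.

const { processHttpRequestOptions } = require('@crawlee/core');

// Hypothetical input: a JSON payload, extra query parameters and basic auth credentials.
const request = processHttpRequestOptions({
    url: 'https://example.com/api/items',
    method: 'POST',
    searchParams: new URLSearchParams({ page: '2' }),
    json: { title: 'Hello' },
    username: 'user',
    password: 'secret',
});

// `url` is a URL instance with the search params applied, `body` is the JSON payload
// serialized to a string, and `headers` carries the derived content-type and authorization.
console.log(request.url.href);                 // e.g. https://example.com/api/items?page=2
console.log(request.body);                     // {"title":"Hello"}
console.log(request.headers['content-type']);  // application/json
console.log(request.headers.authorization);    // Basic dXNlcjpzZWNyZXQ=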