scrape-url
'use strict';

var cheerio = require('cheerio');
var wretch = require('wretch');

// Fetch a page with the given HTTP method and return every element
// matching the given selector(s), each wrapped as a cheerio selection.
async function scrape(method, options, selectors) {
  // Default to http:// when the URL has no protocol.
  let url = options.url;
  if (!/\w:\/\//.test(options.url)) {
    url = `http://${options.url}`;
  }

  // Accept a single selector string or an array of selectors.
  let normalizedSelectors = selectors;
  if (typeof selectors === "string") {
    normalizedSelectors = [selectors];
  }

  // Request the page body as text (POST sends options.body, if any).
  const body = method === "POST"
    ? await wretch(url).post(options.body || {}).text()
    : await wretch(url).get().text();

  const $ = cheerio.load(body);

  // Collect matches per selector: an array of arrays of cheerio-wrapped elements.
  const result = [].concat(
    normalizedSelectors.map((selector) => {
      const elements = [];
      const selected = $(selector);
      selected.each((_, element) => {
        elements.push($(element));
      });
      return elements;
    })
  );

  // Flatten into a single array of matched elements.
  return result.reduce((acc, curr) => {
    if (Array.isArray(curr)) {
      return acc.concat(curr);
    }
    return acc;
  }, []);
}

async function get(options, selectors) {
  return scrape("GET", options, selectors);
}

async function post(options, selectors) {
  return scrape("POST", options, selectors);
}

exports.get = get;
exports.post = post;
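
A minimal usage sketch of the exported get() helper, assuming the package is installed under the name scrape-url; the URL and selector below are illustrative only. get() resolves to an array of cheerio selections, so standard cheerio methods such as .text() can be called on each result.

// example.js (hypothetical consumer of this module)
const scraper = require('scrape-url');

async function main() {
  // Selectors may be a single string or an array of strings;
  // the protocol is optional and defaults to http://.
  const headings = await scraper.get({ url: 'example.com' }, 'h1');
  headings.forEach(($el) => console.log($el.text()));
}

main().catch(console.error);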