UNPKG

e2pdf

Version:

A lightweight, highly efficient, and customizable Node.js library for crawling websites and converting pages into compact, AI-optimized PDFs. Ideal for data archiving, offline analysis, and feeding content to AI tools. Delivers fast performance and allows fine-grained customization of crawler behavior, PDF rendering options, and output location.

3 lines (2 loc) 1.23 kB
#!/usr/bin/env node "use strict";var h=Object.create;var m=Object.defineProperty;var y=Object.getOwnPropertyDescriptor;var P=Object.getOwnPropertyNames;var C=Object.getPrototypeOf,O=Object.prototype.hasOwnProperty;var q=(e,r,s,t)=>{if(r&&typeof r=="object"||typeof r=="function")for(let a of P(r))!O.call(e,a)&&a!==s&&m(e,a,{get:()=>r[a],enumerable:!(t=y(r,a))||t.enumerable});return e};var o=(e,r,s)=>(s=e!=null?h(C(e)):{},q(r||!e||!e.__esModule?m(s,"default",{value:e,enumerable:!0}):s,e));var l=o(require("fs")),n=o(require("path")),w=require("crawlee"),b=async(e,r)=>{await new w.PlaywrightCrawler({...r==null?void 0:r.crawlerOptions,async requestHandler(t){var d,u;let a=await t.page.pdf(r==null?void 0:r.pdf),p=((r!=null&&r.out?r.out+n.default.sep:"")+t.request.loadedUrl.split("://")[1]+".pdf").replace("/.pdf","/home.pdf"),i=n.default.dirname(p);l.default.existsSync(i)||l.default.mkdirSync(i,{recursive:!0}),l.default.writeFileSync(p,a),(d=r==null?void 0:r.afterPrint)==null||d.call(r,a,t.request.loadedUrl,p),(u=r==null?void 0:r.crawlerOptions)!=null&&u.requestHandler?await r.crawlerOptions.requestHandler(t):await t.enqueueLinks()}},r==null?void 0:r.crawlerConfig).run([e])};process.argv.length>2&&b(process.argv[2]);