e2pdf
Version:
A lightweight, highly efficient, and customizable Node.js library for crawling websites and converting pages into compact, AI-optimized PDFs. Ideal for data archiving, offline analysis, and feeding content to AI tools. Delivers fast performance and allows extensive customization of both the crawl and the PDF output.
2 lines (1 loc) • 1.34 kB
JavaScript
"use strict";

// Readable CommonJS form of the bundled e2pdf entry point.
const fs = require("fs");
const path = require("path");
const crawlee = require("crawlee");

/**
 * Crawl a site starting at `startUrl` and save every visited page as a PDF.
 *
 * @param {string} startUrl - URL the crawl begins from.
 * @param {object} [options] - Optional settings:
 *   - `crawlerOptions`: extra PlaywrightCrawler options. NOTE: the bundle's
 *     own `requestHandler` always wins (it is declared after the spread); a
 *     user-supplied `crawlerOptions.requestHandler` is instead invoked after
 *     each PDF has been written. When absent, discovered links are enqueued
 *     automatically so the crawl continues.
 *   - `crawlerConfig`: second constructor argument for PlaywrightCrawler.
 *   - `pdf`: options forwarded verbatim to Playwright's `page.pdf()`.
 *   - `out`: output directory prefix for the generated PDF files.
 *   - `afterPrint(buffer, url, file)`: callback fired after each PDF is saved.
 * @returns {Promise<void>} Resolves when the crawler run finishes.
 */
const e2pdf = async (startUrl, options) => {
  const crawler = new crawlee.PlaywrightCrawler(
    {
      ...options?.crawlerOptions,
      async requestHandler(ctx) {
        // Render the current page into an in-memory PDF buffer.
        const buffer = await ctx.page.pdf(options?.pdf);

        // Derive the output path from the loaded URL: drop the scheme,
        // append ".pdf", and map a trailing "/" to ".../home.pdf" so the
        // site root gets a usable filename.
        const prefix = options?.out ? options.out + path.sep : "";
        const file = (
          prefix + ctx.request.loadedUrl.split("://")[1] + ".pdf"
        ).replace("/.pdf", "/home.pdf");

        // Make sure the target directory exists before writing the file.
        const dir = path.dirname(file);
        if (!fs.existsSync(dir)) {
          fs.mkdirSync(dir, { recursive: true });
        }
        fs.writeFileSync(file, buffer);

        // Tell the caller about the new PDF, then either hand control to a
        // user-supplied handler or keep crawling via the page's links.
        options?.afterPrint?.(buffer, ctx.request.loadedUrl, file);
        if (options?.crawlerOptions?.requestHandler) {
          await options.crawlerOptions.requestHandler(ctx);
        } else {
          await ctx.enqueueLinks();
        }
      },
    },
    options?.crawlerConfig
  );
  await crawler.run([startUrl]);
};

// Mirror the bundler's CJS/ESM-interop export shape.
Object.defineProperty(module.exports, "__esModule", { value: true });
module.exports.e2pdf = e2pdf;