UNPKG

node-web-crawler

Version: 0.0.6

Node Web Crawler is a web spider written with Nodejs. It gives you the full power of jQuery on the server to parse a big number of pages as they are downloaded, asynchronously. Scraping should be simple and fun!

62 lines (61 loc) 1.42 kB
{ "name": "node-web-crawler", "version": "0.0.6", "description": "Node Web Crawler is a web spider written with Nodejs. It gives you the full power of jQuery on the server to parse a big number of pages as they are downloaded, asynchronously. Scraping should be simple and fun!", "keywords": [ "dom", "javascript", "crawling", "spider", "scraper", "scraping", "jquery", "crawler", "node-web-crawler" ], "maintainers": [ { "name": "Jay Shah", "email": "shahjaykiran@gmail.com" } ], "licenses": [ { "type": "MIT", "url": "https://github.com/jaykshah/node-web-crawler/blob/master/LICENSE.txt" } ], "repository": { "type": "git", "url": "https://github.com/jaykshah/node-web-crawler.git" }, "dependencies": { "cheerio": "0.18.0", "generic-pool": "2.1.1", "iconv": "2.1.6", "iconv-lite": "0.4.4", "jschardet": "1.1.0", "lodash": "2.4.1", "request": "2.42.0" }, "optionalDependencies": { "iconv": "2.1.6" }, "devDependencies": { "chai": "1.9.2", "mocha": "2.2.1", "mocha-testdata": "1.1.0", "sinon": "1.11.1", "jsdom": "3.1.1" }, "scripts": { "test": "./node_modules/mocha/bin/mocha --reporter spec --bail --timeout 10000 tests/*.js" }, "engines": { "node": ">=0.8.x" }, "directories": { "lib": "lib" }, "main": "./lib/node-web-crawler" }