crawler
Crawler is a web spider written in Node.js. It gives you the full power of jQuery on the server to parse a large number of pages as they are downloaded, asynchronously.
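A minimal sketch of the kind of usage that description refers to, assuming the crawler package's documented queue/callback API (the URL and selector below are placeholders; res.$ is the server-side, cheerio-backed jQuery selector mentioned above):

var Crawler = require('crawler');

var c = new Crawler({
  maxConnections: 10,
  // Called once for each page after it has been downloaded and parsed.
  callback: function (error, res, done) {
    if (error) {
      console.error(error);
    } else {
      var $ = res.$; // jQuery-style selector over the fetched page
      console.log($('title').text());
    }
    done();
  }
});

// URLs are queued and fetched asynchronously.
c.queue('http://example.com/');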
var http = require('http');
var request = require('request');

// Test server: streams a JSON string ('"a', then roughly a million 'e's,
// then a closing '"') one chunk at a time, pausing 800 ms between chunks.
const server = http.createServer(function (req, res) {
  res.writeHead(200, { 'Content-Type': 'text/plain' });
  res.write('"a');

  // Write one more character after 800 ms; end the response when `end` is true.
  const delay = (end, cb) => setTimeout(() => {
    res.write('e');
    if (end) {
      res.end('"');
    }
    cb();
  }, 800);

  let n = 999999;
  delay(false, function next() {
    n -= 1;
    const end = n < 1;
    delay(end, end ? () => void 0 : next);
  });
});

server.listen(3000);

// Client: fetch the slow response with JSON parsing and a 1000 ms timeout.
request({
  url: 'http://localhost:3000',
  json: true,
  timeout: 1000
}, (err, res) => {
  console.log(err, res);
  server.close();
});
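Because the server writes a chunk every 800 ms, the socket is never idle for the full 1000 ms, and request's timeout option acts as an inactivity timeout once the response has started; the callback therefore may not fire until the entire body has been streamed. The snippet appears to exercise exactly that case: a slow, drip-fed response that a plain timeout does not abort.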