UNPKG

glutenfree

Version:

A profiler/log analyzer for nginx/Cetrea Aw.

70 lines (53 loc) 1.69 kB
#!/usr/bin/env node
# A profiler/log analyzer for nginx/Cetrea Aw.
# Reads the input file (-i) line by line, runs each line through the loaded
# parsers, aggregates counts per unique parsed entry, and writes the
# resulting stats object as JSON to the output file (-o).
fs     = require("fs")
byline = require("byline")
util   = require("util")
_      = require("underscore")
argv   = require('optimist')
  .usage('Usage: $0 -i [inputfile] -o [outputfile] -p [parser]')
  .demand(["i","o"])
  .argv

# Require all line parsers found in ./LineParsers, optionally filtered by -p.
# FIX: anchor the extension test with /\.js$/ — the original /.*\.js/ was
# unanchored and would also match e.g. "*.json" or "*.js.map" files,
# causing spurious require() calls.
parsers = []
for file in fs.readdirSync("LineParsers") when file.match(/\.js$/)
  do (file) ->
    p = require("./LineParsers/#{file}").parser
    if (argv.p? and file.match argv.p) or not argv.p
      console.log "loaded '#{file}' parser"
      parsers.push p

# create stream (byline emits one "data" event per input line)
stream = byline(fs.createReadStream(argv.i))

# After parsing, derive each unique entry's share of the total as a percentage.
postProcess = (stats) ->
  for id, info of stats.uniques
    do (id, info) ->
      info.percent = (info.count/stats.total)*100

# line by line parsing state
entries = []
stats = { total: 0, uniques: {} }
lineno = 0

# Progress indicator: rewrite the current terminal line via "\r" instead of
# scrolling one line per input record.
console.clear = (msg) -> process.stdout.write("#{msg}\r")

stream.on("data", (line) =>
  console.clear "##{lineno++}/#{stats.total}"
  for parser in parsers
    parsed = parser.parse(line)
    if parsed
      stats.total++
      # first occurrence of this id becomes the canonical entry
      stats.uniques[parsed.id] ?= parsed
      #stats.uniques[parsed.id].instances ?= []
      #stats.uniques[parsed.id].instances.push parsed.variables
      # add count
      stats.uniques[parsed.id].count ?= 0
      stats.uniques[parsed.id].count++
      # entries.push(parsed)
      break # and do not try other parsers
).on("end", ->
  console.log "\r\npost processing"
  postProcess(stats)
  fs.writeFile(
    argv.o,
    JSON.stringify(stats),
    (err) ->
      if not err? then console.log "done" else console.log "error writing to #{argv.o}"
  )
)