// @eagleoutice/flowr — Static Dataflow Analyzer and Program Slicer for the R Programming Language
;
// TypeScript interop helper: wraps CommonJS modules so `default` imports resolve
// uniformly whether or not the module was compiled from an ES module.
var __importDefault = (this && this.__importDefault) || function (mod) {
    if (mod && mod.__esModule) {
        return mod;
    }
    return { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.postProcess = postProcess;
const comments_1 = require("./comments");
const fs_1 = __importDefault(require("fs"));
const path_1 = __importDefault(require("path"));
const summarizer_1 = require("../../../../util/summarizer");
const assert_1 = require("../../../../util/assert");
// monoids would be helpful :c
// Folds the per-file comment counts `b` into the accumulator `a`:
// appends the raw count and its fraction of the file's lines, and — when the
// kind occurs at all — records the file and its project (the path segment at
// index `skipForProjects`) in the corresponding uniqueness sets.
function appendCommentsPostProcessing(a, b, numberOfLines, filepath, skipForProjects) {
    for (const [commentKind, occurrences] of Object.entries(b)) {
        const accumulator = a[commentKind];
        (0, assert_1.guard)(accumulator !== undefined, `key ${commentKind} is not present in the comments post processing`);
        accumulator.count.push(occurrences);
        accumulator.fracOfLines.push(occurrences / numberOfLines);
        if (occurrences > 0) {
            accumulator.uniqueFiles.add(filepath);
            // project name = path segment after skipping `skipForProjects` leading segments
            const projectName = filepath.split(path_1.default.sep)[skipForProjects] ?? '';
            accumulator.uniqueProjects.add(projectName);
        }
    }
}
// Produces a fresh, empty accumulator for one comment kind (new arrays/sets each call).
const initialCommentsMeta = () => {
    return {
        count: [],
        uniqueProjects: new Set(),
        uniqueFiles: new Set(),
        fracOfLines: [],
    };
};
// Maps `fn` over every value of `data`, returning a new object with the same keys.
function mapComments(data, fn) {
    return Object.fromEntries(
        Object.entries(data).map(([key, value]) => [key, fn(value)])
    );
}
/**
 * Summarizes the collected comment statistics of all analyzed files into
 * `<outputPath>/comments.csv` (one row per comment kind, with unique project/file
 * counts and summarized count / fraction-of-lines measurements).
 *
 * @param featureRoot - root folder of the feature data (unused here, kept for the common post-process signature)
 * @param info        - map from file path to its per-file feature information
 * @param outputPath  - directory to write `comments.csv` into
 * @param config      - provides `projectSkip`, the number of leading path segments before the project name
 */
function postProcess(featureRoot, info, outputPath, config) {
    // for each we collect the count and the number of files that contain them
    const collected = mapComments(comments_1.initialCommentInfo, initialCommentsMeta);
    for (const [filepath, feature] of info.entries()) {
        appendCommentsPostProcessing(collected, feature.comments, feature.stats.lines[0].length, filepath, config.projectSkip);
    }
    const fnOutStream = fs_1.default.createWriteStream(path_1.default.join(outputPath, 'comments.csv'));
    fnOutStream.write(`kind,unique-projects,unique-files,${(0, summarizer_1.summarizedMeasurement2CsvHeader)('count')},${(0, summarizer_1.summarizedMeasurement2CsvHeader)('frac-of-lines')}\n`);
    for (const [key, val] of Object.entries(collected)) {
        const { count, uniqueProjects, uniqueFiles, fracOfLines } = val;
        const counts = (0, summarizer_1.summarizeMeasurement)(count);
        const lineFrac = (0, summarizer_1.summarizeMeasurement)(fracOfLines);
        fnOutStream.write(`${JSON.stringify(key)},${uniqueProjects.size},${uniqueFiles.size},${(0, summarizer_1.summarizedMeasurement2Csv)(counts)},${(0, summarizer_1.summarizedMeasurement2Csv)(lineFrac)}\n`);
    }
    // close the stream so buffered rows are flushed and the file descriptor is released
    // (the original leaked the stream, risking truncated CSV output on process exit)
    fnOutStream.close();
}
//# sourceMappingURL=post-process.js.map