// @eagleoutice/flowr
// Static Dataflow Analyzer and Program Slicer for the R Programming Language
;
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.postProcess = postProcess;
const post_processing_1 = require("../../post-processing");
const used_packages_1 = require("./used-packages");
const fs_1 = __importDefault(require("fs"));
const path_1 = __importDefault(require("path"));
const summarizer_1 = require("../../../../util/summarizer");
const files_1 = require("../../../../util/files");
const strings_1 = require("../../../../util/strings");
const arrays_1 = require("../../../../util/arrays");
/**
 * Aggregates the used-packages feature information collected per file and
 * writes two CSV summaries into `outputPath`:
 *  - `loading-functions.csv`: per loading-function-kind summary statistics
 *  - `all-operators.csv`: per loaded-library summary, merged across all
 *    loading operators (each operator additionally gets its own CSV via
 *    {@link retrieveDataForLoad})
 *
 * @param featureRoot - directory holding the raw per-operator `.txt` dumps
 * @param info - map from file path to the extracted feature data
 * @param outputPath - directory to place the generated CSV files in
 * @param config - statistics configuration, forwarded to `recordFilePath`
 */
function postProcess(featureRoot, info, outputPath, config) {
    // aggregate the per-file counts for each loading-function kind
    const collected = {};
    for (const [filepath, data] of info.entries()) {
        for (const [kind, occurrences] of Object.entries(data.usedPackages)) {
            let entry = collected[kind];
            if (entry === undefined) {
                entry = (0, post_processing_1.emptySummarizedWithProject)();
                collected[kind] = entry;
            }
            entry.count.push(occurrences);
            // only files that actually use the kind count towards unique files/projects
            if (occurrences > 0) {
                (0, post_processing_1.recordFilePath)(entry, filepath, config);
            }
        }
    }
    const variablesOutStream = fs_1.default.createWriteStream(path_1.default.join(outputPath, 'loading-functions.csv'));
    variablesOutStream.write(`kind,unique-projects,unique-files,${(0, summarizer_1.summarizedMeasurement2CsvHeader)()}\n`);
    for (const [kind, entry] of Object.entries(collected)) {
        const summarized = (0, summarizer_1.summarizeMeasurement)(entry.count);
        variablesOutStream.write(`${JSON.stringify(kind)},${entry.uniqueProjects.size},${entry.uniqueFiles.size},${(0, summarizer_1.summarizedMeasurement2Csv)(summarized)}\n`);
    }
    variablesOutStream.close();
    // now we want to collect the names of the loaded libraries,
    // we collect and store them separately (per kind) but also, we want store the summarized results in the end!
    const loadedLibraries = new Map();
    for (const operator of Object.keys(used_packages_1.initialUsedPackageInfos)) {
        // per-operator data is also written to `<operator>.csv` as a side effect
        const perOperator = retrieveDataForLoad(operator, featureRoot, outputPath, config);
        for (const [library, source] of perOperator.entries()) {
            let target = loadedLibraries.get(library);
            if (target === undefined) {
                target = (0, post_processing_1.emptySummarizedWithProject)();
                loadedLibraries.set(library, target);
            }
            // merge counts and unique file/project sets across operators
            target.count.push(...source.count);
            for (const file of source.uniqueFiles) {
                target.uniqueFiles.add(file);
            }
            for (const project of source.uniqueProjects) {
                target.uniqueProjects.add(project);
            }
        }
    }
    const output = path_1.default.join(outputPath, 'all-operators.csv');
    const out = fs_1.default.createWriteStream(output);
    out.write(`kind,unique-projects,unique-files,${(0, summarizer_1.summarizedMeasurement2CsvHeader)()}\n`);
    for (const [library, { count, uniqueProjects, uniqueFiles }] of loadedLibraries.entries()) {
        const summarized = (0, summarizer_1.summarizeMeasurement)(count);
        out.write(`${JSON.stringify(library)},${uniqueProjects.size},${uniqueFiles.size},${(0, summarizer_1.summarizedMeasurement2Csv)(summarized)}\n`);
    }
    out.close();
}
/**
 * Reads the raw `<operator>.txt` dump (one JSON `[packages, context]` tuple
 * per line), summarizes how often each package name is loaded, writes the
 * summary to `<operator>.csv` in `outputPath`, and returns the collected
 * per-package data so the caller can merge it across operators.
 *
 * @param operator - name of the loading operator (also the file base name)
 * @param readFromPath - directory containing the raw `.txt` dumps
 * @param outputPath - directory to place the generated CSV file in
 * @param config - statistics configuration, forwarded to `recordFilePath`
 * @returns map from package name to its summarized-with-project entry
 *          (empty if no dump file exists for the operator)
 */
function retrieveDataForLoad(operator, readFromPath, outputPath, config) {
    const input = path_1.default.join(readFromPath, `${operator}.txt`);
    if (!fs_1.default.existsSync(input)) {
        // if there is nothing with this, just return :)
        return new Map();
    }
    const collected = new Map();
    (0, files_1.readLineByLineSync)(input, (line, lineNumber) => {
        if (line.length === 0) {
            return;
        }
        // occasional progress output for large dumps
        if (lineNumber % 2_500 === 0) {
            console.log(` Processing line ${lineNumber} from ${input}`);
        }
        const [packages, context] = JSON.parse(line.toString());
        // first, we have to collect what this file gives us
        // we normalize surrounding quotation marks
        const normalized = packages.map(name => {
            const quoted = (0, strings_1.startAndEndsWith)(name, '"')
                || (0, strings_1.startAndEndsWith)(name, "'")
                || (0, strings_1.startAndEndsWith)(name, '`');
            return quoted ? name.slice(1, -1) : name;
        });
        const bag = (0, arrays_1.array2bag)(normalized);
        // now we merge it into the global map (oh gosh this is so horrible
        for (const [name, occurrences] of bag.entries()) {
            let entry = collected.get(name);
            if (entry === undefined) {
                entry = (0, post_processing_1.emptySummarizedWithProject)();
                collected.set(name, entry);
            }
            entry.count.push(occurrences);
            if (occurrences > 0) {
                (0, post_processing_1.recordFilePath)(entry, context, config);
            }
        }
    });
    const output = path_1.default.join(outputPath, `${operator}.csv`);
    const out = fs_1.default.createWriteStream(output);
    out.write(`kind,unique-projects,unique-files,${(0, summarizer_1.summarizedMeasurement2CsvHeader)()}\n`);
    for (const [name, { count, uniqueProjects, uniqueFiles }] of collected.entries()) {
        const summarized = (0, summarizer_1.summarizeMeasurement)(count);
        out.write(`${JSON.stringify(name)},${uniqueProjects.size},${uniqueFiles.size},${(0, summarizer_1.summarizedMeasurement2Csv)(summarized)}\n`);
    }
    out.close();
    return collected;
}
//# sourceMappingURL=post-process.js.map