/**
 * @eagleoutice/flowr
 * Static Dataflow Analyzer and Program Slicer for the R Programming Language
 * (compiled JavaScript output)
 */
;
Object.defineProperty(exports, "__esModule", { value: true });
exports.summarizeAllSummarizedStats = summarizeAllSummarizedStats;
exports.summarizeAllUltimateStats = summarizeAllUltimateStats;
exports.processNextSummary = processNextSummary;
exports.processNextUltimateSummary = processNextUltimateSummary;
const process_1 = require("../first-phase/process");
const defaultmap_1 = require("../../../util/defaultmap");
const summarizer_1 = require("../../../util/summarizer");
const assert_1 = require("../../../util/assert");
const stats_1 = require("../../stats/stats");
/**
 * Folds a collection of per-file summarized stats into one aggregate summary.
 * Keyed measurement lists are gathered per key and then reduced with the
 * appropriate summarizer; scalar counters are summed; per-file fields are
 * projected with `map` and summarized in bulk.
 *
 * @param stats - array of per-file summarized statistics
 * @returns the combined summary object (same shape/key order as before)
 */
function summarizeAllSummarizedStats(stats) {
    // Collect every measurement series under its key so each series can be
    // summarized once after the scan.
    const commonMeasurements = new defaultmap_1.DefaultMap(() => []);
    const perSliceMeasurements = new defaultmap_1.DefaultMap(() => []);
    // NOTE(review): `memory` is collected here but never used in the returned
    // object — kept as-is to preserve behavior; confirm whether it should be
    // part of the result.
    const memory = new defaultmap_1.DefaultMap(() => []);
    let failedToRepParse = 0;
    let timesHitThreshold = 0;
    let totalSlices = 0;
    for (const stat of stats) {
        for (const [key, value] of stat.commonMeasurements) {
            // common measurements arrive as bigints; summarize over numbers
            commonMeasurements.get(key).push(Number(value));
        }
        for (const [key, value] of stat.perSliceMeasurements.measurements) {
            perSliceMeasurements.get(key).push(value);
        }
        for (const [key, value] of stat.memory) {
            memory.get(key).push(value);
        }
        failedToRepParse += stat.perSliceMeasurements.failedToRepParse;
        totalSlices += stat.perSliceMeasurements.numberOfSlices;
        timesHitThreshold += stat.perSliceMeasurements.timesHitThreshold;
    }
    const inputs = stats.map(s => s.input);
    const dataflows = stats.map(s => s.dataflow);
    const summarize = summarizer_1.summarizeMeasurement;
    // keys of the `input` sub-object, in output order
    const inputKeys = [
        'numberOfLines', 'numberOfNonEmptyLines', 'numberOfCharacters',
        'numberOfCharactersNoComments', 'numberOfNonWhitespaceCharacters',
        'numberOfNonWhitespaceCharactersNoComments', 'numberOfRTokens',
        'numberOfRTokensNoComments', 'numberOfNormalizedTokens',
        'numberOfNormalizedTokensNoComments'
    ];
    // keys of the `dataflow` sub-object, in output order
    const dataflowKeys = [
        'numberOfNodes', 'numberOfFunctionDefinitions', 'numberOfCalls',
        'numberOfEdges', 'sizeOfObject', 'storedVertexIndices',
        'storedEnvIndices', 'overwrittenIndices'
    ];
    return {
        totalRequests: stats.length,
        totalSlices,
        commonMeasurements: new Map([...commonMeasurements.entries()].map(([key, values]) => [key, summarize(values)])),
        perSliceMeasurements: new Map([...perSliceMeasurements.entries()].map(([key, values]) => [key, (0, process_1.summarizeSummarizedMeasurement)(values)])),
        sliceTimePerToken: (0, process_1.summarizeSummarizedTimePerToken)(stats.map(s => s.perSliceMeasurements.sliceTimePerToken)),
        reconstructTimePerToken: (0, process_1.summarizeSummarizedTimePerToken)(stats.map(s => s.perSliceMeasurements.reconstructTimePerToken)),
        totalPerSliceTimePerToken: (0, process_1.summarizeSummarizedTimePerToken)(stats.map(s => s.perSliceMeasurements.totalPerSliceTimePerToken)),
        retrieveTimePerToken: (0, process_1.summarizeTimePerToken)(stats.map(s => s.retrieveTimePerToken)),
        normalizeTimePerToken: (0, process_1.summarizeTimePerToken)(stats.map(s => s.normalizeTimePerToken)),
        dataflowTimePerToken: (0, process_1.summarizeTimePerToken)(stats.map(s => s.dataflowTimePerToken)),
        totalCommonTimePerToken: (0, process_1.summarizeTimePerToken)(stats.map(s => s.totalCommonTimePerToken)),
        failedToRepParse,
        timesHitThreshold,
        reduction: (0, process_1.summarizeSummarizedReductions)(stats.map(s => s.perSliceMeasurements.reduction)),
        reductionNoFluff: (0, process_1.summarizeSummarizedReductions)(stats.map(s => s.perSliceMeasurements.reductionNoFluff)),
        input: Object.fromEntries(inputKeys.map(key => [key, summarize(inputs.map(i => i[key]))])),
        dataflow: Object.fromEntries(dataflowKeys.map(key => [key, summarize(dataflows.map(d => d[key]))]))
    };
}
/**
 * Combines several already-aggregated ("ultimate") summaries into one.
 * Counters are taken as the max across inputs; everything else is
 * re-summarized from the per-summary summarized measurements.
 *
 * @param stats - array of aggregate summaries to merge
 * @returns a single merged summary (same shape/key order as before)
 */
function summarizeAllUltimateStats(stats) {
    const resummarize = process_1.summarizeSummarizedMeasurement;
    const timePerToken = process_1.summarizeSummarizedTimePerToken;
    // project one field out of every summary
    const pick = (selector) => stats.map(selector);
    // keys of the `input` sub-object, in output order
    const inputKeys = [
        'numberOfLines', 'numberOfNonEmptyLines', 'numberOfCharacters',
        'numberOfCharactersNoComments', 'numberOfNonWhitespaceCharacters',
        'numberOfNonWhitespaceCharactersNoComments', 'numberOfRTokens',
        'numberOfRTokensNoComments', 'numberOfNormalizedTokens',
        'numberOfNormalizedTokensNoComments'
    ];
    // keys of the `dataflow` sub-object, in output order
    const dataflowKeys = [
        'numberOfNodes', 'numberOfFunctionDefinitions', 'numberOfCalls',
        'numberOfEdges', 'sizeOfObject', 'storedVertexIndices',
        'storedEnvIndices', 'overwrittenIndices'
    ];
    return {
        // these should be deterministic, so we don't technically need to use max, but we do just in case something unexpected happens :)
        totalRequests: Math.max(...pick(s => s.totalRequests)),
        totalSlices: Math.max(...pick(s => s.totalSlices)),
        failedToRepParse: Math.max(...pick(s => s.failedToRepParse)),
        timesHitThreshold: Math.max(...pick(s => s.timesHitThreshold)),
        // average out / summarize other measurements
        commonMeasurements: new Map(stats_1.CommonSlicerMeasurements.map(m => [m, resummarize(pick(s => s.commonMeasurements.get(m)))])),
        perSliceMeasurements: new Map(stats_1.PerSliceMeasurements.map(m => [m, resummarize(pick(s => s.perSliceMeasurements.get(m)))])),
        sliceTimePerToken: timePerToken(pick(s => s.sliceTimePerToken)),
        reconstructTimePerToken: timePerToken(pick(s => s.reconstructTimePerToken)),
        totalPerSliceTimePerToken: timePerToken(pick(s => s.totalPerSliceTimePerToken)),
        retrieveTimePerToken: timePerToken(pick(s => s.retrieveTimePerToken)),
        normalizeTimePerToken: timePerToken(pick(s => s.normalizeTimePerToken)),
        dataflowTimePerToken: timePerToken(pick(s => s.dataflowTimePerToken)),
        totalCommonTimePerToken: timePerToken(pick(s => s.totalCommonTimePerToken)),
        reduction: (0, process_1.summarizeSummarizedReductions)(pick(s => s.reduction)),
        reductionNoFluff: (0, process_1.summarizeSummarizedReductions)(pick(s => s.reductionNoFluff)),
        input: Object.fromEntries(inputKeys.map(key => [key, resummarize(pick(s => s.input[key]))])),
        dataflow: Object.fromEntries(dataflowKeys.map(key => [key, resummarize(pick(s => s.dataflow[key]))]))
    };
}
/**
 * Parses one serialized summary line and appends the revived summary object
 * to `allSummarized`. JSON cannot represent Maps or bigints, so this revives:
 * the `memory` and `perSliceMeasurements.measurements` entry arrays back into
 * Maps, and `commonMeasurements` values from their `"<digits>n"` string form
 * back into bigints.
 *
 * @param line - a buffer/string holding one JSON-encoded `{ summarize: ... }` record
 * @param allSummarized - accumulator array receiving the revived summary
 */
function processNextSummary(line, allSummarized) {
    const raw = JSON.parse(line.toString()).summarize;
    // bigints were serialized with a trailing 'n' marker — strip it and revive
    const commonMeasurements = new Map(raw.commonMeasurements.map(([key, value]) => {
        (0, assert_1.guard)(value.endsWith('n'), 'Expected a bigint');
        return [key, BigInt(value.slice(0, -1))];
    }));
    allSummarized.push({
        ...raw,
        // restore maps from their serialized entry arrays
        memory: new Map(raw.memory),
        commonMeasurements,
        perSliceMeasurements: {
            ...raw.perSliceMeasurements,
            measurements: new Map(raw.perSliceMeasurements.measurements),
        },
    });
}
/**
 * Parses one serialized ultimate-summary line and appends it to
 * `allSummarized`, reviving the two measurement Maps (serialized as entry
 * arrays) back into real Map instances.
 *
 * @param line - a buffer/string holding one JSON-encoded ultimate summary
 * @param allSummarized - accumulator array receiving the revived summary
 */
function processNextUltimateSummary(line, allSummarized) {
    const parsed = JSON.parse(line.toString());
    allSummarized.push({
        ...parsed,
        // restore maps from their serialized entry arrays
        commonMeasurements: new Map(parsed.commonMeasurements),
        perSliceMeasurements: new Map(parsed.perSliceMeasurements),
    });
}
//# sourceMappingURL=process.js.map