// UNPKG — @eagleoutice/flowr
// Version: (not recorded in this snapshot)
// Static Dataflow Analyzer and Program Slicer for the R Programming Language
// (page header from the UNPKG listing; original file: 289 lines, 26.6 kB)
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.summarizeAllSummarizedStats = summarizeAllSummarizedStats;
exports.summarizeAllUltimateStats = summarizeAllUltimateStats;
exports.processNextSummary = processNextSummary;
exports.processNextUltimateSummary = processNextUltimateSummary;
const process_1 = require("../first-phase/process");
const defaultmap_1 = require("../../../util/collections/defaultmap");
const summarizer_1 = require("../../../util/summarizer");
const assert_1 = require("../../../util/assert");
const stats_1 = require("../../stats/stats");
const semantics_1 = require("../../../abstract-interpretation/data-frame/semantics");
const arrays_1 = require("../../../util/collections/arrays");

/** Per-file input-size measurements, in the order they appear in the summary output. */
const INPUT_FIELDS = [
    'numberOfLines',
    'numberOfNonEmptyLines',
    'numberOfCharacters',
    'numberOfCharactersNoComments',
    'numberOfNonWhitespaceCharacters',
    'numberOfNonWhitespaceCharactersNoComments',
    'numberOfRTokens',
    'numberOfRTokensNoComments',
    'numberOfNormalizedTokens',
    'numberOfNormalizedTokensNoComments'
];

/** Per-file dataflow-graph measurements, in the order they appear in the summary output. */
const DATAFLOW_FIELDS = [
    'numberOfNodes',
    'numberOfFunctionDefinitions',
    'numberOfCalls',
    'numberOfEdges',
    'sizeOfObject'
];

/**
 * Data-frame-shape fields, in output order, as `[key, alreadySummarized]` pairs.
 * `alreadySummarized === true` marks fields whose per-file value is itself a summarized
 * measurement (so aggregation uses `summarizeSummarizedMeasurement`); all others are plain
 * numbers (aggregated with `summarizeMeasurement` in the first summarization step).
 * The order of this list determines the key order of the emitted object — keep it stable.
 */
const SHAPE_FIELDS = [
    ['numberOfResultConstraints', false],
    ['numberOfResultingValues', false],
    ['numberOfResultingBottom', false],
    ['numberOfResultingTop', false],
    ['numberOfEmptyNodes', false],
    ['numberOfOperationNodes', false],
    ['numberOfValueNodes', false],
    ['sizeOfInfo', false],
    ['numberOfEntriesPerNode', true],
    ['numberOfOperations', false],
    ['numberOfTotalConstraints', false],
    ['numberOfTotalExact', false],
    ['numberOfTotalValues', false],
    ['numberOfTotalBottom', false],
    ['numberOfTotalTop', false],
    ['inferredColNames', true],
    ['approxRangeColNames', true],
    ['numberOfColNamesExact', false],
    ['numberOfColNamesValues', false],
    ['numberOfColNamesBottom', false],
    ['numberOfColNamesInfinite', false],
    ['numberOfColNamesTop', false],
    ['inferredColCount', true],
    ['approxRangeColCount', true],
    ['numberOfColCountExact', false],
    ['numberOfColCountValues', false],
    ['numberOfColCountBottom', false],
    ['numberOfColCountInfinite', false],
    ['numberOfColCountTop', false],
    ['inferredRowCount', true],
    ['approxRangeRowCount', true],
    ['numberOfRowCountExact', false],
    ['numberOfRowCountValues', false],
    ['numberOfRowCountBottom', false],
    ['numberOfRowCountInfinite', false],
    ['numberOfRowCountTop', false]
];

/**
 * Collects `stat.dataFrameShape[key]` from every stat, skipping stats for which no
 * data-frame-shape information was recorded.
 *
 * @param stats - the per-file stats to collect from
 * @param key   - the data-frame-shape field to extract
 * @returns the defined values of that field, in input order
 */
function collectShapeValues(stats, key) {
    const { isNotUndefined } = assert_1;
    return stats.map(s => s.dataFrameShape?.[key]).filter(isNotUndefined);
}

/**
 * Summarizes the per-file summarized stats of many analyzed requests into one ultimate stat.
 *
 * @param stats - one summarized stat object per analyzed request/file
 * @returns the aggregated ("ultimate") summary over all given stats
 */
function summarizeAllSummarizedStats(stats) {
    // destructure at call time (not module load time) so circular requires stay safe;
    // calling the extracted functions keeps `this === undefined`, as the original `(0, f)(…)` did
    const { summarizeSummarizedMeasurement, summarizeSummarizedTimePerToken, summarizeTimePerToken, summarizeSummarizedReductions } = process_1;
    const { summarizeMeasurement } = summarizer_1;
    const { arraySum } = arrays_1;
    const { DefaultMap } = defaultmap_1;
    const { DataFrameOperationNames } = semantics_1;

    // accumulators keyed by measurement name
    const commonMeasurements = new DefaultMap(() => []);
    const perSliceMeasurements = new DefaultMap(() => []);
    // flat per-file accumulators
    const sliceTimesPerToken = [];
    const reconstructTimesPerToken = [];
    const totalPerSliceTimesPerToken = [];
    const retrieveTimesPerToken = [];
    const normalizeTimesPerToken = [];
    const dataflowTimesPerToken = [];
    const totalCommonTimesPerToken = [];
    // optional analyses — only collected for files that actually ran them
    const controlFlowTimesPerToken = [];
    const callGraphTimesPerToken = [];
    const dataFrameShapeTimesPerToken = [];
    const reductions = [];
    const reductionsNoFluff = [];
    const inputs = [];
    const dataflows = [];
    let failedToRepParse = 0;
    let timesHitThreshold = 0;
    let totalSlices = 0;

    // NOTE(review): per-file `stat.memory` entries are not aggregated into the result —
    // the previous implementation collected them into a map it never read.
    for (const stat of stats) {
        for (const [measurement, value] of stat.commonMeasurements) {
            // common measurements are bigints; convert for numeric summarization
            commonMeasurements.get(measurement).push(Number(value));
        }
        for (const [measurement, value] of stat.perSliceMeasurements.measurements) {
            perSliceMeasurements.get(measurement).push(value);
        }
        sliceTimesPerToken.push(stat.perSliceMeasurements.sliceTimePerToken);
        reconstructTimesPerToken.push(stat.perSliceMeasurements.reconstructTimePerToken);
        totalPerSliceTimesPerToken.push(stat.perSliceMeasurements.totalPerSliceTimePerToken);
        retrieveTimesPerToken.push(stat.retrieveTimePerToken);
        normalizeTimesPerToken.push(stat.normalizeTimePerToken);
        dataflowTimesPerToken.push(stat.dataflowTimePerToken);
        totalCommonTimesPerToken.push(stat.totalCommonTimePerToken);
        if (stat.controlFlowTimePerToken !== undefined) {
            controlFlowTimesPerToken.push(stat.controlFlowTimePerToken);
        }
        if (stat.callGraphTimePerToken !== undefined) {
            callGraphTimesPerToken.push(stat.callGraphTimePerToken);
        }
        if (stat.dataFrameShapeTimePerToken !== undefined) {
            dataFrameShapeTimesPerToken.push(stat.dataFrameShapeTimePerToken);
        }
        reductions.push(stat.perSliceMeasurements.reduction);
        reductionsNoFluff.push(stat.perSliceMeasurements.reductionNoFluff);
        inputs.push(stat.input);
        dataflows.push(stat.dataflow);
        failedToRepParse += stat.perSliceMeasurements.failedToRepParse;
        totalSlices += stat.perSliceMeasurements.numberOfSlices;
        timesHitThreshold += stat.perSliceMeasurements.timesHitThreshold;
    }

    return {
        totalRequests: stats.length,
        totalSlices: totalSlices,
        commonMeasurements: new Map([...commonMeasurements.entries()]
            .map(([measurement, values]) => [measurement, summarizeMeasurement(values)])),
        perSliceMeasurements: new Map([...perSliceMeasurements.entries()]
            .map(([measurement, values]) => [measurement, summarizeSummarizedMeasurement(values)])),
        sliceTimePerToken: summarizeSummarizedTimePerToken(sliceTimesPerToken),
        reconstructTimePerToken: summarizeSummarizedTimePerToken(reconstructTimesPerToken),
        totalPerSliceTimePerToken: summarizeSummarizedTimePerToken(totalPerSliceTimesPerToken),
        retrieveTimePerToken: summarizeTimePerToken(retrieveTimesPerToken),
        normalizeTimePerToken: summarizeTimePerToken(normalizeTimesPerToken),
        dataflowTimePerToken: summarizeTimePerToken(dataflowTimesPerToken),
        totalCommonTimePerToken: summarizeTimePerToken(totalCommonTimesPerToken),
        // optional analyses stay `undefined` when no file produced them
        controlFlowTimePerToken: controlFlowTimesPerToken.length > 0 ? summarizeTimePerToken(controlFlowTimesPerToken) : undefined,
        callGraphTimePerToken: callGraphTimesPerToken.length > 0 ? summarizeTimePerToken(callGraphTimesPerToken) : undefined,
        dataFrameShapeTimePerToken: dataFrameShapeTimesPerToken.length > 0 ? summarizeTimePerToken(dataFrameShapeTimesPerToken) : undefined,
        failedToRepParse,
        timesHitThreshold,
        reduction: summarizeSummarizedReductions(reductions),
        reductionNoFluff: summarizeSummarizedReductions(reductionsNoFluff),
        input: Object.fromEntries(INPUT_FIELDS
            .map(key => [key, summarizeMeasurement(inputs.map(i => i[key]))])),
        dataflow: Object.fromEntries(DATAFLOW_FIELDS
            .map(key => [key, summarizeMeasurement(dataflows.map(d => d[key]))])),
        dataFrameShape: stats.some(s => s.dataFrameShape !== undefined) ? {
            // file counts are totals, not distributions
            numberOfDataFrameFiles: arraySum(collectShapeValues(stats, 'numberOfDataFrameFiles')),
            numberOfNonDataFrameFiles: arraySum(collectShapeValues(stats, 'numberOfNonDataFrameFiles')),
            // spread keeps the key order given by SHAPE_FIELDS
            ...Object.fromEntries(SHAPE_FIELDS.map(([key, alreadySummarized]) => [
                key,
                (alreadySummarized ? summarizeSummarizedMeasurement : summarizeMeasurement)(collectShapeValues(stats, key))
            ])),
            // files without shape info (or without this operation) count as 0 here
            perOperationNumber: new Map(DataFrameOperationNames.map(name => [
                name,
                summarizeMeasurement(stats.map(s => s.dataFrameShape?.perOperationNumber.get(name) ?? 0))
            ]))
        } : undefined
    };
}

/**
 * Summarizes multiple ultimate stats (e.g. from repeated benchmark runs) into one.
 *
 * @param stats - the ultimate stats to merge; expected to describe the same request set
 * @returns a single ultimate stat aggregating all runs
 */
function summarizeAllUltimateStats(stats) {
    const { summarizeSummarizedMeasurement, summarizeSummarizedTimePerToken, summarizeSummarizedReductions } = process_1;
    const { isNotUndefined } = assert_1;
    const { arraySum } = arrays_1;
    const { CommonSlicerMeasurements, PerSliceMeasurements } = stats_1;
    const { DataFrameOperationNames } = semantics_1;

    /** summarize an optional per-token time field, or `undefined` if no run recorded it */
    const summarizeOptionalTime = (key) => stats.some(s => s[key] !== undefined)
        ? summarizeSummarizedTimePerToken(stats.map(s => s[key]).filter(isNotUndefined))
        : undefined;

    return {
        // these should be deterministic across runs, so max is only a safety net
        // in case something unexpected happens :)
        totalRequests: Math.max(...stats.map(s => s.totalRequests)),
        totalSlices: Math.max(...stats.map(s => s.totalSlices)),
        failedToRepParse: Math.max(...stats.map(s => s.failedToRepParse)),
        timesHitThreshold: Math.max(...stats.map(s => s.timesHitThreshold)),
        // average out / summarize the remaining measurements
        commonMeasurements: new Map(CommonSlicerMeasurements
            .filter(m => stats.some(s => s.commonMeasurements.has(m)))
            .map(m => [m, summarizeSummarizedMeasurement(stats.map(s => s.commonMeasurements.get(m)))])),
        perSliceMeasurements: new Map(PerSliceMeasurements
            .map(m => [m, summarizeSummarizedMeasurement(stats.map(s => s.perSliceMeasurements.get(m)))])),
        sliceTimePerToken: summarizeSummarizedTimePerToken(stats.map(s => s.sliceTimePerToken)),
        reconstructTimePerToken: summarizeSummarizedTimePerToken(stats.map(s => s.reconstructTimePerToken)),
        totalPerSliceTimePerToken: summarizeSummarizedTimePerToken(stats.map(s => s.totalPerSliceTimePerToken)),
        retrieveTimePerToken: summarizeSummarizedTimePerToken(stats.map(s => s.retrieveTimePerToken)),
        normalizeTimePerToken: summarizeSummarizedTimePerToken(stats.map(s => s.normalizeTimePerToken)),
        dataflowTimePerToken: summarizeSummarizedTimePerToken(stats.map(s => s.dataflowTimePerToken)),
        totalCommonTimePerToken: summarizeSummarizedTimePerToken(stats.map(s => s.totalCommonTimePerToken)),
        controlFlowTimePerToken: summarizeOptionalTime('controlFlowTimePerToken'),
        // FIX: callGraphTimePerToken was silently dropped here although
        // summarizeAllSummarizedStats produces it — aggregate it like its siblings
        callGraphTimePerToken: summarizeOptionalTime('callGraphTimePerToken'),
        dataFrameShapeTimePerToken: summarizeOptionalTime('dataFrameShapeTimePerToken'),
        reduction: summarizeSummarizedReductions(stats.map(s => s.reduction)),
        reductionNoFluff: summarizeSummarizedReductions(stats.map(s => s.reductionNoFluff)),
        input: Object.fromEntries(INPUT_FIELDS
            .map(key => [key, summarizeSummarizedMeasurement(stats.map(s => s.input[key]))])),
        dataflow: Object.fromEntries(DATAFLOW_FIELDS
            .map(key => [key, summarizeSummarizedMeasurement(stats.map(s => s.dataflow[key]))])),
        dataFrameShape: stats.some(s => s.dataFrameShape !== undefined) ? {
            numberOfDataFrameFiles: arraySum(collectShapeValues(stats, 'numberOfDataFrameFiles')),
            numberOfNonDataFrameFiles: arraySum(collectShapeValues(stats, 'numberOfNonDataFrameFiles')),
            // every shape field is already summarized at this level; spread keeps SHAPE_FIELDS order
            ...Object.fromEntries(SHAPE_FIELDS.map(([key]) => [
                key,
                summarizeSummarizedMeasurement(collectShapeValues(stats, key))
            ])),
            // unlike the first phase, runs without a recorded value are skipped (not counted as 0)
            perOperationNumber: new Map(DataFrameOperationNames.map(name => [
                name,
                summarizeSummarizedMeasurement(stats.map(s => s.dataFrameShape?.perOperationNumber.get(name)).filter(isNotUndefined))
            ]))
        } : undefined
    };
}

/**
 * Parses the next line of a first-phase summary file and appends the restored
 * summary object to `allSummarized`.
 *
 * JSON cannot represent `Map`s or `BigInt`s, so both are restored here:
 * maps were serialized as arrays of `[key, value]` pairs, bigints as strings
 * with a trailing `'n'`.
 *
 * @param line          - one serialized summary (a `Buffer`/string holding JSON)
 * @param allSummarized - output array the restored summary is pushed onto
 */
function processNextSummary(line, allSummarized) {
    const { guard } = assert_1;
    const summarize = JSON.parse(line.toString()).summarize;
    allSummarized.push({
        ...summarize,
        // restore maps from their entry-pair serialization
        memory: new Map(summarize.memory),
        commonMeasurements: new Map(summarize.commonMeasurements.map(([measurement, value]) => {
            guard(value.endsWith('n'), 'Expected a bigint');
            // strip the trailing 'n' marker and restore the bigint
            return [measurement, BigInt(value.slice(0, -1))];
        })),
        perSliceMeasurements: {
            ...summarize.perSliceMeasurements,
            measurements: new Map(summarize.perSliceMeasurements.measurements),
        },
        dataFrameShape: summarize.dataFrameShape !== undefined ? {
            ...summarize.dataFrameShape,
            perOperationNumber: new Map(summarize.dataFrameShape.perOperationNumber)
        } : undefined
    });
}

/**
 * Parses the next line of an ultimate-summary file and appends the restored
 * stat object to `allSummarized`, re-creating the `Map`s that JSON flattened
 * into entry-pair arrays.
 *
 * @param line          - one serialized ultimate stat (a `Buffer`/string holding JSON)
 * @param allSummarized - output array the restored stat is pushed onto
 */
function processNextUltimateSummary(line, allSummarized) {
    const parsed = JSON.parse(line.toString());
    allSummarized.push({
        ...parsed,
        // restore maps from their entry-pair serialization
        commonMeasurements: new Map(parsed.commonMeasurements),
        perSliceMeasurements: new Map(parsed.perSliceMeasurements),
        dataFrameShape: parsed.dataFrameShape !== undefined ? {
            ...parsed.dataFrameShape,
            perOperationNumber: new Map(parsed.dataFrameShape.perOperationNumber)
        } : undefined
    });
}
//# sourceMappingURL=process.js.map