/*
 * @eagleoutice/flowr
 * Static Dataflow Analyzer and Program Slicer for the R Programming Language
 * (listing metadata: 372 lines, 17.3 kB, JavaScript)
 */
"use strict";
// TypeScript interop helper: re-exports property `k` of module `m` on `o` under the
// name `k2` (defaulting to `k`). When property descriptors are available, a live
// accessor is installed unless the source already exposes a suitable getter.
var __createBinding = (this && this.__createBinding) || (Object.create
    ? function (target, source, sourceKey, targetKey) {
        if (targetKey === undefined) targetKey = sourceKey;
        var descriptor = Object.getOwnPropertyDescriptor(source, sourceKey);
        var needsLiveGetter = !descriptor ||
            ("get" in descriptor ? !source.__esModule : descriptor.writable || descriptor.configurable);
        if (needsLiveGetter) {
            descriptor = { enumerable: true, get: function () { return source[sourceKey]; } };
        }
        Object.defineProperty(target, targetKey, descriptor);
    }
    : function (target, source, sourceKey, targetKey) {
        // legacy engines without Object.create: plain copy, no live binding
        if (targetKey === undefined) targetKey = sourceKey;
        target[targetKey] = source[sourceKey];
    });
// TypeScript interop helper: attaches the namespace's `default` member. The
// descriptor path yields a non-writable, non-configurable property (matching
// ESM semantics); the fallback is a plain assignment for legacy engines.
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create
    ? function (target, value) {
        Object.defineProperty(target, "default", { enumerable: true, value: value });
    }
    : function (target, value) {
        target["default"] = value;
    });
// TypeScript interop helper: wraps a CommonJS module as an ESM-style namespace.
// Real ESM modules (`__esModule`) pass through untouched; otherwise every own
// key except "default" is re-bound onto a fresh namespace object, and the module
// itself becomes that namespace's `default`.
var __importStar = (this && this.__importStar) || (function () {
    // chooses the key-enumeration strategy once, on first use, by replacing itself
    var ownKeys = function (o) {
        ownKeys = Object.getOwnPropertyNames || function (obj) {
            var keys = [];
            for (var key in obj) {
                if (Object.prototype.hasOwnProperty.call(obj, key)) keys.push(key);
            }
            return keys;
        };
        return ownKeys(o);
    };
    return function (mod) {
        if (mod && mod.__esModule) {
            return mod;
        }
        var result = {};
        if (mod != null) {
            var keys = ownKeys(mod);
            for (var i = 0; i < keys.length; i++) {
                if (keys[i] !== "default") {
                    __createBinding(result, mod, keys[i]);
                }
            }
        }
        __setModuleDefault(result, mod);
        return result;
    };
})();
// TypeScript interop helper: a real ESM module is returned as-is, anything
// else (CommonJS export, primitive, null) is wrapped as `{ default: value }`.
var __importDefault = (this && this.__importDefault) || function (mod) {
    if (mod && mod.__esModule) {
        return mod;
    }
    return { "default": mod };
};
// mark this module as transpiled ESM so the interop helpers above take the fast path
Object.defineProperty(exports, "__esModule", { value: true });
// public surface: the connection class (placeholder here, assigned after its
// declaration below) and the result sanitizer used before serializing results
exports.FlowRServerConnection = void 0;
exports.sanitizeAnalysisResults = sanitizeAnalysisResults;
const send_1 = require("./send");
const validate_1 = require("./validate");
const message_analysis_1 = require("./messages/message-analysis");
const message_slice_1 = require("./messages/message-slice");
const server_1 = require("./server");
const message_repl_1 = require("./messages/message-repl");
const core_1 = require("../core");
const extract_cfg_1 = require("../../../control-flow/extract-cfg");
const quads_1 = require("../../../util/quads");
const print_1 = require("../../../core/print/print");
const ansi_1 = require("../../../util/text/ansi");
const default_pipelines_1 = require("../../../core/steps/pipeline/default-pipelines");
const graph_1 = require("../../../dataflow/graph/graph");
const tmp = __importStar(require("tmp"));
const fs_1 = __importDefault(require("fs"));
const magic_comments_1 = require("../../../reconstruct/auto-select/magic-comments");
const message_lineage_1 = require("./messages/message-lineage");
const repl_lineage_1 = require("../commands/repl-lineage");
const assert_1 = require("../../../util/assert");
const auto_select_defaults_1 = require("../../../reconstruct/auto-select/auto-select-defaults");
const message_query_1 = require("./messages/message-query");
const query_1 = require("../../../queries/query");
const compact_1 = require("./compact");
/**
* Each connection handles a single client, answering to its requests.
* There is no need to construct this class manually, {@link FlowRServer} will do it for you.
*/
class FlowRServerConnection {
    // the network socket this connection answers on
    socket;
    // parser/shell backend supplied by the server, shared by all requests of this connection
    parser;
    // human-readable connection name, used as the logging prefix
    name;
    logger;
    // whether REPL requests may access the underlying R session (security-sensitive flag)
    allowRSessionAccess;
    // maps token to information
    fileMap = new Map();
    // we do not have to ensure synchronized shell-access as we are always running synchronized
    constructor(socket, name, parser, allowRSessionAccess) {
        this.socket = socket;
        this.parser = parser;
        this.name = name;
        this.logger = server_1.serverLog.getSubLogger({ name });
        // incoming traffic is newline-delimited; handleData buffers partial chunks
        this.socket.on('data', data => this.handleData(String(data)));
        this.socket.on('error', e => this.logger.error(`[${this.name}] Error while handling connection: ${String(e)}`));
        this.socket.on('close', () => {
            // drop every analysis cached for this client once it disconnects
            this.fileMap.clear();
        });
        this.allowRSessionAccess = allowRSessionAccess;
    }
    // accumulates chunks until a newline-terminated message is complete
    currentMessageBuffer = '';
    /**
     * Entry point for raw socket data: buffers until a chunk ends with a newline,
     * validates the assembled message, and dispatches to the matching handler.
     * An unsupported message type is answered with a fatal error and the
     * connection is closed.
     */
    handleData(message) {
        if (!message.endsWith('\n')) {
            // incomplete message — keep it and wait for the remainder
            // NOTE(review): assumes one chunk never contains more than one complete message; confirm
            this.currentMessageBuffer += message;
            this.logger.trace(`[${this.name}] Received partial message. Buffering ${this.currentMessageBuffer.length}.`);
            return;
        }
        message = this.currentMessageBuffer + message;
        // NOTE(review): guard presumably avoids building the debug string when not needed —
        // confirm the comparison direction against the LogLevel enum used by the logger
        if (this.logger.settings.minLevel >= 2 /* LogLevel.Debug */) {
            this.logger.debug(`[${this.name}] Received message: ${message}`);
        }
        this.currentMessageBuffer = '';
        const request = (0, validate_1.validateBaseMessageFormat)(message);
        if (request.type === 'error') {
            (0, validate_1.answerForValidationError)(this.socket, request);
            return;
        }
        switch (request.message.type) {
            case 'request-file-analysis':
                // async; errors are reported to the client inside the handler
                void this.handleFileAnalysisRequest(request.message);
                break;
            case 'request-slice':
                this.handleSliceRequest(request.message);
                break;
            case 'request-repl-execution':
                this.handleRepl(request.message);
                break;
            case 'request-lineage':
                this.handleLineageRequest(request.message);
                break;
            case 'request-query':
                this.handleQueryRequest(request.message);
                break;
            default:
                (0, send_1.sendMessage)(this.socket, {
                    id: request.message.id,
                    type: 'error',
                    fatal: true,
                    reason: `The message type ${JSON.stringify(request.message.type ?? 'undefined')} is not supported.`
                });
                // fatal errors terminate the connection
                this.socket.end();
        }
    }
    /**
     * Runs the slicing pipeline for the requested file (or inline content) and
     * answers with the analysis results in the requested format. When the request
     * carries a file token, the pipeline is cached in {@link fileMap} so later
     * slice/lineage/query requests can reuse it.
     */
    async handleFileAnalysisRequest(base) {
        const requestResult = (0, validate_1.validateMessage)(base, message_analysis_1.requestAnalysisMessage);
        if (requestResult.type === 'error') {
            (0, validate_1.answerForValidationError)(this.socket, requestResult, base.id);
            return;
        }
        const message = requestResult.message;
        this.logger.info(`[${this.name}] Received file analysis request for ${message.filename ?? 'unknown file'}${message.filetoken ? ' with token: ' + message.filetoken : ''}`);
        if (message.filetoken && this.fileMap.has(message.filetoken)) {
            this.logger.warn(`File token ${message.filetoken} already exists. Overwriting.`);
            // explicitly delete the previous store
            this.fileMap.delete(message.filetoken);
        }
        // temp file backs inline `content` requests, see createPipelineExecutorForRequest
        const tempFile = tmp.fileSync({ postfix: '.R' });
        const slicer = this.createPipelineExecutorForRequest(message, tempFile.name);
        await slicer.allRemainingSteps(false).then(async (results) => await this.sendFileAnalysisResponse(slicer, results, message))
            .catch(e => {
                // analysis failures are non-fatal: report them and keep the connection alive
                this.logger.error(`[${this.name}] Error while analyzing file ${message.filename ?? 'unknown file'}: ${String(e)}`);
                (0, send_1.sendMessage)(this.socket, {
                    id: message.id,
                    type: 'error',
                    fatal: false,
                    reason: `Error while analyzing file ${message.filename ?? 'unknown file'}: ${String(e)}`
                });
            });
        // this is an interestingly named function that means "I am a callback that removes a file" - so this deletes the file
        tempFile.removeCallback();
    }
    /**
     * Serializes the (sanitized) analysis results in the client's requested format
     * ('n-quads', 'compact', or plain JSON) and sends the response message.
     * The control-flow graph is included only when `message.cfg` is set.
     */
    async sendFileAnalysisResponse(slicer, results, message) {
        let cfg = undefined;
        if (message.cfg) {
            cfg = (0, extract_cfg_1.extractCfg)(results.normalize, results.dataflow?.graph);
        }
        // fresh quad-id generator per serialization target; context is the filename
        const config = () => ({ context: message.filename ?? 'unknown', getId: (0, quads_1.defaultQuadIdGenerator)() });
        // strip id maps that should not travel over the wire (see sanitizeAnalysisResults)
        const sanitizedResults = sanitizeAnalysisResults(results);
        const pipeline = slicer.getPipeline();
        const parseStep = pipeline.steps.get('parse');
        const normalizedStep = pipeline.steps.get('normalize');
        const dataflowStep = pipeline.steps.get('dataflow');
        (0, assert_1.guard)(parseStep !== undefined && normalizedStep !== undefined && dataflowStep !== undefined, 'All steps must be present');
        if (message.format === 'n-quads') {
            (0, send_1.sendMessage)(this.socket, {
                type: 'response-file-analysis',
                format: 'n-quads',
                id: message.id,
                cfg: cfg ? (0, extract_cfg_1.cfg2quads)(cfg, config()) : undefined,
                results: {
                    parse: await (0, print_1.printStepResult)(parseStep, sanitizedResults.parse, 5 /* StepOutputFormat.RdfQuads */, config()),
                    normalize: await (0, print_1.printStepResult)(normalizedStep, sanitizedResults.normalize, 5 /* StepOutputFormat.RdfQuads */, config()),
                    dataflow: await (0, print_1.printStepResult)(dataflowStep, sanitizedResults.dataflow, 5 /* StepOutputFormat.RdfQuads */, config())
                }
            });
        }
        else if (message.format === 'compact') {
            (0, send_1.sendMessage)(this.socket, {
                type: 'response-file-analysis',
                format: 'compact',
                id: message.id,
                cfg: cfg ? (0, compact_1.compact)(cfg) : undefined,
                results: (0, compact_1.compact)(sanitizedResults)
            });
        }
        else {
            // default: plain JSON
            (0, send_1.sendMessage)(this.socket, {
                type: 'response-file-analysis',
                format: 'json',
                id: message.id,
                cfg,
                results: sanitizedResults
            });
        }
    }
    /**
     * Builds the slicing pipeline for an analysis request. Inline `content` is
     * written to `tempFile` first; otherwise the request's `filepath` (a single
     * path or a list of paths) is used directly. If the request carries a file
     * token, the pipeline is registered in {@link fileMap} under that token.
     * @throws {Error} if the request has neither content nor a filepath
     */
    createPipelineExecutorForRequest(message, tempFile) {
        let request;
        if (message.content !== undefined) {
            // we store the code in a temporary file in case it's too big for the shell to handle
            fs_1.default.writeFileSync(tempFile, message.content ?? '');
            request = { request: 'file', content: tempFile };
        }
        else if (message.filepath !== undefined) {
            if (typeof message.filepath === 'string') {
                request = { request: 'file', content: message.filepath };
            }
            else {
                // multiple files: one file request per path
                request = message.filepath.map(fp => ({ request: 'file', content: fp }));
            }
        }
        else {
            throw new Error('Either content or filepath must be defined.');
        }
        const slicer = (0, default_pipelines_1.createSlicePipeline)(this.parser, {
            request,
            criterion: [] // currently unknown
        });
        if (message.filetoken) {
            this.logger.info(`Storing file token ${message.filetoken}`);
            this.fileMap.set(message.filetoken, {
                filename: message.filename,
                pipeline: slicer
            });
        }
        return slicer;
    }
    /**
     * Slices a previously analyzed file (looked up by its file token) for the
     * requested criteria and answers with the per-request step results.
     * Requests for unknown tokens are answered with a non-fatal error.
     */
    handleSliceRequest(base) {
        const requestResult = (0, validate_1.validateMessage)(base, message_slice_1.requestSliceMessage);
        if (requestResult.type === 'error') {
            (0, validate_1.answerForValidationError)(this.socket, requestResult, base.id);
            return;
        }
        const request = requestResult.message;
        this.logger.info(`[${request.filetoken}] Received slice request with criteria ${JSON.stringify(request.criterion)}`);
        const fileInformation = this.fileMap.get(request.filetoken);
        if (!fileInformation) {
            (0, send_1.sendMessage)(this.socket, {
                id: request.id,
                type: 'error',
                fatal: false,
                reason: `The file token ${request.filetoken} has never been analyzed.`
            });
            return;
        }
        // re-target the cached pipeline to the new criteria; magic comments in the
        // source are honored unless the client explicitly disabled them
        fileInformation.pipeline.updateRequest({
            criterion: request.criterion,
            autoSelectIf: request.noMagicComments ? auto_select_defaults_1.doNotAutoSelect : (0, magic_comments_1.makeMagicCommentHandler)(auto_select_defaults_1.doNotAutoSelect)
        });
        void fileInformation.pipeline.allRemainingSteps(true).then(results => {
            (0, send_1.sendMessage)(this.socket, {
                type: 'response-slice',
                id: request.id,
                // only ship steps marked once-per-request (i.e. the slice-specific results)
                results: Object.fromEntries(Object.entries(results)
                    .filter(([k,]) => default_pipelines_1.DEFAULT_SLICING_PIPELINE.steps.get(k)?.executed === 1 /* PipelineStepStage.OncePerRequest */))
            });
        }).catch(e => {
            this.logger.error(`[${this.name}] Error while analyzing file for token ${request.filetoken}: ${String(e)}`);
            (0, send_1.sendMessage)(this.socket, {
                id: request.id,
                type: 'error',
                fatal: false,
                reason: `Error while analyzing file for token ${request.filetoken}: ${String(e)}`
            });
        });
    }
    /**
     * Executes a single REPL expression, forwarding each produced stdout/stderr
     * message to the client, and finishes with an end-of-execution marker.
     * ANSI formatting of the output is applied only when the client asked for it.
     */
    handleRepl(base) {
        const requestResult = (0, validate_1.validateMessage)(base, message_repl_1.requestExecuteReplExpressionMessage);
        if (requestResult.type === 'error') {
            (0, validate_1.answerForValidationError)(this.socket, requestResult, base.id);
            return;
        }
        const request = requestResult.message;
        // each output chunk is forwarded as its own message, tagged with its stream
        const out = (stream, msg) => {
            (0, send_1.sendMessage)(this.socket, {
                type: 'response-repl-execution',
                id: request.id,
                result: msg,
                stream
            });
        };
        void (0, core_1.replProcessAnswer)({
            formatter: request.ansi ? ansi_1.ansiFormatter : ansi_1.voidFormatter,
            stdout: msg => out('stdout', msg),
            stderr: msg => out('stderr', msg)
        }, request.expression, this.parser, this.allowRSessionAccess).then(() => {
            // tell the client that no further output for this request will follow
            (0, send_1.sendMessage)(this.socket, {
                type: 'end-repl-execution',
                id: request.id
            });
        });
    }
    /**
     * Computes the lineage of the given criterion within a previously analyzed
     * file and answers with the resulting node ids as a flat array.
     * Requests for unknown tokens are answered with a non-fatal error.
     */
    handleLineageRequest(base) {
        const requestResult = (0, validate_1.validateMessage)(base, message_lineage_1.requestLineageMessage);
        if (requestResult.type === 'error') {
            (0, validate_1.answerForValidationError)(this.socket, requestResult, base.id);
            return;
        }
        const request = requestResult.message;
        this.logger.info(`[${this.name}] Received lineage request for criterion ${request.criterion}`);
        const fileInformation = this.fileMap.get(request.filetoken);
        if (!fileInformation) {
            (0, send_1.sendMessage)(this.socket, {
                id: request.id,
                type: 'error',
                fatal: false,
                reason: `The file token ${request.filetoken} has never been analyzed.`
            });
            return;
        }
        // NOTE(review): the `true` argument presumably requests the already-computed
        // (intermediate) results without re-running the pipeline — confirm with getResults
        const { dataflow: dfg, normalize: ast } = fileInformation.pipeline.getResults(true);
        (0, assert_1.guard)(dfg !== undefined, `Dataflow graph must be present (request: ${request.filetoken})`);
        (0, assert_1.guard)(ast !== undefined, `AST must be present (request: ${request.filetoken})`);
        const lineageIds = (0, repl_lineage_1.getLineage)(request.criterion, dfg.graph, ast.idMap);
        (0, send_1.sendMessage)(this.socket, {
            type: 'response-lineage',
            id: request.id,
            // serialize the iterable of ids as a plain array
            lineage: [...lineageIds]
        });
    }
    /**
     * Runs the requested queries against the dataflow graph and normalized AST of
     * a previously analyzed file and answers with the query results.
     * Requests for unknown tokens are answered with a non-fatal error.
     */
    handleQueryRequest(base) {
        const requestResult = (0, validate_1.validateMessage)(base, message_query_1.requestQueryMessage);
        if (requestResult.type === 'error') {
            (0, validate_1.answerForValidationError)(this.socket, requestResult, base.id);
            return;
        }
        const request = requestResult.message;
        this.logger.info(`[${this.name}] Received query request`);
        const fileInformation = this.fileMap.get(request.filetoken);
        if (!fileInformation) {
            (0, send_1.sendMessage)(this.socket, {
                id: request.id,
                type: 'error',
                fatal: false,
                reason: `The file token ${request.filetoken} has never been analyzed.`
            });
            return;
        }
        const { dataflow: dfg, normalize: ast } = fileInformation.pipeline.getResults(true);
        (0, assert_1.guard)(dfg !== undefined, `Dataflow graph must be present (request: ${request.filetoken})`);
        (0, assert_1.guard)(ast !== undefined, `AST must be present (request: ${request.filetoken})`);
        const results = (0, query_1.executeQueries)({ dataflow: dfg, ast }, request.query);
        (0, send_1.sendMessage)(this.socket, {
            type: 'response-query',
            id: request.id,
            results
        });
    }
}
// late assignment fills the `exports.FlowRServerConnection = void 0` placeholder declared above
exports.FlowRServerConnection = FlowRServerConnection;
/**
 * Produces a serialization-safe copy of the pipeline results: the normalized
 * AST's `idMap` is dropped, and the dataflow graph is rebuilt into a fresh
 * instance so the graph's class identity survives without its id map.
 * The input object is not mutated.
 */
function sanitizeAnalysisResults(results) {
    const normalize = {
        ...results.normalize,
        idMap: undefined
    };
    // we want to keep the DataflowGraph type information, but not the idMap
    const graph = new graph_1.DataflowGraph(undefined).mergeWith(results.dataflow?.graph);
    const dataflow = {
        ...results.dataflow,
        graph
    };
    return { ...results, normalize, dataflow };
}
//# sourceMappingURL=connection.js.map