UNPKG

@eagleoutice/flowr

Version:

Static Dataflow Analyzer and Program Slicer for the R Programming Language

354 lines 15.8 kB
// NOTE(review): this file is generated CommonJS output of the TypeScript
// compiler (see the sourceMappingURL comment at the end of the file);
// prefer fixing the TypeScript sources over editing this artifact directly.
"use strict";
// tsc-emitted interop helper: re-export property `k` of module `m` on `o`
// (optionally renamed to `k2`), using a getter to keep the binding live
// where property descriptors are available.
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { if (k2 === undefined) k2 = k; var desc = Object.getOwnPropertyDescriptor(m, k); if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { desc = { enumerable: true, get: function() { return m[k]; } }; } Object.defineProperty(o, k2, desc); }) : (function(o, m, k, k2) { if (k2 === undefined) k2 = k; o[k2] = m[k]; }));
// tsc-emitted interop helper: attach the module object itself as the
// `default` export of the namespace object `o`.
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { Object.defineProperty(o, "default", { enumerable: true, value: v }); }) : function(o, v) { o["default"] = v; });
// tsc-emitted interop helper for `import * as ns` from a CommonJS module:
// copies every own key except `default` onto a fresh namespace object.
// The inner `ownKeys` reassigns itself on first call to memoize the chosen
// key-enumeration strategy.
var __importStar = (this && this.__importStar) || (function () { var ownKeys = function(o) { ownKeys = Object.getOwnPropertyNames || function (o) { var ar = []; for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k; return ar; }; return ownKeys(o); }; return function (mod) { if (mod && mod.__esModule) return mod; var result = {}; if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]); __setModuleDefault(result, mod); return result; }; })();
// tsc-emitted interop helper for default imports: wrap plain CommonJS
// exports in `{ default: mod }`.
var __importDefault = (this && this.__importDefault) || function (mod) { return (mod && mod.__esModule) ? mod : { "default": mod }; };
Object.defineProperty(exports, "__esModule", { value: true });
exports.FlowRServerConnection = void 0;
// function declarations are hoisted, so exporting `sanitizeAnalysisResults`
// before its definition (at the bottom of the file) is safe.
exports.sanitizeAnalysisResults = sanitizeAnalysisResults;
const send_1 = require("./send");
const validate_1 = require("./validate");
const message_analysis_1 = require("./messages/message-analysis");
const message_slice_1 = require("./messages/message-slice");
const server_1 = require("./server");
const message_repl_1 = require("./messages/message-repl");
const core_1 = require("../core");
const extract_cfg_1 = require("../../../control-flow/extract-cfg");
const quads_1 = require("../../../util/quads");
const print_1 = require("../../../core/print/print");
const _00_parse_1 = require("../../../core/steps/all/core/00-parse");
const _10_normalize_1 = require("../../../core/steps/all/core/10-normalize");
const _20_dataflow_1 = require("../../../core/steps/all/core/20-dataflow");
const ansi_1 = require("../../../util/text/ansi");
const default_pipelines_1 = require("../../../core/steps/pipeline/default-pipelines");
const graph_1 = require("../../../dataflow/graph/graph");
const tmp = __importStar(require("tmp"));
const fs_1 = __importDefault(require("fs"));
const message_query_1 = require("./messages/message-query");
const compact_1 = require("./compact");
const flowr_analyzer_builder_1 = require("../../../project/flowr-analyzer-builder");
const slice_direction_1 = require("../../../util/slice-direction");
/**
 * Each connection handles a single client, answering to its requests.
 * There is no need to construct this class manually, {@link FlowRServer} will do it for you.
*/
class FlowRServerConnection {
    socket;
    parser;
    name;
    logger;
    allowRSessionAccess;
    config;
    // maps token to information
    fileMap = new Map();
    // we do not have to ensure synchronized shell-access as we are always running synchronized
    constructor(socket, name, parser, allowRSessionAccess, config) {
        this.config = config;
        this.socket = socket;
        this.parser = parser;
        this.name = name;
        this.logger = server_1.serverLog.getSubLogger({ name });
        // wire up the socket: every request enters through handleData
        this.socket.on('data', data => this.handleData(String(data)));
        this.socket.on('error', e => this.logger.error(`[${this.name}] Error while handling connection: ${String(e)}`));
        // drop all per-connection analysis state when the client disconnects
        this.socket.on('close', () => {
            this.fileMap.clear();
        });
        this.allowRSessionAccess = allowRSessionAccess;
    }
    currentMessageBuffer = '';
    // Accumulates socket chunks until a newline-terminated message is
    // complete, validates its base format, and dispatches it to the matching
    // request handler. Unknown message types are fatal and end the connection.
    handleData(message) {
        if (!message.endsWith('\n')) {
            // NOTE(review): this assumes a chunk ending in '\n' completes exactly
            // one message; multiple messages fused into one chunk would be
            // validated as a single message — confirm against the client protocol.
            this.currentMessageBuffer += message;
            this.logger.trace(`[${this.name}] Received partial message. Buffering ${this.currentMessageBuffer.length}.`);
            return;
        }
        message = this.currentMessageBuffer + message;
        if (this.logger.settings.minLevel >= 2 /* LogLevel.Debug */) {
            this.logger.debug(`[${this.name}] Received message: ${message}`);
        }
        this.currentMessageBuffer = '';
        const request = (0, validate_1.validateBaseMessageFormat)(message);
        if (request.type === 'error') {
            (0, validate_1.answerForValidationError)(this.socket, request);
            return;
        }
        switch (request.message.type) {
            case 'request-file-analysis':
                void this.handleFileAnalysisRequest(request.message);
                break;
            case 'request-slice':
                this.handleSliceRequest(request.message);
                break;
            case 'request-repl-execution':
                this.handleRepl(request.message);
                break;
            case 'request-query':
                this.handleQueryRequest(request.message);
                break;
            default:
                // unsupported message type: report a fatal error and close
                (0, send_1.sendMessage)(this.socket, {
                    id: request.message.id,
                    type: 'error',
                    fatal: true,
                    reason: `The message type ${JSON.stringify(request.message.type ?? 'undefined')} is not supported.`
                });
                this.socket.end();
        }
    }
    // Validates a file-analysis request, builds an analyzer for its content
    // and answers with the analysis results in the requested format.
    // Errors during analysis are reported as non-fatal error messages.
    async handleFileAnalysisRequest(base) {
        const requestResult = (0, validate_1.validateMessage)(base, message_analysis_1.requestAnalysisMessage);
        if (requestResult.type === 'error') {
            (0, validate_1.answerForValidationError)(this.socket, requestResult, base.id);
            return;
        }
        const message = requestResult.message;
        this.logger.info(`[${this.name}] Received file analysis request for ${message.filename ?? 'unknown file'}${message.filetoken ? ' with token: ' + message.filetoken : ''}`);
        if (message.filetoken && this.fileMap.has(message.filetoken)) {
            this.logger.warn(`File token ${message.filetoken} already exists. Overwriting.`);
            // explicitly delete the previous store
            this.fileMap.delete(message.filetoken);
        }
        const tempFile = tmp.fileSync({ postfix: '.R' });
        const analyzer = await this.createAnalyzerForRequest(message, tempFile.name);
        try {
            await this.sendFileAnalysisResponse(analyzer, message);
        }
        catch (e) {
            this.logger.error(`[${this.name}] Error while analyzing file ${message.filename ?? 'unknown file'}: ${String(e)}`);
            (0, send_1.sendMessage)(this.socket, {
                id: message.id,
                type: 'error',
                fatal: false,
                reason: `Error while analyzing file ${message.filename ?? 'unknown file'}: ${String(e)}`
            });
        }
        // this is an interestingly named function that means "I am a callback that removes a file" - so this deletes the file
        tempFile.removeCallback();
    }
    // Serializes the analysis results (and the CFG when requested) in the
    // requested wire format — 'n-quads', 'compact', or plain JSON — and sends
    // the 'response-file-analysis' message.
    async sendFileAnalysisResponse(analyzer, message) {
        let cfg = undefined;
        if (message.cfg) {
            cfg = await analyzer.controlflow();
        }
        // fresh quad-serialization config (with a fresh id generator) per use
        const config = () => ({ context: message.filename ?? 'unknown', getId: (0, quads_1.defaultQuadIdGenerator)() });
        // NOTE(review): sanitized results are computed unconditionally but only
        // used by the 'compact' and JSON branches below.
        const sanitizedResults = sanitizeAnalysisResults(await analyzer.parse(), await analyzer.normalize(), await analyzer.dataflow());
        if (message.format === 'n-quads') {
            (0, send_1.sendMessage)(this.socket, {
                type: 'response-file-analysis',
                format: 'n-quads',
                id: message.id,
                cfg: cfg ? (0, extract_cfg_1.cfg2quads)(cfg, config()) : undefined,
                results: {
                    parse: await (0, print_1.printStepResult)(_00_parse_1.PARSE_WITH_R_SHELL_STEP, await analyzer.parse(), 5 /* StepOutputFormat.RdfQuads */, config()),
                    normalize: await (0, print_1.printStepResult)(_10_normalize_1.NORMALIZE, await analyzer.normalize(), 5 /* StepOutputFormat.RdfQuads */, config()),
                    dataflow: await (0, print_1.printStepResult)(_20_dataflow_1.STATIC_DATAFLOW, await analyzer.dataflow(), 5 /* StepOutputFormat.RdfQuads */, config())
                }
            });
        }
        else if (message.format === 'compact') {
            (0, send_1.sendMessage)(this.socket, {
                type: 'response-file-analysis',
                format: 'compact',
                id: message.id,
                cfg: cfg ? (0, compact_1.compact)(cfg) : undefined,
                results: (0, compact_1.compact)(sanitizedResults)
            });
        }
        else {
            (0, send_1.sendMessage)(this.socket, {
                type: 'response-file-analysis',
                format: 'json',
                id: message.id,
                cfg,
                results: sanitizedResults
            });
        }
    }
    // Builds a FlowrAnalyzer for the requested content. Inline `content` is
    // spilled to `tempFile` first; `filepath` may be a single path or an
    // array of paths. When the request carries a file token, the analyzer is
    // registered in `fileMap` for later slice/query requests.
    async createAnalyzerForRequest(message, tempFile) {
        let request;
        if (message.content !== undefined) {
            // we store the code in a temporary file in case it's too big for the shell to handle
            fs_1.default.writeFileSync(tempFile, message.content ?? '');
            request = { request: 'file', content: tempFile };
        }
        else if (message.filepath !== undefined) {
            if (typeof message.filepath === 'string') {
                request = { request: 'file', content: message.filepath };
            }
            else {
                request = message.filepath.map(fp => ({ request: 'file', content: fp }));
            }
        }
        else {
            throw new Error('Either content or filepath must be defined.');
        }
        const analyzer = await new flowr_analyzer_builder_1.FlowrAnalyzerBuilder()
            .setConfig(this.config)
            .setParser(this.parser)
            .build();
        analyzer.addRequest(request);
        if (message.filetoken) {
            this.logger.info(`Storing file token ${message.filetoken}`);
            this.fileMap.set(message.filetoken, {
                filename: message.filename,
                analyzer: analyzer
            });
        }
        return analyzer;
    }
    // Answers a slice request for a previously analyzed file token by running
    // a 'static-slice' query on the stored analyzer. Unknown tokens yield a
    // non-fatal error message.
    handleSliceRequest(base) {
        const requestResult = (0, validate_1.validateMessage)(base, message_slice_1.requestSliceMessage);
        if (requestResult.type === 'error') {
            (0, validate_1.answerForValidationError)(this.socket, requestResult, base.id);
            return;
        }
        const request = requestResult.message;
        this.logger.info(`[${request.filetoken}] Received ${request.direction ?? slice_direction_1.SliceDirection.Backward} slice request with criteria ${JSON.stringify(request.criterion)}`);
        const fileInformation = this.fileMap.get(request.filetoken);
        if (!fileInformation) {
            (0, send_1.sendMessage)(this.socket, {
                id: request.id,
                type: 'error',
                fatal: false,
                reason: `The file token ${request.filetoken} has never been analyzed.`
            });
            return;
        }
        void fileInformation.analyzer.query([{
                type: 'static-slice',
                criteria: request.criterion,
                noMagicComments: request.noMagicComments,
                direction: request.direction
            }]).then(result => {
            (0, send_1.sendMessage)(this.socket, {
                type: 'response-slice',
                id: request.id,
                // only report results of steps that execute once per slice request
                results: Object.fromEntries(Object.entries(result)
                    .filter(([k]) => default_pipelines_1.DEFAULT_SLICING_PIPELINE.steps.get(k)?.executed === 1 /* PipelineStepStage.OncePerRequest */))
            });
        }).catch(e => {
            this.logger.error(`[${this.name}] Error while analyzing file for token ${request.filetoken}: ${String(e)}`);
            (0, send_1.sendMessage)(this.socket, {
                id: request.id,
                type: 'error',
                fatal: false,
                reason: `Error while analyzing file for token ${request.filetoken}: ${String(e)}`
            });
        });
    }
    // Executes a REPL expression (only if R session access is permitted by
    // `allowRSessionAccess`, enforced by replProcessAnswer) and streams
    // stdout/stderr lines back, followed by an 'end-repl-execution' marker.
    handleRepl(base) {
        const requestResult = (0, validate_1.validateMessage)(base, message_repl_1.requestExecuteReplExpressionMessage);
        if (requestResult.type === 'error') {
            (0, validate_1.answerForValidationError)(this.socket, requestResult, base.id);
            return;
        }
        const request = requestResult.message;
        const out = (stream, msg) => {
            (0, send_1.sendMessage)(this.socket, {
                type: 'response-repl-execution',
                id: request.id,
                result: msg,
                stream
            });
        };
        const analyzer = new flowr_analyzer_builder_1.FlowrAnalyzerBuilder()
            .setConfig(this.config)
            .setParser(this.parser)
            .buildSync();
        // NOTE(review): unlike the slice/query handlers, this chain has no
        // .catch(); a rejection of replProcessAnswer becomes an unhandled
        // promise rejection — consider adding an error response here.
        void (0, core_1.replProcessAnswer)(analyzer, {
            formatter: request.ansi ? ansi_1.ansiFormatter : ansi_1.voidFormatter,
            stdout: msg => out('stdout', msg),
            stderr: msg => out('stderr', msg)
        }, request.expression, this.allowRSessionAccess).then(() => {
            (0, send_1.sendMessage)(this.socket, {
                type: 'end-repl-execution',
                id: request.id
            });
        });
    }
    // Runs the given queries against a previously analyzed file token and
    // sends back their results. Unknown tokens yield a non-fatal error.
    handleQueryRequest(base) {
        const requestResult = (0, validate_1.validateMessage)(base, message_query_1.requestQueryMessage);
        if (requestResult.type === 'error') {
            (0, validate_1.answerForValidationError)(this.socket, requestResult, base.id);
            return;
        }
        const request = requestResult.message;
        this.logger.info(`[${this.name}] Received query request`);
        const fileInformation = this.fileMap.get(request.filetoken);
        if (!fileInformation) {
            (0, send_1.sendMessage)(this.socket, {
                id: request.id,
                type: 'error',
                fatal: false,
                reason: `The file token ${request.filetoken} has never been analyzed.`
            });
            return;
        }
        void Promise.resolve(fileInformation.analyzer.query(request.query)).then(results => {
            (0, send_1.sendMessage)(this.socket, { type: 'response-query', id: request.id, results });
        }).catch(e => {
            this.logger.error(`[${this.name}] Error while executing query: ${String(e)}`);
            (0, send_1.sendMessage)(this.socket, {
                id: request.id,
                type: 'error',
                fatal: false,
                reason: `Error while executing query: ${String(e)}`
            });
        });
    }
}
exports.FlowRServerConnection = FlowRServerConnection;
/**
 * Sanitizes analysis results by removing any potentially sensitive information like id maps.
 */
function sanitizeAnalysisResults(parse, normalize, dataflow) {
    return {
        parse: parse,
        normalize: { ...normalize, idMap: undefined },
        dataflow: {
            ...dataflow,
            // we want to keep the DataflowGraph type information, but not the idMap
            graph: new graph_1.DataflowGraph(undefined).mergeWith(dataflow?.graph)
        }
    };
}
//# sourceMappingURL=connection.js.map