// @skyramp/mcp
// Skyramp MCP (Model Context Protocol) Server - AI-powered test generation and execution
import { logger } from "../utils/logger.js";
import fs from "fs";
import path from "path";
/**
 * Service that turns natural-language scenario steps into Skyramp trace
 * requests and accumulates them, one step at a time, in a per-scenario
 * JSON trace file on disk.
 */
export class ScenarioGenerationService {
    /**
     * Parses one scenario step into a trace request and appends it to the
     * scenario's trace file, creating the file on the first call.
     *
     * @param {object} params - MCP tool parameters.
     * @param {string} params.scenarioName - Scenario name; lower-cased and hyphenated to derive the file name.
     * @param {string} params.outputDir - Directory that receives the trace file.
     * @param {string} [params.apiSchema] - API schema; required to map the scenario to endpoints.
     * @param {string} params.method - HTTP method for this step.
     * @param {string} params.path - Request path for this step.
     * @param {string} params.destination - Target service as host:port.
     * @param {number} [params.statusCode] - Expected status code; defaulted from the method when omitted.
     * @param {string} [params.requestBody] - Request payload; defaulted from the method when omitted.
     * @param {string} [params.responseBody] - Response payload; defaulted from the method when omitted.
     * @returns {Promise<{content: {type: string, text: string}[], isError: boolean}>} MCP-style tool result.
     */
    async parseScenario(params) {
        try {
            logger.info("Parsing scenario into API requests", {
                scenarioName: params.scenarioName,
            });
            // The scenario can only be mapped to concrete endpoints with a schema.
            if (!params.apiSchema) {
                return {
                    content: [
                        {
                            type: "text",
                            text: "Please provide an API schema so that I can parse the scenario and map it to specific API endpoints.",
                        },
                    ],
                    isError: true,
                };
            }
            // Fail early with a clear message instead of crashing on
            // undefined.replaceAll(...) / path.join(undefined, ...) below.
            if (!params.scenarioName || !params.outputDir) {
                return {
                    content: [
                        {
                            type: "text",
                            text: "Both scenarioName and outputDir are required so the trace file can be written.",
                        },
                    ],
                    isError: true,
                };
            }
            // Generate a single trace request from the scenario
            const traceRequest = this.generateTraceRequestFromInput(params);
            if (!traceRequest) {
                return {
                    content: [
                        {
                            type: "text",
                            text: "Could not generate a trace request from the provided scenario.",
                        },
                    ],
                    isError: true,
                };
            }
            // Derive a file-system friendly name, e.g. "Add To Cart" -> scenario_add-to-cart.json
            const scenarioName = params.scenarioName.replaceAll(" ", "-").toLowerCase();
            const fileName = `scenario_${scenarioName}.json`;
            const filePath = path.join(params.outputDir, fileName);
            try {
                // Load any requests written by earlier steps so each call appends
                // rather than overwrites.
                let existingRequests = [];
                if (fs.existsSync(filePath)) {
                    try {
                        const existingContent = fs.readFileSync(filePath, "utf8");
                        existingRequests = JSON.parse(existingContent);
                        if (!Array.isArray(existingRequests)) {
                            existingRequests = [];
                        }
                    }
                    catch (parseError) {
                        // Deliberate best-effort: a corrupt trace file is discarded
                        // and rebuilt, but the discard is logged rather than
                        // silently swallowed.
                        logger.warn?.("Existing trace file could not be parsed; starting fresh", {
                            filePath,
                        });
                        existingRequests = [];
                    }
                }
                // Add the new request to the array
                existingRequests.push(traceRequest);
                // Write the updated array to the file
                fs.writeFileSync(filePath, JSON.stringify(existingRequests, null, 2), "utf8");
                logger.info("Trace request added to file", {
                    filePath,
                    totalRequests: existingRequests.length,
                });
            }
            catch (error) {
                const errorMessage = error instanceof Error ? error.message : String(error);
                return {
                    content: [
                        {
                            type: "text",
                            text: `Error writing trace file: ${errorMessage}`,
                        },
                    ],
                    isError: true,
                };
            }
            return {
                content: [
                    {
                        type: "text",
                        text: `**Trace Request Generated**
**Original Scenario:** "${params.scenarioName}"
**File Updated:** ${filePath}
**Generated Trace Request:**
\`\`\`json
${JSON.stringify(traceRequest, null, 2)}
\`\`\`
**File Details:**
- Output file: ${fileName}
- Full path: ${filePath}
- Action: Request added to trace file
**Next Steps:**
- Call this tool again with the next step in your scenario to add more requests
- Use the complete JSON file as input for integration or load test generation`,
                    },
                ],
                isError: false,
            };
        }
        catch (error) {
            const errorMessage = error instanceof Error ? error.message : String(error);
            return {
                content: [
                    {
                        type: "text",
                        text: `Error generating trace request: ${errorMessage}`,
                    },
                ],
                isError: true,
            };
        }
    }
    /**
     * Builds a single Skyramp trace request object from AI-supplied step
     * parameters, filling in method-dependent defaults for status code and
     * bodies.
     *
     * @param {object} params - Step parameters (see parseScenario).
     * @returns {object} Trace request ready to be appended to the trace file.
     */
    generateTraceRequestFromInput(params) {
        const destination = params.destination;
        // Use AI-provided parameters instead of parsing
        const timestamp = new Date().toISOString();
        const method = params.method;
        // Distinct local name: avoids shadowing the imported "path" module.
        const requestPath = params.path;
        // ?? (not ||) so an explicitly provided 0 / "" from the caller is
        // preserved instead of being replaced by the method-based default.
        const statusCode = params.statusCode ??
            (method === "POST" ? 201 : method === "DELETE" ? 204 : 200);
        const requestBody = params.requestBody ??
            (method === "GET" || method === "DELETE" ? "" : "{}");
        const responseBody = params.responseBody ?? (method === "DELETE" ? "" : "{}");
        // Hardcode source IP and port for NLP consistency
        const source = "192.168.65.1:39998";
        return {
            Source: source,
            Destination: destination,
            RequestBody: requestBody,
            ResponseBody: responseBody,
            RequestHeaders: {
                "Content-Type": ["application/json"],
                Authorization: ["Bearer demo-token"],
            },
            ResponseHeaders: {
                "Content-Type": ["application/json"],
            },
            Method: method,
            Path: requestPath,
            QueryParams: {},
            StatusCode: statusCode,
            Port: 443,
            Timestamp: timestamp,
            Scheme: "https",
        };
    }
}