@skyramp/mcp
Skyramp MCP (Model Context Protocol) Server - AI-powered test generation and execution
import * as fs from "fs";
import * as path from "path";
import { simpleGit } from "simple-git";
import { logger } from "../utils/logger.js";
export class EnhancedDriftAnalysisService {
git; // simple-git instance, (re)initialized for the repository under analysis
repositoryPath; // root of the repository currently being analyzed
constructor() { }
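/**
* Example usage (illustrative sketch; the paths and test type are hypothetical,
* while the option and result fields mirror the methods below):
*
*   const service = new EnhancedDriftAnalysisService();
*   const result = await service.analyzeTestDrift({
*     testFile: "/repo/tests/checkout.test.ts",
*     repositoryPath: "/repo",
*     testType: "integration",
*     includeApiSchema: true,
*   });
*   console.log(result.driftScore, result.recommendations);
*
*   // Batch mode over discovered tests:
*   const results = await service.analyzeBatchDrift(
*     [{ testFile: "/repo/tests/login.test.ts", testType: "ui" }],
*     "/repo",
*     { includeApiSchema: true, includeDependencies: true },
*   );
*/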
/**
* Analyze drift for multiple tests in batch.
* Failures for individual tests are logged and skipped, so the returned array
* may contain fewer results than the number of test files passed in.
*/
async analyzeBatchDrift(testFiles, repositoryPath, options) {
logger.info(`Starting batch drift analysis for ${testFiles.length} tests`);
this.git = simpleGit(repositoryPath);
const results = [];
for (const testInfo of testFiles) {
try {
const result = await this.analyzeTestDrift({
testFile: testInfo.testFile,
repositoryPath,
testType: testInfo.testType,
includeApiSchema: options?.includeApiSchema ?? true,
includeDependencies: options?.includeDependencies ?? true,
});
results.push(result);
}
catch (error) {
logger.error(`Failed to analyze drift for ${testInfo.testFile}: ${error.message}`);
}
}
return results;
}
/**
* Analyze drift for a specific test file
*/
async analyzeTestDrift(options) {
const { testFile, repositoryPath, baselineCommit } = options;
logger.info(`Analyzing drift for test: ${testFile}`);
// Step 1: Initialize git and get baseline
this.repositoryPath = repositoryPath;
this.git = simpleGit(repositoryPath);
if (!(await this.git.checkIsRepo())) {
throw new Error(`Not a git repository: ${repositoryPath}`);
}
const baseline = baselineCommit ||
(await this.getTestBaselineCommit(testFile, repositoryPath));
// Handle no git history case
if (!baseline) {
return await this.analyzeCurrentState(testFile, repositoryPath, options);
}
const currentCommit = await this.getCurrentCommit();
// No changes if baseline equals current
if (baseline === currentCommit) {
return this.createNoChangeResult(testFile, baseline, currentCommit);
}
// Step 2: Extract dependencies and determine test type
// Use the test type from options if provided (from discovery); fall back to an empty string
const testType = options.testType || "";
const dependencies = await this.extractDependenciesWithTransitive(testFile);
// Step 3: Get changes between commits
const allChanges = await this.getChangesBetweenCommits(baseline, currentCommit);
const affectedFiles = this.filterAffectedFiles(allChanges, Array.from(dependencies), testType);
const fileChanges = await this.getFileChanges(affectedFiles, baseline, currentCommit);
// Step 4: Analyze API and UI changes
const apiSchemaChanges = options.includeApiSchema
? await this.analyzeApiSchemaChanges(testFile, baseline, currentCommit)
: undefined;
const uiComponentChanges = this.analyzeUiComponentChanges(affectedFiles, fileChanges);
const { apiDependencyUpdates, uiDependencyUpdates } = await this.categorizeDependencyChanges(affectedFiles, fileChanges, baseline, currentCommit);
// Step 5: Get file contents with diffs for LLM analysis (if there are changes)
let fileContentsWithDiffs = [];
if (fileChanges.length > 0) {
fileContentsWithDiffs = await this.getFileContentsWithDiffs(affectedFiles, baseline, currentCommit);
logger.debug(`Collected ${fileContentsWithDiffs.length} files with diffs for LLM analysis`);
}
// Step 6: Collect changes and calculate drift
const changes = await this.collectChangesWithContext(apiSchemaChanges, uiComponentChanges, apiDependencyUpdates, uiDependencyUpdates);
const driftScore = await this.calculateDriftScore(testFile, changes, fileChanges, apiSchemaChanges, uiComponentChanges, testType, fileContentsWithDiffs);
// Step 7: Generate recommendations
const recommendations = this.generateRecommendations(driftScore, changes, apiSchemaChanges);
return {
testFile,
lastCommit: baseline,
currentCommit,
driftScore,
changes,
affectedFiles: {
files: affectedFiles,
apiDependencyUpdates: apiDependencyUpdates.length > 0 ? apiDependencyUpdates : undefined,
uiDependencyUpdates: uiDependencyUpdates.length > 0 ? uiDependencyUpdates : undefined,
},
apiSchemaChanges,
uiComponentChanges,
analysisTimestamp: new Date().toISOString(),
recommendations,
};
}
/**
* Get the hash of the most recent commit that touched the test file (used as the drift baseline)
*/
async getTestBaselineCommit(testFile, repositoryPath) {
try {
if (!fs.existsSync(testFile)) {
throw new Error(`Test file does not exist: ${testFile}`);
}
const relativePath = path.relative(repositoryPath, testFile);
if (relativePath.startsWith("..")) {
throw new Error(`Test file is outside repository: ${testFile}`);
}
const log = await this.git.log({ file: relativePath, maxCount: 1 });
if (log.latest?.hash) {
return log.latest.hash;
}
// Fallback: raw git command
const result = await this.git.raw([
"log",
"--format=%H",
"-n",
"1",
"--",
relativePath,
]);
if (result && result.trim()) {
return result.trim();
}
logger.debug(`No git history found for file: ${relativePath}`);
return "";
}
catch (error) {
throw new Error(`Failed to get baseline commit: ${error.message}`);
}
}
/**
* Get current HEAD commit
*/
async getCurrentCommit() {
const log = await this.git.log({ maxCount: 1 });
if (!log.latest?.hash) {
throw new Error("Could not get current commit");
}
return log.latest.hash;
}
/**
* Get list of changed files between two commits
*/
async getChangesBetweenCommits(fromCommit, toCommit) {
try {
const diff = await this.git.diff(["--name-only", fromCommit, toCommit]);
return diff
.split("\n")
.map((f) => f.trim())
.filter((f) => f.length > 0);
}
catch (error) {
logger.error(`Failed to get changes between commits: ${error}`);
return [];
}
}
/**
* Get detailed change information for files
*/
async getFileChanges(affectedFiles, fromCommit, toCommit) {
const fileChanges = [];
for (const file of affectedFiles) {
try {
const diff = await this.git.diff([
`${fromCommit}..${toCommit}`,
"--",
file,
]);
const diffLines = diff.split("\n");
let linesAdded = 0;
let linesRemoved = 0;
diffLines.forEach((line) => {
if (line.startsWith("+") && !line.startsWith("+++"))
linesAdded++;
else if (line.startsWith("-") && !line.startsWith("---"))
linesRemoved++;
});
fileChanges.push({ file, linesAdded, linesRemoved });
logger.debug(`File changed: ${file}: +${linesAdded}/-${linesRemoved} lines`);
}
catch (error) {
logger.debug(`Could not get change info for ${file}: ${error}`);
}
}
return fileChanges;
}
/**
* Extract all dependencies (including transitive) from a test file
*/
async extractDependenciesWithTransitive(testFile, maxDepth = 3) {
const allDependencies = new Set();
const visited = new Set();
const queue = [
{ file: testFile, depth: 0 },
];
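// Breadth-first walk of the import graph: already-visited files are skipped
// and traversal stops once depth exceeds maxDepth.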
while (queue.length > 0) {
const { file, depth } = queue.shift();
if (visited.has(file) || depth > maxDepth)
continue;
visited.add(file);
const directDeps = await this.extractDependencies(file);
for (const dep of directDeps) {
allDependencies.add(dep);
const depPath = await this.resolveDependencyPath(dep, file, this.repositoryPath);
if (depPath && !visited.has(depPath)) {
queue.push({ file: depPath, depth: depth + 1 });
}
}
}
logger.debug(`Extracted ${allDependencies.size} dependencies (max depth: ${maxDepth})`);
return allDependencies;
}
/**
* Extract direct imports/requires from a file
*/
async extractDependencies(testFile) {
try {
const content = fs.readFileSync(testFile, "utf-8");
const dependencies = new Set();
// Python imports: match all import statements here, then keep only relative imports (from . import X, from .module import Y) below
const pythonImports = content.match(/^(?:from|import)\s+([^\s]+)/gm) || [];
pythonImports.forEach((imp) => {
const match = imp.match(/(?:from|import)\s+([^\s]+)/);
if (match) {
const module = match[1];
// Only include relative imports (starting with .)
if (module.startsWith(".")) {
// Count leading dots to determine parent traversal level
const leadingDotsMatch = module.match(/^\.+/);
const leadingDots = leadingDotsMatch
? leadingDotsMatch[0].length
: 0;
// Remove leading dots and convert remaining dots to slashes
const moduleWithoutLeadingDots = module.substring(leadingDots);
const modulePath = moduleWithoutLeadingDots.replace(/\./g, "/");
// Build the correct relative path
// . (1 dot) = current package (no ../)
// .. (2 dots) = parent package (../)
// ... (3 dots) = grandparent package (../../)
const parentTraversal = leadingDots > 1 ? "../".repeat(leadingDots - 1) : "";
if (modulePath) {
dependencies.add(`${parentTraversal}${modulePath}.py`);
}
}
}
});
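// Illustrative examples of the mapping above (module names are hypothetical):
//   "from .models import User"     -> "models.py"
//   "from ..schemas.user import X" -> "../schemas/user.py"
//   "from ...utils import helpers" -> "../../utils.py"
//   "from . import fixtures"       -> skipped (empty module path after stripping dots)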
// JavaScript/TypeScript imports (only relative paths)
const jsImports = content.match(/^import\s+.*?from\s+['"]([^'"]+)['"]/gm) || [];
jsImports.forEach((imp) => {
const match = imp.match(/from\s+['"]([^'"]+)['"]/);
if (match) {
let depPath = match[1];
// Only include relative imports (starting with . or ..)
if (depPath.startsWith(".")) {
depPath = path.resolve(path.dirname(testFile), depPath);
depPath = path.relative(this.repositoryPath, depPath);
dependencies.add(depPath);
}
}
});
// Require statements (only relative paths)
const requireImports = content.match(/require\(['"]([^'"]+)['"]\)/g) || [];
requireImports.forEach((req) => {
const match = req.match(/require\(['"]([^'"]+)['"]\)/);
if (match) {
let depPath = match[1];
// Only include relative requires (starting with . or ..)
if (depPath.startsWith(".")) {
depPath = path.resolve(path.dirname(testFile), depPath);
depPath = path.relative(this.repositoryPath, depPath);
dependencies.add(depPath);
}
}
});
return Array.from(dependencies);
}
catch (error) {
logger.debug(`Could not extract dependencies: ${error}`);
return [];
}
}
/**
* Resolve a dependency string to an actual file path
*/
async resolveDependencyPath(dependency, fromFile, repositoryPath) {
try {
const extensions = [
"",
".ts",
".tsx",
".js",
".jsx",
".py",
"/index.ts",
"/index.js",
];
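// Example (hypothetical paths): "./utils/helpers" imported from src/tests/foo.test.ts
// is checked as src/tests/utils/helpers first, then with .ts, .tsx, .js, .jsx, .py,
// /index.ts and /index.js appended, returning the first candidate that exists on disk.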
// Check if it's a relative import (starts with . or ..)
if (dependency.startsWith(".")) {
// Resolve relative to the importing file's directory
const fromDir = path.dirname(fromFile);
const resolvedPath = path.resolve(fromDir, dependency);
for (const ext of extensions) {
const withExt = resolvedPath + ext;
if (fs.existsSync(withExt))
return withExt;
}
return null;
}
// Otherwise, resolve relative to repository root
const fromRoot = path.join(repositoryPath, dependency);
for (const ext of extensions) {
const withExt = fromRoot + ext;
if (fs.existsSync(withExt))
return withExt;
}
return null;
}
catch (error) {
logger.debug(`Could not resolve dependency ${dependency}: ${error}`);
return null;
}
}
/**
* Filter changed files to only those that are dependencies of the test
*/
filterAffectedFiles(allChanges, dependencies, testType) {
const affected = new Set();
const isApiTest = [
"smoke",
"contract",
"integration",
"fuzz",
"load",
].includes(testType);
const isUiTest = ["ui", "e2e"].includes(testType?.toLowerCase());
logger.debug(`Filter mode - isApiTest: ${isApiTest}, isUiTest: ${isUiTest}, testType: ${testType}`);
// UI component patterns
const uiPatterns = [
/component/i,
/page/i,
/view/i,
/screen/i,
/template/i,
/\.tsx?$/,
/\.jsx?$/,
/\.vue$/,
/src.*ui/i,
/src.*frontend/i,
/\.css$/i,
/\.scss$/i,
/\.less$/i,
];
allChanges.forEach((file) => {
// Check if file is a dependency
const isDependency = dependencies.some((dep) => file === dep ||
file.includes(dep) ||
dep.includes(file) ||
file.endsWith(dep) ||
dep.endsWith(file));
if (isDependency) {
affected.add(file);
}
// For API tests: also include model/schema files
if (isApiTest && (file.includes("/model") || file.includes("/schema"))) {
affected.add(file);
}
// For UI tests: also include UI component files even if not directly imported
// UI tests use selectors and don't import components, so we need special handling
if (isUiTest) {
const isUiFile = uiPatterns.some((pattern) => pattern.test(file));
if (isUiFile) {
affected.add(file);
logger.debug(`Including UI file for UI test: ${file}`);
}
}
});
return Array.from(affected);
}
/**
* Analyze API schema changes (if OpenAPI/Swagger spec exists)
*/
async analyzeApiSchemaChanges(testFile, fromCommit, toCommit) {
const schemaPath = await this.extractApiSchemaPath(testFile);
if (!schemaPath)
return undefined;
try {
const oldSchema = await this.git.show([`${fromCommit}:${schemaPath}`]);
const newSchema = await this.git.show([`${toCommit}:${schemaPath}`]);
if (!oldSchema || !newSchema)
return undefined;
const oldParsed = JSON.parse(oldSchema);
const newParsed = JSON.parse(newSchema);
const changes = {
endpointsRemoved: [],
endpointsModified: [],
authenticationChanged: false,
};
const oldPaths = oldParsed.paths || {};
const newPaths = newParsed.paths || {};
// Find removed endpoints
for (const apiPath in oldPaths) {
if (!newPaths[apiPath]) {
for (const method in oldPaths[apiPath]) {
changes.endpointsRemoved.push({ path: apiPath, method });
}
}
}
// Find modified endpoints and removed methods from existing paths
for (const apiPath in oldPaths) {
if (newPaths[apiPath]) {
for (const method in oldPaths[apiPath]) {
if (newPaths[apiPath][method]) {
const oldEndpoint = JSON.stringify(oldPaths[apiPath][method]);
const newEndpoint = JSON.stringify(newPaths[apiPath][method]);
if (oldEndpoint !== newEndpoint) {
changes.endpointsModified.push({
path: apiPath,
method,
changes: ["Parameters or response modified"],
});
}
}
else {
// Method exists in old schema but not in new schema
changes.endpointsRemoved.push({ path: apiPath, method });
}
}
}
}
// Check authentication changes
// OpenAPI 3.x uses components.securitySchemes, Swagger 2.x uses securityDefinitions
const oldAuth = JSON.stringify(oldParsed.components?.securitySchemes ||
oldParsed.securityDefinitions ||
{});
const newAuth = JSON.stringify(newParsed.components?.securitySchemes ||
newParsed.securityDefinitions ||
{});
if (oldAuth !== newAuth) {
changes.authenticationChanged = true;
changes.authenticationDetails = "Security schemes have been modified";
}
return changes;
}
catch (error) {
logger.debug(`Could not analyze API schema changes: ${error}`);
return undefined;
}
}
/**
* Extract API schema path from test file comments/metadata
*/
async extractApiSchemaPath(testFile) {
try {
const content = fs.readFileSync(testFile, "utf-8");
const patterns = [
/apiSchema["\s:=]+["']([^"']+)["']/i,
/api_schema["\s:=]+["']([^"']+)["']/i,
/API Schema:\s*([^\s\n]+)/i,
];
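// Annotations these patterns are intended to match (illustrative values):
//   apiSchema: "specs/openapi.json"
//   api_schema = './openapi.yaml'
//   # API Schema: specs/openapi.yaml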
for (const pattern of patterns) {
const match = content.match(pattern);
if (match && match[1])
return match[1];
}
return null;
}
catch (error) {
logger.debug(`Could not extract API schema path: ${error}`);
return null;
}
}
/**
* Analyze UI component changes from affected files
*/
analyzeUiComponentChanges(affectedFiles, fileChanges) {
const uiPatterns = [
/component/i,
/page/i,
/view/i,
/screen/i,
/template/i,
/\.tsx?$/,
/\.jsx?$/,
/\.vue$/,
/src.*ui/i,
/src.*frontend/i,
];
const componentFiles = [];
const routeFiles = [];
let hasSelectorsChanges = false;
let hasStylingChanges = false;
affectedFiles.forEach((file) => {
const isUiFile = uiPatterns.some((pattern) => pattern.test(file));
if (!isUiFile)
return;
if (/route|router|navigation/i.test(file)) {
routeFiles.push(file);
}
else if (/component|page|view|screen/i.test(file)) {
componentFiles.push(file);
}
const changeInfo = fileChanges.find((c) => c.file === file);
if (changeInfo &&
(changeInfo.linesAdded > 3 || changeInfo.linesRemoved > 3)) {
if (/\.css|\.scss|\.less|styled|style/.test(file)) {
hasStylingChanges = true;
}
else {
hasSelectorsChanges = true;
}
}
});
if (componentFiles.length === 0 &&
routeFiles.length === 0 &&
!hasSelectorsChanges &&
!hasStylingChanges) {
return undefined;
}
return {
componentFiles,
routeFiles,
hasSelectorsChanges,
hasStylingChanges,
};
}
/**
* Categorize file changes into API and UI dependency changes
*/
async categorizeDependencyChanges(affectedFiles, fileChanges, baseline, currentCommit) {
const apiDependencyUpdates = [];
const uiDependencyUpdates = [];
const apiPatterns = [
/api/i,
/endpoint/i,
/route/i,
/controller/i,
/handler/i,
/service/i,
/model/i,
/schema/i,
/router/i,
/\.(yaml|yml|json)$/i,
];
const uiPatterns = [
/component/i,
/page/i,
/view/i,
/screen/i,
/template/i,
/\.tsx?$/,
/\.jsx?$/,
/\.vue$/,
/src.*ui/i,
/src.*frontend/i,
/\.css$/i,
/\.scss$/i,
/\.less$/i,
];
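// Note: a file matching both pattern sets (e.g. a .tsx file whose path contains "service")
// is categorized as an API dependency, because the API check runs first in the if/else below.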
for (const file of affectedFiles) {
const changeInfo = fileChanges.find((c) => c.file === file);
if (!changeInfo)
continue;
const isApiFile = apiPatterns.some((pattern) => pattern.test(file));
const isUiFile = uiPatterns.some((pattern) => pattern.test(file));
try {
const diff = await this.git.diff([
`${baseline}..${currentCommit}`,
"--",
file,
]);
const change = {
file,
linesAdded: changeInfo.linesAdded,
linesRemoved: changeInfo.linesRemoved,
diff,
};
if (isApiFile)
apiDependencyUpdates.push(change);
else if (isUiFile)
uiDependencyUpdates.push(change);
}
catch (error) {
logger.debug(`Could not get diff for ${file}: ${error}`);
}
}
return { apiDependencyUpdates, uiDependencyUpdates };
}
/**
* Get file contents with diffs for LLM analysis
* This provides the actual code changes that the LLM can analyze
*/
async getFileContentsWithDiffs(affectedFiles, fromCommit, toCommit) {
const fileContents = [];
for (const file of affectedFiles) {
try {
// Get git diff
const diff = await this.git.diff([
`${fromCommit}..${toCommit}`,
"--",
file,
]);
// Get current file content
const filePath = path.join(this.repositoryPath, file);
const currentContent = fs.existsSync(filePath)
? fs.readFileSync(filePath, "utf-8")
: "";
if (diff || currentContent) {
fileContents.push({
file,
diff,
currentContent: currentContent.slice(0, 5000), // Limit to 5000 chars
});
}
}
catch (error) {
logger.debug(`Could not get content/diff for ${file}: ${error}`);
}
}
return fileContents;
}
/**
* Collect all changes with context and detect breaking changes
*/
async collectChangesWithContext(apiSchemaChanges, uiComponentChanges, apiDependencyUpdates, uiDependencyUpdates) {
const changes = [];
// API schema changes
if (apiSchemaChanges) {
if (apiSchemaChanges.endpointsRemoved.length > 0) {
changes.push({
type: "endpoint_removed",
file: "API Schema",
description: `${apiSchemaChanges.endpointsRemoved.length} endpoint(s) removed`,
severity: "high",
});
}
if (apiSchemaChanges.endpointsModified.length > 0) {
changes.push({
type: "endpoint_modified",
file: "API Schema",
description: `${apiSchemaChanges.endpointsModified.length} endpoint(s) modified`,
severity: "medium",
});
}
if (apiSchemaChanges.authenticationChanged) {
changes.push({
type: "authentication_changed",
file: "API Schema",
description: "Authentication mechanism changed",
severity: "critical",
});
}
}
// API dependency changes
for (const apiChange of apiDependencyUpdates) {
if (apiChange.diff) {
changes.push(...this.detectTypeMismatches(apiChange.diff, apiChange.file));
const linesAdded = apiChange.linesAdded || 0;
const linesRemoved = apiChange.linesRemoved || 0;
if (linesAdded > 5 || linesRemoved > 5) {
changes.push({
type: "endpoint_modified",
file: apiChange.file,
description: `API file modified: +${linesAdded}/-${linesRemoved} lines`,
severity: linesRemoved > 10 ? "high" : "medium",
});
}
}
}
// UI dependency changes
for (const uiChange of uiDependencyUpdates) {
if (uiChange.diff) {
changes.push(...this.detectSelectorChanges(uiChange.diff, uiChange.file));
const linesAdded = uiChange.linesAdded || 0;
const linesRemoved = uiChange.linesRemoved || 0;
if (linesAdded > 5 || linesRemoved > 5) {
changes.push({
type: "ui_component_modified",
file: uiChange.file,
description: `UI component modified: +${linesAdded}/-${linesRemoved} lines`,
severity: "medium",
});
}
}
}
// UI component changes
if (uiComponentChanges) {
if (uiComponentChanges.componentFiles.length > 0) {
changes.push({
type: "ui_component_modified",
file: "UI Components",
description: `${uiComponentChanges.componentFiles.length} component file(s) modified`,
severity: "medium",
});
}
if (uiComponentChanges.routeFiles.length > 0) {
changes.push({
type: "route_changed",
file: "Route Definitions",
description: `${uiComponentChanges.routeFiles.length} route file(s) changed`,
severity: "high",
});
}
}
return changes;
}
/**
* Detect type mismatches in diffs (e.g., field: int -> field: string)
*/
detectTypeMismatches(diff, file) {
const changes = [];
const lines = diff.split("\n");
const fieldTypes = new Map();
const patterns = [
/^[-+]\s*(\w+)\s*:\s*(\w+)(?:\s*=|$)/,
/^[-+]\s*(\w+)\??\s*:\s*(\w+)/,
/^[-+]\s*(\w+)\s*:\s*(?:Optional|List|Dict|Union)\[(\w+)\]/,
];
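// Illustrative diff lines (hypothetical field) that these patterns flag as a breaking change:
//   -user_id: int
//   +user_id: str
// -> Field "user_id" type changed from int to str (severity: critical)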
for (const line of lines) {
for (const pattern of patterns) {
const match = line.match(pattern);
if (match) {
const fieldName = match[1];
const fieldType = match[2];
if (!fieldTypes.has(fieldName)) {
fieldTypes.set(fieldName, {});
}
const fieldInfo = fieldTypes.get(fieldName);
if (line.startsWith("-"))
fieldInfo.oldType = fieldType;
if (line.startsWith("+"))
fieldInfo.newType = fieldType;
}
}
}
for (const [fieldName, types] of fieldTypes.entries()) {
if (types.oldType && types.newType && types.oldType !== types.newType) {
changes.push({
type: "breaking_change",
file,
description: `Field "${fieldName}" type changed from ${types.oldType} to ${types.newType}`,
severity: "critical",
details: `Critical: API now expects ${types.newType} but test may send ${types.oldType}`,
});
}
}
return changes;
}
/**
* Detect selector changes in UI diffs
*/
detectSelectorChanges(diff, file) {
const changes = [];
const lines = diff.split("\n");
const selectorChanges = new Map();
const selectorPatterns = [
/(?:className|class)\s*[:=]\s*["']([^"']+)["']/,
/id\s*[:=]\s*["']([^"']+)["']/,
/data-testid\s*[:=]\s*["']([^"']+)["']/,
];
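// Illustrative diff lines (hypothetical class name) that trigger a selector-removal change:
//   -<button className="submit-order">
//   +<button className="place-order">
// -> Selector removed: "submit-order" (severity: high), because the old value never
//    reappears on an added line.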
for (const line of lines) {
for (const pattern of selectorPatterns) {
const match = line.match(pattern);
if (match) {
const selectorValue = match[1];
if (!selectorChanges.has(selectorValue)) {
selectorChanges.set(selectorValue, {});
}
const selectorInfo = selectorChanges.get(selectorValue);
if (line.startsWith("-"))
selectorInfo.removed = selectorValue;
if (line.startsWith("+"))
selectorInfo.added = selectorValue;
}
}
}
for (const [selectorValue, info] of selectorChanges.entries()) {
if (info.removed && !info.added) {
changes.push({
type: "ui_component_modified",
file,
description: `Selector removed: "${selectorValue}"`,
severity: "high",
details: "Test may use this selector which no longer exists",
});
}
}
return changes;
}
/**
* Calculate drift score (0-100)
*
* SCORING:
* - 0-20: Minimal impact
* - 21-40: Low impact
* - 41-60: Medium impact
* - 61-80: High impact (breaking changes likely)
* - 81-100: Critical impact
*
* STRATEGY:
* This implementation applies heuristic scoring based on the detected changes.
* The file contents with diffs collected for LLM analysis are accepted as a
* parameter but are not factored into the heuristic score here.
*/
async calculateDriftScore(testFile, changes, fileChanges, apiSchemaChanges, uiComponentChanges, testType, fileContentsWithDiffs) {
if (changes.length === 0 && !apiSchemaChanges && fileChanges.length === 0) {
return 0;
}
// Use heuristic scoring
let score = 0;
// Count by severity
changes.forEach((change) => {
switch (change.severity) {
case "critical":
score += 20;
break;
case "high":
score += 15;
break;
case "medium":
score += 10;
break;
case "low":
score += 5;
break;
}
});
// API schema changes
if (apiSchemaChanges) {
score += apiSchemaChanges.endpointsRemoved.length * 15;
score += apiSchemaChanges.endpointsModified.length * 10;
if (apiSchemaChanges.authenticationChanged)
score += 25;
}
// Large file changes indicate potential breaking changes
fileChanges.forEach((change) => {
if (change.linesAdded > 10 || change.linesRemoved > 10) {
score += 5;
}
});
return Math.min(Math.round(score), 100);
}
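// Worked example of the heuristic above (illustrative): if the API schema lost two endpoints
// and its auth scheme changed, collectChangesWithContext yields one high (15) and one
// critical (20) change, the schema itself adds 2 * 15 + 25 = 55, and one file with more than
// 10 changed lines adds 5, for a total of 95 -- in the 81-100 "critical impact" band.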
/**
* Generate actionable recommendations based on drift score
*/
generateRecommendations(driftScore, changes, apiSchemaChanges) {
const recommendations = [];
if (driftScore === 0) {
recommendations.push("✅ Test is up-to-date with current codebase");
return recommendations;
}
if (driftScore > 80) {
recommendations.push("CRITICAL: Test requires immediate update due to significant breaking changes");
recommendations.push("Consider rewriting the test to match current implementation");
}
else if (driftScore > 60) {
recommendations.push("⚠️ HIGH: Test should be reviewed and updated soon");
recommendations.push("Review breaking changes and update test assertions");
}
else if (driftScore > 40) {
recommendations.push("⚡ MEDIUM: Test may need updates for related code changes");
recommendations.push("Review changes and update if necessary");
}
else if (driftScore > 20) {
recommendations.push("💡 LOW: Minor changes detected, test likely still valid");
recommendations.push("Monitor for potential issues");
}
else {
recommendations.push("✨ MINIMAL: Changes have minimal impact on test");
}
// Specific recommendations
if (apiSchemaChanges) {
if (apiSchemaChanges.endpointsRemoved.length > 0) {
recommendations.push(`⚠️ ${apiSchemaChanges.endpointsRemoved.length} API endpoint(s) removed - update test`);
}
if (apiSchemaChanges.authenticationChanged) {
recommendations.push("🔐 Authentication mechanism changed - update test authentication");
}
}
const highSeverityChanges = changes.filter((c) => c.severity === "critical" || c.severity === "high");
if (highSeverityChanges.length > 0) {
recommendations.push(`${highSeverityChanges.length} high-severity change(s) require attention`);
}
return recommendations;
}
/**
* Analyze the current state when no git history is available
* (currently limited to checking that local dependencies still exist).
*/
async analyzeCurrentState(testFile, repositoryPath, options) {
logger.info(`Analyzing current state for test (no git history): ${testFile}`);
// Use the test type from options if provided (from discovery); no recalculation is done here
const testType = options.testType;
if (options.testType) {
logger.debug(`Using cached test type '${testType}' from discovery (avoiding recalculation)`);
}
const changes = [];
const recommendations = [];
let driftScore = 0;
// Check if local dependent files exist
const dependencies = await this.extractDependenciesWithTransitive(testFile, 2);
const missingDependencies = [];
for (const dep of dependencies) {
const depPath = await this.resolveDependencyPath(dep, testFile, repositoryPath);
if (!depPath || !fs.existsSync(depPath)) {
missingDependencies.push(dep);
changes.push({
type: "dependency_changed",
file: dep,
description: `Missing dependency: ${dep}`,
severity: "high",
});
driftScore += 15;
}
}
if (missingDependencies.length === 0) {
recommendations.push("✓ All dependencies are present in the current codebase");
recommendations.push("Note: Git history enables tracking changes over time.");
}
else {
recommendations.push(`⚠️ ${missingDependencies.length} missing dependencies detected`);
recommendations.push("These imports may be broken or files may have been moved/deleted");
}
return {
testFile,
lastCommit: "",
currentCommit: "",
driftScore: Math.min(driftScore, 100),
changes,
affectedFiles: { files: missingDependencies },
analysisTimestamp: new Date().toISOString(),
recommendations,
};
}
/**
* Create a result for when there are no changes
*/
createNoChangeResult(testFile, baseline, currentCommit) {
return {
testFile,
lastCommit: baseline,
currentCommit,
driftScore: 0,
changes: [],
affectedFiles: { files: [] },
analysisTimestamp: new Date().toISOString(),
recommendations: ["Test is up-to-date with current codebase"],
};
}
}