xor-iqassist-appservice
Version:
AI-powered MCP server for comprehensive project testing with dual-path support (local files + web upload)
1,273 lines (1,123 loc) • 387 kB
JavaScript
import 'dotenv/config';
import express from 'express';
import multer from 'multer';
import archiver from 'archiver';
import yauzl from 'yauzl';
import fse from 'fs-extra';
import fg from 'fast-glob';
import cors from 'cors';
import path from 'path';
import { fileURLToPath } from 'url';
import { v4 as uuidv4 } from 'uuid';
import { promises as fs } from 'fs';
import simpleGit from 'simple-git';
import fetch from 'node-fetch';
import tar from 'tar';
// Enhanced system modules
import { secureDataHandler } from './lib/secure-data-handler.js';
import CleanupManager from './lib/cleanup-manager.js';
import PrivacyControls from './lib/privacy-controls.js';
import ErrorHandler from './lib/error-handler.js';
import TestExecutor from './lib/test-executor.js';
import JobStatusManager from './lib/job-status-manager.js';
import DockerTestManager from './lib/docker-test-manager.js';
import PerformanceTestManager from './lib/performance-test-manager.js';
import AdvancedAnalyticsManager from './lib/advanced-analytics-manager.js';
// Level 3 & 4 Enterprise Features
// COMMENTED OUT: AI Test Generator feature temporarily disabled
// import { AITestGenerator } from './lib/ai-test-generator.js';
import { VisualRegressionTester } from './lib/visual-regression-tester.js';
import { CrossBrowserTester } from './lib/cross-browser-tester.js';
import { MLTestOptimizer } from './lib/ml-test-optimizer.js';
import { DistributedTestManager } from './lib/distributed-test-manager.js';
import { PredictiveFailureAnalyzer } from './lib/predictive-failure-analyzer.js';
import { AutomatedTestMaintainer } from './lib/automated-test-maintainer.js';
// Data Quality & Generation Components
import { EnhancedProjectAnalyzer } from './lib/utils/enhanced-project-analyzer.js';
import { RealisticDataGenerator } from './lib/utils/realistic-data-generator.js';
import { DataValidator } from './lib/utils/data-validator.js';
import { TestDataSeeder } from './lib/utils/test-data-seeder.js';
import { DataQualityReporter } from './lib/utils/data-quality-reporter.js';
// Performance & Optimization Components
import PerformanceManager from './lib/utils/performance-manager.js';
import os from 'os';
// Monitoring & Analytics Components
import MonitoringAnalyticsManager from './lib/utils/monitoring-analytics-manager.js';
// User Experience Enhancement Components
import UXEnhancementManager from './lib/utils/ux-enhancement-manager.js';
// Dual-Path Support: Local Projects + Uploaded Projects
import ProjectToolWrapper from './lib/utils/project-tool-wrapper.js';
// Recreate CommonJS-style __filename/__dirname (ES modules have no built-ins for these)
const __filename = fileURLToPath(import.meta.url);
const __dirname = path.dirname(__filename);
const app = express();
const PORT = 8080; // Force port 8080 for Azure Web App
// Middleware with large file support
app.use(cors());
app.use(express.json({ limit: '500mb' })); // generous body limits for project uploads
app.use(express.urlencoded({ extended: true, limit: '500mb' }));
// Optimize for large uploads with extended timeouts
app.use((req, res, next) => {
  // Set timeout to 60 minutes for all operations
  req.setTimeout(60 * 60 * 1000); // 60 minutes
  res.setTimeout(60 * 60 * 1000); // 60 minutes
  // Extra long timeout for upload and processing routes (overrides the 60-minute default above)
  if (req.path === '/upload' || req.path.startsWith('/process/')) {
    req.setTimeout(90 * 60 * 1000); // 90 minutes for uploads/processing
    res.setTimeout(90 * 60 * 1000);
  }
  // Add timeout error handling
  // NOTE(review): IncomingMessage.setTimeout arms the underlying socket; whether
  // a 'timeout' event is re-emitted on `req`/`res` themselves depends on the
  // Node/Express version — confirm these listeners actually fire.
  req.on('timeout', () => {
    console.error('❌ Request timeout occurred for:', req.path);
    if (!res.headersSent) {
      res.status(504).json({
        error: 'Request timeout',
        message: 'The operation took too long to complete. Please try with a smaller file or contact support.'
      });
    }
  });
  res.on('timeout', () => {
    console.error('❌ Response timeout occurred for:', req.path);
    if (!res.headersSent) {
      res.status(504).json({
        error: 'Response timeout',
        message: 'The server took too long to respond. Please try again.'
      });
    }
  });
  next();
});
// Serve static files with cache control so UI updates are picked up immediately
app.use(express.static(path.join(__dirname, 'public'), {
  maxAge: 0, // No caching for development/frequent updates
  etag: false,
  lastModified: false,
  // Parameter renamed from `path` to `filePath`: the original shadowed the
  // imported `path` module inside this callback.
  setHeaders: (res, filePath) => {
    // Force no-cache for HTML files to ensure UI updates
    if (filePath.endsWith('.html')) {
      res.setHeader('Cache-Control', 'no-cache, no-store, must-revalidate');
      res.setHeader('Pragma', 'no-cache');
      res.setHeader('Expires', '0');
    }
  }
}));
// Storage configuration for file uploads
const storage = multer.diskStorage({
  // Every upload lands in ./uploads (created on demand)
  destination: (req, file, cb) => {
    const uploadDir = path.join(__dirname, 'uploads');
    fse.ensureDirSync(uploadDir);
    cb(null, uploadDir);
  },
  // Prefix the original filename with a UUID to avoid collisions
  filename: (req, file, cb) => {
    const uniqueId = uuidv4();
    cb(null, `${uniqueId}-${file.originalname}`);
  }
});
const upload = multer({
  storage: storage,
  limits: {
    fileSize: 5 * 1024 * 1024 * 1024, // 5GB per file
    fieldSize: 100 * 1024 * 1024, // 100MB per non-file form field
    files: 100, // Max 100 files per upload
    parts: 1000 // Max form parts
  }
});
// Temporary processing directory
const TEMP_DIR = path.join(__dirname, 'temp');
const RESULTS_DIR = path.join(__dirname, 'results');
// Ensure directories exist
fse.ensureDirSync(TEMP_DIR);
fse.ensureDirSync(RESULTS_DIR);
// Initialize Dual-Path Project Tool Wrapper
// This enables both local project support (for Cursor IDE) and uploaded projects (for Web UI)
const projectToolWrapper = new ProjectToolWrapper({
  pathResolver: {
    enableLocalProjects: process.env.ENABLE_LOCAL_PROJECTS !== 'false', // Default: enabled
    tempDir: TEMP_DIR,
    resultsDir: RESULTS_DIR,
    allowedDrives: process.env.ALLOWED_DRIVES?.split(',') || undefined,
    // Explicit radix 10: env vars like "08" or "0x10" must not be re-based.
    // Number.parseInt(undefined, 10) is NaN, so the || 10 fallback still applies.
    maxPathDepth: Number.parseInt(process.env.MAX_PATH_DEPTH, 10) || 10
  }
});
console.log('🔄 Dual-Path Support Initialized:', projectToolWrapper.getConfiguration());
// Initialize enhanced system modules
// Persists per-job status so progress survives restarts.
const jobStatusManager = new JobStatusManager({
  statusFile: path.join(TEMP_DIR, 'job-statuses.json'),
  saveInterval: 30000 // Save every 30 seconds
});
// Purges stale uploads/temp/results on a fixed schedule.
const cleanupManager = new CleanupManager({
  retentionHours: 24,
  checkIntervalMinutes: 60,
  tempDir: TEMP_DIR,
  resultsDir: RESULTS_DIR,
  uploadsDir: path.join(__dirname, 'uploads')
});
const privacyControls = new PrivacyControls({
  tempDir: TEMP_DIR,
  resultsDir: RESULTS_DIR,
  uploadsDir: path.join(__dirname, 'uploads')
});
const errorHandler = new ErrorHandler({
  logDir: path.join(__dirname, 'error-logs'),
  enableConsoleLog: true,
  enableFileLog: true
});
// Initialize Phase 2: Professional Features
const dockerTestManager = new DockerTestManager({
  nodeImage: 'node:18-alpine',
  playwrightImage: 'mcr.microsoft.com/playwright:v1.40.0-jammy',
  containerTimeout: 600000, // 10 minutes
  maxConcurrentContainers: 3
});
const performanceTestManager = new PerformanceTestManager({
  timeout: 300000, // 5 minutes
  iterations: 3,
  loadTimeThreshold: 3000, // Page load budget (ms)
  fcpThreshold: 1500, // First Contentful Paint budget (ms)
  lcpThreshold: 2500, // Largest Contentful Paint budget (ms)
  clsThreshold: 0.1, // Cumulative Layout Shift budget
  scoreThreshold: 70
});
const advancedAnalyticsManager = new AdvancedAnalyticsManager({
  analyticsDir: path.join(TEMP_DIR, 'analytics'),
  retentionDays: 90
});
// Performance & Optimization Manager
const performanceManager = new PerformanceManager({
  cacheDir: path.join(TEMP_DIR, 'performance-cache'),
  maxMemorySize: 200 * 1024 * 1024, // 200MB memory cache
  maxDiskSize: 2 * 1024 * 1024 * 1024, // 2GB disk cache
  maxMemoryThreshold: 512 * 1024 * 1024, // 512MB memory threshold
  maxConcurrency: Math.min(4, os.cpus().length), // Capped at 4 workers regardless of core count
  batchSize: 50,
  enableDetailedProfiling: true
});
// Monitoring & Analytics Manager
const monitoringManager = new MonitoringAnalyticsManager({
  baseDir: TEMP_DIR,
  metricsDir: path.join(TEMP_DIR, 'metrics'),
  monitoringDir: path.join(TEMP_DIR, 'monitoring'),
  alertsDir: path.join(TEMP_DIR, 'alerts'),
  analyticsDir: path.join(TEMP_DIR, 'analytics'),
  reportsDir: path.join(TEMP_DIR, 'health-reports'),
  retentionDays: 30,
  enableRealTimeMetrics: true,
  enableAutoReporting: true,
  enableEmailAlerts: false, // Configure as needed
  enableSlackAlerts: false, // Configure as needed
  enableWebhookAlerts: false, // Configure as needed
  websocketPort: 8081 // Monitoring WebSocket on port 8081 (main HTTP server is 8080)
});
// User Experience Enhancement Manager
const uxManager = new UXEnhancementManager({
  baseDir: TEMP_DIR,
  websocketPort: 8082, // UX WebSocket on port 8082
  statusDir: path.join(TEMP_DIR, 'status'),
  explanationsDir: path.join(TEMP_DIR, 'explanations'),
  retryDir: path.join(TEMP_DIR, 'retries'),
  docsDir: path.join(TEMP_DIR, 'docs'),
  progressDir: path.join(TEMP_DIR, 'progress'),
  enableWebSocket: true, // Re-enable WebSocket now that ports are separated
  enableStatusPersistence: true,
  enablePersistence: true,
  maxRetries: 3,
  baseDelay: 1000, // Retry backoff base delay (ms)
  maxDelay: 30000 // Retry backoff cap (ms)
});
// Level 3 & 4 Enterprise Feature Managers
// COMMENTED OUT: AI Test Generator temporarily disabled
/*
const aiTestGenerator = new AITestGenerator(TEMP_DIR, {
  maxTokens: 4000,
  temperature: 0.7,
  model: 'claude-3.5-sonnet',
  provider: 'anthropic'
});
*/
// Screenshot diffing across three viewports and three engines.
const visualRegressionTester = new VisualRegressionTester(TEMP_DIR, {
  threshold: 0.2, // Pixel-diff tolerance passed through to the tester
  viewports: [
    { name: 'desktop', width: 1920, height: 1080 },
    { name: 'tablet', width: 768, height: 1024 },
    { name: 'mobile', width: 375, height: 667 }
  ],
  browsers: ['chromium', 'firefox', 'webkit']
});
const crossBrowserTester = new CrossBrowserTester(TEMP_DIR, {
  browsers: [
    { name: 'chromium', version: 'latest', engine: 'Blink' },
    { name: 'firefox', version: 'latest', engine: 'Gecko' },
    { name: 'webkit', version: 'latest', engine: 'WebKit' }
  ],
  parallel: true,
  timeout: 30000
});
const mlTestOptimizer = new MLTestOptimizer(TEMP_DIR, {
  optimizationStrategies: ['execution_time', 'failure_probability', 'code_coverage', 'priority_scoring'],
  mlModels: ['decision_tree', 'random_forest', 'neural_network', 'ensemble'],
  confidenceThreshold: 0.8
});
const distributedTestManager = new DistributedTestManager(TEMP_DIR, {
  distributionStrategy: 'load_balanced',
  maxConcurrency: 10,
  failoverEnabled: true
});
const predictiveFailureAnalyzer = new PredictiveFailureAnalyzer(TEMP_DIR, {
  predictionModels: ['time_series', 'classification', 'anomaly_detection', 'ensemble'],
  confidenceThreshold: 0.75,
  alertThreshold: 0.8
});
const automatedTestMaintainer = new AutomatedTestMaintainer(TEMP_DIR, {
  maintenanceStrategies: ['refactoring', 'optimization', 'cleanup', 'modernization'],
  automationLevel: 'semi-automatic',
  safetyChecks: true
});
// Initialize job status manager (loads from disk)
// This will be called in the server startup function
/**
* Repository Integration Functions
*/
/**
 * Parse a GitHub/GitLab repository URL into a download descriptor.
 *
 * Accepts URLs like:
 *   https://github.com/owner/repo[.git][?branch=name]
 *   https://gitlab.com/owner/repo[.git][?branch=name]
 *
 * @param {string} repoUrl - Full repository URL (must parse as a URL).
 * @returns {{provider: string, owner: string, repo: string, branch: string,
 *            downloadUrl: string, webUrl: string}} Archive-download metadata.
 * @throws {Error} If the host is not GitHub/GitLab, or owner/repo are missing.
 */
function parseRepositoryUrl(repoUrl) {
  const url = new URL(repoUrl);
  const pathParts = url.pathname.split('/').filter(p => p);
  const owner = pathParts[0];
  // Strip only a trailing ".git" suffix. The previous .replace('.git', '')
  // removed the first ".git" anywhere, corrupting names like "my.gitops".
  const repo = pathParts[1]?.replace(/\.git$/, '');
  const branch = url.searchParams.get('branch') || 'main';
  if (url.hostname.includes('github.com')) {
    if (!owner || !repo) {
      throw new Error('Invalid GitHub repository URL: expected /owner/repo path.');
    }
    return {
      provider: 'github',
      owner,
      repo,
      branch,
      downloadUrl: `https://api.github.com/repos/${owner}/${repo}/tarball/${branch}`,
      webUrl: `https://github.com/${owner}/${repo}`
    };
  } else if (url.hostname.includes('gitlab.com')) {
    if (!owner || !repo) {
      throw new Error('Invalid GitLab repository URL: expected /owner/repo path.');
    }
    return {
      provider: 'gitlab',
      owner,
      repo,
      branch,
      downloadUrl: `https://gitlab.com/${owner}/${repo}/-/archive/${branch}/${repo}-${branch}.tar.gz`,
      webUrl: `https://gitlab.com/${owner}/${repo}`
    };
  }
  throw new Error('Unsupported repository provider. Only GitHub and GitLab are supported.');
}
// Download repository as tar/zip
/**
 * Download a repository archive and extract it into targetDir/extracted.
 *
 * @param {{provider: string, owner: string, repo: string, downloadUrl: string}} repoInfo
 *        Descriptor produced by parseRepositoryUrl().
 * @param {string} targetDir - Directory that receives the temporary tarball and
 *        the "extracted" output folder.
 * @param {string|null} [accessToken] - Optional token for private repositories
 *        (GitHub: Authorization header; GitLab: PRIVATE-TOKEN header).
 * @returns {Promise<string>} Path of the extraction directory.
 * @throws {Error} With a user-oriented message for auth/network/extract failures.
 */
async function downloadRepository(repoInfo, targetDir, accessToken = null) {
  console.log(`🔄 Downloading ${repoInfo.provider} repository: ${repoInfo.owner}/${repoInfo.repo}`);
  try {
    const headers = {
      'User-Agent': 'xor-iqassist-tool'
    };
    // Add authorization header for private repositories
    if (accessToken) {
      if (repoInfo.provider === 'github') {
        headers['Authorization'] = `token ${accessToken}`;
      } else if (repoInfo.provider === 'gitlab') {
        headers['PRIVATE-TOKEN'] = accessToken;
      }
      console.log(`🔐 Using access token for private repository`);
    }
    const response = await fetch(repoInfo.downloadUrl, { headers });
    if (!response.ok) {
      // Translate common HTTP failures into actionable messages
      if (response.status === 404) {
        throw new Error(`Repository not found or is private. If this is a private repository, please provide an access token.`);
      } else if (response.status === 401 || response.status === 403) {
        throw new Error(`Access denied. Please check your access token or repository permissions.`);
      } else {
        throw new Error(`Failed to download repository: ${response.status} ${response.statusText}`);
      }
    }
    console.log(`📦 Downloading repository content...`);
    // Get content length for progress tracking
    // (header value is a string; the division below relies on implicit numeric coercion)
    const contentLength = response.headers.get('content-length');
    console.log(`📊 Repository size: ${contentLength ? Math.round(contentLength / 1024 / 1024) + 'MB' : 'Unknown size'}`);
    // The whole archive is buffered in memory before being written to disk
    const buffer = await response.arrayBuffer();
    const tempTarPath = path.join(targetDir, 'temp-repo.tar.gz');
    await fse.writeFile(tempTarPath, Buffer.from(buffer));
    console.log(`💾 Downloaded ${buffer.byteLength} bytes (${Math.round(buffer.byteLength / 1024 / 1024)}MB) to: ${tempTarPath}`);
    // Extract the tar file
    const extractDir = path.join(targetDir, 'extracted');
    await fse.ensureDir(extractDir);
    console.log(`📂 Extracting repository...`);
    try {
      await tar.extract({
        file: tempTarPath,
        cwd: extractDir,
        strip: 1 // Remove the top-level directory from the archive
      });
      // Verify extraction
      const extractedFiles = await fse.readdir(extractDir);
      console.log(`✅ Repository extracted successfully - ${extractedFiles.length} items found`);
      console.log(`📁 Top-level items: ${extractedFiles.slice(0, 10).join(', ')}${extractedFiles.length > 10 ? '...' : ''}`);
    } catch (extractError) {
      console.error(`❌ Tar extraction failed:`, extractError);
      throw new Error(`Failed to extract repository archive: ${extractError.message}`);
    }
    // Clean up tar file
    await fse.remove(tempTarPath);
    return extractDir;
  } catch (error) {
    console.error(`❌ Error downloading repository: ${error.message}`);
    console.error(`❌ Full error stack:`, error.stack);
    console.error(`❌ Error code:`, error.code);
    console.error(`❌ Error name:`, error.name);
    // Provide more specific error messages
    // NOTE(review): substring matching on error.message ('tar'/'extract'/'fetch')
    // is fragile — prefer error codes or classes if this path is reworked.
    if (error.code === 'ENOTFOUND') {
      throw new Error(`Network error: Could not resolve hostname. Please check your internet connection.`);
    } else if (error.code === 'ECONNREFUSED') {
      throw new Error(`Connection refused: Could not connect to repository server.`);
    } else if (error.message.includes('tar') || error.message.includes('extract')) {
      throw new Error(`Archive extraction failed: ${error.message}. The repository archive may be corrupted.`);
    } else if (error.message.includes('fetch')) {
      throw new Error(`Repository download failed: ${error.message}. Check repository URL and access permissions.`);
    }
    throw error;
  }
}
/**
* IQAssist Core Implementation with Real File Operations
*/
class IQAssistProcessor {
/**
 * @param {string} projectPath - Absolute path of the project to analyze/test.
 * @param {string} jobId - Unique job identifier; also names the results folder.
 */
constructor(projectPath, jobId) {
  this.projectPath = projectPath;
  this.jobId = jobId;
  // Per-job results folder under RESULTS_DIR, created eagerly.
  this.resultsPath = path.join(RESULTS_DIR, jobId);
  fse.ensureDirSync(this.resultsPath);
  // Initialize Data Quality & Generation components
  this.dataValidator = new DataValidator({
    strictMode: true,
    allowEmptyArrays: false,
    allowNullValues: false,
    logLevel: 'INFO'
  });
  this.dataQualityReporter = new DataQualityReporter({
    reportDir: path.join(this.resultsPath, 'data-quality-reports'),
    enableFileOutput: true,
    logLevel: 'INFO'
  });
  // Enhanced project analyzer will be initialized lazily when first needed
  // (it requires a logger instance — see generateEnhancedCodeSummary)
  this.enhancedProjectAnalyzer = null;
  this.realisticDataGenerator = null;
  this.testDataSeeder = null;
  this.projectAnalysis = null; // Will store comprehensive project analysis
  // Performance optimization integration (shared module-level instance)
  this.performanceManager = performanceManager;
  // Monitoring & Analytics integration (shared module-level instance)
  this.monitoringManager = monitoringManager;
  // User Experience Enhancement integration (shared module-level instance)
  this.uxManager = uxManager;
}
/**
* Track service execution with comprehensive monitoring
*/
trackServiceExecution(serviceName, executionData) {
try {
return this.monitoringManager.trackServiceExecution(serviceName, {
...executionData,
jobId: this.jobId,
userId: executionData.userId || 'system',
sessionId: executionData.sessionId || this.jobId,
timestamp: Date.now()
});
} catch (error) {
console.warn(`⚠️ Failed to track service execution for ${serviceName}:`, error.message);
}
}
/**
* Start operation with comprehensive UX tracking
*/
startUXOperation(serviceName, operationName, options = {}) {
try {
return this.uxManager.startOperation(serviceName, operationName, {
...options,
jobId: this.jobId,
userId: options.userId || 'system',
metadata: {
projectPath: this.projectPath,
jobId: this.jobId,
...options.metadata
}
});
} catch (error) {
console.warn(`⚠️ Failed to start UX tracking for ${serviceName}:${operationName}:`, error.message);
return null;
}
}
async ensureIqassistScaffold() {
// Only create essential directories that will always be used
const essentialDirs = [
path.join(this.projectPath, ".iqassist"),
path.join(this.projectPath, ".iqassist", "config"),
path.join(this.projectPath, ".iqassist", "reports"),
path.join(this.projectPath, ".iqassist", "runner"),
];
for (const dir of essentialDirs) {
await fse.ensureDir(dir);
}
console.log('📁 Created essential .iqassist scaffold directories');
return path.join(this.projectPath, ".iqassist");
}
/**
 * Analyze the entire project tree and build a comprehensive "code summary":
 * file-type counts, detected languages/frameworks/tech stack, frontend
 * components, backend features, API endpoints (Express-style routes and Azure
 * Functions via function.json), plus structural statistics. Detection is
 * heuristic (filename substrings and regex content scans), so results are
 * best-effort, not exhaustive. The summary is written to both
 * <project>/.iqassist/config/code_summary.json and the job results directory.
 *
 * @returns {Promise<{summary: object, summaryFile: string}>} The summary
 *          object and the path of the in-project summary file.
 * @throws Re-throws any analysis/IO error after logging it.
 */
async generateCodeSummary() {
  console.log('📊 Analyzing project comprehensively...');
  try {
    // Real comprehensive file analysis
    const allFiles = await fg(['**/*'], {
      cwd: this.projectPath,
      dot: false,
      ignore: ['node_modules/**', '.git/**', '.iqassist/**', 'dist/**', 'build/**']
    });
    console.log(`📁 Found ${allFiles.length} files to analyze`);
    const fileTypes = {};
    const languages = new Set();
    const frameworks = new Set();
    const techStack = [];
    const features = [];
    // Analyze each file
    for (const file of allFiles) {
      const ext = path.extname(file).toLowerCase();
      fileTypes[ext] = (fileTypes[ext] || 0) + 1;
      // Language detection by extension
      if (['.js', '.jsx'].includes(ext)) languages.add('JavaScript');
      if (['.ts', '.tsx'].includes(ext)) languages.add('TypeScript');
      if (['.py'].includes(ext)) languages.add('Python');
      if (['.java'].includes(ext)) languages.add('Java');
      if (['.cs'].includes(ext)) languages.add('C#');
      if (['.css', '.scss', '.sass'].includes(ext)) languages.add('CSS');
      if (['.html', '.htm'].includes(ext)) languages.add('HTML');
      if (['.json'].includes(ext)) languages.add('JSON');
      // Framework detection from file names and paths (substring heuristic)
      if (file.includes('react') || file.includes('jsx') || file.includes('component')) frameworks.add('React');
      if (file.includes('vue')) frameworks.add('Vue');
      if (file.includes('angular')) frameworks.add('Angular');
      if (file.includes('express') || file.includes('server')) frameworks.add('Express');
      if (file.includes('next')) frameworks.add('Next.js');
      if (file.includes('gatsby')) frameworks.add('Gatsby');
      if (file.includes('svelte')) frameworks.add('Svelte');
    }
    // Enhanced package.json analysis and project name extraction
    let projectName = null;
    const packageFiles = allFiles.filter(f => f.endsWith('package.json'));
    for (const pkgFile of packageFiles) {
      try {
        const pkgPath = path.join(this.projectPath, pkgFile);
        const pkg = await fse.readJson(pkgPath);
        const deps = { ...(pkg.dependencies || {}), ...(pkg.devDependencies || {}) };
        // Extract project name from package.json (prefer root package.json)
        if (pkg.name && (pkgFile === 'package.json' || !projectName)) {
          // Clean up package name (remove scoping, convert to readable format)
          projectName = pkg.name
            .replace(/^@[^\/]+\//, '') // Remove scoping like @company/
            .replace(/[-_]/g, ' ') // Replace dashes/underscores with spaces
            .replace(/\b\w/g, l => l.toUpperCase()); // Title case
        }
        // Framework detection from dependencies (more reliable than filenames)
        if (deps.react) { frameworks.add('React'); techStack.push('React'); }
        if (deps.vue) { frameworks.add('Vue'); techStack.push('Vue'); }
        if (deps['@angular/core']) { frameworks.add('Angular'); techStack.push('Angular'); }
        if (deps.express) { frameworks.add('Express'); techStack.push('Express'); }
        if (deps.next) { frameworks.add('Next.js'); techStack.push('Next.js'); }
        if (deps.typescript) { techStack.push('TypeScript'); languages.add('TypeScript'); }
        if (deps.vite) techStack.push('Vite');
        if (deps.webpack) techStack.push('Webpack');
        if (deps.jest) techStack.push('Jest');
        if (deps['@playwright/test']) techStack.push('Playwright');
      } catch (error) {
        console.warn(`Could not read ${pkgFile}:`, error.message);
      }
    }
    // Try to extract project name from README.md if not found in package.json
    if (!projectName) {
      const readmeFiles = allFiles.filter(f => f.toLowerCase().includes('readme'));
      for (const readmeFile of readmeFiles) {
        try {
          const readmePath = path.join(this.projectPath, readmeFile);
          const readmeContent = await fse.readFile(readmePath, 'utf8');
          const lines = readmeContent.split('\n');
          // Look for title lines (# Title or ## Title)
          for (const line of lines.slice(0, 20)) { // Check first 20 lines
            const titleMatch = line.match(/^#+\s+(.+)$/);
            if (titleMatch && titleMatch[1].trim()) {
              const title = titleMatch[1].trim();
              // Skip generic readme titles
              if (!title.toLowerCase().includes('readme') &&
                !title.toLowerCase().includes('documentation') &&
                !title.toLowerCase().includes('table of contents')) {
                projectName = title;
                console.log(`✅ Extracted project name from README: "${projectName}"`);
                break;
              }
            }
          }
          if (projectName) break;
        } catch (error) {
          // Ignore README parsing errors (name extraction is best-effort)
        }
      }
    }
    // Feature detection from file structure
    const frontendDir = path.join(this.projectPath, "frontend");
    const backendDir = path.join(this.projectPath, "Backend");
    const rootSrcDir = path.join(this.projectPath, "src");
    // NOTE(review): intentionally empty branch — placeholder kept from the
    // original code; frontend presence is recomputed later for the summary.
    if (await fse.pathExists(frontendDir) || await fse.pathExists(rootSrcDir)) {
      // Frontend structure detected
    }
    // Comprehensive Frontend Component Detection
    const componentFiles = allFiles.filter(f => {
      const ext = path.extname(f).toLowerCase();
      const baseName = path.basename(f, ext);
      const fileName = f.toLowerCase();
      // Include JS/TS files that could be frontend
      if (['.jsx', '.tsx', '.vue', '.js', '.ts', '.html', '.htm'].includes(ext)) {
        return (
          // Traditional component patterns
          baseName.match(/^[A-Z]/) || // Pascal case components
          f.includes('component') ||
          f.includes('Component') ||
          f.includes('/src/') ||
          f.includes('/components/') ||
          f.includes('/pages/') ||
          f.includes('/views/') ||
          // Dashboard specific patterns
          fileName.includes('dashboard') ||
          fileName.includes('chart') ||
          fileName.includes('graph') ||
          fileName.includes('plot') ||
          fileName.includes('canvas') ||
          fileName.includes('svg') ||
          fileName.includes('d3') ||
          fileName.includes('visualization') ||
          fileName.includes('report') ||
          fileName.includes('analytics') ||
          // Frontend-like directories
          f.includes('/js/') ||
          f.includes('/scripts/') ||
          f.includes('/assets/') ||
          f.includes('/static/') ||
          f.includes('/public/') ||
          f.includes('/client/') ||
          f.includes('/frontend/') ||
          f.includes('/ui/') ||
          f.includes('/web/') ||
          // Generic JS files that might be frontend (but exclude obvious backend)
          (!fileName.includes('server') &&
            !fileName.includes('backend') &&
            !fileName.includes('api') &&
            !fileName.includes('database') &&
            !fileName.includes('model') &&
            !fileName.includes('controller') &&
            !fileName.includes('service') &&
            !fileName.includes('config') &&
            !fileName.includes('test') &&
            !fileName.includes('spec') &&
            ext === '.js')
        );
      }
      return false;
    });
    console.log(`🎨 Found ${componentFiles.length} potential frontend components`);
    for (const file of componentFiles.slice(0, 50)) { // Analyze up to 50 components
      try {
        const filePath = path.join(this.projectPath, file);
        const content = await fse.readFile(filePath, 'utf-8');
        const baseName = path.basename(file, path.extname(file));
        // Check if it's actually a React/Vue/Angular component
        const componentIndicators = [
          /import.*React/i,
          /from ['"]react['"]/i,
          /export default.*component/i,
          /export.*function.*\(/i,
          /export.*const.*=.*\(/i,
          /<template>/i,
          /<script>/i,
          /@Component/i,
          /class.*extends.*Component/i,
          /function.*\(.*props.*\)/i,
          /const.*=.*\(.*\).*=>/i,
          // More comprehensive frontend patterns
          /document\./i,
          /window\./i,
          /getElementById/i,
          /querySelector/i,
          /addEventListener/i,
          /innerHTML/i,
          /textContent/i,
          /createElement/i,
          /appendChild/i,
          // Dashboard/Chart specific patterns
          /chart/i,
          /dashboard/i,
          /graph/i,
          /plot/i,
          /canvas/i,
          /svg/i,
          /d3\./i,
          /Chart\.js/i,
          /chartjs/i,
          // Common frontend patterns
          /render/i,
          /template/i,
          /view/i,
          /component/i,
          /module\.exports.*function/i,
          /exports\./i,
          // Generic JS that could be frontend
          /function.*\w+.*\(/,
          /var.*=.*function/,
          /let.*=.*function/,
          /const.*=.*function/
        ];
        if (componentIndicators.some(pattern => pattern.test(content))) {
          features.push({
            name: baseName,
            description: `UI component (${path.extname(file).substring(1).toUpperCase()})`,
            type: "frontend",
            files: [file],
            framework: frameworks.has('React') ? 'React' : frameworks.has('Vue') ? 'Vue' : frameworks.has('Angular') ? 'Angular' : 'JavaScript'
          });
          console.log(`✅ Frontend component detected: ${baseName} (${file})`);
        }
      } catch (error) {
        console.warn(`⚠️ Could not analyze component ${file}: ${error.message}`);
      }
    }
    // Comprehensive API detection (move this BEFORE using apiEndpoints)
    const apiFiles = [];
    const apiEndpoints = [];
    const functionFiles = [];
    // Deep content analysis for actual API detection
    for (const file of allFiles) {
      if (['.js', '.ts', '.py', '.cs', '.java', '.json'].includes(path.extname(file))) {
        try {
          const filePath = path.join(this.projectPath, file);
          const content = await fse.readFile(filePath, 'utf-8');
          // Detect Azure Functions (function.json files)
          if (file.endsWith('function.json')) {
            try {
              const funcConfig = JSON.parse(content);
              if (funcConfig.bindings) {
                const httpTrigger = funcConfig.bindings.find(b => b.type === 'httpTrigger');
                if (httpTrigger) {
                  // Function name is the directory that holds function.json
                  const funcName = path.basename(path.dirname(file));
                  const methods = httpTrigger.methods || ['GET', 'POST'];
                  const route = httpTrigger.route || funcName;
                  functionFiles.push(file);
                  methods.forEach(method => {
                    apiEndpoints.push({
                      file: file,
                      method: method.toUpperCase(),
                      endpoint: `/${route}`,
                      type: 'azure_function',
                      functionName: funcName
                    });
                  });
                  console.log(`🔵 Azure Function detected: ${funcName} (${methods.join(', ')}) -> /${route}`);
                }
              }
            } catch (parseError) {
              console.warn(`⚠️ Could not parse function.json ${file}: ${parseError.message}`);
            }
          }
          // Detect API routes and endpoints from actual code
          // (patterns are re-created per file, so /g lastIndex state is fresh)
          const routePatterns = [
            /app\.(get|post|put|delete|patch)\s*\(\s*['"`]([^'"`]+)['"`]/gi,
            /router\.(get|post|put|delete|patch)\s*\(\s*['"`]([^'"`]+)['"`]/gi,
            /@(Get|Post|Put|Delete|Patch)\s*\(\s*['"`]([^'"`]+)['"`]/gi,
            /Route\s*\[\s*['"`](GET|POST|PUT|DELETE|PATCH)['"`]\s*,?\s*['"`]([^'"`]+)['"`]/gi,
            /function\s+(\w+)\s*\([^)]*res(ponse)?\s*[,)]/gi,
            /export\s+(async\s+)?function\s+(\w+)/gi,
            /exports\.(\w+)\s*=/gi,
            /module\.exports\s*=\s*(\w+)/gi
          ];
          for (const pattern of routePatterns) {
            let match;
            while ((match = pattern.exec(content)) !== null) {
              const method = match[1] || match[3] || 'Unknown';
              const endpoint = match[2] || match[4] || `function_${match[1] || match[2] || 'handler'}`;
              if (endpoint && endpoint !== 'function_undefined') {
                apiEndpoints.push({
                  file,
                  method: method.toUpperCase(),
                  endpoint,
                  // 1-based line number of the match within the file
                  line: content.substring(0, match.index).split('\n').length,
                  type: 'code_detected'
                });
              }
            }
          }
          // Check if file contains API-related code
          const apiIndicators = [
            /express\(\)/i,
            /fastapi/i,
            /flask/i,
            /django/i,
            /app\.listen/i,
            /createServer/i,
            /HttpServlet/i,
            /Controller/i,
            /RestController/i,
            /@app\.route/i,
            /func main.*http/i,
            /httpTrigger/i,
            /azureFunction/i,
            /module\.exports/i,
            /exports\./i
          ];
          if (apiIndicators.some(pattern => pattern.test(content))) {
            apiFiles.push(file);
          }
        } catch (error) {
          console.warn(`⚠️ Could not analyze file ${file}: ${error.message}`);
        }
      }
    }
    const hasApi = apiFiles.length > 0 || apiEndpoints.length > 0;
    console.log(`🔍 API Analysis Complete: ${apiEndpoints.length} endpoints, ${functionFiles.length} functions, ${apiFiles.length} API files`);
    // Comprehensive Backend Features Detection using detected APIs
    if (apiEndpoints && apiEndpoints.length > 0) {
      // Group endpoints into one feature per function/route prefix/file
      const groupedEndpoints = {};
      apiEndpoints.forEach(ep => {
        const featureName = ep.functionName ||
          ep.endpoint.split('/')[1] ||
          path.basename(ep.file, path.extname(ep.file));
        if (!groupedEndpoints[featureName]) {
          groupedEndpoints[featureName] = {
            name: featureName,
            description: ep.type === 'azure_function' ? 'Azure Function API' : 'REST API Endpoint',
            type: "backend",
            files: [ep.file],
            endpoints: [],
            apiType: ep.type
          };
        }
        groupedEndpoints[featureName].endpoints.push({
          method: ep.method,
          route: ep.endpoint,
          line: ep.line
        });
      });
      Object.values(groupedEndpoints).forEach(feature => {
        features.push(feature);
        console.log(`✅ Backend feature detected: ${feature.name} (${feature.endpoints.length} endpoints)`);
      });
    }
    // Additional Service/Module Detection (filename substring heuristic)
    const serviceFiles = allFiles.filter(f => {
      const fileName = f.toLowerCase();
      return fileName.includes('service') ||
        fileName.includes('controller') ||
        fileName.includes('model') ||
        fileName.includes('repository') ||
        fileName.includes('middleware') ||
        fileName.includes('util') ||
        fileName.includes('helper');
    });
    for (const file of serviceFiles.slice(0, 20)) {
      const baseName = path.basename(file, path.extname(file));
      const serviceType = file.includes('service') ? 'Service' :
        file.includes('controller') ? 'Controller' :
        file.includes('model') ? 'Model' :
        file.includes('repository') ? 'Repository' :
        file.includes('middleware') ? 'Middleware' : 'Utility';
      features.push({
        name: baseName,
        description: `${serviceType} module`,
        type: "backend",
        files: [file],
        category: serviceType.toLowerCase()
      });
      console.log(`✅ ${serviceType} detected: ${baseName} (${file})`);
    }
    // Legacy backend analysis for traditional structure ("Backend/" directory)
    if (await fse.pathExists(backendDir)) {
      techStack.push("Backend");
      // Check for Azure Functions in traditional structure
      const functionDirs = await fg(["**/function.json"], { cwd: backendDir });
      for (const funcFile of functionDirs) {
        const funcDir = path.dirname(funcFile);
        // Only add if not already detected by comprehensive analysis
        if (!features.some(f => f.name === path.basename(funcDir))) {
          features.push({
            name: path.basename(funcDir),
            description: "Azure Function",
            type: "backend",
            files: [path.join("Backend", funcFile)]
          });
        }
      }
      // Backend package.json
      const bePkg = path.join(backendDir, "package.json");
      if (await fse.pathExists(bePkg)) {
        const pkg = JSON.parse(await fs.readFile(bePkg, "utf8"));
        const deps = { ...(pkg.dependencies||{}), ...(pkg.devDependencies||{}) };
        if (deps.express) techStack.push("Express");
        if (deps["@azure/functions"]) techStack.push("Azure Functions");
      }
    }
    // Assemble the final summary document
    const summary = {
      projectName: projectName || 'Application', // Use extracted name or fallback to 'Application'
      totalFiles: allFiles.length,
      fileTypes: fileTypes,
      languages: Array.from(languages),
      frameworks: Array.from(frameworks),
      techStack: [...new Set(techStack)],
      features: features,
      complexity: allFiles.length > 1000 ? "High" : allFiles.length > 100 ? "Medium" : "Low",
      analyzed_at: new Date().toISOString(),
      project_structure: {
        has_frontend: await fse.pathExists(frontendDir) || allFiles.some(f => f.includes('src/') || f.includes('component')),
        has_backend: await fse.pathExists(backendDir) || allFiles.some(f => f.includes('server') || f.includes('api')),
        has_apis: hasApi,
        api_count: apiEndpoints.length,
        function_count: functionFiles.length,
        estimated_complexity: features.length > 10 ? "high" : features.length > 5 ? "medium" : "low",
        structure_analysis: {
          frontend_files: allFiles.filter(f => f.includes('src/') || f.includes('component') || f.includes('page')).length,
          backend_files: allFiles.filter(f => f.includes('server') || f.includes('api') || f.includes('service')).length,
          test_files: allFiles.filter(f => f.includes('test') || f.includes('spec')).length,
          config_files: allFiles.filter(f => f.includes('config') || f.endsWith('.config.js')).length,
          api_files: apiFiles.length,
          function_files: functionFiles.length
        }
      },
      api_analysis: {
        total_endpoints: apiEndpoints.length,
        azure_functions: functionFiles.length,
        api_files: apiFiles.length,
        endpoints: apiEndpoints.map(ep => ({
          method: ep.method,
          endpoint: ep.endpoint,
          file: ep.file,
          type: ep.type,
          functionName: ep.functionName
        })),
        function_files: functionFiles
      }
    };
    // Save to project and results
    const summaryFile = path.join(this.projectPath, ".iqassist", "config", "code_summary.json");
    const resultsFile = path.join(this.resultsPath, "code_summary.json");
    await fs.writeFile(summaryFile, JSON.stringify(summary, null, 2));
    await fs.writeFile(resultsFile, JSON.stringify(summary, null, 2));
    return { summary, summaryFile };
  } catch (error) {
    console.error("Error generating code summary:", error);
    throw error;
  }
}
/**
* Enhanced code analysis using the new Enhanced Project Analyzer
*/
async generateEnhancedCodeSummary() {
try {
console.log(`📊 Analyzing project structure with Enhanced Project Analyzer for ${this.jobId}...`);
// Start UX tracking for project analysis
const uxOperation = this.startUXOperation('Enhanced Project Analysis', 'Project Structure Analysis', {
totalSteps: 5,
metadata: {
projectPath: this.projectPath,
analysisType: 'enhanced'
}
});
// Use performance optimization for project analysis
const startTime = Date.now();
const analysisResult = await this.performanceManager.executeOptimized(
'EnhancedProjectAnalysis',
async () => {
// Step 1: Initialize Enhanced Project Analyzer
if (uxOperation) {
uxOperation.updateProgress(0, 20, 'Initializing Enhanced Project Analyzer...');
}
if (!this.enhancedProjectAnalyzer) {
const { EnhancedLogger } = await import('./lib/utils/enhanced-logger.js');
const logger = new EnhancedLogger('IQAssistProcessor', { logLevel: 'INFO' });
this.enhancedProjectAnalyzer = new EnhancedProjectAnalyzer(this.projectPath, logger);
}
// Step 2: Start project analysis
if (uxOperation) {
uxOperation.updateProgress(1, 40, 'Scanning project structure...');
}
// Perform comprehensive project analysis
const result = await this.enhancedProjectAnalyzer.analyzeProject();
// Step 3: Complete analysis
if (uxOperation) {
uxOperation.completeStep(2, 'Project analysis completed', result);
}
return result;
},
{
jobId: this.jobId,
cacheKey: `project-analysis:${this.projectPath}`,
cacheTTL: 12 * 60 * 60 * 1000, // 12 hours
context: { projectPath: this.projectPath },
profiling: true,
progress: true
}
);
// Track service execution
this.trackServiceExecution('EnhancedProjectAnalysis', {
success: true,
responseTime: Date.now() - startTime,
endpoint: '/process/enhanced-analysis',
features: ['project-analysis', 'enhanced-analyzer'],
metadata: {
projectPath: this.projectPath,
filesAnalyzed: analysisResult?.structure?.totalFiles || 0,
componentsFound: analysisResult?.components?.length || 0
}
});
this.projectAnalysis = analysisResult;
// Step 4: Initialize data generators
if (uxOperation) {
uxOperation.updateProgress(3, 80, 'Initializing data generators...');
}
// Initialize realistic data generator with project analysis
this.realisticDataGenerator = new RealisticDataGenerator(this.projectAnalysis, {
seed: Date.now(),
dataQuality: 'high',
logLevel: 'INFO'
});
// Initialize test data seeder
this.testDataSeeder = new TestDataSeeder(this.projectPath, this.projectAnalysis, {
seedCount: 10,
includeEdgeCases: true,
generateFiles: true,
outputDir: path.join(this.resultsPath, 'test-data-seeds'),
logLevel: 'INFO'
});
// Convert enhanced analysis to legacy format for compatibility
const legacySummary = this.convertToLegacyFormat(this.projectAnalysis);
// Save both formats
const summaryFile = path.join(this.projectPath, ".iqassist", "config", "code_summary.json");
const enhancedAnalysisFile = path.join(this.projectPath, ".iqassist", "config", "enhanced_analysis.json");
const resultsFile = path.join(this.resultsPath, "code_summary.json");
const enhancedResultsFile = path.join(this.resultsPath, "enhanced_analysis.json");
await fse.ensureDir(path.dirname(summaryFile));
await fs.writeFile(summaryFile, JSON.stringify(legacySummary, null, 2));
await fs.writeFile(enhancedAnalysisFile, JSON.stringify(this.projectAnalysis, null, 2));
await fs.writeFile(resultsFile, JSON.stringify(legacySummary, null, 2));
await fs.writeFile(enhancedResultsFile, JSON.stringify(this.projectAnalysis, null, 2));
// Step 5: Complete analysis
if (uxOperation) {
uxOperation.complete({
totalFiles: this.projectAnalysis.structure.totalFiles,
components: this.projectAnalysis.components.length,
apis: this.projectAnalysis.apis.length,
services: this.projectAnalysis.services.length,
complexity: this.projectAnalysis.complexity.averageComplexity.toFixed(2),
quality: this.projectAnalysis.quality.maintainabilityIndex.toFixed(2),
frameworks: this.projectAnalysis.structure.frameworks.join(', ')
}, 'Enhanced code analysis completed successfully');
}
console.log(`✅ Enhanced code analysis completed:`);
console.log(` 📁 Total files: ${this.projectAnalysis.structure.totalFiles}`);
console.log(` 🧩 Components: ${this.projectAnalysis.components.length}`);
console.log(` 🔗 APIs: ${this.projectAnalysis.apis.length}`);
console.log(` 🛠️ Services: ${this.projectAnalysis.services.length}`);
console.log(` 📊 Complexity: ${this.projectAnalysis.complexity.averageComplexity.toFixed(2)}`);
console.log(` 🎯 Quality: ${this.projectAnalysis.quality.maintainabilityIndex.toFixed(2)}%`);
console.log(` 🏗️ Frameworks: ${this.projectAnalysis.structure.frameworks.join(', ')}`);
return { summary: legacySummary, summaryFile };
} catch (error) {
console.error(`❌ Enhanced code analysis failed: ${error.message}`);
console.error(error.stack);
// Handle UX failure
if (uxOperation) {
uxOperation.fail(error, 'Enhanced code analysis failed, falling back to original method');
}
// Fallback to original method
console.log('🔄 Falling back to original code analysis...');
return await this.generateCodeSummary();
}
}
/**
* Convert enhanced analysis to legacy format for compatibility
*/
convertToLegacyFormat(analysis) {
// Extract project name from package.json or README instead of using directory name (which would be Job ID)
let projectName = 'Unknown Project';
// Try to get project name from analysis metadata first
if (analysis.structure && analysis.structure.projectName) {
projectName = analysis.structure.projectName;
} else {
// Try to extract from package.json in the project (check multiple locations)
const possiblePackageLocations = [
path.join(this.projectPath, 'package.json'),
path.join(this.projectPath, 'frontend', 'package.json'),
path.join(this.projectPath, 'backend', 'package.json'),
path.join(this.projectPath, 'admin', 'package.json'),
path.join(this.projectPath, 'client', 'package.json'),
path.join(this.projectPath, 'server', 'package.json')
];
for (const pkgPath of possiblePackageLocations) {
try {
if (fs.existsSync(pkgPath)) {
const pkgContent = fs.readFileSync(pkgPath, 'utf8');
const pkg = JSON.parse(pkgContent);
if (pkg.name) {
projectName = pkg.name
.replace(/^@[^\/]+\//, '') // Remove scoping like @company/
.replace(/[-_]/g, ' ') // Replace dashes/underscores with spaces
.replace(/\b\w/g, l => l.toUpperCase()); // Title case
console.log(`✅ Extracted project name from ${pkgPath}: "${projectName}"`);
break;
}
}
} catch (error) {
// Continue to next location
}
}
// If still not found, try README
if (projectName === 'Unknown Project') {
const readmeLocations = [
path.join(this.projectPath, 'README.md'),
path.join(this.projectPath, 'readme.md'),
path.join(this.projectPath, 'README.MD'),
path.join(this.projectPath, 'Readme.md')
];
for (const readmePath of readmeLocations) {
try {
if (fs.existsSync(readmePath)) {
const readmeContent = fs.readFileSync(readmePath, 'utf8');
const lines = readmeContent.split('\n');
// Look for title lines (# Title or ## Title)
for (const line of lines.slice(0, 20)) { // Check first 20 lines
const titleMatch = line.match(/^#+\s+(.+)$/);
if (titleMatch && titleMatch[1].trim()) {
const title = titleMatch[1].trim();
// Skip generic readme titles
if (!title.toLowerCase().includes('readme') &&
!title.toLowerCase().includes('documentation') &&
!title.toLowerCase().includes('table of contents')) {
projectName = title;
console.log(`✅ Extracted project name from README: "${projectName}"`);
break;
}
}
}
if (projectName !== 'Unknown Project') break;
}
} catch (error) {
// Continue to next location
}
}
}
// If still not found, try to extract from repository URL
if (projectName === 'Unknown Project') {
if (global.jobStatuses && global.jobStatuses[this.jobId] && global.jobStatuses[this.jobId].repoUrl) {
const repoUrl = global.jobStatuses[this.jobId].repoUrl;
const repoMatch = repoUrl.match(/\/([^\/]+?)(?:\.git)?(?:\/)?$/);
if (repoMatch && repoMatch[1]) {
projectName = repoMatch[1]
.replace(/[-_]/g, ' ') // Replace dashes/underscores with spaces
.replace(/\b\w/g, l => l.toUpperCase()); // Title case
console.log(`✅ Extracted project name from repository URL: "${projectName}"`);
}
}
}