UNPKG

@samiyev/guardian

Version:

Research-backed code quality guardian for AI-assisted development. Detects hardcodes, secrets, circular deps, framework leaks, entity exposure, and 9 architecture violations. Enforces Clean Architecture/DDD principles. Works with GitHub Copilot, Cursor, Windsurf, and similar AI coding assistants. [description truncated in page capture — verify against package.json]

58 lines 2.87 kB
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.AnalyzeProject = void 0;
const BaseUseCase_1 = require("./BaseUseCase");
const ResponseDto_1 = require("../dtos/ResponseDto");
const CollectFiles_1 = require("./pipeline/CollectFiles");
const ParseSourceFiles_1 = require("./pipeline/ParseSourceFiles");
const ExecuteDetection_1 = require("./pipeline/ExecuteDetection");
const AggregateResults_1 = require("./pipeline/AggregateResults");
const constants_1 = require("../../shared/constants");
/**
 * Top-level use case that analyzes a project's codebase.
 *
 * Runs a four-stage pipeline — collect files, parse sources, execute
 * detectors, aggregate results — and wraps the outcome in a ResponseDto:
 * ok(report) on success, fail(prefixed message) on any thrown error.
 */
class AnalyzeProject extends BaseUseCase_1.UseCase {
    fileCollectionStep;
    parsingStep;
    detectionPipeline;
    resultAggregator;
    /**
     * Wires the pipeline stages from the injected file scanner, code
     * parser, and the ten individual rule detectors/trackers.
     */
    // eslint-disable-next-line max-params
    constructor(fileScanner, codeParser, hardcodeDetector, namingConventionDetector, frameworkLeakDetector, entityExposureDetector, dependencyDirectionDetector, repositoryPatternDetector, aggregateBoundaryDetector, secretDetector, anemicModelDetector, duplicateValueTracker) {
        super();
        this.fileCollectionStep = new CollectFiles_1.CollectFiles(fileScanner);
        this.parsingStep = new ParseSourceFiles_1.ParseSourceFiles(codeParser);
        this.detectionPipeline = new ExecuteDetection_1.ExecuteDetection(hardcodeDetector, namingConventionDetector, frameworkLeakDetector, entityExposureDetector, dependencyDirectionDetector, repositoryPatternDetector, aggregateBoundaryDetector, secretDetector, anemicModelDetector, duplicateValueTracker);
        this.resultAggregator = new AggregateResults_1.AggregateResults();
    }
    /**
     * Executes the analysis pipeline for one request.
     *
     * @param request - carries rootDir plus optional include/exclude patterns
     * @returns ResponseDto.ok(aggregated report) on success, or
     *          ResponseDto.fail(message) when any stage throws
     */
    async execute(request) {
        try {
            const collected = await this.fileCollectionStep.execute({
                rootDir: request.rootDir,
                include: request.include,
                exclude: request.exclude,
            });
            const sourceFiles = collected.sourceFiles;
            const parsed = this.parsingStep.execute({
                sourceFiles,
                rootDir: request.rootDir,
            });
            const detectionResult = await this.detectionPipeline.execute({
                sourceFiles,
                dependencyGraph: parsed.dependencyGraph,
            });
            // Detection output is spread last so its keys join the base report.
            const report = this.resultAggregator.execute({
                sourceFiles,
                dependencyGraph: parsed.dependencyGraph,
                totalFunctions: parsed.totalFunctions,
                ...detectionResult,
            });
            return ResponseDto_1.ResponseDto.ok(report);
        }
        catch (error) {
            // Normalize non-Error throwables before formatting the failure text.
            const reason = error instanceof Error ? error.message : String(error);
            return ResponseDto_1.ResponseDto.fail(`${constants_1.ERROR_MESSAGES.FAILED_TO_ANALYZE}: ${reason}`);
        }
    }
}
exports.AnalyzeProject = AnalyzeProject;
//# sourceMappingURL=AnalyzeProject.js.map