@sun-asterisk/sunlint
☀️ SunLint - Multi-language static analysis tool for code quality and security | Sun* Engineering Standards
1,151 lines (992 loc) • 33.9 kB
JavaScript
/**
* GitHub Annotate Service
* Reads the JSON result file and posts annotation comments on the corresponding GitHub PR
* Usage: githubAnnotateService.annotate({ jsonFile, githubToken, repo, prNumber })
*/
const fs = require('fs');
let Octokit;
// GitHub API limits
const MAX_COMMENTS_PER_REVIEW = 30;
const MAX_COMMENT_LENGTH = 65536;
const MAX_RETRIES = 3;
const RETRY_DELAY_MS = 1000;
/**
* Custom error classes
*/
class ValidationError extends Error {
constructor(message) {
super(message);
this.name = 'ValidationError';
}
}
class GitHubAPIError extends Error {
constructor(message, statusCode, originalError) {
super(message);
this.name = 'GitHubAPIError';
this.statusCode = statusCode;
this.originalError = originalError;
}
}
/**
* Logger with multiple log levels
*/
const logger = {
info: (message, data) => {
console.log(`[INFO] ${message}`, data ? JSON.stringify(data, null, 2) : '');
},
warn: (message, data) => {
console.warn(`[WARN] ${message}`, data ? JSON.stringify(data, null, 2) : '');
},
error: (message, error) => {
console.error(`[ERROR] ${message}`, error?.message || error);
if (error?.stack) {
console.error(error.stack);
}
},
debug: (message, data) => {
if (process.env.DEBUG === 'true') {
console.log(`[DEBUG] ${message}`, data ? JSON.stringify(data, null, 2) : '');
}
}
};
/**
* Sleep utility for retry mechanism
* @param {number} ms - Milliseconds to sleep
* @returns {Promise<void>}
*/
function sleep(ms) {
return new Promise(resolve => setTimeout(resolve, ms));
}
/**
* Generate AI summary using GitHub Models API
* @param {Array} violations - Array of violations
* @param {Object} stats - Statistics object
* @returns {Promise<string|null>} AI-generated summary or null
*/
async function generateAISummary(violations, stats) {
const token = process.env.GITHUB_TOKEN;
if (!token) {
logger.debug('No GITHUB_TOKEN, skipping AI summary');
return null;
}
// Only generate AI summary if there are violations
if (violations.length === 0) {
return null;
}
try {
// Prepare violations summary for AI (limit to top issues)
const topViolations = violations.slice(0, 20).map(v => ({
rule: v.rule,
file: v.file.split('/').pop(), // Just filename
message: v.message?.substring(0, 100)
}));
// Group by rule
const ruleGroups = {};
for (const v of violations) {
ruleGroups[v.rule] = (ruleGroups[v.rule] || 0) + 1;
}
const prompt = `You are a code review assistant. Analyze these code quality violations and provide a brief, actionable summary in 2-3 sentences.
Violations by rule:
${Object.entries(ruleGroups).map(([rule, count]) => `- ${rule}: ${count} issues`).join('\n')}
Sample issues:
${topViolations.slice(0, 5).map(v => `- [${v.rule}] ${v.file}: ${v.message}`).join('\n')}
Stats: ${stats.errorCount} errors, ${stats.warningCount} warnings in ${stats.filesWithIssues} files.
Provide a concise summary focusing on:
1. Main patterns/issues found
2. Priority areas to fix
Keep it under 100 words, no markdown headers.`;
// Uses the global fetch API (built into Node.js 18+)
const response = await fetch('https://models.inference.ai.azure.com/chat/completions', {
method: 'POST',
headers: {
'Authorization': `Bearer ${token}`,
'Content-Type': 'application/json'
},
body: JSON.stringify({
model: 'gpt-4o-mini',
messages: [{ role: 'user', content: prompt }],
max_tokens: 200,
temperature: 0.3
})
});
if (!response.ok) {
logger.debug(`GitHub Models API error: ${response.status}`);
return null;
}
const data = await response.json();
const aiSummary = data.choices?.[0]?.message?.content?.trim();
if (aiSummary) {
logger.info('AI summary generated successfully');
return aiSummary;
}
return null;
} catch (error) {
logger.debug(`AI summary generation failed: ${error.message}`);
return null;
}
}
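// Note: this targets GitHub Models' OpenAI-compatible chat completions endpoint,
// so the response is read in the OpenAI shape (data.choices[0].message.content).
// A sketch of a call, assuming GITHUB_TOKEN is set and 10 violations were found:
//
//   const aiSummary = await generateAISummary(violations, {
//     errorCount: 3, warningCount: 7, filesWithIssues: 4, totalViolations: 10
//   });
//   // -> a short prose summary, or null if the token or API is unavailable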
/**
* Calculate quality score from violations
* @param {number} errorCount - Number of errors
* @param {number} warningCount - Number of warnings
* @param {number} totalFiles - Total files analyzed
* @returns {Object} Score object with value and grade
*/
function calculateQualityScore(errorCount, warningCount, totalFiles) {
// Base score: 100
// Deduct 5 points per error, 1 point per warning
// Minimum score: 0
const errorPenalty = errorCount * 5;
const warningPenalty = warningCount * 1;
const totalPenalty = errorPenalty + warningPenalty;
// Scale penalty based on file count (larger projects get less penalty per violation)
const scaleFactor = totalFiles > 10 ? Math.log10(totalFiles) : 1;
const adjustedPenalty = Math.round(totalPenalty / scaleFactor);
const value = Math.max(0, Math.min(100, 100 - adjustedPenalty));
// Determine grade
let grade;
if (value >= 90) grade = 'A';
else if (value >= 80) grade = 'B';
else if (value >= 70) grade = 'C';
else if (value >= 60) grade = 'D';
else grade = 'F';
return { value, grade };
}
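// Worked example (hypothetical numbers): 3 errors and 7 warnings across 50 files
// give a raw penalty of 3*5 + 7*1 = 22; with scaleFactor = log10(50) ≈ 1.7 the
// adjusted penalty is round(22 / 1.7) = 13, so the score is 87 and the grade 'B'.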
/**
* Retry wrapper for async functions
* @param {Function} fn - Async function to retry
* @param {number} maxRetries - Max number of retries
* @param {number} delayMs - Delay between retries
* @returns {Promise<any>}
*/
async function withRetry(fn, maxRetries = MAX_RETRIES, delayMs = RETRY_DELAY_MS) {
let lastError;
for (let attempt = 1; attempt <= maxRetries; attempt++) {
try {
return await fn();
} catch (error) {
lastError = error;
// Don't retry on validation errors or 404s
if (error instanceof ValidationError || error.status === 404) {
throw error;
}
// Retry on network errors and rate limits
const isRetryable =
error.status === 429 || // Rate limit
error.status >= 500 || // Server errors
error.code === 'ECONNRESET' ||
error.code === 'ETIMEDOUT' ||
error.code === 'ENOTFOUND';
if (!isRetryable || attempt === maxRetries) {
throw error;
}
const waitTime = error.status === 429 ? delayMs * attempt * 2 : delayMs * attempt;
logger.warn(`Attempt ${attempt}/${maxRetries} failed, retrying in ${waitTime}ms...`, {
error: error.message,
status: error.status
});
await sleep(waitTime);
}
}
throw lastError;
}
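// Usage sketch: wrap any Octokit call so transient failures (429, 5xx, network
// errors) are retried with linear backoff (doubled for rate limits), while 404s
// and validation errors fail fast:
//
//   const { data } = await withRetry(() =>
//     octokit.pulls.get({ owner, repo: repoName, pull_number: prNumber })
//   );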
/**
* Validate input parameters
* @param {Object} options
* @throws {ValidationError}
*/
function validateInput({ jsonFile, githubToken, repo, prNumber }) {
if (!jsonFile || typeof jsonFile !== 'string') {
throw new ValidationError('jsonFile is required and must be a string');
}
if (!githubToken && !process.env.GITHUB_TOKEN) {
throw new ValidationError('githubToken is required or GITHUB_TOKEN env var must be set');
}
if (!repo || typeof repo !== 'string') {
throw new ValidationError('repo is required and must be a string');
}
const repoParts = repo.split('/');
if (repoParts.length !== 2 || !repoParts[0] || !repoParts[1]) {
throw new ValidationError('repo must be in format "owner/repo"');
}
if (!prNumber || typeof prNumber !== 'number' || prNumber <= 0 || !Number.isInteger(prNumber)) {
throw new ValidationError('prNumber must be a positive integer');
}
}
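// Example (hypothetical values): { jsonFile: 'results.json', repo: 'owner/repo',
// prNumber: 42 } passes when GITHUB_TOKEN is set; repo: 'owner-repo' or a string
// prNumber ('42') throws a ValidationError before any API call is made.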
/**
* Read and parse JSON file
* @param {string} jsonFile - Path to JSON file
* @returns {Object} Parsed JSON
* @throws {Error}
*/
function readJsonFile(jsonFile) {
if (!fs.existsSync(jsonFile)) {
throw new Error(`Result file not found: ${jsonFile}`);
}
let stats;
try {
stats = fs.statSync(jsonFile);
} catch (error) {
throw new Error(`Cannot access file ${jsonFile}: ${error.message}`);
}
if (!stats.isFile()) {
throw new Error(`Path is not a file: ${jsonFile}`);
}
if (stats.size === 0) {
logger.warn('Result file is empty', { jsonFile });
return [];
}
// Check file size (warn if > 10MB)
const maxSize = 10 * 1024 * 1024;
if (stats.size > maxSize) {
logger.warn(`Result file is very large (${(stats.size / 1024 / 1024).toFixed(2)}MB)`, { jsonFile });
}
let content;
try {
content = fs.readFileSync(jsonFile, 'utf8');
} catch (error) {
throw new Error(`Cannot read file ${jsonFile}: ${error.message}`);
}
try {
return JSON.parse(content);
} catch (error) {
throw new Error(`Invalid JSON in file ${jsonFile}: ${error.message}`);
}
}
/**
* Get git root directory
* @param {string} cwd - Current working directory
* @returns {string} Git root path
*/
function getGitRoot(cwd = process.cwd()) {
try {
const { execSync } = require('child_process');
const gitRoot = execSync('git rev-parse --show-toplevel', {
cwd,
encoding: 'utf8'
}).trim();
return gitRoot;
} catch (error) {
logger.warn('Not a git repository, using cwd as root');
return cwd;
}
}
/**
* Normalize path to be relative from git root
* @param {string} filePath - File path (absolute or relative)
* @param {string} gitRoot - Git root directory
* @returns {string} Normalized relative path
*/
function normalizePathFromGitRoot(filePath, gitRoot) {
let normalized = filePath;
// Convert absolute path to relative from git root
if (filePath.startsWith(gitRoot)) {
normalized = filePath.slice(gitRoot.length);
if (normalized.startsWith('/') || normalized.startsWith('\\')) {
normalized = normalized.slice(1);
}
}
// Normalize path separators to forward slash
normalized = normalized.replace(/\\/g, '/');
return normalized;
}
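// Example: with gitRoot '/home/ci/project', '/home/ci/project/src/app.js'
// normalizes to 'src/app.js', and Windows-style 'src\app.js' to 'src/app.js',
// matching the forward-slash relative paths the GitHub API reports.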
/**
* Parse violations from JSON data
* @param {Array|Object} raw - Raw JSON data
* @param {string} gitRoot - Git root directory for path normalization
* @returns {Array} Array of violation objects
*/
function parseViolations(raw, gitRoot) {
const violations = [];
if (Array.isArray(raw)) {
for (const fileObj of raw) {
if (!fileObj || typeof fileObj !== 'object') {
logger.warn('Skipping invalid file object', { fileObj });
continue;
}
if (!fileObj.filePath || !Array.isArray(fileObj.messages)) {
logger.debug('Skipping file object without filePath or messages', { fileObj });
continue;
}
// Normalize path relative to git root (same as GitHub API)
const relPath = normalizePathFromGitRoot(fileObj.filePath, gitRoot);
for (const msg of fileObj.messages) {
if (!msg || typeof msg !== 'object') {
logger.warn('Skipping invalid message object', { msg });
continue;
}
// Validate line number
const line = parseInt(msg.line, 10);
if (!line || line <= 0) {
logger.warn('Skipping message with invalid line number', { msg, file: relPath });
continue;
}
violations.push({
file: relPath,
line: line,
rule: msg.ruleId || 'unknown',
severity: msg.severity === 2 ? 'error' : 'warning',
message: msg.message || 'No message provided'
});
}
}
} else if (raw && typeof raw === 'object') {
const rawViolations = raw.violations || [];
if (!Array.isArray(rawViolations)) {
throw new Error('violations property must be an array');
}
// Normalize paths for raw violations too (guard entries missing a file path)
violations.push(...rawViolations.map(v => ({
...v,
file: typeof v?.file === 'string' ? normalizePathFromGitRoot(v.file, gitRoot) : v?.file
})));
} else {
throw new Error('JSON data must be an array or object with violations property');
}
return violations;
}
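// Illustrative ESLint-style input accepted by the array branch (hypothetical paths):
//
//   [{
//     "filePath": "/home/ci/project/src/app.js",
//     "messages": [
//       { "ruleId": "no-console", "severity": 2, "line": 10,
//         "message": "Unexpected console statement." }
//     ]
//   }]
//
// With gitRoot '/home/ci/project' this yields one violation:
//   { file: 'src/app.js', line: 10, rule: 'no-console',
//     severity: 'error', message: 'Unexpected console statement.' }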
/**
* Get existing review comments to avoid duplicates
* @param {Object} octokit - Octokit instance
* @param {string} owner - Repo owner
* @param {string} repoName - Repo name
* @param {number} prNumber - PR number
* @returns {Promise<Array>} Array of existing comments
*/
async function getExistingComments(octokit, owner, repoName, prNumber) {
try {
const comments = [];
let page = 1;
let hasMore = true;
while (hasMore) {
const response = await octokit.pulls.listReviewComments({
owner,
repo: repoName,
pull_number: prNumber,
per_page: 100,
page
});
comments.push(...response.data);
hasMore = response.data.length === 100;
page++;
}
return comments;
} catch (error) {
// Non-critical error, log and continue
logger.warn('Failed to fetch existing comments, duplicate detection disabled', {
error: error.message
});
return [];
}
}
/**
* Check if comment already exists
* @param {Array} existingComments - Existing PR comments
* @param {Object} newComment - New comment to check
* @returns {boolean}
*/
function isCommentDuplicate(existingComments, newComment) {
return existingComments.some(existing =>
existing.path === newComment.path &&
existing.line === newComment.line &&
existing.body === newComment.body
);
}
/**
* Truncate comment body if too long
* @param {string} body - Comment body
* @returns {string}
*/
function truncateComment(body) {
if (body.length <= MAX_COMMENT_LENGTH) {
return body;
}
const truncated = body.substring(0, MAX_COMMENT_LENGTH - 100);
return `${truncated}\n\n... (comment truncated, too long)`;
}
/**
* Create review comments in batches
* @param {Object} octokit - Octokit instance
* @param {string} owner - Repo owner
* @param {string} repoName - Repo name
* @param {number} prNumber - PR number
* @param {string} headSha - Commit SHA
* @param {Array} comments - Comments to post
* @param {boolean} hasError - Whether any violation is an error (currently unused; every review is posted with event 'COMMENT')
* @returns {Promise<Array>} Array of review responses
*/
async function createReviewsInBatches(octokit, owner, repoName, prNumber, headSha, comments, hasError) {
const reviews = [];
const batches = [];
// Split comments into batches
for (let i = 0; i < comments.length; i += MAX_COMMENTS_PER_REVIEW) {
batches.push(comments.slice(i, i + MAX_COMMENTS_PER_REVIEW));
}
logger.info(`Creating ${batches.length} review(s) with ${comments.length} comment(s)`);
for (let i = 0; i < batches.length; i++) {
const batch = batches[i];
const isLastBatch = i === batches.length - 1;
try {
const reviewRes = await withRetry(async () => {
return await octokit.pulls.createReview({
owner,
repo: repoName,
pull_number: prNumber,
commit_id: headSha,
event: 'COMMENT',
body: isLastBatch && batches.length > 1
? `SunLint found ${comments.length} issue(s) across multiple reviews.`
: undefined,
comments: batch
});
});
reviews.push(reviewRes.data);
logger.info(`Review ${i + 1}/${batches.length} created with ${batch.length} comment(s)`, {
reviewId: reviewRes.data.id
});
// Add delay between batches to avoid rate limiting
if (i < batches.length - 1) {
await sleep(500);
}
} catch (error) {
logger.error(`Failed to create review ${i + 1}/${batches.length}`, error);
throw new GitHubAPIError(
`Failed to create review: ${error.message}`,
error.status,
error
);
}
}
return reviews;
}
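// Example: 95 comments split into batches of 30 yield 4 reviews (30 + 30 + 30 + 5);
// only the final review carries the summary body, and the 500ms pause between
// batches reduces the chance of hitting secondary rate limits.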
/**
* Parse patch to get valid line numbers
* @param {string} patch - GitHub patch string
* @returns {Set<number>} Set of valid line numbers
*/
function parseValidLineNumbers(patch) {
const validLines = new Set();
if (!patch) {
return validLines;
}
const lines = patch.split('\n');
let currentLine = 0;
for (const line of lines) {
// Parse hunk header: @@ -old_start,old_count +new_start,new_count @@
const hunkMatch = line.match(/^@@ -\d+(?:,\d+)? \+(\d+)(?:,\d+)? @@/);
if (hunkMatch) {
currentLine = parseInt(hunkMatch[1], 10);
continue;
}
// Skip diff metadata lines (e.g. "\ No newline at end of file")
if (!line.startsWith('+') && !line.startsWith('-') && !line.startsWith(' ')) {
continue;
}
// Added ('+') and context (' ') lines exist in the new file and are commentable;
// deleted ('-') lines belong only to the old file and don't advance the counter
if (line.startsWith('+') || line.startsWith(' ')) {
validLines.add(currentLine);
currentLine++;
}
}
return validLines;
}
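// Worked example: for the patch fragment
//
//   @@ -10,3 +12,4 @@
//    const a = 1;   (context -> new-file line 12)
//   -const b = 2;   (deleted -> no new-file line)
//   +const b = 3;   (added   -> line 13)
//   +const c = 4;   (added   -> line 14)
//    return a;      (context -> line 15)
//
// the function returns {12, 13, 14, 15}: the only lines a RIGHT-side review
// comment can attach to for this file.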
/**
* Get PR files with detailed information for validation
* @param {Object} octokit - Octokit instance
* @param {string} owner - Repo owner
* @param {string} repoName - Repo name
* @param {number} prNumber - PR number
* @returns {Promise<Map>} Map of filename to file info
*/
async function getPRFilesInfo(octokit, owner, repoName, prNumber) {
const filesMap = new Map();
try {
let page = 1;
let hasMore = true;
while (hasMore) {
const response = await octokit.pulls.listFiles({
owner,
repo: repoName,
pull_number: prNumber,
per_page: 100,
page
});
for (const file of response.data) {
// Parse valid line numbers from patch
const validLines = parseValidLineNumbers(file.patch);
const fileInfo = {
filename: file.filename,
previous_filename: file.previous_filename, // For renamed files
additions: file.additions,
deletions: file.deletions,
changes: file.changes,
status: file.status, // 'added', 'removed', 'modified', 'renamed'
validLines: validLines, // Set of valid line numbers
patch: file.patch // Keep patch for debugging
};
// Store by current filename
filesMap.set(file.filename, fileInfo);
// For renamed files, also map old name to new name
if (file.status === 'renamed' && file.previous_filename) {
filesMap.set(file.previous_filename, {
...fileInfo,
isOldName: true, // Flag to indicate this is old name
newFilename: file.filename // Reference to new name
});
}
}
hasMore = response.data.length === 100;
page++;
}
return filesMap;
} catch (error) {
throw new GitHubAPIError(
`Failed to fetch PR files: ${error.message}`,
error.status,
error
);
}
}
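// Example: a file renamed from 'src/old.js' to 'src/new.js' (hypothetical names)
// produces two map entries: 'src/new.js' -> fileInfo, and 'src/old.js' ->
// { ...fileInfo, isOldName: true, newFilename: 'src/new.js' }, which lets
// annotate() remap violations reported against the pre-rename path.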
/**
* Annotate GitHub PR with SunLint results
* @param {Object} options
* @param {string} options.jsonFile - Path to JSON result file
* @param {string} [options.githubToken] - GitHub token (with repo:write), falls back to GITHUB_TOKEN env
* @param {string} options.repo - GitHub repo in format owner/repo
* @param {number} options.prNumber - Pull request number
* @param {boolean} [options.skipDuplicates=true] - Skip duplicate comments
* @returns {Promise<Object>} Result object with summary
* @throws {ValidationError} When input validation fails
* @throws {GitHubAPIError} When GitHub API calls fail
* @throws {Error} For other errors
*/
async function annotate({
jsonFile,
githubToken,
repo,
prNumber,
skipDuplicates = true
}) {
const startTime = Date.now();
try {
// Step 1: Validate input
logger.info('Starting GitHub annotation process', { jsonFile, repo, prNumber });
validateInput({ jsonFile, githubToken, repo, prNumber });
// Step 2: Read and parse JSON file
logger.info('Reading result file', { jsonFile });
const raw = readJsonFile(jsonFile);
// Step 2.5: Get git root for path normalization
const gitRoot = getGitRoot();
logger.debug('Git root directory', { gitRoot });
// Step 3: Parse violations with git root normalization
logger.info('Parsing violations');
const violations = parseViolations(raw, gitRoot);
if (violations.length === 0) {
logger.info('No violations found');
return {
success: true,
message: 'No violations to comment',
stats: {
totalViolations: 0,
commentsCreated: 0,
duplicatesSkipped: 0,
duration: Date.now() - startTime
}
};
}
logger.info(`Found ${violations.length} violation(s)`, {
errors: violations.filter(v => v.severity === 'error').length,
warnings: violations.filter(v => v.severity === 'warning').length
});
// Step 4: Initialize Octokit
const token = githubToken || process.env.GITHUB_TOKEN;
const [owner, repoName] = repo.split('/');
if (!Octokit) {
logger.debug('Loading @octokit/rest');
Octokit = (await import('@octokit/rest')).Octokit;
}
const octokit = new Octokit({ auth: token });
// Step 5: Get PR info
logger.info('Fetching PR information');
let prData;
try {
const response = await withRetry(async () => {
return await octokit.pulls.get({
owner,
repo: repoName,
pull_number: prNumber
});
});
prData = response.data;
} catch (error) {
if (error.status === 404) {
throw new GitHubAPIError(
`PR #${prNumber} not found in ${repo}`,
404,
error
);
}
throw new GitHubAPIError(
`Failed to fetch PR: ${error.message}`,
error.status,
error
);
}
const headSha = prData.head.sha;
logger.info('PR information retrieved', {
state: prData.state,
sha: headSha,
title: prData.title
});
// Check if PR is open
if (prData.state !== 'open') {
logger.warn('PR is not open', { state: prData.state });
}
// Step 6: Get PR files
logger.info('Fetching PR files');
const prFilesInfo = await getPRFilesInfo(octokit, owner, repoName, prNumber);
const prFiles = Array.from(prFilesInfo.keys());
logger.info(`PR has ${prFiles.length} file(s) changed`);
// Step 7: Filter and validate violations
const matchingViolations = [];
let filesSkipped = 0;
let linesSkipped = 0;
let renamedFilesHandled = 0;
// Debug: Log sample paths for comparison
logger.debug('Path comparison debug:', {
sampleViolationFiles: violations.slice(0, 3).map(v => v.file),
samplePRFiles: Array.from(prFilesInfo.keys()).slice(0, 3),
totalViolations: violations.length,
totalPRFiles: prFilesInfo.size
});
for (const v of violations) {
let targetFile = v.file;
let fileInfo = prFilesInfo.get(targetFile);
// If the path matched a renamed file's old name, remap to the new name
if (fileInfo && fileInfo.isOldName && fileInfo.newFilename) {
logger.debug(`Mapping renamed file: ${targetFile} -> ${fileInfo.newFilename}`);
targetFile = fileInfo.newFilename;
fileInfo = prFilesInfo.get(targetFile);
renamedFilesHandled++;
}
// Skip if file not in PR
if (!fileInfo) {
logger.debug(`Skipping violation - file not in PR: ${v.file}`);
filesSkipped++;
continue;
}
// Skip if file is deleted/removed
if (fileInfo.status === 'removed') {
logger.debug(`Skipping violation - file removed: ${targetFile}`);
filesSkipped++;
continue;
}
// Validate line number against patch
if (fileInfo.validLines && fileInfo.validLines.size > 0) {
if (!fileInfo.validLines.has(v.line)) {
logger.debug(`Skipping violation - line ${v.line} not in PR diff: ${targetFile}`);
linesSkipped++;
continue;
}
}
// Add to matching violations with updated filename
matchingViolations.push({
...v,
file: targetFile // Use potentially renamed filename
});
}
if (matchingViolations.length === 0) {
logger.info('No violations match PR files or valid lines', {
totalViolations: violations.length,
filesSkipped,
linesSkipped
});
return {
success: true,
message: 'No matching PR file violations to comment',
stats: {
totalViolations: violations.length,
matchingViolations: 0,
filesSkipped,
linesSkipped,
renamedFilesHandled,
commentsCreated: 0,
duplicatesSkipped: 0,
duration: Date.now() - startTime
}
};
}
logger.info(`${matchingViolations.length} violation(s) match PR files and valid lines`, {
filesSkipped,
linesSkipped,
renamedFilesHandled
});
// Step 8: Get existing comments to avoid duplicates
let existingComments = [];
if (skipDuplicates) {
logger.info('Fetching existing comments for duplicate detection');
existingComments = await getExistingComments(octokit, owner, repoName, prNumber);
logger.info(`Found ${existingComments.length} existing comment(s)`);
}
// Step 9: Prepare review comments
const reviewComments = [];
let duplicatesSkipped = 0;
for (const v of matchingViolations) {
const commentBody = truncateComment(`[${v.rule}] ${v.message}`);
const comment = {
path: v.file,
line: v.line,
side: 'RIGHT',
body: commentBody
};
if (skipDuplicates && isCommentDuplicate(existingComments, comment)) {
duplicatesSkipped++;
logger.debug('Skipping duplicate comment', { file: v.file, line: v.line });
continue;
}
reviewComments.push(comment);
}
if (reviewComments.length === 0) {
logger.info('All comments are duplicates, nothing to post');
return {
success: true,
message: 'All comments already exist on PR',
stats: {
totalViolations: violations.length,
matchingViolations: matchingViolations.length,
filesSkipped,
linesSkipped,
renamedFilesHandled,
commentsCreated: 0,
duplicatesSkipped,
duration: Date.now() - startTime
}
};
}
logger.info(`Preparing to create ${reviewComments.length} comment(s)`, {
duplicatesSkipped
});
// Step 10: Create reviews
const hasError = matchingViolations.some(v => v.severity === 'error');
const reviews = await createReviewsInBatches(
octokit,
owner,
repoName,
prNumber,
headSha,
reviewComments,
hasError
);
const duration = Date.now() - startTime;
logger.info('Annotation completed successfully', {
reviewsCreated: reviews.length,
commentsCreated: reviewComments.length,
filesSkipped,
linesSkipped,
renamedFilesHandled,
duration: `${duration}ms`
});
return {
success: true,
message: `Created ${reviews.length} review(s) with ${reviewComments.length} comment(s)`,
reviews: reviews.map(r => ({
id: r.id,
html_url: r.html_url
})),
stats: {
totalViolations: violations.length,
matchingViolations: matchingViolations.length,
filesSkipped,
linesSkipped,
renamedFilesHandled,
commentsCreated: reviewComments.length,
duplicatesSkipped,
reviewsCreated: reviews.length,
hasErrors: hasError,
duration
}
};
} catch (error) {
logger.error('Annotation failed', error);
// Re-throw with more context
if (error instanceof ValidationError || error instanceof GitHubAPIError) {
throw error;
}
throw new Error(`GitHub annotation failed: ${error.message}`);
}
}
/**
* Post summary comment on GitHub PR
* @param {Object} options
* @param {string} options.jsonFile - Path to JSON result file
* @param {string} [options.githubToken] - GitHub token, falls back to GITHUB_TOKEN env
* @param {string} options.repo - GitHub repo in format owner/repo
* @param {number} options.prNumber - Pull request number
* @returns {Promise<Object>} Result object
*/
async function postSummaryComment({
jsonFile,
githubToken,
repo,
prNumber
}) {
const startTime = Date.now();
try {
// Step 1: Validate input
logger.info('Starting GitHub summary comment process', { jsonFile, repo, prNumber });
validateInput({ jsonFile, githubToken, repo, prNumber });
// Step 2: Read and parse JSON file
logger.info('Reading result file', { jsonFile });
const raw = readJsonFile(jsonFile);
// Step 2.5: Get git root for path normalization
const gitRoot = getGitRoot();
logger.debug('Git root directory', { gitRoot });
// Step 3: Parse violations with git root normalization
logger.info('Parsing violations for summary');
const violations = parseViolations(raw, gitRoot);
// Step 4: Initialize Octokit
const token = githubToken || process.env.GITHUB_TOKEN;
const [owner, repoName] = repo.split('/');
if (!Octokit) {
logger.debug('Loading @octokit/rest');
Octokit = (await import('@octokit/rest')).Octokit;
}
const octokit = new Octokit({ auth: token });
// Step 5: Calculate statistics
const totalViolations = violations.length;
const errorCount = violations.filter(v => v.severity === 'error').length;
const warningCount = violations.filter(v => v.severity === 'warning').length;
// Group by file
const fileGroups = {};
for (const v of violations) {
if (!fileGroups[v.file]) {
fileGroups[v.file] = [];
}
fileGroups[v.file].push(v);
}
const filesWithIssues = Object.keys(fileGroups).length;
const totalFiles = raw.length || filesWithIssues;
// Step 6: Calculate quality score
const score = calculateQualityScore(errorCount, warningCount, totalFiles);
// Step 6.5: Generate AI summary (if available)
const stats = { errorCount, warningCount, filesWithIssues, totalViolations };
const aiSummary = await generateAISummary(violations, stats);
// Step 7: Generate compact summary markdown
const emoji = errorCount > 0 ? '❌' : warningCount > 0 ? '⚠️' : '✅';
let summary = `## ${emoji} SunLint Report\n\n`;
if (totalViolations === 0) {
summary += `🎯 **Score: ${score.value}/100** (${score.grade})\n\n`;
summary += '✅ **No violations found!**\n';
} else {
// Score display with color indicator
const scoreEmoji = score.value >= 80 ? '🟢' : score.value >= 60 ? '🟡' : '🔴';
summary += `### ${scoreEmoji} Quality Score: **${score.value}/100** (${score.grade})\n\n`;
// AI Summary (if available)
if (aiSummary) {
summary += `#### 🤖 AI Analysis\n`;
summary += `${aiSummary}\n\n`;
}
// Compact summary table
summary += `| Metric | Count |\n`;
summary += `|:-------|------:|\n`;
summary += `| 📋 Total Violations | ${totalViolations} |\n`;
summary += `| ❌ Errors | ${errorCount} |\n`;
summary += `| ⚠️ Warnings | ${warningCount} |\n`;
summary += `| 📁 Files | ${filesWithIssues} |\n\n`;
summary += '> 💡 See inline comments for details\n';
}
summary += '\n---\n';
summary += '<sub>Generated by [SunLint](https://github.com/sun-asterisk/engineer-excellence)';
// Add link to full report if available
if (process.env.GITHUB_RUN_ID) {
const runUrl = `https://github.com/${repo}/actions/runs/${process.env.GITHUB_RUN_ID}`;
summary += ` • [View run](${runUrl})`;
}
summary += '</sub>\n';
// Step 8: Try to find existing SunLint comment
logger.info('Checking for existing summary comment');
let existingComment = null;
try {
const { data: comments } = await octokit.issues.listComments({
owner,
repo: repoName,
issue_number: prNumber,
per_page: 100
});
existingComment = comments.find(comment =>
comment.user.type === 'Bot' &&
comment.body.includes('SunLint Report')
);
} catch (error) {
logger.warn('Failed to fetch existing comments', { error: error.message });
}
// Step 9: Post or update comment
let commentResult;
try {
if (existingComment) {
logger.info('Updating existing summary comment', { commentId: existingComment.id });
commentResult = await withRetry(async () => {
return await octokit.issues.updateComment({
owner,
repo: repoName,
comment_id: existingComment.id,
body: summary
});
});
logger.info('Summary comment updated successfully');
} else {
logger.info('Creating new summary comment');
commentResult = await withRetry(async () => {
return await octokit.issues.createComment({
owner,
repo: repoName,
issue_number: prNumber,
body: summary
});
});
logger.info('Summary comment created successfully');
}
} catch (error) {
throw new GitHubAPIError(
`Failed to post summary comment: ${error.message}`,
error.status,
error
);
}
const duration = Date.now() - startTime;
logger.info('Summary comment completed', {
action: existingComment ? 'updated' : 'created',
duration: `${duration}ms`
});
return {
success: true,
action: existingComment ? 'updated' : 'created',
commentId: commentResult.data.id,
commentUrl: commentResult.data.html_url,
stats: {
totalViolations,
errorCount,
warningCount,
filesWithIssues,
duration
}
};
} catch (error) {
logger.error('Summary comment failed', error);
if (error instanceof ValidationError || error instanceof GitHubAPIError) {
throw error;
}
throw new Error(`GitHub summary comment failed: ${error.message}`);
}
}
module.exports = {
annotate,
postSummaryComment,
ValidationError,
GitHubAPIError
};
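// Usage sketch (hypothetical require path, repo, and PR number):
//
//   const githubAnnotateService = require('./githubAnnotateService');
//
//   // Inline annotations on changed lines
//   await githubAnnotateService.annotate({
//     jsonFile: 'sunlint-results.json',
//     repo: 'owner/repo',
//     prNumber: 123 // githubToken falls back to the GITHUB_TOKEN env var
//   });
//
//   // Single summary comment (created once, then updated in place)
//   await githubAnnotateService.postSummaryComment({
//     jsonFile: 'sunlint-results.json',
//     repo: 'owner/repo',
//     prNumber: 123
//   });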