buddy-bot
Version:
Automated & optimized dependency updates for JavaScript & TypeScript projects. Like Renovate & Dependabot.
92 lines (63 loc) • 50.9 kB
JavaScript
// @bun
import{f as d,g as r}from"./chunk-kxav0xyr.js";import{h as l,i as n}from"./chunk-ag75h4mh.js";import{j as m}from"./chunk-97jdq049.js";import{l as s}from"./chunk-nbn2s36m.js";import{m as o,q as i}from"./chunk-7q14q1yz.js";import{x as P,y as p}from"./chunk-eywnktd9.js";import{A as g,B as c}from"./chunk-956t2kcb.js";import"./chunk-5fv4yjx7.js";import{D as J,H as j}from"./chunk-cmr43ay2.js";import KK from"fs";import w from"process";class b{generateDashboard(Q,K={}){let{showOpenPRs:X=!0,showDetectedDependencies:Z=!0,showDeprecatedDependencies:z=!0,bodyTemplate:$}=K,W="Dependency Dashboard";if($)return{title:"Dependency Dashboard",body:this.applyTemplate($,Q)};let O=this.generateDefaultHeader(Q);if(z&&Q.deprecatedDependencies&&Q.deprecatedDependencies.length>0)O+=this.generateDeprecatedDependenciesSection(Q.deprecatedDependencies);if(X&&Q.openPRs.length>0)O+=this.generateOpenPRsSection(Q.openPRs);if(Z)O+=this.generateDetectedDependenciesSection(Q.detectedDependencies);return O+=this.generateFooter(),{title:"Dependency Dashboard",body:O}}generateDefaultHeader(Q){return`This issue lists Buddy Bot updates and detected dependencies. Read the [Dependency Dashboard](https://buddy-bot.sh/features/dependency-dashboard) docs to learn more.
`}generateOpenPRsSection(Q){let K=`## Open
The following updates have all been created. To force a retry/rebase of any, click on a checkbox below.
`;for(let X of Q){let Z=this.extractPackageInfo(X),z=X.head,$=X.url.includes("/pull/")&&X.url.includes("github.com")?`../pull/${X.number}`:X.url;if(K+=` - [ ] <!-- rebase-branch=${z} -->[${X.title}](${$})`,Z.length>0)K+=` (\`${Z.join("`, `")}\`)`;K+=`
`}return K+=` - [ ] <!-- rebase-all-open-prs -->**Click on this checkbox to rebase all open PRs at once**
`,K+=`
`,K}generateDetectedDependenciesSection(Q){let K=`## Detected dependencies
`;if(Q.packageJson.length>0)K+=this.generatePackageJsonSection(Q.packageJson);let X=Q.dependencyFiles.filter((W)=>W.path==="composer.json"||W.path.endsWith("/composer.json")),Z=X.filter((W)=>W.path==="composer.json"),z=X.filter((W)=>W.path!=="composer.json");if(Z.length>0)K+=this.generateComposerSection(Z);if(Q.githubActions.length>0)K+=this.generateGitHubActionsSection(Q.githubActions);let $=Q.dependencyFiles.filter((W)=>!W.path.endsWith("/composer.json")&&W.path!=="composer.json");if($.length>0||z.length>0)K+=this.generateDependencyFilesSection([...$,...z]);return K}generatePackageJsonSection(Q){let K=`<details><summary>npm</summary>
<blockquote>
`;for(let X of Q){let Z=X.path.split("/").pop()||X.path;K+=`<details><summary>${Z}</summary>
`;let z={dependencies:X.dependencies.filter(($)=>$.type==="dependencies"),devDependencies:X.dependencies.filter(($)=>$.type==="devDependencies"),peerDependencies:X.dependencies.filter(($)=>$.type==="peerDependencies"),optionalDependencies:X.dependencies.filter(($)=>$.type==="optionalDependencies")};for(let[$,W]of Object.entries(z))if(W.length>0)for(let O of W)K+=` - \`${O.name} ${O.currentVersion}\`
`;K+=`
</details>
`}return K+=`</blockquote>
</details>
`,K}generateGitHubActionsSection(Q){let K=`<details><summary>github-actions</summary>
<blockquote>
`;for(let X of Q){K+=`<details><summary>${X.path}</summary>
`;let Z=new Map;for(let z of X.dependencies){let $=`${z.name}@${z.currentVersion}`;if(!Z.has($))Z.set($,{name:z.name,currentVersion:z.currentVersion})}for(let z of Z.values())K+=` - \`${z.name} ${z.currentVersion}\`
`;K+=`
</details>
`}return K+=`</blockquote>
</details>
`,K}generateDependencyFilesSection(Q){let K=`<details><summary>dependency-files</summary>
<blockquote>
`;for(let X of Q){K+=`<details><summary>${X.path}</summary>
`;for(let Z of X.dependencies)K+=` - \`${Z.name} ${Z.currentVersion}\`
`;K+=`
</details>
`}return K+=`</blockquote>
</details>
`,K}generateComposerSection(Q){let K=`<details><summary>composer</summary>
<blockquote>
`;for(let X of Q){let Z=X.path.split("/").pop()||X.path;K+=`<details><summary>${Z}</summary>
`;let z={require:X.dependencies.filter(($)=>$.type==="require"),"require-dev":X.dependencies.filter(($)=>$.type==="require-dev")};for(let[$,W]of Object.entries(z))if(W.length>0)for(let O of W)K+=` - \`${O.name} ${O.currentVersion}\`
`;K+=`
</details>
`}return K+=`</blockquote>
</details>
`,K}generateDeprecatedDependenciesSection(Q){let K=`> [!WARNING]
> These dependencies are deprecated and should be updated to avoid potential security risks and compatibility issues.
| Datasource | Name | Replacement PR? |
|------------|------|-----------------|
`;for(let X of Q){let Z=`\`${X.name}\``,$=(X.replacementAvailable?"available":"unavailable")==="available"?"":"";K+=`| ${X.datasource} | ${Z} | ${$} |
`}return K+=`
`,K}generateFooter(){return`---
- [ ] <!-- manual job -->Check this box to trigger a request for Buddy Bot to run again on this repository
`}extractPackageInfo(Q){let K=[],X=[/update.*?dependency\s+(\S+)/i,/update\s+require(?:-dev)?\s+(\S+)\s+to\s+v?\d+/i,/update\s+(\S+)\s+to\s+v?\d+/i,/bump\s+(\S+)\s+from/i,/chore\(deps\):\s*update\s+dependency\s+(\S+)/i];for(let z of X){let $=Q.title.match(z);if($&&$[1]&&!K.includes($[1]))K.push($[1])}let Z=[{name:"npm",pattern:/### npm Dependencies[\s\S]*?(?=###|\n\n---|z)/i},{name:"pkgx",pattern:/### Launchpad\/pkgx Dependencies[\s\S]*?(?=###|\n\n---|z)/i},{name:"actions",pattern:/### GitHub Actions[\s\S]*?(?=###|\n\n---|z)/i}];for(let z of Z){let $=Q.body.match(z.pattern);if($){let O=$[0].match(/\|\s*\[([^\]]+)\]\([^)]+\)\s*\|/g);if(O)for(let Y of O){let q=Y.match(/\|\s*\[([^\]]+)\]/);if(q&&q[1]){let S=q[1].trim();if(S.includes("`")&&S.includes("->")){let H=Y.match(/\]\(([^)]+)\)/);if(H&&H[1]){let E=H[1].match(/\/diffs\/npm\/([^/]+)\//);if(E&&E[1]){let I=decodeURIComponent(E[1]);if(I&&I.length>1&&!K.includes(I))K.push(I)}}continue}if(!S.includes("://")&&!S.includes("Compare Source")&&!S.includes("badge")&&!S.includes("!")&&!S.startsWith("[![")&&!S.includes("`")&&!S.includes("->")&&!S.includes(" -> ")&&!S.match(/^\d+\.\d+/)&&!S.includes(" ")&&S.length>0&&!K.includes(S))K.push(S)}}}}if(K.length<3){let z=Q.body.match(/`([^`]+)`/g);if(z)for(let $ of z){let W=$.replace(/`/g,"").trim();if(W.includes("->")||W.includes(" -> ")||W.includes("` -> `")||W.match(/^\d+\.\d+/)||W.match(/^v\d+/)||W.match(/[\d.]+\s*->\s*[\d.]+/)||W.match(/^[\d.]+$/)||W.match(/^\d+\.\d+\.\d+/)||W.match(/^\d+\.\d+\.\d+\./)||W.match(/^\^?\d+\.\d+/)||W.match(/^~\d+\.\d+/)||W.includes("://")||W.includes("Compare Source")||W.includes("badge")||W.includes(" "))continue;if(W=W.split(",")[0].trim(),W&&W.length>1&&!K.includes(W)&&(W.startsWith("@")||W.includes("/")||W.match(/^[a-z][a-z0-9.-]*$/i)))K.push(W)}}return K}applyTemplate(Q,K){return Q.replace(/\{\{repository\.owner\}\}/g,K.repository.owner).replace(/\{\{repository\.name\}\}/g,K.repository.name).replace(/\{\{openPRs\.count\}\}/g,K.openPRs.length.toString()).replace(/\{\{lastUpdated\}\}/g,K.lastUpdated.toISOString()).replace(/\{\{detectedDependencies\.packageJson\.count\}\}/g,K.detectedDependencies.packageJson.length.toString()).replace(/\{\{detectedDependencies\.githubActions\.count\}\}/g,K.detectedDependencies.githubActions.length.toString()).replace(/\{\{detectedDependencies\.dependencyFiles\.count\}\}/g,K.detectedDependencies.dependencyFiles.length.toString())}}var{Glob:e}=globalThis.Bun;import{readdir as F,readFile as C,stat as U}from"fs/promises";import{join as B}from"path";class y{projectPath;logger;ignoreGlobs=[];constructor(Q,K,X){this.projectPath=Q;this.logger=K;if(X&&X.length>0)this.ignoreGlobs=X.map((Z)=>new e(Z)),this.logger.info(`Initialized ${this.ignoreGlobs.length} ignore patterns: ${X.join(", ")}`)}async scanProject(){this.logger.info("Scanning project for package files...");let Q=Date.now(),K=[];try{let X=await this.findFiles("package.json");for(let q of X){if(this.shouldIgnorePath(q))continue;let S=await this.parsePackageJsonFile(q);if(S)K.push(S)}let Z=await this.findDependencyFiles();for(let q of Z){if(this.shouldIgnorePath(q))continue;let S=await this.parseDependencyFile(q);if(S)K.push(S)}let z=await this.findLockFiles();for(let q of z){if(this.shouldIgnorePath(q))continue;let S=await this.parseLockFile(q);if(S)K.push(S)}let $=await this.findComposerFiles();for(let q of $){if(this.shouldIgnorePath(q))continue;let S=await this.parseComposerFile(q);if(S)K.push(S)}let W=await this.findGitHubActionsFiles();for(let q of W){if(this.shouldIgnorePath(q))continue;let S=await this.parseGitHubActionsFile(q);if(S)K.push(S)}let O=await this.findDockerfiles();this.logger.info(`\uD83D\uDD0D Found ${O.length} Dockerfile(s): ${O.join(", ")}`);for(let q of O){if(this.shouldIgnorePath(q))continue;let S=await this.parseDockerfile(q);if(S)K.push(S),this.logger.info(`\uD83D\uDCE6 Parsed Dockerfile: ${q} with ${S.dependencies.length} dependencies`)}let Y=Date.now()-Q;return this.logger.success(`Found ${K.length} package files in ${Y}ms`),K}catch(X){throw this.logger.error("Failed to scan project:",X),new o(`Failed to scan project: ${X instanceof Error?X.message:"Unknown error"}`)}}async parsePackageJsonFile(Q){try{let K=B(this.projectPath,Q),X=await C(K,"utf-8"),Z=JSON.parse(X),z=[];return this.extractDependencies(Z.dependencies,"dependencies",Q,z),this.extractDependencies(Z.devDependencies,"devDependencies",Q,z),this.extractDependencies(Z.peerDependencies,"peerDependencies",Q,z),this.extractDependencies(Z.optionalDependencies,"optionalDependencies",Q,z),{path:Q,type:"package.json",content:X,dependencies:z}}catch(K){return this.logger.warn(`Failed to parse package.json file ${Q}:`,K),null}}async parseLockFile(Q){try{let K=B(this.projectPath,Q),X=await C(K,"utf-8"),Z=Q.split("/").pop()||"",z;if(Z==="bun.lockb")z="bun.lockb";else if(Z==="package-lock.json")z="package-lock.json";else if(Z==="yarn.lock")z="yarn.lock";else if(Z==="pnpm-lock.yaml")z="pnpm-lock.yaml";else return null;let $=await this.extractLockFileDependencies(X,z,Q);return{path:Q,type:z,content:X,dependencies:$}}catch(K){return this.logger.warn(`Failed to parse lock file ${Q}:`,K),null}}async parseDependencyFile(Q){try{let K=B(this.projectPath,Q),X=await C(K,"utf-8");return await c(Q,X)}catch(K){return this.logger.warn(`Failed to parse dependency file ${Q}:`,K),null}}async findDependencyFiles(){let Q=[],K=["deps.yaml","deps.yml","dependencies.yaml","dependencies.yml","pkgx.yaml","pkgx.yml",".deps.yaml",".deps.yml"];for(let z of K){let $=await this.findFiles(z);Q.push(...$)}let X=await this.findFilesByPattern("*.yaml"),Z=await this.findFilesByPattern("*.yml");for(let z of[...X,...Z])if(g(z)&&!Q.includes(z))Q.push(z);return Q}async findGitHubActionsFiles(){let Q=[];try{let X=B(this.projectPath,".github","workflows");if((await U(X).catch(()=>null))?.isDirectory()){let z=await this.findFilesByPatternInDir("*.yaml",X),$=await this.findFilesByPatternInDir("*.yml",X);for(let W of[...z,...$])if(d(W))Q.push(W)}}catch{}return Q}async findDockerfiles(){let Q=[];try{let K=["Dockerfile","dockerfile","Dockerfile.dev","Dockerfile.prod","Dockerfile.production","Dockerfile.development","Dockerfile.test","Dockerfile.staging"];for(let Z of K){let z=await this.findFiles(Z);Q.push(...z)}let X=await this.findFilesByPattern("Dockerfile*");for(let Z of X)if(!Q.includes(Z)&&l(Z))Q.push(Z)}catch{}return Q}async parseGitHubActionsFile(Q){try{let K=B(this.projectPath,Q),X=await C(K,"utf-8");return await r(Q,X)}catch(K){return this.logger.warn(`Failed to parse GitHub Actions file ${Q}:`,K),null}}async parseDockerfile(Q){try{let K=B(this.projectPath,Q),X=await C(K,"utf-8");return await n(Q,X)}catch(K){return this.logger.warn(`Failed to parse Dockerfile ${Q}:`,K),null}}async findComposerFiles(){let Q=[];try{let K=await this.findFiles("composer.json");Q.push(...K);let X=await this.findFiles("composer.lock");Q.push(...X)}catch{}return Q}async parseComposerFile(Q){try{let K=B(this.projectPath,Q),X=await C(K,"utf-8"),{parseComposerFile:Z}=await import("./chunk-smqahw1y.js");return await Z(Q,X)}catch(K){return this.logger.warn(`Failed to parse Composer file ${Q}:`,K),null}}extractDependencies(Q,K,X,Z){if(!Q)return;for(let[z,$]of Object.entries(Q))Z.push({name:z,currentVersion:$,type:K,file:this.getRelativePath(X)})}async extractLockFileDependencies(Q,K,X){let Z=[];if(K==="package-lock.json")try{let z=JSON.parse(Q);if(z.packages){for(let[$,W]of Object.entries(z.packages))if($&&$!==""&&W&&typeof W==="object"){let O=W;if(O.version){let Y=$.startsWith("node_modules/")?$.replace("node_modules/",""):$;Z.push({name:Y,currentVersion:O.version,type:O.dev?"devDependencies":"dependencies",file:this.getRelativePath(X)})}}}}catch(z){this.logger.warn("Failed to parse package-lock.json:",z)}return Z}async findFiles(Q,K=this.projectPath){let X=[];try{let Z=await F(K);for(let z of Z){let $=B(K,z);try{let W=await U($);if(W.isDirectory()){if(!this.shouldSkipDirectory(z)){let O=await this.findFiles(Q,$);X.push(...O)}}else if(W.isFile()&&z===Q){let Y=j("path").relative(this.projectPath,$);X.push(Y)}}catch{continue}}}catch{}return X}async findLockFiles(){let Q=["bun.lockb","package-lock.json","yarn.lock","pnpm-lock.yaml"],K=[];for(let X of Q){let Z=await this.findFiles(X);K.push(...Z)}return K}async findFilesByPattern(Q){let K=[],X=Q.replace("*.","");try{let Z=await F(this.projectPath);for(let z of Z){let $=B(this.projectPath,z),W=await U($);if(W.isFile()&&z.endsWith(`.${X}`)){let Y=j("path").relative(this.projectPath,$);K.push(Y)}else if(W.isDirectory()&&!this.shouldSkipDirectory(z)){let O=await this.findFilesByPatternInDir(Q,$);K.push(...O)}}}catch{}return K}async findFilesByPatternInDir(Q,K){let X=[],Z=Q.replace("*.","");try{let z=await F(K);for(let $ of z){let W=B(K,$),O=await U(W);if(O.isFile()&&$.endsWith(`.${Z}`)){let q=j("path").relative(this.projectPath,W);X.push(q)}else if(O.isDirectory()&&!this.shouldSkipDirectory($)){let Y=await this.findFilesByPatternInDir(Q,W);X.push(...Y)}}}catch{}return X}shouldIgnorePath(Q){if(this.ignoreGlobs.length===0)return!1;let K=Q;if(Q.startsWith("/")||Q.includes(":"))K=this.getRelativePath(Q);for(let X of this.ignoreGlobs)if(X.match(K))return this.logger.debug(`Ignoring path: ${K} (matched pattern)`),!0;return!1}shouldSkipDirectory(Q){return["node_modules",".git",".next",".nuxt","dist","build","coverage",".nyc_output","tmp","temp",".cache",".vscode",".idea"].includes(Q)||Q.startsWith(".")}getRelativePath(Q){return j("path").relative(this.projectPath,Q)}async getDependencyCount(){return(await this.scanProject()).reduce((K,X)=>K+X.dependencies.length,0)}async getUniqueDependencies(){return(await this.scanProject()).flatMap((Z)=>Z.dependencies).filter((Z,z,$)=>z===$.findIndex((W)=>W.name===Z.name))}}class k{async checkDeprecatedDependencies(Q){let K=[];for(let X of Q){let Z=await this.checkFileForDeprecatedDependencies(X);K.push(...Z)}return K}async checkFileForDeprecatedDependencies(Q){let K=[];for(let X of Q.dependencies){let Z=await this.checkDependencyDeprecation(X,Q.type);if(Z.deprecated)K.push({name:X.name,currentVersion:X.currentVersion,datasource:this.getDatasourceFromFileType(Q.type),file:Q.path,type:X.type,replacementAvailable:!1,deprecationMessage:Z.message,suggestedReplacement:Z.suggestedReplacement})}return K}async checkDependencyDeprecation(Q,K){try{if(K==="package.json"||K.includes("lock"))return await this.checkNpmDeprecation(Q);else if(K==="composer.json"||K==="composer.lock")return await this.checkComposerDeprecation(Q);else if(K==="deps.yaml"||K==="deps.yml"||K.includes("deps"))return await this.checkBunDeprecation(Q);return{deprecated:!1}}catch(X){return console.warn(`Failed to check deprecation for ${Q.name}:`,X),{deprecated:!1}}}async checkNpmDeprecation(Q){try{let K=await fetch(`https://registry.npmjs.org/${Q.name}`);if(!K.ok)return{deprecated:!1};let X=await K.json();if(X.deprecated)return{deprecated:!0,message:X.deprecated,suggestedReplacement:this.extractSuggestedReplacement(X.deprecated)};let Z=X.versions?.[Q.currentVersion];if(Z?.deprecated)return{deprecated:!0,message:Z.deprecated,suggestedReplacement:this.extractSuggestedReplacement(Z.deprecated)};return{deprecated:!1}}catch(K){return console.warn(`Failed to check npm deprecation for ${Q.name}:`,K),{deprecated:!1}}}async checkComposerDeprecation(Q){try{let K=await fetch(`https://packagist.org/packages/${Q.name}.json`);if(!K.ok)return{deprecated:!1};let X=await K.json();if(X.package?.abandoned){let Z=typeof X.package.abandoned==="string"?X.package.abandoned:null;return{deprecated:!0,message:`Package is abandoned${Z?`, use ${Z} instead`:""}`,suggestedReplacement:Z||void 0}}return{deprecated:!1}}catch(K){return console.warn(`Failed to check Composer deprecation for ${Q.name}:`,K),{deprecated:!1}}}async checkBunDeprecation(Q){return await this.checkNpmDeprecation(Q)}extractSuggestedReplacement(Q){let K=[/use\s+([a-z0-9@/-]+)\s+instead/i,/replaced\s+by\s+([a-z0-9@/-]+)/i,/migrate\s+to\s+([a-z0-9@/-]+)/i,/switch\s+to\s+([a-z0-9@/-]+)/i];for(let X of K){let Z=Q.match(X);if(Z)return Z[1]}return}getDatasourceFromFileType(Q){if(Q==="package.json"||Q.includes("lock"))return"npm";else if(Q==="composer.json"||Q==="composer.lock")return"composer";else if(Q==="deps.yaml"||Q==="deps.yml"||Q.includes("deps"))return"bun";else if(Q==="github-actions")return"github-actions";return"unknown"}}class x{verbose;constructor(Q=!1){this.verbose=Q}info(Q,...K){console.log(Q,...K)}warn(Q,...K){console.warn(Q,...K)}error(Q,...K){console.error(Q,...K)}debug(Q,...K){if(this.verbose)console.log(`\x1B[90m\uD83D\uDC1B\x1B[0m ${Q}`,...K)}success(Q,...K){console.log(`\x1B[32m\u2713\x1B[0m ${Q}`,...K)}static verbose(){return new x(!0)}static quiet(){return new x(!1)}}class QK{config;projectPath;logger;scanner;registryClient;dashboardGenerator;constructor(Q,K=w.cwd()){this.config=Q;this.projectPath=K;this.logger=new x(Q.verbose??!1),this.scanner=new y(this.projectPath,this.logger,this.config.packages?.ignorePaths),this.registryClient=new i(this.projectPath,this.logger,this.config),this.dashboardGenerator=new b}async scanForUpdates(){let Q=Date.now();this.logger.info("Starting dependency update scan...");try{let K=await this.scanner.scanProject(),X=K.reduce((H,_)=>H+_.dependencies.length,0),Z=[];if(this.config.packages?.ignore&&this.config.packages.ignore.length>0)Z=(await this.registryClient.getOutdatedPackages()).filter((_)=>!this.config.packages.ignore.includes(_.name));else Z=await this.registryClient.getOutdatedPackages();let z=await this.checkDependencyFilesForUpdates(K),$=await this.checkGitHubActionsForUpdates(K),W=await this.checkDockerfilesForUpdates(K),O=[...Z,...z,...$,...W];if(this.config.packages?.ignore&&this.config.packages.ignore.length>0)O=O.filter((H)=>!this.config.packages.ignore.includes(H.name));if(this.config.packages?.strategy)O=this.filterUpdatesByStrategy(O,this.config.packages.strategy);O=await this.filterUpdatesByMinimumReleaseAge(O),O=p(O);let Y=this.config.packages?.groups?this.groupUpdatesByConfig(O):P(O),q=Date.now()-Q,S={totalPackages:X,updates:O,groups:Y,scannedAt:new Date,duration:q};return this.logger.success(`Scan completed in ${q}ms. Found ${O.length} updates.`),S}catch(K){throw this.logger.error("Failed to scan for updates:",K),K}}async createPullRequests(Q){this.logger.info("Creating pull requests for updates...");try{if(!this.config.repository){this.logger.error("\u274C Repository configuration required for PR creation"),this.logger.info("Configure repository.provider, repository.owner, repository.name in buddy-bot.config.ts");return}let K=w.env.BUDDY_BOT_TOKEN||w.env.GITHUB_TOKEN;if(!K){this.logger.error("\u274C GITHUB_TOKEN or BUDDY_BOT_TOKEN environment variable required for PR creation");return}let X=!!w.env.BUDDY_BOT_TOKEN;if(w.env.BUDDY_BOT_TOKEN)console.log("\u2705 BUDDY_BOT_TOKEN detected - workflow permissions enabled"),console.log(`\uD83D\uDD11 Token length: ${w.env.BUDDY_BOT_TOKEN.length} characters`);else console.log("\u26A0\uFE0F BUDDY_BOT_TOKEN not found - workflow permissions disabled"),console.log("\uD83D\uDCA1 Ensure BUDDY_BOT_TOKEN is properly configured in GitHub secrets"),console.log("\uD83D\uDCA1 The workflow should set: env: BUDDY_BOT_TOKEN: ${{ secrets.BUDDY_BOT_TOKEN }}");let Z=new m(K,this.config.repository.owner,this.config.repository.name,X),z=new s(this.config);for(let $ of Q.groups)try{this.logger.info(`Creating PR for group: ${$.name} (${$.updates.length} updates)`);let W=$.title,O=await z.generateBody($),Y=await Z.getPullRequests("open"),q=`buddy-bot/update-${$.name.toLowerCase().replace(/\s+/g,"-")}-`,S=Y.find((G)=>(G.title===W||G.head.startsWith(q)||this.isSimilarPRTitle(G.title,W))&&(G.author==="github-actions[bot]"||G.author.includes("buddy")||G.head.startsWith("buddy-bot/"))&&!G.head.includes("renovate/")&&!G.head.includes("dependabot/")&&!G.author.toLowerCase().includes("renovate")&&!G.author.toLowerCase().includes("dependabot"));if(S){if(this.logger.info(`\uD83D\uDD04 Found existing PR #${S.number}: ${S.title}`),this.shouldAutoClosePR(S,$.updates)){this.logger.info(`\uD83D\uDD12 Auto-closing PR #${S.number} due to respectLatest config change`);try{await Z.closePullRequest(S.number),await Z.deleteBranch(S.head),this.logger.success(`\u2705 Auto-closed PR #${S.number} and deleted branch ${S.head}`);continue}catch(A){this.logger.error(`\u274C Failed to auto-close PR #${S.number}:`,A)}}if(this.checkIfUpdatesMatch(S.body,$.updates)){this.logger.info("\u2705 Existing PR has the same updates, skipping creation");continue}else{this.logger.info("\uD83D\uDD04 Updates differ, will update existing PR with new content");let A=S.head;try{let{spawn:L}=await import("child_process"),D=(R,t)=>{return new Promise((a,h)=>{let f=L(R,t,{stdio:"pipe"});f.on("close",(u)=>{if(u===0)a();else h(Error(`Git command failed with code ${u}`))}),f.on("error",h)})};await D("git",["checkout","main"]),await D("git",["reset","--hard","HEAD"]),await D("git",["clean","-fd"]),console.log(`\uD83E\uDDF9 Reset to clean main state before updating existing PR ${S.number}`)}catch(L){console.warn("\u26A0\uFE0F Failed to reset to clean state, continuing anyway:",L)}let M=await this.generateAllFileUpdates($.updates);if(M.length===0)this.logger.warn(`\u2139\uFE0F No file changes generated for existing PR ${S.number}, updating metadata only`);else{this.logger.info(`\uD83D\uDCDD Regenerated ${M.length} file changes for existing PR ${S.number}`);try{let{hasBranchDifferences:L}=await import("./chunk-tt3h8yyb.js");if(!await L(M,A))this.logger.info(`\u2139\uFE0F No content differences for ${A}; will merge main to keep branch up-to-date`),await Z.commitChanges(A,"chore: merge main to keep branch up-to-date",[]),this.logger.success(`\u2705 Merged main into ${A} to keep it up-to-date`);else await Z.commitChanges(A,`${$.title} (updated)`,M),this.logger.success(`\u2705 Updated files in branch ${A} with latest dependency versions`)}catch(L){this.logger.warn("\u26A0\uFE0F Failed to compare branch content, proceeding with commit:",L),await Z.commitChanges(A,`${$.title} (updated)`,M),this.logger.success(`\u2705 Updated files in branch ${A} with latest dependency versions`)}}let N=z.generateLabels($);await Z.updatePullRequest(S.number,{title:W,body:O,labels:N,reviewers:this.config.pullRequest?.reviewers,assignees:this.config.pullRequest?.assignees}),this.logger.success(`\u2705 Updated existing PR #${S.number}: ${W}`),this.logger.info(`\uD83D\uDD17 ${S.url}`);continue}}let H=Date.now(),_=`buddy-bot/update-${$.name.toLowerCase().replace(/\s+/g,"-")}-${H}`;await Z.createBranch(_,this.config.repository.baseBranch||"main");try{let{spawn:G}=await import("child_process"),V=(A,M)=>{return new Promise((N,L)=>{let D=G(A,M,{stdio:"pipe"});D.on("close",(R)=>{if(R===0)N();else L(Error(`Git command failed with code ${R}`))}),D.on("error",L)})};await V("git",["checkout","main"]),await V("git",["reset","--hard","HEAD"]),await V("git",["clean","-fd"]),console.log(`\uD83E\uDDF9 Reset to clean main state before generating updates for ${$.name}`)}catch(G){console.warn("\u26A0\uFE0F Failed to reset to clean state, continuing anyway:",G)}let E=await this.generateAllFileUpdates($.updates);if(E.length===0){this.logger.warn(`\u2139\uFE0F No file changes generated for group ${$.name}, skipping PR creation`);continue}let I=!1;for(let G of E)try{let V=await import("fs");if(V.existsSync(G.path)){if(V.readFileSync(G.path,"utf-8")!==G.content){I=!0;break}}else{I=!0;break}}catch{I=!0;break}if(!I){this.logger.warn(`\u2139\uFE0F No actual content changes for group ${$.name}, skipping PR creation`);continue}this.logger.info(`\uD83D\uDCDD Generated ${E.length} file changes for ${$.name}`),await Z.commitChanges(_,$.title,E);let v=z.generateLabels($),T=await Z.createPullRequest({title:W,body:O,head:_,base:this.config.repository.baseBranch||"main",draft:!1,reviewers:this.config.pullRequest?.reviewers,assignees:this.config.pullRequest?.assignees,labels:v});this.logger.success(`\u2705 Created PR #${T.number}: ${T.title}`),this.logger.info(`\uD83D\uDD17 ${T.url}`)}catch(W){this.logger.error(`\u274C Failed to create PR for group ${$.name}:`,W)}this.logger.success(`\u2705 Completed PR creation for ${Q.groups.length} group(s)`)}catch(K){throw this.logger.error("Failed to create pull requests:",K),K}}shouldRespectVersion(Q){if(!(this.config.packages?.respectLatest??!0))return!1;let X=["latest","*","main","master","develop","dev"],Z=Q.toLowerCase().trim();return X.includes(Z)}async checkDependencyFilesForUpdates(Q){let{isDependencyFile:K}=await import("./chunk-956t2kcb.js"),{resolveDependencyFile:X}=await import("./chunk-5fv4yjx7.js"),Z=[],z=Q.filter(($)=>K($.path));for(let $ of z)try{this.logger.info(`Checking dependency file: ${$.path}`);let W=await X($.path);for(let O of W.allDependencies||[]){if(this.shouldRespectVersion(O.constraint)){this.logger.debug(`Skipping ${O.name} - version "${O.constraint}" should be respected`);continue}if(O.constraint!==O.version&&O.version){let Y=O.constraint.replace(/^[\^~>=<]+/,"");if(!this.isNewerVersion(Y,O.version)){this.logger.debug(`Skipping ${O.name} - latest (${O.version}) is not newer than constraint (${Y})`);continue}let q=this.getUpdateType(Y,O.version),S=O.version;Z.push({name:O.name,currentVersion:O.constraint,newVersion:S,updateType:q,dependencyType:"dependencies",file:$.path,metadata:void 0,releaseNotesUrl:void 0,changelogUrl:void 0,homepage:void 0})}}}catch(W){this.logger.error(`Failed to check dependency file ${$.path}:`,W)}return Z}async checkGitHubActionsForUpdates(Q){let{isGitHubActionsFile:K}=await import("./chunk-kxav0xyr.js"),{fetchLatestActionVersion:X}=await import("./chunk-kxav0xyr.js"),Z=[],z=Q.filter((W)=>K(W.path));this.logger.info(`\uD83D\uDD0D Found ${z.length} GitHub Actions workflow files`);for(let W of z)try{this.logger.info(`Checking GitHub Actions file: ${W.path}`);let O=W.dependencies.filter((Y)=>Y.type==="github-actions");this.logger.info(`Found ${O.length} GitHub Actions in ${W.path}`);for(let Y of O)try{this.logger.info(`Checking action: ${Y.name}@${Y.currentVersion}`);let q=await X(Y.name);if(q)if(this.logger.info(`Latest version for ${Y.name}: ${q}`),q!==Y.currentVersion){let S=this.getUpdateType(Y.currentVersion,q);this.logger.info(`Update available: ${Y.name} ${Y.currentVersion} \u2192 ${q} (${S})`),Z.push({name:Y.name,currentVersion:Y.currentVersion,newVersion:q,updateType:S,dependencyType:"github-actions",file:W.path,metadata:void 0,releaseNotesUrl:`https://github.com/${Y.name}/releases`,changelogUrl:void 0,homepage:`https://github.com/${Y.name}`})}else this.logger.info(`No update needed for ${Y.name}: already at ${q}`);else this.logger.warn(`Could not fetch latest version for ${Y.name}`)}catch(q){this.logger.warn(`Failed to check version for action ${Y.name}:`,q)}}catch(O){this.logger.error(`Failed to check GitHub Actions file ${W.path}:`,O)}this.logger.info(`Generated ${Z.length} GitHub Actions updates`);let $=Z.reduce((W,O)=>{if(!W.find((q)=>q.name===O.name&&q.currentVersion===O.currentVersion&&q.newVersion===O.newVersion&&q.file===O.file))W.push(O);return W},[]);return this.logger.info(`After deduplication: ${$.length} unique GitHub Actions updates`),$}async checkDockerfilesForUpdates(Q){let{isDockerfile:K}=await import("./chunk-ag75h4mh.js"),{fetchLatestDockerImageVersion:X}=await import("./chunk-ag75h4mh.js"),Z=[],z=Q.filter((W)=>K(W.path));this.logger.info(`\uD83D\uDD0D Found ${z.length} Dockerfile(s)`);for(let W of z)try{this.logger.info(`Checking Dockerfile: ${W.path}`);let O=W.dependencies.filter((Y)=>Y.type==="docker-image");this.logger.info(`Found ${O.length} Docker images in ${W.path}`);for(let Y of O)try{if(this.logger.info(`Checking Docker image: ${Y.name}:${Y.currentVersion}`),this.shouldRespectVersion(Y.currentVersion)){this.logger.debug(`Skipping ${Y.name} - version "${Y.currentVersion}" should be respected`);continue}let q=await X(Y.name);if(q)if(this.logger.info(`Latest version for ${Y.name}: ${q}`),q!==Y.currentVersion){let S=this.getUpdateType(Y.currentVersion,q);this.logger.info(`Update available: ${Y.name} ${Y.currentVersion} \u2192 ${q} (${S})`),Z.push({name:Y.name,currentVersion:Y.currentVersion,newVersion:q,updateType:S,dependencyType:"docker-image",file:W.path,metadata:void 0,releaseNotesUrl:`https://hub.docker.com/r/${Y.name}/tags`,changelogUrl:void 0,homepage:`https://hub.docker.com/r/${Y.name}`})}else this.logger.info(`No update needed for ${Y.name}: already at ${q}`);else this.logger.warn(`Could not fetch latest version for Docker image ${Y.name}`)}catch(q){this.logger.warn(`Failed to check version for Docker image ${Y.name}:`,q)}}catch(O){this.logger.error(`Failed to check Dockerfile ${W.path}:`,O)}this.logger.info(`Generated ${Z.length} Docker image updates`);let $=Z.reduce((W,O)=>{if(!W.find((q)=>q.name===O.name&&q.currentVersion===O.currentVersion&&q.newVersion===O.newVersion&&q.file===O.file))W.push(O);return W},[]);return this.logger.info(`After deduplication: ${$.length} unique Docker image updates`),$}getUpdateType(Q,K){try{let X=Q.replace(/^[v^~>=<@]+/,""),Z=K.replace(/^[v^~>=<@]+/,""),z=(O)=>{let Y=O.split(".");while(Y.length<3)Y.push("0");return Y.join(".")};if(X=z(X),Z=z(Z),Bun.semver.order(Z,X)<=0)return"patch";let $=X.split(".").map(Number),W=Z.split(".").map(Number);if(W[0]>$[0])return"major";if(W[0]===$[0]&&W[1]>$[1])return"minor";return"patch"}catch{return"patch"}}isNewerVersion(Q,K){try{return Bun.semver.order(K.replace(/^[v^~>=<@]+/,""),Q.replace(/^[v^~>=<@]+/,""))>0}catch{return!1}}async generateAllFileUpdates(Q){let K=[],X=Q.filter((Y)=>Y.file.endsWith("package.json")&&!Y.file.includes(".yaml")&&!Y.file.includes(".yml")&&!Y.file.includes(".github/workflows/")),Z=new Map;for(let Y of X){if(!Z.has(Y.file))Z.set(Y.file,[]);Z.get(Y.file).push(Y)}for(let[Y,q]of Z)try{let S=KK.readFileSync(Y,"utf-8"),H=JSON.parse(S);for(let _ of q){let E=!1,I=_.name.replace(/\s*\(dev\)$/,"").replace(/\s*\(peer\)$/,"").replace(/\s*\(optional\)$/,""),v=["dependencies","devDependencies","peerDependencies","optionalDependencies"];for(let T of v)if(H[T]&&H[T][I]){let V=H[T][I].match(/^(\D*)/),A=V?V[1]:"",M=I.replace(/[.*+?^${}()|[\]\\]/g,"\\$&"),N=T.replace(/[.*+?^${}()|[\]\\]/g,"\\$&"),L=new RegExp(`("${N}"\\s*:\\s*\\{[^}]*?)("${M}"\\s*:\\s*")([^"]+)(")([^}]*?\\})`,"gs"),D=`${A}${_.newVersion}`;S=S.replace(L,`$1$2${D}$4$5`),E=!0;break}if(!E)console.warn(`Package ${I} not found in ${Y}`)}K.push({path:Y,content:S,type:"update"})}catch(S){console.warn(`Failed to update ${Y}:`,S)}let z=Q.filter((Y)=>{let q=Y.file.toLowerCase();return(q.endsWith("deps.yaml")||q.endsWith("deps.yml")||q.endsWith("dependencies.yaml")||q.endsWith("dependencies.yml"))&&!Y.file.includes(".github/workflows/")});if(z.length>0)try{let{generateDependencyFileUpdates:Y}=await import("./chunk-956t2kcb.js"),q=await Y(z);K.push(...q)}catch(Y){this.logger.error("Failed to generate dependency file updates:",Y)}let $=Q.filter((Y)=>Y.file.endsWith("composer.json")||Y.file.endsWith("composer.lock"));if($.length>0)try{let{generateComposerUpdates:Y}=await import("./chunk-smqahw1y.js"),q=await Y($);K.push(...q)}catch(Y){this.logger.error("Failed to generate Composer updates:",Y)}let W=Q.filter((Y)=>Y.file.includes(".github/workflows/"));if(W.length>0)try{let{generateGitHubActionsUpdates:Y}=await import("./chunk-kxav0xyr.js"),q=await Y(W);K.push(...q)}catch(Y){this.logger.error("Failed to generate GitHub Actions updates:",Y)}let O=Q.filter((Y)=>Y.dependencyType==="docker-image");if(O.length>0)try{let{generateDockerfileUpdates:Y}=await import("./chunk-ag75h4mh.js"),q=await Y(O);K.push(...q)}catch(Y){this.logger.error("Failed to generate Dockerfile updates:",Y)}return K}async run(){let Q=await this.scanForUpdates();if(Q.updates.length===0)return this.logger.info("No updates available!"),Q;if(this.config.pullRequest)await this.createPullRequests(Q);return Q}async checkPackages(Q){return this.logger.info(`Checking specific packages: ${Q.join(", ")}`),this.registryClient.getUpdatesForPackages(Q)}async checkPackagesWithPattern(Q){return this.logger.info(`Checking packages with pattern: ${Q}`),this.registryClient.getUpdatesWithPattern(Q)}filterUpdatesByStrategy(Q,K){if(K==="all")return Q;return Q.filter((X)=>{switch(K){case"major":return X.updateType==="major";case"minor":return X.updateType==="major"||X.updateType==="minor";case"patch":return!0;default:return!0}})}async filterUpdatesByMinimumReleaseAge(Q){let K=this.config.packages?.minimumReleaseAge??0;if(K===0)return Q;this.logger.info(`Applying minimum release age filter (${K} minutes)...`);let X=[];for(let z of Q)try{if(await this.registryClient.meetsMinimumReleaseAge(z.name,z.newVersion,z.dependencyType))X.push(z);else this.logger.debug(`Filtered out ${z.name}@${z.newVersion} (${z.dependencyType}) due to minimum release age requirement`)}catch($){this.logger.warn(`Error checking release age for ${z.name}@${z.newVersion} (${z.dependencyType}), including update:`,$),X.push(z)}let Z=Q.length-X.length;if(Z>0)this.logger.info(`Filtered out ${Z} updates due to minimum release age requirement`);return X}groupUpdatesByConfig(Q){let K=[],X=[...Q];if(this.config.packages?.groups)for(let Z of this.config.packages.groups){let z=[];for(let $ of Z.patterns){let W=new RegExp($.replace("*",".*")),O=X.filter((Y)=>W.test(Y.name));z.push(...O),O.forEach((Y)=>{let q=X.indexOf(Y);if(q>-1)X.splice(q,1)})}if(z.length>0){let $=z;if(Z.strategy)$=this.filterUpdatesByStrategy(z,Z.strategy);K.push({name:Z.name,updates:$,updateType:this.getHighestUpdateType($),title:`chore(deps): update ${Z.name}`,body:`Update ${$.length} packages in ${Z.name} group`})}}if(X.length>0){let Z=P(X);K.push(...Z)}return K}getHighestUpdateType(Q){if(Q.some((K)=>K.updateType==="major"))return"major";if(Q.some((K)=>K.updateType==="minor"))return"minor";return"patch"}isSimilarPRTitle(Q,K){if(Q.toLowerCase()===K.toLowerCase())return!0;if(K.toLowerCase().includes("update dependency ")){let z=K.match(/update dependency (\S+)/i),$=Q.match(/update dependency (\S+)/i);if(z&&$)return z[1]===$[1]}let X=Q.toLowerCase(),Z=K.toLowerCase();if(X.includes("all non-major")&&Z.includes("dependency ")||Z.includes("all non-major")&&X.includes("dependency "))return!1;if(X.includes("dependency ")&&Z.includes("dependency "))return!1;return!1}checkIfUpdatesMatch(Q,K){if(!Q)return!1;let X=/([\w@\-./]+):\s*(\d+\.\d+\.\d\S*)\s*\u2192\s*(\d+\.\d+\.\d\S*)/g,Z=new Map,z;while((z=X.exec(Q))!==null){let[,$,W,O]=z;Z.set($,{from:W,to:O})}for(let $ of K){let W=Z.get($.name);if(!W||W.to!==$.newVersion)return!1}return Z.size===K.length}generatePRLabels(Q){let K=new Set;K.add("dependencies");let X=Q.updates.map((W)=>W.updateType),Z={major:X.includes("major"),minor:X.includes("minor"),patch:X.includes("patch")};if(Z.major)K.add("major");if(Z.minor)K.add("minor");if(Z.patch)K.add("patch");if(Q.updates.length>5)K.add("dependencies");let z=["helmet","express-rate-limit","cors","bcrypt","jsonwebtoken"];if(Q.updates.some((W)=>z.some((O)=>W.name.includes(O))))K.add("security");if(this.config.pullRequest?.labels)this.config.pullRequest.labels.forEach((W)=>{if(W!=="dependencies")K.add(W)});return Array.from(K)}shouldAutoClosePR(Q,K){if(this.shouldAutoCloseForRespectLatest(Q))return!0;if(this.shouldAutoCloseForIgnorePaths(Q))return!0;if(this.shouldAutoCloseForRemovedFiles(Q))return!0;return!1}shouldAutoCloseForRespectLatest(Q){if(!(this.config.packages?.respectLatest??!0))return!1;let X=["latest","*","main","master","develop","dev"],Z=Q.body.toLowerCase();if(!X.some((O)=>Z.includes(O.toLowerCase())))return!1;return this.extractPackagesFromPRBody(Q.body).filter((O)=>{let Y=new RegExp(`\\|\\s*\\[${O.replace(/[.*+?^${}()|[\]\\]/g,"\\$&")}\\]\\([^)]+\\)\\s*\\|\\s*([^|]+)\\s*\\|`,"i"),q=Q.body.match(Y);if(!q){let E=new RegExp(`${O.replace(/[.*+?^${}()|[\]\\]/g,"\\$&")}[^\\w]*[:=]\\s*["']?([^"'
]+)["']?`,"i"),I=Q.body.match(E);if(!I)return!1;let v=I[1].toLowerCase().trim();return X.includes(v)}let H=q[1].trim().match(/^([^\u2192]+)\u2192/);if(!H)return!1;let _=H[1].trim().toLowerCase();return X.includes(_)}).length>0}shouldAutoCloseForIgnorePaths(Q){let K=this.config.packages?.ignorePaths;if(!K||K.length===0)return!1;let X=this.extractFilePathsFromPRBody(Q.body);if(X.length===0)return!1;let{Glob:Z}=globalThis.Bun,z=X.filter(($)=>{let W=$.replace(/^\.\//,"");return K.some((O)=>{try{return new Z(O).match(W)}catch(Y){return this.logger.debug(`Failed to match path ${W} against pattern ${O}: ${Y}`),!1}})});if(z.length>0)return this.logger.debug(`PR #${Q.number} contains files now in ignorePaths: ${z.join(", ")}`),!0;return!1}shouldAutoCloseForRemovedFiles(Q){try{let K=this.extractFilePathsFromPRBody(Q.body);if(K.length===0)return!1;let X=j("fs"),Z=j("path"),z=K.filter(($)=>{let W=Z.join(this.projectPath,$);return!X.existsSync(W)});if(z.length>0){if(this.logger.info(`PR #${Q.number} references removed files: ${z.join(", ")}`),this.config.packages?.ignorePaths&&this.config.packages.ignorePaths.length>0){let{Glob:S}=globalThis.Bun;if(z.filter((_)=>{return!this.config.packages.ignorePaths.some((E)=>{try{return new S(E).match(_)}catch(I){return this.logger.warn(`Invalid glob pattern '${E}':`,I),!1}})}).length>0)return this.logger.info(`Some removed files are outside ignored paths - not auto-closing PR #${Q.number}`),!1}if(z.some((S)=>S.endsWith("composer.json")))return this.logger.info(`composer.json was removed - PR #${Q.number} should be auto-closed`),!0;let W=Q.body.toLowerCase(),O=W.includes("composer")||z.some((S)=>S.includes("composer")),Y=W.includes("package.json")||z.some((S)=>S.includes("package.json")),q=z.some((S)=>S.endsWith("deps.yaml")||S.endsWith("deps.yml")||S.endsWith("dependencies.yaml")||S.endsWith("dependencies.yml"));if(O||Y||q)return this.logger.info(`PR #${Q.number} is about removed dependency system - should be auto-closed`),!0}return!1}catch(K){return this.logger.debug(`Failed to check for removed files in PR #${Q.number}: ${K}`),!1}}extractFilePathsFromPRBody(Q){let K=[],X=/\|\s*\[[^\]]+\]\([^)]*\)\s*\|[^|]*\|\s*\*\*([^*]+)\*\*\s*\|/g,Z;while((Z=X.exec(Q))!==null){let O=Z[1].trim();if(O&&!K.includes(O))K.push(O)}let z=/\*\*([^*]+\.(?:json|yaml|yml|lock))\*\*/g;while((Z=z.exec(Q))!==null){let O=Z[1].trim();if(O&&!K.includes(O))K.push(O)}let $=/\|[^|]+\|[^|]+\|([^|]+)\|[^|]*\|/g;while((Z=$.exec(Q))!==null){let O=Z[1].trim();if(O&&(O.includes("/")||/\.(?:json|yaml|yml|lock)$/.test(O))&&!K.includes(O))K.push(O)}let W=/(?:^|\s)([\w-]+(?:\/[\w.-]+)*\/[\w.-]+\.(?:json|yaml|yml|lock))(?:\s|$)/gm;while((Z=W.exec(Q))!==null){let O=Z[1].trim();if(O&&!K.includes(O))K.push(O)}return K}extractPackagesFromPRBody(Q){let K=[],X=Q.match(/\|[^|]*\|[^|]*\|[^|]*\|[^|]*\|/g);if(X)for(let z of X){let $=z.match(/\[([^\]]+)\]\([^)]*\)/);if($)K.push($[1])}let Z=Q.match(/<summary>([^<]+)<\/summary>/g);if(Z)for(let z of Z){let $=z.replace(/<summary>/,"").replace(/<\/summary>/,"").trim();if($&&!K.includes($))K.push($)}return K}extractPackageUpdatesFromPRBody(Q){let K=[],X=/\|\s*\[([^\]]+)\][^|]*\|\s*\[?`\^?([^`]+)`\s*->\s*`\^?([^`]+)`\]?/g,Z;while((Z=X.exec(Q))!==null){let[,z,$,W]=Z;K.push({name:z,currentVersion:$,newVersion:W})}return K}getConfig(){return this.config}getProjectPath(){return this.projectPath}async checkAndCloseObsoletePRs(Q,K=!1){try{this.logger.info("\uD83D\uDD0D Scanning for obsolete PRs due to removed dependency files...");let Z=(await Q.getPullRequests("open")).filter(($)=>$.head.startsWith("buddy-bot/")||$.author==="github-actions[bot]"||$.labels.includes("dependencies")||$.labels.includes("dependency")||$.title.toLowerCase().includes("update")||$.title.toLowerCase().includes("chore(deps)")||$.title.toLowerCase().includes("composer"));this.logger.info(`Found ${Z.length} dependency-related PRs to check`);let z=0;for(let $ of Z)try{if(this.shouldAutoCloseForRemovedFiles($))if(this.logger.info(`\uD83D\uDD12 PR #${$.number} should be auto-closed: ${$.title}`),K)this.logger.info(`\uD83D\uDD0D [DRY RUN] Would auto-close PR #${$.number}`),z++;else try{let O=this.generateCloseReason($);try{await Q.createComment($.number,O)}catch(Y){this.logger.warn(`\u26A0\uFE0F Could not add close reason comment to PR #${$.number}:`,Y)}if(await Q.closePullRequest($.number),$.head.startsWith("buddy-bot/"))try{await Q.deleteBranch($.head),this.logger.success(`\u2705 Auto-closed PR #${$.number} and deleted branch ${$.head}`)}catch(Y){this.logger.warn(`\u26A0\uFE0F Auto-closed PR #${$.number} but failed to delete branch: ${Y}`)}else this.logger.success(`\u2705 Auto-closed PR #${$.number}`);z++}catch(O){this.logger.error(`\u274C Failed to auto-close PR #${$.number}:`,O)}}catch(W){this.logger.warn(`\u26A0\uFE0F Error checking PR #${$.number}:`,W)}if(z>0)this.logger.success(`\u2705 ${K?"Would auto-close":"Auto-closed"} ${z} obsolete PR(s)`);else this.logger.info("\uD83D\uDCCB No obsolete PRs found")}catch(X){throw this.logger.error("Failed to check for obsolete PRs:",X),X}}async checkAndCloseSatisfiedPRs(Q,K=!1){try{this.logger.info("\uD83D\uDD0D Checking for PRs where dependencies are already at target version...");let Z=(await Q.getPullRequests("open")).filter((O)=>O.head.startsWith("buddy-bot/")||O.author==="github-actions[bot]"||O.labels.includes("dependencies")||O.labels.includes("dependency"));this.logger.info(`Found ${Z.length} dependency PRs to validate`);let z=0,$=await this.scanForUpdates(),W=new Map;for(let O of $.updates)W.set(O.name,O);for(let O of Z)try{let Y=this.extractPackageUpdatesFromPRBody(O.body);if(Y.length===0){this.logger.debug(`PR #${O.number}: Could not extract package updates, skipping`);continue}if(Y.every((S)=>{let H=W.get(S.name);if(!H)return this.logger.debug(`PR #${O.number}: ${S.name} not in current scan (satisfied - no longer needs direct update)`),!0;if(H.newVersion===S.newVersion)return this.logger.debug(`PR #${O.number}: ${S.name} still needs update to ${S.newVersion}`),!1;try{if(!this.isNewerVersion(H.currentVersion,S.newVersion))return this.logger.debug(`PR #${O.number}: ${S.name} already at or beyond ${S.newVersion}`),!0}catch{return!1}return!1}))if(this.logger.info(`\u2705 PR #${O.number} is satisfied (dependencies at target version): ${O.title}`),K)this.logger.info(`\uD83D\uDD0D [DRY RUN] Would close PR #${O.number}`),z++;else try{let S=Y.filter((E)=>!W.has(E.name)),H=Y.filter((E)=>{let I=W.get(E.name);if(!I)return!1;try{return!this.isNewerVersion(I.currentVersion,E.newVersion)}catch{return!1}}),_=`\uD83E\uDD16 **Auto-closing satisfied PR**
`;if(S.length>0)_+=`This PR was automatically closed because the following packages are no longer direct dependencies (possibly moved to peer dependencies, transitive dependencies, or removed):
`,S.forEach((E)=>{_+=`- **${E.name}**: ${E.currentVersion} \u2192 ${E.newVersion}
`}),_+=`
`;if(H.length>0)_+=`The following packages are already at the target version or newer:
`,H.forEach((E)=>{_+=`- **${E.name}**: ${E.currentVersion} \u2192 ${E.newVersion}
`}),_+=`
`;_+="If this was closed in error, please reopen and add a comment explaining why.";try{await Q.createComment(O.number,_)}catch(E){this.logger.warn(`\u26A0\uFE0F Could not add comment to PR #${O.number}:`,E)}if(await Q.closePullRequest(O.number),O.head.startsWith("buddy-bot/"))try{await Q.deleteBranch(O.head),this.logger.success(`\u2705 Closed PR #${O.number} and deleted branch ${O.head}`)}catch(E){this.logger.warn(`\u26A0\uFE0F Closed PR #${O.number} but failed to delete branch: ${E}`)}else this.logger.success(`\u2705 Closed PR #${O.number}`);z++}catch(S){this.logger.error(`\u274C Failed to close PR #${O.number}:`,S)}}catch(Y){this.logger.warn(`\u26A0\uFE0F Error validating PR #${O.number}:`,Y)}if(z>0)this.logger.success(`\u2705 ${K?"Would close":"Closed"} ${z} satisfied PR(s)`);else this.logger.info("\uD83D\uDCCB No satisfied PRs found")}catch(X){throw this.logger.error("Failed to check for satisfied PRs:",X),X}}generateCloseReason(Q){let X=this.extractFilePathsFromPRBody(Q.body).filter((O)=>{let Y=j("fs"),S=j("path").join(this.projectPath,O);return!Y.existsSync(S)}),Z=X.some((O)=>O.includes("composer")),z=X.some((O)=>O.includes("package.json")),$=X.some((O)=>O.endsWith("deps.yaml")||O.endsWith("deps.yml")||O.endsWith("dependencies.yaml")||O.endsWith("dependencies.yml")),W=`\uD83E\uDD16 **Auto-closing obsolete PR**
`;if(Z)W+="This PR was automatically closed because `composer.json` has been removed from the project, indicating that Composer is no longer used for dependency management.\n\n";else if(z)W+="This PR was automatically closed because `package.json` has been removed from the project, indicating that npm/yarn/pnpm is no longer used for dependency management.\n\n";else if($)W+=`This PR was automatically closed because the dependency files it references have been removed from the project.
`;else W+=`This PR was automatically closed because the dependency files it references are no longer present in the project.
`;if(X.length>0)W+=`**Removed files:**
${X.map((O)=>`- \`${O}\``).join(`
`)}
`;return W+="If this was closed in error, please reopen the PR and update the dependency files accordingly.",W}async createOrUpdateDashboard(){try{if(this.logger.info("Creating or updating dependency dashboard..."),!this.config.repository)throw Error("Repository configuration is required for dashboard");if(this.config.repository.provider!=="github")throw Error("Dashboard is currently only supported for GitHub repositories");let Q=this.config.repository.token||w.env.BUDDY_BOT_TOKEN||w.env.GITHUB_TOKEN||"",K=!!w.env.BUDDY_BOT_TOKEN,X=new m(Q,this.config.repository.owner,this.config.repository.name,K),Z=await this.collectDashboardData(X),z=this.config.dashboard||{},{title:$,body:W}=this.dashboardGenerator.generateDashboard(Z,{showOpenPRs:z.showOpenPRs??!0,showDetectedDependencies:z.showDetectedDependencies??!0,showDeprecatedDependencies:z.showDeprecatedDependencies??!0,bodyTemplate:z.bodyTemplate}),O=await this.findExistingDashboard(X,z.issueNumber),Y;if(O)this.logger.info(`Updating existing dashboard issue #${O.number}`),Y=await X.updateIssue(O.number,{title:z.title||$,body:W,labels:z.labels||["dependencies","dashboard"],assignees:z.assignees}),this.logger.success(`\u2705 Successfully updated dashboard issue #${Y.number}`);else{this.logger.info("Creating new dashboard issue"),this.logger.info("Performing final check for existing dashboards before creation...");let q=await this.findExistingDashboard(X,z.issueNumber);if(q)this.logger.info(`Race condition detected! Found existing dashboard #${q.number} during final check`),Y=await X.updateIssue(q.number,{title:z.title||$,body:W,labels:z.labels||["dependencies","dashboard"],assignees:z.assignees}),this.logger.success(`\u2705 Updated existing dashboard issue #${Y.number} (race condition avoided)`);else Y=await X.createIssue({title:z.title||$,body:W,labels:z.labels||["dependencies","dashboard"],assignees:z.assignees}),this.logger.success(`\u2705 Successfully created new dashboard issue #${Y.number}`)}return this.logger.success(`\u2705 Dashboard updated: ${Y.url}`),Y}catch(Q){throw this.logger.error("Failed to create or update dashboard:",Q),Q}}async collectDashboardData(Q){let[K,X]=await Promise.all([this.scanner.scanProject(),Q.getPullRequests("open")]),Z=X.filter((q)=>q.labels.includes("dependencies")||q.labels.includes("dependency")||q.labels.includes("deps")||q.title.toLowerCase().includes("update")||q.title.toLowerCase().includes("chore(deps)")||q.title.toLowerCase().includes("bump")||q.title.toLowerCase().includes("upgrade")||q.title.toLowerCase().includes("renovate")||q.head.includes("renovate/")||q.head.includes("dependabot/")||q.head.includes("buddy-bot/")||q.head.includes("update-")||q.head.includes("bump-")),z=K.filter((q)=>q.type==="package.json"),$=K.filter((q)=>q.path.includes(".github/workflows/")&&(q.path.endsWith(".yml")||q.path.endsWith(".yaml"))),W=K.filter((q)=>!q.path.includes(".github/workflows/")&&q.type!=="package.json"),Y=await new k().checkDeprecatedDependencies(K);return{openPRs:Z,detectedDependencies:{packageJson:z,dependencyFiles:W,githubActions:$},deprecatedDependencies:Y,repository:{owner:this.config.repository.owner,name:this.config.repository.name,provider:this.config.repository.provider},lastUpdated:new Date}}async findExistingDashboard(Q,K){try{if(this.logger.info("Searching for existing dashboard issue..."),K){this.logger.info(`Looking for specific dashboard issue #${K}`);let O=(await Q.getIssues("open")).find((Y)=>Y.number===K);if(O)return this.logger.info(`Found specified dashboard issue #${O.number}: ${O.title}`),O;else return this.logger.warn(`Specified dashboard issue #${K} not found`),null}let X=await Q.getIssues("open");this.logger.info(`Found ${X.length} open issues to search through`);for(let W of X){let O=W.labels.includes("dashboard")&&W.labels.includes("dependencies"),Y=W.title.toLowerCase().includes("dependency dashboard"),q=W.body.includes("This issue lists Buddy Bot updates and detected dependencies");if(O&&(Y||q))return this.logger.info(`Found existing dashboard issue #${W.number}: ${W.title}`),this.logger.info(` - Labels: ${W.labels.join(", ")}`),this.logger.info(` - Title matches: ${Y}`),this.logger