@openinc/parse-server-opendash
Parse Server Cloud Code for open.INC Stack.
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.GitHubClient = void 0;
const config_1 = require("../config");
/**
* GitHub API Client for fetching repository data
*/
class GitHubClient {
    constructor(token, importConfig = config_1.DEFAULT_IMPORT_CONFIG) {
        this.baseUrl = "https://api.github.com";
        this.branch = null;
        this.repository = null;
        this.tree = null;
        this.sha = null;
        this.headers = {
            Authorization: `Bearer ${token}`,
            Accept: "application/vnd.github.v3+json",
            "User-Agent": "OpenInc-Documentation-Importer",
        };
        this.importConfig = importConfig;
    }
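    // NOTE (assumption, not from this file): DEFAULT_IMPORT_CONFIG comes from "../config"
    // and its exact shape is not shown here. Judging by the destructuring in the methods
    // below, it is expected to provide at least something like:
    //   { organization: "<github-org>", repository: "<repo-name>", branch: "<branch>" }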
    /**
     * Validate the GitHub token
     */
    async validateToken() {
        const response = await fetch(`${this.baseUrl}/user`, {
            headers: this.headers,
        });
        if (!response.ok) {
            const error = await response.text();
            throw new Error(`Invalid token: ${response.status} ${response.statusText} - ${error}`);
        }
        return response.json();
    }
    /**
     * Get repository information
     */
    async getRepository() {
        const { organization, repository } = this.importConfig;
        const response = await fetch(`${this.baseUrl}/repos/${organization}/${repository}`, {
            headers: this.headers,
        });
        if (!response.ok) {
            const error = await response.text();
            throw new Error(`Failed to access repository: ${response.status} ${response.statusText} - ${error}`);
        }
        return response.json();
    }
    /**
     * Get all branches for a repository
     */
    async getBranches() {
        const { organization, repository } = this.importConfig;
        const response = await fetch(`${this.baseUrl}/repos/${organization}/${repository}/branches`, {
            headers: this.headers,
        });
        if (!response.ok) {
            throw new Error(`Failed to fetch branches: ${response.status} ${response.statusText}`);
        }
        return response.json();
    }
    /**
     * Get a specific branch
     */
    async getBranch() {
        const { organization, repository, branch } = this.importConfig;
        const response = await fetch(`${this.baseUrl}/repos/${organization}/${repository}/branches/${branch}`, {
            headers: this.headers,
        });
        if (!response.ok) {
            const error = await response.text();
            throw new Error(`Failed to fetch branch '${branch}': ${response.status} ${response.statusText} - ${error}`);
        }
        return response.json();
    }
    /**
     * Get repository tree (file structure)
     */
    async getTree(sha, recursive = true) {
        const { organization, repository } = this.importConfig;
        const url = `${this.baseUrl}/repos/${organization}/${repository}/git/trees/${sha}${recursive ? "?recursive=1" : ""}`;
        const response = await fetch(url, {
            headers: this.headers,
        });
        if (!response.ok) {
            throw new Error(`Failed to fetch repository tree: ${response.status} ${response.statusText}`);
        }
        return response.json();
    }
    /**
     * Get file content
     */
    async getFileContent(path) {
        const { organization, repository, branch } = this.importConfig;
        const url = `${this.baseUrl}/repos/${organization}/${repository}/contents/${path}?ref=${branch}`;
        const response = await fetch(url, {
            headers: this.headers,
        });
        if (!response.ok) {
            throw new Error(`Failed to fetch file content: ${response.status} ${response.statusText}`);
        }
        return response.json();
    }
    /**
     * Get the last commit information for a specific file
     */
    async getFileLastCommit(path, ref) {
        const { organization, repository } = this.importConfig;
        const url = `${this.baseUrl}/repos/${organization}/${repository}/commits?path=${encodeURIComponent(path)}&per_page=1${ref ? `&sha=${ref}` : ""}`;
        const response = await fetch(url, {
            headers: this.headers,
        });
        if (!response.ok) {
            throw new Error(`Failed to fetch file commit info: ${response.status} ${response.statusText}`);
        }
        const commits = await response.json();
        return commits.length > 0 ? commits[0] : null;
    }
    /**
     * Get commit information for multiple files in batch
     */
    async getMultipleFileCommits(paths) {
        const commitMap = new Map();
        // Process files in batches to avoid rate limiting
        const batchSize = 5;
        for (let i = 0; i < paths.length; i += batchSize) {
            const batch = paths.slice(i, i + batchSize);
            const promises = batch.map(async (path) => {
                try {
                    const commit = await this.getFileLastCommit(path);
                    return { path, commit };
                }
                catch (error) {
                    console.warn(`⚠️ Failed to get commit info for ${path}:`, error);
                    return { path, commit: null };
                }
            });
            const results = await Promise.all(promises);
            results.forEach(({ path, commit }) => {
                if (commit) {
                    commitMap.set(path, commit);
                }
            });
            // Small delay to be respectful to GitHub API
            if (i + batchSize < paths.length) {
                await new Promise((resolve) => setTimeout(resolve, 100));
            }
        }
        return commitMap;
    }
    /**
     * Fetches the content of each linked file from GitHub.
     * @param ref Branch or commit SHA
     * @param paths Array of relative file paths (from repo root)
     * @returns Map of path to file content (decoded as string if possible)
     */
    async fetchLinkedFiles(ref, paths) {
        const result = new Map();
        for (const path of paths) {
            try {
                const file = await this.getFileContent(path);
                // GitHub returns content as base64 for binary/text files
                if (file && file.content && file.encoding === "base64") {
                    const buffer = Buffer.from(file.content, "base64");
                    // Try to decode as UTF-8 string, fallback to Buffer for binary
                    const asString = buffer.toString("utf8");
                    // Heuristic: treat as string if no replacement chars
                    if (!asString.includes("\uFFFD")) {
                        result.set(path, asString);
                    }
                    else {
                        result.set(path, buffer);
                    }
                }
                else {
                    // For raw text files, just use the content
                    result.set(path, file.content ?? "");
                }
            }
            catch (err) {
                console.warn(`⚠️ Could not fetch ${path}:`, err);
                result.set(path, "");
            }
        }
        return result;
    }
}
exports.GitHubClient = GitHubClient;
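
Minimal usage sketch (not part of the published file): it assumes the class is reachable from the package's main export (the real require path depends on where this module lives inside the package), an import config with the { organization, repository, branch } fields destructured above, a GitHub token in the GITHUB_TOKEN environment variable, and a Node.js runtime with a global fetch (Node 18+). All concrete values below are placeholders.

// Placeholder import path: the actual module location inside the package is not shown on this page.
const { GitHubClient } = require("@openinc/parse-server-opendash");

async function importDocs() {
    // Assumed config shape, based on the fields destructured by the client's methods.
    const importConfig = {
        organization: "example-org",
        repository: "example-repo",
        branch: "main",
    };
    const client = new GitHubClient(process.env.GITHUB_TOKEN, importConfig);

    // Fail fast if the token is rejected by the GitHub API.
    await client.validateToken();

    // Resolve the configured branch to its head commit and list the full file tree.
    const branch = await client.getBranch();
    const tree = await client.getTree(branch.commit.sha, true);

    // Pick out Markdown files and download their decoded contents plus last-commit info.
    const mdPaths = tree.tree
        .filter((entry) => entry.type === "blob" && entry.path.endsWith(".md"))
        .map((entry) => entry.path);
    const contents = await client.fetchLinkedFiles(branch.commit.sha, mdPaths);
    const lastCommits = await client.getMultipleFileCommits(mdPaths);

    for (const [path, content] of contents) {
        const commit = lastCommits.get(path);
        console.log(path, typeof content, commit?.commit?.message ?? "(no commit info)");
    }
}

importDocs().catch(console.error);

The sketch leans on behaviour already implemented by the class: getMultipleFileCommits batches and throttles its requests, and fetchLinkedFiles handles base64 decoding, so callers need no rate limiting or decoding of their own.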