/*
 * @openinc/parse-server-opendash
 * Parse Server Cloud Code for the open.INC stack.
 * (Compiled output — 155 lines / 7.17 kB, JavaScript.)
 */
;
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.LinkResolver = void 0;
const types_1 = require("../../../types");
const GitHubClient_1 = require("./GitHubClient");
const path_1 = __importDefault(require("path"));
/**
 * Service to resolve and upload linked local files/images in documentation content.
 *
 * Workflow (see resolveLinks):
 *   1. Scan all files for local markdown links/images (non-http, non-anchor, non-.md).
 *   2. Fetch each unique linked file from GitHub.
 *   3. Upload each fetched file as a Parse Asset.
 *   4. Rewrite the links inside `this.files` (in place) to the new asset URLs.
 */
class LinkResolver {
    /**
     * @param {string} gitHubToken - token used to authenticate the GitHub client
     * @param {Array<{path: string, content?: string}>} [files] - documentation files to process
     */
    constructor(gitHubToken, files = []) {
        this.files = [];
        this.gitHubClient = new GitHubClient_1.GitHubClient(gitHubToken);
        this.files = files;
    }
    /**
     * Resolves all local file/image links in `this.files`, uploads the linked
     * files as Parse Assets and rewrites the markdown links in place.
     *
     * Note: this mutates `file.content` of each entry in `this.files`; it does
     * NOT return the link map (the previous comment was stale).
     *
     * @param {*} user - owner recorded on each created Asset
     * @param {*} tenant - tenant recorded on each created Asset
     */
    async resolveLinks(user, tenant) {
        console.log("[LinkResolver] Resolving links in documentation files...");
        const linkMap = this.scanLinksWithPositions();
        // Build a de-duplicated list of absolute links across all files.
        const uniqueAbsoluteLinks = Array.from(new Set(Array.from(linkMap.values()).flatMap((occs) => occs.map((o) => o.absoluteLink))));
        console.log(`[LinkResolver] Found ${uniqueAbsoluteLinks.length} unique local links to resolve.`);
        // Fetch all linked files from GitHub in parallel.
        const fetchedFiles = new Map();
        await Promise.all(uniqueAbsoluteLinks.map(async (filePath) => {
            try {
                const file = await this.gitHubClient.getFileContent(filePath);
                if (file && file.content && file.encoding === "base64") {
                    const buffer = Buffer.from(file.content, "base64");
                    // Try to decode as UTF-8 string, fall back to the raw Buffer
                    // for binary data (U+FFFD signals invalid UTF-8 byte sequences).
                    const asString = buffer.toString("utf8");
                    if (!asString.includes("\uFFFD")) {
                        fetchedFiles.set(filePath, asString);
                    }
                    else {
                        fetchedFiles.set(filePath, buffer);
                    }
                }
                else {
                    // BUGFIX: `file` may be null/undefined in this branch; guard
                    // the access so we record "" instead of throwing a TypeError
                    // that previously surfaced as a confusing fetch warning.
                    fetchedFiles.set(filePath, file?.content ?? "");
                }
            }
            catch (err) {
                console.warn(`[LinkResolver] ⚠️ Could not fetch ${filePath}:`, err);
                fetchedFiles.set(filePath, "");
            }
        }));
        // Map of old absoluteLink => new Asset file URL.
        const newLinkMap = new Map();
        for (const [absoluteLink, content] of fetchedFiles) {
            const fileName = absoluteLink.split("/").pop();
            // Wrap the data as base64 for Parse.File, regardless of whether we
            // kept the fetched content as a string or a Buffer above.
            const data = typeof content === "string"
                ? { base64: Buffer.from(content, "utf8").toString("base64") }
                : { base64: content.toString("base64") };
            try {
                const parseFile = await new Parse.File(fileName || "file", data).save({
                    useMasterKey: true,
                });
                const asset = new types_1.Assets({
                    file: parseFile,
                    context: "documentation",
                    tenant,
                    user,
                    description: fileName,
                    meta: {
                        importedByDocumentation: true,
                    },
                });
                const savedAsset = await asset.save(null, { useMasterKey: true });
                const newURL = savedAsset.file?.url() || "";
                newLinkMap.set(absoluteLink, newURL);
            }
            catch (err) {
                // Best-effort: a failed upload simply leaves the original link intact.
                console.error(`[LinkResolver] Failed to upload linked asset for ${absoluteLink}:`, err);
            }
        }
        console.log(`[LinkResolver] Resolved and uploaded ${newLinkMap.size} linked assets.`);
        // Finally, replace links in each file's content (in place).
        for (const file of this.files) {
            const occurrences = linkMap.get(file.path);
            if (occurrences && occurrences.length) {
                file.content = this.replaceLinksInContent(occurrences, file.content ?? "", newLinkMap);
            }
        }
        console.log("[LinkResolver] Link resolution completed.");
    }
    /**
     * Scans all files for markdown links/images and returns a map of
     * filePath => ordered array of link occurrences (with start/end positions).
     * Skips http(s)/mailto URLs, pure anchors, and links to other .md files.
     *
     * @returns {Map<string, Array<{filePath: string, link: string, absoluteLink: string, start: number, end: number, markdown: string}>>}
     */
    scanLinksWithPositions() {
        // Matches [text](link) and ![alt](link); group 2 is the raw link target.
        const regex = /(!?\[[^\]]*\]\(([^)]+)\))/g;
        const result = new Map();
        for (const file of this.files) {
            const occurrences = [];
            const content = file.content || "";
            // The /g regex is stateful; reset lastIndex so one file's scan can
            // never leak its position into the next file's scan.
            regex.lastIndex = 0;
            let match;
            while ((match = regex.exec(content)) !== null) {
                const markdown = match[1];
                const rawLink = match[2].trim();
                // Skip external URLs and mailto links.
                if (/^https?:\/\//i.test(rawLink) || /^mailto:/i.test(rawLink))
                    continue;
                // Skip pure anchor links.
                if (rawLink.startsWith("#"))
                    continue;
                // If the link contains a fragment, strip it (e.g., file.md#section → file.md)
                const [linkPath] = rawLink.split("#");
                if (!linkPath || !linkPath.trim())
                    continue;
                // Ignore links to other markdown files, but warn.
                if (/\.md$/i.test(linkPath.trim())) {
                    console.warn(`[LinkResolver] ⚠️ Ignoring link to markdown file "${linkPath.trim()}" in "${file.path}"`);
                    continue;
                }
                // Resolve the link relative to the file's directory under ./docs.
                const baseDir = path_1.default.posix.dirname(file.path);
                const absoluteLink = path_1.default.posix.normalize(path_1.default.posix.join("./docs", baseDir, linkPath.trim()));
                occurrences.push({
                    filePath: file.path,
                    link: rawLink,
                    absoluteLink,
                    start: match.index,
                    end: match.index + markdown.length,
                    markdown,
                });
            }
            if (occurrences.length)
                result.set(file.path, occurrences);
        }
        return result;
    }
    /**
     * Replaces resolved links in `content` using the recorded positions.
     * `occurrences` must be in ascending `start` order (as produced by
     * scanLinksWithPositions); `offset` tracks the cumulative length drift
     * caused by earlier replacements in the same content.
     *
     * @param {Array<{link: string, absoluteLink: string, start: number, end: number}>} occurrences
     * @param {string} content
     * @param {Map<string, string>} assetLinkMap - absoluteLink => new asset URL
     * @returns {string} content with resolved links substituted
     */
    replaceLinksInContent(occurrences, content, assetLinkMap) {
        let offset = 0; // track content length changes
        let newContent = content;
        for (const occ of occurrences) {
            const newLink = assetLinkMap.get(occ.absoluteLink);
            if (!newLink)
                continue;
            // Replace only the link part inside the [text](link) slice.
            const before = newContent.slice(0, occ.start + offset);
            const matchText = newContent.slice(occ.start + offset, occ.end + offset);
            const after = newContent.slice(occ.end + offset);
            // BUGFIX: use a replacer function so `$`-sequences (e.g. "$&") in the
            // asset URL are inserted literally rather than being treated as
            // String.prototype.replace substitution patterns.
            const replaced = matchText.replace(occ.link, () => newLink);
            newContent = before + replaced + after;
            offset += replaced.length - matchText.length;
        }
        return newContent;
    }
}
exports.LinkResolver = LinkResolver;