/*
 * @mdfriday/foundry
 * The core engine of MDFriday: converts Markdown and shortcodes into fully
 * themed static sites — Hugo-style, powered by TypeScript.
 * (Compiled JavaScript output, 361 lines / 13.5 kB.)
 */
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.ResourceImpl = exports.PublishOnce = exports.ResourceHash = void 0;
const resources_1 = require("../../../domain/resources");
const crypto_1 = require("crypto");
const stream_1 = require("stream");
const paths_1 = require("../../../domain/paths");
const promises_1 = require("stream/promises");
const log_1 = require("../../../../pkg/log");
const log = (0, log_1.getDomainLogger)('resources', { component: 'publisher' });
class ResourceHash {
    /**
     * Lazily computes and caches the SHA-256 hex digest and byte size of a
     * resource's content. `setup()` is idempotent: once `value` is set, the
     * content is never re-read.
     */
    constructor() {
        this.value = ''; // hex-encoded sha256 digest ('' = not yet computed)
        this.size = 0; // total content size in bytes
        this.resource = null; // resource last hashed (kept for reference)
    }
    /**
     * Streams the resource content once, recording digest and size.
     * No-op if a digest was already computed.
     * @param resource object exposing readSeekCloser(): Promise<readable stream with close()>
     */
    async setup(resource) {
        if (this.value)
            return; // Already calculated
        this.resource = resource;
        const readSeeker = await resource.readSeekCloser();
        const hash = (0, crypto_1.createHash)('sha256');
        let totalSize = 0;
        // Stream chunks straight into the hash. The previous version also
        // accumulated every chunk in an unused array, buffering the whole
        // file in memory for nothing.
        readSeeker.on('data', (chunk) => {
            hash.update(chunk);
            totalSize += chunk.length;
        });
        try {
            await new Promise((resolve, reject) => {
                readSeeker.on('end', () => {
                    this.value = hash.digest('hex');
                    this.size = totalSize;
                    resolve();
                });
                readSeeker.on('error', reject);
            });
        }
        finally {
            // Always release the stream — the previous version leaked it
            // when the read errored.
            await readSeeker.close();
        }
    }
}
exports.ResourceHash = ResourceHash;
class PublishOnce {
    /**
     * Guards a publish operation so it runs at most once *successfully*.
     * Concurrent callers share the same in-flight promise. A failed attempt
     * clears the cached promise so a later call can retry — the previous
     * version cached the rejected promise forever, permanently poisoning
     * the resource after one transient failure.
     */
    constructor() {
        this.hasPublished = false; // true once a publish completed successfully
        this.publishPromise = null; // in-flight publish shared by concurrent callers
    }
    /**
     * Runs `publishFn` unless a publish already succeeded; concurrent calls
     * while one is in flight await the same promise.
     * @param publishFn async function performing the actual publish
     * @returns promise resolving when publishing is done (or already done)
     */
    async do(publishFn) {
        if (this.hasPublished)
            return;
        if (this.publishPromise) {
            return this.publishPromise;
        }
        this.publishPromise = publishFn().then(() => {
            this.hasPublished = true;
        }, (err) => {
            // Reset so a subsequent call may retry instead of replaying the rejection.
            this.publishPromise = null;
            throw err;
        });
        return this.publishPromise;
    }
}
exports.PublishOnce = PublishOnce;
class ResourceImpl {
    /**
     * Concrete Resource: lazily-opened content plus path metadata,
     * publishable to the output file system on demand. TypeScript port of
     * Hugo's Go resource model.
     *
     * @param openReadSeekCloser async factory yielding a readable stream augmented with seek()/close()
     * @param mediaType media type descriptor (uses .mainType and .type)
     * @param paths path object (pathFile/targetPath/targetLink/fromTargetPath)
     * @param data arbitrary metadata exposed to templates
     * @param publisher optional publisher exposing openPublishFileForWriting()
     */
    constructor(openReadSeekCloser, mediaType, paths, data = {}, publisher) {
        this.h = new ResourceHash();
        this.openReadSeekCloser = openReadSeekCloser;
        this._mediaType = mediaType;
        this.paths = paths;
        this._data = data;
        this.publisher = publisher;
        this.publishOnce = new PublishOnce();
    }
    /** Source file name (with extension). */
    name() {
        return this.paths.pathFile();
    }
    /** Target path normalized to forward slashes (Windows-safe). */
    nameNormalized() {
        return this.paths.targetPath().replace(/\\/g, '/');
    }
    mediaType() {
        return this._mediaType;
    }
    /** Main media type, e.g. "image" for "image/png". */
    resourceType() {
        return this._mediaType.mainType;
    }
    /** Relative link; kicks off publishing as a side effect. */
    relPermalink() {
        // Fire-and-forget publish: the link must be returned synchronously.
        // Errors are already logged inside publish(); the catch prevents the
        // floating promise from surfacing as an unhandled rejection.
        this.publish().catch(() => { /* logged in publish() */ });
        return this.paths.targetLink();
    }
    /** Absolute target path; kicks off publishing as a side effect. */
    permalink() {
        // Fire-and-forget publish (see relPermalink for rationale).
        this.publish().catch(() => { /* logged in publish() */ });
        return this.paths.targetPath();
    }
    /**
     * Copies the resource content to the publish target, at most once
     * (guarded by PublishOnce). A missing publisher is logged, not fatal.
     */
    async publish() {
        const targetPath = this.targetPath();
        if (!this.publisher) {
            log.error("⚠️ [Resource.publish] No publisher for: %s", targetPath);
            return;
        }
        await this.publishOnce.do(async () => {
            let publicWriter = null;
            let readSeeker = null;
            try {
                publicWriter = await this.publisher.openPublishFileForWriting(this.paths.targetPath());
                readSeeker = await this.readSeekCloser();
                await this.copyStreamToFile(readSeeker, publicWriter);
            }
            catch (error) {
                log.errorf("❌ [Resource.publish] Error publishing %s: %s", targetPath, error);
                throw error;
            }
            finally {
                // Ensure resources are always closed in the correct order
                // Close readSeeker first
                if (readSeeker) {
                    try {
                        await readSeeker.close();
                    }
                    catch (closeError) {
                        log.errorf("❌ [Resource.publish] Failed to close ReadSeekCloser %s: %s", targetPath, closeError);
                    }
                }
                // Then close publicWriter
                if (publicWriter) {
                    try {
                        await publicWriter.file.close();
                    }
                    catch (closeError) {
                        log.errorf("❌ [Resource.publish] Failed to close public writer %s: %s", targetPath, closeError);
                    }
                }
            }
        });
    }
    /**
     * Helper method to copy stream content to file
     * TypeScript equivalent of golang's io.Copy functionality
     */
    async copyStreamToFile(source, destination) {
        const targetPath = this.targetPath();
        try {
            await (0, promises_1.pipeline)(source, destination);
        }
        catch (err) {
            log.errorf("❌ [copyStreamToFile] Error during pipeline for %s: %s", targetPath, err);
            throw err;
        }
    }
    targetPath() {
        return this.paths.targetPath();
    }
    data() {
        return this._data;
    }
    /**
     * Opens the content stream. If the source stream already has buffered
     * data, it is drained into a fresh Readable so consumers always start
     * from the beginning; seek()/close() become no-ops on that copy.
     */
    async readSeekCloser() {
        const result = await this.openReadSeekCloser();
        // Check if the stream has any data available
        if (result && typeof result.read === 'function') {
            const testChunk = result.read();
            if (testChunk) {
                // Rebuild the already-consumed data into a new stream. Use the
                // stream module imported at the top of the file rather than a
                // redundant inline require().
                const newStream = new stream_1.Readable();
                newStream.push(testChunk);
                // Drain any remaining buffered data
                let remainingChunk;
                while ((remainingChunk = result.read()) !== null) {
                    if (remainingChunk) {
                        newStream.push(remainingChunk);
                    }
                }
                newStream.push(null); // End the stream
                // Return the new stream with seek and close methods
                return Object.assign(newStream, {
                    seek: async (offset, whence) => 0,
                    close: async () => Promise.resolve()
                });
            }
        }
        return result;
    }
    /**
     * Reads the entire content as a string.
     * @param ctx unused; kept for interface compatibility
     */
    async content(ctx) {
        const readSeeker = await this.readSeekCloser();
        try {
            return await new Promise((resolve, reject) => {
                let content = '';
                readSeeker.on('data', (chunk) => {
                    content += chunk.toString();
                });
                readSeeker.on('end', () => {
                    resolve(content);
                });
                readSeeker.on('error', reject);
            });
        }
        finally {
            // Release the stream — the previous version never closed it.
            await readSeeker.close();
        }
    }
    /** sha256 hex digest of the content (computed once, then cached). */
    async hash() {
        await this.h.setup(this);
        return this.h.value;
    }
    /** Content size in bytes (computed together with the hash). */
    async size() {
        await this.h.setup(this);
        return this.h.size;
    }
    /** Clone re-targeted at a new path (shares opener and cached hash). */
    cloneTo(targetPath) {
        const cloned = this.clone();
        cloned.paths = cloned.paths.fromTargetPath(targetPath);
        return cloned;
    }
    /** Shallow clone: shares the opener and hash, copies the data map. */
    clone() {
        const cloned = new ResourceImpl(this.openReadSeekCloser, this._mediaType, this.paths, { ...this._data }, this.publisher);
        cloned.h = this.h; // Share the hash
        return cloned;
    }
    key() {
        // Following Golang implementation: paths.PathEscape(l.paths.TargetLink())
        // TODO: implement path escaping if needed
        return this.paths.targetLink();
    }
    // Template-compatible method names (capitalized for Golang template compatibility)
    // These getters allow templates to access methods using capitalized names like RelPermalink
    get RelPermalink() {
        return this.relPermalink();
    }
    get Permalink() {
        return this.permalink();
    }
    get Name() {
        return this.name();
    }
    get MediaType() {
        return this.mediaType();
    }
    get ResourceType() {
        return this.resourceType();
    }
    get TargetPath() {
        return this.targetPath();
    }
    get Data() {
        return this.data();
    }
    // Async template-compatible methods
    get Content() {
        return this.content();
    }
    get Hash() {
        return () => this.hash();
    }
    get Size() {
        return () => this.size();
    }
    /** Metadata view consumed by the resources domain. */
    meta() {
        return new resources_1.ResourceMetadataImpl(this.paths.targetPath(), this._mediaType.type, this._data);
    }
    /** Merges incoming entries without overwriting existing keys. */
    mergeData(incoming) {
        if (!incoming || Object.keys(incoming).length === 0) {
            return;
        }
        for (const [key, value] of Object.entries(incoming)) {
            if (!(key in this._data)) {
                this._data[key] = value;
            }
        }
    }
    // Implementation of ResourceTransformer interface
    /**
     * Applies transformations sequentially: each step reads the previous
     * resource's content and yields a new in-memory resource from the
     * transformed bytes.
     */
    async transform(...transformations) {
        let currentResource = this;
        for (let i = 0; i < transformations.length; i++) {
            const transformation = transformations[i];
            // Create transformation context - following golang's approach
            const sourceReader = await currentResource.readSeekCloser();
            const targetWriter = new stream_1.PassThrough();
            // Buffer to collect the transformed content
            let transformedContent = '';
            const contentChunks = [];
            const ctx = {
                source: {
                    from: sourceReader,
                    inPath: currentResource.targetPath(),
                    inMediaType: currentResource.mediaType()
                },
                target: {
                    to: targetWriter
                },
                data: { ...currentResource.data() },
                addOutPathIdentifier: (identifier) => {
                    // Use proper path parsing like golang version
                    const currentPath = currentResource.targetPath();
                    const pathInfo = paths_1.PathDomain.parseBasic(currentPath);
                    // Clean up the directory part - handle relative paths properly
                    let cleanDir = pathInfo.dir;
                    // Remove trailing slashes
                    cleanDir = cleanDir.replace(/\/+$/, '');
                    // Reconstruct path: dir + file without ext + identifier + ext
                    let newPath = '';
                    if (cleanDir) {
                        newPath = cleanDir + '/';
                    }
                    const nameWithIdentifier = pathInfo.nameWithoutExt + identifier;
                    newPath += nameWithIdentifier + pathInfo.ext;
                    // Update context for next transformation
                    ctx.data.targetPath = newPath;
                },
                updateBuffer: () => {
                    // Implementation for buffer updates
                },
                updateSource: () => {
                    // Implementation for source updates
                },
                close: () => {
                    // Cleanup resources
                    sourceReader.close();
                }
            };
            // Set up data collection from target writer BEFORE calling transform
            targetWriter.on('data', (chunk) => {
                contentChunks.push(chunk);
            });
            // Promise to track when target writer finishes
            const targetWriterFinished = new Promise((resolve, reject) => {
                targetWriter.on('end', () => {
                    transformedContent = Buffer.concat(contentChunks).toString();
                    resolve();
                });
                targetWriter.on('error', (error) => {
                    reject(error);
                });
            });
            try {
                // Apply transformation - this should complete and call targetWriter.end()
                await transformation.transform(ctx);
                // Wait for the target writer to finish collecting all data
                await targetWriterFinished;
                // Create new resource with transformed content
                const newTargetPath = ctx.data.targetPath || currentResource.targetPath();
                currentResource = this.createTransformedResource(transformedContent, newTargetPath, currentResource.mediaType(), ctx.data);
            }
            finally {
                // Always release the source stream. The previous version's
                // catch-and-rethrow leaked sourceReader whenever a
                // transformation threw, since cleanup only ran on success.
                ctx.close();
            }
        }
        return currentResource;
    }
    /**
     * Builds a new ResourceImpl whose opener replays `content` from memory,
     * re-deriving paths from the original (preserving URLConfig info).
     */
    createTransformedResource(content, targetPath, mediaType, data) {
        const opener = async () => {
            const readable = new stream_1.PassThrough();
            readable.end(content);
            return Object.assign(readable, {
                seek: async (offset, whence) => 0,
                close: async () => Promise.resolve()
            });
        };
        // Use the existing paths to derive new paths, following golang's pattern
        // This preserves the URLConfig information from the original resource
        const newPaths = this.paths.fromTargetPath(targetPath);
        return new ResourceImpl(opener, mediaType, newPaths, data, this.publisher);
    }
}
exports.ResourceImpl = ResourceImpl;
//# sourceMappingURL=resource.js.map