// ultimate-jekyll-manager: Ultimate Jekyll dependency manager
// Libraries
const Manager = new (require('../../build.js'));
const logger = Manager.logger('imagemin');
const { src, dest, watch, series } = require('gulp');
const glob = require('glob').globSync;
const responsive = require('gulp-responsive-modern');
const path = require('path');
const jetpack = require('fs-jetpack');
const GitHubCache = require('./utils/github-cache');
// Load package
const rootPathProject = Manager.getRootPath('project');
// Settings
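// Local cache directory and the dedicated branch used to persist processed images between builds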
const CACHE_DIR = '.temp/imagemin';
const CACHE_BRANCH = 'uj-imagemin';
const RECHECK_DAYS = 0; // 0 = rely on the content hash alone; N > 0 also reprocesses cache entries older than N days
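// Verbose per-file logging (enabled on the development server or via UJ_LOUD_LOGS=true)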
const LOUD = Manager.isServer() || process.env.UJ_LOUD_LOGS === 'true';
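// Cache control: UJ_IMAGEMIN_CACHE=false disables the cache entirely; GH_TOKEN and
// GITHUB_REPOSITORY are required to pull/push the cache branch on GitHub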
// Variables
let githubCache;
// Track processed files during development session (file path -> process time)
const processedFiles = new Map();
// Glob
const input = [
// Files to include
'src/assets/images/**/*.{jpg,jpeg,png,gif,svg,webp}',
// Files to exclude
// '!dist/**',
];
const output = 'dist/assets/images';
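// Debounce delay (ms) for the watcher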
const delay = 250;
// Main task
async function imagemin(complete) {
// Log
logger.log('Starting...');
// Use glob to resolve the matching image files
let files = glob(input);
// If there are no files, complete
if (files.length === 0) {
// Log
logger.log('Found 0 images to process');
// Complete
return complete();
}
// Log
logger.log(`Found ${files.length} images to process`);
// In development mode, filter out already processed images
files = filterProcessedFiles(files);
if (files.length === 0) {
return complete();
}
// Setup cache
const disableCache = process.env.UJ_IMAGEMIN_CACHE === 'false';
githubCache = await setupCache(disableCache);
// Load or create meta cache
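// meta.json maps each source image path to { hash, timestamp } so unchanged images
// can be restored from the cache on later builds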
const metaPath = path.join(CACHE_DIR, 'meta.json');
let meta = githubCache ? githubCache.loadMetadata(metaPath) : {};
// Analyze files and determine what needs processing
const {
filesToProcess,
cachedFiles,
validCachePaths
} = analyzeFiles(files, meta, githubCache, metaPath);
// Track files that are newly processed
const newlyProcessedFiles = new Set();
// If there are no files to process, complete
if (filesToProcess.length === 0) {
logger.log('✅ All images already processed (from cache)');
await pushCacheToGitHub(githubCache, meta, validCachePaths, 0, cachedFiles.length);
return complete();
}
// Log
logger.log(`🔄 Processing ${filesToProcess.length} new/changed images`);
// Process images: resize and convert to webp
// Use base option to preserve directory structure
return src(filesToProcess, { base: 'src/assets/images' })
.pipe(
responsive({
'**/*.{jpg,jpeg,png}': [
// Desktop resized version (1024px wide) in original format
{
width: 1024,
rename: { suffix: '-1024px' }
},
// Desktop resized webp version
{
width: 1024,
format: 'webp',
rename: { suffix: '-1024px' }
},
// Tablet resized version (640px wide) in original format
{
width: 640,
rename: { suffix: '-640px' }
},
// Tablet resized webp version
{
width: 640,
format: 'webp',
rename: { suffix: '-640px' }
},
// Mobile resized version (320px wide) in original format
{
width: 320,
rename: { suffix: '-320px' }
},
// Mobile resized webp version
{
width: 320,
format: 'webp',
rename: { suffix: '-320px' }
},
// Original size webp version
{
format: 'webp',
rename: { suffix: '' }
},
// Original size in original format
{
rename: { suffix: '' }
}
]
}, {
quality: 80,
progressive: true,
withMetadata: false,
withoutEnlargement: false,
skipOnEnlargement: false,
})
)
.pipe(dest(output))
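// Mirror every generated file into the local cache directory so it can be pushed to the cache branch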
.on('data', (file) => {
// Also save to cache
const relativePath = path.relative(path.join(rootPathProject, output), file.path);
const cachePath = path.join(CACHE_DIR, 'images', relativePath);
jetpack.copy(file.path, cachePath, { overwrite: true });
newlyProcessedFiles.add(cachePath);
})
.on('finish', async () => {
// Track processed files in development mode
trackProcessedFiles(filesToProcess);
// Save meta file
if (githubCache) {
githubCache.saveMetadata(metaPath, meta);
}
// Push cache (include newly processed files)
await pushCacheToGitHub(githubCache, meta, validCachePaths, filesToProcess.length, cachedFiles.length, newlyProcessedFiles);
// Log
logger.log('✅ Finished!');
logger.log(`📊 Summary: ${filesToProcess.length} processed, ${cachedFiles.length} from cache`);
// Complete
return complete();
});
}
// Watcher task
function imageminWatcher(complete) {
// Quit if in build mode
if (Manager.isBuildMode()) {
logger.log('[watcher] Skipping watcher in build mode');
return complete();
}
// Log
logger.log('[watcher] Watching for changes...');
// Watch for changes
watch(input, { delay: delay, dot: true }, imagemin)
.on('change', (changedPath) => {
logger.log(`[watcher] File changed (${changedPath})`);
});
// Complete
return complete();
}
// Default Task
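// Runs a single optimization pass, then starts the watcher (skipped in build mode)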
module.exports = series(
// Manager.wrapTask('imagemin', imagemin),
imagemin,
imageminWatcher
);
// ============================================================================
// Helper Functions
// ============================================================================
// Filter already processed files in development mode
function filterProcessedFiles(files) {
if (Manager.isBuildMode()) {
return files;
}
const filtered = files.filter(file => {
// Check if we've already processed this file in this session
if (processedFiles.has(file)) {
// Check if the file has been modified since we processed it
const srcStat = jetpack.inspect(file, { times: true });
const processedTime = processedFiles.get(file);
// If file hasn't changed since we processed it, skip
if (srcStat && processedTime && srcStat.modifyTime.getTime() <= processedTime) {
return false;
}
}
// File needs processing
return true;
});
// Log summary
if (filtered.length === 0) {
logger.log('✅ All images already processed in this session. Skipping.');
} else if (filtered.length < files.length) {
logger.log(`⏭️ Skipping ${files.length - filtered.length} already processed images`);
logger.log(`🔄 Processing ${filtered.length} new/unprocessed images`);
}
return filtered;
}
// Setup GitHub cache
async function setupCache(disableCache) {
const useCache = !disableCache;
const canPushCache = useCache && process.env.GH_TOKEN && process.env.GITHUB_REPOSITORY;
// Log cache status
if (disableCache) {
logger.log('⚠️ Cache disabled (UJ_IMAGEMIN_CACHE=false)');
} else if (!canPushCache && useCache) {
logger.log('📦 Cache enabled (local only - no GitHub credentials)');
}
if (!useCache) {
return null;
}
// Initialize GitHub cache
const cache = new GitHubCache({
branchName: CACHE_BRANCH,
cacheDir: CACHE_DIR,
logger: logger,
cacheType: 'Image',
description: 'processed image cache for faster builds'
});
if (canPushCache) {
// Pull cached images from branch
await cache.fetchBranch();
// Log cache contents
const cacheImageDir = path.join(CACHE_DIR, 'images');
if (jetpack.exists(cacheImageDir)) {
const cachedFiles = glob(path.join(cacheImageDir, '**/*'), { nodir: true });
logger.log(`📦 Found ${cachedFiles.length} files in cache`);
if (LOUD && cachedFiles.length > 0) {
logger.log('📋 Cached files:');
cachedFiles.forEach(file => {
const relative = path.relative(cacheImageDir, file);
logger.log(` - ${relative}`);
});
}
} else {
logger.log('📦 No cache directory found, starting fresh');
}
} else if (useCache) {
logger.warn('⚠️ GitHub credentials not available, running without cache push/pull');
}
return cache;
}
// Get expected output files for an image
function getExpectedOutputs(file, relativePath) {
const extension = path.extname(file);
const baseNameWithoutExt = path.basename(file, extension);
// Strip the source prefix so cache and destination paths mirror the source directory structure
const dirName = path.dirname(relativePath).replace(/^src\/assets\/images\/?/, '');
// Resized variants keep the source format's extension; webp variants are generated alongside them
return [
`${baseNameWithoutExt}-1024px${extension}`,
`${baseNameWithoutExt}-1024px.webp`,
`${baseNameWithoutExt}-640px${extension}`,
`${baseNameWithoutExt}-640px.webp`,
`${baseNameWithoutExt}-320px${extension}`,
`${baseNameWithoutExt}-320px.webp`,
`${baseNameWithoutExt}.webp`,
path.basename(file), // Original
].map(outputFile => ({
fileName: outputFile,
cachePath: path.join(CACHE_DIR, 'images', dirName, outputFile),
destPath: path.join(output, dirName, outputFile),
dirName: dirName
}));
}
// Check if cached versions exist and copy them
function checkAndUseCached(expectedOutputs) {
let allExist = true;
const missingFiles = [];
for (const { fileName, cachePath, destPath } of expectedOutputs) {
if (jetpack.exists(cachePath)) {
// Copy from cache to destination
jetpack.copy(cachePath, destPath, { overwrite: true });
if (LOUD) {
logger.log(` ✅ Found in cache: ${fileName}`);
}
} else {
allExist = false;
missingFiles.push(fileName);
if (LOUD) {
logger.log(` ❌ Missing from cache: ${fileName}`);
}
if (!LOUD) break; // In loud mode, keep scanning so every missing file is reported
}
}
if (LOUD && !allExist) {
logger.log(` 🚫 Cannot use cache - missing ${missingFiles.length} files`);
}
return allExist;
}
// Process files and determine what needs processing
function analyzeFiles(files, meta, githubCache, metaPath) {
const filesToProcess = [];
const cachedFiles = [];
const validCachePaths = new Set();
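// For each source image: hash it, compute its expected outputs, and decide whether the
// cached outputs can be reused or the image has to be reprocessed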
for (const file of files) {
const relativePath = path.relative(rootPathProject, file);
const hash = githubCache ? githubCache.calculateHash(file) : null;
if (LOUD) {
logger.log(`🔍 Checking: ${relativePath} (hash: ${hash ? hash.substring(0, 8) : 'N/A'})`);
}
// Get expected output files
const expectedOutputs = getExpectedOutputs(file, relativePath);
// Track these as valid cache files
expectedOutputs.forEach(({ fileName, dirName }) => {
validCachePaths.add(path.join('images', dirName, fileName));
});
// Check if file needs processing
const entry = meta[relativePath];
const age = entry?.timestamp
? (Date.now() - new Date(entry.timestamp).getTime()) / (1000 * 60 * 60 * 24)
: Infinity;
const useCached = entry
&& entry.hash === hash
&& (RECHECK_DAYS === 0 || age < RECHECK_DAYS);
if (LOUD && entry) {
logger.log(` 📝 Meta entry found:`);
logger.log(` - Stored hash: ${entry.hash ? entry.hash.substring(0, 8) : 'N/A'}`);
logger.log(` - Current hash: ${hash ? hash.substring(0, 8) : 'N/A'}`);
logger.log(` - Hash match: ${entry.hash === hash}`);
logger.log(` - Age: ${age.toFixed(1)} days`);
logger.log(` - Use cached: ${useCached}`);
}
if (useCached) {
// Check if cached versions exist
const allCachedExist = checkAndUseCached(expectedOutputs);
if (allCachedExist) {
cachedFiles.push(relativePath);
logger.log(`📦 Using cache: ${relativePath}`);
continue;
} else if (LOUD) {
logger.log(` ♻️ Will reprocess: ${relativePath}`);
}
}
// File needs processing
filesToProcess.push(file);
if (LOUD) {
logger.log(` 🔄 Adding to process queue: ${relativePath}`);
}
// Update meta entry
meta[relativePath] = {
timestamp: new Date().toISOString(),
hash,
};
}
// Log cache summary
if (cachedFiles.length > 0) {
logger.log(`📦 Used cache for ${cachedFiles.length} images`);
}
return {
filesToProcess,
cachedFiles,
validCachePaths
};
}
// Push cache to GitHub
async function pushCacheToGitHub(githubCache, meta, validCachePaths, processedCount, fromCacheCount, newlyProcessedFiles = new Set()) {
const canPushCache = githubCache && process.env.GH_TOKEN && process.env.GITHUB_REPOSITORY;
if (!canPushCache) return;
// Collect all files that should be in cache
const allCacheFiles = [];
// First add all newly processed files (these definitely exist)
newlyProcessedFiles.forEach(filePath => {
allCacheFiles.push(filePath);
});
// Then add existing valid cache files
validCachePaths.forEach(relativePath => {
const fullPath = path.join(CACHE_DIR, relativePath);
// Only add if it exists and wasn't already added as a newly processed file
if (jetpack.exists(fullPath) && !newlyProcessedFiles.has(fullPath)) {
allCacheFiles.push(fullPath);
}
});
// Add and save meta.json
const metaPath = path.join(CACHE_DIR, 'meta.json');
allCacheFiles.push(metaPath);
githubCache.saveMetadata(metaPath, meta);
// Push with orphan detection and stats
await githubCache.pushBranch(allCacheFiles, {
validFiles: validCachePaths,
stats: {
timestamp: new Date().toISOString(),
sourceCount: processedCount + fromCacheCount,
cachedCount: allCacheFiles.length - 1, // Exclude meta.json from the cached-file count
processedNow: processedCount,
fromCache: fromCacheCount,
details: `Each source image is processed into multiple formats and sizes:
- Original format at 1024px, 640px, 320px widths
- WebP format at 1024px, 640px, 320px widths
- Original size in both original and WebP formats
Images are cached based on file content hash, so unchanged images are never reprocessed.`
}
});
}
// Track processed files in development mode
function trackProcessedFiles(filesToProcess) {
if (!Manager.isBuildMode()) {
const now = Date.now();
filesToProcess.forEach(file => {
processedFiles.set(file, now);
});
logger.log(`📝 Tracked ${filesToProcess.length} processed files in session`);
}
}