// strapi-to-lokalise-plugin
// Preview and sync Lokalise translations from Strapi admin
'use strict';
const { randomUUID } = require('crypto');
const STORE_KEY = 'syncJobs';
const DEFAULT_BATCH_SIZE = 1000;
const SMALL_JOB_THRESHOLD = 25; // Jobs with ≤25 keys are treated as "small" and skip real batching
const SMALL_JOB_BATCH_SIZE = 10000; // Large enough that a small job always fits in a single batch
const getStore = () =>
strapi.store({
type: 'plugin',
name: 'lokalise-sync',
});
const generateId = () => {
if (typeof randomUUID === 'function') {
return randomUUID();
}
return `${Date.now()}-${Math.random().toString(16).slice(2, 10)}`;
};
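// Fallback ids look like '1717430000000-9f3a1b2c' (timestamp plus 8 random hex chars).
// They are not collision-proof like randomUUID, but are adequate as store keys.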
const nowIso = () => new Date().toISOString();
const clone = (value) => JSON.parse(JSON.stringify(value));
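// Note: JSON round-tripping drops undefined values and functions and serializes
// Dates to ISO strings; jobs are plain JSON data here, so this is a safe deep copy.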
const summarizeBatches = (batches = []) =>
batches.map(({ id, index, type, totalKeys, status, attempts = 0, startedAt, finishedAt, error }) => ({
id,
index,
type,
totalKeys,
status,
attempts,
startedAt,
finishedAt,
error: error
? {
message: error.message,
occurredAt: error.occurredAt,
}
: null,
}));
const sanitizeJob = (job, { includeInternal = false } = {}) => {
if (!job) {
return null;
}
const summary = {
id: job.id,
status: job.status,
tag: job.tag || null,
locale: job.locale || null,
totalBatches: job.totalBatches || 0,
totalKeys: job.totalKeys || 0,
processedKeys: job.processedKeys || 0,
completedBatches: job.completedBatches || 0,
failedBatches: job.failedBatches || 0,
cancelledBatches: job.cancelledBatches || 0,
createdAt: job.createdAt,
updatedAt: job.updatedAt,
lastError: job.lastError
? {
message: job.lastError.message,
occurredAt: job.lastError.occurredAt,
}
: null,
selectionSummary: job.selectionSummary || {},
batches: summarizeBatches(job.batches || []),
};
if (includeInternal) {
summary._internal = {
batches: clone(job.batches),
};
}
return summary;
};
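// Sanitized jobs are what the admin UI consumes. Shape sketch (values illustrative):
//   {
//     id: '…', status: 'running', tag: 'release-1.2', locale: null,
//     totalBatches: 3, totalKeys: 2500, processedKeys: 1000,
//     completedBatches: 1, failedBatches: 0, cancelledBatches: 0,
//     createdAt: '…', updatedAt: '…', lastError: null,
//     selectionSummary: { 'api::page.page': { totalKeys: 2500 } },
//     batches: [{ id: '…-batch-1', index: 1, status: 'completed', … }, …],
//   }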
const readJobs = async () => {
const store = getStore();
const value = await store.get({ key: STORE_KEY });
return value || {};
};
const writeJobs = async (jobs) => {
const store = getStore();
await store.set({ key: STORE_KEY, value: jobs });
};
const computeSelectionSummary = (selection = []) =>
selection.reduce((acc, item) => {
if (!item || !item.type || !Array.isArray(item.keyNames)) {
return acc;
}
const normalizedType = item.type;
const existing = acc[normalizedType] || { totalKeys: 0 };
existing.totalKeys += item.keyNames.length;
acc[normalizedType] = existing;
return acc;
}, {});
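// Example (content type illustrative) — entries of the same type accumulate:
//   computeSelectionSummary([
//     { type: 'api::page.page', keyNames: ['a', 'b'] },
//     { type: 'api::page.page', keyNames: ['c'] },
//   ]);
//   // -> { 'api::page.page': { totalKeys: 3 } }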
const flattenSelectionToBatches = (jobId, selection, batchSize = DEFAULT_BATCH_SIZE) => {
const batches = [];
let index = 0;
let totalKeys = 0;
(selection || []).forEach((item) => {
if (!item || !item.type || !Array.isArray(item.keyNames) || item.keyNames.length === 0) {
return;
}
const uniqueKeyNames = Array.from(
new Set(
item.keyNames
.map((key) => (typeof key === 'string' ? key.trim() : null))
.filter((key) => typeof key === 'string' && key.length > 0)
)
);
    // CRITICAL: Extract keys array with lokalise_key_id if provided
    const providedKeys = Array.isArray(item.keys) ? item.keys : [];
    const keysMap = new Map();
    providedKeys.forEach((k) => {
      if (!k) {
        return;
      }
      const keyName = k.key_name || k.keyName;
      if (!keyName) {
        return;
      }
      // Accept numeric ids as-is, coerce numeric strings, and fall back to null
      let lokaliseKeyId = null;
      if (typeof k.lokalise_key_id === 'number') {
        lokaliseKeyId = k.lokalise_key_id;
      } else if (typeof k.lokalise_key_id === 'string' && !Number.isNaN(Number(k.lokalise_key_id))) {
        lokaliseKeyId = Number(k.lokalise_key_id);
      }
      keysMap.set(keyName, {
        key_name: keyName,
        lokalise_key_id: lokaliseKeyId,
        entry_id: k.entry_id || null,
        existing_tags: Array.isArray(k.existing_tags) ? k.existing_tags : [],
      });
    });
for (let i = 0; i < uniqueKeyNames.length; i += batchSize) {
const slice = uniqueKeyNames.slice(i, i + batchSize);
if (slice.length === 0) {
continue;
}
// Build keys array for this batch with lokalise_key_id
const batchKeys = slice.map(keyName => {
const keyData = keysMap.get(keyName);
return keyData || { key_name: keyName, lokalise_key_id: null, entry_id: null, existing_tags: [] };
});
      // Log how many keys arrived with a lokalise_key_id already resolved
      const keysWithId = batchKeys.filter((k) => k.lokalise_key_id != null).length;
if (keysWithId > 0) {
strapi.log.info(`📦 Batch ${index + 1}: ${keysWithId}/${batchKeys.length} keys have lokalise_key_id from frontend`);
}
index += 1;
batches.push({
id: `${jobId}-batch-${index}`,
index,
type: item.type,
keyNames: slice,
keys: batchKeys, // CRITICAL: Include keys array with lokalise_key_id
totalKeys: slice.length,
status: 'pending',
attempts: 0,
});
totalKeys += slice.length;
}
});
return { batches, totalKeys };
};
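// Example: with batchSize = 1000, one selection item containing 2,500 unique key
// names produces three batches (1000 / 1000 / 500 keys) with ids
// '<jobId>-batch-1' through '<jobId>-batch-3'.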
const normalizeSelectionChunk = (selection = []) =>
selection
.filter((item) => item && item.type && Array.isArray(item.keyNames) && item.keyNames.length > 0)
.map((item) => {
const uniqueKeyNames = Array.from(
new Set(
item.keyNames
.map((key) => (typeof key === 'string' ? key.trim() : null))
.filter((key) => typeof key === 'string' && key.length > 0)
)
);
return {
type: item.type,
keyNames: uniqueKeyNames,
};
})
.filter((item) => item.keyNames.length > 0);
const mergeSelectionBuffer = (buffer = {}, selection = []) => {
const normalized = normalizeSelectionChunk(selection);
if (normalized.length === 0) {
return buffer;
}
const nextBuffer = { ...buffer };
normalized.forEach(({ type, keyNames }) => {
const existingSet = new Set(Array.isArray(nextBuffer[type]) ? nextBuffer[type] : []);
keyNames.forEach((keyName) => existingSet.add(keyName));
nextBuffer[type] = Array.from(existingSet);
});
return nextBuffer;
};
const selectionBufferToArray = (buffer = {}) =>
Object.entries(buffer)
.map(([type, keyNames]) => ({
type,
keyNames: Array.isArray(keyNames) ? keyNames.slice() : [],
}))
.filter((item) => item.type && Array.isArray(item.keyNames) && item.keyNames.length > 0);
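// Example round-trip (content type illustrative): chunks merge and dedupe, then
// flatten back into a selection array for finalization.
//   let buffer = {};
//   buffer = mergeSelectionBuffer(buffer, [{ type: 'api::page.page', keyNames: ['a', 'b'] }]);
//   buffer = mergeSelectionBuffer(buffer, [{ type: 'api::page.page', keyNames: ['b', 'c'] }]);
//   selectionBufferToArray(buffer);
//   // -> [{ type: 'api::page.page', keyNames: ['a', 'b', 'c'] }]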
module.exports = {
async listJobs() {
const jobs = await readJobs();
return Object.values(jobs)
.sort((a, b) => new Date(b.createdAt) - new Date(a.createdAt))
.map((job) => sanitizeJob(job));
},
async getJob(jobId, options = {}) {
if (!jobId) {
return null;
}
const jobs = await readJobs();
const job = jobs[jobId];
if (!job) {
return null;
}
return sanitizeJob(job, options);
},
async createEmptyJob({ tag = null, locale = null } = {}) {
const jobs = await readJobs();
const jobId = generateId();
const now = nowIso();
const job = {
id: jobId,
status: 'initializing',
tag,
locale,
totalBatches: 0,
totalKeys: 0,
processedKeys: 0,
completedBatches: 0,
failedBatches: 0,
cancelledBatches: 0,
createdAt: now,
updatedAt: now,
lastError: null,
selectionSummary: {},
selectionBuffer: {},
batches: [],
};
jobs[jobId] = job;
await writeJobs(jobs);
return sanitizeJob(job);
},
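  // Chunked creation flow, as a sketch assuming a plugin controller (the registered
  // service name 'jobs' is an assumption, not confirmed by this file):
  //
  //   const jobsService = strapi.plugin('lokalise-sync').service('jobs');
  //   const { id } = await jobsService.createEmptyJob({ tag: 'release', locale: 'de' });
  //   for (const chunk of selectionChunks) {
  //     await jobsService.appendSelection(id, chunk);
  //   }
  //   const job = await jobsService.finalizeJob(id);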
async appendSelection(jobId, selection = []) {
if (!jobId) {
throw new Error('jobId is required');
}
if (!Array.isArray(selection) || selection.length === 0) {
throw new Error('Selection chunk is required');
}
const jobs = await readJobs();
const job = jobs[jobId];
if (!job) {
throw new Error(`Job ${jobId} not found`);
}
if (job.status !== 'initializing' && job.status !== 'pending') {
throw new Error(`Job ${jobId} cannot accept additional selection in status "${job.status}"`);
}
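    // finalizeJob deletes selectionBuffer once batches are built, so a missing
    // buffer means this job is past the selection phase.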
if (!job.selectionBuffer && (!job.batches || job.batches.length > 0)) {
throw new Error(`Job ${jobId} is already finalized`);
}
job.selectionBuffer = mergeSelectionBuffer(job.selectionBuffer || {}, selection);
const bufferSelection = selectionBufferToArray(job.selectionBuffer);
job.selectionSummary = computeSelectionSummary(bufferSelection);
job.totalKeys = bufferSelection.reduce(
(sum, item) => sum + (Array.isArray(item.keyNames) ? item.keyNames.length : 0),
0
);
job.updatedAt = nowIso();
await writeJobs(jobs);
return sanitizeJob(job);
},
async finalizeJob(jobId, { batchSize = DEFAULT_BATCH_SIZE } = {}) {
if (!jobId) {
throw new Error('jobId is required');
}
const jobs = await readJobs();
const job = jobs[jobId];
if (!job) {
throw new Error(`Job ${jobId} not found`);
}
if (job.status === 'cancelled') {
throw new Error(`Job ${jobId} is cancelled`);
}
if (!job.selectionBuffer && job.batches && job.batches.length > 0) {
return sanitizeJob(job);
}
const selectionArray = selectionBufferToArray(job.selectionBuffer || {});
if (selectionArray.length === 0) {
throw new Error('No selection has been provided for this job');
}
// OPTIMIZATION: For small jobs (≤25 keys), use larger batch size to avoid unnecessary batching
// This allows the fast path in syncBatch to handle the entire job as one batch
const totalKeysInSelection = selectionArray.reduce(
(sum, item) => sum + (Array.isArray(item.keyNames) ? item.keyNames.length : 0),
0
);
    const isSmallJob = totalKeysInSelection <= SMALL_JOB_THRESHOLD;
    const effectiveBatchSize = isSmallJob ? SMALL_JOB_BATCH_SIZE : batchSize;
    if (isSmallJob) {
      strapi.log.info(`⚡ Small job detected (${totalKeysInSelection} keys) - using fast batch size (${effectiveBatchSize}) for immediate processing`);
    }
const { batches, totalKeys } = flattenSelectionToBatches(jobId, selectionArray, effectiveBatchSize);
if (batches.length === 0) {
throw new Error('Selection does not contain any keys to sync');
}
job.batches = batches;
job.totalBatches = batches.length;
job.totalKeys = totalKeys;
job.processedKeys = 0;
job.completedBatches = 0;
job.failedBatches = 0;
job.cancelledBatches = 0;
job.selectionSummary = computeSelectionSummary(selectionArray);
job.status = 'pending';
job.lastError = null;
delete job.selectionBuffer;
job.updatedAt = nowIso();
await writeJobs(jobs);
    // OPTIMIZATION: For small jobs (≤25 keys), process all batches immediately in
    // the background, eliminating the delay of waiting for the frontend to trigger
    // each batch. The job is already persisted above; process.nextTick only defers
    // processing until after the finalized job has been returned to the caller.
    if (isSmallJob && batches.length > 0) {
      strapi.log.info(`⚡ Auto-processing ${batches.length} batch(es) for small job (${totalKeysInSelection} keys) - no wait for frontend trigger`);
      // Process in background (non-blocking). processBatch marks each batch as
      // processing, which prevents duplicate triggers from the frontend.
      process.nextTick(async () => {
        for (const batch of batches) {
          try {
            await this.processBatch(jobId, batch.id);
          } catch (err) {
            // Log but continue - the job status will reflect the error
            strapi.log.error(`[BATCH ${batch.index}] Auto-process failed: ${err.message || String(err)}`);
          }
        }
      });
    }
return sanitizeJob(job);
},
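  // One-shot creation wraps the chunked flow above for selections that fit in a
  // single request. Minimal sketch (selection values illustrative):
  //
  //   const job = await jobsService.createJob({
  //     selection: [{ type: 'api::article.article', keyNames: ['title', 'summary'] }],
  //     tag: 'release-1.2',
  //   });
  //   // job.batches is populated; non-small jobs then wait for processBatch triggers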
async createJob({ selection = [], tag = null, locale = null, batchSize = DEFAULT_BATCH_SIZE } = {}) {
if (!Array.isArray(selection) || selection.length === 0) {
throw new Error('Selection array is required to create a job');
}
    // OPTIMIZATION: Detect small jobs up front. finalizeJob re-derives the same
    // effective batch size, so this only makes the intent explicit at creation time.
const totalKeys = selection.reduce(
(sum, item) => sum + (Array.isArray(item.keyNames) ? item.keyNames.length : 0),
0
);
const effectiveBatchSize = totalKeys <= SMALL_JOB_THRESHOLD
? SMALL_JOB_BATCH_SIZE
: batchSize;
const job = await this.createEmptyJob({ tag, locale });
await this.appendSelection(job.id, selection);
return this.finalizeJob(job.id, { batchSize: effectiveBatchSize });
},
async cancelJob(jobId) {
if (!jobId) {
throw new Error('jobId is required');
}
const jobs = await readJobs();
const job = jobs[jobId];
if (!job) {
throw new Error(`Job ${jobId} not found`);
}
job.status = 'cancelled';
job.updatedAt = nowIso();
if (Array.isArray(job.batches) && job.batches.length > 0) {
job.cancelledBatches = job.batches.filter((batch) => batch.status !== 'completed').length;
job.batches = job.batches.map((batch) => {
if (batch.status === 'completed') {
return batch;
}
return {
...batch,
status: 'cancelled',
finishedAt: batch.finishedAt || nowIso(),
};
});
} else {
job.cancelledBatches = 0;
}
await writeJobs(jobs);
return sanitizeJob(job);
},
async clearJob(jobId) {
if (!jobId) {
throw new Error('jobId is required');
}
const jobs = await readJobs();
if (!jobs[jobId]) {
return null;
}
const removed = jobs[jobId];
delete jobs[jobId];
await writeJobs(jobs);
return sanitizeJob(removed);
},
async resetBatch(jobId, batchId) {
if (!jobId || !batchId) {
throw new Error('jobId and batchId are required');
}
const jobs = await readJobs();
const job = jobs[jobId];
if (!job) {
throw new Error(`Job ${jobId} not found`);
}
if (!Array.isArray(job.batches) || job.batches.length === 0) {
throw new Error(`Job ${jobId} is not ready for processing`);
}
const batch = job.batches.find((item) => item.id === batchId);
if (!batch) {
throw new Error(`Batch ${batchId} not found in job ${jobId}`);
}
if (batch.status === 'completed') {
return sanitizeJob(job);
}
batch.status = 'pending';
batch.error = null;
batch.startedAt = null;
batch.finishedAt = null;
job.failedBatches = job.batches.filter((item) => item.status === 'failed').length;
job.status = 'pending';
job.lastError = null;
job.updatedAt = nowIso();
await writeJobs(jobs);
return sanitizeJob(job);
},
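  // Retry sketch for a failed batch (ids illustrative): reset it to 'pending',
  // then trigger it again.
  //
  //   await jobsService.resetBatch(jobId, `${jobId}-batch-2`);
  //   await jobsService.processBatch(jobId, `${jobId}-batch-2`);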
async markBatchProcessing(job, batch) {
batch.status = 'processing';
batch.attempts = (batch.attempts || 0) + 1;
batch.startedAt = nowIso();
batch.finishedAt = null;
batch.error = null;
job.status = 'running';
job.updatedAt = nowIso();
},
async markBatchComplete(job, batch, processedKeys, extra = {}) {
batch.status = 'completed';
batch.finishedAt = nowIso();
batch.noOp = !!extra.noOp;
if (extra.noOp) {
batch.statusMessage = 'Already up-to-date in Lokalise';
} else if (extra.totalUpdated > 0) {
batch.statusMessage = `${extra.totalUpdated} key(s) updated in Lokalise`;
} else if (extra.totalSynced > 0) {
batch.statusMessage = `${extra.totalSynced} key(s) sent to Lokalise`;
} else {
delete batch.statusMessage;
}
job.processedKeys = (job.processedKeys || 0) + (processedKeys || batch.totalKeys || 0);
job.completedBatches = (job.completedBatches || 0) + 1;
    job.failedBatches = job.batches.filter((item) => item.status === 'failed').length;
    job.updatedAt = nowIso();
    const hasPending = job.batches.some((item) => item.status === 'pending' || item.status === 'processing');
    const hasFailed = job.failedBatches > 0;
    if (!hasFailed) {
      job.lastError = null;
    }
    // Only report 'completed' when every batch succeeded; lingering failures keep
    // the job 'paused' so resetBatch can retry them.
    job.status = hasPending ? 'running' : hasFailed ? 'paused' : 'completed';
},
async markBatchFailed(job, batch, error) {
batch.status = 'failed';
batch.finishedAt = nowIso();
batch.error = {
message: error?.message || 'Batch failed',
occurredAt: nowIso(),
};
job.failedBatches = (job.failedBatches || 0) + 1;
job.updatedAt = nowIso();
job.status = 'paused';
job.lastError = {
message: error?.message || 'Batch failed',
occurredAt: nowIso(),
};
},
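  // Frontend-driven processing, as a sketch: after finalizeJob, larger jobs wait
  // for each pending batch to be triggered individually, e.g.
  //
  //   for (const batch of job.batches) {
  //     if (batch.status === 'pending') {
  //       await jobsService.processBatch(job.id, batch.id);
  //     }
  //   }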
async processBatch(jobId, batchId, options = {}) {
if (!jobId || !batchId) {
throw new Error('jobId and batchId are required');
}
const jobs = await readJobs();
const job = jobs[jobId];
if (!job) {
throw new Error(`Job ${jobId} not found`);
}
if (job.status === 'cancelled') {
throw new Error(`Job ${jobId} is cancelled`);
}
const batch = job.batches.find((item) => item.id === batchId);
if (!batch) {
throw new Error(`Batch ${batchId} not found in job ${jobId}`);
}
if (batch.status === 'completed') {
return sanitizeJob(job);
}
await this.markBatchProcessing(job, batch);
await writeJobs(jobs);
try {
const lokaliseService = strapi.plugin('lokalise-sync').service('lokalise-sync');
const batchSelection = {
type: batch.type,
keyNames: batch.keyNames,
keys: Array.isArray(batch.keys) ? batch.keys : undefined,
};
const cancelCheck = async () => {
const latest = await this.getJob(jobId);
return latest && latest.status === 'cancelled';
};
const syncResult = await lokaliseService.syncSelection(
[batchSelection],
{},
{ tag: job.tag, locale: job.locale, jobId, batchId, batchIndex: batch.index, cancelCheck, ...options }
);
const summary = Array.isArray(syncResult?.result) ? syncResult.result : [];
const totalSynced = summary.reduce(
(sum, entry) => sum + (Number(entry?.totalSynced) || 0),
0
);
const totalUpdated = summary.reduce(
(sum, entry) => sum + (Number(entry?.totalUpdated) || 0),
0
);
const noopBatch =
summary.length > 0 &&
summary.every((entry) => entry?.noop === true || ((Number(entry?.totalSynced) || 0) === 0 && (Number(entry?.totalUpdated) || 0) === 0));
await this.markBatchComplete(job, batch, undefined, {
noOp: noopBatch,
totalSynced,
totalUpdated,
});
await writeJobs(jobs);
return sanitizeJob(job);
} catch (err) {
// Check if job was cancelled - don't mark as failed, just stop processing
if (err.message === 'JOB_CANCELLED') {
strapi.log.info(`[BATCH ${batch.index}] Job cancelled - stopping batch processing`);
// Don't mark batch as failed, just return current job state
const latest = await this.getJob(jobId);
return latest ? sanitizeJob(latest) : sanitizeJob(job);
}
strapi.log.error(`[BATCH ${batch.index}] ERROR: ${err.message || String(err)}`);
if (err.stack) {
strapi.log.error(`[BATCH ${batch.index}] ERROR STACK: ${err.stack}`);
}
await this.markBatchFailed(job, batch, err);
await writeJobs(jobs);
throw err;
}
},
};