payloadcms-import-export-plugin
A Payload CMS plugin for importing and exporting collection data in CSV and JSON formats, with field mapping, duplicate handling, and batch processing.
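Below is the compiled source of the plugin's createExport handler. For orientation, here is a minimal, hypothetical sketch of calling it from a custom Payload endpoint; the import path, endpoint shape, and collection slug are assumptions for illustration, not documented API, while the input shape mirrors the destructuring at the top of the handler.

// Hypothetical usage sketch - names and import path are assumptions, not from the package docs.
import { createExport } from 'payloadcms-import-export-plugin';

export const exportPostsEndpoint = {
    path: '/export-posts',
    method: 'post',
    // Payload custom endpoint handler: returns the streaming Response produced by createExport
    handler: async (req) => {
        return createExport({
            download: true, // stream the file back instead of storing it
            input: {
                collectionSlug: 'posts', // assumed collection slug
                format: 'csv', // 'csv' or 'json'
                drafts: 'yes', // include draft documents
                fields: ['title', 'slug'], // optional subset of fields to export
                user: req.user,
            },
            req,
        });
    },
};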
/* eslint-disable perfectionist/sort-objects */
import { stringify } from 'csv-stringify/sync';
import { APIError } from 'payload';
import { Readable } from 'stream';
import { flattenObject } from './flattenObject.js';
import { getCustomFieldFunctions } from './getCustomFieldFunctions.js';
import { getFilename } from './getFilename.js';
import { getSelect } from './getSelect.js';
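// Builds a CSV or JSON export of a collection's documents: when `download` is set it
// streams the file back as the HTTP response; otherwise it buffers the output and
// stores it as an upload on the exports collection.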
export const createExport = async (args)=>{
    const {
        download,
        input: {
            id,
            name: nameArg,
            collectionSlug,
            debug = true,
            drafts,
            exportsCollection,
            fields,
            format,
            locale: localeInput,
            sort,
            user,
            where
        },
        req: { locale: localeArg, payload },
        req
    } = args;
if (debug) {
req.payload.logger.info({
message: 'Starting export process with args:',
collectionSlug,
drafts,
fields,
format
});
}
const locale = localeInput ?? localeArg;
const collectionConfig = payload.config.collections.find(({ slug })=>slug === collectionSlug);
if (!collectionConfig) {
throw new APIError(`Collection with slug ${collectionSlug} not found`);
}
const name = `${nameArg ?? `${getFilename()}-${collectionSlug}`}.${format}`;
const isCSV = format === 'csv';
const select = Array.isArray(fields) && fields.length > 0 ? getSelect(fields) : undefined;
if (debug) {
req.payload.logger.info({
message: 'Export configuration:',
name,
isCSV,
locale
});
}
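    // Shared payload.find() arguments. `page` is a placeholder that each loop below
    // overrides per batch; overrideAccess: false keeps the requesting user's access
    // control in force for the query.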
const findArgs = {
collection: collectionSlug,
depth: 1,
draft: drafts === 'yes',
limit: 100,
locale,
overrideAccess: false,
page: 0,
select,
sort,
user,
where
};
if (debug) {
req.payload.logger.info({
message: 'Find arguments:',
findArgs
});
}
    if (debug) {
        req.payload.logger.info({
            message: 'Flattened collection fields:',
            fields: collectionConfig.flattenedFields
        });
    }
const toCSVFunctions = getCustomFieldFunctions({
fields: collectionConfig.flattenedFields
});
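    // Disabled export fields are configured in dot notation (e.g. 'meta.title'), but
    // flattened row keys use underscores, so convert before filtering rows.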
const disabledFieldsDot = collectionConfig.admin?.custom?.['plugin-import-export']?.disabledFields ?? [];
const disabledFields = disabledFieldsDot.map((f)=>f.replace(/\./g, '_'));
const filterDisabled = (row)=>{
for (const key of disabledFields){
delete row[key];
}
return row;
};
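    // Download path: stream the export back as the HTTP response instead of storing a file.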
if (download) {
if (debug) {
req.payload.logger.info('Pre-scanning all columns before streaming');
}
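        // First pass: page through every matching doc to discover the full set of
        // flattened column keys, so all streamed rows can share one stable header.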
const allColumnsSet = new Set();
const allColumns = [];
let scanPage = 1;
let hasMore = true;
while(hasMore){
const result = await payload.find({
...findArgs,
page: scanPage
});
result.docs.forEach((doc)=>{
const flat = filterDisabled(flattenObject({
doc,
fields,
toCSVFunctions
}));
Object.keys(flat).forEach((key)=>{
if (!allColumnsSet.has(key)) {
allColumnsSet.add(key);
allColumns.push(key);
}
});
});
hasMore = result.hasNextPage;
scanPage += 1;
}
if (debug) {
req.payload.logger.info(`Discovered ${allColumns.length} columns`);
}
const encoder = new TextEncoder();
let isFirstBatch = true;
let streamPage = 1;
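        // Second pass: a pull-based stream that fetches one page per read() call and
        // emits it as a CSV chunk; the header row is written only with the first batch.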
const stream = new Readable({
async read () {
const result = await payload.find({
...findArgs,
page: streamPage
});
if (debug) {
req.payload.logger.info(`Streaming batch ${streamPage} with ${result.docs.length} docs`);
}
if (result.docs.length === 0) {
this.push(null);
return;
}
const batchRows = result.docs.map((doc)=>filterDisabled(flattenObject({
doc,
fields,
toCSVFunctions
})));
const paddedRows = batchRows.map((row)=>{
const fullRow = {};
for (const col of allColumns){
fullRow[col] = row[col] ?? '';
}
return fullRow;
});
const csvString = stringify(paddedRows, {
header: isFirstBatch,
columns: allColumns
});
this.push(encoder.encode(csvString));
isFirstBatch = false;
streamPage += 1;
if (!result.hasNextPage) {
if (debug) {
req.payload.logger.info('Stream complete - no more pages');
}
this.push(null); // End the stream
}
}
});
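        // Note: this streaming branch always emits CSV chunks; only the Content-Type
        // header below varies with the requested format.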
return new Response(stream, {
headers: {
'Content-Disposition': `attachment; filename="${name}"`,
'Content-Type': isCSV ? 'text/csv' : 'application/json'
}
});
}
// Non-download path (buffered export)
if (debug) {
req.payload.logger.info('Starting file generation');
}
const outputData = [];
const rows = [];
const columnsSet = new Set();
const columns = [];
let page = 1;
let hasNextPage = true;
while(hasNextPage){
const result = await payload.find({
...findArgs,
page
});
if (debug) {
            req.payload.logger.info(`Processing batch ${page} with ${result.docs.length} documents`);
}
if (isCSV) {
const batchRows = result.docs.map((doc)=>filterDisabled(flattenObject({
doc,
fields,
toCSVFunctions
})));
// Track discovered column keys
batchRows.forEach((row)=>{
Object.keys(row).forEach((key)=>{
if (!columnsSet.has(key)) {
columnsSet.add(key);
columns.push(key);
}
});
});
rows.push(...batchRows);
} else {
const jsonInput = result.docs.map((doc)=>JSON.stringify(doc));
outputData.push(jsonInput.join(',\n'));
}
hasNextPage = result.hasNextPage;
page += 1;
}
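    // Pad every row to the full discovered column set so each CSV line has the same
    // shape, then stringify once with a header row.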
if (isCSV) {
const paddedRows = rows.map((row)=>{
const fullRow = {};
for (const col of columns){
fullRow[col] = row[col] ?? '';
}
return fullRow;
});
outputData.push(stringify(paddedRows, {
header: true,
columns
}));
}
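    // JSON output is wrapped in a top-level array; CSV output is concatenated as-is.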
const buffer = Buffer.from(format === 'json' ? `[${outputData.join(',')}]` : outputData.join(''));
if (debug) {
req.payload.logger.info(`${format} file generation complete`);
}
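    // With no id, attach the file to the request so Payload's upload handling can store
    // it when the export document is created; otherwise update the existing export doc.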
if (!id) {
if (debug) {
req.payload.logger.info('Creating new export file');
}
req.file = {
name,
data: buffer,
mimetype: isCSV ? 'text/csv' : 'application/json',
size: buffer.length
};
} else {
if (debug) {
req.payload.logger.info(`Updating existing export with id: ${id}`);
}
await req.payload.update({
id,
collection: exportsCollection,
data: {},
file: {
name,
data: buffer,
mimetype: isCSV ? 'text/csv' : 'application/json',
size: buffer.length
},
user
});
}
if (debug) {
req.payload.logger.info('Export process completed successfully');
}
};
//# sourceMappingURL=createExport.js.map