sfdx-hardis
Swiss-army-knife toolbox for Salesforce. Allows you to define a complete CI/CD pipeline, orchestrate base commands, and assist users with interactive wizards.
// External Libraries and Node.js Modules
import fs from 'fs-extra';
import * as path from 'path';
import c from 'chalk';
import open from 'open';
import * as split from 'split';
import { PromisePool } from '@supercharge/promise-pool';
// Salesforce Specific and Other Specific Libraries
import { SfError } from '@salesforce/core';
import Papa from 'papaparse';
import ExcelJS from 'exceljs';
// Project Specific Utilities
import { getCurrentGitBranch, isCI, isGitRepo, uxLog } from './index.js';
import { bulkQuery, soqlQuery, bulkQueryByChunks } from './apiUtils.js';
import { prompts } from './prompts.js';
import { getApiVersion, getReportDirectory } from '../../config/index.js';
import { WebSocketClient } from '../websocketClient.js';
import { FileDownloader } from './fileDownloader.js';
export const filesFolderRoot = path.join('.', 'scripts', 'files');
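// Each files workspace is a subfolder of scripts/files holding an export.json configuration
// and an export/ folder where downloaded files land, one subfolder per parent record.
// Illustrative layout (the "OpportunitiesPDF" workspace name is just an example):
//
//   scripts/files/
//     OpportunitiesPDF/
//       export.json
//       export/
//         <folder named from outputFolderNameField, one per parent record>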
export class FilesExporter {
filesPath;
conn;
pollTimeout;
recordsChunkSize;
startChunkNumber;
parentRecordsChunkSize;
commandThis;
dtl = null; // export config
exportedFilesFolder = '';
recordsChunk = [];
chunksNumber = 1;
recordsChunkQueue = [];
recordsChunkQueueRunning = false;
queueInterval;
bulkApiRecordsEnded = false;
recordChunksNumber = 0;
totalSoqlRequests = 0;
totalParentRecords = 0;
parentRecordsWithFiles = 0;
recordsIgnored = 0;
filesDownloaded = 0;
filesErrors = 0;
filesIgnoredType = 0;
filesIgnoredExisting = 0;
apiUsedBefore = 0;
apiLimit = 0;
constructor(filesPath, conn, options, commandThis) {
this.filesPath = filesPath;
this.conn = conn;
this.pollTimeout = options?.pollTimeout || 600000;
this.recordsChunkSize = options?.recordsChunkSize || 1000;
this.parentRecordsChunkSize = 100000;
this.startChunkNumber = options?.startChunkNumber || 0;
this.commandThis = commandThis;
if (options?.exportConfig) {
this.dtl = options.exportConfig;
}
}
async processExport() {
// Get config
if (this.dtl === null) {
this.dtl = await getFilesWorkspaceDetail(this.filesPath);
}
uxLog(this.commandThis, c.cyan(`Exporting files from ${c.green(this.dtl.full_label)} ...`));
uxLog(this.commandThis, c.italic(c.grey(this.dtl.description)));
// Make sure export folder for files is existing
this.exportedFilesFolder = path.join(this.filesPath, 'export');
await fs.ensureDir(this.exportedFilesFolder);
await this.calculateApiConsumption();
this.startQueue();
await this.processParentRecords();
await this.queueCompleted();
return await this.buildResult();
}
// Calculate API consumption
async calculateApiConsumption() {
const countSoqlQuery = this.dtl.soqlQuery.replace(/SELECT (.*) FROM/gi, 'SELECT COUNT() FROM');
this.totalSoqlRequests++;
const countSoqlQueryRes = await soqlQuery(countSoqlQuery, this.conn);
this.chunksNumber = Math.ceil(countSoqlQueryRes.totalSize / this.recordsChunkSize);
const estimatedApiCalls = this.chunksNumber * 2 + 1;
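// conn.limitInfo was refreshed by the COUNT() query above, so subtract that call to approximate API usage before this export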
this.apiUsedBefore = this.conn?.limitInfo?.apiUsage?.used
? this.conn.limitInfo.apiUsage.used - 1
: this.apiUsedBefore;
this.apiLimit = this.conn?.limitInfo?.apiUsage?.limit;
// Check if there are enough API calls available
if (this.apiLimit - this.apiUsedBefore < estimatedApiCalls + 1000) {
throw new SfError(`You don't have enough API calls available (${c.bold(this.apiLimit - this.apiUsedBefore)}) to perform this export that could consume ${c.bold(estimatedApiCalls)} API calls`);
}
// Request user confirmation
if (!isCI) {
const warningMessage = c.cyanBright(`This export of files could run on ${c.bold(c.yellow(countSoqlQueryRes.totalSize))} records, in ${c.bold(c.yellow(this.chunksNumber))} chunks, and consume up to ${c.bold(c.yellow(estimatedApiCalls))} API calls on the ${c.bold(c.yellow(this.apiLimit - this.apiUsedBefore))} remaining API calls. Do you want to proceed?`);
const promptRes = await prompts({ type: 'confirm', message: warningMessage });
if (promptRes.value !== true) {
throw new SfError('Command cancelled by user');
}
if (this.startChunkNumber === 0) {
uxLog(this, c.yellow(c.italic('Use the --startchunknumber command line argument if you do not want to start from the first chunk')));
}
}
}
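// Worked example of the estimate above (illustrative numbers): 2500 parent records with a
// records chunk size of 1000 gives ceil(2500 / 1000) = 3 chunks, so 3 * 2 + 1 = 7 estimated
// API calls (roughly two queries per chunk plus the initial COUNT() query). Per-file download
// requests are not part of this estimate.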
// Process queued record chunks one by one, without waiting for all records to be fetched before starting
startQueue() {
this.queueInterval = setInterval(async () => {
if (this.recordsChunkQueueRunning === false && this.recordsChunkQueue.length > 0) {
this.recordsChunkQueueRunning = true;
const recordChunk = this.recordsChunkQueue.shift();
await this.processRecordsChunk(recordChunk);
this.recordsChunkQueueRunning = false;
// Manage last chunk
}
else if (this.bulkApiRecordsEnded === true &&
this.recordsChunkQueue.length === 0 &&
this.recordsChunk.length > 0) {
const recordsToProcess = [...this.recordsChunk];
this.recordsChunk = [];
this.recordsChunkQueue.push(recordsToProcess);
}
}, 1000);
}
// Wait for the queue to be completed
async queueCompleted() {
await new Promise((resolve) => {
const completeCheckInterval = setInterval(async () => {
if (this.bulkApiRecordsEnded === true &&
this.recordsChunkQueueRunning === false &&
this.recordsChunkQueue.length === 0 &&
this.recordsChunk.length === 0) {
clearInterval(completeCheckInterval);
resolve(true);
}
if (globalThis.sfdxHardisFatalError === true) {
uxLog(this, c.red('Fatal error while processing chunks queue'));
process.exit(1);
}
}, 1000);
});
clearInterval(this.queueInterval);
this.queueInterval = null;
}
async processParentRecords() {
// Query parent records using SOQL defined in export.json file
this.totalSoqlRequests++;
this.conn.bulk.pollTimeout = this.pollTimeout || 600000; // Increase timeout in case we are on a bad internet connection or if the bulk api batch is queued
// Use bulkQueryByChunks to handle large queries
const queryRes = await bulkQueryByChunks(this.dtl.soqlQuery, this.conn, this.parentRecordsChunkSize);
for (const record of queryRes.records) {
this.totalParentRecords++;
const parentRecordFolderForFiles = path.resolve(path.join(this.exportedFilesFolder, record[this.dtl.outputFolderNameField] || record.Id));
if (this.dtl.overwriteParentRecords !== true && fs.existsSync(parentRecordFolderForFiles)) {
uxLog(this, c.grey(`Skipped record - ${record[this.dtl.outputFolderNameField] || record.Id} - Record files already downloaded`));
this.recordsIgnored++;
continue;
}
await this.addToRecordsChunk(record);
}
this.bulkApiRecordsEnded = true;
}
async addToRecordsChunk(record) {
this.recordsChunk.push(record);
// If the chunk size is reached, process the chunk of records
if (this.recordsChunk.length === this.recordsChunkSize) {
const recordsToProcess = [...this.recordsChunk];
this.recordsChunk = [];
this.recordsChunkQueue.push(recordsToProcess);
}
}
async processRecordsChunk(records) {
this.recordChunksNumber++;
if (this.recordChunksNumber < this.startChunkNumber) {
uxLog(this, c.cyan(`Skip parent records chunk #${this.recordChunksNumber} because it is lower than ${this.startChunkNumber}`));
return;
}
uxLog(this, c.cyan(`Processing parent records chunk #${this.recordChunksNumber} of ${this.chunksNumber} (${records.length} records) ...`));
// Process records in batches of 200 for Attachments and 1000 for ContentVersions to avoid hitting the SOQL query limit
const attachmentBatchSize = 200;
const contentVersionBatchSize = 1000;
for (let i = 0; i < records.length; i += attachmentBatchSize) {
const batch = records.slice(i, i + attachmentBatchSize);
// Request all Attachment related to all records of the batch using REST API
const parentIdIn = batch.map((record) => `'${record.Id}'`).join(',');
const attachmentQuery = `SELECT Id, Name, ContentType, ParentId FROM Attachment WHERE ParentId IN (${parentIdIn})`;
this.totalSoqlRequests++;
const attachments = await this.conn.query(attachmentQuery);
if (attachments.records.length > 0) {
// Download attachments using REST API
await PromisePool.withConcurrency(5)
.for(attachments.records)
.process(async (attachment) => {
try {
await this.downloadAttachmentFile(attachment, batch);
}
catch (e) {
this.filesErrors++;
uxLog(this, c.red('Download file error: ' + attachment.Name + '\n' + e));
}
});
}
else {
uxLog(this, c.grey('No Attachments found for the parent records in this batch'));
}
}
for (let i = 0; i < records.length; i += contentVersionBatchSize) {
const batch = records.slice(i, i + contentVersionBatchSize);
// Request all ContentDocumentLink related to all records of the batch
const linkedEntityIdIn = batch.map((record) => `'${record.Id}'`).join(',');
const linkedEntityInQuery = `SELECT ContentDocumentId,LinkedEntityId FROM ContentDocumentLink WHERE LinkedEntityId IN (${linkedEntityIdIn})`;
this.totalSoqlRequests++;
const contentDocumentLinks = await bulkQueryByChunks(linkedEntityInQuery, this.conn, this.parentRecordsChunkSize);
if (contentDocumentLinks.records.length > 0) {
// Retrieve all ContentVersion related to ContentDocumentLink
const contentDocIdIn = contentDocumentLinks.records.map((link) => `'${link.ContentDocumentId}'`);
// Loop on contentDocIdIn by contentVersionBatchSize
for (let j = 0; j < contentDocIdIn.length; j += contentVersionBatchSize) {
const contentDocIdBatch = contentDocIdIn.slice(j, j + contentVersionBatchSize).join(',');
// Log the progression of contentDocIdBatch
uxLog(this, c.cyan(`Processing ContentDocumentId chunk #${Math.ceil((j + 1) / contentVersionBatchSize)} of ${Math.ceil(contentDocIdIn.length / contentVersionBatchSize)}`));
// Request all ContentVersion related to all records of the batch
const contentVersionSoql = `SELECT Id,ContentDocumentId,Description,FileExtension,FileType,PathOnClient,Title FROM ContentVersion WHERE ContentDocumentId IN (${contentDocIdBatch}) AND IsLatest = true`;
this.totalSoqlRequests++;
const contentVersions = await bulkQueryByChunks(contentVersionSoql, this.conn, this.parentRecordsChunkSize);
// A ContentDocument record can be linked to several other records, even of the same type
// (for example, the same file can be linked to several EmailMessage records).
// Because of this, fetching the latest ContentVersion records can return fewer results than
// there are ContentDocumentLink records.
// To handle this, we build ContentVersion / ContentDocumentLink pairs, so the ContentVersion
// is downloaded once for each linked record.
const versionsAndLinks = [];
contentVersions.records.forEach((contentVersion) => {
contentDocumentLinks.records.forEach((contentDocumentLink) => {
if (contentDocumentLink.ContentDocumentId === contentVersion.ContentDocumentId) {
versionsAndLinks.push({
contentVersion: contentVersion,
contentDocumentLink: contentDocumentLink,
});
}
});
});
// Download files
await PromisePool.withConcurrency(5)
.for(versionsAndLinks)
.process(async (versionAndLink) => {
try {
await this.downloadContentVersionFile(versionAndLink.contentVersion, batch, versionAndLink.contentDocumentLink);
}
catch (e) {
this.filesErrors++;
uxLog(this, c.red('Download file error: ' + versionAndLink.contentVersion.Title + '\n' + e));
}
});
}
}
else {
uxLog(this, c.grey('No ContentDocumentLinks found for the parent records in this batch'));
}
}
}
async downloadFile(fetchUrl, outputFile) {
const downloadResult = await new FileDownloader(fetchUrl, { conn: this.conn, outputFile: outputFile, label: 'file' }).download();
if (downloadResult.success) {
this.filesDownloaded++;
}
else {
this.filesErrors++;
}
}
async downloadAttachmentFile(attachment, records) {
// Retrieve initial record to build output files folder name
const parentAttachment = records.filter((record) => record.Id === attachment.ParentId)[0];
// Build the record output files folder (characters forbidden in file names are replaced by '-')
const attachmentParentFolderName = (parentAttachment[this.dtl.outputFolderNameField] || parentAttachment.Id).replace(/[/\\?%*:|"<>]/g, '-');
const parentRecordFolderForFiles = path.resolve(path.join(this.exportedFilesFolder, attachmentParentFolderName));
// Define name of the file
const outputFile = path.join(parentRecordFolderForFiles, attachment.Name.replace(/[/\\?%*:|"<>]/g, '-'));
// Create directory if not existing
await fs.ensureDir(parentRecordFolderForFiles);
// Download file locally
const fetchUrl = `${this.conn.instanceUrl}/services/data/v${getApiVersion()}/sobjects/Attachment/${attachment.Id}/Body`;
await this.downloadFile(fetchUrl, outputFile);
}
async downloadContentVersionFile(contentVersion, records, contentDocumentLink) {
// Retrieve initial record to build output files folder name
const parentRecord = records.filter((record) => record.Id === contentDocumentLink.LinkedEntityId)[0];
// Build the record output files folder (characters forbidden in file names are replaced by '-')
const parentFolderName = (parentRecord[this.dtl.outputFolderNameField] || parentRecord.Id).replace(/[/\\?%*:|"<>]/g, '-');
const parentRecordFolderForFiles = path.resolve(path.join(this.exportedFilesFolder, parentFolderName));
// Define name of the file
let outputFile =
// Id
this.dtl?.outputFileNameFormat === 'id'
? path.join(parentRecordFolderForFiles, contentVersion.Id)
: // Title + Id
this.dtl?.outputFileNameFormat === 'title_id'
? path.join(parentRecordFolderForFiles, `${contentVersion.Title.replace(/[/\\?%*:|"<>]/g, '-')}_${contentVersion.Id}`)
: // Id + Title
this.dtl?.outputFileNameFormat === 'id_title'
? path.join(parentRecordFolderForFiles, `${contentVersion.Id}_${contentVersion.Title.replace(/[/\\?%*:|"<>]/g, '-')}`)
: // Title
path.join(parentRecordFolderForFiles, contentVersion.Title.replace(/[/\\?%*:|"<>]/g, '-'));
// Add file extension if missing in file title, and replace .snote by .html
if (contentVersion.FileExtension && path.extname(outputFile) !== '.' + contentVersion.FileExtension) {
outputFile =
outputFile + '.' + (contentVersion.FileExtension !== 'snote' ? contentVersion.FileExtension : 'html');
}
// Check file extension
if (this.dtl.fileTypes !== 'all' && !this.dtl.fileTypes.includes(contentVersion.FileType)) {
uxLog(this, c.grey(`Skipped - ${outputFile.replace(this.exportedFilesFolder, '')} - File type ignored`));
this.filesIgnoredType++;
return;
}
// Check file overwrite
if (this.dtl.overwriteFiles !== true && fs.existsSync(outputFile)) {
uxLog(this, c.yellow(`Skipped - ${outputFile.replace(this.exportedFilesFolder, '')} - File already existing`));
this.filesIgnoredExisting++;
return;
}
// Create directory if not existing
await fs.ensureDir(parentRecordFolderForFiles);
// Download file locally
const fetchUrl = `${this.conn.instanceUrl}/services/data/v${getApiVersion()}/sobjects/ContentVersion/${contentVersion.Id}/VersionData`;
await this.downloadFile(fetchUrl, outputFile);
}
// Build stats & result
async buildResult() {
const connAny = this.conn;
const apiCallsRemaining = connAny?.limitInfo?.apiUsage?.used
? (connAny?.limitInfo?.apiUsage?.limit || 0) - (connAny?.limitInfo?.apiUsage?.used || 0)
: null;
uxLog(this, c.cyan(`API limit: ${c.bold(connAny?.limitInfo?.apiUsage?.limit || null)}`));
uxLog(this, c.cyan(`API used before process: ${c.bold(this.apiUsedBefore)}`));
uxLog(this, c.cyan(`API used after process: ${c.bold(connAny?.limitInfo?.apiUsage?.used || null)}`));
uxLog(this, c.cyan(`API calls remaining for today: ${c.bold(apiCallsRemaining)}`));
uxLog(this, c.cyan(`Total SOQL requests: ${c.bold(this.totalSoqlRequests)}`));
uxLog(this, c.cyan(`Total parent records found: ${c.bold(this.totalParentRecords)}`));
uxLog(this, c.cyan(`Total parent records with files: ${c.bold(this.parentRecordsWithFiles)}`));
uxLog(this, c.cyan(`Total parent records ignored because already downloaded: ${c.bold(this.recordsIgnored)}`));
uxLog(this, c.cyan(`Total files downloaded: ${c.bold(this.filesDownloaded)}`));
uxLog(this, c.cyan(`Total file download errors: ${c.bold(this.filesErrors)}`));
uxLog(this, c.cyan(`Total files skipped because of type constraint: ${c.bold(this.filesIgnoredType)}`));
uxLog(this, c.cyan(`Total files skipped because previously downloaded: ${c.bold(this.filesIgnoredExisting)}`));
return {
totalParentRecords: this.totalParentRecords,
parentRecordsWithFiles: this.parentRecordsWithFiles,
filesDownloaded: this.filesDownloaded,
filesErrors: this.filesErrors,
recordsIgnored: this.recordsIgnored,
filesIgnoredType: this.filesIgnoredType,
filesIgnoredExisting: this.filesIgnoredExisting,
apiLimit: connAny?.limitInfo?.apiUsage?.limit || null,
apiUsedBefore: this.apiUsedBefore,
apiUsedAfter: connAny?.limitInfo?.apiUsage?.used || null,
apiCallsRemaining,
};
}
}
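// Illustrative usage sketch (assumes an authenticated jsforce-style `conn`; the workspace path,
// options, and `cmd` (typically the calling CLI command instance) are placeholder values):
//
//   const exporter = new FilesExporter('scripts/files/OpportunitiesPDF', conn,
//     { pollTimeout: 600000, recordsChunkSize: 1000, startChunkNumber: 0 }, cmd);
//   const exportResult = await exporter.processExport();
//   console.log(`${exportResult.filesDownloaded} files downloaded, ${exportResult.filesErrors} errors`);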
export class FilesImporter {
filesPath;
conn;
commandThis;
dtl = null; // export config
exportedFilesFolder = '';
handleOverwrite = false;
constructor(filesPath, conn, options, commandThis) {
this.filesPath = filesPath;
this.exportedFilesFolder = path.join(this.filesPath, 'export');
this.handleOverwrite = options?.handleOverwrite === true;
this.conn = conn;
this.commandThis = commandThis;
if (options?.exportConfig) {
this.dtl = options.exportConfig;
}
}
async processImport() {
// Get config
if (this.dtl === null) {
this.dtl = await getFilesWorkspaceDetail(this.filesPath);
}
uxLog(this.commandThis, c.cyan(`Importing files from ${c.green(this.dtl.full_label)} ...`));
uxLog(this.commandThis, c.italic(c.grey(this.dtl.description)));
// Get folders and files
const allRecordFolders = fs.readdirSync(this.exportedFilesFolder).filter((file) => {
return fs.statSync(path.join(this.exportedFilesFolder, file)).isDirectory();
});
let totalFilesNumber = 0;
for (const folder of allRecordFolders) {
totalFilesNumber += fs.readdirSync(path.join(this.exportedFilesFolder, folder)).length;
}
await this.calculateApiConsumption(totalFilesNumber);
// Query parent objects to find Ids corresponding to field value used as folder name
const parentObjectsRes = await bulkQuery(this.dtl.soqlQuery, this.conn);
const parentObjects = parentObjectsRes.records;
let successNb = 0;
let errorNb = 0;
for (const recordFolder of allRecordFolders) {
uxLog(this, c.grey(`Processing record ${recordFolder} ...`));
const recordFolderPath = path.join(this.exportedFilesFolder, recordFolder);
// List files in folder
const files = fs.readdirSync(recordFolderPath).filter((file) => {
return fs.statSync(path.join(this.exportedFilesFolder, recordFolder, file)).isFile();
});
// Find Id of parent object using folder name
const parentRecordIds = parentObjects.filter((parentObj) => parentObj[this.dtl.outputFolderNameField] === recordFolder);
if (parentRecordIds.length === 0) {
uxLog(this, c.red(`Unable to find Id for ${this.dtl.outputFolderNameField}=${recordFolder}`));
continue;
}
const parentRecordId = parentRecordIds[0].Id;
let existingDocuments = [];
// Collect existing documents if we handle file overwrite
if (this.handleOverwrite) {
const existingDocsQuery = `SELECT Id, ContentDocumentId, Title FROM ContentVersion WHERE FirstPublishLocationId = '${parentRecordId}'`;
const existingDocsQueryRes = await this.conn.query(existingDocsQuery);
existingDocuments = existingDocsQueryRes.records;
}
for (const file of files) {
const fileData = fs.readFileSync(path.join(recordFolderPath, file));
const contentVersionParams = {
Title: file,
PathOnClient: file,
VersionData: fileData.toString('base64'),
};
const matchingExistingDocs = existingDocuments.filter((doc) => doc.Title === file);
if (matchingExistingDocs.length > 0) {
contentVersionParams.ContentDocumentId = matchingExistingDocs[0].ContentDocumentId;
uxLog(this, c.grey(`Overwriting file ${file} ...`));
}
else {
contentVersionParams.FirstPublishLocationId = parentRecordId;
uxLog(this, c.grey(`Uploading file ${file} ...`));
}
try {
const insertResult = await this.conn.sobject('ContentVersion').create(contentVersionParams);
if (!insertResult.success) {
uxLog(this, c.red(`Unable to upload file ${file}`));
errorNb++;
}
else {
successNb++;
}
}
catch (e) {
uxLog(this, c.red(`Unable to upload file ${file}: ${e.message}`));
errorNb++;
}
}
}
uxLog(this, c.green(`Uploaded ${successNb} files`));
if (errorNb > 0) {
uxLog(this, c.yellow(`Errors during the upload of ${errorNb} files`));
}
return { successNb: successNb, errorNb: errorNb };
}
// Calculate API consumption
async calculateApiConsumption(totalFilesNumber) {
const bulkCallsNb = 1;
if (this.handleOverwrite) {
totalFilesNumber = totalFilesNumber * 2;
}
// Request user confirmation
if (!isCI) {
const warningMessage = c.cyanBright(`Files import consumes one REST API call per uploaded file.
(Estimation: ${bulkCallsNb} Bulk API call(s) and ${totalFilesNumber} REST calls) Do you confirm you want to proceed?`);
const promptRes = await prompts({ type: 'confirm', message: warningMessage });
if (promptRes.value !== true) {
throw new SfError('Command cancelled by user');
}
}
}
}
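// Illustrative usage sketch (same assumptions as for FilesExporter; handleOverwrite triggers an
// extra ContentVersion lookup per parent record folder):
//
//   const importer = new FilesImporter('scripts/files/OpportunitiesPDF', conn,
//     { handleOverwrite: true }, cmd);
//   const { successNb, errorNb } = await importer.processImport();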
export async function selectFilesWorkspace(opts = { selectFilesLabel: 'Please select a files folder to export' }) {
if (!fs.existsSync(filesFolderRoot)) {
throw new SfError("There is no files root folder 'scripts/files' in your workspace. Create it and define a files export configuration");
}
const filesFolders = fs
.readdirSync(filesFolderRoot, { withFileTypes: true })
.filter((dirent) => dirent.isDirectory())
.map((dirent) => path.join('.', 'scripts', 'files', dirent.name));
if (filesFolders.length === 0) {
throw new SfError('There are no file export folders in your workspace');
}
const choices = [];
for (const filesFolder of filesFolders) {
const dtl = await getFilesWorkspaceDetail(filesFolder);
if (dtl !== null) {
choices.push({
title: `📁 ${dtl.full_label}`,
description: dtl.description,
value: filesFolder,
});
}
}
const filesDirResult = await prompts({
type: 'select',
name: 'value',
message: c.cyanBright(opts.selectFilesLabel),
choices: choices,
});
return filesDirResult.value;
}
export async function getFilesWorkspaceDetail(filesWorkspace) {
const exportFile = path.join(filesWorkspace, 'export.json');
if (!fs.existsSync(exportFile)) {
uxLog(this, c.yellow(`Your File export folder ${c.bold(filesWorkspace)} must contain an ${c.bold('export.json')} configuration file`));
return null;
}
const exportFileJson = JSON.parse(await fs.readFile(exportFile, 'utf8'));
const folderName = (filesWorkspace.replace(/\\/g, '/').match(/([^/]*)\/*$/) || '')[1];
const hardisLabel = exportFileJson.sfdxHardisLabel || folderName;
const hardisDescription = exportFileJson.sfdxHardisDescription || filesWorkspace;
const soqlQuery = exportFileJson.soqlQuery || '';
const fileTypes = exportFileJson.fileTypes || 'all';
const outputFolderNameField = exportFileJson.outputFolderNameField || 'Name';
const outputFileNameFormat = exportFileJson.outputFileNameFormat || 'title';
const overwriteParentRecords = exportFileJson.overwriteParentRecords !== false;
const overwriteFiles = exportFileJson.overwriteFiles || false;
return {
full_label: `[${folderName}]${folderName !== hardisLabel ? `: ${hardisLabel}` : ''}`,
label: hardisLabel,
description: hardisDescription,
soqlQuery: soqlQuery,
fileTypes: fileTypes,
outputFolderNameField: outputFolderNameField,
outputFileNameFormat: outputFileNameFormat,
overwriteParentRecords: overwriteParentRecords,
overwriteFiles: overwriteFiles,
};
}
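// Illustrative export.json for a files workspace (the keys are the ones read above; the values
// are example placeholders):
//
//   {
//     "sfdxHardisLabel": "Opportunities PDF",
//     "sfdxHardisDescription": "Export PDF files attached to Opportunities",
//     "soqlQuery": "SELECT Id, Name FROM Opportunity",
//     "fileTypes": ["PDF"],
//     "outputFolderNameField": "Name",
//     "outputFileNameFormat": "title",
//     "overwriteParentRecords": true,
//     "overwriteFiles": false
//   }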
export async function promptFilesExportConfiguration(filesExportConfig, override = false) {
const questions = [];
if (override === false) {
questions.push(...[
{
type: 'text',
name: 'filesExportPath',
message: c.cyanBright('Please input the files export config folder name (PascalCase format). Ex: "OpportunitiesPDF"'),
},
{
type: 'text',
name: 'sfdxHardisLabel',
message: c.cyanBright('Please input a label for the files export configuration'),
initial: filesExportConfig.sfdxHardisLabel,
},
{
type: 'text',
name: 'sfdxHardisDescription',
message: c.cyanBright('Please input a description of the files export configuration'),
initial: filesExportConfig.sfdxHardisDescription,
},
]);
}
questions.push(...[
{
type: 'text',
name: 'soqlQuery',
message: 'Please input the main SOQL Query to fetch the parent records of files (ContentVersions). Ex: SELECT Id,Name from Opportunity',
initial: filesExportConfig.soqlQuery,
},
{
type: 'text',
name: 'outputFolderNameField',
message: 'Please input the field to use to build the name of the folder containing downloaded files (can be the name, or another field like External ID)',
initial: filesExportConfig.outputFolderNameField,
},
{
type: 'select',
name: 'outputFileNameFormat',
choices: [
{ value: 'title', title: 'title (ex: "Cloudity New Project")' },
{ value: 'title_id', title: 'title_id (ex: "Cloudity New Project_006bR00000Bet7WQAR")' },
{ value: 'id_title', title: 'id_title (ex: "006bR00000Bet7WQAR_Cloudity New Project")' },
{ value: 'id', title: 'id (ex: "006bR00000Bet7WQAR")' },
],
message: 'Please select the format of output files names',
initial: filesExportConfig.outputFileNameFormat,
},
{
type: 'confirm',
name: 'overwriteParentRecords',
message: 'Do you want to try to download files attached to parent records whose folder already exists locally?',
initial: filesExportConfig.overwriteParentRecords,
},
{
type: 'confirm',
name: 'overwriteFiles',
message: 'Do you want to overwrite files that have already been downloaded?',
initial: filesExportConfig.overwriteFiles,
},
]);
const resp = await prompts(questions);
const filesConfig = Object.assign(filesExportConfig, {
filesExportPath: resp.filesExportPath,
sfdxHardisLabel: resp.sfdxHardisLabel || filesExportConfig.sfdxHardisLabel,
sfdxHardisDescription: resp.sfdxHardisDescription || filesExportConfig.sfdxHardisDescription,
soqlQuery: resp.soqlQuery,
outputFolderNameField: resp.outputFolderNameField,
outputFileNameFormat: resp.outputFileNameFormat,
overwriteParentRecords: resp.overwriteParentRecords,
overwriteFiles: resp.overwriteFiles,
});
return filesConfig;
}
export async function countLinesInFile(file) {
let readError = false;
let lineCount = 0;
return await new Promise((resolve, reject) => {
fs.createReadStream(file)
.pipe(split())
.on('data', () => {
lineCount++;
})
.on('end', () => {
if (readError) {
return;
}
// split() emits an extra empty token after a trailing newline, hence the -1
resolve(lineCount - 1);
})
.on('error', (error) => {
readError = true;
reject(error);
});
});
}
/**
* @description This function generates a report path for a given file name prefix.
* It retrieves the report directory and the current branch name.
* If the branch name is not available in the environment variable CI_COMMIT_REF_NAME, it tries to get the current git branch.
* If both are not available, it uses the string "Missing CI_COMMIT_REF_NAME variable".
* It then joins the report directory, file name prefix, and branch name to form the full path of the report.
*
* @param {string} fileNamePrefix - The prefix for the file name.
* @param {string} outputFile - The output file path. If null, a new path is generated.
* @param {Object} [options] - Additional options for generating the report path.
* @param {boolean} [options.withDate=false] - Whether to append a timestamp to the file name.
* @returns {Promise<string>} - A Promise that resolves to the full path of the report.
*/
export async function generateReportPath(fileNamePrefix, outputFile, options = { withDate: false }) {
if (outputFile == null) {
const reportDir = await getReportDirectory();
const branchName = (!isGitRepo()) ? 'no-git' :
process.env.CI_COMMIT_REF_NAME ||
(await getCurrentGitBranch({ formatted: true })) ||
'branch-not-found';
let newOutputFile = path.join(reportDir, `${fileNamePrefix}-${branchName.split('/').pop()}.csv`);
if (options.withDate) {
// Add date time info
const date = new Date().toISOString().split('.')[0].replace(/:/g, '-').replace('T', '_');
newOutputFile = path.join(reportDir, `${fileNamePrefix}-${branchName.split('/').pop()}-${date}.csv`);
}
return newOutputFile;
}
else {
await fs.ensureDir(path.dirname(outputFile));
return outputFile;
}
}
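// Illustrative example (placeholder report directory and branch name): on branch
// "feature/files-export", generateReportPath('org-audit', null, { withDate: true }) resolves to
// something like "<reportDir>/org-audit-files-export-2026-05-04_10-20-30.csv", since only the
// last segment of the branch name is kept.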
/**
* @description This function generates a CSV file from the provided data and writes it to the specified output path.
* If the operation is successful, it logs a message and requests to open the file.
* If an error occurs during the operation, it logs the error message and stack trace.
*
* @param {any[]} data - The data to be written to the CSV file.
* @param {string} outputPath - The path where the CSV file will be written.
* @returns {Promise<void>} - A Promise that resolves when the operation is complete.
*/
export async function generateCsvFile(data, outputPath) {
const result = {};
try {
const csvContent = Papa.unparse(data);
await fs.writeFile(outputPath, csvContent, 'utf8');
uxLog(this, c.italic(c.cyan(`Please see detailed CSV log in ${c.bold(outputPath)}`)));
result.csvFile = outputPath;
WebSocketClient.requestOpenFile(outputPath);
if (data.length > 0) {
try {
// Generate mirror XLSX file
const xlsDirName = path.join(path.dirname(outputPath), 'xls');
const xlsxFileName = path.basename(outputPath).replace('.csv', '.xlsx');
const xlsxFile = path.join(xlsDirName, xlsxFileName);
await fs.ensureDir(xlsDirName);
await csvToXls(outputPath, xlsxFile);
uxLog(this, c.italic(c.cyan(`Please see detailed XLSX log in ${c.bold(xlsxFile)}`)));
result.xlsxFile = xlsxFile;
if (!isCI && !(process.env.NO_OPEN === 'true')) {
try {
uxLog(this, c.italic(c.grey(`Opening XLSX file ${c.bold(xlsxFile)}... (define NO_OPEN=true to disable this)`)));
await open(xlsxFile, { wait: false });
}
catch (e) {
uxLog(this, c.yellow('Error while opening XLSX file:\n' + e.message + '\n' + e.stack));
}
}
}
catch (e2) {
uxLog(this, c.yellow('Error while generating XLSX log file:\n' + e2.message + '\n' + e2.stack));
}
}
else {
uxLog(this, c.grey(`No XLSX file generated because ${outputPath} is empty`));
}
}
catch (e) {
uxLog(this, c.yellow('Error while generating CSV log file:\n' + e.message + '\n' + e.stack));
}
return result;
}
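// Illustrative usage sketch (placeholder records; generateReportPath is used here to build a
// default output location):
//
//   const rows = [{ Id: '001xx0000000001', Name: 'ACME' }];
//   const reportPath = await generateReportPath('accounts', null);
//   const { csvFile, xlsxFile } = await generateCsvFile(rows, reportPath);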
async function csvToXls(csvFile, xlsxFile) {
const workbook = new ExcelJS.Workbook();
const worksheet = await workbook.csv.readFile(csvFile);
// Set filters
worksheet.autoFilter = 'A1:Z1';
// Adjust column sizes (only if the file is not too big, to avoid performance issues)
if (worksheet.rowCount < 5000) {
worksheet.columns.forEach((column) => {
const lengths = (column.values || []).map((v) => (v || '').toString().length);
const maxLength = Math.max(...lengths.filter((v) => typeof v === 'number'));
column.width = maxLength;
});
}
await workbook.xlsx.writeFile(xlsxFile);
}
//# sourceMappingURL=filesUtils.js.map