sfdx-hardis
Swiss-army-knife toolbox for Salesforce. Lets you define a complete CI/CD pipeline, orchestrate base commands, and assist users with interactive wizards.
import { execSfdxJson, uxLog } from './index.js';
import c from 'chalk';
import ora from 'ora';
// Constants for record limits
const MAX_CHUNKS = Number(process.env.SOQL_MAX_BATCHES ?? 50);
const CHUNK_SIZE = Number(process.env.SOQL_CHUNK_SIZE ?? 200);
const MAX_RECORDS = MAX_CHUNKS * CHUNK_SIZE;
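// Example (illustrative sketch): the caps above can be raised via environment
// variables before running any command that relies on soqlQuery below:
//   export SOQL_MAX_BATCHES=100   # allow up to 100 queryMore pages
//   export SOQL_CHUNK_SIZE=2000   # expected records per REST page
//   // -> effective MAX_RECORDS cap becomes 200000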
// Perform a simple SOQL query via the REST API (results capped at MAX_RECORDS)
export async function soqlQuery(soqlQuery, conn) {
uxLog(this, c.grey('SOQL REST: ' +
c.italic(soqlQuery.length > 500 ? soqlQuery.slice(0, 500) + '...' : soqlQuery) +
' on ' +
conn.instanceUrl));
// First query
const res = await conn.query(soqlQuery);
let pageRes = Object.assign({}, res);
let batchCount = 1;
// Get all page results
while (pageRes.done === false && pageRes.nextRecordsUrl && batchCount < MAX_CHUNKS) {
uxLog(this, c.grey(`Fetching batch ${batchCount + 1}/${MAX_CHUNKS}...`));
pageRes = await conn.queryMore(pageRes.nextRecordsUrl);
res.records.push(...pageRes.records);
batchCount++;
}
if (!pageRes.done) {
uxLog(this, c.yellow(`Warning: Query limit of ${MAX_RECORDS} records reached. Some records were not retrieved.`));
uxLog(this, c.yellow(`Consider using bulkQuery for larger datasets.`));
}
uxLog(this, c.grey(`SOQL REST: Retrieved ${res.records.length} records in ${batchCount} chunk(s)`));
return res;
}
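// Example (illustrative sketch; assumes `conn` is an authenticated jsforce Connection,
// e.g. obtained via @salesforce/core):
//   const res = await soqlQuery('SELECT Id, Name FROM Account', conn);
//   for (const account of res.records) {
//     console.log(account.Id, account.Name);
//   }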
// Perform a simple SOQL query using the Tooling API (paginates through all results)
export async function soqlQueryTooling(soqlQuery, conn) {
uxLog(this, c.grey('SOQL REST Tooling: ' +
c.italic(soqlQuery.length > 500 ? soqlQuery.slice(0, 500) + '...' : soqlQuery) +
' on ' +
conn.instanceUrl));
// First query
const res = await conn.tooling.query(soqlQuery);
let pageRes = Object.assign({}, res);
// Get all page results
while (pageRes.done === false && pageRes.nextRecordsUrl) {
pageRes = await conn.tooling.queryMore(pageRes.nextRecordsUrl || "");
res.records.push(...pageRes.records);
}
return res;
}
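// Example (illustrative sketch; ApexClass is a standard Tooling API object):
//   const res = await soqlQueryTooling('SELECT Id, Name FROM ApexClass', conn);
//   console.log(`Found ${res.records.length} Apex classes`);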
let spinnerQ;
const maxRetry = Number(process.env.BULK_QUERY_RETRY || 5);
// Same as soqlQuery but using the Bulk API. Do not use if there will be too many results for JavaScript to hold in memory
export async function bulkQuery(soqlQuery, conn, retries = 3) {
const queryLabel = soqlQuery.length > 500 ? soqlQuery.slice(0, 500) + '...' : soqlQuery;
uxLog(this, c.grey('[BulkApiV2] ' + c.italic(queryLabel)));
// Polling settings must be set on bulk2, since conn.bulk2.query() is used below
conn.bulk2.pollInterval = 5000; // 5 sec
conn.bulk2.pollTimeout = 60000; // 60 sec
// Start query
try {
spinnerQ = ora({ text: `[BulkApiV2] Bulk Query: ${queryLabel}`, spinner: 'moon' }).start();
const recordStream = await conn.bulk2.query(soqlQuery);
recordStream.on('error', (err) => {
uxLog(this, c.yellow('Bulk Query error: ' + err));
globalThis.sfdxHardisFatalError = true;
});
// Wait for all results
const records = await recordStream.toArray();
spinnerQ.succeed(`[BulkApiV2] Bulk Query completed with ${records.length} results.`);
return { records: records };
}
catch (e) {
spinnerQ.fail(`[BulkApiV2] Bulk query error: ${e.message}`);
// Try again if the reason is a timeout and the max number of retries has not been reached yet
if ((e + '').includes('ETIMEDOUT') && retries < maxRetry) {
uxLog(this, c.yellow('[BulkApiV2] Bulk Query retry attempt #' + (retries + 1)));
return await bulkQuery(soqlQuery, conn, retries + 1);
}
else {
throw e;
}
}
}
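// Example (illustrative sketch; suited to large result sets that still fit in memory):
//   const res = await bulkQuery("SELECT Id FROM Contact WHERE MailingCountry = 'France'", conn);
//   const contactIds = res.records.map((record) => record.Id);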
// When an IN condition might contain more than 1000 elements, the query must be split into several requests
// Remember to use the {{IN}} placeholder in soqlQuery
export async function bulkQueryChunksIn(soqlQuery, conn, inElements, batchSize = 1000, retries = 3) {
const results = { records: [] };
for (let i = 0; i < inElements.length; i += batchSize) {
const inElementsChunk = inElements.slice(i, i + batchSize);
const replacementString = "'" + inElementsChunk.join("','") + "'";
const soqlQueryWithInConstraint = soqlQuery.replace('{{IN}}', replacementString);
const chunkResults = await bulkQuery(soqlQueryWithInConstraint, conn, retries);
results.records.push(...chunkResults.records);
}
return results;
}
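// Example (illustrative sketch; note the {{IN}} placeholder in the query template):
//   const accountIds = ['001xx000003DGb0AAG', '001xx000003DGb1AAG']; // possibly thousands of Ids
//   const res = await bulkQueryChunksIn(
//     'SELECT Id, AccountId FROM Contact WHERE AccountId IN ({{IN}})',
//     conn,
//     accountIds
//   );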
// Bulk query records in Id-ordered chunks of batchSize records (default 100000)
export async function bulkQueryByChunks(soqlQuery, conn, batchSize = 100000, retries = 3) {
const results = { records: [] };
let lastRecordId = null;
let hasMoreRecords = true;
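// Note: the pagination below appends its own WHERE / ORDER BY / LIMIT clauses,
// so soqlQuery is assumed to contain none of them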
while (hasMoreRecords) {
let soqlQueryWithLimit = `${soqlQuery} ORDER BY Id LIMIT ${batchSize}`;
if (lastRecordId) {
soqlQueryWithLimit = `${soqlQuery} WHERE Id > '${lastRecordId}' ORDER BY Id LIMIT ${batchSize}`;
}
const chunkResults = await bulkQuery(soqlQueryWithLimit, conn, retries);
results.records.push(...chunkResults.records);
if (chunkResults.records.length > 0) {
lastRecordId = chunkResults.records[chunkResults.records.length - 1].Id;
}
hasMoreRecords = chunkResults.records.length === batchSize;
}
return results;
}
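// Example (illustrative sketch; the base query must not already contain WHERE / ORDER BY / LIMIT):
//   const res = await bulkQueryByChunks('SELECT Id, Name FROM Lead', conn, 50000);
//   console.log(`Retrieved ${res.records.length} leads in Id-ordered chunks`);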
let spinner;
// Perform a bulk ingest operation (insert/update/delete...) using Bulk API v2. Do not use if there are too many records for JavaScript to hold in memory
export async function bulkUpdate(objectName, action, records, conn) {
uxLog(this, c.grey(`SOQL BULK on object ${c.bold(objectName)} with action ${c.bold(action)} (${c.bold(records.length)} records)`));
conn.bulk2.pollInterval = 5000; // 5 sec
conn.bulk2.pollTimeout = 60000; // 60 sec
// Initialize Job
spinner = ora({ text: `[BulkApiV2] Bulk Load on ${objectName} (${action})`, spinner: 'moon' }).start();
const job = conn.bulk2.createJob({
operation: action,
object: objectName,
});
job.on('open', () => {
spinner.text = `[BulkApiV2] Load Job ${job.id} successfully created.`;
});
// Upload job data
await job.open();
await job.uploadData(records);
await job.close();
// Monitor job execution
job.on('inProgress', (jobInfo) => {
spinner.text = `[BulkApiV2] Processed: ${jobInfo.numberRecordsProcessed}. Failed: ${jobInfo.numberRecordsFailed}`;
});
job.on('failed', (e) => {
spinner.fail(`[BulkApiV2] Error: ${e.message}`);
});
await job.poll();
const res = await job.getAllResults();
spinner.succeed(`Bulk Load on ${objectName} (${action}) completed.`);
return res;
}
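// Example (illustrative sketch; `action` is a Bulk API v2 ingest operation such as
// 'insert', 'update' or 'delete'; result shape follows jsforce getAllResults()):
//   const records = [{ Id: '001xx000003DGb0AAG', Name: 'Renamed Account' }];
//   const res = await bulkUpdate('Account', 'update', records, conn);
//   console.log(`${res.successfulResults.length} updated, ${res.failedResults.length} failed`);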
export async function bulkDelete(objectName, recordIds, conn) {
const records = recordIds.map(recordId => { return { Id: recordId }; });
return await bulkUpdate(objectName, "delete", records, conn);
}
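// Example (illustrative sketch):
//   await bulkDelete('Account', ['001xx000003DGb0AAG', '001xx000003DGb1AAG'], conn);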
export async function bulkDeleteTooling(objectName, recordsIds, conn) {
uxLog(this, c.grey(`[ToolingApi] Delete ${recordsIds.length} records on ${objectName}: ${JSON.stringify(recordsIds)}`));
try {
const deleteJobResults = await conn.tooling.destroy(objectName, recordsIds, { allOrNone: false });
return deleteJobResults;
}
catch (e) {
uxLog(this, c.yellow(`[ToolingApi] jsforce error while calling the Tooling API. Falling back to unitary deletes (slower, but should work!)`));
uxLog(this, c.grey(e.message));
const deleteJobResults = [];
for (const record of recordsIds) {
const deleteCommand = `sf data:delete:record --sobject ${objectName} --record-id ${record} --target-org ${conn.getUsername()} --use-tooling-api`;
const deleteCommandRes = await execSfdxJson(deleteCommand, this, {
fail: false,
output: true
});
const deleteResult = { Id: record, success: true };
if (!(deleteCommandRes.status === 0)) {
deleteResult.success = false;
deleteResult.error = JSON.stringify(deleteCommandRes);
}
deleteJobResults.push(deleteResult);
}
return { results: deleteJobResults };
}
}
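// Example (illustrative sketch; Flow versions are a common Tooling API delete target):
//   const res = await bulkDeleteTooling('Flow', ['301xx0000000001AAA'], conn);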
//# sourceMappingURL=apiUtils.js.map