@jsforce/jsforce-node
Salesforce API Library for JavaScript
"use strict";
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.Bulk = exports.Batch = exports.Job = void 0;
/**
* @file Manages Salesforce Bulk API related operations
* @author Shinichi Tomita <shinichi.tomita@gmail.com>
*/
const events_1 = require("events");
const stream_1 = require("stream");
const multistream_1 = __importDefault(require("multistream"));
const record_stream_1 = require("../record-stream");
const http_api_1 = __importDefault(require("../http-api"));
const jsforce_1 = require("../jsforce");
const stream_2 = require("../util/stream");
const is_1 = __importDefault(require("@sindresorhus/is"));
/**
* Class for Bulk API Job
*/
class Job extends events_1.EventEmitter {
type;
operation;
options;
id;
state;
_bulk;
_batches;
_jobInfo;
_error;
/**
* @param bulk - Bulk API instance
* @param type - SObject type to load
* @param operation - Bulk load operation (e.g. 'insert', 'update', 'upsert', 'delete', 'hardDelete', 'query', 'queryAll')
* @param options - Job options (extIdField, concurrencyMode, assignmentRuleId)
* @param jobId - ID of an already-created job, if any
*/
constructor(bulk, type, operation, options, jobId) {
super();
this._bulk = bulk;
this.type = type;
this.operation = operation;
this.options = options || {};
this.id = jobId ?? null;
this.state = this.id ? 'Open' : 'Unknown';
this._batches = {};
// default error handler to keep the latest error
this.on('error', (error) => (this._error = error));
}
/**
* Return the latest jobInfo from the cache
*/
info() {
// if cache is not available, check the latest
if (!this._jobInfo) {
this._jobInfo = this.check();
}
return this._jobInfo;
}
/**
* Open a new job and get the job info
*/
open() {
const bulk = this._bulk;
const options = this.options;
// if sobject type / operation is not provided
if (!this.type || !this.operation) {
throw new Error('type / operation is required to open a new job');
}
// if opening the job has not been requested yet
if (!this._jobInfo) {
let operation = this.operation.toLowerCase();
if (operation === 'harddelete') {
operation = 'hardDelete';
}
if (operation === 'queryall') {
operation = 'queryAll';
}
const body = `
<?xml version="1.0" encoding="UTF-8"?>
<jobInfo xmlns="http://www.force.com/2009/06/asyncapi/dataload">
<operation>${operation}</operation>
<object>${this.type}</object>
${options.extIdField
? `<externalIdFieldName>${options.extIdField}</externalIdFieldName>`
: ''}
${options.concurrencyMode
? `<concurrencyMode>${options.concurrencyMode}</concurrencyMode>`
: ''}
${options.assignmentRuleId
? `<assignmentRuleId>${options.assignmentRuleId}</assignmentRuleId>`
: ''}
<contentType>CSV</contentType>
</jobInfo>
`.trim();
this._jobInfo = (async () => {
try {
const res = await bulk._request({
method: 'POST',
path: '/job',
body,
headers: {
'Content-Type': 'application/xml; charset=utf-8',
},
responseType: 'application/xml',
});
this.emit('open', res.jobInfo);
this.id = res.jobInfo.id;
this.state = res.jobInfo.state;
return res.jobInfo;
}
catch (err) {
this.emit('error', err);
throw err;
}
})();
}
return this._jobInfo;
}
/**
* Create a new batch instance in the job
*/
createBatch() {
const batch = new Batch(this);
batch.on('queue', () => {
this._batches[batch.id] = batch;
});
return batch;
}
/**
* Get a batch instance specified by given batch ID
*/
batch(batchId) {
let batch = this._batches[batchId];
if (!batch) {
batch = new Batch(this, batchId);
this._batches[batchId] = batch;
}
return batch;
}
/**
* Check the latest job status from the server
*/
check() {
const bulk = this._bulk;
const logger = bulk._logger;
this._jobInfo = (async () => {
const jobId = await this.ready();
const res = await bulk._request({
method: 'GET',
path: '/job/' + jobId,
responseType: 'application/xml',
});
logger.debug(res.jobInfo);
this.id = res.jobInfo.id;
this.type = res.jobInfo.object;
this.operation = res.jobInfo.operation;
this.state = res.jobInfo.state;
return res.jobInfo;
})();
return this._jobInfo;
}
/**
* Wait until the job ID is assigned by the server
*/
ready() {
return this.id
? Promise.resolve(this.id)
: this.open().then(({ id }) => id);
}
/**
* List all registered batch info in the job
*/
async list() {
const bulk = this._bulk;
const logger = bulk._logger;
const jobId = await this.ready();
const res = await bulk._request({
method: 'GET',
path: '/job/' + jobId + '/batch',
responseType: 'application/xml',
});
logger.debug(res.batchInfoList.batchInfo);
const batchInfoList = Array.isArray(res.batchInfoList.batchInfo)
? res.batchInfoList.batchInfo
: [res.batchInfoList.batchInfo];
return batchInfoList;
}
/**
* Close the opened job
*/
async close() {
if (!this.id) {
return;
}
try {
const jobInfo = await this._changeState('Closed');
this.id = null;
this.emit('close', jobInfo);
return jobInfo;
}
catch (err) {
this.emit('error', err);
throw err;
}
}
/**
* Abort the job by setting its state to 'Aborted'
*/
async abort() {
if (!this.id) {
return;
}
try {
const jobInfo = await this._changeState('Aborted');
this.id = null;
this.emit('abort', jobInfo);
return jobInfo;
}
catch (err) {
this.emit('error', err);
throw err;
}
}
/**
* @private
*/
async _changeState(state) {
const bulk = this._bulk;
const logger = bulk._logger;
this._jobInfo = (async () => {
const jobId = await this.ready();
const body = `
<?xml version="1.0" encoding="UTF-8"?>
<jobInfo xmlns="http://www.force.com/2009/06/asyncapi/dataload">
<state>${state}</state>
</jobInfo>
`.trim();
const res = await bulk._request({
method: 'POST',
path: '/job/' + jobId,
body: body,
headers: {
'Content-Type': 'application/xml; charset=utf-8',
},
responseType: 'application/xml',
});
logger.debug(res.jobInfo);
this.state = res.jobInfo.state;
return res.jobInfo;
})();
return this._jobInfo;
}
}
exports.Job = Job;
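/*
 * Usage sketch (illustrative, not part of the library source), assuming an
 * authenticated jsforce Connection named `conn` and an async context:
 *
 *   // open a new job explicitly and inspect its state
 *   const job = conn.bulk.createJob('Account', 'insert');
 *   const jobInfo = await job.open();
 *   console.log(jobInfo.id, jobInfo.state); // state is typically 'Open'
 *
 *   // look up an existing job by ID, list its batches, then close it
 *   const sameJob = conn.bulk.job(jobInfo.id);
 *   const batchInfos = await sameJob.list();
 *   await sameJob.close();
 */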
/*--------------------------------------------*/
class PollingTimeoutError extends Error {
jobId;
batchId;
/**
* @param message - Error message
* @param jobId - ID of the job being polled
* @param batchId - ID of the batch being polled
*/
constructor(message, jobId, batchId) {
super(message);
this.name = 'PollingTimeout';
this.jobId = jobId;
this.batchId = batchId;
}
}
/*--------------------------------------------*/
/**
* Batch (extends Writable)
*/
class Batch extends stream_1.Writable {
job;
id;
_bulk;
_uploadStream;
_downloadStream;
_dataStream;
_result;
_error;
/**
* @param job - Parent Job instance
* @param id - Batch ID, if the batch already exists on the server
*/
constructor(job, id) {
super({ objectMode: true });
this.job = job;
this.id = id;
this._bulk = job._bulk;
// default error handler to keep the latest error
this.on('error', (error) => (this._error = error));
//
// setup data streams
//
const converterOptions = { nullValue: '#N/A' };
const uploadStream = (this._uploadStream = new record_stream_1.Serializable());
const uploadDataStream = uploadStream.stream('csv', converterOptions);
const downloadStream = (this._downloadStream = new record_stream_1.Parsable());
const downloadDataStream = downloadStream.stream('csv', converterOptions);
this.on('finish', () => uploadStream.end());
uploadDataStream.once('readable', async () => {
try {
// ensure the job is opened on the server or the job ID is already assigned
await this.job.ready();
// pipe upload data to batch API request stream
uploadDataStream.pipe(this._createRequestStream());
}
catch (err) {
this.emit('error', err);
}
});
// duplex data stream, exposed to API consumers via Batch#stream()
this._dataStream = (0, stream_2.concatStreamsAsDuplex)(uploadDataStream, downloadDataStream);
}
/**
* Connect to the batch API and create a request/response stream instance
*
* @private
*/
_createRequestStream() {
const bulk = this._bulk;
const logger = bulk._logger;
const req = bulk._request({
method: 'POST',
path: '/job/' + this.job.id + '/batch',
headers: {
'Content-Type': 'text/csv',
},
responseType: 'application/xml',
});
(async () => {
try {
const res = await req;
logger.debug(res.batchInfo);
this.id = res.batchInfo.id;
this.emit('queue', res.batchInfo);
}
catch (err) {
this.emit('error', err);
}
})();
return req.stream();
}
/**
* Implementation of Writable
*/
_write(record_, enc, cb) {
const { Id, type, attributes, ...rrec } = record_;
let record;
switch (this.job.operation) {
case 'insert':
record = rrec;
break;
case 'delete':
case 'hardDelete':
record = { Id };
break;
default:
record = { Id, ...rrec };
}
this._uploadStream.write(record, enc, cb);
}
/**
* Return a duplex stream which accepts CSV data as input and emits batch results as output
*/
stream() {
return this._dataStream;
}
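/*
 * Usage sketch (illustrative, not part of the library source): feeding CSV
 * data into the batch through this duplex stream, assuming `job` is an open
 * Job for an 'insert' operation and './data.csv' is a hypothetical file path.
 *
 *   const fs = require('fs');
 *   const batch = job.createBatch();
 *   batch.on('queue', () => batch.poll(1000, 30000));
 *   batch.on('response', (results) => console.log(results));
 *   batch.on('error', (err) => console.error(err));
 *   fs.createReadStream('./data.csv').pipe(batch.stream());
 */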
/**
* Execute batch operation
*/
execute(input) {
// if batch is already executed
if (this._result) {
throw new Error('Batch already executed.');
}
this._result = new Promise((resolve, reject) => {
this.once('response', resolve);
this.once('error', reject);
});
if (is_1.default.nodeStream(input)) {
// if input has stream.Readable interface
input.pipe(this._dataStream);
}
else {
const recordData = structuredClone(input);
if (Array.isArray(recordData)) {
for (const record of recordData) {
for (const key of Object.keys(record)) {
if (typeof record[key] === 'boolean') {
record[key] = String(record[key]);
}
}
this.write(record);
}
this.end();
}
else if (typeof recordData === 'string') {
this._dataStream.write(recordData, 'utf8');
this._dataStream.end();
}
}
// return Batch instance for chaining
return this;
}
run = this.execute;
exec = this.execute;
/**
* Promises/A+ interface
* Delegates to the underlying promise and returns a promise for the batch result
*/
then(onResolved, onReject) {
if (!this._result) {
this.execute();
}
return this._result.then(onResolved, onReject);
}
/**
* Check the latest batch status on the server
*/
async check() {
const bulk = this._bulk;
const logger = bulk._logger;
const jobId = this.job.id;
const batchId = this.id;
if (!jobId || !batchId) {
throw new Error('Batch not started.');
}
const res = await bulk._request({
method: 'GET',
path: '/job/' + jobId + '/batch/' + batchId,
responseType: 'application/xml',
});
logger.debug(res.batchInfo);
return res.batchInfo;
}
/**
* Poll the batch status and retrieve the result when it completes
*/
poll(interval, timeout) {
const jobId = this.job.id;
const batchId = this.id;
if (!jobId || !batchId) {
throw new Error('Batch not started.');
}
const startTime = new Date().getTime();
const endTime = startTime + timeout;
if (timeout === 0) {
throw new PollingTimeoutError(`Skipping polling because of timeout = 0ms. Job Id = ${jobId} | Batch Id = ${batchId}`, jobId, batchId);
}
const poll = async () => {
const now = new Date().getTime();
if (endTime < now) {
const err = new PollingTimeoutError('Polling time out. Job Id = ' + jobId + ' , batch Id = ' + batchId, jobId, batchId);
this.emit('error', err);
return;
}
let res;
try {
res = await this.check();
}
catch (err) {
this.emit('error', err);
return;
}
if (res.state === 'Failed') {
if (parseInt(res.numberRecordsProcessed, 10) > 0) {
this.retrieve();
}
else {
this.emit('error', new Error(res.stateMessage));
}
}
else if (res.state === 'Completed') {
this.retrieve();
}
else if (res.state === 'NotProcessed') {
this.emit('error', new Error('Job has been aborted'));
}
else {
this.emit('inProgress', res);
setTimeout(poll, interval);
}
};
setTimeout(poll, interval);
}
/**
* Retrieve batch result
*/
async retrieve() {
const bulk = this._bulk;
const jobId = this.job.id;
const job = this.job;
const batchId = this.id;
if (!jobId || !batchId) {
throw new Error('Batch not started.');
}
try {
const resp = await bulk._request({
method: 'GET',
path: '/job/' + jobId + '/batch/' + batchId + '/result',
});
let results;
if (job.operation === 'query' || job.operation === 'queryAll') {
const res = resp;
const resultId = res['result-list'].result;
results = (Array.isArray(resultId)
? resultId
: [resultId]).map((id) => ({ id, batchId, jobId }));
}
else {
const res = resp;
results = res.map((ret) => ({
id: ret.Id || null,
success: ret.Success === 'true',
created: ret.Created === 'true',
errors: ret.Error ? [ret.Error] : [],
}));
}
this.emit('response', results);
return results;
}
catch (err) {
this.emit('error', err);
throw err;
}
}
/**
* Fetch query batch result as a record stream
*
* @param {String} resultId - Result id
* @returns {RecordStream} - Record stream, convertible to CSV data stream
*/
result(resultId) {
const jobId = this.job.id;
const batchId = this.id;
if (!jobId || !batchId) {
throw new Error('Batch not started.');
}
const resultStream = new record_stream_1.Parsable();
const resultDataStream = resultStream.stream('csv');
this._bulk
._request({
method: 'GET',
path: '/job/' + jobId + '/batch/' + batchId + '/result/' + resultId,
responseType: 'application/octet-stream',
})
.stream()
.pipe(resultDataStream);
return resultStream;
}
}
exports.Batch = Batch;
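/*
 * Usage sketch (illustrative, not part of the library source). A Batch is a
 * Writable stream and is also thenable, so it can be awaited for its results.
 * Assuming `job` is a Job for an 'insert' operation and `records` is an array
 * of plain record objects, inside an async context:
 *
 *   const batch = job.createBatch();
 *   batch.on('error', (err) => console.error('batch error:', err));
 *   batch.on('queue', () => batch.poll(1000, 30000)); // interval, timeout (ms)
 *   batch.on('inProgress', (info) => console.log(info.numberRecordsProcessed));
 *   const results = await batch.execute(records);
 *   // results: [{ id, success, created, errors }, ...]
 *   await job.close();
 */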
/*--------------------------------------------*/
/**
* HTTP API class for Bulk API requests (adds the session header and handles XML error responses)
*/
class BulkApi extends http_api_1.default {
beforeSend(request) {
request.headers = {
...request.headers,
'X-SFDC-SESSION': this._conn.accessToken ?? '',
};
}
isSessionExpired(response) {
return (response.statusCode === 400 &&
response.body.includes('<exceptionCode>InvalidSessionId</exceptionCode>'));
}
hasErrorInResponseBody(body) {
return !!body.error;
}
parseError(body) {
return {
errorCode: body.error.exceptionCode,
message: body.error.exceptionMessage,
};
}
}
/*--------------------------------------------*/
/**
* Class for Bulk API
*
* @class
*/
class Bulk {
_conn;
_logger;
/**
* Polling interval in milliseconds
*
* Default: 1000 (1 second)
*/
pollInterval = 1000;
/**
* Polling timeout in milliseconds
*
* Default: 30000 (30 seconds)
*/
pollTimeout = 30000;
/**
* @param conn - Connection instance
*/
constructor(conn) {
this._conn = conn;
this._logger = conn._logger;
}
/**
* Send a request to the Bulk API endpoint
*/
_request(request_) {
const conn = this._conn;
const { path, responseType, ...rreq } = request_;
const baseUrl = [conn.instanceUrl, 'services/async', conn.version].join('/');
const request = {
...rreq,
url: baseUrl + path,
};
return new BulkApi(this._conn, { responseType }).request(request);
}
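/**
* Create a job and a batch for the given sobject type and operation,
* execute the batch with the given input, and poll until the result is ready
*/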
load(type, operation, optionsOrInput, input) {
let options = {};
if (typeof optionsOrInput === 'string' ||
Array.isArray(optionsOrInput) ||
is_1.default.nodeStream(optionsOrInput)) {
// when optionsOrInput is not a plain object, options are omitted and it is treated as the input
input = optionsOrInput;
}
else {
options = optionsOrInput;
}
const job = this.createJob(type, operation, options);
const batch = job.createBatch();
const cleanup = () => job.close();
const cleanupOnError = (err) => {
if (err.name !== 'PollingTimeout') {
cleanup();
}
};
batch.on('response', cleanup);
batch.on('error', cleanupOnError);
batch.on('queue', () => {
batch?.poll(this.pollInterval, this.pollTimeout);
});
return batch.execute(input);
}
/**
* Execute a bulk query and get a record stream
*/
async query(soql) {
const m = soql.replace(/\([\s\S]+\)/g, '').match(/FROM\s+(\w+)/i);
if (!m) {
throw new Error('No sobject type found in query, maybe caused by invalid SOQL.');
}
const type = m[1];
const recordStream = new record_stream_1.Parsable();
const dataStream = recordStream.stream('csv');
const results = await this.load(type, 'query', soql);
const streams = results.map((result) => this.job(result.jobId).batch(result.batchId).result(result.id).stream());
(0, multistream_1.default)(streams).pipe(dataStream);
return recordStream;
}
/**
* Create a new job instance
*/
createJob(type, operation, options = {}) {
return new Job(this, type, operation, options);
}
/**
* Get a job instance specified by given job ID
*
* @param {String} jobId - Job ID
* @returns {Bulk~Job}
*/
job(jobId) {
return new Job(this, null, null, null, jobId);
}
}
exports.Bulk = Bulk;
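/*
 * Usage sketch (illustrative, not part of the library source). The Bulk
 * instance is attached to each connection as `conn.bulk` by the registerModule
 * hook below. Assuming an authenticated Connection named `conn`, inside an
 * async context:
 *
 *   // load records in one call: creates the job and batch, polls for the
 *   // result, and closes the job when the response arrives
 *   conn.bulk.pollTimeout = 60000; // widen the default 30s polling timeout
 *   const results = await conn.bulk.load('Account', 'insert', [
 *     { Name: 'Example Account 1' },
 *     { Name: 'Example Account 2' },
 *   ]);
 *   // results: [{ id, success, created, errors }, ...]
 *
 *   // bulk query: resolves to a record stream parsed from the result CSV;
 *   // it is consumed here as a plain object-mode Node stream (assumption)
 *   const recordStream = await conn.bulk.query('SELECT Id, Name FROM Account');
 *   for await (const rec of recordStream) {
 *     console.log(rec.Id, rec.Name);
 *   }
 */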
/*--------------------------------------------*/
/*
* Register a hook on connection instantiation to dynamically add this API module's features
*/
(0, jsforce_1.registerModule)('bulk', (conn) => new Bulk(conn));
exports.default = Bulk;