// @heroku/salesforce-sdk-nodejs — Salesforce SDK for Heroku Apps.
// (Compiled JavaScript distribution file; package-page metadata converted to
// this comment so the file parses as JavaScript.)
;
Object.defineProperty(exports, "__esModule", { value: true });
exports.createBulkApi = createBulkApi;
const create_connections_js_1 = require("../utils/create-connections.js");
const bulk_js_1 = require("jsforce/lib/api/bulk.js");
const sync_1 = require("csv-stringify/sync");
const csv_stringify_1 = require("csv-stringify");
const http_api_js_1 = require("jsforce/lib/http-api.js");
const sync_2 = require("csv-parse/sync");
const luxon_1 = require("luxon");
// Ingest payload sizing: data tables are split into chunks serialized below
// SIZE_100_MB — presumably matching the Bulk API v2 per-request CSV upload
// limit (TODO confirm against Salesforce docs).
const SIZE_1_MB = 1000000;
const SIZE_100_MB = 100 * SIZE_1_MB;
// Shared options for all csv-stringify / csv-parse calls in this module.
const CSV_OPTIONS = { delimiter: "," };
/**
 * Creates a Bulk API v2 client for the Salesforce org described by
 * `clientOptions`.
 *
 * The returned object wraps jsforce's bulk2 ingest and query jobs and adds
 * helpers to build CSV-backed data tables, split them into uploadable chunks,
 * ingest them, and page through query results.
 */
function createBulkApi(clientOptions) {
    const connection = (0, create_connections_js_1.createConnection)(clientOptions);
    // Polling defaults come from the jsforce connection so callers can tune
    // them centrally on the connection object.
    const getDefaultPollingOptions = () => {
        return {
            pollInterval: connection.bulk2.pollInterval,
            pollTimeout: connection.bulk2.pollTimeout,
        };
    };
    // Re-hydrates a jsforce IngestJobV2 instance from a serialized job reference.
    const getIngestJob = (jobReference) => {
        return new bulk_js_1.IngestJobV2({
            connection: connection,
            jobInfo: {
                id: jobReference.id,
            },
            pollingOptions: getDefaultPollingOptions(),
        });
    };
    // Re-hydrates a jsforce QueryJobV2 instance from a serialized job
    // reference. The QueryJobV2 constructor takes no job id, so it is patched
    // onto jobInfo after construction.
    const getQueryJob = (jobReference) => {
        const job = new bulk_js_1.QueryJobV2({
            connection: connection,
            query: undefined,
            operation: undefined,
            pollingOptions: getDefaultPollingOptions(),
        });
        job.jobInfo = Object.assign({}, job.jobInfo, { id: jobReference.id });
        return job;
    };
    const bulkApi = {
        /** Aborts the referenced ingest or query job. */
        abort(jobReference) {
            switch (jobReference.type) {
                case "ingestJob":
                    return getIngestJob(jobReference).abort();
                case "queryJob":
                    return getQueryJob(jobReference).abort();
            }
        },
        /** Deletes the referenced ingest or query job. */
        delete(jobReference) {
            switch (jobReference.type) {
                case "ingestJob":
                    return getIngestJob(jobReference).delete();
                case "queryJob":
                    return getQueryJob(jobReference).delete();
            }
        },
        /** Fetches the records that failed during an ingest job, as a data table. */
        getFailedResults(jobReference) {
            return fetchIngestResults({
                connection,
                jobReference,
                resultType: "failedResults",
            });
        },
        /** Returns the current job info for the referenced job. */
        getInfo(jobReference) {
            switch (jobReference.type) {
                case "ingestJob":
                    return getIngestJob(jobReference).check().then(toIngestJobInfo);
                case "queryJob":
                    return getQueryJob(jobReference).check().then(toQueryJobInfo);
            }
        },
        /** Fetches the next page of query results using the previous page's locator. */
        getMoreQueryResults(currentResult, getQueryJobResultsOptions) {
            return fetchQueryResults({
                connection,
                getQueryJobResultsOptions,
                jobReference: currentResult.jobReference,
                locator: currentResult.locator,
            });
        },
        /** Fetches the first page of results for a query job. */
        getQueryResults(jobReference, getQueryJobResultsOptions) {
            return fetchQueryResults({
                connection,
                jobReference,
                getQueryJobResultsOptions,
            });
        },
        /** Fetches the successfully processed records of an ingest job. */
        getSuccessfulResults(jobReference) {
            return fetchIngestResults({
                connection,
                jobReference,
                resultType: "successfulResults",
            });
        },
        /** Fetches the records an ingest job did not process. */
        getUnprocessedRecords(jobReference) {
            return fetchIngestResults({
                connection,
                jobReference,
                resultType: "unprocessedrecords",
            });
        },
        /**
         * Ingests a data table. The table is split into size-bounded chunks
         * and each chunk becomes its own ingest job. Returns one entry per
         * chunk: either a job reference on success, or a failure record
         * carrying the unprocessed chunk, the error, and (when the job was
         * created before failing) its job reference.
         */
        async ingest(options) {
            const results = [];
            const { dataTable } = options;
            for await (const ingestDataTablePayload of bulkApi.splitDataTable(dataTable)) {
                let job;
                try {
                    job = connection.bulk2.createJob(options);
                    await job.open();
                    // BUGFIX: upload only the current chunk. Previously the
                    // entire dataTable was streamed into every chunk's job,
                    // duplicating records and defeating the size-based split.
                    await streamDataTableIntoJob(job, ingestDataTablePayload);
                    await job.close();
                    results.push({ id: job.id, type: "ingestJob" });
                }
                catch (e) {
                    if (e instanceof Error) {
                        results.push({
                            unprocessedRecords: ingestDataTablePayload,
                            error: toClientError(e),
                            jobReference: typeof job?.id === "string"
                                ? { id: job.id, type: "ingestJob" }
                                : undefined,
                        });
                    }
                }
            }
            return results;
        },
        /** Creates a Bulk API v2 query job for the given SOQL and returns its reference. */
        async query(options) {
            const url = new URL([
                connection.instanceUrl,
                "services/data",
                `v${connection.version}`,
                "jobs/query",
            ].join("/"));
            const apiClient = new BulkApiClient(connection);
            const job = await apiClient.request({
                url: url.toString(),
                method: "POST",
                body: JSON.stringify({
                    operation: options.operation ?? "query",
                    query: options.soql,
                }),
                headers: {
                    "Content-Type": "application/json; charset=utf-8",
                },
            });
            return {
                id: job.id,
                type: "queryJob",
            };
        },
        /**
         * Returns a builder that accumulates rows (positional arrays, Maps, or
         * arbitrary objects paired with an extractor function) and produces a
         * data table with the given columns.
         */
        createDataTableBuilder(columns) {
            const rows = [];
            // Positional row: values matched to columns by index.
            function addArrayRow(row) {
                addRowWithExtractor(row, (array, columnName) => {
                    return array[columns.indexOf(columnName)];
                });
            }
            // Map row: values looked up by column name.
            function addMapRow(row) {
                addRowWithExtractor(row, (map, columnName) => map.get(columnName));
            }
            // Normalizes any row into a Map keyed by column name.
            function addRowWithExtractor(row, fieldValueExtractor) {
                const mappedRow = columns.reduce((acc, column) => {
                    const value = fieldValueExtractor(row, column);
                    acc.set(column, value);
                    return acc;
                }, new Map());
                rows.push(mappedRow);
            }
            return {
                addRow(row, fieldValueExtractor) {
                    if (Array.isArray(row)) {
                        addArrayRow(row);
                    }
                    else if (row instanceof Map) {
                        addMapRow(row);
                    }
                    else {
                        addRowWithExtractor(row, fieldValueExtractor);
                    }
                    return this;
                },
                addRows(rows, fieldValueExtractor) {
                    rows.forEach((row) => {
                        this.addRow(row, fieldValueExtractor);
                    });
                    return this;
                },
                build() {
                    // A data table is the row array with a `columns` property attached.
                    return Object.assign(rows, {
                        columns,
                    });
                },
            };
        },
        /**
         * Splits a data table into chunks whose serialized CSV size (header
         * line included) stays below SIZE_100_MB, so each chunk fits in a
         * single ingest upload.
         */
        splitDataTable(dataTable) {
            const columns = dataTable.columns;
            const splitDataTables = [];
            const columnsLine = (0, sync_1.stringify)([columns], CSV_OPTIONS);
            const columnsSize = Buffer.byteLength(columnsLine);
            let currentSize = columnsSize;
            let dataTableBuilder = bulkApi.createDataTableBuilder(columns);
            dataTable.forEach((row) => {
                const rowValues = dataTable.columns.map((column) => row.get(column));
                const rowLine = (0, sync_1.stringify)([rowValues], CSV_OPTIONS);
                const rowSize = Buffer.byteLength(rowLine);
                if (currentSize + rowSize < SIZE_100_MB) {
                    currentSize += rowSize;
                }
                else {
                    // Current chunk is full: seal it and start a new one whose
                    // size accounts for the header line plus this row.
                    splitDataTables.push(dataTableBuilder.build());
                    currentSize = columnsSize + rowSize;
                    dataTableBuilder = bulkApi.createDataTableBuilder(columns);
                }
                dataTableBuilder.addRow(row);
            });
            splitDataTables.push(dataTableBuilder.build());
            return splitDataTables;
        },
        /** Formats a JS Date as a UTC ISO date (yyyy-MM-dd); throws on invalid dates. */
        formatDate(value) {
            const dateTime = luxon_1.DateTime.fromJSDate(value).toUTC();
            if (dateTime.isValid) {
                return dateTime.toISODate();
            }
            throw new Error(`Invalid Date`);
        },
        /** Formats a JS Date as a UTC ISO date-time string; throws on invalid dates. */
        formatDateTime(value) {
            const dateTime = luxon_1.DateTime.fromJSDate(value).toUTC();
            if (dateTime.isValid) {
                return dateTime.toISO();
            }
            throw new Error(`Invalid DateTime`);
        },
        /** Returns the CSV sentinel used to explicitly null out a field value. */
        formatNullValue() {
            return "#N/A";
        },
    };
    return bulkApi;
}
/**
 * Ensures an Error carries a string `errorCode`, tagging unknown errors with
 * "UNKNOWN". Mutates and returns the same Error instance.
 */
function toClientError(error) {
    return isClientError(error)
        ? error
        : Object.assign(error, { errorCode: "UNKNOWN" });
}
/** True when the error already carries a string `errorCode` property. */
function isClientError(error) {
    return typeof error.errorCode === "string";
}
/**
 * Serializes a data table to CSV (header row first) and uploads it into an
 * open ingest job. Rejects on either a CSV serialization error or an upload
 * error; resolves once the upload completes.
 */
async function streamDataTableIntoJob(job, dataTable) {
    await new Promise((resolve, reject) => {
        const csvStream = (0, csv_stringify_1.stringify)(CSV_OPTIONS);
        csvStream.on("error", reject);
        // Hand the stream to the job before writing so serialization and
        // upload proceed concurrently.
        job.uploadData(csvStream).then(resolve, reject);
        csvStream.write(dataTable.columns);
        for (const row of dataTable) {
            csvStream.write(dataTable.columns.map((column) => row.get(column)));
        }
        csvStream.end();
    });
}
/** Converts a jsforce ingest job info payload into the SDK's ingest job-info shape. */
function toIngestJobInfo(jobInfo) {
    const { operation, state } = jobInfo;
    return Object.assign(toJobInfo(jobInfo), {
        jobType: "V2Ingest",
        operation,
        state,
    });
}
/** Converts a jsforce query job info payload into the SDK's query job-info shape. */
function toQueryJobInfo(jobInfo) {
    const { operation, state } = jobInfo;
    return Object.assign(toJobInfo(jobInfo), {
        jobType: "V2Query",
        operation,
        state,
    });
}
/**
 * Normalizes the fields shared by ingest and query job info: numeric
 * apiVersion plus the fixed CSV format settings this SDK always uses.
 * Throws for job types this SDK does not support.
 */
function toJobInfo(jobInfo) {
    const { jobType } = jobInfo;
    if (jobType === "BigObjectIngest" || jobType === "Classic") {
        throw new Error(`JobType "${jobType}" is not supported`);
    }
    // apiVersion arrives as a string like "58.0"; expose the integer part.
    return Object.assign({}, jobInfo, {
        apiVersion: parseInt(`${jobInfo.apiVersion}`, 10),
        columnDelimiter: "COMMA",
        concurrencyMode: "Parallel",
        contentType: "CSV",
        lineEnding: "LF",
    });
}
/**
 * Converts parsed result records (objects keyed by column name) into a data
 * table: an array of Maps with a `columns` property attached.
 */
function resultsToDataTable(results, responseColumns) {
    const columns = convertToColumns(responseColumns);
    const rows = results.map((result) => {
        return new Map(columns.map((column) => [column, result[column]]));
    });
    return Object.assign(rows, { columns });
}
/** Validates that at least one column was parsed and returns a fresh copy. */
function convertToColumns(columns) {
    if (columns.length < 1) {
        throw new Error("parsed data table has no columns");
    }
    return [...columns];
}
/**
 * Extracts the CSV header (column names) from a raw bulk results response
 * body; returns an empty array when no header can be parsed.
 *
 * Fixes two edge cases: a body without a newline previously became
 * `substring(0, -1)` (empty string), and an unparsable/empty header made
 * `parse(...)[0]` yield `undefined`, which crashed downstream column handling
 * in `convertToColumns`. Both now fall back safely.
 */
function parseColumnsFromResponse(response) {
    try {
        const newlineIndex = response.body.indexOf("\n");
        // A single-line body (no trailing newline) is itself the header line.
        const headerLine = newlineIndex === -1
            ? response.body
            : response.body.substring(0, newlineIndex);
        return (0, sync_2.parse)(headerLine, CSV_OPTIONS)[0] ?? [];
    }
    catch (e) {
        // Best-effort: callers treat missing columns as "no parsable header".
        return [];
    }
}
/**
 * Fetches one of an ingest job's CSV result sets (failed / successful /
 * unprocessed) and converts it into a data table. The column names are
 * captured from the raw response header line via a one-shot listener.
 */
async function fetchIngestResults(options) {
    const { connection, jobReference, resultType } = options;
    const url = [
        connection.instanceUrl,
        "services/data",
        `v${connection.version}`,
        "jobs/ingest",
        jobReference.id,
        resultType,
    ].join("/");
    const client = new BulkApiClient(connection);
    let columns = [];
    client.once("response", (response) => {
        columns = parseColumnsFromResponse(response);
    });
    const records = await client.request({
        method: "GET",
        url,
        headers: {
            Accept: "text/csv",
        },
    });
    return resultsToDataTable(records, columns);
}
/**
 * Fetches one page of a query job's results as a data table, along with the
 * pagination locator and record count reported by the Sforce-Locator and
 * Sforce-NumberOfRecords response headers. `done` is true when no further
 * locator was returned.
 */
async function fetchQueryResults(options) {
    const { connection, jobReference } = options;
    const resultsUrl = new URL([
        connection.instanceUrl,
        "services/data",
        `v${connection.version}`,
        "jobs/query",
        jobReference.id,
        "results",
    ].join("/"));
    if (options.locator) {
        resultsUrl.searchParams.set("locator", options.locator);
    }
    const maxRecords = options.getQueryJobResultsOptions?.maxRecords;
    if (maxRecords) {
        resultsUrl.searchParams.set("maxRecords", `${maxRecords}`);
    }
    const client = new BulkApiClient(connection);
    let columns = [];
    let locator;
    let numberOfRecords = 0;
    // One-shot listener: pagination metadata only exists on the raw response.
    client.once("response", (response) => {
        columns = parseColumnsFromResponse(response);
        const locatorHeader = response.headers["sforce-locator"];
        // Salesforce sends the literal string "null" on the last page.
        if (locatorHeader && locatorHeader !== "null") {
            locator = locatorHeader;
        }
        const countHeader = response.headers["sforce-numberofrecords"];
        if (countHeader && /^\d+$/.test(countHeader)) {
            numberOfRecords = parseInt(countHeader, 10);
        }
    });
    const records = await client.request({
        method: "GET",
        url: resultsUrl.toString(),
        headers: {
            Accept: "text/csv",
        },
    });
    return {
        locator,
        numberOfRecords,
        jobReference,
        done: locator === undefined,
        dataTable: resultsToDataTable(records, columns),
    };
}
/**
 * Thin jsforce HttpApi subclass for raw Bulk API v2 calls. Overrides the
 * error-detection hooks so the standard Salesforce error payload
 * (`[{ errorCode, message }]`) is recognized and surfaced.
 */
class BulkApiClient extends http_api_js_1.HttpApi {
    constructor(connection) {
        super(connection, {});
    }
    /** A body is an error payload when it is an array whose first element has an errorCode. */
    hasErrorInResponseBody(body) {
        if (!Array.isArray(body)) {
            return false;
        }
        const [first] = body;
        return typeof first === "object" && "errorCode" in first;
    }
    /** Detects an expired-session response so the base class can re-authenticate. */
    isSessionExpired(response) {
        return (response.statusCode === 401 && /INVALID_SESSION_ID/.test(response.body));
    }
    /** Maps the Salesforce error payload onto the SDK's client-error shape. */
    parseError(body) {
        const [{ errorCode, message }] = body;
        return { errorCode, message };
    }
}