/*
 * @heroku/applink
 * Applink SDK for Heroku Apps.
 * 284 lines (283 loc) • 10.6 kB — JavaScript
 */
;
// CommonJS module preamble emitted by the TypeScript compiler.
Object.defineProperty(exports, "__esModule", { value: true });
exports.DataApiImpl = void 0;
const connection_1 = require("jsforce/lib/connection");
const unit_of_work_1 = require("./unit-of-work");
const maps_1 = require("../utils/maps");
// sObject types whose listed fields hold binary content and need special
// handling: base64-encoded on create/update, eagerly downloaded on query.
const knownBinaryFields = { ContentVersion: ["VersionData"] };
class DataApiImpl {
    /**
     * Thin data-access client over a jsforce Connection, exposing CRUD
     * operations, SOQL queries, and Composite Graph unit-of-work commits.
     *
     * @param {string} accessToken - OAuth access token for the org.
     * @param {string} apiVersion - Salesforce REST API version (e.g. "58.0").
     * @param {string} domainUrl - Instance URL of the org.
     */
    constructor(accessToken, apiVersion, domainUrl) {
        this.accessToken = accessToken;
        this.apiVersion = apiVersion;
        this.domainUrl = domainUrl;
    }
    /**
     * Lazily creates (and caches) the underlying jsforce Connection.
     * @returns {Promise<Connection>} the cached connection.
     */
    async connect() {
        if (!this.conn) {
            this.conn = new connection_1.Connection({
                accessToken: this.accessToken,
                instanceUrl: this.domainUrl,
                version: this.apiVersion,
            });
        }
        return this.conn;
    }
    /**
     * Runs `callback` against a connected client.
     *
     * The previous implementation invoked the async callback inside a
     * try/catch without awaiting it, so the catch could never observe the
     * callback's rejection. Awaiting in an async method yields one
     * consistent rejection path for both connect and callback failures.
     *
     * @param {(conn: Connection) => Promise<T>} callback
     * @returns {Promise<T>} whatever the callback resolves to.
     */
    async promisifyRequests(callback) {
        const conn = await this.connect();
        return await callback(conn);
    }
    /**
     * Inserts a record. Binary fields (e.g. ContentVersion.VersionData)
     * are base64-encoded before submission.
     *
     * @returns {Promise<{id: string}>} the created record's id.
     */
    async create(recordCreate) {
        return this.promisifyRequests(async (conn) => {
            try {
                const fields = buildCreateFields(recordCreate);
                const response = await conn.insert(recordCreate.type, fields);
                this.validate_record_response(response);
                return { id: response.id };
            }
            catch (e) {
                return this.handle_bad_response(e);
            }
        });
    }
    /**
     * Executes a SOQL query and builds SDK-shaped records (including
     * nested records, sub-query results, and eager binary downloads).
     *
     * @param {string} soql - the SOQL statement to run.
     */
    async query(soql) {
        return this.promisifyRequests(async (conn) => {
            try {
                const response = await conn.query(soql);
                this.validate_records_response(response);
                const records = await Promise.all(response.records.map((recordData) => buildQueriedRecord(conn, recordData)));
                return {
                    done: response.done,
                    totalSize: response.totalSize,
                    records,
                    nextRecordsUrl: response.nextRecordsUrl,
                };
            }
            catch (e) {
                return this.handle_bad_response(e);
            }
        });
    }
    /**
     * Fetches the next page of a previous query result. When there is no
     * `nextRecordsUrl`, resolves to an empty page without calling out.
     */
    async queryMore(queryResult) {
        if (!queryResult.nextRecordsUrl) {
            return {
                done: queryResult.done,
                totalSize: queryResult.totalSize,
                records: [],
                nextRecordsUrl: queryResult.nextRecordsUrl,
            };
        }
        return this.promisifyRequests(async (conn) => {
            try {
                const response = await conn.queryMore(queryResult.nextRecordsUrl);
                this.validate_records_response(response);
                const records = await Promise.all(response.records.map((recordData) => buildQueriedRecord(conn, recordData)));
                return {
                    done: response.done,
                    totalSize: response.totalSize,
                    records,
                    nextRecordsUrl: response.nextRecordsUrl,
                };
            }
            catch (e) {
                return this.handle_bad_response(e);
            }
        });
    }
    /**
     * Updates a record. The id may be supplied under any casing of "id" in
     * `fields`; it is normalized to `Id` by buildUpdateFields.
     *
     * @returns {Promise<{id: string}>} the updated record's id.
     */
    async update(recordUpdate) {
        return this.promisifyRequests(async (conn) => {
            const fields = buildUpdateFields(recordUpdate);
            try {
                const response = await conn.update(recordUpdate.type, fields);
                this.validate_record_response(response);
                return { id: response.id };
            }
            catch (e) {
                return this.handle_bad_response(e);
            }
        });
    }
    /**
     * Deletes the record of the given sObject type and id.
     *
     * @returns {Promise<{id: string}>} the deleted record's id.
     */
    async delete(type, id) {
        return this.promisifyRequests(async (conn) => {
            try {
                const response = await conn.delete(type, id);
                this.validate_record_response(response);
                return { id: response.id };
            }
            catch (e) {
                return this.handle_bad_response(e);
            }
        });
    }
    /** Creates a fresh, empty unit of work bound to this API version. */
    newUnitOfWork() {
        return new unit_of_work_1.UnitOfWorkImpl(this.apiVersion);
    }
    /**
     * Submits all queued subrequests as a single Composite Graph API call
     * and resolves to a Map from ReferenceId to each subrequest's
     * RecordModificationResult.
     */
    commitUnitOfWork(unitOfWork) {
        return this.promisifyRequests(async (conn) => {
            const subrequests = unitOfWork.subrequests;
            if (subrequests.length === 0) {
                return new Map();
            }
            const requestBody = {
                graphs: [
                    {
                        graphId: "graph0",
                        compositeRequest: subrequests.map(([referenceId, subrequest]) => ({
                            referenceId: referenceId.toString(),
                            method: subrequest.httpMethod,
                            url: subrequest.buildUri(this.apiVersion),
                            body: subrequest.body,
                        })),
                    },
                ],
            };
            const requestResult = await conn.requestPost(`/services/data/v${this.apiVersion}/composite/graph`, requestBody);
            if (requestResult.graphs.length !== 1) {
                throw new Error("Composite REST API unexpectedly returned more or less than one graph!");
            }
            const keyValues = await Promise.all(requestResult.graphs[0].graphResponse.compositeResponse.map(async (compositeResponse) => {
                const subrequest = subrequests.find(([subrequestReferenceId]) => subrequestReferenceId.toString() ===
                    compositeResponse.referenceId);
                if (!subrequest) {
                    // Defensive: an unmatched referenceId previously crashed
                    // with a TypeError on `subrequest[1]`.
                    throw new Error("Composite REST API returned a response for an unknown referenceId: " +
                        compositeResponse.referenceId);
                }
                const recordModificationResult = await subrequest[1].processResponse(compositeResponse.httpStatusCode, compositeResponse.httpHeaders, compositeResponse.body);
                return [subrequest[0], recordModificationResult];
            }));
            return new Map(keyValues);
        });
    }
    /**
     * Ensures the response is a JSON object. Rejects `null` explicitly,
     * since `typeof null === "object"` would otherwise let it through.
     */
    validate_response(response) {
        if (response === null || typeof response !== "object") {
            throw new Error("Could not parse API response as JSON: " + JSON.stringify(response));
        }
    }
    /** Ensures a single-record response carries an `id`. */
    validate_record_response(response) {
        this.validate_response(response);
        if (typeof response.id === "undefined") {
            throw new Error("Could not read API response `id`: " + JSON.stringify(response));
        }
    }
    /**
     * Ensures a query response carries a mappable `records` collection.
     * Optional chaining means a `records: null` payload now raises the
     * intended descriptive Error instead of a raw TypeError.
     */
    validate_records_response(response) {
        this.validate_response(response);
        if (typeof response.records?.map !== "function") {
            throw new Error("Could not read API response `records`: " + JSON.stringify(response));
        }
    }
    /**
     * Normalizes jsforce HttpApiError (errorCode ERROR_HTTP_<status>, raised
     * for non-JSON error bodies) into a clearer message, preserving the raw
     * body on `error.content`; always rethrows.
     * @throws {Error} always.
     */
    handle_bad_response(error) {
        if (error.constructor.name === "HttpApiError" &&
            error.errorCode &&
            error.errorCode.startsWith("ERROR_HTTP_")) {
            error.content = error.message;
            error.message = "Unexpected response with status: " + error.errorCode;
        }
        throw error;
    }
}
// Public CommonJS named export of the Data API implementation.
exports.DataApiImpl = DataApiImpl;
/**
 * Converts a raw jsforce query record into the SDK's record shape:
 * case-insensitive maps of scalar fields, nested (parent) records,
 * sub-query (child) results, and eagerly downloaded binary fields.
 *
 * Fixes relative to the previous version:
 * - plain `for...of` instead of `for await...of` over Object.entries
 *   (the entries are plain values, not promises);
 * - falsy-but-valid scalar values (`false`, `0`, `""`) are kept; a
 *   redundant truthiness check previously dropped them.
 *
 * @param {Connection} conn - connection used for binary downloads.
 * @param {object} data - raw record including its `attributes` envelope.
 * @returns {Promise<object>} the built record.
 */
async function buildQueriedRecord(conn, data) {
    const type = data.attributes.type;
    const fields = {};
    const binaryFields = {};
    const subQueryResults = {};
    for (const [key, val] of Object.entries(data)) {
        // Skip the metadata envelope and absent (null/undefined) values.
        if (key === "attributes" || val == null) {
            continue;
        }
        if (type in knownBinaryFields && knownBinaryFields[type].includes(key)) {
            // Download binary content now; keep the original URL in `fields`.
            binaryFields[key] = await eagerlyLoadBinaryField(conn, type, key, val);
            fields[key] = val;
        }
        else if (typeof val === "object") {
            if ("attributes" in val) {
                // Related (parent) record reference.
                fields[key] = await buildQueriedRecord(conn, val);
            }
            else {
                // Child relationship sub-query result.
                subQueryResults[key] = await buildSubQueryResult(conn, key, val);
            }
        }
        else {
            // Scalar field; `false`, `0`, and `""` are legitimate values.
            fields[key] = val;
        }
    }
    const queriedRecord = {
        type,
        fields: (0, maps_1.createCaseInsensitiveMap)(fields),
        subQueryResults: (0, maps_1.createCaseInsensitiveMap)(subQueryResults),
    };
    // Only attach `binaryFields` when at least one binary column was present.
    if (Object.keys(binaryFields).length) {
        return {
            ...queriedRecord,
            binaryFields: (0, maps_1.createCaseInsensitiveMap)(binaryFields),
        };
    }
    return queriedRecord;
}
/**
 * Builds the field payload for a create call: copies `record.fields` and,
 * for known binary columns of the record's type, base64-encodes any Buffer
 * supplied via `record.binaryFields`.
 *
 * @param {object} record - record with `type`, `fields`, and optional `binaryFields`.
 * @returns {object} a new fields object safe to submit to the REST API.
 * @throws {Error} if a binary column appears in both `fields` and `binaryFields`.
 */
function buildCreateFields(record) {
    const fields = { ...record.fields };
    const binaryFieldNames = record.type in knownBinaryFields
        ? knownBinaryFields[record.type]
        : [];
    for (const binFieldName of binaryFieldNames) {
        const binaryValue = record.binaryFields && record.binaryFields[binFieldName];
        if (!Buffer.isBuffer(binaryValue)) {
            continue;
        }
        // A column may be supplied as raw bytes OR as a plain field, not both.
        if (record.fields[binFieldName]) {
            throw new Error(`${binFieldName} provided in both fields and binaryFields of ${record.type}, but is only supported in one or the other.`);
        }
        fields[binFieldName] = binaryValue.toString("base64");
    }
    return fields;
}
/**
 * Builds the field payload for an update call. Starts from the create
 * payload, then normalizes whichever casing of "id" the caller used into
 * the canonical `Id` key; falls back to an empty `Id` when none was given.
 *
 * @param {object} record - record with `type`, `fields`, and optional `binaryFields`.
 * @returns {object} a new fields object with a canonical `Id` entry.
 */
function buildUpdateFields(record) {
    const fields = buildCreateFields(record);
    // All four case variants of the two-letter key "id".
    const idKey = ["id", "Id", "ID", "iD"].find((k) => k in record.fields);
    if (idKey !== undefined) {
        delete fields[idKey];
        fields["Id"] = record.fields[idKey];
    }
    if (fields["Id"] === undefined) {
        fields["Id"] = "";
    }
    return fields;
}
/**
 * Downloads the content behind a binary field's URL and returns it as a
 * Buffer. Any failure — a non-string URL, a non-string response body, or a
 * request error — is reported as an Error naming the field.
 *
 * @param {Connection} conn - connection whose `request` fetches the URL.
 * @param {string} type - sObject type (for error messages).
 * @param {string} key - field name (for error messages).
 * @param {*} val - the field value; expected to be a URL string.
 * @returns {Promise<Buffer>} the downloaded bytes.
 * @throws {Error} when the content cannot be loaded.
 */
async function eagerlyLoadBinaryField(conn, type, key, val) {
    if (typeof val === "string") {
        let body;
        try {
            body = await conn.request(val);
        }
        catch (err) {
            throw new Error(`Unable to load binary field data for ${type}.${key}: ${err}`);
        }
        if (typeof body === "string") {
            return Buffer.from(body, "binary");
        }
    }
    throw new Error(`Unable to load binary field data for ${type}.${key}`);
}
/**
 * Builds a sub-query (child relationship) result from a raw query payload.
 * The payload must look like a QueryResult (`done`, `totalSize`, `records`);
 * each raw child record is converted via buildQueriedRecord.
 *
 * @param {Connection} conn - connection forwarded to buildQueriedRecord.
 * @param {string} key - relationship name (for error messages).
 * @param {*} val - candidate QueryResult payload.
 * @returns {Promise<object>} the built sub-query result.
 * @throws {Error} when the payload does not look like a QueryResult.
 */
async function buildSubQueryResult(conn, key, val) {
    const { done, nextRecordsUrl, totalSize, records } = val ?? {};
    const looksLikeQueryResult = typeof done === "boolean" &&
        typeof totalSize === "number" &&
        Array.isArray(records);
    if (!looksLikeQueryResult) {
        throw new Error(`Unable to load subQuery data for ${key}`);
    }
    const builtRecords = await Promise.all(records.map((rawRecord) => buildQueriedRecord(conn, rawRecord)));
    return {
        done,
        nextRecordsUrl,
        totalSize,
        records: builtRecords,
    };
}