dynamo-plus
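// Bundler-generated CommonJS interop helpers (esbuild-style): they re-export the
// ESM build's named exports for require() consumers.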
var __defProp = Object.defineProperty;
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
var __getOwnPropNames = Object.getOwnPropertyNames;
var __hasOwnProp = Object.prototype.hasOwnProperty;
var __export = (target, all) => {
  for (var name in all)
    __defProp(target, name, { get: all[name], enumerable: true });
};
var __copyProps = (to, from, except, desc) => {
  if (from && typeof from === "object" || typeof from === "function") {
    for (let key of __getOwnPropNames(from))
      if (!__hasOwnProp.call(to, key) && key !== except)
        __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
  }
  return to;
};
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
// src/index.ts
var src_exports = {};
__export(src_exports, {
  DynamoPlus: () => DynamoPlus
});
module.exports = __toCommonJS(src_exports);
var import_client_dynamodb = require("@aws-sdk/client-dynamodb");
var import_lib_dynamodb = require("@aws-sdk/lib-dynamodb");
// src/utils/chunk.ts
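// Splits an array into consecutive sub-arrays of at most chunkSize elements.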
var chunk = (originalArr, chunkSize) => {
  const input = Array.from(originalArr);
  const chunks = [];
  while (input.length) {
    chunks.push(input.splice(0, chunkSize));
  }
  return chunks;
};
// src/utils/batchWriteRetry.ts
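// Sends a BatchWrite and recursively re-submits any UnprocessedItems until DynamoDB
// accepts them all (retries immediately, with no backoff between attempts).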
var batchWriteRetry = async (dynamo, writeParams) => {
  const { UnprocessedItems = {} } = await dynamo.batchWrite(writeParams);
  if (Object.keys(UnprocessedItems).length) {
    await batchWriteRetry(dynamo, { RequestItems: UnprocessedItems });
  }
};
// src/utils/batchReadRetry.ts
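// Sends a BatchGet, recursively re-fetches any UnprocessedKeys, and merges the
// retried Responses into the per-table result arrays.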
var batchReadRetry = async (dynamo, getParams) => {
  const { Responses = {}, UnprocessedKeys = {} } = await dynamo.batchGet(getParams);
  if (Object.keys(UnprocessedKeys).length) {
    const retriedResponses = await batchReadRetry(dynamo, { ...getParams, RequestItems: UnprocessedKeys });
    Object.keys(retriedResponses).forEach((tableName) => {
      if (!Responses[tableName])
        Responses[tableName] = [];
      Responses[tableName].push(...retriedResponses[tableName]);
    });
  }
  return Responses;
};
// src/utils/combineGenerators.ts
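// Merges several async iterables into a single stream, yielding items as soon as any
// underlying iterator produces one; exhausted iterators are parked on a never-resolving
// promise so Promise.race ignores them.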
var neverPromise = new Promise(() => {
});
var getNext = async (asyncIterator, index) => await asyncIterator.next().then((result) => ({ index, result }));
var combineAsyncIterables = async function* (asyncIterables) {
  const asyncIterators = Array.from(asyncIterables, (o) => o[Symbol.asyncIterator]());
  let remainingIterables = asyncIterators.length;
  const nextPromises = asyncIterators.map(getNext);
  try {
    while (remainingIterables) {
      const { index, result } = await Promise.race(nextPromises);
      if (result.done) {
        nextPromises[index] = neverPromise;
        remainingIterables--;
      } else {
        nextPromises[index] = getNext(asyncIterators[index], index);
        yield result.value;
      }
    }
  } finally {
    for (const [index, iterator] of asyncIterators.entries()) {
      if (nextPromises[index] !== neverPromise && iterator.return != null) {
        void iterator.return();
      }
    }
  }
};
// src/index.ts
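// Re-throws an AWS SDK error as a plain Error carrying only the original message,
// resetting the stack trace and dropping SDK-specific metadata.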
var resetErrorStack = async (err) => {
  throw new Error(err.message);
};
var DynamoPlus = class {
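  // Underlying DynamoDBDocumentClient, built from the supplied DynamoDBClient config
  // and optional marshalling (translate) options.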
  client;
  constructor(dynamoDBClientConfig = {}, translateConfig = {}) {
    this.client = import_lib_dynamodb.DynamoDBDocumentClient.from(
      new import_client_dynamodb.DynamoDBClient(dynamoDBClientConfig),
      translateConfig
    );
  }
  // #region Core Methods
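  // Thin wrappers around the corresponding @aws-sdk/lib-dynamodb commands; each
  // normalizes errors via resetErrorStack, and get() unwraps result.Item.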
  async batchGet(input) {
    return await this.client.send(new import_lib_dynamodb.BatchGetCommand(input)).catch(resetErrorStack);
  }
  async batchWrite(input) {
    return await this.client.send(new import_lib_dynamodb.BatchWriteCommand(input)).catch(resetErrorStack);
  }
  async delete(input) {
    return await this.client.send(new import_lib_dynamodb.DeleteCommand(input)).catch(resetErrorStack);
  }
  async get(input) {
    const result = await this.client.send(new import_lib_dynamodb.GetCommand(input)).catch(resetErrorStack);
    return result.Item ? result.Item : void 0;
  }
  async put(input) {
    return await this.client.send(new import_lib_dynamodb.PutCommand(input)).catch(resetErrorStack);
  }
  async query(input) {
    return await this.client.send(new import_lib_dynamodb.QueryCommand(input)).catch(resetErrorStack);
  }
  async scan(input) {
    return await this.client.send(new import_lib_dynamodb.ScanCommand(input)).catch(resetErrorStack);
  }
  async transactGet(input) {
    return await this.client.send(new import_lib_dynamodb.TransactGetCommand(input)).catch(resetErrorStack);
  }
  async transactWrite(input) {
    return await this.client.send(new import_lib_dynamodb.TransactWriteCommand(input)).catch(resetErrorStack);
  }
  async update(input) {
    return await this.client.send(new import_lib_dynamodb.UpdateCommand(input)).catch(resetErrorStack);
  }
  // #endregion
  // #region dynamo-plus addons
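  // Deletes the given Keys in chunks of up to 25 DeleteRequests per BatchWrite
  // (the BatchWriteItem limit), processed sequentially with unprocessed items retried.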
  async deleteAll(params) {
    const {
      TableName,
      Keys,
      BatchSize = 25
    } = params;
    if (BatchSize > 25)
      throw new Error("Can't batch more than 25 items at a time: https://docs.aws.amazon.com/amazondynamodb/latest/APIReference/API_BatchWriteItem.html");
    const batches = chunk(Keys, BatchSize);
    await batches.reduce(async (chain, batch) => {
      await chain;
      await batchWriteRetry(this, {
        RequestItems: {
          [TableName]: batch.map((key) => ({ DeleteRequest: { Key: key } }))
        }
      });
    }, Promise.resolve()).catch(resetErrorStack);
  }
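  // Fetches the given Keys in chunks of up to 100 per BatchGet (the BatchGetItem limit)
  // and returns the items as a single flattened array.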
  async getAll(params) {
    const {
      TableName,
      Keys = [],
      BatchSize = 100
    } = params;
    if (BatchSize > 100)
      throw new Error("Can't retrieve more than 100 items at a time: https://docs.aws.amazon.com/amazondynamodb/latest/APIReference/API_BatchGetItem.html");
    const batches = chunk(Keys, BatchSize);
    return await batches.reduce(async (chain, batch) => {
      const previousResults = await chain;
      const output = await batchReadRetry(this, {
        RequestItems: {
          [TableName]: {
            Keys: batch
          }
        }
      });
      return [...previousResults, ...output[TableName] ? output[TableName] : []];
    }, Promise.resolve([])).catch(resetErrorStack);
  }
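  // Writes the given Items in chunks of up to 25 PutRequests per BatchWrite,
  // processed sequentially with unprocessed items retried.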
  async putAll(params) {
    const {
      TableName,
      Items,
      BatchSize = 25
    } = params;
    if (BatchSize > 25)
      throw new Error("Can't batch more than 25 items at a time: https://docs.aws.amazon.com/amazondynamodb/latest/APIReference/API_BatchWriteItem.html");
    const batches = chunk(Items, BatchSize);
    await batches.reduce(async (chain, batch) => {
      await chain;
      await batchWriteRetry(this, {
        RequestItems: {
          [TableName]: batch.map((item) => ({ PutRequest: { Item: item } }))
        }
      });
    }, Promise.resolve()).catch(resetErrorStack);
  }
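  // Async generator that pages through a Query via paginateQuery and yields items one at a time.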
  async *queryIterator(params, pageSize = 100) {
    const paginator = (0, import_lib_dynamodb.paginateQuery)({ client: this.client, pageSize }, params);
    for await (const page of paginator) {
      if (page.Items) {
        for (const item of page.Items) {
          yield item;
        }
      }
    }
  }
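  // Drains queryIterator into an array; convenient, but loads the full result set into memory.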
  async queryAll(params) {
    const queryResults = this.queryIterator(params);
    const results = [];
    for await (const item of queryResults) {
      results.push(item);
    }
    return results;
  }
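  // Async generator that pages through a Scan (optionally a single segment of a
  // parallel scan) via paginateScan and yields items one at a time.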
  async *scanSegmentIterator(params, pageSize = 100) {
    const paginator = (0, import_lib_dynamodb.paginateScan)({ client: this.client, pageSize }, params);
    for await (const page of paginator) {
      if (page.Items) {
        for (const item of page.Items) {
          yield item;
        }
      }
    }
  }
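  // Runs one scanSegmentIterator per segment and merges them with combineAsyncIterables,
  // so a parallel scan yields items in arrival order.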
  async *scanIterator(params, pageSize = 100, parallelScanSegments = 1) {
    if (parallelScanSegments < 1)
      throw new Error("You must partition the table into at least 1 segment for the scan");
    const segmentIterators = Array.from({ length: parallelScanSegments }).map((val, i) => {
      return this.scanSegmentIterator(
        {
          ...params,
          Segment: i,
          TotalSegments: parallelScanSegments
        },
        pageSize
      );
    });
    for await (const item of combineAsyncIterables(segmentIterators)) {
      yield item;
    }
  }
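  // Drains scanIterator (single segment, default page size) into an array.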
  async scanAll(params) {
    const scanResults = this.scanIterator(params);
    const results = [];
    for await (const item of scanResults) {
      results.push(item);
    }
    return results;
  }
  // #endregion
};
// Annotate the CommonJS export names for ESM import in node:
0 && (module.exports = {
DynamoPlus
});
;
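// Usage sketch (illustrative only; the table name, key shape, and region below are
// assumptions, not taken from this file; run inside an async function):
//
//   const { DynamoPlus } = require("dynamo-plus");
//   const db = new DynamoPlus({ region: "eu-west-1" });
//
//   // Single-item helpers unwrap the DocumentClient commands:
//   const user = await db.get({ TableName: "users", Key: { id: "42" } });
//
//   // putAll/getAll/deleteAll chunk the work into BatchWrite/BatchGet calls
//   // and retry unprocessed items automatically:
//   await db.putAll({ TableName: "users", Items: [{ id: "1" }, { id: "2" }] });
//
//   // scanIterator streams items lazily, optionally across parallel segments:
//   for await (const item of db.scanIterator({ TableName: "users" }, 100, 4)) {
//     console.log(item);
//   }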