UNPKG

@bitblit/ratchet-aws

Version:

Common tools for use with AWS, in both the browser and Node.js

671 lines 29.4 kB
import {
  BatchGetCommand,
  BatchWriteCommand,
  DeleteCommand,
  GetCommand,
  PutCommand,
  QueryCommand,
  ScanCommand,
  UpdateCommand,
} from '@aws-sdk/lib-dynamodb';
import { ConditionalCheckFailedException, ProvisionedThroughputExceededException } from '@aws-sdk/client-dynamodb';
import { Logger } from '@bitblit/ratchet-common/logger/logger';
import { PromiseRatchet } from '@bitblit/ratchet-common/lang/promise-ratchet';
import { ErrorRatchet } from '@bitblit/ratchet-common/lang/error-ratchet';
import { DurationRatchet } from '@bitblit/ratchet-common/lang/duration-ratchet';
import { RequireRatchet } from '@bitblit/ratchet-common/lang/require-ratchet';
import { NumberRatchet } from '@bitblit/ratchet-common/lang/number-ratchet';

/**
 * Convenience wrapper around a DynamoDB document client that layers on:
 * - automatic exponential-backoff retry when DynamoDB reports
 *   ProvisionedThroughputExceededException,
 * - full pagination over Query/Scan results (following LastEvaluatedKey),
 * - batched put/delete/get helpers that retry UnprocessedItems/UnprocessedKeys.
 *
 * NOTE(review): several "fullyExecute*" helpers mutate the query/scan input
 * object they are handed (Select, Limit, ExclusiveStartKey) — callers should
 * not reuse those request objects.
 */
export class DynamoRatchet {
  awsDDB;

  /**
   * @param awsDDB DynamoDBDocumentClient to delegate all calls to (required).
   */
  constructor(awsDDB) {
    this.awsDDB = awsDDB;
    if (!awsDDB) {
      // Bug fix: previously threw a bare string, which carries no stack trace
      throw new Error('awsDDB may not be null');
    }
  }

  /** The wrapped DynamoDBDocumentClient. */
  get dynamoDBDocumentClient() {
    return this.awsDDB;
  }

  /** Legacy accessor for the wrapped client (same object as dynamoDBDocumentClient). */
  getDDB() {
    return this.awsDDB;
  }

  /**
   * Returns true if a Limit-1 scan of the table yields no items.
   */
  async tableIsEmpty(tableName) {
    const scan = {
      TableName: tableName,
      Limit: 1,
    };
    const out = await this.throughputSafeScanOrQuery((o) => this.scanPromise(o), scan);
    return out.Items.length === 0;
  }

  /** Sends a single ScanCommand (no retry/pagination). */
  async scanPromise(input) {
    return this.awsDDB.send(new ScanCommand(input));
  }

  /** Sends a single QueryCommand (no retry/pagination). */
  async queryPromise(input) {
    return this.awsDDB.send(new QueryCommand(input));
  }

  /**
   * Runs proc(input) (a scan or query), retrying with exponential backoff when
   * DynamoDB reports provisioned throughput exceeded.
   *
   * @param proc Function executing the request (e.g. wraps scanPromise/queryPromise)
   * @param input Request object passed to proc; if falsy, returns null immediately
   * @param maxTries Optional cap on attempts; falsy means retry indefinitely
   * @param inCurrentTry Optional starting attempt counter (used by re-entrant callers)
   * @returns The first successful response, or throws after maxTries throughput failures
   */
  async throughputSafeScanOrQuery(proc, input, maxTries, inCurrentTry) {
    let rval = null;
    if (input) {
      let currentTry = inCurrentTry ?? 0;
      do {
        currentTry++;
        try {
          rval = await proc(input);
        } catch (err) {
          if (DynamoRatchet.objectIsErrorWithProvisionedThroughputExceededExceptionCode(err)) {
            const wait = Math.pow(2, currentTry) * 1000;
            Logger.debug('Exceeded scan throughput for %j : Try %d of %d (Waiting %d ms)', input, currentTry, maxTries, wait);
            await PromiseRatchet.wait(wait);
            // Bug fix: currentTry was previously incremented a second time here,
            // which silently halved the effective retry budget
          } else {
            throw err;
          }
        }
      } while (!rval && (!maxTries || currentTry < maxTries));
      if (!rval) {
        ErrorRatchet.throwFormattedErr('throughputSafeScan failed - tried %d times, kept running into throughput exceeded : %j', maxTries, input);
      }
    }
    return rval;
  }

  /**
   * Runs a COUNT query across all pages, summing Count/ScannedCount.
   * Mutates qry (sets Select, clears Limit, advances ExclusiveStartKey).
   * A pre-set qry.Limit acts as a soft cap on the total count.
   *
   * @returns {count, scannedCount, pages} or null on failure (error is logged)
   */
  async fullyExecuteQueryCount(qry, delayMS = 0) {
    try {
      qry.Select = 'COUNT';
      Logger.debug('Executing count query : %j', qry);
      const rval = {
        count: 0,
        scannedCount: 0,
        pages: 0,
      };
      const start = new Date().getTime();
      let qryResults = null;
      const myLimit = qry.Limit;
      qry.Limit = null; // Limit is reapplied as a soft cap below, not per-page
      do {
        qryResults = await this.throughputSafeScanOrQuery((o) => this.queryPromise(o), qry);
        rval.count += qryResults['Count'];
        rval.scannedCount += qryResults['ScannedCount'];
        rval.pages++;
        qry['ExclusiveStartKey'] = qryResults.LastEvaluatedKey;
        await PromiseRatchet.wait(delayMS);
        Logger.silly('Rval is now %j', rval);
        if (myLimit && rval.count >= myLimit && qry['ExclusiveStartKey']) {
          Logger.info('Aborting query since hit limit of %d', myLimit);
          qry['ExclusiveStartKey'] = null;
        }
      } while (qry['ExclusiveStartKey']);
      const end = new Date().getTime();
      Logger.debug('Finished, returned %j in %s for %j', rval, DurationRatchet.formatMsDuration(end - start, true), qry);
      return rval;
    } catch (err) {
      Logger.error('Failed with %s, q: %j', err, qry, err);
      return null;
    }
  }

  /**
   * Collects every item of a fully-paginated query into an array.
   * See fullyExecuteProcessOverQuery for pagination/limit semantics.
   */
  async fullyExecuteQuery(qry, delayMS = 0, softLimit = null) {
    const rval = [];
    await this.fullyExecuteProcessOverQuery(qry, async (v) => {
      rval.push(v);
    }, delayMS, softLimit);
    return rval;
  }

  /**
   * Streams every item of a query through proc, following LastEvaluatedKey.
   * Stops early once softLimit items were processed, or if qry.Limit is set
   * (a hard Limit disables pagination). Mutates qry.ExclusiveStartKey.
   * Errors are logged, not rethrown.
   *
   * @returns Number of items processed
   */
  async fullyExecuteProcessOverQuery(qry, proc, delayMS = 0, softLimit = null) {
    let cnt = 0;
    try {
      Logger.debug('Executing query : %j', qry);
      const start = new Date().getTime();
      Logger.debug('Pulling %j', qry);
      let qryResults = await this.throughputSafeScanOrQuery((o) => this.queryPromise(o), qry);
      for (const qri of qryResults.Items) {
        await proc(qri);
        cnt++;
      }
      let pages = 0;
      let blankPages = 0;
      while (qryResults.LastEvaluatedKey && (softLimit === null || cnt < softLimit) && !qry.Limit) {
        Logger.silly('Found more rows - requery with key %j', qryResults.LastEvaluatedKey);
        qry['ExclusiveStartKey'] = qryResults.LastEvaluatedKey;
        qryResults = await this.throughputSafeScanOrQuery((o) => this.queryPromise(o), qry);
        for (const qri of qryResults.Items) {
          await proc(qri);
          cnt++;
        }
        Logger.silly('Have processed %d items', cnt);
        pages++;
        blankPages += qryResults.Count === 0 ? 1 : 0;
        await PromiseRatchet.wait(delayMS);
      }
      const end = new Date().getTime();
      Logger.debug('Finished, processed %d rows in %s for %j (%d blank pages, %d total pages)', cnt, DurationRatchet.formatMsDuration(end - start, true), qry, blankPages, pages);
    } catch (err) {
      Logger.error('Failed with %s, q: %j', err, qry, err);
    }
    return cnt;
  }

  /**
   * Scan-flavored twin of fullyExecuteQueryCount; same mutation and
   * soft-limit semantics. Returns {count, scannedCount, pages} or null.
   */
  async fullyExecuteScanCount(scan, delayMS = 0) {
    try {
      scan.Select = 'COUNT';
      const rval = {
        count: 0,
        scannedCount: 0,
        pages: 0,
      };
      Logger.debug('Executing scan count : %j', scan);
      const start = new Date().getTime();
      let qryResults = null;
      const myLimit = scan.Limit;
      scan.Limit = null;
      do {
        qryResults = await this.throughputSafeScanOrQuery((o) => this.scanPromise(o), scan);
        rval.count += qryResults['Count'];
        rval.scannedCount += qryResults['ScannedCount'];
        rval.pages++;
        scan['ExclusiveStartKey'] = qryResults?.LastEvaluatedKey;
        await PromiseRatchet.wait(delayMS);
        Logger.silly('Rval is now %j', rval);
        if (myLimit && rval.count >= myLimit && scan['ExclusiveStartKey']) {
          Logger.info('Aborting scan since hit limit of %d', myLimit);
          scan['ExclusiveStartKey'] = null;
        }
      } while (scan['ExclusiveStartKey']);
      const end = new Date().getTime();
      Logger.debug('Finished, returned %j in %s for %j', rval, DurationRatchet.formatMsDuration(end - start, true), scan);
      return rval;
    } catch (err) {
      Logger.error('Failed with %s, q: %j', err, scan, err);
      return null;
    }
  }

  /**
   * Collects every item of a fully-paginated scan into an array.
   * See fullyExecuteProcessOverScan for pagination/limit semantics.
   */
  async fullyExecuteScan(scan, delayMS = 0, softLimit = null) {
    const rval = [];
    await this.fullyExecuteProcessOverScan(scan, async (v) => {
      rval.push(v);
    }, delayMS, softLimit);
    return rval;
  }

  /**
   * Streams every item of a scan through proc, following LastEvaluatedKey.
   * Same early-exit semantics as fullyExecuteProcessOverQuery.
   *
   * @returns Number of items processed
   */
  async fullyExecuteProcessOverScan(scan, proc, delayMS = 0, softLimit = null) {
    let cnt = 0;
    try {
      Logger.debug('Executing scan : %j', scan);
      const start = new Date().getTime();
      Logger.debug('Pulling %j', scan);
      let qryResults = await this.throughputSafeScanOrQuery((o) => this.scanPromise(o), scan);
      for (const qri of qryResults.Items) {
        await proc(qri);
        cnt++;
      }
      while (qryResults.LastEvaluatedKey && (softLimit === null || cnt < softLimit) && !scan.Limit) {
        Logger.silly('Found more rows - requery with key %j', qryResults.LastEvaluatedKey);
        scan['ExclusiveStartKey'] = qryResults.LastEvaluatedKey;
        qryResults = await this.throughputSafeScanOrQuery((o) => this.scanPromise(o), scan);
        for (const qri of qryResults.Items) {
          await proc(qri);
          cnt++;
        }
        Logger.silly('Rval is now %d items', cnt);
        await PromiseRatchet.wait(delayMS);
      }
      const end = new Date().getTime();
      Logger.debug('Finished, processed %d results in %s for %j', cnt, DurationRatchet.formatMsDuration(end - start, true), scan);
    } catch (err) {
      Logger.error('Failed with %s, q: %j', err, scan, err);
    }
    return cnt;
  }

  /**
   * Shared core of writeAllInBatches/deleteAllInBatches: sends one
   * BatchWriteCommand and retries its UnprocessedItems with exponential
   * backoff, up to 6 attempts.
   *
   * @param tableName Target table
   * @param curBatch Array of PutRequest/DeleteRequest wrappers
   * @returns {written, batchResults} - written is the count of requests that
   *          were successfully processed; batchResults is the last response
   */
  async #batchWriteWithRetry(tableName, curBatch) {
    const params = {
      RequestItems: { [tableName]: curBatch },
      ReturnConsumedCapacity: 'TOTAL',
      ReturnItemCollectionMetrics: 'SIZE',
    };
    let tryCount = 1;
    let done = false;
    let batchResults = null;
    while (!done && tryCount < 7) {
      try {
        batchResults = await this.awsDDB.send(new BatchWriteCommand(params));
      } catch (err) {
        if (DynamoRatchet.objectIsErrorWithProvisionedThroughputExceededExceptionCode(err)) {
          // Treat the whole batch as unprocessed so the backoff below retries it
          Logger.info('Caught ProvisionedThroughputExceededException - retrying batch');
          batchResults = { UnprocessedItems: params.RequestItems };
        } else {
          throw err;
        }
      }
      const unprocessed = batchResults?.UnprocessedItems?.[tableName];
      if (unprocessed && unprocessed.length > 0) {
        const backoff = Math.pow(2, tryCount);
        Logger.warn('Found %d unprocessed items. Backing off %d seconds and trying again', unprocessed.length, backoff);
        await PromiseRatchet.wait(backoff * 1000);
        tryCount++;
        params.RequestItems[tableName] = unprocessed;
      } else {
        done = true;
      }
    }
    let written = curBatch.length;
    const stillUnprocessed = batchResults?.UnprocessedItems?.[tableName];
    if (stillUnprocessed && stillUnprocessed.length > 0) {
      Logger.error('After 6 tries there were still %d unprocessed items', stillUnprocessed.length);
      written -= stillUnprocessed.length;
      Logger.warn('FIX Unprocessed : %j', batchResults.UnprocessedItems);
    }
    return { written, batchResults };
  }

  /**
   * Writes all elements to tableName in BatchWrite chunks of batchSize,
   * retrying unprocessed items with backoff.
   *
   * @returns Number of elements successfully written
   * @throws Error if batchSize < 2
   */
  async writeAllInBatches(tableName, elements, batchSize) {
    if (!batchSize || batchSize < 2) {
      throw new Error('Batch size needs to be at least 2, was ' + batchSize);
    }
    let rval = 0;
    if (!!elements && elements.length > 0) {
      let batchItems = [];
      elements.forEach((el) => {
        batchItems.push({
          PutRequest: {
            Item: el,
            ReturnConsumedCapacity: 'TOTAL',
            TableName: tableName,
          },
        });
      });
      Logger.debug('Processing %d batch items to %s', batchItems.length, tableName);
      while (batchItems.length > 0) {
        const curBatch = batchItems.slice(0, Math.min(batchItems.length, batchSize));
        batchItems = batchItems.slice(curBatch.length);
        const result = await this.#batchWriteWithRetry(tableName, curBatch);
        rval += result.written;
      }
    }
    return rval;
  }

  /**
   * Runs the supplied keys-only index query, strips results down to keyNames,
   * then batch-fetches the full records from the base table.
   *
   * @param qry Query against a keys-only index (TableName required)
   * @param keyNames Names of the table's key attributes present in the index rows
   * @param batchSize BatchGet chunk size (2-100)
   */
  async fetchFullObjectsMatchingKeysOnlyIndexQuery(qry, keyNames, batchSize = 25) {
    RequireRatchet.notNullOrUndefined(qry);
    RequireRatchet.notNullOrUndefined(qry.TableName);
    RequireRatchet.notNullOrUndefined(keyNames);
    RequireRatchet.true(keyNames.length > 0);
    const keyDataSrc = await this.fullyExecuteQuery(qry);
    const keysOnly = DynamoRatchet.stripAllToKeysOnly(keyDataSrc, keyNames);
    const rval = await this.fetchAllInBatches(qry.TableName, keysOnly, batchSize);
    return rval;
  }

  /**
   * BatchGets all inKeys from tableName in chunks of batchSize, retrying
   * UnprocessedKeys with exponential backoff (max 15 tries per chunk).
   *
   * @returns Array of fetched items (order not guaranteed)
   * @throws Error if batchSize is outside 2-100 (the BatchGet API maximum)
   */
  async fetchAllInBatches(tableName, inKeys, batchSize) {
    if (!batchSize || batchSize < 2 || batchSize > 100) {
      throw new Error('Batch size needs to be at least 2 and no more than 100, was ' + batchSize);
    }
    let rval = [];
    const batches = [];
    let remain = Object.assign([], inKeys);
    while (remain.length > 0) {
      const curBatch = remain.slice(0, Math.min(remain.length, batchSize));
      remain = remain.slice(curBatch.length);
      const tableEntry = {};
      tableEntry[tableName] = {
        Keys: curBatch,
      };
      const nextBatch = {
        RequestItems: tableEntry,
        ReturnConsumedCapacity: 'TOTAL',
      };
      batches.push(nextBatch);
    }
    Logger.debug('Created %d batches', batches.length);
    for (let i = 0; i < batches.length; i++) {
      if (batches.length > 1) {
        Logger.info('Processing batch %d of %d', i + 1, batches.length);
      }
      const input = batches[i];
      let tryCount = 1;
      do {
        Logger.silly('Pulling %j', input);
        const res = await this.awsDDB.send(new BatchGetCommand(input));
        rval = rval.concat(res.Responses[tableName]);
        if (!!res.UnprocessedKeys && !!res.UnprocessedKeys[tableName] && res.UnprocessedKeys[tableName].Keys.length > 0 && tryCount < 15) {
          // Bug fix: previously logged the Keys array itself against %d
          Logger.silly('Found %d unprocessed, waiting', res.UnprocessedKeys[tableName].Keys.length);
          await PromiseRatchet.wait(Math.pow(2, tryCount) * 1000);
          tryCount++;
        }
        input.RequestItems = res.UnprocessedKeys;
        // Bug fix: the original condition was '!input.RequestItems && ...', which
        // either exited after one pass or threw; retry while unprocessed keys
        // remain, capped at 15 tries to avoid a hot loop
      } while (!!input.RequestItems && !!input.RequestItems[tableName] && input.RequestItems[tableName].Keys.length > 0 && tryCount < 15);
    }
    return rval;
  }

  /**
   * Deletes all keys from tableName in BatchWrite chunks of batchSize,
   * retrying unprocessed items with backoff.
   *
   * @returns Number of keys successfully deleted
   * @throws Error if batchSize < 2
   */
  async deleteAllInBatches(tableName, keys, batchSize) {
    if (!batchSize || batchSize < 2) {
      throw new Error('Batch size needs to be at least 2, was ' + batchSize);
    }
    let rval = 0;
    if (!!keys && keys.length > 0) {
      let batchItems = [];
      keys.forEach((el) => {
        batchItems.push({
          DeleteRequest: {
            Key: el,
            ReturnConsumedCapacity: 'TOTAL',
            TableName: tableName,
          },
        });
      });
      Logger.debug('Processing %d DeleteBatch items to %s', batchItems.length, tableName);
      while (batchItems.length > 0) {
        const curBatch = batchItems.slice(0, Math.min(batchItems.length, batchSize));
        batchItems = batchItems.slice(curBatch.length);
        const result = await this.#batchWriteWithRetry(tableName, curBatch);
        rval += result.written;
        Logger.debug('%d Remain, DeleteBatch Results : %j', batchItems.length, result.batchResults);
      }
    }
    return rval;
  }

  /**
   * Puts a single item, retrying up to autoRetryCount times on throughput
   * exceeded. Returns the PutCommand output, or null if all retries failed
   * (a warning is logged).
   */
  async simplePut(tableName, value, autoRetryCount = 3) {
    let rval = null;
    let currentTry = 0;
    const params = {
      Item: value,
      ReturnConsumedCapacity: 'TOTAL',
      TableName: tableName,
    };
    while (!rval && currentTry < autoRetryCount) {
      try {
        rval = await this.awsDDB.send(new PutCommand(params));
      } catch (err) {
        if (DynamoRatchet.objectIsErrorWithProvisionedThroughputExceededExceptionCode(err)) {
          const wait = Math.pow(2, currentTry) * 1000;
          Logger.debug('Exceeded write throughput for %j : Try %d of %d (Waiting %d ms)', params, currentTry, autoRetryCount, wait);
          await PromiseRatchet.wait(wait);
          currentTry++;
        } else {
          throw err;
        }
      }
    }
    if (!rval) {
      Logger.warn('Unable to write %j to DDB after %d tries, giving up', params, autoRetryCount);
    }
    return rval;
  }

  /**
   * Puts value only when fieldName is absent or null on the stored item
   * (conditional write). Retries recursively on throughput exceeded.
   *
   * @returns true if written, false if the condition check failed
   */
  async simplePutOnlyIfFieldIsNullOrUndefined(tableName, value, fieldName) {
    let rval = false;
    const params = {
      Item: value,
      ReturnConsumedCapacity: 'TOTAL',
      ConditionExpression: 'attribute_not_exists(#fieldName) OR #fieldName = :null ',
      ExpressionAttributeNames: {
        '#fieldName': fieldName,
      },
      ExpressionAttributeValues: {
        ':null': null,
      },
      TableName: tableName,
    };
    try {
      const wrote = await this.awsDDB.send(new PutCommand(params));
      Logger.silly('Wrote : %j', wrote);
      rval = true;
    } catch (err) {
      if (DynamoRatchet.objectIsErrorWithProvisionedThroughputExceededExceptionCode(err)) {
        Logger.debug('Exceeded write throughput for %j : (Waiting 2000 ms)', params);
        await PromiseRatchet.wait(2000);
        rval = await this.simplePutOnlyIfFieldIsNullOrUndefined(tableName, value, fieldName);
      } else if (err && err instanceof ConditionalCheckFailedException) {
        // Bug fix: the %j placeholder previously had no matching argument
        Logger.debug('Failed to write %j due to null field failure', value);
        rval = false;
      } else {
        throw err;
      }
    }
    return rval;
  }

  /**
   * Puts value, but when the write collides on the named key attribute(s),
   * calls adjustFunction to produce new key values and retries.
   *
   * @param keyNames 1 or 2 attribute names forming the collision check
   * @param adjustFunction (item) => item with adjusted key values
   * @param maxAdjusts Optional cap on collision adjustments (null = unlimited)
   * @param autoRetryCount Cap on throughput-exceeded retries
   * @returns The item as actually written, or null on failure
   */
  async simplePutWithCollisionAvoidance(tableName, value, keyNames, adjustFunction, maxAdjusts = null, autoRetryCount = 3) {
    RequireRatchet.true(keyNames && keyNames.length > 0 && keyNames.length < 3, 'You must pass 1 or 2 key names');
    let pio = null;
    let currentTry = 0;
    const attrNames = {
      '#key0': keyNames[0],
    };
    const attrValues = {
      ':key0': value[keyNames[0]],
    };
    let condExp = '#key0 <> :key0';
    if (keyNames.length > 1) {
      condExp += ' AND #key1 <> :key1';
      attrNames['#key1'] = keyNames[1];
      attrValues[':key1'] = value[keyNames[1]];
    }
    const params = {
      Item: value,
      ReturnConsumedCapacity: 'TOTAL',
      ConditionExpression: condExp,
      ExpressionAttributeNames: attrNames,
      ExpressionAttributeValues: attrValues,
      TableName: tableName,
    };
    let adjustCount = 0;
    while (!pio && currentTry < autoRetryCount && (!maxAdjusts || adjustCount < maxAdjusts)) {
      try {
        pio = await this.awsDDB.send(new PutCommand(params));
      } catch (err) {
        if (DynamoRatchet.objectIsErrorWithProvisionedThroughputExceededExceptionCode(err)) {
          currentTry++;
          const wait = Math.pow(2, currentTry) * 1000;
          Logger.debug('Exceeded write throughput for %j : Try %d of %d (Waiting %d ms)', params, currentTry, autoRetryCount, wait);
          await PromiseRatchet.wait(wait);
        } else if (err && err instanceof ConditionalCheckFailedException) {
          let newValue = Object.assign({}, params.Item);
          Logger.info('Failed to write %j due to collision - adjusting and retrying', newValue);
          newValue = adjustFunction(newValue);
          params.Item = newValue;
          params.ExpressionAttributeValues[':key0'] = newValue[keyNames[0]];
          if (keyNames.length > 1) {
            params.ExpressionAttributeValues[':key1'] = newValue[keyNames[1]];
          }
          adjustCount++;
        } else {
          throw err;
        }
      }
    }
    if (pio && adjustCount > 0) {
      Logger.info('After adjustment, wrote %j as %j', value, params.Item);
    }
    if (!pio) {
      Logger.warn('Unable to write %j to DDB after %d provision tries and %d adjusts, giving up', params, currentTry, adjustCount);
    }
    return pio ? params.Item : null;
  }

  /**
   * Gets a single item by key, retrying on throughput exceeded.
   *
   * @returns A shallow copy of the item, or null when not found / all retries failed
   */
  async simpleGet(tableName, keys, autoRetryCount = 3) {
    let holder = null;
    let currentTry = 0;
    const params = {
      TableName: tableName,
      Key: keys,
    };
    while (!holder && currentTry < autoRetryCount) {
      try {
        holder = await this.awsDDB.send(new GetCommand(params));
      } catch (err) {
        if (DynamoRatchet.objectIsErrorWithProvisionedThroughputExceededExceptionCode(err)) {
          const wait = Math.pow(2, currentTry) * 1000;
          Logger.debug('Exceeded read throughput for %j : Try %d of %d (Waiting %d ms)', params, currentTry, autoRetryCount, wait);
          await PromiseRatchet.wait(wait);
          currentTry++;
        } else {
          throw err;
        }
      }
    }
    if (!holder) {
      Logger.warn('Unable to read %j from DDB after %d tries, giving up', params, autoRetryCount);
    }
    const rval = !!holder && !!holder.Item ? Object.assign({}, holder.Item) : null;
    return rval;
  }

  /** True when err is a ProvisionedThroughputExceededException. */
  static objectIsErrorWithProvisionedThroughputExceededExceptionCode(err) {
    return !!err && err instanceof ProvisionedThroughputExceededException;
  }

  /**
   * Atomically decrements counterAttributeName on the row (conditional on the
   * counter being > 0) and returns the updated attributes; optionally deletes
   * the row once the counter reaches 0. Returns null when the condition fails
   * or all retries are exhausted.
   */
  async simpleGetWithCounterDecrement(tableName, keys, counterAttributeName, deleteOnZero, autoRetryCount = 3) {
    let holder = null;
    let currentTry = 0;
    const params = {
      TableName: tableName,
      Key: keys,
      UpdateExpression: 'set #counter = #counter-:decVal',
      ExpressionAttributeNames: {
        '#counter': counterAttributeName,
      },
      ExpressionAttributeValues: {
        ':decVal': 1,
        ':minVal': 0,
      },
      ConditionExpression: '#counter > :minVal',
      ReturnValues: 'ALL_NEW',
    };
    let updateFailed = false;
    while (!holder && currentTry < autoRetryCount && !updateFailed) {
      try {
        holder = await this.awsDDB.send(new UpdateCommand(params));
      } catch (err) {
        if (DynamoRatchet.objectIsErrorWithProvisionedThroughputExceededExceptionCode(err)) {
          const wait = Math.pow(2, currentTry) * 1000;
          Logger.debug('Exceeded update throughput for %j : Try %d of %d (Waiting %d ms)', params, currentTry, autoRetryCount, wait);
          await PromiseRatchet.wait(wait);
          currentTry++;
        } else if (!!err && err instanceof ConditionalCheckFailedException) {
          Logger.info('Cannot fetch requested row (%j) - the update check failed', keys);
          updateFailed = true;
        } else {
          throw err;
        }
      }
    }
    if (!holder && !updateFailed) {
      Logger.warn('Unable to update %j from DDB after %d tries, giving up', params, autoRetryCount);
    }
    const rval = !!holder && !!holder.Attributes ? Object.assign({}, holder.Attributes) : null;
    if (deleteOnZero && rval && rval[counterAttributeName] === 0) {
      Logger.info('Delete on 0 specified, removing');
      await this.simpleDelete(tableName, keys);
    }
    return rval;
  }

  /** Deletes a single item by key; returns the raw DeleteCommand output. */
  async simpleDelete(tableName, keys) {
    const params = {
      TableName: tableName,
      Key: keys,
    };
    const holder = await this.awsDDB.send(new DeleteCommand(params));
    return holder;
  }

  /**
   * Atomically adds increment to counterFieldName on the row and returns the
   * new counter value as a number.
   */
  async atomicCounter(tableName, keys, counterFieldName, increment = 1) {
    const update = {
      TableName: tableName,
      Key: keys,
      UpdateExpression: 'SET #counterFieldName = #counterFieldName + :inc',
      ExpressionAttributeNames: {
        '#counterFieldName': counterFieldName,
      },
      ExpressionAttributeValues: {
        ':inc': increment,
      },
      ReturnValues: 'UPDATED_NEW',
    };
    const ui = await this.awsDDB.send(new UpdateCommand(update));
    const rval = NumberRatchet.safeNumber(ui.Attributes[counterFieldName]);
    return rval;
  }

  /**
   * Recursively removes empty-string values from ob in place (DynamoDB
   * historically rejected empty strings). Mutates the argument.
   */
  static cleanObject(ob) {
    if (ob) {
      const rem = [];
      Object.keys(ob).forEach((k) => {
        const v = ob[k];
        if (v === '') {
          rem.push(k);
        } else if (v instanceof Object) {
          DynamoRatchet.cleanObject(v);
        }
      });
      Logger.silly('Removing keys : %j', rem);
      rem.forEach((k) => {
        delete ob[k];
      });
    }
  }

  /**
   * Returns a new object containing only the named key attributes of input,
   * or null if input/keysNames are empty. Throws if a key is missing.
   */
  static stripToKeysOnly(input, keysNames) {
    let rval = null;
    if (!!input && !!keysNames && keysNames.length > 0) {
      rval = {};
      keysNames.forEach((k) => {
        // Bug fix: was a truthiness check, which wrongly rejected legal falsy
        // key values such as the number 0
        if (input[k] === null || input[k] === undefined) {
          ErrorRatchet.throwFormattedErr('Failed key extraction on %j - missing %s', input, k);
        }
        rval[k] = input[k];
      });
    }
    return rval;
  }

  /** Maps stripToKeysOnly across an array of items. */
  static stripAllToKeysOnly(input, keys) {
    const rval = input.map((i) => DynamoRatchet.stripToKeysOnly(i, keys));
    return rval;
  }
}
//# sourceMappingURL=dynamo-ratchet.js.map