@bitblit/ratchet-aws

Common tools for use with AWS, for both browser and Node

s3-cache-ratchet.js

import {
  CopyObjectCommand,
  DeleteObjectCommand,
  GetObjectCommand,
  HeadObjectCommand,
  ListObjectsCommand,
  NoSuchKey,
  NotFound,
  PutObjectCommand,
} from '@aws-sdk/client-s3';
import { RequireRatchet } from '@bitblit/ratchet-common/lang/require-ratchet';
import { Logger } from '@bitblit/ratchet-common/logger/logger';
import { StringRatchet } from '@bitblit/ratchet-common/lang/string-ratchet';
import { StopWatch } from '@bitblit/ratchet-common/lang/stop-watch';
import { WebStreamRatchet } from '@bitblit/ratchet-common/lang/web-stream-ratchet';
import { getSignedUrl } from '@aws-sdk/s3-request-presigner';
import { Upload } from '@aws-sdk/lib-storage';

// Thin wrapper around an S3Client that treats a bucket (a default one, or one passed per call) as a simple cache.
export class S3CacheRatchet {
  s3;
  defaultBucket;

  constructor(s3, defaultBucket = null) {
    this.s3 = s3;
    this.defaultBucket = defaultBucket;
    RequireRatchet.notNullOrUndefined(this.s3, 's3');
  }

  get s3Client() {
    return this.s3;
  }

  static applyCacheControlMaxAge(input, seconds) {
    if (input && seconds) {
      input.CacheControl = 'max-age=' + seconds;
    }
    return input;
  }

  static applyUserMetaData(input, key, value) {
    if (input && StringRatchet.trimToNull(key) && StringRatchet.trimToNull(value)) {
      input.Metadata = input.Metadata || {};
      input.Metadata[key] = value;
    }
    return input;
  }

  getDefaultBucket() {
    return this.defaultBucket;
  }

  getS3Client() {
    return this.s3;
  }

  async fileExists(key, bucket = null) {
    try {
      const head = await this.fetchMetaForCacheFile(key, this.bucketVal(bucket));
      return !!head;
    } catch (err) {
      Logger.silly('Error calling file exists (as expected) %s', err);
      return false;
    }
  }

  async fetchCacheFilePassThru(req) {
    let rval = null;
    try {
      rval = await this.s3.send(new GetObjectCommand(req));
    } catch (err) {
      if (err instanceof NoSuchKey) {
        Logger.debug('Key %s not found - returning null', req.Key);
        rval = null;
      } else {
        throw err;
      }
    }
    return rval;
  }

  async fetchCacheFileAsS3GetObjectCommandOutput(key, bucket = null) {
    let rval = null;
    try {
      const params = { Bucket: this.bucketVal(bucket), Key: key };
      rval = await this.s3.send(new GetObjectCommand(params));
    } catch (err) {
      if (err instanceof NoSuchKey) {
        Logger.debug('Key %s not found - returning null', key);
        rval = null;
      } else {
        throw err;
      }
    }
    return rval;
  }

  async fetchCacheFileAsReadableStream(key, bucket = null) {
    const out = await this.fetchCacheFileAsS3GetObjectCommandOutput(key, bucket);
    return out.Body.transformToWebStream();
  }

  async fetchCacheFileAsBuffer(key, bucket = null) {
    let rval = null;
    const out = await this.fetchCacheFileAsS3GetObjectCommandOutput(key, bucket);
    if (out?.Body) {
      const tmp = await out.Body.transformToByteArray();
      rval = Buffer.from(tmp);
    }
    return rval;
  }

  async fetchCacheFileAsString(key, bucket = null) {
    let rval = null;
    const out = await this.fetchCacheFileAsS3GetObjectCommandOutput(key, bucket);
    if (out?.Body) {
      rval = await out.Body.transformToString();
    }
    return rval;
  }

  async fetchCacheFileAsObject(key, bucket = null) {
    const value = await this.fetchCacheFileAsString(key, bucket);
    return value ? JSON.parse(value) : null;
  }

  async removeCacheFile(key, bucket = null) {
    let rval = null;
    const params = { Bucket: this.bucketVal(bucket), Key: key };
    try {
      rval = await this.s3.send(new DeleteObjectCommand(params));
    } catch (err) {
      if (err && err instanceof NotFound) {
        Logger.info('Swallowing 404 deleting missing object %s %s', bucket, key);
        rval = null;
      } else {
        throw err;
      }
    }
    return rval;
  }

  async writeObjectToCacheFile(key, dataObject, template, bucket) {
    const json = JSON.stringify(dataObject);
    return this.writeStringToCacheFile(key, json, template, bucket);
  }

  async writeStringToCacheFile(key, dataString, template, bucket) {
    const stream = WebStreamRatchet.stringToWebReadableStream(dataString);
    return this.writeStreamToCacheFile(key, stream, template, bucket);
  }

  async writeStreamToCacheFile(
    key,
    data,
    template,
    bucket,
    progressFn = (progress) => {
      Logger.debug('Uploading : %s', progress);
    },
  ) {
    const params = Object.assign({}, template || {}, {
      Bucket: this.bucketVal(bucket),
      Key: key,
      Body: data,
    });
    const upload = new Upload({
      client: this.s3,
      params: params,
      tags: [],
      queueSize: 4,
      partSize: 1024 * 1024 * 5,
      leavePartsOnError: false,
    });
    if (progressFn) {
      upload.on('httpUploadProgress', progressFn);
    }
    const result = await upload.done();
    return result;
  }

  async synchronize(srcPrefix, targetPrefix, targetRatchet = this, recurseSubFolders = false) {
    RequireRatchet.notNullOrUndefined(srcPrefix, 'srcPrefix');
    RequireRatchet.notNullOrUndefined(targetPrefix, 'targetPrefix');
    RequireRatchet.true(srcPrefix.endsWith('/'), 'srcPrefix must end in /');
    RequireRatchet.true(targetPrefix.endsWith('/'), 'targetPrefix must end in /');
    let rval = [];
    const sourceFiles = await this.directChildrenOfPrefix(srcPrefix);
    const targetFiles = await targetRatchet.directChildrenOfPrefix(targetPrefix);
    const sw = new StopWatch();
    for (let i = 0; i < sourceFiles.length; i++) {
      const sourceFile = sourceFiles[i];
      Logger.info('Processing %s : %s', sourceFile, sw.dumpExpected(i / sourceFiles.length));
      if (sourceFile.endsWith('/')) {
        if (recurseSubFolders) {
          Logger.info('%s is a subfolder - recursing', sourceFile);
          const subs = await this.synchronize(srcPrefix + sourceFile, targetPrefix + sourceFile, targetRatchet, recurseSubFolders);
          Logger.info('Got %d back from %s', subs.length, sourceFile);
          rval = rval.concat(subs);
        } else {
          Logger.info('%s is a subfolder and recurse not specified - skipping', sourceFile);
        }
      } else {
        let shouldCopy = true;
        const srcMeta = await this.fetchMetaForCacheFile(srcPrefix + sourceFile);
        if (targetFiles.includes(sourceFile)) {
          const targetMeta = await targetRatchet.fetchMetaForCacheFile(targetPrefix + sourceFile);
          if (srcMeta.ETag === targetMeta.ETag) {
            Logger.debug('Skipping - identical');
            shouldCopy = false;
          }
        }
        if (shouldCopy) {
          Logger.debug('Copying...');
          const srcStream = await this.fetchCacheFileAsReadableStream(srcPrefix + sourceFile);
          try {
            const written = await targetRatchet.writeStreamToCacheFile(targetPrefix + sourceFile, srcStream, srcMeta, undefined);
            Logger.silly('Write result : %j', written);
            rval.push(sourceFile);
          } catch (err) {
            Logger.error('Failed to sync : %s : %s', sourceFile, err);
          }
        }
      }
    }
    Logger.info('Found %d files, copied %d', sourceFiles.length, rval.length);
    sw.log();
    return rval;
  }

  async fetchMetaForCacheFile(key, bucket = null) {
    let rval = null;
    try {
      rval = await this.s3.send(new HeadObjectCommand({ Bucket: this.bucketVal(bucket), Key: key }));
    } catch (err) {
      if (err && err instanceof NotFound) {
        Logger.info('Cache file %s %s not found returning null', this.bucketVal(bucket), key);
        rval = null;
      } else {
        Logger.error('Unrecognized error, rethrowing : %s', err, err);
        throw err;
      }
    }
    return rval;
  }

  async cacheFileAgeInSeconds(key, bucket = null) {
    try {
      const res = await this.fetchMetaForCacheFile(key, bucket);
      if (res && res.LastModified) {
        return Math.floor((new Date().getTime() - res.LastModified.getTime()) / 1000);
      } else {
        Logger.warn('Cache file %s %s had no last modified returning null', this.bucketVal(bucket), key);
        return null;
      }
    } catch (err) {
      if (err && err instanceof NotFound) {
        Logger.warn('Cache file %s %s not found returning null', this.bucketVal(bucket), key);
        return null;
      } else {
        throw err;
      }
    }
  }

  async renameFile(srcKey, dstKey, srcBucket = null, dstBucket = null) {
    Logger.info('Rename %s to %s (%s/%s)', srcKey, dstKey, srcBucket, dstBucket);
    const output = await this.copyFile(srcKey, dstKey, srcBucket, dstBucket);
    if (output) {
      await this.removeCacheFile(srcKey, srcBucket);
    }
    return output;
  }

  async copyFile(srcKey, dstKey, srcBucket = null, dstBucket = null, template) {
    const params = Object.assign({ MetadataDirective: 'COPY' }, template ?? {});
    params.CopySource = '/' + this.bucketVal(srcBucket) + '/' + srcKey;
    params.Bucket = this.bucketVal(dstBucket);
    params.Key = dstKey;
    Logger.info('Performing copy with %j', params);
    const rval = await this.s3.send(new CopyObjectCommand(params));
    return rval;
  }

  async quietCopyFile(srcKey, dstKey, srcBucket = null, dstBucket = null) {
    let rval = false;
    try {
      const _tmp = await this.copyFile(srcKey, dstKey, srcBucket, dstBucket);
      rval = true;
    } catch (err) {
      Logger.silly('Failed to copy file in S3 : %s', err);
    }
    return rval;
  }

  async preSignedDownloadUrlForCacheFile(key, expirationSeconds = 3600, bucket = null) {
    const getCommand = { Bucket: this.bucketVal(bucket), Key: key };
    const link = await getSignedUrl(this.s3, new GetObjectCommand(getCommand), { expiresIn: expirationSeconds });
    return link;
  }

  async preSignedUploadUrlForCacheFile(key, expirationSeconds = 3600, bucket = null) {
    const putCommand = { Bucket: this.bucketVal(bucket), Key: key };
    const link = await getSignedUrl(this.s3, new PutObjectCommand(putCommand), { expiresIn: expirationSeconds });
    return link;
  }

  async directChildrenOfPrefix(prefix, expandFiles = false, bucket = null, maxToReturn = null) {
    const returnValue = [];
    const params = { Bucket: this.bucketVal(bucket), Prefix: prefix, Delimiter: '/' };
    let response = null;
    do {
      response = await this.s3.send(new ListObjectsCommand(params));
      const prefixLength = prefix.length;
      if (response['CommonPrefixes']) {
        response['CommonPrefixes'].forEach((cp) => {
          if (!maxToReturn || returnValue.length < maxToReturn) {
            const value = cp['Prefix'].substring(prefixLength);
            returnValue.push(value);
          }
        });
      }
      if (response['Contents']) {
        await Promise.all(
          response['Contents'].map(async (cp) => {
            if (!maxToReturn || returnValue.length < maxToReturn) {
              if (expandFiles) {
                const expanded = {
                  link: await this.preSignedDownloadUrlForCacheFile(cp['Key'], 3600, bucket),
                  name: cp['Key'].substring(prefixLength),
                  size: cp['Size'],
                };
                returnValue.push(expanded);
              } else {
                returnValue.push(cp['Key'].substring(prefixLength));
              }
            }
          }),
        );
      }
      params.Marker = response.NextMarker;
    } while (params.Marker && (!maxToReturn || returnValue.length < maxToReturn));
    return returnValue;
  }

  async allSubFoldersOfPrefix(prefix, bucket = null) {
    const returnValue = [prefix];
    let idx = 0;
    while (idx < returnValue.length) {
      const next = returnValue[idx++];
      Logger.debug('Pulling %s (%d remaining)', next, returnValue.length - idx);
      const req = { Bucket: this.bucketVal(bucket), Prefix: next, Delimiter: '/' };
      let resp = null;
      do {
        req.ContinuationToken = resp ? resp.NextContinuationToken : null;
        resp = await this.s3.send(new ListObjectsCommand(req));
        resp.CommonPrefixes.forEach((p) => {
          returnValue.push(p.Prefix);
        });
        Logger.debug('g:%j', resp);
      } while (resp.NextContinuationToken);
    }
    return returnValue;
  }

  bucketVal(explicitBucket) {
    const rval = explicitBucket ? explicitBucket : this.defaultBucket;
    if (!rval) {
      throw 'You must set either the default bucket or pass it explicitly';
    }
    return rval;
  }
}
//# sourceMappingURL=s3-cache-ratchet.js.map
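
A minimal usage sketch based on the class above (not taken from the package docs): the region, bucket name, and keys are placeholders, and the import path for S3CacheRatchet is an assumption about how this file is exported from the package, so adjust it to match your install.

import { S3Client } from '@aws-sdk/client-s3';
// NOTE: this subpath is illustrative; use the export path your installed version of @bitblit/ratchet-aws provides.
import { S3CacheRatchet } from '@bitblit/ratchet-aws/s3/s3-cache-ratchet';

// Wrap a standard S3Client and default all calls to one bucket (placeholder name).
const ratchet = new S3CacheRatchet(new S3Client({ region: 'us-east-1' }), 'my-example-bucket');

// Round-trip a JSON object through the default bucket, then check its age and mint a short-lived download URL.
await ratchet.writeObjectToCacheFile('cache/config.json', { hello: 'world' });
const restored = await ratchet.fetchCacheFileAsObject('cache/config.json'); // { hello: 'world' }
const ageSeconds = await ratchet.cacheFileAgeInSeconds('cache/config.json');
const downloadUrl = await ratchet.preSignedDownloadUrlForCacheFile('cache/config.json', 300);

Passing a bucket name as the final argument to any of these calls overrides the default bucket for that single operation.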