
flydrive


File storage library with unified API to manage files across multiple cloud storage providers like S3, GCS, R2 and so on
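Before the driver source, a minimal usage sketch. The method names and option shapes (bucket, visibility, usingUniformAcl, put, get, exists, delete) come from the class in this file; the import path, bucket name, and credential file are illustrative assumptions, and in application code the driver is usually handed to flydrive's higher-level Disk wrapper rather than called directly.

import { GCSDriver } from "flydrive/drivers/gcs"; // assumed export path

// Options the driver does not consume itself (e.g. "keyFilename") are
// forwarded to the "@google-cloud/storage" Storage constructor.
const gcs = new GCSDriver({
  bucket: "my-bucket",            // hypothetical bucket
  keyFilename: "./gcs-key.json",  // standard @google-cloud/storage credential option
  visibility: "private",
  usingUniformAcl: true,
});

await gcs.put("greetings/hello.txt", "Hello world");

console.log(await gcs.exists("greetings/hello.txt")); // true
console.log(await gcs.get("greetings/hello.txt"));    // "Hello world"

await gcs.delete("greetings/hello.txt");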

import { DriveDirectory, DriveFile } from "../../chunk-TZSKXVQT.js";

// drivers/gcs/driver.ts
import string from "@poppinss/utils/string";
import { Storage } from "@google-cloud/storage";

// drivers/gcs/debug.ts
import { debuglog } from "util";
var debug_default = debuglog("flydrive:gcs");

// drivers/gcs/driver.ts
var GCSDriver = class {
  constructor(options) {
    this.options = options;
    this.#storage = "storage" in options ? options.storage : new Storage(options);
    if (options.usingUniformAcl !== void 0) {
      this.#usingUniformAcl = options.usingUniformAcl;
    }
    if (debug_default.enabled) {
      debug_default("driver config %O", { ...options, credentials: "REDACTED" });
    }
  }
  #storage;
  #usingUniformAcl = true;
  /**
   * Returns GCS options for the save operations.
   */
  #getSaveOptions(options) {
    const {
      visibility, // used locally
      contentType, // forwarded as metadata
      cacheControl, // forwarded as metadata
      contentEncoding, // forwarded as metadata
      contentLength, // not entertained by GCS
      contentLanguage, // not entertained by GCS
      contentDisposition, // not entertained by GCS
      ...rest // forwarded as-is
    } = options || {};
    const gcsOptions = { resumable: false, ...rest };
    gcsOptions.metadata = Object.assign(gcsOptions.metadata || {}, {
      contentType,
      cacheControl,
      contentEncoding
    });
    if (this.#usingUniformAcl === false) {
      gcsOptions.public = (visibility || this.options.visibility) === "public";
      gcsOptions.private = !gcsOptions.public;
      gcsOptions.predefinedAcl = gcsOptions.public ? "publicRead" : "private";
    }
    debug_default("gcs write options %O", gcsOptions);
    return gcsOptions;
  }
  /**
   * Creates the metadata for the file from the raw response
   * returned by GCS
   */
  #createFileMetaData(apiFile) {
    const metaData = {
      contentType: apiFile.contentType,
      contentLength: Number(apiFile.size),
      etag: apiFile.etag,
      lastModified: new Date(apiFile.updated)
    };
    debug_default("file metadata %O", this.options.bucket, metaData);
    return metaData;
  }
  /**
   * Returns the GCS objects using the callback approach, since there
   * is no other way to get access to the API response and the
   * pagination token.
   *
   * Instead of using "bucket.getFiles" we use "bucket.request", because
   * the "getFiles" method internally creates an instance of "File".
   * We do not need this instance, and creating it wastes resources when
   * querying a bucket with many files.
   */
  #getGCSObjects(options) {
    const bucket = this.#storage.bucket(this.options.bucket);
    debug_default("fetching files list %O", options);
    return new Promise((resolve, reject) => {
      bucket.request(
        { uri: "/o", qs: options },
        (error, response) => {
          if (error) {
            debug_default("list files API error %O", error);
            reject(error);
          } else {
            debug_default("list files API response %O", response);
            resolve({
              files: response.items || [],
              paginationToken: response.nextPageToken,
              prefixes: response.prefixes || []
            });
          }
        }
      );
    });
  }
  /**
   * Returns a boolean indicating if the file exists
   * or not.
   */
  async exists(key) {
    debug_default("checking if file exists %s:%s", this.options.bucket, key);
    const bucket = this.#storage.bucket(this.options.bucket);
    const response = await bucket.file(key).exists();
    return response[0];
  }
  /**
   * Returns the contents of a file as a UTF-8 string. An
   * exception is thrown when the object is missing.
   */
  async get(key) {
    debug_default("reading file contents %s:%s", this.options.bucket, key);
    const bucket = this.#storage.bucket(this.options.bucket);
    const response = await bucket.file(key).download();
    return response[0].toString("utf-8");
  }
  /**
   * Returns the contents of the file as a Readable stream. An
   * exception is thrown when the file is missing.
   */
  async getStream(key) {
    debug_default("reading file contents as a stream %s:%s", this.options.bucket, key);
    const bucket = this.#storage.bucket(this.options.bucket);
    return bucket.file(key).createReadStream();
  }
  /**
   * Returns the contents of the file as a Uint8Array. An
   * exception is thrown when the file is missing.
   */
  async getBytes(key) {
    debug_default("reading file contents as array buffer %s:%s", this.options.bucket, key);
    const bucket = this.#storage.bucket(this.options.bucket);
    const response = await bucket.file(key).download();
    return new Uint8Array(response[0]);
  }
  /**
   * Returns the file metadata.
   */
  async getMetaData(key) {
    debug_default("fetching file metadata %s:%s", this.options.bucket, key);
    const bucket = this.#storage.bucket(this.options.bucket);
    const response = await bucket.file(key).getMetadata();
    return this.#createFileMetaData(response[0]);
  }
  /**
   * Returns the visibility of a file
   */
  async getVisibility(key) {
    debug_default("fetching file visibility %s:%s", this.options.bucket, key);
    const bucket = this.#storage.bucket(this.options.bucket);
    const [isFilePublic] = await bucket.file(key).isPublic();
    return isFilePublic ? "public" : "private";
  }
  /**
   * Returns the public URL of the file. This method does not check
   * if the file exists or not.
   */
  async getUrl(key) {
    const generateURL = this.options.urlBuilder?.generateURL;
    if (generateURL) {
      debug_default("using custom implementation for generating public URL %s:%s", this.options.bucket, key);
      return generateURL(key, this.options.bucket, this.#storage);
    }
    debug_default("generating public URL %s:%s", this.options.bucket, key);
    const bucket = this.#storage.bucket(this.options.bucket);
    const file = bucket.file(key);
    return file.publicUrl();
  }
  /**
   * Returns the signed/temporary URL of the file. By default, the signed URLs
   * expire in 30mins, but a custom expiry can be defined using the
   * "options.expiresIn" property.
   */
  async getSignedUrl(key, options) {
    const { contentDisposition, contentType, expiresIn, ...rest } = Object.assign({}, options);
    const expires = /* @__PURE__ */ new Date();
    expires.setSeconds((/* @__PURE__ */ new Date()).getSeconds() + string.seconds.parse(expiresIn || "30mins"));
    const signedURLOptions = {
      action: "read",
      expires,
      responseType: contentType,
      responseDisposition: contentDisposition,
      ...rest
    };
    const generateSignedURL = this.options.urlBuilder?.generateSignedURL;
    if (generateSignedURL) {
      debug_default("using custom implementation for generating signed URL %s:%s", this.options.bucket, key);
      return generateSignedURL(key, this.options.bucket, signedURLOptions, this.#storage);
    }
    debug_default("generating signed URL %s:%s", this.options.bucket, key);
    const bucket = this.#storage.bucket(this.options.bucket);
    const file = bucket.file(key);
    const response = await file.getSignedUrl(signedURLOptions);
    return response[0];
  }
  /**
   * Returns the signed/temporary URL that can be used to directly upload
   * the file contents to the storage. By default, the signed URLs
   * expire in 30mins, but a custom expiry can be defined using the
   * "options.expiresIn" property.
   */
  async getSignedUploadUrl(key, options) {
    const { expiresIn, ...rest } = Object.assign({}, options);
    const expires = /* @__PURE__ */ new Date();
    expires.setSeconds((/* @__PURE__ */ new Date()).getSeconds() + string.seconds.parse(expiresIn || "30mins"));
    const signedURLOptions = { action: "write", expires, ...rest };
    const generateSignedUploadURL = this.options.urlBuilder?.generateSignedUploadURL;
    if (generateSignedUploadURL) {
      debug_default(
        "using custom implementation for generating signed upload URL %s:%s",
        this.options.bucket,
        key
      );
      return generateSignedUploadURL(key, this.options.bucket, signedURLOptions, this.#storage);
    }
    debug_default("generating signed upload URL %s:%s", this.options.bucket, key);
    const bucket = this.#storage.bucket(this.options.bucket);
    const file = bucket.file(key);
    const response = await file.getSignedUrl(signedURLOptions);
    return response[0];
  }
  /**
   * Updates the visibility of a file
   */
  async setVisibility(key, visibility) {
    debug_default("updating file visibility %s:%s to %s", this.options.bucket, key, visibility);
    const bucket = this.#storage.bucket(this.options.bucket);
    const file = bucket.file(key);
    if (visibility === "private") {
      await file.makePrivate();
    } else {
      await file.makePublic();
    }
  }
  /**
   * Writes a file to the bucket for the given key and contents.
   */
  async put(key, contents, options) {
    debug_default("creating/updating file %s:%s", this.options.bucket, key);
    const bucket = this.#storage.bucket(this.options.bucket);
    await bucket.file(key).save(
      typeof contents === "string" ? Buffer.from(contents) : Buffer.from(contents),
      this.#getSaveOptions(options)
    );
  }
  /**
   * Writes a file to the bucket for the given key and stream
   */
  putStream(key, contents, options) {
    debug_default("creating/updating file using readable stream %s:%s", this.options.bucket, key);
    const bucket = this.#storage.bucket(this.options.bucket);
    return new Promise((resolve, reject) => {
      const writeable = bucket.file(key).createWriteStream(this.#getSaveOptions(options));
      writeable.once("error", reject);
      contents.once("error", reject);
      contents.pipe(writeable).on("finish", resolve).on("error", reject);
    });
  }
  /**
   * Copies the source file to the destination. Both paths must
   * be within the root location.
   */
  async copy(source, destination, options) {
    debug_default(
      "copying file from %s:%s to %s:%s",
      this.options.bucket,
      source,
      this.options.bucket,
      destination
    );
    const bucket = this.#storage.bucket(this.options.bucket);
    options = options || {};
    if (!options.visibility && !this.#usingUniformAcl) {
      const [isFilePublic] = await bucket.file(source).isPublic();
      options.visibility = isFilePublic ? "public" : "private";
    }
    await bucket.file(source).copy(destination, this.#getSaveOptions(options));
  }
  /**
   * Moves the source file to the destination. Both paths must
   * be within the root location.
   */
  async move(source, destination, options) {
    debug_default(
      "moving file from %s:%s to %s:%s",
      this.options.bucket,
      source,
      this.options.bucket,
      destination
    );
    const bucket = this.#storage.bucket(this.options.bucket);
    options = options || {};
    if (!options.visibility && !this.#usingUniformAcl) {
      const [isFilePublic] = await bucket.file(source).isPublic();
      options.visibility = isFilePublic ? "public" : "private";
    }
    await bucket.file(source).move(destination, this.#getSaveOptions(options));
  }
  /**
   * Deletes the object from the bucket
   */
  async delete(key) {
    debug_default("removing file %s:%s", this.options.bucket, key);
    const bucket = this.#storage.bucket(this.options.bucket);
    await bucket.file(key).delete({ ignoreNotFound: true });
  }
  /**
   * Deletes the files and directories matching the provided
   * prefix.
   */
  async deleteAll(prefix) {
    const bucket = this.#storage.bucket(this.options.bucket);
    debug_default("removing all files matching prefix %s:%s", this.options.bucket, prefix);
    await bucket.deleteFiles({ prefix: `${prefix.replace(/\/$/, "")}/` });
  }
  /**
   * Returns a list of files. The pagination token can be used to paginate
   * through the files.
   */
  async listAll(prefix, options) {
    const self = this;
    let { recursive, paginationToken, maxResults } = Object.assign({ recursive: false }, options);
    if (prefix) {
      prefix = !recursive ? `${prefix.replace(/\/$/, "")}/` : prefix;
    }
    debug_default("listing all files matching prefix %s:%s", this.options.bucket, prefix);
    const response = await this.#getGCSObjects({
      autoPaginate: false,
      delimiter: !recursive ? "/" : "",
      includeTrailingDelimiter: !recursive,
      includeFoldersAsPrefixes: !recursive,
      pageToken: paginationToken,
      ...prefix !== "/" ? { prefix } : {},
      ...maxResults !== void 0 ? { maxResults } : {}
    });
    function* filesGenerator() {
      for (const directory of response.prefixes) {
        yield new DriveDirectory(directory.replace(/\/$/, ""));
      }
      for (const file of response.files) {
        yield new DriveFile(file.name, self, self.#createFileMetaData(file));
      }
    }
    return {
      paginationToken: response.paginationToken,
      objects: {
        [Symbol.iterator]: filesGenerator
      }
    };
  }
};
export {
  GCSDriver
};