owltech
Version:
This is a backend for OwlTech Company.
1,233 lines • 103 kB
JavaScript
"use strict";
/*!
* Copyright 2014 Google Inc. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
Object.defineProperty(exports, "__esModule", { value: true });
const common_1 = require("@google-cloud/common");
const promisify_1 = require("@google-cloud/promisify");
const compressible = require("compressible");
const concat = require("concat-stream");
const crypto = require("crypto");
const dateFormat = require("date-and-time");
const extend = require("extend");
const fs = require("fs");
const hashStreamValidation = require('hash-stream-validation');
const mime = require("mime");
const once = require("onetime");
const os = require("os");
const pumpify = require('pumpify');
const resumableUpload = require("gcs-resumable-upload");
const streamEvents = require("stream-events");
const through = require("through2");
const xdgBasedir = require("xdg-basedir");
const querystring = require("querystring");
const zlib = require("zlib");
const url = require("url");
const bucket_1 = require("./bucket");
const acl_1 = require("./acl");
const duplexify = require('duplexify');
const util_1 = require("./util");
// Maps a signed-URL action name to the HTTP verb it authorizes.
// (Compiled TypeScript string-enum pattern; kept as `var` + IIFE so the
// `exports.ActionToHTTPMethod` re-export semantics are unchanged.)
var ActionToHTTPMethod;
(function (methods) {
    methods['read'] = 'GET';
    methods['write'] = 'PUT';
    methods['delete'] = 'DELETE';
    methods['resumable'] = 'POST';
})(ActionToHTTPMethod = exports.ActionToHTTPMethod || (exports.ActionToHTTPMethod = {}));
/**
* Custom error type for errors related to creating a resumable upload.
*
* @private
*/
/**
 * Error raised when a resumable upload session cannot be created.
 *
 * Forwards all constructor arguments to `Error` and tags the instance
 * with a distinguishing `name`.
 *
 * @private
 */
class ResumableUploadError extends Error {
    constructor(...args) {
        super(...args);
        this.name = 'ResumableUploadError';
    }
}
/**
 * Custom error type for errors related to getting signed URLs and policies.
*
* @private
*/
/**
 * Error raised while producing signed URLs or signed policies.
 *
 * Forwards all constructor arguments to `Error` and tags the instance
 * with a distinguishing `name`.
 *
 * @private
 */
class SigningError extends Error {
    constructor(...args) {
        super(...args);
        this.name = 'SigningError';
    }
}
/**
 * Host used for direct object downloads (media endpoint).
 * @const {string}
 * @private
 */
const STORAGE_DOWNLOAD_BASE_URL = 'https://storage.googleapis.com';
/**
 * Base URL of the JSON upload API ("/upload/storage/v1/b").
 * @const {string}
 * @private
 */
const STORAGE_UPLOAD_BASE_URL = 'https://www.googleapis.com/upload/storage/v1/b';
/**
 * Matches a "gs://bucket/object" URL; capture 1 is the bucket name,
 * capture 2 is the object name (which may itself contain slashes).
 * @const {RegExp}
 * @private
 */
const GS_URL_REGEXP = /^gs:\/\/([a-z0-9_.-]+)\/(.+)$/;
/**
 * Error used for download/validation failures; callers attach a string
 * `code` property (e.g. 'CONTENT_DOWNLOAD_MISMATCH') after construction.
 * @private
 */
class RequestError extends Error {
    constructor(...args) {
        // Equivalent to the implicit default derived-class constructor.
        super(...args);
    }
}
// Seven days expressed in seconds (presumably the maximum expiration for
// V4 signed URLs — the usage site is not in view; confirm before relying on it).
const SEVEN_DAYS = 604800;
/*
 * Default signing version for getSignedUrl is 'v2'.
 */
const DEFAULT_SIGNING_VERSION = 'v2';
/**
* A File object is created from your {@link Bucket} object using
* {@link Bucket#file}.
*
* @class
*/
class File extends common_1.ServiceObject {
/**
* @typedef {object} FileOptions Options passed to the File constructor.
* @property {string} [encryptionKey] A custom encryption key.
* @property {number} [generation] Generation to scope the file to.
* @property {string} [kmsKeyName] Cloud KMS Key used to encrypt this
* object, if the object is encrypted by such a key. Limited availability;
* usable only by enabled projects.
* @property {string} [userProject] The ID of the project which will be
* billed for all requests made from File object.
*/
/**
* Constructs a file object.
*
* @param {Bucket} bucket The Bucket instance this file is
* attached to.
* @param {string} name The name of the remote file.
* @param {FileOptions} [options] Configuration options.
* @example
* const {Storage} = require('@google-cloud/storage');
* const storage = new Storage();
* const myBucket = storage.bucket('my-bucket');
*
* const file = myBucket.file('my-file');
*/
constructor(bucket, name, options = {}) {
name = name.replace(/^\/+/, '');
const requestQueryObject = {};
let generation;
if (options.generation != null) {
if (typeof options.generation === 'string') {
generation = Number(options.generation);
}
else {
generation = options.generation;
}
if (!isNaN(generation)) {
requestQueryObject.generation = generation;
}
}
const userProject = options.userProject || bucket.userProject;
if (typeof userProject === 'string') {
requestQueryObject.userProject = userProject;
}
const methods = {
/**
* @typedef {array} DeleteFileResponse
* @property {object} 0 The full API response.
*/
/**
* @callback DeleteFileCallback
* @param {?Error} err Request error, if any.
* @param {object} apiResponse The full API response.
*/
/**
* Delete the file.
*
* @see [Objects: delete API Documentation]{@link https://cloud.google.com/storage/docs/json_api/v1/objects/delete}
*
* @method File#delete
* @param {object} [options] Configuration options.
* @param {string} [options.userProject] The ID of the project which will be
* billed for the request.
* @param {DeleteFileCallback} [callback] Callback function.
* @returns {Promise<DeleteFileResponse>}
*
* @example
* const {Storage} = require('@google-cloud/storage');
* const storage = new Storage();
* const myBucket = storage.bucket('my-bucket');
*
* const file = myBucket.file('my-file');
* file.delete(function(err, apiResponse) {});
*
* //-
* // If the callback is omitted, we'll return a Promise.
* //-
* file.delete().then(function(data) {
* const apiResponse = data[0];
* });
*
* @example <caption>include:samples/files.js</caption>
* region_tag:storage_delete_file
* Another example:
*/
delete: {
reqOpts: {
qs: requestQueryObject,
},
},
/**
* @typedef {array} FileExistsResponse
* @property {boolean} 0 Whether the {@link File} exists.
*/
/**
* @callback FileExistsCallback
* @param {?Error} err Request error, if any.
* @param {boolean} exists Whether the {@link File} exists.
*/
/**
* Check if the file exists.
*
* @method File#exists
* @param {options} [options] Configuration options.
* @param {string} [options.userProject] The ID of the project which will be
* billed for the request.
* @param {FileExistsCallback} [callback] Callback function.
* @returns {Promise<FileExistsResponse>}
*
* @example
* const {Storage} = require('@google-cloud/storage');
* const storage = new Storage();
* const myBucket = storage.bucket('my-bucket');
*
* const file = myBucket.file('my-file');
*
* file.exists(function(err, exists) {});
*
* //-
* // If the callback is omitted, we'll return a Promise.
* //-
* file.exists().then(function(data) {
* const exists = data[0];
* });
*/
exists: {
reqOpts: {
qs: requestQueryObject,
},
},
/**
* @typedef {array} GetFileResponse
* @property {File} 0 The {@link File}.
* @property {object} 1 The full API response.
*/
/**
* @callback GetFileCallback
* @param {?Error} err Request error, if any.
* @param {File} file The {@link File}.
* @param {object} apiResponse The full API response.
*/
/**
* Get a file object and its metadata if it exists.
*
* @method File#get
* @param {options} [options] Configuration options.
* @param {string} [options.userProject] The ID of the project which will be
* billed for the request.
* @param {GetFileCallback} [callback] Callback function.
* @returns {Promise<GetFileResponse>}
*
* @example
* const {Storage} = require('@google-cloud/storage');
* const storage = new Storage();
* const myBucket = storage.bucket('my-bucket');
*
* const file = myBucket.file('my-file');
*
* file.get(function(err, file, apiResponse) {
* // file.metadata` has been populated.
* });
*
* //-
* // If the callback is omitted, we'll return a Promise.
* //-
* file.get().then(function(data) {
* const file = data[0];
* const apiResponse = data[1];
* });
*/
get: {
reqOpts: {
qs: requestQueryObject,
},
},
/**
* @typedef {array} GetFileMetadataResponse
* @property {object} 0 The {@link File} metadata.
* @property {object} 1 The full API response.
*/
/**
* @callback GetFileMetadataCallback
* @param {?Error} err Request error, if any.
* @param {object} metadata The {@link File} metadata.
* @param {object} apiResponse The full API response.
*/
/**
* Get the file's metadata.
*
* @see [Objects: get API Documentation]{@link https://cloud.google.com/storage/docs/json_api/v1/objects/get}
*
* @method File#getMetadata
* @param {object} [options] Configuration options.
* @param {string} [options.userProject] The ID of the project which will be
* billed for the request.
* @param {GetFileMetadataCallback} [callback] Callback function.
* @returns {Promise<GetFileMetadataResponse>}
*
* @example
* const {Storage} = require('@google-cloud/storage');
* const storage = new Storage();
* const myBucket = storage.bucket('my-bucket');
*
* const file = myBucket.file('my-file');
*
* file.getMetadata(function(err, metadata, apiResponse) {});
*
* //-
* // If the callback is omitted, we'll return a Promise.
* //-
* file.getMetadata().then(function(data) {
* const metadata = data[0];
* const apiResponse = data[1];
* });
*
* @example <caption>include:samples/files.js</caption>
* region_tag:storage_get_metadata
* Another example:
*/
getMetadata: {
reqOpts: {
qs: requestQueryObject,
},
},
/**
* @typedef {object} SetFileMetadataOptions Configuration options for File#setMetadata().
* @param {string} [userProject] The ID of the project which will be billed for the request.
*/
/**
* @callback SetFileMetadataCallback
* @param {?Error} err Request error, if any.
* @param {object} apiResponse The full API response.
*/
/**
* @typedef {array} SetFileMetadataResponse
* @property {object} 0 The full API response.
*/
/**
* Merge the given metadata with the current remote file's metadata. This
* will set metadata if it was previously unset or update previously set
* metadata. To unset previously set metadata, set its value to null.
*
* You can set custom key/value pairs in the metadata key of the given
* object, however the other properties outside of this object must adhere
* to the [official API documentation](https://goo.gl/BOnnCK).
*
* NOTE: multiple calls to setMetadata in parallel might result in
* unpredictable results. See [issue]{@link
* https://github.com/googleapis/nodejs-storage/issues/274}.
*
* See the examples below for more information.
*
* @see [Objects: patch API Documentation]{@link https://cloud.google.com/storage/docs/json_api/v1/objects/patch}
*
* @method File#setMetadata
* @param {object} [metadata] The metadata you wish to update.
* @param {SetFileMetadataOptions} [options] Configuration options.
* @param {SetFileMetadataCallback} [callback] Callback function.
* @returns {Promise<SetFileMetadataResponse>}
*
* @example
* const {Storage} = require('@google-cloud/storage');
* const storage = new Storage();
* const myBucket = storage.bucket('my-bucket');
*
* const file = myBucket.file('my-file');
*
* const metadata = {
* contentType: 'application/x-font-ttf',
* metadata: {
* my: 'custom',
* properties: 'go here'
* }
* };
*
* file.setMetadata(metadata, function(err, apiResponse) {});
*
* // Assuming current metadata = { hello: 'world', unsetMe: 'will do' }
* file.setMetadata({
* metadata: {
* abc: '123', // will be set.
* unsetMe: null, // will be unset (deleted).
* hello: 'goodbye' // will be updated from 'world' to 'goodbye'.
* }
* }, function(err, apiResponse) {
* // metadata should now be { abc: '123', hello: 'goodbye' }
* });
*
* //-
* // Set a temporary hold on this file from its bucket's retention period
* // configuration.
* //
* file.setMetadata({
* temporaryHold: true
* }, function(err, apiResponse) {});
*
* //-
* // Alternatively, you may set a temporary hold. This will follow the
* // same behavior as an event-based hold, with the exception that the
* // bucket's retention policy will not renew for this file from the time
* // the hold is released.
* //-
* file.setMetadata({
* eventBasedHold: true
* }, function(err, apiResponse) {});
*
* //-
* // If the callback is omitted, we'll return a Promise.
* //-
* file.setMetadata(metadata).then(function(data) {
* const apiResponse = data[0];
* });
*/
setMetadata: {
reqOpts: {
qs: requestQueryObject,
},
},
};
super({
parent: bucket,
baseUrl: '/o',
id: encodeURIComponent(name),
methods,
});
this.bucket = bucket;
// tslint:disable-next-line:no-any
this.storage = bucket.parent;
// @TODO Can this duplicate code from above be avoided?
if (options.generation != null) {
let generation;
if (typeof options.generation === 'string') {
generation = Number(options.generation);
}
else {
generation = options.generation;
}
if (!isNaN(generation)) {
this.generation = generation;
}
}
this.kmsKeyName = options.kmsKeyName;
this.userProject = userProject;
this.name = name;
if (options.encryptionKey) {
this.setEncryptionKey(options.encryptionKey);
}
this.acl = new acl_1.Acl({
request: this.request.bind(this),
pathPrefix: '/acl',
});
}
/**
* @typedef {array} CopyResponse
* @property {File} 0 The copied {@link File}.
* @property {object} 1 The full API response.
*/
/**
* @callback CopyCallback
* @param {?Error} err Request error, if any.
* @param {File} copiedFile The copied {@link File}.
* @param {object} apiResponse The full API response.
*/
/**
* @typedef {object} CopyOptions Configuration options for File#copy(). See an
* [Object
* resource](https://cloud.google.com/storage/docs/json_api/v1/objects#resource).
* @property {string} [destinationKmsKeyName] Resource name of the Cloud
* KMS key, of the form
* `projects/my-project/locations/location/keyRings/my-kr/cryptoKeys/my-key`,
* that will be used to encrypt the object. Overwrites the object
* metadata's `kms_key_name` value, if any.
* @property {string} [keepAcl] Retain the ACL for the new file.
* @property {string} [predefinedAcl] Set the ACL for the new file.
* @property {string} [token] A previously-returned `rewriteToken` from an
* unfinished rewrite request.
* @property {string} [userProject] The ID of the project which will be
* billed for the request.
*/
/**
* Copy this file to another file. By default, this will copy the file to the
* same bucket, but you can choose to copy it to another Bucket by providing
* a Bucket or File object or a URL starting with "gs://".
*
* @see [Objects: rewrite API Documentation]{@link https://cloud.google.com/storage/docs/json_api/v1/objects/rewrite}
*
* @throws {Error} If the destination file is not provided.
*
* @param {string|Bucket|File} destination Destination file.
* @param {CopyOptions} [options] Configuration options. See an
* @param {CopyCallback} [callback] Callback function.
* @returns {Promise<CopyResponse>}
*
* @example
* const {Storage} = require('@google-cloud/storage');
* const storage = new Storage();
*
* //-
* // You can pass in a variety of types for the destination.
* //
* // For all of the below examples, assume we are working with the following
* // Bucket and File objects.
* //-
* const bucket = storage.bucket('my-bucket');
* const file = bucket.file('my-image.png');
*
* //-
* // If you pass in a string for the destination, the file is copied to its
* // current bucket, under the new name provided.
* //-
* file.copy('my-image-copy.png', function(err, copiedFile, apiResponse) {
* // `my-bucket` now contains:
* // - "my-image.png"
* // - "my-image-copy.png"
*
* // `copiedFile` is an instance of a File object that refers to your new
* // file.
* });
*
* //-
* // If you pass in a string starting with "gs://" for the destination, the
* // file is copied to the other bucket and under the new name provided.
* //-
* const newLocation = 'gs://another-bucket/my-image-copy.png';
* file.copy(newLocation, function(err, copiedFile, apiResponse) {
* // `my-bucket` still contains:
* // - "my-image.png"
* //
* // `another-bucket` now contains:
* // - "my-image-copy.png"
*
* // `copiedFile` is an instance of a File object that refers to your new
* // file.
* });
*
* //-
* // If you pass in a Bucket object, the file will be copied to that bucket
* // using the same name.
* //-
* const anotherBucket = storage.bucket('another-bucket');
* file.copy(anotherBucket, function(err, copiedFile, apiResponse) {
* // `my-bucket` still contains:
* // - "my-image.png"
* //
* // `another-bucket` now contains:
* // - "my-image.png"
*
* // `copiedFile` is an instance of a File object that refers to your new
* // file.
* });
*
* //-
* // If you pass in a File object, you have complete control over the new
* // bucket and filename.
* //-
* const anotherFile = anotherBucket.file('my-awesome-image.png');
* file.copy(anotherFile, function(err, copiedFile, apiResponse) {
* // `my-bucket` still contains:
* // - "my-image.png"
* //
* // `another-bucket` now contains:
* // - "my-awesome-image.png"
*
* // Note:
* // The `copiedFile` parameter is equal to `anotherFile`.
* });
*
* //-
* // If the callback is omitted, we'll return a Promise.
* //-
* file.copy(newLocation).then(function(data) {
* const newFile = data[0];
* const apiResponse = data[1];
* });
*
* @example <caption>include:samples/files.js</caption>
* region_tag:storage_copy_file
* Another example:
*/
copy(destination, optionsOrCallback, callback) {
    // Thrown both for a missing destination and (below) for a destination
    // of an unsupported type.
    const noDestinationError = new Error('Destination file should have a name.');
    if (!destination) {
        throw noDestinationError;
    }
    // Normalize the polymorphic (options?, callback?) arguments.
    let options = {};
    if (typeof optionsOrCallback === 'function') {
        callback = optionsOrCallback;
    }
    else if (optionsOrCallback) {
        options = optionsOrCallback;
    }
    // Deep-copy so the `delete`s below never mutate the caller's object.
    options = extend(true, {}, options);
    callback = callback || common_1.util.noop;
    // Resolve the destination — plain name, "gs://" URL, Bucket, or File —
    // into a target bucket plus object name.
    let destBucket;
    let destName;
    let newFile;
    if (typeof destination === 'string') {
        const parsedDestination = GS_URL_REGEXP.exec(destination);
        if (parsedDestination !== null && parsedDestination.length === 3) {
            destBucket = this.storage.bucket(parsedDestination[1]);
            destName = parsedDestination[2];
        }
        else {
            // Plain string: copy within this file's own bucket.
            destBucket = this.bucket;
            destName = destination;
        }
    }
    else if (destination instanceof bucket_1.Bucket) {
        // Bucket destination: keep the current object name.
        destBucket = destination;
        destName = this.name;
    }
    else if (destination instanceof File) {
        destBucket = destination.bucket;
        destName = destination.name;
        newFile = destination;
    }
    else {
        throw noDestinationError;
    }
    const query = {};
    if (this.generation !== undefined) {
        query.sourceGeneration = this.generation;
    }
    if (options.token !== undefined) {
        // Continuation token from a previous, unfinished rewrite.
        query.rewriteToken = options.token;
    }
    if (options.userProject !== undefined) {
        // userProject travels in the query string, not the request body.
        query.userProject = options.userProject;
        delete options.userProject;
    }
    newFile = newFile || destBucket.file(destName);
    const headers = {};
    if (this.encryptionKey !== undefined) {
        // Identify the customer-supplied key of the *source* object.
        headers['x-goog-copy-source-encryption-algorithm'] = 'AES256';
        headers['x-goog-copy-source-encryption-key'] = this.encryptionKeyBase64;
        headers['x-goog-copy-source-encryption-key-sha256'] =
            this.encryptionKeyHash;
    }
    // Destination key precedence: the destination File's customer-supplied
    // key, then an explicit destinationKmsKeyName option, then the
    // destination File's kmsKeyName.
    if (newFile.encryptionKey !== undefined) {
        this.setEncryptionKey(newFile.encryptionKey);
    }
    else if (options.destinationKmsKeyName !== undefined) {
        query.destinationKmsKeyName = options.destinationKmsKeyName;
        delete options.destinationKmsKeyName;
    }
    else if (newFile.kmsKeyName !== undefined) {
        query.destinationKmsKeyName = newFile.kmsKeyName;
    }
    if (query.destinationKmsKeyName) {
        // Switching to a KMS key: remove the customer-supplied-key request
        // interceptor so its headers aren't sent alongside the KMS key name.
        this.kmsKeyName = query.destinationKmsKeyName;
        const keyIndex = this.interceptors.indexOf(this.encryptionKeyInterceptor);
        if (keyIndex > -1) {
            this.interceptors.splice(keyIndex, 1);
        }
    }
    this.request({
        method: 'POST',
        uri: `/rewriteTo/b/${destBucket.name}/o/${encodeURIComponent(newFile.name)}`,
        qs: query,
        json: options,
        headers,
    }, (err, resp) => {
        if (err) {
            callback(err, null, resp);
            return;
        }
        if (resp.rewriteToken) {
            // Rewrite incomplete: recurse with the token (plus the sticky
            // query options) until the API stops returning a token.
            const options = {
                token: resp.rewriteToken,
            };
            if (query.userProject) {
                options.userProject = query.userProject;
            }
            if (query.destinationKmsKeyName) {
                options.destinationKmsKeyName = query.destinationKmsKeyName;
            }
            this.copy(newFile, options, callback);
            return;
        }
        callback(null, newFile, resp);
    });
}
/**
* @typedef {object} CreateReadStreamOptions Configuration options for File#createReadStream.
* @property {string} [userProject] The ID of the project which will be
* billed for the request.
* @property {string|boolean} [validation] Possible values: `"md5"`,
* `"crc32c"`, or `false`. By default, data integrity is validated with a
* CRC32c checksum. You may use MD5 if preferred, but that hash is not
* supported for composite objects. An error will be raised if MD5 is
* specified but is not available. You may also choose to skip validation
* completely, however this is **not recommended**.
* @property {number} [start] A byte offset to begin the file's download
* from. Default is 0. NOTE: Byte ranges are inclusive; that is,
* `options.start = 0` and `options.end = 999` represent the first 1000
* bytes in a file or object. NOTE: when specifying a byte range, data
* integrity is not available.
* @property {number} [end] A byte offset to stop reading the file at.
* NOTE: Byte ranges are inclusive; that is, `options.start = 0` and
* `options.end = 999` represent the first 1000 bytes in a file or object.
* NOTE: when specifying a byte range, data integrity is not available.
*/
/**
* Create a readable stream to read the contents of the remote file. It can be
* piped to a writable stream or listened to for 'data' events to read a
* file's contents.
*
* In the unlikely event there is a mismatch between what you downloaded and
* the version in your Bucket, your error handler will receive an error with
* code "CONTENT_DOWNLOAD_MISMATCH". If you receive this error, the best
* recourse is to try downloading the file again.
*
* For faster crc32c computation, you must manually install
* [`fast-crc32c`](http://www.gitnpm.com/fast-crc32c):
*
* $ npm install --save fast-crc32c
*
* NOTE: Readable streams will emit the `end` event when the file is fully
* downloaded.
*
* @param {CreateReadStreamOptions} [options] Configuration options.
* @returns {ReadableStream}
*
* @example
* //-
* // <h4>Downloading a File</h4>
* //
* // The example below demonstrates how we can reference a remote file, then
* // pipe its contents to a local file. This is effectively creating a local
* // backup of your remote data.
* //-
* const {Storage} = require('@google-cloud/storage');
* const storage = new Storage();
* const bucket = storage.bucket('my-bucket');
*
* const fs = require('fs');
* const remoteFile = bucket.file('image.png');
* const localFilename = '/Users/stephen/Photos/image.png';
*
* remoteFile.createReadStream()
* .on('error', function(err) {})
* .on('response', function(response) {
* // Server connected and responded with the specified status and
* headers.
* })
* .on('end', function() {
* // The file is fully downloaded.
* })
* .pipe(fs.createWriteStream(localFilename));
*
* //-
* // To limit the downloaded data to only a byte range, pass an options
* object.
* //-
* const logFile = myBucket.file('access_log');
* logFile.createReadStream({
* start: 10000,
* end: 20000
* })
* .on('error', function(err) {})
* .pipe(fs.createWriteStream('/Users/stephen/logfile.txt'));
*
* //-
* // To read a tail byte range, specify only `options.end` as a negative
* // number.
* //-
* const logFile = myBucket.file('access_log');
* logFile.createReadStream({
* end: -100
* })
* .on('error', function(err) {})
* .pipe(fs.createWriteStream('/Users/stephen/logfile.txt'));
*/
createReadStream(options = {}) {
    // Any numeric start/end makes this a ranged download, which cannot be
    // integrity-checked.
    const rangeRequest = typeof options.start === 'number' || typeof options.end === 'number';
    // A lone negative `end` means "the last N bytes" (Range: bytes=-N).
    const tailRequest = options.end < 0;
    // tslint:disable-next-line:no-any
    let validateStream; // Created later, if necessary.
    const throughStream = streamEvents(through());
    // Default validation: CRC32c on, MD5 off.
    let crc32c = true;
    let md5 = false;
    // Guards the one-time metadata refresh inside onComplete below.
    let refreshedMetadata = false;
    if (typeof options.validation === 'string') {
        // tslint:disable-next-line:no-any
        options.validation =
            options.validation.toLowerCase();
        crc32c = options.validation === 'crc32c';
        md5 = options.validation === 'md5';
    }
    else if (options.validation === false) {
        crc32c = false;
    }
    if (rangeRequest) {
        if (typeof options.validation === 'string' ||
            options.validation === true) {
            throw new Error('Cannot use validation with file ranges (start/end).');
        }
        // Range requests can't receive data integrity checks.
        crc32c = false;
        md5 = false;
    }
    // Authenticate the request, then pipe the remote API request to the stream
    // returned to the user.
    const makeRequest = () => {
        const query = {
            alt: 'media',
        };
        if (this.generation) {
            query.generation = this.generation;
        }
        if (options.userProject) {
            query.userProject = options.userProject;
        }
        const headers = {
            'Accept-Encoding': 'gzip',
            // NOTE(review): 'no-store' opts this response out of caches —
            // confirm this header is intentional for media downloads.
            'Cache-Control': 'no-store'
        };
        if (rangeRequest) {
            // Byte ranges are inclusive; a tail request sends only "-end".
            const start = typeof options.start === 'number' ? options.start : '0';
            const end = typeof options.end === 'number' ? options.end : '';
            headers.Range = `bytes=${tailRequest ? end : `${start}-${end}`}`;
        }
        const reqOpts = {
            forever: false,
            uri: '',
            headers,
            qs: query,
        };
        this.requestStream(reqOpts)
            .on('error', err => {
            throughStream.destroy(err);
        })
            .on('response', res => {
            throughStream.emit('response', res);
            // tslint:disable-next-line:no-any
            common_1.util.handleResp(null, res, null, onResponse);
        })
            .resume();
        // We listen to the response event from the request stream so that we
        // can...
        //
        //   1) Intercept any data from going to the user if an error occurred.
        //   2) Calculate the hashes from the http.IncomingMessage response
        //      stream,
        // which will return the bytes from the source without decompressing
        // gzip'd content. We then send it through decompressed, if
        // applicable, to the user.
        const onResponse = (err, body, rawResponseStream) => {
            if (err) {
                // Get error message from the body.
                rawResponseStream.pipe(concat(body => {
                    err.message = body.toString();
                    throughStream.destroy(err);
                }));
                return;
            }
            rawResponseStream.on('error', onComplete);
            const headers = rawResponseStream.toJSON().headers;
            const isCompressed = headers['content-encoding'] === 'gzip';
            const shouldRunValidation = !rangeRequest && (crc32c || md5);
            const throughStreams = [];
            if (shouldRunValidation) {
                // The validate stream sits *before* gunzip so hashes cover the
                // raw (possibly compressed) bytes as stored by the server.
                validateStream = hashStreamValidation({ crc32c, md5 });
                throughStreams.push(validateStream);
            }
            if (isCompressed) {
                throughStreams.push(zlib.createGunzip());
            }
            if (throughStreams.length === 1) {
                rawResponseStream =
                    // tslint:disable-next-line:no-any
                    rawResponseStream.pipe(throughStreams[0]);
            }
            else if (throughStreams.length > 1) {
                rawResponseStream =
                    rawResponseStream.pipe(pumpify.obj(throughStreams));
            }
            // `end: false` — onComplete decides when to end the user stream,
            // after validation has run.
            rawResponseStream.on('error', onComplete)
                .on('end', onComplete)
                .pipe(throughStream, { end: false });
        };
        // This is hooked to the `complete` event from the request stream. This is
        // our chance to validate the data and let the user know if anything went
        // wrong.
        let onCompleteCalled = false;
        const onComplete = (err) => {
            if (err) {
                onCompleteCalled = true;
                throughStream.destroy(err);
                return;
            }
            if (rangeRequest) {
                // Ranged downloads skip validation entirely.
                onCompleteCalled = true;
                throughStream.end();
                return;
            }
            if (!refreshedMetadata) {
                // Fetch fresh metadata (crc32c/md5Hash) and re-enter this
                // function via the getMetadata callback.
                refreshedMetadata = true;
                this.getMetadata({ userProject: options.userProject }, onComplete);
                return;
            }
            // onComplete may be invoked from several events; run the
            // validation tail only once.
            if (onCompleteCalled) {
                return;
            }
            onCompleteCalled = true;
            const hashes = {
                crc32c: this.metadata.crc32c,
                md5: this.metadata.md5Hash,
            };
            // If we're doing validation, assume the worst-- a data integrity
            // mismatch. If not, these tests won't be performed, and we can assume
            // the best.
            let failed = crc32c || md5;
            if (crc32c && hashes.crc32c) {
                // We must remove the first four bytes from the returned checksum.
                // http://stackoverflow.com/questions/25096737/
                //   base64-encoding-of-crc32c-long-value
                failed = !validateStream.test('crc32c', hashes.crc32c.substr(4));
            }
            if (md5 && hashes.md5) {
                failed = !validateStream.test('md5', hashes.md5);
            }
            if (md5 && !hashes.md5) {
                // MD5 was explicitly requested but the server has no MD5 for
                // this object (composite objects lack one).
                const hashError = new RequestError([
                    'MD5 verification was specified, but is not available for the',
                    'requested object. MD5 is not available for composite objects.',
                ].join(' '));
                hashError.code = 'MD5_NOT_AVAILABLE';
                throughStream.destroy(hashError);
            }
            else if (failed) {
                const mismatchError = new RequestError([
                    'The downloaded data did not match the data from the server.',
                    'To be sure the content is the same, you should download the',
                    'file again.',
                ].join(' '));
                mismatchError.code = 'CONTENT_DOWNLOAD_MISMATCH';
                throughStream.destroy(mismatchError);
            }
            else {
                throughStream.end();
            }
        };
    };
    // Lazy: nothing is requested until the consumer starts reading.
    throughStream.on('reading', makeRequest);
    return throughStream;
}
/**
* @callback CreateResumableUploadCallback
* @param {?Error} err Request error, if any.
* @param {string} uri The resumable upload's unique session URI.
*/
/**
* @typedef {array} CreateResumableUploadResponse
* @property {string} 0 The resumable upload's unique session URI.
*/
/**
* @typedef {object} CreateResumableUploadOptions
* @property {object} [metadata] Metadata to set on the file.
* @property {string} [origin] Origin header to set for the upload.
* @property {string} [predefinedAcl] Apply a predefined set of access
* controls to this object.
*
* Acceptable values are:
* - **`authenticatedRead`** - Object owner gets `OWNER` access, and
* `allAuthenticatedUsers` get `READER` access.
*
* - **`bucketOwnerFullControl`** - Object owner gets `OWNER` access, and
* project team owners get `OWNER` access.
*
* - **`bucketOwnerRead`** - Object owner gets `OWNER` access, and project
* team owners get `READER` access.
*
* - **`private`** - Object owner gets `OWNER` access.
*
* - **`projectPrivate`** - Object owner gets `OWNER` access, and project
* team members get access according to their roles.
*
* - **`publicRead`** - Object owner gets `OWNER` access, and `allUsers`
* get `READER` access.
* @property {boolean} [private] Make the uploaded file private. (Alias for
* `options.predefinedAcl = 'private'`)
* @property {boolean} [public] Make the uploaded file public. (Alias for
* `options.predefinedAcl = 'publicRead'`)
* @property {string} [userProject] The ID of the project which will be
* billed for the request.
*/
/**
* Create a unique resumable upload session URI. This is the first step when
* performing a resumable upload.
*
* See the [Resumable upload
* guide](https://cloud.google.com/storage/docs/json_api/v1/how-tos/resumable-upload)
* for more on how the entire process works.
*
* <h4>Note</h4>
*
* If you are just looking to perform a resumable upload without worrying
* about any of the details, see {@link File#createWriteStream}. Resumable
* uploads are performed by default.
*
* @see [Resumable upload guide]{@link https://cloud.google.com/storage/docs/json_api/v1/how-tos/resumable-upload}
*
* @param {CreateResumableUploadOptions} [options] Configuration options.
* @param {CreateResumableUploadCallback} [callback] Callback function.
* @returns {Promise<CreateResumableUploadResponse>}
*
* @example
* const {Storage} = require('@google-cloud/storage');
* const storage = new Storage();
* const myBucket = storage.bucket('my-bucket');
*
* const file = myBucket.file('my-file');
* file.createResumableUpload(function(err, uri) {
* if (!err) {
* // `uri` can be used to PUT data to.
* }
* });
*
* //-
* // If the callback is omitted, we'll return a Promise.
* //-
* file.createResumableUpload().then(function(data) {
* const uri = data[0];
* });
*/
createResumableUpload(optionsOrCallback, callback) {
const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {};
callback =
typeof optionsOrCallback === 'function' ? optionsOrCallback : callback;
resumableUpload.createURI({
authClient: this.storage.authClient,
bucket: this.bucket.name,
file: this.name,
generation: this.generation,
key: this.encryptionKey,
kmsKeyName: this.kmsKeyName,
metadata: options.metadata,
offset: options.offset,
origin: options.origin,
predefinedAcl: options.predefinedAcl,
private: options.private,
public: options.public,
userProject: options.userProject || this.userProject,
}, callback);
}
/**
* @typedef {object} CreateWriteStreamOptions Configuration options for File#createWriteStream().
* @property {string} [contentType] Alias for
* `options.metadata.contentType`. If set to `auto`, the file name is used
* to determine the contentType.
* @property {string|boolean} [gzip] If true, automatically gzip the file.
* If set to `auto`, the contentType is used to determine if the file
* should be gzipped. This will set `options.metadata.contentEncoding` to
* `gzip` if necessary.
* @property {object} [metadata] See the examples below or
* [Objects: insert request
* body](https://cloud.google.com/storage/docs/json_api/v1/objects/insert#request_properties_JSON)
* for more details.
* @property {number} [offset] The starting byte of the upload stream, for
* resuming an interrupted upload. Defaults to 0.
* @property {string} [predefinedAcl] Apply a predefined set of access
* controls to this object.
*
* Acceptable values are:
* - **`authenticatedRead`** - Object owner gets `OWNER` access, and
* `allAuthenticatedUsers` get `READER` access.
*
* - **`bucketOwnerFullControl`** - Object owner gets `OWNER` access, and
* project team owners get `OWNER` access.
*
* - **`bucketOwnerRead`** - Object owner gets `OWNER` access, and project
* team owners get `READER` access.
*
* - **`private`** - Object owner gets `OWNER` access.
*
* - **`projectPrivate`** - Object owner gets `OWNER` access, and project
* team members get access according to their roles.
*
* - **`publicRead`** - Object owner gets `OWNER` access, and `allUsers`
* get `READER` access.
* @property {boolean} [private] Make the uploaded file private. (Alias for
* `options.predefinedAcl = 'private'`)
* @property {boolean} [public] Make the uploaded file public. (Alias for
* `options.predefinedAcl = 'publicRead'`)
* @property {boolean} [resumable] Force a resumable upload. NOTE: When
* working with streams, the file format and size is unknown until it's
* completely consumed. Because of this, it's best for you to be explicit
* for what makes sense given your input.
* @property {string} [uri] The URI for an already-created resumable
* upload. See {@link File#createResumableUpload}.
* @property {string} [userProject] The ID of the project which will be
* billed for the request.
* @property {string|boolean} [validation] Possible values: `"md5"`,
* `"crc32c"`, or `false`. By default, data integrity is validated with a
* CRC32c checksum. You may use MD5 if preferred, but that hash is not
* supported for composite objects. An error will be raised if MD5 is
* specified but is not available. You may also choose to skip validation
* completely, however this is **not recommended**.
*/
/**
* Create a writable stream to overwrite the contents of the file in your
* bucket.
*
* A File object can also be used to create files for the first time.
*
* Resumable uploads are automatically enabled and must be shut off explicitly
* by setting `options.resumable` to `false`.
*
 * Resumable uploads require write access to the $HOME directory, where some
 * upload metadata is stored via
 * [`configstore`](http://www.gitnpm.com/configstore). By default, if the
 * directory is not writable, we will fall back to a simple upload. However,
 * if you explicitly request a resumable upload and we cannot write to the
 * config directory, we will return a `ResumableUploadError`.
*
* <p class="notice">
* There is some overhead when using a resumable upload that can cause
* noticeable performance degradation while uploading a series of small
* files. When uploading files less than 10MB, it is recommended that the
* resumable feature is disabled.
* </p>
*
* For faster crc32c computation, you must manually install
* [`fast-crc32c`](http://www.gitnpm.com/fast-crc32c):
*
* $ npm install --save fast-crc32c
*
* NOTE: Writable streams will emit the `finish` event when the file is fully
* uploaded.
*
* @see [Upload Options (Simple or Resumable)]{@link https://cloud.google.com/storage/docs/json_api/v1/how-tos/upload}
* @see [Objects: insert API Documentation]{@link https://cloud.google.com/storage/docs/json_api/v1/objects/insert}
*
* @param {CreateWriteStreamOptions} [options] Configuration options.
* @returns {WritableStream}
*
* @example
* const fs = require('fs');
* const {Storage} = require('@google-cloud/storage');
* const storage = new Storage();
* const myBucket = storage.bucket('my-bucket');
*
* const file = myBucket.file('my-file');
*
* //-
* // <h4>Uploading a File</h4>
* //
* // Now, consider a case where we want to upload a file to your bucket. You
* // have the option of using {@link Bucket#upload}, but that is just
* // a convenience method which will do the following.
* //-
* fs.createReadStream('/Users/stephen/Photos/birthday-at-the-zoo/panda.jpg')
* .pipe(file.createWriteStream())
* .on('error', function(err) {})
* .on('finish', function() {
* // The file upload is complete.
* });
*
* //-
* // <h4>Uploading a File with gzip compression</h4>
* //-
* fs.createReadStream('/Users/stephen/site/index.html')
* .pipe(file.createWriteStream({ gzip: true }))
* .on('error', function(err) {})
* .on('finish', function() {
* // The file upload is complete.
* });
*
* //-
* // Downloading the file with `createReadStream` will automatically decode
* // the file.
* //-
*
* //-
* // <h4>Uploading a File with Metadata</h4>
* //
* // One last case you may run into is when you want to upload a file to your
* // bucket and set its metadata at the same time. Like above, you can use
* // {@link Bucket#upload} to do this, which is just a wrapper around
* // the following.
* //-
* fs.createReadStream('/Users/stephen/Photos/birthday-at-the-zoo/panda.jpg')
* .pipe(file.createWriteStream({
* metadata: {
* contentType: 'image/jpeg',
* metadata: {
* custom: 'metadata'
* }
* }
* }))
* .on('error', function(err) {})
* .on('finish', function() {
* // The file upload is complete.
* });
*/
// tslint:disable-next-line:no-any
createWriteStream(options = {}) {
options = Object.assign({ metadata: {} }, options);
if (options.contentType) {
options.metadata.contentType = options.contentType;
if (options.metadata.contentType === 'auto') {
options.metadata.contentType = mime.getType(this.name);
}
}
let gzip = options.gzip;
if (gzip === 'auto') {
gzip = compressible(options.metadata.contentType);
}
if (gzip) {
options.metadata.contentEncoding = 'gzip';
}
let crc32c = true;
let md5 = false;
if (typeof options.validation === 'string') {
options.validation = options.validation.toLowerCase();
crc32c = options.validation === 'crc32c';
md5 = options.validation === 'md5';
}
else if (options.validation === false) {
crc32c = false;
}
// Collect data as it comes in to store in a hash. This is compared to the
// checksum value on the returned metadata from the API.
const validateStream = hashStreamValidation({
crc32c,
md5,
});
const fileWriteStream = duplexify();
const stream = streamEvents(pumpify([
gzip ? zlib.createGzip() : through(),
validateStream,
fileWriteStream,
]));
//