// @google-cloud/bigquery
// Google BigQuery Client Library for Node.js
// (compiled JavaScript build artifact; listing header: 1,313 lines, 67.7 kB)
"use strict";
/*!
* Copyright 2019 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Mark this module as an ES-module interop target and pre-declare every
// named export (they are assigned below and by the class declarations).
Object.defineProperty(exports, "__esModule", { value: true });
exports.BigQueryInt = exports.BigQueryTime = exports.BigQueryDatetime = exports.BigQueryTimestamp = exports.Geography = exports.BigQueryDate = exports.BigQueryRange = exports.BigQuery = exports.PROTOCOL_REGEX = exports.common = void 0;
const common_1 = require("@google-cloud/common");
// Re-exported so callers can reach @google-cloud/common helpers via this package.
const common = require("@google-cloud/common");
exports.common = common;
const paginator_1 = require("@google-cloud/paginator");
const promisify_1 = require("@google-cloud/promisify");
const precise_date_1 = require("@google-cloud/precise-date");
const arrify = require("arrify");
const Big = require("big.js");
const extend = require("extend");
const is = require("is");
const uuid = require("uuid");
const dataset_1 = require("./dataset");
const job_1 = require("./job");
const table_1 = require("./table");
const logger_1 = require("./logger");
// Matches a leading URL scheme such as `https://` (captures the scheme name).
exports.PROTOCOL_REGEX = /^(\w*):\/\//;
/**
* @typedef {object} BigQueryOptions
* @property {string} [projectId] The project ID from the Google Developer's
* Console, e.g. 'grape-spaceship-123'. We will also check the environment
* variable `GCLOUD_PROJECT` for your project ID. If your app is running in
* an environment which supports {@link
* https://cloud.google.com/docs/authentication/production#providing_credentials_to_your_application
* Application Default Credentials}, your project ID will be detected
* automatically.
* @property {string} [keyFilename] Full path to the a .json, .pem, or .p12 key
* downloaded from the Google Developers Console. If you provide a path to a
* JSON file, the `projectId` option above is not necessary. NOTE: .pem and
* .p12 require you to specify the `email` option as well.
* @property {string} [token] An OAUTH access token. If provided, we will not
* manage fetching, re-using, and re-minting access tokens.
* @property {string} [email] Account email address. Required when using a .pem
* or .p12 keyFilename.
* @property {object} [credentials] Credentials object.
* @property {string} [credentials.client_email]
* @property {string} [credentials.private_key]
* @property {Constructor} [promise] Custom promise module to use instead of
* native Promises.
* @property {string[]} [scopes] Additional OAuth scopes to use in requests. For
* example, to access an external data source, you may need the
* `https://www.googleapis.com/auth/drive.readonly` scope.
*/
/**
* In the following examples from this page and the other modules (`Dataset`,
* `Table`, etc.), we are going to be using a dataset from
* {@link http://goo.gl/f2SXcb| data.gov} of higher education institutions.
*
* We will create a table with the correct schema, import the public CSV file
* into that table, and query it for data.
*
* This client supports enabling query-related preview features via environment
* variables. By setting the environment variable QUERY_PREVIEW_ENABLED to the string
* "TRUE", the client will enable preview features, though behavior may still be
* controlled via the bigquery service as well. Currently, the feature(s) in scope
* include: stateless queries (query execution without corresponding job metadata).
*
* @class
*
* See {@link https://cloud.google.com/bigquery/what-is-bigquery| What is BigQuery?}
*
* @param {BigQueryOptions} options Constructor options.
*
* @example Install the client library with <a href="https://www.npmjs.com/">npm</a>:
* ```
* npm install @google-cloud/bigquery
*
* ```
* @example Import the client library
* ```
* const {BigQuery} = require('@google-cloud/bigquery');
*
* ```
* @example Create a client that uses <a href="https://cloud.google.com/docs/authentication/production#providing_credentials_to_your_application">Application Default Credentials (ADC)</a>:
* ```
* const bigquery = new BigQuery();
*
* ```
* @example Create a client with <a href="https://cloud.google.com/docs/authentication/production#obtaining_and_providing_service_account_credentials_manually">explicit credentials</a>:
* ```
* const bigquery = new BigQuery({
* projectId: 'your-project-id',
* keyFilename: '/path/to/keyfile.json'
* });
*
* ```
* @example <caption>include:samples/quickstart.js</caption>
* region_tag:bigquery_quickstart
* Full quickstart example:
*/
class BigQuery extends common_1.Service {
/** Placeholder; replaced in the constructor by `paginator.streamify('queryAsStream_')`. */
createQueryStream(options) {
    // placeholder body, overwritten in constructor
    return new paginator_1.ResourceStream({}, () => { });
}
/** Placeholder; replaced in the constructor by `paginator.streamify('getDatasets')`. */
getDatasetsStream(options) {
    // placeholder body, overwritten in constructor
    return new paginator_1.ResourceStream({}, () => { });
}
/** Placeholder; replaced in the constructor by `paginator.streamify('getJobs')`. */
getJobsStream(options) {
    // placeholder body, overwritten in constructor
    return new paginator_1.ResourceStream({}, () => { });
}
constructor(options = {}) {
    let universeDomain = 'googleapis.com';
    const servicePath = 'bigquery';
    if (options.universeDomain) {
        universeDomain = BigQuery.sanitizeDomain(options.universeDomain);
    }
    const EMULATOR_HOST = process.env.BIGQUERY_EMULATOR_HOST;
    // Endpoint resolution precedence (last assignment wins):
    // default `https://bigquery.<universeDomain>` < BIGQUERY_EMULATOR_HOST
    // < options.apiEndpoint.
    let apiEndpoint = `https://${servicePath}.${universeDomain}`;
    if (typeof EMULATOR_HOST === 'string') {
        apiEndpoint = BigQuery.sanitizeEndpoint(EMULATOR_HOST);
    }
    if (options.apiEndpoint) {
        apiEndpoint = BigQuery.sanitizeEndpoint(options.apiEndpoint);
    }
    options = Object.assign({}, options, {
        apiEndpoint,
    });
    // NOTE(review): when the emulator host is set it is used verbatim as the
    // base URL (no `/bigquery/v2` suffix is appended) — confirm intended.
    const baseUrl = EMULATOR_HOST || `${options.apiEndpoint}/bigquery/v2`;
    const config = {
        apiEndpoint: options.apiEndpoint,
        baseUrl,
        scopes: ['https://www.googleapis.com/auth/bigquery'],
        packageJson: require('../../package.json'),
        autoRetry: options.autoRetry,
        maxRetries: options.maxRetries,
    };
    if (options.scopes) {
        // User-supplied scopes are additive to the default BigQuery scope.
        config.scopes = config.scopes.concat(options.scopes);
    }
    super(config, options);
    // Opt into preview features only when QUERY_PREVIEW_ENABLED equals
    // "TRUE" (case-insensitive) in the environment.
    const QUERY_PREVIEW_ENABLED = process.env.QUERY_PREVIEW_ENABLED;
    this._enableQueryPreview = false;
    if (typeof QUERY_PREVIEW_ENABLED === 'string') {
        if (QUERY_PREVIEW_ENABLED.toUpperCase() === 'TRUE') {
            this._enableQueryPreview = true;
        }
    }
    this._universeDomain = universeDomain;
    this.location = options.location;
    /**
     * Run a query scoped to your project as a readable object stream.
     *
     * @method
     * @param {object} query Configuration object. See {@link BigQuery.query} for a complete
     *     list of options.
     *
     * @example
     * ```
     * const {BigQuery} = require('@google-cloud/bigquery');
     * const bigquery = new BigQuery();
     *
     * const query = 'SELECT url FROM `publicdata.samples.github_nested` LIMIT
     * 100';
     *
     * bigquery.createQueryStream(query)
     *   .on('error', console.error)
     *   .on('data', function(row) {
     *     // row is a result from your query.
     *   })
     *   .on('end', function() {
     *     // All rows retrieved.
     *   });
     *
     * //-
     * // If you anticipate many results, you can end a stream early to prevent
     * // unnecessary processing and API requests.
     * //-
     * bigquery.createQueryStream(query)
     *   .on('data', function(row) {
     *     this.end();
     *   });
     * ```
     */
    this.createQueryStream = paginator_1.paginator.streamify('queryAsStream_');
    /**
     * List all or some of the {@link Dataset} objects in your project as
     * a readable object stream.
     *
     * @param {object} [options] Configuration object. See
     *     {@link BigQuery.getDatasets} for a complete list of options.
     *
     * @example
     * ```
     * const {BigQuery} = require('@google-cloud/bigquery');
     * const bigquery = new BigQuery();
     *
     * bigquery.getDatasetsStream()
     *   .on('error', console.error)
     *   .on('data', function(dataset) {
     *     // dataset is a Dataset object.
     *   })
     *   .on('end', function() {
     *     // All datasets retrieved.
     *   });
     *
     * //-
     * // If you anticipate many results, you can end a stream early to prevent
     * // unnecessary processing and API requests.
     * //-
     * bigquery.getDatasetsStream()
     *   .on('data', function(dataset) {
     *     this.end();
     *   });
     * ```
     */
    this.getDatasetsStream = paginator_1.paginator.streamify('getDatasets');
    /**
     * List all or some of the {@link Job} objects in your project as a
     * readable object stream.
     *
     * @param {object} [options] Configuration object. See
     *     {@link BigQuery.getJobs} for a complete list of options.
     *
     * @example
     * ```
     * const {BigQuery} = require('@google-cloud/bigquery');
     * const bigquery = new BigQuery();
     *
     * bigquery.getJobsStream()
     *   .on('error', console.error)
     *   .on('data', function(job) {
     *     // job is a Job object.
     *   })
     *   .on('end', function() {
     *     // All jobs retrieved.
     *   });
     *
     * //-
     * // If you anticipate many results, you can end a stream early to prevent
     * // unnecessary processing and API requests.
     * //-
     * bigquery.getJobsStream()
     *   .on('data', function(job) {
     *     this.end();
     *   });
     * ```
     */
    this.getJobsStream = paginator_1.paginator.streamify('getJobs');
    // Disable `prettyPrint` for better performance.
    // https://github.com/googleapis/nodejs-bigquery/issues/858
    this.interceptors.push({
        request: (reqOpts) => {
            return extend(true, {}, reqOpts, { qs: { prettyPrint: false } });
        },
    });
}
// eslint-disable-next-line @typescript-eslint/no-explicit-any
trace_(msg, ...otherArgs) {
(0, logger_1.logger)('[bigquery]', msg, ...otherArgs);
}
/**
 * The universe domain this client targets (defaults to `googleapis.com`,
 * overridable via `options.universeDomain` in the constructor).
 * @returns {string}
 */
get universeDomain() {
    return this._universeDomain;
}
static sanitizeEndpoint(url) {
if (!exports.PROTOCOL_REGEX.test(url)) {
url = `https://${url}`;
}
return this.sanitizeDomain(url);
}
static sanitizeDomain(url) {
return url.replace(/\/+$/, ''); // Remove trailing slashes
}
/**
 * Merge a rowset returned from the API with a table schema.
 *
 * @private
 *
 * @param {object} schema The table schema (`{fields: [...]}`). NOTE: its
 *     `fields` array is narrowed in place when `options.selectedFields`
 *     is provided.
 * @param {array} rows Raw API rows of the shape `{f: [{v: ...}, ...]}`.
 * @param {object} options
 * @param {boolean|IntegerTypeCastOptions} options.wrapIntegers Wrap values of
 *     'INT64' type in {@link BigQueryInt} objects.
 *     If a `boolean`, this will wrap values in {@link BigQueryInt} objects.
 *     If an `object`, this will return a value returned by
 *     `wrapIntegers.integerTypeCastFunction`.
 *     Please see {@link IntegerTypeCastOptions} for options descriptions.
 * @param {array} options.selectedFields List of fields to return.
 *     If unspecified, all fields are returned.
 * @param {array} options.parseJSON parse a 'JSON' field into a JSON object.
 * @returns Fields using their matching names from the table's schema.
 */
static mergeSchemaWithRows_(schema, rows, options) {
    var _a; // compiler-generated temp for the optional chain below
    if (options.selectedFields && options.selectedFields.length > 0) {
        // Split dotted paths ('a.b.c'), peel off the leading segment, keep
        // only top-level schema fields that were selected, and pass the
        // remaining path segments down for nested filtering.
        const selectedFieldsArray = options.selectedFields.map(c => {
            return c.split('.');
        });
        const currentFields = selectedFieldsArray.map(c => c.shift());
        //filter schema fields based on selected fields.
        schema.fields = (_a = schema.fields) === null || _a === void 0 ? void 0 : _a.filter(field => currentFields
            .map(c => c.toLowerCase())
            .indexOf(field.name.toLowerCase()) >= 0);
        // Rebuild selectedFields with the leading segment removed; entries
        // that were only one segment deep are dropped.
        options.selectedFields = selectedFieldsArray
            .filter(c => c.length > 0)
            .map(c => c.join('.'));
    }
    return arrify(rows).map(mergeSchema).map(flattenRows);
    // Pair each positional cell `row.f[i]` with `schema.fields[i]` and
    // convert it, yielding one single-key object per cell.
    function mergeSchema(row) {
        return row.f.map((field, index) => {
            const schemaField = schema.fields[index];
            let value = field.v;
            if (schemaField.mode === 'REPEATED') {
                // REPEATED cells arrive as an array of `{v: ...}` wrappers.
                value = value.map(val => {
                    return convertSchemaFieldValue(schemaField, val.v, options);
                });
            }
            else {
                value = convertSchemaFieldValue(schemaField, value, options);
            }
            // eslint-disable-next-line @typescript-eslint/no-explicit-any
            const fieldObject = {};
            fieldObject[schemaField.name] = value;
            return fieldObject;
        });
    }
    // Collapse the array of single-key objects into one row object.
    // eslint-disable-next-line @typescript-eslint/no-explicit-any
    function flattenRows(rows) {
        return rows.reduce((acc, row) => {
            const key = Object.keys(row)[0];
            acc[key] = row[key];
            return acc;
        }, {});
    }
}
/**
 * The `DATE` type represents a logical calendar date, independent of time
 * zone. It does not represent a specific 24-hour time period. Rather, a given
 * DATE value represents a different 24-hour period when interpreted in
 * different time zones, and may represent a shorter or longer day during
 * Daylight Savings Time transitions.
 *
 * Available as both a static (`BigQuery.date`) and an instance
 * (`bigquery.date`) method.
 *
 * @method BigQuery.date
 * @param {object|string} value The date. If a string, this should be in the
 *     format the API describes: `YYYY-[M]M-[D]D`.
 *     Otherwise, provide an object.
 * @param {string|number} value.year Four digits.
 * @param {string|number} value.month One or two digits.
 * @param {string|number} value.day One or two digits.
 * @returns {BigQueryDate}
 *
 * @example
 * ```
 * const {BigQuery} = require('@google-cloud/bigquery');
 * const bigquery = new BigQuery();
 * const date = bigquery.date('2017-01-01');
 *
 * //-
 * // Alternatively, provide an object.
 * //-
 * const date2 = bigquery.date({
 *   year: 2017,
 *   month: 1,
 *   day: 1
 * });
 * ```
 */
static date(value) {
    return new BigQueryDate(value);
}
/**
 * Instance equivalent of {@link BigQuery.date}; delegates to the static
 * factory.
 *
 * @param {object|string} value The date. If a string, this should be in the
 *     format the API describes: `YYYY-[M]M-[D]D`.
 *     Otherwise, provide an object.
 * @param {string|number} value.year Four digits.
 * @param {string|number} value.month One or two digits.
 * @param {string|number} value.day One or two digits.
 * @returns {BigQueryDate}
 *
 * @example
 * ```
 * const {BigQuery} = require('@google-cloud/bigquery');
 * const date = BigQuery.date('2017-01-01');
 *
 * //-
 * // Alternatively, provide an object.
 * //-
 * const date2 = BigQuery.date({
 *   year: 2017,
 *   month: 1,
 *   day: 1
 * });
 * ```
 */
date(value) {
    return BigQuery.date(value);
}
/**
 * A `DATETIME` data type represents a point in time. Unlike a `TIMESTAMP`,
 * this does not refer to an absolute instance in time. Instead, it is the
 * civil time, or the time that a user would see on a watch or calendar.
 *
 * @method BigQuery.datetime
 * @param {object|string} value The time. If a string, this should be in the
 *     format the API describes: `YYYY-[M]M-[D]D[ [H]H:[M]M:[S]S[.DDDDDD]]`.
 *     Otherwise, provide an object.
 * @param {string|number} value.year Four digits.
 * @param {string|number} value.month One or two digits.
 * @param {string|number} value.day One or two digits.
 * @param {string|number} [value.hours] One or two digits (`00` - `23`).
 * @param {string|number} [value.minutes] One or two digits (`00` - `59`).
 * @param {string|number} [value.seconds] One or two digits (`00` - `59`).
 * @param {string|number} [value.fractional] Up to six digits for microsecond
 *     precision.
 *
 * @example
 * ```
 * const {BigQuery} = require('@google-cloud/bigquery');
 * const datetime = BigQuery.datetime('2017-01-01 13:00:00');
 *
 * //-
 * // Alternatively, provide an object.
 * //-
 * const datetime = BigQuery.datetime({
 *   year: 2017,
 *   month: 1,
 *   day: 1,
 *   hours: 14,
 *   minutes: 0,
 *   seconds: 0
 * });
 * ```
 */
/**
 * A `DATETIME` data type represents a point in time. Unlike a `TIMESTAMP`,
 * this does not refer to an absolute instance in time. Instead, it is the
 * civil time, or the time that a user would see on a watch or calendar.
 *
 * Available as both a static and an instance method.
 *
 * @param {object|string} value The time. If a string, this should be in the
 *     format the API describes: `YYYY-[M]M-[D]D[ [H]H:[M]M:[S]S[.DDDDDD]]`.
 *     Otherwise, provide an object.
 * @param {string|number} value.year Four digits.
 * @param {string|number} value.month One or two digits.
 * @param {string|number} value.day One or two digits.
 * @param {string|number} [value.hours] One or two digits (`00` - `23`).
 * @param {string|number} [value.minutes] One or two digits (`00` - `59`).
 * @param {string|number} [value.seconds] One or two digits (`00` - `59`).
 * @param {string|number} [value.fractional] Up to six digits for microsecond
 *     precision.
 * @returns {BigQueryDatetime}
 *
 * @example
 * ```
 * const {BigQuery} = require('@google-cloud/bigquery');
 * const bigquery = new BigQuery();
 * const datetime = bigquery.datetime('2017-01-01 13:00:00');
 *
 * //-
 * // Alternatively, provide an object.
 * //-
 * const datetime = bigquery.datetime({
 *   year: 2017,
 *   month: 1,
 *   day: 1,
 *   hours: 14,
 *   minutes: 0,
 *   seconds: 0
 * });
 * ```
 */
static datetime(value) {
    return new BigQueryDatetime(value);
}
/** Instance equivalent of {@link BigQuery.datetime}. */
datetime(value) {
    return BigQuery.datetime(value);
}
/**
 * A `TIME` data type represents a time, independent of a specific date.
 *
 * @method BigQuery.time
 * @param {object|string} value The time. If a string, this should be in the
 *     format the API describes: `[H]H:[M]M:[S]S[.DDDDDD]`. Otherwise, provide
 *     an object.
 * @param {string|number} [value.hours] One or two digits (`00` - `23`).
 * @param {string|number} [value.minutes] One or two digits (`00` - `59`).
 * @param {string|number} [value.seconds] One or two digits (`00` - `59`).
 * @param {string|number} [value.fractional] Up to six digits for microsecond
 *     precision.
 *
 * @example
 * ```
 * const {BigQuery} = require('@google-cloud/bigquery');
 * const time = BigQuery.time('14:00:00'); // 2:00 PM
 *
 * //-
 * // Alternatively, provide an object.
 * //-
 * const time = BigQuery.time({
 *   hours: 14,
 *   minutes: 0,
 *   seconds: 0
 * });
 * ```
 */
/**
 * A `TIME` data type represents a time, independent of a specific date.
 *
 * Available as both a static and an instance method.
 *
 * @param {object|string} value The time. If a string, this should be in the
 *     format the API describes: `[H]H:[M]M:[S]S[.DDDDDD]`. Otherwise, provide
 *     an object.
 * @param {string|number} [value.hours] One or two digits (`00` - `23`).
 * @param {string|number} [value.minutes] One or two digits (`00` - `59`).
 * @param {string|number} [value.seconds] One or two digits (`00` - `59`).
 * @param {string|number} [value.fractional] Up to six digits for microsecond
 *     precision.
 * @returns {BigQueryTime}
 *
 * @example
 * ```
 * const {BigQuery} = require('@google-cloud/bigquery');
 * const bigquery = new BigQuery();
 * const time = bigquery.time('14:00:00'); // 2:00 PM
 *
 * //-
 * // Alternatively, provide an object.
 * //-
 * const time = bigquery.time({
 *   hours: 14,
 *   minutes: 0,
 *   seconds: 0
 * });
 * ```
 */
static time(value) {
    return new BigQueryTime(value);
}
/** Instance equivalent of {@link BigQuery.time}. */
time(value) {
    return BigQuery.time(value);
}
/**
 * A timestamp represents an absolute point in time, independent of any time
 * zone or convention such as Daylight Savings Time.
 *
 * The recommended input here is a `Date` or `PreciseDate` class.
 * If passing as a `string`, it should be Timestamp literals: https://cloud.google.com/bigquery/docs/reference/standard-sql/lexical#timestamp_literals.
 * When passing a `number` input, it should be epoch seconds in float representation.
 *
 * @method BigQuery.timestamp
 * @param {Date|PreciseDate|string|number} value The time.
 * @returns {BigQueryTimestamp}
 *
 * @example
 * ```
 * const {BigQuery} = require('@google-cloud/bigquery');
 * const timestamp = BigQuery.timestamp(new Date());
 * ```
 */
static timestamp(value) {
    return new BigQueryTimestamp(value);
}
/**
 * A timestamp represents an absolute point in time, independent of any time
 * zone or convention such as Daylight Savings Time.
 *
 * The recommended input here is a `Date` or `PreciseDate` class.
 * If passing as a `string`, it should be Timestamp literals: https://cloud.google.com/bigquery/docs/reference/standard-sql/lexical#timestamp_literals.
 * When passing a `number` input, it should be epoch seconds in float representation.
 *
 * @param {Date|PreciseDate|string|number} value The time.
 * @returns {BigQueryTimestamp}
 *
 * @example
 * ```
 * const {BigQuery} = require('@google-cloud/bigquery');
 * const bigquery = new BigQuery();
 * const timestamp = bigquery.timestamp(new Date());
 * ```
 */
timestamp(value) {
    return BigQuery.timestamp(value);
}
/**
 * A range represents contiguous range between two dates, datetimes, or timestamps.
 * The lower and upper bound for the range are optional.
 * The lower bound is inclusive and the upper bound is exclusive.
 *
 * @method BigQuery.range
 * @param {string|BigQueryRangeOptions} value The range API string or start/end with dates/datetimes/timestamp ranges.
 * @param {string} elementType The range element type - DATE|DATETIME|TIMESTAMP
 * @returns {BigQueryRange}
 *
 * @example
 * ```
 * const {BigQuery} = require('@google-cloud/bigquery');
 * const timestampRange = BigQuery.range('[2020-10-01 12:00:00+08, 2020-12-31 12:00:00+08)', 'TIMESTAMP');
 * ```
 */
static range(value, elementType) {
    return new BigQueryRange(value, elementType);
}
/**
 * A range represents contiguous range between two dates, datetimes, or timestamps.
 * The lower and upper bound for the range are optional.
 * The lower bound is inclusive and the upper bound is exclusive.
 *
 * Instance equivalent of {@link BigQuery.range}.
 *
 * @param {string|BigQueryRangeOptions} value The range API string or start/end with dates/datetimes/timestamp ranges.
 * @param {string} elementType The range element type - DATE|DATETIME|TIMESTAMP
 * @returns {BigQueryRange}
 *
 * @example
 * ```
 * const {BigQuery} = require('@google-cloud/bigquery');
 * const bigquery = new BigQuery();
 * const timestampRange = bigquery.range('[2020-10-01 12:00:00+08, 2020-12-31 12:00:00+08)', 'TIMESTAMP');
 * ```
 */
range(value, elementType) {
    return BigQuery.range(value, elementType);
}
/**
 * A BigQueryInt wraps 'INT64' values. Can be used to maintain precision.
 *
 * Available as both a static (`BigQuery.int`) and an instance
 * (`bigquery.int`) method.
 *
 * @method BigQuery.int
 * @param {string|number|IntegerTypeCastValue} value The INT64 value to convert.
 * @param {IntegerTypeCastOptions} typeCastOptions Configuration to convert
 *     value. Must provide an `integerTypeCastFunction` to handle conversion.
 * @returns {BigQueryInt}
 *
 * @example
 * ```
 * const {BigQuery} = require('@google-cloud/bigquery');
 * const bigquery = new BigQuery();
 *
 * const largeIntegerValue = Number.MAX_SAFE_INTEGER + 1;
 *
 * const options = {
 *   integerTypeCastFunction: value => value.split(),
 * };
 *
 * const bqInteger = bigquery.int(largeIntegerValue, options);
 *
 * const customValue = bqInteger.valueOf();
 * // customValue is the value returned from your `integerTypeCastFunction`.
 * ```
 */
static int(value, typeCastOptions) {
    return new BigQueryInt(value, typeCastOptions);
}
/** Instance equivalent of {@link BigQuery.int}. */
int(value, typeCastOptions) {
    return BigQuery.int(value, typeCastOptions);
}
/**
 * A geography value represents a surface area on the Earth
 * in Well-known Text (WKT) format.
 *
 * Available as both a static (`BigQuery.geography`) and an instance
 * (`bigquery.geography`) method.
 *
 * @method BigQuery.geography
 * @param {string} value The geospatial data.
 * @returns {Geography}
 *
 * @example
 * ```
 * const {BigQuery} = require('@google-cloud/bigquery');
 * const bigquery = new BigQuery();
 * const geography = bigquery.geography('POINT(1 2)');
 * ```
 */
static geography(value) {
    return new Geography(value);
}
/** Instance equivalent of {@link BigQuery.geography}. */
geography(value) {
    return BigQuery.geography(value);
}
/**
* Convert an INT64 value to Number.
*
* @private
* @param {object} value The INT64 value to convert.
*/
static decodeIntegerValue_(value) {
const num = Number(value.integerValue);
if (!Number.isSafeInteger(num)) {
throw new Error('We attempted to return all of the numeric values, but ' +
(value.schemaFieldName ? value.schemaFieldName + ' ' : '') +
'value ' +
value.integerValue +
" is out of bounds of 'Number.MAX_SAFE_INTEGER'.\n" +
"To prevent this error, please consider passing 'options.wrapIntegers' as\n" +
'{\n' +
' integerTypeCastFunction: provide <your_custom_function>\n' +
' fields: optionally specify field name(s) to be custom casted\n' +
'}\n');
}
return num;
}
/**
* Return a value's provided type.
*
* @private
*
* @throws {error} If the type provided is invalid.
*
* See {@link https://cloud.google.com/bigquery/data-types| Data Type}
*
* @param {*} providedType The type.
* @returns {string} The valid type provided.
*/
static getTypeDescriptorFromProvidedType_(providedType) {
// The list of types can be found in src/types.d.ts
const VALID_TYPES = [
'DATE',
'DATETIME',
'TIME',
'TIMESTAMP',
'BYTES',
'NUMERIC',
'BIGNUMERIC',
'BOOL',
'INT64',
'FLOAT64',
'STRING',
'GEOGRAPHY',
'ARRAY',
'STRUCT',
'JSON',
'RANGE',
];
if (is.array(providedType)) {
providedType = providedType;
return {
type: 'ARRAY',
arrayType: BigQuery.getTypeDescriptorFromProvidedType_(providedType[0]),
};
}
else if (is.object(providedType)) {
return {
type: 'STRUCT',
structTypes: Object.keys(providedType).map(prop => {
return {
name: prop,
type: BigQuery.getTypeDescriptorFromProvidedType_(providedType[prop]),
};
}),
};
}
providedType = providedType.toUpperCase();
if (!VALID_TYPES.includes(providedType)) {
throw new Error(`Invalid type provided: "${providedType}"`);
}
return { type: providedType.toUpperCase() };
}
/**
 * Detect a value's type.
 *
 * The `instanceof` checks below are order-sensitive: wrapper classes are
 * tested before generic array/object/primitive checks so that e.g. a
 * `BigQueryRange` is not mistaken for a plain STRUCT.
 *
 * @private
 *
 * @throws {error} If the type could not be detected (or the value is null).
 *
 * See {@link https://cloud.google.com/bigquery/data-types| Data Type}
 *
 * @param {*} value The value.
 * @returns {string} The type detected from the value.
 */
static getTypeDescriptorFromValue_(value) {
    let typeName;
    if (value === null) {
        throw new Error("Parameter types must be provided for null values via the 'types' field in query options.");
    }
    if (value instanceof BigQueryDate) {
        typeName = 'DATE';
    }
    else if (value instanceof BigQueryDatetime) {
        typeName = 'DATETIME';
    }
    else if (value instanceof BigQueryTime) {
        typeName = 'TIME';
    }
    else if (value instanceof BigQueryTimestamp) {
        typeName = 'TIMESTAMP';
    }
    else if (value instanceof Buffer) {
        typeName = 'BYTES';
    }
    else if (value instanceof Big) {
        // Precision heuristic on big.js internals (`c` = digit array,
        // `e` = exponent): wide values go to BIGNUMERIC, the rest to
        // NUMERIC. NOTE(review): assumes this matches NUMERIC's 9-digit
        // fractional scale — confirm boundary.
        if (value.c.length - value.e >= 10) {
            typeName = 'BIGNUMERIC';
        }
        else {
            typeName = 'NUMERIC';
        }
    }
    else if (value instanceof BigQueryInt) {
        typeName = 'INT64';
    }
    else if (value instanceof Geography) {
        typeName = 'GEOGRAPHY';
    }
    else if (value instanceof BigQueryRange) {
        return {
            type: 'RANGE',
            rangeElementType: {
                type: value.elementType,
            },
        };
    }
    else if (Array.isArray(value)) {
        if (value.length === 0) {
            throw new Error("Parameter types must be provided for empty arrays via the 'types' field in query options.");
        }
        // Element type is inferred from the first element only.
        return {
            type: 'ARRAY',
            arrayType: BigQuery.getTypeDescriptorFromValue_(value[0]),
        };
    }
    else if (is.boolean(value)) {
        typeName = 'BOOL';
    }
    else if (is.number(value)) {
        // Whole numbers map to INT64, anything fractional to FLOAT64.
        typeName = value % 1 === 0 ? 'INT64' : 'FLOAT64';
    }
    else if (is.object(value)) {
        return {
            type: 'STRUCT',
            structTypes: Object.keys(value).map(prop => {
                return {
                    name: prop,
                    // eslint-disable-next-line @typescript-eslint/no-explicit-any
                    type: BigQuery.getTypeDescriptorFromValue_(value[prop]),
                };
            }),
        };
    }
    else if (is.string(value)) {
        typeName = 'STRING';
    }
    if (!typeName) {
        throw new Error([
            'This value could not be translated to a BigQuery data type.',
            value,
        ].join('\n'));
    }
    return {
        type: typeName,
    };
}
/**
 * Convert a value into a `queryParameter` object.
 *
 * @private
 *
 * See {@link https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs/query#request-body| Jobs.query API Reference Docs (see `queryParameters`)}
 *
 * @param {*} value The value.
 * @param {string|ProvidedTypeStruct|ProvidedTypeArray} providedType Provided
 *     query parameter type.
 * @returns {object} A properly-formed `queryParameter` object.
 */
static valueToQueryParameter_(
// eslint-disable-next-line @typescript-eslint/no-explicit-any
value, providedType) {
    var _a, _b; // compiler temps for the optional chain in the RANGE branch
    if (is.date(value)) {
        // Plain Date objects are sent as TIMESTAMP parameters.
        value = BigQuery.timestamp(value);
    }
    // An explicit providedType wins over inference from the value.
    let parameterType;
    if (providedType) {
        parameterType = BigQuery.getTypeDescriptorFromProvidedType_(providedType);
    }
    else {
        parameterType = BigQuery.getTypeDescriptorFromValue_(value);
    }
    const queryParameter = { parameterType, parameterValue: {} };
    const typeName = queryParameter.parameterType.type;
    if (typeName === 'ARRAY') {
        // Recurse per element; nested objects/arrays become nested
        // parameterValues, scalars become `{value}` wrappers.
        queryParameter.parameterValue.arrayValues = value.map(itemValue => {
            const value = BigQuery._getValue(itemValue, parameterType.arrayType);
            if (is.object(value) || is.array(value)) {
                if (is.array(providedType)) {
                    providedType = providedType;
                    return BigQuery.valueToQueryParameter_(value, providedType[0])
                        .parameterValue;
                }
                else {
                    return BigQuery.valueToQueryParameter_(value).parameterValue;
                }
            }
            return { value };
        });
    }
    else if (typeName === 'STRUCT') {
        // Recurse per property into a map of field name -> parameterValue.
        queryParameter.parameterValue.structValues = Object.keys(value).reduce((structValues, prop) => {
            let nestedQueryParameter;
            if (providedType) {
                nestedQueryParameter = BigQuery.valueToQueryParameter_(value[prop], providedType[prop]);
            }
            else {
                nestedQueryParameter = BigQuery.valueToQueryParameter_(value[prop]);
            }
            // eslint-disable-next-line @typescript-eslint/no-explicit-any
            structValues[prop] = nestedQueryParameter.parameterValue;
            return structValues;
        }, {});
    }
    else if (typeName === 'RANGE') {
        // Accept either a BigQueryRange or raw input coercible via
        // BigQuery.range() with the resolved element type.
        let rangeValue;
        if (value instanceof BigQueryRange) {
            rangeValue = value;
        }
        else {
            rangeValue = BigQuery.range(value, (_b = (_a = queryParameter.parameterType) === null || _a === void 0 ? void 0 : _a.rangeElementType) === null || _b === void 0 ? void 0 : _b.type);
        }
        queryParameter.parameterValue.rangeValue = {
            start: {
                value: rangeValue.value.start,
            },
            end: {
                value: rangeValue.value.end,
            },
        };
    }
    else if (typeName === 'JSON' && is.object(value)) {
        queryParameter.parameterValue.value = JSON.stringify(value);
    }
    else {
        queryParameter.parameterValue.value = BigQuery._getValue(value, parameterType);
    }
    return queryParameter;
}
// eslint-disable-next-line @typescript-eslint/no-explicit-any
static _getValue(value, type) {
if (value === null) {
return null;
}
if (value.type)
type = value;
return BigQuery._isCustomType(type) ? value.value : value;
}
static _isCustomType({ type }) {
return (type.indexOf('TIME') > -1 ||
type.indexOf('DATE') > -1 ||
type.indexOf('GEOGRAPHY') > -1 ||
type.indexOf('RANGE') > -1 ||
type.indexOf('BigQueryInt') > -1);
}
/**
 * Create a dataset in this project.
 *
 * @param {string} id ID of the dataset to create.
 * @param {object} [optionsOrCallback] Dataset metadata/options, or the
 *     callback when no options are given.
 * @param {function} [cb] Node-style callback `(err, dataset, apiResponse)`.
 */
createDataset(id, optionsOrCallback, cb) {
    const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {};
    const callback = typeof optionsOrCallback === 'function' ? optionsOrCallback : cb;
    // Deep-merge order: client-level location < user options < the
    // datasetReference (which must not be overridden by options).
    const reqOpts = {
        method: 'POST',
        uri: '/datasets',
        json: extend(true, {
            location: this.location,
        }, options, {
            datasetReference: {
                datasetId: id,
            },
        }),
    };
    if (options.projectId) {
        reqOpts.projectId = options.projectId;
    }
    this.request(reqOpts, (err, resp) => {
        if (err) {
            callback(err, null, resp);
            return;
        }
        // Hand back a Dataset object primed with the creation response.
        const dataset = this.dataset(id, options);
        dataset.metadata = resp;
        callback(null, dataset, resp);
    });
}
createQueryJob(opts, callback) {
const options = typeof opts === 'object' ? opts : { query: opts };
this.trace_('[createQueryJob]', options, callback);
if ((!options || !options.query) && !options.pageToken) {
throw new Error('A SQL query string is required.');
}
const query = extend(true, {
useLegacySql: false,
}, options);
this.trace_('[createQueryJob]', query);
if (options.destination) {
if (!(options.destination instanceof table_1.Table)) {
throw new Error('Destination must be a Table object.');
}
query.destinationTable = {
datasetId: options.destination.dataset.id,
projectId: options.destination.dataset.projectId,
tableId: options.destination.id,
};
delete query.destination;
}
if (query.params) {
const { parameterMode, params } = this.buildQueryParams_(query.params, query.types);
query.parameterMode = parameterMode;
query.queryParameters = params;
delete query.params;
}
const reqOpts = {};
reqOpts.configuration = {
query,
};
if (typeof query.jobTimeoutMs === 'number') {
reqOpts.configuration.jobTimeoutMs = query.jobTimeoutMs.toString();
delete query.jobTimeoutMs;
}
if (query.dryRun) {
reqOpts.configuration.dryRun = query.dryRun;
delete query.dryRun;
}
if (query.labels) {
reqOpts.configuration.labels = query.labels;
delete query.labels;
}
if (query.jobPrefix) {
reqOpts.jobPrefix = query.jobPrefix;
delete query.jobPrefix;
}
if (query.location) {
reqOpts.location = query.location;
delete query.location;
}
if (query.jobId) {
reqOpts.jobId = query.jobId;
delete query.jobId;
}
this.createJob(reqOpts, callback);
}
buildQueryParams_(params, types) {
if (!params) {
return {
parameterMode: undefined,
params: undefined,
};
}
const parameterMode = is.array(params) ? 'positional' : 'named';
const queryParameters = [];
if (parameterMode === 'named') {
const namedParams = params;
for (const namedParameter of Object.getOwnPropertyNames(namedParams)) {
const value = namedParams[namedParameter];
let queryParameter;
if (types) {
if (!is.object(types)) {
throw new Error('Provided types must match the value type passed to `params`');
}
const namedTypes = types;
if (namedTypes[namedParameter]) {
queryParameter = BigQuery.valueToQueryParameter_(value, namedTypes[namedParameter]);
}
else {
queryParameter = BigQuery.valueToQueryParameter_(value);
}
}
else {
queryParameter = BigQuery.valueToQueryParameter_(value);
}
queryParameter.name = namedParameter;
queryParameters.push(queryParameter);
}
}
else {
if (types) {
if (!is.array(types)) {
throw new Error('Provided types must match the value type passed to `params`');
}
const positionalTypes = types;
if (params.length !== types.length) {
throw new Error('Incorrect number of parameter types provided.');
}
params.forEach((value, i) => {
const queryParameter = BigQuery.valueToQueryParameter_(value, positionalTypes[i]);
queryParameters.push(queryParameter);
});
}
else {
params.forEach((value) => {
const queryParameter = BigQuery.valueToQueryParameter_(value);
queryParameters.push(queryParameter);
});
}
}
return {
parameterMode,
params: queryParameters,
};
}
/**
 * Create and start an asynchronous BigQuery job by POSTing to `/jobs`,
 * then invoke `callback(err, job, apiResponse)`.
 *
 * Retry note: the client's "autoRetry" feature can re-send an insert whose
 * first attempt actually succeeded server-side. The retry then fails with
 * HTTP 409 (already exists). When the job ID was generated here (not
 * caller-provided) and this is not a dry run, that 409 is treated as
 * success and the existing job's metadata is fetched instead.
 *
 * @param {object} options Job options (`configuration`, and optionally
 *     `jobId`, `jobPrefix`, `location`).
 * @param {function} callback Invoked as `(err, job, apiResponse)`.
 */
createJob(options, callback) {
    var _a;
    // Whether the caller chose the job ID themselves — if so, a 409 is a
    // genuine conflict and must NOT be swallowed by the retry recovery below.
    const JOB_ID_PROVIDED = typeof options.jobId !== 'undefined';
    // Dry-run jobs are never persisted, so the 409 recovery (which fetches
    // the persisted job) does not apply to them either.
    const DRY_RUN = ((_a = options.configuration) === null || _a === void 0 ? void 0 : _a.dryRun)
        ? options.configuration.dryRun
        : false;
    // Shallow copy so the caller's options object is not mutated by the
    // `delete`s below.
    const reqOpts = Object.assign({}, options);
    let jobId = JOB_ID_PROVIDED ? reqOpts.jobId : uuid.v4();
    // `jobId`/`jobPrefix` are client-side conveniences; strip them from the
    // request body and fold them into `jobReference` instead.
    if (reqOpts.jobId) {
        delete reqOpts.jobId;
    }
    if (reqOpts.jobPrefix) {
        jobId = reqOpts.jobPrefix + jobId;
        delete reqOpts.jobPrefix;
    }
    reqOpts.jobReference = {
        projectId: this.projectId,
        jobId,
        location: this.location,
    };
    // An explicit per-call location overrides the client-level default.
    if (options.location) {
        reqOpts.jobReference.location = options.location;
        delete reqOpts.location;
    }
    // Local Job handle; also used by the 409 recovery path to fetch the
    // metadata of the job the earlier (flaky) attempt created.
    const job = this.job(jobId, {
        location: reqOpts.jobReference.location,
    });
    this.request({
        method: 'POST',
        uri: '/jobs',
        json: reqOpts,
    }, async (err, resp) => {
        const ALREADY_EXISTS_CODE = 409;
        if (err) {
            if (err.code === ALREADY_EXISTS_CODE &&
                !JOB_ID_PROVIDED &&
                !DRY_RUN) {
                // The last insert attempt flaked, but the API still processed the
                // request and created the job. Because of our "autoRetry" feature,
                // we tried the request again, which tried to create it again,
                // unnecessarily. We will get the job's metadata and treat it as if
                // it just came back from the create call.
                err = null;
                [resp] = await job.getMetadata();
            }
            else {
                callback(err, null, resp);
                return;
            }
        }
        // The insert itself succeeded but the job carries errors (e.g. a bad
        // query). Surface them as an ApiError while still handing back the
        // Job object and raw response below.
        if (resp.status.errors) {
            err = new common_1.util.ApiError({
                errors: resp.status.errors,
                response: resp,
            });
        }
        // Update the location with the one used by the API.
        job.location = resp.jobReference.location;
        job.metadata = resp;
        callback(err, job, resp);
    });
}
/**
* Create a reference to a dataset.
*
* @param {string} id ID of the dataset.
* @param {object} [options] Dataset options.
* @param {string} [options.projectId] The GCP project ID.
* @param {string} [options.location] The geographic location of the dataset.
* Required except for US and EU.
*
* @example
* ```
* const {BigQuery} = require('@google-cloud/bigquery');
* const bigquery = new BigQuery();
* const dataset = bigquery.dataset('higher_education');
* ```
*/
dataset(id, options) {
if (typeof id !== 'string') {
throw new TypeError('A dataset ID is required.');
}
if (this.location) {
options = extend({ location: this.location }, options);
}
return new dataset_1.Dataset(this, id, options);
}
getDatasets(optionsOrCallback, cb) {
const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {};
const callback = typeof optionsOrCallback === 'function' ? optionsOrCallback : cb;
const reqOpts = {
uri: '/datasets',
qs: options,
};
if (options.projectId) {
reqOpts.projectId = options.projectId;
}
this.request(reqOpts, (err, resp) => {
if (err) {
callback(err, null, null, resp);
return;
}
let nextQuery = null;
if (resp.nextPageToken) {
nextQuery = Object.assign({}, options, {
pageToken: resp.nextPageToken,
});
}
// eslint-disable-next-line @typescript-eslint/no-explicit-any
const datasets = (resp.datasets || []).map((dataset) => {
const dsOpts = {
location: dataset.location,
};
if (options.projectId) {
dsOpts.projectId = options.projectId;
}
const ds = this.dataset(dataset.datasetReference.datasetId, dsOpts);
ds.metadata = dataset;
return ds;
});
callback(null, datasets, nextQuery, resp);
});
}
getJobs(optionsOrCallback, cb) {
const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {};
const callback = typeof optionsOrCallback === 'function' ? optionsOrCallback : cb;
this.request({
uri: '/jobs',
qs: options,
useQuerystring: true,
}, (err, resp) => {
if (err) {
callback(err, null, null, resp);
return;
}
let nextQuery = null;
if (resp.nextPageToken) {
nextQuery = Object.assign({}, options, {
pageToken: resp.nextPageToken,
});
}
const jobs = (resp.jobs || []).map((jobObject) => {
const job = this.job(jobObject.jobReference.jobId, {
location: jobObject.jobReference.location,
});
job.metadata = jobObject;
return job;
});
callback(null, jobs, nextQuery, resp);
});
}
/**
* Create a reference to an existing job.
*
* @param {string} id ID of the job.
* @param {object} [options] Configuration object.
* @param {string} [options.location] The geographic location of the job.
* Required except for US and EU.
*
* @example
* ```
* const {BigQuery} = require('@google-cloud/bigquery');
* const bigquery = new BigQuery();
*
* const myExistingJob = bigquery.job('job-id');
* ```
*/
job(id, options) {
if (this.location) {
options = extend({ location: this.location }, options);
}
return new job_1.Job(this, id, options);
}
query(query, optionsOrCallback, cb) {
let options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {};
const queryOpts = typeof query === 'object'
? {
wrapIntegers: query.wrapIntegers,
parseJSON: query.parseJSON,
}
: {};
const callback = typeof optionsOrCallback === 'function' ? optionsOrCallback : cb;
this.trace_('[query]', query, options);
const queryReq = this.buildQueryRequest_(query, options);
this.trace_('[query] queryReq', queryReq);
if (!queryReq) {
this.createQueryJob(query, (err, job, resp) => {
if (err) {
callback(err, null, resp);
return;
}
if (typeof query === 'object' && query.dryRun) {
callback(null, [], resp);
return;
}
// The Job is important for the `queryAsStream_` method, so a new query
// isn't created each time results are polled for.
options = extend({ job }, queryOpts, options);
job.getQueryResults(options, callback);
});
return;
}
this.runJobsQuery(queryReq, (err, job, res) => {
this.trace_('[runJobsQuery callback]: ', query, err, job, res);
if (err) {
callback(err, null, res);
return;
}
options = extend({ job }, queryOpts, options);
if (res && res.jobComplete) {
let rows = [];
if (res.schema && res.rows) {
rows = BigQuery.mergeSchemaWithRows_(res.schema, res.rows, {
wrapIntegers: options.wrapIntegers || false,
parseJSON: options.parseJSON,
});
}
this.trace_('[runJobsQuery] job complete');
options._cachedRows = rows;
if (res.pageToken) {
this.trace_('[runJobsQuery] has more pages');
options.pageToken = res.pageToken;
}
else {
this.trace_('[runJobsQuery] no more pages');
}
job.getQueryResults(options, callback);
return;
}
delete options.t