@pulumi/gcp
A Pulumi package for creating and managing Google Cloud Platform resources.
"use strict";
// *** WARNING: this file was generated by pulumi-language-nodejs. ***
// *** Do not edit by hand unless you're certain you know what you are doing! ***
Object.defineProperty(exports, "__esModule", { value: true });
exports.Job = void 0;
const pulumi = require("@pulumi/pulumi");
const utilities = require("../utilities");
/**
* Jobs are actions that BigQuery runs on your behalf to load data, export data, query data, or copy data.
* Once a BigQuery job is created, it cannot be changed or deleted.
*
* To get more information about Job, see:
*
* * [API documentation](https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs)
* * How-to Guides
* * [BigQuery Jobs Intro](https://cloud.google.com/bigquery/docs/jobs-overview)
*
* ## Example Usage
*
* ### Bigquery Job Query
*
* ```typescript
* import * as pulumi from "@pulumi/pulumi";
* import * as gcp from "@pulumi/gcp";
*
* const bar = new gcp.bigquery.Dataset("bar", {
* datasetId: "job_query_dataset",
* friendlyName: "test",
* description: "This is a test description",
* location: "US",
* });
* const foo = new gcp.bigquery.Table("foo", {
* deletionProtection: false,
* datasetId: bar.datasetId,
* tableId: "job_query_table",
* });
* const job = new gcp.bigquery.Job("job", {
* jobId: "job_query",
* labels: {
* "example-label": "example-value",
* },
* query: {
* query: "SELECT state FROM [lookerdata:cdc.project_tycho_reports]",
* destinationTable: {
* projectId: foo.project,
* datasetId: foo.datasetId,
* tableId: foo.tableId,
* },
* allowLargeResults: true,
* flattenResults: true,
* scriptOptions: {
* keyResultStatement: "LAST",
* },
* },
* });
* ```
* ### Bigquery Job Query Table Reference
*
* ```typescript
* import * as pulumi from "@pulumi/pulumi";
* import * as gcp from "@pulumi/gcp";
*
* const bar = new gcp.bigquery.Dataset("bar", {
* datasetId: "job_query_dataset",
* friendlyName: "test",
* description: "This is a test description",
* location: "US",
* });
* const foo = new gcp.bigquery.Table("foo", {
* deletionProtection: false,
* datasetId: bar.datasetId,
* tableId: "job_query_table",
* });
* const job = new gcp.bigquery.Job("job", {
* jobId: "job_query",
* labels: {
* "example-label": "example-value",
* },
* query: {
* query: "SELECT state FROM [lookerdata:cdc.project_tycho_reports]",
* destinationTable: {
* tableId: foo.id,
* },
* defaultDataset: {
* datasetId: bar.id,
* },
* allowLargeResults: true,
* flattenResults: true,
* scriptOptions: {
* keyResultStatement: "LAST",
* },
* },
* });
* ```
* ### Bigquery Job Load
*
* ```typescript
* import * as pulumi from "@pulumi/pulumi";
* import * as gcp from "@pulumi/gcp";
*
* const bar = new gcp.bigquery.Dataset("bar", {
* datasetId: "job_load_dataset",
* friendlyName: "test",
* description: "This is a test description",
* location: "US",
* });
* const foo = new gcp.bigquery.Table("foo", {
* deletionProtection: false,
* datasetId: bar.datasetId,
* tableId: "job_load_table",
* });
* const job = new gcp.bigquery.Job("job", {
* jobId: "job_load",
* labels: {
* my_job: "load",
* },
* load: {
* sourceUris: ["gs://cloud-samples-data/bigquery/us-states/us-states-by-date.csv"],
* destinationTable: {
* projectId: foo.project,
* datasetId: foo.datasetId,
* tableId: foo.tableId,
* },
* skipLeadingRows: 1,
* schemaUpdateOptions: [
* "ALLOW_FIELD_RELAXATION",
* "ALLOW_FIELD_ADDITION",
* ],
* writeDisposition: "WRITE_APPEND",
* autodetect: true,
* },
* });
* ```
* ### Bigquery Job Load Geojson
*
* ```typescript
* import * as pulumi from "@pulumi/pulumi";
* import * as gcp from "@pulumi/gcp";
*
* const project = "my-project-name";
* const bucket = new gcp.storage.Bucket("bucket", {
* name: `${project}-bq-geojson`,
* location: "US",
* uniformBucketLevelAccess: true,
* });
* const object = new gcp.storage.BucketObject("object", {
* name: "geojson-data.jsonl",
* bucket: bucket.name,
* content: `{"type":"Feature","properties":{"continent":"Europe","region":"Scandinavia"},"geometry":{"type":"Polygon","coordinates":[[[-30.94,53.33],[33.05,53.33],[33.05,71.86],[-30.94,71.86],[-30.94,53.33]]]}}
* {"type":"Feature","properties":{"continent":"Africa","region":"West Africa"},"geometry":{"type":"Polygon","coordinates":[[[-23.91,0],[11.95,0],[11.95,18.98],[-23.91,18.98],[-23.91,0]]]}}
* `,
* });
* const bar = new gcp.bigquery.Dataset("bar", {
* datasetId: "job_load_dataset",
* friendlyName: "test",
* description: "This is a test description",
* location: "US",
* });
* const foo = new gcp.bigquery.Table("foo", {
* deletionProtection: false,
* datasetId: bar.datasetId,
* tableId: "job_load_table",
* });
* const job = new gcp.bigquery.Job("job", {
* jobId: "job_load",
* labels: {
* my_job: "load",
* },
* load: {
* sourceUris: [pulumi.interpolate`gs://${object.bucket}/${object.name}`],
* destinationTable: {
* projectId: foo.project,
* datasetId: foo.datasetId,
* tableId: foo.tableId,
* },
* writeDisposition: "WRITE_TRUNCATE",
* autodetect: true,
* sourceFormat: "NEWLINE_DELIMITED_JSON",
* jsonExtension: "GEOJSON",
* },
* }, {
* dependsOn: [object],
* });
* ```
* ### Bigquery Job Load Parquet
*
* ```typescript
* import * as pulumi from "@pulumi/pulumi";
* import * as gcp from "@pulumi/gcp";
*
* const test = new gcp.storage.Bucket("test", {
* name: "job_load_bucket",
* location: "US",
* uniformBucketLevelAccess: true,
* });
* const testBucketObject = new gcp.storage.BucketObject("test", {
* name: "job_load_bucket_object",
* source: new pulumi.asset.FileAsset("./test-fixtures/test.parquet.gzip"),
* bucket: test.name,
* });
* const testDataset = new gcp.bigquery.Dataset("test", {
* datasetId: "job_load_dataset",
* friendlyName: "test",
* description: "This is a test description",
* location: "US",
* });
* const testTable = new gcp.bigquery.Table("test", {
* deletionProtection: false,
* tableId: "job_load_table",
* datasetId: testDataset.datasetId,
* });
* const job = new gcp.bigquery.Job("job", {
* jobId: "job_load",
* labels: {
* my_job: "load",
* },
* load: {
* sourceUris: [pulumi.interpolate`gs://${testBucketObject.bucket}/${testBucketObject.name}`],
* destinationTable: {
* projectId: testTable.project,
* datasetId: testTable.datasetId,
* tableId: testTable.tableId,
* },
* schemaUpdateOptions: [
* "ALLOW_FIELD_RELAXATION",
* "ALLOW_FIELD_ADDITION",
* ],
* writeDisposition: "WRITE_APPEND",
* sourceFormat: "PARQUET",
* autodetect: true,
* parquetOptions: {
* enumAsString: true,
* enableListInference: true,
* },
* },
* });
* ```
* ### Bigquery Job Copy
*
* ```typescript
* import * as pulumi from "@pulumi/pulumi";
* import * as gcp from "@pulumi/gcp";
*
* const count = 2;
* const sourceDataset: gcp.bigquery.Dataset[] = [];
* for (const range = {value: 0}; range.value < count; range.value++) {
* sourceDataset.push(new gcp.bigquery.Dataset(`source-${range.value}`, {
* datasetId: `job_copy_${range.value}_dataset`,
* friendlyName: "test",
* description: "This is a test description",
* location: "US",
* }));
* }
* const source: gcp.bigquery.Table[] = [];
* for (const range = {value: 0}; range.value < count; range.value++) {
* source.push(new gcp.bigquery.Table(`source-${range.value}`, {
* datasetId: sourceDataset[range.value].datasetId,
* tableId: `job_copy_${range.value}_table`,
* deletionProtection: false,
* schema: `[
* {
* "name": "name",
* "type": "STRING",
* "mode": "NULLABLE"
* },
* {
* "name": "post_abbr",
* "type": "STRING",
* "mode": "NULLABLE"
* },
* {
* "name": "date",
* "type": "DATE",
* "mode": "NULLABLE"
* }
* ]
* `,
* }));
* }
* const destDataset = new gcp.bigquery.Dataset("dest", {
* datasetId: "job_copy_dest_dataset",
* friendlyName: "test",
* description: "This is a test description",
* location: "US",
* });
* const project = gcp.organizations.getProject({
* projectId: "my-project-name",
* });
* const encryptRole = new gcp.kms.CryptoKeyIAMMember("encrypt_role", {
* cryptoKeyId: "example-key",
* role: "roles/cloudkms.cryptoKeyEncrypterDecrypter",
* member: project.then(project => `serviceAccount:bq-${project.number}@bigquery-encryption.iam.gserviceaccount.com`),
* });
* const dest = new gcp.bigquery.Table("dest", {
* deletionProtection: false,
* datasetId: destDataset.datasetId,
* tableId: "job_copy_dest_table",
* schema: `[
* {
* "name": "name",
* "type": "STRING",
* "mode": "NULLABLE"
* },
* {
* "name": "post_abbr",
* "type": "STRING",
* "mode": "NULLABLE"
* },
* {
* "name": "date",
* "type": "DATE",
* "mode": "NULLABLE"
* }
* ]
* `,
* encryptionConfiguration: {
* kmsKeyName: "example-key",
* },
* }, {
* dependsOn: [encryptRole],
* });
* const job = new gcp.bigquery.Job("job", {
* jobId: "job_copy",
* copy: {
* sourceTables: [
* {
* projectId: source[0].project,
* datasetId: source[0].datasetId,
* tableId: source[0].tableId,
* },
* {
* projectId: source[1].project,
* datasetId: source[1].datasetId,
* tableId: source[1].tableId,
* },
* ],
* destinationTable: {
* projectId: dest.project,
* datasetId: dest.datasetId,
* tableId: dest.tableId,
* },
* destinationEncryptionConfiguration: {
* kmsKeyName: "example-key",
* },
* },
* }, {
* dependsOn: [encryptRole],
* });
* ```
* ### Bigquery Job Extract
*
* ```typescript
* import * as pulumi from "@pulumi/pulumi";
* import * as gcp from "@pulumi/gcp";
*
* const source_oneDataset = new gcp.bigquery.Dataset("source-one", {
* datasetId: "job_extract_dataset",
* friendlyName: "test",
* description: "This is a test description",
* location: "US",
* });
* const source_one = new gcp.bigquery.Table("source-one", {
* deletionProtection: false,
* datasetId: source_oneDataset.datasetId,
* tableId: "job_extract_table",
* schema: `[
* {
* "name": "name",
* "type": "STRING",
* "mode": "NULLABLE"
* },
* {
* "name": "post_abbr",
* "type": "STRING",
* "mode": "NULLABLE"
* },
* {
* "name": "date",
* "type": "DATE",
* "mode": "NULLABLE"
* }
* ]
* `,
* });
* const dest = new gcp.storage.Bucket("dest", {
* name: "job_extract_bucket",
* location: "US",
* forceDestroy: true,
* });
* const job = new gcp.bigquery.Job("job", {
* jobId: "job_extract",
* extract: {
* destinationUris: [pulumi.interpolate`${dest.url}/extract`],
* sourceTable: {
* projectId: source_one.project,
* datasetId: source_one.datasetId,
* tableId: source_one.tableId,
* },
* destinationFormat: "NEWLINE_DELIMITED_JSON",
* compression: "GZIP",
* },
* });
* ```
*
* ## Import
*
* Job can be imported using any of these accepted formats:
*
* * `projects/{{project}}/jobs/{{job_id}}/location/{{location}}`
*
* * `projects/{{project}}/jobs/{{job_id}}`
*
* * `{{project}}/{{job_id}}/{{location}}`
*
* * `{{job_id}}/{{location}}`
*
* * `{{project}}/{{job_id}}`
*
* * `{{job_id}}`
*
* When using the `pulumi import` command, Job can be imported using one of the formats above. For example:
*
* ```sh
* $ pulumi import gcp:bigquery/job:Job default projects/{{project}}/jobs/{{job_id}}/location/{{location}}
* ```
*
* ```sh
* $ pulumi import gcp:bigquery/job:Job default projects/{{project}}/jobs/{{job_id}}
* ```
*
* ```sh
* $ pulumi import gcp:bigquery/job:Job default {{project}}/{{job_id}}/{{location}}
* ```
*
* ```sh
* $ pulumi import gcp:bigquery/job:Job default {{job_id}}/{{location}}
* ```
*
* ```sh
* $ pulumi import gcp:bigquery/job:Job default {{project}}/{{job_id}}
* ```
*
* ```sh
* $ pulumi import gcp:bigquery/job:Job default {{job_id}}
* ```
*/
class Job extends pulumi.CustomResource {
/**
* Get an existing Job resource's state with the given name, ID, and optional extra
* properties used to qualify the lookup.
*
* @param name The _unique_ name of the resulting resource.
* @param id The _unique_ provider ID of the resource to lookup.
* @param state Any extra arguments used during the lookup.
* @param opts Optional settings to control the behavior of the CustomResource.
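*
* A minimal usage sketch (the ID below is illustrative; it uses one of the accepted
* import formats listed above, and the project, job ID, and location are assumptions):
*
* ```typescript
* import * as gcp from "@pulumi/gcp";
*
* // Adopt an existing job into the program by its provider ID.
* const existing = gcp.bigquery.Job.get("existing-job", "projects/my-project/jobs/job_query/location/US");
* ```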
*/
static get(name, id, state, opts) {
return new Job(name, state, { ...opts, id: id });
}
/**
* Returns true if the given object is an instance of Job. This is designed to work even
* when multiple copies of the Pulumi SDK have been loaded into the same process.
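*
* A small sketch of the check (the `job` resource here is purely illustrative):
*
* ```typescript
* import * as gcp from "@pulumi/gcp";
*
* const job = new gcp.bigquery.Job("job", {
*     jobId: "job_check",
*     query: { query: "SELECT 1" },
* });
* const isJob = gcp.bigquery.Job.isInstance(job); // true, even across SDK copies
* ```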
*/
static isInstance(obj) {
if (obj === undefined || obj === null) {
return false;
}
return obj['__pulumiType'] === Job.__pulumiType;
}
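/**
 * Create a Job resource with the given unique name, arguments, and options.
 * When `opts.id` is set (as `Job.get` does), `argsOrState` is read as the existing
 * state of the resource; otherwise it supplies the arguments for a new resource.
 *
 * @param name The _unique_ name of the resource.
 * @param argsOrState The arguments used to populate this resource's properties, or the existing state when looking up a resource.
 * @param opts A bag of options that control this resource's behavior.
 */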
constructor(name, argsOrState, opts) {
let resourceInputs = {};
opts = opts || {};
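// An explicit resource ID means we are reading existing state (the Job.get path)
// rather than registering a new job.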
if (opts.id) {
const state = argsOrState;
resourceInputs["copy"] = state?.copy;
resourceInputs["effectiveLabels"] = state?.effectiveLabels;
resourceInputs["extract"] = state?.extract;
resourceInputs["jobId"] = state?.jobId;
resourceInputs["jobTimeoutMs"] = state?.jobTimeoutMs;
resourceInputs["jobType"] = state?.jobType;
resourceInputs["labels"] = state?.labels;
resourceInputs["load"] = state?.load;
resourceInputs["location"] = state?.location;
resourceInputs["project"] = state?.project;
resourceInputs["pulumiLabels"] = state?.pulumiLabels;
resourceInputs["query"] = state?.query;
resourceInputs["statuses"] = state?.statuses;
resourceInputs["userEmail"] = state?.userEmail;
}
else {
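// Creating a new job: jobId is the only input that must be supplied.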
const args = argsOrState;
if (args?.jobId === undefined && !opts.urn) {
throw new Error("Missing required property 'jobId'");
}
resourceInputs["copy"] = args?.copy;
resourceInputs["extract"] = args?.extract;
resourceInputs["jobId"] = args?.jobId;
resourceInputs["jobTimeoutMs"] = args?.jobTimeoutMs;
resourceInputs["labels"] = args?.labels;
resourceInputs["load"] = args?.load;
resourceInputs["location"] = args?.location;
resourceInputs["project"] = args?.project;
resourceInputs["query"] = args?.query;
resourceInputs["effectiveLabels"] = undefined /*out*/;
resourceInputs["jobType"] = undefined /*out*/;
resourceInputs["pulumiLabels"] = undefined /*out*/;
resourceInputs["statuses"] = undefined /*out*/;
resourceInputs["userEmail"] = undefined /*out*/;
}
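// Apply the SDK's default resource options, then mark `effectiveLabels` and
// `pulumiLabels` as secret outputs.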
opts = pulumi.mergeOptions(utilities.resourceOptsDefaults(), opts);
const secretOpts = { additionalSecretOutputs: ["effectiveLabels", "pulumiLabels"] };
opts = pulumi.mergeOptions(opts, secretOpts);
super(Job.__pulumiType, name, resourceInputs, opts);
}
}
exports.Job = Job;
/** @internal */
Job.__pulumiType = 'gcp:bigquery/job:Job';
//# sourceMappingURL=job.js.map