@cdktf/provider-google
Prebuilt google Provider for Terraform CDK (cdktf)
/**
* Copyright (c) HashiCorp, Inc.
* SPDX-License-Identifier: MPL-2.0
*/
import { Construct } from 'constructs';
import * as cdktf from 'cdktf';
export interface BigqueryJobConfig extends cdktf.TerraformMetaArguments {
/**
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/bigquery_job#id BigqueryJob#id}
*
* Please be aware that the id field is automatically added to all resources in Terraform providers using a Terraform provider SDK version below 2.
* If you experience problems setting this value, it might not be settable. Please refer to the provider documentation to confirm whether it should be settable.
*/
readonly id?: string;
/**
* The ID of the job. The ID must contain only letters (a-z, A-Z), numbers (0-9), underscores (_), or dashes (-). The maximum length is 1,024 characters.
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/bigquery_job#job_id BigqueryJob#job_id}
*/
readonly jobId: string;
/**
* Job timeout in milliseconds. If this time limit is exceeded, BigQuery may attempt to terminate the job.
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/bigquery_job#job_timeout_ms BigqueryJob#job_timeout_ms}
*/
readonly jobTimeoutMs?: string;
/**
* The labels associated with this job. You can use these to organize and group your jobs.
*
*
* **Note**: This field is non-authoritative, and will only manage the labels present in your configuration.
* Please refer to the field 'effective_labels' for all of the labels present on the resource.
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/bigquery_job#labels BigqueryJob#labels}
*/
readonly labels?: {
[key: string]: string;
};
/**
* The geographic location of the job. The default value is US.
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/bigquery_job#location BigqueryJob#location}
*/
readonly location?: string;
/**
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/bigquery_job#project BigqueryJob#project}
*/
readonly project?: string;
/**
* copy block
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/bigquery_job#copy BigqueryJob#copy}
*/
readonly copy?: BigqueryJobCopy;
/**
* extract block
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/bigquery_job#extract BigqueryJob#extract}
*/
readonly extract?: BigqueryJobExtract;
/**
* load block
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/bigquery_job#load BigqueryJob#load}
*/
readonly load?: BigqueryJobLoad;
/**
* query block
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/bigquery_job#query BigqueryJob#query}
*/
readonly query?: BigqueryJobQuery;
/**
* timeouts block
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/bigquery_job#timeouts BigqueryJob#timeouts}
*/
readonly timeouts?: BigqueryJobTimeouts;
}
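/*
 * Usage sketch (illustrative; not part of the generated declarations): a minimal
 * BigqueryJobConfig for a copy job, built only from the types declared in this file.
 * All project, dataset, and table ids below are hypothetical.
 *
 *   const config: BigqueryJobConfig = {
 *     jobId: 'example_copy_job',        // letters, digits, '_' or '-'; max 1,024 chars
 *     location: 'US',                   // defaults to US when omitted
 *     labels: { team: 'analytics' },    // non-authoritative; see effective_labels
 *     copy: {
 *       sourceTables: [
 *         { projectId: 'my-project', datasetId: 'src_dataset', tableId: 'src_table' },
 *       ],
 *       destinationTable: {
 *         projectId: 'my-project', datasetId: 'dst_dataset', tableId: 'dst_table',
 *       },
 *     },
 *   };
 */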
export interface BigqueryJobStatusErrorResult {
}
export declare function bigqueryJobStatusErrorResultToTerraform(struct?: BigqueryJobStatusErrorResult): any;
export declare function bigqueryJobStatusErrorResultToHclTerraform(struct?: BigqueryJobStatusErrorResult): any;
export declare class BigqueryJobStatusErrorResultOutputReference extends cdktf.ComplexObject {
private isEmptyObject;
/**
* @param terraformResource The parent resource
* @param terraformAttribute The attribute on the parent resource this class is referencing
* @param complexObjectIndex the index of this item in the list
* @param complexObjectIsFromSet whether the list is wrapping a set (will add tolist() to be able to access an item via an index)
*/
constructor(terraformResource: cdktf.IInterpolatingParent, terraformAttribute: string, complexObjectIndex: number, complexObjectIsFromSet: boolean);
get internalValue(): BigqueryJobStatusErrorResult | undefined;
set internalValue(value: BigqueryJobStatusErrorResult | undefined);
get location(): string;
get message(): string;
get reason(): string;
}
export declare class BigqueryJobStatusErrorResultList extends cdktf.ComplexList {
protected terraformResource: cdktf.IInterpolatingParent;
protected terraformAttribute: string;
protected wrapsSet: boolean;
/**
* @param terraformResource The parent resource
* @param terraformAttribute The attribute on the parent resource this class is referencing
* @param wrapsSet whether the list is wrapping a set (will add tolist() to be able to access an item via an index)
*/
constructor(terraformResource: cdktf.IInterpolatingParent, terraformAttribute: string, wrapsSet: boolean);
/**
* @param index the index of the item to return
*/
get(index: number): BigqueryJobStatusErrorResultOutputReference;
}
export interface BigqueryJobStatusErrors {
}
export declare function bigqueryJobStatusErrorsToTerraform(struct?: BigqueryJobStatusErrors): any;
export declare function bigqueryJobStatusErrorsToHclTerraform(struct?: BigqueryJobStatusErrors): any;
export declare class BigqueryJobStatusErrorsOutputReference extends cdktf.ComplexObject {
private isEmptyObject;
/**
* @param terraformResource The parent resource
* @param terraformAttribute The attribute on the parent resource this class is referencing
* @param complexObjectIndex the index of this item in the list
* @param complexObjectIsFromSet whether the list is wrapping a set (will add tolist() to be able to access an item via an index)
*/
constructor(terraformResource: cdktf.IInterpolatingParent, terraformAttribute: string, complexObjectIndex: number, complexObjectIsFromSet: boolean);
get internalValue(): BigqueryJobStatusErrors | undefined;
set internalValue(value: BigqueryJobStatusErrors | undefined);
get location(): string;
get message(): string;
get reason(): string;
}
export declare class BigqueryJobStatusErrorsList extends cdktf.ComplexList {
protected terraformResource: cdktf.IInterpolatingParent;
protected terraformAttribute: string;
protected wrapsSet: boolean;
/**
* @param terraformResource The parent resource
* @param terraformAttribute The attribute on the parent resource this class is referencing
* @param wrapsSet whether the list is wrapping a set (will add tolist() to be able to access an item via an index)
*/
constructor(terraformResource: cdktf.IInterpolatingParent, terraformAttribute: string, wrapsSet: boolean);
/**
* @param index the index of the item to return
*/
get(index: number): BigqueryJobStatusErrorsOutputReference;
}
export interface BigqueryJobStatus {
}
export declare function bigqueryJobStatusToTerraform(struct?: BigqueryJobStatus): any;
export declare function bigqueryJobStatusToHclTerraform(struct?: BigqueryJobStatus): any;
export declare class BigqueryJobStatusOutputReference extends cdktf.ComplexObject {
private isEmptyObject;
/**
* @param terraformResource The parent resource
* @param terraformAttribute The attribute on the parent resource this class is referencing
* @param complexObjectIndex the index of this item in the list
* @param complexObjectIsFromSet whether the list is wrapping a set (will add tolist() to be able to access an item via an index)
*/
constructor(terraformResource: cdktf.IInterpolatingParent, terraformAttribute: string, complexObjectIndex: number, complexObjectIsFromSet: boolean);
get internalValue(): BigqueryJobStatus | undefined;
set internalValue(value: BigqueryJobStatus | undefined);
private _errorResult;
get errorResult(): BigqueryJobStatusErrorResultList;
private _errors;
get errors(): BigqueryJobStatusErrorsList;
get state(): string;
}
export declare class BigqueryJobStatusList extends cdktf.ComplexList {
protected terraformResource: cdktf.IInterpolatingParent;
protected terraformAttribute: string;
protected wrapsSet: boolean;
/**
* @param terraformResource The parent resource
* @param terraformAttribute The attribute on the parent resource this class is referencing
* @param wrapsSet whether the list is wrapping a set (will add tolist() to be able to access an item via an index)
*/
constructor(terraformResource: cdktf.IInterpolatingParent, terraformAttribute: string, wrapsSet: boolean);
/**
* @param index the index of the item to return
*/
get(index: number): BigqueryJobStatusOutputReference;
}
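/*
 * Reading the computed status (illustrative; assumes `job` is an instance of the
 * BigqueryJob resource class declared later in this file). The getters return
 * Terraform interpolation tokens that resolve at apply time, not live values:
 *
 *   const state: string = job.status.get(0).state;
 *   const firstErrorMessage: string = job.status.get(0).errors.get(0).message;
 *   const errorReason: string = job.status.get(0).errorResult.get(0).reason;
 */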
export interface BigqueryJobCopyDestinationEncryptionConfiguration {
/**
* Describes the Cloud KMS encryption key that will be used to protect the destination BigQuery table.
* The BigQuery Service Account associated with your project requires access to this encryption key.
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/bigquery_job#kms_key_name BigqueryJob#kms_key_name}
*/
readonly kmsKeyName: string;
}
export declare function bigqueryJobCopyDestinationEncryptionConfigurationToTerraform(struct?: BigqueryJobCopyDestinationEncryptionConfigurationOutputReference | BigqueryJobCopyDestinationEncryptionConfiguration): any;
export declare function bigqueryJobCopyDestinationEncryptionConfigurationToHclTerraform(struct?: BigqueryJobCopyDestinationEncryptionConfigurationOutputReference | BigqueryJobCopyDestinationEncryptionConfiguration): any;
export declare class BigqueryJobCopyDestinationEncryptionConfigurationOutputReference extends cdktf.ComplexObject {
private isEmptyObject;
/**
* @param terraformResource The parent resource
* @param terraformAttribute The attribute on the parent resource this class is referencing
*/
constructor(terraformResource: cdktf.IInterpolatingParent, terraformAttribute: string);
get internalValue(): BigqueryJobCopyDestinationEncryptionConfiguration | undefined;
set internalValue(value: BigqueryJobCopyDestinationEncryptionConfiguration | undefined);
private _kmsKeyName?;
get kmsKeyName(): string;
set kmsKeyName(value: string);
get kmsKeyNameInput(): string | undefined;
get kmsKeyVersion(): string;
}
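/*
 * Illustrative value (the key path is hypothetical): kmsKeyName takes the fully
 * qualified resource name of a Cloud KMS key.
 *
 *   const encryption: BigqueryJobCopyDestinationEncryptionConfiguration = {
 *     kmsKeyName: 'projects/my-project/locations/us/keyRings/my-ring/cryptoKeys/my-key',
 *   };
 */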
export interface BigqueryJobCopyDestinationTable {
/**
* The ID of the dataset containing this table.
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/bigquery_job#dataset_id BigqueryJob#dataset_id}
*/
readonly datasetId?: string;
/**
* The ID of the project containing this table.
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/bigquery_job#project_id BigqueryJob#project_id}
*/
readonly projectId?: string;
/**
* The table. Can be specified as '{{table_id}}' if 'project_id' and 'dataset_id' are also set,
* or in the form 'projects/{{project}}/datasets/{{dataset_id}}/tables/{{table_id}}' if not.
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/bigquery_job#table_id BigqueryJob#table_id}
*/
readonly tableId: string;
}
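/*
 * Illustrative values for the two accepted forms of table_id described above
 * (all ids are hypothetical):
 *
 *   // Short form: project_id and dataset_id are set separately.
 *   const shortForm: BigqueryJobCopyDestinationTable = {
 *     projectId: 'my-project', datasetId: 'my_dataset', tableId: 'my_table',
 *   };
 *   // Fully qualified form: everything is encoded in table_id.
 *   const qualified: BigqueryJobCopyDestinationTable = {
 *     tableId: 'projects/my-project/datasets/my_dataset/tables/my_table',
 *   };
 */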
export declare function bigqueryJobCopyDestinationTableToTerraform(struct?: BigqueryJobCopyDestinationTableOutputReference | BigqueryJobCopyDestinationTable): any;
export declare function bigqueryJobCopyDestinationTableToHclTerraform(struct?: BigqueryJobCopyDestinationTableOutputReference | BigqueryJobCopyDestinationTable): any;
export declare class BigqueryJobCopyDestinationTableOutputReference extends cdktf.ComplexObject {
private isEmptyObject;
/**
* @param terraformResource The parent resource
* @param terraformAttribute The attribute on the parent resource this class is referencing
*/
constructor(terraformResource: cdktf.IInterpolatingParent, terraformAttribute: string);
get internalValue(): BigqueryJobCopyDestinationTable | undefined;
set internalValue(value: BigqueryJobCopyDestinationTable | undefined);
private _datasetId?;
get datasetId(): string;
set datasetId(value: string);
resetDatasetId(): void;
get datasetIdInput(): string | undefined;
private _projectId?;
get projectId(): string;
set projectId(value: string);
resetProjectId(): void;
get projectIdInput(): string | undefined;
private _tableId?;
get tableId(): string;
set tableId(value: string);
get tableIdInput(): string | undefined;
}
export interface BigqueryJobCopySourceTables {
/**
* The ID of the dataset containing this table.
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/bigquery_job#dataset_id BigqueryJob#dataset_id}
*/
readonly datasetId?: string;
/**
* The ID of the project containing this table.
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/bigquery_job#project_id BigqueryJob#project_id}
*/
readonly projectId?: string;
/**
* The table. Can be specified as '{{table_id}}' if 'project_id' and 'dataset_id' are also set,
* or in the form 'projects/{{project}}/datasets/{{dataset_id}}/tables/{{table_id}}' if not.
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/bigquery_job#table_id BigqueryJob#table_id}
*/
readonly tableId: string;
}
export declare function bigqueryJobCopySourceTablesToTerraform(struct?: BigqueryJobCopySourceTables | cdktf.IResolvable): any;
export declare function bigqueryJobCopySourceTablesToHclTerraform(struct?: BigqueryJobCopySourceTables | cdktf.IResolvable): any;
export declare class BigqueryJobCopySourceTablesOutputReference extends cdktf.ComplexObject {
private isEmptyObject;
private resolvableValue?;
/**
* @param terraformResource The parent resource
* @param terraformAttribute The attribute on the parent resource this class is referencing
* @param complexObjectIndex the index of this item in the list
* @param complexObjectIsFromSet whether the list is wrapping a set (will add tolist() to be able to access an item via an index)
*/
constructor(terraformResource: cdktf.IInterpolatingParent, terraformAttribute: string, complexObjectIndex: number, complexObjectIsFromSet: boolean);
get internalValue(): BigqueryJobCopySourceTables | cdktf.IResolvable | undefined;
set internalValue(value: BigqueryJobCopySourceTables | cdktf.IResolvable | undefined);
private _datasetId?;
get datasetId(): string;
set datasetId(value: string);
resetDatasetId(): void;
get datasetIdInput(): string | undefined;
private _projectId?;
get projectId(): string;
set projectId(value: string);
resetProjectId(): void;
get projectIdInput(): string | undefined;
private _tableId?;
get tableId(): string;
set tableId(value: string);
get tableIdInput(): string | undefined;
}
export declare class BigqueryJobCopySourceTablesList extends cdktf.ComplexList {
protected terraformResource: cdktf.IInterpolatingParent;
protected terraformAttribute: string;
protected wrapsSet: boolean;
internalValue?: BigqueryJobCopySourceTables[] | cdktf.IResolvable;
/**
* @param terraformResource The parent resource
* @param terraformAttribute The attribute on the parent resource this class is referencing
* @param wrapsSet whether the list is wrapping a set (will add tolist() to be able to access an item via an index)
*/
constructor(terraformResource: cdktf.IInterpolatingParent, terraformAttribute: string, wrapsSet: boolean);
/**
* @param index the index of the item to return
*/
get(index: number): BigqueryJobCopySourceTablesOutputReference;
}
export interface BigqueryJobCopy {
/**
* Specifies whether the job is allowed to create new tables. The following values are supported:
* CREATE_IF_NEEDED: If the table does not exist, BigQuery creates the table.
* CREATE_NEVER: The table must already exist. If it does not, a 'notFound' error is returned in the job result.
* Creation, truncation and append actions occur as one atomic update upon job completion. Default value: "CREATE_IF_NEEDED" Possible values: ["CREATE_IF_NEEDED", "CREATE_NEVER"]
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/bigquery_job#create_disposition BigqueryJob#create_disposition}
*/
readonly createDisposition?: string;
/**
* Specifies the action that occurs if the destination table already exists. The following values are supported:
* WRITE_TRUNCATE: If the table already exists, BigQuery overwrites the table data and uses the schema from the query result.
* WRITE_APPEND: If the table already exists, BigQuery appends the data to the table.
* WRITE_EMPTY: If the table already exists and contains data, a 'duplicate' error is returned in the job result.
* Each action is atomic and only occurs if BigQuery is able to complete the job successfully.
* Creation, truncation and append actions occur as one atomic update upon job completion. Default value: "WRITE_EMPTY" Possible values: ["WRITE_TRUNCATE", "WRITE_APPEND", "WRITE_EMPTY"]
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/bigquery_job#write_disposition BigqueryJob#write_disposition}
*/
readonly writeDisposition?: string;
/**
* destination_encryption_configuration block
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/bigquery_job#destination_encryption_configuration BigqueryJob#destination_encryption_configuration}
*/
readonly destinationEncryptionConfiguration?: BigqueryJobCopyDestinationEncryptionConfiguration;
/**
* destination_table block
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/bigquery_job#destination_table BigqueryJob#destination_table}
*/
readonly destinationTable?: BigqueryJobCopyDestinationTable;
/**
* source_tables block
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/bigquery_job#source_tables BigqueryJob#source_tables}
*/
readonly sourceTables: BigqueryJobCopySourceTables[] | cdktf.IResolvable;
}
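/*
 * Illustrative copy block combining the dispositions documented above; it copies
 * two source tables into one destination (resource names are hypothetical):
 *
 *   const copyBlock: BigqueryJobCopy = {
 *     createDisposition: 'CREATE_NEVER',    // destination table must already exist
 *     writeDisposition: 'WRITE_TRUNCATE',   // overwrite any existing data
 *     sourceTables: [
 *       { tableId: 'projects/my-project/datasets/src/tables/a' },
 *       { tableId: 'projects/my-project/datasets/src/tables/b' },
 *     ],
 *     destinationTable: { tableId: 'projects/my-project/datasets/dst/tables/merged' },
 *   };
 */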
export declare function bigqueryJobCopyToTerraform(struct?: BigqueryJobCopyOutputReference | BigqueryJobCopy): any;
export declare function bigqueryJobCopyToHclTerraform(struct?: BigqueryJobCopyOutputReference | BigqueryJobCopy): any;
export declare class BigqueryJobCopyOutputReference extends cdktf.ComplexObject {
private isEmptyObject;
/**
* @param terraformResource The parent resource
* @param terraformAttribute The attribute on the parent resource this class is referencing
*/
constructor(terraformResource: cdktf.IInterpolatingParent, terraformAttribute: string);
get internalValue(): BigqueryJobCopy | undefined;
set internalValue(value: BigqueryJobCopy | undefined);
private _createDisposition?;
get createDisposition(): string;
set createDisposition(value: string);
resetCreateDisposition(): void;
get createDispositionInput(): string | undefined;
private _writeDisposition?;
get writeDisposition(): string;
set writeDisposition(value: string);
resetWriteDisposition(): void;
get writeDispositionInput(): string | undefined;
private _destinationEncryptionConfiguration;
get destinationEncryptionConfiguration(): BigqueryJobCopyDestinationEncryptionConfigurationOutputReference;
putDestinationEncryptionConfiguration(value: BigqueryJobCopyDestinationEncryptionConfiguration): void;
resetDestinationEncryptionConfiguration(): void;
get destinationEncryptionConfigurationInput(): BigqueryJobCopyDestinationEncryptionConfiguration | undefined;
private _destinationTable;
get destinationTable(): BigqueryJobCopyDestinationTableOutputReference;
putDestinationTable(value: BigqueryJobCopyDestinationTable): void;
resetDestinationTable(): void;
get destinationTableInput(): BigqueryJobCopyDestinationTable | undefined;
private _sourceTables;
get sourceTables(): BigqueryJobCopySourceTablesList;
putSourceTables(value: BigqueryJobCopySourceTables[] | cdktf.IResolvable): void;
get sourceTablesInput(): cdktf.IResolvable | BigqueryJobCopySourceTables[] | undefined;
}
export interface BigqueryJobExtractSourceModel {
/**
* The ID of the dataset containing this model.
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/bigquery_job#dataset_id BigqueryJob#dataset_id}
*/
readonly datasetId: string;
/**
* The ID of the model.
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/bigquery_job#model_id BigqueryJob#model_id}
*/
readonly modelId: string;
/**
* The ID of the project containing this model.
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/bigquery_job#project_id BigqueryJob#project_id}
*/
readonly projectId: string;
}
export declare function bigqueryJobExtractSourceModelToTerraform(struct?: BigqueryJobExtractSourceModelOutputReference | BigqueryJobExtractSourceModel): any;
export declare function bigqueryJobExtractSourceModelToHclTerraform(struct?: BigqueryJobExtractSourceModelOutputReference | BigqueryJobExtractSourceModel): any;
export declare class BigqueryJobExtractSourceModelOutputReference extends cdktf.ComplexObject {
private isEmptyObject;
/**
* @param terraformResource The parent resource
* @param terraformAttribute The attribute on the parent resource this class is referencing
*/
constructor(terraformResource: cdktf.IInterpolatingParent, terraformAttribute: string);
get internalValue(): BigqueryJobExtractSourceModel | undefined;
set internalValue(value: BigqueryJobExtractSourceModel | undefined);
private _datasetId?;
get datasetId(): string;
set datasetId(value: string);
get datasetIdInput(): string | undefined;
private _modelId?;
get modelId(): string;
set modelId(value: string);
get modelIdInput(): string | undefined;
private _projectId?;
get projectId(): string;
set projectId(value: string);
get projectIdInput(): string | undefined;
}
export interface BigqueryJobExtractSourceTable {
/**
* The ID of the dataset containing this table.
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/bigquery_job#dataset_id BigqueryJob#dataset_id}
*/
readonly datasetId?: string;
/**
* The ID of the project containing this table.
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/bigquery_job#project_id BigqueryJob#project_id}
*/
readonly projectId?: string;
/**
* The table. Can be specified as '{{table_id}}' if 'project_id' and 'dataset_id' are also set,
* or in the form 'projects/{{project}}/datasets/{{dataset_id}}/tables/{{table_id}}' if not.
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/bigquery_job#table_id BigqueryJob#table_id}
*/
readonly tableId: string;
}
export declare function bigqueryJobExtractSourceTableToTerraform(struct?: BigqueryJobExtractSourceTableOutputReference | BigqueryJobExtractSourceTable): any;
export declare function bigqueryJobExtractSourceTableToHclTerraform(struct?: BigqueryJobExtractSourceTableOutputReference | BigqueryJobExtractSourceTable): any;
export declare class BigqueryJobExtractSourceTableOutputReference extends cdktf.ComplexObject {
private isEmptyObject;
/**
* @param terraformResource The parent resource
* @param terraformAttribute The attribute on the parent resource this class is referencing
*/
constructor(terraformResource: cdktf.IInterpolatingParent, terraformAttribute: string);
get internalValue(): BigqueryJobExtractSourceTable | undefined;
set internalValue(value: BigqueryJobExtractSourceTable | undefined);
private _datasetId?;
get datasetId(): string;
set datasetId(value: string);
resetDatasetId(): void;
get datasetIdInput(): string | undefined;
private _projectId?;
get projectId(): string;
set projectId(value: string);
resetProjectId(): void;
get projectIdInput(): string | undefined;
private _tableId?;
get tableId(): string;
set tableId(value: string);
get tableIdInput(): string | undefined;
}
export interface BigqueryJobExtract {
/**
* The compression type to use for exported files. Possible values include GZIP, DEFLATE, SNAPPY, and NONE.
* The default value is NONE. DEFLATE and SNAPPY are only supported for Avro.
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/bigquery_job#compression BigqueryJob#compression}
*/
readonly compression?: string;
/**
* The exported file format. Possible values include CSV, NEWLINE_DELIMITED_JSON and AVRO for tables and SAVED_MODEL for models.
* The default value for tables is CSV. Tables with nested or repeated fields cannot be exported as CSV.
* The default value for models is SAVED_MODEL.
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/bigquery_job#destination_format BigqueryJob#destination_format}
*/
readonly destinationFormat?: string;
/**
* A list of fully-qualified Google Cloud Storage URIs where the extracted table should be written.
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/bigquery_job#destination_uris BigqueryJob#destination_uris}
*/
readonly destinationUris: string[];
/**
* When extracting data in CSV format, this defines the delimiter to use between fields in the exported data.
* The default is ','.
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/bigquery_job#field_delimiter BigqueryJob#field_delimiter}
*/
readonly fieldDelimiter?: string;
/**
* Whether to print out a header row in the results. Default is true.
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/bigquery_job#print_header BigqueryJob#print_header}
*/
readonly printHeader?: boolean | cdktf.IResolvable;
/**
* Whether to use logical types when extracting to AVRO format.
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/bigquery_job#use_avro_logical_types BigqueryJob#use_avro_logical_types}
*/
readonly useAvroLogicalTypes?: boolean | cdktf.IResolvable;
/**
* source_model block
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/bigquery_job#source_model BigqueryJob#source_model}
*/
readonly sourceModel?: BigqueryJobExtractSourceModel;
/**
* source_table block
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/bigquery_job#source_table BigqueryJob#source_table}
*/
readonly sourceTable?: BigqueryJobExtractSourceTable;
}
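/*
 * Illustrative extract block: export a table to Cloud Storage as gzipped CSV
 * (the bucket name is hypothetical):
 *
 *   const extractBlock: BigqueryJobExtract = {
 *     destinationUris: ['gs://my-bucket/exports/my_table-*.csv.gz'],
 *     destinationFormat: 'CSV',
 *     compression: 'GZIP',
 *     fieldDelimiter: ',',
 *     printHeader: true,
 *     sourceTable: {
 *       projectId: 'my-project', datasetId: 'my_dataset', tableId: 'my_table',
 *     },
 *   };
 */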
export declare function bigqueryJobExtractToTerraform(struct?: BigqueryJobExtractOutputReference | BigqueryJobExtract): any;
export declare function bigqueryJobExtractToHclTerraform(struct?: BigqueryJobExtractOutputReference | BigqueryJobExtract): any;
export declare class BigqueryJobExtractOutputReference extends cdktf.ComplexObject {
private isEmptyObject;
/**
* @param terraformResource The parent resource
* @param terraformAttribute The attribute on the parent resource this class is referencing
*/
constructor(terraformResource: cdktf.IInterpolatingParent, terraformAttribute: string);
get internalValue(): BigqueryJobExtract | undefined;
set internalValue(value: BigqueryJobExtract | undefined);
private _compression?;
get compression(): string;
set compression(value: string);
resetCompression(): void;
get compressionInput(): string | undefined;
private _destinationFormat?;
get destinationFormat(): string;
set destinationFormat(value: string);
resetDestinationFormat(): void;
get destinationFormatInput(): string | undefined;
private _destinationUris?;
get destinationUris(): string[];
set destinationUris(value: string[]);
get destinationUrisInput(): string[] | undefined;
private _fieldDelimiter?;
get fieldDelimiter(): string;
set fieldDelimiter(value: string);
resetFieldDelimiter(): void;
get fieldDelimiterInput(): string | undefined;
private _printHeader?;
get printHeader(): boolean | cdktf.IResolvable;
set printHeader(value: boolean | cdktf.IResolvable);
resetPrintHeader(): void;
get printHeaderInput(): boolean | cdktf.IResolvable | undefined;
private _useAvroLogicalTypes?;
get useAvroLogicalTypes(): boolean | cdktf.IResolvable;
set useAvroLogicalTypes(value: boolean | cdktf.IResolvable);
resetUseAvroLogicalTypes(): void;
get useAvroLogicalTypesInput(): boolean | cdktf.IResolvable | undefined;
private _sourceModel;
get sourceModel(): BigqueryJobExtractSourceModelOutputReference;
putSourceModel(value: BigqueryJobExtractSourceModel): void;
resetSourceModel(): void;
get sourceModelInput(): BigqueryJobExtractSourceModel | undefined;
private _sourceTable;
get sourceTable(): BigqueryJobExtractSourceTableOutputReference;
putSourceTable(value: BigqueryJobExtractSourceTable): void;
resetSourceTable(): void;
get sourceTableInput(): BigqueryJobExtractSourceTable | undefined;
}
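/*
 * Escape-hatch style mutation through the output reference (illustrative; assumes
 * `job` is a BigqueryJob instance declared later in this file):
 *
 *   job.extract.putSourceTable({ datasetId: 'my_dataset', tableId: 'my_table' });
 *   job.extract.resetCompression();                 // revert to the default (NONE)
 *   const uris = job.extract.destinationUrisInput;  // string[] | undefined
 */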
export interface BigqueryJobLoadDestinationEncryptionConfiguration {
/**
* Describes the Cloud KMS encryption key that will be used to protect the destination BigQuery table.
* The BigQuery Service Account associated with your project requires access to this encryption key.
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/bigquery_job#kms_key_name BigqueryJob#kms_key_name}
*/
readonly kmsKeyName: string;
}
export declare function bigqueryJobLoadDestinationEncryptionConfigurationToTerraform(struct?: BigqueryJobLoadDestinationEncryptionConfigurationOutputReference | BigqueryJobLoadDestinationEncryptionConfiguration): any;
export declare function bigqueryJobLoadDestinationEncryptionConfigurationToHclTerraform(struct?: BigqueryJobLoadDestinationEncryptionConfigurationOutputReference | BigqueryJobLoadDestinationEncryptionConfiguration): any;
export declare class BigqueryJobLoadDestinationEncryptionConfigurationOutputReference extends cdktf.ComplexObject {
private isEmptyObject;
/**
* @param terraformResource The parent resource
* @param terraformAttribute The attribute on the parent resource this class is referencing
*/
constructor(terraformResource: cdktf.IInterpolatingParent, terraformAttribute: string);
get internalValue(): BigqueryJobLoadDestinationEncryptionConfiguration | undefined;
set internalValue(value: BigqueryJobLoadDestinationEncryptionConfiguration | undefined);
private _kmsKeyName?;
get kmsKeyName(): string;
set kmsKeyName(value: string);
get kmsKeyNameInput(): string | undefined;
get kmsKeyVersion(): string;
}
export interface BigqueryJobLoadDestinationTable {
/**
* The ID of the dataset containing this table.
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/bigquery_job#dataset_id BigqueryJob#dataset_id}
*/
readonly datasetId?: string;
/**
* The ID of the project containing this table.
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/bigquery_job#project_id BigqueryJob#project_id}
*/
readonly projectId?: string;
/**
* The table. Can be specified as '{{table_id}}' if 'project_id' and 'dataset_id' are also set,
* or in the form 'projects/{{project}}/datasets/{{dataset_id}}/tables/{{table_id}}' if not.
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/bigquery_job#table_id BigqueryJob#table_id}
*/
readonly tableId: string;
}
export declare function bigqueryJobLoadDestinationTableToTerraform(struct?: BigqueryJobLoadDestinationTableOutputReference | BigqueryJobLoadDestinationTable): any;
export declare function bigqueryJobLoadDestinationTableToHclTerraform(struct?: BigqueryJobLoadDestinationTableOutputReference | BigqueryJobLoadDestinationTable): any;
export declare class BigqueryJobLoadDestinationTableOutputReference extends cdktf.ComplexObject {
private isEmptyObject;
/**
* @param terraformResource The parent resource
* @param terraformAttribute The attribute on the parent resource this class is referencing
*/
constructor(terraformResource: cdktf.IInterpolatingParent, terraformAttribute: string);
get internalValue(): BigqueryJobLoadDestinationTable | undefined;
set internalValue(value: BigqueryJobLoadDestinationTable | undefined);
private _datasetId?;
get datasetId(): string;
set datasetId(value: string);
resetDatasetId(): void;
get datasetIdInput(): string | undefined;
private _projectId?;
get projectId(): string;
set projectId(value: string);
resetProjectId(): void;
get projectIdInput(): string | undefined;
private _tableId?;
get tableId(): string;
set tableId(value: string);
get tableIdInput(): string | undefined;
}
export interface BigqueryJobLoadParquetOptions {
/**
* If sourceFormat is set to PARQUET, indicates whether to use schema inference specifically for Parquet LIST logical type.
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/bigquery_job#enable_list_inference BigqueryJob#enable_list_inference}
*/
readonly enableListInference?: boolean | cdktf.IResolvable;
/**
* If sourceFormat is set to PARQUET, indicates whether to infer Parquet ENUM logical type as STRING instead of BYTES by default.
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/bigquery_job#enum_as_string BigqueryJob#enum_as_string}
*/
readonly enumAsString?: boolean | cdktf.IResolvable;
}
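/*
 * Illustrative Parquet options, intended for the parquet_options block of the load
 * configuration declared further down in this file:
 *
 *   const parquetOptions: BigqueryJobLoadParquetOptions = {
 *     enableListInference: true,  // infer schema for Parquet LIST logical type
 *     enumAsString: true,         // read Parquet ENUM as STRING instead of BYTES
 *   };
 */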
export declare function bigqueryJobLoadParquetOptionsToTerraform(struct?: BigqueryJobLoadParquetOptionsOutputReference | BigqueryJobLoadParquetOptions): any;
export declare function bigqueryJobLoadParquetOptionsToHclTerraform(struct?: BigqueryJobLoadParquetOptionsOutputReference | BigqueryJobLoadParquetOptions): any;
export declare class BigqueryJobLoadParquetOptionsOutputReference extends cdktf.ComplexObject {
private isEmptyObject;
/**
* @param terraformResource The parent resource
* @param terraformAttribute The attribute on the parent resource this class is referencing
*/
constructor(terraformResource: cdktf.IInterpolatingParent, terraformAttribute: string);
get internalValue(): BigqueryJobLoadParquetOptions | undefined;
set internalValue(value: BigqueryJobLoadParquetOptions | undefined);
private _enableListInference?;
get enableListInference(): boolean | cdktf.IResolvable;
set enableListInference(value: boolean | cdktf.IResolvable);
resetEnableListInference(): void;
get enableListInferenceInput(): boolean | cdktf.IResolvable | undefined;
private _enumAsString?;
get enumAsString(): boolean | cdktf.IResolvable;
set enumAsString(value: boolean | cdktf.IResolvable);
resetEnumAsString(): void;
get enumAsStringInput(): boolean | cdktf.IResolvable | undefined;
}
export interface BigqueryJobLoadTimePartitioning {
/**
* Number of milliseconds for which to keep the storage for a partition. A wrapper is used here because 0 is an invalid value.
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/bigquery_job#expiration_ms BigqueryJob#expiration_ms}
*/
readonly expirationMs?: string;
/**
* If not set, the table is partitioned by pseudo column '_PARTITIONTIME'; if set, the table is partitioned by this field.
* The field must be a top-level TIMESTAMP or DATE field. Its mode must be NULLABLE or REQUIRED.
* A wrapper is used here because an empty string is an invalid value.
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/bigquery_job#field BigqueryJob#field}
*/
readonly field?: string;
/**
* The only type supported is DAY, which will generate one partition per day. Providing an empty string used to cause an error,
* but in OnePlatform the field will be treated as unset.
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/bigquery_job#type BigqueryJob#type}
*/
readonly type: string;
}
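/*
 * Illustrative daily time partitioning with a 90-day partition expiry (the column
 * name is hypothetical):
 *
 *   const partitioning: BigqueryJobLoadTimePartitioning = {
 *     type: 'DAY',                                      // the only supported type
 *     field: 'event_date',                              // top-level TIMESTAMP or DATE column
 *     expirationMs: String(90 * 24 * 60 * 60 * 1000),   // 90 days, as a string wrapper
 *   };
 */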
export declare function bigqueryJobLoadTimePartitioningToTerraform(struct?: BigqueryJobLoadTimePartitioningOutputReference | BigqueryJobLoadTimePartitioning): any;
export declare function bigqueryJobLoadTimePartitioningToHclTerraform(struct?: BigqueryJobLoadTimePartitioningOutputReference | BigqueryJobLoadTimePartitioning): any;
export declare class BigqueryJobLoadTimePartitioningOutputReference extends cdktf.ComplexObject {
private isEmptyObject;
/**
* @param terraformResource The parent resource
* @param terraformAttribute The attribute on the parent resource this class is referencing
*/
constructor(terraformResource: cdktf.IInterpolatingParent, terraformAttribute: string);
get internalValue(): BigqueryJobLoadTimePartitioning | undefined;
set internalValue(value: BigqueryJobLoadTimePartitioning | undefined);
private _expirationMs?;
get expirationMs(): string;
set expirationMs(value: string);
resetExpirationMs(): void;
get expirationMsInput(): string | undefined;
private _field?;
get field(): string;
set field(value: string);
resetField(): void;
get fieldInput(): string | undefined;
private _type?;
get type(): string;
set type(value: string);
get typeInput(): string | undefined;
}
export interface BigqueryJobLoad {
/**
* Accept rows that are missing trailing optional columns. The missing values are treated as nulls.
* If false, records with missing trailing columns are treated as bad records, and if there are too many bad records,
* an invalid error is returned in the job result. The default value is false. Only applicable to CSV, ignored for other formats.
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/bigquery_job#allow_jagged_rows BigqueryJob#allow_jagged_rows}
*/
readonly allowJaggedRows?: boolean | cdktf.IResolvable;
/**
* Indicates if BigQuery should allow quoted data sections that contain newline characters in a CSV file.
* The default value is false.
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/bigquery_job#allow_quoted_newlines BigqueryJob#allow_quoted_newlines}
*/
readonly allowQuotedNewlines?: boolean | cdktf.IResolvable;
/**
* Indicates if we should automatically infer the options and schema for CSV and JSON sources.
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/bigquery_job#autodetect BigqueryJob#autodetect}
*/
readonly autodetect?: boolean | cdktf.IResolvable;
/**
* Specifies whether the job is allowed to create new tables. The following values are supported:
* CREATE_IF_NEEDED: If the table does not exist, BigQuery creates the table.
* CREATE_NEVER: The table must already exist. If it does not, a 'notFound' error is returned in the job result.
* Creation, truncation and append actions occur as one atomic update upon job completion. Default value: "CREATE_IF_NEEDED" Possible values: ["CREATE_IF_NEEDED", "CREATE_NEVER"]
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/bigquery_job#create_disposition BigqueryJob#create_disposition}
*/
readonly createDisposition?: string;
/**
* The character encoding of the data. The supported values are UTF-8 or ISO-8859-1.
* The default value is UTF-8. BigQuery decodes the data after the raw, binary data
* has been split using the values of the quote and fieldDelimiter properties.
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/bigquery_job#encoding BigqueryJob#encoding}
*/
readonly encoding?: string;
/**
* The separator for fields in a CSV file. The separator can be any ISO-8859-1 single-byte character.
* To use a character in the range 128-255, you must encode the character as UTF-8. BigQuery converts
* the string to ISO-8859-1 encoding, and then uses the first byte of the encoded string to split the
* data in its raw, binary state. BigQuery also supports the escape sequence "\t" to specify a tab separator.
* The default value is a comma (',').
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/bigquery_job#field_delimiter BigqueryJob#field_delimiter}
*/
readonly fieldDelimiter?: string;
/**
* Indicates if BigQuery should allow extra values that are not represented in the table schema.
* If true, the extra values are ignored. If false, records with extra columns are treated as bad records,
* and if there are too many bad records, an invalid error is returned in the job result.
* The default value is false. The sourceFormat property determines what BigQuery treats as an extra value:
* CSV: Trailing columns
* JSON: Named values that don't match any column names
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/bigquery_job#ignore_unknown_values BigqueryJob#ignore_unknown_values}
*/
readonly ignoreUnknownValues?: boolean | cdktf.IResolvable;
/**
* If sourceFormat is set to newline-delimited JSON, indicates whether it should be processed as a JSON variant such as GeoJSON.
* For a sourceFormat other than JSON, omit this field. If the sourceFormat is newline-delimited JSON, set this to GEOJSON for
* newline-delimited GeoJSON.
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/bigquery_job#json_extension BigqueryJob#json_extension}
*/
readonly jsonExtension?: string;
/**
* The maximum number of bad records that BigQuery can ignore when running the job. If the number of bad records exceeds this value,
* an invalid error is returned in the job result. The default value is 0, which requires that all records are valid.
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/bigquery_job#max_bad_records BigqueryJob#max_bad_records}
*/
readonly maxBadRecords?: number;
/**
* Specifies a string that represents a null value in a CSV file. For example, if you specify "\N", BigQuery interprets "\N" as a null value
* when loading a CSV file. The default value is the empty string. If you set this property to a custom value, BigQuery throws an error if an
* empty string is present for all data types except for STRING and BYTE. For STRING and BYTE columns, BigQuery interprets the empty string as
* an empty value.
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/bigquery_job#null_marker BigqueryJob#null_marker}
*/
readonly nullMarker?: string;
/**
* If sourceFormat is set to "DATASTORE_BACKUP", indicates which entity properties to load into BigQuery from a Cloud Datastore backup.
* Property names are case sensitive and must be top-level properties. If no properties are specified, BigQuery loads all properties.
* If any named property isn't found in the Cloud Datastore backup, an invalid error is returned in the job result.
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/bigquery_job#projection_fields BigqueryJob#projection_fields}
*/
readonly projectionFields?: string[];
/**
* The value that is used to quote data sections in a CSV file. BigQuery converts the string to ISO-8859-1 encoding,
* and then uses the first byte of the encoded string to split the data in its raw, binary state.
* The default value is a double-quote ('"'). If your data does not contain quoted sections, set the property value to an empty string.
* If your data contains quoted newline characters, you must also set the allowQuotedNewlines property to true.
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/bigquery_job#quote BigqueryJob#quote}
*/
readonly quote?: string;
/**
* Allows the schema of the destination table to be updated as a side effect of the load job if a schema is autodetected or
* supplied in the job configuration. Schema update options are supported in two cases: when writeDisposition is WRITE_APPEND;
* when writeDisposition is WRITE_TRUNCATE and the destination table is a partition of a table, specified by partition decorators.
* For normal tables, WRITE_TRUNCATE will always overwrite the schema. One or more of the following values may be specified:
* ALLOW_FIELD_ADDITION: allow adding a nullable field to the schema.
* ALLOW_FIELD_RELAXATION: allow relaxing a required field in the original schema to nullable.
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/bigquery_job#schema_update_options BigqueryJob#schema_update_options}
*/
readonly schemaUpdateOptions?: string[];
/**
* The number of rows at the top of a CSV file that BigQuery will skip when loading the data.
* The default value is 0. This property is useful if you have header rows in the file that should be skipped.
* When autodetect is on, the behavior is the following:
* skipLeadingRows unspecified - Autodetect tries to detect headers in the first row. If they are not detected,
* the row is read as data. Otherwise data is read starting from the second row.
* skipLeadingRows is 0 - Instructs autodetect that there are no headers and data should be read starting from the first row.
* skipLeadingRows = N > 0 - Autodetect skips N-1 rows and tries to detect headers in row N. If headers are not detected,
* row N is just skipped. Otherwise row N is used to extract column names for the detected schema.
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/bigquery_job#skip_leading_rows BigqueryJob#skip_leading_rows}
*/
readonly skipLeadingRows?: number;
/**
* The format of the data files. For CSV files, specify "CSV". For datastore backups, specify "DATASTORE_BACKUP".
* For newline-delimited JSON, specify "NEWLINE_DELIMITED_JSON". For Avro, specify "AVRO". For Parquet, specify "PARQUET".
* For ORC, specify "ORC". [Beta] For Bigtable, specify "BIGTABLE".
* The default value is CSV.
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/bigquery_job#source_format BigqueryJob#source_format}
*/
readonly sourceFormat?: string;
/**
* The fully-qualified URIs that point to your data in Google Cloud.
* For Google Cloud Storage URIs: Each URI can contain one '\*' wildcard character
* and it must come after the 'bucket' name. Size limits related to load jobs apply