// Package metadata (scraped alongside the source; kept as comments so the file stays valid TypeScript):
//   @cdktf/provider-google
//   Version: (not captured in scrape; doc links below reference provider google v6.13.0)
//   Prebuilt google Provider for Terraform CDK (cdktf)
//   732 lines (731 loc) • 37 kB
//   TypeScript
/**
* Copyright (c) HashiCorp, Inc.
* SPDX-License-Identifier: MPL-2.0
*/
import { Construct } from 'constructs';
import * as cdktf from 'cdktf';
/**
 * Configuration properties for the {@link BigqueryDataset} resource
 * (Terraform resource type `google_bigquery_dataset`). Extends the
 * standard cdktf meta-arguments (provider, count, lifecycle, ...).
 */
export interface BigqueryDatasetConfig extends cdktf.TerraformMetaArguments {
    /**
     * A unique ID for this dataset, without the project name. The ID
     * must contain only letters (a-z, A-Z), numbers (0-9), or
     * underscores (_). The maximum length is 1,024 characters.
     *
     * Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/bigquery_dataset#dataset_id BigqueryDataset#dataset_id}
     */
    readonly datasetId: string;
    /**
     * Defines the default collation specification of future tables created
     * in the dataset. If a table is created in this dataset without table-level
     * default collation, then the table inherits the dataset default collation,
     * which is applied to the string fields that do not have explicit collation
     * specified. A change to this field affects only tables created afterwards,
     * and does not alter the existing tables.
     *
     * The following values are supported:
     * - 'und:ci': undetermined locale, case insensitive.
     * - '': empty string. Default to case-sensitive behavior.
     *
     * Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/bigquery_dataset#default_collation BigqueryDataset#default_collation}
     */
    readonly defaultCollation?: string;
    /**
     * The default partition expiration for all partitioned tables in
     * the dataset, in milliseconds.
     * Once this property is set, all newly-created partitioned tables in
     * the dataset will have an 'expirationMs' property in the 'timePartitioning'
     * settings set to this value, and changing the value will only
     * affect new tables, not existing ones. The storage in a partition will
     * have an expiration time of its partition time plus this value.
     * Setting this property overrides the use of 'defaultTableExpirationMs'
     * for partitioned tables: only one of 'defaultTableExpirationMs' and
     * 'defaultPartitionExpirationMs' will be used for any new partitioned
     * table. If you provide an explicit 'timePartitioning.expirationMs' when
     * creating or updating a partitioned table, that value takes precedence
     * over the default partition expiration time indicated by this property.
     *
     * Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/bigquery_dataset#default_partition_expiration_ms BigqueryDataset#default_partition_expiration_ms}
     */
    readonly defaultPartitionExpirationMs?: number;
    /**
     * The default lifetime of all tables in the dataset, in milliseconds.
     * The minimum value is 3600000 milliseconds (one hour).
     * Once this property is set, all newly-created tables in the dataset
     * will have an 'expirationTime' property set to the creation time plus
     * the value in this property, and changing the value will only affect
     * new tables, not existing ones. When the 'expirationTime' for a given
     * table is reached, that table will be deleted automatically.
     * If a table's 'expirationTime' is modified or removed before the
     * table expires, or if you provide an explicit 'expirationTime' when
     * creating a table, that value takes precedence over the default
     * expiration time indicated by this property.
     *
     * Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/bigquery_dataset#default_table_expiration_ms BigqueryDataset#default_table_expiration_ms}
     */
    readonly defaultTableExpirationMs?: number;
    /**
     * If set to 'true', delete all the tables in the
     * dataset when destroying the resource; otherwise,
     * destroying the resource will fail if tables are present.
     *
     * Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/bigquery_dataset#delete_contents_on_destroy BigqueryDataset#delete_contents_on_destroy}
     */
    readonly deleteContentsOnDestroy?: boolean | cdktf.IResolvable;
    /**
     * A user-friendly description of the dataset
     *
     * Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/bigquery_dataset#description BigqueryDataset#description}
     */
    readonly description?: string;
    /**
     * A descriptive name for the dataset
     *
     * Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/bigquery_dataset#friendly_name BigqueryDataset#friendly_name}
     */
    readonly friendlyName?: string;
    /**
     * Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/bigquery_dataset#id BigqueryDataset#id}
     *
     * Please be aware that the id field is automatically added to all resources in Terraform providers using a Terraform provider SDK version below 2.
     * If you experience problems setting this value it might not be settable. Please take a look at the provider documentation to ensure it should be settable.
     */
    readonly id?: string;
    /**
     * TRUE if the dataset and its table names are case-insensitive, otherwise FALSE.
     * By default, this is FALSE, which means the dataset and its table names are
     * case-sensitive. This field does not affect routine references.
     *
     * Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/bigquery_dataset#is_case_insensitive BigqueryDataset#is_case_insensitive}
     */
    readonly isCaseInsensitive?: boolean | cdktf.IResolvable;
    /**
     * The labels associated with this dataset. You can use these to
     * organize and group your datasets.
     *
     *
     * **Note**: This field is non-authoritative, and will only manage the labels present in your configuration.
     * Please refer to the field 'effective_labels' for all of the labels present on the resource.
     *
     * Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/bigquery_dataset#labels BigqueryDataset#labels}
     */
    readonly labels?: {
        [key: string]: string;
    };
    /**
     * The geographic location where the dataset should reside.
     * See [official docs](https://cloud.google.com/bigquery/docs/dataset-locations).
     * There are two types of locations, regional or multi-regional. A regional
     * location is a specific geographic place, such as Tokyo, and a multi-regional
     * location is a large geographic area, such as the United States, that
     * contains at least two geographic places.
     * The default value is multi-regional location 'US'.
     * Changing this forces a new resource to be created.
     *
     * Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/bigquery_dataset#location BigqueryDataset#location}
     */
    readonly location?: string;
    /**
     * Defines the time travel window in hours. The value can be from 48 to 168 hours (2 to 7 days).
     *
     * Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/bigquery_dataset#max_time_travel_hours BigqueryDataset#max_time_travel_hours}
     */
    readonly maxTimeTravelHours?: string;
    /**
     * Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/bigquery_dataset#project BigqueryDataset#project}
     */
    readonly project?: string;
    /**
     * The tags attached to this table. Tag keys are globally unique. Tag key is expected to be
     * in the namespaced format, for example "123456789012/environment" where 123456789012 is the
     * ID of the parent organization or project resource for this tag key. Tag value is expected
     * to be the short name, for example "Production". See [Tag definitions](/iam/docs/tags-access-control#definitions)
     * for more details.
     *
     * Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/bigquery_dataset#resource_tags BigqueryDataset#resource_tags}
     */
    readonly resourceTags?: {
        [key: string]: string;
    };
    /**
     * Specifies the storage billing model for the dataset.
     * Set this flag value to LOGICAL to use logical bytes for storage billing,
     * or to PHYSICAL to use physical bytes instead.
     *
     * LOGICAL is the default if this flag isn't specified.
     *
     * Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/bigquery_dataset#storage_billing_model BigqueryDataset#storage_billing_model}
     */
    readonly storageBillingModel?: string;
    /**
     * access block
     *
     * Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/bigquery_dataset#access BigqueryDataset#access}
     */
    readonly access?: BigqueryDatasetAccess[] | cdktf.IResolvable;
    /**
     * default_encryption_configuration block
     *
     * Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/bigquery_dataset#default_encryption_configuration BigqueryDataset#default_encryption_configuration}
     */
    readonly defaultEncryptionConfiguration?: BigqueryDatasetDefaultEncryptionConfiguration;
    /**
     * external_dataset_reference block
     *
     * Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/bigquery_dataset#external_dataset_reference BigqueryDataset#external_dataset_reference}
     */
    readonly externalDatasetReference?: BigqueryDatasetExternalDatasetReference;
    /**
     * timeouts block
     *
     * Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/bigquery_dataset#timeouts BigqueryDataset#timeouts}
     */
    readonly timeouts?: BigqueryDatasetTimeouts;
}
/**
 * Nested `dataset` block of an `access.dataset` entry: identifies the
 * BigQuery dataset being granted access to.
 */
export interface BigqueryDatasetAccessDatasetDataset {
    /**
     * The ID of the dataset containing this table.
     *
     * Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/bigquery_dataset#dataset_id BigqueryDataset#dataset_id}
     */
    readonly datasetId: string;
    /**
     * The ID of the project containing this table.
     *
     * Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/bigquery_dataset#project_id BigqueryDataset#project_id}
     */
    readonly projectId: string;
}
/**
 * Maps a `BigqueryDatasetAccessDatasetDataset` value (or its output
 * reference) to the attribute shape used for synthesized Terraform JSON.
 */
export declare function bigqueryDatasetAccessDatasetDatasetToTerraform(struct?: BigqueryDatasetAccessDatasetDatasetOutputReference | BigqueryDatasetAccessDatasetDataset): any;
/**
 * Same mapping as `bigqueryDatasetAccessDatasetDatasetToTerraform`, but
 * producing the metadata-wrapped shape used for HCL synthesis.
 */
export declare function bigqueryDatasetAccessDatasetDatasetToHclTerraform(struct?: BigqueryDatasetAccessDatasetDatasetOutputReference | BigqueryDatasetAccessDatasetDataset): any;
/**
 * Typed output-reference wrapper over the `access.dataset.dataset`
 * attribute of a BigqueryDataset resource.
 */
export declare class BigqueryDatasetAccessDatasetDatasetOutputReference extends cdktf.ComplexObject {
    private isEmptyObject;
    /**
     * @param terraformResource The parent resource
     * @param terraformAttribute The attribute on the parent resource this class is referencing
     */
    constructor(terraformResource: cdktf.IInterpolatingParent, terraformAttribute: string);
    // Full struct value; undefined when nothing has been set.
    get internalValue(): BigqueryDatasetAccessDatasetDataset | undefined;
    set internalValue(value: BigqueryDatasetAccessDatasetDataset | undefined);
    // dataset_id (required on the struct)
    private _datasetId?;
    get datasetId(): string;
    set datasetId(value: string);
    get datasetIdInput(): string | undefined;
    // project_id (required on the struct)
    private _projectId?;
    get projectId(): string;
    set projectId(value: string);
    get projectIdInput(): string | undefined;
}
/**
 * The `dataset` block of an `access` entry: grants access to resources
 * of a target dataset (per `targetTypes`, currently views).
 */
export interface BigqueryDatasetAccessDataset {
    /**
     * Which resources in the dataset this entry applies to. Currently, only views are supported,
     * but additional target types may be added in the future. Possible values: VIEWS
     *
     * Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/bigquery_dataset#target_types BigqueryDataset#target_types}
     */
    readonly targetTypes: string[];
    /**
     * dataset block
     *
     * Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/bigquery_dataset#dataset BigqueryDataset#dataset}
     */
    readonly dataset: BigqueryDatasetAccessDatasetDataset;
}
/**
 * Maps a `BigqueryDatasetAccessDataset` value (or its output reference)
 * to the attribute shape used for synthesized Terraform JSON.
 */
export declare function bigqueryDatasetAccessDatasetToTerraform(struct?: BigqueryDatasetAccessDatasetOutputReference | BigqueryDatasetAccessDataset): any;
/**
 * Same mapping as `bigqueryDatasetAccessDatasetToTerraform`, but producing
 * the metadata-wrapped shape used for HCL synthesis.
 */
export declare function bigqueryDatasetAccessDatasetToHclTerraform(struct?: BigqueryDatasetAccessDatasetOutputReference | BigqueryDatasetAccessDataset): any;
/**
 * Typed output-reference wrapper over the `access.dataset` attribute of a
 * BigqueryDataset resource.
 */
export declare class BigqueryDatasetAccessDatasetOutputReference extends cdktf.ComplexObject {
    private isEmptyObject;
    /**
     * @param terraformResource The parent resource
     * @param terraformAttribute The attribute on the parent resource this class is referencing
     */
    constructor(terraformResource: cdktf.IInterpolatingParent, terraformAttribute: string);
    // Full struct value; undefined when nothing has been set.
    get internalValue(): BigqueryDatasetAccessDataset | undefined;
    set internalValue(value: BigqueryDatasetAccessDataset | undefined);
    // target_types (required on the struct)
    private _targetTypes?;
    get targetTypes(): string[];
    set targetTypes(value: string[]);
    get targetTypesInput(): string[] | undefined;
    // Nested dataset block; set via putDataset, read via the getter.
    private _dataset;
    get dataset(): BigqueryDatasetAccessDatasetDatasetOutputReference;
    putDataset(value: BigqueryDatasetAccessDatasetDataset): void;
    get datasetInput(): BigqueryDatasetAccessDatasetDataset | undefined;
}
/**
 * The `routine` block of an `access` entry: identifies a BigQuery routine
 * being granted access.
 */
export interface BigqueryDatasetAccessRoutine {
    /**
     * The ID of the dataset containing this table.
     *
     * Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/bigquery_dataset#dataset_id BigqueryDataset#dataset_id}
     */
    readonly datasetId: string;
    /**
     * The ID of the project containing this table.
     *
     * Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/bigquery_dataset#project_id BigqueryDataset#project_id}
     */
    readonly projectId: string;
    /**
     * The ID of the routine. The ID must contain only letters (a-z,
     * A-Z), numbers (0-9), or underscores (_). The maximum length
     * is 256 characters.
     *
     * Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/bigquery_dataset#routine_id BigqueryDataset#routine_id}
     */
    readonly routineId: string;
}
/**
 * Maps a `BigqueryDatasetAccessRoutine` value (or its output reference)
 * to the attribute shape used for synthesized Terraform JSON.
 */
export declare function bigqueryDatasetAccessRoutineToTerraform(struct?: BigqueryDatasetAccessRoutineOutputReference | BigqueryDatasetAccessRoutine): any;
/**
 * Same mapping as `bigqueryDatasetAccessRoutineToTerraform`, but producing
 * the metadata-wrapped shape used for HCL synthesis.
 */
export declare function bigqueryDatasetAccessRoutineToHclTerraform(struct?: BigqueryDatasetAccessRoutineOutputReference | BigqueryDatasetAccessRoutine): any;
/**
 * Typed output-reference wrapper over the `access.routine` attribute of a
 * BigqueryDataset resource.
 */
export declare class BigqueryDatasetAccessRoutineOutputReference extends cdktf.ComplexObject {
    private isEmptyObject;
    /**
     * @param terraformResource The parent resource
     * @param terraformAttribute The attribute on the parent resource this class is referencing
     */
    constructor(terraformResource: cdktf.IInterpolatingParent, terraformAttribute: string);
    // Full struct value; undefined when nothing has been set.
    get internalValue(): BigqueryDatasetAccessRoutine | undefined;
    set internalValue(value: BigqueryDatasetAccessRoutine | undefined);
    // dataset_id (required on the struct)
    private _datasetId?;
    get datasetId(): string;
    set datasetId(value: string);
    get datasetIdInput(): string | undefined;
    // project_id (required on the struct)
    private _projectId?;
    get projectId(): string;
    set projectId(value: string);
    get projectIdInput(): string | undefined;
    // routine_id (required on the struct)
    private _routineId?;
    get routineId(): string;
    set routineId(value: string);
    get routineIdInput(): string | undefined;
}
/**
 * The `view` block of an `access` entry: identifies a BigQuery view (table)
 * being granted access.
 */
export interface BigqueryDatasetAccessView {
    /**
     * The ID of the dataset containing this table.
     *
     * Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/bigquery_dataset#dataset_id BigqueryDataset#dataset_id}
     */
    readonly datasetId: string;
    /**
     * The ID of the project containing this table.
     *
     * Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/bigquery_dataset#project_id BigqueryDataset#project_id}
     */
    readonly projectId: string;
    /**
     * The ID of the table. The ID must contain only letters (a-z,
     * A-Z), numbers (0-9), or underscores (_). The maximum length
     * is 1,024 characters.
     *
     * Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/bigquery_dataset#table_id BigqueryDataset#table_id}
     */
    readonly tableId: string;
}
/**
 * Maps a `BigqueryDatasetAccessView` value (or its output reference)
 * to the attribute shape used for synthesized Terraform JSON.
 */
export declare function bigqueryDatasetAccessViewToTerraform(struct?: BigqueryDatasetAccessViewOutputReference | BigqueryDatasetAccessView): any;
/**
 * Same mapping as `bigqueryDatasetAccessViewToTerraform`, but producing
 * the metadata-wrapped shape used for HCL synthesis.
 */
export declare function bigqueryDatasetAccessViewToHclTerraform(struct?: BigqueryDatasetAccessViewOutputReference | BigqueryDatasetAccessView): any;
/**
 * Typed output-reference wrapper over the `access.view` attribute of a
 * BigqueryDataset resource.
 */
export declare class BigqueryDatasetAccessViewOutputReference extends cdktf.ComplexObject {
    private isEmptyObject;
    /**
     * @param terraformResource The parent resource
     * @param terraformAttribute The attribute on the parent resource this class is referencing
     */
    constructor(terraformResource: cdktf.IInterpolatingParent, terraformAttribute: string);
    // Full struct value; undefined when nothing has been set.
    get internalValue(): BigqueryDatasetAccessView | undefined;
    set internalValue(value: BigqueryDatasetAccessView | undefined);
    // dataset_id (required on the struct)
    private _datasetId?;
    get datasetId(): string;
    set datasetId(value: string);
    get datasetIdInput(): string | undefined;
    // project_id (required on the struct)
    private _projectId?;
    get projectId(): string;
    set projectId(value: string);
    get projectIdInput(): string | undefined;
    // table_id (required on the struct)
    private _tableId?;
    get tableId(): string;
    set tableId(value: string);
    get tableIdInput(): string | undefined;
}
/**
 * A single entry of the dataset's `access` list. Each entry names one
 * grantee — a domain, group, IAM member, special group, user, or one of the
 * nested dataset/routine/view blocks — all fields are optional here;
 * consult the provider docs for which combinations are valid.
 */
export interface BigqueryDatasetAccess {
    /**
     * A domain to grant access to. Any users signed in with the
     * domain specified will be granted the specified access
     *
     * Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/bigquery_dataset#domain BigqueryDataset#domain}
     */
    readonly domain?: string;
    /**
     * An email address of a Google Group to grant access to.
     *
     * Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/bigquery_dataset#group_by_email BigqueryDataset#group_by_email}
     */
    readonly groupByEmail?: string;
    /**
     * Some other type of member that appears in the IAM Policy but isn't a user,
     * group, domain, or special group. For example: 'allUsers'
     *
     * Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/bigquery_dataset#iam_member BigqueryDataset#iam_member}
     */
    readonly iamMember?: string;
    /**
     * Describes the rights granted to the user specified by the other
     * member of the access object. Basic, predefined, and custom roles
     * are supported. Predefined roles that have equivalent basic roles
     * are swapped by the API to their basic counterparts. See
     * [official docs](https://cloud.google.com/bigquery/docs/access-control).
     *
     * Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/bigquery_dataset#role BigqueryDataset#role}
     */
    readonly role?: string;
    /**
     * A special group to grant access to. Possible values include:
     * * 'projectOwners': Owners of the enclosing project.
     * * 'projectReaders': Readers of the enclosing project.
     * * 'projectWriters': Writers of the enclosing project.
     * * 'allAuthenticatedUsers': All authenticated BigQuery users.
     *
     * Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/bigquery_dataset#special_group BigqueryDataset#special_group}
     */
    readonly specialGroup?: string;
    /**
     * An email address of a user to grant access to. For example:
     * fred@example.com
     *
     * Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/bigquery_dataset#user_by_email BigqueryDataset#user_by_email}
     */
    readonly userByEmail?: string;
    /**
     * dataset block
     *
     * Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/bigquery_dataset#dataset BigqueryDataset#dataset}
     */
    readonly dataset?: BigqueryDatasetAccessDataset;
    /**
     * routine block
     *
     * Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/bigquery_dataset#routine BigqueryDataset#routine}
     */
    readonly routine?: BigqueryDatasetAccessRoutine;
    /**
     * view block
     *
     * Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/bigquery_dataset#view BigqueryDataset#view}
     */
    readonly view?: BigqueryDatasetAccessView;
}
/**
 * Maps a `BigqueryDatasetAccess` value (or an unresolved token) to the
 * attribute shape used for synthesized Terraform JSON.
 */
export declare function bigqueryDatasetAccessToTerraform(struct?: BigqueryDatasetAccess | cdktf.IResolvable): any;
/**
 * Same mapping as `bigqueryDatasetAccessToTerraform`, but producing the
 * metadata-wrapped shape used for HCL synthesis.
 */
export declare function bigqueryDatasetAccessToHclTerraform(struct?: BigqueryDatasetAccess | cdktf.IResolvable): any;
/**
 * Typed output-reference wrapper over one item of the `access` list of a
 * BigqueryDataset resource. Optional attributes each expose a getter,
 * setter, `reset…()` to unset, and an `…Input` getter returning the raw
 * configured value (undefined when unset).
 */
export declare class BigqueryDatasetAccessOutputReference extends cdktf.ComplexObject {
    private isEmptyObject;
    private resolvableValue?;
    /**
     * @param terraformResource The parent resource
     * @param terraformAttribute The attribute on the parent resource this class is referencing
     * @param complexObjectIndex the index of this item in the list
     * @param complexObjectIsFromSet whether the list is wrapping a set (will add tolist() to be able to access an item via an index)
     */
    constructor(terraformResource: cdktf.IInterpolatingParent, terraformAttribute: string, complexObjectIndex: number, complexObjectIsFromSet: boolean);
    // Full struct value; may be an unresolved cdktf token.
    get internalValue(): BigqueryDatasetAccess | cdktf.IResolvable | undefined;
    set internalValue(value: BigqueryDatasetAccess | cdktf.IResolvable | undefined);
    // domain (optional)
    private _domain?;
    get domain(): string;
    set domain(value: string);
    resetDomain(): void;
    get domainInput(): string | undefined;
    // group_by_email (optional)
    private _groupByEmail?;
    get groupByEmail(): string;
    set groupByEmail(value: string);
    resetGroupByEmail(): void;
    get groupByEmailInput(): string | undefined;
    // iam_member (optional)
    private _iamMember?;
    get iamMember(): string;
    set iamMember(value: string);
    resetIamMember(): void;
    get iamMemberInput(): string | undefined;
    // role (optional)
    private _role?;
    get role(): string;
    set role(value: string);
    resetRole(): void;
    get roleInput(): string | undefined;
    // special_group (optional)
    private _specialGroup?;
    get specialGroup(): string;
    set specialGroup(value: string);
    resetSpecialGroup(): void;
    get specialGroupInput(): string | undefined;
    // user_by_email (optional)
    private _userByEmail?;
    get userByEmail(): string;
    set userByEmail(value: string);
    resetUserByEmail(): void;
    get userByEmailInput(): string | undefined;
    // Nested dataset block (optional); set via putDataset.
    private _dataset;
    get dataset(): BigqueryDatasetAccessDatasetOutputReference;
    putDataset(value: BigqueryDatasetAccessDataset): void;
    resetDataset(): void;
    get datasetInput(): BigqueryDatasetAccessDataset | undefined;
    // Nested routine block (optional); set via putRoutine.
    private _routine;
    get routine(): BigqueryDatasetAccessRoutineOutputReference;
    putRoutine(value: BigqueryDatasetAccessRoutine): void;
    resetRoutine(): void;
    get routineInput(): BigqueryDatasetAccessRoutine | undefined;
    // Nested view block (optional); set via putView.
    private _view;
    get view(): BigqueryDatasetAccessViewOutputReference;
    putView(value: BigqueryDatasetAccessView): void;
    resetView(): void;
    get viewInput(): BigqueryDatasetAccessView | undefined;
}
/**
 * List wrapper over the `access` attribute of a BigqueryDataset resource;
 * items are addressed by index via {@link BigqueryDatasetAccessList.get}.
 */
export declare class BigqueryDatasetAccessList extends cdktf.ComplexList {
    protected terraformResource: cdktf.IInterpolatingParent;
    protected terraformAttribute: string;
    protected wrapsSet: boolean;
    // Full list value; may be an unresolved cdktf token.
    internalValue?: BigqueryDatasetAccess[] | cdktf.IResolvable;
    /**
     * @param terraformResource The parent resource
     * @param terraformAttribute The attribute on the parent resource this class is referencing
     * @param wrapsSet whether the list is wrapping a set (will add tolist() to be able to access an item via an index)
     */
    constructor(terraformResource: cdktf.IInterpolatingParent, terraformAttribute: string, wrapsSet: boolean);
    /**
     * @param index the index of the item to return
     */
    get(index: number): BigqueryDatasetAccessOutputReference;
}
/**
 * The `default_encryption_configuration` block: the Cloud KMS key applied
 * by default to tables in this dataset.
 */
export interface BigqueryDatasetDefaultEncryptionConfiguration {
    /**
     * Describes the Cloud KMS encryption key that will be used to protect destination
     * BigQuery table. The BigQuery Service Account associated with your project requires
     * access to this encryption key.
     *
     * Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/bigquery_dataset#kms_key_name BigqueryDataset#kms_key_name}
     */
    readonly kmsKeyName: string;
}
/**
 * Maps a `BigqueryDatasetDefaultEncryptionConfiguration` value (or its
 * output reference) to the attribute shape used for synthesized Terraform JSON.
 */
export declare function bigqueryDatasetDefaultEncryptionConfigurationToTerraform(struct?: BigqueryDatasetDefaultEncryptionConfigurationOutputReference | BigqueryDatasetDefaultEncryptionConfiguration): any;
/**
 * Same mapping as `bigqueryDatasetDefaultEncryptionConfigurationToTerraform`,
 * but producing the metadata-wrapped shape used for HCL synthesis.
 */
export declare function bigqueryDatasetDefaultEncryptionConfigurationToHclTerraform(struct?: BigqueryDatasetDefaultEncryptionConfigurationOutputReference | BigqueryDatasetDefaultEncryptionConfiguration): any;
/**
 * Typed output-reference wrapper over the `default_encryption_configuration`
 * attribute of a BigqueryDataset resource.
 */
export declare class BigqueryDatasetDefaultEncryptionConfigurationOutputReference extends cdktf.ComplexObject {
    private isEmptyObject;
    /**
     * @param terraformResource The parent resource
     * @param terraformAttribute The attribute on the parent resource this class is referencing
     */
    constructor(terraformResource: cdktf.IInterpolatingParent, terraformAttribute: string);
    // Full struct value; undefined when nothing has been set.
    get internalValue(): BigqueryDatasetDefaultEncryptionConfiguration | undefined;
    set internalValue(value: BigqueryDatasetDefaultEncryptionConfiguration | undefined);
    // kms_key_name (required on the struct)
    private _kmsKeyName?;
    get kmsKeyName(): string;
    set kmsKeyName(value: string);
    get kmsKeyNameInput(): string | undefined;
}
/**
 * The `external_dataset_reference` block: connection and source for a
 * dataset backed by an external source.
 */
export interface BigqueryDatasetExternalDatasetReference {
    /**
     * The connection id that is used to access the externalSource.
     * Format: projects/{projectId}/locations/{locationId}/connections/{connectionId}
     *
     * Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/bigquery_dataset#connection BigqueryDataset#connection}
     */
    readonly connection: string;
    /**
     * External source that backs this dataset.
     *
     * Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/bigquery_dataset#external_source BigqueryDataset#external_source}
     */
    readonly externalSource: string;
}
/**
 * Maps a `BigqueryDatasetExternalDatasetReference` value (or its output
 * reference) to the attribute shape used for synthesized Terraform JSON.
 */
export declare function bigqueryDatasetExternalDatasetReferenceToTerraform(struct?: BigqueryDatasetExternalDatasetReferenceOutputReference | BigqueryDatasetExternalDatasetReference): any;
/**
 * Same mapping as `bigqueryDatasetExternalDatasetReferenceToTerraform`,
 * but producing the metadata-wrapped shape used for HCL synthesis.
 */
export declare function bigqueryDatasetExternalDatasetReferenceToHclTerraform(struct?: BigqueryDatasetExternalDatasetReferenceOutputReference | BigqueryDatasetExternalDatasetReference): any;
/**
 * Typed output-reference wrapper over the `external_dataset_reference`
 * attribute of a BigqueryDataset resource.
 */
export declare class BigqueryDatasetExternalDatasetReferenceOutputReference extends cdktf.ComplexObject {
    private isEmptyObject;
    /**
     * @param terraformResource The parent resource
     * @param terraformAttribute The attribute on the parent resource this class is referencing
     */
    constructor(terraformResource: cdktf.IInterpolatingParent, terraformAttribute: string);
    // Full struct value; undefined when nothing has been set.
    get internalValue(): BigqueryDatasetExternalDatasetReference | undefined;
    set internalValue(value: BigqueryDatasetExternalDatasetReference | undefined);
    // connection (required on the struct)
    private _connection?;
    get connection(): string;
    set connection(value: string);
    get connectionInput(): string | undefined;
    // external_source (required on the struct)
    private _externalSource?;
    get externalSource(): string;
    set externalSource(value: string);
    get externalSourceInput(): string | undefined;
}
/**
 * The `timeouts` block: per-operation timeout strings for the resource's
 * create/delete/update operations.
 */
export interface BigqueryDatasetTimeouts {
    /**
     * Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/bigquery_dataset#create BigqueryDataset#create}
     */
    readonly create?: string;
    /**
     * Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/bigquery_dataset#delete BigqueryDataset#delete}
     */
    readonly delete?: string;
    /**
     * Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/bigquery_dataset#update BigqueryDataset#update}
     */
    readonly update?: string;
}
/**
 * Maps a `BigqueryDatasetTimeouts` value (or an unresolved token) to the
 * attribute shape used for synthesized Terraform JSON.
 */
export declare function bigqueryDatasetTimeoutsToTerraform(struct?: BigqueryDatasetTimeouts | cdktf.IResolvable): any;
/**
 * Same mapping as `bigqueryDatasetTimeoutsToTerraform`, but producing the
 * metadata-wrapped shape used for HCL synthesis.
 */
export declare function bigqueryDatasetTimeoutsToHclTerraform(struct?: BigqueryDatasetTimeouts | cdktf.IResolvable): any;
/**
 * Typed output-reference wrapper over the `timeouts` attribute of a
 * BigqueryDataset resource. Each optional timeout exposes a getter, setter,
 * `reset…()` to unset, and an `…Input` getter for the raw configured value.
 */
export declare class BigqueryDatasetTimeoutsOutputReference extends cdktf.ComplexObject {
    private isEmptyObject;
    private resolvableValue?;
    /**
     * @param terraformResource The parent resource
     * @param terraformAttribute The attribute on the parent resource this class is referencing
     */
    constructor(terraformResource: cdktf.IInterpolatingParent, terraformAttribute: string);
    // Full struct value; may be an unresolved cdktf token.
    get internalValue(): BigqueryDatasetTimeouts | cdktf.IResolvable | undefined;
    set internalValue(value: BigqueryDatasetTimeouts | cdktf.IResolvable | undefined);
    // create timeout (optional)
    private _create?;
    get create(): string;
    set create(value: string);
    resetCreate(): void;
    get createInput(): string | undefined;
    // delete timeout (optional)
    private _delete?;
    get delete(): string;
    set delete(value: string);
    resetDelete(): void;
    get deleteInput(): string | undefined;
    // update timeout (optional)
    private _update?;
    get update(): string;
    set update(value: string);
    resetUpdate(): void;
    get updateInput(): string | undefined;
}
/**
 * Represents a {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/bigquery_dataset google_bigquery_dataset}
 *
 * Attributes follow the cdktf convention: settable arguments expose a
 * getter/setter pair plus (when optional) a `reset…()` and an `…Input`
 * getter returning the raw configured value; getter-only members are
 * computed attributes read back from state.
 */
export declare class BigqueryDataset extends cdktf.TerraformResource {
    static readonly tfResourceType = "google_bigquery_dataset";
    /**
     * Generates CDKTF code for importing a BigqueryDataset resource upon running "cdktf plan <stack-name>"
     * @param scope The scope in which to define this construct
     * @param importToId The construct id used in the generated config for the BigqueryDataset to import
     * @param importFromId The id of the existing BigqueryDataset that should be imported. Refer to the {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/bigquery_dataset#import import section} in the documentation of this resource for the id to use
     * @param provider? Optional instance of the provider where the BigqueryDataset to import is found
     */
    static generateConfigForImport(scope: Construct, importToId: string, importFromId: string, provider?: cdktf.TerraformProvider): cdktf.ImportableResource;
    /**
     * Create a new {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/bigquery_dataset google_bigquery_dataset} Resource
     *
     * @param scope The scope in which to define this construct
     * @param id The scoped construct ID. Must be unique amongst siblings in the same scope
     * @param options BigqueryDatasetConfig
     */
    constructor(scope: Construct, id: string, config: BigqueryDatasetConfig);
    // creation_time — computed (getter only)
    get creationTime(): number;
    // dataset_id — required argument
    private _datasetId?;
    get datasetId(): string;
    set datasetId(value: string);
    get datasetIdInput(): string | undefined;
    // default_collation — optional argument
    private _defaultCollation?;
    get defaultCollation(): string;
    set defaultCollation(value: string);
    resetDefaultCollation(): void;
    get defaultCollationInput(): string | undefined;
    // default_partition_expiration_ms — optional argument
    private _defaultPartitionExpirationMs?;
    get defaultPartitionExpirationMs(): number;
    set defaultPartitionExpirationMs(value: number);
    resetDefaultPartitionExpirationMs(): void;
    get defaultPartitionExpirationMsInput(): number | undefined;
    // default_table_expiration_ms — optional argument
    private _defaultTableExpirationMs?;
    get defaultTableExpirationMs(): number;
    set defaultTableExpirationMs(value: number);
    resetDefaultTableExpirationMs(): void;
    get defaultTableExpirationMsInput(): number | undefined;
    // delete_contents_on_destroy — optional argument
    private _deleteContentsOnDestroy?;
    get deleteContentsOnDestroy(): boolean | cdktf.IResolvable;
    set deleteContentsOnDestroy(value: boolean | cdktf.IResolvable);
    resetDeleteContentsOnDestroy(): void;
    get deleteContentsOnDestroyInput(): boolean | cdktf.IResolvable | undefined;
    // description — optional argument
    private _description?;
    get description(): string;
    set description(value: string);
    resetDescription(): void;
    get descriptionInput(): string | undefined;
    // effective_labels — computed (getter only)
    private _effectiveLabels;
    get effectiveLabels(): cdktf.StringMap;
    // etag — computed (getter only)
    get etag(): string;
    // friendly_name — optional argument
    private _friendlyName?;
    get friendlyName(): string;
    set friendlyName(value: string);
    resetFriendlyName(): void;
    get friendlyNameInput(): string | undefined;
    // id — optional argument (see BigqueryDatasetConfig#id caveats)
    private _id?;
    get id(): string;
    set id(value: string);
    resetId(): void;
    get idInput(): string | undefined;
    // is_case_insensitive — optional argument
    private _isCaseInsensitive?;
    get isCaseInsensitive(): boolean | cdktf.IResolvable;
    set isCaseInsensitive(value: boolean | cdktf.IResolvable);
    resetIsCaseInsensitive(): void;
    get isCaseInsensitiveInput(): boolean | cdktf.IResolvable | undefined;
    // labels — optional argument (non-authoritative; see effective_labels)
    private _labels?;
    get labels(): {
        [key: string]: string;
    };
    set labels(value: {
        [key: string]: string;
    });
    resetLabels(): void;
    get labelsInput(): {
        [key: string]: string;
    } | undefined;
    // last_modified_time — computed (getter only)
    get lastModifiedTime(): number;
    // location — optional argument
    private _location?;
    get location(): string;
    set location(value: string);
    resetLocation(): void;
    get locationInput(): string | undefined;
    // max_time_travel_hours — optional argument
    private _maxTimeTravelHours?;
    get maxTimeTravelHours(): string;
    set maxTimeTravelHours(value: string);
    resetMaxTimeTravelHours(): void;
    get maxTimeTravelHoursInput(): string | undefined;
    // project — optional argument
    private _project?;
    get project(): string;
    set project(value: string);
    resetProject(): void;
    get projectInput(): string | undefined;
    // resource_tags — optional argument
    private _resourceTags?;
    get resourceTags(): {
        [key: string]: string;
    };
    set resourceTags(value: {
        [key: string]: string;
    });
    resetResourceTags(): void;
    get resourceTagsInput(): {
        [key: string]: string;
    } | undefined;
    // self_link — computed (getter only)
    get selfLink(): string;
    // storage_billing_model — optional argument
    private _storageBillingModel?;
    get storageBillingModel(): string;
    set storageBillingModel(value: string);
    resetStorageBillingModel(): void;
    get storageBillingModelInput(): string | undefined;
    // terraform_labels — computed (getter only)
    private _terraformLabels;
    get terraformLabels(): cdktf.StringMap;
    // access — optional list block; set via putAccess
    private _access;
    get access(): BigqueryDatasetAccessList;
    putAccess(value: BigqueryDatasetAccess[] | cdktf.IResolvable): void;
    resetAccess(): void;
    get accessInput(): cdktf.IResolvable | BigqueryDatasetAccess[] | undefined;
    // default_encryption_configuration — optional block; set via put…
    private _defaultEncryptionConfiguration;
    get defaultEncryptionConfiguration(): BigqueryDatasetDefaultEncryptionConfigurationOutputReference;
    putDefaultEncryptionConfiguration(value: BigqueryDatasetDefaultEncryptionConfiguration): void;
    resetDefaultEncryptionConfiguration(): void;
    get defaultEncryptionConfigurationInput(): BigqueryDatasetDefaultEncryptionConfiguration | undefined;
    // external_dataset_reference — optional block; set via put…
    private _externalDatasetReference;
    get externalDatasetReference(): BigqueryDatasetExternalDatasetReferenceOutputReference;
    putExternalDatasetReference(value: BigqueryDatasetExternalDatasetReference): void;
    resetExternalDatasetReference(): void;
    get externalDatasetReferenceInput(): BigqueryDatasetExternalDatasetReference | undefined;
    // timeouts — optional block; set via putTimeouts
    private _timeouts;
    get timeouts(): BigqueryDatasetTimeoutsOutputReference;
    putTimeouts(value: BigqueryDatasetTimeouts): void;
    resetTimeouts(): void;
    get timeoutsInput(): cdktf.IResolvable | BigqueryDatasetTimeouts | undefined;
    // Serialization hooks used by cdktf during synthesis.
    protected synthesizeAttributes(): {
        [name: string]: any;
    };
    protected synthesizeHclAttributes(): {
        [name: string]: any;
    };
}