@cdktf/provider-google
Prebuilt google Provider for Terraform CDK (cdktf)
/**
 * Copyright (c) HashiCorp, Inc.
 * SPDX-License-Identifier: MPL-2.0
 */
import { Construct } from 'constructs';
import * as cdktf from 'cdktf';
export interface BigqueryConnectionConfig extends cdktf.TerraformMetaArguments {
  /**
   * Optional connection id that should be assigned to the created connection.
   * Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.36.0/docs/resources/bigquery_connection#connection_id BigqueryConnection#connection_id}
   */
  readonly connectionId?: string;
  /**
   * A descriptive description for the connection
   * Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.36.0/docs/resources/bigquery_connection#description BigqueryConnection#description}
   */
  readonly description?: string;
  /**
   * A descriptive name for the connection
   * Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.36.0/docs/resources/bigquery_connection#friendly_name BigqueryConnection#friendly_name}
   */
  readonly friendlyName?: string;
  /**
   * Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.36.0/docs/resources/bigquery_connection#id BigqueryConnection#id}
   *
   * Please be aware that the id field is automatically added to all resources in Terraform providers using a Terraform provider SDK version below 2.
   * If you experience problems setting this value it might not be settable. Please take a look at the provider documentation to ensure it should be settable.
   */
  readonly id?: string;
  /**
   * Optional. The Cloud KMS key that is used for encryption.
   * Example: projects/[kms_project_id]/locations/[region]/keyRings/[key_region]/cryptoKeys/[key]
   * Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.36.0/docs/resources/bigquery_connection#kms_key_name BigqueryConnection#kms_key_name}
   */
  readonly kmsKeyName?: string;
  /**
   * The geographic location where the connection should reside.
   * Cloud SQL instance must be in the same location as the connection
   * with following exceptions: Cloud SQL us-central1 maps to BigQuery US, Cloud SQL europe-west1 maps to BigQuery EU.
   * Examples: US, EU, asia-northeast1, us-central1, europe-west1.
   * Spanner Connections same as spanner region
   * AWS allowed regions are aws-us-east-1
   * Azure allowed regions are azure-eastus2
   * Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.36.0/docs/resources/bigquery_connection#location BigqueryConnection#location}
   */
  readonly location?: string;
  /**
   * Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.36.0/docs/resources/bigquery_connection#project BigqueryConnection#project}
   */
  readonly project?: string;
  /**
   * aws block
   * Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.36.0/docs/resources/bigquery_connection#aws BigqueryConnection#aws}
   */
  readonly aws?: BigqueryConnectionAws;
  /**
   * azure block
   * Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.36.0/docs/resources/bigquery_connection#azure BigqueryConnection#azure}
   */
  readonly azure?: BigqueryConnectionAzure;
  /**
   * cloud_resource block
   * Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.36.0/docs/resources/bigquery_connection#cloud_resource BigqueryConnection#cloud_resource}
   */
  readonly cloudResource?: BigqueryConnectionCloudResource;
  /**
   * cloud_spanner block
   * Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.36.0/docs/resources/bigquery_connection#cloud_spanner BigqueryConnection#cloud_spanner}
   */
  readonly cloudSpanner?: BigqueryConnectionCloudSpanner;
  /**
   * cloud_sql block
   * Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.36.0/docs/resources/bigquery_connection#cloud_sql BigqueryConnection#cloud_sql}
   */
  readonly cloudSql?: BigqueryConnectionCloudSql;
  /**
   * spark block
   * Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.36.0/docs/resources/bigquery_connection#spark BigqueryConnection#spark}
   */
  readonly spark?: BigqueryConnectionSpark;
  /**
   * timeouts block
   * Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.36.0/docs/resources/bigquery_connection#timeouts BigqueryConnection#timeouts}
   */
  readonly timeouts?: BigqueryConnectionTimeouts;
}
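/*
 * Usage sketch (illustrative, not part of the generated declarations): a minimal cdktf stack that
 * creates a cloud_resource connection via BigqueryConnectionConfig. The project id, connection id,
 * and the "lib/..." import paths are assumptions based on the prebuilt provider's package layout.
 *
 *   import { Construct } from "constructs";
 *   import { App, TerraformStack } from "cdktf";
 *   import { GoogleProvider } from "@cdktf/provider-google/lib/provider";
 *   import { BigqueryConnection } from "@cdktf/provider-google/lib/bigquery-connection";
 *
 *   class ConnectionStack extends TerraformStack {
 *     constructor(scope: Construct, id: string) {
 *       super(scope, id);
 *       new GoogleProvider(this, "google", { project: "my-project", region: "us-central1" });
 *       // cloud_resource takes no arguments; the service account is created server-side and is
 *       // surfaced through the cloudResource.serviceAccountId attribute after apply.
 *       new BigqueryConnection(this, "conn", {
 *         connectionId: "my-cloud-resource-connection",
 *         location: "US",
 *         friendlyName: "Cloud resource connection",
 *         description: "Connection used by BigLake tables",
 *         cloudResource: {},
 *       });
 *     }
 *   }
 *
 *   const app = new App();
 *   new ConnectionStack(app, "bigquery-connection");
 *   app.synth();
 */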
export interface BigqueryConnectionAwsAccessRole {
  /**
   * The user’s AWS IAM Role that trusts the Google-owned AWS IAM user Connection.
   * Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.36.0/docs/resources/bigquery_connection#iam_role_id BigqueryConnection#iam_role_id}
   */
  readonly iamRoleId: string;
}
export declare function bigqueryConnectionAwsAccessRoleToTerraform(struct?: BigqueryConnectionAwsAccessRoleOutputReference | BigqueryConnectionAwsAccessRole): any;
export declare function bigqueryConnectionAwsAccessRoleToHclTerraform(struct?: BigqueryConnectionAwsAccessRoleOutputReference | BigqueryConnectionAwsAccessRole): any;
export declare class BigqueryConnectionAwsAccessRoleOutputReference extends cdktf.ComplexObject {
  private isEmptyObject;
  /**
   * @param terraformResource The parent resource
   * @param terraformAttribute The attribute on the parent resource this class is referencing
   */
  constructor(terraformResource: cdktf.IInterpolatingParent, terraformAttribute: string);
  get internalValue(): BigqueryConnectionAwsAccessRole | undefined;
  set internalValue(value: BigqueryConnectionAwsAccessRole | undefined);
  private _iamRoleId?;
  get iamRoleId(): string;
  set iamRoleId(value: string);
  get iamRoleIdInput(): string | undefined;
  get identity(): string;
}
export interface BigqueryConnectionAws {
  /**
   * access_role block
   * Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.36.0/docs/resources/bigquery_connection#access_role BigqueryConnection#access_role}
   */
  readonly accessRole: BigqueryConnectionAwsAccessRole;
}
export declare function bigqueryConnectionAwsToTerraform(struct?: BigqueryConnectionAwsOutputReference | BigqueryConnectionAws): any;
export declare function bigqueryConnectionAwsToHclTerraform(struct?: BigqueryConnectionAwsOutputReference | BigqueryConnectionAws): any;
export declare class BigqueryConnectionAwsOutputReference extends cdktf.ComplexObject {
  private isEmptyObject;
  /**
   * @param terraformResource The parent resource
   * @param terraformAttribute The attribute on the parent resource this class is referencing
   */
  constructor(terraformResource: cdktf.IInterpolatingParent, terraformAttribute: string);
  get internalValue(): BigqueryConnectionAws | undefined;
  set internalValue(value: BigqueryConnectionAws | undefined);
  private _accessRole;
  get accessRole(): BigqueryConnectionAwsAccessRoleOutputReference;
  putAccessRole(value: BigqueryConnectionAwsAccessRole): void;
  get accessRoleInput(): BigqueryConnectionAwsAccessRole | undefined;
}
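/*
 * Usage sketch for the aws block (illustrative): the nested access_role is required, and the IAM
 * role ARN below is a placeholder. Per the location docs above, AWS connections use aws-us-east-1.
 *
 *   new BigqueryConnection(this, "aws-conn", {
 *     connectionId: "my-aws-connection",
 *     location: "aws-us-east-1",
 *     aws: {
 *       accessRole: {
 *         iamRoleId: "arn:aws:iam::123456789012:role/bigquery-omni-access", // placeholder ARN
 *       },
 *     },
 *   });
 */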
export interface BigqueryConnectionAzure {
  /**
   * The id of customer's directory that host the data.
   * Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.36.0/docs/resources/bigquery_connection#customer_tenant_id BigqueryConnection#customer_tenant_id}
   */
  readonly customerTenantId: string;
  /**
   * The Azure Application (client) ID where the federated credentials will be hosted.
   * Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.36.0/docs/resources/bigquery_connection#federated_application_client_id BigqueryConnection#federated_application_client_id}
   */
  readonly federatedApplicationClientId?: string;
}
export declare function bigqueryConnectionAzureToTerraform(struct?: BigqueryConnectionAzureOutputReference | BigqueryConnectionAzure): any;
export declare function bigqueryConnectionAzureToHclTerraform(struct?: BigqueryConnectionAzureOutputReference | BigqueryConnectionAzure): any;
export declare class BigqueryConnectionAzureOutputReference extends cdktf.ComplexObject {
  private isEmptyObject;
  /**
   * @param terraformResource The parent resource
   * @param terraformAttribute The attribute on the parent resource this class is referencing
   */
  constructor(terraformResource: cdktf.IInterpolatingParent, terraformAttribute: string);
  get internalValue(): BigqueryConnectionAzure | undefined;
  set internalValue(value: BigqueryConnectionAzure | undefined);
  get application(): string;
  get clientId(): string;
  private _customerTenantId?;
  get customerTenantId(): string;
  set customerTenantId(value: string);
  get customerTenantIdInput(): string | undefined;
  private _federatedApplicationClientId?;
  get federatedApplicationClientId(): string;
  set federatedApplicationClientId(value: string);
  resetFederatedApplicationClientId(): void;
  get federatedApplicationClientIdInput(): string | undefined;
  get identity(): string;
  get objectId(): string;
  get redirectUri(): string;
}
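/*
 * Usage sketch for the azure block (illustrative): customer_tenant_id is required, while
 * federated_application_client_id is optional; both GUIDs below are placeholders. The application,
 * clientId, identity, objectId, and redirectUri getters on the output reference are computed
 * attributes populated after apply.
 *
 *   new BigqueryConnection(this, "azure-conn", {
 *     connectionId: "my-azure-connection",
 *     location: "azure-eastus2",
 *     azure: {
 *       customerTenantId: "00000000-0000-0000-0000-000000000000",             // placeholder tenant id
 *       federatedApplicationClientId: "11111111-1111-1111-1111-111111111111", // placeholder client id
 *     },
 *   });
 */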
export interface BigqueryConnectionCloudResource {
}
export declare function bigqueryConnectionCloudResourceToTerraform(struct?: BigqueryConnectionCloudResourceOutputReference | BigqueryConnectionCloudResource): any;
export declare function bigqueryConnectionCloudResourceToHclTerraform(struct?: BigqueryConnectionCloudResourceOutputReference | BigqueryConnectionCloudResource): any;
export declare class BigqueryConnectionCloudResourceOutputReference extends cdktf.ComplexObject {
  private isEmptyObject;
  /**
   * @param terraformResource The parent resource
   * @param terraformAttribute The attribute on the parent resource this class is referencing
   */
  constructor(terraformResource: cdktf.IInterpolatingParent, terraformAttribute: string);
  get internalValue(): BigqueryConnectionCloudResource | undefined;
  set internalValue(value: BigqueryConnectionCloudResource | undefined);
  get serviceAccountId(): string;
}
export interface BigqueryConnectionCloudSpanner {
  /**
   * Cloud Spanner database in the form 'project/instance/database'.
   * Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.36.0/docs/resources/bigquery_connection#database BigqueryConnection#database}
   */
  readonly database: string;
  /**
   * Cloud Spanner database role for fine-grained access control. The Cloud Spanner admin should have provisioned the database role with appropriate permissions, such as 'SELECT' and 'INSERT'. Other users should only use roles provided by their Cloud Spanner admins. The database role name must start with a letter, and can only contain letters, numbers, and underscores. For more details, see https://cloud.google.com/spanner/docs/fgac-about.
   * Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.36.0/docs/resources/bigquery_connection#database_role BigqueryConnection#database_role}
   */
  readonly databaseRole?: string;
  /**
   * Allows setting max parallelism per query when executing on Spanner independent compute resources. If unspecified, default values of parallelism are chosen that are dependent on the Cloud Spanner instance configuration. 'useParallelism' and 'useDataBoost' must be set when setting max parallelism.
   * Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.36.0/docs/resources/bigquery_connection#max_parallelism BigqueryConnection#max_parallelism}
   */
  readonly maxParallelism?: number;
  /**
   * If set, the request will be executed via Spanner independent compute resources. 'use_parallelism' must be set when using data boost.
   * Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.36.0/docs/resources/bigquery_connection#use_data_boost BigqueryConnection#use_data_boost}
   */
  readonly useDataBoost?: boolean | cdktf.IResolvable;
  /**
   * If parallelism should be used when reading from Cloud Spanner.
   * Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.36.0/docs/resources/bigquery_connection#use_parallelism BigqueryConnection#use_parallelism}
   */
  readonly useParallelism?: boolean | cdktf.IResolvable;
  /**
   * If the serverless analytics service should be used to read data from Cloud Spanner. 'useParallelism' must be set when using serverless analytics.
   * Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.36.0/docs/resources/bigquery_connection#use_serverless_analytics BigqueryConnection#use_serverless_analytics}
   */
  readonly useServerlessAnalytics?: boolean | cdktf.IResolvable;
}
export declare function bigqueryConnectionCloudSpannerToTerraform(struct?: BigqueryConnectionCloudSpannerOutputReference | BigqueryConnectionCloudSpanner): any;
export declare function bigqueryConnectionCloudSpannerToHclTerraform(struct?: BigqueryConnectionCloudSpannerOutputReference | BigqueryConnectionCloudSpanner): any;
export declare class BigqueryConnectionCloudSpannerOutputReference extends cdktf.ComplexObject {
  private isEmptyObject;
  /**
   * @param terraformResource The parent resource
   * @param terraformAttribute The attribute on the parent resource this class is referencing
   */
  constructor(terraformResource: cdktf.IInterpolatingParent, terraformAttribute: string);
  get internalValue(): BigqueryConnectionCloudSpanner | undefined;
  set internalValue(value: BigqueryConnectionCloudSpanner | undefined);
  private _database?;
  get database(): string;
  set database(value: string);
  get databaseInput(): string | undefined;
  private _databaseRole?;
  get databaseRole(): string;
  set databaseRole(value: string);
  resetDatabaseRole(): void;
  get databaseRoleInput(): string | undefined;
  private _maxParallelism?;
  get maxParallelism(): number;
  set maxParallelism(value: number);
  resetMaxParallelism(): void;
  get maxParallelismInput(): number | undefined;
  private _useDataBoost?;
  get useDataBoost(): boolean | cdktf.IResolvable;
  set useDataBoost(value: boolean | cdktf.IResolvable);
  resetUseDataBoost(): void;
  get useDataBoostInput(): boolean | cdktf.IResolvable | undefined;
  private _useParallelism?;
  get useParallelism(): boolean | cdktf.IResolvable;
  set useParallelism(value: boolean | cdktf.IResolvable);
  resetUseParallelism(): void;
  get useParallelismInput(): boolean | cdktf.IResolvable | undefined;
  private _useServerlessAnalytics?;
  get useServerlessAnalytics(): boolean | cdktf.IResolvable;
  set useServerlessAnalytics(value: boolean | cdktf.IResolvable);
  resetUseServerlessAnalytics(): void;
  get useServerlessAnalyticsInput(): boolean | cdktf.IResolvable | undefined;
}
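/*
 * Usage sketch for the cloud_spanner block (illustrative): per the field docs above, useParallelism
 * and useDataBoost must both be set when maxParallelism is specified. The project, instance,
 * database, and role names are placeholders, and the full-resource-path form of the database value
 * is an assumption taken from common registry examples rather than from this file.
 *
 *   new BigqueryConnection(this, "spanner-conn", {
 *     connectionId: "my-spanner-connection",
 *     location: "us-central1",
 *     cloudSpanner: {
 *       database: "projects/my-project/instances/my-instance/databases/my-db", // placeholder
 *       databaseRole: "analytics_reader",                                      // placeholder role
 *       useParallelism: true,
 *       useDataBoost: true,
 *       maxParallelism: 100,
 *     },
 *   });
 */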
export interface BigqueryConnectionCloudSqlCredential {
  /**
   * Password for database.
   * Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.36.0/docs/resources/bigquery_connection#password BigqueryConnection#password}
   */
  readonly password: string;
  /**
   * Username for database.
   * Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.36.0/docs/resources/bigquery_connection#username BigqueryConnection#username}
   */
  readonly username: string;
}
export declare function bigqueryConnectionCloudSqlCredentialToTerraform(struct?: BigqueryConnectionCloudSqlCredentialOutputReference | BigqueryConnectionCloudSqlCredential): any;
export declare function bigqueryConnectionCloudSqlCredentialToHclTerraform(struct?: BigqueryConnectionCloudSqlCredentialOutputReference | BigqueryConnectionCloudSqlCredential): any;
export declare class BigqueryConnectionCloudSqlCredentialOutputReference extends cdktf.ComplexObject {
  private isEmptyObject;
  /**
   * @param terraformResource The parent resource
   * @param terraformAttribute The attribute on the parent resource this class is referencing
   */
  constructor(terraformResource: cdktf.IInterpolatingParent, terraformAttribute: string);
  get internalValue(): BigqueryConnectionCloudSqlCredential | undefined;
  set internalValue(value: BigqueryConnectionCloudSqlCredential | undefined);
  private _password?;
  get password(): string;
  set password(value: string);
  get passwordInput(): string | undefined;
  private _username?;
  get username(): string;
  set username(value: string);
  get usernameInput(): string | undefined;
}
export interface BigqueryConnectionCloudSql {
  /**
   * Database name.
   * Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.36.0/docs/resources/bigquery_connection#database BigqueryConnection#database}
   */
  readonly database: string;
  /**
   * Cloud SQL instance ID in the form project:location:instance.
   * Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.36.0/docs/resources/bigquery_connection#instance_id BigqueryConnection#instance_id}
   */
  readonly instanceId: string;
  /**
   * Type of the Cloud SQL database. Possible values: ["DATABASE_TYPE_UNSPECIFIED", "POSTGRES", "MYSQL"]
   * Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.36.0/docs/resources/bigquery_connection#type BigqueryConnection#type}
   */
  readonly type: string;
  /**
   * credential block
   * Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.36.0/docs/resources/bigquery_connection#credential BigqueryConnection#credential}
   */
  readonly credential: BigqueryConnectionCloudSqlCredential;
}
export declare function bigqueryConnectionCloudSqlToTerraform(struct?: BigqueryConnectionCloudSqlOutputReference | BigqueryConnectionCloudSql): any;
export declare function bigqueryConnectionCloudSqlToHclTerraform(struct?: BigqueryConnectionCloudSqlOutputReference | BigqueryConnectionCloudSql): any;
export declare class BigqueryConnectionCloudSqlOutputReference extends cdktf.ComplexObject {
  private isEmptyObject;
  /**
   * @param terraformResource The parent resource
   * @param terraformAttribute The attribute on the parent resource this class is referencing
   */
  constructor(terraformResource: cdktf.IInterpolatingParent, terraformAttribute: string);
  get internalValue(): BigqueryConnectionCloudSql | undefined;
  set internalValue(value: BigqueryConnectionCloudSql | undefined);
  private _database?;
  get database(): string;
  set database(value: string);
  get databaseInput(): string | undefined;
  private _instanceId?;
  get instanceId(): string;
  set instanceId(value: string);
  get instanceIdInput(): string | undefined;
  get serviceAccountId(): string;
  private _type?;
  get type(): string;
  set type(value: string);
  get typeInput(): string | undefined;
  private _credential;
  get credential(): BigqueryConnectionCloudSqlCredentialOutputReference;
  putCredential(value: BigqueryConnectionCloudSqlCredential): void;
  get credentialInput(): BigqueryConnectionCloudSqlCredential | undefined;
}
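/*
 * Usage sketch for the cloud_sql block (illustrative): database, instance_id, type, and the nested
 * credential block are all required by the interfaces above. The values shown are placeholders; in
 * real code the credential would normally come from a sensitive Terraform variable or a secret
 * store rather than being hard-coded.
 *
 *   new BigqueryConnection(this, "sql-conn", {
 *     connectionId: "my-cloud-sql-connection",
 *     location: "us-central1",
 *     cloudSql: {
 *       instanceId: "my-project:us-central1:my-instance", // project:location:instance
 *       database: "orders",
 *       type: "POSTGRES",                                 // DATABASE_TYPE_UNSPECIFIED, POSTGRES, or MYSQL
 *       credential: {
 *         username: "bq_reader",
 *         password: "change-me",                          // placeholder; do not hard-code real credentials
 *       },
 *     },
 *   });
 */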
export interface BigqueryConnectionSparkMetastoreServiceConfig {
  /**
   * Resource name of an existing Dataproc Metastore service in the form of projects/[projectId]/locations/[region]/services/[serviceId].
   * Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.36.0/docs/resources/bigquery_connection#metastore_service BigqueryConnection#metastore_service}
   */
  readonly metastoreService?: string;
}
export declare function bigqueryConnectionSparkMetastoreServiceConfigToTerraform(struct?: BigqueryConnectionSparkMetastoreServiceConfigOutputReference | BigqueryConnectionSparkMetastoreServiceConfig): any;
export declare function bigqueryConnectionSparkMetastoreServiceConfigToHclTerraform(struct?: BigqueryConnectionSparkMetastoreServiceConfigOutputReference | BigqueryConnectionSparkMetastoreServiceConfig): any;
export declare class BigqueryConnectionSparkMetastoreServiceConfigOutputReference extends cdktf.ComplexObject {
  private isEmptyObject;
  /**
   * @param terraformResource The parent resource
   * @param terraformAttribute The attribute on the parent resource this class is referencing
   */
  constructor(terraformResource: cdktf.IInterpolatingParent, terraformAttribute: string);
  get internalValue(): BigqueryConnectionSparkMetastoreServiceConfig | undefined;
  set internalValue(value: BigqueryConnectionSparkMetastoreServiceConfig | undefined);
  private _metastoreService?;
  get metastoreService(): string;
  set metastoreService(value: string);
  resetMetastoreService(): void;
  get metastoreServiceInput(): string | undefined;
}
export interface BigqueryConnectionSparkSparkHistoryServerConfig {
  /**
   * Resource name of an existing Dataproc Cluster to act as a Spark History Server for the connection if the form of projects/[projectId]/regions/[region]/clusters/[cluster_name].
   * Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.36.0/docs/resources/bigquery_connection#dataproc_cluster BigqueryConnection#dataproc_cluster}
   */
  readonly dataprocCluster?: string;
}
export declare function bigqueryConnectionSparkSparkHistoryServerConfigToTerraform(struct?: BigqueryConnectionSparkSparkHistoryServerConfigOutputReference | BigqueryConnectionSparkSparkHistoryServerConfig): any;
export declare function bigqueryConnectionSparkSparkHistoryServerConfigToHclTerraform(struct?: BigqueryConnectionSparkSparkHistoryServerConfigOutputReference | BigqueryConnectionSparkSparkHistoryServerConfig): any;
export declare class BigqueryConnectionSparkSparkHistoryServerConfigOutputReference extends cdktf.ComplexObject {
  private isEmptyObject;
  /**
   * @param terraformResource The parent resource
   * @param terraformAttribute The attribute on the parent resource this class is referencing
   */
  constructor(terraformResource: cdktf.IInterpolatingParent, terraformAttribute: string);
  get internalValue(): BigqueryConnectionSparkSparkHistoryServerConfig | undefined;
  set internalValue(value: BigqueryConnectionSparkSparkHistoryServerConfig | undefined);
  private _dataprocCluster?;
  get dataprocCluster(): string;
  set dataprocCluster(value: string);
  resetDataprocCluster(): void;
  get dataprocClusterInput(): string | undefined;
}
export interface BigqueryConnectionSpark {
  /**
   * metastore_service_config block
   * Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.36.0/docs/resources/bigquery_connection#metastore_service_config BigqueryConnection#metastore_service_config}
   */
  readonly metastoreServiceConfig?: BigqueryConnectionSparkMetastoreServiceConfig;
  /**
   * spark_history_server_config block
   * Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.36.0/docs/resources/bigquery_connection#spark_history_server_config BigqueryConnection#spark_history_server_config}
   */
  readonly sparkHistoryServerConfig?: BigqueryConnectionSparkSparkHistoryServerConfig;
}
export declare function bigqueryConnectionSparkToTerraform(struct?: BigqueryConnectionSparkOutputReference | BigqueryConnectionSpark): any;
export declare function bigqueryConnectionSparkToHclTerraform(struct?: BigqueryConnectionSparkOutputReference | BigqueryConnectionSpark): any;
export declare class BigqueryConnectionSparkOutputReference extends cdktf.ComplexObject {
  private isEmptyObject;
  /**
   * @param terraformResource The parent resource
   * @param terraformAttribute The attribute on the parent resource this class is referencing
   */
  constructor(terraformResource: cdktf.IInterpolatingParent, terraformAttribute: string);
  get internalValue(): BigqueryConnectionSpark | undefined;
  set internalValue(value: BigqueryConnectionSpark | undefined);
  get serviceAccountId(): string;
  private _metastoreServiceConfig;
  get metastoreServiceConfig(): BigqueryConnectionSparkMetastoreServiceConfigOutputReference;
  putMetastoreServiceConfig(value: BigqueryConnectionSparkMetastoreServiceConfig): void;
  resetMetastoreServiceConfig(): void;
  get metastoreServiceConfigInput(): BigqueryConnectionSparkMetastoreServiceConfig | undefined;
  private _sparkHistoryServerConfig;
  get sparkHistoryServerConfig(): BigqueryConnectionSparkSparkHistoryServerConfigOutputReference;
  putSparkHistoryServerConfig(value: BigqueryConnectionSparkSparkHistoryServerConfig): void;
  resetSparkHistoryServerConfig(): void;
  get sparkHistoryServerConfigInput(): BigqueryConnectionSparkSparkHistoryServerConfig | undefined;
}
export interface BigqueryConnectionTimeouts {
  /**
   * Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.36.0/docs/resources/bigquery_connection#create BigqueryConnection#create}
   */
  readonly create?: string;
  /**
   * Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.36.0/docs/resources/bigquery_connection#delete BigqueryConnection#delete}
   */
  readonly delete?: string;
  /**
   * Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.36.0/docs/resources/bigquery_connection#update BigqueryConnection#update}
   */
  readonly update?: string;
}
export declare function bigqueryConnectionTimeoutsToTerraform(struct?: BigqueryConnectionTimeouts | cdktf.IResolvable): any;
export declare function bigqueryConnectionTimeoutsToHclTerraform(struct?: BigqueryConnectionTimeouts | cdktf.IResolvable): any;
export declare class BigqueryConnectionTimeoutsOutputReference extends cdktf.ComplexObject {
  private isEmptyObject;
  private resolvableValue?;
  /**
   * @param terraformResource The parent resource
   * @param terraformAttribute The attribute on the parent resource this class is referencing
   */
  constructor(terraformResource: cdktf.IInterpolatingParent, terraformAttribute: string);
  get internalValue(): BigqueryConnectionTimeouts | cdktf.IResolvable | undefined;
  set internalValue(value: BigqueryConnectionTimeouts | cdktf.IResolvable | undefined);
  private _create?;
  get create(): string;
  set create(value: string);
  resetCreate(): void;
  get createInput(): string | undefined;
  private _delete?;
  get delete(): string;
  set delete(value: string);
  resetDelete(): void;
  get deleteInput(): string | undefined;
  private _update?;
  get update(): string;
  set update(value: string);
  resetUpdate(): void;
  get updateInput(): string | undefined;
}
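/*
 * Usage sketch for the spark block and timeouts (illustrative): both nested config blocks are
 * optional, and the Dataproc Metastore / cluster resource names are placeholders following the
 * formats given in the field docs above. Timeout strings use Terraform's duration syntax.
 *
 *   new BigqueryConnection(this, "spark-conn", {
 *     connectionId: "my-spark-connection",
 *     location: "us-central1",
 *     spark: {
 *       metastoreServiceConfig: {
 *         metastoreService: "projects/my-project/locations/us-central1/services/my-metastore",
 *       },
 *       sparkHistoryServerConfig: {
 *         dataprocCluster: "projects/my-project/regions/us-central1/clusters/my-history-cluster",
 *       },
 *     },
 *     timeouts: {
 *       create: "10m",
 *       update: "10m",
 *       delete: "10m",
 *     },
 *   });
 */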
/**
 * Represents a {@link https://registry.terraform.io/providers/hashicorp/google/6.36.0/docs/resources/bigquery_connection google_bigquery_connection}
 */
export declare class BigqueryConnection extends cdktf.TerraformResource {
  static readonly tfResourceType = "google_bigquery_connection";
  /**
   * Generates CDKTF code for importing a BigqueryConnection resource upon running "cdktf plan <stack-name>"
   * @param scope The scope in which to define this construct
   * @param importToId The construct id used in the generated config for the BigqueryConnection to import
   * @param importFromId The id of the existing BigqueryConnection that should be imported. Refer to the {@link https://registry.terraform.io/providers/hashicorp/google/6.36.0/docs/resources/bigquery_connection#import import section} in the documentation of this resource for the id to use
   * @param provider? Optional instance of the provider where the BigqueryConnection to import is found
   */
  static generateConfigForImport(scope: Construct, importToId: string, importFromId: string, provider?: cdktf.TerraformProvider): cdktf.ImportableResource;
  /**
   * Create a new {@link https://registry.terraform.io/providers/hashicorp/google/6.36.0/docs/resources/bigquery_connection google_bigquery_connection} Resource
   * @param scope The scope in which to define this construct
   * @param id The scoped construct ID. Must be unique amongst siblings in the same scope
   * @param options BigqueryConnectionConfig = {}
   */
  constructor(scope: Construct, id: string, config?: BigqueryConnectionConfig);
  private _connectionId?;
  get connectionId(): string;
  set connectionId(value: string);
  resetConnectionId(): void;
  get connectionIdInput(): string | undefined;
  private _description?;
  get description(): string;
  set description(value: string);
  resetDescription(): void;
  get descriptionInput(): string | undefined;
  private _friendlyName?;
  get friendlyName(): string;
  set friendlyName(value: string);
  resetFriendlyName(): void;
  get friendlyNameInput(): string | undefined;
  get hasCredential(): cdktf.IResolvable;
  private _id?;
  get id(): string;
  set id(value: string);
  resetId(): void;
  get idInput(): string | undefined;
  private _kmsKeyName?;
  get kmsKeyName(): string;
  set kmsKeyName(value: string);
  resetKmsKeyName(): void;
  get kmsKeyNameInput(): string | undefined;
  private _location?;
  get location(): string;
  set location(value: string);
  resetLocation(): void;
  get locationInput(): string | undefined;
  get name(): string;
  private _project?;
  get project(): string;
  set project(value: string);
  resetProject(): void;
  get projectInput(): string | undefined;
  private _aws;
  get aws(): BigqueryConnectionAwsOutputReference;
  putAws(value: BigqueryConnectionAws): void;
  resetAws(): void;
  get awsInput(): BigqueryConnectionAws | undefined;
  private _azure;
  get azure(): BigqueryConnectionAzureOutputReference;
  putAzure(value: BigqueryConnectionAzure): void;
  resetAzure(): void;
  get azureInput(): BigqueryConnectionAzure | undefined;
  private _cloudResource;
  get cloudResource(): BigqueryConnectionCloudResourceOutputReference;
  putCloudResource(value: BigqueryConnectionCloudResource): void;
  resetCloudResource(): void;
  get cloudResourceInput(): BigqueryConnectionCloudResource | undefined;
  private _cloudSpanner;
  get cloudSpanner(): BigqueryConnectionCloudSpannerOutputReference;
  putCloudSpanner(value: BigqueryConnectionCloudSpanner): void;
  resetCloudSpanner(): void;
  get cloudSpannerInput(): BigqueryConnectionCloudSpanner | undefined;
  private _cloudSql;
  get cloudSql(): BigqueryConnectionCloudSqlOutputReference;
  putCloudSql(value: BigqueryConnectionCloudSql): void;
  resetCloudSql(): void;
  get cloudSqlInput(): BigqueryConnectionCloudSql | undefined;
  private _spark;
  get spark(): BigqueryConnectionSparkOutputReference;
  putSpark(value: BigqueryConnectionSpark): void;
  resetSpark(): void;
  get sparkInput(): BigqueryConnectionSpark | undefined;
  private _timeouts;
  get timeouts(): BigqueryConnectionTimeoutsOutputReference;
  putTimeouts(value: BigqueryConnectionTimeouts): void;
  resetTimeouts(): void;
  get timeoutsInput(): cdktf.IResolvable | BigqueryConnectionTimeouts | undefined;
  protected synthesizeAttributes(): {
    [name: string]: any;
  };
  protected synthesizeHclAttributes(): {
    [name: string]: any;
  };
}
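/*
 * Usage sketch for imports and computed attributes (illustrative): generateConfigForImport emits
 * import configuration during "cdktf plan"; the importFromId value below is a placeholder, and the
 * exact id format is described in the import section linked from the class documentation above.
 * Computed attributes such as name, hasCredential, and cloudResource.serviceAccountId can be
 * exported as Terraform outputs.
 *
 *   import { TerraformOutput } from "cdktf";
 *
 *   // Import an existing connection into the stack.
 *   BigqueryConnection.generateConfigForImport(
 *     this,
 *     "imported-conn",
 *     "projects/my-project/locations/US/connections/my-connection" // placeholder import id
 *   );
 *
 *   // Expose computed attributes of a connection created elsewhere in the stack.
 *   const conn = new BigqueryConnection(this, "conn", {
 *     connectionId: "my-cloud-resource-connection",
 *     location: "US",
 *     cloudResource: {},
 *   });
 *   new TerraformOutput(this, "connection-name", { value: conn.name });
 *   new TerraformOutput(this, "connection-service-account", {
 *     value: conn.cloudResource.serviceAccountId,
 *   });
 */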