@cdktf/provider-google
Prebuilt google Provider for Terraform CDK (cdktf)
/**
* Copyright (c) HashiCorp, Inc.
* SPDX-License-Identifier: MPL-2.0
*/
import { Construct } from 'constructs';
import * as cdktf from 'cdktf';
export interface BigqueryDataTransferConfigConfig extends cdktf.TerraformMetaArguments {
/**
* The number of days to look back to automatically refresh the data.
* For example, if dataRefreshWindowDays = 10, then every day BigQuery
* reingests data for [today-10, today-1], rather than ingesting data for
* just [today-1]. Only valid if the data source supports the feature.
* Set the value to 0 to use the default value.
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/bigquery_data_transfer_config#data_refresh_window_days BigqueryDataTransferConfig#data_refresh_window_days}
*/
readonly dataRefreshWindowDays?: number;
/**
* The data source id. Cannot be changed once the transfer config is created.
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/bigquery_data_transfer_config#data_source_id BigqueryDataTransferConfig#data_source_id}
*/
readonly dataSourceId: string;
/**
* The BigQuery target dataset id.
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/bigquery_data_transfer_config#destination_dataset_id BigqueryDataTransferConfig#destination_dataset_id}
*/
readonly destinationDatasetId?: string;
/**
* When set to true, no runs are scheduled for a given transfer.
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/bigquery_data_transfer_config#disabled BigqueryDataTransferConfig#disabled}
*/
readonly disabled?: boolean | cdktf.IResolvable;
/**
* The user-specified display name for the transfer config.
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/bigquery_data_transfer_config#display_name BigqueryDataTransferConfig#display_name}
*/
readonly displayName: string;
/**
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/bigquery_data_transfer_config#id BigqueryDataTransferConfig#id}
*
* Please be aware that the id field is automatically added to all resources in Terraform providers built with a Terraform provider SDK version below 2.
* If you have trouble setting this value, it may not actually be settable; check the provider documentation to confirm whether it should be.
*/
readonly id?: string;
/**
* The geographic location where the transfer config should reside.
* Examples: US, EU, asia-northeast1. The default value is US.
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/bigquery_data_transfer_config#location BigqueryDataTransferConfig#location}
*/
readonly location?: string;
/**
* Pub/Sub topic where notifications will be sent after transfer runs
* associated with this transfer config finish.
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/bigquery_data_transfer_config#notification_pubsub_topic BigqueryDataTransferConfig#notification_pubsub_topic}
*/
readonly notificationPubsubTopic?: string;
/**
* Parameters specific to each data source. For more information, see the bq tab in the 'Setting up a data transfer'
* section for each data source. For example, the parameters for Cloud Storage transfers are listed here:
* https://cloud.google.com/bigquery-transfer/docs/cloud-storage-transfer#bq
* (A usage sketch with example Cloud Storage params follows this interface.)
*
* **NOTE**: If you are attempting to update a parameter that cannot be updated (due to API limitations), [please force recreation of the resource](https://www.terraform.io/cli/state/taint#forcing-re-creation-of-resources).
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/bigquery_data_transfer_config#params BigqueryDataTransferConfig#params}
*/
readonly params: {
[key: string]: string;
};
/**
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/bigquery_data_transfer_config#project BigqueryDataTransferConfig#project}
*/
readonly project?: string;
/**
* Data transfer schedule. If the data source does not support a custom
* schedule, this should be empty. If it is empty, the default value for
* the data source will be used. The specified times are in UTC. Examples
* of valid format: 1st,3rd monday of month 15:30, every wed,fri of jan,
* jun 13:15, and first sunday of quarter 00:00. See more explanation
* about the format here:
* https://cloud.google.com/appengine/docs/flexible/python/scheduling-jobs-with-cron-yaml#the_schedule_format
* NOTE: The minimum interval time between recurring transfers depends
* on the data source; refer to the documentation for your data source.
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/bigquery_data_transfer_config#schedule BigqueryDataTransferConfig#schedule}
*/
readonly schedule?: string;
/**
* Service account email. If this field is set, the transfer config will
* be created with this service account's credentials. The user calling
* this API must have permission to act as this service account.
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/bigquery_data_transfer_config#service_account_name BigqueryDataTransferConfig#service_account_name}
*/
readonly serviceAccountName?: string;
/**
* email_preferences block
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/bigquery_data_transfer_config#email_preferences BigqueryDataTransferConfig#email_preferences}
*/
readonly emailPreferences?: BigqueryDataTransferConfigEmailPreferences;
/**
* encryption_configuration block
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/bigquery_data_transfer_config#encryption_configuration BigqueryDataTransferConfig#encryption_configuration}
*/
readonly encryptionConfiguration?: BigqueryDataTransferConfigEncryptionConfiguration;
/**
* schedule_options block
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/bigquery_data_transfer_config#schedule_options BigqueryDataTransferConfig#schedule_options}
*/
readonly scheduleOptions?: BigqueryDataTransferConfigScheduleOptions;
/**
* sensitive_params block
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/bigquery_data_transfer_config#sensitive_params BigqueryDataTransferConfig#sensitive_params}
*/
readonly sensitiveParams?: BigqueryDataTransferConfigSensitiveParams;
/**
* timeouts block
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/bigquery_data_transfer_config#timeouts BigqueryDataTransferConfig#timeouts}
*/
readonly timeouts?: BigqueryDataTransferConfigTimeouts;
}
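// --- Usage sketch (illustrative only, not part of the generated declarations) ---
// Assembles a BigqueryDataTransferConfigConfig for a scheduled Cloud Storage transfer.
// The project id, dataset id, bucket path, and param keys are placeholder assumptions
// based on the Cloud Storage transfer docs linked above; the GoogleProvider import path
// follows the prebuilt provider's usual lib/ layout. Construct and the cdktf namespace
// are already imported at the top of this file; the stack is synthesized at the end of
// the file.
import { GoogleProvider } from '@cdktf/provider-google/lib/provider';

class TransferSketchStack extends cdktf.TerraformStack {
  constructor(scope: Construct, id: string) {
    super(scope, id);
    new GoogleProvider(this, 'google', { project: 'my-project', region: 'us-central1' });
    new BigqueryDataTransferConfig(this, 'gcs_transfer', {
      displayName: 'nightly-gcs-load',
      dataSourceId: 'google_cloud_storage',  // data source ids are fixed by BigQuery DTS
      destinationDatasetId: 'analytics',
      location: 'US',
      schedule: 'every 24 hours',
      dataRefreshWindowDays: 10,             // reingest [today-10, today-1] on each run
      params: {
        // params are data-source specific; these keys are sketch assumptions
        data_path_template: 'gs://my-bucket/exports/*.csv',
        destination_table_name_template: 'events',
        file_format: 'CSV',
      },
    });
  }
}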
export interface BigqueryDataTransferConfigEmailPreferences {
/**
* If true, email notifications will be sent on transfer run failures.
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/bigquery_data_transfer_config#enable_failure_email BigqueryDataTransferConfig#enable_failure_email}
*/
readonly enableFailureEmail: boolean | cdktf.IResolvable;
}
export declare function bigqueryDataTransferConfigEmailPreferencesToTerraform(struct?: BigqueryDataTransferConfigEmailPreferencesOutputReference | BigqueryDataTransferConfigEmailPreferences): any;
export declare function bigqueryDataTransferConfigEmailPreferencesToHclTerraform(struct?: BigqueryDataTransferConfigEmailPreferencesOutputReference | BigqueryDataTransferConfigEmailPreferences): any;
export declare class BigqueryDataTransferConfigEmailPreferencesOutputReference extends cdktf.ComplexObject {
private isEmptyObject;
/**
* @param terraformResource The parent resource
* @param terraformAttribute The attribute on the parent resource this class is referencing
*/
constructor(terraformResource: cdktf.IInterpolatingParent, terraformAttribute: string);
get internalValue(): BigqueryDataTransferConfigEmailPreferences | undefined;
set internalValue(value: BigqueryDataTransferConfigEmailPreferences | undefined);
private _enableFailureEmail?;
get enableFailureEmail(): boolean | cdktf.IResolvable;
set enableFailureEmail(value: boolean | cdktf.IResolvable);
get enableFailureEmailInput(): boolean | cdktf.IResolvable | undefined;
}
export interface BigqueryDataTransferConfigEncryptionConfiguration {
/**
* The name of the KMS key used for encrypting BigQuery data.
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/bigquery_data_transfer_config#kms_key_name BigqueryDataTransferConfig#kms_key_name}
*/
readonly kmsKeyName: string;
}
export declare function bigqueryDataTransferConfigEncryptionConfigurationToTerraform(struct?: BigqueryDataTransferConfigEncryptionConfigurationOutputReference | BigqueryDataTransferConfigEncryptionConfiguration): any;
export declare function bigqueryDataTransferConfigEncryptionConfigurationToHclTerraform(struct?: BigqueryDataTransferConfigEncryptionConfigurationOutputReference | BigqueryDataTransferConfigEncryptionConfiguration): any;
export declare class BigqueryDataTransferConfigEncryptionConfigurationOutputReference extends cdktf.ComplexObject {
private isEmptyObject;
/**
* @param terraformResource The parent resource
* @param terraformAttribute The attribute on the parent resource this class is referencing
*/
constructor(terraformResource: cdktf.IInterpolatingParent, terraformAttribute: string);
get internalValue(): BigqueryDataTransferConfigEncryptionConfiguration | undefined;
set internalValue(value: BigqueryDataTransferConfigEncryptionConfiguration | undefined);
private _kmsKeyName?;
get kmsKeyName(): string;
set kmsKeyName(value: string);
get kmsKeyNameInput(): string | undefined;
}
export interface BigqueryDataTransferConfigScheduleOptions {
/**
* If true, automatic scheduling of data transfer runs for this
* configuration will be disabled. Runs can still be started on an
* ad-hoc basis using the transferConfigs.startManualRuns API. When
* automatic scheduling is disabled, the TransferConfig.schedule field
* is ignored.
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/bigquery_data_transfer_config#disable_auto_scheduling BigqueryDataTransferConfig#disable_auto_scheduling}
*/
readonly disableAutoScheduling?: boolean | cdktf.IResolvable;
/**
* Defines the time to stop scheduling transfer runs. A transfer run cannot be
* scheduled at or after the end time. The end time can be changed at any
* moment. The time when a data transfer can be triggered manually is not
* limited by this option.
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/bigquery_data_transfer_config#end_time BigqueryDataTransferConfig#end_time}
*/
readonly endTime?: string;
/**
* Specifies the time to start scheduling transfer runs. The first run will be
* scheduled at or after the start time, according to the recurrence pattern
* defined in the schedule string. The start time can be changed at any
* moment. The time when a data transfer can be triggered manually is not
* limited by this option.
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/bigquery_data_transfer_config#start_time BigqueryDataTransferConfig#start_time}
*/
readonly startTime?: string;
}
export declare function bigqueryDataTransferConfigScheduleOptionsToTerraform(struct?: BigqueryDataTransferConfigScheduleOptionsOutputReference | BigqueryDataTransferConfigScheduleOptions): any;
export declare function bigqueryDataTransferConfigScheduleOptionsToHclTerraform(struct?: BigqueryDataTransferConfigScheduleOptionsOutputReference | BigqueryDataTransferConfigScheduleOptions): any;
export declare class BigqueryDataTransferConfigScheduleOptionsOutputReference extends cdktf.ComplexObject {
private isEmptyObject;
/**
* @param terraformResource The parent resource
* @param terraformAttribute The attribute on the parent resource this class is referencing
*/
constructor(terraformResource: cdktf.IInterpolatingParent, terraformAttribute: string);
get internalValue(): BigqueryDataTransferConfigScheduleOptions | undefined;
set internalValue(value: BigqueryDataTransferConfigScheduleOptions | undefined);
private _disableAutoScheduling?;
get disableAutoScheduling(): boolean | cdktf.IResolvable;
set disableAutoScheduling(value: boolean | cdktf.IResolvable);
resetDisableAutoScheduling(): void;
get disableAutoSchedulingInput(): boolean | cdktf.IResolvable | undefined;
private _endTime?;
get endTime(): string;
set endTime(value: string);
resetEndTime(): void;
get endTimeInput(): string | undefined;
private _startTime?;
get startTime(): string;
set startTime(value: string);
resetStartTime(): void;
get startTimeInput(): string | undefined;
}
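// Sketch (illustrative): constraining when runs may be scheduled via the schedule_options
// block. The RFC 3339 timestamps are placeholder assumptions. putScheduleOptions replaces
// the whole block; setting disableAutoScheduling to true would make the schedule string
// ignored, per the docs above.
function constrainScheduleWindow(transfer: BigqueryDataTransferConfig): void {
  transfer.putScheduleOptions({
    disableAutoScheduling: false,
    startTime: '2025-01-01T00:00:00Z', // first run at or after this time
    endTime: '2025-12-31T00:00:00Z',   // no runs scheduled at or after this time
  });
}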
export interface BigqueryDataTransferConfigSensitiveParams {
/**
* The Secret Access Key of the AWS account that data is transferred from.
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/bigquery_data_transfer_config#secret_access_key BigqueryDataTransferConfig#secret_access_key}
*/
readonly secretAccessKey: string;
}
export declare function bigqueryDataTransferConfigSensitiveParamsToTerraform(struct?: BigqueryDataTransferConfigSensitiveParamsOutputReference | BigqueryDataTransferConfigSensitiveParams): any;
export declare function bigqueryDataTransferConfigSensitiveParamsToHclTerraform(struct?: BigqueryDataTransferConfigSensitiveParamsOutputReference | BigqueryDataTransferConfigSensitiveParams): any;
export declare class BigqueryDataTransferConfigSensitiveParamsOutputReference extends cdktf.ComplexObject {
private isEmptyObject;
/**
* @param terraformResource The parent resource
* @param terraformAttribute The attribute on the parent resource this class is referencing
*/
constructor(terraformResource: cdktf.IInterpolatingParent, terraformAttribute: string);
get internalValue(): BigqueryDataTransferConfigSensitiveParams | undefined;
set internalValue(value: BigqueryDataTransferConfigSensitiveParams | undefined);
private _secretAccessKey?;
get secretAccessKey(): string;
set secretAccessKey(value: string);
get secretAccessKeyInput(): string | undefined;
}
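// Sketch (illustrative): for data sources that read from AWS (for example S3 transfers),
// the secret access key belongs in the sensitive_params block rather than params. This
// helper and its awsSecretAccessKey argument are hypothetical, not part of this API.
function attachAwsSecret(transfer: BigqueryDataTransferConfig, awsSecretAccessKey: string): void {
  transfer.putSensitiveParams({ secretAccessKey: awsSecretAccessKey });
}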
export interface BigqueryDataTransferConfigTimeouts {
/**
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/bigquery_data_transfer_config#create BigqueryDataTransferConfig#create}
*/
readonly create?: string;
/**
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/bigquery_data_transfer_config#delete BigqueryDataTransferConfig#delete}
*/
readonly delete?: string;
/**
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/bigquery_data_transfer_config#update BigqueryDataTransferConfig#update}
*/
readonly update?: string;
}
export declare function bigqueryDataTransferConfigTimeoutsToTerraform(struct?: BigqueryDataTransferConfigTimeouts | cdktf.IResolvable): any;
export declare function bigqueryDataTransferConfigTimeoutsToHclTerraform(struct?: BigqueryDataTransferConfigTimeouts | cdktf.IResolvable): any;
export declare class BigqueryDataTransferConfigTimeoutsOutputReference extends cdktf.ComplexObject {
private isEmptyObject;
private resolvableValue?;
/**
* @param terraformResource The parent resource
* @param terraformAttribute The attribute on the parent resource this class is referencing
*/
constructor(terraformResource: cdktf.IInterpolatingParent, terraformAttribute: string);
get internalValue(): BigqueryDataTransferConfigTimeouts | cdktf.IResolvable | undefined;
set internalValue(value: BigqueryDataTransferConfigTimeouts | cdktf.IResolvable | undefined);
private _create?;
get create(): string;
set create(value: string);
resetCreate(): void;
get createInput(): string | undefined;
private _delete?;
get delete(): string;
set delete(value: string);
resetDelete(): void;
get deleteInput(): string | undefined;
private _update?;
get update(): string;
set update(value: string);
resetUpdate(): void;
get updateInput(): string | undefined;
}
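// Sketch (illustrative): the timeouts block takes Terraform duration strings; the values
// below are placeholder assumptions, not provider defaults.
function extendOperationTimeouts(transfer: BigqueryDataTransferConfig): void {
  transfer.putTimeouts({
    create: '30m',
    update: '30m',
    delete: '20m',
  });
}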
/**
* Represents a {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/bigquery_data_transfer_config google_bigquery_data_transfer_config}
*/
export declare class BigqueryDataTransferConfig extends cdktf.TerraformResource {
static readonly tfResourceType = "google_bigquery_data_transfer_config";
/**
* Generates CDKTF code for importing a BigqueryDataTransferConfig resource upon running "cdktf plan <stack-name>"
* @param scope The scope in which to define this construct
* @param importToId The construct id used in the generated config for the BigqueryDataTransferConfig to import
* @param importFromId The id of the existing BigqueryDataTransferConfig that should be imported. Refer to the {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/bigquery_data_transfer_config#import import section} in the documentation of this resource for the id to use
* @param provider Optional instance of the provider where the BigqueryDataTransferConfig to import is found
*/
static generateConfigForImport(scope: Construct, importToId: string, importFromId: string, provider?: cdktf.TerraformProvider): cdktf.ImportableResource;
/**
* Create a new {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/bigquery_data_transfer_config google_bigquery_data_transfer_config} Resource
*
* @param scope The scope in which to define this construct
* @param id The scoped construct ID. Must be unique amongst siblings in the same scope
* @param config BigqueryDataTransferConfigConfig
*/
constructor(scope: Construct, id: string, config: BigqueryDataTransferConfigConfig);
private _dataRefreshWindowDays?;
get dataRefreshWindowDays(): number;
set dataRefreshWindowDays(value: number);
resetDataRefreshWindowDays(): void;
get dataRefreshWindowDaysInput(): number | undefined;
private _dataSourceId?;
get dataSourceId(): string;
set dataSourceId(value: string);
get dataSourceIdInput(): string | undefined;
private _destinationDatasetId?;
get destinationDatasetId(): string;
set destinationDatasetId(value: string);
resetDestinationDatasetId(): void;
get destinationDatasetIdInput(): string | undefined;
private _disabled?;
get disabled(): boolean | cdktf.IResolvable;
set disabled(value: boolean | cdktf.IResolvable);
resetDisabled(): void;
get disabledInput(): boolean | cdktf.IResolvable | undefined;
private _displayName?;
get displayName(): string;
set displayName(value: string);
get displayNameInput(): string | undefined;
private _id?;
get id(): string;
set id(value: string);
resetId(): void;
get idInput(): string | undefined;
private _location?;
get location(): string;
set location(value: string);
resetLocation(): void;
get locationInput(): string | undefined;
get name(): string;
private _notificationPubsubTopic?;
get notificationPubsubTopic(): string;
set notificationPubsubTopic(value: string);
resetNotificationPubsubTopic(): void;
get notificationPubsubTopicInput(): string | undefined;
private _params?;
get params(): {
[key: string]: string;
};
set params(value: {
[key: string]: string;
});
get paramsInput(): {
[key: string]: string;
} | undefined;
private _project?;
get project(): string;
set project(value: string);
resetProject(): void;
get projectInput(): string | undefined;
private _schedule?;
get schedule(): string;
set schedule(value: string);
resetSchedule(): void;
get scheduleInput(): string | undefined;
private _serviceAccountName?;
get serviceAccountName(): string;
set serviceAccountName(value: string);
resetServiceAccountName(): void;
get serviceAccountNameInput(): string | undefined;
private _emailPreferences;
get emailPreferences(): BigqueryDataTransferConfigEmailPreferencesOutputReference;
putEmailPreferences(value: BigqueryDataTransferConfigEmailPreferences): void;
resetEmailPreferences(): void;
get emailPreferencesInput(): BigqueryDataTransferConfigEmailPreferences | undefined;
private _encryptionConfiguration;
get encryptionConfiguration(): BigqueryDataTransferConfigEncryptionConfigurationOutputReference;
putEncryptionConfiguration(value: BigqueryDataTransferConfigEncryptionConfiguration): void;
resetEncryptionConfiguration(): void;
get encryptionConfigurationInput(): BigqueryDataTransferConfigEncryptionConfiguration | undefined;
private _scheduleOptions;
get scheduleOptions(): BigqueryDataTransferConfigScheduleOptionsOutputReference;
putScheduleOptions(value: BigqueryDataTransferConfigScheduleOptions): void;
resetScheduleOptions(): void;
get scheduleOptionsInput(): BigqueryDataTransferConfigScheduleOptions | undefined;
private _sensitiveParams;
get sensitiveParams(): BigqueryDataTransferConfigSensitiveParamsOutputReference;
putSensitiveParams(value: BigqueryDataTransferConfigSensitiveParams): void;
resetSensitiveParams(): void;
get sensitiveParamsInput(): BigqueryDataTransferConfigSensitiveParams | undefined;
private _timeouts;
get timeouts(): BigqueryDataTransferConfigTimeoutsOutputReference;
putTimeouts(value: BigqueryDataTransferConfigTimeouts): void;
resetTimeouts(): void;
get timeoutsInput(): cdktf.IResolvable | BigqueryDataTransferConfigTimeouts | undefined;
protected synthesizeAttributes(): {
[name: string]: any;
};
protected synthesizeHclAttributes(): {
[name: string]: any;
};
}
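// Sketch (illustrative): generating import configuration for an existing transfer config
// instead of creating a new one. The import id format below is an assumption; the import
// section linked above defines the exact id to use.
class ImportSketchStack extends cdktf.TerraformStack {
  constructor(scope: Construct, id: string) {
    super(scope, id);
    BigqueryDataTransferConfig.generateConfigForImport(
      this,
      'imported_transfer',
      'projects/my-project/locations/us/transferConfigs/1234567890', // placeholder id
    );
  }
}

// Wiring for the sketch stacks above; running `cdktf synth` would evaluate this module
// and emit the Terraform configuration for both sketches.
const app = new cdktf.App();
new TransferSketchStack(app, 'bigquery-transfer-sketch');
new ImportSketchStack(app, 'bigquery-import-sketch');
app.synth();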