@cdktf/provider-google

Prebuilt google Provider for Terraform CDK (cdktf)

/**
 * Copyright (c) HashiCorp, Inc.
 * SPDX-License-Identifier: MPL-2.0
 */
import { Construct } from 'constructs';
import * as cdktf from 'cdktf';
export interface DataPipelinePipelineConfig extends cdktf.TerraformMetaArguments {
    /**
     * The display name of the pipeline. It can contain only letters ([A-Za-z]), numbers ([0-9]), hyphens (-), and underscores (_).
     * Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/data_pipeline_pipeline#display_name DataPipelinePipeline#display_name}
     */
    readonly displayName?: string;
    /**
     * Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/data_pipeline_pipeline#id DataPipelinePipeline#id}
     * Please be aware that the id field is automatically added to all resources in Terraform providers using a Terraform provider SDK version below 2.
     * If you experience problems setting this value it might not be settable. Please take a look at the provider documentation to ensure it should be settable.
     */
    readonly id?: string;
    /**
     * The pipeline name. For example: projects/PROJECT_ID/locations/LOCATION_ID/pipelines/PIPELINE_ID.
     * - PROJECT_ID can contain letters ([A-Za-z]), numbers ([0-9]), hyphens (-), colons (:), and periods (.). For more information, see Identifying projects.
     * - LOCATION_ID is the canonical ID for the pipeline's location. The list of available locations can be obtained by calling google.cloud.location.Locations.ListLocations. Note that the Data Pipelines service is not available in all regions. It depends on Cloud Scheduler, an App Engine application, so it's only available in App Engine regions.
     * - PIPELINE_ID is the ID of the pipeline. Must be unique for the selected project and location.
     * Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/data_pipeline_pipeline#name DataPipelinePipeline#name}
     */
    readonly name: string;
    /**
     * The sources of the pipeline (for example, Dataplex). The keys and values are set by the corresponding sources during pipeline creation.
     * An object containing a list of "key": value pairs. Example: { "name": "wrench", "mass": "1.3kg", "count": "3" }.
     * Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/data_pipeline_pipeline#pipeline_sources DataPipelinePipeline#pipeline_sources}
     */
    readonly pipelineSources?: {
        [key: string]: string;
    };
    /**
     * Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/data_pipeline_pipeline#project DataPipelinePipeline#project}
     */
    readonly project?: string;
    /**
     * A reference to the region.
     * Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/data_pipeline_pipeline#region DataPipelinePipeline#region}
     */
    readonly region?: string;
    /**
     * Optional. A service account email to be used with the Cloud Scheduler job. If not specified, the default compute engine service account will be used.
     * Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/data_pipeline_pipeline#scheduler_service_account_email DataPipelinePipeline#scheduler_service_account_email}
     */
    readonly schedulerServiceAccountEmail?: string;
    /**
     * The state of the pipeline. When the pipeline is created, the state is set to 'PIPELINE_STATE_ACTIVE' by default. State changes can be requested by setting the state to stopping, paused, or resuming. State cannot be changed through pipelines.patch requests.
     * https://cloud.google.com/dataflow/docs/reference/data-pipelines/rest/v1/projects.locations.pipelines#state Possible values: ["STATE_UNSPECIFIED", "STATE_RESUMING", "STATE_ACTIVE", "STATE_STOPPING", "STATE_ARCHIVED", "STATE_PAUSED"]
     * Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/data_pipeline_pipeline#state DataPipelinePipeline#state}
     */
    readonly state: string;
    /**
     * The type of the pipeline. This field affects the scheduling of the pipeline and the type of metrics to show for the pipeline.
     * https://cloud.google.com/dataflow/docs/reference/data-pipelines/rest/v1/projects.locations.pipelines#pipelinetype Possible values: ["PIPELINE_TYPE_UNSPECIFIED", "PIPELINE_TYPE_BATCH", "PIPELINE_TYPE_STREAMING"]
     * Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/data_pipeline_pipeline#type DataPipelinePipeline#type}
     */
    readonly type: string;
    /**
     * schedule_info block
     * Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/data_pipeline_pipeline#schedule_info DataPipelinePipeline#schedule_info}
     */
    readonly scheduleInfo?: DataPipelinePipelineScheduleInfo;
    /**
     * timeouts block
     * Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/data_pipeline_pipeline#timeouts DataPipelinePipeline#timeouts}
     */
    readonly timeouts?: DataPipelinePipelineTimeouts;
    /**
     * workload block
     * Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/data_pipeline_pipeline#workload DataPipelinePipeline#workload}
     */
    readonly workload?: DataPipelinePipelineWorkload;
}
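/*
 * A minimal usage sketch for DataPipelinePipelineConfig in a cdktf stack. It assumes
 * the DataPipelinePipeline resource class this module exports further down (its
 * declaration falls outside this excerpt) and the GoogleProvider class from
 * @cdktf/provider-google/lib/provider; project, location, and pipeline names are
 * illustrative.
 *
 *   import { App, TerraformStack } from 'cdktf';
 *   import { Construct } from 'constructs';
 *   import { GoogleProvider } from '@cdktf/provider-google/lib/provider';
 *   import { DataPipelinePipeline } from '@cdktf/provider-google/lib/data-pipeline-pipeline';
 *
 *   class PipelineStack extends TerraformStack {
 *       constructor(scope: Construct, id: string) {
 *           super(scope, id);
 *           new GoogleProvider(this, 'google', { project: 'my-project', region: 'us-central1' });
 *           new DataPipelinePipeline(this, 'pipeline', {
 *               name: 'projects/my-project/locations/us-central1/pipelines/my-pipeline',
 *               displayName: 'my_pipeline',
 *               type: 'PIPELINE_TYPE_BATCH',
 *               state: 'STATE_ACTIVE',
 *           });
 *       }
 *   }
 *
 *   const app = new App();
 *   new PipelineStack(app, 'data-pipeline-example');
 *   app.synth();
 */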
export interface DataPipelinePipelineScheduleInfo {
    /**
     * Unix-cron format of the schedule. This information is retrieved from the linked Cloud Scheduler.
     * Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/data_pipeline_pipeline#schedule DataPipelinePipeline#schedule}
     */
    readonly schedule?: string;
    /**
     * Timezone ID. This matches the timezone IDs used by the Cloud Scheduler API. If empty, UTC time is assumed.
     * Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/data_pipeline_pipeline#time_zone DataPipelinePipeline#time_zone}
     */
    readonly timeZone?: string;
}
export declare function dataPipelinePipelineScheduleInfoToTerraform(struct?: DataPipelinePipelineScheduleInfoOutputReference | DataPipelinePipelineScheduleInfo): any;
export declare function dataPipelinePipelineScheduleInfoToHclTerraform(struct?: DataPipelinePipelineScheduleInfoOutputReference | DataPipelinePipelineScheduleInfo): any;
export declare class DataPipelinePipelineScheduleInfoOutputReference extends cdktf.ComplexObject {
    private isEmptyObject;
    /**
     * @param terraformResource The parent resource
     * @param terraformAttribute The attribute on the parent resource this class is referencing
     */
    constructor(terraformResource: cdktf.IInterpolatingParent, terraformAttribute: string);
    get internalValue(): DataPipelinePipelineScheduleInfo | undefined;
    set internalValue(value: DataPipelinePipelineScheduleInfo | undefined);
    get nextJobTime(): string;
    private _schedule?;
    get schedule(): string;
    set schedule(value: string);
    resetSchedule(): void;
    get scheduleInput(): string | undefined;
    private _timeZone?;
    get timeZone(): string;
    set timeZone(value: string);
    resetTimeZone(): void;
    get timeZoneInput(): string | undefined;
}
export interface DataPipelinePipelineTimeouts {
    /**
     * Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/data_pipeline_pipeline#create DataPipelinePipeline#create}
     */
    readonly create?: string;
    /**
     * Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/data_pipeline_pipeline#delete DataPipelinePipeline#delete}
     */
    readonly delete?: string;
    /**
     * Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/data_pipeline_pipeline#update DataPipelinePipeline#update}
     */
    readonly update?: string;
}
export declare function dataPipelinePipelineTimeoutsToTerraform(struct?: DataPipelinePipelineTimeouts | cdktf.IResolvable): any;
export declare function dataPipelinePipelineTimeoutsToHclTerraform(struct?: DataPipelinePipelineTimeouts | cdktf.IResolvable): any;
export declare class DataPipelinePipelineTimeoutsOutputReference extends cdktf.ComplexObject {
    private isEmptyObject;
    private resolvableValue?;
    /**
     * @param terraformResource The parent resource
     * @param terraformAttribute The attribute on the parent resource this class is referencing
     */
    constructor(terraformResource: cdktf.IInterpolatingParent, terraformAttribute: string);
    get internalValue(): DataPipelinePipelineTimeouts | cdktf.IResolvable | undefined;
    set internalValue(value: DataPipelinePipelineTimeouts | cdktf.IResolvable | undefined);
    private _create?;
    get create(): string;
    set create(value: string);
    resetCreate(): void;
    get createInput(): string | undefined;
    private _delete?;
    get delete(): string;
    set delete(value: string);
    resetDelete(): void;
    get deleteInput(): string | undefined;
    private _update?;
    get update(): string;
    set update(value: string);
    resetUpdate(): void;
    get updateInput(): string | undefined;
}
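/*
 * A sketch of the optional schedule_info and timeouts blocks, continuing the
 * illustrative pipeline above. Values are assumptions, not defaults.
 *
 *   const config: DataPipelinePipelineConfig = {
 *       name: 'projects/my-project/locations/us-central1/pipelines/my-pipeline',
 *       state: 'STATE_ACTIVE',
 *       type: 'PIPELINE_TYPE_BATCH',
 *       scheduleInfo: {
 *           schedule: '0 2 * * *',        // Unix-cron: every day at 02:00
 *           timeZone: 'America/New_York', // Cloud Scheduler timezone ID; UTC if empty
 *       },
 *       timeouts: { create: '10m', update: '10m', delete: '10m' },
 *   };
 */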
export interface DataPipelinePipelineWorkloadDataflowFlexTemplateRequestLaunchParameterEnvironment {
    /**
     * Additional experiment flags for the job.
     * Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/data_pipeline_pipeline#additional_experiments DataPipelinePipeline#additional_experiments}
     */
    readonly additionalExperiments?: string[];
    /**
     * Additional user labels to be specified for the job. Keys and values should follow the restrictions specified in the labeling restrictions page.
     * An object containing a list of "key": value pairs. Example: { "name": "wrench", "mass": "1kg", "count": "3" }.
     * Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/data_pipeline_pipeline#additional_user_labels DataPipelinePipeline#additional_user_labels}
     */
    readonly additionalUserLabels?: {
        [key: string]: string;
    };
    /**
     * Whether to enable Streaming Engine for the job.
     * Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/data_pipeline_pipeline#enable_streaming_engine DataPipelinePipeline#enable_streaming_engine}
     */
    readonly enableStreamingEngine?: boolean | cdktf.IResolvable;
    /**
     * Set FlexRS goal for the job. https://cloud.google.com/dataflow/docs/guides/flexrs
     * https://cloud.google.com/dataflow/docs/reference/data-pipelines/rest/v1/projects.locations.pipelines#FlexResourceSchedulingGoal Possible values: ["FLEXRS_UNSPECIFIED", "FLEXRS_SPEED_OPTIMIZED", "FLEXRS_COST_OPTIMIZED"]
     * Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/data_pipeline_pipeline#flexrs_goal DataPipelinePipeline#flexrs_goal}
     */
    readonly flexrsGoal?: string;
    /**
     * Configuration for VM IPs.
     * https://cloud.google.com/dataflow/docs/reference/data-pipelines/rest/v1/projects.locations.pipelines#WorkerIPAddressConfiguration Possible values: ["WORKER_IP_UNSPECIFIED", "WORKER_IP_PUBLIC", "WORKER_IP_PRIVATE"]
     * Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/data_pipeline_pipeline#ip_configuration DataPipelinePipeline#ip_configuration}
     */
    readonly ipConfiguration?: string;
    /**
     * Name for the Cloud KMS key for the job. The key format is: projects/PROJECT_ID/locations/LOCATION_ID/keyRings/KEY_RING_ID/cryptoKeys/KEY_ID
     * Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/data_pipeline_pipeline#kms_key_name DataPipelinePipeline#kms_key_name}
     */
    readonly kmsKeyName?: string;
    /**
     * The machine type to use for the job. Defaults to the value from the template if not specified.
     * Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/data_pipeline_pipeline#machine_type DataPipelinePipeline#machine_type}
     */
    readonly machineType?: string;
    /**
     * The maximum number of Compute Engine instances to be made available to your pipeline during execution, from 1 to 1000.
     * Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/data_pipeline_pipeline#max_workers DataPipelinePipeline#max_workers}
     */
    readonly maxWorkers?: number;
    /**
     * Network to which VMs will be assigned. If empty or unspecified, the service will use the network "default".
     * Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/data_pipeline_pipeline#network DataPipelinePipeline#network}
     */
    readonly network?: string;
    /**
     * The initial number of Compute Engine instances for the job.
     * Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/data_pipeline_pipeline#num_workers DataPipelinePipeline#num_workers}
     */
    readonly numWorkers?: number;
    /**
     * The email address of the service account to run the job as.
     * Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/data_pipeline_pipeline#service_account_email DataPipelinePipeline#service_account_email}
     */
    readonly serviceAccountEmail?: string;
    /**
     * Subnetwork to which VMs will be assigned, if desired. You can specify a subnetwork using either a complete URL or an abbreviated path. Expected to be of the form "https://www.googleapis.com/compute/v1/projects/HOST_PROJECT_ID/regions/REGION/subnetworks/SUBNETWORK" or "regions/REGION/subnetworks/SUBNETWORK". If the subnetwork is located in a Shared VPC network, you must use the complete URL.
     * Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/data_pipeline_pipeline#subnetwork DataPipelinePipeline#subnetwork}
     */
    readonly subnetwork?: string;
    /**
     * The Cloud Storage path to use for temporary files. Must be a valid Cloud Storage URL, beginning with gs://.
     * Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/data_pipeline_pipeline#temp_location DataPipelinePipeline#temp_location}
     */
    readonly tempLocation?: string;
    /**
     * The Compute Engine region (https://cloud.google.com/compute/docs/regions-zones/regions-zones) in which worker processing should occur, e.g. "us-west1". Mutually exclusive with workerZone. If neither workerRegion nor workerZone is specified, defaults to the control plane's region.
     * Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/data_pipeline_pipeline#worker_region DataPipelinePipeline#worker_region}
     */
    readonly workerRegion?: string;
    /**
     * The Compute Engine zone (https://cloud.google.com/compute/docs/regions-zones/regions-zones) in which worker processing should occur, e.g. "us-west1-a". Mutually exclusive with workerRegion. If neither workerRegion nor workerZone is specified, a zone in the control plane's region is chosen based on available capacity. If both workerZone and zone are set, workerZone takes precedence.
     * Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/data_pipeline_pipeline#worker_zone DataPipelinePipeline#worker_zone}
     */
    readonly workerZone?: string;
    /**
     * The Compute Engine availability zone for launching worker instances to run your pipeline. In the future, workerZone will take precedence.
     * Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/data_pipeline_pipeline#zone DataPipelinePipeline#zone}
     */
    readonly zone?: string;
}
export declare function dataPipelinePipelineWorkloadDataflowFlexTemplateRequestLaunchParameterEnvironmentToTerraform(struct?: DataPipelinePipelineWorkloadDataflowFlexTemplateRequestLaunchParameterEnvironmentOutputReference | DataPipelinePipelineWorkloadDataflowFlexTemplateRequestLaunchParameterEnvironment): any;
export declare function dataPipelinePipelineWorkloadDataflowFlexTemplateRequestLaunchParameterEnvironmentToHclTerraform(struct?: DataPipelinePipelineWorkloadDataflowFlexTemplateRequestLaunchParameterEnvironmentOutputReference | DataPipelinePipelineWorkloadDataflowFlexTemplateRequestLaunchParameterEnvironment): any;
export declare class DataPipelinePipelineWorkloadDataflowFlexTemplateRequestLaunchParameterEnvironmentOutputReference extends cdktf.ComplexObject {
    private isEmptyObject;
    /**
     * @param terraformResource The parent resource
     * @param terraformAttribute The attribute on the parent resource this class is referencing
     */
    constructor(terraformResource: cdktf.IInterpolatingParent, terraformAttribute: string);
    get internalValue(): DataPipelinePipelineWorkloadDataflowFlexTemplateRequestLaunchParameterEnvironment | undefined;
    set internalValue(value: DataPipelinePipelineWorkloadDataflowFlexTemplateRequestLaunchParameterEnvironment | undefined);
    private _additionalExperiments?;
    get additionalExperiments(): string[];
    set additionalExperiments(value: string[]);
    resetAdditionalExperiments(): void;
    get additionalExperimentsInput(): string[] | undefined;
    private _additionalUserLabels?;
    get additionalUserLabels(): {
        [key: string]: string;
    };
    set additionalUserLabels(value: {
        [key: string]: string;
    });
    resetAdditionalUserLabels(): void;
    get additionalUserLabelsInput(): {
        [key: string]: string;
    } | undefined;
    private _enableStreamingEngine?;
    get enableStreamingEngine(): boolean | cdktf.IResolvable;
    set enableStreamingEngine(value: boolean | cdktf.IResolvable);
    resetEnableStreamingEngine(): void;
    get enableStreamingEngineInput(): boolean | cdktf.IResolvable | undefined;
    private _flexrsGoal?;
    get flexrsGoal(): string;
    set flexrsGoal(value: string);
    resetFlexrsGoal(): void;
    get flexrsGoalInput(): string | undefined;
    private _ipConfiguration?;
    get ipConfiguration(): string;
    set ipConfiguration(value: string);
    resetIpConfiguration(): void;
    get ipConfigurationInput(): string | undefined;
    private _kmsKeyName?;
    get kmsKeyName(): string;
    set kmsKeyName(value: string);
    resetKmsKeyName(): void;
    get kmsKeyNameInput(): string | undefined;
    private _machineType?;
    get machineType(): string;
    set machineType(value: string);
    resetMachineType(): void;
    get machineTypeInput(): string | undefined;
    private _maxWorkers?;
    get maxWorkers(): number;
    set maxWorkers(value: number);
    resetMaxWorkers(): void;
    get maxWorkersInput(): number | undefined;
    private _network?;
    get network(): string;
    set network(value: string);
    resetNetwork(): void;
    get networkInput(): string | undefined;
    private _numWorkers?;
    get numWorkers(): number;
    set numWorkers(value: number);
    resetNumWorkers(): void;
    get numWorkersInput(): number | undefined;
    private _serviceAccountEmail?;
    get serviceAccountEmail(): string;
    set serviceAccountEmail(value: string);
    resetServiceAccountEmail(): void;
    get serviceAccountEmailInput(): string | undefined;
    private _subnetwork?;
    get subnetwork(): string;
    set subnetwork(value: string);
    resetSubnetwork(): void;
    get subnetworkInput(): string | undefined;
    private _tempLocation?;
    get tempLocation(): string;
    set tempLocation(value: string);
    resetTempLocation(): void;
    get tempLocationInput(): string | undefined;
    private _workerRegion?;
    get workerRegion(): string;
    set workerRegion(value: string);
    resetWorkerRegion(): void;
    get workerRegionInput(): string | undefined;
    private _workerZone?;
    get workerZone(): string;
    set workerZone(value: string);
    resetWorkerZone(): void;
    get workerZoneInput(): string | undefined;
    private _zone?;
    get zone(): string;
    set zone(value: string);
    resetZone(): void;
    get zoneInput(): string | undefined;
}
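/*
 * A sketch of an execution environment for a Flex Template launch. All values
 * are illustrative assumptions; only fields set here are sent to the service.
 *
 *   const flexEnv: DataPipelinePipelineWorkloadDataflowFlexTemplateRequestLaunchParameterEnvironment = {
 *       numWorkers: 2,                        // initial worker count
 *       maxWorkers: 10,                       // allowed range is 1 to 1000
 *       tempLocation: 'gs://my-bucket/tmp',   // must begin with gs://
 *       ipConfiguration: 'WORKER_IP_PRIVATE',
 *       additionalUserLabels: { team: 'data' },
 *   };
 */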
export interface DataPipelinePipelineWorkloadDataflowFlexTemplateRequestLaunchParameter {
    /**
     * Cloud Storage path to a file with a JSON-serialized ContainerSpec as content.
     * Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/data_pipeline_pipeline#container_spec_gcs_path DataPipelinePipeline#container_spec_gcs_path}
     */
    readonly containerSpecGcsPath?: string;
    /**
     * The job name to use for the created job. For an update job request, the job name should be the same as the existing running job.
     * Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/data_pipeline_pipeline#job_name DataPipelinePipeline#job_name}
     */
    readonly jobName: string;
    /**
     * Launch options for this Flex Template job. This is a common set of options across languages and templates. This should not be used to pass job parameters.
     * An object containing a list of "key": value pairs. Example: { "name": "wrench", "mass": "1.3kg", "count": "3" }.
     * Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/data_pipeline_pipeline#launch_options DataPipelinePipeline#launch_options}
     */
    readonly launchOptions?: {
        [key: string]: string;
    };
    /**
     * The parameters for the Flex Template. Example: {"numWorkers":"5"}.
     * An object containing a list of "key": value pairs.
     * Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/data_pipeline_pipeline#parameters DataPipelinePipeline#parameters}
     */
    readonly parameters?: {
        [key: string]: string;
    };
    /**
     * Use this to pass transform name mappings for streaming update jobs. Example: {"oldTransformName":"newTransformName",...}.
     * An object containing a list of "key": value pairs.
     * Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/data_pipeline_pipeline#transform_name_mappings DataPipelinePipeline#transform_name_mappings}
     */
    readonly transformNameMappings?: {
        [key: string]: string;
    };
    /**
     * Set this to true if you are sending a request to update a running streaming job. When set, the job name should be the same as the running job.
     * Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/data_pipeline_pipeline#update DataPipelinePipeline#update}
     */
    readonly update?: boolean | cdktf.IResolvable;
    /**
     * environment block
     * Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/data_pipeline_pipeline#environment DataPipelinePipeline#environment}
     */
    readonly environment?: DataPipelinePipelineWorkloadDataflowFlexTemplateRequestLaunchParameterEnvironment;
}
export declare function dataPipelinePipelineWorkloadDataflowFlexTemplateRequestLaunchParameterToTerraform(struct?: DataPipelinePipelineWorkloadDataflowFlexTemplateRequestLaunchParameterOutputReference | DataPipelinePipelineWorkloadDataflowFlexTemplateRequestLaunchParameter): any;
export declare function dataPipelinePipelineWorkloadDataflowFlexTemplateRequestLaunchParameterToHclTerraform(struct?: DataPipelinePipelineWorkloadDataflowFlexTemplateRequestLaunchParameterOutputReference | DataPipelinePipelineWorkloadDataflowFlexTemplateRequestLaunchParameter): any;
export declare class DataPipelinePipelineWorkloadDataflowFlexTemplateRequestLaunchParameterOutputReference extends cdktf.ComplexObject {
    private isEmptyObject;
    /**
     * @param terraformResource The parent resource
     * @param terraformAttribute The attribute on the parent resource this class is referencing
     */
    constructor(terraformResource: cdktf.IInterpolatingParent, terraformAttribute: string);
    get internalValue(): DataPipelinePipelineWorkloadDataflowFlexTemplateRequestLaunchParameter | undefined;
    set internalValue(value: DataPipelinePipelineWorkloadDataflowFlexTemplateRequestLaunchParameter | undefined);
    private _containerSpecGcsPath?;
    get containerSpecGcsPath(): string;
    set containerSpecGcsPath(value: string);
    resetContainerSpecGcsPath(): void;
    get containerSpecGcsPathInput(): string | undefined;
    private _jobName?;
    get jobName(): string;
    set jobName(value: string);
    get jobNameInput(): string | undefined;
    private _launchOptions?;
    get launchOptions(): {
        [key: string]: string;
    };
    set launchOptions(value: {
        [key: string]: string;
    });
    resetLaunchOptions(): void;
    get launchOptionsInput(): {
        [key: string]: string;
    } | undefined;
    private _parameters?;
    get parameters(): {
        [key: string]: string;
    };
    set parameters(value: {
        [key: string]: string;
    });
    resetParameters(): void;
    get parametersInput(): {
        [key: string]: string;
    } | undefined;
    private _transformNameMappings?;
    get transformNameMappings(): {
        [key: string]: string;
    };
    set transformNameMappings(value: {
        [key: string]: string;
    });
    resetTransformNameMappings(): void;
    get transformNameMappingsInput(): {
        [key: string]: string;
    } | undefined;
    private _update?;
    get update(): boolean | cdktf.IResolvable;
    set update(value: boolean | cdktf.IResolvable);
    resetUpdate(): void;
    get updateInput(): boolean | cdktf.IResolvable | undefined;
    private _environment;
    get environment(): DataPipelinePipelineWorkloadDataflowFlexTemplateRequestLaunchParameterEnvironmentOutputReference;
    putEnvironment(value: DataPipelinePipelineWorkloadDataflowFlexTemplateRequestLaunchParameterEnvironment): void;
    resetEnvironment(): void;
    get environmentInput(): DataPipelinePipelineWorkloadDataflowFlexTemplateRequestLaunchParameterEnvironment | undefined;
}
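/*
 * A sketch of the launch parameters for a Flex Template job, reusing the flexEnv
 * sketch above. Paths and parameter names are illustrative assumptions; template
 * parameters are always string-valued.
 *
 *   const launchParameter: DataPipelinePipelineWorkloadDataflowFlexTemplateRequestLaunchParameter = {
 *       jobName: 'nightly-flex-job',
 *       containerSpecGcsPath: 'gs://my-bucket/templates/spec.json',
 *       parameters: { numWorkers: '5' },
 *       environment: flexEnv,
 *   };
 */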
export interface DataPipelinePipelineWorkloadDataflowFlexTemplateRequest {
    /**
     * The regional endpoint to which to direct the request. For example, us-central1, us-west1.
     * Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/data_pipeline_pipeline#location DataPipelinePipeline#location}
     */
    readonly location: string;
    /**
     * The ID of the Cloud Platform project that the job belongs to.
     * Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/data_pipeline_pipeline#project_id DataPipelinePipeline#project_id}
     */
    readonly projectId: string;
    /**
     * If true, the request is validated but not actually executed. Defaults to false.
     * Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/data_pipeline_pipeline#validate_only DataPipelinePipeline#validate_only}
     */
    readonly validateOnly?: boolean | cdktf.IResolvable;
    /**
     * launch_parameter block
     * Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/data_pipeline_pipeline#launch_parameter DataPipelinePipeline#launch_parameter}
     */
    readonly launchParameter: DataPipelinePipelineWorkloadDataflowFlexTemplateRequestLaunchParameter;
}
export declare function dataPipelinePipelineWorkloadDataflowFlexTemplateRequestToTerraform(struct?: DataPipelinePipelineWorkloadDataflowFlexTemplateRequestOutputReference | DataPipelinePipelineWorkloadDataflowFlexTemplateRequest): any;
export declare function dataPipelinePipelineWorkloadDataflowFlexTemplateRequestToHclTerraform(struct?: DataPipelinePipelineWorkloadDataflowFlexTemplateRequestOutputReference | DataPipelinePipelineWorkloadDataflowFlexTemplateRequest): any;
export declare class DataPipelinePipelineWorkloadDataflowFlexTemplateRequestOutputReference extends cdktf.ComplexObject {
    private isEmptyObject;
    /**
     * @param terraformResource The parent resource
     * @param terraformAttribute The attribute on the parent resource this class is referencing
     */
    constructor(terraformResource: cdktf.IInterpolatingParent, terraformAttribute: string);
    get internalValue(): DataPipelinePipelineWorkloadDataflowFlexTemplateRequest | undefined;
    set internalValue(value: DataPipelinePipelineWorkloadDataflowFlexTemplateRequest | undefined);
    private _location?;
    get location(): string;
    set location(value: string);
    get locationInput(): string | undefined;
    private _projectId?;
    get projectId(): string;
    set projectId(value: string);
    get projectIdInput(): string | undefined;
    private _validateOnly?;
    get validateOnly(): boolean | cdktf.IResolvable;
    set validateOnly(value: boolean | cdktf.IResolvable);
    resetValidateOnly(): void;
    get validateOnlyInput(): boolean | cdktf.IResolvable | undefined;
    private _launchParameter;
    get launchParameter(): DataPipelinePipelineWorkloadDataflowFlexTemplateRequestLaunchParameterOutputReference;
    putLaunchParameter(value: DataPipelinePipelineWorkloadDataflowFlexTemplateRequestLaunchParameter): void;
    get launchParameterInput(): DataPipelinePipelineWorkloadDataflowFlexTemplateRequestLaunchParameter | undefined;
}
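/*
 * A sketch tying the Flex Template request together, reusing launchParameter from
 * above. The DataPipelinePipelineWorkload wrapper is declared later in this file
 * (outside this excerpt); the dataflowFlexTemplateRequest key shown in the comment
 * is an assumption based on the type names here.
 *
 *   const flexRequest: DataPipelinePipelineWorkloadDataflowFlexTemplateRequest = {
 *       projectId: 'my-project',
 *       location: 'us-central1',   // regional endpoint
 *       launchParameter,
 *   };
 *   // In the pipeline config: workload: { dataflowFlexTemplateRequest: flexRequest }
 */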
export interface DataPipelinePipelineWorkloadDataflowLaunchTemplateRequestLaunchParametersEnvironment {
    /**
     * Additional experiment flags for the job.
     * Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/data_pipeline_pipeline#additional_experiments DataPipelinePipeline#additional_experiments}
     */
    readonly additionalExperiments?: string[];
    /**
     * Additional user labels to be specified for the job. Keys and values should follow the restrictions specified in the labeling restrictions page.
     * An object containing a list of "key": value pairs. Example: { "name": "wrench", "mass": "1kg", "count": "3" }.
     * Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/data_pipeline_pipeline#additional_user_labels DataPipelinePipeline#additional_user_labels}
     */
    readonly additionalUserLabels?: {
        [key: string]: string;
    };
    /**
     * Whether to bypass the safety checks for the job's temporary directory. Use with caution.
     * Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/data_pipeline_pipeline#bypass_temp_dir_validation DataPipelinePipeline#bypass_temp_dir_validation}
     */
    readonly bypassTempDirValidation?: boolean | cdktf.IResolvable;
    /**
     * Whether to enable Streaming Engine for the job.
     * Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/data_pipeline_pipeline#enable_streaming_engine DataPipelinePipeline#enable_streaming_engine}
     */
    readonly enableStreamingEngine?: boolean | cdktf.IResolvable;
    /**
     * Configuration for VM IPs.
     * https://cloud.google.com/dataflow/docs/reference/data-pipelines/rest/v1/projects.locations.pipelines#WorkerIPAddressConfiguration Possible values: ["WORKER_IP_UNSPECIFIED", "WORKER_IP_PUBLIC", "WORKER_IP_PRIVATE"]
     * Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/data_pipeline_pipeline#ip_configuration DataPipelinePipeline#ip_configuration}
     */
    readonly ipConfiguration?: string;
    /**
     * Name for the Cloud KMS key for the job. The key format is: projects/PROJECT_ID/locations/LOCATION_ID/keyRings/KEY_RING_ID/cryptoKeys/KEY_ID
     * Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/data_pipeline_pipeline#kms_key_name DataPipelinePipeline#kms_key_name}
     */
    readonly kmsKeyName?: string;
    /**
     * The machine type to use for the job. Defaults to the value from the template if not specified.
     * Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/data_pipeline_pipeline#machine_type DataPipelinePipeline#machine_type}
     */
    readonly machineType?: string;
    /**
     * The maximum number of Compute Engine instances to be made available to your pipeline during execution, from 1 to 1000.
     * Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/data_pipeline_pipeline#max_workers DataPipelinePipeline#max_workers}
     */
    readonly maxWorkers?: number;
    /**
     * Network to which VMs will be assigned. If empty or unspecified, the service will use the network "default".
     * Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/data_pipeline_pipeline#network DataPipelinePipeline#network}
     */
    readonly network?: string;
    /**
     * The initial number of Compute Engine instances for the job.
     * Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/data_pipeline_pipeline#num_workers DataPipelinePipeline#num_workers}
     */
    readonly numWorkers?: number;
    /**
     * The email address of the service account to run the job as.
     * Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/data_pipeline_pipeline#service_account_email DataPipelinePipeline#service_account_email}
     */
    readonly serviceAccountEmail?: string;
    /**
     * Subnetwork to which VMs will be assigned, if desired. You can specify a subnetwork using either a complete URL or an abbreviated path. Expected to be of the form "https://www.googleapis.com/compute/v1/projects/HOST_PROJECT_ID/regions/REGION/subnetworks/SUBNETWORK" or "regions/REGION/subnetworks/SUBNETWORK". If the subnetwork is located in a Shared VPC network, you must use the complete URL.
     * Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/data_pipeline_pipeline#subnetwork DataPipelinePipeline#subnetwork}
     */
    readonly subnetwork?: string;
    /**
     * The Cloud Storage path to use for temporary files. Must be a valid Cloud Storage URL, beginning with gs://.
     * Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/data_pipeline_pipeline#temp_location DataPipelinePipeline#temp_location}
     */
    readonly tempLocation?: string;
    /**
     * The Compute Engine region (https://cloud.google.com/compute/docs/regions-zones/regions-zones) in which worker processing should occur, e.g. "us-west1". Mutually exclusive with workerZone. If neither workerRegion nor workerZone is specified, defaults to the control plane's region.
     * Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/data_pipeline_pipeline#worker_region DataPipelinePipeline#worker_region}
     */
    readonly workerRegion?: string;
    /**
     * The Compute Engine zone (https://cloud.google.com/compute/docs/regions-zones/regions-zones) in which worker processing should occur, e.g. "us-west1-a". Mutually exclusive with workerRegion. If neither workerRegion nor workerZone is specified, a zone in the control plane's region is chosen based on available capacity. If both workerZone and zone are set, workerZone takes precedence.
     * Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/data_pipeline_pipeline#worker_zone DataPipelinePipeline#worker_zone}
     */
    readonly workerZone?: string;
    /**
     * The Compute Engine availability zone for launching worker instances to run your pipeline. In the future, workerZone will take precedence.
     * Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/data_pipeline_pipeline#zone DataPipelinePipeline#zone}
     */
    readonly zone?: string;
}
export declare function dataPipelinePipelineWorkloadDataflowLaunchTemplateRequestLaunchParametersEnvironmentToTerraform(struct?: DataPipelinePipelineWorkloadDataflowLaunchTemplateRequestLaunchParametersEnvironmentOutputReference | DataPipelinePipelineWorkloadDataflowLaunchTemplateRequestLaunchParametersEnvironment): any;
export declare function dataPipelinePipelineWorkloadDataflowLaunchTemplateRequestLaunchParametersEnvironmentToHclTerraform(struct?: DataPipelinePipelineWorkloadDataflowLaunchTemplateRequestLaunchParametersEnvironmentOutputReference | DataPipelinePipelineWorkloadDataflowLaunchTemplateRequestLaunchParametersEnvironment): any;
export declare class DataPipelinePipelineWorkloadDataflowLaunchTemplateRequestLaunchParametersEnvironmentOutputReference extends cdktf.ComplexObject {
    private isEmptyObject;
    /**
     * @param terraformResource The parent resource
     * @param terraformAttribute The attribute on the parent resource this class is referencing
     */
    constructor(terraformResource: cdktf.IInterpolatingParent, terraformAttribute: string);
    get internalValue(): DataPipelinePipelineWorkloadDataflowLaunchTemplateRequestLaunchParametersEnvironment | undefined;
    set internalValue(value: DataPipelinePipelineWorkloadDataflowLaunchTemplateRequestLaunchParametersEnvironment | undefined);
    private _additionalExperiments?;
    get additionalExperiments(): string[];
    set additionalExperiments(value: string[]);
    resetAdditionalExperiments(): void;
    get additionalExperimentsInput(): string[] | undefined;
    private _additionalUserLabels?;
    get additionalUserLabels(): {
        [key: string]: string;
    };
    set additionalUserLabels(value: {
        [key: string]: string;
    });
    resetAdditionalUserLabels(): void;
    get additionalUserLabelsInput(): {
        [key: string]: string;
    } | undefined;
    private _bypassTempDirValidation?;
    get bypassTempDirValidation(): boolean | cdktf.IResolvable;
    set bypassTempDirValidation(value: boolean | cdktf.IResolvable);
    resetBypassTempDirValidation(): void;
    get bypassTempDirValidationInput(): boolean | cdktf.IResolvable | undefined;
    private _enableStreamingEngine?;
    get enableStreamingEngine(): boolean | cdktf.IResolvable;
    set enableStreamingEngine(value: boolean | cdktf.IResolvable);
    resetEnableStreamingEngine(): void;
    get enableStreamingEngineInput(): boolean | cdktf.IResolvable | undefined;
    private _ipConfiguration?;
    get ipConfiguration(): string;
    set ipConfiguration(value: string);
    resetIpConfiguration(): void;
    get ipConfigurationInput(): string | undefined;
    private _kmsKeyName?;
    get kmsKeyName(): string;
    set kmsKeyName(value: string);
    resetKmsKeyName(): void;
    get kmsKeyNameInput(): string | undefined;
    private _machineType?;
    get machineType(): string;
    set machineType(value: string);
    resetMachineType(): void;
    get machineTypeInput(): string | undefined;
    private _maxWorkers?;
    get maxWorkers(): number;
    set maxWorkers(value: number);
    resetMaxWorkers(): void;
    get maxWorkersInput(): number | undefined;
    private _network?;
    get network(): string;
    set network(value: string);
    resetNetwork(): void;
    get networkInput(): string | undefined;
    private _numWorkers?;
    get numWorkers(): number;
    set numWorkers(value: number);
    resetNumWorkers(): void;
    get numWorkersInput(): number | undefined;
    private _serviceAccountEmail?;
    get serviceAccountEmail(): string;
    set serviceAccountEmail(value: string);
    resetServiceAccountEmail(): void;
    get serviceAccountEmailInput(): string | undefined;
    private _subnetwork?;
    get subnetwork(): string;
    set subnetwork(value: string);
    resetSubnetwork(): void;
    get subnetworkInput(): string | undefined;
    private _tempLocation?;
    get tempLocation(): string;
    set tempLocation(value: string);
    resetTempLocation(): void;
    get tempLocationInput(): string | undefined;
    private _workerRegion?;
    get workerRegion(): string;
    set workerRegion(value: string);
    resetWorkerRegion(): void;
    get workerRegionInput(): string | undefined;
    private _workerZone?;
    get workerZone(): string;
    set workerZone(value: string);
    resetWorkerZone(): void;
    get workerZoneInput(): string | undefined;
    private _zone?;
    get zone(): string;
    set zone(value: string);
    resetZone(): void;
    get zoneInput(): string | undefined;
}
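/*
 * The classic-template environment mirrors the flex-template one shown earlier,
 * minus flexrsGoal and plus bypassTempDirValidation. A minimal sketch with
 * illustrative values:
 *
 *   const classicEnv: DataPipelinePipelineWorkloadDataflowLaunchTemplateRequestLaunchParametersEnvironment = {
 *       tempLocation: 'gs://my-bucket/tmp',
 *       bypassTempDirValidation: false,   // keep the temp-directory safety checks on
 *   };
 */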
export interface DataPipelinePipelineWorkloadDataflowLaunchTemplateRequestLaunchParameters {
    /**
     * The job name to use for the created job.
     * Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/data_pipeline_pipeline#job_name DataPipelinePipeline#job_name}
     */
    readonly jobName: string;
    /**
     * The runtime parameters to pass to the job.
     * An object containing a list of "key": value pairs. Example: { "name": "wrench", "mass": "1.3kg", "count": "3" }.
     * Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/data_pipeline_pipeline#parameters DataPipelinePipeline#parameters}
     */
    readonly parameters?: {
        [key: string]: string;
    };
    /**
     * Map of transform name prefixes of the job to be replaced to the corresponding name prefixes of the new job. Only applicable when updating a pipeline.
     * An object containing a list of "key": value pairs.
     * Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/data_pipeline_pipeline#transform_name_mapping DataPipelinePipeline#transform_name_mapping}
     */
    readonly transformNameMapping?: {
        [key: string]: string;
    };
    /**
     * If set, replace the existing pipeline with the name specified by jobName with this pipeline, preserving state.
     * Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/data_pipeline_pipeline#update DataPipelinePipeline#update}
     */
    readonly update?: boolean | cdktf.IResolvable;
    /**
     * environment block
     * Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/data_pipeline_pipeline#environment DataPipelinePipeline#environment}
     */
    readonly environment?: DataPipelinePipelineWorkloadDataflowLaunchTemplateRequestLaunchParametersEnvironment;
}
export declare function dataPipelinePipelineWorkloadDataflowLaunchTemplateRequestLaunchParametersToTerraform(struct?: DataPipelinePipelineWorkloadDataflowLaunchTemplateRequestLaunchParametersOutputReference | DataPipelinePipelineWorkloadDataflowLaunchTemplateRequestLaunchParameters): any;
export declare function dataPipelinePipelineWorkloadDataflowLaunchTemplateRequestLaunchParametersToHclTerraform(struct?: DataPipelinePipelineWorkloadDataflowLaunchTemplateRequestLaunchParametersOutputReference | DataPipelinePipelineWorkloadDataflowLaunchTemplateRequestLaunchParameters): any;
export declare class DataPipelinePipelineWorkloadDataflowLaunchTemplateRequestLaunchParametersOutputReference extends cdktf.ComplexObject {
    private isEmptyObject;
    /**
     * @param terraformResource The parent resource
     * @param terraformAttribute The attribute on the parent resource this class is referencing
     */
    constructor(terraformResource: cdktf.IInterpolatingParent, terraformAttribute: string);
    get internalValue(): DataPipelinePipelineWorkloadDataflowLaunchTemplateRequestLaunchParameters | undefined;
    set internalValue(value: DataPipelinePipelineWorkloadDataflowLaunchTemplateRequestLaunchParameters | undefined);
    private _jobName?;
    get jobName(): string;
    set jobName(value: string);
    get jobNameInput(): string | undefined;
    private _parameters?;
    get parameters(): {
        [key: string]: string;
    };
    set parameters(value: {
        [key: string]: string;
    });
    resetParameters(): void;
    get parametersInput(): {
        [key: string]: string;
    } | undefined;
    private _transformNameMapping?;
    get transformNameMapping(): {
        [key: string]: string;
    };
    set transformNameMapping(value: {
        [key: string]: string;
    });
    resetTransformNameMapping(): void;
    get transformNameMappingInput(): {
        [key: string]: string;
    } | undefined;
    private _update?;
    get update(): boolean | cdktf.IResolvable;
    set update(value: boolean | cdktf.IResolvable);
    resetUpdate(): void;
    get updateInput(): boolean | cdktf.IResolvable | undefined;
    private _environment;
    get environment(): DataPipelinePipelineWorkloadDataflowLaunchTemplateRequestLaunchParametersEnvironmentOutputReference;
    putEnvironment(value: DataPipelinePipelineWorkloadDataflowLaunchTemplateRequestLaunchParametersEnvironment): void;
    resetEnvironment(): void;
    get environmentInput(): DataPipelinePipelineWorkloadDataflowLaunchTemplateRequestLaunchParametersEnvironment | undefined;
}
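/*
 * A sketch of classic-template launch parameters, reusing classicEnv from above.
 * Job name, parameter keys, and paths are illustrative assumptions.
 *
 *   const launchParameters: DataPipelinePipelineWorkloadDataflowLaunchTemplateRequestLaunchParameters = {
 *       jobName: 'nightly-classic-job',
 *       parameters: { inputFile: 'gs://my-bucket/input.csv' },
 *       environment: classicEnv,
 *   };
 */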
export interface DataPipelinePipelineWorkloadDataflowLaunchTemplateRequest {
    /**
     * A Cloud Storage path to the template from which to create the job. Must be a valid Cloud Storage URL, beginning with 'gs://'.
     * Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/data_pipeline_pipeline#gcs_path DataPipelinePipeline#gcs_path}
     */
    readonly gcsPath?: string;
    /**
     * The regional endpoint to which to direct the request.
     * Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/data_pipeline_pipeline#location DataPipelinePipeline#location}
     */
    readonly location?: string;
    /**
     * The ID of the Cloud Platform project that the job belongs to.
     * Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/data_pipeline_pipeline#project_id DataPipelinePipeline#project_id}
     */
    readonly projectId: string;
    /**
     * Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/data_pipeline_pipeline#validate_only DataPipelinePipeline#validate_only}
     */
    readonly validateOnly?: boolean | cdktf.IResolvable;
    /**
     * launch_parameters block
     * Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/data_pipeline_pipeline#launch_parameters DataPipelinePipeline#launch_parameters}
     */
    readonly launchParameters?: DataPipelinePipelineWorkloadDataflowLaunchTemplateRequestLaunchParameters;
}
export declare function dataPipelinePipelineWorkloadDataflowLaunchTemplateRequestToTerraform(struct?: DataPipelinePipelineWorkloadDataflowLaunchTemplateRequestOutputReference | DataPipelinePipelineWorkloadDataflowLaunchTemplateRequest): any;
export declare function dataPipelinePipelineWorkloadDataflowLaunchTemplateRequestToHclTerraform(struct?: DataPipelinePipelineWorkloadDataflowLaunchTemplateRequestOutputReference | DataPipelinePipelineWorkloadDataflowLaunchTemplateRequest): any;
export declare class DataPipelinePipelineWorkloadDataflowLaunchTemplateRequestOutputReference extends cdktf.ComplexObject {
    private isEmptyObject;
    /**
     * @param terraformResource The parent resource
     * @param terraformAttribute The attribute on the parent resource this class is referencing
     */
    constructor(terraformResource: cdktf.IInterpolatingParent, terraformAttribute: string);
    get internalValue(): DataPipelinePipelineWorkloadDataflowLaunchTemplateRequest | undefined;
    set internalValue(value: DataPipelinePipelineWorkloadDataflowLaunchTemplateRequest | undefined);
    private _gcsPath?;
    get gcsPath(): string;
    set gcsPath(value: string);
    resetGcsPath(): void;
    get gcsPathInput(): string | undefined;
    private _location?;
    get location(): string;
    set location(value: string);
    resetLocation(): void;
    get locationInput(): string | undefined;
    private _projectId?;
    get projectId(): string;
    set projectId(value: string);
    get projectIdInput(): string | undefined;
    private _validateOnly?;
    get validateOnly(): boolean | cdktf.IResolvable;
    set validateOnly(value: boolean | cdktf.IResolvable);
    resetValidateOnly(): void;
    get validateOnlyInput(): boolean | cdktf.IResolvable | undefined;
    private _launchParameters;
    get launchParameters(): DataPipelinePipelineWorkloadDataflowLaunchTemplateRequestLaunchParametersOutputReference;
    putLaunchParameters(value: DataPipelinePipelineWorkloadDataflowLaunchTemplateRequestLaunchParameters): void;
    resetLaunchParameters(): void;
    get launchParametersInput(): DataPipelinePipelineWorkloadDataflowLaunchTemplateRequestLaunchParameters | undefined;
}
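/*
 * A sketch tying the classic-template request together, reusing launchParameters
 * from above. As with the flex example, the dataflowLaunchTemplateRequest key in
 * the comment is an assumption based on the type names; the workload wrapper is
 * declared later in this file.
 *
 *   const launchTemplateRequest: DataPipelinePipelineWorkloadDataflowLaunchTemplateRequest = {
 *       projectId: 'my-project',
 *       gcsPath: 'gs://my-bucket/templates/classic-template',
 *       location: 'us-central1',
 *       launchParameters,
 *   };
 *   // In the pipeline config: workload: { dataflowLaunchTemplateRequest: launchTemplateRequest }
 */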