@cdktf/provider-google
Prebuilt google Provider for Terraform CDK (cdktf)
/**
* Copyright (c) HashiCorp, Inc.
* SPDX-License-Identifier: MPL-2.0
*/
import { Construct } from 'constructs';
import * as cdktf from 'cdktf';
export interface DataflowJobConfig extends cdktf.TerraformMetaArguments {
/**
* List of experiments that should be used by the job. An example value is ["enable_stackdriver_agent_metrics"].
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/dataflow_job#additional_experiments DataflowJob#additional_experiments}
*/
readonly additionalExperiments?: string[];
/**
* Indicates if the job should use the streaming engine feature.
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/dataflow_job#enable_streaming_engine DataflowJob#enable_streaming_engine}
*/
readonly enableStreamingEngine?: boolean | cdktf.IResolvable;
/**
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/dataflow_job#id DataflowJob#id}
*
* Note that the id field is automatically added to all resources in Terraform providers built with a Terraform provider SDK below version 2.
* If you experience problems setting this value, it might not be settable; check the provider documentation to confirm whether it can be set.
*/
readonly id?: string;
/**
* The configuration for VM IPs. Options are "WORKER_IP_PUBLIC" or "WORKER_IP_PRIVATE".
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/dataflow_job#ip_configuration DataflowJob#ip_configuration}
*/
readonly ipConfiguration?: string;
/**
* The name for the Cloud KMS key for the job. Key format is: projects/PROJECT_ID/locations/LOCATION/keyRings/KEY_RING/cryptoKeys/KEY
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/dataflow_job#kms_key_name DataflowJob#kms_key_name}
*/
readonly kmsKeyName?: string;
/**
* User labels to be specified for the job. Keys and values should follow the restrictions specified in the labeling restrictions page. NOTE: This field is non-authoritative, and will only manage the labels present in your configuration.
* Please refer to the field 'effective_labels' for all of the labels present on the resource.
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/dataflow_job#labels DataflowJob#labels}
*/
readonly labels?: {
[key: string]: string;
};
/**
* The machine type to use for the job.
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/dataflow_job#machine_type DataflowJob#machine_type}
*/
readonly machineType?: string;
/**
* The number of workers permitted to work on the job. More workers may improve processing speed at additional cost.
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/dataflow_job#max_workers DataflowJob#max_workers}
*/
readonly maxWorkers?: number;
/**
* A unique name for the resource, required by Dataflow.
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/dataflow_job#name DataflowJob#name}
*/
readonly name: string;
/**
* The network to which VMs will be assigned. If it is not provided, "default" will be used.
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/dataflow_job#network DataflowJob#network}
*/
readonly network?: string;
/**
* One of "drain" or "cancel". Specifies the deletion behavior when the job is destroyed via terraform destroy.
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/dataflow_job#on_delete DataflowJob#on_delete}
*/
readonly onDelete?: string;
/**
* Key/Value pairs to be passed to the Dataflow job (as used in the template).
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/dataflow_job#parameters DataflowJob#parameters}
*/
readonly parameters?: {
[key: string]: string;
};
/**
* The project in which the resource belongs.
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/dataflow_job#project DataflowJob#project}
*/
readonly project?: string;
/**
* The region in which the created job should run.
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/dataflow_job#region DataflowJob#region}
*/
readonly region?: string;
/**
* The Service Account email used to create the job.
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/dataflow_job#service_account_email DataflowJob#service_account_email}
*/
readonly serviceAccountEmail?: string;
/**
* If true, treat DRAINING and CANCELLING as terminal job states and do not wait for further changes before removing from terraform state and moving on. WARNING: this will lead to job name conflicts if you do not ensure that the job names are different, e.g. by embedding a release ID or by using a random_id.
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/dataflow_job#skip_wait_on_job_termination DataflowJob#skip_wait_on_job_termination}
*/
readonly skipWaitOnJobTermination?: boolean | cdktf.IResolvable;
/**
* The subnetwork to which VMs will be assigned. Should be of the form "regions/REGION/subnetworks/SUBNETWORK".
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/dataflow_job#subnetwork DataflowJob#subnetwork}
*/
readonly subnetwork?: string;
/**
* A writeable location on Google Cloud Storage for the Dataflow job to dump its temporary data.
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/dataflow_job#temp_gcs_location DataflowJob#temp_gcs_location}
*/
readonly tempGcsLocation: string;
/**
* The Google Cloud Storage path to the Dataflow job template.
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/dataflow_job#template_gcs_path DataflowJob#template_gcs_path}
*/
readonly templateGcsPath: string;
/**
* Only applicable when updating a pipeline. Map of transform name prefixes of the job to be replaced with the corresponding name prefixes of the new job.
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/dataflow_job#transform_name_mapping DataflowJob#transform_name_mapping}
*/
readonly transformNameMapping?: {
[key: string]: string;
};
/**
* The zone in which the created job should run. If it is not provided, the provider zone is used.
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/dataflow_job#zone DataflowJob#zone}
*/
readonly zone?: string;
/**
* timeouts block
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/dataflow_job#timeouts DataflowJob#timeouts}
*/
readonly timeouts?: DataflowJobTimeouts;
}
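/*
 * A minimal DataflowJobConfig sketch (illustrative only: the bucket names, template
 * path, and parameter values below are assumptions, not defaults of this package):
 *
 *   const config: DataflowJobConfig = {
 *     name: "wordcount-example",                                    // required
 *     templateGcsPath: "gs://dataflow-templates/latest/Word_Count", // required: path to the job template
 *     tempGcsLocation: "gs://my-example-bucket/tmp",                // required: scratch GCS location
 *     parameters: {
 *       inputFile: "gs://my-example-bucket/input.txt",
 *       output: "gs://my-example-bucket/output",
 *     },
 *     maxWorkers: 2,
 *     onDelete: "drain",
 *     labels: { team: "data-eng" },
 *   };
 */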
export interface DataflowJobTimeouts {
/**
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/dataflow_job#update DataflowJob#update}
*/
readonly update?: string;
}
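/*
 * The update timeout is a Terraform duration string, e.g. { update: "40m" } to allow
 * 40 minutes for in-place job updates (the value is an example, not a default).
 */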
export declare function dataflowJobTimeoutsToTerraform(struct?: DataflowJobTimeouts | cdktf.IResolvable): any;
export declare function dataflowJobTimeoutsToHclTerraform(struct?: DataflowJobTimeouts | cdktf.IResolvable): any;
export declare class DataflowJobTimeoutsOutputReference extends cdktf.ComplexObject {
private isEmptyObject;
private resolvableValue?;
/**
* @param terraformResource The parent resource
* @param terraformAttribute The attribute on the parent resource this class is referencing
*/
constructor(terraformResource: cdktf.IInterpolatingParent, terraformAttribute: string);
get internalValue(): DataflowJobTimeouts | cdktf.IResolvable | undefined;
set internalValue(value: DataflowJobTimeouts | cdktf.IResolvable | undefined);
private _update?;
get update(): string;
set update(value: string);
resetUpdate(): void;
get updateInput(): string | undefined;
}
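/*
 * This output reference is normally reached through DataflowJob#timeouts rather than
 * constructed directly. A sketch, assuming "job" is an existing DataflowJob instance:
 *
 *   job.putTimeouts({ update: "30m" });         // set or replace the timeouts block
 *   const updateTimeout = job.timeouts.update;  // read the configured value back
 *   job.resetTimeouts();                        // remove the block entirely
 */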
/**
* Represents a {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/dataflow_job google_dataflow_job}
*/
export declare class DataflowJob extends cdktf.TerraformResource {
static readonly tfResourceType = "google_dataflow_job";
/**
* Generates CDKTF code for importing a DataflowJob resource upon running "cdktf plan <stack-name>"
* @param scope The scope in which to define this construct
* @param importToId The construct id used in the generated config for the DataflowJob to import
* @param importFromId The id of the existing DataflowJob that should be imported. Refer to the {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/dataflow_job#import import section} in the documentation of this resource for the id to use
* @param provider Optional instance of the provider where the DataflowJob to import is found
*/
static generateConfigForImport(scope: Construct, importToId: string, importFromId: string, provider?: cdktf.TerraformProvider): cdktf.ImportableResource;
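/*
 * Sketch of generating import configuration from inside a stack (the construct id and
 * the existing-job id are placeholders; see the provider's import docs for the id format):
 *
 *   DataflowJob.generateConfigForImport(this, "imported-job", "<existing DataflowJob id>");
 */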
/**
* Create a new {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/dataflow_job google_dataflow_job} Resource
*
* @param scope The scope in which to define this construct
* @param id The scoped construct ID. Must be unique amongst siblings in the same scope
* @param config DataflowJobConfig
*/
constructor(scope: Construct, id: string, config: DataflowJobConfig);
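/*
 * A minimal construction sketch (MyStack, the GoogleProvider wiring, and the config
 * literal from the DataflowJobConfig sketch above are assumptions for illustration):
 *
 *   class MyStack extends cdktf.TerraformStack {
 *     constructor(scope: Construct, id: string) {
 *       super(scope, id);
 *       new GoogleProvider(this, "google", { project: "my-example-project" }); // provider from this package
 *       new DataflowJob(this, "wordcount", config);
 *     }
 *   }
 */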
private _additionalExperiments?;
get additionalExperiments(): string[];
set additionalExperiments(value: string[]);
resetAdditionalExperiments(): void;
get additionalExperimentsInput(): string[] | undefined;
private _effectiveLabels;
get effectiveLabels(): cdktf.StringMap;
private _enableStreamingEngine?;
get enableStreamingEngine(): boolean | cdktf.IResolvable;
set enableStreamingEngine(value: boolean | cdktf.IResolvable);
resetEnableStreamingEngine(): void;
get enableStreamingEngineInput(): boolean | cdktf.IResolvable | undefined;
private _id?;
get id(): string;
set id(value: string);
resetId(): void;
get idInput(): string | undefined;
private _ipConfiguration?;
get ipConfiguration(): string;
set ipConfiguration(value: string);
resetIpConfiguration(): void;
get ipConfigurationInput(): string | undefined;
get jobId(): string;
private _kmsKeyName?;
get kmsKeyName(): string;
set kmsKeyName(value: string);
resetKmsKeyName(): void;
get kmsKeyNameInput(): string | undefined;
private _labels?;
get labels(): {
[key: string]: string;
};
set labels(value: {
[key: string]: string;
});
resetLabels(): void;
get labelsInput(): {
[key: string]: string;
} | undefined;
private _machineType?;
get machineType(): string;
set machineType(value: string);
resetMachineType(): void;
get machineTypeInput(): string | undefined;
private _maxWorkers?;
get maxWorkers(): number;
set maxWorkers(value: number);
resetMaxWorkers(): void;
get maxWorkersInput(): number | undefined;
private _name?;
get name(): string;
set name(value: string);
get nameInput(): string | undefined;
private _network?;
get network(): string;
set network(value: string);
resetNetwork(): void;
get networkInput(): string | undefined;
private _onDelete?;
get onDelete(): string;
set onDelete(value: string);
resetOnDelete(): void;
get onDeleteInput(): string | undefined;
private _parameters?;
get parameters(): {
[key: string]: string;
};
set parameters(value: {
[key: string]: string;
});
resetParameters(): void;
get parametersInput(): {
[key: string]: string;
} | undefined;
private _project?;
get project(): string;
set project(value: string);
resetProject(): void;
get projectInput(): string | undefined;
private _region?;
get region(): string;
set region(value: string);
resetRegion(): void;
get regionInput(): string | undefined;
private _serviceAccountEmail?;
get serviceAccountEmail(): string;
set serviceAccountEmail(value: string);
resetServiceAccountEmail(): void;
get serviceAccountEmailInput(): string | undefined;
private _skipWaitOnJobTermination?;
get skipWaitOnJobTermination(): boolean | cdktf.IResolvable;
set skipWaitOnJobTermination(value: boolean | cdktf.IResolvable);
resetSkipWaitOnJobTermination(): void;
get skipWaitOnJobTerminationInput(): boolean | cdktf.IResolvable | undefined;
get state(): string;
private _subnetwork?;
get subnetwork(): string;
set subnetwork(value: string);
resetSubnetwork(): void;
get subnetworkInput(): string | undefined;
private _tempGcsLocation?;
get tempGcsLocation(): string;
set tempGcsLocation(value: string);
get tempGcsLocationInput(): string | undefined;
private _templateGcsPath?;
get templateGcsPath(): string;
set templateGcsPath(value: string);
get templateGcsPathInput(): string | undefined;
private _terraformLabels;
get terraformLabels(): cdktf.StringMap;
private _transformNameMapping?;
get transformNameMapping(): {
[key: string]: string;
};
set transformNameMapping(value: {
[key: string]: string;
});
resetTransformNameMapping(): void;
get transformNameMappingInput(): {
[key: string]: string;
} | undefined;
get type(): string;
private _zone?;
get zone(): string;
set zone(value: string);
resetZone(): void;
get zoneInput(): string | undefined;
private _timeouts;
get timeouts(): DataflowJobTimeoutsOutputReference;
putTimeouts(value: DataflowJobTimeouts): void;
resetTimeouts(): void;
get timeoutsInput(): cdktf.IResolvable | DataflowJobTimeouts | undefined;
protected synthesizeAttributes(): {
[name: string]: any;
};
protected synthesizeHclAttributes(): {
[name: string]: any;
};
}
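/*
 * Computed attributes such as jobId, state, and type can be surfaced as stack outputs
 * (a sketch, assuming "job" is the DataflowJob instance created above):
 *
 *   new cdktf.TerraformOutput(this, "dataflow-job-id", { value: job.jobId });
 *   new cdktf.TerraformOutput(this, "dataflow-job-state", { value: job.state });
 */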