// @cdktf/provider-google
// Version: (not specified in source header)
// Prebuilt google Provider for Terraform CDK (cdktf)
// 937 lines • 52.4 kB
// TypeScript
/**
* Copyright (c) HashiCorp, Inc.
* SPDX-License-Identifier: MPL-2.0
*/
import { Construct } from 'constructs';
import * as cdktf from 'cdktf';
/**
 * Configuration for the `google_dataplex_task` Terraform resource.
 *
 * Only the `executionSpec` and `triggerSpec` blocks are required; every other
 * property is optional. Extends the standard cdktf meta-arguments (provider,
 * lifecycle, dependsOn, ...).
 */
export interface DataplexTaskConfig extends cdktf.TerraformMetaArguments {
/**
* User-provided description of the task.
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/dataplex_task#description DataplexTask#description}
*/
readonly description?: string;
/**
* User friendly display name.
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/dataplex_task#display_name DataplexTask#display_name}
*/
readonly displayName?: string;
/**
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/dataplex_task#id DataplexTask#id}
*
* Please be aware that the id field is automatically added to all resources in Terraform providers using a Terraform provider SDK version below 2.
* If you experience problems setting this value it might not be settable. Please take a look at the provider documentation to ensure it should be settable.
*/
readonly id?: string;
/**
* User-defined labels for the task.
*
*
* **Note**: This field is non-authoritative, and will only manage the labels present in your configuration.
* Please refer to the field 'effective_labels' for all of the labels present on the resource.
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/dataplex_task#labels DataplexTask#labels}
*/
readonly labels?: {
[key: string]: string;
};
/**
* The lake in which the task will be created.
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/dataplex_task#lake DataplexTask#lake}
*/
readonly lake?: string;
/**
* The location in which the task will be created.
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/dataplex_task#location DataplexTask#location}
*/
readonly location?: string;
/**
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/dataplex_task#project DataplexTask#project}
*/
readonly project?: string;
/**
* The identifier (task_id) of the task.
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/dataplex_task#task_id DataplexTask#task_id}
*/
readonly taskId?: string;
/**
* execution_spec block (required).
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/dataplex_task#execution_spec DataplexTask#execution_spec}
*/
readonly executionSpec: DataplexTaskExecutionSpec;
/**
* notebook block
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/dataplex_task#notebook DataplexTask#notebook}
*/
readonly notebook?: DataplexTaskNotebook;
/**
* spark block
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/dataplex_task#spark DataplexTask#spark}
*/
readonly spark?: DataplexTaskSpark;
/**
* timeouts block
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/dataplex_task#timeouts DataplexTask#timeouts}
*/
readonly timeouts?: DataplexTaskTimeouts;
/**
* trigger_spec block (required).
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/dataplex_task#trigger_spec DataplexTask#trigger_spec}
*/
readonly triggerSpec: DataplexTaskTriggerSpec;
}
/**
 * Latest job run of the task. Declared empty because every attribute of this
 * block is computed (output-only); the values are exposed via the getter-only
 * OutputReference class below.
 */
export interface DataplexTaskExecutionStatusLatestJob {
}
/**
 * Serializes a {@link DataplexTaskExecutionStatusLatestJob} struct into its
 * Terraform JSON representation.
 */
export declare function dataplexTaskExecutionStatusLatestJobToTerraform(struct?: DataplexTaskExecutionStatusLatestJob): any;
/**
 * Serializes a {@link DataplexTaskExecutionStatusLatestJob} struct into its
 * HCL representation.
 */
export declare function dataplexTaskExecutionStatusLatestJobToHclTerraform(struct?: DataplexTaskExecutionStatusLatestJob): any;
/**
 * Read-only accessor for a single computed `latest_job` item. All getters
 * return Terraform attribute references resolved at synthesis time; there are
 * no setters because the block is output-only.
 */
export declare class DataplexTaskExecutionStatusLatestJobOutputReference extends cdktf.ComplexObject {
private isEmptyObject;
/**
* @param terraformResource The parent resource
* @param terraformAttribute The attribute on the parent resource this class is referencing
* @param complexObjectIndex the index of this item in the list
* @param complexObjectIsFromSet whether the list is wrapping a set (will add tolist() to be able to access an item via an index)
*/
constructor(terraformResource: cdktf.IInterpolatingParent, terraformAttribute: string, complexObjectIndex: number, complexObjectIsFromSet: boolean);
get internalValue(): DataplexTaskExecutionStatusLatestJob | undefined;
set internalValue(value: DataplexTaskExecutionStatusLatestJob | undefined);
get endTime(): string;
get message(): string;
get name(): string;
get retryCount(): number;
get service(): string;
get serviceJob(): string;
get startTime(): string;
get state(): string;
get uid(): string;
}
/**
 * Indexed list wrapper over the computed `latest_job` items.
 */
export declare class DataplexTaskExecutionStatusLatestJobList extends cdktf.ComplexList {
protected terraformResource: cdktf.IInterpolatingParent;
protected terraformAttribute: string;
protected wrapsSet: boolean;
/**
* @param terraformResource The parent resource
* @param terraformAttribute The attribute on the parent resource this class is referencing
* @param wrapsSet whether the list is wrapping a set (will add tolist() to be able to access an item via an index)
*/
constructor(terraformResource: cdktf.IInterpolatingParent, terraformAttribute: string, wrapsSet: boolean);
/**
* @param index the index of the item to return
*/
get(index: number): DataplexTaskExecutionStatusLatestJobOutputReference;
}
/**
 * Execution status of the task. Declared empty because the block is computed
 * (output-only); values are exposed via the getter-only OutputReference below.
 */
export interface DataplexTaskExecutionStatus {
}
/**
 * Serializes a {@link DataplexTaskExecutionStatus} struct into its Terraform
 * JSON representation.
 */
export declare function dataplexTaskExecutionStatusToTerraform(struct?: DataplexTaskExecutionStatus): any;
/**
 * Serializes a {@link DataplexTaskExecutionStatus} struct into its HCL
 * representation.
 */
export declare function dataplexTaskExecutionStatusToHclTerraform(struct?: DataplexTaskExecutionStatus): any;
/**
 * Read-only accessor for a single computed `execution_status` item, exposing
 * the nested `latest_job` list and the `update_time` attribute.
 */
export declare class DataplexTaskExecutionStatusOutputReference extends cdktf.ComplexObject {
private isEmptyObject;
/**
* @param terraformResource The parent resource
* @param terraformAttribute The attribute on the parent resource this class is referencing
* @param complexObjectIndex the index of this item in the list
* @param complexObjectIsFromSet whether the list is wrapping a set (will add tolist() to be able to access an item via an index)
*/
constructor(terraformResource: cdktf.IInterpolatingParent, terraformAttribute: string, complexObjectIndex: number, complexObjectIsFromSet: boolean);
get internalValue(): DataplexTaskExecutionStatus | undefined;
set internalValue(value: DataplexTaskExecutionStatus | undefined);
private _latestJob;
get latestJob(): DataplexTaskExecutionStatusLatestJobList;
get updateTime(): string;
}
/**
 * Indexed list wrapper over the computed `execution_status` items.
 */
export declare class DataplexTaskExecutionStatusList extends cdktf.ComplexList {
protected terraformResource: cdktf.IInterpolatingParent;
protected terraformAttribute: string;
protected wrapsSet: boolean;
/**
* @param terraformResource The parent resource
* @param terraformAttribute The attribute on the parent resource this class is referencing
* @param wrapsSet whether the list is wrapping a set (will add tolist() to be able to access an item via an index)
*/
constructor(terraformResource: cdktf.IInterpolatingParent, terraformAttribute: string, wrapsSet: boolean);
/**
* @param index the index of the item to return
*/
get(index: number): DataplexTaskExecutionStatusOutputReference;
}
/**
 * The `execution_spec` block: how and where task jobs run. `serviceAccount`
 * is the only required property.
 */
export interface DataplexTaskExecutionSpec {
/**
* The arguments to pass to the task. The args can use placeholders of the format ${placeholder} as part of key/value string. These will be interpolated before passing the args to the driver. Currently supported placeholders: - ${taskId} - ${job_time} To pass positional args, set the key as TASK_ARGS. The value should be a comma-separated string of all the positional arguments. To use a delimiter other than comma, refer to https://cloud.google.com/sdk/gcloud/reference/topic/escaping. In case of other keys being present in the args, then TASK_ARGS will be passed as the last argument. An object containing a list of 'key': value pairs. Example: { 'name': 'wrench', 'mass': '1.3kg', 'count': '3' }.
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/dataplex_task#args DataplexTask#args}
*/
readonly args?: {
[key: string]: string;
};
/**
* The Cloud KMS key to use for encryption, of the form: projects/{project_number}/locations/{locationId}/keyRings/{key-ring-name}/cryptoKeys/{key-name}.
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/dataplex_task#kms_key DataplexTask#kms_key}
*/
readonly kmsKey?: string;
/**
* The maximum duration after which the job execution is expired. A duration in seconds with up to nine fractional digits, ending with 's'. Example: '3.5s'.
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/dataplex_task#max_job_execution_lifetime DataplexTask#max_job_execution_lifetime}
*/
readonly maxJobExecutionLifetime?: string;
/**
* The project in which jobs are run. By default, the project containing the Lake is used. If a project is provided, the ExecutionSpec.service_account must belong to this project.
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/dataplex_task#project DataplexTask#project}
*/
readonly project?: string;
/**
* Service account to use to execute a task. If not provided, the default Compute service account for the project is used.
*
* NOTE(review): despite the "If not provided" wording above, the provider
* schema treats this as required — the property has no '?' and the
* OutputReference below exposes no resetServiceAccount(). Confirm against the
* provider docs before relying on the default.
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/dataplex_task#service_account DataplexTask#service_account}
*/
readonly serviceAccount: string;
}
/**
 * Serializes an `execution_spec` block (either a plain struct or an
 * OutputReference) into its Terraform JSON representation.
 */
export declare function dataplexTaskExecutionSpecToTerraform(struct?: DataplexTaskExecutionSpecOutputReference | DataplexTaskExecutionSpec): any;
/**
 * Serializes an `execution_spec` block (either a plain struct or an
 * OutputReference) into its HCL representation.
 */
export declare function dataplexTaskExecutionSpecToHclTerraform(struct?: DataplexTaskExecutionSpecOutputReference | DataplexTaskExecutionSpec): any;
/**
 * Mutable accessor for the single `execution_spec` block. Each optional
 * attribute follows the cdktf triplet pattern: getter/setter, resetX() to
 * unset, and xInput to read the raw configured value.
 */
export declare class DataplexTaskExecutionSpecOutputReference extends cdktf.ComplexObject {
private isEmptyObject;
/**
* @param terraformResource The parent resource
* @param terraformAttribute The attribute on the parent resource this class is referencing
*/
constructor(terraformResource: cdktf.IInterpolatingParent, terraformAttribute: string);
get internalValue(): DataplexTaskExecutionSpec | undefined;
set internalValue(value: DataplexTaskExecutionSpec | undefined);
private _args?;
get args(): {
[key: string]: string;
};
set args(value: {
[key: string]: string;
});
resetArgs(): void;
get argsInput(): {
[key: string]: string;
} | undefined;
private _kmsKey?;
get kmsKey(): string;
set kmsKey(value: string);
resetKmsKey(): void;
get kmsKeyInput(): string | undefined;
private _maxJobExecutionLifetime?;
get maxJobExecutionLifetime(): string;
set maxJobExecutionLifetime(value: string);
resetMaxJobExecutionLifetime(): void;
get maxJobExecutionLifetimeInput(): string | undefined;
private _project?;
get project(): string;
set project(value: string);
resetProject(): void;
get projectInput(): string | undefined;
private _serviceAccount?;
get serviceAccount(): string;
set serviceAccount(value: string);
get serviceAccountInput(): string | undefined;
}
/**
 * The `notebook.infrastructure_spec.batch` block: executor sizing for batch
 * compute resources.
 */
export interface DataplexTaskNotebookInfrastructureSpecBatch {
/**
* Total number of job executors. Executor Count should be between 2 and 100. [Default=2]
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/dataplex_task#executors_count DataplexTask#executors_count}
*/
readonly executorsCount?: number;
/**
* Max configurable executors. If maxExecutorsCount > executorsCount, then auto-scaling is enabled. Max Executor Count should be between 2 and 1000. [Default=1000]
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/dataplex_task#max_executors_count DataplexTask#max_executors_count}
*/
readonly maxExecutorsCount?: number;
}
/**
 * Serializes a notebook `batch` block into its Terraform JSON representation.
 */
export declare function dataplexTaskNotebookInfrastructureSpecBatchToTerraform(struct?: DataplexTaskNotebookInfrastructureSpecBatchOutputReference | DataplexTaskNotebookInfrastructureSpecBatch): any;
/**
 * Serializes a notebook `batch` block into its HCL representation.
 */
export declare function dataplexTaskNotebookInfrastructureSpecBatchToHclTerraform(struct?: DataplexTaskNotebookInfrastructureSpecBatchOutputReference | DataplexTaskNotebookInfrastructureSpecBatch): any;
/**
 * Mutable accessor for the notebook `batch` block (getter/setter, resetX(),
 * and xInput triplets for each optional attribute).
 */
export declare class DataplexTaskNotebookInfrastructureSpecBatchOutputReference extends cdktf.ComplexObject {
private isEmptyObject;
/**
* @param terraformResource The parent resource
* @param terraformAttribute The attribute on the parent resource this class is referencing
*/
constructor(terraformResource: cdktf.IInterpolatingParent, terraformAttribute: string);
get internalValue(): DataplexTaskNotebookInfrastructureSpecBatch | undefined;
set internalValue(value: DataplexTaskNotebookInfrastructureSpecBatch | undefined);
private _executorsCount?;
get executorsCount(): number;
set executorsCount(value: number);
resetExecutorsCount(): void;
get executorsCountInput(): number | undefined;
private _maxExecutorsCount?;
get maxExecutorsCount(): number;
set maxExecutorsCount(value: number);
resetMaxExecutorsCount(): void;
get maxExecutorsCountInput(): number | undefined;
}
/**
 * The `notebook.infrastructure_spec.container_image` block: runtime container
 * image plus extra JARs, Python packages, and Dataproc property overrides.
 */
export interface DataplexTaskNotebookInfrastructureSpecContainerImage {
/**
* Container image to use.
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/dataplex_task#image DataplexTask#image}
*/
readonly image?: string;
/**
* A list of Java JARS to add to the classpath. Valid input includes Cloud Storage URIs to Jar binaries. For example, gs://bucket-name/my/path/to/file.jar
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/dataplex_task#java_jars DataplexTask#java_jars}
*/
readonly javaJars?: string[];
/**
* Override to common configuration of open source components installed on the Dataproc cluster. The properties to set on daemon config files. Property keys are specified in prefix:property format, for example core:hadoop.tmp.dir. For more information, see Cluster properties.
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/dataplex_task#properties DataplexTask#properties}
*/
readonly properties?: {
[key: string]: string;
};
/**
* A list of python packages to be installed. Valid formats include Cloud Storage URI to a PIP installable library. For example, gs://bucket-name/my/path/to/lib.tar.gz
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/dataplex_task#python_packages DataplexTask#python_packages}
*/
readonly pythonPackages?: string[];
}
/**
 * Serializes a notebook `container_image` block into its Terraform JSON
 * representation.
 */
export declare function dataplexTaskNotebookInfrastructureSpecContainerImageToTerraform(struct?: DataplexTaskNotebookInfrastructureSpecContainerImageOutputReference | DataplexTaskNotebookInfrastructureSpecContainerImage): any;
/**
 * Serializes a notebook `container_image` block into its HCL representation.
 */
export declare function dataplexTaskNotebookInfrastructureSpecContainerImageToHclTerraform(struct?: DataplexTaskNotebookInfrastructureSpecContainerImageOutputReference | DataplexTaskNotebookInfrastructureSpecContainerImage): any;
/**
 * Mutable accessor for the notebook `container_image` block (getter/setter,
 * resetX(), and xInput triplets for each optional attribute).
 */
export declare class DataplexTaskNotebookInfrastructureSpecContainerImageOutputReference extends cdktf.ComplexObject {
private isEmptyObject;
/**
* @param terraformResource The parent resource
* @param terraformAttribute The attribute on the parent resource this class is referencing
*/
constructor(terraformResource: cdktf.IInterpolatingParent, terraformAttribute: string);
get internalValue(): DataplexTaskNotebookInfrastructureSpecContainerImage | undefined;
set internalValue(value: DataplexTaskNotebookInfrastructureSpecContainerImage | undefined);
private _image?;
get image(): string;
set image(value: string);
resetImage(): void;
get imageInput(): string | undefined;
private _javaJars?;
get javaJars(): string[];
set javaJars(value: string[]);
resetJavaJars(): void;
get javaJarsInput(): string[] | undefined;
private _properties?;
get properties(): {
[key: string]: string;
};
set properties(value: {
[key: string]: string;
});
resetProperties(): void;
get propertiesInput(): {
[key: string]: string;
} | undefined;
private _pythonPackages?;
get pythonPackages(): string[];
set pythonPackages(value: string[]);
resetPythonPackages(): void;
get pythonPackagesInput(): string[] | undefined;
}
/**
 * The `notebook.infrastructure_spec.vpc_network` block: networking for the
 * job. All properties are optional.
 */
export interface DataplexTaskNotebookInfrastructureSpecVpcNetwork {
/**
* The Cloud VPC network in which the job is run. By default, the Cloud VPC network named Default within the project is used.
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/dataplex_task#network DataplexTask#network}
*/
readonly network?: string;
/**
* List of network tags to apply to the job.
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/dataplex_task#network_tags DataplexTask#network_tags}
*/
readonly networkTags?: string[];
/**
* The Cloud VPC sub-network in which the job is run.
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/dataplex_task#sub_network DataplexTask#sub_network}
*/
readonly subNetwork?: string;
}
/**
 * Serializes a notebook `vpc_network` block into its Terraform JSON
 * representation.
 */
export declare function dataplexTaskNotebookInfrastructureSpecVpcNetworkToTerraform(struct?: DataplexTaskNotebookInfrastructureSpecVpcNetworkOutputReference | DataplexTaskNotebookInfrastructureSpecVpcNetwork): any;
/**
 * Serializes a notebook `vpc_network` block into its HCL representation.
 */
export declare function dataplexTaskNotebookInfrastructureSpecVpcNetworkToHclTerraform(struct?: DataplexTaskNotebookInfrastructureSpecVpcNetworkOutputReference | DataplexTaskNotebookInfrastructureSpecVpcNetwork): any;
/**
 * Mutable accessor for the notebook `vpc_network` block (getter/setter,
 * resetX(), and xInput triplets for each optional attribute).
 */
export declare class DataplexTaskNotebookInfrastructureSpecVpcNetworkOutputReference extends cdktf.ComplexObject {
private isEmptyObject;
/**
* @param terraformResource The parent resource
* @param terraformAttribute The attribute on the parent resource this class is referencing
*/
constructor(terraformResource: cdktf.IInterpolatingParent, terraformAttribute: string);
get internalValue(): DataplexTaskNotebookInfrastructureSpecVpcNetwork | undefined;
set internalValue(value: DataplexTaskNotebookInfrastructureSpecVpcNetwork | undefined);
private _network?;
get network(): string;
set network(value: string);
resetNetwork(): void;
get networkInput(): string | undefined;
private _networkTags?;
get networkTags(): string[];
set networkTags(value: string[]);
resetNetworkTags(): void;
get networkTagsInput(): string[] | undefined;
private _subNetwork?;
get subNetwork(): string;
set subNetwork(value: string);
resetSubNetwork(): void;
get subNetworkInput(): string | undefined;
}
/**
 * The `notebook.infrastructure_spec` block: groups the optional `batch`,
 * `container_image`, and `vpc_network` sub-blocks.
 */
export interface DataplexTaskNotebookInfrastructureSpec {
/**
* batch block
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/dataplex_task#batch DataplexTask#batch}
*/
readonly batch?: DataplexTaskNotebookInfrastructureSpecBatch;
/**
* container_image block
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/dataplex_task#container_image DataplexTask#container_image}
*/
readonly containerImage?: DataplexTaskNotebookInfrastructureSpecContainerImage;
/**
* vpc_network block
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/dataplex_task#vpc_network DataplexTask#vpc_network}
*/
readonly vpcNetwork?: DataplexTaskNotebookInfrastructureSpecVpcNetwork;
}
/**
 * Serializes a notebook `infrastructure_spec` block into its Terraform JSON
 * representation.
 */
export declare function dataplexTaskNotebookInfrastructureSpecToTerraform(struct?: DataplexTaskNotebookInfrastructureSpecOutputReference | DataplexTaskNotebookInfrastructureSpec): any;
/**
 * Serializes a notebook `infrastructure_spec` block into its HCL
 * representation.
 */
export declare function dataplexTaskNotebookInfrastructureSpecToHclTerraform(struct?: DataplexTaskNotebookInfrastructureSpecOutputReference | DataplexTaskNotebookInfrastructureSpec): any;
/**
 * Mutable accessor for the notebook `infrastructure_spec` block. Nested
 * blocks use the putX()/resetX()/xInput pattern rather than plain setters.
 */
export declare class DataplexTaskNotebookInfrastructureSpecOutputReference extends cdktf.ComplexObject {
private isEmptyObject;
/**
* @param terraformResource The parent resource
* @param terraformAttribute The attribute on the parent resource this class is referencing
*/
constructor(terraformResource: cdktf.IInterpolatingParent, terraformAttribute: string);
get internalValue(): DataplexTaskNotebookInfrastructureSpec | undefined;
set internalValue(value: DataplexTaskNotebookInfrastructureSpec | undefined);
private _batch;
get batch(): DataplexTaskNotebookInfrastructureSpecBatchOutputReference;
putBatch(value: DataplexTaskNotebookInfrastructureSpecBatch): void;
resetBatch(): void;
get batchInput(): DataplexTaskNotebookInfrastructureSpecBatch | undefined;
private _containerImage;
get containerImage(): DataplexTaskNotebookInfrastructureSpecContainerImageOutputReference;
putContainerImage(value: DataplexTaskNotebookInfrastructureSpecContainerImage): void;
resetContainerImage(): void;
get containerImageInput(): DataplexTaskNotebookInfrastructureSpecContainerImage | undefined;
private _vpcNetwork;
get vpcNetwork(): DataplexTaskNotebookInfrastructureSpecVpcNetworkOutputReference;
putVpcNetwork(value: DataplexTaskNotebookInfrastructureSpecVpcNetwork): void;
resetVpcNetwork(): void;
get vpcNetworkInput(): DataplexTaskNotebookInfrastructureSpecVpcNetwork | undefined;
}
/**
 * The `notebook` block: a notebook-based task definition. `notebook` (the
 * path to the notebook) is the only required property.
 */
export interface DataplexTaskNotebook {
/**
* Cloud Storage URIs of archives to be extracted into the working directory of each executor. Supported file types: .jar, .tar, .tar.gz, .tgz, and .zip.
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/dataplex_task#archive_uris DataplexTask#archive_uris}
*/
readonly archiveUris?: string[];
/**
* Cloud Storage URIs of files to be placed in the working directory of each executor.
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/dataplex_task#file_uris DataplexTask#file_uris}
*/
readonly fileUris?: string[];
/**
* Path to input notebook. This can be the Cloud Storage URI of the notebook file or the path to a Notebook Content. The execution args are accessible as environment variables (TASK_key=value).
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/dataplex_task#notebook DataplexTask#notebook}
*/
readonly notebook: string;
/**
* infrastructure_spec block
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/dataplex_task#infrastructure_spec DataplexTask#infrastructure_spec}
*/
readonly infrastructureSpec?: DataplexTaskNotebookInfrastructureSpec;
}
/**
 * Serializes a `notebook` block into its Terraform JSON representation.
 */
export declare function dataplexTaskNotebookToTerraform(struct?: DataplexTaskNotebookOutputReference | DataplexTaskNotebook): any;
/**
 * Serializes a `notebook` block into its HCL representation.
 */
export declare function dataplexTaskNotebookToHclTerraform(struct?: DataplexTaskNotebookOutputReference | DataplexTaskNotebook): any;
/**
 * Mutable accessor for the `notebook` block. `notebook` is required and so has
 * no reset method; the nested infrastructure_spec uses putX()/resetX().
 */
export declare class DataplexTaskNotebookOutputReference extends cdktf.ComplexObject {
private isEmptyObject;
/**
* @param terraformResource The parent resource
* @param terraformAttribute The attribute on the parent resource this class is referencing
*/
constructor(terraformResource: cdktf.IInterpolatingParent, terraformAttribute: string);
get internalValue(): DataplexTaskNotebook | undefined;
set internalValue(value: DataplexTaskNotebook | undefined);
private _archiveUris?;
get archiveUris(): string[];
set archiveUris(value: string[]);
resetArchiveUris(): void;
get archiveUrisInput(): string[] | undefined;
private _fileUris?;
get fileUris(): string[];
set fileUris(value: string[]);
resetFileUris(): void;
get fileUrisInput(): string[] | undefined;
private _notebook?;
get notebook(): string;
set notebook(value: string);
get notebookInput(): string | undefined;
private _infrastructureSpec;
get infrastructureSpec(): DataplexTaskNotebookInfrastructureSpecOutputReference;
putInfrastructureSpec(value: DataplexTaskNotebookInfrastructureSpec): void;
resetInfrastructureSpec(): void;
get infrastructureSpecInput(): DataplexTaskNotebookInfrastructureSpec | undefined;
}
/**
 * The `spark.infrastructure_spec.batch` block: executor sizing for batch
 * compute resources (mirrors the notebook variant).
 */
export interface DataplexTaskSparkInfrastructureSpecBatch {
/**
* Total number of job executors. Executor Count should be between 2 and 100. [Default=2]
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/dataplex_task#executors_count DataplexTask#executors_count}
*/
readonly executorsCount?: number;
/**
* Max configurable executors. If maxExecutorsCount > executorsCount, then auto-scaling is enabled. Max Executor Count should be between 2 and 1000. [Default=1000]
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/dataplex_task#max_executors_count DataplexTask#max_executors_count}
*/
readonly maxExecutorsCount?: number;
}
/**
 * Serializes a spark `batch` block into its Terraform JSON representation.
 */
export declare function dataplexTaskSparkInfrastructureSpecBatchToTerraform(struct?: DataplexTaskSparkInfrastructureSpecBatchOutputReference | DataplexTaskSparkInfrastructureSpecBatch): any;
/**
 * Serializes a spark `batch` block into its HCL representation.
 */
export declare function dataplexTaskSparkInfrastructureSpecBatchToHclTerraform(struct?: DataplexTaskSparkInfrastructureSpecBatchOutputReference | DataplexTaskSparkInfrastructureSpecBatch): any;
/**
 * Mutable accessor for the spark `batch` block (getter/setter, resetX(), and
 * xInput triplets for each optional attribute).
 */
export declare class DataplexTaskSparkInfrastructureSpecBatchOutputReference extends cdktf.ComplexObject {
private isEmptyObject;
/**
* @param terraformResource The parent resource
* @param terraformAttribute The attribute on the parent resource this class is referencing
*/
constructor(terraformResource: cdktf.IInterpolatingParent, terraformAttribute: string);
get internalValue(): DataplexTaskSparkInfrastructureSpecBatch | undefined;
set internalValue(value: DataplexTaskSparkInfrastructureSpecBatch | undefined);
private _executorsCount?;
get executorsCount(): number;
set executorsCount(value: number);
resetExecutorsCount(): void;
get executorsCountInput(): number | undefined;
private _maxExecutorsCount?;
get maxExecutorsCount(): number;
set maxExecutorsCount(value: number);
resetMaxExecutorsCount(): void;
get maxExecutorsCountInput(): number | undefined;
}
/**
 * The `spark.infrastructure_spec.container_image` block: runtime container
 * image plus extra JARs, Python packages, and Dataproc property overrides
 * (mirrors the notebook variant).
 */
export interface DataplexTaskSparkInfrastructureSpecContainerImage {
/**
* Container image to use.
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/dataplex_task#image DataplexTask#image}
*/
readonly image?: string;
/**
* A list of Java JARS to add to the classpath. Valid input includes Cloud Storage URIs to Jar binaries. For example, gs://bucket-name/my/path/to/file.jar
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/dataplex_task#java_jars DataplexTask#java_jars}
*/
readonly javaJars?: string[];
/**
* Override to common configuration of open source components installed on the Dataproc cluster. The properties to set on daemon config files. Property keys are specified in prefix:property format, for example core:hadoop.tmp.dir. For more information, see Cluster properties.
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/dataplex_task#properties DataplexTask#properties}
*/
readonly properties?: {
[key: string]: string;
};
/**
* A list of python packages to be installed. Valid formats include Cloud Storage URI to a PIP installable library. For example, gs://bucket-name/my/path/to/lib.tar.gz
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/dataplex_task#python_packages DataplexTask#python_packages}
*/
readonly pythonPackages?: string[];
}
/**
 * Serializes a spark `container_image` block into its Terraform JSON
 * representation.
 */
export declare function dataplexTaskSparkInfrastructureSpecContainerImageToTerraform(struct?: DataplexTaskSparkInfrastructureSpecContainerImageOutputReference | DataplexTaskSparkInfrastructureSpecContainerImage): any;
/**
 * Serializes a spark `container_image` block into its HCL representation.
 */
export declare function dataplexTaskSparkInfrastructureSpecContainerImageToHclTerraform(struct?: DataplexTaskSparkInfrastructureSpecContainerImageOutputReference | DataplexTaskSparkInfrastructureSpecContainerImage): any;
/**
 * Mutable accessor for the spark `container_image` block (getter/setter,
 * resetX(), and xInput triplets for each optional attribute).
 */
export declare class DataplexTaskSparkInfrastructureSpecContainerImageOutputReference extends cdktf.ComplexObject {
private isEmptyObject;
/**
* @param terraformResource The parent resource
* @param terraformAttribute The attribute on the parent resource this class is referencing
*/
constructor(terraformResource: cdktf.IInterpolatingParent, terraformAttribute: string);
get internalValue(): DataplexTaskSparkInfrastructureSpecContainerImage | undefined;
set internalValue(value: DataplexTaskSparkInfrastructureSpecContainerImage | undefined);
private _image?;
get image(): string;
set image(value: string);
resetImage(): void;
get imageInput(): string | undefined;
private _javaJars?;
get javaJars(): string[];
set javaJars(value: string[]);
resetJavaJars(): void;
get javaJarsInput(): string[] | undefined;
private _properties?;
get properties(): {
[key: string]: string;
};
set properties(value: {
[key: string]: string;
});
resetProperties(): void;
get propertiesInput(): {
[key: string]: string;
} | undefined;
private _pythonPackages?;
get pythonPackages(): string[];
set pythonPackages(value: string[]);
resetPythonPackages(): void;
get pythonPackagesInput(): string[] | undefined;
}
/**
 * The `spark.infrastructure_spec.vpc_network` block: networking for the job
 * (mirrors the notebook variant). All properties are optional.
 */
export interface DataplexTaskSparkInfrastructureSpecVpcNetwork {
/**
* The Cloud VPC network in which the job is run. By default, the Cloud VPC network named Default within the project is used.
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/dataplex_task#network DataplexTask#network}
*/
readonly network?: string;
/**
* List of network tags to apply to the job.
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/dataplex_task#network_tags DataplexTask#network_tags}
*/
readonly networkTags?: string[];
/**
* The Cloud VPC sub-network in which the job is run.
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/dataplex_task#sub_network DataplexTask#sub_network}
*/
readonly subNetwork?: string;
}
/**
 * Serializes a spark `vpc_network` block into its Terraform JSON
 * representation.
 */
export declare function dataplexTaskSparkInfrastructureSpecVpcNetworkToTerraform(struct?: DataplexTaskSparkInfrastructureSpecVpcNetworkOutputReference | DataplexTaskSparkInfrastructureSpecVpcNetwork): any;
/**
 * Serializes a spark `vpc_network` block into its HCL representation.
 */
export declare function dataplexTaskSparkInfrastructureSpecVpcNetworkToHclTerraform(struct?: DataplexTaskSparkInfrastructureSpecVpcNetworkOutputReference | DataplexTaskSparkInfrastructureSpecVpcNetwork): any;
/**
 * Mutable accessor for the spark `vpc_network` block (getter/setter,
 * resetX(), and xInput triplets for each optional attribute).
 */
export declare class DataplexTaskSparkInfrastructureSpecVpcNetworkOutputReference extends cdktf.ComplexObject {
private isEmptyObject;
/**
* @param terraformResource The parent resource
* @param terraformAttribute The attribute on the parent resource this class is referencing
*/
constructor(terraformResource: cdktf.IInterpolatingParent, terraformAttribute: string);
get internalValue(): DataplexTaskSparkInfrastructureSpecVpcNetwork | undefined;
set internalValue(value: DataplexTaskSparkInfrastructureSpecVpcNetwork | undefined);
private _network?;
get network(): string;
set network(value: string);
resetNetwork(): void;
get networkInput(): string | undefined;
private _networkTags?;
get networkTags(): string[];
set networkTags(value: string[]);
resetNetworkTags(): void;
get networkTagsInput(): string[] | undefined;
private _subNetwork?;
get subNetwork(): string;
set subNetwork(value: string);
resetSubNetwork(): void;
get subNetworkInput(): string | undefined;
}
/**
* The infrastructure_spec block of a Dataplex task's Spark configuration,
* composed of optional batch, container_image and vpc_network sub-blocks.
*/
export interface DataplexTaskSparkInfrastructureSpec {
/**
* batch block
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/dataplex_task#batch DataplexTask#batch}
*/
readonly batch?: DataplexTaskSparkInfrastructureSpecBatch;
/**
* container_image block
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/dataplex_task#container_image DataplexTask#container_image}
*/
readonly containerImage?: DataplexTaskSparkInfrastructureSpecContainerImage;
/**
* vpc_network block
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/dataplex_task#vpc_network DataplexTask#vpc_network}
*/
readonly vpcNetwork?: DataplexTaskSparkInfrastructureSpecVpcNetwork;
}
/**
* Serializes an infrastructure_spec block (plain struct or its output reference)
* into the attribute map used for Terraform JSON synthesis.
*/
export declare function dataplexTaskSparkInfrastructureSpecToTerraform(struct?: DataplexTaskSparkInfrastructureSpecOutputReference | DataplexTaskSparkInfrastructureSpec): any;
/**
* Serializes an infrastructure_spec block (plain struct or its output reference)
* into the attribute metadata used for Terraform HCL synthesis.
*/
export declare function dataplexTaskSparkInfrastructureSpecToHclTerraform(struct?: DataplexTaskSparkInfrastructureSpecOutputReference | DataplexTaskSparkInfrastructureSpec): any;
/**
* Output reference for the infrastructure_spec block, exposing nested output
* references for its batch, container_image and vpc_network sub-blocks. Each
* sub-block follows the generated cdktf pattern: a getter returning its output
* reference, a `put...` setter, a `reset...` to clear it, and an `...Input`
* getter returning the configured value (undefined when unset).
*/
export declare class DataplexTaskSparkInfrastructureSpecOutputReference extends cdktf.ComplexObject {
/** cdktf bookkeeping: whether the last assigned internalValue was an empty object. */
private isEmptyObject;
/**
* @param terraformResource The parent resource
* @param terraformAttribute The attribute on the parent resource this class is referencing
*/
constructor(terraformResource: cdktf.IInterpolatingParent, terraformAttribute: string);
/** The whole infrastructure_spec value, or undefined when the block is not set. */
get internalValue(): DataplexTaskSparkInfrastructureSpec | undefined;
set internalValue(value: DataplexTaskSparkInfrastructureSpec | undefined);
private _batch;
get batch(): DataplexTaskSparkInfrastructureSpecBatchOutputReference;
putBatch(value: DataplexTaskSparkInfrastructureSpecBatch): void;
resetBatch(): void;
get batchInput(): DataplexTaskSparkInfrastructureSpecBatch | undefined;
private _containerImage;
get containerImage(): DataplexTaskSparkInfrastructureSpecContainerImageOutputReference;
putContainerImage(value: DataplexTaskSparkInfrastructureSpecContainerImage): void;
resetContainerImage(): void;
get containerImageInput(): DataplexTaskSparkInfrastructureSpecContainerImage | undefined;
private _vpcNetwork;
get vpcNetwork(): DataplexTaskSparkInfrastructureSpecVpcNetworkOutputReference;
putVpcNetwork(value: DataplexTaskSparkInfrastructureSpecVpcNetwork): void;
resetVpcNetwork(): void;
get vpcNetworkInput(): DataplexTaskSparkInfrastructureSpecVpcNetwork | undefined;
}
/**
* The spark block of a Dataplex task: the driver entry point (main class, main
* jar, Python script, or SQL script/file), supporting archive/file URIs, and an
* optional infrastructure_spec sub-block.
*/
export interface DataplexTaskSpark {
/**
* Cloud Storage URIs of archives to be extracted into the working directory of each executor. Supported file types: .jar, .tar, .tar.gz, .tgz, and .zip.
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/dataplex_task#archive_uris DataplexTask#archive_uris}
*/
readonly archiveUris?: string[];
/**
* Cloud Storage URIs of files to be placed in the working directory of each executor.
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/dataplex_task#file_uris DataplexTask#file_uris}
*/
readonly fileUris?: string[];
/**
* The name of the driver's main class. The jar file that contains the class must be in the default CLASSPATH or specified in jar_file_uris. The execution args are passed in as a sequence of named process arguments (--key=value).
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/dataplex_task#main_class DataplexTask#main_class}
*/
readonly mainClass?: string;
/**
* The Cloud Storage URI of the jar file that contains the main class. The execution args are passed in as a sequence of named process arguments (--key=value).
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/dataplex_task#main_jar_file_uri DataplexTask#main_jar_file_uri}
*/
readonly mainJarFileUri?: string;
/**
* The Cloud Storage URI of the main Python file to use as the driver. Must be a .py file. The execution args are passed in as a sequence of named process arguments (--key=value).
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/dataplex_task#python_script_file DataplexTask#python_script_file}
*/
readonly pythonScriptFile?: string;
/**
* The query text. The execution args are used to declare a set of script variables (set key='value';).
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/dataplex_task#sql_script DataplexTask#sql_script}
*/
readonly sqlScript?: string;
/**
* A reference to a query file. This can be the Cloud Storage URI of the query file or it can be the path to a SqlScript Content. The execution args are used to declare a set of script variables (set key='value';).
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/dataplex_task#sql_script_file DataplexTask#sql_script_file}
*/
readonly sqlScriptFile?: string;
/**
* infrastructure_spec block
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/dataplex_task#infrastructure_spec DataplexTask#infrastructure_spec}
*/
readonly infrastructureSpec?: DataplexTaskSparkInfrastructureSpec;
}
/**
* Serializes a spark block (plain struct or its output reference) into the
* attribute map used for Terraform JSON synthesis.
*/
export declare function dataplexTaskSparkToTerraform(struct?: DataplexTaskSparkOutputReference | DataplexTaskSpark): any;
/**
* Serializes a spark block (plain struct or its output reference) into the
* attribute metadata used for Terraform HCL synthesis.
*/
export declare function dataplexTaskSparkToHclTerraform(struct?: DataplexTaskSparkOutputReference | DataplexTaskSpark): any;
/**
* Output reference for the spark block of a Dataplex task. Scalar attributes
* follow the generated cdktf accessor pattern (getter/setter, `reset...`,
* `...Input` returning the configured value or undefined); the nested
* infrastructure_spec sub-block is exposed via its own output reference with
* `put.../reset...` mutators.
*/
export declare class DataplexTaskSparkOutputReference extends cdktf.ComplexObject {
/** cdktf bookkeeping: whether the last assigned internalValue was an empty object. */
private isEmptyObject;
/**
* @param terraformResource The parent resource
* @param terraformAttribute The attribute on the parent resource this class is referencing
*/
constructor(terraformResource: cdktf.IInterpolatingParent, terraformAttribute: string);
/** The whole spark block value, or undefined when the block is not set. */
get internalValue(): DataplexTaskSpark | undefined;
set internalValue(value: DataplexTaskSpark | undefined);
private _archiveUris?;
get archiveUris(): string[];
set archiveUris(value: string[]);
resetArchiveUris(): void;
get archiveUrisInput(): string[] | undefined;
private _fileUris?;
get fileUris(): string[];
set fileUris(value: string[]);
resetFileUris(): void;
get fileUrisInput(): string[] | undefined;
private _mainClass?;
get mainClass(): string;
set mainClass(value: string);
resetMainClass(): void;
get mainClassInput(): string | undefined;
private _mainJarFileUri?;
get mainJarFileUri(): string;
set mainJarFileUri(value: string);
resetMainJarFileUri(): void;
get mainJarFileUriInput(): string | undefined;
private _pythonScriptFile?;
get pythonScriptFile(): string;
set pythonScriptFile(value: string);
resetPythonScriptFile(): void;
get pythonScriptFileInput(): string | undefined;
private _sqlScript?;
get sqlScript(): string;
set sqlScript(value: string);
resetSqlScript(): void;
get sqlScriptInput(): string | undefined;
private _sqlScriptFile?;
get sqlScriptFile(): string;
set sqlScriptFile(value: string);
resetSqlScriptFile(): void;
get sqlScriptFileInput(): string | undefined;
private _infrastructureSpec;
get infrastructureSpec(): DataplexTaskSparkInfrastructureSpecOutputReference;
putInfrastructureSpec(value: DataplexTaskSparkInfrastructureSpec): void;
resetInfrastructureSpec(): void;
get infrastructureSpecInput(): DataplexTaskSparkInfrastructureSpec | undefined;
}
/**
* Operation timeouts for the google_dataplex_task resource (create, delete and
* update). Values are Terraform duration strings per the provider docs linked
* on each field.
*/
export interface DataplexTaskTimeouts {
/**
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/dataplex_task#create DataplexTask#create}
*/
readonly create?: string;
/**
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/dataplex_task#delete DataplexTask#delete}
*/
readonly delete?: string;
/**
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/dataplex_task#update DataplexTask#update}
*/
readonly update?: string;
}
/**
* Serializes a timeouts block (plain struct or an unresolved cdktf token) into
* the attribute map used for Terraform JSON synthesis.
*/
export declare function dataplexTaskTimeoutsToTerraform(struct?: DataplexTaskTimeouts | cdktf.IResolvable): any;
/**
* Serializes a timeouts block (plain struct or an unresolved cdktf token) into
* the attribute metadata used for Terraform HCL synthesis.
*/
export declare function dataplexTaskTimeoutsToHclTerraform(struct?: DataplexTaskTimeouts | cdktf.IResolvable): any;
/**
* Output reference for the timeouts block. Unlike the other block references in
* this file, internalValue also accepts a cdktf.IResolvable (an unresolved
* token), which is stored separately in resolvableValue.
*/
export declare class DataplexTaskTimeoutsOutputReference extends cdktf.ComplexObject {
/** cdktf bookkeeping: whether the last assigned internalValue was an empty object. */
private isEmptyObject;
/** Holds the value when internalValue was set to an IResolvable token rather than a plain struct. */
private resolvableValue?;
/**
* @param terraformResource The parent resource
* @param terraformAttribute The attribute on the parent resource this class is referencing
*/
constructor(terraformResource: cdktf.IInterpolatingParent, terraformAttribute: string);
/** The whole timeouts value (struct or token), or undefined when unset. */
get internalValue(): DataplexTaskTimeouts | cdktf.IResolvable | undefined;
set internalValue(value: DataplexTaskTimeouts | cdktf.IResolvable | undefined);
private _create?;
get create(): string;
set create(value: string);
resetCreate(): void;
get createInput(): string | undefined;
private _delete?;
get delete(): string;
set delete(value: string);
resetDelete(): void;
get deleteInput(): string | undefined;
private _update?;
get update(): string;
set update(value: string);
resetUpdate(): void;
get updateInput(): string | undefined;
}
/**
* The trigger_spec block controlling when and how the task runs: the required
* trigger type (ON_DEMAND or RECURRING) plus optional schedule, start time,
* retry count and disable flag.
*/
export interface DataplexTaskTriggerSpec {
/**
* Prevent the task from executing. This does not cancel already running tasks. It is intended to temporarily disable RECURRING tasks.
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/dataplex_task#disabled DataplexTask#disabled}
*/
readonly disabled?: boolean | cdktf.IResolvable;
/**
* Number of retry attempts before aborting. Set to zero to never attempt to retry a failed task.
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/dataplex_task#max_retries DataplexTask#max_retries}
*/
readonly maxRetries?: number;
/**
* Cron schedule (https://en.wikipedia.org/wiki/Cron) for running tasks periodically. To explicitly set a timezone to the cron tab, apply a prefix in the cron tab: 'CRON_TZ=${IANA_TIME_ZONE}' or 'TZ=${IANA_TIME_ZONE}'. The ${IANA_TIME_ZONE} may only be a valid string from IANA time zone database. For example, CRON_TZ=America/New_York 1 * * * *, or TZ=America/New_York 1 * * * *. This field is required for RECURRING tasks.
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/dataplex_task#schedule DataplexTask#schedule}
*/
readonly schedule?: string;
/**
* The first run of the task will be after this time. If not specified, the task will run shortly after being submitted if ON_DEMAND and based on the schedule if RECURRING.
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/dataplex_task#start_time DataplexTask#start_time}
*/
readonly startTime?: string;
/**
* Trigger type of the user-specified Task Possible values: ["ON_DEMAND", "RECURRING"]
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/dataplex_task#type DataplexTask#type}
*/
readonly type: string;
}
/**
* Serializes a trigger_spec block (plain struct or its output reference) into
* the attribute map used for Terraform JSON synthesis.
*/
export declare function dataplexTaskTriggerSpecToTerraform(struct?: DataplexTaskTriggerSpecOutputReference | DataplexTaskTriggerSpec): any;
/**
* Serializes a trigger_spec block (plain struct or its output reference) into
* the attribute metadata used for Terraform HCL synthesis.
*/
export declare function dataplexTaskTriggerSpecToHclTerraform(struct?: DataplexTaskTriggerSpecOutputReference | DataplexTaskTriggerSpec): any;
/**
* Output reference for the trigger_spec block. Optional attributes follow the
* generated cdktf accessor pattern (getter/setter, `reset...`, `...Input`);
* note that `type` is a required field, so no resetType method is generated.
*/
export declare class DataplexTaskTriggerSpecOutputReference extends cdktf.ComplexObject {
/** cdktf bookkeeping: whether the last assigned internalValue was an empty object. */
private isEmptyObject;
/**
* @param terraformResource The parent resource
* @param terraformAttribute The attribute on the parent resource this class is referencing
*/
constructor(terraformResource: cdktf.IInterpolatingParent, terraformAttribute: string);
/** The whole trigger_spec value, or undefined when the block is not set. */
get internalValue(): DataplexTaskTriggerSpec | undefined;
set internalValue(value: DataplexTaskTriggerSpec | undefined);
private _disabled?;
get disabled(): boolean | cdktf.IResolvable;
set disabled(value: boolean | cdktf.IResolvable);
resetDisabled(): void;
get disabledInput(): boolean | cdktf.IResolvable | undefined;
private _maxRetries?;
get maxRetries(): number;
set maxRetries(value: number);
resetMaxRetries(): void;
get maxRetriesInput(): number | undefined;
private _schedule?;
get schedule(): string;
set schedule(value: string);
resetSchedule(): void;
get scheduleInput(): string | undefined;
private _startTime?;
get startTime(): string;
set startTime(value: string);
resetStartTime(): void;
get startTimeInput(): string | undefined;
private _type?;
get type(): string;
set type(value: string);
get typeInput(): string | undefined;
}
/**
* Represents a {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/dataplex_task google_dataplex_task}
*/
export declare class DataplexTask extends cdktf.TerraformResource {
static readonly tfResourceType = "google_dataplex_task";
/**
* Generates CDKTF code for importing a DataplexTask resource upon running "cdktf plan <stack-name>"
* @param scope The scope in which to define this construct
* @param importToId The construct id used in the generated config for the DataplexTask to import
* @param importFromId The id of the existing DataplexTask that should be imported. Refer to the {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/dataplex_task#import import section} in the documentation of this resource for the id to use
* @param provider? Optional instance of the provider where the DataplexTask to import is found
*/
static generateConfigForImport(scope: Construct, importToId: string, importFromId: string, provider?: cdktf.TerraformProvider): cdktf.ImportableResource;
/**
* Create a new {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/dataplex_task google_dataplex_task} Resource
*
* @param scope The scope in which to define this construct
* @param id The scoped construct ID. Must be unique amongst siblings in the same scope
* @param options DataplexTaskConfig
*/
constructor(scope: Construct, id: string, config: DataplexTaskConfig);
get createTime(): string;
private _description?;
get description(): string;
set description(value: string);
resetDescription(): void;
get descriptionInput(): string | undefined;
private _displayName?;
get displayName(): string;
set displayName(value: string);
resetDisplayName(): void;
get displayNameInput(): string | undefined;
private _effectiveLabels;
get effectiveLabels(): cdktf.StringMap;
private _executionStatu