// @cdktf/provider-google — prebuilt google provider bindings for Terraform CDK (cdktf).
// (Package page metadata: 969 lines (968 loc) • 49.5 kB • TypeScript — converted to a
// comment so this declaration file stays syntactically valid.)
/**
* Copyright (c) HashiCorp, Inc.
* SPDX-License-Identifier: MPL-2.0
*/
import { Construct } from 'constructs';
import * as cdktf from 'cdktf';
/**
 * Configuration properties accepted by the `google_dataproc_batch` resource.
 * All fields are optional; nested blocks are modeled as their own interfaces below.
 */
export interface DataprocBatchConfig extends cdktf.TerraformMetaArguments {
/**
* The ID to use for the batch, which will become the final component of the batch's resource name.
* This value must be 4-63 characters. Valid characters are /[a-z][0-9]-/.
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/dataproc_batch#batch_id DataprocBatch#batch_id}
*/
readonly batchId?: string;
/**
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/dataproc_batch#id DataprocBatch#id}
*
* Please be aware that the id field is automatically added to all resources in Terraform providers using a Terraform provider SDK version below 2.
* If you experience problems setting this value it might not be settable. Please take a look at the provider documentation to ensure it should be settable.
*/
readonly id?: string;
/**
* The labels to associate with this batch.
*
*
* **Note**: This field is non-authoritative, and will only manage the labels present in your configuration.
* Please refer to the field 'effective_labels' for all of the labels present on the resource.
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/dataproc_batch#labels DataprocBatch#labels}
*/
readonly labels?: {
[key: string]: string;
};
/**
* The location in which the batch will be created in.
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/dataproc_batch#location DataprocBatch#location}
*/
readonly location?: string;
/**
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/dataproc_batch#project DataprocBatch#project}
*/
readonly project?: string;
/**
* environment_config block
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/dataproc_batch#environment_config DataprocBatch#environment_config}
*/
readonly environmentConfig?: DataprocBatchEnvironmentConfig;
/**
* pyspark_batch block
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/dataproc_batch#pyspark_batch DataprocBatch#pyspark_batch}
*/
readonly pysparkBatch?: DataprocBatchPysparkBatch;
/**
* runtime_config block
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/dataproc_batch#runtime_config DataprocBatch#runtime_config}
*/
readonly runtimeConfig?: DataprocBatchRuntimeConfig;
/**
* spark_batch block
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/dataproc_batch#spark_batch DataprocBatch#spark_batch}
*/
readonly sparkBatch?: DataprocBatchSparkBatch;
/**
* spark_r_batch block
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/dataproc_batch#spark_r_batch DataprocBatch#spark_r_batch}
*/
readonly sparkRBatch?: DataprocBatchSparkRBatch;
/**
* spark_sql_batch block
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/dataproc_batch#spark_sql_batch DataprocBatch#spark_sql_batch}
*/
readonly sparkSqlBatch?: DataprocBatchSparkSqlBatch;
/**
* timeouts block
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/dataproc_batch#timeouts DataprocBatch#timeouts}
*/
readonly timeouts?: DataprocBatchTimeouts;
}
/**
 * `runtime_info.approximate_usage` block. Declared empty: it has no user-settable
 * fields; its values are exposed read-only through
 * DataprocBatchRuntimeInfoApproximateUsageOutputReference getters.
 */
export interface DataprocBatchRuntimeInfoApproximateUsage {
}
/** Serializes a DataprocBatchRuntimeInfoApproximateUsage struct for Terraform synthesis. */
export declare function dataprocBatchRuntimeInfoApproximateUsageToTerraform(struct?: DataprocBatchRuntimeInfoApproximateUsage): any;
/** Serializes a DataprocBatchRuntimeInfoApproximateUsage struct to HCL-compatible form. */
export declare function dataprocBatchRuntimeInfoApproximateUsageToHclTerraform(struct?: DataprocBatchRuntimeInfoApproximateUsage): any;
/**
 * Complex-object reference to one `approximate_usage` item on the parent resource.
 * All attributes below are getter-only (computed; no setters declared).
 */
export declare class DataprocBatchRuntimeInfoApproximateUsageOutputReference extends cdktf.ComplexObject {
private isEmptyObject;
/**
* @param terraformResource The parent resource
* @param terraformAttribute The attribute on the parent resource this class is referencing
* @param complexObjectIndex the index of this item in the list
* @param complexObjectIsFromSet whether the list is wrapping a set (will add tolist() to be able to access an item via an index)
*/
constructor(terraformResource: cdktf.IInterpolatingParent, terraformAttribute: string, complexObjectIndex: number, complexObjectIsFromSet: boolean);
get internalValue(): DataprocBatchRuntimeInfoApproximateUsage | undefined;
set internalValue(value: DataprocBatchRuntimeInfoApproximateUsage | undefined);
// Computed attributes (getter-only).
get acceleratorType(): string;
get milliAcceleratorSeconds(): string;
get milliDcuSeconds(): string;
get shuffleStorageGbSeconds(): string;
}
/**
 * List wrapper over `approximate_usage` items; use get(index) to obtain an item reference.
 */
export declare class DataprocBatchRuntimeInfoApproximateUsageList extends cdktf.ComplexList {
protected terraformResource: cdktf.IInterpolatingParent;
protected terraformAttribute: string;
protected wrapsSet: boolean;
/**
* @param terraformResource The parent resource
* @param terraformAttribute The attribute on the parent resource this class is referencing
* @param wrapsSet whether the list is wrapping a set (will add tolist() to be able to access an item via an index)
*/
constructor(terraformResource: cdktf.IInterpolatingParent, terraformAttribute: string, wrapsSet: boolean);
/**
* @param index the index of the item to return
*/
get(index: number): DataprocBatchRuntimeInfoApproximateUsageOutputReference;
}
/**
 * `runtime_info.current_usage` block. Declared empty: it has no user-settable
 * fields; its values are exposed read-only through
 * DataprocBatchRuntimeInfoCurrentUsageOutputReference getters.
 */
export interface DataprocBatchRuntimeInfoCurrentUsage {
}
/** Serializes a DataprocBatchRuntimeInfoCurrentUsage struct for Terraform synthesis. */
export declare function dataprocBatchRuntimeInfoCurrentUsageToTerraform(struct?: DataprocBatchRuntimeInfoCurrentUsage): any;
/** Serializes a DataprocBatchRuntimeInfoCurrentUsage struct to HCL-compatible form. */
export declare function dataprocBatchRuntimeInfoCurrentUsageToHclTerraform(struct?: DataprocBatchRuntimeInfoCurrentUsage): any;
/**
 * Complex-object reference to one `current_usage` item on the parent resource.
 * All attributes below are getter-only (computed; no setters declared).
 */
export declare class DataprocBatchRuntimeInfoCurrentUsageOutputReference extends cdktf.ComplexObject {
private isEmptyObject;
/**
* @param terraformResource The parent resource
* @param terraformAttribute The attribute on the parent resource this class is referencing
* @param complexObjectIndex the index of this item in the list
* @param complexObjectIsFromSet whether the list is wrapping a set (will add tolist() to be able to access an item via an index)
*/
constructor(terraformResource: cdktf.IInterpolatingParent, terraformAttribute: string, complexObjectIndex: number, complexObjectIsFromSet: boolean);
get internalValue(): DataprocBatchRuntimeInfoCurrentUsage | undefined;
set internalValue(value: DataprocBatchRuntimeInfoCurrentUsage | undefined);
// Computed attributes (getter-only).
get acceleratorType(): string;
get milliAccelerator(): string;
get milliDcu(): string;
get milliDcuPremium(): string;
get shuffleStorageGb(): string;
get shuffleStorageGbPremium(): string;
get snapshotTime(): string;
}
/**
 * List wrapper over `current_usage` items; use get(index) to obtain an item reference.
 */
export declare class DataprocBatchRuntimeInfoCurrentUsageList extends cdktf.ComplexList {
protected terraformResource: cdktf.IInterpolatingParent;
protected terraformAttribute: string;
protected wrapsSet: boolean;
/**
* @param terraformResource The parent resource
* @param terraformAttribute The attribute on the parent resource this class is referencing
* @param wrapsSet whether the list is wrapping a set (will add tolist() to be able to access an item via an index)
*/
constructor(terraformResource: cdktf.IInterpolatingParent, terraformAttribute: string, wrapsSet: boolean);
/**
* @param index the index of the item to return
*/
get(index: number): DataprocBatchRuntimeInfoCurrentUsageOutputReference;
}
/**
 * `runtime_info` block. Declared empty: it has no user-settable fields; its
 * values are exposed read-only through DataprocBatchRuntimeInfoOutputReference.
 */
export interface DataprocBatchRuntimeInfo {
}
/** Serializes a DataprocBatchRuntimeInfo struct for Terraform synthesis. */
export declare function dataprocBatchRuntimeInfoToTerraform(struct?: DataprocBatchRuntimeInfo): any;
/** Serializes a DataprocBatchRuntimeInfo struct to HCL-compatible form. */
export declare function dataprocBatchRuntimeInfoToHclTerraform(struct?: DataprocBatchRuntimeInfo): any;
/**
 * Complex-object reference to one `runtime_info` item on the parent resource.
 * All attributes below are getter-only (computed; no setters declared).
 */
export declare class DataprocBatchRuntimeInfoOutputReference extends cdktf.ComplexObject {
private isEmptyObject;
/**
* @param terraformResource The parent resource
* @param terraformAttribute The attribute on the parent resource this class is referencing
* @param complexObjectIndex the index of this item in the list
* @param complexObjectIsFromSet whether the list is wrapping a set (will add tolist() to be able to access an item via an index)
*/
constructor(terraformResource: cdktf.IInterpolatingParent, terraformAttribute: string, complexObjectIndex: number, complexObjectIsFromSet: boolean);
get internalValue(): DataprocBatchRuntimeInfo | undefined;
set internalValue(value: DataprocBatchRuntimeInfo | undefined);
private _approximateUsage;
get approximateUsage(): DataprocBatchRuntimeInfoApproximateUsageList;
private _currentUsage;
get currentUsage(): DataprocBatchRuntimeInfoCurrentUsageList;
get diagnosticOutputUri(): string;
private _endpoints;
get endpoints(): cdktf.StringMap;
get outputUri(): string;
}
/**
 * List wrapper over `runtime_info` items; use get(index) to obtain an item reference.
 */
export declare class DataprocBatchRuntimeInfoList extends cdktf.ComplexList {
protected terraformResource: cdktf.IInterpolatingParent;
protected terraformAttribute: string;
protected wrapsSet: boolean;
/**
* @param terraformResource The parent resource
* @param terraformAttribute The attribute on the parent resource this class is referencing
* @param wrapsSet whether the list is wrapping a set (will add tolist() to be able to access an item via an index)
*/
constructor(terraformResource: cdktf.IInterpolatingParent, terraformAttribute: string, wrapsSet: boolean);
/**
* @param index the index of the item to return
*/
get(index: number): DataprocBatchRuntimeInfoOutputReference;
}
/**
 * `state_history` block. Declared empty: it has no user-settable fields; its
 * values are exposed read-only through DataprocBatchStateHistoryOutputReference.
 */
export interface DataprocBatchStateHistory {
}
/** Serializes a DataprocBatchStateHistory struct for Terraform synthesis. */
export declare function dataprocBatchStateHistoryToTerraform(struct?: DataprocBatchStateHistory): any;
/** Serializes a DataprocBatchStateHistory struct to HCL-compatible form. */
export declare function dataprocBatchStateHistoryToHclTerraform(struct?: DataprocBatchStateHistory): any;
/**
 * Complex-object reference to one `state_history` item on the parent resource.
 * All attributes below are getter-only (computed; no setters declared).
 */
export declare class DataprocBatchStateHistoryOutputReference extends cdktf.ComplexObject {
private isEmptyObject;
/**
* @param terraformResource The parent resource
* @param terraformAttribute The attribute on the parent resource this class is referencing
* @param complexObjectIndex the index of this item in the list
* @param complexObjectIsFromSet whether the list is wrapping a set (will add tolist() to be able to access an item via an index)
*/
constructor(terraformResource: cdktf.IInterpolatingParent, terraformAttribute: string, complexObjectIndex: number, complexObjectIsFromSet: boolean);
get internalValue(): DataprocBatchStateHistory | undefined;
set internalValue(value: DataprocBatchStateHistory | undefined);
// Computed attributes (getter-only).
get state(): string;
get stateMessage(): string;
get stateStartTime(): string;
}
/**
 * List wrapper over `state_history` items; use get(index) to obtain an item reference.
 */
export declare class DataprocBatchStateHistoryList extends cdktf.ComplexList {
protected terraformResource: cdktf.IInterpolatingParent;
protected terraformAttribute: string;
protected wrapsSet: boolean;
/**
* @param terraformResource The parent resource
* @param terraformAttribute The attribute on the parent resource this class is referencing
* @param wrapsSet whether the list is wrapping a set (will add tolist() to be able to access an item via an index)
*/
constructor(terraformResource: cdktf.IInterpolatingParent, terraformAttribute: string, wrapsSet: boolean);
/**
* @param index the index of the item to return
*/
get(index: number): DataprocBatchStateHistoryOutputReference;
}
/**
 * `environment_config.execution_config` block: network, identity, staging and
 * lifetime settings for workload execution.
 */
export interface DataprocBatchEnvironmentConfigExecutionConfig {
/**
* The Cloud KMS key to use for encryption.
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/dataproc_batch#kms_key DataprocBatch#kms_key}
*/
readonly kmsKey?: string;
/**
* Tags used for network traffic control.
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/dataproc_batch#network_tags DataprocBatch#network_tags}
*/
readonly networkTags?: string[];
/**
* Network configuration for workload execution.
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/dataproc_batch#network_uri DataprocBatch#network_uri}
*/
readonly networkUri?: string;
/**
* Service account that used to execute workload.
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/dataproc_batch#service_account DataprocBatch#service_account}
*/
readonly serviceAccount?: string;
/**
* A Cloud Storage bucket used to stage workload dependencies, config files, and store
* workload output and other ephemeral data, such as Spark history files. If you do not specify a staging bucket,
* Cloud Dataproc will determine a Cloud Storage location according to the region where your workload is running,
* and then create and manage project-level, per-location staging and temporary buckets.
* This field requires a Cloud Storage bucket name, not a gs://... URI to a Cloud Storage bucket.
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/dataproc_batch#staging_bucket DataprocBatch#staging_bucket}
*/
readonly stagingBucket?: string;
/**
* Subnetwork configuration for workload execution.
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/dataproc_batch#subnetwork_uri DataprocBatch#subnetwork_uri}
*/
readonly subnetworkUri?: string;
/**
* The duration after which the workload will be terminated.
* When the workload exceeds this duration, it will be unconditionally terminated without waiting for ongoing
* work to finish. If ttl is not specified for a batch workload, the workload will be allowed to run until it
* exits naturally (or run forever without exiting). If ttl is not specified for an interactive session,
* it defaults to 24 hours. If ttl is not specified for a batch that uses 2.1+ runtime version, it defaults to 4 hours.
* Minimum value is 10 minutes; maximum value is 14 days. If both ttl and idleTtl are specified (for an interactive session),
* the conditions are treated as OR conditions: the workload will be terminated when it has been idle for idleTtl or
* when ttl has been exceeded, whichever occurs first.
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/dataproc_batch#ttl DataprocBatch#ttl}
*/
readonly ttl?: string;
}
/** Serializes a DataprocBatchEnvironmentConfigExecutionConfig struct (or its output reference) for Terraform synthesis. */
export declare function dataprocBatchEnvironmentConfigExecutionConfigToTerraform(struct?: DataprocBatchEnvironmentConfigExecutionConfigOutputReference | DataprocBatchEnvironmentConfigExecutionConfig): any;
/** Serializes a DataprocBatchEnvironmentConfigExecutionConfig struct (or its output reference) to HCL-compatible form. */
export declare function dataprocBatchEnvironmentConfigExecutionConfigToHclTerraform(struct?: DataprocBatchEnvironmentConfigExecutionConfigOutputReference | DataprocBatchEnvironmentConfigExecutionConfig): any;
/**
 * Mutable reference to the singleton `execution_config` block. Each optional field
 * follows the generated pattern: private backing field, getter/setter, resetX()
 * to clear it, and xInput to read the raw configured value.
 */
export declare class DataprocBatchEnvironmentConfigExecutionConfigOutputReference extends cdktf.ComplexObject {
private isEmptyObject;
/**
* @param terraformResource The parent resource
* @param terraformAttribute The attribute on the parent resource this class is referencing
*/
constructor(terraformResource: cdktf.IInterpolatingParent, terraformAttribute: string);
get internalValue(): DataprocBatchEnvironmentConfigExecutionConfig | undefined;
set internalValue(value: DataprocBatchEnvironmentConfigExecutionConfig | undefined);
private _kmsKey?;
get kmsKey(): string;
set kmsKey(value: string);
resetKmsKey(): void;
get kmsKeyInput(): string | undefined;
private _networkTags?;
get networkTags(): string[];
set networkTags(value: string[]);
resetNetworkTags(): void;
get networkTagsInput(): string[] | undefined;
private _networkUri?;
get networkUri(): string;
set networkUri(value: string);
resetNetworkUri(): void;
get networkUriInput(): string | undefined;
private _serviceAccount?;
get serviceAccount(): string;
set serviceAccount(value: string);
resetServiceAccount(): void;
get serviceAccountInput(): string | undefined;
private _stagingBucket?;
get stagingBucket(): string;
set stagingBucket(value: string);
resetStagingBucket(): void;
get stagingBucketInput(): string | undefined;
private _subnetworkUri?;
get subnetworkUri(): string;
set subnetworkUri(value: string);
resetSubnetworkUri(): void;
get subnetworkUriInput(): string | undefined;
private _ttl?;
get ttl(): string;
set ttl(value: string);
resetTtl(): void;
get ttlInput(): string | undefined;
}
/**
 * `peripherals_config.spark_history_server_config` block.
 */
export interface DataprocBatchEnvironmentConfigPeripheralsConfigSparkHistoryServerConfig {
/**
* Resource name of an existing Dataproc Cluster to act as a Spark History Server for the workload.
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/dataproc_batch#dataproc_cluster DataprocBatch#dataproc_cluster}
*/
readonly dataprocCluster?: string;
}
/** Serializes a spark_history_server_config struct (or its output reference) for Terraform synthesis. */
export declare function dataprocBatchEnvironmentConfigPeripheralsConfigSparkHistoryServerConfigToTerraform(struct?: DataprocBatchEnvironmentConfigPeripheralsConfigSparkHistoryServerConfigOutputReference | DataprocBatchEnvironmentConfigPeripheralsConfigSparkHistoryServerConfig): any;
/** Serializes a spark_history_server_config struct (or its output reference) to HCL-compatible form. */
export declare function dataprocBatchEnvironmentConfigPeripheralsConfigSparkHistoryServerConfigToHclTerraform(struct?: DataprocBatchEnvironmentConfigPeripheralsConfigSparkHistoryServerConfigOutputReference | DataprocBatchEnvironmentConfigPeripheralsConfigSparkHistoryServerConfig): any;
/**
 * Mutable reference to the singleton `spark_history_server_config` block.
 */
export declare class DataprocBatchEnvironmentConfigPeripheralsConfigSparkHistoryServerConfigOutputReference extends cdktf.ComplexObject {
private isEmptyObject;
/**
* @param terraformResource The parent resource
* @param terraformAttribute The attribute on the parent resource this class is referencing
*/
constructor(terraformResource: cdktf.IInterpolatingParent, terraformAttribute: string);
get internalValue(): DataprocBatchEnvironmentConfigPeripheralsConfigSparkHistoryServerConfig | undefined;
set internalValue(value: DataprocBatchEnvironmentConfigPeripheralsConfigSparkHistoryServerConfig | undefined);
private _dataprocCluster?;
get dataprocCluster(): string;
set dataprocCluster(value: string);
resetDataprocCluster(): void;
get dataprocClusterInput(): string | undefined;
}
/**
 * `environment_config.peripherals_config` block: auxiliary services attached to the workload.
 */
export interface DataprocBatchEnvironmentConfigPeripheralsConfig {
/**
* Resource name of an existing Dataproc Metastore service.
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/dataproc_batch#metastore_service DataprocBatch#metastore_service}
*/
readonly metastoreService?: string;
/**
* spark_history_server_config block
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/dataproc_batch#spark_history_server_config DataprocBatch#spark_history_server_config}
*/
readonly sparkHistoryServerConfig?: DataprocBatchEnvironmentConfigPeripheralsConfigSparkHistoryServerConfig;
}
/** Serializes a DataprocBatchEnvironmentConfigPeripheralsConfig struct (or its output reference) for Terraform synthesis. */
export declare function dataprocBatchEnvironmentConfigPeripheralsConfigToTerraform(struct?: DataprocBatchEnvironmentConfigPeripheralsConfigOutputReference | DataprocBatchEnvironmentConfigPeripheralsConfig): any;
/** Serializes a DataprocBatchEnvironmentConfigPeripheralsConfig struct (or its output reference) to HCL-compatible form. */
export declare function dataprocBatchEnvironmentConfigPeripheralsConfigToHclTerraform(struct?: DataprocBatchEnvironmentConfigPeripheralsConfigOutputReference | DataprocBatchEnvironmentConfigPeripheralsConfig): any;
/**
 * Mutable reference to the singleton `peripherals_config` block. Nested blocks use
 * putX()/resetX() plus an xInput getter for the raw configured value.
 */
export declare class DataprocBatchEnvironmentConfigPeripheralsConfigOutputReference extends cdktf.ComplexObject {
private isEmptyObject;
/**
* @param terraformResource The parent resource
* @param terraformAttribute The attribute on the parent resource this class is referencing
*/
constructor(terraformResource: cdktf.IInterpolatingParent, terraformAttribute: string);
get internalValue(): DataprocBatchEnvironmentConfigPeripheralsConfig | undefined;
set internalValue(value: DataprocBatchEnvironmentConfigPeripheralsConfig | undefined);
private _metastoreService?;
get metastoreService(): string;
set metastoreService(value: string);
resetMetastoreService(): void;
get metastoreServiceInput(): string | undefined;
private _sparkHistoryServerConfig;
get sparkHistoryServerConfig(): DataprocBatchEnvironmentConfigPeripheralsConfigSparkHistoryServerConfigOutputReference;
putSparkHistoryServerConfig(value: DataprocBatchEnvironmentConfigPeripheralsConfigSparkHistoryServerConfig): void;
resetSparkHistoryServerConfig(): void;
get sparkHistoryServerConfigInput(): DataprocBatchEnvironmentConfigPeripheralsConfigSparkHistoryServerConfig | undefined;
}
/**
 * `environment_config` block: execution and peripheral settings for the batch.
 */
export interface DataprocBatchEnvironmentConfig {
/**
* execution_config block
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/dataproc_batch#execution_config DataprocBatch#execution_config}
*/
readonly executionConfig?: DataprocBatchEnvironmentConfigExecutionConfig;
/**
* peripherals_config block
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/dataproc_batch#peripherals_config DataprocBatch#peripherals_config}
*/
readonly peripheralsConfig?: DataprocBatchEnvironmentConfigPeripheralsConfig;
}
/** Serializes a DataprocBatchEnvironmentConfig struct (or its output reference) for Terraform synthesis. */
export declare function dataprocBatchEnvironmentConfigToTerraform(struct?: DataprocBatchEnvironmentConfigOutputReference | DataprocBatchEnvironmentConfig): any;
/** Serializes a DataprocBatchEnvironmentConfig struct (or its output reference) to HCL-compatible form. */
export declare function dataprocBatchEnvironmentConfigToHclTerraform(struct?: DataprocBatchEnvironmentConfigOutputReference | DataprocBatchEnvironmentConfig): any;
/**
 * Mutable reference to the singleton `environment_config` block. Nested blocks use
 * putX()/resetX() plus an xInput getter for the raw configured value.
 */
export declare class DataprocBatchEnvironmentConfigOutputReference extends cdktf.ComplexObject {
private isEmptyObject;
/**
* @param terraformResource The parent resource
* @param terraformAttribute The attribute on the parent resource this class is referencing
*/
constructor(terraformResource: cdktf.IInterpolatingParent, terraformAttribute: string);
get internalValue(): DataprocBatchEnvironmentConfig | undefined;
set internalValue(value: DataprocBatchEnvironmentConfig | undefined);
private _executionConfig;
get executionConfig(): DataprocBatchEnvironmentConfigExecutionConfigOutputReference;
putExecutionConfig(value: DataprocBatchEnvironmentConfigExecutionConfig): void;
resetExecutionConfig(): void;
get executionConfigInput(): DataprocBatchEnvironmentConfigExecutionConfig | undefined;
private _peripheralsConfig;
get peripheralsConfig(): DataprocBatchEnvironmentConfigPeripheralsConfigOutputReference;
putPeripheralsConfig(value: DataprocBatchEnvironmentConfigPeripheralsConfig): void;
resetPeripheralsConfig(): void;
get peripheralsConfigInput(): DataprocBatchEnvironmentConfigPeripheralsConfig | undefined;
}
/**
 * `pyspark_batch` block: a PySpark workload definition.
 */
export interface DataprocBatchPysparkBatch {
/**
* HCFS URIs of archives to be extracted into the working directory of each executor.
* Supported file types: .jar, .tar, .tar.gz, .tgz, and .zip.
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/dataproc_batch#archive_uris DataprocBatch#archive_uris}
*/
readonly archiveUris?: string[];
/**
* The arguments to pass to the driver. Do not include arguments that can be set as batch
* properties, such as --conf, since a collision can occur that causes an incorrect batch submission.
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/dataproc_batch#args DataprocBatch#args}
*/
readonly args?: string[];
/**
* HCFS URIs of files to be placed in the working directory of each executor.
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/dataproc_batch#file_uris DataprocBatch#file_uris}
*/
readonly fileUris?: string[];
/**
* HCFS URIs of jar files to add to the classpath of the Spark driver and tasks.
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/dataproc_batch#jar_file_uris DataprocBatch#jar_file_uris}
*/
readonly jarFileUris?: string[];
/**
* The HCFS URI of the main Python file to use as the Spark driver. Must be a .py file.
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/dataproc_batch#main_python_file_uri DataprocBatch#main_python_file_uri}
*/
readonly mainPythonFileUri?: string;
/**
* HCFS file URIs of Python files to pass to the PySpark framework.
* Supported file types: .py, .egg, and .zip.
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/dataproc_batch#python_file_uris DataprocBatch#python_file_uris}
*/
readonly pythonFileUris?: string[];
}
/** Serializes a DataprocBatchPysparkBatch struct (or its output reference) for Terraform synthesis. */
export declare function dataprocBatchPysparkBatchToTerraform(struct?: DataprocBatchPysparkBatchOutputReference | DataprocBatchPysparkBatch): any;
/** Serializes a DataprocBatchPysparkBatch struct (or its output reference) to HCL-compatible form. */
export declare function dataprocBatchPysparkBatchToHclTerraform(struct?: DataprocBatchPysparkBatchOutputReference | DataprocBatchPysparkBatch): any;
/**
 * Mutable reference to the singleton `pyspark_batch` block. Each optional field
 * follows the generated pattern: private backing field, getter/setter, resetX()
 * to clear it, and xInput to read the raw configured value.
 */
export declare class DataprocBatchPysparkBatchOutputReference extends cdktf.ComplexObject {
private isEmptyObject;
/**
* @param terraformResource The parent resource
* @param terraformAttribute The attribute on the parent resource this class is referencing
*/
constructor(terraformResource: cdktf.IInterpolatingParent, terraformAttribute: string);
get internalValue(): DataprocBatchPysparkBatch | undefined;
set internalValue(value: DataprocBatchPysparkBatch | undefined);
private _archiveUris?;
get archiveUris(): string[];
set archiveUris(value: string[]);
resetArchiveUris(): void;
get archiveUrisInput(): string[] | undefined;
private _args?;
get args(): string[];
set args(value: string[]);
resetArgs(): void;
get argsInput(): string[] | undefined;
private _fileUris?;
get fileUris(): string[];
set fileUris(value: string[]);
resetFileUris(): void;
get fileUrisInput(): string[] | undefined;
private _jarFileUris?;
get jarFileUris(): string[];
set jarFileUris(value: string[]);
resetJarFileUris(): void;
get jarFileUrisInput(): string[] | undefined;
private _mainPythonFileUri?;
get mainPythonFileUri(): string;
set mainPythonFileUri(value: string);
resetMainPythonFileUri(): void;
get mainPythonFileUriInput(): string | undefined;
private _pythonFileUris?;
get pythonFileUris(): string[];
set pythonFileUris(value: string[]);
resetPythonFileUris(): void;
get pythonFileUrisInput(): string[] | undefined;
}
/**
 * `runtime_config.autotuning_config` block.
 */
export interface DataprocBatchRuntimeConfigAutotuningConfig {
/**
* Optional. Scenarios for which tunings are applied. Possible values: ["SCALING", "BROADCAST_HASH_JOIN", "MEMORY"]
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/dataproc_batch#scenarios DataprocBatch#scenarios}
*/
readonly scenarios?: string[];
}
/** Serializes a DataprocBatchRuntimeConfigAutotuningConfig struct (or its output reference) for Terraform synthesis. */
export declare function dataprocBatchRuntimeConfigAutotuningConfigToTerraform(struct?: DataprocBatchRuntimeConfigAutotuningConfigOutputReference | DataprocBatchRuntimeConfigAutotuningConfig): any;
/** Serializes a DataprocBatchRuntimeConfigAutotuningConfig struct (or its output reference) to HCL-compatible form. */
export declare function dataprocBatchRuntimeConfigAutotuningConfigToHclTerraform(struct?: DataprocBatchRuntimeConfigAutotuningConfigOutputReference | DataprocBatchRuntimeConfigAutotuningConfig): any;
/**
 * Mutable reference to the singleton `autotuning_config` block.
 */
export declare class DataprocBatchRuntimeConfigAutotuningConfigOutputReference extends cdktf.ComplexObject {
private isEmptyObject;
/**
* @param terraformResource The parent resource
* @param terraformAttribute The attribute on the parent resource this class is referencing
*/
constructor(terraformResource: cdktf.IInterpolatingParent, terraformAttribute: string);
get internalValue(): DataprocBatchRuntimeConfigAutotuningConfig | undefined;
set internalValue(value: DataprocBatchRuntimeConfigAutotuningConfig | undefined);
private _scenarios?;
get scenarios(): string[];
set scenarios(value: string[]);
resetScenarios(): void;
get scenariosInput(): string[] | undefined;
}
/**
 * `runtime_config` block: runtime environment settings for the batch.
 */
export interface DataprocBatchRuntimeConfig {
/**
* Optional. Cohort identifier. Identifies families of the workloads having the same shape, e.g. daily ETL jobs.
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/dataproc_batch#cohort DataprocBatch#cohort}
*/
readonly cohort?: string;
/**
* Optional custom container image for the job runtime environment. If not specified, a default container image will be used.
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/dataproc_batch#container_image DataprocBatch#container_image}
*/
readonly containerImage?: string;
/**
* A mapping of property names to values, which are used to configure workload execution.
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/dataproc_batch#properties DataprocBatch#properties}
*/
readonly properties?: {
[key: string]: string;
};
/**
* Version of the batch runtime.
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/dataproc_batch#version DataprocBatch#version}
*/
readonly version?: string;
/**
* autotuning_config block
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/dataproc_batch#autotuning_config DataprocBatch#autotuning_config}
*/
readonly autotuningConfig?: DataprocBatchRuntimeConfigAutotuningConfig;
}
/** Serializes a DataprocBatchRuntimeConfig struct (or its output reference) for Terraform synthesis. */
export declare function dataprocBatchRuntimeConfigToTerraform(struct?: DataprocBatchRuntimeConfigOutputReference | DataprocBatchRuntimeConfig): any;
/** Serializes a DataprocBatchRuntimeConfig struct (or its output reference) to HCL-compatible form. */
export declare function dataprocBatchRuntimeConfigToHclTerraform(struct?: DataprocBatchRuntimeConfigOutputReference | DataprocBatchRuntimeConfig): any;
/**
 * Mutable reference to the singleton `runtime_config` block. Configurable fields
 * follow the generated getter/setter/resetX/xInput pattern; `effectiveProperties`
 * is getter-only (computed).
 */
export declare class DataprocBatchRuntimeConfigOutputReference extends cdktf.ComplexObject {
private isEmptyObject;
/**
* @param terraformResource The parent resource
* @param terraformAttribute The attribute on the parent resource this class is referencing
*/
constructor(terraformResource: cdktf.IInterpolatingParent, terraformAttribute: string);
get internalValue(): DataprocBatchRuntimeConfig | undefined;
set internalValue(value: DataprocBatchRuntimeConfig | undefined);
private _cohort?;
get cohort(): string;
set cohort(value: string);
resetCohort(): void;
get cohortInput(): string | undefined;
private _containerImage?;
get containerImage(): string;
set containerImage(value: string);
resetContainerImage(): void;
get containerImageInput(): string | undefined;
private _effectiveProperties;
// Computed attribute (getter-only).
get effectiveProperties(): cdktf.StringMap;
private _properties?;
get properties(): {
[key: string]: string;
};
set properties(value: {
[key: string]: string;
});
resetProperties(): void;
get propertiesInput(): {
[key: string]: string;
} | undefined;
private _version?;
get version(): string;
set version(value: string);
resetVersion(): void;
get versionInput(): string | undefined;
private _autotuningConfig;
get autotuningConfig(): DataprocBatchRuntimeConfigAutotuningConfigOutputReference;
putAutotuningConfig(value: DataprocBatchRuntimeConfigAutotuningConfig): void;
resetAutotuningConfig(): void;
get autotuningConfigInput(): DataprocBatchRuntimeConfigAutotuningConfig | undefined;
}
/**
 * Configuration for the `spark_batch` block of `google_dataproc_batch`:
 * a batch workload that runs a Spark driver. All fields are optional at
 * the type level; which combinations are required (e.g. `mainClass` vs
 * `mainJarFileUri`) is enforced by the provider at plan/apply time — see
 * the linked registry docs on each field.
 */
export interface DataprocBatchSparkBatch {
    /**
    * HCFS URIs of archives to be extracted into the working directory of each executor.
    * Supported file types: .jar, .tar, .tar.gz, .tgz, and .zip.
    *
    * Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/dataproc_batch#archive_uris DataprocBatch#archive_uris}
    */
    readonly archiveUris?: string[];
    /**
    * The arguments to pass to the driver. Do not include arguments that can be set as batch
    * properties, such as --conf, since a collision can occur that causes an incorrect batch submission.
    *
    * Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/dataproc_batch#args DataprocBatch#args}
    */
    readonly args?: string[];
    /**
    * HCFS URIs of files to be placed in the working directory of each executor.
    *
    * Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/dataproc_batch#file_uris DataprocBatch#file_uris}
    */
    readonly fileUris?: string[];
    /**
    * HCFS URIs of jar files to add to the classpath of the Spark driver and tasks.
    *
    * Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/dataproc_batch#jar_file_uris DataprocBatch#jar_file_uris}
    */
    readonly jarFileUris?: string[];
    /**
    * The name of the driver main class. The jar file that contains the class must be in the
    * classpath or specified in jarFileUris.
    *
    * Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/dataproc_batch#main_class DataprocBatch#main_class}
    */
    readonly mainClass?: string;
    /**
    * The HCFS URI of the jar file that contains the main class.
    *
    * Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/dataproc_batch#main_jar_file_uri DataprocBatch#main_jar_file_uri}
    */
    readonly mainJarFileUri?: string;
}
/**
 * Maps a `DataprocBatchSparkBatch` (or its output reference) to the
 * attribute shape cdktf synthesizes as Terraform JSON.
 */
export declare function dataprocBatchSparkBatchToTerraform(struct?: DataprocBatchSparkBatchOutputReference | DataprocBatchSparkBatch): any;
/**
 * Maps a `DataprocBatchSparkBatch` (or its output reference) to the
 * attribute shape used when cdktf synthesizes HCL output.
 */
export declare function dataprocBatchSparkBatchToHclTerraform(struct?: DataprocBatchSparkBatchOutputReference | DataprocBatchSparkBatch): any;
/**
 * Output-reference wrapper for the `spark_batch` block of a
 * `google_dataproc_batch` resource.
 *
 * Every attribute follows the standard cdktf accessor pattern: getter/setter,
 * `reset*()` to clear the configured value, and `*Input` to read back the raw
 * configured value (or `undefined` when unset).
 */
export declare class DataprocBatchSparkBatchOutputReference extends cdktf.ComplexObject {
    private isEmptyObject;
    /**
     * @param terraformResource The parent resource
     * @param terraformAttribute The attribute on the parent resource this class is referencing
     */
    constructor(terraformResource: cdktf.IInterpolatingParent, terraformAttribute: string);
    // Whole-block value: read back the full config, or assign/clear it at once.
    get internalValue(): DataprocBatchSparkBatch | undefined;
    set internalValue(value: DataprocBatchSparkBatch | undefined);
    private _archiveUris?;
    get archiveUris(): string[];
    set archiveUris(value: string[]);
    resetArchiveUris(): void;
    get archiveUrisInput(): string[] | undefined;
    private _args?;
    get args(): string[];
    set args(value: string[]);
    resetArgs(): void;
    get argsInput(): string[] | undefined;
    private _fileUris?;
    get fileUris(): string[];
    set fileUris(value: string[]);
    resetFileUris(): void;
    get fileUrisInput(): string[] | undefined;
    private _jarFileUris?;
    get jarFileUris(): string[];
    set jarFileUris(value: string[]);
    resetJarFileUris(): void;
    get jarFileUrisInput(): string[] | undefined;
    private _mainClass?;
    get mainClass(): string;
    set mainClass(value: string);
    resetMainClass(): void;
    get mainClassInput(): string | undefined;
    private _mainJarFileUri?;
    get mainJarFileUri(): string;
    set mainJarFileUri(value: string);
    resetMainJarFileUri(): void;
    get mainJarFileUriInput(): string | undefined;
}
/**
 * Configuration for the `spark_r_batch` block of `google_dataproc_batch`:
 * a batch workload that runs a SparkR driver script (`mainRFileUri`).
 * All fields are optional at the type level; provider-side validation
 * applies at plan/apply time — see the linked registry docs on each field.
 */
export interface DataprocBatchSparkRBatch {
    /**
    * HCFS URIs of archives to be extracted into the working directory of each executor.
    * Supported file types: .jar, .tar, .tar.gz, .tgz, and .zip.
    *
    * Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/dataproc_batch#archive_uris DataprocBatch#archive_uris}
    */
    readonly archiveUris?: string[];
    /**
    * The arguments to pass to the driver. Do not include arguments that can be set as batch
    * properties, such as --conf, since a collision can occur that causes an incorrect batch submission.
    *
    * Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/dataproc_batch#args DataprocBatch#args}
    */
    readonly args?: string[];
    /**
    * HCFS URIs of files to be placed in the working directory of each executor.
    *
    * Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/dataproc_batch#file_uris DataprocBatch#file_uris}
    */
    readonly fileUris?: string[];
    /**
    * The HCFS URI of the main R file to use as the driver. Must be a .R or .r file.
    *
    * Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/dataproc_batch#main_r_file_uri DataprocBatch#main_r_file_uri}
    */
    readonly mainRFileUri?: string;
}
/**
 * Maps a `DataprocBatchSparkRBatch` (or its output reference) to the
 * attribute shape cdktf synthesizes as Terraform JSON.
 */
export declare function dataprocBatchSparkRBatchToTerraform(struct?: DataprocBatchSparkRBatchOutputReference | DataprocBatchSparkRBatch): any;
/**
 * Maps a `DataprocBatchSparkRBatch` (or its output reference) to the
 * attribute shape used when cdktf synthesizes HCL output.
 */
export declare function dataprocBatchSparkRBatchToHclTerraform(struct?: DataprocBatchSparkRBatchOutputReference | DataprocBatchSparkRBatch): any;
/**
 * Output-reference wrapper for the `spark_r_batch` block of a
 * `google_dataproc_batch` resource.
 *
 * Every attribute follows the standard cdktf accessor pattern: getter/setter,
 * `reset*()` to clear the configured value, and `*Input` to read back the raw
 * configured value (or `undefined` when unset).
 */
export declare class DataprocBatchSparkRBatchOutputReference extends cdktf.ComplexObject {
    private isEmptyObject;
    /**
     * @param terraformResource The parent resource
     * @param terraformAttribute The attribute on the parent resource this class is referencing
     */
    constructor(terraformResource: cdktf.IInterpolatingParent, terraformAttribute: string);
    // Whole-block value: read back the full config, or assign/clear it at once.
    get internalValue(): DataprocBatchSparkRBatch | undefined;
    set internalValue(value: DataprocBatchSparkRBatch | undefined);
    private _archiveUris?;
    get archiveUris(): string[];
    set archiveUris(value: string[]);
    resetArchiveUris(): void;
    get archiveUrisInput(): string[] | undefined;
    private _args?;
    get args(): string[];
    set args(value: string[]);
    resetArgs(): void;
    get argsInput(): string[] | undefined;
    private _fileUris?;
    get fileUris(): string[];
    set fileUris(value: string[]);
    resetFileUris(): void;
    get fileUrisInput(): string[] | undefined;
    private _mainRFileUri?;
    get mainRFileUri(): string;
    set mainRFileUri(value: string);
    resetMainRFileUri(): void;
    get mainRFileUriInput(): string | undefined;
}
/**
 * Configuration for the `spark_sql_batch` block of `google_dataproc_batch`:
 * a batch workload that executes Spark SQL queries from a script file
 * (`queryFileUri`), with optional variable substitution via `queryVariables`.
 * See the linked registry docs on each field.
 */
export interface DataprocBatchSparkSqlBatch {
    /**
    * HCFS URIs of jar files to be added to the Spark CLASSPATH.
    *
    * Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/dataproc_batch#jar_file_uris DataprocBatch#jar_file_uris}
    */
    readonly jarFileUris?: string[];
    /**
    * The HCFS URI of the script that contains Spark SQL queries to execute.
    *
    * Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/dataproc_batch#query_file_uri DataprocBatch#query_file_uri}
    */
    readonly queryFileUri?: string;
    /**
    * Mapping of query variable names to values (equivalent to the Spark SQL command: SET name="value";).
    *
    * Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/dataproc_batch#query_variables DataprocBatch#query_variables}
    */
    readonly queryVariables?: {
        [key: string]: string;
    };
}
/**
 * Maps a `DataprocBatchSparkSqlBatch` (or its output reference) to the
 * attribute shape cdktf synthesizes as Terraform JSON.
 */
export declare function dataprocBatchSparkSqlBatchToTerraform(struct?: DataprocBatchSparkSqlBatchOutputReference | DataprocBatchSparkSqlBatch): any;
/**
 * Maps a `DataprocBatchSparkSqlBatch` (or its output reference) to the
 * attribute shape used when cdktf synthesizes HCL output.
 */
export declare function dataprocBatchSparkSqlBatchToHclTerraform(struct?: DataprocBatchSparkSqlBatchOutputReference | DataprocBatchSparkSqlBatch): any;
/**
 * Output-reference wrapper for the `spark_sql_batch` block of a
 * `google_dataproc_batch` resource.
 *
 * Every attribute follows the standard cdktf accessor pattern: getter/setter,
 * `reset*()` to clear the configured value, and `*Input` to read back the raw
 * configured value (or `undefined` when unset).
 */
export declare class DataprocBatchSparkSqlBatchOutputReference extends cdktf.ComplexObject {
    private isEmptyObject;
    /**
     * @param terraformResource The parent resource
     * @param terraformAttribute The attribute on the parent resource this class is referencing
     */
    constructor(terraformResource: cdktf.IInterpolatingParent, terraformAttribute: string);
    // Whole-block value: read back the full config, or assign/clear it at once.
    get internalValue(): DataprocBatchSparkSqlBatch | undefined;
    set internalValue(value: DataprocBatchSparkSqlBatch | undefined);
    private _jarFileUris?;
    get jarFileUris(): string[];
    set jarFileUris(value: string[]);
    resetJarFileUris(): void;
    get jarFileUrisInput(): string[] | undefined;
    private _queryFileUri?;
    get queryFileUri(): string;
    set queryFileUri(value: string);
    resetQueryFileUri(): void;
    get queryFileUriInput(): string | undefined;
    private _queryVariables?;
    get queryVariables(): {
        [key: string]: string;
    };
    set queryVariables(value: {
        [key: string]: string;
    });
    resetQueryVariables(): void;
    get queryVariablesInput(): {
        [key: string]: string;
    } | undefined;
}
/**
 * Custom operation timeouts for the `google_dataproc_batch` resource
 * (`timeouts` block). Values are Terraform duration strings, e.g. "30m".
 */
export interface DataprocBatchTimeouts {
    /**
    * Timeout for resource creation.
    *
    * Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/dataproc_batch#create DataprocBatch#create}
    */
    readonly create?: string;
    /**
    * Timeout for resource deletion.
    *
    * Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/dataproc_batch#delete DataprocBatch#delete}
    */
    readonly delete?: string;
    /**
    * Timeout for resource update.
    *
    * Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/dataproc_batch#update DataprocBatch#update}
    */
    readonly update?: string;
}
/**
 * Maps a `DataprocBatchTimeouts` struct (or an unresolved cdktf token) to the
 * attribute shape cdktf synthesizes as Terraform JSON.
 */
export declare function dataprocBatchTimeoutsToTerraform(struct?: DataprocBatchTimeouts | cdktf.IResolvable): any;
/**
 * Maps a `DataprocBatchTimeouts` struct (or an unresolved cdktf token) to the
 * attribute shape used when cdktf synthesizes HCL output.
 */
export declare function dataprocBatchTimeoutsToHclTerraform(struct?: DataprocBatchTimeouts | cdktf.IResolvable): any;
/**
 * Output-reference wrapper for the `timeouts` block of a
 * `google_dataproc_batch` resource. Unlike the other block wrappers, the
 * internal value may also be an unresolved cdktf token (`IResolvable`),
 * tracked separately in `resolvableValue`.
 */
export declare class DataprocBatchTimeoutsOutputReference extends cdktf.ComplexObject {
    private isEmptyObject;
    private resolvableValue?;
    /**
     * @param terraformResource The parent resource
     * @param terraformAttribute The attribute on the parent resource this class is referencing
     */
    constructor(terraformResource: cdktf.IInterpolatingParent, terraformAttribute: string);
    // Whole-block value: read back the full config, or assign/clear it at once.
    get internalValue(): DataprocBatchTimeouts | cdktf.IResolvable | undefined;
    set internalValue(value: DataprocBatchTimeouts | cdktf.IResolvable | undefined);
    private _create?;
    get create(): string;
    set create(value: string);
    resetCreate(): void;
    get createInput(): string | undefined;
    private _delete?;
    get delete(): string;
    set delete(value: string);
    resetDelete(): void;
    get deleteInput(): string | undefined;
    private _update?;
    get update(): string;
    set update(value: string);
    resetUpdate(): void;
    get updateInput(): string | undefined;
}
/**
 * Represents a {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/dataproc_batch google_dataproc_batch}
 *
 * Configurable attributes follow the cdktf accessor pattern (getter/setter,
 * `reset*()`, `*Input`); getter-only members are read-only computed
 * attributes populated by the provider; nested blocks are set via `put*()`.
 */
export declare class DataprocBatch extends cdktf.TerraformResource {
    static readonly tfResourceType = "google_dataproc_batch";
    /**
     * Generates CDKTF code for importing a DataprocBatch resource upon running "cdktf plan <stack-name>"
     * @param scope The scope in which to define this construct
     * @param importToId The construct id used in the generated config for the DataprocBatch to import
     * @param importFromId The id of the existing DataprocBatch that should be imported. Refer to the {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/dataproc_batch#import import section} in the documentation of this resource for the id to use
     * @param provider? Optional instance of the provider where the DataprocBatch to import is found
     */
    static generateConfigForImport(scope: Construct, importToId: string, importFromId: string, provider?: cdktf.TerraformProvider): cdktf.ImportableResource;
    /**
     * Create a new {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/dataproc_batch google_dataproc_batch} Resource
     *
     * @param scope The scope in which to define this construct
     * @param id The scoped construct ID. Must be unique amongst siblings in the same scope
     * @param options DataprocBatchConfig = {}
     */
    constructor(scope: Construct, id: string, config?: DataprocBatchConfig);
    private _batchId?;
    get batchId(): string;
    set batchId(value: string);
    resetBatchId(): void;
    get batchIdInput(): string | undefined;
    // Read-only computed attributes (getter only).
    get createTime(): string;
    get creator(): string;
    private _effectiveLabels;
    get effectiveLabels(): cdktf.StringMap;
    private _id?;
    get id(): string;
    set id(value: string);
    resetId(): void;
    get idInput(): string | undefined;
    private _labels?;
    get labels(): {
        [key: string]: string;
    };
    set labels(value: {
        [key: string]: string;
    });
    resetLabels(): void;
    get labelsInput(): {
        [key: string]: string;
    } | undefined;
    private _location?;
    get location(): string;
    set location(value: string);
    resetLocation(): void;
    get locationInput(): string | undefined;
    // Read-only computed attributes (getter only).
    get name(): string;
    get operation(): string;
    private _project?;
    get project(): string;
    set project(value: string);
    resetProject(): void;
    get projectInput(): string | undefined;
    // Read-only computed lists/state (getter only).
    private _runtimeInfo;
    get runtimeInfo(): DataprocBatchRuntimeInfoList;
    get state(): string;
    private _stateHistory;
    get stateHistory(): DataprocBatchStateHistoryList;
    get stateMessage(): string;
    get stateTime(): string;
    private _terraformLabels;
    get terraformLabels(): cdktf.StringMap;
    get uuid(): string;
    // Nested configuration blocks: set via put*(), clear via reset*().
    private _environmentConfig;
    get environmentConfig(): DataprocBatchEnvironmentConfigOutputReference;
    putEnvironmentConfig(value: DataprocBatchEnvironmentConfig): void;
    resetEnvironmentConfig(): void;
    get environmentConfigInput(): DataprocBatchEnvironmentConfig | undefined;
    private _pysparkBatch;
    get pysparkBatch(): DataprocBatchPysparkBatchOutputReference;
    putPysparkBatch(value: DataprocBatchPysparkBatch): void;
    resetPysparkBatch(): void;
    get pysparkBatchInput(): DataprocBatchPysparkBatch | undefined;
    private _runtimeConfig;
    get runtimeConfig(): DataprocBatchRuntimeConfigOutputReference;
    putRuntimeConfig(value: DataprocBatchRuntimeConfig): void;
    resetRuntimeConfig(): void;
    get runtimeConfigInput(): DataprocBatchRuntimeConfig | undefined;
    private _sparkBatch;
    get sparkBatch(): DataprocBatchSparkBatchOutputReference;
    putSparkBatch(value: DataprocBatchSparkBatch): void;
    resetSparkBatch(): void;
    get sparkBatchInput(): DataprocBatchSparkBatch | undefined;
    private _sparkRBatch;
    get sparkRBatch(): DataprocBatchSparkRBatchOutputReference;
    putSparkRBatch(value: DataprocBatchSparkRBatch): void;
    resetSparkRBatch(): void;
    get sparkRBatchInput(): DataprocBatchSparkRBatch | undefined;
    private _sparkSqlBatch;
    get sparkSqlBatch(): DataprocBatchSparkSqlBatchOutputReference;
    putSparkSqlBatch(value: DataprocBatchSparkSqlBatch): void;
    resetSparkSqlBatch(): void;
    get sparkSqlBatchInput(): DataprocBatchSparkSqlBatch | undefined;
    private _timeouts;
    get timeouts(): DataprocBatchTimeoutsOutputReference;
    putTimeouts(value: DataprocBatchTimeouts): void;
    resetTimeouts(): void;
    get timeoutsInput(): cdktf.IResolvable | DataprocBatchTimeouts | undefined;
    // Serialization hooks used by cdktf during synthesis.
    protected synthesizeAttributes(): {
        [name: string]: any;
    };
    protected synthesizeHclAttributes(): {
        [name: string]: any;
    };
}