@cdktf/provider-google
Prebuilt google Provider for Terraform CDK (cdktf)
TypeScript
/**
* Copyright (c) HashiCorp, Inc.
* SPDX-License-Identifier: MPL-2.0
*/
import { Construct } from 'constructs';
import * as cdktf from 'cdktf';
export interface DataprocJobConfig extends cdktf.TerraformMetaArguments {
/**
* By default, you can only delete inactive jobs within Dataproc. Setting this to true, and calling destroy, will ensure that the job is first cancelled before issuing the delete.
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/dataproc_job#force_delete DataprocJob#force_delete}
*/
readonly forceDelete?: boolean | cdktf.IResolvable;
/**
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/dataproc_job#id DataprocJob#id}
*
* Please be aware that the id field is automatically added to all resources in Terraform providers using a Terraform provider SDK version below 2.
* If you experience problems setting this value it might not be settable. Please take a look at the provider documentation to ensure it should be settable.
*/
readonly id?: string;
/**
* Optional. The labels to associate with this job.
*
* **Note**: This field is non-authoritative, and will only manage the labels present in your configuration.
* Please refer to the field 'effective_labels' for all of the labels present on the resource.
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/dataproc_job#labels DataprocJob#labels}
*/
readonly labels?: {
[key: string]: string;
};
/**
* The project in which the cluster can be found and jobs subsequently run against. If it is not provided, the provider project is used.
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/dataproc_job#project DataprocJob#project}
*/
readonly project?: string;
/**
* The Cloud Dataproc region. This essentially determines which clusters are available for this job to be submitted to. If not specified, defaults to global.
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/dataproc_job#region DataprocJob#region}
*/
readonly region?: string;
/**
* hadoop_config block
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/dataproc_job#hadoop_config DataprocJob#hadoop_config}
*/
readonly hadoopConfig?: DataprocJobHadoopConfig;
/**
* hive_config block
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/dataproc_job#hive_config DataprocJob#hive_config}
*/
readonly hiveConfig?: DataprocJobHiveConfig;
/**
* pig_config block
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/dataproc_job#pig_config DataprocJob#pig_config}
*/
readonly pigConfig?: DataprocJobPigConfig;
/**
* placement block
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/dataproc_job#placement DataprocJob#placement}
*/
readonly placement: DataprocJobPlacement;
/**
* presto_config block
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/dataproc_job#presto_config DataprocJob#presto_config}
*/
readonly prestoConfig?: DataprocJobPrestoConfig;
/**
* pyspark_config block
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/dataproc_job#pyspark_config DataprocJob#pyspark_config}
*/
readonly pysparkConfig?: DataprocJobPysparkConfig;
/**
* reference block
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/dataproc_job#reference DataprocJob#reference}
*/
readonly reference?: DataprocJobReference;
/**
* scheduling block
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/dataproc_job#scheduling DataprocJob#scheduling}
*/
readonly scheduling?: DataprocJobScheduling;
/**
* spark_config block
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/dataproc_job#spark_config DataprocJob#spark_config}
*/
readonly sparkConfig?: DataprocJobSparkConfig;
/**
* sparksql_config block
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/dataproc_job#sparksql_config DataprocJob#sparksql_config}
*/
readonly sparksqlConfig?: DataprocJobSparksqlConfig;
/**
* timeouts block
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/dataproc_job#timeouts DataprocJob#timeouts}
*/
readonly timeouts?: DataprocJobTimeouts;
}
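/*
 * Example (editor's sketch, not part of the generated declarations):
 * a DataprocJobConfig wired into the DataprocJob construct that this
 * module declares further down. The import path assumes the standard
 * prebuilt-provider layout; the region, cluster name, and jar URI are
 * placeholders.
 *
 *   import { DataprocJob } from "@cdktf/provider-google/lib/dataproc-job";
 *
 *   new DataprocJob(this, "spark-pi", {
 *     region: "us-central1",
 *     forceDelete: true,
 *     placement: { clusterName: "my-cluster" },
 *     sparkConfig: {
 *       mainClass: "org.apache.spark.examples.SparkPi",
 *       jarFileUris: ["file:///usr/lib/spark/examples/jars/spark-examples.jar"],
 *       args: ["1000"],
 *     },
 *   });
 */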
export interface DataprocJobStatus {
}
export declare function dataprocJobStatusToTerraform(struct?: DataprocJobStatus): any;
export declare function dataprocJobStatusToHclTerraform(struct?: DataprocJobStatus): any;
export declare class DataprocJobStatusOutputReference extends cdktf.ComplexObject {
private isEmptyObject;
/**
* @param terraformResource The parent resource
* @param terraformAttribute The attribute on the parent resource this class is referencing
* @param complexObjectIndex the index of this item in the list
* @param complexObjectIsFromSet whether the list is wrapping a set (will add tolist() to be able to access an item via an index)
*/
constructor(terraformResource: cdktf.IInterpolatingParent, terraformAttribute: string, complexObjectIndex: number, complexObjectIsFromSet: boolean);
get internalValue(): DataprocJobStatus | undefined;
set internalValue(value: DataprocJobStatus | undefined);
get details(): string;
get state(): string;
get stateStartTime(): string;
get substate(): string;
}
export declare class DataprocJobStatusList extends cdktf.ComplexList {
protected terraformResource: cdktf.IInterpolatingParent;
protected terraformAttribute: string;
protected wrapsSet: boolean;
/**
* @param terraformResource The parent resource
* @param terraformAttribute The attribute on the parent resource this class is referencing
* @param wrapsSet whether the list is wrapping a set (will add tolist() to be able to access an item via an index)
*/
constructor(terraformResource: cdktf.IInterpolatingParent, terraformAttribute: string, wrapsSet: boolean);
/**
* @param index the index of the item to return
*/
get(index: number): DataprocJobStatusOutputReference;
}
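/*
 * Example (editor's sketch): status is a computed attribute, so these
 * classes are read-only views over deployed state. Assuming the
 * DataprocJob construct exposes a `status` getter of this list type,
 * the current state can be surfaced as a stack output:
 *
 *   new cdktf.TerraformOutput(this, "job-state", {
 *     value: job.status.get(0).state,
 *   });
 */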
export interface DataprocJobHadoopConfigLoggingConfig {
/**
* Optional. The per-package log levels for the driver. This may include 'root' package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'.
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/dataproc_job#driver_log_levels DataprocJob#driver_log_levels}
*/
readonly driverLogLevels: {
[key: string]: string;
};
}
export declare function dataprocJobHadoopConfigLoggingConfigToTerraform(struct?: DataprocJobHadoopConfigLoggingConfigOutputReference | DataprocJobHadoopConfigLoggingConfig): any;
export declare function dataprocJobHadoopConfigLoggingConfigToHclTerraform(struct?: DataprocJobHadoopConfigLoggingConfigOutputReference | DataprocJobHadoopConfigLoggingConfig): any;
export declare class DataprocJobHadoopConfigLoggingConfigOutputReference extends cdktf.ComplexObject {
private isEmptyObject;
/**
* @param terraformResource The parent resource
* @param terraformAttribute The attribute on the parent resource this class is referencing
*/
constructor(terraformResource: cdktf.IInterpolatingParent, terraformAttribute: string);
get internalValue(): DataprocJobHadoopConfigLoggingConfig | undefined;
set internalValue(value: DataprocJobHadoopConfigLoggingConfig | undefined);
private _driverLogLevels?;
get driverLogLevels(): {
[key: string]: string;
};
set driverLogLevels(value: {
[key: string]: string;
});
get driverLogLevelsInput(): {
[key: string]: string;
} | undefined;
}
export interface DataprocJobHadoopConfig {
/**
* HCFS URIs of archives to be extracted into the working directory. Supported file types: .jar, .tar, .tar.gz, .tgz, and .zip.
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/dataproc_job#archive_uris DataprocJob#archive_uris}
*/
readonly archiveUris?: string[];
/**
* The arguments to pass to the driver.
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/dataproc_job#args DataprocJob#args}
*/
readonly args?: string[];
/**
* HCFS URIs of files to be copied to the working directory of Spark drivers and distributed tasks. Useful for naively parallel tasks.
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/dataproc_job#file_uris DataprocJob#file_uris}
*/
readonly fileUris?: string[];
/**
* HCFS URIs of jar files to add to the CLASSPATHs of the Spark driver and tasks.
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/dataproc_job#jar_file_uris DataprocJob#jar_file_uris}
*/
readonly jarFileUris?: string[];
/**
* The class containing the main method of the driver. Must be in a provided jar or a jar that is already on the classpath. Conflicts with main_jar_file_uri.
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/dataproc_job#main_class DataprocJob#main_class}
*/
readonly mainClass?: string;
/**
* The HCFS URI of the jar file containing the main class. Conflicts with main_class.
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/dataproc_job#main_jar_file_uri DataprocJob#main_jar_file_uri}
*/
readonly mainJarFileUri?: string;
/**
* A mapping of property names to values, used to configure Spark. Properties that conflict with values set by the Cloud Dataproc API may be overwritten. Can include properties set in /etc/spark/conf/spark-defaults.conf and classes in user code.
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/dataproc_job#properties DataprocJob#properties}
*/
readonly properties?: {
[key: string]: string;
};
/**
* logging_config block
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/dataproc_job#logging_config DataprocJob#logging_config}
*/
readonly loggingConfig?: DataprocJobHadoopConfigLoggingConfig;
}
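/*
 * Example (editor's sketch): a Hadoop job running the stock wordcount
 * example. Exactly one of mainClass or mainJarFileUri should be set;
 * the jar path and bucket URIs are placeholders.
 *
 *   const hadoopConfig: DataprocJobHadoopConfig = {
 *     mainJarFileUri:
 *       "file:///usr/lib/hadoop-mapreduce/hadoop-mapreduce-examples.jar",
 *     args: ["wordcount", "gs://my-bucket/input/", "gs://my-bucket/output/"],
 *     loggingConfig: { driverLogLevels: { root: "INFO" } },
 *   };
 */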
export declare function dataprocJobHadoopConfigToTerraform(struct?: DataprocJobHadoopConfigOutputReference | DataprocJobHadoopConfig): any;
export declare function dataprocJobHadoopConfigToHclTerraform(struct?: DataprocJobHadoopConfigOutputReference | DataprocJobHadoopConfig): any;
export declare class DataprocJobHadoopConfigOutputReference extends cdktf.ComplexObject {
private isEmptyObject;
/**
* @param terraformResource The parent resource
* @param terraformAttribute The attribute on the parent resource this class is referencing
*/
constructor(terraformResource: cdktf.IInterpolatingParent, terraformAttribute: string);
get internalValue(): DataprocJobHadoopConfig | undefined;
set internalValue(value: DataprocJobHadoopConfig | undefined);
private _archiveUris?;
get archiveUris(): string[];
set archiveUris(value: string[]);
resetArchiveUris(): void;
get archiveUrisInput(): string[] | undefined;
private _args?;
get args(): string[];
set args(value: string[]);
resetArgs(): void;
get argsInput(): string[] | undefined;
private _fileUris?;
get fileUris(): string[];
set fileUris(value: string[]);
resetFileUris(): void;
get fileUrisInput(): string[] | undefined;
private _jarFileUris?;
get jarFileUris(): string[];
set jarFileUris(value: string[]);
resetJarFileUris(): void;
get jarFileUrisInput(): string[] | undefined;
private _mainClass?;
get mainClass(): string;
set mainClass(value: string);
resetMainClass(): void;
get mainClassInput(): string | undefined;
private _mainJarFileUri?;
get mainJarFileUri(): string;
set mainJarFileUri(value: string);
resetMainJarFileUri(): void;
get mainJarFileUriInput(): string | undefined;
private _properties?;
get properties(): {
[key: string]: string;
};
set properties(value: {
[key: string]: string;
});
resetProperties(): void;
get propertiesInput(): {
[key: string]: string;
} | undefined;
private _loggingConfig;
get loggingConfig(): DataprocJobHadoopConfigLoggingConfigOutputReference;
putLoggingConfig(value: DataprocJobHadoopConfigLoggingConfig): void;
resetLoggingConfig(): void;
get loggingConfigInput(): DataprocJobHadoopConfigLoggingConfig | undefined;
}
export interface DataprocJobHiveConfig {
/**
* Whether to continue executing queries if a query fails. Setting to true can be useful when executing independent parallel queries. Defaults to false.
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/dataproc_job#continue_on_failure DataprocJob#continue_on_failure}
*/
readonly continueOnFailure?: boolean | cdktf.IResolvable;
/**
* HCFS URIs of jar files to add to the CLASSPATH of the Hive server and Hadoop MapReduce (MR) tasks. Can contain Hive SerDes and UDFs.
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/dataproc_job#jar_file_uris DataprocJob#jar_file_uris}
*/
readonly jarFileUris?: string[];
/**
* A mapping of property names and values, used to configure Hive. Properties that conflict with values set by the Cloud Dataproc API may be overwritten. Can include properties set in /etc/hadoop/conf/*-site.xml, /etc/hive/conf/hive-site.xml, and classes in user code.
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/dataproc_job#properties DataprocJob#properties}
*/
readonly properties?: {
[key: string]: string;
};
/**
* HCFS URI of file containing Hive script to execute as the job. Conflicts with query_list
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/dataproc_job#query_file_uri DataprocJob#query_file_uri}
*/
readonly queryFileUri?: string;
/**
* The list of Hive queries or statements to execute as part of the job. Conflicts with query_file_uri
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/dataproc_job#query_list DataprocJob#query_list}
*/
readonly queryList?: string[];
/**
* Mapping of query variable names to values (equivalent to the Hive command: SET name="value";).
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/dataproc_job#script_variables DataprocJob#script_variables}
*/
readonly scriptVariables?: {
[key: string]: string;
};
}
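/*
 * Example (editor's sketch): a Hive job reading its script from Cloud
 * Storage and passing a variable to it. Only one of queryFileUri and
 * queryList may be set; the paths and names are placeholders.
 *
 *   const hiveConfig: DataprocJobHiveConfig = {
 *     queryFileUri: "gs://my-bucket/queries/daily_report.hql",
 *     scriptVariables: { run_date: "2024-01-01" },
 *     continueOnFailure: false,
 *   };
 */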
export declare function dataprocJobHiveConfigToTerraform(struct?: DataprocJobHiveConfigOutputReference | DataprocJobHiveConfig): any;
export declare function dataprocJobHiveConfigToHclTerraform(struct?: DataprocJobHiveConfigOutputReference | DataprocJobHiveConfig): any;
export declare class DataprocJobHiveConfigOutputReference extends cdktf.ComplexObject {
private isEmptyObject;
/**
* @param terraformResource The parent resource
* @param terraformAttribute The attribute on the parent resource this class is referencing
*/
constructor(terraformResource: cdktf.IInterpolatingParent, terraformAttribute: string);
get internalValue(): DataprocJobHiveConfig | undefined;
set internalValue(value: DataprocJobHiveConfig | undefined);
private _continueOnFailure?;
get continueOnFailure(): boolean | cdktf.IResolvable;
set continueOnFailure(value: boolean | cdktf.IResolvable);
resetContinueOnFailure(): void;
get continueOnFailureInput(): boolean | cdktf.IResolvable | undefined;
private _jarFileUris?;
get jarFileUris(): string[];
set jarFileUris(value: string[]);
resetJarFileUris(): void;
get jarFileUrisInput(): string[] | undefined;
private _properties?;
get properties(): {
[key: string]: string;
};
set properties(value: {
[key: string]: string;
});
resetProperties(): void;
get propertiesInput(): {
[key: string]: string;
} | undefined;
private _queryFileUri?;
get queryFileUri(): string;
set queryFileUri(value: string);
resetQueryFileUri(): void;
get queryFileUriInput(): string | undefined;
private _queryList?;
get queryList(): string[];
set queryList(value: string[]);
resetQueryList(): void;
get queryListInput(): string[] | undefined;
private _scriptVariables?;
get scriptVariables(): {
[key: string]: string;
};
set scriptVariables(value: {
[key: string]: string;
});
resetScriptVariables(): void;
get scriptVariablesInput(): {
[key: string]: string;
} | undefined;
}
export interface DataprocJobPigConfigLoggingConfig {
/**
* Optional. The per-package log levels for the driver. This may include 'root' package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'.
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/dataproc_job#driver_log_levels DataprocJob#driver_log_levels}
*/
readonly driverLogLevels: {
[key: string]: string;
};
}
export declare function dataprocJobPigConfigLoggingConfigToTerraform(struct?: DataprocJobPigConfigLoggingConfigOutputReference | DataprocJobPigConfigLoggingConfig): any;
export declare function dataprocJobPigConfigLoggingConfigToHclTerraform(struct?: DataprocJobPigConfigLoggingConfigOutputReference | DataprocJobPigConfigLoggingConfig): any;
export declare class DataprocJobPigConfigLoggingConfigOutputReference extends cdktf.ComplexObject {
private isEmptyObject;
/**
* @param terraformResource The parent resource
* @param terraformAttribute The attribute on the parent resource this class is referencing
*/
constructor(terraformResource: cdktf.IInterpolatingParent, terraformAttribute: string);
get internalValue(): DataprocJobPigConfigLoggingConfig | undefined;
set internalValue(value: DataprocJobPigConfigLoggingConfig | undefined);
private _driverLogLevels?;
get driverLogLevels(): {
[key: string]: string;
};
set driverLogLevels(value: {
[key: string]: string;
});
get driverLogLevelsInput(): {
[key: string]: string;
} | undefined;
}
export interface DataprocJobPigConfig {
/**
* Whether to continue executing queries if a query fails. Setting to true can be useful when executing independent parallel queries. Defaults to false.
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/dataproc_job#continue_on_failure DataprocJob#continue_on_failure}
*/
readonly continueOnFailure?: boolean | cdktf.IResolvable;
/**
* HCFS URIs of jar files to add to the CLASSPATH of the Pig Client and Hadoop MapReduce (MR) tasks. Can contain Pig UDFs.
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/dataproc_job#jar_file_uris DataprocJob#jar_file_uris}
*/
readonly jarFileUris?: string[];
/**
* A mapping of property names to values, used to configure Pig. Properties that conflict with values set by the Cloud Dataproc API may be overwritten. Can include properties set in /etc/hadoop/conf/*-site.xml, /etc/pig/conf/pig.properties, and classes in user code.
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/dataproc_job#properties DataprocJob#properties}
*/
readonly properties?: {
[key: string]: string;
};
/**
* HCFS URI of the file containing the Pig script to execute as the job. Conflicts with query_list.
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/dataproc_job#query_file_uri DataprocJob#query_file_uri}
*/
readonly queryFileUri?: string;
/**
* The list of Pig queries or statements to execute as part of the job. Conflicts with query_file_uri.
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/dataproc_job#query_list DataprocJob#query_list}
*/
readonly queryList?: string[];
/**
* Mapping of query variable names to values (equivalent to the Pig command: name=[value]).
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/dataproc_job#script_variables DataprocJob#script_variables}
*/
readonly scriptVariables?: {
[key: string]: string;
};
/**
* logging_config block
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/dataproc_job#logging_config DataprocJob#logging_config}
*/
readonly loggingConfig?: DataprocJobPigConfigLoggingConfig;
}
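/*
 * Example (editor's sketch): a Pig job, structurally parallel to the
 * Hive shape above but with Pig-style name=[value] variables. Paths
 * and variable names are placeholders.
 *
 *   const pigConfig: DataprocJobPigConfig = {
 *     queryFileUri: "gs://my-bucket/scripts/etl.pig",
 *     scriptVariables: { INPUT: "gs://my-bucket/raw/" },
 *     loggingConfig: { driverLogLevels: { root: "WARN" } },
 *   };
 */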
export declare function dataprocJobPigConfigToTerraform(struct?: DataprocJobPigConfigOutputReference | DataprocJobPigConfig): any;
export declare function dataprocJobPigConfigToHclTerraform(struct?: DataprocJobPigConfigOutputReference | DataprocJobPigConfig): any;
export declare class DataprocJobPigConfigOutputReference extends cdktf.ComplexObject {
private isEmptyObject;
/**
* @param terraformResource The parent resource
* @param terraformAttribute The attribute on the parent resource this class is referencing
*/
constructor(terraformResource: cdktf.IInterpolatingParent, terraformAttribute: string);
get internalValue(): DataprocJobPigConfig | undefined;
set internalValue(value: DataprocJobPigConfig | undefined);
private _continueOnFailure?;
get continueOnFailure(): boolean | cdktf.IResolvable;
set continueOnFailure(value: boolean | cdktf.IResolvable);
resetContinueOnFailure(): void;
get continueOnFailureInput(): boolean | cdktf.IResolvable | undefined;
private _jarFileUris?;
get jarFileUris(): string[];
set jarFileUris(value: string[]);
resetJarFileUris(): void;
get jarFileUrisInput(): string[] | undefined;
private _properties?;
get properties(): {
[key: string]: string;
};
set properties(value: {
[key: string]: string;
});
resetProperties(): void;
get propertiesInput(): {
[key: string]: string;
} | undefined;
private _queryFileUri?;
get queryFileUri(): string;
set queryFileUri(value: string);
resetQueryFileUri(): void;
get queryFileUriInput(): string | undefined;
private _queryList?;
get queryList(): string[];
set queryList(value: string[]);
resetQueryList(): void;
get queryListInput(): string[] | undefined;
private _scriptVariables?;
get scriptVariables(): {
[key: string]: string;
};
set scriptVariables(value: {
[key: string]: string;
});
resetScriptVariables(): void;
get scriptVariablesInput(): {
[key: string]: string;
} | undefined;
private _loggingConfig;
get loggingConfig(): DataprocJobPigConfigLoggingConfigOutputReference;
putLoggingConfig(value: DataprocJobPigConfigLoggingConfig): void;
resetLoggingConfig(): void;
get loggingConfigInput(): DataprocJobPigConfigLoggingConfig | undefined;
}
export interface DataprocJobPlacement {
/**
* The name of the cluster where the job will be submitted.
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/dataproc_job#cluster_name DataprocJob#cluster_name}
*/
readonly clusterName: string;
}
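/*
 * Example (editor's sketch): placement is the only required block on
 * DataprocJobConfig. Assuming a DataprocCluster construct from this
 * provider is in scope as `cluster`, referencing its name token lets
 * Terraform infer the dependency between the two resources:
 *
 *   placement: { clusterName: cluster.name },
 */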
export declare function dataprocJobPlacementToTerraform(struct?: DataprocJobPlacementOutputReference | DataprocJobPlacement): any;
export declare function dataprocJobPlacementToHclTerraform(struct?: DataprocJobPlacementOutputReference | DataprocJobPlacement): any;
export declare class DataprocJobPlacementOutputReference extends cdktf.ComplexObject {
private isEmptyObject;
/**
* @param terraformResource The parent resource
* @param terraformAttribute The attribute on the parent resource this class is referencing
*/
constructor(terraformResource: cdktf.IInterpolatingParent, terraformAttribute: string);
get internalValue(): DataprocJobPlacement | undefined;
set internalValue(value: DataprocJobPlacement | undefined);
private _clusterName?;
get clusterName(): string;
set clusterName(value: string);
get clusterNameInput(): string | undefined;
get clusterUuid(): string;
}
export interface DataprocJobPrestoConfigLoggingConfig {
/**
* Optional. The per-package log levels for the driver. This may include 'root' package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'.
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/dataproc_job#driver_log_levels DataprocJob#driver_log_levels}
*/
readonly driverLogLevels: {
[key: string]: string;
};
}
export declare function dataprocJobPrestoConfigLoggingConfigToTerraform(struct?: DataprocJobPrestoConfigLoggingConfigOutputReference | DataprocJobPrestoConfigLoggingConfig): any;
export declare function dataprocJobPrestoConfigLoggingConfigToHclTerraform(struct?: DataprocJobPrestoConfigLoggingConfigOutputReference | DataprocJobPrestoConfigLoggingConfig): any;
export declare class DataprocJobPrestoConfigLoggingConfigOutputReference extends cdktf.ComplexObject {
private isEmptyObject;
/**
* @param terraformResource The parent resource
* @param terraformAttribute The attribute on the parent resource this class is referencing
*/
constructor(terraformResource: cdktf.IInterpolatingParent, terraformAttribute: string);
get internalValue(): DataprocJobPrestoConfigLoggingConfig | undefined;
set internalValue(value: DataprocJobPrestoConfigLoggingConfig | undefined);
private _driverLogLevels?;
get driverLogLevels(): {
[key: string]: string;
};
set driverLogLevels(value: {
[key: string]: string;
});
get driverLogLevelsInput(): {
[key: string]: string;
} | undefined;
}
export interface DataprocJobPrestoConfig {
/**
* Presto client tags to attach to this query.
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/dataproc_job#client_tags DataprocJob#client_tags}
*/
readonly clientTags?: string[];
/**
* Whether to continue executing queries if a query fails. Setting to true can be useful when executing independent parallel queries. Defaults to false.
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/dataproc_job#continue_on_failure DataprocJob#continue_on_failure}
*/
readonly continueOnFailure?: boolean | cdktf.IResolvable;
/**
* The format in which query output will be displayed. See the Presto documentation for supported output formats.
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/dataproc_job#output_format DataprocJob#output_format}
*/
readonly outputFormat?: string;
/**
* A mapping of property names to values. Used to set Presto session properties; equivalent to using the --session flag in the Presto CLI.
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/dataproc_job#properties DataprocJob#properties}
*/
readonly properties?: {
[key: string]: string;
};
/**
* The HCFS URI of the script that contains SQL queries. Conflicts with query_list
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/dataproc_job#query_file_uri DataprocJob#query_file_uri}
*/
readonly queryFileUri?: string;
/**
* The list of SQL queries or statements to execute as part of the job. Conflicts with query_file_uri
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/dataproc_job#query_list DataprocJob#query_list}
*/
readonly queryList?: string[];
/**
* logging_config block
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/dataproc_job#logging_config DataprocJob#logging_config}
*/
readonly loggingConfig?: DataprocJobPrestoConfigLoggingConfig;
}
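/*
 * Example (editor's sketch): a Presto query with session properties,
 * mirroring the Presto CLI's --session flag. The property name, table,
 * and output format are illustrative.
 *
 *   const prestoConfig: DataprocJobPrestoConfig = {
 *     queryList: ["SELECT COUNT(*) FROM hive.default.my_table"],
 *     properties: { query_max_run_time: "30m" },
 *     outputFormat: "CSV",
 *   };
 */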
export declare function dataprocJobPrestoConfigToTerraform(struct?: DataprocJobPrestoConfigOutputReference | DataprocJobPrestoConfig): any;
export declare function dataprocJobPrestoConfigToHclTerraform(struct?: DataprocJobPrestoConfigOutputReference | DataprocJobPrestoConfig): any;
export declare class DataprocJobPrestoConfigOutputReference extends cdktf.ComplexObject {
private isEmptyObject;
/**
* @param terraformResource The parent resource
* @param terraformAttribute The attribute on the parent resource this class is referencing
*/
constructor(terraformResource: cdktf.IInterpolatingParent, terraformAttribute: string);
get internalValue(): DataprocJobPrestoConfig | undefined;
set internalValue(value: DataprocJobPrestoConfig | undefined);
private _clientTags?;
get clientTags(): string[];
set clientTags(value: string[]);
resetClientTags(): void;
get clientTagsInput(): string[] | undefined;
private _continueOnFailure?;
get continueOnFailure(): boolean | cdktf.IResolvable;
set continueOnFailure(value: boolean | cdktf.IResolvable);
resetContinueOnFailure(): void;
get continueOnFailureInput(): boolean | cdktf.IResolvable | undefined;
private _outputFormat?;
get outputFormat(): string;
set outputFormat(value: string);
resetOutputFormat(): void;
get outputFormatInput(): string | undefined;
private _properties?;
get properties(): {
[key: string]: string;
};
set properties(value: {
[key: string]: string;
});
resetProperties(): void;
get propertiesInput(): {
[key: string]: string;
} | undefined;
private _queryFileUri?;
get queryFileUri(): string;
set queryFileUri(value: string);
resetQueryFileUri(): void;
get queryFileUriInput(): string | undefined;
private _queryList?;
get queryList(): string[];
set queryList(value: string[]);
resetQueryList(): void;
get queryListInput(): string[] | undefined;
private _loggingConfig;
get loggingConfig(): DataprocJobPrestoConfigLoggingConfigOutputReference;
putLoggingConfig(value: DataprocJobPrestoConfigLoggingConfig): void;
resetLoggingConfig(): void;
get loggingConfigInput(): DataprocJobPrestoConfigLoggingConfig | undefined;
}
export interface DataprocJobPysparkConfigLoggingConfig {
/**
* Optional. The per-package log levels for the driver. This may include 'root' package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'.
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/dataproc_job#driver_log_levels DataprocJob#driver_log_levels}
*/
readonly driverLogLevels: {
[key: string]: string;
};
}
export declare function dataprocJobPysparkConfigLoggingConfigToTerraform(struct?: DataprocJobPysparkConfigLoggingConfigOutputReference | DataprocJobPysparkConfigLoggingConfig): any;
export declare function dataprocJobPysparkConfigLoggingConfigToHclTerraform(struct?: DataprocJobPysparkConfigLoggingConfigOutputReference | DataprocJobPysparkConfigLoggingConfig): any;
export declare class DataprocJobPysparkConfigLoggingConfigOutputReference extends cdktf.ComplexObject {
private isEmptyObject;
/**
* @param terraformResource The parent resource
* @param terraformAttribute The attribute on the parent resource this class is referencing
*/
constructor(terraformResource: cdktf.IInterpolatingParent, terraformAttribute: string);
get internalValue(): DataprocJobPysparkConfigLoggingConfig | undefined;
set internalValue(value: DataprocJobPysparkConfigLoggingConfig | undefined);
private _driverLogLevels?;
get driverLogLevels(): {
[key: string]: string;
};
set driverLogLevels(value: {
[key: string]: string;
});
get driverLogLevelsInput(): {
[key: string]: string;
} | undefined;
}
export interface DataprocJobPysparkConfig {
/**
* Optional. HCFS URIs of archives to be extracted into the working directory. Supported file types: .jar, .tar, .tar.gz, .tgz, and .zip
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/dataproc_job#archive_uris DataprocJob#archive_uris}
*/
readonly archiveUris?: string[];
/**
* Optional. The arguments to pass to the driver. Do not include arguments, such as --conf, that can be set as job properties, since a collision may occur that causes an incorrect job submission
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/dataproc_job#args DataprocJob#args}
*/
readonly args?: string[];
/**
* Optional. HCFS URIs of files to be copied to the working directory of Python drivers and distributed tasks. Useful for naively parallel tasks
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/dataproc_job#file_uris DataprocJob#file_uris}
*/
readonly fileUris?: string[];
/**
* Optional. HCFS URIs of jar files to add to the CLASSPATHs of the Python driver and tasks
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/dataproc_job#jar_file_uris DataprocJob#jar_file_uris}
*/
readonly jarFileUris?: string[];
/**
* Required. The HCFS URI of the main Python file to use as the driver. Must be a .py file
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/dataproc_job#main_python_file_uri DataprocJob#main_python_file_uri}
*/
readonly mainPythonFileUri: string;
/**
* Optional. A mapping of property names to values, used to configure PySpark. Properties that conflict with values set by the Cloud Dataproc API may be overwritten. Can include properties set in /etc/spark/conf/spark-defaults.conf and classes in user code
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/dataproc_job#properties DataprocJob#properties}
*/
readonly properties?: {
[key: string]: string;
};
/**
* Optional. HCFS file URIs of Python files to pass to the PySpark framework. Supported file types: .py, .egg, and .zip
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/dataproc_job#python_file_uris DataprocJob#python_file_uris}
*/
readonly pythonFileUris?: string[];
/**
* logging_config block
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/dataproc_job#logging_config DataprocJob#logging_config}
*/
readonly loggingConfig?: DataprocJobPysparkConfigLoggingConfig;
}
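/*
 * Example (editor's sketch): mainPythonFileUri is the only required
 * field; supporting modules travel via pythonFileUris. Per the args doc
 * above, flags like --conf belong in properties instead. Paths are
 * placeholders.
 *
 *   const pysparkConfig: DataprocJobPysparkConfig = {
 *     mainPythonFileUri: "gs://my-bucket/jobs/main.py",
 *     pythonFileUris: ["gs://my-bucket/jobs/helpers.zip"],
 *     properties: { "spark.executor.memory": "4g" },
 *     args: ["--date", "2024-01-01"],
 *   };
 */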
export declare function dataprocJobPysparkConfigToTerraform(struct?: DataprocJobPysparkConfigOutputReference | DataprocJobPysparkConfig): any;
export declare function dataprocJobPysparkConfigToHclTerraform(struct?: DataprocJobPysparkConfigOutputReference | DataprocJobPysparkConfig): any;
export declare class DataprocJobPysparkConfigOutputReference extends cdktf.ComplexObject {
private isEmptyObject;
/**
* @param terraformResource The parent resource
* @param terraformAttribute The attribute on the parent resource this class is referencing
*/
constructor(terraformResource: cdktf.IInterpolatingParent, terraformAttribute: string);
get internalValue(): DataprocJobPysparkConfig | undefined;
set internalValue(value: DataprocJobPysparkConfig | undefined);
private _archiveUris?;
get archiveUris(): string[];
set archiveUris(value: string[]);
resetArchiveUris(): void;
get archiveUrisInput(): string[] | undefined;
private _args?;
get args(): string[];
set args(value: string[]);
resetArgs(): void;
get argsInput(): string[] | undefined;
private _fileUris?;
get fileUris(): string[];
set fileUris(value: string[]);
resetFileUris(): void;
get fileUrisInput(): string[] | undefined;
private _jarFileUris?;
get jarFileUris(): string[];
set jarFileUris(value: string[]);
resetJarFileUris(): void;
get jarFileUrisInput(): string[] | undefined;
private _mainPythonFileUri?;
get mainPythonFileUri(): string;
set mainPythonFileUri(value: string);
get mainPythonFileUriInput(): string | undefined;
private _properties?;
get properties(): {
[key: string]: string;
};
set properties(value: {
[key: string]: string;
});
resetProperties(): void;
get propertiesInput(): {
[key: string]: string;
} | undefined;
private _pythonFileUris?;
get pythonFileUris(): string[];
set pythonFileUris(value: string[]);
resetPythonFileUris(): void;
get pythonFileUrisInput(): string[] | undefined;
private _loggingConfig;
get loggingConfig(): DataprocJobPysparkConfigLoggingConfigOutputReference;
putLoggingConfig(value: DataprocJobPysparkConfigLoggingConfig): void;
resetLoggingConfig(): void;
get loggingConfigInput(): DataprocJobPysparkConfigLoggingConfig | undefined;
}
export interface DataprocJobReference {
/**
* The job ID, which must be unique within the project. It is generated by the server upon job submission, or may be provided by the user as a means to perform retries without creating duplicate jobs.
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/dataproc_job#job_id DataprocJob#job_id}
*/
readonly jobId?: string;
}
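/*
 * Example (editor's sketch): supplying a stable jobId makes retries
 * idempotent, since resubmitting the same ID will not create a
 * duplicate job. The ID below is a placeholder.
 *
 *   reference: { jobId: "nightly-etl-2024-01-01" },
 */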
export declare function dataprocJobReferenceToTerraform(struct?: DataprocJobReferenceOutputReference | DataprocJobReference): any;
export declare function dataprocJobReferenceToHclTerraform(struct?: DataprocJobReferenceOutputReference | DataprocJobReference): any;
export declare class DataprocJobReferenceOutputReference extends cdktf.ComplexObject {
private isEmptyObject;
/**
* @param terraformResource The parent resource
* @param terraformAttribute The attribute on the parent resource this class is referencing
*/
constructor(terraformResource: cdktf.IInterpolatingParent, terraformAttribute: string);
get internalValue(): DataprocJobReference | undefined;
set internalValue(value: DataprocJobReference | undefined);
private _jobId?;
get jobId(): string;
set jobId(value: string);
resetJobId(): void;
get jobIdInput(): string | undefined;
}
export interface DataprocJobScheduling {
/**
* Maximum number of times per hour a driver may be restarted as a result of the driver exiting with a non-zero code, before the job is reported failed.
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/dataproc_job#max_failures_per_hour DataprocJob#max_failures_per_hour}
*/
readonly maxFailuresPerHour: number;
/**
* Maximum number of times in total a driver may be restarted as a result of the driver exiting with a non-zero code, before the job is reported failed.
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/dataproc_job#max_failures_total DataprocJob#max_failures_total}
*/
readonly maxFailuresTotal: number;
}
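/*
 * Example (editor's sketch): both scheduling fields are required when
 * the block is present; the values below are illustrative limits for a
 * restartable driver.
 *
 *   scheduling: { maxFailuresPerHour: 5, maxFailuresTotal: 20 },
 */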
export declare function dataprocJobSchedulingToTerraform(struct?: DataprocJobSchedulingOutputReference | DataprocJobScheduling): any;
export declare function dataprocJobSchedulingToHclTerraform(struct?: DataprocJobSchedulingOutputReference | DataprocJobScheduling): any;
export declare class DataprocJobSchedulingOutputReference extends cdktf.ComplexObject {
private isEmptyObject;
/**
* @param terraformResource The parent resource
* @param terraformAttribute The attribute on the parent resource this class is referencing
*/
constructor(terraformResource: cdktf.IInterpolatingParent, terraformAttribute: string);
get internalValue(): DataprocJobScheduling | undefined;
set internalValue(value: DataprocJobScheduling | undefined);
private _maxFailuresPerHour?;
get maxFailuresPerHour(): number;
set maxFailuresPerHour(value: number);
get maxFailuresPerHourInput(): number | undefined;
private _maxFailuresTotal?;
get maxFailuresTotal(): number;
set maxFailuresTotal(value: number);
get maxFailuresTotalInput(): number | undefined;
}
export interface DataprocJobSparkConfigLoggingConfig {
/**
* Optional. The per-package log levels for the driver. This may include 'root' package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'.
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/dataproc_job#driver_log_levels DataprocJob#driver_log_levels}
*/
readonly driverLogLevels: {
[key: string]: string;
};
}
export declare function dataprocJobSparkConfigLoggingConfigToTerraform(struct?: DataprocJobSparkConfigLoggingConfigOutputReference | DataprocJobSparkConfigLoggingConfig): any;
export declare function dataprocJobSparkConfigLoggingConfigToHclTerraform(struct?: DataprocJobSparkConfigLoggingConfigOutputReference | DataprocJobSparkConfigLoggingConfig): any;
export declare class DataprocJobSparkConfigLoggingConfigOutputReference extends cdktf.ComplexObject {
private isEmptyObject;
/**
* @param terraformResource The parent resource
* @param terraformAttribute The attribute on the parent resource this class is referencing
*/
constructor(terraformResource: cdktf.IInterpolatingParent, terraformAttribute: string);
get internalValue(): DataprocJobSparkConfigLoggingConfig | undefined;
set internalValue(value: DataprocJobSparkConfigLoggingConfig | undefined);
private _driverLogLevels?;
get driverLogLevels(): {
[key: string]: string;
};
set driverLogLevels(value: {
[key: string]: string;
});
get driverLogLevelsInput(): {
[key: string]: string;
} | undefined;
}
export interface DataprocJobSparkConfig {
/**
* HCFS URIs of archives to be extracted into the working directory. Supported file types: .jar, .tar, .tar.gz, .tgz, and .zip.
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/dataproc_job#archive_uris DataprocJob#archive_uris}
*/
readonly archiveUris?: string[];
/**
* The arguments to pass to the driver.
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/dataproc_job#args DataprocJob#args}
*/
readonly args?: string[];
/**
* HCFS URIs of files to be copied to the working directory of Spark drivers and distributed tasks. Useful for naively parallel tasks.
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/dataproc_job#file_uris DataprocJob#file_uris}
*/
readonly fileUris?: string[];
/**
* HCFS URIs of jar files to add to the CLASSPATHs of the Spark driver and tasks.
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/dataproc_job#jar_file_uris DataprocJob#jar_file_uris}
*/
readonly jarFileUris?: string[];
/**
* The class containing the main method of the driver. Must be in a provided jar or a jar that is already on the classpath. Conflicts with main_jar_file_uri.
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/dataproc_job#main_class DataprocJob#main_class}
*/
readonly mainClass?: string;
/**
* The HCFS URI of the jar file containing the main class. Conflicts with main_class.
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/dataproc_job#main_jar_file_uri DataprocJob#main_jar_file_uri}
*/
readonly mainJarFileUri?: string;
/**
* A mapping of property names to values, used to configure Spark. Properties that conflict with values set by the Cloud Dataproc API may be overwritten. Can include properties set in /etc/spark/conf/spark-defaults.conf and classes in user code.
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/dataproc_job#properties DataprocJob#properties}
*/
readonly properties?: {
[key: string]: string;
};
/**
* logging_config block
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/dataproc_job#logging_config DataprocJob#logging_config}
*/
readonly loggingConfig?: DataprocJobSparkConfigLoggingConfig;
}
export declare function dataprocJobSparkConfigToTerraform(struct?: DataprocJobSparkConfigOutputReference | DataprocJobSparkConfig): any;
export declare function dataprocJobSparkConfigToHclTerraform(struct?: DataprocJobSparkConfigOutputReference | DataprocJobSparkConfig): any;
export declare class DataprocJobSparkConfigOutputReference extends cdktf.ComplexObject {
private isEmptyObject;
/**
* @param terraformResource The parent resource
* @param terraformAttribute The attribute on the parent resource this class is referencing
*/
constructor(terraformResource: cdktf.IInterpolatingParent, terraformAttribute: string);
get internalValue(): DataprocJobSparkConfig | undefined;
set internalValue(value: DataprocJobSparkConfig | undefined);
private _archiveUris?;
get archiveUris(): string[];
set archiveUris(value: string[]);
resetArchiveUris(): void;
get archiveUrisInput(): string[] | undefined;
private _args?;
get args(): string[];
set args(value: string[]);
resetArgs(): void;
get argsInput(): string[] | undefined;
private _fileUris?;
get fileUris(): string[];
set fileUris(value: string[]);
resetFileUris(): void;
get fileUrisInput(): string[] | undefined;
private _jarFileUris?;
get jarFileUris(): string[];
set jarFileUris(value: string[]);
resetJarFileUris(): void;
get jarFileUrisInput(): string[] | undefined;
private _mainClass?;
get mainClass(): string;
set mainClass(value: string);
resetMainClass(): void;
get mainClassInput(): string | undefined;
private _mainJarFileUri?;
get mainJarFileUri(): string;
set mainJarFileUri(value: string);
resetMainJarFileUri(): void;
get mainJarFileUriInput(): string | undefined;
private _properties?;
get properties(): {
[key: string]: string;
};
set properties(value: {
[key: string]: string;
});
resetProperties(): void;
get propertiesInput(): {
[key: string]: string;
} | undefined;
private _loggingConfig;
get loggingConfig(): DataprocJobSparkConfigLoggingConfigOutputReference;
putLoggingConfig(value: DataprocJobSparkConfigLoggingConfig): void;
resetLoggingConfig(): void;
get loggingConfigInput(): DataprocJobSparkConfigLoggingConfig | undefined;
}
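/*
 * Example (editor's sketch): nested blocks can also be changed after
 * construction through their output references. Assuming the DataprocJob
 * class exposes sparkConfig as this reference type:
 *
 *   job.sparkConfig.putLoggingConfig({
 *     driverLogLevels: { "org.apache": "DEBUG" },
 *   });
 */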