@cdktf/provider-google
Prebuilt google Provider for Terraform CDK (cdktf)
/**
* Copyright (c) HashiCorp, Inc.
* SPDX-License-Identifier: MPL-2.0
*/
import { Construct } from 'constructs';
import * as cdktf from 'cdktf';
export interface DataprocWorkflowTemplateConfig extends cdktf.TerraformMetaArguments {
/**
* Optional. Timeout duration for the DAG of jobs, expressed in seconds (see [JSON representation of duration](https://developers.google.com/protocol-buffers/docs/proto3#json)). The timeout duration must be from 10 minutes ("600s") to 24 hours ("86400s"). The timer begins when the first job is submitted. If the workflow is running at the end of the timeout period, any remaining jobs are cancelled, the workflow is ended, and if the workflow was running on a [managed cluster](/dataproc/docs/concepts/workflows/using-workflows#configuring_or_selecting_a_cluster), the cluster is deleted.
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/dataproc_workflow_template#dag_timeout DataprocWorkflowTemplate#dag_timeout}
*/
readonly dagTimeout?: string;
/**
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/dataproc_workflow_template#id DataprocWorkflowTemplate#id}
*
* Please be aware that the id field is automatically added to all resources in Terraform providers built with a Terraform provider SDK version below 2.
* If you have trouble setting this value, it may simply not be settable; check the provider documentation to confirm.
*/
readonly id?: string;
/**
* Optional. The labels to associate with this template. These labels will be propagated to all jobs and clusters created by the workflow instance. Label **keys** must contain 1 to 63 characters, and must conform to [RFC 1035](https://www.ietf.org/rfc/rfc1035.txt). Label **values** may be empty, but, if present, must contain 1 to 63 characters, and must conform to [RFC 1035](https://www.ietf.org/rfc/rfc1035.txt). No more than 32 labels can be associated with a template.
*
* **Note**: This field is non-authoritative, and will only manage the labels present in your configuration.
* Please refer to the field `effective_labels` for all of the labels present on the resource.
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/dataproc_workflow_template#labels DataprocWorkflowTemplate#labels}
*/
readonly labels?: {
[key: string]: string;
};
/**
* The location for the resource
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/dataproc_workflow_template#location DataprocWorkflowTemplate#location}
*/
readonly location: string;
/**
* Output only. The resource name of the workflow template, as described in https://cloud.google.com/apis/design/resource_names.
*
* - For `projects.regions.workflowTemplates`, the resource name of the template has the format `projects/{project_id}/regions/{region}/workflowTemplates/{template_id}`.
* - For `projects.locations.workflowTemplates`, the resource name of the template has the format `projects/{project_id}/locations/{location}/workflowTemplates/{template_id}`.
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/dataproc_workflow_template#name DataprocWorkflowTemplate#name}
*/
readonly name: string;
/**
* The project for the resource
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/dataproc_workflow_template#project DataprocWorkflowTemplate#project}
*/
readonly project?: string;
/**
* Output only. The current version of this workflow template.
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/dataproc_workflow_template#version DataprocWorkflowTemplate#version}
*/
readonly version?: number;
/**
* jobs block
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/dataproc_workflow_template#jobs DataprocWorkflowTemplate#jobs}
*/
readonly jobs: DataprocWorkflowTemplateJobs[] | cdktf.IResolvable;
/**
* parameters block
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/dataproc_workflow_template#parameters DataprocWorkflowTemplate#parameters}
*/
readonly parameters?: DataprocWorkflowTemplateParameters[] | cdktf.IResolvable;
/**
* placement block
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/dataproc_workflow_template#placement DataprocWorkflowTemplate#placement}
*/
readonly placement: DataprocWorkflowTemplatePlacement;
/**
* timeouts block
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/dataproc_workflow_template#timeouts DataprocWorkflowTemplate#timeouts}
*/
readonly timeouts?: DataprocWorkflowTemplateTimeouts;
}
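// Usage sketch (not part of the generated declarations): wiring a
// DataprocWorkflowTemplateConfig into the DataprocWorkflowTemplate construct that
// this module declares further below. Provider configuration is elided, and the
// project, bucket, and cluster names are hypothetical placeholders.
class ExampleWorkflowStack extends cdktf.TerraformStack {
  constructor(scope: Construct, id: string) {
    super(scope, id);
    new DataprocWorkflowTemplate(this, 'wordcount-template', {
      name: 'wordcount-template',
      location: 'us-central1',
      dagTimeout: '1800s', // must lie between the documented 600s and 86400s bounds
      labels: { env: 'dev' },
      placement: {
        managedCluster: {
          clusterName: 'ephemeral-wordcount',
          config: {}, // cluster sub-blocks elided in this sketch
        },
      },
      jobs: [
        {
          stepId: 'count-words',
          hadoopJob: {
            mainJarFileUri: 'file:///usr/lib/hadoop-mapreduce/hadoop-mapreduce-examples.jar',
            args: ['wordcount', 'gs://my-bucket/input/', 'gs://my-bucket/output/'],
          },
        },
      ],
    });
  }
}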
export interface DataprocWorkflowTemplateJobsHadoopJobLoggingConfig {
/**
* The per-package log levels for the driver. This may include the "root" package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'.
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/dataproc_workflow_template#driver_log_levels DataprocWorkflowTemplate#driver_log_levels}
*/
readonly driverLogLevels?: {
[key: string]: string;
};
}
export declare function dataprocWorkflowTemplateJobsHadoopJobLoggingConfigToTerraform(struct?: DataprocWorkflowTemplateJobsHadoopJobLoggingConfigOutputReference | DataprocWorkflowTemplateJobsHadoopJobLoggingConfig): any;
export declare function dataprocWorkflowTemplateJobsHadoopJobLoggingConfigToHclTerraform(struct?: DataprocWorkflowTemplateJobsHadoopJobLoggingConfigOutputReference | DataprocWorkflowTemplateJobsHadoopJobLoggingConfig): any;
export declare class DataprocWorkflowTemplateJobsHadoopJobLoggingConfigOutputReference extends cdktf.ComplexObject {
private isEmptyObject;
/**
* @param terraformResource The parent resource
* @param terraformAttribute The attribute on the parent resource this class is referencing
*/
constructor(terraformResource: cdktf.IInterpolatingParent, terraformAttribute: string);
get internalValue(): DataprocWorkflowTemplateJobsHadoopJobLoggingConfig | undefined;
set internalValue(value: DataprocWorkflowTemplateJobsHadoopJobLoggingConfig | undefined);
private _driverLogLevels?;
get driverLogLevels(): {
[key: string]: string;
};
set driverLogLevels(value: {
[key: string]: string;
});
resetDriverLogLevels(): void;
get driverLogLevelsInput(): {
[key: string]: string;
} | undefined;
}
export interface DataprocWorkflowTemplateJobsHadoopJob {
/**
* Optional. HCFS URIs of archives to be extracted in the working directory of Hadoop drivers and tasks. Supported file types: .jar, .tar, .tar.gz, .tgz, or .zip.
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/dataproc_workflow_template#archive_uris DataprocWorkflowTemplate#archive_uris}
*/
readonly archiveUris?: string[];
/**
* Optional. The arguments to pass to the driver. Do not include arguments, such as `-libjars` or `-Dfoo=bar`, that can be set as job properties, since a collision may occur that causes an incorrect job submission.
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/dataproc_workflow_template#args DataprocWorkflowTemplate#args}
*/
readonly args?: string[];
/**
* Optional. HCFS (Hadoop Compatible Filesystem) URIs of files to be copied to the working directory of Hadoop drivers and distributed tasks. Useful for naively parallel tasks.
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/dataproc_workflow_template#file_uris DataprocWorkflowTemplate#file_uris}
*/
readonly fileUris?: string[];
/**
* Optional. Jar file URIs to add to the CLASSPATHs of the Hadoop driver and tasks.
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/dataproc_workflow_template#jar_file_uris DataprocWorkflowTemplate#jar_file_uris}
*/
readonly jarFileUris?: string[];
/**
* The name of the driver's main class. The jar file containing the class must be in the default CLASSPATH or specified in `jar_file_uris`.
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/dataproc_workflow_template#main_class DataprocWorkflowTemplate#main_class}
*/
readonly mainClass?: string;
/**
* The HCFS URI of the jar file containing the main class. Examples: 'gs://foo-bucket/analytics-binaries/extract-useful-metrics-mr.jar', 'hdfs:/tmp/test-samples/custom-wordcount.jar', 'file:///home/usr/lib/hadoop-mapreduce/hadoop-mapreduce-examples.jar'.
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/dataproc_workflow_template#main_jar_file_uri DataprocWorkflowTemplate#main_jar_file_uri}
*/
readonly mainJarFileUri?: string;
/**
* Optional. A mapping of property names to values, used to configure Hadoop. Properties that conflict with values set by the Dataproc API may be overwritten. Can include properties set in /etc/hadoop/conf/*-site and classes in user code.
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/dataproc_workflow_template#properties DataprocWorkflowTemplate#properties}
*/
readonly properties?: {
[key: string]: string;
};
/**
* logging_config block
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/dataproc_workflow_template#logging_config DataprocWorkflowTemplate#logging_config}
*/
readonly loggingConfig?: DataprocWorkflowTemplateJobsHadoopJobLoggingConfig;
}
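// A sketch of a standalone DataprocWorkflowTemplateJobsHadoopJob value built
// against the interface above; jar and bucket URIs are placeholders.
const exampleHadoopJob: DataprocWorkflowTemplateJobsHadoopJob = {
  mainClass: 'com.example.Extract', // its jar must be on the default CLASSPATH or in jarFileUris
  jarFileUris: ['gs://my-bucket/jars/extract.jar'],
  args: ['gs://my-bucket/raw/', 'gs://my-bucket/out/'],
  // Per the docs above, set tunables here rather than via -D flags in args.
  properties: { 'mapreduce.job.reduces': '2' },
  loggingConfig: { driverLogLevels: { root: 'INFO', 'org.apache': 'DEBUG' } },
};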
export declare function dataprocWorkflowTemplateJobsHadoopJobToTerraform(struct?: DataprocWorkflowTemplateJobsHadoopJobOutputReference | DataprocWorkflowTemplateJobsHadoopJob): any;
export declare function dataprocWorkflowTemplateJobsHadoopJobToHclTerraform(struct?: DataprocWorkflowTemplateJobsHadoopJobOutputReference | DataprocWorkflowTemplateJobsHadoopJob): any;
export declare class DataprocWorkflowTemplateJobsHadoopJobOutputReference extends cdktf.ComplexObject {
private isEmptyObject;
/**
* @param terraformResource The parent resource
* @param terraformAttribute The attribute on the parent resource this class is referencing
*/
constructor(terraformResource: cdktf.IInterpolatingParent, terraformAttribute: string);
get internalValue(): DataprocWorkflowTemplateJobsHadoopJob | undefined;
set internalValue(value: DataprocWorkflowTemplateJobsHadoopJob | undefined);
private _archiveUris?;
get archiveUris(): string[];
set archiveUris(value: string[]);
resetArchiveUris(): void;
get archiveUrisInput(): string[] | undefined;
private _args?;
get args(): string[];
set args(value: string[]);
resetArgs(): void;
get argsInput(): string[] | undefined;
private _fileUris?;
get fileUris(): string[];
set fileUris(value: string[]);
resetFileUris(): void;
get fileUrisInput(): string[] | undefined;
private _jarFileUris?;
get jarFileUris(): string[];
set jarFileUris(value: string[]);
resetJarFileUris(): void;
get jarFileUrisInput(): string[] | undefined;
private _mainClass?;
get mainClass(): string;
set mainClass(value: string);
resetMainClass(): void;
get mainClassInput(): string | undefined;
private _mainJarFileUri?;
get mainJarFileUri(): string;
set mainJarFileUri(value: string);
resetMainJarFileUri(): void;
get mainJarFileUriInput(): string | undefined;
private _properties?;
get properties(): {
[key: string]: string;
};
set properties(value: {
[key: string]: string;
});
resetProperties(): void;
get propertiesInput(): {
[key: string]: string;
} | undefined;
private _loggingConfig;
get loggingConfig(): DataprocWorkflowTemplateJobsHadoopJobLoggingConfigOutputReference;
putLoggingConfig(value: DataprocWorkflowTemplateJobsHadoopJobLoggingConfig): void;
resetLoggingConfig(): void;
get loggingConfigInput(): DataprocWorkflowTemplateJobsHadoopJobLoggingConfig | undefined;
}
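// OutputReference classes are the escape hatch for mutating nested blocks after
// construction. A sketch, assuming the DataprocWorkflowTemplate resource class
// and the jobs list accessors generated later in this module:
declare const template: DataprocWorkflowTemplate; // hypothetical instance
template.jobs.get(0).hadoopJob.putLoggingConfig({
  driverLogLevels: { root: 'WARN' }, // quiet everything below warnings
});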
export interface DataprocWorkflowTemplateJobsHiveJobQueryListStruct {
/**
* Required. The queries to execute. You do not need to end a query expression with a semicolon; multiple queries can be specified in one string by separating each with a semicolon. Here is an example of a Dataproc API snippet that uses a QueryList to specify a HiveJob: "hiveJob": { "queryList": { "queries": ["query1", "query2", "query3;query4"] } }
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/dataproc_workflow_template#queries DataprocWorkflowTemplate#queries}
*/
readonly queries: string[];
}
export declare function dataprocWorkflowTemplateJobsHiveJobQueryListStructToTerraform(struct?: DataprocWorkflowTemplateJobsHiveJobQueryListStructOutputReference | DataprocWorkflowTemplateJobsHiveJobQueryListStruct): any;
export declare function dataprocWorkflowTemplateJobsHiveJobQueryListStructToHclTerraform(struct?: DataprocWorkflowTemplateJobsHiveJobQueryListStructOutputReference | DataprocWorkflowTemplateJobsHiveJobQueryListStruct): any;
export declare class DataprocWorkflowTemplateJobsHiveJobQueryListStructOutputReference extends cdktf.ComplexObject {
private isEmptyObject;
/**
* @param terraformResource The parent resource
* @param terraformAttribute The attribute on the parent resource this class is referencing
*/
constructor(terraformResource: cdktf.IInterpolatingParent, terraformAttribute: string);
get internalValue(): DataprocWorkflowTemplateJobsHiveJobQueryListStruct | undefined;
set internalValue(value: DataprocWorkflowTemplateJobsHiveJobQueryListStruct | undefined);
private _queries?;
get queries(): string[];
set queries(value: string[]);
get queriesInput(): string[] | undefined;
}
export interface DataprocWorkflowTemplateJobsHiveJob {
/**
* Optional. Whether to continue executing queries if a query fails. The default value is `false`. Setting to `true` can be useful when executing independent parallel queries.
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/dataproc_workflow_template#continue_on_failure DataprocWorkflowTemplate#continue_on_failure}
*/
readonly continueOnFailure?: boolean | cdktf.IResolvable;
/**
* Optional. HCFS URIs of jar files to add to the CLASSPATH of the Hive server and Hadoop MapReduce (MR) tasks. Can contain Hive SerDes and UDFs.
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/dataproc_workflow_template#jar_file_uris DataprocWorkflowTemplate#jar_file_uris}
*/
readonly jarFileUris?: string[];
/**
* Optional. A mapping of property names and values, used to configure Hive. Properties that conflict with values set by the Dataproc API may be overwritten. Can include properties set in /etc/hadoop/conf/*-site.xml, /etc/hive/conf/hive-site.xml, and classes in user code.
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/dataproc_workflow_template#properties DataprocWorkflowTemplate#properties}
*/
readonly properties?: {
[key: string]: string;
};
/**
* The HCFS URI of the script that contains Hive queries.
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/dataproc_workflow_template#query_file_uri DataprocWorkflowTemplate#query_file_uri}
*/
readonly queryFileUri?: string;
/**
* Optional. Mapping of query variable names to values (equivalent to the Hive command: `SET name="value";`).
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/dataproc_workflow_template#script_variables DataprocWorkflowTemplate#script_variables}
*/
readonly scriptVariables?: {
[key: string]: string;
};
/**
* query_list block
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/dataproc_workflow_template#query_list DataprocWorkflowTemplate#query_list}
*/
readonly queryList?: DataprocWorkflowTemplateJobsHiveJobQueryListStruct;
}
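// A sketch of a DataprocWorkflowTemplateJobsHiveJob value using an inline query
// list; table and variable names are placeholders. queryFileUri and queryList
// are alternatives: supply one or the other.
const exampleHiveJob: DataprocWorkflowTemplateJobsHiveJob = {
  continueOnFailure: true, // keep going when independent queries fail
  queryList: { queries: ['SHOW DATABASES', 'SELECT COUNT(*) FROM logs'] },
  scriptVariables: { run_date: '2024-01-01' }, // applied as SET run_date="2024-01-01";
};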
export declare function dataprocWorkflowTemplateJobsHiveJobToTerraform(struct?: DataprocWorkflowTemplateJobsHiveJobOutputReference | DataprocWorkflowTemplateJobsHiveJob): any;
export declare function dataprocWorkflowTemplateJobsHiveJobToHclTerraform(struct?: DataprocWorkflowTemplateJobsHiveJobOutputReference | DataprocWorkflowTemplateJobsHiveJob): any;
export declare class DataprocWorkflowTemplateJobsHiveJobOutputReference extends cdktf.ComplexObject {
private isEmptyObject;
/**
* @param terraformResource The parent resource
* @param terraformAttribute The attribute on the parent resource this class is referencing
*/
constructor(terraformResource: cdktf.IInterpolatingParent, terraformAttribute: string);
get internalValue(): DataprocWorkflowTemplateJobsHiveJob | undefined;
set internalValue(value: DataprocWorkflowTemplateJobsHiveJob | undefined);
private _continueOnFailure?;
get continueOnFailure(): boolean | cdktf.IResolvable;
set continueOnFailure(value: boolean | cdktf.IResolvable);
resetContinueOnFailure(): void;
get continueOnFailureInput(): boolean | cdktf.IResolvable | undefined;
private _jarFileUris?;
get jarFileUris(): string[];
set jarFileUris(value: string[]);
resetJarFileUris(): void;
get jarFileUrisInput(): string[] | undefined;
private _properties?;
get properties(): {
[key: string]: string;
};
set properties(value: {
[key: string]: string;
});
resetProperties(): void;
get propertiesInput(): {
[key: string]: string;
} | undefined;
private _queryFileUri?;
get queryFileUri(): string;
set queryFileUri(value: string);
resetQueryFileUri(): void;
get queryFileUriInput(): string | undefined;
private _scriptVariables?;
get scriptVariables(): {
[key: string]: string;
};
set scriptVariables(value: {
[key: string]: string;
});
resetScriptVariables(): void;
get scriptVariablesInput(): {
[key: string]: string;
} | undefined;
private _queryList;
get queryList(): DataprocWorkflowTemplateJobsHiveJobQueryListStructOutputReference;
putQueryList(value: DataprocWorkflowTemplateJobsHiveJobQueryListStruct): void;
resetQueryList(): void;
get queryListInput(): DataprocWorkflowTemplateJobsHiveJobQueryListStruct | undefined;
}
export interface DataprocWorkflowTemplateJobsPigJobLoggingConfig {
/**
* The per-package log levels for the driver. This may include the "root" package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'.
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/dataproc_workflow_template#driver_log_levels DataprocWorkflowTemplate#driver_log_levels}
*/
readonly driverLogLevels?: {
[key: string]: string;
};
}
export declare function dataprocWorkflowTemplateJobsPigJobLoggingConfigToTerraform(struct?: DataprocWorkflowTemplateJobsPigJobLoggingConfigOutputReference | DataprocWorkflowTemplateJobsPigJobLoggingConfig): any;
export declare function dataprocWorkflowTemplateJobsPigJobLoggingConfigToHclTerraform(struct?: DataprocWorkflowTemplateJobsPigJobLoggingConfigOutputReference | DataprocWorkflowTemplateJobsPigJobLoggingConfig): any;
export declare class DataprocWorkflowTemplateJobsPigJobLoggingConfigOutputReference extends cdktf.ComplexObject {
private isEmptyObject;
/**
* @param terraformResource The parent resource
* @param terraformAttribute The attribute on the parent resource this class is referencing
*/
constructor(terraformResource: cdktf.IInterpolatingParent, terraformAttribute: string);
get internalValue(): DataprocWorkflowTemplateJobsPigJobLoggingConfig | undefined;
set internalValue(value: DataprocWorkflowTemplateJobsPigJobLoggingConfig | undefined);
private _driverLogLevels?;
get driverLogLevels(): {
[key: string]: string;
};
set driverLogLevels(value: {
[key: string]: string;
});
resetDriverLogLevels(): void;
get driverLogLevelsInput(): {
[key: string]: string;
} | undefined;
}
export interface DataprocWorkflowTemplateJobsPigJobQueryListStruct {
/**
* Required. The queries to execute. You do not need to end a query expression with a semicolon; multiple queries can be specified in one string by separating each with a semicolon. Here is an example of a Dataproc API snippet that uses a QueryList to specify a PigJob: "pigJob": { "queryList": { "queries": ["query1", "query2", "query3;query4"] } }
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/dataproc_workflow_template#queries DataprocWorkflowTemplate#queries}
*/
readonly queries: string[];
}
export declare function dataprocWorkflowTemplateJobsPigJobQueryListStructToTerraform(struct?: DataprocWorkflowTemplateJobsPigJobQueryListStructOutputReference | DataprocWorkflowTemplateJobsPigJobQueryListStruct): any;
export declare function dataprocWorkflowTemplateJobsPigJobQueryListStructToHclTerraform(struct?: DataprocWorkflowTemplateJobsPigJobQueryListStructOutputReference | DataprocWorkflowTemplateJobsPigJobQueryListStruct): any;
export declare class DataprocWorkflowTemplateJobsPigJobQueryListStructOutputReference extends cdktf.ComplexObject {
private isEmptyObject;
/**
* @param terraformResource The parent resource
* @param terraformAttribute The attribute on the parent resource this class is referencing
*/
constructor(terraformResource: cdktf.IInterpolatingParent, terraformAttribute: string);
get internalValue(): DataprocWorkflowTemplateJobsPigJobQueryListStruct | undefined;
set internalValue(value: DataprocWorkflowTemplateJobsPigJobQueryListStruct | undefined);
private _queries?;
get queries(): string[];
set queries(value: string[]);
get queriesInput(): string[] | undefined;
}
export interface DataprocWorkflowTemplateJobsPigJob {
/**
* Optional. Whether to continue executing queries if a query fails. The default value is `false`. Setting to `true` can be useful when executing independent parallel queries.
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/dataproc_workflow_template#continue_on_failure DataprocWorkflowTemplate#continue_on_failure}
*/
readonly continueOnFailure?: boolean | cdktf.IResolvable;
/**
* Optional. HCFS URIs of jar files to add to the CLASSPATH of the Pig Client and Hadoop MapReduce (MR) tasks. Can contain Pig UDFs.
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/dataproc_workflow_template#jar_file_uris DataprocWorkflowTemplate#jar_file_uris}
*/
readonly jarFileUris?: string[];
/**
* Optional. A mapping of property names to values, used to configure Pig. Properties that conflict with values set by the Dataproc API may be overwritten. Can include properties set in /etc/hadoop/conf/*-site.xml, /etc/pig/conf/pig.properties, and classes in user code.
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/dataproc_workflow_template#properties DataprocWorkflowTemplate#properties}
*/
readonly properties?: {
[key: string]: string;
};
/**
* The HCFS URI of the script that contains the Pig queries.
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/dataproc_workflow_template#query_file_uri DataprocWorkflowTemplate#query_file_uri}
*/
readonly queryFileUri?: string;
/**
* Optional. Mapping of query variable names to values (equivalent to the Pig command: `name=[value]`).
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/dataproc_workflow_template#script_variables DataprocWorkflowTemplate#script_variables}
*/
readonly scriptVariables?: {
[key: string]: string;
};
/**
* logging_config block
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/dataproc_workflow_template#logging_config DataprocWorkflowTemplate#logging_config}
*/
readonly loggingConfig?: DataprocWorkflowTemplateJobsPigJobLoggingConfig;
/**
* query_list block
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/dataproc_workflow_template#query_list DataprocWorkflowTemplate#query_list}
*/
readonly queryList?: DataprocWorkflowTemplateJobsPigJobQueryListStruct;
}
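// A sketch of a DataprocWorkflowTemplateJobsPigJob value driven by a script in
// Cloud Storage; the script path and variable are placeholders.
const examplePigJob: DataprocWorkflowTemplateJobsPigJob = {
  queryFileUri: 'gs://my-bucket/scripts/transform.pig',
  scriptVariables: { INPUT: 'gs://my-bucket/raw/' }, // passed to Pig as INPUT=[value]
  loggingConfig: { driverLogLevels: { root: 'INFO' } },
};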
export declare function dataprocWorkflowTemplateJobsPigJobToTerraform(struct?: DataprocWorkflowTemplateJobsPigJobOutputReference | DataprocWorkflowTemplateJobsPigJob): any;
export declare function dataprocWorkflowTemplateJobsPigJobToHclTerraform(struct?: DataprocWorkflowTemplateJobsPigJobOutputReference | DataprocWorkflowTemplateJobsPigJob): any;
export declare class DataprocWorkflowTemplateJobsPigJobOutputReference extends cdktf.ComplexObject {
private isEmptyObject;
/**
* @param terraformResource The parent resource
* @param terraformAttribute The attribute on the parent resource this class is referencing
*/
constructor(terraformResource: cdktf.IInterpolatingParent, terraformAttribute: string);
get internalValue(): DataprocWorkflowTemplateJobsPigJob | undefined;
set internalValue(value: DataprocWorkflowTemplateJobsPigJob | undefined);
private _continueOnFailure?;
get continueOnFailure(): boolean | cdktf.IResolvable;
set continueOnFailure(value: boolean | cdktf.IResolvable);
resetContinueOnFailure(): void;
get continueOnFailureInput(): boolean | cdktf.IResolvable | undefined;
private _jarFileUris?;
get jarFileUris(): string[];
set jarFileUris(value: string[]);
resetJarFileUris(): void;
get jarFileUrisInput(): string[] | undefined;
private _properties?;
get properties(): {
[key: string]: string;
};
set properties(value: {
[key: string]: string;
});
resetProperties(): void;
get propertiesInput(): {
[key: string]: string;
} | undefined;
private _queryFileUri?;
get queryFileUri(): string;
set queryFileUri(value: string);
resetQueryFileUri(): void;
get queryFileUriInput(): string | undefined;
private _scriptVariables?;
get scriptVariables(): {
[key: string]: string;
};
set scriptVariables(value: {
[key: string]: string;
});
resetScriptVariables(): void;
get scriptVariablesInput(): {
[key: string]: string;
} | undefined;
private _loggingConfig;
get loggingConfig(): DataprocWorkflowTemplateJobsPigJobLoggingConfigOutputReference;
putLoggingConfig(value: DataprocWorkflowTemplateJobsPigJobLoggingConfig): void;
resetLoggingConfig(): void;
get loggingConfigInput(): DataprocWorkflowTemplateJobsPigJobLoggingConfig | undefined;
private _queryList;
get queryList(): DataprocWorkflowTemplateJobsPigJobQueryListStructOutputReference;
putQueryList(value: DataprocWorkflowTemplateJobsPigJobQueryListStruct): void;
resetQueryList(): void;
get queryListInput(): DataprocWorkflowTemplateJobsPigJobQueryListStruct | undefined;
}
export interface DataprocWorkflowTemplateJobsPrestoJobLoggingConfig {
/**
* The per-package log levels for the driver. This may include the "root" package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'.
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/dataproc_workflow_template#driver_log_levels DataprocWorkflowTemplate#driver_log_levels}
*/
readonly driverLogLevels?: {
[key: string]: string;
};
}
export declare function dataprocWorkflowTemplateJobsPrestoJobLoggingConfigToTerraform(struct?: DataprocWorkflowTemplateJobsPrestoJobLoggingConfigOutputReference | DataprocWorkflowTemplateJobsPrestoJobLoggingConfig): any;
export declare function dataprocWorkflowTemplateJobsPrestoJobLoggingConfigToHclTerraform(struct?: DataprocWorkflowTemplateJobsPrestoJobLoggingConfigOutputReference | DataprocWorkflowTemplateJobsPrestoJobLoggingConfig): any;
export declare class DataprocWorkflowTemplateJobsPrestoJobLoggingConfigOutputReference extends cdktf.ComplexObject {
private isEmptyObject;
/**
* @param terraformResource The parent resource
* @param terraformAttribute The attribute on the parent resource this class is referencing
*/
constructor(terraformResource: cdktf.IInterpolatingParent, terraformAttribute: string);
get internalValue(): DataprocWorkflowTemplateJobsPrestoJobLoggingConfig | undefined;
set internalValue(value: DataprocWorkflowTemplateJobsPrestoJobLoggingConfig | undefined);
private _driverLogLevels?;
get driverLogLevels(): {
[key: string]: string;
};
set driverLogLevels(value: {
[key: string]: string;
});
resetDriverLogLevels(): void;
get driverLogLevelsInput(): {
[key: string]: string;
} | undefined;
}
export interface DataprocWorkflowTemplateJobsPrestoJobQueryListStruct {
/**
* Required. The queries to execute. You do not need to end a query expression with a semicolon; multiple queries can be specified in one string by separating each with a semicolon. Here is an example of a Dataproc API snippet that uses a QueryList to specify a PrestoJob: "prestoJob": { "queryList": { "queries": ["query1", "query2", "query3;query4"] } }
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/dataproc_workflow_template#queries DataprocWorkflowTemplate#queries}
*/
readonly queries: string[];
}
export declare function dataprocWorkflowTemplateJobsPrestoJobQueryListStructToTerraform(struct?: DataprocWorkflowTemplateJobsPrestoJobQueryListStructOutputReference | DataprocWorkflowTemplateJobsPrestoJobQueryListStruct): any;
export declare function dataprocWorkflowTemplateJobsPrestoJobQueryListStructToHclTerraform(struct?: DataprocWorkflowTemplateJobsPrestoJobQueryListStructOutputReference | DataprocWorkflowTemplateJobsPrestoJobQueryListStruct): any;
export declare class DataprocWorkflowTemplateJobsPrestoJobQueryListStructOutputReference extends cdktf.ComplexObject {
private isEmptyObject;
/**
* @param terraformResource The parent resource
* @param terraformAttribute The attribute on the parent resource this class is referencing
*/
constructor(terraformResource: cdktf.IInterpolatingParent, terraformAttribute: string);
get internalValue(): DataprocWorkflowTemplateJobsPrestoJobQueryListStruct | undefined;
set internalValue(value: DataprocWorkflowTemplateJobsPrestoJobQueryListStruct | undefined);
private _queries?;
get queries(): string[];
set queries(value: string[]);
get queriesInput(): string[] | undefined;
}
export interface DataprocWorkflowTemplateJobsPrestoJob {
/**
* Optional. Presto client tags to attach to this query.
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/dataproc_workflow_template#client_tags DataprocWorkflowTemplate#client_tags}
*/
readonly clientTags?: string[];
/**
* Optional. Whether to continue executing queries if a query fails. The default value is `false`. Setting to `true` can be useful when executing independent parallel queries.
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/dataproc_workflow_template#continue_on_failure DataprocWorkflowTemplate#continue_on_failure}
*/
readonly continueOnFailure?: boolean | cdktf.IResolvable;
/**
* Optional. The format in which query output will be displayed. See the Presto documentation for supported output formats.
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/dataproc_workflow_template#output_format DataprocWorkflowTemplate#output_format}
*/
readonly outputFormat?: string;
/**
* Optional. A mapping of property names to values. Used to set Presto [session properties](https://prestodb.io/docs/current/sql/set-session.html); equivalent to using the --session flag in the Presto CLI.
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/dataproc_workflow_template#properties DataprocWorkflowTemplate#properties}
*/
readonly properties?: {
[key: string]: string;
};
/**
* The HCFS URI of the script that contains SQL queries.
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/dataproc_workflow_template#query_file_uri DataprocWorkflowTemplate#query_file_uri}
*/
readonly queryFileUri?: string;
/**
* logging_config block
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/dataproc_workflow_template#logging_config DataprocWorkflowTemplate#logging_config}
*/
readonly loggingConfig?: DataprocWorkflowTemplateJobsPrestoJobLoggingConfig;
/**
* query_list block
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/dataproc_workflow_template#query_list DataprocWorkflowTemplate#query_list}
*/
readonly queryList?: DataprocWorkflowTemplateJobsPrestoJobQueryListStruct;
}
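// A sketch of a DataprocWorkflowTemplateJobsPrestoJob value; the session
// property name and client tag below are illustrative placeholders.
const examplePrestoJob: DataprocWorkflowTemplateJobsPrestoJob = {
  clientTags: ['nightly-report'],
  outputFormat: 'CSV',
  properties: { query_max_run_time: '30m' }, // set via the equivalent of --session
  queryList: { queries: ['SELECT 1'] },
};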
export declare function dataprocWorkflowTemplateJobsPrestoJobToTerraform(struct?: DataprocWorkflowTemplateJobsPrestoJobOutputReference | DataprocWorkflowTemplateJobsPrestoJob): any;
export declare function dataprocWorkflowTemplateJobsPrestoJobToHclTerraform(struct?: DataprocWorkflowTemplateJobsPrestoJobOutputReference | DataprocWorkflowTemplateJobsPrestoJob): any;
export declare class DataprocWorkflowTemplateJobsPrestoJobOutputReference extends cdktf.ComplexObject {
private isEmptyObject;
/**
* @param terraformResource The parent resource
* @param terraformAttribute The attribute on the parent resource this class is referencing
*/
constructor(terraformResource: cdktf.IInterpolatingParent, terraformAttribute: string);
get internalValue(): DataprocWorkflowTemplateJobsPrestoJob | undefined;
set internalValue(value: DataprocWorkflowTemplateJobsPrestoJob | undefined);
private _clientTags?;
get clientTags(): string[];
set clientTags(value: string[]);
resetClientTags(): void;
get clientTagsInput(): string[] | undefined;
private _continueOnFailure?;
get continueOnFailure(): boolean | cdktf.IResolvable;
set continueOnFailure(value: boolean | cdktf.IResolvable);
resetContinueOnFailure(): void;
get continueOnFailureInput(): boolean | cdktf.IResolvable | undefined;
private _outputFormat?;
get outputFormat(): string;
set outputFormat(value: string);
resetOutputFormat(): void;
get outputFormatInput(): string | undefined;
private _properties?;
get properties(): {
[key: string]: string;
};
set properties(value: {
[key: string]: string;
});
resetProperties(): void;
get propertiesInput(): {
[key: string]: string;
} | undefined;
private _queryFileUri?;
get queryFileUri(): string;
set queryFileUri(value: string);
resetQueryFileUri(): void;
get queryFileUriInput(): string | undefined;
private _loggingConfig;
get loggingConfig(): DataprocWorkflowTemplateJobsPrestoJobLoggingConfigOutputReference;
putLoggingConfig(value: DataprocWorkflowTemplateJobsPrestoJobLoggingConfig): void;
resetLoggingConfig(): void;
get loggingConfigInput(): DataprocWorkflowTemplateJobsPrestoJobLoggingConfig | undefined;
private _queryList;
get queryList(): DataprocWorkflowTemplateJobsPrestoJobQueryListStructOutputReference;
putQueryList(value: DataprocWorkflowTemplateJobsPrestoJobQueryListStruct): void;
resetQueryList(): void;
get queryListInput(): DataprocWorkflowTemplateJobsPrestoJobQueryListStruct | undefined;
}
export interface DataprocWorkflowTemplateJobsPysparkJobLoggingConfig {
/**
* The per-package log levels for the driver. This may include the "root" package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'.
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/dataproc_workflow_template#driver_log_levels DataprocWorkflowTemplate#driver_log_levels}
*/
readonly driverLogLevels?: {
[key: string]: string;
};
}
export declare function dataprocWorkflowTemplateJobsPysparkJobLoggingConfigToTerraform(struct?: DataprocWorkflowTemplateJobsPysparkJobLoggingConfigOutputReference | DataprocWorkflowTemplateJobsPysparkJobLoggingConfig): any;
export declare function dataprocWorkflowTemplateJobsPysparkJobLoggingConfigToHclTerraform(struct?: DataprocWorkflowTemplateJobsPysparkJobLoggingConfigOutputReference | DataprocWorkflowTemplateJobsPysparkJobLoggingConfig): any;
export declare class DataprocWorkflowTemplateJobsPysparkJobLoggingConfigOutputReference extends cdktf.ComplexObject {
private isEmptyObject;
/**
* @param terraformResource The parent resource
* @param terraformAttribute The attribute on the parent resource this class is referencing
*/
constructor(terraformResource: cdktf.IInterpolatingParent, terraformAttribute: string);
get internalValue(): DataprocWorkflowTemplateJobsPysparkJobLoggingConfig | undefined;
set internalValue(value: DataprocWorkflowTemplateJobsPysparkJobLoggingConfig | undefined);
private _driverLogLevels?;
get driverLogLevels(): {
[key: string]: string;
};
set driverLogLevels(value: {
[key: string]: string;
});
resetDriverLogLevels(): void;
get driverLogLevelsInput(): {
[key: string]: string;
} | undefined;
}
export interface DataprocWorkflowTemplateJobsPysparkJob {
/**
* Optional. HCFS URIs of archives to be extracted into the working directory of each executor. Supported file types: .jar, .tar, .tar.gz, .tgz, and .zip.
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/dataproc_workflow_template#archive_uris DataprocWorkflowTemplate#archive_uris}
*/
readonly archiveUris?: string[];
/**
* Optional. The arguments to pass to the driver. Do not include arguments, such as `--conf`, that can be set as job properties, since a collision may occur that causes an incorrect job submission.
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/dataproc_workflow_template#args DataprocWorkflowTemplate#args}
*/
readonly args?: string[];
/**
* Optional. HCFS URIs of files to be placed in the working directory of each executor. Useful for naively parallel tasks.
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/dataproc_workflow_template#file_uris DataprocWorkflowTemplate#file_uris}
*/
readonly fileUris?: string[];
/**
* Optional. HCFS URIs of jar files to add to the CLASSPATHs of the Python driver and tasks.
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/dataproc_workflow_template#jar_file_uris DataprocWorkflowTemplate#jar_file_uris}
*/
readonly jarFileUris?: string[];
/**
* Required. The HCFS URI of the main Python file to use as the driver. Must be a .py file.
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/dataproc_workflow_template#main_python_file_uri DataprocWorkflowTemplate#main_python_file_uri}
*/
readonly mainPythonFileUri: string;
/**
* Optional. A mapping of property names to values, used to configure PySpark. Properties that conflict with values set by the Dataproc API may be overwritten. Can include properties set in /etc/spark/conf/spark-defaults.conf and classes in user code.
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/dataproc_workflow_template#properties DataprocWorkflowTemplate#properties}
*/
readonly properties?: {
[key: string]: string;
};
/**
* Optional. HCFS file URIs of Python files to pass to the PySpark framework. Supported file types: .py, .egg, and .zip.
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/dataproc_workflow_template#python_file_uris DataprocWorkflowTemplate#python_file_uris}
*/
readonly pythonFileUris?: string[];
/**
* logging_config block
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/dataproc_workflow_template#logging_config DataprocWorkflowTemplate#logging_config}
*/
readonly loggingConfig?: DataprocWorkflowTemplateJobsPysparkJobLoggingConfig;
}
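// A sketch of a DataprocWorkflowTemplateJobsPysparkJob value; only
// mainPythonFileUri is required, and all paths are placeholders.
const examplePysparkJob: DataprocWorkflowTemplateJobsPysparkJob = {
  mainPythonFileUri: 'gs://my-bucket/jobs/etl.py', // must be a .py file
  pythonFileUris: ['gs://my-bucket/jobs/helpers.zip'],
  args: ['--date', '2024-01-01'], // application args only; no --conf, per the docs above
  properties: { 'spark.executor.memory': '4g' },
};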
export declare function dataprocWorkflowTemplateJobsPysparkJobToTerraform(struct?: DataprocWorkflowTemplateJobsPysparkJobOutputReference | DataprocWorkflowTemplateJobsPysparkJob): any;
export declare function dataprocWorkflowTemplateJobsPysparkJobToHclTerraform(struct?: DataprocWorkflowTemplateJobsPysparkJobOutputReference | DataprocWorkflowTemplateJobsPysparkJob): any;
export declare class DataprocWorkflowTemplateJobsPysparkJobOutputReference extends cdktf.ComplexObject {
private isEmptyObject;
/**
* @param terraformResource The parent resource
* @param terraformAttribute The attribute on the parent resource this class is referencing
*/
constructor(terraformResource: cdktf.IInterpolatingParent, terraformAttribute: string);
get internalValue(): DataprocWorkflowTemplateJobsPysparkJob | undefined;
set internalValue(value: DataprocWorkflowTemplateJobsPysparkJob | undefined);
private _archiveUris?;
get archiveUris(): string[];
set archiveUris(value: string[]);
resetArchiveUris(): void;
get archiveUrisInput(): string[] | undefined;
private _args?;
get args(): string[];
set args(value: string[]);
resetArgs(): void;
get argsInput(): string[] | undefined;
private _fileUris?;
get fileUris(): string[];
set fileUris(value: string[]);
resetFileUris(): void;
get fileUrisInput(): string[] | undefined;
private _jarFileUris?;
get jarFileUris(): string[];
set jarFileUris(value: string[]);
resetJarFileUris(): void;
get jarFileUrisInput(): string[] | undefined;
private _mainPythonFileUri?;
get mainPythonFileUri(): string;
set mainPythonFileUri(value: string);
get mainPythonFileUriInput(): string | undefined;
private _properties?;
get properties(): {
[key: string]: string;
};
set properties(value: {
[key: string]: string;
});
resetProperties(): void;
get propertiesInput(): {
[key: string]: string;
} | undefined;
private _pythonFileUris?;
get pythonFileUris(): string[];
set pythonFileUris(value: string[]);
resetPythonFileUris(): void;
get pythonFileUrisInput(): string[] | undefined;
private _loggingConfig;
get loggingConfig(): DataprocWorkflowTemplateJobsPysparkJobLoggingConfigOutputReference;
putLoggingConfig(value: DataprocWorkflowTemplateJobsPysparkJobLoggingConfig): void;
resetLoggingConfig(): void;
get loggingConfigInput(): DataprocWorkflowTemplateJobsPysparkJobLoggingConfig | undefined;
}
export interface DataprocWorkflowTemplateJobsScheduling {
/**
* Optional. Maximum number of times per hour a driver may be restarted as a result of the driver exiting with a non-zero code before the job is reported failed. A job may be reported as thrashing if the driver exits with a non-zero code four times within a 10-minute window. Maximum value is 10.
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/dataproc_workflow_template#max_failures_per_hour DataprocWorkflowTemplate#max_failures_per_hour}
*/
readonly maxFailuresPerHour?: number;
/**
* Optional. Maximum number of times in total a driver may be restarted as a result of the driver exiting with a non-zero code before the job is reported failed. Maximum value is 240.
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/dataproc_workflow_template#max_failures_total DataprocWorkflowTemplate#max_failures_total}
*/
readonly maxFailuresTotal?: number;
}
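// A sketch of a DataprocWorkflowTemplateJobsScheduling value staying inside the
// documented ceilings (10 restarts per hour, 240 total).
const exampleScheduling: DataprocWorkflowTemplateJobsScheduling = {
  maxFailuresPerHour: 3,
  maxFailuresTotal: 12,
};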
export declare function dataprocWorkflowTemplateJobsSchedulingToTerraform(struct?: DataprocWorkflowTemplateJobsSchedulingOutputReference | DataprocWorkflowTemplateJobsScheduling): any;
export declare function dataprocWorkflowTemplateJobsSchedulingToHclTerraform(struct?: DataprocWorkflowTemplateJobsSchedulingOutputReference | DataprocWorkflowTemplateJobsScheduling): any;
export declare class DataprocWorkflowTemplateJobsSchedulingOutputReference extends cdktf.ComplexObject {
private isEmptyObject;
/**
* @param terraformResource The parent resource
* @param terraformAttribute The attribute on the parent resource this class is referencing
*/
constructor(terraformResource: cdktf.IInterpolatingParent, terraformAttribute: string);
get internalValue(): DataprocWorkflowTemplateJobsScheduling | undefined;
set internalValue(value: DataprocWorkflowTemplateJobsScheduling | undefined);
private _maxFailuresPerHour?;
get maxFailuresPerHour(): number;
set maxFailuresPerHour(value: number);
resetMaxFailuresPerHour(): void;
get maxFailuresPerHourInput(): number | undefined;
private _maxFailuresTotal?;
get maxFailuresTotal(): number;
set maxFailuresTotal(value: number);
resetMaxFailuresTotal(): void;
get maxFailuresTotalInput(): number | undefined;
}
export interface DataprocWorkflowTemplateJobsSparkJobLoggingConfig {
/**
* The per-package log levels for the driver. This may include the "root" package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'.
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/dataproc_workflow_template#driver_log_levels DataprocWorkflowTemplate#driver_log_levels}
*/
readonly driverLogLevels?: {
[key: string]: string;
};
}
export declare function dataprocWorkflowTemplateJobsSparkJobLoggingConfigToTerraform(struct?: DataprocWorkflowTemplateJobsSparkJobLoggingConfigOutputReference | DataprocWorkflowTemplateJobsSparkJobLoggingConfig): any;
export declare function dataprocWorkflowTemplateJobsSparkJobLoggingConfigToHclTerraform(struct?: DataprocWorkflowTemplateJobsSparkJobLoggingConfigOutputReference | DataprocWorkflowTemplateJobsSparkJobLoggingConfig): any;
export declare class DataprocWorkflowTemplateJobsSparkJobLoggingConfigOutputReference extends cdktf.ComplexObject {
private isEmptyObject;
/**
* @param terraformResource The parent resource
* @param terraformAttribute The attribute on the parent resource this class is referencing
*/
constructor(terraformResource: cdktf.IInterpolatingParent, terraformAttribute: string);
get internalValue(): DataprocWorkflowTemplateJobsSparkJobLoggingConfig | undefined;
set internalValue(value: DataprocWorkflowTemplateJobsSparkJobLoggingConfig | undefined);
private _driverLogLevels?;
get driverLogLevels(): {
[key: string]: string;
};
set driverLogLevels(value: {
[key: string]: string;
});
resetDriverLogLevels(): void;
get driverLogLevelsInput(): {
[key: string]: string;
} | undefined;
}