@cdktf/provider-google
Prebuilt google Provider for Terraform CDK (cdktf)
/**
* Copyright (c) HashiCorp, Inc.
* SPDX-License-Identifier: MPL-2.0
*/
import { Construct } from 'constructs';
import * as cdktf from 'cdktf';
export interface DataprocGdcSparkApplicationConfig extends cdktf.TerraformMetaArguments {
/**
* The annotations to associate with this application. Annotations may be used to store client information, but are not used by the server.
*
* **Note**: This field is non-authoritative, and will only manage the annotations present in your configuration.
* Please refer to the field 'effective_annotations' for all of the annotations present on the resource.
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.36.1/docs/resources/dataproc_gdc_spark_application#annotations DataprocGdcSparkApplication#annotations}
*/
readonly annotations?: {
[key: string]: string;
};
/**
* An ApplicationEnvironment from which to inherit configuration properties.
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.36.1/docs/resources/dataproc_gdc_spark_application#application_environment DataprocGdcSparkApplication#application_environment}
*/
readonly applicationEnvironment?: string;
/**
* List of container image URIs for additional file dependencies. Dependent files are copied sequentially from each image; if a file with the same name exists in two images, the file from the later image is used.
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.36.1/docs/resources/dataproc_gdc_spark_application#dependency_images DataprocGdcSparkApplication#dependency_images}
*/
readonly dependencyImages?: string[];
/**
* User-provided human-readable name to be used in user interfaces.
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.36.1/docs/resources/dataproc_gdc_spark_application#display_name DataprocGdcSparkApplication#display_name}
*/
readonly displayName?: string;
/**
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.36.1/docs/resources/dataproc_gdc_spark_application#id DataprocGdcSparkApplication#id}
*
* Please be aware that the id field is automatically added to all resources in Terraform providers using a Terraform provider SDK version below 2.
* If you experience problems setting this value, it might not be settable; check the provider documentation to confirm whether it can be set.
*/
readonly id?: string;
/**
* The labels to associate with this application. Labels may be used for filtering and billing tracking.
*
* **Note**: This field is non-authoritative, and will only manage the labels present in your configuration.
* Please refer to the field 'effective_labels' for all of the labels present on the resource.
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.36.1/docs/resources/dataproc_gdc_spark_application#labels DataprocGdcSparkApplication#labels}
*/
readonly labels?: {
[key: string]: string;
};
/**
* The location of the Spark application.
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.36.1/docs/resources/dataproc_gdc_spark_application#location DataprocGdcSparkApplication#location}
*/
readonly location: string;
/**
* The Kubernetes namespace in which to create the application. This namespace must already exist on the cluster.
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.36.1/docs/resources/dataproc_gdc_spark_application#namespace DataprocGdcSparkApplication#namespace}
*/
readonly namespace?: string;
/**
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.36.1/docs/resources/dataproc_gdc_spark_application#project DataprocGdcSparkApplication#project}
*/
readonly project?: string;
/**
* Application-specific properties.
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.36.1/docs/resources/dataproc_gdc_spark_application#properties DataprocGdcSparkApplication#properties}
*/
readonly properties?: {
[key: string]: string;
};
/**
* The id of the service instance to which this Spark application belongs.
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.36.1/docs/resources/dataproc_gdc_spark_application#serviceinstance DataprocGdcSparkApplication#serviceinstance}
*/
readonly serviceinstance: string;
/**
* The id of the application.
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.36.1/docs/resources/dataproc_gdc_spark_application#spark_application_id DataprocGdcSparkApplication#spark_application_id}
*/
readonly sparkApplicationId: string;
/**
* The Dataproc version of this application.
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.36.1/docs/resources/dataproc_gdc_spark_application#version DataprocGdcSparkApplication#version}
*/
readonly version?: string;
/**
* pyspark_application_config block
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.36.1/docs/resources/dataproc_gdc_spark_application#pyspark_application_config DataprocGdcSparkApplication#pyspark_application_config}
*/
readonly pysparkApplicationConfig?: DataprocGdcSparkApplicationPysparkApplicationConfig;
/**
* spark_application_config block
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.36.1/docs/resources/dataproc_gdc_spark_application#spark_application_config DataprocGdcSparkApplication#spark_application_config}
*/
readonly sparkApplicationConfig?: DataprocGdcSparkApplicationSparkApplicationConfig;
/**
* spark_r_application_config block
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.36.1/docs/resources/dataproc_gdc_spark_application#spark_r_application_config DataprocGdcSparkApplication#spark_r_application_config}
*/
readonly sparkRApplicationConfig?: DataprocGdcSparkApplicationSparkRApplicationConfig;
/**
* spark_sql_application_config block
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.36.1/docs/resources/dataproc_gdc_spark_application#spark_sql_application_config DataprocGdcSparkApplication#spark_sql_application_config}
*/
readonly sparkSqlApplicationConfig?: DataprocGdcSparkApplicationSparkSqlApplicationConfig;
/**
* timeouts block
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.36.1/docs/resources/dataproc_gdc_spark_application#timeouts DataprocGdcSparkApplication#timeouts}
*/
readonly timeouts?: DataprocGdcSparkApplicationTimeouts;
}
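// Example (illustrative sketch, not part of the generated declarations): a minimal
// DataprocGdcSparkApplicationConfig. Only location, serviceinstance, and
// sparkApplicationId are required; typically exactly one of the four
// *ApplicationConfig blocks is set. All names and URIs below are placeholders.
//
//   const config: DataprocGdcSparkApplicationConfig = {
//     location: 'us-west1',                   // placeholder region
//     serviceinstance: 'my-service-instance', // placeholder service instance id
//     sparkApplicationId: 'my-spark-app',     // placeholder application id
//     displayName: 'My Spark application',
//     labels: { env: 'dev' },
//     sparkApplicationConfig: {
//       mainClass: 'org.example.Main',                 // placeholder class
//       jarFileUris: ['gs://my-bucket/jars/app.jar'],  // placeholder HCFS URI
//     },
//   };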
export interface DataprocGdcSparkApplicationPysparkApplicationConfig {
/**
* HCFS URIs of archives to be extracted into the working directory of each executor. Supported file types: .jar, .tar, .tar.gz, .tgz, and .zip.
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.36.1/docs/resources/dataproc_gdc_spark_application#archive_uris DataprocGdcSparkApplication#archive_uris}
*/
readonly archiveUris?: string[];
/**
* The arguments to pass to the driver. Do not include arguments, such as '--conf', that can be set as job properties, since a collision may occur that causes an incorrect job submission.
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.36.1/docs/resources/dataproc_gdc_spark_application#args DataprocGdcSparkApplication#args}
*/
readonly args?: string[];
/**
* HCFS URIs of files to be placed in the working directory of each executor. Useful for naively parallel tasks.
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.36.1/docs/resources/dataproc_gdc_spark_application#file_uris DataprocGdcSparkApplication#file_uris}
*/
readonly fileUris?: string[];
/**
* HCFS URIs of jar files to add to the CLASSPATHs of the Python driver and tasks.
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.36.1/docs/resources/dataproc_gdc_spark_application#jar_file_uris DataprocGdcSparkApplication#jar_file_uris}
*/
readonly jarFileUris?: string[];
/**
* The HCFS URI of the main Python file to use as the driver. Must be a .py file.
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.36.1/docs/resources/dataproc_gdc_spark_application#main_python_file_uri DataprocGdcSparkApplication#main_python_file_uri}
*/
readonly mainPythonFileUri: string;
/**
* HCFS file URIs of Python files to pass to the PySpark framework. Supported file types: .py, .egg, and .zip.
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.36.1/docs/resources/dataproc_gdc_spark_application#python_file_uris DataprocGdcSparkApplication#python_file_uris}
*/
readonly pythonFileUris?: string[];
}
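// Example (illustrative): a PySpark application config. mainPythonFileUri is the
// only required field; the gs:// URIs are placeholders. Per the docs above, avoid
// passing flags such as '--conf' in args.
//
//   const pyspark: DataprocGdcSparkApplicationPysparkApplicationConfig = {
//     mainPythonFileUri: 'gs://my-bucket/jobs/main.py',       // placeholder
//     pythonFileUris: ['gs://my-bucket/jobs/helpers.zip'],    // placeholder
//     args: ['--input', 'gs://my-bucket/data/'],              // placeholder
//   };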
export declare function dataprocGdcSparkApplicationPysparkApplicationConfigToTerraform(struct?: DataprocGdcSparkApplicationPysparkApplicationConfigOutputReference | DataprocGdcSparkApplicationPysparkApplicationConfig): any;
export declare function dataprocGdcSparkApplicationPysparkApplicationConfigToHclTerraform(struct?: DataprocGdcSparkApplicationPysparkApplicationConfigOutputReference | DataprocGdcSparkApplicationPysparkApplicationConfig): any;
export declare class DataprocGdcSparkApplicationPysparkApplicationConfigOutputReference extends cdktf.ComplexObject {
private isEmptyObject;
/**
* @param terraformResource The parent resource
* @param terraformAttribute The attribute on the parent resource this class is referencing
*/
constructor(terraformResource: cdktf.IInterpolatingParent, terraformAttribute: string);
get internalValue(): DataprocGdcSparkApplicationPysparkApplicationConfig | undefined;
set internalValue(value: DataprocGdcSparkApplicationPysparkApplicationConfig | undefined);
private _archiveUris?;
get archiveUris(): string[];
set archiveUris(value: string[]);
resetArchiveUris(): void;
get archiveUrisInput(): string[] | undefined;
private _args?;
get args(): string[];
set args(value: string[]);
resetArgs(): void;
get argsInput(): string[] | undefined;
private _fileUris?;
get fileUris(): string[];
set fileUris(value: string[]);
resetFileUris(): void;
get fileUrisInput(): string[] | undefined;
private _jarFileUris?;
get jarFileUris(): string[];
set jarFileUris(value: string[]);
resetJarFileUris(): void;
get jarFileUrisInput(): string[] | undefined;
private _mainPythonFileUri?;
get mainPythonFileUri(): string;
set mainPythonFileUri(value: string);
get mainPythonFileUriInput(): string | undefined;
private _pythonFileUris?;
get pythonFileUris(): string[];
set pythonFileUris(value: string[]);
resetPythonFileUris(): void;
get pythonFileUrisInput(): string[] | undefined;
}
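// Usage note (illustrative): every OutputReference class in this file follows the
// same generated pattern: typed getters/setters per attribute, reset<Attr>() to
// drop an optional value, and <attr>Input to read back the configured value
// (undefined when unset). Assuming `app` is a DataprocGdcSparkApplication:
//
//   app.pysparkApplicationConfig.args = ['--retries', '3'];     // set via setter
//   app.pysparkApplicationConfig.resetArchiveUris();             // unset optional field
//   const configured = app.pysparkApplicationConfig.argsInput;   // string[] | undefined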
export interface DataprocGdcSparkApplicationSparkApplicationConfig {
/**
* HCFS URIs of archives to be extracted into the working directory of each executor. Supported file types: '.jar', '.tar', '.tar.gz', '.tgz', and '.zip'.
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.36.1/docs/resources/dataproc_gdc_spark_application#archive_uris DataprocGdcSparkApplication#archive_uris}
*/
readonly archiveUris?: string[];
/**
* The arguments to pass to the driver. Do not include arguments that can be set as application properties, such as '--conf', since a collision can occur that causes an incorrect application submission.
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.36.1/docs/resources/dataproc_gdc_spark_application#args DataprocGdcSparkApplication#args}
*/
readonly args?: string[];
/**
* HCFS URIs of files to be placed in the working directory of each executor.
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.36.1/docs/resources/dataproc_gdc_spark_application#file_uris DataprocGdcSparkApplication#file_uris}
*/
readonly fileUris?: string[];
/**
* HCFS URIs of jar files to add to the classpath of the Spark driver and tasks.
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.36.1/docs/resources/dataproc_gdc_spark_application#jar_file_uris DataprocGdcSparkApplication#jar_file_uris}
*/
readonly jarFileUris?: string[];
/**
* The name of the driver main class. The jar file that contains the class must be in the classpath or specified in 'jar_file_uris'.
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.36.1/docs/resources/dataproc_gdc_spark_application#main_class DataprocGdcSparkApplication#main_class}
*/
readonly mainClass?: string;
/**
* The HCFS URI of the jar file that contains the main class.
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.36.1/docs/resources/dataproc_gdc_spark_application#main_jar_file_uri DataprocGdcSparkApplication#main_jar_file_uri}
*/
readonly mainJarFileUri?: string;
}
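// Example (illustrative): a JVM Spark application config. Supply either mainClass
// (with its jar on the classpath or listed in jarFileUris) or mainJarFileUri; the
// URIs and class name below are placeholders.
//
//   const spark: DataprocGdcSparkApplicationSparkApplicationConfig = {
//     mainClass: 'org.example.WordCount',                    // placeholder
//     jarFileUris: ['gs://my-bucket/jars/wordcount.jar'],    // placeholder
//   };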
export declare function dataprocGdcSparkApplicationSparkApplicationConfigToTerraform(struct?: DataprocGdcSparkApplicationSparkApplicationConfigOutputReference | DataprocGdcSparkApplicationSparkApplicationConfig): any;
export declare function dataprocGdcSparkApplicationSparkApplicationConfigToHclTerraform(struct?: DataprocGdcSparkApplicationSparkApplicationConfigOutputReference | DataprocGdcSparkApplicationSparkApplicationConfig): any;
export declare class DataprocGdcSparkApplicationSparkApplicationConfigOutputReference extends cdktf.ComplexObject {
private isEmptyObject;
/**
* @param terraformResource The parent resource
* @param terraformAttribute The attribute on the parent resource this class is referencing
*/
constructor(terraformResource: cdktf.IInterpolatingParent, terraformAttribute: string);
get internalValue(): DataprocGdcSparkApplicationSparkApplicationConfig | undefined;
set internalValue(value: DataprocGdcSparkApplicationSparkApplicationConfig | undefined);
private _archiveUris?;
get archiveUris(): string[];
set archiveUris(value: string[]);
resetArchiveUris(): void;
get archiveUrisInput(): string[] | undefined;
private _args?;
get args(): string[];
set args(value: string[]);
resetArgs(): void;
get argsInput(): string[] | undefined;
private _fileUris?;
get fileUris(): string[];
set fileUris(value: string[]);
resetFileUris(): void;
get fileUrisInput(): string[] | undefined;
private _jarFileUris?;
get jarFileUris(): string[];
set jarFileUris(value: string[]);
resetJarFileUris(): void;
get jarFileUrisInput(): string[] | undefined;
private _mainClass?;
get mainClass(): string;
set mainClass(value: string);
resetMainClass(): void;
get mainClassInput(): string | undefined;
private _mainJarFileUri?;
get mainJarFileUri(): string;
set mainJarFileUri(value: string);
resetMainJarFileUri(): void;
get mainJarFileUriInput(): string | undefined;
}
export interface DataprocGdcSparkApplicationSparkRApplicationConfig {
/**
* HCFS URIs of archives to be extracted into the working directory of each executor. Supported file types: .jar, .tar, .tar.gz, .tgz, and .zip.
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.36.1/docs/resources/dataproc_gdc_spark_application#archive_uris DataprocGdcSparkApplication#archive_uris}
*/
readonly archiveUris?: string[];
/**
* The arguments to pass to the driver. Do not include arguments, such as '--conf', that can be set as job properties, since a collision may occur that causes an incorrect job submission.
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.36.1/docs/resources/dataproc_gdc_spark_application#args DataprocGdcSparkApplication#args}
*/
readonly args?: string[];
/**
* HCFS URIs of files to be placed in the working directory of each executor. Useful for naively parallel tasks.
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.36.1/docs/resources/dataproc_gdc_spark_application#file_uris DataprocGdcSparkApplication#file_uris}
*/
readonly fileUris?: string[];
/**
* The HCFS URI of the main R file to use as the driver. Must be a .R file.
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.36.1/docs/resources/dataproc_gdc_spark_application#main_r_file_uri DataprocGdcSparkApplication#main_r_file_uri}
*/
readonly mainRFileUri: string;
}
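// Example (illustrative): a SparkR application config. mainRFileUri is the only
// required field; the URIs are placeholders.
//
//   const sparkR: DataprocGdcSparkApplicationSparkRApplicationConfig = {
//     mainRFileUri: 'gs://my-bucket/jobs/analysis.R',      // placeholder
//     fileUris: ['gs://my-bucket/data/input.csv'],         // placeholder
//   };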
export declare function dataprocGdcSparkApplicationSparkRApplicationConfigToTerraform(struct?: DataprocGdcSparkApplicationSparkRApplicationConfigOutputReference | DataprocGdcSparkApplicationSparkRApplicationConfig): any;
export declare function dataprocGdcSparkApplicationSparkRApplicationConfigToHclTerraform(struct?: DataprocGdcSparkApplicationSparkRApplicationConfigOutputReference | DataprocGdcSparkApplicationSparkRApplicationConfig): any;
export declare class DataprocGdcSparkApplicationSparkRApplicationConfigOutputReference extends cdktf.ComplexObject {
private isEmptyObject;
/**
* @param terraformResource The parent resource
* @param terraformAttribute The attribute on the parent resource this class is referencing
*/
constructor(terraformResource: cdktf.IInterpolatingParent, terraformAttribute: string);
get internalValue(): DataprocGdcSparkApplicationSparkRApplicationConfig | undefined;
set internalValue(value: DataprocGdcSparkApplicationSparkRApplicationConfig | undefined);
private _archiveUris?;
get archiveUris(): string[];
set archiveUris(value: string[]);
resetArchiveUris(): void;
get archiveUrisInput(): string[] | undefined;
private _args?;
get args(): string[];
set args(value: string[]);
resetArgs(): void;
get argsInput(): string[] | undefined;
private _fileUris?;
get fileUris(): string[];
set fileUris(value: string[]);
resetFileUris(): void;
get fileUrisInput(): string[] | undefined;
private _mainRFileUri?;
get mainRFileUri(): string;
set mainRFileUri(value: string);
get mainRFileUriInput(): string | undefined;
}
export interface DataprocGdcSparkApplicationSparkSqlApplicationConfigQueryListStruct {
/**
* The queries to run.
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.36.1/docs/resources/dataproc_gdc_spark_application#queries DataprocGdcSparkApplication#queries}
*/
readonly queries: string[];
}
export declare function dataprocGdcSparkApplicationSparkSqlApplicationConfigQueryListStructToTerraform(struct?: DataprocGdcSparkApplicationSparkSqlApplicationConfigQueryListStructOutputReference | DataprocGdcSparkApplicationSparkSqlApplicationConfigQueryListStruct): any;
export declare function dataprocGdcSparkApplicationSparkSqlApplicationConfigQueryListStructToHclTerraform(struct?: DataprocGdcSparkApplicationSparkSqlApplicationConfigQueryListStructOutputReference | DataprocGdcSparkApplicationSparkSqlApplicationConfigQueryListStruct): any;
export declare class DataprocGdcSparkApplicationSparkSqlApplicationConfigQueryListStructOutputReference extends cdktf.ComplexObject {
private isEmptyObject;
/**
* @param terraformResource The parent resource
* @param terraformAttribute The attribute on the parent resource this class is referencing
*/
constructor(terraformResource: cdktf.IInterpolatingParent, terraformAttribute: string);
get internalValue(): DataprocGdcSparkApplicationSparkSqlApplicationConfigQueryListStruct | undefined;
set internalValue(value: DataprocGdcSparkApplicationSparkSqlApplicationConfigQueryListStruct | undefined);
private _queries?;
get queries(): string[];
set queries(value: string[]);
get queriesInput(): string[] | undefined;
}
export interface DataprocGdcSparkApplicationSparkSqlApplicationConfig {
/**
* HCFS URIs of jar files to be added to the Spark CLASSPATH.
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.36.1/docs/resources/dataproc_gdc_spark_application#jar_file_uris DataprocGdcSparkApplication#jar_file_uris}
*/
readonly jarFileUris?: string[];
/**
* The HCFS URI of the script that contains SQL queries.
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.36.1/docs/resources/dataproc_gdc_spark_application#query_file_uri DataprocGdcSparkApplication#query_file_uri}
*/
readonly queryFileUri?: string;
/**
* Mapping of query variable names to values (equivalent to the Spark SQL command: SET 'name="value";').
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.36.1/docs/resources/dataproc_gdc_spark_application#script_variables DataprocGdcSparkApplication#script_variables}
*/
readonly scriptVariables?: {
[key: string]: string;
};
/**
* query_list block
*
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.36.1/docs/resources/dataproc_gdc_spark_application#query_list DataprocGdcSparkApplication#query_list}
*/
readonly queryList?: DataprocGdcSparkApplicationSparkSqlApplicationConfigQueryListStruct;
}
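// Example (illustrative): a Spark SQL application config using an inline
// query_list block. Supply either queryFileUri or queryList; the table name and
// variable below are placeholders.
//
//   const sparkSql: DataprocGdcSparkApplicationSparkSqlApplicationConfig = {
//     queryList: {
//       queries: ['SELECT COUNT(*) FROM my_dataset.my_table;'],  // placeholder
//     },
//     scriptVariables: { run_date: '2024-01-01' },  // equivalent to SET run_date="2024-01-01";
//   };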
export declare function dataprocGdcSparkApplicationSparkSqlApplicationConfigToTerraform(struct?: DataprocGdcSparkApplicationSparkSqlApplicationConfigOutputReference | DataprocGdcSparkApplicationSparkSqlApplicationConfig): any;
export declare function dataprocGdcSparkApplicationSparkSqlApplicationConfigToHclTerraform(struct?: DataprocGdcSparkApplicationSparkSqlApplicationConfigOutputReference | DataprocGdcSparkApplicationSparkSqlApplicationConfig): any;
export declare class DataprocGdcSparkApplicationSparkSqlApplicationConfigOutputReference extends cdktf.ComplexObject {
private isEmptyObject;
/**
* @param terraformResource The parent resource
* @param terraformAttribute The attribute on the parent resource this class is referencing
*/
constructor(terraformResource: cdktf.IInterpolatingParent, terraformAttribute: string);
get internalValue(): DataprocGdcSparkApplicationSparkSqlApplicationConfig | undefined;
set internalValue(value: DataprocGdcSparkApplicationSparkSqlApplicationConfig | undefined);
private _jarFileUris?;
get jarFileUris(): string[];
set jarFileUris(value: string[]);
resetJarFileUris(): void;
get jarFileUrisInput(): string[] | undefined;
private _queryFileUri?;
get queryFileUri(): string;
set queryFileUri(value: string);
resetQueryFileUri(): void;
get queryFileUriInput(): string | undefined;
private _scriptVariables?;
get scriptVariables(): {
[key: string]: string;
};
set scriptVariables(value: {
[key: string]: string;
});
resetScriptVariables(): void;
get scriptVariablesInput(): {
[key: string]: string;
} | undefined;
private _queryList;
get queryList(): DataprocGdcSparkApplicationSparkSqlApplicationConfigQueryListStructOutputReference;
putQueryList(value: DataprocGdcSparkApplicationSparkSqlApplicationConfigQueryListStruct): void;
resetQueryList(): void;
get queryListInput(): DataprocGdcSparkApplicationSparkSqlApplicationConfigQueryListStruct | undefined;
}
export interface DataprocGdcSparkApplicationTimeouts {
/**
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.36.1/docs/resources/dataproc_gdc_spark_application#create DataprocGdcSparkApplication#create}
*/
readonly create?: string;
/**
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.36.1/docs/resources/dataproc_gdc_spark_application#delete DataprocGdcSparkApplication#delete}
*/
readonly delete?: string;
/**
* Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.36.1/docs/resources/dataproc_gdc_spark_application#update DataprocGdcSparkApplication#update}
*/
readonly update?: string;
}
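// Example (illustrative): custom operation timeouts, using Terraform's usual
// duration strings.
//
//   const timeouts: DataprocGdcSparkApplicationTimeouts = {
//     create: '30m',
//     update: '30m',
//     delete: '15m',
//   };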
export declare function dataprocGdcSparkApplicationTimeoutsToTerraform(struct?: DataprocGdcSparkApplicationTimeouts | cdktf.IResolvable): any;
export declare function dataprocGdcSparkApplicationTimeoutsToHclTerraform(struct?: DataprocGdcSparkApplicationTimeouts | cdktf.IResolvable): any;
export declare class DataprocGdcSparkApplicationTimeoutsOutputReference extends cdktf.ComplexObject {
private isEmptyObject;
private resolvableValue?;
/**
* @param terraformResource The parent resource
* @param terraformAttribute The attribute on the parent resource this class is referencing
*/
constructor(terraformResource: cdktf.IInterpolatingParent, terraformAttribute: string);
get internalValue(): DataprocGdcSparkApplicationTimeouts | cdktf.IResolvable | undefined;
set internalValue(value: DataprocGdcSparkApplicationTimeouts | cdktf.IResolvable | undefined);
private _create?;
get create(): string;
set create(value: string);
resetCreate(): void;
get createInput(): string | undefined;
private _delete?;
get delete(): string;
set delete(value: string);
resetDelete(): void;
get deleteInput(): string | undefined;
private _update?;
get update(): string;
set update(value: string);
resetUpdate(): void;
get updateInput(): string | undefined;
}
/**
* Represents a {@link https://registry.terraform.io/providers/hashicorp/google/6.36.1/docs/resources/dataproc_gdc_spark_application google_dataproc_gdc_spark_application}
*/
export declare class DataprocGdcSparkApplication extends cdktf.TerraformResource {
static readonly tfResourceType = "google_dataproc_gdc_spark_application";
/**
* Generates CDKTF code for importing a DataprocGdcSparkApplication resource upon running "cdktf plan <stack-name>"
* @param scope The scope in which to define this construct
* @param importToId The construct id used in the generated config for the DataprocGdcSparkApplication to import
* @param importFromId The id of the existing DataprocGdcSparkApplication that should be imported. Refer to the {@link https://registry.terraform.io/providers/hashicorp/google/6.36.1/docs/resources/dataproc_gdc_spark_application#import import section} in the documentation of this resource for the id to use
* @param provider? Optional instance of the provider where the DataprocGdcSparkApplication to import is found
*/
static generateConfigForImport(scope: Construct, importToId: string, importFromId: string, provider?: cdktf.TerraformProvider): cdktf.ImportableResource;
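// Example (illustrative): generating import configuration for an existing
// application. The id string below is a sketch of a fully qualified resource
// name, not a confirmed format; consult the import section linked above for the
// exact id to use.
//
//   DataprocGdcSparkApplication.generateConfigForImport(
//     this,
//     'imported-spark-app',
//     'projects/my-project/locations/us-west1/serviceInstances/my-service-instance/sparkApplications/my-spark-app',
//   );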
/**
* Create a new {@link https://registry.terraform.io/providers/hashicorp/google/6.36.1/docs/resources/dataproc_gdc_spark_application google_dataproc_gdc_spark_application} Resource
*
* @param scope The scope in which to define this construct
* @param id The scoped construct ID. Must be unique amongst siblings in the same scope
* @param config DataprocGdcSparkApplicationConfig
*/
constructor(scope: Construct, id: string, config: DataprocGdcSparkApplicationConfig);
private _annotations?;
get annotations(): {
[key: string]: string;
};
set annotations(value: {
[key: string]: string;
});
resetAnnotations(): void;
get annotationsInput(): {
[key: string]: string;
} | undefined;
private _applicationEnvironment?;
get applicationEnvironment(): string;
set applicationEnvironment(value: string);
resetApplicationEnvironment(): void;
get applicationEnvironmentInput(): string | undefined;
get createTime(): string;
private _dependencyImages?;
get dependencyImages(): string[];
set dependencyImages(value: string[]);
resetDependencyImages(): void;
get dependencyImagesInput(): string[] | undefined;
private _displayName?;
get displayName(): string;
set displayName(value: string);
resetDisplayName(): void;
get displayNameInput(): string | undefined;
private _effectiveAnnotations;
get effectiveAnnotations(): cdktf.StringMap;
private _effectiveLabels;
get effectiveLabels(): cdktf.StringMap;
private _id?;
get id(): string;
set id(value: string);
resetId(): void;
get idInput(): string | undefined;
private _labels?;
get labels(): {
[key: string]: string;
};
set labels(value: {
[key: string]: string;
});
resetLabels(): void;
get labelsInput(): {
[key: string]: string;
} | undefined;
private _location?;
get location(): string;
set location(value: string);
get locationInput(): string | undefined;
get monitoringEndpoint(): string;
get name(): string;
private _namespace?;
get namespace(): string;
set namespace(value: string);
resetNamespace(): void;
get namespaceInput(): string | undefined;
get outputUri(): string;
private _project?;
get project(): string;
set project(value: string);
resetProject(): void;
get projectInput(): string | undefined;
private _properties?;
get properties(): {
[key: string]: string;
};
set properties(value: {
[key: string]: string;
});
resetProperties(): void;
get propertiesInput(): {
[key: string]: string;
} | undefined;
get reconciling(): cdktf.IResolvable;
private _serviceinstance?;
get serviceinstance(): string;
set serviceinstance(value: string);
get serviceinstanceInput(): string | undefined;
private _sparkApplicationId?;
get sparkApplicationId(): string;
set sparkApplicationId(value: string);
get sparkApplicationIdInput(): string | undefined;
get state(): string;
get stateMessage(): string;
private _terraformLabels;
get terraformLabels(): cdktf.StringMap;
get uid(): string;
get updateTime(): string;
private _version?;
get version(): string;
set version(value: string);
resetVersion(): void;
get versionInput(): string | undefined;
private _pysparkApplicationConfig;
get pysparkApplicationConfig(): DataprocGdcSparkApplicationPysparkApplicationConfigOutputReference;
putPysparkApplicationConfig(value: DataprocGdcSparkApplicationPysparkApplicationConfig): void;
resetPysparkApplicationConfig(): void;
get pysparkApplicationConfigInput(): DataprocGdcSparkApplicationPysparkApplicationConfig | undefined;
private _sparkApplicationConfig;
get sparkApplicationConfig(): DataprocGdcSparkApplicationSparkApplicationConfigOutputReference;
putSparkApplicationConfig(value: DataprocGdcSparkApplicationSparkApplicationConfig): void;
resetSparkApplicationConfig(): void;
get sparkApplicationConfigInput(): DataprocGdcSparkApplicationSparkApplicationConfig | undefined;
private _sparkRApplicationConfig;
get sparkRApplicationConfig(): DataprocGdcSparkApplicationSparkRApplicationConfigOutputReference;
putSparkRApplicationConfig(value: DataprocGdcSparkApplicationSparkRApplicationConfig): void;
resetSparkRApplicationConfig(): void;
get sparkRApplicationConfigInput(): DataprocGdcSparkApplicationSparkRApplicationConfig | undefined;
private _sparkSqlApplicationConfig;
get sparkSqlApplicationConfig(): DataprocGdcSparkApplicationSparkSqlApplicationConfigOutputReference;
putSparkSqlApplicationConfig(value: DataprocGdcSparkApplicationSparkSqlApplicationConfig): void;
resetSparkSqlApplicationConfig(): void;
get sparkSqlApplicationConfigInput(): DataprocGdcSparkApplicationSparkSqlApplicationConfig | undefined;
private _timeouts;
get timeouts(): DataprocGdcSparkApplicationTimeoutsOutputReference;
putTimeouts(value: DataprocGdcSparkApplicationTimeouts): void;
resetTimeouts(): void;
get timeoutsInput(): cdktf.IResolvable | DataprocGdcSparkApplicationTimeouts | undefined;
protected synthesizeAttributes(): {
[name: string]: any;
};
protected synthesizeHclAttributes(): {
[name: string]: any;
};
}
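// End-to-end sketch (illustrative, not part of the declarations): wiring this
// resource into a CDKTF stack. The import paths assume the package's usual
// lib/<resource> layout; project, region, bucket, and ids are placeholders.
//
//   import { App, TerraformStack } from 'cdktf';
//   import { Construct } from 'constructs';
//   import { GoogleProvider } from '@cdktf/provider-google/lib/provider';
//   import { DataprocGdcSparkApplication } from '@cdktf/provider-google/lib/dataproc-gdc-spark-application';
//
//   class SparkAppStack extends TerraformStack {
//     constructor(scope: Construct, id: string) {
//       super(scope, id);
//       // Configure the provider for the target project and region (placeholders).
//       new GoogleProvider(this, 'google', { project: 'my-project', region: 'us-west1' });
//       // Declare a minimal PySpark application on an existing service instance.
//       new DataprocGdcSparkApplication(this, 'spark-app', {
//         location: 'us-west1',
//         serviceinstance: 'my-service-instance',
//         sparkApplicationId: 'my-spark-app',
//         pysparkApplicationConfig: {
//           mainPythonFileUri: 'gs://my-bucket/jobs/main.py',
//         },
//       });
//     }
//   }
//
//   const app = new App();
//   new SparkAppStack(app, 'spark-app-stack');
//   app.synth();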