// Package: @cdktf/provider-google — prebuilt google provider for Terraform CDK (cdktf).
// Generated TypeScript declarations (573 lines / 28.6 kB as published).
/**
* Copyright (c) HashiCorp, Inc.
* SPDX-License-Identifier: MPL-2.0
*/
import { Construct } from 'constructs';
import * as cdktf from 'cdktf';
/**
 * Configuration accepted by the {@link BigqueryRoutine} resource constructor.
 */
export interface BigqueryRoutineConfig extends cdktf.TerraformMetaArguments {
/**
 * If set to DATA_MASKING, the function is validated and made available as a masking function. For more information, see https://cloud.google.com/bigquery/docs/user-defined-functions#custom-mask Possible values: ["DATA_MASKING"]
 *
 * Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/bigquery_routine#data_governance_type BigqueryRoutine#data_governance_type}
 */
readonly dataGovernanceType?: string;
/**
 * The ID of the dataset containing this routine
 *
 * Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/bigquery_routine#dataset_id BigqueryRoutine#dataset_id}
 */
readonly datasetId: string;
/**
 * The body of the routine. For functions, this is the expression in the AS clause.
 * If language=SQL, it is the substring inside (but excluding) the parentheses.
 *
 * Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/bigquery_routine#definition_body BigqueryRoutine#definition_body}
 */
readonly definitionBody: string;
/**
 * The description of the routine if defined.
 *
 * Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/bigquery_routine#description BigqueryRoutine#description}
 */
readonly description?: string;
/**
 * The determinism level of the JavaScript UDF if defined. Possible values: ["DETERMINISM_LEVEL_UNSPECIFIED", "DETERMINISTIC", "NOT_DETERMINISTIC"]
 *
 * Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/bigquery_routine#determinism_level BigqueryRoutine#determinism_level}
 */
readonly determinismLevel?: string;
/**
 * Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/bigquery_routine#id BigqueryRoutine#id}
 *
 * Please be aware that the id field is automatically added to all resources in Terraform providers using a Terraform provider SDK version below 2.
 * If you experience problems setting this value it might not be settable. Please take a look at the provider documentation to ensure it should be settable.
 */
readonly id?: string;
/**
 * Optional. If language = "JAVASCRIPT", this field stores the path of the
 * imported JAVASCRIPT libraries.
 *
 * Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/bigquery_routine#imported_libraries BigqueryRoutine#imported_libraries}
 */
readonly importedLibraries?: string[];
/**
 * The language of the routine. Possible values: ["SQL", "JAVASCRIPT", "PYTHON", "JAVA", "SCALA"]
 *
 * Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/bigquery_routine#language BigqueryRoutine#language}
 */
readonly language?: string;
/**
 * Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/bigquery_routine#project BigqueryRoutine#project}
 */
readonly project?: string;
/**
 * Optional. Can be set only if routineType = "TABLE_VALUED_FUNCTION".
 *
 * If absent, the return table type is inferred from definitionBody at query time in each query
 * that references this routine. If present, then the columns in the evaluated table result will
 * be cast to match the column types specified in return table type, at query time.
 *
 * Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/bigquery_routine#return_table_type BigqueryRoutine#return_table_type}
 */
readonly returnTableType?: string;
/**
 * A JSON schema for the return type. Optional if language = "SQL"; required otherwise.
 * If absent, the return type is inferred from definitionBody at query time in each query
 * that references this routine. If present, then the evaluated result will be cast to
 * the specified returned type at query time. ~>**NOTE**: Because this field expects a JSON
 * string, any changes to the string will create a diff, even if the JSON itself hasn't
 * changed. If the API returns a different value for the same schema, e.g. it switched
 * the order of values or replaced STRUCT field type with RECORD field type, we currently
 * cannot suppress the recurring diff this causes. As a workaround, we recommend using
 * the schema as returned by the API.
 *
 * Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/bigquery_routine#return_type BigqueryRoutine#return_type}
 */
readonly returnType?: string;
/**
 * The ID of the routine. The ID must contain only letters (a-z, A-Z), numbers (0-9), or underscores (_). The maximum length is 256 characters.
 *
 * Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/bigquery_routine#routine_id BigqueryRoutine#routine_id}
 */
readonly routineId: string;
/**
 * The type of routine. Possible values: ["SCALAR_FUNCTION", "PROCEDURE", "TABLE_VALUED_FUNCTION"]
 *
 * Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/bigquery_routine#routine_type BigqueryRoutine#routine_type}
 */
readonly routineType: string;
/**
 * arguments block
 *
 * Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/bigquery_routine#arguments BigqueryRoutine#arguments}
 */
readonly arguments?: BigqueryRoutineArguments[] | cdktf.IResolvable;
/**
 * remote_function_options block
 *
 * Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/bigquery_routine#remote_function_options BigqueryRoutine#remote_function_options}
 */
readonly remoteFunctionOptions?: BigqueryRoutineRemoteFunctionOptions;
/**
 * spark_options block
 *
 * Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/bigquery_routine#spark_options BigqueryRoutine#spark_options}
 */
readonly sparkOptions?: BigqueryRoutineSparkOptions;
/**
 * timeouts block
 *
 * Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/bigquery_routine#timeouts BigqueryRoutine#timeouts}
 */
readonly timeouts?: BigqueryRoutineTimeouts;
}
/**
 * A single entry of the `arguments` block: one input/output argument of the routine.
 */
export interface BigqueryRoutineArguments {
/**
 * Defaults to FIXED_TYPE. Default value: "FIXED_TYPE" Possible values: ["FIXED_TYPE", "ANY_TYPE"]
 *
 * Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/bigquery_routine#argument_kind BigqueryRoutine#argument_kind}
 */
readonly argumentKind?: string;
/**
 * A JSON schema for the data type. Required unless argumentKind = ANY_TYPE.
 * ~>**NOTE**: Because this field expects a JSON string, any changes to the string
 * will create a diff, even if the JSON itself hasn't changed. If the API returns
 * a different value for the same schema, e.g. it switched the order of values
 * or replaced STRUCT field type with RECORD field type, we currently cannot
 * suppress the recurring diff this causes. As a workaround, we recommend using
 * the schema as returned by the API.
 *
 * Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/bigquery_routine#data_type BigqueryRoutine#data_type}
 */
readonly dataType?: string;
/**
 * Specifies whether the argument is input or output. Can be set for procedures only. Possible values: ["IN", "OUT", "INOUT"]
 *
 * Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/bigquery_routine#mode BigqueryRoutine#mode}
 */
readonly mode?: string;
/**
 * The name of this argument. Can be absent for function return argument.
 *
 * Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/bigquery_routine#name BigqueryRoutine#name}
 */
readonly name?: string;
}
/** Serializes an arguments entry to its Terraform JSON representation. */
export declare function bigqueryRoutineArgumentsToTerraform(struct?: BigqueryRoutineArguments | cdktf.IResolvable): any;
/** Serializes an arguments entry to its Terraform HCL representation. */
export declare function bigqueryRoutineArgumentsToHclTerraform(struct?: BigqueryRoutineArguments | cdktf.IResolvable): any;
/**
 * Reference to a single `arguments` block item on a deployed resource,
 * exposing typed getters/setters plus reset/input accessors per attribute.
 */
export declare class BigqueryRoutineArgumentsOutputReference extends cdktf.ComplexObject {
private isEmptyObject;
private resolvableValue?;
/**
 * @param terraformResource The parent resource
 * @param terraformAttribute The attribute on the parent resource this class is referencing
 * @param complexObjectIndex the index of this item in the list
 * @param complexObjectIsFromSet whether the list is wrapping a set (will add tolist() to be able to access an item via an index)
 */
constructor(terraformResource: cdktf.IInterpolatingParent, terraformAttribute: string, complexObjectIndex: number, complexObjectIsFromSet: boolean);
get internalValue(): BigqueryRoutineArguments | cdktf.IResolvable | undefined;
set internalValue(value: BigqueryRoutineArguments | cdktf.IResolvable | undefined);
private _argumentKind?;
get argumentKind(): string;
set argumentKind(value: string);
resetArgumentKind(): void;
get argumentKindInput(): string | undefined;
private _dataType?;
get dataType(): string;
set dataType(value: string);
resetDataType(): void;
get dataTypeInput(): string | undefined;
private _mode?;
get mode(): string;
set mode(value: string);
resetMode(): void;
get modeInput(): string | undefined;
private _name?;
get name(): string;
set name(value: string);
resetName(): void;
get nameInput(): string | undefined;
}
/**
 * List wrapper over the `arguments` blocks; index into it with {@link get}.
 */
export declare class BigqueryRoutineArgumentsList extends cdktf.ComplexList {
protected terraformResource: cdktf.IInterpolatingParent;
protected terraformAttribute: string;
protected wrapsSet: boolean;
internalValue?: BigqueryRoutineArguments[] | cdktf.IResolvable;
/**
 * @param terraformResource The parent resource
 * @param terraformAttribute The attribute on the parent resource this class is referencing
 * @param wrapsSet whether the list is wrapping a set (will add tolist() to be able to access an item via an index)
 */
constructor(terraformResource: cdktf.IInterpolatingParent, terraformAttribute: string, wrapsSet: boolean);
/**
 * @param index the index of the item to return
 */
get(index: number): BigqueryRoutineArgumentsOutputReference;
}
/**
 * The `remote_function_options` block: settings for routines backed by a remote service.
 */
export interface BigqueryRoutineRemoteFunctionOptions {
/**
 * Fully qualified name of the user-provided connection object which holds
 * the authentication information to send requests to the remote service.
 * Format: "projects/{projectId}/locations/{locationId}/connections/{connectionId}"
 *
 * Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/bigquery_routine#connection BigqueryRoutine#connection}
 */
readonly connection?: string;
/**
 * Endpoint of the user-provided remote service, e.g.
 * 'https://us-east1-my_gcf_project.cloudfunctions.net/remote_add'
 *
 * Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/bigquery_routine#endpoint BigqueryRoutine#endpoint}
 */
readonly endpoint?: string;
/**
 * Max number of rows in each batch sent to the remote service. If absent or if 0,
 * BigQuery dynamically decides the number of rows in a batch.
 *
 * Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/bigquery_routine#max_batching_rows BigqueryRoutine#max_batching_rows}
 */
readonly maxBatchingRows?: string;
/**
 * User-defined context as a set of key/value pairs, which will be sent as function
 * invocation context together with batched arguments in the requests to the remote
 * service. The total number of bytes of keys and values must be less than 8KB.
 *
 * An object containing a list of "key": value pairs. Example:
 * '{ "name": "wrench", "mass": "1.3kg", "count": "3" }'.
 *
 * Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/bigquery_routine#user_defined_context BigqueryRoutine#user_defined_context}
 */
readonly userDefinedContext?: {
[key: string]: string;
};
}
/** Serializes a remote_function_options block to its Terraform JSON representation. */
export declare function bigqueryRoutineRemoteFunctionOptionsToTerraform(struct?: BigqueryRoutineRemoteFunctionOptionsOutputReference | BigqueryRoutineRemoteFunctionOptions): any;
/** Serializes a remote_function_options block to its Terraform HCL representation. */
export declare function bigqueryRoutineRemoteFunctionOptionsToHclTerraform(struct?: BigqueryRoutineRemoteFunctionOptionsOutputReference | BigqueryRoutineRemoteFunctionOptions): any;
/**
 * Reference to the singleton `remote_function_options` block on a deployed resource,
 * exposing typed getters/setters plus reset/input accessors per attribute.
 */
export declare class BigqueryRoutineRemoteFunctionOptionsOutputReference extends cdktf.ComplexObject {
private isEmptyObject;
/**
 * @param terraformResource The parent resource
 * @param terraformAttribute The attribute on the parent resource this class is referencing
 */
constructor(terraformResource: cdktf.IInterpolatingParent, terraformAttribute: string);
get internalValue(): BigqueryRoutineRemoteFunctionOptions | undefined;
set internalValue(value: BigqueryRoutineRemoteFunctionOptions | undefined);
private _connection?;
get connection(): string;
set connection(value: string);
resetConnection(): void;
get connectionInput(): string | undefined;
private _endpoint?;
get endpoint(): string;
set endpoint(value: string);
resetEndpoint(): void;
get endpointInput(): string | undefined;
private _maxBatchingRows?;
get maxBatchingRows(): string;
set maxBatchingRows(value: string);
resetMaxBatchingRows(): void;
get maxBatchingRowsInput(): string | undefined;
private _userDefinedContext?;
get userDefinedContext(): {
[key: string]: string;
};
set userDefinedContext(value: {
[key: string]: string;
});
resetUserDefinedContext(): void;
get userDefinedContextInput(): {
[key: string]: string;
} | undefined;
}
/**
 * The `spark_options` block: settings for Spark-based routines (PYTHON/JAVA/SCALA).
 */
export interface BigqueryRoutineSparkOptions {
/**
 * Archive files to be extracted into the working directory of each executor. For more information about Apache Spark, see Apache Spark.
 *
 * Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/bigquery_routine#archive_uris BigqueryRoutine#archive_uris}
 */
readonly archiveUris?: string[];
/**
 * Fully qualified name of the user-provided Spark connection object.
 * Format: "projects/{projectId}/locations/{locationId}/connections/{connectionId}"
 *
 * Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/bigquery_routine#connection BigqueryRoutine#connection}
 */
readonly connection?: string;
/**
 * Custom container image for the runtime environment.
 *
 * Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/bigquery_routine#container_image BigqueryRoutine#container_image}
 */
readonly containerImage?: string;
/**
 * Files to be placed in the working directory of each executor. For more information about Apache Spark, see Apache Spark.
 *
 * Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/bigquery_routine#file_uris BigqueryRoutine#file_uris}
 */
readonly fileUris?: string[];
/**
 * JARs to include on the driver and executor CLASSPATH. For more information about Apache Spark, see Apache Spark.
 *
 * Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/bigquery_routine#jar_uris BigqueryRoutine#jar_uris}
 */
readonly jarUris?: string[];
/**
 * The fully qualified name of a class in jarUris, for example, com.example.wordcount.
 * Exactly one of mainClass and main_jar_uri field should be set for Java/Scala language type.
 *
 * Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/bigquery_routine#main_class BigqueryRoutine#main_class}
 */
readonly mainClass?: string;
/**
 * The main file/jar URI of the Spark application.
 * Exactly one of the definitionBody field and the mainFileUri field must be set for Python.
 * Exactly one of mainClass and mainFileUri field should be set for Java/Scala language type.
 *
 * Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/bigquery_routine#main_file_uri BigqueryRoutine#main_file_uri}
 */
readonly mainFileUri?: string;
/**
 * Configuration properties as a set of key/value pairs, which will be passed on to the Spark application.
 * For more information, see Apache Spark and the procedure option list.
 * An object containing a list of "key": value pairs. Example: { "name": "wrench", "mass": "1.3kg", "count": "3" }.
 *
 * Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/bigquery_routine#properties BigqueryRoutine#properties}
 */
readonly properties?: {
[key: string]: string;
};
/**
 * Python files to be placed on the PYTHONPATH for PySpark application. Supported file types: .py, .egg, and .zip. For more information about Apache Spark, see Apache Spark.
 *
 * Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/bigquery_routine#py_file_uris BigqueryRoutine#py_file_uris}
 */
readonly pyFileUris?: string[];
/**
 * Runtime version. If not specified, the default runtime version is used.
 *
 * Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/bigquery_routine#runtime_version BigqueryRoutine#runtime_version}
 */
readonly runtimeVersion?: string;
}
/** Serializes a spark_options block to its Terraform JSON representation. */
export declare function bigqueryRoutineSparkOptionsToTerraform(struct?: BigqueryRoutineSparkOptionsOutputReference | BigqueryRoutineSparkOptions): any;
/** Serializes a spark_options block to its Terraform HCL representation. */
export declare function bigqueryRoutineSparkOptionsToHclTerraform(struct?: BigqueryRoutineSparkOptionsOutputReference | BigqueryRoutineSparkOptions): any;
/**
 * Reference to the singleton `spark_options` block on a deployed resource,
 * exposing typed getters/setters plus reset/input accessors per attribute.
 */
export declare class BigqueryRoutineSparkOptionsOutputReference extends cdktf.ComplexObject {
private isEmptyObject;
/**
 * @param terraformResource The parent resource
 * @param terraformAttribute The attribute on the parent resource this class is referencing
 */
constructor(terraformResource: cdktf.IInterpolatingParent, terraformAttribute: string);
get internalValue(): BigqueryRoutineSparkOptions | undefined;
set internalValue(value: BigqueryRoutineSparkOptions | undefined);
private _archiveUris?;
get archiveUris(): string[];
set archiveUris(value: string[]);
resetArchiveUris(): void;
get archiveUrisInput(): string[] | undefined;
private _connection?;
get connection(): string;
set connection(value: string);
resetConnection(): void;
get connectionInput(): string | undefined;
private _containerImage?;
get containerImage(): string;
set containerImage(value: string);
resetContainerImage(): void;
get containerImageInput(): string | undefined;
private _fileUris?;
get fileUris(): string[];
set fileUris(value: string[]);
resetFileUris(): void;
get fileUrisInput(): string[] | undefined;
private _jarUris?;
get jarUris(): string[];
set jarUris(value: string[]);
resetJarUris(): void;
get jarUrisInput(): string[] | undefined;
private _mainClass?;
get mainClass(): string;
set mainClass(value: string);
resetMainClass(): void;
get mainClassInput(): string | undefined;
private _mainFileUri?;
get mainFileUri(): string;
set mainFileUri(value: string);
resetMainFileUri(): void;
get mainFileUriInput(): string | undefined;
private _properties?;
get properties(): {
[key: string]: string;
};
set properties(value: {
[key: string]: string;
});
resetProperties(): void;
get propertiesInput(): {
[key: string]: string;
} | undefined;
private _pyFileUris?;
get pyFileUris(): string[];
set pyFileUris(value: string[]);
resetPyFileUris(): void;
get pyFileUrisInput(): string[] | undefined;
private _runtimeVersion?;
get runtimeVersion(): string;
set runtimeVersion(value: string);
resetRuntimeVersion(): void;
get runtimeVersionInput(): string | undefined;
}
/**
 * The `timeouts` block: per-operation timeout overrides for this resource.
 */
export interface BigqueryRoutineTimeouts {
/**
 * Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/bigquery_routine#create BigqueryRoutine#create}
 */
readonly create?: string;
/**
 * Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/bigquery_routine#delete BigqueryRoutine#delete}
 */
readonly delete?: string;
/**
 * Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/bigquery_routine#update BigqueryRoutine#update}
 */
readonly update?: string;
}
/** Serializes a timeouts block to its Terraform JSON representation. */
export declare function bigqueryRoutineTimeoutsToTerraform(struct?: BigqueryRoutineTimeouts | cdktf.IResolvable): any;
/** Serializes a timeouts block to its Terraform HCL representation. */
export declare function bigqueryRoutineTimeoutsToHclTerraform(struct?: BigqueryRoutineTimeouts | cdktf.IResolvable): any;
/**
 * Reference to the `timeouts` block on a deployed resource,
 * exposing typed getters/setters plus reset/input accessors per attribute.
 */
export declare class BigqueryRoutineTimeoutsOutputReference extends cdktf.ComplexObject {
private isEmptyObject;
private resolvableValue?;
/**
 * @param terraformResource The parent resource
 * @param terraformAttribute The attribute on the parent resource this class is referencing
 */
constructor(terraformResource: cdktf.IInterpolatingParent, terraformAttribute: string);
get internalValue(): BigqueryRoutineTimeouts | cdktf.IResolvable | undefined;
set internalValue(value: BigqueryRoutineTimeouts | cdktf.IResolvable | undefined);
private _create?;
get create(): string;
set create(value: string);
resetCreate(): void;
get createInput(): string | undefined;
private _delete?;
get delete(): string;
set delete(value: string);
resetDelete(): void;
get deleteInput(): string | undefined;
private _update?;
get update(): string;
set update(value: string);
resetUpdate(): void;
get updateInput(): string | undefined;
}
/**
 * Represents a {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/bigquery_routine google_bigquery_routine}
 */
export declare class BigqueryRoutine extends cdktf.TerraformResource {
static readonly tfResourceType = "google_bigquery_routine";
/**
 * Generates CDKTF code for importing a BigqueryRoutine resource upon running "cdktf plan <stack-name>"
 * @param scope The scope in which to define this construct
 * @param importToId The construct id used in the generated config for the BigqueryRoutine to import
 * @param importFromId The id of the existing BigqueryRoutine that should be imported. Refer to the {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/bigquery_routine#import import section} in the documentation of this resource for the id to use
 * @param provider Optional instance of the provider where the BigqueryRoutine to import is found
 */
static generateConfigForImport(scope: Construct, importToId: string, importFromId: string, provider?: cdktf.TerraformProvider): cdktf.ImportableResource;
/**
 * Create a new {@link https://registry.terraform.io/providers/hashicorp/google/6.13.0/docs/resources/bigquery_routine google_bigquery_routine} Resource
 *
 * @param scope The scope in which to define this construct
 * @param id The scoped construct ID. Must be unique amongst siblings in the same scope
 * @param config The resource configuration (see {@link BigqueryRoutineConfig})
 */
constructor(scope: Construct, id: string, config: BigqueryRoutineConfig);
get creationTime(): number;
private _dataGovernanceType?;
get dataGovernanceType(): string;
set dataGovernanceType(value: string);
resetDataGovernanceType(): void;
get dataGovernanceTypeInput(): string | undefined;
private _datasetId?;
get datasetId(): string;
set datasetId(value: string);
get datasetIdInput(): string | undefined;
private _definitionBody?;
get definitionBody(): string;
set definitionBody(value: string);
get definitionBodyInput(): string | undefined;
private _description?;
get description(): string;
set description(value: string);
resetDescription(): void;
get descriptionInput(): string | undefined;
private _determinismLevel?;
get determinismLevel(): string;
set determinismLevel(value: string);
resetDeterminismLevel(): void;
get determinismLevelInput(): string | undefined;
private _id?;
get id(): string;
set id(value: string);
resetId(): void;
get idInput(): string | undefined;
private _importedLibraries?;
get importedLibraries(): string[];
set importedLibraries(value: string[]);
resetImportedLibraries(): void;
get importedLibrariesInput(): string[] | undefined;
private _language?;
get language(): string;
set language(value: string);
resetLanguage(): void;
get languageInput(): string | undefined;
get lastModifiedTime(): number;
private _project?;
get project(): string;
set project(value: string);
resetProject(): void;
get projectInput(): string | undefined;
private _returnTableType?;
get returnTableType(): string;
set returnTableType(value: string);
resetReturnTableType(): void;
get returnTableTypeInput(): string | undefined;
private _returnType?;
get returnType(): string;
set returnType(value: string);
resetReturnType(): void;
get returnTypeInput(): string | undefined;
private _routineId?;
get routineId(): string;
set routineId(value: string);
get routineIdInput(): string | undefined;
private _routineType?;
get routineType(): string;
set routineType(value: string);
get routineTypeInput(): string | undefined;
private _arguments;
get arguments(): BigqueryRoutineArgumentsList;
putArguments(value: BigqueryRoutineArguments[] | cdktf.IResolvable): void;
resetArguments(): void;
get argumentsInput(): cdktf.IResolvable | BigqueryRoutineArguments[] | undefined;
private _remoteFunctionOptions;
get remoteFunctionOptions(): BigqueryRoutineRemoteFunctionOptionsOutputReference;
putRemoteFunctionOptions(value: BigqueryRoutineRemoteFunctionOptions): void;
resetRemoteFunctionOptions(): void;
get remoteFunctionOptionsInput(): BigqueryRoutineRemoteFunctionOptions | undefined;
private _sparkOptions;
get sparkOptions(): BigqueryRoutineSparkOptionsOutputReference;
putSparkOptions(value: BigqueryRoutineSparkOptions): void;
resetSparkOptions(): void;
get sparkOptionsInput(): BigqueryRoutineSparkOptions | undefined;
private _timeouts;
get timeouts(): BigqueryRoutineTimeoutsOutputReference;
putTimeouts(value: BigqueryRoutineTimeouts): void;
resetTimeouts(): void;
get timeoutsInput(): cdktf.IResolvable | BigqueryRoutineTimeouts | undefined;
protected synthesizeAttributes(): {
[name: string]: any;
};
protected synthesizeHclAttributes(): {
[name: string]: any;
};
}