@pulumi/databricks
Version:
A Pulumi package for creating and managing Databricks cloud resources.
344 lines (343 loc) • 12.9 kB
TypeScript
import * as pulumi from "@pulumi/pulumi";
import * as inputs from "./types/input";
import * as outputs from "./types/output";
/**
 * > **Note** If you have a fully automated setup with workspaces created by databricks.MwsWorkspaces or azurerm_databricks_workspace, please make sure to add dependsOn attribute in order to prevent _default auth: cannot configure default credentials_ errors.
 *
 * Retrieves information about a databricks.getSqlWarehouse using its id. This could be retrieved programmatically using databricks.getSqlWarehouses data source.
 *
 * ## Example Usage
 *
 * * Retrieve attributes of each SQL warehouse in a workspace:
 *
 * ```typescript
 * import * as pulumi from "@pulumi/pulumi";
 * import * as databricks from "@pulumi/databricks";
 *
 * const all = databricks.getSqlWarehouses({});
 * const _this = all.then(all => Object.entries(all.ids).reduce((__obj, [__key, __value]) => ({ ...__obj, [__key]: databricks.getSqlWarehouse({
 *     id: __value,
 * }) }), {}));
 * ```
 *
 * * Search for a specific SQL Warehouse by name:
 *
 * ```typescript
 * import * as pulumi from "@pulumi/pulumi";
 * import * as databricks from "@pulumi/databricks";
 *
 * const all = databricks.getSqlWarehouse({
 *     name: "Starter Warehouse",
 * });
 * ```
 *
 * ## Related resources
 *
 * The following resources are often used in the same context:
 *
 * * End to end workspace management guide.
 * * databricks.InstanceProfile to manage AWS EC2 instance profiles with which users can launch databricks.Cluster and access data, like databricks_mount.
 * * databricks.SqlDashboard to manage Databricks SQL [Dashboards](https://docs.databricks.com/sql/user/dashboards/index.html).
 * * databricks.SqlGlobalConfig to configure the security policy, databricks_instance_profile, and [data access properties](https://docs.databricks.com/sql/admin/data-access-configuration.html) for all databricks.getSqlWarehouse of workspace.
 * * databricks.SqlPermissions to manage data object access control lists in Databricks workspaces for things like tables, views, databases, and [more](https://docs.databricks.com/security/access-control/table-acls/object-privileges.html).
 */
export declare function getSqlWarehouse(args?: GetSqlWarehouseArgs, opts?: pulumi.InvokeOptions): Promise<GetSqlWarehouseResult>;
/**
 * A collection of arguments for invoking getSqlWarehouse.
 */
export interface GetSqlWarehouseArgs {
/**
 * Time in minutes until an idle SQL warehouse terminates all clusters and stops.
 */
autoStopMins?: number;
/**
 * Block describing the Databricks SQL release channel, consisting of the following fields:
 */
channel?: inputs.GetSqlWarehouseChannel;
/**
 * The size of the clusters allocated to the warehouse: "2X-Small", "X-Small", "Small", "Medium", "Large", "X-Large", "2X-Large", "3X-Large", "4X-Large".
 */
clusterSize?: string;
/**
 * The username of the user who created the endpoint.
 */
creatorName?: string;
/**
 * ID of the data source for this warehouse. This is used to bind a Databricks SQL query to a warehouse.
 */
dataSourceId?: string;
/**
 * Whether [Photon](https://databricks.com/product/delta-engine) is enabled.
 */
enablePhoton?: boolean;
/**
 * Whether this SQL warehouse is a serverless SQL warehouse.
 */
enableServerlessCompute?: boolean;
/**
 * Health status of the endpoint.
 */
health?: inputs.GetSqlWarehouseHealth;
/**
 * The ID of the SQL warehouse.
 */
id?: string;
/**
 * ARN of the AWS EC2 instance profile used by the SQL warehouse (AWS workspaces only — TODO confirm against provider docs).
 */
instanceProfileArn?: string;
/**
 * JDBC connection string.
 */
jdbcUrl?: string;
/**
 * Maximum number of clusters available when a SQL warehouse is running.
 */
maxNumClusters?: number;
/**
 * Minimum number of clusters available when a SQL warehouse is running.
 */
minNumClusters?: number;
/**
 * Name of the SQL warehouse to search (case-sensitive).
 */
name?: string;
/**
 * The current number of active sessions on the endpoint.
 */
numActiveSessions?: number;
/**
 * The current number of clusters used by the endpoint.
 */
numClusters?: number;
/**
 * ODBC connection params: `odbc_params.hostname`, `odbc_params.path`, `odbc_params.protocol`, and `odbc_params.port`.
 */
odbcParams?: inputs.GetSqlWarehouseOdbcParams;
/**
 * The spot policy to use for allocating instances to clusters: `COST_OPTIMIZED` or `RELIABILITY_OPTIMIZED`.
 */
spotInstancePolicy?: string;
/**
 * The current state of the endpoint.
 */
state?: string;
/**
 * Tags used for SQL warehouse resources.
 */
tags?: inputs.GetSqlWarehouseTags;
/**
 * SQL warehouse type. See for [AWS](https://docs.databricks.com/sql/index.html#warehouse-types) or [Azure](https://learn.microsoft.com/azure/databricks/sql/#warehouse-types).
 */
warehouseType?: string;
}
/**
 * A collection of values returned by getSqlWarehouse.
 */
export interface GetSqlWarehouseResult {
/**
 * Time in minutes until an idle SQL warehouse terminates all clusters and stops.
 */
readonly autoStopMins: number;
/**
 * Block describing the Databricks SQL release channel, consisting of the following fields:
 */
readonly channel: outputs.GetSqlWarehouseChannel;
/**
 * The size of the clusters allocated to the warehouse: "2X-Small", "X-Small", "Small", "Medium", "Large", "X-Large", "2X-Large", "3X-Large", "4X-Large".
 */
readonly clusterSize: string;
/**
 * The username of the user who created the endpoint.
 */
readonly creatorName: string;
/**
 * ID of the data source for this warehouse. This is used to bind a Databricks SQL query to a warehouse.
 */
readonly dataSourceId: string;
/**
 * Whether [Photon](https://databricks.com/product/delta-engine) is enabled.
 */
readonly enablePhoton: boolean;
/**
 * Whether this SQL warehouse is a serverless SQL warehouse.
 */
readonly enableServerlessCompute: boolean;
/**
 * Health status of the endpoint.
 */
readonly health: outputs.GetSqlWarehouseHealth;
/**
 * The ID of the SQL warehouse.
 */
readonly id: string;
/**
 * ARN of the AWS EC2 instance profile used by the SQL warehouse (AWS workspaces only — TODO confirm against provider docs).
 */
readonly instanceProfileArn: string;
/**
 * JDBC connection string.
 */
readonly jdbcUrl: string;
/**
 * Maximum number of clusters available when a SQL warehouse is running.
 */
readonly maxNumClusters: number;
/**
 * Minimum number of clusters available when a SQL warehouse is running.
 */
readonly minNumClusters: number;
/**
 * Name of the SQL warehouse.
 */
readonly name: string;
/**
 * The current number of active sessions on the endpoint.
 */
readonly numActiveSessions: number;
/**
 * The current number of clusters used by the endpoint.
 */
readonly numClusters: number;
/**
 * ODBC connection params: `odbc_params.hostname`, `odbc_params.path`, `odbc_params.protocol`, and `odbc_params.port`.
 */
readonly odbcParams: outputs.GetSqlWarehouseOdbcParams;
/**
 * The spot policy to use for allocating instances to clusters: `COST_OPTIMIZED` or `RELIABILITY_OPTIMIZED`.
 */
readonly spotInstancePolicy: string;
/**
 * The current state of the endpoint.
 */
readonly state: string;
/**
 * Tags used for SQL warehouse resources.
 */
readonly tags: outputs.GetSqlWarehouseTags;
/**
 * SQL warehouse type. See for [AWS](https://docs.databricks.com/sql/index.html#warehouse-types) or [Azure](https://learn.microsoft.com/azure/databricks/sql/#warehouse-types).
 */
readonly warehouseType: string;
}
/**
 * > **Note** If you have a fully automated setup with workspaces created by databricks.MwsWorkspaces or azurerm_databricks_workspace, please make sure to add dependsOn attribute in order to prevent _default auth: cannot configure default credentials_ errors.
 *
 * Retrieves information about a databricks.getSqlWarehouse using its id. This could be retrieved programmatically using databricks.getSqlWarehouses data source.
 *
 * ## Example Usage
 *
 * * Retrieve attributes of each SQL warehouse in a workspace:
 *
 * ```typescript
 * import * as pulumi from "@pulumi/pulumi";
 * import * as databricks from "@pulumi/databricks";
 *
 * const all = databricks.getSqlWarehouses({});
 * const _this = all.then(all => Object.entries(all.ids).reduce((__obj, [__key, __value]) => ({ ...__obj, [__key]: databricks.getSqlWarehouse({
 *     id: __value,
 * }) }), {}));
 * ```
 *
 * * Search for a specific SQL Warehouse by name:
 *
 * ```typescript
 * import * as pulumi from "@pulumi/pulumi";
 * import * as databricks from "@pulumi/databricks";
 *
 * const all = databricks.getSqlWarehouse({
 *     name: "Starter Warehouse",
 * });
 * ```
 *
 * ## Related resources
 *
 * The following resources are often used in the same context:
 *
 * * End to end workspace management guide.
 * * databricks.InstanceProfile to manage AWS EC2 instance profiles with which users can launch databricks.Cluster and access data, like databricks_mount.
 * * databricks.SqlDashboard to manage Databricks SQL [Dashboards](https://docs.databricks.com/sql/user/dashboards/index.html).
 * * databricks.SqlGlobalConfig to configure the security policy, databricks_instance_profile, and [data access properties](https://docs.databricks.com/sql/admin/data-access-configuration.html) for all databricks.getSqlWarehouse of workspace.
 * * databricks.SqlPermissions to manage data object access control lists in Databricks workspaces for things like tables, views, databases, and [more](https://docs.databricks.com/security/access-control/table-acls/object-privileges.html).
 */
export declare function getSqlWarehouseOutput(args?: GetSqlWarehouseOutputArgs, opts?: pulumi.InvokeOutputOptions): pulumi.Output<GetSqlWarehouseResult>;
/**
 * A collection of arguments for invoking getSqlWarehouse.
 */
export interface GetSqlWarehouseOutputArgs {
/**
 * Time in minutes until an idle SQL warehouse terminates all clusters and stops.
 */
autoStopMins?: pulumi.Input<number>;
/**
 * Block describing the Databricks SQL release channel, consisting of the following fields:
 */
channel?: pulumi.Input<inputs.GetSqlWarehouseChannelArgs>;
/**
 * The size of the clusters allocated to the warehouse: "2X-Small", "X-Small", "Small", "Medium", "Large", "X-Large", "2X-Large", "3X-Large", "4X-Large".
 */
clusterSize?: pulumi.Input<string>;
/**
 * The username of the user who created the endpoint.
 */
creatorName?: pulumi.Input<string>;
/**
 * ID of the data source for this warehouse. This is used to bind a Databricks SQL query to a warehouse.
 */
dataSourceId?: pulumi.Input<string>;
/**
 * Whether [Photon](https://databricks.com/product/delta-engine) is enabled.
 */
enablePhoton?: pulumi.Input<boolean>;
/**
 * Whether this SQL warehouse is a serverless SQL warehouse.
 */
enableServerlessCompute?: pulumi.Input<boolean>;
/**
 * Health status of the endpoint.
 */
health?: pulumi.Input<inputs.GetSqlWarehouseHealthArgs>;
/**
 * The ID of the SQL warehouse.
 */
id?: pulumi.Input<string>;
/**
 * ARN of the AWS EC2 instance profile used by the SQL warehouse (AWS workspaces only — TODO confirm against provider docs).
 */
instanceProfileArn?: pulumi.Input<string>;
/**
 * JDBC connection string.
 */
jdbcUrl?: pulumi.Input<string>;
/**
 * Maximum number of clusters available when a SQL warehouse is running.
 */
maxNumClusters?: pulumi.Input<number>;
/**
 * Minimum number of clusters available when a SQL warehouse is running.
 */
minNumClusters?: pulumi.Input<number>;
/**
 * Name of the SQL warehouse to search (case-sensitive).
 */
name?: pulumi.Input<string>;
/**
 * The current number of active sessions on the endpoint.
 */
numActiveSessions?: pulumi.Input<number>;
/**
 * The current number of clusters used by the endpoint.
 */
numClusters?: pulumi.Input<number>;
/**
 * ODBC connection params: `odbc_params.hostname`, `odbc_params.path`, `odbc_params.protocol`, and `odbc_params.port`.
 */
odbcParams?: pulumi.Input<inputs.GetSqlWarehouseOdbcParamsArgs>;
/**
 * The spot policy to use for allocating instances to clusters: `COST_OPTIMIZED` or `RELIABILITY_OPTIMIZED`.
 */
spotInstancePolicy?: pulumi.Input<string>;
/**
 * The current state of the endpoint.
 */
state?: pulumi.Input<string>;
/**
 * Tags used for SQL warehouse resources.
 */
tags?: pulumi.Input<inputs.GetSqlWarehouseTagsArgs>;
/**
 * SQL warehouse type. See for [AWS](https://docs.databricks.com/sql/index.html#warehouse-types) or [Azure](https://learn.microsoft.com/azure/databricks/sql/#warehouse-types).
 */
warehouseType?: pulumi.Input<string>;
}