@pulumi/databricks
A Pulumi package for creating and managing Databricks cloud resources.
"use strict";
// *** WARNING: this file was generated by pulumi-language-nodejs. ***
// *** Do not edit by hand unless you're certain you know what you are doing! ***
Object.defineProperty(exports, "__esModule", { value: true });
exports.SqlEndpoint = void 0;
const pulumi = require("@pulumi/pulumi");
const utilities = require("./utilities");
/**
 * This resource is used to manage [Databricks SQL warehouses](https://docs.databricks.com/sql/admin/sql-endpoints.html). To create [SQL warehouses](https://docs.databricks.com/sql/get-started/concepts.html), you must have the `databricksSqlAccess` entitlement on your databricks.Group or databricks_user.
*
* > This resource can only be used with a workspace-level provider!
*
* ## Example Usage
*
* ```typescript
* import * as pulumi from "@pulumi/pulumi";
* import * as databricks from "@pulumi/databricks";
*
* const me = databricks.getCurrentUser({});
* const _this = new databricks.SqlEndpoint("this", {
* name: me.then(me => `Endpoint of ${me.alphanumeric}`),
* clusterSize: "Small",
* maxNumClusters: 1,
* tags: {
* customTags: [{
* key: "City",
* value: "Amsterdam",
* }],
* },
* });
* ```
*
* ## Access control
*
 * * databricks.Permissions can control which groups or individual users are granted the *Can Use* or *Can Manage* permission on SQL warehouses; a sketch follows this list.
 * * The `databricksSqlAccess` entitlement on databricks.Group or databricks_user is required before a principal can use SQL warehouses.
*
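 * A minimal sketch of granting a group the *Can Use* permission on the warehouse from the example above. The group name "data analysts" is only a placeholder and is assumed to already hold the `databricksSqlAccess` entitlement:
 *
 * ```typescript
 * import * as databricks from "@pulumi/databricks";
 *
 * // `_this` is the databricks.SqlEndpoint created in the Example Usage snippet above.
 * const endpointUsage = new databricks.Permissions("endpoint-usage", {
 *     sqlEndpointId: _this.id,
 *     accessControls: [{
 *         groupName: "data analysts",
 *         permissionLevel: "CAN_USE",
 *     }],
 * });
 * ```
 *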
* ## Related resources
*
* The following resources are often used in the same context:
*
 * * End-to-end workspace management guide.
 * * databricks.InstanceProfile to manage AWS EC2 instance profiles with which users can launch a databricks.Cluster and access data, like databricks_mount.
* * databricks.SqlDashboard to manage Databricks SQL [Dashboards](https://docs.databricks.com/sql/user/dashboards/index.html).
 * * databricks.SqlGlobalConfig to configure the security policy, databricks_instance_profile, and [data access properties](https://docs.databricks.com/sql/admin/data-access-configuration.html) for all databricks.SqlEndpoint resources of a workspace.
* * databricks.Grants to manage data access in Unity Catalog.
*
* ## Import
*
 * You can import a `databricks_sql_endpoint` resource by its endpoint ID. With Terraform 1.5 or later, use an import block:
 *
 * ```hcl
 * import {
 *   to = databricks_sql_endpoint.this
 *   id = "<endpoint-id>"
 * }
 * ```
 *
 * With Pulumi, import using the `pulumi import` command:
 *
* ```sh
* $ pulumi import databricks:index/sqlEndpoint:SqlEndpoint this <endpoint-id>
* ```
*/
class SqlEndpoint extends pulumi.CustomResource {
/**
* Get an existing SqlEndpoint resource's state with the given name, ID, and optional extra
* properties used to qualify the lookup.
*
* @param name The _unique_ name of the resulting resource.
* @param id The _unique_ provider ID of the resource to lookup.
* @param state Any extra arguments used during the lookup.
* @param opts Optional settings to control the behavior of the CustomResource.
*/
static get(name, id, state, opts) {
return new SqlEndpoint(name, state, { ...opts, id: id });
}
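    // A minimal usage sketch for the lookup above: adopt an already-provisioned warehouse by its
    // endpoint ID so its outputs (for example `jdbcUrl`) can be referenced without recreating it.
    // The ID below is a placeholder.
    //
    //     const existing = SqlEndpoint.get("existing-warehouse", "<endpoint-id>");
    //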
/**
* Returns true if the given object is an instance of SqlEndpoint. This is designed to work even
* when multiple copies of the Pulumi SDK have been loaded into the same process.
*/
static isInstance(obj) {
if (obj === undefined || obj === null) {
return false;
}
return obj['__pulumiType'] === SqlEndpoint.__pulumiType;
}
constructor(name, argsOrState, opts) {
let resourceInputs = {};
opts = opts || {};
if (opts.id) {
const state = argsOrState;
resourceInputs["autoStopMins"] = state?.autoStopMins;
resourceInputs["channel"] = state?.channel;
resourceInputs["clusterSize"] = state?.clusterSize;
resourceInputs["creatorName"] = state?.creatorName;
resourceInputs["dataSourceId"] = state?.dataSourceId;
resourceInputs["enablePhoton"] = state?.enablePhoton;
resourceInputs["enableServerlessCompute"] = state?.enableServerlessCompute;
resourceInputs["healths"] = state?.healths;
resourceInputs["instanceProfileArn"] = state?.instanceProfileArn;
resourceInputs["jdbcUrl"] = state?.jdbcUrl;
resourceInputs["maxNumClusters"] = state?.maxNumClusters;
resourceInputs["minNumClusters"] = state?.minNumClusters;
resourceInputs["name"] = state?.name;
resourceInputs["noWait"] = state?.noWait;
resourceInputs["numActiveSessions"] = state?.numActiveSessions;
resourceInputs["numClusters"] = state?.numClusters;
resourceInputs["odbcParams"] = state?.odbcParams;
resourceInputs["spotInstancePolicy"] = state?.spotInstancePolicy;
resourceInputs["state"] = state?.state;
resourceInputs["tags"] = state?.tags;
resourceInputs["warehouseType"] = state?.warehouseType;
}
else {
const args = argsOrState;
if (args?.clusterSize === undefined && !opts.urn) {
throw new Error("Missing required property 'clusterSize'");
}
resourceInputs["autoStopMins"] = args?.autoStopMins;
resourceInputs["channel"] = args?.channel;
resourceInputs["clusterSize"] = args?.clusterSize;
resourceInputs["dataSourceId"] = args?.dataSourceId;
resourceInputs["enablePhoton"] = args?.enablePhoton;
resourceInputs["enableServerlessCompute"] = args?.enableServerlessCompute;
resourceInputs["instanceProfileArn"] = args?.instanceProfileArn;
resourceInputs["maxNumClusters"] = args?.maxNumClusters;
resourceInputs["minNumClusters"] = args?.minNumClusters;
resourceInputs["name"] = args?.name;
resourceInputs["noWait"] = args?.noWait;
resourceInputs["spotInstancePolicy"] = args?.spotInstancePolicy;
resourceInputs["tags"] = args?.tags;
resourceInputs["warehouseType"] = args?.warehouseType;
resourceInputs["creatorName"] = undefined /*out*/;
resourceInputs["healths"] = undefined /*out*/;
resourceInputs["jdbcUrl"] = undefined /*out*/;
resourceInputs["numActiveSessions"] = undefined /*out*/;
resourceInputs["numClusters"] = undefined /*out*/;
resourceInputs["odbcParams"] = undefined /*out*/;
resourceInputs["state"] = undefined /*out*/;
}
opts = pulumi.mergeOptions(utilities.resourceOptsDefaults(), opts);
super(SqlEndpoint.__pulumiType, name, resourceInputs, opts);
}
}
exports.SqlEndpoint = SqlEndpoint;
/** @internal */
SqlEndpoint.__pulumiType = 'databricks:index/sqlEndpoint:SqlEndpoint';
//# sourceMappingURL=sqlEndpoint.js.map