@pulumi/databricks
A Pulumi package for creating and managing Databricks cloud resources.
JavaScript
// *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
// *** Do not edit by hand unless you're certain you know what you are doing! ***
Object.defineProperty(exports, "__esModule", { value: true });
exports.SqlEndpoint = void 0;
const pulumi = require("@pulumi/pulumi");
const utilities = require("./utilities");
/**
* This resource is used to manage [Databricks SQL warehouses](https://docs.databricks.com/sql/admin/sql-endpoints.html). To create [SQL warehouses](https://docs.databricks.com/sql/get-started/concepts.html) you must have the `databricksSqlAccess` entitlement on your databricks.Group or databricks.User.
*
* ## Example Usage
*
* ```typescript
* import * as pulumi from "@pulumi/pulumi";
* import * as databricks from "@pulumi/databricks";
*
* const me = databricks.getCurrentUser({});
* const _this = new databricks.SqlEndpoint("this", {
* name: me.then(me => `Endpoint of ${me.alphanumeric}`),
* clusterSize: "Small",
* maxNumClusters: 1,
* tags: {
* customTags: [{
* key: "City",
* value: "Amsterdam",
* }],
* },
* });
* ```
*
* ## Access control
*
* * databricks.Permissions can control which groups or individual users get the *Can Use* or *Can Manage* permission on SQL warehouses, as sketched below.
* * The `databricksSqlAccess` entitlement on databricks.Group or databricks.User is required to use Databricks SQL.
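*
* A minimal sketch of wiring both together (the `analysts` group is hypothetical, and `_this` refers to the warehouse from the Example Usage above):
*
* ```typescript
* const analysts = new databricks.Group("analysts", {
*     displayName: "Analysts",
*     // Entitle the group to use Databricks SQL.
*     databricksSqlAccess: true,
* });
* const endpointUsage = new databricks.Permissions("endpoint-usage", {
*     sqlEndpointId: _this.id,
*     accessControls: [{
*         groupName: analysts.displayName,
*         permissionLevel: "CAN_USE",
*     }],
* });
* ```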
*
* ## Related resources
*
* The following resources are often used in the same context:
*
* * The end-to-end workspace management guide.
* * databricks.InstanceProfile to manage AWS EC2 instance profiles that users can use to launch databricks.Cluster and access data, like databricks_mount.
* * databricks.SqlDashboard to manage Databricks SQL [Dashboards](https://docs.databricks.com/sql/user/dashboards/index.html).
* * databricks.SqlGlobalConfig to configure the security policy, databricks_instance_profile, and [data access properties](https://docs.databricks.com/sql/admin/data-access-configuration.html) for all databricks.SqlEndpoint resources in a workspace.
* * databricks.SqlPermissions to manage data object access control lists in Databricks workspaces for things like tables, views, databases, and [more](https://docs.databricks.com/security/access-control/table-acls/object-privileges.html).
*
* ## Import
*
* You can import a `databricks_sql_endpoint` resource using its ID, for example:
*
* ```sh
* $ pulumi import databricks:index/sqlEndpoint:SqlEndpoint this <endpoint-id>
* ```
*/
class SqlEndpoint extends pulumi.CustomResource {
/**
* Get an existing SqlEndpoint resource's state with the given name, ID, and optional extra
* properties used to qualify the lookup.
*
* @param name The _unique_ name of the resulting resource.
* @param id The _unique_ provider ID of the resource to lookup.
* @param state Any extra arguments used during the lookup.
* @param opts Optional settings to control the behavior of the CustomResource.
*/
static get(name, id, state, opts) {
return new SqlEndpoint(name, state, Object.assign(Object.assign({}, opts), { id: id }));
}
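// Usage sketch (illustrative only; "<endpoint-id>" stands in for a real SQL warehouse ID):
//   const existing = SqlEndpoint.get("existing", "<endpoint-id>");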
/**
* Returns true if the given object is an instance of SqlEndpoint. This is designed to work even
* when multiple copies of the Pulumi SDK have been loaded into the same process.
*/
static isInstance(obj) {
if (obj === undefined || obj === null) {
return false;
}
return obj['__pulumiType'] === SqlEndpoint.__pulumiType;
}
constructor(name, argsOrState, opts) {
let resourceInputs = {};
opts = opts || {};
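// An explicit ID means we are looking up an existing warehouse (e.g. via SqlEndpoint.get), so argsOrState holds prior state.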
if (opts.id) {
const state = argsOrState;
resourceInputs["autoStopMins"] = state ? state.autoStopMins : undefined;
resourceInputs["channel"] = state ? state.channel : undefined;
resourceInputs["clusterSize"] = state ? state.clusterSize : undefined;
resourceInputs["creatorName"] = state ? state.creatorName : undefined;
resourceInputs["dataSourceId"] = state ? state.dataSourceId : undefined;
resourceInputs["enablePhoton"] = state ? state.enablePhoton : undefined;
resourceInputs["enableServerlessCompute"] = state ? state.enableServerlessCompute : undefined;
resourceInputs["healths"] = state ? state.healths : undefined;
resourceInputs["instanceProfileArn"] = state ? state.instanceProfileArn : undefined;
resourceInputs["jdbcUrl"] = state ? state.jdbcUrl : undefined;
resourceInputs["maxNumClusters"] = state ? state.maxNumClusters : undefined;
resourceInputs["minNumClusters"] = state ? state.minNumClusters : undefined;
resourceInputs["name"] = state ? state.name : undefined;
resourceInputs["numActiveSessions"] = state ? state.numActiveSessions : undefined;
resourceInputs["numClusters"] = state ? state.numClusters : undefined;
resourceInputs["odbcParams"] = state ? state.odbcParams : undefined;
resourceInputs["spotInstancePolicy"] = state ? state.spotInstancePolicy : undefined;
resourceInputs["state"] = state ? state.state : undefined;
resourceInputs["tags"] = state ? state.tags : undefined;
resourceInputs["warehouseType"] = state ? state.warehouseType : undefined;
}
else {
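// No ID was given, so this registers a new warehouse and argsOrState holds the constructor arguments.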
const args = argsOrState;
if ((!args || args.clusterSize === undefined) && !opts.urn) {
throw new Error("Missing required property 'clusterSize'");
}
resourceInputs["autoStopMins"] = args ? args.autoStopMins : undefined;
resourceInputs["channel"] = args ? args.channel : undefined;
resourceInputs["clusterSize"] = args ? args.clusterSize : undefined;
resourceInputs["dataSourceId"] = args ? args.dataSourceId : undefined;
resourceInputs["enablePhoton"] = args ? args.enablePhoton : undefined;
resourceInputs["enableServerlessCompute"] = args ? args.enableServerlessCompute : undefined;
resourceInputs["instanceProfileArn"] = args ? args.instanceProfileArn : undefined;
resourceInputs["maxNumClusters"] = args ? args.maxNumClusters : undefined;
resourceInputs["minNumClusters"] = args ? args.minNumClusters : undefined;
resourceInputs["name"] = args ? args.name : undefined;
resourceInputs["spotInstancePolicy"] = args ? args.spotInstancePolicy : undefined;
resourceInputs["tags"] = args ? args.tags : undefined;
resourceInputs["warehouseType"] = args ? args.warehouseType : undefined;
resourceInputs["creatorName"] = undefined /*out*/;
resourceInputs["healths"] = undefined /*out*/;
resourceInputs["jdbcUrl"] = undefined /*out*/;
resourceInputs["numActiveSessions"] = undefined /*out*/;
resourceInputs["numClusters"] = undefined /*out*/;
resourceInputs["odbcParams"] = undefined /*out*/;
resourceInputs["state"] = undefined /*out*/;
}
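// Merge in the SDK's default resource options (e.g. the provider plugin version) before registering the resource.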
opts = pulumi.mergeOptions(utilities.resourceOptsDefaults(), opts);
super(SqlEndpoint.__pulumiType, name, resourceInputs, opts);
}
}
exports.SqlEndpoint = SqlEndpoint;
/** @internal */
SqlEndpoint.__pulumiType = 'databricks:index/sqlEndpoint:SqlEndpoint';
//# sourceMappingURL=sqlEndpoint.js.map