@pulumi/databricks
A Pulumi package for creating and managing Databricks cloud resources.
"use strict";
// *** WARNING: this file was generated by pulumi-language-nodejs. ***
// *** Do not edit by hand unless you're certain you know what you are doing! ***
Object.defineProperty(exports, "__esModule", { value: true });
exports.SqlPermissions = void 0;
const pulumi = require("@pulumi/pulumi");
const utilities = require("./utilities");
/**
 * > Please switch to databricks.Grants with Unity Catalog to manage data access; it provides a better and faster way of managing data security. The `databricks.Grants` resource *doesn't require a technical cluster to perform operations*. On workspaces with Unity Catalog enabled, you may run into errors such as `Error: cannot create sql permissions: cannot read current grants: For unity catalog, please specify the catalog name explicitly. E.g. SHOW GRANT ``your.address@email.com`` ON CATALOG main`. This happens if your `defaultCatalogName` was set to a UC catalog instead of `hiveMetastore`. The workaround is to re-assign the metastore with the default catalog set back to `hiveMetastore`. See databricks.MetastoreAssignment.
*
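 * A minimal sketch of that workaround, assuming you already know the metastore ID and the numeric workspace ID (the values below are illustrative placeholders):
 *
 * ```typescript
 * import * as pulumi from "@pulumi/pulumi";
 * import * as databricks from "@pulumi/databricks";
 *
 * // Re-assign the metastore with the default catalog pointing back at the
 * // legacy Hive metastore so that SqlPermissions can read current grants.
 * const reassignment = new databricks.MetastoreAssignment("metastore_reassignment", {
 *     metastoreId: "11111111-2222-3333-4444-555555555555", // illustrative metastore ID
 *     workspaceId: 123456789012345,                        // illustrative workspace ID
 *     defaultCatalogName: "hive_metastore",
 * });
 * ```
 *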
 * This resource manages data object access control lists in Databricks workspaces for things like tables, views, databases, and [more](https://docs.databricks.com/security/access-control/table-acls/object-privileges.html). To enable Table Access Control, log in to the workspace as an administrator, go to the `Admin Console`, pick the `Access Control` tab, click the `Enable` button in the `Table Access Control` section, and click `Confirm`. The security guarantees of table access control **will only be effective if cluster access control is also turned on**. Please make sure that no users can create clusters in your workspace and that all databricks.Cluster resources have approximately the following configuration:
*
* ```typescript
* import * as pulumi from "@pulumi/pulumi";
* import * as databricks from "@pulumi/databricks";
*
* const clusterWithTableAccessControl = new databricks.Cluster("cluster_with_table_access_control", {sparkConf: {
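 *         // Enable table ACLs and restrict the REPL to Python and SQL so the ACLs cannot be bypassed from other languages.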
* "spark.databricks.acl.dfAclsEnabled": "true",
* "spark.databricks.repl.allowedLanguages": "python,sql",
* }});
* ```
*
* > This resource can only be used with a workspace-level provider!
*
 * All permissions for a securable must be defined in a single resource; otherwise Pulumi cannot guarantee prevention of configuration drift.
*
* ## Example Usage
*
 * The following resource definition will enforce access control on a table by executing the following SQL queries on a special auto-terminating cluster that it creates for this operation:
*
* * ``` SHOW GRANT ON TABLE `default`.`foo` ```
* * ```REVOKE ALL PRIVILEGES ON TABLE `default`.`foo` FROM ... every group and user that has access to it ...```
* * ``` GRANT MODIFY, SELECT ON TABLE `default`.`foo` TO `serge@example.com` ```
* * ``` GRANT SELECT ON TABLE `default`.`foo` TO `special group` ```
*
* ```typescript
* import * as pulumi from "@pulumi/pulumi";
* import * as databricks from "@pulumi/databricks";
*
* const fooTable = new databricks.SqlPermissions("foo_table", {
* table: "foo",
* privilegeAssignments: [
* {
* principal: "serge@example.com",
* privileges: [
* "SELECT",
* "MODIFY",
* ],
* },
* {
* principal: "special group",
* privileges: ["SELECT"],
* },
* ],
* });
* ```
*
* ## Related Resources
*
* The following resources are often used in the same context:
*
* * End to end workspace management guide.
* * databricks.Group to manage [Account-level](https://docs.databricks.com/aws/en/admin/users-groups/groups) or [Workspace-level](https://docs.databricks.com/aws/en/admin/users-groups/workspace-local-groups) groups.
* * databricks.Grants to manage data access in Unity Catalog.
* * databricks.Permissions to manage [access control](https://docs.databricks.com/security/access-control/index.html) in Databricks workspace.
 * * databricks.User to [manage users](https://docs.databricks.com/administration-guide/users-groups/users.html), which can be added to databricks.Group within the workspace.
*
* ## Import
*
* The resource can be imported using a synthetic identifier. Examples of valid synthetic identifiers are:
*
 * * `table/default.foo` - table `foo` in the `default` database. The database part is always mandatory.
*
* * `view/bar.foo` - view `foo` in `bar` database.
*
* * `database/bar` - `bar` database.
*
* * `catalog/` - entire catalog. `/` suffix is mandatory.
*
* * `any file/` - direct access to any file. `/` suffix is mandatory.
*
* * `anonymous function/` - anonymous function. `/` suffix is mandatory.
*
 * ```hcl
 * import {
 *   to = databricks_sql_permissions.foo
 *   id = "/<object-type>/<object-name>"
 * }
 * ```
 *
 * Alternatively, the resource can be imported using the `pulumi import` command:
*
* ```sh
* $ pulumi import databricks:index/sqlPermissions:SqlPermissions foo /<object-type>/<object-name>
* ```
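 *
 * For example, filling in that template for the `default.foo` table from the example above (the `foo_table` resource name is illustrative):
 *
 * ```sh
 * $ pulumi import databricks:index/sqlPermissions:SqlPermissions foo_table /table/default.foo
 * ```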
*/
class SqlPermissions extends pulumi.CustomResource {
/**
* Get an existing SqlPermissions resource's state with the given name, ID, and optional extra
* properties used to qualify the lookup.
*
* @param name The _unique_ name of the resulting resource.
* @param id The _unique_ provider ID of the resource to lookup.
* @param state Any extra arguments used during the lookup.
* @param opts Optional settings to control the behavior of the CustomResource.
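 *
 * @example
 * A minimal sketch of looking up previously imported permissions (the resource name and ID are illustrative):
 *
 * ```typescript
 * const existing = databricks.SqlPermissions.get("foo_table", "/table/default.foo");
 * ```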
*/
static get(name, id, state, opts) {
return new SqlPermissions(name, state, { ...opts, id: id });
}
/**
* Returns true if the given object is an instance of SqlPermissions. This is designed to work even
* when multiple copies of the Pulumi SDK have been loaded into the same process.
*/
static isInstance(obj) {
if (obj === undefined || obj === null) {
return false;
}
return obj['__pulumiType'] === SqlPermissions.__pulumiType;
}
constructor(name, argsOrState, opts) {
let resourceInputs = {};
opts = opts || {};
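        // An explicit ID means an existing resource is being read or adopted: populate inputs from its state.
        // Otherwise, treat the second constructor argument as creation arguments.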
if (opts.id) {
const state = argsOrState;
resourceInputs["anonymousFunction"] = state?.anonymousFunction;
resourceInputs["anyFile"] = state?.anyFile;
resourceInputs["catalog"] = state?.catalog;
resourceInputs["clusterId"] = state?.clusterId;
resourceInputs["database"] = state?.database;
resourceInputs["privilegeAssignments"] = state?.privilegeAssignments;
resourceInputs["table"] = state?.table;
resourceInputs["view"] = state?.view;
}
else {
const args = argsOrState;
resourceInputs["anonymousFunction"] = args?.anonymousFunction;
resourceInputs["anyFile"] = args?.anyFile;
resourceInputs["catalog"] = args?.catalog;
resourceInputs["clusterId"] = args?.clusterId;
resourceInputs["database"] = args?.database;
resourceInputs["privilegeAssignments"] = args?.privilegeAssignments;
resourceInputs["table"] = args?.table;
resourceInputs["view"] = args?.view;
}
opts = pulumi.mergeOptions(utilities.resourceOptsDefaults(), opts);
super(SqlPermissions.__pulumiType, name, resourceInputs, opts);
}
}
exports.SqlPermissions = SqlPermissions;
/** @internal */
SqlPermissions.__pulumiType = 'databricks:index/sqlPermissions:SqlPermissions';
//# sourceMappingURL=sqlPermissions.js.map