@pulumi/databricks
A Pulumi package for creating and managing databricks cloud resources.
"use strict";
// *** WARNING: this file was generated by pulumi-language-nodejs. ***
// *** Do not edit by hand unless you're certain you know what you are doing! ***
Object.defineProperty(exports, "__esModule", { value: true });
exports.SqlGlobalConfig = void 0;
const pulumi = require("@pulumi/pulumi");
const utilities = require("./utilities");
/**
* This resource configures the security policy, databricks_instance_profile, and [data access properties](https://docs.databricks.com/sql/admin/data-access-configuration.html) for all databricks.SqlEndpoint of a workspace. *Please note that changing parameters of this resource will restart all running databricks_sql_endpoint.* To use this resource you need to be an administrator.
*
* > This resource can only be used with a workspace-level provider!
*
* ## Example Usage
*
* ### AWS example
*
* ```typescript
* import * as pulumi from "@pulumi/pulumi";
* import * as databricks from "@pulumi/databricks";
*
* const _this = new databricks.SqlGlobalConfig("this", {
*     securityPolicy: "DATA_ACCESS_CONTROL",
*     instanceProfileArn: "arn:....",
*     dataAccessConfig: {
*         "spark.sql.session.timeZone": "UTC",
*     },
* });
* ```
*
* ### Azure example
*
* For Azure, use `dataAccessConfig` to provide the service principal configuration. You can use the Databricks SQL Admin Console UI to help generate the right configuration values.
*
* ```typescript
* import * as pulumi from "@pulumi/pulumi";
* import * as databricks from "@pulumi/databricks";
*
* // In this sketch the service principal details are read from Pulumi config;
* // any other source for these four strings works just as well.
* const config = new pulumi.Config();
* const applicationId = config.require("applicationId");
* const tenantId = config.require("tenantId");
* const secretScope = config.require("secretScope");
* const secretKey = config.require("secretKey");
* const _this = new databricks.SqlGlobalConfig("this", {
*     securityPolicy: "DATA_ACCESS_CONTROL",
*     dataAccessConfig: {
*         "spark.hadoop.fs.azure.account.auth.type": "OAuth",
*         "spark.hadoop.fs.azure.account.oauth.provider.type": "org.apache.hadoop.fs.azurebfs.oauth2.ClientCredsTokenProvider",
*         "spark.hadoop.fs.azure.account.oauth2.client.id": applicationId,
*         "spark.hadoop.fs.azure.account.oauth2.client.secret": `{{secrets/${secretScope}/${secretKey}}}`,
*         "spark.hadoop.fs.azure.account.oauth2.client.endpoint": `https://login.microsoftonline.com/${tenantId}/oauth2/token`,
*     },
*     sqlConfigParams: {
*         ANSI_MODE: "true",
*     },
* });
* ```
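*
* ### GCP example
*
* The resource also accepts a `googleServiceAccount` input (see the state mapping in the constructor below). A minimal sketch for a GCP workspace; the service account email is a placeholder:
*
* ```typescript
* import * as pulumi from "@pulumi/pulumi";
* import * as databricks from "@pulumi/databricks";
*
* const _this = new databricks.SqlGlobalConfig("this", {
*     securityPolicy: "DATA_ACCESS_CONTROL",
*     googleServiceAccount: "sql-warehouses@my-project.iam.gserviceaccount.com",
* });
* ```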
*
* ## Related Resources
*
* The following resources are often used in the same context:
*
* * End-to-end workspace management guide.
* * databricks.InstanceProfile to manage AWS EC2 instance profiles that users can use to launch databricks.Cluster and access data, like databricks_mount.
* * databricks.SqlDashboard to manage Databricks SQL [Dashboards](https://docs.databricks.com/sql/user/dashboards/index.html).
* * databricks.SqlEndpoint to manage Databricks SQL [Warehouses](https://docs.databricks.com/sql/admin/sql-endpoints.html).
* * databricks.Grants to manage data access in Unity Catalog.
*
* ## Import
*
* You can import a `databricks_sql_global_config` resource using `global` as the ID. With Terraform 1.5 or later, use an import block like the following:
*
* ```hcl
* import {
*   to = databricks_sql_global_config.this
*   id = "global"
* }
* ```
*
* Alternatively, import using the `pulumi import` command:
*
* ```sh
* $ pulumi import databricks:index/sqlGlobalConfig:SqlGlobalConfig this global
* ```
*/
class SqlGlobalConfig extends pulumi.CustomResource {
/**
* Get an existing SqlGlobalConfig resource's state with the given name, ID, and optional extra
* properties used to qualify the lookup.
*
* @param name The _unique_ name of the resulting resource.
* @param id The _unique_ provider ID of the resource to lookup.
* @param state Any extra arguments used during the lookup.
* @param opts Optional settings to control the behavior of the CustomResource.
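*
* A minimal usage sketch: look up the existing configuration by its fixed ID, `global` (see the Import section above):
*
* ```typescript
* const existing = databricks.SqlGlobalConfig.get("existing", "global");
* ```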
*/
static get(name, id, state, opts) {
return new SqlGlobalConfig(name, state, { ...opts, id: id });
}
/**
* Returns true if the given object is an instance of SqlGlobalConfig. This is designed to work even
* when multiple copies of the Pulumi SDK have been loaded into the same process.
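*
* A small sketch of a type guard (`res` is a hypothetical value of unknown type):
*
* ```typescript
* if (databricks.SqlGlobalConfig.isInstance(res)) {
*     // `res` can safely be treated as a SqlGlobalConfig here.
* }
* ```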
*/
static isInstance(obj) {
if (obj === undefined || obj === null) {
return false;
}
return obj['__pulumiType'] === SqlGlobalConfig.__pulumiType;
}
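/**
* Create a SqlGlobalConfig resource with the given unique name, arguments, and options.
*
* @param name The _unique_ name of the resource.
* @param argsOrState The arguments used to populate this resource's properties, or the existing state to read when `opts.id` is set.
* @param opts A bag of options that control this resource's behavior.
*/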
constructor(name, argsOrState, opts) {
let resourceInputs = {};
opts = opts || {};
if (opts.id) {
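// An ID was supplied, so this is a lookup (e.g. via SqlGlobalConfig.get): hydrate inputs from existing state.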
const state = argsOrState;
resourceInputs["dataAccessConfig"] = state?.dataAccessConfig;
resourceInputs["enableServerlessCompute"] = state?.enableServerlessCompute;
resourceInputs["googleServiceAccount"] = state?.googleServiceAccount;
resourceInputs["instanceProfileArn"] = state?.instanceProfileArn;
resourceInputs["securityPolicy"] = state?.securityPolicy;
resourceInputs["sqlConfigParams"] = state?.sqlConfigParams;
}
else {
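// No ID was supplied, so this is a new resource: populate inputs from the given args.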
const args = argsOrState;
resourceInputs["dataAccessConfig"] = args?.dataAccessConfig;
resourceInputs["enableServerlessCompute"] = args?.enableServerlessCompute;
resourceInputs["googleServiceAccount"] = args?.googleServiceAccount;
resourceInputs["instanceProfileArn"] = args?.instanceProfileArn;
resourceInputs["securityPolicy"] = args?.securityPolicy;
resourceInputs["sqlConfigParams"] = args?.sqlConfigParams;
}
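// Merge package-wide default resource options (such as the provider version) into the caller's options before registering.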
opts = pulumi.mergeOptions(utilities.resourceOptsDefaults(), opts);
super(SqlGlobalConfig.__pulumiType, name, resourceInputs, opts);
}
}
exports.SqlGlobalConfig = SqlGlobalConfig;
/** @internal */
SqlGlobalConfig.__pulumiType = 'databricks:index/sqlGlobalConfig:SqlGlobalConfig';
//# sourceMappingURL=sqlGlobalConfig.js.map