// @pulumi/databricks — A Pulumi package for creating and managing Databricks cloud resources.
// *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
// *** Do not edit by hand unless you're certain you know what you are doing! ***
Object.defineProperty(exports, "__esModule", { value: true });
exports.Library = void 0;
const pulumi = require("@pulumi/pulumi");
const utilities = require("./utilities");
/**
* Installs a [library](https://docs.databricks.com/libraries/index.html) on databricks_cluster. Each different type of library has a slightly different syntax. It's possible to set only one type of library within one resource. Otherwise, the plan will fail with an error.
*
* > `databricks.Library` resource would always start the associated cluster if it's not running, so make sure to have auto-termination configured. It's not possible to atomically change the version of the same library without cluster restart. Libraries are fully removed from the cluster only after restart.
*
* ## Plugin Framework Migration
*
 * The library resource has been migrated from sdkv2 to plugin framework. If you encounter any problem with this resource and suspect it is due to the migration, you can fallback to sdkv2 by setting the environment variable in the following way `export USE_SDK_V2_RESOURCES="databricks.Library"`.
*
* ## Installing library on all clusters
*
* You can install libraries on all clusters with the help of databricks.getClusters data resource:
*
* ```typescript
* import * as pulumi from "@pulumi/pulumi";
* import * as databricks from "@pulumi/databricks";
*
* export = async () => {
* const all = await databricks.getClusters({});
* const cli: databricks.Library[] = [];
* for (const range of all.ids.map((v, k) => ({key: k, value: v}))) {
* cli.push(new databricks.Library(`cli-${range.key}`, {
* clusterId: range.key,
* pypi: {
* "package": "databricks-cli",
* },
* }));
* }
* }
* ```
*
* ## Java/Scala Maven
*
* Installing artifacts from Maven repository. You can also optionally specify a `repo` parameter for a custom Maven-style repository, that should be accessible without any authentication. Maven libraries are resolved in Databricks Control Plane, so repo should be accessible from it. It can even be properly configured [maven s3 wagon](https://github.com/seahen/maven-s3-wagon), [AWS CodeArtifact](https://aws.amazon.com/codeartifact/) or [Azure Artifacts](https://azure.microsoft.com/en-us/services/devops/artifacts/).
*
* ```typescript
* import * as pulumi from "@pulumi/pulumi";
* import * as databricks from "@pulumi/databricks";
*
* const deequ = new databricks.Library("deequ", {
* clusterId: _this.id,
* maven: {
* coordinates: "com.amazon.deequ:deequ:1.0.4",
* exclusions: ["org.apache.avro:avro"],
* },
* });
* ```
*
* ## Python PyPI
*
* Installing Python PyPI artifacts. You can optionally also specify the `repo` parameter for a custom PyPI mirror, which should be accessible without any authentication for the network that cluster runs in.
*
* > `repo` host should be accessible from the Internet by Databricks control plane. If connectivity to custom PyPI repositories is required, please modify cluster-node `/etc/pip.conf` through databricks_global_init_script.
*
* ```typescript
* import * as pulumi from "@pulumi/pulumi";
* import * as databricks from "@pulumi/databricks";
*
* const fbprophet = new databricks.Library("fbprophet", {
* clusterId: _this.id,
* pypi: {
* "package": "fbprophet==0.6",
* },
* });
* ```
*
* ## Python requirements files
*
* Installing Python libraries listed in the `requirements.txt` file. Only Workspace paths and Unity Catalog Volumes paths are supported. Requires a cluster with DBR 15.0+.
*
* ```typescript
* import * as pulumi from "@pulumi/pulumi";
* import * as databricks from "@pulumi/databricks";
*
* const libraries = new databricks.Library("libraries", {
* clusterId: _this.id,
* requirements: "/Workspace/path/to/requirements.txt",
* });
* ```
*
 * ## R CRAN
 *
 * Installing artifacts from CRAN. You can also optionally specify a `repo` parameter for a custom CRAN mirror.
*
* ```typescript
* import * as pulumi from "@pulumi/pulumi";
* import * as databricks from "@pulumi/databricks";
*
* const rkeops = new databricks.Library("rkeops", {
* clusterId: _this.id,
* cran: {
* "package": "rkeops",
* },
* });
* ```
*
* ## Related Resources
*
* The following resources are often used in the same context:
*
* * End to end workspace management guide.
* * databricks.getClusters data to retrieve a list of databricks.Cluster ids.
* * databricks.Cluster to create [Databricks Clusters](https://docs.databricks.com/clusters/index.html).
* * databricks.ClusterPolicy to create a databricks.Cluster policy, which limits the ability to create clusters based on a set of rules.
* * databricks.DbfsFile data to get file content from [Databricks File System (DBFS)](https://docs.databricks.com/data/databricks-file-system.html).
 * * databricks.getDbfsFilePaths data to get the list of file names from [Databricks File System (DBFS)](https://docs.databricks.com/data/databricks-file-system.html).
* * databricks.DbfsFile to manage relatively small files on [Databricks File System (DBFS)](https://docs.databricks.com/data/databricks-file-system.html).
* * databricks.GlobalInitScript to manage [global init scripts](https://docs.databricks.com/clusters/init-scripts.html#global-init-scripts), which are run on all databricks.Cluster and databricks_job.
* * databricks.Job to manage [Databricks Jobs](https://docs.databricks.com/jobs.html) to run non-interactive code in a databricks_cluster.
* * databricks.Mount to [mount your cloud storage](https://docs.databricks.com/data/databricks-file-system.html#mount-object-storage-to-dbfs) on `dbfs:/mnt/name`.
* * databricks.Pipeline to deploy [Delta Live Tables](https://docs.databricks.com/data-engineering/delta-live-tables/index.html).
* * databricks.Repo to manage [Databricks Repos](https://docs.databricks.com/repos.html).
*
* ## Import
*
* !> Importing this resource is not currently supported.
*/
class Library extends pulumi.CustomResource {
    /**
     * Get an existing Library resource's state with the given name, ID, and optional extra
     * properties used to qualify the lookup.
     *
     * @param name The _unique_ name of the resulting resource.
     * @param id The _unique_ provider ID of the resource to lookup.
     * @param state Any extra arguments used during the lookup.
     * @param opts Optional settings to control the behavior of the CustomResource.
     */
    static get(name, id, state, opts) {
        // Re-instantiate in "read" mode by threading the provider ID through the options.
        return new Library(name, state, Object.assign({}, opts, { id: id }));
    }
    /**
     * Returns true if the given object is an instance of Library. This is designed to work even
     * when multiple copies of the Pulumi SDK have been loaded into the same process.
     */
    static isInstance(obj) {
        if (obj == null) {
            return false;
        }
        // Compare the type token rather than using `instanceof`, which breaks when
        // duplicate copies of the SDK are loaded side by side.
        return obj['__pulumiType'] === Library.__pulumiType;
    }
    /**
     * Create a Library resource with the given unique name, arguments, and options, or
     * rehydrate one from existing provider state when `opts.id` is supplied.
     *
     * @param name The _unique_ name of the resource.
     * @param argsOrState The arguments to use to populate this resource's properties, or
     *        prior state when looking up an existing resource.
     * @param opts Optional settings to control the behavior of the CustomResource.
     */
    constructor(name, argsOrState, opts) {
        const resourceInputs = {};
        opts = opts || {};
        // The full set of input properties this resource accepts; exactly one of the
        // library-type properties (cran/egg/jar/maven/pypi/requirements/whl) is expected.
        const props = ["clusterId", "cran", "egg", "jar", "libraryId", "maven", "pypi", "requirements", "whl"];
        if (opts.id) {
            // Lookup path: hydrate the inputs from whatever prior state the caller supplied.
            const state = argsOrState;
            for (const prop of props) {
                resourceInputs[prop] = state ? state[prop] : undefined;
            }
        }
        else {
            // Creation path: `clusterId` is required unless we are rehydrating from a URN.
            const args = argsOrState;
            if ((!args || args.clusterId === undefined) && !opts.urn) {
                throw new Error("Missing required property 'clusterId'");
            }
            for (const prop of props) {
                resourceInputs[prop] = args ? args[prop] : undefined;
            }
        }
        opts = pulumi.mergeOptions(utilities.resourceOptsDefaults(), opts);
        super(Library.__pulumiType, name, resourceInputs, opts);
    }
}
exports.Library = Library;
/** @internal */
// Type token consumed by `Library.isInstance` to identify instances across SDK copies.
Library.__pulumiType = 'databricks:index/library:Library';
//# sourceMappingURL=library.js.map