// @pulumi/databricks
// A Pulumi package for creating and managing databricks cloud resources.
"use strict";
// *** WARNING: this file was generated by pulumi-language-nodejs. ***
// *** Do not edit by hand unless you're certain you know what you are doing! ***
Object.defineProperty(exports, "__esModule", { value: true });
exports.Pipeline = void 0;
const pulumi = require("@pulumi/pulumi");
const utilities = require("./utilities");
/**
* Use `databricks.Pipeline` to deploy [Lakeflow Declarative Pipelines](https://docs.databricks.com/aws/en/dlt).
*
* > This resource can only be used with a workspace-level provider!
*
* ## Example Usage
*
* ```typescript
* import * as pulumi from "@pulumi/pulumi";
* import * as databricks from "@pulumi/databricks";
*
* const ldpDemo = new databricks.Notebook("ldp_demo", {});
* const ldpDemoRepo = new databricks.Repo("ldp_demo", {});
* const _this = new databricks.Pipeline("this", {
* name: "Pipeline Name",
* catalog: "main",
* schema: "ldp_demo",
* configuration: {
* key1: "value1",
* key2: "value2",
* },
* clusters: [
* {
* label: "default",
* numWorkers: 2,
* customTags: {
* cluster_type: "default",
* },
* },
* {
* label: "maintenance",
* numWorkers: 1,
* customTags: {
* cluster_type: "maintenance",
* },
* },
* ],
* libraries: [
* {
* notebook: {
* path: ldpDemo.id,
* },
* },
* {
* file: {
* path: pulumi.interpolate`${ldpDemoRepo.path}/pipeline.sql`,
* },
* },
* {
* glob: {
* include: pulumi.interpolate`${ldpDemoRepo.path}/subfolder/**`,
* },
* },
* ],
* continuous: false,
* notifications: [{
* emailRecipients: [
* "user@domain.com",
* "user1@domain.com",
* ],
* alerts: [
* "on-update-failure",
* "on-update-fatal-failure",
* "on-update-success",
* "on-flow-failure",
* ],
* }],
* });
* ```
*
* ## Related Resources
*
* The following resources are often used in the same context:
*
* * End to end workspace management guide.
* * databricks.getPipelines to retrieve [Lakeflow Declarative Pipelines](https://docs.databricks.com/aws/en/dlt) data.
* * databricks.Cluster to create [Databricks Clusters](https://docs.databricks.com/clusters/index.html).
* * databricks.Job to manage [Databricks Jobs](https://docs.databricks.com/jobs.html) to run non-interactive code in a databricks_cluster.
* * databricks.Notebook to manage [Databricks Notebooks](https://docs.databricks.com/notebooks/index.html).
*
* ## Import
*
 * The resource pipeline can be imported using the ID of the pipeline:
*
* hcl
*
* import {
*
* to = databricks_pipeline.this
*
* id = "<pipeline-id>"
*
* }
*
 * Alternatively, import the resource using the `pulumi import` command:
*
* bash
*
* ```sh
* $ pulumi import databricks:index/pipeline:Pipeline this <pipeline-id>
* ```
*/
class Pipeline extends pulumi.CustomResource {
    /**
     * Look up an existing Pipeline resource by name and provider ID, optionally
     * seeding it with previously captured state.
     *
     * @param name The _unique_ name of the resulting resource.
     * @param id The _unique_ provider ID of the resource to lookup.
     * @param state Any extra arguments used during the lookup.
     * @param opts Optional settings to control the behavior of the CustomResource.
     */
    static get(name, id, state, opts) {
        const lookupOpts = { ...opts, id: id };
        return new Pipeline(name, state, lookupOpts);
    }
    /**
     * Returns true if the given object is an instance of Pipeline. This is designed to work even
     * when multiple copies of the Pulumi SDK have been loaded into the same process.
     */
    static isInstance(obj) {
        // Loose equality intentionally matches both null and undefined.
        if (obj == null) {
            return false;
        }
        return obj['__pulumiType'] === Pipeline.__pulumiType;
    }
    /**
     * Create a Pipeline resource, or adopt one from saved state.
     *
     * @param name The _unique_ name of the resource.
     * @param argsOrState Input properties (create) or prior state (when `opts.id` is set).
     * @param opts Options to control this resource's behavior.
     */
    constructor(name, argsOrState, opts) {
        opts = opts || {};
        const resourceInputs = {};
        // The exact same property set is forwarded whether `argsOrState` holds
        // fresh args (create path) or recorded state (lookup path, opts.id set),
        // so a single copy loop covers both cases.
        const inputKeys = [
            "allowDuplicateNames",
            "budgetPolicyId",
            "catalog",
            "cause",
            "channel",
            "clusterId",
            "clusters",
            "configuration",
            "continuous",
            "creatorUserName",
            "deployment",
            "development",
            "edition",
            "environment",
            "eventLog",
            "expectedLastModified",
            "filters",
            "gatewayDefinition",
            "health",
            "ingestionDefinition",
            "lastModified",
            "latestUpdates",
            "libraries",
            "name",
            "notifications",
            "photon",
            "restartWindow",
            "rootPath",
            "runAs",
            "runAsUserName",
            "schema",
            "serverless",
            "state",
            "storage",
            "tags",
            "target",
            "trigger",
            "url",
        ];
        for (const key of inputKeys) {
            // `?.` preserves the original behavior: a missing bag yields undefined.
            resourceInputs[key] = argsOrState?.[key];
        }
        opts = pulumi.mergeOptions(utilities.resourceOptsDefaults(), opts);
        super(Pipeline.__pulumiType, name, resourceInputs, opts);
    }
}
exports.Pipeline = Pipeline;
/** @internal */
Pipeline.__pulumiType = 'databricks:index/pipeline:Pipeline';
//# sourceMappingURL=pipeline.js.map