@pulumi/databricks
A Pulumi package for creating and managing Databricks cloud resources.
// *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
// *** Do not edit by hand unless you're certain you know what you are doing! ***
Object.defineProperty(exports, "__esModule", { value: true });
exports.Job = void 0;
const pulumi = require("@pulumi/pulumi");
const utilities = require("./utilities");
/**
* The `databricks.Job` resource allows you to manage [Databricks Jobs](https://docs.databricks.com/jobs.html) to run non-interactive code in a databricks.Cluster.
*
* ## Example Usage
*
* > In Pulumi configuration, it is recommended to define tasks in alphabetical order of their `taskKey` arguments, so that you get a consistent and readable diff. Whenever tasks are added or removed, or a `taskKey` is renamed, you will observe a change in the majority of tasks. This is because the current version of the provider treats `task` blocks as an ordered list. Alternatively, the `task` blocks could have been treated as an unordered set, but then end-users would see the entire block replaced upon a change in a single property of one task.
*
* It is possible to create [a Databricks job](https://docs.databricks.com/data-engineering/jobs/jobs-user-guide.html) using `task` blocks. A single task is defined with a `task` block containing one of the `*Task` arguments (for example `notebookTask` or `sparkJarTask`), a `taskKey`, and the additional arguments described below.
*
* ```typescript
* import * as pulumi from "@pulumi/pulumi";
* import * as databricks from "@pulumi/databricks";
*
* // The original example references several pre-existing objects. The two lookups
* // below are a minimal sketch of how `latest` and `smallest` could be resolved;
* // `shared`, `thisDatabricksNotebook`, and `thisDatabricksPipeline` are assumed
* // to be an existing cluster, notebook, and pipeline defined elsewhere.
* const latest = databricks.getSparkVersionOutput({});
* const smallest = databricks.getNodeTypeOutput({
*     localDisk: true,
* });
*
* const _this = new databricks.Job("this", {
*     name: "Job with multiple tasks",
*     description: "This job executes multiple tasks on a shared job cluster, which will be provisioned as part of execution, and terminated once all tasks are finished.",
*     jobClusters: [{
*         jobClusterKey: "j",
*         newCluster: {
*             numWorkers: 2,
*             sparkVersion: latest.id,
*             nodeTypeId: smallest.id,
*         },
*     }],
*     tasks: [
*         {
*             taskKey: "a",
*             newCluster: {
*                 numWorkers: 1,
*                 sparkVersion: latest.id,
*                 nodeTypeId: smallest.id,
*             },
*             notebookTask: {
*                 notebookPath: thisDatabricksNotebook.path,
*             },
*         },
*         {
*             taskKey: "b",
*             dependsOns: [{
*                 taskKey: "a",
*             }],
*             existingClusterId: shared.id,
*             sparkJarTask: {
*                 mainClassName: "com.acme.data.Main",
*             },
*         },
*         {
*             taskKey: "c",
*             jobClusterKey: "j",
*             notebookTask: {
*                 notebookPath: thisDatabricksNotebook.path,
*             },
*         },
*         {
*             taskKey: "d",
*             pipelineTask: {
*                 pipelineId: thisDatabricksPipeline.id,
*             },
*         },
*     ],
* });
* ```
*
* ## Access Control
*
* By default, all users can create and modify jobs unless an administrator [enables jobs access control](https://docs.databricks.com/administration-guide/access-control/jobs-acl.html). With jobs access control, individual permissions determine a user’s abilities.
*
* databricks.Permissions can control which groups or individual users are granted the *Can View*, *Can Manage Run*, and *Can Manage* permission levels (see the sketch after this list).
* * databricks.ClusterPolicy can control which kinds of clusters users can create for jobs.
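*
* As a sketch of the first bullet, the following grants an assumed group named
* "Data Analysts" the ability to manage runs of the job defined above. The group
* name and permission level shown here are illustrative placeholders, not part of
* the job example itself.
*
* ```typescript
* const jobUsage = new databricks.Permissions("jobUsage", {
*     // ID of the job defined in the example above.
*     jobId: _this.id,
*     accessControls: [{
*         groupName: "Data Analysts",
*         permissionLevel: "CAN_MANAGE_RUN",
*     }],
* });
* ```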
*
* ## Import
*
* The `databricks.Job` resource can be imported using the ID of the job:
*
* ```sh
* $ pulumi import databricks:index/job:Job this <job-id>
* ```
*/
class Job extends pulumi.CustomResource {
    /**
     * Get an existing Job resource's state with the given name, ID, and optional extra
     * properties used to qualify the lookup.
     *
     * @param name The _unique_ name of the resulting resource.
     * @param id The _unique_ provider ID of the resource to lookup.
     * @param state Any extra arguments used during the lookup.
     * @param opts Optional settings to control the behavior of the CustomResource.
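     *
     * @example
     * // Minimal usage sketch; the ID "123" is a hypothetical placeholder for a
     * // real job ID in the target workspace.
     * // const existing = databricks.Job.get("existing", "123");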
     */
    static get(name, id, state, opts) {
        return new Job(name, state, Object.assign(Object.assign({}, opts), { id: id }));
    }
    /**
     * Returns true if the given object is an instance of Job. This is designed to work even
     * when multiple copies of the Pulumi SDK have been loaded into the same process.
     */
    static isInstance(obj) {
        if (obj === undefined || obj === null) {
            return false;
        }
        return obj['__pulumiType'] === Job.__pulumiType;
    }
    constructor(name, argsOrState, opts) {
        let resourceInputs = {};
        opts = opts || {};
        if (opts.id) {
            const state = argsOrState;
            resourceInputs["alwaysRunning"] = state ? state.alwaysRunning : undefined;
            resourceInputs["budgetPolicyId"] = state ? state.budgetPolicyId : undefined;
            resourceInputs["continuous"] = state ? state.continuous : undefined;
            resourceInputs["controlRunState"] = state ? state.controlRunState : undefined;
            resourceInputs["dbtTask"] = state ? state.dbtTask : undefined;
            resourceInputs["deployment"] = state ? state.deployment : undefined;
            resourceInputs["description"] = state ? state.description : undefined;
            resourceInputs["editMode"] = state ? state.editMode : undefined;
            resourceInputs["emailNotifications"] = state ? state.emailNotifications : undefined;
            resourceInputs["environments"] = state ? state.environments : undefined;
            resourceInputs["existingClusterId"] = state ? state.existingClusterId : undefined;
            resourceInputs["format"] = state ? state.format : undefined;
            resourceInputs["gitSource"] = state ? state.gitSource : undefined;
            resourceInputs["health"] = state ? state.health : undefined;
            resourceInputs["jobClusters"] = state ? state.jobClusters : undefined;
            resourceInputs["libraries"] = state ? state.libraries : undefined;
            resourceInputs["maxConcurrentRuns"] = state ? state.maxConcurrentRuns : undefined;
            resourceInputs["maxRetries"] = state ? state.maxRetries : undefined;
            resourceInputs["minRetryIntervalMillis"] = state ? state.minRetryIntervalMillis : undefined;
            resourceInputs["name"] = state ? state.name : undefined;
            resourceInputs["newCluster"] = state ? state.newCluster : undefined;
            resourceInputs["notebookTask"] = state ? state.notebookTask : undefined;
            resourceInputs["notificationSettings"] = state ? state.notificationSettings : undefined;
            resourceInputs["parameters"] = state ? state.parameters : undefined;
            resourceInputs["performanceTarget"] = state ? state.performanceTarget : undefined;
            resourceInputs["pipelineTask"] = state ? state.pipelineTask : undefined;
            resourceInputs["pythonWheelTask"] = state ? state.pythonWheelTask : undefined;
            resourceInputs["queue"] = state ? state.queue : undefined;
            resourceInputs["retryOnTimeout"] = state ? state.retryOnTimeout : undefined;
            resourceInputs["runAs"] = state ? state.runAs : undefined;
            resourceInputs["runJobTask"] = state ? state.runJobTask : undefined;
            resourceInputs["schedule"] = state ? state.schedule : undefined;
            resourceInputs["sparkJarTask"] = state ? state.sparkJarTask : undefined;
            resourceInputs["sparkPythonTask"] = state ? state.sparkPythonTask : undefined;
            resourceInputs["sparkSubmitTask"] = state ? state.sparkSubmitTask : undefined;
            resourceInputs["tags"] = state ? state.tags : undefined;
            resourceInputs["tasks"] = state ? state.tasks : undefined;
            resourceInputs["timeoutSeconds"] = state ? state.timeoutSeconds : undefined;
            resourceInputs["trigger"] = state ? state.trigger : undefined;
            resourceInputs["url"] = state ? state.url : undefined;
            resourceInputs["webhookNotifications"] = state ? state.webhookNotifications : undefined;
        }
        else {
            const args = argsOrState;
            resourceInputs["alwaysRunning"] = args ? args.alwaysRunning : undefined;
            resourceInputs["budgetPolicyId"] = args ? args.budgetPolicyId : undefined;
            resourceInputs["continuous"] = args ? args.continuous : undefined;
            resourceInputs["controlRunState"] = args ? args.controlRunState : undefined;
            resourceInputs["dbtTask"] = args ? args.dbtTask : undefined;
            resourceInputs["deployment"] = args ? args.deployment : undefined;
            resourceInputs["description"] = args ? args.description : undefined;
            resourceInputs["editMode"] = args ? args.editMode : undefined;
            resourceInputs["emailNotifications"] = args ? args.emailNotifications : undefined;
            resourceInputs["environments"] = args ? args.environments : undefined;
            resourceInputs["existingClusterId"] = args ? args.existingClusterId : undefined;
            resourceInputs["format"] = args ? args.format : undefined;
            resourceInputs["gitSource"] = args ? args.gitSource : undefined;
            resourceInputs["health"] = args ? args.health : undefined;
            resourceInputs["jobClusters"] = args ? args.jobClusters : undefined;
            resourceInputs["libraries"] = args ? args.libraries : undefined;
            resourceInputs["maxConcurrentRuns"] = args ? args.maxConcurrentRuns : undefined;
            resourceInputs["maxRetries"] = args ? args.maxRetries : undefined;
            resourceInputs["minRetryIntervalMillis"] = args ? args.minRetryIntervalMillis : undefined;
            resourceInputs["name"] = args ? args.name : undefined;
            resourceInputs["newCluster"] = args ? args.newCluster : undefined;
            resourceInputs["notebookTask"] = args ? args.notebookTask : undefined;
            resourceInputs["notificationSettings"] = args ? args.notificationSettings : undefined;
            resourceInputs["parameters"] = args ? args.parameters : undefined;
            resourceInputs["performanceTarget"] = args ? args.performanceTarget : undefined;
            resourceInputs["pipelineTask"] = args ? args.pipelineTask : undefined;
            resourceInputs["pythonWheelTask"] = args ? args.pythonWheelTask : undefined;
            resourceInputs["queue"] = args ? args.queue : undefined;
            resourceInputs["retryOnTimeout"] = args ? args.retryOnTimeout : undefined;
            resourceInputs["runAs"] = args ? args.runAs : undefined;
            resourceInputs["runJobTask"] = args ? args.runJobTask : undefined;
            resourceInputs["schedule"] = args ? args.schedule : undefined;
            resourceInputs["sparkJarTask"] = args ? args.sparkJarTask : undefined;
            resourceInputs["sparkPythonTask"] = args ? args.sparkPythonTask : undefined;
            resourceInputs["sparkSubmitTask"] = args ? args.sparkSubmitTask : undefined;
            resourceInputs["tags"] = args ? args.tags : undefined;
            resourceInputs["tasks"] = args ? args.tasks : undefined;
            resourceInputs["timeoutSeconds"] = args ? args.timeoutSeconds : undefined;
            resourceInputs["trigger"] = args ? args.trigger : undefined;
            resourceInputs["webhookNotifications"] = args ? args.webhookNotifications : undefined;
            resourceInputs["url"] = undefined /*out*/;
        }
        opts = pulumi.mergeOptions(utilities.resourceOptsDefaults(), opts);
        super(Job.__pulumiType, name, resourceInputs, opts);
    }
}
exports.Job = Job;
/** @internal */
Job.__pulumiType = 'databricks:index/job:Job';
//# sourceMappingURL=job.js.map