@pulumi/digitalocean
A Pulumi package for creating and managing DigitalOcean cloud resources.
"use strict";
// *** WARNING: this file was generated by pulumi-language-nodejs. ***
// *** Do not edit by hand unless you're certain you know what you are doing! ***
Object.defineProperty(exports, "__esModule", { value: true });
exports.DatabaseKafkaConfig = void 0;
const pulumi = require("@pulumi/pulumi");
const utilities = require("./utilities");
/**
 * Provides a virtual resource that can be used to change advanced configuration
 * options for a DigitalOcean managed Kafka database cluster.
 *
 * > **Note** Kafka configurations are only removed from state when destroyed. The remote configuration is not unset.
 *
 * ## Example Usage
 *
 * ```typescript
 * import * as pulumi from "@pulumi/pulumi";
 * import * as digitalocean from "@pulumi/digitalocean";
 *
 * const exampleDatabaseCluster = new digitalocean.DatabaseCluster("example", {
 *     name: "example-kafka-cluster",
 *     engine: "kafka",
 *     version: "3.7",
 *     size: digitalocean.DatabaseSlug.DB_1VPCU1GB,
 *     region: digitalocean.Region.NYC3,
 *     nodeCount: 3,
 * });
 * const example = new digitalocean.DatabaseKafkaConfig("example", {
 *     clusterId: exampleDatabaseCluster.id,
 *     groupInitialRebalanceDelayMs: 3000,
 *     groupMinSessionTimeoutMs: 6000,
 *     groupMaxSessionTimeoutMs: 1800000,
 *     messageMaxBytes: 1048588,
 *     logCleanerDeleteRetentionMs: 86400000,
 *     logCleanerMinCompactionLagMs: "0",
 *     logFlushIntervalMs: "9223372036854775807",
 *     logIndexIntervalBytes: 4096,
 *     logMessageDownconversionEnable: true,
 *     logMessageTimestampDifferenceMaxMs: "9223372036854775807",
 *     logPreallocate: false,
 *     logRetentionBytes: "-1",
 *     logRetentionHours: 168,
 *     logRetentionMs: "604800000",
 *     logRollJitterMs: "0",
 *     logSegmentDeleteDelayMs: 60000,
 *     autoCreateTopicsEnable: true,
 * });
 * ```
 *
 * ## Import
 *
 * A Kafka database cluster's configuration can be imported using the `id` of the parent cluster, e.g.
 *
 * ```sh
 * $ pulumi import digitalocean:index/databaseKafkaConfig:DatabaseKafkaConfig example 4b62829a-9c42-465b-aaa3-84051048e712
 * ```
 */
class DatabaseKafkaConfig extends pulumi.CustomResource {
    /**
     * Get an existing DatabaseKafkaConfig resource's state with the given name, ID, and optional extra
     * properties used to qualify the lookup.
     *
     * @param name The _unique_ name of the resulting resource.
     * @param id The _unique_ provider ID of the resource to lookup.
     * @param state Any extra arguments used during the lookup.
     * @param opts Optional settings to control the behavior of the CustomResource.
     */
    static get(name, id, state, opts) {
        return new DatabaseKafkaConfig(name, state, { ...opts, id: id });
    }
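    // Usage sketch (not part of the generated file): adopt an already-provisioned
    // configuration into a program. The name "adopted" is illustrative, and the UUID
    // reuses the sample value from the Import docs above, not a live cluster.
    //
    //     const adopted = DatabaseKafkaConfig.get("adopted", "4b62829a-9c42-465b-aaa3-84051048e712");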
    /**
     * Returns true if the given object is an instance of DatabaseKafkaConfig. This is designed to work even
     * when multiple copies of the Pulumi SDK have been loaded into the same process.
     */
    static isInstance(obj) {
        if (obj === undefined || obj === null) {
            return false;
        }
        return obj['__pulumiType'] === DatabaseKafkaConfig.__pulumiType;
    }
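    // Usage sketch (not part of the generated file): prefer isInstance() over `instanceof`
    // when objects may cross between copies of the SDK loaded by different packages.
    //
    //     if (DatabaseKafkaConfig.isInstance(res)) { /* res.clusterId is safe to read */ }
    /**
     * Create a DatabaseKafkaConfig resource with the given unique name, arguments, and options.
     *
     * @param name The _unique_ name of the resource.
     * @param argsOrState The arguments used to populate this resource's properties or, when
     *        `opts.id` is set (as in the static `get` above), the existing state to look up.
     * @param opts A bag of options that control this resource's behavior.
     */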
    constructor(name, argsOrState, opts) {
        let resourceInputs = {};
        opts = opts || {};
        if (opts.id) {
            // An ID was supplied (e.g. via the static get() above): treat the second
            // argument as existing state and rehydrate the resource from it.
            const state = argsOrState;
            resourceInputs["autoCreateTopicsEnable"] = state?.autoCreateTopicsEnable;
            resourceInputs["clusterId"] = state?.clusterId;
            resourceInputs["groupInitialRebalanceDelayMs"] = state?.groupInitialRebalanceDelayMs;
            resourceInputs["groupMaxSessionTimeoutMs"] = state?.groupMaxSessionTimeoutMs;
            resourceInputs["groupMinSessionTimeoutMs"] = state?.groupMinSessionTimeoutMs;
            resourceInputs["logCleanerDeleteRetentionMs"] = state?.logCleanerDeleteRetentionMs;
            resourceInputs["logCleanerMinCompactionLagMs"] = state?.logCleanerMinCompactionLagMs;
            resourceInputs["logFlushIntervalMs"] = state?.logFlushIntervalMs;
            resourceInputs["logIndexIntervalBytes"] = state?.logIndexIntervalBytes;
            resourceInputs["logMessageDownconversionEnable"] = state?.logMessageDownconversionEnable;
            resourceInputs["logMessageTimestampDifferenceMaxMs"] = state?.logMessageTimestampDifferenceMaxMs;
            resourceInputs["logPreallocate"] = state?.logPreallocate;
            resourceInputs["logRetentionBytes"] = state?.logRetentionBytes;
            resourceInputs["logRetentionHours"] = state?.logRetentionHours;
            resourceInputs["logRetentionMs"] = state?.logRetentionMs;
            resourceInputs["logRollJitterMs"] = state?.logRollJitterMs;
            resourceInputs["logSegmentDeleteDelayMs"] = state?.logSegmentDeleteDelayMs;
            resourceInputs["messageMaxBytes"] = state?.messageMaxBytes;
        }
        else {
            // No ID: this is a new resource, so the second argument holds its input
            // arguments. clusterId is the only required property.
            const args = argsOrState;
            if (args?.clusterId === undefined && !opts.urn) {
                throw new Error("Missing required property 'clusterId'");
            }
            resourceInputs["autoCreateTopicsEnable"] = args?.autoCreateTopicsEnable;
            resourceInputs["clusterId"] = args?.clusterId;
            resourceInputs["groupInitialRebalanceDelayMs"] = args?.groupInitialRebalanceDelayMs;
            resourceInputs["groupMaxSessionTimeoutMs"] = args?.groupMaxSessionTimeoutMs;
            resourceInputs["groupMinSessionTimeoutMs"] = args?.groupMinSessionTimeoutMs;
            resourceInputs["logCleanerDeleteRetentionMs"] = args?.logCleanerDeleteRetentionMs;
            resourceInputs["logCleanerMinCompactionLagMs"] = args?.logCleanerMinCompactionLagMs;
            resourceInputs["logFlushIntervalMs"] = args?.logFlushIntervalMs;
            resourceInputs["logIndexIntervalBytes"] = args?.logIndexIntervalBytes;
            resourceInputs["logMessageDownconversionEnable"] = args?.logMessageDownconversionEnable;
            resourceInputs["logMessageTimestampDifferenceMaxMs"] = args?.logMessageTimestampDifferenceMaxMs;
            resourceInputs["logPreallocate"] = args?.logPreallocate;
            resourceInputs["logRetentionBytes"] = args?.logRetentionBytes;
            resourceInputs["logRetentionHours"] = args?.logRetentionHours;
            resourceInputs["logRetentionMs"] = args?.logRetentionMs;
            resourceInputs["logRollJitterMs"] = args?.logRollJitterMs;
            resourceInputs["logSegmentDeleteDelayMs"] = args?.logSegmentDeleteDelayMs;
            resourceInputs["messageMaxBytes"] = args?.messageMaxBytes;
        }
        opts = pulumi.mergeOptions(utilities.resourceOptsDefaults(), opts);
        super(DatabaseKafkaConfig.__pulumiType, name, resourceInputs, opts);
    }
}
exports.DatabaseKafkaConfig = DatabaseKafkaConfig;
/** @internal */
DatabaseKafkaConfig.__pulumiType = 'digitalocean:index/databaseKafkaConfig:DatabaseKafkaConfig';
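// Pulumi type tokens take the form `<package>:<module>:<type>`; this one identifies the
// DatabaseKafkaConfig type in the `index` module of the `digitalocean` package, and it is
// the same token used in the `pulumi import` command shown in the docs above.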
//# sourceMappingURL=databaseKafkaConfig.js.map