ionoscloud.kafka.Cluster
Manages a Kafka Cluster on IonosCloud.
Example Usage
This resource will create an operational Kafka Cluster. After this section completes, the cluster is ready for use.
import * as pulumi from "@pulumi/pulumi";
import * as ionoscloud from "@ionos-cloud/sdk-pulumi";
// Basic example
const example = new ionoscloud.compute.Datacenter("example", {
name: "example-kafka-datacenter",
location: "de/fra",
});
const exampleLan = new ionoscloud.compute.Lan("example", {
datacenterId: example.id,
"public": false,
name: "example-kafka-lan",
});
const exampleCluster = new ionoscloud.kafka.Cluster("example", {
name: "example-kafka-cluster",
location: "de/fra",
version: "3.7.0",
size: "S",
connections: {
datacenterId: example.id,
lanId: exampleLan.id,
brokerAddresses: [
"192.168.1.101/24",
"192.168.1.102/24",
"192.168.1.103/24",
],
},
});
import pulumi
import pulumi_ionoscloud as ionoscloud
# Basic example
example = ionoscloud.compute.Datacenter("example",
name="example-kafka-datacenter",
location="de/fra")
example_lan = ionoscloud.compute.Lan("example",
datacenter_id=example.id,
public=False,
name="example-kafka-lan")
example_cluster = ionoscloud.kafka.Cluster("example",
name="example-kafka-cluster",
location="de/fra",
version="3.7.0",
size="S",
connections={
"datacenter_id": example.id,
"lan_id": example_lan.id,
"broker_addresses": [
"192.168.1.101/24",
"192.168.1.102/24",
"192.168.1.103/24",
],
})
package main
import (
"github.com/ionos-cloud/pulumi-ionoscloud/sdk/go/ionoscloud/compute"
"github.com/ionos-cloud/pulumi-ionoscloud/sdk/go/ionoscloud/kafka"
"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)
func main() {
pulumi.Run(func(ctx *pulumi.Context) error {
// Basic example
example, err := compute.NewDatacenter(ctx, "example", &compute.DatacenterArgs{
Name: pulumi.String("example-kafka-datacenter"),
Location: pulumi.String("de/fra"),
})
if err != nil {
return err
}
exampleLan, err := compute.NewLan(ctx, "example", &compute.LanArgs{
DatacenterId: example.ID(),
Public: pulumi.Bool(false),
Name: pulumi.String("example-kafka-lan"),
})
if err != nil {
return err
}
_, err = kafka.NewCluster(ctx, "example", &kafka.ClusterArgs{
Name: pulumi.String("example-kafka-cluster"),
Location: pulumi.String("de/fra"),
Version: pulumi.String("3.7.0"),
Size: pulumi.String("S"),
Connections: &kafka.ClusterConnectionsArgs{
DatacenterId: example.ID(),
LanId: exampleLan.ID(),
BrokerAddresses: pulumi.StringArray{
pulumi.String("192.168.1.101/24"),
pulumi.String("192.168.1.102/24"),
pulumi.String("192.168.1.103/24"),
},
},
})
if err != nil {
return err
}
return nil
})
}
using System.Collections.Generic;
using System.Linq;
using Pulumi;
using Ionoscloud = Ionoscloud.Pulumi.Ionoscloud;
return await Deployment.RunAsync(() =>
{
// Basic example
var example = new Ionoscloud.Compute.Datacenter("example", new()
{
Name = "example-kafka-datacenter",
Location = "de/fra",
});
var exampleLan = new Ionoscloud.Compute.Lan("example", new()
{
DatacenterId = example.Id,
Public = false,
Name = "example-kafka-lan",
});
var exampleCluster = new Ionoscloud.Kafka.Cluster("example", new()
{
Name = "example-kafka-cluster",
Location = "de/fra",
Version = "3.7.0",
Size = "S",
Connections = new Ionoscloud.Kafka.Inputs.ClusterConnectionsArgs
{
DatacenterId = example.Id,
LanId = exampleLan.Id,
BrokerAddresses = new[]
{
"192.168.1.101/24",
"192.168.1.102/24",
"192.168.1.103/24",
},
},
});
});
package generated_program;
import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.ionoscloud.compute.Datacenter;
import com.pulumi.ionoscloud.compute.DatacenterArgs;
import com.pulumi.ionoscloud.compute.Lan;
import com.pulumi.ionoscloud.compute.LanArgs;
import com.pulumi.ionoscloud.kafka.Cluster;
import com.pulumi.ionoscloud.kafka.ClusterArgs;
import com.pulumi.ionoscloud.kafka.inputs.ClusterConnectionsArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;
public class App {
public static void main(String[] args) {
Pulumi.run(App::stack);
}
public static void stack(Context ctx) {
// Basic example
var example = new Datacenter("example", DatacenterArgs.builder()
.name("example-kafka-datacenter")
.location("de/fra")
.build());
var exampleLan = new Lan("exampleLan", LanArgs.builder()
.datacenterId(example.id())
.public_(false)
.name("example-kafka-lan")
.build());
var exampleCluster = new Cluster("exampleCluster", ClusterArgs.builder()
.name("example-kafka-cluster")
.location("de/fra")
.version("3.7.0")
.size("S")
.connections(ClusterConnectionsArgs.builder()
.datacenterId(example.id())
.lanId(exampleLan.id())
.brokerAddresses(
"192.168.1.101/24",
"192.168.1.102/24",
"192.168.1.103/24")
.build())
.build());
}
}
resources:
  # Basic example
  example:
    type: ionoscloud:compute:Datacenter
    properties:
      name: example-kafka-datacenter
      location: de/fra
  exampleLan:
    type: ionoscloud:compute:Lan
    name: example
    properties:
      datacenterId: ${example.id}
      public: false
      name: example-kafka-lan
  exampleCluster:
    type: ionoscloud:kafka:Cluster
    name: example
    properties:
      name: example-kafka-cluster
      location: de/fra
      version: 3.7.0
      size: S
      connections:
        datacenterId: ${example.id}
        lanId: ${exampleLan.id}
        brokerAddresses:
          - 192.168.1.101/24
          - 192.168.1.102/24
          - 192.168.1.103/24
import * as pulumi from "@pulumi/pulumi";
import * as ionoscloud from "@ionos-cloud/sdk-pulumi";
import * as random from "@pulumi/random";
// Complete example
const example = new ionoscloud.compute.Datacenter("example", {
name: "example-kafka-datacenter",
location: "de/fra",
});
const exampleLan = new ionoscloud.compute.Lan("example", {
datacenterId: example.id,
"public": false,
name: "example-kafka-lan",
});
const password = new random.RandomPassword("password", {
length: 16,
special: false,
});
const exampleServer = new ionoscloud.compute.Server("example", {
name: "example-kafka-server",
datacenterId: example.id,
cores: 1,
ram: 2 * 1024,
availabilityZone: "AUTO",
cpuFamily: "INTEL_SKYLAKE",
imageName: "ubuntu:latest",
imagePassword: password.result,
volume: {
name: "example-kafka-volume",
size: 6,
diskType: "SSD Standard",
},
nic: {
lan: exampleLan.id,
name: "example-kafka-nic",
dhcp: true,
},
});
const exampleCluster = new ionoscloud.kafka.Cluster("example", {
name: "example-kafka-cluster",
location: example.location,
version: "3.7.0",
size: "S",
connections: {
datacenterId: example.id,
lanId: exampleLan.id,
brokerAddresses: "kafka_cluster_broker_ips_cidr_list",
},
});
import pulumi
import pulumi_ionoscloud as ionoscloud
import pulumi_random as random
# Complete example
example = ionoscloud.compute.Datacenter("example",
name="example-kafka-datacenter",
location="de/fra")
example_lan = ionoscloud.compute.Lan("example",
datacenter_id=example.id,
public=False,
name="example-kafka-lan")
password = random.RandomPassword("password",
length=16,
special=False)
example_server = ionoscloud.compute.Server("example",
name="example-kafka-server",
datacenter_id=example.id,
cores=1,
ram=2 * 1024,
availability_zone="AUTO",
cpu_family="INTEL_SKYLAKE",
image_name="ubuntu:latest",
image_password=password.result,
volume={
"name": "example-kafka-volume",
"size": 6,
"disk_type": "SSD Standard",
},
nic={
"lan": example_lan.id,
"name": "example-kafka-nic",
"dhcp": True,
})
example_cluster = ionoscloud.kafka.Cluster("example",
name="example-kafka-cluster",
location=example.location,
version="3.7.0",
size="S",
connections={
"datacenter_id": example.id,
"lan_id": example_lan.id,
"broker_addresses": "kafka_cluster_broker_ips_cidr_list",
})
package main
import (
"github.com/ionos-cloud/pulumi-ionoscloud/sdk/go/ionoscloud/compute"
"github.com/ionos-cloud/pulumi-ionoscloud/sdk/go/ionoscloud/kafka"
"github.com/pulumi/pulumi-random/sdk/go/random"
"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)
func main() {
pulumi.Run(func(ctx *pulumi.Context) error {
// Complete example
example, err := compute.NewDatacenter(ctx, "example", &compute.DatacenterArgs{
Name: pulumi.String("example-kafka-datacenter"),
Location: pulumi.String("de/fra"),
})
if err != nil {
return err
}
exampleLan, err := compute.NewLan(ctx, "example", &compute.LanArgs{
DatacenterId: example.ID(),
Public: pulumi.Bool(false),
Name: pulumi.String("example-kafka-lan"),
})
if err != nil {
return err
}
password, err := random.NewRandomPassword(ctx, "password", &random.RandomPasswordArgs{
Length: pulumi.Int(16),
Special: pulumi.Bool(false),
})
if err != nil {
return err
}
_, err = compute.NewServer(ctx, "example", &compute.ServerArgs{
Name: pulumi.String("example-kafka-server"),
DatacenterId: example.ID(),
Cores: pulumi.Int(1),
Ram: pulumi.Int(2 * 1024),
AvailabilityZone: pulumi.String("AUTO"),
CpuFamily: pulumi.String("INTEL_SKYLAKE"),
ImageName: pulumi.String("ubuntu:latest"),
ImagePassword: password.Result,
Volume: &compute.ServerVolumeArgs{
Name: pulumi.String("example-kafka-volume"),
Size: pulumi.Int(6),
DiskType: pulumi.String("SSD Standard"),
},
Nic: &compute.ServerNicArgs{
Lan: exampleLan.ID(),
Name: pulumi.String("example-kafka-nic"),
Dhcp: pulumi.Bool(true),
},
})
if err != nil {
return err
}
_, err = kafka.NewCluster(ctx, "example", &kafka.ClusterArgs{
Name: pulumi.String("example-kafka-cluster"),
Location: example.Location,
Version: pulumi.String("3.7.0"),
Size: pulumi.String("S"),
Connections: &kafka.ClusterConnectionsArgs{
DatacenterId: example.ID(),
LanId: exampleLan.ID(),
BrokerAddresses: pulumi.StringArray{
pulumi.String("kafka_cluster_broker_ips_cidr_list"),
},
},
})
if err != nil {
return err
}
return nil
})
}
using System.Collections.Generic;
using System.Linq;
using Pulumi;
using Ionoscloud = Ionoscloud.Pulumi.Ionoscloud;
using Random = Pulumi.Random;
return await Deployment.RunAsync(() =>
{
// Complete example
var example = new Ionoscloud.Compute.Datacenter("example", new()
{
Name = "example-kafka-datacenter",
Location = "de/fra",
});
var exampleLan = new Ionoscloud.Compute.Lan("example", new()
{
DatacenterId = example.Id,
Public = false,
Name = "example-kafka-lan",
});
var password = new Random.RandomPassword("password", new()
{
Length = 16,
Special = false,
});
var exampleServer = new Ionoscloud.Compute.Server("example", new()
{
Name = "example-kafka-server",
DatacenterId = example.Id,
Cores = 1,
Ram = 2 * 1024,
AvailabilityZone = "AUTO",
CpuFamily = "INTEL_SKYLAKE",
ImageName = "ubuntu:latest",
ImagePassword = password.Result,
Volume = new Ionoscloud.Compute.Inputs.ServerVolumeArgs
{
Name = "example-kafka-volume",
Size = 6,
DiskType = "SSD Standard",
},
Nic = new Ionoscloud.Compute.Inputs.ServerNicArgs
{
Lan = exampleLan.Id,
Name = "example-kafka-nic",
Dhcp = true,
},
});
var exampleCluster = new Ionoscloud.Kafka.Cluster("example", new()
{
Name = "example-kafka-cluster",
Location = example.Location,
Version = "3.7.0",
Size = "S",
Connections = new Ionoscloud.Kafka.Inputs.ClusterConnectionsArgs
{
DatacenterId = example.Id,
LanId = exampleLan.Id,
BrokerAddresses = "kafka_cluster_broker_ips_cidr_list",
},
});
});
package generated_program;
import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.ionoscloud.compute.Datacenter;
import com.pulumi.ionoscloud.compute.DatacenterArgs;
import com.pulumi.ionoscloud.compute.Lan;
import com.pulumi.ionoscloud.compute.LanArgs;
import com.pulumi.random.RandomPassword;
import com.pulumi.random.RandomPasswordArgs;
import com.pulumi.ionoscloud.compute.Server;
import com.pulumi.ionoscloud.compute.ServerArgs;
import com.pulumi.ionoscloud.compute.inputs.ServerVolumeArgs;
import com.pulumi.ionoscloud.compute.inputs.ServerNicArgs;
import com.pulumi.ionoscloud.kafka.Cluster;
import com.pulumi.ionoscloud.kafka.ClusterArgs;
import com.pulumi.ionoscloud.kafka.inputs.ClusterConnectionsArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;
public class App {
public static void main(String[] args) {
Pulumi.run(App::stack);
}
public static void stack(Context ctx) {
// Complete example
var example = new Datacenter("example", DatacenterArgs.builder()
.name("example-kafka-datacenter")
.location("de/fra")
.build());
var exampleLan = new Lan("exampleLan", LanArgs.builder()
.datacenterId(example.id())
.public_(false)
.name("example-kafka-lan")
.build());
var password = new RandomPassword("password", RandomPasswordArgs.builder()
.length(16)
.special(false)
.build());
var exampleServer = new Server("exampleServer", ServerArgs.builder()
.name("example-kafka-server")
.datacenterId(example.id())
.cores(1)
.ram(2 * 1024)
.availabilityZone("AUTO")
.cpuFamily("INTEL_SKYLAKE")
.imageName("ubuntu:latest")
.imagePassword(password.result())
.volume(ServerVolumeArgs.builder()
.name("example-kafka-volume")
.size(6)
.diskType("SSD Standard")
.build())
.nic(ServerNicArgs.builder()
.lan(exampleLan.id())
.name("example-kafka-nic")
.dhcp(true)
.build())
.build());
var exampleCluster = new Cluster("exampleCluster", ClusterArgs.builder()
.name("example-kafka-cluster")
.location(example.location())
.version("3.7.0")
.size("S")
.connections(ClusterConnectionsArgs.builder()
.datacenterId(example.id())
.lanId(exampleLan.id())
.brokerAddresses("kafka_cluster_broker_ips_cidr_list")
.build())
.build());
}
}
Create Cluster Resource
Resources are created with functions called constructors. To learn more about declaring and configuring resources, see Resources.
Constructor syntax
new Cluster(name: string, args: ClusterArgs, opts?: CustomResourceOptions);
@overload
def Cluster(resource_name: str,
args: ClusterArgs,
opts: Optional[ResourceOptions] = None)
@overload
def Cluster(resource_name: str,
opts: Optional[ResourceOptions] = None,
connections: Optional[ClusterConnectionsArgs] = None,
size: Optional[str] = None,
version: Optional[str] = None,
location: Optional[str] = None,
name: Optional[str] = None)
func NewCluster(ctx *Context, name string, args ClusterArgs, opts ...ResourceOption) (*Cluster, error)
public Cluster(string name, ClusterArgs args, CustomResourceOptions? opts = null)
public Cluster(String name, ClusterArgs args)
public Cluster(String name, ClusterArgs args, CustomResourceOptions options)
type: ionoscloud:kafka:Cluster
properties: # The arguments to resource properties.
options: # Bag of options to control resource's behavior.
Parameters
- name string
- The unique name of the resource.
- args ClusterArgs
- The arguments to resource properties.
- opts CustomResourceOptions
- Bag of options to control resource's behavior.
- resource_name str
- The unique name of the resource.
- args ClusterArgs
- The arguments to resource properties.
- opts ResourceOptions
- Bag of options to control resource's behavior.
- ctx Context
- Context object for the current deployment.
- name string
- The unique name of the resource.
- args ClusterArgs
- The arguments to resource properties.
- opts ResourceOption
- Bag of options to control resource's behavior.
- name string
- The unique name of the resource.
- args ClusterArgs
- The arguments to resource properties.
- opts CustomResourceOptions
- Bag of options to control resource's behavior.
- name String
- The unique name of the resource.
- args ClusterArgs
- The arguments to resource properties.
- options CustomResourceOptions
- Bag of options to control resource's behavior.
Constructor example
The following reference example uses placeholder values for all input properties.
var exampleclusterResourceResourceFromKafkacluster = new Ionoscloud.Kafka.Cluster("exampleclusterResourceResourceFromKafkacluster", new()
{
Connections = new Ionoscloud.Kafka.Inputs.ClusterConnectionsArgs
{
BrokerAddresses = new[]
{
"string",
},
DatacenterId = "string",
LanId = "string",
},
Size = "string",
Version = "string",
Location = "string",
Name = "string",
});
example, err := kafka.NewCluster(ctx, "exampleclusterResourceResourceFromKafkacluster", &kafka.ClusterArgs{
Connections: &kafka.ClusterConnectionsArgs{
BrokerAddresses: pulumi.StringArray{
pulumi.String("string"),
},
DatacenterId: pulumi.String("string"),
LanId: pulumi.String("string"),
},
Size: pulumi.String("string"),
Version: pulumi.String("string"),
Location: pulumi.String("string"),
Name: pulumi.String("string"),
})
var exampleclusterResourceResourceFromKafkacluster = new com.ionoscloud.pulumi.ionoscloud.kafka.Cluster("exampleclusterResourceResourceFromKafkacluster", com.ionoscloud.pulumi.ionoscloud.kafka.ClusterArgs.builder()
.connections(ClusterConnectionsArgs.builder()
.brokerAddresses("string")
.datacenterId("string")
.lanId("string")
.build())
.size("string")
.version("string")
.location("string")
.name("string")
.build());
examplecluster_resource_resource_from_kafkacluster = ionoscloud.kafka.Cluster("exampleclusterResourceResourceFromKafkacluster",
connections={
"broker_addresses": ["string"],
"datacenter_id": "string",
"lan_id": "string",
},
size="string",
version="string",
location="string",
name="string")
const exampleclusterResourceResourceFromKafkacluster = new ionoscloud.kafka.Cluster("exampleclusterResourceResourceFromKafkacluster", {
connections: {
brokerAddresses: ["string"],
datacenterId: "string",
lanId: "string",
},
size: "string",
version: "string",
location: "string",
name: "string",
});
type: ionoscloud:kafka:Cluster
properties:
  connections:
    brokerAddresses:
      - string
    datacenterId: string
    lanId: string
  location: string
  name: string
  size: string
  version: string
Cluster Resource Properties
To learn more about resource properties and how to use them, see Inputs and Outputs in the Architecture and Concepts docs.
Inputs
In Python, inputs that are objects can be passed either as argument classes or as dictionary literals.
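For instance, here is a minimal Python sketch of both forms for the connections input; all values are placeholders, and the ionoscloud.kafka.ClusterConnectionsArgs path is an assumption based on the constructor signature shown above.
import pulumi_ionoscloud as ionoscloud

# Argument-class form (assumes the args class is exported as
# ionoscloud.kafka.ClusterConnectionsArgs; all values are placeholders).
connections_as_args = ionoscloud.kafka.ClusterConnectionsArgs(
    datacenter_id="your-datacenter-uuid",
    lan_id="1",
    broker_addresses=["192.168.1.101/24"],
)

# Equivalent dictionary-literal form accepted by the same connections input.
connections_as_dict = {
    "datacenter_id": "your-datacenter-uuid",
    "lan_id": "1",
    "broker_addresses": ["192.168.1.101/24"],
}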
The Cluster resource accepts the following input properties:
- Connections Ionoscloud.ClusterConnections
- Connection information of the Kafka Cluster. Minimum items: 1, maximum items: 1.
- Size string
- [string] Size of the Kafka Cluster. Possible values: XS, S.
- Version string
- [string] Version of the Kafka Cluster. Possible values: 3.7.0.
- Location string
- [string] The location of the Kafka Cluster. Possible values: de/fra, de/txl. If this is not set and if no value is provided for the IONOS_API_URL env var, the default location will be: de/fra.
- Name string
- [string] Name of the Kafka Cluster.
- Connections ClusterConnectionsArgs
- Connection information of the Kafka Cluster. Minimum items: 1, maximum items: 1.
- Size string
- [string] Size of the Kafka Cluster. Possible values: XS, S.
- Version string
- [string] Version of the Kafka Cluster. Possible values: 3.7.0.
- Location string
- [string] The location of the Kafka Cluster. Possible values: de/fra, de/txl. If this is not set and if no value is provided for the IONOS_API_URL env var, the default location will be: de/fra.
- Name string
- [string] Name of the Kafka Cluster.
- connections ClusterConnections
- Connection information of the Kafka Cluster. Minimum items: 1, maximum items: 1.
- size String
- [string] Size of the Kafka Cluster. Possible values: XS, S.
- version String
- [string] Version of the Kafka Cluster. Possible values: 3.7.0.
- location String
- [string] The location of the Kafka Cluster. Possible values: de/fra, de/txl. If this is not set and if no value is provided for the IONOS_API_URL env var, the default location will be: de/fra.
- name String
- [string] Name of the Kafka Cluster.
- connections ClusterConnections
- Connection information of the Kafka Cluster. Minimum items: 1, maximum items: 1.
- size string
- [string] Size of the Kafka Cluster. Possible values: XS, S.
- version string
- [string] Version of the Kafka Cluster. Possible values: 3.7.0.
- location string
- [string] The location of the Kafka Cluster. Possible values: de/fra, de/txl. If this is not set and if no value is provided for the IONOS_API_URL env var, the default location will be: de/fra.
- name string
- [string] Name of the Kafka Cluster.
- connections ClusterConnectionsArgs
- Connection information of the Kafka Cluster. Minimum items: 1, maximum items: 1.
- size str
- [string] Size of the Kafka Cluster. Possible values: XS, S.
- version str
- [string] Version of the Kafka Cluster. Possible values: 3.7.0.
- location str
- [string] The location of the Kafka Cluster. Possible values: de/fra, de/txl. If this is not set and if no value is provided for the IONOS_API_URL env var, the default location will be: de/fra.
- name str
- [string] Name of the Kafka Cluster.
- connections Property Map
- Connection information of the Kafka Cluster. Minimum items: 1, maximum items: 1.
- size String
- [string] Size of the Kafka Cluster. Possible values: XS, S.
- version String
- [string] Version of the Kafka Cluster. Possible values: 3.7.0.
- location String
- [string] The location of the Kafka Cluster. Possible values: de/fra, de/txl. If this is not set and if no value is provided for the IONOS_API_URL env var, the default location will be: de/fra.
- name String
- [string] Name of the Kafka Cluster.
Outputs
All input properties are implicitly available as output properties. Additionally, the Cluster resource produces the following output properties:
- BrokerAddresses List<string>
- [list] IP address and port of cluster brokers.
⚠ NOTE: IONOS_API_URL_KAFKA can be used to set a custom API URL for the kafka resource. location field needs to be empty, otherwise it will override the custom API URL. Setting endpoint or IONOS_API_URL does not have any effect.
- Id string
- The provider-assigned unique ID for this managed resource.
- BrokerAddresses []string
- [list] IP address and port of cluster brokers.
⚠ NOTE: IONOS_API_URL_KAFKA can be used to set a custom API URL for the kafka resource. location field needs to be empty, otherwise it will override the custom API URL. Setting endpoint or IONOS_API_URL does not have any effect.
- Id string
- The provider-assigned unique ID for this managed resource.
- brokerAddresses List<String>
- [list] IP address and port of cluster brokers.
⚠ NOTE: IONOS_API_URL_KAFKA can be used to set a custom API URL for the kafka resource. location field needs to be empty, otherwise it will override the custom API URL. Setting endpoint or IONOS_API_URL does not have any effect.
- id String
- The provider-assigned unique ID for this managed resource.
- brokerAddresses string[]
- [list] IP address and port of cluster brokers.
⚠ NOTE: IONOS_API_URL_KAFKA can be used to set a custom API URL for the kafka resource. location field needs to be empty, otherwise it will override the custom API URL. Setting endpoint or IONOS_API_URL does not have any effect.
- id string
- The provider-assigned unique ID for this managed resource.
- broker_addresses Sequence[str]
- [list] IP address and port of cluster brokers.
⚠ NOTE: IONOS_API_URL_KAFKA can be used to set a custom API URL for the kafka resource. location field needs to be empty, otherwise it will override the custom API URL. Setting endpoint or IONOS_API_URL does not have any effect.
- id str
- The provider-assigned unique ID for this managed resource.
- brokerAddresses List<String>
- [list] IP address and port of cluster brokers.
⚠ NOTE: IONOS_API_URL_KAFKA can be used to set a custom API URL for the kafka resource. location field needs to be empty, otherwise it will override the custom API URL. Setting endpoint or IONOS_API_URL does not have any effect.
- id String
- The provider-assigned unique ID for this managed resource.
Look up Existing Cluster Resource
Get an existing Cluster resource’s state with the given name, ID, and optional extra properties used to qualify the lookup.
public static get(name: string, id: Input<ID>, state?: ClusterState, opts?: CustomResourceOptions): Cluster
@staticmethod
def get(resource_name: str,
id: str,
opts: Optional[ResourceOptions] = None,
broker_addresses: Optional[Sequence[str]] = None,
connections: Optional[ClusterConnectionsArgs] = None,
location: Optional[str] = None,
name: Optional[str] = None,
size: Optional[str] = None,
version: Optional[str] = None) -> Cluster
func GetCluster(ctx *Context, name string, id IDInput, state *ClusterState, opts ...ResourceOption) (*Cluster, error)
public static Cluster Get(string name, Input<string> id, ClusterState? state, CustomResourceOptions? opts = null)
public static Cluster get(String name, Output<String> id, ClusterState state, CustomResourceOptions options)
resources:
  _:
    type: ionoscloud:kafka:Cluster
    get:
      id: ${id}
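As a minimal Python sketch (the resource name and the ID string below are placeholders, not real values), a lookup and re-export of an existing cluster's outputs might look like this:
import pulumi
import pulumi_ionoscloud as ionoscloud

# Placeholder ID: pass the provider-assigned ID of the existing Kafka Cluster.
existing = ionoscloud.kafka.Cluster.get("existing-kafka-cluster",
    "your-kafka-cluster-uuid")

# The looked-up resource exposes the same outputs as a newly created one.
pulumi.export("existingBrokerAddresses", existing.broker_addresses)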
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
- resource_name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
- BrokerAddresses List<string>
- [list] IP address and port of cluster brokers.
⚠ NOTE: IONOS_API_URL_KAFKA can be used to set a custom API URL for the kafka resource. location field needs to be empty, otherwise it will override the custom API URL. Setting endpoint or IONOS_API_URL does not have any effect.
- Connections Ionoscloud.ClusterConnections
- Connection information of the Kafka Cluster. Minimum items: 1, maximum items: 1.
- Location string
- [string] The location of the Kafka Cluster. Possible values: de/fra, de/txl. If this is not set and if no value is provided for the IONOS_API_URL env var, the default location will be: de/fra.
- Name string
- [string] Name of the Kafka Cluster.
- Size string
- [string] Size of the Kafka Cluster. Possible values: XS, S.
- Version string
- [string] Version of the Kafka Cluster. Possible values: 3.7.0.
- BrokerAddresses []string
- [list] IP address and port of cluster brokers.
⚠ NOTE: IONOS_API_URL_KAFKA can be used to set a custom API URL for the kafka resource. location field needs to be empty, otherwise it will override the custom API URL. Setting endpoint or IONOS_API_URL does not have any effect.
- Connections ClusterConnectionsArgs
- Connection information of the Kafka Cluster. Minimum items: 1, maximum items: 1.
- Location string
- [string] The location of the Kafka Cluster. Possible values: de/fra, de/txl. If this is not set and if no value is provided for the IONOS_API_URL env var, the default location will be: de/fra.
- Name string
- [string] Name of the Kafka Cluster.
- Size string
- [string] Size of the Kafka Cluster. Possible values: XS, S.
- Version string
- [string] Version of the Kafka Cluster. Possible values: 3.7.0.
- brokerAddresses List<String>
- [list] IP address and port of cluster brokers.
⚠ NOTE: IONOS_API_URL_KAFKA can be used to set a custom API URL for the kafka resource. location field needs to be empty, otherwise it will override the custom API URL. Setting endpoint or IONOS_API_URL does not have any effect.
- connections ClusterConnections
- Connection information of the Kafka Cluster. Minimum items: 1, maximum items: 1.
- location String
- [string] The location of the Kafka Cluster. Possible values: de/fra, de/txl. If this is not set and if no value is provided for the IONOS_API_URL env var, the default location will be: de/fra.
- name String
- [string] Name of the Kafka Cluster.
- size String
- [string] Size of the Kafka Cluster. Possible values: XS, S.
- version String
- [string] Version of the Kafka Cluster. Possible values: 3.7.0.
- brokerAddresses string[]
- [list] IP address and port of cluster brokers.
⚠ NOTE: IONOS_API_URL_KAFKA can be used to set a custom API URL for the kafka resource. location field needs to be empty, otherwise it will override the custom API URL. Setting endpoint or IONOS_API_URL does not have any effect.
- connections ClusterConnections
- Connection information of the Kafka Cluster. Minimum items: 1, maximum items: 1.
- location string
- [string] The location of the Kafka Cluster. Possible values: de/fra, de/txl. If this is not set and if no value is provided for the IONOS_API_URL env var, the default location will be: de/fra.
- name string
- [string] Name of the Kafka Cluster.
- size string
- [string] Size of the Kafka Cluster. Possible values: XS, S.
- version string
- [string] Version of the Kafka Cluster. Possible values: 3.7.0.
- broker_addresses Sequence[str]
- [list] IP address and port of cluster brokers.
⚠ NOTE: IONOS_API_URL_KAFKA can be used to set a custom API URL for the kafka resource. location field needs to be empty, otherwise it will override the custom API URL. Setting endpoint or IONOS_API_URL does not have any effect.
- connections ClusterConnectionsArgs
- Connection information of the Kafka Cluster. Minimum items: 1, maximum items: 1.
- location str
- [string] The location of the Kafka Cluster. Possible values: de/fra, de/txl. If this is not set and if no value is provided for the IONOS_API_URL env var, the default location will be: de/fra.
- name str
- [string] Name of the Kafka Cluster.
- size str
- [string] Size of the Kafka Cluster. Possible values: XS, S.
- version str
- [string] Version of the Kafka Cluster. Possible values: 3.7.0.
- brokerAddresses List<String>
- [list] IP address and port of cluster brokers.
⚠ NOTE: IONOS_API_URL_KAFKA can be used to set a custom API URL for the kafka resource. location field needs to be empty, otherwise it will override the custom API URL. Setting endpoint or IONOS_API_URL does not have any effect.
- connections Property Map
- Connection information of the Kafka Cluster. Minimum items: 1, maximum items: 1.
- location String
- [string] The location of the Kafka Cluster. Possible values: de/fra, de/txl. If this is not set and if no value is provided for the IONOS_API_URL env var, the default location will be: de/fra.
- name String
- [string] Name of the Kafka Cluster.
- size String
- [string] Size of the Kafka Cluster. Possible values: XS, S.
- version String
- [string] Version of the Kafka Cluster. Possible values: 3.7.0.
Supporting Types
ClusterConnections, ClusterConnectionsArgs
- BrokerAddresses List<string> - [list] IP addresses and subnet of cluster brokers. Note the following unavailable IP range: 10.224.0.0/11
- DatacenterId string - [string] The datacenter to connect your instance to.
- LanId string - [string] The numeric LAN ID to connect your instance to.
- BrokerAddresses []string - [list] IP addresses and subnet of cluster brokers. Note the following unavailable IP range: 10.224.0.0/11
- DatacenterId string - [string] The datacenter to connect your instance to.
- LanId string - [string] The numeric LAN ID to connect your instance to.
- brokerAddresses List<String> - [list] IP addresses and subnet of cluster brokers. Note the following unavailable IP range: 10.224.0.0/11
- datacenterId String - [string] The datacenter to connect your instance to.
- lanId String - [string] The numeric LAN ID to connect your instance to.
- brokerAddresses string[] - [list] IP addresses and subnet of cluster brokers. Note the following unavailable IP range: 10.224.0.0/11
- datacenterId string - [string] The datacenter to connect your instance to.
- lanId string - [string] The numeric LAN ID to connect your instance to.
- broker_addresses Sequence[str] - [list] IP addresses and subnet of cluster brokers. Note the following unavailable IP range: 10.224.0.0/11
- datacenter_id str - [string] The datacenter to connect your instance to.
- lan_id str - [string] The numeric LAN ID to connect your instance to.
- brokerAddresses List<String> - [list] IP addresses and subnet of cluster brokers. Note the following unavailable IP range: 10.224.0.0/11
- datacenterId String - [string] The datacenter to connect your instance to.
- lanId String - [string] The numeric LAN ID to connect your instance to.
Import
A Kafka Cluster can be imported using the location and the Kafka Cluster id:
$ pulumi import ionoscloud:kafka/cluster:Cluster mycluster {location}:{kafka cluster uuid}
To learn more about importing existing cloud resources, see Importing resources.
Package Details
- Repository
- ionoscloud ionos-cloud/pulumi-ionoscloud
- License
- Apache-2.0
- Notes
- This Pulumi package is based on the ionoscloud Terraform Provider.