Skip to content

Commit

Permalink
Fix tests so that they pass
Browse files Browse the repository at this point in the history
  • Loading branch information
rquitales committed Aug 24, 2023
1 parent 5460955 commit 010711d
Show file tree
Hide file tree
Showing 3 changed files with 103 additions and 10 deletions.
8 changes: 5 additions & 3 deletions examples/examples_nodejs_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -23,11 +23,13 @@ func TestAccEnvironmentTs(t *testing.T) {
func TestKafkaTopicPatch(t *testing.T) {
test := getJSBaseOptions(t).
With(integration.ProgramTestOptions{
Dir: path.Join(getCwd(t), "kafka-topic-patch", "ts", "step1"),
Dir: path.Join(getCwd(t), "kafka-topic-patch", "ts", "step1"),
SkipEmptyPreviewUpdate: true,
EditDirs: []integration.EditDir{
{
Dir: path.Join(getCwd(t), "kafka-topic-patch", "ts", "step2"),
Additive: true,
Dir: path.Join(getCwd(t), "kafka-topic-patch", "ts", "step2"),
Additive: true,
ExpectNoChanges: false,
},
},
})
Expand Down
39 changes: 39 additions & 0 deletions examples/kafka-topic-patch/ts/step1/index.ts
Original file line number Diff line number Diff line change
Expand Up @@ -3,20 +3,59 @@ import * as confluent from "@pulumi/confluentcloud";

// Confluent Cloud environment that scopes every resource in this test program.
const env = new confluent.Environment("ts-env");

// Create test Kafka cluster.
// Single-zone cluster in AWS us-west-2; the empty `basic: {}` block selects
// the Basic cluster tier.
const kfCluster = new confluent.KafkaCluster("kfCluster", {
    cloud: "AWS",
    region: "us-west-2",
    availability: "SINGLE_ZONE",
    environment: {
        id: env.id,
    },
    basic: {},
});

// Create necessary resources/access to manage the Kafka cluster.
// Service account that will own the API key used to administer the cluster.
const kfSA = new confluent.ServiceAccount("kfSA", {
    description: "SA to manage Kafka cluster",
});

// Grant the service account cluster-admin rights so the API key created below
// is authorized to create topics on the cluster.
// NOTE(review): `pulumi.interpolate` requires `import * as pulumi from "@pulumi/pulumi"`,
// which is outside this hunk — confirm it is present at the top of the file.
const roleBinding = new confluent.RoleBinding("roleBinding", {
    roleName: "CloudClusterAdmin",
    crnPattern: kfCluster.rbacCrn,
    principal: pulumi.interpolate`User:${kfSA.id}`,
});

// Cluster-scoped API key owned by the service account. `dependsOn` forces the
// role binding to be created first; otherwise the key could exist before it
// has any permissions on the cluster.
const clusterAPIKey = new confluent.ApiKey(
    "clusterAPIKey",
    {
        owner: {
            apiVersion: kfSA.apiVersion,
            id: kfSA.id,
            kind: kfSA.kind,
        },
        managedResource: {
            apiVersion: kfCluster.apiVersion,
            id: kfCluster.id,
            kind: kfCluster.kind,
            environment: {
                id: env.id,
            },
        },
    },
    { dependsOn: [roleBinding] }
);

// Create the Kafka topic.
const kfTopic = new confluent.KafkaTopic("kfTopic", {
    topicName: "test-topic",

    kafkaCluster: {
        id: kfCluster.id,
    },
    // Deprecated field which we are still supporting.
    // The cluster's `restEndpoint` is deliberately passed through the old
    // `httpEndpoint` input to exercise the backwards-compatibility path —
    // that is the point of this test step.
    httpEndpoint: kfCluster.restEndpoint,
    credentials: {
        key: clusterAPIKey.id,
        secret: clusterAPIKey.secret,
    },
});
66 changes: 59 additions & 7 deletions examples/kafka-topic-patch/ts/step2/index.ts
Original file line number Diff line number Diff line change
Expand Up @@ -3,29 +3,81 @@ import * as confluent from "@pulumi/confluentcloud";

// Confluent Cloud environment that scopes every resource in this test program.
// This step2 program mirrors step1 exactly so the edit-dir update produces no
// resource changes other than the datasource lookup added below.
const env = new confluent.Environment("ts-env");

// Create test Kafka cluster.
// Single-zone cluster in AWS us-west-2; the empty `basic: {}` block selects
// the Basic cluster tier.
const kfCluster = new confluent.KafkaCluster("kfCluster", {
    cloud: "AWS",
    region: "us-west-2",
    availability: "SINGLE_ZONE",
    environment: {
        id: env.id,
    },
    basic: {},
});

// Create necessary resources/access to manage the Kafka cluster.
// Service account that will own the API key used to administer the cluster.
const kfSA = new confluent.ServiceAccount("kfSA", {
    description: "SA to manage Kafka cluster",
});

// Grant the service account cluster-admin rights so the API key created below
// is authorized to create topics on the cluster.
// NOTE(review): `pulumi.interpolate` requires `import * as pulumi from "@pulumi/pulumi"`,
// which is outside this hunk — confirm it is present at the top of the file.
const roleBinding = new confluent.RoleBinding("roleBinding", {
    roleName: "CloudClusterAdmin",
    crnPattern: kfCluster.rbacCrn,
    principal: pulumi.interpolate`User:${kfSA.id}`,
});

// Cluster-scoped API key owned by the service account. `dependsOn` forces the
// role binding to be created first; otherwise the key could exist before it
// has any permissions on the cluster.
const clusterAPIKey = new confluent.ApiKey(
    "clusterAPIKey",
    {
        owner: {
            apiVersion: kfSA.apiVersion,
            id: kfSA.id,
            kind: kfSA.kind,
        },
        managedResource: {
            apiVersion: kfCluster.apiVersion,
            id: kfCluster.id,
            kind: kfCluster.kind,
            environment: {
                id: env.id,
            },
        },
    },
    { dependsOn: [roleBinding] }
);

// Create the Kafka topic.
const kfTopic = new confluent.KafkaTopic("kfTopic", {
    topicName: "test-topic",

    kafkaCluster: {
        id: kfCluster.id,
    },
    // Deprecated field which we are still supporting.
    // The cluster's `restEndpoint` is deliberately passed through the old
    // `httpEndpoint` input to exercise the backwards-compatibility path.
    httpEndpoint: kfCluster.restEndpoint,
    credentials: {
        key: clusterAPIKey.id,
        secret: clusterAPIKey.secret,
    },
});

const res = kfCluster.restEndpoint.apply((endPt) => {
const res = confluent.getKafkaTopic({
topicName: "test-topic",
restEndpoint: endPt,
// Test that we can also use the datasource to get the topic.
// Every input to the lookup is a Pulumi Output, so they are bundled with
// `pulumi.all` and unwrapped to plain values inside `.apply` before calling
// the promise-based `getKafkaTopic` invoke. `res` is therefore an
// Output wrapping the datasource result.
// NOTE(review): if this provider SDK version generates
// `confluent.getKafkaTopicOutput`, it accepts Inputs directly and would
// remove the all/apply plumbing — confirm availability before simplifying.
const res = pulumi
    .all([
        kfCluster.id,
        kfCluster.restEndpoint,
        clusterAPIKey.id,
        clusterAPIKey.secret,
    ])
    .apply(([clusterID, endPt, key, secret]) => {
        return confluent.getKafkaTopic({
            topicName: "test-topic",
            restEndpoint: endPt,
            kafkaCluster: {
                id: clusterID,
            },
            credentials: {
                key: key,
                secret: secret,
            },
        });
    });

return res;
});

0 comments on commit 010711d

Please sign in to comment.