Revert to combined KRaft
bbejeck committed Oct 28, 2024
commit 6d489f5, parent 023e5de
Showing 2 changed files with 8 additions and 8 deletions.
tests/kafkatest/tests/streams/streams_broker_bounce_test.py (4 additions & 4 deletions)
@@ -208,7 +208,7 @@ def collect_results(self, sleep_time_secs):
             broker_type=["leader"],
             num_threads=[1, 3],
             sleep_time_secs=[120],
-            metadata_quorum=[quorum.isolated_kraft])
+            metadata_quorum=[quorum.combined_kraft])
     def test_broker_type_bounce(self, failure_mode, broker_type, sleep_time_secs, num_threads, metadata_quorum):
         """
         Start a smoke test client, then kill one particular broker and ensure data is still received
@@ -231,7 +231,7 @@ def test_broker_type_bounce(self, failure_mode, broker_type, sleep_time_secs, nu
     @matrix(failure_mode=["clean_shutdown"],
             broker_type=["controller"],
             sleep_time_secs=[0],
-            metadata_quorum=[quorum.isolated_kraft])
+            metadata_quorum=[quorum.combined_kraft])
     def test_broker_type_bounce_at_start(self, failure_mode, broker_type, sleep_time_secs, metadata_quorum):
         """
         Start a smoke test client, then kill one particular broker immediately before streams stats
@@ -253,7 +253,7 @@ def test_broker_type_bounce_at_start(self, failure_mode, broker_type, sleep_time
     @cluster(num_nodes=7)
     @matrix(failure_mode=["clean_shutdown", "hard_shutdown", "clean_bounce", "hard_bounce"],
             num_failures=[2],
-            metadata_quorum=[quorum.isolated_kraft])
+            metadata_quorum=[quorum.combined_kraft])
     def test_many_brokers_bounce(self, failure_mode, num_failures, metadata_quorum):
         """
         Start a smoke test client, then kill a few brokers and ensure data is still received
@@ -272,7 +272,7 @@ def test_many_brokers_bounce(self, failure_mode, num_failures, metadata_quorum):
     @cluster(num_nodes=7)
     @matrix(failure_mode=["clean_bounce", "hard_bounce"],
             num_failures=[3],
-            metadata_quorum=[quorum.isolated_kraft])
+            metadata_quorum=[quorum.combined_kraft])
     def test_all_brokers_bounce(self, failure_mode, num_failures, metadata_quorum):
         """
         Start a smoke test client, then kill a few brokers and ensure data is still received
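Each changed line above is an argument to ducktape's @matrix decorator, which expands a test method into one run per parameter combination; metadata_quorum selects how the Kafka metadata quorum is provisioned for that run. A minimal sketch of the pattern, assuming the usual ducktape and kafkatest imports (the class name, test name, and parameter values below are hypothetical and not part of this commit):

from ducktape.mark import matrix
from ducktape.mark.resource import cluster
from ducktape.tests.test import Test

from kafkatest.services.kafka import quorum


class ExampleBounceTest(Test):

    @cluster(num_nodes=7)
    @matrix(failure_mode=["clean_shutdown", "hard_bounce"],
            metadata_quorum=[quorum.combined_kraft])  # previously quorum.isolated_kraft in these tests
    def test_example_bounce(self, failure_mode, metadata_quorum):
        # ducktape generates one run per (failure_mode, metadata_quorum)
        # combination, so this body executes twice, both times against a
        # combined KRaft quorum.
        pass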
Second changed file (4 additions & 4 deletions)
@@ -45,7 +45,7 @@ def setUp(self):
         self.zk.start()

     @cluster(num_nodes=7)
-    @matrix(metadata_quorum=[quorum.isolated_kraft], use_new_coordinator=[True, False])
+    @matrix(metadata_quorum=[quorum.combined_kraft], use_new_coordinator=[True, False])
     def test_streams_resilient_to_broker_down(self, metadata_quorum, use_new_coordinator=False):
         self.kafka.start()

@@ -82,7 +82,7 @@ def test_streams_resilient_to_broker_down(self, metadata_quorum, use_new_coordin
         self.kafka.stop()

     @cluster(num_nodes=7)
-    @matrix(metadata_quorum=[quorum.isolated_kraft], use_new_coordinator=[True, False])
+    @matrix(metadata_quorum=[quorum.combined_kraft], use_new_coordinator=[True, False])
     def test_streams_runs_with_broker_down_initially(self, metadata_quorum, use_new_coordinator=False):
         self.kafka.start()
         node = self.kafka.leader(self.inputTopic)
@@ -150,7 +150,7 @@ def test_streams_runs_with_broker_down_initially(self, metadata_quorum, use_new_
         self.kafka.stop()

     @cluster(num_nodes=9)
-    @matrix(metadata_quorum=[quorum.isolated_kraft], use_new_coordinator=[True, False])
+    @matrix(metadata_quorum=[quorum.combined_kraft], use_new_coordinator=[True, False])
     def test_streams_should_scale_in_while_brokers_down(self, metadata_quorum, use_new_coordinator=False):
         self.kafka.start()

@@ -229,7 +229,7 @@ def test_streams_should_scale_in_while_brokers_down(self, metadata_quorum, use_n
         self.kafka.stop()

     @cluster(num_nodes=9)
-    @matrix(metadata_quorum=[quorum.isolated_kraft], use_new_coordinator=[True, False])
+    @matrix(metadata_quorum=[quorum.combined_kraft], use_new_coordinator=[True, False])
     def test_streams_should_failover_while_brokers_down(self, metadata_quorum, use_new_coordinator=False):
         self.kafka.start()
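For context, the two metadata_quorum values differ in where the KRaft controllers run: isolated KRaft provisions dedicated controller nodes, while combined KRaft co-locates the controller role with the brokers, so a test's @cluster node budget does not have to reserve separate controller nodes. A rough sketch of how these choices are commonly expressed as string constants in a quorum helper module; the constant names come from the diff above, but the string values are an assumption, not a copy of the kafkatest source:

# Hypothetical quorum constants (assumed values, not the real kafkatest module).
zk = 'ZK'                          # legacy ZooKeeper-based metadata quorum
isolated_kraft = 'ISOLATED_KRAFT'  # KRaft controllers on dedicated nodes
combined_kraft = 'COMBINED_KRAFT'  # KRaft controllers co-located with brokers

# A @matrix(metadata_quorum=[combined_kraft]) parametrization therefore runs
# the test against a cluster where every controller node is also a broker.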
