diff --git a/shotover-proxy/tests/cassandra_int_tests/mod.rs b/shotover-proxy/tests/cassandra_int_tests/mod.rs
index ae47c8449..5fe7a66ba 100644
--- a/shotover-proxy/tests/cassandra_int_tests/mod.rs
+++ b/shotover-proxy/tests/cassandra_int_tests/mod.rs
@@ -467,6 +467,51 @@ async fn cluster_multi_rack_2_per_rack(#[case] driver: CassandraDriver) {
     cluster::multi_rack::test_topology_task(None, expected_nodes, 16).await;
 }
 
+#[tokio::test(flavor = "multi_thread")]
+async fn cluster_multi_rack_2_per_rack_go_smoke_test() {
+    let _compose = docker_compose(
+        "tests/test-configs/cassandra/cluster-multi-rack-2-per-rack/docker-compose.yaml",
+    );
+
+    let shotover_rack1 = shotover_process(
+        "tests/test-configs/cassandra/cluster-multi-rack-2-per-rack/topology_rack1.yaml",
+    )
+    .with_config("tests/test-configs/shotover-config/config1.yaml")
+    .with_log_name("Rack1")
+    .start()
+    .await;
+    let shotover_rack2 = shotover_process(
+        "tests/test-configs/cassandra/cluster-multi-rack-2-per-rack/topology_rack2.yaml",
+    )
+    .with_log_name("Rack2")
+    .with_config("tests/test-configs/shotover-config/config2.yaml")
+    .start()
+    .await;
+    let shotover_rack3 = shotover_process(
+        "tests/test-configs/cassandra/cluster-multi-rack-2-per-rack/topology_rack3.yaml",
+    )
+    .with_config("tests/test-configs/shotover-config/config3.yaml")
+    .with_log_name("Rack3")
+    .start()
+    .await;
+
+    test_helpers::connection::cassandra::go::run_go_smoke_test().await;
+
+    // The gocql driver routes execute requests to its control connection during initialization,
+    // which results in out-of-rack requests. This warning is correctly triggered in that case.
+    // The warning occurs only in rack1 because gocql always picks rack 1 for its control connection.
+    shotover_rack1
+        .shutdown_and_then_consume_events(&[EventMatcher::new()
+            .with_level(Level::Warn)
+            .with_target("shotover::transforms::cassandra::sink_cluster::node_pool")
+            .with_message("No suitable nodes to route to found within rack. This error only occurs in debug builds as it should never occur in an ideal integration test situation.")
+            .with_count(Count::Any)
+        ])
+        .await;
+    shotover_rack2.shutdown_and_then_consume_events(&[]).await;
+    shotover_rack3.shutdown_and_then_consume_events(&[]).await;
+}
+
 #[rstest]
 #[case::scylla(Scylla)]
 //#[case::cdrs(Cdrs)] // TODO
diff --git a/shotover/src/transforms/cassandra/sink_cluster/rewrite.rs b/shotover/src/transforms/cassandra/sink_cluster/rewrite.rs
index f77fe4cd6..e181deff6 100644
--- a/shotover/src/transforms/cassandra/sink_cluster/rewrite.rs
+++ b/shotover/src/transforms/cassandra/sink_cluster/rewrite.rs
@@ -119,7 +119,7 @@ impl MessageRewriter {
             RewriteTableTy::Prepare { clone_index } => {
                 let mut first = true;
                 for node in pool.nodes().iter() {
-                    if node.is_up && node.rack == self.local_shotover_node.rack {
+                    if node.is_up {
                         if first {
                             let message_id = messages[*clone_index].id();
                             self.prepare_requests_to_destination_nodes
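
For context, the rewrite.rs hunk above drops the "same rack" restriction when fanning a prepare request out across the cluster. The following is a minimal, self-contained sketch of that behaviour change, not shotover's actual implementation: the Node struct, the two helper functions, and the sample data are hypothetical, and only the is_up and rack fields mirror names that appear in the diff.

// Hypothetical stand-in for the pool's node type; only `is_up` and `rack` come from the diff.
struct Node {
    rack: &'static str,
    is_up: bool,
}

/// Old behaviour: only live nodes in the local shotover rack received the cloned prepare.
fn prepare_targets_old<'a>(nodes: &'a [Node], local_rack: &str) -> Vec<&'a Node> {
    nodes
        .iter()
        .filter(|n| n.is_up && n.rack == local_rack)
        .collect()
}

/// New behaviour: every live node receives the cloned prepare, regardless of rack.
fn prepare_targets_new(nodes: &[Node]) -> Vec<&Node> {
    nodes.iter().filter(|n| n.is_up).collect()
}

fn main() {
    let nodes = [
        Node { rack: "rack1", is_up: true },
        Node { rack: "rack2", is_up: true },
        Node { rack: "rack3", is_up: false },
    ];
    // The old filter keeps one node (rack1 only); the new filter keeps both live nodes.
    assert_eq!(prepare_targets_old(&nodes, "rack1").len(), 1);
    assert_eq!(prepare_targets_new(&nodes).len(), 2);
}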