feat(replication_test): Add test_stream_approximate_trimming test
Signed-off-by: Stepan Bagritsevich <[email protected]>
BagritsevichStepan committed Jan 21, 2025
1 parent 54ef564 commit f635b6a
Showing 1 changed file with 36 additions and 0 deletions.
36 changes: 36 additions & 0 deletions tests/dragonfly/replication_test.py
@@ -2734,3 +2734,39 @@ async def test_master_too_big(df_factory):
    # We should never sync due to used memory too high during full sync
    with pytest.raises(TimeoutError):
        await wait_available_async(c_replica, timeout=10)


@dfly_args({"proactor_threads": 4})
async def test_stream_approximate_trimming(df_factory):
    master = df_factory.create()
    replica = df_factory.create()

    df_factory.start_all([master, replica])
    c_master = master.client()
    c_replica = replica.client()

    await c_replica.execute_command(f"REPLICAOF localhost {master.port}")
    await wait_for_replicas_state(c_replica)

    # Step 1: Populate master with 100 streams, each containing 200 entries
    num_streams = 100
    entries_per_stream = 200

    for i in range(num_streams):
        stream_name = f"stream{i}"
        for j in range(entries_per_stream):
            await c_master.execute_command("XADD", stream_name, "*", f"field{j}", f"value{j}")

    # Step 2: Approximately trim each stream to a random threshold between 70 and 200.
    # The "~" flag requests approximate trimming, so the server may keep more entries
    # than the threshold; master and replica must still end up with identical contents.
    for i in range(num_streams):
        stream_name = f"stream{i}"
        trim_size = random.randint(70, entries_per_stream)
        await c_master.execute_command("XTRIM", stream_name, "MAXLEN", "~", trim_size)

    # Wait for replica sync
    await asyncio.sleep(1)

    # Check that the replica data is consistent with the master
    master_data = await StaticSeeder.capture(c_master)
    replica_data = await StaticSeeder.capture(c_replica)
    assert master_data == replica_data
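
For context, the test exercises XTRIM with the "~" modifier (MAXLEN ~), i.e. approximate trimming: the server guarantees at least the requested number of entries remain but may retain more, trimming lazily for efficiency, so the property under test is that master and replica still converge to identical stream contents. Below is a minimal standalone sketch of the same command pattern using redis-py's asyncio client; the client library, host, port, key name, and threshold are illustrative assumptions and are not part of this commit (assumes redis-py >= 5 against a local Dragonfly or Redis instance).

# Illustrative sketch only, not part of the commit: approximate stream trimming.
# Assumes redis-py >= 5 and a server listening on localhost:6379.
import asyncio

import redis.asyncio as redis


async def main():
    client = redis.Redis(host="localhost", port=6379)

    # Populate one stream with 200 entries.
    for j in range(200):
        await client.xadd("demo_stream", {f"field{j}": f"value{j}"})

    # Approximate trim: with approximate=True the server keeps at least 120
    # entries but may retain more, trimming lazily for efficiency
    # (equivalent to: XTRIM demo_stream MAXLEN ~ 120).
    await client.xtrim("demo_stream", maxlen=120, approximate=True)

    length = await client.xlen("demo_stream")
    print(f"Stream length after approximate trim: {length} (>= 120)")

    await client.aclose()


asyncio.run(main())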
