From 2b456748eb90a83dc912cf73b8c5329b31bf7f74 Mon Sep 17 00:00:00 2001
From: Till Prochaska <1512805+tillprochaska@users.noreply.github.com>
Date: Mon, 1 Jul 2024 18:01:06 +0200
Subject: [PATCH] Reproduce task-specific keys not being removed from Redis

---
 tests/test_taskqueue.py | 30 ++++++++++++++++++++++++++++++
 1 file changed, 30 insertions(+)

diff --git a/tests/test_taskqueue.py b/tests/test_taskqueue.py
index 925442c..1173d3e 100644
--- a/tests/test_taskqueue.py
+++ b/tests/test_taskqueue.py
@@ -328,6 +328,36 @@ def test_dataset_get_status():
     assert status["last_update"] is None
 
 
+def test_dataset_flush_status():
+    conn = get_fakeredis()
+    dataset = Dataset(conn=conn, name="abc")
+    assert conn.keys() == []
+
+    task = Task(
+        task_id="1",
+        job_id="abc",
+        delivery_tag="",
+        operation="ingest",
+        context={},
+        payload={},
+        priority=5,
+        collection_id="abc",
+    )
+
+    # Enqueue and retry a task
+    dataset.add_task(task.task_id, task.operation)
+    task.increment_retry_count(conn)
+
+    # This stores status data in Redis
+    assert conn.keys() != []
+
+    # Calling the method removes status data related to this dataset from Redis
+    dataset.flush_status()
+    # TODO: This is failing right now which over time results in slow, but steady
+    # Redis memory usage
+    assert conn.keys() == []
+
+
 def test_get_priority_bucket():
     redis = get_fakeredis()
     rmq_channel = get_rabbitmq_channel()