Skip to content

Commit

Permalink
Reproduce task-specific keys not being removed from Redis
Browse files Browse the repository at this point in the history
  • Loading branch information
tillprochaska committed Jul 1, 2024
1 parent 054f5f2 commit 2b45674
Showing 1 changed file with 30 additions and 0 deletions.
30 changes: 30 additions & 0 deletions tests/test_taskqueue.py
Original file line number Diff line number Diff line change
Expand Up @@ -328,6 +328,36 @@ def test_dataset_get_status():
assert status["last_update"] is None


def test_dataset_flush_status():
    """Reproduce task-specific status keys lingering in Redis.

    NOTE(review): the final assertion is expected to fail until the
    underlying ``flush_status`` cleanup bug is fixed — this test exists
    to pin down the leak.
    """
    conn = get_fakeredis()
    dataset = Dataset(conn=conn, name="abc")
    # A fresh fakeredis instance must start out with no keys at all.
    assert conn.keys() == []

    task = Task(
        task_id="1",
        job_id="abc",
        delivery_tag="",
        operation="ingest",
        context={},
        payload={},
        priority=5,
        collection_id="abc",
    )

    # Enqueue the task once and bump its retry counter.
    dataset.add_task(task.task_id, task.operation)
    task.increment_retry_count(conn)

    # Both operations above persist status data in Redis.
    assert conn.keys() != []

    # Flushing should purge every status key belonging to this dataset.
    dataset.flush_status()
    # TODO: This is failing right now which over time results in slow, but steady
    # Redis memory usage
    assert conn.keys() == []


def test_get_priority_bucket():
redis = get_fakeredis()
rmq_channel = get_rabbitmq_channel()
Expand Down

0 comments on commit 2b45674

Please sign in to comment.