Skip to content

Commit

Permalink
remove gc collect again for now
Browse files · Browse the repository at this point in the history
  • Loading branch information
albertz committed Jan 10, 2025
1 parent 3075f5b commit d1cf549
Showing 1 changed file with 3 additions and 2 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -457,7 +457,6 @@ def model_recog_flashlight(
out_spatial_dim,
final beam_dim
"""
import gc
from dataclasses import dataclass
import torch
from flashlight.lib.text.decoder import LM, LMState
Expand Down Expand Up @@ -598,7 +597,9 @@ def _cache_maybe_free_memory(self):
if used_mem / total_mem < self._max_used_mem_fraction:
break
# Check again after trying to empty the cache.
gc.collect()
# Note: gc.collect() is problematic here because of how Flashlight handles the states:
# We have millions of Python objects in the mapping_states dict,
# which takes a very long time to go through.
torch.cuda.empty_cache()
used_mem = torch.cuda.memory_reserved(dev)
if used_mem / total_mem < self._max_used_mem_fraction:
Expand Down

0 comments on commit d1cf549

Please sign in to comment.