Skip to content

Commit

Permalink
Memory tweaks.
Browse files Browse the repository at this point in the history
  • Loading branch information
comfyanonymous committed Aug 12, 2024
1 parent ce37c11 commit b8ffb29
Showing 1 changed file with 2 additions and 2 deletions.
4 changes: 2 additions & 2 deletions comfy/model_management.py
Original file line number Diff line number Diff line change
Expand Up @@ -438,11 +438,11 @@ def load_models_gpu(models, memory_required=0, force_patch_weights=False, minimu
global vram_state

inference_memory = minimum_inference_memory()
-    extra_mem = max(inference_memory, memory_required) + 100 * 1024 * 1024
+    extra_mem = max(inference_memory, memory_required + 300 * 1024 * 1024)
if minimum_memory_required is None:
minimum_memory_required = extra_mem
else:
-        minimum_memory_required = max(inference_memory, minimum_memory_required) + 100 * 1024 * 1024
+        minimum_memory_required = max(inference_memory, minimum_memory_required + 300 * 1024 * 1024)

models = set(models)

Expand Down

0 comments on commit b8ffb29

Please sign in to comment.