Merge pull request #358 from Haidra-Org/main

feat: comfyui `3b9a6cf`; fix: don't force torch cache empties

tazlin authored Nov 14, 2024
2 parents a46ea59 + 1c9b133 commit b9dbcea
Showing 2 changed files with 5 additions and 5 deletions.
hordelib/comfy_horde.py (4 additions, 4 deletions)
@@ -432,12 +432,12 @@ def unload_all_models_vram():
     logger.debug("Cleaning up models")
     with torch.no_grad():
         try:
-            _comfy_soft_empty_cache(True)
+            _comfy_soft_empty_cache()
             log_free_ram()
         except Exception as e:
             logger.error(f"Exception during comfy unload: {e}")
         _comfy_cleanup_models()
-        _comfy_soft_empty_cache(True)
+        _comfy_soft_empty_cache()
 
     logger.debug(f"{len(SharedModelManager.manager._models_in_ram)} models cached in shared model manager")
     logger.debug(f"{len(_comfy_current_loaded_models)} models loaded in comfy")
@@ -467,7 +467,7 @@ def unload_all_models_ram():
     log_free_ram()
 
     logger.debug("Soft emptying cache")
-    _comfy_soft_empty_cache(True)
+    _comfy_soft_empty_cache()
     log_free_ram()
 
     logger.debug(f"{len(SharedModelManager.manager._models_in_ram)} models cached in shared model manager")
@@ -945,7 +945,7 @@ def _run_pipeline(
         global _comfy_cleanup_models
         logger.debug("Cleaning up models")
         _comfy_cleanup_models(False)
-        _comfy_soft_empty_cache(True)
+        _comfy_soft_empty_cache()
 
         stdio.replay()
 
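The dropped `True` argument is the `force` flag of ComfyUI's `comfy.model_management.soft_empty_cache`, which `_comfy_soft_empty_cache` appears to alias. A minimal sketch of what the flag controls, assuming that `soft_empty_cache(force=False)` signature; the body below is an illustration, not ComfyUI's actual implementation:

import torch

def soft_empty_cache_sketch(force: bool = False) -> None:
    # Illustrative stand-in for ComfyUI's soft_empty_cache.
    if not torch.cuda.is_available():
        return
    if force:
        # Forcing always hands the allocator's cached blocks back to the
        # driver, which synchronizes the GPU and slows later allocations.
        torch.cuda.empty_cache()
        torch.cuda.ipc_collect()
    # The soft (default) path instead defers to backend heuristics, whose
    # exact condition varies by ComfyUI version.

Calling the wrapper without arguments therefore trades immediate VRAM reclamation for fewer allocator flushes during unloads and pipeline runs, which is what "don't force torch cache empties" in the commit title refers to.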
hordelib/consts.py (1 addition, 1 deletion)
@@ -6,7 +6,7 @@
 
 from hordelib.config_path import get_hordelib_path
 
-COMFYUI_VERSION = "73e3a9e67654d5b20054da02c6a77311af527364"
+COMFYUI_VERSION = "3b9a6cf2b11094f92228b121c6a0d466ba5d5246"
 """The exact version of ComfyUI version to load."""
 
 REMOTE_PROXY = ""
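COMFYUI_VERSION pins an exact upstream commit hash rather than a tag or branch, so applying it to a local ComfyUI clone is a plain git checkout. A minimal sketch under that assumption; `comfyui_dir` and the helper name are illustrative, not hordelib's actual install code:

import subprocess
from pathlib import Path

COMFYUI_VERSION = "3b9a6cf2b11094f92228b121c6a0d466ba5d5246"

def checkout_pinned_comfyui(comfyui_dir: Path) -> None:
    # Fetch upstream history, then detach HEAD at the pinned commit so
    # every install runs exactly the same ComfyUI code.
    subprocess.run(["git", "fetch", "origin"], cwd=comfyui_dir, check=True)
    subprocess.run(["git", "checkout", COMFYUI_VERSION], cwd=comfyui_dir, check=True)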
