
Commit

Disable bfloat16 on older mac.
comfyanonymous committed Dec 25, 2024
1 parent 0229228 commit b486885
Showing 1 changed file with 13 additions and 6 deletions.
19 changes: 13 additions & 6 deletions comfy/model_management.py
@@ -886,14 +886,19 @@ def pytorch_attention_flash_attention():
         return True
     return False
 
-def force_upcast_attention_dtype():
-    upcast = args.force_upcast_attention
+def mac_version():
     try:
-        macos_version = tuple(int(n) for n in platform.mac_ver()[0].split("."))
-        if (14, 5) <= macos_version <= (15, 2): # black image bug on recent versions of macOS
-            upcast = True
+        return tuple(int(n) for n in platform.mac_ver()[0].split("."))
     except:
-        pass
+        return None
+
+def force_upcast_attention_dtype():
+    upcast = args.force_upcast_attention
+
+    macos_version = mac_version()
+    if macos_version is not None and ((14, 5) <= macos_version <= (15, 2)): # black image bug on recent versions of macOS
+        upcast = True
+
     if upcast:
         return torch.float32
     else:
@@ -1034,6 +1039,8 @@ def should_use_bf16(device=None, model_params=0, prioritize_performance=True, ma
         return False
 
     if mps_mode():
+        if mac_version() < (14,):
+            return False
         return True
 
     if cpu_mode():
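
For reference, a minimal standalone sketch (not part of the commit) of how the new version gate behaves. platform.mac_ver()[0] is an empty string off macOS, so the int() conversion raises and mac_version() returns None there; the None checks below are a defensive addition for illustration only, since the committed mac_version() < (14,) comparison is reached only on MPS devices.

import platform

def mac_version():
    # Return the macOS version as a tuple such as (15, 1), or None if it cannot be read.
    try:
        return tuple(int(n) for n in platform.mac_ver()[0].split("."))
    except:
        return None

v = mac_version()
print(v)  # e.g. (15, 1) on macOS Sequoia, None on Linux/Windows

# Upcast attention to float32 on macOS 14.5 through 15.2 (black image bug).
print(v is not None and (14, 5) <= v <= (15, 2))

# Disable bfloat16 on Macs older than macOS 14; tuple comparison makes (13, 6) < (14,) true.
print(v is not None and v < (14,))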
