Bring back the ability to run on CPU on PCs with CUDA-compatible GPUs

cmdr2 2023-03-10 10:19:55 +05:30
parent 8907dabd4c
commit 0fd706f392
2 changed files with 8 additions and 6 deletions

View File

@@ -65,10 +65,12 @@ def get_device_delta(render_devices, active_devices):
 
 
 def is_mps_available():
-    return platform.system() == "Darwin" and \
-        hasattr(torch.backends, 'mps') and \
-        torch.backends.mps.is_available() and \
-        torch.backends.mps.is_built()
+    return (
+        platform.system() == "Darwin"
+        and hasattr(torch.backends, "mps")
+        and torch.backends.mps.is_available()
+        and torch.backends.mps.is_built()
+    )
 
 
 def is_cuda_available():
@@ -213,7 +215,7 @@ def is_device_compatible(device):
         log.error(str(e))
         return False
 
-    if not is_cuda_available():
+    if device in ("cpu", "mps"):
         return True
     # Memory check
     try:
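
In effect, the compatibility check no longer ties CPU rendering to the absence of CUDA. Below is a minimal, hypothetical sketch of the behavioral difference: is_cuda_available() is stubbed to simulate a PC that does have a CUDA-capable GPU, the real memory check is replaced by a placeholder, and the _old/_new function names are illustrative only.

def is_cuda_available():
    return True  # stub: simulate a PC with a CUDA-capable GPU

def is_device_compatible_old(device):
    # Old guard: non-CUDA devices were only accepted when CUDA was absent,
    # so CPU rendering was impossible on machines with a CUDA GPU.
    if not is_cuda_available():
        return True
    return device.startswith("cuda")  # placeholder for the real memory check

def is_device_compatible_new(device):
    # New guard: "cpu" and "mps" are always accepted; only CUDA devices
    # fall through to the memory check.
    if device in ("cpu", "mps"):
        return True
    return device.startswith("cuda")  # placeholder for the real memory check

print(is_device_compatible_old("cpu"))  # False -- CPU was rejected
print(is_device_compatible_new("cpu"))  # True  -- CPU is allowed again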

View File

@@ -385,7 +385,7 @@ def get_devices():
     }
 
 
 def get_device_info(device):
-    if not device_manager.is_cuda_available():
+    if device in ("cpu", "mps"):
         return {"name": device_manager.get_processor_name()}
     mem_free, mem_total = torch.cuda.mem_get_info(device)
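
The same guard in get_device_info() means a device report for "cpu" no longer depends on whether CUDA is present. A self-contained, hypothetical usage sketch follows: get_processor_name() is stubbed with platform.processor() in place of device_manager.get_processor_name(), and the fields returned for CUDA devices are illustrative, not the project's actual payload.

import platform

def get_processor_name():
    # stand-in for device_manager.get_processor_name()
    return platform.processor() or platform.machine() or "CPU"

def get_device_info(device):
    if device in ("cpu", "mps"):
        # No CUDA memory stats for these devices; report the processor name.
        return {"name": get_processor_name()}

    import torch  # only needed for CUDA devices in this sketch
    mem_free, mem_total = torch.cuda.mem_get_info(device)
    return {
        "name": torch.cuda.get_device_name(device),
        "mem_free": mem_free,
        "mem_total": mem_total,
    }

print(get_device_info("cpu"))  # works even on a machine with CUDA GPUs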