Allow GPUs with as little as 2 GB of VRAM, instead of requiring 3 GB

cmdr2 2023-05-19 17:25:53 +05:30
parent 1605c5fbcc
commit 063d14d2ac


@@ -224,9 +224,9 @@ def is_device_compatible(device):
     try:
         _, mem_total = torch.cuda.mem_get_info(device)
         mem_total /= float(10 ** 9)
-        if mem_total < 3.0:
+        if mem_total < 2.0:
             if is_device_compatible.history.get(device) == None:
-                log.warn(f"GPU {device} with less than 3 GB of VRAM is not compatible with Stable Diffusion")
+                log.warn(f"GPU {device} with less than 2 GB of VRAM is not compatible with Stable Diffusion")
                 is_device_compatible.history[device] = 1
             return False
     except RuntimeError as e:
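
The hunk only shows the middle of the check, so here is a minimal self-contained sketch of how the whole function might look after this change. The logger setup, the lazy initialization of the `history` attribute, and the exception handling and return paths outside the hunk are assumptions for illustration, not taken from the commit.

import logging

import torch

log = logging.getLogger(__name__)

def is_device_compatible(device):
    """Return True if the CUDA device has enough VRAM for Stable Diffusion."""
    # Assumed initialization: remember devices we've already warned about,
    # so the warning is logged once per device instead of on every call.
    if not hasattr(is_device_compatible, "history"):
        is_device_compatible.history = {}

    try:
        # mem_get_info() returns (free_bytes, total_bytes) for the device
        _, mem_total = torch.cuda.mem_get_info(device)
        mem_total /= float(10 ** 9)  # bytes -> GB
        if mem_total < 2.0:
            if is_device_compatible.history.get(device) is None:
                log.warning(f"GPU {device} with less than 2 GB of VRAM is not compatible with Stable Diffusion")
                is_device_compatible.history[device] = 1
            return False
    except RuntimeError as e:
        # Assumed handling: mem_get_info() raises RuntimeError for invalid
        # or unavailable devices; treat those as incompatible.
        log.error(str(e))
        return False

    return True

The `is_device_compatible.history` dict visible in the diff acts as a per-device deduplication cache for the warning; only the 3.0 -> 2.0 threshold and the matching message text change in this commit.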