Missing device type for option --medvram
parent 65fbefd033
commit aaeeef82fa
1 changed file with 6 additions and 3 deletions
@@ -2,9 +2,12 @@ import torch
 
 module_in_gpu = None
 cpu = torch.device("cpu")
-gpu = torch.device("cuda")
-device = gpu if torch.cuda.is_available() else cpu
+if torch.has_cuda:
+    device = gpu = torch.device("cuda")
+elif torch.has_mps:
+    device = gpu = torch.device("mps")
+else:
+    device = gpu = torch.device("cpu")
 
 
 def setup_for_low_vram(sd_model, use_medvram):
     parents = {}
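
For context: before this change, gpu was always torch.device("cuda") and device fell back to cpu when CUDA was unavailable, so --medvram had no way to target Apple's MPS backend. The sketch below reproduces the new selection logic as a standalone snippet; it is an illustration only, and it swaps the patch's build-time flags (torch.has_cuda, torch.has_mps) for the runtime checks torch.cuda.is_available() and torch.backends.mps.is_available(). The pick_device helper is a hypothetical name, not part of this commit.

import torch

def pick_device() -> torch.device:
    # Prefer CUDA, then Apple's MPS backend, otherwise fall back to CPU.
    if torch.cuda.is_available():
        return torch.device("cuda")
    mps_backend = getattr(torch.backends, "mps", None)  # older builds have no MPS backend at all
    if mps_backend is not None and mps_backend.is_available():
        return torch.device("mps")
    return torch.device("cpu")

# Mirrors the patched module: both names point at the selected accelerator,
# and --medvram then shuttles model parts between `cpu` and `gpu`.
cpu = torch.device("cpu")
device = gpu = pick_device()
print(device)

Running the snippet prints cuda, mps, or cpu depending on the machine, which is a quick way to confirm which branch of the patched code a given setup will take.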