commit
a336c7fe23
1 changed file with 3 additions and 2 deletions
@@ -2,6 +2,7 @@ import glob
 import os
 import re
 import torch
+from typing import Union
 
 from modules import shared, devices, sd_models, errors
 
@@ -235,7 +236,7 @@ def lora_calc_updown(lora, module, target):
     return updown
 
 
-def lora_apply_weights(self: torch.nn.Conv2d | torch.nn.Linear | torch.nn.MultiheadAttention):
+def lora_apply_weights(self: Union[torch.nn.Conv2d, torch.nn.Linear, torch.nn.MultiheadAttention]):
     """
     Applies the currently selected set of Loras to the weights of torch layer self.
     If weights already have this particular set of loras applied, does nothing.
@@ -295,7 +296,7 @@ def lora_apply_weights(self: torch.nn.Conv2d | torch.nn.Linear | torch.nn.Multih
     setattr(self, "lora_current_names", wanted_names)
 
 
-def lora_reset_cached_weight(self: torch.nn.Conv2d | torch.nn.Linear):
+def lora_reset_cached_weight(self: Union[torch.nn.Conv2d, torch.nn.Linear]):
     setattr(self, "lora_current_names", ())
     setattr(self, "lora_weights_backup", None)
 
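The annotation change is presumably for compatibility with Python versions below 3.10: a PEP 604 union such as `torch.nn.Conv2d | torch.nn.Linear` in a signature is evaluated when the function is defined, so the old annotations raise on Python 3.9, while `typing.Union` does not. A minimal sketch of the difference (the function name is hypothetical, not from the commit):

# Minimal sketch of the syntax difference; the function name is hypothetical.
from typing import Union

import torch

# On Python 3.9, writing the annotation with PEP 604 syntax fails at import
# time, because "torch.nn.Conv2d | torch.nn.Linear" is evaluated immediately:
#     def apply(self: torch.nn.Conv2d | torch.nn.Linear): ...
#     TypeError: unsupported operand type(s) for |: 'type' and 'type'
# ("from __future__ import annotations" would also avoid this by deferring
# evaluation, but the commit uses typing.Union instead.)


def apply(self: Union[torch.nn.Conv2d, torch.nn.Linear]):
    # typing.Union expresses the same annotation and works on older Pythons.
    ...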