From 9d7390d2d19a8baf04ee4ebe598b96ac6ba7f97e Mon Sep 17 00:00:00 2001
From: camenduru <54370274+camenduru@users.noreply.github.com>
Date: Mon, 27 Mar 2023 04:28:40 +0300
Subject: [PATCH 1/2] convert to python v3.9

---
 extensions-builtin/Lora/lora.py | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/extensions-builtin/Lora/lora.py b/extensions-builtin/Lora/lora.py
index edd95f78..79d11e0e 100644
--- a/extensions-builtin/Lora/lora.py
+++ b/extensions-builtin/Lora/lora.py
@@ -2,6 +2,7 @@ import glob
 import os
 import re
 import torch
+from typing import Union
 
 from modules import shared, devices, sd_models, errors
 
@@ -235,7 +236,7 @@ def lora_calc_updown(lora, module, target):
     return updown
 
 
-def lora_apply_weights(self: torch.nn.Conv2d | torch.nn.Linear | torch.nn.MultiheadAttention):
+def lora_apply_weights(self: Union[torch.nn.Conv2d, torch.nn.Linear, torch.nn.MultiheadAttention]):
     """
     Applies the currently selected set of Loras to the weights of torch layer self.
     If weights already have this particular set of loras applied, does nothing.

From 6a147db1287fe660e1bfb2ebf5b3fadc14835c69 Mon Sep 17 00:00:00 2001
From: camenduru <54370274+camenduru@users.noreply.github.com>
Date: Mon, 27 Mar 2023 04:40:31 +0300
Subject: [PATCH 2/2] convert to python v3.9

---
 extensions-builtin/Lora/lora.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/extensions-builtin/Lora/lora.py b/extensions-builtin/Lora/lora.py
index 79d11e0e..696be8ea 100644
--- a/extensions-builtin/Lora/lora.py
+++ b/extensions-builtin/Lora/lora.py
@@ -296,7 +296,7 @@ def lora_apply_weights(self: Union[torch.nn.Conv2d, torch.nn.Linear, torch.nn.Mu
     setattr(self, "lora_current_names", wanted_names)
 
 
-def lora_reset_cached_weight(self: torch.nn.Conv2d | torch.nn.Linear):
+def lora_reset_cached_weight(self: Union[torch.nn.Conv2d, torch.nn.Linear]):
     setattr(self, "lora_current_names", ())
     setattr(self, "lora_weights_backup", None)
 
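
Note (not part of the patches): both hunks replace the PEP 604 union annotation
syntax (X | Y) with typing.Union[X, Y], and the first hunk adds the import that
the new spelling needs. The point of the change is that annotations on a plain
def are evaluated when the def statement executes, and the | operator between
types only exists on Python 3.10+, so merely importing lora.py on 3.9 fails
before any Lora code runs. A minimal sketch of the failure mode and the fix,
assuming CPython 3.9, a module without "from __future__ import annotations",
and illustrative helper names (broken/fixed are not from the patch):

    # Sketch only: shows why the annotation syntax matters on Python 3.9.
    import torch
    from typing import Union

    # PEP 604 syntax builds the union object when the def executes.
    # type.__or__ was added in Python 3.10, so on 3.9 this def raises:
    #   TypeError: unsupported operand type(s) for |: 'type' and 'type'
    # def broken(self: torch.nn.Conv2d | torch.nn.Linear): ...

    # typing.Union is evaluated the same way but exists on 3.9 (it has
    # been available since Python 3.5), so this def succeeds:
    def fixed(self: Union[torch.nn.Conv2d, torch.nn.Linear]) -> None:
        ...

An alternative fix would be adding "from __future__ import annotations"
(PEP 563) at the top of the module, which defers evaluation and lets the
| spelling remain as a string on 3.7+; the Union spelling chosen here has
the same runtime cost while keeping the annotations introspectable.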