From 90401d96a69f62e21bee11294a1fe800cbd8b7fa Mon Sep 17 00:00:00 2001
From: AUTOMATIC <16777216c@gmail.com>
Date: Tue, 20 Sep 2022 12:12:31 +0300
Subject: [PATCH] fix a off by one error with embedding at the start of the
 sentence

---
 modules/sd_hijack.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/modules/sd_hijack.py b/modules/sd_hijack.py
index 145c5ab7..ec83c0cb 100644
--- a/modules/sd_hijack.py
+++ b/modules/sd_hijack.py
@@ -401,7 +401,7 @@ class EmbeddingsWithFixes(torch.nn.Module):
                 for offset, word in fixes:
                     emb = self.embeddings.word_embeddings[word]
                     emb_len = min(tensor.shape[0]-offset, emb.shape[0])
-                    tensor[offset:offset+emb_len] = self.embeddings.word_embeddings[word][0:emb_len]
+                    tensor[offset+1:offset+1+emb_len] = self.embeddings.word_embeddings[word][0:emb_len]

         return inputs_embeds
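
For context, a minimal sketch of what the one-position shift does. The toy tensors and names (seq_len, dim, offset) are illustrative only, not taken from the webui code; the assumption, consistent with the commit subject, is that position 0 of the conditioning tensor holds the start-of-sentence token's embedding, so writing a textual-inversion embedding at `offset` for a fix at the start of the prompt overwrites that first position, while `offset + 1` skips it.

import torch

seq_len, dim = 8, 4
tensor = torch.zeros(seq_len, dim)  # embeddings for one prompt; position 0 is the start-of-sentence token
emb = torch.ones(3, dim)            # a 3-vector textual-inversion embedding to splice in
offset = 0                          # the fix was recorded at the very start of the prompt

# Old (buggy) placement: starts writing at `offset`, clobbering position 0.
old = tensor.clone()
emb_len = min(old.shape[0] - offset, emb.shape[0])
old[offset:offset + emb_len] = emb[0:emb_len]

# Patched placement: shifted by one, so position 0 is left untouched.
new = tensor.clone()
new[offset + 1:offset + 1 + emb_len] = emb[0:emb_len]

print(old[0])  # tensor([1., 1., 1., 1.]) -> start-of-sentence embedding overwritten
print(new[0])  # tensor([0., 0., 0., 0.]) -> start-of-sentence embedding preserved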