Fix token max length

This commit is contained in:
guaneec 2022-09-25 14:13:03 +08:00 committed by AUTOMATIC1111
parent b8eae5de93
commit 615b2fc9ce

View file

@@ -300,7 +300,7 @@ class FrozenCLIPEmbedderWithCustomWords(torch.nn.Module):
         remade_batch_tokens = []
         id_start = self.wrapped.tokenizer.bos_token_id
         id_end = self.wrapped.tokenizer.eos_token_id
-        maxlen = self.wrapped.max_length - 2
+        maxlen = self.wrapped.max_length
         used_custom_terms = []
         cache = {}