Fix embeddings dtype mismatch
This commit is contained in:
parent
645f4e7ef8
commit
c4b9b07db6
1 changed file with 1 addition and 1 deletion
|
@ -171,7 +171,7 @@ class EmbeddingsWithFixes(torch.nn.Module):
|
|||
vecs = []
|
||||
for fixes, tensor in zip(batch_fixes, inputs_embeds):
|
||||
for offset, embedding in fixes:
|
||||
emb = embedding.vec
|
||||
emb = embedding.vec.to(devices.dtype_unet) if devices.unet_needs_upcast else embedding.vec
|
||||
emb_len = min(tensor.shape[0] - offset - 1, emb.shape[0])
|
||||
tensor = torch.cat([tensor[0:offset + 1], emb[0:emb_len], tensor[offset + 1 + emb_len:]])
|
||||
|
||||
|
|
Loading…
Reference in a new issue