Fix embeddings dtype mismatch

brkirch 2023-01-26 09:00:15 -05:00
parent 645f4e7ef8
commit c4b9b07db6
1 changed file with 1 addition and 1 deletion


@@ -171,7 +171,7 @@ class EmbeddingsWithFixes(torch.nn.Module):
         vecs = []
         for fixes, tensor in zip(batch_fixes, inputs_embeds):
             for offset, embedding in fixes:
-                emb = embedding.vec
+                emb = embedding.vec.to(devices.dtype_unet) if devices.unet_needs_upcast else embedding.vec
                 emb_len = min(tensor.shape[0] - offset - 1, emb.shape[0])
                 tensor = torch.cat([tensor[0:offset + 1], emb[0:emb_len], tensor[offset + 1 + emb_len:]])
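
For context, a minimal standalone sketch of the pattern this one-liner applies. The dtype values, the float32-embedding scenario, and the local `dtype_unet`/`unet_needs_upcast` names are assumptions for illustration, standing in for the webui's `devices` module:

```python
import torch

# Assumed stand-ins for devices.dtype_unet / devices.unet_needs_upcast;
# the real values depend on the webui's runtime configuration.
dtype_unet = torch.float16
unet_needs_upcast = True

# Prompt embeddings from the text encoder, in the UNet's dtype.
tensor = torch.randn(77, 768, dtype=torch.float16)
# An embedding loaded from disk may still be float32.
embedding_vec = torch.randn(4, 768, dtype=torch.float32)

# The fix: cast the embedding before splicing it in. Without the cast,
# torch.cat either raises a dtype error (older PyTorch) or promotes the
# whole result to float32, which then mismatches the float16 model
# downstream.
emb = embedding_vec.to(dtype_unet) if unet_needs_upcast else embedding_vec

offset = 10
emb_len = min(tensor.shape[0] - offset - 1, emb.shape[0])
tensor = torch.cat([tensor[0:offset + 1], emb[0:emb_len], tensor[offset + 1 + emb_len:]])
assert tensor.dtype == dtype_unet  # splice preserves the UNet dtype
```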