make it work for @ethancohen123

Phil Wang
2022-08-19 11:28:58 -07:00
parent de5e628773
commit 7762edd0ff
2 changed files with 9 additions and 3 deletions


@@ -250,9 +250,15 @@ class XClipAdapter(BaseClipAdapter):
         text = text[..., :self.max_text_len]
         text_mask = text != 0
         encoder_output = self.clip.text_transformer(text)
-        text_cls, text_encodings = encoder_output[:, 0], encoder_output[:, 1:]
+
+        encoder_output_is_cls = encoder_output.ndim == 3
+
+        text_cls, text_encodings = (encoder_output[:, 0], encoder_output[:, 1:]) if encoder_output_is_cls else (encoder_output, None)
         text_embed = self.clip.to_text_latent(text_cls)
-        text_encodings = text_encodings.masked_fill(~text_mask[..., None], 0.)
+
+        if exists(text_encodings):
+            text_encodings = text_encodings.masked_fill(~text_mask[..., None], 0.)
+
         return EmbeddedText(l2norm(text_embed), text_encodings)

     @torch.no_grad()
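
For readers outside the diff context: the change lets XClipAdapter accept an x-clip text transformer that returns either per-token encodings with a leading CLS vector (batch, seq, dim) or an already-pooled embedding (batch, dim). Below is a minimal standalone sketch of that branching with dummy tensors; split_text_output is a hypothetical name, and torch.nn.functional.normalize stands in for the repo's l2norm helper.

import torch
import torch.nn.functional as F

def split_text_output(encoder_output, text_mask):
    # mirror of the commit's check: a 3-dim output still carries per-token encodings plus CLS
    encoder_output_is_cls = encoder_output.ndim == 3

    if encoder_output_is_cls:
        # first position is CLS, the rest are per-token encodings
        text_cls, text_encodings = encoder_output[:, 0], encoder_output[:, 1:]
        # zero out encodings at padded token positions
        text_encodings = text_encodings.masked_fill(~text_mask[..., None], 0.)
    else:
        # already-pooled output: no per-token encodings to pass along
        text_cls, text_encodings = encoder_output, None

    return F.normalize(text_cls, dim = -1), text_encodings

# hypothetical usage with dummy tensors
tokens = torch.randint(1, 100, (2, 8))
tokens[:, 6:] = 0                        # simulate padding
text_mask = tokens != 0

with_cls = torch.randn(2, 9, 512)        # CLS + 8 token encodings
pooled = torch.randn(2, 512)             # already pooled

embed_a, enc_a = split_text_output(with_cls, text_mask)   # enc_a: (2, 8, 512), padded positions zeroed
embed_b, enc_b = split_text_output(pooled, text_mask)     # enc_b: None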


@@ -1 +1 @@
-__version__ = '1.8.2'
+__version__ = '1.8.3'