From c4d8e967e92c9b7172a3b420937c9bfafad90883 Mon Sep 17 00:00:00 2001 From: harubaru Date: Thu, 13 Oct 2022 01:39:01 -0700 Subject: [PATCH] Apply missing final_layer_norm to penultimate hidden state; its absence likely caused garbage outputs during training --- ldm/modules/encoders/modules.py | 1 + 1 file changed, 1 insertion(+) diff --git a/ldm/modules/encoders/modules.py b/ldm/modules/encoders/modules.py index 66f0e57..84b793f 100644 --- a/ldm/modules/encoders/modules.py +++ b/ldm/modules/encoders/modules.py @@ -158,6 +158,7 @@ class FrozenCLIPEmbedder(AbstractEncoder): if self.penultimate: z = outputs.hidden_states[-2] # simple enough + z = self.transformer.text_model.final_layer_norm(z) else: z = outputs.last_hidden_state