From 3a10baa9f899c23ea2d2da1e96c297fbe7bc47f5 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?C=C3=A9dric=20Deltheil?=
Date: Thu, 31 Aug 2023 17:52:57 +0200
Subject: [PATCH] cross-attn 2d: record use_bias attribute

---
 src/refiners/foundationals/latent_diffusion/cross_attention.py | 1 +
 1 file changed, 1 insertion(+)

diff --git a/src/refiners/foundationals/latent_diffusion/cross_attention.py b/src/refiners/foundationals/latent_diffusion/cross_attention.py
index 592b165..c973f86 100644
--- a/src/refiners/foundationals/latent_diffusion/cross_attention.py
+++ b/src/refiners/foundationals/latent_diffusion/cross_attention.py
@@ -137,6 +137,7 @@ class CrossAttentionBlock2d(Sum):
         self.num_attention_heads = num_attention_heads
         self.num_attention_layers = num_attention_layers
         self.num_groups = num_groups
+        self.use_bias = use_bias
         self.context_key = context_key
         self.use_linear_projection = use_linear_projection
         self.projection_type = "Linear" if use_linear_projection else "Conv2d"
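
Note (not part of the commit): a minimal, self-contained sketch of the pattern this patch applies, namely recording a constructor argument as an instance attribute so that it can be read back later, e.g. by inspection or weight-conversion tooling. The ToyCrossAttentionBlock class below is hypothetical and only mirrors the shape of the change; it is not the refiners API.

    # Hypothetical example mirroring the one-line change above:
    # keep the use_bias flag on the instance instead of discarding it.
    class ToyCrossAttentionBlock:
        def __init__(self, channels: int, num_groups: int = 32, use_bias: bool = True) -> None:
            self.channels = channels
            self.num_groups = num_groups
            self.use_bias = use_bias  # recorded so callers can query the flag afterwards

    block = ToyCrossAttentionBlock(channels=320, use_bias=False)
    assert block.use_bias is False  # the constructor argument is now discoverable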