From e32d8d16f087a0a28252aa6c5a262c6e7e51ebf4 Mon Sep 17 00:00:00 2001
From: Pierre Chapuis
Date: Wed, 13 Mar 2024 14:58:26 +0100
Subject: [PATCH] LoRA loading: forward exclusions when preprocessing parts of
 the UNet

---
 src/refiners/foundationals/latent_diffusion/lora.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/src/refiners/foundationals/latent_diffusion/lora.py b/src/refiners/foundationals/latent_diffusion/lora.py
index 16654b1..2f7bfa5 100644
--- a/src/refiners/foundationals/latent_diffusion/lora.py
+++ b/src/refiners/foundationals/latent_diffusion/lora.py
@@ -184,7 +184,7 @@ class SDLoraManager:
 
         for exc_k, exc_v in preprocess.items():
             ls = {k: v for k, v in loras_excluded.items() if exc_k in k}
-            auto_attach_loras(ls, self.unet, include=[exc_v], debug_map=debug_map)
+            auto_attach_loras(ls, self.unet, include=[exc_v], exclude=exclude, debug_map=debug_map)
 
         auto_attach_loras(
             loras_remaining,
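
Context for the one-line change: a minimal sketch of include/exclude filtering in an
attach helper, assuming eligibility is decided by substring matching on target module
paths. The names `_should_attach`, `auto_attach_loras_sketch`, and `model_paths` are
hypothetical and not part of the refiners API; only `include`, `exclude`, and
`debug_map` appear in the call sites in the diff above.

# Hypothetical sketch, not the refiners implementation of auto_attach_loras.
from typing import Any


def _should_attach(path: str, include: list[str] | None, exclude: list[str] | None) -> bool:
    # A target path is eligible only if it matches some include pattern (when
    # include is given) and matches no exclude pattern.
    if include is not None and not any(p in path for p in include):
        return False
    if exclude is not None and any(p in path for p in exclude):
        return False
    return True


def auto_attach_loras_sketch(
    loras: dict[str, Any],
    model_paths: list[str],
    include: list[str] | None = None,
    exclude: list[str] | None = None,
    debug_map: list[tuple[str, str]] | None = None,
) -> None:
    # Attach each LoRA key to the first eligible target path and record the
    # pairing in debug_map when one is provided.
    for key in loras:
        for path in model_paths:
            if _should_attach(path, include, exclude):
                if debug_map is not None:
                    debug_map.append((key, path))
                break

Under this sketch, omitting exclude= in the preprocess loop would let the preprocessed
LoRA subsets attach to UNet blocks the caller meant to skip; forwarding the same exclude
list, as the change above does, keeps both attach passes consistent.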