From fb90b00e75328c9f7b1d3d46b2d9b162d9c8cdc3 Mon Sep 17 00:00:00 2001
From: Pierre Chapuis
Date: Tue, 5 Mar 2024 19:29:25 +0100
Subject: [PATCH] add_loras_to_unet: add preprocess values as exclusions in
 last step

---
 .../foundationals/latent_diffusion/lora.py | 14 ++++++++++----
 1 file changed, 10 insertions(+), 4 deletions(-)

diff --git a/src/refiners/foundationals/latent_diffusion/lora.py b/src/refiners/foundationals/latent_diffusion/lora.py
index 8fe78cc..0cfefd9 100644
--- a/src/refiners/foundationals/latent_diffusion/lora.py
+++ b/src/refiners/foundationals/latent_diffusion/lora.py
@@ -143,11 +143,17 @@ class SDLoraManager:
         loras_excluded = {k: v for k, v in unet_loras.items() if any(x in k for x in preprocess.keys())}
         loras_remaining = {k: v for k, v in unet_loras.items() if k not in loras_excluded}
 
-        for exc, v in preprocess.items():
-            ls = {k: v for k, v in loras_excluded.items() if exc in k}
-            auto_attach_loras(ls, self.unet, include=[v], debug_map=debug_map)
+        for exc_k, exc_v in preprocess.items():
+            ls = {k: v for k, v in loras_excluded.items() if exc_k in k}
+            auto_attach_loras(ls, self.unet, include=[exc_v], debug_map=debug_map)
 
-        auto_attach_loras(loras_remaining, self.unet, exclude=exclude, include=include, debug_map=debug_map)
+        auto_attach_loras(
+            loras_remaining,
+            self.unet,
+            exclude=[*exclude, *preprocess.values()],
+            include=include,
+            debug_map=debug_map,
+        )
 
     def remove_loras(self, *names: str) -> None:
         """Remove multiple LoRAs from the target.
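
Note: the rename from `exc, v` to `exc_k, exc_v` is a readability fix only (Python 3
comprehensions scope their own variables, so the old `include=[v]` already referred to
the loop variable; it just read ambiguously). The behavioral change is the final
`auto_attach_loras` call, which now excludes the preprocess target names on top of the
caller's `exclude` list, so a remaining LoRA cannot attach to a layer that one of the
preprocess passes already claimed.

A minimal self-contained sketch of that failure mode. This is not the refiners API:
`first_match` stands in for auto_attach_loras's target search, and the class names and
candidate list are invented stand-ins.

# Hypothetical sketch, not the refiners API. It shows why preprocess.values()
# must be merged into `exclude` in the final pass.

def first_match(candidates: list[str], exclude: list[str]) -> str | None:
    """Return the first candidate target not ruled out by `exclude`."""
    for target in candidates:
        if target not in exclude:
            return target
    return None

# Caller-provided exclusions and the preprocess map from the patched method.
exclude: list[str] = []
preprocess = {"ip_adapter": "CrossAttentionAdapter"}

# A "remaining" LoRA (its key does not contain "ip_adapter") whose search
# order nonetheless visits the adapter layer first.
candidates = ["CrossAttentionAdapter", "SelfAttention"]

# Before the patch: the final pass used only the caller's exclusions, so the
# LoRA lands on a layer the preprocess pass already handled.
assert first_match(candidates, exclude) == "CrossAttentionAdapter"

# After the patch: preprocess targets are merged into the exclusions, and the
# LoRA falls through to the intended layer.
assert first_match(candidates, [*exclude, *preprocess.values()]) == "SelfAttention"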