Mirror of https://github.com/finegrain-ai/refiners.git, synced 2024-11-21 21:58:47 +00:00
fix slider loras test
parent 8c7fcbc00f
commit 72fa17df48
@@ -1430,14 +1430,18 @@ def test_diffusion_sdxl_multiple_loras(
 ) -> None:
     sdxl = sdxl_ddim
     expected_image = expected_sdxl_multi_loras
-    _, dpo = lora_data_dpo
-    loras, scales = lora_sliders
-    loras["dpo"] = dpo
+    _, dpo_weights = lora_data_dpo
+    slider_loras, slider_scales = lora_sliders
 
     manager = SDLoraManager(sdxl)
-    for lora_name, lora_weights in loras.items():
-        manager.add_loras(lora_name, lora_weights, scales[lora_name])
-    manager.add_loras("dpo", dpo, 1.4)
+    for lora_name, lora_weights in slider_loras.items():
+        manager.add_loras(
+            lora_name,
+            lora_weights,
+            slider_scales[lora_name],
+            unet_inclusions=["SelfAttention", "ResidualBlock", "Downsample", "Upsample"],
+        )
+    manager.add_loras("dpo", dpo_weights, 1.4, unet_inclusions=["CrossAttentionBlock"])
 
     # parameters are the same as https://huggingface.co/radames/sdxl-DPO-LoRA
     # except that we are using DDIM instead of sde-dpmsolver++
Binary file not shown (before: 1.7 MiB, after: 1.8 MiB).
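For context, the call pattern this commit exercises can be sketched as below: each slider LoRA is injected only into the UNet's self-attention, residual, and resampling layers, while the DPO LoRA is restricted to cross-attention blocks. This is a minimal sketch, not the test itself; the import paths, LoRA names, and checkpoint files are assumptions, and only the add_loras call shape mirrors the diff above.

# Minimal sketch (assumed: import paths, LoRA names, checkpoint files;
# only the add_loras call pattern comes from the diff above).
from refiners.fluxion.utils import load_from_safetensors
from refiners.foundationals.latent_diffusion import StableDiffusion_XL
from refiners.foundationals.latent_diffusion.lora import SDLoraManager

sdxl = StableDiffusion_XL()
manager = SDLoraManager(sdxl)

# Slider LoRAs patch only the UNet's self-attention, residual, and resampling layers.
slider_weights = load_from_safetensors("age_slider.safetensors")  # hypothetical file
manager.add_loras(
    "age-slider",  # hypothetical LoRA name
    slider_weights,
    1.0,
    unet_inclusions=["SelfAttention", "ResidualBlock", "Downsample", "Upsample"],
)

# The DPO LoRA is restricted to the UNet's cross-attention blocks.
dpo_weights = load_from_safetensors("dpo.safetensors")  # hypothetical file
manager.add_loras("dpo", dpo_weights, 1.4, unet_inclusions=["CrossAttentionBlock"])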