refiners/tests/adapters/test_self_attention_guidance.py
Laurent b957360c58
Some checks failed
CI / lint_and_typecheck (push) Has been cancelled
Spell checker / Spell check (push) Has been cancelled
move some tests into the adapters test folder
2024-09-09 17:40:50 +02:00

27 lines
931 B
Python

import pytest
import torch
from refiners.fluxion.utils import no_grad
from refiners.foundationals.latent_diffusion import StableDiffusion_1, StableDiffusion_XL
@no_grad()
@pytest.mark.parametrize("k_sd", [StableDiffusion_1, StableDiffusion_XL])
def test_set_self_attention_guidance(
    k_sd: type[StableDiffusion_1] | type[StableDiffusion_XL], test_device: torch.device
) -> None:
    """Exercise the SAG (self-attention guidance) adapter lifecycle on a pipeline.

    Covers, in order: no adapter on a fresh pipeline, enabling installs an
    adapter carrying the requested scale, re-enabling updates the scale on the
    already-installed adapter rather than installing a second one, and
    disabling removes the adapter entirely.

    Parametrized over both SD 1.x and SDXL pipeline classes; `test_device` is
    a fixture supplying the torch device to build the pipeline on.
    """
    sd = k_sd(device=test_device, dtype=torch.float16)
    # Fresh pipeline: no SAG adapter installed yet.
    # (_find_sag_adapter is private, hence the type: ignore markers throughout.)
    assert sd._find_sag_adapter() is None  # type: ignore
    sd.set_self_attention_guidance(enable=True, scale=0.42)
    # Enabling must install an adapter with the scale that was passed in.
    adapter = sd._find_sag_adapter()  # type: ignore
    assert adapter is not None
    assert adapter.scale == 0.42
    # Enabling again with a different scale must reuse the existing adapter
    # (compares equal to the one found above) and just update its scale.
    sd.set_self_attention_guidance(enable=True, scale=0.75)
    assert sd._find_sag_adapter() == adapter  # type: ignore
    assert adapter.scale == 0.75
    # Disabling must remove the adapter from the pipeline.
    sd.set_self_attention_guidance(enable=False)
    assert sd._find_sag_adapter() is None  # type: ignore