from typing import cast
from warnings import warn

import pytest
from torch import Tensor, allclose, device as Device, equal, isclose, randn

from refiners.fluxion import manual_seed
from refiners.foundationals.latent_diffusion.schedulers import DDIM, DDPM, DPMSolver, EulerScheduler


def test_ddpm_diffusers():
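    """The DDPM timestep schedule should match diffusers' DDPMScheduler."""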
    from diffusers import DDPMScheduler  # type: ignore

    diffusers_scheduler = DDPMScheduler(beta_schedule="scaled_linear", beta_start=0.00085, beta_end=0.012)
    diffusers_scheduler.set_timesteps(1000)
    refiners_scheduler = DDPM(num_inference_steps=1000)

    assert equal(diffusers_scheduler.timesteps, refiners_scheduler.timesteps)


@pytest.mark.parametrize("n_steps, last_step_first_order", [(5, False), (5, True), (30, False), (30, True)])
def test_dpm_solver_diffusers(n_steps: int, last_step_first_order: bool):
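    """DPMSolver outputs should match diffusers' DPMSolverMultistepScheduler at every denoising step."""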
    from diffusers import DPMSolverMultistepScheduler as DiffuserScheduler  # type: ignore

    manual_seed(0)

    # diffusers' `euler_at_final` corresponds to refiners' `last_step_first_order`
    diffusers_scheduler = DiffuserScheduler(
        beta_schedule="scaled_linear",
        beta_start=0.00085,
        beta_end=0.012,
        lower_order_final=False,
        euler_at_final=last_step_first_order,
    )
    diffusers_scheduler.set_timesteps(n_steps)
    refiners_scheduler = DPMSolver(num_inference_steps=n_steps, last_step_first_order=last_step_first_order)

    sample = randn(1, 3, 32, 32)
    noise = randn(1, 3, 32, 32)

    for step, timestep in enumerate(diffusers_scheduler.timesteps):
        diffusers_output = cast(Tensor, diffusers_scheduler.step(noise, timestep, sample).prev_sample)  # type: ignore
        refiners_output = refiners_scheduler(x=sample, noise=noise, step=step)
        assert allclose(diffusers_output, refiners_output, rtol=0.01), f"outputs differ at step {step}"


def test_ddim_diffusers():
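    """DDIM outputs should match diffusers' DDIMScheduler at every denoising step."""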
    from diffusers import DDIMScheduler  # type: ignore

    manual_seed(0)

    diffusers_scheduler = DDIMScheduler(
        beta_end=0.012,
        beta_schedule="scaled_linear",
        beta_start=0.00085,
        num_train_timesteps=1000,
        steps_offset=1,
        clip_sample=False,
    )
    diffusers_scheduler.set_timesteps(30)
    refiners_scheduler = DDIM(num_inference_steps=30)

    sample = randn(1, 4, 32, 32)
    noise = randn(1, 4, 32, 32)

    for step, timestep in enumerate(diffusers_scheduler.timesteps):
        diffusers_output = cast(Tensor, diffusers_scheduler.step(noise, timestep, sample).prev_sample)  # type: ignore
        refiners_output = refiners_scheduler(x=sample, noise=noise, step=step)
        assert allclose(diffusers_output, refiners_output, rtol=0.01), f"outputs differ at step {step}"


def test_euler_diffusers():
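    """EulerScheduler outputs and init_noise_sigma should match diffusers' EulerDiscreteScheduler."""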
    from diffusers import EulerDiscreteScheduler  # type: ignore

    manual_seed(0)

    diffusers_scheduler = EulerDiscreteScheduler(
        beta_end=0.012,
        beta_schedule="scaled_linear",
        beta_start=0.00085,
        num_train_timesteps=1000,
        steps_offset=1,
        timestep_spacing="linspace",
        use_karras_sigmas=False,
    )
    diffusers_scheduler.set_timesteps(30)
    refiners_scheduler = EulerScheduler(num_inference_steps=30)

    sample = randn(1, 4, 32, 32)
    noise = randn(1, 4, 32, 32)

    ref_init_noise_sigma = diffusers_scheduler.init_noise_sigma  # type: ignore
    assert isinstance(ref_init_noise_sigma, Tensor)
    assert isclose(ref_init_noise_sigma, refiners_scheduler.init_noise_sigma), "init_noise_sigma differs"

    for step, timestep in enumerate(diffusers_scheduler.timesteps):
        diffusers_output = cast(Tensor, diffusers_scheduler.step(noise, timestep, sample).prev_sample)  # type: ignore
        refiners_output = refiners_scheduler(x=sample, noise=noise, step=step)
        assert allclose(diffusers_output, refiners_output, rtol=0.01), f"outputs differ at step {step}"


def test_scheduler_remove_noise():
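    """DDIM.remove_noise should match diffusers' predicted original sample (pred_original_sample)."""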
    from diffusers import DDIMScheduler  # type: ignore

    manual_seed(0)

    diffusers_scheduler = DDIMScheduler(
        beta_end=0.012,
        beta_schedule="scaled_linear",
        beta_start=0.00085,
        num_train_timesteps=1000,
        steps_offset=1,
        clip_sample=False,
    )
    diffusers_scheduler.set_timesteps(30)
    refiners_scheduler = DDIM(num_inference_steps=30)

    sample = randn(1, 4, 32, 32)
    noise = randn(1, 4, 32, 32)

    for step, timestep in enumerate(diffusers_scheduler.timesteps):
        diffusers_output = cast(Tensor, diffusers_scheduler.step(noise, timestep, sample).pred_original_sample)  # type: ignore
        refiners_output = refiners_scheduler.remove_noise(x=sample, noise=noise, step=step)
        assert allclose(diffusers_output, refiners_output, rtol=0.01), f"outputs differ at step {step}"


def test_scheduler_device(test_device: Device):
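    """Tensors produced by a scheduler built on test_device should live on that device."""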
    if test_device.type == "cpu":
        warn("this test is not run on CPU, skipping")
        pytest.skip()

    scheduler = DDIM(num_inference_steps=30, device=test_device)
    x = randn(1, 4, 32, 32, device=test_device)
    noise = randn(1, 4, 32, 32, device=test_device)
    noised = scheduler.add_noise(x, noise, scheduler.steps[0])
    assert noised.device == test_device