diff --git a/README.md b/README.md
index 3b1a688..3d548c1 100644
--- a/README.md
+++ b/README.md
@@ -179,7 +179,7 @@ The `Adapter` API lets you **easily patch models** by injecting parameters in ta
 E.g. to inject LoRA layers in all attention's linear layers:
 
 ```python
-from refiners.adapters.lora import SingleLoraAdapter
+from refiners.fluxion.adapters.lora import SingleLoraAdapter
 
 for layer in vit.layers(fl.Attention):
     for linear, parent in layer.walk(fl.Linear):
diff --git a/scripts/conversion/convert_diffusers_lora.py b/scripts/conversion/convert_diffusers_lora.py
index f0052d5..3afde74 100644
--- a/scripts/conversion/convert_diffusers_lora.py
+++ b/scripts/conversion/convert_diffusers_lora.py
@@ -12,7 +12,7 @@ from diffusers import DiffusionPipeline  # type: ignore
 import refiners.fluxion.layers as fl
 from refiners.fluxion.model_converter import ModelConverter
 from refiners.fluxion.utils import save_to_safetensors
-from refiners.adapters.lora import Lora, LoraAdapter
+from refiners.fluxion.adapters.lora import Lora, LoraAdapter
 from refiners.foundationals.latent_diffusion import SD1UNet
 from refiners.foundationals.latent_diffusion.lora import LoraTarget, lora_targets
 
diff --git a/src/refiners/adapters/__init__.py b/src/refiners/fluxion/adapters/__init__.py
similarity index 100%
rename from src/refiners/adapters/__init__.py
rename to src/refiners/fluxion/adapters/__init__.py
diff --git a/src/refiners/adapters/adapter.py b/src/refiners/fluxion/adapters/adapter.py
similarity index 100%
rename from src/refiners/adapters/adapter.py
rename to src/refiners/fluxion/adapters/adapter.py
diff --git a/src/refiners/adapters/lora.py b/src/refiners/fluxion/adapters/lora.py
similarity index 98%
rename from src/refiners/adapters/lora.py
rename to src/refiners/fluxion/adapters/lora.py
index b157fed..e6b55c2 100644
--- a/src/refiners/adapters/lora.py
+++ b/src/refiners/fluxion/adapters/lora.py
@@ -1,7 +1,7 @@
 from typing import Iterable, Generic, TypeVar, Any
 
 import refiners.fluxion.layers as fl
-from refiners.adapters.adapter import Adapter
+from refiners.fluxion.adapters.adapter import Adapter
 
 from torch import Tensor, device as Device, dtype as DType
 from torch.nn import Parameter as TorchParameter
diff --git a/src/refiners/foundationals/clip/concepts.py b/src/refiners/foundationals/clip/concepts.py
index b33b789..a247a52 100644
--- a/src/refiners/foundationals/clip/concepts.py
+++ b/src/refiners/foundationals/clip/concepts.py
@@ -1,4 +1,4 @@
-from refiners.adapters.adapter import Adapter
+from refiners.fluxion.adapters.adapter import Adapter
 from refiners.foundationals.clip.text_encoder import CLIPTextEncoder, TokenEncoder
 from refiners.foundationals.clip.tokenizer import CLIPTokenizer
 import refiners.fluxion.layers as fl
diff --git a/src/refiners/foundationals/latent_diffusion/lora.py b/src/refiners/foundationals/latent_diffusion/lora.py
index d5ddd25..bfde4aa 100644
--- a/src/refiners/foundationals/latent_diffusion/lora.py
+++ b/src/refiners/foundationals/latent_diffusion/lora.py
@@ -7,8 +7,8 @@ from torch import Tensor
 
 import refiners.fluxion.layers as fl
 from refiners.fluxion.utils import load_from_safetensors, load_metadata_from_safetensors
-from refiners.adapters.adapter import Adapter
-from refiners.adapters.lora import SingleLoraAdapter, LoraAdapter
+from refiners.fluxion.adapters.adapter import Adapter
+from refiners.fluxion.adapters.lora import SingleLoraAdapter, LoraAdapter
 from refiners.foundationals.clip.text_encoder import FeedForward, TransformerLayer
 from refiners.foundationals.latent_diffusion.cross_attention import CrossAttentionBlock2d
 
diff --git a/src/refiners/foundationals/latent_diffusion/range_adapter.py b/src/refiners/foundationals/latent_diffusion/range_adapter.py
index b30d062..a9a88a7 100644
--- a/src/refiners/foundationals/latent_diffusion/range_adapter.py
+++ b/src/refiners/foundationals/latent_diffusion/range_adapter.py
@@ -2,7 +2,7 @@ import math
 
 from torch import Tensor, arange, float32, exp, sin, cat, cos, device as Device, dtype as DType
 from jaxtyping import Float, Int
 
-from refiners.adapters.adapter import Adapter
+from refiners.fluxion.adapters.adapter import Adapter
 
 import refiners.fluxion.layers as fl
diff --git a/src/refiners/foundationals/latent_diffusion/reference_only_control.py b/src/refiners/foundationals/latent_diffusion/reference_only_control.py
index 8fd361c..f56707e 100644
--- a/src/refiners/foundationals/latent_diffusion/reference_only_control.py
+++ b/src/refiners/foundationals/latent_diffusion/reference_only_control.py
@@ -9,7 +9,7 @@ from refiners.fluxion.layers import (
     Identity,
     Parallel,
 )
-from refiners.adapters.adapter import Adapter
+from refiners.fluxion.adapters.adapter import Adapter
 from refiners.foundationals.latent_diffusion import SD1UNet
 from refiners.foundationals.latent_diffusion.cross_attention import CrossAttentionBlock
 from torch import Tensor
diff --git a/src/refiners/foundationals/latent_diffusion/stable_diffusion_1/controlnet.py b/src/refiners/foundationals/latent_diffusion/stable_diffusion_1/controlnet.py
index fd00aeb..2439985 100644
--- a/src/refiners/foundationals/latent_diffusion/stable_diffusion_1/controlnet.py
+++ b/src/refiners/foundationals/latent_diffusion/stable_diffusion_1/controlnet.py
@@ -7,7 +7,7 @@ from refiners.foundationals.latent_diffusion.stable_diffusion_1.unet import (
     ResidualBlock,
     TimestepEncoder,
 )
-from refiners.adapters.adapter import Adapter
+from refiners.fluxion.adapters.adapter import Adapter
 from refiners.foundationals.latent_diffusion.range_adapter import RangeAdapter2d
 from typing import cast, Iterable
 from torch import Tensor, device as Device, dtype as DType
diff --git a/src/refiners/foundationals/latent_diffusion/stable_diffusion_xl/text_encoder.py b/src/refiners/foundationals/latent_diffusion/stable_diffusion_xl/text_encoder.py
index 5d159f4..e334c3d 100644
--- a/src/refiners/foundationals/latent_diffusion/stable_diffusion_xl/text_encoder.py
+++ b/src/refiners/foundationals/latent_diffusion/stable_diffusion_xl/text_encoder.py
@@ -1,6 +1,6 @@
 from typing import cast
 from torch import device as Device, dtype as DType, Tensor, cat
-from refiners.adapters.adapter import Adapter
+from refiners.fluxion.adapters.adapter import Adapter
 from refiners.fluxion.context import Contexts
 import refiners.fluxion.layers as fl
 from refiners.foundationals.clip.text_encoder import CLIPTextEncoderG, CLIPTextEncoderL
diff --git a/src/refiners/training_utils/dropout.py b/src/refiners/training_utils/dropout.py
index 7b1a12c..90999ac 100644
--- a/src/refiners/training_utils/dropout.py
+++ b/src/refiners/training_utils/dropout.py
@@ -5,7 +5,7 @@ from torch.nn import Dropout as TorchDropout
 
 import refiners.fluxion.layers as fl
 from refiners.training_utils.callback import Callback
-from refiners.adapters.adapter import Adapter
+from refiners.fluxion.adapters.adapter import Adapter
 
 if TYPE_CHECKING:
     from refiners.training_utils.config import BaseConfig
diff --git a/tests/adapters/test_adapter.py b/tests/adapters/test_adapter.py
index 8f4ee52..daa4ef1 100644
--- a/tests/adapters/test_adapter.py
+++ b/tests/adapters/test_adapter.py
@@ -1,5 +1,5 @@
 import pytest
-from refiners.adapters.adapter import Adapter
+from refiners.fluxion.adapters.adapter import Adapter
 from refiners.fluxion.layers import Chain, Linear
 
 
diff --git a/tests/adapters/test_lora.py b/tests/adapters/test_lora.py
index 6255b88..b73304d 100644
--- a/tests/adapters/test_lora.py
+++ b/tests/adapters/test_lora.py
@@ -1,4 +1,4 @@
-from refiners.adapters.lora import Lora, SingleLoraAdapter, LoraAdapter
+from refiners.fluxion.adapters.lora import Lora, SingleLoraAdapter, LoraAdapter
 from torch import randn, allclose
 import refiners.fluxion.layers as fl
 
diff --git a/tests/adapters/test_range_adapter.py b/tests/adapters/test_range_adapter.py
index d15cd13..ede27d4 100644
--- a/tests/adapters/test_range_adapter.py
+++ b/tests/adapters/test_range_adapter.py
@@ -1,5 +1,5 @@
 import torch
-from refiners.adapters.adapter import Adapter
+from refiners.fluxion.adapters.adapter import Adapter
 from refiners.foundationals.latent_diffusion.range_adapter import RangeEncoder
 from refiners.fluxion.layers import Chain, Linear
 
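Since this change moves the `refiners.adapters` package under `refiners.fluxion`, downstream code needs the same one-line import fix. Below is a minimal sketch of the README's injection pattern under the new path; the toy `model`, the `rank` value, and the `inject()` call are illustrative assumptions, not taken from this diff:

```python
import refiners.fluxion.layers as fl
from refiners.fluxion.adapters.lora import SingleLoraAdapter  # was: refiners.adapters.lora

# Toy stand-in for a real model (e.g. the README's `vit`): any fluxion
# Chain containing Attention blocks works the same way.
model = fl.Chain(fl.Attention(embedding_dim=64, num_heads=4))

# Wrap every Linear inside every Attention layer with a LoRA adapter.
for layer in model.layers(fl.Attention):
    for linear, parent in layer.walk(fl.Linear):
        SingleLoraAdapter(target=linear, rank=16).inject(parent)
```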