diff --git a/docs/guides/adapting_sdxl/index.md b/docs/guides/adapting_sdxl/index.md
index 6fd6742..5e50080 100644
--- a/docs/guides/adapting_sdxl/index.md
+++ b/docs/guides/adapting_sdxl/index.md
@@ -364,7 +364,7 @@ python scripts/conversion/convert_diffusers_ip_adapter.py --from ip-adapter-plus
 
 This will download and convert both IP-Adapter and CLIP Image Encoder pretrained weights.
 
-Then, in your Python code, simply instantiate a [`SDXLIPAdapter`][refiners.foundationals.latent_diffusion.stable_diffusion_xl.image_prompt.SDXLIPAdapter] targetting our `sdxl.unet`, and inject it using a simple `.inject()` call:
+Then, in your Python code, simply instantiate a [`SDXLIPAdapter`][refiners.foundationals.latent_diffusion.stable_diffusion_xl.image_prompt.SDXLIPAdapter] targeting our `sdxl.unet`, and inject it using a simple `.inject()` call:
 
 ```py
 # IP-Adapter
diff --git a/src/refiners/fluxion/layers/chain.py b/src/refiners/fluxion/layers/chain.py
index ab65e5c..767c6e1 100644
--- a/src/refiners/fluxion/layers/chain.py
+++ b/src/refiners/fluxion/layers/chain.py
@@ -55,7 +55,7 @@ class Chain(ContextModule):
 
     This layer is the main building block of Fluxion.
     It is used to compose other layers in a sequential manner.
-    Similary to [`torch.nn.Sequential`][torch.nn.Sequential],
+    Similarly to [`torch.nn.Sequential`][torch.nn.Sequential],
     it calls each of its sub-layers in order,
     chaining their outputs as inputs to the next sublayer.
     However, it also provides additional methods to manipulate its sub-layers and their context.
diff --git a/src/refiners/foundationals/latent_diffusion/lora.py b/src/refiners/foundationals/latent_diffusion/lora.py
index 7ff2deb..429ad42 100644
--- a/src/refiners/foundationals/latent_diffusion/lora.py
+++ b/src/refiners/foundationals/latent_diffusion/lora.py
@@ -131,7 +131,7 @@ class SDLoraManager:
         SDLoraManager.auto_attach(unet_loras, self.unet, exclude=exclude)
 
     def remove_loras(self, *names: str) -> None:
-        """Remove mulitple LoRAs from the target.
+        """Remove multiple LoRAs from the target.
 
         Args:
             names: The names of the LoRAs to remove.
@@ -179,7 +179,7 @@ class SDLoraManager:
         self.update_scales({name: scale})
 
     def update_scales(self, scales: dict[str, float], /) -> None:
-        """Update the scales of mulitple LoRAs.
+        """Update the scales of multiple LoRAs.
 
         Args:
             scales: The scales to update.
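Note on the first hunk: the docs snippet it edits is truncated by the diff context right after the `# IP-Adapter` comment. A minimal sketch of the usage the corrected sentence describes, assuming an already-constructed `StableDiffusion_XL` model and placeholder weight filenames (the real guide uses the paths produced by the conversion script above):

```py
from refiners.fluxion.utils import load_from_safetensors
from refiners.foundationals.latent_diffusion import StableDiffusion_XL
from refiners.foundationals.latent_diffusion.stable_diffusion_xl.image_prompt import SDXLIPAdapter

sdxl = StableDiffusion_XL()  # base weights loading omitted for brevity

# IP-Adapter: instantiate against the UNet, then inject it in place.
ip_adapter = SDXLIPAdapter(
    target=sdxl.unet,
    weights=load_from_safetensors("ip-adapter-plus_sdxl.safetensors"),  # placeholder filename
)
ip_adapter.clip_image_encoder.load_from_safetensors("clip_image_encoder.safetensors")  # placeholder filename
ip_adapter.inject()
```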
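Note on the second hunk: the corrected docstring compares `Chain` to `torch.nn.Sequential`. A tiny illustration of that sequential chaining, using layers from `refiners.fluxion.layers` (shapes chosen arbitrarily):

```py
import torch
import refiners.fluxion.layers as fl

# Each sub-layer's output feeds the next, like torch.nn.Sequential.
mlp = fl.Chain(
    fl.Linear(in_features=8, out_features=16),
    fl.ReLU(),
    fl.Linear(in_features=16, out_features=4),
)
y = mlp(torch.randn(2, 8))  # y.shape == (2, 4)
```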
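Note on the last two hunks: `remove_loras` and `update_scales` are methods of `SDLoraManager`. A sketch of how they fit together, with a hypothetical LoRA name and weights file (the `add_loras` call and its exact signature are assumptions, not part of this patch):

```py
from refiners.fluxion.utils import load_from_safetensors
from refiners.foundationals.latent_diffusion import StableDiffusion_XL
from refiners.foundationals.latent_diffusion.lora import SDLoraManager

sdxl = StableDiffusion_XL()  # base weights loading omitted for brevity
manager = SDLoraManager(sdxl)

# Hypothetical LoRA name and weights file.
manager.add_loras("scifi", tensors=load_from_safetensors("scifi_lora.safetensors"))
manager.update_scales({"scifi": 0.75})  # rescale one (or several) attached LoRAs
manager.remove_loras("scifi")           # detach it again
```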