multi-upscaler: specify map_location when loading negative embedding

Laurent 2024-07-12 12:05:17 +00:00 committed by Cédric Deltheil
parent af1b302e78
commit 88325c3bbc

@@ -112,7 +112,9 @@ class MultiUpscalerAbstract(MultiDiffusion[T], ABC):
         if path is None:
             return ""
-        embeddings: Tensor | dict[str, Any] = torch.load(path, weights_only=True)  # type: ignore
+        embeddings: torch.Tensor | dict[str, Any] = torch.load(  # type: ignore
+            path, weights_only=True, map_location=self.device
+        )
         if isinstance(embeddings, dict):
             assert key is not None, "Key must be provided to access the negative embedding."
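
For context, a minimal sketch (outside the diff) of what map_location changes when loading a checkpoint with torch.load; the file path and device variable below are hypothetical and not taken from the repository:

    from typing import Any

    import torch
    from torch import Tensor

    # Hypothetical inputs, for illustration only.
    path = "negative_embedding.pt"
    device = torch.device("cuda" if torch.cuda.is_available() else "cpu")

    # map_location remaps every stored tensor onto `device` as it is
    # deserialized; weights_only restricts unpickling to tensors and
    # plain Python containers.
    embeddings: Tensor | dict[str, Any] = torch.load(path, weights_only=True, map_location=device)

Without map_location, torch.load restores each tensor onto the device it was serialized from (for example a specific CUDA device), which can fail or allocate memory unexpectedly on machines where that device is unavailable; mapping onto self.device, as this commit does, keeps the negative embedding on the upscaler's own device.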