Do not call __getattr__ with a keyword argument

The same applies to __setattr__. Use positional arguments instead. E.g.:

    import torch
    import refiners.fluxion.layers as fl
    m = torch.compile(fl.Linear(1,1))
    m(torch.zeros(1))
    # TypeError: Module.__getattr__() got an unexpected keyword argument 'name'
This commit is contained in:
Cédric Deltheil 2024-03-25 08:25:54 +00:00 committed by Cédric Deltheil
parent 0f87ea29e0
commit df0cc2aeb8

View file

@@ -37,10 +37,10 @@ class Module(TorchModule):
# Because PyTorch assumes its users write highly dynamic code,
# it returns Python's top type `Any`. In Refiners, static type
# checking is a core design value, hence we return `object` instead.
return super().__getattr__(name=name)
return super().__getattr__(name)
def __setattr__(self, name: str, value: Any) -> None:
return super().__setattr__(name=name, value=value)
return super().__setattr__(name, value)
def load_from_safetensors(self, tensors_path: str | Path, strict: bool = True) -> "Module":
"""Load the module's state from a SafeTensors file.