mirror of
https://github.com/finegrain-ai/refiners.git
synced 2024-11-21 13:48:46 +00:00
do not call __getattr__ with keyword argument
Same for __setattr__. Use positional arguments instead. E.g.:

import torch
import refiners.fluxion.layers as fl
m = torch.compile(fl.Linear(1, 1))
m(torch.zeros(1))  # TypeError: Module.__getattr__() got an unexpected keyword argument 'name'
This commit is contained in:
parent
0f87ea29e0
commit
df0cc2aeb8
|
@@ -37,10 +37,10 @@ class Module(TorchModule):
|
|||
# Because PyTorch assumes its users write highly dynamic code,
|
||||
# it returns Python's top type `Any`. In Refiners, static type
|
||||
# checking is a core design value, hence we return `object` instead.
|
||||
return super().__getattr__(name=name)
|
||||
return super().__getattr__(name)
|
||||
|
||||
def __setattr__(self, name: str, value: Any) -> None:
|
||||
return super().__setattr__(name=name, value=value)
|
||||
return super().__setattr__(name, value)
|
||||
|
||||
def load_from_safetensors(self, tensors_path: str | Path, strict: bool = True) -> "Module":
|
||||
"""Load the module's state from a SafeTensors file.
|
||||
|
|
Loading…
Reference in a new issue