mirror of https://github.com/finegrain-ai/refiners.git (synced 2024-11-22 06:08:46 +00:00)

fix minor typos in code and docs

parent: feff4c78ae
commit: fd01ba910e
@@ -215,7 +215,7 @@ class Chain(ContextModule):
     @staticmethod
     def _pretty_print_args(*args: Any) -> str:
         """
-        Flatten nested tuples and print tensors with their shape and other informations.
+        Flatten nested tuples and print tensors with their shape and other information.
         """

         def _flatten_tuple(t: Tensor | tuple[Any, ...], /) -> list[Any]:
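The docstring fixed above describes the two behaviours of the helper: flattening nested tuples and printing tensors with their shapes. A minimal, self-contained sketch of that idea (not the library's implementation; the exact formatting string is an assumption):

```python
from typing import Any

import torch
from torch import Tensor


def _flatten_tuple(t: Tensor | tuple[Any, ...], /) -> list[Any]:
    # Recursively unpack nested tuples into a flat list; leave other values untouched.
    if isinstance(t, tuple):
        return [leaf for item in t for leaf in _flatten_tuple(item)]
    return [t]


def pretty_print_args(*args: Any) -> str:
    # Render tensors with their shape and dtype, everything else with repr().
    flat = _flatten_tuple(args)
    rendered = [
        f"Tensor(shape={tuple(x.shape)}, dtype={x.dtype})" if isinstance(x, Tensor) else repr(x)
        for x in flat
    ]
    return ", ".join(rendered)


print(pretty_print_args(torch.zeros(2, 3), ("hello", (torch.ones(1),))))
# Tensor(shape=(2, 3), dtype=torch.float32), 'hello', Tensor(shape=(1,), dtype=torch.float32)
```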
@@ -447,7 +447,7 @@ class IPAdapter(Generic[T], fl.Chain, Adapter[T]):
         assert isinstance(encoder_clone[-3], fl.Lambda)  # pooling (classif token)
         for _ in range(3):
             encoder_clone.pop()
-        transfomer_layers = encoder_clone[-1]
-        assert isinstance(transfomer_layers, fl.Chain) and len(transfomer_layers) == 32
-        transfomer_layers.pop()
+        transformer_layers = encoder_clone[-1]
+        assert isinstance(transformer_layers, fl.Chain) and len(transformer_layers) == 32
+        transformer_layers.pop()
         return encoder_clone
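The rename above sits in code that trims a cloned image encoder so it returns the penultimate transformer layer's output. A hedged sketch of that trimming pattern with hypothetical stand-in layers (a toy 4-layer sub-chain, not the real 32-layer CLIP encoder; it only relies on fl.Chain supporting construction from modules, indexing, len() and pop(), as the hunk itself shows):

```python
import refiners.fluxion.layers as fl

# Hypothetical stand-in for the cloned encoder: a "transformer" sub-chain
# followed by a pooling Lambda and two trailing layers.
encoder_clone = fl.Chain(
    fl.Identity(),
    fl.Chain(*(fl.Identity() for _ in range(4))),  # stand-in transformer layers
    fl.Lambda(lambda x: x),                        # pooling (classif token)
    fl.Identity(),
    fl.Identity(),
)

assert isinstance(encoder_clone[-3], fl.Lambda)
for _ in range(3):  # drop the pooling Lambda and everything after it
    encoder_clone.pop()

transformer_layers = encoder_clone[-1]
assert isinstance(transformer_layers, fl.Chain) and len(transformer_layers) == 4
transformer_layers.pop()  # keep only the penultimate layer's output
```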
@@ -178,7 +178,7 @@ class ModelConfig(BaseModel):

 class GyroDropoutConfig(BaseModel):
     total_subnetworks: int = 512
-    concurent_subnetworks: int = 64
+    concurrent_subnetworks: int = 64
     iters_per_epoch: int = 512
     num_features_threshold: float = 5e5

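Since GyroDropoutConfig is a plain pydantic BaseModel, the corrected field name is what callers must use when constructing or parsing the config. A small self-contained sketch (the class is redefined locally with the same fields so the example runs without the trainer package):

```python
from pydantic import BaseModel


class GyroDropoutConfig(BaseModel):
    total_subnetworks: int = 512
    concurrent_subnetworks: int = 64
    iters_per_epoch: int = 512
    num_features_threshold: float = 5e5


# Omitted fields fall back to their defaults; the corrected key name is required.
config = GyroDropoutConfig(concurrent_subnetworks=128)
assert config.total_subnetworks == 512
assert config.concurrent_subnetworks == 128
```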
@@ -213,7 +213,7 @@ class TrainingClock:
         return int(time.time() - self.start_time)

     @cached_property
-    def evalution_interval_steps(self) -> int:
+    def evaluation_interval_steps(self) -> int:
         return self.convert_time_unit_to_steps(
             number=self.evaluation_interval["number"], unit=self.evaluation_interval["unit"]
         )
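The renamed cached property turns the configured evaluation interval (a number plus a time unit) into a step count via convert_time_unit_to_steps. A hedged, self-contained sketch of that conversion; the TimeUnit enum and the steps_per_epoch bookkeeping below are assumptions for illustration, not the trainer's actual types:

```python
from enum import Enum
from functools import cached_property
from typing import Any


class TimeUnit(Enum):  # assumed unit set, for illustration only
    STEP = "step"
    EPOCH = "epoch"


class MiniClock:
    def __init__(self, steps_per_epoch: int, evaluation_interval: dict[str, Any]) -> None:
        self.steps_per_epoch = steps_per_epoch
        self.evaluation_interval = evaluation_interval
        self.step = 0

    def convert_time_unit_to_steps(self, number: int, unit: TimeUnit) -> int:
        # Epoch-based intervals scale by the number of steps in one epoch.
        return number * (self.steps_per_epoch if unit is TimeUnit.EPOCH else 1)

    @cached_property
    def evaluation_interval_steps(self) -> int:
        return self.convert_time_unit_to_steps(
            number=self.evaluation_interval["number"], unit=self.evaluation_interval["unit"]
        )


clock = MiniClock(steps_per_epoch=100, evaluation_interval={"number": 2, "unit": TimeUnit.EPOCH})
assert clock.evaluation_interval_steps == 200
```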
@@ -244,7 +244,7 @@ class TrainingClock:

     @property
     def is_evaluation_step(self) -> bool:
-        return self.step % self.evalution_interval_steps == 0
+        return self.step % self.evaluation_interval_steps == 0

     @property
     def is_checkpointing_step(self) -> bool:
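The second TrainingClock hunk only updates the call site of the renamed property. Reusing MiniClock and TimeUnit from the sketch above, the divisibility check it performs looks like this (hypothetical, not the trainer's actual loop):

```python
class MiniClockWithFlags(MiniClock):
    @property
    def is_evaluation_step(self) -> bool:
        # Evaluate whenever the current step is a multiple of the interval.
        return self.step % self.evaluation_interval_steps == 0


clock = MiniClockWithFlags(steps_per_epoch=100, evaluation_interval={"number": 2, "unit": TimeUnit.EPOCH})
evaluated_at = []
for _ in range(600):
    clock.step += 1
    if clock.is_evaluation_step:
        evaluated_at.append(clock.step)

assert evaluated_at == [200, 400, 600]
```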