refiners/tests/training_utils/mock_config_2_models.toml
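
# Mock training configuration with two models for the training_utils tests:
# mock_model1 sets a model-level learning_rate, while mock_model2 falls back
# to the shared [optimizer] settings.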

[models.mock_model1]
train = true
learning_rate = 1e-5  # model-level override of the optimizer learning rate

[models.mock_model2]
train = true  # no learning_rate set: uses the [optimizer] value

[training]
duration = "100:epoch"  # time values use the "<number>:<unit>" format
seed = 0
batch_size = 4
gradient_accumulation = "4:step"
clip_grad_norm = 1.0
evaluation_interval = "5:epoch"
evaluation_seed = 1

[optimizer]
optimizer = "SGD"
learning_rate = 1  # base learning rate for models without an override
momentum = 0.9

[scheduler]
scheduler_type = "ConstantLR"
update_interval = "1:step"

[dropout]
dropout = 0.0

[checkpointing]
save_interval = "10:epoch"

[wandb]
mode = "disabled"  # W&B logging disabled for tests
project = "mock_project"