chore: bunch of small stuff

Author: Laurent Fainsin
Date:   2022-09-12 09:28:29 +02:00
Commit: 85c2febcac (parent b701afe363)

4 changed files with 82 additions and 43 deletions

(Diff for one of the four files suppressed because one or more lines are too long.)

@@ -47,7 +47,6 @@ if __name__ == "__main__":
     )
 
     # load checkpoint
-    # module.load_state_dict(torch.load()["state_dict"])
     # module.load_from_checkpoint("/tmp/model.ckpt")
 
     # log gradients and weights regularly
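The two commented-out loaders behave differently, which is worth noting since only one survives: load_state_dict copies weights into an existing module in place, while load_from_checkpoint is a classmethod that builds and returns a fresh instance. A minimal sketch of the distinction, assuming a LightningModule subclass named DetectionModule (a hypothetical name; the real class lives elsewhere in the repo):

    import torch

    # in-place: copy weights into an already-constructed module
    module.load_state_dict(torch.load("/tmp/model.ckpt")["state_dict"])

    # classmethod: returns a new, fully configured module; the result
    # must be assigned, calling it on an instance changes nothing in place
    module = DetectionModule.load_from_checkpoint("/tmp/model.ckpt")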
@@ -68,10 +67,10 @@ if __name__ == "__main__":
         precision=wandb.config.PRECISION,
         logger=logger,
         log_every_n_steps=5,
-        val_check_interval=50,
+        val_check_interval=200,
         callbacks=[
-            EarlyStopping(monitor="valid/map", mode="max", patience=10, min_delta=0.01),
-            ModelCheckpoint(monitor="valid/map", mode="max"),
+            EarlyStopping(monitor="valid/bbox/map", mode="max", patience=10, min_delta=0.01),
+            ModelCheckpoint(monitor="valid/bbox/map", mode="max"),
             # ModelPruning("l1_unstructured", amount=0.5),
             LearningRateMonitor(log_momentum=True),
             RichModelSummary(max_depth=2),
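Both callbacks now watch the nested key "valid/bbox/map" (the shape torchmetrics' MeanAveragePrecision output takes when logged under a "valid/" prefix, an assumption here). A minimal sketch of how these pieces plug into the Trainer, with max_epochs and the module/datamodule names as assumptions:

    from pytorch_lightning import Trainer
    from pytorch_lightning.callbacks import EarlyStopping, ModelCheckpoint

    trainer = Trainer(
        max_epochs=50,
        val_check_interval=200,  # validate every 200 training batches
        callbacks=[
            # stop when valid/bbox/map fails to improve by at least 0.01
            # for 10 consecutive validation runs
            EarlyStopping(monitor="valid/bbox/map", mode="max", patience=10, min_delta=0.01),
            # keep the checkpoint with the best valid/bbox/map seen so far
            ModelCheckpoint(monitor="valid/bbox/map", mode="max"),
        ],
    )
    # trainer.fit(module, datamodule=datamodule)

Since patience counts validation runs, raising val_check_interval from 50 to 200 also stretches the effective early-stopping window in training steps.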

@@ -24,7 +24,7 @@ class RandomPaste(A.DualTransform):
         self,
         nb,
         image_dir,
-        scale_range=(0.05, 0.5),
+        scale_range=(0.02, 0.3),
         always_apply=True,
         p=1.0,
     ):
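scale_range bounds pasted-object size relative to the target image, so (0.02, 0.3) pastes smaller objects than the old (0.05, 0.5). A minimal sketch of how such a parameter is typically consumed in a custom albumentations DualTransform; only the signature comes from the diff, the body is an assumption:

    import random
    import albumentations as A

    class RandomPaste(A.DualTransform):
        def __init__(self, nb, image_dir, scale_range=(0.02, 0.3), always_apply=True, p=1.0):
            super().__init__(always_apply, p)
            self.nb = nb                    # number of objects to paste
            self.image_dir = image_dir      # directory of source crops
            self.scale_range = scale_range  # paste size as a fraction of the image

        def get_params(self):
            # one uniform scale per call, within the configured bounds
            return {"scale": random.uniform(*self.scale_range)}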

@@ -28,17 +28,17 @@
 WORKERS:
   value: 16
 EPOCHS:
-  value: 100
+  value: 50
 TRAIN_BATCH_SIZE:
-  value: 10
+  value: 6
 VALID_BATCH_SIZE:
   value: 2
 PREFETCH_FACTOR:
   value: 2
 LEARNING_RATE:
-  value: 0.0005
+  value: 0.001
 WEIGHT_DECAY:
-  value: 0.0005
+  value: 0.0001
 MOMENTUM:
   value: 0.9
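These keys use wandb's config-file format, where each parameter is wrapped in a value: mapping; the training script reads them back through wandb.config, as the Trainer hunk above does with wandb.config.PRECISION. A minimal sketch, with project name and file path as assumptions:

    import wandb

    # wandb.init accepts a path to a YAML file in this value: format
    wandb.init(project="demo", config="config-defaults.yaml")

    epochs = wandb.config.EPOCHS                # 50 after this change
    batch_size = wandb.config.TRAIN_BATCH_SIZE  # 6
    lr = wandb.config.LEARNING_RATE             # 0.001
    weight_decay = wandb.config.WEIGHT_DECAY    # 0.0001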