fix: bad logging

Former-commit-id: 221ec6b6bfcf4e2e616a1db688ef8e93a2bb5bfc [formerly 0e7792974319cbc693fdae3597d44f9c4c196b4d]
Former-commit-id: 70f179df977715a747b981f7e2784e3c7ed88028
Laurent Fainsin 2022-07-05 22:52:34 +02:00
parent 40ea1c3191
commit 1306452152

@@ -83,7 +83,10 @@ class UNet(pl.LightningModule):
                 ),
             )
-        wandb.log({log_key: table})  # replace by self.log
+        wandb.log(
+            {log_key: table},
+            commit=False,
+        )  # replace by self.log
 
     def training_step(self, batch, batch_idx):
         # unpacking
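For context on the first hunk: passing commit=False to wandb.log buffers the logged values without advancing the run's step counter, so they are flushed together with the next committing call. In this commit that presumably keeps the prediction table on the same wandb step as the metrics logged right afterwards. A minimal standalone sketch, assuming only the public wandb API; the project name and keys below are made up:

import wandb

# Minimal sketch of the commit=False behaviour; project name and keys are hypothetical.
run = wandb.init(project="demo", mode="offline")

for step in range(3):
    # commit=False buffers this dict without advancing the wandb step counter.
    wandb.log({"examples/table_rows": step * 10}, commit=False)
    # The default commit=True call flushes the buffered values together with
    # its own, so both dicts land on the same logged step.
    wandb.log({"train/loss": 1.0 / (step + 1)})

run.finish()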
@@ -153,7 +156,7 @@ class UNet(pl.LightningModule):
         mae = torch.stack([d["mae"] for d in validation_outputs]).mean()
 
         # logging
-        wandb.log(
+        self.log_dict(
             {
                 "val/accuracy": accuracy,
                 "val/bce": loss,