Commit 5e3bb780 authored by Gaëtan Caillaut's avatar Gaëtan Caillaut
Browse files

fix timedelta

parent ca61992d
from pathlib import Path
from datetime import datetime
from datetime import datetime, timedelta
from torch.utils.tensorboard import SummaryWriter
from load import *
from evaluation import *
......@@ -121,7 +121,7 @@ if __name__ == "__main__":
n_train = 0
t0_epoch = datetime.now()
batch_cumulated_time = datetime.timedelta()
batch_cumulated_time = timedelta()
for batch_id, (x, attention_mask, wids) in enumerate(train_loader, 1):
t0_batch = datetime.now()
......@@ -143,7 +143,7 @@ if __name__ == "__main__":
if batch_id % args.progress:
print(
f"BATCH {batch_id:05}/{epoch:04} - LOSS {loss.item()} - TIME {batch_cumulated_time}", flush=True)
batch_cumulated_time = datetime.timedelta()
batch_cumulated_time = timedelta()
writer.add_scalar("Loss/train", cumloss / len(train_loader), epoch)
t1_epoch = datetime.now()
......
Supports Markdown
Attach a file by drag & drop or click to upload. 0%
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment