Commit 11c00d6e authored by Anthony Larcher

monitor display

parent c8d0b2a3
@@ -377,7 +377,7 @@ class TrainingMonitor():
         self.logger.critical(f"Best accuracy {self.best_accuracy * 100.} obtained at epoch {self.best_accuracy_epoch}")
 
     def update(self,
-               epoch,
+               epoch=None,
                training_acc=None,
                training_loss=None,
                test_eer=None,
@@ -385,20 +385,21 @@ class TrainingMonitor():
                val_loss=None,
                val_acc=None):
 
-        self.current_epoch = epoch
-        if training_acc:
+        if epoch is not None:
+            self.current_epoch = epoch
+        if training_acc is not None:
             self.training_acc.append(training_acc)
-        if training_loss:
+        if training_loss is not None:
             self.training_loss.append(training_loss)
-        if val_eer:
+        if val_eer is not None:
             self.val_eer.append(val_eer)
-        if val_loss:
+        if val_loss is not None:
             self.val_loss.append(val_loss)
-        if val_acc:
+        if val_acc is not None:
             self.val_acc.append(val_acc)
 
         # remember best accuracy and save checkpoint
-        if self.compute_test_eer and test_eer:
+        if self.compute_test_eer and test_eer is not None:
             self.test_eer.append(test_eer)
             self.is_best = test_eer < self.best_eer
             self.best_eer = min(test_eer, self.best_eer)
@@ -407,7 +408,7 @@ class TrainingMonitor():
                 self.current_patience = self.init_patience
             else:
                 self.current_patience -= 1
-        elif val_eer:
+        elif val_eer is not None:
             self.is_best = val_eer < self.best_eer
             self.best_eer = min(val_eer, self.best_eer)
             if self.is_best:
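
The switch from truthiness tests to explicit `is not None` guards is the substantive fix in this hunk: a metric equal to 0.0 (or an epoch of 0) is falsy in Python, so the old checks would silently drop legitimate zero values. A minimal standalone sketch of the difference; the helper names below are illustrative, not part of SIDEKIT:

history = []

def update_truthy(val_loss=None):
    if val_loss:                  # bug: 0.0 is falsy, so a zero loss is skipped
        history.append(val_loss)

def update_explicit(val_loss=None):
    if val_loss is not None:      # correct: only a missing value is skipped
        history.append(val_loss)

update_truthy(val_loss=0.0)
print(history)                    # [] -- the zero loss was silently dropped
update_explicit(val_loss=0.0)
print(history)                    # [0.0] -- recorded as expected
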
@@ -1375,7 +1376,7 @@ def new_xtrain(dataset_description,
     for epoch in range(1, training_opts["epochs"] + 1):
-        monitor.update(epoch)
+        monitor.update(epoch=epoch)
 
         # Process one epoch and return the current model
         if monitor.current_patience == 0:
@@ -1405,15 +1406,13 @@ def new_xtrain(dataset_description,
         if training_opts["compute_test_eer"]:
             test_eer = new_test_metrics(model, device, training_opts["mixed_precision"])
 
-        monitor.update(epoch,
-                       test_eer=test_eer,
+        monitor.update(test_eer=test_eer,
                        val_eer=val_eer,
                        val_loss=val_loss,
                        val_acc=val_acc)
 
         monitor.display()
 
         # Save the current model and, if needed, update the best one
         # TODO: add an option to keep the models at certain epochs (for example, before the LR change)
         save_model(model, monitor, model_opts, training_opts, optimizer, scheduler)
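
Taken together, the two call sites show the pattern this commit enables: `monitor.update(epoch=epoch)` registers the epoch at the top of the loop, and the later metrics-only call no longer needs to repeat it, since `epoch` now defaults to `None` and `current_epoch` is only overwritten when an epoch is actually passed. A hedged sketch of that two-phase flow, using a stripped-down stand-in for `TrainingMonitor` (not the SIDEKIT class itself):

class MonitorSketch:
    """Illustrative stand-in for TrainingMonitor."""
    def __init__(self):
        self.current_epoch = None
        self.val_loss = []

    def update(self, epoch=None, val_loss=None):
        if epoch is not None:
            self.current_epoch = epoch      # only touched when an epoch is given
        if val_loss is not None:
            self.val_loss.append(val_loss)

monitor = MonitorSketch()
for epoch in range(1, 4):
    monitor.update(epoch=epoch)             # phase 1: record the epoch
    loss = 1.0 / epoch                      # placeholder for a real validation loss
    monitor.update(val_loss=loss)           # phase 2: epoch stays None, current_epoch is kept
print(monitor.current_epoch, monitor.val_loss)  # 3 [1.0, 0.5, 0.3333333333333333]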