Skip to content
Snippets Groups Projects
Commit c0835180 authored by Mateusz Klimaszewski's avatar Mateusz Klimaszewski
Browse files

Fix training loops and metrics.

parent b8c83784
Branches
Tags
2 merge requests: !15 "Merge develop to master", !14 "Install and docs"
......@@ -16,6 +16,7 @@ class FinishingTrainingCheckpointer(training.Checkpointer):
epoch: Union[int, str],
trainer: "allen_trainer.Trainer",
is_best_so_far: bool = False,
save_model_only: bool = False,
) -> None:
if trainer._learning_rate_scheduler.decreases <= 1 or epoch == trainer._num_epochs - 1:
super().save_checkpoint(epoch, trainer, is_best_so_far)
......
......@@ -84,7 +84,7 @@ class GradientDescentTrainer(training.GradientDescentTrainer):
logger.info("Beginning training.")
val_metrics: Dict[str, float] = {}
this_epoch_val_metric: float
this_epoch_val_metric: float = None
metrics: Dict[str, Any] = {}
epochs_trained = 0
training_start_time = time.time()
......@@ -141,7 +141,7 @@ class GradientDescentTrainer(training.GradientDescentTrainer):
# Check validation metric for early stopping
this_epoch_val_metric = val_metrics[self._validation_metric]
self._metric_tracker.add_metric(this_epoch_val_metric)
# self._metric_tracker.add_metric(this_epoch_val_metric)
train_metrics["patience"] = self._metric_tracker._patience
if self._metric_tracker.should_stop_early():
......
......@@ -241,10 +241,10 @@ class SemanticMetrics(metrics.Metric):
self.feats_score.correct_indices *
self.lemma_score.correct_indices *
self.attachment_scores.correct_indices *
enhanced_indices)
enhanced_indices) * mask.flatten()
total, correct_indices = self.detach_tensors(total, correct_indices)
self.em_score = (correct_indices.float().sum() / total).item()
total, correct_indices = self.detach_tensors(total, correct_indices.float().sum())
self.em_score = (correct_indices / total).item()
def get_metric(self, reset: bool) -> Dict[str, float]:
metrics_dict = {
......
0% loaded — or press "." to view the raw diff.
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment