Example #1
    def on_overwatch_metric_computed(
            self, current_overwatch_metric: OverWatchMetric):
        """
        AUTHORS:
        --------

        :author: Alix Leroy
        :author: Samuel Westlake

        DESCRIPTION:
        ------------

        Check whether saving the model is required and save it if so

        PARAMETERS:
        -----------

        :param current_overwatch_metric: OverWatchMetric: The metric to monitor for improvement

        RETURN:
        -------

        :return: None
        """

        # Save if there is no metric to compare against
        if self.best_overwatch_metric is None:
            self.best_overwatch_metric = current_overwatch_metric
            save = True
        else:
            # The metric improves when it decreases (e.g. a loss)
            if DEEP_SAVE_CONDITION_LESS.corresponds(current_overwatch_metric.get_condition()):
                # The model improved since the last batch => save
                if self.best_overwatch_metric.get_value() > current_overwatch_metric.get_value():
                    improvement = Decimal(self.best_overwatch_metric.get_value()
                                          - current_overwatch_metric.get_value())
                    Notification(DEEP_NOTIF_SUCCESS,
                                 DEEP_MSG_SAVER_IMPROVED % (current_overwatch_metric.name,
                                                            "%.4e" % improvement))
                    self.best_overwatch_metric = current_overwatch_metric
                    save = True
                # No improvement => do not save
                else:
                    Notification(DEEP_NOTIF_INFO,
                                 DEEP_MSG_SAVER_NOT_IMPROVED % current_overwatch_metric.name)
                    save = False

            # The metric improves when it increases (e.g. classification accuracy)
            elif DEEP_SAVE_CONDITION_GREATER.corresponds(current_overwatch_metric.get_condition()):
                # The model improved since the last batch => save
                if self.best_overwatch_metric.get_value() < current_overwatch_metric.get_value():
                    improvement = Decimal(current_overwatch_metric.get_value()
                                          - self.best_overwatch_metric.get_value())
                    Notification(DEEP_NOTIF_SUCCESS,
                                 DEEP_MSG_SAVER_IMPROVED % (current_overwatch_metric.name,
                                                            "%.4e" % improvement))
                    self.best_overwatch_metric = current_overwatch_metric
                    save = True
                # No improvement => do not save
                else:
                    Notification(DEEP_NOTIF_INFO,
                                 DEEP_MSG_SAVER_NOT_IMPROVED % current_overwatch_metric.name)
                    save = False

            else:
                Notification(DEEP_NOTIF_FATAL,
                             "The following saving condition does not exist: %s" %
                             current_overwatch_metric.get_condition())
                save = False

        if save:
            self.save_model()
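
The handler above saves a checkpoint only when the watched metric improves in the direction given by its condition (smaller for a loss, greater for an accuracy). The following is a minimal, self-contained sketch of that comparison logic only; SimpleMetric, should_save and the LESS/GREATER constants are invented stand-ins for illustration, not the Deeplodocus OverWatchMetric or DEEP_SAVE_CONDITION_* objects.

# Minimal sketch of the "save only on improvement" decision
# (SimpleMetric, LESS and GREATER are illustrative stand-ins)
LESS, GREATER = "less", "greater"

class SimpleMetric:
    def __init__(self, name, value, condition):
        self.name = name
        self.value = value
        self.condition = condition

def should_save(best, current):
    # First observation: always save and adopt it as the new best
    if best is None:
        return True
    # "less" metrics (e.g. a loss) improve by decreasing
    if current.condition == LESS:
        return current.value < best.value
    # "greater" metrics (e.g. accuracy) improve by increasing
    if current.condition == GREATER:
        return current.value > best.value
    raise ValueError("Unknown saving condition: %s" % current.condition)

best = None
for value in (0.9, 0.7, 0.8, 0.5):
    current = SimpleMetric("loss", value, LESS)
    if should_save(best, current):
        best = current
        print("loss %.2f -> improvement, save checkpoint" % value)
    else:
        print("loss %.2f -> no improvement, skip" % value)
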
Example #2
    def is_saving_required(self,
                           current_overwatch_metric: OverWatchMetric) -> bool:
        """
        AUTHORS:
        --------

        :author: Alix Leroy

        DESCRIPTION:
        ------------

        Check if saving the model is required

        PARAMETERS:
        -----------

        :param current_overwatch_metric: OverWatchMetric: The metric to monitor for improvement

        RETURN:
        -------

        :return -> bool: Whether the model should be saved or not
        """
        save = False

        # Do not save at the first epoch
        if self.best_overwatch_metric is None:
            self.best_overwatch_metric = current_overwatch_metric
            save = False

        # The metric improves when it decreases (e.g. a loss)
        elif current_overwatch_metric.get_condition() == DEEP_COMPARE_SMALLER:
            # If the model improved since last batch => Save
            if self.best_overwatch_metric.get_value() > current_overwatch_metric.get_value():
                self.best_overwatch_metric = current_overwatch_metric
                save = True

            # No improvement => do not save
            else:
                save = False

        # If the new metric has to be bigger than the best one (e.g. the accuracy of a classification)
        elif current_overwatch_metric.get_condition() == DEEP_COMPARE_BIGGER:
            # If the model improved since last batch => Save
            if self.best_overwatch_metric.get_value() < current_overwatch_metric.get_value():
                self.best_overwatch_metric = current_overwatch_metric
                save = True

            # No improvement => do not save
            else:
                save = False

        else:
            Notification(
                DEEP_NOTIF_FATAL,
                "The following saving condition does not exist: %s" %
                str(current_overwatch_metric.get_condition()))

        Thalamus().add_signal(signal=Signal(event=DEEP_EVENT_SAVING_REQUIRED,
                                            args={"saving_required": save}))
        return save