def __init__(self, threshold: Optional[float] = 0.5):
        """
        Predicts the hyponymy relation based on the entailment probability of an entity pair.

        @param threshold: threshold on the entailment probability; pairs with a probability larger than this value are regarded as a hyponymy relation.
        """
        self._auxiliary = HyponymyScoreLoss()
        self._threshold = threshold
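
# --- illustrative sketch (assumption; not part of the original class) ---
# Shows how the threshold configured above might be applied at prediction time,
# written as a standalone function: pairs whose entailment probability exceeds
# the threshold are regarded as standing in a hyponymy relation. The function
# name `is_hyponymy_by_entailment` is hypothetical.
def is_hyponymy_by_entailment(entailment_probability: float, threshold: float = 0.5) -> bool:
    return entailment_probability > threshold

# e.g. is_hyponymy_by_entailment(0.8) -> True, is_hyponymy_by_entailment(0.3) -> False
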
    def setUp(self) -> None:

        n_ary = 6
        n_digits = 4
        tau = 4.0

        self._n_ary = n_ary
        self._n_digits = n_digits

        vec_p_x_zero = np.array([0.02,0.05,0.8,0.8])
        vec_x_repr = np.array([1,2,0,0])
        vec_p_y_zero = np.array([0.02,0.05,0.1,0.6])
        vec_y_repr = np.array([1,2,3,0])
        mat_p_x_1 = utils.generate_probability_matrix(vec_p_x_zero, vec_x_repr, n_digits, n_ary, tau)
        mat_p_y_1 = utils.generate_probability_matrix(vec_p_y_zero, vec_y_repr, n_digits, n_ary, tau)

        vec_p_x_zero = np.array([0.02,0.05,0.05,0.8])
        vec_x_repr = np.array([1,2,3,0])
        vec_p_y_zero = np.array([0.02,0.05,0.6,0.6])
        vec_y_repr = np.array([1,2,0,0])
        mat_p_x_2 = utils.generate_probability_matrix(vec_p_x_zero, vec_x_repr, n_digits, n_ary, tau)
        mat_p_y_2 = utils.generate_probability_matrix(vec_p_y_zero, vec_y_repr, n_digits, n_ary, tau)

        vec_p_x_zero = np.array([0.02,0.02,0.02,0.02])
        vec_x_repr = np.array([2,2,3,3])
        vec_p_y_zero = np.array([0.02,0.02,0.02,0.02])
        vec_y_repr = np.array([1,1,2,2])
        mat_p_x_3 = utils.generate_probability_matrix(vec_p_x_zero, vec_x_repr, n_digits, n_ary, tau)
        mat_p_y_3 = utils.generate_probability_matrix(vec_p_y_zero, vec_y_repr, n_digits, n_ary, tau)

        # x:hypernym, y:hyponym
        # mat_p_*: (n_digits, n_ary)
        self._mat_p_x = mat_p_x_1
        self._mat_p_y = mat_p_y_1
        # arry_p_*: (n_batch, n_digits, n_ary)
        self._arry_p_x = np.stack([mat_p_x_1, mat_p_x_2, mat_p_x_3])
        self._arry_p_y = np.stack([mat_p_y_1, mat_p_y_2, mat_p_y_3])
        self._arry_p_batch = np.stack([mat_p_x_1, mat_p_x_2, mat_p_x_3, mat_p_y_1, mat_p_y_2, mat_p_y_3])
        # train_signal: (hypernym_index, hyponym_index, hyponymy_score); see the score sketch after this method
        self._hyponymy_tuples = [(0, 3, 1.0), (1, 4, -1.0), (2, 5, -4.0)] # [(x1, y1, 1.0), (x2, y2, -1.0), (x3, y3, -4.0)]

        self._t_mat_p_x = torch.from_numpy(self._mat_p_x)
        self._t_mat_p_y = torch.from_numpy(self._mat_p_y)
        self._t_arry_p_x = torch.from_numpy(self._arry_p_x)
        self._t_arry_p_y = torch.from_numpy(self._arry_p_y)
        self._t_arry_p_batch = torch.from_numpy(self._arry_p_batch)

        self._normalize_code_length = False
        self._normalize_coefficient_for_ground_truth = None
        self._loss_layer = HyponymyScoreLoss(normalize_hyponymy_score=self._normalize_code_length,
                                             normalize_coefficient_for_ground_truth=self._normalize_coefficient_for_ground_truth,
                                             distance_metric="mse")
    def __init__(
        self,
        model: MaskedAutoEncoder,
        loss_reconst: _Loss,
        loss_mutual_info: Optional[_Loss] = None,
        dataloader_train: Optional[DataLoader] = None,
        dataloader_val: Optional[DataLoader] = None,
        dataloader_test: Optional[DataLoader] = None,
        learning_rate: Optional[float] = 0.001,
        model_parameter_schedulers: Optional[Dict[str, Callable[[float], float]]] = None,
        loss_parameter_schedulers: Optional[Dict[str, Dict[str, Callable[[float], float]]]] = None,
    ):

        super(UnsupervisedTrainer, self).__init__()

        self._scale_loss_reconst = loss_reconst.scale
        self._scale_loss_mi = loss_mutual_info.scale if loss_mutual_info is not None else 1.

        self._model = model
        self._encoder = model._encoder
        self._decoder = model._decoder
        self._loss_reconst = loss_reconst
        self._loss_mutual_info = loss_mutual_info
        self._learning_rate = learning_rate
        self._dataloaders = {
            "train": dataloader_train,
            "val": dataloader_val,
            "test": dataloader_test
        }
        # auxiliary loss module that is used solely for validation
        self._auxiliary = HyponymyScoreLoss()

        # set model parameter scheduler
        if model_parameter_schedulers is None:
            self._model_parameter_schedulers = {}
        else:
            self._model_parameter_schedulers = model_parameter_schedulers

        if loss_parameter_schedulers is None:
            self._loss_parameter_schedulers = {}
        else:
            self._loss_parameter_schedulers = loss_parameter_schedulers
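
# --- illustrative sketch (assumption; not from the original trainer) ---
# Example shapes for the scheduler arguments accepted above: model parameter
# schedulers map a parameter name to a Callable[[float], float], and loss
# parameter schedulers nest one such mapping per loss. The parameter names and
# the interpretation of the callable's argument as a training-progress value
# are assumptions for illustration only.
example_model_parameter_schedulers = {
    "temperature": lambda progress: max(0.1, 1.0 - 0.9 * progress),
}
example_loss_parameter_schedulers = {
    "mutual_info": {
        "scale": lambda progress: min(1.0, 2.0 * progress),
    },
}
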
def __init__(self, threshold: Optional[float] = 0.0):
        # this is used to compute the soft code length
        self._auxiliary = HyponymyScoreLoss()
        self._threshold = threshold
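
# --- illustrative sketch (assumption; not from the original code) ---
# The comment above mentions the soft code length. One way to define it for a
# zero-terminated code, assuming independent digits, is the expected number of
# digits before the first zero: sum_d prod_{d'<=d} (1 - p_zero[d']). The function
# below illustrates that idea only; it is not HyponymyScoreLoss's actual API.
import numpy as np

def soft_code_length(p_zero: np.ndarray) -> float:
    # p_zero[d] = probability that digit d equals the termination symbol (zero)
    prob_nonzero = 1.0 - p_zero
    # probability that the code is still "alive" (all digits non-zero) up to digit d
    prob_alive = np.cumprod(prob_nonzero)
    return float(prob_alive.sum())

# e.g. soft_code_length(np.array([0.02, 0.05, 0.8, 0.8])) ≈ 2.13,
# close to the hard code length 2 of the representative code [1, 2, 0, 0] in setUp() above.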