def create_embeddings_layer(self, num_entities: int, num_relations: int,
                            embedding_dim: int) -> None:
    """Create separate head and tail entity box-embedding tables.

    NOTE(review): ``num_relations`` is accepted for interface
    compatibility but no relation embedding is created here — presumably
    handled elsewhere; confirm against the enclosing class.
    """
    # Both tables are built with identical construction arguments.
    shared_kwargs = dict(num_embeddings=num_entities,
                         box_embedding_dim=embedding_dim,
                         box_type=self.box_type,
                         sparse=True)
    self.entity_head = BoxEmbedding(**shared_kwargs)
    self.entity_tail = BoxEmbedding(**shared_kwargs)
    def create_embeddings_layer(self, num_entities: int, num_relations: int,
                                embedding_dim: int, single_box: bool,
                                entities_init_interval_center: float,
                                entities_init_interval_delta: float,
                                relations_init_interval_center: float,
                                relations_init_interval_delta: float) -> None:
        """Create entity box embeddings (``self.h``/``self.t``), a relation
        vector embedding (``self.r``), and a name-to-embedding lookup table.

        When ``single_box`` is True, ``self.t`` aliases ``self.h`` so head
        and tail lookups share one embedding table.

        NOTE(review): ``relations_init_interval_center`` and
        ``relations_init_interval_delta`` are accepted but never used —
        ``self.r`` is a plain ``Embedding`` with default initialization;
        confirm whether this is intentional.
        """
        self.h = BoxEmbedding(
            num_embeddings=num_entities,
            box_embedding_dim=embedding_dim,
            box_type=self.box_type,
            sparse=False,
            init_interval_center=entities_init_interval_center,
            init_interval_delta=entities_init_interval_delta)

        if not single_box:
            self.t = BoxEmbedding(
                num_embeddings=num_entities,
                box_embedding_dim=embedding_dim,
                box_type=self.box_type,
                sparse=False,
                init_interval_center=entities_init_interval_center,
                init_interval_delta=entities_init_interval_delta)
        else:
            # Tail lookups reuse the head embedding table.
            self.t = self.h

        self.r = Embedding(num_relations, embedding_dim)
        # Also create common name mapping: resolves the argument names used
        # elsewhere in the model to their embedding layer; 'label' is an
        # identity pass-through.
        self.appropriate_emb = {
            'p_h': self.h,
            'n_h': self.h,
            'h': self.h,
            'tr_h': self.h,
            'hr_e': self.h,
            'p_t': self.t,
            'n_t': self.t,
            't': self.t,
            'hr_t': self.t,
            'tr_e': self.t,
            'p_r': self.r,
            'n_r': self.r,
            'r': self.r,
            'hr_r': self.r,
            'tr_r': self.r,
            'label': (lambda x: x)
        }
# Exemplo n.º 3
# 0
    def create_entity_embedding_layer(self, num_entities, embedding_dim,
                                      box_type, sparse, init_interval_center,
                                      init_interval_delta) -> BoxEmbedding:
        """Build and return a single entity ``BoxEmbedding``.

        Args:
            num_entities: number of embeddings (entity vocabulary size).
            embedding_dim: dimensionality of each box embedding.
            box_type: box parameterization to use.
            sparse: whether the embedding uses sparse gradients.
            init_interval_center: center of the initialization interval.
            init_interval_delta: half-width of the initialization interval.

        Returns:
            A freshly constructed ``BoxEmbedding``.
        """
        # Bug fix: the original ignored the ``box_type`` and ``sparse``
        # arguments (it read ``self.box_type`` and hard-coded
        # ``sparse=False``); honor the caller-supplied values instead.
        return BoxEmbedding(num_embeddings=num_entities,
                            box_embedding_dim=embedding_dim,
                            box_type=box_type,
                            sparse=sparse,
                            init_interval_center=init_interval_center,
                            init_interval_delta=init_interval_delta)
    def create_embeddings_layer(self,
                                num_entities: int,
                                num_relations: int,
                                embedding_dim: int,
                                single_box: bool = False) -> None:
        """Create the head (and optionally tail) entity box embeddings.

        When ``single_box`` is True, the tail embedding aliases the head
        embedding instead of being a separate table.
        """
        def _entity_table() -> BoxEmbedding:
            # Head and tail tables share identical construction arguments.
            return BoxEmbedding(num_embeddings=num_entities,
                                box_embedding_dim=embedding_dim,
                                box_type=self.box_type,
                                sparse=True)

        self.entity_head = _entity_table()
        self.entity_tail = (self.entity_head
                            if single_box else _entity_table())
class DimWiseMaxMarginConditionalModel(MaxMarginBoxModel):
    """Max-margin box model scoring triples with dimension-wise conditional
    probabilities weighted by a per-relation softmax (see
    ``_get_triple_score``)."""

    def __init__(
            self,
            num_entities: int,
            num_relations: int,
            embedding_dim: int,
            box_type: str = 'SigmoidBoxTensor',
            single_box: bool = False,
            softbox_temp: float = 10.,
            margin: float = 0.0,
            number_of_negative_samples: int = 0,
            debug: bool = False,
            regularization_weight: float = 0,
            init_interval_center: float = 0.25,
            init_interval_delta: float = 0.1,
            # adversarial_negative: bool = False,
            # adv_neg_softmax_temp: float = 0.8
    ) -> None:
        """Delegate construction entirely to the base class.

        Args:
            num_entities: entity vocabulary size.
            num_relations: relation vocabulary size.
            embedding_dim: dimensionality of the box/relation embeddings.
            box_type: name of the box tensor parameterization.
            single_box: if True, head and tail entities share one embedding.
            softbox_temp: temperature used in soft-volume computations.
            margin: margin value (passed through to the base class).
            number_of_negative_samples: negatives per positive (base class).
            debug: debug flag (passed through to the base class).
            regularization_weight: weight of the bounding-box volume penalty
                (see ``get_regularization_penalty``).
            init_interval_center: box initialization interval center.
            init_interval_delta: box initialization interval half-width.
        """
        super().__init__(
            num_entities, num_relations, embedding_dim, box_type, single_box,
            softbox_temp, margin, number_of_negative_samples, debug,
            regularization_weight, init_interval_center, init_interval_delta)

    def create_embeddings_layer(self, num_entities: int, num_relations: int,
                                embedding_dim: int, single_box: bool,
                                entities_init_interval_center: float,
                                entities_init_interval_delta: float,
                                relations_init_interval_center: float,
                                relations_init_interval_delta: float) -> None:
        """Create the entity box embeddings (``self.h``/``self.t``), the
        relation vector embedding (``self.r``) and the name-to-embedding
        lookup table used to resolve argument names to embedding layers.

        NOTE(review): ``relations_init_interval_center``/``_delta`` are
        accepted but not used when constructing ``self.r`` — confirm
        whether this is intentional.
        """
        def _entity_boxes() -> BoxEmbedding:
            # Head and tail tables share identical construction arguments.
            return BoxEmbedding(
                num_embeddings=num_entities,
                box_embedding_dim=embedding_dim,
                box_type=self.box_type,
                sparse=False,
                init_interval_center=entities_init_interval_center,
                init_interval_delta=entities_init_interval_delta)

        self.h = _entity_boxes()
        # With a single box, tail lookups alias the head embedding table.
        self.t = self.h if single_box else _entity_boxes()
        self.r = Embedding(num_relations, embedding_dim)

        # Also create common name mapping: every alias the model uses is
        # resolved to its embedding layer; 'label' is an identity pass-through.
        self.appropriate_emb = {}
        for alias in ('p_h', 'n_h', 'h', 'tr_h', 'hr_e'):
            self.appropriate_emb[alias] = self.h
        for alias in ('p_t', 'n_t', 't', 'hr_t', 'tr_e'):
            self.appropriate_emb[alias] = self.t
        for alias in ('p_r', 'n_r', 'r', 'hr_r', 'tr_r'):
            self.appropriate_emb[alias] = self.r
        self.appropriate_emb['label'] = lambda x: x

    def _get_triple_score(self, head: BoxTensor, tail: BoxTensor,
                          relation: torch.Tensor) -> torch.Tensor:
        """Score a triple via dimension-wise conditional probabilities.

        Per dimension, computes the conditional P(tail | head) as the soft
        volume of the head/tail interval intersection divided by the head's
        soft volume, then returns the relation-softmax-weighted sum over
        dimensions.

        .. note:: We do not need to worry about the dimensions of the
            boxes — if it can sensibly broadcast, it will.
        """
        eps = 1e-38  # clamp floor to keep the division away from zero

        intersection_vol = head.dimension_wise_intersection_soft_volume(
            tail, temp=self.softbox_temp).clamp_min(eps)
        head_vol = head.dimension_wise_soft_volume(
            temp=self.softbox_temp).clamp_min(eps)

        # shape = (batch, num_dims)
        conditional_probs = intersection_vol / head_vol

        # Each relation weights the per-dimension conditionals via softmax.
        relation_weights = torch.nn.functional.softmax(relation, dim=-1)

        return (relation_weights * conditional_probs).sum(dim=-1)

    def get_regularization_penalty(self) -> Union[float, torch.Tensor]:
        """Penalize the log soft volume of the entities' bounding box(es).

        Returns 0.0 when regularization is disabled. A bounding box whose
        log volume is negative in every component (volume <= 1) is not
        penalized. The computed penalty is also recorded via the
        ``regularization_loss`` tracker.
        """
        if not (self.regularization_weight > 0):
            return 0.0

        penalty = self.h.get_bounding_box().log_soft_volume(
            temp=self.softbox_temp)
        # Don't penalize if the bounding box has volume 1 or less.
        if (penalty < 0).all():
            penalty = penalty * 0

        if not self.single_box:
            tail_penalty = self.t.get_bounding_box().log_soft_volume(
                temp=self.softbox_temp)
            # Same rule for the separate tail table: only add when some
            # component is non-negative.
            if not (tail_penalty < 0).all():
                penalty = penalty + tail_penalty

        reg_loss = self.regularization_weight * penalty
        # Track the reg loss for metrics/logging.
        self.regularization_loss(reg_loss.item())

        return reg_loss

    def get_histograms_to_log(self) -> Dict[str, torch.Tensor]:
        """Return flattened relation weights plus head/tail entity volume
        histograms for logging.

        NOTE(review): the keys keep the historical 'historgram' spelling —
        downstream consumers may depend on it, so it is preserved.
        """
        relation_weights = self.r.weight.cpu().data.numpy().flatten()

        return {
            "relation_weights": relation_weights,
            "head_entity_volume_historgram": self.get_h_vol(),
            "tail_entity_volume_historgram": self.get_t_vol(),
        }