Example #1
    def __init__(self, **kwargs):
        super(TuckER, self).__init__(self.__class__.__name__.lower())
        param_list = [
            "tot_entity", "tot_relation", "ent_hidden_size", "rel_hidden_size",
            "lmbda", "input_dropout", "hidden_dropout1", "hidden_dropout2"
        ]
        param_dict = self.load_params(param_list, kwargs)
        self.__dict__.update(param_dict)

        num_total_ent = self.tot_entity
        num_total_rel = self.tot_relation
        self.d1 = self.ent_hidden_size
        self.d2 = self.rel_hidden_size

        self.ent_embeddings = NamedEmbedding("ent_embedding", num_total_ent,
                                             self.d1)
        self.rel_embeddings = NamedEmbedding("rel_embedding", num_total_rel,
                                             self.d2)
        self.W = NamedEmbedding("W", self.d2, self.d1 * self.d1)
        nn.init.xavier_uniform_(self.ent_embeddings.weight)
        nn.init.xavier_uniform_(self.rel_embeddings.weight)
        nn.init.xavier_uniform_(self.W.weight)

        self.parameter_list = [
            self.ent_embeddings,
            self.rel_embeddings,
            self.W,
        ]

        self.inp_drop = nn.Dropout(self.input_dropout)
        self.hidden_dropout1 = nn.Dropout(self.hidden_dropout1)
        self.hidden_dropout2 = nn.Dropout(self.hidden_dropout2)
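
The core tensor W above is stored as a (d2, d1*d1) matrix. A minimal sketch of how TuckER typically combines these parameters, with the dropout and batch-norm steps omitted (the library's actual forward pass may differ):

import torch

def tucker_score_sketch(e1, r, W, ent_weight):
    # e1: (batch, d1) head embeddings, r: (batch, d2) relation embeddings,
    # W:  (d2, d1*d1) core tensor rows, ent_weight: (num_ent, d1)
    d1 = ent_weight.shape[1]
    W_mat = torch.mm(r, W).view(-1, d1, d1)           # relation-specific d1 x d1 mixing matrix
    x = torch.bmm(e1.unsqueeze(1), W_mat).squeeze(1)  # head embedding mixed by the core tensor
    return torch.mm(x, ent_weight.t())                # scores against every entity
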
Example #2
    def __init__(self, **kwargs):
        super(CP, self).__init__(self.__class__.__name__.lower())
        param_list = ["tot_entity", "tot_relation", "hidden_size", "lmbda"]
        param_dict = self.load_params(param_list, kwargs)
        self.__dict__.update(param_dict)

        num_total_ent = self.tot_entity
        num_total_rel = self.tot_relation
        k = self.hidden_size

        self.sub_embeddings = NamedEmbedding("sub_embedding", num_total_ent, k)
        self.rel_embeddings = NamedEmbedding("rel_embedding", num_total_rel, k)
        self.obj_embeddings = NamedEmbedding("obj_embedding", num_total_ent, k)

        nn.init.xavier_uniform_(self.sub_embeddings.weight)
        nn.init.xavier_uniform_(self.rel_embeddings.weight)
        nn.init.xavier_uniform_(self.obj_embeddings.weight)

        self.parameter_list = [
            self.sub_embeddings,
            self.rel_embeddings,
            self.obj_embeddings,
        ]

        self.loss = Criterion.pointwise_logistic
Example #3
    def __init__(self, **kwargs):
        super(ConvKB, self).__init__(self.__class__.__name__.lower())
        param_list = [
            "tot_entity", "tot_relation", "hidden_size", "num_filters",
            "filter_sizes"
        ]
        param_dict = self.load_params(param_list, kwargs)
        self.__dict__.update(param_dict)

        num_total_ent = self.tot_entity
        num_total_rel = self.tot_relation
        k = self.hidden_size
        num_filters = self.num_filters
        filter_sizes = self.filter_sizes
        device = kwargs["device"]

        self.ent_embeddings = NamedEmbedding("ent_embedding", num_total_ent, k)
        self.rel_embeddings = NamedEmbedding("rel_embedding", num_total_rel, k)
        nn.init.xavier_uniform_(self.ent_embeddings.weight)
        nn.init.xavier_uniform_(self.rel_embeddings.weight)

        self.parameter_list = [
            self.ent_embeddings,
            self.rel_embeddings,
        ]

        self.conv_list = [
            nn.Conv2d(1, num_filters, (3, filter_size),
                      stride=(1, 1)).to(device) for filter_size in filter_sizes
        ]
        conv_out_dim = num_filters * sum([(k - filter_size + 1)
                                          for filter_size in filter_sizes])
        self.fc1 = nn.Linear(in_features=conv_out_dim,
                             out_features=1,
                             bias=True)
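
The conv_out_dim bookkeeping can be checked in isolation. A standalone sketch with made-up sizes (k, num_filters, filter_sizes and the batch size are hypothetical), assuming the (h, r, t) embeddings are stacked into a 3 x k input as in ConvKB:

import torch
import torch.nn as nn

k, num_filters, filter_sizes, batch = 100, 50, [1, 2, 3], 4   # hypothetical sizes
convs = [nn.Conv2d(1, num_filters, (3, fs), stride=(1, 1)) for fs in filter_sizes]

x = torch.randn(batch, 1, 3, k)                               # stacked (h, r, t) embeddings
out = torch.cat([c(x).view(batch, -1) for c in convs], dim=1)
# num_filters * sum(k - fs + 1) = 50 * (100 + 99 + 98) = 14850
assert out.shape[1] == num_filters * sum(k - fs + 1 for fs in filter_sizes)
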
Example #4
    def __init__(self, **kwargs):
        super(Complex, self).__init__(self.__class__.__name__.lower())
        param_list = ["tot_entity", "tot_relation", "hidden_size", "lmbda"]
        param_dict = self.load_params(param_list, kwargs)
        self.__dict__.update(param_dict)

        num_total_ent = self.tot_entity
        num_total_rel = self.tot_relation
        k = self.hidden_size

        self.ent_embeddings_real = NamedEmbedding("emb_e_real", num_total_ent,
                                                  k)
        self.ent_embeddings_img = NamedEmbedding("emb_e_img", num_total_ent, k)
        self.rel_embeddings_real = NamedEmbedding("emb_rel_real",
                                                  num_total_rel, k)
        self.rel_embeddings_img = NamedEmbedding("emb_rel_img", num_total_rel,
                                                 k)
        nn.init.xavier_uniform_(self.ent_embeddings_real.weight)
        nn.init.xavier_uniform_(self.ent_embeddings_img.weight)
        nn.init.xavier_uniform_(self.rel_embeddings_real.weight)
        nn.init.xavier_uniform_(self.rel_embeddings_img.weight)

        self.parameter_list = [
            self.ent_embeddings_real,
            self.ent_embeddings_img,
            self.rel_embeddings_real,
            self.rel_embeddings_img,
        ]
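
The four tables hold the real and imaginary parts of complex entity and relation vectors. A minimal sketch of the standard ComplEx score Re(<h, r, conj(t)>), which the model's forward pass is expected to compute (the actual implementation may differ):

def complex_score_sketch(h_re, h_im, r_re, r_im, t_re, t_im):
    # Re(<h, r, conj(t)>), summed over the embedding dimension
    return (h_re * r_re * t_re
            + h_im * r_re * t_im
            + h_re * r_im * t_im
            - h_im * r_im * t_re).sum(-1)
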
Example #5
    def __init__(self, **kwargs):
        super(SimplE, self).__init__(self.__class__.__name__.lower())
        param_list = ["tot_entity", "tot_relation", "hidden_size", "lmbda"]
        param_dict = self.load_params(param_list, kwargs)
        self.__dict__.update(param_dict)

        num_total_ent = self.tot_entity
        num_total_rel = self.tot_relation
        k = self.hidden_size
        self.tot_train_triples = kwargs['tot_train_triples']
        self.batch_size = kwargs['batch_size']

        self.ent_head_embeddings = NamedEmbedding("ent_head_embedding",
                                                  num_total_ent, k)
        self.ent_tail_embeddings = NamedEmbedding("ent_tail_embedding",
                                                  num_total_ent, k)
        self.rel_embeddings = NamedEmbedding("rel_embedding", num_total_rel, k)
        self.rel_inv_embeddings = NamedEmbedding("rel_inv_embedding",
                                                 num_total_rel, k)

        nn.init.xavier_uniform_(self.ent_head_embeddings.weight)
        nn.init.xavier_uniform_(self.ent_tail_embeddings.weight)
        nn.init.xavier_uniform_(self.rel_embeddings.weight)
        nn.init.xavier_uniform_(self.rel_inv_embeddings.weight)

        self.parameter_list = [
            self.ent_head_embeddings,
            self.ent_tail_embeddings,
            self.rel_embeddings,
            self.rel_inv_embeddings,
        ]
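
SimplE keeps separate head/tail entity embeddings plus an inverse-relation table; the canonical score averages the two directed CP products. A minimal sketch (the library may additionally clip or regularize):

def simple_score_sketch(h_head, h_tail, t_head, t_tail, r, r_inv):
    # average of the forward and inverse CP products
    forward = (h_head * r * t_tail).sum(-1)
    inverse = (t_head * r_inv * h_tail).sum(-1)
    return (forward + inverse) / 2.0
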
Example #6
    def __init__(self, **kwargs):
        super(RotatE, self).__init__(self.__class__.__name__.lower())
        param_list = ["tot_entity", "tot_relation", "hidden_size", "margin"]
        param_dict = self.load_params(param_list, kwargs)
        self.__dict__.update(param_dict)

        self.embedding_range = (self.margin + 2.0) / self.hidden_size

        self.ent_embeddings = NamedEmbedding("ent_embeddings_real",
                                             self.tot_entity, self.hidden_size)
        self.ent_embeddings_imag = NamedEmbedding("ent_embeddings_imag",
                                                  self.tot_entity,
                                                  self.hidden_size)
        self.rel_embeddings = NamedEmbedding("rel_embeddings_real",
                                             self.tot_relation,
                                             self.hidden_size)
        nn.init.uniform_(self.ent_embeddings.weight, -self.embedding_range,
                         self.embedding_range)
        nn.init.uniform_(self.ent_embeddings_imag.weight,
                         -self.embedding_range, self.embedding_range)
        nn.init.uniform_(self.rel_embeddings.weight, -self.embedding_range,
                         self.embedding_range)

        self.parameter_list = [
            self.ent_embeddings,
            self.ent_embeddings_imag,
            self.rel_embeddings,
        ]

        self.loss = Criterion.pairwise_logistic
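
embedding_range fixes the width of the uniform initialization, and in the usual RotatE formulation it also converts the single real-valued relation table into rotation phases. A hedged sketch of that rotation step (whether this codebase scales phases exactly this way is an assumption):

import math
import torch

def rotate_score_sketch(h_re, h_im, rel, t_re, t_im, embedding_range, margin):
    phase = rel / (embedding_range / math.pi)     # map relation weights to angles
    r_re, r_im = torch.cos(phase), torch.sin(phase)
    rot_re = h_re * r_re - h_im * r_im            # complex rotation of the head entity
    rot_im = h_re * r_im + h_im * r_re
    dist = torch.sqrt((rot_re - t_re) ** 2 + (rot_im - t_im) ** 2).sum(-1)
    return margin - dist                          # higher score = better fit
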
Example #7
    def __init__(self, **kwargs):
        super(SLM, self).__init__(self.__class__.__name__.lower())
        param_list = [
            "tot_entity", "tot_relation", "rel_hidden_size", "ent_hidden_size"
        ]
        param_dict = self.load_params(param_list, kwargs)
        self.__dict__.update(param_dict)

        self.ent_embeddings = NamedEmbedding("ent_embedding", self.tot_entity,
                                             self.ent_hidden_size)
        self.rel_embeddings = NamedEmbedding("rel_embedding",
                                             self.tot_relation,
                                             self.rel_hidden_size)
        self.mr1 = NamedEmbedding("mr1", self.ent_hidden_size,
                                  self.rel_hidden_size)
        self.mr2 = NamedEmbedding("mr2", self.ent_hidden_size,
                                  self.rel_hidden_size)
        nn.init.xavier_uniform_(self.ent_embeddings.weight)
        nn.init.xavier_uniform_(self.rel_embeddings.weight)
        nn.init.xavier_uniform_(self.mr1.weight)
        nn.init.xavier_uniform_(self.mr2.weight)

        self.parameter_list = [
            self.ent_embeddings,
            self.rel_embeddings,
            self.mr1,
            self.mr2,
        ]

        self.loss = Criterion.pairwise_hinge
Example #8
    def __init__(self, **kwargs):
        super(TransR, self).__init__(self.__class__.__name__.lower())
        param_list = [
            "tot_entity", "tot_relation", "rel_hidden_size", "ent_hidden_size",
            "l1_flag"
        ]
        param_dict = self.load_params(param_list, kwargs)
        self.__dict__.update(param_dict)

        self.ent_embeddings = NamedEmbedding("ent_embedding", self.tot_entity,
                                             self.ent_hidden_size)
        self.rel_embeddings = NamedEmbedding("rel_embedding",
                                             self.tot_relation,
                                             self.rel_hidden_size)
        self.rel_matrix = NamedEmbedding(
            "rel_matrix", self.tot_relation,
            self.ent_hidden_size * self.rel_hidden_size)
        nn.init.xavier_uniform_(self.ent_embeddings.weight)
        nn.init.xavier_uniform_(self.rel_embeddings.weight)
        nn.init.xavier_uniform_(self.rel_matrix.weight)

        self.parameter_list = [
            self.ent_embeddings,
            self.rel_embeddings,
            self.rel_matrix,
        ]

        self.loss = Criterion.pairwise_hinge
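
rel_matrix stores each relation's projection matrix flattened to ent_hidden_size * rel_hidden_size. A minimal sketch of how TransR typically applies it (the real forward pass also normalizes the embeddings):

import torch

def transr_score_sketch(h, r, t, m_r_flat, d_ent, d_rel, l1_flag=True):
    m_r = m_r_flat.view(-1, d_ent, d_rel)             # (batch, d_ent, d_rel)
    h_r = torch.bmm(h.unsqueeze(1), m_r).squeeze(1)   # project head into relation space
    t_r = torch.bmm(t.unsqueeze(1), m_r).squeeze(1)   # project tail into relation space
    diff = h_r + r - t_r
    return diff.abs().sum(-1) if l1_flag else diff.norm(dim=-1)
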
Example #9
    def __init__(self, **kwargs):
        super(ConvE, self).__init__(self.__class__.__name__.lower())
        param_list = ["tot_entity", "tot_relation", "hidden_size", "hidden_size_1", 
                      "lmbda", "input_dropout", "feature_map_dropout", "hidden_dropout"]
        param_dict = self.load_params(param_list, kwargs)
        self.__dict__.update(param_dict)

        self.hidden_size_2 = self.hidden_size // self.hidden_size_1

        num_total_ent = self.tot_entity
        num_total_rel = self.tot_relation
        k = self.hidden_size

        self.ent_embeddings = nn.Embedding(num_total_ent, k)
        
        # ConvE uses reciprocal relations, so every relation
        # also has a mirrored reverse relation embedding.
        self.rel_embeddings = nn.Embedding(num_total_rel*2, k)
        
        self.b = nn.Embedding(1, num_total_ent)

        self.bn0 = nn.BatchNorm2d(1)
        self.inp_drop = nn.Dropout(self.input_dropout)
        self.conv2d_1 = nn.Conv2d(1, 32, (3, 3), stride=(1, 1))
        self.bn1 = nn.BatchNorm2d(32)
        self.feat_drop = nn.Dropout2d(self.feature_map_dropout)
        self.fc = nn.Linear((2*self.hidden_size_2-3+1)*(self.hidden_size_1-3+1)*32, k)  # conv output height * width * out_channels
        self.hidden_drop = nn.Dropout(self.hidden_dropout)
        self.bn2 = nn.BatchNorm1d(k)

        self.parameter_list = [
            NamedEmbedding(self.ent_embeddings, "ent_embedding"),
            NamedEmbedding(self.rel_embeddings, "rel_embedding"),
            NamedEmbedding(self.b, "b"),
        ]
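
The in_features of self.fc follows from stacking the reshaped head and relation embeddings into a (2*hidden_size_2) x hidden_size_1 "image" and running the 3x3 valid convolution over it. A standalone check with hypothetical sizes (hidden_size=200, hidden_size_1=20, so hidden_size_2=10):

import torch
import torch.nn as nn

h1, h2 = 20, 10                           # hidden_size_1, hidden_size_2 = 200 // 20
conv = nn.Conv2d(1, 32, (3, 3), stride=(1, 1))

stacked = torch.randn(4, 1, 2 * h2, h1)   # head and relation reshaped and stacked
flat = conv(stacked).view(4, -1)
# (2*h2 - 3 + 1) * (h1 - 3 + 1) * 32 = 18 * 18 * 32 = 10368
assert flat.shape[1] == (2 * h2 - 3 + 1) * (h1 - 3 + 1) * 32
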
Example #10
    def __init__(self, **kwargs):
        super(InteractE, self).__init__(self.__class__.__name__.lower())
        param_list = [
            "tot_entity", "tot_relation", "input_dropout", "hidden_dropout",
            "feature_map_dropout", "feature_permutation", "num_filters",
            "kernel_size", "reshape_height", "reshape_width"
        ]
        param_dict = self.load_params(param_list, kwargs)
        self.__dict__.update(param_dict)

        self.hidden_size = self.reshape_width * self.reshape_height
        self.device = kwargs["device"]

        self.ent_embeddings = NamedEmbedding("ent_embeddings",
                                             self.tot_entity,
                                             self.hidden_size,
                                             padding_idx=None)
        self.rel_embeddings = NamedEmbedding("rel_embeddings",
                                             self.tot_relation,
                                             self.hidden_size,
                                             padding_idx=None)
        self.bceloss = nn.BCELoss()

        self.inp_drop = nn.Dropout(self.input_dropout)
        self.hidden_drop = nn.Dropout(self.hidden_dropout)
        self.feature_map_drop = nn.Dropout2d(self.feature_map_dropout)
        self.bn0 = nn.BatchNorm2d(self.feature_permutation)

        flat_sz_h = self.reshape_height
        flat_sz_w = 2 * self.reshape_width
        self.padding = 0

        self.bn1 = nn.BatchNorm2d(self.num_filters * self.feature_permutation)
        self.flat_sz = flat_sz_h * flat_sz_w * self.num_filters * self.feature_permutation

        self.bn2 = nn.BatchNorm1d(self.hidden_size)
        self.fc = nn.Linear(self.flat_sz, self.hidden_size)
        self.chequer_perm = self._get_chequer_perm()

        self.register_parameter("bias",
                                nn.Parameter(torch.zeros(self.tot_entity)))
        self.register_parameter(
            "conv_filt",
            nn.Parameter(
                torch.zeros(self.num_filters, 1, self.kernel_size,
                            self.kernel_size)))

        nn.init.xavier_uniform_(self.ent_embeddings.weight)
        nn.init.xavier_uniform_(self.rel_embeddings.weight)
        nn.init.xavier_uniform_(self.conv_filt)

        self.parameter_list = [
            self.ent_embeddings,
            self.rel_embeddings,
        ]

        self.loss = Criterion.multi_class_bce
Example #11
    def __init__(self, **kwargs):
        super(KG2E, self).__init__(self.__class__.__name__.lower())
        param_list = [
            "tot_entity", "tot_relation", "hidden_size", "cmax", "cmin"
        ]
        param_dict = self.load_params(param_list, kwargs)
        self.__dict__.update(param_dict)

        # mu: the mean vector of each Gaussian embedding.
        self.ent_embeddings_mu = NamedEmbedding("ent_embeddings_mu",
                                                self.tot_entity,
                                                self.hidden_size)
        self.rel_embeddings_mu = NamedEmbedding("rel_embeddings_mu",
                                                self.tot_relation,
                                                self.hidden_size)

        # as the paper suggests, sigma is restricted to the diagonal of the covariance matrix.
        self.ent_embeddings_sigma = NamedEmbedding("ent_embeddings_sigma",
                                                   self.tot_entity,
                                                   self.hidden_size)
        self.rel_embeddings_sigma = NamedEmbedding("rel_embeddings_sigma",
                                                   self.tot_relation,
                                                   self.hidden_size)

        nn.init.xavier_uniform_(self.ent_embeddings_mu.weight)
        nn.init.xavier_uniform_(self.rel_embeddings_mu.weight)
        nn.init.xavier_uniform_(self.ent_embeddings_sigma.weight)
        nn.init.xavier_uniform_(self.rel_embeddings_sigma.weight)

        self.parameter_list = [
            self.ent_embeddings_mu,
            self.ent_embeddings_sigma,
            self.rel_embeddings_mu,
            self.rel_embeddings_sigma,
        ]

        min_ent = torch.min(
            torch.FloatTensor().new_full(
                self.ent_embeddings_sigma.weight.shape, self.cmax),
            torch.add(self.ent_embeddings_sigma.weight, 1.0))
        self.ent_embeddings_sigma.weight = nn.Parameter(
            torch.max(
                torch.FloatTensor().new_full(
                    self.ent_embeddings_sigma.weight.shape, self.cmin),
                min_ent))
        min_rel = torch.min(
            torch.FloatTensor().new_full(
                self.rel_embeddings_sigma.weight.shape, self.cmax),
            torch.add(self.rel_embeddings_sigma.weight, 1.0))
        self.rel_embeddings_sigma.weight = nn.Parameter(
            torch.max(
                torch.FloatTensor().new_full(
                    self.rel_embeddings_sigma.weight.shape, self.cmin),
                min_rel))

        self.loss = Criterion.pairwise_hinge
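
The nested torch.min / torch.max above simply bounds sigma between cmin and cmax after shifting it by 1.0. A compact sanity check of that equivalence with hypothetical bounds (shown for illustration, not as a suggested change):

import torch

cmin, cmax = 0.05, 5.0                    # hypothetical bounds
sigma = torch.randn(3, 4)

clamped = torch.clamp(sigma + 1.0, min=cmin, max=cmax)
reference = torch.max(torch.full_like(sigma, cmin),
                      torch.min(torch.full_like(sigma, cmax), sigma + 1.0))
assert torch.equal(clamped, reference)
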
Example #12
    def __init__(self, **kwargs):
        super(HypER, self).__init__(self.__class__.__name__.lower())
        param_list = [
            "tot_entity", "tot_relation", "ent_vec_dim", "rel_vec_dim",
            "input_dropout", "hidden_dropout", "feature_map_dropout"
        ]
        param_dict = self.load_params(param_list, kwargs)
        self.__dict__.update(param_dict)
        self.device = kwargs["device"]
        self.filt_h = 1
        self.filt_w = 9
        self.in_channels = 1
        self.out_channels = 32

        num_total_ent = self.tot_entity
        num_total_rel = self.tot_relation

        self.ent_embeddings = NamedEmbedding("ent_embeddings",
                                             num_total_ent,
                                             self.ent_vec_dim,
                                             padding_idx=0)
        self.rel_embeddings = NamedEmbedding("rel_embeddings",
                                             num_total_rel,
                                             self.rel_vec_dim,
                                             padding_idx=0)
        self.inp_drop = nn.Dropout(self.input_dropout)
        self.hidden_drop = nn.Dropout(self.hidden_dropout)
        self.feature_map_drop = nn.Dropout2d(self.feature_map_dropout)

        self.bn0 = torch.nn.BatchNorm2d(self.in_channels)
        self.bn1 = torch.nn.BatchNorm2d(self.out_channels)
        self.bn2 = torch.nn.BatchNorm1d(self.ent_vec_dim)
        self.register_parameter("b", nn.Parameter(torch.zeros(num_total_ent)))
        fc_length = (1 - self.filt_h + 1) * (self.ent_vec_dim - self.filt_w +
                                             1) * self.out_channels
        self.fc = torch.nn.Linear(fc_length, self.ent_vec_dim)
        fc1_length = self.in_channels * self.out_channels * self.filt_h * self.filt_w
        self.fc1 = torch.nn.Linear(self.rel_vec_dim, fc1_length)

        nn.init.xavier_uniform_(self.ent_embeddings.weight.data)
        nn.init.xavier_uniform_(self.rel_embeddings.weight.data)

        self.parameter_list = [
            self.ent_embeddings,
            self.rel_embeddings,
        ]

        self.loss = Criterion.multi_class_bce
Example #13
    def __init__(self, **kwargs):
        super(DistMult, self).__init__(self.__class__.__name__.lower())
        param_list = ["tot_entity", "tot_relation", "hidden_size", "lmbda"]
        param_dict = self.load_params(param_list, kwargs)
        self.__dict__.update(param_dict)

        num_total_ent = self.tot_entity
        num_total_rel = self.tot_relation
        k = self.hidden_size

        self.ent_embeddings = nn.Embedding(num_total_ent, k)
        self.rel_embeddings = nn.Embedding(num_total_rel, k)
        nn.init.xavier_uniform_(self.ent_embeddings.weight)
        nn.init.xavier_uniform_(self.rel_embeddings.weight)

        self.parameter_list = [
            NamedEmbedding(self.ent_embeddings, "ent_embedding"),
            NamedEmbedding(self.rel_embeddings, "rel_embedding"),
        ]
Example #14
    def __init__(self, **kwargs):
        super(MuRP, self).__init__(self.__class__.__name__.lower())
        param_list = ["tot_entity", "tot_relation", "hidden_size", "lmbda"]
        param_dict = self.load_params(param_list, kwargs)
        self.__dict__.update(param_dict)

        k = self.hidden_size
        self.device = kwargs["device"]

        self.ent_embeddings = NamedEmbedding("ent_embedding",
                                             self.tot_entity,
                                             k,
                                             padding_idx=0)
        self.ent_embeddings.weight.data = (1e-3 * torch.randn(
            (self.tot_entity, k), dtype=torch.double, device=self.device))
        self.rel_embeddings = NamedEmbedding("rel_embedding",
                                             self.tot_relation,
                                             k,
                                             padding_idx=0)
        self.rel_embeddings.weight.data = (1e-3 * torch.randn(
            (self.tot_relation, k), dtype=torch.double, device=self.device))
        self.wu = nn.Parameter(
            torch.tensor(np.random.uniform(-1, 1, (self.tot_relation, k)),
                         dtype=torch.double,
                         requires_grad=True,
                         device=self.device))
        self.bs = nn.Parameter(
            torch.zeros(self.tot_entity,
                        dtype=torch.double,
                        requires_grad=True,
                        device=self.device))
        self.bo = nn.Parameter(
            torch.zeros(self.tot_entity,
                        dtype=torch.double,
                        requires_grad=True,
                        device=self.device))

        self.parameter_list = [
            self.ent_embeddings,
            self.rel_embeddings,
        ]

        self.loss = Criterion.bce
Example #15
    def __init__(self, **kwargs):
        super(ANALOGY, self).__init__(self.__class__.__name__.lower())
        param_list = ["tot_entity", "tot_relation", "hidden_size", "lmbda"]
        param_dict = self.load_params(param_list, kwargs)
        self.__dict__.update(param_dict)

        k = self.hidden_size

        self.ent_embeddings = nn.Embedding(self.tot_entity, k)
        self.rel_embeddings = nn.Embedding(self.tot_relation, k)
        self.ent_embeddings_real = nn.Embedding(self.tot_entity, k // 2)
        self.ent_embeddings_img = nn.Embedding(self.tot_entity, k // 2)
        self.rel_embeddings_real = nn.Embedding(self.tot_relation, k // 2)
        self.rel_embeddings_img = nn.Embedding(self.tot_relation, k // 2)

        nn.init.xavier_uniform_(self.ent_embeddings.weight)
        nn.init.xavier_uniform_(self.rel_embeddings.weight)
        nn.init.xavier_uniform_(self.ent_embeddings_real.weight)
        nn.init.xavier_uniform_(self.ent_embeddings_img.weight)
        nn.init.xavier_uniform_(self.rel_embeddings_real.weight)
        nn.init.xavier_uniform_(self.rel_embeddings_img.weight)

        self.parameter_list = [
            NamedEmbedding(self.ent_embeddings, "ent_embedding"),
            NamedEmbedding(self.rel_embeddings, "rel_embedding"),
            NamedEmbedding(self.ent_embeddings_real, "emb_e_real"),
            NamedEmbedding(self.ent_embeddings_img, "emb_e_img"),
            NamedEmbedding(self.rel_embeddings_real, "emb_rel_real"),
            NamedEmbedding(self.rel_embeddings_img, "emb_rel_img"),
        ]
Example #16
    def __init__(self, **kwargs):
        super(TransM, self).__init__(self.__class__.__name__.lower())
        param_list = ["tot_entity", "tot_relation", "hidden_size", "l1_flag"]
        param_dict = self.load_params(param_list, kwargs)
        self.__dict__.update(param_dict)

        self.ent_embeddings = NamedEmbedding("ent_embedding", self.tot_entity,
                                             self.hidden_size)
        self.rel_embeddings = NamedEmbedding("rel_embedding",
                                             self.tot_relation,
                                             self.hidden_size)

        rel_head = {x: [] for x in range(self.tot_relation)}
        rel_tail = {x: [] for x in range(self.tot_relation)}
        rel_counts = {x: 0 for x in range(self.tot_relation)}
        train_triples_ids = kwargs["knowledge_graph"].read_cache_data(
            'triplets_train')
        for t in train_triples_ids:
            rel_head[t.r].append(t.h)
            rel_tail[t.r].append(t.t)
            rel_counts[t.r] += 1

        theta = [
            1 /
            np.log(2 + rel_counts[x] / (1 + len(rel_tail[x])) + rel_counts[x] /
                   (1 + len(rel_head[x]))) for x in range(self.tot_relation)
        ]
        self.theta = torch.from_numpy(np.asarray(theta, dtype=np.float32)).to(
            kwargs["device"])
        nn.init.xavier_uniform_(self.ent_embeddings.weight)
        nn.init.xavier_uniform_(self.rel_embeddings.weight)

        self.parameter_list = [
            self.ent_embeddings,
            self.rel_embeddings,
        ]

        self.loss = Criterion.pairwise_hinge
Example #17
    def __init__(self, **kwargs):
        super(ProjE_pointwise, self).__init__(self.__class__.__name__.lower())
        param_list = [
            "tot_entity", "tot_relation", "hidden_size", "lmbda",
            "hidden_dropout"
        ]
        param_dict = self.load_params(param_list, kwargs)
        self.__dict__.update(param_dict)

        num_total_ent = self.tot_entity
        num_total_rel = self.tot_relation
        k = self.hidden_size
        self.device = kwargs["device"]

        self.ent_embeddings = NamedEmbedding("ent_embedding", num_total_ent, k)
        self.rel_embeddings = NamedEmbedding("rel_embedding", num_total_rel, k)
        self.bc1 = NamedEmbedding("bc1", 1, k)
        self.De1 = NamedEmbedding("De1", 1, k)
        self.Dr1 = NamedEmbedding("Dr1", 1, k)
        self.bc2 = NamedEmbedding("bc2", 1, k)
        self.De2 = NamedEmbedding("De2", 1, k)
        self.Dr2 = NamedEmbedding("Dr2", 1, k)
        nn.init.xavier_uniform_(self.ent_embeddings.weight)
        nn.init.xavier_uniform_(self.rel_embeddings.weight)
        nn.init.xavier_uniform_(self.bc1.weight)
        nn.init.xavier_uniform_(self.De1.weight)
        nn.init.xavier_uniform_(self.Dr1.weight)
        nn.init.xavier_uniform_(self.bc2.weight)
        nn.init.xavier_uniform_(self.De2.weight)
        nn.init.xavier_uniform_(self.Dr2.weight)

        self.parameter_list = [
            self.ent_embeddings,
            self.rel_embeddings,
            self.bc1,
            self.De1,
            self.Dr1,
            self.bc2,
            self.De2,
            self.Dr2,
        ]

        self.loss = Criterion.multi_class
Example #18
    def __init__(self, **kwargs):
        super(SME, self).__init__(self.__class__.__name__.lower())
        param_list = ["tot_entity", "tot_relation", "hidden_size"]
        param_dict = self.load_params(param_list, kwargs)
        self.__dict__.update(param_dict)

        self.ent_embeddings = NamedEmbedding("ent_embedding", self.tot_entity,
                                             self.hidden_size)
        self.rel_embeddings = NamedEmbedding("rel_embedding",
                                             self.tot_relation,
                                             self.hidden_size)
        self.mu1 = NamedEmbedding("mu1", self.hidden_size, self.hidden_size)
        self.mu2 = NamedEmbedding("mu2", self.hidden_size, self.hidden_size)
        self.bu = NamedEmbedding("bu", self.hidden_size, 1)
        self.mv1 = NamedEmbedding("mv1", self.hidden_size, self.hidden_size)
        self.mv2 = NamedEmbedding("mv2", self.hidden_size, self.hidden_size)
        self.bv = NamedEmbedding("bv", self.hidden_size, 1)
        nn.init.xavier_uniform_(self.ent_embeddings.weight)
        nn.init.xavier_uniform_(self.rel_embeddings.weight)
        nn.init.xavier_uniform_(self.mu1.weight)
        nn.init.xavier_uniform_(self.mu2.weight)
        nn.init.xavier_uniform_(self.bu.weight)
        nn.init.xavier_uniform_(self.mv1.weight)
        nn.init.xavier_uniform_(self.mv2.weight)
        nn.init.xavier_uniform_(self.bv.weight)

        self.parameter_list = [
            self.ent_embeddings,
            self.rel_embeddings,
            self.mu1,
            self.mu2,
            self.bu,
            self.mv1,
            self.mv2,
            self.bv,
        ]

        self.loss = Criterion.pairwise_hinge
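
The mu*/bu and mv*/bv blocks correspond to the two halves of the SME (linear) energy g_u(h, r)^T g_v(t, r) from Bordes et al. A rough sketch; the orientation of the weight matrices is an assumption and may differ from this codebase:

def sme_linear_score_sketch(h, r, t, mu1, mu2, bu, mv1, mv2, bv):
    # g_u = M_u1 h + M_u2 r + b_u ; g_v = M_v1 t + M_v2 r + b_v
    g_u = h @ mu1.t() + r @ mu2.t() + bu.t()
    g_v = t @ mv1.t() + r @ mv2.t() + bv.t()
    return (g_u * g_v).sum(-1)            # dot product of the two projections
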
Example #19
    def __init__(self, **kwargs):
        super(OctonionE, self).__init__(self.__class__.__name__.lower())
        param_list = ["tot_entity", "tot_relation", "hidden_size", "lmbda"]
        param_dict = self.load_params(param_list, kwargs)
        self.__dict__.update(param_dict)

        num_total_ent = self.tot_entity
        num_total_rel = self.tot_relation
        k = self.hidden_size

        self.ent_embedding_1 = NamedEmbedding("ent_embedding_1", num_total_ent,
                                              k)
        self.ent_embedding_2 = NamedEmbedding("ent_embedding_2", num_total_ent,
                                              k)
        self.ent_embedding_3 = NamedEmbedding("ent_embedding_3", num_total_ent,
                                              k)
        self.ent_embedding_4 = NamedEmbedding("ent_embedding_4", num_total_ent,
                                              k)
        self.ent_embedding_5 = NamedEmbedding("ent_embedding_5", num_total_ent,
                                              k)
        self.ent_embedding_6 = NamedEmbedding("ent_embedding_6", num_total_ent,
                                              k)
        self.ent_embedding_7 = NamedEmbedding("ent_embedding_7", num_total_ent,
                                              k)
        self.ent_embedding_8 = NamedEmbedding("ent_embedding_8", num_total_ent,
                                              k)
        self.rel_embedding_1 = NamedEmbedding("rel_embedding_1", num_total_rel,
                                              k)
        self.rel_embedding_2 = NamedEmbedding("rel_embedding_2", num_total_rel,
                                              k)
        self.rel_embedding_3 = NamedEmbedding("rel_embedding_3", num_total_rel,
                                              k)
        self.rel_embedding_4 = NamedEmbedding("rel_embedding_4", num_total_rel,
                                              k)
        self.rel_embedding_5 = NamedEmbedding("rel_embedding_5", num_total_rel,
                                              k)
        self.rel_embedding_6 = NamedEmbedding("rel_embedding_6", num_total_rel,
                                              k)
        self.rel_embedding_7 = NamedEmbedding("rel_embedding_7", num_total_rel,
                                              k)
        self.rel_embedding_8 = NamedEmbedding("rel_embedding_8", num_total_rel,
                                              k)
        self.rel_w_embedding = NamedEmbedding("rel_w_embedding", num_total_rel,
                                              k)

        nn.init.xavier_uniform_(self.ent_embedding_1.weight.data)
        nn.init.xavier_uniform_(self.ent_embedding_2.weight.data)
        nn.init.xavier_uniform_(self.ent_embedding_3.weight.data)
        nn.init.xavier_uniform_(self.ent_embedding_4.weight.data)
        nn.init.xavier_uniform_(self.ent_embedding_5.weight.data)
        nn.init.xavier_uniform_(self.ent_embedding_6.weight.data)
        nn.init.xavier_uniform_(self.ent_embedding_7.weight.data)
        nn.init.xavier_uniform_(self.ent_embedding_8.weight.data)
        nn.init.xavier_uniform_(self.rel_embedding_1.weight.data)
        nn.init.xavier_uniform_(self.rel_embedding_2.weight.data)
        nn.init.xavier_uniform_(self.rel_embedding_3.weight.data)
        nn.init.xavier_uniform_(self.rel_embedding_4.weight.data)
        nn.init.xavier_uniform_(self.rel_embedding_5.weight.data)
        nn.init.xavier_uniform_(self.rel_embedding_6.weight.data)
        nn.init.xavier_uniform_(self.rel_embedding_7.weight.data)
        nn.init.xavier_uniform_(self.rel_embedding_8.weight.data)
        nn.init.xavier_uniform_(self.rel_w_embedding.weight.data)

        self.parameter_list = [
            self.ent_embedding_1,
            self.ent_embedding_2,
            self.ent_embedding_3,
            self.ent_embedding_4,
            self.ent_embedding_5,
            self.ent_embedding_6,
            self.ent_embedding_7,
            self.ent_embedding_8,
            self.rel_embedding_1,
            self.rel_embedding_2,
            self.rel_embedding_3,
            self.rel_embedding_4,
            self.rel_embedding_5,
            self.rel_embedding_6,
            self.rel_embedding_7,
            self.rel_embedding_8,
            self.rel_w_embedding,
        ]
Example #20
    def __init__(self, **kwargs):
        super(QuatE, self).__init__(self.__class__.__name__.lower())
        param_list = ["tot_entity", "tot_relation", "hidden_size", "lmbda"]
        param_dict = self.load_params(param_list, kwargs)
        self.__dict__.update(param_dict)

        num_total_ent = self.tot_entity
        num_total_rel = self.tot_relation
        k = self.hidden_size

        self.ent_s_embedding = NamedEmbedding("ent_s_embedding", num_total_ent,
                                              k)
        self.ent_x_embedding = NamedEmbedding("ent_x_embedding", num_total_ent,
                                              k)
        self.ent_y_embedding = NamedEmbedding("ent_y_embedding", num_total_ent,
                                              k)
        self.ent_z_embedding = NamedEmbedding("ent_z_embedding", num_total_ent,
                                              k)
        self.rel_s_embedding = NamedEmbedding("rel_s_embedding", num_total_rel,
                                              k)
        self.rel_x_embedding = NamedEmbedding("rel_x_embedding", num_total_rel,
                                              k)
        self.rel_y_embedding = NamedEmbedding("rel_y_embedding", num_total_rel,
                                              k)
        self.rel_z_embedding = NamedEmbedding("rel_z_embedding", num_total_rel,
                                              k)
        self.rel_w_embedding = NamedEmbedding("rel_w_embedding", num_total_rel,
                                              k)
        self.fc = nn.Linear(100, 50, bias=False)
        self.ent_dropout = nn.Dropout(0)
        self.rel_dropout = nn.Dropout(0)
        self.bn = nn.BatchNorm1d(k)

        r, i, j, k = QuatE._quaternion_init(self.tot_entity, self.hidden_size)
        r, i, j, k = torch.from_numpy(r), torch.from_numpy(
            i), torch.from_numpy(j), torch.from_numpy(k)
        self.ent_s_embedding.weight.data = r.type_as(
            self.ent_s_embedding.weight.data)
        self.ent_x_embedding.weight.data = i.type_as(
            self.ent_x_embedding.weight.data)
        self.ent_y_embedding.weight.data = j.type_as(
            self.ent_y_embedding.weight.data)
        self.ent_z_embedding.weight.data = k.type_as(
            self.ent_z_embedding.weight.data)

        s, x, y, z = QuatE._quaternion_init(self.tot_entity, self.hidden_size)
        s, x, y, z = torch.from_numpy(s), torch.from_numpy(
            x), torch.from_numpy(y), torch.from_numpy(z)
        self.rel_s_embedding.weight.data = s.type_as(
            self.rel_s_embedding.weight.data)
        self.rel_x_embedding.weight.data = x.type_as(
            self.rel_x_embedding.weight.data)
        self.rel_y_embedding.weight.data = y.type_as(
            self.rel_y_embedding.weight.data)
        self.rel_z_embedding.weight.data = z.type_as(
            self.rel_z_embedding.weight.data)

        nn.init.xavier_uniform_(self.ent_s_embedding.weight.data)
        nn.init.xavier_uniform_(self.ent_x_embedding.weight.data)
        nn.init.xavier_uniform_(self.ent_y_embedding.weight.data)
        nn.init.xavier_uniform_(self.ent_z_embedding.weight.data)
        nn.init.xavier_uniform_(self.rel_s_embedding.weight.data)
        nn.init.xavier_uniform_(self.rel_x_embedding.weight.data)
        nn.init.xavier_uniform_(self.rel_y_embedding.weight.data)
        nn.init.xavier_uniform_(self.rel_z_embedding.weight.data)
        nn.init.xavier_uniform_(self.rel_w_embedding.weight.data)

        self.parameter_list = [
            self.ent_s_embedding,
            self.ent_x_embedding,
            self.ent_y_embedding,
            self.ent_z_embedding,
            self.rel_s_embedding,
            self.rel_x_embedding,
            self.rel_y_embedding,
            self.rel_z_embedding,
            self.rel_w_embedding,
        ]
Example #21
    def __init__(self, **kwargs):
        super(AcrE, self).__init__(self.__class__.__name__.lower())
        param_list = [
            "tot_entity", "tot_relation", "hidden_size", "input_dropout",
            "hidden_dropout", "feature_map_dropout", "in_channels", "way",
            "first_atrous", "second_atrous", "third_atrous", "acre_bias"
        ]
        param_dict = self.load_params(param_list, kwargs)
        self.__dict__.update(param_dict)

        num_total_ent = self.tot_entity
        num_total_rel = self.tot_relation
        k = self.hidden_size

        self.ent_embeddings = NamedEmbedding("ent_embedding",
                                             num_total_ent,
                                             k,
                                             padding_idx=None)
        self.rel_embeddings = NamedEmbedding("rel_embedding",
                                             num_total_rel * 2,
                                             k,
                                             padding_idx=None)

        self.inp_drop = torch.nn.Dropout(self.input_dropout)
        self.hidden_drop = torch.nn.Dropout(self.hidden_dropout)
        self.feature_map_drop = torch.nn.Dropout2d(self.feature_map_dropout)
        self.bn0 = torch.nn.BatchNorm2d(1)
        self.bn1 = torch.nn.BatchNorm2d(self.in_channels)
        self.bn2 = torch.nn.BatchNorm1d(k)
        self.fc = torch.nn.Linear(self.in_channels * 400, k)
        self.padding = 0

        if self.way == "serial":
            self.conv1 = torch.nn.Conv2d(1,
                                         self.in_channels, (3, 3),
                                         1,
                                         self.first_atrous,
                                         bias=self.acre_bias,
                                         dilation=self.first_atrous)
            self.conv2 = torch.nn.Conv2d(self.in_channels,
                                         self.in_channels, (3, 3),
                                         1,
                                         self.second_atrous,
                                         bias=self.acre_bias,
                                         dilation=self.second_atrous)
            self.conv3 = torch.nn.Conv2d(self.in_channels,
                                         self.in_channels, (3, 3),
                                         1,
                                         self.third_atrous,
                                         bias=self.acre_bias,
                                         dilation=self.third_atrous)
        else:
            self.conv1 = torch.nn.Conv2d(1,
                                         self.in_channels, (3, 3),
                                         1,
                                         self.first_atrous,
                                         bias=self.acre_bias,
                                         dilation=self.first_atrous)
            self.conv2 = torch.nn.Conv2d(1,
                                         self.in_channels, (3, 3),
                                         1,
                                         self.second_atrous,
                                         bias=self.acre_bias,
                                         dilation=self.second_atrous)
            self.conv3 = torch.nn.Conv2d(1,
                                         self.in_channels, (3, 3),
                                         1,
                                         self.third_atrous,
                                         bias=self.acre_bias,
                                         dilation=self.third_atrous)
            self.W_gate_e = torch.nn.Linear(1600, 400)

        self.register_parameter("bias",
                                nn.Parameter(torch.zeros(num_total_ent)))

        nn.init.xavier_uniform_(self.ent_embeddings.weight.data)
        nn.init.xavier_uniform_(self.rel_embeddings.weight.data)

        self.parameter_list = [
            self.ent_embeddings,
            self.rel_embeddings,
        ]

        self.loss = Criterion.multi_class_bce