def init(self):
    """Set up description-based alignment: cache hyperparameters, load word/description
    inputs, build all TF graphs, and start the session.

    NOTE(review): `assert` is stripped under `python -O`; these only guard expected configs.
    """
    # presumably alpha weights the description loss term; must exceed 1 -- TODO confirm
    assert self.args.alpha > 1
    self.desc_batch_size = self.args.desc_batch_size
    # Each in-batch negative contributes -1/batch_size as its target weight.
    self.negative_indication_weight = -1. / self.desc_batch_size
    self.wv_dim = self.args.wv_dim
    self.default_desc_length = self.args.default_desc_length
    self.word_embed = self.args.word_embed
    self.desc_sim_th = self.args.desc_sim_th
    self.sim_th = self.args.sim_th
    # Pre-trained word embeddings and per-entity description token ids.
    self.word_em, self.e_desc = self._get_desc_input()
    # Evaluation pools: validation + test entities from both KGs.
    self.ref_entities1 = self.kgs.valid_entities1 + self.kgs.test_entities1
    self.ref_entities2 = self.kgs.valid_entities2 + self.kgs.test_entities2
    # Graph construction order matters: variables first, then dependent graphs.
    self._define_variables()
    self._define_mapping_variables()
    self._define_embed_graph()
    self._define_mapping_graph()
    self._define_mapping_graph_new()
    self._define_desc_graph()
    self.session = load_session()
    tf.global_variables_initializer().run(session=self.session)
def init(self):
    """Build TF variables/graphs, open the session, cache evaluation entity lists,
    and validate that the supplied configuration matches this model's expectations.

    NOTE(review): the trailing `assert` checks are stripped under `python -O`.
    """
    self._define_variables()
    self._define_embed_graph()
    self._define_alignment_graph()
    self._define_likelihood_graph()
    self.session = load_session()
    tf.global_variables_initializer().run(session=self.session)
    # Evaluation pools: validation + test entities from both KGs.
    self.ref_ent1 = self.kgs.valid_entities1 + self.kgs.test_entities1
    self.ref_ent2 = self.kgs.valid_entities2 + self.kgs.test_entities2
    # customize parameters
    assert self.args.init == 'normal'
    assert self.args.alignment_module == 'swapping'
    assert self.args.loss == 'limited'
    assert self.args.neg_sampling == 'truncated'
    assert self.args.optimizer == 'Adagrad'
    assert self.args.eval_metric == 'inner'
    assert self.args.loss_norm == 'L2'
    assert self.args.ent_l2_norm is True
    assert self.args.rel_l2_norm is True
    assert self.args.pos_margin >= 0.0
    # Negative margin must strictly dominate the positive margin.
    assert self.args.neg_margin > self.args.pos_margin
    assert self.args.neg_triple_num > 1
    assert self.args.truncated_epsilon > 0.0
    assert self.args.learning_rate >= 0.01
def init(self):
    """Define embedding and mapping variables/graphs, then start the TF session."""
    self._define_variables()
    self._define_mapping_variables()
    self._define_embed_graph()
    self._define_mapping_graph()
    self.session = load_session()
    # Initialize all TF variables in the freshly created session.
    tf.global_variables_initializer().run(session=self.session)
def init(self):
    """Prepare path-based alignment: cache evaluation entities, precompute 2-step
    relation paths for both KGs, build graphs, and validate the configuration.

    NOTE(review): `assert` checks are stripped under `python -O`.
    """
    # Evaluation pools: validation + test entities from both KGs.
    self.ref_entities1 = self.kgs.valid_entities1 + self.kgs.test_entities1
    self.ref_entities2 = self.kgs.valid_entities2 + self.kgs.test_entities2
    # 2-hop relation paths mined from each KG's relation triples.
    self.paths1 = generate_2steps_path(self.kgs.kg1.relation_triples_list)
    self.paths2 = generate_2steps_path(self.kgs.kg2.relation_triples_list)
    self._define_variables()
    self._define_embed_graph()
    self._define_alignment_graph()
    self.session = load_session()
    tf.global_variables_initializer().run(session=self.session)
    # customize parameters
    assert self.args.alignment_module == 'sharing'
    assert self.args.init == 'normal'
    assert self.args.neg_sampling == 'uniform'
    assert self.args.optimizer == 'Adagrad'
    assert self.args.eval_metric == 'inner'
    assert self.args.loss_norm == 'L2'
    assert self.args.ent_l2_norm is True
    assert self.args.rel_l2_norm is True
    assert self.args.margin > 0.0
    assert self.args.neg_triple_num == 1
    assert self.args.sim_th > 0.0
def init(self):
    """Set up GCN-Align: load attribute features, preprocess the adjacency matrix,
    build placeholder dicts and the two GCN units (attribute + structure embeddings),
    then start the TF session.

    NOTE(review): the leading `assert` checks are stripped under `python -O`.
    """
    assert self.args.alignment_module == 'mapping'
    assert self.args.neg_triple_num > 1
    assert self.args.learning_rate >= 0.01
    self.num_supports = self.args.support_number
    self.utils = GCN_Utils(self.args, self.kgs)
    self.attr = load_attr(self.kgs.entities_num, self.kgs)
    # adj: combined adjacency; ae_input: sparse attribute features; train: alignment seeds.
    self.adj, self.ae_input, self.train = self.utils.load_data(self.attr)
    # presumably the number of entities, taken from the feature matrix shape -- TODO confirm
    self.e = self.ae_input[2][0]
    self.support = [self.utils.preprocess_adj(self.adj)]
    # Placeholders for the attribute-embedding (AE) GCN: sparse features.
    self.ph_ae = {
        "support": [tf.sparse_placeholder(tf.float32) for _ in range(self.args.support_number)],
        "features": tf.sparse_placeholder(tf.float32),
        "dropout": tf.placeholder_with_default(0., shape=()),
        "num_features_nonzero": tf.placeholder_with_default(0, shape=())
    }
    # Placeholders for the structure-embedding (SE) GCN: dense (featureless) input.
    self.ph_se = {
        "support": [tf.sparse_placeholder(tf.float32) for _ in range(self.args.support_number)],
        "features": tf.placeholder(tf.float32),
        "dropout": tf.placeholder_with_default(0., shape=()),
        "num_features_nonzero": tf.placeholder_with_default(0, shape=())
    }
    self.model_ae = GCN_Align_Unit(self.args, self.ph_ae, input_dim=self.ae_input[2][1],
                                   output_dim=self.args.ae_dim, ILL=self.train,
                                   sparse_inputs=True, featureless=False, logging=False)
    self.model_se = GCN_Align_Unit(self.args, self.ph_se, input_dim=self.e,
                                   output_dim=self.args.se_dim, ILL=self.train,
                                   sparse_inputs=False, featureless=True, logging=False)
    self.session = load_session()
    tf.global_variables_initializer().run(session=self.session)
def init(self):
    """Cache evaluation entities, build embedding and similarity graphs, start the
    session, and validate the configuration.

    NOTE(review): `assert` checks are stripped under `python -O`.
    """
    # Evaluation pools: validation + test entities from both KGs.
    self.ref_entities1 = self.kgs.valid_entities1 + self.kgs.test_entities1
    self.ref_entities2 = self.kgs.valid_entities2 + self.kgs.test_entities2
    self._define_variables()
    self._define_embed_graph()
    self._define_sim_graph()
    self.session = load_session()
    tf.global_variables_initializer().run(session=self.session)
    # customize parameters
    assert self.args.alignment_module == 'sharing'
    assert self.args.init == 'normal'
    assert self.args.neg_sampling == 'uniform'
    assert self.args.optimizer == 'Adagrad'
    assert self.args.eval_metric == 'inner'
    assert self.args.loss_norm == 'L2'
    assert self.args.ent_l2_norm is True
    assert self.args.rel_l2_norm is True
    assert self.args.neg_triple_num >= 1
    assert self.args.neg_alpha >= 0.0
    assert self.args.top_attr_threshold > 0.0
    assert self.args.attr_sim_mat_threshold > 0.0
    assert self.args.attr_sim_mat_beta > 0.0
def init(self):
    """Format attribute triples (character-level values truncated to `literal_len`),
    build the TF graphs, and start the session."""
    self.attribute_triples_list1, self.attribute_triples_list2, self.value_id_char_ids, self.char_list_size = \
        formatting_attr_triples(self.kgs, self.args.literal_len)
    self._define_variables()
    self._define_embed_graph()
    self.session = load_session()
    tf.global_variables_initializer().run(session=self.session)
def init(self):
    """Build variables/graphs, start the TF session, and validate the configuration.

    NOTE(review): `assert` checks are stripped under `python -O`.
    """
    self._define_variables()
    self._define_embed_graph()
    self.session = load_session()
    tf.global_variables_initializer().run(session=self.session)
    assert self.args.init == 'xavier'
    assert self.args.alignment_module == 'sharing'
    assert self.args.optimizer == 'Adam'
    assert self.args.eval_metric == 'inner'
    assert self.args.ent_l2_norm is True
    assert self.args.rel_l2_norm is True
    assert self.args.dnn_neg_nums > 1
def init(self):
    """Start the TF session and validate the configuration; this model defines no
    graphs of its own here (presumably built elsewhere -- TODO confirm).

    NOTE(review): `assert` checks are stripped under `python -O`.
    """
    self.session = load_session()
    tf.global_variables_initializer().run(session=self.session)
    # customize parameters
    assert self.args.init == 'unit'
    assert self.args.alignment_module == 'mapping'
    assert self.args.optimizer == 'Adagrad'
    assert self.args.eval_metric == 'inner'
    assert self.args.ent_l2_norm is True
    assert self.args.alpha > 1
def init(self):
    """Compute the embedding initialization range, build graphs, start the session,
    cache evaluation entities, and validate the configuration.

    NOTE(review): `assert` checks are stripped under `python -O`.
    """
    # RotatE-style init range: (gamma + epsilon) / dim.
    self.embedding_range = (self.args.gamma + self.epsilon) / self.args.dim
    self._define_variables()
    self._define_embed_graph()
    self._define_alignment_graph()
    self.session = load_session()
    tf.global_variables_initializer().run(session=self.session)
    # Evaluation pools: validation + test entities from both KGs.
    self.ref_ent1 = self.kgs.valid_entities1 + self.kgs.test_entities1
    self.ref_ent2 = self.kgs.valid_entities2 + self.kgs.test_entities2
    # customize parameters
    assert self.args.alignment_module == 'swapping'
    assert self.args.neg_triple_num > 0.0
    assert self.args.truncated_epsilon > 0.0
def init(self):
    """Compute the embedding initialization range, build graphs, start the session,
    and validate the configuration.

    NOTE(review): `assert` checks are stripped under `python -O`.
    """
    # RotatE-style init range: (gamma + epsilon) / dim.
    self.embedding_range = (self.args.gamma + self.epsilon) / self.args.dim
    self._define_variables()
    self._define_embed_graph()
    self.session = load_session()
    tf.global_variables_initializer().run(session=self.session)
    assert self.args.init == 'uniform'
    assert self.args.alignment_module == 'sharing'
    assert self.args.neg_sampling == 'uniform'
    assert self.args.optimizer == 'Adam'
    assert self.args.eval_metric == 'inner'
    # L2-norm checks intentionally disabled for this model.
    # assert self.args.ent_l2_norm is True
    # assert self.args.rel_l2_norm is True
    assert self.args.gamma > 0.0
def init(self):
    """Build variables/graphs, start the TF session, and validate the configuration.

    NOTE(review): `assert` checks are stripped under `python -O`.
    """
    self._define_variables()
    self._define_embed_graph()
    self.session = load_session()
    tf.global_variables_initializer().run(session=self.session)
    assert self.args.init == 'xavier'
    assert self.args.alignment_module == 'sharing'
    assert self.args.neg_sampling == 'uniform'
    assert self.args.optimizer == 'Adagrad'
    assert self.args.eval_metric == 'inner'
    assert self.args.loss_norm == 'L2'
    assert self.args.ent_l2_norm is True
    assert self.args.rel_l2_norm is True
    assert self.args.margin > 0.0
def init(self):
    """Build variables/graphs, start the TF session, and validate the configuration.

    Fix: the original asserted ``self.args.loss == 'margin-based'`` twice
    (verbatim duplicate); the redundant second occurrence is removed.

    NOTE(review): `assert` checks are stripped under `python -O`.
    """
    self._define_variables()
    self._define_embed_graph()
    self.session = load_session()
    tf.global_variables_initializer().run(session=self.session)
    # customize parameters
    assert self.args.loss == 'margin-based'
    assert self.args.alignment_module == 'mapping'
    assert self.args.neg_sampling == 'uniform'
    assert self.args.optimizer == 'Adam'
    assert self.args.eval_metric == 'inner'
    assert self.args.loss_norm == 'L2'
    assert self.args.ent_l2_norm is True
    assert self.args.rel_l2_norm is True
    assert self.args.neg_triple_num == 1
def init(self):
    """Run the interactive pre-alignment model, build graphs, start the session,
    and validate the configuration.

    NOTE(review): `assert` checks are stripped under `python -O`.
    """
    # Seed alignment pairs produced by the interactive model over both KGs.
    self.aligned_ent_pair_set = interactive_model(self.kgs, self.args)
    self._define_variables()
    self._define_embed_graph()
    self.session = load_session()
    tf.global_variables_initializer().run(session=self.session)
    # customize parameters
    assert self.args.init == 'normal'
    assert self.args.loss == 'margin-based'
    assert self.args.neg_sampling == 'uniform'
    assert self.args.optimizer == 'SGD'
    assert self.args.eval_metric == 'inner'
    assert self.args.loss_norm == 'L2'
    assert self.args.ent_l2_norm is True
    assert self.args.rel_l2_norm is True
    assert self.args.neg_triple_num == 1
    assert self.args.learning_rate >= 0.01
def init(self):
    """BootEA-variant setup: reuse AlignE's variable/embedding graphs, add the
    bootstrapping alignment and likelihood graphs, start the session (on the
    configured GPU), and validate the configuration.

    NOTE(review): `assert` checks are stripped under `python -O`.
    """
    # Defined in AlignE
    self._define_variables()
    self._define_embed_graph()
    # New
    self._define_alignment_graph()
    self._define_likelihood_graph()
    # Same as AlignE, but pinned to the configured GPU.
    self.session = load_session(self.args.gpu)
    tf.global_variables_initializer().run(session=self.session)
    # Test shouldn't be here, we let BootEA overfit the datasets as the original
    # authors do. THIS IS COMPLETELY WRONG, however we let BootEA keep this big
    # advantage and it still performs worse. MAYBE IT'S NOT SO WRONG.
    self.ref_ent1 = self.kgs.valid_entities1 + self.kgs.test_entities1 + self.kgs.extra_entities1
    self.ref_ent2 = self.kgs.valid_entities2 + self.kgs.test_entities2 + self.kgs.extra_entities2
    # Size of the valid+test prefix of ref_ent1/2 (extra entities excluded).
    self.len_valid_test = len(self.kgs.valid_entities1 + self.kgs.test_entities1)
    # Added to improve early stopping -> Not needed anymore after rollback
    # self.saved_best = False
    # self.best_embeds = None
    # customize parameters
    assert self.args.init == 'normal'
    assert self.args.alignment_module == 'swapping'
    assert self.args.loss == 'limited'
    assert self.args.neg_sampling == 'truncated'
    assert self.args.optimizer == 'Adagrad'
    assert self.args.eval_metric == 'inner'
    assert self.args.loss_norm == 'L2'
    assert self.args.ent_l2_norm is True
    assert self.args.rel_l2_norm is True
    assert self.args.pos_margin >= 0.0
    # Negative margin must strictly dominate the positive margin.
    assert self.args.neg_margin > self.args.pos_margin
    assert self.args.neg_triple_num > 1
    assert self.args.truncated_epsilon > 0.0
    assert self.args.learning_rate >= 0.01
def __init__(self, data, args, attr_align_model):
    """Initialize the multi-view model: training flags, then the name / relation /
    attribute view graphs plus all cross-KG reference graphs and the shared-space
    graphs, and finally the TF session.

    Graph definition order matters: variables are defined before every dependent
    view graph, and the session is created only after all graphs exist.
    """
    super().__init__(data, args, attr_align_model)
    # flag1/flag2 track recent validation scores for early stopping.
    self.flag1 = -1
    self.flag2 = -1
    self.early_stop = False
    self._define_variables()
    self._define_name_view_graph()
    self._define_relation_view_graph()
    self._define_attribute_view_graph()
    self._define_cross_kg_entity_reference_relation_view_graph()
    self._define_cross_kg_entity_reference_attribute_view_graph()
    self._define_cross_kg_relation_reference_graph()
    self._define_cross_kg_attribute_reference_graph()
    self._define_common_space_learning_graph()
    self._define_space_mapping_graph()
    self.session = load_session()
    tf.global_variables_initializer().run(session=self.session)
def init(self):
    """Read training data, load or sample path sequences, build graphs, and start
    the TF session.

    Fix: the original rebuilt the identical ``'%spaths_%.1f_%.1f' % (...)``
    filename a second time for the ``pd.read_csv`` call; the already-computed
    ``sequence_datapath`` is reused instead (same string value, same behavior).
    """
    self._options = opts = self.args
    opts.data_path = opts.training_data
    self.read(data_path=self._options.data_path)
    # Cached-sequence file name is keyed by the (alpha, beta) sampling parameters.
    sequence_datapath = '%spaths_%.1f_%.1f' % (
        self._options.data_path, self._options.alpha, self._options.beta)
    if not os.path.exists(sequence_datapath):
        # No cache yet: sample random walks / paths from scratch.
        self.sample_paths()
    else:
        print('load existing training sequences')
        self._train_data = pd.read_csv(sequence_datapath, index_col=0)
    self._define_variables()
    self._define_embed_graph()
    self.session = load_session()
    tf.global_variables_initializer().run(session=self.session)
def __init__(self, data, args, predicate_align_model):
    """Initialize the multi-view model: output folder, training flags, the name /
    relation / attribute view graphs plus all cross-KG reference graphs and the
    common-space graph, and finally the TF session.

    Graph definition order matters: variables are defined before every dependent
    view graph, and the session is created only after all graphs exist.
    """
    super().__init__(data, args, predicate_align_model)
    # Output path derived from config and the concrete subclass name.
    self.out_folder = generate_out_folder(self.args.output, self.args.training_data, '',
                                          self.__class__.__name__)
    # flag1/flag2 track recent validation scores for early stopping.
    self.flag1 = -1
    self.flag2 = -1
    self.early_stop = False
    self._define_variables()
    self._define_name_view_graph()
    self._define_relation_view_graph()
    self._define_attribute_view_graph()
    self._define_cross_kg_entity_reference_relation_view_graph()
    self._define_cross_kg_entity_reference_attribute_view_graph()
    self._define_cross_kg_attribute_reference_graph()
    self._define_cross_kg_relation_reference_graph()
    self._define_common_space_learning_graph()
    self.session = load_session()
    tf.global_variables_initializer().run(session=self.session)
def init(self):
    """Build variables/graphs, start the TF session, and validate the configuration.

    NOTE(review): `assert` checks are stripped under `python -O`.
    """
    self._define_variables()
    self._define_embed_graph()
    self.session = load_session()
    tf.global_variables_initializer().run(session=self.session)
    # customize parameters -> note they are the same as in BootEA json
    assert self.args.init == 'normal'
    assert self.args.alignment_module == 'swapping'
    assert self.args.loss == 'limited'
    assert self.args.neg_sampling == 'truncated'
    assert self.args.optimizer == 'Adagrad'
    assert self.args.eval_metric == 'inner'
    assert self.args.loss_norm == 'L2'
    assert self.args.ent_l2_norm is True
    assert self.args.rel_l2_norm is True
    assert self.args.pos_margin >= 0.0
    # Negative margin must strictly dominate the positive margin.
    assert self.args.neg_margin > self.args.pos_margin
    assert self.args.neg_triple_num > 1
    assert self.args.truncated_epsilon > 0.0
    assert self.args.learning_rate >= 0.01