Example #1
def test_generator_transe():
    """Function to test the generator for the translation-distance-based algorithm (TransE)."""
    knowledge_graph = KnowledgeGraph(dataset="freebase15k", negative_sample="uniform")
    knowledge_graph.force_prepare_data()
    
    args = KGEArgParser().get_args([])
    
    start_time = timeit.default_timer()
    
    config = TransEConfig(args)

    gen = Generator(config=GeneratorConfig(data='train', algo='transe'), model_config=config)

    print("----init time:", timeit.default_timer() - start_time)
    
    for i in range(10):
        start_time_batch = timeit.default_timer()
        data = list(next(gen))
        h = data[0]
        r = data[1]
        t = data[2]
        # hr_t = data[3]
        # tr_h = data[4]
        print("----batch:", i, "----time:",timeit.default_timer() - start_time_batch)
        print(h,r,t)

    print("total time:", timeit.default_timer() - start_time)
    
    gen.stop()
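These snippets omit their imports. For Examples #1 and #3, a preamble along the following lines is needed; the module paths are an assumption based on older pykg2vec layouts and have shifted between releases, so they may need adjusting to the installed version.

# Assumed import preamble for Examples #1 and #3 -- the module paths are a
# guess from older pykg2vec layouts and may differ in your installed version.
import timeit

from pykg2vec.config.config import KGEArgParser, TransEConfig, ProjE_pointwiseConfig
from pykg2vec.config.global_config import KnowledgeGraph, GeneratorConfig
from pykg2vec.utils.generator import Generator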
Example #2
    def tune_model(self):
        """Function to tune the model."""
        acc = 0

        generator_config = GeneratorConfig(
            data=self.trainon,
            algo=self.model.model_name,
            batch_size=self.model.config.batch_size)
        self.gen_train = Generator(config=generator_config,
                                   model_config=self.model.config)

        self.evaluator = Evaluation(model=self.model,
                                    data_type=self.teston,
                                    debug=self.debug,
                                    tuning=True,
                                    session=self.sess)

        for n_iter in range(self.config.epochs):
            self.train_model_epoch(n_iter)

        self.gen_train.stop()
        self.evaluator.test_batch(n_iter)
        acc = self.evaluator.output_queue.get()
        self.evaluator.stop()
        self.sess.close()
        tf.reset_default_graph()  # clean up the TensorFlow graph for the next training task.

        return acc
Example #3
def test_generator_proje():
    """Function to test the generator for ProjE algorithm."""
    knowledge_graph = KnowledgeGraph(dataset="freebase15k", negative_sample="uniform")
    knowledge_graph.force_prepare_data()

    args = KGEArgParser().get_args([])

    config = ProjE_pointwiseConfig(args=args)

    gen = iter(Generator(config=GeneratorConfig(data='train', algo='ProjE'), model_config=config))
    
    for i in range(1000):
        data = list(next(gen))
        print("----batch:", i)
        
        hr_hr = data[0]
        hr_t = data[1]
        tr_tr = data[2]
        tr_h = data[3]

        print("hr_hr:", hr_hr)
        print("hr_t:", hr_t)
        print("tr_tr:", tr_tr)
        print("tr_h:", tr_h)
    gen.stop()
Example #4
def test_generator_pairwise():
    """Function to test the generator for pairwise based algorithm."""
    knowledge_graph = KnowledgeGraph(dataset="freebase15k")
    knowledge_graph.force_prepare_data()

    config_def, model_def = Importer().import_model_config('transe')
    generator = Generator(model_def(config_def(KGEArgParser().get_args([]))))
    generator.start_one_epoch(10)
    for i in range(10):
        data = list(next(generator))
        assert len(data) == 6

        ph = data[0]
        pr = data[1]
        pt = data[2]
        nh = data[3]
        nr = data[4]
        nt = data[5]
        assert len(ph) == len(pr)
        assert len(ph) == len(pt)
        assert len(ph) == len(nh)
        assert len(ph) == len(nr)
        assert len(ph) == len(nt)

    generator.stop()
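Example #4 above and Examples #9 and #10 further below follow the same generator lifecycle: start_one_epoch() queues a fixed number of batches, next() pulls each batch, and stop() shuts the worker processes down. A minimal sketch of that consumption pattern, assuming a Generator already constructed as in these examples (the batch handling below is only a placeholder):

def consume_one_epoch(generator, num_batches):
    """Illustrative sketch, not a pykg2vec API: drain one epoch from a Generator.

    Only start_one_epoch(), next() and stop() are taken from the examples above;
    what is done with each batch here is a placeholder.
    """
    generator.start_one_epoch(num_batches)
    try:
        for batch_idx in range(num_batches):
            data = list(next(generator))
            print("batch", batch_idx, "-> arrays:", len(data))  # placeholder for a training step
    finally:
        generator.stop()  # make sure the generator's worker processes are shut down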
Example #5
    def train_model(self):
        """Function to train the model."""
        loss = 0

        if self.config.loadFromData:
            self.load_model()
        else:
            generator_config = GeneratorConfig(
                data=self.trainon,
                algo=self.model.model_name,
                batch_size=self.model.config.batch_size)
            self.gen_train = Generator(config=generator_config,
                                       model_config=self.model.config)

            if not self.tuning:
                self.evaluator = Evaluation(model=self.model,
                                            data_type=self.teston,
                                            debug=self.debug,
                                            session=self.sess)

            for n_iter in range(self.config.epochs):
                loss = self.train_model_epoch(n_iter)
                if not self.tuning:
                    self.test(n_iter)

            self.gen_train.stop()

            if not self.tuning:
                self.evaluator.save_training_result(self.training_results)
                self.evaluator.stop()

            if self.config.save_model:
                self.save_model()

        if self.config.disp_result:
            self.display()

        if self.config.disp_summary:
            self.summary()
            self.summary_hyperparameter()

        if not os.path.exists("./tmp"):
            os.mkdir("./tmp")

        save_path = self.saver.save(self.sess, "./tmp/model.ckpt")
        self.sess.close()
        tf.reset_default_graph()  # clean up the TensorFlow graph for the next training task.

        return loss
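Examples #2, #5, and #7 end with the same TF1-style teardown: stop the generator (and evaluator), close the session, and reset the default graph so the next training task starts from a clean state. If an epoch raises, that teardown never runs; a defensive sketch using only the calls already shown in those examples (the helper itself is not part of pykg2vec, and the evaluator attribute may be absent when tuning) would be:

import tensorflow as tf  # TF1-style API, as used in the examples above

def run_epochs_with_cleanup(trainer):
    """Illustrative helper, not part of pykg2vec: guards the teardown from Examples #2/#5/#7.

    The attribute names (config.epochs, train_model_epoch, gen_train, evaluator, sess)
    are taken from the examples above.
    """
    try:
        for n_iter in range(trainer.config.epochs):
            trainer.train_model_epoch(n_iter)
    finally:
        trainer.gen_train.stop()                  # stop the batch-generator workers
        if getattr(trainer, "evaluator", None) is not None:
            trainer.evaluator.stop()              # stop the evaluation worker, if any
        trainer.sess.close()                      # release the TF1 session
        tf.reset_default_graph()                  # TF1 call; tf.compat.v1.reset_default_graph() under TF2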
Example #6
    def tune_model(self):
        """Function to tune the model."""
        current_loss = float("inf")

        self.generator = Generator(self.model)
        self.evaluator = Evaluator(self.model, tuning=True)
       
        for cur_epoch_idx in range(self.config.epochs):
            current_loss = self.train_model_epoch(cur_epoch_idx, tuning=True)

        self.evaluator.full_test(cur_epoch_idx)

        self.generator.stop()
        
        return current_loss
Example #7
    def train_model(self):
        """Function to train the model."""
        loss = 0

        if self.config.loadFromData:
            self.load_model()
        else:
            self.gen_train = Generator(config=self.generator_config,
                                       model_config=self.model.config)

            if not self.tuning:
                self.evaluator = Evaluation(model=self.model,
                                            data_type=self.teston,
                                            debug=self.debug,
                                            session=self.sess)

            for n_iter in range(self.config.epochs):
                loss = self.train_model_epoch(n_iter)
                if not self.tuning:
                    self.test(n_iter)

            self.gen_train.stop()

            if not self.tuning:
                self.evaluator.save_training_result(self.training_results)
                self.evaluator.stop()

            if self.config.save_model:
                self.save_model()

        if self.config.disp_result:
            self.display()

        if self.config.disp_summary:
            self.summary()
            self.summary_hyperparameter()

        self.export_embeddings()

        self.sess.close()
        tf.reset_default_graph()  # clean up the TensorFlow graph for the next training task.

        return loss
Example #8
    def train_model(self, monitor=Monitor.FILTERED_MEAN_RANK):
        """Function to train the model."""
        self.generator = Generator(self.model)
        self.evaluator = Evaluator(self.model)

        if self.config.loadFromData:
            self.load_model()
        
        for cur_epoch_idx in range(self.config.epochs):
            self._logger.info("Epoch[%d/%d]" % (cur_epoch_idx, self.config.epochs))
            
            self.train_model_epoch(cur_epoch_idx)

            if cur_epoch_idx % self.config.test_step == 0:
                metrics = self.evaluator.mini_test(cur_epoch_idx)
                              
                if self.early_stopper.should_stop(metrics):
                    ### Early stop mechanism: check whether the metric is still
                    ### improving after each mini-test. For example, if test_step == 5,
                    ### the trainer checks the metrics every 5 epochs.
                    break

        self.evaluator.full_test(cur_epoch_idx)
        self.evaluator.metric_calculator.save_test_summary(self.model.model_name)

        self.generator.stop()
        self.save_training_result()

        if self.config.save_model:
            self.save_model()

        if self.config.disp_result:
            self.display()

        if self.config.disp_summary:
            self.config.summary()
            self.config.summary_hyperparameter(self.model.model_name)

        self.export_embeddings()

        return cur_epoch_idx  # the number of epochs actually run.
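Example #8 delegates the stopping decision to an early_stopper with a should_stop(metrics) method, whose implementation is not shown. A minimal, purely illustrative patience-based version (not pykg2vec's actual early stopper; the metric key below is a hypothetical placeholder) might look like:

class PatienceEarlyStopper:
    """Illustrative patience-based stopper; not pykg2vec's implementation.

    Assumes `metrics` is a mapping containing a rank-style value under
    `metric_key` (a hypothetical name) where lower is better.
    """

    def __init__(self, patience=3, metric_key="filtered_mean_rank"):
        self.patience = patience
        self.metric_key = metric_key
        self.best = float("inf")
        self.bad_checks = 0

    def should_stop(self, metrics):
        value = metrics[self.metric_key]
        if value < self.best:
            self.best = value        # still improving: reset the counter
            self.bad_checks = 0
            return False
        self.bad_checks += 1         # no improvement at this mini-test
        return self.bad_checks >= self.patience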
Example #9
def test_generator_pointwise():
    """Function to test the generator for pointwise based algorithm."""
    knowledge_graph = KnowledgeGraph(dataset="freebase15k")
    knowledge_graph.force_prepare_data()

    config_def, model_def = Importer().import_model_config("complex")
    generator = Generator(model_def(config_def(KGEArgParser().get_args([]))))
    generator.start_one_epoch(10)
    for i in range(10):
        data = list(next(generator))
        assert len(data) == 4

        h = data[0]
        r = data[1]
        t = data[2]
        y = data[3]
        assert len(h) == len(r)
        assert len(h) == len(t)
        assert set(y) == {1, -1}

    generator.stop()
Example #10
def test_generator_proje():
    """Function to test the generator for projection based algorithm."""
    knowledge_graph = KnowledgeGraph(dataset="freebase15k")
    knowledge_graph.force_prepare_data()

    config_def, model_def = Importer().import_model_config("proje_pointwise")
    generator = Generator(model_def(config_def(KGEArgParser().get_args([]))))
    generator.start_one_epoch(10)
    for i in range(10):
        data = list(next(generator))
        assert len(data) == 5

        h = data[0]
        r = data[1]
        t = data[2]
        hr_t = data[3]
        tr_h = data[4]
        assert len(h) == len(r)
        assert len(h) == len(t)
        assert isinstance(hr_t, tf.SparseTensor)
        assert isinstance(tr_h, tf.SparseTensor)

    generator.stop()