def test_generator_proje():
    """Smoke-test the training-data generator for the ProjE algorithm.

    Builds the Freebase15k knowledge graph with uniform negative sampling,
    then pulls 1000 batches from the ProjE generator and prints each batch's
    four components. Stops the generator when done.

    NOTE(review): the generator is wrapped in ``iter(...)`` yet ``stop()`` is
    called on the wrapped object — this assumes ``Generator.__iter__`` returns
    ``self``; confirm against the Generator implementation.
    """
    knowledge_graph = KnowledgeGraph(dataset="freebase15k", negative_sample="uniform")
    knowledge_graph.force_prepare_data()

    args = KGEArgParser().get_args([])
    config = ProjE_pointwiseConfig(args=args)
    gen = iter(Generator(config=GeneratorConfig(data='train', algo='ProjE'), model_config=config))

    for batch_idx in range(1000):
        batch = list(next(gen))
        print("----batch:", batch_idx)
        # Each ProjE batch carries: head-relation pairs, their tail targets,
        # tail-relation pairs, and their head targets.
        hr_hr, hr_t, tr_tr, tr_h = batch[0], batch[1], batch[2], batch[3]
        print("hr_hr:", hr_hr)
        print("hr_t:", hr_t)
        print("tr_tr:", tr_tr)
        print("tr_h:", tr_h)

    gen.stop()
def test_generator_trane():
    """Smoke-test the training-data generator for translation-distance models.

    Builds the Freebase15k knowledge graph with uniform negative sampling,
    times generator construction, then pulls 10 batches from the TransE
    generator, timing and printing each (head, relation, tail) triple batch.
    Stops the generator when done.
    """
    knowledge_graph = KnowledgeGraph(dataset="freebase15k", negative_sample="uniform")
    knowledge_graph.force_prepare_data()

    args = KGEArgParser().get_args([])

    start_time = timeit.default_timer()
    config = TransEConfig(args)
    gen = Generator(config=GeneratorConfig(data='train', algo='transe'), model_config=config)
    print("----init time:", timeit.default_timer() - start_time)

    for batch_idx in range(10):
        start_time_batch = timeit.default_timer()
        batch = list(next(gen))
        # Translation-distance batches are (heads, relations, tails);
        # hr_t / tr_h mappings were at indices 3 and 4 but are unused here.
        h, r, t = batch[0], batch[1], batch[2]
        print("----batch:", batch_idx, "----time:", timeit.default_timer() - start_time_batch)
        print(h, r, t)

    print("total time:", timeit.default_timer() - start_time)
    gen.stop()
def test_fb15k_meta():
    """Test the knowledge-graph cache round-trip and metadata for Freebase15k.

    Forces a fresh data preparation, dumps the graph, asserts the cache now
    exists, re-prepares from cache, then reads and dumps the dataset metadata.
    """
    knowledge_graph = KnowledgeGraph(dataset="freebase15k")
    knowledge_graph.force_prepare_data()
    knowledge_graph.dump()

    # A forced preparation must leave a cache behind.
    assert knowledge_graph.is_cache_exists()

    # Second preparation should be served from the cache just written.
    knowledge_graph.prepare_data()
    knowledge_graph.dataset.read_metadata()
    knowledge_graph.dataset.dump()
def test_fb15k_manipulate():
    """Test reading every cached artifact of the Freebase15k knowledge graph.

    Forces data preparation, dumps the graph, then reads each cached item
    (triplet splits, head/tail maps, and the four id<->name dictionaries)
    back from disk.
    """
    knowledge_graph = KnowledgeGraph(dataset="freebase15k")
    knowledge_graph.force_prepare_data()
    knowledge_graph.dump()

    # Every artifact the cache is expected to hold, read back in turn.
    cache_keys = (
        'triplets_train',
        'triplets_test',
        'triplets_valid',
        'hr_t',
        'tr_h',
        'idx2entity',
        'idx2relation',
        'entity2idx',
        'relation2idx',
    )
    for key in cache_keys:
        knowledge_graph.read_cache_data(key)
def test_dl50a():
    """Test knowledge-graph parsing for the DeepLearning50a knowledge base."""
    kg = KnowledgeGraph(dataset="deeplearning50a", negative_sample="uniform")
    kg.force_prepare_data()
    kg.dump()
def test_fb15k():
    """Test knowledge-graph parsing for the Freebase15k dataset."""
    kg = KnowledgeGraph(dataset="freebase15k", negative_sample="uniform")
    kg.force_prepare_data()
    kg.dump()
def test_yago():
    """Test knowledge-graph parsing for the YAGO3-10 dataset."""
    kg = KnowledgeGraph(dataset="yago3_10", negative_sample="uniform")
    kg.force_prepare_data()
    kg.dump()
def test_wn18rr():
    """Test knowledge-graph parsing for the WordNet18-RR dataset."""
    kg = KnowledgeGraph(dataset="wordnet18_rr", negative_sample="uniform")
    kg.force_prepare_data()
    kg.dump()
def test_benchmarks(dataset_name):
    """Test downloading and parsing an arbitrary benchmark dataset.

    Exercises the online-download path and the KG controller's handling of
    the named benchmark: prepares the data from scratch and dumps the graph.
    """
    print("testing downloading from online sources of benchmarks and KG controller's handling.")
    kg = KnowledgeGraph(dataset=dataset_name)
    kg.force_prepare_data()
    kg.dump()