def test_es_lemmarizer(self):
    """The Spanish lemma of 'ciudades' must match what process() reports."""
    keyword = "ciudades"
    results = tfg_nlp.process(keyword)
    # Compute the lemma once instead of calling the lemmatizer twice
    # (once for the print, once for the assertion).
    lemma = tfg_nlp.es_lemmatizer(keyword)
    print("Lemma de %s es: %s" % (keyword, lemma))
    self.assertEqual(lemma, results["result"]["lemmatizer"])
def test_en_lemmatizer(self):
    """The English lemma of 'city' must match what process() reports."""
    keyword = "city"
    results = tfg_nlp.process(keyword)
    # Compute the lemma once instead of calling the lemmatizer twice
    # (once for the print, once for the assertion).
    lemma = tfg_nlp.en_lemmatizer(keyword)
    print("Lemma of %s is: %s" % (keyword, lemma))
    self.assertEqual(lemma, results["result"]["lemmatizer"])
def test_ca_lemmarizer(self):
    """The Catalan lemma of 'investigació' must match what process() reports."""
    keyword = "investigació"
    results = tfg_nlp.process(keyword)
    # Compute the lemma once instead of calling the lemmatizer twice
    # (once for the print, once for the assertion).
    lemma = tfg_nlp.ca_lemmatizer(keyword)
    print("Lemma de %s es: %s" % (keyword, lemma))
    self.assertEqual(lemma, results["result"]["lemmatizer"])
def test_case_other_language(self):
    """Keywords in an unsupported language must produce no Wikidata match."""
    keywords = ["wilkomen"]
    results = [tfg_nlp.process(keyword) for keyword in keywords]
    # json.dumps already returns str; the former .encode('utf-8').decode()
    # round-trip was a no-op and has been removed.
    print(json.dumps(results, indent=1, ensure_ascii=False))
    for result in results:
        # assertIsNone gives a clearer failure message than assertEqual(None, ...).
        self.assertIsNone(result["result"]["Wikidata"])
def test_ThinkingAloud(self):
    """'thinking aloud' must be detected as English and linked to Q391810."""
    keyword = "thinking aloud"
    result = tfg_nlp.process(keyword)["result"]
    # json.dumps already returns str; the former .encode('utf-8').decode()
    # round-trip was a no-op and has been removed.
    print(json.dumps(result, indent=1, ensure_ascii=False))
    self.assertEqual("en", result["lang"])
    self.assertEqual("http://www.wikidata.org/entity/Q391810", result["Wikidata"])
def test_case_punctuation_mark(self):
    """A hyphenated keyword must still resolve to the expected Wikidata entity."""
    keywords = ["long-range interactions"]
    results = [tfg_nlp.process(keyword) for keyword in keywords]
    # json.dumps already returns str; the former .encode('utf-8').decode()
    # round-trip was a no-op and has been removed.
    print(json.dumps(results, indent=1, ensure_ascii=False))
    for result in results:
        self.assertEqual("http://www.wikidata.org/entity/Q63926701",
                         result["result"]["Wikidata"])
def test_case_numbers(self):
    """A purely numeric keyword ('1917') must resolve to the year entity Q2092."""
    keywords = ["1917"]
    results = [tfg_nlp.process(keyword) for keyword in keywords]
    # json.dumps already returns str; the former .encode('utf-8').decode()
    # round-trip was a no-op and has been removed.
    print(json.dumps(results, indent=1, ensure_ascii=False))
    for result in results:
        self.assertEqual("http://www.wikidata.org/entity/Q2092",
                         result["result"]["Wikidata"])
def test_case_different_description3(self):
    """The Spanish keyword 'navegación' must resolve to Wikidata entity Q1972518."""
    keywords = ["navegación"]
    results = [tfg_nlp.process(keyword) for keyword in keywords]
    # json.dumps already returns str; the former .encode('utf-8').decode()
    # round-trip was a no-op and has been removed.
    print(json.dumps(results, indent=1, ensure_ascii=False))
    for result in results:
        self.assertEqual("http://www.wikidata.org/entity/Q1972518",
                         result["result"]["Wikidata"])
def test_Cryptography(self):
    """The same concept in three languages must converge on Wikidata entity Q8789."""
    keywords = ['cryptography', 'criptografia', 'criptografía']
    results = [tfg_nlp.process(keyword) for keyword in keywords]
    # json.dumps already returns str; the former .encode('utf-8').decode()
    # round-trip was a no-op and has been removed.
    print(json.dumps(results, indent=1, ensure_ascii=False))
    for result in results:
        self.assertEqual("http://www.wikidata.org/entity/Q8789",
                         result["result"]["Wikidata"])
def test_case_strange_character(self):
    """A keyword with a non-ASCII Greek letter must still resolve correctly."""
    keywords = ["β-catenin"]
    results = [tfg_nlp.process(keyword) for keyword in keywords]
    # json.dumps already returns str; the former .encode('utf-8').decode()
    # round-trip was a no-op and has been removed.
    print(json.dumps(results, indent=1, ensure_ascii=False))
    for result in results:
        self.assertEqual("http://www.wikidata.org/entity/Q10861922",
                         result["result"]["Wikidata"])
def test_case_name_of_person(self):
    """A person's name must resolve to the person's Wikidata entity Q607975."""
    keywords = ["antonio del amo"]
    results = [tfg_nlp.process(keyword) for keyword in keywords]
    # json.dumps already returns str; the former .encode('utf-8').decode()
    # round-trip was a no-op and has been removed.
    print(json.dumps(results, indent=1, ensure_ascii=False))
    for result in results:
        self.assertEqual("http://www.wikidata.org/entity/Q607975",
                         result["result"]["Wikidata"])
def test_case_punctuation_mark2(self):
    """The abbreviation 'qrt-pcr' must resolve to the PCR DBpedia resource."""
    keywords = ["qrt-pcr"]
    results = [tfg_nlp.process(keyword) for keyword in keywords]
    # json.dumps already returns str; the former .encode('utf-8').decode()
    # round-trip was a no-op and has been removed.
    print(json.dumps(results, indent=1, ensure_ascii=False))
    for result in results:
        self.assertEqual(
            "http://dbpedia.org/resource/Polymerase_chain_reaction",
            result["result"]["DBpedia"])
def test_Acronym(self):
    """Both the full term and its acronym must map to the same DBpedia resource."""
    keywords = ["artificial intelligence", "AI"]
    results = [tfg_nlp.process(keyword) for keyword in keywords]
    # json.dumps already returns str; the former .encode('utf-8').decode()
    # round-trip was a no-op and has been removed.
    print(json.dumps(results, indent=1, ensure_ascii=False))
    for result in results:
        self.assertEqual(
            "http://dbpedia.org/resource/Artificial_intelligence",
            result["result"]["DBpedia"])