class EttService:
    def __init__(self, kb_ID, logging_lvl):
        self.logging_lvl = logging_lvl
        self.kb_ID = kb_ID
        #logging.basicConfig(stream=sys.stderr, level=logging_lvl)
        logging.info('\tETT Service started')
        self.kb_client = KnowledgeBaseClient(True)

    def write_to_KB(self, fact, tag):
        """ Post a tuple to the KB """
        self.kb_client.addFact(self.kb_ID, tag, 1, 100, fact)
        return

    def add_emotion(self, *param):
        """ Offers the eTT service: it rewrites an answer directed to the user so that
        it reflects the emotion extrapolated from ELF's internal state (tuples).
        """
        answer_arr = param[0]['details']  # [0]["$input"]
        answer = answer_arr[0]['object']['_data']['text']
        timestamp = answer_arr[0]['object']['_data']['timestamp']
        language = answer_arr[0]['object']['_data']['language']
        print(answer)
        logging.info("\tcallback ett called")
        query_enlp = {
            "_data": {
                "tag": "ENLP_ELF_EMOTION",
            }
        }
        res_enlp = self.kb_client.query(query_enlp)
        if res_enlp["success"]:
            data_enlp = res_enlp["details"][0]["object"]["_data"]
            ies = (data_enlp["valence"], data_enlp["arousal"])
        else:
            print("ETT: No IES, using default")
            ies = (0., 0.)
        a_fact = prepare_answer(answer, ies, timestamp, language)  # TODO: add language forwarding!!!!
        self.write_to_KB(a_fact, TAG_COLORED_ANSWER)

    def start(self):
        """Subscribe and wait for data"""
        self.kb_client.subscribe(
            self.kb_ID,
            {"_data": {"tag": TAG_ANSWER, "text": "$input",
                       "timestamp": "$time", "language": "$lang"}},
            self.add_emotion)  # from the 'gnlp' module
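# Minimal usage sketch for the service above (an assumption, not existing project code:
# it presumes EttService and the TAG_* constants are importable from this module and
# that register() returns the new client ID under 'details', as the query_rule script
# in this repo does):
import logging
from kb import KnowledgeBaseClient

if __name__ == "__main__":
    kb_id = KnowledgeBaseClient(False).register()['details']  # obtain an ID for the service
    service = EttService(kb_id, logging.INFO)
    service.start()  # subscribes to TAG_ANSWER facts; add_emotion runs on every match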
class ConstantFromkB:
    def __init__(self, kb_ID, logging_lvl):
        self.logging_lvl = logging_lvl
        self.kb_ID = kb_ID
        logging.info("\tConstant_from_kB Service Handler created")
        self.kb_client = KnowledgeBaseClient(True)

    def extract_rooms_courses_from_KB(self, tag, file):
        """Extract "constants" information from the KB.
        First it performs a query to retrieve the relevant facts, then it writes
        them to file (the second parameter).
        """
        # query the KB
        answ = self.kb_client.query({"_meta": {"tag": tag}})
        if answ['success'] == False:
            return
        else:
            file.write("\n#section for " + tag + ":\n")
            ris = []
            # get the results and append them to the ris list
            for obj in answ['details']:
                ris.append(obj['object']['_data']['data']['name'])
            # write them to the rules file
            for r in ris:
                string = str(r).lower()
                predicate = "_".join(string.split(" "))
                # create the strings to write
                if len(string) > 1 and tag == TAG_COURSE:
                    string2 = string.split(" ")[0]
                    str_to_write = "PNOUN[SEM=<\\x.(DRS([],[" + predicate + "]))>] -> '" + string + "' | '" + string2 + "'" + "\n"
                else:
                    str_to_write = "PNOUN[SEM=<\\x.(DRS([],[" + predicate + "]))>] -> '" + string + "'\n"
                file.write(str_to_write)

    def extract_teachers_from_KB(self, tag, file):
        """Extract the teacher "constants" from the KB.
        First it performs a query to retrieve the relevant facts, then it writes
        them to file (the second parameter).
        """
        # query the KB
        answ = self.kb_client.query({"_meta": {"tag": tag}})
        if answ['success'] == False:
            return
        else:
            file.write("\n#section for " + tag + ":\n")
            # write them directly to the rules file
            for r in answ["details"][0]["object"]["_data"]["data"]:
                # create the strings to write (one for the full name, one for the surname)
                name = str(r).lower().split(" ")
                # heuristic to take the surname (particles compared lowercase, since the name was lowercased)
                n = len(name)
                if name[n - 2] == "de" or name[n - 2] == "del" or name[n - 2] == "di" or name[n - 2] == "della":
                    tmp = name[n - 2:]
                    surname = " ".join(tmp)
                else:
                    surname = name[-1]
                name2write = " ".join(name)
                predicate = "_".join(name) + "(x)"
                if name2write == '':  # TODO: once the crawler group implements filtering, this rule can be removed
                    continue
                str1_to_write = "PNOUN[SEM=<\\x.(DRS([],[" + predicate + "]))>] -> '" + name2write + "'|'" + surname + "'\n"
                file.write(str1_to_write)

    def start(self):
        """Ask for the 'constants' facts."""
        logging.info("\tConstant_from_kB Service started")
        # open the file containing the rules and copy them into another file
        rules_file = open(RULE_FILE_NAME)
        rules_plus_constants_files = open(EXPANDED_RULE_FILE_NAME, "w+")
        rules_plus_constants_files.write(rules_file.read())
        # extract info from the KB
        self.extract_teachers_from_KB(TAG_PROF, rules_plus_constants_files)
        self.extract_rooms_courses_from_KB(TAG_ROOM, rules_plus_constants_files)
        self.extract_rooms_courses_from_KB(TAG_COURSE, rules_plus_constants_files)
        # flush the expanded rules so later readers see the constants
        rules_plus_constants_files.close()
        rules_file.close()
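# Illustration of the PNOUN grammar line that extract_rooms_courses_from_KB emits for a
# course fact (the course name here is hypothetical, used only for this example):
course = "machine learning"
predicate = "_".join(course.lower().split(" "))   # -> "machine_learning"
short_form = course.split(" ")[0]                 # -> "machine"
print("PNOUN[SEM=<\\x.(DRS([],[" + predicate + "]))>] -> '" + course + "' | '" + short_form + "'")
# prints: PNOUN[SEM=<\x.(DRS([],[machine_learning]))>] -> 'machine learning' | 'machine'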
class QaService:
    def __init__(self, kb_ID, logging_lvl):
        self.logging_lvl = logging_lvl
        self.kb_ID = kb_ID
        #logging.basicConfig(stream=sys.stderr, level=logging_lvl)
        logging.info('\tQA Service Handler created')
        self.kb_client = KnowledgeBaseClient(True)
        self.query_prof, self.q_prof_answ, self.query_corso, self.q_corso_answ, \
            self.dict_q_aule, self.dict_answ_aule = tp.init_templates_dict()

    def write_to_KB(self, fact, tag):
        """ Post a tuple to the KB """
        self.kb_client.addFact(self.kb_ID, tag, 1, 100, fact)
        return

    def answer_query(self, *param):
        """Called by the KB whenever a user asks a question.
        A number of strategies are tried, in the following order:
        - exact template matching (the user's query is equal to a question in simple_queries.py)
        - tree template matching
        - DRS extraction from the provided grammar rules
        """
        logging.info("\tcallback QA called")
        query = self._get_query_from_kb(param)
        question_answered = self.qa_exact_temp_matching(query)
        if question_answered:
            pass
        else:
            question_answered = drs_matcher(query, EXPANDED_RULE_FILE_NAME, self)
            # TODO: check whether the question was actually "answered" by the DRS matcher
            response = {
                "tag": TAG_ANSWER,
                "text": "Non ho capito. Puoi ripetere?",
                "time_stamp": 1
            }
            self.write_to_KB(response, TAG_ANSWER)

    def _get_query_from_kb(self, response):
        """Extract the user query from the KB response object."""
        answer_arr = response[0]  # first field of the tuple: it contains the response
        #print(answer_arr)
        query = answer_arr["details"][0]["object"]["_data"]["user_query"]
        return query

    def qa_exact_temp_matching(self, input_q):
        """Try to match the user's query exactly against a template.
        Templates live in the templates.py file of this module.
        Returns True if a match is found.
        """
        print("input query: " + input_q)
        # try to match against the professor templates
        res = tp.check_exact_match(input_q, self.query_prof, self.q_prof_answ,
                                   ["professor", "professore", "prof"])
        if res[0]:
            query = res[1]
            query = query.replace("<prof-placeholder>", res[3])
            print("About to run the query on the KB")
            print(query)
            #query = '{"_data": {"tag" : "crawler_course"}}'
            query = json.loads(query)
            resp = self.kb_client.query(query)
            print(resp)
            # produce answer
            return True
        # try to match against the course templates
        res = tp.check_exact_match(input_q, self.query_corso, self.q_corso_answ,
                                   ["corso", "corso di"])
        if res[0]:
            # perform the query on the KB
            query = res[1]
            query = query.replace("<prof-placeholder>", res[3])
            print("About to run the query on the KB")
            print(query)
            #query = '{"_data": {"tag" : "crawler_course"}}'
            query = json.loads(query)
            resp = self.kb_client.query(query)
            print(resp)
            # produce answer
            return True
        # try to match against the room templates
        res = tp.check_exact_match(input_q, self.query_corso, self.q_corso_answ,
                                   ["aula"])
        if res[0]:
            # perform the query on the KB
            query = res[1]
            query = query.replace("<prof-placeholder>", res[3])
            print("About to run the query on the KB")
            print(query)
            #query = '{"_data": {"tag" : "crawler_course"}}'
            query = json.loads(query)
            resp = self.kb_client.query(query)
            print(resp)
            # produce answer
            return True
        return False

    def start(self):
        """Subscribe and wait for data"""
        self.kb_client.subscribe(
            self.kb_ID,
            {"_data": {
                "tag": TAG_ANSWER,
                "text": "$input"
            }},
            self.answer_query)
        #self.kb_client.subscribe(self.kb_ID, {"_data": {"tag": TAG_USER_TRANSCRIPT, "text": "$input", "language": "$lang"}}, self.answer_query)  # from the 'gnlp' module
        logging.info("\tQA service started")
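# Illustrative shape of the callback payload that answer_query receives and
# _get_query_from_kb unpacks (a sketch inferred only from the indexing above;
# the concrete field values are invented):
example_param = (
    {
        "details": [
            {"object": {"_data": {"user_query": "chi insegna nlpcourse?"}}}
        ]
    },
)
# answer_query gets this as *param, so
#   param[0]["details"][0]["object"]["_data"]["user_query"]
# yields the question string that the matching strategies work on.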
"RDF": "an rdf triple", "TEST": "test data" }) print(registering) if (registering['success'] == 0): print('registration failed') def callbfun(res): print("callback:") print(res) print(k.subscribe(myID, {"_data": {"prova": "$x"}}, callbfun)) print(k.addFact(myID, "TEST", 1, 50, {"prova": 1})) print(k.addFact(myID, "TEST", 1, 50, {"prova": 2})) print(k.addFact(myID, "TEST", 1, 50, {"prova": 3})) print(k.removeFact(myID, {"_data": {"prova": 2}})) print(k.queryBind({"_data": {"prova": "$x"}})) print(k.addFact(myID, "TEST", 1, 50, {"prova": "callb"})) print( k.addRule( myID, "TESTRULE", "{\"test\":\"$a\"} <- {\"prova\":\"$a\"};[\"isGreater\", \"$a\", 2]")) print(k.query({"test": "$a"}))
class IESService:
    """ This service updates the internal ELF emotion.
    It is based on some arcane black magic.
    """

    def __init__(self, kb_ID, logging_lvl):
        self.logging_lvl = logging_lvl
        self.max_threshold = 5  # threshold beyond which the travel speed is increased
        self.travel_step = 1.0  # how far to move towards the user's emotion
        self.idle_time_update = 250  # after this much idle time the state update kicks in
        self.threshold = 0  # current counter
        self.timer = None  # timer object
        self.last_user_emotion = "anger"  # last emotion of the user who talked to ELF
        self.travel_dist = 1.0  # normal travel distance
        self.elf_emotion_coord = (0.0, 0.0)  # neutral
        self.kb_ID = kb_ID
        self.dist_modifier = 1.0
        self.kb_client = KnowledgeBaseClient(True)
        #logging.basicConfig(stream=sys.stderr, level=logging_lvl)
        logging.info('\tIES Service started')

    def write_to_KB(self, fact, tag):
        """ Post a tuple to the KB """
        self.kb_client.addFact(self.kb_ID, tag, 1, 100, fact)

    def timed_update(self):
        """ Called when there have been no interactions for idle_time_update seconds """
        self.timer.cancel()
        # drift towards a low-valence, low-arousal point while idle
        new_emotion_point = self.travel_in_emotion_space(
            self.elf_emotion_coord, (-0.5, -0.8))
        fact = {
            "time_stamp": str(datetime.datetime.now()),
            "valence": new_emotion_point[0],
            "arousal": new_emotion_point[1],
            "tag": TAG_ELF_EMOTION
        }
        self.write_to_KB(fact, TAG_ELF_EMOTION)
        # reschedule the idle update
        self.timer = threading.Timer(self.idle_time_update, self.timed_update)
        self.timer.start()

    def on_user_interaction(self, *params):
        logging.info("\tcallback IES called")
        self.timer.cancel()
        user_coord, emotion = self.get_mean_user_emotion()
        if user_coord[0] == 0 and user_coord[1] == 0:
            self.timer = threading.Timer(self.idle_time_update, self.timed_update)
            self.timer.start()
            return
        else:
            if emotion == self.last_user_emotion:
                self.threshold += 1
                new_emotion_point = user_coord
                if self.threshold >= self.max_threshold:
                    # travel with modifier
                    self.dist_modifier = 1.1
                    new_emotion_point = self.travel_in_emotion_space(
                        self.elf_emotion_coord, user_coord)
            else:
                self.dist_modifier = 1.0
                logging.debug("\tCurrent user coord: " + str(user_coord) +
                              " Closest emotion category: " + str(emotion))
                new_emotion_point = self.travel_in_emotion_space(
                    self.elf_emotion_coord, user_coord)
            fact = {
                "time_stamp": str(datetime.datetime.now()),
                "valence": new_emotion_point[0],
                "arousal": new_emotion_point[1],
                "tag": TAG_ELF_EMOTION
            }
            self.write_to_KB(fact, TAG_ELF_EMOTION)
            self.timer = threading.Timer(self.idle_time_update, self.timed_update)
            self.timer.start()

    def _get_query_datas(self, response):
        """Helper to quickly access the result of a query.
        Returns the _data dictionary.
        """
        obj = response["details"][0]["object"]
        datas = obj["_data"]
        print(datas)
        return datas

    def get_mean_user_emotion(self):
        """ Get the user emotion from the various modules, average it,
        convert it to a categorical value and return (valence, arousal), emotion.
        """
        a = 0
        b = 0
        success = True
        # take emotions from face recognition
        query_vis = {
            "_data": {
                "tag": "VISION_FACE_ANALYSIS",
                "is_interlocutor": "True"
            }
        }
        res_vis = self.kb_client.query(query_vis)
        if res_vis["success"]:
            data_vis = self._get_query_datas(res_vis)
            vis_point = em_conv.vector_to_circumplex(data_vis["emotion"])
            print("Vision: ", vis_point)
            a += vis_point[0]
            b += vis_point[1]
        else:
            success = False
            print("Error while retrieving facial emotion")
        query_enlp = {"_data": {"tag": TAG_ELF_EMOTION}}
        res_enlp = self.kb_client.query(query_enlp)
        if res_enlp["success"]:
            data_enlp = self._get_query_datas(res_enlp)
            a += data_enlp["valence"]
            b += data_enlp["arousal"]
        else:
            success = False
            print("Error while retrieving enlp internal emotion")
        if not success:
            # one of the sources is missing: report the neutral point so the caller skips the update
            return (0., 0.), "Neutral"
        a /= 2
        b /= 2
        mean_point = (a, b)
        if a == 0 and b == 0:
            categorical_emo = "Neutral"
        else:
            categorical_emo = em_conv.circumplex_to_emotion(
                mean_point[0], mean_point[1])
        return mean_point, categorical_emo

    def travel_in_emotion_space(self, start, end):
        """start and end are (valence, arousal) tuples.
        Moves the valence/arousal coordinates of the internal state: the internal
        state is the start point, end is the standard coordinate of the user's
        emotion. Returns the new point.
        """
        logging.debug("\tcurrent elf emotion coord == " + str(start))
        logging.debug("\tcurrent user emotion coord == " + str(end))
        vector = (end[0] - start[0], end[1] - start[1])
        norm_v = math.sqrt(vector[0]**2 + vector[1]**2)
        if norm_v > 0:
            direction = (vector[0] / norm_v, vector[1] / norm_v)
        else:
            direction = (0., 0.)
        step = self.travel_step * self.dist_modifier
        new_emotion_point = (start[0] + step * direction[0],
                             start[1] + step * direction[1])
        # update elf status
        self.elf_emotion_coord = new_emotion_point
        logging.debug("\tupdated elf coord == " + str(self.elf_emotion_coord))
        return new_emotion_point

    def start(self):
        """ Start service """
        self.kb_client.subscribe(
            self.kb_ID,
            {"_data": {
                "tag": TAG_VISION,
                "is_interlocutor": "True"
            }}, self.on_user_interaction)
        # right after startup ELF is in a good mood, with no inkling of the misfortunes about to befall it
        fact = {
            "time_stamp": str(datetime.datetime.now()),
            "valence": 0.8,
            "arousal": 0.5,
            "tag": TAG_ELF_EMOTION
        }
        self.write_to_KB(fact, TAG_ELF_EMOTION)
        self.timer = threading.Timer(self.idle_time_update, self.timed_update)
        self.timer.start()
import sys
import json

from kb import KnowledgeBaseClient

kb = KnowledgeBaseClient(True)

if (sys.argv[1] == "query"):
    r = kb.query(json.loads(sys.argv[2]))
elif (sys.argv[1] == "addfact"):
    r = kb.addFact(sys.argv[2], sys.argv[3], sys.argv[4], sys.argv[5],
                   json.loads(sys.argv[6]))
elif (sys.argv[1] == "addrule"):
    r = kb.addRule(sys.argv[2], sys.argv[3], sys.argv[4])
elif (sys.argv[1] == "removefact"):
    r = kb.removeFact(sys.argv[2], json.loads(sys.argv[3]))
elif (sys.argv[1] == "removerule"):
    r = kb.removeRule(sys.argv[2], json.loads(sys.argv[3]))
elif (sys.argv[1] == "updatefact"):
    r = kb.updateFactByID(sys.argv[2], sys.argv[3], sys.argv[4], sys.argv[5],
                          sys.argv[6], json.loads(sys.argv[7]))
elif (sys.argv[1] == "registertags"):
    r = kb.registerTags(sys.argv[2], json.loads(sys.argv[3]))
elif (sys.argv[1] == "tagdetails"):
    r = kb.getTagDetails(sys.argv[2:])
elif (sys.argv[1] == "getalltags"):
    r = kb.getAllTags(
        len(sys.argv) >= 3 and sys.argv[2].lower() in ["true", "yes", "y"])
elif (sys.argv[1] == "register"):
    r = kb.register()
else:
    r = "invalid argument"

# print the result of the selected operation
print(r)
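# Example invocations of the CLI wrapper above (a sketch that assumes the file is
# saved as, e.g., kb_cli.py; the fact/rule payloads mirror the smoke test earlier
# in this section, and <myID> is a placeholder for an ID obtained via `register`):
#   python kb_cli.py register
#   python kb_cli.py addfact <myID> TEST 1 50 '{"prova": 1}'
#   python kb_cli.py query '{"_data": {"prova": "$x"}}'
#   python kb_cli.py removefact <myID> '{"_data": {"prova": 1}}'
#   python kb_cli.py getalltags true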
import sys

from interface_tags import PATH_TO_KB_MODULE

sys.path.insert(0, PATH_TO_KB_MODULE)
from kb import KnowledgeBaseClient

client = KnowledgeBaseClient(False)
kb_id = (client.register())['details']

rules = [
    '{"_meta":{"tag":"TEACHING"}, "teach": "$prof", "room": "$room", "course" : "$course" } <- {"_meta":{"tag":"crawler_course"}, "_data":{"data": {"name" : "$course", "teacher_name": "$prof"}}};{"_meta":{"tag":"crawler_room_event"},"_predicates":[["containsString", ["$course2", "$course"]]], "_data": {"data": {"aula" : "$room", "descrizione" : "$course2"}}}'
    #'{"_meta":{"tag":"TEACHING"}, "teach": "$prof", "room": "$room", "course" : "$course" } <- {"_meta":{"tag":"crawler_course"}, "_data":{"data": {"name" : "$course", "teacher_name": "$prof"}}};{"_meta":{"tag":"crawler_room_event"}, "_data": {"data": {"aula" : "$room", "descrizione" : "$course"}}}'
    #'{"tag":"teaching": "$prof", "room": "$room", "course" : "$course" } <- {"data":{"name" : "$course", "teacher_name": "$prof"}};{"data":{"aula" : "$room", "descrizione" : "$course"}}'
]

"""client.removeRule(kb_id, 2)
for rule in rules:
    x = client.addRule(kb_id, "ENLP_EMOTIVE_ANSWER", rule)
    print(x)
"""

#print(client.query({"_data" : {"name" : "nlpcourse", "teacher_name": "Giuseppe Attardi"}}))
#print(client.query({"_data" : {"aula" : "$X1", "descrizione": "nlpcourse"}}))

res = client.query({"_data": {"teach": "GIUSEPPE ATTARDI", "room": "$x"}})
#res = client.query({"_data":{"teach":"$x"}})
print(res)

# DO NOT USE THIS ANYMORE
#res = client.query({"_data": "$x"})
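# Rough illustration of what the TEACHING rule above is meant to derive (a sketch of
# the intended behaviour only; the room value "C1" and the event description are
# invented, and the exact derivation semantics belong to the KB's rule engine):
crawler_course_fact = {
    "_meta": {"tag": "crawler_course"},
    "_data": {"data": {"name": "nlpcourse", "teacher_name": "GIUSEPPE ATTARDI"}}
}
crawler_room_event_fact = {
    "_meta": {"tag": "crawler_room_event"},
    "_data": {"data": {"aula": "C1", "descrizione": "nlpcourse lecture"}}
}
# Since the event description contains the course name (the containsString predicate),
# the rule should yield something along the lines of
#   {"_meta": {"tag": "TEACHING"}, "teach": "GIUSEPPE ATTARDI", "room": "C1", "course": "nlpcourse"}
# which is what the query on "teach"/"room" above binds against.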