def ontology_words_mapping(sentence):
	"""Normalize an utterance so its words match ontology terms.

	Lower-cases the sentence, rewrites key words and phrases, joins
	compound words known to the semantic network into single underscore
	tokens, and finally maps every word to its canonical ("aka") ontology
	name.  Returns the rewritten sentence as one space-separated string.
	"""
	G = kb_services.load_semantic_network()
	# NOTE(review): result is unused, but the call may have side effects -- kept.
	language_info = kb_services.language_info()
	sentence = sentence.lower()
	# substitutes pronoun "me" with "speaker" (later: use robot context, who is talking?)
	sentence = re.sub(' me ', ' speaker ', sentence)
	sentence = re.sub(r'\?', ' ?', sentence)
	sentence = re.sub('pick up ', 'take ', sentence)

	# light cleanup: double spaces, periods, commas, trailing space
	sentence = re.sub('  ', ' ', sentence)
	sentence = re.sub(r'\.', '', sentence)
	sentence = re.sub(', ', ' ', sentence)
	sentence = re.sub(' $', '', sentence)

	# spelled-out numbers to digits
	sentence = re.sub(' one ', ' 1 ', sentence)
	sentence = re.sub(' two ', ' 2 ', sentence)
	sentence = re.sub(' three ', ' 3 ', sentence)
	sentence = re.sub(' four ', ' 4 ', sentence)
	sentence = re.sub(' five ', ' 5 ', sentence)

	sentence = re.sub(' other ', ' ', sentence)

	sentence = re.sub(' that is ', ' that_is ', sentence)
	# BUGFIX: the longer phrases must be rewritten before the bare ' from ',
	# otherwise ' from ' fires first and ' from top of ' can never match.
	sentence = re.sub(' from top of ', ' that_is above ', sentence)
	sentence = re.sub(' placed on ', ' that_is above ', sentence)
	sentence = re.sub(' from ', ' that_is above ', sentence)
	sentence = re.sub(' off ', ' that_is above ', sentence)

	sentence = re.sub(' which is ', ' that_is ', sentence)

	sentence = re.sub(' exactly down', ' down', sentence)

	sentence = re.sub(' most left ', ' leftmost ', sentence)

	# declaration of classes and objects
	sentence = re.sub('((is)|(are)) ((an object of)|(an instance of)|(an adjetive of))( a)? ', 'is is_object_of ', sentence)
	sentence = re.sub('((is)|(are)) ((a kind of)|(a sort of)) ', 'is is_kind_of ', sentence)
	# simple form of verbs
	sentence = re.sub(' ((is)|(are)) ', ' is ', sentence)
	# unite compound words: "ice cream" -> "ice_cream"
	compound_words = kb_services.compound_words(G)
	for each in compound_words:
		spaced = each.replace('_', ' ')
		# BUGFIX: plain string replace; re.sub would misbehave if a
		# compound word contained a regex metacharacter.
		sentence = sentence.replace(spaced, each)

	# map every word to its canonical ontology name
	words = sentence.split(' ')
	new_sentence = []
	for each in words:
		new_sentence.append(kb_services.find_original_aka(G, each))

	new_sentence = " ".join(new_sentence)
	return new_sentence
def test_chunker():
	G = kb_services.load_semantic_network()
	#print G.nodes()
	sentence = "find the door on the right"
	compound_words = kb_services.compound_words(G)
	#print "compound nons :  ", compound_words
	for each in compound_words:
		test = re.sub('_', ' ', each)
		sentence = re.sub(test, each, sentence)

	words, ranked_tags = pos_tagger(G, sentence)	


	print words
	print ranked_tags[0]

	pp_interpretation = constituent_chunker(grammar_np_simple, words, ranked_tags[0])

	print "chunked words: ", pp_interpretation[1]

	print "noun phrases: ", pp_interpretation[2]

#test_ontology_word_mapping()
#test_pos()
#test_disambiguity()
#test_cyk()
#test_chunker()
Beispiel #3
0
def test_cyk():
	G = kb_services.load_semantic_network()
	#print G.nodes()
	words, ranked_tags = pos_tagger(G, "the kitchen")
	print  "words: ", words, "  tags: ", ranked_tags

	print "NP? ", parser_cyk(grammar_np_simple, ranked_tags[0])
Beispiel #4
0
def test_cyk():
    G = kb_services.load_semantic_network()
    #print G.nodes()
    words, ranked_tags = pos_tagger(G, "the man in the kitchen")
    print "words: ", words, "  tags: ", ranked_tags

    print "NP? ", parser_cyk(grammar_np_simple, ranked_tags[0])
def interpret_command(sentence_string):

	print "lol"

	G = kb_services.load_semantic_network()
	grounded_commands = interpretation.sentence_grounder(G, sentence_string)
	print "loll"
	print "grounded command: ", grounded_commands
	for each_command in grounded_commands:
		expression = interpretation.generate_dependency(G, each_command)
		print "generated expression to planner: ", expression
	print "lolll"
#	sentences  = interpretation.break_sentence(sentence_string)
#	print "hi: ", sentences
#	for command in sentences[0:1]:
#		grounded_commands = interpretation.sentence_grounder(G, command)
#		print "grounded command: ", grounded_commands
#		
#		for each_command in grounded_commands:
#			expression = interpretation.generate_dependency(G, each_command)
#			print "output expression: ", expression
#			if commands != [False]:
#				interpreted_sentences
#				interpreted_sentences += 1
#				commands[0] = re.sub(' \)', ')', commands[0])
#				commands[0] = re.sub('_', ' ', commands[0])
				
	return expression
Beispiel #6
0
def interpret_command(sentence_string):

    print "lol"

    G = kb_services.load_semantic_network()
    grounded_commands = interpretation.sentence_grounder(G, sentence_string)
    print "loll"
    print "grounded command: ", grounded_commands
    for each_command in grounded_commands:
        expression = interpretation.generate_dependency(G, each_command)
        print "generated expression to planner: ", expression
    print "lolll"
    #	sentences  = interpretation.break_sentence(sentence_string)
    #	print "hi: ", sentences
    #	for command in sentences[0:1]:
    #		grounded_commands = interpretation.sentence_grounder(G, command)
    #		print "grounded command: ", grounded_commands
    #
    #		for each_command in grounded_commands:
    #			expression = interpretation.generate_dependency(G, each_command)
    #			print "output expression: ", expression
    #			if commands != [False]:
    #				interpreted_sentences
    #				interpreted_sentences += 1
    #				commands[0] = re.sub(' \)', ')', commands[0])
    #				commands[0] = re.sub('_', ' ', commands[0])

    return expression
Beispiel #7
0
def test_chunker():
    G = kb_services.load_semantic_network()
    #print G.nodes()
    sentence = "find the door on the right"
    compound_words = kb_services.compound_words(G)
    #print "compound nons :  ", compound_words
    for each in compound_words:
        test = re.sub('_', ' ', each)
        sentence = re.sub(test, each, sentence)

    words, ranked_tags = pos_tagger(G, sentence)

    print words
    print ranked_tags[0]

    pp_interpretation = constituent_chunker(grammar_np_simple, words,
                                            ranked_tags[0])

    print "chunked words: ", pp_interpretation[1]

    print "noun phrases: ", pp_interpretation[2]


#test_ontology_word_mapping()
#test_pos()
#test_disambiguity()
#test_cyk()
#test_chunker()
def test_solver(sentence_string):
    G = kb_services.load_semantic_network()
    grounded_commands = sentence_grounder(G, sentence_string)
    print "grounded command: ", grounded_commands
    for each_command in grounded_commands:
        expression = generate_dependency(G, each_command)
        print "generated expression to planner: ", expression
def test_solver(sentence_string):
	G = kb_services.load_semantic_network()
	grounded_commands = sentence_grounder(G, sentence_string)
	print "grounded command: ", grounded_commands
	for each_command in grounded_commands:
		expression = generate_dependency(G, each_command)
		print "generated expression to planner: ", expression
def test_chunker():
	G = kb_services.load_semantic_network()
	#print G.nodes()
	words, ranked_tags = pos_tagger(G, "the man in the kitchen")	

	print words
	print ranked_tags[0]

	pp_interpretation = pp_chunker(grammar_pp, words, ranked_tags[0], [])

	print "chunked words: ", pp_interpretation[1]

	print "noun phrases: ", pp_interpretation[3]

#test_ontology_word_mapping()
#test_pos()
#test_disambiguity()
#test_chunker()
#test_cyk()
Beispiel #11
0
def test_chunker():
    G = kb_services.load_semantic_network()
    #print G.nodes()
    words, ranked_tags = pos_tagger(G, "the man in the kitchen")

    print words
    print ranked_tags[0]

    pp_interpretation = pp_chunker(grammar_pp, words, ranked_tags[0], [])

    print "chunked words: ", pp_interpretation[1]

    print "noun phrases: ", pp_interpretation[3]


#test_ontology_word_mapping()
#test_pos()
#test_disambiguity()
#test_chunker()
#test_cyk()
def load_facts_file_to_clips():
	"""Assert one CLIPS fact per (noun, attribute, value) triple found in the KB."""
	G = kb_services.load_semantic_network()
	# every class and every instance below 'stuff'
	nouns = kb_services.all_subclasses(G, 'stuff') + kb_services.all_objects(G, 'stuff')
	facts_to_load = []
	for noun in nouns:
		attributes = kb_services.get_attribute(G, noun, 'attribute')
		for attribute in attributes:
			if attribute == 'nop':
				# a 'nop' attribute contributes only its first value
				facts_to_load.append("(fact " + noun + " " + attribute + " " + attributes[attribute][0] + ")")
			else:
				# ordinary attributes contribute one fact per value
				for value in attributes[attribute]:
					facts_to_load.append("(fact " + noun + " " + attribute + " " + value + ")")

	for fact in facts_to_load:
		fi = clips.Assert(fact)
	# seed the id counter used by the rule base
	fi = clips.Assert("(id_count 1)")
Beispiel #13
0
def ontology_words_mapping(sentence):
    """Normalize an utterance so its words match ontology terms.

    Lower-cases the sentence, rewrites key words and phrases, joins
    compound words known to the semantic network into single underscore
    tokens, and finally maps every word to its canonical ("aka") ontology
    name.  Returns the rewritten sentence as one space-separated string.
    """
    G = kb_services.load_semantic_network()
    # NOTE(review): result is unused, but the call may have side effects -- kept.
    language_info = kb_services.language_info()
    sentence = sentence.lower()
    # substitutes pronoun "me" with "speaker" (later: use robot context, who is talking?)
    sentence = re.sub(' me ', ' speaker ', sentence)
    sentence = re.sub(r'\?', ' ?', sentence)
    sentence = re.sub('pick up ', 'take ', sentence)

    # light cleanup: double spaces, periods, commas, trailing space
    sentence = re.sub('  ', ' ', sentence)
    sentence = re.sub(r'\.', '', sentence)
    sentence = re.sub(', ', ' ', sentence)
    sentence = re.sub(' $', '', sentence)

    # spelled-out numbers to digits
    sentence = re.sub(' one ', ' 1 ', sentence)
    sentence = re.sub(' two ', ' 2 ', sentence)
    sentence = re.sub(' three ', ' 3 ', sentence)
    sentence = re.sub(' four ', ' 4 ', sentence)
    sentence = re.sub(' five ', ' 5 ', sentence)

    sentence = re.sub(' other ', ' ', sentence)

    sentence = re.sub(' that is ', ' that_is ', sentence)
    # BUGFIX: the longer phrases must be rewritten before the bare ' from ',
    # otherwise ' from ' fires first and ' from top of ' can never match.
    sentence = re.sub(' from top of ', ' that_is above ', sentence)
    sentence = re.sub(' placed on ', ' that_is above ', sentence)
    sentence = re.sub(' from ', ' that_is above ', sentence)
    sentence = re.sub(' off ', ' that_is above ', sentence)

    sentence = re.sub(' which is ', ' that_is ', sentence)

    sentence = re.sub(' exactly down', ' down', sentence)

    sentence = re.sub(' most left ', ' leftmost ', sentence)

    # declaration of classes and objects
    sentence = re.sub(
        '((is)|(are)) ((an object of)|(an instance of)|(an adjetive of))( a)? ',
        'is is_object_of ', sentence)
    sentence = re.sub('((is)|(are)) ((a kind of)|(a sort of)) ',
                      'is is_kind_of ', sentence)
    # simple form of verbs
    sentence = re.sub(' ((is)|(are)) ', ' is ', sentence)
    # unite compound words: "ice cream" -> "ice_cream"
    compound_words = kb_services.compound_words(G)
    for each in compound_words:
        spaced = each.replace('_', ' ')
        # BUGFIX: plain string replace; re.sub would misbehave if a
        # compound word contained a regex metacharacter.
        sentence = sentence.replace(spaced, each)

    # map every word to its canonical ontology name
    words = sentence.split(' ')
    new_sentence = []
    for each in words:
        new_sentence.append(kb_services.find_original_aka(G, each))

    new_sentence = " ".join(new_sentence)
    return new_sentence
Beispiel #14
0
def test_pos():
    # graph of the knowledge base
    G = kb_services.load_semantic_network()
    #print G.nodes()
    words, ranked_tags = pos_tagger(G, "something to eat")
    print "words: ", words, "  tags: ", ranked_tags
def ontology_words_mapping(sentence):
	"""Normalize a spoken command so its words match ontology terms.

	Lower-cases the sentence, rewrites command phrases and spatial
	relations into underscore tokens, strips politeness wrappers, joins
	compound words known to the semantic network, and maps every word to
	its canonical ("aka") ontology name.  Returns the rewritten sentence.
	"""
	G = kb_services.load_semantic_network()
	# NOTE(review): result is unused, but the call may have side effects -- kept.
	language_info = kb_services.language_info()
	sentence = sentence.lower()
	sentence = re.sub(r'\?', ' ?', sentence)
	sentence = re.sub('pick up ', 'take ', sentence)
	sentence = re.sub('robot ', '', sentence)

	# light cleanup: double spaces, periods, commas, trailing space
	sentence = re.sub('  ', ' ', sentence)
	sentence = re.sub(r'\.', '', sentence)
	sentence = re.sub(', ', ' ', sentence)
	sentence = re.sub(' $', '', sentence)

	# spelled-out numbers to digits
	sentence = re.sub(' one ', ' 1 ', sentence)
	sentence = re.sub(' two ', ' 2 ', sentence)
	sentence = re.sub(' three ', ' 3 ', sentence)
	sentence = re.sub(' four ', ' 4 ', sentence)
	sentence = re.sub(' five ', ' 5 ', sentence)

	sentence = re.sub(' other ', ' ', sentence)

	sentence = re.sub(' person ', ' man ', sentence)

	sentence = re.sub(' which is ', ' that_is ', sentence)

	sentence = re.sub(' exactly down', ' down', sentence)

	# verb phrases to single tokens
	sentence = re.sub('search for', 'search_for', sentence)
	sentence = re.sub('look for', 'look_for', sentence)
	sentence = re.sub('go out', 'go_out', sentence)
	sentence = re.sub('get into', 'get_into', sentence)
	sentence = re.sub('searches for', 'searches_for', sentence)
	sentence = re.sub('pick up', 'pick_up', sentence)
	sentence = re.sub('bring up', 'bring_up', sentence)

	# BUGFIX: within each family the longest phrase is rewritten first;
	# previously the short pattern (e.g. "right of") ran before the long
	# one ("right of the"), so the long pattern could never match.
	# Exact duplicates were also removed.
	sentence = re.sub('right of the', 'right_of', sentence)
	sentence = re.sub('right of', 'right_of', sentence)
	sentence = re.sub('near of the', 'near_of', sentence)
	sentence = re.sub('near of', 'near_of', sentence)
	sentence = re.sub('left of the', 'left_of', sentence)
	sentence = re.sub('left of', 'left_of', sentence)
	sentence = re.sub('to the front of the', 'in_front_of', sentence)
	sentence = re.sub('to the front of', 'in_front_of', sentence)
	sentence = re.sub('in front of the', 'in_front_of', sentence)
	sentence = re.sub('in front of', 'in_front_of', sentence)
	sentence = re.sub('in front', 'in_front', sentence)
	sentence = re.sub('behind of the', 'behind_of', sentence)
	sentence = re.sub('behind of', 'behind_of', sentence)
	sentence = re.sub('next to the', 'next_to', sentence)
	sentence = re.sub('next to', 'next_to', sentence)

	# NOTE(review): mapping "i want"/"i need" to "next_to" looks like a
	# copy-paste slip, but the original behavior is kept until confirmed.
	sentence = re.sub('i want', 'next_to', sentence)
	sentence = re.sub('i need', 'next_to', sentence)

	sentence = re.sub('bottle of', 'bottle', sentence)

	sentence = re.sub('at corner of', 'at_corner_of', sentence)
	sentence = re.sub('at corner', 'at_corner', sentence)
	sentence = re.sub('at end of', 'at_end_of', sentence)
	sentence = re.sub('at end', 'at_end', sentence)

	# strip politeness wrappers
	sentence = re.sub('(could|can|would) you (please )?(robot )?', '', sentence)
	sentence = re.sub('please (robot )?', '', sentence)

	# declaration of classes and objects
	sentence = re.sub('((is)|(are)) ((an object of)|(an instance of)|(an adjetive of))( a)? ', 'is is_object_of ', sentence)
	sentence = re.sub('((is)|(are)) ((a kind of)|(a sort of)) ', 'is is_kind_of ', sentence)
	# simple form of verbs
	sentence = re.sub(' ((is)|(are)) ', ' is ', sentence)
	# unite compound words known to the semantic network
	compound_words = kb_services.compound_words(G)
	for each in compound_words:
		spaced = each.replace('_', ' ')
		# BUGFIX: plain string replace; re.sub would misbehave if a
		# compound word contained a regex metacharacter.
		sentence = sentence.replace(spaced, each)

	# map every word to its canonical ontology name
	words = sentence.split(' ')
	new_sentence = []
	for each in words:
		new_sentence.append(kb_services.find_original_aka(G, each))

	new_sentence = " ".join(new_sentence)
	return new_sentence
def ontology_words_mapping(sentence):
	"""Normalize an utterance so its words match ontology terms (Montreal variant).

	Lower-cases the sentence, rewrites key phrases into underscore tokens,
	strips politeness wrappers, joins compound words known to the semantic
	network, and maps every word to its canonical ("aka") ontology name.
	Returns the rewritten sentence as one space-separated string.
	"""
	G = kb_services.load_semantic_network()
	# NOTE(review): result is unused, but the call may have side effects -- kept.
	language_info = kb_services.language_info()
	sentence = sentence.lower()
	sentence = re.sub(r'\?', ' ?', sentence)
	sentence = re.sub('pick up ', 'take ', sentence)
	# BUGFIX: this line (and the apostrophe/cabinet lines below) was
	# space-indented inside a tab-indented body -- a TabError under
	# Python 3 and fragile under Python 2.  Indentation normalized.
	sentence = re.sub('-', '_', sentence)

	# light cleanup: double spaces, periods, commas, trailing space
	sentence = re.sub('  ', ' ', sentence)
	sentence = re.sub(r'\.', '', sentence)
	sentence = re.sub(', ', ' ', sentence)
	sentence = re.sub(' $', '', sentence)
	sentence = re.sub("'", ' ', sentence)

	# spelled-out numbers to digits
	sentence = re.sub(' one ', ' 1 ', sentence)
	sentence = re.sub(' two ', ' 2 ', sentence)
	sentence = re.sub(' three ', ' 3 ', sentence)
	sentence = re.sub(' four ', ' 4 ', sentence)
	sentence = re.sub(' five ', ' 5 ', sentence)

	sentence = re.sub(' other ', ' ', sentence)

	sentence = re.sub(' person ', ' man ', sentence)

	sentence = re.sub(' which is ', ' that_is ', sentence)

	sentence = re.sub(' exactly down', ' down', sentence)

	sentence = re.sub('search for ', 'search_for ', sentence)

	sentence = re.sub('look for', 'look_for', sentence)
	sentence = re.sub('middle bottom', 'middle_bottom', sentence)

	# Montreal special object names
	sentence = re.sub('bathroom s cabinet', 'bathroom_s_cabinet', sentence)

	# strip politeness wrappers
	sentence = re.sub('(could|can|would) you (please )?(robot )?', '', sentence)
	sentence = re.sub('please (robot )?', '', sentence)

	# declaration of classes and objects
	sentence = re.sub('((is)|(are)) ((an object of)|(an instance of)|(an adjetive of))( a)? ', 'is is_object_of ', sentence)
	sentence = re.sub('((is)|(are)) ((a kind of)|(a sort of)) ', 'is is_kind_of ', sentence)
	# simple form of verbs
	sentence = re.sub(' ((is)|(are)) ', ' is ', sentence)
	# unite compound words known to the semantic network
	compound_words = kb_services.compound_words(G)
	for each in compound_words:
		spaced = each.replace('_', ' ')
		# BUGFIX: plain string replace; re.sub would misbehave if a
		# compound word contained a regex metacharacter.
		sentence = sentence.replace(spaced, each)

	# map every word to its canonical ontology name
	words = sentence.split(' ')
	new_sentence = []
	for each in words:
		new_sentence.append(kb_services.find_original_aka(G, each))

	new_sentence = " ".join(new_sentence)
	return new_sentence
Beispiel #17
0
# code dependencies
import kb_services
import parsing
import interpretation
# network toolkit
import networkx as nx
# regular expressions 
import re
# drawing



# Script entry: load the KB and read the benchmark sentences from disk.
G = kb_services.load_semantic_network()
#print G.nodes()

# counters/collectors for the interpretation benchmark run below
interpreted_sentences = 0
correctly_interpreted = []
misinterpreted = []
correctly_interpreted_counter = 0
misinterpreted_counter = 0


# NOTE(review): `file` shadows the builtin, and `file_report` is opened
# for writing but never written to or closed in the visible part of the
# script -- confirm against the full script before changing.
file = open('ejemplos_rockin.txt', 'r')
file_report = open('resultdos_rockin', 'w')
lines = file.readlines()
file.close()

print "lines: ", lines

#lines = ["Drop the jar", "Put the can on the counter", "Find the glass in the living room", "Search for the glass in the kitchen", "Go to the dining room", "Move along the wall", "Take the cereal box", "Grab the mayo on the table", "remove the sheets from the bed"]
Beispiel #18
0
def test_pos():
	# graph of the knowledge base 
	G = kb_services.load_semantic_network()
	#print G.nodes()
	words, ranked_tags = pos_tagger(G, "something to eat")
	print  "words: ", words, "  tags: ", ranked_tags
def ontology_words_mapping(sentence):
    """Normalize a spoken command so its words match ontology terms.

    Lower-cases the sentence, rewrites command phrases and spatial
    relations into underscore tokens, strips politeness wrappers, joins
    compound words known to the semantic network, and maps every word to
    its canonical ("aka") ontology name.  Returns the rewritten sentence.
    """
    G = kb_services.load_semantic_network()
    # NOTE(review): result is unused, but the call may have side effects -- kept.
    language_info = kb_services.language_info()
    sentence = sentence.lower()
    sentence = re.sub(r'\?', ' ?', sentence)
    sentence = re.sub('pick up ', 'take ', sentence)
    sentence = re.sub('robot ', '', sentence)

    # light cleanup: double spaces, periods, commas, trailing space
    sentence = re.sub('  ', ' ', sentence)
    sentence = re.sub(r'\.', '', sentence)
    sentence = re.sub(', ', ' ', sentence)
    sentence = re.sub(' $', '', sentence)

    # spelled-out numbers to digits
    sentence = re.sub(' one ', ' 1 ', sentence)
    sentence = re.sub(' two ', ' 2 ', sentence)
    sentence = re.sub(' three ', ' 3 ', sentence)
    sentence = re.sub(' four ', ' 4 ', sentence)
    sentence = re.sub(' five ', ' 5 ', sentence)

    sentence = re.sub(' other ', ' ', sentence)

    sentence = re.sub(' person ', ' man ', sentence)

    sentence = re.sub(' which is ', ' that_is ', sentence)

    sentence = re.sub(' exactly down', ' down', sentence)

    # verb phrases to single tokens
    sentence = re.sub('search for', 'search_for', sentence)
    sentence = re.sub('look for', 'look_for', sentence)
    sentence = re.sub('go out', 'go_out', sentence)
    sentence = re.sub('get into', 'get_into', sentence)
    sentence = re.sub('searches for', 'searches_for', sentence)
    sentence = re.sub('pick up', 'pick_up', sentence)
    sentence = re.sub('bring up', 'bring_up', sentence)

    # BUGFIX: within each family the longest phrase is rewritten first;
    # previously the short pattern (e.g. "right of") ran before the long
    # one ("right of the"), so the long pattern could never match.
    # Exact duplicates were also removed.
    sentence = re.sub('right of the', 'right_of', sentence)
    sentence = re.sub('right of', 'right_of', sentence)
    sentence = re.sub('near of the', 'near_of', sentence)
    sentence = re.sub('near of', 'near_of', sentence)
    sentence = re.sub('left of the', 'left_of', sentence)
    sentence = re.sub('left of', 'left_of', sentence)
    sentence = re.sub('to the front of the', 'in_front_of', sentence)
    sentence = re.sub('to the front of', 'in_front_of', sentence)
    sentence = re.sub('in front of the', 'in_front_of', sentence)
    sentence = re.sub('in front of', 'in_front_of', sentence)
    sentence = re.sub('in front', 'in_front', sentence)
    sentence = re.sub('behind of the', 'behind_of', sentence)
    sentence = re.sub('behind of', 'behind_of', sentence)
    sentence = re.sub('next to the', 'next_to', sentence)
    sentence = re.sub('next to', 'next_to', sentence)

    # NOTE(review): mapping "i want"/"i need" to "next_to" looks like a
    # copy-paste slip, but the original behavior is kept until confirmed.
    sentence = re.sub('i want', 'next_to', sentence)
    sentence = re.sub('i need', 'next_to', sentence)

    sentence = re.sub('bottle of', 'bottle', sentence)

    sentence = re.sub('at corner of', 'at_corner_of', sentence)
    sentence = re.sub('at corner', 'at_corner', sentence)
    sentence = re.sub('at end of', 'at_end_of', sentence)
    sentence = re.sub('at end', 'at_end', sentence)

    # strip politeness wrappers
    sentence = re.sub('(could|can|would) you (please )?(robot )?', '', sentence)
    sentence = re.sub('please (robot )?', '', sentence)

    # declaration of classes and objects
    sentence = re.sub(
        '((is)|(are)) ((an object of)|(an instance of)|(an adjetive of))( a)? ',
        'is is_object_of ', sentence)
    sentence = re.sub('((is)|(are)) ((a kind of)|(a sort of)) ',
                      'is is_kind_of ', sentence)
    # simple form of verbs
    sentence = re.sub(' ((is)|(are)) ', ' is ', sentence)
    # unite compound words known to the semantic network
    compound_words = kb_services.compound_words(G)
    for each in compound_words:
        spaced = each.replace('_', ' ')
        # BUGFIX: plain string replace; re.sub would misbehave if a
        # compound word contained a regex metacharacter.
        sentence = sentence.replace(spaced, each)

    # map every word to its canonical ontology name
    words = sentence.split(' ')
    new_sentence = []
    for each in words:
        new_sentence.append(kb_services.find_original_aka(G, each))

    new_sentence = " ".join(new_sentence)
    return new_sentence
Beispiel #20
0
# network toolkit
import networkx as nx
# regular expressions 
import re
# drawing
import networkx.drawing
import matplotlib.pyplot as plt


# Fixed battery of example utterances to ground against the KB.
bateria_ejemplos = [
"place green pyramid on top of red brick"
]


# NOTE(review): kb_services/interpretation are not imported in the visible
# part of this snippet -- presumably imported above; verify.
G = kb_services.load_semantic_network()

# Ground each example sentence and collect one planner command per
# characterized reading.
for each_example in bateria_ejemplos:
	analized_sentences = interpretation.sentence_grounder(G, each_example)

	commands = []
	for each_caracterized_sentence in analized_sentences:

		print "sentence ready to be matched::       ------------------------------------"
		print "generatiing meaning expressions from ", each_caracterized_sentence["objects"]
		commands.append(interpretation.generate_dependency(G, each_caracterized_sentence))

#	print "commands to planner..."	
#	for each in commands:
#		print "sent to planner: ", each
#		print "planner response:"