sentenceNetRicci.createNet([fp3])
sentenceNetRicci.write_graph('ricci_graph.gv')
print 'Ricci size', len(sentenceNetRicci.get_net().nodes())

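# build the WikiGedi net from source fp4 and write it out in Graphviz format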
sentenceNetWikiGedi = SentenceNetCreator()
sentenceNetWikiGedi.createNet([fp4])
sentenceNetWikiGedi.write_graph('wikigedi_graph.gv')
print 'WikiGedi size', len(sentenceNetWikiGedi.get_net().nodes())

print "Weighted Knowledge Graphs created"

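# filter the query sentence before searching the nets (filter_all presumably
# strips stop words and normalises tokens)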
terms_filter = TextFilter()
sentence = "The system shall display similar books"
filtered_sent = terms_filter.filter_all(sentence)

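# A* search over each net: returns the best path found for the filtered
# sentence together with its total weight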
visitor_wiki = SentenceNetVisitor(sentenceNetWiki.get_net(), sentenceNetWiki.get_edge_start_weight(), sentenceNetWiki.get_start_occurrences_num())
path_wiki, path_weight_wiki = visitor_wiki.search_A_star(filtered_sent)

print path_wiki
print path_weight_wiki

visitor_gediminas = SentenceNetVisitor(sentenceNetGediminas.get_net(), sentenceNetGediminas.get_edge_start_weight(), sentenceNetGediminas.get_start_occurrences_num())
path_gediminas, path_weight_gediminas = visitor_gediminas.search_A_star(filtered_sent)

print path_gediminas
print path_weight_gediminas

visitor_ricci = SentenceNetVisitor(sentenceNetRicci.get_net(), sentenceNetRicci.get_edge_start_weight(), sentenceNetRicci.get_start_occurrences_num())
path_ricci, path_weight_ricci = visitor_ricci.search_A_star(filtered_sent)

print path_ricci
print path_weight_ricci
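
# Second, self-contained script: learn a sentence net from an XML
# requirements document, then generate a random sentence from it.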
from SentenceNetVisitor import SentenceNetVisitor
from XMLReqManager import XMLReqManager
from SentenceNetCreator import SentenceNetCreator
from irutils.TextFilter import TextFilter

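# a fresh, empty net and a visitor over it; the start weight and occurrence
# count presumably seed the edges the visitor adds while searching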
s1 = SentenceNetCreator()
n1 = s1.get_net()
v1 = SentenceNetVisitor(n1, s1.get_edge_start_weight(),
                        s1.get_start_occurrences_num())

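# read the requirement texts from the EIRENE XML document; the .xsd is
# presumably used to validate it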
xml_doc_handler = XMLReqManager('req_document.xsd', '2007 - eirene fun 7.xml')
req_document = xml_doc_handler.get_requirements_text()

terms_filter = TextFilter()

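# run every filtered requirement sentence through A* so the net learns
# the document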
for sent in req_document:
    filtered_sent = terms_filter.filter_all(sent)
    path1, path_weight1 = v1.search_A_star(filtered_sent)

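# sample a sentence from the learnt net; the arguments are presumably the
# seed word and a length cap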
print 'now producing a random sentence from the learnt document...'
print v1.get_random_sentence('network', 100)
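
# Variant of the previous script: extra token clean-up inside the loop,
# and the learnt graph is written to disk instead of sampling a sentence.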
from SentenceNetVisitor import SentenceNetVisitor
from XMLReqManager import XMLReqManager
from SentenceNetCreator import SentenceNetCreator
from irutils.TextFilter import TextFilter


s1 = SentenceNetCreator()
n1 = s1.get_net()
v1 = SentenceNetVisitor(n1, s1.get_edge_start_weight(), s1.get_start_occurrences_num())

xml_doc_handler = XMLReqManager('req_document.xsd', '2007 - eirene fun 7.xml')
req_document = xml_doc_handler.get_requirements_text()

terms_filter = TextFilter()

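# filter each sentence, then strip stray quote and dash tokens that
# survive filter_all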
for sent in req_document:
    filtered_sent = terms_filter.filter_all(sent)
    filtered_sent = terms_filter.remove_item(filtered_sent, "\"")
    filtered_sent = terms_filter.remove_item(filtered_sent, "-")
    print filtered_sent
    v1.search_A_star(filtered_sent)

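# persist the learnt EIRENE net in Graphviz format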
s1.write_graph("eireneGraph.gv")