# fp1 and fp2. Then, given a sentence, returns the shortest path
# within each graph together with the subgraphs attached to each node.
# (Tail of the original module docstring; its opening was lost in extraction.)

from SentenceNetCreator import SentenceNetCreator
from SentenceNetVisitor import SentenceNetVisitor
from irutils.TextFilter import TextFilter
from pygraph.classes.digraph import digraph
import nltk

fp1 = '../docs/wikipedia.txt'
fp2 = '../docs/gediminas2005.txt'
fp3 = '../docs/ricci.txt'
fp4 = '../docs/wikipediagediminas2005.txt'

sentenceNetWiki = SentenceNetCreator()
sentenceNetWiki.createNet([fp1])
sentenceNetWiki.write_graph('wikipedia_graph.gv')
print 'Wiki size', len(sentenceNetWiki.get_net().nodes())

sentenceNetGediminas = SentenceNetCreator()
sentenceNetGediminas.createNet([fp2])
sentenceNetGediminas.write_graph('gediminas_graph.gv')
print 'Gediminas size', len(sentenceNetGediminas.get_net().nodes())

sentenceNetRicci = SentenceNetCreator()
sentenceNetRicci.createNet([fp3])
sentenceNetRicci.write_graph('ricci_graph.gv')
print 'Ricci size', len(sentenceNetRicci.get_net().nodes())

sentenceNetWikiGedi = SentenceNetCreator()
from SentenceNetVisitor import SentenceNetVisitor
from XMLReqManager import XMLReqManager
from SentenceNetCreator import SentenceNetCreator
from irutils.TextFilter import TextFilter

s1 = SentenceNetCreator()
n1 = s1.get_net()
v1 = SentenceNetVisitor(n1, s1.get_edge_start_weight(),
                        s1.get_start_occurrences_num())

xml_doc_handler = XMLReqManager('req_document.xsd', '2007 - eirene fun 7.xml')
req_document = xml_doc_handler.get_requirements_text()

terms_filter = TextFilter()

for sent in req_document:
    filtered_sent = terms_filter.filter_all(sent)
    path1, path_weight1 = v1.search_A_star(filtered_sent)

print 'now producing a random sentence according to the document learnt...'
print v1.get_random_sentence('network', 100)
# Example no. 3 (scrape artifact: stray snippet header and vote count removed)
from SentenceNetVisitor import SentenceNetVisitor
from XMLReqManager import XMLReqManager
from SentenceNetCreator import SentenceNetCreator
from irutils.TextFilter import TextFilter

s1 = SentenceNetCreator()
n1 = s1.get_net()
v1 = SentenceNetVisitor(n1, s1.get_edge_start_weight(),
                        s1.get_start_occurrences_num())

xml_doc_handler = XMLReqManager('req_document.xsd', '2007 - eirene fun 7.xml')
req_document = xml_doc_handler.get_requirements_text()

terms_filter = TextFilter()

for sent in req_document:
    filtered_sent = terms_filter.filter_all(sent)
    filtered_sent = terms_filter.remove_item(filtered_sent, "\"")
    filtered_sent = terms_filter.remove_item(filtered_sent, "-")
    print filtered_sent
    v1.search_A_star(filtered_sent)

s1.write_graph("eireneGraph.gv")
# fp1 and fp2. Then, given a sentence, returns the shortest path
# within each graph together with the subgraphs attached to each node.
# (Tail of the original module docstring; its opening was lost in extraction.)

from SentenceNetCreator import SentenceNetCreator
from SentenceNetVisitor import SentenceNetVisitor
from irutils.TextFilter import TextFilter
from pygraph.classes.digraph import digraph
import nltk

fp1 = '../docs/wikipedia.txt'
fp2 = '../docs/gediminas2005.txt'
fp3 = '../docs/ricci.txt'
fp4 = '../docs/wikipediagediminas2005.txt'

sentenceNetWiki = SentenceNetCreator()
sentenceNetWiki.createNet([fp1])
sentenceNetWiki.write_graph('wikipedia_graph.gv')
print 'Wiki size', len(sentenceNetWiki.get_net().nodes())

sentenceNetGediminas = SentenceNetCreator()
sentenceNetGediminas.createNet([fp2])
sentenceNetGediminas.write_graph('gediminas_graph.gv')
print 'Gediminas size', len(sentenceNetGediminas.get_net().nodes())

sentenceNetRicci = SentenceNetCreator()
sentenceNetRicci.createNet([fp3])
sentenceNetRicci.write_graph('ricci_graph.gv')
print 'Ricci size', len(sentenceNetRicci.get_net().nodes())

sentenceNetWikiGedi = SentenceNetCreator()
from SentenceNetVisitor import SentenceNetVisitor
from XMLReqManager import XMLReqManager
from SentenceNetCreator import SentenceNetCreator
from irutils.TextFilter import TextFilter


s1 = SentenceNetCreator()
n1 = s1.get_net()
v1 = SentenceNetVisitor(n1, s1.get_edge_start_weight(), s1.get_start_occurrences_num())

xml_doc_handler = XMLReqManager('req_document.xsd', '2007 - eirene fun 7.xml')
req_document = xml_doc_handler.get_requirements_text()

terms_filter = TextFilter()

for sent in req_document:
    filtered_sent = terms_filter.filter_all(sent)
    filtered_sent = terms_filter.remove_item(filtered_sent, "\"")
    filtered_sent = terms_filter.remove_item(filtered_sent, "-")
    print filtered_sent
    v1.search_A_star(filtered_sent)

s1.write_graph("eireneGraph.gv")