Example #1

import sys

# Project-local modules; the import paths below are assumptions inferred from
# how the names are used in this example, not confirmed by the source.
from conll_reader import conll_reader
from extract_training_data import FeatureExtractor
from decoder import Parser

if __name__ == "__main__":

    WORD_VOCAB_FILE = 'data/words.vocab'
    POS_VOCAB_FILE = 'data/pos.vocab'

    try:
        word_vocab_f = open(WORD_VOCAB_FILE,'r')
        pos_vocab_f = open(POS_VOCAB_FILE,'r') 
    except FileNotFoundError:
        print("Could not find vocabulary files {} and {}".format(WORD_VOCAB_FILE, POS_VOCAB_FILE))
        sys.exit(1) 

    extractor = FeatureExtractor(word_vocab_f, pos_vocab_f)
    parser = Parser(extractor, sys.argv[1])  # sys.argv[1]: path to the trained model file

    total_labeled_correct = 0
    total_unlabeled_correct = 0
    total_words = 0

    las_list = []
    uas_list = []    
   
    count = 0 
    with open(sys.argv[2], 'r') as in_file:  # sys.argv[2]: test set in CoNLL format
        print("Evaluating. (Each . represents 100 test dependency trees)")
        for dtree in conll_reader(in_file):
            words = dtree.words()
            pos = dtree.pos()
            predict = parser.parse_sentence(words, pos)
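            # ------------------------------------------------------------------
            # The example is cut off at this point. Below is a minimal sketch of
            # how the evaluation loop might continue, assuming each dependency
            # structure exposes a `deprels` dict keyed by word id whose values
            # carry `.head` and `.deprel` attributes (names inferred from usage,
            # not confirmed by the source).
            labeled_correct = 0
            unlabeled_correct = 0
            for ident, gold in dtree.deprels.items():
                pred = predict.deprels.get(ident)
                if pred is not None and pred.head == gold.head:
                    unlabeled_correct += 1          # correct head -> counts toward UAS
                    if pred.deprel == gold.deprel:
                        labeled_correct += 1        # correct head and label -> counts toward LAS
            sent_length = len(dtree.deprels)

            total_labeled_correct += labeled_correct
            total_unlabeled_correct += unlabeled_correct
            total_words += sent_length

            las_list.append(labeled_correct / sent_length)
            uas_list.append(unlabeled_correct / sent_length)

            count += 1
            if count % 100 == 0:
                print(".", end="")
                sys.stdout.flush()

        # Micro average: totals over all words; macro average: mean per-sentence score.
        print()
        print("Micro Avg. Labeled Attachment Score: {:.4f}".format(total_labeled_correct / total_words))
        print("Micro Avg. Unlabeled Attachment Score: {:.4f}".format(total_unlabeled_correct / total_words))
        print("Macro Avg. Labeled Attachment Score: {:.4f}".format(sum(las_list) / len(las_list)))
        print("Macro Avg. Unlabeled Attachment Score: {:.4f}".format(sum(uas_list) / len(uas_list)))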
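Based on the sys.argv usage, the script expects the trained model and a CoNLL-formatted test file as command-line arguments; an illustrative invocation (script and file names are hypothetical) would be:

    python evaluate.py data/model.h5 data/dev.conll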