Example #1
import operator

# Project-local modules used below
import Organize
import Papers
import queries
import Semantics_Scorer
import Syntax_Scorer


def index(a1_file, articles, max_sentences):

    global a_file
    a_file = a1_file

    q1 = a1_file.protein1
    q2 = a1_file.protein2
    query = queries.main(q1,q2)    # Creates Queries
    q1_syns = query.q1_syns        # Retrieves Q1 and Q2 synonyms
    q2_syns = query.q2_syns
    print a1_file.protein1, ' synonyms = ', q1_syns
    print a1_file.protein2, ' synonyms = ', q2_syns
    
    ID_sentence_position_list = Papers.main(query, articles)  # Sentences where both queries co-occur
    if len(ID_sentence_position_list) > 0:
        print str(len(ID_sentence_position_list)) + " sentences with co-occurrence found"

    # Score each candidate syntactically, then semantically, and rank by score
    sentences_with_score1 = Syntax_Scorer.main(ID_sentence_position_list, query, max_sentences)
    sentences_with_score2 = Semantics_Scorer.main(sentences_with_score1, query)
    sorted_sentences_with_score2 = sorted(sentences_with_score2, key=operator.attrgetter('score'), reverse=True)
    if sorted_sentences_with_score2:
        with open(r'txt_files_Testing\calibration unlimited sentences', 'a') as f:
            f.write(query.q1+'\t'+query.q2+'\n')
            for sent in sorted_sentences_with_score2:
                sent_w_replaced_queries = Organize.insert_syns(sent.sentence, q1, q1_syns, q2, q2_syns)
                if str(sent.sentence)[0] != '<':  # Skip entries that start with '<' (likely markup, not sentence text)
                    f.write(str(sent.score) +' '+ str(sent.method_scored)+'\t'+ sent_w_replaced_queries + '\n')
                    print str(sent.score) +' '+ sent_w_replaced_queries
            f.write('\n') 
            
    print_output_to_file(sorted_sentences_with_score2, q1_syns, q1, q2_syns, q2, a1_file) 
    print ""
Example #2
import Organize


def print_output_to_file(sorted_sentences_with_score2, q1_syns, q1, q2_syns,
                         q2, a1_file):
    # sorted_sentences_with_score2 is either a list of scored sentence objects
    # or one of the two error-message strings checked below.
    for sent_obj in sorted_sentences_with_score2[:1]:  # Only the top-ranked entry is written
        if sorted_sentences_with_score2 == "No Papers with both queries were found on PubMed":
            with open(r'text_files\generated_test_data.txt', 'a') as f:
                f.write('\n' + a1_file.protein1 + '\t')
                f.write(a1_file.protein2 + '\t')
                if a1_file.paper_ID:
                    f.write(a1_file.paper_ID + '\n')
                else:
                    f.write('\n')
                f.write("No Papers with both queries were found on PubMed!" +
                        '\n')

        elif sorted_sentences_with_score2 == "No sentences with co-occurrence found":
            with open(r'text_files\generated_test_data.txt', 'a') as f:
                f.write('\n' + a1_file.protein1 + '\t')
                f.write(a1_file.protein2 + '\t')
                if a1_file.paper_ID:
                    f.write(a1_file.paper_ID + '\n')
                else:
                    f.write('\n')
                f.write('No sentences with co-occurrence found!' + '\n')
        else:
            score = sent_obj.score
            method_scored = str(sent_obj.method_scored)
            sent = sent_obj.sentence
            PMID = sent_obj.paper_id  # PubMed ID; not used in the output below
            sent_w_replaced_queries = Organize.insert_syns(
                sent, q1, q1_syns, q2, q2_syns)
            with open(r'text_files\generated_test_data.txt', 'a') as f:
                f.write('\n' + a1_file.protein1 + '\t')
                f.write(a1_file.protein2 + '\t')
                f.write(str(score) + '\t')
                f.write(str(method_scored) + '\t')
                if a1_file.paper_ID:
                    f.write(a1_file.paper_ID + '\n')
                else:
                    f.write('\n')
                f.write(sent_w_replaced_queries + '\n')
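
The records appended to text_files\generated_test_data.txt by the success branch span two lines each, preceded by a blank line: a tab-separated header (protein1, protein2, score, scoring method, paper ID or an empty field) followed by the sentence. A rough reader for that layout, offered only as a sketch under that assumption:

# Sketch: read back the records written by print_output_to_file().
# Assumes the blank-line / header-line / sentence-line layout described above.
def read_generated_test_data(path=r'text_files\generated_test_data.txt'):
    with open(path) as f:
        lines = [line.rstrip('\n') for line in f]
    records = []
    i = 0
    while i < len(lines):
        if not lines[i]:  # blank line that precedes each record
            i += 1
            continue
        header_fields = lines[i].split('\t')
        sentence = lines[i + 1] if i + 1 < len(lines) else ''
        records.append((header_fields, sentence))
        i += 2
    return records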
Example #3
import json

import Organize

# Read the raw pacman log
with open('example/pacman.log', 'r') as f:
    log = f.read()

""" (Work In Progress)
log = Organize.log2list(log)
print('\n [A]ll, [I]nstalled, [U]pgraded, [R]emoved')
user = {
    "option": str,
    "quantity": int(100),
    "date": str
}
user["option"] = input('(A|I|U|R) > ')

print('\n Quantity of items (optional)')
tmp = input('(Default: 100) > ')
user["quantity"] = int(tmp) if tmp else 100

print('\n Date of the items (optional)')
user["date"] = input('(day/month/year) > ')

print(user)"""

# Convert the log with Organize.log2json() and write it out as indented JSON
with open('example/file.json', 'w') as f:
    f.write(json.dumps(Organize.log2json(log), indent="\t"))
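
A quick way to sanity-check the output is to load example/file.json back with the standard library; the structure of the parsed object depends entirely on what Organize.log2json() returns, which this example does not show.

import json

# Re-load the JSON written above; its shape is determined by Organize.log2json().
with open('example/file.json') as f:
    parsed = json.load(f)
print(type(parsed))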