Example #1
0
 def store_parse(self, analysed_article, data):
     """Create an AnalysisSentence row per article sentence, then store the parse.

     Maps each sentence id to the id of its freshly created AnalysisSentence,
     interprets the raw parser output against that mapping, and hands the
     resulting (tokens, triples) to wordcreator for persistence.
     """
     sentence_map = {}
     for sent in sbd.get_or_create_sentences(analysed_article.article):
         created = AnalysisSentence.objects.create(
             analysed_article=analysed_article, sentence=sent)
         sentence_map[sent.id] = created.id
     parsed = interpret_output(sentence_map, data)
     wordcreator.store_analysis(analysed_article, *parsed)
Example #2
0
def add_sentence(asent):
    """Parse one analysis sentence via RDF and store the resulting tokens/triples.

    The RDF payload is written to a file under /tmp first — presumably so the
    raw input is kept on disk for debugging — then re-read for parsing.
    """
    rdf = get_rdf(asent.sentence.sentence)
    # Renamed from `bytes` to avoid shadowing the builtin.
    content = rdf.read()

    fn = "/tmp/sent_{asent.id}.rdf".format(**locals())
    # Context managers ensure both file handles are closed (the original
    # leaked them via open(...).write(...) and a bare open(fn)).
    with open(fn, 'w') as outfile:
        outfile.write(content)
    log.info("Parsing sent {asent.id} from {fn}".format(**locals()))

    with open(fn) as rdf_file:
        sentences, words = parse_rdf(rdf_file)
    tokens, triples = create_values(asent.id, words)
    wordcreator.store_analysis(asent.analysed_article, tokens, triples)
Example #3
0
def add_sentence(asent):
    """Parse one analysis sentence via RDF and store the resulting tokens/triples.

    The RDF payload is dumped to a file under /tmp first — presumably to keep
    the raw input around for debugging — and then re-opened for parsing.
    """
    rdf = get_rdf(asent.sentence.sentence)
    # Renamed from `bytes` to avoid shadowing the builtin.
    content = rdf.read()

    fn = "/tmp/sent_{asent.id}.rdf".format(**locals())
    # Use `with` so both handles are closed (the original leaked them).
    with open(fn, 'w') as outfile:
        outfile.write(content)
    log.info("Parsing sent {asent.id} from {fn}".format(**locals()))

    with open(fn) as rdf_file:
        sentences, words = parse_rdf(rdf_file)
    tokens, triples = create_values(asent.id, words)
    wordcreator.store_analysis(asent.analysed_article, tokens, triples)
Example #4
0
 def store_parse(self, analysed_article, data):
     """Store a CoreNLP XML parse for an article.

     Raises if the parser reported failure or if tokens already exist for the
     article. Reuses existing AnalysisSentence rows when present (lining them
     up with the article's sentences); otherwise creates fresh ones. The
     interpreted (tokens, triples) are persisted via wordcreator.
     """
     if data.startswith("CoreNLP failed"):
         raise Exception(data)

     root = ElementTree.fromstring(data)
     # If the analysis sentences already exist, check there are no tokens and
     # line the analysis_sentence up; otherwise, create new ones.
     sentences = list(sbd.get_or_create_sentences(analysed_article.article))
     if AnalysisSentence.objects.filter(analysed_article=analysed_article).exists():
         if Token.objects.filter(sentence__analysed_article=analysed_article).exists():
             raise Exception("Article already has tokens!")
         analysis_sentences = [AnalysisSentence.objects.get(analysed_article=analysed_article, sentence=sentence).id
                               for sentence in sentences]
     else:
         analysis_sentences = [AnalysisSentence.objects.create(analysed_article=analysed_article, sentence=sentence).id
                               for sentence in sentences]
     result = interpret_xml(analysis_sentences, root)
     # NOTE(review): removed leftover debug line that pickled result[1] to
     # /tmp/triples through an unclosed file handle.
     wordcreator.store_analysis(analysed_article, *result)
Example #5
0
 def store_parse(self, analysed_article, data):
     """Create an AnalysisSentence for each article sentence and store the parse.

     Builds a {sentence.id: analysis_sentence.id} mapping, interprets the raw
     parser output against it, and persists the result via wordcreator.
     """
     id_map = {}
     for sent in sbd.get_or_create_sentences(analysed_article.article):
         id_map[sent.id] = AnalysisSentence.objects.create(
             analysed_article=analysed_article, sentence=sent).id
     wordcreator.store_analysis(
         analysed_article, *interpret_output(id_map, data))