Example #1
0
def sentence(request, analysis_sentence):
    """Django view: show the semantic-role analysis of one sentence.

    Renders the intermediate syntax graphs (raw, after lexical
    processing, after rule application), the gold-standard comparison,
    and the resulting nuke table for *analysis_sentence*.

    GET parameters:
      donukes -- 'y...' to enable nukes (default 'n')
      ruleset -- ruleset name; 'en' also enables statements without object
    """
    # Use [:1] instead of [0] so an empty ?donukes= parameter does not
    # raise IndexError; ''[:1] is '' which safely compares unequal to 'y'.
    donukes = request.GET.get('donukes', 'n')[:1].lower() == 'y'
    ruleset = request.GET.get('ruleset', '').lower()
    statements_without_object = ruleset == "en"
    if ruleset: ruleset = "_" + ruleset

    # The file-location templates are formatted with the (prefixed) ruleset
    goldfile = GOLDFILE.format(**locals())
    grammarfile = GRAMMARFILE.format(**locals())
    lexiconfile = LEXICONFILE.format(**locals())

    tt = get_tt(ruleset)
    log.info("Loading syntax graph for sentence {analysis_sentence.id}".format(**locals()))
    tt.load_sentence(analysis_sentence.id)

    # Collect (key, label, graph) triples for each processing stage
    graphs = []
    graphs += [("raw", "Raw syntax graph", get_graph(tt))]
    tt.apply_lexical()
    graphs += [("lex", "After lexical processing", get_graph(tt))]

    graphs += apply_rules(tt)

    graphs += check_gold(goldfile, analysis_sentence.id, tt)

    nuketable = get_nukes(analysis_sentence, tt, statements_without_object)
    nuketable = tableoutput.table2htmlDjango(nuketable)

    # Template context is locals(); keep variable names stable
    lexfn, grammarfn, goldfn = lexiconfile, grammarfile, goldfile # into locals
    return render(request, "navigator/semanticroles/sentence.html", locals())
Example #2
0
def index(request):
    """Django view: scraper status overview.

    Shows three tables -- daily, non-daily and inactive scrapers -- each
    with a link to the scraper's article set and one column per day with
    the number of articles scraped on that day (last 14 days).
    """
    daily_scrapers = list(Scraper.objects.filter(run_daily=True, active=True))
    non_daily_scrapers = list(
        Scraper.objects.filter(run_daily=False, active=True))
    inactive_scrapers = list(Scraper.objects.filter(active=False))
    # Last 14 days: dates[0] is today, dates[-1] is the oldest date
    dates = [
        datetime.date.today() - datetime.timedelta(days=n) for n in range(14)
    ]

    scraper_lists = {
        "daily_table": daily_scrapers,
        "non_daily_table": non_daily_scrapers,
        "inactive_table": inactive_scrapers
    }

    # Attach the per-date article counts to every scraper object
    for s_list in scraper_lists.values():
        for scraper in s_list:
            scraper.articles = scraper.n_scraped_articles(from_date=dates[-1],
                                                          to_date=dates[0])

    scraper_tables = {
        name: ObjectTable(rows=s_list, columns=["id", "label"])
        for name, s_list in scraper_lists.items()
    }

    def Set(scraper):
        # Render a link to the scraper's article set, or "" when none is set
        s = scraper.articleset
        if s is None: return ""
        url = reverse(articleset, args=[s.project.id, s.id])
        return "<a href='{url}'>{s}</a>".format(**locals())

    for s_table in scraper_tables.values():
        s_table.addColumn(Set)

    def getdate(date, scraper):
        # Articles scraped on `date`; 0 when the date is absent
        return scraper.articles.get(date, 0)

    # One column per day, headed by the "MM-DD" tail of the ISO date
    for date in dates:
        for s_table in scraper_tables.values():
            s_table.addColumn(partial(getdate, date), str(date)[-5:])

    table_dict = {}
    for t_name, s_table in scraper_tables.items():
        table_dict[t_name] = table2htmlDjango(s_table, safe=True)

    # NOTE: Python 2 only -- dict.items() returns lists, so `+` concatenates;
    # this merges the rendered tables into the locals()-based template context
    return render(request, 'navigator/scrapers/index.html',
                  dict(locals().items() + table_dict.items()))
Example #3
0
def index(request):
    """Django view: scraper overview (daily / non-daily / inactive).

    For each scraper, attaches the per-day article counts for the last
    14 days and renders one HTML table per scraper category.
    """
    daily_scrapers = list(Scraper.objects.filter(run_daily=True,active=True))
    non_daily_scrapers = list(Scraper.objects.filter(run_daily=False,active=True))
    inactive_scrapers = list(Scraper.objects.filter(active=False))
    # dates[0] is today, dates[-1] the oldest of the 14 days
    dates = [datetime.date.today() - datetime.timedelta(days=n) for n in range(14)]

    scraper_lists = {"daily_table":daily_scrapers,
                     "non_daily_table":non_daily_scrapers,
                     "inactive_table":inactive_scrapers}

    # attach per-date article counts to every scraper
    for s_list in scraper_lists.values():
        for scraper in s_list:
            scraper.articles = scraper.n_scraped_articles(from_date=dates[-1], to_date=dates[0])

    scraper_tables = {name : ObjectTable(rows=s_list, columns=["id", "label"]) for name,s_list in scraper_lists.items()}

    def Set(scraper):
        # link to the scraper's article set, or "" when none is configured
        s = scraper.articleset
        if s is None: return ""
        url = reverse(articleset, args=[s.project.id, s.id])
        return "<a href='{url}'>{s}</a>".format(**locals())

    for s_table in scraper_tables.values():
        s_table.addColumn(Set)

    def getdate(date, scraper):
        # articles scraped on `date`; 0 when the date is missing
        return scraper.articles.get(date, 0)
    
    # one column per day, headed by the "MM-DD" tail of the ISO date
    for date in dates:
        for s_table in scraper_tables.values():
            s_table.addColumn(partial(getdate, date), str(date)[-5:])

    table_dict = {}
    for t_name, s_table in scraper_tables.items():
        table_dict[t_name] = table2htmlDjango(s_table, safe=True)

    # NOTE: Python 2 only -- items() returns lists, so `+` merges the
    # rendered tables into the locals()-based template context
    return render(request, 'navigator/scrapers/index.html', dict(locals().items() + table_dict.items()))
Example #4
0
 def run(self, tableObj):
     """Render *tableObj* to HTML, via the configured template if any."""
     template = self.options['template']
     if not template:
         # No template configured: fall back to the generic converter
         return tableoutput.table2htmlDjango(tableObj)
     return render_to_string(template, {'table': tableObj})
Example #5
0
 def run(self, tableObj):
     """Return *tableObj* rendered as HTML.

     Uses the configured Django template when one is set, otherwise
     falls back to the generic table-to-HTML converter.
     """
     tmpl = self.options['template']
     return (render_to_string(tmpl, {'table': tableObj})
             if tmpl else tableoutput.table2htmlDjango(tableObj))
Example #6
0
def index(request):
    """Django view: gold-standard overview for the semantic-role rules.

    For every sentence in the gold standard, applies the current rule
    set and scores the found roles against the gold roles (tp/fp/fn,
    precision, recall, F-score).  When the grammar file has been
    modified since the last visit, the new scores are stored in the
    session so the previous score and the difference can be shown.
    """
    # build table with gold standard sentences
    ruleset = request.GET.get('ruleset', '').lower()
    if ruleset: ruleset = "_" + ruleset

    goldfile = GOLDFILE.format(**locals())
    grammarfile = GRAMMARFILE.format(**locals())
    g, gold_relations = get_gold(goldfile)
    comments = get_gold_comments(goldfile)

    # if rules are modified, store current values
    grammar_modified = os.path.getmtime(grammarfile)
    store_score = request.session.get('grammartime', None) != grammar_modified
    request.session['grammartime'] = grammar_modified

    sentences = AnalysisSentence.objects.filter(pk__in=g.keys())

    metrics = {} # (sid, "tp"/"fn"/"fp") : score
    tt = get_tt(ruleset, gold_relations)
    for sentence in sentences:
        tt.load_sentence(sentence.id)
        tt.apply_lexical()
        tt.apply_rules()

        found = set(tt.get_roles())
        # debug print replaced by logging so the view stays quiet in production
        log.debug("Roles found for sentence %i: %s", sentence.id, found)
        gold = g[sentence.id]
        gold = set(do_gold_reality(found, gold))
        tp = len(gold & found)
        fp = len(found - gold)
        fn = len(gold - found)
        # precision/recall are None when undefined (zero denominator)
        pr = tp / float(tp + fp) if (tp + fp) else None
        re = tp / float(tp + fn) if (tp + fn) else None
        f = 2 * pr * re / (pr + re) if (pr or re) else 0
        if tp + fp + fn == 0: f = None
        # NOTE: the metric names must match the local variable names above,
        # since they are resolved through locals()
        for metric in "tp fp fn pr re f".split():
            metrics[sentence.id, metric] = locals()[metric]
        key = "semanticroles_fscore_%i" % sentence.id
        previous = request.session.get(key, None)
        metrics[sentence.id, "prev"] = "" if previous is None else previous
        metrics[sentence.id, "diff"] = "" if previous is None else colorize((f or 0) - previous)
        if store_score:
            request.session[key] = f

    sentencetable = table3.ObjectTable(rows=sentences)
    sentencetable.addColumn(lambda s : "<a href='{url}?ruleset={ruleset}'>{s.id}</a>".format(url=reverse('semanticroles-sentence', args=[s.id]), ruleset=ruleset[1:], s=s), "ID")
    sentencetable.addColumn(lambda s : unicode(s.sentence.sentence)[:60], "Sentence")
    sentencetable.addColumn(lambda s : "<br/>".join(comments.get(s.id, [])), "Remarks")
    def get_metric(metric, sentence):
        # Format one metric cell: blank for None, two decimals for floats
        result = metrics[sentence.id, metric]
        if result is None: result = ""
        if isinstance(result, float): result = "%1.2f" % result
        return result
    for metric in ("tp","fp","fn", "f", "prev", "diff"):
        sentencetable.addColumn(partial(get_metric, metric), metric)

    sentencetablehtml = tableoutput.table2htmlDjango(sentencetable, safe=True)

    # debug print replaced by logging
    log.debug("grammar_modified=%s store_score=%s", grammar_modified, store_score)

    # Template context is locals(); keep variable names stable
    return render(request, "navigator/semanticroles/index.html", locals())