# Register the remaining corpora in `all_res`, then render one combined
# multi-corpus report via the Presentation template.
#
# The commented-out corpora are kept for reference; re-enable by
# uncommenting and supplying a valid path/url.
#all_res['ortho-tal'] = indices(title='Ortho corpus',
#                               path='ortho-tal',
#                               url='')
all_res['Maupassant'] = indices(
    title='Maupassant',
    path='maupassant12.bin',
    url='https://github.com/m2litl2019/Projet-conception-Vikidia/blob/master/base/maupassant12.bin')
all_res['Vikibest'] = indices(
    title='Vikibest',
    path='vikibest',
    url='https://github.com/m2litl2019/Projet-conception-Vikidia/tree/master/base/vikibest')
all_res['Vikidia "à simplifier"'] = indices(
    title='Vikidia "à simplifier"',
    path='VikiSimply-tal',
    url='https://github.com/m2litl2019/Projet-conception-Vikidia/blob/master/base/VikiSimply-tal')
#all_res['Monde Diplomatique'] = indices(title='Monde Diplomatique',
#                                        path='md_fr.bin',
#                                        url='https://github.com/m2litl2019/Projet-conception-Vikidia/tree/master/base/md_fr.tal')

# Output
#--------
p = Presentation('templates/maquette2.html')
# enumerate() over .values() replaces the original manual `nb` counter and
# the unused dict key from `.items()`.
for nb, res in enumerate(all_res.values()):
    p.populate(res, nb, name=res['GEN_TITLE'])
# NOTE(review): 'ouput_all' looks like a typo for 'output_all' — confirm the
# actual method name on the Presentation class before renaming; kept as-is
# so the call keeps working against the existing class.
p.ouput_all('results/multitests')
}) else: res.update({ 'SEMLEX_AVG_MANULEX': res['SEMLEX_MANULEX'] / nb_words, 'SEMLEX_AVG_POLY': res['SEMLEX_POLY_INDEX'] / nb_words }) # Indices on HTML res.update(reperage_images_liens_viki(data=html_brut)) res.update(reperage_ponctuation(data=html_brut)) except Exception as e: html += '<span style="background-color: red; color: white;">Error while calculating Indices</span>' raise e # Build Indices output html += '<h2 id="ind">Indices</h2><a href="#sum">Back to summary</a><br><br><div style="background-color: #EFEFEF; border: 1px solid grey;">' p = Presentation('templates/maquette2.html') p.populate(res, name='article') html += p.output_html_string(header=False) html += '</div>' # Build output footer html += """</div> </body> </html> """ else: html = """<!DOCTYPE html> <head> <title>Analyse article</title> </head> <body width="100%"> <center> <form action="/index.py" method="post">
print() res.update(reperage_verbeconj_prorel_sub(VIKIDIA + "/" + article.tag)) print() res.update(reperage_tps(VIKIDIA + "/" + article.tag)) print() res.update(reperage_connecteurs_flesch(VIKIDIA + "/" + article.tag)) print() res.update(reperage_definition(VIKIDIA + "/" + article.tag)) lemmas = extract_lemmas(VIKIDIA + "/" + article.tag) res.update(compare_Manulex(lemmas)) res.update(compute_polysemy_index(VIKIDIA + "/" + article.tag)) res.update(reperage_images_liens_viki( article.tag[:-12], "vikidia")) #url = nom du fichier sans le _wikipedia.txt res.update(reperage_ponctuation(article.tag[:-12], "vikidia")) p.populate(res, i, name=article.tag[:-4]) except ZeroDivisionError: continue WIKIPEDIA = 'Wikipedia-TAL' print('== Wikipedia gros par articles ==') data = load(WIKIPEDIA, automerge=False) for i in range(0, len(data)): try: article = data[i] res = { 'GEN_TITLE': article.tag[:-4], 'GEN_URL': '', 'GEN_DATE': str(datetime.datetime.now())