# Example 1
# Registry of corpora to analyse: maps a display name to its indices()
# descriptor (title, on-disk path, public URL of the resource).

#all_res['ortho-tal'] = indices(title='Ortho corpus',
#                               path='ortho-tal',
#                               url='')

# Enabled corpora, as (registry key / title, path, url) triples.
_CORPORA = (
    ('Maupassant',
     'maupassant12.bin',
     'https://github.com/m2litl2019/Projet-conception-Vikidia/blob/master/base/maupassant12.bin'),
    ('Vikibest',
     'vikibest',
     'https://github.com/m2litl2019/Projet-conception-Vikidia/tree/master/base/vikibest'),
    ('Vikidia "à simplifier"',
     'VikiSimply-tal',
     'https://github.com/m2litl2019/Projet-conception-Vikidia/blob/master/base/VikiSimply-tal'),
)
for _title, _path, _url in _CORPORA:
    all_res[_title] = indices(title=_title, path=_path, url=_url)

#all_res['Monde Diplomatique'] = indices(title='Monde Diplomatique',
#                                        path='md_fr.bin',
#                                        url='https://github.com/m2litl2019/Projet-conception-Vikidia/tree/master/base/md_fr.tal')

# Output
#--------

# Render each registered corpus's indices into the presentation template,
# one numbered slot per corpus, in registry (insertion) order.
p = Presentation('templates/maquette2.html')
# enumerate replaces the manual `nb = 0 / nb += 1` counter; the dict key
# was unused, so iterate the values directly.
for nb, res in enumerate(all_res.values()):
    p.populate(res, nb, name=res['GEN_TITLE'])
# NOTE(review): 'ouput_all' looks like a typo for 'output_all', but it is
# the method name this project's Presentation class exposes (used the same
# way later in the file) — confirm before renaming.
p.ouput_all('results/multitests')
# Example 2
        # Build the result record for this article: generic metadata first,
        # then each reperage_* metric function merges its own keys into res.
        res = {
            'GEN_TITLE': article.tag[:-4],  # tag minus its last 4 chars — presumably strips a '.txt'-style extension; confirm
            'GEN_URL': '',
            'GEN_DATE': str(datetime.datetime.now())
        }
        print()
        res.update(reperage_passive(WIKIPEDIA + "/" + article.tag))
        print()
        res.update(reperage_pronoms(WIKIPEDIA + "/" + article.tag))
        print()
        res.update(reperage_verbeconj_prorel_sub(WIKIPEDIA + "/" +
                                                 article.tag))
        print()
        res.update(reperage_tps(WIKIPEDIA + "/" + article.tag))
        print()
        res.update(reperage_connecteurs_flesch(WIKIPEDIA + "/" + article.tag))
        print()
        res.update(reperage_definition(WIKIPEDIA + "/" + article.tag))
        # Lemma-based metrics reuse one extraction pass instead of re-reading
        # the file per metric.
        lemmas = extract_lemmas(WIKIPEDIA + "/" + article.tag)
        res.update(compare_Manulex(lemmas))
        res.update(compute_polysemy_index(WIKIPEDIA + "/" + article.tag))
        res.update(reperage_images_liens_viki(
            article.tag[:-14],
            "wikipedia"))  # url = file name without the _wikipedia.txt suffix
        res.update(reperage_ponctuation(article.tag[:-14], "wikipedia"))
        # Render this article's metrics into slot i of the presentation.
        p.populate(res, i, name=article.tag[:-4])
    except ZeroDivisionError:
        # NOTE(review): presumably raised by ratio metrics on empty or very
        # short articles — such articles are silently skipped; confirm intent.
        continue

# Write every populated page to disk.
# NOTE(review): 'ouput_all' looks like a typo for 'output_all' but matches
# the Presentation API call used earlier in this file — confirm before renaming.
p.ouput_all('results/multitests-articles')