コード例 #1
0
def modifyAllTreesFromAllUsers(project):
    sql = database.SQL(project)
    db, cursor = sql.open()
    command = "select sentences.rowid, sentences.nr, sentences.textid, trees.userid, trees.rowid from sentences, texts, trees where sentences.textid=texts.rowid and trees.sentenceid = sentences.rowid;"
    cursor.execute(command)
    answer = cursor.fetchall()
    print len(answer), "trees to update"
    lastpourc = -1
    for nr, (sid, snr, tid, uid, treeid) in enumerate(answer):
        pourc = int(float(nr) / len(answer) * 100)
        if pourc != lastpourc:
            sys.stdout.write("{pourc}%\r".format(pourc=pourc))
        sys.stdout.flush()
        dic = sql.gettree(None, None, treeid, indb=db, incursor=cursor)
        if dic and dic["tree"]:
            sentencetree = dic["tree"]
            #print "Resgmentation en cours de ", " ".join([node["t"] for i, node in sentencetree.iteritems()]), "id=", treeid, "by user", uid
            #newtree=retokenisation.retokenizeTree(sentencetree)
            newtree = retokenisation.segmentationChiffres(sentencetree)
            #pprint.pprint(newtree)
            sql.enterTree(cursor, newtree, sid, uid, tokensChanged=True)
            #print "Tree", treeid," successfully updated."
    print "Done."
    db.commit()
    db.close()
コード例 #2
0
def main():
    """Scrape the Landsnet power page, parse it, and store the result.

    Reads the target URL from config.json, scrapes the page, extracts the
    desired values and their timestamp, merges them, and writes the combined
    row into the 'landsnet' table.
    """
    with open(f'{cwd}/config.json') as f:
        config = json.load(f)
    url = config['landsnet']['URL']
    logging_message = 'power'
    with Scraper() as scraper_:
        soup = scraper_.scrape(url, description=logging_message)
    landsnet = Landsnet()
    parsed_data = landsnet.parse(soup=soup)
    desired_values = landsnet.extract_desired_values(parsed_data)
    timestamp = landsnet.get_timestamp(parsed_data)
    final_data = landsnet.merge_data(desired_values, timestamp)
    sql = database.SQL()
    # consistency fix: reuse logging_message instead of repeating the
    # 'power' literal (same value, single source of truth)
    sql.write(table='landsnet', data=final_data, logging_message=logging_message)
コード例 #3
0
def main():
    """Fetch the weather forecast for every configured station and store it.

    Loads the station mapping from config.json, scrapes each station's
    forecast page, parses the raw result, and writes the formatted rows
    into the 'weather' table.
    """
    with open(f'{cwd}/config.json') as config_file:
        configuration = json.load(config_file)
    forecast = Forecast()
    db = database.SQL()
    logging_message = 'weather'
    # one scrape + parse + write cycle per configured station
    for name, identifier in configuration["met"]["stations"].items():
        station_url = forecast.get_url(identifier)
        with Scraper() as page_scraper:
            raw = page_scraper.scrape(url=station_url, description=logging_message)
        rows = forecast.parse(name, raw)
        db.write(table='weather', data=rows, logging_message=logging_message)
コード例 #4
0
def updateParseResult(projectname, conlldirpath, filepattern="*.trees.conll14", annotatorName="parser", removeToGetDB="-one-word-per-line.conll14_parse"):
	sql = database.SQL(projectname)
	db,cursor=sql.open()
	print  "updateTrees:",glob(os.path.join(conlldirpath, filepattern))
	for filename in glob(os.path.join(conlldirpath, filepattern)):
		print "entering",filename
		sentences=conll.conllFile2trees(filename)
		dbtextname = os.path.basename(filename)[:-len(removeToGetDB)]
		textid = sql.enter(cursor, "texts",["textname"],(dbtextname,))
		
		if not textid:
			print "couldn't find the database named",textid
			return
		enterNewAnnotation(sql, db,cursor, sentences, textid, annotatorName=annotatorName)