import sys

import postgresops
from mod_flow import flowdb, filedb
from mod_scheduler import scheduledb

# Delete flows that have errored out or whose tasks/files have died.
def flush_flows():
    scheduledb.connect()
    filedb.connect()
    flowdb.connect()
    
    # Flows already marked ERROR are removed unconditionally.
    postgresops.dbcur.execute("select flowdef,time_from,time_to,source_name,source_id from flows.curflows where status=%s", (flowdb.ERROR,))
    idstokill = list(postgresops.dbcur.fetchall())
    
    postgresops.dbcur.execute("select flowdef,time_from,time_to,source_name,source_id,task_ids,file_ids from flows.curflows where status!=%s",(flowdb.DONE,))
    rows = postgresops.dbcur.fetchall()
    for flowdef,time_from,time_to,source_name,source_id,task_ids,file_ids in rows:
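        # First check the flow's tasks: a missing or crashed task kills the flow.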
        found_death = False
        for task_id in task_ids:
            postgresops.dbcur.execute("select status from schedule.tasks where id=%s", (task_id,))
            if postgresops.dbcur.rowcount <= 0 or postgresops.dbcur.fetchone()[0] >= scheduledb.ERROR_CRASH:
                idstokill.append((flowdef, time_from, time_to, source_name, source_id))
                found_death = True
                break
        
        if found_death:
            continue
        
        # Then check its files: a missing record or FAIL status also kills it.
        for file_id in file_ids:
            postgresops.dbcur.execute("select status from flows.files where id=%s", (file_id,))
            if postgresops.dbcur.rowcount <= 0 or postgresops.dbcur.fetchone()[0] == filedb.FAIL:
                idstokill.append((flowdef, time_from, time_to, source_name, source_id))
                found_death = True
                break

    if not idstokill:
        return

    if not autoconfirm:  # module-level flag; see the CLI sketch after this function
        print "The following flows will be removed from the database:\n\t" + "\n\t".join([str(i) for i in idstokill])
        conf = raw_input("Confirm [y|n]? ")
        if conf != 'y' and conf != 'yes':
            print "Cancelled by user"
            sys.exit(10)
            
    for flowdef, time_from, time_to, source_name, source_id in idstokill:
        # mogrify() returns the query with parameters already bound, so it can
        # be logged before execution; don't name it "str", which shadows the builtin.
        query = postgresops.dbcur.mogrify("DELETE FROM flows.curflows WHERE flowdef=%s and time_from=%s and time_to=%s and source_name=%s and source_id=%s",
                                          (flowdef, time_from, time_to, source_name, source_id))
        print query
        postgresops.dbcur.execute(query)
        
    postgresops.dbcon.commit()
    print "";        
Example #2
def do_flows(environ, start_response):
    import config
    from jinja2 import Environment, FileSystemLoader
    from mod_flow import flowdb, filedb
    from mod_scheduler import scheduledb
    import postgresops
    from tasks import apply_status_task

    flowdb.connect()
    env = Environment(loader=FileSystemLoader(environ['DOCUMENT_ROOT']))
    template = env.get_template('flows.html')
    start_response('200 OK', [('Content-Type', 'text/html')])
    
    ## flow info: flows currently running, and the 15 most recently finished
    postgresops.dbcur.execute("SELECT flowdef,time_from,time_to,count(*) from flows.curflows where status=%s group by flowdef,time_from,time_to",(flowdb.RUNNING,))
    active_flows = postgresops.dbcur.fetchall()
    postgresops.dbcur.execute("SELECT flowdef,time_from,time_to,count(*) from flows.curflows where status=%s group by flowdef,time_from,time_to order by time_from desc limit 15 ",(flowdb.DONE,))
    done_flows = postgresops.dbcur.fetchall()

    # filedb status values are integer constants, so %d interpolation is safe here.
    file_cache_progress = filedb.get_files(where='status=%d' % filedb.INVALID)
    file_cache_done = filedb.get_files(where='status=%d or status=%d' % (filedb.VALID, filedb.FAIL))
    for f in file_cache_progress + file_cache_done:
        if f.status == filedb.INVALID or f.status == filedb.FAIL:
            f.statusstr = "INVALID" if f.status == filedb.INVALID else "FAIL"
            task = scheduledb.get_task_by_id(f.task_id)
            if task is not None:
                apply_status_task(task)
                f.idstr = str(f.task_id)+' ('+task.statusstr+')'
            else:
                f.idstr = str(f.task_id)+' (not found)'
        elif f.status == filedb.VALID:
            f.statusstr = "VALID"
            f.idstr = str(f.task_id)

    # WSGI applications must return byte strings; render() yields unicode.
    return [template.render(
                active_flows=active_flows,
                done_flows=done_flows,
                file_cache_progress=file_cache_progress,
                file_cache_done=file_cache_done
            ).encode('utf-8')]
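# A minimal sketch (an assumption, not shown in the original) of serving
# do_flows with the standard library's wsgiref server. do_flows is a plain
# WSGI callable; it only needs environ['DOCUMENT_ROOT'] to point at the
# directory holding flows.html for the Jinja2 FileSystemLoader. The path
# below is hypothetical.
if __name__ == '__main__':
    from wsgiref.simple_server import make_server

    def app(environ, start_response):
        environ.setdefault('DOCUMENT_ROOT', '/var/www/flowmonitor')  # hypothetical
        return do_flows(environ, start_response)

    make_server('localhost', 8000, app).serve_forever()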