def update_file_mult(file, fields):
    for field, val in fields:
        postgresops.check_evil(field)
    postgresops.dbcur.execute(
        "UPDATE flows.files SET "
        + ",".join([field + "=%s" for field, val in fields])
        + " where id=%s",
        [val for field, val in fields] + [file.id])
    postgresops.dbcon.commit()
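# A hedged usage sketch for the *_mult updaters above: the field/value pairs and
# the id are made-up illustrations, not values from this codebase. It only shows
# how the parameterized statement and its bind list are assembled; nothing is
# executed against the database.
def _update_file_mult_sql_sketch():
    fields = [('status', 3), ('file_name', '/tmp/out.csv')]   # illustrative pairs only
    sql = ("UPDATE flows.files SET "
           + ",".join([field + "=%s" for field, val in fields])
           + " where id=%s")
    params = [val for field, val in fields] + [42]            # 42 stands in for file.id
    # sql    -> UPDATE flows.files SET status=%s,file_name=%s where id=%s
    # params -> [3, '/tmp/out.csv', 42]
    return sql, params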
def do_showlog(environ, start_response):
    import config
    import postgresops
    from mod_scheduler import scheduledb

    start_response("200 OK", [("Content-Type", "text/html")])
    resp = ["<html><body>\n"]
    d = cgi.parse_qs(environ["QUERY_STRING"])
    if "taskid" in d:
        try:
            scheduledb.connect()
            postgresops.check_evil(d["taskid"][0])
            resp.append("<h2>Log File for Task ID: %s</h2>\n<pre>\n" % d["taskid"][0])
            task = scheduledb.get_tasks(where="id = %s" % (d["taskid"][0]), orderby="start_time desc")
            if len(task) != 1:
                resp.append("Error 0 or >1 tasks matched!")
            else:
                fin = open(task[0].log_file, "r")
                resp.extend(fin.readlines())
                fin.close()
        except Exception, exp:
            resp.append("Exception " + str(exp) + " occurred")
        resp.append("</pre>\n")
def do_showplot(environ, start_response):
    import config
    d = cgi.parse_qs(environ['QUERY_STRING'])
    if 'id' in d:
        from mod_flow import filedb
        import postgresops
        try:
            filedb.connect()
            postgresops.check_evil(d['id'][0])
            files = filedb.get_files(where='id = %s' % d['id'][0])
            if len(files) != 1:
                raise Exception("Error 0 or >1 files matched!")
            else:
                f = tempfile.NamedTemporaryFile(delete=False)
                fname = f.name
                f.close()
                cmdline = str(config.map['web']['plotcsvcmd']
                              + " -csvin %s -pngout %s -title \"Source:%s ID:%s Filters:%s\""
                              % (files[0].file_name, fname, files[0].source_name,
                                 files[0].source_id, files[0].steps))
                cmds = shlex.split(cmdline)
                subprocess.call(cmds)
                if os.path.getsize(fname) == 0:
                    raise Exception("Plot file not created. Error plotting?")
                start_response('200 OK', [('Content-Type', 'image/png')])
                return TempFileWrapper(fname)
        except Exception, exp:
            import traceback
            start_response('500 ERROR', [('Content-Type', 'text/plain')])
            return ["Exception " + str(exp) + " occurred\n", traceback.format_exc()]
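# TempFileWrapper is referenced above but defined elsewhere in this project; the
# class below is only a sketch of what such a wrapper plausibly does, under the
# assumption that it streams the temporary PNG to the WSGI layer and removes the
# file once iteration finishes. It is not the project's actual implementation.
class _TempFileWrapperSketch(object):
    def __init__(self, fname, blocksize=8192):
        self.fname = fname
        self.blocksize = blocksize
        self.fobj = open(fname, 'rb')

    def __iter__(self):
        return self

    def next(self):  # Python 2 iterator protocol
        block = self.fobj.read(self.blocksize)
        if block:
            return block
        self.fobj.close()
        os.unlink(self.fname)  # clean up the temp file created by do_showplot
        raise StopIteration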
def update_flow_mult(flow, fields):
    for field, val in fields:
        postgresops.check_evil(field)
    postgresops.dbcur.execute(
        "UPDATE flows.curflows SET "
        + ",".join([field + "=%s" for field, val in fields])
        + " where time_from=%s and time_to=%s and source_name=%s and source_id=%s",
        [val for field, val in fields] + [flow.time_from, flow.time_to, flow.source_name, flow.source_id])
    postgresops.dbcon.commit()
def do_admin(environ, start_response):
    import config
    from mod_exec import mod_exec_IF

    start_response('200 OK', [('Content-Type', 'text/html')])
    resp = ['<html><head><meta http-equiv="Refresh" content="10;url=javascript:window.history.back()" /></head><body>']
    resp.append("<h2>Admin Command Progress</h2>\n<pre>\n")
    d = cgi.parse_qs(environ['QUERY_STRING'])
    if 'modname' in d:
        if 'action' in d:
            resp.append("Module command: %s\n" % d['action'][0])
            resp.append("Module name: %s\n\n" % d['modname'][0])
            if d['modname'][0] == 'mod_exec' and d['action'][0] == 'start':
                # different procedure
                mod_exec_IF.connect()
                if mod_exec_IF.connected():
                    resp.append("mod_exec already responding, please kill first if you wish to restart")
                else:
                    if os.path.exists('/etc/init.d/sensezilla'):
                        rcode = os.system('/etc/init.d/sensezilla start')
                    else:
                        rcode = os.system(config.map['mod_exec']['python'] + ' '
                                          + config.map['global']['root_dir'] + '/modules/mod_exec/mod_exec.py')
                    resp.append("Started mod_exec, response code %d" % rcode)
            else:
                resp.append("Connect to mod_exec...")
                mod_exec_IF.connect()
                if mod_exec_IF.connected():
                    resp.append("success.\n")
                    if d['modname'][0] == 'mod_exec' and d['action'][0] == 'stop':
                        mod_exec_IF.kill()
                        resp.append("Killed mod_exec and all modules")
                    else:
                        curstate = mod_exec_IF.get_state(d['modname'][0])
                        if curstate != None:
                            resp.append("Current module state is %d\n" % curstate)
                            if d['action'][0] == 'start':
                                mod_exec_IF.start(d['modname'][0])
                                resp.append("Module started\n")
                            elif d['action'][0] == 'stop':
                                mod_exec_IF.stop(d['modname'][0])
                                resp.append("Module stopped\n")
                            elif d['action'][0] == 'restart':
                                mod_exec_IF.restart(d['modname'][0])
                                resp.append("Module restarted\n")
                        else:
                            resp.append("Could not get state of %s (is it unknown to mod_exec?)" % d['modname'][0])
                else:
                    resp.append("fail.\n")
        else:
            resp.append("No action given\n")
    elif 'action' in d:
        if d['action'][0] == 'requeueall':
            from mod_scheduler import scheduledb
            resp.append("Connecting to schedule DB...")
            scheduledb.connect()
            if not scheduledb.connected():
                resp.append("fail.\n")
            else:
                errortasks = scheduledb.get_tasks(where='status >= %d' % (scheduledb.ERROR_CRASH), orderby="start_time desc")
                for task in errortasks:
                    scheduledb.update_task(task, 'status', scheduledb.WAITING_FOR_START)
                return ['<html><script>window.history.back()</script></html>']
        elif d['action'][0] == 'requeue':
            from mod_scheduler import scheduledb
            import postgresops
            resp.append("Connecting to schedule DB...")
            scheduledb.connect()
            if not scheduledb.connected():
                resp.append("fail.\n")
            else:
                resp.append("yay\n")
                if 'task' in d:
                    try:
                        postgresops.check_evil(d['task'][0])
                        task = scheduledb.get_tasks(where='status >= %d and id = %s' % (scheduledb.ERROR_CRASH, d['task'][0]),
                                                    orderby="start_time desc")
                        if len(task) != 1:
                            resp.append("Error 0 or >1 tasks matched!")
                        else:
                            scheduledb.update_task(task[0], 'status', scheduledb.WAITING_FOR_START)
                            return ['<html><script>window.history.back()</script></html>']
                    except Exception, exp:
                        resp.append("Exception " + str(exp) + " occurred")
                else:
                    resp.append("Did not provide ID\n")
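# The admin handler above is driven entirely by its query string; grounded in the
# branches above it understands forms like
#   ?modname=mod_exec&action=start      ?modname=<module>&action=restart
#   ?modname=mod_exec&action=stop       ?action=requeueall
#   ?action=requeue&task=<id>
# The harness below is a hedged sketch for poking at do_admin locally with only
# the standard library; the port and the catch-all routing are made up, and the
# real dispatcher lives elsewhere in the web frontend.
def _serve_admin_sketch(port=8080):
    from wsgiref.simple_server import make_server

    def app(environ, start_response):
        # Route every request to do_admin; fall back to an empty body if the
        # handler returns None.
        return do_admin(environ, start_response) or []

    httpd = make_server('localhost', port, app)
    print "serving do_admin on http://localhost:%d/?modname=mod_exec&action=start" % port
    httpd.serve_forever()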
def update_file(file, field_name, field_val):
    postgresops.check_evil(field_name)
    postgresops.dbcur.execute(
        "UPDATE flows.files SET " + field_name + "=%s where id=%s",
        (field_val, file.id))
    postgresops.dbcon.commit()
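# postgresops.check_evil is defined elsewhere and its exact rules are not shown in
# this section. Because field_name is concatenated directly into the UPDATE
# statements above, a guard of this kind typically rejects anything that is not a
# plain SQL identifier; the helper below is only an assumed sketch of that idea.
import re

def _check_identifier_sketch(name):
    if not re.match(r'^[A-Za-z_][A-Za-z0-9_]*$', name):
        raise ValueError("suspicious SQL identifier: %r" % name)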
ret, vals, sys.argv = utils.check_arg(sys.argv, '--to', 1)
if ret:
    try:
        totime = utils.str_to_date(vals[0])
    except ValueError, msg:
        print "Bad to time: " + str(msg)

ret, vals, sys.argv = utils.check_arg(sys.argv, '--type', 1)
if ret:
    dtype = vals[0]

pretend, j, sys.argv = utils.check_arg(sys.argv, '--pretend')

if len(sys.argv) == 3:
    postgresops.check_evil(sys.argv[0])
    postgresops.check_evil(sys.argv[1])
    sname = sys.argv[0]
    sid = sys.argv[1]

    pl_meta, dev, pl_index = devicedb.find_plugload(sname, sid)
    if pl_index == None:
        print "Cannot find device belonging to this source name/id pair"
        sys.exit(1)
    elif pl_meta == None:
        print "Cannot find metadata for this plugload (possibly undefined?)"
        sys.exit(1)
    print "Found device: " + dev.IDstr + " plugload channel %d" % (pl_index)
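# utils.check_arg is defined elsewhere; judging only from the call sites above it
# appears to take (argv, flag, nvals=0) and return (found, values, remaining_argv).
# The function below is a hedged reimplementation of that inferred contract, not
# the project's actual helper, and the sample invocation is purely illustrative:
#   <script>.py --to <date> --type <dtype> [--pretend] SOURCE_NAME SOURCE_ID
def _check_arg_sketch(argv, flag, nvals=0):
    if flag not in argv:
        return False, [], argv
    i = argv.index(flag)
    vals = argv[i + 1:i + 1 + nvals]
    rest = argv[:i] + argv[i + 1 + nvals:]
    return True, vals, rest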
def update_task(task, field_name, field_val):
    postgresops.check_evil(field_name)
    postgresops.dbcur.execute(
        "UPDATE schedule.tasks SET " + field_name + "=%s where id=%s",
        (field_val, task.id))
    postgresops.dbcon.commit()
def update_task_mult(task, fields):
    for field, val in fields:
        postgresops.check_evil(field)
    postgresops.dbcur.execute(
        "UPDATE schedule.tasks SET "
        + ",".join([field + "=%s" for field, val in fields])
        + " where id=%s",
        [val for field, val in fields] + [task.id])
    postgresops.dbcon.commit()
def publish_data(id_str, time, data, feednum=None, devdef=None, device_type=None, source=None, dev=None):
    # Usage 1: time and data are scalars - one data point, feed # = feednum or 0 if feednum=None
    # Usage 2: time and data are lists of scalars (time could also be scalar) - one data point per feed,
    #          feed #s = feednum (list) or range(total feeds) if feednum=None
    # Usage 3: time and data are lists of scalars, feednum is a scalar - multiple data points for one feed
    # Usage 4: time and data are lists of lists of scalars (time could also be list of scalars) - multiple
    #          data points per feed, feed #s = feednum (list) or range(total feeds) if feednum=None
    if not isinstance(data, list):
        # Usage 1
        data = [data]
        if feednum == None:
            feednum = [0]
        else:
            feednum = [feednum]
    else:
        # Usage 2,3,4
        if feednum == None:
            # Usage 2,4
            feednum = range(len(data))
        elif not isinstance(feednum, list):
            # Usage 3
            feednum = [feednum]
            time = [time]
            data = [data]

    if not isinstance(time, list) or (not isinstance(time[0], list) and isinstance(data[0], list)):
        # Usage 1,2,4
        time = [time] * len(feednum)

    if not devicedb.connected():
        devicedb.connect()

    id_str = id_str.replace('/', '_')
    postgresops.check_evil(id_str)

    dev = find_device(id_str, create_new=True, device_type=device_type, source=source, devdef=devdef, dev=dev)
    if dev == None:
        return

    source_struct = utils.read_source(dev.source_name)
    if devdef == None:
        devdef = utils.read_device(dev.device_type)
    driver = source_struct['driver']

    for i in range(len(feednum)):
        if feednum[i] >= len(dev.feed_names):
            print "ERROR cannot publish data for feed %d because it is not defined in the definition for %s" % (feednum[i], dev.device_type)
        elif feednum[i] >= len(dev.source_ids) or dev.source_ids[feednum[i]] == None or dev.source_ids[feednum[i]] == '':
            print "ERROR cannot publish data for feed %d of device %s because it is not defined" % (feednum[i], dev.IDstr)
        else:
            source_id = dev.source_ids[feednum[i]]
            if driver == 'SMAP':
                publish_smap(dev.source_name, source_struct, dev, devdef, feednum[i], source_id, time[i], data[i])
            elif driver == 'CSV':
                fname = source_id
                if fname[0] != '/':
                    fname = source_struct['path'] + '/' + fname
                try:
                    parentdir = fname[:fname.rfind('/')]
                    try:
                        os.makedirs(parentdir)
                    except:
                        pass
                    csvfile = open(fname, "ab")
                    #print "\t", time[i], data[i]
                    if isinstance(time[i], list):
                        for j in range(len(time[i])):
                            csvfile.write("%.12f,%.12f\n" % (time[i][j], data[i][j]))
                    else:
                        csvfile.write("%.12f,%.12f\n" % (time[i], data[i]))
                    csvfile.close()
                except OSError, e:
                    print "ERROR Cannot publish data to %s because " % (fname), e
            else:
                print "ERROR Cannot publish data for %s b/c no %s driver" % (dev.source_name, driver)
    return []
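# The four calling conventions documented at the top of publish_data, restated as
# examples. The device id, timestamps and readings are made up for illustration
# and assume the device and its feeds already exist in devicedb.
#
# Usage 1: one point on one feed (feed 0 unless feednum is given)
#   publish_data('PLUGLOAD_A', 1.3400e9, 42.0)
#
# Usage 2: one point per feed; feed numbers default to range(len(data))
#   publish_data('PLUGLOAD_A', 1.3400e9, [42.0, 7.5])
#
# Usage 3: many points for a single feed
#   publish_data('PLUGLOAD_A', [1.3400e9, 1.3401e9], [42.0, 43.5], feednum=1)
#
# Usage 4: many points per feed (lists of lists)
#   publish_data('PLUGLOAD_A', [[1.3400e9, 1.3401e9]], [[42.0, 43.5]], feednum=[0])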
def update_flow(flow, field_name, field_val):
    postgresops.check_evil(field_name)
    postgresops.dbcur.execute(
        "UPDATE flows.curflows SET " + field_name
        + "=%s where time_from=%s and time_to=%s and source_name=%s and source_id=%s",
        (field_val, flow.time_from, flow.time_to, flow.source_name, flow.source_id))
    postgresops.dbcon.commit()