def main():
    """Poll the project database and refresh the Qt plots once per second.

    Connects through the dstream reader interface, builds one plot per
    project via init_gui(), then loops forever calling update_gui().
    Runs until the process is killed.
    """
    # DB interface: kept global so the GUI helpers can reuse the handle.
    global dbi
    dbi = ds_reader(pubdb_conn_info.reader_info())

    # Establish a connection to the database through the DBI.
    # Narrowed from a bare `except:` so KeyboardInterrupt/SystemExit
    # still propagate instead of being silently swallowed.
    try:
        dbi.connect()
    except Exception:
        print("Unable to connect to database... womp womp :(")

    # Get a list of all projects from the DBI.
    # [project, command, server, sleepafter ...., enabled, resource]
    projects = dbi.list_all_projects()

    # Initialize the GUI to show plots from each project.
    # Returns a dictionary of project name --> handles to data and curves.
    datas_curves = init_gui(projects)

    while True:
        # Every second, we will be updating the qt plots.
        time.sleep(1)

        # Query the database and add a point to each project's plot.
        update_gui(datas_curves)
예제 #2
0
def main():
    """Entry point: connect to the project DB and refresh Qt plots forever.

    One point is appended to every project's plot each second via
    update_gui(); the loop only ends when the process is killed.
    """
    # DB interface shared with the GUI helpers (module-level on purpose).
    global dbi
    dbi = ds_reader(pubdb_conn_info.reader_info())

    # Connect through the DBI. Catch Exception rather than a bare
    # `except:` so Ctrl+C / SystemExit are not swallowed.
    try:
        dbi.connect()
    except Exception:
        print("Unable to connect to database... womp womp :(")

    # All projects known to the DB:
    # [project, command, server, sleepafter ...., enabled, resource]
    projects = dbi.list_all_projects()

    # Initialize the GUI; returns a dictionary of
    # project name --> handles to data and curves.
    datas_curves = init_gui(projects)

    while True:
        # Refresh the qt plots once per second.
        time.sleep(1)

        # Query the database and add a point to each project's plot.
        update_gui(datas_curves)
예제 #3
0
  def __init__(self):
    """Create the DB reader, connect, and cache project/status info."""
    # DB interface:
    self.dbi = ds_reader(pubdb_conn_info.reader_info())

    # Establish a connection to the database through the DBI.
    # Catch Exception (not a bare except) so KeyboardInterrupt and
    # SystemExit are not swallowed during startup.
    try:
      self.dbi.connect()
    except Exception:
      print("Unable to connect to database... womp womp :(")

    # Dictionary that contains projects as keys, and arrays of statuses
    # as values, e.g.:
    # {'dummy_daq': [(1, 109),(2,23)], 'dummy_nubin_xfer': [(0,144), (1, 109)]}
    self.proj_dict = self.dbi.list_status()
    # Names of the currently enabled projects.
    self.enabled_projects = [ x._project for x in self.dbi.list_projects() ]
    # GUI helpers and the color palette used for plotting.
    self.my_utils = GuiUtils()
    self.colors = self.my_utils.getColors()
예제 #4
0
    def __init__(self):
        """Build the DB reader interface and cache status/project data."""
        # DB interface:
        self.dbi = ds_reader(pubdb_conn_info.reader_info())

        # Establish a connection to the database through the DBI.
        # Narrowed from a bare `except:` so Ctrl+C still terminates.
        try:
            self.dbi.connect()
        except Exception:
            print("Unable to connect to database... womp womp :(")

        # Dictionary that contains projects as keys, and arrays of
        # statuses as values, e.g.:
        # {'dummy_daq': [(1, 109),(2,23)], 'dummy_nubin_xfer': [(0,144), (1, 109)]}
        self.proj_dict = self.dbi.list_status()
        # Names of the currently enabled projects.
        self.enabled_projects = [x._project for x in self.dbi.list_projects()]
        # GUI helpers and the color palette used for plotting.
        self.my_utils = GuiUtils()
        self.colors = self.my_utils.getColors()
예제 #5
0
import os

# Logger configuration must be exported BEFORE importing pub_dbi,
# which reads these environment variables at import time.
os.environ['PUB_LOGGER_LEVEL'] = 'kLOGGER_DEBUG'
os.environ['PUB_LOGGER_DRAIN'] = 'kLOGGER_COUT'
from pub_dbi import pubdb_conn, pubdb_conn_info

# Open a cursor on the default DB, then again against 'procdb'.
conn_info = pubdb_conn_info.reader_info()
k = pubdb_conn()
k.cursor(conn_info)
conn_info._db = 'procdb'
k.cursor(conn_info)
from pyqtgraph.Qt import QtGui, QtCore
import pyqtgraph as pg
from custom_piechart_class import PieChartItem
import time
# dstream import
from dstream.ds_api import ds_reader
# pub_dbi import
from pub_dbi import pubdb_conn_info
# catch ctrl+C to terminate the program
import signal

# DB interface (module-level so GUI callbacks can reuse it):
global dbi
dbi = ds_reader(pubdb_conn_info.reader_info())

# Establish a connection to the database through the DBI.
# Catch Exception (not a bare except) so Ctrl+C still terminates.
try:
    dbi.connect()
except Exception:
    print("Unable to connect to database... womp womp :(")

# Suppress Qt warnings temporarily.
QtCore.qInstallMsgHandler(lambda *args: None)

# Initialize the GUI to show plots from each project.
# Initialize Qt (only once per application).
qapp = QtGui.QApplication([])
view = pg.GraphicsView()
# l is a GraphicsLayoutWidget
l = pg.GraphicsLayout()  #border=(100,100,100))
view.setCentralItem(l)
예제 #7
0
#!/usr/bin/env python
# python import
import sys
# dstream import
from dstream.ds_api import ds_reader
# pub_util import
from pub_util import pub_logger
# pub_dbi import
from pub_dbi  import pubdb_conn_info

logger = pub_logger.get_logger('list_log')

# DB interface for altering ProcessTable
k=ds_reader(pubdb_conn_info.reader_info(), logger)

# Connect to DB
k.connect()

# Define a project
projects = k.list_daemon_log()

if not projects: 
    print 'No project found... aborting!'
    print
    sys.exit(1)

for x in projects:

    msg = x.dump_log()
    if msg: print msg
예제 #8
0
# Logger for this config-dump utility.
logger = pub_logger.get_logger('cfg_dump_project')

# Usage: cfg_dump_project OUTPUT_FILENAME [SERVER]
if not len(sys.argv) in [2, 3]:

    logger.error('Invalid argument. Usage: %s OUTPUT_FILENAME [SERVER]' %
                 sys.argv[0])
    sys.exit(1)

# Refuse to clobber an existing file or directory.
out_file = sys.argv[1]
if os.path.isfile(out_file) or os.path.isdir(out_file):
    logger.error('File/Dir already exists: %s' % out_file)
    sys.exit(1)

# DB interface for altering ProcessTable
k = ds_reader(pubdb_conn_info.reader_info(), logger)
# Connect to DB
k.connect()

# Collect project info rows, optionally filtered by server name
# when the SERVER argument (sys.argv[2]) was given.
project_info_v = []

for p in k.list_all_projects():
    if len(sys.argv) < 3 or p._server == sys.argv[2]:
        project_info_v.append(p)

if not project_info_v:
    logger.error('No project found...')
    sys.exit(1)

# Dump each selected project's configuration to the output file.
# NOTE(review): the loop body is truncated in this chunk; the write
# logic continues past the visible source.
fout = open(out_file, 'w')
for info in project_info_v:
예제 #9
0
    def process_newruns(self):
        """Scan each configured data directory and register new runs.

        For every directory in self._data_dir, collect closed .ubdaq
        files named NoiseRun-YYYY_M_DD_HH_MM_SS-RUN-SUBRUN.ubdaq, then
        write each (run, subrun) pair that is newer than the last DB
        entry — but older than the newest file on disk, which may still
        be open — into the RunTable via the death_star admin API.
        """
        # Attempt to connect DB. If failure, abort.
        if not self.connect():
            self.error('Cannot connect to DB! Aborting...')
            return

        # Loop through all directories in which data is to be found.
        for path_num in xrange(len(self._data_dir)):

            data_path = self._data_dir[path_num]

            self.info('Start access in data directory %s' % data_path)

            run_lim = None
            # If we are not yet at the last directory (for which there
            # should be no run limit), apply the per-directory bound.
            if path_num < len(self._run_bound):
                run_lim = self._run_bound[path_num]
                self.info('Run limit for this directory: %i' % run_lim)

            # Get ALL closed data files in DATADIR.
            if not os.path.isdir(data_path):
                self.error('DATA DIR %s does not exist' % data_path)
                return

            self.info('Looking for data files in: %s' % data_path)

            dircontents = os.listdir(data_path)

            # Map (RUN, SUBRUN) -> [file name, time-create, time-modify].
            file_info = {}

            for f in dircontents:

                filepath = data_path + '/' + f

                # Check that this is a file.
                if not os.path.isfile(filepath):
                    continue

                try:
                    time_create = os.path.getctime(filepath)
                    time_modify = os.path.getmtime(filepath)

                    # File format:
                    # NoiseRun-YYYY_M_DD_HH_MM_SS-RUN-SUBRUN.ubdaq
                    run = int(f.replace('.ubdaq', '').split('-')[-2])
                    subrun = int(f.replace('.ubdaq', '').split('-')[-1])
                    file_info[(run, subrun)] = [f, time_create, time_modify]

                except Exception:
                    # BUG FIX: the original tested `f.find('.ubdaq')`,
                    # which is truthy for -1 (substring absent) and falsy
                    # for a match at index 0. Use substring membership:
                    # only warn about files that really are .ubdaq files.
                    if '.ubdaq' in f:
                        self.info(
                            'Could not read RUN/SUBRUN info for file %s' % f)

            # Nothing parseable in this directory: skip it instead of
            # raising IndexError on the empty-sequence [-1] below.
            if not file_info:
                continue

            # Sort the (run, subrun) keys; the largest pair is ignored
            # because that file may not yet have been closed.
            sorted_file_info = sorted(file_info)
            max_file_info = sorted_file_info[-1]

            # Fetch from database the last run/subrun number recorded.
            logger = pub_logger.get_logger('register_new_run')
            reader = ds_api.ds_reader(pubdb_conn_info.reader_info(), logger)
            last_recorded_info = reader.get_last_run_subrun(self._runtable)

            # Log which (run,subrun) pair was added last.
            self.info('last recorded (run,subrun) is (%d,%d)' %
                      (int(last_recorded_info[0]), int(last_recorded_info[1])))
            self.info(
                'No run with (run,subrun) smaller than this will be added to the RunTable'
            )

            # DANGER: death_star edits the RUN table directly. This is
            # deliberate here; do not copy this pattern elsewhere.
            logger = pub_logger.get_logger('death_star')
            rundbWriter = ds_api.death_star(pubdb_conn_info.admin_info(),
                                            logger)

            # Loop through dictionary keys and write to DB info
            # for runs/subruns not yet stored.
            for info in sorted_file_info:

                # Must be newer than the last DB entry but older than
                # the newest file found on disk.
                if info >= max_file_info:
                    continue
                if run_lim and info[0] > run_lim:
                    continue
                if info <= last_recorded_info:
                    continue

                self.info('Trying to add to RunTable (run,subrun) = (%d,%d)' %
                          (int(info[0]), int(info[1])))

                try:
                    # file_info value is [file name, time-create, time-modify].
                    run = info[0]
                    subrun = info[1]
                    run_info = file_info[info]
                    file_creation = time.gmtime(int(run_info[1]))
                    file_closing = time.gmtime(int(run_info[2]))
                    file_creation = time.strftime('%Y-%m-%d %H:%M:%S',
                                                  file_creation)
                    file_closing = time.strftime('%Y-%m-%d %H:%M:%S',
                                                 file_closing)

                    self.info('filling death star...')
                    # Insert into the death star (RunTable admin API).
                    rundbWriter.insert_into_death_star(self._runtable, info[0],
                                                       info[1], file_creation,
                                                       file_closing)
                    # Report starting.
                    self.info(
                        'recording info for new run: run=%d, subrun=%d ...' %
                        (int(run), int(subrun)))

                except Exception:
                    # BUG FIX: use info[...] here — `run`/`subrun` may
                    # hold stale values if the failure happened before
                    # they were assigned in this iteration.
                    self.info('FAILED to add run=%d, subrun=%d to RunTable' %
                              (int(info[0]), int(info[1])))
from pyqtgraph.Qt import QtGui, QtCore
import pyqtgraph as pg
from custom_piechart_class import PieChartItem
import time
# dstream import
from dstream.ds_api import ds_reader
# pub_dbi import
from pub_dbi import pubdb_conn_info
# catch ctrl+C to terminate the program
import signal

# DB interface (module-level so GUI callbacks can reuse it):
global dbi
dbi = ds_reader(pubdb_conn_info.reader_info())

# Establish a connection to the database through the DBI.
# Narrowed from a bare `except:` so KeyboardInterrupt/SystemExit
# still propagate.
try:
    dbi.connect()
except Exception:
    print("Unable to connect to database... womp womp :(")

# Suppress Qt warnings temporarily.
QtCore.qInstallMsgHandler(lambda *args: None)


# Initialize the GUI to show plots from each project.
# Initialize Qt (only once per application).
qapp = QtGui.QApplication([])
view = pg.GraphicsView()
# l is a GraphicsLayoutWidget
l = pg.GraphicsLayout()  #border=(100,100,100))
예제 #11
0
    def process_newruns(self):
        """Register newly found (run, subrun) pairs in the RunTable.

        Walks every directory in self._data_dir, parses closed .ubdaq
        file names (NoiseRun-YYYY_M_DD_HH_MM_SS-RUN-SUBRUN.ubdaq) into
        (run, subrun) keys, and records each pair that is newer than
        the last DB entry but older than the newest file on disk (that
        file may still be open) via the death_star admin API.
        """
        # Attempt to connect DB. If failure, abort.
        # BUG FIX: the original mixed a tab with spaces on the error
        # line below, which is an indentation error.
        if not self.connect():
            self.error('Cannot connect to DB! Aborting...')
            return

        # Loop through all directories in which data is to be found.
        for path_num in xrange(len(self._data_dir)):

            data_path = self._data_dir[path_num]

            self.info('Start access in data directory %s' % data_path)

            run_lim = None
            # Apply a run limit unless this is the last directory
            # (which has no bound).
            if path_num < len(self._run_bound):
                run_lim = self._run_bound[path_num]
                self.info('Run limit for this directory: %i' % run_lim)

            # get ALL closed data files in DATADIR
            if not os.path.isdir(data_path):
                self.error('DATA DIR %s does not exist' % data_path)
                return

            self.info('Looking for data files in: %s' % data_path)

            dircontents = os.listdir(data_path)

            # Map (RUN, SUBRUN) -> [file name, time-create, time-modify].
            file_info = {}

            for f in dircontents:

                filepath = data_path + '/' + f

                # check that this is a file
                if not os.path.isfile(filepath):
                    continue

                try:
                    time_create = os.path.getctime(filepath)
                    time_modify = os.path.getmtime(filepath)

                    # file format:
                    # NoiseRun-YYYY_M_DD_HH_MM_SS-RUN-SUBRUN.ubdaq
                    run = int(f.replace('.ubdaq', '').split('-')[-2])
                    subrun = int(f.replace('.ubdaq', '').split('-')[-1])
                    file_info[(run, subrun)] = [f, time_create, time_modify]

                except Exception:
                    # BUG FIX: `f.find('.ubdaq')` is truthy for -1
                    # (not found) and falsy for a match at index 0;
                    # substring membership is what was intended.
                    if '.ubdaq' in f:
                        self.info(
                            'Could not read RUN/SUBRUN info for file %s' % f)

            # Nothing parseable in this directory: skip it rather than
            # raising IndexError on the [-1] access below.
            if not file_info:
                continue

            # Sort the keys; ignore the largest run/subrun pair since
            # its file may not yet be closed.
            sorted_file_info = sorted(file_info)
            max_file_info = sorted_file_info[-1]

            # Fetch from database the last run/subrun number recorded.
            logger = pub_logger.get_logger('register_new_run')
            reader = ds_api.ds_reader(pubdb_conn_info.reader_info(), logger)
            last_recorded_info = reader.get_last_run_subrun(self._runtable)

            # Log which (run,subrun) pair was added last.
            self.info('last recorded (run,subrun) is (%d,%d)' %
                      (int(last_recorded_info[0]), int(last_recorded_info[1])))
            self.info('No run with (run,subrun) smaller than this will be added to the RunTable')

            # DANGER: death_star edits the RUN table directly. This is
            # intentional here; do not copy this pattern elsewhere.
            logger = pub_logger.get_logger('death_star')
            rundbWriter = ds_api.death_star(pubdb_conn_info.admin_info(), logger)

            # Write info for runs/subruns not yet stored.
            for info in sorted_file_info:

                # Must be newer than the last logged value but older
                # than the newest file found on disk.
                if info >= max_file_info:
                    continue
                if run_lim and info[0] > run_lim:
                    continue
                if info <= last_recorded_info:
                    continue

                self.info('Trying to add to RunTable (run,subrun) = (%d,%d)' %
                          (int(info[0]), int(info[1])))

                try:
                    # file_info value is [file name, time-create, time-modify].
                    run = info[0]
                    subrun = info[1]
                    run_info = file_info[info]
                    file_creation = time.gmtime(int(run_info[1]))
                    file_closing = time.gmtime(int(run_info[2]))
                    file_creation = time.strftime('%Y-%m-%d %H:%M:%S', file_creation)
                    file_closing = time.strftime('%Y-%m-%d %H:%M:%S', file_closing)

                    self.info('filling death star...')
                    # insert into the death star (RunTable admin API)
                    rundbWriter.insert_into_death_star(self._runtable, info[0],
                                                       info[1], file_creation,
                                                       file_closing)
                    # Report starting.
                    self.info('recording info for new run: run=%d, subrun=%d ...' %
                              (int(run), int(subrun)))

                except Exception:
                    # BUG FIX: use info[...] here — `run`/`subrun` can
                    # hold stale values when the failure happened before
                    # they were assigned in this iteration.
                    self.info('FAILED to add run=%d, subrun=%d to RunTable' %
                              (int(info[0]), int(info[1])))
예제 #12
0
import os

# Logger settings are consumed by pub_dbi at import time, so they
# must be exported before the import below.
os.environ['PUB_LOGGER_LEVEL'] = 'kLOGGER_DEBUG'
os.environ['PUB_LOGGER_DRAIN'] = 'kLOGGER_COUT'
from pub_dbi import pubdb_conn, pubdb_conn_info

# Open a cursor on the default database, then point the same
# connection info at 'procdb' and open a second cursor.
conn_info = pubdb_conn_info.reader_info()
k = pubdb_conn()
k.cursor(conn_info)
conn_info._db = 'procdb'
k.cursor(conn_info)