예제 #1
0
def closepairs(folder, stats):
  """Build the negative close-pair databases for the Tokyo datasets.

  The Pitts250 entry remains disabled, exactly as in the original
  hand-written list.
  """
  prob = 1
  # SimpleDB("indexes/%s/pitts_neg_closepairs.mat" % folder,
  #   "raw/Pitts250/data", stats, prob) stays disabled.
  datasets = (("t27_neg_closepairs", "raw/Tokyo247"),
              ("ttm_neg_closepairs", "raw/TokyoTimeMachine/data"))
  return [SimpleDB("indexes/%s/%s.mat" % (folder, index_name),
                   image_root, stats, prob)
          for index_name, image_root in datasets]
예제 #2
0
def make_n_opposites(folder, stats):
  """Build the negative 'opposites' databases for the Tokyo datasets.

  Uses a very low sampling probability (0.005); the Pitts250 entry
  remains disabled, as in the original list.
  """
  prob = 0.005
  # SimpleDB("indexes/%s/pitts_neg_opposites.mat" % folder,
  #   "raw/Pitts250/data", stats, prob) stays disabled.
  datasets = (("t27_neg_opposites", "raw/Tokyo247"),
              ("ttm_neg_opposites", "raw/TokyoTimeMachine/data"))
  return [SimpleDB("indexes/%s/%s.mat" % (folder, index_name),
                   image_root, stats, prob)
          for index_name, image_root in datasets]
예제 #3
0
def farpos(folder, stats):
  """Build the negative far-positive databases for all three datasets
  (Pitts250, Tokyo247, TokyoTimeMachine) with sampling probability 0.1.
  """
  prob = 0.1
  datasets = (("pitts_neg_farpos", "raw/Pitts250/data"),
              ("t27_neg_farpos", "raw/Tokyo247"),
              ("ttm_neg_farpos", "raw/TokyoTimeMachine/data"))
  return [SimpleDB("indexes/%s/%s.mat" % (folder, index_name),
                   image_root, stats, prob)
          for index_name, image_root in datasets]
예제 #4
0
    def make_nextdb(self, dbname=None):
        """create a new pvarch database, 
        copying pvs to save from an old database 
        (and that are in the current cache)
        """
        # Open the current archive db (credentials come from the
        # module-level dbuser/dbpass/dbhost) and read its full pv table.
        olddb  = SimpleDB(user=dbuser, passwd=dbpass,dbname=self.arch_db, host=dbhost,debug=0)
        olddb.use(self.arch_db)
        old_data = olddb.tables['pv'].select()

        cache_pvnames = self.get_pvnames()

       
        # Determine the next database name and create it empty.
        dbname = nextname(current=self.arch_db,dbname=dbname)
        self.create_emptydb(dbname)
        
        newdb = SimpleDB(user=dbuser, passwd=dbpass,dbname=dbname, host=dbhost,debug=0)
        pvtable = newdb.tables['pv']
        sys.stdout.write('adding %i pvs to DB %s\n' % (len(old_data),dbname))

        # Copy only PVs that are still active AND present in the cache;
        # everything else is intentionally dropped from the new archive.
        for p in old_data:
            if p['active'] == 'no' or p['name'] not in cache_pvnames:
                continue
            pvtable.insert(name=p['name'],
                           type=p['type'],
                           description=p['description'],
                           data_table=p['data_table'],
                           deadtime=p['deadtime'],
                           graph_type=p['graph_type'],
                           graph_lo=p['graph_lo'],
                           graph_hi=p['graph_hi'])

        # Record the new run, then release both connections.
        self.__setrun(dbname)
        olddb.close()
        newdb.close()        
        return dbname
예제 #5
0
def make_scenenn(folder, stats):
    """Return one positive-pair SimpleDB per SceneNN scene number."""
    return [SimpleDB(scenenn.SCENENN_INDEX_PATH % (folder, num, "pos"),
                     scenenn.SCENENN_IMAGE_PATH, stats, 1)
            for num in scenenn.SCENENN_NUMS]
예제 #6
0
 def _read_db( self, db_path ):
   """Open the SimpleDB at *db_path* and cache its schema on self.

   Sets db_path, db_name (basename of the path), the db handle, the
   field name/type lists, the field count NF, and the record key.
   """
   self.db_path = db_path
   self.db_name = os.path.basename( db_path )
   self.db = SimpleDB( self.db_path )
   self.names = self.db.field_names()
   self.types = self.db.field_types()
   self.NF = len(self.names)
   # the first field doubles as the primary lookup key
   self.key = self.names[0]
예제 #7
0
def make_rgbd(folder, stats):
    """Return the positive-pair SimpleDBs for the Freiburg RGBD sequences."""
    print("RGBD")
    sequences = (
        "rgbd_dataset_freiburg1_room",
        "rgbd_dataset_freiburg2_large_with_loop",
        "rgbd_dataset_freiburg2_pioneer_slam2",
        "rgbd_dataset_freiburg3_long_office_household",
        "rgbd_dataset_freiburg2_large_no_loop",
        "rgbd_dataset_freiburg2_pioneer_slam",
        "rgbd_dataset_freiburg2_pioneer_slam3",
    )
    return [SimpleDB("indexes/" + folder + "/" + seq + "_pos.txt",
                     "raw/RGBD/data/", stats, 1)
            for seq in sequences]
예제 #8
0
    def __init__(self,dbconn=None, **kw):
        """Connect to the master database and bind its tables.

        dbconn: optional existing low-level connection to wrap in a
        SimpleDB; a fresh connection is made when None.
        **kw is accepted (for subclass signatures) and ignored here.
        """
        self.db = SimpleDB(dbconn=dbconn)
        self.dbconn = self.db.conn
        self.db.use(master_db)
        self.db.get_cursor()
        self.db.read_table_info()

        # Convenience handles to each master-db table.
        self.info    = self.db.tables['info']
        self.cache   = self.db.tables['cache']
        self.runs    = self.db.tables['runs']
        self.pairs   = self.db.tables['pairs']
        self.stations  = self.db.tables['stations']
        self.instruments = self.db.tables['instruments']
        self.inst_pvs  = self.db.tables['instrument_pvs']
        self.alerts = self.db.tables['alerts']
        
        # Name of the current archive database, read from the 'info' table.
        self.arch_db = self._get_info('db',  process='archive')
        self.pvnames = []
예제 #9
0
def make_n_rgbd(folder, stats):
  """Return the negative-pair SimpleDBs for the Freiburg RGBD sequences.

  freiburg3_long_office_household stays excluded, matching the
  original hand-written list.
  """
  sequences = ("rgbd_dataset_freiburg1_room",
               "rgbd_dataset_freiburg2_large_with_loop",
               "rgbd_dataset_freiburg2_pioneer_slam2",
               # "rgbd_dataset_freiburg3_long_office_household" disabled
               "rgbd_dataset_freiburg2_large_no_loop",
               "rgbd_dataset_freiburg2_pioneer_slam",
               "rgbd_dataset_freiburg2_pioneer_slam3")
  return [SimpleDB("indexes/" + folder + "/" + seq + "_neg.txt",
                   "raw/RGBD/data/", stats, 1)
          for seq in sequences]
예제 #10
0
    def __init__(self,dbconn=None,**args):
        """Connect to the master db, switch to the current archive db,
        and seed per-PV bookkeeping state.

        dbconn: optional existing connection to reuse.
        Recognized **args keys: 'debug' and 'messenger'.
        """
        self.master_db = config.master_db
        self.dbname = None
        self.db = SimpleDB(dbconn=dbconn)
        self.dbconn = self.db.conn
        # print 'Archiver db = ', self.db, self.db.conn
        
        # The master db's 'info' table records which archive db is current.
        ret = self.read_master("select db from info where process='archive'")
        self.dbname = ret[0]['db']
        # print 'Archiver:: current dbname = ', self.dbname
        # self.use_currentDB()
        self.db.use(self.dbname)        

        time.sleep(0.01)
        self.db.read_table_info()

        self.pv_table = self.db.tables['pv']
        
        self.debug  = 0
        self.force_checktime = 0
        self.messenger = sys.stdout
        self.dtime_limbo = {}

        self.last_collect = 0
        self.pvinfo = {}
        self.pvs    = {}
        for k,v in args.items():
            if   (k == 'debug'):      self.debug     = v
            elif (k == 'messenger'):  self.messenger = v

               
        # Seed pvinfo with every pv row plus runtime bookkeeping fields.
        for d in self.db.exec_fetch('select * from pv'):
            d.update({'last_ts': 0,'last_value':None,
                      'force_time': get_force_update_time() })
            self.pvinfo[d['name']] = d
예제 #11
0
def make_cnn(folder, stats):
    """Return the single CNN positive-pair database for *folder*."""
    print("CNN")
    index_path = "indexes/" + folder + "/cnn_pos.mat"
    return [SimpleDB(index_path, "", stats, 0.5)]
예제 #12
0
                             QLCDNumber)
from PyQt5.QtCore import QTimer
from PyQt5.QtCore import Qt
from PyQt5.QtCore import pyqtSlot, pyqtSignal, QObject
from PyQt5.QtCore import QThread

from SimpleDB import SimpleDB

HOST = '10.42.0.42'  # game server address
PORT = 1217
ADDR = (HOST, PORT)

# NOTE: module import has side effects — this connects to the game
# server immediately when the file is loaded.
s = socket(AF_INET, SOCK_STREAM)
s.connect(ADDR)

# Shared database handle used by the UI classes below.
db = SimpleDB()


class Game(QWidget):
    """Main game widget.

    Bug fix: the original ``__init__`` called bare ``super()``, which
    merely creates the super-proxy object and never invokes
    ``QWidget.__init__`` — leaving the Qt widget uninitialized.  It must
    be ``super().__init__()``.
    """

    def __init__(self):
        super().__init__()
        self.initUI()

    def initUI(self):
        # Placeholder: UI construction not implemented yet.
        pass

class InitScene(QWidget):
    """Initial scene widget; ``st`` is accepted but not used here."""

    def __init__(self, st):
        super().__init__()
        # NOTE(review): initUI is not defined on this class in the
        # visible code and QWidget provides none — presumably defined
        # elsewhere or still to be written; confirm before use.
        self.initUI()
예제 #13
0
class Archiver:
    """Archiving engine for the Epics PV archiver (Python 2 codebase).

    Reads changed PV values from the cache (master db) and inserts them
    into the per-run archive database, honoring per-PV deadtime and
    deadband settings.
    """
    # Timestamps below MIN_TIME are treated as invalid and replaced
    # with the current time in update_value().
    MIN_TIME = 100
    sql_insert  = "insert into %s (pv_id,time,value) values (%i,%f,%s)"
    def __init__(self,dbconn=None,**args):
        """Connect to the master db, switch to the current archive db,
        and seed per-PV bookkeeping.  Recognized **args keys: 'debug'
        and 'messenger'."""
        self.master_db = config.master_db
        self.dbname = None
        self.db = SimpleDB(dbconn=dbconn)
        self.dbconn = self.db.conn
        # print 'Archiver db = ', self.db, self.db.conn
        
        ret = self.read_master("select db from info where process='archive'")
        self.dbname = ret[0]['db']
        # print 'Archiver:: current dbname = ', self.dbname
        # self.use_currentDB()
        self.db.use(self.dbname)        

        time.sleep(0.01)
        self.db.read_table_info()

        self.pv_table = self.db.tables['pv']
        
        self.debug  = 0
        self.force_checktime = 0
        self.messenger = sys.stdout
        self.dtime_limbo = {}

        self.last_collect = 0
        self.pvinfo = {}
        self.pvs    = {}
        for k,v in args.items():
            if   (k == 'debug'):      self.debug     = v
            elif (k == 'messenger'):  self.messenger = v

               
        # Seed pvinfo with every pv row plus runtime bookkeeping fields.
        for d in self.db.exec_fetch('select * from pv'):
            d.update({'last_ts': 0,'last_value':None,
                      'force_time': get_force_update_time() })
            self.pvinfo[d['name']] = d


    def exec_fetch(self,sql):
        """Execute *sql* and fetch all rows; returns [{}] on fetch failure."""
        self.db.execute(sql)
        ret = [{}]
        try:
            ret = self.db.fetchall()
        except:
            pass
        return ret

    def read_master(self,query):
        """Run *query* against the master db, then switch back to the
        current archive db (when known).  Returns the fetched rows."""
        self.db.use(self.master_db)
        ret = self.exec_fetch(query)
        if self.dbname is not None: self.db.use(self.dbname)
        return ret

    def use_currentDB(self,dbname=None):
        """Look up the current archive db name from the master db and
        switch to it.  Raises IOError if it cannot be determined."""
        ret = self.read_master("select db from info where process='archive'")
        try:
            self.dbname = ret[0]['db']
            self.db.use(self.dbname)
        except:
            raise IOError, 'cannot determine archive database name'
        return self.dbname
        
    def get_cache_changes(self,dt=30):
        """ get list of name,type,value,cvalue,ts from cache """
        return self.read_master("select * from cache where ts>%f" % (time.time()-dt))

    def get_cache_names(self):
        """Return (and cache on self) the list of pvnames in the cache."""
        ret = self.read_master("select pvname from cache")
        self.cache_names = [i['pvname'] for i in ret]
        return self.cache_names

    def get_cache_full(self, pv):
        " return full information for a cached pv"
        s = self.read_master("select * from cache where pvname='%s'" % pv)
        try:
            return s[0]
        except:
            print 'Could not get cache for pvname "%s"' % pv
            return  {'type':None,'value':None}
            

    def refresh_pv_table(self):
        """refresh the pvinfo dictionary  by re-reading the database settings for pvs in the pv table
        This will cause changes in database settings (deadtime, etc) to be updated.
        """
        for pvdata in self.pv_table.select():
            name = pvdata['name']
            # already know about this pv: update pvinfo.
            if name in self.pvinfo:
                self.pvinfo[name].update(pvdata)
            # look up any new pvs
            else:
                self.get_info(name)

    def check_for_new_pvs(self):
        " read pvs in cache, make sure we have them all in pvinfo dictionary" 
        for p in self.get_cache_names():
            self.get_info(p)
   

    def get_info(self,pvname):
        """ return pvinfo dictionary for a pv, and also ensures that it is in the pvinfo dictionary"""
        if pvname not in self.pvinfo:
            try:
                r = self.pv_table.select_where(name=pvname)
                dat = r[0]
            except IndexError:
                # unknown pv: add it, then retry the lookup once
                self.add_pv(pvname)
                time.sleep(0.01)
                try:
                    r = self.pv_table.select_where(name=pvname)
                    dat = r[0]
                except:
                    return None

            dat['force_time']  = get_force_update_time()
            dat['last_ts']     = 0
            dat['last_value']  = None
            self.pvinfo[pvname] = dat

        return self.pvinfo[pvname]

    def sync_with_cache(self,update_vals=False):
        """ initialize pv lists, insert times, etc with cache
        use update_vals to force insert of cache values into
        the archive (as on startup)
        """
        newpvs = []
        cache_values = []
        self.get_cache_names()
        pvtable_data = self.pv_table.select()
        print ' This is sync with cache ', update_vals, len(self.cache_names), len(pvtable_data)
        self.db.use(self.master_db)
        now = time.time()
        print 'masterdb %s / data=%s' % ( self.master_db, len(pvtable_data))
        if update_vals:
            x = self.db.exec_fetch("select pvname,value,ts from cache")
            current_cache = {}
            for i in x:
                current_cache[i['pvname']] = i
                
        for pvdata in pvtable_data:
            name = normalize_pvname(pvdata['name'])
            
            if name not in self.cache_names:
                # archived pv missing from cache: connect and add it below
                newpvs.append((name, epics.PV(name)))
            elif update_vals:  
                r = current_cache.get(name,None)
                if r is not None:
                    ts = r['ts']
                    # stale cached timestamps (older than a day) are reset to now
                    if now - ts > SEC_DAY: ts = now
                    cache_values.append((name,ts,r['value']))

        if len(newpvs)>0:
            epics.poll()
            m = MasterDB()
            for pvname, pv in newpvs:
                if pv.connected:
                    m.add_pv(pvname)
            m.close()
        # now switch to archiving database, and (perhaps) insert values from cache
        if self.dbname is not None:
            self.db.use(self.dbname)
            for name,ts,value in cache_values:
                self.update_value(name,ts,value)
        print 'Sync with Cache Done'

    def get_pv(self,pvname):
        "return (and memoize) an epics.PV for pvname; None on failure"
        if pvname in self.pvs:
            return self.pvs[pvname]
        try:
            p = self.pvs[pvname] = epics.PV(pvname)
            return p
        except:
            return None


    def dbs_for_time(self, t0=SEC_DAY, t1=MAX_EPOCH):
        """ return list of databases with data in the given time range"""
        # pad the range by a day on each side to catch boundary runs
        timerange = ( min(t0,t1) - SEC_DAY, max(t0,t1) + SEC_DAY)
        query = "select * from runs where stop_time>=%i and start_time<=%i order by start_time"
        r = []
        for i in self.read_master(query % timerange):
            if i['db'] not in r: r.append(i['db'])
        return r

    def get_value_at_time(self,pvname,t):
        "return archived value of a pv at one time"
        if pvname is None: return None
        pvname = normalize_pvname(pvname)
        info = self.get_info(pvname)
        if info is None: return None

        db = self.dbs_for_time(t,t)[0]
        self.db.use(db)
        qpv  = "select data_table,id from pv where name ='%s'" % pvname
        qdat = 'select time,value from %s where pv_id=%i and time<=%f order by time desc limit 1'
        i = self.db.exec_fetchone(qpv)
        r = self.db.exec_fetchone(qdat % (i['data_table'],i['id'],t))
        return r['time'],r['value']

    def get_data(self,pvname,t0,t1,with_current=None):
        "get data from database for a time range"
        if pvname is None: return []
        pvname = normalize_pvname(pvname)
        info = self.get_info(pvname)
        if info is None:
            return ([],'No PV named %s',pvname)
            
        #
        stat   = [info]
        dat    = []
        pvquery= "select data_table,id from pv where name ='%s'" % pvname
        fquery = 'select time,value from %s where pv_id=%i and time<=%f order by time desc limit 1'
        gquery = 'select time,value from %s where pv_id=%i and time>=%f order by time limit 1'       
        squery = 'select time,value from %s where pv_id=%i and time>=%f and time<=%f order by time'

        needs_firstpoint = True
        tnow = time.time()

        # make sure t0 and t1 are ordered
        if t0 > t1:   t0,t1 = t1,t0
        if t1-t0 < 1800.0: t0 = t1 - 1800.0 # look back at least an hour

        # determine if we should append the current (cached) value
        if with_current is None:
            add_current = abs(t1-tnow) < 1.5 * SEC_DAY
        else:
            add_current = with_current
            
        # print 'get data for ' ,pvname, t0,t1
        # print 'dbs: ', self.dbs_for_time(t0,t1)
        try:
            for db in self.dbs_for_time(t0,t1):
                self.db.use(db)
                stat.append(pvquery)
                r     = self.db.exec_fetchone(pvquery)
                try:
                    table = r['data_table']
                    pvid  = r['id']
                except KeyError:  # this db doesn't know about this PV -- maybe it's a recent addition?
                    continue
                stat.append((db,table, pvid))
                if needs_firstpoint:
                    # seed with the last value at-or-before t0, if any
                    q = fquery % (table,pvid,t0)
                    stat.append(q)
                    r = self.db.exec_fetchone(q)
                    try:
                        dat.append((r['time'],r['value']))
                        needs_firstpoint = False
                    except:
                        stat.append('no data before t0!')
                q = squery % (table,pvid,t0,t1)
                stat.append(q)
                for i in self.exec_fetch(q):
                    dat.append((i['time'],i['value']))
            # add value at time just after selected time range
            r = self.db.exec_fetchone(gquery % (table,pvid,t1))
            try:
                dat.append((r['time'],r['value']))
            except KeyError:
                pass
            # optionally, add current value
            if add_current: 
                stat.append('adding cached value')
                r= self.get_cache_full(pvname)
                stat.append(r)
                if r['value'] is not None:
                    dat.append((time.time(),r['value']))
        except:
            stat.append('Exception!')
        dat.sort()
        self.use_currentDB()
        return dat,stat

    def write(self,s):
        """Write *s* via the configured messenger (default: stdout)."""
        self.messenger.write(s)
        
    def drop_pv(self,name):
        """Mark a pv inactive (it is never deleted, only deactivated)."""
        self.db.execute("update pv set active='no' where name=%s" % clean_string(name))


    def add_pv(self,name,description=None,graph={},deadtime=None,deadband=None):
        """add PV to the database: expected to take a while"""
        pvname = normalize_pvname(name)

        t0_start = time.time()
        if not valid_pvname(pvname):
            sys.stdout.write("## Archiver add_pv invalid pvname = '%s'" % pvname)
            return None

        if pvname in self.pvinfo:
            if 'yes' == self.pvinfo[pvname]['active']:
                self.write("PV %s is already in database.\n" % pvname)
            else:
                self.write("PV %s is in database, reactivating!\n" % pvname)                
                self.pvinfo[pvname]['active'] = 'yes'
            return None
        # create an Epics PV, check that it's valid

        try:
            pv = epics.PV(pvname)
            pv.connect()
            pv.get_ctrlvars()
            typ = pv.type
            count = pv.count
            prec  = pv.precision
            connected = pv.connected 
        except:
            typ= 'int'
            count = 1
            prec = None
            connected = False

        if not connected:
            self.write("cannot add PV '%s': not connected" % pvname)
            return None

        # determine type
        dtype = 'string'
        if (typ in ('int','long','short')): dtype = 'int'
        if (typ in ('enum',)):              dtype = 'enum'
        if (typ in ('double','float')):     dtype = 'double'
        
        # determine data table: pvs are hashed across pvdat001..pvdat128
        table = "pvdat%3.3i" % ((hash(pvname) % 128) + 1)
        
        # determine descrption (don't try too hard!)
        if description is None:
            if pvname.endswith('.VAL'):
                descpv  = "%s.DESC" % pvname[:-4] 
            else:
                descpv  = "%s.DESC" % pvname 
                for f in motor_fields:
                    if pvname.endswith(f):
                        descpv = None
                        
            if descpv is not None:
                try:
                    dp = epics.PV(descpv)
                    description = dp.get(as_string=True)
                ## dp.disconnect()
                except:
                    pass
        if description is None: description = ''

        # set graph default settings
        gr = {'high':'','low':'','type':'normal'}
        gr.update(graph)
        if dtype == 'enum':
            x = pv.get(as_string=True)
            gr['type'] = 'discrete'
            gr['low'] = 0
            gr['high'] =  len(pv.enum_strs)
        elif dtype == 'double':
            gr['type'] = 'normal'
            dx = description.lower()
            for i in ('cathode','pirani','pressure'):
                if dx.find(i) >= 0: 
                    gr['type'] = 'log'
        
        if (deadtime == None):
            deadtime = config.pv_deadtime_dble
            if dtype in ('enum','string'):   deadtime = config.pv_deadtime_enum
            if (gr['type'] == 'log'): deadtime = 5.0  # (pressures change very frequently)

        if (deadband == None):
            deadband = 1.e-5
            if (gr['type'] == 'log'): deadband = 1.e-4
            if prec is not None: deadband = 10**(-(prec+1))
            if dtype in ('enum','string'):     deadband =  0.5
            
        self.write('Archiver adding PV: %s, table: %s' % (pvname,table))
        
        self.pv_table.insert(name    = pvname,
                             type    = dtype,
                             description= description,
                             data_table = table,
                             deadtime   = deadtime,
                             deadband   = deadband,
                             graph_lo   = gr['low'],
                             graph_hi   = gr['high'],
                             graph_type = gr['type'])

        
        dat  = self.pv_table.select_where(name=pvname)[0]
        dat['force_time'] = get_force_update_time()
        dat['last_ts'] = 0
        dat['last_value']  = None
        self.pvinfo[pvname] = dat

        self.update_value(pvname,time.time(),pv.value)

        self.write(" time=%f\n" % (time.time() - t0_start))

        sys.stdout.flush()
        pv = None        
        
    def update_value(self,name,ts,val):
        "insert value into appropriate table " 
        if val is None: return
        # invalid/missing timestamps fall back to the current time
        if ts is None or ts < self.MIN_TIME: ts = time.time()
       
        self.pvinfo[name]['last_ts'] =  ts
        self.pvinfo[name]['last_value'] =  val

        info = self.pvinfo[name]
        try:
            self.db.execute(self.sql_insert % (info['data_table'],info['id'], ts,clean_string(val)))
        except TypeError:
            self.write("cannot update %s\n" % name)

    def collect(self):
        """ one pass of collecting new values, deciding what to archive"""
        newvals, forced = {},{}
        tnow = time.time()
        dt  =  max(1.0, 2.*(tnow - self.last_collect))
        self.last_collect = tnow
        new_Changes = self.get_cache_changes(dt=dt)
        
        for dat in new_Changes:
            name  = dat['pvname']
            val   = dat['value']
            ts    = dat['ts'] or time.time()
            if name not in self.pvinfo:
                self.add_pv(name)

            info = self.pvinfo[name]
            if info['active'] == 'no' or name in newvals:
                continue
            
            last_ts   = info['last_ts']
            last_val  = info['last_value']
            if last_ts is None:
                last_ts = 0
           
            # save if outside the deadtime window ...
            do_save = ((ts-last_ts) > info['deadtime'])
            if do_save and dat['type'] in ('double','float'):
                # ... and, for floats, only if the relative change
                # exceeds the deadband
                try:
                    v,o = float(val),float(last_val)
                    do_save = abs((v-o)/max(abs(v),abs(o),1.e-12)) > abs(info['deadband'])
                except:
                    pass
            if do_save:
                # self.update_value(name,ts,val)
                newvals[name] = (ts,val)
                if name in self.dtime_limbo:
                    self.dtime_limbo.pop(name)
            elif (ts-last_ts) > 1.e-3:   # pv changed, but inside 'deadtime': put it in limbo!
                self.dtime_limbo[name] = (ts,val)
                
        # now look through the "limbo list" and insert the most recent change
        # iff the last insert was longer ago than the deadtime:
        tnow = time.time()
        # print '====== Collect: ', len(new_Changes) , len(newvals), len(self.dtime_limbo), time.ctime()
        for name in self.dtime_limbo.keys():
            info = self.pvinfo[name]
            if info['active'] == 'no': continue
            last_ts   = info['last_ts']
            last_val  = info['last_value']
            if (tnow - last_ts) > info['deadtime']:
                newvals[name] = self.dtime_limbo.pop(name)

        n_new     = len(newvals)
        n_forced  = 0
        # check for stale values and re-read db settings every 5 minutes or so

        if (tnow - self.force_checktime) >= 300.0:
            # sys.stdout.write('looking for stale values, checking for new settings...%s\n'  %time.ctime() )
            self.force_checktime = tnow
            self.check_for_new_pvs()
            self.refresh_pv_table()

            for name, info in self.pvinfo.items():
                last_ts  = info['last_ts']
                last_val = info['last_value']
                if info['active'] == 'no': continue
                ftime = info['force_time']
                try:
                    force = tnow-last_ts > ftime
                except:
                    print 'Cannot Figure out whether to force recording??'
                    print tnow, last_ts, ftime
                    print 'They should all be floats???'
                    force = False
                    
                if force:
                    r = self.get_cache_full(name)
                    if r['type'] is None and r['value'] is None: # an empty / non-cached PV?
                        try:
                            test_pv = epics.PV(name)
                            test_pv.connect()
                            # if PV is still not connected, set time
                            # to wait 2 hours before checking again.
                            if (test_pv is None or not test_pv.connected):
                                self.pvinfo[name]['last_ts'] = 7200+tnow-ftime
                                self.write(" PV not connected: %s\n" % name)
                            else:
                                r['value'] = test_pv.value
                            test_pv.disconnect()
                        except:
                            pass
                    else:
                        if name not in newvals:
                            newvals[name] = (tnow,str(r['value']))
                            n_forced = n_forced + 1

        for name,data in newvals.items():
            self.update_value(name,data[0],data[1])

        # self.db.commit_transaction()
        return n_new,n_forced

    def set_pidstatus(self, pid=None, status='unknown'):
        """Record the archiver's pid and/or status in the master db."""
        self.db.use(self.master_db)
        if status in ('running','offline','stopping','unknown'):
            self.db.execute("update info set status='%s' where process='archive'" % status)
        if pid is not None:
            self.db.execute("update info set pid=%i where process='archive'" % int(pid))
        self.db.use(self.dbname)

    def set_infotime(self,ts):
        """Record a heartbeat timestamp in the master db's info table."""
        self.db.use(self.master_db)
        self.db.execute("update info set ts=%f,datetime='%s' where process='archive'" % (ts,time.ctime(ts)))
        self.db.use(self.dbname)

    def get_pidstatus(self):
        """Return (pid, status) for the archive process from the master db."""
        self.db.use(self.master_db)
        ret = self.db.exec_fetchone("select * from info where process='archive'")
        self.db.use(self.dbname)
        return ret['pid'], ret['status']

    def get_nchanged(self,minutes=10,limit=None):
        """return the number of values archived in the past minutes.
        if limit is set, return as  soon as this limit is seen to be exceeded
        this is useful when checking if any values have been cached."""
        n = 0
        dt = (time.time()-minutes*60.0)
        q = "select count(time) from pvdat%3.3i where time > %f " 
        # scan all 128 data tables (pvdat001 .. pvdat128)
        for i in range(1,129):
            r = self.exec_fetch(q % (i,dt))
            try:
                n += r[0]['count(time)']
            except:
                pass
            if limit is not None and n>limit:
                break
        return n

    def mainloop(self,verbose=False):
        """Main collection loop: sync with the cache, then repeatedly
        collect() until interrupted or told to stop via pid/status in
        the master db."""
        t0 = time.time()
        self.db.get_cursor()
        self.use_currentDB()

        self.last_collect = t0
        self.write( 'connecting to database %s ... \n' % self.dbname)
        self.sync_with_cache(update_vals=True)

        self.write("done. DB connection took %6.3f sec\n" % (time.time()-t0))
        self.write("connecting to %i Epics PVs ... \n" % ( len(self.pvinfo) ))
        self.write('======   Start monitoring / saving to DB=%s\n' % self.dbname)
        sys.stdout.flush()
        mypid = os.getpid()
        self.set_pidstatus(pid=mypid, status='running')

        is_collecting = True
        n_loop = 0
        n_changed = 0
        n_forced  = 0
        t_lastlog = 0
        mlast = -1
        msg = "%s: %i new, %i forced entries. (%i)\n"

        while is_collecting:
            try:
                n_loop = n_loop + 1
                n1,n2 = self.collect()
                n_changed = n_changed + n1
                n_forced  = n_forced  + n2
                
                epics.poll()
                tnow = time.time()
                tmin,tsec = time.localtime()[4:6]

                # log a summary roughly every 5 minutes
                if tsec < 2 and tmin != mlast and tmin % 5 == 0:
                    self.write(msg % (time.ctime(), n_changed, n_forced, n_loop))
                    sys.stdout.flush()
                    n_changed = 0
                    n_forced  = 0
                    n_loop  =  0
                    t_lastlog = tnow
                    mlast = tmin
                self.set_infotime(tnow)

            except KeyboardInterrupt:
                sys.stderr.write('Interrupted by user.\n')
                return None
            
            # stop when asked to (status change) or when another
            # archiver instance has taken over (pid mismatch)
            masterpid, status = self.get_pidstatus()

            if (status in ('stopping','offline')) or (masterpid != mypid):
                self.set_pidstatus(status='offline')
                is_collecting = False

        return None
예제 #14
0
class MasterDB:
    """ general interface to Master Database of Epics Archiver.
    This is used both by itself, and is subclassed by
    Cache, ArchiveMaster, Instruments, and Alerts.

    Because of the multiple uses and especially because it is used
    as the sole interface by some Web processes, the API here may
    seem a bit disjointed, with a partial interface for Pairs and
    Alerts, but this allows many processes to have exactly one
    database connection.    
    """
    runs_title= '|  database         |       date range          | duration (days)|'
    runs_line = '|-------------------|---------------------------|----------------|'

    def_alert_msg ="""Hello,
  An alarm labeled  %LABEL%
  was detected for PV = '%PV%'
  The current value = %VALUE%.
  This is %COMP% the trip point value of %TRIP%
  """

    optokens = ('ne', 'eq', 'le', 'lt', 'ge', 'gt')
    opstrings= ('not equal to', 'equal to',
                'less than or equal to',    'less than',
                'greater than or equal to', 'greater than')
    
    ops = {'eq':'__eq__', 'ne':'__ne__', 
           'le':'__le__', 'lt':'__lt__', 
           'ge':'__ge__', 'gt':'__gt__'}
           
    def __init__(self,dbconn=None, **kw):
        """Connect to the master database (optionally reusing an existing
        dbconn) and bind the tables used throughout this class.
        Extra keyword arguments are accepted and ignored.
        """
        self.db = SimpleDB(dbconn=dbconn)
        self.dbconn = self.db.conn
        self.db.use(master_db)
        self.db.get_cursor()
        self.db.read_table_info()

        # handles to the master-db tables used by the methods below
        self.info    = self.db.tables['info']
        self.cache   = self.db.tables['cache']
        self.runs    = self.db.tables['runs']
        self.pairs   = self.db.tables['pairs']
        self.stations  = self.db.tables['stations']
        self.instruments = self.db.tables['instruments']
        self.inst_pvs  = self.db.tables['instrument_pvs']
        self.alerts = self.db.tables['alerts']
        
        # name of the current archive db; pvnames is filled lazily by
        # get_pvnames()
        self.arch_db = self._get_info('db',  process='archive')
        self.pvnames = []
        
    def use_master(self):
        "point db cursor at the master database (undoes use_current_archive)"
        self.db.use(master_db)

    def use_current_archive(self):
        """point db cursor at the current archive database,
        refreshing self.arch_db from the info table first"""
        self.arch_db = self._get_info('db',  process='archive')
        self.db.use(self.arch_db)

    def save_db(self,dbname=None):
        """dump a database (default: the current archive db) to disk with
        safe_dump, then point the cursor back at the master db"""
        if dbname is None: dbname = self.arch_db
        sys.stdout.write('saving %s\n' % dbname)
        self.db.use(dbname)
        self.db.safe_dump(compress=True)
        self.db.use(master_db)
        
    def get_pvnames(self):
        """fill and return self.pvnames: the list of pvnames in the cache"""
        self.pvnames = [i['pvname'] for i in self.cache.select()]
        return self.pvnames
    
    def request_pv_cache(self,pvname):
        """request a PV to be included in caching.

        The request is recorded in the 'requests' table and takes effect
        once a 'process_requests' is executed.  Does nothing if the PV
        is already in the cache.
        """
        self.db.use(master_db)
        npv = normalize_pvname(pvname)
        if len(self.pvnames)== 0: self.get_pvnames()
        if npv in self.pvnames: return

        cmd = "insert into requests (pvname,action,ts) values ('%s','add',%f)" % (npv,time.time())
        self.db.execute(cmd)
        # status output via sys.stdout.write, consistent with the rest of
        # this class (replaces a leftover py2 debug 'print' statement)
        sys.stdout.write('REQUEST_PV_CACHE: add %s\n' % pvname)
        
    def add_pv(self,pvname,set_motor_pairs=True):
        """adds a PV to the cache: actually requests the addition, which will
        be handled by the next process_requests in mainloop().

        Here, we check for 'Motor' PV types and make sure all motor fields
        are requested together, and that the motor fields are 'related' by
        assigning a pair_score = 10.

        Returns the tuple of fields requested (just (pvname,) for a
        non-motor PV or an invalid name).
        """
        pvname = normalize_pvname(pvname.strip())
        fields = (pvname,)
        if not valid_pvname(pvname):
            sys.stdout.write("## MasterDB add_pv invalid pvname = '%s'" % pvname)
            return fields

        prefix = pvname
        isMotor = False
        if pvname.endswith('.VAL'):
            prefix = pvname[:-4]

        p = epics.PV(pvname)
        p.wait_for_connection(timeout=0.1)
        sys.stdout.write('Master Add PVname = %s connected=%s\n' % (pvname, p.connected))
        if p.connected:
            self.request_pv_cache(pvname)
            if ('.' not in prefix and p.type == 'double'):
                rtype = epics.PV(prefix+'.RTYP')
                rtype.wait_for_connection(0.1)
                # BUGFIX: rtype is a PV instance and is never None -- the
                # intended test is whether the connection succeeded
                if rtype.connected:
                    isMotor = 'motor' == rtype.get()
            if isMotor:
                # request all motor fields together; use fresh loop names
                # instead of shadowing 'pvname'/'p' from this scope
                fields = tuple(["%s%s" % (prefix,i) for i in motor_fields])
                pvs = [epics.PV(fname) for fname in fields]

                epics.poll()
                for mpv in pvs:
                    mpv.wait_for_connection(timeout=0.5)
                    if mpv.connected:
                        self.request_pv_cache(mpv.pvname)

        if isMotor and set_motor_pairs:
            time.sleep(0.25)
            self.set_allpairs(fields)

        return fields
            

    def drop_pv(self,pvname):
        """drop a PV from the caching process -- really this 'suspends
        updates' and will take effect once a 'process_requests' is executed."""
        npv = normalize_pvname(pvname)
        # refresh the name list lazily; nothing to do if PV not cached
        if len(self.pvnames)== 0: self.get_pvnames()
        if not npv in self.pvnames: return

        cmd = "insert into requests (pvname,action) values ('%s','suspend')" % npv
        self.db.execute(cmd)

    def get_recent(self,dt=60):
        """return cache rows inserted within the last dt seconds,
        ordered by timestamp."""
        cutoff = time.time() - dt
        return self.cache.select(where="ts>%f order by ts" % cutoff)
        
    def dbs_for_time(self, t0=SEC_DAY, t1=MAX_EPOCH):
        """return list of databases with data in the given time range,
        padded by one day on each side."""
        tlo = min(t0,t1) - SEC_DAY
        thi = max(t0,t1) + SEC_DAY
        where = "stop_time>=%i and start_time<=%i order by start_time" % (tlo, thi)
        dbs = []
        for row in self.runs.select(where=where):
            if row['db'] not in dbs:
                dbs.append(row['db'])
        return dbs

    def close(self):
        "close the database connection"
        self.db.close()
    
    def _get_info(self,name='db',process='archive'):
        """get a single value from the info table row for the given
        process, returning None on any failure (missing row, missing
        column, or a database error)."""
        try:
            return self.info.select_one(where="process='%s'" % process)[name]
        except Exception:
            # narrowed from a bare 'except:' so KeyboardInterrupt /
            # SystemExit are no longer swallowed; still best-effort
            return None

    def _set_info(self,process='archive',**kw):
        """set value(s) in the info table row for the given process;
        keyword arguments become column=value updates"""
        self.info.update("process='%s'" % process, **kw)

    def get_cache_status(self):
        "return the status string of the caching process (from info table)"
        return self._get_info('status', process='cache')

    def get_arch_status(self):
        "return the status string of the archiving process (from info table)"
        return self._get_info('status', process='archive')

    def set_cache_status(self,status):
        "set the status string of the caching process (in info table)"
        return self._set_info(status=status,  process='cache')

    def set_arch_status(self,status):
        "set the status string of the archiving process (in info table)"
        return self._set_info(status=status,  process='archive')

    def get_cache_pid(self):
        "return the pid of the caching process (from info table)"
        return self._get_info('pid',    process='cache')

    def get_arch_pid(self):
        "return the pid of the archiving process (from info table)"
        return self._get_info('pid',    process='archive')

    def set_cache_pid(self,pid):
        "set the pid of the caching process (in info table)"
        return self._set_info(pid=pid,  process='cache')

    def set_arch_pid(self,pid):
        "set the pid of the archiving process (in info table)"
        return self._set_info(pid=pid,  process='archive')

    ##
    ## Status/Activity Reports 
    def get_npvs(self):
        """return the number of pvs in the current archive database"""
        self.db.use(self.arch_db)
        r = self.db.exec_fetch('select id from pv')
        self.db.use(master_db)
        return len(r)

    def arch_nchanged(self,minutes=10,max = None):
        """return the number of values archived in the past 'minutes',
        optionally stopping early once more than 'max' values are counted.
        (NOTE(review): parameter 'max' shadows the builtin; kept for
        keyword-argument compatibility.)
        """
        self.db.use(self.arch_db)
        n = 0
        t0 = time.time()
        dt = (time.time()-minutes*60.)
        # archive data is spread over 128 pvdat tables (pvdat001..pvdat128)
        q = "select pv_id from pvdat%3.3i where time > %f " 
        self.db.get_cursor()
        for i in range(1,129):
            n = n + self.db.cursor.execute(q % (i,dt))
            if max is not None and n > max: break
        self.db.use(master_db)
        return n

    def arch_report(self,minutes=10):
        """return a short report (list of text lines) for the archiving
        process.  (The 'minutes' argument is unused here; see
        arch_full_report for a count of recent values.)"""
        npvs     = self.get_npvs()
        o = ["Current Database=%s, status=%s, PID=%i, %i PVs in archive" %(self.arch_db,
                                                                           self.get_arch_status(),
                                                                           self.get_arch_pid(),npvs)]
        return o

    def arch_full_report(self,minutes=10):
        """return a report (list of text lines) for the archiving process,
        including the number of values archived in the past 'minutes'.
        """
        nchanged = self.arch_nchanged(minutes=minutes)
        npvs     = self.get_npvs()
        
        o = ["Current Database=%s, status=%s, PID=%i, %i PVs in archive" %(self.arch_db, 
                                                                 self.get_arch_status(),
                                                                 self.get_arch_pid(),npvs),
             "%i values archived in past %i minutes" % (nchanged, minutes)]
        return o

    def cache_report(self,brief=False,dt=60):
        """return a report (list of text lines) for the caching process,
        giving the number of values cached in the past dt seconds.
        Use 'brief=False' to also list which PVs have been cached.
        """
        out = []
        pid = self.get_cache_pid()
        ret = self.cache.select(where="ts> %i order by ts" % (time.time()-dt))
        fmt = "  %s %.25s = %s"
        if not brief:
            # one line per cached value: time, padded pvname, value
            for r in ret:
                out.append(fmt % (tformat(t=r['ts'],format="%H:%M:%S"),
                                  r['pvname']+' '*20, r['value']) )
                    
        fmt = '%i PVs had values updated in the past %i seconds. pid=%i'
        out.append(fmt % (len(ret),dt,pid))
        return out
        
    def runs_report(self, n=10):
        """return a report (list of text lines) for the archiving runs
        showing the time ranges for the (at most) n most recent runs.
        """
        r = []
        for i in self.runs.select(where='1=1 order by start_time desc limit %i' % n):
            timefmt = "%6.2f "
            # the current run has no stop time yet: use 'now' and mark
            # its duration with a trailing '*'
            if  i['db'] == self.arch_db:
                timefmt = "%6.1f*"
                i['stop_time'] = time.time()
            days = timefmt % ((i['stop_time'] - i['start_time'])/(24*3600.0))
            drange = "%s to %s" %(tformat(i['start_time'],format="%Y-%m-%d"),
                                  tformat(i['stop_time'],format="%Y-%m-%d"))
            
            r.append("| %16s  | %24s  |   %10s   |" % (i['db'],drange,days))
        # rows were gathered newest-first; display oldest-first
        r.reverse()
        out = [self.runs_line,self.runs_title,self.runs_line]
        for i in r: out.append(i)
        out.append(self.runs_line)
        return out

    ##
    ## Pairs
    def get_related_pvs(self,pv,minscore=1):
        """return a list of pvs related to the provided pv with at least
        a minimum pair score, highest score first"""
       
        out = []
        tmp = []
        npv = normalize_pvname(pv)
        if len(self.pvnames)== 0: self.get_pvnames()
        if npv not in self.pvnames: return out
        # the pv may appear as either member of a pair: query both columns
        for i in ('pv1','pv2'):
            where = "%s='%s' and score>=%i order by score" 
            for j in self.pairs.select(where = where % (i,npv,minscore)):
                tmp.append((j['score'],j['pv1'],j['pv2']))
        tmp.sort()
        # collect the partner pv of each pair; reverse for descending score
        for r in tmp:
            if   r[1] == npv:  out.append(r[2])
            elif r[2] == npv:  out.append(r[1])
        out.reverse()
        return out

    def __get_pvpairs(self,pv1,pv2):
        """normalize two pv names and return them as a sorted 2-tuple,
        the canonical ordering used by the pairs table."""
        return tuple(sorted((normalize_pvname(pv1), normalize_pvname(pv2))))
    
        
    def get_pair_score(self,pv1,pv2):
        """return the pair score for 2 pvs, or -1 if either pv is not
        in the cache"""
        p = self.__get_pvpairs(pv1,pv2)
        score = -1
        if len(self.pvnames)== 0: self.get_pvnames()
        if (p[0] in self.pvnames) and (p[1] in self.pvnames):
            o  = self.pairs.select_one(where= "pv1='%s' and pv2='%s'" % p)
            # NOTE(review): if select_one can return None for a missing
            # pair, o.get() would raise -- confirm SimpleDB's behavior
            score = int(o.get('score',-1))
        return score

    def set_pair_score(self,pv1,pv2,score=None):
        """set the pair score for 2 pvs; with score=None the current
        score is incremented by 1"""
        p = self.__get_pvpairs(pv1,pv2)
        
        current_score  = self.get_pair_score(p[0],p[1])
        # newly added pvs may not be known yet: poll for up to ~1 second
        # for a usable current score
        if current_score is None:
            wait_count = 0
            while current_score is None and wait_count < 10:
                time.sleep(0.1)
                self.get_pvnames()

                current_score  = self.get_pair_score(p[0],p[1])
                wait_count = wait_count + 1
                
        # a current_score=None means the pv pairs may not be known yet.
        if current_score is None:
            current_score = -1
            score  = 1
            
        if score is None: score = 1 + current_score
       
        # insert a new row for an unknown/zero pair, update otherwise
        if current_score <= 0:
            q = "insert into pairs set score=%i, pv1='%s', pv2='%s'"
        else:
            q = "update pairs set score=%i where pv1='%s' and pv2='%s'"
        self.db.exec_fetch(q % (score,p[0],p[1]))
        
    def increment_pair_score(self,pv1,pv2):
        """increase by 1 the pair score for two pvs
        (score=None makes set_pair_score increment)"""
        self.set_pair_score(pv1,pv2,score=None)

    def set_allpairs(self,pvlist,score=10):
        """for a list/tuple of pvs, set all pair scores
        to be at least the provided score"""
        if not isinstance(pvlist,(tuple,list)): return
        _tmp = list(pvlist[:])

        # these may be newly added names, so may not yet be in pvnames:
        # poll the cache for up to ~1 second until they all appear.
        # (BUGFIX: the loop flag was previously initialized False, so the
        # wait loop below never executed.)
        if len(self.pvnames)== 0: self.get_pvnames()
        wait_count = 0
        while wait_count < 10 and any(i not in self.pvnames for i in _tmp):
            time.sleep(0.1)
            self.get_pvnames()
            wait_count = wait_count + 1

        # raise the score for every unordered pair in the list
        while _tmp:
            a = _tmp.pop()
            for b in _tmp:
                if self.get_pair_score(a,b)<score:
                    self.set_pair_score(a,b,score=score)
                    
        self.pairs.select()

    ##
    ## Instruments
    def get_instruments_with_pv(self,pv):
        """return a list of (instrument id, instrument name, station name)
        tuples for instruments which contain the named pv"""
        inames = []
        pvn = normalize_pvname(pv)
        # join instrument_pvs -> instruments -> stations by id
        for r in self.inst_pvs.select(where="pvname='%s'" % pvn):
            inst = self.instruments.select_one(where="id=%i" % r['inst'])
            sta  = self.stations.select_one(where="id=%i" % inst['station'])
            inames.append((r['inst'],inst['name'],sta['name']))
        return inames

    ##
    ## Alerts
    def get_alerts(self,pvname=None,name=None):
        """return a list of alerts, optionally filtered by pvname
        and/or alert name (both default to 'any')"""
        where = '1=1'
        if pvname is not None: 
            pvname = normalize_pvname(pvname)
            where = "pvname='%s'" % pvname
        
        if name is not None:
            where = "%s and name='%s'" % (where,clean_input(name))

        return self.alerts.select(where=where)

    def get_alert_with_id(self,id):
        """return the single alert row with the given id"""
        return self.alerts.select_one(where="id=%i" % id)

    ## Alerts
    def remove_alert(self, id=None):
        """remove the alert with the given id (no-op for id=None)"""
        if id is None: return
        q = "delete from alerts where id=%i" % int(id)
        self.db.execute(q)

   
    def add_alert(self, pvname=None,name=None,
                  mailto=None,  mailmsg=None, timeout=30,
                  compare='ne', trippoint=None, **kw):
        """add a new alert for a PV, adding the PV to the cache if needed,
        then immediately evaluate the new alert against the current value.
        An alert missing mailto/mailmsg/trippoint is stored inactive."""
        if pvname is None: return
        
        pvname = normalize_pvname(pvname)        
        if name is None: name = pvname
        if len(self.pvnames)== 0: self.get_pvnames()
        if pvname not in self.pvnames:
            self.add_pv(pvname)

        active = 'yes'
        if mailto  is None:    active,mailto = ('no','')
        if mailmsg  is None:   active,mailmsg= ('no','')
        if trippoint is None:  active,trippoint = ('no',0.)

        if compare not in self.optokens: compare = 'ne'
        
        # BUGFIX: the literal 30 was previously inserted here, silently
        # ignoring the 'timeout' argument
        self.alerts.insert(name=name,pvname=pvname,active=active,
                           mailto=mailto,mailmsg=mailmsg, timeout=timeout,
                           compare=compare, trippoint=trippoint)
                  
        for a in self.get_alerts(pvname=pvname,name=name):
            val = epics.caget(pvname)
            self.check_alert(a['id'],val)
        
    def update_alert(self,id=None,**kw):
        """modify an existing alert, indexed by id, passing in keyword
        args for ('pvname','name','mailto','mailmsg','timeout',
        'trippoint','compare','status','active').  Unknown keywords are
        ignored; the alert is re-checked against the current PV value.
        """
        if id is None: return
        where = "id=%i"% id        
        mykw = {}
        for k,v in kw.items():
            if k in ('pvname','name','mailto','mailmsg','timeout',
                     'trippoint','compare','status','active'):
                # clamp input length (mail message may be much longer)
                maxlen = 1024
                if k == 'mailmsg': maxlen = 32768
                v = clean_input(v,maxlen=maxlen)
                # coerce enumerated fields to their valid values
                if 'compare' == k:
                    if not v in self.optokens:  v = 'ne'
                elif 'status' == k:
                    if v != 'ok': v = 'alarm'
                elif 'active' == k:
                    if v != 'no': v = 'yes'
                mykw[k]=v
        self.alerts.update(where=where,**mykw)
        a   = self.get_alert_with_id(id)
        val = epics.caget(a['pvname'])
        self.check_alert(id,val)

    def check_alert(self,id,value,sendmail=False):
        """
        check alert status for alert 'id' against 'value', updating the
        stored status and sending mail if necessary (mail only when
        sendmail=True).
        returns two boolean values: (Value_OK, Mail_Was_Sent)
        """
        where = "id=%i"% id
        alarm = self.alerts.select_one(where=where)
        
        # if alarm is not active, return now
        if 'no' == alarm['active']: return True,False

        # coerce values to strings or floats for comparisons
        # (NOTE: the 'long' builtin makes this python2-only)
        convert = str
        if isinstance(value,(int,long,float,complex)):  convert = float

        value     = convert(value)
        trippoint = convert(alarm['trippoint'])
        cmp       = self.ops[alarm['compare']]
        
        # compute new alarm status: note form  'value.__ne__(trippoint)'
        value_ok = not getattr(value,cmp)(trippoint)

        old_value_ok = (alarm['status'] == 'ok')
        # only notify on an ok -> alarm transition
        notify = sendmail and old_value_ok and (not value_ok)
        if old_value_ok != value_ok:
            # update the status field in the alerts table
            status = 'alarm'
            if value_ok: status = 'ok'
            self.alerts.update(status=status,where=where)

            # send mail if value is now not ok!
            if notify:
                self.sendmail(alarm,value)

        return value_ok, notify
            
    
    def sendmail(self,alarm,value):
        """send an alert email from an alarm dict holding the
        appropriate row of the alert table.  Substitutes the
        %PV%/%LABEL%/%COMP%/%VALUE%/%TRIP% template fields and any
        %PV(name)% references in the message body.
        """
        mailto = alarm['mailto']
        pvname = alarm['pvname']
        label  = alarm['name']
        compare= alarm['compare']
        msg    = alarm['mailmsg']
        
        if mailto in ('', None) or pvname in ('', None): return

        # strip line breaks before splitting the recipient list
        mailto = mailto.replace('\r','').replace('\n','')

        trippoint = str(alarm['trippoint'])
        mailto    = tuple(mailto.split(','))
        subject   = "[Epics Alert] %s" % (label)

        if msg in ('', None):  msg = self.def_alert_msg
        
        msg  = clean_mail_message(msg)

        # human-readable form of the comparison operator
        opstr = 'not equal to'
        for tok,desc in zip(self.optokens, self.opstrings):
            if tok == compare: opstr = desc

        # fill in 'template' values in mail message
        for k,v in {'PV': pvname,  'LABEL':label,
                    'COMP': opstr, 'VALUE': str(value),  
                    'TRIP': str(trippoint)}.items():
            msg = msg.replace("%%%s%%" % k, v)

        # do %PV(XX)% replacements, at most 25 per line to avoid
        # looping forever on an unreplaceable pattern
        mlines = msg.split('\n')
        for i,line in enumerate(mlines):
            nmatch = 0
            match = re_showpv(line)
            while match is not None and nmatch<25:
                try:
                    pvn = match.groups()[0]
                    rep = "%(cvalue)s" % self.cache.select_one(where="pvname='%s'" % pvn)
                    line = line.replace('%%PV(%s)%%' % pvn,rep)
                except:
                    line = line.replace('%%PV(%s)%%' % pvn, 'Unknown_PV(%s)' % pvn)
                match = re_showpv(line)
                nmatch = nmatch + 1
            mlines[i] = line
        msg = "From: %s\r\nSubject: %s\r\n%s\nSee %s/plot/%s\n" % \
              (mailfrom,subject,'\n'.join(mlines),cgi_url,pvname)

        # best-effort delivery: failure to send is reported, not raised
        try:
            s = smtplib.SMTP(mailserver)
            s.sendmail(mailfrom,mailto,msg)
            s.quit()
        except:
            sys.stdout.write("Could not send Alert mail:  mail not configured??")
예제 #15
0
class WorkFrame( Parent ):

  def _read_db( self, db_path ):
    """Open the database at db_path and cache its schema: field names,
    field types, field count, and the key field (assumed to be the
    first column)."""
    self.db_path = db_path
    self.db_name = os.path.basename( db_path )
    self.db = SimpleDB( self.db_path )
    self.names = self.db.field_names()
    self.types = self.db.field_types()
    self.NF = len(self.names)
    self.key = self.names[0]

  def _load_db( self, db_path ):
    """Read the db and (re)initialize all UI state that depends on it:
    status bar, sort marks, overdue check, and current selection."""
    self._read_db( db_path ) # sets db and other stuff

    self.status_bar = CustomStatusBar( self, self.db_name )
    self.SetStatusBar( self.status_bar )

    self.companies = {} # a set

    self.sorted = (0,True) # None or (col_idx,ascending) col_idx=0|1|... asc:bool
    self.mark_col( 0, True )

    self.c_date = Date(2000,1,1) # last checked date (far past), for check_overdue
    self.check_overdue( dont_refresh=True )

    self.curr = 0 # current record, index in the list ListCtrl
    self.refresh() # filter and sort are inside
    self.lc.SetItemState(self.curr, wx.LIST_STATE_SELECTED, wx.LIST_STATE_SELECTED)

  def __init__( self, parent, id, title, filepath, **kwds ):
    """Build the main frame: list control, buttons, event bindings and
    year/status filters, then load the database from filepath and start
    the periodic overdue check."""
    self.title = title
    self.parent = parent

    Parent.__init__( self, parent, id, title, **kwds )

    # .lc - the main table
    # it keeps record no in its user-data field
    self.lc = wx.ListCtrl( self, DISPLAY,
        style=wx.LC_REPORT|wx.LC_SINGLE_SEL, size=(700,450) )

    self.column_names = ['Inv.','Date Done','Customer','Description',
                          'Amount','Date Due','Status']
    for i,h in enumerate(self.column_names):
      self.lc.InsertColumn(i,h)

    # one button per top-level action; ids are module-level constants
    self.butsave   = wx.Button(self, SAVE,   "Save")
    self.butprint  = wx.Button(self, PRINT,  "Print")
    self.butinfo   = wx.Button(self, INFO,   "Info")
    self.butadd    = wx.Button(self, ADD,    "Add")
    self.butedit   = wx.Button(self, EDIT,   "Edit")
    self.butdelete = wx.Button(self, DELETE, "Delete")
    self.butload   = wx.Button(self, LOAD,   "Load")
    self.butfilter = wx.Button(self, FILTER, "Filter")
    self.butabout  = wx.Button(self, ABOUT,  "About")
    self.butclose  = wx.Button(self, CLOSE,  "Quit")

    self._do_layout()

    wx.EVT_BUTTON( self, SAVE, self.on_save )
    wx.EVT_BUTTON( self, PRINT, self.on_print )
    wx.EVT_BUTTON( self, INFO, self.on_info )
    wx.EVT_BUTTON( self, ADD, self.on_add )
    wx.EVT_BUTTON( self, EDIT, self.on_edit )
    wx.EVT_BUTTON( self, DELETE, self.on_delete )
    wx.EVT_BUTTON( self, FILTER, self.on_filter )
    wx.EVT_BUTTON( self, LOAD, self.on_load )
    wx.EVT_BUTTON( self, CLOSE, self.on_close )
    wx.EVT_BUTTON( self, ABOUT, self.on_about )
    wx.EVT_LIST_ITEM_SELECTED( self, DISPLAY, self.display )
    self.lc.Bind( wx.EVT_LEFT_DCLICK, self.on_double_click )
    self.lc.Bind( wx.EVT_RIGHT_DOWN, self.on_right_down )
    # for wxMSW
    self.lc.Bind( wx.EVT_COMMAND_RIGHT_CLICK, self.on_right_click )
    # for wxGTK
    self.lc.Bind( wx.EVT_RIGHT_UP, self.on_right_click )
    self.Bind( wx.EVT_LIST_COL_CLICK, self.on_col_click, self.lc )
    self.Bind( wx.EVT_CLOSE, self.on_close )

    # record filters; index into self.filters is the user's choice
    def default_filter(r):
      return r.date_due.y==CURRENT_YEAR or r.status in (ST_OVERDUE,ST_PENDING)
    def no_filter(r): return True
    def filter_pending(r): return r.status==ST_PENDING
    def filter_overdue(r): return r.status==ST_OVERDUE
    def filter_2003(r): return r.date_due.y==2003
    def filter_2004(r): return r.date_due.y==2004
    def filter_2005(r): return r.date_due.y==2005
    def filter_2006(r): return r.date_due.y==2006
    def filter_2007(r): return r.date_due.y==2007
    def filter_2008(r): return r.date_due.y==2008
    def filter_2009(r): return r.date_due.y==2009
    def filter_2010(r): return r.date_due.y==2010
    def filter_2011(r): return r.date_due.y==2011
    def filter_2012(r): return r.date_due.y==2012
    def filter_2013(r): return r.date_due.y==2013
    def filter_2014(r): return r.date_due.y==2014
    def filter_2015(r): return r.date_due.y==2015
    def filter_2016(r): return r.date_due.y==2016
    def filter_2017(r): return r.date_due.y==2017
    def filter_2018(r): return r.date_due.y==2018
    def filter_2019(r): return r.date_due.y==2019

    self.filters = [default_filter,no_filter,filter_pending,filter_overdue,
      filter_2003,filter_2004,filter_2005,filter_2006,filter_2007,filter_2008,filter_2009,filter_2010,
      filter_2011,filter_2012,filter_2013,filter_2014,filter_2015,filter_2016,filter_2017,filter_2018,
      filter_2019]
    self.filter_descrs = ["Default: this year and all pending/overdue",
      "All invoices ==============================",
      "All pending","All overdue",
      "2003","2004","2005","2006","2007","2008","2009","2010","2011","2012","2013","2014","2015",
      "2016","2017","2018","2019"]

    self.filter_no = 0
    self.filter_fn = self.filters[self.filter_no] # for refresh()

    self._load_db( filepath )

    self.timer = wx.PyTimer( self.check_overdue )
    self.timer.Start(30000) # every 30 seconds

  def _do_layout(self):
    """Lay out the frame: the list control on top (growing) and a
    horizontal row of buttons below."""
    boxa = wx.BoxSizer(wx.VERTICAL)
    boxt = wx.BoxSizer(wx.VERTICAL)
    boxb = wx.BoxSizer(wx.HORIZONTAL)
    CVH = wx.ALIGN_CENTER_HORIZONTAL|wx.ALIGN_CENTER_VERTICAL
    boxt.Add( self.lc, 1, wx.EXPAND )
    boxb.Add( self.butsave, 0, CVH )
    boxb.Add( self.butprint, 0, CVH )
    boxb.Add( self.butinfo, 0, CVH )
    boxb.Add( self.butadd, 0, CVH )
    boxb.Add( self.butedit, 0, CVH )
    boxb.Add( self.butdelete, 0, CVH )
    boxb.Add( self.butfilter, 0, CVH )
    boxb.Add( self.butload, 0, CVH )
    boxb.Add( self.butabout, 0, CVH )
    boxb.Add( self.butclose, 0, CVH )
    boxa.Add( boxt,1, wx.EXPAND )
    boxa.Add( boxb,0, wx.EXPAND )
    self.SetAutoLayout(1)
    self.SetSizer(boxa)
    boxa.Fit(self)
    boxa.SetSizeHints(self)
    self.Layout()

  # Mark-unmark columns when sorting

  def unmark_col( self, n ):
    """Remove 'sorting' mark from the column title.
    The fn assumes the title ends with ' [+] ' or ' [--]'
    (the 5-character suffix added by mark_col).
    """
    c = self.lc.GetColumn( n )
    c.SetText( c.GetText()[:-5] )
    c.m_mask = wx.LIST_MASK_TEXT
    self.lc.SetColumn( n, c )

  def refresh_status_line( self ):
    """Show file name, a dirty marker and record counts in the status bar."""
    dirty_mark = " *" if self.db.dirty else ""
    text = "File: %s%s - %d/%d records" % ( self.db_name, dirty_mark,
                                            self.n_records, self.total_records )
    self.status_bar.SetStatusText( text, 0 )

  def refresh( self ):
    """Re-read the database table,
    Apply filter,
    Sort,
    Re-build the lc (overdue rows red, pending rows blue)."""
    self.lc.DeleteAllItems()
    self.records = self.db.get_records( self.filter_fn ) # Filter
    self.sort_records()
    for i,r in enumerate(self.records):
      self.lc.InsertStringItem(i,self.fmt_field(r,0))
      for j in range(1,self.NF):
        self.lc.SetStringItem(i,j,self.fmt_field(r,j))
      # user-data holds the row's index into self.records
      self.lc.SetItemData(i,i)
      if r.status == ST_OVERDUE:
        self.lc.SetItemTextColour(i, (255,0,0) ) # red
      elif r.status == ST_PENDING:
        self.lc.SetItemTextColour(i, (0,0,255) ) # blue
      self.companies[ r.customer ] = True
    self.n_records = len(self.records)
    self.total_records = self.db.total_records()
    self.refresh_status_line()

  def find_record_index( self, key ):
    """Return the index of the record whose key field equals key,
    or -1 when no such record is shown."""
    return next( (idx for idx,rec in enumerate(self.records)
                  if rec[self.key]==key), -1 )

  def mark_col( self, n, ord ):
    """Adds 'sorting' mark to column n (ord ? ' [+] ' ! ' [--]')
    n:int - column number, ord:bool - ascending.
    The suffix is always 5 characters, so unmark_col can strip it.
    """
    c = self.lc.GetColumn( n )
    t = c.GetText()
    t += ord and " [+] " or " [--]"
    c.SetText( t )
    c.m_mask = wx.LIST_MASK_TEXT
    self.lc.SetColumn( n, c )

  # Click on column title - sort

  def sort_records( self ):
    """Sort self.records in place according to self.sorted = (col, asc);
    no-op when self.sorted is None/falsy."""
    if not self.sorted:
      return
    (n, o) = self.sorted
    m = self.names[n]
    # key-based sort replaces the python2-only cmp= form; both are
    # stable and keep the original order of equal keys, so behavior
    # is unchanged (and this also runs under python3)
    self.records.sort( key=lambda r: r[m], reverse=not o )

  def do_sort( self ):
    """Re-sort and refresh, keeping the current record selected."""
    # NOTE(review): py2 'and/or' idiom -- r is None when curr < 0
    # (and would also be None if the record object itself were falsy)
    r = self.curr>=0 and self.records[ self.curr ] or None
    self.sort_records()
    self.refresh()
    if r:
      self._goto_rec( r )

  def on_col_click( self, event ):
    """click on a column title - sort by that column; a second click
    on the same column toggles the direction"""
    n = event.GetColumn()
    event.Skip()
    # mark/unmark columns
    o = True # order to be
    if self.sorted:
      on,oo = self.sorted
      if on == n: o = not oo   # same column: flip ascending/descending
      self.unmark_col( on )
    self.mark_col( n, o )
    self.sorted = ( n, o )
    self.do_sort()
    # the current selection is preserved

  # Current item

  def display( self, event ):
    """List-selection handler: remember the selected record's index
    (stored in the item's user-data field)."""
    curitem = event.m_itemIndex
    self.curr = self.lc.GetItemData( curitem )

  # Double click - edit

  def on_double_click( self, event ):
    """Double click on a row - same as pressing Edit."""
    self.on_edit( event )

  # Right button - pop-up menu

  def on_right_down( self, event ):
    """Remember the click position, select the row under the cursor,
    and record (in self.inside) whether a row was actually hit."""
    self.x = event.GetX()
    self.y = event.GetY()
    item, flags = self.lc.HitTest( (self.x, self.y) )
    self.inside = bool( flags & wx.LIST_HITTEST_ONITEM )
    if self.inside:
      self.lc.Select(item)
    event.Skip()

  def on_right_click( self, event ):
    """Show the context menu (Edit / Delete / Paid!) for the row that
    was hit by the preceding right-button-down event."""
    if not self.inside:
      return
    # only do this part the first time so the events are only bound once
    if not hasattr( self, "popupID1" ):
      self.popupID1 = wx.NewId() # Edit
      self.popupID2 = wx.NewId() # Delete
      self.popupID3 = wx.NewId() # Mark as Paid

      self.Bind( wx.EVT_MENU, self.on_popup_one, id=self.popupID1 )
      self.Bind( wx.EVT_MENU, self.on_popup_two, id=self.popupID2 )
      self.Bind( wx.EVT_MENU, self.on_popup_three, id=self.popupID3 )

    # make a menu
    menu = wx.Menu()
    # add some items
    menu.Append( self.popupID1, "Edit"   )
    menu.Append( self.popupID2, "Delete" )
    menu.Append( self.popupID3, "Paid!"  )

    # Popup the menu.  If an item is selected then its handler
    # will be called before PopupMenu returns.
    self.PopupMenu( menu, (self.x, self.y) )
    menu.Destroy()

  def on_popup_one( self, event ): # Edit
    """Context-menu 'Edit' - delegate to on_edit."""
    self.on_edit( event )

  def on_popup_two( self, event ): # Delete
    """Context-menu 'Delete' - delegate to on_delete."""
    self.on_delete( event )

  def on_popup_three( self, event ): # Clean status
    """Context-menu 'Paid!' - clear the current record's status
    (presumably an empty status means 'paid' -- see menu label)."""
    r = self.records[ self.curr ]
    r.status = ''
    self.db.dirty = True
    self.refresh()
    self._goto_rec( r )

  def fmt_field( self, r, i ):
    """Format field i of record r for display, according to the
    column type (returns None for an unknown type)."""
    value = r[ self.names[i] ]
    ftype = self.types[i]
    if ftype == T_MONEY:
      return "$%.2f" % value
    if ftype == T_DATE:
      return value.format( "%a" )
    if ftype == T_INT:
      return str(value)
    if ftype == T_STR:
      return value

  def check_overdue( self, dont_refresh=False ):
    """Promote 'pending' records to 'overdue' once the due date passes.

    Runs at most once per calendar day (self.c_date remembers the last
    check).  When anything changed, marks the DB dirty and redraws the
    list unless dont_refresh is set.
    """
    today = Date()
    if today == self.c_date: # already checked today
      return
    self.c_date = today
    changed = 0
    for rec in self.db.get_records():
      if rec.status == ST_PENDING and today > rec.date_due:
        rec.status = ST_OVERDUE
        changed += 1
    if changed:
      self.db.dirty = True
      if not dont_refresh:
        self.refresh()

  def _add_rec( self, rec ):
    """Append rec to the database, then redisplay the list."""
    self.db.add_record( rec )
    self.refresh()

  def _goto_rec( self, rec ):
    """Select the list row holding rec; fall back to the last row."""
    idx = self.find_record_index( rec[self.key] )
    if idx < 0:
      idx = self.n_records - 1
    self.curr = idx
    self.lc.SetItemState(self.curr, wx.LIST_STATE_SELECTED, wx.LIST_STATE_SELECTED)

  def on_add( self, event ):
    """Add a new record via the dialog; the invoice number defaults to
    one past the current maximum (0 when there are no records)."""
    maxinv = max( [r[self.key] for r in self.records] + [0] )
    dlg = RecordDlg( self, "Add New Entry", None, maxinv+1 )
    if dlg.ShowModal() and dlg.rec:
      self._add_rec( dlg.rec )
      self._goto_rec( dlg.rec )
    dlg.Destroy()

  def on_edit( self, event ):
    """Edit the currently selected record via the record dialog."""
    if self.curr < 0:
      MsgBox( self, "No record selected" )
      return
    old_rec = self.records[self.curr]
    key_val = old_rec[self.key] # key value
    dlg = RecordDlg( self, "Edit Entry", old_rec, None )
    ok = dlg.ShowModal()
    new_rec = dlg.rec # can be None
    dlg.Destroy()
    if ok and new_rec:
      self.db.update_record( self.key, key_val, new_rec )
      self.refresh()
      self._goto_rec( old_rec )

  def on_delete( self, event ):
    """Ask for confirmation, then remove the selected record."""
    if self.curr < 0:
      MsgBox( self, "No record selected" )
      return
    rec = self.records[self.curr]
    mg = ("Are You Sure You want to delete record\n"+
          "    Invoice:  \t%d\n    Customer:\t%s\n    Descriptn:\t%s") \
         % (rec.invoice, rec.customer, rec.descr)
    msg = wx.MessageDialog( self, mg, "Deleting", wx.OK|wx.CANCEL )
    res = msg.ShowModal()
    msg.Destroy()
    if res != wx.ID_OK:
      return
    self.db.delete_record( self.key, rec[self.key] )
    self.refresh()
    if self.curr >= self.n_records:
      self.curr = self.n_records - 1 # last record or -1 if no records
    if self.curr >= 0:
      self.lc.SetItemState(self.curr, wx.LIST_STATE_SELECTED, wx.LIST_STATE_SELECTED)

  def on_print( self, event ):
    """Build a printable table of all visible records and show a preview.

    Bug fix: the row colouring (red for overdue, blue for pending) used
    to run AFTER prt.Preview(), behind an unconditional call, so the
    highlighting never appeared in the generated preview.  The colouring
    now happens before the preview is created.
    """
    # Format every record into a row of display strings.
    data = []
    for r in self.records:
      data.append( [ self.fmt_field(r, j) for j in range(self.NF) ] )
    prt = PrintTable(self.parent)
    prt.data = data
    prt.left_margin = 0.5
    #prt.top_margin = 1
    prt.set_column = [0.8, 1.0, 3.0, 2.5, 1.0, 1.0, 0.8]
    prt.label = ["Invoice","Date Done","Customer","Description","Amount", "Date Due", "Status"]
    prt.SetColAlignment(4, wx.ALIGN_RIGHT)
    prt.SetLandscape()
    prt.SetHeader( "Invoice List Report", size = 30 )
    prt.SetFooter("Date: ", type = "Date", align=wx.ALIGN_RIGHT, indent = -2, colour = wx.NamedColour('BLUE'))
    #prt.SetRowSpacing( 10,10 )

    # Highlight rows by status (column 6): overdue = red on pink,
    # pending = blue on pale blue.  Must precede Preview().
    red = wx.NamedColour('RED')
    blue = wx.NamedColour('BLUE')
    for i,row in enumerate(data):
      if row[6] == ST_OVERDUE:
        for j in range(self.NF):
          prt.SetCellText( i, j, red )
          prt.SetCellColour( i, j, (255,200,200) )
      elif row[6] == ST_PENDING:
        for j in range(self.NF):
          prt.SetCellText( i, j, blue )
          prt.SetCellColour( i, j, (200,200,255) )

    prt.Preview() # Print()

  def on_save( self, event ):
    """Write the database to disk and refresh the status line."""
    self.db.save()
    self.refresh_status_line()

  def on_load( self, event ):
    """Let the user pick a .tdb file and load it as the current database."""
    dlg = wx.FileDialog( self, message="Choose a file",
      defaultDir=os.getcwd(), # start browsing in the current directory
      defaultFile="",
      wildcard="Invoice Track database (*.tdb)|*.tdb|All files (*.*)|*.*",
      style=wx.OPEN | wx.CHANGE_DIR ) # allow to change current dir
    if dlg.ShowModal() == wx.ID_OK:
      self._load_db( dlg.GetPath() )
    dlg.Destroy()

  def on_filter( self, event ):
    """Choose a record filter from a list, re-apply it and keep the
    previously selected record selected (when there was one)."""
    current = self.records[ self.curr ] if self.curr >= 0 else None
    dlg = wx.SingleChoiceDialog( self,
      'Please select the filter to apply', 'The Filter',
      self.filter_descrs,
      wx.CHOICEDLG_STYLE )
    if dlg.ShowModal() == wx.ID_OK:
      self.filter_no = dlg.GetSelection()
      self.filter_fn = self.filters[self.filter_no]
      self.check_overdue( dont_refresh=True )
      self.refresh()
      if current:
        self._goto_rec( current )
    dlg.Destroy()

  def exit( self ):
    """Stop the timer; if the DB has unsaved changes, offer to save it
    and then drop the DB reference so this runs only once."""
    self.timer.Stop()
    if self.db and self.db.dirty:
      mg = "Save changes to '%s'?" % self.db_name
      msg = wx.MessageDialog( self, mg, "Exiting", wx.YES|wx.NO )
      res = msg.ShowModal()
      msg.Destroy()
      if res == wx.ID_YES:
        self.db.save()
      self.db = None

  def on_close( self, event ):
    """Handle both the 'Close' command and the window 'X' button:
    run the exit logic, then destroy the frame."""
    self.exit()
    self.Destroy()

  def __del__(self):
    """Safety net: make sure exit() runs even if on_close never fired."""
    self.exit()

  def on_info( self, event ):
    """Show statistics: record counts and amount totals per status
    (done / pending / overdue), overall total and mean, and per-year
    totals keyed by the due-date year.

    Fixes: the accumulator no longer shadows the builtin `sum`, and
    `y_sum.keys(); years.sort()` (Python-2-only — keys() is a view in
    Python 3) is replaced by sorted(), which works on both.
    """
    num = self.n_records
    if num == 0:
      MsgBox( self, "No records", "No statistics" )
      return
    total = 0.0
    d = 0; d_sum = 0.0   # done
    p = 0; p_sum = 0.0   # pending
    o = 0; o_sum = 0.0   # overdue
    y_sum = {}           # year -> amount total
    for r in self.records:
      a = r.amount
      y = r.date_due.y
      y_sum[y] = y_sum.get(y, 0.0) + a
      total += a
      if   r.status == ST_PENDING: p += 1; p_sum += a
      elif r.status == ST_OVERDUE: o += 1; o_sum += a
      else:                        d += 1; d_sum += a
    assert num == d + p + o  # every record is in exactly one bucket
    msg = "Done: \t%d \t$%.2f\n" % (d, d_sum)
    msg += "Pending: \t%d \t$%.2f\n" % (p, p_sum)
    msg += "Overdue: \t%d \t$%.2f\n" % (o, o_sum)
    msg += "\nTotal: \t%d \t$%.2f\nMean:\t\t$%.2f\n" % (num,total,total/num)
    msg += "\nby years:\n"
    for y in sorted(y_sum):
      msg += " \t%d \t$%.2f\n" % (y, y_sum[y])
    MsgBox( self, msg, "Statistics" )

  def on_about( self, event ):
    """Show the about box: program name, version and copyright notice."""
    MsgBox( self, NAME_VERS + " (C) Georgiy Pruss 2005-2012", NAME_VERS )