def makeDLpedenoise(ctx, uubnum, count=None):
    """Create LogHandlerFile for pedestals and noise.

ctx - context object, used keys: datadir + basetime + chans
uubnum - UUB to log
count - if not None, emit <count> indexed records per measurement
        (adds an 'index' column; labels carry index=<ind>)"""
    # log file path: <datadir>/pedenoise_uXXXX-YYYYMMDD.log
    fn = ctx.datadir + ('pedenoise_u%04d' % uubnum) +\
        ctx.basetime.strftime('-%Y%m%d.log')
    prolog = """\
# Pedestals and noise
# UUB #%04d, date %s
# columns: timestamp | meas_point | set_temp""" % (
        uubnum, ctx.basetime.strftime('%Y-%m-%d'))
    if count is not None:
        prolog += " | index"
    # append per-channel column titles: pede.chN then noise.chN
    for typ, fmt in (('pede', '7.2f'), ('noise', '7.2f')):
        prolog += ''.join([' | %s.ch%d' % (typ, chan)
                           for chan in ctx.chans])
    prolog += '\n'
    if count is None:
        # single record per measurement point
        logdata = ['{timestamp:%Y-%m-%dT%H:%M:%S}', '{meas_point:4d}',
                   '{set_temp:5.1f}']
        for typ, fmt in (('pede', '7.2f'), ('noise', '7.2f')):
            logdata += ['{%s:%s}' % (item2label(
                functype='N', uubnum=uubnum, chan=chan, typ=typ), fmt)
                        for chan in ctx.chans]
        formatstr = ' '.join(logdata) + '\n'
    else:
        # one record line per index 0 .. count-1
        loglines = []
        for ind in range(count):
            logdata = ['{timestamp:%Y-%m-%dT%H:%M:%S}', '{meas_point:4d}',
                       '{set_temp:5.1f}', "%03d" % ind]
            for typ, fmt in (('pede', '7.2f'), ('noise', '7.2f')):
                logdata += ['{%s:%s}' % (item2label(functype='N',
                                                    uubnum=uubnum,
                                                    chan=chan, typ=typ,
                                                    index=ind), fmt)
                            for chan in ctx.chans]
            loglines.append(' '.join(logdata) + '\n')
        formatstr = ''.join(loglines)
    # records without 'meas_noise' key are skipped by the handler
    return LogHandlerFile(fn, formatstr, prolog=prolog,
                          skiprec=lambda d: 'meas_noise' not in d)
def makeDLlinear(ctx, uubnum):
    """Create LogHandlerFile for gain and corr. coeff.

ctx - context object, used keys: datadir + basetime + chans
uubnum - UUB to log"""
    fn = ctx.datadir + ('linear_uub%04d' % uubnum) +\
        ctx.basetime.strftime('-%Y%m%d.log')
    prolog = """\
# Linearity ADC count vs. voltage analysis
# - gain [ADC count/mV] & correlation coefficient
# UUB #%04d, date %s
# columns: timestamp | meas_point | set_temp""" % (
        uubnum, ctx.basetime.strftime('%Y-%m-%d'))
    logdata = ['{timestamp:%Y-%m-%dT%H:%M:%S}', '{meas_point:4d}',
               '{set_temp:5.1f}']
    itemr = {'functype': 'P', 'uubnum': uubnum}
    # extend header and format string in lockstep: gain.chN then lin.chN
    for typ, fmt in (('gain', '6.3f'), ('lin', '7.5f')):
        prolog += ''.join([' | %s.ch%d' % (typ, chan)
                           for chan in ctx.chans])
        logdata += ['{%s:%s}' % (item2label(itemr, chan=chan, typ=typ), fmt)
                    for chan in ctx.chans]
    prolog += '\n'
    formatstr = ' '.join(logdata) + '\n'
    # records without 'meas_pulse' key are skipped by the handler
    return LogHandlerFile(fn, formatstr, prolog=prolog,
                          skiprec=lambda d: 'meas_pulse' not in d)
def makeDLhglgratio(ctx, uubnum):
    """Create LogHandlerFile for HG/LG ratio.

ctx - context object, used keys: datadir + basetime + chans
uubnum - UUB to log"""
    # (low-gain, high-gain) channel pairs of the UUB
    CHANPAIRS = ((1, 2), (3, 4), (5, 6), (9, 10))
    fn = ctx.datadir + ('hglgratio_u%04d' % uubnum) +\
        ctx.basetime.strftime('-%Y%m%d.log')
    prolog = """\
# HG/LG ratio
# UUB #%04d, date %s
# columns: timestamp | meas_point | set_temp""" % (
        uubnum, ctx.basetime.strftime('%Y-%m-%d'))
    logdata = ['{timestamp:%Y-%m-%dT%H:%M:%S}', '{meas_point:4d}',
               '{set_temp:5.1f}']
    itemr = {'functype': 'P', 'typ': 'hglgratio', 'uubnum': uubnum}
    prolog += ''.join([' | gain.ch%d/gain.ch%d' % (ch_hg, ch_lg)
                       for (ch_lg, ch_hg) in CHANPAIRS])
    # ratio values are labeled by the LG channel of each pair
    logdata += ['{%s:5.1f}' % item2label(itemr, chan=ch_lg)
                for (ch_lg, ch_hg) in CHANPAIRS]
    prolog += '\n'
    formatstr = ' '.join(logdata) + '\n'
    # records without 'meas_pulse' key are skipped by the handler
    return LogHandlerFile(fn, formatstr, prolog=prolog,
                          skiprec=lambda d: 'meas_pulse' not in d)
def dpfilter(self, res_in):
    """Accumulate FLIR evaluation result.

checks for keys: flireval_u%04d - True/False/None
return: does not modify res_in"""
    # act only on FLIR measurement records
    if 'meas_flir' not in res_in:
        return res_in
    mp = res_in.get('meas_point', -1)
    if mp <= self.lastmp:  # avoid calling filter twice to one meas point
        self.logger.error('Duplicate call of dpfilter at measpoint %d', mp)
        return res_in
    self.lastmp = mp
    for uubnum in self.uubnums:
        label = item2label(typ='flireval', uubnum=uubnum)
        # silently skip UUBs without a FLIR result in this record
        if label in res_in:
            res = res_in[label]
            stat = self.stats[uubnum]
            # ternary result: True = ok, False = failed, None = missing
            if res is True:
                stat['ok'] += 1
            elif res is False:
                stat['failed'] += 1
                self.log(mp, uubnum, 'failed')
            elif res is None:
                stat['missing'] += 1
                self.log(mp, uubnum, 'missing')
            else:
                self.logger.error('wrong FLIR result ' + repr(res))
    self.npoints += 1
    return res_in
def dpfilter(self, res_in):
    """Count ADC ramp results, expects DPfilter_ramp applied.

return: does not modify res_in"""
    # act only on ADC ramp measurement records
    if 'meas_ramp' not in res_in:
        return res_in
    mp = res_in.get('meas_point', -1)
    if mp <= self.lastmp:  # avoid calling filter twice to one meas point
        self.logger.error('Duplicate call of dpfilter at measpoint %d', mp)
        return res_in
    self.lastmp = mp
    for uubnum in self.uubnums:
        label = item2label(typ='rampdb', uubnum=uubnum)
        # NOTE(review): direct indexing raises KeyError if DPfilter_ramp
        # did not run for this UUB — presumably guaranteed upstream; verify
        rampres = res_in[label]
        stat = self.stats[uubnum]
        if rampres == self.OK:
            stat['ok'] += 1
        elif rampres == self.MISSING:
            stat['missing'] += 1
            self.log(mp, uubnum, 'missing')
        elif rampres & self.FAILED:
            # FAILED is tested as a bitmask; log the raw result word
            stat['failed'] += 1
            self.log(mp, uubnum, 'failed', 'rampres %04x' % rampres)
        else:
            self.logger.error(
                'Wrong ADC ramp result 0x%04x for uubnum %04d',
                rampres, uubnum)
    self.npoints += 1
    return res_in
def makeDLstat(ctx, uubnum, styp):
    """Create LogHandlerFile for statistics (mean + stdev).

ctx - context object, used keys: datadir + basetime + chans
uubnum - UUB to log
styp - variable to calculate statistics for (e.g. pede or noise)"""
    # per-styp configuration: filename pattern, header text, skip key
    params = {'pede': {'fn': 'pede_u%04d',
                       'prolog': 'Pedestals statistics: mean + stdev',
                       'skiprec': 'meas_noise'},
              'noise': {'fn': 'noise_u%04d',
                        'prolog': 'Noise statistics: mean + stdev',
                        'skiprec': 'meas_noise'}}
    assert styp in params.keys()
    p = params[styp]
    fn = ctx.datadir + (p['fn'] % uubnum) +\
        ctx.basetime.strftime('-%Y%m%d.log')
    prolog = """\
# %s
# UUB #%04d, date %s
# columns: timestamp | meas_point | set_temp""" % (
        p['prolog'], uubnum, ctx.basetime.strftime('%Y-%m-%d'))
    logdata = ['{timestamp:%Y-%m-%dT%H:%M:%S}', '{meas_point:4d}',
               '{set_temp:5.1f}']
    # extend header and format string in lockstep: <styp>mean, <styp>stdev
    for typ, fmt in ((styp + 'mean', '7.2f'), (styp + 'stdev', '7.2f')):
        prolog += ''.join([' | %s.ch%d' % (typ, chan)
                           for chan in ctx.chans])
        logdata += ['{%s:%s}' % (item2label(functype='N', uubnum=uubnum,
                                            chan=chan, typ=typ), fmt)
                    for chan in ctx.chans]
    prolog += '\n'
    formatstr = ' '.join(logdata) + '\n'
    return LogHandlerFile(fn, formatstr, prolog=prolog,
                          skiprec=lambda d: p['skiprec'] not in d)
def check_minmax(self, res_in, typ, uubnum, failed, missing, comments,
                 flabel=None, freq=None):
    """Check values of <typ> for one UUB against self.limits[typ].

res_in - dict with measured values (label -> value)
typ - limit type, key into self.limits and FUNCTYPE
uubnum - UUB to check
failed, missing - dicts {chan: bool}, updated in place
comments - list of str, human-readable findings appended in place
flabel - frequency label; if given, functype 'F' labels are used
freq - frequency in Hz, only for comment text (used with flabel)"""
    # functype per limit type; None = label carries no functype
    FUNCTYPE = {'noisemean': 'N', 'pedemean': 'N', 'pedestdev': 'N',
                'gain': 'P', 'lin': 'P', 'hglgratio': 'P', 'cutoff': None}
    item = {'typ': typ, 'uubnum': uubnum}
    freqstr = ''
    if flabel is not None:
        # frequency-dependent quantity: 'F' labels, freq noted in comments
        item['functype'] = 'F'
        item['flabel'] = flabel
        freqstr = ' freq %.2fMHz' % (freq / 1e6)
    elif FUNCTYPE[typ] is not None:
        item['functype'] = FUNCTYPE[typ]
    # limits[typ]: {chan: (min, max)}; None bound = unchecked
    for chan, minmax in self.limits[typ].items():
        label = item2label(item, chan=chan)
        if label in res_in:
            val = res_in[label]
            if minmax[0] is not None and val < minmax[0]:
                failed[chan] = True
                comments.append('min %s @%s chan %d' % (typ, freqstr, chan))
            if minmax[1] is not None and val > minmax[1]:
                failed[chan] = True
                comments.append('max %s @%s chan %d' % (typ, freqstr, chan))
        else:
            # value absent from record
            missing[chan] = True
            comments.append('missing %s for%s chan %d' % (typ, freqstr, chan))
def dpfilter(self, res_in):
    """Count linear gain results, expects linear filter applied.

return: res_in + evalpulse_u<uubnum>_c<chan>P"""
    # act only on pulse measurement records
    if 'meas_pulse' not in res_in:
        return res_in
    mp = res_in.get('meas_point', -1)
    if mp <= self.lastmp:  # avoid calling filter twice to one meas point
        self.logger.error('Duplicate call of dpfilter at measpoint %d', mp)
        return res_in
    self.lastmp = mp
    res_out = res_in.copy()
    for uubnum in self.uubnums:
        # per-channel verdicts, reset for each UUB
        failed = {chan: False for chan in range(1, 11)}
        missing = {chan: False for chan in range(1, 11)}
        comments = []
        # apply every configured limit criterion
        for crit in self.limits.keys():
            self.check_minmax(res_in, crit, uubnum, failed, missing,
                              comments)
        comment = ', '.join(comments) if comments else ''
        stat = self.stats[uubnum]  # shortcut
        # UUB verdict priority: failed > missing > ok
        if any(failed.values()):
            stat['failed'] += 1
            self.log(mp, uubnum, 'failed', comment)
        elif any(missing.values()):
            stat['missing'] += 1
            self.log(mp, uubnum, 'missing', comment)
        else:
            stat['ok'] += 1
        # publish per-channel verdicts; missing channels get no key
        for chan in range(1, 11):
            label = item2label(typ='evalpulse', functype='P',
                               uubnum=uubnum, chan=chan)
            if failed[chan]:
                res_out[label] = False
            elif not missing[chan]:
                res_out[label] = True
    self.npoints += 1
    return res_out
def makeDLfhglgratio(ctx, uubnum, freqs):
    """Create LogHandlerFile for HG/LG ratio.

ctx - context object, used keys: datadir + basetime + chans
uubnum - UUB to log
freqs - list of frequencies to log"""
    # (low-gain, high-gain) channel pairs of the UUB
    CHANPAIRS = ((1, 2), (3, 4), (5, 6), (9, 10))
    fn = ctx.datadir + ('fhglgratio_u%04d' % uubnum) +\
        ctx.basetime.strftime('-%Y%m%d.log')
    prolog = """\
# HG/LG ratio
# UUB #%04d, date %s
# columns: timestamp | meas_point | set_temp | flabel | freq [MHz]""" % (
        uubnum, ctx.basetime.strftime('%Y-%m-%d'))
    prolog += ''.join([' | gain.ch%d/gain.ch%d' % (ch_hg, ch_lg)
                       for (ch_lg, ch_hg) in CHANPAIRS])
    prolog += '\n'
    itemr = {'functype': 'F', 'typ': 'fhglgratio', 'uubnum': uubnum}
    # one record line per frequency
    loglines = []
    for freq in freqs:
        flabel = float2expo(freq, manlength=3)
        itemr['flabel'] = flabel
        logdata = ['{timestamp:%Y-%m-%dT%H:%M:%S}', '{meas_point:4d}',
                   '{set_temp:5.1f}',
                   '%-4s %6.2f' % (flabel, freq / 1e6)]
        # ratio values are labeled by the LG channel of each pair
        logdata += ['{%s:5.1f}' % item2label(itemr, chan=ch_lg)
                    for (ch_lg, ch_hg) in CHANPAIRS]
        loglines.append(' '.join(logdata) + '\n')
    formatstr = ''.join(loglines)
    # records without 'meas_freq' key are skipped by the handler
    return LogHandlerFile(fn, formatstr, prolog=prolog,
                          skiprec=lambda d: 'meas_freq' not in d)
def makeDLfreqgain(ctx, uubnum, freqs):
    """Create LogHandlerFile for gain and corr. coeff.

ctx - context object, used keys: datadir + basetime + chans
uubnum - UUB to log
freqs - list of frequencies to log"""
    fn = ctx.datadir + ('fgain_uub%04d' % uubnum) +\
        ctx.basetime.strftime('-%Y%m%d.log')
    prolog = """\
# Frequency dependent gain ADC count vs. voltage analysis
# - freqgain [ADC count/mV] & correlation coefficient
# UUB #%04d, date %s
# columns: timestamp | meas_point | set_temp | flabel | freq [MHz]""" % (
        uubnum, ctx.basetime.strftime('%Y-%m-%d'))
    # per-channel column titles: fgain.chN then flin.chN
    for typ in ('fgain', 'flin'):
        prolog += ''.join([' | %s.ch%d' % (typ, chan)
                           for chan in ctx.chans])
    prolog += '\n'
    itemr = {'functype': 'F', 'uubnum': uubnum}
    # one record line per frequency
    loglines = []
    for freq in freqs:
        flabel = float2expo(freq, manlength=3)
        itemr['flabel'] = flabel
        logdata = ['{timestamp:%Y-%m-%dT%H:%M:%S}', '{meas_point:4d}',
                   '{set_temp:5.1f}',
                   '%-4s %6.2f' % (flabel, freq / 1e6)]
        for typ, fmt in (('fgain', '6.3f'), ('flin', '7.5f')):
            logdata += ['{%s:%s}' % (item2label(itemr, chan=chan, typ=typ),
                                     fmt)
                        for chan in ctx.chans]
        loglines.append(' '.join(logdata) + '\n')
    formatstr = ''.join(loglines)
    # records without 'meas_freq' key are skipped by the handler
    return LogHandlerFile(fn, formatstr, prolog=prolog,
                          skiprec=lambda d: 'meas_freq' not in d)
def _collect(self, d, uubnum, typ, flabel=None):
    """Gather per-channel values of type <typ> (+flabel) for UUB <uubnum>.

d - dict with measured values (label -> value)
uubnum - UUB to collect data for
typ - quantity type ('gain', 'fgain', 'pede', 'noise', ...)
flabel - frequency label, used only with typ == 'fgain'
return: list of 10 values (None where a channel has no data)
        or None if no data available at all"""
    item = {'uubnum': uubnum, 'typ': typ}
    if typ == 'gain':
        item['functype'] = 'P'
    elif typ == 'fgain':
        item['functype'] = 'F'
        item['flabel'] = flabel
    elif typ in ('pede', 'pedemean', 'pedestdev',
                 'noise', 'noisemean', 'noisestdev'):
        item['functype'] = 'N'
    # fill a fixed 10-slot list directly; NaN counts as absent
    values = [None] * 10
    found = False
    for slot in range(10):
        value = d.get(item2label(item, chan=slot + 1), None)
        if value is not None and not math.isnan(value):
            values[slot] = value
            found = True
    return values if found else None
def makeDLcutoff(ctx, uubnum):
    """Create LogHandlerFile for frequency cut-off.

ctx - context object, used keys: datadir + basetime + chans
uubnum - UUB to log"""
    fn = ctx.datadir + ('cutoff_uub%04d' % uubnum) +\
        ctx.basetime.strftime('-%Y%m%d.log')
    # fix: no trailing whitespace/newline after 'set_temp' so the
    # cutoff.chN column titles continue the same header line,
    # consistent with the other makeDL* log makers
    prolog = """\
# Cut-off frequency [MHz]
# UUB #%04d, date %s
# columns: timestamp | meas_point | set_temp""" % (
        uubnum, ctx.basetime.strftime('%Y-%m-%d'))
    prolog += ''.join([' | cutoff.ch%d' % chan
                       for chan in ctx.chans]) + '\n'
    logdata = ['{timestamp:%Y-%m-%dT%H:%M:%S}', '{meas_point:4d}',
               '{set_temp:5.1f}']
    itemr = {'uubnum': uubnum, 'typ': 'cutoff'}
    logdata += ['{%s:5.2f}' % item2label(itemr, chan=chan)
                for chan in ctx.chans]
    formatstr = ' '.join(logdata) + '\n'
    # records without 'meas_freq' key are skipped by the handler
    return LogHandlerFile(fn, formatstr, prolog=prolog,
                          skiprec=lambda d: 'meas_freq' not in d)
def run(self):
    """Main thread loop: wait for timer ticks and dispatch power actions.

Handled timer flags:
  meas.sc       - read supply currents and push them to q_resp
  power         - sub-dict of actions: rz_tout, pczero, pccalib,
                  pcoff, pcon, check, volt_ramp
  power.pccheck - collect boot-time / voltage-ramp results gathered
                  by the readZone thread and push them to q_resp"""
    if self.timer is None or self.q_resp is None:
        self.logger.error('timer or q_resp instance not provided, exiting')
        return
    tid = syscall(SYS_gettid)
    self.logger.debug('run start, name %s, tid %d',
                      threading.current_thread().name, tid)
    # background thread periodically reading power-control zones
    self.rz_thread = threading.Thread(target=self.readZone,
                                      name='Thread-readZone')
    self.rz_thread.start()
    while True:
        self.timer.evt.wait()
        if self.timer.stop.is_set():
            break
        timestamp = self.timer.timestamp   # store info from timer
        flags = self.timer.flags
        # process pending UUB removals before handling flags
        while self.uubnums2del:
            self._removeUUB(self.uubnums2del.pop())
        if 'meas.sc' in flags:
            # read total currents per UUB and publish them
            currents = self._readCurrents()
            res = {item2label(typ='itot', uubnum=uubnum): currents[port]
                   for uubnum, port in self.uubnums.items()}
            res['timestamp'] = timestamp
            res['meas_sc'] = True
            self.q_resp.put(res)
        if 'power' in flags:
            if 'rz_tout' in flags['power']:
                # (re)configure readZone timeout; None restores default
                tout = flags['power']['rz_tout']
                if tout is None:
                    self.logger.debug(
                        'set power.rz_tout to default value %.1fs',
                        self.RZ_TOUT)
                    self.rz_tout = self.RZ_TOUT
                else:
                    self.logger.debug('set power.rz_tout %.1fs', tout)
                    self.rz_tout = tout
                self.rz_wake.set()
            if flags['power'].get('pczero', False):
                self.zeroTime()
            if flags['power'].get('pccalib', False):
                self.calibrateTime()
            # valid uubs for pcon/pcoff: <list>, True, None
            if 'pcoff' in flags['power']:
                self.switch(False, flags['power']['pcoff'])
            if 'pcon' in flags['power']:
                uubs = flags['power']['pcon']
                del self.curzones[:]
                self.switch(True, uubs)
            if 'check' in flags['power']:
                # start boot-time check; warns but overrides a running one
                if self.boottime or self.bootvolt:
                    self.logger.warning('pcon: already under check')
                self.bootvolt = None
                self.boottime = True
                self.chk_ts = timestamp
            if 'volt_ramp' in flags['power']:
                if self.boottime or self.bootvolt:
                    self.logger.warning('voltramp: already under check')
                del self.curzones[:]
                bv = flags['power']['volt_ramp']
                # ramp direction derived from start/end voltages
                bv['up'] = bv['volt_start'] < bv['volt_end']
                self.bootvolt = bv
                self.boottime = self.bootvolt['start']
                self.chk_ts = timestamp
        if 'power.pccheck' in flags:
            if not self.bootvolt and not self.boottime:
                self.logger.error('not after pcon or volt_ramp')
                continue
            # trigger readZone and wait for done
            self.rz_done.clear()
            self.rz_wake.set()
            if not self.rz_done.wait(1.0):
                self.logger.warning('Timeout on readZone done')
            res = {'timestamp': self.chk_ts}
            if self.bootvolt:
                direction = 'up' if self.bootvolt['up'] else 'down'
                state = 'on' if self.bootvolt['start'] else 'off'
                res['volt_ramp'] = (direction, state)
                typ = 'voltramp' + direction + state
                for uubnum in self.uubnums:
                    label = item2label(typ=typ, uubnum=uubnum)
                    try:
                        res[label] = self._voltres(uubnum)
                    except IndexError:
                        # IndexError: presumably too few samples collected
                        # by readZone — TODO confirm against _voltres
                        self.logger.warning('voltage for %s not available',
                                            label)
                self.bootvolt = None
            if self.boottime:
                res['boottimes'] = True
                for uubnum in self.uubnums:
                    try:
                        t1, t2 = self._boottime(uubnum)
                    except IndexError:
                        self.logger.warning(
                            'boottime for %04d not available', uubnum)
                        continue
                    res[item2label(typ='boottime', uubnum=uubnum)] = t1
                    res[item2label(typ='adcinittime',
                                   uubnum=uubnum)] = (t2 - t1)
                self.boottime = None
            self.q_resp.put(res)
    self.logger.info('run finished')
def dpfilter(self, res_in):
    """Count frequency gain results, expects cut-off filter applied.

return: res_in + evalfgain_u<uubnum>_c<chan>_f<flabel>F
               + evalcutoff_u<uubnum>_c<chan>F"""
    # act only on frequency measurement records
    if 'meas_freq' not in res_in:
        return res_in
    mp = res_in.get('meas_point', -1)
    if mp <= self.lastmp:  # avoid calling filter twice to one meas point
        self.logger.error('Duplicate call of dpfilter at measpoint %d', mp)
        return res_in
    self.lastmp = mp
    res_out = res_in.copy()
    for uubnum in self.uubnums:
        # fix: reset verdict flags per UUB; previously they were
        # initialized once before the loop, so one failing/missing UUB
        # made every subsequent UUB count as failed/missing too
        anyfailed = anymissing = False
        comments = []
        for flabel, freq in self.flabels.items():
            failed = {chan: False for chan in range(1, 11)}
            missing = {chan: False for chan in range(1, 11)}
            # check fgain: limits derived from 'gain' scaled by freqdep
            ffactor = self.freqdep[flabel]
            self.limits['fgain'] = {chan: [None, None]
                                    for chan in range(1, 11)}
            for chan, minmax in self.limits['gain'].items():
                for i, val in enumerate(minmax):
                    if val is not None:
                        self.limits['fgain'][chan][i] = ffactor * val
            self.check_minmax(res_in, 'fgain', uubnum, failed, missing,
                              comments, flabel, freq)
            # check HG/LG ratio
            self.check_minmax(res_in, 'fhglgratio', uubnum, failed, missing,
                              comments, flabel, freq)
            # check flin: only an upper bound, per frequency
            linmax = self.flin[flabel]
            self.limits['flin'] = {chan: (None, linmax)
                                   for chan in range(1, 11)}
            self.check_minmax(res_in, 'flin', uubnum, failed, missing,
                              comments, flabel, freq)
            # publish per-channel verdicts; missing channels get no key
            for chan in range(1, 11):
                label = item2label(typ='evalfgain', functype='F',
                                   uubnum=uubnum, flabel=flabel, chan=chan)
                if failed[chan]:
                    res_out[label] = False
                elif not missing[chan]:
                    res_out[label] = True
            if any(failed.values()):
                anyfailed = True
            if any(missing.values()):
                anymissing = True
        # check cut-off frequency (not frequency-label dependent)
        failed = {chan: False for chan in range(1, 11)}
        missing = {chan: False for chan in range(1, 11)}
        self.check_minmax(res_in, 'cutoff', uubnum, failed, missing,
                          comments)
        for chan in range(1, 11):
            label = item2label(typ='evalcutoff', functype='F',
                               uubnum=uubnum, chan=chan)
            if failed[chan]:
                res_out[label] = False
            elif not missing[chan]:
                res_out[label] = True
        if any(failed.values()):
            anyfailed = True
        if any(missing.values()):
            anymissing = True
        stat = self.stats[uubnum]  # shortcut
        comment = ', '.join(comments)
        # UUB verdict priority: failed > missing > ok
        if anyfailed:
            stat['failed'] += 1
            self.log(mp, uubnum, 'failed', comment)
        elif anymissing:
            stat['missing'] += 1
            self.log(mp, uubnum, 'missing', comment)
        else:
            stat['ok'] += 1
    self.npoints += 1
    return res_out
logger.info(msg) print(msg, file=sys.stderr) afg.setParams(**afg_dict) if 'splitmode' in item_dict: splitmode = item_dict['splitmode'] pc.splitterMode = splitmode else: splitmode = None mdoSetVert(pc.splitterMode, item_dict['voltage'], mdo, splitgain, offsets, logger) sleep(TOUT_PREP) trigger() logger.debug('trigger sent') sleep(TOUT_DAQ) for mdoch, splitch in splitgain.mdomap.items(): fname = datadir + item2label(item_dict, splitch=splitch) + '.txt' res = mdo.readWFM(mdoch, fn=fname) # logger.info('saving %s', fname+'.txt') # np.savetxt(fname, res[0], fmt='%8.5f') # logger.debug('saved') for resol in ('hr', 'lr'): if not hsf[resol]: continue start, stop, step = dataslice['pulse_' + resol] N = (stop - start) // step yall = res[0][start:stop:step].reshape((N, 1)) resfit = hsf[resol].fit(yall, HalfSineFitter.CHI) ampli = resfit['ampli'][0] datapulses.write( datapulses_formstr.format(splitch=splitch, resol=resol,
def run(self):
    """Main thread loop: wait for timer ticks and execute FLIR actions
(snapshot / download / delete) requested via timer 'flir' flags."""
    tid = syscall(SYS_gettid)
    self.logger.debug('run start, name %s, tid %d',
                      threading.current_thread().name, tid)
    downloaded = []   # images fetched from the camera, not yet deleted
    while True:
        self.timer.evt.wait()
        if self.timer.stop.is_set():
            self.logger.info('Timer stopped, ending run()')
            return
        timestamp = self.timer.timestamp   # store info from timer
        flags = self.timer.flags.get('flir', None)
        # flags = { <one or more actions:>
        #   snapshot: True/False
        #   download: True/False
        #   delete: True/False
        #   <parameters relevant to actions:>
        #   * snapshot
        #   imagename: <str>, mandatory
        #      - internal name, unique during FLIR instance life
        #   db: raw|eval|both
        #      - store raw/eval image into DB, default None
        #   rawname: <str>, conditional if db = raw or both
        #      - attachment name for raw image
        #   evalname: <str>, mandatory if db = eval or both
        #      - attachment name for evaluation image
        #      - implies evaluation
        #   evaltitle: <str>, optional
        #      - eval. image title template for format
        #      - keys: uubnum, res, timestamp
        #   evalimname: <str>, optional
        #      - eval. image name
        #   bgimage: True/False
        #      - use as background for evaluation
        #      - contradicts evalname
        #   description: <str>, optional
        #      - description for raw/eval image to DB
        #   * download, delete
        #   imagename: <str>, optional
        #      - if present limit operation only to imagename
        flags, rec = self._checkFlags(flags)
        if flags is None:
            continue
        imagename = flags['imagename']
        if flags['snapshot']:
            self.snapshot(imagename)
            self.logger.info('Image %s stored', imagename)
            rec['timestamp'] = timestamp
            self.snapshots[imagename] = rec
        if flags['download']:
            # build list of images to fetch: one named, or all stored
            if imagename is not None:
                if imagename in self.snapshots:
                    imagelist = (imagename, )
                else:
                    self.logger.error('image <%s> not stored', imagename)
                    imagelist = []
            else:
                imagelist = list(self.snapshots.keys())
            for image in imagelist:
                fname = image + self.typ[1]
                self.getfile('images/' + fname, self.datadir + fname)
                downloaded.append(image)
                rec = self.snapshots.pop(image)
                arec = {'uubs': (self.uubnum, ), 'run': True}
                if 'description' in rec:
                    arec['description'] = rec['description']
                if rec['db'] in ('raw', 'both'):
                    arec['name'] = rec['rawname']
                    # fix: removed trailing comma that wrapped the path
                    # into a 1-tuple; 'filename' is a plain str as in the
                    # eval branch below
                    arec['filename'] = self.datadir + fname
                    self.q_att.put(arec)
                if rec['bgimage']:
                    # keep image as evaluation background only
                    self.fe.readFFF(self.datadir + fname, True)
                elif rec['evalname'] is not None:
                    # run the evaluation and publish its result
                    res, efname = self._evalFFF(
                        self.datadir + fname, timestamp,
                        rec.get('evalimname', None),
                        rec.get('evaltitle', None))
                    if self.evaluator is not None:
                        self.evaluator.writeMsg(
                            ('FLIR evaluation: ' + FLIR.FLIR_RES[res], ))
                    label = item2label(typ='flireval', uubnum=self.uubnum)
                    self.q_resp.put({'timestamp': rec['timestamp'],
                                     'meas_flir': True,
                                     label: res})
                    if rec['db'] in ('eval', 'both') and efname:
                        arec['name'] = rec['evalname']
                        arec['filename'] = efname
                        self.q_att.put(arec)
                    if efname is not None:
                        im = PIL.Image.open(efname)
                        im.show()
        if flags['delete']:
            if imagename is not None:
                if imagename in self.snapshots:
                    # stored but never downloaded — delete anyway
                    self.logger.warning(
                        'image %s to be deleted not downloaded', imagename)
                    imagelist = (imagename, )
                elif imagename not in downloaded:
                    self.logger.error('image <%s> not stored', imagename)
                    imagelist = []
                else:
                    imagelist = (imagename, )
            else:
                # no name given: delete everything downloaded so far
                imagelist, downloaded = downloaded, []
            for image in imagelist:
                self.logger.info('Deleting image %s', image)
                self.deleteimage(image)
def makeDLfampli(ctx, uubnum, keys):
    """Create LogHandlerFile for sine amplitudes.

ctx - context object, used keys: datadir + basetime + highgains + chans
uubnum - UUB to log
keys - dict with optional keys: freqs, voltages, splitmodes,
       count, comment; None treated as empty"""
    if keys is None:
        keys = {}
    # defaults: single unspecified voltage/splitmode, AFG default freq
    voltages = keys.get('voltages', (None, ))
    splitmodes = keys.get('splitmodes', (None, ))
    freqs = keys.get('freqs', (ctx.afg.param['freq'], ))
    if 'count' in keys:
        indices = range(keys['count'])
    else:
        indices = (None, )
    fn = ctx.datadir + ('fampli_uub%04d' % uubnum) +\
        ctx.basetime.strftime('-%Y%m%d.log')
    itemr = {'functype': 'F', 'typ': 'fampli', 'uubnum': uubnum}
    prolog = """\
# Amplitudes of sines depending on frequency
# UUB #%04d, date %s
""" % (uubnum, ctx.basetime.strftime('%Y-%m-%d'))
    if 'comment' in keys:
        prolog += "# %s\n" % keys['comment']
    # optional columns appear only when the corresponding key was given
    prolog += "# columns: timestamp | meas_point | set_temp | "
    prolog += "flabel | freq [MHz] | "
    if splitmodes[0] is not None:
        prolog += "splitmode | "
    if voltages[0] is not None:
        prolog += "voltage | "
    if indices[0] is not None:
        prolog += "index | "
    prolog += ' | '.join(['fampli.ch%d' % chan for chan in ctx.chans])
    prolog += '\n'
    # one record line per (freq, splitmode, voltage, index) combination
    loglines = []
    for freq in freqs:
        flabel = float2expo(freq, manlength=3)
        itemr['flabel'] = flabel
        for splitmode in splitmodes:
            for voltage in voltages:
                for ind in indices:
                    logdata = ['{timestamp:%Y-%m-%dT%H:%M:%S}',
                               '{meas_point:4d}', '{set_temp:5.1f}',
                               '%-4s %6.2f' % (flabel, freq / 1e6)]
                    if splitmode is not None:
                        logdata.append('%d' % splitmode)
                        itemr['splitmode'] = splitmode
                    if voltage is not None:
                        logdata.append('%5.3f' % voltage)
                        itemr['voltage'] = voltage
                    if ind is not None:
                        logdata.append('%03d' % ind)
                        itemr['index'] = ind
                    # channels not calculable in this configuration get
                    # the NOTCALC placeholder instead of a format field
                    logdata += [
                        NOTCALC if ctx.notcalc('F', chan, splitmode,
                                               voltage)
                        else '{%s:7.2f}' % item2label(itemr, chan=chan)
                        for chan in ctx.chans]
                    loglines.append(' '.join(logdata) + '\n')
    formatstr = ''.join(loglines)
    # records without 'meas_freq' key are skipped by the handler
    return LogHandlerFile(fn, formatstr, prolog=prolog, missing=' ~ ',
                          skiprec=lambda d: 'meas_freq' not in d)