def set_metadata_values(self):
    """Fills metadata-value cells of self.model from self.data_nda and host/user info."""
    logger.debug('in set_metadata_values')
    model = self.model
    nda = self.data_nda
    key_col, val_col = 0, 1
    # map metadata key -> zero-argument callable returning the (str) value to display
    value_for = {
        'data_size':  lambda: str(nda.size),
        'data_dtype': lambda: str(nda.dtype),
        'data_ndim':  lambda: str(nda.ndim),
        'data_shape': lambda: str(nda.shape),
        'host':       gu.get_hostname,
        'uid':        gu.get_login,
        'cwd':        gu.get_cwd,
    }
    for row in range(model.rowCount()):
        key = model.item(row, key_col).text()
        getter = value_for.get(key)
        if getter is not None:
            model.item(row, val_col).setText(getter())
    logger.info('Model document content:\n %s\n%s' % (self.info_model_dicdoc(), info_ndarr(self.data_nda, 'data n-d array ')))
def test_deploy_calib_array():
    """Exercises deploy_calib_array with a dummy pedestals array in ./calib."""
    print(80 * '_', '\nTest deploy_calib_array')
    cdir = './calib'
    if not os.path.exists(cdir):
        gu.create_directory(cdir, verb=True)
    #cdir = '/reg/d/psdm/CXI/cxi83714/calib'
    src = 'CxiDs1.0:Cspad.0'
    ctype = 'pedestals'  # renamed local to avoid shadowing builtin `type`
    run_start, run_end = 9991, None
    arr = gu.np.ones((32, 185, 388))
    cmts = {'exp': 'cxi83714',
            'ifname': 'input-file-name',
            'app': 'my-app-name',
            'comment': 'my-comment'}
    deploy_calib_array(cdir, src, ctype, run_start, run_end, arr, cmts, fmt='%.1f', pbits=3)
def deldoc(self):
    """Deletes documents (and their associated GridFS data) matching the query
    built from command-line kwargs from the detector collection of the database.
    Deletion is applied only when the 'confirm' kwarg is set; otherwise a
    confirmation request is issued after listing the matching documents.
    """
    mode, kwargs = self.mode, self.kwargs
    dbname = mu.get_dbname(**kwargs)
    client = self.client()
    if not self.check_database(client, dbname):
        return

    detname = kwargs.get('detector', None)
    if detname is None:
        logger.warning('%s needs in the collection name. Please specify the detector name.' % (mode))
        return  # FIX: collection name is undefined - was falling through with colname=None

    colname = detname
    db, fs = mu.db_and_fs(client, dbname)
    colnames = mu.collection_names(db)
    if not (colname in colnames):  # mu.collection_exists(db, colname)
        logger.warning('db "%s" does not have collection "%s"' % (db.name, str(colname)))
        return

    col = mu.collection(db, colname)
    logger.info('command mode: "%s" db: "%s" collection: "%s"' % (mode, db.name, str(colname)))

    defs = self.defs
    ctype   = kwargs.get('ctype', None)
    run     = kwargs.get('run', None)
    tsec    = kwargs.get('time_sec', None)
    tstamp  = kwargs.get('time_stamp', None)
    vers    = kwargs.get('version', None)
    confirm = kwargs.get('confirm', False)

    # build the query from parameters which differ from their defaults
    # or were explicitly given in the command line
    query = {'detector': detname}
    if ctype != defs['ctype']: query['ctype'] = ctype
    if run != defs['run']: query['run'] = run
    if vers != defs['version']: query['version'] = vers
    #if tsec != defs['time_sec'] : query['time_sec'] = tsec
    if gu.is_in_command_line('-s', '--time_sec'): query['time_sec'] = tsec
    if gu.is_in_command_line('-t', '--time_stamp'): query['time_stamp'] = tstamp
    logger.info('query: %s' % str(query))

    docs = mu.find_docs(col, query)
    if docs is None or docs.count() == 0:
        logger.warning('Can not find document for query: %s' % str(query))
        return

    for i, doc in enumerate(docs):
        msg = ' deldoc %2d:'%i + doc['time_stamp'] + ' ' + str(doc['time_sec'])\
            + ' %s'%doc['ctype'].ljust(16) + ' %4d'%doc['run'] + ' ' + str(doc['id_data'])
        logger.info(msg)
        if confirm:
            mu.delete_document_from_collection(col, doc['_id'])
            mu.del_document_data(doc, fs)

    if not confirm:
        mu.request_confirmation()
def config_logger(self, log_fname='log.txt'):
    """Configures python logging for this widget: builds a Formatter and a
    file or stream handler (depending on self.save_log_at_exit), registers
    QWFilter(self) on the handler to intercept messages, and applies the level.

    Parameters
    - log_fname - (str) log file name, used when self.save_log_at_exit is set
    """
    self.append_qwlogger('Start logger\nLog file: %s' % log_fname)
    levname = self.log_level.value()
    level = self.dict_name_to_level[levname] # e.g. logging.DEBUG
    tsfmt = '%Y-%m-%dT%H:%M:%S'
    # short format (no timestamp) in DEBUG mode, full format otherwise
    fmt = '%(levelname)s %(name)s: %(message)s' if level==logging.DEBUG else\
          '%(asctime)s %(levelname)s %(name)s: %(message)s'
    #sys.stdout = sys.stderr = open('/dev/null', 'w')
    self.formatter = logging.Formatter(fmt, datefmt=tsfmt)
    #logger.addFilter(QWFilter(self)) # register self for callback from filter
    # TRICK: add filter to handler to intercept ALL messages
    if self.save_log_at_exit:
        # absolute paths need deeper directory creation than relative ones
        depth = 6 if log_fname[0] == '/' else 1
        gu.create_path(log_fname, depth, mode=0o0777)
        self.handler = logging.FileHandler(log_fname, 'w')
    else:
        self.handler = logging.StreamHandler()
    self.handler.addFilter(QWFilter(self))
    #self.handler.setLevel(logging.NOTSET) # level
    self.handler.setFormatter(self.formatter)
    logger.addHandler(self.handler)
    self.set_level(levname) # pass level name
def save_txt(fname='nda.txt', arr=None, cmts=(), fmt='%.1f', verbos=False, addmetad=True):
    """Save n-dimensional numpy array to text file with metadata.

    Parameters
    - fname    - (str) file name for text file,
    - arr      - (np.ndarray) numpy array to save,
    - cmts     - (sequence of str) comments which will be saved in the file header,
    - fmt      - (str) format of array values in the file,
    - verbos   - (bool) print confirmation message,
    - addmetad - (bool) add DTYPE/NDIM/DIM records to the header.
    """
    recs = ['# %s' % cmt for cmt in cmts]
    recs.append('\n# HOST %s' % gu.get_hostname())
    recs.append('# WORK_DIR %s' % gu.get_cwd())
    recs.append('# FILE_NAME %s' % fname)
    recs.append('# DATE_TIME %s' % gu.str_tstamp(fmt='%Y-%m-%dT%H:%M:%S'))
    recs.append('# UID %s' % gu.get_login())
    recs.append('# SHAPE %s' % str(arr.shape).replace(' ',''))
    recs.append('# DATATYPE %s' % str(arr.dtype))

    if addmetad:
        recs.append('\n# DTYPE %s' % str(arr.dtype))
        recs.append('# NDIM %s' % len(arr.shape))
        for i in range(len(arr.shape)):
            recs.append('# DIM:%d %s' % (i, arr.shape[i]))

    arr2d = nu.reshape_nda_to_2d(arr) # pretty formatting

    recs.append('' if len(arr.shape)>1 else '\n')
    nline = '\n' if len(arr.shape)>1 else ' '
    hdr = '\n'.join(recs)
    # FIX: save the 2-d reshaped view; arr2d was computed but the original
    # passed arr, and np.savetxt can not handle arrays with ndim > 2.
    np.savetxt(fname, arr2d, fmt, delimiter=' ', newline=nline, header=hdr, comments='')

    if verbos:
        print('File %s is saved' % fname)
def move_recs_to_archive(procname, exp, runs):
    """Move expired run records from log file to archive file.

    Parameters
    - procname - (str) process name used to build the log/archive file names
    - exp      - (str) experiment name
    - runs     - (collection of str) 4-char run numbers whose records are archived
    """
    fname_log = log_file(exp, procname)
    fname_arc = arc_file(exp, procname)
    print('Move records for old runs to archive file: %s\n' % fname_arc)

    recs = gu.load_textfile(fname_log).split('\n')
    # each record begins with the 4-character run number
    recs_log = [rec for rec in recs if not (rec[:4] in runs)]
    recs_arc = [rec for rec in recs if rec[:4] in runs]

    text_log = '\n'.join(recs_log)
    text_log += '\n'
    text_arc = '\n'.join(recs_arc)
    text_arc += '\n'

    gu.save_textfile(text_log, fname_log, mode='w')
    os.chmod(fname_log, 0o664)
    gu.save_textfile(text_arc, fname_arc, mode='a')
    # FIX: the permissions must be set on the archive file
    # (original chmod-ed fname_log a second time)
    os.chmod(fname_arc, 0o664)
def makeCalibFileName(self, src, type, run_start, run_end=None):
    """Returns calibration file name for the given source, type and validity
    run range, creating intermediate directories; returns None on any problem.
    """
    warn = self.pbits & 1
    if os.path.basename(self.cdir.rstrip('/')) != 'calib':
        if warn:
            print('WARNING! NOT calib DIRECTORY: %s' % self.cdir)
        return None
    # there have been problems with calib-dir mounts on the mon nodes;
    # check explicitly instead of asserting
    if not os.path.isdir(self.cdir):
        print('WARNING! psana calib-dir is not found: %s' % self.cdir)
        return None
    if not self._setGroup(src):
        return None
    if run_start < 0:
        if warn:
            print('WARNING! START RUN NUMBER IS NEGATIVE: %d' % run_start)
        return None
    if run_start > 9999:
        if warn:
            print('WARNING! START RUN NUMBER EXCEEDS 4-DIGITS: %d' % run_start)
        return None

    if run_end is None:
        self.cfname = '%d-end.data' % (run_start)
    else:
        if run_end < 0:
            if warn:
                print('WARNING! END RUN NUMBER IS NEGATIVE: %d' % run_end)
            return None
        if run_end > 9999:
            if warn:
                print('WARNING! END RUN NUMBER IS TOO BIG: %d' % run_end)
            return None
        if run_end < run_start:
            if warn:
                print('WARNING! END RUN:%d < START RUN:%d' % (run_end, run_start))
            return None
        self.cfname = '%d-%d.data' % (run_start, run_end)

    # build <cdir>/<group>/<src>/<type>, creating each level
    path = self.cdir
    for subdir in (self.group, src, type):
        path = os.path.join(path, subdir)
        gu.create_directory(path, self.pbits)
    return os.path.join(path, self.cfname)
def __init__(self, cp, show_buttons=True):
    """Logger GUI widget: text window plus log-level selector and control buttons.

    Parameters
    - cp - configuration object providing log_level, log_prefix, log_file parameters
    - show_buttons - (bool) connect control buttons to callbacks
    """
    QWidget.__init__(self, parent=None)
    self.log_level = cp.log_level
    self.log_prefix = cp.log_prefix
    self.log_file = cp.log_file # DEPRICATED

    log_fname = log_file_name(self.log_prefix.value())
    # absolute paths need deeper directory creation than relative ones
    depth = 6 if log_fname[0] == '/' else 1
    gu.create_path(log_fname, depth, mode=0o0777)
    #print('Log file: %s' % log_fname)

    self.show_buttons = show_buttons
    cp.qwloggerstd = self

    #logger.debug('logging.DEBUG: ', logging.DEBUG)
    # FIX: logger.debug takes lazy %-style arguments; the original passed an
    # extra positional argument with no placeholder, causing a logging
    # formatting error in handlers.
    logger.debug('logging._levelToName: %s', logging._levelToName) # {0: 'NOTSET', 50: 'CRITICAL', 20: 'INFO',...
    logger.debug('logging._nameToLevel: %s', logging._nameToLevel) # {'NOTSET': 0, 'ERROR': 40, 'WARNING': 30,...

    self.dict_level_to_name = logging._levelToName
    self.dict_name_to_level = logging._nameToLevel
    self.level_names = list(logging._levelToName.values())

    self.edi_txt = QTextEdit('Logger window')
    self.lab_level = QLabel('Log level:')
    self.but_close = QPushButton('&Close')
    self.but_save = QPushButton('&Save log-file')
    self.but_rand = QPushButton('&Random')
    self.cmb_level = QComboBox(self)
    self.cmb_level.addItems(self.level_names)
    self.cmb_level.setCurrentIndex(self.level_names.index(self.log_level.value()))

    self.hboxM = QHBoxLayout()
    self.hboxM.addWidget(self.edi_txt)

    self.hboxB = QHBoxLayout()
    self.hboxB.addStretch(4)
    self.hboxB.addWidget(self.lab_level)
    self.hboxB.addWidget(self.cmb_level)
    self.hboxB.addWidget(self.but_rand)
    self.hboxB.addStretch(1)
    self.hboxB.addWidget(self.but_save)
    self.hboxB.addWidget(self.but_close)

    self.vbox = QVBoxLayout()
    self.vbox.addLayout(self.hboxM)
    self.vbox.addLayout(self.hboxB)
    self.setLayout(self.vbox)

    if self.show_buttons:
        self.connect_buttons()

    self.set_style()
    self.set_tool_tips()
    self.config_logger(log_fname)
def log_file_name(lfpath):
    """Returns (str) log file name like
       /reg/g/psdm/logs/calibman/lcls2/2018/20180518T122407-dubrovin.txt
    """
    stamp = gu.str_tstamp('%Y%m%dT%H%M%S', gu.time())
    year = gu.str_tstamp('%Y', time_sec=None)
    return '%s/%s/%s-%s.txt' % (lfpath, year, stamp, gu.get_login())
def scan_calib_for_experiment(exp='cxix25615', **kwargs):
    """Scans the experiment calib directory tree
    (<dircalib>/<calib-version>/<detector>/<ctype>/*.data) and adds every
    found calibration file to the calibration DB via add_calib_file_to_cdb.
    Refuses to run if the experiment already has a database.

    Parameters
    - exp - (str) experiment name
    - kwargs - host/port/user/upwd for the DB connection, verbose flag, and
      extra parameters passed through to add_calib_file_to_cdb
    """
    host = kwargs.get('host', None)
    port = kwargs.get('port', None)
    user = kwargs.get('user', None)
    upwd = kwargs.get('upwd', None)
    verbose = kwargs.get('verbose', False)

    client = dbu.connect_to_server(host, port, user, upwd)
    dbname = dbu.db_prefixed_name(exp)
    if dbu.database_exists(client, dbname):
        msg = 'Experiment %s already has a database. Consider to delete it from the list:\n%s'%\
              (exp, str(dbu.database_names(client)))+\
              '\nBefore adding consider to delete existing DB using command: cdb deldb --dbname %s -C -u <username> -p <password>' % dbname
        logger.warning(msg)
        return

    dircalib = nm.dir_calib(exp)
    #if verbose :
    logger.info('Scan: %s' % dircalib)

    # calib version directories contain '::' (e.g. CsPad::CalibV1)
    for dir0 in gu.get_list_of_files_in_dir_for_part_fname(dircalib, pattern='::'):
        if not os.path.isdir(dir0): continue
        calibvers = os.path.basename(dir0)
        logger.debug(' %s ' % calibvers)

        # detector directories contain ':' (e.g. CxiDs1.0:Cspad.0)
        for dir1 in gu.get_list_of_files_in_dir_for_part_fname(dir0, pattern=':'):
            if not os.path.isdir(dir1): continue
            detname = os.path.basename(dir1)
            detname_m = detname_conversion(detname)
            logger.debug(' %s' % detname_m)

            # calibration type subdirectories (pedestals, geometry, ...)
            for cftype in gu.get_list_of_files_in_dir(dir1):
                if not (cftype in cc.list_calib_names): continue
                dir2 = '%s/%s' % (dir1, cftype)
                if not os.path.isdir(dir2): continue
                logger.debug(' %s' % cftype)

                cfdir = '%s/%s/%s/%s' % (dircalib, calibvers, detname, cftype)
                listdicts = history_list_of_dicts('%s/HISTORY' % cfdir, verbose)
                #logger.debug('XXX listdicts %s' % listdicts)
                count = 0
                # only *.data files (skip HISTORY and backups)
                for fname in gu.get_list_of_files_in_dir(dir2):
                    logger.debug(' %s' % fname)
                    if fname == 'HISTORY': continue
                    if os.path.splitext(fname)[1] != '.data': continue
                    logger.debug(' XXX begin adding: %s %s %s %s' % (dircalib, detname_m, cftype, fname))
                    add_calib_file_to_cdb(exp, dircalib, calibvers, detname_m, cftype, fname, cfdir, listdicts, **kwargs)
                    count += 1
                logger.info(' converted %3d files from: %s' % (count, cfdir))
def deploy_calib_array(cdir, src, type, run_start, run_end=None, arr=None, dcmts={}, fmt='%.1f', pbits=1):
    """Deploys array in calibration file

    - makes the new file name using make_calib_file_name(...)
    - if file with this name already exists - rename it with current timestamp in the name
    - save array in file
    - add history record

    Parameters
    - cdir      - (str) calib directory
    - src       - (str) data source name
    - type      - (str) calibration type
    - run_start - (int) first run of the validity range
    - run_end   - (int or None) last run of the validity range; None for open-ended
    - arr       - (np.ndarray) constants to deploy
    - dcmts     - (dict) comments saved in the file header and history record
    - fmt       - (str) format of array values in the file
    - pbits     - (int) print-control bit-word
    """
    fname = make_calib_file_name(cdir, src, type, run_start, run_end, pbits)
    path_history = '%s/HISTORY' % os.path.dirname(fname)

    if os.path.exists(fname):
        fname_bkp = '%s-%s' % (fname, gu.str_tstamp(fmt='%Y-%m-%dT%H:%M:%S'))
        os.system('cp %s %s' % (fname, fname_bkp))
        if pbits & 1:
            print('Existing file %s\nis backed-up %s' % (fname, fname_bkp))

    # extend dictionary for other parameters
    d = dict(dcmts)
    d['run'] = run_start
    d['fname'] = os.path.basename(fname)
    d['src'] = src
    d['ctype'] = type

    # make list of comments
    # FIX: dict.iteritems() is python2-only; use items() under python3
    cmts = ['%s %s' % (k.upper().ljust(11), v) for k, v in d.items()]

    # save n-dimensional numpy array in the tmp text file
    fntmp = tempfile.NamedTemporaryFile(mode='r+b', suffix='.data')
    if pbits & 2:
        print('Save constants in tmp file: %s' % fntmp.name)
    # FIX: respect the fmt argument (was hard-coded to '%.1f')
    save_txt(fntmp.name, arr, cmts, fmt=fmt)

    if pbits & 1:
        print('Deploy constants in file: %s' % fname)
    # USE cat in stead of cp and move in order to create output file with correct ACL permissions
    cmd_cat = 'cat %s > %s' % (fntmp.name, fname)
    #os.system(cmd_cat)
    stream = os.popen(cmd_cat)
    resp = stream.read()
    msg = 'Command: %s\n - resp: %s' % (cmd_cat, resp)
    if pbits & 2:
        print(msg)

    # add record to the HISTORY file
    hrec = _history_record(d)
    if pbits & 1:
        print('Add record: %sto the file: %s' % (hrec, path_history))
    gu.save_textfile(hrec, path_history, mode='a')
def append_log_file(exp='xpptut15', procname='pixel_status', runs=[], verb=0):
    """Appends one record per (str) run to the log file for the specified
       experiment and process name; does nothing for an empty run list.
    """
    path = log_file(exp, procname)
    if verb:
        print('Append log file: %s' % path)
    gu.create_path(path, depth=6, mode=0o774, verb=False)
    msg = msg_to_log(runs)
    if msg is None:
        return
    gu.save_textfile(msg, path, mode='a')
    os.chmod(path, 0o664)
def msg_to_log(runs=[]):
    """Returns (str) multi-line message for the log file, one record per (str)
       run, or None for an empty list of runs.
    """
    if not runs:
        return None
    tstamp = gu.str_tstamp('%Y-%m-%dT%H:%M:%S', time())
    login = gu.get_login()
    cwd = gu.get_cwd()
    host = gu.get_hostname()
    cmd = sys.argv[0].split('/')[-1]
    recs = ['%s %s %s %s cwd:%s cmd:%s' % (r, tstamp, login, host, cwd, cmd) for r in runs]
    return '\n'.join(recs) + '\n'
def exec_command(cmd):
    """Runs the (str) shell command through SubprocUtils.subproc and logs any output."""
    from psana.pscalib.proc.SubprocUtils import subproc
    logger.debug('Execute shell command: %s' % cmd)
    binary = cmd.split()[0]
    if not gu.shell_command_is_available(binary, verb=True):
        return
    out, err = subproc(cmd, env=None, shell=False, do_wait=True)
    if out or err:
        logger.warning('err: %s\nout: %s' % (err, out))
def print_experiments_count_runs():
    """Prints the run count of every experiment, then per-instrument and total summaries."""
    nruns_for_ins = {}
    nexps_for_ins = {}
    nruns_tot = 0
    nexps_tot = 0
    for ins in INSTRUMENTS:
        exps = experiments(ins)
        nexps_tot += len(exps)
        nexps_for_ins[ins] = len(exps)
        counter = 0
        for exp in exps:
            nruns = len(runs_in_xtc_dir(exp))
            counter += nruns
            nruns_tot += nruns
            print(' %10s nruns:%4d' % (exp, nruns))
        nruns_for_ins[ins] = counter

    print('\nSummary on %s\n%s' % (gu.str_tstamp('%Y-%m-%dT%H:%M:%S', time()), 40 * '_'))
    for ins, nruns in nruns_for_ins.items():
        print('%6d runs in %4d experiments of %s' % (nruns, nexps_for_ins[ins], ins))
    dname = '%s/<all-ins>/<all-exp>/' % DIR_INS
    print('%s\n%6d runs in %4d experiments of %s' % (40 * '_', nruns_tot, nexps_tot, dname))
def __init__(self, **kwargs):
    """Waveform peak finder wrapper.
       - wf digitizer channels (0,1,2,3,4) should be ordered for u1,u2,v1,v2[,w1,w2],mcp, respectively

    Keyword arguments are passed through to set_wf_peak_finder_parameters.
    """
    logger.debug(gu.str_kwargs(kwargs, title='WFPeaks input parameters:'))
    self.set_wf_peak_finder_parameters(**kwargs)
    # sentinel: no waveforms processed yet — presumably holds the previously
    # processed waveforms once set elsewhere; confirm against proc code
    self._wfs_old = None
def test_insert_constants(expname=TEST_EXPNAME, detname=TEST_DETNAME, ctype='test_ctype', runnum=10, data='test text sampele'):
    """ Inserts constants using direct MongoDB interface from MDBUtils.
    """
    import psana.pyalgos.generic.Utils as gu

    print('test_delete_database 1:', database_names())
    kwa = {'user': gu.get_login()}
    t0_sec = time()
    ts = gu.str_tstamp(fmt='%Y-%m-%dT%H:%M:%S%z', time_sec=t0_sec)
    # NOTE: tname is a module-level value (test number from the command line)
    payload = '%s - saved at %s' % (data, ts)
    mu.insert_constants(payload, expname, detname, ctype, runnum+int(tname), int(t0_sec),
                        time_stamp=ts, **kwa)
    print('test_delete_database 2:', database_names())
def deploy_calib_file(cdir, src, type, run_start, run_end=None, ifname='', dcmts={}, pbits=1):
    """Deploys calibration file

    - makes the new file name using make_calib_file_name(...)
    - if file with this name already exists - rename it with current timestamp in the name
    - save array in file
    - add history record
    """
    fname = make_calib_file_name(cdir, src, type, run_start, run_end, pbits)
    path_history = '%s/HISTORY' % os.path.dirname(fname)

    # back-up existing constants before overwriting
    if os.path.exists(fname):
        fname_bkp = '%s-%s' % (fname, gu.str_tstamp(fmt='%Y-%m-%dT%H:%M:%S'))
        os.system('cp %s %s' % (fname, fname_bkp))
        if pbits & 1:
            print('Existing file %s\nis backed-up %s' % (fname, fname_bkp))

    # dictionary of parameters for the history record
    d = dict(dcmts)
    d.update({'run': run_start,
              'fname': os.path.basename(fname),
              'ifname': ifname,
              'src': src,
              'ctype': type})

    if pbits & 1:
        print('Deploy constants in file: %s' % fname)

    # USE cat in stead of cp and move in order to create output file with correct ACL permissions
    cmd_cat = 'cat %s > %s' % (ifname, fname)
    resp = os.popen(cmd_cat).read()
    if pbits & 2:
        print('Command: %s\n - resp: %s' % (cmd_cat, resp))

    # add record to the HISTORY file
    hrec = _history_record(d)
    if pbits & 1:
        print('Add record: %sto the file: %s' % (hrec, path_history))
    gu.save_textfile(hrec, path_history, mode='a')
def add_calib_file_to_cdb(exp, dircalib, calibvers, detname, cftype, fname, cfdir, listdicts, **kwargs):
    """Loads one calibration file, determines its validity time range and
    inserts the data into the calibration DB via dbu.insert_calib_data.
    Returns silently if the file name can not be parsed or the data fails to load.

    Parameters
    - exp       - (str) experiment name
    - dircalib  - (str) experiment calib directory
    - calibvers - (str) calibration version directory name
    - detname   - (str) converted detector name
    - cftype    - (str) calibration type (e.g. pedestals, geometry)
    - fname     - (str) calibration file name like <begin>-<end>.data
    - cfdir     - (str) directory containing fname and its HISTORY file
    - listdicts - (list of dict) parsed HISTORY records
    - kwargs    - extended in place and passed to dbu.insert_calib_data
    """
    d = history_dict_for_file(listdicts, fname)
    resp = parse_calib_file_name(fname)
    # resp is (begin-run, end-run, extension) or None for unparsable names
    begin, end, ext = resp if resp is not None else (None, None, None)
    if begin is not None: begin = int(begin)
    if None in (begin, end, ext): return

    fpath = '%s/%s' % (cfdir, fname)
    verbose = kwargs.get('verbose', False)
    # select the loader depending on the calibration type / version
    data = gu.load_textfile(fpath, verbose) if cftype in ('geometry','code_geometry') else\
           load_xtcav_calib_file(fpath) if is_xtcav(calibvers, cftype) else\
           load_txt(fpath) # using NDArrIO

    if isinstance(data, dict):
        serialize_dict(data)
        logger.debug(info_dict(data))
        #print_dict(data)
        #data = json.dumps(data) # (data,ensure_ascii=True) json.dumps converts dict -> str # .replace("'", '"') for json
        data = str(data)

    if isinstance(data, np.ndarray):
        check_data_shape(data, detname, cftype)

    begin_time, end_time = run_begin_end_time(exp, int(begin))

    if verbose:
        ndu.print_ndarr(data, 'scan calib: data')
    msg = 'scan calib: %s %s %s %s %s %s %s %s %s' % (exp, cfdir, fname, begin, end, ext, calibvers, detname, cftype)
    logger.info(msg)
    logger.info('begin_time: %s end_time: %s' % (begin_time, end_time))

    if data is None:
        msg = 'data is None, conversion is dropped for for file: %s' % fpath
        logger.warning(msg)
        return

    kwargs['run'] = begin
    kwargs['run_end'] = end
    kwargs['detector'] = detname
    kwargs['ctype'] = cftype
    kwargs['time_sec'] = begin_time
    kwargs['end_time'] = end_time
    kwargs['time_stamp'] = dbu._timestamp(begin_time)
    kwargs['extpars'] = d if d is not None else {} # just in case save entire history dict
    #kwargs['comment'] = 'HISTORY: %s' % d.get('comment', '')

    dbu.insert_calib_data(data, **kwargs)
def add(self):
    """Adds calibration constants to database from file.
    """
    kwargs = self.kwargs
    fname = kwargs.get('iofname', 'None')
    ctype = kwargs.get('ctype', 'None')
    dtype = kwargs.get('dtype', 'None')
    verb = self.loglevel == 'DEBUG'

    assert os.path.exists(fname), 'File "%s" DOES NOT EXIST' % fname
    ext = os.path.splitext(fname)[-1]

    # choose the loader from calibration/data type and file extension
    if ctype == 'geometry' or dtype in ('str', 'txt', 'text'):
        data = gu.load_textfile(fname, verb=verb)
    elif dtype == 'xtcav':
        data = load_xtcav_calib_file(fname)
    elif ext == '.npy':
        data = np.load(fname)
    elif ext == '.json' or dtype == 'json':
        data = gu.load_json(fname)
    elif ext == '.pkl' or dtype in ('pkl', 'pickle'):
        data = gu.load_pickle(fname)
    else:
        data = load_txt(fname) # input NDArrIO

    dbu.insert_calib_data(data, **kwargs)
def exportdb(self):
    """Exports database. Equivalent to:
       mongodump -d <dbname> -o <filename>
       mongodump --host psanaphi105 --port 27017 --db calib-cxi12345 --archive=db.20180122.arc
    """
    host, port, dbname, fname = self.host_port_dbname_fname()
    if fname is None:
        # default archive name with current time stamp
        tstamp = gu.str_tstamp(fmt='%Y-%m-%dT%H-%M-%S')
        fname = 'cdb-%s-%s.arc' % (tstamp, dbname)
    dbu.exportdb(host, port, dbname, fname)
def do_work(): prefix = './%s-figs-ti_vs_tj' % gu.str_tstamp( fmt='%Y-%m-%d', time_sec=None) # '%Y-%m-%dT%H:%M:%S%z' gu.create_directory(prefix, mode=0o775) path = os.path.abspath(os.path.dirname(__file__)) print('path to npy flies dir:', path) ti_vs_tj = np.load('%s/ti_vs_tj.npy' % path) t_all = np.load('%s/t_all.npy' % path) trange = (1400., 2900.) print_ndarr(ti_vs_tj, 'ti_vs_tj:\n') print_ndarr(t_all, 't_all:\n') sum_bkg = t_all.sum() sum_cor = ti_vs_tj.sum() print('sum_bkg:', sum_bkg) print('sum_cor:', sum_cor) imrange = trange + trange # (1400., 2900., 1400., 2900.) axim = gr.plotImageLarge(ti_vs_tj, img_range=imrange, amp_range=(0,500), figsize=(11,10),\ title='ti_vs_tj', origin='lower', window=(0.10, 0.08, 0.88, 0.88), cmap='inferno') gr.save('%s/fig-ti_vs_tj.png' % prefix) bkg = np.outer(t_all, t_all) / sum_bkg print_ndarr(bkg, 'bkg:\n') axim = gr.plotImageLarge(bkg, img_range=imrange, amp_range=(0,500), figsize=(11,10),\ title='bkg', origin='lower', window=(0.10, 0.08, 0.88, 0.88), cmap='inferno') gr.save('%s/fig-ti_vs_tj-bkg.png' % prefix) harr = t_all nbins = harr.size ht = HBins(trange, nbins, vtype=np.float32) # ht.binedges() fig, axhi, hi = gr.hist1d(ht.bincenters(), bins=nbins, amp_range=ht.limits(), weights=harr, color='b', show_stat=True,\ log=True, figsize=(7,6), axwin=(0.10, 0.10, 0.88, 0.85), title='1-d bkg',\ xlabel='time of all hits (ns)', ylabel='number of hits', titwin='1-d bkg') gr.save('%s/fig-time-hits.png' % prefix) gr.show()
def experiments_under_control(procname='pixel_status'):
    """Returns list of (str) experiment names from the control file;
       empty list (with a warning) when the control file is missing.
    """
    fname = control_file(procname)
    if not os.path.lexists(fname):
        #raise IOError('Control file "%s" does not exist' % fname)
        print('WARNING: control file "%s" does not exist' % fname)
        return []
    lines = gu.load_textfile(fname).split('\n')
    # keep non-empty lines which are not commented out
    return [line for line in lines if line and not line.startswith('#')]
def test_deploy_calib_file():
    """Exercises deploy_calib_file with a reference geometry file in ./calib."""
    print(80 * '_', '\nTest deploy_calib_file')
    cdir = './calib'
    if not os.path.exists(cdir):
        gu.create_directory(cdir, verb=True)
    #cdir = '/reg/d/psdm/CXI/cxi83714/calib'
    src = 'CxiDs1.0:Cspad.0'
    ctype = 'geometry'  # renamed local to avoid shadowing builtin `type`
    run_start, run_end = 9992, None
    fname = '/reg/g/psdm/detector/alignment/cspad/calib-cxi-camera1-2014-09-24/2016-06-15-geometry-cxil0216-r150-camera1-z95mm.txt'
    cmts = {'exp': 'cxi83714', 'app': 'my-app-name', 'comment': 'my-comment'}
    deploy_calib_file(cdir, src, ctype, run_start, run_end, fname, cmts, pbits=3)
def recs_in_log_file(exp='xpptut15', procname='pixel_status', verb=0):
    """Returns list of (str) records in the log file for specified experiment
       and process name; empty list when the file is missing.
       E.g. of one record: '0151 2017-10-05T15:19:21'
    """
    path = log_file(exp, procname)
    if verb:
        print('Log file: %s' % path)
    if not os.path.lexists(path):
        if verb:
            print('Log file "%s" does not exist' % path)
        return []
    # each record is '0059 <time-stamp>'
    return gu.load_textfile(path).split('\n')
def change_value(self, item, key, path):
    """Fills the model item from the file at path: for the data-file key loads
       the n-d array and refreshes metadata, otherwise pastes the file text.
    """
    logger.debug('change_value for key: %s' % (key))
    if key != self.data_fname:
        txt = gu.load_textfile(path)
        logger.info('From file %s fill field: %s' % (path, txt))
        item.setText(txt)
        return
    item.setText(str(path))
    self.data_nda = self.load_nda_from_file(path)
    logger.info(info_ndarr(self.data_nda, 'From file %s loaded array' % path))
    self.set_metadata_values()
    item.setBackground(QBrush(Qt.cyan))
def test01():
    """Tests CalibFileFinder.findCalibFile (Test 1) and the module-level
    find_calib_file / make_calib_file_name functions (Test 2) on cxi83714
    calib data. Requires access to the /reg/d/psdm file system.
    """
    # assuming /reg/d/psdm/CXI/cxid2714/calib/CsPad::CalibV1/CxiDs1.0:Cspad.0/pedestals/15-end.data
    #cdir = '/reg/d/psdm/CXI/cxid2714/calib/'
    #cdir = '/reg/d/psdm/CXI/cxi80410/calib/'
    cdir = '/reg/d/psdm/CXI/cxi83714/calib/'
    group = 'CsPad::CalibV1'
    src = 'CxiDs1.0:Cspad.0'
    type = 'pedestals'
    rnum = 134
    #rnum = 123456789

    #--------------------------
    print(80 * '_', '\nTest 1')
    print('Finding calib file for\n dir = %s\n grp = %s\n src = %s\n type= %s\n run = %d' % \
          (cdir, group, src, type, rnum))
    cff = CalibFileFinder(cdir, group, 0o377)
    fname = cff.findCalibFile(src, type, rnum)

    #--------------------------
    print(80 * '_', '\nTest 2')
    print('Test methods find_calib_file and make_calib_file_name')
    fname_existing = find_calib_file(cdir, src, type, rnum, pbits=1)
    print(' fname_existing : %s' % fname_existing)

    # new file name is made in a local ./calib directory
    cdir = './calib'
    run_start = 134
    gu.create_directory(cdir, True)
    fname_new = make_calib_file_name(cdir, src, type, run_start, run_end=None, pbits=0)
    print(' fname_new : %s' % fname_new)
def unpack(self, parser):
    """Unpacks optparse results into self.mode / self.kwargs / self.defs.

    parser parameters:
    - host - port - experiment - detector - ctype - run - run_end
    - time_stamp - time_sec - version - iofname - comment - dbname - dbsuffix
    """
    (opts, args) = parser.parse_args()
    # first positional argument is the command mode, 'print' by default
    self.mode = mode = args[0] if args else 'print'

    kwa = vars(opts)
    time_sec, time_stamp = mu.time_and_timestamp(**kwa)
    kwa['time_sec'] = int(time_sec)
    kwa['time_stamp'] = time_stamp
    kwa['cli_mode'] = mode
    self.kwargs = kwa
    self.defs = vars(parser.get_default_values())
    self.strloglev = kwa.get('strloglev', 'DEBUG').upper()

    if self.strloglev == 'DEBUG':
        print(40 * '_')
        gu.print_parser(parser)
        gu.print_kwargs(kwa)

    fmt = '%(asctime)s %(name)s %(lineno)d %(levelname)s: %(message)s'
def _setGroup(self, src):
    """If not available, sets the calibration group from the source string.
       Returns True when the group is defined, False (with optional warning) otherwise.
    """
    if self.group in ('', None):
        dettype = gu.det_type_from_source(src)
        self.group = gu.dic_det_type_to_calib_group.get(dettype)
    if self.group is None:
        if self.pbits & 1:
            print('WARNING! CALIBRATION GROUP IS NOT FOUND FOR SOURCE %s' % src)
        return False
    return True
def _history_record(dcmts):
    """Returns (str) one-line history record composed of dictionary comments
       (run, exp, ifname, fname, app, comment) and system info (user, host, time).
    """
    tstamp = gu.str_tstamp(fmt='%Y-%m-%dT%H:%M:%S zone:%Z')
    return 'file:%s copy_of:%s exp:%s run:%s app:%s user:%s host:%s cptime:%s comment:%s\n' % \
        (('%s' % dcmts.get('fname')).ljust(14),
         '%s' % dcmts.get('ifname'),
         ('%s' % dcmts.get('exp')).ljust(8),
         ('%04d' % dcmts.get('run')).ljust(4),
         ('%s' % dcmts.get('app')).ljust(10),
         gu.get_login(),
         gu.get_hostname(),
         tstamp.ljust(29),
         '%s' % dcmts.get('comment'))