def save_txt(fname='nda.txt', arr=None, cmts=(), fmt='%.1f', verbos=False, addmetad=True):
    """Save n-dimensional numpy array to text file with metadata.

    - fname - file name for text file,
    - arr - numpy array to save (must not be None),
    - cmts - list of comments which will be saved in the file header,
    - fmt - per-value format passed to np.savetxt,
    - verbos - if True, print a confirmation message,
    - addmetad - if True, append extended metadata records to the header.
    """
    recs = ['# %s' % cmt for cmt in cmts]
    recs.append('\n# HOST %s' % gu.get_hostname())
    recs.append('# WORK_DIR %s' % gu.get_cwd())
    recs.append('# FILE_NAME %s' % fname)
    recs.append('# DATE_TIME %s' % gu.str_tstamp(fmt='%Y-%m-%dT%H:%M:%S'))
    recs.append('# UID %s' % gu.get_login())
    recs.append('# SHAPE %s' % str(arr.shape).replace(' ', ''))
    recs.append('# DATATYPE %s' % str(arr.dtype))

    if addmetad:
        recs.append('\n# DTYPE %s' % str(arr.dtype))
        recs.append('# NDIM %s' % len(arr.shape))
        for i in range(len(arr.shape)):
            recs.append('# DIM:%d %s' % (i, arr.shape[i]))

    # np.savetxt handles at most 2-d arrays; higher-rank input is reshaped
    # to 2-d (the original computed arr2d but never used it, so ndim>2 crashed).
    arr2d = nu.reshape_nda_to_2d(arr)
    arr_out = arr if arr.ndim <= 2 else arr2d

    # pretty formatting: blank separator after header, per-row newline for 2-d,
    # space-separated values on one line for 1-d
    recs.append('' if len(arr.shape) > 1 else '\n')
    nline = '\n' if len(arr.shape) > 1 else ' '

    hdr = '\n'.join(recs)
    np.savetxt(fname, arr_out, fmt, delimiter=' ', newline=nline, header=hdr, comments='')

    if verbos:
        print('File %s is saved' % fname)
def set_metadata_values(self):
    """Fill metadata values in the model from attributes of self.data_nda."""
    logger.debug('in set_metadata_values')
    model = self.model
    nda = self.data_nda
    colk, colv = 0, 1
    # metadata key -> zero-argument callable producing the value text
    getters = {
        'data_size': lambda: str(nda.size),
        'data_dtype': lambda: str(nda.dtype),
        'data_ndim': lambda: str(nda.ndim),
        'data_shape': lambda: str(nda.shape),
        'host': gu.get_hostname,
        'uid': gu.get_login,
        'cwd': gu.get_cwd,
    }
    for row in range(model.rowCount()):
        key = model.item(row, colk).text()
        getter = getters.get(key)
        if getter is not None:
            model.item(row, colv).setText(getter())
    logger.info('Model document content:\n %s\n%s' % (self.info_model_dicdoc(), info_ndarr(self.data_nda, 'data n-d array ')))
def log_file_name(lfpath):
    """Returns (str) log file name like /reg/g/psdm/logs/calibman/lcls2/2018/20180518T122407-dubrovin.txt"""
    now_sec = gu.time()
    stamp = gu.str_tstamp('%Y%m%dT%H%M%S', now_sec)
    year = gu.str_tstamp('%Y', time_sec=None)
    # <lfpath>/<year>/<timestamp>-<login>.txt
    return '/'.join((lfpath, year, '%s-%s.txt' % (stamp, gu.get_login())))
def msg_to_log(runs=()):
    """Return (str) message for the log file for a list of (str) runs.

    - runs - sequence of (str) run descriptors; one log record is emitted per run.
    Returns None when runs is empty. (Default changed from mutable [] to ()
    to avoid the shared-mutable-default pitfall; behavior is unchanged.)
    """
    if not runs:
        return None
    tstamp = gu.str_tstamp('%Y-%m-%dT%H:%M:%S', time())
    login = gu.get_login()
    cwd = gu.get_cwd()
    host = gu.get_hostname()
    cmd = sys.argv[0].split('/')[-1]  # program name without its directory path
    recs = ['%s %s %s %s cwd:%s cmd:%s' % (s, tstamp, login, host, cwd, cmd) for s in runs]
    return '\n'.join(recs) + '\n'
def test_all(tname):
    """Dispatch a single test selected by (str) tname."""
    logger.info('\n%s\n' % usage())
    kwa = {'host': cc.HOST,
           'port': cc.PORT,
           'user': gu.get_login()}
    # no test-number argument given - run the default test
    if len(sys.argv) != 2:
        test_dir_calib(tname)
        return
    dispatch = {
        '1': lambda: test_dir_calib(tname),
        '2': lambda: scan_calib_for_experiment('cxix25615', **kwa),
        '3': lambda: test_detname_conversion(tname),
        '4': lambda: scan_calib_for_experiment('amox23616', **kwa),
        '5': lambda: test_get_for_url(),
        # /reg/d/psdm/AMO/amox23616/calib/Xtcav::CalibV1/XrayTransportDiagnostic.0:Opal1000.0/pedestals/104-end.data
        '6': lambda: test_run_begin_end_time(),
    }
    handler = dispatch.get(tname)
    if handler is None:
        sys.exit('Test number parameter is not recognized.\n%s' % usage())
    handler()
def test_insert_constants(expname=TEST_EXPNAME, detname=TEST_DETNAME, ctype='test_ctype', runnum=10, data='test text sampele'):
    """Inserts constants using direct MongoDB interface from MDBUtils.

    - expname/detname/ctype/runnum - document identification fields,
    - data - payload to store (timestamp is appended before insertion).
    Prints the list of database names before and after the insertion.
    """
    import psana.pyalgos.generic.Utils as gu
    # fixed print labels: they previously said 'test_delete_database' (copy-paste error)
    print('test_insert_constants 1:', database_names())
    kwa = {'user': gu.get_login()}
    t0_sec = time()
    ts = gu.str_tstamp(fmt='%Y-%m-%dT%H:%M:%S%z', time_sec=t0_sec)
    # NOTE(review): tname is read from enclosing module scope - confirm it is defined at call time
    mu.insert_constants('%s - saved at %s' % (data, ts), expname, detname, ctype,
                        runnum + int(tname), int(t0_sec),
                        time_stamp=ts, **kwa)
    print('test_insert_constants 2:', database_names())
class Constants:
    """Default values (d_*) and option help strings (h_*) for the calibration manager UI."""
    d_host = cc.HOST
    d_port = cc.PORT
    d_user = gu.get_login()  # cc.USERNAME
    d_upwd = ''
    d_experiment = 'exp12345'
    d_detector = 'detector_1234'
    d_loglevel = 'INFO'
    d_logdir = '/cds/group/psdm/logs/calibman/lcls2'  # None # './cm-logger'
    d_webint = True

    h_host = 'DB host, default = %s' % d_host
    h_port = 'DB port, default = %s' % d_port
    # BUG FIX: the original "'******' % d_user" string had no %s conversion,
    # so the % formatting raised TypeError at class-definition time.
    h_user = 'user login name, default = %s' % d_user
    h_upwd = 'password, default = %s' % d_upwd
    h_experiment = 'experiment name, default = %s' % d_experiment
    h_detector = 'detector name, default = %s' % d_detector
    h_loglevel = 'logging level from list (%s), default = %s' % (LEVEL_NAMES, d_loglevel)
    h_logdir = 'logger directory, if specified the logfile will be saved under this directory, default = %s' % str(d_logdir)
    h_webint = 'use web-based CLI, default = %s' % d_webint
def _history_record(dcmts):
    """Return one history record line built from dictionary comments and system info."""
    user = gu.get_login()
    host = gu.get_hostname()
    tstamp = gu.str_tstamp(fmt='%Y-%m-%dT%H:%M:%S zone:%Z')
    # run number is zero-padded to 4 digits
    rnum = '%04d' % dcmts.get('run')
    exp = '%s' % dcmts.get('exp')
    ifname = '%s' % dcmts.get('ifname')
    ofname = '%s' % dcmts.get('fname')
    app = '%s' % dcmts.get('app')
    cmt = '%s' % dcmts.get('comment')
    # fields are left-justified to fixed widths so records align column-wise
    fields = (ofname.ljust(14), ifname, exp.ljust(8), rnum.ljust(4),
              app.ljust(10), user, host, tstamp.ljust(29), cmt)
    return 'file:%s copy_of:%s exp:%s run:%s app:%s user:%s host:%s cptime:%s comment:%s\n' % fields
def docdic(data, dataid, **kwargs):
    """Returns dictionary for db document in style of JSON object.

    - data - payload object; its type determines the data_* descriptor fields,
    - dataid - identifier stored under 'id_data',
    - kwargs - optional document fields; unspecified ones get the defaults below.
    """
    # (key, default) pairs for fields that callers may override via kwargs
    defaults = (('experiment', None),
                ('run', '0'),
                ('run_end', 'end'),
                ('detector', None),
                ('ctype', None),
                ('time_sec', None),
                ('time_stamp', None),
                ('version', 'v00'),
                ('comment', ''),
                ('extpars', None))
    doc = {key: kwargs.get(key, val) for key, val in defaults}
    doc.update({
        'uid': gu.get_login(),
        'host': gu.get_hostname(),
        'cwd': gu.get_cwd(),
        'id_data': dataid,
    })
    if isinstance(data, np.ndarray):
        doc.update({
            'data_type': 'ndarray',
            'data_dtype': str(data.dtype),
            'data_size': '%d' % data.size,
            'data_ndim': '%d' % data.ndim,
            'data_shape': str(data.shape),
        })
    elif isinstance(data, str):
        doc['data_type'] = 'str'
        doc['data_size'] = '%d' % len(data)
    else:
        doc['data_type'] = 'any'
    logger.debug('doc data type: %s' % doc['data_type'])
    return doc
def input_option_parser():
    """Return an optparse.OptionParser configured for the calibration manager UI."""
    from optparse import OptionParser

    d_host = cc.HOST
    d_port = cc.PORT
    d_user = gu.get_login()  # cc.USERNAME
    d_upwd = ''
    d_experiment = 'exp12345'
    d_detector = 'detector_1234'
    d_loglevel = 'INFO'
    d_logdir = './cm-logger'

    h_host = 'DB host, default = %s' % d_host
    h_port = 'DB port, default = %s' % d_port
    # BUG FIX: the original "'******' % d_user" string had no %s conversion,
    # so the % formatting raised TypeError whenever this function was called.
    h_user = 'user login name, default = %s' % d_user
    h_upwd = 'password, default = %s' % d_upwd
    h_experiment = 'experiment name, default = %s' % d_experiment
    h_detector = 'detector name, default = %s' % d_detector
    h_loglevel = 'logging level from list (%s), default = %s' % (LEVEL_NAMES, d_loglevel)
    h_logdir = 'logger directory, default = %s' % d_logdir

    parser = OptionParser(description='Calibration manager UI', usage=usage())
    parser.add_option('--host', default=d_host, action='store', type='string', help=h_host)
    parser.add_option('--port', default=d_port, action='store', type='string', help=h_port)
    parser.add_option('-u', '--user', default=d_user, action='store', type='string', help=h_user)
    parser.add_option('-p', '--upwd', default=d_upwd, action='store', type='string', help=h_upwd)
    parser.add_option('-d', '--detector', default=d_detector, action='store', type='string', help=h_detector)
    parser.add_option('-e', '--experiment', default=d_experiment, action='store', type='string', help=h_experiment)
    parser.add_option('-l', '--loglevel', default=d_loglevel, action='store', type='string', help=h_loglevel)
    parser.add_option('-L', '--logdir', default=d_logdir, action='store', type='string', help=h_logdir)
    return parser