def import_data(ID, setup='omicron'):
    """Fetch dataset ``ID`` from cinfdata and convert its time axis to seconds.

    Args:
        ID (int): database id of the dataset to fetch.
        setup (str): cinfdata setup name. Defaults to 'omicron'.

    Returns:
        The data array with column 0 converted from ms to s, or None if the
        dataset is empty.
    """
    db = Cinfdata(setup, use_caching=True)
    data = db.get_data(ID)
    try:
        # cinfdata stores time in milliseconds; convert to seconds
        data[:, 0] = data[:, 0] / 1000
    except (TypeError, IndexError):
        # Empty dataset: the returned object is not indexable as 2-D.
        # (Was a bare ``except:``, which also hid unrelated bugs.)
        meta = db.get_metadata(ID)
        print('"{}" data for ID {} is empty and set to "None"'.format(meta['mass_label'], ID))
        data = None
    return data
def import_data(ID, setup='omicron'):
    """Fetch dataset ``ID`` from cinfdata and convert its time axis to seconds.

    Args:
        ID (int): database id of the dataset to fetch.
        setup (str): cinfdata setup name. Defaults to 'omicron'.

    Returns:
        The data array with column 0 converted from ms to s, or None if the
        dataset is empty.
    """
    db = Cinfdata(setup, use_caching=True)
    data = db.get_data(ID)
    try:
        # cinfdata stores time in milliseconds; convert to seconds
        data[:, 0] = data[:, 0] / 1000
    except (TypeError, IndexError):
        # Empty dataset: the returned object is not indexable as 2-D.
        # (Was a bare ``except:``, which also hid unrelated bugs.)
        meta = db.get_metadata(ID)
        print('"{}" data for ID {} is empty and set to "None"'.format(meta['mass_label'], ID))
        data = None
    return data
def download_cinfdata_set(
    setup="sniffer", group_id=None, grouping_column=None, **kwargs
):
    """Download a group of cinfdata datasets and return the time scans synchronized.

    Args:
        setup (str): cinfdata setup name. Defaults to "sniffer".
        group_id: value of ``grouping_column`` identifying the group to fetch.
        grouping_column (str): database column used to group datasets. If None,
            the single remaining keyword argument is interpreted as
            ``grouping_column=group_id`` (e.g. ``comment="my measurement"``).
        **kwargs: alternative way to give the grouping, see above.

    Returns:
        The time scans of the group combined by ``synchronize`` with
        ``t_zero="first"``.

    Raises:
        ImportError: if the cinfdata module is not on the python path.
        ValueError: if datasets sharing a timestamp disagree on unixtime.
    """
    if grouping_column is None:
        grouping_column, group_id = kwargs.popitem()
    from .Combining import synchronize

    try:
        from cinfdata import Cinfdata
    except ImportError:
        print(
            "the cinfdata module must be on your python path. It's here: \n"
            + "https://github.com/CINF/cinf_database/blob/master/cinfdata.py"
        )
        # Without cinfdata nothing below can work; previously execution fell
        # through to an obscure NameError. Fail here with the real cause.
        raise

    try:
        cinfd = Cinfdata(
            setup,
            grouping_column=grouping_column,
            allow_wildcards=True,
            label_column="mass_label",
        )
    except Exception:
        # Connection problems usually mean the ssh tunnel isn't running.
        # (The print below was unreachable dead code behind a bare
        # ``except: raise``; now it runs, then the original error propagates.)
        print("couldn't connect. You should run gstm")
        # os.system('gstm')
        raise

    obj = cinfd.get_metadata_group(group_id)
    # print(str(obj))  # debugging

    idlists = {}  # keys will be time as string. values will be corresponding id's
    for value in obj.values():
        timestamp = str(value["time"])
        if timestamp not in idlists:
            idlists[timestamp] = []
        idlists[timestamp] += [value["id"]]

    datasets = {}
    for timestamp, idlist in idlists.items():
        if len(idlist) == 0:
            print("No data associated with timestamp '" + timestamp + "'.")
            continue

        dataset = {"title": timestamp, "data_type": "MS"}

        metadatas = dict([(i, cinfd.get_metadata(i)) for i in idlist])
        unixtimes = [metadatas[i]["unixtime"] for i in idlist]
        # All datasets grouped under one timestamp must share a unixtime
        if len(set(unixtimes)) > 1:
            msg = "unix times don't match for timestamp '" + timestamp + "'!"
            raise ValueError(msg)
        dataset["tstamp"] = unixtimes[0]
        dataset["timestamp"] = metadatas[idlist[0]]["time"].strftime("%H:%M:%S")

        labels = [metadatas[i]["mass_label"] for i in idlist]
        if "Mass Scan" in labels:
            dataset["scan_type"] = "mass"
        else:
            dataset["scan_type"] = "time"

        dataset["data_cols"] = set()
        dataset["timecols"] = {}
        for i in idlist:  # avoiding id since it's got a builtin meaning
            data = cinfd.get_data(i)
            label = metadatas[i]["mass_label"]
            if len(data.shape) == 1:
                # 1-D data (e.g. a spectrum): store under its label directly
                dataset[label] = data
                dataset["data_cols"].add(label)
            elif data.shape[1] == 2:
                x = data[:, 0]
                y = data[:, 1]
                x_label = label + "-x"
                y_label = label + "-y"
                # map each data column to its time column. Fixed 20B26!!!
                dataset["timecols"][y_label] = x_label
                dataset[x_label] = x * 1e-3  # cinfdata saves time in ms!!!
                dataset[y_label] = y
                dataset["data_cols"].add(x_label)
                dataset["data_cols"].add(y_label)

        datasets[timestamp] = dataset

    timescans = [
        dataset for dataset in datasets.values() if dataset["scan_type"] == "time"
    ]

    combined = synchronize(timescans, t_zero="first")
    return combined
import datetime import numpy as np from cinfdata import Cinfdata DATA_CHECKSUM = 12525.0000008 METADATA = { 'comment': '', 'tof_liner_voltage': None, 'tof_iterations': None, 'timestep': 0.1, 'id': 5417L, 'preamp_range': -7, 'tof_pulse_width': None, 'pre_wait_time': None, 'tof_ion_energy': None, 'tof_R2_voltage': None, 'mass_label': None, 'tof_R1_voltage': None, 'sem_voltage': 1798.83, u'unixtime': 1464261944L, 'type': 4L, 'pass_energy': None, 'time': datetime.datetime(2016, 5, 26, 13, 25, 44), 'sample_temperature': None, 'tof_focus_voltage': None, 'tof_pulse_voltage': None, 'tof_emission_current': None, 'tof_deflection_voltage': None, 'tof_p1_2': None, 'tof_p1_0': None, 'tof_p1_1': None, 'tof_lens_E': None, 'tof_lens_D': None, 'tof_lens_A': None, 'tof_lens_C': None, 'tof_lens_B': None } cinfdata = Cinfdata('tof', use_caching=True) # Get from database assert np.isclose(cinfdata.get_data(5417).sum(), DATA_CHECKSUM) assert cinfdata.get_metadata(5417) == METADATA print('Test database OK') # And now fetch from cache assert np.isclose(cinfdata.get_data(5417).sum(), DATA_CHECKSUM) assert cinfdata.get_metadata(5417) == METADATA print('Test cache OK')
"""Fetch one STM-312 spectrum and its metadata from cinfdata."""
from cinfdata import Cinfdata

# Caching avoids a database round-trip on repeated runs
db = Cinfdata('stm312', use_caching=True)
spectrum = db.get_data(6688)
metadata = db.get_metadata(6688)
'tof_R1_voltage': None, 'sem_voltage': 1798.83, u'unixtime': 1464261944L, 'type': 4L, 'pass_energy': None, 'time': datetime.datetime(2016, 5, 26, 13, 25, 44), 'sample_temperature': None, 'tof_focus_voltage': None, 'tof_pulse_voltage': None, 'tof_emission_current': None, 'tof_deflection_voltage': None, 'tof_p1_2': None, 'tof_p1_0': None, 'tof_p1_1': None, 'tof_lens_E': None, 'tof_lens_D': None, 'tof_lens_A': None, 'tof_lens_C': None, 'tof_lens_B': None } cinfdata = Cinfdata('tof', use_caching=True) # Get from database assert np.isclose(cinfdata.get_data(5417).sum(), DATA_CHECKSUM) assert cinfdata.get_metadata(5417) == METADATA print('Test database OK') # And now fetch from cache assert np.isclose(cinfdata.get_data(5417).sum(), DATA_CHECKSUM) assert cinfdata.get_metadata(5417) == METADATA print('Test cache OK')
def download_cinfdata_set(setup='sniffer', group_id=None,
                          grouping_column=None, **kwargs):
    """Download a group of cinfdata datasets and return the time scans synchronized.

    Args:
        setup (str): cinfdata setup name. Defaults to 'sniffer'.
        group_id: value of ``grouping_column`` identifying the group to fetch.
        grouping_column (str): database column used to group datasets. If None,
            the single remaining keyword argument is interpreted as
            ``grouping_column=group_id`` (e.g. ``comment='my measurement'``).
        **kwargs: alternative way to give the grouping, see above.

    Returns:
        The time scans of the group combined by ``synchronize`` with
        ``t_zero='first'``.

    Raises:
        ImportError: if the cinfdata module is not on the python path.
        ValueError: if datasets sharing a timestamp disagree on unixtime.
    """
    if grouping_column is None:
        grouping_column, group_id = kwargs.popitem()
    from .Combining import synchronize
    try:
        from cinfdata import Cinfdata
    except ImportError:
        print(
            'the cinfdata module must be on your python path. It\'s here: \n'
            + 'https://github.com/CINF/cinf_database/blob/master/cinfdata.py')
        # Without cinfdata nothing below can work; previously execution fell
        # through to an obscure NameError. Fail here with the real cause.
        raise
    try:
        cinfd = Cinfdata(setup, grouping_column=grouping_column,
                         allow_wildcards=True,
                         label_column='mass_label')
    except Exception:
        # Connection problems usually mean the ssh tunnel isn't running.
        # (The print below was unreachable dead code behind a bare
        # ``except: raise``; now it runs, then the original error propagates.)
        print('couldn\'t connect. You should run gstm')
        #os.system('gstm')
        raise

    obj = cinfd.get_metadata_group(group_id)
    #print(str(obj)) # debugging

    idlists = {} # keys will be time as string. values will be corresponding id's
    for value in obj.values():
        timestamp = str(value['time'])
        if timestamp not in idlists:
            idlists[timestamp] = []
        idlists[timestamp] += [value['id']]

    datasets = {}
    for timestamp, idlist in idlists.items():
        if len(idlist) == 0:
            print('No data associated with timestamp \'' + timestamp + '\'.')
            continue

        dataset = {'title': timestamp, 'data_type': 'MS'}

        metadatas = dict([(i, cinfd.get_metadata(i)) for i in idlist])
        unixtimes = [metadatas[i]['unixtime'] for i in idlist]
        # All datasets grouped under one timestamp must share a unixtime
        if len(set(unixtimes)) > 1:
            msg = 'unix times don\'t match for timestamp \'' + timestamp + '\'!'
            raise ValueError(msg)
        dataset['tstamp'] = unixtimes[0]
        dataset['timestamp'] = metadatas[idlist[0]]['time'].strftime(
            '%H:%M:%S')

        labels = [metadatas[i]['mass_label'] for i in idlist]
        if 'Mass Scan' in labels:
            dataset['scan_type'] = 'mass'
        else:
            dataset['scan_type'] = 'time'

        dataset['data_cols'] = set()
        dataset['timecols'] = {}
        for i in idlist: #avoiding id since it's got a builtin meaning
            data = cinfd.get_data(i)
            label = metadatas[i]['mass_label']
            if len(data.shape) == 1:
                # 1-D data (e.g. a spectrum): store under its label directly
                dataset[label] = data
                dataset['data_cols'].add(label)
            elif data.shape[1] == 2:
                x = data[:, 0]
                y = data[:, 1]
                x_label = label + '-x'
                y_label = label + '-y'
                # BUGFIX: map the data column to its time column. This was
                # inverted (timecols[x_label] = y_label); the corrected
                # mapping matches the "Fixed 20B26" version of this function.
                dataset['timecols'][y_label] = x_label
                dataset[x_label] = x * 1e-3  # cinfdata saves time in ms!!!
                dataset[y_label] = y
                dataset['data_cols'].add(x_label)
                dataset['data_cols'].add(y_label)

        datasets[timestamp] = dataset

    timescans = [dataset for dataset in datasets.values()
                 if dataset['scan_type'] == 'time']

    combined = synchronize(timescans, t_zero='first')
    return combined
"""Fetch spectrum 6688 from the stm312 setup and plot it with its comment."""
from cinfdata import Cinfdata
from matplotlib import pyplot as plt

db = Cinfdata('stm312')
spectrum = db.get_data(6688)
metadata = db.get_metadata(6688)

plt.plot(spectrum[:, 0], spectrum[:, 1])
# BUGFIX: cinfdata metadata keys are lowercase ('comment' -- see the METADATA
# reference dict elsewhere in this repo); 'Comment' raised a KeyError here.
plt.title(metadata['comment'])
plt.show()
#ID = 17601 # SA1
#ID = 17585 # SA2
ID = 14273  # 70% 6nm Pt

# Parameter string consumed by IntegrateCurrent (key=value pairs, ';'-separated)
STRING = (
    'TARGET=5.0;'
    'MODEL=NP;'
    'SA_DENSITY=1.58426e19;'
    'PARTICLE_DIAMETER=6.0;'
    'APERTURE_DIAMETER=12.41;'
    'FIRST_LIMIT=20;'
    'SENSITIVITY_LIMIT=.7;SENSITIVITY_FILTER=1.;'
    'TIME=[];'
    'DEBUG=False'
)

try:
    SESSION = IntegrateCurrent(STRING, db.get_data(ID), plot=True)
    SESSION.integrate()
    if SESSION.plot:
        limits = SESSION.ax2.axis()
        if SESSION.debugging:
            # Align the x-axis of the debug plots with the main plot
            for ax in [SESSION.dbg0, SESSION.dbg1, SESSION.dbg2, SESSION.ax1]:
                l1, l2, l3, l4 = ax.axis()
                ax.axis([limits[0], limits[1], l3, l4])
        SESSION.plt.show()
except Exception:
    # Was a bare ``except:`` that swallowed the traceback; re-raise so the
    # real error is visible (and so the line below can't hit an unbound
    # SESSION after a failed construction).
    print(
        '***\nSomething is wrong: Check input parameters or try debugging mode!!\n***'
    )
    raise
limits = SESSION.ax2.axis()
settings = { #9489: 220, #9494: 400, #9504: 1500, #9507: 800, #9509: 1000, #9512: 600, #9516: 1000, #9518: 500, #9527: 600, 9545: 350, } db = Cinfdata('dummy', use_caching=False) for ID, LEVEL in settings.items(): data = db.get_data(ID) time, signal = data[:, 0], data[:, 1] smooth = ct.smooth(signal) diff = np.abs(np.diff(signal-smooth)) index = np.where(diff > LEVEL)[0] + 1 i = np.where(time > 0)[0] if mode in ['test', 'plot']: plt.title(str(ID)) plt.plot(data[:, 0], data[:, 1], 'b-', label='Raw data') plt.plot(time[index], signal[index], 'mo', markerfacecolor='w') index = [x for x in i if x not in index] if mode == 'plot':
#import rcparam
from cinfdata import Cinfdata
db = Cinfdata('omicron', use_caching=False)  # pylint: disable=invalid-name

ID = 15372
# Parameter string consumed by IntegrateCurrent (key=value pairs, ';'-separated)
STRING = (
    'TARGET=5.0;'
    'MODEL=NP;'
    'SA_DENSITY=1;'
    'PARTICLE_DIAMETER=5.0;'
    'APERTURE_DIAMETER=4.5;'
    'FIRST_LIMIT=10.8;'
    'SENSITIVITY_LIMIT=1.;SENSITIVITY_FILTER=1.;'
    'TIME=[]'
)

SESSION = None  # lets the handler below tell whether construction succeeded
try:
    SESSION = IntegrateCurrent(STRING, db.get_data(ID))
    SESSION.integrate()
    if SESSION.plot:
        SESSION.plt.show()
except Exception:
    # Was a bare ``except:``. If IntegrateCurrent() itself failed, SESSION
    # was unbound and SESSION.plt.show() raised a NameError that masked the
    # real traceback meant to propagate via ``raise``.
    print(
        '***\nSomething is wrong: Check input parameters or try debugging mode!!\n***'
    )
    if SESSION is not None:
        SESSION.plt.show()
    raise
# for 9x9 raster pattern: ap_dia ~ 12.4 mm (120.8 mm2 ~ 11x11 mm)
# for 5x5 raster pattern: ap_dia ~ 6.7 mm (35.3 mm2)
# [*** Based on simul. 12/12-18 use ap_dia ~ 9.0mm]
# for localized_Z pattern: ap_dia ~ 4.81 mm (18.2 mm2 ~ 5.2x3.5 mm)