Example #1
    def build(self):
        widget = os.environ['cinfdata_app_widget']
        cinfdata = Cinfdata(self, creds.username, creds.password)
        if widget == 'dateplotoptions':
            with open('DatePlotOptions_input.json') as file_:
                setup, link = json.load(file_)
            main_widget = DatePlotOptions((setup, link))
            main_widget.cinfdata = MagicMock()

        return main_widget
Example #2
def import_data(ID, setup='omicron'):
    db = Cinfdata(setup, use_caching=True)
    data = db.get_data(ID)
    try:
        data[:, 0] = data[:, 0] / 1000  # convert time column from ms to s
    except Exception:  # empty dataset: nothing to convert
        meta = db.get_metadata(ID)
        print('"{}" data for ID {} is empty and set to "None"'.format(meta['mass_label'], ID))
        data = None
    return data
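A minimal usage sketch for the helper above (the ID is just a placeholder and the matplotlib plotting is an assumption, not part of the original example):

# Hypothetical usage of import_data(); the ID is a placeholder and plotting
# with matplotlib is an assumption for illustration only.
from matplotlib import pyplot as plt

data = import_data(18982)
if data is not None:
    # column 0 is time (converted above from ms to s), column 1 is the signal
    plt.plot(data[:, 0], data[:, 1])
    plt.xlabel('Time / s')
    plt.show()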
Example #3
    def _after_password(self, username, password, _):
        """After password has been received, initialize cinfdata"""
        # Initiate cinfdata and bind properties
        self.cinfdata = Cinfdata(self, username, password)

        # FIXME, check password somehow and think about how to come
        # back to _after_init

        # Add a reference to cinfdata to page selection
        self.ids.page_selection.cinfdata = self.cinfdata
        self.ids.main_image.cinfdata = self.cinfdata
Example #4
    def __init__(self, timestamp, caching=False, set_label=None, temp_label='Sample temperature', recalculate=False):
        """Save all data in 'DATA' dict object"""

        # Various attributes
        self.pickle_format = 'tpd__{}__{}.pickle'
        self.exps = {}
        #self.caching = True if CACHING is True or caching is True else False
        self.caching = caching
        if CACHING is not None:
            self.caching = CACHING
        self.timestamp = {}
        self.timestamp['date'] = timestamp
        self.timestamp['unix'] = time.mktime(datetime.datetime.strptime(
                                             timestamp, "%Y-%m-%d %H:%M:%S").timetuple())
        # Try to load from file
        cache_file = []
        load_from_pickle = False
        if self.caching and not recalculate:
            list_of_files = path.os.listdir(CACHE_DIR)
            for name in list_of_files:
                if name.startswith('tpd__{}__'.format(timestamp)):
                    cache_file.append(name)
            if len(cache_file) == 1:
                with open(path.join(CACHE_DIR, cache_file[0]), 'rb') as f:
                    copy = pickle.load(f)
                    self.name = copy.name
                    self.labels = copy.labels
                    self.data = copy.data
                    self.exps = copy.exps
                load_from_pickle = True
                print(' *** Loaded TPD data from pickle: {} ***'.format(self.name))
            elif len(cache_file) > 1:
                print('Found multiple pickles matching timestamp:\n{}'.format(cache_file))

        # Get data from database
        if not load_from_pickle:
            # Connect to database
            db = Cinfdata('omicron',
                        use_caching=caching,
                        grouping_column='time',
                        )
            # Get data in unit seconds
            group_data = db.get_data_group(timestamp, scaling_factors=(1E-3, None))
            group_meta = db.get_metadata_group(timestamp)

            # Get a list of labels and group data
            print(group_meta)
            self.name = group_meta[list(group_meta.keys())[0]]['Comment'].replace('/', '')
            self.labels = [group_meta[key]['mass_label'] for key in group_data.keys()]
            self.data = {group_meta[key]['mass_label']: group_data[key] for key in group_data.keys()}
            print('Loaded data from Experiment: "{}"'.format(self.name))
            self.isolate_experiments(set_label=set_label, temp_label=temp_label)
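The loading branch above implies a matching save step that writes the pickle named by self.pickle_format; that part of the class is not included in this excerpt. A hedged sketch of what such a method might look like, under that assumption:

    def save(self):
        """Hypothetical sketch (not part of the original excerpt): write this
        object to CACHE_DIR so a later run with the same timestamp can load it
        from pickle instead of querying the database."""
        if not self.caching:
            return
        fname = self.pickle_format.format(self.timestamp['date'], self.name)
        with open(path.join(CACHE_DIR, fname), 'wb') as f:
            pickle.dump(self, f)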
Example #5
    def __init__(self, **kwargs):
        super(MainCarousel, self).__init__(**kwargs)
        # Initiate cinfdata and bind properties
        self.cinfdata = Cinfdata(self, creds.username, creds.password)

        # Add a reference to cinfdata to page selection
        self.ids.page_selection.cinfdata = self.cinfdata
        self.ids.main_image.cinfdata = self.cinfdata

        # Initiate date plot options and add cinfdata reference
        #self.dateplot_options = DatePlotOptions()
        #self.dateplot_options.cinfdata = self.cinfdata

        self.waiting_png = CoreImage('data/waiting.png')
Example #6
    def __init__(self, timestamp, caching=False):
        """Save all data in 'DATA' dict object"""

        # Connect to database
        db = Cinfdata('omicron',
                    use_caching=caching,
                    grouping_column='time',
                    )
        # Get data in unit seconds
        group_data = db.get_data_group(timestamp, scaling_factors=(1E-3, None))
        group_meta = db.get_metadata_group(timestamp)
        # Get a list of labels and group data
        self.name = group_meta[list(group_meta.keys())[0]]['Comment']
        self.labels = [group_meta[key]['mass_label'] for key in group_data.keys()]
        self.data = {group_meta[key]['mass_label']: group_data[key] for key in group_data.keys()}
        print('Loaded data from Experiment: "{}"'.format(self.name))
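Only the constructor is shown above; a short, hypothetical consumption sketch (the enclosing class name GroupData and the timestamp are assumptions) could look like this:

# Hypothetical usage: the class name 'GroupData' and the timestamp are assumptions.
# For time scans each entry in .data is an (N, 2) array of time (s) vs. signal.
from matplotlib import pyplot as plt

group = GroupData('2018-03-30 14:13:17', caching=True)
for label in group.labels:
    data = group.data[label]
    plt.plot(data[:, 0], data[:, 1], label=label)
plt.legend()
plt.title(group.name)
plt.show()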
Example #7
def download_cinfdata_set(
    setup="sniffer", group_id=None, grouping_column=None, **kwargs
):

    if grouping_column is None:
        grouping_column, group_id = kwargs.popitem()

    from .Combining import synchronize

    try:
        from cinfdata import Cinfdata
    except ImportError:
        print(
            "the cinfdata module must be on your python path. It's here: \n"
            + "https://github.com/CINF/cinf_database/blob/master/cinfdata.py"
        )
        raise  # without cinfdata there is nothing useful to do below

    try:
        cinfd = Cinfdata(
            setup,
            grouping_column=grouping_column,
            allow_wildcards=True,
            label_column="mass_label",
        )
    except:
        raise  # until I know exactly which error I'm trying to catch.
        print("couldn't connect. You should run gstm")
        # os.system('gstm')
        raise RuntimeError("Couldn't connect to cinfdata!")

    # obj = cinfd.get_metadata_group('2018-03-30 14:13:17')

    # all_datasets = cinfd.get_metadata_group('%')
    # the_list = [(ID, d['time'], d['comment']) for ID, d in all_datasets.items()]
    # print(the_list)

    obj = cinfd.get_metadata_group(group_id)
    # print(str(obj)) #

    idlists = {}  # keys will be time as string. values will be corresponding id's

    for key, value in obj.items():
        # label = value['mass_label']
        # print(label)
        timestamp = str(value["time"])
        if timestamp not in idlists:
            idlists[timestamp] = []
        idlists[timestamp] += [value["id"]]

    datasets = {}
    for timestamp, idlist in idlists.items():

        if len(idlist) == 0:
            print("No data associated with timestamp '" + timestamp + "'.")
            continue

        dataset = {"title": timestamp, "data_type": "MS"}

        metadatas = dict([(i, cinfd.get_metadata(i)) for i in idlist])

        unixtimes = [metadatas[i]["unixtime"] for i in idlist]
        if len(set(unixtimes)) > 1:
            msg = "unix times don't match for timestamp '" + timestamp + "'!"
            raise ValueError(msg)

        dataset["tstamp"] = unixtimes[0]
        dataset["timestamp"] = metadatas[idlist[0]]["time"].strftime("%H:%M:%S")

        labels = [metadatas[i]["mass_label"] for i in idlist]
        if "Mass Scan" in labels:
            dataset["scan_type"] = "mass"
        else:
            dataset["scan_type"] = "time"

        dataset["data_cols"] = set()
        dataset["timecols"] = {}
        for i in idlist:  # avoiding id since it's got a builtin meaning
            data = cinfd.get_data(i)
            label = metadatas[i]["mass_label"]
            if len(data.shape) == 1:
                dataset[label] = data
                dataset["data_cols"].add(label)
            elif data.shape[1] == 2:
                x = data[:, 0]
                y = data[:, 1]
                x_label = label + "-x"
                y_label = label + "-y"
                dataset["timecols"][y_label] = x_label  # Fixed 20B26!!!
                dataset[x_label] = x * 1e-3  # cinfdata saves time in ms!!!
                dataset[y_label] = y

                dataset["data_cols"].add(x_label)
                dataset["data_cols"].add(y_label)

        datasets[timestamp] = dataset

    timescans = [
        dataset for dataset in datasets.values() if dataset["scan_type"] == "time"
    ]

    combined = synchronize(timescans, t_zero="first")

    return combined
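A hedged usage note: since the function pops an arbitrary keyword into (grouping_column, group_id), a call might look like the line below; the setup name and timestamp are only illustrative.

# Hypothetical call: the keyword name 'time' becomes grouping_column and its value
# becomes group_id via kwargs.popitem(); the setup and timestamp are placeholders.
combined = download_cinfdata_set(setup='sniffer', time='2018-03-30 14:13:17')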
Example #8
    'tof_R1_voltage': None,
    'sem_voltage': 1798.83,
    u'unixtime': 1464261944L,
    'type': 4L,
    'pass_energy': None,
    'time': datetime.datetime(2016, 5, 26, 13, 25, 44),
    'sample_temperature': None,
    'tof_focus_voltage': None,
    'tof_pulse_voltage': None,
    'tof_emission_current': None,
    'tof_deflection_voltage': None,
    'tof_p1_2': None,
    'tof_p1_0': None,
    'tof_p1_1': None,
    'tof_lens_E': None,
    'tof_lens_D': None,
    'tof_lens_A': None,
    'tof_lens_C': None,
    'tof_lens_B': None
}

cinfdata = Cinfdata('tof', use_caching=True)
# Get from database
assert np.isclose(cinfdata.get_data(5417).sum(), DATA_CHECKSUM)
assert cinfdata.get_metadata(5417) == METADATA
print('Test database OK')
# And now fetch from cache
assert np.isclose(cinfdata.get_data(5417).sum(), DATA_CHECKSUM)
assert cinfdata.get_metadata(5417) == METADATA
print('Test cache OK')
Example #9
def download_cinfdata_set(setup='sniffer',
                          group_id=None,
                          grouping_column=None,
                          **kwargs):

    if grouping_column is None:
        grouping_column, group_id = kwargs.popitem()

    from .Combining import synchronize

    try:
        from cinfdata import Cinfdata
    except ImportError:
        print(
            'the cinfdata module must be on your python path. It\'s here: \n' +
            'https://github.com/CINF/cinf_database/blob/master/cinfdata.py')
        raise  # without cinfdata there is nothing useful to do below

    try:
        cinfd = Cinfdata(setup,
                         grouping_column=grouping_column,
                         allow_wildcards=True,
                         label_column='mass_label')
    except:
        raise  # until I know exactly which error I'm trying to catch.
        print('couldn\'t connect. You should run gstm')
        #os.system('gstm')
        raise RuntimeError('Couldn\'t connect to cinfdata!')

    #obj = cinfd.get_metadata_group('2018-03-30 14:13:17')

    #all_datasets = cinfd.get_metadata_group('%')
    #the_list = [(ID, d['time'], d['comment']) for ID, d in all_datasets.items()]
    #print(the_list)

    obj = cinfd.get_metadata_group(group_id)
    #print(str(obj)) #

    idlists = {
    }  # keys will be time as string. values will be corresponding id's

    for key, value in obj.items():
        #label = value['mass_label']
        #print(label)
        timestamp = str(value['time'])
        if timestamp not in idlists:
            idlists[timestamp] = []
        idlists[timestamp] += [value['id']]

    datasets = {}
    for timestamp, idlist in idlists.items():

        if len(idlist) == 0:
            print('No data associated with timestamp \'' + timestamp + '\'.')
            continue

        dataset = {'title': timestamp, 'data_type': 'MS'}

        metadatas = dict([(i, cinfd.get_metadata(i)) for i in idlist])

        unixtimes = [metadatas[i]['unixtime'] for i in idlist]
        if len(set(unixtimes)) > 1:
            msg = 'unix times don\'t match for timestamp \'' + timestamp + '\'!'
            raise ValueError(msg)

        dataset['tstamp'] = unixtimes[0]
        dataset['timestamp'] = metadatas[idlist[0]]['time'].strftime(
            '%H:%M:%S')

        labels = [metadatas[i]['mass_label'] for i in idlist]
        if 'Mass Scan' in labels:
            dataset['scan_type'] = 'mass'
        else:
            dataset['scan_type'] = 'time'

        dataset['data_cols'] = set()
        dataset['timecols'] = {}
        for i in idlist:  #avoiding id since it's got a builtin meaning
            data = cinfd.get_data(i)
            label = metadatas[i]['mass_label']
            if len(data.shape) == 1:
                dataset[label] = data
                dataset['data_cols'].add(label)
            elif data.shape[1] == 2:
                x = data[:, 0]
                y = data[:, 1]
                x_label = label + '-x'
                y_label = label + '-y'
                dataset['timecols'][x_label] = y_label
                dataset[x_label] = x * 1e-3  # cinfdata saves time in ms!!!
                dataset[y_label] = y

                dataset['data_cols'].add(x_label)
                dataset['data_cols'].add(y_label)

        datasets[timestamp] = dataset

    timescans = [
        dataset for dataset in datasets.values()
        if dataset['scan_type'] == 'time'
    ]

    combined = synchronize(timescans, t_zero='first')

    return combined
Example #10
from cinfdata import Cinfdata
from matplotlib import pyplot as plt

db = Cinfdata('stm312')
spectrum = db.get_data(6688)
metadata = db.get_metadata(6688)

plt.plot(spectrum[:, 0], spectrum[:, 1])
plt.title(metadata['Comment'])
plt.show()
Example #11
                    self.dbg2.axvline(x=self.data[offset + counter, 0],
                                      color='r')
                break
        self.std2 = self.data[1:offset + counter, 1].std()
        print('STDs: ', std_gradient, self.std2)
        return std_gradient


################################################################
### MAIN ###
################################################################
if __name__ == '__main__':

    #import rcparam
    from cinfdata import Cinfdata
    db = Cinfdata('omicron', use_caching=False)  # pylint: disable=invalid-name
    #ID = 18851
    #ID = 12299 # NP NiFe
    ID = 18982  # GG 29
    #ID = 17601 # SA1
    #ID = 17585 # SA2
    ID = 14273  # 70% 6nm Pt
    STRING = '\
TARGET=5.0;\
MODEL=NP;\
SA_DENSITY=1.58426e19;\
PARTICLE_DIAMETER=6.0;\
APERTURE_DIAMETER=12.41;\
FIRST_LIMIT=20;\
SENSITIVITY_LIMIT=.7;SENSITIVITY_FILTER=1.;\
TIME=[];\
Example #12
mode = 'plot'
#mode = 'save'
settings = {
    #9489: 220,
    #9494: 400,
    #9504: 1500,
    #9507: 800,
    #9509: 1000,
    #9512: 600,
    #9516: 1000,
    #9518: 500,
    #9527: 600,
    9545: 350,
}

db = Cinfdata('dummy', use_caching=False)
for ID, LEVEL in settings.items():
    data = db.get_data(ID)
    time, signal = data[:, 0], data[:, 1]

    smooth = ct.smooth(signal)
    diff = np.abs(np.diff(signal-smooth))

    index = np.where(diff > LEVEL)[0] + 1
    i = np.where(time > 0)[0]
    
    if mode in ['test', 'plot']:
        plt.title(str(ID))
        plt.plot(data[:, 0], data[:, 1], 'b-', label='Raw data')
        plt.plot(time[index], signal[index], 'mo', markerfacecolor='w')
            remaining = target_number - integral
            time_left = remaining / dep_rate
            time_hr = int(time_left / 3600)
            time_min = int((time_left - time_hr * 3600) / 60)
            time_sec = int(time_left - time_hr * 3600 - time_min * 60)
            print(msg.format(target_coverage, time_hr, time_min, time_sec))


################################################################
### MAIN ###
################################################################
if __name__ == '__main__':

    #import rcparam
    from cinfdata import Cinfdata
    db = Cinfdata('stm312', use_caching=False)  # pylint: disable=invalid-name
    ID = 13602
    STRING = '\
TARGET=10.0;\
MODEL=NP;\
SA_DENSITY=1;\
LEAK_CURRENT=1e-12;\
PARTICLE_DIAMETER=1.3;\
APERTURE_DIAMETER=7;\
TIME=[0,9200]'

    try:
        SESSION = IntegrateCurrent(STRING, db.get_data(ID))
        SESSION.integrate()
        if SESSION.plot:
            SESSION.plt.show()