def _load_hook(self, dbr):
    data = os.path.join(self.directory, self.filename)
    dm = H5DataManager()
    if os.path.isfile(data):
        dm.open_data(data)

    self.data_manager = dm
def load_graph(self):
    path = self.path
    # path = os.path.join(self.root, self.filename)
    from pychron.lasers.power.power_map_processor import PowerMapProcessor

    pmp = PowerMapProcessor()
    if path.endswith('.h5') or path.endswith('.hdf5'):
        reader = H5DataManager()
        # reader = self._data_manager_factory()
        reader.open_data(path)
    else:
        with open(path, 'r') as f:
            reader = csv.reader(f)
            # trim off header
            reader.next()

    # self.graph = pmp.load_graph(reader)
    self.graph.width = 625
    self.graph.height = 500

    reader.open_data(path)
    z, _ = pmp._extract_h5(reader)

    if self.surf:
        self.graph3D.plot_data(z, func='surf',
                               representation=self.representation,
                               warp_scale=self.vertical_ex,
                               outline=self.surf_outline)
    if self.contour:
        self.graph3D.plot_data(z, func='contour_surf',
                               contours=self.levels,
                               warp_scale=self.vertical_ex,
                               outline=self.contour_outline)
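# Minimal sketch (not part of the original module) of the extension-based
# reader dispatch used in load_graph above: HDF5 files go through
# H5DataManager, anything else is treated as a CSV export whose header row
# is skipped. The helper name and the H5DataManager import path are
# assumptions for illustration.
import csv
import os

from pychron.managers.data_managers.h5_data_manager import H5DataManager  # assumed path


def open_power_map_reader(path):
    """Return an opened reader for a power-map file, chosen by extension."""
    if os.path.splitext(path)[1].lower() in ('.h5', '.hdf5'):
        reader = H5DataManager()
        reader.open_data(path)
        return reader

    # plain-text export: drop the header row, hand back the data rows
    with open(path, 'r') as f:
        rows = list(csv.reader(f))
    return rows[1:]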
def _execute_power_calibration(self):
    self.graph = self._graph_factory()
    # s, _ = g.new_series()
    # po = g.plots[0]
    # g.add_aux_axis(po, s)

    # if self.parent is not None:
    self._open_graph()

    self.data_manager = dm = H5DataManager()
    if self.parameters.use_db:
        dw = DataWarehouse(root=os.path.join(self.parent.db_root,
                                             'power_calibration'))
        dw.build_warehouse()
        directory = dw.get_current_dir()
    else:
        directory = os.path.join(paths.data_dir, 'power_calibration')

    _dn = dm.new_frame(directory=directory,
                       base_frame_name='power_calibration')
    table = dm.new_table('/', 'calibration', table_style='PowerCalibration')

    callback = lambda p, r, t: self._write_data(p, r, t)

    self._stop_signal = TEvent()
    self._iterate(self.parameters, self.graph, True, callback, table)
    self._finish_calibration()

    if self._alive:
        self._alive = False
        self._save_to_db()
        self._apply_calibration()
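# Hedged sketch of the kind of callback _iterate is assumed to invoke with
# (power, response, table): append one row to the pytables table created by
# new_table above, then flush it to disk. The column names 'power' and
# 'response' are assumptions; the real columns are defined by the
# 'PowerCalibration' table description.
def write_calibration_row(power, response, table):
    row = table.row
    row['power'] = power
    row['response'] = response
    row.append()
    table.flush()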
def load(self, path):
    pmp = PowerMapProcessor()
    reader = H5DataManager()
    reader.open_data(path)

    cg = pmp.load_graph(reader)

    self.beam_diameter, self.power = pmp.extract_attrs(['beam_diameter', 'power'])
    self.component = cg.plotcontainer
    self.was_executed = True
    self.processor = pmp
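# Hypothetical usage of the load method above. The owning editor class and
# the file path are placeholders, not names taken from the original module.
editor = PowerMapEditor()  # hypothetical owner of load()
editor.load('/path/to/powermap.h5')
print(editor.beam_diameter, editor.power)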
def _load_hook(self, dbr):
    data = os.path.join(self.directory, self.filename)
    dm = H5DataManager()
    if os.path.isfile(data):
        dm.open_data(data)
        tab = dm.get_table('internal', 'Power')
        if tab is not None:
            if hasattr(tab.attrs, 'request_power'):
                self.summary = 'request power ={}'.format(tab.attrs.request_power)

    self.runid = str(dbr.rid)
    self.data_manager = dm
def _make_per_spec(self, lt):
    run_spec = AutomatedRunSpec()
    per_spec = PersistenceSpec()
    arar_age = ArArAge()

    # populate per_spec
    per_spec.run_spec = run_spec
    per_spec.arar_age = arar_age

    # populate run_spec
    run_spec.identifier = lt.labnumber
    run_spec.aliquot = lt.aliquot
    run_spec.step = lt.step
    run_spec.username = '******'
    run_spec.uuid = lt.uuid

    cp = lt.collection_path
    man = H5DataManager()
    man.open_file(cp)

    # add signal/isotopes
    group = man.get_group('signal')
    for grp in man.get_groups(group):
        isok = grp._v_name
        iso = Isotope(name=isok, fit='linear')

        # only handle one detector per isotope
        tbl = man.get_tables(grp)[0]
        iso.detector = tbl._v_name

        xs = array([x['time'] for x in tbl.iterrows()])
        ys = array([x['value'] for x in tbl.iterrows()])
        iso.xs = xs
        iso.ys = ys

        arar_age.isotopes[isok] = iso

    # add sniffs
    group = man.get_group('sniff')
    for k, iso in arar_age.isotopes.iteritems():
        grp = man.get_group(k, group)
        tbl = man.get_tables(grp)[0]
        iso.sniff.detector = tbl._v_name

        xs = array([x['time'] for x in tbl.iterrows()])
        ys = array([x['value'] for x in tbl.iterrows()])
        iso.sniff.xs = xs
        iso.sniff.ys = ys

    # add baselines
    group = man.get_group('baseline')
    for dettbl in man.get_tables(group):
        detname = dettbl._v_name

        xs = array([x['time'] for x in dettbl.iterrows()])
        ys = array([x['value'] for x in dettbl.iterrows()])

        for iso in arar_age.isotopes.itervalues():
            if iso.detector == detname:
                iso.baseline.xs = xs
                iso.baseline.ys = ys
                iso.baseline.fit = 'average'

    return per_spec
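# Sketch of the HDF5 layout _make_per_spec expects, written with pytables
# directly (H5DataManager wraps pytables). The top-level group names
# ('signal', 'sniff', 'baseline') and the 'time'/'value' columns come from
# the code above; the isotope ('Ar40') and detector ('H1') names and values
# are made up for illustration.
import tables


class TimeValue(tables.IsDescription):
    time = tables.Float64Col()
    value = tables.Float64Col()


def write_example_collection(path):
    with tables.open_file(path, 'w') as h5:
        for top in ('signal', 'sniff', 'baseline'):
            h5.create_group('/', top)

        # signal/sniff: one group per isotope, one table per detector
        for top in ('signal', 'sniff'):
            grp = h5.create_group('/{}'.format(top), 'Ar40')
            tbl = h5.create_table(grp, 'H1', TimeValue)
            row = tbl.row
            row['time'], row['value'] = 0.0, 100.0
            row.append()
            tbl.flush()

        # baseline: one table per detector, keyed by detector name
        tbl = h5.create_table('/baseline', 'H1', TimeValue)
        row = tbl.row
        row['time'], row['value'] = 0.0, 1.5
        row.append()
        tbl.flush()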