def _save_xls_data_table(self, root, ias, step_heat_title, fusion_title,
                         spectrometer, summary_sheet=False, auto_view=False):
    """Write step-heat and fusion XLS data tables for ``ias`` into ``root``.

    root: directory to write into
    ias: interpreted ages, split by _assemble_groups into step-heat/fusion
    summary_sheet: forward to the writers as ``use_summary_sheet``
    auto_view: open each written file in Excel when True
    """
    ext = '.xls'
    app = 'Microsoft Office 2011/Microsoft Excel'
    # partition interpreted ages into step-heat and fusion groups
    shgroups, fgroups = self._assemble_groups(ias)
    if shgroups:
        w = StepHeatTableXLSWriter()
        name = '{}_{}_step_heat_data'.format(self.name, spectrometer)
        # unique_path avoids clobbering a previously written table
        p, _ = unique_path(root, name, extension=ext)
        iagroups, shgroups = zip(*shgroups)
        w.build(p, iagroups, shgroups, use_summary_sheet=summary_sheet,
                title=step_heat_title)
        if auto_view:
            view_file(p, application=app)
    if fgroups:
        w = FusionTableXLSWriter()
        name = '{}_{}_fusion_data'.format(self.name, spectrometer)
        p, _ = unique_path(root, name, extension=ext)
        iagroups, fgroups = zip(*fgroups)
        w.build(p, iagroups, fgroups, use_summary_sheet=summary_sheet,
                title=fusion_title)
        if auto_view:
            view_file(p, application=app)
def _new_frame_path(self, path=None, directory='scans', offset=0,
                    base_frame_name=None, verbose=True):
    """Return a unique frame path under ``paths.data_dir/directory``.

    When ``path`` is given it is returned unchanged; otherwise a fresh
    non-colliding path is generated from ``base_frame_name``.
    """
    frame_name = 'scan' if base_frame_name is None else base_frame_name
    # note: when ``directory`` is absolute, os.path.join discards paths.data_dir
    frame_dir = os.path.join(paths.data_dir, directory)
    if not os.path.isdir(frame_dir):
        os.mkdir(frame_dir)
    if path is None:
        path, _ = unique_path(frame_dir, frame_name, extension=self._extension)
    if verbose:
        self.info('New frame {}'.format(path))
    return path
def _save(self, editor, root, pathname, name, project, lns):
    """Persist the editor's figure as a pdf and/or a database figure record,
    depending on the _save_pdf_figure/_save_db_figure flags."""
    if self._save_pdf_figure:
        out, _ = unique_path(root, pathname, extension='.pdf')
        editor.save_file(out, dest_box=(1.5, 1, 6, 9))
    if self._save_db_figure:
        label = 'EasyFigure {}'.format(name)
        editor.save_figure(label, project, lns)
def _start_recording(self, path, basename):
    """Start recording video to ``path``; when the target directory does not
    exist, fall back to a unique path under paths.video_dir."""
    self.info('start video recording {}'.format(path))
    d = os.path.dirname(path)
    if not os.path.isdir(d):
        self.warning('invalid directory {}'.format(d))
        self.warning('using default directory')
        path, _ = unique_path(paths.video_dir, basename, extension='avi')
    self.info('saving recording to path {}'.format(path))
    # if self.use_db:
    #     db = self.get_video_database()
    #     db.connect()
    #
    #     v = db.add_video_record(rid=basename)
    #     db.add_path(v, path)
    #     self.info('saving {} to database'.format(basename))
    #     db.commit()
    video = self.video

    def renderer(p):
        # crop the cached frame to the current frame size before saving
        cw, ch = self.get_frame_size()
        frame = video.get_cached_frame()
        frame = video.crop(frame, 0, 0, cw, ch)
        if frame is not None:
            pil_save(frame, p)

    if self.render_with_markup:
        # use the markup renderer (crosshairs etc.) instead of the plain crop
        renderer = self._render_snapshot
    self.video.start_recording(path, renderer)
def snapshot(self, path=None, name=None, auto=False, inform=True):
    """Take a snapshot of the current frame and upload it.

    path: abs path to use
    name: base name to use if auto saving in default dir
    auto: force auto save
    inform: show an information dialog after saving

    returns:
            path: local abs path
            upath: remote abs path
    (returns None when no path was chosen)
    """
    if path is None:
        if self.auto_save_snapshot or auto:
            if name is None:
                name = 'snapshot'
            # auto-generate a non-colliding path in the snapshot directory
            path, _cnt = unique_path(root=paths.snapshot_dir, base=name,
                                     extension='jpg')
        else:
            path = self.save_file_dialog()

    if path:
        self.info('saving snapshot {}'.format(path))
        # play camera shutter sound
        play_sound('shutter')
        self._render_snapshot(path)
        upath = self._upload(path)
        if inform:
            # BUG FIX: original format string had a single placeholder for
            # two arguments, so the upload path was silently dropped
            self.information_dialog(
                'Snapshot save to {}. Uploaded to {}'.format(path, upath))
        return path, upath
def snapshot(self, path=None, name=None, auto=False, inform=True):
    """Take a snapshot of the current frame and upload it.

    path: abs path to use
    name: base name to use if auto saving in default dir
    auto: force auto save
    inform: show an information dialog after saving

    returns:
            path: local abs path
            upath: remote abs path
    (returns None when no path was chosen)
    """
    if path is None:
        if self.auto_save_snapshot or auto:
            if name is None:
                name = 'snapshot'
            # auto-generate a non-colliding path in the snapshot directory
            path, _cnt = unique_path(root=paths.snapshot_dir, base=name,
                                     extension='jpg')
        else:
            path = self.save_file_dialog()

    if path:
        self.info('saving snapshot {}'.format(path))
        # play camera shutter sound
        play_sound('shutter')
        self._render_snapshot(path)
        upath = self._upload(path)
        if inform:
            # BUG FIX: original format string had a single placeholder for
            # two arguments, so the upload path was silently dropped
            self.information_dialog(
                'Snapshot save to {}. Uploaded to {}'.format(path, upath))
        return path, upath
def _dump_scan(self):
    """Save the coarse scan data to a fresh file in mass_calibration_scans."""
    out_dir = os.path.join(paths.data_dir, 'mass_calibration_scans')
    if not os.path.isdir(out_dir):
        os.mkdir(out_dir)
    out_path, _ = unique_path(out_dir, 'scan')
    savetxt(out_path, self._get_coarse_data())
def export_csv(self):
    """Export the 'export_apis' analyses to a new csv under data_dir/apis."""
    analyses = self.make_analyses(self._get_analyses('export_apis'),
                                  unpack=True)
    out_dir = os.path.join(paths.data_dir, 'apis')
    out_path, _ = unique_path(out_dir, 'data', extension='.csv')
    with open(out_path, 'w') as fh:
        out = csv.writer(fh)
        for analysis in analyses:
            self._write_analysis(analysis, out)
def __clear(self): ''' ''' # make a backup copy p, _cnt = unique_path(paths.root, 'laser_shot_history', 'bak') shutil.copy(self.pickle_path, p) os.remove(self.pickle_path) self.history = []
def import_irradiation(self, dest, name, progress,
                       include_analyses=False,
                       include_blanks=False,
                       include_airs=False,
                       include_cocktails=False,
                       include_list=None,
                       dry_run=True):
    """Copy irradiation ``name`` (chronology, levels, positions and
    optionally analyses) from the source database into ``dest``.

    Returns an ImportName whose ``skipped`` flag is True when nothing
    was added. Errors encountered are written to a fresh 'import' file
    in paths.data_dir.
    """
    self.connect()
    p, c = unique_path(paths.data_dir, 'import')
    self.import_err_file = open(p, 'w')
    # with dest.session_ctx(commit=not dry_run) as sess:
    self.dbimport = dest.add_import(source=self.db.name,
                                    source_host=self.db.host)
    # is irrad already in dest
    dbirrad = dest.get_irradiation(name)
    added_to_db = False
    if dbirrad is None:
        # add chronology
        dbchron = self._add_chronology(dest, name)
        # # add production
        # dbpr = self._add_production_ratios(dest, name)
        # add irradiation
        dbirrad = dest.add_irradiation(name, chronology=dbchron)
        added_to_db = True
        # flush so the new irradiation is visible before levels are added
        dest.sess.flush()
    if dbirrad:
        # add all the levels and positions for this irradiation
        added_to_db = self._add_levels(dest, progress, dbirrad,
                                       include_analyses, include_blanks,
                                       include_airs, include_cocktails,
                                       include_list, dry_run=dry_run)
    else:
        self.warning(
            'no irradiation found or created for {}. not adding levels'.
            format(name))
    self.debug('irradiation import dry_run={}'.format(dry_run))
    #if not dry_run:
    #    dest.sess.commit()
    self.import_err_file.close()
    return ImportName(name=name, skipped=not added_to_db)
def _save_table(self, editor, root, ident, tag):
    """Save the editor's table for ``ident`` once per requested file type."""
    ft = ('pdf', 'xls', 'csv')
    # human readable list of supported types, e.g. "pdf, xls or csv"
    sft = ', '.join(ft[:-1])
    sft = '{} or {}'.format(sft, ft[-1])
    for ext in self._file_types:
        if ext not in ft:
            # NOTE(review): this only warns; the file is still written with
            # the unsupported extension -- confirm a ``continue`` isn't intended
            self.warning('Invalid file type "{}". Use "{}"'.format(ext, sft))
        p, _ = unique_path(root, '{}_{}_table'.format(tag, ident),
                           extension='.{}'.format(ext))
        editor.save_file(p, title='Ar/Ar Step heat data')
def compress(self, base='archive'):
    """Zip the contents of ``self.root`` into a sibling archive and return
    the archive's basename.

    Hidden files and the archive file itself are skipped.
    """
    archive_path, _ = unique_path(os.path.dirname(self.root), base,
                                  extension='zip')
    archive_name = os.path.basename(archive_path)
    with ZipFile(archive_path, 'w', compression=ZIP_DEFLATED) as zf:
        for entry in os.listdir(self.root):
            # skip dotfiles and the archive currently being written
            if entry.startswith('.') or entry == archive_name:
                continue
            zf.write(os.path.join(self.root, entry), entry)
    return archive_name
def _make(self, ep):
    """Compare stored interpreted ages against freshly calculated isochron
    ages for the identifiers in ``ep``'s first doc, writing a text report."""
    opt = ep.doc(0)
    db = self.db
    with db.session_ctx():
        ids = opt['identifiers']
        progress = self.open_progress(n=len(ids), close_at_end=False)
        editor = InverseIsochronEditor(processor=self)
        editor.plotter_options_manager.set_plotter_options('Default')
        p, _ = unique_path(
            os.path.join(paths.dissertation, 'data', 'minnabluff'),
            'compare_iso_spec')
        wfile = open(p, 'w')
        for i in ids:
            # most recent interpreted-age history for this identifier
            hist = db.get_interpreted_age_histories((i, ))[-1]
            li = db.get_labnumber(i)
            ans = self._get_analyses(li)
            if ans:
                progress.change_message(
                    'Calculating isochron for {}'.format(i))
                unks = self.make_analyses(ans, use_progress=False,
                                          use_cache=False)
                age, reg, _ = calculate_isochron(unks)
                # print self._calculate_intercept(reg)
                iaage = hist.interpreted_age.age
                iaerr = hist.interpreted_age.age_err
                ii, ee = self._calculate_intercept(reg)
                # 2-sigma error, inflated by sqrt(MSWD) when MSWD > 1
                ee2 = 2 * ee * (reg.mswd**0.5 if reg.mswd > 1 else 1)
                # classify trapped intercept relative to 295.5
                # (atmospheric 40Ar/36Ar)
                comp = 'EQ'
                if ii - ee2 > 295.5:
                    comp = 'GT'
                elif ii + ee2 < 295.5:
                    comp = 'LT'
                t0 = 'Identifier: {}'.format(li.identifier)
                t00 = 'Sample: {}'.format(li.sample.name)
                t1 = 'InterpretedAge: {}+/-{}'.format(iaage, iaerr)
                t2 = 'IsochronAge: {}'.format(age)
                t3 = 'Dev: {} ({:0.2f}%)'.format(
                    age - iaage, (age - iaage) / iaage * 100)
                t4 = 'TrappedComponent: {:0.2f}+/-{:0.3f}'.format(ii, ee)
                t5 = 'TrappedComparison: {}'.format(comp)
                t = '\n'.join((t0, t00, t1, t2, t3, t4, t5))
                # print t
                wfile.write(t + '\n---\n')
                # editor.set_items(unks)
                # editor.rebuild()
                # print 'exception', editor.get_trapped_component()
        wfile.close()
        progress.close()
def save_summary_table(self, root, auto_view=False):
    """Build a summary pdf table of the interpreted ages under ``root``."""
    writer = SummaryPDFTableWriter()
    writer.options = self.pdf_table_options
    out, _ = unique_path(root, '{}_summary'.format(self.name),
                         extension='.pdf')
    writer.build(out, self.interpreted_ages, self.get_title())
    if auto_view:
        view_file(out)
def _start_recording(
        self,
        path=None,
        basename='vm_recording',
        use_dialog=False,
        user='******',
):
    """Start video recording, resolving the output path from a dialog or
    the video archiver root when none is given."""
    if path is None:
        if use_dialog:
            path = self.save_file_dialog()
        else:
            vd = self.video_archiver.root
            self.debug('video archiver root {}'.format(vd))
            if vd is None:
                vd = paths.video_dir
            path, _ = unique_path(vd, basename, extension='avi')
    self.info('start video recording {}'.format(path))
    d = os.path.dirname(path)
    if not os.path.isdir(d):
        self.warning('invalid directory {}'.format(d))
        self.warning('using default directory')
        # fall back to the default video directory
        path, _ = unique_path(paths.video_dir, basename, extension='avi')
    self.info('saving recording to path {}'.format(path))
    # if self.use_db:
    #     db = self.get_video_database()
    #     db.connect()
    #
    #     v = db.add_video_record(rid=basename)
    #     db.add_path(v, path)
    #     self.info('saving {} to database'.format(basename))
    #     db.commit()
    renderer = None
    if self.render_with_markup:
        renderer = self._render_snapshot
    self.video.start_recording(path, renderer)
def import_irradiation(self, dest, name, progress,
                       include_analyses=False,
                       include_blanks=False,
                       include_airs=False,
                       include_cocktails=False,
                       include_list=None,
                       dry_run=True):
    """Copy irradiation ``name`` (chronology, levels, positions and
    optionally analyses) from the source database into ``dest``.

    Returns an ImportName whose ``skipped`` flag is True when nothing
    was added. Errors encountered are written to a fresh 'import' file
    in paths.data_dir.
    """
    self.connect()
    p, c = unique_path(paths.data_dir, 'import')
    self.import_err_file = open(p, 'w')
    # with dest.session_ctx(commit=not dry_run) as sess:
    self.dbimport = dest.add_import(
        source=self.db.name,
        source_host=self.db.host)
    # is irrad already in dest
    dbirrad = dest.get_irradiation(name)
    added_to_db = False
    if dbirrad is None:
        # add chronology
        dbchron = self._add_chronology(dest, name)
        # # add production
        # dbpr = self._add_production_ratios(dest, name)
        # add irradiation
        dbirrad = dest.add_irradiation(name, chronology=dbchron)
        added_to_db = True
        # flush so the new irradiation is visible before levels are added
        dest.sess.flush()
    if dbirrad:
        # add all the levels and positions for this irradiation
        added_to_db = self._add_levels(dest, progress, dbirrad,
                                       include_analyses, include_blanks,
                                       include_airs, include_cocktails,
                                       include_list, dry_run=dry_run)
    else:
        self.warning('no irradiation found or created for {}. not adding levels'.format(name))
    self.debug('irradiation import dry_run={}'.format(dry_run))
    #if not dry_run:
    #    dest.sess.commit()
    self.import_err_file.close()
    return ImportName(name=name, skipped=not added_to_db)
def _make(self, ep):
    """Compare stored interpreted ages against freshly calculated isochron
    ages for the identifiers in ``ep``'s first doc, writing a text report."""
    opt = ep.doc(0)
    db = self.db
    with db.session_ctx():
        ids = opt['identifiers']
        progress = self.open_progress(n=len(ids), close_at_end=False)
        editor = InverseIsochronEditor(processor=self)
        editor.plotter_options_manager.set_plotter_options('Default')
        p, _ = unique_path(os.path.join(paths.dissertation, 'data',
                                        'minnabluff'),
                           'compare_iso_spec')
        fp = open(p, 'w')
        for i in ids:
            # most recent interpreted-age history for this identifier
            hist = db.get_interpreted_age_histories((i,))[-1]
            li = db.get_labnumber(i)
            ans = self._get_analyses(li)
            if ans:
                progress.change_message('Calculating isochron for {}'.format(i))
                unks = self.make_analyses(ans, use_progress=False,
                                          use_cache=False)
                age, reg, _ = calculate_isochron(unks)
                # print self._calculate_intercept(reg)
                iaage = hist.interpreted_age.age
                iaerr = hist.interpreted_age.age_err
                ii, ee = self._calculate_intercept(reg)
                # 2-sigma error, inflated by sqrt(MSWD) when MSWD > 1
                ee2 = 2 * ee * (reg.mswd ** 0.5 if reg.mswd > 1 else 1)
                # classify trapped intercept relative to 295.5
                # (atmospheric 40Ar/36Ar)
                comp = 'EQ'
                if ii - ee2 > 295.5:
                    comp = 'GT'
                elif ii + ee2 < 295.5:
                    comp = 'LT'
                t0 = 'Identifier: {}'.format(li.identifier)
                t00 = 'Sample: {}'.format(li.sample.name)
                t1 = 'InterpretedAge: {}+/-{}'.format(iaage, iaerr)
                t2 = 'IsochronAge: {}'.format(age)
                t3 = 'Dev: {} ({:0.2f}%)'.format(age - iaage,
                                                 (age - iaage) / iaage * 100)
                t4 = 'TrappedComponent: {:0.2f}+/-{:0.3f}'.format(ii, ee)
                t5 = 'TrappedComparison: {}'.format(comp)
                t = '\n'.join((t0, t00, t1, t2, t3, t4, t5))
                # print t
                fp.write(t + '\n---\n')
                # editor.set_items(unks)
                # editor.rebuild()
                # print editor.get_trapped_component()
        fp.close()
        progress.close()
def write(self, db, ias):
    """Write the assembled TAS data to a fresh csv, emitting one
    blank-row-separated block per group."""
    rows = self._assemble_data(db, ias)
    out_dir = os.path.join(paths.dissertation, 'data', 'minnabluff',
                           'interpreted_ages')
    out_path, _ = unique_path(out_dir, 'tas.csv')
    with open(out_path, 'w') as fh:
        writer = csv.writer(fh)
        writer.writerow(['sample', 'sio2', 'total_alk', 'age'])
        for _key, group in groupby(rows, key=lambda r: int(r[-1])):
            writer.writerows(group)
            writer.writerow([])
def write(self, db, ias):
    """Write the assembled TAS data to a fresh csv, emitting one
    blank-row-separated block per group."""
    data = self._assemble_data(db, ias)
    target_dir = os.path.join(paths.dissertation, 'data', 'minnabluff',
                              'interpreted_ages')
    target, _ = unique_path(target_dir, 'tas.csv')
    grouper = lambda row: int(row[-1])
    with open(target, 'w') as out:
        w = csv.writer(out)
        w.writerow(['sample', 'sio2', 'total_alk', 'age'])
        for _g, block in groupby(data, key=grouper):
            w.writerows(block)
            w.writerow([])
def _start_recording(self, path=None, basename='vm_recording',
                     use_dialog=False,
                     user='******',
                     ):
    """Start video recording, resolving the output path from a dialog or
    the video archiver root when none is given."""
    if path is None:
        if use_dialog:
            path = self.save_file_dialog()
        else:
            vd = self.video_archiver.root
            self.debug('video archiver root {}'.format(vd))
            if vd is None:
                vd = paths.video_dir
            path, _ = unique_path(vd, basename, extension='avi')
    self.info('start video recording {}'.format(path))
    d = os.path.dirname(path)
    if not os.path.isdir(d):
        self.warning('invalid directory {}'.format(d))
        self.warning('using default directory')
        # fall back to the default video directory
        path, _ = unique_path(paths.video_dir, basename, extension='avi')
    self.info('saving recording to path {}'.format(path))
    # if self.use_db:
    #     db = self.get_video_database()
    #     db.connect()
    #
    #     v = db.add_video_record(rid=basename)
    #     db.add_path(v, path)
    #     self.info('saving {} to database'.format(basename))
    #     db.commit()
    renderer = None
    if self.render_with_markup:
        renderer = self._render_snapshot
    self.video.start_recording(path, renderer)
def _save_pdf_data_table(self, root, ias, step_heat_title, fusion_title,
                         spectrometer, auto_view=False):
    """Write step-heat and fusion pdf data tables for ``ias`` into ``root``."""
    shgroups, fgroups = self._assemble_groups(ias)
    ext = '.pdf'
    if shgroups:
        w = StepHeatPDFTableWriter()
        # name = '{}_{}_step_heatdata'.format(self.name, spectrometer)
        name = '{}stepheatdata'.format(spectrometer)
        p, _ = unique_path(root, name, extension=ext)
        # NOTE(review): iagroups is unused here (the XLS variant passes it
        # to build) -- confirm the pdf writers really don't need it
        iagroups, shgroups = zip(*shgroups)
        w.build(p, shgroups, title=step_heat_title)
        if auto_view:
            view_file(p)
    if fgroups:
        w = FusionPDFTableWriter()
        # name = '{}_{}_fusion_data'.format(self.name, spectrometer)
        name = '{}fusiondata'.format(spectrometer)
        p, _ = unique_path(root, name, extension=ext)
        iagroups, fgroups = zip(*fgroups)
        w.build(p, fgroups, title=fusion_title)
        if auto_view:
            view_file(p)
def start_recording(self, path=None, use_dialog=False):
    """Begin recording video, prompting for or generating an output path
    when one is not supplied."""
    self.info('start video recording ')
    if path is None:
        if use_dialog:
            path = self.save_file_dialog()
        else:
            path = unique_path(paths.video_dir, 'vm_recording',
                               extension='avi')[0]
    self.info('saving recording to path {}'.format(path))
    # self.start()
    self.video.start_recording(path)
def generate_results(self):
    """Write an xls summary of measured positions for the current load."""
    self.debug('generate results')
    dvc = self.dvc
    db = dvc.db
    # flatten and sort hole positions across the load's position groups
    positions = sorted([pp for p in self.positions for pp in p.positions])
    wb = Workbook()
    sh = wb.add_sheet('Results')
    for i, attr in enumerate(
            ('Analysis', 'Position', 'Age', 'Error', 'Weight', 'Note')):
        # NOTE(review): ``wb.sheet(0, i, attr)`` looks suspicious -- writing
        # the header row would normally be ``sh.write(0, i, attr)``; confirm
        wb.sheet(0, i, attr)
    # track the next row to write; header occupies row 0
    wb.nrows = 1

    def func(x, prog, i, n):
        # one output row per measured position record for hole ``x``
        dbmps = db.get_measured_positions(self.load_name, x)
        dbpos = db.get_load_position(self.load_name, x)
        weight, note = dbpos.weight, dbpos.note
        for dbmp in dbmps:
            rid = dbmp.analysis.record_id
            # rid = 1
            if prog:
                prog.change_message('Write results for {},{}'.format(
                    rid, x))
            # ai = dvc.make_analyses((rid,))
            # placeholder values; the age calculation is commented out above
            age, error = 0, 0
            sh.write(wb.nrows, 0, rid)
            sh.write(wb.nrows, 1, x)
            sh.write(wb.nrows, 2, age)
            sh.write(wb.nrows, 3, error)
            sh.write(wb.nrows, 4, weight)
            sh.write(wb.nrows, 5, note)
            wb.nrows += 1

    with db.session_ctx():
        progress_iterator(positions, func, threshold=1)
    path, _ = unique_path(paths.load_results_dir, self.load_name,
                          extension='.xls')
    wb.save(path)
def generate_results(self):
    """Write an xls summary of measured positions for the current load."""
    self.debug('generate results')
    dvc = self.dvc
    db = dvc.db
    # flatten and sort hole positions across the load's position groups
    positions = sorted([pp for p in self.positions for pp in p.positions])
    wb = Workbook()
    sh = wb.add_sheet('Results')
    for i, attr in enumerate(('Analysis', 'Position', 'Age', 'Error',
                              'Weight', 'Note')):
        # NOTE(review): ``wb.sheet(0, i, attr)`` looks suspicious -- writing
        # the header row would normally be ``sh.write(0, i, attr)``; confirm
        wb.sheet(0, i, attr)
    # track the next row to write; header occupies row 0
    wb.nrows = 1

    def func(x, prog, i, n):
        # one output row per measured position record for hole ``x``
        dbmps = db.get_measured_positions(self.load_name, x)
        dbpos = db.get_load_position(self.load_name, x)
        weight, note = dbpos.weight, dbpos.note
        for dbmp in dbmps:
            rid = dbmp.analysis.record_id
            # rid = 1
            if prog:
                prog.change_message('Write results for {},{}'.format(rid, x))
            # ai = dvc.make_analyses((rid,))
            # placeholder values; the age calculation is commented out above
            age, error = 0, 0
            sh.write(wb.nrows, 0, rid)
            sh.write(wb.nrows, 1, x)
            sh.write(wb.nrows, 2, age)
            sh.write(wb.nrows, 3, error)
            sh.write(wb.nrows, 4, weight)
            sh.write(wb.nrows, 5, note)
            wb.nrows += 1

    with db.session_ctx():
        progress_iterator(positions, func, threshold=1)
    path, _ = unique_path(paths.load_results_dir, self.load_name,
                          extension='.xls')
    wb.save(path)
def _do_fit_blanks(self, gs, fits, atype, root, save_figure, with_table):
    '''
        fit this block of analyses

        gs: the block of analyses (ordered by timestamp)
        fits: mapping of isotope name -> fit to apply
        atype: analysis type; blanks of type 'blank_<atype>' are used
    '''
    # bracket the block with a ~1 hr buffer to find associated blanks
    start, end = gs[0].analysis_timestamp, gs[-1].analysis_timestamp
    ds = timedelta(minutes=59)
    atypes = ('blank_{}'.format(atype), )
    blanks = self._get_analysis_date_range(start - ds, end + ds, atypes)
    if blanks:
        man = self.processor
        blanks = man.make_analyses(blanks)
        gs = man.make_analyses(gs)
        man.load_analyses(gs, show_progress=False)
        man.load_analyses(blanks, show_progress=False)
        # the first analysis defines the isotopes/fits for the editor tool
        refiso = gs[0]
        ae = self.editor
        ae.tool.load_fits(refiso.isotope_keys, refiso.isotope_fits)
        fkeys = fits.keys()
        for fi in ae.tool.fits:
            if fi.name in fkeys:
                fi.trait_set(show=True, fit=fits[fi.name],
                             trait_change_notify=False)
        ae.unknowns = gs
        ae.references = blanks
        ae.rebuild_graph()
        if save_figure:
            p, _ = unique_path(root, base=refiso.record_id, extension='.pdf')
            if with_table:
                # NOTE: BlanksPDFWrtier is the (misspelled) class name as
                # defined elsewhere in the project
                writer = BlanksPDFWrtier()
                writer.build(p, ae.component, gs, blanks)
            else:
                ae.graph.save_pdf(p)
def _do_fit_blanks(self, gs, fits, atype, root, save_figure, with_table):
    '''
        fit this block of analyses

        gs: the block of analyses (ordered by timestamp)
        fits: mapping of isotope name -> fit to apply
        atype: analysis type; blanks of type 'blank_<atype>' are used
    '''
    # bracket the block with a ~1 hr buffer to find associated blanks
    start, end = gs[0].analysis_timestamp, gs[-1].analysis_timestamp
    ds = timedelta(minutes=59)
    atypes = ('blank_{}'.format(atype),)
    blanks = self._get_analysis_date_range(start - ds, end + ds, atypes)
    if blanks:
        man = self.processor
        blanks = man.make_analyses(blanks)
        gs = man.make_analyses(gs)
        man.load_analyses(gs, show_progress=False)
        man.load_analyses(blanks, show_progress=False)
        # the first analysis defines the isotopes/fits for the editor tool
        refiso = gs[0]
        ae = self.editor
        ae.tool.load_fits(refiso.isotope_keys,
                          refiso.isotope_fits
                          )
        fkeys = fits.keys()
        for fi in ae.tool.fits:
            if fi.name in fkeys:
                fi.trait_set(show=True, fit=fits[fi.name],
                             trait_change_notify=False)
        ae.unknowns = gs
        ae.references = blanks
        ae.rebuild_graph()
        if save_figure:
            p, _ = unique_path(root, base=refiso.record_id, extension='.pdf')
            if with_table:
                # NOTE: BlanksPDFWrtier is the (misspelled) class name as
                # defined elsewhere in the project
                writer = BlanksPDFWrtier()
                writer.build(p, ae.component, gs, blanks)
            else:
                ae.graph.save_pdf(p)
def _do_detector_intercalibration(self, gs, fit, reftype, root, save_figure,
                                  with_table):
    """Fit the Ar40/Ar36 detector intercalibration for this block of
    analyses against reference analyses found within +/-3 hr."""
    start, end = gs[0].analysis_timestamp, gs[-1].analysis_timestamp
    ds = timedelta(hours=3)
    refs = self._get_analysis_date_range(start - ds, end + ds, (reftype, ))
    if refs:
        man = self.processor
        refs = man.make_analyses(refs)
        gs = man.make_analyses(gs)
        man.load_analyses(gs, show_progress=False)
        man.load_analyses(refs, show_progress=False)
        # first analysis supplies the reference record id for the figure
        refiso = gs[0]
        ae = self.editor
        # ae.tool.load_fits(ks, fs)
        ae.tool.load_fits(['Ar40/Ar36'], [fit])
        ae.tool.fits[0].show = True
        # fkeys = fits.keys()
        # for fi in ae.tool.fits:
        #     if fi.name in fkeys:
        #         fi.trait_set(show=True, fit=fits[fi.name], trait_change_notify=False)
        ae.unknowns = gs
        ae.references = refs
        ae.rebuild_graph()
        if save_figure:
            p, _ = unique_path(root, base=refiso.record_id, extension='.pdf')
            if with_table:
                writer = DetectorIntercalibrationPDFWriter()
                writer.build(p, ae.component, gs, refs)
            else:
                ae.graph.save_pdf(p)
def _do_detector_intercalibration(self, gs, fit, reftype, root, save_figure,
                                  with_table):
    """Fit the Ar40/Ar36 detector intercalibration for this block of
    analyses against reference analyses found within +/-3 hr."""
    start, end = gs[0].analysis_timestamp, gs[-1].analysis_timestamp
    ds = timedelta(hours=3)
    refs = self._get_analysis_date_range(start - ds, end + ds, (reftype,))
    if refs:
        man = self.processor
        refs = man.make_analyses(refs)
        gs = man.make_analyses(gs)
        man.load_analyses(gs, show_progress=False)
        man.load_analyses(refs, show_progress=False)
        # first analysis supplies the reference record id for the figure
        refiso = gs[0]
        ae = self.editor
        # ae.tool.load_fits(ks, fs)
        ae.tool.load_fits(['Ar40/Ar36'], [fit])
        ae.tool.fits[0].show = True
        # fkeys = fits.keys()
        # for fi in ae.tool.fits:
        #     if fi.name in fkeys:
        #         fi.trait_set(show=True, fit=fits[fi.name], trait_change_notify=False)
        ae.unknowns = gs
        ae.references = refs
        ae.rebuild_graph()
        if save_figure:
            p, _ = unique_path(root, base=refiso.record_id, extension='.pdf')
            if with_table:
                writer = DetectorIntercalibrationPDFWriter()
                writer.build(p, ae.component, gs, refs)
            else:
                ae.graph.save_pdf(p)
def _start_recording(self, path, basename):
    """Start recording video to ``path``, optionally cropping each frame to
    the stage hole and overlaying crosshair markup before saving."""
    self.info('start video recording {}'.format(path))
    d = os.path.dirname(path)
    if not os.path.isdir(d):
        self.warning('invalid directory {}'.format(d))
        self.warning('using default directory')
        path, _ = unique_path(paths.video_dir, basename, extension='avi')
    self.info('saving recording to path {}'.format(path))
    # if self.use_db:
    #     db = self.get_video_database()
    #     db.connect()
    #
    #     v = db.add_video_record(rid=basename)
    #     db.add_path(v, path)
    #     self.info('saving {} to database'.format(basename))
    #     db.commit()
    video = self.video
    crop_to_hole = True
    # crop dimension (pixels) derived from the stage map hole dimension
    dim = self.stage_map.g_dimension
    cropdim = dim * 8 * self.pxpermm
    color = self.canvas.crosshairs_color.getRgb()[:3]
    # crosshair radius in pixels
    r = int(self.canvas.get_crosshairs_radius() * self.pxpermm)

    # offx, offy = self.canvas.get_screen_offset()
    def renderer(p):
        # cw, ch = self.get_frame_size()
        frame = video.get_cached_frame()
        if frame is not None:
            if not len(frame.shape):
                # zero-dimensional frame; nothing to render
                return
            # copy so markup is not drawn into the cached frame
            frame = copy(frame)
            # ch, cw, _ = frame.shape
            # ch, cw = int(ch), int(cw)
            if crop_to_hole:
                frame = video.crop(frame, 0, 0, cropdim, cropdim)
            if self.render_with_markup:
                # draw crosshairs
                if len(frame.shape) == 2:
                    # grayscale frame; promote to rgb so color can be applied
                    frame = gray2rgb(frame)
                ch, cw, _ = frame.shape
                ch, cw = int(ch), int(cw)
                y = ch // 2
                x = cw // 2
                cp = circle_perimeter(y, x, r, shape=(ch, cw))
                frame[cp] = color
                frame[line(y, 0, y, x - r)] = color  # left
                frame[line(y, x + r, y, int(cw) - 1)] = color  # right
                frame[line(0, x, y - r, x)] = color  # bottom
                frame[line(y + r, x, int(ch) - 1, x)] = color  # top
            if frame is not None:
                pil_save(frame, p)

    self.video.start_recording(path, renderer)
import gc
# BUG FIX: ``os`` is used below (os.path.join/os.path.isdir/os.mkdir)
# but was never imported
import os
import sys
import cPickle
from itertools import groupby

import psutil

from pychron.core.helpers.filetools import unique_path

# import objgraph

USE_MEM_LOG = False

if USE_MEM_LOG:
    root = os.path.join(os.path.expanduser('~'), 'Desktop', 'memtest')
    if not os.path.isdir(root):
        os.mkdir(root)
    p, _ = unique_path(root, 'mem')


def write_mem(msg, m, verbose):
    """Append '<msg>:<m>' to the memory log, echoing to stdout when verbose."""
    # NOTE(review): ``p`` appears to already be a full path from unique_path
    # (other call sites open it directly); joining with root is presumably a
    # no-op -- confirm
    with open(os.path.join(root, p), 'a') as wfile:
        msg = '{:<50s}:{}\n'.format(msg, m)
        wfile.write(msg)
        if verbose:
            # print-as-function form is valid on both Python 2 and 3
            print(msg.strip())


PID = None


def mem_break():
    """Write a separator line to the memory log."""
    write_mem('#' + '=' * 49, '')
def _execute_calibration(self):
    """Step the laser through the temperature ramp defined by
    <name>_calibration_scan.yaml, writing each equilibrated pyrometer and
    thermocouple temperature pair to a fresh csv."""
    name = os.path.join(paths.scripts_dir,
                        '{}_calibration_scan.yaml'.format(self.name))
    import csv
    d = os.path.join(paths.data_dir, 'diode_scans')
    p, _cnt = unique_path(d, 'calibration', extension='csv')
    # st = None
    #
    # py = self.laser_manager.pyrometer
    # tc = self.laser_manager.get_device('temperature_monitor')
    g = StreamStackedGraph()
    g.clear()
    # two stacked stream plots: pyrometer and thermocouple temperatures
    g.new_plot(scan_delay=1)
    g.new_series(x=[], y=[])
    g.new_plot(scan_delay=1)
    g.new_series(x=[], y=[], plotid=1)
    open_view(g)
    record = False
    if record:
        self.laser_manager.stage_manager.start_recording()
        time.sleep(1)

    # def gfunc(t, v1, v2):
    #     g.add_datum((t, v1))
    #     g.add_datum((t, v2), plotid=1)

    def gfunc(v1, v2):
        # stream the two temperatures onto their respective plots
        g.record(v1)
        g.record(v2, plotid=1)

    # NOTE(review): yaml.load without an explicit Loader is unsafe on
    # untrusted input; the file here comes from the local scripts dir
    yd = yaml.load(open(name).read())
    start = yd['start']
    end = yd['end']
    step = yd['step']
    mean_tol = yd['mean_tol']
    std = yd['std']
    # number of ramp points, inclusive of both endpoints
    n = (end - start) / step + 1
    # nn = 30
    #
    # py = self.laser_manager.pyrometer
    # tc = self.laser_manager.get_device('temperature_monitor')
    with open(p, 'w') as wfile:
        writer = csv.writer(wfile)
        st = time.time()
        for ti in linspace(start, end, n):
            if self._cancel:
                break
            args = self._equilibrate_temp(ti, gfunc, st, mean_tol, std)
            if args:
                self.info('{} equilibrated'.format(ti))
                py_t, tc_t = args
                writer.writerow((ti, py_t, tc_t))
            else:
                # equilibration failed; abort the ramp
                break
    self.laser_manager.set_laser_temperature(0)
    if record:
        self.laser_manager.stage_manager.stop_recording()
    self._executing = False
def _execute_scan(self):
    """Run a timed laser power/temperature scan defined by <name>_scan.yaml,
    logging pyrometer and thermocouple temperatures once per step to a csv.

    The laser is switched on at ``power_on`` seconds and off again at
    ``power_off``; ``temp`` (optional) selects temperature- rather than
    power-controlled operation.
    """
    name = os.path.join(paths.scripts_dir, '{}_scan.yaml'.format(self.name))
    import csv
    d = os.path.join(paths.data_dir, 'diode_scans')
    p, _cnt = unique_path(d, 'scan', extension='csv')
    st = None
    py = self.laser_manager.pyrometer
    tc = self.laser_manager.get_device('temperature_monitor')
    # NOTE(review): yaml.load without an explicit Loader is unsafe on
    # untrusted input; the file here comes from the local scripts dir
    yd = yaml.load(open(name).read())
    power = yd['power']
    duration = yd['duration']
    power_on = yd['power_on']
    power_off = yd['power_off']
    period = yd['period']
    if 'temp' in yd:
        temp = yd['temp']
    else:
        temp = None
    g = StreamStackedGraph()
    # two stacked stream plots: pyrometer and thermocouple temperatures
    g.new_plot(scan_delay=1, )
    g.new_series(x=[], y=[])
    g.new_plot(scan_delay=1, )
    g.new_series(x=[], y=[], plotid=1)
    open_view(g)
    self.laser_manager.stage_manager.start_recording()
    time.sleep(1)

    def gfunc(v1, v2):
        # stream the two temperatures onto their respective plots
        g.record(v1)
        g.record(v2, plotid=1)

    pi = 0
    with open(p, 'w') as wfile:
        writer = csv.writer(wfile)
        t = 0
        ti = 0
        while ti <= duration:
            if self._cancel:
                break
            # print ti, power_off, pi, ti >= power_off, (ti >= power_off and pi)
            if ti == power_on:
                # turn on set laser to power
                if temp:
                    self.laser_manager.set_laser_temperature(temp)
                    pi = temp
                else:
                    pi = power
                    self.laser_manager.set_laser_power(power)
            elif ti >= power_off and pi:
                # FIX: Py2-only ``print`` statement replaced with the
                # function form, valid on both Python 2 and 3
                print('setting power off')
                if temp:
                    self.laser_manager.set_laser_temperature(0)
                else:
                    self.laser_manager.set_laser_power(0)
                pi = 0
            if st is None:
                st = time.time()
            t = time.time() - st
            py_t = py.read_temperature(verbose=False)
            tc_t = tc.read_temperature(verbose=False)
            gfunc(py_t, tc_t)
            writer.writerow((ti, pi, t, py_t, tc_t))
            ti += 1
            time.sleep(period)
    if temp:
        self.laser_manager.set_laser_temperature(0)
    else:
        self.laser_manager.set_laser_power(0)
    self.laser_manager.stage_manager.stop_recording()
    self._executing = False
def __init__(self, *args, **kw):
    """Initialize the visualizer and allocate a fresh output path."""
    super(StageVisualizer, self).__init__(*args, **kw)
    # p = os.path.join(data_dir, 'stage_visualizer')
    vis_path, _ = unique_path(paths.stage_visualizer_dir, 'vis', extension='')
    self.path = vis_path
def make_strat_canvas_file(self):
    """Build a stratigraphic-sequence yaml for a hard-coded identifier
    list, then render it to pdf with a StratCanvas."""
    # NOTE(review): the first two identifier lists are dead assignments;
    # only the last one takes effect
    identifiers = ['57735', '57742', '57734', '57737', '57736', '57744',
                   '57743', '57725', '58627']
    identifiers = ['57731', '58612', '58620']
    identifiers = ['58616', '57719', '58767']
    db = self.db
    root = os.path.join(paths.dissertation, 'data', 'minnabluff',
                        'strat_sequences')
    seqname = 'seq4'
    p, _ = unique_path(root, seqname, extension='.yaml')
    with db.session_ctx():
        items = []
        for i in identifiers:
            strat = {}
            ln = db.get_labnumber(i)
            sample = ln.sample
            ia = ln.selected_interpreted_age.interpreted_age
            # skip integrated ages; only plateau/isochron-style ages plotted
            if ia.age_kind != 'Integrated':
                strat['elevation'] = sample.elevation
                mat = sample.material.name
                strat['label'] = '{}({}) {}+/-{} ({})'.format(sample.name,
                                                              mat,
                                                              ia.age,
                                                              ia.age_err,
                                                              ia.age_kind)
                strat['age'] = ia.age
                strat['age_err'] = ia.age_err
                strat['mswd'] = ia.mswd
                items.append(strat)
        # order by elevation and flag entries inconsistent with their
        # stratigraphic neighbor
        syd = sorted(items, key=lambda x: x['elevation'])
        for i, yi in enumerate(syd[:-1]):
            # print i, yi['elevation'], yi['age']
            # ee2=2*yi['age_err']*yi['mswd']**0.5
            if not strat_ok(yi, syd[i + 1]):
                yi['color'] = 'red'
        yd = dict(options={}, items=items)
        import yaml
        with open(p, 'w') as wfile:
            yaml.dump(yd, wfile, default_flow_style=False)
        from pychron.canvas.canvas2D.strat_canvas import StratCanvas
        s = StratCanvas()
        s.load_scene(yd)
        # render the same sequence to a pdf alongside the yaml
        p, _ = unique_path(root, seqname, extension='.pdf')
        from chaco.pdf_graphics_context import PdfPlotGraphicsContext
        g = PdfPlotGraphicsContext(
            filename=p)
        s.do_layout(size=(500, 700), force=True)
        g.render_component(s)
        g.save()
def path(self):
    """Return a fresh, non-colliding report pdf path."""
    report_path, _ = unique_path(paths.report_dir, 'report', extension='.pdf')
    return report_path
def make_strat_canvas_file(self):
    """Build a stratigraphic-sequence yaml for a hard-coded identifier
    list, then render it to pdf with a StratCanvas."""
    # NOTE(review): the first two identifier lists are dead assignments;
    # only the last one takes effect
    identifiers = [
        '57735', '57742', '57734', '57737', '57736', '57744', '57743',
        '57725', '58627'
    ]
    identifiers = ['57731', '58612', '58620']
    identifiers = ['58616', '57719', '58767']
    db = self.db
    root = os.path.join(paths.dissertation, 'data', 'minnabluff',
                        'strat_sequences')
    seqname = 'seq4'
    p, _ = unique_path(root, seqname, extension='.yaml')
    with db.session_ctx():
        items = []
        for i in identifiers:
            strat = {}
            ln = db.get_labnumber(i)
            sample = ln.sample
            ia = ln.selected_interpreted_age.interpreted_age
            # skip integrated ages; only plateau/isochron-style ages plotted
            if ia.age_kind != 'Integrated':
                strat['elevation'] = sample.elevation
                mat = sample.material.name
                strat['label'] = '{}({}) {}+/-{} ({})'.format(
                    sample.name, mat, ia.age, ia.age_err, ia.age_kind)
                strat['age'] = ia.age
                strat['age_err'] = ia.age_err
                strat['mswd'] = ia.mswd
                items.append(strat)
        # order by elevation and flag entries inconsistent with their
        # stratigraphic neighbor
        syd = sorted(items, key=lambda x: x['elevation'])
        for i, yi in enumerate(syd[:-1]):
            # print i, yi['elevation'], yi['age']
            # ee2=2*yi['age_err']*yi['mswd']**0.5
            if not strat_ok(yi, syd[i + 1]):
                yi['color'] = 'red'
        yd = dict(options={}, items=items)
        import yaml
        with open(p, 'w') as wfile:
            yaml.dump(yd, wfile, default_flow_style=False)
        from pychron.canvas.canvas2D.strat_canvas import StratCanvas
        s = StratCanvas()
        s.load_scene(yd)
        # render the same sequence to a pdf alongside the yaml
        p, _ = unique_path(root, seqname, extension='.pdf')
        from chaco.pdf_graphics_context import PdfPlotGraphicsContext
        g = PdfPlotGraphicsContext(filename=p)
        s.do_layout(size=(500, 700), force=True)
        g.render_component(s)
        g.save()
def _execute_scan(self):
    """Run a timed laser power/temperature scan defined by <name>_scan.yaml,
    logging pyrometer and thermocouple temperatures once per step to a csv.

    The laser is switched on at ``power_on`` seconds and off again at
    ``power_off``; ``temp`` (optional) selects temperature- rather than
    power-controlled operation.
    """
    name = os.path.join(paths.scripts_dir, '{}_scan.yaml'.format(self.name))
    import csv
    d = os.path.join(paths.data_dir, 'diode_scans')
    p, _cnt = unique_path(d, 'scan', extension='csv')
    st = None
    py = self.laser_manager.pyrometer
    tc = self.laser_manager.get_device('temperature_monitor')
    # NOTE(review): yaml.load without an explicit Loader is unsafe on
    # untrusted input; the file here comes from the local scripts dir
    yd = yaml.load(open(name).read())
    power = yd['power']
    duration = yd['duration']
    power_on = yd['power_on']
    power_off = yd['power_off']
    period = yd['period']
    # FIX: dict.has_key was removed in Python 3; ``in`` works on both
    if 'temp' in yd:
        temp = yd['temp']
    else:
        temp = None
    g = StreamStackedGraph()
    # two stacked stream plots: pyrometer and thermocouple temperatures
    g.new_plot(scan_delay=1,)
    g.new_series(x=[], y=[])
    g.new_plot(scan_delay=1,)
    g.new_series(x=[], y=[], plotid=1)
    self.laser_manager.open_view(g)
    self.laser_manager.stage_manager.start_recording()
    time.sleep(1)

    def gfunc(v1, v2):
        # stream the two temperatures onto their respective plots
        g.record(v1)
        g.record(v2, plotid=1)

    pi = 0
    with open(p, 'w') as wfile:
        writer = csv.writer(wfile)
        t = 0
        ti = 0
        while ti <= duration:
            if self._cancel:
                break
            # print ti, power_off, pi, ti >= power_off, (ti >= power_off and pi)
            if ti == power_on:
                # turn on set laser to power
                if temp:
                    self.laser_manager.set_laser_temperature(temp)
                    pi = temp
                else:
                    pi = power
                    self.laser_manager.set_laser_power(power)
            elif ti >= power_off and pi:
                # FIX: Py2-only ``print`` statement replaced with the
                # function form, valid on both Python 2 and 3
                print('setting power off')
                if temp:
                    self.laser_manager.set_laser_temperature(0)
                else:
                    self.laser_manager.set_laser_power(0)
                pi = 0
            if st is None:
                st = time.time()
            t = time.time() - st
            py_t = py.read_temperature(verbose=False)
            tc_t = tc.read_temperature(verbose=False)
            gfunc(py_t, tc_t)
            writer.writerow((ti, pi, t, py_t, tc_t))
            ti += 1
            time.sleep(period)
    if temp:
        self.laser_manager.set_laser_temperature(0)
    else:
        self.laser_manager.set_laser_power(0)
    self.laser_manager.stage_manager.stop_recording()
    self._executing = False
def _execute_calibration(self):
    """Step the laser through the temperature ramp defined by
    <name>_calibration_scan.yaml, writing each equilibrated pyrometer and
    thermocouple temperature pair to a fresh csv."""
    name = os.path.join(paths.scripts_dir,
                        '{}_calibration_scan.yaml'.format(self.name))
    import csv
    d = os.path.join(paths.data_dir, 'diode_scans')
    p, _cnt = unique_path(d, 'calibration', extension='csv')
    # st = None
    #
    # py = self.laser_manager.pyrometer
    # tc = self.laser_manager.get_device('temperature_monitor')
    g = StreamStackedGraph()
    g.clear()
    # two stacked stream plots: pyrometer and thermocouple temperatures
    g.new_plot(scan_delay=1)
    g.new_series(x=[], y=[])
    g.new_plot(scan_delay=1)
    g.new_series(x=[], y=[], plotid=1)
    self.laser_manager.open_view(g)
    record = False
    if record:
        self.laser_manager.stage_manager.start_recording()
        time.sleep(1)

    # def gfunc(t, v1, v2):
    #     g.add_datum((t, v1))
    #     g.add_datum((t, v2), plotid=1)

    def gfunc(v1, v2):
        # stream the two temperatures onto their respective plots
        g.record(v1)
        g.record(v2, plotid=1)

    # NOTE(review): yaml.load without an explicit Loader is unsafe on
    # untrusted input; the file here comes from the local scripts dir
    yd = yaml.load(open(name).read())
    start = yd['start']
    end = yd['end']
    step = yd['step']
    mean_tol = yd['mean_tol']
    std = yd['std']
    # number of ramp points, inclusive of both endpoints
    n = (end - start) / step + 1
    # nn = 30
    #
    # py = self.laser_manager.pyrometer
    # tc = self.laser_manager.get_device('temperature_monitor')
    with open(p, 'w') as wfile:
        writer = csv.writer(wfile)
        st = time.time()
        for ti in linspace(start, end, n):
            if self._cancel:
                break
            args = self._equilibrate_temp(ti, gfunc, st, mean_tol, std)
            if args:
                self.info('{} equilibrated'.format(ti))
                py_t, tc_t = args
                writer.writerow((ti, py_t, tc_t))
            else:
                # equilibration failed; abort the ramp
                break
    self.laser_manager.set_laser_temperature(0)
    if record:
        self.laser_manager.stage_manager.stop_recording()
    self._executing = False
def _save(self, editor, root, pathname, name, project, lns):
    """Always save the editor's figure as a pdf; optionally record it in
    the database as well."""
    out, _ = unique_path(root, pathname, extension='.pdf')
    editor.save_file(out)
    if self._save_db_figure:
        label = 'EasyFigure {}'.format(name)
        editor.save_figure(label, project, lns)
# Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # =============================================================================== # ============= enthought library imports ======================= from __future__ import absolute_import import os from pychron.core.helpers.filetools import unique_path # ============= standard library imports ======================== # ============= local library imports ========================== root = os.path.join(os.path.expanduser('~'), 'Desktop', 'filelog') if not os.path.isdir(root): os.mkdir(root) p, _ = unique_path(root, 'mem') def file_log(msg): with open(os.path.join(root, p), 'a') as wfile: wfile.write('{}\n'.format(msg)) # ============= EOF =============================================