def _ramp_trap_current(self, v, step, period, use_ramp=False, tol=10):
    """Optionally ramp the source Trap current up to ``v``.

    Ramps only when ``use_ramp`` is set, the current reading succeeds, and
    the requested value exceeds the present value by at least ``tol``.
    Returns True when a ramp was performed; None otherwise.
    """
    if use_ramp:
        current = self.source.read_trap_current()
        if current is None:
            # cannot ramp from an unknown starting point
            self.debug('could not read current trap. skipping ramp')
            return

        if v - current >= tol:
            if self.confirmation_dialog('Would you like to ramp up the '
                                        'Trap current from {} to {}'.format(current, v)):
                prog = open_progress(1)

                def func(x):
                    # step callback: issue the set command for each intermediate value
                    cmd = 'SetParameter Trap Current Set,{:0.5f}'.format(x)
                    prog.change_message(cmd)
                    self.ask(cmd)
                    # returning True tells the ramper to continue; stop once the
                    # user accepts or cancels the progress dialog
                    if not prog.accepted and not prog.canceled:
                        return True

                r = StepRamper()

                steps = (v - current) / step
                prog.max = int(steps)
                r.ramp(func, current, v, step, period)
                prog.close()
                return True
def _save_to_db(self):
    """Persist the edited irradiated positions to the DVC database and meta repo.

    Aborts if the meta repository cannot be smart-pulled first. Commits and
    pushes the meta repo only when changes were staged.
    """
    db = self.dvc.db
    if not self.dvc.meta_repo.smart_pull():
        return

    with db.session_ctx():
        n = len(self.irradiated_positions)
        prog = open_progress(n)
        for ir in self.irradiated_positions:
            ln = ir.identifier

            dbpos = db.get_irradiation_position(self.irradiation, self.level, ir.hole)
            if not dbpos:
                dbpos = db.add_irradiation_position(self.irradiation, self.level, ir.hole)

            if ln:
                dbpos2 = db.get_identifier(ln)
                if dbpos2:
                    # identifier already in use; only an error if it belongs
                    # to a different irradiation
                    irradname = dbpos2.level.irradiation.name
                    if irradname != self.irradiation:
                        self.warning_dialog('Labnumber {} already exists '
                                            'in Irradiation {}'.format(ln, irradname))
                        return
                else:
                    dbpos.identifier = ln

            # dbpos.j = irs.j
            # dbpos.j_err = irs.j_err
            # flux values are stored in the meta repo, not the database
            self.dvc.meta_repo.update_flux(self.irradiation, self.level,
                                           ir.hole, ir.identifier, ir.j, ir.j_err)

            dbpos.weight = float(ir.weight or 0)
            dbpos.note = ir.note

            sam = ir.sample
            proj = ir.project
            mat = ir.material
            if proj:
                proj = db.add_project(proj)
            if mat:
                mat = db.add_material(mat)
            if sam:
                sam = db.add_sample(sam, project=proj, material=mat)
                dbpos.sample = sam

            prog.change_message('Saving {}{}{} identifier={}'.format(self.irradiation,
                                                                     self.level,
                                                                     ir.hole, ln))

    self.dirty = False
    self._level_changed(self.level)

    if self.dvc.meta_repo.has_staged():
        self.dvc.meta_commit('Labnumber Entry Save')
        self.dvc.meta_push()
def make_labbook(self, out):
    """Assemble a PDF labbook of irradiations.

    Prompts the user to select irradiations and PDF options, then writes
    the assembled document to ``out``.
    """
    db = self.dvc.db
    with db.session_ctx():
        names = [irrad.name for irrad in db.get_irradiations(order_func='asc')]

        selector = IrradiationTableView(irradiations=names)
        if not selector.edit_traits().result:
            return
        if not selector.selected:
            return

        writer = LabbookPDFWriter()
        if not writer.options.edit_traits().result:
            return

        chosen = db.get_irradiations(names=selector.selected, order_func='asc')
        total = sum([len(irrad.levels) for irrad in chosen])

        prog = open_progress(n=total)
        writer.build(out, chosen, progress=prog)
        prog.close()
def do_import(self, new_thread=True):
    """Import the selected items, optionally on a background thread.

    Returns the started Thread when ``new_thread`` is true, True when the
    import ran synchronously, and None when nothing was imported.
    """
    if self.import_kind != NULL_STR:
        selected = self.selected
        # if selected:
        if selected:
            # normalize selection to a list of (name, tuple) pairs
            if not isinstance(selected[0], tuple):
                selected = [(si.name, tuple()) for si in selected]

            # if self._import_thread and self._import_thread.isRunning():
            #     return
            if self.db.connect():
                # clear imported
                self.imported_names = []
                # self.db.reset()
                # self.db.save_username = '******'.format(self.db.username)
                self.info('====== Import Started ======')
                self.info('user name= {}'.format(self.db.save_username))

                # get import func from extractor
                # two progress ticks per selected item
                n = len(selected) * 2
                pd = open_progress(n=n)

                if new_thread:
                    t = Thread(target=self._do_import, args=(selected, pd))
                    t.start()
                    self._import_thread = t
                    return t
                else:
                    self._do_import(selected, pd)
                    return True
def _ramp_trap_current(self, v, step, period, use_ramp=False, tol=10):
    """Optionally ramp the source Trap current up to ``v``.

    Unlike the sibling variant that aborts when the trap current cannot be
    read, this version falls back to ramping from zero.
    """
    if use_ramp:
        current = self.source.read_trap_current()
        if current is None:
            # NOTE(review): assumes ramping from 0 is safe when the present
            # trap current cannot be read -- confirm with instrument behavior
            current = 0

        if v - current >= tol:
            if self.confirmation_dialog('Would you like to ramp up the '
                                        'Trap current from {} to {}'.format(current, v)):
                prog = open_progress(1)

                def func(x):
                    # step callback: send the set command for each intermediate value
                    cmd = 'SetParameter Trap Current Set,{:0.5f}'.format(x)
                    prog.change_message(cmd)
                    self.ask(cmd)
                    # continue ramping until the user accepts/cancels the dialog
                    if not prog.accepted and not prog.canceled:
                        return True

                r = StepRamper()

                steps = (v - current) / step
                prog.max = int(steps)
                r.ramp(func, current, v, step, period)
                prog.close()
                return True
def generate_identifiers(self):
    """Generate labnumbers for the current irradiation.

    Currently DISABLED: the early ``return`` below makes everything after it
    unreachable. The dead code is kept for when the feature is re-enabled.
    """
    self.warning('GENERATE LABNUMBERS DISABLED')
    return

    # --- unreachable while disabled ---
    if self.check_monitor_name():
        return

    ok = True
    ok = self.confirmation_dialog('Are you sure you want to generate the labnumbers for this irradiation?')
    if ok:
        ret = YES
        ret = self.confirmation_dialog('Overwrite existing labnumbers?',
                                       return_retval=True,
                                       cancel=True)
        if ret != CANCEL:
            overwrite = ret == YES
            lg = IdentifierGenerator(monitor_name=self.monitor_name,
                                     irradiation=self.irradiation,
                                     overwrite=overwrite,
                                     db=self.db)
            if lg.setup():
                prog = open_progress()
                lg.generate_identifiers(prog, overwrite)
                prog.close()
                self._update_level()
def _ramp_trap_current(self, v, step, period, use_ramp=False, tol=10):
    """Optionally ramp the source Trap current up to ``v``.

    This variant sets ``self.source.trap_current`` directly rather than
    issuing a textual SetParameter command.
    """
    if use_ramp:
        current = self.source.read_trap_current()
        if current is None:
            # cannot ramp from an unknown starting point
            self.debug('could not read current trap. skipping ramp')
            return

        if v - current >= tol:
            if self.confirmation_dialog('Would you like to ramp up the '
                                        'Trap current from {} to {}'.format(current, v)):
                prog = open_progress(1)

                def func(x):
                    # step callback: apply each intermediate current value
                    prog.change_message('Set Trap Current {}'.format(x))
                    self.source.trap_current = x
                    # continue ramping until the user accepts/cancels the dialog
                    if not prog.accepted and not prog.canceled:
                        return True

                r = StepRamper()

                steps = (v - current) / step
                prog.max = int(steps)
                r.ramp(func, current, v, step, period)
                prog.close()
                return True
def push_changes(self):
    """Offer to push unpushed meta-repo commits, showing a progress dialog."""
    repo = self.dvc.meta_repo
    if not repo.has_unpushed_commits():
        return

    if not self.confirmation_dialog('You have non-pushed commits. Would you like to share them?'):
        return

    prog = open_progress(2)
    msg = 'Pushing changes to meta repo'
    self.info(msg)
    prog.change_message(msg)
    repo.push()
    prog.close()
def _save_to_db(self):
    """Persist the edited irradiated positions to the DVC database and meta repo.

    Aborts if the meta repository cannot be smart-pulled first.
    """
    db = self.dvc.db
    if not self.dvc.meta_repo.smart_pull():
        return

    with db.session_ctx():
        n = len(self.irradiated_positions)
        prog = open_progress(n)
        for ir in self.irradiated_positions:
            ln = ir.identifier

            dbpos = db.get_irradiation_position(self.irradiation, self.level, ir.hole)
            if not dbpos:
                dbpos = db.add_irradiation_position(self.irradiation, self.level, ir.hole)

            if ln:
                dbpos2 = db.get_identifier(ln)
                if dbpos2:
                    # duplicate identifier is only fatal if it lives in a
                    # different irradiation
                    irradname = dbpos2.level.irradiation.name
                    if irradname != self.irradiation:
                        self.warning_dialog('Labnumber {} already exists '
                                            'in Irradiation {}'.format(ln, irradname))
                        return
                else:
                    dbpos.identifier = ln

            # dbpos.j = irs.j
            # dbpos.j_err = irs.j_err
            # flux lives in the meta repo, not the database
            self.dvc.meta_repo.update_flux(self.irradiation, self.level,
                                           ir.hole, ir.identifier, ir.j, ir.j_err)

            dbpos.weight = float(ir.weight or 0)
            dbpos.note = ir.note

            sam = ir.sample
            proj = ir.project
            mat = ir.material
            if proj:
                proj = db.add_project(proj)
            if mat:
                mat = db.add_material(mat)
            if sam:
                sam = db.add_sample(sam, project=proj, material=mat)
                dbpos.sample = sam

            prog.change_message('Saving {}{}{} identifier={}'.format(self.irradiation,
                                                                     self.level,
                                                                     ir.hole, ln))

    self.dirty = False
    self._level_changed(self.level)

    if self.dvc.meta_repo.has_staged():
        self.dvc.meta_commit('Labnumber Entry Save')
        self.dvc.meta_push()
def _selected_samples_changed(self, new):
    """Handle a change in the sample selection by (re)loading analyses.

    Reference-type samples (blank_unknown) are loaded per project date bin;
    everything else is loaded in one query bounded by low/high post times.
    """
    if new:
        at = self.analysis_table
        lim = at.limit
        kw = dict(limit=lim,
                  include_invalid=not at.omit_invalid,
                  mass_spectrometers=self._recent_mass_spectrometers)

        ss = self.selected_samples
        # working copy; reference-type samples are removed as they are handled
        xx = ss[:]
        # if not any(['RECENT' in p for p in self.selected_projects]):
        # sp=self.selected_projects
        # if not hasattr(sp, '__iter__'):
        #     sp = (sp, )
        if not any(['RECENT' in p.name for p in self.selected_projects]):
            reftypes = ('blank_unknown', )
            if any((si.analysis_type in reftypes for si in ss)):
                with self.db.session_ctx():
                    ans = []
                    for si in ss:
                        if si.analysis_type in reftypes:
                            xx.remove(si)
                            dates = list(self._project_date_bins(si.identifier))
                            # NOTE(review): leftover debug print
                            print dates
                            progress = open_progress(len(dates))
                            for lp, hp in dates:
                                progress.change_message('Loading Date Range '
                                                        '{} to {}'.format(lp.strftime('%m-%d-%Y %H:%M:%S'),
                                                                          hp.strftime('%m-%d-%Y %H:%M:%S')))
                                ais = self._retrieve_sample_analyses([si],
                                                                    make_records=False,
                                                                    low_post=lp,
                                                                    high_post=hp,
                                                                    **kw)
                                ans.extend(ais)
                            progress.close()

                    ans = self._make_records(ans)
                    # print len(ans), len(set([si.record_id for si in ans]))
        if xx:
            lp, hp = self.low_post, self.high_post
            ans = self._retrieve_sample_analyses(xx,
                                                 low_post=lp,
                                                 high_post=hp,
                                                 **kw)
            self.debug('selected samples changed. loading analyses. '
                       'low={}, high={}, limit={}'.format(lp, hp, lim))

        # NOTE(review): if no reference samples matched and xx is empty,
        # ``ans`` is unbound here -- confirm that path cannot occur
        self.analysis_table.set_analyses(ans)
        self.dump_browser()

    self.filter_focus = not bool(new)
def _open_progress(self, specs):
    """Open a progress dialog sized to cover every spec, its levels and positions."""
    total = len(specs)
    for _, spec in specs:
        levels = spec.irradiation.levels
        total += len(levels)
        total += sum(len(level.positions) for level in levels)

    self._progress = open_progress(total)
    return self._progress
def push_changes(self):
    """Offer to push unpushed meta-repo commits, with a progress dialog."""
    if self.dvc.meta_repo.has_unpushed_commits():
        if self.confirmation_dialog('You have non-pushed commits. Would you like to share them?'):
            prog = open_progress(2)
            self.info('Pushing changes to meta repo')
            prog.change_message('Pushing changes to meta repo')
            self.dvc.meta_repo.push()
            prog.close()
def _open_prog(self, ncycle, hops):
    """Open a progress dialog sized for ``ncycle`` cycles over ``hops``.

    The total is the accumulated settle time per cycle plus one tick per
    cycle and one per hop.
    """
    from pychron.core.progress import open_progress

    nhops = len(hops)
    total_settle = sum(parse_hop(h)['settle'] for h in hops)
    return open_progress(int(total_settle * ncycle + ncycle + nhops))
def import_irradiation_load_xls(self, p):
    """Import an irradiation load from the Excel file at path ``p``.

    NOTE(review): the actual ``load_level`` call is commented out, so this
    currently only constructs the loader and refreshes the table; the
    progress dialog is never explicitly closed -- confirm intent.
    """
    loader = XLSIrradiationLoader(db=self.db,
                                  monitor_name=self.monitor_name)
    prog = open_progress()
    loader.progress = prog
    loader.canvas = self.canvas

    # loader.load_level(p, self.irradiated_positions,
    #                   self.irradiation, self.level)

    self.refresh_table = True
def preview_generate_identifiers(self):
    """Preview identifier generation for the current level without persisting."""
    if self.check_monitor_name():
        return

    gen = IdentifierGenerator(monitor_name=self.monitor_name,
                              overwrite=True,
                              db=self.db)
    if not gen.setup():
        return

    prog = open_progress()
    gen.preview(prog, self.irradiated_positions, self.irradiation, self.level)
    prog.close()
    self.refresh_table = True
def _selected_samples_changed(self, new):
    """Handle a change in sample selection by (re)loading analyses.

    Reference-type samples are loaded per project date bin; the remainder
    are loaded in one bounded query.
    """
    if new:
        at = self.analysis_table
        lim = at.limit
        kw = dict(limit=lim,
                  include_invalid=not at.omit_invalid,
                  mass_spectrometers=self._recent_mass_spectrometers)

        ss = self.selected_samples
        # working copy; reference-type samples are removed as handled
        xx = ss[:]
        # if not any(['RECENT' in p for p in self.selected_projects]):
        # sp=self.selected_projects
        # if not hasattr(sp, '__iter__'):
        #     sp = (sp, )
        if not any(['RECENT' in p.name for p in self.selected_projects]):
            reftypes = ('blank_unknown',)
            if any((si.analysis_type in reftypes for si in ss)):
                with self.db.session_ctx():
                    ans = []
                    for si in ss:
                        if si.analysis_type in reftypes:
                            xx.remove(si)
                            dates = list(self._project_date_bins(si.identifier))
                            # NOTE(review): leftover debug print
                            print dates
                            progress = open_progress(len(dates))
                            for lp, hp in dates:
                                progress.change_message('Loading Date Range '
                                                        '{} to {}'.format(lp.strftime('%m-%d-%Y %H:%M:%S'),
                                                                          hp.strftime('%m-%d-%Y %H:%M:%S')))
                                ais = self._retrieve_sample_analyses([si],
                                                                    make_records=False,
                                                                    low_post=lp,
                                                                    high_post=hp,
                                                                    **kw)
                                ans.extend(ais)
                            progress.close()

                    ans = self._make_records(ans)
                    # print len(ans), len(set([si.record_id for si in ans]))
        if xx:
            lp, hp = self.low_post, self.high_post
            ans = self._retrieve_sample_analyses(xx,
                                                 low_post=lp,
                                                 high_post=hp,
                                                 **kw)
            self.debug('selected samples changed. loading analyses. '
                       'low={}, high={}, limit={}'.format(lp, hp, lim))

        # NOTE(review): ``ans`` may be unbound if neither branch executed
        self.analysis_table.set_analyses(ans)
        self.dump_browser()

    self.filter_focus = not bool(new)
def dump_sample(self, block=False):
    """Dump the sample, synchronously when ``block`` is true, otherwise on a daemon thread."""
    self.debug('dump sample')

    if self._dumper_thread is not None:
        self.warning_dialog('dump already in progress')
        return

    progress = open_progress(n=100)
    if block:
        return self._dump_sample(progress)

    worker = Thread(name='DumpSample',
                    target=self._dump_sample,
                    args=(progress,))
    worker.setDaemon(True)
    self._dumper_thread = worker
    worker.start()
def pull(self, branch='master', remote='origin', handled=True, use_progress=True):
    """
    fetch and merge

    Fetches ``branch`` from ``remote`` and merges FETCH_HEAD; falls back to
    smart_pull when the merge fails. When ``handled`` is False, fetch errors
    are re-raised instead of swallowed.
    """
    self.debug('pulling {} from {}'.format(branch, remote))
    repo = self._repo
    try:
        remote = self._get_remote(remote)
    except AttributeError as e:
        print('repo man pull', e)
        return

    if remote:
        self.debug('pulling from url: {}'.format(remote.url))
        if use_progress:
            prog = open_progress(3,
                                 show_percent=False,
                                 title='Pull Repository {}'.format(self.name),
                                 close_at_end=False)
            prog.change_message('Fetching branch:"{}" from "{}"'.format(branch, remote))
        try:
            self.fetch(remote)
        except GitCommandError as e:
            self.debug(e)
            if not handled:
                raise e
        self.debug('fetch complete')
        # if use_progress:
        #     for i in range(100):
        #         prog.change_message('Merging {}'.format(i))
        #         time.sleep(1)
        try:
            repo.git.merge('FETCH_HEAD')
        except GitCommandError:
            # merge failed (e.g. diverged); fall back to a smart pull
            self.smart_pull(branch=branch, remote=remote)

        # self._git_command(lambda: repo.git.merge('FETCH_HEAD'), 'merge')

        if use_progress:
            prog.close()
    self.debug('pull complete')
def dump_sample(self, block=False):
    """Dump the sample, synchronously when ``block`` is true, otherwise on a daemon thread.

    Refuses to start when a dump thread is already running.
    """
    self.debug('dump sample')
    if self._dumper_thread is None:
        progress = open_progress(n=100)
        if block:
            return self._dump_sample(progress)
        else:
            self._dumper_thread = Thread(name='DumpSample',
                                         target=self._dump_sample,
                                         args=(progress, ))
            # daemon so the app can exit even if the dump is still running
            self._dumper_thread.setDaemon(True)
            self._dumper_thread.start()
    else:
        self.warning_dialog('dump already in progress')
def clone(self):
    """Clone the selected repository from GitHub if it is not already present locally."""
    name = self.selected_repository_name
    # the 'meta' repository lives in its own directory
    root = paths.dvc_dir if name == 'meta' else paths.experiment_dataset_dir

    path = os.path.join(root, name)
    if os.path.isdir(path):
        return

    self.debug('cloning repository {}'.format(name))
    url = 'https://github.com/{}/{}.git'.format(self.organization, name)

    prog = open_progress(n=3)
    prog.change_message('Cloning repository {}'.format(url))
    Repo.clone_from(url, path)
    prog.change_message('Cloning Complete')
    prog.close()

    self.refresh_local_names()
def clone(self):
    """Clone the selected repository from GitHub if it is not already present locally."""
    name = self.selected_repository_name
    # the 'meta' repository lives in its own directory
    if name == 'meta':
        root = paths.dvc_dir
    else:
        root = paths.experiment_dataset_dir

    path = os.path.join(root, name)
    if not os.path.isdir(path):
        self.debug('cloning repository {}'.format(name))
        url = 'https://github.com/{}/{}.git'.format(self.organization, name)
        prog = open_progress(n=3)
        prog.change_message('Cloning repository {}'.format(url))
        Repo.clone_from(url, path)
        prog.change_message('Cloning Complete')
        prog.close()
        self.refresh_local_names()
def pull(self, branch='master', remote='origin', handled=True, use_progress=True):
    """
    fetch and merge

    Fetch ``branch`` from ``remote``, then merge FETCH_HEAD; on merge
    failure fall back to smart_pull. Fetch errors re-raise only when
    ``handled`` is False.
    """
    self.debug('pulling {} from {}'.format(branch, remote))
    repo = self._repo
    try:
        remote = self._get_remote(remote)
    except AttributeError as e:
        print('repo man pull', e)
        return

    if remote:
        self.debug('pulling from url: {}'.format(remote.url))
        if use_progress:
            prog = open_progress(3,
                                 show_percent=False,
                                 title='Pull Repository {}'.format(self.name),
                                 close_at_end=False)
            prog.change_message('Fetching branch:"{}" from "{}"'.format(branch, remote))
        try:
            self.fetch(remote)
        except GitCommandError as e:
            self.debug(e)
            if not handled:
                raise e
        self.debug('fetch complete')
        # if use_progress:
        #     for i in range(100):
        #         prog.change_message('Merging {}'.format(i))
        #         time.sleep(1)
        try:
            repo.git.merge('FETCH_HEAD')
        except GitCommandError:
            # merge failed; fall back to a smart pull
            self.smart_pull(branch=branch, remote=remote)

        # self._git_command(lambda: repo.git.merge('FETCH_HEAD'), 'merge')

        if use_progress:
            prog.close()
    self.debug('pull complete')
def out_of_date(self, branchname='master'):
    """Return True if the local branch differs from its remote counterpart.

    Fetches ``branchname`` from origin first so the comparison is current.
    """
    pd = open_progress(2)

    repo = self._repo
    origin = repo.remotes.origin
    pd.change_message('Fetching {} {}'.format(origin, branchname))
    repo.git.fetch(origin, branchname)
    pd.change_message('Complete')
    # try:
    #     oref = origin.refs[branchname]
    #     remote_commit = oref.commit
    # except IndexError:
    #     remote_commit = None
    #
    # branch = getattr(repo.heads, branchname)
    # local_commit = branch.commit
    local_commit, remote_commit = self._get_local_remote_commit(branchname)
    self.debug('out of date {} {}'.format(local_commit, remote_commit))
    return local_commit != remote_commit
def _generate_labnumbers(self, offset=None, level_offset=None):
    """
    get last labnumber

    start numbering at 1+offset

    add level_offset between each level
    """
    if offset is None:
        offset = self.offset
    if level_offset is None:
        level_offset = self.level_offset

    irradiation = self.irradiation
    # NOTE(review): leftover debug print
    print offset, level_offset
    mongen, unkgen, n = self._position_generator(offset, level_offset)

    if n:
        prog = open_progress(n)
        # prog.max = n - 1
        # monitors first, then unknowns
        for gen in (mongen, unkgen):
            for pos, ident in gen:
                po = pos.position
                le = pos.level.name
                if self.is_preview:
                    # preview only: show the identifier without persisting
                    self._set_position_identifier(pos, ident)
                else:
                    pos.identifier = ident
                    self.dvc.set_identifier(pos.level.irradiation.name,
                                            pos.level.name,
                                            pos.position,
                                            ident)

                # self._add_default_flux(pos)
                msg = 'setting irrad. pos. {} {}-{} labnumber={}'.format(irradiation, le, po, ident)
                self.info(msg)
                if prog:
                    prog.change_message(msg)
        prog.close()
def _generate_labnumbers(self, offset=None, level_offset=None):
    """
    get last labnumber

    start numbering at 1+offset

    add level_offset between each level
    """
    if offset is None:
        offset = self.offset
    if level_offset is None:
        level_offset = self.level_offset

    irradiation = self.irradiation
    # NOTE(review): leftover debug print
    print offset, level_offset
    mongen, unkgen, n = self._position_generator(offset, level_offset)

    if n:
        prog = open_progress(n)
        # prog.max = n - 1
        # monitors first, then unknowns
        for gen in (mongen, unkgen):
            for pos, ident in gen:
                po = pos.position
                le = pos.level.name
                if self.is_preview:
                    # preview only: display without persisting
                    self._set_position_identifier(pos, ident)
                else:
                    pos.identifier = ident
                    self.dvc.set_identifier(pos.level.irradiation.name,
                                            pos.level.name,
                                            pos.position,
                                            ident)

                # self._add_default_flux(pos)
                msg = 'setting irrad. pos. {} {}-{} labnumber={}'.format(irradiation,
                                                                         le, po, ident)
                self.info(msg)
                if prog:
                    prog.change_message(msg)
        prog.close()
def generate_identifiers(self):
    """Generate labnumbers for the current irradiation.

    Currently DISABLED: the early ``return`` makes the remainder
    unreachable; kept for when the feature is re-enabled.
    """
    self.warning('GENERATE LABNUMBERS DISABLED')
    return

    # --- unreachable while disabled ---
    if self.check_monitor_name():
        return

    ok = True
    ok = self.confirmation_dialog('Are you sure you want to generate the labnumbers for this irradiation?')
    if ok:
        ret = YES
        ret = self.confirmation_dialog('Overwrite existing labnumbers?',
                                       return_retval=True,
                                       cancel=True)
        if ret != CANCEL:
            overwrite = ret == YES
            lg = IdentifierGenerator(monitor_name=self.monitor_name,
                                     irradiation=self.irradiation,
                                     overwrite=overwrite,
                                     db=self.db)
            if lg.setup():
                prog = open_progress()
                lg.generate_identifiers(prog, overwrite)
                prog.close()
                self._update_level()
def _clone_central_db(self, repositories, analyses=None, principal_investigators=None, projects=None):
    """Clone (a subset of) the central DVC database into a local sqlite file.

    Copies the schema plus the records reachable from ``repositories`` (or
    the explicit ``analyses``/``principal_investigators``/``projects``
    overrides) and returns the path of the new database, or None.
    """
    self.info('--------- Clone DB -----------')
    # create an a sqlite database
    from pychron.dvc.dvc_orm import Base
    metadata = Base.metadata

    from pychron.dvc.dvc_database import DVCDatabase
    path = database_path()
    if os.path.isfile(path):
        if not self.confirmation_dialog('The database "{}" already exists. '
                                        'Do you want to overwrite it'.format(os.path.basename(path))):
            path = self._get_new_path()
        else:
            os.remove(path)

    if path:
        progress = open_progress(n=20)
        self.debug('--------- Starting db clone to {}'.format(path))
        src = self.dvc
        db = DVCDatabase(path=path, kind='sqlite')
        db.connect()
        with db.session_ctx(use_parent_session=False) as sess:
            # create the full schema in the new sqlite file
            metadata.create_all(sess.bind)

        # small lookup tables are copied wholesale
        tables = ['MassSpectrometerTbl', 'ExtractDeviceTbl', 'VersionTbl', 'UserTbl']
        for table in tables:
            mod = __import__('pychron.dvc.dvc_orm', fromlist=[table])
            progress.change_message('Cloning {}'.format(table))
            self._copy_table(db, getattr(mod, table))

        with src.session_ctx(use_parent_session=False):
            from pychron.dvc.dvc_orm import RepositoryTbl
            from pychron.dvc.dvc_orm import AnalysisTbl
            from pychron.dvc.dvc_orm import AnalysisChangeTbl
            from pychron.dvc.dvc_orm import RepositoryAssociationTbl
            from pychron.dvc.dvc_orm import AnalysisGroupTbl
            from pychron.dvc.dvc_orm import AnalysisGroupSetTbl
            from pychron.dvc.dvc_orm import MaterialTbl
            from pychron.dvc.dvc_orm import SampleTbl
            from pychron.dvc.dvc_orm import IrradiationTbl
            from pychron.dvc.dvc_orm import LevelTbl
            from pychron.dvc.dvc_orm import IrradiationPositionTbl
            from pychron.dvc.dvc_orm import PrincipalInvestigatorTbl

            repos = [src.db.get_repository(reponame) for reponame in repositories]

            progress.change_message('Assembling Analyses 0/5')
            st = time.time()
            if analyses:
                ans = analyses
                ras = [rai for ai in ans for rai in ai.repository_associations]
            else:
                # at = time.time()
                ras = [ra for repo in repos for ra in repo.repository_associations]
                # self.debug('association time={}'.format(time.time()-at))

            progress.change_message('Assembling Analyses 1/5')
            # at = time.time()
            ans = [ri.analysis for ri in ras]
            # self.debug('analysis time={}'.format(time.time()-at))

            progress.change_message('Assembling Analyses 2/5')
            # at = time.time()
            ans_c = [ai.change for ai in ans]
            # self.debug('change time={}'.format(time.time()-at))

            progress.change_message('Assembling Analyses 3/5')
            # at = time.time()
            agss = [gi for ai in ans for gi in ai.group_sets]
            # self.debug('agss time={}'.format(time.time()-at))

            progress.change_message('Assembling Analyses 4/5')
            # at = time.time()
            ags = {gi.group for gi in agss}
            # self.debug('ags time={}'.format(time.time()-at))

            progress.change_message('Assembling Analyses 5/5')
            self.debug('total analysis assembly time={}'.format(time.time()-st))

            self._copy_records(progress, db, RepositoryTbl, repos)
            self._copy_records(progress, db, RepositoryAssociationTbl, ras)
            self._copy_records(progress, db, AnalysisTbl, ans)
            self._copy_records(progress, db, AnalysisChangeTbl, ans_c)
            self._copy_records(progress, db, AnalysisGroupTbl, ags)
            self._copy_records(progress, db, AnalysisGroupSetTbl, agss)

            if principal_investigators:
                pis = [src.get_principal_investigator(pp.name) for pp in principal_investigators]
            else:
                pis = {repo.principal_investigator for repo in repos}
            self._copy_records(progress, db, PrincipalInvestigatorTbl, pis)

            from pychron.dvc.dvc_orm import ProjectTbl
            if projects:
                prjs = [src.get_project(pp) for pp in projects]
            else:
                prjs = {ai.irradiation_position.sample.project for ai in ans}
            self._copy_records(progress, db, ProjectTbl, prjs)

            # walk the relationships down to materials/samples and up to irradiations
            ips = {ai.irradiation_position for ai in ans}
            sams = {ip.sample for ip in ips}
            mats = {si.material for si in sams}
            self._copy_records(progress, db, MaterialTbl, mats)
            self._copy_records(progress, db, SampleTbl, sams)

            ls = {ip.level for ip in ips}
            irs = {l.irradiation for l in ls}
            self._copy_records(progress, db, IrradiationTbl, irs)
            self._copy_records(progress, db, LevelTbl, ls)
            self._copy_records(progress, db, IrradiationPositionTbl, ips)

        self.debug('--------- db clone finished')
        progress.close()
        self.information_dialog('Database saved to "{}"'.format(path))
        return path
def _remote_action(self, name, action):
    """Run ``action`` against the remote while showing a progress dialog."""
    message = '{} changes to {}'.format(name, self.remote)
    progress = open_progress(n=10, message=message)
    action()
    progress.close()
def _save_to_db(self):
    """Persist the edited irradiated positions (legacy labnumber schema).

    For each position: ensure the sample/project/material exist, then
    create or update the labnumber and its irradiation position, and add a
    flux history entry when j/j_err changed beyond tolerance.
    """
    db = self.db
    with db.session_ctx():
        n = len(self.irradiated_positions)
        prog = open_progress(n)
        for irs in self.irradiated_positions:
            ln = irs.labnumber

            sam = irs.sample
            proj = irs.project
            mat = irs.material
            if proj:
                proj = db.add_project(proj)
            if mat:
                mat = db.add_material(mat)
            if sam:
                sam = db.add_sample(sam, project=proj, material=mat)

            if ln:
                dbln = db.get_labnumber(ln)
                if dbln:
                    pos = dbln.irradiation_position
                    if pos is None:
                        pos = db.add_irradiation_position(irs.hole, dbln, self.irradiation, self.level)
                    else:
                        lev = pos.level
                        irrad = lev.irradiation
                        # a labnumber may only belong to one irradiation
                        if self.irradiation != irrad.name:
                            self.warning_dialog('Labnumber {} already exists in Irradiation {}'.format(ln, irrad.name))
                            return
                        if irs.hole != pos.position:
                            pos = db.add_irradiation_position(irs.hole, dbln, self.irradiation, self.level)
                else:
                    dbln = db.add_labnumber(ln, sample=sam, )
                    pos = db.add_irradiation_position(irs.hole, dbln, self.irradiation, self.level)

                def add_flux():
                    # record a new flux history entry and invalidate cached analyses
                    hist = db.add_flux_history(pos)
                    dbln.selected_flux_history = hist
                    f = db.add_flux(irs.j, irs.j_err)
                    f.history = hist
                    for ai in dbln.analyses:
                        self.remove_from_cache(ai)

                if dbln.selected_flux_history:
                    tol = 1e-10
                    flux = dbln.selected_flux_history.flux
                    if flux:
                        # only add a new entry when j or j_err actually changed
                        if abs(flux.j - irs.j) > tol or abs(flux.j_err - irs.j_err) > tol:
                            add_flux()
                    else:
                        add_flux()
                else:
                    add_flux()
            else:
                # no labnumber supplied: attach a placeholder labnumber to the hole
                dbpos = db.get_irradiation_position(self.irradiation, self.level, irs.hole)
                if not dbpos or not dbpos.labnumber:
                    dbln = db.add_labnumber('', unique=False, sample=sam, note=irs.note)
                    db.add_irradiation_position(irs.hole, dbln, self.irradiation, self.level)
                else:
                    dbln = dbpos.labnumber
                    if sam:
                        dbln.sample = sam
                    dbln.note = irs.note

            prog.change_message('Saving {}{}{} labnumber={}'.format(self.irradiation,
                                                                    self.level,
                                                                    irs.hole,
                                                                    dbln.identifier))

    self.dirty = False
    self._level_changed(self.level)
fetch and merge """ self.debug('pulling {} from {}'.format(branch, remote)) repo = self._repo try: remote = self._get_remote(remote) except AttributeError, e: print 'repo man pull', e return if remote: self.debug('pulling from url: {}'.format(remote.url)) if use_progress: prog = open_progress(3, show_percent=False, title='Pull Repository {}'.format(self.name), close_at_end=False) prog.change_message('Fetching branch:"{}" from "{}"'.format(branch, remote)) try: self.fetch(remote) except GitCommandError, e: self.debug(e) if not handled: raise e # if use_progress: # for i in range(100): # prog.change_message('Merging {}'.format(i)) # time.sleep(1) try: repo.git.merge('FETCH_HEAD')
def new_message_handler(self):
    """Open a 100-step progress dialog, then delegate to the base class handler."""
    prog = open_progress(100)
    self._progress = prog
    return super(GitProgress, self).new_message_handler()
def _save_to_db(self, level, update, irradiation=None):
    """Persist the edited irradiated positions for ``level`` to the DVC database.

    Positions without a sample are removed. When the backend is mssql or
    multiple null identifiers are disallowed, a placeholder identifier is
    synthesized from irradiation/level/hole (or packet). Commits and pushes
    the meta repo only when changes were staged.
    """
    db = self.dvc.db
    if not self.dvc.meta_repo.smart_pull():
        return

    n = len(self.irradiated_positions)
    prog = open_progress(n)

    if not irradiation:
        irradiation = self.irradiation

    dvc = self.dvc
    with dvc.session_ctx():
        for ir in self.irradiated_positions:
            sam = ir.sample
            if not sam:
                # positions with no sample are pruned from the level
                self.dvc.remove_irradiation_position(irradiation, level, ir.hole)
                continue

            # mssql will not allow multiple null identifiers
            # so need to use placeholder
            # if not ir.identifier and (db.kind == 'mssql' or not self.allow_multiple_null_identifiers):
            if db.kind == 'mssql' or not self.allow_multiple_null_identifiers:
                k = '{:02n}'.format(ir.hole)
                if self.use_packet_for_default_identifier:
                    k = ir.packet
                temp = '{}:{}{}'.format(irradiation, level, k)
                if not ir.identifier or ir.identifier != temp:
                    ir.identifier = temp

            ln = ir.identifier

            dbpos = db.get_irradiation_position(irradiation, level, ir.hole)
            if not dbpos:
                dbpos = db.add_irradiation_position(irradiation, level, ir.hole)

            if ln:
                dbpos2 = db.get_identifier(ln)
                if dbpos2:
                    # duplicate identifier is only fatal in a different irradiation
                    irradname = dbpos2.level.irradiation.name
                    if irradname != irradiation:
                        self.warning_dialog('Labnumber {} already exists '
                                            'in Irradiation {}'.format(ln, irradname))
                        return
                else:
                    dbpos.identifier = ln

            # flux values live in the meta repo
            self.dvc.meta_repo.update_flux(irradiation, level, ir.hole, ir.identifier,
                                           ir.j, ir.j_err, 0, 0)

            dbpos.weight = float(ir.weight or 0)
            dbpos.note = ir.note
            dbpos.packet = ir.packet

            proj = ir.project
            mat = ir.material
            grainsize = ir.grainsize
            if proj:
                proj = db.add_project(proj, pi=ir.principal_investigator)
            if mat:
                mat = db.add_material(mat, grainsize=grainsize)
            if sam:
                sam = db.add_sample(sam, proj.name, ir.principal_investigator,
                                    mat, grainsize=grainsize)
                # sam.igsn = ir.igsn
                dbpos.sample = sam

            prog.change_message('Saving {}{}{} identifier={}'.format(irradiation, level,
                                                                     ir.hole, ln))
        db.commit()

    prog.close()
    self.dirty = False
    if update:
        self._level_changed(None, level)

    if self.dvc.meta_repo.has_staged():
        self.dvc.meta_commit('Labnumber Entry Save')
        self.dvc.meta_push()
def _clone_central_db(self, repositories, analyses=None, principal_investigators=None, projects=None):
    """Clone (a subset of) the central DVC database into a local sqlite file.

    Copies the schema plus all records reachable from ``repositories`` (or
    the explicit overrides) and returns the new database path, or None.
    """
    self.info('--------- Clone DB -----------')
    # create an a sqlite database
    from pychron.dvc.dvc_orm import Base
    metadata = Base.metadata

    from pychron.dvc.dvc_database import DVCDatabase
    path = database_path()
    if os.path.isfile(path):
        if not self.confirmation_dialog('The database "{}" already exists. '
                                        'Do you want to overwrite it'.format(os.path.basename(path))):
            path = self._get_new_path()
        else:
            os.remove(path)

    if path:
        progress = open_progress(n=20)
        self.debug('--------- Starting db clone to {}'.format(path))
        src = self.dvc
        db = DVCDatabase(path=path, kind='sqlite')
        db.connect()
        with db.session_ctx(use_parent_session=False) as sess:
            # create the full schema in the new sqlite file
            metadata.create_all(sess.bind)

        # small lookup tables copied wholesale
        tables = ['MassSpectrometerTbl', 'ExtractDeviceTbl', 'VersionTbl', 'UserTbl']
        for table in tables:
            mod = __import__('pychron.dvc.dvc_orm', fromlist=[table])
            progress.change_message('Cloning {}'.format(table))
            self._copy_table(db, getattr(mod, table))

        with src.session_ctx(use_parent_session=False):
            from pychron.dvc.dvc_orm import RepositoryTbl
            from pychron.dvc.dvc_orm import AnalysisTbl
            from pychron.dvc.dvc_orm import AnalysisChangeTbl
            from pychron.dvc.dvc_orm import RepositoryAssociationTbl
            from pychron.dvc.dvc_orm import AnalysisGroupTbl
            from pychron.dvc.dvc_orm import AnalysisGroupSetTbl
            from pychron.dvc.dvc_orm import MaterialTbl
            from pychron.dvc.dvc_orm import SampleTbl
            from pychron.dvc.dvc_orm import IrradiationTbl
            from pychron.dvc.dvc_orm import LevelTbl
            from pychron.dvc.dvc_orm import IrradiationPositionTbl
            from pychron.dvc.dvc_orm import PrincipalInvestigatorTbl

            repos = [src.db.get_repository(reponame) for reponame in repositories]

            progress.change_message('Assembling Analyses 0/5')
            st = time.time()
            if analyses:
                ans = analyses
                ras = [rai for ai in ans for rai in ai.repository_associations]
            else:
                # at = time.time()
                ras = [ra for repo in repos for ra in repo.repository_associations]
                # self.debug('association time={}'.format(time.time()-at))

            progress.change_message('Assembling Analyses 1/5')
            # at = time.time()
            ans = [ri.analysis for ri in ras]
            # self.debug('analysis time={}'.format(time.time()-at))

            progress.change_message('Assembling Analyses 2/5')
            # at = time.time()
            ans_c = [ai.change for ai in ans]
            # self.debug('change time={}'.format(time.time()-at))

            progress.change_message('Assembling Analyses 3/5')
            # at = time.time()
            agss = [gi for ai in ans for gi in ai.group_sets]
            # self.debug('agss time={}'.format(time.time()-at))

            progress.change_message('Assembling Analyses 4/5')
            # at = time.time()
            ags = {gi.group for gi in agss}
            # self.debug('ags time={}'.format(time.time()-at))

            progress.change_message('Assembling Analyses 5/5')
            self.debug('total analysis assembly time={}'.format(time.time() - st))

            self._copy_records(progress, db, RepositoryTbl, repos)
            self._copy_records(progress, db, RepositoryAssociationTbl, ras)
            self._copy_records(progress, db, AnalysisTbl, ans)
            self._copy_records(progress, db, AnalysisChangeTbl, ans_c)
            self._copy_records(progress, db, AnalysisGroupTbl, ags)
            self._copy_records(progress, db, AnalysisGroupSetTbl, agss)

            if principal_investigators:
                pis = [src.get_principal_investigator(pp.name) for pp in principal_investigators]
            else:
                pis = {repo.principal_investigator for repo in repos}
            self._copy_records(progress, db, PrincipalInvestigatorTbl, pis)

            from pychron.dvc.dvc_orm import ProjectTbl
            if projects:
                prjs = [src.get_project(pp) for pp in projects]
            else:
                prjs = {ai.irradiation_position.sample.project for ai in ans}
            self._copy_records(progress, db, ProjectTbl, prjs)

            # walk relationships down to materials/samples and up to irradiations
            ips = {ai.irradiation_position for ai in ans}
            sams = {ip.sample for ip in ips}
            mats = {si.material for si in sams}
            self._copy_records(progress, db, MaterialTbl, mats)
            self._copy_records(progress, db, SampleTbl, sams)

            ls = {ip.level for ip in ips}
            irs = {l.irradiation for l in ls}
            self._copy_records(progress, db, IrradiationTbl, irs)
            self._copy_records(progress, db, LevelTbl, ls)
            self._copy_records(progress, db, IrradiationPositionTbl, ips)

        self.debug('--------- db clone finished')
        progress.close()
        self.information_dialog('Database saved to "{}"'.format(path))
        return path
class GitRepoManager(Loggable):
    """
    Manage a local git repository via GitPython.

    Wraps a ``git.Repo`` instance (``self._repo``) and exposes
    open/init/clone, staging, branch, remote, and history operations.
    Python 2 era code (print statements, ``except X, e`` syntax).
    """
    # the underlying GitPython Repo object; set by open_repo/init_repo/clone
    _repo = Any
    # root=Directory
    # absolute path of the working tree on disk
    path = Str
    selected = Any
    selected_branch = Str
    selected_path_commits = List
    selected_commits = List
    refresh_commits_table_needed = Event
    path_dirty = Event
    remote = Str

    def open_repo(self, name, root=None):
        """
        name: name of repo
        root: root directory to create new repo

        Returns True if the directory already existed (existing repo),
        False if a brand-new repo was initialized at the path.
        """
        if root is None:
            p = name
        else:
            p = os.path.join(root, name)

        self.path = p
        # reset logger so the new name below is picked up
        self.logger = None
        self.name = '{}<GitRepo>'.format(os.path.basename(p))
        if os.path.isdir(p):
            self.init_repo(p)
            return True
        else:
            os.mkdir(p)
            repo = Repo.init(p)
            self.debug('created new repo {}'.format(p))
            self._repo = repo
            return False

    def init_repo(self, path):
        """
        path: absolute path to repo

        return True if git repo exists; if the directory exists but has
        no .git folder, a new repo is initialized there (returns None).
        """
        if os.path.isdir(path):
            g = os.path.join(path, '.git')
            if os.path.isdir(g):
                self._repo = Repo(path)
                return True
            else:
                self.debug(
                    '{} is not a valid repo. Initializing now'.format(path))
                self._repo = Repo.init(path)

    def add_paths(self, apaths):
        """
        Stage any of *apaths* that are either locally modified or untracked.

        apaths: a single path or an iterable of absolute paths.
        Returns True if at least one path matched and was considered changed.
        NOTE(review): self.index.add(ps) is called even when ps is empty —
        presumably harmless, but verify against GitPython's IndexFile.add.
        """
        if not hasattr(apaths, '__iter__'):
            apaths = (apaths, )

        changes = self.get_local_changes()
        if not changes:
            # nothing modified: fall back to untracked files (absolute paths)
            changes = self.untracked_files()
        else:
            # get_local_changes returns repo-relative paths; make absolute
            changes = [os.path.join(self.path, c) for c in changes]

        ps = [p for p in apaths if p in changes]
        changed = bool(ps)
        for p in ps:
            self.debug('adding to index: {}'.format(
                os.path.relpath(p, self.path)))

        self.index.add(ps)
        return changed

    def add_ignore(self, *args):
        """Append patterns to .gitignore (skipping duplicates) and stage it."""
        ignores = []
        p = os.path.join(self.path, '.gitignore')
        if os.path.isfile(p):
            with open(p, 'r') as rfile:
                ignores = [line.strip() for line in rfile]

        args = [a for a in args if a not in ignores]
        if args:
            with open(p, 'a') as afile:
                for a in args:
                    afile.write('{}\n'.format(a))
        self.add(p, commit=False)

    def out_of_date(self, branchname='master'):
        """
        Fetch *branchname* from origin and report whether the local branch
        commit differs from the remote one.
        NOTE(review): the progress dialog ``pd`` is never closed here —
        confirm open_progress cleans itself up.
        """
        pd = open_progress(2)
        repo = self._repo
        origin = repo.remotes.origin
        pd.change_message('Fetching {} {}'.format(origin, branchname))
        repo.git.fetch(origin, branchname)
        pd.change_message('Complete')
        # try:
        #     oref = origin.refs[branchname]
        #     remote_commit = oref.commit
        # except IndexError:
        #     remote_commit = None
        #
        # branch = getattr(repo.heads, branchname)
        # local_commit = branch.commit
        local_commit, remote_commit = self._get_local_remote_commit(branchname)
        self.debug('out of date {} {}'.format(local_commit, remote_commit))
        return local_commit != remote_commit

    def _get_local_remote_commit(self, branchname=None):
        """
        Return (local_commit, remote_commit) for *branchname*.

        remote_commit is None when origin has no ref of that name.
        branchname=None uses the repo HEAD as the local branch.
        Returns (None, None) if the local head lookup fails.
        """
        repo = self._repo
        origin = repo.remotes.origin
        try:
            oref = origin.refs[branchname]
            remote_commit = oref.commit
        except IndexError:
            remote_commit = None

        if branchname is None:
            branch = repo.head
        else:
            try:
                branch = repo.heads[branchname]
            except AttributeError:
                return None, None

        local_commit = branch.commit
        return local_commit, remote_commit

    def clone(self, url, path):
        """Clone *url* into *path* and adopt it as this manager's repo."""
        self._repo = Repo.clone_from(url, path)

    def unpack_blob(self, hexsha, p):
        """
        p: str. should be absolute path

        Return the blob contents of *p* at commit *hexsha*, or '' if the
        blob cannot be found in the commit's tree (top level or one level
        of subtrees only).
        NOTE(review): the ``else`` is attached to the outer ``for``, which
        never ``break``s, so 'failed unpacking' prints even on success —
        looks like a bug; confirm intent before changing.
        """
        repo = self._repo
        tree = repo.commit(hexsha).tree
        # blob = next((bi for ti in tree.trees
        #              for bi in ti.blobs
        #              if bi.abspath == p), None)
        blob = None
        for ts in ((tree, ), tree.trees):
            for ti in ts:
                for bi in ti.blobs:
                    # print bi.abspath, p
                    if bi.abspath == p:
                        blob = bi
                        break
        else:
            print 'failed unpacking', p

        return blob.data_stream.read() if blob else ''

    def shell(self, cmd, *args):
        """Invoke an arbitrary git subcommand by name, e.g. shell('status')."""
        repo = self._repo
        func = getattr(repo.git, cmd)
        return func(*args)

    def truncate_repo(self, date='1 month'):
        """
        Discard history older than *date* using a graft point plus
        ``git filter-branch`` and an aggressive gc.

        NOTE(review): ``''.format(name)`` always yields an empty string, so
        the mirror clone source is '' — almost certainly should reference
        *name*; the backup clone cannot work as written. Confirm.
        NOTE(review): ``-after`` is likely meant to be git log's ``--after``
        option; as written it is passed as a malformed single argument.
        NOTE(review): ``logs.next()`` is Python-2-only iterator syntax.
        """
        repo = self._repo
        name = os.path.basename(self.path)
        backup = '.{}'.format(name)
        repo.git.clone('--mirror', ''.format(name), './{}'.format(backup))
        logs = repo.git.log('--pretty=%H', '-after "{}"'.format(date))
        # oldest qualifying commit becomes the graft (new root) commit
        logs = reversed(logs.split('\n'))
        sha = logs.next()
        gpath = os.path.join(self.path, '.git', 'info', 'grafts')
        with open(gpath, 'w') as wfile:
            wfile.write(sha)
        repo.git.filter_branch('--tag-name-filter', 'cat', '--', '--all')
        repo.git.gc('--prune=now')

    def commits_iter(self, p, keys=None, limit='-'):
        """
        Lazily yield [hexsha, *key_values] for each commit touching path *p*
        (follows renames). *keys* are attribute names read off each commit
        object; *limit* is interpolated into git log's ``-<n>`` count flag.
        """
        repo = self._repo
        p = os.path.join(repo.working_tree_dir, p)
        # escape spaces for the git command line
        p = p.replace(' ', '\ ')
        hx = repo.git.log('--pretty=%H', '--follow',
                          '-{}'.format(limit), '--', p).split('\n')

        def func(hi):
            commit = repo.rev_parse(hi)
            r = [hi, ]
            if keys:
                r.extend([getattr(commit, ki) for ki in keys])
            return r

        return (func(ci) for ci in hx)

    def diff(self, a, b):
        """Return the textual ``git diff`` between revisions *a* and *b*."""
        repo = self._repo
        return repo.git.diff(a, b, )

    def report_status(self):
        """Log the locally modified paths for this repo."""
        self.debug('Local Changes to {}'.format(self.path))
        for p in self.get_local_changes():
            self.debug('\t{}'.format(p))

    def commit_dialog(self):
        """
        Show a commit dialog for the current local changes; on accept,
        stage the user-selected paths and commit. Returns True on commit.
        """
        from pychron.git_archive.commit_dialog import CommitDialog

        ps = self.get_local_changes()
        cd = CommitDialog(ps)
        info = cd.edit_traits()
        if info.result:
            index = self.index
            index.add([mp.path for mp in cd.valid_paths()])
            self.commit(cd.commit_message)
            return True

    def get_local_changes(self):
        """
        Return repo-relative paths of files modified relative to HEAD,
        parsed out of a full-index diff via GitPython's Diff parser.
        """
        repo = self._repo
        diff_str = repo.git.diff('HEAD', '--full-index')
        diff_str = StringIO(diff_str)
        diff_str.seek(0)
        diff = Diff._index_from_patch_format(repo, diff_str)
        root = self.path
        return [
            os.path.relpath(di.a_blob.abspath, root)
            for di in diff.iter_change_type('M')
        ]

        # patches = map(str.strip, diff_str.split('diff --git'))
        # patches = ['\n'.join(p.split('\n')[2:]) for p in patches[1:]]
        #
        # diff_str = StringIO(diff_str)
        # diff_str.seek(0)
        # index = Diff._index_from_patch_format(repo, diff_str)
        #
        # return index, patches
        #

    def get_head(self, commit=True, hexsha=True):
        """
        Return the HEAD commit (default), its hexsha string, or the raw
        repo object depending on the flags.
        """
        head = self._repo
        if commit:
            head = head.commit()
        if hexsha:
            head = head.hexsha
        return head
        # return self._repo.head.commit.hexsha

    def cmd(self, cmd, *args):
        """Invoke a git subcommand by name (same idea as ``shell``)."""
        return getattr(self._repo.git, cmd)(*args)

    def is_dirty(self):
        """True if the working tree has uncommitted changes."""
        return self._repo.is_dirty()

    def untracked_files(self):
        """
        Return absolute paths of untracked files, parsed from
        ``git status --porcelain``.
        """
        lines = self._repo.git.status(porcelain=True, untracked_files=True)
        # Untracked files prefix in porcelain mode
        prefix = "?? "
        untracked_files = list()
        for line in lines.split('\n'):
            # print 'ffff', line
            if not line.startswith(prefix):
                continue
            filename = line[len(prefix):].rstrip('\n')
            # Special characters are escaped
            if filename[0] == filename[-1] == '"':
                # porcelain quotes/escapes special names; undo (Python 2 only)
                filename = filename[1:-1].decode('string_escape')
            # print 'ffasdfsdf', filename
            untracked_files.append(os.path.join(self.path, filename))
        # finalize_process(proc)
        return untracked_files

    def has_staged(self):
        """Truthy (non-empty name list) if the index differs from HEAD."""
        return self._repo.git.diff('HEAD', '--name-only')
        # return self._repo.is_dirty()

    def has_unpushed_commits(self, remote='origin', branch='master'):
        """Truthy if local commits exist that *remote*/*branch* lacks."""
        # return self._repo.git.log('--not', '--remotes', '--oneline')
        return self._repo.git.log('{}/{}..HEAD'.format(remote, branch),
                                  '--oneline')

    def add_unstaged(self, root, extension=None, use_diff=False):
        """
        Walk *root* and stage every file (optionally filtered by file
        *extension* or tuple of extensions). The use_diff branch is
        currently disabled (no-op).
        """
        index = self.index

        def func(ps, extension):
            # stage the subset of ps matching the extension filter
            if extension:
                if not isinstance(extension, tuple):
                    extension = (extension, )
                ps = [pp for pp in ps if os.path.splitext(pp)[1] in extension]

            if ps:
                self.debug('adding to index {}'.format(ps))
                index.add(ps)

        if use_diff:
            pass
            # try:
            #     ps = [diff.a_blob.path for diff in index.diff(None)]
            #     func(ps, extension)
            # except IOError,e:
            #     print 'exception', e
        else:
            for r, ds, fs in os.walk(root):
                ps = [os.path.join(r, fi) for fi in fs]
                func(ps, extension)

    def update_gitignore(self, *args):
        """
        Add patterns to .gitignore (skipping ones already present) and
        commit the change via _add_to_repo.
        """
        p = os.path.join(self.path, '.gitignore')
        # mode = 'a' if os.path.isfile(p) else 'w'
        args = list(args)
        if os.path.isfile(p):
            with open(p, 'r') as rfile:
                for line in fileiter(rfile, strip=True):
                    for i, ai in enumerate(args):
                        if line == ai:
                            args.pop(i)
        if args:
            with open(p, 'a') as wfile:
                for ai in args:
                    wfile.write('{}\n'.format(ai))
            self._add_to_repo(p, msg='updated .gitignore')

    def get_commit(self, hexsha):
        """Return the Commit object for *hexsha*."""
        repo = self._repo
        return repo.commit(hexsha)

    def tag_branch(self, tagname):
        """Create a tag named *tagname* at the current HEAD."""
        repo = self._repo
        repo.create_tag(tagname)

    def get_current_branch(self):
        """Return the name of the currently checked-out branch."""
        repo = self._repo
        return repo.active_branch.name

    def checkout_branch(self, name):
        """Check out the existing local branch *name* and refresh history."""
        repo = self._repo
        branch = getattr(repo.heads, name)
        branch.checkout()

        self.selected_branch = name
        self._load_branch_history()

    def create_branch(self, name=None, commit='HEAD'):
        """
        Create and check out a new branch at *commit*. With name=None a
        NewBranchView dialog prompts for the name; cancel aborts. If the
        branch already exists, the user is informed and nothing changes.
        """
        if name is None:
            nb = NewBranchView()
            info = nb.edit_traits()
            if info.result:
                name = nb.name
            else:
                return

        repo = self._repo
        if name not in repo.branches:
            branch = repo.create_head(name, commit=commit)
            branch.checkout()
            self.information_dialog('Data set not on branch "{}"'.format(name))
        else:
            self.information_dialog(
                'Branch "{}" already exists. Choose a different name'.format(
                    name))
        # branch.commit = repo.head.commit
        # self.checkout_branch(name)

    def create_remote(self, url, name='origin', force=False):
        """
        Register remote *name* -> *url* if absent; with force=True an
        existing remote of that name is replaced.
        NOTE(review): the inner debug format string has one placeholder but
        two arguments — the url is silently dropped from the message.
        """
        repo = self._repo
        if repo:
            self.debug('setting remote {} {}'.format(name, url))
            # only create remote if doesnt exist
            if not hasattr(repo.remotes, name):
                self.debug('create remote {}'.format(name, url))
                repo.create_remote(name, url)
            elif force:
                repo.delete_remote(name)
                repo.create_remote(name, url)

    def delete_remote(self, name='origin'):
        """Remove remote *name* if it exists."""
        repo = self._repo
        if repo:
            if hasattr(repo.remotes, name):
                repo.delete_remote(name)

    def get_branch_names(self):
        """Return the local branch names."""
        return [b.name for b in self._repo.branches]

    @caller
    def pull(self, branch='master', remote='origin', handled=True,
             use_progress=True):
        """
        fetch and merge

        Fetch *branch* from *remote* then merge FETCH_HEAD. Errors are
        logged; with handled=False GitCommandError is re-raised.
        NOTE(review): the progress dialog is opened with close_at_end=False
        and not explicitly closed in this visible code.
        """
        repo = self._repo
        try:
            remote = self._get_remote(remote)
        except AttributeError, e:
            print 'repo man pull', e
            return

        if remote:
            if use_progress:
                prog = open_progress(3, show_percent=False,
                                     title='Pull Repository',
                                     close_at_end=False)
                prog.change_message('Fetching branch:"{}" from "{}"'.format(
                    branch, remote))
            try:
                self.fetch(remote)
            except GitCommandError, e:
                self.debug(e)
                if not handled:
                    raise e

            # if use_progress:
            #     for i in range(100):
            #         prog.change_message('Merging {}'.format(i))
            #         time.sleep(1)
            try:
                repo.git.merge('FETCH_HEAD')
            except GitCommandError, e:
                self.debug(e)
                if not handled:
                    raise e
def _save_to_db(self):
    """
    Persist the edited irradiated_positions to the (legacy, non-DVC)
    database inside one session.

    For each position: ensure project/material/sample rows exist, then
    either attach/verify the labnumber's irradiation position (creating
    labnumber and/or position rows as needed) and record flux, or — for
    rows with no labnumber — create/refresh a placeholder labnumber at
    the hole. Aborts with a warning dialog if a labnumber already lives
    in a different irradiation.
    """
    db = self.db
    with db.session_ctx():
        n = len(self.irradiated_positions)
        prog = open_progress(n)

        for irs in self.irradiated_positions:
            ln = irs.labnumber

            sam = irs.sample
            proj = irs.project
            mat = irs.material
            # resolve (and create if necessary) project/material before
            # sample, since the sample row references both
            if proj:
                proj = db.add_project(proj)
            if mat:
                mat = db.add_material(mat)
            if sam:
                sam = db.add_sample(sam, project=proj, material=mat)

            if ln:
                dbln = db.get_labnumber(ln)
                if dbln:
                    pos = dbln.irradiation_position
                    if pos is None:
                        pos = db.add_irradiation_position(
                            irs.hole, dbln, self.irradiation, self.level)
                    else:
                        lev = pos.level
                        irrad = lev.irradiation
                        # a labnumber may only belong to one irradiation;
                        # bail out entirely on conflict
                        if self.irradiation != irrad.name:
                            self.warning_dialog(
                                'Labnumber {} already exists in Irradiation {}'.format(ln, irrad.name))
                            return
                        # same irradiation but different hole: re-position
                        if irs.hole != pos.position:
                            pos = db.add_irradiation_position(
                                irs.hole, dbln, self.irradiation, self.level)
                else:
                    dbln = db.add_labnumber(ln, sample=sam, )
                    pos = db.add_irradiation_position(
                        irs.hole, dbln, self.irradiation, self.level)

                def add_flux():
                    # record a new flux measurement for this position and
                    # invalidate any cached analyses of the labnumber.
                    # NOTE(review): closure late-binds pos/dbln/irs from the
                    # enclosing loop iteration — called immediately below,
                    # so safe, but fragile if ever deferred.
                    hist = db.add_flux_history(pos)
                    dbln.selected_flux_history = hist
                    f = db.add_flux(irs.j, irs.j_err)
                    f.history = hist
                    for ai in dbln.analyses:
                        self.remove_from_cache(ai)

                if dbln.selected_flux_history:
                    # only add a new flux record when j/j_err actually moved
                    tol = 1e-10
                    flux = dbln.selected_flux_history.flux
                    if flux:
                        if abs(flux.j - irs.j) > tol or abs(flux.j_err - irs.j_err) > tol:
                            add_flux()
                    else:
                        add_flux()
                else:
                    add_flux()
            else:
                # no labnumber entered: keep a placeholder labnumber at the
                # hole, or update the sample/note of the existing one
                dbpos = db.get_irradiation_position(
                    self.irradiation, self.level, irs.hole)
                if not dbpos or not dbpos.labnumber:
                    dbln = db.add_labnumber('', unique=False, sample=sam,
                                            note=irs.note)
                    db.add_irradiation_position(
                        irs.hole, dbln, self.irradiation, self.level)
                else:
                    dbln = dbpos.labnumber
                    if sam:
                        dbln.sample = sam
                    dbln.note = irs.note

            prog.change_message('Saving {}{}{} labnumber={}'.format(
                self.irradiation, self.level, irs.hole, dbln.identifier))

    self.dirty = False
    # refresh the UI for the current level after the session commits
    self._level_changed(self.level)