def update_productions(self, irrad, level, production, add=True):
    """Set the production entry for ``level`` in the irradiation's productions.json.

    The file is rewritten unconditionally; when ``add`` is True the path is
    staged (uncommitted) in the meta repository.
    """
    path = os.path.join(paths.meta_root, irrad, 'productions.json')
    prods = dvc_load(path)
    prods[level] = production
    dvc_dump(prods, path)
    if add:
        self.add(path, commit=False)
def update_flux(self, irradiation, level, pos, identifier, j, e, decay=None, analyses=None, add=True):
    """Insert or replace the flux record for position ``pos`` in a level file.

    ``decay``/``analyses`` default to empty containers; the analyses list is
    reduced to (uuid, record_id, omitted-status) triples for serialization.
    """
    decay = {} if decay is None else decay
    analyses = [] if analyses is None else analyses
    path = self.get_level_path(irradiation, level)
    entries = dvc_load(path)

    record = {'position': pos,
              'j': j,
              'j_err': e,
              'decay_constants': decay,
              'identifier': identifier,
              'analyses': [{'uuid': ai.uuid,
                            'record_id': ai.record_id,
                            'status': ai.is_omitted()} for ai in analyses]}

    if entries:
        # replace an existing entry for this position, else append
        exists = any(ei['position'] == pos for ei in entries)
        updated = [record if ei['position'] == pos else ei for ei in entries]
        if not exists:
            updated.append(record)
    else:
        updated = [record]

    dvc_dump(updated, path)
    if add:
        self.add(path, commit=False)
def _save_peak_center(self, pc):
    """Serialize peak-center results (one blob-encoded point set per detector)
    to the 'peakcenter' analysis file. No file is written when ``pc`` is falsy.
    """
    self.info('DVC saving peakcenter')
    path = self._make_path(modifier='peakcenter')
    if not pc:
        return

    fmt = '>ff'
    obj = {'reference_detector': pc.reference_detector.name,
           'reference_isotope': pc.reference_isotope,
           'fmt': fmt,
           'interpolation': pc.interpolation_kind if pc.use_interpolation else ''}

    for result in pc.get_results() or ():
        obj[result.detector] = {'low_dac': result.low_dac,
                                'center_dac': result.center_dac,
                                'high_dac': result.high_dac,
                                'low_signal': result.low_signal,
                                'center_signal': result.center_signal,
                                'high_signal': result.high_signal,
                                'resolution': result.resolution,
                                'low_resolving_power': result.low_resolving_power,
                                'high_resolving_power': result.high_resolving_power,
                                # pack is the project binpack helper: packs each
                                # point pair with fmt, then base-encodes the blob
                                'points': encode_blob(pack(fmt, result.points))}

    dvc_dump(obj, path)
def update_productions(self, irrad, level, production, note=None, add=True):
    """Update the production for ``level`` in the irradiation's productions.json.

    Writes only when the level is new or its production actually changed.

    Bug fix: ``str(note) or ''`` stored the literal string ``'None'`` when
    ``note`` was None (``str(None)`` is truthy); an absent note is now
    stored as ``''``.
    """
    p = os.path.join(paths.meta_root, irrad, 'productions.json')
    obj = dvc_load(p)
    obj['note'] = str(note) if note else ''
    if level in obj:
        if obj[level] != production:
            self.debug(
                'setting production to irrad={}, level={}, prod={}'.format(
                    irrad, level, production))
            obj[level] = production
            dvc_dump(obj, p)
            if add:
                self.add(p, commit=False)
    else:
        obj[level] = production
        dvc_dump(obj, p)
        if add:
            self.add(p, commit=False)
def experiment_id_modifier(root, expid):
    """Walk ``root`` and rewrite the 'repository_identifier' key of every
    JSON analysis file to ``expid``.

    Hidden files/directories and the known per-analysis subdirectories
    (intercepts, blanks, etc.) are skipped.
    """
    skip_names = ('intercepts', 'blanks', '.git', 'baselines',
                  'icfactors', 'extraction', 'tags', '.data',
                  'monitor', 'peakcenter')
    for r, ds, fs in os.walk(root, topdown=True):
        # prune hidden directories in place so os.walk skips them
        ds[:] = [d for d in ds if not d.startswith('.')]
        if os.path.basename(r) in skip_names:
            continue
        for name in fs:
            if name.startswith('.'):
                continue
            p = os.path.join(r, name)
            print(p)
            with open(p, 'r') as rfile:
                jd = json.load(rfile)
            if 'repository_identifier' in jd:
                jd['repository_identifier'] = expid
                dvc_dump(jd, p)
def modify_meta(p):
    # Rewrite the meta file at ``p`` with the enclosing scope's ``aliquot``
    # and ``step`` (step letter converted to an integer increment).
    meta = dvc_load(p)
    meta['aliquot'] = aliquot
    meta['increment'] = alpha_to_int(step)
    dvc_dump(meta, p)
def repository_db_sync(self, reponame):
    """Sync sample/project/material keys of every analysis file in a
    repository against the database, then commit and push if anything changed.

    NOTE(review): ``sample``, ``project`` and ``material`` are initialized to
    None and never populated from the database record, so as written this
    blanks those keys instead of syncing real values — this looks like an
    unfinished implementation; confirm intent before relying on it.
    """
    repo = self._get_repository(reponame, as_current=False)
    ps = []
    with self.db.session_ctx():
        ans = self.db.repository_analyses(reponame)
        for ai in ans:
            p = analysis_path(ai.record_id, reponame)
            obj = dvc_load(p)
            sample = None
            project = None
            material = None
            changed = False
            # overwrite each attribute only when it differs, tracking
            # whether the file needs to be rewritten
            for attr, v in (('sample', sample),
                            ('project', project),
                            ('material', material)):
                if obj.get(attr) != v:
                    obj[attr] = v
                    changed = True
            if changed:
                ps.append(p)
                dvc_dump(obj, p)
    if ps:
        # pull first so the commit applies cleanly, then push the sync commit
        repo.pull()
        repo.add_paths(ps)
        repo.commit('Synced repository with database {}'.format(
            self.db.datasource_url))
        repo.push()
def set_spectrometer_file(dban, root):
    """Write a spectrometer-settings snapshot (source params, gains,
    deflections) named by its sha, and stamp that sha into the analysis file.
    """
    meas = dban.measurement

    gains = {}
    if dban.gain_history:
        gains = {g.detector.name: g.value
                 for g in dban.gain_history.gains
                 if g.value is not None}

    # deflections
    deflections = {d.detector.name: d.deflection
                   for d in meas.deflections
                   if d.deflection is not None}

    # source
    src = {k: getattr(meas.spectrometer_parameters, k)
           for k in QTEGRA_SOURCE_KEYS}

    obj = {'spectrometer': src, 'gains': gains, 'deflections': deflections}
    spec_sha = spectrometer_sha(src, gains, deflections)
    dvc_dump(obj, os.path.join(root, '{}.json'.format(spec_sha)))

    # update analysis's spec_sha
    apath = analysis_path(dban.record_id, os.path.basename(root))
    aobj = dvc_load(apath)
    aobj['spec_sha'] = spec_sha
    dvc_dump(aobj, apath)
def _fix_id(src_id, dest_id, identifier, root, repo, new_aliquot=None):
    """Rewrite an analysis from ``src_id`` to ``dest_id``: update its
    identifier (and optionally aliquot), write it at the destination path,
    and move every sidecar file (baselines, blanks, ...) alongside it.
    """
    sp = analysis_path(src_id, repo, root=root)
    dp = analysis_path(dest_id, repo, root=root, mode='w')
    print(sp, dp)
    if not os.path.isfile(sp):
        print('not a file', sp)
        return

    jd = dvc_load(sp)
    jd['identifier'] = identifier
    if new_aliquot:
        jd['aliquot'] = new_aliquot
    dvc_dump(jd, dp)
    print('{}>>{}'.format(sp, dp))

    modifiers = ('baselines', 'blanks', 'extraction', 'intercepts',
                 'icfactors', 'peakcenter', '.data')
    for modifier in modifiers:
        msp = analysis_path(src_id, repo, modifier=modifier, root=root)
        mdp = analysis_path(dest_id, repo, modifier=modifier, root=root,
                            mode='w')
        print('{}>>{}'.format(msp, mdp))
        if msp and os.path.isfile(msp):
            shutil.move(msp, mdp)
def dump(self, path=None):
    """Serialize every attr in ``self.attrs`` as a (value, error) pair.

    Writes to ``path`` when given, otherwise to ``self.path``.
    """
    target = self.path if path is None else path
    obj = {a: (getattr(self, a), getattr(self, '{}_err'.format(a)))
           for a in self.attrs}
    dvc_dump(obj, target)
def _save_spectrometer_file(self, path):
    """Dump a snapshot of spectrometer source params, gains and deflections
    (copied so later mutation of per_spec does not affect the file).
    """
    snapshot = {'spectrometer': dict(self.per_spec.spec_dict),
                'gains': dict(self.per_spec.gains),
                'deflections': dict(self.per_spec.defl_dict)}
    dvc_dump(snapshot, path)
def post_extraction_save(self):
    """Persist extraction metadata (blobs, extraction attrs, positions) to
    the 'extraction' file and record measured positions in the database.

    Bug fix: the database loop used ``for p in ps``, shadowing the file
    path ``p`` computed at the top — when any positions existed, the final
    ``dvc_dump(obj, p)`` received a position dict instead of the path. The
    loop variable is renamed.

    NOTE(review): 'request' holds the response blob and 'response' holds
    the output blob — odd key/value pairing; confirm consumers before
    changing. On Python 3 ``b64encode`` returns bytes — confirm dvc_dump
    serializes that.
    """
    p = self._make_path(modifier='extraction')
    rblob = self.per_spec.response_blob
    oblob = self.per_spec.output_blob
    sblob = self.per_spec.setpoint_blob
    if rblob:
        rblob = base64.b64encode(rblob)
    if oblob:
        oblob = base64.b64encode(oblob)
    if sblob:
        sblob = base64.b64encode(sblob)
    obj = {'request': rblob, 'response': oblob, 'sblob': sblob}
    pid = self.per_spec.pid
    if pid:
        obj['pid'] = pid
    for e in EXTRACTION_ATTRS:
        v = getattr(self.per_spec.run_spec, e)
        obj[e] = v
    ps = []
    for i, pp in enumerate(self.per_spec.positions):
        pos, x, y, z = None, None, None, None
        if isinstance(pp, tuple):
            # bare coordinate tuple: (x, y) or (x, y, z)
            if len(pp) == 2:
                x, y = pp
            elif len(pp) == 3:
                x, y, z = pp
        else:
            # named/indexed position; coordinates come from extraction_positions
            pos = pp
            try:
                ep = self.per_spec.extraction_positions[i]
                x = ep[0]
                y = ep[1]
                if len(ep) == 3:
                    z = ep[2]
            except IndexError:
                self.debug('no extraction position for {}'.format(pp))
        pd = {'x': x, 'y': y, 'z': z,
              'position': pos,
              'is_degas': self.per_spec.run_spec.identifier == 'dg'}
        ps.append(pd)
    db = self.dvc.db
    load_name = self.per_spec.load_name
    with db.session_ctx():
        for pdict in ps:
            db.add_measured_position(load=load_name, **pdict)
    obj['positions'] = ps
    hexsha = self.dvc.get_meta_head()
    obj['commit'] = str(hexsha)
    dvc_dump(obj, p)
def dump(self):
    """Write this tag's name, note and subgroup to its tags file, creating
    the path from uuid/repository when it has not been set yet.
    """
    if not self.path:
        self.path = analysis_path(self.uuid,
                                  self.repository_identifier,
                                  modifier='tags',
                                  mode='w')
    payload = {'name': self.name,
               'note': self.note,
               'subgroup': self.subgroup}
    dvc_dump(payload, self.path)
def save_figure(self):
    """Serialize the active editor's figure to <figure_dir>/test.json.

    Bails out silently when no editor is active.
    """
    self.debug('save figure')
    if not self.has_active_editor():
        return
    editor = self.active_editor
    path = os.path.join(paths.figure_dir, 'test.json')
    dvc_dump(self._make_save_figure_object(editor), path)
def save_sensitivities(self, sens):
    """Write each spectrometer's sensitivity record to
    <meta_root>/spectrometers/<name>.sens.json and commit if anything was
    staged.

    The spectrometers root is loop-invariant, so it is computed once
    (previously recomputed per iteration).
    """
    root = os.path.join(paths.meta_root, 'spectrometers')
    ps = []
    for k, v in sens.items():
        p = os.path.join(root, add_extension('{}.sens'.format(k), '.json'))
        dvc_dump(v, p)
        ps.append(p)
    if self.add_paths(ps):
        self.commit('Updated sensitivity')
def set_identifier(self, irradiation, level, pos, identifier):
    """Set the identifier for position ``pos`` in the level file and stage
    the change.

    Bug fix: the position lookup used ``p['position'] != pos``, which
    selected the first position that was NOT the requested one (the
    sibling ``add_position`` methods use ``== pos``); it now matches the
    requested position. The loop variable is also renamed so it no longer
    shadows the path ``p``.
    """
    p = self.get_level_path(irradiation, level)
    jd = dvc_load(p)
    d = next((pi for pi in jd if pi['position'] == pos), None)
    if d:
        d['identifier'] = identifier
        dvc_dump(jd, p)
        self.add(p, commit=False)
def dump(self):
    """Persist the tag name to this object's tags file, deriving the path
    from record_id/repository when it has not been set yet.
    """
    if not self.path:
        self.path = analysis_path(self.record_id,
                                  self.repository_identifier,
                                  modifier='tags',
                                  mode='w')
    dvc_dump({'name': self.name}, self.path)
def fix_meta(dest, repo_identifier, root):
    """Backfill missing irradiation/material/project metadata in every
    analysis file of a repository from the database, then commit and push
    the repository if anything changed.

    Bug fix: ``lchanged`` was never initialized or reset per analysis — the
    first unchanged record raised NameError, and later records inherited
    the previous record's flag. It is now reset at the top of each
    iteration.
    """
    d = os.path.join(root, repo_identifier)
    changed = False
    with dest.session_ctx():
        repo = dest.get_repository(repo_identifier)
        for ra in repo.repository_associations:
            an = ra.analysis
            p = analysis_path(an.record_id, repo_identifier)
            obj = dvc_load(p)
            if not obj:
                print('********************** {} not found in repo'.format(an.record_id))
                continue
            print(an.record_id, p)
            lchanged = False
            if not obj['irradiation']:
                obj['irradiation'] = an.irradiation
                lchanged = True
                changed = True
            if not obj['irradiation_position']:
                obj['irradiation_position'] = an.irradiation_position_position
                lchanged = True
                changed = True
            if not obj['irradiation_level']:
                obj['irradiation_level'] = an.irradiation_level
                lchanged = True
                changed = True
            if not obj['material']:
                obj['material'] = an.irradiation_position.sample.material.name
                lchanged = True
                changed = True
            if not obj['project']:
                obj['project'] = an.irradiation_position.sample.project.name
                lchanged = True
                changed = True
            if obj['repository_identifier'] != an.repository_identifier:
                obj['repository_identifier'] = an.repository_identifier
                lchanged = True
                changed = True
            if lchanged:
                print('{} changed'.format(an.record_id))
                dvc_dump(obj, p)
    if changed:
        from pychron.git_archive.repo_manager import GitRepoManager
        rm = GitRepoManager()
        rm.open_repo(d)
        repo = rm._repo
        repo.git.add('.')
        repo.git.commit('-m', '<MANUAL> fixed metadata')
        repo.git.push()
def fix_iso_list(runid, repository, root):
    """Rename the mislabeled 'PHHCbs' isotope entry to 'Ar39' in an
    analysis file; a no-op when the key is absent.
    """
    path = analysis_path(runid, repository, root=root)
    obj = dvc_load(path)
    isotopes = obj['isotopes']
    if 'PHHCbs' not in isotopes:
        return
    entry = isotopes.pop('PHHCbs')
    entry['name'] = 'Ar39'
    isotopes['Ar39'] = entry
    obj['isotopes'] = isotopes
    dvc_dump(obj, path)
def _save_monitor(self):
    """Serialize monitor check results (name, parameter, criterion,
    comparator, tripped flag and packed data points) to the 'monitor' file.

    Fix: ``''.join`` over ``struct.pack`` output fails on Python 3 (pack
    returns bytes); ``b''.join`` behaves identically on Python 2 and works
    on Python 3.

    NOTE(review): the packed bytes are stored raw in the dumped document;
    other savers in this codebase blob-encode first — confirm dvc_dump can
    serialize raw bytes.
    """
    if self.per_spec.monitor:
        p = self._make_path(modifier='monitor')
        checks = []
        for ci in self.per_spec.monitor.checks:
            data = b''.join([struct.pack('>ff', x, y) for x, y in ci.data])
            params = dict(name=ci.name,
                          parameter=ci.parameter,
                          criterion=ci.criterion,
                          comparator=ci.comparator,
                          tripped=ci.tripped,
                          data=data)
            checks.append(params)
        dvc_dump(checks, p)
def _save_monitor(self):
    """Persist monitor check results (with blob-encoded data) to the
    'monitor' analysis file; a no-op when no monitor is configured.
    """
    if not self.per_spec.monitor:
        return
    path = self._make_path(modifier='monitor')
    checks = [dict(name=ci.name,
                   parameter=ci.parameter,
                   criterion=ci.criterion,
                   comparator=ci.comparator,
                   tripped=ci.tripped,
                   # pack/encode_blob are the project binpack helpers
                   data=encode_blob(pack('>ff', ci.data)))
              for ci in self.per_spec.monitor.checks]
    dvc_dump(checks, path)
def fix_run(runid, repository, root, modifier):
    """Rename the 'PHHCbs' key to 'Ar39' in one modifier file of a run,
    printing whether the file was fixed or skipped.
    """
    path = analysis_path(runid, repository, root=root, modifier=modifier)
    obj = dvc_load(path)
    if 'PHHCbs' in obj:
        obj['Ar39'] = obj.pop('PHHCbs')
        dvc_dump(obj, path)
        msg = 'fixed'
    else:
        msg = 'skipped'
    print(runid, msg)
def update_level_z(self, irradiation, level, z):
    """Set z for a level, migrating legacy list-style level files to the
    {'z', 'positions'} dict form; stages the file when z actually changed.
    """
    path = self.get_level_path(irradiation, level)
    doc = dvc_load(path)
    try:
        dirty = doc['z'] != z
        doc['z'] = z
    except TypeError:
        # legacy file: a bare positions list — wrap it in the dict form
        doc = {'z': z, 'positions': doc}
        dirty = True
    dvc_dump(doc, path)
    if dirty:
        self.add(path, commit=False)
def remove_irradiation_position(self, irradiation, level, hole):
    """Remove position ``hole`` from a level file, normalizing legacy
    list-style files to the {'z', 'positions'} dict form, and stage it.
    """
    path = self.get_level_path(irradiation, level)
    doc = dvc_load(path)
    if doc:
        if isinstance(doc, list):
            positions, z = doc, 0
        else:
            positions, z = doc['positions'], doc['z']
        remaining = [pi for pi in positions if pi['position'] != hole]
        dvc_dump({'z': z, 'positions': remaining}, path)
        self.add(path, commit=False)
def revert_manual_edits(self, runid, repository_identifier):
    """Clear the manual value/error override flags in every fit file
    (intercepts, blanks, baselines, icfactors) for a run and commit.

    Fix: ``dict.itervalues()`` does not exist on Python 3; ``values()`` is
    used instead.
    """
    ps = []
    for mod in ('intercepts', 'blanks', 'baselines', 'icfactors'):
        path = analysis_path(runid, repository_identifier, modifier=mod)
        with open(path, 'r') as rfile:
            obj = json.load(rfile)
        for item in obj.values():
            if isinstance(item, dict):
                item['use_manual_value'] = False
                item['use_manual_error'] = False
        ps.append(path)
        dvc_dump(obj, path)
    msg = '<MANUAL> reverted to non manually edited'
    self.commit_manual_edits(repository_identifier, ps, msg)
def _save_peak_center(self, pc):
    """Serialize peak-center results (base64-packed points per detector) to
    the 'peakcenter' file; an empty object is written when ``pc`` is falsy.

    Fix: ``''.join`` over ``struct.pack`` output fails on Python 3 (pack
    returns bytes), and ``b64encode`` returns bytes that JSON cannot
    serialize — use ``b''.join`` (identical on Python 2) and decode the
    base64 text to str.
    """
    self.info('DVC saving peakcenter')
    p = self._make_path(modifier='peakcenter')
    obj = {}
    if pc:
        obj['reference_detector'] = pc.reference_detector.name
        obj['reference_isotope'] = pc.reference_isotope
        fmt = '>ff'
        obj['fmt'] = fmt
        results = pc.get_results()
        if results:
            for result in results:
                points = base64.b64encode(
                    b''.join([struct.pack(fmt, *di)
                              for di in result.points])).decode('ascii')
                obj[result.detector] = {
                    'low_dac': result.low_dac,
                    'center_dac': result.center_dac,
                    'high_dac': result.high_dac,
                    'low_signal': result.low_signal,
                    'center_signal': result.center_signal,
                    'high_signal': result.high_signal,
                    'points': points}
    dvc_dump(obj, p)
def update_fluxes(self, irradiation, level, j, e, add=True):
    """Apply a single j/j_err pair to every position in a level file,
    handling both legacy list-style and dict-style files.
    """
    path = self.get_level_path(irradiation, level)
    doc = dvc_load(path)
    positions = doc if isinstance(doc, list) else doc.get('positions')
    if positions:
        for entry in positions:
            entry['j'] = j
            entry['j_err'] = e
        dvc_dump(doc, path)
        if add:
            self.add(path, commit=False)
def add_position(self, irradiation, level, pos, add=True):
    """Ensure position ``pos`` exists in a level file (appending a blank
    entry if absent) and rewrite the file in the {'z', 'positions'} form.
    """
    path = self.get_level_path(irradiation, level)
    doc = dvc_load(path)
    if isinstance(doc, list):
        positions, z = doc, 0
    else:
        positions, z = doc.get('positions', []), doc.get('z', 0)
    existing = next((pi for pi in positions if pi['position'] == pos), None)
    if existing is None:
        positions.append({'position': pos, 'decay_constants': {}})
    dvc_dump({'z': z, 'positions': positions}, path)
    if add:
        self.add(path, commit=False)
def copy_production(self, pr):
    """Copy a production DB record to a meta-repo production JSON file.

    @param pr: irrad_ProductionTable object
    @return:
    """
    pname = pr.name.replace(' ', '_')
    path = os.path.join(paths.meta_root, 'productions',
                        '{}.json'.format(pname))
    if os.path.isfile(path):
        # never overwrite an existing production file
        return
    obj = {attr: [getattr(pr, attr), getattr(pr, '{}_err'.format(attr))]
           for attr in INTERFERENCE_KEYS + RATIO_KEYS}
    dvc_dump(obj, path)
def func(x, prog, i, n):
    """Freeze flux for one (repo, irrad, level, data) tuple: merge ``d``
    into any existing level file under the repository's flux directory and
    write the result, recording the path in the enclosing ``added`` list.

    Bug fix: the merged dict ``dd`` was built and then discarded — the
    unmerged ``d`` was dumped, clobbering existing file entries; the merged
    dict is now written.
    """
    repo, irrad, level, d = x
    if prog:
        prog.change_message('Freezing Flux {}{} Repository={}'.format(
            irrad, level, repo))
    root = os.path.join(paths.repository_dataset_dir, repo, 'flux', irrad)
    r_mkdir(root)
    p = os.path.join(root, level)
    if os.path.isfile(p):
        dd = dvc_load(p)
        dd.update(d)
        d = dd
    dvc_dump(d, p)
    added.append((repo, p))
def fix_a_steps(dest, repo_identifier, root):
    """Repair aliquots whose first analysis has increment None while the
    second has increment 1: the first is rewritten with increment 0 and all
    of its sidecar files are renamed to the new record id.

    NOTE(review): sorting ``ais`` by increment can raise TypeError on
    Python 3 when increments mix None and ints — confirm target runtime.
    NOTE(review): after ``an.increment = 0`` the subsequent
    ``analysis_path(an.record_id, ...)`` calls presumably yield the NEW
    record id — confirm record_id is derived from increment before reuse.
    """
    with dest.session_ctx():
        repo = dest.get_repository(repo_identifier)
        # (identifier, aliquot, increment, record_id, db id) per association
        ans = [(ra.analysis.irradiation_position.identifier,
                ra.analysis.aliquot,
                ra.analysis.increment,
                ra.analysis.record_id,
                ra.analysis.id) for ra in repo.repository_associations]
        key = lambda x: x[0]
        ans = sorted(ans, key=key)  # groupby requires pre-sorted input
        for identifier, ais in groupby(ans, key=key):
            try:
                int(identifier)
            except ValueError:
                # skip non-numeric identifiers (references, blanks, etc.)
                continue
            # groupby aliquot
            key = lambda xi: xi[1]
            for aliquot, ais in groupby(ais, key=key):
                ais = sorted(ais, key=lambda ai: ai[2])
                print(identifier, aliquot, ais)
                # if the first increment for a given aliquot is 1
                # and the increment for the first analysis of the aliquot is None
                if len(ais) == 1:
                    continue
                if ais[0][2] is None and ais[1][2] == 1:
                    an = dest.get_analysis(ais[0][4])
                    print('fix', ais[0], an, an.record_id)
                    original_record_id = str(an.record_id)
                    path = analysis_path(an.record_id, repo_identifier)
                    obj = dvc_load(path)
                    obj['increment'] = 0
                    an.increment = 0
                    npath = analysis_path(an.record_id, repo_identifier)
                    dvc_dump(obj, npath)
                    os.remove(path)
                    # move every sidecar file to the renamed record id
                    for modifier in ('baselines', 'blanks', 'extraction',
                                     'intercepts', 'icfactors', 'peakcenter',
                                     '.data'):
                        npath = analysis_path(an.record_id, repo_identifier,
                                              modifier=modifier)
                        opath = analysis_path(original_record_id,
                                              repo_identifier,
                                              modifier=modifier)
                        # print opath, npath
                        os.rename(opath, npath)
def update_flux(self, irradiation, level, pos, identifier, j, e, mj, me,
                decay=None, position_jerr=None, analyses=None, options=None,
                add=True):
    """Insert or replace the flux record for position ``pos`` in a level
    file, preserving the file's z value and normalizing legacy list-style
    files to the {'z', 'positions'} form.
    """
    if options is None:
        options = {}
    if decay is None:
        decay = {}
    if analyses is None:
        analyses = []

    path = self.get_level_path(irradiation, level)
    doc = dvc_load(path)
    if isinstance(doc, list):
        positions, z = doc, 0
    else:
        positions, z = doc.get('positions', []), doc.get('z', 0)

    record = {'position': pos,
              'j': j, 'j_err': e,
              'mean_j': mj, 'mean_j_err': me,
              'position_jerr': position_jerr,
              'decay_constants': decay,
              'identifier': identifier,
              'options': options,
              'analyses': [{'uuid': ai.uuid,
                            'record_id': ai.record_id,
                            'status': ai.is_omitted()} for ai in analyses]}

    if positions:
        # replace the matching entry, appending when no entry matched
        replaced = any(pi['position'] == pos for pi in positions)
        updated = [record if pi['position'] == pos else pi
                   for pi in positions]
        if not replaced:
            updated.append(record)
    else:
        updated = [record]

    dvc_dump({'z': z, 'positions': updated}, path)
    if add:
        self.add(path, commit=False)
def update_productions(self, irrad, level, production, note=None, add=True):
    """Update the production for ``level`` in the irradiation's productions.json.

    Writes only when the level is new or its production actually changed.

    Bug fix: ``str(note) or ''`` stored the literal string ``'None'`` when
    ``note`` was None (``str(None)`` is truthy); an absent note is now
    stored as ``''``.
    """
    p = os.path.join(paths.meta_root, irrad, 'productions.json')
    obj = dvc_load(p)
    obj['note'] = str(note) if note else ''
    if level in obj:
        if obj[level] != production:
            self.debug('setting production to irrad={}, level={}, prod={}'.format(irrad, level, production))
            obj[level] = production
            dvc_dump(obj, p)
            if add:
                self.add(p, commit=False)
    else:
        obj[level] = production
        dvc_dump(obj, p)
        if add:
            self.add(p, commit=False)
def _make_flux_file(self, repo, irrad, unks):
    """Build or refresh <repo>/<irrad>.json with one flux entry per unique
    unknown identifier, merging into any existing file, and record the path.

    Uses a set for the seen-identifier check (the original list made each
    membership test O(n)).
    """
    path = os.path.join(paths.repository_dataset_dir, repo,
                        '{}.json'.format(irrad))
    # read in existing flux file
    obj = {}
    if os.path.isfile(path):
        obj = dvc_load(path)

    seen = set()
    for unk in unks:
        identifier = unk.identifier
        if identifier not in seen:
            obj[identifier] = {'j': self.recaller.get_flux(identifier)}
            seen.add(identifier)

    dvc_dump(obj, path)
    self._paths.append(path)
def manual_edit(self, runid, repository_identifier, values, errors, modifier):
    """Apply manual value/error overrides to a run's fit file and return
    the path that was rewritten.

    Fix: ``dict.iteritems()`` does not exist on Python 3; ``items()`` is
    used instead.
    """
    self.debug('manual edit {} {} {}'.format(runid, repository_identifier, modifier))
    self.debug('values {}'.format(values))
    self.debug('errors {}'.format(errors))
    path = analysis_path(runid, repository_identifier, modifier=modifier)
    with open(path, 'r') as rfile:
        obj = json.load(rfile)
    for k, v in values.items():
        o = obj[k]
        o['manual_value'] = v
        o['use_manual_value'] = True
    for k, v in errors.items():
        o = obj[k]
        o['manual_error'] = v
        o['use_manual_error'] = True
    dvc_dump(obj, path)
    return path
def add_position(self, irradiation, level, pos, add=True):
    """Ensure a (list-style) level file contains an entry for ``pos``,
    appending a blank entry if absent, then rewrite and optionally stage.
    """
    path = self.get_level_path(irradiation, level)
    doc = dvc_load(path)
    existing = next((pi for pi in doc if pi['position'] == pos), None)
    if existing is None:
        doc.append({'position': pos, 'decay_constants': {}})
    dvc_dump(doc, path)
    if add:
        self.add(path, commit=False)
def _save_peak_center(self, pc):
    """Serialize peak-center results (base64-packed points per detector) to
    the 'peakcenter' file; an empty object is written when ``pc`` is falsy.

    Fix: ``''.join`` over ``struct.pack`` output fails on Python 3 (pack
    returns bytes), and ``b64encode`` returns bytes that JSON cannot
    serialize — use ``b''.join`` (identical on Python 2) and decode the
    base64 text to str.
    """
    self.info('DVC saving peakcenter')
    p = self._make_path(modifier='peakcenter')
    obj = {}
    if pc:
        obj['reference_detector'] = pc.reference_detector.name
        obj['reference_isotope'] = pc.reference_isotope
        fmt = '>ff'
        obj['fmt'] = fmt
        results = pc.get_results()
        if results:
            for result in results:
                points = base64.b64encode(
                    b''.join([struct.pack(fmt, *di)
                              for di in result.points])).decode('ascii')
                obj[result.detector] = {'low_dac': result.low_dac,
                                        'center_dac': result.center_dac,
                                        'high_dac': result.high_dac,
                                        'low_signal': result.low_signal,
                                        'center_signal': result.center_signal,
                                        'high_signal': result.high_signal,
                                        'points': points}
    dvc_dump(obj, p)
def dump_equilibration(self, keys, reviewed=False):
    """Re-pack signal and sniff data for the isotopes in ``keys`` into the
    '.data' file, backfilling any isotope missing from the existing lists,
    and return the path written.

    Bug fix: the backfill loop called ``self.get_isotope(key)`` — ``key``
    being a stale variable left over from the first loop — instead of the
    current loop variable ``k``, so the wrong isotope could be packed.
    """
    path = self._analysis_path(modifier='.data')
    jd = dvc_load(path)
    endianness = jd['format'][0]
    nsignals = []
    nsniffs = []
    for new, existing in ((nsignals, 'signals'), (nsniffs, 'sniffs')):
        for sig in jd[existing]:
            key = sig['isotope']
            if key in keys:
                # re-pack this isotope's current data
                iso = self.get_isotope(key)
                if existing == 'sniffs':
                    iso = iso.sniff
                sblob = encode_blob(iso.pack(endianness, as_hex=False))
                new.append({'isotope': iso.name,
                            'blob': sblob,
                            'detector': iso.detector})
            else:
                # keep the stored record untouched
                new.append(sig)
    for k in keys:
        # check to make sure signals/sniffs fully populated
        for new, issniff in ((nsignals, False), (nsniffs, True)):
            if not next((n for n in new if n['isotope'] == k), None):
                iso = self.get_isotope(k)
                if issniff:
                    iso = iso.sniff
                sblob = encode_blob(iso.pack(endianness, as_hex=False))
                new.append({'isotope': iso.name,
                            'blob': sblob,
                            'detector': iso.detector})
    jd['reviewed'] = reviewed
    jd['signals'] = nsignals
    jd['sniffs'] = nsniffs
    dvc_dump(jd, path)
    return path
def _dump(self, obj, path=None, modifier=None):
    """Serialize ``obj``; when ``path`` is omitted it is derived from the
    analysis path for ``modifier``.
    """
    target = self._analysis_path(modifier) if path is None else path
    dvc_dump(obj, target)
def dump_meta(self, meta):
    """Serialize ``meta`` to this object's meta_path."""
    dvc_dump(meta, self.meta_path)
def save_gains(self, ms, gains_dict):
    """Write detector gains for spectrometer ``ms`` and commit when the
    file was actually staged (i.e. changed).
    """
    path = self._gain_path(ms)
    dvc_dump(gains_dict, path)
    if self.add_paths(path):
        self.commit('Updated gains')
def add_level(self, irrad, level, add=True):
    """Create an empty level file for ``irrad``/``level`` and optionally
    stage it (uncommitted).
    """
    path = self.get_level_path(irrad, level)
    dvc_dump([], path)
    if add:
        self.add(path, commit=False)
def update_level_z(self, irradiation, level, z):
    """Set the z value in a (dict-style) level file and rewrite it."""
    path = self.get_level_path(irradiation, level)
    doc = dvc_load(path)
    doc['z'] = z
    dvc_dump(doc, path)