def run(self, state):
    """Populate the pipeline *state* with this node's unknown analyses.

    When neither the node nor the state has unknowns yet, prompt the user
    to configure; a cancelled configure aborts the run.
    """
    if not self.unknowns and not state.unknowns:
        if not self.configure():
            # user cancelled configuration: flag the pipeline as canceled
            state.canceled = True
            return

    review_req = []
    unks = self.unknowns
    for ai in unks:
        ai.group_id = 0
        if self.check_reviewed:
            for attr in ('blanks', 'iso_evo'):
                # check analyses to see if they have been reviewed
                if attr not in review_req:
                    if not self.dvc.analysis_has_review(ai, attr):
                        review_req.append(attr)

    if review_req:
        # tell the user which review stages are still outstanding
        information(None, 'The current data set has been '
                          'analyzed and requires {}'.format(','.join(review_req)))

    # add our analyses to the state
    items = getattr(state, self.analysis_kind)
    items.extend(self.unknowns)

    state.projects = {ai.project for ai in state.unknowns}
def configure(self, pre_run=False, **kw):
    """Ensure a CSV input file is selected, prompting the user if needed.

    pre_run: True when called automatically before a pipeline run; only a
    manual (non pre-run) call marks this node as manually configured.
    Returns True when a usable path is set.
    """
    if not pre_run:
        self._manual_configured = True

    if not self.path or not os.path.isfile(self.path):
        # no valid file yet: describe the expected format, then prompt
        msg = '''CSV File Format Create/select a file with a column header as the first line. The following columns are required: runid, age, age_err Optional columns are: group, aliquot e.x. runid, age, age_error SampleA, 10, 0.24 SampleB, 11, 0.32 SampleC, 10, 0.40'''
        information(None, msg)
        dlg = FileDialog()
        if dlg.open() == OK:
            self.path = dlg.path

    return bool(self.path)
def _do_diff_fired(self):
    # Diff the two selected commits for this analysis across each JSON
    # modifier file and present the result in a DiffView.
    if self.selected_commits:
        lhs = self.selected_lhs
        rhs = self.selected_rhs
        lhsid = lhs.hexsha[:8]
        lhsdate = isoformat_date(lhs.date)

        rhsid = rhs.hexsha[:8]
        # NOTE(review): lhs uses isoformat_date() while rhs uses
        # date.isoformat(); confirm both yield the same display format
        rhsdate = rhs.date.isoformat()

        diffs = []
        for a in ('blanks', 'icfactors', 'intercepts'):
            # repository path of this analysis' <modifier> JSON file
            p = analysis_path((self.uuid, self.record_id), self.repository_identifier, modifier=a)
            dd = get_diff(self.repo, lhs.hexsha, rhs.hexsha, p)
            if dd:
                diffs.append((a, dd))

        if diffs:
            v = DiffView(self.record_id, lhsid, rhsid, lhsdate, rhsdate)
            for a, (aa, bb) in diffs:
                func = getattr(v, 'set_{}'.format(a))
                # read both blob versions and hand decoded JSON to the view
                a = aa.data_stream.read().decode('utf-8')
                b = bb.data_stream.read().decode('utf-8')
                func(json.loads(a), json.loads(b))
            v.finish()
            open_view(v)
        else:
            information(None, 'No Differences between {} and {}'.format(lhsid, rhsid))
def perform(self, event):
    """Open the current experiment file, if one exists.

    Informs the user and aborts when CurrentExperiment.txt is missing.
    """
    name = 'CurrentExperiment.txt'
    path = os.path.join(paths.experiment_dir, name)
    if not os.path.isfile(path):
        information(None, 'No experiment called {}'.format(name))
        # bug fix: previously fell through and tried to open a missing file
        return
    open_experiment(event, path)
def _do_diff_fired(self):
    """Show a diff of the two selected commits, or report no differences."""
    if not self.selected_commits:
        return

    lhs, rhs = self.selected_lhs, self.selected_rhs
    lhsid, rhsid = lhs.hexsha[:8], rhs.hexsha[:8]
    lhsdate = isoformat_date(lhs.date)
    rhsdate = rhs.date.isoformat()

    modified = []
    for modifier in ('blanks', 'icfactors', 'intercepts'):
        path = analysis_path((self.uuid, self.record_id),
                             self.repository_identifier,
                             modifier=modifier)
        delta = get_diff(self.repo, lhs.hexsha, rhs.hexsha, path)
        if delta:
            modified.append((modifier, delta))

    if not modified:
        information(None, 'No Differences between {} and {}'.format(lhsid, rhsid))
        return

    view = DiffView(self.record_id, lhsid, rhsid, lhsdate, rhsdate)
    for modifier, (lblob, rblob) in modified:
        setter = getattr(view, 'set_{}'.format(modifier))
        ltext = lblob.data_stream.read().decode('utf-8')
        rtext = rblob.data_stream.read().decode('utf-8')
        setter(json.loads(ltext), json.loads(rtext))
    view.finish()
    open_view(view)
def _open_via_finder_button_fired(self):
    """Describe the expected CSV layout, then let the user pick a file."""
    msg = '''CSV File Format # Create/select a file with a column header as the first line. # The following columns are required: # # runid, age, age_err # # Optional columns are: # # group, aliquot, sample # # e.x. # runid, age, age_error # Run1, 10, 0.24 # Run2, 11, 0.32 # Run3, 10, 0.40'''
    information(None, msg)

    dlg = FileDialog(default_directory=paths.csv_data_dir, action='open')
    selected = ''
    if dlg.open() == OK and dlg.path:
        selected = dlg.path
    self.in_path = selected
def run(self, state):
    """Feed this node's unknown analyses into the pipeline state."""
    if not (self.unknowns or state.unknowns):
        if not self.configure():
            state.canceled = True
            return

    review_req = []
    for unk in self.unknowns:
        unk.group_id = 0
        if self.check_reviewed:
            # record each analysis stage that still lacks a review
            for attr in ('blanks', 'iso_evo'):
                if attr not in review_req and not self.dvc.analysis_has_review(unk, attr):
                    review_req.append(attr)

    if review_req:
        information(None, 'The current data set has been '
                          'analyzed and requires {}'.format(','.join(review_req)))

    # hand our analyses to the state under the configured kind
    getattr(state, self.analysis_kind).extend(self.unknowns)
    state.projects = {unk.project for unk in state.unknowns}
def close(self, info, is_ok):
    """Veto closing the dialog on OK when no commit message was entered.

    Returns False (keep dialog open) when accepted without a message,
    True otherwise. Removed a leftover debug print ('asdf', ...).
    """
    if is_ok and not info.object.commit_message:
        information(None, 'Please enter a commit message')
        return False
    return True
def perform(self, event):
    """Delete the hidden analysis-sets file after user confirmation."""
    from pychron.paths import paths

    p = paths.hidden_path('analysis_sets')
    if not os.path.isfile(p):
        information(None, 'No Analysis Sets to remove')
        return

    if confirm(None, 'Are you sure you want to clear the Analysis Sets?') == YES:
        os.remove(p)
def _apply_history_button_fired(self):
    # Re-apply the gains recorded in the selected history entry and stamp
    # the history row with the (re)application time.
    if self.spectrometer:
        self.spectrometer.set_gains(gains=self.selected.gains,
                                    history=self.selected)

        db = self.db
        hist = db.get_gain_history(self.selected.hashkey)
        # record when this gain set was applied
        hist.applied_date = datetime.now()
        db.commit()

        information(None, 'Gains update to {}'.format(self.selected.create_date))
def perform(self, event):
    """Open the laboratory repository's new-issue page in a browser."""
    app = event.task.window.application
    name = app.preferences.get('pychron.general.remote')
    if not name:
        information(event.task.window.control,
                    'Please set an "Laboratory Repo" in General Preferences')
        return

    self._open_url('https://github.com/{}/issues/new'.format(name))
def edit_key_bindings():
    """Open the key-binding editor; persist accepted changes to disk."""
    from pychron.core.ui.qt.keybinding_editor import KeyBindingsEditor

    bindings = [mKeyBinding(id=key, binding=val[0], description=val[1])
                for key, val in user_key_map.items()]
    kb = mKeyBindings(bindings=bindings)

    editor = KeyBindingsEditor(model=kb)
    info = editor.edit_traits()
    if info.result:
        dump_key_bindings(kb.dump())
        information(None, 'Changes take effect on Restart')
def perform(self, event):
    # Prototype action: pushes one hard-coded analysis through the Geochron
    # XML assembler and prints the result. Not production-ready.
    information(None, 'Upload to Geochron is not fully implemented')
    app = event.task.application
    geochron_service = app.get_service('pychron.geochron.geochron_service.GeochronService')
    dvc = app.get_service(DVC_PROTOCOL)
    with dvc.session_ctx():
        # hard-coded analysis UUID used as a smoke-test fixture
        ai = dvc.get_analysis('c038c72a-cf21-49f9-a5b5-1e43bb4a6b93')
        ans = dvc.make_analyses(ai)
        ag = AnalysisGroup()
        ag.analyses = ans
        print(geochron_service.assemble_xml(ag))
def _apply_button_fired(self):
    """Apply detector gains and record them as a manual gain history."""
    gains = self.spectrometer.set_gains()
    if not gains:
        return

    db = self.db
    hashkey = db.make_gains_hash(gains)
    hist = db.get_gain_history(hashkey)
    if not hist:
        # first time this exact gain set is seen: persist it
        hist = db.add_gain_history(hashkey, save_type='manual')
        for det, value in gains.items():
            db.add_gain(det, value, hist)

    hist.applied_date = datetime.now()
    db.commit()

    gainstr = '\n'.join(['{} {}'.format(*g) for g in gains.items()])
    information(None, 'Gains set\n\n{}'.format(gainstr))
def perform(self, event):
    """Assemble and print Geochron XML for a fixed test analysis."""
    information(None, 'Upload to Geochron is not fully implemented')

    app = event.task.application
    service = app.get_service('pychron.geochron.geochron_service.GeochronService')
    dvc = app.get_service(DVC_PROTOCOL)
    with dvc.session_ctx():
        analysis = dvc.get_analysis('c038c72a-cf21-49f9-a5b5-1e43bb4a6b93')
        group = AnalysisGroup()
        group.analyses = dvc.make_analyses(analysis)
        print(service.assemble_xml(group))
def _click_to_show_instructions_fired(self):
    """Display the spike-removal instructions dialog.

    Fix: the previous version bound the dialog's result with a stray
    trailing comma, creating an unused 1-tuple; the binding is dropped.
    """
    information(None,
                "\nTo remove spikes from the data:\n\n"
                " 1. Click \"Show derivative histogram\" to "
                "determine at what magnitude the spikes are present.\n"
                " 2. Enter a suitable threshold (lower than the "
                "lowest magnitude outlier in the histogram) in the "
                "\"Threshold\" box, which will be the magnitude "
                "from which to search. \n"
                " 3. Click \"Find next\" to find the first spike.\n"
                " 4. If desired, the width and position of the "
                "boundaries used to replace the spike can be "
                "adjusted by clicking and dragging on the displayed "
                "plot.\n "
                " 5. View the spike (and the replacement data that "
                "will be added) and click \"Remove spike\" in order "
                "to alter the data as shown. The tool will "
                "automatically find the next spike to replace.\n"
                " 6. Repeat this process for each spike throughout "
                "the dataset, until the end of the dataset is "
                "reached.\n"
                " 7. Click \"OK\" when finished to close the spikes "
                "removal tool.\n\n"
                "Note: Various settings can be configured in "
                "the \"Advanced settings\" section. Hover the "
                "mouse over each parameter for a description of what "
                "it does."
                "\n",
                title="Instructions")
def set_interpreted_age(dvc, ias):
    # Present the interpreted-age factory dialog, validate lat/lon, save
    # accepted ages grouped per repository, and optionally push the repos.
    repos = dvc.get_local_repositories()
    liths, groups, classes, types = get_lithology_values()
    for ia in ias:
        # seed each interpreted age with the available lithology vocabularies
        ia.lithology_classes = classes
        ia.lithology_groups = groups
        ia.lithology_types = types
        ia.lithologies = liths

    model = InterpretedAgeFactoryModel(items=ias, selected=ias[:1], dvc=dvc)
    iaf = InterpretedAgeFactoryView(model=model, repository_identifiers=repos)
    while 1:
        info = iaf.edit_traits()
        if info.result:
            no_lat_lon = []
            # keep only the ages the user marked for use
            ias = [ia for ia in ias if ia.use]
            for ia in ias:
                if not ia.latitude and not ia.longitude:
                    no_lat_lon.append('{} ({})'.format(ia.name, ia.identifier))
            if no_lat_lon:
                n = ','.join(no_lat_lon)
                if not confirm(
                        None,
                        'No Lat/Lon. entered for {}. Are you sure you want to continue without setting a '
                        'Lat/Lon?'.format(n)) == YES:
                    # re-open the dialog so the user can fill in lat/lon
                    continue

            ris = []
            # persist ages repository-by-repository; remember which changed
            for rid, iass in groupby_key(ias, key='repository_identifier'):
                if dvc.add_interpreted_ages(rid, iass):
                    ris.append(rid)

            if ris:
                if confirm(
                        None,
                        'Would you like to share changes to {}?'.format(','.join(ris))) == YES:
                    for rid in ris:
                        dvc.push_repository(rid)
                    information(None, 'Sharing changes complete')
            break
        else:
            break
def initialize_version(appname, debug):
    # Python 2 bootstrap: set up sys.path/eggs, resolve the user, read the
    # application preferences, and recover or prompt for the pychron root.
    root = os.path.dirname(__file__)
    if not debug:
        add_eggs(root)
    else:
        build_sys_path()

    # can now use pychron.
    from pychron.envisage.user_login import get_user

    user = get_user()
    if not user:
        logger.info('user login failed')
        return

    # strip the 'py' prefix so the preferences directory name matches
    if appname.startswith('py'):
        appname = appname[2:]

    from pychron.paths import paths
    pref_path = os.path.join(paths.base, '.enthought',
                             'pychron.{}.application.{}'.format(appname, user),
                             'preferences.ini')

    from ConfigParser import ConfigParser
    cp = ConfigParser()
    cp.read(pref_path)
    proot = None
    try:
        proot = cp.get('pychron.general', 'root_dir')
    except BaseException, e:
        # preferences missing/unreadable: ask the user for a root directory
        print 'root_dir exception={}'.format(e)
        from pyface.directory_dialog import DirectoryDialog
        information(
            None,
            'Pychron root directory not set in Preferences/General. Please select a valid directory')
        dlg = DirectoryDialog(action='open', default_directory=os.path.expanduser('~'))
        result = dlg.open()
        if result == OK:
            proot = str(dlg.path)
def get_user(current=None):
    """
    current: str, current user. if supplied omit from available list
    """
    if os.path.isfile(login_file):
        # a pending one-shot login file short-circuits the dialog:
        # consume it and return its contents
        with open(login_file, 'r') as rfile:
            u = rfile.read()
            os.remove(login_file)
            return u

    users, last_login, isfile = load_user_file()
    use_login, multi_user = get_last_login(last_login)
    if use_login:
        # check to see if the login file is set
        # read the existing user file
        if not isfile and multi_user:
            information(None, 'Auto login as root. Quit to populate the user list')
            dump_user_file(['root'], 'root')
            return 'root'

        if current:
            # omit the currently logged-in user from the choices
            users = [u for u in users if u != current]

        login = Login(users=users)
        if users:
            login.user = last_login if last_login in users else users[0]

        while 1:
            info = login.edit_traits()
            if info.result:
                if login.user:
                    # add the manually entered user name to the users file
                    if not current and not multi_user:
                        if login.user not in users:
                            users.append(login.user)
                            dump_user_file(users, login.user)
                    return login.user
            else:
                break
    else:
        return 'root'
def get_user(current=None):
    """
    current: str, current user. if supplied omit from available list
    """
    if os.path.isfile(paths.login_file):
        # consume the one-shot login file and use its contents directly
        with open(paths.login_file, 'r') as fp:
            u = fp.read()
            os.remove(paths.login_file)
            return u

    users, last_login, isfile = load_user_file()
    use_login, multi_user = get_last_login(last_login)
    if use_login:
        # check to see if the login file is set
        # read the existing user file
        if not isfile and multi_user:
            information(None, 'Auto login as root. Quit to populate the user list')
            dump_user_file(['root'], 'root')
            return 'root'

        if current:
            # omit the currently logged-in user from the choices
            users = [u for u in users if u != current]

        login = Login(users=users)
        if users:
            login.user = last_login if last_login in users else users[0]

        while 1:
            info = login.edit_traits()
            if info.result:
                if login.user:
                    # add the manually entered user name to the users file
                    if not current and not multi_user:
                        if login.user not in users:
                            users.append(login.user)
                            dump_user_file(users, login.user)
                    return login.user
            else:
                break
    else:
        return 'root'
def perform(self, event):
    # Write an example tab-delimited template file for the user to fill in.
    import os
    from pychron.globals import globalv
    from pychron.paths import paths

    name = self.fname
    if globalv.debug:
        # debug mode: write straight to a canned path without prompting
        p = os.path.join(paths.data_dir, '{}_template.txt'.format(name.lower()))
    else:
        p = None
        dlg = FileDialog(action='save as', default_directory=paths.data_dir)
        # NOTE(review): other dialogs in this codebase compare
        # dlg.open() == OK; confirm plain truthiness is intended here
        if dlg.open():
            p = dlg.path

    if p is not None:
        with open(p, 'w') as wfile:
            wfile.write(self.template)
        information(event.task.window.control,
                    'An example {} template was written to {}\n\n'
                    'Replace the placeholder data in the file with your data\n'
                    'Make sure to use "TABS" to deliminate columns'.format(name, p))
def initialize_version(appname, debug):
    # Python 2 bootstrap: set up sys.path/eggs, resolve the user, read the
    # application preferences, and recover or prompt for the pychron root.
    root = os.path.dirname(__file__)
    if not debug:
        add_eggs(root)
    else:
        build_sys_path()

    # can now use pychron.
    from pychron.envisage.user_login import get_user

    user = get_user()
    if not user:
        logger.info("user login failed")
        return

    # strip the 'py' prefix so the preferences directory name matches
    if appname.startswith("py"):
        appname = appname[2:]

    from pychron.paths import paths
    pref_path = os.path.join(
        paths.base, ".enthought",
        "pychron.{}.application.{}".format(appname, user), "preferences.ini"
    )

    from ConfigParser import ConfigParser
    cp = ConfigParser()
    cp.read(pref_path)
    proot = None
    try:
        proot = cp.get("pychron.general", "root_dir")
    except BaseException, e:
        # preferences file missing or key absent: ask the user directly
        print "root_dir exception={}".format(e)
        from pyface.directory_dialog import DirectoryDialog
        information(None, "Pychron root directory not set in Preferences/General. Please select a valid directory")
        dlg = DirectoryDialog(action="open", default_directory=os.path.expanduser("~"))
        result = dlg.open()
        if result == OK:
            proot = str(dlg.path)
def perform(self, event):
    # Walk every local dataset repository; for each repo with commits not
    # yet on origin/master, pull then offer to push them.
    from git import Repo
    from git.exc import InvalidGitRepositoryError
    from pychron.paths import paths

    remote = 'origin'
    branch = 'master'
    repos = []
    for d in os.listdir(paths.repository_dataset_dir):
        # skip hidden/backup directory entries
        if d.startswith('.') or d.startswith('~'):
            continue
        try:
            r = Repo(repository_path(d))
        except InvalidGitRepositoryError:
            continue
        repos.append(r)

    n = len(repos)
    pd = myProgressDialog(max=n - 1, can_cancel=True, can_ok=False)
    pd.open()
    shared = False
    for r in repos:
        pd.change_message('Fetch {}'.format(os.path.basename(r.working_dir)))
        # commits on HEAD that are not yet on the remote branch
        c = r.git.log('{}/{}..HEAD'.format(remote, branch), '--oneline')
        if c:
            r.git.pull()
            d = os.path.basename(r.working_dir)
            if confirm(None, 'Share changes made to {}.\n\n{}'.format(d, c)) == YES:
                r.git.push(remote, branch)
                shared = True

    msg = 'Changes successfully shared' if shared else 'No changes to share'
    information(None, msg)
def perform(self, event):
    """Offer to push unshared commits in each local dataset repository."""
    from git import Repo
    from git.exc import InvalidGitRepositoryError
    from pychron.paths import paths

    remote, branch = 'origin', 'master'

    repositories = []
    for entry in os.listdir(paths.repository_dataset_dir):
        if entry.startswith('.') or entry.startswith('~'):
            continue
        try:
            repositories.append(Repo(repository_path(entry)))
        except InvalidGitRepositoryError:
            continue

    progress = myProgressDialog(max=len(repositories) - 1,
                                can_cancel=True, can_ok=False)
    progress.open()

    shared = False
    for repo in repositories:
        base = os.path.basename(repo.working_dir)
        progress.change_message('Fetch {}'.format(base))
        ahead = repo.git.log('{}/{}..HEAD'.format(remote, branch), '--oneline')
        if ahead:
            repo.git.pull()
            if confirm(None, 'Share changes made to {}.\n\n{}'.format(base, ahead)) == YES:
                repo.git.push(remote, branch)
                shared = True

    information(None, 'Changes successfully shared' if shared else 'No changes to share')
def perform(self, event):
    """Open (clone or update) the lab settings repository, then act on it."""
    app = event.task.window.application
    name = app.preferences.get('pychron.general.remote')
    if not name:
        information(event.task.window.control,
                    'Please set an "Laboratory Repo" in General Preferences')
        return

    from pychron.envisage.settings_repo import SettingsRepoManager
    from pychron.paths import paths

    root = os.path.join(paths.root_dir, '.lab')
    if os.path.isdir(os.path.join(root, '.git')):
        # existing clone: open it and bring it up to date
        repo = SettingsRepoManager()
        repo.path = root
        repo.open_repo(root)
        repo.pull()
    else:
        repo = SettingsRepoManager.clone_from('https://github.com/{}'.format(name), root)

    self._perform(repo)
def perform(self, event):
    """Switch the application over to the offline database."""
    from pychron.dvc.work_offline import switch_to_offline_database

    switch_to_offline_database(event.task.window.application.preferences)
    information(None,
                'You are now using the offline database. Close any Browser or Pipeline windows to activate '
                'offline database')
def _click_to_show_instructions_fired(self):
    """Show the spike-removal instructions dialog.

    Fix: the previous version bound the result with a stray trailing
    comma, creating an unused 1-tuple; just show the dialog.
    """
    from pyface.message_dialog import information

    information(None, SPIKES_REMOVAL_INSTRUCTIONS, title="Instructions")
def _do_diff_fired(self):
    # Diff the two selected commits across each analysis JSON modifier and
    # present the result in a DiffView.
    # (A large commented-out prototype for single-commit/HEAD ("IMPORT")
    # diffs previously lived here; condensed to this note — see VCS history.)
    if self.selected_commits:
        lhs = self.selected_lhs
        rhs = self.selected_rhs
        lhsid = lhs.hexsha[:8]
        lhsdate = isoformat_date(lhs.date)

        rhsid = rhs.hexsha[:8]
        # NOTE(review): lhs uses isoformat_date() while rhs uses
        # date.isoformat(); confirm both yield the same display format
        rhsdate = rhs.date.isoformat()

        diffs = []
        for a in ('blanks', 'icfactors', 'tags', 'intercepts'):
            # repository path of this analysis' <modifier> JSON file
            p = analysis_path(self.record_id, self.repository_identifier, modifier=a)
            dd = get_diff(self.repo, lhs.hexsha, rhs.hexsha, p)
            if dd:
                diffs.append((a, dd))

        if diffs:
            v = DiffView(self.record_id, lhsid, rhsid, lhsdate, rhsdate)
            for a, (aa, bb) in diffs:
                func = getattr(v, 'set_{}'.format(a))
                func(json.load(aa.data_stream), json.load(bb.data_stream))
            v.finish()
            open_view(v)
        else:
            information(None, 'No Differences between {} and {}'.format(lhsid, rhsid))
def show_evolutions_factory(record_id, isotopes, show_evo=True,
                            show_equilibration=False, show_baseline=False):
    # Build a stacked regression graph showing, per isotope, any of the
    # equilibration (sniff), evolution and baseline series; returns the graph
    # or None when too many windows are already open.
    if WINDOW_CNT > 20:
        information(
            None,
            'You have too many Isotope Evolution windows open. Close some before proceeding'
        )
        return

    from pychron.graph.stacked_regression_graph import StackedRegressionGraph

    # x-limits: anchor at 0 when plotting the evolution, else fully autoscale
    if not show_evo:
        xmi = Inf
        xma = -Inf
    else:
        xmi, xma = 0, -Inf

    g = StackedRegressionGraph(resizable=True)
    g.plotcontainer.spacing = 10
    g.window_height = min(275 * len(isotopes), 800)
    # cascade successive windows so they don't overlap exactly
    g.window_x = OX + XOFFSET * WINDOW_CNT
    g.window_y = OY + YOFFSET * WINDOW_CNT

    isotopes = sort_isotopes(isotopes, reverse=False, key=lambda x: x.name)
    for i, iso in enumerate(isotopes):
        ymi, yma = Inf, -Inf
        p = g.new_plot(padding=[80, 10, 10, 40])
        g.add_limit_tool(p, 'x')
        g.add_limit_tool(p, 'y')
        g.add_axis_tool(p, p.x_axis)
        g.add_axis_tool(p, p.y_axis)
        p.y_axis.title_spacing = 50

        if show_equilibration:
            sniff = iso.sniff
            g.new_series(sniff.xs, sniff.ys, type='scatter', fit=None, color='red')
            ymi, yma = min_max(ymi, yma, sniff.ys)
            xmi, xma = min_max(xmi, xma, sniff.xs)

        if show_evo:
            g.new_series(iso.xs, iso.ys, fit=iso.fit,
                         filter_outliers_dict=iso.filter_outliers_dict,
                         color='black')
            ymi, yma = min_max(ymi, yma, iso.ys)
            xmi, xma = min_max(xmi, xma, iso.xs)

        if show_baseline:
            baseline = iso.baseline
            g.new_series(baseline.xs, baseline.ys, type='scatter',
                         fit=baseline.fit,
                         filter_outliers_dict=baseline.filter_outliers_dict,
                         color='blue')
            ymi, yma = min_max(ymi, yma, baseline.ys)
            xmi, xma = min_max(xmi, xma, baseline.xs)

        g.set_x_limits(min_=xmi, max_=xma, pad='0.025,0.05')
        g.set_y_limits(min_=ymi, max_=yma, pad='0.05', plotid=i)
        g.set_x_title('Time (s)', plotid=i)
        g.set_y_title('{} (fA)'.format(iso.name), plotid=i)

    g.refresh()
    g.window_title = '{} {}'.format(
        record_id, ','.join([i.name for i in reversed(isotopes)]))
    return g
def information_dialog(self, msg, title=None):
    """Display *msg* in an information dialog; forward *title* if given."""
    kw = {} if title is None else {'title': title}
    information(None, msg, **kw)
def show_evolutions_factory(record_id, isotopes, show_evo=True,
                            show_equilibration=False, show_baseline=False):
    # Build a stacked regression graph of isotope evolutions, optionally
    # with equilibration (sniff) and baseline series. Returns the graph, or
    # None when the open-window limit is reached.
    if WINDOW_CNT > 20:
        information(None, 'You have too many Isotope Evolution windows open. Close some before proceeding')
        return

    from pychron.graph.stacked_regression_graph import StackedRegressionGraph

    # x-limits: anchor at 0 when plotting the evolution, else fully autoscale
    if not show_evo:
        xmi = Inf
        xma = -Inf
    else:
        xmi, xma = 0, -Inf

    g = StackedRegressionGraph(resizable=True)
    g.plotcontainer.spacing = 10
    g.window_height = min(275 * len(isotopes), 800)
    # cascade successive windows so they don't overlap exactly
    g.window_x = OX + XOFFSET * WINDOW_CNT
    g.window_y = OY + YOFFSET * WINDOW_CNT

    isotopes = sort_isotopes(isotopes, reverse=False, key=lambda x: x.name)
    for i, iso in enumerate(isotopes):
        ymi, yma = Inf, -Inf
        p = g.new_plot(padding=[80, 10, 10, 40])
        g.add_limit_tool(p, 'x')
        g.add_limit_tool(p, 'y')
        g.add_axis_tool(p, p.x_axis)
        g.add_axis_tool(p, p.y_axis)
        p.y_axis.title_spacing = 50

        if show_equilibration:
            sniff = iso.sniff
            g.new_series(sniff.xs, sniff.ys, type='scatter', fit=None, color='red')
            ymi, yma = min_max(ymi, yma, sniff.ys)
            xmi, xma = min_max(xmi, xma, sniff.xs)

        if show_evo:
            g.new_series(iso.xs, iso.ys, fit=iso.fit,
                         filter_outliers_dict=iso.filter_outliers_dict,
                         color='black')
            ymi, yma = min_max(ymi, yma, iso.ys)
            xmi, xma = min_max(xmi, xma, iso.xs)

        if show_baseline:
            baseline = iso.baseline
            g.new_series(baseline.xs, baseline.ys, type='scatter',
                         fit=baseline.fit,
                         filter_outliers_dict=baseline.filter_outliers_dict,
                         color='blue')
            ymi, yma = min_max(ymi, yma, baseline.ys)
            xmi, xma = min_max(xmi, xma, baseline.xs)

        g.set_x_limits(min_=xmi, max_=xma, pad='0.025,0.05')
        g.set_y_limits(min_=ymi, max_=yma, pad='0.05', plotid=i)
        g.set_x_title('Time (s)', plotid=i)
        g.set_y_title('{} (fA)'.format(iso.name), plotid=i)

    g.refresh()
    g.window_title = '{} {}'.format(record_id, ','.join([i.name for i in reversed(isotopes)]))
    return g
def _do_diff_fired(self):
    """Diff the selected pair of commits and display any differences."""
    # (dropped a large commented-out prototype for single-commit diffs)
    if not self.selected_commits:
        return

    lhs, rhs = self.selected_lhs, self.selected_rhs
    lhsid, rhsid = lhs.hexsha[:8], rhs.hexsha[:8]
    lhsdate = isoformat_date(lhs.date)
    rhsdate = rhs.date.isoformat()

    changed = []
    for modifier in ('blanks', 'icfactors', 'tags', 'intercepts'):
        path = analysis_path(self.record_id, self.repository_identifier,
                             modifier=modifier)
        delta = get_diff(self.repo, lhs.hexsha, rhs.hexsha, path)
        if delta:
            changed.append((modifier, delta))

    if not changed:
        information(None, 'No Differences between {} and {}'.format(lhsid, rhsid))
        return

    view = DiffView(self.record_id, lhsid, rhsid, lhsdate, rhsdate)
    for modifier, (lblob, rblob) in changed:
        setter = getattr(view, 'set_{}'.format(modifier))
        setter(json.load(lblob.data_stream), json.load(rblob.data_stream))
    view.finish()
    open_view(view)
def show_evolutions_factory(record_id, isotopes, show_evo=True,
                            show_equilibration=False, show_baseline=False,
                            show_statistics=False, ncols=1,
                            scale_to_equilibration=False):
    # Build a (possibly multi-column) stacked regression graph of isotope
    # evolutions, optionally with equilibration, baseline and statistics.
    # Returns the graph, or None when the open-window limit is reached.
    if WINDOW_CNT > 20:
        information(
            None,
            'You have too many Isotope Evolution windows open. Close some before proceeding'
        )
        return

    # x-limits: anchor at 0 when plotting the evolution, else fully autoscale
    if not show_evo:
        xmi = Inf
        xma = -Inf
    else:
        xmi, xma = 0, -Inf

    if ncols > 1:
        isotopes = sort_isotopes(isotopes, reverse=True, key=attrgetter('name'))

        def reorder(l, n):
            # chunk into columns of n, then interleave so the plots fill
            # the grid column-major
            l = [l[i:i + n] for i in range(0, len(l), n)]
            nl = []
            for ri in range(len(l[0])):
                for col in l:
                    try:
                        nl.append(col[ri])
                    except IndexError:
                        pass
            return nl

        nrows = ceil(len(isotopes) / ncols)
        isotopes = reorder(isotopes, nrows)
        g = ColumnStackedRegressionGraph(resizable=True, ncols=ncols, nrows=nrows,
                                         container_dict={
                                             'padding_top': 15 * nrows,
                                             'spacing': (0, 15),
                                             'padding_bottom': 40
                                         })
        resizable = 'hv'
    else:
        resizable = 'h'
        isotopes = sort_isotopes(isotopes, reverse=False, key=attrgetter('name'))
        g = StackedRegressionGraph(resizable=True, container_dict={'spacing': 15})

    # g.plotcontainer.spacing = 10
    g.window_height = min(275 * len(isotopes), 800)
    # cascade successive windows so they don't overlap exactly
    g.window_x = OX + XOFFSET * WINDOW_CNT
    g.window_y = OY + YOFFSET * WINDOW_CNT

    for i, iso in enumerate(isotopes):
        ymi, yma = Inf, -Inf
        p = g.new_plot(padding=[80, 10, 10, 40], resizable=resizable)
        g.add_limit_tool(p, 'x')
        g.add_limit_tool(p, 'y')
        g.add_axis_tool(p, p.x_axis)
        g.add_axis_tool(p, p.y_axis)
        if show_statistics:
            g.add_statistics(i)

        p.y_axis.title_spacing = 50
        if show_equilibration:
            sniff = iso.sniff
            if sniff.xs.shape[0]:
                g.new_series(sniff.offset_xs, sniff.ys, type='scatter',
                             fit=None, color='red')
                ymi, yma = min_max(ymi, yma, sniff.ys)
                xmi, xma = min_max(xmi, xma, sniff.offset_xs)

        if show_evo:
            if iso.fit is None:
                iso.fit = 'linear'
            g.new_series(iso.offset_xs, iso.ys, fit=iso.efit,
                         truncate=iso.truncate,
                         filter_outliers_dict=iso.filter_outliers_dict,
                         color='black')
            g.set_regressor(iso.regressor, i)
            xmi, xma = min_max(xmi, xma, iso.offset_xs)
            if not scale_to_equilibration:
                ymi, yma = min_max(ymi, yma, iso.ys)

        if show_baseline:
            baseline = iso.baseline
            g.new_series(baseline.offset_xs, baseline.ys, type='scatter',
                         fit=baseline.efit,
                         filter_outliers_dict=baseline.filter_outliers_dict,
                         color='blue')
            xmi, xma = min_max(xmi, xma, baseline.offset_xs)
            if not scale_to_equilibration:
                ymi, yma = min_max(ymi, yma, baseline.ys)

        xpad = '0.025,0.05'
        ypad = '0.05'
        if scale_to_equilibration:
            # y-range is driven by the equilibration data; extend it just
            # enough to include the evolution fit's intercept/endpoint
            ypad = None
            r = (yma - ymi) * 0.02
            # ymi = yma - r
            fit = iso.fit
            if fit != 'average':
                fit, _ = convert_fit(iso.fit)
                fy = polyval(polyfit(iso.offset_xs, iso.ys, fit), 0)
                if ymi > fy:
                    ymi = fy - r
                fy = polyval(polyfit(iso.offset_xs, iso.ys, fit), xma)
                if fy > yma:
                    yma = fy
                elif fy < ymi:
                    ymi = fy - r
            # yma += r

        g.set_x_limits(min_=xmi, max_=xma, pad=xpad)
        g.set_y_limits(min_=ymi, max_=yma, pad=ypad, plotid=i)
        g.set_x_title('Time (s)', plotid=i)
        g.set_y_title('{} ({})'.format(iso.name, iso.units), plotid=i)

    g.refresh()
    g.window_title = '{} {}'.format(
        record_id, ','.join([i.name for i in reversed(isotopes)]))
    return g
def import_db(self, info):
    """Import into the database and report success or failure to the user."""
    ok, url = self.model.import_db()
    if ok:
        information(None, 'Added to database.\n\n {}'.format(url))
    else:
        warning(None, 'Unable to connect to database.\n\n {}'.format(url))
def _sync_metadata_button_fired(self):
    """Synchronize interpreted-age metadata for every listed item."""
    with self.dvc.session_ctx():
        for item in self.items:
            self.dvc.sync_ia_metadata(item)
    information(None, 'Metadata sync complete')
def information(self, message, title='Information'):
    """Show an information message dialog parented to this window."""
    from pyface.message_dialog import information as show_information

    return show_information(self.window.control, message, title)