Example No. 1
    def _make_panels(self):
        if self.analysis_groups:
            gs = [
                self._panel_factory(analyses=ag,
                                    plot_options=self.plot_options,
                                    graph_id=gid)
                for gid, ag in enumerate(self.analysis_groups)
            ]
        else:
            gs = [
                self._panel_factory(analyses=list(ais),
                                    plot_options=self.plot_options,
                                    graph_id=gid)
                for gid, ais in groupby_key(self.analyses, 'graph_id')
            ]
            # if hasattr(self, 'references'):
            gg = groupby_key(self.references, 'graph_id')
            for gi in gs:
                try:
                    gid, ais = next(gg)
                    gi.references = list(ais)
                except StopIteration:
                    break

        for gi in gs:
            gi.make_figures()

        if self.titles:
            for ti, gi in zip(self.titles, gs):
                gi.title = ti
        elif self.plot_options.auto_generate_title:
            for i, gi in enumerate(gs):
                gi.title = self.plot_options.generate_title(gi.analyses, i)

        return gs
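
Every example on this page calls groupby_key. Pychron ships its own helper; the sketch below is only a plausible stand-in, inferred from how these call sites use it (a key given either as an attribute name or as a callable, plus an optional reverse flag):

from itertools import groupby
from operator import attrgetter


def groupby_key(items, key, reverse=False):
    # Hypothetical stand-in shown for orientation only, not pychron's actual code:
    # sort by the key, then group adjacent equal keys with itertools.groupby.
    if isinstance(key, str):
        key = attrgetter(key)
    return groupby(sorted(items, key=key, reverse=reverse), key=key)

The groups yielded by itertools.groupby are lazy and can be consumed only once, which is why the examples call list(...) on a group (e.g. ris = list(ris)) before reusing it.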
Example No. 2
    def fix(self, runs):
        if not self.confirmation_dialog('Would you like to run the Bulk Run Fixer?'):
            return

        for atype, ris in groupby_key(runs, 'analysis_type'):
            ris = list(ris)
            self.unknown_enabled = atype == 'unknown'
            es, ms = zip(*[(r.extraction_script, r.measurement_script) for r in ris])
            es, ms = list(set(es)), list(set(ms))
            # es,ms = zip(*list({r.extraction_script for r in ris}))

            self.extraction_script_enabled = len(es) > 1
            self.extraction_script.name = es[0]

            self.measurement_script_enabled = len(ms) > 1
            self.measurement_script.name = ms[0]

            if self.unknown_enabled:
                for attr in ATTRS:
                    ats = list({getattr(r, attr) for r in ris})
                    if len(ats) > 1:
                        setattr(self, attr, ats[0])
                        setattr(self, 'enabled_{}'.format(attr), True)
                    else:
                        setattr(self, 'enabled_{}'.format(attr), False)

            self.title = atype.capitalize()
            info = self.edit_traits()
            if info.result:
                self._apply(ris)
            else:
                break
Example No. 3
    def _grouped_rows(self, reverse=True):
        # def func(x):
        #     return x.y

        # holes = sorted(self.sample_holes, key=func, reverse=reverse)
        # return groupby(holes, key=func)
        return groupby_key(self.sample_holes, 'y', reverse=reverse)
Example No. 4
    def get_owners(self):
        """
            eg.
                1. 129.128.12.141-A,B,C:D,E,F
                   A,B,C owned by 141
                   D,E,F free
                2. A,B,C,D,E,F
                   All free
                3. 129.128.12.141-A,B,C:129.138.12.150-D,E:F
                    A,B,C owned by 141,
                    D,E owned by 150
                    F free
        """

        vs = [(v.name.split('-')[1], v.owner) for v in self.switches.values()]

        owners = []
        for owner, valves in groupby_key(vs, itemgetter(1)):
            valves, _ = list(zip(*valves))
            v = ','.join(valves)
            if owner:
                t = '{}-{}'.format(owner, v)
            else:
                t = v
            owners.append(t)

        return ':'.join(owners)
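
The owner string format described in the docstring can be reproduced outside the class. The sketch below uses made-up (valve, owner) pairs and plain itertools.groupby in place of the project's groupby_key:

from itertools import groupby
from operator import itemgetter

# hypothetical (valve name, owner) pairs; an empty owner means the valve is free
vs = [('A', '129.128.12.141'), ('B', '129.128.12.141'), ('C', '129.128.12.141'),
      ('D', ''), ('E', ''), ('F', '')]

owners = []
for owner, valves in groupby(sorted(vs, key=itemgetter(1)), key=itemgetter(1)):
    names = ','.join(v for v, _ in valves)
    owners.append('{}-{}'.format(owner, names) if owner else names)

print(':'.join(owners))  # D,E,F:129.128.12.141-A,B,C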
Example No. 5
    def save(self):
        sens = {}
        for ms, ss in groupby_key(self.records, 'mass_spectrometer'):

            sens[ms] = [ri.to_dict() for ri in ss]

        self.dvc.save_sensitivities(sens)
Example No. 6
    def count_labnumber(self, ln):
        ans = [ai for ai in self.automated_runs if ai.labnumber == ln and ai.is_step_heat()]
        i = 0

        for _ in groupby_key(ans, 'user_defined_aliquot'):
            i += 1
        return i
Example No. 7
def analysis_type_func(analyses, mapping, offset=True):
    """
    convert analysis type to number

    if offset is True,
    use the supplied mapping to convert atype to an integer,
    then add a fractional value to indicate position in the list,
    e.g. first unknown 1, second unknown 1.1
    the fractional value is normalized by the group size so that there is no overlap,
    i.e. unknown will always be 1.### and never 2.0

    """
    if offset:
        counts = {
            k.lower(): float(len(list(v)))
            for k, v in groupby_key(analyses, 'analysis_type')
        }

    __cache__ = {}

    def f(x):
        x = x.lower()
        c = 0
        if offset:
            if x in __cache__:
                c = __cache__[x] + 1
            __cache__[x] = c
            c /= counts[x]

        try:
            return mapping[x] + c
        except KeyError:
            return -1

    return f
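
A quick way to see the fractional offsets the docstring describes is to run the same counting logic on a toy list of analysis types; the mapping and type names below are invented purely for illustration:

# invented mapping and analysis types, just to show the offsets produced
mapping = {'unknown': 1, 'blank_unknown': 2}
atypes = ['unknown', 'unknown', 'unknown', 'blank_unknown']

counts = {a: float(atypes.count(a)) for a in set(atypes)}
cache = {}
for a in atypes:
    c = cache.get(a, -1) + 1  # position of this analysis within its type
    cache[a] = c
    print(a, mapping[a] + c / counts[a])
# unknown 1.0, unknown ~1.33, unknown ~1.67, blank_unknown 2.0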
Example No. 8
def analysis_type_func(analyses, offset=True):
    """
    convert analysis type to number

    if offset is True,
    use ANALYSIS_MAPPING_INTS to convert atype to an integer,
    then add a fractional value to indicate position in the list,
    e.g. first unknown 1, second unknown 1.1
    the fractional value is normalized by the group size so that there is no overlap,
    i.e. unknown will always be 1.### and never 2.0

    """
    if offset:
        counts = {k.lower(): float(len(list(v))) for k, v in groupby_key(analyses, 'analysis_type')}

    __cache__ = {}

    def f(x):
        x = x.lower()
        c = 0
        if offset:
            if x in __cache__:
                c = __cache__[x] + 1
            __cache__[x] = c
            c /= counts[x]

        return ANALYSIS_MAPPING_INTS[x] + c if x in ANALYSIS_MAPPING_INTS else -1

    return f
Example No. 9
    def count_labnumber(self, ln):
        ans = [
            ai for ai in self.automated_runs
            if ai.labnumber == ln and ai.is_step_heat()
        ]
        i = 0

        for _ in groupby_key(ans, 'user_defined_aliquot'):
            i += 1
        return i
Example No. 10
    def _get_grouped_positions(self):
        gs = []
        for idn, poss in groupby_key(self.positions, 'identifier'):
            poss = list(poss)
            gp = GroupedPosition(identifier=idn,
                                 meta_position=poss[0],
                                 positions=poss)
            gs.append(gp)

        return gs
Example No. 11
    def set_positions(self, monitors, unk=None):
        self.debug('setting positions mons={}, unks={}'.format(len(monitors), len(unk) if unk else 0))
        opt = self.plotter_options
        monage = opt.monitor_age * 1e6
        lk = opt.lambda_k
        ek = opt.error_kind

        geom = self.geometry
        poss = []
        ans = []
        slope = True
        prev = None
        for identifier, ais in groupby_key(monitors, 'identifier'):

            ais = list(ais)
            n = len(ais)

            ref = ais[0]
            j = ref.j
            ip = ref.irradiation_position
            sample = ref.sample

            x, y, r, idx = geom[ip - 1]

            p = FluxPosition(identifier=identifier,
                             irradiation=self.irradiation,
                             level=self.level,
                             sample=sample, hole_id=ip,
                             saved_j=nominal_value(j),
                             saved_jerr=std_dev(j),
                             error_kind=ek,
                             monitor_age=monage,
                             analyses=ais,
                             lambda_k=lk,
                             x=x, y=y,
                             n=n)

            p.set_mean_j()
            poss.append(p)
            if prev:
                slope = prev < p.j
            prev = p.j
            vs = self._sort_individuals(p, monage, lk, slope)
            if ans:
                ans = [list(ans[i]) + list(v) for i, v in enumerate(vs)]
                # ans = [ans[0].extend(aa), ans[0].extend(xx), ans[0].extend(yy), ans[0].extend(es)]
            else:
                ans = list(vs)

        self.monitor_positions = sorted(poss, key=attrgetter('hole_id'))

        if unk is not None:
            self.unknown_positions = sorted(unk, key=attrgetter('hole_id'))
Example No. 12
    def run(self, state):
        self.plotter_options = self.plotter_options_manager.selected_options
        po = self.plotter_options
        if not po:
            state.canceled = True
            return

        try:
            use_plotting = po.use_plotting
        except AttributeError:
            use_plotting = True

        if not state.unknowns and self.no_analyses_warning:
            raise NoAnalysesError

        if use_plotting and self.use_plotting:
            for tab_id, unks in groupby_key(state.unknowns, 'tab_id'):
                if tab_id in self.editors:
                    editor = self.editors[tab_id]
                else:
                    editor = self._editor_factory()
                    self.editors[tab_id] = editor

                state.editors.append(editor)
                self.editor = editor
                if self.auto_set_items:
                    bind_preference(self, 'skip_meaning', 'pychron.pipeline.skip_meaning')
                    if self.name in self.skip_meaning.split(','):
                        unks = [u for u in unks if u.tag.lower() != 'skip']

                    editor.set_items(list(unks))
                    editor.refresh_needed = True
                    # if hasattr(editor, 'component'):
                    #     editor.component.invalidate_and_redraw()

        for name, es in groupby_key(state.editors, 'name'):
            for i, ei in enumerate(es):
                ei.name = ' '.join(ei.name.split(' ')[:-1])
                ei.name = '{} {:02n}'.format(ei.name, i + 1)
Example No. 13
    def run(self, state):
        self.plotter_options = self.plotter_options_manager.selected_options
        po = self.plotter_options
        if not po:
            state.canceled = True
            return

        try:
            use_plotting = po.use_plotting
        except AttributeError:
            use_plotting = True

        if not state.unknowns and self.no_analyses_warning:
            raise NoAnalysesError

        if use_plotting and self.use_plotting:
            for tab_id, unks in groupby_key(state.unknowns, 'tab_id'):
                if tab_id in self.editors:
                    editor = self.editors[tab_id]
                else:
                    editor = self._editor_factory()
                    self.editors[tab_id] = editor

                state.editors.append(editor)
                self.editor = editor
                if self.auto_set_items:
                    bind_preference(self, 'skip_meaning',
                                    'pychron.pipeline.skip_meaning')
                    if self.name in self.skip_meaning.split(','):
                        unks = [u for u in unks if u.tag.lower() != 'skip']

                    editor.set_items(list(unks))
                    editor.refresh_needed = True

        for name, es in groupby_key(state.editors, 'name'):
            for i, ei in enumerate(es):
                ei.name = ' '.join(ei.name.split(' ')[:-1])
                ei.name = '{} {:02n}'.format(ei.name, i + 1)
Example No. 14
    def get_value(self, attr):
        attr = self.map_isotope_key(attr)

        r = ufloat(0, 0, tag=attr)
        if attr.endswith('bs'):
            iso = attr[:-2]
            if iso in self.isotopes:
                r = self.isotopes[iso].baseline.uvalue
        elif attr in ('uage', 'uage_w_j_err', 'uage_w_position_err', 'uF'):
            r = getattr(self, attr)
        elif attr.startswith('u') and ('/' in attr or '_' in attr):
            attr = attr[1:]
            r = self.get_ratio(attr, non_ic_corr=True)
        elif attr == 'icf_40_36':
            a40 = self.map_isotope_key('Ar40')
            a36 = self.map_isotope_key('Ar36')
            r = self.get_corrected_ratio(a40, a36)
        elif attr.endswith('ic'):
            # ex. attr='Ar40ic'
            isok = attr[:-2]
            try:
                r = self.isotopes[isok].ic_factor
            except KeyError:
                r = ufloat(0, 0)
        elif attr.endswith('DetIC'):
            r = ufloat(0, 0)
            ratio = attr.split(' ')[0]
            numkey, denkey = ratio.split('/')

            for name, isos in groupby_key(self.isotopes.values(),
                                          key=attrgetter('name')):
                num, den = None, None
                for iso in isos:
                    if iso.detector == numkey:
                        num = iso.get_non_detector_corrected_value()
                    elif iso.detector == denkey:
                        den = iso.get_non_detector_corrected_value()

                    if num and den:
                        return num / den

        elif attr in self.computed:
            r = self.computed[attr]
        elif attr in self.isotopes:
            r = self.isotopes[attr].get_intensity()
        else:
            if hasattr(self, attr):
                r = getattr(self, attr)

        return r
Example No. 15
def renumber_aliquots(aruns):
    akey = attrgetter('user_defined_aliquot')

    for ln, ans in groupby_key(aruns, 'labnumber'):
        if is_special(ln):
            continue

        b, a = partition(ans, akey)
        b = list(b)
        if b:
            minaliquot = min([bi.user_defined_aliquot for bi in b])
            for i, (al, ans) in enumerate(groupby(b, key=akey)):
                for ai in ans:
                    ai.user_defined_aliquot = minaliquot + i
Example No. 16
    def _view_groups(self):
        def groupfunc(task_factory):
            gid = 0
            if hasattr(task_factory, 'task_group'):
                gid = task_factory.task_group
                if gid:
                    gid = ('hardware', 'experiment').index(gid) + 1
                else:
                    gid = 0

            return gid

        application = self.window.application
        groups = []
        for _, factories in groupby_key(application.task_factories, groupfunc):
            items = []
            for factory in factories:
                for win in application.windows:
                    if win.active_task:
                        if win.active_task.id == factory.id:
                            checked = True
                            break
                else:
                    checked = False

                action = myTaskWindowLaunchAction(task_id=factory.id,
                                                  checked=checked)
                # if hasattr(factory, 'size'):
                # action.size = factory.size

                if hasattr(factory, 'accelerator'):
                    action.accelerator = factory.accelerator
                add = True
                if hasattr(factory, 'include_view_menu'):
                    add = factory.include_view_menu

                if hasattr(factory, 'image'):
                    if factory.image:
                        action.image = icon(factory.image)

                if add:
                    items.append(ActionItem(action=action))

            groups.append(TaskGroup(items=items))

        # groups.append(DockPaneToggleGroup())
        return groups
Example No. 17
def set_interpreted_age(dvc, ias):
    repos = dvc.get_local_repositories()
    liths, groups, classes, types = get_lithology_values()
    for ia in ias:
        ia.lithology_classes = classes
        ia.lithology_groups = groups
        ia.lithology_types = types
        ia.lithologies = liths

    model = InterpretedAgeFactoryModel(items=ias, selected=ias[:1], dvc=dvc)
    iaf = InterpretedAgeFactoryView(model=model, repository_identifiers=repos)

    while 1:
        info = iaf.edit_traits()
        if info.result:
            no_lat_lon = []
            ias = [ia for ia in ias if ia.use]
            for ia in ias:
                if not ia.latitude and not ia.longitude:
                    no_lat_lon.append('{} ({})'.format(ia.name, ia.identifier))

            if no_lat_lon:
                n = ','.join(no_lat_lon)
                if not confirm(
                        None,
                        'No Lat/Lon. entered for {}. Are you sure you want to continue without setting a '
                        'Lat/Lon?'.format(n)) == YES:
                    continue

            ris = []
            for rid, iass in groupby_key(ias, key='repository_identifier'):
                if dvc.add_interpreted_ages(rid, iass):
                    ris.append(rid)

            if ris:
                if confirm(
                        None, 'Would you like to share changes to {}?'.format(
                            ','.join(ris))) == YES:
                    for rid in ris:
                        dvc.push_repository(rid)
                    information(None, 'Sharing changes complete')

            break
        else:
            break
Example No. 18
    def delete(self):
        if self.selected:
            def key(s):
                return os.path.basename(os.path.dirname(os.path.dirname(os.path.dirname(s.path))))

            dvc = self.dvc
            for repo, records in groupby_key(self.selected, key):
                ps = []
                ns = []
                for r in records:
                    if os.path.isfile(r.path):
                        os.remove(r.path)
                        ps.append(r.path)
                        ns.append(r.name)
                        self.interpreted_ages.remove(r)

                if dvc.repository_add_paths(repo, ps):
                    dvc.repository_commit(repo, 'Removed interpreted ages {}'.format(','.join(ns)))
Example No. 19
    def _view_groups(self):
        def groupfunc(task_factory):
            gid = 0
            if hasattr(task_factory, 'task_group'):
                gid = task_factory.task_group
                if gid:
                    gid = ('hardware', 'experiment').index(gid) + 1
                else:
                    gid = 0

            return gid

        application = self.window.application
        groups = []
        for _, factories in groupby_key(application.task_factories, groupfunc):
            items = []
            for factory in sorted(factories, key=attrgetter('id')):
                for win in application.windows:
                    if win.active_task:
                        if win.active_task.id == factory.id:
                            checked = True
                            break
                else:
                    checked = False

                action = MyTaskWindowLaunchAction(task_id=factory.id,
                                                  checked=checked)

                if hasattr(factory, 'accelerator'):
                    action.accelerator = factory.accelerator
                add = True
                if hasattr(factory, 'include_view_menu'):
                    add = factory.include_view_menu

                if hasattr(factory, 'image'):
                    if factory.image:
                        action.image = icon(factory.image)

                if add:
                    items.append(ActionItem(action=action))

            groups.append(TaskGroup(items=items))

        return groups
Example No. 20
    def run(self, state):
        if not state.mdd_workspace:
            state.canceled = True
            return

        editor = self._editor_factory()
        editor.roots = state.mdd_workspace.roots

        na = sorted((os.path.basename(ni) for ni in editor.roots))
        na = grouped_name(na)
        editor.name = '{} mdd'.format(na)

        editor.replot()

        state.editors.append(editor)
        self.editor = editor
        for name, es in groupby_key(state.editors, 'name'):
            for i, ei in enumerate(es):
                ei.name = '{} {:02n}'.format(ei.name, i + 1)
Example No. 21
    def run(self, state):
        ans = state.unknowns

        icfs = [
            ic_factor for ic_factor in self.options.ic_factors
            if ic_factor.enabled
        ]
        if icfs:
            for ai in ans:
                self._bulk_ic_factor(ai, icfs)

            self.dvc.update_analyses(
                ans, 'icfactors', '<ICFactor> bulk edit {}'.format(
                    self.options.icfactor_message))

        if self.options.aliquot or self.options.step:
            paths = {}
            for ai in ans:
                expid, ps = self._bulk_runid(ai, self.options.aliquot,
                                             self.options.step)
                if expid in paths:
                    pp = paths[expid]
                    pp.extend(ps)
                else:
                    pp = ps

                paths[expid] = pp

            for expid, ps in paths.items():
                if self.dvc.repository_add_paths(expid, ps):
                    self.dvc.repository_commit(expid, '<EDIT> RunID')

        if self.options.sync_sample_enabled:
            for repo, ais in groupby_repo(ans):
                self.dvc.pull_repository(repo)
                ps = []
                for identifier, ais in groupby_key(ais,
                                                   attrgetter('identifier')):
                    ps.extend(self.dvc.analyses_db_sync(identifier, ais, repo))

                if self.dvc.repository_add_paths(repo, ps):
                    self.dvc.repository_commit(repo,
                                               '<EDIT> Sync Sample MetaData')
Example No. 22
    def _freeze_flux(self, repo, unks):
        """
        save a flux file called <IRRADNAME>.json

        simple dictionary of identifier: flux_dict

        10000: {
                value: 0.1
                error: 0.001
                }
        :param repo:
        :param unks:
        :return:
        """
        for irrad, unks in groupby_key(unks, 'irradiation'):
            unks = list(unks)
            self._make_flux_file(repo, irrad, unks)
            levels = {u.irradiation_level for u in unks}
            for l in levels:
                self._make_production_file(repo, irrad, l)
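
Assuming the file is plain JSON keyed by identifier, as the docstring suggests, the frozen flux file for a hypothetical irradiation (say NM-300.json; the name and numbers here are placeholders) would contain something like:

import json

# placeholder identifiers and values, shown only to illustrate the layout
flux = {
    '10000': {'value': 0.1, 'error': 0.001},
    '10001': {'value': 0.102, 'error': 0.0012},
}
print(json.dumps(flux, indent=2))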
Example No. 23
    def do_import(self):
        self.debug('doing import')

        aspecs = self.source.get_analysis_import_specs()
        dest = self.dvc
        persister = DVCPersister(dvc=dest)
        persister.initialize(self.repository_identifier)

        def key(s):
            return s.run_spec.irradiation

        for irrad, iaspec in groupby_key(aspecs, key):
            if not dest.get_irradiation(irrad):
                self.warning_dialog('No Irradiation "{}". Please import the irradiation'.format(irrad))
                continue

            for aspec in iaspec:
                rspec = aspec.run_spec

                rspec.repository_identifier = self.repository_identifier
                rspec.mass_spectrometer = self.mass_spectrometer
                rspec.principal_investigator = self.principal_investigator

                if dest.get_analysis_runid(rspec.identifier, rspec.aliquot, rspec.step):
                    self.warning('{} already exists'.format(rspec.runid))
                    continue

                self._add_mass_spectrometer(aspec)
                self._add_extract_device(aspec)
                self._add_principal_investigator(aspec)
                self._add_material(aspec)
                self._add_project(aspec)
                self._add_sample(aspec)
                self._add_position(aspec)

                persister.per_spec_save(aspec, commit=True,
                                        commit_tag='Transfer:{}'.format(self.source.url()),
                                        push=False)

            persister.push()
Example No. 24
    def get_igsns(self):
        srv = self.application.get_service(
            'pychron.igsn.igsn_service.IGSNService')
        if srv is None:
            self.warning_dialog(
                'IGSN Plugin is required. Enable it using "Help>Edit Initialization"'
            )
            return

        self.info('get igsn')
        items = self.selected
        if not items:
            items = self.irradiated_positions

        def key(x):
            return x.sample, x.material, x.project

        items = [x for x in items if not x.igsn]

        no_save = False
        for (sample, material, project), poss in groupby_key(items, key):
            if not sample:
                continue

            self.debug(
                'Get IGSN for sample={}, material={}, project={}'.format(
                    sample, material, project))
            igsn = srv.get_new_igsn(sample)
            if igsn:
                for item in poss:
                    item.igsn = igsn
            else:
                no_save = True
                break
                # need to check for existing IGSN for sample
                # if igsn is not None:
                #     item.igsn = igsn
        if not no_save:
            self.save()
        self.refresh_table = True
Example No. 25
    def delete(self):
        if self.selected:

            def key(s):
                return os.path.basename(
                    os.path.dirname(os.path.dirname(os.path.dirname(s.path))))

            dvc = self.dvc
            for repo, records in groupby_key(self.selected, key):
                ps = []
                ns = []
                for r in records:
                    if os.path.isfile(r.path):
                        os.remove(r.path)
                        ps.append(r.path)
                        ns.append(r.name)
                        self.interpreted_ages.remove(r)

                if dvc.repository_add_paths(repo, ps):
                    dvc.repository_commit(
                        repo,
                        'Removed interpreted ages {}'.format(','.join(ns)))
Example No. 26
    def load_predefined_templates(self):
        self.debug('load predefined templates')

        root = PipelineTemplateRoot()
        nodes = {n.__name__: n for n in self.nodes}
        node_factories = {v.name: v for v in self.node_factories}
        groups = []

        default = [('Fit', (('Define Equilibration', DEFINE_EQUILIBRATION),
                            ('Iso Evo', ISOEVO),
                            ('Blanks', BLANKS),
                            ('IC Factor', ICFACTOR),
                            ('Flux', FLUX),
                            ('Ca Correction Factors', CA_CORRECTION_FACTORS),
                            ('K Correction Factors', K_CORRECTION_FACTORS),
                            ('Bulk Edit', BULK_EDIT),
                            ('Audit', AUDIT))),
                   ('Plot', (('Ideogram', IDEO),
                             ('CSV Ideogram', CSV_IDEO),
                             ('Interpreted Age Ideogram', INTERPRETED_AGE_IDEOGRAM),
                             ('Hybrid Ideogram', HYBRID_IDEOGRAM),
                             ('SubGroup Ideogram', SUBGROUP_IDEOGRAM),
                             ('Spectrum', SPEC),
                             ('Series', SERIES),
                             ('InverseIsochron', INVERSE_ISOCHRON),
                             ('XY Scatter', XY_SCATTER),
                             ('Regression', REGRESSION_SERIES),
                             ('Flux Visualization', FLUX_VISUALIZATION),
                             ('Vertical Flux', VERTICAL_FLUX))),
                   ('Table', (('Analysis', ANALYSIS_TABLE),
                              ('Analysis w/Set IA', ANALYSIS_TABLE_W_IA),
                              ('Interpreted Age', INTERPRETED_AGE_TABLE),
                              ('Report', REPORT))),
                   ('History', (('Ideogram', HISTORY_IDEOGRAM),
                                ('Spectrum', HISTORY_SPECTRUM))),
                   ('Share', (('CSV Analyses Export', CSV_ANALYSES_EXPORT),)),
                   ('Transfer', (('Mass Spec Reduced', MASSSPEC_REDUCED),))]

        # predefined_templates contributed to by other plugins
        for name, gs in groupby_key(default + self.predefined_templates, key=itemgetter(0)):
            grp = PipelineTemplateGroup(name=name)

            templates = [PipelineTemplate(n, t, nodes, node_factories) for nn, gg in gs for n, t in gg]

            pp = os.path.join(paths.user_pipeline_template_dir, name.lower())
            # add templates from named user directory
            for temp in glob_list_directory(pp, extension='.yaml', remove_extension=True):
                path = os.path.join(pp, '{}.yaml'.format(temp))
                templates.append(PipelineTemplate(temp, path, nodes, node_factories))

            grp.templates = templates
            groups.append(grp)

        # add user templates from user directory
        grp = PipelineTemplateGroup(name='User')
        user_templates = []
        for temp in glob_list_directory(paths.user_pipeline_template_dir, extension='.yaml', remove_extension=True):
            path = os.path.join(paths.user_pipeline_template_dir, '{}.yaml'.format(temp))
            user_templates.append(PipelineTemplate(temp, path, nodes, node_factories))

        grp.templates = user_templates
        groups.append(grp)

        # reorder groups
        ngroups = []
        for gi in DEFAULT_PIPELINE_ROOTS:  #('Fit', 'Plot', 'Table',...)
            g = next((gii for gii in groups if gii.name == gi), None)
            if g is not None:
                ngroups.append(g)

        # add in groups contributed to by plugins or from users template directories
        for gi in groups:
            if gi.name not in DEFAULT_PIPELINE_ROOTS:
                ngroups.append(gi)

        self.debug('loaded {} user templates'.format(len(user_templates)))

        root.groups = ngroups
        self.pipeline_template_root = root
Example No. 27
    def _make_groups(self):
        rs = [r for r in self.records if r.valid()]
        self.groups = [
            CSVRecordGroup(gid, rs) for gid, rs in groupby_key(rs, 'group')
        ]
Example No. 28
    def load_predefined_templates(self):
        self.debug('load predefined templates')

        root = PipelineTemplateRoot()
        nodes = {n.__name__: n for n in self.nodes}
        node_factories = {v.name: v for v in self.node_factories}
        groups = []

        default = [
            ('Fit',
             (('Define Equilibration', DEFINE_EQUILIBRATION),
              ('Iso Evo', ISOEVO), ('Blanks', BLANKS), ('IC Factor', ICFACTOR),
              ('Flux', FLUX), ('Ca Correction Factors', CA_CORRECTION_FACTORS),
              ('K Correction Factors', K_CORRECTION_FACTORS), ('Audit',
                                                               AUDIT))),
            ('Edit', (('Bulk Edit', BULK_EDIT), )),
            ('Plot',
             (('Ideogram', IDEO), ('CSV Ideogram', CSV_IDEO),
              ('Interpreted Age Ideogram',
               INTERPRETED_AGE_IDEOGRAM), ('Hybrid Ideogram', HYBRID_IDEOGRAM),
              ('SubGroup Ideogram', SUBGROUP_IDEOGRAM), ('Spectrum', SPEC),
              ('Spectrum/Isochron', COMPOSITE), ('Series', SERIES),
              ('InverseIsochron', INVERSE_ISOCHRON),
              ('XY Scatter', XY_SCATTER), ('Regression', REGRESSION_SERIES),
              ('Flux Visualization', FLUX_VISUALIZATION), ('Vertical Flux',
                                                           VERTICAL_FLUX))),
            ('Table',
             (('SubGrouped Analyses', ANALYSIS_TABLE), ('Grouped Analyses',
                                                        SIMPLE_ANALYSIS_TABLE),
              ('Interpreted Age', INTERPRETED_AGE_TABLE), ('Report', REPORT))),
            ('History', (('Ideogram', HISTORY_IDEOGRAM), ('Spectrum',
                                                          HISTORY_SPECTRUM))),
            ('Share', (('CSV Analyses Export', CSV_ANALYSES_EXPORT),
                       ('CSV Raw Data Export', CSV_RAW_DATA_EXPORT))),
            ('Transfer', (('Mass Spec Reduced', MASSSPEC_REDUCED),
                          ('Mass Spec Flux', MASS_SPEC_FLUX)))
        ]

        # predefined_templates contributed to by other plugins
        for name, gs in groupby_key(default + self.predefined_templates,
                                    key=itemgetter(0)):
            grp = PipelineTemplateGroup(name=name)

            templates = [
                PipelineTemplate(n, t, nodes, node_factories) for nn, gg in gs
                for n, t in gg
            ]

            pp = os.path.join(paths.user_pipeline_template_dir, name.lower())
            # add templates from named user directory
            for temp in glob_list_directory(pp,
                                            extension='.yaml',
                                            remove_extension=True):
                path = os.path.join(pp, '{}.yaml'.format(temp))
                templates.append(
                    PipelineTemplate(temp, path, nodes, node_factories))

            grp.templates = templates
            groups.append(grp)

        # add user templates from user directory
        grp = PipelineTemplateGroup(name='User')
        user_templates = []
        for temp in glob_list_directory(paths.user_pipeline_template_dir,
                                        extension='.yaml',
                                        remove_extension=True):
            path = os.path.join(paths.user_pipeline_template_dir,
                                '{}.yaml'.format(temp))
            user_templates.append(
                PipelineTemplate(temp, path, nodes, node_factories))

        grp.templates = user_templates
        groups.append(grp)

        # reorder groups
        ngroups = []
        for gi in DEFAULT_PIPELINE_ROOTS:  # ('Fit', 'Plot', 'Table',...)
            g = next((gii for gii in groups if gii.name == gi), None)
            if g is not None:
                ngroups.append(g)

        # add in groups contributed to by plugins or from users template directories
        for gi in groups:
            if gi.name not in DEFAULT_PIPELINE_ROOTS:
                ngroups.append(gi)

        self.debug('loaded {} user templates'.format(len(user_templates)))

        root.groups = ngroups
        self.pipeline_template_root = root
Example No. 29
    def load_load_by_name(self, loadtable):

        self.canvas = self.make_canvas(loadtable)

        if isinstance(loadtable, str):
            loadtable = self.dvc.db.get_loadtable(loadtable)

        self.positions = []
        if not loadtable:
            return

        pos = []
        for ln, poss in groupby_key(loadtable.loaded_positions, 'identifier'):
            dbpos = self.dvc.db.get_identifier(ln)
            sample = ''
            project = ''
            material = ''
            if dbpos.sample:
                sample = dbpos.sample.name
                if dbpos.sample.project:
                    project = dbpos.sample.project.name
                if dbpos.sample.material:
                    material = dbpos.sample.material.name

            dblevel = dbpos.level
            irrad = dblevel.irradiation.name
            level = dblevel.name
            irradpos = dbpos.position

            for pi in poss:
                item = self.canvas.scene.get_item(str(pi.position))
                if item:
                    item.fill = True
                    item.add_identifier_label(ln, visible=self.show_identifiers)
                    item.add_sample_label(sample, visible=self.show_samples)

                    oy = -10 if not (self.show_identifiers or self.show_samples) else -20
                    wt = '' if pi.weight is None else str(pi.weight)
                    item.add_weight_label(wt, oy=oy, visible=self.show_weights)

                    nxtals = '' if pi.nxtals is None else str(pi.nxtals)
                    item.add_nxtals_label(nxtals, oy=oy, visible=self.show_nxtals)

                    item.nxtals = pi.nxtals
                    item.weight = pi.weight

                p = LoadPosition(identifier=ln,
                                 sample=sample,
                                 material=material,
                                 weight=pi.weight or 0.0,
                                 nxtals=pi.nxtals or 0,
                                 project=project,
                                 irradiation=irrad,
                                 level=level,
                                 irrad_position=int(irradpos),
                                 position=pi.position)
                pos.append(p)

        self.positions = pos
        self._set_group_colors()
        self.canvas.request_redraw()
Example No. 30
    def load_load_by_name(self, loadtable):

        self.canvas = self.make_canvas(loadtable)

        if isinstance(loadtable, str):
            loadtable = self.dvc.db.get_loadtable(loadtable)

        self.positions = []
        if not loadtable:
            return

        pos = []
        for ln, poss in groupby_key(loadtable.loaded_positions, 'identifier'):
            dbpos = self.dvc.db.get_identifier(ln)
            sample = ''
            project = ''
            material = ''
            if dbpos.sample:
                sample = dbpos.sample.name
                if dbpos.sample.project:
                    project = dbpos.sample.project.name
                if dbpos.sample.material:
                    material = dbpos.sample.material.name

            dblevel = dbpos.level
            irrad = dblevel.irradiation.name
            level = dblevel.name
            irradpos = dbpos.position

            for pi in poss:
                item = self.canvas.scene.get_item(str(pi.position))
                if item:
                    item.fill = True
                    item.add_identifier_label(ln,
                                              visible=self.show_identifiers)
                    item.add_sample_label(sample, visible=self.show_samples)

                    oy = -10 if not (self.show_identifiers
                                     or self.show_samples) else -20
                    wt = '' if pi.weight is None else str(pi.weight)
                    item.add_weight_label(wt, oy=oy, visible=self.show_weights)

                    nxtals = '' if pi.nxtals is None else str(pi.nxtals)
                    item.add_nxtals_label(nxtals,
                                          oy=oy,
                                          visible=self.show_nxtals)

                    item.nxtals = pi.nxtals
                    item.weight = pi.weight

                    p = LoadPosition(identifier=ln,
                                     sample=sample,
                                     material=material,
                                     weight=pi.weight or 0.0,
                                     nxtals=pi.nxtals or 0,
                                     project=project,
                                     irradiation=irrad,
                                     level=level,
                                     irrad_position=int(irradpos),
                                     position=pi.position)
                    pos.append(p)

        self.positions = pos
        self._set_group_colors()
        self.canvas.request_redraw()
Example No. 31
    def set_positions(self, monitors, unk=None):
        self.debug('setting positions mons={}, unks={}'.format(
            len(monitors),
            len(unk) if unk else 0))
        opt = self.plotter_options
        monage = opt.monitor_age * 1e6
        lk = opt.lambda_k
        ek = opt.error_kind

        geom = self.geometry
        poss = []
        ans = []
        slope = True
        prev = None

        # calculate padding of the individuals analyses
        # by taking mean of the diffs between adjacent positions divided by 4
        if opt.model_kind in (LEAST_SQUARES_1D, WEIGHTED_MEAN_1D):
            idx = 0 if self.plotter_options.one_d_axis == 'X' else 1
            vs = array([p[idx] for p in geom])
            vs = abs(diff(vs))
            vs = vs[vs.astype(bool)].mean()
        else:
            vs = [p[1] / p[0] if p[0] else Inf for p in geom]
            vs = arctan(vs)

            vs = abs(diff(vs))
            vs = mode(vs[vs.astype(bool)], axis=None)[0][0]

        padding = vs / 4.

        for identifier, ais in groupby_key(monitors, 'identifier'):

            ais = list(ais)
            n = len(ais)

            ref = ais[0]
            j = ref.j
            ip = ref.irradiation_position
            sample = ref.sample

            x, y, r, idx = geom[ip - 1]

            p = FluxPosition(identifier=identifier,
                             irradiation=self.irradiation,
                             level=self.level,
                             sample=sample,
                             hole_id=ip,
                             saved_j=nominal_value(j),
                             saved_jerr=std_dev(j),
                             error_kind=ek,
                             monitor_age=monage,
                             analyses=ais,
                             lambda_k=lk,
                             x=x,
                             y=y,
                             n=n)

            p.set_mean_j(self.plotter_options.use_weighted_fit)
            poss.append(p)
            if prev:
                slope = prev < p.j
            prev = p.j
            vs = self._sort_individuals(p, monage, lk, slope, padding)
            if ans:
                ans = [list(ans[i]) + list(v) for i, v in enumerate(vs)]
                # ans = [ans[0].extend(aa), ans[0].extend(xx), ans[0].extend(yy), ans[0].extend(es)]
            else:
                ans = list(vs)

        self._analyses = ans
        self.monitor_positions = sorted(poss, key=attrgetter('hole_id'))

        if unk is not None:
            ps = [p.hole_id for p in self.monitor_positions]
            for ui in unk:
                ui.available_positions = ps
            self.unknown_positions = sorted(unk, key=attrgetter('hole_id'))