Example #1
    def _save_peak_center(self, pc):
        self.info('DVC saving peakcenter')
        p = self._make_path(modifier='peakcenter')

        if pc:
            fmt = '>ff'
            obj = {'reference_detector': pc.reference_detector.name,
                   'reference_isotope': pc.reference_isotope,
                   'fmt': fmt,
                   'interpolation': pc.interpolation_kind if pc.use_interpolation else ''}

            results = pc.get_results()
            if results:
                for result in results:
                    points = encode_blob(pack(fmt, result.points))

                    obj[result.detector] = {'low_dac': result.low_dac,
                                            'center_dac': result.center_dac,
                                            'high_dac': result.high_dac,
                                            'low_signal': result.low_signal,
                                            'center_signal': result.center_signal,
                                            'high_signal': result.high_signal,
                                            'resolution': result.resolution,
                                            'low_resolving_power': result.low_resolving_power,
                                            'high_resolving_power': result.high_resolving_power,
                                            'points': points}

            dvc_dump(obj, p)
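All of these examples lean on the project's `pack` and `encode_blob` helpers (and `dvc_dump`/`dvc_load` for JSON I/O). Their sources are not shown here; the following is a minimal sketch of what they plausibly do, assuming `pack` struct-packs an iterable of rows and `encode_blob` is plain base64, which matches the `'encoding': 'base64'` and `'>ff'` format metadata written with the analysis data further below:

    # Hypothetical sketch, not the actual pychron helpers:
    # pack() struct-packs each row of an iterable and concatenates the bytes;
    # encode_blob() base64-encodes raw bytes so they can be stored in JSON.
    import base64
    import struct

    def pack(fmt, rows):
        # e.g. pack('>ff', [(1.0, 2.0), (3.0, 4.0)]) -> 16 raw bytes
        return b''.join(struct.pack(fmt, *row) for row in rows)

    def encode_blob(blob):
        if isinstance(blob, str):
            blob = blob.encode('utf-8')
        return base64.b64encode(blob).decode('utf-8')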
Example #2
    def _save_monitor(self):
        if self.per_spec.monitor:
            p = self._make_path(modifier='monitor')
            checks = []
            for ci in self.per_spec.monitor.checks:
                data = encode_blob(pack('>ff', ci.data))
                params = dict(name=ci.name,
                              parameter=ci.parameter, criterion=ci.criterion,
                              comparator=ci.comparator, tripped=ci.tripped,
                              data=data)
                checks.append(params)

            dvc_dump(checks, p)
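For reference, the packed check data can be recovered with the standard library alone. A hedged read-back sketch, assuming `encode_blob` is plain base64 over `'>ff'` records and using a hypothetical file name:

    # Hypothetical decode of the saved monitor checks:
    import base64
    import json
    import struct

    with open('runid.monitor.json') as rfile:  # hypothetical path
        checks = json.load(rfile)

    for check in checks:
        raw = base64.b64decode(check['data'])
        rows = list(struct.iter_unpack('>ff', raw))  # [(x, y), ...]
        print(check['name'], check['tripped'], len(rows), 'points')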
Example #3
    def get_grain_polygon_blob(self):
        # self.debug('Get grain polygons n={}'.format(len(self.grain_polygons)))

        try:
            t, md, p = next(self.grain_polygons)

            a = pack('ff', ((t, md), ))
            b = pack('HH', p)

            return encode_blob(a + b)

        except (StopIteration, TypeError) as e:
            self.debug('No more grain polygons. {}'.format(e))
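The resulting blob is one native-endian `'ff'` record (the timestamp and `md` value) followed by a run of `'HH'` point records. A hedged decode sketch, assuming `encode_blob` is base64 and `pack` concatenates struct records as sketched above:

    # Hypothetical decode of a grain-polygon blob produced above:
    import base64
    import struct

    def decode_grain_polygon(blob):
        raw = base64.b64decode(blob)
        header = struct.calcsize('ff')            # 8 bytes: (t, md)
        t, md = struct.unpack_from('ff', raw, 0)
        points = list(struct.iter_unpack('HH', raw[header:]))
        return t, md, points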
Example #4
    def dump_equilibration(self, keys, reviewed=False):
        path = self._analysis_path(modifier='.data')

        jd = dvc_load(path)
        endianness = jd['format'][0]

        nsignals = []
        nsniffs = []

        for (new, existing) in ((nsignals, 'signals'), (nsniffs, 'sniffs')):
            for sig in jd[existing]:
                key = sig['isotope']
                if key in keys:
                    iso = self.get_isotope(key)
                    if existing == 'sniffs':
                        iso = iso.sniff

                    sblob = encode_blob(iso.pack(endianness, as_hex=False))
                    new.append({'isotope': iso.name, 'blob': sblob, 'detector': iso.detector})
                else:
                    new.append(sig)

        for k in keys:
            # check to make sure signals/sniffs fully populated
            for new, issniff in ((nsignals, False), (nsniffs, True)):
                if not next((n for n in new if n['isotope'] == k), None):
                    iso = self.get_isotope(k)
                    if issniff:
                        iso = iso.sniff

                    sblob = encode_blob(iso.pack(endianness, as_hex=False))
                    new.append({'isotope': iso.name, 'blob': sblob, 'detector': iso.detector})
        jd['reviewed'] = reviewed
        jd['signals'] = nsignals
        jd['sniffs'] = nsniffs
        dvc_dump(jd, path)

        return path
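A usage sketch (the analysis object and isotope keys are hypothetical): any key missing from the stored signals or sniffs is back-filled from the current isotope objects before the file is rewritten.

    # Hypothetical call: rewrite the equilibration data for two isotopes
    # and mark the file as reviewed.
    path = analysis.dump_equilibration(['Ar40', 'Ar36'], reviewed=True)
    print('rewrote', path)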
Example #5
    def _save_analysis(self, timestamp):

        isos = {}
        dets = {}
        signals = []
        baselines = []
        sniffs = []
        blanks = {}
        intercepts = {}
        cbaselines = {}
        icfactors = {}

        endianness = '>'
        per_spec = self.per_spec

        source = {'emission': per_spec.emission, 'trap': per_spec.trap}

        clf = None
        if self.use_isotope_classifier:
            clf = self.application.get_service(
                'pychron.classifier.isotope_classifier.IsotopeClassifier')

        for key, iso in per_spec.isotope_group.items():
            sblob = encode_blob(iso.pack(endianness, as_hex=False))
            snblob = encode_blob(iso.sniff.pack(endianness, as_hex=False))

            for ss, blob in ((signals, sblob), (sniffs, snblob)):
                d = {
                    'isotope': iso.name,
                    'detector': iso.detector,
                    'blob': blob
                }
                ss.append(d)

            detector = next(
                (d
                 for d in per_spec.active_detectors if d.name == iso.detector),
                None)

            isod = {
                'detector': iso.detector,
                'name': iso.name,
                'serial_id': detector.serial_id if detector else '00000'
            }

            if clf is not None:
                klass, prob = clf.predict_isotope(iso)
                isod.update(classification=klass,
                            classification_probability=prob)

            isos[key] = isod

            if iso.detector not in dets:
                bblob = encode_blob(iso.baseline.pack(endianness,
                                                      as_hex=False))
                baselines.append({'detector': iso.detector, 'blob': bblob})
                dets[iso.detector] = {
                    'deflection': per_spec.defl_dict.get(iso.detector),
                    'gain': per_spec.gains.get(iso.detector)
                }

                icfactors[iso.detector] = {
                    'value': float(nominal_value(iso.ic_factor or 1)),
                    'error': float(std_dev(iso.ic_factor or 0)),
                    'fit': 'default',
                    'references': []
                }
                cbaselines[iso.detector] = {
                    'fit': iso.baseline.fit,
                    'error_type': iso.baseline.error_type,
                    'filter_outliers_dict': iso.baseline.filter_outliers_dict,
                    'value': float(iso.baseline.value),
                    'error': float(iso.baseline.error)
                }

            intercepts[key] = {
                'fit': iso.fit,
                'error_type': iso.error_type,
                'filter_outliers_dict': iso.filter_outliers_dict,
                'value': float(iso.value),
                'error': float(iso.error)
            }

            blanks[key] = {
                'fit': 'previous',
                'error_type': '',
                'references': [{
                    'record_id': per_spec.previous_blank_runid,
                    'exclude': False
                }],
                'value': float(iso.blank.value),
                'error': float(iso.blank.error)
            }

        obj = self._make_analysis_dict()

        from pychron.version import __version__ as pversion
        from pychron.experiment import __version__ as eversion
        from pychron.dvc import __version__ as dversion

        obj['timestamp'] = timestamp.isoformat()

        obj['collection_version'] = '{}:{}'.format(eversion, dversion)
        obj['acquisition_software'] = 'pychron {}'.format(pversion)
        obj['data_reduction_software'] = 'pychron {}'.format(pversion)

        obj['environmental'] = {
            'lab_temperatures': per_spec.lab_temperatures,
            'lab_humiditys': per_spec.lab_humiditys,
            'lab_pneumatics': per_spec.lab_pneumatics
        }

        obj['laboratory'] = per_spec.laboratory
        obj['instrument_name'] = per_spec.instrument_name
        obj['analyst_name'] = per_spec.run_spec.username
        obj['whiff_result'] = per_spec.whiff_result
        obj['detectors'] = dets
        obj['isotopes'] = isos
        obj['spec_sha'] = self._get_spectrometer_sha()
        obj['intensity_scalar'] = per_spec.intensity_scalar
        obj['source'] = source
        # save the conditionals
        obj['conditionals'] = [c.to_dict() for c in per_spec.conditionals] if \
            per_spec.conditionals else None
        obj['tripped_conditional'] = per_spec.tripped_conditional.result_dict() if \
            per_spec.tripped_conditional else None

        # save the scripts
        ms = per_spec.run_spec.mass_spectrometer
        for si in ('measurement', 'extraction', 'post_measurement',
                   'post_equilibration'):
            name = getattr(per_spec, '{}_name'.format(si))
            blob = getattr(per_spec, '{}_blob'.format(si))
            self.dvc.meta_repo.update_script(ms, name, blob)
            obj[si] = name

        # save keys for the arar isotopes
        akeys = self.arar_mapping
        if akeys is None:
            akeys = ARAR_MAPPING

        obj['arar_mapping'] = akeys

        # save experiment
        self.debug('---------------- Experiment Queue saving disabled')
        # self.dvc.update_experiment_queue(ms, self.per_spec.experiment_queue_name,
        #                                  self.per_spec.experiment_queue_blob)

        self._save_macrochron(obj)

        hexsha = str(self.dvc.get_meta_head())
        obj['commit'] = hexsha

        # dump runid.json
        p = self._make_path()
        dvc_dump(obj, p)

        p = self._make_path(modifier='intercepts')
        dvc_dump(intercepts, p)

        # dump runid.blank.json
        p = self._make_path(modifier='blanks')
        dvc_dump(blanks, p)

        p = self._make_path(modifier='baselines')
        dvc_dump(cbaselines, p)

        p = self._make_path(modifier='icfactors')
        dvc_dump(icfactors, p)

        # dump runid.data.json
        p = self._make_path(modifier='.data')
        data = {
            'commit': hexsha,
            'encoding': 'base64',
            'format': '{}ff'.format(endianness),
            'signals': signals,
            'baselines': baselines,
            'sniffs': sniffs
        }
        dvc_dump(data, p)
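The `.data` file written at the end carries everything needed to reconstruct the raw traces: the meta commit, the encoding (`base64`), the struct format (`>ff`), and the per-isotope blobs. A hedged read-back sketch using only the standard library, assuming the file is plain JSON and a hypothetical file name:

    # Hypothetical read-back of the runid.data.json written above:
    import base64
    import json
    import struct

    with open('runid.data.json') as rfile:  # hypothetical path
        jd = json.load(rfile)

    fmt = jd['format']  # e.g. '>ff': big-endian (time, intensity) pairs
    for sig in jd['signals']:
        raw = base64.b64decode(sig['blob'])
        pairs = list(struct.iter_unpack(fmt, raw))
        print(sig['isotope'], sig['detector'], len(pairs), 'points')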
Example #6
    def post_extraction_save(self):
        self.info(
            '================= post extraction save started =================')
        per_spec = self.per_spec
        rblob = per_spec.response_blob  # time vs measured response
        oblob = per_spec.output_blob  # time vs %output
        sblob = per_spec.setpoint_blob  # time vs requested
        gp = per_spec.grain_polygons

        if rblob is not None:
            rblob = encode_blob(rblob)
        if oblob is not None:
            oblob = encode_blob(oblob)
        if sblob is not None:
            sblob = encode_blob(sblob)

        if gp:
            gp = [encode_blob(g) for g in gp]

        obj = {
            'measured_response': rblob,
            'requested_output': oblob,
            'setpoint_stream': sblob,
            'snapshots': per_spec.snapshots,
            'videos': per_spec.videos,
            'grain_polygons': gp
        }

        pid = per_spec.pid
        if pid:
            obj['pid'] = pid

        for e in EXTRACTION_ATTRS:
            v = getattr(per_spec.run_spec, e)
            obj[e] = v

        if not per_spec.positions:
            ps = [dict()]
        else:
            ps = []
            for i, pp in enumerate(per_spec.positions):
                pos, x, y, z = None, None, None, None
                if isinstance(pp, tuple):
                    if len(pp) == 2:
                        x, y = pp
                    elif len(pp) == 3:
                        x, y, z = pp
                else:
                    pos = pp
                    try:
                        ep = per_spec.extraction_positions[i]
                        x = ep[0]
                        y = ep[1]
                        if len(ep) == 3:
                            z = ep[2]
                    except IndexError:
                        self.debug('no extraction position for {}'.format(pp))
                pd = {
                    'x': x,
                    'y': y,
                    'z': z,
                    'position': pos,
                    'is_degas': per_spec.run_spec.identifier == 'dg'
                }
                ps.append(pd)

        obj['positions'] = ps
        self._positions = ps

        hexsha = self.dvc.get_meta_head()
        obj['commit'] = str(hexsha)

        path = self._make_path(modifier='extraction')
        dvc_dump(obj, path)
        self.info(
            '================= post extraction save finished ================='
        )
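The position handling above normalizes two input shapes: bare (x, y) or (x, y, z) tuples, and named hole positions whose coordinates are looked up in `extraction_positions`. A standalone restatement of that logic as a hypothetical helper, simplified from the method above:

    # Hypothetical helper mirroring the normalization in post_extraction_save:
    def normalize_position(pp, extraction_positions, index):
        pos, x, y, z = None, None, None, None
        if isinstance(pp, tuple):
            if len(pp) == 2:
                x, y = pp
            elif len(pp) == 3:
                x, y, z = pp
        else:
            pos = pp
            try:
                ep = extraction_positions[index]
                x, y = ep[0], ep[1]
                if len(ep) == 3:
                    z = ep[2]
            except IndexError:
                pass  # no recorded stage coordinates for this position
        return {'x': x, 'y': y, 'z': z, 'position': pos}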
Example #7
    def _get_output_blob(self, data):
        return encode_blob(self._manager.get_output_blob())
Example #8
    def _get_response_blob(self, data):
        return encode_blob(self._manager.get_response_blob())