# ---- Example 1 ----
 def transform_dict_out(self, value):
     """Deserialize *value* (a dict) into a quantity object.

     Returns an UncertainQuantity when an 'uncertainty' entry is present,
     a plain Quantity otherwise, or None when the dict is not tagged as a
     quantity (no '_datatype' == 'quantity').
     """
     if value.get('_datatype', None) != 'quantity':
         return None
     if 'uncertainty' in value:
         uncert = self.handle_uncert_load(value['uncertainty'])
         return datastructures.UncertainQuantity(
             value['magnitude'], value['units'], uncert)
     return Quantity(value['magnitude'], value['units'])
# ---- Example 2 ----
    def convert_age(self, age, interval):
        """Return a "base" calibrated age interval.

        Maps a lab-reported 14C age (assumed normally distributed, with
        sigma from the calibration CSV) through the calibration curve,
        normalizes the resulting density over calendar ages, and returns
        the mean age with its probability distribution as uncertainty.
        """
        # Density over the reference calendar ages; after mapping through
        # the calibration curve it is neither Gaussian nor normalized.
        raw_density = self.density(*age.unitless_normal())

        # raw_density is mostly zeros — keep only non-zero entries, while
        # remembering which calendar years they belong to.
        kept_years = []
        kept_density = []
        for year, dens in zip(self.calib_age_ref, raw_density):
            if dens:
                kept_years.append(year)
                kept_density.append(dens)

        kept_years = np.array(kept_years)
        kept_density = np.array(kept_density)

        # Resample the density onto an annual grid spanning the kept years.
        lo = int(kept_years[0])
        hi = int(kept_years[-1] + 1)
        annual_years = np.array(range(lo, hi))
        annual_density = np.interp(annual_years, kept_years, kept_density)
        calib_age_ref = np.array(range(lo, hi))

        # Normalize so the density integrates to 1 over the annual grid.
        norm = integrate.simps(annual_density, calib_age_ref)
        normed_density = annual_density / norm

        # Mean of the calibrated distribution = "best" true age.
        weighted_density = np.array(
            [year * dens for year, dens in zip(calib_age_ref, normed_density)])
        mean = integrate.simps(weighted_density, calib_age_ref)

        # Highest-density region provides the error bounds for the mean.
        calib_age_error = hdr(normed_density, calib_age_ref, interval)

        distr = datastructures.ProbabilityDistribution(
            calib_age_ref, normed_density, mean, calib_age_error)

        return datastructures.UncertainQuantity(data=mean,
                                                units='years',
                                                uncertainty=distr)
# ---- Example 3 ----
    def run_component(self, core, progress_dialog):
        """Assign a flow-model age to every sample in *core*.

        Prompts the user for ice thickness H, kink height h and
        accumulation rate c, computes an age for each sample from its
        depth (two regimes, above and below the kink height), then
        reverses the computed ages across the depth-sorted samples.
        """
        parameters = self.user_inputs(
            core, [('Ice Thickness', ('float', 'meters', False)),
                   ('Kink Height', ('float', 'meters', False)),
                   ('Accumulation Rate', ('float', 'meters/year', False))])

        # Strip units for computation ease.
        H = parameters['Ice Thickness'].magnitude
        h = parameters['Kink Height'].magnitude
        c = parameters['Accumulation Rate'].magnitude

        samples = []

        for sample in core:
            # Convert depth to meters before taking its magnitude.
            sample['depth'].units = 'm'
            z = sample['depth'].unitless_normal()[0]

            age = 0
            if h <= z < H:
                # Regime between kink height and full thickness.
                age = ((2 * H - h) / c) * (h / z - 1) + \
                      ((2 * H - h) / (2 * c)) * np.log((2 * H - h) / h)
            elif 0 <= z < h:
                # Regime below the kink height.
                age = ((2 * H - h) / (2 * c)) * np.log(
                    (2 * H - h) / (2 * z - h))

            sample['Flow Model Age'] = datastructures.UncertainQuantity(
                age, 'years', 0)
            samples.append(sample)

        # Sort the samples by depth.
        samples.sort(key=lambda s: s["depth"])

        # Invert the ages across the sorted samples by swapping pairs.
        # NOTE: floor division (//) is required here — the original
        # `len(samples) / 2` yields a float under Python 3 and makes
        # range() raise TypeError; // is correct on both 2 and 3.
        n = len(samples)
        for ii in range(n // 2):
            jj = n - 1 - ii
            samples[ii]['Flow Model Age'], samples[jj]['Flow Model Age'] = \
                samples[jj]['Flow Model Age'], samples[ii]['Flow Model Age']
# ---- Example 4 ----
    def run_component(self, core, progress_dialog):
        """Main entry point for the component.

        Looks up the core's 'Core Site', shows the closest known reservoir
        adjustment in a map dialog, records either that adjustment or a
        manually entered one, then applies the correction to every
        sample's 14C age. Raises AttributeError when no site is set.
        """

        geo = core.properties['Core Site']
        if not geo:
            raise AttributeError("Core Site Not Found!")
        else:
            # Suggest the nearest known adjustment point for confirmation.
            adj_point = self.get_closest_adjustment(geo)
            dlg = ReservoirCorrection.MapDialog(geo, adj_point)
            if dlg.ShowModal() == wx.ID_OK:
                # User accepted the suggested adjustment; store it with
                # its delta-R value and error as the uncertainty.
                self.set_value(
                    core, 'Reservoir Correction',
                    datastructures.UncertainQuantity(
                        adj_point.get('Delta R', 0), 'years',
                        adj_point.get('Error', [0])))
                self.set_value(core, 'Manual Reservoir Correction', False)
            else:
                # User declined; ask for a manually entered correction.
                self.user_inputs(core, [('Reservoir Correction',
                                         ('float', 'years', True))])
                self.set_value(core, 'Manual Reservoir Correction', True)
            dlg.Destroy()
        # Subtract the chosen reservoir correction from each 14C age.
        for sample in core:
            sample['Corrected 14C Age'] = sample['14C Age'] + (
                -sample['Reservoir Correction'])
# ---- Example 5 ----
    def do_file_read(self, event):
        """Validate field assignments and import the file's rows.

        Builds lookup dicts from the field-page widgets, converts each raw
        value to its typed form, wraps numeric values with units and
        uncertainties, and hands the finished rows to the confirmation
        page. Vetoes *event* and returns early if no depth column is
        assigned or a value fails type conversion.
        """
        # CSV column name -> sample attribute name (assigned columns only).
        self.fielddict = dict([
            w.fieldassoc for w in self.fieldpage.fieldwidgets if w.fieldassoc
        ])
        if 'depth' not in self.fielddict.values():
            wx.MessageBox(
                "Please assign a column for sample depth before continuing.",
                "Depth Field Required", wx.OK | wx.ICON_INFORMATION)
            event.Veto()
            return

        # Attribute -> display unit, and attribute -> error column name(s).
        self.unitdict = dict(
            [w.unitassoc for w in self.fieldpage.fieldwidgets if w.unitassoc])
        self.errdict = dict(
            [w.errassoc for w in self.fieldpage.fieldwidgets if w.errassoc])
        # Invert errdict: error column name -> attribute it belongs to.
        # NOTE: iteritems() means this module targets Python 2.
        self.errconv = {}
        for key, val in self.errdict.iteritems():
            for v in val:
                self.errconv[v] = key

        reader = self.reader.get_data_reader(self.fielddict)
        self.rows = []

        # First pass per row: type-convert every assigned column.
        for index, line in enumerate(reader, 1):
            #do appropriate type conversions...; handle units!
            newline = {}
            for key, value in line.iteritems():
                #don't try to import total blanks.
                if key in self.errconv:
                    # Error column: convert using its parent attribute's
                    # type, but store it under its own column name.
                    attname = self.errconv[key]
                    fname = key
                else:
                    fname = self.fielddict.get(key, None)
                    attname = fname
                try:
                    if fname:
                        if value:
                            newline[fname] = \
                                datastore.sample_attributes.input_value(attname, value)
                        else:
                            newline[fname] = None
                except KeyError:
                    #ignore columns we've elected not to import
                    pass
                except ValueError:
                    #TODO: give ignore line/fix item/give up options
                    wx.MessageBox(
                        "%s on row %i has an incorrect type. "
                        "Please update the csv file and try again." %
                        (key, index), "Operation Cancelled",
                        wx.OK | wx.ICON_INFORMATION)
                    event.Veto()
                    return
            unitline = {}
            #now that we have all the values in the row, do a second pass for
            #unit & error handling
            for key, value in newline.iteritems():
                if key in self.errconv:
                    #skip error fields, they get handled with the parent.
                    continue
                att = datastore.sample_attributes[key]
                if value is None:
                    continue
                if att.is_numeric():
                    uncert = None
                    if key in self.errdict:
                        errkey = self.errdict[key]
                        if len(errkey) > 1:
                            # Two error columns -> asymmetric (lo, hi) pair.
                            uncert = (newline.get(errkey[0], 0),
                                      newline.get(errkey[1], 0))
                        else:
                            # One error column -> symmetric uncertainty.
                            uncert = newline.get(errkey[0], 0)
                    unitline[key] = datastructures.UncertainQuantity(
                        value, self.unitdict.get(key, 'dimensionless'), uncert)
                    #convert units (yay, quantities handles all that)
                    #TODO: maybe allow user to select units for display in some sane way...
                    unitline[key].units = att.unit
                else:
                    # Non-numeric attributes pass through unchanged.
                    unitline[key] = value

            self.rows.append(unitline)

        #doing it this way to keep cols ordered as in source material
        imported = [
            self.fielddict[k] for k in self.reader.fieldnames
            if k in self.fielddict
        ]
        self.confirmpage.setup(imported, self.rows)