Example #1
    def calculateSpline(self):
        self.splineX = interpolate.Akima1DInterpolator(self.pointsLength,
                                                       self.xPoints)
        self.splineY = interpolate.Akima1DInterpolator(self.pointsLength,
                                                       self.yPoints)
        self.splineXDeriv1 = self.splineX.derivative(1)
        self.splineYDeriv1 = self.splineY.derivative(1)
        self.splineXDeriv2 = self.splineX.derivative(2)
        self.splineYDeriv2 = self.splineY.derivative(2)
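For reference, a minimal standalone sketch of the same pattern: `Akima1DInterpolator.derivative(k)` returns a new spline representing the k-th derivative. The parameter and point values below are invented for illustration.

import numpy as np
from scipy import interpolate

t = np.arange(5)                         # parameter along the curve
x = np.array([0.0, 1.0, 2.5, 3.0, 4.2])  # x-coordinates of the control points
y = np.array([0.0, 0.8, 0.9, 2.0, 2.1])  # y-coordinates of the control points

spline_x = interpolate.Akima1DInterpolator(t, x)
spline_y = interpolate.Akima1DInterpolator(t, y)
dx_dt = spline_x.derivative(1)           # first-derivative spline
d2x_dt2 = spline_x.derivative(2)         # second-derivative spline

print(spline_x(1.5), dx_dt(1.5), d2x_dt2(1.5))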
Example #2
    def __init__(self, json):
        json["points"] = json.pop("data")
        self.__dict__.update(json)
        self.pointsLength = range(0, len(self.points))
        self.xPoints = [point.point[0] for point in self.points]
        self.yPoints = [point.point[1] for point in self.points]

        self.splineX = interpolate.Akima1DInterpolator(self.pointsLength,
                                                       self.xPoints)
        self.splineY = interpolate.Akima1DInterpolator(self.pointsLength,
                                                       self.yPoints)
Example #3
def spicav_irf(perch):
    ''' This function provides the SPICAV IRF as a function of wavenumber
    interval. `perch` is the percentage change applied to the wings of the
    response (the region outside -4.5 < dnu < 6 cm-1). '''
    # load the SPICAV IR PSF file
    path = '/Users/gouravmahapatr/Dropbox/PhD/spicav_data/psf_lw_all_O2_1270_desc.txt'
    irf_data = np.loadtxt(path, skiprows=1)
    dnu = irf_data[:, 0]  # in cm-1
    df = irf_data[:, 1]  # in kHz
    ch0 = irf_data[:, 2]  # this is the normalized instrument response

    ch0_2 = np.array(ch0)
    ch0_2[(dnu > -4.5) & (dnu < 6)] = 0

    # perch: percentage change (e.g. -20.0) applied to the wings of the response (divided by 100 below)
    ch0_3 = ch0 + perch * ch0_2 / 1e2
    #plt.plot(dnu,ch0_3,label=perch)

    # make a new range of dnu
    dnu_new = np.arange(-100, 100, abs(dnu[0] - dnu[1]))

    # make the interpolator to get the instruments response for a given wavenumber
    ch0_intp = interpolate.Akima1DInterpolator(dnu, ch0_3)

    ch0_4 = ch0_intp(dnu_new)

    # convert the nans into zeros
    ch0_4[np.isnan(ch0_4)] = 0

    # make a new interpolation box
    ch0_intp = interpolate.Akima1DInterpolator(dnu_new, ch0_4)

    # these are the constants to determine the wavenumber (Korablev et al.,)
    f = 1.e3
    a0 = -3.3865473e-8
    b0 = 7.2595705e-2
    c0 = -2.0449838e+0
    a1 = -3.5371703e-8
    b1 = 7.2919764e-2
    c1 = -1.9140569e+1
    # given a frequency of operation determine wavenumber
    nu_0 = a0 * f**2 + b0 * f + c0  # in cm-1
    nu_1 = a1 * f**2 + b1 * f + c1  # in cm-1
    # given a wavenumber determine the frequency

    # model the response function as (sin(x)/x)^2.
    #    x = dnu
    #    f = 14 # in cm-1
    #    xd = (2*np.pi*x/f)+np.pi*1.2
    #    y = (np.sin(xd)/(xd*1.0))**2
    #    plt.plot(x,y)
    return ch0_intp, ch0_3, dnu_new, dnu, ch0
Example #4
    def interp_1d_values_from_profiles(self):
        """Interpolate values in 1D (lateral + longitudinal) from profiles"""
        new_values = np.zeros((self.nb_var, self.nb_nodes_in_riverbed))
        for i_zone in np.unique(self.points['zone']):
            filter_points = self.points['zone'] == i_zone
            section_us = self.section_seq[i_zone]
            section_ds = self.section_seq[i_zone + 1]
            xt_us = section_us.coord.array['Xt']
            xt_ds = section_ds.coord.array['Xt']
            xt_us_target = self.points['Xt_upstream'][filter_points]
            xt_ds_target = self.points['Xt_downstream'][filter_points]

            for i, var in enumerate(self.var_names()):
                values_us = section_us.coord.values[var]
                values_ds = section_ds.coord.values[var]

                if self.interp_values == 'LINEAR':
                    new_values_us = np.interp(xt_us_target, xt_us, values_us)
                    new_values_ds = np.interp(xt_ds_target, xt_ds, values_ds)

                elif self.interp_values == 'B-SPLINE':
                    splrep_us = interpolate.splrep(xt_us, values_us)
                    splrep_ds = interpolate.splrep(xt_ds, values_ds)
                    new_values_us = interpolate.splev(xt_us_target, splrep_us)
                    new_values_ds = interpolate.splev(xt_ds_target, splrep_ds)

                elif self.interp_values == 'AKIMA':
                    new_values_us = interpolate.Akima1DInterpolator(
                        xt_us, values_us)(xt_us_target)
                    new_values_ds = interpolate.Akima1DInterpolator(
                        xt_ds, values_ds)(xt_ds_target)

                elif self.interp_values == 'PCHIP':
                    new_values_us = interpolate.pchip_interpolate(
                        xt_us, values_us, xt_us_target)
                    new_values_ds = interpolate.pchip_interpolate(
                        xt_ds, values_ds, xt_ds_target)

                elif self.interp_values == 'CUBIC_SPLINE':
                    new_values_us = interpolate.CubicSpline(
                        xt_us, values_us)(xt_us_target)
                    new_values_ds = interpolate.CubicSpline(
                        xt_ds, values_ds)(xt_ds_target)

                else:
                    raise NotImplementedError

                new_values[i, filter_points] = new_values_us * (1 - self.points['xl'][filter_points]) + \
                                               new_values_ds * self.points['xl'][filter_points]
        return new_values
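The method above interpolates each variable laterally on the upstream and downstream profiles and then blends the two results with the longitudinal weight `xl`. A stripped-down sketch of that blend on invented arrays (none of the names below come from the class):

import numpy as np
from scipy import interpolate

xt_us = np.array([0.0, 1.0, 2.0, 3.0])   # lateral coordinate along the upstream profile
val_us = np.array([1.0, 1.5, 1.2, 0.8])
xt_ds = np.array([0.0, 1.0, 2.0, 3.0])   # lateral coordinate along the downstream profile
val_ds = np.array([2.0, 2.2, 1.9, 1.5])

xt_target = np.array([0.5, 1.5, 2.5])    # lateral positions of the riverbed nodes
xl = np.array([0.25, 0.5, 0.75])         # longitudinal weight: 0 = upstream, 1 = downstream

new_us = interpolate.Akima1DInterpolator(xt_us, val_us)(xt_target)
new_ds = interpolate.Akima1DInterpolator(xt_ds, val_ds)(xt_target)
blended = new_us * (1 - xl) + new_ds * xl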
Example #5
def _akima_interpolate(xi, yi, x, der=0, axis=0):
    """
    Convenience function for akima interpolation.
    xi and yi are arrays of values used to approximate some function f,
    with ``yi = f(xi)``.

    See `Akima1DInterpolator` for details.

    Parameters
    ----------
    xi : array_like
        A sorted list of x-coordinates, of length N.
    yi :  array_like
        A 1-D array of real values.  `yi`'s length along the interpolation
        axis must be equal to the length of `xi`. If N-D array, use axis
        parameter to select correct axis.
    x : scalar or array_like
        Of length M.
    der : int or list, optional
        How many derivatives to extract; None for all potentially
        nonzero derivatives (that is a number equal to the number
        of points), or a list of derivatives to extract. This number
        includes the function value as 0th derivative.
    axis : int, optional
        Axis in the yi array corresponding to the x-coordinate values.

    See Also
    --------
    scipy.interpolate.Akima1DInterpolator

    Returns
    -------
    y : scalar or array_like
        The interpolated values at `x` (length M); a list of such arrays is
        returned when `der` is a list of derivative orders.

    """
    from scipy import interpolate
    import numpy as np
    try:
        P = interpolate.Akima1DInterpolator(xi, yi, axis=axis)
    except TypeError:
        # SciPy earlier than 0.17.0 is missing the axis argument
        P = interpolate.Akima1DInterpolator(xi, yi)
    if der == 0:
        return P(x)
    elif np.isscalar(der):
        return P(x, nu=der)
    else:
        return [P(x, nu) for nu in der]
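A brief usage sketch of the helper above on invented data; `der=[0, 1]` asks for the value and the first derivative:

import numpy as np

xi = np.linspace(0, 2 * np.pi, 10)
yi = np.sin(xi)
x = np.array([0.5, 1.0, 1.5])

y = _akima_interpolate(xi, yi, x)                     # interpolated values
y_and_dy = _akima_interpolate(xi, yi, x, der=[0, 1])  # [values, first derivatives]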
Example #6
    def spline2(self, point):
        y = list(self.df.iloc[:, 0])
        x = list(self.df.index)
        f = interpolate.Akima1DInterpolator(x, y)
        X = np.linspace(x[0], x[-1], num=point, endpoint=True)
        Y = f(X)
        self.df = pd.DataFrame(Y, index=X)
Example #7
def arange_atm_to_same_wavelength(atm_containers):
    """
    Function aligns all atmosphere profiles to the same wavelengths.

    :param atm_containers: Iterable[elisa.atm.AtmDataContainer]; atmosphere containers which
                           wavelengths should be aligned
    :return: Iterable[elisa.atm.AtmDataContainer]; wavelength aligned atmospheric containers
    """

    wavelengths = np.unique(
        np.array([atm.model.wavelength for atm in atm_containers]).flatten())
    wavelengths.sort()
    result = list()

    # this code checks whether the containers are already aligned
    s_size = sys.maxsize
    for atm in atm_containers:
        s_size = len(atm.model) if len(atm.model) < s_size else s_size

    # if yes, interpolation is unnecessary
    if s_size == len(wavelengths):
        return atm_containers

    # otherwise interpolation is utilized
    for atm in atm_containers:
        i = interpolate.Akima1DInterpolator(atm.model.wavelength,
                                            atm.model.flux)
        atm.model = AtmModel(wavelength=wavelengths,
                             flux=np.nan_to_num(i(wavelengths)))
        result.append(atm)
    return result
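A stripped-down sketch of the same alignment idea on two plain arrays (toy wavelength grids, no AtmDataContainer involved); np.nan_to_num guards against NaNs outside a grid's range, as in the function above:

import numpy as np
from scipy import interpolate

wl_a = np.array([400., 450., 500., 550.])
flux_a = np.array([1.0, 1.2, 1.1, 0.9])
wl_b = np.array([400., 425., 475., 550.])
flux_b = np.array([0.8, 0.9, 1.0, 0.7])

common_wl = np.unique(np.concatenate([wl_a, wl_b]))
flux_a_aligned = np.nan_to_num(interpolate.Akima1DInterpolator(wl_a, flux_a)(common_wl))
flux_b_aligned = np.nan_to_num(interpolate.Akima1DInterpolator(wl_b, flux_b)(common_wl))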
Example #8
def interpolate_data(data, interpolation_step, start_ts=0, end_ts=None):
    """
    :param data: object of class Data
    :param interpolation_step: interval between newly computed values, e.g. 10 ms (the data originally has intervals of ~100 ms)
    :param start_ts: start of the window to cut out of the Data object
    :param end_ts: end of the window to cut out of the Data object
    :return: a new Data object containing the interpolated DataFrame
    """
    cut_df = data.df.sort_values(file_header_hostTimestamp)
    old_t = cut_df[file_header_hostTimestamp]
    new_t = range(start_ts, end_ts, interpolation_step)
    values_labels = cut_df.columns[5:]
    new_df = pd.DataFrame()
    for vl in values_labels:
        values = cut_df[vl]
        cs = inter.Akima1DInterpolator(old_t, values)
        new_df.insert(len(new_df.columns), vl, cs(new_t))
    new_df.insert(0, file_header_raw_data, "MLEKO")
    new_df.insert(0, file_header_nodeTimestamp, "NA")
    new_df.insert(0, file_header_nodename, data.device)
    new_df.insert(0, file_header_hostTimestamp, new_t)

    # compute sensor_time <--> real_time ratio
    sensor_time = cut_df[file_header_hostTimestamp].iloc[0]
    real_time = cut_df[file_header_real_time].iloc[0]
    start_real_time = real_time - datetime.timedelta(
        milliseconds=int(sensor_time))
    real_time_column = [
        start_real_time + datetime.timedelta(milliseconds=int(ms))
        for ms in list(new_t)
    ]
    real_time_series = pd.to_datetime(real_time_column)
    new_df.insert(0, file_header_real_time, real_time_series)
    return Data(data.device, data.category, new_df)
Example #9
def extend_atm_container_on_bandwidth_boundary(atm_container, left_bandwidth,
                                               right_bandwidth):
    """
    Function adjusts the wavelength boundaries of the atmosphere model so that they exactly match
    `left_bandwidth` and `right_bandwidth`.

    :param atm_container: elisa.atm.AtmDataContainer;
    :param left_bandwidth: float;
    :param right_bandwidth: float;
    :return: elisa.atm.AtmDataContainer;
    """
    interpolator = interpolate.Akima1DInterpolator(
        atm_container.model.wavelength, atm_container.model.flux)

    # interpolating values precisely on the border of the filter(s) coverage
    on_border_flux = interpolator([left_bandwidth, right_bandwidth])
    if np.isnan(on_border_flux).any():
        raise AtmosphereError(
            'Interpolation on bandwidth boundaries led to a NaN value.')
    atm_model: AtmModel = atm_container.model
    atm_model.wavelength[np.array([0, -1])] = [left_bandwidth, right_bandwidth]
    atm_model.flux[np.array([0, -1])] = [on_border_flux[0], on_border_flux[1]]
    atm_model.flux = np.round(atm_model.flux, 10)

    atm_container.model = atm_model
    # continue here
    return atm_container
Example #10
def fgetCosts(pSO):
    vSO = np.array([0, 2, 5, 8, 11, 14, 16])
    vKosten = np.array([0, 1, 5, 15, 50, 150, 500])

    fMean = interpolate.Akima1DInterpolator(vSO, vKosten)
    vx = np.array(range(0, 17))  #np.linspace(2, 16, 10**2)
    vy = fMean(vx)

    fUpBoun = interpolate.Akima1DInterpolator(vx[0:-1], vy[1:])
    fLoBoun = interpolate.Akima1DInterpolator(vx[1:], vy[0:-1])

    vMean = fMean(pSO)
    vUpBoun = fUpBoun(pSO)
    vLoBoun = fLoBoun(pSO)

    return vMean, vUpBoun, vLoBoun
Example #11
def interpolation(x_axis,
                  y_axis,
                  x_value,
                  model='chip',
                  method=None,
                  is_function=False):
    methods = [
        'linear', 'nearest', 'zero', 'slinear', 'quadratic', 'cubic',
        'previous', 'next'
    ]
    # print(type(x_value),type(min(x_axis)))
    models = [
        'akima', 'chip', 'interp1d', 'cubicspline', 'krogh', 'barycentric'
    ]
    # print('x_axis => ',x_axis)

    result = None
    f = None
    if model is None or model == 'interp1d':
        if method in methods:
            f = interpolate.interp1d(x_axis, y_axis, kind=method)
        else:
            f = interpolate.interp1d(x_axis, y_axis, kind='cubic')

    elif model in models:
        if model == 'akima':
            f = interpolate.Akima1DInterpolator(x_axis, y_axis)

        elif model == 'chip':
            f = interpolate.PchipInterpolator(x_axis, y_axis)

        elif model == 'cubicspline':
            f = interpolate.CubicSpline(x_axis, y_axis)

        elif model == 'krogh':
            f = interpolate.KroghInterpolator(x_axis, y_axis)

        elif model == 'barycentric':
            f = interpolate.BarycentricInterpolator(x_axis, y_axis)
    else:
        f = interpolate.PchipInterpolator(x_axis, y_axis)

    if is_function:
        return f
    else:
        if not isinstance(x_value, list):
            # if x_value <min(x_axis) or x_value >max(x_axis):
            #    raise Exception('interpolation error: value requested is outside of range')
            #    return result
            try:
                result = float(f(x_value))
            except Exception:
                return result

        else:
            result = list(map(lambda x: float(f(x)), x_value))

        return result
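A quick usage sketch of the dispatcher above on toy data, exercising the 'akima' branch:

x_axis = [0.0, 1.0, 2.0, 3.0, 4.0]
y_axis = [0.0, 0.8, 0.9, 0.1, -0.7]

y_single = interpolation(x_axis, y_axis, 2.5, model='akima')            # one value
y_many = interpolation(x_axis, y_axis, [0.5, 1.5, 2.5], model='akima')  # list of values
f_akima = interpolation(x_axis, y_axis, None, model='akima',
                        is_function=True)                               # the interpolator itself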
Example #12
def splineAkima1DInterpolator(xydata, point):
    try:
        x, y = spline.transnp(xydata)
        f = interpolate.Akima1DInterpolator(x, y)
        X = np.linspace(x[0], x[-1], num=point, endpoint=True)
        Y = f(X)
        return X, Y
    except ValueError as e:
        print("catch ValueError", e)
        return (0, 0)
Example #13
def _interp_missing(series):
    """Interpolate (Akima) signal"""

    if len(series.dropna()) < 2: return series
    days = np.array((series.index - series.index[0]).days)
    data = np.array(series)
    valid_idx = np.where(~np.isnan(data))
    if len(valid_idx[0]) == 0: return series
    f = interpolate.Akima1DInterpolator(days[valid_idx], data[valid_idx])
    return pd.Series(f(days), index=series.index)
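A short usage sketch of the helper above, using a daily pandas Series with invented values; the interior NaNs are filled by the Akima fit over day offsets:

import numpy as np
import pandas as pd

idx = pd.date_range('2021-01-01', periods=8, freq='D')
s = pd.Series([1.0, np.nan, 2.5, np.nan, np.nan, 4.0, np.nan, 3.5], index=idx)
filled = _interp_missing(s)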
Example #14
def main():

    data = np.loadtxt('input.csv', delimiter=',')
    k = data[:, 0].tolist()
    energy = data[:, 1].tolist()
    knew = np.linspace(k[0], k[-1], num=len(k) * 500)

    # linear interpolation
    linear = interpolate.interp1d(k, energy)

    # cubic spline interpolation
    cubic = interpolate.interp1d(k, energy, kind="cubic")

    # zero-order (step) spline interpolation
    zero = interpolate.interp1d(k, energy, kind="zero")

    # Akima method
    akima = interpolate.Akima1DInterpolator(k, energy)

    plt.plot(k, energy, "o")
    plt.plot(knew, linear(knew), "b", label="Linear interpolation")
    plt.plot(knew, cubic(knew), "r", label="Cubic spline interpolation")
    plt.plot(knew, akima(knew), "g", label="Akima interpolation")
    plt.plot(knew, zero(knew), "m", label="Zero-order spline interpolation")
    plt.xlim([0.99, 1.00])
    plt.ylim([0.40, 0.55])
    plt.legend()
    plt.savefig("bandstructure.png")

    # numerical derivative of the cubic interpolant
    dx = knew[1] - knew[0]  # grid spacing of knew
    dot_np = np.gradient(cubic(knew), dx)
    plt.figure()
    plt.plot(knew, dot_np)
    plt.xlim([0.99, 1.00])
    plt.savefig("gradient_1.png")

    dot2_np = np.gradient(dot_np, dx)
    plt.figure()
    plt.plot(knew, dot2_np)
    plt.xlim([0.99, 1.00])
    plt.savefig("gradient_2.png")

    new_band_structure = np.zeros(((len(knew)), 2))

    print(len(new_band_structure[:, 0]))
    print(len(knew))
    new_band_structure[:, 0] = knew
    new_band_structure[:, 1] = cubic(knew)
    with open("interpolated_band_structure.csv", "w") as mycsv:
        csvwriter = csv.writer(mycsv, delimiter=',')
        csvwriter.writerows(new_band_structure)
Example #15
    def curveTextToCurveData(self):
        keys = self.curveText.split(",")
        self.v = np.empty(len(keys))
        self.f = np.empty(len(keys))
        for i, k in enumerate(keys):
            self.v[i] = k.split("@")[0]
            self.f[i] = k.split("@")[1]
        self.f_akim = interpolate.Akima1DInterpolator(self.f, self.v)
        self.f_int = interpolate.UnivariateSpline(self.f, self.v, k=2, s=0)
        # self.f_cubic = interpolate.interp1d(self.f, self.v, kind='cubic')
        print(
            "\n----\nFrom string %s\ncreated curve with values \t%s \nat frames \t\t\t%s\n----\n"
            % (self.curveText, self.v, self.f))
Example #16
def build_splines(hists, doErrors=False):
    splines = []
    nbins = hists[0][1].GetNbinsX()

    for i in range(0, nbins + 2):  # include ROOT underflow (0) and overflow (nbins + 1) bins
        x = []  # mass
        y = []  # bin content
        for mass, h in hists:
            x.append(mass)
            if doErrors:
                y.append(h.GetBinError(i))
            else:
                y.append(h.GetBinContent(i))

        splines.append(interpolate.Akima1DInterpolator(x, y))

    return splines
Example #17
    def table(self, df):
        """
        Setter for passband table.
        It precomputes the left and right bandwidth for the given table and sets up the interpolation function.
        Akima1DInterpolator is used. If the `bolometric` passband is used, the interpolation function is simply::

            lambda x: 1.0


        :param df: pandas.DataFrame;
        """
        self._table = df
        self.akima = bolometric if (self.passband.lower() in ['bolometric', 'rv_band']) else \
            interpolate.Akima1DInterpolator(df[settings.PASSBAND_DATAFRAME_WAVE],
                                            df[settings.PASSBAND_DATAFRAME_THROUGHPUT])
        self.left_bandwidth = min(df[settings.PASSBAND_DATAFRAME_WAVE])
        self.right_bandwidth = max(df[settings.PASSBAND_DATAFRAME_WAVE])
Example #18
def interpolation(x_array: list, y_array: list):
    points = [(x_array[i], y_array[i]) for i in range(0, len(x_array))]
    points.sort(key=lambda tup: tup[0])
    x_data = []
    y_data = []
    for item in points:
        x_data.append(item[0])
        y_data.append(item[1])
    x_data = np.array(x_data)
    y_data = np.array(y_data)
    f = interpolate.Akima1DInterpolator(x_data, y_data)
    max_x = max(x_data)
    x_new = np.linspace(0, max_x, num=100, endpoint=True)
    y_new = f(x_new)
    x_new = list(x_new)
    y_new = list(y_new)
    return x_new, y_new
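A quick usage sketch of the function above; the input is deliberately unsorted (the function sorts by x), and the minimum x is 0 so the resampling grid, which starts at 0, stays inside the data range:

x = [3.0, 1.0, 2.0, 0.0, 4.0]
y = [0.5, 1.5, 1.0, 0.0, 0.2]
x_new, y_new = interpolation(x, y)   # 100 resampled points on [0, 4]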
Example #19
def ridiculous_log_transform(data,
                             ndim=1024,
                             fs=400,
                             down=30,
                             smoothing_cutoff=1,
                             hard_cutoff=200,
                             log_low_cut=-2.32,
                             prenormalize=True,
                             useEnvelope=True):
    """
    This function returns a distorted version (x-axis log-transformed) of the fourier transform of the signal.
    My hope with this approach is that it yields a more normally-distributed-looking vector, which should lead
    to less weirdness in the NNs later on. I've been informed by my signals teacher that taking the log of a
    signal's y is bad, and warping x is worse, but we shall see whether it works.
    :param data: input data, [n,t] array-like
    :param ndim: dimension of output vector
    :param fs: input data sampling frequency
    :param down: downsampling factor passed to signal.resample_poly
    :param smoothing_cutoff: 'Frequency' of smoothing the spectrum
    :param hard_cutoff: Chop off the frequency spectrum above this frequency
    :param log_low_cut: Sets how much to include very low frequency components
    :param prenormalize: if True, soft-clip normalize the input first
    :param useEnvelope: if True, take the signal envelope before the FFT
    :return: the log-warped magnitude spectrum, a vector of length `ndim`
    """
    if prenormalize:
        data = msig.norm_softclip(data)

    if useEnvelope:
        data = msig.envelope(data)

    # FFT and magnitude
    ftsig = fftpack.fft(data, axis=0)
    ftsig_a = np.abs(ftsig[:len(ftsig) * hard_cutoff // fs])
    # Smooth it with low pass and downsample. Low pass may not be necessary since resample does appropriate
    # pre-filtering
    ftsig_r = signal.resample_poly(ftsig_a, 1, down, axis=0)

    # Ok, now the weird bit. Take the existing x-domain and create an interpolation image of it
    t_rs = np.linspace(0.0001, hard_cutoff, len(ftsig_r))
    fn_ftsig_rs = interpolate.Akima1DInterpolator(t_rs, ftsig_r)
    # And now map an exponential domain, thereby creating a higher density of points around the main freq
    x_basis = np.linspace(log_low_cut, np.log2(hard_cutoff), ndim)
    log_ftsig = fn_ftsig_rs(np.power(2, x_basis))
    return log_ftsig
Example #20
def freq_log_transform(ftsig,
                       fs=400,
                       smoothing_cutoff=1,
                       hard_cutoff=100,
                       log_low_cut=-5):

    # todo: FIX UP THE CRUFT!!!
    ndim = len(ftsig)
    ftsig_a = ftsig  #ftsig[:len(ftsig)*hard_cutoff//fs]
    # Smooth it with low pass and downsample. Low pass may not be necessary since resample does appropriate
    # pre-filtering
    # ftsig_f = auxfilter.butterfilt(ftsig_a, smoothing_cutoff, fs)

    # Ok, now the weird bit. Take the existing x-domain and create an interpolation image of it
    t_rs = np.linspace(1, hard_cutoff, ndim)
    fn_ftsig_rs = interpolate.Akima1DInterpolator(t_rs, ftsig_a)
    # And now map an exponential domain, thereby creating a higher density of points around the main freq
    log_ftsig = fn_ftsig_rs(
        np.exp(np.linspace(log_low_cut, np.log(hard_cutoff), hard_cutoff)))
    return log_ftsig
Example #21
    def _interpolate(self, df):
        # type: (pandas.DataFrame)->InterpType
        if self.axes == "linear":
            wrapper = AxesWrapper(["linear"], "linear")
        elif self.axes == "log":
            wrapper = AxesWrapper(["linear"], "log")
        elif self.axes == "loglinear":
            wrapper = AxesWrapper(["log"], "linear")
        elif self.axes == "loglog":
            wrapper = AxesWrapper(["log"], "log")
        else:
            raise ValueError("Invalid axes wrapper: %s" % self.axes)

        if df.index.nlevels != 1:
            raise ValueError("Scipy1dInterpolator cannot handle multi-index data.")

        # axes modification; note that the wrappers are numpy.vectorize()-ed.
        xs = wrapper.wx[0](df.index.to_numpy())
        ys = wrapper.wy(df.to_numpy())

        if self.kind == "spline":
            f_bar = sci_interp.CubicSpline(xs,
                                           ys,
                                           bc_type="natural",
                                           extrapolate=False)
        elif self.kind == "pchip":
            f_bar = sci_interp.PchipInterpolator(xs, ys, extrapolate=False)
        elif self.kind == "akima":
            f_bar = sci_interp.Akima1DInterpolator(xs, ys)
            f_bar.extrapolate = False
        else:
            f_bar = sci_interp.interp1d(xs, ys, self.kind, bounds_error=True)

        # now `f_bar` is float->float; we should convert it to Tuple[float]->float.

        def _f_bar(x, f_bar=f_bar):  # noqa: B008
            # type: (Sequence[float], Callable[[float], float])->float
            return f_bar(*x)

        return wrapper.wrapped_f(_f_bar)
Example #22
    def __init__(self):
        """Constructs the LUT TABLE needed for making the images"""

        #self.LUT_TABLE = np.array([])

        P = np.array([  # x, y
            [0.00000, 0.00000],
            [0.09975, 0.25581],
            [0.10224, 0.88889],
            [0.13716, 0.00000],
            [0.13965, 0.71576],
            [0.14214, 0.85788],
            [0.28429, 0.00000],
            [0.28678, 0.54005],
            [0.28928, 0.32558],
            [0.29177, 0.58656],  #row 10
            [0.32918, 0.70543],
            [0.33167, 0.62532],
            [0.49377, 0.58915],
            [0.49626, 0.20672],
            [0.67830, 0.55297],
            [0.68080, 0.31525],
            [0.68579, 0.13695],
            [0.71072, 0.78295],
            [0.78055, 0.02326],
            [0.78803, 0.58140],  # row 20
            [0.80050, 0.00000],
            [1.00000, 1.00000]
        ])

        new_y = ip.Akima1DInterpolator(P[:, 0], P[:, 1])

        self.LUT_TABLE = []
        for i in range(0, 256):
            new_val = int(new_y(i / 255) * 255)
            self.LUT_TABLE = np.append(self.LUT_TABLE, new_val)

        self.LUT_TABLE = np.clip(self.LUT_TABLE, 0, 255)
        self.LUT_TABLE_RGB = np.concatenate(
            [self.LUT_TABLE, self.LUT_TABLE, self.LUT_TABLE])
Example #23
    def _set_sample(self, sampled_points: np.ndarray, span: float) -> None:
        """Resample sample data to reference rate and store it.

        For the cross-correlation approach to work, sample and reference need
        the same rate of samples per arbitrary unit.

        :param sampled_points: np array of shape (2, n) for n sampled points.
                    The first row represents the x values at which the samples
                    have been measured (don't need to be to scale with respect
                    to the reference, don't need to be equidistant). The second
                    row is the actual signal at those points.
        :param span: How many arbitrary (reference) units does the sample span?
                    This is the crucial indicator of how wide the given sample
                    is, as the actual sample point count is ignored due to
                    resampling.
        """
        # We assume, that for our signal type, Akima splines present a much
        # more reasonable approximation than linear interpolation.  The
        # `length` parameter effectively determines the number of sample
        # points, as it is given with respect to the reference data length.

        # Normalize sample into the [0, 1] (inclusive) interval.
        xvals = sampled_points[0]
        xvals -= min(xvals)
        xvals /= max(xvals)

        # Create an interpolation function defined in the [0, 1] interval and
        # resample the data. We only need the new equidistant y values from now
        # on.
        inter = interpolate.Akima1DInterpolator(xvals, sampled_points[1])
        n_samples = int(round((span / self.ref_span) * len(self.reference)))
        sample = inter(np.linspace(0, 1, n_samples))

        # Normalize values for reproducible cross-correlation results.
        norm = np.linalg.norm(sample)
        self._sample = np.divide(sample, norm)

        # Correlation needs to be recalculated when a new sample is set.
        self._corr = None
Example #24
    def set_interp(self):
        prob = np.clip(self.norm * self.nbar, 0., 1.)
        self.interp = interpolate.Akima1DInterpolator(self.z, prob, axis=0)
Example #25
    def set_interp(self):
        self.interp = interpolate.Akima1DInterpolator(self.rgrid,
                                                      self.zgrid,
                                                      axis=0)
Example #26
    def __init__(self, curve: 'Curve'):
        super().__init__(curve)
        self.akima = interp.Akima1DInterpolator(curve.t, curve.data, axis=0)
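The `axis=0` argument above lets a single interpolator handle multi-column data such as an N-by-d array of curve samples. A minimal standalone sketch with toy values:

import numpy as np
from scipy import interpolate as interp

t = np.linspace(0.0, 1.0, 6)                     # parameter values
data = np.column_stack([np.cos(t), np.sin(t)])   # shape (6, 2): one row per sample point

akima = interp.Akima1DInterpolator(t, data, axis=0)
print(akima(0.25))                               # interpolated (x, y) pair, shape (2,)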
Example #27
       (np.std(CN[:, 2], dtype=float, ddof=1)))).encode('utf-8')

# create the figure background
fig = plt.figure(facecolor='white', figsize=(9, 14))
csfont = {'fontname': 'Liberation Sans'}

# add the time series
time1 = fig.add_axes([0.1, 0.58, 0.8, 0.4])
date = arnum[0:17, 16]
flux = arnum[0:17, 2]
ndate = np.empty((0, 1))
for i in date:
    ndate = np.append(ndate, (mdates.date2num(i)))
newx = np.linspace(ndate.min(), ndate.max(), 200)
dd = mdates.num2date(newx)
akima1 = interpolate.Akima1DInterpolator(ndate, flux)
time1.plot(newx, akima1(newx), 'black', linewidth=3)
time1.plot(date,
           flux,
           'o',
           color='black',
           markersize=10,
           markeredgecolor='black')
time1.xaxis.set_major_locator(mdates.MonthLocator(interval=4))
time1.xaxis.set_major_formatter(mdates.DateFormatter('%Y-%m'))
plt.ylim(0, 120)
plt.yticks(np.arange(0, 121, 60), size=16, **csfont)
time1.set_ylabel(r'Total particle flux (mg/$\mathregular{cm^{2}/day}$)',
                 size=22,
                 position=(1, 0),
                 **csfont)
Example #28
    vy = fMean(vx)

    fUpBoun = interpolate.Akima1DInterpolator(vx[0:-1], vy[1:])
    fLoBoun = interpolate.Akima1DInterpolator(vx[1:], vy[0:-1])

    vMean = fMean(pSO)
    vUpBoun = fUpBoun(pSO)
    vLoBoun = fLoBoun(pSO)

    return vMean, vUpBoun, vLoBoun


vSO = np.array([0, 2, 5, 8, 11, 14, 16])
vKosten = np.array([0, 1, 5, 15, 50, 150, 500])

fMean = interpolate.Akima1DInterpolator(vSO, vKosten)
vx = np.array(range(0, 17))  #np.linspace(2, 16, 10**2)
vy = fMean(vx)

plt.plot(vSO, vKosten, 'k-', label='Approximierte Mittlere Kosten')
plt.plot(vx, vy, 'r--', label='Approximierte Mittlere Kosten')

plt.xlabel('Sozialstatus')
plt.ylabel('Kosten $D/M$')

plt.legend(loc='best')
plt.tight_layout()
plt.show()
plt.clf()

vx_u1 = np.array(range(2, 17))
Example #29
import sys

A,B = 1.0,1.0
lj = lambda r: (A/(r**12))-(B/(r**6)) 

start,stop,n = .1,3,500
n_test_points = 2000

domain = np.linspace(start,stop,n)
lj_range = list(map(lj,domain))

test_points = np.random.random_sample(n_test_points)*(stop-start) + start

pchip = interpolate.PchipInterpolator(domain,lj_range)
cspline = interpolate.CubicSpline(domain,lj_range)
akima = interpolate.Akima1DInterpolator(domain,lj_range)

test_points_interpolated = []
for point in test_points:
    test_points_interpolated.append(akima(point))

test_points_abs_error = []
test_points_rel_error = []

for i,point in enumerate(test_points_interpolated):
    abs_e = abs(point-lj(test_points[i]))
    rel_e = abs(abs_e / lj(test_points[i]))
    test_points_abs_error.append(abs_e)
    test_points_rel_error.append(rel_e)

f, ax = plt.subplots(3, sharex=True)
Example #30
print("Total ST: %.2f %s %.2f vs. %.2f %s %.2f" %
      (np.mean(WN[:, 2]), u"\u00b1", np.std(WN[:, 2], dtype=float, ddof=1),
       np.mean(CN[:, 2]), u"\u00b1", np.std(CN[:, 2], dtype=float, ddof=1)))

# create the figure background
fig = plt.figure(facecolor='white', figsize=(9, 14))
csfont = {'fontname': 'Liberation Sans'}

# add the time series
time1 = fig.add_axes([0.1, 0.58, 0.8, 0.4])
date = arnum[0:17, 16]
flux = arnum[0:17, 2]
ndate = np.empty((0, 1))
for i in date:
    ndate = np.append(ndate, mdates.date2num(i))
newx = np.linspace(ndate.min(), ndate.max(), 200)
dd = mdates.num2date(newx)
akima1 = interpolate.Akima1DInterpolator(ndate, flux)
time1.plot(newx, akima1(newx), 'black', linewidth=3)
time1.plot(date,
           flux,
           'o',
           color='black',
           markersize=10,
           markeredgecolor='black')
time1.xaxis.set_major_locator(mdates.MonthLocator(interval=4))
time1.xaxis.set_major_formatter(mdates.DateFormatter('%Y-%m'))
plt.ylim(0, 110)
plt.yticks(np.arange(0, 111, 55), size=16, **csfont)
time1.set_ylabel(r'Total particle flux (mg.$\mathregular{cm^{-2}.day^{-1}}$)',
                 size=22,
                 position=(1, 0),
                 **csfont)
fig.autofmt_xdate(rotation=90)
time1.tick_params(axis='x', which='major', labelsize=14)
time1.set_xlim([datetime.date(2007, 12, 1), datetime.date(2014, 3, 7)])

# plot the ratio as a time series
time2 = time1.twinx()
ratio = arnum[0:17, 10]
"""# convert the copro to mg/g in the copr array