Example #1
def b_ink(x, fnca, k):
    "return t, bcoef"
    ca = []
    for fnc in fnca:
        t = [None]*len(x)
        for n in range(len(x)):
            cf = np.empty_like(fnc)
            s = list(fnc.shape)
            s[n] = 1
            xx = x[n]
            xxe = xx[-1]
            xxe += (xxe-xx[-2]) * 0.1
            kk = k[n]
            start = kk // 2
            end = -(kk - start)
            knots = xx[start:end].copy()
            if kk % 2:
                knots[:-1] += knots[1:]
                knots[-1] += xx[end]
                knots *= 0.5
            for idx in np.ndindex(tuple(s)):
                idx = list(idx)
                idx[n] = slice(None)
                #ss = InterpolatedUnivariateSpline(x[n], fnc[idx], bbox=(xx[0],xxe), k=k[n]-1)
                ss = LSQUnivariateSpline(xx, fnc[idx], knots, bbox=(xx[0],xxe), k=kk-1)
                cf[idx] = ss.get_coeffs()
            t[n] = np.pad(knots, kk, 'constant', constant_values=(xx[0],xxe))
            fnc = cf
        ca.append(fnc)
    return t, ca
Example #3
    def resid(self, params, **kwargs):
        """Residuals

        Return the residuals

        Args:
            params (lmfit.Parameters): see params in self.model

        Returns:
            array: model minus data

        """
        flux = self.spec.flux.copy()
        wav = self.spec.wav
        model = self.model(params, wav=wav, **kwargs)

        if self.cont_method == 'spline-fm':
            model /= self.spline(params, wav)

        if self.cont_method == 'spline-dd':
            node_wav, node_flux = get_spline_nodes(params)
            t = node_wav[1:-1]

            spl = LSQUnivariateSpline(wav, flux, t, k=3, ext=0)
            flux /= spl(wav)

            spl = LSQUnivariateSpline(wav, model, t, k=3, ext=0)
            model /= spl(wav)

        res = flux - model
        return res
Example #4
 def fit(self):
     self.knotPvals = np.zeros(len(self.knots))
     self.interp = LSQUnivariateSpline(self.x,
                                       self.y,
                                       self.knots,
                                       k=1,
                                       w=self.weights)
Example #5
def Smooth(x,y):
    """return: smoothed funciton s(x) and estimation of sigma of y for one data point"""
    #define segments to do spline smoothing
    t=np.linspace(0,len(x),3)[1:-1]
    sr = LSQUnivariateSpline(x, y.real, t)
    si = LSQUnivariateSpline(x, y.imag, t)
    return sr(x)+si(x)*1j, (sr.get_residual()/len(x))**0.5+(si.get_residual()/len(x))**0.5*1j
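A quick way to exercise Smooth on synthetic data (an illustrative sketch; it assumes Smooth's own numpy/scipy imports are in place, and note that the single interior knot lands at len(x)/2, so x should span at least that value):

import numpy as np

x = np.arange(200.0)   # x extends past len(x)/2 = 100, where the interior knot lands
y = np.exp(0.05j * x) + 0.05 * (np.random.randn(x.size) + 1j * np.random.randn(x.size))
s, sigma = Smooth(x, y)   # smoothed complex curve and complex noise estimate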
Example #6
def spline_coeff(lat, time):

    time = time[~np.isnan(lat)]
    time_diff = time[1:]-time[:-1]
    counter = 0
    indeces_remotion = np.array([], dtype=int)  # integer indices of samples to drop
    for i in range(0, time_diff.shape[0]):

        if time_diff[i] > 60:
            if counter < 5:
                slack = np.arange(i - counter, i + 1)
                indeces_remotion = np.append(indeces_remotion, slack)

            counter = 0

        else:
            counter += 1
    time = np.delete(time, indeces_remotion)
    lat = lat[~np.isnan(lat)]
    lat = np.delete(lat, indeces_remotion)
    if lat.shape[0] > 5:
        save_data = optimal_knots(lat, time)
        t2 = time[save_data[1:-1]]
        try2 = LSQUnivariateSpline(time, lat, t2, k=3)
        spl_coeff = try2.get_coeffs()
        knots = np.r_[(time[save_data[0]],) * (3 + 1), t2, (time[save_data[-1]-1],) * (3 + 1)]
        return [spl_coeff, knots]  # in this case returns spline coeff and knots
    else:
        return ["not enough data", lat]  # in this case returns original data
Example #7
def plot_contour_line(ax, x, y, **kwargs):
    """Plot smooth curve from points.

    There is some noise in the contour points from MINUIT,
    which throws off most interpolation schemes.

    The LSQUnivariateSpline as used here gives good results.

    It could probably be simplified, or Bezier curve plotting via
    matplotlib could also be tried:
    https://matplotlib.org/gallery/api/quad_bezier.html
    """
    from scipy.interpolate import LSQUnivariateSpline

    x = np.hstack([x, x, x])
    y = np.hstack([y, y, y])

    t = np.linspace(-1, 2, len(x), endpoint=False)
    tp = np.linspace(0, 1, 50)

    t_knots = np.linspace(t[1], t[-2], 10)
    xs = LSQUnivariateSpline(t, x, t_knots, k=5)(tp)
    ys = LSQUnivariateSpline(t, y, t_knots, k=5)(tp)

    ax.plot(xs, ys, **kwargs)
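An illustrative call, with a noisy circle standing in for a MINUIT contour (numpy and matplotlib assumed available):

import numpy as np
import matplotlib.pyplot as plt

phi = np.linspace(0, 2 * np.pi, 40, endpoint=False)
x = np.cos(phi) + 0.02 * np.random.randn(phi.size)   # noisy closed contour points
y = np.sin(phi) + 0.02 * np.random.randn(phi.size)

fig, ax = plt.subplots()
plot_contour_line(ax, x, y, color="tab:blue")
plt.show()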
Example #8
 def calc_coeffs(self):
     coeff = []
     for v, (g, kdata) in enumerate(zip(self.basegrid, self.knot_data)):
         ranges = [(slice(r[0], r[1], t[0] * 1j) if kd.used() else slice(
             (r[1] - r[0]) / 2.0, r[1], 1j))
                   for r, t, kd in zip(self.ranges, g[0], kdata)]
         fnc = self.calc_grid(ranges, g[1], g[2])[v]
         for n in range(len(ranges)):
             kd = self.knot_data[v, n]
             if not kd.used():
                 continue
             s = list(fnc.shape)
             s[n] = kd.num_coeffs()
             out = np.empty(s)
             s[n] = 1
             x = np.mgrid[ranges[n]]
             for i in np.ndindex(tuple(s)):
                 i = list(i)
                 i[n] = slice(None)
                 ss = LSQUnivariateSpline(x,
                                          fnc[i],
                                          kd.get_knots(),
                                          bbox=kd.get_bbox(),
                                          k=kd.get_order() - 1)
                 out[i] = ss.get_coeffs()
             fnc = out
         coeff.append(fnc)
     return coeff
Example #9
    def __init__(self, file, knots=[]):
        self.filename = file
        with open(file, "r") as f:
            inData = genfromtxt(f, float, comments='#').T
            self.rho = inData[0]
            self.orig_data = inData[1]

        self.spline = LSQUnivariateSpline(self.rho, self.orig_data, knots, k=5)
Example #10
def fft_normalize(freq, power, n, bin_sz, cut_peak, knot1, knot2):
    """ Takes the values of fft at given frequencies and normalizes the power spectrum using a second order spline
        
    INPUTS: 
        
    freq: frequencies where the power of the FFT spectrum has been evaluated
    power: power of the FFT spectrum evaluated at freq
    n: Number of points in freq
    bin_sz: Distance between consecutive frequencies
    cut_peak: Maximum relative amplitude for a given peak to be used in second and final spline fit
    knot1: Knot distance for the preliminary second order spline with the peaks on it
    knot2: Knot distance for the final second order spline where the peaks have been removed
        
    OUTPUT:
    
    power_rel: The FFT spectrum normalized using a second order spline fit and normalized by its median.
    """

    knot_w = knot1 * n * bin_sz  # difference in freqs (cycles/day) between each knot
    # index of the first knot: the first point that is knot_w away from the first value of x
    first_knot_i = int(np.round(knot_w / bin_sz))
    # index of the last knot: the first point that is knot_w away from the last value of x
    last_knot_i = len(freq) - first_knot_i
    knots = np.arange(freq[first_knot_i], freq[last_knot_i], knot_w)
    spline = LSQUnivariateSpline(
        freq, power, knots,
        k=2)  #the spline, it returns the piecewise function
    fit = spline(freq)  #the actual y values of the fit
    if np.amin(fit) < 0:
        fit = np.ones(len(freq))
    pre_power_rel = power / fit

    # second fit -- by deleting the points higher than cut_peak times the average value of power
    pre_indexes = constraint_index_finder(cut_peak, power)
    power_fit = np.delete(pre_power_rel, pre_indexes)
    freq_fit = np.delete(freq, pre_indexes)

    knot_w1 = knot2 * n * bin_sz
    # index of the first knot: the first point that is knot_w1 away from the first value of x
    first_knot_fit_i = int(np.round(knot_w1 / bin_sz))
    # index of the last knot: the first point that is knot_w1 away from the last value of x
    last_knot_fit_i = len(freq_fit) - first_knot_fit_i
    knots_fit = np.arange(freq_fit[first_knot_fit_i],
                          freq_fit[last_knot_fit_i], knot_w1)
    spline = LSQUnivariateSpline(
        freq_fit, power_fit, knots_fit,
        k=2)  #the spline, it returns the piecewise function
    fit3 = spline(freq)  #the actual y values of the fit applied to freq
    if np.amin(fit3) < 0:
        fit3 = np.ones(len(freq))

    # relative power
    power_rel = pre_power_rel / fit3
    return power_rel / np.median(power_rel)  # so the median of power_rel is 1
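The helper constraint_index_finder is not shown above, but the core normalization step boils down to evenly spaced interior knots kept a fixed distance from both ends of the frequency grid. A self-contained sketch of just that step, with made-up numbers, might look like:

import numpy as np
from scipy.interpolate import LSQUnivariateSpline

freq = np.linspace(0.01, 10.0, 2000)                   # hypothetical frequency grid
power = 1.0 / (1.0 + freq) + 0.01 * np.random.rand(freq.size)

bin_sz = freq[1] - freq[0]
knot_w = 0.5                                           # knot spacing in frequency units
first = int(round(knot_w / bin_sz))                    # stay knot_w away from both edges
knots = np.arange(freq[first], freq[-first], knot_w)

spline = LSQUnivariateSpline(freq, power, knots, k=2)  # second-order spline, as above
power_rel = power / spline(freq)                       # normalized spectrum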
Example #11
def fun_residuals(params, xnor, ynor, w, bbox, k, ext):
    """Compute fit residuals"""

    spl = LSQUnivariateSpline(x=xnor,
                              y=ynor,
                              t=[item.value for item in params.values()],
                              w=w,
                              bbox=bbox,
                              k=k,
                              ext=ext,
                              check_finite=False)
    return spl.get_residual()
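A hedged sketch of how such a residual function might be driven from lmfit, with the interior knots stored as parameters; the parameter names and bounds are hypothetical, and because the objective returns a scalar it needs a scalar minimizer such as 'nelder':

import numpy as np
import lmfit

xnor = np.linspace(0.0, 1.0, 200)
ynor = np.sin(2 * np.pi * xnor) + 0.01 * np.random.randn(xnor.size)

params = lmfit.Parameters()
params.add('knot0', value=0.3, min=0.05, max=0.45)   # hypothetical interior knots
params.add('knot1', value=0.6, min=0.55, max=0.95)

result = lmfit.minimize(fun_residuals, params, method='nelder',
                        args=(xnor, ynor, None, [None, None], 3, 0))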
Example #12
def make_smoothed_err_plot(y, y_hat, title, fname, order=3, num_knots=10):
    sort_idxs = np.argsort(y.flatten())
    y_sorted = y.flatten()[sort_idxs]
    y_hat_sorted = y_hat.flatten()[sort_idxs]
    sq_errs = (y_sorted - y_hat_sorted)**2
    cum_sq_errs = np.cumsum(sq_errs)

    winrad = 100
    avg_errs = np.array([
        sq_errs[i - winrad:i + winrad].sum() / (2 * winrad)
        for i in range(winrad, y.size - winrad)
    ])
    log_avg_errs = np.log10(avg_errs)

    fig, ax = plt.subplots()
    x = np.arange(avg_errs.size)
    ax.plot(x, log_avg_errs, color="red")
    knots = np.linspace(x.min() + 1, x.max() - 1, num_knots)
    spline = LSQUnivariateSpline(x, log_avg_errs, k=order, t=knots)
    log_avg_errs_fit = spline(x)

    ax.plot(x, log_avg_errs_fit, "b-")
    plt.title(title)
    plt.xlabel("Points ordered by true outputs")
    plt.ylabel("Log_10 of squared error")
    plt.savefig(fname)
    plt.close()
Example #13
def spline_derivatives(landscape, n_bins=100):
    """
    :param landscape: original landscape
    :param n_bins: number of bins for fitting
    :return: tuple of (spline fit to A_z, first derivative A_dot evaluated at z, second derivative A_ddot evaluated at z)
    """
    z = landscape.z
    A_z = landscape.A_z
    sort_idx = np.argsort(z)
    z_sort = z[sort_idx]
    A_sort = A_z[sort_idx]
    knots = np.linspace(min(z_sort), max(z_sort), endpoint=True, num=n_bins)
    spline_A_z = LSQUnivariateSpline(x=z_sort, y=A_sort, k=3, t=knots[1:-1])
    A_dot_spline_z = spline_A_z.derivative(1)(z)
    A_ddot_spline_z = spline_A_z.derivative(2)(z)
    return spline_A_z, A_dot_spline_z, A_ddot_spline_z
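A quick check with a stand-in landscape object (SimpleNamespace is only a placeholder for whatever class the original code expects):

import numpy as np
from types import SimpleNamespace

z = np.linspace(0.0, 1.0, 500)
landscape = SimpleNamespace(z=z, A_z=z**3 - 0.5 * z)   # toy landscape with z and A_z attributes

spline_A_z, A_dot, A_ddot = spline_derivatives(landscape, n_bins=50)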
Example #14
def fit_spline(lc, niter=0, reject_thresh=5.0):
    '''Fit a smooth spline to the lightcurve.
       The number of knots is empirically chosen based on examining a few
       lightcurves of relatively bright quasars.
       The fit can include iterative rejection, but by default it doesn't.
       The chi^2 statistic is returned using the spline model for the fluxes.
    '''
    if lc['mjd'][0] < 52000:
        knots = np.array([52500., 53500., 54000.])
    else:
        knots = np.array([52700., 53500., 54000.])
    iternum = 0
    ii = np.where(~lc['mask'])[0]
    while True:
        knots = knots[(knots > lc['mjd'][ii[0]]) & (knots < lc['mjd'][ii[-1]])]
        spfit = LSQUnivariateSpline(lc['mjd'][ii],
                                    lc['flux'][ii],
                                    knots,
                                    np.sqrt(lc['ivar'][ii]),
                                    k=2)
        chi2v = (lc['flux'][ii] - spfit(lc['mjd'][ii])) * np.sqrt(
            lc['ivar'][ii])
        if iternum < niter:
            ii = ii[np.abs(chi2v) < reject_thresh]
            iternum += 1
        else:
            break
    chi2 = np.sum(chi2v**2)
    ndof = len(ii)
    return dict(spfit=spfit,
                fitvals=spfit(lc['mjd']),
                chi2=chi2,
                ndof=ndof,
                good=ii)
Example #15
    def _fit_method(self, model, x, y, **kwargs):
        t = kwargs.pop('t', None)
        weights = kwargs.pop('weights', None)
        bbox = kwargs.pop('bbox', [None, None])

        if t is not None:
            if model.user_knots:
                warnings.warn(
                    "The current user-specified knots will be "
                    "overwritten by the knots passed into this function",
                    AstropyUserWarning)
        else:
            if model.user_knots:
                t = model.t_interior
            else:
                raise RuntimeError("No knots have been provided")

        if bbox != [None, None]:
            model.bounding_box = bbox

        from scipy.interpolate import LSQUnivariateSpline
        spline = LSQUnivariateSpline(x,
                                     y,
                                     t,
                                     w=weights,
                                     bbox=bbox,
                                     k=model.degree)

        model.tck = spline._eval_args
        return spline
Example #16
def _spline_filter(x,y,bins=None,num_bins=100,k=3,**kw):
    """
    :param x: to filter, independent variable
    :param y: to filter
    :param bins: x values we want to bin at
    :param num_bins: number of bins to use (only used if bins is None)
    :param k: degree of spline
    :param kw: see LSQUnivariateSpline
    :return: spline object
    """
    min_x, max_x = min(x), max(x)
    if (bins is None):
        # fit a spline at the given bins
        bins = np.linspace(min_x,max_x,endpoint=True,num=num_bins)
    # determine where the bins are in the range of the data for this landscape
    good_idx = np.where((bins >= min_x) & (bins <= max_x))
    bins_relevant = bins[good_idx]
    """
    exclude the first and last bins, to make sure the Schoenberg-Whitney 
    condition is met for all interior knots (see: 
docs.scipy.org/doc/scipy/reference/generated/scipy.interpolate.LSQUnivariateSpline
    """
    t = bins_relevant[1:-1]
    kw = dict(x=x, t=t, k=k, **kw)
    return LSQUnivariateSpline(y=y, **kw)
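For reference, a minimal call on synthetic data (numpy assumed imported as np in the surrounding module):

import numpy as np

x = np.linspace(0.0, 5.0, 1000)
y = np.sin(3 * x) + 0.1 * np.random.randn(x.size)

spl = _spline_filter(x, y, num_bins=40, k=3)   # returns the spline object
y_smooth = spl(x)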
Example #17
def sfit(ptx_eval, ptx, pty, s=1., k=3, t=None):
    """ Fit a bunch of points and return an evaluation vector """
    assert len(ptx) == len(pty)
    if t is None:
        return UnivariateSpline(x=ptx, y=pty, s=s, k=k)(ptx_eval)
    else:
        return LSQUnivariateSpline(x=ptx, y=pty, t=t, k=k)(ptx_eval)
Example #18
def iter_spline(time, flux, mask, window_length):
    masked_flux = flux[mask==1]
    masked_time = time[mask==1]
    no_knots = int((max(time) - min(time)) / window_length)
    newflux = masked_flux.copy()
    newtime = masked_time.copy()
    newtime, newflux = cleaned_array(newtime, newflux)
    detrended_flux = masked_flux.copy()
    for i in range(constants.PSPLINES_MAXITER):
        outliers = 1 - detrended_flux < constants.PSPLINES_STDEV_CUT * np.nanstd(detrended_flux)
        mask_outliers = np.ma.where(outliers)
        newtime, newflux = cleaned_array(newtime[mask_outliers], newflux[mask_outliers])
        # knots must not be at the edges, so we take them as [1:-1]
        if len(newtime) < 5:
            return np.full(len(time), np.nan)
        knots = np.linspace(min(newtime), max(newtime), no_knots)[1:-1]
        try:
            trend = LSQUnivariateSpline(newtime, newflux, knots)
        except:
            return np.full(len(time), np.nan)
        trend_segment = trend(newtime)
        detrended_flux = newflux / trend_segment
        outliers = 1 - detrended_flux > constants.PSPLINES_STDEV_CUT * np.nanstd(detrended_flux)
        mask_outliers = np.ma.where(outliers)
        if len(mask_outliers[0]) == 0:
            break
    return trend(time)  # Final iteration: apply trend to clipped and masked values
Example #20
def get_order_trace_old(s):
    x = np.arange(len(s))

    s = np.array(s)
    mm = s > max(s) * 0.05
    dd1, dd2 = np.nonzero(mm)[0][[0, -1]]

    # mask out absorption feature
    smooth_size = 20
    s0 = ni.median_filter(s, 40)
    s_s0 = s - s0
    s_s0_std = s_s0[np.abs(s_s0) < 2. * s_s0.std()].std()

    mmm = s_s0 > -3. * s_s0_std

    s1 = ni.gaussian_filter1d(s0[dd1:dd2], smooth_size, order=1)
    #x1 = x[dd1:dd2]

    s1r = s1

    s1_std = s1r.std()
    s1_std = s1r[np.abs(s1r) < 2. * s1_std].std()

    s1r[np.abs(s1r) < 2. * s1_std] = np.nan

    if np.any(np.isfinite(s1r[:1024])):
        i1 = np.nanargmax(s1r[:1024])
        i1r = np.where(~np.isfinite(s1r[:1024][i1:]))[0][0]
        i1 = dd1 + i1 + i1r  #+smooth_size
    else:
        i1 = dd1
    if np.any(np.isfinite(s1r[1024:])):
        i2 = np.nanargmin(s1r[1024:])
        i2r = np.where(~np.isfinite(s1r[1024:][:i2]))[0][-1]
        i2 = dd1 + 1024 + i2r
    else:
        i2 = dd2

    if 0:
        p_init = models.Chebyshev1D(degree=6, window=[0, 2047])
        fit_p = fitting.LinearLSQFitter()
        p = fit_p(p_init, x[i1:i2][mmm[i1:i2]], s[i1:i2][mmm[i1:i2]])

    if 1:
        # t= np.linspace(x[i1]+10, x[i2-1]-10, 10)
        # p = LSQUnivariateSpline(x[i1:i2],
        #                         s[i1:i2],
        #                         t, bbox=[0, 2047])

        # t= np.concatenate([[x[1],x[i1-5],x[i1],x[i1+5]],
        #                    np.linspace(x[i1]+10, x[i2-1]-10, 10),
        #                    [x[i2-5], x[i2], x[i2+5],x[-2]]])

        t = np.concatenate([[x[1], x[i1]],
                            np.linspace(x[i1] + 10, x[i2 - 1] - 10, 10),
                            [x[i2], x[-2]]])

        p = LSQUnivariateSpline(x, s0, t, bbox=[0, 2047])

    return p
Example #21
 def __init__(self,t,x,knot_spacing=1/30,fit_type='LSQ',**kwargs):
     """
     Simple extension of UnivariateSpline to handle multidimensional data.
     
     Params:
     -------
     t (ndarray)
     x (ndarray)
         If 2d, then (n_samples,n_dim).
     **kwargs
     """
     assert x.ndim==2
     self.splines = []
     self.knot_spacing = knot_spacing
     
     if fit_type=='LSQ':
         for i in range(x.shape[1]):
             self.splines.append(LSQUnivariateSpline(t,x[:,i],
                                     np.linspace(t[1],t[-2],int(np.floor(t[-2]-t[1])/knot_spacing)),
                                     **kwargs))
     elif fit_type=='Uni':
         for i in range(x.shape[1]):
             self.splines.append(UnivariateSpline(t,x[:,i],**kwargs))
     else:
         raise NotImplementedError
Example #22
def spline_through_data(x,
                        y,
                        k=2,
                        grace_intv=1000.,
                        smoothing_factor=0.001,
                        downsample=10,
                        fixed_control_points=None):
    """Pass a spline through the data

    Examples
    --------
    >>> x = np.arange(1000)
    >>> y = np.random.normal(x * 0.1, 0.01)
    >>> fun = spline_through_data(x, y, grace_intv=10.)
    >>> np.std(y - fun(x)) < 0.01
    True
    """
    lo_lim, hi_lim = x[0], x[-1]

    control_points = \
        np.linspace(lo_lim + 2 * grace_intv, hi_lim - 2 * grace_intv,
                    x.size // downsample)
    if fixed_control_points is not None and len(fixed_control_points) > 0:
        print(f'Adding control points: {fixed_control_points}')
        control_points = np.sort(
            np.concatenate((control_points, fixed_control_points)))

    detrend_fun = LSQUnivariateSpline(x,
                                      y,
                                      t=control_points,
                                      k=k,
                                      bbox=[lo_lim, hi_lim])

    return detrend_fun
Example #23
    def detrend_bin_lightcurve(self,value=3*720+1):
        """
        Pad the array by 720 bins in each direction.
        I might need to have multiple layers of detrending.
        """
        
        time_bin   = np.arange(self.data_pts+2*value)-value
        signal_bin = np.concatenate([np.ones(value)*self.signal_bin[0],
                                     self.signal_bin,
                                     np.ones(value)*self.signal_bin[-1]])
        
        tarray = (np.arange(int(len(time_bin)/value))+1)*value+time_bin[0]

        tarray = tarray[:-1] # the last knot is usually too small and freaks out
        
        exclude = 90
        lower_threshold = np.percentile(signal_bin,50-exclude/2.)
        upper_threshold = np.percentile(signal_bin,50+exclude/2.)
        
        truncated = signal_bin.copy()
        truncated[(signal_bin > upper_threshold)] = upper_threshold
        truncated[(signal_bin < lower_threshold)] = lower_threshold            
        
        sp = LSQUnivariateSpline(time_bin, truncated,t=tarray)
        #sp.set_smoothing_factor(10)

        self.lc_filter = sp(time_bin)[value:-value]
        
        self.signal_bin_detrended = self.signal_bin-self.lc_filter+self.Normalization
        self.signal_bin_cleaned = self.signal_cleaned-self.lc_filter+self.Normalization
Example #24
def robust_poly(x,y,polyord,sigreject=3.0,iteration=3,useSpline=False,knots=None,sSpline=None):
    finitep = np.isfinite(y) & np.isfinite(x)
    goodp = finitep ## Start with the finite points
    for iter in range(iteration):
        if np.sum(goodp) < polyord:
            warntext = "Less than "+str(polyord)+"points accepted, returning flat line"
            warnings.warn(warntext)
            coeff = np.zeros(polyord)
            coeff[0] = 1.0
        else:
            if useSpline == True:
                if knots is None:
                    spl = UnivariateSpline(x[goodp], y[goodp], k=polyord, s=sSpline)
                else:
                    spl = LSQUnivariateSpline(x[goodp], y[goodp], knots, k=polyord)
                ymod = spl(x)
            else:
                coeff = np.polyfit(x[goodp],y[goodp],polyord)
                yPoly = np.poly1d(coeff)
                ymod = yPoly(x)
            
            resid = np.abs(ymod - y)
            madev = np.nanmedian(np.abs(resid - np.nanmedian(resid)))
            goodp = (np.abs(resid) < (sigreject * madev))
    
    if useSpline == True:
        return spl
    else:
        return coeff
Example #25
def IRLSSpline(time, flux, error, Q=400.0, ksep=0.07, numpass=5, order=3):
    '''
    IRLS = Iterative Re-weight Least Squares

    Parameters
    ----------
    time
    flux
    error
    Q
    ksep
    numpass
    order

    Returns
    -------

    '''
    weight = 1. / (error**2.0)

    knots = np.arange(min(time) + ksep, max(time) - ksep, ksep)

    for k in range(numpass):
        spl = LSQUnivariateSpline(time, flux, knots, w=weight, k=order)
        # spl = UnivariateSpline(time, flux, w=weight, k=order, s=1)

        chisq = ((flux - spl(time))**2.) / (error**2.0)

        weight = Q / ((error**2.0) * (chisq + Q))

    return spl(time)
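An illustrative call on a synthetic light curve (all numbers made up; time has to span more than 2*ksep so that at least one knot exists):

import numpy as np

time = np.linspace(0.0, 1.0, 500)
flux = 1.0 + 0.02 * np.sin(10 * time) + 0.005 * np.random.randn(time.size)
error = np.full_like(time, 0.005)

smooth_flux = IRLSSpline(time, flux, error, Q=400.0, ksep=0.07)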
Example #26
def fit(x, y, errors=None):
    # Sort
    ind = np.argsort(x)
    x = x[ind]
    y = y[ind]

    # Fill in errors
    if errors is None:
        errors = errorEstimate(x, y)


    # print(np.vstack((x,y,errors)).T)

    # We learn a global multiplicative factor on top of the errors
    # using cross-validation. This lets us set the cutoff to the most
    # computationally efficient value and removes a degree of freedom.
    cutoff = np.log(0.5)

    alphas = 10**np.linspace(-3., 3., num=21, endpoint=True)

    best = [1e100, alphas[len(alphas) // 2]]

    for a in alphas:
        # sub-sample for cross-validation
        inds = np.random.choice(len(x), size=len(x) // 5, replace=False)
        xS = x[inds]
        yS = y[inds]
        errorsA = a * errors[inds]
        inds = np.argsort(xS)
        xS = xS[inds]
        yS = yS[inds]
        errorsA = errorsA[inds]

        knots, knotLocs = rawFit(xS, yS, errorsA)
        interp = LSQUnivariateSpline(xS, yS, knotLocs, k=1, w=1 / errorsA**2)
        resid = np.sum((interp(x) - y)**2)
        #		print(best,a,resid)
        if resid < best[0]:
            best[0] = resid
            best[1] = a

    knots, knotLocs = rawFit(x, y, errors * best[1])
    interp = LSQUnivariateSpline(x, y, knotLocs, k=1, w=1 / errors**2)

    print('Final Answer:', len(knots))

    return interp(x), (x, y, knots, knotLocs, best), range(len(x)), interp
Example #27
 def detrend_bin_lightcurve(self,value=3*48+1):
     
     #front,back = self.sector_seperator
     
     front = int((self.time_bin[-1]-self.time_bin[0])/2)
     back = front
     
     print(front)
     
     # detrend first orbit
     orbit1_time_bin   = np.arange(front+2*value)-value
     orbit1_signal_bin = self.signal_bin[:front]
     signal_bin = np.concatenate([np.ones(value)*orbit1_signal_bin[0],
                                  orbit1_signal_bin,
                                  np.ones(value)*orbit1_signal_bin[-1]])
     tarray = (np.arange(int(len(orbit1_time_bin)/value))+1)*value+orbit1_time_bin[0]
     tarray = tarray[:-1] # the last knot is usually too small and freaks out so remove
     lower_threshold = np.percentile(signal_bin,5)
     upper_threshold = np.percentile(signal_bin,95)
     truncated = signal_bin.copy()
     truncated[(signal_bin > upper_threshold)] = upper_threshold
     truncated[(signal_bin < lower_threshold)] = lower_threshold            
     
     sp = LSQUnivariateSpline(orbit1_time_bin, truncated,t=tarray)
     lc_filter1 = sp(orbit1_time_bin)[value:-value]
     
     # detrend second orbit
     orbit2_time_bin   = np.arange((self.data_pts-back)+2*value)-value
     orbit2_signal_bin = self.signal_bin[back:]
     signal_bin = np.concatenate([np.ones(value)*orbit2_signal_bin[0],
                                  orbit2_signal_bin,
                                  np.ones(value)*orbit2_signal_bin[-1]])
     tarray = (np.arange(int(len(orbit2_time_bin)/value))+1)*value+orbit2_time_bin[0]
     tarray = tarray[:-1] # the last knot is usually too small and freaks out so remove
     lower_threshold = np.percentile(signal_bin,5)
     upper_threshold = np.percentile(signal_bin,95)
     truncated = signal_bin.copy()
     truncated[(signal_bin > upper_threshold)] = upper_threshold
     truncated[(signal_bin < lower_threshold)] = lower_threshold            
     
     sp = LSQUnivariateSpline(orbit2_time_bin, truncated,t=tarray)
     lc_filter2 = sp(orbit2_time_bin)[value:-value]
     
     self.lc_filter = np.concatenate([lc_filter1,np.zeros(back-front),lc_filter2])
     self.signal_bin_detrended = self.signal_bin-self.lc_filter+self.Normalization
     self.signal_bin_cleaned = self.signal_cleaned-self.lc_filter+self.Normalization
Example #28
class dataSmoother:
    def __init__(self, file, knots=[]):
        self.filename = file
        with open(file, "r") as f:
            inData = genfromtxt(f, float, comments='#').T
            self.rho = inData[0]
            self.orig_data = inData[1]

        self.spline = LSQUnivariateSpline(self.rho, self.orig_data, knots, k=5)

    def plot_orig(self):
        fig = self._plot_base()
        fig.scatter(self.rho, self.orig_data, color='red')
        plt.show()
        return fig

    def plot(self):
        fig = self._plot_base()

        fig.scatter(self.rho, self.orig_data, color='red')
        fig.plot(self.rho, self.spline(self.rho), color='black')
        fig.scatter(self.rho, self.spline(self.rho), color='green', marker="x")

        fig.legend(["Spline", "Original", "Cleaned"])

        plt.show()
        return fig

    def save(self):
        answer = ""
        while answer not in ["y", "n"]:
            answer = input("OK to continue [Y/N]? ").lower()
        print("Backing up old file")
        os.rename(self.filename, self.filename + '.bak')
        contents = np.array([self.rho, self.spline(self.rho)]).T
        savetxt(self.filename + "_clean", contents, delimiter='    ')

    def get_spline(self):
        return self.spline

    def set_knots(self, knots):
        self.spline = LSQUnivariateSpline(self.rho, self.orig_data, knots, k=5)
        print("knots set")

    def _plot_base(self):
        plot = plt.figure()
        fig = plot.add_subplot(111)
        fig.set_xlabel(r'$\rho$', fontsize=30)
        fig.set_ylabel(r'Data', fontsize=30)
        plt.xticks(fontsize=20)
        plt.yticks(fontsize=20)

        return fig

    def plot_derivative(self):
        fig = self._plot_base()
        fig.scatter(self.rho, self.spline.derivative()(self.rho), color="blue")
        return fig
Example #29
def spline_fit(q, G0, k=3, knots=None,num=None):
    if num is None:
        num = min(150,q.size//(k)-1)
    if (knots is None):
        step = q.size//num
        assert step > 0
        knots = q[1:-1:step]
    spline_G0 = LSQUnivariateSpline(x=q, y=G0,t=knots, k=k)
    return spline_G0
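A minimal use of spline_fit on made-up data (q is assumed sorted and strictly increasing):

import numpy as np

q = np.linspace(0.1, 10.0, 600)
G0 = np.exp(-q / 3.0) + 0.01 * np.random.randn(q.size)

spline_G0 = spline_fit(q, G0, k=3)
G0_smooth = spline_G0(q)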
Example #30
def IRLSSpline(time,
               flux,
               error,
               Q=400.0,
               ksep=0.07,
               numpass=5,
               order=3,
               debug=False):
    '''
    IRLS = Iterative Re-weight Least Squares

    Parameters
    ----------
    time
    flux
    error
    Q
    ksep
    numpass
    order

    Returns
    -------

    '''
    weight = 1. / (error**2.0)

    knots = np.arange(np.nanmin(time) + ksep, np.nanmax(time) - ksep, ksep)

    if debug is True:
        print('IRLSSpline: knots: ', np.shape(knots))
        print('IRLSSpline: time: ', np.shape(time), np.nanmin(time), time[0],
              np.nanmax(time), time[-1])
        print('IRLSSpline: <weight> = ', np.mean(weight))
        print(np.where((time[1:] - time[:-1] < 0))[0])
        print(flux)

        # plt.figure()
        # plt.errorbar(time, flux, error)
        # plt.scatter(knots, knots*0. + np.median(flux))
        # plt.show()

    for k in range(numpass):
        spl = LSQUnivariateSpline(time,
                                  flux,
                                  knots,
                                  k=order,
                                  check_finite=True,
                                  w=weight)
        # spl = UnivariateSpline(time, flux, w=weight, k=order, s=1)

        chisq = ((flux - spl(time))**2.) / (error**2.0)

        weight = Q / ((error**2.0) * (chisq + Q))

    return spl(time)
Example #31
    def update_normal_vector(self):
        """
        update the constraint by approximating the
        loglikelihood hypersurface as a spline in
        each dimension.
        This is an approximation which
        improves as the algorithm proceeds
        """
        n = self.ensemble[0].dimension
        tracers_array = np.zeros((len(self.ensemble), n))
        for i, samp in enumerate(self.ensemble):
            tracers_array[i, :] = samp.values
        V_vals = np.atleast_1d([p.logL for p in self.ensemble])

        self.normal = []
        for i, x in enumerate(tracers_array.T):
            # sort the values
            #            self.normal.append(lambda x: -x)
            idx = x.argsort()
            xs = x[idx]
            Vs = V_vals[idx]
            # remove potential duplicate entries
            xs, ids = np.unique(xs, return_index=True)
            Vs = Vs[ids]
            # pick only finite values
            idx = np.isfinite(Vs)
            Vs = Vs[idx]
            xs = xs[idx]
            # filter to within the 90% range of the Pvals
            Vl, Vh = np.percentile(Vs, [5, 95])
            (idx, ) = np.where(np.logical_and(Vs > Vl, Vs < Vh))
            Vs = Vs[idx]
            xs = xs[idx]
            # Pick knots for this parameter: Choose 5 knots between
            # the 1st and 99th percentiles (heuristic tuning WDP)
            knots = np.percentile(xs, np.linspace(1, 99, 5))
            # Guesstimate the length scale for numerical derivatives
            dimwidth = knots[-1] - knots[0]
            delta = 0.1 * dimwidth / len(idx)
            # Apply a Savitzky-Golay filter to the likelihoods (low-pass filter)
            window_length = len(idx) // 2 + 1  # Window for Savitzky-Golay filter
            if window_length % 2 == 0: window_length += 1
            f = savgol_filter(
                Vs,
                window_length,
                5,  # Order of polynomial filter
                deriv=1,  # Take first derivative
                delta=delta,  # delta for numerical deriv
                mode='mirror'  # Reflective boundary conds.
            )
            # construct a LSQ spline interpolant
            self.normal.append(LSQUnivariateSpline(xs, f, knots, ext=3, k=3))
            if self.DEBUG:
                np.savetxt('dlogL_spline_%d.txt' % i,
                           np.column_stack((xs, Vs, self.normal[-1](xs), f)))
Example #32
def open_coordinate_figure(request):
    selectedVal = request.GET['selectedVal']
    byGeoLat = request.GET['byGeoLat'] == 'true'

    if (byGeoLat):
        df = pd.read_sql(
            'select "StationName", "Latitude", "' + selectedVal +
            '" from "world_station_supermag" order by "Latitude"', connection)
        indexNames = df[df[selectedVal] > 0.7].index
        df.drop(indexNames, inplace=True)
        df = df.sort_values('Latitude')

        x = df["Latitude"]
    else:
        df = pd.read_sql(
            'select "StationName", "mlatitude", "' + selectedVal +
            '" from "world_station_supermag" order by "mlatitude"', connection)
        indexNames = df[df[selectedVal] > 0.7].index
        df.drop(indexNames, inplace=True)
        df = df.sort_values('mlatitude')

        x = df["mlatitude"]

    y = df[selectedVal]
    # f2 = interp1d(x, y, kind='cubic')

    # plt.grid(True,linestyle='dashed')
    # interpolatedOutputList, lnspc = InterpolateData(df, selectedVal, 0)
    plt.xticks(np.arange(-90, 90, 10.0))
    # plt.plot(x, y, 'ro')
    # t = np.linspace(-70, 70, 7)
    # t = np.linspace(-70, 70, 5)
    t = np.linspace(-70, 70, 5)
    spl = LSQUnivariateSpline(np.array(x), np.array(y), t[1:-1])
    xs = np.linspace(-90, 90, 1000)
    plt.plot(x, y, 'ro', ms=5)
    plt.plot(xs, spl(xs), 'g-', lw=3)
    # xnew = np.linspace(-90, 90, num=115, endpoint=True)
    # for k in (1,2,3):  # line parabola cubicspline
    #   extrapolator = UnivariateSpline( x, y, k=k )
    #   y = extrapolator(xnew)
    #   label = "k=%d" % k
    #   # print(label, y)
    #   plt.plot( xnew, y, label=label)  # pylab

    fig = plt.gcf()
    # fig.set_size_inches(18.5, 10.5)
    FigureCanvasAgg(fig)
    buf = io.BytesIO()
    plt.savefig(buf, format='png')
    plt.close(fig)
    response = HttpResponse(buf.getvalue(), content_type="image/png")

    response[
        'Content-Disposition'] = 'attachment; filename="Result' + selectedVal + '.png"'
    return response
Example #33
def _gufunc_unispline_noerr_upscale(x,
                                    y,
                                    num_knots,
                                    ix,
                                    out=None):  # pragma: no cover
    xi = x.min()
    xf = x.max()
    t = np.linspace(xi, xf, num_knots)[1:-1]
    fn_interp = LSQUnivariateSpline(x, y, t=t)
    out[:] = fn_interp(ix)
Example #34
    def from_knots_coeffs(knots, coeffs, k=3):
        n = len(knots) + 2*k

        t = np.empty(n, dtype="d")
        t[:k] = knots[0]
        t[k:-k] = knots
        t[-k:] = knots[-1]

        c = np.zeros(n, dtype="d")
        c[:len(coeffs)] = coeffs

        return LSQUnivariateSpline._from_tck((t, c, k))
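A round-trip sketch, assuming from_knots_coeffs is exposed as a @staticmethod or module-level helper: fit a spline, pull out its distinct knots and coefficients, rebuild it, and compare evaluations.

import numpy as np
from scipy.interpolate import LSQUnivariateSpline

x = np.linspace(0.0, 1.0, 100)
y = np.cos(4 * x)
interior = np.linspace(0.1, 0.9, 9)

spl = LSQUnivariateSpline(x, y, interior, k=3)
rebuilt = from_knots_coeffs(spl.get_knots(), spl.get_coeffs(), k=3)

assert np.allclose(spl(x), rebuilt(x))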
Example #36
    def fit_cubic(self):
        """Fit (x,y) arrays using a cubic Spline approximation.
           It uses LSQUnivariateSpline from scipy.interpolate
           which uses order-1 knots.
           

        """

        x = self.x
        y = self.y
        xmin,xmax = x.min(),x.max()
        npieces = self.order

        rn = (xmax - xmin)/npieces 
        tn = [(xmin+rn*k) for k in range(1,npieces)]
        zu = LSQUnivariateSpline(x,y,tn)
        
        self.coeff = zu.get_coeffs()
        self.fx = lambda x: x
        self.evfunc = lambda p,x: zu(x)
        self.ier = None
       
        return
Example #37
def deserialize(data):
    """Robert's deserialization code."""
    self = LSQUnivariateSpline.__new__(LSQUnivariateSpline)
    self._data = data
    self._reset_class()
    return self
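A matching round-trip sketch; like the function above, it leans on SciPy's private _data tuple, so treat it as version-dependent:

import numpy as np
from scipy.interpolate import LSQUnivariateSpline

x = np.linspace(0.0, 1.0, 50)
y = x ** 2
spl = LSQUnivariateSpline(x, y, [0.25, 0.5, 0.75], k=3)

restored = deserialize(spl._data)   # rebuild from the private state tuple
assert np.allclose(spl.get_coeffs(), restored.get_coeffs())
# Note: on newer SciPy, restored.ext may also need to be set before restored(x) is called.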
Example #38
    coeffs_list = []
    for x, y in r["cent_bottom_list"]:
        msk = ~y.mask & np.isfinite(y.data)
        x1, y1 = x[msk], y.data[msk]
        xmin = x1.min()
        xmax = x1.max()

        # i1 = max(np.searchsorted(t, xmin)-1, 0)
        # i2 = np.searchsorted(t, xmax)

        m = (xmin <= t) & (t <= xmax)
        m1 = ni.maximum_filter1d(m, 3)
        t1 = t[m1]

        spl = LSQUnivariateSpline(x1, y1,
                                  t1[1:-1], bbox = [t1[0], t1[-1]])

        # A second fit against "t2" appears to be leftover code; t2 is not
        # defined in this snippet, so it is kept commented out here.
        # spl = LSQUnivariateSpline(x1, y1,
        #                           t2[1:-3], bbox = [t2[0], t2[-3]])

        coeffs = np.empty(len(t)+2, dtype="f")
        coeffs.fill(np.nan)

        if np.all(m1):
            coeffs[:] = spl.get_coeffs()
        else:
            coeffs[1:-1][m1] = spl.get_coeffs()[1:-1]

        coeffs_list.append([coeffs])
    print(len(spl.get_knots()), len(spl.get_coeffs()))
        plot(x1, y1)