Example #1
 def test_shapes_scalarvalue(self):
     P = BarycentricInterpolator(self.xs,self.ys)
     assert_array_equal(np.shape(P(0)), ())
     assert_array_equal(np.shape(P(np.array(0))), ())
     assert_array_equal(np.shape(P([0])), (1,))
     assert_array_equal(np.shape(P([0,1])), (2,))
Example #2
 def test_shapes_vectorvalue(self):
     P = BarycentricInterpolator(self.xs,np.outer(self.ys,np.arange(3)))
     assert_array_equal(np.shape(P(0)), (3,))
     assert_array_equal(np.shape(P([0])), (1,3))
     assert_array_equal(np.shape(P([0,1])), (2,3))
Example #3
 def test_delayed(self):
     P = BarycentricInterpolator(self.xs)
     P.set_yi(self.ys)
     assert_almost_equal(self.true_poly(self.test_xs),P(self.test_xs))
Example #4
 def test_append(self):
     P = BarycentricInterpolator(self.xs[:3],self.ys[:3])
     P.add_xi(self.xs[3:],self.ys[3:])
     assert_almost_equal(self.true_poly(self.test_xs),P(self.test_xs))
Example #5
 def test_lagrange(self):
     P = BarycentricInterpolator(self.xs,self.ys)
     assert_almost_equal(self.true_poly(self.test_xs),P(self.test_xs))
Example #6
 def test_scalar(self):
     P = BarycentricInterpolator(self.xs,self.ys)
     assert_almost_equal(self.true_poly(7),P(7))
     assert_almost_equal(self.true_poly(np.array(7)),P(np.array(7)))
Example #7
 def test_shapes_1d_vectorvalue(self):
     P = BarycentricInterpolator(self.xs, np.outer(self.ys, [1]))
     assert_array_equal(np.shape(P(0)), (1, ))
     assert_array_equal(np.shape(P([0])), (1, 1))
     assert_array_equal(np.shape(P([0, 1])), (2, 1))
Example #8
 def test_wrapper(self):
     P = BarycentricInterpolator(self.xs, self.ys)
     values = barycentric_interpolate(self.xs, self.ys, self.test_xs)
     assert_almost_equal(P(self.test_xs), values)
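The test methods in Examples #1-#8 assume a fixture that defines self.xs, self.ys, self.true_poly, and self.test_xs. A minimal sketch of such a setup (illustrative values, not the actual SciPy test fixture):

import numpy as np
from scipy.interpolate import BarycentricInterpolator, barycentric_interpolate


class TestBarycentric:
    def setup_method(self):
        # Hypothetical fixture: a known cubic sampled on five nodes, so the
        # degree-4 interpolant through them reproduces it exactly.
        self.true_poly = np.poly1d([-2.0, 3.0, 1.0, 5.0])
        self.test_xs = np.linspace(-1.0, 1.0, 100)
        self.xs = np.linspace(-1.0, 1.0, 5)
        self.ys = self.true_poly(self.xs)

    # ... the test_* methods shown above attach here ...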
Example #9
def xgridFromAnchors(nGrid, p):
    anchors = p[6:]
    pivots = weighAnchor(nGrid, anchors)
    x = BarycentricInterpolator(pivots, anchors)(arange(nGrid))
    return x
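weighAnchor and the parameter layout p[6:] are project-specific and not shown here. The interpolation step itself reduces to evaluating a polynomial through the (pivot, anchor) pairs at every grid index; a standalone sketch with made-up anchors and evenly spaced pivots:

import numpy as np
from scipy.interpolate import BarycentricInterpolator

n_grid = 100
anchors = np.array([0.0, 0.4, 1.3, 2.0])            # hypothetical anchor values
pivots = np.linspace(0, n_grid - 1, anchors.size)   # stand-in for weighAnchor()

# Polynomial through the (pivot, anchor) pairs, evaluated at every grid index
x = BarycentricInterpolator(pivots, anchors)(np.arange(n_grid))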
Example #10
    # y = norm.pdf(x, loc=mu, scale=sigma)
    # plt.plot(x, y, 'r--', x, y, 'ro', linewidth=2, markersize=4)
    # plt.grid()
    # plt.show()

    # 6.5 Interpolation
    x = np.random.poisson(lam=5, size=10000)
    # print(x)
    pillar = 15
    a = plt.hist(x, bins=pillar, density=True, range=[0, pillar], color='g', alpha=0.5)

    rv = poisson(5)
    x1 = a[1]
    # probability mass function
    y1 = rv.pmf(x1)
    itp = BarycentricInterpolator(x1, y1)  # barycentric interpolation
    x2 = np.linspace(x.min(), x.max(), 50)
    y2 = itp(x2)
    cs = scipy.interpolate.CubicSpline(x1, y1)  # cubic spline interpolation
    plt.plot(x2, cs(x2), 'm--', linewidth=5, label='CubicSpline')  # cubic spline
    plt.plot(x2, y2, 'g-', linewidth=3,
             label='BarycentricInterpolator')  # barycentric interpolation
    plt.plot(x1, y1, 'r-', linewidth=1, label='Actual Value')  # original values
    plt.legend(loc='upper right')
    plt.grid()
    plt.show()

    # 7. Plot a 3D surface
    # x, y = np.ogrid[-3:3:100j, -3:3:100j]
    # # u = np.linspace(-3, 3, 101)
    # # x, y = np.meshgrid(u, u)
Example #11
def nonlinear_minimal_area_surface_of_revolution():
    l_bc, r_bc = 1., 7.
    N = 80
    D, x = cheb_vectorized(N)
    M = np.dot(D, D)
    guess = 1. + (x - -1.) * ((r_bc - l_bc) / 2.)
    N2 = 50

    def pseudospectral_ode(y):
        out = np.zeros(y.shape)
        yp, ypp = D.dot(y), M.dot(y)
        out = y * ypp - 1. - yp**2.
        out[0], out[-1] = y[0] - r_bc, y[-1] - l_bc
        return out

    u = root(pseudospectral_ode, guess, method='lm', tol=1e-9)
    num_sol = BarycentricInterpolator(x, u.x)

    # Up to this point we have found the numerical solution
    # using the pseudospectral method. In the code that follows
    # we check that solution with the analytic solution,
    # and graph the results

    def f(x):
        return np.array([
            x[1] * np.cosh((-1. + x[0]) / x[1]) - l_bc, x[1] * np.cosh(
                (1. + x[0]) / x[1]) - r_bc
        ])

    parameters = root(f, np.array([1., 1.]), method='lm', tol=1e-9)
    A, B = parameters.x[0], parameters.x[1]

    def analytic_solution(x):
        out = B * np.cosh((x + A) / B)
        return out

    xx = np.linspace(-1, 1, N2)
    uu = num_sol(xx)
    # print("Max error is ", np.max(np.abs(uu - analytic_solution(xx))))
    plt.plot(x, guess, '-b')
    plt.plot(xx, uu, '-r')  # Numerical solution via
    # the pseudospectral method
    plt.plot(xx, analytic_solution(xx), '*k')  # Analytic solution
    plt.axis([-1., 1., l_bc - 1., r_bc + 1.])
    # plt.show()
    plt.clf()

    theta = np.linspace(0, 2 * np.pi, N2)
    X, Theta = np.meshgrid(xx, theta, indexing='ij')
    print "\nxx = \n", xx
    print "\nuu = \n", uu
    F = uu[:, np.newaxis] + np.zeros(uu.shape)
    print "\nX = \n", X
    print "\nTheta = \n", Theta
    print "\nF = \n", F
    Y = F * np.cos(Theta)
    Z = F * np.sin(Theta)

    fig = plt.figure()
    ax = fig.add_subplot(111, projection='3d')
    # X, Y, Z = axes3d.get_test_data(0.05)
    ax.plot_wireframe(X, Y, Z, rstride=1, cstride=1)
    print(ax.azim, ax.elev)
    ax.azim = -65
    ax.elev = 0
    # ax.view_init(elev=-60, azim=30)
    # plt.savefig('minimal_surface.pdf')
    plt.show()
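cheb_vectorized is an external helper that returns the Chebyshev differentiation matrix and the Chebyshev points on [-1, 1]. The original is not shown; the standard construction from Trefethen's Spectral Methods in MATLAB is a reasonable stand-in:

import numpy as np


def cheb(N):
    # Chebyshev differentiation matrix D and nodes x on [-1, 1]
    # (Trefethen, Spectral Methods in MATLAB, ch. 6); assumed to match
    # what cheb_vectorized returns.
    if N == 0:
        return np.zeros((1, 1)), np.array([1.0])
    x = np.cos(np.pi * np.arange(N + 1) / N)
    c = np.hstack([2.0, np.ones(N - 1), 2.0]) * (-1.0) ** np.arange(N + 1)
    X = np.tile(x, (N + 1, 1)).T
    dX = X - X.T
    D = np.outer(c, 1.0 / c) / (dX + np.eye(N + 1))  # off-diagonal entries
    D -= np.diag(D.sum(axis=1))                      # diagonal = negative row sums
    return D, x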
Example #12
def interpolation_matrix_1d(fine_grid,
                            coarse_grid,
                            k=2,
                            periodic=False,
                            pad=1,
                            equidist_nested=True):
    """
    Function to construct the restriction matrix in 1d using barycentric interpolation

    Args:
        fine_grid (np.ndarray): a 1d array containing the nodes of the fine grid
        coarse_grid (np.ndarray): a 1d array containing the nodes of the coarse grid
        k (int): order of the restriction
        periodic (bool): flag to indicate periodicity
        pad (int): padding parameter for boundaries
        equidist_nested (bool): shortcut possible, if nodes are equidistant and nested

    Returns:
         sprs.csc_matrix: interpolation matrix
    """

    n_f = fine_grid.size

    if periodic:

        M = np.zeros((fine_grid.size, coarse_grid.size))

        if equidist_nested:

            for i, p in zip(range(n_f), fine_grid):

                if i % 2 == 0:
                    M[i, int(i / 2)] = 1.0
                else:

                    nn = []
                    cpos = int(i / 2)
                    offset = int(k / 2)
                    for j in range(k):
                        nn.append(cpos - offset + 1 + j)
                        if nn[-1] < 0:
                            nn[-1] += coarse_grid.size
                        elif nn[-1] > coarse_grid.size - 1:
                            nn[-1] -= coarse_grid.size
                    nn = sorted(nn)

                    circulating_one = np.asarray([1.0] + [0.0] * (k - 1))
                    cont_arr = continue_periodic_array(coarse_grid, nn)

                    if p > np.mean(fine_grid) and not (cont_arr[0] <= p <=
                                                       cont_arr[-1]):
                        cont_arr += 1

                    bary_pol = []
                    for l in range(k):
                        bary_pol.append(
                            BarycentricInterpolator(
                                cont_arr, np.roll(circulating_one, l)))
                    M[i, nn] = np.asarray(list(map(lambda x: x(p), bary_pol)))

        else:

            for i, p in zip(range(n_f), fine_grid):
                nn = next_neighbors_periodic(p, coarse_grid, k)
                circulating_one = np.asarray([1.0] + [0.0] * (k - 1))
                cont_arr = continue_periodic_array(coarse_grid, nn)

                if p > np.mean(fine_grid) and not (cont_arr[0] <= p <=
                                                   cont_arr[-1]):
                    cont_arr += 1

                bary_pol = []
                for l in range(k):
                    bary_pol.append(
                        BarycentricInterpolator(cont_arr,
                                                np.roll(circulating_one, l)))
                M[i, nn] = np.asarray(list(map(lambda x: x(p), bary_pol)))

    else:

        M = np.zeros((fine_grid.size, coarse_grid.size + 2 * pad))
        padded_c_grid = border_padding(coarse_grid, pad, pad)

        if equidist_nested:

            for i, p in zip(range(n_f), fine_grid):

                if i % 2 != 0:
                    M[i, int((i - 1) / 2) + 1] = 1.0
                else:
                    nn = []
                    cpos = int(i / 2)
                    offset = int(k / 2)
                    for j in range(k):
                        nn.append(cpos - offset + 1 + j)
                        if nn[-1] < 0:
                            nn[-1] += k
                        elif nn[-1] > coarse_grid.size + 1:
                            nn[-1] -= k
                    nn = sorted(nn)
                    # construct the lagrange polynomials for the k neighbors
                    circulating_one = np.asarray([1.0] + [0.0] * (k - 1))
                    bary_pol = []
                    for l in range(k):
                        bary_pol.append(
                            BarycentricInterpolator(
                                padded_c_grid[nn], np.roll(circulating_one,
                                                           l)))
                    M[i, nn] = np.asarray(list(map(lambda x: x(p), bary_pol)))

        else:

            for i, p in zip(range(n_f), fine_grid):
                nn = next_neighbors(p, padded_c_grid, k)
                # construct the lagrange polynomials for the k neighbors
                circulating_one = np.asarray([1.0] + [0.0] * (k - 1))
                bary_pol = []
                for l in range(k):
                    bary_pol.append(
                        BarycentricInterpolator(padded_c_grid[nn],
                                                np.roll(circulating_one, l)))
                M[i, nn] = np.asarray(list(map(lambda x: x(p), bary_pol)))

        if pad > 0:
            M = M[:, pad:-pad]

    return sprs.csc_matrix(M)
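The core device in interpolation_matrix_1d is the rolled unit vector: interpolating e_l over the k stencil nodes gives the l-th Lagrange cardinal polynomial, and its value at the fine-grid point p is the corresponding matrix entry. A self-contained sketch with made-up nodes (next_neighbors, border_padding, continue_periodic_array and the sprs alias belong to the surrounding project and are not reproduced):

import numpy as np
from scipy.interpolate import BarycentricInterpolator

nodes = np.array([0.0, 1.0, 2.0, 3.0])   # hypothetical coarse stencil
p = 1.25                                 # fine-grid point
k = nodes.size

unit = np.asarray([1.0] + [0.0] * (k - 1))
weights = np.array(
    [BarycentricInterpolator(nodes, np.roll(unit, l))(p) for l in range(k)])

# The weights reproduce any polynomial of degree < k sampled on the stencil
f = lambda t: 2.0 - t + 0.5 * t ** 2
print(weights @ f(nodes), f(p))          # both 1.53125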
Example #13
npts = 7
noiseAmp = 0.03
xdata = np.linspace(0.2, 10., npts) + 0.3*np.random.randn(npts)
xdata = np.sort(xdata)
ydata = f(xdata) * (1.0 + noiseAmp * np.random.randn(npts))
np.savetxt('logCurveData9.txt', list(zip(xdata, ydata)), fmt='%10.2f')

# Create x array spanning data set plus 5%
xmin, xmax = frangeAdd(xdata, 0.05)
x = np.linspace(xmin, xmax, 200)

# Plot data and "fit"
plt.plot(xdata, ydata, 'or', label='data')
plt.plot(x, f(x), 'k-', label='fitting function')

# Create y array from cubic spline
f_cubic = interp1d(xdata, ydata, kind='cubic', bounds_error=False)
plt.plot(x, f_cubic(x), 'k-', label='cubic spline')

# Create y array from univariate spline
f_univar = UnivariateSpline(xdata, ydata, w=None, bbox=[xmin, xmax], k=3)
plt.plot(x, f_univar(x), label='univariate spline')

# Create y array from barycentric interpolation
f_bary = BarycentricInterpolator(xdata, ydata)
plt.plot(x, f_bary(x), label='barycentric interp')

plt.legend(loc='best')

plt.show()
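This snippet depends on two names defined elsewhere: the fitting function f and frangeAdd, which widens the data range by a given fraction. Plausible stand-ins (both are assumptions, chosen to match the 'logCurveData' file name and the "plus 5%" comment):

import numpy as np


def f(x):
    # Hypothetical fitting function, consistent with the logCurveData file name
    return np.log(x) + 1.0


def frangeAdd(data, frac):
    # Widen the [min, max] range of the data by frac of its span on each side
    lo, hi = data.min(), data.max()
    pad = frac * (hi - lo)
    return lo - pad, hi + pad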
Example #14
    def shift_spec(event):
        #global transition_name
        global line_region
        global dwave
        global dflux_window_up
        global dflux_window_down
        ix, iy = event.xdata, event.ydata

        ##########################################################
        ##################### WINDOW CONTROL #####################
        ##########################################################
        if event.key == '}':
            line_region += big_shift
            pl.xlim(
                Utilities.zoom_region(line_region, dwave, dflux_window_up,
                                      dflux_window_down)[0])
        elif event.key == '{':
            line_region -= big_shift
            pl.xlim(
                Utilities.zoom_region(line_region, dwave, dflux_window_up,
                                      dflux_window_down)[0])
        elif event.key == ']':
            line_region += small_shift
            pl.xlim(
                Utilities.zoom_region(line_region, dwave, dflux_window_up,
                                      dflux_window_down)[0])
        elif event.key == '[':
            line_region -= small_shift
            pl.xlim(
                Utilities.zoom_region(line_region, dwave, dflux_window_up,
                                      dflux_window_down)[0])
        elif event.key == '-':
            dwave += zoom
            pl.xlim(
                Utilities.zoom_region(line_region, dwave, dflux_window_up,
                                      dflux_window_down)[0])
        elif event.key == '=':
            dwave -= zoom
            pl.xlim(
                Utilities.zoom_region(line_region, dwave, dflux_window_up,
                                      dflux_window_down)[0])
        elif event.key == '_':
            # shift -
            dwave += big_zoom
            pl.xlim(
                Utilities.zoom_region(line_region, dwave, dflux_window_up,
                                      dflux_window_down)[0])
        elif event.key == '+':
            dwave -= big_zoom
            pl.xlim(
                Utilities.zoom_region(line_region, dwave, dflux_window_up,
                                      dflux_window_down)[0])
        elif event.key == 'b':
            dflux_window_up += flux_zoom
            pl.ylim(
                Utilities.zoom_region(line_region, dwave, dflux_window_up,
                                      dflux_window_down)[1])
        elif event.key == 'B':
            dflux_window_up -= flux_zoom
            pl.ylim(
                Utilities.zoom_region(line_region, dwave, dflux_window_up,
                                      dflux_window_down)[1])
        elif event.key == 't':
            dflux_window_down -= flux_zoom
            pl.ylim(
                Utilities.zoom_region(line_region, dwave, dflux_window_up,
                                      dflux_window_down)[1])
        elif event.key == 'T':
            dflux_window_down += flux_zoom
            pl.ylim(
                Utilities.zoom_region(line_region, dwave, dflux_window_up,
                                      dflux_window_down)[1])
        elif event.key == 'm':
            dflux_window_up += Big_flux_zoom
            pl.ylim(
                Utilities.zoom_region(line_region, dwave, dflux_window_up,
                                      dflux_window_down)[1])
        elif event.key == 'M':
            dflux_window_up -= Big_flux_zoom
            pl.ylim(
                Utilities.zoom_region(line_region, dwave, dflux_window_up,
                                      dflux_window_down)[1])
        elif event.key == 'u':
            dflux_window_down -= Big_flux_zoom
            pl.ylim(
                Utilities.zoom_region(line_region, dwave, dflux_window_up,
                                      dflux_window_down)[1])
        elif event.key == 'U':
            dflux_window_down += Big_flux_zoom
            pl.ylim(
                Utilities.zoom_region(line_region, dwave, dflux_window_up,
                                      dflux_window_down)[1])
        elif event.key == 'r':
            dwave = 10
            dflux_window_up = 0.0
            dflux_window_down = 0.0
            line_region = np.median(wave)
            pl.xlim(
                Utilities.zoom_region(line_region, dwave, dflux_window_up,
                                      dflux_window_down)[0])
            pl.ylim(
                Utilities.zoom_region(line_region, dwave, dflux_window_up,
                                      dflux_window_down)[1])
        elif event.key == 'k':
            print('\n')
            Utilities.printLine()
            print('k	Show keys map (What is shown here)')

            Utilities.printLine()
            print('WINDOW CONTROL KEYS:')
            print('}		shift to right with 0.5 Angstrom')
            print('{		shift to left with 0.5 Angstrom')
            print(']		shift to right with 0.1 Angstrom')
            print('[		shift to left with 0.1 Angstrom')
            print('shift +/-	Zoom in/out by 0.5 Angstrom')
            print('+/-		Zoom in/out by 0.1 Angstrom')
            print('T/t		Zoom top by 1e-15')
            print('B/b		Zoom bottom by 1e-15')
            print('U/u		Zoom top by 5e-15')
            print('M/m		Zoom bottom by 5e-15')
            print('r		replot')

            Utilities.printLine()
            print('FITTING SPEC KEYS:')
            print('a		Add points')
            print('shift+g		Fit Gaussian')
            print('shift+c		Fit Continuum')
            print('shift+w		Write fits file and fort.13')
            print('w		Append guess (N,b,z) to fort.13')
            Utilities.printLine()

        ##########################################################
        ##################### Fitting Control ####################
        ##########################################################
        elif event.key == 'a':
            '''Add 2 points setting the boundary for fitting the data.'''
            if len(x_record) < 10:
                x_record.append(ix)
                y_record.append(iy)
            else:
                del x_record[:]
                del y_record[:]
                x_record.append(ix)
                y_record.append(iy)
        elif event.key == 'C':
            """
			spline interpolation of the points to get 
			the continuum 
			"""

            spl_cont = BarycentricInterpolator(x_record, y_record)

            x1 = np.min(x_record)
            x2 = np.max(x_record)
            edges = [x1, x2]
            temp_wave, temp_flux, temp_error = Utilities.Select_Data(
                spec, edges)
            cont_flux = spl_cont(temp_wave)

            new_continuum.set_xdata(temp_wave)
            new_continuum.set_ydata(cont_flux)

            normwave = temp_wave
            normflux = temp_flux / cont_flux
            #normdf = temp_error * (temp_flux / cont_flux)
            normdf = temp_error / (cont_flux)

            norm_flux_line.set_xdata(normwave)
            norm_flux_line.set_ydata(normflux)
            norm_dflux_line.set_ydata(normdf)
            norm_dflux_line.set_xdata(normwave)

        elif event.key == 'G':
            """
			Fit a gaussian for inspection
			"""
            if not x_record:
                print('No data selected to fit.')
                pass
            else:
                print('transition_name = ', transition_name)
                p1, p2 = np.transpose(np.array([x_record, y_record]))

                x1, y1 = p1
                x2, y2 = p2
                temp_spec = Utilities.Select_Data(spec, [x1, x2])
                estimated_cont_level = np.mean((y1, y2))

                gauss_params = Utilities.Fit_Gaussian(temp_spec,
                                                      estimated_cont_level)

                if gauss_params:
                    amplitude, centroid_wave, sigma_width = gauss_params

                    # Apparent column density
                    logN, dlogN = Utilities.ComputeAppColumn(
                        temp_spec, transition_name)

                    # Print out results of gaussian fit
                    Utilities.Print_LineInfo(gauss_params, logN,
                                             transition_name)

                    # Make the plot to show goodness of fit
                    gauss_flux = Utilities.Gaussian_function(
                        temp_spec[0], amplitude, centroid_wave, sigma_width)
                    gauss_wave = temp_spec[0]
                    gauss_fit.set_xdata(gauss_wave)
                    gauss_fit.set_ydata(gauss_flux + estimated_cont_level)

        elif event.key == 'W':
            """
			Fit and write continuum-normalized ion.fits with header
			Also write the filename and range of wavelength for fort.13
			"""

            spl_cont = BarycentricInterpolator(x_record, y_record)

            x1 = np.min(x_record)
            x2 = np.max(x_record)
            edges = [x1, x2]
            temp_wave, temp_flux, temp_error = Utilities.Select_Data(
                spec, edges)
            cont_flux = spl_cont(temp_wave)

            new_continuum.set_xdata(temp_wave)
            new_continuum.set_ydata(cont_flux)

            normwave = temp_wave
            normflux = temp_flux / cont_flux
            normdf = temp_error / cont_flux

            norm_flux_line.set_xdata(normwave)
            norm_flux_line.set_ydata(normflux)
            norm_dflux_line.set_ydata(normdf)
            norm_dflux_line.set_xdata(normwave)

            Write_NormSpec_ascii(normwave, normflux, normdf, transition_name)

        elif event.key == 'E':
            if not x_record:
                print('No data selected for calculation.')
                pass
            else:
                p1, p2 = np.transpose(np.array([x_record, y_record]))
                x1, y1 = p1
                x2, y2 = p2
                edges = [x1, x2]
                temp_spec = Utilities.Select_Data(spec, edges)
                W0 = Utilities.ComputeEquivWdith(p1, p2, temp_spec)

                print('W0 = %s mA' % W0)

        points_to_fit.set_xdata(x_record)
        points_to_fit.set_ydata(y_record)
        pl.draw()  # needed for instant response.
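Stripped of the matplotlib event plumbing, the 'C' and 'W' handlers fit a barycentric polynomial through the hand-picked (x_record, y_record) points and divide it out of the flux. A standalone sketch on synthetic data (the spectrum, anchor points, and line parameters below are invented for illustration):

import numpy as np
from scipy.interpolate import BarycentricInterpolator

# Synthetic spectrum: a sloped continuum with one absorption line at 4050 A
wave = np.linspace(4000.0, 4100.0, 500)
cont_true = 1e-14 * (1.0 + 0.001 * (wave - 4050.0))
flux = cont_true - 5e-15 * np.exp(-0.5 * ((wave - 4050.0) / 0.5) ** 2)

# Hand-picked continuum anchors away from the line (what x_record/y_record hold)
x_record = [4005.0, 4020.0, 4040.0, 4060.0, 4080.0, 4095.0]
y_record = np.interp(x_record, wave, flux)

cont_flux = BarycentricInterpolator(x_record, y_record)(wave)
norm_flux = flux / cont_flux            # continuum-normalized spectrum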
Example #15
#! /usr/bin/env python

from scipy.io import wavfile
from scipy.interpolate import BarycentricInterpolator
import damage, recognize, utils

sample_rate, samples = wavfile.read('songs/hakuna_matata.wav')
samples = samples[5000000:5000050]

newsamples = samples.copy()
damage.zerofill(newsamples, 0.3)

matches = recognize.cheat(samples, newsamples)
x, y = utils.tovalidxy(newsamples, matches)
f = BarycentricInterpolator(x, y)
utils.repair(newsamples, matches, f)

import matplotlib.pyplot as plt

plt.title('Lagrange interpolation')
plt.xlabel('Frame')
plt.ylabel('Amplitude')
plt.plot(samples, label='real')
plt.plot(newsamples, label='interpolated')
plt.legend(loc='best')
plt.show()
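damage, recognize, and utils are project-specific modules, so the script above is not runnable on its own. A self-contained sketch of the same repair idea on synthetic data, using a short local Lagrange stencil per missing frame rather than one global polynomial (a single high-degree interpolant over many equispaced samples tends to oscillate badly):

import numpy as np
from scipy.interpolate import BarycentricInterpolator

rng = np.random.default_rng(0)
samples = np.sin(np.linspace(0.0, 4.0 * np.pi, 50))        # stand-in waveform
damaged = samples.copy()
lost = rng.choice(samples.size, size=10, replace=False)    # frames zeroed out
damaged[lost] = 0.0

valid = np.setdiff1d(np.arange(samples.size), lost)
repaired = damaged.copy()
k = 6                                   # stencil: k nearest valid neighbours
for i in lost:
    stencil = valid[np.argsort(np.abs(valid - i))[:k]]
    repaired[i] = BarycentricInterpolator(stencil, damaged[stencil])(i)

print('max repair error:', np.max(np.abs(repaired - samples)))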