def approximate(porder, wpower, wtype, epsilon_multiplier, attempt_center_about_coord):
    tic = time.perf_counter()
    gmls_obj=pycompadre.GMLS(porder, input_dimensions, "QR", "STANDARD")
    gmls_obj.setWeightingParameter(wpower)
    gmls_obj.setWeightingType(wtype)
    gmls_helper = pycompadre.ParticleHelper(gmls_obj)

    gmls_helper.generateKDTree(XY_ravel)
    gmls_obj.addTargets(pycompadre.TargetOperation.ScalarPointEvaluation)
    
    gmls_helper.generateNeighborListsFromKNNSearchAndSet(XY_pred_ravel, porder, input_dimensions, epsilon_multiplier)
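    # generate alphas with a single batch and without keeping the polynomial coefficients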
    gmls_obj.generateAlphas(1, False)
    
    # helper function for applying the alphas
    Z_pred = gmls_helper.applyStencil(Z_ravel, pycompadre.TargetOperation.ScalarPointEvaluation)

    center_about_idx   = np.sum(np.abs(XY_pred_ravel - attempt_center_about_coord), axis=1).argmin()
    center_about_coord = XY_pred_ravel[center_about_idx]
    extra_sites_coords = np.copy(XY_pred_ravel)

    gmls_obj_2=pycompadre.GMLS(porder, input_dimensions, "QR", "STANDARD")
    gmls_obj_2.setWeightingParameter(wpower)
    gmls_obj_2.setWeightingType(wtype)
    gmls_helper_2 = pycompadre.ParticleHelper(gmls_obj_2)
    gmls_helper_2.generateKDTree(XY_ravel)
    gmls_obj_2.addTargets(pycompadre.TargetOperation.ScalarPointEvaluation)
    gmls_helper_2.generateNeighborListsFromKNNSearchAndSet(np.atleast_2d(XY_pred_ravel[center_about_idx]), porder, input_dimensions, epsilon_multiplier)

    extra_sites_idx = np.zeros(shape=(1,extra_sites_coords.shape[0]+1), dtype='i4')
    extra_sites_idx[0,0] = extra_sites_coords.shape[0]
    extra_sites_idx[0,1:] = np.arange(extra_sites_coords.shape[0])
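    # row layout: column 0 holds the number of additional evaluation sites,
    # columns 1: hold their row indices into extra_sites_coords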
    gmls_helper_2.setAdditionalEvaluationSitesData(extra_sites_idx, extra_sites_coords)

    gmls_obj_2.generateAlphas(1, False)
    
    # manually apply the alphas
    nl = gmls_helper_2.getNeighborLists()
    computed_answer = np.zeros(shape=(len(extra_sites_coords),), dtype='f8')
    sf = pycompadre.SamplingFunctionals['PointSample']

    colors = len(XY_ravel)*['black']
    for k in range(nl.getNumberOfNeighbors(0)):
        colors[nl.getNeighbor(0,k)] = 'red'
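    # evaluation-site index 0 is the target site itself, so the additional
    # evaluation sites start at index 1 (hence the j+1 below)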
    for j in range(extra_sites_idx[0,0]):
        computed_answer[j] = gmls_helper_2.applyStencilSingleTarget(Z_ravel, pycompadre.TargetOperation.ScalarPointEvaluation, sf, j+1)

    center_about_extra_idx   = np.sum(np.abs(extra_sites_coords - center_about_coord), axis=1).argmin()
    center_about_extra_coord = extra_sites_coords[center_about_extra_idx]
    del nl
    del gmls_obj
    del gmls_obj_2
    del gmls_helper
    del gmls_helper_2
    toc = time.perf_counter()
    PRINT_SOLVE_TIME and print("Solve GMLS in %0.6f seconds"%(toc-tic,))
    return (np.reshape(Z_pred, newshape=(len(x_pred), len(y_pred))), np.reshape(computed_answer, newshape=(len(x_pred), len(y_pred))), center_about_extra_idx, center_about_extra_coord, colors)
Example #2
def approximate(input_dimensions, porder, wpower, wtype, epsilon_multiplier,
                attempt_center_about_coord):

    global xy, xy_pred, z
    gmls_obj = pycompadre.GMLS(porder, input_dimensions, "QR", "MANIFOLD")
    gmls_obj.setWeightingParameter(wpower)
    gmls_obj.setWeightingType(wtype)
    gmls_helper = pycompadre.ParticleHelper(gmls_obj)
    gmls_helper.generateKDTree(xy)
    gmls_obj.addTargets(pycompadre.TargetOperation.ScalarPointEvaluation)

    # one less dimension because it is a manifold problem
    gmls_helper.generateNeighborListsFromKNNSearchAndSet(
        xy_pred, porder, input_dimensions - 1, epsilon_multiplier)
    gmls_obj.generateAlphas(1, False)

    # helper function for applying the alphas
    z_pred = gmls_helper.applyStencil(
        z, pycompadre.TargetOperation.ScalarPointEvaluation)

    # tests that setting and getting tangent bundle works
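    # (one input_dimensions x input_dimensions frame per target site; all-ones values here simply exercise the setter/getter)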
    gmls_helper.setTangentBundle(
        np.ones(shape=(xy_pred.shape[0], input_dimensions, input_dimensions),
                dtype='f8'))
    tb = gmls_helper.getTangentBundle()

    del gmls_obj
    del gmls_helper
    return z_pred
Example #3
    def test_square_qr_bugfix(self):

        source_sites = np.array([2.0, 3.0, 5.0, 6.0, 7.0], dtype='f8')
        source_sites = np.reshape(source_sites,
                                  newshape=(source_sites.size, 1))
        data = np.array([2.0, 3.0, 5.0, 6.0, 7.0], dtype='f8')

        polynomial_order = 1
        dim = 1

        gmls_obj = pycompadre.GMLS(polynomial_order, 1, "QR", "STANDARD")
        gmls_obj.addTargets(pycompadre.TargetOperation.ScalarPointEvaluation)
        gmls_obj.addTargets(
            pycompadre.TargetOperation.PartialXOfScalarPointEvaluation)

        gmls_helper = pycompadre.ParticleHelper(gmls_obj)
        gmls_helper.generateKDTree(source_sites)

        point = np.array([4.0], dtype='f8')
        target_site = np.reshape(point, newshape=(1, dim))

        gmls_helper.generateNeighborListsFromKNNSearchAndSet(
            target_site, polynomial_order, dim, 1.5)
        gmls_obj.generateAlphas(1, True)

        output = gmls_helper.applyStencilSingleTarget(
            data, pycompadre.TargetOperation.PartialXOfScalarPointEvaluation)

        del gmls_helper
        del gmls_obj

        self.assertAlmostEqual(output, 1.0, places=15)
Example #4
    def __init__(self,
                 source_sites,
                 polynomial_order,
                 weighting_power=2,
                 epsilon_multiplier=1.5):

        self.last_target_site = np.zeros((1, ))

        assert len(
            source_sites.shape
        ) == 2, "2D array must be given to GMLS for source_sites (#sites x spatial dimension)"
        self.input_dimensions = source_sites.shape[1]
        self.polynomial_order = polynomial_order

        # initialize GMLS reconstruction with the requested polynomial order and input dimensions
        self.gmls_obj = pycompadre.GMLS(polynomial_order,
                                        self.input_dimensions, "QR",
                                        "STANDARD")
        self.gmls_obj.setWeightingPower(weighting_power)
        self.weighting_power = weighting_power
        self.gmls_obj.setWeightingType("power")

        # neighbor search
        self.epsilon_multiplier = epsilon_multiplier
        self.gmls_helper = pycompadre.ParticleHelper(self.gmls_obj)
        self.gmls_helper.generateKDTree(source_sites)

        self.gmls_obj.addTargets(
            pycompadre.TargetOperation.ScalarPointEvaluation)
        self.gmls_obj.addTargets(
            pycompadre.TargetOperation.PartialXOfScalarPointEvaluation)
        self.gmls_obj.addTargets(
            pycompadre.TargetOperation.PartialYOfScalarPointEvaluation)
Example #5
def approximate(solver_type, porder, wpower0, wpower1, wtype, epsilon_multiplier, attempt_center_about_coord):
    gmls_obj=pycompadre.GMLS(porder, input_dimensions, solver_type, "STANDARD")
    gmls_obj.setWeightingParameter(wpower0,0)
    gmls_obj.setWeightingParameter(wpower1,1)
    gmls_obj.setWeightingType(wtype)
    gmls_helper = pycompadre.ParticleHelper(gmls_obj)
    gmls_helper.generateKDTree(np.atleast_2d(x).T)
    gmls_obj.addTargets(pycompadre.TargetOperation.ScalarPointEvaluation)
    
    gmls_helper.generateNeighborListsFromKNNSearchAndSet(np.atleast_2d(x_pred).T, porder, input_dimensions, epsilon_multiplier, 0.0, False, True)
    
    center_about_idx   = (np.abs(x_pred - attempt_center_about_coord)).argmin()
    center_about_coord = x_pred[center_about_idx]
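    # extra_sites_coords is presumably defined at module scope in the original script;
    # only the row for center_about_idx lists additional evaluation sites
    # (count in column 0, indices into extra_sites_coords in columns 1:)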
    extra_sites_idx    = np.zeros(shape=(len(x_pred),len(extra_sites_coords)+1), dtype='i4')
    extra_sites_idx[center_about_idx,0] = len(extra_sites_coords)
    extra_sites_idx[center_about_idx,1:] = np.arange(len(extra_sites_coords))
    gmls_helper.setAdditionalEvaluationSitesData(extra_sites_idx, extra_sites_coords)
    
    gmls_obj.generateAlphas(1, False)
    
    # helper function for applying the alphas
    y_pred = gmls_helper.applyStencil(np.atleast_2d(y).T, pycompadre.TargetOperation.ScalarPointEvaluation)
    
    # manually apply the alphas
    nl = gmls_helper.getNeighborLists()
    for i in range(len(x_pred)):
        if (i==center_about_idx):
            computed_answer = np.zeros(shape=(len(extra_sites_coords),), dtype='f8')
            colors = len(x)*['black']
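            # the last argument of getAlpha selects the evaluation site:
            # index 0 is the target itself, so the additional sites use j+1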
            for j in range(extra_sites_idx[i,0]):
                computed_answer[j] = 0.0
                for k in range(nl.getNumberOfNeighbors(i)):
                    colors[nl.getNeighbor(i,k)] = 'red'
                    computed_answer[j] += gmls_obj.getSolutionSet().getAlpha(pycompadre.TargetOperation.ScalarPointEvaluation, 
                                          i, 0, 0, k, 0, 0, j+1)*y[nl.getNeighbor(i,k)]
    center_about_extra_idx   = (np.abs(extra_sites_coords - center_about_coord)).argmin()
    center_about_extra_coord = extra_sites_coords[center_about_extra_idx]
    del nl
    del gmls_obj
    del gmls_helper
    return (y_pred, computed_answer, center_about_extra_idx, center_about_extra_coord, colors)
Example #6
def remap(
        polyOrder,
        dimension,
        additional_sites=False,
        epsilon_multiplier=1.5,
        reconstruction_space=pycompadre.ReconstructionSpace.VectorOfScalarClonesTaylorPolynomial,
        sampling_functional=pycompadre.SamplingFunctionals["VectorPointSample"]
):

    minND = [[10, 20, 30], [10, 20, 100], [30, 30, 60]]
    ND = minND[dimension - 1][polyOrder - 1]

    random.seed(1234)  # for consistent results

    dimensions = dimension

    # initialize GMLS reconstruction for the requested polynomial order and dimension
    gmls_obj = pycompadre.GMLS(reconstruction_space, sampling_functional,
                               polyOrder, dimensions, "QR", "STANDARD")
    gmls_obj.setWeightingParameter(4)
    gmls_obj.setWeightingType("power")

    NT = 10  # Targets
    nx, ny, nz = (ND, ND, ND)

    xmax = 1
    if (dimension > 1): ymax = 1
    if (dimension > 2): zmax = 1
    xmin = -xmax
    if (dimension > 1): ymin = -ymax
    if (dimension > 2): zmin = -zmax

    dx = np.linspace(xmin, xmax, nx)
    if (dimension > 1): dy = np.linspace(ymin, ymax, ny)
    if (dimension > 2): dz = np.linspace(zmin, zmax, nz)

    N = 1
    for i in range(dimension):
        N *= ND

    # target sites
    target_sites = []
    for i in range(NT):
        if (dimension == 1):
            target_sites.append([random.uniform(xmin, xmax)])
        elif (dimension == 2):
            target_sites.append(
                [random.uniform(xmin, xmax),
                 random.uniform(ymin, ymax)])
        elif (dimension == 3):
            target_sites.append([
                random.uniform(xmin, xmax),
                random.uniform(ymin, ymax),
                random.uniform(zmin, zmax)
            ])
    target_sites = np.array(target_sites, dtype='d')

    # source sites
    t_sites = list()
    for i in range(ND):
        if (dimension == 1):
            t_sites.append([
                dx[i],
            ])
        else:
            for j in range(ND):
                if (dimension == 2):
                    t_sites.append([dx[i], dy[j]])
                else:
                    for k in range(ND):
                        t_sites.append([dx[i], dy[j], dz[k]])
    source_sites = np.array(t_sites, dtype=np.dtype('d'))

    # neighbor search
    gmls_helper = pycompadre.ParticleHelper(gmls_obj)
    gmls_helper.generateKDTree(source_sites)
    gmls_helper.generateNeighborListsFromKNNSearchAndSet(
        target_sites, polyOrder, dimensions, epsilon_multiplier)

    # set data in gmls object
    gmls_helper.setSourceSites(source_sites)

    # used in combination with polynomial coefficients
    epsilons = gmls_helper.getWindowSizes()

    gmls_obj.addTargets(pycompadre.TargetOperation.ScalarPointEvaluation)
    gmls_obj.addTargets(
        pycompadre.TargetOperation.PartialXOfScalarPointEvaluation)
    (dimensions > 1) and gmls_obj.addTargets(
        pycompadre.TargetOperation.PartialYOfScalarPointEvaluation)
    (dimensions > 2) and gmls_obj.addTargets(
        pycompadre.TargetOperation.PartialZOfScalarPointEvaluation)

    # add additional evaluation sites (if specified)
    if additional_sites:
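        # build 4 additional evaluation sites per target: column 0 of the index array holds
        # the count, the remaining columns hold row indices into the coordinate array, and
        # each coordinate is the target site perturbed by up to one grid spacing h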
        additional_sites_indices = np.zeros(shape=(NT, 5), dtype='i4')
        additional_site_coordinates = np.zeros(shape=(4 * NT, dimension),
                                               dtype='f8')
        additional_sites_indices[:, 0] = 4
        inds = np.arange(0, 4 * NT)
        inds = np.reshape(inds, newshape=(-1, 4))
        additional_sites_indices[:, 1::] = inds
        h = np.linalg.norm(source_sites[0, :] - source_sites[1, :])
        for i in range(NT):
            for j in range(4):
                for k in range(dimension):
                    additional_site_coordinates[i * 4 + j, k] = \
                        target_sites[i, k] + random.uniform(-h, h)

        gmls_helper.setAdditionalEvaluationSitesData(
            additional_sites_indices, additional_site_coordinates)

    # generate stencil with number of batches set to 1, and keeping coefficients (not necessary)
    gmls_obj.generateAlphas(1, True)

    # create sample data at source sites
    data_vector = []
    for i in range(N):
        data_vector.append(exact(source_sites[i], polyOrder, dimension))
    # use a rank-2 array and insert into only one column to test
    # whether layouts are properly propagated into pycompadre
    new_data_vector = np.zeros(shape=(len(data_vector), 3), dtype='f8')
    new_data_vector[:, 1] = np.array(data_vector, dtype=np.dtype('d'))

    # apply stencil to sample data for all targets
    computed_answer = gmls_helper.applyStencil(
        new_data_vector[:, 1],
        pycompadre.TargetOperation.ScalarPointEvaluation)

    l2_error = 0
    for i in range(NT):
        l2_error += np.power(
            abs(computed_answer[i] -
                exact(target_sites[i], polyOrder, dimension)), 2)
    l2_error = math.sqrt(l2_error / float(NT))

    additional_sites_l2_error = 0.0
    if additional_sites:
        nl = gmls_helper.getNeighborLists()
        # test min/max num neighbors computation works
        nl.computeMinNumNeighbors()
        nl.computeMaxNumNeighbors()
        n_min = nl.getMinNumNeighbors()
        n_max = nl.getMaxNumNeighbors()
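        # manually apply the alphas at the 4 additional evaluation sites;
        # evaluation-site index 0 is the target itself, hence j + 1 below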
        for i in range(NT):
            for j in range(4):
                computed_answer = 0.0
                for k in range(nl.getNumberOfNeighbors(i)):
                    computed_answer += gmls_obj.getSolutionSet().getAlpha(
                        pycompadre.TargetOperation.ScalarPointEvaluation, i, 0,
                        0, k, 0, 0, j + 1) * data_vector[nl.getNeighbor(i, k)]
                additional_sites_l2_error += \
                    np.power(abs(computed_answer - exact(additional_site_coordinates[i*4+j],polyOrder,dimension)),2)

    # retrieve additional sites neighbor lists just to make sure it works
    a_nl = gmls_helper.getAdditionalEvaluationIndices()

    # get polynomial coefficients
    polynomial_coefficients = gmls_helper.getPolynomialCoefficients(
        data_vector)

    # alternative way to compute the H1 seminorm
    # could remap using the gradient operator, but instead this uses the calculated polynomial
    # coefficients and applies the action of the gradient target operation on the polynomial
    # basis at the target sites; this serves as a test for accurate calculation and retrieval
    # of polynomial coefficients through the python interface
    h1_seminorm_error = 0
    if reconstruction_space in (
            pycompadre.ReconstructionSpace.VectorOfScalarClonesTaylorPolynomial,
            pycompadre.ReconstructionSpace.ScalarTaylorPolynomial):
        for i in range(NT):
            h1_seminorm_error += np.power(
                abs(1. / epsilons[i] * polynomial_coefficients[i, 1] -
                    grad_exact(target_sites[i], 0, polyOrder, dimension)), 2)
            if (dimension > 1):
                h1_seminorm_error += np.power(
                    abs(1. / epsilons[i] * polynomial_coefficients[i, 2] -
                        grad_exact(target_sites[i], 1, polyOrder, dimension)),
                    2)
            if (dimension > 2):
                h1_seminorm_error += np.power(
                    abs(1. / epsilons[i] * polynomial_coefficients[i, 3] -
                        grad_exact(target_sites[i], 2, polyOrder, dimension)),
                    2)
    else:
        grad_x = gmls_helper.applyStencil(
            new_data_vector[:, 1],
            pycompadre.TargetOperation.PartialXOfScalarPointEvaluation)
        for i in range(NT):
            h1_seminorm_error += np.power(
                grad_x[i] -
                grad_exact(target_sites[i], 0, polyOrder, dimension), 2)
        if (dimension > 1):
            grad_y = gmls_helper.applyStencil(
                new_data_vector[:, 1],
                pycompadre.TargetOperation.PartialYOfScalarPointEvaluation)
            for i in range(NT):
                h1_seminorm_error += np.power(
                    grad_y[i] -
                    grad_exact(target_sites[i], 1, polyOrder, dimension), 2)
        if (dimension > 2):
            grad_z = gmls_helper.applyStencil(
                new_data_vector[:, 1],
                pycompadre.TargetOperation.PartialZOfScalarPointEvaluation)
            for i in range(NT):
                h1_seminorm_error += np.power(
                    grad_z[i] -
                    grad_exact(target_sites[i], 2, polyOrder, dimension), 2)
    h1_seminorm_error = math.sqrt(h1_seminorm_error / float(NT))

    if additional_sites:
        return l2_error, h1_seminorm_error, additional_sites_l2_error
    return l2_error, h1_seminorm_error
Example #7
    def test_pickling_additional_evaluation_sites(self):

        source_sites = np.array([2.0, 3.0, 5.0, 6.0, 7.0], dtype='f8')
        source_sites = np.reshape(source_sites,
                                  newshape=(source_sites.size, 1))
        data = np.array([4.0, 9.0, 25.0, 36.0, 49.0], dtype='f8')

        polynomial_order = 2
        dim = 1

        point = np.array([4.0, 3.0], dtype='f8')
        target_site = np.reshape(point, newshape=(2, dim))

        extra_sites_coords = np.atleast_2d(np.linspace(0, 4, 5)).T
        extra_sites_idx = np.zeros(shape=(len(point),
                                          len(extra_sites_coords) + 1),
                                   dtype='i4')
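        # target 0 gets no additional evaluation sites; target 1 gets all of them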
        extra_sites_idx[0, 0] = 0
        extra_sites_idx[0, 1:] = np.arange(len(extra_sites_coords))
        extra_sites_idx[1, 0] = len(extra_sites_coords)
        extra_sites_idx[1, 1:] = np.arange(len(extra_sites_coords))

        gmls_obj = pycompadre.GMLS(polynomial_order, 1, "QR", "STANDARD")
        gmls_helper = pycompadre.ParticleHelper(gmls_obj)
        gmls_helper.generateKDTree(source_sites)
        gmls_helper.generateNeighborListsFromKNNSearchAndSet(
            target_site, polynomial_order, dim, 1.5)
        gmls_helper.setAdditionalEvaluationSitesData(extra_sites_idx,
                                                     extra_sites_coords)

        gmls_obj.addTargets(pycompadre.TargetOperation.ScalarPointEvaluation)

        sol1 = [16.0, 0.0, 0.0, 0.0]
        sol2 = [9.0, 0.0, 1.0, 4.0]

        def check_answer(helper, i):
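            # the last argument selects the evaluation site: index 0 is the target
            # site itself, higher indices select the additional evaluation sites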
            output = helper.applyStencil(
                data, pycompadre.TargetOperation.ScalarPointEvaluation,
                pycompadre.SamplingFunctionals['PointSample'], i)
            self.assertAlmostEqual(output[0], sol1[i], places=13)
            self.assertAlmostEqual(output[1], sol2[i], places=13)

        # throws an error because the alphas have not been generated yet
        with self.assertRaises(RuntimeError):
            [check_answer(gmls_helper, i) for i in range(4)]

        # generate alphas and run again
        gmls_obj.generateAlphas(1, True)

        # now it works
        [check_answer(gmls_helper, i) for i in range(4)]

        # now pickle to a file
        with open('test.p', 'wb') as fn:
            pickle.dump(gmls_helper, fn)
        del gmls_obj
        del gmls_helper

        with open('test.p', 'rb') as fn:
            new_gmls_helper = pickle.load(fn)
        new_gmls_obj = new_gmls_helper.getGMLSObject()
        new_gmls_obj.generateAlphas(1, True)
        [check_answer(new_gmls_helper, i) for i in range(4)]
Example #8
    def test_pickle_gmls(self):

        source_sites = np.array([2.0, 3.0, 5.0, 6.0, 7.0], dtype='f8')
        source_sites = np.reshape(source_sites,
                                  newshape=(source_sites.size, 1))
        data = np.array([2.0, 3.0, 5.0, 6.0, 7.0], dtype='f8')

        polynomial_order = 1
        dim = 1

        point = np.array([4.0], dtype='f8')
        target_site = np.reshape(point, newshape=(1, dim))

        # rvalue or std::unique_ptr passed to ParticleHelper (the lifecycle of the GMLS object is not obvious)
        # in Clang, an rvalue-type argument works because it keeps track of which object was created first
        #           and Python keeps track of the rvalue-like object
        # in GCC,   rvalue-type arguments do not work and cause increment/decrement issues
        # -----v NOT OK in GCC
        # gmls_helper = pycompadre.ParticleHelper(pycompadre.GMLS(polynomial_order, 1, "QR", "STANDARD"))
        gmls_obj = pycompadre.GMLS(polynomial_order, 1, "QR", "STANDARD")
        gmls_helper = pycompadre.ParticleHelper(gmls_obj)
        gmls_helper.generateKDTree(source_sites)
        gmls_helper.generateNeighborListsFromKNNSearchAndSet(
            target_site, polynomial_order, dim, 1.5)
        # print("1",gmls_helper.getGMLSObject().__getstate__())
        gmls_helper.getGMLSObject().__getstate__()

        # gets the GMLS object from gmls_helper (Python knows C++ owns it, but C++ treats it as
        # a Python-owned object at construction), so the lifecycle of the GMLS object is not tied
        # to the lifecycle of the GMLS helper
        gmls_obj = gmls_helper.getGMLSObject()
        # destroy helper
        del gmls_helper
        # Python kept a reference count on the argument to the GMLS helper,
        # which isn't obvious, but there is no move constructor for the GMLS helper class,
        # so the following object is not destroyed yet
        # print("2",gmls_obj.__getstate__())
        gmls_obj.__getstate__()

        # Python knows that the original GMLS object was tied to the GMLS helper,
        # so replacing the helper tells Python the GMLS object is no longer needed
        gmls_obj2 = pycompadre.GMLS(polynomial_order, 1, "QR", "STANDARD")
        gmls_helper = pycompadre.ParticleHelper(gmls_obj2)
        # but Python keeps this handle internally and it now points at the new rvalue-like object,
        # so the following still works
        # print("3",gmls_obj.__getstate__())
        gmls_obj2.__getstate__()

        # # In Clang, this will confuse Python because it thinks the rvalue-like GMLS object is
        # # no longer needed, so it will throw it away
        # gmls_helper = pycompadre.ParticleHelper(gmls_obj)
        # # so gmls_obj actually gets destroyed in the previous call
        # gmls_obj = gmls_helper.getGMLSObject()
        # # see what happens to internal GMLS object
        # # object is destroyed and this segfaults
        # # we don't call v--- because it will segfault and unittest can't catch that
        # # print("4",gmls_obj.__getstate__())
        # gmls_obj.__getstate__()
        # # resetting GMLS helper after this block of code will cause deallocation
        # ^--- This wouldn't be tested in GCC because we don't pass rvalue type
        #      arguments to constructors

        # GMLS object destroyed and then relied upon in gmls_helper
        gmls_obj = pycompadre.GMLS(polynomial_order, 1, "QR", "STANDARD")
        gmls_helper = pycompadre.ParticleHelper(gmls_obj)
        gmls_helper.generateKDTree(source_sites)
        gmls_helper.generateNeighborListsFromKNNSearchAndSet(
            target_site, polynomial_order, dim, 1.5)
        # the following v---- will segfault because gmls_obj is deleted
        # del gmls_obj
        # print(gmls_helper.getNeighborLists())
        # print(gmls_helper.getGMLSObject().__getstate__())
        gmls_obj2 = gmls_helper.getGMLSObject()

        # delete the Python-owned gmls_obj and the non-owning gmls_obj2 (which points at the owned gmls_obj)
        del gmls_obj, gmls_obj2, gmls_helper

        gmls_obj = pycompadre.GMLS(polynomial_order, 1, "QR", "STANDARD")
        gmls_obj.addTargets(pycompadre.TargetOperation.ScalarPointEvaluation)
        gmls_obj.addTargets(
            pycompadre.TargetOperation.PartialXOfScalarPointEvaluation)

        gmls_helper = pycompadre.ParticleHelper(gmls_obj)
        gmls_helper.generateKDTree(source_sites)

        point = np.array([4.0], dtype='f8')
        target_site = np.reshape(point, newshape=(1, dim))

        gmls_helper.generateNeighborListsFromKNNSearchAndSet(
            target_site, polynomial_order, dim, 1.5)
        gmls_obj.generateAlphas(1, True)

        # test pickling the gmls_obj but not gmls_helper
        byte_gmls = pickle.dumps(gmls_obj)
        new_gmls_obj = pickle.loads(byte_gmls)

        with open('test.p', 'wb') as fn:
            pickle.dump(gmls_obj, fn)
        with open('test.p', 'rb') as fn:
            new_gmls_obj = pickle.load(fn)

        del gmls_obj

        gmls_helper = pycompadre.ParticleHelper(new_gmls_obj)

        # explicitly we do not call generateKDTree (the KD tree must come from the older GMLS object)
        # gmls_helper.generateKDTree(source_sites)

        # explicitly we do not call generateNeighborListsFromKNNSearchAndSet, so the target info must come from the older GMLS object
        # gmls_helper.generateNeighborListsFromKNNSearchAndSet(target_site, polynomial_order, dim, 1.5)

        new_gmls_obj.generateAlphas(1, True)

        output = gmls_helper.applyStencilSingleTarget(
            data, pycompadre.TargetOperation.PartialXOfScalarPointEvaluation)

        byte_gmls_helper = pickle.dumps(gmls_helper)
        new_gmls_helper = pickle.loads(byte_gmls_helper)

        with open('test.p', 'wb') as fn:
            pickle.dump(gmls_helper, fn)
        del gmls_helper
        del new_gmls_obj

        self.assertAlmostEqual(output, 1.0, places=15)

        # test pickling of gmls_helper
        with open('test.p', 'rb') as fn:
            new_gmls_helper = pickle.load(fn)
        gmls_obj = new_gmls_helper.getGMLSObject()
        gmls_obj.generateAlphas(1, True)

        output = new_gmls_helper.applyStencilSingleTarget(
            data, pycompadre.TargetOperation.PartialXOfScalarPointEvaluation)
        self.assertAlmostEqual(output, 1.0, places=15)
Example #9
def remap(polyOrder,dimension):

    minND = [[10,20,30],[10,20,100],[30,30,60]]
    ND = minND[dimension-1][polyOrder-1]

    random.seed(1234) # for consistent results

    dimensions = dimension

    # initialize GMLS reconstruction for the requested polynomial order and dimension
    gmls_obj=pycompadre.GMLS(polyOrder, dimensions, "QR", "STANDARD")
    gmls_obj.setWeightingPower(4)
    gmls_obj.setWeightingType("power")

    NT = 10 # Targets
    nx, ny, nz = (ND, ND, ND)

    xmax = 1
    if (dimension>1): ymax = 1
    if (dimension>2): zmax = 1
    xmin = -xmax
    if (dimension>1): ymin = -ymax
    if (dimension>2): zmin = -zmax

    dx = np.linspace(xmin, xmax, nx)
    if (dimension>1): dy = np.linspace(ymin, ymax, ny)
    if (dimension>2): dz = np.linspace(zmin, zmax, nz)

    N=1
    for i in range(dimension):
        N*=ND

    # target sites
    target_sites = []
    for i in range(NT):
        if (dimension==1):
            target_sites.append([random.uniform(xmin, xmax)])
        elif (dimension==2):
            target_sites.append([random.uniform(xmin, xmax), random.uniform(ymin, ymax)])
        elif (dimension==3):
            target_sites.append([random.uniform(xmin, xmax), random.uniform(ymin, ymax), random.uniform(zmin, zmax)])
    target_sites = np.array(target_sites, dtype='d')

    # source sites
    t_sites = list()
    for i in range(ND):
        if (dimension==1):
            t_sites.append([dx[i],])
        else:
            for j in range(ND):
                if (dimension==2):
                    t_sites.append([dx[i],dy[j]])
                else:
                    for k in range(ND):
                        t_sites.append([dx[i],dy[j],dz[k]])
    source_sites = np.array(t_sites, dtype=np.dtype('d'))

    # neighbor search
    epsilon_multiplier = 1.5
    gmls_helper = pycompadre.ParticleHelper(gmls_obj)
    gmls_helper.generateKDTree(source_sites)
    gmls_helper.generateNeighborListsFromKNNSearchAndSet(target_sites, polyOrder, dimensions, epsilon_multiplier)

    # set data in gmls object
    gmls_helper.setSourceSites(source_sites)

    # used in combination with polynomial coefficients
    epsilons = gmls_helper.getWindowSizes()

    gmls_obj.addTargets(pycompadre.TargetOperation.ScalarPointEvaluation)
    gmls_obj.addTargets(pycompadre.TargetOperation.PartialXOfScalarPointEvaluation)
    (dimensions>1) and gmls_obj.addTargets(pycompadre.TargetOperation.PartialYOfScalarPointEvaluation)
    (dimensions>2) and gmls_obj.addTargets(pycompadre.TargetOperation.PartialZOfScalarPointEvaluation)

    # generate stencil with number of batches set to 1, and keeping coefficients (not necessary)
    gmls_obj.generateAlphas(1, True)

    # create sample data at source sites
    data_vector = []
    for i in range(N):
        data_vector.append(exact(source_sites[i], polyOrder, dimension))
    data_vector = np.array(data_vector, dtype=np.dtype('d'))

    # apply stencil to sample data for all targets
    computed_answer = gmls_helper.applyStencil(data_vector, pycompadre.TargetOperation.ScalarPointEvaluation)

    l2_error = 0
    for i in range(NT):
        l2_error += np.power(abs(computed_answer[i] - exact(target_sites[i],polyOrder,dimension)),2)
    l2_error = math.sqrt(l2_error/float(NT))

    # get polynomial coefficients
    polynomial_coefficients = gmls_helper.getPolynomialCoefficients(data_vector)

    # alternative way to compute the H1 seminorm
    # could remap using the gradient operator, but instead this uses the calculated polynomial
    # coefficients and applies the action of the gradient target operation on the polynomial
    # basis at the target sites; this serves as a test for accurate calculation and retrieval
    # of polynomial coefficients through the python interface
    h1_seminorm_error = 0
    for i in range(NT):
        h1_seminorm_error += np.power(abs(1./epsilons[i]*polynomial_coefficients[i,1] - grad_exact(target_sites[i], 0, polyOrder, dimension)),2)
        if (dimension>1): h1_seminorm_error += np.power(abs(1./epsilons[i]*polynomial_coefficients[i,2] - grad_exact(target_sites[i], 1, polyOrder, dimension)),2)
        if (dimension>2): h1_seminorm_error += np.power(abs(1./epsilons[i]*polynomial_coefficients[i,3] - grad_exact(target_sites[i], 2, polyOrder, dimension)),2)
    h1_seminorm_error = math.sqrt(h1_seminorm_error/float(NT))

    return l2_error, h1_seminorm_error