    def add_gradient_constraints(self, w=1.0):
        """
        Add gradient constraints to the least squares system with a weight
        defined by w.

        Parameters
        ----------
        w : double / numpy array
            scalar weight, or array with one weight per gradient constraint

        Returns
        -------
        None

        Notes
        -----
        Gradient constraints require the gradient of the implicit function
        to be orthogonal to the strike vector and the dip vector defined by
        the normal. This does not control the direction of the gradient and
        therefore requires at least two other value constraints OR a norm
        constraint for the interpolant to solve. A standalone sketch of the
        underlying linear algebra follows this method.
        """

        points = self.get_gradient_constraints()
        if points.shape[0] > 0:
            (
                vertices,
                element_gradients,
                tetras,
                inside,
            ) = self.support.get_element_gradient_for_location(points[:, :3])
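            # element_gradients[i] is the gradient operator of the element
            # containing point i: it maps the values at the element nodes to
            # the gradient of the interpolant at that point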
            # e, inside = self.support.elements_for_array(points[:, :3])
            # nodes = self.support.nodes[self.support.elements[e]]
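            # tetrahedron volumes from the edge-vector determinant
            # (|det| / 6); only used if constraints are scaled by volume
            # (see the commented-out lines below)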
            vecs = vertices[:, 1:, :] - vertices[:, 0, None, :]
            vol = np.abs(np.linalg.det(vecs)) / 6
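            # normalise the orientation vectors and rescale the element
            # gradient operator by the same factor; only the direction of
            # the gradient is constrained, not its magnitude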
            norm = np.linalg.norm(points[:, 3:6], axis=1)
            points[:, 3:6] /= norm[:, None]
            element_gradients /= norm[:, None, None]
            # d_t *= vol[:,None,None]
            strike_vector, dip_vector = get_vectors(points[:, 3:6])
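            # one constraint row per point: the directional derivative of
            # the interpolant along the strike vector must be zero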
            A = np.einsum("ji,ijk->ik", strike_vector, element_gradients)

            # A *= vol[:, None]

            # map global node indices to indices in the active region;
            # nodes outside the region are flagged with -1
            gi = np.full(self.support.n_nodes, -1, dtype=int)
            gi[self.region] = np.arange(0, self.nx)
            # w /= 3
            idc = gi[tetras]
            B = np.zeros(idc.shape[0])
            # keep only constraint rows whose element nodes all lie inside
            # the active region
            inside_region = ~np.any(idc == -1, axis=1)
            # fold the per-point weight (column 6) into the constraint weight
            w = w * points[inside_region, 6]
            self.add_constraints_to_least_squares(
                A[inside_region, :], B[inside_region], idc[inside_region, :], w=w, name="gradient strike"
            )
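            # repeat for the dip vector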
            A = np.einsum("ji,ijk->ik", dip_vector, element_gradients)
            # A *= vol[:, None]
            self.add_constraints_to_least_squares(
                A[inside_region, :], B[inside_region], idc[inside_region, :], w=w, name="gradient dip"
            )
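
# A minimal, self-contained sketch of the orthogonality constraint used
# above. _strike_dip_vectors is a hypothetical stand-in for get_vectors
# (not the library implementation); it builds two unit vectors orthogonal
# to each normal. For a linear field f(x) = n . x the gradient is exactly
# n, so the directional derivatives along strike and dip are both zero.
import numpy as np

def _strike_dip_vectors(normals):
    # choose a reference axis that is not parallel to the normal
    ref = np.tile([0.0, 0.0, 1.0], (normals.shape[0], 1))
    ref[np.abs(normals[:, 2]) > 0.9] = [1.0, 0.0, 0.0]
    strike = np.cross(normals, ref)
    strike /= np.linalg.norm(strike, axis=1)[:, None]
    dip = np.cross(normals, strike)
    dip /= np.linalg.norm(dip, axis=1)[:, None]
    return strike, dip

normals = np.array([[0.0, 0.0, 1.0], [1.0, 1.0, 0.0]])
normals /= np.linalg.norm(normals, axis=1)[:, None]
strike, dip = _strike_dip_vectors(normals)
# grad f = n for f(x) = n . x, so both dot products are ~0
print(np.einsum("ij,ij->i", strike, normals))  # ~[0. 0.]
print(np.einsum("ij,ij->i", dip, normals))     # ~[0. 0.]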

Example #2

    def add_gradient_ctr_pts(self):
        points = self.get_gradient_constraints()
        if points.shape[0] > 0:
            logger.info("Adding gradient constraints to surfe as tangent constraints")
            strike_vector, dip_vector = get_vectors(points[:, 3:6])

            # strike and dip are tangent to the surface defined by the
            # normal, so pass them to surfe as tangent constraints
            # (position followed by vector components on each row)
            strike_vector = np.hstack([points[:, :3], strike_vector.T])
            dip_vector = np.hstack([points[:, :3], dip_vector.T])
            self.surfe.SetTangentConstraints(strike_vector)
            self.surfe.SetTangentConstraints(dip_vector)

    def add_gradient_constraints(self, w=1.0):
        """
        Add gradient constraints to the least squares system, constraining
        the gradient of the implicit function to be orthogonal to the
        strike and dip vectors defined by the normal.

        Parameters
        ----------
        w : double / numpy array
            weight applied to each constraint row

        Returns
        -------
        None
        """

        points = self.get_gradient_constraints()
        if points.shape[0] > 0:
            # calculate unit vector for orientation data
            # points[:,3:]/=np.linalg.norm(points[:,3:],axis=1)[:,None]

            node_idx, inside = self.support.position_to_cell_corners(points[:, :3])
            # map global node indices to indices in the active region;
            # nodes outside the region are flagged with -1
            gi = np.full(self.support.n_nodes, -1, dtype=int)
            gi[self.region] = np.arange(0, self.nx)
            idc = np.full(node_idx.shape, -1, dtype=int)
            idc[inside, :] = gi[node_idx[inside, :]]
            # drop constraints that are outside the support or reference a
            # node outside the active region
            inside = np.logical_and(~np.any(idc == -1, axis=1), inside)

            (
                vertices,
                T,
                elements,
                inside_,
            ) = self.support.get_element_gradient_for_location(points[inside, :3])
            # normalise the constraint vectors and rescale the element
            # gradient operator by the same factor; only the direction of
            # the gradient is constrained, not its magnitude
            norm = np.linalg.norm(points[:, 3:6], axis=1)
            points[:, 3:6] /= norm[:, None]
            T /= norm[inside, None, None]
            # calculate two orthogonal vectors to constraint (strike and dip vector)
            strike_vector, dip_vector = get_vectors(points[inside, 3:6])
            A = np.einsum("ij,ijk->ik", strike_vector.T, T)
            B = np.zeros(points[inside, :].shape[0])
            self.add_constraints_to_least_squares(
                A, B, idc[inside, :], w=w * self.vol, name="gradient strike"
            )
            A = np.einsum("ij,ijk->ik", dip_vector.T, T)
            self.add_constraints_to_least_squares(
                A, B, idc[inside, :], w=w * self.vol, name="gradient dip"
            )
            if np.sum(~inside) > 0:
                logger.warning(
                    f"{self.propertyname}: {np.sum(~inside)} gradient constraints not added: outside of model bounding box"
                )
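
# A short sketch of the einsum used to assemble constraint rows in the
# methods above, with made-up shapes: T[i] is the (3 x n_nodes) gradient
# operator of the element containing point i, and strike holds one (3,)
# vector per point. Row i of A is then the directional derivative operator
# along strike at point i, i.e. A[i] = strike[i] @ T[i].
import numpy as np

n_pts, n_elem_nodes = 4, 8
T = np.random.rand(n_pts, 3, n_elem_nodes)
strike = np.random.rand(n_pts, 3)
A = np.einsum("ij,ijk->ik", strike, T)  # shape (n_pts, n_elem_nodes)
assert np.allclose(A[0], strike[0] @ T[0])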

    def add_gradient_constraint(self, w=1.0):
        """
        Add gradient constraints to the least squares system, constraining
        the gradient of the implicit function to be orthogonal to the
        strike and dip vectors defined by the normal.

        Parameters
        ----------
        w : double / numpy array
            weight applied to each constraint row

        Returns
        -------
        None
        """

        points = self.get_gradient_constraints()
        if points.shape[0] > 0:
            # calculate unit vector for orientation data
            # points[:,3:]/=np.linalg.norm(points[:,3:],axis=1)[:,None]

            node_idx, inside = self.support.position_to_cell_corners(points[:, :3])
            # map global node indices to indices in the active region;
            # nodes outside the region are flagged with -1
            gi = np.full(self.support.n_nodes, -1, dtype=int)
            gi[self.region] = np.arange(0, self.nx)
            idc = np.full(node_idx.shape, -1, dtype=int)
            idc[inside, :] = gi[node_idx[inside, :]]
            # drop constraints that are outside the support or reference a
            # node outside the active region
            inside = np.logical_and(~np.any(idc == -1, axis=1), inside)

            # gradient operator for the element containing each point:
            # T[i] maps the element node values to the gradient at point i
            T = self.support.calcul_T(points[inside, :3])
            strike_vector, dip_vector = get_vectors(points[inside, 3:6])
            A = np.einsum('ij,ijk->ik', strike_vector.T, T)

            B = np.zeros(points[inside, :].shape[0])
            self.add_constraints_to_least_squares(A * w, B, idc[inside, :])
            A = np.einsum('ij,ijk->ik', dip_vector.T, T)
            self.add_constraints_to_least_squares(A * w, B, idc[inside, :])
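
# A self-contained demo of the region index-mapping pattern shared by the
# methods above; the node ids and region mask here are made up for
# illustration. gi maps global node ids to positions within the active
# region, with -1 as a sentinel for excluded nodes, and any constraint row
# that touches a -1 is dropped.
import numpy as np

n_nodes = 8
region = np.array([False, True, True, False, True, True, True, False])
gi = np.full(n_nodes, -1, dtype=int)
gi[region] = np.arange(region.sum())
elements = np.array([[1, 2, 4, 5], [0, 1, 2, 4]])  # node ids per constraint
idc = gi[elements]
keep = ~np.any(idc == -1, axis=1)  # drop rows touching excluded nodes
print(idc)   # [[ 0  1  2  3] [-1  0  1  2]]
print(keep)  # [ True False]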