    def get_tangent_constraints(self):
        """
        Get the tangent constraints that have been added to this feature.

        Returns
        -------
        numpy array
            array of tangent constraints containing the xyz location,
            tangent vector components and weight (seven columns per row);
            empty if no tangent constraints have been added
        """
        mask = np.all(
            ~np.isnan(self.data.loc[:, tangent_vec_names()].to_numpy()),
            axis=1)
        if mask.sum() > 0:
            return self.data.loc[mask,
                                 xyz_names() + tangent_vec_names() +
                                 weight_name()].to_numpy()
        else:
            return np.zeros((0, 7))
    def __init__(self, interpolator, name='Feature', region=None, **kwargs):
        """
        Constructor for a GeologicalFeatureInterpolator

        Parameters
        ----------
        interpolator : GeologicalInterpolator
            An empty GeologicalInterpolator
        name : string
            name of the feature
        region : lambda function
            defining whether the location (xyz) should be included in the
            interpolation region
        kwargs - additional keyword arguments, e.g. name of the feature,
            region to interpolate the feature
        """
        self.interpolator = interpolator
        self.name = name
        self.interpolator.set_property_name(self.name)
        # everywhere region is just a lambda that returns true for all locations
        if region is None:
            self.region = lambda pos: np.ones(pos.shape[0], dtype=bool)
        else:
            self.region = region
        header = (xyz_names() + val_name() + gradient_vec_names() +
                  normal_vec_names() + tangent_vec_names() + weight_name())
        self.data = pd.DataFrame(columns=header)
        self.faults = []
        self.data_added = False
        self.interpolator.set_region(region=self.region)
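    # Illustrative sketch (not part of the original code): a custom ``region``
    # callable takes an (N, 3) array of xyz locations and returns a boolean
    # array of length N, mirroring the default lambda above. For example, a
    # region restricted to locations below a hypothetical z = 0 surface could
    # be written as:
    #     region = lambda xyz: xyz[:, 2] < 0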
    def __init__(
        self,
        interpolator: GeologicalInterpolator,
        name="Feature",
        region=None,
        **kwargs,
    ):
        """
        Constructor for a GeologicalFeatureBuilder

        Parameters
        ----------
        interpolator : GeologicalInterpolator
            An empty GeologicalInterpolator
        name : string
            name of the feature
        region : lambda function
            defining whether the location (xyz) should be included in the
            interpolation region
        kwargs - additional keyword arguments, e.g. name of the feature,
            region to interpolate the feature
        """
        if not issubclass(type(interpolator), GeologicalInterpolator):
            raise TypeError(
                "interpolator is {} and must be a GeologicalInterpolator".
                format(type(interpolator)))
        self._interpolator = interpolator
        self._name = name
        self._interpolator.set_property_name(self._name)
        # everywhere region is just a lambda that returns true for all locations
        if region is None:
            self.region = RegionEverywhere()
        else:
            self.region = region
        header = (xyz_names() + val_name() + gradient_vec_names() +
                  normal_vec_names() + tangent_vec_names() + weight_name())
        self.data = pd.DataFrame(columns=header)
        self.faults = []
        self.data_added = False
        self._interpolator.set_region(region=self.region)
        self._feature = None
        self._up_to_date = False
        self._build_arguments = {}
        self._feature = GeologicalFeature(
            self._name,
            self._interpolator,
            builder=self,
            regions=[self.region],
            faults=self.faults,
        )
        self._orthogonal_features = {}
        self._equality_constraints = {}
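    # Builder lifecycle (a sketch inferred from the attributes set above):
    # constraint rows accumulate in ``self.data`` with NaN in unused columns,
    # faults are appended to ``self.faults``, and ``add_data_to_interpolator``
    # below transfers the (fault-displaced) constraints to the interpolator.
    # ``data_added`` and ``_up_to_date`` are assumed to track whether that
    # transfer and the resulting interpolation are current.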
    def add_data_to_interpolator(self,
                                 constrained=False,
                                 force_constrained=False,
                                 **kwargs):
        """
        Iterates through the list of data and applies any faults active on the
        data in the order they are added

        Parameters
        -----------
        constrained : boolean
        force_constrained : boolean

        Returns
        -------

        """
        # first apply the faults to the data locations
        logger.info("Applying %i faults to the data for %s",
                    len(self.faults), self.name)
        data = self.data.copy()
        # apply each fault in the order it was added, updating the locations
        # cumulatively so that later faults act on already displaced points
        for f in self.faults:
            data.loc[:, xyz_names()] = f.apply_to_points(
                data.loc[:, xyz_names()].to_numpy())
        # Now check whether there are enough constraints for the
        # interpolator to be able to solve
        # we need at least 2 different value points or a single norm
        # constraint. If there are not enough
        # try converting grad to norms, if still not enough send user an error
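        # (a norm constraint fixes both the direction and the magnitude of the
        # scalar field gradient, and two distinct value levels fix its scale;
        # direction-only gradient data or a single value level leave the field
        # determined only up to an arbitrary scaling)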
        if constrained:
            # Change normals to gradients
            mask = np.all(~np.isnan(data.loc[:, normal_vec_names()]), axis=1)
            if mask.sum() > 0:
                data.loc[mask, gradient_vec_names()] = data.loc[
                    mask, normal_vec_names()].to_numpy()
                data.loc[mask, normal_vec_names()] = np.nan
        if self.get_norm_constraints().shape[0] > 0:
            constrained = True

        if np.unique(self.get_value_constraints()[:, 3]).shape[0] > 1:
            constrained = True

        if not constrained or force_constrained:
            # change gradient constraints to normal vector constraints
            mask = np.all(~np.isnan(data.loc[:, gradient_vec_names()]), axis=1)
            if mask.sum() > 0:
                data.loc[mask, normal_vec_names()] = data.loc[
                    mask, gradient_vec_names()].to_numpy()
                data.loc[mask, gradient_vec_names()] = np.nan
                logger.info("Setting gradient points to norm constraints")
                constrained = True

        if not constrained:
            logger.error("Not enough constraints for scalar field add more")
        # self.interpolator.reset()
        mask = ~np.isnan(data.loc[:, val_name()].to_numpy())

        # add value constraints
        if mask.sum() > 0:
            value_data = data.loc[mask[:, 0],
                                  xyz_names() + val_name() +
                                  weight_name()].to_numpy()
            self.interpolator.set_value_constraints(value_data)

        # add gradient constraints
        mask = np.all(~np.isnan(data.loc[:, gradient_vec_names()].to_numpy()),
                      axis=1)
        if mask.sum() > 0:
            gradient_data = data.loc[mask,
                                     xyz_names() + gradient_vec_names() +
                                     weight_name()].to_numpy()
            self.interpolator.set_gradient_constraints(gradient_data)

        # add normal vector data
        mask = np.all(~np.isnan(data.loc[:, normal_vec_names()].to_numpy()),
                      axis=1)
        if mask.sum() > 0:
            normal_data = data.loc[mask,
                                   xyz_names() + normal_vec_names() +
                                   weight_name()].to_numpy()
            self.interpolator.set_normal_constraints(normal_data)

        # add tangent data
        mask = np.all(~np.isnan(data.loc[:, tangent_vec_names()].to_numpy()),
                      axis=1)
        if mask.sum() > 0:
            tangent_data = data.loc[mask,
                                    xyz_names() + tangent_vec_names() +
                                    weight_name()].to_numpy()
            self.interpolator.set_tangent_constraints(tangent_data)

        # add interface constraints
        mask = np.all(~np.isnan(data.loc[:, interface_name()].to_numpy()),
                      axis=1)
        if mask.sum() > 0:
            interface_data = data.loc[mask,
                                      xyz_names() + interface_name() +
                                      weight_name()].to_numpy()
            self.interpolator.set_interface_constraints(interface_data)

        self.data_added = True
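        # all non-NaN constraint rows have now been transferred to the
        # interpolator; solving the interpolation system itself is assumed to
        # happen in a later build/update step of the builder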