Example #1
    def _apply_check_args(image, mask, edit_steps):
        """
        Check arguments for applying edit steps.

        Args:
            image (numpy.ndarray): Image tensor in CHW or NCHW(N=1) format.
            mask (Union[str, tuple[float, float, float], float, numpy.ndarray]): The mask, which can be one of:
                str: String mask, e.g. 'gaussian:9' - Gaussian blur with a radius of 9.
                tuple[float, float, float]: RGB solid color mask.
                float: Greyscale solid color mask.
                numpy.ndarray: Image mask in CHW or NCHW(N=1) format.
            edit_steps (list[EditStep], optional): Edit steps to be applied.

        Raises:
            TypeError: Raised for any argument or data type problem.
            ValueError: Raised for any argument or data value problem.
        """
        check_value_type('image', image, np.ndarray)
        check_value_type('mask', mask, (str, tuple, float, np.ndarray))
        if isinstance(mask, tuple):
            _check_iterable_type('mask', mask, tuple, float)

        if edit_steps is not None:
            _check_iterable_type('edit_steps', edit_steps, (tuple, list),
                                 EditStep)
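All of the examples on this page call a `check_value_type` helper (and, in a few places, `_check_iterable_type`) whose definition is not shown here. Below is a minimal sketch of what such helpers could look like; it is an assumption for illustration only, and the actual MindSpore implementations may differ in signature and error messages.

def check_value_type(arg_name, value, valid_types):
    """Raise TypeError if `value` is not an instance of `valid_types` (sketch, not the MindSpore original)."""
    # `valid_types` may be a single type, a tuple of types, or a list of types.
    types = tuple(valid_types) if isinstance(valid_types, (tuple, list)) else (valid_types,)
    if not isinstance(value, types):
        names = '/'.join(t.__name__ for t in types)
        raise TypeError(f'Argument `{arg_name}` must be of type {names}, '
                        f'but got {type(value).__name__}.')


def _check_iterable_type(arg_name, iterable, container_types, element_types):
    """Check the container type and the type of every element in it (sketch)."""
    check_value_type(arg_name, iterable, container_types)
    for i, element in enumerate(iterable):
        check_value_type(f'{arg_name}[{i}]', element, element_types)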
Example #2
def verify_argument(inputs, arg_name):
    """Verify the validity of the parsed arguments."""
    check_value_type(arg_name, inputs, Tensor)
    if len(inputs.shape) != 4:
        raise ValueError('Argument {} must be a 4D Tensor.'.format(arg_name))
    if len(inputs) > 1:
        raise ValueError('Support single data evaluation only, but got {}.'.format(len(inputs)))
Example #3
 def _check_collect_freq(freq):
     """Check collect freq type and value."""
     check_value_type('collect_freq', freq, int)
     if freq <= 0:
         raise ValueError(
             f'For `collect_freq` the value should be greater than 0, but got `{freq}`.'
         )
Example #4
 def __init__(self, num_labels, activation_fn):
     super().__init__(num_labels)
     check_value_type("activation_fn", activation_fn, nn.Cell)
     self._perturb = RandomPerturb()
     self._num_perturbations = 10  # number of perturbations used in evaluation
     self._threshold = 0.1  # threshold to generate perturbation
     self._activation_fn = activation_fn
Example #5
 def __init__(self,
              network,
              activation_fn=softmax(),
              ):
     super(PerturbationAttribution, self).__init__(network)
     check_value_type("activation_fn", activation_fn, Cell)
     self._activation_fn = activation_fn
Example #6
 def __init__(self, summary_dir: Optional[str] = "./"):
     check_value_type("summary_dir", summary_dir, str)
     self._summary_dir = summary_dir
     self._count = 0
     self._classes = None
     self._model = None
     self._uncertainty = None
     self._summary_timestamp = None
Example #7
 def _verify_network(self):
     """Verify the network."""
     next_element = next(self._dataset.create_tuple_iterator())
     inputs, _, _ = self._unpack_next_element(next_element)
     prop_test = self._full_network(inputs)
     check_value_type("output of network in explainer", prop_test, ms.Tensor)
     if prop_test.shape[1] != len(self._labels):
         raise ValueError("The dimension of network output does not match the no. of classes. Please "
                          "check labels or the network in the explainer again.")
Example #8
 def _check_evaluate_param_with_mask(self, explainer, inputs, targets, saliency, mask):
     self._check_evaluate_param(explainer, inputs, targets, saliency)
     if len(inputs.shape) != 4:
         raise ValueError('Argument inputs must be a 4D Tensor.')
     if mask is None:
         raise ValueError('To compute localization, mask must be provided.')
     check_value_type('mask', mask, (Tensor, np.ndarray))
     if len(mask.shape) != 4 or len(mask) != len(inputs):
         raise ValueError("The input mask must be 4-dimensional (1, 1, h, w) with same length of inputs.")
Example #9
 def _check_positive(name, value, allow_none=False):
     """Check if the value to be int type and positive."""
     if allow_none and value is None:
         return
     check_value_type(name, value, int)
     if value <= 0:
         raise ValueError(
             f'For `{name}` the value should be greater than 0, but got `{value}`.'
         )
Example #10
def auto_str_mask(image):
    """Generate auto string mask for the image."""
    check_value_type('image', image, np.ndarray)
    short_side = np.min(image.shape[-2:])
    radius = int(round(short_side / AUTO_MASK_GAUSSIAN_RADIUS_DIV))
    if radius == 0:
        raise ValueError(
            f"Input image's short side: {short_side} is too small for auto mask; "
            f"at least {AUTO_MASK_GAUSSIAN_RADIUS_DIV} pixels are required.")
    return f'gaussian:{radius}'
Example #11
 def _verify_data(inputs, targets):
     """Verify the validity of the parsed inputs."""
     check_value_type('inputs', inputs, Tensor)
     if len(inputs.shape) != 4:
         raise ValueError('Argument inputs must be 4D Tensor')
     check_value_type('targets', targets, (Tensor, int))
     if isinstance(targets, Tensor):
         if len(targets.shape) > 1 or (len(targets.shape) == 1 and len(targets) != len(inputs)):
             raise ValueError('Argument targets must be a 1D or 0D Tensor. If it is a 1D Tensor, '
                              'it should have the same length as inputs.')
Example #12
def verify_targets(targets, num_labels):
    """Verify the validity of the parsed targets."""
    check_value_type('targets', targets, (int, Tensor))

    if isinstance(targets, Tensor):
        if len(targets.shape) > 1 or (len(targets.shape) == 1 and len(targets) != 1):
            raise ValueError('Argument targets must be a 1D or 0D Tensor. If it is a 1D Tensor, '
                             'it should have the length = 1 as we only support single evaluation now.')
        targets = int(targets.asnumpy()[0]) if len(targets.shape) == 1 else int(targets.asnumpy())
    if targets > num_labels - 1 or targets < 0:
        raise ValueError('Parsed targets exceed the label range.')
Example #13
def compile_str_mask(mask, image):
    """Concert string mask to numpy.ndarray."""
    check_value_type('mask', mask, str)
    check_value_type('image', image, np.ndarray)
    match = re.match(MASK_GAUSSIAN_RE, mask)
    if match:
        radius = int(match.group(1))
        if radius > 0:
            sigma = [0] * len(image.shape)
            sigma[-2] = radius
            sigma[-1] = radius
            return gaussian_filter(image, sigma=sigma, mode='nearest')
    raise ValueError(f"Invalid string mask: '{mask}'.")
Example #14
    def register_saliency(self, explainers, benchmarkers=None):
        """
        Register saliency explanation instances.

        Note:
            This function can not be invoked more than once on each runner.

        Args:
            explainers (list[Attribution]): The explainers to be evaluated,
                see `mindspore.explainer.explanation`. All explainers' classes must be distinct, and their networks
                must be the exact same instance as the runner's network.
            benchmarkers (list[AttributionMetric], optional): The benchmarkers for scoring the explainers,
                see `mindspore.explainer.benchmark`. All benchmarkers' classes must be distinct.

        Raises:
            ValueError: Raised for any data or settings' value problem.
            TypeError: Raised for any data or settings' type problem.
            RuntimeError: Raised if this function was invoked before.
        """
        check_value_type("explainers", explainers, list)
        if not all(isinstance(ele, Attribution) for ele in explainers):
            raise TypeError(
                "Argument explainers is not a list of mindspore.explainer.explanation."
            )

        if not explainers:
            raise ValueError("Argument explainers is empty.")

        if benchmarkers is not None:
            check_value_type("benchmarkers", benchmarkers, list)
            if not all(
                    isinstance(ele, AttributionMetric)
                    for ele in benchmarkers):
                raise TypeError(
                    "Argument benchmarkers is not a list of mindspore.explainer.benchmark."
                )

        if self._explainers is not None:
            raise RuntimeError(
                "Function register_saliency() was invoked already.")

        self._explainers = explainers
        self._benchmarkers = benchmarkers

        try:
            self._verify_data_n_settings(check_saliency=True,
                                         check_environment=True)
        except (ValueError, TypeError):
            self._explainers = None
            self._benchmarkers = None
            raise
Example #15
    def _process_specified_data(self, specified_data, action):
        """Check specified data type and value."""
        if specified_data is None:
            if action:
                return self._DEFAULT_SPECIFIED_DATA
            return None

        check_value_type('collect_specified_data', specified_data, [dict, type(None)])

        for param_name in specified_data:
            check_value_type(param_name, param_name, [str])

        unexpected_params = set(specified_data) - set(self._DEFAULT_SPECIFIED_DATA)
        if unexpected_params:
            raise ValueError(f'For `collect_specified_data` the keys {unexpected_params} are unsupported.')

        if 'histogram_regular' in specified_data:
            check_value_type('histogram_regular', specified_data.get('histogram_regular'), (str, type(None)))

        bool_items = set(self._DEFAULT_SPECIFIED_DATA) - {'histogram_regular'}
        for item in bool_items:
            if item in specified_data:
                check_value_type(item, specified_data.get(item), bool)

        if action:
            # dict.update() returns None, so merge into a copy of the defaults instead.
            result = dict(self._DEFAULT_SPECIFIED_DATA)
            result.update(specified_data)
        else:
            result = specified_data
        return result
Example #16
 def _verify_data(inputs, targets):
     """Verify the validity of the parsed inputs."""
     check_value_type('inputs', inputs, Tensor)
     if len(inputs.shape) != 4:
         raise ValueError(f'Argument inputs must be 4D Tensor, but got {len(inputs.shape)}D Tensor.')
     check_value_type('targets', targets, (Tensor, int, tuple, list))
     if isinstance(targets, Tensor):
         if len(targets.shape) > 2:
             raise ValueError('Dimension invalid. If `targets` is a Tensor, it should be 0D, 1D or 2D. '
                              'But got {}D.'.format(len(targets.shape)))
         if targets.shape and len(targets) != len(inputs):
             raise ValueError(
                 'If `targets` is a 1D or 2D Tensor, it should have the same length as inputs ({}), '
                 'but got {}.'.format(len(inputs), len(targets)))
Example #17
 def __init__(
     self,
     network,
     activation_fn,
     perturbation_per_eval,
 ):
     super(PerturbationAttribution, self).__init__(network)
     check_value_type("activation_fn", activation_fn, Cell)
     self._activation_fn = activation_fn
     check_value_type('perturbation_per_eval', perturbation_per_eval, int)
     if perturbation_per_eval <= 0:
         raise ValueError(
             'Argument perturbation_per_eval should be a positive integer.')
     self._perturbation_per_eval = perturbation_per_eval
Example #18
    def _process_summary_dir(summary_dir):
        """Check the summary dir, and create a new directory if it not exists."""
        check_value_type('summary_dir', summary_dir, str)
        summary_dir = summary_dir.strip()
        if not summary_dir:
            raise ValueError('For `summary_dir` the value should be a valid path string, but got an empty string.')

        summary_dir = os.path.realpath(summary_dir)
        if not os.path.exists(summary_dir):
            os.makedirs(summary_dir, exist_ok=True)
        else:
            if not os.path.isdir(summary_dir):
                raise NotADirectoryError('For `summary_dir` it should be a directory path.')

        return summary_dir
Example #19
    def __init__(self,
                 network,
                 win_sizes=None,
                 strides=None,
                 threshold=DEFAULT_THRESHOLD,
                 by_masking=False):

        check_value_type('network', network, nn.Cell)

        if win_sizes is not None:
            _check_iterable_type('win_sizes', win_sizes, list, int)
            if not win_sizes:
                raise ValueError('Argument win_sizes is empty.')

            for i in range(1, len(win_sizes)):
                if win_sizes[i] >= win_sizes[i - 1]:
                    raise ValueError(
                        'Argument win_sizes is not strictly descending.')

            if win_sizes[-1] <= 0:
                raise ValueError('Argument win_sizes has non-positive number.')
        elif strides is not None:
            raise ValueError(
                'Argument win_sizes cannot be None if strides is not None.')

        if strides is not None:
            _check_iterable_type('strides', strides, list, int)
            for i in range(1, len(strides)):
                if strides[i] >= strides[i - 1]:
                    raise ValueError(
                        'Argument strides is not strictly descending.')

            if strides[-1] <= 0:
                raise ValueError('Argument strides has non-positive number.')

            if len(strides) != len(win_sizes):
                raise ValueError(
                    'Length of strides and win_sizes is not equal.')
        elif win_sizes is not None:
            raise ValueError(
                'Argument strides cannot be None if win_sizes is not None.')

        self._network = copy.deepcopy(network)
        self._compiled_mask = None
        self._threshold = threshold
        self._win_sizes = copy.copy(win_sizes) if win_sizes else None
        self._strides = copy.copy(strides) if strides else None
        self._by_masking = by_masking
Example #20
 def _verify_data(inputs, targets):
     """Verify the validity of the parsed inputs."""
     check_value_type('inputs', inputs, ms.Tensor)
     if len(inputs.shape) != 4:
         raise ValueError('Argument inputs must be 4D Tensor')
     check_value_type('targets', targets, (ms.Tensor, int))
     if isinstance(targets, ms.Tensor):
         if len(targets.shape) > 1 or (len(targets.shape) == 1
                                       and len(targets) != len(inputs)):
             raise ValueError(
                 'Argument targets must be a 1D or 0D Tensor. If it is a 1D Tensor, '
                 'it should have the same length as inputs.')
     elif inputs.shape[0] != 1:
         raise ValueError(
             'If targets is an int, the batch size of inputs should equal 1, but got batch size {}.'
             .format(inputs.shape[0]))
Example #21
    def __init__(self, num_labels, activation_fn, metric="NaiveFaithfulness"):
        super(Faithfulness, self).__init__(num_labels)

        perturb_percent = 0.5  # ratio of pixels to be perturbed, future argument
        perturb_method = "Constant"  # perturbation method, all the perturbed pixels will be set to constant
        base_value = 0.0  # the pixel value set for the perturbed pixels

        check_value_type("activation_fn", activation_fn, nn.Cell)
        self._activation_fn = activation_fn

        self._verify_metrics(metric)
        for method in self._methods:
            if metric == method.__name__:
                self._faithfulness_helper = method(
                    perturb_percent=perturb_percent,
                    perturb_method=perturb_method,
                    base_value=base_value)
Example #22
    def __init__(self,
                 network,
                 activation_fn=nn.Softmax(),
                 perturbation_per_eval=32):
        super(RISE, self).__init__(network, activation_fn)
        check_value_type('perturbation_per_eval', perturbation_per_eval, int)
        if perturbation_per_eval <= 0:
            raise ValueError(
                'perturbation_per_eval should be a positive integer.')
        self._perturbation_per_eval = perturbation_per_eval

        self._num_masks = 6000  # number of masks to be sampled
        self._mask_probability = 0.2  # ratio of inputs to be masked
        self._down_sample_size = 10  # the original size of binary masks
        self._resize_mode = 'bilinear'  # mode choice to resize the down-sized binary masks to size of the inputs
        self._perturbation_mode = 'constant'  # setting the perturbed pixels to a constant value
        self._base_value = 0  # setting the perturbed pixels to this constant value
        self._num_classes = None  # placeholder of self._num_classes just for future assignment in other methods
Example #23
    def _verify_network(self):
        """Verify the network."""
        label_set = set()
        for i, label in enumerate(self._labels):
            if label.strip() == "":
                raise ValueError(f"Label [{i}] is all whitespaces or empty. Please make sure there is "
                                 f"no empty label.")
            if label in label_set:
                raise ValueError(f"Duplicated label:{label}! Please make sure all labels are unique.")
            label_set.add(label)

        next_element = next(self._dataset.create_tuple_iterator())
        inputs, _, _ = self._unpack_next_element(next_element)
        prop_test = self._full_network(inputs)
        check_value_type("output of network in explainer", prop_test, ms.Tensor)
        if prop_test.shape[1] != len(self._labels):
            raise ValueError("The dimension of network output does not match the no. of classes. Please "
                             "check labels or the network in the explainer again.")
Example #24
def compile_mask(mask, image):
    """Compile mask to a ready to use object."""
    if mask is None:
        return compile_str_mask(auto_str_mask(image), image)
    check_value_type('mask', mask, (str, tuple, float, np.ndarray))
    if isinstance(mask, str):
        return compile_str_mask(mask, image)

    if isinstance(mask, tuple):
        _check_iterable_type('mask', mask, tuple, float)
    elif isinstance(mask, np.ndarray):
        if len(image.shape) == 4 and len(mask.shape) == 3:
            mask = np.expand_dims(mask, axis=0)
        elif len(image.shape) == 3 and len(
                mask.shape) == 4 and mask.shape[0] == 1:
            mask = mask.squeeze(0)
        if image.shape != mask.shape:
            raise ValueError("Image and mask is not match in shape.")
    return mask
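Example #24 dispatches on four mask types. The short sketch below exercises each branch; it assumes the helpers from Examples #1, #10 and #13 (and the constants assumed above) are available in the same module, and the shapes are chosen purely for illustration.

import numpy as np

image = np.random.rand(1, 3, 64, 64).astype(np.float32)   # NCHW image, N=1

auto_mask = compile_mask(None, image)             # None -> auto Gaussian blur mask
str_mask = compile_mask('gaussian:5', image)      # str -> explicit Gaussian blur, radius 5
rgb_mask = compile_mask((0.5, 0.5, 0.5), image)   # tuple -> RGB solid color, returned unchanged
chw = np.zeros((3, 64, 64), dtype=np.float32)
nchw_mask = compile_mask(chw, image)              # CHW ndarray is expanded to NCHW
print(nchw_mask.shape)                            # (1, 3, 64, 64)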
Example #25
    def _verify_data(inputs, targets):
        """
        Verify the validity of the parsed inputs.

        Args:
            inputs (Tensor): The inputs to be explained.
            targets (Tensor, int): The label of interest. It should be a 1D or 0D tensor, or an integer.
                If it is a 1D tensor, its length should be the same as `inputs`.
        """
        check_value_type('inputs', inputs, Tensor)
        if len(inputs.shape) != 4:
            raise ValueError(
                f'Argument inputs must be 4D Tensor. But got {len(inputs.shape)}D Tensor.'
            )
        check_value_type('targets', targets, (Tensor, int))
        if isinstance(targets, Tensor):
            if len(targets.shape) > 1 or (len(targets.shape) == 1
                                          and len(targets) != len(inputs)):
                raise ValueError(
                    'Argument targets must be a 1D or 0D Tensor. If it is a 1D Tensor, '
                    'it should have the same length as inputs.')
Example #26
 def _check_custom_lineage_type(self, param_name, custom_lineage):
     """Check custom lineage type."""
     check_value_type(param_name, custom_lineage, [dict, type(None)])
     if custom_lineage is None:
         return
     for key, value in custom_lineage.items():
         check_value_type(f'{param_name} -> {key}', key, str)
         check_value_type(f'the value of {param_name} -> {key}', value,
                          (int, str, float))
Example #27
    def _process_specified_data(self, specified_data, action):
        """Check specified data type and value."""
        if specified_data is None:
            if action:
                return dict(self._DEFAULT_SPECIFIED_DATA)
            return dict()

        check_value_type('collect_specified_data', specified_data,
                         [dict, type(None)])

        for param_name in specified_data:
            check_value_type(param_name, param_name, [str])

        unexpected_params = set(specified_data) - set(
            self._DEFAULT_SPECIFIED_DATA)
        if unexpected_params:
            raise ValueError(
                f'For `collect_specified_data` the keys {unexpected_params} are unsupported, '
                f'expect the follow keys: {list(self._DEFAULT_SPECIFIED_DATA.keys())}'
            )

        if 'histogram_regular' in specified_data:
            regular = specified_data.get('histogram_regular')
            check_value_type('histogram_regular', regular, (str, type(None)))
            if isinstance(regular, str):
                try:
                    re.match(regular, '')
                except re.error as exc:
                    raise ValueError(
                        f'For `collect_specified_data`, the value of `histogram_regular` '
                        f'is not a valid regular expression. Detail: {str(exc)}.'
                    )

        bool_items = set(self._DEFAULT_SPECIFIED_DATA) - {'histogram_regular'}
        for item in bool_items:
            if item in specified_data:
                check_value_type(item, specified_data.get(item), bool)

        if action:
            result = dict(self._DEFAULT_SPECIFIED_DATA)
            result.update(specified_data)
        else:
            result = specified_data
        return result
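Example #27 only needs `self._DEFAULT_SPECIFIED_DATA` from its enclosing class. The harness below is hypothetical: the class name and default keys are invented for illustration, and it assumes the def from Example #27 has been pasted at module level together with a `check_value_type` helper and `import re`. It shows how user-specified keys are merged over the defaults when `action` is true.

import re  # used by the histogram_regular check in Example #27


class _SpecifiedDataDemo:
    # Hypothetical defaults; the real collector defines its own keys.
    _DEFAULT_SPECIFIED_DATA = {
        'collect_metric': True,
        'collect_input_data': True,
        'histogram_regular': None,
    }

    # Reuse the function from Example #27 as a method.
    _process_specified_data = _process_specified_data


demo = _SpecifiedDataDemo()
merged = demo._process_specified_data({'histogram_regular': r'^conv'}, action=True)
print(merged)   # {'collect_metric': True, 'collect_input_data': True, 'histogram_regular': '^conv'}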
Example #28
 def _check_evaluate_param(self, explainer, inputs, targets, saliency):
     """Check the evaluate parameters."""
     check_value_type('explainer', explainer, Attribution)
     self._record_explainer(explainer)
     verify_argument(inputs, 'inputs')
     output = explainer.network(inputs)
     check_value_type("output of explainer model", output, Tensor)
     output_dim = output.shape[1]
     if output_dim != self._num_labels:
         raise ValueError("The output dimension of of black-box model in explainer does not match the dimension "
                          "of num_labels set in the __init__, please check explainer and num_labels again.")
     verify_targets(targets, self._num_labels)
     check_value_type('saliency', saliency, (Tensor, type(None)))
Example #29
 def _check_evaluate_param(self, explainer, inputs, targets, saliency):
     """Check the evaluate parameters."""
     check_value_type('explainer', explainer, Attribution)
     verify_argument(inputs, 'inputs')
     output = explainer.model(inputs)
     check_value_type("output of explainer model", output, Tensor)
     output_dim = output.shape[1]
     if output_dim > self._num_labels:
         raise ValueError(
             "The output dimension of of black-box model in explainer should not exceed the dimension "
             "of num_labels set in the __init__, please set num_labels larger."
         )
     verify_targets(targets, self._num_labels)
     check_value_type('saliency', saliency, (Tensor, type(None)))
Example #30
    def _check_custom_lineage_data(custom_lineage_data):
        """
        Check user custom lineage data.

        Args:
            custom_lineage_data (dict): The user custom defined data.

        Raises:
            TypeError: If the type of parameters is invalid.
        """
        if custom_lineage_data is None:
            return

        check_value_type('custom_lineage_data', custom_lineage_data, [dict, type(None)])
        for key, value in custom_lineage_data.items():
            check_value_type(f'custom_lineage_data -> {key}', key, str)
            check_value_type(f'the value of custom_lineage_data -> {key}', value, (int, str, float))