Example #1
    def __createTransposedView(self):
        """Create a new view of the stack, transposed so that the axis
        browsed in the viewer becomes the first dimension.
        """
        assert self._stack is not None
        assert 0 <= self._perspective < 3

        # ensure we have the stack encapsulated in an array-like object
        # having a transpose() method
        if isinstance(self._stack, numpy.ndarray):
            self.__transposed_view = self._stack

        elif is_dataset(self._stack) or isinstance(self._stack, DatasetView):
            self.__transposed_view = DatasetView(self._stack)

        elif isinstance(self._stack, ListOfImages):
            self.__transposed_view = ListOfImages(self._stack)

        # transpose the array-like object if necessary
        if self._perspective == 1:
            self.__transposed_view = self.__transposed_view.transpose((1, 0, 2))
        elif self._perspective == 2:
            self.__transposed_view = self.__transposed_view.transpose((2, 0, 1))

        self._browser.setRange(0, self.__transposed_view.shape[0] - 1)
        self._browser.setValue(0)
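
The transpose orders used above are easy to verify with plain numpy; a minimal sketch (no silx needed, the shapes are arbitrary):

import numpy

stack = numpy.arange(2 * 3 * 4).reshape(2, 3, 4)      # hypothetical 3D stack
assert stack.transpose((1, 0, 2)).shape == (3, 2, 4)  # perspective == 1
assert stack.transpose((2, 0, 1)).shape == (4, 2, 3)  # perspective == 2
# perspective == 0 keeps the stack as-is: frames are browsed along axis 0
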
Example #2
    def errors(self):
        """Return errors (uncertainties) associated with the signal values.

        :return: Dataset with errors, or None
        """
        if not self.is_valid:
            raise InvalidNXdataError("Unable to parse invalid NXdata")

        # an "errors" dataset takes precedence over "<signal_name>_errors"
        signal_errors = self.signal_dataset_name + "_errors"
        if "errors" in self.group and is_dataset(self.group["errors"]):
            errors = "errors"
        elif signal_errors in self.group and is_dataset(self.group[signal_errors]):
            errors = signal_errors
        else:
            return None
        return self.group[errors]
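
For reference, a minimal sketch of an NXdata layout this method can resolve; the file and dataset names are made up:

import h5py
import numpy

with h5py.File("example.h5", "w") as h5f:
    nxdata = h5f.create_group("entry/data")
    nxdata.attrs["NX_class"] = "NXdata"
    nxdata.attrs["signal"] = "counts"
    nxdata["counts"] = numpy.array([3.0, 5.0, 4.0])
    # either an "errors" dataset or "counts_errors" would be found
    nxdata["counts_errors"] = numpy.sqrt(nxdata["counts"][()])
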
Example #3
def _are_auxiliary_signals_valid(group, signal_name, auxiliary_signals_names):
    """Check data dimensionality and size. Return False if invalid."""
    for asn in auxiliary_signals_names:
        if asn not in group or not is_dataset(group[asn]):
            _nxdata_warning("Cannot find auxiliary signal dataset '%s'" % asn,
                            group.name)
            return False
        if group[signal_name].shape != group[asn].shape:
            _nxdata_warning(
                "Auxiliary signal dataset '%s' does not have the same "
                "shape as the main signal." % asn, group.name)
            return False
    return True
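
A plain numpy sketch of the kind of shape mismatch this check rejects:

import numpy

signal = numpy.zeros((10, 20))
aux = numpy.zeros((10, 21))       # last dimension differs from the signal
assert signal.shape != aux.shape  # -> warning logged, function returns False
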
Example #4
    def auxiliary_signals_dataset_names(self):
        """Sorted list of names of the auxiliary signals datasets.

        These are the names provided by the *@auxiliary_signals* attribute
        on the NXdata group.

        In case the NXdata group does not specify a *@signal* attribute
        but has a dataset with an attribute *@signal=1*,
        we look for datasets with attributes *@signal=2, @signal=3...*
        (deprecated NXdata specification)."""
        if not self.is_valid:
            raise InvalidNXdataError("Unable to parse invalid NXdata")
        signal_dataset_name = get_attr_as_unicode(self.group, "signal")
        if signal_dataset_name is not None:
            auxiliary_signals_names = get_attr_as_unicode(
                self.group, "auxiliary_signals")
            if auxiliary_signals_names is not None:
                if not isinstance(auxiliary_signals_names,
                                  (tuple, list, numpy.ndarray)):
                    # tolerate a single string, but coerce into a list
                    return [auxiliary_signals_names]
                return list(auxiliary_signals_names)
            return []

        # try old spec, @signal=1 (2, 3...) on dataset
        numbered_names = []
        for dsname in self.group:
            if dsname == self.signal_dataset_name:
                # main signal, not auxiliary
                continue
            ds = self.group[dsname]
            signal_attr = ds.attrs.get("signal")
            if signal_attr is not None and not is_dataset(ds):
                nxdata_logger.warning(
                    "Item %s with @signal=%s is not a dataset (%s)", dsname,
                    signal_attr, type(ds))
                continue
            if signal_attr is not None:
                try:
                    signal_number = int(signal_attr)
                except (ValueError, TypeError):
                    nxdata_logger.warning(
                        "Could not parse attr @signal=%s on "
                        "dataset %s as an int", signal_attr, dsname)
                    continue
                numbered_names.append((signal_number, dsname))
        return [a[1] for a in sorted(numbered_names)]
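
A sketch of the deprecated numbered-signal layout handled by the fallback branch; all names here are hypothetical:

import h5py
import numpy

with h5py.File("old_spec.h5", "w") as h5f:
    grp = h5f.create_group("data")
    grp.attrs["NX_class"] = "NXdata"
    for number, name in enumerate(("counts", "monitor", "transmission"), 1):
        grp[name] = numpy.zeros(10)
        grp[name].attrs["signal"] = number
# "counts" (@signal=1) is the main signal; the method would return
# ["monitor", "transmission"], sorted by their @signal numbers.
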
Example #5
    def _loadFromHdf5(filename):
        """Load a mask array from a HDF5 file.

        :param str filename: name of an existing HDF5 file
        :returns: A mask as a numpy array, or None if the interactive dialog
            was cancelled
        """
        dataPath = _selectDataset(filename, mode=DatasetDialog.LoadMode)
        if dataPath is None:
            return None

        with h5py.File(filename, "r") as h5f:
            dataset = h5f.get(dataPath)
            if not is_dataset(dataset):
                raise IOError("%s is not a dataset" % dataPath)
            mask = dataset[()]
        return mask
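
The same load pattern without the interactive dialog, as a sketch; the file name and the data path "/entry/mask" are placeholders:

import h5py
from silx.io.utils import is_dataset  # assumed origin of is_dataset

with h5py.File("masks.h5", "r") as h5f:
    dataset = h5f.get("/entry/mask")
    if not is_dataset(dataset):
        raise IOError("/entry/mask is not a dataset")
    mask = dataset[()]
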
Example #6
    def title(self):
        """Plot title. If not found, returns an empty string.

        This attribute does not appear in the NXdata specification, but it is
        implemented in *nexpy* as a dataset named "title" inside the NXdata
        group. This dataset is expected to contain text.

        Because the *nexpy* approach could cause a conflict if the signal
        dataset or an axis dataset happened to be called "title", we also
        support providing the title as an attribute of the NXdata group.
        """
        title = self.group.get("title")
        data_dataset_names = [self.signal_name] + self.axes_dataset_names
        if (title is not None and is_dataset(title)
                and "title" not in data_dataset_names):
            return str(title[()])

        title = self.group.attrs.get("title")
        if title is None:
            return ""
        return str(title)
Example #7
    def title(self):
        """Plot title. If not found, returns an empty string.

        This attribute does not appear in the NXdata specification, but it is
        implemented in *nexpy* as a dataset named "title" inside the NXdata
        group. This dataset is expected to contain text.

        Because the *nexpy* approach could cause a conflict if the signal
        dataset or an axis dataset happened to be called "title", we also
        support providing the title as an attribute of the NXdata group.
        """
        if not self.is_valid:
            raise InvalidNXdataError("Unable to parse invalid NXdata")

        title = self.group.get("title")
        data_dataset_names = [self.signal_name] + self.axes_dataset_names
        if (title is not None and is_dataset(title) and
                "title" not in data_dataset_names):
            return str(title[()])

        title = self.group.attrs.get("title")
        if title is None:
            return ""
        return str(title)
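
Both title conventions side by side, as a sketch with a hypothetical file:

import h5py

with h5py.File("titled.h5", "w") as h5f:
    grp = h5f.create_group("data")
    grp["title"] = "my plot title"        # nexpy convention: a text dataset
    grp.attrs["title"] = "my plot title"  # attribute variant, also supported
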
Example #8
    def get_axis_errors(self, axis_name):
        """Return errors (uncertainties) associated with an axis.

        If the axis has attributes @first_good or @last_good, the output
        is trimmed accordingly (a numpy array will be returned rather than a
        dataset).

        :param str axis_name: Name of axis dataset. This dataset **must exist**.
        :return: Dataset with axis errors, or None
        :raise KeyError: if this group does not contain a dataset named axis_name
        """
        if not self.is_valid:
            raise InvalidNXdataError("Unable to parse invalid NXdata")

        # ensure axis_name is decoded, before comparing it with decoded attributes
        if hasattr(axis_name, "decode"):
            axis_name = axis_name.decode("utf-8")
        if axis_name not in self.group:
            # tolerate axis_name given as @long_name
            for item in self.group:
                long_name = get_attr_as_unicode(self.group[item], "long_name")
                if long_name is not None and long_name == axis_name:
                    axis_name = item
                    break

        if axis_name not in self.group:
            raise KeyError("group does not contain a dataset named '%s'" % axis_name)

        len_axis = len(self.group[axis_name])

        fg_idx = self.group[axis_name].attrs.get("first_good", 0)
        lg_idx = self.group[axis_name].attrs.get("last_good", len_axis - 1)

        # case of axisname_errors dataset present
        errors_name = axis_name + "_errors"
        if errors_name in self.group and is_dataset(self.group[errors_name]):
            if fg_idx != 0 or lg_idx != (len_axis - 1):
                return self.group[errors_name][fg_idx:lg_idx + 1]
            else:
                return self.group[errors_name]
        # case of uncertainties dataset name provided in @uncertainties
        uncertainties_names = get_attr_as_unicode(self.group, "uncertainties")
        if uncertainties_names is None:
            uncertainties_names = get_attr_as_unicode(self.signal, "uncertainties")
        if isinstance(uncertainties_names, six.text_type):
            uncertainties_names = [uncertainties_names]
        if uncertainties_names is not None:
            # take the uncertainty with the same index as the axis in @axes
            axes_ds_names = get_attr_as_unicode(self.group, "axes")
            if axes_ds_names is None:
                axes_ds_names = get_attr_as_unicode(self.signal, "axes")
            if isinstance(axes_ds_names, six.text_type):
                axes_ds_names = [axes_ds_names]
            elif isinstance(axes_ds_names, numpy.ndarray):
                # transform numpy.ndarray into list
                axes_ds_names = list(axes_ds_names)
            assert isinstance(axes_ds_names, list)
            if hasattr(axes_ds_names[0], "decode"):
                axes_ds_names = [ax_name.decode("utf-8") for ax_name in axes_ds_names]
            if axis_name not in axes_ds_names:
                raise KeyError("group attr @axes does not mention a dataset " +
                               "named '%s'" % axis_name)
            errors = self.group[uncertainties_names[list(axes_ds_names).index(axis_name)]]
            if fg_idx == 0 and lg_idx == (len_axis - 1):
                return errors      # dataset
            else:
                return errors[fg_idx:lg_idx + 1]    # numpy array
        return None
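
The @first_good/@last_good trimming reduces to a simple slice; a plain numpy sketch with made-up indices:

import numpy

errors = numpy.array([0.1, 0.2, 0.3, 0.4, 0.5])
fg_idx, lg_idx = 1, 3                # hypothetical attribute values
trimmed = errors[fg_idx:lg_idx + 1]  # -> array([0.2, 0.3, 0.4])
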
Example #9
    def axes_dataset_names(self):
        """List of axes dataset names.

        If an axis dataset applies to several dimensions of the signal, its
        name will be repeated in the list.

        If a dimension of the signal has no dimension scale (i.e. there is a
        "." in that position in the *@axes* array), `None` is inserted in the
        output list in its position.
        """
        if not self.is_valid:
            raise InvalidNXdataError("Unable to parse invalid NXdata")

        numbered_names = []     # used in case of @axis=0 (old spec)
        axes_dataset_names = get_attr_as_unicode(self.group, "axes")
        if axes_dataset_names is None:
            # try @axes on signal dataset (older NXdata specification)
            axes_dataset_names = get_attr_as_unicode(self.signal, "axes")
            if axes_dataset_names is not None:
                # we expect a colon-separated string
                if hasattr(axes_dataset_names, "split"):
                    axes_dataset_names = axes_dataset_names.split(":")
            else:
                # try @axis on the individual datasets (oldest NXdata specification)
                for dsname in self.group:
                    if not is_dataset(self.group[dsname]):
                        continue
                    axis_attr = self.group[dsname].attrs.get("axis")
                    if axis_attr is not None:
                        try:
                            axis_num = int(axis_attr)
                        except (ValueError, TypeError):
                            nxdata_logger.warning("Could not interpret attr @axis as"
                                                  "int on dataset %s", dsname)
                            continue
                        numbered_names.append((axis_num, dsname))

        ndims = len(self.signal.shape)
        if axes_dataset_names is None:
            if numbered_names:
                axes_dataset_names = []
                numbers = [a[0] for a in numbered_names]
                names = [a[1] for a in numbered_names]
                for i in range(ndims):
                    if i in numbers:
                        axes_dataset_names.append(names[numbers.index(i)])
                    else:
                        axes_dataset_names.append(None)
                return axes_dataset_names
            else:
                return [None] * ndims

        if isinstance(axes_dataset_names, (six.text_type, six.binary_type)):
            axes_dataset_names = [axes_dataset_names]

        for i, axis_name in enumerate(axes_dataset_names):
            if hasattr(axis_name, "decode"):
                axis_name = axis_name.decode()
            if axis_name == ".":
                axes_dataset_names[i] = None

        if len(axes_dataset_names) != ndims:
            if self.is_scatter and ndims == 1:
                # case of a 1D signal with arbitrary number of axes
                return list(axes_dataset_names)
            if self.interpretation != "rgba-image":
                # @axes may only define 1 or 2 axes if @interpretation=spectrum/image.
                # Use the existing names for the last few dims, and prepend with Nones.
                assert len(axes_dataset_names) == INTERPDIM[self.interpretation]
                all_dimensions_names = [None] * (ndims - INTERPDIM[self.interpretation])
                for axis_name in axes_dataset_names:
                    all_dimensions_names.append(axis_name)
            else:
                # 2 axes applying to the first two dimensions.
                # The 3rd signal dimension is expected to contain 3(4) RGB(A) values.
                assert len(axes_dataset_names) == 2
                all_dimensions_names = [axn for axn in axes_dataset_names]
                all_dimensions_names.append(None)
            return all_dimensions_names

        return list(axes_dataset_names)
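
How "." placeholders in @axes map to None, reduced to its core (a sketch):

axes_attr = [".", "x"]  # hypothetical @axes value for a 2D signal
names = [None if name == "." else name for name in axes_attr]
assert names == [None, "x"]  # the first dimension has no dimension scale
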
Example #10
    def _validate(self):
        """Fill :attr:`issues` with error messages for each error found."""
        if not is_group(self.group):
            raise TypeError("group must be a h5py-like group")
        if get_attr_as_unicode(self.group, "NX_class") != "NXdata":
            self.issues.append("Group has no attribute @NX_class='NXdata'")

        signal_name = get_signal_name(self.group)
        if signal_name is None:
            self.issues.append("No @signal attribute on the NXdata group, "
                               "and no dataset with a @signal=1 attr found")
            # very difficult to do more consistency tests without signal
            return

        elif signal_name not in self.group or not is_dataset(self.group[signal_name]):
            self.issues.append("Cannot find signal dataset '%s'" % signal_name)
            return

        auxiliary_signals_names = get_auxiliary_signals_names(self.group)
        self.issues += validate_auxiliary_signals(self.group,
                                                  signal_name,
                                                  auxiliary_signals_names)

        if "axes" in self.group.attrs:
            axes_names = get_attr_as_unicode(self.group, "axes")
            if isinstance(axes_names, (six.text_type, six.binary_type)):
                axes_names = [axes_names]

            self.issues += validate_number_of_axes(self.group, signal_name,
                                                   num_axes=len(axes_names))

            # Test consistency of @uncertainties
            uncertainties_names = get_uncertainties_names(self.group, signal_name)
            if uncertainties_names is not None:
                if len(uncertainties_names) != len(axes_names):
                    self.issues.append("@uncertainties does not define the same " +
                                       "number of fields than @axes")

            # Test individual axes
            is_scatter = True  # true if all axes have the same size as the signal
            signal_size = 1
            for dim in self.group[signal_name].shape:
                signal_size *= dim
            polynomial_axes_names = []
            for i, axis_name in enumerate(axes_names):

                if axis_name == ".":
                    continue
                if axis_name not in self.group or not is_dataset(self.group[axis_name]):
                    self.issues.append("Could not find axis dataset '%s'" % axis_name)
                    continue

                axis_size = 1
                for dim in self.group[axis_name].shape:
                    axis_size *= dim

                if len(self.group[axis_name].shape) != 1:
                    # I don't know how to interpret n-D axes
                    self.issues.append("Axis %s is not 1D" % axis_name)
                    continue
                else:
                    # for a 1-D axis:
                    fg_idx = self.group[axis_name].attrs.get("first_good", 0)
                    lg_idx = self.group[axis_name].attrs.get("last_good", len(self.group[axis_name]) - 1)
                    axis_len = lg_idx + 1 - fg_idx

                if axis_len != signal_size:
                    if axis_len not in self.group[signal_name].shape + (1, 2):
                        self.issues.append(
                                "Axis %s number of elements does not " % axis_name +
                                "correspond to the length of any signal dimension,"
                                " it does not appear to be a constant or a linear calibration," +
                                " and this does not seem to be a scatter plot.")
                        continue
                    elif axis_len in (1, 2):
                        polynomial_axes_names.append(axis_name)
                    is_scatter = False
                else:
                    if not is_scatter:
                        self.issues.append(
                                "Axis %s number of elements is equal " % axis_name +
                                "to the length of the signal, but this does not seem" +
                                " to be a scatter (other axes have different sizes)")
                        continue

                # Test individual uncertainties
                errors_name = axis_name + "_errors"
                if errors_name not in self.group and uncertainties_names is not None:
                    errors_name = uncertainties_names[i]
                    if errors_name in self.group and axis_name not in polynomial_axes_names:
                        if self.group[errors_name].shape != self.group[axis_name].shape:
                            self.issues.append(
                                    "Errors '%s' does not have the same " % errors_name +
                                    "dimensions as axis '%s'." % axis_name)

        # test dimensions of errors associated with signal
        if "errors" in self.group and is_dataset(self.group["errors"]):
            if self.group["errors"].shape != self.group[signal_name].shape:
                self.issues.append(
                        "Dataset containing standard deviations must " +
                        "have the same dimensions as the signal.")
Example #11
    def setColormap(self, colormap=None, normalization=None,
                    autoscale=None, vmin=None, vmax=None, colors=None):
        """Set the colormap and update active image.

        Parameters that are not provided are taken from the current colormap.

        The colormap parameter can also be a dict with the following keys:

        - *name*: string. The colormap to use:
          'gray', 'reversed gray', 'temperature', 'red', 'green', 'blue'.
        - *normalization*: string. The mapping to use for the colormap:
          either 'linear' or 'log'.
        - *autoscale*: bool. Whether to use autoscale (True) or range
          provided by keys
          'vmin' and 'vmax' (False).
        - *vmin*: float. The minimum value of the range to use if 'autoscale'
          is False.
        - *vmax*: float. The maximum value of the range to use if 'autoscale'
          is False.
        - *colors*: optional. Nx3 or Nx4 array of float in [0, 1] or uint8.
                    List of RGB or RGBA colors to use (only if name is None)

        :param colormap: Name of the colormap in
            'gray', 'reversed gray', 'temperature', 'red', 'green', 'blue'.
            Or a :class:`.Colormap` object.
        :type colormap: dict, str or Colormap.
        :param str normalization: Colormap mapping: 'linear' or 'log'.
        :param bool autoscale: Whether to use autoscale or [vmin, vmax] range.
            Default value of autoscale is False. This option is not compatible
            with h5py datasets.
        :param float vmin: The minimum value of the range to use if
                           'autoscale' is False.
        :param float vmax: The maximum value of the range to use if
                           'autoscale' is False.
        :param numpy.ndarray colors: Only used if name is None.
            Custom colormap colors as Nx3 or Nx4 RGB or RGBA arrays
        """
        # colormap provided as a Colormap object or as a dictionary
        if isinstance(colormap, (Colormap, dict)):
            # Support colormap parameter as a dict
            errmsg = "If colormap is provided as a Colormap object, all other parameters"
            errmsg += " must not be specified when calling setColormap"
            assert normalization is None, errmsg
            assert autoscale is None, errmsg
            assert vmin is None, errmsg
            assert vmax is None, errmsg
            assert colors is None, errmsg

            if isinstance(colormap, dict):
                reason = 'colormap parameter should now be an object'
                replacement = 'Colormap()'
                since_version = '0.6'
                deprecated_warning(type_='function',
                                   name='setColormap',
                                   reason=reason,
                                   replacement=replacement,
                                   since_version=since_version)
                _colormap = Colormap._fromDict(colormap)
            else:
                _colormap = colormap
        else:
            norm = normalization if normalization is not None else 'linear'
            name = colormap if colormap is not None else 'gray'
            _colormap = Colormap(name=name,
                                 normalization=norm,
                                 vmin=vmin,
                                 vmax=vmax,
                                 colors=colors)

            # Patch: since the colormap is applied to a 3D stack rather than
            # a single 2D image, we have to deal with vmin/vmax manually
            if autoscale is None:
                # set default
                autoscale = False
            elif autoscale and is_dataset(self._stack):
                # h5py dataset has no min()/max() methods
                raise RuntimeError(
                    "Cannot auto-scale colormap for a h5py dataset")
            self.__autoscaleCmap = autoscale

            if autoscale and (self._stack is not None):
                _vmin, _vmax = _colormap.getColormapRange(data=self._stack)
                _colormap.setVRange(vmin=_vmin, vmax=_vmax)
            else:
                if vmin is None and self._stack is not None:
                    _colormap.setVMin(self._stack.min())
                else:
                    _colormap.setVMin(vmin)
                if vmax is None and self._stack is not None:
                    _colormap.setVMax(self._stack.max())
                else:
                    _colormap.setVMax(vmax)

        cursorColor = cursorColorForColormap(_colormap.getName())
        self._plot.setInteractiveMode('zoom', color=cursorColor)

        self._plot.setDefaultColormap(_colormap)

        # Update active image colormap
        activeImage = self._plot.getActiveImage()
        if isinstance(activeImage, items.ColormapMixIn):
            activeImage.setColormap(self.getColormap())
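
Typical calls, as a sketch; `view` stands for the stack-view instance, and the Colormap import path may differ between silx versions:

view.setColormap("temperature", normalization="log", vmin=0.1, vmax=1000.)

# preferred since silx 0.6: pass a Colormap object and nothing else
from silx.gui.colors import Colormap  # assumed import path
view.setColormap(Colormap(name="gray", normalization="linear"))
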
Example #12
    def setStack(self, stack, perspective=None, reset=True, calibrations=None):
        """Set the 3D stack.

        The perspective parameter is used to define which dimension of the 3D
        array is to be used as frame index. The lowest remaining dimension
        number is the row index of the displayed image (Y axis), and the highest
        remaining dimension is the column index (X axis).

        :param stack: 3D stack, or `None` to clear plot.
        :type stack: 3D numpy.ndarray, or 3D h5py.Dataset, or list/tuple of 2D
            numpy arrays, or None.
        :param int perspective: Dimension for the frame index: 0, 1 or 2.
            Use ``None`` to keep the current perspective (default).
        :param bool reset: Whether to reset zoom or not.
        :param calibrations: Sequence of 3 calibration objects for each axis.
            These objects can be a subclass of :class:`AbstractCalibration`,
            or 2-tuples *(a, b)* where *a* is the y-intercept and *b* is the
            slope of a linear calibration (:math:`x \mapsto a + b x`)
        """
        if stack is None:
            self.clear()
            self.sigStackChanged.emit(0)
            return

        self._set3DScaleAndOrigin(calibrations)

        # stack as list of 2D arrays: must be converted into an array_like
        if not isinstance(stack, numpy.ndarray):
            if not is_dataset(stack):
                try:
                    assert hasattr(stack, "__len__")
                    for img in stack:
                        assert hasattr(img, "shape")
                        assert len(img.shape) == 2
                except AssertionError:
                    raise ValueError(
                        "Stack must be a 3D array/dataset or a list of " +
                        "2D arrays.")
                stack = ListOfImages(stack)

        assert len(stack.shape) == 3, "data must be 3D"

        self._stack = stack
        self.__createTransposedView()

        perspective_changed = False
        if perspective not in [None, self._perspective]:
            perspective_changed = True
            self.setPerspective(perspective)

        # This call to setColormap redefines the meaning of autoscale
        # for 3D volume: take global min/max rather than frame min/max
        if self.__autoscaleCmap:
            self.setColormap(autoscale=True)

        # init plot
        self._plot.addImage(self.__transposed_view[0, :, :],
                            legend=self.__imageLegend,
                            colormap=self.getColormap(),
                            origin=self._getImageOrigin(),
                            scale=self._getImageScale(),
                            replace=True,
                            resetzoom=False)
        self._plot.setActiveImage(self.__imageLegend)
        self.__updatePlotLabels()
        self._updateTitle()

        if reset:
            self._plot.resetZoom()

        # enable and init browser
        self._browser.setEnabled(True)

        if not perspective_changed:    # avoid double signal (see self.setPerspective)
            self.sigStackChanged.emit(stack.size)
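
A sketch of setting a stack with linear calibrations, where each 2-tuple (a, b) describes a + b*x per the docstring; `view` is assumed to be the widget instance:

import numpy

stack = numpy.random.random((50, 100, 200))  # arbitrary 3D data
view.setStack(stack, perspective=0,
              calibrations=[(0., 1.), (-10., 0.5), (0., 0.1)])
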
Example #13
def is_valid_nxdata(group):  # noqa
    """Check if a h5py group is a **valid** NX_data group.

    If the group does not have the attribute *@NX_class=NXdata*, this
    function simply returns *False*.

    Otherwise, if the group is malformed, warning messages are logged to
    help troubleshooting before *False* is returned.

    :param group: h5py-like group
    :return: True if this NXdata group is valid.
    :raise TypeError: if group is not a h5py group, a spech5 group,
        or a fabioh5 group
    """
    if not is_group(group):
        raise TypeError("group must be a h5py-like group")
    if get_attr_as_unicode(group, "NX_class") != "NXdata":
        return False

    signal_name = _get_signal_name(group)
    if signal_name is None:
        _nxdata_warning(
            "No @signal attribute on the NXdata group, "
            "and no dataset with a @signal=1 attr found", group.name)
        return False

    if signal_name not in group or not is_dataset(group[signal_name]):
        _nxdata_warning("Cannot find signal dataset '%s'" % signal_name,
                        group.name)
        return False

    auxiliary_signals_names = _get_auxiliary_signals_names(group)
    if not _are_auxiliary_signals_valid(group, signal_name,
                                        auxiliary_signals_names):
        return False

    if "axes" in group.attrs:
        axes_names = get_attr_as_unicode(group, "axes")
        if isinstance(axes_names, (six.text_type, six.binary_type)):
            axes_names = [axes_names]

        if not _has_valid_number_of_axes(
                group, signal_name, num_axes=len(axes_names)):
            return False

        # Test consistency of @uncertainties
        uncertainties_names = _get_uncertainties_names(group, signal_name)
        if uncertainties_names is not None:
            if len(uncertainties_names) != len(axes_names):
                _nxdata_warning(
                    "@uncertainties does not define the same "
                    "number of fields as @axes", group.name)
                return False

        # Test individual axes
        is_scatter = True  # true if all axes have the same size as the signal
        signal_size = 1
        for dim in group[signal_name].shape:
            signal_size *= dim
        polynomial_axes_names = []
        for i, axis_name in enumerate(axes_names):

            if axis_name == ".":
                continue
            if axis_name not in group or not is_dataset(group[axis_name]):
                _nxdata_warning("Could not find axis dataset '%s'" % axis_name,
                                group.name)
                return False

            axis_size = 1
            for dim in group[axis_name].shape:
                axis_size *= dim

            if len(group[axis_name].shape) != 1:
                # an n-D axis only makes sense if its total size is exactly
                # the signal's size (an unusual n-D scatter)
                if axis_size != signal_size:
                    _nxdata_warning(
                        "Axis %s is not a 1D dataset" % axis_name +
                        " and its shape does not match the signal's shape",
                        group.name)
                    return False
                axis_len = axis_size
            else:
                # for a 1-D axis:
                fg_idx = group[axis_name].attrs.get("first_good", 0)
                lg_idx = group[axis_name].attrs.get("last_good",
                                                    len(group[axis_name]) - 1)
                axis_len = lg_idx + 1 - fg_idx

            if axis_len != signal_size:
                if axis_len not in group[signal_name].shape + (1, 2):
                    _nxdata_warning(
                        "Axis %s number of elements does not " % axis_name +
                        "correspond to the length of any signal dimension,"
                        " it does not appear to be a constant or a linear calibration,"
                        + " and this does not seem to be a scatter plot.",
                        group.name)
                    return False
                elif axis_len in (1, 2):
                    polynomial_axes_names.append(axis_name)
                is_scatter = False
            else:
                if not is_scatter:
                    _nxdata_warning(
                        "Axis %s number of elements is equal " % axis_name +
                        "to the length of the signal, but this does not seem" +
                        " to be a scatter (other axes have different sizes)",
                        group.name)
                    return False

            # Test individual uncertainties
            errors_name = axis_name + "_errors"
            if errors_name not in group and uncertainties_names is not None:
                errors_name = uncertainties_names[i]
                if errors_name in group and axis_name not in polynomial_axes_names:
                    if group[errors_name].shape != group[axis_name].shape:
                        _nxdata_warning(
                            "Errors '%s' does not have the same " % errors_name
                            + "dimensions as axis '%s'." % axis_name,
                            group.name)
                        return False

    # test dimensions of errors associated with signal
    if "errors" in group and is_dataset(group["errors"]):
        if group["errors"].shape != group[signal_name].shape:
            _nxdata_warning(
                "Dataset containing standard deviations must " +
                "have the same dimensions as the signal.", group.name)
            return False
    return True
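
A sketch of guarding NXdata parsing with this validator; the file name and group path are placeholders:

import h5py

with h5py.File("scan.h5", "r") as h5f:
    group = h5f["/entry/data"]
    if is_valid_nxdata(group):
        signal = group[group.attrs["signal"]][()]  # safe to read the signal
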
Example #14
    def _validate(self):
        """Fill :attr:`issues` with error messages for each error found."""
        if not is_group(self.group):
            raise TypeError("group must be a h5py-like group")
        if get_attr_as_unicode(self.group, "NX_class") != "NXdata":
            self.issues.append("Group has no attribute @NX_class='NXdata'")
            return

        signal_name = get_signal_name(self.group)
        if signal_name is None:
            self.issues.append("No @signal attribute on the NXdata group, "
                               "and no dataset with a @signal=1 attr found")
            # very difficult to do more consistency tests without signal
            return

        elif signal_name not in self.group or not is_dataset(
                self.group[signal_name]):
            self.issues.append("Cannot find signal dataset '%s'" % signal_name)
            return

        auxiliary_signals_names = get_auxiliary_signals_names(self.group)
        self.issues += validate_auxiliary_signals(self.group, signal_name,
                                                  auxiliary_signals_names)

        if "axes" in self.group.attrs:
            axes_names = get_attr_as_unicode(self.group, "axes")
            if isinstance(axes_names, (six.text_type, six.binary_type)):
                axes_names = [axes_names]

            self.issues += validate_number_of_axes(self.group,
                                                   signal_name,
                                                   num_axes=len(axes_names))

            # Test consistency of @uncertainties
            uncertainties_names = get_uncertainties_names(
                self.group, signal_name)
            if uncertainties_names is not None:
                if len(uncertainties_names) != len(axes_names):
                    if len(uncertainties_names) < len(axes_names):
                        # ignore the field to avoid index errors in the axes loop
                        uncertainties_names = None
                        self.issues.append(
                            "@uncertainties does not define the same "
                            "number of fields as @axes. Field ignored")
                    else:
                        self.issues.append(
                            "@uncertainties does not define the same "
                            "number of fields as @axes")

            # Test individual axes
            is_scatter = True  # true if all axes have the same size as the signal
            signal_size = 1
            for dim in self.group[signal_name].shape:
                signal_size *= dim
            polynomial_axes_names = []
            for i, axis_name in enumerate(axes_names):

                if axis_name == ".":
                    continue
                if axis_name not in self.group or not is_dataset(
                        self.group[axis_name]):
                    self.issues.append("Could not find axis dataset '%s'" %
                                       axis_name)
                    continue

                axis_size = 1
                for dim in self.group[axis_name].shape:
                    axis_size *= dim

                if len(self.group[axis_name].shape) != 1:
                    # I don't know how to interpret n-D axes
                    self.issues.append("Axis %s is not 1D" % axis_name)
                    continue
                else:
                    # for a 1-D axis:
                    fg_idx = self.group[axis_name].attrs.get("first_good", 0)
                    lg_idx = self.group[axis_name].attrs.get(
                        "last_good",
                        len(self.group[axis_name]) - 1)
                    axis_len = lg_idx + 1 - fg_idx

                if axis_len != signal_size:
                    if axis_len not in self.group[signal_name].shape + (1, 2):
                        self.issues.append(
                            "Axis %s number of elements does not correspond "
                            "to the length of any signal dimension, it does "
                            "not appear to be a constant or a linear "
                            "calibration, and this does not seem to be a "
                            "scatter plot." % axis_name)
                        continue
                    elif axis_len in (1, 2):
                        polynomial_axes_names.append(axis_name)
                    is_scatter = False
                else:
                    if not is_scatter:
                        self.issues.append(
                            "Axis %s number of elements is equal to the "
                            "length of the signal, but this does not seem "
                            "to be a scatter (other axes have different "
                            "sizes)" % axis_name)
                        continue

                # Test individual uncertainties
                errors_name = axis_name + "_errors"
                if errors_name not in self.group and uncertainties_names is not None:
                    errors_name = uncertainties_names[i]
                    if errors_name in self.group and axis_name not in polynomial_axes_names:
                        if self.group[errors_name].shape != self.group[axis_name].shape:
                            self.issues.append(
                                "Errors '%s' does not have the same "
                                "dimensions as axis '%s'." % (errors_name, axis_name))

        # test dimensions of errors associated with signal

        signal_errors = signal_name + "_errors"
        if "errors" in self.group and is_dataset(self.group["errors"]):
            errors = "errors"
        elif signal_errors in self.group and is_dataset(
                self.group[signal_errors]):
            errors = signal_errors
        else:
            errors = None
        if errors:
            if self.group[errors].shape != self.group[signal_name].shape:
                # In principle the same size would be enough, but the
                # NeXus documentation requires the same shape
                self.issues.append(
                    "Dataset containing standard deviations must " +
                    "have the same dimensions as the signal.")