Example #1
    def write_signal(self, signal, group, write_dataset=True, chunks=None,
                     **kwds):
        "Writes a hyperspy signal to a hdf5 group"
        group.attrs.update(get_object_package_info(signal))

        if Version(version) < Version("1.2"):
            metadata = "mapped_parameters"
            original_metadata = "original_parameters"
        else:
            metadata = "metadata"
            original_metadata = "original_metadata"

        for axis in signal.axes_manager._axes:
            axis_dict = axis.get_axis_dictionary()
            group_name = f'axis-{axis.index_in_array}'
            # Delete the existing group in case the file has been opened in
            # 'a' mode and we are saving a different type of axis, to avoid
            # keeping incompatible attributes from a previously saved axis.
            if group_name in group.keys():
                del group[group_name]
            coord_group = group.create_group(group_name)
            self.dict2group(axis_dict, coord_group, **kwds)

        mapped_par = group.require_group(metadata)
        metadata_dict = signal.metadata.as_dictionary()

        if write_dataset:
            self.overwrite_dataset(
                group,
                signal.data,
                'data',
                signal_axes=signal.axes_manager.signal_indices_in_array,
                chunks=chunks,
                **kwds
                )

        if default_version < Version("1.2"):
            metadata_dict["_internal_parameters"] = \
                metadata_dict.pop("_HyperSpy")

        self.dict2group(metadata_dict, mapped_par, **kwds)
        original_par = group.require_group(original_metadata)
        self.dict2group(signal.original_metadata.as_dictionary(),
                        original_par, **kwds)
        learning_results = group.require_group('learning_results')
        self.dict2group(signal.learning_results.__dict__,
                        learning_results, **kwds)

        if hasattr(signal, 'peak_learning_results'):  # pragma: no cover
            peak_learning_results = group.require_group(
                'peak_learning_results')
            self.dict2group(signal.peak_learning_results.__dict__,
                            peak_learning_results, **kwds)

        if len(signal.models):
            model_group = self.file.require_group('Analysis/models')
            self.dict2group(signal.models._models.as_dictionary(),
                            model_group, **kwds)
            for model in model_group.values():
                model.attrs['_signal'] = group.name
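
The method above is internal to HyperSpy's hierarchical writer and is normally reached through the public saving API rather than called directly. A minimal usage sketch of that entry point follows; whether the chunks keyword is forwarded to this writer depends on the installed HyperSpy version, so treat that part as an assumption.

import numpy as np
import hyperspy.api as hs

# Build a small stack of spectra; saving to .hspy drives write_signal()
# internally for each signal group in the file.
signal = hs.signals.Signal1D(np.random.random((4, 100)))
signal.save("example.hspy", overwrite=True)

# Custom chunking of the main dataset, assumed to be passed through to the
# writer as the `chunks` keyword.
signal.save("example_chunked.hspy", overwrite=True, chunks=(1, 100))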
Example #2
    def as_dictionary(self, fullcopy=True):
        """Returns component as a dictionary
        For more information on method and conventions, see
        :meth:`hyperspy.misc.export_dictionary.export_to_dictionary`

        Parameters
        ----------
        fullcopy : bool, optional (default True)
            If True, copies of objects are stored instead of references; any
            functions found are pickled and any signals are converted to
            dictionaries.

        Returns
        -------
        dic : dict
            A dictionary containing at least the following fields:

            parameters : list
                A list of dictionaries of the parameters, one per parameter
                of the component.
            _whitelist : dict
                A dictionary with keys used as references for saved
                attributes; for more information, see
                :meth:`hyperspy.misc.export_dictionary.export_to_dictionary`
            * any field from _whitelist.keys() *
        """
        dic = {
            'parameters': [p.as_dictionary(fullcopy) for p in self.parameters]
        }
        dic.update(get_object_package_info(self))
        export_to_dictionary(self, self._whitelist, dic, fullcopy)
        from hyperspy.model import _COMPONENTS
        if self._id_name not in _COMPONENTS:
            import dill
            dic['_class_dump'] = dill.dumps(self.__class__)
        return dic
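
A short usage sketch for as_dictionary() with a built-in component; the exact set of keys in the returned dictionary depends on the HyperSpy version.

import hyperspy.api as hs

# Any built-in component exposes as_dictionary(); Gaussian is used here only
# as an example.
gaussian = hs.model.components1D.Gaussian(A=2.0, centre=5.0, sigma=0.5)
dic = gaussian.as_dictionary()

print(sorted(dic.keys()))      # contains at least 'parameters' and '_whitelist'
print(len(dic["parameters"]))  # one dictionary per parameter: A, centre, sigma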
Example #3
def write_signal(signal, group, **kwds):
    "Writes a hyperspy signal to a hdf5 group"

    group.attrs.update(get_object_package_info(signal))
    if default_version < LooseVersion("1.2"):
        metadata = "mapped_parameters"
        original_metadata = "original_parameters"
    else:
        metadata = "metadata"
        original_metadata = "original_metadata"

    if 'compression' not in kwds:
        kwds['compression'] = 'gzip'

    for axis in signal.axes_manager._axes:
        axis_dict = axis.get_axis_dictionary()
        coord_group = group.create_group(
            'axis-%s' % axis.index_in_array)
        dict2hdfgroup(axis_dict, coord_group, **kwds)
    mapped_par = group.create_group(metadata)
    metadata_dict = signal.metadata.as_dictionary()
    overwrite_dataset(group, signal.data, 'data',
                      signal_axes=signal.axes_manager.signal_indices_in_array,
                      **kwds)
    if default_version < LooseVersion("1.2"):
        metadata_dict["_internal_parameters"] = \
            metadata_dict.pop("_HyperSpy")
    # Remove 'chunks' from kwds: it was specified for the main dataset and
    # would not have the same rank as the arrays written below, so it cannot
    # be reused.
    kwds.pop('chunks', None)
    dict2hdfgroup(metadata_dict, mapped_par, **kwds)
    original_par = group.create_group(original_metadata)
    dict2hdfgroup(signal.original_metadata.as_dictionary(), original_par,
                  **kwds)
    learning_results = group.create_group('learning_results')
    dict2hdfgroup(signal.learning_results.__dict__,
                  learning_results, **kwds)
    if hasattr(signal, 'peak_learning_results'):
        peak_learning_results = group.create_group(
            'peak_learning_results')
        dict2hdfgroup(signal.peak_learning_results.__dict__,
                      peak_learning_results, **kwds)

    if len(signal.models):
        model_group = group.file.require_group('Analysis/models')
        dict2hdfgroup(signal.models._models.as_dictionary(),
                      model_group, **kwds)
        for model in model_group.values():
            model.attrs['_signal'] = group.name
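
This is the older, module-level form of the writer in Example #1 and is likewise driven by signal.save(). A minimal sketch that inspects the HDF5 layout it produces; the 'Experiments/<title>' group naming follows the .hspy convention and may vary between versions.

import h5py
import numpy as np
import hyperspy.api as hs

signal = hs.signals.Signal2D(np.random.random((8, 8)))
signal.save("layout_demo.hspy", overwrite=True)

# Walk the file to see what write_signal() created: the 'data' dataset plus
# 'axis-N', 'metadata', 'original_metadata' and 'learning_results' subgroups.
with h5py.File("layout_demo.hspy", "r") as f:
    f.visit(print)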