def _parse_sb_size(s, reference, sb_position, sb_size, parallel):
    """Normalise the sideband-size argument for holographic reconstruction.

    Returns ``(sb_size, sb_size_temp)`` where ``sb_size`` is a
    :class:`BaseSignal` (lazy if its data is a dask array) and
    ``sb_size_temp`` is either a plain ``np.float64`` (when the size does
    not vary over navigation space) or a deep copy of the signal, ready to
    be passed to ``map``.
    """
    # Default value is 1/2 distance between sideband and central band
    if sb_size is None:
        if reference is None:
            sb_size = s.estimate_sideband_size(
                sb_position, parallel=parallel)
        else:
            sb_size = reference.estimate_sideband_size(
                sb_position, parallel=parallel)
    else:
        if not isinstance(sb_size, BaseSignal):
            if isinstance(sb_size, (np.ndarray, daArray)) and sb_size.size > 1:
                # transpose if np.array of multiple instances
                sb_size = BaseSignal(sb_size).T
            else:
                sb_size = BaseSignal(sb_size)
        if isinstance(sb_size.data, daArray):
            sb_size = sb_size.as_lazy()
    if sb_size.axes_manager.navigation_size != s.axes_manager.navigation_size:
        if sb_size.axes_manager.navigation_size:
            raise ValueError('Sideband size dimensions do not match '
                             'neither reference nor hologram dimensions.')
        # sb_position navdim=0, therefore map function should not iterate:
        else:
            sb_size_temp = np.float64(sb_size.data)
    else:
        sb_size_temp = sb_size.deepcopy()
    return sb_size, sb_size_temp
def test_axes_configuration_binning(tmp_path, file):
    """The ``is_binned`` flag of a signal axis must survive a save/load
    round trip."""
    path = tmp_path / file
    signal = BaseSignal(np.zeros((2, 2, 2)))
    signal.axes_manager.signal_axes[-1].is_binned = True
    signal.save(path)
    reloaded = load(path)
    assert reloaded.axes_manager.signal_axes[-1].is_binned
def test_lazy_metadata_arrays(tmpfilepath):
    """A lazily loaded signal that keeps an HDF5 handle open cannot be
    deep-copied."""
    signal = BaseSignal([1, 2, 3])
    signal.metadata.array = np.arange(10.)
    signal.save(tmpfilepath)
    lazy_signal = load(tmpfilepath + ".hspy", lazy=True)
    # Can't deepcopy open hdf5 file handles
    with pytest.raises(TypeError):
        lazy_signal.deepcopy()
    del lazy_signal
def test_hdf5_extension(tmpfilepath):
    """With the hspy-extension preference disabled, saving falls back to
    the ``.hdf5`` suffix."""
    try:
        saved_preference = preferences.General.hspy_extension
        preferences.General.hspy_extension = False
        signal = BaseSignal([0])
        signal.save(tmpfilepath)
        assert os.path.exists(tmpfilepath + ".hdf5")
    finally:
        # Restore the user's preference whatever happened above.
        preferences.General.hspy_extension = saved_preference
def test_save_ragged_array(tmp_path):
    """A ragged object array must survive a save/load round trip with its
    class preserved."""
    short_row = np.array([0, 1])
    long_row = np.array([0, 1, 2])
    original = BaseSignal(np.array([short_row, long_row], dtype=object)).T
    fname = tmp_path / 'test_save_ragged_array.hspy'
    original.save(fname)
    reloaded = load(fname)
    for saved_item, loaded_item in zip(original.data, reloaded.data):
        np.testing.assert_allclose(saved_item, loaded_item)
    assert original.__class__ == reloaded.__class__
def test_save_ragged_array(tmpfilepath):
    """A ragged object array must survive a save/load round trip with its
    class preserved (legacy tmpfilepath fixture variant)."""
    rows = [np.array([0, 1]), np.array([0, 1, 2])]
    original = BaseSignal(np.array(rows)).T
    filename = os.path.join(tmpfilepath, "test_save_ragged_array.hspy")
    original.save(filename)
    reloaded = load(filename)
    for saved_item, loaded_item in zip(original.data, reloaded.data):
        np.testing.assert_allclose(saved_item, loaded_item)
    assert original.__class__ == reloaded.__class__
def _spikes_diagnosis(self, signal_mask=None, navigation_mask=None):
    """Plots a histogram to help in choosing the threshold for spikes
    removal.

    Parameters
    ----------
    signal_mask : boolean array
        Restricts the operation to the signal locations not marked
        as True (masked).
    navigation_mask : boolean array
        Restricts the operation to the navigation locations not marked
        as True (masked).

    See also
    --------
    spikes_removal_tool

    """
    self._check_signal_dimension_equals_one()
    dc = self.data
    if signal_mask is not None:
        dc = dc[..., ~signal_mask]
    if navigation_mask is not None:
        dc = dc[~navigation_mask, :]
    der = np.abs(np.diff(dc, 1, -1))
    # BUGFIX: the original tested ``if navigation_mask`` directly, which
    # raises ValueError ("truth value of an array ... is ambiguous") for
    # boolean arrays with more than one element; compare against None.
    n = ((~navigation_mask).sum() if navigation_mask is not None
         else self.axes_manager.navigation_size)

    # arbitrary cutoff for number of spectra necessary before histogram
    # data is compressed by finding maxima of each spectrum
    tmp = BaseSignal(der) if n < 2000 else BaseSignal(
        np.ravel(der.max(-1)))

    # get histogram signal using smart binning and plot
    tmph = tmp.get_histogram()
    tmph.plot()

    # Customize plot appearance
    plt.gca().set_title('')
    plt.gca().fill_between(tmph.axes_manager[0].axis,
                           tmph.data,
                           facecolor='#fddbc7',
                           interpolate=True,
                           color='none')
    ax = tmph._plot.signal_plot.ax
    axl = tmph._plot.signal_plot.ax_lines[0]
    axl.set_line_properties(color='#b2182b')
    plt.xlabel('Derivative magnitude')
    plt.ylabel('Log(Counts)')
    ax.set_yscale('log')
    ax.set_ylim(10 ** -1, plt.ylim()[1])
    ax.set_xlim(plt.xlim()[0], 1.1 * plt.xlim()[1])
    plt.draw()
def test_add_signal_in_dictionary(self, tree):
    """Dictionaries stored under a ``_sig_`` key must be rebuilt as
    BaseSignal nodes with data, metadata and axes intact."""
    signal = BaseSignal([1.0, 2, 3])
    signal.axes_manager[0].name = "x"
    signal.axes_manager[0].units = "ly"
    tree.add_dictionary({"_sig_signal name": signal._to_dictionary()})
    restored = tree.signal_name
    assert isinstance(restored, BaseSignal)
    np.testing.assert_array_equal(restored.data, signal.data)
    assert restored.metadata.as_dictionary() == signal.metadata.as_dictionary()
    assert (restored.axes_manager._get_axes_dicts()
            == signal.axes_manager._get_axes_dicts())
def test_axes_configuration(tmp_path, file):
    """Axis ordering must survive a save/load round trip after two signal
    axes are flipped to navigation axes."""
    fname = tmp_path / file
    signal = BaseSignal(np.zeros((2, 2, 2, 2, 2)))
    # The repeated line is intentional: after the first assignment the axes
    # re-sort, so ``signal_axes[0]`` refers to a different axis the second
    # time (hence two navigation axes are asserted below).
    signal.axes_manager.signal_axes[0].navigate = True
    signal.axes_manager.signal_axes[0].navigate = True
    signal.save(fname, overwrite=True)
    reloaded = load(fname)
    assert reloaded.axes_manager.navigation_axes[0].index_in_array == 4
    assert reloaded.axes_manager.navigation_axes[1].index_in_array == 3
    assert reloaded.axes_manager.signal_dimension == 3
def test_add_signal_in_dictionary(self):
    """Dictionaries stored under a ``_sig_`` key must be rebuilt as
    BaseSignal nodes with data, metadata and axes intact."""
    tree = self.tree
    signal = BaseSignal([1., 2, 3])
    signal.axes_manager[0].name = 'x'
    signal.axes_manager[0].units = 'ly'
    tree.add_dictionary({"_sig_signal name": signal._to_dictionary()})
    restored = tree.signal_name
    assert isinstance(restored, BaseSignal)
    np.testing.assert_array_equal(restored.data, signal.data)
    assert (restored.metadata.as_dictionary()
            == signal.metadata.as_dictionary())
    assert (restored.axes_manager._get_axes_dicts()
            == signal.axes_manager._get_axes_dicts())
def test_add_signal_in_dictionary(self):
    """Nose-style variant: ``_sig_`` dictionary entries round-trip back
    into BaseSignal nodes."""
    tree = self.tree
    signal = BaseSignal([1., 2, 3])
    signal.axes_manager[0].name = 'x'
    signal.axes_manager[0].units = 'ly'
    tree.add_dictionary({"_sig_signal name": signal._to_dictionary()})
    restored = tree.signal_name
    nt.assert_is_instance(restored, BaseSignal)
    np.testing.assert_array_equal(restored.data, signal.data)
    nt.assert_dict_equal(restored.metadata.as_dictionary(),
                         signal.metadata.as_dictionary())
    nt.assert_equal(restored.axes_manager._get_axes_dicts(),
                    signal.axes_manager._get_axes_dicts())
def _spikes_diagnosis(self, signal_mask=None, navigation_mask=None):
    """Plots a histogram to help in choosing the threshold for spikes
    removal.

    Parameters
    ----------
    signal_mask : boolean array
        Restricts the operation to the signal locations not marked
        as True (masked).
    navigation_mask : boolean array
        Restricts the operation to the navigation locations not marked
        as True (masked).

    See also
    --------
    spikes_removal_tool

    """
    self._check_signal_dimension_equals_one()
    dc = self.data
    if signal_mask is not None:
        dc = dc[..., ~signal_mask]
    if navigation_mask is not None:
        dc = dc[~navigation_mask, :]
    der = np.abs(np.diff(dc, 1, -1))
    # BUGFIX: the original tested ``if navigation_mask`` directly, which
    # raises ValueError ("truth value of an array ... is ambiguous") for
    # boolean arrays with more than one element; compare against None.
    n = ((~navigation_mask).sum() if navigation_mask is not None
         else self.axes_manager.navigation_size)

    # arbitrary cutoff for number of spectra necessary before histogram
    # data is compressed by finding maxima of each spectrum
    tmp = BaseSignal(der) if n < 2000 else BaseSignal(np.ravel(
        der.max(-1)))

    # get histogram signal using smart binning and plot
    tmph = tmp.get_histogram()
    tmph.plot()

    # Customize plot appearance
    plt.gca().set_title('')
    plt.gca().fill_between(tmph.axes_manager[0].axis,
                           tmph.data,
                           facecolor='#fddbc7',
                           interpolate=True,
                           color='none')
    ax = tmph._plot.signal_plot.ax
    axl = tmph._plot.signal_plot.ax_lines[0]
    axl.set_line_properties(color='#b2182b')
    plt.xlabel('Derivative magnitude')
    plt.ylabel('Log(Counts)')
    ax.set_yscale('log')
    ax.set_ylim(10**-1, plt.ylim()[1])
    ax.set_xlim(plt.xlim()[0], 1.1 * plt.xlim()[1])
    plt.draw()
def test_save_ragged_dim2(tmp_path, file):
    """Ragged signals whose elements are 2D arrays must round-trip through
    save/load unchanged."""
    data = np.empty(5, dtype=object)
    for size in range(1, 6):
        row = list(range(size))
        data[size - 1] = np.array([row, row])
    signal = BaseSignal(data, ragged=True)
    target = tmp_path / file
    signal.save(target)
    reloaded = load(target)
    for saved_item, loaded_item in zip(signal.data, reloaded.data):
        np.testing.assert_array_equal(saved_item, loaded_item)
def setUp(self):
    """Write ``tmp.hdf5`` and replace its dataset with a huge empty one so
    the file can only sensibly be loaded lazily."""
    s = BaseSignal(np.empty((5, 5, 5)))
    s.save('tmp.hdf5', overwrite=True)
    self.shape = (10000, 10000, 100)
    del s
    # BUGFIX: ``h5py.File`` takes ``mode``, not ``model`` — the original
    # keyword raised a TypeError before the file could be reopened.
    f = h5py.File('tmp.hdf5', mode='r+')
    s = f['Experiments/__unnamed__']
    del s['data']
    s.create_dataset(
        'data', shape=self.shape, dtype='float64', chunks=True)
    f.close()
def __setattr__(self, key, value):
    """Set an attribute on the tree.

    Special cases, in order: internal bookkeeping keys bypass the tree
    machinery; the deprecated ``binned`` key warns; ``_sig_``-prefixed
    keys rebuild a BaseSignal from the value dictionary; plain dict
    values are merged into an existing subtree or wrapped in a new
    DictionaryTreeBrowser.
    """
    # Internal attributes are stored directly, outside the tree structure.
    if key in ['_double_lines', '_lazy_attributes']:
        super().__setattr__(key, value)
        return
    if key == 'binned':
        warnings.warn(
            'Use of the `binned` attribute in metadata is '
            'going to be deprecated in v2.0. Set the '
            '`axis.is_binned` attribute instead. ',
            VisibleDeprecationWarning)

    if key.startswith('_sig_'):
        key = key[5:]
        from hyperspy.signal import BaseSignal
        value = BaseSignal(**value)
    slugified_key = str(slugify(key, valid_variable_name=True))
    if isinstance(value, dict):
        if slugified_key in self.__dict__.keys():
            # Merge into the existing subtree instead of replacing it.
            self.__dict__[slugified_key]['_dtb_value_'].add_dictionary(
                value, double_lines=self._double_lines)
            return
        else:
            value = DictionaryTreeBrowser(value,
                                          double_lines=self._double_lines,
                                          lazy=False)
    # Nodes are stored as {'key': original name, '_dtb_value_': payload}.
    super().__setattr__(slugified_key, {'key': key, '_dtb_value_': value})
def test_signal_to_dictionary(self, tree):
    """A signal stored in the tree serialises under a ``_sig_`` key whose
    value is the signal's full dictionary representation."""
    s = BaseSignal([1.0, 2, 3])
    s.axes_manager[0].name = "x"
    s.axes_manager[0].units = "ly"
    tree.set_item("Some name", s)
    d = tree.as_dictionary()
    np.testing.assert_array_equal(d["_sig_Some name"]["data"], s.data)
    # Zero out the array so the remaining structure can be compared with ==.
    d["_sig_Some name"]["data"] = 0
    assert {
        "Node1": {
            "leaf11": 11,
            "Node11": {
                "leaf111": 111
            },
        },
        "Node2": {
            "leaf21": 21,
            "Node21": {
                "leaf211": 211
            },
        },
        "_sig_Some name": {
            "attributes": {
                "_lazy": False,
                "ragged": False
            },
            "axes": [{
                "_type": "UniformDataAxis",
                "name": "x",
                "navigate": False,
                "is_binned": False,
                "offset": 0.0,
                "scale": 1.0,
                "size": 3,
                "units": "ly",
            }],
            "data": 0,
            "learning_results": {},
            "metadata": {
                "General": {
                    "title": ""
                },
                "Signal": {
                    "signal_type": ""
                },
                "_HyperSpy": {
                    "Folding": {
                        "original_axes_manager": None,
                        "original_shape": None,
                        "unfolded": False,
                        "signal_unfolded": False,
                    }
                },
            },
            "original_metadata": {},
            "tmp_parameters": {},
        },
    } == d
def create_model(self, signal_dict, model_letter):
    """Recreate a model in this worker from a serialised signal dictionary.

    The signal is rebuilt, its stored model restored, and all per-component
    parameter maps (plus the variance signal, if present) are copied so the
    worker does not mutate shared array views.
    """
    _logger.debug('Creating model in worker {}'.format(self.identity))
    sig = BaseSignal(**signal_dict)
    sig._assign_subclass()
    self.model = sig.models[model_letter].restore()
    for component in self.model:
        # Workers fit a single navigation index, so per-pixel activity
        # tracking is disabled and the component forced active.
        component.active_is_multidimensional = False
        component.active = True
        for par in component.parameters:
            # Copy so writes don't propagate back through shared views.
            par.map = par.map.copy()
    if self.model.signal.metadata.has_item(
            'Signal.Noise_properties.variance'):
        var = self.model.signal.metadata.Signal.Noise_properties.variance
        if isinstance(var, BaseSignal):
            var.data = var.data.copy()
    self._array_views_to_copies()
def create_model(self, signal_dict, model_letter):
    """Recreate a model in this worker from a serialised signal dictionary.

    The signal is rebuilt, its stored model restored, and the signal data,
    per-component parameter maps, variance signal (if any) and low-loss
    signal (if any) are copied so the worker does not mutate shared views.
    """
    _logger.debug('Creating model in worker {}'.format(self.identity))
    sig = BaseSignal(**signal_dict)
    sig._assign_subclass()
    self.model = sig.models[model_letter].restore()
    self.model.signal.data = self.model.signal.data.copy()
    for component in self.model:
        # Workers fit a single navigation index, so per-pixel activity
        # tracking is disabled and the component forced active.
        component.active_is_multidimensional = False
        component.active = True
        for par in component.parameters:
            par.map = par.map.copy()
    # BUGFIX: guard the variance lookup — signals without noise properties
    # previously raised AttributeError here (the newer sibling of this
    # function already guards with has_item).
    if self.model.signal.metadata.has_item(
            'Signal.Noise_properties.variance'):
        var = self.model.signal.metadata.Signal.Noise_properties.variance
        if isinstance(var, BaseSignal):
            var.data = var.data.copy()
    if self.model.low_loss is not None:
        self.model.low_loss.data = self.model.low_loss.data.copy()
def test_compression_opts(self, tmp_path):
    """A custom Blosc compressor passed at save time must be stored on the
    zarr dataset."""
    self.filename = tmp_path / 'testfile.zspy'
    from numcodecs import Blosc
    comp = Blosc(cname='zstd', clevel=1, shuffle=Blosc.SHUFFLE)
    BaseSignal([1, 2, 3]).save(self.filename, compressor=comp)
    f = zarr.open(self.filename.__str__(), mode='r+')
    d = f['Experiments/__unnamed__/data']
    # BUGFIX: dropped the stray trailing line-continuation backslash that
    # followed this assertion in the original.
    assert d.compressor == comp
class TestSignalVarianceFolding:
    """Unfolding a signal must also unfold its stored variance signal."""

    def setUp(self):
        signal = BaseSignal(np.zeros((2, 3, 4, 5)))
        signal.axes_manager.set_signal_dimension(2)
        signal.estimate_poissonian_noise_variance()
        self.s = signal

    def test_unfold_navigation(self):
        unfolded = self.s.deepcopy()
        unfolded.unfold_navigation_space()
        variance_am = (unfolded.metadata.Signal.Noise_properties
                       .variance.axes_manager)
        nt.assert_equal(variance_am.navigation_shape,
                        (self.s.axes_manager.navigation_size,))

    def test_unfold_signal(self):
        unfolded = self.s.deepcopy()
        unfolded.unfold_signal_space()
        variance_am = (unfolded.metadata.Signal.Noise_properties
                       .variance.axes_manager)
        nt.assert_equal(variance_am.signal_shape,
                        (self.s.axes_manager.signal_size,))
def test_signal_to_dictionary(self):
    """A signal stored in the tree serialises under a ``_sig_`` key whose
    value is the signal's full dictionary representation (legacy format
    with the ``binned`` metadata key)."""
    tree = self.tree
    s = BaseSignal([1., 2, 3])
    s.axes_manager[0].name = 'x'
    s.axes_manager[0].units = 'ly'
    tree.set_item('Some name', s)
    d = tree.as_dictionary()
    np.testing.assert_array_equal(d['_sig_Some name']['data'], s.data)
    # Zero out the array so the remaining structure can be compared with ==.
    d['_sig_Some name']['data'] = 0
    assert ({
        "Node1": {
            "leaf11": 11,
            "Node11": {
                "leaf111": 111
            },
        },
        "Node2": {
            "leaf21": 21,
            "Node21": {
                "leaf211": 211
            },
        },
        "_sig_Some name": {
            'attributes': {
                '_lazy': False
            },
            'axes': [{
                'name': 'x',
                'navigate': False,
                'offset': 0.0,
                'scale': 1.0,
                'size': 3,
                'units': 'ly'
            }],
            'data': 0,
            'learning_results': {},
            'metadata': {
                'General': {
                    'title': ''
                },
                'Signal': {
                    'binned': False,
                    'signal_type': ''
                },
                '_HyperSpy': {
                    'Folding': {
                        'original_axes_manager': None,
                        'original_shape': None,
                        'unfolded': False,
                        'signal_unfolded': False
                    }
                }
            },
            'original_metadata': {},
            'tmp_parameters': {}
        }
    } == d)
def as_signal(self, field='values'):
    """Get a parameter map as a signal object.

    Please note that this method only works when the navigation
    dimension is greater than 0.

    Parameters
    ----------
    field : {'values', 'std', 'is_set'}
        Which stored map to return as a signal.

    Raises
    ------
    NavigationDimensionError : if the navigation dimension is 0

    """
    from hyperspy.signal import BaseSignal
    s = BaseSignal(data=self.map[field],
                   axes=self._axes_manager._get_navigation_axes_dicts())
    # Mask out navigation positions where the component is inactive.
    if self.component is not None and \
            self.component.active_is_multidimensional:
        s.data[np.logical_not(self.component._active_array)] = np.nan

    s.metadata.General.title = ("%s parameter" % self.name
                                if self.component is None
                                else "%s parameter of %s component" %
                                (self.name, self.component.name))
    # The former navigation axes become the signal axes of the map.
    for axis in s.axes_manager._axes:
        axis.navigate = False
    # Multi-element parameters get an extra navigation axis per element.
    if self._number_of_elements > 1:
        s.axes_manager._append_axis(
            size=self._number_of_elements,
            name=self.name,
            navigate=True)
    s._assign_subclass()
    if field == "values":
        # Add the variance if available
        std = self.as_signal(field="std")
        if not np.isnan(std.data).all():
            # Stored values are standard deviations; square for variance.
            std.data = std.data ** 2
            std.metadata.General.title = "Variance"
            s.metadata.set_item(
                "Signal.Noise_properties.variance", std)
    return s
def test_passing_compression_opts_saving(tmp_path):
    """``compression_opts`` passed at save time must be forwarded to the
    underlying h5py dataset (implying gzip compression)."""
    filename = tmp_path / 'testfile.hdf5'
    BaseSignal([1, 2, 3]).save(filename, compression_opts=8)
    with h5py.File(filename, mode='r+') as f:
        dataset = f['Experiments/__unnamed__/data']
        assert dataset.compression_opts == 8
        assert dataset.compression == 'gzip'
def setup_method(self, method):
    """Build a random 1D spectrum stack with named, scaled axes."""
    signal = BaseSignal(np.random.random((3, 2, 5)))
    signal.axes_manager.set_signal_dimension(1)
    for index, axis_name in enumerate(("x", "y", "E")):
        signal.axes_manager[index].name = axis_name
    signal.axes_manager[2].scale = 0.5
    signal.metadata.General.title = 'test'
    self.signal = signal
def test_general_type_not_working(self):
    """Mixed-type tuples in metadata survive saving; stored signals come
    back upcast to Signal1D."""
    s = self.s
    s.metadata.set_item('test', (BaseSignal([1]), 0.1, 'test_string'))
    s.save('tmp.hdf5', overwrite=True)
    reloaded = load('tmp.hdf5')
    nt.assert_is_instance(reloaded.metadata.test, tuple)
    for item, expected_type in zip(reloaded.metadata.test,
                                   (Signal1D, float, str)):
        nt.assert_is_instance(item, expected_type)
def test_general_type_not_working(self, tmpfilepath):
    """Mixed-type tuples in metadata survive saving; stored signals come
    back upcast to Signal1D."""
    s = self.s
    s.metadata.set_item('test', (BaseSignal([1]), 0.1, 'test_string'))
    s.save(tmpfilepath)
    reloaded = load(tmpfilepath + ".hspy")
    assert isinstance(reloaded.metadata.test, tuple)
    for item, expected_type in zip(reloaded.metadata.test,
                                   (Signal1D, float, str)):
        assert isinstance(item, expected_type)
class TestSignalVarianceFolding:
    """Unfolding a signal must also unfold its stored variance signal."""

    def setup_method(self, method):
        signal = BaseSignal(np.zeros((2, 3, 4, 5))).transpose(signal_axes=2)
        signal.estimate_poissonian_noise_variance()
        self.s = signal

    def test_unfold_navigation(self):
        unfolded = self.s.deepcopy()
        unfolded.unfold_navigation_space()
        variance_am = (unfolded.metadata.Signal.Noise_properties
                       .variance.axes_manager)
        assert (variance_am.navigation_shape
                == (self.s.axes_manager.navigation_size,))

    def test_unfold_signal(self):
        unfolded = self.s.deepcopy()
        unfolded.unfold_signal_space()
        variance_am = (unfolded.metadata.Signal.Noise_properties
                       .variance.axes_manager)
        assert (variance_am.signal_shape
                == (self.s.axes_manager.signal_size,))
class TestSignalVarianceFolding:
    """Nose-style checks that folding operations propagate to the variance
    signal stored in metadata."""

    def setUp(self):
        self.s = BaseSignal(np.zeros((2, 3, 4, 5)))
        self.s.axes_manager.set_signal_dimension(2)
        self.s.estimate_poissonian_noise_variance()

    def _variance_axes_manager(self, signal):
        # Shared accessor for the variance signal's axes manager.
        return signal.metadata.Signal.Noise_properties.variance.axes_manager

    def test_unfold_navigation(self):
        unfolded = self.s.deepcopy()
        unfolded.unfold_navigation_space()
        nt.assert_equal(self._variance_axes_manager(unfolded).navigation_shape,
                        (self.s.axes_manager.navigation_size, ))

    def test_unfold_signal(self):
        unfolded = self.s.deepcopy()
        unfolded.unfold_signal_space()
        nt.assert_equal(self._variance_axes_manager(unfolded).signal_shape,
                        (self.s.axes_manager.signal_size, ))
def test_general_type_not_working(self, tmp_path, file):
    """Mixed-type tuples in metadata survive saving; stored signals come
    back upcast to Signal1D."""
    s = self.s
    s.metadata.set_item('test', (BaseSignal([1]), 0.1, 'test_string'))
    fname = tmp_path / file
    s.save(fname)
    reloaded = load(fname)
    assert isinstance(reloaded.metadata.test, tuple)
    for item, expected_type in zip(reloaded.metadata.test,
                                   (Signal1D, float, str)):
        assert isinstance(item, expected_type)
class TestSignalVarianceFolding:
    """Folding operations must keep the metadata variance signal in step
    with the host signal."""

    def setup_method(self, method):
        base = BaseSignal(np.zeros((2, 3, 4, 5)))
        self.s = base.transpose(signal_axes=2)
        self.s.estimate_poissonian_noise_variance()

    def test_unfold_navigation(self):
        unfolded = self.s.deepcopy()
        unfolded.unfold_navigation_space()
        variance = unfolded.metadata.Signal.Noise_properties.variance
        expected = (self.s.axes_manager.navigation_size,)
        assert variance.axes_manager.navigation_shape == expected

    def test_unfold_signal(self):
        unfolded = self.s.deepcopy()
        unfolded.unfold_signal_space()
        variance = unfolded.metadata.Signal.Noise_properties.variance
        expected = (self.s.axes_manager.signal_size,)
        assert variance.axes_manager.signal_shape == expected
def reconstruct_object(flags, value):
    """Reconstructs the value (if necessary) after having saved it in a
    dictionary.

    ``'sig'``-flagged dictionaries are rebuilt into signals;
    ``'fn'``-flagged pairs are un-dilled when they were serialised with
    dill; everything else passes through unchanged.
    """
    flag_list = flags if isinstance(flags, list) else parse_flag_string(flags)
    if 'sig' in flag_list:
        if not isinstance(value, dict):
            return value
        from hyperspy.signal import BaseSignal
        signal = BaseSignal(**value)
        signal._assign_subclass()
        return signal
    if 'fn' in flag_list:
        was_dilled, payload = value
        if was_dilled is None:
            return payload
        if was_dilled in [True, 'True', b'True']:
            return dill.loads(payload)
        # should not be reached
        raise ValueError("The object format is not recognized")
    return value
def _parse_sb_size(s, reference, sb_position, sb_size, parallel):
    """Normalise the sideband-size argument for holographic reconstruction.

    Returns ``(sb_size, sb_size_temp)`` where ``sb_size`` is a
    :class:`BaseSignal` (lazy if its data is a dask array) and
    ``sb_size_temp`` is either a plain ``np.float64`` (when the size does
    not vary over navigation space) or a deep copy of the signal, ready to
    be passed to ``map``.
    """
    # Default value is 1/2 distance between sideband and central band
    if sb_size is None:
        if reference is None:
            sb_size = s.estimate_sideband_size(sb_position, parallel=parallel)
        else:
            sb_size = reference.estimate_sideband_size(sb_position,
                                                       parallel=parallel)
    else:
        if not isinstance(sb_size, BaseSignal):
            if isinstance(sb_size, (np.ndarray, daArray)) and sb_size.size > 1:
                # transpose if np.array of multiple instances
                sb_size = BaseSignal(sb_size).T
            else:
                sb_size = BaseSignal(sb_size)
        if isinstance(sb_size.data, daArray):
            sb_size = sb_size.as_lazy()
    if sb_size.axes_manager.navigation_size != s.axes_manager.navigation_size:
        if sb_size.axes_manager.navigation_size:
            raise ValueError('Sideband size dimensions do not match '
                             'neither reference nor hologram dimensions.')
        # sb_position navdim=0, therefore map function should not iterate:
        else:
            sb_size_temp = np.float64(sb_size.data)
    else:
        sb_size_temp = sb_size.deepcopy()
    return sb_size, sb_size_temp
def test_two_peaks(self):
    """Peak-width estimation must return all-NaN results when the spectrum
    contains two overlapping peaks and no window is given."""
    if self.s._lazy:
        pytest.skip("Lazy Signals don't work properly with 0 dimension data")
    shifted = self.s.deepcopy()
    shifted.shift1D(BaseSignal([1.0]))
    self.s = self.s.isig[10:] + shifted
    width, left, right = self.s.estimate_peak_width(window=None,
                                                    return_interval=True)
    for result in (width, left, right):
        assert np.isnan(result.data).all()
def test_lazy_loading(tmp_path):
    """A dataset swapped on disk for a huge empty one must load lazily as a
    dask array instead of being read into memory."""
    fname = tmp_path / 'tmp.hdf5'
    BaseSignal(np.empty((5, 5, 5))).save(fname, overwrite=True)
    big_shape = (10000, 10000, 100)
    # Replace the small saved dataset with a huge, chunked, empty one.
    with h5py.File(fname, mode='r+') as f:
        group = f['Experiments/__unnamed__']
        del group['data']
        group.create_dataset('data', shape=big_shape, dtype='float64',
                             chunks=True)
    s = load(fname, lazy=True)
    assert big_shape == s.data.shape
    assert isinstance(s.data, da.Array)
    assert s._lazy
    s.close_file()
def __setattr__(self, key, value):
    """Set an attribute on the tree.

    ``_sig_``-prefixed keys rebuild a BaseSignal from the value
    dictionary; plain dict values are merged into an existing subtree or
    wrapped in a new DictionaryTreeBrowser.
    """
    if key.startswith('_sig_'):
        key = key[5:]
        from hyperspy.signal import BaseSignal
        value = BaseSignal(**value)
    slugified_key = str(slugify(key, valid_variable_name=True))
    if isinstance(value, dict):
        if self.has_item(slugified_key):
            # Merge into the existing subtree instead of replacing it.
            self.get_item(slugified_key).add_dictionary(
                value, double_lines=self._double_lines)
            return
        else:
            value = DictionaryTreeBrowser(value,
                                          double_lines=self._double_lines)
    # Nodes are stored as {'key': original name, '_dtb_value_': payload}.
    super(DictionaryTreeBrowser, self).__setattr__(slugified_key, {
        'key': key, '_dtb_value_': value})
def setup_method(self, method):
    """Write ``tmp.hdf5`` and replace its dataset with a huge empty one so
    the file can only sensibly be loaded lazily."""
    BaseSignal(np.empty((5, 5, 5))).save('tmp.hdf5', overwrite=True)
    self.shape = (10000, 10000, 100)
    with h5py.File('tmp.hdf5', mode='r+') as f:
        group = f['Experiments/__unnamed__']
        del group['data']
        group.create_dataset('data', shape=self.shape, dtype='float64',
                             chunks=True)
def setup_method(self, method):
    """Create a 2D-signal fixture with Poissonian noise variance estimated."""
    base = BaseSignal(np.zeros((2, 3, 4, 5)))
    self.s = base.transpose(signal_axes=2)
    self.s.estimate_poissonian_noise_variance()
class TestSignalFolding:
    """Tests for unfolding/folding of navigation and signal spaces, both
    via explicit calls and via the ``unfolded`` context manager."""

    def setUp(self):
        # 4D array; last two axes form the 2D signal space.
        self.s = BaseSignal(np.zeros((2, 3, 4, 5)))
        self.s.axes_manager.set_signal_dimension(2)

    def test_unfold_navigation(self):
        s = self.s.deepcopy()
        s.unfold_navigation_space()
        nt.assert_equal(s.axes_manager.navigation_shape,
                        (self.s.axes_manager.navigation_size,))

    def test_unfold_signal(self):
        s = self.s.deepcopy()
        s.unfold_signal_space()
        nt.assert_equal(s.axes_manager.signal_shape,
                        (self.s.axes_manager.signal_size,))

    def test_unfolded_repr(self):
        self.s.unfold()
        nt.assert_true("unfolded" in repr(self.s))

    def test_unfold_navigation_by_keyword(self):
        s = self.s.deepcopy()
        s.unfold(unfold_navigation=True, unfold_signal=False)
        nt.assert_equal(s.axes_manager.navigation_shape,
                        (self.s.axes_manager.navigation_size,))

    def test_unfold_signal_by_keyword(self):
        s = self.s.deepcopy()
        s.unfold(unfold_navigation=False, unfold_signal=True)
        nt.assert_equal(s.axes_manager.signal_shape,
                        (self.s.axes_manager.signal_size,))

    def test_unfold_nothing_by_keyword(self):
        s = self.s.deepcopy()
        s.unfold(unfold_navigation=False, unfold_signal=False)
        nt.assert_equal(s.data.shape, self.s.data.shape)

    def test_unfold_full_by_keyword(self):
        s = self.s.deepcopy()
        s.unfold(unfold_navigation=True, unfold_signal=True)
        nt.assert_equal(s.axes_manager.signal_shape,
                        (self.s.axes_manager.signal_size,))
        nt.assert_equal(s.axes_manager.navigation_shape,
                        (self.s.axes_manager.navigation_size,))

    def test_unfolded_context_manager(self):
        s = self.s.deepcopy()
        with s.unfolded():
            # Check that both spaces unfold as expected
            nt.assert_equal(s.axes_manager.navigation_shape,
                            (self.s.axes_manager.navigation_size,))
            nt.assert_equal(s.axes_manager.signal_shape,
                            (self.s.axes_manager.signal_size,))
        # Check that it folds back as expected
        nt.assert_equal(s.axes_manager.navigation_shape,
                        self.s.axes_manager.navigation_shape)
        nt.assert_equal(s.axes_manager.signal_shape,
                        self.s.axes_manager.signal_shape)

    def test_unfolded_full_by_keywords(self):
        s = self.s.deepcopy()
        with s.unfolded(unfold_navigation=True, unfold_signal=True) as folded:
            nt.assert_true(folded)
            # Check that both spaces unfold as expected
            nt.assert_equal(s.axes_manager.navigation_shape,
                            (self.s.axes_manager.navigation_size,))
            nt.assert_equal(s.axes_manager.signal_shape,
                            (self.s.axes_manager.signal_size,))
        # Check that it folds back as expected
        nt.assert_equal(s.axes_manager.navigation_shape,
                        self.s.axes_manager.navigation_shape)
        nt.assert_equal(s.axes_manager.signal_shape,
                        self.s.axes_manager.signal_shape)

    def test_unfolded_navigation_by_keyword(self):
        s = self.s.deepcopy()
        with s.unfolded(unfold_navigation=True,
                        unfold_signal=False) as folded:
            nt.assert_true(folded)
            # Check that only navigation space unfolded
            nt.assert_equal(s.axes_manager.navigation_shape,
                            (self.s.axes_manager.navigation_size,))
            nt.assert_equal(s.axes_manager.signal_shape,
                            self.s.axes_manager.signal_shape)
        # Check that it folds back as expected
        nt.assert_equal(s.axes_manager.navigation_shape,
                        self.s.axes_manager.navigation_shape)
        nt.assert_equal(s.axes_manager.signal_shape,
                        self.s.axes_manager.signal_shape)

    def test_unfolded_signal_by_keyword(self):
        s = self.s.deepcopy()
        with s.unfolded(unfold_navigation=False,
                        unfold_signal=True) as folded:
            nt.assert_true(folded)
            # Check that only signal space unfolded
            nt.assert_equal(s.axes_manager.navigation_shape,
                            self.s.axes_manager.navigation_shape)
            nt.assert_equal(s.axes_manager.signal_shape,
                            (self.s.axes_manager.signal_size,))
        # Check that it folds back as expected
        nt.assert_equal(s.axes_manager.navigation_shape,
                        self.s.axes_manager.navigation_shape)
        nt.assert_equal(s.axes_manager.signal_shape,
                        self.s.axes_manager.signal_shape)

    def test_unfolded_nothin_by_keyword(self):
        s = self.s.deepcopy()
        with s.unfolded(False, False) as folded:
            nt.assert_false(folded)
            # Check that nothing folded
            nt.assert_equal(s.axes_manager.navigation_shape,
                            self.s.axes_manager.navigation_shape)
            nt.assert_equal(s.axes_manager.signal_shape,
                            self.s.axes_manager.signal_shape)
        # Check that it "folds back" as expected
        nt.assert_equal(s.axes_manager.navigation_shape,
                        self.s.axes_manager.navigation_shape)
        nt.assert_equal(s.axes_manager.signal_shape,
                        self.s.axes_manager.signal_shape)
def setUp(self):
    """Create a 4D zero signal whose last two axes form the signal space."""
    signal = BaseSignal(np.zeros((2, 3, 4, 5)))
    signal.axes_manager.set_signal_dimension(2)
    self.s = signal
def reconstruct_phase(self, reference=None, sb_size=None, sb_smoothness=None,
                      sb_unit=None, sb='lower', sb_position=None, high_cf=True,
                      output_shape=None, plotting=False,
                      show_progressbar=False, store_parameters=True,
                      parallel=None):
    """Reconstruct electron holograms. Operates on multidimensional
    hyperspy signals. There are several usage schemes:

    1. Reconstruct 1d or Nd hologram without reference
    2. Reconstruct 1d or Nd hologram using single reference hologram
    3. Reconstruct Nd hologram using Nd reference hologram (applies each
       reference to each hologram in Nd stack)

    The reconstruction parameters (sb_position, sb_size, sb_smoothness)
    have to be 1d or to have same dimensionality as the hologram.

    Parameters
    ----------
    reference : ndarray, :class:`~hyperspy.signals.Signal2D`, None
        Vacuum reference hologram.
    sb_size : float, ndarray, :class:`~hyperspy.signals.BaseSignal`, None
        Sideband radius of the aperture in corresponding unit (see
        'sb_unit'). If None, the radius of the aperture is set to 1/3 of
        the distance between sideband and center band.
    sb_smoothness : float, ndarray, :class:`~hyperspy.signals.BaseSignal`, None
        Smoothness of the aperture in the same unit as sb_size.
    sb_unit : str, None
        Unit of the two sideband parameters 'sb_size' and 'sb_smoothness'.
        Default: None - Sideband size given in pixels
        'nm': Size and smoothness of the aperture are given in 1/nm.
        'mrad': Size and smoothness of the aperture are given in mrad.
    sb : str, None
        Select which sideband is selected. 'upper' or 'lower'.
    sb_position : tuple, :class:`~hyperspy.signals.Signal1D`, None
        The sideband position (y, x), referred to the non-shifted FFT.
        If None, sideband is determined automatically from FFT.
    high_cf : bool, optional
        If False, the highest carrier frequency allowed for the sideband
        location is equal to half of the Nyquist frequency
        (Default: True).
    output_shape : tuple, None
        Choose a new output shape. Default is the shape of the input
        hologram. The output shape should not be larger than the input
        shape.
    plotting : bool
        Shows details of the reconstruction (i.e. SB selection).
    show_progressbar : bool
        Shows progressbar while iterating over different slices of the
        signal (passes the parameter to map method).
    parallel : bool
        Run the reconstruction in parallel
    store_parameters : bool
        Store reconstruction parameters in metadata

    Returns
    -------
    wave : :class:`~hyperspy.signals.WaveImage`
        Reconstructed electron wave. By default object wave is divided by
        reference wave.

    Examples
    --------
    >>> import hyperspy.api as hs
    >>> s = hs.datasets.example_signals.object_hologram()
    >>> sb_position = s.estimate_sideband_position()
    >>> sb_size = s.estimate_sideband_size(sb_position)
    >>> sb_size.data
    >>> wave = s.reconstruct_phase(sb_position=sb_position,
    ...                            sb_size=sb_size)

    """
    # TODO: Use defaults for choosing sideband, smoothness, relative
    # filter size and output shape if not provided
    # TODO: Plot FFT with marked SB and SB filter if plotting is enabled

    # Parsing reference:
    if not isinstance(reference, HologramImage):
        if isinstance(reference, Signal2D):
            if (not reference.axes_manager.navigation_shape ==
                    self.axes_manager.navigation_shape and
                    reference.axes_manager.navigation_size):
                raise ValueError('The navigation dimensions of object and'
                                 'reference holograms do not match')

            _logger.warning('The reference image signal type is not '
                            'HologramImage. It will be converted to '
                            'HologramImage automatically.')
            reference.set_signal_type('hologram')
        elif reference is not None:
            reference = HologramImage(reference)
            if isinstance(reference.data, daArray):
                reference = reference.as_lazy()

    # Testing match of navigation axes of reference and self
    # (exception: reference nav_dim=1):
    if (reference and not reference.axes_manager.navigation_shape ==
            self.axes_manager.navigation_shape and
            reference.axes_manager.navigation_size):
        raise ValueError('The navigation dimensions of object and '
                         'reference holograms do not match')

    if reference and not reference.axes_manager.signal_shape == \
            self.axes_manager.signal_shape:
        raise ValueError('The signal dimensions of object and reference'
                         ' holograms do not match')

    # Parsing sideband position:
    (sb_position, sb_position_temp) = _parse_sb_position(
        self, reference, sb_position, sb, high_cf, parallel)

    # Parsing sideband size:
    (sb_size, sb_size_temp) = _parse_sb_size(
        self, reference, sb_position, sb_size, parallel)

    # Standard edge smoothness of sideband aperture 5% of sb_size
    if sb_smoothness is None:
        sb_smoothness = sb_size * 0.05
    else:
        if not isinstance(sb_smoothness, BaseSignal):
            if isinstance(sb_smoothness,
                          (np.ndarray, daArray)) and sb_smoothness.size > 1:
                sb_smoothness = BaseSignal(sb_smoothness).T
            else:
                sb_smoothness = BaseSignal(sb_smoothness)
        if isinstance(sb_smoothness.data, daArray):
            sb_smoothness = sb_smoothness.as_lazy()

    if sb_smoothness.axes_manager.navigation_size != \
            self.axes_manager.navigation_size:
        if sb_smoothness.axes_manager.navigation_size:
            raise ValueError('Sideband smoothness dimensions do not match'
                             ' neither reference nor hologram '
                             'dimensions.')
        # sb_position navdim=0, therefore map function should not iterate
        # it:
        else:
            sb_smoothness_temp = np.float64(sb_smoothness.data)
    else:
        sb_smoothness_temp = sb_smoothness.deepcopy()

    # Convert sideband size from 1/nm or mrad to pixels
    if sb_unit == 'nm':
        f_sampling = np.divide(
            1,
            [a * b for a, b in
             zip(self.axes_manager.signal_shape,
                 (self.axes_manager.signal_axes[0].scale,
                  self.axes_manager.signal_axes[1].scale))])
        sb_size_temp = sb_size_temp / np.mean(f_sampling)
        sb_smoothness_temp = sb_smoothness_temp / np.mean(f_sampling)
    elif sb_unit == 'mrad':
        f_sampling = np.divide(
            1,
            [a * b for a, b in
             zip(self.axes_manager.signal_shape,
                 (self.axes_manager.signal_axes[0].scale,
                  self.axes_manager.signal_axes[1].scale))])
        try:
            ht = self.metadata.Acquisition_instrument.TEM.beam_energy
        except BaseException:
            raise AttributeError("Please define the beam energy."
                                 "You can do this e.g. by using the "
                                 "set_microscope_parameters method")
        # Relativistic electron momentum and wavelength from the beam
        # energy (ht is in kV, wavelength in nm).
        momentum = 2 * constants.m_e * constants.elementary_charge * ht * \
            1000 * (1 + constants.elementary_charge * ht * 1000 /
                    (2 * constants.m_e * constants.c ** 2))
        wavelength = constants.h / np.sqrt(momentum) * 1e9  # in nm
        sb_size_temp = sb_size_temp / (1000 * wavelength *
                                       np.mean(f_sampling))
        sb_smoothness_temp = sb_smoothness_temp / (1000 * wavelength *
                                                   np.mean(f_sampling))

    # Find output shape:
    if output_shape is None:
        # Future improvement will give a possibility to choose
        # if sb_size.axes_manager.navigation_size > 0:
        #     output_shape = (np.int(sb_size.inav[0].data*2),
        #                     np.int(sb_size.inav[0].data*2))
        # else:
        #     output_shape = (np.int(sb_size.data*2),
        #                     np.int(sb_size.data*2))
        output_shape = self.axes_manager.signal_shape
        output_shape = output_shape[::-1]

    # Logging the reconstruction parameters if appropriate:
    _logger.info('Sideband position in pixels: {}'.format(sb_position))
    _logger.info('Sideband aperture radius in pixels: {}'.format(sb_size))
    _logger.info('Sideband aperture smoothness in pixels: {}'.format(
        sb_smoothness))

    # Reconstructing object electron wave:

    # Checking if reference is a single image, which requires sideband
    # parameters as a nparray to avoid iteration trough those:
    wave_object = self.map(
        reconstruct,
        holo_sampling=(self.axes_manager.signal_axes[0].scale,
                       self.axes_manager.signal_axes[1].scale),
        sb_size=sb_size_temp,
        sb_position=sb_position_temp,
        sb_smoothness=sb_smoothness_temp,
        output_shape=output_shape,
        plotting=plotting,
        show_progressbar=show_progressbar,
        inplace=False,
        parallel=parallel,
        ragged=False)

    # Reconstructing reference wave and applying it (division):
    if reference is None:
        wave_reference = 1
    # case when reference is 1d
    elif reference.axes_manager.navigation_size != \
            self.axes_manager.navigation_size:

        # Prepare parameters for reconstruction of the reference wave:
        if reference.axes_manager.navigation_size == 0 and \
                sb_position.axes_manager.navigation_size > 0:
            # 1d reference, but parameters are multidimensional
            sb_position_ref = _first_nav_pixel_data(sb_position_temp)
        else:
            sb_position_ref = sb_position_temp

        if reference.axes_manager.navigation_size == 0 and \
                sb_size.axes_manager.navigation_size > 0:
            # 1d reference, but parameters are multidimensional
            sb_size_ref = _first_nav_pixel_data(sb_size_temp)
        else:
            sb_size_ref = sb_size_temp

        if reference.axes_manager.navigation_size == 0 and \
                sb_smoothness.axes_manager.navigation_size > 0:
            # 1d reference, but parameters are multidimensional
            sb_smoothness_ref = np.float64(
                _first_nav_pixel_data(sb_smoothness_temp))
        else:
            sb_smoothness_ref = sb_smoothness_temp
        #
        wave_reference = reference.map(
            reconstruct,
            holo_sampling=(self.axes_manager.signal_axes[0].scale,
                           self.axes_manager.signal_axes[1].scale),
            sb_size=sb_size_ref,
            sb_position=sb_position_ref,
            sb_smoothness=sb_smoothness_ref,
            output_shape=output_shape,
            plotting=plotting,
            show_progressbar=show_progressbar,
            inplace=False,
            parallel=parallel,
            ragged=False)

    else:
        wave_reference = reference.map(
            reconstruct,
            holo_sampling=(self.axes_manager.signal_axes[0].scale,
                           self.axes_manager.signal_axes[1].scale),
            sb_size=sb_size_temp,
            sb_position=sb_position_temp,
            sb_smoothness=sb_smoothness_temp,
            output_shape=output_shape,
            plotting=plotting,
            show_progressbar=show_progressbar,
            inplace=False,
            parallel=parallel,
            ragged=False)

    wave_image = wave_object / wave_reference

    # New signal is a complex
    wave_image.set_signal_type('complex_signal2d')

    # Rescale the signal axes to account for the (possibly) different
    # output shape.
    wave_image.axes_manager.signal_axes[0].scale = \
        self.axes_manager.signal_axes[0].scale * \
        self.axes_manager.signal_shape[0] / output_shape[1]
    wave_image.axes_manager.signal_axes[1].scale = \
        self.axes_manager.signal_axes[1].scale * \
        self.axes_manager.signal_shape[1] / output_shape[0]

    # Reconstruction parameters are stored in
    # holo_reconstruction_parameters:
    if store_parameters:
        rec_param_dict = OrderedDict(
            [('sb_position', sb_position_temp), ('sb_size', sb_size_temp),
             ('sb_units', sb_unit), ('sb_smoothness', sb_smoothness_temp)])
        wave_image.metadata.Signal.add_node('Holography')
        wave_image.metadata.Signal.Holography.add_node(
            'Reconstruction_parameters')
        wave_image.metadata.Signal.Holography.Reconstruction_parameters.\
            add_dictionary(rec_param_dict)
        _logger.info('Reconstruction parameters stored in metadata')

    return wave_image
def setUp(self):
    # Create a 5-D zero signal, move two axes into navigation space and
    # save it to HDF5 so the tests can load it back.
    self.filename = 'testfile.hdf5'
    s = BaseSignal(np.zeros((2, 2, 2, 2, 2)))
    # NOTE(review): the next line appears twice on purpose — setting
    # ``navigate = True`` removes the axis from ``signal_axes``, so the
    # second assignment targets the *next* first signal axis. Confirm this
    # is the intended two-axis conversion and not a copy-paste slip.
    s.axes_manager.signal_axes[0].navigate = True
    s.axes_manager.signal_axes[0].navigate = True
    s.save(self.filename)
class TestSignalFolding:
    """Tests for folding/unfolding of navigation and signal spaces.

    ``setup_method`` builds a 4-D signal with a 2-D signal space; each test
    works on a deep copy (or, for the repr test, the shared instance).
    """

    def setup_method(self, method):
        self.s = BaseSignal(np.zeros((2, 3, 4, 5)))
        self.s = self.s.transpose(signal_axes=2)

    def test_unfold_navigation(self):
        s = self.s.deepcopy()
        s.unfold_navigation_space()
        assert (s.axes_manager.navigation_shape ==
                (self.s.axes_manager.navigation_size,))

    def test_unfold_signal(self):
        s = self.s.deepcopy()
        s.unfold_signal_space()
        assert (s.axes_manager.signal_shape ==
                (self.s.axes_manager.signal_size,))

    def test_unfolded_repr(self):
        self.s.unfold()
        assert "unfolded" in repr(self.s)

    def test_unfold_navigation_by_keyword(self):
        s = self.s.deepcopy()
        s.unfold(unfold_navigation=True, unfold_signal=False)
        assert (s.axes_manager.navigation_shape ==
                (self.s.axes_manager.navigation_size,))

    def test_unfold_signal_by_keyword(self):
        s = self.s.deepcopy()
        s.unfold(unfold_navigation=False, unfold_signal=True)
        assert (s.axes_manager.signal_shape ==
                (self.s.axes_manager.signal_size,))

    def test_unfold_nothing_by_keyword(self):
        s = self.s.deepcopy()
        s.unfold(unfold_navigation=False, unfold_signal=False)
        assert s.data.shape == self.s.data.shape

    def test_unfold_full_by_keyword(self):
        s = self.s.deepcopy()
        s.unfold(unfold_navigation=True, unfold_signal=True)
        assert (s.axes_manager.signal_shape ==
                (self.s.axes_manager.signal_size,))
        assert (s.axes_manager.navigation_shape ==
                (self.s.axes_manager.navigation_size,))

    def test_unfolded_context_manager(self):
        s = self.s.deepcopy()
        with s.unfolded():
            # Check that both spaces unfold as expected
            assert (s.axes_manager.navigation_shape ==
                    (self.s.axes_manager.navigation_size,))
            assert (s.axes_manager.signal_shape ==
                    (self.s.axes_manager.signal_size,))
        # Check that it folds back as expected
        assert (s.axes_manager.navigation_shape ==
                self.s.axes_manager.navigation_shape)
        assert (s.axes_manager.signal_shape ==
                self.s.axes_manager.signal_shape)

    def test_unfolded_full_by_keywords(self):
        s = self.s.deepcopy()
        with s.unfolded(unfold_navigation=True, unfold_signal=True) as folded:
            assert folded
            # Check that both spaces unfold as expected
            assert (s.axes_manager.navigation_shape ==
                    (self.s.axes_manager.navigation_size,))
            assert (s.axes_manager.signal_shape ==
                    (self.s.axes_manager.signal_size,))
        # Check that it folds back as expected
        assert (s.axes_manager.navigation_shape ==
                self.s.axes_manager.navigation_shape)
        assert (s.axes_manager.signal_shape ==
                self.s.axes_manager.signal_shape)

    def test_unfolded_navigation_by_keyword(self):
        s = self.s.deepcopy()
        with s.unfolded(unfold_navigation=True, unfold_signal=False) as folded:
            assert folded
            # Check that only navigation space unfolded
            assert (s.axes_manager.navigation_shape ==
                    (self.s.axes_manager.navigation_size,))
            assert (s.axes_manager.signal_shape ==
                    self.s.axes_manager.signal_shape)
        # Check that it folds back as expected
        assert (s.axes_manager.navigation_shape ==
                self.s.axes_manager.navigation_shape)
        assert (s.axes_manager.signal_shape ==
                self.s.axes_manager.signal_shape)

    def test_unfolded_signal_by_keyword(self):
        s = self.s.deepcopy()
        with s.unfolded(unfold_navigation=False, unfold_signal=True) as folded:
            assert folded
            # Check that only signal space unfolded
            assert (s.axes_manager.navigation_shape ==
                    self.s.axes_manager.navigation_shape)
            assert (s.axes_manager.signal_shape ==
                    (self.s.axes_manager.signal_size,))
        # Check that it folds back as expected
        assert (s.axes_manager.navigation_shape ==
                self.s.axes_manager.navigation_shape)
        assert (s.axes_manager.signal_shape ==
                self.s.axes_manager.signal_shape)

    # Renamed from ``test_unfolded_nothin_by_keyword`` (typo fix); pytest
    # still discovers it via the ``test_`` prefix.
    def test_unfolded_nothing_by_keyword(self):
        s = self.s.deepcopy()
        with s.unfolded(False, False) as folded:
            assert not folded
            # Check that nothing folded
            assert (s.axes_manager.navigation_shape ==
                    self.s.axes_manager.navigation_shape)
            assert (s.axes_manager.signal_shape ==
                    self.s.axes_manager.signal_shape)
        # Check that it "folds back" as expected
        assert (s.axes_manager.navigation_shape ==
                self.s.axes_manager.navigation_shape)
        assert (s.axes_manager.signal_shape ==
                self.s.axes_manager.signal_shape)
def setup_method(self, method):
    # Build a 4-D zero signal and keep only the last two axes in the
    # signal space; the rest become navigation axes.
    data = np.zeros((2, 3, 4, 5))
    self.s = BaseSignal(data).transpose(signal_axes=2)
def estimate_parameters(self, signal, x1, x2, only_current=False,
                        out=False):
    """Estimate the parameters for the power law component by the two
    area method.

    Parameters
    ----------
    signal : Signal1D instance
    x1 : float
        Defines the left limit of the spectral range to use for the
        estimation.
    x2 : float
        Defines the right limit of the spectral range to use for the
        estimation.
    only_current : bool
        If False, estimates the parameters for the full dataset.
    out : bool
        If True, returns the result arrays directly without storing in the
        parameter maps/values. The returned order is (A, r).

    Returns
    -------
    {bool, tuple of values}
        ``False`` when the estimation hits a divide-by-zero; otherwise
        ``True`` (or ``(A, r)`` when ``out=True`` and not ``only_current``).
    """
    super(PowerLaw, self)._estimate_parameters(signal)
    axis = signal.axes_manager.signal_axes[0]
    i1, i2 = axis.value_range_to_indices(x1, x2)
    # The two-area method needs an even number of channels so the range
    # splits into two equal halves at i3.
    if not (i2 + i1) % 2 == 0:
        i2 -= 1
    if i2 == i1:
        i2 += 2
    i3 = (i2 + i1) // 2
    # Re-read the axis values at the snapped indices.
    x1 = axis.index2value(i1)
    x2 = axis.index2value(i2)
    x3 = axis.index2value(i3)
    if only_current is True:
        s = signal.get_current_signal()
    else:
        s = signal
    if s._lazy:
        import dask.array as da
        log = da.log
        I1 = s.isig[i1:i3].integrate1D(2j).data
        I2 = s.isig[i3:i2].integrate1D(2j).data
    else:
        from hyperspy.signal import BaseSignal
        shape = s.data.shape[:-1]
        I1_s = BaseSignal(np.empty(shape, dtype='float'))
        I2_s = BaseSignal(np.empty(shape, dtype='float'))
        # Use the `out` parameters to avoid doing the deepcopy
        s.isig[i1:i3].integrate1D(2j, out=I1_s)
        s.isig[i3:i2].integrate1D(2j, out=I2_s)
        I1 = I1_s.data
        I2 = I2_s.data
        log = np.log
    with np.errstate(divide='raise'):
        try:
            # Two-area power-law estimate: r from the intensity ratio of
            # the two halves, A from the closed-form integral.
            r = 2 * log(I1 / I2) / log(x2 / x1)
            k = 1 - r
            A = k * I2 / (x2 ** k - x3 ** k)
            if s._lazy:
                r = r.map_blocks(np.nan_to_num)
                A = A.map_blocks(np.nan_to_num)
            else:
                r = np.nan_to_num(r)
                A = np.nan_to_num(A)
        except (RuntimeWarning, FloatingPointError):
            # Typo fix: 'paramaters' -> 'parameters' in the warning text.
            _logger.warning('Power law parameters estimation failed '
                            'because of a "divide by zero" error.')
            return False
    if only_current is True:
        self.r.value = r
        self.A.value = A
        return True
    if out:
        return A, r
    else:
        if self.A.map is None:
            self._create_arrays()
        self.A.map['values'][:] = A
        self.A.map['is_set'][:] = True
        self.r.map['values'][:] = r
        self.r.map['is_set'][:] = True
        self.fetch_stored_values()
        return True
def setUp(self):
    # 4-D zeros; last two axes stay in the signal space.
    self.s = BaseSignal(np.zeros((2, 3, 4, 5))).transpose(signal_axes=2)
def setUp(self):
    # Build a 4-D zero signal with a 2-D signal space and attach a
    # Poissonian noise-variance estimate before exposing it on self.
    sig = BaseSignal(np.zeros((2, 3, 4, 5)))
    sig.axes_manager.set_signal_dimension(2)
    sig.estimate_poissonian_noise_variance()
    self.s = sig