def test_read_unit_from_DM_stack(lazy):
    fname = os.path.join(MY_PATH, 'tiff_files',
                         'test_loading_image_saved_with_DM_stack.tif')
    s = hs.load(fname, lazy=lazy)
    assert s.data.shape == (2, 68, 68)
    assert s.axes_manager[0].units == 's'
    assert s.axes_manager[1].units == 'µm'
    assert s.axes_manager[2].units == 'µm'
    assert_allclose(s.axes_manager[0].scale, 2.5, atol=1E-5)
    assert_allclose(s.axes_manager[1].scale, 0.16867, atol=1E-5)
    assert_allclose(s.axes_manager[2].scale, 1.68674, atol=1E-5)
    with tempfile.TemporaryDirectory() as tmpdir:
        fname2 = os.path.join(
            tmpdir, 'test_loading_image_saved_with_DM_stack2.tif')
        s.save(fname2, overwrite=True)
        s2 = hs.load(fname2)
        _compare_signal_shape_data(s, s2)
        assert s2.axes_manager[0].units == s.axes_manager[0].units
        assert s2.axes_manager[1].units == 'µm'
        assert s2.axes_manager[2].units == 'µm'
        assert_allclose(
            s2.axes_manager[0].scale, s.axes_manager[0].scale, atol=1E-5)
        assert_allclose(
            s2.axes_manager[1].scale, s.axes_manager[1].scale, atol=1E-5)
        assert_allclose(
            s2.axes_manager[2].scale, s.axes_manager[2].scale, atol=1E-5)
        assert_allclose(
            s2.axes_manager[0].offset, s.axes_manager[0].offset, atol=1E-5)
        assert_allclose(
            s2.axes_manager[1].offset, s.axes_manager[1].offset, atol=1E-5)
        assert_allclose(
            s2.axes_manager[2].offset, s.axes_manager[2].offset, atol=1E-5)
def test_write_read_unit_imagej_with_description(import_local_tifffile=True):
    fname = os.path.join(my_path, 'tiff_files',
                         'test_loading_image_saved_with_imageJ.tif')
    s = hs.load(fname, import_local_tifffile=import_local_tifffile)
    s.axes_manager[0].units = 'µm'
    s.axes_manager[1].units = 'µm'
    nt.assert_almost_equal(s.axes_manager[0].scale, 0.16867, places=5)
    nt.assert_almost_equal(s.axes_manager[1].scale, 0.16867, places=5)
    fname2 = fname.replace('.tif', '_description.tif')
    s.save(fname2, export_scale=False, overwrite=True, description='test')
    s2 = hs.load(fname2, import_local_tifffile=import_local_tifffile)
    nt.assert_equal(s2.axes_manager[0].units, t.Undefined)
    nt.assert_equal(s2.axes_manager[1].units, t.Undefined)
    nt.assert_almost_equal(s2.axes_manager[0].scale, 1.0, places=5)
    nt.assert_almost_equal(s2.axes_manager[1].scale, 1.0, places=5)
    fname3 = fname.replace('.tif', '_description2.tif')
    s.save(fname3, export_scale=True, overwrite=True, description='test')
    s3 = hs.load(fname3, import_local_tifffile=import_local_tifffile)
    nt.assert_equal(s3.axes_manager[0].units, 'µm')
    nt.assert_equal(s3.axes_manager[1].units, 'µm')
    nt.assert_almost_equal(s3.axes_manager[0].scale, 0.16867, places=5)
    nt.assert_almost_equal(s3.axes_manager[1].scale, 0.16867, places=5)
    if remove_files:
        os.remove(fname2)
        os.remove(fname3)
def test_save_load_cycle(save_path):
    sig_reload = None
    signal = hs.load(FILE2)
    serial = signal.original_metadata['blockfile_header']['Acquisition_time']
    date, time, timezone = serial_date_to_ISO_format(serial)
    assert signal.metadata.General.original_filename == 'test2.blo'
    assert signal.metadata.General.date == date
    assert signal.metadata.General.time == time
    assert signal.metadata.General.time_zone == timezone
    assert (
        signal.metadata.General.notes ==
        "Precession angle : \r\nPrecession Frequency : \r\nCamera gamma : on")
    signal.save(save_path, overwrite=True)
    sig_reload = hs.load(save_path)
    np.testing.assert_equal(signal.data, sig_reload.data)
    assert (signal.axes_manager.as_dictionary() ==
            sig_reload.axes_manager.as_dictionary())
    assert (signal.original_metadata.as_dictionary() ==
            sig_reload.original_metadata.as_dictionary())
    # change original_filename to make the metadata of both signals equal
    sig_reload.metadata.General.original_filename = \
        signal.metadata.General.original_filename
    assert_deep_almost_equal(signal.metadata.as_dictionary(),
                             sig_reload.metadata.as_dictionary())
    assert signal.metadata.General.date == sig_reload.metadata.General.date
    assert signal.metadata.General.time == sig_reload.metadata.General.time
    assert isinstance(signal, hs.signals.Signal2D)
    # Delete reference to close memmap file!
    del sig_reload
def test_save_load_cycle():
    sig_reload = None
    signal = hs.load(file2)
    nt.assert_equal(signal.metadata.General.original_filename, 'test2.blo')
    nt.assert_equal(signal.metadata.General.date, "2015-12-01")
    nt.assert_equal(signal.metadata.General.time, "15:43:09.828057")
    nt.assert_equal(
        signal.metadata.General.notes,
        "Precession angle : \r\nPrecession Frequency : \r\nCamera gamma : on")
    signal.save(save_path, overwrite=True)
    sig_reload = hs.load(save_path)
    np.testing.assert_equal(signal.data, sig_reload.data)
    nt.assert_equal(signal.axes_manager.as_dictionary(),
                    sig_reload.axes_manager.as_dictionary())
    nt.assert_equal(signal.original_metadata.as_dictionary(),
                    sig_reload.original_metadata.as_dictionary())
    # change original_filename to make the metadata of both signals equal
    sig_reload.metadata.General.original_filename = \
        signal.metadata.General.original_filename
    assert_deep_almost_equal(signal.metadata.as_dictionary(),
                             sig_reload.metadata.as_dictionary())
    nt.assert_equal(
        signal.metadata.General.date, sig_reload.metadata.General.date)
    nt.assert_equal(
        signal.metadata.General.time, sig_reload.metadata.General.time)
    nt.assert_is_instance(signal, hs.signals.Signal2D)
    # Delete reference to close memmap file!
    del sig_reload
    gc.collect()
    _remove_file(save_path)
def test_read_unit_from_DM_stack(import_local_tifffile=False):
    fname = os.path.join(MY_PATH, 'tiff_files',
                         'test_loading_image_saved_with_DM_stack.tif')
    s = hs.load(fname, import_local_tifffile=import_local_tifffile)
    nt.assert_equal(s.data.shape, (2, 68, 68))
    nt.assert_equal(s.axes_manager[0].units, 's')
    nt.assert_equal(s.axes_manager[1].units, 'µm')
    nt.assert_equal(s.axes_manager[2].units, 'µm')
    nt.assert_almost_equal(s.axes_manager[0].scale, 2.5, places=5)
    nt.assert_almost_equal(s.axes_manager[1].scale, 0.16867, places=5)
    nt.assert_almost_equal(s.axes_manager[2].scale, 1.68674, places=5)
    with tempfile.TemporaryDirectory() as tmpdir:
        fname2 = os.path.join(
            tmpdir, 'test_loading_image_saved_with_DM_stack2.tif')
        s.save(fname2, overwrite=True)
        s2 = hs.load(fname2)
        _compare_signal_shape_data(s, s2)
        nt.assert_equal(s2.axes_manager[0].units, s.axes_manager[0].units)
        nt.assert_equal(s2.axes_manager[1].units, 'micron')
        nt.assert_equal(s2.axes_manager[2].units, 'micron')
        nt.assert_almost_equal(
            s2.axes_manager[0].scale, s.axes_manager[0].scale, places=5)
        nt.assert_almost_equal(
            s2.axes_manager[1].scale, s.axes_manager[1].scale, places=5)
        nt.assert_almost_equal(
            s2.axes_manager[2].scale, s.axes_manager[2].scale, places=5)
        nt.assert_almost_equal(
            s2.axes_manager[0].offset, s.axes_manager[0].offset, places=5)
        nt.assert_almost_equal(
            s2.axes_manager[1].offset, s.axes_manager[1].offset, places=5)
        nt.assert_almost_equal(
            s2.axes_manager[2].offset, s.axes_manager[2].offset, places=5)
def test_save_load_cycle_grayscale(dtype, ext):
    s = hs.signals.Signal2D(
        np.arange(128 * 128).reshape(128, 128).astype(dtype))
    with tempfile.TemporaryDirectory() as tmpdir:
        print('Saving-loading cycle for the extension:', ext)
        filename = os.path.join(tmpdir, 'test_image.' + ext)
        s.save(filename)
        hs.load(filename)
def test_convert_tia_single_item(self):
    # note: 'contrast_streching' follows the attribute name defined on the
    # reader class
    self.tia_reader.contrast_streching = True
    self.tia_reader.overwrite = True
    data = np.arange(100).reshape((10, 10)).astype("float")
    self.tia_reader._convert_tia_single_item(hs.signals.Signal2D(data))
    assert os.path.exists(self.tia_reader.fname_ext)
    if self.delete_files:
        os.remove(self.tia_reader.fname_ext)

    self.tia_reader.contrast_streching = False
    data = np.arange(100).reshape((10, 10)).astype("float")
    self.tia_reader._convert_tia_single_item(hs.signals.Signal2D(data))
    assert os.path.exists(self.tia_reader.fname_ext)
    a = hs.load(self.tia_reader.fname_ext)
    # without contrast stretching the data should round-trip unchanged
    nt.assert_array_equal(a.data, data)
    if self.delete_files:
        os.remove(self.tia_reader.fname_ext)

    self.tia_reader.contrast_streching = False
    self.tia_reader.read(self._get_absolute_path(self.tia_reader.fname))
    self.tia_reader.extension_list = ['tif']
    self.tia_reader._convert_tia_single_item(self.tia_reader.s)
    assert os.path.exists(self.tia_reader.fname_ext)
    fname = self._get_absolute_path(
        self.tia_reader.fname.replace('.emi', ''))
    s = hs.load(fname + '.tif')
    nt.assert_array_equal(s.data, np.load(fname + '.npy'))
    if self.delete_files:
        os.remove(self.tia_reader.fname_ext)
def test_fit_EELS_convolved(convolved):
    dname = os.path.join(my_path, 'data')
    cl = hs.load(os.path.join(dname, 'Cr_L_cl.hspy'))
    cl.metadata.Signal.binned = False
    cl.metadata.General.title = 'Convolved: {}'.format(convolved)
    ll = hs.load(os.path.join(dname, 'Cr_L_ll.hspy')) if convolved else None
    m = cl.create_model(auto_background=False, ll=ll, GOS='hydrogenic')
    m.fit(kind='smart')
    m.plot(plot_components=True)
    return m._plot.signal_plot.figure
def test_write_scale_with_um_unit():
    """ Lazy test, still need to open the files in ImageJ or DM to check
    if the scale and unit are correct """
    s = hs.load(os.path.join(MY_PATH, 'tiff_files',
                             'test_dm_image_um_unit.dm3'))
    with tempfile.TemporaryDirectory() as tmpdir:
        fname = os.path.join(tmpdir, 'test_export_um_unit.tif')
        s.save(fname, overwrite=True, export_scale=True)
        s1 = hs.load(fname)
        _compare_signal_shape_data(s, s1)
def test_write_read_intensity_axes_DM():
    s = hs.load(os.path.join(MY_PATH2, 'test_dm_image_um_unit.dm3'))
    s.metadata.Signal.set_item('quantity', 'Electrons (Counts)')
    d = {'gain_factor': 5.0, 'gain_offset': 2.0}
    s.metadata.Signal.set_item('Noise_properties.Variance_linear_model', d)
    with tempfile.TemporaryDirectory() as tmpdir:
        # save directly into tmpdir; a 'tiff_files' subfolder would not exist
        fname = os.path.join(tmpdir, 'test_export_um_unit2.tif')
        s.save(fname, overwrite=True, export_scale=True)
        s2 = hs.load(fname, import_local_tifffile=True)
        assert_deep_almost_equal(s.metadata.Signal.as_dictionary(),
                                 s2.metadata.Signal.as_dictionary())
def test_different_x_y_scale_units(save_path):
    # perform load and save cycle with changing the scale on y
    signal = hs.load(FILE2)
    signal.axes_manager[0].scale = 50.0
    signal.save(save_path, overwrite=True)
    sig_reload = hs.load(save_path)
    assert_allclose(sig_reload.axes_manager[0].scale, 50.0, atol=1E-2)
    assert_allclose(sig_reload.axes_manager[1].scale, 64.0, atol=1E-2)
    assert_allclose(sig_reload.axes_manager[2].scale, 0.0160616, atol=1E-5)
def test_write_read_unit_imagej(import_local_tifffile=True):
    fname = os.path.join(my_path, 'tiff_files',
                         'test_loading_image_saved_with_imageJ.tif')
    s = hs.load(fname, import_local_tifffile=import_local_tifffile)
    s.axes_manager[0].units = 'µm'
    s.axes_manager[1].units = 'µm'
    fname2 = fname.replace('.tif', '2.tif')
    s.save(fname2, export_scale=True, overwrite=True)
    s2 = hs.load(fname2, import_local_tifffile=import_local_tifffile)
    nt.assert_equal(s2.axes_manager[0].units, 'µm')
    nt.assert_equal(s2.axes_manager[1].units, 'µm')
    if remove_files:
        os.remove(fname2)
def test_write_read_unit_imagej():
    fname = os.path.join(MY_PATH, 'tiff_files',
                         'test_loading_image_saved_with_imageJ.tif')
    s = hs.load(fname, convert_units=True)
    s.axes_manager[0].units = 'µm'
    s.axes_manager[1].units = 'µm'
    with tempfile.TemporaryDirectory() as tmpdir:
        fname2 = os.path.join(
            tmpdir, 'test_loading_image_saved_with_imageJ2.tif')
        s.save(fname2, export_scale=True, overwrite=True)
        s2 = hs.load(fname2)
        assert s2.axes_manager[0].units == 'µm'
        assert s2.axes_manager[1].units == 'µm'
        # compare the reloaded shape against the original one
        assert s2.data.shape == s.data.shape
def test_write_read_unit_imagej(import_local_tifffile=True):
    fname = os.path.join(MY_PATH, 'tiff_files',
                         'test_loading_image_saved_with_imageJ.tif')
    s = hs.load(fname, import_local_tifffile=import_local_tifffile)
    s.axes_manager[0].units = 'µm'
    s.axes_manager[1].units = 'µm'
    with tempfile.TemporaryDirectory() as tmpdir:
        fname2 = os.path.join(
            tmpdir, 'test_loading_image_saved_with_imageJ2.tif')
        s.save(fname2, export_scale=True, overwrite=True)
        s2 = hs.load(fname2, import_local_tifffile=import_local_tifffile)
        nt.assert_equal(s2.axes_manager[0].units, 'micron')
        nt.assert_equal(s2.axes_manager[1].units, 'micron')
        # compare the reloaded shape against the original one
        nt.assert_equal(s2.data.shape, s.data.shape)
def test_save_load_cycle_color(color, ext):
    dim = 4 if "rgba" in color else 3
    dtype = 'uint8' if "8" in color else 'uint16'
    if dim == 4 and ext == 'jpeg':
        # JPEG does not support alpha channel.
        return
    print('color:', color, '; dim:', dim, '; dtype:', dtype)
    s = hs.signals.Signal1D(
        np.arange(128 * 128 * dim).reshape(128, 128, dim).astype(dtype))
    s.change_dtype(color)
    with tempfile.TemporaryDirectory() as tmpdir:
        print('Saving-loading cycle for the extension:', ext)
        filename = os.path.join(tmpdir, 'test_image.' + ext)
        s.save(filename)
        hs.load(filename)
def test_read1():
    s = hs.load(file1)
    np.testing.assert_allclose(s.data, ref_T)
    nt.assert_almost_equal(s.axes_manager[0].scale, 0.33)
    nt.assert_almost_equal(s.axes_manager[0].offset, 50077.68)
    ref_date = datetime(2015, 4, 16, 13, 53)
    nt.assert_equal(s.metadata.General.time, ref_date)
def test_read_Zeiss_SEM_scale_metadata_512_image():
    md = {'Acquisition_instrument': {'SEM': {'Stage': {'rotation': 245.8,
                                                       'tilt': 0.0,
                                                       'x': 62.9961,
                                                       'y': 65.3168,
                                                       'z': 44.678},
                                             'beam_energy': 5.0,
                                             'magnification': '50.00 K X',
                                             'microscope': 'ULTRA 55-36-06',
                                             'working_distance': 3.9}},
          'General': {'authors': 'LIBERATO',
                      'date': '2018-09-25',
                      'original_filename': 'test_tiff_Zeiss_SEM_512pix.tif',
                      'time': '08:20:42',
                      'title': ''},
          'Signal': {'binned': False, 'signal_type': ''},
          '_HyperSpy': {'Folding': {'original_axes_manager': None,
                                    'original_shape': None,
                                    'signal_unfolded': False,
                                    'unfolded': False}}}
    fname = os.path.join(MY_PATH2, 'test_tiff_Zeiss_SEM_512pix.tif')
    s = hs.load(fname, convert_units=True)
    assert s.axes_manager[0].units == 'um'
    assert s.axes_manager[1].units == 'um'
    assert_allclose(s.axes_manager[0].scale, 0.011649976, rtol=1E-6)
    assert_allclose(s.axes_manager[1].scale, 0.011649976, rtol=1E-6)
    assert s.data.dtype == 'uint8'
    assert_deep_almost_equal(s.metadata.as_dictionary(), md)
def test_read_unit_um():
    # Load a DM file and save it as tif
    s = hs.load(os.path.join(MY_PATH2, 'test_dm_image_um_unit.dm3'))
    nt.assert_equal(s.axes_manager[0].units, 'µm')
    nt.assert_equal(s.axes_manager[1].units, 'µm')
    nt.assert_almost_equal(s.axes_manager[0].scale, 0.16867, places=5)
    nt.assert_almost_equal(s.axes_manager[1].scale, 0.16867, places=5)
    with tempfile.TemporaryDirectory() as tmpdir:
        # save directly into tmpdir; a 'tiff_files' subfolder would not exist
        fname = os.path.join(tmpdir, 'test_export_um_unit.tif')
        s.save(fname, overwrite=True, export_scale=True)
        # load the tif file back and check the calibration survived
        s2 = hs.load(fname, import_local_tifffile=True)
        nt.assert_equal(s2.axes_manager[0].units, 'µm')
        nt.assert_equal(s2.axes_manager[1].units, 'µm')
        nt.assert_almost_equal(s2.axes_manager[0].scale, 0.16867, places=5)
        nt.assert_almost_equal(s2.axes_manager[1].scale, 0.16867, places=5)
def test_read_TVIPS_metadata():
    md = {'Acquisition_instrument': {'TEM': {'Detector': {'Camera': {
                                                 'exposure': 0.4,
                                                 'name': 'F416'}},
                                             'Stage': {'tilt_alpha': -0.0070000002,
                                                       'tilt_beta': -0.055,
                                                       'x': 0.0,
                                                       'y': -9.2000000506686774e-05,
                                                       'z': 7.0000001350933871e-06},
                                             'beam_energy': 99.0,
                                             'magnification': 32000.0}},
          'General': {'original_filename': 'TVIPS_bin4.tif',
                      'time': '9:01:17',
                      'title': ''},
          'Signal': {'binned': False, 'signal_type': ''},
          '_HyperSpy': {'Folding': {'original_axes_manager': None,
                                    'original_shape': None,
                                    'signal_unfolded': False,
                                    'unfolded': False}}}
    fname = os.path.join(MY_PATH2, 'TVIPS_bin4.tif')
    s = hs.load(fname, convert_units=True)
    assert s.data.dtype == np.uint8
    assert s.data.shape == (1024, 1024)
    assert s.axes_manager[0].units == 'nm'
    assert s.axes_manager[1].units == 'nm'
    assert_allclose(s.axes_manager[0].scale, 1.42080, rtol=1E-5)
    assert_allclose(s.axes_manager[1].scale, 1.42080, rtol=1E-5)
    assert_deep_almost_equal(s.metadata.as_dictionary(), md)
def _load(self, filename=None, *args, **kargs):
    """HyperSpy file loader routine.

    Args:
        filename (string or bool): File to load. If None then the existing
            filename is used; if False, then a file dialog will be used.

    Returns:
        A copy of itself after loading the data.
    """
    if filename is None or not filename:
        self.get_filename("r")
    else:
        self.filename = filename
    # Open the file and read the main file header and unpack into a dict
    try:
        signal = hs.load(self.filename)
        if not isinstance(signal, hs.signals.Signal2D):
            raise Core.StonerLoadError("Not a 2D signal object - aborting!")
    except Exception as e:  # Pretty generic error catcher
        print("8" * 120, e, "5" * 120)
        raise Core.StonerLoadError(
            "Not readable by HyperSpy, error was {}".format(e))
    self.data = signal.data
    self._unpack_meta("", signal.metadata.as_dictionary())
    self._unpack_axes(signal.axes_manager)
    return self
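# A minimal usage sketch for the loader above (class and file names are
# hypothetical; assumes the Stoner-style object exposing _load has been
# constructed normally):
#
#     d = HyperSpyFile()                 # hypothetical wrapper class
#     d._load("stem_image.hspy")         # populates d.data from a Signal2D
#
# Anything that is not a hs.signals.Signal2D raises StonerLoadError, so
# spectra must be reduced to 2D images before they can be loaded here.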
def test_write_scale_unit_image_stack():
    """ Lazy test, still need to open the files in ImageJ or DM to check
    if the scale and unit are correct """
    s = hs.signals.Signal2D(
        np.arange(5 * 10 * 15, dtype=np.uint8).reshape((5, 10, 15)))
    s.axes_manager[0].scale = 0.25
    s.axes_manager[1].scale = 0.5
    s.axes_manager[2].scale = 1.5
    s.axes_manager[0].units = 'nm'
    s.axes_manager[1].units = 'um'
    s.axes_manager[2].units = 'mm'
    with tempfile.TemporaryDirectory() as tmpdir:
        fname = os.path.join(tmpdir, 'test_export_scale_unit_stack2.tif')
        s.save(fname, overwrite=True, export_scale=True)
        s1 = hs.load(fname)
        _compare_signal_shape_data(s, s1)
        nt.assert_equal(s1.axes_manager[0].units, 'nm')
        # only one unit can be read
        nt.assert_equal(s1.axes_manager[1].units, 'mm')
        nt.assert_equal(s1.axes_manager[2].units, 'mm')
        nt.assert_almost_equal(
            s1.axes_manager[0].scale, s.axes_manager[0].scale)
        nt.assert_almost_equal(
            s1.axes_manager[1].scale, s.axes_manager[1].scale)
        nt.assert_almost_equal(
            s1.axes_manager[2].scale, s.axes_manager[2].scale)
def test_saving_loading_stack_no_scale():
    with tempfile.TemporaryDirectory() as tmpdir:
        fname = os.path.join(tmpdir, 'test_export_scale_unit_stack2.tif')
        s0 = hs.signals.Signal2D(np.zeros((10, 20, 30)))
        s0.save(fname, overwrite=True)
        s1 = hs.load(fname)
        _compare_signal_shape_data(s0, s1)
def test_read_Zeiss_SEM_scale_metadata_1k_image():
    md = {'Acquisition_instrument': {'SEM': {'Stage': {'rotation': 10.2,
                                                       'tilt': -0.0,
                                                       'x': 75.6442,
                                                       'y': 60.4901,
                                                       'z': 25.193},
                                             'beam_current': 80000.0,
                                             'beam_energy': 25.0,
                                             'dwell_time': 5e-08,
                                             'magnification': 105.0,
                                             'microscope': 'Merlin-61-08',
                                             'working_distance': 14.81}},
          'General': {'authors': 'LIM',
                      'date': '2015-12-23',
                      'original_filename': 'test_tiff_Zeiss_SEM_1k.tif',
                      'time': '09:40:32',
                      'title': ''},
          'Signal': {'binned': False, 'signal_type': ''},
          '_HyperSpy': {'Folding': {'original_axes_manager': None,
                                    'original_shape': None,
                                    'signal_unfolded': False,
                                    'unfolded': False}}}
    fname = os.path.join(MY_PATH2, 'test_tiff_Zeiss_SEM_1k.tif')
    s = hs.load(fname, convert_units=True)
    assert s.axes_manager[0].units == 'um'
    assert s.axes_manager[1].units == 'um'
    assert_allclose(s.axes_manager[0].scale, 2.614514, rtol=1E-6)
    assert_allclose(s.axes_manager[1].scale, 2.614514, rtol=1E-6)
    assert s.data.dtype == 'uint8'
    assert_deep_almost_equal(s.metadata.as_dictionary(), md)
def _test_read_unit_from_dm(import_local_tifffile=False):
    fname = os.path.join(my_path2, 'test_loading_image_saved_with_DM.tif')
    s = hs.load(fname, import_local_tifffile=import_local_tifffile)
    nt.assert_equal(s.axes_manager[0].units, 'µm')
    nt.assert_equal(s.axes_manager[1].units, 'µm')
    nt.assert_almost_equal(s.axes_manager[0].scale, 0.16867, places=5)
    nt.assert_almost_equal(s.axes_manager[1].scale, 0.16867, places=5)
def test_read_Zeiss_SEM_scale_metadata_512_image():
    fname = os.path.join(MY_PATH2, 'test_tiff_Zeiss_SEM_512.tif')
    s = hs.load(fname)
    nt.assert_equal(s.axes_manager[0].units, 'm')
    nt.assert_equal(s.axes_manager[1].units, 'm')
    nt.assert_almost_equal(s.axes_manager[0].scale, 7.4240e-08, places=12)
    nt.assert_almost_equal(s.axes_manager[1].scale, 7.4240e-08, places=12)
    nt.assert_equal(s.data.dtype, 'uint16')
def _test_read_unit_from_imagej(import_local_tifffile=False):
    fname = os.path.join(MY_PATH, 'tiff_files',
                         'test_loading_image_saved_with_imageJ.tif')
    s = hs.load(fname, import_local_tifffile=import_local_tifffile)
    nt.assert_equal(s.axes_manager[0].units, 'micron')
    nt.assert_equal(s.axes_manager[1].units, 'micron')
    nt.assert_almost_equal(s.axes_manager[0].scale, 0.16867, places=5)
    nt.assert_almost_equal(s.axes_manager[1].scale, 0.16867, places=5)
def test_read_unit_from_imagej_no_scale(import_local_tifffile=False):
    fname = os.path.join(MY_PATH, 'tiff_files',
                         'test_loading_image_saved_with_imageJ_no_scale.tif')
    s = hs.load(fname, import_local_tifffile=import_local_tifffile)
    nt.assert_equal(s.axes_manager[0].units, t.Undefined)
    nt.assert_equal(s.axes_manager[1].units, t.Undefined)
    nt.assert_almost_equal(s.axes_manager[0].scale, 1.0, places=5)
    nt.assert_almost_equal(s.axes_manager[1].scale, 1.0, places=5)
def test_read_convert_units():
    # convert_units=None leaves the calibration untouched
    s = hs.load(file1, convert_units=None)
    np.testing.assert_allclose(s.data, ref_T)
    assert_allclose(s.axes_manager[0].scale, 0.33)
    assert_allclose(s.axes_manager[0].offset, 50077.68)
    assert s.axes_manager[0].units == 's'
    # convert_units=False behaves the same way
    s = hs.load(file1, convert_units=False)
    assert_allclose(s.axes_manager[0].scale, 0.33)
    assert_allclose(s.axes_manager[0].offset, 50077.68)
    assert s.axes_manager[0].units == 's'
    # convert_units=True rescales the axis (0.33 s -> 330.0 ms)
    s = hs.load(file1, convert_units=True)
    np.testing.assert_allclose(s.data, ref_T)
    assert_allclose(s.axes_manager[0].scale, 330.0)
    assert_allclose(s.axes_manager[0].offset, 50077680.0)
    assert s.axes_manager[0].units == 'ms'
def test_read_FEI_SEM_scale_metadata_16bits():
    fname = os.path.join(MY_PATH2, 'FEI-Helios-Ebeam-16bits.tif')
    s = hs.load(fname)
    nt.assert_equal(s.axes_manager[0].units, 'm')
    nt.assert_equal(s.axes_manager[1].units, 'm')
    nt.assert_almost_equal(s.axes_manager[0].scale, 3.3724e-06, places=12)
    nt.assert_almost_equal(s.axes_manager[1].scale, 3.3724e-06, places=12)
    nt.assert_equal(s.data.dtype, 'uint16')
def test_save_load_cycle():
    sig_reload = None
    signal = hs.load(file2)
    try:
        signal.save(save_path, overwrite=True)
        sig_reload = hs.load(save_path)
        np.testing.assert_equal(signal.data, sig_reload.data)
        nt.assert_equal(signal.axes_manager.as_dictionary(),
                        sig_reload.axes_manager.as_dictionary())
        nt.assert_equal(signal.original_metadata.as_dictionary(),
                        sig_reload.original_metadata.as_dictionary())
        nt.assert_is_instance(signal, hs.signals.Image)
    finally:
        # Delete reference to close memmap file!
        del sig_reload
        gc.collect()
        _remove_file(save_path)
def test_read_file2_metadata_keys():
    s = hs.load(file2, nxdata_only=True, dataset_keys=["rocks"],
                metadata_keys=["energy"])
    assert s.original_metadata.instrument.energy.value == 12.0
def test_load2(convert_units):
    s = hs.load(FILE2, convert_units=convert_units)
    assert s.data.shape == (2, 3, 5, 5)
    axes = axes2_converted if convert_units else axes2
    np.testing.assert_equal(s.axes_manager.as_dictionary(), axes)
    np.testing.assert_allclose(s.data, ref_data2)
def test_elid(pathname):
    s = hs.load(pathname)
    assert len(s) == 11

    assert s[0].data.shape == (16, 20)
    assert s[0].axes_manager.as_dictionary() == {
        'axis-0': {'_type': 'UniformDataAxis', 'name': 'y',
                   'scale': 0.9757792598920122, 'offset': 0.0, 'size': 16,
                   'units': 'µm', 'navigate': True, 'is_binned': False},
        'axis-1': {'_type': 'UniformDataAxis', 'name': 'x',
                   'scale': 0.9757792598920122, 'offset': 0.0, 'size': 20,
                   'units': 'µm', 'navigate': True, 'is_binned': False}
    }
    assert s[0].metadata['Acquisition_instrument']['SEM']['Stage']['x'] == -2.586744298575455
    assert s[0].metadata['Acquisition_instrument']['SEM']['Stage']['y'] == -0.7322168400784014
    assert s[0].metadata['Acquisition_instrument']['SEM']['beam_energy'] == 15.0
    assert s[0].metadata['Acquisition_instrument']['SEM']['microscope'] == 'MVE027364-0026-L'
    assert s[0].metadata['General']['date'] == '2019-08-07'
    assert s[0].metadata['General']['original_filename'] == os.path.split(pathname)[1]
    assert s[0].metadata['General']['time'] == '09:37:31'
    assert s[0].metadata['General']['title'] == 'Image 1'
    assert s[0].metadata['Signal']['signal_type'] == ''
    assert s[0].original_metadata['acquisition']['scan']['dwellTime']['value'] == '200'
    assert s[0].original_metadata['acquisition']['scan']['dwellTime']['unit'] == 'ns'
    assert s[0].original_metadata['acquisition']['scan']['fieldSize'] == 0.000019515585197840245
    assert s[0].original_metadata['acquisition']['scan']['highVoltage']['value'] == '-15'
    assert s[0].original_metadata['acquisition']['scan']['highVoltage']['unit'] == 'kV'
    assert s[0].original_metadata['pixelWidth']['value'] == '975.7792598920121'
    assert s[0].original_metadata['pixelWidth']['unit'] == 'nm'
    assert s[0].original_metadata['pixelHeight']['value'] == '975.7792598920121'
    assert s[0].original_metadata['pixelHeight']['unit'] == 'nm'
    assert s[0].original_metadata['samplePosition']['x'] == '-0.002586744298575455'
    assert s[0].original_metadata['samplePosition']['y'] == '-0.0007322168400784014'
    assert s[0].original_metadata['workingDistance']['value'] == '8.141749999999993'
    assert s[0].original_metadata['workingDistance']['unit'] == 'mm'
    assert s[0].original_metadata['instrument']['softwareVersion'] == '5.4.5.rc1.bb8fbe3.23039'
    assert s[0].original_metadata['instrument']['type'] == 'PhenomXL'
    assert s[0].original_metadata['instrument']['uniqueID'] == 'MVE027364-0026-L'

    assert s[1].metadata['General']['title'] == 'Image 1, Spot 1'
    assert s[1].data.shape == (2048,)
    assert s[1].axes_manager.as_dictionary() == {
        'axis-0': {'_type': 'UniformDataAxis', 'name': 'Energy',
                   'scale': 0.00988676802994421,
                   'offset': -0.03634370080990722, 'size': 2048,
                   'units': 'keV', 'navigate': False, 'is_binned': True}
    }
    assert s[1].data.tolist()[0:300] == [
        0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,15,16,19,30,52,61,98,125,
        145,129,114,69,45,22,14,11,18,17,30,26,29,19,16,20,26,29,35,51,59,103,
        139,157,209,220,179,113,99,65,49,31,36,39,42,35,48,37,55,50,45,46,49,
        40,49,54,35,49,57,63,71,64,75,76,92,98,83,81,94,118,120,160,215,325,
        363,368,429,403,376,254,204,173,136,124,102,89,97,84,83,75,83,71,85,
        101,81,72,87,84,90,93,84,68,93,91,82,86,112,85,84,100,110,118,132,118,
        125,138,128,135,143,143,136,148,227,301,538,1077,1946,3319,5108,7249,
        9032,10755,11441,10804,9219,7245,5335,3568,2213,1455,825,543,338,283,
        196,160,123,104,105,92,88,109,89,88,82,95,88,91,87,108,86,85,59,77,72,
        58,66,69,64,76,56,67,58,60,59,71,56,57,62,50,67,59,59,52,45,60,53,57,
        59,39,43,55,54,40,43,37,39,41,52,39,53,41,48,40,41,45,36,45,32,40,44,
        43,55,50,45,59,45,44,66,52,67,74,83,90,92,114,130,131,114,100,100,106,
        103,84,87,77,76,82,83,78,81,63,49,54,64,45,41,40,41,38,50,39,45,44,42,
        44,31,36,38,37,55,40,32,34,32,34,37,27,28,45,35,24,40,22,29,33,33,44,34]
    assert s[1].original_metadata['acquisition']['scan']['detectors']['EDS']['live_time'] == 5.7203385
    assert s[1].original_metadata['acquisition']['scan']['detectors']['EDS']['real_time'] == 10.162500000000001
    assert s[1].original_metadata['acquisition']['scan']['detectors']['EDS']['fast_peaking_time'] == 100e-9
    assert s[1].original_metadata['acquisition']['scan']['detectors']['EDS']['slow_peaking_time'] == 11.2e-6

    assert s[2].metadata['General']['title'] == 'Image 1, Region 2'
    assert s[2].data.shape == (2048,)
    assert s[2].axes_manager.as_dictionary() == {
        'axis-0': {'_type': 'UniformDataAxis', 'name': 'Energy',
                   'scale': 0.00988676802994421,
                   'offset': -0.03634370080990722, 'size': 2048,
                   'units': 'keV', 'navigate': False, 'is_binned': True}
    }
    assert s[2].original_metadata['acquisition']['scan']['detectors']['EDS']['live_time'] == 6.5802053
    assert s[2].original_metadata['acquisition']['scan']['detectors']['EDS']['real_time'] == 10.177700000000003

    assert s[3].metadata['General']['title'] == 'Image 1, Map 3'
    assert s[3].data.shape == (16, 16, 2048)
    assert s[3].axes_manager.as_dictionary() == {
        'axis-0': {'_type': 'UniformDataAxis', 'name': 'y',
                   'scale': 1.2197240748650153, 'offset': 0.0, 'size': 16,
                   'units': 'µm', 'navigate': True, 'is_binned': False},
        'axis-1': {'_type': 'UniformDataAxis', 'name': 'x',
                   'scale': 1.2197240748650153, 'offset': 0.0, 'size': 16,
                   'units': 'µm', 'navigate': True, 'is_binned': False},
        'axis-2': {'_type': 'UniformDataAxis', 'name': 'X-ray energy',
                   'scale': 0.00988676802994421,
                   'offset': -0.03634370080990722, 'size': 2048,
                   'units': 'keV', 'navigate': False, 'is_binned': True}
    }
    assert s[3].original_metadata['acquisition']['scan']['detectors']['EDS']['live_time'] == 4.047052
    assert s[3].original_metadata['acquisition']['scan']['detectors']['EDS']['real_time'] == 3.0005599999999997

    assert s[4].metadata['General']['title'] == 'Image 1, Line 4'
    assert s[4].data.shape == (64, 2048)
    assert s[4].axes_manager.as_dictionary() == {
        'axis-0': {'_type': 'UniformDataAxis', 'name': 'i', 'scale': 1.0,
                   'offset': 0.0, 'size': 64, 'units': 'points',
                   'navigate': True, 'is_binned': False},
        'axis-1': {'_type': 'UniformDataAxis', 'name': 'X-ray energy',
                   'scale': 0.00988676802994421,
                   'offset': -0.03634370080990722, 'size': 2048,
                   'units': 'keV', 'navigate': False, 'is_binned': True}
    }
    assert s[4].original_metadata['acquisition']['scan']['detectors']['EDS']['live_time'] == 5.504343599999998
    assert s[4].original_metadata['acquisition']['scan']['detectors']['EDS']['real_time'] == 6.410299999999996

    assert s[5].metadata['General']['title'] == 'Image 1, Map 6'
    assert s[5].data.shape == (16, 16, 2048)
    assert s[5].axes_manager.as_dictionary() == {
        'axis-0': {'_type': 'UniformDataAxis', 'name': 'y',
                   'scale': 1.2197240748650153, 'offset': 0.0, 'size': 16,
                   'units': 'µm', 'navigate': True, 'is_binned': False},
        'axis-1': {'_type': 'UniformDataAxis', 'name': 'x',
                   'scale': 1.2197240748650153, 'offset': 0.0, 'size': 16,
                   'units': 'µm', 'navigate': True, 'is_binned': False},
        'axis-2': {'_type': 'UniformDataAxis', 'name': 'X-ray energy',
                   'scale': 0.009886797201840245,
                   'offset': -0.04478043655810262, 'size': 2048,
                   'units': 'keV', 'navigate': False, 'is_binned': True}
    }
    assert s[5].original_metadata['acquisition']['scan']['detectors']['EDS']['live_time'] == 4.5919591
    assert s[5].original_metadata['acquisition']['scan']['detectors']['EDS']['real_time'] == 3.00056

    assert s[6].metadata['General']['title'] == 'Image 1, Difference 3 - 6'
    assert s[6].data.shape == (2048,)
    assert s[6].axes_manager.as_dictionary() == {
        'axis-0': {'_type': 'UniformDataAxis', 'name': 'Energy',
                   'scale': 0.00988676802994421,
                   'offset': -0.03634370080990722, 'size': 2048,
                   'units': 'keV', 'navigate': False, 'is_binned': True}
    }

    assert s[7].metadata['General']['title'] == '385test - spectrum'
    assert s[7].data.shape == (24, 32)
    assert s[7].axes_manager.as_dictionary() == {
        'axis-0': {'_type': 'UniformDataAxis', 'name': 'y', 'scale': 1.0,
                   'offset': 0.0, 'size': 24, 'units': 'points',
                   'navigate': True, 'is_binned': False},
        'axis-1': {'_type': 'UniformDataAxis', 'name': 'x', 'scale': 1.0,
                   'offset': 0.0, 'size': 32, 'units': 'points',
                   'navigate': True, 'is_binned': False}
    }
    assert 'acquisition' not in s[7].original_metadata

    assert s[8].metadata['General']['title'] == '385test - spectrum, MSA 1'
    assert s[8].data.shape == (2048,)
    assert s[8].axes_manager.as_dictionary() == {
        'axis-0': {'_type': 'UniformDataAxis', 'name': 'Energy',
                   'scale': 0.0098868, 'offset': -0.0363437, 'size': 2048,
                   'units': 'keV', 'navigate': False, 'is_binned': True}
    }
    assert s[8].original_metadata['acquisition']['scan']['detectors']['EDS']['live_time'] == 0.0
    assert s[8].original_metadata['acquisition']['scan']['detectors']['EDS']['real_time'] == 5.066

    assert s[9].metadata['General']['title'] == 'Image 1'
    assert s[9].data.shape == (35, 40)
    assert s[9].axes_manager.as_dictionary() == {
        'axis-0': {'_type': 'UniformDataAxis', 'name': 'y',
                   'scale': 0.8120422280865187, 'offset': 0.0, 'size': 35,
                   'units': 'µm', 'navigate': True, 'is_binned': False},
        'axis-1': {'_type': 'UniformDataAxis', 'name': 'x',
                   'scale': 0.8120422280865187, 'offset': 0.0, 'size': 40,
                   'units': 'µm', 'navigate': True, 'is_binned': False}
    }
    assert 'EDS' not in s[9].original_metadata['acquisition']['scan']['detectors']

    assert s[10].metadata['General']['title'] == 'Image 1, Map 1'
    assert s[10].data.shape == (16, 16, 2048)
    assert s[10].axes_manager.as_dictionary() == {
        'axis-0': {'_type': 'UniformDataAxis', 'name': 'y',
                   'scale': 2.0301055702162967, 'offset': 0.0, 'size': 16,
                   'units': 'µm', 'navigate': True, 'is_binned': False},
        'axis-1': {'_type': 'UniformDataAxis', 'name': 'x',
                   'scale': 2.0301055702162967, 'offset': 0.0, 'size': 16,
                   'units': 'µm', 'navigate': True, 'is_binned': False},
        'axis-2': {'_type': 'UniformDataAxis', 'name': 'X-ray energy',
                   'scale': 0.009886797201840245,
                   'offset': -0.04478043655810262, 'size': 2048,
                   'units': 'keV', 'navigate': False, 'is_binned': True}
    }
    assert s[10].original_metadata['acquisition']['scan']['detectors']['EDS']['live_time'] == 4.821238
    assert s[10].original_metadata['acquisition']['scan']['detectors']['EDS']['real_time'] == 3.0005600000000006
def test_load_to_memory():
    s = hs.load(FILE2, lazy=False)
    assert isinstance(s.data, np.ndarray)
    assert not isinstance(s.data, np.memmap)
def test_read_file2_dataset_key_test():
    s = hs.load(file2, nxdata_only=True, dataset_keys=["rocks"])
    assert not isinstance(s, list)
def load_shift_and_build_area(c_to_o_stem=None,
                              c_to_o_eels=None,
                              o_to_c_stem=None,
                              o_to_c_eels=None,
                              shifts=None,
                              smoothing_parm=0.05,
                              return_unshifted=False,
                              return_uncropped=False,
                              debug=False):
    """
    Load a number of STEM signals and EELS line scans in order to build
    useful area scans out of them for decomposition and other analysis.

    If no filenames are supplied, four file chooser dialogs will be opened.
    The files should be chosen in the order of SiC to SiO2 STEM, SiC to
    SiO2 EELS, SiO2 to SiC STEM, and then SiO2 to SiC EELS. If there are
    no reversed line scans to analyze (i.e. the scans were acquired in just
    one direction), then select them in the appropriate window, and press
    'Cancel' on the file selection for the ones that are not relevant.

    Note: all line scans must have the same dimensions, or there will be
    an error.

    Parameters
    ----------
    c_to_o_stem : list of str
        If supplied as keyword arguments, this method will not bring up a
        dialog in order to get the file names, and will just use those in
        the lists instead. This can be useful when combined with
        :py:meth:`get_scans_and_eels_fnames` so the function can be run
        multiple times without having to click through all the dialogs.
    c_to_o_eels : list of str
        See ``c_to_o_stem``
    o_to_c_stem : list of str
        See ``c_to_o_stem``
    o_to_c_eels : list of str
        See ``c_to_o_stem``
    shifts : list of float
        List of shift amounts to use. Allows one to supply custom shifts
        for each line, which will be applied to both the EELS and STEM
        scans. If None, the method will try to figure it out itself.
    smoothing_parm : float or 'ask'
        This is the parameter passed to :py:meth:`determine_shifts` in
        order to figure out how much to smooth the STEM signals before
        doing all the derivative work. Lower values are less smoothing,
        which will be more accurate, but more susceptible to noise.
        Typical values are on the order of [0.03, 0.1], depending on the
        signal.
    return_unshifted : bool
        Whether or not to return the unshifted data (good for comparison).
    return_uncropped : bool
        Whether or not to return the uncropped data (good for comparison).
    debug : bool
        Whether debugging information is printed out to see the shift
        values and everything.

    Returns
    -------
    res : tuple
        The results tuple will have the following signals, in the
        following order:

        area_stem : :py:class:`~hyperspy.signal.Signal`
            HyperSpy signal containing shifted and cropped STEM signals
            as an image, rather than a list of profiles
        area_eels : :py:class:`~hyperspy.signal.Signal`
            HyperSpy signal containing the shifted and cropped EELS line
            scans as an area scan, rather than a list of single line scans
        file_list : list
            List of the files that were processed
        area_stem_nocrop : :py:class:`~hyperspy.signal.Signal`
            (Optional) HyperSpy signal containing shifted but not cropped
            STEM signals as an image, rather than a list of profiles
        area_eels_nocrop : :py:class:`~hyperspy.signal.Signal`
            (Optional) HyperSpy signal containing the shifted but not
            cropped EELS line scans as an area scan, rather than a list
            of single line scans
        area_stem_unshifted : :py:class:`~hyperspy.signal.Signal`
            (Optional) HyperSpy signal containing the unshifted STEM
            signals as an image, rather than a list of profiles
        area_eels_unshifted : :py:class:`~hyperspy.signal.Signal`
            (Optional) HyperSpy signal containing the unshifted EELS line
            scans as an area scan, rather than a list of single line scans

    A usage sketch follows this function definition.
    """
    def _check_list_equal(iterator):
        # will return whether all items in list are the same or not
        return len(set(iterator)) <= 1

    # if no EELS scans are provided, get the information from dialog:
    if c_to_o_eels is None and o_to_c_eels is None:
        # get files from dialog if not supplied:
        (c_to_o_stem, c_to_o_eels,
         o_to_c_stem, o_to_c_eels) = get_scans_and_eels_fnames()

    # Save filenames in a list for reporting
    file_list = c_to_o_stem + c_to_o_eels + o_to_c_stem + o_to_c_eels

    # load in the files from the list of files:
    c_to_o_scans = [hs.load(x) for x in c_to_o_stem]
    c_to_o_lines = [hs.load(x) for x in c_to_o_eels]
    o_to_c_scans = [hs.load(x) for x in o_to_c_stem]
    o_to_c_lines = [hs.load(x) for x in o_to_c_eels]

    # flip the data in the OtoC scans and lines:
    for i in o_to_c_scans:
        i.data = i.data[::-1]
    for i in o_to_c_lines:
        i.data = i.data[::-1]

    # combine lists to make bigger lists:
    scans = c_to_o_scans + o_to_c_scans
    lines = c_to_o_lines + o_to_c_lines
    scan_sizes = [i.axes_manager.shape for i in scans]
    scan_scales = [i.axes_manager[0].scale for i in scans]
    line_sizes = [i.axes_manager.shape for i in lines]
    line_scales = [i.axes_manager[0].scale for i in lines]

    # Handle some errors related to scan sizes and magnifications
    if not _check_list_equal(scan_sizes):
        print("STEM scans were not all same size.")
        print("")
        print("SiC to SiO2 files were:")
        for i in c_to_o_stem:
            print(i)
        print("")
        print("SiO2 to SiC files were:")
        for i in o_to_c_stem:
            print(i)
        print("")
        print("Sizes were:")
        pprint(scan_sizes)
        raise ValueError("All line scans must be same size for stacking.")
    if not _check_list_equal(scan_scales):
        print("STEM scans were not all same scale (different mag?).")
        print("")
        print("SiC to SiO2 files were:")
        for i in c_to_o_stem:
            print(i)
        print("")
        print("SiO2 to SiC files were:")
        for i in o_to_c_stem:
            print(i)
        print("")
        print("Scales were:")
        pprint(scan_scales)
        raise ValueError("All line scans must be same scale for stacking.")
    if not _check_list_equal(line_sizes):
        print("EELS line scans were not all same size.")
        print("")
        print("SiC to SiO2 files were:")
        for i in c_to_o_eels:
            print(i)
        print("")
        print("SiO2 to SiC files were:")
        for i in o_to_c_eels:
            print(i)
        print("")
        print("Sizes were:")
        pprint(line_sizes)
        raise ValueError("All line scans must be same size for stacking.")
    if not _check_list_equal(line_scales):
        print("EELS line scans were not all same scale (different mag?).")
        print("")
        # report the EELS files here (not the STEM ones) since these are
        # the scans whose scales mismatched
        print("SiC to SiO2 files were:")
        for i in c_to_o_eels:
            print(i)
        print("")
        print("SiO2 to SiC files were:")
        for i in o_to_c_eels:
            print(i)
        print("")
        print("Scales were:")
        pprint(line_scales)
        raise ValueError("All line scans must be same scale for stacking.")

    # smooth scans:
    if shifts is None:
        smoothed_scans = smooth_scans(
            scans,
            progress_label="Smoothing STEM signals:",
            smoothing_parm=smoothing_parm)

    # do actual shifting and cropping:
    if shifts is None:
        shifts = determine_shifts(smoothed_scans,
                                  do_smoothing=False,
                                  debug=debug)
    if debug:
        print("Shifts are:")
        pprint(list(shifts))

    # normalize the intensity of the line scans:
    normalize_lines(lines, progress_label='Normalizing EELS line scans:')
    # normalize the intensity of the STEM profiles:
    normalize_lines(scans, progress_label='Normalizing STEM signals:')

    # shift EELS line scans
    shifted_lines = shift_lines(lines, shifts,
                                progress_label='Shifting EELS line scans:')
    # shift HAADF STEM signals
    shifted_scans = shift_lines(scans, shifts,
                                progress_label='Shifting STEM signals:')

    # create area spectrum images from the lines
    area_eels_nocrop = hs.stack(shifted_lines)
    area_eels_nocrop.axes_manager[1].name = 'line scan'
    area_eels_nocrop.axes_manager[1].units = '#'
    area_stem_nocrop = hs.stack(shifted_scans)
    area_stem_nocrop.axes_manager[0].name = 'STEM profile'
    area_stem_nocrop.axes_manager[0].units = '#'

    # Set appropriate titles for the signals
    area_eels_nocrop.metadata.General.title = \
        'Stacked EELS line scans - shifted'
    area_stem_nocrop.metadata.General.title = \
        'Stacked STEM signals - shifted'

    # crop the area spectrum images so there is no blank data
    area_eels = crop_area_scan(area_eels_nocrop, shifts)
    area_eels.axes_manager[1].name = 'line scan'
    area_eels.axes_manager[1].units = '#'
    area_stem = crop_area_scan(area_stem_nocrop, shifts)
    area_stem.axes_manager[0].name = 'STEM profile'
    area_stem.axes_manager[0].units = '#'

    # Set appropriate titles for the signals
    area_eels.metadata.General.title = \
        'Stacked EELS line scans - shifted and cropped'
    area_stem.metadata.General.title = \
        'Stacked STEM signals - shifted and cropped'

    # initialize the results list with the cropped and shifted data and
    # the list of file names that were analyzed
    res = [area_stem, area_eels, file_list]

    # if we want to return the uncropped data, add it to the list
    if return_uncropped:
        res.append(area_stem_nocrop)
        res.append(area_eels_nocrop)

    # if we want to return the unshifted data, add it to the list
    if return_unshifted:
        area_stem_unshifted = hs.stack(scans)
        area_eels_unshifted = hs.stack(lines)
        # Set appropriate titles for the signals
        area_eels_unshifted.metadata.General.title = \
            'Stacked EELS line scans'
        area_eels_unshifted.axes_manager[1].name = 'line scan'
        area_eels_unshifted.axes_manager[1].units = '#'
        area_stem_unshifted.metadata.General.title = \
            'Stacked STEM signals'
        area_stem_unshifted.axes_manager[0].name = 'STEM profile'
        area_stem_unshifted.axes_manager[0].units = '#'
        res.append(area_stem_unshifted)
        res.append(area_eels_unshifted)

    return res
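# A sketch of a direct call with pre-collected file lists (file names are
# hypothetical), which skips the file-chooser dialogs as the docstring
# describes:
#
#     c2o_stem = ['CtoO_STEM_01.dm3', 'CtoO_STEM_02.dm3']
#     c2o_eels = ['CtoO_EELS_01.dm3', 'CtoO_EELS_02.dm3']
#     o2c_stem = ['OtoC_STEM_01.dm3']
#     o2c_eels = ['OtoC_EELS_01.dm3']
#     area_stem, area_eels, files = load_shift_and_build_area(
#         c_to_o_stem=c2o_stem, c_to_o_eels=c2o_eels,
#         o_to_c_stem=o2c_stem, o_to_c_eels=o2c_eels,
#         smoothing_parm=0.05)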
def loadhspy(filename, tilts=None):
    """
    Read an MRC file to a TomoStack object using the HyperSpy reader.

    Parameters
    ----------
    filename : string
        Name of file that contains data to be read. Accepted formats
        (.MRC, .RAW/.RPL pair, .DM3, .DM4)
    tilts : list or NumPy array
        List of floats indicating the specimen tilt at each projection

    Returns
    -------
    stack : TomoStack object
    """
    stack = hspy.load(filename)
    if not stack.metadata.has_item("Tomography"):
        stack.metadata.add_node("Tomography")
    ext = os.path.splitext(filename)[1]
    if ext.lower() in ['.mrc', '.ali', '.rec']:
        tiltfile = os.path.splitext(filename)[0] + '.rawtlt'
        txtfile = os.path.splitext(filename)[0] + '.txt'
        if stack.original_metadata.fei_header.has_item('a_tilt'):
            tilts = stack.original_metadata.\
                fei_header['a_tilt'][0:stack.data.shape[0]]
            stack.axes_manager[0].name = 'Tilt'
            stack.axes_manager[0].units = 'degrees'
            stack.axes_manager[0].scale = tilts[1] - tilts[0]
            stack.axes_manager[0].offset = tilts[0]
            stack.metadata.Tomography.tilts = tilts
            logger.info('Tilts found in MRC file header')
        elif os.path.isfile(tiltfile):
            tilts = np.loadtxt(tiltfile)
            logger.info('.rawtlt file detected.')
            stack.axes_manager[0].name = 'Tilt'
            stack.axes_manager[0].units = 'degrees'
            stack.axes_manager[0].scale = tilts[1] - tilts[0]
            stack.axes_manager[0].offset = tilts[0]
            stack.metadata.Tomography.tilts = tilts
            if len(tilts) == stack.data.shape[0]:
                logger.info('Tilts loaded from .rawtlt file')
            else:
                logger.info('Number of tilts in .rawtlt file inconsistent'
                            ' with data shape')
        else:
            logger.info('Unable to find tilt angles. Calibrate axis 0.')
            stack.axes_manager[0].name = 'Tilt'
            stack.axes_manager[0].units = 'degrees'

        if stack.original_metadata.fei_header.has_item('pixel_size'):
            pixel_size = stack.original_metadata.fei_header.pixel_size[0]
            logger.info('Pixel size found in MRC file header')
        elif os.path.isfile(txtfile):
            pixel_line = None
            with open(txtfile, 'r') as h:
                text = h.readlines()
            for i in text:
                if 'Image pixel size' in i:
                    pixel_line = i
            if pixel_line:
                pixel_size = np.float32(pixel_line.split()[-1:])[0]
                pixel_units = pixel_line.split()[-2:-1][0][1:-2]
                stack.axes_manager[1].name = 'x'
                stack.axes_manager[1].units = pixel_units
                stack.axes_manager[1].scale = pixel_size
                stack.axes_manager[1].offset = 0
                stack.axes_manager[2].name = 'y'
                stack.axes_manager[2].units = pixel_units
                stack.axes_manager[2].scale = pixel_size
                stack.axes_manager[2].offset = 0
                logger.info('Pixel size loaded from text file')
            else:
                logger.info('Unable to find pixel size in text file')
        else:
            logger.info('Unable to find pixel size')
            stack.axes_manager[1].name = 'x'
            stack.axes_manager[1].units = 'unknown'
            stack.axes_manager[2].name = 'y'
            stack.axes_manager[2].units = 'unknown'
    elif ext.lower() in ['.hdf5', '.hd5', '.hspy']:
        pass
    else:
        raise ValueError('Cannot read file type: %s' % ext)

    if stack.data.min() < 0:
        stack.data = np.float32(stack.data)
        stack.data += np.abs(stack.data.min())

    axes_list = [
        x for _, x in sorted(stack.axes_manager.as_dictionary().items())
    ]
    metadata_dict = stack.metadata.as_dictionary()
    original_metadata_dict = stack.original_metadata.as_dictionary()
    stack = TomoStack(stack, axes=axes_list, metadata=metadata_dict,
                      original_metadata=original_metadata_dict)
    return stack
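# A minimal sketch of the expected call pattern (file name is hypothetical):
#
#     stack = loadhspy('tilt_series.mrc')
#
# The tilt-axis scale is set from tilts[1] - tilts[0], so the tilt angles
# found in the MRC header or the .rawtlt file are assumed to be uniformly
# spaced.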
def setUp(self):
    self.signal = load(
        my_path + "/test_find_peaks1D_ohaver/test_find_peaks1D_ohaver.hdf5")
def test_read3():
    with pytest.raises(AssertionError):
        hs.load(file3)
def setup_method(self, method):
    filepath = (Path(__file__).resolve().parent
                .joinpath("data/test_find_peaks1D_ohaver.hdf5"))
    self.signal = load(filepath)
def test_load_inplace():
    with pytest.raises(ValueError):
        hs.load(FILE2, lazy=True, mmap_mode='r+')
import sys
import time

import hyperspy.api as hs

emd_filename_list = sys.argv[1:]
emd_filename_list.sort()

for emd_filename in emd_filename_list:
    t0 = time.time()
    s = hs.load(emd_filename).transpose(signal_axes=(2, 3))
    t1 = time.time()
    result = s.sum()
    t2 = time.time()
    delta = t2 - t0
    print(f"\n{emd_filename}")
    print("init", t1 - t0)
    print(delta)
    print(f"{s.data.nbytes / delta / 1024 / 1024} MB/s (overall)")
    print(f"{s.data.nbytes / (t2 - t1) / 1024 / 1024} MB/s (without init)")
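# Usage sketch for the benchmark above (assuming it is saved as, e.g.,
# benchmark_emd.py; the script name is hypothetical):
#
#     python benchmark_emd.py scan1.emd scan2.emd
#
# Each file is loaded eagerly, transposed so axes 2 and 3 become the signal
# axes, and summed; throughput is printed with and without the load ("init")
# time.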
def test_read_file2_signal1():
    s = hs.load(file2, nxdata_only=True, dataset_keys=["rocks"])
    assert s.metadata.General.title == "rocks"
""" Loads hyperspy as a regular python library, loads spectrums from files, does curve fitting, and plotting the model and original spectrum to a png file""" import hyperspy.api as hs import matplotlib.pyplot as plt coreLossSpectrumFileName = "coreloss_spectrum.msa" lowlossSpectrumFileName = "lowloss_spectrum.msa" s = hs.load(coreLossSpectrumFileName).to_EELS() s.add_elements(("Mn", "O")) s.set_microscope_parameters(beam_energy=300, convergence_angle=24.6, collection_angle=13.6) ll = hs.load(lowlossSpectrumFileName).to_EELS() m = s.create_model(ll=ll) m.enable_fine_structure() m.multifit(kind="smart") m.plot() plt.savefig("model_original_spectrum_plot.png")
def setup_method(self, method):
    self.signal = load(
        my_path + "/test_find_peaks1D_ohaver/test_find_peaks1D_ohaver.hdf5")
def setup_method(self, method):
    filename = os.path.join(dirpath, 'protochips_gas_cell.csv')
    self.s_list = hs.load(filename)
fin_1 = h5py.File("data/89109_16_Fe_mantis_norm.hdf5", "r")
data_1 = fin_1["/exchange/data"]
signal_1 = hs.signals.Signal2D(data_1)
signal_1 = signal_1.transpose(signal_axes=(0, 2))

fin_2 = h5py.File("data/mantis_55510_55660.hdf5", "r")
data_2 = fin_2["/exchange/data"]
signal_2 = hs.signals.Signal2D(data_2)
signal_2 = signal_2.transpose(signal_axes=(0, 2))

fin_3 = h5py.File("data/mantis_raw_55499_55509.hdf5", "r")
data_3 = fin_3["/exchange/data"]
signal_3 = hs.signals.Signal2D(data_3)
signal_3 = signal_3.transpose(signal_axes=(0, 2))

signal_4 = hs.load("data/0005-RotSTEM90 ADF1.dm3")
signal_5 = hs.load("data/20_Aligned 20-Stack-5MxHAADF STACK(20).dm3")

signal_1_reduced = hs.signals.Signal2D(signal_1.data[:115][::5])
signal_2_reduced = hs.signals.Signal2D(signal_2.data[::5])

# Generate a very simple synthetic signal for demonstration purposes
height = 38
width = 46
vfield_1 = np.array([np.ones((height, width)) * -2,
                     np.ones((height, width)) * +1])
vfield_2 = np.array([np.ones((height, width)) * -2.7,
                     np.ones((height, width)) * +1.2])
arr_A = utils.make_capital_A((height, width))
signal_A = hs.signals.Signal2D(
    np.array([arr_A,
              ip.apply_displacement_field_sitk(vfield_1, arr_A),
              ip.apply_displacement_field_sitk(vfield_2, arr_A)]))
def test_load_lazy():
    from dask.array import Array
    s = hs.load(FILE2, lazy=True)
    assert isinstance(s.data, Array)
def test_load_single_dataset(dataset_path):
    filename = os.path.join(FILES_PATH,
                            'Si100_2D_3D_DPC_potential_2slices.emd')
    s = hs.load(filename, dataset_path=dataset_path)
    assert isinstance(s, hs.signals.Signal2D)
def process_data(proc_path, proc_bin_path, proc_dict):
    if 'Overwrite' in proc_dict:
        Overwrite = bool(proc_dict['Overwrite'])
    else:
        Overwrite = False
    print('Overwrite : ', Overwrite)

    # load data lazily
    print('loading : ', proc_path)
    time0 = time.time()
    dp = hs.load(proc_path, lazy=True)
    time1 = time.time()
    print('lazy loaded full data in :', time1 - time0, ' s')
    dp_bin = hs.load(proc_bin_path, lazy=True)
    time2 = time.time()
    print('lazy loaded binned data in :', time2 - time1, 's')

    # flags to tell if the bright-field disc has already been calculated
    bf_bin_exist = 0
    bf_exist = 0

    # ADF analysis
    if 'ADF' in proc_dict:
        # pass ADF value from config file
        run_ADF = proc_dict['ADF']
        # define file save names
        ADF_file = proc_bin_path.rpartition('.')[0] + '_ADF'
        # print('ADF_file : ', ADF_file)
        if os.path.isfile(ADF_file + '.hspy'):
            # check overwrite flag and skip processing if set to zero
            if Overwrite == False:
                print('ADF data exists, skipping ADF analysis')
                run_ADF = 0
            else:
                print('ADF data exists, overwriting')
        # run ADF analysis
        if run_ADF == 1:
            print('Running ADF analysis')
            time_ADF0 = time.time()
            # check if lazy and compute
            # if dp_bin._lazy:
            #     dp_bin = dp_bin.compute()
            time_ADF1 = time.time()
            print('loaded binned data into memory in : ',
                  time_ADF1 - time_ADF0)
            if bf_bin_exist == 0:
                # get bf threshold value
                bf_bin, bf_bin_exist = define_bf_disk(dp_bin, proc_dict)
            # get ADF inner angle
            if 'ADF_expand' in proc_dict:
                ADF_expand = proc_dict['ADF_expand']
            else:
                ADF_expand = 20
            # get ADF image
            ADF = get_adf(dp_bin, bf_bin, ADF_expand)
            # save ADF image
            ADF_file = proc_bin_path.rpartition('.')[0] + '_ADF'
            hs_ADF = hs.signals.Signal2D(ADF)
            hs_ADF.save(ADF_file, overwrite=Overwrite)
            hs_ADF.save(ADF_file, overwrite=Overwrite, extension='png')
            time_ADF2 = time.time()
            print('ADF analysis completed and saved in : ',
                  time_ADF2 - time_ADF0, ' s')

    # CoM analysis
    if 'CoM' in proc_dict:
        run_COM = proc_dict['CoM']
        # define file save names
        if 'bin_CoM' in proc_dict:
            if proc_dict['bin_CoM'] == 1:
                file_path = proc_bin_path.rpartition('.')[0]
            elif proc_dict['bin_CoM'] == 0:
                file_path = proc_path.rpartition('.')[0]
        CoMx_file = file_path + '_CoMx'
        CoMy_file = file_path + '_CoMy'
        # check if file exists
        if os.path.isfile(CoMx_file + '.hspy'):
            # check overwrite flag and skip processing if set to zero
            if Overwrite == False:
                print('CoM data exists, skipping CoM analysis')
                run_COM = 0
            else:
                print('CoM data exists, overwriting')
        # run CoM analysis
        if run_COM == 1:
            print('Running CoM analysis')
            time_CoM1 = time.time()
            if 'bin_CoM' in proc_dict:
                if proc_dict['bin_CoM'] == 1:
                    # if dp_bin._lazy:
                    #     dp_bin = dp_bin.compute()
                    if bf_bin_exist == 0:
                        # get BF threshold value
                        bf_bin, bf_bin_exist = define_bf_disk(
                            dp_bin, proc_dict)
                    bf_CoM = bf_bin
                    dp_CoM = py4DSTEM.file.datastructure.DataCube(dp_bin.data)
                elif proc_dict['bin_CoM'] == 0:
                    if bf_exist == 0:
                        bf, bf_exist = define_bf_disk(dp, proc_dict)
                    bf_CoM = bf
                    dp_CoM = py4DSTEM.file.datastructure.DataCube(dp.data)
            # get BF outer angle (read 'BF_expand' rather than 'ADF_expand')
            if 'BF_expand' in proc_dict:
                BF_expand = proc_dict['BF_expand']
            else:
                BF_expand = 20
            # build mask
            mask = get_mask(dp_CoM, bf_CoM, BF_expand, bf_df='bf')
            # set normalise CoM parameter
            if 'Normalize_CoM' in proc_dict:
                Normalize_CoM = bool(proc_dict['Normalize_CoM'])
            else:
                Normalize_CoM = True
            # get CoM
            CoMx, CoMy = get_CoM_images(dp_CoM, mask=mask,
                                        normalize=Normalize_CoM)
            # pass to hyperspy and save
            hs_CoMx = hs.signals.Signal2D(CoMx)
            hs_CoMx.save(CoMx_file, overwrite=Overwrite)
            hs_CoMx.save(CoMx_file, overwrite=Overwrite, extension='png')
            hs_CoMy = hs.signals.Signal2D(CoMy)
            hs_CoMy.save(CoMy_file, overwrite=Overwrite)
            hs_CoMy.save(CoMy_file, overwrite=Overwrite, extension='png')
            time_CoM2 = time.time()
            print('CoM analysis completed and saved in : ',
                  time_CoM2 - time_CoM1, ' s')

    # DPC analysis
    if 'DPC' in proc_dict:
        run_DPC = proc_dict['DPC']
        # define file name
        phase_file = file_path + '_phase'
        if os.path.isfile(phase_file + '.hspy'):
            # check overwrite flag and skip processing if set to zero
            if Overwrite == False:
                print('DPC data exists, skipping DPC analysis')
                run_DPC = 0
            else:
                print('DPC data exists, overwriting')
        if run_DPC == 1:
            print('Running DPC analysis')
            time_DPC1 = time.time()
            # get parameters
            theta = proc_dict['DPC_theta']
            flip = bool(proc_dict['DPC_flip'])
            pad_factor = int(proc_dict['DPC_pad'])
            low_pass = proc_dict['DPC_lowpass']
            high_pass = proc_dict['DPC_highpass']
            step_size = proc_dict['DPC_stepsize']
            niter = int(proc_dict['DPC_niter'])
            # load CoMx and CoMy if not already calculated
            try:
                CoMx
            except NameError:
                CoM_flag = False
            else:
                CoM_flag = True
            if CoM_flag == False:
                try:
                    CoMx_file = file_path + '_CoMx.hspy'
                    CoMy_file = file_path + '_CoMy.hspy'
                    CoMx = hs.load(CoMx_file)
                    CoMy = hs.load(CoMy_file)
                    CoMx = CoMx.data
                    CoMy = CoMy.data
                except:
                    print('CoM files do not exist - run CoM calculation')
            # calculate phase from CoM
            phase, error = get_phase_from_CoM(
                CoMx, CoMy, theta=theta, flip=flip,
                paddingfactor=pad_factor, regLowPass=low_pass,
                regHighPass=high_pass, stepsize=step_size, n_iter=niter)
            # pass to hyperspy object and save
            phase_file = file_path + '_phase'
            hs_phase = hs.signals.Signal2D(phase)
            hs_phase.save(phase_file, overwrite=Overwrite)
            hs_phase.save(phase_file, overwrite=Overwrite, extension='png')
            time_DPC2 = time.time()
            print('DPC analysis completed and saved in : ',
                  time_DPC2 - time_DPC1, ' s')

    time2 = time.time()
    print('Processing complete in : ', time2 - time0, ' s')
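# A minimal driver sketch for process_data (file paths and flag values are
# hypothetical; the real configuration is parsed elsewhere in this script):
#
#     example_proc_dict = {
#         'Overwrite': 0,        # keep existing results
#         'ADF': 1,              # annular dark-field image from binned data
#         'ADF_expand': 20,      # expansion beyond the BF disc (default 20)
#         'CoM': 1,
#         'bin_CoM': 1,          # centre-of-mass on the binned dataset
#         'Normalize_CoM': 1,
#     }
#     process_data('scan.hdf5', 'scan_bin.hdf5', example_proc_dict)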
def test_loading_invalid_protochips_file():
    filename = os.path.join(dirpath, 'invalid_protochips_file.csv')
    with pytest.raises(IOError) as cm:
        hs.load(filename)
    cm.match(invalid_file_error)
def test_read_lazy_file():
    s = hs.load(file3, nxdata_only=True, lazy=True)
    assert s[0]._lazy and s[1]._lazy
def _load_and_cast(filepath, x, y, chunk_size):
    """Loads a chunk of a larger diffraction pattern."""
    s = hs.load(filepath, lazy=True)
    s = s.inav[x:x + chunk_size, y:y + chunk_size]
    s.compute()
    return pxm.ElectronDiffraction2D(s)
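# Example use of the chunked loader above (file name and tile origin are
# illustrative): lazily load a 32 x 32 navigation-space tile starting at
# (x, y) = (0, 0), then compute it into memory as an ElectronDiffraction2D.
#
#     tile = _load_and_cast('large_4dstem_scan.hspy', 0, 0, 32)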
## Testing ImagePlotter
# Im = np.random.random((2000, 2000))
# a = ImagePlotter.ImagePlotter(ax, Im)

### Testing SIPlotter
# SIdata = np.random.random(size=(20, 30, 50))
# SIdata[:, :, 10] = 2
# SI = SpectrumImage.EELSSpectrumImage(SIdata, dispersion=0.01)
# plotter = SpectrumImagePlotter.SpectrumImagePlotter(SI)
# plt.show()

### Testing SIPlotter with real data!
folder = '/home/isobel/Documents/McMaster/EELS/2016-07-28/Sq2R_(1,7)/'
filebase = 'EELS Spectrum Image (dark ref corrected).dm3'
s = hp.load(folder + filebase)
eels = SpectrumImage.EELSSpectrumImage(s.data)
# PSF = Spectrum.EELSSpectrum.LoadFromCSV(
#     '/home/isobel/Documents/McMaster/EELS/2016-07-27/SI3/Processed/Spectrum_ZLP.csv')
# print(np.shape(PSF.intensity))
p1 = SpectrumImagePlotter.SpectrumImagePlotter(eels, filepath=folder)
# eels2 = eels.RLDeconvolution(2, PSF)
# p2 = SpectrumImagePlotter.SpectrumImagePlotter(eels2)
p1.ShowPlot()

folderCL = '/home/isobel/Documents/McMaster/CL/T9-3_Sq1A_(1,3)/'
fileCL = 'T9-3_Sq1A_(1,3)h_Gr800at750_30keV_Ap3Spot4_2s_noQWP_noPol_full2.h5'
cl = CLSpectrumData.CLDataSet.LoadFromFile(folderCL + fileCL)
plotter = SpectrumImagePlotter.SpectrumImagePlotter(cl.SI)
def load_data(path=None, fls_file='', al_file='', flip=None, flip_fls_file=None, filtersize=3): """Load files in a directory (from a .fls file) using hyperspy. For more information on how to organize the directory and load the data, as well as how to setup the .fls file please refer to the README or the TIE_template.ipynb notebook. Args: path: String. Location of data directory. fls_file: String. Name of the .fls file which contains the image names and defocus values. al_file: String. Name of the aligned stack image file. flip: Bool. Is there a flip stack? If false, it will not assume a uniformly thick film and not reconstruct electrostatic phase shift. Optional Args: flip_fls_file: String. Name of the .fls file for the flip images if they are not named the same as the unflip files. Will only be applied to the /flip/ directory. filtersize: Int. The images are processed with a median filter to remove hot pixels which occur in experimental data. This should be set to 0 for simulated data, though generally one would only use this function for experimental data. Returns: imstack: array of hyperspy signal2D objects (one per image) flipstack: array of hyperspy signal2D objects, only if flip ptie: TIE_params object holding a reference to the imstack and many useful parameters. """ unflip_files = [] flip_files = [] if not fls_file.endswith('.fls'): fls_file += '.fls' if flip_fls_file is None: # one fls file given fls = [] with open(path + fls_file) as file: for line in file: fls.append(line.strip()) num_files = int(fls[0]) if flip: for line in fls[1:num_files + 1]: unflip_files.append(path + 'unflip/' + line) for line in fls[1:num_files + 1]: flip_files.append(path + 'flip/' + line) else: for line in fls[1:num_files + 1]: unflip_files.append(path + 'tfs/' + line) else: # there are 2 fls files given if not flip: print( textwrap.dedent(""" You probably made a mistake. You're defining a flip fls file but saying there is no full tfs for both unflip and flip. If just one tfs use one fls file.\n""")) sys.exit(1) if not flip_fls_file.endswith('.fls'): flip_fls_file += '.fls' fls = [] flip_fls = [] with open(path + fls_file) as file: for line in file: fls.append(line.strip()) with open(path + flip_fls_file) as file: for line in file: flip_fls.append(line.strip()) assert int(fls[0]) == int(flip_fls[0]) num_files = int(fls[0]) for line in fls[1:num_files + 1]: unflip_files.append(path + 'unflip/' + line) for line in flip_fls[1:num_files + 1]: flip_files.append(path + 'flip/' + line) # Actually load the data using hyperspy imstack = hs.load(unflip_files) if flip: flipstack = hs.load(flip_files) else: flipstack = [] # convert scale dimensions to nm for sig in imstack + flipstack: sig.axes_manager.convert_units(units=['nm', 'nm']) if unflip_files[0][-4:] != '.dm3' and unflip_files[0][-4:] != '.dm4': # if not dm3's then they generally don't have the title metadata. for sig in imstack + flipstack: sig.metadata.General.title = sig.metadata.General.original_filename # load the aligned tifs and update the dm3 data to match # The data from the dm3's will be replaced with the aligned image data. 
    try:
        al_tifs = io.imread(path + al_file)
    except FileNotFoundError as e:
        print('Incorrect aligned stack filename given.')
        raise e

    if flip:
        tot_files = 2 * num_files
    else:
        tot_files = num_files

    for i in range(tot_files):
        # pull slices from the correct axis; assumes there are fewer slices
        # than the images are tall
        if al_tifs.shape[0] < al_tifs.shape[2]:
            im = al_tifs[i]
        elif al_tifs.shape[0] > al_tifs.shape[2]:
            im = al_tifs[:, :, i]
        else:
            print("Bad stack shape, or maybe the second axis is the slice axis?")
            print('Loading failed.\n')
            sys.exit(1)

        # then median filter to remove "hot pixels"
        im = median_filter(im, size=filtersize)

        # and assign to the appropriate stack
        if i < num_files:
            print('loading unflip:', unflip_files[i])
            imstack[i].data = im
        else:
            j = i - num_files
            print('loading flip:', flip_files[j])
            flipstack[j].data = im

    # read the defocus values
    defvals = fls[-(num_files // 2):]
    assert num_files == 2 * len(defvals) + 1
    defvals = [float(i) for i in defvals]  # defocus values +/-

    # Create a TIE_params object
    ptie = TIE_params(imstack, flipstack, defvals, flip, path)
    print('Data loaded successfully.')
    return (imstack, flipstack, ptie)
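# A minimal usage sketch for load_data above, assuming the directory layout
# described in the docstring (path/unflip/ and path/flip/ hold the images
# listed in the .fls file). All file names here are hypothetical placeholders.
imstack, flipstack, ptie = load_data(
    path='/path/to/data/',
    fls_file='dataset.fls',        # image names + defocus values
    al_file='aligned_stack.tif',   # aligned image stack
    flip=True,                     # a flip stack exists in /flip/
    filtersize=3)                  # median filter to remove hot pixels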
def test_load1():
    s = hs.load(FILE1)
    assert s.data.shape == (3, 2, 144, 144)
    assert s.axes_manager.as_dictionary() == axes1
def load_data(path=None, fls_file='', al_file='', flip=None,
              flip_fls_file=None, filtersize=3):
    """Load files in a directory (from a .fls file) using hyperspy.

    For more information on how to organize the directory and load the data,
    as well as how to set up the .fls file, please refer to the README or the
    TIE_template.ipynb notebook.

    Args:
        path (str): Location of data directory.
        fls_file (str): Name of the .fls file which contains the image names
            and defocus values.
        al_file (str): Name of the aligned stack image file.
        flip (bool): True if using a flip stack, False otherwise. Uniformly
            thick films can be reconstructed without a flip stack; in that
            case the electrostatic phase shift will not be reconstructed.
        flip_fls_file (str): (`optional`) Name of the .fls file for the flip
            images if they are not named the same as the unflip files. Will
            only be applied to the /flip/ directory.
        filtersize (int): (`optional`) The images are processed with a median
            filter to remove hot pixels which occur in experimental data.
            This should be set to 0 for simulated data, though generally one
            would only use this function for experimental data.

    Returns:
        list: List of length 3, containing the following items:

        - imstack: array of hyperspy signal2D objects (one per image)
        - flipstack: array of hyperspy signal2D objects, empty list if
          flip == False
        - ptie: TIE_params object holding a reference to the imstack and
          many other parameters.
    """
    unflip_files = []
    flip_files = []

    # Find the unflip fls file
    path = os.path.abspath(path)
    if not fls_file.endswith('.fls'):
        fls_file += '.fls'
    if os.path.isfile(os.path.join(path, fls_file)):
        fls_full = os.path.join(path, fls_file)
    elif os.path.isfile(os.path.join(path, 'unflip', fls_file)):
        fls_full = os.path.join(path, 'unflip', fls_file)
    elif os.path.isfile(os.path.join(path, 'tfs', fls_file)) and not flip:
        fls_full = os.path.join(path, 'tfs', fls_file)
    else:
        print("fls file could not be found.")
        sys.exit(1)

    if flip_fls_file is None:  # one fls file given
        fls = []
        with open(fls_full) as file:
            for line in file:
                fls.append(line.strip())

        num_files = int(fls[0])
        if flip:
            for line in fls[1:num_files + 1]:
                unflip_files.append(os.path.join(path, 'unflip', line))
            for line in fls[1:num_files + 1]:
                flip_files.append(os.path.join(path, 'flip', line))
        else:
            if os.path.isfile(os.path.join(path, 'tfs', fls[2])):
                tfs_dir = 'tfs'
            else:
                tfs_dir = 'unflip'
            for line in fls[1:num_files + 1]:
                unflip_files.append(os.path.join(path, tfs_dir, line))

    else:  # there are 2 fls files given
        if not flip:
            print(textwrap.dedent("""
                You probably made a mistake. You're defining both unflip and
                flip fls files, but have flip=False.
                Proceeding anyway; will only load the unflip stack (if it
                doesn't break).\n"""))

        # find the flip fls file
        if not flip_fls_file.endswith('.fls'):
            flip_fls_file += '.fls'
        if os.path.isfile(os.path.join(path, flip_fls_file)):
            flip_fls_full = os.path.join(path, flip_fls_file)
        elif os.path.isfile(os.path.join(path, 'flip', flip_fls_file)):
            flip_fls_full = os.path.join(path, 'flip', flip_fls_file)
        else:
            print("flip fls file could not be found.")
            sys.exit(1)

        fls = []
        flip_fls = []
        with open(fls_full) as file:
            for line in file:
                fls.append(line.strip())
        with open(flip_fls_full) as file:
            for line in file:
                flip_fls.append(line.strip())

        assert int(fls[0]) == int(flip_fls[0])
        num_files = int(fls[0])
        for line in fls[1:num_files + 1]:
            unflip_files.append(os.path.join(path, "unflip", line))
        for line in flip_fls[1:num_files + 1]:
            flip_files.append(os.path.join(path, "flip", line))

    # Actually load the data using hyperspy
    imstack = hs.load(unflip_files)
    if flip:
        flipstack = hs.load(flip_files)
    else:
        flipstack = []

    # convert scale dimensions to nm
    for sig in imstack + flipstack:
        sig.axes_manager.convert_units(units=['nm', 'nm'])

    if unflip_files[0][-4:] != '.dm3' and unflip_files[0][-4:] != '.dm4':
        # if not dm3's then they generally don't have the title metadata.
        for sig in imstack + flipstack:
            sig.metadata.General.title = sig.metadata.General.original_filename

    # load the aligned tifs and update the dm3 data to match
    # The data from the dm3's will be replaced with the aligned image data.
    try:
        al_tifs = io.imread(os.path.join(path, al_file))
    except FileNotFoundError as e:
        print('Incorrect aligned stack filename given.')
        raise e

    if flip:
        tot_files = 2 * num_files
    else:
        tot_files = num_files

    for i in range(tot_files):
        # pull slices from the correct axis; assumes there are fewer slices
        # than the images are tall
        if al_tifs.shape[0] < al_tifs.shape[2]:
            im = al_tifs[i]
        elif al_tifs.shape[0] > al_tifs.shape[2]:
            im = al_tifs[:, :, i]
        else:
            print("Bad stack shape, or maybe the second axis is the slice axis?")
            print('Loading failed.\n')
            sys.exit(1)

        # then median filter to remove "hot pixels"
        im = median_filter(im, size=filtersize)

        # and assign to the appropriate stack
        if i < num_files:
            print('loading unflip:', unflip_files[i])
            imstack[i].data = im
        else:
            j = i - num_files
            print('loading flip:', flip_files[j])
            flipstack[j].data = im

    # read the defocus values
    defvals = fls[-(num_files // 2):]
    assert num_files == 2 * len(defvals) + 1
    defvals = [float(i) for i in defvals]  # defocus values +/-

    # Create a TIE_params object
    ptie = TIE_params(imstack, flipstack, defvals, flip, path)
    print('Data loaded successfully.')
    return (imstack, flipstack, ptie)
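# The .fls layout implied by the parsing above (a hypothetical example for a
# 5-image through-focus series: line 1 is the image count, then one file name
# per line, then the defocus magnitudes, one per +/- pair around focus):
#
#     5
#     under_2000.dm3
#     under_1000.dm3
#     infocus.dm3
#     over_1000.dm3
#     over_2000.dm3
#     1000
#     2000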
import os
import glob

import numpy as np
# hyperspy module
import hyperspy.api as hs
from hyperspy.drawing import widgets

# resolve and keep the current working directory
wd = os.getcwd()
os.chdir(wd)

## To generate a calibration factor table as a CSV file
calibration = []
## read all dm3 files in the folder
for filepath in sorted(glob.glob('./*.dm3')):
    img = hs.load(filepath)
    # get information from the DM3 metadata
    # (low_limit/high_limit renamed from min/max to avoid shadowing builtins)
    low_limit = img.original_metadata.DocumentObjectList.TagGroup0.ImageDisplayInfo.LowLimit
    high_limit = img.original_metadata.DocumentObjectList.TagGroup0.ImageDisplayInfo.HighLimit
    res = float(
        img.original_metadata.ImageList.TagGroup0.ImageData.Dimensions.Data0)
    cal = img.original_metadata.ImageList.TagGroup0.ImageData.Calibrations.Dimension.TagGroup0.Scale
    unit = img.original_metadata.ImageList.TagGroup0.ImageData.Calibrations.Dimension.TagGroup0.Units
    ### make a table of calibration factors
    item = [filepath, cal]
    calibration.append(item)
np.savetxt("./calibration_dm3.csv", calibration, delimiter=",", fmt='%s')

## matplotlib export settings for the savefig function
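# The same pixel calibration can usually be read through hyperspy's
# axes_manager, which avoids hard-coding DM3 tag paths. A minimal sketch,
# assuming a 2D image whose signal axes carry the scale and units (the file
# name is a hypothetical placeholder):
import hyperspy.api as hs

img = hs.load('example.dm3')
cal = img.axes_manager[0].scale    # pixel size along the first axis
unit = img.axes_manager[0].units   # e.g. 'nm'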
def test_write_data_line(save_path):
    signal = hs.signals.Signal2D(
        (255 * np.random.rand(3, 5, 5)).astype(np.uint8))
    signal.save(save_path, overwrite=True)
    sig_reload = hs.load(save_path)
    np.testing.assert_equal(signal.data, sig_reload.data)
import os

import numpy as np
import matplotlib.pyplot as plt
import matplotlib.gridspec as gridspec
from mpl_toolkits.axes_grid1.anchored_artists import AnchoredSizeBar
import matplotlib.font_manager as fm
import matplotlib.patheffects as patheffects
import hyperspy.api as hs

my_path = os.path.join(os.path.dirname(__file__), 'make_nice_figures')
if not os.path.exists(my_path):
    os.makedirs(my_path)

# Load the atomic resolution image
s_adf = hs.load(os.path.join(my_path, 'ADF_image.hdf5'))

# Load the structural data
atoms_A = np.load(os.path.join(my_path, 'sublattice_A.npz'))
atoms_B = np.load(os.path.join(my_path, 'sublattice_B.npz'))
dd_map = hs.load(os.path.join(my_path, 'distance_difference_map.hdf5'))
dd_line = hs.load(os.path.join(my_path, 'dd_line.hdf5'))

# Scaling the data
scale = 0.142
s_adf.axes_manager[0].scale = scale
s_adf.axes_manager[1].scale = scale
# dd_map has twice the number of pixels, so its scale is half
dd_map.axes_manager[0].scale = scale / 2
dd_map.axes_manager[1].scale = scale / 2

# Crop images
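# A minimal sketch of adding a scale bar with AnchoredSizeBar (imported
# above); the figure setup, bar length, and label are hypothetical values:
fig, ax = plt.subplots()
ax.imshow(s_adf.data, cmap='gray')
fontprops = fm.FontProperties(size=12)
scalebar = AnchoredSizeBar(
    ax.transData,
    2 / scale,              # bar length in pixels for a 2 nm bar
    '2 nm', 'lower right',
    pad=0.3, color='white', frameon=False,
    size_vertical=2, fontproperties=fontprops)
ax.add_artist(scalebar)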