def get_settings_from_file(
    filename: str,
) -> Tuple[DictionaryTreeBrowser, DictionaryTreeBrowser, DictionaryTreeBrowser]:
    """Return metadata with parameters from a NORDIF setting file.

    Parameters
    ----------
    filename
        File path of NORDIF setting file.

    Returns
    -------
    md
        Metadata complying with HyperSpy's metadata structure.
    omd
        Metadata that does not fit into HyperSpy's metadata structure.
    scan_size
        Information on image size, scan size and scan steps.
    """
    f = open(filename, "r", encoding="latin-1")  # Avoid byte strings
    content = f.read().splitlines()

    # Get line numbers of setting blocks
    blocks = {
        "[NORDIF]": -1,
        "[Microscope]": -1,
        "[EBSD detector]": -1,
        "[Detector angles]": -1,
        "[Acquisition settings]": -1,
        "[Area]": -1,
        "[Specimen]": -1,
    }
    for i, line in enumerate(content):
        for block in blocks:
            if block in line:
                blocks[block] = i
    l_nordif = blocks["[NORDIF]"]
    l_mic = blocks["[Microscope]"]
    l_det = blocks["[EBSD detector]"]
    l_ang = blocks["[Detector angles]"]
    l_acq = blocks["[Acquisition settings]"]
    l_area = blocks["[Area]"]
    l_specimen = blocks["[Specimen]"]

    # Create metadata and original metadata structures
    md = ebsd_metadata()
    sem_node, ebsd_node = metadata_nodes(["sem", "ebsd"])
    omd = DictionaryTreeBrowser()
    omd.set_item("nordif_header", content)

    # Get metadata values from settings file using regular expressions
    azimuth_angle = get_string(content, "Azimuthal\t(.*)\t", l_ang + 4, f)
    md.set_item(ebsd_node + ".azimuth_angle", float(azimuth_angle))
    beam_energy = get_string(
        content, "Accelerating voltage\t(.*)\tkV", l_mic + 5, f
    )
    md.set_item(sem_node + ".beam_energy", float(beam_energy))
    detector = get_string(content, "Model\t(.*)\t", l_det + 1, f)
    detector = re.sub("[^a-zA-Z0-9]", repl="", string=detector)
    md.set_item(ebsd_node + ".detector", "NORDIF " + detector)
    elevation_angle = get_string(content, "Elevation\t(.*)\t", l_ang + 5, f)
    md.set_item(ebsd_node + ".elevation_angle", float(elevation_angle))
    exposure_time = get_string(content, "Exposure time\t(.*)\t", l_acq + 3, f)
    md.set_item(ebsd_node + ".exposure_time", float(exposure_time) / 1e6)
    frame_rate = get_string(content, "Frame rate\t(.*)\tfps", l_acq + 1, f)
    md.set_item(ebsd_node + ".frame_rate", int(frame_rate))
    gain = get_string(content, "Gain\t(.*)\t", l_acq + 4, f)
    md.set_item(ebsd_node + ".gain", float(gain))
    magnification = get_string(content, "Magnification\t(.*)\t#", l_mic + 3, f)
    md.set_item(sem_node + ".magnification", int(magnification))
    mic_manufacturer = get_string(content, "Manufacturer\t(.*)\t", l_mic + 1, f)
    mic_model = get_string(content, "Model\t(.*)\t", l_mic + 2, f)
    md.set_item(sem_node + ".microscope", mic_manufacturer + " " + mic_model)
    sample_tilt = get_string(content, "Tilt angle\t(.*)\t", l_mic + 7, f)
    md.set_item(ebsd_node + ".sample_tilt", float(sample_tilt))
    scan_time = get_string(content, "Scan time\t(.*)\t", l_area + 7, f)
    scan_time = time.strptime(scan_time, "%H:%M:%S")
    scan_time = datetime.timedelta(
        hours=scan_time.tm_hour,
        minutes=scan_time.tm_min,
        seconds=scan_time.tm_sec,
    ).total_seconds()
    md.set_item(ebsd_node + ".scan_time", int(scan_time))
    version = get_string(content, "Software version\t(.*)\t", l_nordif + 1, f)
    md.set_item(ebsd_node + ".version", version)
    working_distance = get_string(
        content, "Working distance\t(.*)\tmm", l_mic + 6, f
    )
    md.set_item(sem_node + ".working_distance", float(working_distance))
    md.set_item(ebsd_node + ".grid_type", "square")
    md.set_item(ebsd_node + ".manufacturer", "NORDIF")
    specimen = get_string(content, "Name\t(.*)\t", l_specimen + 1, f)
    pmd = _phase_metadata()
    pmd["material_name"] = specimen
    md.set_item("Sample.Phases.1", pmd)

    # Get scan size values
    scan_size = DictionaryTreeBrowser()
    num_samp = get_string(content, "Number of samples\t(.*)\t#", l_area + 6, f)
    ny, nx = [int(i) for i in num_samp.split("x")]
    scan_size.set_item("nx", int(nx))
    scan_size.set_item("ny", int(ny))
    pattern_size = get_string(content, "Resolution\t(.*)\tpx", l_acq + 2, f)
    sx, sy = [int(i) for i in pattern_size.split("x")]
    scan_size.set_item("sx", int(sx))
    scan_size.set_item("sy", int(sy))
    step_size = get_string(content, "Step size\t(.*)\t", l_area + 5, f)
    scan_size.set_item("step_x", float(step_size))
    scan_size.set_item("step_y", float(step_size))

    # Close the setting file before returning to avoid leaking the handle
    f.close()

    return md, omd, scan_size
def file_writer(
    filename: str,
    signal,
    add_scan: Optional[bool] = None,
    scan_number: int = 1,
    **kwargs,
):
    """Write an :class:`~kikuchipy.signals.EBSD` or
    :class:`~kikuchipy.signals.LazyEBSD` signal to a new h5ebsd file or
    to an existing, but not open, h5ebsd file.

    Only writing to kikuchipy's h5ebsd format is supported.

    Parameters
    ----------
    filename
        Full path of HDF file.
    signal : kikuchipy.signals.EBSD or kikuchipy.signals.LazyEBSD
        Signal instance.
    add_scan
        Add signal to an existing, but not open, h5ebsd file. If it does
        not exist, it is created and the signal is written to it.
    scan_number
        Scan number in the name of the HDF dataset when writing to an
        existing, but not open, h5ebsd file.
    kwargs
        Keyword arguments passed to :meth:`h5py:Group.require_dataset`.
    """
    # Set manufacturer and version to use in file
    from kikuchipy.release import version as ver_signal

    man_ver_dict = {"manufacturer": "kikuchipy", "version": ver_signal}

    # Open file in correct mode
    mode = "w"
    if os.path.isfile(filename) and add_scan:
        mode = "r+"
    try:
        f = h5py.File(filename, mode=mode)
    except OSError:
        raise OSError("Cannot write to an already open file.")

    if os.path.isfile(filename) and add_scan:
        check_h5ebsd(f)
        man_file, ver_file = manufacturer_version(f)
        if man_ver_dict["manufacturer"].lower() != man_file.lower():
            f.close()
            raise IOError(
                f"Only writing to kikuchipy's (and not {man_file}'s) h5ebsd "
                "format is supported."
            )
        man_ver_dict["version"] = ver_file

        # Get valid scan number
        scans_file = [f[k] for k in f["/"].keys() if "Scan" in k]
        scan_nos = [int(i.name.split()[-1]) for i in scans_file]
        for i in scan_nos:
            if i == scan_number:
                q = f"Scan {i} already in file, enter another scan number:\n"
                scan_number = _get_input_variable(q, int)
                if scan_number is None:
                    raise IOError("Invalid scan number.")
    else:  # File did not exist
        dict2h5ebsdgroup(man_ver_dict, f["/"], **kwargs)

    scan_group = f.create_group("Scan " + str(scan_number))

    # Create scan dictionary with EBSD and SEM metadata
    # Add scan size, image size and detector pixel size to dictionary to write
    data_shape = [1] * 4  # (ny, nx, sy, sx)
    data_scales = [1] * 4  # (y, x, dy, dx)
    nav_extent = [0, 1, 0, 1]  # (x0, x1, y0, y1)
    am = signal.axes_manager
    nav_axes = am.navigation_axes
    nav_dim = am.navigation_dimension
    if nav_dim == 1:
        nav_axis = nav_axes[0]
        if nav_axis.name == "y":
            data_shape[0] = nav_axis.size
            data_scales[0] = nav_axis.scale
            nav_extent[2:] = am.navigation_extent
        else:  # nav_axis.name == "x" or something else
            data_shape[1] = nav_axis.size
            data_scales[1] = nav_axis.scale
            nav_extent[:2] = am.navigation_extent
    elif nav_dim == 2:
        data_shape[:2] = [i.size for i in nav_axes][::-1]
        data_scales[:2] = [i.scale for i in nav_axes][::-1]
        nav_extent = am.navigation_extent
    data_shape[2:] = am.signal_shape
    data_scales[2:] = [i.scale for i in am.signal_axes]
    ny, nx, sy, sx = data_shape
    scale_ny, scale_nx, scale_sy, _ = data_scales
    md = signal.metadata.deepcopy()
    sem_node, ebsd_node = metadata_nodes(["sem", "ebsd"])
    md.set_item(ebsd_node + ".pattern_width", sx)
    md.set_item(ebsd_node + ".pattern_height", sy)
    md.set_item(ebsd_node + ".n_columns", nx)
    md.set_item(ebsd_node + ".n_rows", ny)
    md.set_item(ebsd_node + ".step_x", scale_nx)
    md.set_item(ebsd_node + ".step_y", scale_ny)
    md.set_item(ebsd_node + ".detector_pixel_size", scale_sy)

    # Separate EBSD and SEM metadata
    det_str, ebsd_str = ebsd_node.split(".")[-2:]  # Detector and EBSD nodes
    md_sem = md.get_item(sem_node).copy().as_dictionary()  # SEM node as dict
    md_det = md_sem.pop(det_str)  # Remove/assign detector node from SEM node
    md_ebsd = md_det.pop(ebsd_str)

    # Phases
    if md.get_item("Sample.Phases") is None:
        md = _update_phase_info(md, _phase_metadata())  # Add default phase
    md_ebsd["Phases"] = md.Sample.Phases.as_dictionary()
    for phase in md_ebsd["Phases"].keys():  # Ensure coordinates are arrays
        atom_coordinates = md_ebsd["Phases"][phase]["atom_coordinates"]
        for atom in atom_coordinates.keys():
            atom_coordinates[atom]["coordinates"] = np.array(
                atom_coordinates[atom]["coordinates"]
            )
    scan = {"EBSD": {"Header": md_ebsd}, "SEM": {"Header": md_sem}}

    # Write scan dictionary to HDF groups
    dict2h5ebsdgroup(scan, scan_group)

    # Write signal to file
    man_pats = manufacturer_pattern_names()
    dset_pattern_name = man_pats["kikuchipy"]
    overwrite_dataset(
        scan_group.create_group("EBSD/Data"),
        signal.data.reshape(nx * ny, sy, sx),
        dset_pattern_name,
        signal_axes=(2, 1),
        **kwargs,
    )
    nx_start, nx_stop, ny_start, ny_stop = nav_extent
    sample_pos = {
        "y_sample": np.tile(np.linspace(ny_start, ny_stop, ny), nx),
        "x_sample": np.tile(np.linspace(nx_start, nx_stop, nx), ny),
    }
    dict2h5ebsdgroup(sample_pos, scan_group["EBSD/Data"])

    f.close()
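

# Usage sketch (illustrative only): how the writer above might be called with an
# EBSD signal `s` obtained elsewhere, e.g. with kikuchipy.load(). The file name
# "patterns.h5" is an arbitrary example, not part of the API. If the requested
# scan number already exists in the file, the writer prompts for another one via
# _get_input_variable above.
#
#     file_writer("patterns.h5", s)                                # new file, Scan 1
#     file_writer("patterns.h5", s, add_scan=True, scan_number=2)  # append a scan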