Example No. 1
def _write_nexus_groups(dictionary, group, **kwds):
    """Recursively iterate throuh dictionary and write groups to nexus.

    Parameters
    ----------
    dictionary : dict
        Dictionary contents to store in the HDF group.
    group : hdf group
        Location in which to store the dictionary.
    **kwds : additional keywords
        Additional keywords to pass to the h5py create_dataset method.

    """
    for key, value in dictionary.items():
        if key == 'attrs':
            # we will handle attrs later...
            continue
        if isinstance(value, dict):
            if "attrs" in value:
                if "NX_class" in value["attrs"] and \
                        value["attrs"]["NX_class"] == "NXdata":
                    continue
            if 'value' in value.keys() \
                and not isinstance(value["value"], dict) \
                    and len(set(list(value.keys()) + ["attrs", "value"])) == 2:
                value = value["value"]
            else:
                _write_nexus_groups(value, group.require_group(key), **kwds)
        if isinstance(value, (list, tuple, np.ndarray, da.Array)):
            data = _parse_to_file(value)
            overwrite_dataset(group, data, key, chunks=None, **kwds)
        elif isinstance(value, (int, float, str, bytes)):
            group.create_dataset(key, data=_parse_to_file(value))
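A minimal usage sketch for the function above, assuming _write_nexus_groups and its private helpers _parse_to_file and overwrite_dataset are available in the same module (they are internal HyperSpy NeXus IO code, so names and behaviour may differ between versions):

import h5py
import numpy as np

# Nested dicts become sub-groups; a dict holding only "value" (and optionally
# "attrs") collapses to a single dataset; "attrs" entries themselves are
# skipped here and left to a separate attribute writer in the full module.
metadata = {
    "instrument": {
        "attrs": {"NX_class": "NXinstrument"},
        "beam_energy": {"value": 300.0, "attrs": {"units": "keV"}},
        "detector_counts": np.arange(10),
    }
}

with h5py.File("example_nexus.h5", "w") as f:
    # Keywords such as compression are forwarded to overwrite_dataset for
    # array-like values.
    _write_nexus_groups(metadata, f.require_group("entry"), compression="gzip")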
Example No. 2
def _write_signal(signal, nxgroup, signal_name, **kwds):
    """Store the signal data as an NXdata dataset.

    Parameters
    ----------
    signal : HyperSpy signal
    nxgroup : HDF group
        Entry at which to save the signal data.
    signal_name : str
        Name under which to store the signal entry in the file.

    """
    smd = signal.metadata.Signal
    if signal.axes_manager.signal_dimension == 1:
        smd.record_by = "spectrum"
    elif signal.axes_manager.signal_dimension == 2:
        smd.record_by = "image"
    else:
        smd.record_by = ""

    nxdata = nxgroup.require_group(signal_name)
    nxdata.attrs["NX_class"] = _parse_to_file("NXdata")
    nxdata.attrs["signal"] = _parse_to_file("data")
    if smd.record_by:
        nxdata.attrs["interpretation"] = _parse_to_file(smd.record_by)
    datastr = _parse_to_file("data")
    overwrite_dataset(nxdata, signal.data, datastr, chunks=None, **kwds)
    axis_names = [_parse_to_file(".")] * len(signal.axes_manager.shape)
    for i, axis in enumerate(signal.axes_manager._axes):
        if axis.name != t.Undefined:
            axname = _parse_to_file(axis.name)
            axindex = [axis.index_in_array]
            indices = _parse_to_file(axis.name + "_indices")
            nxdata.attrs[indices] = _parse_to_file(axindex)
            nxdata.require_dataset(axname,
                                   data=axis.axis,
                                   shape=axis.axis.shape,
                                   dtype=axis.axis.dtype)
            axis_names[axis.index_in_array] = axname

    nxdata.attrs["axes"] = _parse_to_file(axis_names)
    return nxdata
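A hedged usage sketch: store a small HyperSpy Signal2D as an NXdata group with _write_signal above (the helpers _parse_to_file and overwrite_dataset are assumed to be in scope):

import h5py
import numpy as np
import hyperspy.api as hs

sig = hs.signals.Signal2D(np.random.random((5, 16, 16)))
sig.axes_manager[0].name = "x"   # navigation axis
sig.axes_manager[1].name = "ky"  # signal axes
sig.axes_manager[2].name = "kx"

with h5py.File("example_signal.h5", "w") as f:
    entry = f.require_group("entry")
    entry.attrs["NX_class"] = "NXentry"
    nxdata = _write_signal(sig, entry, "my_signal")
    print(nxdata.attrs["axes"])  # axis names in array order, "." for unnamed axes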
Example No. 3
def dict2h5ebsdgroup(dictionary, group, **kwargs):
    """Write a dictionary from metadata to datasets in a new group in an
    opened HDF file in the h5ebsd format.

    Parameters
    ----------
    dictionary : dict
        Metadata, with keys as dataset names.
    group : h5py.Group
        HDF group to write dictionary to.
    **kwargs :
        Keyword arguments passed to h5py when creating datasets.
    """

    for key, val in dictionary.items():
        ddtype = type(val)
        dshape = (1, )
        written = False
        if isinstance(val, (dict, DictionaryTreeBrowser)):
            if isinstance(val, DictionaryTreeBrowser):
                val = val.as_dictionary()
            dict2h5ebsdgroup(val, group.create_group(key), **kwargs)
            written = True
        elif isinstance(val, str):
            ddtype = 'S' + str(len(val) + 1)
            val = val.encode()
        elif isinstance(val, (np.ndarray, da.Array)):
            overwrite_dataset(group, val, key, **kwargs)
            written = True
        elif ddtype == np.dtype('O'):
            ddtype = h5py.special_dtype(vlen=val[0].dtype)
            dshape = np.shape(val)

        if written:
            continue  # Jump to next item in dictionary
        try:
            group.create_dataset(key, shape=dshape, dtype=ddtype, **kwargs)
            group[key][()] = val
        except (TypeError, IndexError):
            warnings.warn("The hdf5 writer could not write the following "
                          "information to the file '{} : {}'.".format(key, val))
Example No. 4
def file_writer(
    filename: str,
    signal,
    add_scan: Optional[bool] = None,
    scan_number: int = 1,
    **kwargs,
):
    """Write an :class:`~kikuchipy.signals.EBSD` or
    :class:`~kikuchipy.signals.LazyEBSD` signal to an existing,
    but not open, or new h5ebsd file.

    Only writing to kikuchipy's h5ebsd format is supported.

    Parameters
    ----------
    filename
        Full path of HDF file.
    signal : kikuchipy.signals.EBSD or kikuchipy.signals.LazyEBSD
        Signal instance.
    add_scan
        Add signal to an existing, but not open, h5ebsd file. If it does
        not exist it is created and the signal is written to it.
    scan_number
        Scan number in name of HDF dataset when writing to an existing,
        but not open, h5ebsd file.
    kwargs
        Keyword arguments passed to :meth:`h5py:Group.require_dataset`.
    """
    # Set manufacturer and version to use in file
    from kikuchipy.release import version as ver_signal

    man_ver_dict = {"manufacturer": "kikuchipy", "version": ver_signal}

    # Open file in correct mode
    mode = "w"
    if os.path.isfile(filename) and add_scan:
        mode = "r+"
    try:
        f = h5py.File(filename, mode=mode)
    except OSError:
        raise OSError("Cannot write to an already open file.")

    if os.path.isfile(filename) and add_scan:
        check_h5ebsd(f)
        man_file, ver_file = manufacturer_version(f)
        if man_ver_dict["manufacturer"].lower() != man_file.lower():
            f.close()
            raise IOError(
                f"Only writing to kikuchipy's (and not {man_file}'s) h5ebsd "
                "format is supported."
            )
        man_ver_dict["version"] = ver_file

        # Get valid scan number
        scans_file = [f[k] for k in f["/"].keys() if "Scan" in k]
        scan_nos = [int(i.name.split()[-1]) for i in scans_file]
        for i in scan_nos:
            if i == scan_number:
                q = f"Scan {i} already in file, enter another scan number:\n"
                scan_number = _get_input_variable(q, int)
                if scan_number is None:
                    raise IOError("Invalid scan number.")
    else:  # File did not exist
        dict2h5ebsdgroup(man_ver_dict, f["/"], **kwargs)

    scan_group = f.create_group("Scan " + str(scan_number))

    # Create scan dictionary with EBSD and SEM metadata
    # Add scan size, image size and detector pixel size to dictionary to write
    data_shape = [1] * 4  # (ny, nx, sy, sx)
    data_scales = [1] * 4  # (y, x, dy, dx)
    nav_extent = [0, 1, 0, 1]  # (x0, x1, y0, y1)
    am = signal.axes_manager
    nav_axes = am.navigation_axes
    nav_dim = am.navigation_dimension
    if nav_dim == 1:
        nav_axis = nav_axes[0]
        if nav_axis.name == "y":
            data_shape[0] = nav_axis.size
            data_scales[0] = nav_axis.scale
            nav_extent[2:] = am.navigation_extent
        else:  # nav_axis.name == "x" or something else
            data_shape[1] = nav_axis.size
            data_scales[1] = nav_axis.scale
            nav_extent[:2] = am.navigation_extent
    elif nav_dim == 2:
        data_shape[:2] = [i.size for i in nav_axes][::-1]
        data_scales[:2] = [i.scale for i in nav_axes][::-1]
        nav_extent = am.navigation_extent
    data_shape[2:] = am.signal_shape
    data_scales[2:] = [i.scale for i in am.signal_axes]
    ny, nx, sy, sx = data_shape
    scale_ny, scale_nx, scale_sy, _ = data_scales
    md = signal.metadata.deepcopy()
    sem_node, ebsd_node = metadata_nodes(["sem", "ebsd"])
    md.set_item(ebsd_node + ".pattern_width", sx)
    md.set_item(ebsd_node + ".pattern_height", sy)
    md.set_item(ebsd_node + ".n_columns", nx)
    md.set_item(ebsd_node + ".n_rows", ny)
    md.set_item(ebsd_node + ".step_x", scale_nx)
    md.set_item(ebsd_node + ".step_y", scale_ny)
    md.set_item(ebsd_node + ".detector_pixel_size", scale_sy)
    # Separate EBSD and SEM metadata
    det_str, ebsd_str = ebsd_node.split(".")[-2:]  # Detector and EBSD nodes
    md_sem = md.get_item(sem_node).copy().as_dictionary()  # SEM node as dict
    md_det = md_sem.pop(det_str)  # Remove/assign detector node from SEM node
    md_ebsd = md_det.pop(ebsd_str)
    # Phases
    if md.get_item("Sample.Phases") is None:
        md = _update_phase_info(md, _phase_metadata())  # Add default phase
    md_ebsd["Phases"] = md.Sample.Phases.as_dictionary()
    for phase in md_ebsd["Phases"].keys():  # Ensure coordinates are arrays
        atom_coordinates = md_ebsd["Phases"][phase]["atom_coordinates"]
        for atom in atom_coordinates.keys():
            atom_coordinates[atom]["coordinates"] = np.array(
                atom_coordinates[atom]["coordinates"]
            )
    scan = {"EBSD": {"Header": md_ebsd}, "SEM": {"Header": md_sem}}

    # Write scan dictionary to HDF groups
    dict2h5ebsdgroup(scan, scan_group)

    # Write signal to file
    man_pats = manufacturer_pattern_names()
    dset_pattern_name = man_pats["kikuchipy"]
    overwrite_dataset(
        scan_group.create_group("EBSD/Data"),
        signal.data.reshape(nx * ny, sy, sx),
        dset_pattern_name,
        signal_axes=(2, 1),
        **kwargs,
    )
    nx_start, nx_stop, ny_start, ny_stop = nav_extent
    sample_pos = {
        "y_sample": np.tile(np.linspace(ny_start, ny_stop, ny), nx),
        "x_sample": np.tile(np.linspace(nx_start, nx_stop, nx), ny),
    }
    dict2h5ebsdgroup(sample_pos, scan_group["EBSD/Data"])

    f.close()
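A hedged usage sketch for the file_writer above. In practice this writer is normally reached through the signal's save method, but it can also be called directly (the input path is a placeholder):

import kikuchipy as kp

s = kp.load("patterns_in.h5")  # any EBSD dataset readable by kikuchipy
file_writer("patterns_out.h5", s)                                # new file, "Scan 1"
file_writer("patterns_out.h5", s, add_scan=True, scan_number=2)  # append a second scan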
Example No. 5
def file_writer(filename, signal, add_scan=None, scan_number=1,
                **kwargs):
    """Write an EBSD signal to an existing, but not open, or new h5ebsd
    file. Only writing to KikuchiPy's h5ebsd format is supported.

    Parameters
    ----------
    filename : str
        Full path of HDF file.
    signal : kikuchipy.signals.EBSD or kikuchipy.lazy_signals.LazyEBSD
        Signal instance.
    add_scan : bool or None, optional
        Add signal to an existing, but not open, h5ebsd file. If it does
        not exist it is created and the signal is written to it.
    scan_number : int, optional
        Scan number in name of HDF dataset when writing to an existing,
        but not open, h5ebsd file.
    **kwargs :
        Keyword arguments passed to h5py.
    """

    # Set manufacturer and version to use in file
    from kikuchipy.version import __version__ as ver_signal
    man_ver_dict = {'manufacturer': 'KikuchiPy', 'version': ver_signal}

    # Open file in correct mode
    mode = 'w'
    if os.path.isfile(filename) and add_scan:
        mode = 'r+'
    try:
        f = h5py.File(filename, mode=mode)
    except OSError:
        raise OSError("Cannot write to an already open file (e.g. a file from "
                      "which data has been read lazily).")

    if os.path.isfile(filename) and add_scan:
        check_h5ebsd(f)
        man_file, ver_file = manufacturer_version(f)
        if man_ver_dict['manufacturer'].lower() != man_file.lower():
            f.close()
            raise IOError("Only writing to KikuchiPy's (and not {}'s) h5ebsd "
                          "format is supported.".format(man_file))
        man_ver_dict['version'] = ver_file

        # Get valid scan number
        scans_file = [f[k] for k in f['/'].keys() if 'Scan' in k]
        scan_nos = [int(i.name.split()[-1]) for i in scans_file]
        for i in scan_nos:
            if i == scan_number:
                q = "Scan {} already in file, enter another scan "\
                    "number:\n".format(i)
                scan_number = get_input_variable(q, int)
                if scan_number is None:
                    raise IOError("Invalid scan number.")
    else:  # File did not exist
        dict2h5ebsdgroup(man_ver_dict, f['/'], **kwargs)

    scan_group = f.create_group('Scan ' + str(scan_number))

    # Create scan dictionary with EBSD and SEM metadata
    # Add scan size, pattern size and detector pixel size to dictionary to write
    sx, sy = signal.axes_manager.signal_shape
    nx, ny = signal.axes_manager.navigation_shape
    nav_indices = signal.axes_manager.navigation_indices_in_array
    sig_indices = signal.axes_manager.signal_indices_in_array
    md = signal.metadata.deepcopy()
    sem_node, ebsd_node = metadata_nodes()
    md.set_item(ebsd_node + '.pattern_width', sx)
    md.set_item(ebsd_node + '.pattern_height', sy)
    md.set_item(ebsd_node + '.n_columns', nx)
    md.set_item(ebsd_node + '.n_rows', ny)
    md.set_item(ebsd_node + '.step_x',
                signal.axes_manager[nav_indices[0]].scale)
    md.set_item(ebsd_node + '.step_y',
                signal.axes_manager[nav_indices[1]].scale)
    md.set_item(ebsd_node + '.detector_pixel_size',
                signal.axes_manager[sig_indices[1]].scale)
    # Separate EBSD and SEM metadata
    det_str, ebsd_str = ebsd_node.split('.')[-2:]  # Detector and EBSD nodes
    md_sem = md.get_item(sem_node).copy().as_dictionary()  # SEM node as dict
    md_det = md_sem.pop(det_str)  # Remove/assign detector node from SEM node
    md_ebsd = md_det.pop(ebsd_str)
    # Phases
    if md.get_item('Sample.Phases') is None:
        md = _update_phase_info(md, _phase_metadata())  # Add default phase
    md_ebsd['Phases'] = md.Sample.Phases.as_dictionary()
    for phase in md_ebsd['Phases'].keys():  # Ensure coordinates are arrays
        atom_coordinates = md_ebsd['Phases'][phase]['atom_coordinates']
        for atom in atom_coordinates.keys():
            atom_coordinates[atom]['coordinates'] = np.array(
                atom_coordinates[atom]['coordinates'])
    scan = {'EBSD': {'Header': md_ebsd}, 'SEM': {'Header': md_sem}}

    # Write scan dictionary to HDF groups
    dict2h5ebsdgroup(scan, scan_group)

    # Write signal to file
    man_pats = manufacturer_pattern_names()
    dset_pattern_name = man_pats['KikuchiPy']
    overwrite_dataset(scan_group.create_group('EBSD/Data'),
                      signal.data.reshape(nx * ny, sy, sx),
                      dset_pattern_name, signal_axes=(2, 1), **kwargs)
    nx_start, nx_stop, ny_start, ny_stop = signal.axes_manager.navigation_extent
    sample_pos = {'x_sample': np.tile(np.linspace(nx_start, nx_stop, nx), ny),
                  'y_sample': np.tile(np.linspace(ny_start, ny_stop, ny), nx)}
    dict2h5ebsdgroup(sample_pos, scan_group['EBSD/Data'])

    f.close()
    _logger.info("File closed.")