Code example #1
0
def read_with_hyperspy(filename):
    """
    Read a non-py4DSTEM file using hyperspy.

    Parameters
    ----------
    filename : str
        Path to the file to be read.

    Returns
    -------
    DataCube
        The loaded data with its metadata linked; the scan shape is set
        when it can be found in the metadata.
    """
    # Parse the file's metadata via hyperspy
    metadata = Metadata(init='hs', filepath=filename)

    # Load the raw data array
    hyperspy_file = hs.load(filename)
    data = hyperspy_file.data

    # Wrap the data in a DataCube and link the metadata to it
    datacube = DataCube(data=data)
    datacube.metadata = metadata

    # Set scan shape, if in metadata.  get_metadata_item may return None
    # for a missing key, in which case int(None) raises TypeError rather
    # than ValueError — catch both so a missing key degrades to a warning
    # instead of crashing.
    try:
        R_Nx = int(metadata.get_metadata_item('scan_size_Nx'))
        R_Ny = int(metadata.get_metadata_item('scan_size_Ny'))
        datacube.set_scan_shape(R_Nx, R_Ny)
    except (TypeError, ValueError):
        print(
            "Warning: scan shape not detected in metadata; please check / set manually."
        )

    return datacube
Code example #2
0
    def add_file(self, fname, generate_preview=True):
        """
        Add a file to this activity's file list, parse its metadata (storing
        a flattened copy of it to this activity), generate a preview
        thumbnail, get the file's type, and a lazy HyperSpy signal

        Parameters
        ----------
        fname : str
            The file to be added to the file list
        generate_preview : bool
            Whether or not to create the preview thumbnail images

        Raises
        ------
        FileNotFoundError
            If ``fname`` does not exist on disk
        """
        # Guard clause instead of nesting the whole body under an `if`
        if not _os.path.exists(fname):
            raise FileNotFoundError(fname + ' was not found')

        # NOTE(review): the file is appended even if metadata parsing
        # fails below — preserved as-is; confirm this is intentional.
        self.files.append(fname)
        meta, preview_fname = _parse_metadata(fname,
                                              generate_preview=generate_preview)

        if meta is None:
            # Something bad happened, so we need to alert the user
            _logger.warning(f'Could not parse metadata of {fname}')
        else:
            # Metadata parsed cleanly; record signal, preview, and warnings
            s = _hs.load(fname, lazy=True)
            self.previews.append(preview_fname)
            self.sigs.append(s)
            self.meta.append(_flatten_dict(meta['nx_meta']))
            self.warnings.append(
                [' '.join(w) for w in meta['nx_meta']['warnings']])

        _logger.debug(f'appended {fname} to files')
        _logger.debug(f'self.files is now {self.files}')
Code example #3
0
    def setup_metadata_hs(self, filepath):
        """
        Populate this object's metadata groups from a file read by hyperspy.

        Parameters
        ----------
        filepath : str
            Path to the file whose metadata trees should be extracted.
        """
        # Get hyperspy metadata trees (lazy load — only metadata is needed)
        hyperspy_file = hs.load(filepath, lazy=True)
        original_metadata_shortlist = hyperspy_file.metadata
        original_metadata_all = hyperspy_file.original_metadata

        # Store original metadata
        self.original_metadata.shortlist = original_metadata_shortlist
        self.original_metadata.all = original_metadata_all

        # Each (search dict, destination group) pair is applied to both
        # metadata trees; this data-driven loop replaces ten near-identical
        # calls while preserving their exact order: all five searches on the
        # full tree first, then the same five on the shortlist tree.
        searches = (
            (self._search_dicts.original_to_microscope_search_dict,
             self.microscope),
            (self._search_dicts.original_to_sample_search_dict,
             self.sample),
            (self._search_dicts.original_to_user_search_dict,
             self.user),
            (self._search_dicts.original_to_calibration_search_dict,
             self.calibration),
            (self._search_dicts.original_to_comments_search_dict,
             self.comments),
        )
        for tree in (original_metadata_all, original_metadata_shortlist):
            for search_dict, group in searches:
                self.get_metadata_from_hs_tree(tree, search_dict, group)
Code example #4
0
File: __init__.py  Project: tkphd/NexusLIMS
def _nexuslims_path(fname, suffix):
    """Map *fname* from the instrument data tree to the NexusLIMS tree and
    append *suffix* (e.g. ``'.json'`` or ``'.thumb.png'``)."""
    return fname.replace(_os.environ["mmfnexus_path"],
                         _os.environ["nexusLIMS_path"]) + suffix


def _write_metadata_json(fname, nx_meta, overwrite):
    """Write *nx_meta* as a JSON file at *fname*'s NexusLIMS location,
    unless the file exists and *overwrite* is False."""
    out_fname = _nexuslims_path(fname, '.json')
    if not _os.path.isfile(out_fname) or overwrite:
        # Create the directory for the metadata file, if needed
        _pathlib.Path(_os.path.dirname(out_fname)).mkdir(parents=True,
                                                         exist_ok=True)
        # Make sure that the nx_meta dict comes first in the json output
        out_dict = {'nx_meta': nx_meta['nx_meta']}
        for k, v in nx_meta.items():
            if k != 'nx_meta':
                out_dict[k] = v
        with open(out_fname, 'w') as f:
            _logger.debug(f'Dumping metadata to {out_fname}')
            _json.dump(out_dict, f, sort_keys=False,
                       indent=2, cls=_CustomEncoder)


def _generate_preview_image(fname, extension, overwrite):
    """Generate (or reuse) a preview thumbnail for *fname* and return its
    path; falls back to a placeholder image if HyperSpy cannot load the
    file."""
    preview_fname = _nexuslims_path(fname, '.thumb.png')
    if extension == 'tif':
        instr = _get_instr(fname)
        instr_name = instr.name if instr is not None else None
        if instr_name == '**REMOVED**':
            # we know the output size we want for the Quanta
            _down_sample(fname,
                         out_path=preview_fname,
                         output_size=(512, 471))
        else:
            _down_sample(fname,
                         out_path=preview_fname,
                         factor=2)
    else:
        load_options = {'lazy': True}
        if extension == 'ser':
            # .ser files need this flag to skip incomplete acquisitions
            load_options['only_valid_data'] = True

        try:
            s = _hs.load(fname, **load_options)
        except Exception:
            # Deliberate best-effort: any load failure gets a placeholder
            # preview rather than aborting metadata extraction.  (The
            # original code recomputed preview_fname here with an identical
            # expression; the existing value is reused instead.)
            _logger.warning('Signal could not be loaded by HyperSpy. '
                            'Using placeholder image for preview.')
            _sh.copyfile(PLACEHOLDER_PREVIEW, preview_fname)
            return preview_fname

        # If s is a list of signals, use just the first one for
        # our purposes
        if isinstance(s, list):
            num_sigs = len(s)
            fname = s[0].metadata.General.original_filename
            s = s[0]
            s.metadata.General.title = \
                s.metadata.General.title + \
                f' (1 of {num_sigs} total signals in file "{fname}")'
        elif s.metadata.General.title == '':
            # Untitled signal: derive a title from the original filename
            s.metadata.General.title = \
                s.metadata.General.original_filename.replace(
                    extension, '').strip('.')

        # only generate the preview if it doesn't exist, or overwrite
        # parameter is explicitly provided
        if not _os.path.isfile(preview_fname) or overwrite:
            _logger.info(f'Generating preview: {preview_fname}')
            # Create the directory for the thumbnail, if needed
            _pathlib.Path(_os.path.dirname(preview_fname)).mkdir(
                parents=True, exist_ok=True)
            # Generate the thumbnail (force lazy signal into memory first)
            s.compute(progressbar=False)
            _s2thumb(s, out_path=preview_fname)
        else:
            _logger.info(f'Preview already exists: {preview_fname}')

    return preview_fname


def parse_metadata(fname, write_output=True, generate_preview=True,
                   overwrite=True):
    """
    Given an input filename, read the file, determine what "type" of file (i.e.
    what instrument it came from) it is, filter the metadata (if necessary) to
    what we are interested in, and return it as a dictionary (writing to the
    NexusLIMS directory as JSON by default). Also calls the preview
    generation method, if desired.

    Parameters
    ----------
    fname : str
        The filename from which to read data
    write_output : bool
        Whether to write the metadata dictionary as a json file in the NexusLIMS
        folder structure
    generate_preview : bool
        Whether to generate the thumbnail preview of this dataset (that
        operation is not done in this method, it is just called from here so
        it can be done at the same time)
    overwrite : bool
        Whether or not to overwrite the .json metadata file and thumbnail
        image if either exists

    Returns
    -------
    nx_meta : dict or None
        The "relevant" metadata that is of use for NexusLIMS. If None,
        the file could not be opened
    preview_fname : str or None
        The file path of the generated preview image, or `None` if it was not
        requested
    """
    # Extension without the leading dot selects the extractor to run
    extension = _os.path.splitext(fname)[1][1:]

    nx_meta = extension_reader_map[extension](fname)
    preview_fname = None

    # nx_meta should never be None, because the extractors are defensive and
    # will always return _something_
    if nx_meta is not None:
        # Set the dataset type to Misc if it was not set by the file reader
        if 'DatasetType' not in nx_meta['nx_meta']:
            nx_meta['nx_meta']['DatasetType'] = 'Misc'
            nx_meta['nx_meta']['Data Type'] = 'Miscellaneous'

        if write_output:
            _write_metadata_json(fname, nx_meta, overwrite)

    if generate_preview:
        preview_fname = _generate_preview_image(fname, extension, overwrite)

    return nx_meta, preview_fname