Example #1
def emi_reader(filename, dump_xml=False, **kwds):
    # TODO: recover the tags from the emi file. It is easy: just look for
    # <ObjectInfo> and </ObjectInfo>. It is standard xml :)
    # XML chunks are identified by UUIDs; if we can find out how these UUIDs
    # are generated, it will be possible to match them to the corresponding
    # ser file and add the detector information to the metadata
    objects = get_xml_info_from_emi(filename)
    filename = os.path.splitext(filename)[0]
    if dump_xml is True:
        for i, obj in enumerate(objects):
            with open(filename + '-object-%s.xml' % i, 'w') as f:
                f.write(obj)

    ser_files = sorted(glob(filename + '_[0-9].ser'))
    sers = []
    for f in ser_files:
        _logger.info("Opening %s", f)
        try:
            sers.append(ser_reader(f, objects, **kwds))
        except IOError:  # Probably a single spectrum that we don't support
            continue

        index = int(os.path.splitext(f)[0].split("_")[-1]) - 1
        op = DictionaryTreeBrowser(sers[-1]['original_metadata'])
        emixml2dtb(ET.fromstring(objects[index]), op)
        sers[-1]['original_metadata'] = op.as_dictionary()
    return sers
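
A minimal usage sketch for the reader above (the .emi path is a placeholder, and the import assumes the older hyperspy.io_plugins layout where fei.py lived):

# Hypothetical usage: read a TIA .emi/.ser pair and inspect the results.
from hyperspy.io_plugins.fei import emi_reader  # assumed module location

dictionaries = emi_reader('experiment.emi', dump_xml=False)
for d in dictionaries:
    print(sorted(d['original_metadata'].keys()))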
Example #2
def test_get_date_time_from_metadata():
    assert (dtt.get_date_time_from_metadata(md1) ==
            '2014-12-27T00:00:00+00:00')
    assert (dtt.get_date_time_from_metadata(md1, formatting='ISO') ==
            '2014-12-27T00:00:00+00:00')
    assert (dtt.get_date_time_from_metadata(md1, formatting='datetime64') ==
            np.datetime64('2014-12-27T00:00:00.000000'))
    assert (dtt.get_date_time_from_metadata(md1, formatting='datetime') ==
            dt1)

    assert (dtt.get_date_time_from_metadata(md2) ==
            '2124-03-25T10:04:48-05:00')
    assert (dtt.get_date_time_from_metadata(md2, formatting='datetime') ==
            dt2)
    assert (dtt.get_date_time_from_metadata(md2, formatting='datetime64') ==
            np.datetime64('2124-03-25T10:04:48'))

    assert (dtt.get_date_time_from_metadata(md3) ==
            '2016-07-12T22:57:32')
    assert (dtt.get_date_time_from_metadata(md3, formatting='datetime') ==
            dt3)
    assert (dtt.get_date_time_from_metadata(md3, formatting='datetime64') ==
            np.datetime64('2016-07-12T22:57:32.000000'))

    assert (dtt.get_date_time_from_metadata(DictionaryTreeBrowser({'General': {}}))
            is None)
    assert (dtt.get_date_time_from_metadata(DictionaryTreeBrowser({'General': {'date': '2016-07-12'}})) ==
            '2016-07-12')
    assert (dtt.get_date_time_from_metadata(DictionaryTreeBrowser({'General': {'time': '12:00'}})) ==
            '12:00:00')
    assert (dtt.get_date_time_from_metadata(DictionaryTreeBrowser({'General': {'time': '12:00',
                                                                               'time_zone': 'CET'}})) ==
            '12:00:00')
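
The md1/dt1, md2/dt2 and md3/dt3 fixtures asserted above are typically built with a helper such as _get_example (shown in Example #13); a sketch consistent with the assertions:

# Assumed fixture construction; dates, times and zones mirror the assertions.
md1, dt1, iso1 = _get_example('2014-12-27', '00:00:00', 'UTC')
md2, dt2, iso2 = _get_example('2124-03-25', '10:04:48', 'EST')
md3, dt3, iso3 = _get_example('2016-07-12', '22:57:32')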
Example #3
File: fei.py Project: jonpdx/hyperspy
def emi_reader(filename, dump_xml=False, verbose=False, **kwds):
    # TODO: recover the tags from the emi file. It is easy: just look for
    # <ObjectInfo> and </ObjectInfo>. It is standard xml :)
    objects = get_xml_info_from_emi(filename)
    filename = os.path.splitext(filename)[0]
    if dump_xml is True:
        for i, obj in enumerate(objects):
            with open(filename + '-object-%s.xml' % i, 'w') as f:
                f.write(obj)

    ser_files = glob(filename + '_[0-9].ser')
    sers = []
    for f in ser_files:
        if verbose is True:
            print "Opening ", f
        try:
            sers.append(ser_reader(f, objects))
        except IOError:  # Probably a single spectrum that we don't support
            continue

        index = int(os.path.splitext(f)[0].split("_")[-1]) - 1
        op = DictionaryTreeBrowser(sers[-1]['original_metadata'])
        emixml2dtb(ET.fromstring(objects[index]), op)
        sers[-1]['original_metadata'] = op.as_dictionary()
    return sers
Example #4
File: fei.py Project: gdonval/hyperspy
def emi_reader(filename, dump_xml=False, verbose=False, **kwds):
    # TODO: recover the tags from the emi file. It is easy: just look for
    # <ObjectInfo> and </ObjectInfo>. It is standard xml :)
    objects = get_xml_info_from_emi(filename)
    filename = os.path.splitext(filename)[0]
    if dump_xml is True:
        for i, obj in enumerate(objects):
            with open(filename + '-object-%s.xml' % i, 'w') as f:
                f.write(obj)

    ser_files = glob(filename + '_[0-9].ser')
    sers = []
    for f in ser_files:
        if verbose is True:
            print "Opening ", f
        try:
            sers.append(ser_reader(f, objects))
        except IOError:  # Probably a single spectrum that we don't support
            continue

        index = int(os.path.splitext(f)[0].split("_")[-1]) - 1
        op = DictionaryTreeBrowser(sers[-1]['original_metadata'])
        emixml2dtb(ET.fromstring(objects[index]), op)
        sers[-1]['original_metadata'] = op.as_dictionary()
    return sers
Example #5
def _set_metadata_from_mapping(
    omd: dict,
    md: DictionaryTreeBrowser,
    mapping: dict,
):
    """Update metadata dictionary inplace from original metadata
    dictionary via a mapping.

    Parameters
    ----------
    omd
        Dictionary with original metadata.
    md
        Dictionary with metadata to update.
    mapping
        Mapping between `omd` and `md`.
    """
    for key_out, key_in in mapping.items():
        try:
            if isinstance(key_in, list):
                value = _get_nested_dictionary(omd, key_in)
            else:
                value = omd[key_in]
            md.set_item(key_out, value)
        except KeyError:
            warnings.warn(f"Could not read {key_in} from file.")
Example #6

def test_get_date_time_from_metadata():
    nt.assert_equal(dtt.get_date_time_from_metadata(md1),
                    '2014-12-27T00:00:00+00:00')
    nt.assert_equal(dtt.get_date_time_from_metadata(md1, formatting='ISO'),
                    '2014-12-27T00:00:00+00:00')
    nt.assert_equal(dtt.get_date_time_from_metadata(md1, formatting='datetime64'),
                    np.datetime64('2014-12-27T00:00:00.000000'))
    nt.assert_equal(dtt.get_date_time_from_metadata(md1, formatting='datetime'),
                    dt1)

    nt.assert_equal(dtt.get_date_time_from_metadata(md2),
                    '2124-03-25T10:04:48-05:00')
    nt.assert_equal(dtt.get_date_time_from_metadata(md2, formatting='datetime'),
                    dt2)
    nt.assert_equal(dtt.get_date_time_from_metadata(md2, formatting='datetime64'),
                    np.datetime64('2124-03-25T10:04:48'))

    nt.assert_equal(dtt.get_date_time_from_metadata(md3),
                    '2016-07-12T22:57:32')
    nt.assert_equal(dtt.get_date_time_from_metadata(md3, formatting='datetime'),
                    dt3)
    nt.assert_equal(dtt.get_date_time_from_metadata(md3, formatting='datetime64'),
                    np.datetime64('2016-07-12T22:57:32.000000'))

    nt.assert_equal(dtt.get_date_time_from_metadata(DictionaryTreeBrowser({'General': {}})),
                    None)
    nt.assert_equal(dtt.get_date_time_from_metadata(DictionaryTreeBrowser({'General': {'date': '2016-07-12'}})),
                    '2016-07-12')
    nt.assert_equal(dtt.get_date_time_from_metadata(DictionaryTreeBrowser({'General': {'time': '12:00'}})),
                    '12:00:00')
    nt.assert_equal(dtt.get_date_time_from_metadata(DictionaryTreeBrowser({'General': {'time': '12:00',
                                                                                       'time_zone': 'CET'}})),
                    '12:00:00')
Example #7
File: fei.py Project: jhemmelg/hyperspy
def emi_reader(filename, dump_xml=False, **kwds):
    # TODO: recover the tags from the emi file. It is easy: just look for
    # <ObjectInfo> and </ObjectInfo>. It is standard xml :)
    # XML chunks are identified by UUIDs; if we can find out how these UUIDs
    # are generated, it will be possible to match them to the corresponding
    # ser file and add the detector information to the metadata
    objects = get_xml_info_from_emi(filename)
    filename = os.path.splitext(filename)[0]
    if dump_xml is True:
        for i, obj in enumerate(objects):
            with open(filename + '-object-%s.xml' % i, 'w') as f:
                f.write(obj)

    ser_files = sorted(glob(filename + '_[0-9].ser'))
    sers = []
    for f in ser_files:
        _logger.info("Opening %s", f)
        try:
            sers.append(ser_reader(f, objects))
        except IOError:  # Probably a single spectrum that we don't support
            continue

        index = int(os.path.splitext(f)[0].split("_")[-1]) - 1
        op = DictionaryTreeBrowser(sers[-1]['original_metadata'])
        emixml2dtb(ET.fromstring(objects[index]), op)
        sers[-1]['original_metadata'] = op.as_dictionary()
    return sers
Example #8
    def __init__(self, model, workers=None, setup=True, **kwargs):
        # constants:
        if workers is None:
            workers = max(1, cpu_count() - 1)
        self.model = model
        self.metadata = DictionaryTreeBrowser()

        self._scale = 1.0
        # -1 -> done pixel, use
        # -2 -> done, ignore when diffusion
        #  0 -> bad fit/no info
        # >0 -> select when turn comes

        self.metadata.add_node('marker')
        self.metadata.add_node('goodness_test')

        marker = np.empty(self.model.axes_manager.navigation_shape[::-1])
        marker.fill(self._scale)

        self.metadata.marker = marker
        self.strategies = StrategyList(self)
        self.strategies.append(ReducedChiSquaredStrategy())
        self.strategies.append(HistogramStrategy())
        self._active_strategy_ind = 0
        self.update_every = max(10, workers * 2)  # some sensible number....
        from hyperspy.samfire_utils.fit_tests import red_chisq_test
        self.metadata.goodness_test = red_chisq_test(tolerance=1.0)
        self.metadata._gt_dump = None
        from hyperspy.samfire_utils.samfire_kernel import single_kernel
        self.single_kernel = single_kernel
        self._workers = workers
        if len(kwargs) or setup:
            self._setup(**kwargs)
        self.refresh_database()
Example #9
    def setup_method(self, method):
        self.w1 = DictionaryTreeBrowser()
        self.w2 = DictionaryTreeBrowser()

        for w in [self.w1, self.w2]:
            w.add_node('samf')
        self.samf = object()
        self.sl = StrategyList(self.samf)
Example #10
    def setup_method(self, method):
        self.shape = (5, 7)
        self.s = LocalStrategy('test diffusion strategy')
        self.samf = create_artificial_samfire(self.shape)

        m = DictionaryTreeBrowser()
        m.set_item('chisq.data', np.ones(self.shape) * 5.)

        self.samf.model = m
Example #11
    def setUp(self):
        self.shape = (5, 7)
        self.s = LocalStrategy('test diffusion strategy')
        self.samf = create_artificial_samfire(self.shape)

        m = DictionaryTreeBrowser()
        m.set_item('chisq.data', np.ones(self.shape) * 5.)

        self.samf.model = m
Example #12

    def setUp(self):
        self.shape = (7, 15)
        art_model = DictionaryTreeBrowser()
        art_model.set_item("red_chisq.data", np.ones(self.shape))
        art_model.red_chisq.data[3, 5] = 0.8
        art_model.red_chisq.data[2, 5] = 2.0
        self.m = art_model
        # has to be imported here, as otherwise it crashes nosetests
        from hyperspy.samfire_utils.goodness_of_fit_tests.red_chisq import red_chisq_test as rct

        self.t = rct(0.9)
Example #13
def _get_example(date, time, time_zone=None):
    md = DictionaryTreeBrowser({'General': {'date': date, 'time': time}})
    if time_zone:
        md.set_item('General.time_zone', time_zone)
        dt = parser.parse('%sT%s' % (date, time))
        dt = dt.replace(tzinfo=tz.gettz(time_zone))
        iso = dt.isoformat()
    else:
        iso = '%sT%s' % (date, time)
        dt = parser.parse(iso)
    return md, dt, iso
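
An illustrative round trip through the helper above (values are made up):

md, dt, iso = _get_example('2016-07-12', '22:57:32', 'UTC')
print(iso)                   # 2016-07-12T22:57:32+00:00
print(md.General.time_zone)  # UTC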
Example #14

    def setup_method(self, method):
        self.shape = (7, 15)
        art_model = DictionaryTreeBrowser()
        art_model.set_item('red_chisq.data', np.ones(self.shape))
        art_model.red_chisq.data[3, 5] = 0.8
        art_model.red_chisq.data[2, 5] = 2.
        self.m = art_model
        # has to be imported here, as otherwise it crashes nosetests
        from hyperspy.samfire_utils.goodness_of_fit_tests.red_chisq import \
            red_chisq_test as rct
        self.t = rct(0.9)
Example #15
def _get_example(date, time, time_zone=None):
    md = DictionaryTreeBrowser({'General': {'date': date,
                                            'time': time}})
    if time_zone:
        md.set_item('General.time_zone', time_zone)
        dt = parser.parse('%sT%s' % (date, time))
        dt = dt.replace(tzinfo=tz.gettz(time_zone))
        iso = dt.isoformat()
    else:
        iso = '%sT%s' % (date, time)
        dt = parser.parse(iso)
    return md, dt, iso
Example #16
def test_update_date_time_in_metadata():
    md = DictionaryTreeBrowser({'General': {}})
    # in case of iso, the exact time is lost, only the time offset is kept
    md11 = dtt.update_date_time_in_metadata(iso1, md.deepcopy())
    assert_deep_almost_equal(md11.General.date, md1.General.date)
    assert_deep_almost_equal(md11.General.time, md1.General.time)
    assert_deep_almost_equal(md11.General.time_zone, 'UTC')

    md12 = dtt.update_date_time_in_metadata(dt1, md.deepcopy())
    assert_deep_almost_equal(md12.General.date, md1.General.date)
    assert_deep_almost_equal(md12.General.time, md1.General.time)
    import locale
    if locale.getlocale()[0] in ['en_GB', 'en_US']:
        assert md12.General.time_zone in ('UTC', 'Coordinated Universal Time')

    md13 = dtt.update_date_time_in_metadata(iso2, md.deepcopy())
    assert_deep_almost_equal(md13.General.date, md2.General.date)
    assert_deep_almost_equal(md13.General.time, md2.General.time)
    assert_deep_almost_equal(md13.General.time_zone, '-05:00')
    assert_deep_almost_equal(dtt.update_date_time_in_metadata(dt2, md.deepcopy()).as_dictionary(),
                             md2.as_dictionary())

    assert_deep_almost_equal(dtt.update_date_time_in_metadata(iso3, md.deepcopy()).as_dictionary(),
                             md3.as_dictionary())
    assert_deep_almost_equal(dtt.update_date_time_in_metadata(dt3, md.deepcopy()).as_dictionary(),
                             md3.as_dictionary())
Example #17
def extract_metadata(lookup, header, instrument, filename, signal_type):
    """
    Extracts hyperspy metadata from header.

    Returns
    -------
    :class:`DictionaryTreeBrowser`
    """
    metadata = DictionaryTreeBrowser()

    # General metadata
    metadata.set_item('General.original_filename', filename)
    metadata.set_item('Signal.signal_type', signal_type)

    # Loop through the lookup
    for source_key, destination_key, transformer in lookup:
        if source_key not in header:
            continue

        destination_key = destination_key.format(instrument=instrument)

        value = header[source_key]
        if transformer is not None:
            value = transformer(value)

        metadata.set_item(destination_key, value)

    return metadata
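
A sketch of a lookup table this function could consume; the header keys and the unit-converting transformer are hypothetical:

# Each entry: (source key in header, destination metadata key, transformer).
lookup = [
    ("HV", "Acquisition_instrument.{instrument}.beam_energy",
     lambda v: float(v) / 1e3),  # hypothetical V -> kV conversion
    ("Operator", "General.authors", None),
]
header = {"HV": "200000", "Operator": "Jane Doe"}
md = extract_metadata(lookup, header, "TEM", "scan.hdf5", "EBSD")
print(md.Acquisition_instrument.TEM.beam_energy)  # 200.0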
Example #19
def delete_from_nested_dictionary(dictionary, keys):
    """Delete key(s) from a nested dictionary.

    Parameters
    ----------
    dictionary : dictionary or DictionaryTreeBrowser
        Dictionary to delete key(s) from.
    keys : dict_values
        Key(s) to delete.

    Returns
    -------
    modified_dict : dictionary or DictionaryTreeBrowser
        Dictionary without deleted keys.
    """

    dict_type = type(dictionary)
    if isinstance(dictionary, DictionaryTreeBrowser):
        dictionary = dictionary.as_dictionary()
    modified_dict = {}
    for key, val in dictionary.items():
        if key not in keys:
            if isinstance(val, dict):
                modified_dict[key] = delete_from_nested_dictionary(val, keys)
            else:
                modified_dict[key] = val
    if dict_type != dict:  # Revert to DictionaryTreeBrowser
        modified_dict = DictionaryTreeBrowser(modified_dict)
    return modified_dict
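
An illustrative call, showing that the input type is preserved:

from hyperspy.misc.utils import DictionaryTreeBrowser

tree = DictionaryTreeBrowser({"Sample": {"Phases": {"1": {}}, "name": "steel"}})
pruned = delete_from_nested_dictionary(tree, ["Phases"])
print(pruned.as_dictionary())  # {'Sample': {'name': 'steel'}}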
Example #20

    def __init__(self, *args, **kwargs):
        """Create an :class:`~kikuchipy.signals.EBSDMasterPattern`
        object from a :class:`hyperspy.signals.Signal2D` or a
        :class:`numpy.ndarray`.

        """

        Signal2D.__init__(self, *args, **kwargs)

        # Update metadata if object is initialized from numpy array or
        # with set_signal_type()
        if not self.metadata.has_item(metadata_nodes("ebsd_master_pattern")):
            md = self.metadata.as_dictionary()
            md.update(ebsd_master_pattern_metadata().as_dictionary())
            self.metadata = DictionaryTreeBrowser(md)
        if not self.metadata.has_item("Sample.Phases"):
            self.set_phase_parameters()
Example #21
    def metadata(self, d):
        warnings.warn(
            "Setting the `metadata` attribute is deprecated and will be removed "
            "in HyperSpy 2.0. Use the `set_item` and `add_dictionary` methods "
            "of the `metadata` attribute instead.")
        if isinstance(d, dict):
            d = DictionaryTreeBrowser(d)
        self._metadata = d
Example #23
def _write_parameters_to_dictionary(
    parameters: dict, dictionary: DictionaryTreeBrowser, node: str
):
    """Write dictionary of parameters to DictionaryTreeBrowser.

    Parameters
    ----------
    parameters
        Dictionary of parameters to write to dictionary.
    dictionary
        Dictionary to write parameters to.
    node
        String like 'Acquisition_instrument.SEM' etc. with dictionary
        nodes to write parameters to.
    """
    for key, val in parameters.items():
        if val is not None:
            dictionary.set_item(node + "." + key, val)
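
An illustrative call; note that None values are silently skipped:

from hyperspy.misc.utils import DictionaryTreeBrowser

md = DictionaryTreeBrowser()
_write_parameters_to_dictionary(
    {"beam_energy": 20.0, "magnification": None},  # magnification is skipped
    md,
    "Acquisition_instrument.SEM",
)
print(md.Acquisition_instrument.SEM.beam_energy)  # 20.0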
Example #24
def _defaut_metadata():
    md = DictionaryTreeBrowser({
        'General': {
            'date': '2016-01-01',
            'time': '00:00:00',
            'time_zone': 'GMT'
        }
    })
    return md
Example #25
def _update_phase_info(
    metadata: DictionaryTreeBrowser, dictionary: dict, phase_number: int = 1
) -> DictionaryTreeBrowser:
    """Update information of phase in metadata, adding it if it doesn't
    already exist.

    Parameters
    ----------
    metadata
        Metadata to update.
    dictionary
        Dictionary with only values to update.
    phase_number
        Number of phase to update.

    Returns
    -------
    metadata : DictionaryTreeBrowser
        Updated metadata.
    """
    # Check if metadata has phases
    if not metadata.has_item("Sample.Phases"):
        metadata.add_node("Sample.Phases")

    # Check if phase number is already in metadata
    phase = metadata.Sample.Phases.get_item(str(phase_number))
    if phase is None:
        phase = _phase_metadata()
    phase = dict(phase)

    # Loop over input dictionary and update items in phase dictionary
    for key, val in dictionary.items():
        key = re.sub(r"(\w)([A-Z])", r"\1 \2", key)  # Space before UPPERCASE
        key = key.lower()
        key = key.replace(" ", "_")
        if key in phase:
            if isinstance(val, list):
                val = np.array(val)
            phase[key] = val

    # Update phase info in metadata
    metadata.Sample.Phases.add_dictionary({str(phase_number): phase})

    return metadata
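
The key normalisation in the loop above turns CamelCase input keys into the snake_case keys used in the phase dictionary, for example:

import re

key = re.sub(r"(\w)([A-Z])", r"\1 \2", "LatticeConstants")  # "Lattice Constants"
key = key.lower().replace(" ", "_")
print(key)  # lattice_constants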
Example #26
    def test_dict2h5ebsdgroup(self, save_path_hdf5):
        dictionary = {
            "a": [np.array(24.5)],
            "b": DictionaryTreeBrowser(),
            "c": set(),
        }
        with File(save_path_hdf5, mode="w") as f:
            group = f.create_group(name="a_group")
            with pytest.warns(UserWarning, match="The hdf5 writer could not"):
                dict2h5ebsdgroup(dictionary, group)
Example #27
def ebsd_master_pattern_metadata() -> DictionaryTreeBrowser:
    """Return a dictionary in HyperSpy's DictionaryTreeBrowser format
    with the default kikuchipy EBSD master pattern metadata.

    The parameters are chosen based on the contents of EMsoft's EBSD
    master pattern HDF5 file.

    See
    :meth:`~kikuchipy.signals.EBSDMasterPattern.set_simulation_parameters`
    for an explanation of the parameters.

    Returns
    -------
    md : hyperspy.misc.utils.DictionaryTreeBrowser
    """
    ebsd_master_pattern = {
        "BSE_simulation": {
            "depth_step": -1.0,
            "energy_step": -1.0,
            "incident_beam_energy": -1.0,
            "max_depth": -1.0,
            "min_beam_energy": -1.0,
            "mode": "",
            "number_of_electrons": -1,
            "pixels_along_x": -1,
            "sample_tilt": -1.0,
        },
        "Master_pattern": {
            "Bethe_parameters": {
                "complete_cutoff": -1.0,
                "strong_beam_cutoff": -1.0,
                "weak_beam_cutoff": -1.0,
            },
            "smallest_interplanar_spacing": -1.0,
            "projection": "",
            "hemisphere": "",
        },
    }

    md = DictionaryTreeBrowser()
    md.set_item(metadata_nodes("ebsd_master_pattern"), ebsd_master_pattern)

    return md
Example #28
    def setUp(self):
        tree = DictionaryTreeBrowser(
            {
                "Node1": {"leaf11": 11,
                          "Node11": {"leaf111": 111},
                          },
                "Node2": {"leaf21": 21,
                          "Node21": {"leaf211": 211},
                          },
            })
        self.tree = tree
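
With a tree like the one above, items can be read as attributes or by dotted path; a short sketch:

from hyperspy.misc.utils import DictionaryTreeBrowser

tree = DictionaryTreeBrowser(
    {"Node1": {"leaf11": 11, "Node11": {"leaf111": 111}}})
print(tree.Node1.Node11.leaf111)      # 111
print(tree.get_item("Node1.leaf11"))  # 11
print(tree.has_item("Node3"))         # False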
Example #29
def kikuchipyheader2dicts(scan_group, md, lazy=False):
    """Return scan metadata dictionaries from a KikuchiPy h5ebsd file.

    Parameters
    ----------
    scan_group : h5py.Group
        HDF group of scan data and header.
    md : DictionaryTreeBrowser
        Dictionary with empty fields from KikuchiPy's metadata.
    lazy : bool, optional

    Returns
    -------
    md, omd, scan_size : DictionaryTreeBrowser
    """

    from kikuchipy.utils.general_utils import (delete_from_nested_dictionary,
                                               get_nested_dictionary)

    omd = DictionaryTreeBrowser()
    sem_node, ebsd_node = metadata_nodes()
    md.set_item(ebsd_node, h5ebsdgroup2dict(scan_group['EBSD/Header'],
                                            lazy=lazy))
    md = delete_from_nested_dictionary(md, 'Phases')
    phase_node = 'Sample.Phases'
    md.set_item(sem_node, h5ebsdgroup2dict(scan_group['SEM/Header'], lazy=lazy))
    md.set_item(phase_node, h5ebsdgroup2dict(scan_group['EBSD/Header/Phases'],
                                             recursive=True))

    # Get and remove scan info values from metadata
    mapping = {'sx': 'pattern_width', 'sy': 'pattern_height', 'nx': 'n_columns',
               'ny': 'n_rows', 'step_x': 'step_x', 'step_y': 'step_y',
               'delta': 'detector_pixel_size'}
    scan_size = DictionaryTreeBrowser()
    for k, v in mapping.items():
        scan_size.set_item(k, get_nested_dictionary(md, ebsd_node + '.' + v))
    md = delete_from_nested_dictionary(md, mapping.values())

    return md, omd, scan_size
Example #30
    def setUp(self):
        tree = DictionaryTreeBrowser(
            {
                "Node1": {"leaf11": 11,
                          "Node11": {"leaf111": 111},
                          },
                "Node2": {"leaf21": 21,
                          "Node21": {"leaf211": 211},
                          },
                "Leaf3": 3
            })
        self.tree = tree
        self.dummy = DummyThing()
Example #31
def fake_metadata_diffraction():
    metadata = {
        "Acquisition_instrument": {
            "TEM": {
                "beam_current": 23,
                "beam_energy": 200,
                "camera_length": 80,
            }
        },
        "General": {
            "date": "1993-06-18",
            "time": "12:34:56",
            "time_zone": "CET",
        }
    }
    return DictionaryTreeBrowser(metadata)
Example #32
def fake_metadata_imaging():
    metadata = {
        "Acquisition_instrument": {
            "TEM": {
                "beam_current": 23,
                "beam_energy": 200,
                "magnification": 3000,
            }
        },
        "General": {
            "date": "1993-06-18",
            "time": "12:34:56",
            "time_zone": "CET",
        }
    }
    return DictionaryTreeBrowser(metadata)
Example #33

    def setup_method(self, method):
        tree = DictionaryTreeBrowser({
            "Node1": {
                "leaf11": 11,
                "Node11": {
                    "leaf111": 111
                },
            },
            "Node2": {
                "leaf21": 21,
                "Node21": {
                    "leaf211": 211
                },
            },
        })
        self.tree = tree
Example #34
def tree(request):
    lazy = request.param
    tree = DictionaryTreeBrowser(
        {
            "Node1": {
                "leaf11": 11,
                "Node11": {
                    "leaf111": 111
                },
            },
            "Node2": {
                "leaf21": 21,
                "Node21": {
                    "leaf211": 211
                },
            },
        },
        lazy=lazy)
    return tree
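
In its home test module this is a parametrised pytest fixture; the decorator (omitted above) would look roughly like:

import pytest

@pytest.fixture(params=[True, False])  # assumed parametrisation over `lazy`
def tree(request):
    lazy = request.param
    ...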
Example #35
def ebsd_metadata() -> DictionaryTreeBrowser:
    """Return a dictionary in HyperSpy's DictionaryTreeBrowser format
    with the default kikuchipy EBSD metadata.

    See :meth:`~kikuchipy.signals.EBSD.set_experimental_parameters` for
    an explanation of the parameters.

    Returns
    -------
    md : hyperspy.misc.utils.DictionaryTreeBrowser

    Notes
    -----
    .. deprecated:: 0.5
    """
    md = DictionaryTreeBrowser()
    sem_node, ebsd_node = metadata_nodes(["sem", "ebsd"])
    ebsd = {
        "azimuth_angle": -1.0,
        "binning": 1,
        "detector": "",
        "elevation_angle": -1.0,
        "exposure_time": -1,
        "frame_number": -1,
        "frame_rate": -1,
        "gain": -1.0,
        "grid_type": "",
        "sample_tilt": -1.0,
        "scan_time": -1.0,
        "static_background": -1,
        "xpc": -1.0,
        "ypc": -1.0,
        "zpc": -1.0,
    }
    sem = {
        "microscope": "",
        "magnification": -1,
        "beam_energy": -1.0,
        "working_distance": -1.0,
    }
    md.set_item(sem_node, sem)
    md.set_item(ebsd_node, ebsd)
    return md
Example #36

def h5_to_dictionary(fh, tree=None, level=''):
    """
    Transforms hdf5 file tree into a DictionaryTreeBrowser

    Recursively generates a hyperspy.misc.utils.DictionaryTreeBrowser with the
    structure of the given h5py.File object.

    Parameters
    ----------
        fh : h5py.File
            h5py File handle
        tree : DictionaryTreeBrowser, optional
            A DictionaryTreeBrowser to append to recursively
        level : str, optional
            Location in hierarchy of the hdf5/DictionaryTreeBrowser

    Returns
    -------
        DictionaryTreeBrowser
            dictionary object with hierarchy of given HDF5 file handle
    """
    assert tree is None or isinstance(tree, DictionaryTreeBrowser)

    if tree is None:
        tree = DictionaryTreeBrowser()
    else:
        level += '.'

    for key in list(fh.attrs.keys()):
        val = fh.attrs[key]
        tree.set_item(level + key, copy(val))

    for (key, desc) in list(fh.items()):
        key = str(key)
        desc = str(desc)
        if '(0 members)' in desc:
            continue
        elif 'HDF5 dataset' in desc:
            tree.set_item(level + key, np.squeeze(fh[key]).copy())
        elif 'HDF5 group' in desc:
            tree = h5_to_dictionary(fh[key], tree=tree, level=level + key)
    return tree
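
A usage sketch (the HDF5 path is a placeholder):

import h5py

with h5py.File("data.h5", "r") as fh:
    tree = h5_to_dictionary(fh)
print(tree)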
Example #37
def kikuchipy_metadata():
    """Return a dictionary in HyperSpy's DictionaryTreeBrowser format
    with the default KikuchiPy metadata.

    See :func:`kikuchipy.signals.EBSD.set_experimental_parameters` for
    an explanation of the parameters.

    Returns
    -------
    md : DictionaryTreeBrowser
    """

    md = DictionaryTreeBrowser()
    sem_node, ebsd_node = metadata_nodes()
    ebsd = {
        'azimuth_angle': 1.,
        'binning': 1,
        'detector': '',
        'elevation_angle': 1.,
        'exposure_time': 1,
        'frame_number': 1,
        'frame_rate': 1,
        'gain': 1.,
        'grid_type': '',
        'sample_tilt': 1.,
        'scan_time': 1.,
        'static_background': 1,
        'xpc': 1.,
        'ypc': 1.,
        'zpc': 1.
    }
    sem = {
        'microscope': '',
        'magnification': 1,
        'beam_energy': 1.,
        'working_distance': 1.
    }
    md.set_item(sem_node, sem)
    md.set_item(ebsd_node, ebsd)
    return md
Example #38
def file_reader(filename, rpl_info=None, encoding="latin-1",
                mmap_mode='c', *args, **kwds):
    """Parses a Lispix (http://www.nist.gov/lispix/) ripple (.rpl) file
    and reads the data from the corresponding raw (.raw) file;
    or, read a raw file if the dictionary rpl_info is provided.

    This format is often uses in EDS/EDX experiments.

    Images and spectral images or data cubes that are written in the
    (Lispix) raw file format are just a continuous string of numbers.

    Data cubes can be stored image by image, or spectrum by spectrum.
    Single images are stored row by row, vector cubes are stored row by row
    (each row spectrum by spectrum), image cubes are stored image by image.

    All of the numbers are in the same format, such as 16 bit signed integer,
    IEEE 8-byte real, 8-bit unsigned byte, etc.

    The "raw" file should be accompanied by text file with the same name and
    ".rpl" extension. This file lists the characteristics of the raw file so
    that it can be loaded without human intervention.

    Alternatively, dictionary 'rpl_info' containing the information can
    be given.

    Some keys are specific to HyperSpy and will be ignored by other software.

    RPL stands for "Raw Parameter List", an ASCII text, tab delimited file in
    which HyperSpy reads the image parameters for a raw file.

                    TABLE OF RPL PARAMETERS
        key                     type     description
      ----------------------   ------    --------------------
      # Mandatory keys:
      width                     int      # pixels per row
      height                    int      # number of rows
      depth                     int      # number of images or spectral pts
      offset                    int      # bytes to skip
      data-type                 str      # 'signed', 'unsigned', or 'float'
      data-length               str      # bytes per pixel  '1', '2', '4', or '8'
      byte-order                str      # 'big-endian', 'little-endian', or 'dont-care'
      record-by                 str      # 'image', 'vector', or 'dont-care'
      # X-ray keys:
      ev-per-chan               int      # optional, eV per channel
      detector-peak-width-ev    int      # optional, FWHM for the Mn K-alpha line
      # HyperSpy-specific keys:
      depth-origin              int      # energy offset in pixels
      depth-scale               float    # energy scaling (units per pixel)
      depth-units               str      # energy units, usually eV
      depth-name                str      # name of the magnitude stored as depth
      width-origin              int      # column offset in pixels
      width-scale               float    # column scaling (units per pixel)
      width-units               str      # column units, usually nm
      width-name                str      # name of the magnitude stored as width
      height-origin             int      # row offset in pixels
      height-scale              float    # row scaling (units per pixel)
      height-units              str      # row units, usually nm
      height-name               str      # name of the magnitude stored as height
      signal                    str      # name of the signal stored, e.g. HAADF
      convergence-angle         float    # TEM convergence angle in mrad
      collection-angle          float    # EELS spectrometer collection semi-angle in mrad
      beam-energy               float    # TEM beam energy in keV
      elevation-angle           float    # elevation angle of the EDS detector
      azimuth-angle             float    # azimuth angle of the EDS detector
      live-time                 float    # live time per spectrum
      energy-resolution         float    # resolution of the EDS (FWHM of MnKa)
      tilt-stage                float    # the tilt of the stage
      date                      str      # date in ISO 8601
      time                      str      # time in ISO 8601

    NOTES

    When 'data-length' is 1, 'byte-order' is not relevant as there is only
    one byte per datum, and 'byte-order' should be 'dont-care'.

    When 'depth' is 1, the file has one image, 'record-by' is not relevant and
    should be 'dont-care'. For spectral images, 'record-by' is 'vector'.
    For stacks of images, 'record-by' is 'image'.

    Floating point numbers can be IEEE 4-byte, or IEEE 8-byte. Therefore if
    data-type is float, data-length MUST be 4 or 8.

    The rpl file is read in a case-insensitive manner. However, when providing
    a dictionary as input, the keys MUST be lowercase.

    Comment lines, beginning with a semi-colon ';' are allowed anywhere.

    The first non-comment line in the rpl file MUST have two column names:
    'name_1'<TAB>'name_2'; any name would do e.g. 'key'<TAB>'value'.

    Parameters can be in ANY order.

    In the rpl file, the parameter name is followed by ONE tab (spaces are
    ignored) e.g.: 'data-length'<TAB>'2'

    In the rpl file, other data and more tabs can follow the two items on
    each row, and are ignored.

    Other keys and values can be included and are ignored.

    Any number of spaces can go along with each tab.

    """

    if not rpl_info:
        if filename[-3:] in file_extensions:
            with codecs.open(filename, encoding=encoding,
                             errors='replace') as f:
                rpl_info = parse_ripple(f)
        else:
            raise IOError('File has wrong extension: "%s"' % filename[-3:])
    for ext in ['raw', 'RAW']:
        rawfname = filename[:-3] + ext
        if os.path.exists(rawfname):
            break
        else:
            rawfname = ''
    if not rawfname:
        raise IOError('RAW file does not exist for "%s"' % filename)
    else:
        data = read_raw(rpl_info, rawfname, mmap_mode=mmap_mode)

    if rpl_info['record-by'] == 'vector':
        _logger.info('Loading as Signal1D')
        record_by = 'spectrum'
    elif rpl_info['record-by'] == 'image':
        _logger.info('Loading as Signal2D')
        record_by = 'image'
    else:
        if len(data.shape) == 1:
            _logger.info('Loading as Signal1D')
            record_by = 'spectrum'
        else:
            _logger.info('Loading as Signal2D')
            record_by = 'image'

    if rpl_info['record-by'] == 'vector':
        idepth, iheight, iwidth = 2, 0, 1
        names = ['height', 'width', 'depth', ]
    else:
        idepth, iheight, iwidth = 0, 1, 2
        names = ['depth', 'height', 'width']

    scales = [1, 1, 1]
    origins = [0, 0, 0]
    units = ['', '', '']
    sizes = [rpl_info[names[i]] for i in range(3)]

    if 'date' not in rpl_info:
        rpl_info['date'] = ""

    if 'time' not in rpl_info:
        rpl_info['time'] = ""

    if 'signal' not in rpl_info:
        rpl_info['signal'] = ""

    if 'depth-scale' in rpl_info:
        scales[idepth] = rpl_info['depth-scale']
    # ev-per-chan is the only calibration supported by the original ripple
    # format
    elif 'ev-per-chan' in rpl_info:
        scales[idepth] = rpl_info['ev-per-chan']

    if 'depth-origin' in rpl_info:
        origins[idepth] = rpl_info['depth-origin']

    if 'depth-units' in rpl_info:
        units[idepth] = rpl_info['depth-units']

    if 'depth-name' in rpl_info:
        names[idepth] = rpl_info['depth-name']

    if 'width-origin' in rpl_info:
        origins[iwidth] = rpl_info['width-origin']

    if 'width-scale' in rpl_info:
        scales[iwidth] = rpl_info['width-scale']

    if 'width-units' in rpl_info:
        units[iwidth] = rpl_info['width-units']

    if 'width-name' in rpl_info:
        names[iwidth] = rpl_info['width-name']

    if 'height-origin' in rpl_info:
        origins[iheight] = rpl_info['height-origin']

    if 'height-scale' in rpl_info:
        scales[iheight] = rpl_info['height-scale']

    if 'height-units' in rpl_info:
        units[iheight] = rpl_info['height-units']

    if 'height-name' in rpl_info:
        names[iheight] = rpl_info['height-name']

    mp = DictionaryTreeBrowser({
        'General': {'original_filename': os.path.split(filename)[1],
                    'date': rpl_info['date'],
                    'time': rpl_info['time']},
        "Signal": {'signal_type': rpl_info['signal'],
                   'record_by': record_by},
    })
    if 'convergence-angle' in rpl_info:
        mp.set_item('Acquisition_instrument.TEM.convergence_angle',
                    rpl_info['convergence-angle'])
    if 'tilt-stage' in rpl_info:
        mp.set_item('Acquisition_instrument.TEM.tilt_stage',
                    rpl_info['tilt-stage'])
    if 'collection-angle' in rpl_info:
        mp.set_item('Acquisition_instrument.TEM.Detector.EELS.' +
                    'collection_angle',
                    rpl_info['collection-angle'])
    if 'beam-energy' in rpl_info:
        mp.set_item('Acquisition_instrument.TEM.beam_energy',
                    rpl_info['beam-energy'])
    if 'elevation-angle' in rpl_info:
        mp.set_item('Acquisition_instrument.TEM.Detector.EDS.elevation_angle',
                    rpl_info['elevation-angle'])
    if 'azimuth-angle' in rpl_info:
        mp.set_item('Acquisition_instrument.TEM.Detector.EDS.azimuth_angle',
                    rpl_info['azimuth-angle'])
    if 'energy-resolution' in rpl_info:
        mp.set_item('Acquisition_instrument.TEM.Detector.EDS.' +
                    'energy_resolution_MnKa',
                    rpl_info['energy-resolution'])
    if 'detector-peak-width-ev' in rpl_info:
        mp.set_item('Acquisition_instrument.TEM.Detector.EDS.' +
                    'energy_resolution_MnKa',
                    rpl_info['detector-peak-width-ev'])
    if 'live-time' in rpl_info:
        mp.set_item('Acquisition_instrument.TEM.Detector.EDS.live_time',
                    rpl_info['live-time'])

    axes = []
    index_in_array = 0
    for i in range(3):
        if sizes[i] > 1:
            axes.append({
                'size': sizes[i],
                'index_in_array': index_in_array,
                'name': names[i],
                'scale': scales[i],
                'offset': origins[i],
                'units': units[i],
            })
            index_in_array += 1

    dictionary = {
        'data': data.squeeze(),
        'axes': axes,
        'metadata': mp.as_dictionary(),
        'original_metadata': rpl_info
    }
    return [dictionary, ]
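
A usage sketch (file names are placeholders); the parameter list can also be supplied directly, bypassing the .rpl parsing:

# Read via the .rpl text file sitting next to the .raw file:
dicts = file_reader("map.rpl")

# Or pass the parameter list yourself (keys must be lowercase):
rpl_info = {"width": 64, "height": 64, "depth": 1024, "offset": 0,
            "data-type": "unsigned", "data-length": "2",
            "byte-order": "little-endian", "record-by": "vector"}
dicts = file_reader("map.rpl", rpl_info=rpl_info)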
Example #39
    def __init__(self, imdict, file, order="C", record_by=None):
        self.imdict = DictionaryTreeBrowser(imdict)
        self.file = file
        self._order = order if order else "C"
        self._record_by = record_by
Example #40
class Samfire:

    """Smart Adaptive Multidimensional Fitting (SAMFire) object

    SAMFire is a more robust way of fitting multidimensional datasets. By
    extracting starting values for each pixel from already fitted pixels,
    SAMFire stops the fitting algorithm from getting lost in the parameter
    space by always starting close to the optimal solution.

    SAMFire only picks starting parameters and the order in which the pixels
    (in the navigation space) are fitted, and does not provide any new
    minimisation algorithms.

    Attributes
    ----------

    model : Model instance
        The complete model
    optional_components : list
        A list of components that can be switched off at some pixels if it
        returns a better Akaike's Information Criterion with correction (AICc)
    workers : int
        The number of worker processes that will perform the fitting in
        parallel
    pool : samfire_pool instance
        A proxy object that manages either multiprocessing or ipyparallel pool
    strategies : strategy list
        A list of strategies that will be used to select pixel fitting order
        and calculate required starting parameters. Strategies come in two
        "flavours" - local and global. Local strategies spread the starting
        values to the nearest pixels and force a certain pixel fitting order.
        Global strategies look for clusters in parameter values and suggest
        the most frequent values. Global strategies do not depend on pixel
        fitting order, which is therefore randomised.
    metadata : dictionary
        A dictionary for important samfire parameters
    active_strategy : strategy
        The currently active strategy from the strategies list
    update_every : int
        If a segmenter strategy is running, updates the histograms every time
        update_every good fits are found.
    plot_every : int
        When running, samfire plots results every time plot_every good fits are
        found.
    save_every : int
        When running, samfire saves results every time save_every good fits are
        found.

    Methods
    -------

    start
        start SAMFire
    stop
        stop SAMFire
    plot
        force plot of currently selected active strategy
    refresh_database
        refresh current active strategy database. No previous structure is
        preserved
    backup
        backs up the current version of the model
    change_strategy
        changes strategy to a new one. Certain rules apply
    append
        appends strategy to the strategies list
    extend
        extends strategies list
    remove
        removes strategy from strategies list
    update
        updates the current model with values, received from a worker
    log
        if _log exists, logs the arguments to the list.
    generate_values
        creates a generator to calculate values to be sent to the workers
    """

    __active_strategy_ind = 0
    _progressbar = None
    pool = None
    _figure = None
    optional_components = []
    running_pixels = []
    plot_every = 0
    save_every = np.nan
    _workers = None
    _args = None
    count = 0

    def __init__(self, model, workers=None, setup=True, **kwargs):
        # constants:
        if workers is None:
            workers = max(1, cpu_count() - 1)
        self.model = model
        self.metadata = DictionaryTreeBrowser()

        self._scale = 1.0
        # -1 -> done pixel, use
        # -2 -> done, ignore when diffusion
        #  0 -> bad fit/no info
        # >0 -> select when turn comes

        self.metadata.add_node('marker')
        self.metadata.add_node('goodness_test')

        marker = np.empty(self.model.axes_manager.navigation_shape[::-1])
        marker.fill(self._scale)

        self.metadata.marker = marker
        self.strategies = StrategyList(self)
        self.strategies.append(ReducedChiSquaredStrategy())
        self.strategies.append(HistogramStrategy())
        self._active_strategy_ind = 0
        self.update_every = max(10, workers * 2)  # some sensible number....
        from hyperspy.samfire_utils.fit_tests import red_chisq_test
        self.metadata.goodness_test = red_chisq_test(tolerance=1.0)
        self.metadata._gt_dump = None
        from hyperspy.samfire_utils.samfire_kernel import single_kernel
        self.single_kernel = single_kernel
        self._workers = workers
        if len(kwargs) or setup:
            self._setup(**kwargs)
        self.refresh_database()

    @property
    def active_strategy(self):
        """Returns the active strategy"""
        return self.strategies[self._active_strategy_ind]

    @active_strategy.setter
    def active_strategy(self, value):
        self.change_strategy(value)

    def _setup(self, **kwargs):
        """Set up SAMFire - configure models, set up pool if necessary"""
        from hyperspy.samfire_utils.samfire_pool import SamfirePool
        self._figure = None
        self.metadata._gt_dump = dill.dumps(self.metadata.goodness_test)
        self._enable_optional_components()

        if hasattr(self.model, '_suspend_auto_fine_structure_width'):
            self.model._suspend_auto_fine_structure_width = True

        if hasattr(self, '_log'):
            self._log = []

        if self._workers and self.pool is None:
            if 'num_workers' not in kwargs:
                kwargs['num_workers'] = self._workers
            if self.pool is None:
                self.pool = SamfirePool(**kwargs)
            self._workers = self.pool.num_workers
            self.pool.prepare_workers(self)

    def start(self, **kwargs):
        """Starts SAMFire.

        Parameters
        ----------
        **kwargs : key-word arguments
            Any key-word arguments to be passed to Model.fit() call
        """
        self._setup()
        if self._workers and self.pool is not None:
            self.pool.update_parameters()
        if 'min_function' in kwargs:
            kwargs['min_function'] = dill.dumps(kwargs['min_function'])
        if 'min_function_grad' in kwargs:
            kwargs['min_function_grad'] = dill.dumps(
                kwargs['min_function_grad'])
        self._args = kwargs
        num_of_strat = len(self.strategies)
        total_size = self.model.axes_manager.navigation_size - self.pixels_done
        self._progressbar = progressbar(total=total_size)
        try:
            while True:
                self._run_active_strategy()
                self.plot()
                if self.pixels_done == self.model.axes_manager.navigation_size:
                    # all pixels are done, no need to go to the next strategy
                    break
                if self._active_strategy_ind == num_of_strat - 1:
                    # last one just finished running
                    break
                self.change_strategy(self._active_strategy_ind + 1)
        except KeyboardInterrupt:
            if self.pool is not None:
                _logger.warning(
                    'Collecting already started pixels, please wait')
                self.pool.collect_results()

    def append(self, strategy):
        """appends the given strategy to the end of the strategies list

        Parameters
        ----------
        strategy : strategy instance
        """
        self.strategies.append(strategy)

    def extend(self, iterable):
        """extend the strategies list by the given iterable

        Parameters
        ----------
        iterable : an iterable of strategy instances
        """
        self.strategies.extend(iterable)

    def remove(self, thing):
        """removes given strategy from the strategies list

        Parameters
        ----------
        thing : int or strategy instance
            Strategy that is in current strategies list or its index.
        """
        self.strategies.remove(thing)

    @property
    def _active_strategy_ind(self):
        return self.__active_strategy_ind

    @_active_strategy_ind.setter
    def _active_strategy_ind(self, value):
        self.__active_strategy_ind = np.abs(int(value))

    def _run_active_strategy(self):
        if self.pool is not None:
            self.count = 0
            self.pool.run()
        else:
            self._run_active_strategy_one()

    @property
    def pixels_left(self):
        """Returns the number of pixels that are left to solve. This number can
        increase as SAMFire learns more information about the data.
        """
        return np.sum(self.metadata.marker > 0.)

    @property
    def pixels_done(self):
        """Returns the number of pixels that have been solved"""
        return np.sum(self.metadata.marker <= -self._scale)

    def _run_active_strategy_one(self):
        self.count = 0
        while self.pixels_left:
            ind = self._next_pixels(1)[0]
            vals = self.active_strategy.values(ind)
            self.running_pixels.append(ind)
            isgood = self.single_kernel(self.model,
                                        ind,
                                        vals,
                                        self.optional_components,
                                        self._args,
                                        self.metadata.goodness_test)
            self.running_pixels.remove(ind)
            self.count += 1
            if isgood:
                self._progressbar.update(1)
            self.active_strategy.update(ind, isgood)
            self.plot(on_count=True)
            self.backup(on_count=True)

    def backup(self, filename=None, on_count=True):
        """Backs-up the samfire results in a file

        Parameters
        ----------
        filename: {str, None}
            the filename. If None, a default value of "backup_"+signal_title is
            used
        on_count: bool
            if True (default), only saves on the required count of steps
        """
        if filename is None:
            title = self.model.signal.metadata.General.title
            filename = slugify('backup_' + title)
        # maybe add saving marker + strategies as well?
        if self.count % self.save_every == 0 or not on_count:
            self.model.save(filename,
                            name='samfire_backup', overwrite=True)
            self.model.signal.models.remove('samfire_backup')

    def update(self, ind, results=None, isgood=None):
        """Updates the current model with the results, received from the
        workers. Results are only stored if the results are good enough

        Parameters
        ----------
        ind : tuple
            contains the index of the pixel of the results
        results : {dict, None}
            dictionary of the results. If None, means we are updating in-place
            (e.g. refreshing the marker or strategies)
        isgood : {bool, None}
            if it is known if the results are good according to the
            goodness-of-fit test. If None, the pixel is tested
        """
        if results is not None and (isgood is None or isgood):
            self._swap_dict_and_model(ind, results)

        if isgood is None:
            isgood = self.metadata.goodness_test.test(self.model, ind)
        self.count += 1
        if isgood and self._progressbar is not None:
            self._progressbar.update(1)

        self.active_strategy.update(ind, isgood)
        if not isgood and results is not None:
            self._swap_dict_and_model(ind, results)

    def refresh_database(self):
        """Refreshes currently selected strategy without preserving any
        "ignored" pixels
        """
        # updates current active strategy database / prob.
        # Assume when chisq is not None, it's relevant

        # TODO: if no calculated pixels, request user input

        calculated_pixels = np.logical_not(np.isnan(self.model.red_chisq.data))
        # only include pixels that are good enough
        calculated_pixels = self.metadata.goodness_test.map(
            self.model,
            calculated_pixels)

        self.active_strategy.refresh(True, calculated_pixels)

    def change_strategy(self, new_strat):
        """Changes current strategy to a new one. Certain rules apply:
        diffusion -> diffusion : resets all "ignored" pixels
        diffusion -> segmenter : saves already calculated pixels to be ignored
            when (if) a diffusion strategy is subsequently run

        Parameters
        ----------
        new_strat : {int | strategy}
            index of the new strategy from the strategies list or the
            strategy object itself
        """
        from numbers import Number
        if not isinstance(new_strat, Number):
            try:
                new_strat = self.strategies.index(new_strat)
            except ValueError:
                raise ValueError(
                    "The passed object is not in current strategies list")

        new_strat = np.abs(int(new_strat))
        if new_strat == self._active_strategy_ind:
            self.refresh_database()

        # copy previous "done" pixels to the new one, delete old database

        # TODO: make sure it's a number. Get index if object is passed?
        if new_strat >= len(self.strategies):
            raise ValueError('too big new strategy index')

        current = self.active_strategy
        new = self.strategies[new_strat]

        if isinstance(current, LocalStrategy) and isinstance(
                new, LocalStrategy):
            # forget ignore/done levels, keep just calculated or not
            new.refresh(True)
        else:
            if isinstance(current, LocalStrategy) and isinstance(
                    new, GlobalStrategy):
                # if diffusion->segmenter, set previous -1 to -2 (ignored for
                # the next diffusion)
                self.metadata.marker[
                    self.metadata.marker == -
                    self._scale] -= self._scale

            new.refresh(False)
        current.clean()
        if current.close_plot is not None:
            current.close_plot()
        self._active_strategy_ind = new_strat

    def generate_values(self, need_inds):
        """Returns an iterator that yields the index of the pixel and the
        value dictionary to be sent to the workers.

        Parameters
        ----------
        need_inds: int
            the number of pixels to be returned in the generator
        """
        if need_inds:
            # get pixel index
            for ind in self._next_pixels(need_inds):
                # get starting parameters / array of possible values
                value_dict = self.active_strategy.values(ind)
                value_dict['fitting_kwargs'] = self._args
                value_dict['signal.data'] = \
                    self.model.signal.data[ind + (...,)]
                if self.model.signal._lazy:
                    value_dict['signal.data'] = value_dict[
                        'signal.data'].compute()
                if self.model.signal.metadata.has_item(
                        'Signal.Noise_properties.variance'):
                    var = self.model.signal.metadata.Signal.Noise_properties.variance
                    if isinstance(var, BaseSignal):
                        dat = var.data[ind + (...,)]
                        value_dict['variance.data'] = (
                            dat.compute() if var._lazy else dat)
                if hasattr(self.model,
                           'low_loss') and self.model.low_loss is not None:
                    dat = self.model.low_loss.data[ind + (...,)]
                    value_dict['low_loss.data'] = (
                        dat.compute() if self.model.low_loss._lazy else dat)

                self.running_pixels.append(ind)
                self.metadata.marker[ind] = 0.
                yield ind, value_dict

    def _next_pixels(self, number):
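        # Select up to `number` distinct pixel indices, drawn at random from
        # the pixels whose marker value equals the current maximum.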
        best = self.metadata.marker.max()
        inds = []
        if best > 0.0:
            ind_list = np.where(self.metadata.marker == best)
            while number and ind_list[0].size > 0:
                i = np.random.randint(len(ind_list[0]))
                ind = tuple([lst[i] for lst in ind_list])
                if ind not in self.running_pixels:
                    inds.append(ind)
                # remove the tried index from the candidate list either way
                ind_list = [np.delete(lst, i, 0) for lst in ind_list]
                number -= 1
        return inds

    def _swap_dict_and_model(self, m_ind, dict_, d_ind=None):
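        # Swap the values stored in the model at index `m_ind` with the
        # values stored in the result dictionary at index `d_ind`: both the
        # ".data" arrays and the per-component parameter maps are exchanged,
        # so after the call the dictionary holds the model's previous values.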
        if d_ind is None:
            d_ind = tuple([0 for _ in dict_['dof.data'].shape])
        m = self.model
        for k in dict_.keys():
            if k.endswith('.data'):
                item = k[:-5]
                getattr(m, item).data[m_ind], dict_[k] = \
                    dict_[k].copy(), getattr(m, item).data[m_ind].copy()
        for comp_name, comp in dict_['components'].items():
            # only active components are sent
            if self.model[comp_name].active_is_multidimensional:
                self.model[comp_name]._active_array[m_ind] = True
            self.model[comp_name].active = True

            for param_model in self.model[comp_name].parameters:
                param_dict = comp[param_model.name]
                param_model.map[m_ind], param_dict[d_ind] = \
                    param_dict[d_ind].copy(), param_model.map[m_ind].copy()

        for component in self.model:
            # switch off all that did not appear in the dictionary
            if component.name not in dict_['components'].keys():
                if component.active_is_multidimensional:
                    component._active_array[m_ind] = False

    def _enable_optional_components(self):
        if len(self.optional_components) == 0:
            return
        for c in self.optional_components:
            comp = self.model._get_component(c)
            if not comp.active_is_multidimensional:
                comp.active_is_multidimensional = True
        if not np.all([isinstance(a, int) for a in
                       self.optional_components]):
            new_list = []
            for op in self.optional_components:
                for ic, c in enumerate(self.model):
                    if c is self.model._get_component(op):
                        new_list.append(ic)
            self.optional_components = new_list

    def _request_user_input(self):
        from hyperspy.signals import Image
        from hyperspy.drawing.widgets import SquareWidget
        mark = Image(self.metadata.marker,
                     axes=self.model.axes_manager._get_navigation_axes_dicts())
        mark.metadata.General.title = 'SAMFire marker'

        def update_when_triggered():
            ind = self.model.axes_manager.indices[::-1]
            isgood = self.metadata.goodness_test.test(self.model, ind)
            self.active_strategy.update(ind, isgood, 0)
            mark.events.data_changed.trigger(mark)

        self.model.plot()
        self.model.events.fitted.connect(update_when_triggered, [])
        self.model._plot.signal_plot.events.closed.connect(
            lambda: self.model.events.fitted.disconnect(update_when_triggered),
            [])

        mark.plot(navigator='slider')

        w = SquareWidget(self.model.axes_manager)
        w.color = 'yellow'
        w.set_mpl_ax(mark._plot.signal_plot.ax)
        w.connect_navigate()

        def connect_other_navigation1(axes_manager):
            with mark.axes_manager.events.indices_changed.suppress_callback(
                    connect_other_navigation2):
                for ax1, ax2 in zip(mark.axes_manager.navigation_axes,
                                    axes_manager.navigation_axes[2:]):
                    ax1.value = ax2.value

        def connect_other_navigation2(axes_manager):
            with self.model.axes_manager.events.indices_changed.suppress_callback(
                    connect_other_navigation1):
                for ax1, ax2 in zip(self.model.axes_manager.navigation_axes[2:],
                                    axes_manager.navigation_axes):
                    ax1.value = ax2.value

        mark.axes_manager.events.indices_changed.connect(
            connect_other_navigation2, {'obj': 'axes_manager'})
        self.model.axes_manager.events.indices_changed.connect(
            connect_other_navigation1, {'obj': 'axes_manager'})

        self.model._plot.signal_plot.events.closed.connect(
            lambda: mark._plot.close(), [])
        self.model._plot.signal_plot.events.closed.connect(
            lambda: self.model.axes_manager.events.indices_changed.disconnect(
                connect_other_navigation1), [])

    def plot(self, on_count=False):
        """(if possible) plots current strategy plot. Local strategies plot
        grayscale navigation signal with brightness representing order of the
        pixel selection. Global strategies plot a collection of histograms,
        one per parameter.

        Parameters
        ----------
        on_count : bool
            if True, only tries to plot every speficied count, otherwise
            (default) always plots if possible.
        """
        count_test = self.plot_every and (self.count % self.plot_every == 0)
        if not on_count or count_test:
            if self.strategies:
                try:
                    self._figure = self.active_strategy.plot(self._figure)
                except BaseException:
                    self._figure = None
                    self._figure = self.active_strategy.plot(self._figure)

    def log(self, *args):
        """If has a list named "_log", appends the arguments there
        """
        if hasattr(self, '_log') and isinstance(self._log, list):
            self._log.append(args)

    def __repr__(self):
        ans = u"<SAMFire of the signal titled: '"
        ans += self.model.signal.metadata.General.title
        ans += u"'>"
        return ans
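
The pixel-selection rule implemented by _next_pixels above is easy to
exercise in isolation. Below is a minimal, self-contained sketch (plain
NumPy, no running SAMFire required) of the same rule: candidates are the
pixels whose marker value equals the current maximum, drawn in random order,
skipping any pixel already being fitted. The name next_pixels_sketch is ours,
for illustration only.

import numpy as np

def next_pixels_sketch(marker, number, running_pixels=()):
    # Candidates are the pixels with the highest marker value.
    best = marker.max()
    inds = []
    if best > 0.0:
        ind_list = np.where(marker == best)
        while number and ind_list[0].size > 0:
            # Draw one candidate at random...
            i = np.random.randint(len(ind_list[0]))
            ind = tuple(lst[i] for lst in ind_list)
            # ...keep it unless a worker is already fitting it...
            if ind not in running_pixels:
                inds.append(ind)
            # ...and remove it from the candidate pool either way.
            ind_list = [np.delete(lst, i, 0) for lst in ind_list]
            number -= 1
    return inds

marker = np.zeros((4, 4))
marker[1, 2] = marker[3, 0] = 1.0      # two equally promising pixels
print(next_pixels_sketch(marker, 1))   # e.g. [(1, 2)] or [(3, 0)]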
Example #41
def setUp(self):
    self.w = ReducedChiSquaredWeight()
    artificial_model = DictionaryTreeBrowser()
    artificial_model.add_node('red_chisq.data')
    artificial_model.red_chisq.data = np.arange(35).reshape((5, 7))
    self.w.model = artificial_model
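
The fixture above leans on DictionaryTreeBrowser, HyperSpy's attribute-style
nested-dictionary container that appears throughout these examples. A short
hedged sketch of the small part of its API these snippets use (the import
path below matches recent HyperSpy layouts and may differ between versions):

import numpy as np
from hyperspy.misc.utils import DictionaryTreeBrowser

tree = DictionaryTreeBrowser()
tree.add_node('red_chisq.data')         # create nested nodes from a dotted path
tree.red_chisq.data = np.arange(35).reshape((5, 7))
tree.set_item('General.title', 'demo')  # dotted-path assignment in one call
assert tree.has_item('General.title')
print(tree.as_dictionary()['General']['title'])  # 'demo'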
Example #42
File: msa.py Project: AakashV/hyperspy
def parse_msa_string(string, filename=None):
    """Parse an EMSA/MSA file content.

    Parameters
    ----------
    string: string or file object
        It must complain with the EMSA/MSA standard.
    filename: string or None
        The filename.

    Returns:
    --------
    file_data_list: list
        The list containts a dictionary that contains the parsed
        information. It can be used to create a `:class:Signal`
        using `:func:hyperspy.io.dict2signal`.

    """
    if not hasattr(string, "readlines"):
        string = string.splitlines()
    parameters = {}
    mapped = DictionaryTreeBrowser({})
    y = []
    # Read the keywords
    data_section = False
    for line in string:
        if data_section is False:
            if line[0] == "#":
                try:
                    key, value = line.split(': ', 1)
                    value = value.strip()
                except ValueError:
                    key = line
                    value = None
                key = key.strip('#').strip()

                if key != 'SPECTRUM':
                    parameters[key] = value
                else:
                    data_section = True
        else:
            # Read the data
            if line[0] != "#" and line.strip():
                if parameters['DATATYPE'] == 'XY':
                    xy = line.replace(',', ' ').strip().split()
                    y.append(float(xy[1]))
                elif parameters['DATATYPE'] == 'Y':
                    data = [
                        float(i) for i in line.replace(',', ' ').strip().split()]
                    y.extend(data)
    # We rewrite the format value to be sure that it complies with the
    # standard, because it will be used by the writer routine
    parameters['FORMAT'] = "EMSA/MAS Spectral Data File"

    # Convert the parameters to the right type and map some
    # TODO: the msa format seems to support specifying the units of some
    # parameters. We should add this feature here
    for parameter, value in parameters.items():
        # Some parameters names can contain the units information
        # e.g. #AZIMANGLE-dg: 90.
        if '-' in parameter:
            clean_par, units = parameter.split('-')
            clean_par, units = clean_par.strip(), units.strip()
        else:
            clean_par, units = parameter, None
        if clean_par in keywords:
            try:
                parameters[parameter] = keywords[clean_par]['dtype'](value)
            except Exception:
                # Normally the offending misspelling is a space in the
                # scientific notation, e.g. 2.0 E-06, so we try to correct
                # for it
                try:
                    parameters[parameter] = keywords[clean_par]['dtype'](
                        value.replace(' ', ''))
                except Exception:
                    _logger.exception(
                        "The %s keyword value, %s could not be converted to "
                        "the right type", parameter, value)

            if keywords[clean_par]['mapped_to'] is not None:
                mapped.set_item(keywords[clean_par]['mapped_to'],
                                parameters[parameter])
                if units is not None:
                    mapped.set_item(keywords[clean_par]['mapped_to'] +
                                    '_units', units)

    # The data parameter needs some extra care
    # It is necessary to change the locale to US english to read the date
    # keyword
    loc = locale.getlocale(locale.LC_TIME)
    # Setting locale can raise an exception because
    # their name depends on library versions, platform etc.
    try:
        if os_name == 'posix':
            locale.setlocale(locale.LC_TIME, ('en_US', 'utf8'))
        elif os_name == 'windows':
            locale.setlocale(locale.LC_TIME, 'english')
        try:
            H, M = time.strptime(parameters['TIME'], "%H:%M")[3:5]
            mapped.set_item('General.time', datetime.time(H, M))
        except Exception:
            if 'TIME' in parameters and parameters['TIME']:
                _logger.warning('The time information could not be retrieved')
        try:
            Y, M, D = time.strptime(parameters['DATE'], "%d-%b-%Y")[0:3]
            mapped.set_item('General.date', datetime.date(Y, M, D))
        except Exception:
            if 'DATE' in parameters and parameters['DATE']:
                _logger.warning('The date information could not be retrieved')
    except Exception:
        warnings.warn("The date and time information could not be read due "
                      "to an unexpected error. Please report this error to "
                      "the developers")
    locale.setlocale(locale.LC_TIME, loc)  # restore saved locale

    axes = [{
        'size': len(y),
        'index_in_array': 0,
        'name': parameters['XLABEL'] if 'XLABEL' in parameters else '',
        'scale': parameters['XPERCHAN'] if 'XPERCHAN' in parameters else 1,
        'offset': parameters['OFFSET'] if 'OFFSET' in parameters else 0,
        'units': parameters['XUNITS'] if 'XUNITS' in parameters else '',
    }]
    if filename is not None:
        mapped.set_item('General.original_filename',
                        os.path.split(filename)[1])
    mapped.set_item('Signal.record_by', 'spectrum')
    if mapped.has_item('Signal.signal_type'):
        if mapped.Signal.signal_type == 'ELS':
            mapped.Signal.signal_type = 'EELS'
    else:
        # Defaulting to EELS looks reasonable
        mapped.set_item('Signal.signal_type', 'EELS')

    dictionary = {
        'data': np.array(y),
        'axes': axes,
        'metadata': mapped.as_dictionary(),
        'original_metadata': parameters
    }
    file_data_list = [dictionary, ]
    return file_data_list
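
A hedged usage sketch for the parser above. The MSA text is a minimal
hand-written example, the import path matches older HyperSpy layouts (it may
differ between versions), and the numeric axis values depend on the
module-level keywords table converting XPERCHAN and OFFSET to numbers:

from hyperspy.io_plugins.msa import parse_msa_string

msa_text = (
    "#FORMAT      : EMSA/MAS Spectral Data File\n"
    "#VERSION     : 1.0\n"
    "#NPOINTS     : 4\n"
    "#XPERCHAN    : 0.5\n"
    "#OFFSET      : 0.0\n"
    "#XUNITS      : eV\n"
    "#DATATYPE    : Y\n"
    "#SPECTRUM    : Spectral Data Starts Here\n"
    "1.0, 2.0\n"
    "3.0, 4.0\n"
    "#ENDOFDATA   :\n"
)
file_data_list = parse_msa_string(msa_text, filename='demo.msa')
d = file_data_list[0]
print(d['data'])                                      # [1. 2. 3. 4.]
print(d['axes'][0]['units'])                          # 'eV'
print(d['metadata']['General']['original_filename'])  # 'demo.msa'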
Example #43
class ImageObject(object):

    def __init__(self, imdict, file, order="C", record_by=None):
        self.imdict = DictionaryTreeBrowser(imdict)
        self.file = file
        self._order = order if order else "C"
        self._record_by = record_by

    @property
    def shape(self):
        dimensions = self.imdict.ImageData.Dimensions
        shape = tuple([dimension[1] for dimension in dimensions])
        return shape[::-1]  # DM uses image indexing X, Y, Z...

    # For some image stacks created using plugins in Digital Micrograph
    # the metadata under Calibrations.Dimension would not reflect the
    # actual dimensions in the dataset, leading to these images not
    # loading properly. To allow HyperSpy to load these files, any missing
    # dimensions in the metadata are appended with "dummy" values.
    # This is done for the offsets, scales and units properties, using
    # the len_diff variable
    @property
    def offsets(self):
        dimensions = self.imdict.ImageData.Calibrations.Dimension
        len_diff = len(self.shape) - len(dimensions)
        origins = np.array([dimension[1].Origin for dimension in dimensions])
        origins = np.append(origins, (0.0,) * len_diff)
        return -1 * origins[::-1] * self.scales

    @property
    def scales(self):
        dimensions = self.imdict.ImageData.Calibrations.Dimension
        len_diff = len(self.shape) - len(dimensions)
        scales = np.array([dimension[1].Scale for dimension in dimensions])
        scales = np.append(scales, (1.0,) * len_diff)
        return scales[::-1]

    @property
    def units(self):
        dimensions = self.imdict.ImageData.Calibrations.Dimension
        len_diff = len(self.shape) - len(dimensions)
        return (tuple([dimension[1].Units
                       if dimension[1].Units else ""
                       for dimension in dimensions]) + ('',) * len_diff)[::-1]

    @property
    def names(self):
        names = [t.Undefined] * len(self.shape)
        indices = list(range(len(self.shape)))
        if self.signal_type == "EELS":
            if "eV" in self.units:
                names[indices.pop(self.units.index("eV"))] = "Energy loss"
        elif self.signal_type in ("EDS", "EDX"):
            if "keV" in self.units:
                names[indices.pop(self.units.index("keV"))] = "Energy"
        for index, name in zip(indices[::-1], ("x", "y", "z")):
            names[index] = name
        return names

    @property
    def title(self):
        title = self.imdict.get_item("Name", "")
        # ``if title else ""`` below is there to account for when Name
        # contains an empty list.
        # See https://github.com/hyperspy/hyperspy/issues/1937
        return title if title else ""

    @property
    def record_by(self):
        if self._record_by is not None:
            return self._record_by
        if len(self.scales) == 1:
            return "spectrum"
        elif (('ImageTags.Meta_Data.Format' in self.imdict and
               self.imdict.ImageTags.Meta_Data.Format in ("Spectrum image",
                                                          "Spectrum")) or (
                "ImageTags.spim" in self.imdict)) and len(self.scales) == 2:
            return "spectrum"
        else:
            return "image"

    @property
    def to_spectrum(self):
        if (('ImageTags.Meta_Data.Format' in self.imdict and
                self.imdict.ImageTags.Meta_Data.Format == "Spectrum image") or
                ("ImageTags.spim" in self.imdict)) and len(self.scales) > 2:
            return True
        else:
            return False

    @property
    def order(self):
        return self._order

    @property
    def intensity_calibration(self):
        ic = self.imdict.ImageData.Calibrations.Brightness.as_dictionary()
        if not ic['Units']:
            ic['Units'] = ""
        return ic

    @property
    def dtype(self):
        # Signal2D data types (Signal2D Object chapter in the DM help)
        # key = DM data type code
        # value = numpy data type
        if self.imdict.ImageData.DataType == 4:
            raise NotImplementedError(
                "Reading data of this type is not implemented.")

        imdtype_dict = {
            0: 'not_implemented',  # null
            1: 'int16',
            2: 'float32',
            3: 'complex64',
            5: 'float32',  # not numpy: 8-Byte packed complex (FFT data)
            6: 'uint8',
            7: 'int32',
            8: np.dtype({'names': ['B', 'G', 'R', 'A'],
                         'formats': ['u1', 'u1', 'u1', 'u1']}),
            9: 'int8',
            10: 'uint16',
            11: 'uint32',
            12: 'float64',
            13: 'complex128',
            14: 'bool',
            23: np.dtype({'names': ['B', 'G', 'R', 'A'],
                          'formats': ['u1', 'u1', 'u1', 'u1']}),
            27: 'complex64',  # not numpy: 8-Byte packed complex (FFT data)
            28: 'complex128',  # not numpy: 16-Byte packed complex (FFT data)
        }
        return imdtype_dict[self.imdict.ImageData.DataType]

    @property
    def signal_type(self):
        if 'ImageTags.Meta_Data.Signal' in self.imdict:
            if self.imdict.ImageTags.Meta_Data.Signal == "X-ray":
                return "EDS_TEM"
            return self.imdict.ImageTags.Meta_Data.Signal
        elif 'ImageTags.spim.eels' in self.imdict:  # Orsay's tag group
            return "EELS"
        else:
            return ""

    def _get_data_array(self):
        need_to_close = False
        if self.file.closed:
            self.file = open(self.file.name, "rb")
            need_to_close = True
        self.file.seek(self.imdict.ImageData.Data.offset)
        count = self.imdict.ImageData.Data.size
        if self.imdict.ImageData.DataType in (27, 28):  # Packed complex
            count = int(count / 2)
        data = np.fromfile(self.file,
                           dtype=self.dtype,
                           count=count)
        if need_to_close:
            self.file.close()
        return data

    @property
    def size(self):
        if self.imdict.ImageData.DataType in (27, 28):  # Packed complex
            if self.imdict.ImageData.Data.size % 2:
                raise IOError(
                    "ImageData.Data.size should be an even integer for "
                    "this datatype.")
            else:
                return int(self.imdict.ImageData.Data.size / 2)
        else:
            return self.imdict.ImageData.Data.size

    def get_data(self):
        if isinstance(self.imdict.ImageData.Data, np.ndarray):
            return self.imdict.ImageData.Data
        data = self._get_data_array()
        if self.imdict.ImageData.DataType in (27, 28):  # New packed complex
            return self.unpack_new_packed_complex(data)
        elif self.imdict.ImageData.DataType == 5:  # Old packed complex
            return self.unpack_packed_complex(data)
        elif self.imdict.ImageData.DataType in (8, 23):  # ABGR
            # Reorder the fields
            data = data[['R', 'G', 'B', 'A']].astype(
                [('R', 'u1'), ('G', 'u1'), ('B', 'u1'), ('A', 'u1')])
        return data.reshape(self.shape, order=self.order)

    def unpack_new_packed_complex(self, data):
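        # The file stores only the non-redundant half of a Hermitian-
        # symmetric FFT; the full array is rebuilt by mirroring the stored
        # columns and appending the complex conjugate of the interior ones.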
        packed_shape = (self.shape[0], int(self.shape[1] / 2 + 1))
        data = data.reshape(packed_shape, order=self.order)
        return np.hstack((data[:, ::-1], np.conjugate(data[:, 1:-1])))

    def unpack_packed_complex(self, tmpdata):
        shape = self.shape
        if shape[0] != shape[1] or len(shape) > 2:
            raise IOError(
                'Unable to read this DM file in packed complex format. '
                'Please report the issue to the HyperSpy developers providing '
                'the file if possible')
        N = int(self.shape[0] / 2)      # think about a 2Nx2N matrix
        # create an empty 2Nx2N ndarray of complex
        data = np.zeros(shape, dtype="complex64")

        # fill in the real values:
        data[N, 0] = tmpdata[0]
        data[0, 0] = tmpdata[1]
        data[N, N] = tmpdata[2 * N ** 2]  # Nyquist frequency
        data[0, N] = tmpdata[2 * N ** 2 + 1]  # Nyquist frequency

        # fill in the non-redundant complex values:
        # top right quarter, except 1st column
        for i in range(N):  # this could be optimized
            start = 2 * i * N + 2
            stop = start + 2 * (N - 1) - 1
            step = 2
            realpart = tmpdata[start:stop:step]
            imagpart = tmpdata[start + 1:stop + 1:step]
            data[i, N + 1:2 * N] = realpart + imagpart * 1j
        # 1st column, bottom left quarter
        start = 2 * N
        stop = start + 2 * N * (N - 1) - 1
        step = 2 * N
        realpart = tmpdata[start:stop:step]
        imagpart = tmpdata[start + 1:stop + 1:step]
        data[N + 1:2 * N, 0] = realpart + imagpart * 1j
        # 1st row, bottom right quarter
        start = 2 * N ** 2 + 2
        stop = start + 2 * (N - 1) - 1
        step = 2
        realpart = tmpdata[start:stop:step]
        imagpart = tmpdata[start + 1:stop + 1:step]
        data[N, N + 1:2 * N] = realpart + imagpart * 1j
        # bottom right quarter, except 1st row
        start = stop + 1
        stop = start + 2 * N * (N - 1) - 1
        step = 2
        realpart = tmpdata[start:stop:step]
        imagpart = tmpdata[start + 1:stop + 1:step]
        complexdata = realpart + imagpart * 1j
        data[N + 1:2 * N, N:2 * N] = complexdata.reshape(
            N - 1, N, order=self.order)

        # fill in the empty pixels: A(i)(j) = A(2N-i)(2N-j)*
        # 1st row, top left quarter, except 1st element
        data[0, 1:N] = np.conjugate(data[0, -1:-N:-1])
        # 1st row, bottom left quarter, except 1st element
        data[N, 1:N] = np.conjugate(data[N, -1:-N:-1])
        # 1st column, top left quarter, except 1st element
        data[1:N, 0] = np.conjugate(data[-1:-N:-1, 0])
        # 1st column, top right quarter, except 1st element
        data[1:N, N] = np.conjugate(data[-1:-N:-1, N])
        # top left quarter, except 1st row and 1st column
        data[1:N, 1:N] = np.conjugate(data[-1:-N:-1, -1:-N:-1])
        # bottom left quarter, except 1st row and 1st column
        data[N + 1:2 * N, 1:N] = np.conjugate(data[-N - 1:-2 * N:-1, -1:-N:-1])

        return data

    def get_axes_dict(self):
        return [{'name': name,
                 'size': size,
                 'index_in_array': i,
                 'scale': scale,
                 'offset': offset,
                 'units': str(units), }
                for i, (name, size, scale, offset, units) in enumerate(
                    zip(self.names, self.shape, self.scales, self.offsets,
                        self.units))]

    def get_metadata(self, metadata=None):
        if metadata is None:
            metadata = {}
        if "General" not in metadata:
            metadata['General'] = {}
        if "Signal" not in metadata:
            metadata['Signal'] = {}
        metadata['General']['title'] = self.title
        metadata["Signal"]['record_by'] = self.record_by
        metadata["Signal"]['signal_type'] = self.signal_type
        return metadata

    def _get_quantity(self, units):
        quantity = "Intensity"
        if len(units) == 0:
            units = ""
        elif units == 'e-':
            units = "Counts"
            quantity = "Electrons"
        if self.signal_type == 'EDS_TEM':
            quantity = "X-rays"
        if len(units) != 0:
            units = " (%s)" % units
        return "%s%s" % (quantity, units)

    def _get_mode(self, mode):
        if 'STEM' in mode:
            return 'STEM'
        else:
            return 'TEM'

    def _get_time(self, time):
        try:
            dt = dateutil.parser.parse(time)
            return dt.time().isoformat()
        except BaseException:
            _logger.warning("Time string, %s,  could not be parsed", time)

    def _get_date(self, date):
        try:
            dt = dateutil.parser.parse(date)
            return dt.date().isoformat()
        except BaseException:
            _logger.warning("Date string, %s,  could not be parsed", date)

    def _get_microscope_name(self, ImageTags):
        locations = (
            "Session_Info.Microscope",
            "Microscope_Info.Name",
            "Microscope_Info.Microscope",
        )
        for loc in locations:
            mic = ImageTags.get_item(loc)
            if mic and mic != "[]":
                return mic
        _logger.info("Microscope name not present")
        return None

    def _parse_string(self, tag):
        if len(tag) == 0:
            return None
        else:
            return tag

    def _get_EELS_exposure_time(self, tags):
        # for GMS 2 and quantum/enfinium, the "Integration time (s)" tag is
        # only present for single spectrum acquisition; for maps we need to
        # compute exposure * number of frames
        if 'Integration_time_s' in tags.keys():
            return float(tags["Integration_time_s"])
        elif 'Exposure_s' in tags.keys():
            frame_number = 1
            if "Number_of_frames" in tags.keys():
                frame_number = float(tags["Number_of_frames"])
            return float(tags["Exposure_s"]) * frame_number
        else:
            _logger.info("EELS exposure time can't be read.")

    def get_mapping(self):
        if 'source' in self.imdict.ImageTags.keys():
            # For stack created with the stack builder plugin
            tags_path = 'ImageList.TagGroup0.ImageTags.source.Tags at creation'
            image_tags_dict = self.imdict.ImageTags.source['Tags at creation']
        else:
            # Standard tags
            tags_path = 'ImageList.TagGroup0.ImageTags'
            image_tags_dict = self.imdict.ImageTags
        is_scanning = "DigiScan" in image_tags_dict.keys()
        mapping = {
            "{}.DataBar.Acquisition Date".format(tags_path): (
                "General.date", self._get_date),
            "{}.DataBar.Acquisition Time".format(tags_path): (
                "General.time", self._get_time),
            "{}.Microscope Info.Voltage".format(tags_path): (
                "Acquisition_instrument.TEM.beam_energy", lambda x: x / 1e3),
            "{}.Microscope Info.Stage Position.Stage Alpha".format(tags_path): (
                "Acquisition_instrument.TEM.Stage.tilt_alpha", None),
            "{}.Microscope Info.Stage Position.Stage Beta".format(tags_path): (
                "Acquisition_instrument.TEM.Stage.tilt_beta", None),
            "{}.Microscope Info.Stage Position.Stage X".format(tags_path): (
                "Acquisition_instrument.TEM.Stage.x", lambda x: x * 1e-3),
            "{}.Microscope Info.Stage Position.Stage Y".format(tags_path): (
                "Acquisition_instrument.TEM.Stage.y", lambda x: x * 1e-3),
            "{}.Microscope Info.Stage Position.Stage Z".format(tags_path): (
                "Acquisition_instrument.TEM.Stage.z", lambda x: x * 1e-3),
            "{}.Microscope Info.Illumination Mode".format(tags_path): (
                "Acquisition_instrument.TEM.acquisition_mode", self._get_mode),
            "{}.Microscope Info.Probe Current (nA)".format(tags_path): (
                "Acquisition_instrument.TEM.beam_current", None),
            "{}.Session Info.Operator".format(tags_path): (
                "General.authors", self._parse_string),
            "{}.Session Info.Specimen".format(tags_path): (
                "Sample.description", self._parse_string),
        }

        if "Microscope_Info" in image_tags_dict.keys():
            is_TEM = is_diffraction = None
            if "Illumination_Mode" in image_tags_dict['Microscope_Info'].keys(
            ):
                is_TEM = (
                    'TEM' == image_tags_dict.Microscope_Info.Illumination_Mode)
            if "Imaging_Mode" in image_tags_dict['Microscope_Info'].keys():
                is_diffraction = (
                    'DIFFRACTION' == image_tags_dict.Microscope_Info.Imaging_Mode)

            if is_TEM:
                if is_diffraction:
                    mapping.update({
                        "{}.Microscope Info.Indicated Magnification".format(tags_path): (
                            "Acquisition_instrument.TEM.camera_length",
                            None),
                    })
                else:
                    mapping.update({
                        "{}.Microscope Info.Indicated Magnification".format(tags_path): (
                            "Acquisition_instrument.TEM.magnification",
                            None),
                    })
            else:
                mapping.update({
                    "{}.Microscope Info.STEM Camera Length".format(tags_path): (
                        "Acquisition_instrument.TEM.camera_length",
                        None),
                    "{}.Microscope Info.Indicated Magnification".format(tags_path): (
                        "Acquisition_instrument.TEM.magnification",
                        None),
                })

            mapping.update({
                tags_path: (
                    "Acquisition_instrument.TEM.microscope",
                    self._get_microscope_name),
            })

        if self.signal_type == "EELS":
            if is_scanning:
                mapped_attribute = 'dwell_time'
            else:
                mapped_attribute = 'exposure'
            mapping.update({
                "{}.EELS.Acquisition.Date".format(tags_path): (
                    "General.date",
                    self._get_date),
                "{}.EELS.Acquisition.Start time".format(tags_path): (
                    "General.time",
                    self._get_time),
                "{}.EELS.Experimental Conditions.".format(tags_path) +
                "Collection semi-angle (mrad)": (
                    "Acquisition_instrument.TEM.Detector.EELS.collection_angle",
                    None),
                "{}.EELS.Experimental Conditions.".format(tags_path) +
                "Convergence semi-angle (mrad)": (
                    "Acquisition_instrument.TEM.convergence_angle",
                    None),
                "{}.EELS.Acquisition".format(tags_path): (
                    "Acquisition_instrument.TEM.Detector.EELS.%s" % mapped_attribute,
                    self._get_EELS_exposure_time),
                "{}.EELS.Acquisition.Number_of_frames".format(tags_path): (
                    "Acquisition_instrument.TEM.Detector.EELS.frame_number",
                    None),
                "{}.EELS_Spectrometer.Aperture_label".format(tags_path): (
                    "Acquisition_instrument.TEM.Detector.EELS.aperture_size",
                    lambda string: float(string.replace('mm', ''))),
                "{}.EELS Spectrometer.Instrument name".format(tags_path): (
                    "Acquisition_instrument.TEM.Detector.EELS.spectrometer",
                    None),
            })
        elif self.signal_type == "EDS_TEM":
            mapping.update({
                "{}.EDS.Acquisition.Date".format(tags_path): (
                    "General.date",
                    self._get_date),
                "{}.EDS.Acquisition.Start time".format(tags_path): (
                    "General.time",
                    self._get_time),
                "{}.EDS.Detector_Info.Azimuthal_angle".format(tags_path): (
                    "Acquisition_instrument.TEM.Detector.EDS.azimuth_angle",
                    None),
                "{}.EDS.Detector_Info.Elevation_angle".format(tags_path): (
                    "Acquisition_instrument.TEM.Detector.EDS.elevation_angle",
                    None),
                "{}.EDS.Solid_angle".format(tags_path): (
                    "Acquisition_instrument.TEM.Detector.EDS.solid_angle",
                    None),
                "{}.EDS.Live_time".format(tags_path): (
                    "Acquisition_instrument.TEM.Detector.EDS.live_time",
                    None),
                "{}.EDS.Real_time".format(tags_path): (
                    "Acquisition_instrument.TEM.Detector.EDS.real_time",
                    None),
            })
        elif "DigiScan" in image_tags_dict.keys():
            mapping.update({
                "{}.DigiScan.Sample Time".format(tags_path): (
                    "Acquisition_instrument.TEM.dwell_time",
                    lambda x: x / 1e6),
            })
        else:
            mapping.update({
                "{}.Acquisition.Parameters.Detector.".format(tags_path) +
                "exposure_s": (
                    "Acquisition_instrument.TEM.Camera.exposure",
                    None),
            })
        mapping.update({
            "ImageList.TagGroup0.ImageData.Calibrations.Brightness.Units": (
                "Signal.quantity",
                self._get_quantity),
            "ImageList.TagGroup0.ImageData.Calibrations.Brightness.Scale": (
                "Signal.Noise_properties.Variance_linear_model.gain_factor",
                None),
            "ImageList.TagGroup0.ImageData.Calibrations.Brightness.Origin": (
                "Signal.Noise_properties.Variance_linear_model.gain_offset",
                None),
        })
        return mapping
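
For context, a mapping like the one returned by get_mapping() pairs a dotted
path in the original metadata with a (destination_path, converter) tuple.
The following is a minimal sketch of how such a table is typically consumed
(our own illustration, not HyperSpy's actual loader code; underscored keys
are used for simplicity, whereas real DM tag names often contain spaces):

from hyperspy.misc.utils import DictionaryTreeBrowser

def apply_mapping_sketch(original_metadata, mapping):
    om = DictionaryTreeBrowser(original_metadata)
    md = DictionaryTreeBrowser()
    for source_path, (dest_path, convert) in mapping.items():
        if om.has_item(source_path):
            value = om.get_item(source_path)
            md.set_item(dest_path, convert(value) if convert else value)
    return md

om = {'ImageTags': {'Microscope_Info': {'Voltage': 200000.0}}}
mapping = {'ImageTags.Microscope_Info.Voltage': (
    'Acquisition_instrument.TEM.beam_energy', lambda x: x / 1e3)}
md = apply_mapping_sketch(om, mapping)
print(md.Acquisition_instrument.TEM.beam_energy)  # 200.0 (keV)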
Example #44
def create_artificial_samfire(shape):
    artificial_samfire = DictionaryTreeBrowser()
    artificial_samfire.add_node('running_pixels')
    artificial_samfire.running_pixels = []
    artificial_samfire.add_node('model')
    artificial_samfire.add_node('metadata')
    artificial_samfire.metadata.add_node('marker')
    artificial_samfire.metadata.marker = np.zeros(shape)
    artificial_samfire.add_node('_scale')
    artificial_samfire._scale = 1.0
    return artificial_samfire
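
Usage sketch: the stub above mimics just enough of the SAMFire attribute
surface (running_pixels, metadata.marker, _scale) for strategy unit tests.

samf = create_artificial_samfire((5, 7))
samf.metadata.marker[2, 3] = samf._scale   # flag one pixel as "interesting"
print(samf.metadata.marker.max())          # 1.0
print(samf.running_pixels)                 # []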