def hdfgroup2signaldict(group):
    """Convert an HDF5 experiment group into a signal dictionary.

    Parameters
    ----------
    group : h5py.Group
        Group following the HyperSpy file specification: a 'data'
        dataset, one 'axis-%i' subgroup per data dimension, and
        'mapped_parameters'/'original_parameters' subgroups.

    Returns
    -------
    dict
        Keys: 'data', 'axes', 'mapped_parameters',
        'original_parameters' and 'attributes' (learning results,
        when present in the file).

    Raises
    ------
    IOError
        If an expected 'axis-%i' subgroup is missing.
    """
    exp = {}
    # Read the whole dataset into memory.
    exp['data'] = group['data'][:]
    axes = []
    # One axis group is expected per data dimension.
    # (range/items instead of the py2-only xrange/iteritems so the
    # function also runs on Python 3.)
    for i in range(len(exp['data'].shape)):
        try:
            axes.append(dict(group['axis-%i' % i].attrs))
        except KeyError:
            raise IOError(not_valid_format)
    # HDF5 attributes may come back as bytes; normalise to unicode.
    for axis in axes:
        for key, item in axis.items():
            axis[key] = ensure_unicode(item)
    exp['mapped_parameters'] = hdfgroup2dict(
        group['mapped_parameters'], {})
    exp['original_parameters'] = hdfgroup2dict(
        group['original_parameters'], {})
    exp['axes'] = axes
    exp['attributes'] = {}
    if 'learning_results' in group.keys():
        exp['attributes']['learning_results'] = \
            hdfgroup2dict(group['learning_results'], {})
    if 'peak_learning_results' in group.keys():
        exp['attributes']['peak_learning_results'] = \
            hdfgroup2dict(group['peak_learning_results'], {})
    # Load the decomposition results written with the old name,
    # mva_results
    if 'mva_results' in group.keys():
        exp['attributes']['learning_results'] = hdfgroup2dict(
            group['mva_results'], {})
    if 'peak_mva_results' in group.keys():
        exp['attributes']['peak_learning_results'] = hdfgroup2dict(
            group['peak_mva_results'], {})
    # Replace the old signal and name keys with their current names
    if 'signal' in exp['mapped_parameters']:
        exp['mapped_parameters']['signal_type'] = \
            exp['mapped_parameters']['signal']
        del exp['mapped_parameters']['signal']
    if 'name' in exp['mapped_parameters']:
        exp['mapped_parameters']['title'] = \
            exp['mapped_parameters']['name']
        del exp['mapped_parameters']['name']
    # If the title was not defined on writing, the Experiment is called
    # __unnamed__; reset it to the empty string.  Bug fix: use .get()
    # so files that carry no 'title' (and no legacy 'name') key do not
    # raise KeyError here — later revisions of this reader guard the
    # same access.
    if exp['mapped_parameters'].get('title') == '__unnamed__':
        exp['mapped_parameters']['title'] = ''
    return exp
def hdfgroup2signaldict(group):
    """Convert an HDF5 experiment group into a signal dictionary.

    Parameters
    ----------
    group : h5py.Group
        Group following the HyperSpy file specification: a 'data'
        dataset, one 'axis-%i' subgroup per data dimension, and
        'mapped_parameters'/'original_parameters' subgroups.

    Returns
    -------
    dict
        Keys: 'data', 'axes', 'mapped_parameters',
        'original_parameters' and 'attributes' (learning results,
        when present in the file).

    Raises
    ------
    IOError
        If an expected 'axis-%i' subgroup is missing.
    """
    exp = {}
    # Read the whole dataset into memory.
    exp['data'] = group['data'][:]
    axes = []
    # One axis group is expected per data dimension.
    # (range/items instead of the py2-only xrange/iteritems so the
    # function also runs on Python 3.)
    for i in range(len(exp['data'].shape)):
        try:
            axes.append(dict(group['axis-%i' % i].attrs))
        except KeyError:
            raise IOError(not_valid_format)
    # HDF5 attributes may come back as bytes; normalise to unicode.
    for axis in axes:
        for key, item in axis.items():
            axis[key] = ensure_unicode(item)
    exp['mapped_parameters'] = hdfgroup2dict(group['mapped_parameters'], {})
    exp['original_parameters'] = hdfgroup2dict(group['original_parameters'], {})
    exp['axes'] = axes
    exp['attributes'] = {}
    if 'learning_results' in group.keys():
        exp['attributes']['learning_results'] = \
            hdfgroup2dict(group['learning_results'], {})
    if 'peak_learning_results' in group.keys():
        exp['attributes']['peak_learning_results'] = \
            hdfgroup2dict(group['peak_learning_results'], {})
    # Load the decomposition results written with the old name,
    # mva_results
    if 'mva_results' in group.keys():
        exp['attributes']['learning_results'] = hdfgroup2dict(
            group['mva_results'], {})
    if 'peak_mva_results' in group.keys():
        exp['attributes']['peak_learning_results'] = hdfgroup2dict(
            group['peak_mva_results'], {})
    # Replace the old signal and name keys with their current names
    if 'signal' in exp['mapped_parameters']:
        exp['mapped_parameters']['signal_type'] = \
            exp['mapped_parameters']['signal']
        del exp['mapped_parameters']['signal']
    if 'name' in exp['mapped_parameters']:
        exp['mapped_parameters']['title'] = \
            exp['mapped_parameters']['name']
        del exp['mapped_parameters']['name']
    # If the title was not defined on writing, the Experiment is called
    # __unnamed__; reset it to the empty string.  Bug fix: use .get()
    # so files that carry no 'title' (and no legacy 'name') key do not
    # raise KeyError here — later revisions of this reader guard the
    # same access.
    if exp['mapped_parameters'].get('title') == '__unnamed__':
        exp['mapped_parameters']['title'] = ''
    return exp
def hdfgroup2signaldict(group, load_to_memory=True):
    """Convert an HDF5 experiment group into a signal dictionary,
    migrating metadata written by older file-format versions to the
    current tree layout.

    Parameters
    ----------
    group : h5py.Group
        Group following the HyperSpy file specification.
    load_to_memory : bool
        If True, read the dataset into a numpy array; otherwise keep
        the file-backed dataset object.

    Returns
    -------
    dict
        Keys: 'data', 'axes', 'metadata', 'original_metadata' and
        'attributes'.

    Raises
    ------
    IOError
        If the axes can be recovered neither from 'axis-%i' groups nor
        from a '_list_N_axes' group.
    """
    global current_file_version
    # NOTE(review): default_version is declared but not assigned or read
    # in this function — presumably kept for parity with the writer;
    # confirm before removing.
    global default_version
    # Before file-format version 1.2 the two metadata trees were stored
    # under different group names.
    if current_file_version < LooseVersion("1.2"):
        metadata = "mapped_parameters"
        original_metadata = "original_parameters"
    else:
        metadata = "metadata"
        original_metadata = "original_metadata"
    exp = {
        'metadata': hdfgroup2dict(group[metadata],
                                  load_to_memory=load_to_memory),
        'original_metadata': hdfgroup2dict(group[original_metadata],
                                           load_to_memory=load_to_memory)
    }
    data = group['data']
    if load_to_memory:
        data = np.asanyarray(data)
    exp['data'] = data
    # Recover the axes: one 'axis-%i' group per data dimension.
    axes = []
    for i in range(len(exp['data'].shape)):
        try:
            axes.append(dict(group['axis-%i' % i].attrs))
            axis = axes[-1]
            for key, item in axis.items():
                # h5py hands back numpy scalars; normalise bools and
                # byte-strings to plain Python types.
                if isinstance(item, np.bool_):
                    axis[key] = bool(item)
                else:
                    axis[key] = ensure_unicode(item)
        except KeyError:
            break
    if len(axes) != len(exp['data'].shape):
        # broke from the previous loop: fall back to the '_list_N_axes'
        # representation, where all axes live in a single nested group
        # keyed by index.
        try:
            axes = [
                i for k, i in sorted(
                    iter(
                        hdfgroup2dict(group['_list_' +
                                            str(len(exp['data'].shape)) +
                                            '_axes'],
                                      load_to_memory=load_to_memory).items()))
            ]
        except KeyError:
            raise IOError(not_valid_format)
    exp['axes'] = axes
    exp['attributes'] = {}
    if 'learning_results' in group.keys():
        exp['attributes']['learning_results'] = \
            hdfgroup2dict(
                group['learning_results'], load_to_memory=load_to_memory)
    if 'peak_learning_results' in group.keys():
        exp['attributes']['peak_learning_results'] = \
            hdfgroup2dict(
                group['peak_learning_results'], load_to_memory=load_to_memory)
    # If the title was not defined on writing the Experiment is
    # then called __unnamed__. The next "if" simply sets the title
    # back to the empty string
    if "General" in exp["metadata"] and "title" in exp["metadata"]["General"]:
        if '__unnamed__' == exp['metadata']['General']['title']:
            exp['metadata']["General"]['title'] = ''
    if current_file_version < LooseVersion("1.1"):
        # Load the decomposition results written with the old name,
        # mva_results
        if 'mva_results' in group.keys():
            exp['attributes']['learning_results'] = hdfgroup2dict(
                group['mva_results'], load_to_memory=load_to_memory)
        if 'peak_mva_results' in group.keys():
            exp['attributes']['peak_learning_results'] = hdfgroup2dict(
                group['peak_mva_results'], load_to_memory=load_to_memory)
        # Replace the old signal and name keys with their current names
        if 'signal' in exp['metadata']:
            if "Signal" not in exp["metadata"]:
                exp["metadata"]["Signal"] = {}
            exp['metadata']["Signal"]['signal_type'] = \
                exp['metadata']['signal']
            del exp['metadata']['signal']
        if 'name' in exp['metadata']:
            if "General" not in exp["metadata"]:
                exp["metadata"]["General"] = {}
            exp['metadata']['General']['title'] = \
                exp['metadata']['name']
            del exp['metadata']['name']
    if current_file_version < LooseVersion("1.2"):
        # v1.2 restructured the metadata into nested nodes; move each
        # legacy flat key into its new home, deleting the old one.
        if '_internal_parameters' in exp['metadata']:
            exp['metadata']['_HyperSpy'] = \
                exp['metadata']['_internal_parameters']
            del exp['metadata']['_internal_parameters']
            if 'stacking_history' in exp['metadata']['_HyperSpy']:
                exp['metadata']['_HyperSpy']["Stacking_history"] = \
                    exp['metadata']['_HyperSpy']['stacking_history']
                del exp['metadata']['_HyperSpy']["stacking_history"]
            if 'folding' in exp['metadata']['_HyperSpy']:
                exp['metadata']['_HyperSpy']["Folding"] = \
                    exp['metadata']['_HyperSpy']['folding']
                del exp['metadata']['_HyperSpy']["folding"]
        if 'Variance_estimation' in exp['metadata']:
            if "Noise_properties" not in exp["metadata"]:
                exp["metadata"]["Noise_properties"] = {}
            exp['metadata']['Noise_properties']["Variance_linear_model"] = \
                exp['metadata']['Variance_estimation']
            del exp['metadata']['Variance_estimation']
        if "TEM" in exp["metadata"]:
            # Legacy top-level TEM node moves under Acquisition_instrument.
            if "Acquisition_instrument" not in exp["metadata"]:
                exp["metadata"]["Acquisition_instrument"] = {}
            exp["metadata"]["Acquisition_instrument"]["TEM"] = \
                exp["metadata"]["TEM"]
            del exp["metadata"]["TEM"]
            tem = exp["metadata"]["Acquisition_instrument"]["TEM"]
            if "EELS" in tem:
                # Acquisition keys recorded beside EELS move inside it.
                if "dwell_time" in tem:
                    tem["EELS"]["dwell_time"] = tem["dwell_time"]
                    del tem["dwell_time"]
                if "dwell_time_units" in tem:
                    tem["EELS"]["dwell_time_units"] = tem["dwell_time_units"]
                    del tem["dwell_time_units"]
                if "exposure" in tem:
                    tem["EELS"]["exposure"] = tem["exposure"]
                    del tem["exposure"]
                if "exposure_units" in tem:
                    tem["EELS"]["exposure_units"] = tem["exposure_units"]
                    del tem["exposure_units"]
                if "Detector" not in tem:
                    tem["Detector"] = {}
                # NOTE(review): this replaces any existing "Detector"
                # node wholesale with the EELS dict (the empty-dict
                # guard above is immediately overwritten) — confirm
                # this matches the intended v1.2 schema migration.
                tem["Detector"] = tem["EELS"]
                del tem["EELS"]
            if "EDS" in tem:
                if "Detector" not in tem:
                    tem["Detector"] = {}
                if "EDS" not in tem["Detector"]:
                    tem["Detector"]["EDS"] = {}
                tem["Detector"]["EDS"] = tem["EDS"]
                del tem["EDS"]
            del tem
        if "SEM" in exp["metadata"]:
            # Same relocation for the legacy SEM node.
            if "Acquisition_instrument" not in exp["metadata"]:
                exp["metadata"]["Acquisition_instrument"] = {}
            exp["metadata"]["Acquisition_instrument"]["SEM"] = \
                exp["metadata"]["SEM"]
            del exp["metadata"]["SEM"]
            sem = exp["metadata"]["Acquisition_instrument"]["SEM"]
            if "EDS" in sem:
                if "Detector" not in sem:
                    sem["Detector"] = {}
                if "EDS" not in sem["Detector"]:
                    sem["Detector"]["EDS"] = {}
                sem["Detector"]["EDS"] = sem["EDS"]
                del sem["EDS"]
            del sem
        if "Sample" in exp["metadata"] and "Xray_lines" in exp["metadata"][
                "Sample"]:
            exp["metadata"]["Sample"]["xray_lines"] = exp["metadata"][
                "Sample"]["Xray_lines"]
            del exp["metadata"]["Sample"]["Xray_lines"]
        # Remaining flat keys move under General / Signal respectively.
        for key in ["title", "date", "time", "original_filename"]:
            if key in exp["metadata"]:
                if "General" not in exp["metadata"]:
                    exp["metadata"]["General"] = {}
                exp["metadata"]["General"][key] = exp["metadata"][key]
                del exp["metadata"][key]
        for key in ["record_by", "signal_origin", "signal_type"]:
            if key in exp["metadata"]:
                if "Signal" not in exp["metadata"]:
                    exp["metadata"]["Signal"] = {}
                exp["metadata"]["Signal"][key] = exp["metadata"][key]
                del exp["metadata"][key]
    return exp
def hdfgroup2signaldict(group, load_to_memory=True):
    """Convert an HDF5 experiment group into a signal dictionary,
    migrating metadata written by older file-format versions to the
    current tree layout.

    Parameters
    ----------
    group : h5py.Group
        Group following the HyperSpy file specification.
    load_to_memory : bool
        If True, read the dataset into a numpy array; otherwise keep
        the file-backed dataset object.

    Returns
    -------
    dict
        Keys: 'data', 'axes', 'metadata', 'original_metadata' and
        'attributes'.

    Raises
    ------
    IOError
        If the axes can be recovered neither from 'axis-%i' groups nor
        from a '_list_N_axes' group.
    """
    global current_file_version
    # NOTE(review): default_version is declared but not assigned or read
    # in this function — presumably kept for parity with the writer;
    # confirm before removing.
    global default_version
    # Before file-format version 1.2 the two metadata trees were stored
    # under different group names.
    # NOTE(review): StrictVersion only accepts strictly PEP 386-style
    # version strings; if file versions can carry suffixes (e.g. dev
    # builds), the comparison would raise — confirm against the writer.
    if current_file_version < StrictVersion("1.2"):
        metadata = "mapped_parameters"
        original_metadata = "original_parameters"
    else:
        metadata = "metadata"
        original_metadata = "original_metadata"
    exp = {'metadata': hdfgroup2dict(
        group[metadata], load_to_memory=load_to_memory),
        'original_metadata': hdfgroup2dict(
            group[original_metadata], load_to_memory=load_to_memory)
    }
    data = group['data']
    if load_to_memory:
        data = np.asanyarray(data)
    exp['data'] = data
    # Recover the axes: one 'axis-%i' group per data dimension.
    axes = []
    for i in range(len(exp['data'].shape)):
        try:
            axes.append(dict(group['axis-%i' % i].attrs))
            axis = axes[-1]
            for key, item in axis.items():
                # h5py hands back numpy scalars; normalise bools and
                # byte-strings to plain Python types.
                if isinstance(item, np.bool_):
                    axis[key] = bool(item)
                else:
                    axis[key] = ensure_unicode(item)
        except KeyError:
            break
    if len(axes) != len(exp['data'].shape):
        # broke from the previous loop: fall back to the '_list_N_axes'
        # representation, where all axes live in a single nested group
        # keyed by index.
        try:
            axes = [i for k, i in sorted(iter(hdfgroup2dict(
                group['_list_' + str(len(exp['data'].shape)) + '_axes'],
                load_to_memory=load_to_memory).items()))]
        except KeyError:
            raise IOError(not_valid_format)
    exp['axes'] = axes
    exp['attributes'] = {}
    if 'learning_results' in group.keys():
        exp['attributes']['learning_results'] = \
            hdfgroup2dict(
                group['learning_results'], load_to_memory=load_to_memory)
    if 'peak_learning_results' in group.keys():
        exp['attributes']['peak_learning_results'] = \
            hdfgroup2dict(
                group['peak_learning_results'], load_to_memory=load_to_memory)
    # If the title was not defined on writing the Experiment is
    # then called __unnamed__. The next "if" simply sets the title
    # back to the empty string
    if "General" in exp["metadata"] and "title" in exp["metadata"]["General"]:
        if '__unnamed__' == exp['metadata']['General']['title']:
            exp['metadata']["General"]['title'] = ''
    if current_file_version < StrictVersion("1.1"):
        # Load the decomposition results written with the old name,
        # mva_results
        if 'mva_results' in group.keys():
            exp['attributes']['learning_results'] = hdfgroup2dict(
                group['mva_results'], load_to_memory=load_to_memory)
        if 'peak_mva_results' in group.keys():
            exp['attributes']['peak_learning_results'] = hdfgroup2dict(
                group['peak_mva_results'], load_to_memory=load_to_memory)
        # Replace the old signal and name keys with their current names
        if 'signal' in exp['metadata']:
            if "Signal" not in exp["metadata"]:
                exp["metadata"]["Signal"] = {}
            exp['metadata']["Signal"]['signal_type'] = \
                exp['metadata']['signal']
            del exp['metadata']['signal']
        if 'name' in exp['metadata']:
            if "General" not in exp["metadata"]:
                exp["metadata"]["General"] = {}
            exp['metadata']['General']['title'] = \
                exp['metadata']['name']
            del exp['metadata']['name']
    if current_file_version < StrictVersion("1.2"):
        # v1.2 restructured the metadata into nested nodes; move each
        # legacy flat key into its new home, deleting the old one.
        if '_internal_parameters' in exp['metadata']:
            exp['metadata']['_HyperSpy'] = \
                exp['metadata']['_internal_parameters']
            del exp['metadata']['_internal_parameters']
            if 'stacking_history' in exp['metadata']['_HyperSpy']:
                exp['metadata']['_HyperSpy']["Stacking_history"] = \
                    exp['metadata']['_HyperSpy']['stacking_history']
                del exp['metadata']['_HyperSpy']["stacking_history"]
            if 'folding' in exp['metadata']['_HyperSpy']:
                exp['metadata']['_HyperSpy']["Folding"] = \
                    exp['metadata']['_HyperSpy']['folding']
                del exp['metadata']['_HyperSpy']["folding"]
        if 'Variance_estimation' in exp['metadata']:
            if "Noise_properties" not in exp["metadata"]:
                exp["metadata"]["Noise_properties"] = {}
            exp['metadata']['Noise_properties']["Variance_linear_model"] = \
                exp['metadata']['Variance_estimation']
            del exp['metadata']['Variance_estimation']
        if "TEM" in exp["metadata"]:
            # Legacy top-level TEM node moves under Acquisition_instrument.
            if "Acquisition_instrument" not in exp["metadata"]:
                exp["metadata"]["Acquisition_instrument"] = {}
            exp["metadata"]["Acquisition_instrument"]["TEM"] = \
                exp["metadata"]["TEM"]
            del exp["metadata"]["TEM"]
            tem = exp["metadata"]["Acquisition_instrument"]["TEM"]
            if "EELS" in tem:
                # Acquisition keys recorded beside EELS move inside it.
                if "dwell_time" in tem:
                    tem["EELS"]["dwell_time"] = tem["dwell_time"]
                    del tem["dwell_time"]
                if "dwell_time_units" in tem:
                    tem["EELS"]["dwell_time_units"] = tem["dwell_time_units"]
                    del tem["dwell_time_units"]
                if "exposure" in tem:
                    tem["EELS"]["exposure"] = tem["exposure"]
                    del tem["exposure"]
                if "exposure_units" in tem:
                    tem["EELS"]["exposure_units"] = tem["exposure_units"]
                    del tem["exposure_units"]
                if "Detector" not in tem:
                    tem["Detector"] = {}
                # NOTE(review): this replaces any existing "Detector"
                # node wholesale with the EELS dict (the empty-dict
                # guard above is immediately overwritten) — confirm
                # this matches the intended v1.2 schema migration.
                tem["Detector"] = tem["EELS"]
                del tem["EELS"]
            if "EDS" in tem:
                if "Detector" not in tem:
                    tem["Detector"] = {}
                if "EDS" not in tem["Detector"]:
                    tem["Detector"]["EDS"] = {}
                tem["Detector"]["EDS"] = tem["EDS"]
                del tem["EDS"]
            del tem
        if "SEM" in exp["metadata"]:
            # Same relocation for the legacy SEM node.
            if "Acquisition_instrument" not in exp["metadata"]:
                exp["metadata"]["Acquisition_instrument"] = {}
            exp["metadata"]["Acquisition_instrument"]["SEM"] = \
                exp["metadata"]["SEM"]
            del exp["metadata"]["SEM"]
            sem = exp["metadata"]["Acquisition_instrument"]["SEM"]
            if "EDS" in sem:
                if "Detector" not in sem:
                    sem["Detector"] = {}
                if "EDS" not in sem["Detector"]:
                    sem["Detector"]["EDS"] = {}
                sem["Detector"]["EDS"] = sem["EDS"]
                del sem["EDS"]
            del sem
        if "Sample" in exp["metadata"] and "Xray_lines" in exp[
                "metadata"]["Sample"]:
            exp["metadata"]["Sample"]["xray_lines"] = exp[
                "metadata"]["Sample"]["Xray_lines"]
            del exp["metadata"]["Sample"]["Xray_lines"]
        # Remaining flat keys move under General / Signal respectively.
        for key in ["title", "date", "time", "original_filename"]:
            if key in exp["metadata"]:
                if "General" not in exp["metadata"]:
                    exp["metadata"]["General"] = {}
                exp["metadata"]["General"][key] = exp["metadata"][key]
                del exp["metadata"][key]
        for key in ["record_by", "signal_origin", "signal_type"]:
            if key in exp["metadata"]:
                if "Signal" not in exp["metadata"]:
                    exp["metadata"]["Signal"] = {}
                exp["metadata"]["Signal"][key] = exp["metadata"][key]
                del exp["metadata"][key]
    return exp
def hdfgroup2signaldict(group, lazy=False):
    """Convert an HDF5 experiment group into a signal dictionary,
    migrating metadata written by older file-format versions to the
    current tree layout.

    Parameters
    ----------
    group : h5py.Group
        Group following the HyperSpy file specification.
    lazy : bool
        If True, wrap the dataset in a dask array (data stays on disk)
        and mark the signal lazy; otherwise load it into a numpy array.

    Returns
    -------
    dict
        Keys: 'data', 'axes', 'metadata', 'original_metadata',
        'attributes', 'package' and 'package_version'.

    Raises
    ------
    IOError
        If the axes can be recovered neither from 'axis-%i' groups nor
        from a '_list_N_axes' group.
    """
    global current_file_version
    # NOTE(review): default_version is declared but not assigned or read
    # in this function — presumably kept for parity with the writer;
    # confirm before removing.
    global default_version
    # Before file-format version 1.2 the two metadata trees were stored
    # under different group names.
    if current_file_version < LooseVersion("1.2"):
        metadata = "mapped_parameters"
        original_metadata = "original_parameters"
    else:
        metadata = "metadata"
        original_metadata = "original_metadata"
    exp = {
        'metadata': hdfgroup2dict(group[metadata], lazy=lazy),
        'original_metadata': hdfgroup2dict(group[original_metadata],
                                           lazy=lazy),
        'attributes': {}
    }
    if "package" in group.attrs:
        # HyperSpy version is >= 1.5
        exp["package"] = group.attrs["package"]
        exp["package_version"] = group.attrs["package_version"]
    else:
        # Prior to v1.4 we didn't store the package information. Since there
        # were already external package we cannot assume any package provider
        # so we leave this empty.
        exp["package"] = ""
        exp["package_version"] = ""
    data = group['data']
    if lazy:
        # Keep the data on disk; chunk the dask array like the dataset.
        data = da.from_array(data, chunks=data.chunks)
        exp['attributes']['_lazy'] = True
    else:
        data = np.asanyarray(data)
    exp['data'] = data
    # Recover the axes: one 'axis-%i' group per data dimension.
    axes = []
    for i in range(len(exp['data'].shape)):
        try:
            axes.append(dict(group['axis-%i' % i].attrs))
            axis = axes[-1]
            for key, item in axis.items():
                # h5py hands back numpy scalars; normalise bools and
                # byte-strings to plain Python types.
                if isinstance(item, np.bool_):
                    axis[key] = bool(item)
                else:
                    axis[key] = ensure_unicode(item)
        except KeyError:
            break
    if len(axes) != len(exp['data'].shape):
        # broke from the previous loop: fall back to the '_list_N_axes'
        # representation, where all axes live in a single nested group
        # keyed by index.
        try:
            axes = [
                i for k, i in sorted(
                    iter(
                        hdfgroup2dict(group['_list_' +
                                            str(len(exp['data'].shape)) +
                                            '_axes'],
                                      lazy=lazy).items()))
            ]
        except KeyError:
            raise IOError(not_valid_format)
    exp['axes'] = axes
    if 'learning_results' in group.keys():
        exp['attributes']['learning_results'] = \
            hdfgroup2dict(
                group['learning_results'], lazy=lazy)
    if 'peak_learning_results' in group.keys():
        exp['attributes']['peak_learning_results'] = \
            hdfgroup2dict(
                group['peak_learning_results'], lazy=lazy)
    # If the title was not defined on writing the Experiment is
    # then called __unnamed__. The next "if" simply sets the title
    # back to the empty string
    if "General" in exp["metadata"] and "title" in exp["metadata"]["General"]:
        if '__unnamed__' == exp['metadata']['General']['title']:
            exp['metadata']["General"]['title'] = ''
    if current_file_version < LooseVersion("1.1"):
        # Load the decomposition results written with the old name,
        # mva_results
        if 'mva_results' in group.keys():
            exp['attributes']['learning_results'] = hdfgroup2dict(
                group['mva_results'], lazy=lazy)
        if 'peak_mva_results' in group.keys():
            exp['attributes']['peak_learning_results'] = hdfgroup2dict(
                group['peak_mva_results'], lazy=lazy)
        # Replace the old signal and name keys with their current names
        if 'signal' in exp['metadata']:
            if "Signal" not in exp["metadata"]:
                exp["metadata"]["Signal"] = {}
            exp['metadata']["Signal"]['signal_type'] = \
                exp['metadata']['signal']
            del exp['metadata']['signal']
        if 'name' in exp['metadata']:
            if "General" not in exp["metadata"]:
                exp["metadata"]["General"] = {}
            exp['metadata']['General']['title'] = \
                exp['metadata']['name']
            del exp['metadata']['name']
    if current_file_version < LooseVersion("1.2"):
        # v1.2 restructured the metadata into nested nodes; move each
        # legacy flat key into its new home, deleting the old one.
        if '_internal_parameters' in exp['metadata']:
            exp['metadata']['_HyperSpy'] = \
                exp['metadata']['_internal_parameters']
            del exp['metadata']['_internal_parameters']
            if 'stacking_history' in exp['metadata']['_HyperSpy']:
                exp['metadata']['_HyperSpy']["Stacking_history"] = \
                    exp['metadata']['_HyperSpy']['stacking_history']
                del exp['metadata']['_HyperSpy']["stacking_history"]
            if 'folding' in exp['metadata']['_HyperSpy']:
                exp['metadata']['_HyperSpy']["Folding"] = \
                    exp['metadata']['_HyperSpy']['folding']
                del exp['metadata']['_HyperSpy']["folding"]
        if 'Variance_estimation' in exp['metadata']:
            if "Noise_properties" not in exp["metadata"]:
                exp["metadata"]["Noise_properties"] = {}
            exp['metadata']['Noise_properties']["Variance_linear_model"] = \
                exp['metadata']['Variance_estimation']
            del exp['metadata']['Variance_estimation']
        if "TEM" in exp["metadata"]:
            # Legacy top-level TEM node moves under Acquisition_instrument.
            if "Acquisition_instrument" not in exp["metadata"]:
                exp["metadata"]["Acquisition_instrument"] = {}
            exp["metadata"]["Acquisition_instrument"]["TEM"] = \
                exp["metadata"]["TEM"]
            del exp["metadata"]["TEM"]
            tem = exp["metadata"]["Acquisition_instrument"]["TEM"]
            if "EELS" in tem:
                # Acquisition keys recorded beside EELS move inside it.
                if "dwell_time" in tem:
                    tem["EELS"]["dwell_time"] = tem["dwell_time"]
                    del tem["dwell_time"]
                if "dwell_time_units" in tem:
                    tem["EELS"]["dwell_time_units"] = tem["dwell_time_units"]
                    del tem["dwell_time_units"]
                if "exposure" in tem:
                    tem["EELS"]["exposure"] = tem["exposure"]
                    del tem["exposure"]
                if "exposure_units" in tem:
                    tem["EELS"]["exposure_units"] = tem["exposure_units"]
                    del tem["exposure_units"]
                if "Detector" not in tem:
                    tem["Detector"] = {}
                # NOTE(review): this replaces any existing "Detector"
                # node wholesale with the EELS dict (the empty-dict
                # guard above is immediately overwritten) — confirm
                # this matches the intended v1.2 schema migration.
                tem["Detector"] = tem["EELS"]
                del tem["EELS"]
            if "EDS" in tem:
                if "Detector" not in tem:
                    tem["Detector"] = {}
                if "EDS" not in tem["Detector"]:
                    tem["Detector"]["EDS"] = {}
                tem["Detector"]["EDS"] = tem["EDS"]
                del tem["EDS"]
            del tem
        if "SEM" in exp["metadata"]:
            # Same relocation for the legacy SEM node.
            if "Acquisition_instrument" not in exp["metadata"]:
                exp["metadata"]["Acquisition_instrument"] = {}
            exp["metadata"]["Acquisition_instrument"]["SEM"] = \
                exp["metadata"]["SEM"]
            del exp["metadata"]["SEM"]
            sem = exp["metadata"]["Acquisition_instrument"]["SEM"]
            if "EDS" in sem:
                if "Detector" not in sem:
                    sem["Detector"] = {}
                if "EDS" not in sem["Detector"]:
                    sem["Detector"]["EDS"] = {}
                sem["Detector"]["EDS"] = sem["EDS"]
                del sem["EDS"]
            del sem
        if "Sample" in exp["metadata"] and "Xray_lines" in exp["metadata"][
                "Sample"]:
            exp["metadata"]["Sample"]["xray_lines"] = exp["metadata"][
                "Sample"]["Xray_lines"]
            del exp["metadata"]["Sample"]["Xray_lines"]
        # Remaining flat keys move under General / Signal respectively.
        for key in ["title", "date", "time", "original_filename"]:
            if key in exp["metadata"]:
                if "General" not in exp["metadata"]:
                    exp["metadata"]["General"] = {}
                exp["metadata"]["General"][key] = exp["metadata"][key]
                del exp["metadata"][key]
        for key in ["record_by", "signal_origin", "signal_type"]:
            if key in exp["metadata"]:
                if "Signal" not in exp["metadata"]:
                    exp["metadata"]["Signal"] = {}
                exp["metadata"]["Signal"][key] = exp["metadata"][key]
                del exp["metadata"][key]
    if current_file_version < LooseVersion("3.0"):
        if "Acquisition_instrument" in exp["metadata"]:
            # Move tilt_stage to Stage.tilt_alpha
            # Move exposure time to Detector.Camera.exposure_time
            if "TEM" in exp["metadata"]["Acquisition_instrument"]:
                tem = exp["metadata"]["Acquisition_instrument"]["TEM"]
                exposure = None
                if "tilt_stage" in tem:
                    tem["Stage"] = {"tilt_alpha": tem["tilt_stage"]}
                    del tem["tilt_stage"]
                if "exposure" in tem:
                    exposure = "exposure"
                # Digital_micrograph plugin was parsing to 'exposure_time'
                # instead of 'exposure': need this to be compatible with
                # previous behaviour
                if "exposure_time" in tem:
                    exposure = "exposure_time"
                if exposure is not None:
                    # NOTE(review): the "Detector" branch is immediately
                    # shadowed by the unconditional Camera assignment on
                    # the next line — harmless, but worth confirming.
                    if "Detector" not in tem:
                        tem["Detector"] = {
                            "Camera": {
                                "exposure": tem[exposure]
                            }
                        }
                    tem["Detector"]["Camera"] = {"exposure": tem[exposure]}
                    del tem[exposure]
            # Move tilt_stage to Stage.tilt_alpha
            if "SEM" in exp["metadata"]["Acquisition_instrument"]:
                sem = exp["metadata"]["Acquisition_instrument"]["SEM"]
                if "tilt_stage" in sem:
                    sem["Stage"] = {"tilt_alpha": sem["tilt_stage"]}
                    del sem["tilt_stage"]
    return exp
def group2signaldict(self, group, lazy=False):
    """ Reads a h5py/zarr group and returns a signal dictionary.

    Parameters
    ----------
    group : :py:class:`h5py.Group` or :py:class:`zarr.hierarchy.Group`
        A group following hspy specification.
    lazy : bool, optional
        Return the data as dask array. The default is False.

    Returns
    -------
    dict
        Keys: 'data', 'axes', 'metadata', 'original_metadata',
        'attributes', 'package' and 'package_version'.  Metadata written
        by older file versions is migrated in place to the current tree
        layout.

    Raises
    ------
    IOError
        Raise an IOError when the group can't be read, if the group
        doesn't follow hspy format specification, etc.

    """
    # Before file-format version 1.2 the two metadata trees were stored
    # under different group names.
    if self.version < Version("1.2"):
        metadata = "mapped_parameters"
        original_metadata = "original_parameters"
    else:
        metadata = "metadata"
        original_metadata = "original_metadata"

    exp = {'metadata': self._group2dict(
        group[metadata], lazy=lazy),
        'original_metadata': self._group2dict(
            group[original_metadata], lazy=lazy),
        'attributes': {}
    }
    if "package" in group.attrs:
        # HyperSpy version is >= 1.5
        exp["package"] = group.attrs["package"]
        exp["package_version"] = group.attrs["package_version"]
    else:
        # Prior to v1.4 we didn't store the package information. Since there
        # were already external package we cannot assume any package provider
        # so we leave this empty.
        exp["package"] = ""
        exp["package_version"] = ""

    data = group['data']
    try:
        # Ragged signals store flattened rows in 'data' plus the original
        # per-item shapes in 'ragged_shapes'; rebuild an object array of
        # correctly shaped items.  Missing 'ragged_shapes' (KeyError)
        # means the data is regular — leave it untouched.
        ragged_shape = group["ragged_shapes"]
        new_data = np.empty(shape=data.shape, dtype=object)
        for i in np.ndindex(data.shape):
            new_data[i] = np.reshape(data[i], ragged_shape[i])
        data = new_data
    except KeyError:
        pass
    if lazy:
        # Keep the data on disk; chunk the dask array like the dataset.
        data = da.from_array(data, chunks=data.chunks)
        exp['attributes']['_lazy'] = True
    else:
        data = np.asanyarray(data)
    exp['data'] = data
    # Recover the axes: one 'axis-%i' group per data dimension.
    axes = []
    for i in range(len(exp['data'].shape)):
        try:
            axes.append(self._group2dict(group[f'axis-{i}']))
            axis = axes[-1]
            for key, item in axis.items():
                # Normalise numpy scalars to plain Python types.
                if isinstance(item, np.bool_):
                    axis[key] = bool(item)
                else:
                    axis[key] = ensure_unicode(item)
        except KeyError:
            break
    if len(axes) != len(exp['data'].shape):
        # broke from the previous loop: fall back to the '_list_N_axes'
        # representation, where all axes live in a single nested group
        # keyed by index.
        try:
            axes = [i for k, i in sorted(iter(self._group2dict(
                group['_list_' + str(len(exp['data'].shape)) + '_axes'],
                lazy=lazy).items()))]
        except KeyError:
            raise IOError(not_valid_format)
    exp['axes'] = axes
    if 'learning_results' in group.keys():
        exp['attributes']['learning_results'] = \
            self._group2dict(
                group['learning_results'], lazy=lazy)
    if 'peak_learning_results' in group.keys():
        exp['attributes']['peak_learning_results'] = \
            self._group2dict(
                group['peak_learning_results'], lazy=lazy)
    # If the title was not defined on writing the Experiment is
    # then called __unnamed__. The next "if" simply sets the title
    # back to the empty string
    if "General" in exp["metadata"] and "title" in exp["metadata"]["General"]:
        if '__unnamed__' == exp['metadata']['General']['title']:
            exp['metadata']["General"]['title'] = ''

    if self.version < Version("1.1"):
        # Load the decomposition results written with the old name,
        # mva_results
        if 'mva_results' in group.keys():
            exp['attributes']['learning_results'] = self._group2dict(
                group['mva_results'], lazy=lazy)
        if 'peak_mva_results' in group.keys():
            exp['attributes']['peak_learning_results'] = self._group2dict(
                group['peak_mva_results'], lazy=lazy)
        # Replace the old signal and name keys with their current names
        if 'signal' in exp['metadata']:
            if "Signal" not in exp["metadata"]:
                exp["metadata"]["Signal"] = {}
            exp['metadata']["Signal"]['signal_type'] = \
                exp['metadata']['signal']
            del exp['metadata']['signal']
        if 'name' in exp['metadata']:
            if "General" not in exp["metadata"]:
                exp["metadata"]["General"] = {}
            exp['metadata']['General']['title'] = \
                exp['metadata']['name']
            del exp['metadata']['name']

    if self.version < Version("1.2"):
        # v1.2 restructured the metadata into nested nodes; move each
        # legacy flat key into its new home, deleting the old one.
        if '_internal_parameters' in exp['metadata']:
            exp['metadata']['_HyperSpy'] = \
                exp['metadata']['_internal_parameters']
            del exp['metadata']['_internal_parameters']
            if 'stacking_history' in exp['metadata']['_HyperSpy']:
                exp['metadata']['_HyperSpy']["Stacking_history"] = \
                    exp['metadata']['_HyperSpy']['stacking_history']
                del exp['metadata']['_HyperSpy']["stacking_history"]
            if 'folding' in exp['metadata']['_HyperSpy']:
                exp['metadata']['_HyperSpy']["Folding"] = \
                    exp['metadata']['_HyperSpy']['folding']
                del exp['metadata']['_HyperSpy']["folding"]
        if 'Variance_estimation' in exp['metadata']:
            if "Noise_properties" not in exp["metadata"]:
                exp["metadata"]["Noise_properties"] = {}
            exp['metadata']['Noise_properties']["Variance_linear_model"] = \
                exp['metadata']['Variance_estimation']
            del exp['metadata']['Variance_estimation']
        if "TEM" in exp["metadata"]:
            # Legacy top-level TEM node moves under Acquisition_instrument.
            if "Acquisition_instrument" not in exp["metadata"]:
                exp["metadata"]["Acquisition_instrument"] = {}
            exp["metadata"]["Acquisition_instrument"]["TEM"] = \
                exp["metadata"]["TEM"]
            del exp["metadata"]["TEM"]
            tem = exp["metadata"]["Acquisition_instrument"]["TEM"]
            if "EELS" in tem:
                # Acquisition keys recorded beside EELS move inside it.
                if "dwell_time" in tem:
                    tem["EELS"]["dwell_time"] = tem["dwell_time"]
                    del tem["dwell_time"]
                if "dwell_time_units" in tem:
                    tem["EELS"]["dwell_time_units"] = tem["dwell_time_units"]
                    del tem["dwell_time_units"]
                if "exposure" in tem:
                    tem["EELS"]["exposure"] = tem["exposure"]
                    del tem["exposure"]
                if "exposure_units" in tem:
                    tem["EELS"]["exposure_units"] = tem["exposure_units"]
                    del tem["exposure_units"]
                if "Detector" not in tem:
                    tem["Detector"] = {}
                # NOTE(review): this replaces any existing "Detector"
                # node wholesale with the EELS dict (the empty-dict
                # guard above is immediately overwritten) — confirm
                # this matches the intended v1.2 schema migration.
                tem["Detector"] = tem["EELS"]
                del tem["EELS"]
            if "EDS" in tem:
                if "Detector" not in tem:
                    tem["Detector"] = {}
                if "EDS" not in tem["Detector"]:
                    tem["Detector"]["EDS"] = {}
                tem["Detector"]["EDS"] = tem["EDS"]
                del tem["EDS"]
            del tem
        if "SEM" in exp["metadata"]:
            # Same relocation for the legacy SEM node.
            if "Acquisition_instrument" not in exp["metadata"]:
                exp["metadata"]["Acquisition_instrument"] = {}
            exp["metadata"]["Acquisition_instrument"]["SEM"] = \
                exp["metadata"]["SEM"]
            del exp["metadata"]["SEM"]
            sem = exp["metadata"]["Acquisition_instrument"]["SEM"]
            if "EDS" in sem:
                if "Detector" not in sem:
                    sem["Detector"] = {}
                if "EDS" not in sem["Detector"]:
                    sem["Detector"]["EDS"] = {}
                sem["Detector"]["EDS"] = sem["EDS"]
                del sem["EDS"]
            del sem
        if "Sample" in exp["metadata"] and "Xray_lines" in exp[
                "metadata"]["Sample"]:
            exp["metadata"]["Sample"]["xray_lines"] = exp[
                "metadata"]["Sample"]["Xray_lines"]
            del exp["metadata"]["Sample"]["Xray_lines"]
        # Remaining flat keys move under General / Signal respectively.
        for key in ["title", "date", "time", "original_filename"]:
            if key in exp["metadata"]:
                if "General" not in exp["metadata"]:
                    exp["metadata"]["General"] = {}
                exp["metadata"]["General"][key] = exp["metadata"][key]
                del exp["metadata"][key]
        for key in ["record_by", "signal_origin", "signal_type"]:
            if key in exp["metadata"]:
                if "Signal" not in exp["metadata"]:
                    exp["metadata"]["Signal"] = {}
                exp["metadata"]["Signal"][key] = exp["metadata"][key]
                del exp["metadata"][key]

    if self.version < Version("3.0"):
        if "Acquisition_instrument" in exp["metadata"]:
            # Move tilt_stage to Stage.tilt_alpha
            # Move exposure time to Detector.Camera.exposure_time
            if "TEM" in exp["metadata"]["Acquisition_instrument"]:
                tem = exp["metadata"]["Acquisition_instrument"]["TEM"]
                exposure = None
                if "tilt_stage" in tem:
                    tem["Stage"] = {"tilt_alpha": tem["tilt_stage"]}
                    del tem["tilt_stage"]
                if "exposure" in tem:
                    exposure = "exposure"
                # Digital_micrograph plugin was parsing to 'exposure_time'
                # instead of 'exposure': need this to be compatible with
                # previous behaviour
                if "exposure_time" in tem:
                    exposure = "exposure_time"
                if exposure is not None:
                    # NOTE(review): the "Detector" branch is immediately
                    # shadowed by the unconditional Camera assignment on
                    # the next line — harmless, but worth confirming.
                    if "Detector" not in tem:
                        tem["Detector"] = {"Camera": {
                            "exposure": tem[exposure]}}
                    tem["Detector"]["Camera"] = {"exposure": tem[exposure]}
                    del tem[exposure]
            # Move tilt_stage to Stage.tilt_alpha
            if "SEM" in exp["metadata"]["Acquisition_instrument"]:
                sem = exp["metadata"]["Acquisition_instrument"]["SEM"]
                if "tilt_stage" in sem:
                    sem["Stage"] = {"tilt_alpha": sem["tilt_stage"]}
                    del sem["tilt_stage"]
    return exp
def hdfgroup2signaldict(group):
    """Convert one HDF5 experiment group into a signal dictionary.

    Reads the dataset, the per-dimension axis attributes, the metadata
    trees and any (peak) learning results from ``group``.  For files
    written with older format versions it also migrates the legacy flat
    metadata layout to the current tree structure.

    Parameters
    ----------
    group : h5py Group
        Group holding one experiment: a 'data' dataset, 'axis-%i'
        subgroups and the (original_)metadata groups.

    Returns
    -------
    dict
        With the keys 'data', 'axes', 'metadata', 'original_metadata'
        and 'attributes'.

    Raises
    ------
    IOError
        If one of the expected 'axis-%i' subgroups is missing.
    """
    global current_file_version
    global default_version
    # Before file-format 1.2 the metadata groups used the legacy names.
    if current_file_version < StrictVersion("1.2"):
        metadata = "mapped_parameters"
        original_metadata = "original_parameters"
    else:
        metadata = "metadata"
        original_metadata = "original_metadata"
    exp = {}
    exp['data'] = group['data'][:]
    axes = []
    for i in xrange(len(exp['data'].shape)):
        try:
            axes.append(dict(group['axis-%i' % i].attrs))
        except KeyError:
            raise IOError(not_valid_format)
    for axis in axes:
        for key, item in axis.iteritems():
            axis[key] = ensure_unicode(item)
    exp['metadata'] = hdfgroup2dict(
        group[metadata], {})
    exp['original_metadata'] = hdfgroup2dict(
        group[original_metadata], {})
    exp['axes'] = axes
    exp['attributes'] = {}
    if 'learning_results' in group.keys():
        exp['attributes']['learning_results'] = \
            hdfgroup2dict(group['learning_results'], {})
    if 'peak_learning_results' in group.keys():
        exp['attributes']['peak_learning_results'] = \
            hdfgroup2dict(group['peak_learning_results'], {})

    # If the title was not defined on writing the Experiment is
    # then called __unnamed__. The next "if" simply sets the title
    # back to the empty string.
    if "General" in exp["metadata"] and "title" in exp["metadata"]["General"]:
        if '__unnamed__' == exp['metadata']['General']['title']:
            exp['metadata']["General"]['title'] = ''

    if current_file_version < StrictVersion("1.1"):
        # Load the decomposition results written with the old name,
        # mva_results.
        if 'mva_results' in group.keys():
            exp['attributes']['learning_results'] = hdfgroup2dict(
                group['mva_results'], {})
        if 'peak_mva_results' in group.keys():
            exp['attributes']['peak_learning_results'] = hdfgroup2dict(
                group['peak_mva_results'], {})
        # Replace the old signal and name keys with their current names.
        if 'signal' in exp['metadata']:
            if "Signal" not in exp["metadata"]:
                exp["metadata"]["Signal"] = {}
            exp['metadata']["Signal"]['signal_type'] = \
                exp['metadata']['signal']
            del exp['metadata']['signal']
        if 'name' in exp['metadata']:
            if "General" not in exp["metadata"]:
                exp["metadata"]["General"] = {}
            exp['metadata']['General']['title'] = \
                exp['metadata']['name']
            del exp['metadata']['name']

    if current_file_version < StrictVersion("1.2"):
        if '_internal_parameters' in exp['metadata']:
            exp['metadata']['_HyperSpy'] = \
                exp['metadata']['_internal_parameters']
            del exp['metadata']['_internal_parameters']
            # These renames are only valid inside this branch:
            # '_HyperSpy' does not exist otherwise and indexing it
            # unconditionally would raise a KeyError.
            if 'stacking_history' in exp['metadata']['_HyperSpy']:
                exp['metadata']['_HyperSpy']["Stacking_history"] = \
                    exp['metadata']['_HyperSpy']['stacking_history']
                del exp['metadata']['_HyperSpy']["stacking_history"]
            if 'folding' in exp['metadata']['_HyperSpy']:
                exp['metadata']['_HyperSpy']["Folding"] = \
                    exp['metadata']['_HyperSpy']['folding']
                del exp['metadata']['_HyperSpy']["folding"]
        if 'Variance_estimation' in exp['metadata']:
            if "Noise_properties" not in exp["metadata"]:
                exp["metadata"]["Noise_properties"] = {}
            exp['metadata']['Noise_properties']["Variance_linear_model"] = \
                exp['metadata']['Variance_estimation']
            del exp['metadata']['Variance_estimation']
        if "TEM" in exp["metadata"]:
            # Move the flat TEM node under Acquisition_instrument.
            if "Acquisition_instrument" not in exp["metadata"]:
                exp["metadata"]["Acquisition_instrument"] = {}
            exp["metadata"]["Acquisition_instrument"]["TEM"] = \
                exp["metadata"]["TEM"]
            del exp["metadata"]["TEM"]
            tem = exp["metadata"]["Acquisition_instrument"]["TEM"]
            if "EELS" in tem:
                # Acquisition parameters stored at TEM level belong to
                # the EELS detector in the new tree.
                for key in ("dwell_time", "dwell_time_units",
                            "exposure", "exposure_units"):
                    if key in tem:
                        tem["EELS"][key] = tem[key]
                        del tem[key]
                if "Detector" not in tem:
                    tem["Detector"] = {}
                # FIX: nest the EELS node under Detector (mirroring the
                # EDS handling below).  Previously the whole Detector
                # dictionary was replaced by the EELS one, losing any
                # existing Detector content and flattening the tree.
                tem["Detector"]["EELS"] = tem["EELS"]
                del tem["EELS"]
            if "EDS" in tem:
                if "Detector" not in tem:
                    tem["Detector"] = {}
                tem["Detector"]["EDS"] = tem["EDS"]
                del tem["EDS"]
            del tem
        if "SEM" in exp["metadata"]:
            # Move the flat SEM node under Acquisition_instrument.
            if "Acquisition_instrument" not in exp["metadata"]:
                exp["metadata"]["Acquisition_instrument"] = {}
            exp["metadata"]["Acquisition_instrument"]["SEM"] = \
                exp["metadata"]["SEM"]
            del exp["metadata"]["SEM"]
            sem = exp["metadata"]["Acquisition_instrument"]["SEM"]
            if "EDS" in sem:
                if "Detector" not in sem:
                    sem["Detector"] = {}
                sem["Detector"]["EDS"] = sem["EDS"]
                del sem["EDS"]
            del sem
        # 'Xray_lines' was renamed to 'xray_lines'.
        if "Sample" in exp["metadata"] and "Xray_lines" in exp[
                "metadata"]["Sample"]:
            exp["metadata"]["Sample"]["xray_lines"] = exp[
                "metadata"]["Sample"]["Xray_lines"]
            del exp["metadata"]["Sample"]["Xray_lines"]
        # Move the remaining flat keys into the General and Signal nodes.
        for key in ["title", "date", "time", "original_filename"]:
            if key in exp["metadata"]:
                if "General" not in exp["metadata"]:
                    exp["metadata"]["General"] = {}
                exp["metadata"]["General"][key] = exp["metadata"][key]
                del exp["metadata"][key]
        for key in ["record_by", "signal_origin", "signal_type"]:
            if key in exp["metadata"]:
                if "Signal" not in exp["metadata"]:
                    exp["metadata"]["Signal"] = {}
                exp["metadata"]["Signal"][key] = exp["metadata"][key]
                del exp["metadata"][key]
    return exp
def hdfgroup2signaldict(group):
    """Build a signal dictionary from an HDF5 experiment group.

    Loads the dataset, axis attributes, metadata trees and learning
    results from ``group``; files saved by older format versions have
    their legacy flat metadata migrated to the current tree layout.

    Parameters
    ----------
    group : h5py Group
        Experiment group with a 'data' dataset, 'axis-%i' subgroups and
        the (original_)metadata groups.

    Returns
    -------
    dict
        With keys 'data', 'axes', 'metadata', 'original_metadata' and
        'attributes'.

    Raises
    ------
    IOError
        When an expected 'axis-%i' subgroup is missing.
    """
    global current_file_version
    global default_version
    # Legacy group names were used before file-format version 1.2.
    if current_file_version < StrictVersion("1.2"):
        metadata = "mapped_parameters"
        original_metadata = "original_parameters"
    else:
        metadata = "metadata"
        original_metadata = "original_metadata"
    exp = {}
    exp['data'] = group['data'][:]
    axes = []
    for i in xrange(len(exp['data'].shape)):
        try:
            axes.append(dict(group['axis-%i' % i].attrs))
        except KeyError:
            raise IOError(not_valid_format)
    for axis in axes:
        for key, item in axis.iteritems():
            axis[key] = ensure_unicode(item)
    exp['metadata'] = hdfgroup2dict(group[metadata], {})
    exp['original_metadata'] = hdfgroup2dict(group[original_metadata], {})
    exp['axes'] = axes
    exp['attributes'] = {}
    if 'learning_results' in group.keys():
        exp['attributes']['learning_results'] = \
            hdfgroup2dict(group['learning_results'], {})
    if 'peak_learning_results' in group.keys():
        exp['attributes']['peak_learning_results'] = \
            hdfgroup2dict(group['peak_learning_results'], {})

    # If the title was not defined on writing the Experiment is
    # then called __unnamed__. The next "if" simply sets the title
    # back to the empty string.
    if "General" in exp["metadata"] and "title" in exp["metadata"]["General"]:
        if '__unnamed__' == exp['metadata']['General']['title']:
            exp['metadata']["General"]['title'] = ''

    if current_file_version < StrictVersion("1.1"):
        # Load the decomposition results written with the old name,
        # mva_results.
        if 'mva_results' in group.keys():
            exp['attributes']['learning_results'] = hdfgroup2dict(
                group['mva_results'], {})
        if 'peak_mva_results' in group.keys():
            exp['attributes']['peak_learning_results'] = hdfgroup2dict(
                group['peak_mva_results'], {})
        # Replace the old signal and name keys with their current names.
        if 'signal' in exp['metadata']:
            if "Signal" not in exp["metadata"]:
                exp["metadata"]["Signal"] = {}
            exp['metadata']["Signal"]['signal_type'] = \
                exp['metadata']['signal']
            del exp['metadata']['signal']
        if 'name' in exp['metadata']:
            if "General" not in exp["metadata"]:
                exp["metadata"]["General"] = {}
            exp['metadata']['General']['title'] = \
                exp['metadata']['name']
            del exp['metadata']['name']

    if current_file_version < StrictVersion("1.2"):
        if '_internal_parameters' in exp['metadata']:
            exp['metadata']['_HyperSpy'] = \
                exp['metadata']['_internal_parameters']
            del exp['metadata']['_internal_parameters']
            # Guarded by the branch above: '_HyperSpy' only exists once
            # '_internal_parameters' has been renamed, otherwise the
            # lookups below would raise a KeyError.
            if 'stacking_history' in exp['metadata']['_HyperSpy']:
                exp['metadata']['_HyperSpy']["Stacking_history"] = \
                    exp['metadata']['_HyperSpy']['stacking_history']
                del exp['metadata']['_HyperSpy']["stacking_history"]
            if 'folding' in exp['metadata']['_HyperSpy']:
                exp['metadata']['_HyperSpy']["Folding"] = \
                    exp['metadata']['_HyperSpy']['folding']
                del exp['metadata']['_HyperSpy']["folding"]
        if 'Variance_estimation' in exp['metadata']:
            if "Noise_properties" not in exp["metadata"]:
                exp["metadata"]["Noise_properties"] = {}
            exp['metadata']['Noise_properties']["Variance_linear_model"] = \
                exp['metadata']['Variance_estimation']
            del exp['metadata']['Variance_estimation']
        if "TEM" in exp["metadata"]:
            # Relocate the flat TEM node under Acquisition_instrument.
            if "Acquisition_instrument" not in exp["metadata"]:
                exp["metadata"]["Acquisition_instrument"] = {}
            exp["metadata"]["Acquisition_instrument"]["TEM"] = \
                exp["metadata"]["TEM"]
            del exp["metadata"]["TEM"]
            tem = exp["metadata"]["Acquisition_instrument"]["TEM"]
            if "EELS" in tem:
                # These acquisition parameters belong to the EELS
                # detector in the new tree.
                for key in ("dwell_time", "dwell_time_units",
                            "exposure", "exposure_units"):
                    if key in tem:
                        tem["EELS"][key] = tem[key]
                        del tem[key]
                if "Detector" not in tem:
                    tem["Detector"] = {}
                # FIX: nest EELS under Detector, as done for EDS below.
                # The previous code overwrote the whole Detector dict
                # with the EELS one, dropping any existing Detector
                # content and flattening the tree.
                tem["Detector"]["EELS"] = tem["EELS"]
                del tem["EELS"]
            if "EDS" in tem:
                if "Detector" not in tem:
                    tem["Detector"] = {}
                tem["Detector"]["EDS"] = tem["EDS"]
                del tem["EDS"]
            del tem
        if "SEM" in exp["metadata"]:
            # Relocate the flat SEM node under Acquisition_instrument.
            if "Acquisition_instrument" not in exp["metadata"]:
                exp["metadata"]["Acquisition_instrument"] = {}
            exp["metadata"]["Acquisition_instrument"]["SEM"] = \
                exp["metadata"]["SEM"]
            del exp["metadata"]["SEM"]
            sem = exp["metadata"]["Acquisition_instrument"]["SEM"]
            if "EDS" in sem:
                if "Detector" not in sem:
                    sem["Detector"] = {}
                sem["Detector"]["EDS"] = sem["EDS"]
                del sem["EDS"]
            del sem
        # 'Xray_lines' was renamed to 'xray_lines'.
        if "Sample" in exp["metadata"] and "Xray_lines" in exp["metadata"][
                "Sample"]:
            exp["metadata"]["Sample"]["xray_lines"] = exp["metadata"][
                "Sample"]["Xray_lines"]
            del exp["metadata"]["Sample"]["Xray_lines"]
        # Remaining flat keys move into the General and Signal nodes.
        for key in ["title", "date", "time", "original_filename"]:
            if key in exp["metadata"]:
                if "General" not in exp["metadata"]:
                    exp["metadata"]["General"] = {}
                exp["metadata"]["General"][key] = exp["metadata"][key]
                del exp["metadata"][key]
        for key in ["record_by", "signal_origin", "signal_type"]:
            if key in exp["metadata"]:
                if "Signal" not in exp["metadata"]:
                    exp["metadata"]["Signal"] = {}
                exp["metadata"]["Signal"][key] = exp["metadata"][key]
                del exp["metadata"][key]
    return exp
def storeTag(tagName, tagValue):
    """Record a tag in the module-level stores.

    Appends a flat "name = value" line to ``storedTags`` and files the
    value in ``tagHash`` with both key and value normalised to unicode.
    """
    global storedTags, tagHash
    storedTags.append(" = ".join((str(tagName), str(tagValue))))
    key = ensure_unicode(tagName, 'utf8', 'latin-1')
    tagHash[key] = ensure_unicode(tagValue, 'utf16')