def setup_method(self, method):
    self.axes_list = [
        {'name': 'x', 'navigate': True, 'is_binned': False,
         'offset': 0.0, 'scale': 1.5E-9, 'size': 1024, 'units': 'm'},
        {'name': 'y', 'navigate': True, 'is_binned': False,
         'offset': 0.0, 'scale': 0.5E-9, 'size': 1024, 'units': 'm'},
        {'name': 'energy', 'navigate': False, 'is_binned': False,
         'offset': 0.0, 'scale': 5.0, 'size': 4096, 'units': 'eV'},
    ]
    self.am = AxesManager(self.axes_list)

    self.axes_list2 = [
        {'name': 'x', 'navigate': True, 'is_binned': False,
         'offset': 0.0, 'scale': 1.5E-9, 'size': 1024, 'units': 'm'},
        {'name': 'energy', 'navigate': False, 'is_binned': False,
         'offset': 0.0, 'scale': 2.5, 'size': 4096, 'units': 'eV'},
        {'name': 'energy2', 'navigate': False, 'is_binned': False,
         'offset': 0.0, 'scale': 5.0, 'size': 4096, 'units': 'eV'},
    ]
    self.am2 = AxesManager(self.axes_list2)
def setup_method(self, method):
    axes_list = [
        {'name': 'a', 'navigate': True,
         'offset': 0.0, 'scale': 1.3, 'size': 2, 'units': 'aa'},
        {'name': 'b', 'navigate': False,
         'offset': 1.0, 'scale': 6.0, 'size': 3, 'units': 'bb'},
        {'name': 'c', 'navigate': False,
         'offset': 2.0, 'scale': 100.0, 'size': 4, 'units': 'cc'},
        {'name': 'd', 'navigate': True,
         'offset': 3.0, 'scale': 1000000.0, 'size': 5, 'units': 'dd'},
    ]
    self.am = AxesManager(axes_list)
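# A minimal sketch of what the two fixtures above construct: AxesManager takes
# a list of axis dictionaries and exposes calibrated axes addressable by name.
# Assumes a HyperSpy environment; printed values are illustrative.
from hyperspy.axes import AxesManager

am = AxesManager([
    {'name': 'x', 'navigate': True,
     'offset': 0.0, 'scale': 1.5e-9, 'size': 1024, 'units': 'm'},
    {'name': 'energy', 'navigate': False,
     'offset': 0.0, 'scale': 5.0, 'size': 4096, 'units': 'eV'},
])
assert am['x'].navigate              # axes are addressable by name
assert am['energy'].size == 4096
print(am['x'].scale, am['x'].units)  # 1.5e-09 m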
def append(self, *args):
    if len(args) < 1:
        pass
    else:
        smp = self.mapped_parameters
        print args
        for arg in args:
            # object parameters
            mp = arg.mapped_parameters
            if mp.original_filename not in smp.original_files.keys():
                smp.original_files[mp.original_filename] = arg
                # add the data to the aggregate array
                if self.data is None:
                    self.data = np.atleast_3d(arg.data)
                else:
                    self.data = np.append(self.data,
                                          np.atleast_3d(arg.data),
                                          axis=2)
                print "File %s added to aggregate." % mp.original_filename
            else:
                print "Data from file %s already in this aggregate. \n \
Delete it first if you want to update it." % mp.original_filename
        # refresh the axes for the new sized data
        self.axes_manager = AxesManager(self._get_undefined_axes_list())
        smp.original_filename = "Aggregate Image: %s" % smp.original_files.keys()
        self.summary()
def _add_object(self, arg):
    # object parameters
    mp = arg.mapped_parameters
    smp = self.mapped_parameters
    if mp.original_filename not in smp.original_files.keys():
        smp.original_files[mp.original_filename] = arg
        # save the original data shape to the mva_results for later use
        smp.original_files[mp.original_filename].mva_results.original_shape = \
            arg.data.shape[:-1]
        arg.unfold()
        smp.aggregate_address[mp.original_filename] = (
            smp.aggregate_end_pointer,
            smp.aggregate_end_pointer + arg.data.shape[0] - 1)
        # add the data to the aggregate array
        if self.data is None:
            self.data = np.atleast_2d(arg.data)
            # copy the axes for the sake of calibration
            axes = [arg.axes_manager.axes[i].get_axis_dictionary()
                    for i in xrange(len(arg.axes_manager.axes))]
            self.axes_manager = AxesManager(axes)
        else:
            self.data = np.append(self.data, arg.data, axis=0)
        smp.aggregate_end_pointer = self.data.shape[0]
        print "File %s added to aggregate." % mp.original_filename
    else:
        print "Data from file %s already in this aggregate. \n \
Delete it first if you want to update it." % mp.original_filename
def hdfgroup2dict(group, dictionary={}):
    for key, value in group.attrs.iteritems():
        if isinstance(value, (np.string_, str)):
            if value == '_None_':
                value = None
        elif isinstance(value, np.bool_):
            value = bool(value)
        elif isinstance(value, np.ndarray) and \
                value.dtype == np.dtype('|S1'):
            value = value.tolist()
        # skip signals - these are handled below.
        if key.startswith('_sig_'):
            pass
        elif key.startswith('_datetime_'):
            dictionary[key.replace("_datetime_", "")] = eval(value)
        else:
            dictionary[key] = value
    if not isinstance(group, h5py.Dataset):
        for key in group.keys():
            if key.startswith('_sig_'):
                from hyperspy.io import dict2signal
                dictionary[key[len('_sig_'):]] = (dict2signal(
                    hdfgroup2signaldict(group[key])))
            elif isinstance(group[key], h5py.Dataset):
                dictionary[key] = np.array(group[key])
            elif key.startswith('_hspy_AxesManager_'):
                dictionary[key[len('_hspy_AxesManager_'):]] = \
                    AxesManager([i for k, i in sorted(iter(
                        hdfgroup2dict(group[key]).iteritems()))])
            else:
                dictionary[key] = {}
                hdfgroup2dict(group[key], dictionary[key])
    return dictionary
def do_ffts():
    j = 0
    for s in signals:
        ffts = s.deepcopy()
        if ffts.data.itemsize <= 4:
            ffts.change_dtype(np.complex64)
        else:
            ffts.change_dtype(np.complex128)
        am = AxesManager(s.axes_manager._get_axes_dicts())
        for idx in am:
            fftdata = s.data[am._getitem_tuple]
            fftdata = scipy.fftpack.fftn(fftdata)
            fftdata = scipy.fftpack.fftshift(fftdata)
            ffts.data[am._getitem_tuple] = fftdata
            j += 1
            yield j
        for i in range(ffts.axes_manager.signal_dimension):
            axis = ffts.axes_manager.signal_axes[i]
            s_axis = s.axes_manager.signal_axes[i]
            axis.scale = 1 / (s_axis.size * s_axis.scale)
            shift = (axis.high_value - axis.low_value) / 2
            axis.offset -= shift
            u = s_axis.units
            if u.endswith('-1'):
                u = u[:-2]
            else:
                u += '-1'
            axis.units = u
        indstr = (' ' + str(s.axes_manager.indices)
                  if len(s.axes_manager.indices) > 0 else '')
        ffts.metadata.General.title = ('FFT of ' +
                                       ffts.metadata.General.title + indstr)
        fftsignals.append(ffts)
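# The generator above clones the signal's AxesManager so it can step through
# every navigation index without disturbing the original; indexing s.data with
# am._getitem_tuple yields one signal-space block per index. A hedged sketch
# of that pattern, assuming a HyperSpy signal `s` (_get_axes_dicts and
# _getitem_tuple are private API and may differ between versions):
import numpy as np
import scipy.fftpack
from hyperspy.axes import AxesManager

def fft_per_navigation_index(s):
    out = s.deepcopy()
    out.change_dtype(np.complex128)  # FFT output is complex
    am = AxesManager(s.axes_manager._get_axes_dicts())
    for _ in am:  # iterates over all navigation indices
        block = s.data[am._getitem_tuple]
        out.data[am._getitem_tuple] = scipy.fftpack.fftshift(
            scipy.fftpack.fftn(block))
    return out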
def append(self, *args):
    if len(args) < 1:
        pass
    else:
        smp = self.mapped_parameters
        for arg in args:
            # object parameters
            mp = arg.mapped_parameters
            pmp = mp.parent.mapped_parameters
            if pmp.original_filename not in smp.locations.keys():
                smp.locations[pmp.original_filename] = mp.locations
                smp.original_files[pmp.original_filename] = mp.parent
                smp.image_stacks[pmp.original_filename] = arg
                smp.aggregate_address[pmp.original_filename] = (
                    smp.aggregate_end_pointer,
                    smp.aggregate_end_pointer + arg.data.shape[-1] - 1)
                # add the data to the aggregate array
                if self.data is None:
                    self.data = np.atleast_3d(arg.data)
                else:
                    self.data = np.append(self.data, arg.data, axis=2)
                print "File %s added to aggregate." % mp.original_filename
                smp.aggregate_end_pointer = self.data.shape[2]
            else:
                print "Data from file %s already in this aggregate. \n \
Delete it first if you want to update it." % mp.original_filename
        # refresh the axes for the new sized data
        self.axes_manager = AxesManager(self._get_undefined_axes_list())
        smp.original_filename = "Aggregate Cells: %s" % smp.locations.keys()
        self.summary()
def setup_method(self, method):
    self.c = Component(["parameter"])
    self.c._axes_manager = AxesManager([
        {"size": 3, "navigate": True},
        {"size": 2, "navigate": True},
    ])
def test_convert_to_navigation_units_Undefined(self):
    self.axes_list[0]['units'] = t.Undefined
    am = AxesManager(self.axes_list)
    am.convert_units(axes='navigation', same_units=True)
    assert am['x'].units == t.Undefined
    nt.assert_almost_equal(am['x'].scale, 1.5E-9)
    assert am['y'].units == 'm'
    nt.assert_almost_equal(am['y'].scale, 0.5E-9)
    assert am['energy'].units == 'eV'
    nt.assert_almost_equal(am['energy'].scale, 5)
def remove(self, *keys):
    smp = self.mapped_parameters
    for key in keys:
        idx = smp.original_files.keys().index(key)
        self.data = np.delete(self.data, np.s_[idx:idx + 1:1], 2)
        del smp.original_files[key]
        print "File %s removed from aggregate." % key
    self.axes_manager = AxesManager(self._get_undefined_axes_list())
    smp.original_filename = "Aggregate Image: %s" % smp.original_files.keys()
    self.summary()
def remove(self, *keys):
    smp = self.mapped_parameters
    for key in keys:
        del smp.locations[key]
        del smp.original_files[key]
        del smp.image_stacks[key]
        address = smp.aggregate_address[key]
        self.data = np.delete(self.data, np.s_[address[0]:address[1]:1], 2)
        print "File %s removed from aggregate." % key
    self.axes_manager = AxesManager(self._get_undefined_axes_list())
    smp.aggregate_end_pointer = self.data.shape[2]
    smp.original_filename = "Aggregate Cells: %s" % smp.locations.keys()
    self.summary()
def __init__(self, *args, **kw):
    # this axes_manager isn't really ideal for Aggregates.
    self.axes_manager = AxesManager([{
        'name': 'undefined',
        'scale': 1.,
        'offset': 0.,
        'size': 1,
        'units': 'undefined',
        'index_in_array': 0,
    }])
    super(Aggregate, self).__init__(*args, **kw)
    self.data = None
    self.mapped_parameters.original_files = OrderedDict()
def __init__(self, signal):
    if signal.axes_manager.signal_dimension != 1:
        raise SignalDimensionError(
            signal.axes_manager.signal_dimension, 1)

    self.signal = signal
    self.signal.plot()
    axis_dict = signal.axes_manager.signal_axes[0].get_axis_dictionary()
    am = AxesManager([axis_dict, ])
    am._axes[0].navigate = True
    # Set the position of the line in the middle of the spectral
    # range by default
    am._axes[0].index = int(round(am._axes[0].size / 2))
    self.axes_manager = am
    self.axes_manager.connect(self.update_position)
    self.on_trait_change(self.switch_on_off, 'on')
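# The constructor above reuses AxesManager as a one-axis position control:
# copying the signal axis dictionary and flipping `navigate` to True turns
# the spectral axis into an index that widgets can drive. A hedged sketch of
# the same trick (_axes is private API; axis values are illustrative):
from hyperspy.axes import AxesManager

axis_dict = {'name': 'energy', 'navigate': False,
             'offset': 0.0, 'scale': 5.0, 'size': 4096, 'units': 'eV'}
am = AxesManager([axis_dict])
am._axes[0].navigate = True                # promote to a navigable axis
am._axes[0].index = am._axes[0].size // 2  # start at mid-range
print(am._axes[0].value)                   # calibrated position of the line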
def test_convert_to_navigation_units_different(self):
    # Don't convert the units since the units of the navigation axes are
    # different
    self.axes_list.insert(0, {
        'name': 'time',
        'navigate': True,
        'offset': 0.0,
        'scale': 1.5,
        'size': 20,
        'units': 's',
    })
    am = AxesManager(self.axes_list)
    am.convert_units(axes='navigation', same_units=True)
    assert am['time'].units == 's'
    nt.assert_almost_equal(am['time'].scale, 1.5)
    assert am['x'].units == 'nm'
    nt.assert_almost_equal(am['x'].scale, 1.5)
    assert am['y'].units == 'nm'
    nt.assert_almost_equal(am['y'].scale, 0.5)
    assert am['energy'].units == 'eV'
    nt.assert_almost_equal(am['energy'].scale, 5)
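# What these convert_units tests exercise: AxesManager.convert_units rescales
# compatible axes to a shared, human-friendly unit while leaving incompatible
# or Undefined units untouched. A small sketch using the same call signature
# as the tests above (expected output inferred from those tests):
from hyperspy.axes import AxesManager

am = AxesManager([
    {'name': 'x', 'navigate': True,
     'offset': 0.0, 'scale': 1.5e-9, 'size': 1024, 'units': 'm'},
    {'name': 'energy', 'navigate': False,
     'offset': 0.0, 'scale': 5.0, 'size': 4096, 'units': 'eV'},
])
am.convert_units(axes='navigation', same_units=True)
print(am['x'].units, am['x'].scale)  # expected: nm 1.5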
def do_iffts():
    j = 0
    for s in signals:
        ffts = s.deepcopy()
        if ffts.data.itemsize <= 4:
            ffts.change_dtype(np.float32)
        else:
            ffts.change_dtype(np.float64)
        am = AxesManager(s.axes_manager._get_axes_dicts())
        for i in range(ffts.axes_manager.signal_dimension):
            axis = ffts.axes_manager.signal_axes[i]
            s_axis = s.axes_manager.signal_axes[i]
            shift = (axis.high_value - axis.low_value) / 2
            axis.offset += shift
            axis.scale = 1 / (s_axis.size * s_axis.scale)
            u = s_axis.units
            if u is traits.Undefined:
                pass  # Leave unit as undefined
            elif u.endswith('-1'):
                u = u[:-2]
            else:
                u += '-1'
            axis.units = u
        for idx in am:
            fftdata = s.data[am._getitem_tuple]
            fftdata = scipy.fftpack.ifftshift(fftdata)
            fftdata = scipy.fftpack.ifftn(fftdata)
            fftdata = np.abs(fftdata)
            ffts.data[am._getitem_tuple] = fftdata
            j += 1
            yield j
        indstr = (' ' + str(s.axes_manager.indices)
                  if len(s.axes_manager.indices) > 0 else '')
        ffts.metadata.General.title = ('Inverse FFT of ' +
                                       ffts.metadata.General.title + indstr)
        fftsignals.append(ffts)
def hdfgroup2dict(group, dictionary={}):
    for key, value in group.attrs.iteritems():
        if type(value) is np.string_:
            if value == '_None_':
                value = None
            else:
                try:
                    value = value.decode('utf8')
                except UnicodeError:
                    # For old files
                    value = value.decode('latin-1')
        elif type(value) is np.bool_:
            value = bool(value)
        elif type(value) is np.ndarray and \
                value.dtype == np.dtype('|S1'):
            value = value.tolist()
        # skip signals - these are handled below.
        if key.startswith('_sig_'):
            pass
        else:
            dictionary[key] = value
    if not isinstance(group, h5py.Dataset):
        for key in group.keys():
            if key.startswith('_sig_'):
                from hyperspy.io import dict2signal
                dictionary[key[len('_sig_'):]] = (dict2signal(
                    hdfgroup2signaldict(group[key])))
            elif isinstance(group[key], h5py.Dataset):
                dictionary[key] = np.array(group[key])
            elif key.startswith('_hspy_AxesManager_'):
                dictionary[key[len('_hspy_AxesManager_'):]] = \
                    AxesManager([i for k, i in sorted(iter(
                        hdfgroup2dict(group[key]).iteritems()))])
            else:
                dictionary[key] = {}
                hdfgroup2dict(group[key], dictionary[key])
    return dictionary
def setup_method(self, method):
    axes_list = [
        {
            "name": "a",
            "navigate": True,
            "offset": 0.0,
            "scale": 1.3,
            "size": 2,
            "units": "aa",
        },
        {
            "name": "b",
            "navigate": False,
            "offset": 1.0,
            "scale": 6.0,
            "size": 3,
            "units": "bb",
        },
        {
            "name": "c",
            "navigate": False,
            "offset": 2.0,
            "scale": 100.0,
            "size": 4,
            "units": "cc",
        },
        {
            "name": "d",
            "navigate": True,
            "offset": 3.0,
            "scale": 1000000.0,
            "size": 5,
            "units": "dd",
        },
    ]
    self.am = AxesManager(axes_list)
def load_dictionary(self, file_data_dict):
    """Parameters
    ----------
    file_data_dict : dictionary
        A dictionary containing at least a 'data' keyword with an array of
        arbitrary dimensions. Additionally the dictionary can contain the
        following keys:

        axes: a dictionary that defines the axes (see the AxesManager class)
        attributes: a dictionary whose keywords are stored as attributes of
            the signal class
        mapped_parameters: a dictionary containing a set of parameters that
            will be stored as attributes of a Parameters class. For some
            subclasses some particular parameters might be mandatory.
        original_parameters: a dictionary that will be accessible in the
            original_parameters attribute of the signal class and that
            typically contains all the parameters that have been imported
            from the original data file.
    """
    self.data = file_data_dict['data']
    if 'axes' not in file_data_dict:
        file_data_dict['axes'] = self._get_undefined_axes_list()
    self.axes_manager = AxesManager(file_data_dict['axes'])
    if 'mapped_parameters' not in file_data_dict:
        file_data_dict['mapped_parameters'] = {}
    if 'original_parameters' not in file_data_dict:
        file_data_dict['original_parameters'] = {}
    if 'attributes' in file_data_dict:
        for key, value in file_data_dict['attributes'].iteritems():
            self.__setattr__(key, value)
    self.original_parameters.load_dictionary(
        file_data_dict['original_parameters'])
    self.mapped_parameters.load_dictionary(
        file_data_dict['mapped_parameters'])
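# A hedged example of the dictionary shape load_dictionary expects, based on
# the docstring above (key names come from the source; values illustrative):
import numpy as np

file_data_dict = {
    'data': np.zeros((64, 64, 1024)),
    'axes': [
        {'name': 'x', 'navigate': True,
         'offset': 0.0, 'scale': 1.0, 'size': 64, 'units': 'nm'},
        {'name': 'y', 'navigate': True,
         'offset': 0.0, 'scale': 1.0, 'size': 64, 'units': 'nm'},
        {'name': 'energy', 'navigate': False,
         'offset': 0.0, 'scale': 0.2, 'size': 1024, 'units': 'eV'},
    ],
    'mapped_parameters': {'title': 'example spectrum image'},
    'original_parameters': {},
}
# s.load_dictionary(file_data_dict)  # called on a signal instance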
def hdfgroup2dict(group, dictionary=None, load_to_memory=True): if dictionary is None: dictionary = {} for key, value in group.attrs.items(): if isinstance(value, bytes): value = value.decode() if isinstance(value, (np.string_, str)): if value == '_None_': value = None elif isinstance(value, np.bool_): value = bool(value) elif isinstance(value, np.ndarray) and value.dtype.char == "S": # Convert strings to unicode value = value.astype("U") if value.dtype.str.endswith("U1"): value = value.tolist() # skip signals - these are handled below. if key.startswith('_sig_'): pass elif key.startswith('_list_empty_'): dictionary[key[len('_list_empty_'):]] = [] elif key.startswith('_tuple_empty_'): dictionary[key[len('_tuple_empty_'):]] = () elif key.startswith('_bs_'): dictionary[key[len('_bs_'):]] = value.tostring() # The following two elif stataments enable reading date and time from # v < 2 of HyperSpy's metadata specifications elif key.startswith('_datetime_date'): date_iso = datetime.date( *ast.literal_eval(value[value.index("("):])).isoformat() dictionary[key.replace("_datetime_", "")] = date_iso elif key.startswith('_datetime_time'): date_iso = datetime.time( *ast.literal_eval(value[value.index("("):])).isoformat() dictionary[key.replace("_datetime_", "")] = date_iso else: dictionary[key] = value if not isinstance(group, h5py.Dataset): for key in group.keys(): if key.startswith('_sig_'): from hyperspy.io import dict2signal dictionary[key[len('_sig_'):]] = (dict2signal( hdfgroup2signaldict(group[key], load_to_memory=load_to_memory))) elif isinstance(group[key], h5py.Dataset): ans = np.array(group[key]) if ans.dtype.char == "S": try: ans = ans.astype("U") except UnicodeDecodeError: # There are some strings that must stay in binary, # for example dill pickles. This will obviously also # let "wrong" binary string fail somewhere else... pass kn = key if key.startswith("_list_"): ans = ans.tolist() kn = key[6:] elif key.startswith("_tuple_"): ans = tuple(ans.tolist()) kn = key[7:] elif load_to_memory: kn = key else: # leave as h5py dataset ans = group[key] kn = key dictionary[kn] = ans elif key.startswith('_hspy_AxesManager_'): dictionary[key[len('_hspy_AxesManager_'):]] = AxesManager([ i for k, i in sorted( iter( hdfgroup2dict( group[key], load_to_memory=load_to_memory).items())) ]) elif key.startswith('_list_'): dictionary[key[7 + key[6:].find('_'):]] = \ [i for k, i in sorted(iter( hdfgroup2dict( group[key], load_to_memory=load_to_memory).items() ))] elif key.startswith('_tuple_'): dictionary[key[8 + key[7:].find('_'):]] = tuple([ i for k, i in sorted( iter( hdfgroup2dict( group[key], load_to_memory=load_to_memory).items())) ]) else: dictionary[key] = {} hdfgroup2dict(group[key], dictionary[key], load_to_memory=load_to_memory) return dictionary
def _group2dict(self, group, dictionary=None, lazy=False):
    if dictionary is None:
        dictionary = {}
    for key, value in group.attrs.items():
        if isinstance(value, bytes):
            value = value.decode()
        if isinstance(value, (np.string_, str)):
            if value == '_None_':
                value = None
        elif isinstance(value, np.bool_):
            value = bool(value)
        elif isinstance(value, np.ndarray) and value.dtype.char == "S":
            # Convert strings to unicode
            value = value.astype("U")
            if value.dtype.str.endswith("U1"):
                value = value.tolist()
        # skip signals - these are handled below.
        if key.startswith('_sig_'):
            pass
        elif key.startswith('_list_empty_'):
            dictionary[key[len('_list_empty_'):]] = []
        elif key.startswith('_tuple_empty_'):
            dictionary[key[len('_tuple_empty_'):]] = ()
        elif key.startswith('_bs_'):
            dictionary[key[len('_bs_'):]] = value.tobytes()
        # The following two elif statements enable reading date and time
        # from v < 2 of HyperSpy's metadata specifications
        elif key.startswith('_datetime_date'):
            date_iso = datetime.date(
                *ast.literal_eval(value[value.index("("):])).isoformat()
            dictionary[key.replace("_datetime_", "")] = date_iso
        elif key.startswith('_datetime_time'):
            date_iso = datetime.time(
                *ast.literal_eval(value[value.index("("):])).isoformat()
            dictionary[key.replace("_datetime_", "")] = date_iso
        else:
            dictionary[key] = value
    if not isinstance(group, self.Dataset):
        for key in group.keys():
            if key.startswith('_sig_'):
                from hyperspy.io import dict2signal
                dictionary[key[len('_sig_'):]] = (
                    dict2signal(self.group2signaldict(
                        group[key], lazy=lazy)))
            elif isinstance(group[key], self.Dataset):
                dat = group[key]
                kn = key
                if key.startswith("_list_"):
                    if (h5py.check_string_dtype(dat.dtype) and
                            hasattr(dat, 'asstr')):
                        # h5py 3.0 and newer
                        # https://docs.h5py.org/en/3.0.0/strings.html
                        dat = dat.asstr()[:]
                    ans = np.array(dat)
                    ans = ans.tolist()
                    kn = key[6:]
                elif key.startswith("_tuple_"):
                    ans = np.array(dat)
                    ans = tuple(ans.tolist())
                    kn = key[7:]
                elif dat.dtype.char == "S":
                    ans = np.array(dat)
                    try:
                        ans = ans.astype("U")
                    except UnicodeDecodeError:
                        # There are some strings that must stay in binary,
                        # for example dill pickles. This will obviously also
                        # let "wrong" binary string fail somewhere else...
                        pass
                elif lazy:
                    ans = da.from_array(dat, chunks=dat.chunks)
                else:
                    ans = np.array(dat)
                dictionary[kn] = ans
            elif key.startswith('_hspy_AxesManager_'):
                dictionary[key[len('_hspy_AxesManager_'):]] = AxesManager(
                    [i for k, i in sorted(iter(
                        self._group2dict(group[key], lazy=lazy).items()))])
            elif key.startswith('_list_'):
                dictionary[key[7 + key[6:].find('_'):]] = \
                    [i for k, i in sorted(iter(
                        self._group2dict(group[key], lazy=lazy).items()))]
            elif key.startswith('_tuple_'):
                dictionary[key[8 + key[7:].find('_'):]] = tuple(
                    [i for k, i in sorted(iter(
                        self._group2dict(group[key], lazy=lazy).items()))])
            else:
                dictionary[key] = {}
                self._group2dict(group[key], dictionary[key], lazy=lazy)
    return dictionary
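# The `_list_` branch above guards against h5py 3.0's change in string
# handling: variable-length string datasets come back as bytes unless read
# through .asstr(). A minimal standalone sketch of that behaviour (file and
# dataset names are illustrative):
import h5py

with h5py.File("strings_demo.h5", "w") as f:
    f.create_dataset("_list_names", data=["a", "b"])  # vlen UTF-8 strings

with h5py.File("strings_demo.h5", "r") as f:
    dat = f["_list_names"]
    if h5py.check_string_dtype(dat.dtype) and hasattr(dat, "asstr"):
        values = dat.asstr()[:]  # h5py >= 3.0: decode to str
    else:
        values = dat[:]          # older h5py already returns str
    print(list(values))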
def hdfgroup2dict(group, dictionary=None, load_to_memory=True):
    if dictionary is None:
        dictionary = {}
    for key, value in group.attrs.iteritems():
        if isinstance(value, (np.string_, str)):
            if value == '_None_':
                value = None
        elif isinstance(value, np.bool_):
            value = bool(value)
        elif isinstance(value, np.ndarray) and \
                value.dtype == np.dtype('|S1'):
            value = value.tolist()
        # skip signals - these are handled below.
        if key.startswith('_sig_'):
            pass
        elif key.startswith('_list_empty_'):
            dictionary[key[len('_list_empty_'):]] = []
        elif key.startswith('_tuple_empty_'):
            dictionary[key[len('_tuple_empty_'):]] = ()
        elif key.startswith('_bs_'):
            dictionary[key[len('_bs_'):]] = value.tostring()
        elif key.startswith('_datetime_'):
            dictionary[key.replace("_datetime_", "")] = eval(value)
        else:
            dictionary[key] = value
    if not isinstance(group, h5py.Dataset):
        for key in group.keys():
            if key.startswith('_sig_'):
                from hyperspy.io import dict2signal
                dictionary[key[len('_sig_'):]] = (
                    dict2signal(hdfgroup2signaldict(
                        group[key], load_to_memory=load_to_memory)))
            elif isinstance(group[key], h5py.Dataset):
                if key.startswith("_list_"):
                    ans = np.array(group[key])
                    ans = ans.tolist()
                    kn = key[6:]
                elif key.startswith("_tuple_"):
                    ans = np.array(group[key])
                    ans = tuple(ans.tolist())
                    kn = key[7:]
                elif load_to_memory:
                    ans = np.array(group[key])
                    kn = key
                else:
                    # leave as h5py dataset
                    ans = group[key]
                    kn = key
                dictionary[kn] = ans
            elif key.startswith('_hspy_AxesManager_'):
                dictionary[key[len('_hspy_AxesManager_'):]] = \
                    AxesManager([i for k, i in sorted(iter(
                        hdfgroup2dict(
                            group[key],
                            load_to_memory=load_to_memory).iteritems()))])
            elif key.startswith('_list_'):
                dictionary[key[7 + key[6:].find('_'):]] = \
                    [i for k, i in sorted(iter(
                        hdfgroup2dict(
                            group[key],
                            load_to_memory=load_to_memory).iteritems()))]
            elif key.startswith('_tuple_'):
                dictionary[key[8 + key[7:].find('_'):]] = tuple(
                    [i for k, i in sorted(iter(
                        hdfgroup2dict(
                            group[key],
                            load_to_memory=load_to_memory).iteritems()))])
            else:
                dictionary[key] = {}
                hdfgroup2dict(
                    group[key], dictionary[key],
                    load_to_memory=load_to_memory)
    return dictionary
def hdfgroup2dict(group, dictionary=None, load_to_memory=True):
    if dictionary is None:
        dictionary = {}
    for key, value in group.attrs.items():
        if isinstance(value, bytes):
            value = value.decode()
        if isinstance(value, (np.string_, str)):
            if value == '_None_':
                value = None
        elif isinstance(value, np.bool_):
            value = bool(value)
        elif isinstance(value, np.ndarray) and value.dtype.char == "S":
            # Convert strings to unicode
            value = value.astype("U")
            if value.dtype.str.endswith("U1"):
                value = value.tolist()
        # skip signals - these are handled below.
        if key.startswith('_sig_'):
            pass
        elif key.startswith('_list_empty_'):
            dictionary[key[len('_list_empty_'):]] = []
        elif key.startswith('_tuple_empty_'):
            dictionary[key[len('_tuple_empty_'):]] = ()
        elif key.startswith('_bs_'):
            dictionary[key[len('_bs_'):]] = value.tostring()
        # The following is commented out as it could be used to evaluate
        # arbitrary code i.e. it was a security flaw. We should instead
        # use a standard string for date and time.
        # elif key.startswith('_datetime_'):
        #     dictionary[key.replace("_datetime_", "")] = eval(value)
        else:
            dictionary[key] = value
    if not isinstance(group, h5py.Dataset):
        for key in group.keys():
            if key.startswith('_sig_'):
                from hyperspy.io import dict2signal
                dictionary[key[len('_sig_'):]] = (
                    dict2signal(hdfgroup2signaldict(
                        group[key], load_to_memory=load_to_memory)))
            elif isinstance(group[key], h5py.Dataset):
                ans = np.array(group[key])
                if ans.dtype.char == "S":
                    try:
                        ans = ans.astype("U")
                    except UnicodeDecodeError:
                        # There are some strings that must stay in binary,
                        # for example dill pickles. This will obviously also
                        # let "wrong" binary string fail somewhere else...
                        pass
                kn = key
                if key.startswith("_list_"):
                    ans = ans.tolist()
                    kn = key[6:]
                elif key.startswith("_tuple_"):
                    ans = tuple(ans.tolist())
                    kn = key[7:]
                elif load_to_memory:
                    kn = key
                else:
                    # leave as h5py dataset
                    ans = group[key]
                    kn = key
                dictionary[kn] = ans
            elif key.startswith('_hspy_AxesManager_'):
                dictionary[key[len('_hspy_AxesManager_'):]] = AxesManager(
                    [i for k, i in sorted(iter(
                        hdfgroup2dict(
                            group[key],
                            load_to_memory=load_to_memory).items()))])
            elif key.startswith('_list_'):
                dictionary[key[7 + key[6:].find('_'):]] = \
                    [i for k, i in sorted(iter(
                        hdfgroup2dict(
                            group[key],
                            load_to_memory=load_to_memory).items()))]
            elif key.startswith('_tuple_'):
                dictionary[key[8 + key[7:].find('_'):]] = tuple(
                    [i for k, i in sorted(iter(
                        hdfgroup2dict(
                            group[key],
                            load_to_memory=load_to_memory).items()))])
            else:
                dictionary[key] = {}
                hdfgroup2dict(
                    group[key], dictionary[key],
                    load_to_memory=load_to_memory)
    return dictionary
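# All hdfgroup2dict variants above rebuild an AxesManager the same way: each
# axis is stored as its own subgroup under an '_hspy_AxesManager_' group, and
# sorting the recovered dict by key restores the axis order before the axis
# dicts are handed to AxesManager. A hedged sketch of that round trip, with
# no HDF5 involved (subgroup key names and axis values are illustrative):
from hyperspy.axes import AxesManager

axes_dicts = {  # what hdfgroup2dict would recover, keyed by axis index
    'axis-0': {'name': 'x', 'navigate': True,
               'offset': 0.0, 'scale': 1.0, 'size': 8, 'units': 'nm'},
    'axis-1': {'name': 'energy', 'navigate': False,
               'offset': 0.0, 'scale': 0.5, 'size': 16, 'units': 'eV'},
}
am = AxesManager([i for k, i in sorted(axes_dicts.items())])
print([axis.name for axis in am._axes])  # ['x', 'energy']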