def treeOffsets(basePath, snapNum, id, treeName):
    """ Handle offset loading for a SubLink merger tree cutout.

    Locate the merger-tree offsets for subhalo ``id`` at snapshot ``snapNum``.

    Returns:
        tuple: (RowNum, LastProgenitorID, SubhaloID) offsets for this subhalo.
    """
    # old or new format
    if 'fof_subhalo' in gcPath(basePath, snapNum):
        # new format: tree offsets live in the separate 'offsets_nnn.hdf5' file,
        # indexed directly by the global subhalo id (no per-chunk translation needed)
        # NOTE: previously this branch also read the full 'FileOffsets/Subhalo'
        # dataset, whose value was never used here — that dead read is removed.
        offsetFile = offsetPath(basePath, snapNum)
        prefix = 'Subhalo/' + treeName + '/'

        groupOffset = id
    else:
        # old format: load groupcat chunk offsets from header of first file
        with h5py.File(gcPath(basePath, snapNum), 'r') as f:
            groupFileOffsets = f['Header'].attrs['FileOffsets_Subhalo']

        # calculate target groups file chunk which contains this id
        groupFileOffsets = int(id) - groupFileOffsets
        fileNum = np.max(np.where(groupFileOffsets >= 0))
        groupOffset = groupFileOffsets[fileNum]

        offsetFile = gcPath(basePath, snapNum, fileNum)
        prefix = 'Offsets/Subhalo_Sublink'

    with h5py.File(offsetFile, 'r') as f:
        # load the merger tree offsets of this subgroup
        RowNum = f[prefix + 'RowNum'][groupOffset]
        LastProgID = f[prefix + 'LastProgenitorID'][groupOffset]
        SubhaloID = f[prefix + 'SubhaloID'][groupOffset]
        return RowNum, LastProgID, SubhaloID
def treeOffsets(basePath, snapNum, id):
    """ Handle offset loading for a LHaloTree merger tree cutout.

    Locate the LHaloTree offsets for subhalo ``id`` at snapshot ``snapNum``.

    Returns:
        tuple: (TreeFile, TreeIndex, TreeNum) offsets for this subhalo.
    """
    # old or new format
    if 'fof_subhalo' in gcPath(basePath, snapNum):
        # new format: tree offsets live in the separate 'offsets_nnn.hdf5' file,
        # indexed directly by the global subhalo id (no per-chunk translation needed)
        # NOTE: previously this branch also read the full 'FileOffsets/Subhalo'
        # dataset, whose value was never used here — that dead read is removed.
        offsetFile = offsetPath(basePath, snapNum)
        prefix = 'Subhalo/LHaloTree/'

        groupOffset = id
    else:
        # old format: load groupcat chunk offsets from header of first file
        with h5py.File(gcPath(basePath, snapNum), 'r') as f:
            groupFileOffsets = f['Header'].attrs['FileOffsets_Subhalo']

        # calculate target groups file chunk which contains this id
        groupFileOffsets = int(id) - groupFileOffsets
        fileNum = np.max(np.where(groupFileOffsets >= 0))
        groupOffset = groupFileOffsets[fileNum]

        offsetFile = gcPath(basePath, snapNum, fileNum)
        prefix = 'Offsets/Subhalo_LHaloTree'

    with h5py.File(offsetFile, 'r') as f:
        # load the merger tree offsets of this subgroup
        TreeFile = f[prefix+'File'][groupOffset]
        TreeIndex = f[prefix+'Index'][groupOffset]
        TreeNum = f[prefix+'Num'][groupOffset]
        return TreeFile, TreeIndex, TreeNum
def getSnapOffsets(basePath, snapNum, id, type):
    """ Compute offsets within snapshot for a particular group/subgroup.

    Args:
        basePath: simulation base path (passed through to gcPath/offsetPath).
        snapNum: snapshot number.
        id: global index of the group or subgroup.
        type: 'Group' or 'Subhalo' (the groupcat object type).

    Returns:
        dict with keys:
            'snapOffsets': per-file, per-type snapshot particle offsets.
            'lenType': particle counts (by type) of this group/subgroup.
            'offsetType': particle offsets (by type) of this group/subgroup
                          within the snapshot.
    """
    r = {}

    # old or new format? (evaluate the path test once, it is used twice below)
    newFormat = 'fof_subhalo' in gcPath(basePath, snapNum)

    if newFormat:
        # new format: everything needed from the separate 'offsets_nnn.hdf5'
        # file is read in a single open (previously this file was opened twice)
        with h5py.File(offsetPath(basePath, snapNum), 'r') as f:
            groupFileOffsets = f['FileOffsets/' + type][()]
            r['snapOffsets'] = np.transpose(f['FileOffsets/SnapByType'][()])  # consistency
            # offset (by type) of this group/subgroup within the snapshot,
            # indexed directly by the global id
            r['offsetType'] = f[type + '/SnapByType'][id, :]
    else:
        # old format: load groupcat chunk offsets from header of first file
        with h5py.File(gcPath(basePath, snapNum), 'r') as f:
            groupFileOffsets = f['Header'].attrs['FileOffsets_' + type]
            r['snapOffsets'] = f['Header'].attrs['FileOffsets_Snap']

    # calculate target groups file chunk which contains this id
    groupFileOffsets = int(id) - groupFileOffsets
    fileNum = np.max(np.where(groupFileOffsets >= 0))
    groupOffset = groupFileOffsets[fileNum]

    # load the length (by type) of this group/subgroup from the group catalog
    # (and, in the old format, its snapshot offsets stored in the same chunk file —
    # previously read via a second, redundant open of this file)
    with h5py.File(gcPath(basePath, snapNum, fileNum), 'r') as f:
        r['lenType'] = f[type][type + 'LenType'][groupOffset, :]

        if not newFormat:
            r['offsetType'] = f['Offsets'][type + '_SnapByType'][groupOffset, :]

    return r