import os

import numpy as np
import matplotlib.pyplot as plt

# `Halos` is the bgc2 halo reader class used throughout this module; import it
# from wherever it is defined in this codebase (module path not shown in this excerpt).


def bgc_to_png(path, axes='xy', resolution=1024, outputdir='output', name_padding=0):
    """Converts a single bgc2 file to a PNG image. Output is named <snapshot #>.png.

    :param path: filepath to the bgc2 file
    :param axes: axes to project the snapshot onto; 'xy' means looking into the page from z
    :param resolution: number of bins along each axis for binning halos; also the resolution of the output image
    :param outputdir: name of the output directory
    :param name_padding: number of zeroes to pad output filenames with
    """
    if not os.path.isdir(outputdir):
        os.makedirs(outputdir)

    H = Halos(path, verbose=False)
    H.read_data(level=1, strict=True)

    coords = []
    for c in axes:
        if c == 'x':
            coords.append(np.array([halo.x for halo in H.h]))
        elif c == 'y':
            coords.append(np.array([halo.y for halo in H.h]))
        elif c == 'z':
            coords.append(np.array([halo.z for halo in H.h]))  # H.h, matching the x and y branches

    hist_array, _, _ = np.histogram2d(coords[0], coords[1], bins=resolution,
                                      range=[[0, H.header[0].box_size], [0, H.header[0].box_size]])

    # mpimg.imsave(os.path.join(outputdir, str(H.header[0].snapshot) + '.png'), hist_array, cmap=plt.cm.binary)
    fig = plt.figure(dpi=100, tight_layout=True, frameon=False,
                     figsize=(resolution / 100., resolution / 100.))
    fig.figimage(hist_array, cmap=plt.cm.binary)
    fig.text(0.8, 0.1, 'z=%.3f' % H.header[0].redshift, size='medium',
             backgroundcolor='white', alpha=0.5)
    plt.savefig(os.path.join(outputdir, str(H.header[0].snapshot).zfill(name_padding) + '.png'))
    plt.close(fig)
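# Usage sketch (not part of the original module): render every snapshot
# matching a glob pattern to a PNG frame. The pattern and output directory
# below are illustrative placeholders; bgc2 files may also be multi-part, in
# which case a per-snapshot wildcard (as in bgc2_merger_test below) is passed.
def _example_render_all_snapshots(pattern='data/*.bgc2', outputdir='frames'):
    import glob
    for snapshot_path in sorted(glob.glob(pattern)):
        bgc_to_png(snapshot_path, axes='xy', resolution=1024,
                   outputdir=outputdir, name_padding=4)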
def only_fof_halos(self, filepath):
    """Identifies which halos in a snapshot are Friends-Of-Friends (FOF) halos.

    It does this by checking the parent_id attribute in the halo group data:
    a halo with parent_id == -1 is an FOF halo.
    """
    temp_H = Halos(filepath, verbose=False)
    temp_H.read_data(level=1)  # read header + halo data only for this snapshot
    fof_halos = [halo.id for halo in temp_H.h if halo.parent_id == -1]
    return fof_halos
def shared_particles(self, filepath):
    """Computes how frequently unique particles are shared between distinct
    (Friends-Of-Friends) halos in a snapshot.

    The sharing frequency is:
        (total particles - unique particles) / unique particles
    """
    halo_filter = self.only_fof_halos(filepath)
    H = Halos(filepath, verbose=False)
    H.read_data(level=2, sieve=halo_filter, onlyid=True)  # read particle IDs only, restricted to FOF halos

    s = set([])
    unique = 0.
    total = 0.
    for halo in H.halos:
        total += len(halo.particles.id)
        s.update(halo.particles.id)
    unique = len(s)

    redshift = H.header[0].redshift
    shared = (total / unique) - 1.  # = (total - unique) / unique, the fraction of particles that are shared
    return (redshift, shared)
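# Worked example of the sharing frequency (illustrative numbers): if the FOF
# halos contain 1200 particle IDs in total but only 1000 of them are distinct,
# then shared = 1200 / 1000 - 1 = 0.2, i.e. a sharing frequency of 20%.
#
# Usage sketch (not part of the original module): `analysis` is assumed to be
# an instance of whatever class these methods belong to, and the glob pattern
# is an illustrative placeholder.
def _example_sharing_across_snapshots(analysis, pattern='data/*.bgc2'):
    import glob
    for path in sorted(glob.glob(pattern)):
        redshift, shared = analysis.shared_particles(path)
        print('z = %.3f, sharing frequency = %.3f' % (redshift, shared))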
def bgc2_merger_test(f1=r'data\*0000.bgc2', f2=r'data\*0001.bgc2'):
    """Tests merging of two compatible bgc2 Halos objects and rejection of an
    invalid self-merge.
    """
    try:
        h = Halos(f1, verbose=False)
        g = Halos(f2, verbose=False)
        h.read_data(level=0)
        g.read_data(level=0)
    except Exception as e:
        print('\t\tFAILURE: Error reading test data files: ' + f1 + ' and ' + f2 + ': ')
        print('\t\t' + str(e))
        return

    try:
        h + g
        print('\t\tSUCCESS: Compatible files merged.')
    except Exception as e:
        print('\t\tFAILURE: BGC2 merger failed: ' + str(e))

    try:
        h + h
    except ValueError as e:
        print('\t\tSUCCESS: Invalid merger prevented.')
def worker(self, pool_ids, files, queue, args):
    """Worker process: builds the 2LPT vs. ZA halo radius-ratio histogram for
    one (simulation, snapshot) pair given in `args` and reports a
    (status, error) tuple through `queue`.
    """
    try:
        # Zero-pad the snapshot number to three digits.
        while len(args[1]) < 3:
            args[1] = '0' + args[1]
        path_2lpt = '/scratch/sissomdj/projects/simulations/rockstar/' + args[0] \
            + '/2lpt/snap' + args[1] + '/halos/*1.bgc2'
        path_za = '/scratch/sissomdj/projects/simulations/rockstar/' + args[0] \
            + '/za/snap' + args[1] + '/halos/*1.bgc2'
        # path_2lpt = '../data/halos_0.1.bgc2'
        # path_2lpt = '../data/halos_0.2.bgc2'
        H2 = Halos(path_2lpt, verbose=False)
        HZ = Halos(path_za, verbose=False)

        bins = np.linspace(1, 5, 26)  # 25 bins
        bins_mean = [0.5 * (bins[i] + bins[i + 1]) for i in range(len(bins) - 1)]

        # 2LPT halos: read, filter, and compute outer-to-half-mass radius ratios.
        H2.read_data()
        H2.filter(100)
        H2.center_halos()
        H2.get_covariance_matrices()
        H2.get_eigenvectors()
        H2.convert_bases()
        H2.get_radii()
        H2.get_half_mass_radii()
        ratios2 = [max(h.radii) / h.half_mass_radius for h in H2.halos]
        del H2

        # ZA halos: same pipeline.
        HZ.read_data()
        HZ.filter(100)
        HZ.center_halos()
        HZ.get_covariance_matrices()
        HZ.get_eigenvectors()
        HZ.convert_bases()
        HZ.get_radii()
        HZ.get_half_mass_radii()
        ratiosz = [max(h.radii) / h.half_mass_radius for h in HZ.halos]
        del HZ

        counts2, _ = np.histogram(ratios2, bins)
        countsz, _ = np.histogram(ratiosz, bins)
        n2 = sum(counts2)
        nz = sum(countsz)

        # Plot both ratio distributions as step histograms on one set of axes.
        fig = plt.figure()
        ax1 = fig.add_subplot(111)
        _ = ax1.hist(bins_mean, bins, label='2LPT', weights=counts2, color='r', histtype='step')
        _ = ax1.hist(bins_mean, bins, label='ZA', weights=countsz, color='b', histtype='step')
        ax1.set_xlabel('Outer to Inner halo radius')
        ax1.set_ylabel('Frequency')
        fig.suptitle('2LPT vs. ZA')
        ax1.set_title('2LPT: $\\mu$ = ' + '{:.3f}'.format(np.mean(ratios2)) +
                      ', N = ' + str(n2) + '\tZA: $\\mu$ = ' + '{:.3f}'.format(np.mean(ratiosz)) +
                      ', N = ' + str(nz))
        _ = ax1.legend()
        ax1.grid(True)
        fig.savefig('results/' + args[0] + '/' + args[1] + '.png')

        queue.put((0, None))
        return (0, None)
    except Exception as e:
        queue.put((-1, e))
        return -1
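# Usage sketch (not part of the original module): drive `worker` in a separate
# process. `analysis` is assumed to be an instance of the class `worker`
# belongs to, and the simulation name and snapshot number are illustrative
# placeholders; the data paths inside `worker` are cluster-specific, so this
# only runs where those files exist. `pool_ids` and `files` are not used by
# the worker body, so they are passed as None here.
def _example_run_worker(analysis, sim_name='box1', snap='061'):
    import multiprocessing
    queue = multiprocessing.Queue()
    p = multiprocessing.Process(target=analysis.worker,
                                args=(None, None, queue, [sim_name, snap]))
    p.start()
    status, err = queue.get()  # (0, None) on success, (-1, exception) on failure
    p.join()
    return status, err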
def bgc2_test(path='data\\halos_0.1.bgc2'):
    """Reads data from a sample bgc2 file containing multiple halos at each
    read level and prints SUCCESS/FAILURE for every stage, including the
    follow-up calculations.

    :param path: path to the bgc2 file
    """
    print("\n\t->Reading only header data: ")
    t = Halos(path, verbose=False)
    t.read_data(level=0)
    if len(t.header) == 1:
        print("\t\tSUCCESS: Header read.")
    else:
        print("\t\tFAILURE: Header not read.")

    print("\n\t->Reading header + halo data: ")
    t = Halos(path, verbose=False)
    t.read_data(level=1)
    if t.header[0].ngroups == len(t.h):
        print("\t\tSUCCESS: Halo data read.")
    else:
        print("\t\tFAILURE: Halo data improperly read.")
    sample_ids = np.random.choice(t.h.id, 100, replace=False)

    print("\n\t->Reading header + only halo id + only particle id data: ")
    t = Halos(path, verbose=False)
    t.read_data(level=2, onlyid=True)
    if len(t.h.id) == t.header[0].ngroups and len(t.halos[0].particles.id) > 0:
        print("\t\tSUCCESS: ID data read.")
    else:
        print("\t\tFAILURE: ID data improperly read.")

    print("\n\t->Reading header + halo + particle data: ")
    t = Halos(path, verbose=False)
    t.read_data(level=2, sieve=sample_ids)
    if len(t.h.id) == len(sample_ids) and len(t.halos[0].particles.id) == t.h[0].npart:
        print("\t\tSUCCESS: Full data read.")
    else:
        print("\t\tFAILURE: Full data improperly read.")

    print("\n\t->Performing calculations. ")
    t.filter(100)  # filter out halos with fewer than 100 particles
    t.center_halos()
    t.get_covariance_matrices()
    t.get_eigenvectors()
    t.convert_bases()
    t.get_radii()  # center_halos(), get_covariance_matrices(), and get_eigenvectors() must be called before the radius calculations
    t.get_half_mass_radii()
    print("\t\tSUCCESS: All calculations finished.")