def clus_avg(data_loc, halo_file, chris_data_root, newfilename, write_data=True, clobber=True):

    C4 = CFOUR({'H0': 70, 'chris_data_root': chris_data_root})
    C = Caustic()

    # Load Halos
    halos = fits.open(data_loc + '/' + halo_file)[1].data
    HaloID = halos['orig_order']
    RA = halos['ra_bcg']
    DEC = halos['dec_bcg']
    Z = halos['z_biwt']
    RVIR = halos['RVIR']
    SINGLE = halos['single']
    SUB = halos['sub']
    NC4 = halos['nc4']

    RA_AVG, DEC_AVG, Z_AVG = [], [], []

    # Loop Over Halos
    print ''
    print '-' * 40
    print '...running average cluster center code'
    for i in range(len(halos)):
        if i % 100 == 0:
            print '...working on cluster ' + str(i) + ' out of ' + str(len(halos))

        try:
            # Assign Halo Properties
            clus_ra = RA[i]
            clus_dec = DEC[i]
            clus_z = Z[i]

            # Load Galaxies
            galdata = C4.load_chris_gals(HaloID[i])
            gal_ra, gal_dec, gal_z, gal_gmags, gal_rmags, gal_imags = galdata

            # Take Iterative Average, four times
            # vlim = 1500 km/s, rlim = 1.5 Mpc
            clus_ra, clus_dec, clus_z = proj_avg(clus_ra, clus_dec, clus_z, gal_ra, gal_dec, gal_z, 1500, 1.5, C)
            # vlim = 1000 km/s, rlim = 1.5 Mpc
            clus_ra, clus_dec, clus_z = proj_avg(clus_ra, clus_dec, clus_z, gal_ra, gal_dec, gal_z, 1000, 1.5, C)
            # vlim = 1000 km/s, rlim = 1.5 Mpc
            clus_ra, clus_dec, clus_z = proj_avg(clus_ra, clus_dec, clus_z, gal_ra, gal_dec, gal_z, 1000, 1.5, C)
            # vlim = 2000 km/s, rlim = 1.5 Mpc
            clus_ra, clus_dec, clus_z = proj_avg(clus_ra, clus_dec, clus_z, gal_ra, gal_dec, gal_z, 2000, 1.5, C)

        except Exception:
            # Galaxy load or averaging failed: fall back to a null center
            print '...cluster ' + str(i) + ' failed, assigning null center'
            clus_ra, clus_dec, clus_z = 0, 0, 0

        RA_AVG.append(clus_ra)
        DEC_AVG.append(clus_dec)
        Z_AVG.append(clus_z)

    RA_AVG, DEC_AVG, Z_AVG = np.array(RA_AVG), np.array(DEC_AVG), np.array(Z_AVG)

    print '...finished average cluster-center calculations'

    ## Write Data Out
    if write_data:
        print '...writing out cluster catalogue with average centers included'

        # Dictionary of new columns
        new_keys = ['RA_AVG', 'DEC_AVG', 'Z_AVG']
        new_dic = ez.create(new_keys, locals())

        # Original fits record file
        orig_table = halos

        # Write own fits file
        keys = ['HaloID', 'RA', 'DEC', 'Z', 'RVIR', 'RA_AVG', 'DEC_AVG', 'Z_AVG']
        dic = ez.create(keys, locals())
        fits_table(dic, keys, data_loc + '/avg_centers.fits', clobber=True)

        # Append new columns to the original halo catalogue
        fits_append(orig_table, new_dic, new_keys, filename=data_loc + '/' + newfilename, clobber=clobber)

    print '-' * 40
    print ''
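
# The two write-out helpers above, fits_table and fits_append, are project-local
# and not shown in this module. As a rough, illustrative sketch only (not the
# project's implementation), the column-append step could be done with astropy
# alone roughly as below; the helper name _append_avg_centers_astropy is ours.
def _append_avg_centers_astropy(orig_table, RA_AVG, DEC_AVG, Z_AVG, filename, clobber=True):
    # Build an astropy Table from the original FITS record array, attach the
    # new average-center columns, and write the combined catalogue back out.
    from astropy.table import Table
    tbl = Table(orig_table)
    tbl['RA_AVG'] = RA_AVG
    tbl['DEC_AVG'] = DEC_AVG
    tbl['Z_AVG'] = Z_AVG
    tbl.write(filename, overwrite=clobber)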
def proj_avg(clus_ra, clus_dec, clus_z, gal_ra, gal_dec, gal_z, vlim, rlim, C):
    '''
    One pass of the iterative center estimate: project the galaxies about the
    current cluster center, keep those within a line-of-sight velocity cut of
    vlim (km/s) and a projected radius cut of rlim (Mpc), and take the biweight
    location of their RA, DEC and redshift as the updated center.
    H0 (km/s/Mpc) and c (km/s) are assumed to be module-level constants.
    '''
    # Project Galaxies about the current center
    ang_d, lum_d = C.zdistance(clus_z, H0)
    angles = C.findangle(gal_ra, gal_dec, clus_ra, clus_dec)
    rdata = angles * ang_d
    vdata = c * (gal_z - clus_z) / (1 + clus_z)

    # Velocity and radius cut, then biweight average of the remaining galaxies
    cut = np.where((np.abs(vdata) < vlim) & (rdata < rlim))[0]
    clus_ra = astats.biweight_location(gal_ra[cut])
    clus_dec = astats.biweight_location(gal_dec[cut])
    clus_z = astats.biweight_location(gal_z[cut])

    return clus_ra, clus_dec, clus_z
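
# A small, self-contained illustration (not part of the pipeline) of why the
# center estimate uses astropy.stats.biweight_location rather than a plain mean:
# the biweight location is robust to interloper galaxies far from the true
# center. The synthetic RA values below are made up for the demonstration, and
# np/astats are assumed to be numpy and astropy.stats as used above.
def _biweight_demo():
    np.random.seed(0)
    ra_members = np.random.normal(180.0, 0.02, 80)        # members near RA = 180 deg
    ra_interlopers = np.random.uniform(179.0, 181.0, 20)  # interlopers spread over 2 deg
    ra = np.concatenate([ra_members, ra_interlopers])
    print 'mean RA              :', np.mean(ra)
    print 'biweight location RA :', astats.biweight_location(ra)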
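
# Example invocation (hypothetical paths and file names; adjust to your setup).
# This assumes the module-level names used above (fits, np, ez, astats, CFOUR,
# Caustic, fits_table, fits_append, H0, c) are imported/defined elsewhere in
# the package.
if __name__ == '__main__':
    clus_avg(data_loc='/path/to/c4/data',             # hypothetical data directory
             halo_file='halos.fits',                  # hypothetical halo catalogue
             chris_data_root='/path/to/chris/data',   # hypothetical galaxy data root
             newfilename='halos_avg_centers.fits',    # output catalogue with new columns
             write_data=True,
             clobber=True)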