def __init__(self, sample, sky_fraction, az, pol, redshift, stellarmass,
             spacing, load_pairs=False, pairs_src=None):
    """Build a galaxy-pair catalogue for ``sample`` on a spherical grid.

    Parameters
    ----------
    sample : galaxy_sample
        Parent sample; its ``data`` mapping supplies the columns named below.
    sky_fraction : float
        Fraction of the sky covered by the sample.
    az, pol, redshift, stellarmass : str
        Column names in ``sample.data`` for azimuthal angle, polar angle,
        redshift and stellar mass.
    spacing : (float, float)
        Grid spacing: (angular spacing in radians, radial spacing in km/s)
        — presumably; confirm against ``tp.spherical_grid``.
    load_pairs : bool, optional
        If True, load a precomputed pair list from ``pairs_src`` instead of
        searching the grid for pairs.
    pairs_src : str, optional
        Path prefix for the precomputed 'pairs' / 'separations' files;
        required when ``load_pairs`` is True.

    Raises
    ------
    ValueError
        If ``load_pairs`` is True but ``pairs_src`` is None.
    """
    self.sample = sample
    self.sky_fraction = sky_fraction
    # Resolve column names into the actual data arrays.
    self.az, self.pol, self.redshift, self.stellarmass = map(
        self.sample.data.get, (az, pol, redshift, stellarmass))
    self.R_comoving = cosmo.comoving_distance(self.redshift)
    self.x, self.y, self.z = mf.sphere2cart(self.az, self.pol)
    # NOTE(review): 3e5 approximates c in km/s, while density_5nn uses the
    # exact 299792.458. Kept as-is to preserve behaviour — confirm whether
    # the two should agree.
    self.grid = tp.spherical_grid(
        points=np.array([self.pol, self.az, 3e5 * self.redshift]).T,
        spacing=spacing)

    if load_pairs:
        if pairs_src is None:
            raise ValueError('need pairs_src to load pairs')
        pairs = pair_list(pairs=load(pairs_src + 'pairs'),
                          separations=load(pairs_src + 'separations'))
        pairs.parent = self.sample
    else:
        # Projected separation corresponding to one angular grid cell at the
        # sample's minimum redshift. Loop-invariant, so compute it once
        # instead of once per chunk (the original recomputed it every
        # iteration).
        separation_limit = cosmo.projected_separation(
            angular_separation=np.array([spacing[0]]),
            redshift=np.array([min(self.redshift)]),
            angular_units='radians',
            output_units='physical')[0]
        P = []
        for chunk in self.grid.iter_pairs(chunk=True):
            pair_chunk = pair_list(np.array(chunk))
            pair_chunk.parent = self.sample
            pair_chunk.separations = pair_chunk.compute_rp(
                az=az, pol=pol, redshift=redshift,
                angular_units='radians', output_units='physical')
            # Line-of-sight velocity difference c*dz (km/s) for each pair.
            cdz = 3e5 * np.abs(
                pair_chunk.parent.data[redshift][pair_chunk.first()]
                - pair_chunk.parent.data[redshift][pair_chunk.second()])
            # Keep genuine pairs (no self-pairs) within both the projected
            # and the radial limits.
            pair_chunk = pair_chunk.select(np.all([
                pair_chunk.first() != pair_chunk.second(),
                pair_chunk.separations < separation_limit,
                cdz < spacing[1],
            ], axis=0))
            P.append(pair_chunk.pairs)
        P = np.array(list(chain(*P)))
        pairs = pair_list(P)
        pairs.parent = self.sample
        pairs.separations = pairs.compute_rp(az=az, pol=pol, redshift=redshift)
    # Comoving line-of-sight separation of each pair — identical in both
    # branches, so computed once here rather than duplicated.
    pairs.dR_comoving = np.abs(np.diff(
        self.R_comoving[pairs.pairs], axis=1)).reshape(pairs.count,)
    self.pairs = pairs
def density_5nn(sample, spacing=(4 * 0.116, 500), az='azimuthal_angle',
                pol='polar_angle', redshift='redshift', mass='StellarMass',
                tracerlim=10.4):
    """Estimate a log10 number density from the 5th-nearest tracer neighbour.

    For every galaxy, find the projected distance to its 5th-nearest
    neighbour above the tracer mass limit (4th if the galaxy is itself a
    tracer — presumably because it then counts as one of the five; confirm),
    and convert that into a density within the redshift slab.

    Parameters
    ----------
    sample : galaxy_sample
        Sample whose ``data`` mapping supplies the columns named below.
    spacing : (float, float), optional
        Grid spacing: (angular spacing in radians, radial spacing in km/s).
    az, pol, redshift, mass : str, optional
        Column names in ``sample.data``.
    tracerlim : float, optional
        log10 stellar-mass limit above which a galaxy counts as a tracer.

    Returns
    -------
    numpy.ndarray
        log10 density estimate per galaxy.
    """
    c = 299792.458  # speed of light, km/s
    az, pol, redshift, mass = map(sample.data.get, (az, pol, redshift, mass))
    g = tp.spherical_grid(points=np.array([pol, az, c * redshift]).T,
                          spacing=spacing)
    rp5 = np.zeros(sample.count,)
    # Radial (velocity) coordinate of every grid point, used for the
    # line-of-sight cut below.
    _, _, radial = zip(*g.points)
    radial = np.array(radial)
    # Projected size of one angular grid cell at the sample's minimum
    # redshift: the largest separation the grid search can resolve.
    separation_limit = cosmo.projected_separation(
        angular_separation=np.array([g.angular_spacing]),
        redshift=np.array([min(redshift)]),
        angular_units='radians',
        output_units='physical')[0]
    for pair_chunk in g.iter_pairs(chunk=True):
        pair_chunk = pair_list(np.array(pair_chunk), parent=sample)
        pair_chunk.separations = pair_chunk.compute_rp(
            az='azimuthal_angle', pol='polar_angle', redshift='redshift',
            angular_units='radians', output_units='physical')
        cdz = np.abs(np.diff(radial[pair_chunk.pairs], axis=1)).reshape(-1)
        # Keep pairs whose second member is a tracer, excluding self-pairs
        # and anything outside the projected / line-of-sight limits.
        pair_chunk = pair_chunk.select(np.all([
            mass[pair_chunk.second()] > tracerlim,
            pair_chunk.first() != pair_chunk.second(),
            pair_chunk.separations < separation_limit,
            cdz < g.radial_spacing,
        ], axis=0))
        for gal in np.unique(pair_chunk.first()):
            nn = 4 if mass[gal] > tracerlim else 5
            try:
                rp5[gal] = sorted(
                    pair_chunk.separations[pair_chunk.first() == gal])[nn - 1]
            except IndexError:
                # Fewer than nn tracer neighbours within the grid cell;
                # flag and substitute the separation limit afterwards.
                rp5[gal] = -1
    # Every galaxy appears as `first` in some chunk, so none may be left
    # at the initial value of zero.
    assert not np.any(rp5 == 0), 'some galaxies received no rp5 estimate'
    if any(rp5 == -1):
        print('Replacing rp5\'s larger than spacing with max')
        rp5[rp5 == -1] = separation_limit
    # Comoving depth of the +/-500 km/s slab around each galaxy.
    slab_depth = (cosmo.comoving_distance(redshift + (500 / c))
                  - cosmo.comoving_distance(redshift - (500 / c)))
    # 5 neighbours in a cylinder of radius rp5 and length slab_depth.
    return np.log10(5 / (np.pi * rp5**2 * slab_depth))
def smooth2d(x, y, z, func, smoothness, normalize=False):
    """Smooth ``z`` over the (x, y) plane.

    For each point, apply ``func`` to the ``z`` values of all neighbours
    closer than ``smoothness`` (the point itself excluded). Points with no
    neighbour inside the radius are imputed with their own ``z`` value.

    Parameters
    ----------
    x, y : array-like
        Point coordinates.
    z : numpy.ndarray
        Values to smooth, one per point.
    func : callable
        Aggregator applied to each point's neighbouring ``z`` values
        (e.g. ``np.median``).
    smoothness : float
        Neighbourhood radius (also used as the grid spacing).
    normalize : bool, optional
        If True, rescale ``x`` and ``y`` by their standard deviations so the
        radius is expressed in units of sigma on both axes.

    Returns
    -------
    numpy.ndarray
        Smoothed values, same shape as ``z``.
    """
    f = np.full(z.shape, np.nan)
    if normalize:
        # Work in units of standard deviations so both axes are comparable.
        normed = lambda a: np.copy(a) / np.std(a)
        x, y = normed(x), normed(y)
    g = tp.grid(list(zip(x, y)), spacing=smoothness)
    for pair_chunk in g.iter_pairs(chunk=True):
        pair_chunk = pair_list(np.array(pair_chunk))
        pair_chunk.separations = np.linalg.norm(
            g.points[pair_chunk.first()] - g.points[pair_chunk.second()],
            axis=1)
        # Drop self-pairs and anything outside the smoothing radius.
        pair_chunk = pair_chunk.select(np.logical_and(
            pair_chunk.first() != pair_chunk.second(),
            pair_chunk.separations < smoothness))
        # Guard clause instead of catching the ValueError that np.min/np.max
        # raise on an empty chunk (the original used a bare except for this).
        if pair_chunk.count == 0:
            continue
        pair_chunk.sort(by='first')
        # Pair counts per 'first' index, so the sorted pair array can be
        # split into contiguous runs sharing the same first galaxy.
        bin_counts = np.histogram(
            pair_chunk.first(),
            bins=np.arange(np.min(pair_chunk.first()),
                           np.max(pair_chunk.first()) + 2, 1))[0]
        for sub_chunk in np.split(pair_chunk.pairs, np.cumsum(bin_counts)[:-1]):
            if sub_chunk.shape[0] != 0:
                # Every row in a run must share the same first galaxy.
                assert len(set(sub_chunk[:, 0])) == 1
                f[sub_chunk[0, 0]] = func(z[sub_chunk[:, 1]])
    is_nan = np.isnan(f)
    if np.any(is_nan):
        print('smooth2d returned {} values without estimates, imputing with their own value'
              .format(np.sum(is_nan)))
        f[is_nan] = z[is_nan]
    return f
sample = sys.argv[1] if sample=='sdss': data_sources = ['../data/SDSS/'] elif sample=='mock': data_sources = ['../data/LGalaxies/{}/'.format(i) for i in range(8)] for data_src in data_sources: data = load(data_src+'data') pairs = load(data_src+'pairs') separations = load(data_src+'separations') part = galaxy_sample(data) part.define_pairs( pair_list(pairs,separations,parent=part) ) part.define_corrfunc('full_sample',var='Delta_md') obs_same_halo = np.equal(*(part.pair_list.get('ObsGrNr').T)) obs_diff_halo = np.logical_not(obs_same_halo) if sample=='sdss': selections = { 'obs_same_halo': obs_same_halo, 'obs_diff_halo': obs_diff_halo} elif sample=='mock': true_same_halo = np.equal(*(part.pair_list.get('FOFCentralGal').T)) true_diff_halo = np.logical_not(true_same_halo) selections = {'true_same_halo':true_same_halo, 'true_diff_halo':true_diff_halo, 'obs_same_halo': obs_same_halo,
c, h = 299792.458, 0.677 AM_redshift_bins = np.arange(0.02, 0.09, 0.01) AM_src = '../data/LGalaxies/' N_iter = int(sys.argv[1]) #print(N_iter) data_sources = ['../data/LGalaxies/{}/'.format(i) for i in range(8)] training_src = data_sources[0] data = load(training_src + 'data') part = galaxy_sample(data) part.define_pairs( pair_list(load(training_src + 'pairs'), load(training_src + 'separations'), parent=part)) #initialize group finder try: AM = halo_mass_model.RedshiftDependentAbundanceMatching( src=AM_src, SHAM=False, redshift_bins=AM_redshift_bins) except: # easiest way to handle this is to have run derive_quantities.py once with a dummy set of group finder params raise Exception( 'Need to have halo mass function and subhalo mass function defined already under ' + AM_src) gf = tinker_group_finder(part, sky_fraction=None, az='azimuthal_angle',