Code example #1
def measure_all_histograms(models,options,pool):

	#Look at a sample map
	sample_map = ConvergenceMap.fromfilename(models[0].getNames(z=1.0,realizations=[1])[0],loader=load_fits_default_convergence)
	#Initialize Gaussian shape noise generator for the sample map shape and angle
	generator = GaussianNoiseGenerator.forMap(sample_map)

	#Parsed from options
	num_realizations = options.getint("analysis","num_realizations")
	smoothing_scales = [float(scale) for scale in options.get("analysis","smoothing_scales").split(",")]
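	#Histogram bin edges: (num_bins - 2) evenly spaced interior points, padded below with -10.0 and 10.0 so the outermost bins catch outliers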
	bin_edges = np.ogrid[options.getfloat("analysis","bin_edge_low"):options.getfloat("analysis","bin_edge_high"):(options.getint("analysis","num_bins") - 2)*1j]
	bin_edges = np.hstack((-10.0,bin_edges,10.0))
	z = options.getfloat("analysis","redshift")

	bin_midpoints = 0.5*(bin_edges[1:] + bin_edges[:-1])
	

	#Create smoothing scale index for the histograms
	idx = Indexer.stack([PDF(bin_edges) for scale in smoothing_scales])

	#Build the data type of the structure array in output
	data_type = [(model.name,Ensemble) for model in models]
	#Append info about the smoothing scale
	data_type = [("Smooth",np.float),] + data_type

	#Create output struct array
	ensemble_array = np.zeros(len(smoothing_scales),dtype=data_type)

	#Write smoothing scale information
	ensemble_array["Smooth"] = np.array(smoothing_scales)
	
	#The for loop runs the distributed computations
	for model in models:

		#Build Ensemble instance with the maps to analyze
		map_ensemble = Ensemble.fromfilelist(range(1,num_realizations+1))
		
		#Measure the histograms and load the data in the ensemble
		map_ensemble.load(callback_loader=compute_map_histograms,pool=pool,simulation_set=model,smoothing_scales=smoothing_scales,index=idx,generator=generator,bin_edges=bin_edges,redshift=z)

		#Split the ensemble between different smoothing scales
		map_ensemble_list = map_ensemble.split(idx)

		#Add to output struct array
		ensemble_array[model.name] = np.array(map_ensemble_list)

	return ensemble_array
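
The compute_map_histograms callback referenced above is defined elsewhere in the original script. A hedged sketch of what such a callback might look like, assuming each realization is loaded by number, shape noise is added, and one PDF is measured per smoothing scale; the signature and body are illustrative, not the original implementation:

import numpy as np
from astropy.units import arcmin
from lenstools import ConvergenceMap

def compute_map_histograms(realization,simulation_set,smoothing_scales,index,generator,bin_edges,redshift):

	#Load the convergence map for this realization (hypothetical loading logic)
	conv_map = ConvergenceMap.load(simulation_set.getNames(z=redshift,realizations=[realization])[0])

	#Add Gaussian shape noise, seeding with the realization number for reproducibility
	conv_map += generator.getShapeNoise(z=redshift,ngal=15.0*arcmin**-2,seed=realization)

	#Measure one PDF per smoothing scale, packed as directed by the index
	observables = np.zeros(index.size)
	for n,scale in enumerate(smoothing_scales):
		smoothed = conv_map.smooth(scale*arcmin) #assumes the scales are given in arcmin
		v,observables[index[n].first:index[n].last] = smoothed.pdf(bin_edges)

	return observables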
Code example #2
File: bornProject.py Project: apetri/Dissertation
def convergencePeaks(cmd_args,fontsize=22):

	#Plot setup
	fig,ax = plt.subplots(1,2,figsize=(16,8))

	#Load the convergence map and smooth on 0.5 arcmin
	conv = ConvergenceMap.load(os.path.join(fiducial["c0"].getMapSet("kappa").home,"WLconv_z2.00_0001r.fits"))
	conv.smooth(0.5*u.arcmin,kind="gaussianFFT",inplace=True)

	#Find the peak locations and height
	sigma = np.linspace(-2.,13.,101)
	height,positions = conv.locatePeaks(sigma,norm=True)

	#Show the convergence with the peak locations
	conv.visualize(fig=fig,ax=ax[0],colorbar=True,cbar_label=r"$\kappa$")
	ax[0].scatter(*positions[height>2.].to(u.deg).value.T,color="red",marker="o")
	ax[0].set_xlim(0,conv.side_angle.to(u.deg).value)
	ax[0].set_ylim(0,conv.side_angle.to(u.deg).value)

	#Build a gaussianized version of the map
	gen = GaussianNoiseGenerator.forMap(conv)
	ell = np.linspace(conv.lmin,conv.lmax,100)
	ell,Pell = conv.powerSpectrum(ell)
	convGauss = gen.fromConvPower(np.array([ell,Pell]),bounds_error=False,fill_value=0.)

	#Show the peak histogram (measured + gaussian)
	conv.peakHistogram(sigma,norm=True,fig=fig,ax=ax[1],label=r"${\rm Measured}$")
	convGauss.peakHistogram(sigma,norm=True,fig=fig,ax=ax[1],label=r"${\rm Gaussianized}$")
	conv.gaussianPeakHistogram(sigma,norm=True,fig=fig,ax=ax[1],label=r"${\rm Prediction}:(dN_{\rm pk}/d\nu)_G$")

	#Limits
	ax[1].set_ylim(1,1.0e3)

	#Labels
	ax[1].set_xlabel(r"$\kappa/\sigma_0$",fontsize=fontsize)
	ax[1].set_ylabel(r"$dN_{\rm pk}(\kappa)$",fontsize=fontsize)
	ax[1].legend()

	#Save
	fig.tight_layout()
	fig.savefig("{0}/convergencePeaks.{0}".format(cmd_args.type))
Code example #3
if (pool is not None) and not (pool.is_master()):

    pool.wait()
    sys.exit(0)

map_mock_ids = range(int(sys.argv[1]))

igs1_set = IGS1(root_path="/Users/andreapetri/Documents/Columbia/spurious_shear/convergence_maps")
map_igs1_ids = igs1_set.getNames(z=1.0,
                                 realizations=range(1, int(sys.argv[1]) + 1))

gen = GaussianNoiseGenerator(shape=(2048, 2048),
                             side_angle=3.41 * deg,
                             label="convergence")
power_func = np.loadtxt("Data/ee4e-7.txt", unpack=True)

ens_mock = Ensemble.fromfilelist(map_mock_ids)
ens_igs1 = Ensemble.fromfilelist(map_igs1_ids)

ens_mock.load(callback_loader=generate_and_measure,
              pool=pool,
              generator=gen,
              power_func=power_func)
ens_igs1.load(callback_loader=measure_from_IGS1, pool=pool)

if pool is not None:
    pool.close()
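
Neither generate_and_measure nor measure_from_IGS1 appears in this excerpt. A hedged sketch of what generate_and_measure might look like, assuming it draws one Gaussian realization per mock id from the tabulated spectrum and measures its power spectrum (names and signature are illustrative):

l_edges = np.arange(200.0, 50000.0, 200.0)

def generate_and_measure(map_id, generator, power_func):

    # Draw one Gaussian realization of the input power spectrum
    gaussian_map = generator.fromConvPower(power_func,
                                           seed=map_id,
                                           bounds_error=False,
                                           fill_value=0.0)

    # Measure and return the power spectrum in the chosen multipole bins
    l, Pl = gaussian_map.powerSpectrum(l_edges)
    return Pl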
Code example #4
    smoothing_scales = [theta * arcmin for theta in [0.1, 0.5, 1.0, 2.0]]
    bin_edges = np.ogrid[-0.15:0.15:128j]
    bin_midpoints = 0.5 * (bin_edges[1:] + bin_edges[:-1])

    #Create smoothing scale index for the histogram
    idx = Indexer.stack([PDF(bin_edges) for scale in smoothing_scales])

    #Create IGS1 simulation set object to look for the right simulations
    simulation_set = IGS1(root_path=root_path)

    #Look at a sample map
    sample_map = ConvergenceMap.load(
        simulation_set.getNames(z=1.0, realizations=[1])[0])

    #Initialize Gaussian shape noise generator
    generator = GaussianNoiseGenerator.forMap(sample_map)

    #Build Ensemble instance with the maps to analyze
    map_ensemble = Ensemble.fromfilelist(range(1, num_realizations + 1))

    #Measure the histograms and load the data in the ensemble
    map_ensemble.load(callback_loader=compute_histograms,
                      pool=pool,
                      simulation_set=simulation_set,
                      smoothing_scales=smoothing_scales,
                      index=idx,
                      generator=generator,
                      bin_edges=bin_edges)

    if pool is not None:
        pool.close()
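
The compute_histograms callback is not shown in this excerpt; it plays the same role as the compute_map_histograms sketch after code example #1 (load one realization, add shape noise with the generator, measure one histogram per smoothing scale).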
Code example #5
File: histograms.py Project: apetri/LensTools
	#Smoothing scales in arcmin
	smoothing_scales = [ theta*arcmin for theta in [0.1,0.5,1.0,2.0] ]
	bin_edges = np.ogrid[-0.15:0.15:128j]
	bin_midpoints = 0.5*(bin_edges[1:] + bin_edges[:-1])
	
	#Create smoothing scale index for the histogram
	idx = Indexer.stack([PDF(bin_edges) for scale in smoothing_scales])
	
	#Create IGS1 simulation set object to look for the right simulations
	simulation_set = IGS1(root_path=root_path)
	
	#Look at a sample map
	sample_map = ConvergenceMap.load(simulation_set.getNames(z=1.0,realizations=[1])[0])
	
	#Initialize Gaussian shape noise generator
	generator = GaussianNoiseGenerator.forMap(sample_map)
	
	#Build Ensemble instance with the maps to analyze
	map_ensemble = Ensemble.fromfilelist(range(1,num_realizations+1))
	
	#Measure the histograms and load the data in the ensemble
	map_ensemble.load(callback_loader=compute_histograms,pool=pool,simulation_set=simulation_set,smoothing_scales=smoothing_scales,index=idx,generator=generator,bin_edges=bin_edges)
	
	if pool is not None:
		pool.close()

	##########################################################################################################################################
	###############################Ensemble data available at this point for covariance, PCA, etc...##########################################
	##########################################################################################################################################
	
	#Plot results to check
Code example #6
"""
Generate gaussian random fields with a known power spectrum

"""

import numpy as np
import matplotlib.pyplot as plt

from astropy.units import deg

from lenstools import GaussianNoiseGenerator

#Set map side angle, and number of pixels on a side
num_pixel_side = 512
side_angle = 3.41 * deg

#Read the power spectrum (l,Pl) from an external file, and load it in numpy array format (the generator interpolates the power spectrum between bins)
l,Pl = np.loadtxt("Data/ee4e-7.txt",unpack=True)

#Instantiate the gaussian noise generator
gen = GaussianNoiseGenerator(shape=(num_pixel_side,num_pixel_side),side_angle=side_angle,label="convergence")

#Generate one random realization
gaussian_map = gen.fromConvPower(np.array([l,Pl]),seed=1,kind="linear",bounds_error=False,fill_value=0.0)

#gaussian_map is a ConvergenceMap instance
gaussian_map.visualize()
gaussian_map.savefig("example_map.png")
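
As an optional follow-up (not part of the original example), one can check that the realization reproduces the input spectrum by measuring its power spectrum and overplotting the two:

#Measure the power spectrum of the realization in a few multipole bins
l_edges = np.arange(500.0, 20000.0, 500.0)
l_measured, Pl_measured = gaussian_map.powerSpectrum(l_edges)

#Overplot the input and measured spectra
fig, ax = plt.subplots()
ax.loglog(l, Pl, label="input")
ax.loglog(l_measured, Pl_measured, label="measured")
ax.set_xlabel(r"$\ell$")
ax.set_ylabel(r"$P_\ell$")
ax.legend()
fig.savefig("power_check.png")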
Code example #7
try:

    from lenstools import ConvergenceMap, ShearMap, GaussianNoiseGenerator
    from lenstools.defaults import sample_power_shape

except ImportError:

    import sys
    sys.path.append("..")
    from lenstools import ConvergenceMap, ShearMap, GaussianNoiseGenerator
    from lenstools.defaults import sample_power_shape

import numpy as np
import matplotlib.pyplot as plt

from astropy.units import deg, arcmin

test_map_conv = ConvergenceMap.load("Data/conv.fit")

shape_noise_gen = GaussianNoiseGenerator.forMap(test_map_conv)
corr_noise_gen = GaussianNoiseGenerator.forMap(test_map_conv)

test_map_noisy = test_map_conv + shape_noise_gen.getShapeNoise(
    z=1.0, ngal=15.0 * arcmin**-2, seed=1)

l = np.arange(200.0, 50000.0, 200.0)
scale = 5000.0


def test_smooth():

    test_map_conv_smoothed = test_map_conv.smooth(1.0 * arcmin)

    fig, ax = plt.subplots(1, 2, figsize=(16, 8))
    test_map_conv.visualize(fig, ax[0])

    #Assumed continuation (the original snippet is truncated here): show the smoothed map and save
    test_map_conv_smoothed.visualize(fig, ax[1])
    fig.savefig("smoothed.png")
Code example #8
    # run class
    LambdaCDM.compute()

    si8 = LambdaCDM.sigma8()

    cls = LambdaCDM.density_cl(lmax)
    ell = cls['ell'][2:]
    clphiphi = cls['ll'][0][2:]
    clkk = 1.0 / 4 * (ell + 2.0) * (ell + 1.0) * (ell) * (ell - 1.0) * clphiphi

    return si8, ell, clkk


######### GRF initiate

gen = GaussianNoiseGenerator(shape=(num_pixel_side, num_pixel_side),
                             side_angle=side_angle)  #,label="convergence")

A_se9_find = lambda om, S8: -2.1 + 7.915 * S8 / sqrt(om / 0.3)


def GRF_from_PS(omS8seed, zs=1.0):
    om, S8, iseed = omS8seed
    print(iseed, om, S8)
    #fn_GRF='GRFs/GRF_si%.4f_om%.4f.fits'%(si8, om)
    #fn_cl='cls/clkk_si%.4f_om%.4f.npy'%(si8, om)
    fn_GRF = 'GRFs/GRF_%06d.fits' % (iseed)
    fn_cl = 'cls/clkk_%06d.npy' % (iseed)
    A_se9 = A_se9_find(om, S8)
    si8, ell, clkk = clkk_gen(om, A_se9, zs=1.0)
    gaussian_map = gen.fromConvPower(np.array([ell, clkk]),
                                     seed=int(iseed),
                                     kind="linear",
                                     bounds_error=False,
                                     fill_value=0.0)

    #Assumed continuation (the original snippet is truncated here): save the realization and its input spectrum
    gaussian_map.save(fn_GRF)
    np.save(fn_cl, np.array([ell, clkk]))
Code example #9
def igs1_convergence_measure_all(realization,model,index,mask_filename=None,redshift=1.0,big_fiducial_set=False,smoothing=1.0*arcmin):

	"""
	Measures all the statistical descriptors of a convergence map as indicated by the index instance
	
	"""

	logging.debug("Processing {0}".format(model.getNames(realization,z=redshift,big_fiducial_set=big_fiducial_set,kind="convergence")))

	#Load the map
	conv_map = model.load(realization,z=redshift,big_fiducial_set=big_fiducial_set,kind="convergence")

	#Add the noise
	gen = GaussianNoiseGenerator.forMap(conv_map)
	noise = gen.getShapeNoise(z=redshift,ngal=15.0*arcmin**-2,seed=realization)

	logging.debug("Adding shape noise with rms {0:.3f}".format(noise.data.std()))
	conv_map += noise

	#Smooth the map
	logging.debug("Smoothing the map on {0}".format(smoothing))
	conv_map = conv_map.smooth(scale_angle=smoothing) #smooth returns a new map, so reassign

	if mask_filename is not None:
		raise ValueError("Masks not implemented!") 
	
	logging.debug("Measuring...")

	#Allocate memory for observables
	descriptors = index
	observables = np.zeros(descriptors.size)

	#Measure descriptors as directed by input
	for n in range(descriptors.num_descriptors):

		
		if type(descriptors[n]) == PowerSpectrum:
			
			if mask_filename is None:
				l,observables[descriptors[n].first:descriptors[n].last] = conv_map.powerSpectrum(descriptors[n].l_edges)
			else:
				l,observables[descriptors[n].first:descriptors[n].last] = (conv_map*mask_profile).powerSpectrum(descriptors[n].l_edges)

		elif type(descriptors[n]) == Moments:

			if mask_filename is None:
				observables[descriptors[n].first:descriptors[n].last] = conv_map.moments(connected=descriptors[n].connected)
			else:
				observables[descriptors[n].first:descriptors[n].last] = masked_conv_map.moments(connected=descriptors[n].connected)
		
		elif type(descriptors[n]) == Peaks:
			
			if mask_filename is None:
				v,observables[descriptors[n].first:descriptors[n].last] = conv_map.peakCount(descriptors[n].thresholds,norm=descriptors[n].norm)
			else:
				v,observables[descriptors[n].first:descriptors[n].last] = masked_conv_map.peakCount(descriptors[n].thresholds,norm=descriptors[n].norm)

		elif type(descriptors[n]) == PDF:

			if mask_filename is None:
				v,observables[descriptors[n].first:descriptors[n].last] = conv_map.pdf(descriptors[n].thresholds,norm=descriptors[n].norm)
			else:
				v,observables[descriptors[n].first:descriptors[n].last] = masked_conv_map.pdf(descriptors[n].thresholds,norm=descriptors[n].norm)
		
		elif type(descriptors[n]) == MinkowskiAll:
			
			if mask_filename is None:
				v,V0,V1,V2 = conv_map.minkowskiFunctionals(descriptors[n].thresholds,norm=descriptors[n].norm)
			else:
				v,V0,V1,V2 = masked_conv_map.minkowskiFunctionals(descriptors[n].thresholds,norm=descriptors[n].norm)
			
			observables[descriptors[n].first:descriptors[n].last] = np.hstack((V0,V1,V2))
		
		elif type(descriptors[n]) == MinkowskiSingle:
			
			raise ValueError("Due to computational performance you have to measure all Minkowski functionals at once!")
		
		else:
			
			raise ValueError("Measurement of this descriptor not implemented!!!")

	#Return
	return observables
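
This function is meant to be used as a callback_loader, following the same pattern as the earlier examples; a hedged usage sketch (the surrounding variable names are assumed for illustration):

#Measure all descriptors over num_realizations maps of one model
map_ensemble = Ensemble.fromfilelist(range(1, num_realizations + 1))
map_ensemble.load(callback_loader=igs1_convergence_measure_all,
                  pool=pool,
                  model=model,
                  index=idx)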