Example #1
def test_index():

    #Decide the statistical descriptors to measure, and build an index
    idx = Indexer.stack([
        PowerSpectrum(l_edges),
        Peaks(thresholds_pk, norm=True),
        MinkowskiAll(thresholds_mf, norm=True),
        PDF(thresholds_mf, norm=True),
        Moments(connected=True)
    ])
    l = idx[0].l
    v = idx[1].midpoints
    v_mf = idx[2].midpoints

    #Initialize the statistical ensemble
    ens = Ensemble.fromfilelist(map_list)

    #Load measurements into the ensemble (this is the expensive part!!!)
    ens.load(callback_loader=convergence_measure_all, pool=None, index=idx)

    #Split the ensemble into power spectrum, peaks, PDF, the third Minkowski functional and moments
    mink_idx = idx[2].separate()
    subset_idx = Indexer([idx[0], idx[1], idx[3], mink_idx[2], idx[-1]])

    ens_pow, ens_pk, ens_pdf, ens_mink2, ens_mom = ens.split(subset_idx)

    #####################################################################

    #Plot to check
    fig, ax = plt.subplots(2, 2, figsize=(16, 16))
    for i in range(ens.num_realizations):

        ax[0, 0].plot(l, l * (l + 1) * ens_pow.data[i] / (2.0 * np.pi))
        ax[0, 1].plot(v, ens_pk.data[i])
        ax[1, 0].plot(v_mf, ens_pdf.data[i])
        ax[1, 1].plot(v_mf, ens_mink2.data[i])

    ax[0, 0].set_xscale("log")
    ax[0, 0].set_yscale("log")

    ax[0, 0].set_xlabel(r"$l$")
    ax[0, 0].set_ylabel(r"$l(l+1)P_l/2\pi$")

    ax[0, 1].set_xlabel(r"$\nu$")
    ax[0, 1].set_ylabel(r"$dN/d\nu$")

    ax[1, 0].set_xlabel(r"$\nu$")
    ax[1, 0].set_ylabel(r"$P(\nu)$")

    ax[1, 1].set_xlabel(r"$\nu$")
    ax[1, 1].set_ylabel(r"$V_2(\nu)$")

    fig.tight_layout()

    plt.savefig("conv_all.png")
    plt.clf()

    #Save moments to check
    np.savetxt("moments.txt", ens_mom.mean())
Example #2
def test_differentiate():

    thresholds = np.arange(-0.04, 0.12, 0.001)
    midpoints = 0.5 * (thresholds[:-1] + thresholds[1:])

    #Measure all three Minkowski functionals on a common set of thresholds
    index = Indexer.stack([MinkowskiAll(thresholds)])
    index_separate = Indexer(MinkowskiAll(thresholds).separate())

    #Load the measurements into an ensemble
    diff_ensemble = Ensemble.fromfilelist(map_list)
    diff_ensemble.load(callback_loader=convergence_measure_all, index=index)

    #V0 is the excursion-set area fraction (one minus the CDF), so a finite
    #difference with a negative step recovers the PDF
    ensemble_0 = diff_ensemble.split(index_separate)[0]
    ensemble_pdf = ensemble_0.differentiate(step=thresholds[0] - thresholds[1])

    fig, ax = plt.subplots()
    for i in range(ensemble_0.num_realizations):
        ax.plot(0.5 * (midpoints[:-1] + midpoints[1:]), ensemble_pdf[i])

    ax.set_xlabel(r"$\kappa$")
    ax.set_ylabel(r"$P(\kappa)$")

    fig.savefig("ensemble_differentiate.png")
Example #3
def measure_all_histograms(models,options,pool):

	#Look at a sample map
	sample_map = ConvergenceMap.fromfilename(models[0].getNames(z=1.0,realizations=[1])[0],loader=load_fits_default_convergence)
	#Initialize Gaussian shape noise generator for the sample map shape and angle
	generator = GaussianNoiseGenerator.forMap(sample_map)

	#Parsed from options
	num_realizations = options.getint("analysis","num_realizations")
	smoothing_scales = [float(scale) for scale in options.get("analysis","smoothing_scales").split(",")]
	bin_edges = np.ogrid[options.getfloat("analysis","bin_edge_low"):options.getfloat("analysis","bin_edge_high"):(options.getint("analysis","num_bins") - 2)*1j]
	bin_edges = np.hstack((-10.0,bin_edges,10.0))
	z = options.getfloat("analysis","redshift")

	bin_midpoints = 0.5*(bin_edges[1:] + bin_edges[:-1])
	

	#Create smoothing scale index for the histograms
	idx = Indexer.stack([PDF(bin_edges) for scale in smoothing_scales])

	#Build the data type of the structure array in output
	data_type = [(model.name,Ensemble) for model in models]
	#Append info about the smoothing scale
	data_type = [("Smooth",np.float),] + data_type

	#Create output struct array
	ensemble_array = np.zeros(len(smoothing_scales),dtype=data_type)

	#Write smoothing scale information
	ensemble_array["Smooth"] = np.array(smoothing_scales)
	
	#The for loop runs the distributed computations
	for model in models:

		#Build Ensemble instance with the maps to analyze
		map_ensemble = Ensemble.fromfilelist(range(1,num_realizations+1))
		
		#Measure the histograms and load the data in the ensemble
		map_ensemble.load(callback_loader=compute_map_histograms,pool=pool,simulation_set=model,smoothing_scales=smoothing_scales,index=idx,generator=generator,bin_edges=bin_edges,redshift=z)

		#Split the ensemble between different smoothing scales
		map_ensemble_list = map_ensemble.split(idx)

		#Add to output struct array
		ensemble_array[model.name] = np.array(map_ensemble_list)

	return ensemble_array
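A possible way to consume the returned structured array (a sketch; the models and options objects are the ones passed in above, and the per-model entries are Ensemble instances):

	#Hypothetical usage: print the smoothing scales, then take the mean
	#histogram of the first model at the first scale, averaged over realizations
	ensemble_array = measure_all_histograms(models, options, pool=None)
	print(ensemble_array["Smooth"])
	mean_histogram = ensemble_array[models[0].name][0].mean()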
Example #4
	#(the beginning of this snippet is truncated in the original listing:
	#feature_list, save_path and the l_edges array are built earlier on)
	if options.has_section("power_spectrum"):
		feature_list.append(PowerSpectrum(l_edges))

	if options.has_section("moments"):
		feature_list.append(Moments())

	if options.has_section("peaks"):
		th_peaks = np.ogrid[options.getfloat("peaks","th_min"):options.getfloat("peaks","th_max"):(options.getint("peaks","num_bins")+1)*1j]
		np.save(os.path.join(save_path,"th_peaks.npy"),0.5*(th_peaks[1:]+th_peaks[:-1]))
		feature_list.append(Peaks(th_peaks))

	if options.has_section("minkowski_functionals"):
		th_minkowski = np.ogrid[options.getfloat("minkowski_functionals","th_min"):options.getfloat("minkowski_functionals","th_max"):(options.getint("minkowski_functionals","num_bins")+1)*1j]
		np.save(os.path.join(save_path,"th_minkowski.npy"),0.5*(th_minkowski[1:]+th_minkowski[:-1]))
		feature_list.append(MinkowskiAll(th_minkowski))

	idx = Indexer.stack(feature_list)

	#Write an info file with all the analysis information
	with open(os.path.join(save_path,"INFO.txt"),"w") as infofile:
		infofile.write(write_info(options))

	#Build the progress bar
	pbar = progressbar.ProgressBar(widgets=widgets,maxval=len(models)*len(subfields)*len(smoothing_scales)).start()
	i = 0

	#Cycle through the models and perform the measurements of the selected features (create the appropriate directories to save the outputs)
	for model in models:

		if type(model)==CFHTemu1:
			dir_to_make = os.path.join(save_path,model._cosmo_id_string)
		elif type(model)==CFHTcov:
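The threshold grids above use the np.ogrid[start:stop:n*1j] idiom: a complex step count makes ogrid behave like np.linspace, returning n evenly spaced points that include both endpoints. A quick check:

	import numpy as np
	edges = np.ogrid[0.0:1.0:5j]   #array([0., 0.25, 0.5, 0.75, 1.])
	assert np.allclose(edges, np.linspace(0.0, 1.0, 5))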
Example #5
    #When running under MPI, worker processes wait here for tasks from the master
    if (pool is not None) and not (pool.is_master()):

        pool.wait()
        sys.exit(0)

    #Root path of IGS1 maps
    root_path = cmd_args.path
    num_realizations = cmd_args.num_realizations

    #Smoothing scales in arcmin
    smoothing_scales = [theta * arcmin for theta in [0.1, 0.5, 1.0, 2.0]]
    bin_edges = np.ogrid[-0.15:0.15:128j]
    bin_midpoints = 0.5 * (bin_edges[1:] + bin_edges[:-1])

    #Create smoothing scale index for the histogram
    idx = Indexer.stack([PDF(bin_edges) for scale in smoothing_scales])

    #Create IGS1 simulation set object to look for the right simulations
    simulation_set = IGS1(root_path=root_path)

    #Look at a sample map
    sample_map = ConvergenceMap.load(
        simulation_set.getNames(z=1.0, realizations=[1])[0])

    #Initialize Gaussian shape noise generator
    generator = GaussianNoiseGenerator.forMap(sample_map)

    #Build Ensemble instance with the maps to analyze
    map_ensemble = Ensemble.fromfilelist(range(1, num_realizations + 1))

    #Measure the histograms and load the data in the ensemble (the call below
    #mirrors Example #3; the compute_map_histograms callback name is borrowed
    #from there and is an assumption for this snippet)
    map_ensemble.load(callback_loader=compute_map_histograms, pool=pool,
                      simulation_set=simulation_set,
                      smoothing_scales=smoothing_scales, index=idx,
                      generator=generator, bin_edges=bin_edges)
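After loading, the ensemble can be split per smoothing scale exactly as in Example #3:

    map_ensemble_list = map_ensemble.split(idx)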