Example #1
def eb_modes(cmd_args):

    #Plot setup
    fig, ax = plt.subplots()

    #Load in the shear map, compute E and B modes power spectrum
    shear = ShearMap.load(
        os.path.join(dataExtern(), "WLshear_z2.00_0001r.fits"))
    l_edges = np.linspace(200., 50000., 50)
    l, ee, bb, eb = shear.decompose(l_edges)

    #Plot the power spectra and prediction from NICAEA
    ax.plot(l, l * (l + 1) * ee / (2. * np.pi), label=r"$P^{EE}$", color="red")
    cosmo = Nicaea(Om0=0.26, Ode0=0.74, w0=-1, sigma8=0.8)
    ax.plot(l,
            l * (l + 1) * cosmo.convergencePowerSpectrum(l, z=2.0) /
            (2. * np.pi),
            label=r"$P^{\kappa\kappa}{\rm (NICAEA)}$",
            linestyle="--",
            color="red")
    ax.plot(l,
            l * (l + 1) * bb / (2. * np.pi),
            label=r"$P^{BB}$",
            color="blue")

    #Labels
    ax.set_xscale("log")
    ax.set_yscale("log")
    ax.set_xlabel(r"$\ell$", fontsize=22)
    ax.set_ylabel(r"$\ell(\ell+1)P_\ell/2\pi$", fontsize=22)
    ax.legend(loc="upper left")

    #Save
    fig.savefig("eb_modes." + cmd_args.type)
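A minimal driver sketch for the function above, assuming matplotlib, numpy and the lenstools imports are in scope; the argparse.Namespace with a "type" attribute mirrors how cmd_args.type is used in the savefig call (the "png" value is illustrative, not part of the original snippet):

import argparse

if __name__ == "__main__":
    #cmd_args only needs a "type" attribute (the output image format)
    cmd_args = argparse.Namespace(type="png")
    eb_modes(cmd_args)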
Example #2
def test_Bmode():

	pure_E = np.zeros((512,257),dtype=np.complex128)
	pure_B = np.zeros((512,257),dtype=np.complex128)

	pure_B[0,250] = 2.0 + 0.0j
	pure_B[250,0] = 2.0 + 0.0j
	pure_B[250,250] = 2.0 + 0.0j

	new_shear_map = ShearMap.fromEBmodes(pure_E,pure_B,angle=1.95*deg)

	fig,ax = plt.subplots(1,3,figsize=(24,8))
	ax1 = ax[1].imshow(new_shear_map.data[0],origin="lower",cmap=plt.cm.PRGn,extent=[0,new_shear_map.side_angle.value,0,new_shear_map.side_angle.value])
	ax2 = ax[2].imshow(new_shear_map.data[1],origin="lower",cmap=plt.cm.PRGn,extent=[0,new_shear_map.side_angle.value,0,new_shear_map.side_angle.value])
	plt.colorbar(ax1,ax=ax[1])
	plt.colorbar(ax2,ax=ax[2])
	new_shear_map.sticks(ax[0],pixel_step=10,multiplier=1.5)

	ax[0].set_xlabel(r"$x$(deg)")
	ax[0].set_ylabel(r"$y$(deg)")
	
	ax[1].set_xlabel(r"$x$(deg)")
	ax[1].set_ylabel(r"$y$(deg)")
	ax[1].set_title(r"$\gamma_1$")
	
	ax[2].set_xlabel(r"$x$(deg)")
	ax[2].set_ylabel(r"$y$(deg)")
	ax[2].set_title(r"$\gamma_2$")

	fig.tight_layout()

	plt.savefig("pure_B.png")
	plt.clf()
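A quick round-trip check one could append to this test, as a sketch: decomposing the synthetic map back into E and B modes should put essentially all of the power in the B channel. The import paths and the l_edges binning below are assumptions, not part of the original test:

import numpy as np
from astropy.units import deg
from lenstools import ShearMap

pure_E = np.zeros((512, 257), dtype=np.complex128)
pure_B = np.zeros((512, 257), dtype=np.complex128)
pure_B[0, 250] = pure_B[250, 0] = pure_B[250, 250] = 2.0 + 0.0j
new_shear_map = ShearMap.fromEBmodes(pure_E, pure_B, angle=1.95 * deg)

#The E channel should carry only numerical noise compared to the B channel
l_edges = np.linspace(200.0, 50000.0, 50)
l, ee, bb, eb = new_shear_map.decompose(l_edges)
assert bb.sum() > ee.sum()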
Example #3
def test_Bmode():

    pure_E = np.zeros((512, 257), dtype=np.complex128)
    pure_B = np.zeros((512, 257), dtype=np.complex128)

    pure_B[0, 250] = 2.0 + 0.0j
    pure_B[250, 0] = 2.0 + 0.0j
    pure_B[250, 250] = 2.0 + 0.0j

    new_shear_map = ShearMap.fromEBmodes(pure_E, pure_B, angle=1.95 * deg)

    fig, ax = plt.subplots(1, 3, figsize=(24, 8))
    ax1 = ax[1].imshow(new_shear_map.data[0],
                       origin="lower",
                       cmap=plt.cm.PRGn,
                       extent=[
                           0, new_shear_map.side_angle.value, 0,
                           new_shear_map.side_angle.value
                       ])
    ax2 = ax[2].imshow(new_shear_map.data[1],
                       origin="lower",
                       cmap=plt.cm.PRGn,
                       extent=[
                           0, new_shear_map.side_angle.value, 0,
                           new_shear_map.side_angle.value
                       ])
    plt.colorbar(ax1, ax=ax[1])
    plt.colorbar(ax2, ax=ax[2])
    new_shear_map.sticks(ax[0], pixel_step=10, multiplier=1.5)

    ax[0].set_xlabel(r"$x$(deg)")
    ax[0].set_ylabel(r"$y$(deg)")

    ax[1].set_xlabel(r"$x$(deg)")
    ax[1].set_ylabel(r"$y$(deg)")
    ax[1].set_title(r"$\gamma_1$")

    ax[2].set_xlabel(r"$x$(deg)")
    ax[2].set_ylabel(r"$y$(deg)")
    ax[2].set_title(r"$\gamma_2$")

    fig.tight_layout()

    plt.savefig("pure_B.png")
    plt.clf()
Example #4
#Custom format loader: read the two shear components from two separate FITS files
def two_file_loader(filename1, filename2):

    shear_file_1 = fits.open(filename1)
    angle = shear_file_1[0].header["ANGLE"]
    gamma = shear_file_1[0].data.astype(np.float64)
    shear_file_1.close()

    shear_file_2 = fits.open(filename2)
    assert shear_file_2[0].header["ANGLE"] == angle
    gamma = np.array((gamma, shear_file_2[0].data.astype(np.float64)))
    shear_file_2.close()

    return angle * deg, gamma


test_map = ShearMap.load("Data/shear1.fit",
                         filename2="Data/shear2.fit",
                         format=two_file_loader)
test_map_conv = ConvergenceMap.load("Data/conv.fit")

l_edges = np.arange(200.0, 50000.0, 200.0)


def test_visualize1():

    assert hasattr(test_map, "data")
    assert hasattr(test_map, "side_angle")
    assert test_map.data.shape[0] == 2

    test_map.setAngularUnits(arcsec)
    test_map.visualize(colorbar=True)
    test_map.fig.tight_layout()
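For completeness, a hypothetical helper that writes FITS files with the layout two_file_loader expects (one shear component per file in the primary HDU, plus an "ANGLE" keyword); the helper name, file names, map size and random data are placeholders chosen only so the loader above has something to read:

import os
import numpy as np
from astropy.io import fits

def write_test_shear_files(angle_deg=3.5, npix=128):
    os.makedirs("Data", exist_ok=True)
    for name in ("Data/shear1.fit", "Data/shear2.fit"):
        hdu = fits.PrimaryHDU(np.random.randn(npix, npix).astype(np.float64))
        #two_file_loader reads this keyword and checks it matches between the two files
        hdu.header["ANGLE"] = angle_deg
        hdu.writeto(name, overwrite=True)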
Example #5
def test_ray_simple():

	z_final = 2.0

	start = time.time()
	last_timestamp = start

	#Start a bucket of light rays from these positions
	b = np.linspace(0.0,tracer.lens[0].side_angle.to(deg).value,512)
	xx,yy = np.meshgrid(b,b)
	pos = np.array([xx,yy]) * deg

	#Trace the rays
	fin = tracer.shoot(pos,z=z_final)

	now = time.time()
	logging.info("Ray tracing completed in {0:.3f}s".format(now-last_timestamp))
	last_timestamp = now

	#Build the deflection plane
	dfl = DeflectionPlane(fin.value-pos.value,angle=tracer.lens[0].side_angle,redshift=tracer.redshift[-1],cosmology=tracer.lens[0].cosmology,unit=pos.unit)

	#Compute shear and convergence
	conv = dfl.convergence()
	shear = dfl.shear()
	omega = dfl.omega()

	now = time.time()
	logging.info("Weak lensing calculations completed in {0:.3f}s".format(now-last_timestamp))
	last_timestamp = now

	#Finally visualize the result
	conv.visualize(colorbar=True)
	conv.savefig("raytraced_convergence.png")
	omega.visualize(colorbar=True)
	omega.savefig("raytraced_omega.png")
	shear.visualize(colorbar=True)
	shear.savefig("raytraced_shear.png")

	#We want to plot the power spectrum of the raytraced maps
	fig,ax = plt.subplots()
	l_edges = np.arange(200.0,10000.0,100.0)
	l,Pl = conv.powerSpectrum(l_edges)
	ax.plot(l,l*(l+1)*Pl/(2.0*np.pi),label="From ray positions")

	#And why not, E and B modes too
	figEB,axEB = plt.subplots()
	l,EEl,BBl,EBl = shear.decompose(l_edges)
	axEB.plot(l,l*(l+1)*EEl/(2.0*np.pi),label="EE From ray positions",color="black")
	axEB.plot(l,l*(l+1)*BBl/(2.0*np.pi),label="BB From ray positions",color="green")
	axEB.plot(l,l*(l+1)*np.abs(EBl)/(2.0*np.pi),label="EB From ray positions",color="blue")

	#Now compute the shear and convergence by raytracing the actual jacobians (computationally more expensive because the jacobian is computed at every step)
	finJ = tracer.shoot(pos,z=z_final,kind="jacobians")
	conv = ConvergenceMap(data=1.0-0.5*(finJ[0]+finJ[3]),angle=conv.side_angle)
	shear = ShearMap(data=np.array([0.5*(finJ[3]-finJ[0]),-0.5*(finJ[1]+finJ[2])]),angle=shear.side_angle)

	now = time.time()
	logging.info("Jacobian ray tracing completed in {0:.3f}s".format(now-last_timestamp))
	last_timestamp = now

	#Finally visualize the result
	conv.visualize(colorbar=True)
	conv.savefig("raytraced_convergence_jacobian.png")
	shear.visualize(colorbar=True)
	shear.savefig("raytraced_shear_jacobian.png")

	#We want to plot the power spectrum of the raytraced maps
	l,Pl = conv.powerSpectrum(l_edges)
	ax.plot(l,l*(l+1)*Pl/(2.0*np.pi),label="From Jacobians")
	ax.set_xlabel(r"$l$")
	ax.set_ylabel(r"$l(l+1)P_l/2\pi$")
	ax.set_xscale("log")
	ax.set_yscale("log")
	ax.legend()
	fig.savefig("raytracing_conv_power.png")

	#And why not, E and B modes too
	axEB.plot(l,l*(l+1)*EEl/(2.0*np.pi),label="EE From jacobians",color="black",linestyle="--")
	axEB.plot(l,l*(l+1)*BBl/(2.0*np.pi),label="BB From jacobians",color="green",linestyle="--")
	axEB.plot(l,l*(l+1)*np.abs(EBl)/(2.0*np.pi),label="EB From jacobians",color="blue",linestyle="--")
	axEB.set_xlabel(r"$l$")
	axEB.set_ylabel(r"$l(l+1)P_l/2\pi$")
	axEB.set_xscale("log")
	axEB.set_yscale("log")
	axEB.legend(loc="lower right",prop={"size":10})
	figEB.savefig("raytracing_shear_power.png")

	now = time.time()
	logging.info("Total runtime {0:.3f}s".format(now-start))
Example #6
def singleRedshift(pool,batch,settings,id):

	#Safety check
	assert isinstance(pool,MPIWhirlPool) or (pool is None)
	assert isinstance(batch,SimulationBatch)

	parts = id.split("|")

	if len(parts)==2:

		assert isinstance(settings,MapSettings)
	
		#Separate the id into cosmo_id and geometry_id
		cosmo_id,geometry_id = parts

		#Get a handle on the model
		model = batch.getModel(cosmo_id)

		#Get the corresponding simulation collection and map batch handlers
		collection = [model.getCollection(geometry_id)]
		map_batch = collection[0].getMapSet(settings.directory_name)
		cut_redshifts = np.array([0.0])

	elif len(parts)==1:

		assert isinstance(settings,TelescopicMapSettings)

		#Get a handle on the model
		model = batch.getModel(parts[0])

		#Get the corresponding simulation collection and map batch handlers
		map_batch = model.getTelescopicMapSet(settings.directory_name)
		collection = map_batch.mapcollections
		cut_redshifts = map_batch.redshifts

	else:
		
		if (pool is None) or (pool.is_master()):
			logdriver.error("Format error in {0}: too many '|'".format(id))
		sys.exit(1)


	#Override the settings with the previously pickled ones, if prompted by user
	if settings.override_with_local:

		local_settings_file = os.path.join(map_batch.home_subdir,"settings.p")
		settings = MapSettings.read(local_settings_file)
		assert isinstance(settings,MapSettings)

		if (pool is None) or (pool.is_master()):
			logdriver.warning("Overriding settings with the previously pickled ones at {0}".format(local_settings_file))

	##################################################################
	##################Settings read###################################
	##################################################################

	#Set random seed to generate the realizations
	if pool is not None:
		np.random.seed(settings.seed + pool.rank)
	else:
		np.random.seed(settings.seed)

	#Read map angle,redshift and resolution from the settings
	map_angle = settings.map_angle
	source_redshift = settings.source_redshift
	resolution = settings.map_resolution

	if len(parts)==2:

		#########################
		#Use a single collection#
		#########################

		#Read the plane set we should use
		plane_set = (settings.plane_set,)

		#Randomization
		nbody_realizations = (settings.mix_nbody_realizations,)
		cut_points = (settings.mix_cut_points,)
		normals = (settings.mix_normals,)
		map_realizations = settings.lens_map_realizations

	elif len(parts)==1:

		#######################
		#####Telescopic########
		#######################

		#Check that we have enough info
		for attr_name in ["plane_set","mix_nbody_realizations","mix_cut_points","mix_normals"]:
			if len(getattr(settings,attr_name))!=len(collection):
				if (pool is None) or (pool.is_master()):
					logdriver.error("You need to specify a setting {0} for each collection!".format(attr_name))
				sys.exit(1)

		#Read the plane set we should use
		plane_set = settings.plane_set

		#Randomization
		nbody_realizations = settings.mix_nbody_realizations
		cut_points = settings.mix_cut_points
		normals = settings.mix_normals
		map_realizations = settings.lens_map_realizations



	#Decide which map realizations this MPI task will take care of (if pool is None, all of them)
	try:
		realization_offset = settings.first_realization - 1
	except AttributeError:
		realization_offset = 0

	if pool is None:
		first_map_realization = 0 + realization_offset
		last_map_realization = map_realizations + realization_offset
		realizations_per_task = map_realizations
		logdriver.debug("Generating lensing map realizations from {0} to {1}".format(first_map_realization+1,last_map_realization))
	else:
		assert map_realizations%(pool.size+1)==0,"Perfect load-balancing enforced, map_realizations must be a multiple of the number of MPI tasks!"
		realizations_per_task = map_realizations//(pool.size+1)
		first_map_realization = realizations_per_task*pool.rank + realization_offset
		last_map_realization = realizations_per_task*(pool.rank+1) + realization_offset
		logdriver.debug("Task {0} will generate lensing map realizations from {1} to {2}".format(pool.rank,first_map_realization+1,last_map_realization))

	#Planes will be read from this path
	plane_path = os.path.join("{0}","ic{1}","{2}")

	if (pool is None) or (pool.is_master()):
		for c,coll in enumerate(collection):
			logdriver.info("Reading planes from {0}".format(plane_path.format(coll.storage_subdir,"-".join([str(n) for n in nbody_realizations[c]]),plane_set[c])))

	#Plane info file is the same for all collections
	if (not hasattr(settings,"plane_info_file")) or (settings.plane_info_file is None):
		info_filename = batch.syshandler.map(os.path.join(plane_path.format(collection[0].storage_subdir,nbody_realizations[0][0],plane_set[0]),"info.txt"))
	else:
		info_filename = settings.plane_info_file

	if (pool is None) or (pool.is_master()):
		logdriver.info("Reading lens plane summary information from {0}".format(info_filename))

	#Read how many snapshots are available
	with open(info_filename,"r") as infofile:
		num_snapshots = len(infofile.readlines())

	#Save path for the maps
	save_path = map_batch.storage_subdir

	if (pool is None) or (pool.is_master()):
		logdriver.info("Lensing maps will be saved to {0}".format(save_path))

	begin = time.time()

	#Log initial memory load
	peak_memory_task,peak_memory_all = peakMemory(),peakMemoryAll(pool)
	if (pool is None) or (pool.is_master()):
		logstderr.info("Initial memory usage: {0:.3f} (task), {1[0]:.3f} (all {1[1]} tasks)".format(peak_memory_task,peak_memory_all))

	#We need one of these for cycles for each map random realization
	for rloc,r in enumerate(range(first_map_realization,last_map_realization)):

		#Instantiate the RayTracer
		tracer = RayTracer()

		#Force garbage collection
		gc.collect()

		#Start timestep
		start = time.time()
		last_timestamp = start

		#############################################################
		###############Add the lenses to the system##################
		#############################################################

		#Open the info file to read the lens specifications (assume the info file is the same for all nbody realizations)
		infofile = open(info_filename,"r")

		#Read the info file line by line, and decide if we should add the particular lens corresponding to that line or not
		for s in range(num_snapshots):

			#Read the line
			line = infofile.readline().strip("\n")

			#Stop if there is nothing more to read
			if line=="":
				break

			#Split the line in snapshot,distance,redshift
			line = line.split(",")

			snapshot_number = int(line[0].split("=")[1])
		
			distance,unit = line[1].split("=")[1].split(" ")
			if unit=="Mpc/h":
				distance = float(distance)*model.Mpc_over_h
			else:
				distance = float(distance)*getattr(u,unit)

			lens_redshift = float(line[2].split("=")[1])

			#Select the right collection
			for n,z in enumerate(cut_redshifts):
				if lens_redshift>=z:
					c = n

			#Randomization of planes
			nbody = np.random.randint(low=0,high=len(nbody_realizations[c]))
			cut = np.random.randint(low=0,high=len(cut_points[c]))
			normal = np.random.randint(low=0,high=len(normals[c]))

			#Log to user
			logdriver.debug("Realization,snapshot=({0},{1}) --> NbodyIC,cut_point,normal=({2},{3},{4})".format(r,s,nbody_realizations[c][nbody],cut_points[c][cut],normals[c][normal]))

			#Add the lens to the system
			logdriver.info("Adding lens at redshift {0}".format(lens_redshift))
			plane_name = batch.syshandler.map(os.path.join(plane_path.format(collection[c].storage_subdir,nbody_realizations[c][nbody],plane_set[c]),settings.plane_name_format.format(snapshot_number,cut_points[c][cut],normals[c][normal],settings.plane_format)))
			tracer.addLens((plane_name,distance,lens_redshift))

		#Close the infofile
		infofile.close()

		now = time.time()
		logdriver.info("Plane specification reading completed in {0:.3f}s".format(now-start))
		last_timestamp = now

		#Rearrange the lenses according to redshift and roll them randomly along the axes
		tracer.reorderLenses()

		now = time.time()
		logdriver.info("Reordering completed in {0:.3f}s".format(now-last_timestamp))
		last_timestamp = now

		#Start a bucket of light rays from a regular grid of initial positions
		b = np.linspace(0.0,map_angle.value,resolution)
		xx,yy = np.meshgrid(b,b)
		pos = np.array([xx,yy]) * map_angle.unit

		#Trace the ray deflections
		jacobian = tracer.shoot(pos,z=source_redshift,kind="jacobians")

		now = time.time()
		logdriver.info("Jacobian ray tracing for realization {0} completed in {1:.3f}s".format(r+1,now-last_timestamp))
		last_timestamp = now

		#Compute shear,convergence and omega from the jacobians
		if settings.convergence:
		
			convMap = ConvergenceMap(data=1.0-0.5*(jacobian[0]+jacobian[3]),angle=map_angle)
			savename = batch.syshandler.map(os.path.join(save_path,"WLconv_z{0:.2f}_{1:04d}r.{2}".format(source_redshift,r+1,settings.format)))
			logdriver.info("Saving convergence map to {0}".format(savename)) 
			convMap.save(savename)
			logdriver.debug("Saved convergence map to {0}".format(savename)) 

		##############################################################################################################################
	
		if settings.shear:
		
			shearMap = ShearMap(data=np.array([0.5*(jacobian[3]-jacobian[0]),-0.5*(jacobian[1]+jacobian[2])]),angle=map_angle)
			savename = batch.syshandler.map(os.path.join(save_path,"WLshear_z{0:.2f}_{1:04d}r.{2}".format(source_redshift,r+1,settings.format)))
			logdriver.info("Saving shear map to {0}".format(savename))
			shearMap.save(savename) 

		##############################################################################################################################
	
		if settings.omega:
		
			omegaMap = Spin0(data=-0.5*(jacobian[2]-jacobian[1]),angle=map_angle)
			savename = batch.syshandler.map(os.path.join(save_path,"WLomega_z{0:.2f}_{1:04d}r.{2}".format(source_redshift,r+1,settings.format)))
			logdriver.info("Saving omega map to {0}".format(savename))
			omegaMap.save(savename)

		now = time.time()
		
		#Log peak memory usage to stdout
		peak_memory_task,peak_memory_all = peakMemory(),peakMemoryAll(pool)
		logdriver.info("Weak lensing calculations for realization {0} completed in {1:.3f}s".format(r+1,now-last_timestamp))
		logdriver.info("Peak memory usage: {0:.3f} (task), {1[0]:.3f} (all {1[1]} tasks)".format(peak_memory_task,peak_memory_all))

		#Log progress and peak memory usage to stderr
		if (pool is None) or (pool.is_master()):
			logstderr.info("Progress: {0:.2f}%, peak memory usage: {1:.3f} (task), {2[0]:.3f} (all {2[1]} tasks)".format(100*(rloc+1.)/realizations_per_task,peak_memory_task,peak_memory_all))
	
	#Safety sync barrier
	if pool is not None:
		pool.comm.Barrier()

	if (pool is None) or (pool.is_master()):	
		now = time.time()
		logdriver.info("Total runtime {0:.3f}s".format(now-begin))
Example #7

#Custom format loader: read the two shear components from two separate FITS files
def two_file_loader(filename1,filename2):

	shear_file_1 = fits.open(filename1)
	angle = shear_file_1[0].header["ANGLE"]
	gamma = shear_file_1[0].data.astype(np.float64)
	shear_file_1.close()

	shear_file_2 = fits.open(filename2)
	assert shear_file_2[0].header["ANGLE"] == angle
	gamma = np.array((gamma,shear_file_2[0].data.astype(np.float64)))
	shear_file_2.close()

	return angle*deg,gamma




test_map = ShearMap.load("Data/shear1.fit",filename2="Data/shear2.fit",format=two_file_loader)

l_edges = np.arange(200.0,50000.0,200.0)

l,EE,BB,EB = test_map.decompose(l_edges,keep_fourier=True)

assert l.shape == EE.shape == BB.shape == EB.shape

fig,ax = plt.subplots()
ax.plot(l,l*(l+1)*EE/(2.0*np.pi),label=r"$P_{EE}$")
ax.plot(l,l*(l+1)*BB/(2.0*np.pi),label=r"$P_{BB}$")
ax.plot(l,l*(l+1)*np.abs(EB)/(2.0*np.pi),label=r"$\vert P_{EB}\vert$")

ax.set_xscale("log")
ax.set_yscale("log")
ax.set_xlabel(r"$l$")