Code Example #1
File: defaults.py Project: TheisEizo/LensTools
def convergence_measure_all(filename,index,fits_loader=None):

	"""
	Measures all the statistical descriptors of a convergence map as indicated by the index instance
	
	"""

	logging.debug("Processing {0}".format(filename))

	#Load the map
	if fits_loader is not None:
		conv_map = ConvergenceMap.load(filename,format=fits_loader)
	else: 
		conv_map = ConvergenceMap.load(filename,format=load_fits_default_convergence)

	#Allocate memory for observables
	descriptors = index
	observables = np.zeros(descriptors.size)

	#Measure descriptors as directed by input
	for n in range(descriptors.num_descriptors):

		
		if type(descriptors[n]) == PowerSpectrum:
			
			l,observables[descriptors[n].first:descriptors[n].last] = conv_map.powerSpectrum(descriptors[n].l_edges)

		elif type(descriptors[n]) == Moments:

			observables[descriptors[n].first:descriptors[n].last] = conv_map.moments(connected=descriptors[n].connected)
		
		elif type(descriptors[n]) == Peaks:
			
			v,observables[descriptors[n].first:descriptors[n].last] = conv_map.peakCount(descriptors[n].thresholds,norm=descriptors[n].norm)

		elif type(descriptors[n]) == PDF:

			v,observables[descriptors[n].first:descriptors[n].last] = conv_map.pdf(descriptors[n].thresholds,norm=descriptors[n].norm)
		
		elif type(descriptors[n]) == MinkowskiAll:
			
			v,V0,V1,V2 = conv_map.minkowskiFunctionals(descriptors[n].thresholds,norm=descriptors[n].norm)
			observables[descriptors[n].first:descriptors[n].last] = np.hstack((V0,V1,V2))
		
		elif type(descriptors[n]) == MinkowskiSingle:
			
			raise ValueError("Due to computational performance you have to measure all Minkowski functionals at once!")
		
		else:
			
			raise ValueError("Measurement of this descriptor not implemented!!!")

	#Return
	return observables
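
To make the indexing scheme above concrete: each descriptor carries first and last offsets into a single flat observable vector, and the function writes each measurement into its own slice. The sketch below illustrates that packing pattern with a hypothetical stand-in class (not a LensTools descriptor; it only mimics the first/last bookkeeping used in the function).

import numpy as np

class Slot:
    """Hypothetical stand-in that records where one descriptor's measurements
    live inside the flat observable vector (mimics .first/.last above)."""
    def __init__(self, first, last):
        self.first, self.last = first, last

# e.g. 10 power spectrum bins followed by 5 peak-count bins
slots = [Slot(0, 10), Slot(10, 15)]
observables = np.zeros(15)

observables[slots[0].first:slots[0].last] = np.random.rand(10)  # mock P(l) values
observables[slots[1].first:slots[1].last] = np.random.rand(5)   # mock peak counts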
Code Example #2
	def fromConvPower(self,power_func,seed=0,**kwargs):

		"""
		This method uses a supplied power spectrum to generate correlated noise maps in real space via FFTs

		:param power_func: function that given a numpy array of l's returns a numpy array with the according Pl's (this is the input power spectrum); alternatively you can pass an array (l,Pl) and the power spectrum will be calculated with scipy's interpolation routines
		:type power_func: function with the above specifications, or numpy array (l,Pl) of shape (2,n) 

		:param seed: seed of the random generator 
		:type seed: int.

		:param kwargs: keyword arguments to be passed to power_func, or to the interpolate.interp1d routine

		:returns: ConvergenceMap instance of the same exact shape as the one used as blueprint

		"""
		assert self.label == "convergence"

		#Initialize random number generator
		np.random.seed(seed)

		#Generate a random Fourier realization and invert it
		ft_map = self._fourierMap(power_func,**kwargs)
		noise_map = irfft2(ft_map)

		return ConvergenceMap(noise_map,self.side_angle)
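
A short usage sketch of the method above. How the generator instance is obtained here (GaussianNoiseGenerator.forMap on an existing map, and the top-level import) is an assumption about the surrounding LensTools API; only fromConvPower itself and the two accepted forms of power_func come from the docstring.

import numpy as np
from lenstools import ConvergenceMap, GaussianNoiseGenerator  # import path assumed

# Illustrative power-law spectrum; amplitude and slope are placeholder values
def power_law(l, amplitude=1.0e-9, slope=-2.0):
    return amplitude * l**slope

conv_map = ConvergenceMap.load("conv0001.fit")   # hypothetical filename
gen = GaussianNoiseGenerator.forMap(conv_map)    # assumed constructor

# Either a callable P(l)...
noise_a = gen.fromConvPower(power_law, seed=1)

# ...or a (2, n) array of (l, Pl) samples, interpolated internally per the docstring
l = np.logspace(2.0, 5.0, 100)
noise_b = gen.fromConvPower(np.array([l, power_law(l)]), seed=2)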
Code Example #3
File: defaults.py Project: TheisEizo/LensTools
def peaks_loader(filename,thresholds):

	logging.debug("Processing {0} peaks".format(filename))
	conv_map = ConvergenceMap.load(filename,format=load_fits_default_convergence)

	v,pk = conv_map.peakCount(thresholds,norm=True)
	return v
Code Example #4
def peaks_loader(filename, thresholds):

    logging.debug("Processing {0} peaks".format(filename))
    conv_map = ConvergenceMap.load(filename,
                                   format=load_fits_default_convergence)

    v, pk = conv_map.peakCount(thresholds, norm=True)
    return v
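
A minimal driver for the loader above; the filename and the threshold range are placeholder values (presumably expressed in units of the map's rms, given norm=True).

import numpy as np

# Convergence thresholds at which peaks are counted (placeholder range)
thresholds = np.arange(-2.0, 5.0, 0.5)

# Note: as written, the loader returns peakCount's first output (v) while the
# counts themselves (pk) are computed and then discarded.
v = peaks_loader("conv0001.fit", thresholds)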
Code Example #5
    def convergence(self):
        """
        Reconstructs the convergence from the E component of the shear

        :returns: new ConvergenceMap instance

        """

        #Compute Fourier transforms if it wasn't done before
        if not hasattr(self, "fourier_E"):
            l_edges = np.array([200.0, 400.0])
            l, EE, BB, EB = self.decompose(l_edges, keep_fourier=True)

        #Invert the Fourier transform to go back to real space
        conv = irfft2(self.fourier_E)

        #Return the ConvergenceMap instance
        return ConvergenceMap(conv, self.side_angle)
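
A usage sketch for the reconstruction above. The class name ShearMap, its top-level import and its load method are assumptions about the surrounding LensTools API; convergence() and powerSpectrum() are taken from the snippets in this section.

import numpy as np
from lenstools import ShearMap  # class name and import path assumed

shear = ShearMap.load("shear0001.fit")   # hypothetical shear map file
kappa = shear.convergence()              # E-mode reconstruction shown above

# The result is a ConvergenceMap, so the statistics above apply to it directly
l_edges = np.arange(200.0, 10000.0, 200.0)
l, Pl = kappa.powerSpectrum(l_edges)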
Code Example #6
File: defaults.py Project: TheisEizo/LensTools
def default_callback_loader(filename,l_edges):
	"""
	
	Default ensemble loader: reads a FITS data file containing a convergence map and measures its power spectrum

	:param args: A dictionary that contains all the relevant parameters as keys. Must have a "map_id" key
	:type args: Dictionary

	:returns: ndarray of the measured statistics

	:raises: AssertionError if the input dictionary doesn't have the required keywords

	"""

	logging.debug("Processing {0} power".format(filename))

	conv_map = ConvergenceMap.load(filename,format=load_fits_default_convergence)
	l,Pl = conv_map.powerSpectrum(l_edges)
	return Pl
Code Example #7
def default_callback_loader(filename, l_edges):
    """
	
	Default ensemble loader: reads a FITS data file containing a convergence map and measures its power spectrum

	:param args: A dictionary that contains all the relevant parameters as keys. Must have a "map_id" key
	:type args: Dictionary

	:returns: ndarray of the measured statistics

	:raises: AssertionError if the input dictionary doesn't have the required keywords

	"""

    logging.debug("Processing {0} power".format(filename))

    conv_map = ConvergenceMap.load(filename,
                                   format=load_fits_default_convergence)
    l, Pl = conv_map.powerSpectrum(l_edges)
    return Pl
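
A sketch of how this callback might be used to build an ensemble of power spectra by hand; the filenames and bin edges are placeholders, and LensTools' own ensemble machinery is not assumed here.

import numpy as np

l_edges = np.arange(200.0, 50000.0, 200.0)
filenames = ["conv0001.fit", "conv0002.fit"]   # hypothetical realizations

# One power spectrum per map, stacked into an (N_maps, N_bins) array
power_ensemble = np.array([default_callback_loader(f, l_edges) for f in filenames])
mean_power = power_ensemble.mean(axis=0)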
Code Example #8
	def getShapeNoise(self,z=1.0,ngal=15.0*arcmin**-2,seed=0):

		"""
		This method generates a white, Gaussian shape noise map for the given redshift of the map

		:param z: single redshift of the background sources on the map
		:type z: float.

		:param ngal: assumed angular number density of galaxies (must have units of angle^-2)
		:type ngal: float.

		:param seed: seed of the random generator
		:type seed: int.

		:returns: ConvergenceMap instance of the same exact shape as the one used as blueprint

		"""

		#Sanity check
		assert (ngal.unit**-0.5).physical_type=="angle"

		if self.label == "convergence":
		
			#Compute shape noise amplitude
			pixel_angular_side = self.side_angle / self.shape[0]
			sigma = ((0.15 + 0.035*z) / (pixel_angular_side * np.sqrt(ngal))).decompose().value

			#Generate shape noise
			np.random.seed(seed)
			noise_map = np.random.normal(loc=0.0,scale=sigma,size=self.shape) 

			#Build the ConvergenceMap object
			return ConvergenceMap(noise_map,self.side_angle)

		else:

			raise ValueError("Only convergence implemented so far!!!")
Code Example #9
def convergence_measure_all(filename, index, fits_loader=None):
    """
	Measures all the statistical descriptors of a convergence map as indicated by the index instance
	
	"""

    logging.debug("Processing {0}".format(filename))

    #Load the map
    if fits_loader is not None:
        conv_map = ConvergenceMap.load(filename, format=fits_loader)
    else:
        conv_map = ConvergenceMap.load(filename,
                                       format=load_fits_default_convergence)

    #Allocate memory for observables
    descriptors = index
    observables = np.zeros(descriptors.size)

    #Measure descriptors as directed by input
    for n in range(descriptors.num_descriptors):

        if type(descriptors[n]) == PowerSpectrum:

            l, observables[descriptors[n].first:descriptors[n].last] = \
                conv_map.powerSpectrum(descriptors[n].l_edges)

        elif type(descriptors[n]) == Moments:

            observables[descriptors[n].first:descriptors[n].last] = \
                conv_map.moments(connected=descriptors[n].connected)

        elif type(descriptors[n]) == Peaks:

            v, observables[descriptors[n].first:descriptors[n].last] = \
                conv_map.peakCount(descriptors[n].thresholds,
                                   norm=descriptors[n].norm)

        elif type(descriptors[n]) == PDF:

            v, observables[descriptors[n].first:descriptors[n].last] = \
                conv_map.pdf(descriptors[n].thresholds,
                             norm=descriptors[n].norm)

        elif type(descriptors[n]) == MinkowskiAll:

            v, V0, V1, V2 = conv_map.minkowskiFunctionals(
                descriptors[n].thresholds, norm=descriptors[n].norm)
            observables[descriptors[n].first:descriptors[n].last] = \
                np.hstack((V0, V1, V2))

        elif type(descriptors[n]) == MinkowskiSingle:

            raise ValueError(
                "Due to computational performance you have to measure all Minkowski functionals at once!"
            )

        else:

            raise ValueError(
                "Measurement of this descriptor not implemented!!!")

    #Return
    return observables