Example #1
    def getpolsky(self, debug=False, celestial=True):
        """Create a realisation of the *polarised* sky.

        Parameters
        ----------
        debug : boolean, optional
            Return intermediate products for debugging. Default is False.
        celestial : boolean, optional
            If True (default), maps are returned in celestial co-ordinates.

        Returns
        -------
        skymap : np.ndarray[freq, pol, pixel]

        Notes
        -----
        This routine tries to make a decent simulation of the galactic
        polarised emission.
        """

        # Load and smooth the Faraday rotation map to get an estimate for the
        # width of the distribution
        sigma_phi = healpy.ud_grade(
            healpy.smoothing(np.abs(self._faraday), fwhm=np.radians(10.0), verbose=False), self.nside
        )

        # Set the correlation length in phi
        xiphi = 1.0

        lmax = 3 * self.nside - 1
        la = np.arange(lmax + 1)

        # The angular powerspectrum of polarisation fluctuations, we will use
        # this to generate the base fluctuations
        def angular(l):
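            # Give l = 0 a huge value so the monopole power is effectively zero rather than infinite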
            l[np.where(l == 0)] = 1.0e16
            return (l / 100.0) ** -2.8

        # Define a grid in phi that we will use to model the Faraday emission.
        dphi = self._dphi
        maxphi = self._maxphi
        nphi = 2 * int(maxphi / dphi)
        phifreq = np.fft.fftfreq(nphi, d=(1.0 / (dphi * nphi)))

        # Create the weights required to turn random variables into alms
        ps_weight = (angular(la[:, np.newaxis]) / 2.0) ** 0.5

        # Generate random maps in the Fourier conjugate of phi. This is
        # equivalent to generating random uncorrelated phi maps and FFting
        # into the conjugate.
        map2 = np.zeros((12 * self.nside ** 2, nphi), dtype=np.complex128)
        print "SHTing to give random maps"
        for i in range(nphi):
            w = np.random.standard_normal((lmax + 1, 2 * lmax + 1, 2)).view(np.complex128)[..., 0]
            w *= ps_weight
            map2[:, i] = hputil.sphtrans_inv_complex(w, self.nside)

        # Weight the conj-phi direction to give the phi correlation structure.
        pcfreq = np.fft.fftfreq(nphi, d=dphi)
        map2 *= np.exp(-2 * (np.pi * xiphi * pcfreq[np.newaxis, :]) ** 2)

        # We need to FFT back into phi, but as scipy does not have an inplace
        # transform, we can do this in blocks, replacing as we go.
        chunksize = self.nside ** 2
        nchunk = 12

        for ci in range(nchunk):
            si = ci * chunksize
            ei = (ci + 1) * chunksize

            map2[si:ei] = np.fft.ifft(map2[si:ei], axis=1)

        # numpy's var routine is extremely memory inefficient. Use a crappy
        # chunking one.
        map2 /= 2.0 * chunk_var(map2) ** 0.5

        w = np.exp(-0.25 * (phifreq[np.newaxis, :] / sigma_phi[:, np.newaxis]) ** 2)

        # Calculate the normalisation explicitly (required when Faraday depth
        # is small, as grid is too large).
        w /= w.sum(axis=1)[:, np.newaxis]

        print "Applying phi weighting"
        map2 *= w

        if not debug:
            del w

        def ptrans(phi, freq, dfreq):

            dx = dfreq / freq

            alpha = 2.0 * phi * 3e2 ** 2 / freq ** 2

            return np.exp(1.0j * alpha) * np.sinc(alpha * dx / np.pi)
            # return np.exp(1.0J * alpha)

        fa = self.nu_pixels
        df = np.median(np.diff(fa))

        pta = ptrans(phifreq[:, np.newaxis], fa[np.newaxis, :], df) / dphi

        print "Transforming to freq"
        map4 = np.dot(map2, pta)

        if not debug:
            del map2

        print "Rescaling freq"
        map4a = np.abs(map4)
        map4 = map4 * np.tanh(map4a) / map4a

        del map4a

        map5 = np.zeros((self.nu_num, 4, 12 * self.nside ** 2), dtype=np.float64)

        print "Scaling by T"
        # Unflatten the intensity by multiplying by the unpolarised realisation
        map5[:, 0] = self.getsky(celestial=False)
        map5[:, 1] = map4.real.T
        map5[:, 2] = map4.imag.T
        map5[:, 1:3] *= map5[:, 0, np.newaxis, :]

        if not debug:
            del map4

        print "Rotating"
        # Rotate to celestial co-ordinates if required.
        if celestial:
            map5 = hputil.coord_g2c(map5)

        if debug:
            return map2, map4, w, sigma_phi, pta, map5
        else:
            return map5
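
Note: the chunk_var helper used in Example #1 is not part of this listing. Below is a minimal sketch of what such a chunked variance routine might look like (an assumption for illustration, not the package's actual implementation); it accumulates block sums so that, unlike np.var, it never allocates temporaries the size of the full map cube.

import numpy as np

def chunk_var(arr, nchunk=16):
    # Hypothetical helper: variance of a large (possibly complex) array,
    # computed block by block along the first axis so that only per-block
    # temporaries are ever allocated.
    n = arr.size
    total = 0.0 + 0.0j
    total_sq = 0.0
    for block in np.array_split(arr, nchunk, axis=0):
        total += block.sum()
        total_sq += (np.abs(block) ** 2).sum()
    mean = total / n
    return total_sq / n - np.abs(mean) ** 2
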
Example #2
    def getsky(self, debug=False, celestial=True):
        """Create a realisation of the *unpolarised* sky.

        Parameters
        ----------
        debug : boolean, optional
            Return intermediate products for debugging. Default is False.
        celestial : boolean, optional
            If True (default), maps are returned in celestial co-ordinates.

        Returns
        -------
        skymap : np.ndarray[freq, pixel]
        """
        # Read in data files.
        haslam = healpy.ud_grade(self._haslam, self.nside)

        syn = FullSkySynchrotron()

        lmax = 3 * self.nside - 1
        efreq = np.concatenate((np.array([408.0, 1420.0]), self.nu_pixels))

        ## Construct map of random fluctuations
        # cla = skysim.clarray(syn.angular_powerspectrum, lmax, efreq, zwidth=(self.nu_pixels[1] - self.nu_pixels[0]))
        cla = skysim.clarray(syn.angular_powerspectrum, lmax, efreq, zromb=0)
        fg = skysim.mkfullsky(cla, self.nside)

        ## Find the smoothed fluctuations on each scale
        sub408 = healpy.smoothing(fg[0], fwhm=np.radians(1.0), verbose=False)
        sub1420 = healpy.smoothing(fg[1], fwhm=np.radians(5.8), verbose=False)

        ## Make a multifrequency map constrained to look like the smoothed maps
        ## depending on the spectral_map apply constraints at upper and lower frequency (GSM),
        ## or just at Haslam map frequency
        if self.spectral_map == "gsm":
            fgs = skysim.mkconstrained(cla, [(0, sub408), (1, sub1420)], self.nside)
        else:
            fgs = skysim.mkconstrained(cla, [(0, sub408)], self.nside)

        # Construct maps of appropriate resolution
        sc = healpy.ud_grade(self._sp_ind[self.spectral_map], self.nside)
        am = healpy.ud_grade(self._amp_map, self.nside)

        ## Bump up the variance of the fluctuations according to the variance map
        vm = healpy.smoothing(fg[0], sigma=np.radians(0.5), verbose=False)
        vm = healpy.smoothing(map_variance(vm, 16) ** 0.5, sigma=np.radians(2.0), verbose=False)
        mv = vm.mean()

        ## Construct the fluctuations map
        fgt = (am / mv) * (fg - fgs)

        ## Get the smooth, large scale emission from Haslam+spectralmap
        fgsmooth = haslam[np.newaxis, :] * ((efreq / 408.0)[:, np.newaxis] ** sc)

        # Rescale to ensure output is always positive
        def tanh_lin(x):
            return np.where(x < 0, np.tanh(x), x)
        fg2 = (fgsmooth * (1.0 + tanh_lin(fgt / fgsmooth)))[2:]

        ## Co-ordinate transform if required
        if celestial:
            fg2 = hputil.coord_g2c(fg2)

        if debug:
            return fg2, fg, fgs, fgt, fgsmooth, am, mv

        return fg2
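
Note: map_variance is also not shown in this listing. A minimal sketch, assuming it estimates the local variance of a HEALPix map within super-pixels of a coarser resolution (an assumption about its behaviour, not the package's actual helper):

import healpy

def map_variance(input_map, nside_out):
    # Assumed behaviour: local variance within each super-pixel of the
    # coarser nside_out grid, via E[x^2] - E[x]^2. healpy.ud_grade averages
    # the child pixels when degrading a map.
    mean_map = healpy.ud_grade(input_map, nside_out)
    mean_sq_map = healpy.ud_grade(input_map ** 2, nside_out)
    return mean_sq_map - mean_map ** 2
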
Example #3
    def getpolsky(self, debug=False, celestial=True):
        """Create a realisation of the *polarised* sky.

        Parameters
        ----------
        debug : boolean, optional
            Return intermediate products for debugging. Default is False.
        celestial : boolean, optional
            If True (default), maps are returned in celestial co-ordinates.

        Returns
        -------
        skymap : np.ndarray[freq, pol, pixel]

        Notes
        -----
        This routine tries to make a decent simulation of the galactic
        polarised emission.
        """

        # Load and smooth the Faraday rotation map to get an estimate for the
        # width of the distribution
        sigma_phi = healpy.ud_grade(
            healpy.smoothing(np.abs(self._faraday),
                             fwhm=np.radians(10.0),
                             verbose=False), self.nside)

        # Set the correlation length in phi
        xiphi = 1.0

        lmax = 3 * self.nside - 1
        la = np.arange(lmax + 1)

        # The angular powerspectrum of polarisation fluctuations, we will use
        # this to generate the base fluctuations
        def angular(l):
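            # Give l = 0 a huge value so the monopole power is effectively zero rather than infinite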
            l[np.where(l == 0)] = 1.0e16
            return (l / 100.0)**-2.8

        # Define a grid in phi that we will use to model the Faraday emission.
        dphi = self._dphi
        maxphi = self._maxphi
        nphi = 2 * int(maxphi / dphi)
        phifreq = np.fft.fftfreq(nphi, d=(1.0 / (dphi * nphi)))

        # Create the weights required to turn random variables into alms
        ps_weight = (angular(la[:, np.newaxis]) / 2.0)**0.5

        # Generate random maps in the Fourier conjugate of phi. This is
        # equivalent to generating random uncorrelated phi maps and FFting
        # into the conjugate.
        map2 = np.zeros((12 * self.nside**2, nphi), dtype=np.complex128)
        print "SHTing to give random maps"
        for i in range(nphi):
            w = np.random.standard_normal(
                (lmax + 1, 2 * lmax + 1, 2)).view(np.complex128)[..., 0]
            w *= ps_weight
            map2[:, i] = hputil.sphtrans_inv_complex(w, self.nside)

        # Weight the conj-phi direction to give the phi correlation structure.
        pcfreq = np.fft.fftfreq(nphi, d=dphi)
        map2 *= np.exp(-2 * (np.pi * xiphi * pcfreq[np.newaxis, :])**2)

        # We need to FFT back into phi, but as scipy does not have an inplace
        # transform, we can do this in blocks, replacing as we go.
        chunksize = self.nside**2
        nchunk = 12

        for ci in range(nchunk):
            si = ci * chunksize
            ei = (ci + 1) * chunksize

            map2[si:ei] = np.fft.ifft(map2[si:ei], axis=1)

        # numpy's var routine is extremely memory inefficient. Use a crappy
        # chunking one.
        map2 /= (2.0 * chunk_var(map2)**0.5)

        w = np.exp(-0.25 *
                   (phifreq[np.newaxis, :] / sigma_phi[:, np.newaxis])**2)

        # Calculate the normalisation explicitly (required when Faraday depth
        # is small, as grid is too large).
        w /= w.sum(axis=1)[:, np.newaxis]

        print "Applying phi weighting"
        map2 *= w

        if not debug:
            del w

        def ptrans(phi, freq, dfreq):

            dx = dfreq / freq

            alpha = 2.0 * phi * 3e2**2 / freq**2

            return (np.exp(1.0J * alpha) * np.sinc(alpha * dx / np.pi))

        fa = self.nu_pixels
        df = np.median(np.diff(fa))

        pta = ptrans(phifreq[:, np.newaxis], fa[np.newaxis, :], df) / dphi

        print "Transforming to freq"
        map4 = np.dot(map2, pta)

        if not debug:
            del map2

        print "Rescaling freq"
        map4a = np.abs(map4)
        map4 = map4 * np.tanh(map4a) / map4a

        del map4a

        map5 = np.zeros((self.nu_num, 4, 12 * self.nside**2), dtype=np.float64)

        print "Scaling by T"
        # Unflatten the intensity by multiplying by the unpolarised realisation
        map5[:, 0] = self.getsky(celestial=False)
        map5[:, 1] = map4.real.T
        map5[:, 2] = map4.imag.T
        map5[:, 1:3] *= map5[:, 0, np.newaxis, :]

        if not debug:
            del map4

        print "Rotating"
        # Rotate to celestial co-ordinates if required.
        if celestial:
            map5 = hputil.coord_g2c(map5)

        if debug:
            return map2, map4, w, sigma_phi, pta, map5
        else:
            return map5
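
Note: the ptrans kernel above is the discretised Faraday rotation phase exp(2i * phi * lambda^2), with a sinc factor accounting for the finite channel width. A quick stand-alone illustration (hypothetical values; frequency in MHz, Faraday depth in rad/m^2) of how that phase is built:

import numpy as np

nu_mhz = 600.0                  # hypothetical observing frequency in MHz
lam_m = 3e2 / nu_mhz            # wavelength in metres (c = 3e8 m/s, nu in MHz)
phi = 10.0                      # hypothetical Faraday depth in rad / m^2

alpha = 2.0 * phi * lam_m ** 2  # same phase as in ptrans: 2 * phi * (300 / nu)^2
print(np.exp(1.0j * alpha))     # rotation applied to the complex polarisation
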
Example #4
    def getpolsky(self, debug=False, celestial=True):
        """Create a realisation of the *polarised* sky.

        Parameters
        ----------
        debug : boolean, optional
            Return intermediate products for debugging. Default is False.
        celestial : boolean, optional
            If True (default), maps are returned in celestial co-ordinates.

        Returns
        -------
        skymap : np.ndarray[freq, pol, pixel]
        """

        # Load and smooth the Faraday rotation map to estimate the width of the distribution
        sigma_phi = healpy.ud_grade(healpy.smoothing(np.abs(self._faraday), fwhm=np.radians(10.0)), self.nside)

        # Get the Haslam map as a base for the unpolarised emission
        haslam = healpy.smoothing(healpy.ud_grade(self._haslam, self.nside), fwhm=np.radians(3.0))

        # Create a map of the correlation length in phi
        xiphi = 3.0
        xiphimap = np.minimum(sigma_phi / 20.0, xiphi)

        lmax = 3*self.nside - 1
        la = np.arange(lmax+1)

        # The angular powerspectrum of polarisation fluctuations
        def angular(l):
            l[np.where(l == 0)] = 1.0e16
            return (l / 100.0)**-2.8

        c6 = 3e2 # Speed of light in million m/s
        xf = 1.5 # Factor to expand the region in lambda^2 by.

        ## Calculate the range in lambda^2 to generate
        l2l = (c6 / self.nu_upper)**2
        l2h = (c6 / self.nu_lower)**2

        l2a = 0.5 * (l2h + l2l)
        l2d = 0.5 * (l2h - l2l)

        # Bounds and number of points in lambda^2
        l2l = l2a - xf * l2d
        l2h = l2a + xf * l2d
        nl2 = int(xf * self.nu_num)

        l2 = np.linspace(l2l, l2h, nl2) # Grid in lambda^2 to use

        ## Make a realisation of c(n, l2)

        # Generate random numbers and weight by powerspectrum
        w = (np.random.standard_normal((nl2, lmax+1, 2*lmax+1)) + 1.0J * np.random.standard_normal((nl2, lmax+1, 2*lmax+1))) / 2**0.5
        w *= angular(la[np.newaxis, :, np.newaxis])**0.5

        # Transform from spherical harmonics to maps
        map1 = np.zeros((nl2, 12*self.nside**2), dtype=np.complex128)
        print "Making maps"
        for i in range(nl2):
            map1[i] = hputil.sphtrans_inv_complex(w[i], self.nside)

        # Weight frequencies for the internal Faraday depolarisation
        map1 *= np.exp(-2 * (xiphimap[np.newaxis, :] * l2[:, np.newaxis])**2) / 100.0

        if not debug:
            del w
 
        # Transform into phispace
        map2 = np.fft.fft(map1, axis=0)

        # Calculate the phi samples
        phifreq = np.pi * np.fft.fftfreq(nl2, d=(l2[1] - l2[0]))

        if not debug:
            del map1

        # Create weights for smoothing the emission (corresponding to the
        # Gaussian region of emission).
        w = np.exp(-0.25 * (phifreq[:, np.newaxis] / sigma_phi[np.newaxis, :])**2)

        # Calculate the normalisation explicitly (required when Faraday depth
        # is small, as grid is too large).
        w /= w.sum(axis=0)[np.newaxis, :]

        # When the spacing between phi samples is too large we don't get the
        # decorrelation from the independent regions correct.
        xiphimap = np.maximum(xiphimap, np.pi / (l2h - l2l))
        xiphimap = np.minimum(xiphimap, sigma_phi)
        w *= 0.2 * (sigma_phi / xiphimap)**0.5 * (10.0 / haslam)**0.5

        # Additional weighting to account for finite frequency bin width
        # (assume channels are gaussian). Approximate by performing in
        # lambda^2 not nu.
        dnu_nu = np.diff(self.nu_pixels).mean() / self.nu_pixels.mean()

        # Calculate the spacing lambda^2
        dl2 = 2*l2.mean() * dnu_nu / (8*np.log(2.0))**0.5
        w *= np.exp(-2.0 * dl2**2 * phifreq**2)[:, np.newaxis]

        # Weight map, and transform back into lambda^2
        map3 = np.fft.ifft(map2 * w, axis=0)

        if not debug:
            del map2, w

        # Array to hold frequency maps in.
        map4 = np.zeros((self.nu_num, 4, 12*self.nside**2), dtype=np.float64)

        ## Interpolate lambda^2 sampling into regular frequency grid.
        ## Use a piecewise linear method
        for i in range(self.nu_num):

            # Find the lambda^2 for the current frequency
            l2i = (c6 / self.nu_pixels[i])**2

            # Find the bounding array indices in the lambda^2 array
            ih = np.searchsorted(l2, l2i)
            il = ih - 1

            # Calculate the interpolating coefficient
            alpha = (l2i - l2[il]) / (l2[ih] - l2[il])

            # Interpolate each map at the same time.
            mapint = map3[ih] * alpha + (1.0 - alpha) * map3[il]

            # Separate real and imaginary parts into polarised matrix
            map4[i, 1] = mapint.real
            map4[i, 2] = mapint.imag

        if not debug:
            del map3

        # Unflatten the intensity by multiplying by the unpolarised realisation
        map4[:, 0] = self.getsky(celestial=False)
        map4[:, 1:3] *= map4[:, 0, np.newaxis, :]

        # Rotate to celestial co-ordinates if required.
        if celestial:
            map4 = hputil.coord_g2c(map4)

        if debug:
            return map4, map1, map2, map3, w, sigma_phi
        else:
            return map4
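
Note: the loop above is a standard piecewise-linear interpolation in lambda^2. A small self-contained check (toy values only) that the coefficient construction matches np.interp for a single pixel:

import numpy as np

l2 = np.linspace(0.1, 0.5, 9)    # toy lambda^2 grid
samples = np.sin(10.0 * l2)      # stand-in for map3[:, pix] at one pixel
l2i = 0.27                       # lambda^2 of one target frequency channel

ih = np.searchsorted(l2, l2i)    # first grid point at or above l2i
il = ih - 1
alpha = (l2i - l2[il]) / (l2[ih] - l2[il])
value = samples[ih] * alpha + (1.0 - alpha) * samples[il]

assert np.isclose(value, np.interp(l2i, l2, samples))
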
Example #5
    def getsky(self, debug=False, celestial=True):
        """Create a realisation of the *unpolarised* sky.

        Parameters
        ----------
        debug : boolean, optional
            Return intermediate products for debugging. Default is False.
        celestial : boolean, optional
            If True (default), maps are returned in celestial co-ordinates.

        Returns
        -------
        skymap : np.ndarray[freq, pixel]
        """
        # Read in data files.
        haslam = healpy.ud_grade(self._haslam, self.nside)

        syn = FullSkySynchrotron()

        lmax = 3 * self.nside - 1
        efreq = np.concatenate((np.array([408.0, 1420.0]), self.nu_pixels))

        # Construct map of random fluctuations
        cla = skysim.clarray(syn.angular_powerspectrum, lmax, efreq, zromb=0)
        fg = skysim.mkfullsky(cla, self.nside)

        # Find the smoothed fluctuations on each scale
        sub408 = healpy.smoothing(fg[0], fwhm=np.radians(1.0), verbose=False)
        sub1420 = healpy.smoothing(fg[1], fwhm=np.radians(5.8), verbose=False)

        # Make a multifrequency map constrained to look like the smoothed maps
        # depending on the spectral_map apply constraints at upper and lower
        # frequency (GSM), or just at Haslam map frequency
        if self.spectral_map == 'gsm':
            fgs = skysim.mkconstrained(cla, [(0, sub408), (1, sub1420)],
                                       self.nside)
        else:
            fgs = skysim.mkconstrained(cla, [(0, sub408)], self.nside)

        # Construct maps of appropriate resolution
        sc = healpy.ud_grade(self._sp_ind[self.spectral_map], self.nside)
        am = healpy.ud_grade(self._amp_map, self.nside)

        # Bump up the variance of the fluctuations according to the variance map
        vm = healpy.smoothing(fg[0], sigma=np.radians(0.5), verbose=False)
        vm = healpy.smoothing(map_variance(vm, 16)**0.5,
                              sigma=np.radians(2.0),
                              verbose=False)
        mv = vm.mean()

        # Construct the fluctuations map
        fgt = (am / mv) * (fg - fgs)

        if not debug:
            del fg, fgs

        # Get the smooth, large scale emission from Haslam+spectralmap
        fgsmooth = haslam[np.newaxis, :] * ((efreq / 408.0)[:, np.newaxis]**sc)

        # Rescale to ensure output is always positive, do this inplace where
        # possible to save memory
        def tanh_lin(x):
            return np.where(x < 0, np.tanh(x), x)

        fgt /= fgsmooth
        fgt = tanh_lin(fgt)
        fgt += 1
        fgt *= fgsmooth
        fgt = fgt[2:]

        # Co-ordinate transform if required
        if celestial:
            fgt = hputil.coord_g2c(fgt)

        if debug:
            return fgt, fg, fgs, fgsmooth, am, mv

        return fgt
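
Note: the tanh_lin rescaling is what keeps the output sky positive: tanh(x) > -1 for every x, so 1 + tanh_lin(x) > 0 and the product with the (positive) Haslam-scaled smooth component stays positive. A short illustration:

import numpy as np

def tanh_lin(x):
    # identity for x >= 0, smoothly saturating towards -1 for x < 0
    return np.where(x < 0, np.tanh(x), x)

x = np.linspace(-10.0, 10.0, 101)
assert np.all(1.0 + tanh_lin(x) > 0)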