Example #1
 def get_e2iphi_mat(self, cache_only=False):
     """
     Built such that the pbs barrier is hit only if the matrix is not already cached.
     """
     fname = self.lib_dir + '/e2iphimat.npy'
     if os.path.exists(fname):
         return None if cache_only else np.load(fname, mmap_mode='r')
     if not os.path.exists(fname) and pbs.rank == 0:
         print 'ell_mat:caching e2iphi in ', fname
         np.save(fname, np.exp(2j * np.arctan2(self.get_ky_mat(), self.get_kx_mat())))
     pbs.barrier()
     return None if cache_only else np.load(fname, mmap_mode='r')
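Example #1 above and Examples #3 and #8 below are all instances of the same caching pattern: on a cache miss, rank 0 builds and saves the array, every rank waits at the barrier, and all ranks then memory-map the cached file. A minimal self-contained sketch of the pattern, assuming only that pbs exposes rank and barrier() as in the examples (the serial shim is an illustration, not part of the original library):

    import os
    import numpy as np

    try:
        import pbs  # the MPI wrapper assumed throughout these examples
    except ImportError:
        class pbs:  # serial stand-in so the sketch runs without MPI
            rank, size = 0, 1
            @staticmethod
            def barrier():
                pass

    def cached_array(fname, builder, cache_only=False):
        """Memory-map fname, letting rank 0 build and save it on a cache miss."""
        if not os.path.exists(fname) and pbs.rank == 0:
            np.save(fname, builder())
        pbs.barrier()  # the file is guaranteed to be on disk past this point
        return None if cache_only else np.load(fname, mmap_mode='r')

Example #1 adds an early return when the file already exists, so that a warm cache never touches the barrier, which is what its docstring promises.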
Example #2
 def get_sim_qumap(self, idx):
     if self.cache_sims and not os.path.exists(self.lib_dir +
                                               '/sim_qumap_%05d.npy' % idx):
         if pbs.rank == 0:
             np.save(self.lib_dir + '/sim_qumap_%05d.npy' % idx,
                     np.array(self._build_sim_qumap(idx)))
         pbs.barrier()
     if self.cache_sims:
         return np.load(self.lib_dir + '/sim_qumap_%05d.npy' % idx,
                        mmap_mode='r')
     else:
         return self._build_sim_qumap(idx)
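Note that Example #2 gates both the np.save and the barrier on a per-rank os.path.exists check. If rank 0 finishes writing the file while another rank is still probing the disk, the ranks can disagree about entering the block, leaving some of them waiting at a barrier the others never reach. A sketch of a variant with the barrier unconditional on the caching path (same assumed pbs interface as above):

    def get_sim_qumap(self, idx):
        if not self.cache_sims:
            return self._build_sim_qumap(idx)
        fname = self.lib_dir + '/sim_qumap_%05d.npy' % idx
        if pbs.rank == 0 and not os.path.exists(fname):
            np.save(fname, np.array(self._build_sim_qumap(idx)))
        pbs.barrier()  # reached by every rank, whatever it saw on disk
        return np.load(fname, mmap_mode='r')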
Example #3
 def get_ellmat(self, ellmax=None):
     """
     Returns the matrix containing the multipole ell assigned to k = (kx, ky), or, if ellmax is set, the (flattened) entries with ell <= ellmax
     """
     if ellmax is None:
         return np.load(self.lib_dir + '/ellmat.npy', mmap_mode='r')
     else:
         fname = self.lib_dir + '/ellmat_ellmax%s.npy' % ellmax
         if os.path.exists(fname): return np.load(fname, mmap_mode='r')
         if pbs.rank == 0:
             print 'ell_mat:caching ells in ', fname
             np.save(fname, self.get_ellmat()[np.where(self.get_ellmat() <= ellmax)])
         pbs.barrier()
         return np.load(fname, mmap_mode='r')
Example #4
    def __init__(self, lib_dir, lib_datalm, lib_lencmb, cl_transf, TQUcovfname, pix_pha=None, cache_sims=True):
        """
        Library for sims with pixel-to-pixel independent noise drawn from a specified TQU pixel covariance.
        :param lib_dir: directory where the library caches its files
        :param lib_datalm:  ffs_alm library for the data maps.
        :param lib_lencmb: library of lensed cmb sims
        :param cl_transf: transfer function cl (identical for T Q U)
        :param TQUcovfname: path to the (3, 3, nx, ny) shaped TQU pixel covariance array, with (nx, ny) = lib_datalm.shape
        :param pix_pha: random phases for the noise maps
        :param cache_sims: caches sims on disk if set
        """
        assert np.load(TQUcovfname).shape == (3, 3, lib_datalm.shape[0], lib_datalm.shape[1])
        self.lencmbs = lib_lencmb
        self.lib_datalm = lib_datalm
        self.lib_skyalm = lib_lencmb.lib_skyalm
        self.cl_transf = np.zeros(self.lib_skyalm.ellmax + 1, dtype=float, order='C')
        self.cl_transf[:min(len(self.cl_transf), len(cl_transf))] = cl_transf[:min(len(self.cl_transf), len(cl_transf))]
        self.lib_dir = lib_dir
        self.cache_sims = cache_sims
        self.TQUcovfname = TQUcovfname
        if not os.path.exists(lib_dir) and pbs.rank == 0:
            os.makedirs(lib_dir)
        pbs.barrier()

        if pix_pha is None:
            self.pix_pha = ffs_phas.pix_lib_phas(lib_dir + '/pix_pha', 3, lib_datalm.shape)
        else:
            self.pix_pha = pix_pha
            assert pix_pha.shape == self.lib_datalm.shape, (pix_pha.shape, self.lib_datalm.shape)
            assert pix_pha.nfields == 3, (pix_pha.nfields, 3)
        if (np.array([not os.path.exists(lib_dir + '/rmat%s.npy' % a) for a in
                      ['TT', 'TQ', 'TU', 'QQ', 'QU', 'UU']]).any()) and pbs.rank == 0:
            # FIXME : triangularise this
            TQUcov = np.load(TQUcovfname)
            rmat = np.zeros((3, 3, self.lib_datalm.shape[0], self.lib_datalm.shape[1]), dtype=float)
            for _i, i in misc.misc_utils.enumerate_progress(range(self.lib_datalm.shape[0]), 'building root matrix'):
                for j in range(self.lib_datalm.shape[1]):
                    t, v = np.linalg.eigh(TQUcov[:, :, i, j], UPLO='U')
                    assert np.all(t >= 0.), (i, j, t)  # Matrix not positive semidefinite
                    rmat[:, :, i, j] = np.dot(v, np.dot(np.diag(np.sqrt(t)), v.T))
            for (i, j), lab in zip([(0, 0), (0, 1), (0, 2), (1, 1), (1, 2), (2, 2)],
                                   ['TT', 'TQ', 'TU', 'QQ', 'QU', 'UU']):
                _sav = rmat[i, j, :, :]
                np.save(lib_dir + '/rmat%s.npy' % lab, _sav if _sav.any() else np.array([0.]))
        if not os.path.exists(lib_dir + '/sim_hash.pk') and pbs.rank == 0:
            pk.dump(self.hashdict(), open(lib_dir + '/sim_hash.pk', 'w'))
        pbs.barrier()
        hash_check(self.hashdict(), pk.load(open(lib_dir + '/sim_hash.pk', 'r')))
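The double loop above builds the symmetric square root of the 3x3 TQU covariance one pixel at a time. np.linalg.eigh also accepts stacked matrices, so the same root matrix can be computed in a single batched call; a sketch, assuming TQUcov has the (3, 3, nx, ny) shape asserted in the constructor (an illustration, not the library's code):

    import numpy as np

    def root_matrix(TQUcov):
        """Per-pixel symmetric square root of a (3, 3, nx, ny) covariance."""
        cov = np.moveaxis(TQUcov, (0, 1), (-2, -1))  # -> (nx, ny, 3, 3)
        t, v = np.linalg.eigh(cov, UPLO='U')         # batched eigendecomposition
        assert np.all(t >= 0.), 'covariance not positive semi-definite'
        # v @ diag(sqrt(t)) @ v.T for every pixel at once:
        root = np.einsum('...ij,...j,...kj->...ik', v, np.sqrt(t), v)
        return np.moveaxis(root, (-2, -1), (0, 1))   # back to (3, 3, nx, ny)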
Example #5
    def __init__(self,
                 lib_dir,
                 get_state_func=np.random.get_state,
                 nsims_max=None):
        if not os.path.exists(lib_dir) and pbs.rank == 0:
            os.makedirs(lib_dir)
        self.nmax = nsims_max
        if pbs.rank == 0 and not os.path.exists(lib_dir + '/sim_hash.pk'):
            pk.dump(self.hashdict(), open(lib_dir + '/sim_hash.pk', 'w'))
        pbs.barrier()

        hsh = pk.load(open(lib_dir + '/sim_hash.pk', 'r'))
        hash_check(hsh, self.hashdict(), ignore=['lib_dir'])

        self._rng_db = rng_db(lib_dir + '/rngdb.db', idtype='INTEGER')
        self._get_rng_state = get_state_func
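Examples #5, #7, #9, #11 and others close with the same consistency check: rank 0 pickles self.hashdict() once, and every rank then compares its own hash dictionary against the cached one. hash_check itself is not shown on this page; a sketch of what such a check minimally does (the ignore keyword matches the call in Example #5):

    def hash_check(hsh, other, ignore=()):
        """Raise if two hash dictionaries disagree on any non-ignored key."""
        for key in set(hsh) | set(other):
            if key in ignore:
                continue
            assert key in hsh and key in other, key
            assert hsh[key] == other[key], (key, hsh[key], other[key])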
Example #6
    def __init__(self, fname, idtype="INTEGER"):
        if not os.path.exists(fname) and pbs.rank == 0:
            con = sqlite3.connect(fname,
                                  detect_types=sqlite3.PARSE_DECLTYPES,
                                  timeout=3600)
            cur = con.cursor()
            cur.execute(
                "create table rngdb (id %s PRIMARY KEY, "
                "type STRING, pos INTEGER, has_gauss INTEGER,cached_gaussian REAL, keys array)"
                % idtype)
            con.commit()
        pbs.barrier()

        self.con = sqlite3.connect(fname,
                                   timeout=3600.,
                                   detect_types=sqlite3.PARSE_DECLTYPES)
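The `keys array` column above relies on sqlite3's declared-type detection: with detect_types=sqlite3.PARSE_DECLTYPES, values written to a column declared as `array` go through a registered adapter, and values read back go through the converter registered under that name. The encoding the original library registers is not shown on this page; a plausible numpy pairing, as a sketch:

    import io
    import sqlite3
    import numpy as np

    def adapt_array(arr):
        """numpy array -> BLOB, for storage in a column declared as `array`."""
        buf = io.BytesIO()
        np.save(buf, arr)
        return sqlite3.Binary(buf.getvalue())

    def convert_array(blob):
        """BLOB read back from an `array` column -> numpy array."""
        return np.load(io.BytesIO(blob))

    sqlite3.register_adapter(np.ndarray, adapt_array)
    sqlite3.register_converter('array', convert_array)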
Example #7
 def __init__(self,
              sims_cmb_len,
              cl_ttransf,
              cl_ptransf,
              nside=2048,
              lib_dir=None):
     self.sims_cmb_len = sims_cmb_len
     self.cl_ttransf = cl_ttransf
     self.cl_ptransf = cl_ptransf
     self.nside = nside
     if lib_dir is not None:
         if pbs.rank == 0 and not os.path.exists(lib_dir + '/sim_hash.pk'):
             pk.dump(self.hashdict(), open(lib_dir + '/sim_hash.pk', 'w'))
         pbs.barrier()
         sims_generic.hash_check(
             self.hashdict(), pk.load(open(lib_dir + '/sim_hash.pk', 'r')))
Example #8
 def get_phasemat(self, ellmax=None):
     """
     Returns the matrix containing the phase phi, where k = |k| e^(i phi)
     """
     if ellmax is None:
         fname = self.lib_dir + '/phasemat.npy'
         if os.path.exists(fname): return np.load(fname, mmap_mode='r')
         if not os.path.exists(fname) and pbs.rank == 0:
             print 'ell_mat:caching phases in ', fname
             np.save(fname, np.arctan2(self.get_ky_mat(), self.get_kx_mat()))
         pbs.barrier()
         return np.load(fname, mmap_mode='r')
     else:
         fname = self.lib_dir + '/phase_ellmax%s.npy' % ellmax
         if not os.path.exists(fname) and pbs.rank == 0:
             print 'ell_mat:caching phases in ', fname
             np.save(fname, np.arctan2(self.get_ky_mat(), self.get_kx_mat())[np.where(self.get_ellmat() <= ellmax)])
         pbs.barrier()
         return np.load(fname, mmap_mode='r')
Example #9
    def __init__(self, lib_dir, lib_datalm, lib_lencmb, cl_transf, cache_sims=False):

        self.lencmbs = lib_lencmb
        self.lib_datalm = lib_datalm
        self.lib_skyalm = lib_lencmb.lib_skyalm
        self.cl_transf = np.zeros(self.lib_skyalm.ellmax + 1, dtype=float)
        self.cl_transf[:min(len(self.cl_transf), len(cl_transf))] = cl_transf[:min(len(self.cl_transf), len(cl_transf))]
        self.lib_dir = lib_dir
        self.cache_sims = cache_sims
        if self.cache_sims:
            assert lib_dir is not None
            if not os.path.exists(lib_dir) and pbs.rank == 0:
                os.makedirs(lib_dir)
            pbs.barrier()

            if not os.path.exists(lib_dir + '/sim_hash.pk') and pbs.rank == 0:
                pk.dump(self.hashdict(), open(lib_dir + '/sim_hash.pk', 'w'))
            pbs.barrier()
            hash_check(self.hashdict(), pk.load(open(lib_dir + '/sim_hash.pk', 'r')))
Example #10
 def __init__(self,
              ellmat,
              filt_func=lambda ell: ell > 0,
              num_threads=4,
              flags_init=('FFTW_MEASURE', )):
     super(ffs_alm_pyFFTW, self).__init__(ellmat, filt_func=filt_func)
      # FIXME : This can be tricky in hybrid MPI-OPENMP
     # Builds FFTW Wisdom :
     wisdom_fname = self.ell_mat.lib_dir + '/FFTW_wisdom_%s_%s.npy' % (
         num_threads, ''.join(flags_init))
      if not os.path.exists(wisdom_fname):
          if pbs.rank == 0:
              print "++ ffs_alm_pyFFTW :: building and caching FFTW wisdom, this might take a little while..."
             inpt = pyfftw.empty_aligned(self.ell_mat.shape,
                                         dtype='float64')
             oupt = pyfftw.empty_aligned(self.ell_mat.rshape,
                                         dtype='complex128')
             fft = pyfftw.FFTW(inpt,
                               oupt,
                               axes=(0, 1),
                               direction='FFTW_FORWARD',
                               flags=flags_init,
                               threads=num_threads)
             ifft = pyfftw.FFTW(oupt,
                                inpt,
                                axes=(0, 1),
                                direction='FFTW_BACKWARD',
                                flags=flags_init,
                                threads=num_threads)
             wisdom = pyfftw.export_wisdom()
             np.save(wisdom_fname, wisdom)
             del inpt, oupt, fft, ifft
         pbs.barrier()
     pyfftw.import_wisdom(np.load(wisdom_fname))
     # print "++ ffs_alm_pyFFTW :: loaded widsom ", wisdom_fname
     self.flags = (
         'FFTW_WISDOM_ONLY',
     )  # This will make the code crash if arrays are not properly aligned.
     # self.flags = ('FFTW_MEASURE',)
     self.threads = num_threads
Example #11
    def __init__(self,
                 lib_dir,
                 lib_skyalm,
                 cls_unl,
                 lib_pha=None,
                 use_Pool=0,
                 cache_lens=False):
        if not os.path.exists(lib_dir) and pbs.rank == 0:
            os.makedirs(lib_dir)
        pbs.barrier()
        self.lib_skyalm = lib_skyalm
        fields = get_fields(cls_unl)
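        # NB: if lib_pha is None, only rank 0 builds it here; the other ranks
        # fall through to the assert below with lib_pha still None.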
        if lib_pha is None and pbs.rank == 0:
            lib_pha = fs.sims.ffs_phas.ffs_lib_phas(lib_dir + '/phas',
                                                    len(fields), lib_skyalm)
        else:  # Check that the lib_alms are compatible :
            assert lib_pha.lib_alm == lib_skyalm
        pbs.barrier()

        self.unlcmbs = sim_cmb_unl(cls_unl, lib_pha)
        self.Pool = use_Pool
        self.cache_lens = cache_lens
        if not os.path.exists(lib_dir + '/sim_hash.pk') and pbs.rank == 0:
            pk.dump(self.hashdict(), open(lib_dir + '/sim_hash.pk', 'w'))
        pbs.barrier()
        fs.sims.sims_generic.hash_check(
            self.hashdict(), pk.load(open(lib_dir + '/sim_hash.pk', 'r')))
        self.lib_dir = lib_dir
        self.fields = fields
Example #12
 def __init__(self,
              lib_dir,
              lmax,
              cls_unl,
              dlmax=1024,
              nside_lens=2048,
              lib_pha=None):
     #FIXME : add aberration and modulation
     if not os.path.exists(lib_dir) and pbs.rank == 0:
         os.makedirs(lib_dir)
     pbs.barrier()
     self.lmax = lmax
     self.dlmax = dlmax
     fields = get_fields(cls_unl)
     assert 'o' not in fields, 'Check lenscurv.py if everything is implemented. Should be easy.'
     if lib_pha is None and pbs.rank == 0:
         lib_pha = phas.lib_phas(lib_dir + '/phas', len(fields),
                                 lmax + dlmax)
     else:  # Check that the lib_alms are compatible :
         assert lib_pha.lmax == lmax + dlmax
     pbs.barrier()
     self.nside_lens = nside_lens
     self.unlcmbs = sims_cmb_unl(cls_unl, lib_pha)
     self.lib_dir = lib_dir
     self.fields = get_fields(cls_unl)
     if pbs.rank == 0 and not os.path.exists(lib_dir + '/sim_hash.pk'):
         pk.dump(self.hashdict(), open(lib_dir + '/sim_hash.pk', 'w'))
     pbs.barrier()
     sims_generic.hash_check(self.hashdict(),
                             pk.load(open(lib_dir + '/sim_hash.pk', 'r')))
Example #13
 def build_pha(self, iter):
     """
     Sets up sim libraries for the MF evaluation.
     :param iter: iteration index
     :return: the pixel and cmb phase libraries (phas_pix, phas_cmb)
     """
     if self.nsims == 0: return None
     phas_pix = fs.sims.ffs_phas.pix_lib_phas(
         self.lib_dir + '/%s_sky_noise_iter%s' % (self.type, iter *
                                                  (not self.same_seeds)),
         len(self.type),
         self.cov.lib_datalm.shape,
         nsims_max=self.nsims)
     phas_cmb = None  # don't need it so far
     if self.PBSRANK == 0:
         for lib, lab in zip([phas_pix, phas_cmb],
                             ['phas_pix', 'phas_cmb']):
             if lib is not None and not lib.is_full():
                 print "++ run iterator regenerating %s phases mf_sims rank %s..." % (
                     lab, self.PBSRANK)
                 for idx in np.arange(self.nsims):
                     lib.get_sim(idx, phas_only=True)
     pbs.barrier()
     return phas_pix, phas_cmb
Example #14
    def __init__(self, lib_dir, lib_alm, cls_len, cl_transf, cls_noise):
        self.lib_alm = lib_alm
        self.cls_len = cls_len
        self.cls_noise = cls_noise
        self.cl_transf = np.zeros(self.lib_alm.ellmax + 1, dtype=float)
        self.cl_transf[:min(len(self.cl_transf), len(cl_transf))] = cl_transf[:min(len(self.cl_transf), len(cl_transf))]
        self.lib_dir = lib_dir
        if not os.path.exists(lib_dir) and pbs.rank == 0:
            os.makedirs(lib_dir)
        pbs.barrier()
        if not os.path.exists(lib_dir + '/Pmats') and pbs.rank == 0:
            os.makedirs(lib_dir + '/Pmats')
        pbs.barrier()

        if not os.path.exists(lib_dir + '/MFMC_hash.pk') and pbs.rank == 0:
            pk.dump(self.hashdict(), open(lib_dir + '/MFMC_hash.pk', 'w'))
        pbs.barrier()
        fs.sims.sims_generic.hash_check(self.hashdict(), pk.load(open(lib_dir + '/MFMC_hash.pk', 'r')))
Example #15
    def __init__(self,
                 lib_dir,
                 type,
                 cov,
                 dat_maps,
                 lib_qlm,
                 Plm0,
                 H0,
                 cpp_prior,
                 use_Pool_lens=0,
                 use_Pool_inverse=0,
                 chain_descr=None,
                 opfilt=None,
                 soltn0=None,
                 no_deglensing=False,
                 NR_method=100,
                 tidy=10,
                 verbose=True,
                 maxcgiter=150,
                 PBSSIZE=None,
                 PBSRANK=None,
                 **kwargs):
        """
        Normalisation of gradients etc. is now complex-like, not real and imag.

        qlm_norm is the normalization of the qlms.

        H0 is the starting Hessian estimate (cl array, ~ 1 / N0).
        """
        assert type in _types

        self.PBSSIZE = pbs.size if PBSSIZE is None else PBSSIZE
        self.PBSRANK = pbs.rank if PBSRANK is None else PBSRANK
        assert self.PBSRANK < self.PBSSIZE, (self.PBSRANK, self.PBSSIZE)
        self.barrier = (lambda: 0) if self.PBSSIZE == 1 else pbs.barrier

        self.type = type
        self.lib_dir = lib_dir
        self.dat_maps = dat_maps

        self.chain_descr = chain_descr
        self.opfilt = opfilt
        assert self.chain_descr is not None
        assert opfilt is not None
        # lib_noise = getattr(par, 'lib_noise_%s' % type)
        # lib_cmb_unl = getattr(par, 'lib_cmb_unl_%s' % type)

        self.cl_pp = cpp_prior
        self.lib_qlm = lib_qlm

        self.lsides = cov.lib_skyalm.lsides
        assert cov.lib_skyalm.lsides == lib_qlm.lsides
        self.lmax_qlm = self.lib_qlm.ellmax
        self.NR_method = NR_method

        self.tidy = tidy
        self.maxiter = maxcgiter
        self.verbose = verbose

        self.nodeglensing = no_deglensing
        if self.verbose:
            print " I see t", cov.Nlev_uKamin('t')
            print " I see q", cov.Nlev_uKamin('q')
            print " I see u", cov.Nlev_uKamin('u')

        # Defining a trial newton step length :

        def newton_step_length(iter, norm_incr):  # FIXME
            # Just trying if half the step is better for S4 QU
            if cov.Nlev_uKamin('t') > 2.1: return 1.
            return 0.5

        self.newton_step_length = newton_step_length
        self.soltn0 = soltn0
        # Default tolerance function(iter,key)
        # FIXME Put tolerance and maxiter in chain descrt
        # def tol_func(iter, key, **kwargs):
        #    return 1e-3

        # self.tol_func = tol_func
        f_id = fs.ffs_deflect.ffs_deflect.ffs_id_displacement(
            cov.lib_skyalm.shape, cov.lib_skyalm.lsides)
        if not hasattr(cov, 'f') or not hasattr(cov, 'fi'):
            self.cov = cov.turn2wlfilt(f_id, f_id)
        else:
            cov.set_ffi(f_id, f_id)
            self.cov = cov
        if self.PBSRANK == 0:
            if not os.path.exists(self.lib_dir): os.makedirs(self.lib_dir)
        pbs.barrier()

        print 'ffs iterator : This is %s trying to setup %s' % (self.PBSRANK,
                                                                lib_dir)
        # Lensed covariance matrix library :
        # We will redefine the displacement at each iteration step
        self.use_Pool = use_Pool_lens
        self.use_Pool_inverse = use_Pool_inverse

        if self.PBSRANK == 0:  # FIXME : hash and hashcheck
            if not os.path.exists(self.lib_dir):
                os.makedirs(self.lib_dir)
            if not os.path.exists(self.lib_dir + '/MAPlms'):
                os.makedirs(self.lib_dir + '/MAPlms')
            if not os.path.exists(self.lib_dir + '/cghistories'):
                os.makedirs(self.lib_dir + '/cghistories')

        # pre_calculation of qlm_norms with rank 0:
        if self.PBSRANK == 0 and \
                (not os.path.exists(self.lib_dir + '/qlm_%s_H0.dat' % ('P'))
                 or not os.path.exists(self.lib_dir + '/%shi_plm_it%03d.npy' % ('P', 0))):
            print '++ ffs_%s_iterator: Caching qlm_norms and N0s' % type, self.lib_dir

            # Caching qlm norm that we will use as zeroth order curvature : (with lensed weights)
            # Prior curvature :
            # Gaussian priors
            prior_pp = cl_inverse(self.cl_pp[0:self.lmax_qlm + 1])
            prior_pp[0] *= 0.5

            curv_pp = H0 + prior_pp  # isotropic estimate of the posterior curvature at the starting point
            self.cache_cl(self.lib_dir + '/qlm_%s_H0.dat' % ('P'),
                          cl_inverse(curv_pp))
            print "     cached %s" % self.lib_dir + '/qlm_%s_H0.dat' % ('P')
            fname_P = self.lib_dir + '/%shi_plm_it%03d.npy' % ('P', 0)
            self.cache_qlm(fname_P, self.load_qlm(Plm0))
        self.barrier()

        if not os.path.exists(self.lib_dir + '/Hessian') and self.PBSRANK == 0:
            os.makedirs(self.lib_dir + '/Hessian')
            # We store here the rank 2 updates to the Hessian according to the BFGS iterations.

        if not os.path.exists(self.lib_dir +
                              '/history_increment.txt') and self.PBSRANK == 0:
            with open(self.lib_dir + '/history_increment.txt', 'w') as file:
                file.write(
                    '# Iteration step \n' + '# Exec. time in sec.\n' +
                    '# Increment norm (normalized to starting point displacement norm) \n'
                    +
                    '# Total gradient norm  (all grad. norms normalized to initial total gradient norm)\n'
                    + '# Quad. gradient norm\n' + '# Det. gradient norm\n' +
                    '# Pri. gradient norm\n' + '# Newton step length\n')

        if self.PBSRANK == 0:
            print '++ ffs_%s masked iterator : setup OK' % type
        self.barrier()
Example #16
    def __init__(self,
                 lib_dir,
                 lib_datalm,
                 lib_lencmb,
                 cl_transf,
                 nTpix,
                 nQpix,
                 nUpix,
                 pix_pha=None,
                 cache_sims=True):
        """
        Library for sims with pixel-to-pixel independent noise with specified noise variance maps.
        :param lib_dir: directory where the library caches its files
        :param lib_datalm:  ffs_alm library for the data maps.
        :param lib_lencmb: library of lensed cmb sims
        :param cl_transf: transfer function cl (identical for T Q U)
        :param nTpix:  rms pixel noise in T (either a scalar, a map of the right shape, or a path to the map)
        :param nQpix:  rms pixel noise in Q
        :param nUpix:  rms pixel noise in U
        :param pix_pha: random phases for the noise maps
        :param cache_sims: caches sims on disk if set
        """
        self.lencmbs = lib_lencmb
        self.lib_datalm = lib_datalm
        self.lib_skyalm = lib_lencmb.lib_skyalm
        self.cl_transf = np.zeros(self.lib_skyalm.ellmax + 1, dtype=float)
        self.cl_transf[:min(len(self.cl_transf), len(
            cl_transf))] = cl_transf[:min(len(self.cl_transf), len(cl_transf))]
        self.lib_dir = lib_dir
        self.cache_sims = cache_sims

        if not os.path.exists(lib_dir) and pbs.rank == 0:
            os.makedirs(lib_dir)
        pbs.barrier()

        if pix_pha is None:
            self.pix_pha = ffs_phas.pix_lib_phas(lib_dir + '/pix_pha', 3,
                                                 lib_datalm.shape)
        else:
            self.pix_pha = pix_pha
            assert pix_pha.shape == self.lib_datalm.shape, (
                pix_pha.shape, self.lib_datalm.shape)
            assert pix_pha.nfields == 3, (pix_pha.nfields, 3)

        if not isinstance(nTpix, str):
            if not os.path.exists(lib_dir + '/nTpix.npy') and pbs.rank == 0:
                np.save(lib_dir + '/nTpix.npy', nTpix)
            pbs.barrier()
            self.nTpix = lib_dir + '/nTpix.npy'
        else:
            assert os.path.exists(nTpix), nTpix
            self.nTpix = nTpix

        if not isinstance(nQpix, str):
            if not os.path.exists(lib_dir + '/nQpix.npy') and pbs.rank == 0:
                np.save(lib_dir + '/nQpix.npy', nQpix)
            pbs.barrier()
            self.nQpix = lib_dir + '/nQpix.npy'
        else:
            assert os.path.exists(nQpix), nQpix
            self.nQpix = nQpix

        if not isinstance(nUpix, str):
            if not os.path.exists(lib_dir + '/nUpix.npy') and pbs.rank == 0:
                np.save(lib_dir + '/nUpix.npy', nUpix)
            pbs.barrier()
            self.nUpix = lib_dir + '/nUpix.npy'
        else:
            assert os.path.exists(nUpix), nUpix
            self.nUpix = nUpix

        # Check noise maps inputs
        for _noise in [self._loadTnoise, self._loadQnoise, self._loadUnoise]:
            assert _noise().size == 1 or _noise(
            ).shape == self.lib_datalm.shape, (_noise().size,
                                               self.lib_datalm.shape)

        if not os.path.exists(lib_dir + '/sim_hash.pk') and pbs.rank == 0:
            pk.dump(self.hashdict(), open(lib_dir + '/sim_hash.pk', 'w'))
        pbs.barrier()
        hash_check(self.hashdict(), pk.load(open(lib_dir + '/sim_hash.pk',
                                                 'r')))
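The three nTpix/nQpix/nUpix blocks above repeat the same accept-a-path-or-cache-the-array logic. A sketch of a helper that would factor it out (the helper name is illustrative, with pbs as assumed above):

    import os
    import numpy as np

    def as_cached_path(value, fname):
        """Return a path to a noise map, caching non-path inputs to fname."""
        if isinstance(value, str):
            assert os.path.exists(value), value
            return value
        if not os.path.exists(fname) and pbs.rank == 0:
            np.save(fname, value)
        pbs.barrier()
        return fname

so that the constructor could read, e.g., self.nTpix = as_cached_path(nTpix, lib_dir + '/nTpix.npy').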
Example #17
    def calc_gradPlikPdet(self, iter, key, callback='default_callback'):
        """
        Caches the det term for iter via MC sims, together with the data one, for maximal parallelisation.
        """
        assert key.lower() in ['p', 'o'], key  # potential or curl potential.
        fname_detterm = self.lib_dir + '/qlm_grad%sdet_it%03d.npy' % (
            key.upper(), iter - 1)
        fname_likterm = self.lib_dir + '/qlm_grad%slik_it%03d.npy' % (
            key.upper(), iter - 1)
        if os.path.exists(fname_detterm) and os.path.exists(fname_likterm):
            return 0
        assert self.is_previous_iter_done(iter, key)

        pix_pha, cmb_pha = self.build_pha(iter)
        if self.PBSRANK == 0 and not os.path.exists(self.lib_dir +
                                                    '/mf_it%03d' % (iter - 1)):
            os.makedirs(self.lib_dir + '/mf_it%03d' % (iter - 1))
        pbs.barrier()

        # Caching gradients for the mc_sims_mf sims , plus the dat map.
        # The gradient of the det term is the data averaged lik term, with the opposite sign.

        jobs = []
        try:
            self.load_qlm(fname_likterm)
        except:
            jobs.append(-1)  # data map
        for idx in range(self.nsims):  # sims
            if not os.path.exists(self.lib_dir + '/mf_it%03d/g%s_%04d.npy' %
                                  (iter - 1, key.lower(), idx)):
                jobs.append(idx)
            else:
                try:  # just checking if file is OK.
                    self.load_qlm(self.lib_dir + '/mf_it%03d/g%s_%04d.npy' %
                                  (iter - 1, key.lower(), idx))
                except:
                    jobs.append(idx)
        self.opfilt._type = self.type
        # By setting up the chain outside the main loop we avoid potential MPI barriers
        # triggered when degrading the lib_alm libraries:
        mchain = fs.qcinv.multigrid.multigrid_chain(
            self.opfilt,
            self.type,
            self.chain_descr,
            self.cov,
            no_deglensing=self.nodeglensing)
        for i in range(self.PBSRANK, len(jobs), self.PBSSIZE):
            idx = jobs[i]
            print "rank %s, doing mc det. gradients idx %s, job %s in %s at iter level %s:" \
                  % (self.PBSRANK, idx, i, len(jobs), iter)
            ti = time.time()

            if idx >= 0:  # sim
                grad_fname = self.lib_dir + '/mf_it%03d/g%s_%04d.npy' % (
                    iter - 1, key.lower(), idx)
                self.cov.set_ffi(self.load_f(iter - 1, key),
                                 self.load_finv(iter - 1, key))
                MFest = ql.MFestimator(self.cov,
                                       self.opfilt,
                                       mchain,
                                       self.lib_qlm,
                                       pix_pha=pix_pha,
                                       cmb_pha=cmb_pha,
                                       use_Pool=self.use_Pool)
                grad = MFest.get_MFqlms(self.type, self.MFkey, idx)[{
                    'p': 0,
                    'o': 1
                }[key.lower()]]
                if self.subtract_phi0:
                    isofilt = self.cov.turn2isofilt()
                    chain_descr_iso = fs.qcinv.chain_samples.get_isomgchain(
                        self.cov.lib_skyalm.ellmax,
                        self.cov.lib_datalm.shape,
                        iter_max=self.maxiter)
                    mchain_iso = fs.qcinv.multigrid.multigrid_chain(
                        self.opfilt,
                        self.type,
                        chain_descr_iso,
                        isofilt,
                        no_deglensing=self.nodeglensing)
                    MFest = ql.MFestimator(isofilt,
                                           self.opfilt,
                                           mchain_iso,
                                           self.lib_qlm,
                                           pix_pha=pix_pha,
                                           cmb_pha=cmb_pha,
                                           use_Pool=self.use_Pool)
                    grad -= MFest.get_MFqlms(self.type, self.MFkey, idx)[{
                        'p': 0,
                        'o': 1
                    }[key.lower()]]
                self.cache_qlm(grad_fname, grad, pbs_rank=self.PBSRANK)
            else:
                # This is the data.
                # FIXME : The solution input is not working properly sometimes. We give it up for now.
                # FIXME  don't manage to find the right d0 to input for a given sol ?!!
                self.cov.set_ffi(self.load_f(iter - 1, key),
                                 self.load_finv(iter - 1, key))
                soltn = self.load_soltn(iter, key).copy() * 0.
                mchain.solve(soltn, self.get_datmaps(), finiop='MLIK')
                self.cache_TEBmap(soltn, iter - 1, key)
                TQUMlik = self.opfilt.soltn2TQUMlik(soltn, self.cov)
                ResTQUMlik = self.Mlik2ResTQUMlik(TQUMlik, iter, key)
                grad = -ql.get_qlms_wl(self.type,
                                       self.cov.lib_skyalm,
                                       TQUMlik,
                                       ResTQUMlik,
                                       self.lib_qlm,
                                       use_Pool=self.use_Pool,
                                       f=self.load_f(iter - 1, key))[{
                                           'p': 0,
                                           'o': 1
                                       }[key.lower()]]
                self.cache_qlm(fname_likterm, grad, pbs_rank=self.PBSRANK)

            print "%s it. %s sim %s, rank %s cg status  " % (key.lower(), iter,
                                                             idx, self.PBSRANK)
            # It does not help to cache both grad_O and grad_P as they do not follow the trajectory in plm space.
            # Saves some info about current iteration :
            if idx == -1:  # Saves some info about iteration times etc.
                with open(self.lib_dir + '/cghistories/history_dat.txt',
                          'a') as file:
                    file.write('%04d %.3f \n' % (iter, time.time() - ti))
            else:
                with open(
                        self.lib_dir +
                        '/cghistories/history_sim%04d.txt' % idx, 'a') as file:
                    file.write('%04d %.3f \n' % (iter, time.time() - ti))
        pbs.barrier()
        if self.PBSRANK == 0:
            # Collecting terms and caching det term.
            # We also cache arrays formed from independent sims for tests.
            print "rank 0, collecting mc det. %s gradients :" % key.lower()
            det_term = np.zeros(self.lib_qlm.alm_size, dtype=complex)
            for i in range(self.nsims):
                fname = self.lib_dir + '/mf_it%03d/g%s_%04d.npy' % (
                    iter - 1, key.lower(), i)
                det_term = (det_term * i + self.load_qlm(fname)) / (i + 1.)
            self.cache_qlm(fname_detterm, det_term, pbs_rank=0)
            det_term *= 0.
            fname_detterm1 = fname_detterm.replace('.npy', 'MF1.npy')
            assert 'MF1' in fname_detterm1
            for _k, i in enumerate(np.arange(self.nsims)[0::2]):
                fname = self.lib_dir + '/mf_it%03d/g%s_%04d.npy' % (
                    iter - 1, key.lower(), i)
                # running mean weighted by the count within this half, not by the sim index
                det_term = (det_term * _k + self.load_qlm(fname)) / (_k + 1.)
            self.cache_qlm(fname_detterm1, det_term, pbs_rank=0)
            det_term *= 0.
            fname_detterm2 = fname_detterm.replace('.npy', 'MF2.npy')
            assert 'MF2' in fname_detterm2
            for _k, i in enumerate(np.arange(self.nsims)[1::2]):
                fname = self.lib_dir + '/mf_it%03d/g%s_%04d.npy' % (
                    iter - 1, key.lower(), i)
                det_term = (det_term * _k + self.load_qlm(fname)) / (_k + 1.)
            self.cache_qlm(fname_detterm2, det_term, pbs_rank=0)

            # Erase some temp files if requested to do so :
            if self.tidy > 1:
                # We erase as well the per-sim gradients that were stored on disk :
                files_to_remove = \
                    [self.lib_dir + '/mf_it%03d/g%s_%04d.npy' % (iter - 1, key.lower(), i) for i in range(self.nsims)]
                print 'rank %s removing %s maps in ' % (
                    self.PBSRANK, len(files_to_remove)
                ), self.lib_dir + '/mf_it%03d/' % (iter - 1)
                for file in files_to_remove:
                    os.remove(file)
        pbs.barrier()
Example #18
    def iterate(self,
                iter,
                key,
                cache_only=False,
                callback='default_callback'):
        """
        Performs an iteration by collecting the gradients at level iter and the lower-level potential,
        then saving the iter + 1 potential map.
        """
        assert key.lower() in ['p', 'o'], key  # potential or curl potential.
        plm_fname = self.lib_dir + '/%s_plm_it%03d.npy' % ({
            'p': 'Phi',
            'o': 'Om'
        }[key.lower()], iter)
        if os.path.exists(plm_fname):
            return None if cache_only else self.load_qlm(plm_fname)

        assert self.is_previous_iter_done(iter,
                                          key), 'previous iteration not done'
        # Calculation in parallel of lik and det term :
        ti = time.time()
        if self.PBSRANK == 0:  # Single processes routines :
            self.calc_ffinv(iter - 1, key)
            self.get_gradPpri(iter, key, cache_only=True)
        pbs.barrier()
        # Calculation of the likelihood term, involving the det term over MCs :
        irrelevant = self.calc_gradPlikPdet(iter, key)
        pbs.barrier()  # Everything should be on disk now.
        if self.PBSRANK == 0:
            incr, steplength = self.build_incr(
                iter, key, self.load_total_grad(iter - 1, key))
            self.cache_qlm(plm_fname,
                           self.get_Plm(iter - 1, key) + incr,
                           pbs_rank=0)

            # Saves some info about increment norm and exec. time :
            norm_inc = self.calc_norm(incr) / self.calc_norm(
                self.get_Plm(0, key))
            norms = [self.calc_norm(self.load_gradquad(iter - 1, key))]
            norms.append(self.calc_norm(self.load_graddet(iter - 1, key)))
            norms.append(self.calc_norm(self.load_gradpri(iter - 1, key)))
            norm_grad = self.calc_norm(self.load_total_grad(iter - 1, key))
            norm_grad_0 = self.calc_norm(self.load_total_grad(0, key))
            for i in [0, 1, 2]:
                norms[i] = norms[i] / norm_grad_0

            with open(self.lib_dir + '/history_increment.txt', 'a') as file:
                file.write(
                    '%03d %.1f %.6f %.6f %.6f %.6f %.6f %.12f \n' %
                    (iter, time.time() - ti, norm_inc, norm_grad / norm_grad_0,
                     norms[0], norms[1], norms[2], steplength))

            if self.tidy > 2:  # Erasing dx,dy and det magn (12GB for full sky at 0.74 amin per iteration)
                f1, f2 = self.getfnames_f(key, iter - 1)
                f3, f4 = self.getfnames_finv(key, iter - 1)
                for _f in [f1, f2, f3, f4]:
                    if os.path.exists(_f):
                        os.remove(_f)
                        if self.verbose: print "     removed :", _f
                if os.path.exists(self.lib_dir + '/f_%04d_libdir' %
                                  (iter - 1)):
                    shutil.rmtree(self.lib_dir + '/f_%04d_libdir' % (iter - 1))
                    if self.verbose:
                        print "Removed :", self.lib_dir + '/f_%04d_libdir' % (
                            iter - 1)
                if os.path.exists(self.lib_dir + '/finv_%04d_libdir' %
                                  (iter - 1)):
                    shutil.rmtree(self.lib_dir + '/finv_%04d_libdir' %
                                  (iter - 1))
                    if self.verbose:
                        print "Removed :", self.lib_dir + '/finv_%04d_libdir' % (
                            iter - 1)

        pbs.barrier()
        return None if cache_only else self.load_qlm(plm_fname)
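Examples #17 and #18 share a common MPI skeleton around their barriers: rank 0 performs the serial setup, every rank then takes a strided share of the Monte-Carlo jobs, and rank 0 collects the cached results once all ranks have passed the barrier. A compressed sketch of that skeleton (setup, do_job and collect are hypothetical placeholders; pbs as assumed above):

    def run_iteration(jobs):
        if pbs.rank == 0:
            setup()           # serial preparation: directories, priors, ...
        pbs.barrier()
        for i in range(pbs.rank, len(jobs), pbs.size):
            do_job(jobs[i])   # each rank handles every size-th job
        pbs.barrier()         # all per-job files are now on disk
        if pbs.rank == 0:
            collect(jobs)     # e.g. average the cached per-sim gradients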