Example #1
    def mapmake_full(self, nside, mapname):
        def _make_alm(mi):

            print("Making %i" % mi)

            mmode = self.mmode(mi)
            sphmode = self.beamtransfer.project_vector_telescope_to_sky(mi, mmode)

            return sphmode

        alm_list = mpiutil.parallel_map(_make_alm, list(range(self.telescope.mmax + 1)))

        if mpiutil.rank0:

            alm = np.zeros(
                (
                    self.telescope.nfreq,
                    self.telescope.num_pol_sky,
                    self.telescope.lmax + 1,
                    self.telescope.lmax + 1,
                ),
                dtype=np.complex128,
            )

            for mi in range(self.telescope.mmax + 1):

                alm[..., mi] = alm_list[mi]

            skymap = hputil.sphtrans_inv_sky(alm, nside)

            with h5py.File(self.output_directory + "/" + mapname, "w") as f:
                f.create_dataset("/map", data=skymap)

        mpiutil.barrier()
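The pattern above recurs throughout these examples: every rank computes its share via `mpiutil.parallel_map`, only rank 0 assembles and writes the result, and `mpiutil.barrier()` keeps the other ranks from racing ahead. A stripped-down sketch of that pattern, with a hypothetical worker and output file, might look like this:

    import numpy as np
    from caput import mpiutil

    def _worker(mi):
        # Stand-in for the real per-m computation.
        return np.array([mi, mi ** 2])

    results = mpiutil.parallel_map(_worker, list(range(8)))

    if mpiutil.rank0:
        # Assemble and write on the root process only.
        table = np.array(results)
        np.save("parallel_map_demo.npy", table)  # hypothetical output file

    mpiutil.barrier()  # make sure no rank continues before the file is written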
Example #2
    def test_io(self):

        import h5py

        # Clean up any existing test file
        fname = 'testdset.hdf5'

        if mpiutil.rank0 and os.path.exists(fname):
            os.remove(fname)

        mpiutil.barrier()

        gshape = (19, 17)

        ds = mpiarray.MPIArray(gshape, dtype=np.int64)

        ga = np.arange(np.prod(gshape)).reshape(gshape)

        l0, s0, e0 = mpiutil.split_local(gshape[0])
        ds[:] = ga[s0:e0]

        ds.redistribute(axis=1).to_hdf5(fname, 'testds', create=True)

        if mpiutil.rank0:

            with h5py.File(fname, 'r') as f:

                h5ds = f['testds'][:]

                assert (h5ds == ga).all()

        ds2 = mpiarray.MPIArray.from_hdf5(fname, 'testds')

        assert (ds2 == ds).all()
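For reference, the `split_local` call used above returns this rank's (local length, global start, global end); a tiny sketch, assuming caput's mpiutil and an MPI launch:

    from caput import mpiutil

    n_total = 19
    n_local, start, end = mpiutil.split_local(n_total)
    print("rank %d owns rows %d:%d (%d rows)" % (mpiutil.rank, start, end, n_local))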
Example #3
    def generate(self, regen=False):
        """Save out all beam transfer matrices to disk.

        Parameters
        ----------
        regen : boolean, optional
            Force regeneration even if cache files exist (default: False).
        """

        self._generate_dirs()
        self._generate_teldatafile(regen)

        st = time.time()
        self._generate_mfiles(regen)
        if self.gen_invbeam:
            self._generate_invbeam(regen)
        et = time.time()
        if mpiutil.rank0:
            print "***** Beam transfer matrices generation time: %f" % (et - st)

        # Save pickled telescope object
        if mpiutil.rank0:
            print()
            print('=' * 80)
            print("=== Saving Telescope object. ===")
            with open(self._picklefile, 'wb') as f:
                pickle.dump(self.telescope, f)

        # If we're part of an MPI run, synchronise here.
        mpiutil.barrier()
Example #4
    def setup(self):
        """Get a list of existing processed files."""

        # Find processed transit files
        self.proc_transits = []
        for processed_dir in self.processed_dir:
            self.log.debug(
                "Looking for processed transits in {}...".format(processed_dir)
            )
            # Expand path
            processed_dir = path.expanduser(processed_dir)
            processed_dir = path.expandvars(processed_dir)

            try:
                processed_files = listdir(processed_dir)
            except FileNotFoundError:
                processed_files = []
            for fname in processed_files:
                if not path.splitext(fname)[1] == ".h5":
                    continue
                with ContainerBase.from_file(
                    fname, ondisk=True, distributed=False, mode="r"
                ) as fh:
                    obs_id = fh.attrs.get("observation_id", None)
                    if obs_id is not None:
                        self.proc_transits.append(obs_id)
        self.log.debug("Found {:d} processed transits.".format(len(self.proc_transits)))

        # Query database for observations of this source
        hol_obs = None
        if mpiutil.rank0:
            hol_obs = list(get_holography_obs(self.source))
        self.hol_obs = mpiutil.bcast(hol_obs, root=0)
        mpiutil.barrier()
Example #5
    def mapmake_svd(self, nside, mapname):

        self.generate_mmodes_svd()

        def _make_alm(mi):

            svdmode = self.mmode_svd(mi)

            sphmode = self.beamtransfer.project_vector_svd_to_sky(mi, svdmode)

            return sphmode

        alm_list = mpiutil.parallel_map(_make_alm, range(self.telescope.mmax + 1))

        if mpiutil.rank0:

            alm = np.zeros((self.telescope.nfreq, self.telescope.num_pol_sky, self.telescope.lmax + 1,
                            self.telescope.lmax + 1), dtype=np.complex128)

            for mi in range(self.telescope.mmax + 1):

                alm[..., mi] = alm_list[mi]

            skymap = hputil.sphtrans_inv_sky(alm, nside)

            with h5py.File(self.output_directory + '/' + mapname, 'w') as f:
                f.create_dataset('/map', data=skymap)

        mpiutil.barrier()
Example #6
    def __init__(self, pipefile=None, feedback=2):

        # Read in the parameters.
        self.params, self.task_params = parse_ini.parse(pipefile, self.params_init, prefix=self.prefix, return_undeclared=True, feedback=feedback)
        self.tasks = self.params['tasks']

        # set environment var
        os.environ['TL_OUTPUT'] = self.params['outdir'] + '/'

        # copy pipefile to outdir if required
        if self.params['copy']:
            base_name = path.basename(pipefile)
            dst_file = '%s/%s' % (self.params['outdir'], base_name)
            outdir = output_path(dst_file, relative=False, mkdir=True)
            if mpiutil.rank0:
                if self.params['overwrite']:
                    shutil.copy2(pipefile, dst_file)
                else:
                    if not path.exists(dst_file):
                        shutil.copy2(pipefile, dst_file)
                    else:
                        base, ext = path.splitext(dst_file)
                        for cnt in itertools.count(1):
                            dst = '%s_%d%s' % (base, cnt, ext) # add cnt to file name
                            if not path.exists(dst):
                                shutil.copy2(pipefile, dst)
                                break

            mpiutil.barrier()
Example #7
    def test_misc(self):

        dg = memh5.MemDiskGroup(distributed=True)

        pdset = dg.create_dataset('parallel_data', shape=(10,), dtype=np.float64, distributed=True, distributed_axis=0)
        # pdset[:] = dg._data.comm.rank
        pdset[:] = rank
        # Test successfully added
        self.assertIn('parallel_data', dg)

        dg.save(self.fname)

        dg2 = memh5.MemDiskGroup.from_file(self.fname, distributed=True)

        # Test successful load
        self.assertIn('parallel_data', dg2)
        self.assertTrue((dg['parallel_data'][:] == dg2['parallel_data'][:]).all())

        # self.assertRaises(NotImplementedError, dg.to_disk, self.fname)

        # Test refusal to base off a h5py object when distributed
        from caput import mpiutil
        with h5py.File(self.fname, 'r') as f:
            if comm is not None:
                self.assertRaises(ValueError, memh5.MemDiskGroup, data_group=f, distributed=True)
        mpiutil.barrier()
Example #8
    def finish(self):
        #if mpiutil.rank0:
        print('RANK %03d Finishing FGRM' % mpiutil.rank)

        mpiutil.barrier()
        for df in self.df_out:
            df.close()
Example #9
    def generate(self, regen=False):
        """Perform the KL-transform for all m-modes and save the result.

        Uses MPI to distribute the work (if available).

        Parameters
        ----------
        regen : boolean, optional
            Force regeneration even if cache files exist (default: False).
        """

        if mpiutil.rank0:
            st = time.time()
            print "======== Starting KL calculation ========"

        # Iterate list over MPI processes.
        for mi in mpiutil.mpirange(self.telescope.mmax+1):
            if os.path.exists(self._evfile % mi) and not regen:
                print "m index %i. File: %s exists. Skipping..." % (mi, (self._evfile % mi))
                continue

            self.transform_save(mi)

        # If we're part of an MPI run, synchronise here.
        mpiutil.barrier()

        if mpiutil.rank0:
            et = time.time()
            print "======== Ending KL calculation (time=%f) ========" % (et - st)


        # Collect together the eigenvalues
        self._collect()
Example #10
    def process(self, ts):

        show_progress = self.params['show_progress']
        progress_step = self.params['progress_step']

        ra_axis  = self.map_tmp.get_axis('ra')
        dec_axis = self.map_tmp.get_axis('dec')
        if mpiutil.rank0:
            msg = 'RANK %03d:  RA  Range [%5.2f, %5.2f] deg'%(
                    mpiutil.rank, ra_axis.min(), ra_axis.max())
            logger.info(msg)
            msg = 'RANK %03d:  Dec Range [%5.2f, %5.2f] deg\n'%(
                    mpiutil.rank, dec_axis.min(), dec_axis.max())
            logger.info(msg)

        if self.params['save_localHI']:
            if mpiutil.rank0:
                logger.info('save local HI')
            freq = ts['freq'][:] - 1420.
            local_hi = np.abs(freq) < 1
            ts.local_vis_mask[:, local_hi, ...] = False

        self.init_output()
        if 'ns_on' in ts.keys():
            ns = ts['ns_on'].local_data
            ts.local_vis_mask[:] += ns[:, None, None, :]
            #ns = ts['ns_on'][:]
            #ts.vis_mask[:] += ns[:, None, None, :]

        func = self.init_ps_datasets(ts)

        ts.redistribute('frequency')

        vis_var = mpiarray.MPIArray.wrap(np.zeros(ts.vis.local_shape), 1)
        axis_order = tuple(range(len(ts.vis.shape)))
        ts.create_time_ordered_dataset('vis_var', data=vis_var, axis_order=axis_order)
        #var  = ts['vis_var'][:]
        #print mpiutil.rank, var.shape

        ts.freq_data_operate(self.init_vis, full_data=True, copy_data=False, 
                show_progress=show_progress, progress_step=progress_step, 
                keep_dist_axis=False)
        mpiutil.barrier()
        #vis_var = ts['vis_var'].local_data
        #vis_var = mpiutil.allreduce(vis_var)
        #ts['vis_var'][:] = vis_var

        #print ts['vis_var'].shape

        if func is not None:

            #ts.redistribute('time')
            ts.redistribute('frequency')
            func(self.make_map, full_data=False, copy_data=True, 
                    show_progress=show_progress, 
                    progress_step=progress_step, keep_dist_axis=False)

        mpiutil.barrier()

        self.df.close()
Example #11
    def generate(self, regen=False):
        """Perform the KL-transform for all m-modes and save the result.

        Uses MPI to distribute the work (if available).

        Parameters
        ----------
        regen : boolean, optional
            Force regeneration even if cache files exist (default: False).
        """

        if mpiutil.rank0:
            st = time.time()
            print("======== Starting KL calculation ========")

        # Iterate list over MPI processes.
        for mi in mpiutil.mpirange(self.telescope.mmax + 1):
            if os.path.exists(self._evfile % mi) and not regen:
                print(
                    "m index %i. File: %s exists. Skipping..."
                    % (mi, (self._evfile % mi))
                )
                continue

            self.transform_save(mi)

        # If we're part of an MPI run, synchronise here.
        mpiutil.barrier()

        if mpiutil.rank0:
            et = time.time()
            print("======== Ending KL calculation (time=%f) ========" % (et - st))

        # Collect together the eigenvalues
        self._collect()
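The `mpiutil.mpirange` loop in the two KL examples above hands each rank a disjoint subset of m indices. A minimal illustration of that distribution (hypothetical range, run under mpirun):

    from caput import mpiutil

    for mi in mpiutil.mpirange(8):
        print("rank %d handles m = %d" % (mpiutil.rank, mi))

    mpiutil.barrier()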
Example #12
    def mapmake_kl(self, nside, mapname, wiener=False):

        mapfile = self.output_directory + "/" + mapname

        if os.path.exists(mapfile):
            if mpiutil.rank0:
                print("File %s exists. Skipping...")
            return

        kl = self.manager.kltransforms[self.klname]

        if not kl.inverse:
            raise Exception("Need the inverse to make a meaningful map.")

        def _make_alm(mi):
            print("Making %i" % mi)

            klmode = self.mmode_kl(mi)

            if wiener:
                evals = kl.evals_m(mi, self.klthreshold)

                if evals is not None:
                    klmode *= evals / (1.0 + evals)

            isvdmode = kl.project_vector_kl_to_svd(
                mi, klmode, threshold=self.klthreshold
            )

            sphmode = self.beamtransfer.project_vector_svd_to_sky(mi, isvdmode)

            return sphmode

        alm_list = mpiutil.parallel_map(_make_alm, list(range(self.telescope.mmax + 1)))

        if mpiutil.rank0:

            alm = np.zeros(
                (
                    self.telescope.nfreq,
                    self.telescope.num_pol_sky,
                    self.telescope.lmax + 1,
                    self.telescope.lmax + 1,
                ),
                dtype=np.complex128,
            )

            # Determine whether to use m=0 or not
            mlist = list(range(1 if self.no_m_zero else 0, self.telescope.mmax + 1))

            for mi in mlist:

                alm[..., mi] = alm_list[mi]

            skymap = hputil.sphtrans_inv_sky(alm, nside)

            with h5py.File(mapfile, "w") as f:
                f.create_dataset("/map", data=skymap)

        mpiutil.barrier()
Example #13
    def test_misc(self):

        dg = memh5.MemDiskGroup(distributed=True)

        pdset = dg.create_dataset('parallel_data', shape=(10,), dtype=np.float64, distributed=True, distributed_axis=0)
        # pdset[:] = dg._data.comm.rank
        pdset[:] = rank
        # Test successfully added
        self.assertIn('parallel_data', dg)

        dg.save(self.fname)

        dg2 = memh5.MemDiskGroup.from_file(self.fname, distributed=True)

        # Test successful load
        self.assertIn('parallel_data', dg2)
        self.assertTrue((dg['parallel_data'][:] == dg2['parallel_data'][:]).all())

        # self.assertRaises(NotImplementedError, dg.to_disk, self.fname)

        # Test refusal to base off a h5py object when distributed
        from caput import mpiutil
        with h5py.File(self.fname, 'r') as f:
            if comm is not None:
                self.assertRaises(ValueError, memh5.MemDiskGroup, data_group=f, distributed=True)
        mpiutil.barrier()
Example #14
    def __init__(self, pipefile=None, feedback=2):

        # Read in the parameters.
        self.params, self.task_params = parse_ini.parse(pipefile, self.params_init, prefix=self.prefix, return_undeclared=True, feedback=feedback)
        self.tasks = self.params['tasks']

        # timing the running
        if self.params['timing']:
            self.start_time = datetime.datetime.now()
            if mpiutil.rank0:
                print('Start the pipeline at %s...' % self.start_time)

        # set environment var
        os.environ['TL_OUTPUT'] = self.params['outdir'] + '/'

        # copy pipefile to outdir if required
        if self.params['copy']:
            base_name = path.basename(pipefile)
            dst_file = '%s/%s' % (self.params['outdir'], base_name)
            outdir = output_path(dst_file, relative=False, mkdir=True)
            if mpiutil.rank0:
                if self.params['overwrite']:
                    shutil.copy2(pipefile, dst_file)
                else:
                    if not path.exists(dst_file):
                        shutil.copy2(pipefile, dst_file)
                    else:
                        base, ext = path.splitext(dst_file)
                        for cnt in itertools.count(1):
                            dst = '%s_%d%s' % (base, cnt, ext) # add cnt to file name
                            if not path.exists(dst):
                                shutil.copy2(pipefile, dst)
                                break

            mpiutil.barrier()
Example #15
    def generate_mmodes(self, ts_data=None):
        """Calculate the m-modes corresponding to the Timestream.

        Perform an MPI transpose for efficiency.
        """

        completed_file = self._mdir + 'COMPLETED_M'
        if os.path.exists(completed_file):
            if mpiutil.rank0:
                print "******* m-files already generated ********"
            mpiutil.barrier()
            return

        # Make directory if required
        # if mpiutil.rank0 and not os.path.exists(self._mdir):
        #     os.makedirs(self._mdir)

        try:
            os.makedirs(self._mdir)
        except OSError:
            # directory exists
            pass

        tel = self.telescope
        mmax = tel.mmax
        ntime = ts_data.shape[0] if ts_data is not None else self.ntime
        nbl = tel.nbase
        nfreq = tel.nfreq

        indices = list(itertools.product(np.arange(nfreq), np.arange(nbl)))
        lind, sind, eind = mpiutil.split_local(nfreq * nbl)

        # load the local section of the time stream
        tstream = np.zeros((ntime, lind), dtype=np.complex128)
        for ind, (f_ind, bl_ind) in enumerate(indices[sind:eind]):
            if ts_data is not None:
                tstream[:, ind] = ts_data[:, f_ind, bl_ind]
            else:
                with h5py.File(self._tsfile, 'r') as f:
                    tstream[:, ind] = f['/timestream'][:, f_ind, bl_ind]

        # FFT to get m-mode
        mmodes = np.fft.fft(tstream, axis=0) / ntime # m = 0 is at left
        mmodes = MPIArray.wrap(mmodes, axis=1)
        # redistribute along different m
        mmodes = mmodes.redistribute(axis=0)

        # save m-modes to file
        ms = np.concatenate([np.arange(0, mmax+1), np.arange(-mmax, 0)])
        for ind, mi in enumerate(mpiutil.mpilist(ms, method='con')):
            with h5py.File(self._mfile(mi), 'w') as f:
              f.create_dataset('/mmode', data=mmodes[ind].view(np.ndarray).reshape(nfreq, nbl))
              f.attrs['m'] = mi

        mpiutil.barrier()

        if mpiutil.rank0:

            # Make file marker that the m's have been correctly generated:
            open(completed_file, 'a').close()
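The m ordering used above, `ms = np.concatenate([np.arange(0, mmax+1), np.arange(-mmax, 0)])`, simply follows NumPy's FFT output layout: m = 0 first, then positive m, then negative m. A quick check for an odd number of samples:

    import numpy as np

    ntime = 7  # corresponds to mmax = 3
    m = np.rint(np.fft.fftfreq(ntime) * ntime).astype(int)
    print(m)  # [ 0  1  2  3 -3 -2 -1]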
Example #16
    def process(self, tstream):

        bt = tstream.beamtransfer
        tel = bt.telescope
        mmode_dir = tstream.output_directory + '/mmodes'
        count_file = mmode_dir + '/count.hdf5'
        if os.path.exists(count_file):
            # get count
            with h5py.File(count_file, 'r') as f:
                N = f['count'][:]
            # normalize mmode
            for mi in mpiutil.mpirange(tel.mmax + 1):
                with h5py.File(tstream._mfile(mi), 'r+') as f:
                    f['/mmode'][:] /= N[:, np.newaxis, :]
            mpiutil.barrier()
            # delete the count file
            if mpiutil.rank0:
                os.remove(count_file)
        else:
            if mpiutil.rank0:
                print('Count file %s does not exist, doing nothing...' % count_file)

        # mpiutil.barrier()

        return tstream
Example #17
    def generate_mmodes(self):
        """Calculate the m-modes corresponding to the Timestream.

        Perform an MPI transpose for efficiency.
        """

        if os.path.exists(self.output_directory + "/mmodes/COMPLETED_M"):
            if mpiutil.rank0:
                print "******* m-files already generated ********"
            return

        tel = self.telescope
        mmax = tel.mmax
        nfreq = tel.nfreq

        lfreq, sfreq, efreq = mpiutil.split_local(nfreq)
        lm, sm, em = mpiutil.split_local(mmax + 1)

        # Load in the local frequencies of the time stream
        tstream = np.zeros((lfreq, tel.npairs, self.ntime),
                           dtype=np.complex128)
        for lfi, fi in enumerate(range(sfreq, efreq)):
            tstream[lfi] = self.timestream_f(fi)

        # FFT to calculate the m-modes for the timestream
        row_mmodes = np.fft.fft(tstream, axis=-1) / self.ntime

        ## Combine positive and negative m parts.
        row_mpairs = np.zeros((lfreq, 2, tel.npairs, mmax + 1),
                              dtype=np.complex128)

        row_mpairs[:, 0, ..., 0] = row_mmodes[..., 0]
        for mi in range(1, mmax + 1):
            row_mpairs[:, 0, ..., mi] = row_mmodes[..., mi]
            row_mpairs[:, 1, ..., mi] = row_mmodes[..., -mi].conj()

        # Transpose to get the entirety of an m-mode on each process (i.e. all frequencies)
        col_mmodes = mpiutil.transpose_blocks(row_mpairs,
                                              (nfreq, 2, tel.npairs, mmax + 1))

        # Transpose the local section to make the m's first
        col_mmodes = np.transpose(col_mmodes, (3, 0, 1, 2))

        for lmi, mi in enumerate(range(sm, em)):

            # Make directory for each m-mode
            if not os.path.exists(self._mdir(mi)):
                os.makedirs(self._mdir(mi))

            # Create the m-file and save the result.
            with h5py.File(self._mfile(mi), 'w') as f:
                f.create_dataset('/mmode', data=col_mmodes[lmi])
                f.attrs['m'] = mi

        if mpiutil.rank0:

            # Make file marker that the m's have been correctly generated:
            open(self.output_directory + "/mmodes/COMPLETED_M", 'a').close()

        mpiutil.barrier()
Example #18
    def generate_mmodes_kl(self):
        """Generate the KL modes for the Timestream.
        """

        kl = self.manager.kltransforms[self.klname]

        # Iterate over local m's, project mode and save to disk.
        for mi in mpiutil.mpirange(self.telescope.mmax + 1):

            if os.path.exists(self._klfile(mi)):
                print "File %s exists. Skipping..." % self._klfile(mi)
                continue

            svdm = self.mmode_svd(mi)  # .reshape(self.telescope.nfreq, 2*self.telescope.npairs)
            # svdm = self.beamtransfer.project_vector_telescope_to_svd(mi, tm)

            klm = kl.project_vector_svd_to_kl(mi,
                                              svdm,
                                              threshold=self.klthreshold)

            with h5py.File(self._klfile(mi), 'w') as f:
                f.create_dataset('mmode_kl', data=klm)
                f.attrs['m'] = mi

        mpiutil.barrier()
Example #19
    def mapmake_svd(self, nside, mapname):

        self.generate_mmodes_svd()

        def _make_alm(mi):

            svdmode = self.mmode_svd(mi)

            sphmode = self.beamtransfer.project_vector_svd_to_sky(mi, svdmode)

            return sphmode

        alm_list = mpiutil.parallel_map(_make_alm,
                                        range(self.telescope.mmax + 1))

        if mpiutil.rank0:

            alm = np.zeros((self.telescope.nfreq, self.telescope.num_pol_sky,
                            self.telescope.lmax + 1, self.telescope.lmax + 1),
                           dtype=np.complex128)

            for mi in range(self.telescope.mmax + 1):

                alm[..., mi] = alm_list[mi]

            skymap = hputil.sphtrans_inv_sky(alm, nside)

            with h5py.File(self.output_directory + '/' + mapname, 'w') as f:
                f.create_dataset('/map', data=skymap)
                f.attrs['frequency'] = self.beamtransfer.telescope.frequencies
                f.attrs['polarization'] = np.array(
                    ['I', 'Q', 'U',
                     'V'])[:self.beamtransfer.telescope.num_pol_sky]

        mpiutil.barrier()
Example #20
    def generate_mmodes(self):
        """Calculate the m-modes corresponding to the Timestream.

        Perform an MPI transpose for efficiency.
        """


        if os.path.exists(self.output_directory + "/mmodes/COMPLETED_M"):
            if mpiutil.rank0:
                print "******* m-files already generated ********"
            return

        tel = self.telescope
        mmax = tel.mmax
        nfreq = tel.nfreq

        lfreq, sfreq, efreq = mpiutil.split_local(nfreq)
        lm, sm, em = mpiutil.split_local(mmax + 1)

        # Load in the local frequencies of the time stream
        tstream = np.zeros((lfreq, tel.npairs, self.ntime), dtype=np.complex128)
        for lfi, fi in enumerate(range(sfreq, efreq)):
            tstream[lfi] = self.timestream_f(fi)

        # FFT to calculate the m-modes for the timestream
        row_mmodes = np.fft.fft(tstream, axis=-1) / self.ntime

        ## Combine positive and negative m parts.
        row_mpairs = np.zeros((lfreq, 2, tel.npairs, mmax+1), dtype=np.complex128)

        row_mpairs[:, 0, ..., 0] = row_mmodes[..., 0]
        for mi in range(1, mmax+1):
            row_mpairs[:, 0, ..., mi] = row_mmodes[...,  mi]
            row_mpairs[:, 1, ..., mi] = row_mmodes[..., -mi].conj()

        # Transpose to get the entirety of an m-mode on each process (i.e. all frequencies)
        col_mmodes = mpiutil.transpose_blocks(row_mpairs, (nfreq, 2, tel.npairs, mmax + 1))

        # Transpose the local section to make the m's first
        col_mmodes = np.transpose(col_mmodes, (3, 0, 1, 2))

        for lmi, mi in enumerate(range(sm, em)):

            # Make directory for each m-mode
            if not os.path.exists(self._mdir(mi)):
                os.makedirs(self._mdir(mi))

            # Create the m-file and save the result.
            with h5py.File(self._mfile(mi), 'w') as f:
                f.create_dataset('/mmode', data=col_mmodes[lmi])
                f.attrs['m'] = mi

        if mpiutil.rank0:

            # Make file marker that the m's have been correctly generated:
            open(self.output_directory + "/mmodes/COMPLETED_M", 'a').close()

        mpiutil.barrier()
Example #21
def separator(sec, tag):
    # sleep, sync, and flush to avoid output of different parts being mixed
    time.sleep(sec)
    mpiutil.barrier()
    sys.stdout.flush()

    if rank == 0:
        print()
        print('-' * 35 + ' ' + tag + ' ' + '-' * 35)
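Assuming the `separator` helper and the module-level `rank` from the snippet above, a hypothetical usage between two groups of tests is:

    separator(0.5, 'mpiarray tests')   # illustrative tag
    if rank == 0:
        print('... output of the first group of tests ...')
    separator(0.5, 'memh5 tests')      # illustrative tag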
Example #22
    def powerspectrum(self):

        import scipy.linalg as la


        if os.path.exists(self._psfile):
            print "File %s exists. Skipping..." % self._psfile
            return

        ps = self.manager.psestimators[self.psname]
        ps.genbands()

        def _q_estimate(mi):

            return ps.q_estimator(mi, self.mmode_kl(mi))

        # Determine whether to use m=0 or not
        mlist = range(1 if self.no_m_zero else 0, self.telescope.mmax + 1)
        qvals = mpiutil.parallel_map(_q_estimate, mlist)

        qtotal = np.array(qvals).sum(axis=0)

        fisher, bias = ps.fisher_bias()

        powerspectrum =  np.dot(la.inv(fisher), qtotal - bias)


        if mpiutil.rank0:
            with h5py.File(self._psfile, 'w') as f:


                cv = la.inv(fisher)
                err = cv.diagonal()**0.5
                cr = cv / np.outer(err, err)

                f.create_dataset('fisher/', data=fisher)
#                f.create_dataset('bias/', data=self.bias)
                f.create_dataset('covariance/', data=cv)
                f.create_dataset('error/', data=err)
                f.create_dataset('correlation/', data=cr)

                f.create_dataset('bandpower/', data=ps.band_power)
                #f.create_dataset('k_start/', data=ps.k_start)
                #f.create_dataset('k_end/', data=ps.k_end)
                #f.create_dataset('k_center/', data=ps.k_center)
                #f.create_dataset('psvalues/', data=ps.psvalues)

                f.create_dataset('powerspectrum', data=powerspectrum)

        # Delete cache of bands for memory reasons
        del ps.clarray
        ps.clarray = None

        mpiutil.barrier()

        return powerspectrum
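As a side note (not what the snippet above does): the step `np.dot(la.inv(fisher), qtotal - bias)` can equivalently be written as a linear solve, which avoids forming the explicit inverse. A small sketch with made-up numbers:

    import numpy as np
    import scipy.linalg as la

    fisher = np.array([[2.0, 0.1], [0.1, 1.0]])  # illustrative values
    qtotal = np.array([1.0, 0.5])
    bias = np.array([0.1, 0.0])

    powerspectrum = la.solve(fisher, qtotal - bias)  # same result as inv(fisher) . (qtotal - bias)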
Example #23
    def powerspectrum(self):

        import scipy.linalg as la


        if os.path.exists(self._psfile):
            print "File %s exists. Skipping..." % self._psfile
            return

        ps = self.manager.psestimators[self.psname]
        ps.genbands()

        def _q_estimate(mi):

            return ps.q_estimator(mi, self.mmode_kl(mi))

        # Determine whether to use m=0 or not
        mlist = range(1 if self.no_m_zero else 0, self.telescope.mmax + 1)
        qvals = mpiutil.parallel_map(_q_estimate, mlist)

        qtotal = np.array(qvals).sum(axis=0)

        fisher, bias = ps.fisher_bias()

        powerspectrum =  np.dot(la.inv(fisher), qtotal - bias)


        if mpiutil.rank0:
            with h5py.File(self._psfile, 'w') as f:


                cv = la.inv(fisher)
                err = cv.diagonal()**0.5
                cr = cv / np.outer(err, err)

                f.create_dataset('fisher/', data=fisher)
#                f.create_dataset('bias/', data=self.bias)
                f.create_dataset('covariance/', data=cv)
                f.create_dataset('error/', data=err)
                f.create_dataset('correlation/', data=cr)

                f.create_dataset('bandpower/', data=ps.band_power)
                #f.create_dataset('k_start/', data=ps.k_start)
                #f.create_dataset('k_end/', data=ps.k_end)
                #f.create_dataset('k_center/', data=ps.k_center)
                #f.create_dataset('psvalues/', data=ps.psvalues)

                f.create_dataset('powerspectrum', data=powerspectrum)

        # Delete cache of bands for memory reasons
        del ps.clarray
        ps.clarray = None

        mpiutil.barrier()

        return powerspectrum
Example #24
    def show_params(cls):
        """Show all parameters that can be set and their default values."""
        if mpiutil.rank0:

            print('Parameters of %s:' % cls.__name__)
            for key, val in cls.params_init.items():
                print('%s:  %s' % (key, val))
            print()

        mpiutil.barrier()
Example #25
    def show_params(cls):
        """Show all parameters that can be set and their default values."""
        if mpiutil.rank0:

            print('Parameters of %s:' % cls.__name__)
            for key, val in cls.params_init.items():
                print('%s:  %s' % (key, val))
            print()

        mpiutil.barrier()
Example #26
    def mapmake_kl(self, nside, mapname, wiener=False):

        mapfile = self.output_directory + '/' + mapname

        if os.path.exists(mapfile):
            if mpiutil.rank0:
                print "File %s exists. Skipping..."
            return

        kl = self.manager.kltransforms[self.klname]

        if not kl.inverse:
            raise Exception("Need the inverse to make a meaningful map.")

        def _make_alm(mi):
            print "Making %i" % mi

            klmode = self.mmode_kl(mi)

            if wiener:
                evals = kl.evals_m(mi, self.klthreshold)

                if evals is not None:
                    klmode *= (evals / (1.0 + evals))

            isvdmode = kl.project_vector_kl_to_svd(mi, klmode, threshold=self.klthreshold)

            sphmode = self.beamtransfer.project_vector_svd_to_sky(mi, isvdmode)

            return sphmode

        alm_list = mpiutil.parallel_map(_make_alm, range(self.telescope.mmax + 1))

        if mpiutil.rank0:

            alm = np.zeros((self.telescope.nfreq, self.telescope.num_pol_sky, self.telescope.lmax + 1,
                            self.telescope.lmax + 1), dtype=np.complex128)

            # Determine whether to use m=0 or not
            mlist = range(1 if self.no_m_zero else 0, self.telescope.mmax + 1)

            for mi in mlist:

                alm[..., mi] = alm_list[mi]

            skymap = hputil.sphtrans_inv_sky(alm, nside)

            with h5py.File(mapfile, 'w') as f:
                f.create_dataset('/map', data=skymap)
                f.attrs['frequency'] = self.beamtransfer.telescope.frequencies
                f.attrs['polarization'] = np.array(['I', 'Q', 'U', 'V'])[:self.beamtransfer.telescope.num_pol_sky]

        mpiutil.barrier()
Example #27
    def show_params(cls):
        """Show all parameters that can be set and their default values of this task."""
        if mpiutil.rank0:
            # get all params, merging the params of all super classes
            all_params = cls._get_params()

            print('Parameters of task %s:' % cls.__name__)
            for key, val in all_params.items():
                print('%s:  %s' % (key, val))
            print()

        mpiutil.barrier()
Example #28
    def show_params(cls):
        """Show all parameters that can be set and their default values of this task."""
        if mpiutil.rank0:
            # get all params, merging the params of all super classes
            all_params = cls._get_params()

            print('Parameters of task %s:' % cls.__name__)
            for key, val in all_params.items():
                print('%s:  %s' % (key, val))
            print()

        mpiutil.barrier()
Example #29
    def mapmake_kl(self, nside, mapname, wiener=False):

        mapfile = self.output_directory + '/' + mapname

        if os.path.exists(mapfile):
            if mpiutil.rank0:
                print "File %s exists. Skipping..."
            return

        kl = self.manager.kltransforms[self.klname]

        if not kl.inverse:
            raise Exception("Need the inverse to make a meaningful map.")

        def _make_alm(mi):
            print "Making %i" % mi

            klmode = self.mmode_kl(mi)

            if wiener:
                evals = kl.evals_m(mi, self.klthreshold)

                if evals is not None:
                    klmode *= (evals / (1.0 + evals))

            isvdmode = kl.project_vector_kl_to_svd(mi, klmode, threshold=self.klthreshold)

            sphmode = self.beamtransfer.project_vector_svd_to_sky(mi, isvdmode)

            return sphmode

        alm_list = mpiutil.parallel_map(_make_alm, range(self.telescope.mmax + 1), root=0, method='rand')

        if mpiutil.rank0:

            alm = np.zeros((self.telescope.nfreq, self.telescope.num_pol_sky, self.telescope.lmax + 1,
                            self.telescope.lmax + 1), dtype=np.complex128)

            # Determine whether to use m=0 or not
            mlist = range(1 if self.no_m_zero else 0, self.telescope.mmax + 1)

            for mi in mlist:

                alm[..., mi] = alm_list[mi]

            skymap = hputil.sphtrans_inv_sky(alm, nside)

            with h5py.File(mapfile, 'w') as f:
                f.create_dataset('/map', data=skymap)
                f.attrs['frequency'] = self.beamtransfer.telescope.frequencies
                f.attrs['polarization'] = np.array(['I', 'Q', 'U', 'V'])[:self.beamtransfer.telescope.num_pol_sky]

        mpiutil.barrier()
Example #30
    def __init__(self, bt, subdir=None):
        self.beamtransfer = bt
        self.telescope = self.beamtransfer.telescope

        subdir = "ev" if subdir is None else subdir

        # Create directory if required
        self.evdir = self.beamtransfer.directory + "/" + subdir
        if mpiutil.rank0 and not os.path.exists(self.evdir):
            os.makedirs(self.evdir)

        # If we're part of an MPI run, synchronise here.
        mpiutil.barrier()
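This constructor shows the simplest barrier idiom in the collection: rank 0 creates a shared directory and every other rank waits at the barrier until it exists. In isolation the pattern is just (hypothetical path):

    import os
    from caput import mpiutil

    outdir = "./products"  # hypothetical path

    if mpiutil.rank0 and not os.path.exists(outdir):
        os.makedirs(outdir)

    mpiutil.barrier()  # no rank proceeds until the directory exists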
Example #31
    def __init__(self, bt, subdir=None):
        self.beamtransfer = bt
        self.telescope = self.beamtransfer.telescope

        subdir = "ev" if subdir is None else subdir

        # Create directory if required
        self.evdir = self.beamtransfer.directory + "/" + subdir
        if mpiutil.rank0 and not os.path.exists(self.evdir):
            os.makedirs(self.evdir)

        # If we're part of an MPI run, synchronise here.
        mpiutil.barrier()
Example #32
    def to_hdf5(self, filename, dataset, create=False):
        """Parallel write into a contiguous HDF5 dataset.

        Parameters
        ----------
        filename : str
            File to write dataset into.
        dataset : string
            Name of dataset to write into. Should not exist.
        """

        ## Naive non-parallel implementation to start

        import h5py

        if self.comm is None or self.comm.rank == 0:

            with h5py.File(filename, 'a' if create else 'r+') as fh:
                if dataset in fh:
                    raise Exception("Dataset should not exist.")

                fh.create_dataset(dataset, self.global_shape, dtype=self.dtype)
                fh[dataset][:] = np.array(0.0).astype(self.dtype)

        # wait until all processes see the created file
        while not os.path.exists(filename):
            time.sleep(1)

        # self._comm.Barrier()
        mpiutil.barrier(comm=self.comm)

        if self.axis == 0:
            dist_arr = self
        else:
            dist_arr = self.redistribute(axis=0)

        size = 1 if self.comm is None else self.comm.size
        for ri in range(size):

            rank = 0 if self.comm is None else self.comm.rank
            if ri == rank:
                with h5py.File(filename, 'r+') as fh:

                    start = dist_arr.local_offset[0]
                    end = start + dist_arr.local_shape[0]

                    fh[dataset][start:end] = dist_arr

            # dist_arr._comm.Barrier()
            mpiutil.barrier(comm=self.comm)
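A minimal usage sketch for the method above, assuming caput's MPIArray and an MPI launch (the file and dataset names are illustrative, and the target dataset must not already exist):

    import numpy as np
    from caput import mpiarray, mpiutil

    arr = mpiarray.MPIArray((8, 4), dtype=np.float64)  # distributed along axis 0 by default
    arr[:] = mpiutil.rank                              # fill the local block with this rank's number

    arr.to_hdf5("to_hdf5_demo.hdf5", "data", create=True)
    mpiutil.barrier()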
Example #33
    def mapmake_full(self, nside, mapname, nbin=None, dirty=False, method='svd', normalize=True, threshold=1.0e3):

        nfreq = self.telescope.nfreq
        if nbin is not None and (nbin <= 0 or nbin >= nfreq): # invalid nbin
            nbin = None

        def _make_alm(mi):

            print "Making %i" % mi

            mmode = self.mmode(mi)
            if dirty:
                sphmode = self.beamtransfer.project_vector_backward_dirty(mi, mmode, nbin, normalize, threshold)
            else:
                if method == 'svd':
                    sphmode = self.beamtransfer.project_vector_telescope_to_sky(mi, mmode, nbin)
                elif method == 'tk':
                    sphmode = self.beamtransfer.project_vector_telescope_to_sky_tk(mi, mmode, nbin)
                else:
                    raise ValueError('Unknown map-making method %s' % method)

            return sphmode

        alm_list = mpiutil.parallel_map(_make_alm, range(self.telescope.mmax + 1))

        if mpiutil.rank0:

            # get center freq of each bin
            if nbin is not None:
                n, s, e = mpiutil.split_m(nfreq, nbin)
                cfreqs = np.array([ self.beamtransfer.telescope.frequencies[(s[i]+e[i])//2] for i in range(nbin) ])
            else:
                nbin = nfreq
                cfreqs = self.beamtransfer.telescope.frequencies

            alm = np.zeros((nbin, self.telescope.num_pol_sky, self.telescope.lmax + 1,
                            self.telescope.lmax + 1), dtype=np.complex128)

            for mi in range(self.telescope.mmax + 1):

                alm[..., mi] = alm_list[mi]

            skymap = hputil.sphtrans_inv_sky(alm, nside)

            with h5py.File(self.output_directory + '/' + mapname, 'w') as f:
                f.create_dataset('/map', data=skymap)
                f.attrs['frequency'] = cfreqs
                f.attrs['polarization'] = np.array(['I', 'Q', 'U', 'V'])[:self.beamtransfer.telescope.num_pol_sky]

        mpiutil.barrier()
Example #34
    def finish(self):
        mpiutil.barrier()

        if mpiutil.rank0:
            #for ii in self.HI_mock_ids:
            for ii in range(self.mock_n):
                norm = self.df_out[ii]['count_map'][:]
                hist = self.df_out[ii]['dirty_map'][:]
                norm[norm == 0] = np.inf
                self.df_out[ii]['clean_map'][:] = hist / norm
            print('Finishing CleanMapMaking.')

        mpiutil.barrier()
        super(SurveySimToMap, self).finish()
Example #35
    def read_process_write(self, input):
        """Reads input, executes any processing and writes output."""

        # Read input if needed.
        if input is None and not self._no_input:
            if self.input_files is None or len(self.input_files) == 0:
                if mpiutil.rank0:
                    msg = 'No file to read from, will stop then...'
                    logger.info(msg)
                self.stop_iteration(True)
                return None
            if mpiutil.rank0:
                msg = "%s reading data from files:" % self.__class__.__name__
                for input_file in self.input_files:
                    msg += '\n\t%s' % input_file
                logger.info(msg)
            mpiutil.barrier()
            input = self.read_input()

        # Analyze.
        if self._no_input:
            if input is not None:
                # This should never happen.  Just here to catch bugs.
                raise RuntimeError("Somehow `input` was set")
            output = self.process()
        else:
            if input is None:
                output = None
            else:
                output = self.process(input)

        # Write output if needed.
        if output is not None and len(self.output_files) != 0:
            if mpiutil.rank0:
                msg = "%s writing data to files:" % self.__class__.__name__

                # make output dirs
                for output_file in self.output_files:
                    msg += '\n\t%s' % output_file
                    output_dir = path.dirname(output_file)
                    if not path.exists(output_dir):
                        os.makedirs(output_dir)

                logger.info(msg)

            mpiutil.barrier()

            self.write_output(output)

        return output
Example #36
    def to_hdf5(self, filename, dataset, create=False):
        """Parallel write into a contiguous HDF5 dataset.

        Parameters
        ----------
        filename : str
            File to write dataset into.
        dataset : string
            Name of dataset to write into. Should not exist.
        """

        ## Naive non-parallel implementation to start

        import h5py

        if self.comm is None or self.comm.rank == 0:

            with h5py.File(filename, 'a' if create else 'r+') as fh:
                if dataset in fh:
                    raise Exception("Dataset should not exist.")

                fh.create_dataset(dataset, self.global_shape, dtype=self.dtype)
                fh[dataset][:] = np.array(0.0).astype(self.dtype)

        # wait until all processes see the created file
        while not os.path.exists(filename):
            time.sleep(1)

        # self._comm.Barrier()
        mpiutil.barrier(comm=self.comm)

        if self.axis == 0:
            dist_arr = self
        else:
            dist_arr = self.redistribute(axis=0)

        size = 1 if self.comm is None else self.comm.size
        for ri in range(size):

            rank = 0 if self.comm is None else self.comm.rank
            if ri == rank:
                with h5py.File(filename, 'r+') as fh:

                    start = dist_arr.local_offset[0]
                    end = start + dist_arr.local_shape[0]

                    fh[dataset][start:end] = dist_arr

            # dist_arr._comm.Barrier()
            mpiutil.barrier(comm=self.comm)
Example #37
    def _generate_dirs(self):
        ## Create all the directories required to store the beam transfers.

        if mpiutil.rank0:

            # Create main directory for beamtransfer
            if not os.path.exists(self.directory):
                os.makedirs(self.directory)

            # Create directories for m beams
            if not os.path.exists(self._mdir):
                os.makedirs(self._mdir)

            if self.gen_invbeam and not os.path.exists(self._inv_mdir):
                os.makedirs(self._inv_mdir)

        mpiutil.barrier()
Example #38
    def generate_mmodes_svd(self):
        """Generate the SVD modes for the Timestream."""

        # Iterate over local m's, project mode and save to disk.
        for mi in mpiutil.mpirange(self.telescope.mmax + 1):

            if os.path.exists(self._svdfile(mi)):
                print("File %s exists. Skipping..." % self._svdfile(mi))
                continue

            tm = self.mmode(mi).reshape(self.telescope.nfreq, 2 * self.telescope.npairs)
            svdm = self.beamtransfer.project_vector_telescope_to_svd(mi, tm)

            with h5py.File(self._svdfile(mi), "w") as f:
                f.create_dataset("mmode_svd", data=svdm)
                f.attrs["m"] = mi

        mpiutil.barrier()
Example #39
    def generate_mmodes_svd(self):
        """Generate the SVD modes for the Timestream.
        """

        # Iterate over local m's, project mode and save to disk.
        for mi in mpiutil.mpirange(self.telescope.mmax + 1):

            if os.path.exists(self._svdfile(mi)):
                print "File %s exists. Skipping..." % self._svdfile(mi)
                continue

            tm = self.mmode(mi).reshape(self.telescope.nfreq, 2*self.telescope.npairs)
            svdm = self.beamtransfer.project_vector_telescope_to_svd(mi, tm)

            with h5py.File(self._svdfile(mi), 'w') as f:
                f.create_dataset('mmode_svd', data=svdm)
                f.attrs['m'] = mi

        mpiutil.barrier()
Example #40
    def read_process_write(self, input):
        """Reads input, executes any processing and writes output."""

        # Read input if needed.
        if input is None and not self._no_input:
            if len(self.input_files) == 0:
                raise RuntimeError('No file to read from')
            if mpiutil.rank0:
                msg = "%s reading data from files:" % self.__class__.__name__
                for input_file in self.input_files:
                    msg += '\n\t%s' % input_file
                logger.info(msg)
            mpiutil.barrier()
            input = self.read_input()

        # Analyse.
        if self._no_input:
            if input is not None:
                # This should never happen.  Just here to catch bugs.
                raise RuntimeError("Somehow `input` was set")
            output = self.process()
        else:
            output = self.process(input)

        # Write output if needed.
        if output is not None and len(self.output_files) != 0:
            if mpiutil.rank0:
                msg = "%s writing data to files:" % self.__class__.__name__

                # make output dirs
                for output_file in self.output_files:
                    msg += '\n\t%s' % output_file
                    output_dir = path.dirname(output_file)
                    if not path.exists(output_dir):
                        os.makedirs(output_dir)

                logger.info(msg)

            mpiutil.barrier()

            self.write_output(output)

        return output
Example #41
    def _save_checkpoint(self):

        if self.checkpoint:

            # Create a tag for the output file name
            tag = self.stack.attrs[
                'tag'] if 'tag' in self.stack.attrs else 'stack'

            # Construct the filename
            outfile = self.output_root + str(tag) + '.h5'

            # Expand any variables in the path
            outfile = os.path.expanduser(outfile)
            outfile = os.path.expandvars(outfile)

            # If the output file already exists, then we would like to
            # rename it temporarily while writing the contents of the stack.
            # We do not want to lose our progress if the job ends while writing.
            if mpiutil.rank0:

                delete_temp = False

                if os.path.isfile(outfile):

                    tempfile = list(os.path.splitext(outfile))
                    tempfile.insert(-1, '_temp')
                    tempfile = ''.join(tempfile)

                    shutil.move(outfile, tempfile)

                    delete_temp = True

            mpiutil.barrier()

            # Save checkpoint
            self.write_output(outfile, self.stack)

            # Finished writing stack to disk!
            # If necessary, delete temporary file.
            if mpiutil.rank0 and delete_temp:
                os.remove(tempfile)

            mpiutil.barrier()
Example #42
    def process(self, input):
        def _indx_f(x, shp):
            if x >= np.prod(shp): return
            _i = [
                int(x / np.prod(shp[1:])),
            ]
            for i in range(1, len(shp)):
                x -= _i[i - 1] * np.prod(shp[i:])
                _i += [
                    int(x / np.prod(shp[i + 1:])),
                ]
            return tuple(_i)

        diag_cov = self.params['diag_cov']
        threshold = self.params['threshold']
        task_n = np.prod(self.map_shp[:-2])
        for task_ind in mpiutil.mpirange(task_n):

            indx = _indx_f(task_ind, self.map_shp[:-2])
            #print mpiutil.rank,  indx
            print "RANK%03d: (" % mpiutil.rank + ("%04d, " *
                                                  len(indx)) % indx + ")"

            map_shp = self.map_shp[-2:]
            _dirty_map = np.zeros(map_shp, dtype=__dtype__)
            if diag_cov:
                _cov_inv = np.zeros(map_shp, dtype=__dtype__)
            else:
                _cov_inv = np.zeros(map_shp * 2, dtype=__dtype__)
            for ii, df in enumerate(self.df_in):
                _dirty_map += df['dirty_map'][indx + (slice(None), )]
                self.read_block_from_dset(ii, 'cov_inv', indx, _cov_inv)
                #_cov_inv   += df['cov_inv'][indx + (slice(None), )]

            self.df_out[-1]['dirty_map'][indx + (slice(None), )] = _dirty_map
            clean_map, noise_diag = make_cleanmap(_dirty_map, _cov_inv,
                                                  diag_cov, threshold)
            self.df_out[-1]['clean_map'][indx + (slice(None), )] = clean_map
            self.df_out[-1]['noise_diag'][indx + (slice(None), )] = noise_diag
            del _cov_inv
            gc.collect()

        mpiutil.barrier()
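The `_indx_f` helper above converts a flat task index into a multi-dimensional index in C order, which appears to be the same mapping as NumPy's `np.unravel_index`; a quick sanity check:

    import numpy as np

    shp = (3, 4, 5)
    idx = np.unravel_index(17, shp)
    print(tuple(int(i) for i in idx))  # (0, 3, 2), the same result as _indx_f(17, shp)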
Example #43
    def _generate_teldatafile(self, regen=False):

        if mpiutil.rank0:
            if os.path.exists(self._tel_datafile) and not regen:
                print()
                print('=' * 80)
                print('File %s exists. Skipping...' % self._tel_datafile)
            else:
                print()
                print('=' * 80)
                print('Create telescope data file %s...' % self._tel_datafile)
                with h5py.File(self._tel_datafile, 'w') as f:
                    f.create_dataset('baselines', data=self.telescope.baselines)
                    f.create_dataset('frequencies', data=self.telescope.frequencies)
                    f.create_dataset('uniquepairs', data=self.telescope.uniquepairs)
                    f.create_dataset('allpairs', data=self.telescope.allpairs)
                    f.create_dataset('redundancy', data=self.telescope.redundancy)

        mpiutil.barrier()
Example #44
    def next(self):

        if self.iter == self.iter_num:
            mpiutil.barrier()
            #self.close_outputfiles()
            super(CubeSim, self).next()

        print "rank %03d, %03d" % (mpiutil.rank, self.iter_list[self.iter])

        self.realize_simulation()
        if 'delta' in self.outfiles:
            self.make_delta_sim()
        if 'optsim' in self.outfiles:
            self.make_optical_sim()
        if 'withbeam' in self.outfiles:
            self.convolve_by_beam()

        self.write_to_file()
        self.write_to_file_splitmock()

        self.iter += 1
Example #45
    def __init__(self, directory, telescope=None, gen_invbeam=True, noise_weight=True):

        self.directory = directory
        self.telescope = telescope
        self.gen_invbeam = gen_invbeam
        self.noise_weight = noise_weight

        # Create directory if required
        if mpiutil.rank0 and not os.path.exists(directory):
            os.makedirs(directory)

        mpiutil.barrier()

        if self.telescope is None and mpiutil.rank0:
            print("Attempting to read telescope from disk...")

            try:
                with open(self._picklefile, 'rb') as f:
                    self.telescope = pickle.load(f)
            except (IOError, UnpicklingError):
                raise Exception("Could not load Telescope object from disk.")
Example #46
    def __init__(self, kltrans, subdir="ps"):
        """Initialise a PS estimator class.

        Parameters
        ----------
        kltrans : KLTransform
            The KL Transform filter to use.
        subdir : string, optional
            Subdirectory of the KLTransform directory to store results in.
            Default is 'ps'.
        """

        self.kltrans = kltrans
        self.telescope = kltrans.telescope
        self.psdir = self.kltrans.evdir + "/" + subdir + "/"

        if mpiutil.rank0 and not os.path.exists(self.psdir):
            os.makedirs(self.psdir)

        # If we're part of an MPI run, synchronise here.
        mpiutil.barrier()
Example #47
    def fake_kl_data(self):

        kl = self.manager.kltransforms[self.klname]

        # Iterate over local m's, project mode and save to disk.
        for mi in mpiutil.mpirange(self.telescope.mmax + 1):

            evals = kl.evals_m(mi)

            if evals is None:
                klmode = np.array([], dtype=np.complex128)
            else:
                modeamp = ((evals + 1.0) / 2.0)**0.5
                klmode = modeamp * (np.array([1.0, 1.0J]) * np.random.standard_normal((modeamp.shape[0], 2))).sum(axis=1)


            with h5py.File(self._klfile(mi), 'w') as f:
                f.create_dataset('mmode_kl', data=klmode)
                f.attrs['m'] = mi

        mpiutil.barrier()
Example #48
    def __init__(self, kltrans, subdir="ps"):
        """Initialise a PS estimator class.

        Parameters
        ----------
        kltrans : KLTransform
            The KL Transform filter to use.
        subdir : string, optional
            Subdirectory of the KLTransform directory to store results in.
            Default is 'ps'.
        """

        self.kltrans = kltrans
        self.telescope = kltrans.telescope
        self.psdir = self.kltrans.evdir + '/' + subdir + '/'

        if mpiutil.rank0 and not os.path.exists(self.psdir):
            os.makedirs(self.psdir)

        # If we're part of an MPI run, synchronise here.
        mpiutil.barrier()
Example #49
def collect_m_arrays(mlist, func, shapes, dtype):

    data = [ (mi, func(mi)) for mi in mpiutil.partition_list_mpi(mlist) ]

    mpiutil.barrier()

    if mpiutil.rank0 and mpiutil.size == 1:
        p_all = [data]
    else:
        p_all = mpiutil.world.gather(data, root=0)

    mpiutil.barrier() # Not sure if this barrier really does anything,
                      # but hoping to stop collect breaking

    marrays = None
    if mpiutil.rank0:
        marrays = [np.zeros((len(mlist),) + shape, dtype=dtype) for shape in shapes]

        for p_process in p_all:

            for mi, result in p_process:

                for si in range(len(shapes)):
                    if result[si] is not None:
                        marrays[si][mi] = result[si]

    mpiutil.barrier()

    return marrays
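A hypothetical usage of `collect_m_arrays`: each rank computes one small array per m, and rank 0 receives them stacked along the first axis (all names below are illustrative):

    import numpy as np
    from caput import mpiutil

    mmax = 4  # illustrative

    def _compute(mi):
        # Return a tuple/list of arrays, one per entry in `shapes`.
        return (np.full((2, 2), mi, dtype=np.float64),)

    marrays = collect_m_arrays(list(range(mmax + 1)), _compute, [(2, 2)], np.float64)

    if mpiutil.rank0:
        print(marrays[0].shape)  # (mmax + 1, 2, 2)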
Example #50
    def _generate_invbeam(self, regen=False):

        completed_file = self._inv_mdir + 'COMPLETED_IBEAM'
        if os.path.exists(completed_file) and not regen:
            if mpiutil.rank0:
                print()
                print('=' * 80)
                print("******* Inverse beam transfer m-files already generated ********")
            mpiutil.barrier()
            return

        if mpiutil.rank0:
            print()
            print('=' * 80)
            print('Create inverse beam transfer m-files...')

        st = time.time()

        mmax = self.telescope.mmax
        for mi in mpiutil.mpilist(range(mmax+1)):
            inv_beam, W1 = self.compute_invbeam_m(mi)
            # save to file
            with h5py.File(self._inv_mfile(mi), 'w') as f:
                f.create_dataset('ibeam_m', data=inv_beam)
                f.create_dataset('W1_m', data=W1)
                f.attrs['m'] = mi

        mpiutil.barrier()

        et = time.time()

        if mpiutil.rank0:

            # Make file marker that the m's have been correctly generated:
            open(completed_file, 'a').close()

            # Print out timing
            print "=== Create inverse beam transfer m-files took %f s ===" % (et - st)
Example #51
    def generate_mmodes_kl(self):
        """Generate the KL modes for the Timestream.
        """

        kl = self.manager.kltransforms[self.klname]

        # Iterate over local m's, project mode and save to disk.
        for mi in mpiutil.mpirange(self.telescope.mmax + 1):

            if os.path.exists(self._klfile(mi)):
                print "File %s exists. Skipping..." % self._klfile(mi)
                continue

            svdm = self.mmode_svd(mi) #.reshape(self.telescope.nfreq, 2*self.telescope.npairs)
            #svdm = self.beamtransfer.project_vector_telescope_to_svd(mi, tm)

            klm = kl.project_vector_svd_to_kl(mi, svdm, threshold=self.klthreshold)

            with h5py.File(self._klfile(mi), 'w') as f:
                f.create_dataset('mmode_kl', data=klm)
                f.attrs['m'] = mi

        mpiutil.barrier()
Example #52
0
def simulate(beamtransfer, outdir, tsname, maps=[], ndays=None, resolution=0, add_noise=True, seed=None, **kwargs):
    """Create a simulated timestream and save it to disk.

    Parameters
    ----------
    beamtransfer : fmmode.core.beamtransfer.BeamTransfer
        BeamTransfer object containing the analysis products.
    outdir : string
        Directory that we will save the timestream into.
    tsname : string
        Name of the timestream.
    maps : list
        List of map filenames. The sum of these forms the simulated sky.
    ndays : int, optional
        Number of days of observation. Setting `ndays = None` (default) uses
        the default stored in the telescope object; `ndays = 0` assumes the
        observation time is infinite so that the noise is zero.
    resolution : scalar, optional
        Approximate time resolution in seconds. Setting `resolution = 0`
        (default) calculates the value from the mmax.
    add_noise : bool, optional
        Whether to add random noise to the simulated visibilities. Default True.
    seed : int, optional
        Seed for the random number generator used to generate the noise.
        Default None (no explicit seeding).

    Returns
    -------
    timestream : Timestream
    """

    # Create timestream object
    tstream = Timestream(outdir, tsname, beamtransfer)

    completed_file = tstream._tsdir + '/COMPLETED_TIMESTREAM'
    if os.path.exists(completed_file):
        if mpiutil.rank0:
            print "******* timestream-files already generated ********"
        mpiutil.barrier()
        return tstream

    # Make directory if required
    try:
        os.makedirs(tstream._tsdir)
    except OSError:
        # directory exists
        pass

    if mpiutil.rank0:
        # if not os.path.exists(tstream._tsdir):
        #     os.makedirs(tstream._tsdir)

        tstream.save()

    ## Read in telescope system
    bt = beamtransfer
    tel = bt.telescope

    lmax = tel.lmax
    mmax = tel.mmax
    nfreq = tel.nfreq
    nbl = tel.nbase
    npol = tel.num_pol_sky

    # If ndays is not set use the default value.
    if ndays is None:
        ndays = tel.ndays

    # Calculate the number of timesamples from the resolution
    if resolution == 0:
        # Set the minimum resolution required for the sky.
        ntime = 2*mmax+1
    else:
        # Set the number of time samples from the given resolution
        ntime = int(np.round(24 * 3600.0 / resolution))
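        # e.g. an assumed resolution of 60 s gives ntime = round(24 * 3600 / 60) = 1440 samples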

    indices = list(itertools.product(np.arange(nfreq), np.arange(npol)))
    lind, sind, eind = mpiutil.split_local(nfreq * npol)

    # local section of the Tm array
    theta_size = tel.theta_size
    phi_size = tel.phi_size
    Tm = np.zeros((lind, theta_size, phi_size), dtype=np.complex128)

    for ind, (f_ind, p_ind) in enumerate(indices[sind:eind]):
        hp_map = None
        for idx, mapfile in enumerate(maps):
            with h5py.File(mapfile, 'r') as f:
                if idx == 0:
                    hp_map = f['map'][f_ind, p_ind, :]
                else:
                    hp_map += f['map'][f_ind, p_ind, :]
        if hp_map is not None:
            cart_map = hpproj.cartesian_proj(hp_map, tel.cart_projector)
            # Calculate the Tm's for the local sections
            Tm[ind] = np.fft.ifft(cart_map, axis=1) # / phi_size # m = 0 is at left

    Tm = MPIArray.wrap(Tm, axis=0)
    # redistribute along different m
    Tm = Tm.redistribute(axis=2)
    Tm = Tm.reshape((nfreq, npol, theta_size, None))
    Tm = Tm.reshape((nfreq, npol*theta_size, None))
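    # The last axis of Tm indexes the azimuthal modes in numpy FFT order; with
    # phi_size = 2*mmax + 1 this matches the `ms` array built below
    # (0, 1, ..., mmax, -mmax, ..., -1).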

    ms = np.concatenate([np.arange(0, mmax+1), np.arange(-mmax, 0)])
    lm, sm, em = mpiutil.split_local(phi_size)
    # local section of mmode
    # mmode = np.zeros((lm, nbl, nfreq), dtype=np.complex128)
    mmode = np.zeros((lm, nfreq, nbl), dtype=np.complex128)

    for ind, mi in enumerate(ms[sm:em]):
        mmode[ind] = bt.project_vector_sky_to_telescope(mi, Tm[:, :, ind].view(np.ndarray))

    mmode = MPIArray.wrap(mmode, axis=0)
    mmode = mmode.redistribute(axis=2) # distribute along bl

    # add noise if required
    if add_noise:
        lbl, sbl, ebl = mpiutil.split_local(nbl)
        # Fetch the noise powerspectrum
        noise_ps = tel.noisepower(np.arange(sbl, ebl)[:, np.newaxis], np.arange(nfreq)[np.newaxis, :], ndays=ndays).reshape(lbl, nfreq).T[np.newaxis, :, :]

        # Seed random number generator to give consistent noise
        if seed is not None:
            # Must include rank such that we don't have a massive power deficit from correlated noise
            np.random.seed(seed + mpiutil.rank)

        # Create and weight complex noise coefficients
        noise_mode = (np.array([1.0, 1.0J]) * np.random.standard_normal(mmode.shape + (2,))).sum(axis=-1)
        noise_mode *= (noise_ps / 2.0)**0.5
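        # Each sample is real + 1j*imag with unit-variance Gaussian parts; scaling both
        # by sqrt(noise_ps / 2) gives a complex noise sample of total variance noise_ps.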

        mmode += noise_mode

        del noise_mode

        # Reset RNG
        if seed is not None:
            np.random.seed()

    # The time samples the visibility is calculated at
    tphi = np.linspace(0, 2*np.pi, ntime, endpoint=False)

    # inverse FFT to get timestream
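    # np.fft.ifft divides by the transform length, so the factor ntime restores the
    # plain sum V(phi_k) = sum_m V_m exp(i m phi_k) at the sample points tphi
    # (assuming ntime matches the length of the m axis).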
    vis_stream = np.fft.ifft(mmode, axis=0) * ntime
    vis_stream = MPIArray.wrap(vis_stream, axis=2)

    # save vis_stream to file
    vis_h5 = memh5.MemGroup(distributed=True)
    vis_h5.create_dataset('/timestream', data=vis_stream)
    vis_h5.create_dataset('/phi', data=tphi)

    # Telescope layout data
    vis_h5.create_dataset('/feedmap', data=tel.feedmap)
    vis_h5.create_dataset('/feedconj', data=tel.feedconj)
    vis_h5.create_dataset('/feedmask', data=tel.feedmask)
    vis_h5.create_dataset('/uniquepairs', data=tel.uniquepairs)
    vis_h5.create_dataset('/baselines', data=tel.baselines)

    # Telescope frequencies
    vis_h5.create_dataset('/frequencies', data=tel.frequencies)

    # Write metadata
    vis_h5.attrs['beamtransfer_path'] = os.path.abspath(bt.directory)
    vis_h5.attrs['ntime'] = ntime

    # save to file
    vis_h5.to_hdf5(tstream._tsfile)

    if mpiutil.rank0:
        # Make file marker that the m's have been correctly generated:
        open(completed_file, 'a').close()

    mpiutil.barrier()

    return tstream
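
An illustrative call sketch for simulate(); `bt` (a BeamTransfer instance), the directories and the map file name are assumptions:

    # Illustrative call only: bt, the paths and the map file name are assumed.
    tstream = simulate(bt, 'timestreams/', 'sim1',
                       maps=['skymap.hdf5'], ndays=30, add_noise=True, seed=42)
    print(tstream._tsfile)
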
Example #53
0
    def mapmake_full(self, nside, maptype):

        mapfile = self._mapsdir + 'map_%s.hdf5' % maptype
        Tmfile = self._Tmsdir + 'Tm_%s.hdf5' % maptype

        if os.path.exists(mapfile):
            if mpiutil.rank0:
                print "File %s exists. Skipping..." % mapfile
            mpiutil.barrier()
            return
        elif os.path.exists(Tmfile):
            if mpiutil.rank0:
                print "File %s exists. Read from it..." % Tmfile

            Tm = MPIArray.from_hdf5(Tmfile, 'Tm')
        else:

            def _make_Tm(mi):

                print "Making %i" % mi

                mmode = self.mmode(mi)

                return self.beamtransfer.project_vector_telescope_to_sky(mi, mmode)


            # if mpiutil.rank0 and not os.path.exists(self._Tmsdir):
            #     # Make directory for Tms file
            #     os.makedirs(self._Tmsdir)

            # Make directory for Tms file
            try:
                os.makedirs(self._Tmsdir)
            except OSError:
                # directory exists
                pass

            tel = self.telescope
            mmax = tel.mmax
            lm, sm, em = mpiutil.split_local(mmax+1)

            nfreq = tel.nfreq
            npol = tel.num_pol_sky
            ntheta = tel.theta_size
            # the local Tm array
            Tm = np.zeros((nfreq, npol, ntheta, lm), dtype=np.complex128)
            for ind, mi in enumerate(range(sm, em)):
                Tm[..., ind] = _make_Tm(mi)
            Tm = MPIArray.wrap(Tm, axis=3)
            Tm = Tm.redistribute(axis=0) # redistribute along freq

            # Save Tm
            Tm.to_hdf5(Tmfile, 'Tm', create=True)


        # if mpiutil.rank0 and not os.path.exists(self._mapsdir):
        #     # Make directory for maps file
        #     os.makedirs(self._mapsdir)

        # Make directory for maps file
        try:
            os.makedirs(self._mapsdir)
        except OSError:
            # directory exists
            pass

        tel = self.telescope
        npol = tel.num_pol_sky
        ntime = self.ntime

        # irfft to get map
        # cart_map = np.fft.irfft(Tm, axis=3, n=ntime) * ntime # NOTE the normalization constant ntime here to be consistant with the simulation fft
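        # np.fft.hfft assumes the full m spectrum is Hermitian (the map is real), so only
        # the m >= 0 coefficients stored in Tm are needed; unlike irfft it carries no 1/n
        # factor, which keeps the ntime normalisation used in the simulation.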
        cart_map = np.fft.hfft(Tm, axis=3, n=ntime)
        lfreq = cart_map.shape[0]
        hp_map = np.zeros((lfreq, npol, 12*nside**2), dtype=cart_map.dtype)
        for fi in range(lfreq):
            for pi in range(npol):
                hp_map[fi, pi] = tel.cart_projector.inv_projmap(cart_map[fi, pi], nside)

        mpiutil.barrier()
        hp_map = MPIArray.wrap(hp_map, axis=0)

        # save map
        hp_map.to_hdf5(mapfile, 'map', create=True)
Example #54
0
    def generate(self, regen=False):
        """Calculate the total Fisher matrix and bias and save to a file.

        Parameters
        ----------
        regen : boolean, optional
            Force regeneration if products already exist (default `False`).
        """

        if mpiutil.rank0:
            st = time.time()
            print "======== Starting PS calculation ========"

        ffile = self.psdir + "/fisher.hdf5"

        if os.path.exists(ffile) and not regen:
            print ("Fisher matrix file: %s exists. Skipping..." % ffile)
            return

        mpiutil.barrier()

        # Pre-compute all the angular power spectra for the bands
        self.genbands()

        # Use parallel map to distribute Fisher calculation
        fisher_bias = mpiutil.parallel_map(self.fisher_bias_m, range(self.telescope.mmax + 1))

        # Unpack into separate lists of the Fisher matrix and bias
        fisher, bias = zip(*fisher_bias)

        # Sum over all m-modes to get the overall Fisher matrix and bias
        self.fisher = np.sum(np.array(fisher), axis=0).real  # Be careful of the .real here
        self.bias = np.sum(np.array(bias), axis=0).real  # Be careful of the .real here

        # Write out all the PS estimation products
        if mpiutil.rank0:
            et = time.time()
            print "======== Ending PS calculation (time=%f) ========" % (et - st)

            # Check to ensure that the Fisher matrix isn't all zeros.
            if not (self.fisher == 0).all():
                # Generate derived quantities (covariance, errors..)
                cv = la.inv(self.fisher)
                err = cv.diagonal() ** 0.5
                cr = cv / np.outer(err, err)
            else:
                cv = np.zeros_like(self.fisher)
                err = cv.diagonal()
                cr = np.zeros_like(self.fisher)

            f = h5py.File(ffile, "w")
            f.attrs["bandtype"] = self.bandtype

            f.create_dataset("fisher/", data=self.fisher)
            f.create_dataset("bias/", data=self.bias)
            f.create_dataset("covariance/", data=cv)
            f.create_dataset("errors/", data=err)
            f.create_dataset("correlation/", data=cr)

            f.create_dataset("band_power/", data=self.band_power)

            if self.bandtype == "polar":
                f.create_dataset("k_start/", data=self.k_start)
                f.create_dataset("k_end/", data=self.k_end)
                f.create_dataset("k_center/", data=self.k_center)

                f.create_dataset("theta_start/", data=self.theta_start)
                f.create_dataset("theta_end/", data=self.theta_end)
                f.create_dataset("theta_center/", data=self.theta_center)

                f.create_dataset("k_bands", data=self.k_bands)
                f.create_dataset("theta_bands", data=self.theta_bands)

            elif self.bandtype == "cartesian":

                f.create_dataset("kpar_start/", data=self.kpar_start)
                f.create_dataset("kpar_end/", data=self.kpar_end)
                f.create_dataset("kpar_center/", data=self.kpar_center)

                f.create_dataset("kperp_start/", data=self.kperp_start)
                f.create_dataset("kperp_end/", data=self.kperp_end)
                f.create_dataset("kperp_center/", data=self.kperp_center)

                f.create_dataset("kpar_bands", data=self.kpar_bands)
                f.create_dataset("kperp_bands", data=self.kperp_bands)

            f.close()
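
A hedged read-back sketch for the products saved above; `psdir` is assumed to point at the same directory used by the estimator:

    # Hypothetical read-back of the saved PS estimation products.
    with h5py.File(psdir + '/fisher.hdf5', 'r') as f:
        fisher = f['fisher'][:]
        bias = f['bias'][:]
        errors = f['errors'][:]
        print(f.attrs['bandtype'], fisher.shape, errors)
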
Example #55
0
    def test_io(self):

        import h5py

        # Cleanup directories
        fname = 'testdset.hdf5'

        if mpiutil.rank0 and os.path.exists(fname):
            os.remove(fname)

        mpiutil.barrier()

        gshape = (19, 17)

        ds = mpiarray.MPIArray(gshape, dtype=np.int64)

        ga = np.arange(np.prod(gshape)).reshape(gshape)

        l0, s0, e0 = mpiutil.split_local(gshape[0])
        ds[:] = ga[s0:e0]

        ds.redistribute(axis=1).to_hdf5(fname, 'testds', create=True)

        if mpiutil.rank0:

            with h5py.File(fname, 'r') as f:

                h5ds = f['testds'][:]

                assert (h5ds == ga).all()

        ds2 = mpiarray.MPIArray.from_hdf5(fname, 'testds')

        assert (ds2 == ds).all()

        mpiutil.barrier()


        # Check that reading over another distributed axis works
        ds3 = mpiarray.MPIArray.from_hdf5(fname, 'testds', axis=1)
        assert ds3.shape[0] == gshape[0]
        assert ds3.shape[1] == mpiutil.split_local(gshape[1])[0]
        ds3 = ds3.redistribute(axis=0)
        assert (ds3 == ds).all()
        mpiutil.barrier()

        # Check a read with an arbitrary slice in there. This only checks the shape is correct.
        ds4 = mpiarray.MPIArray.from_hdf5(fname, 'testds', axis=1, sel=(np.s_[3:10:2], np.s_[1:16:3]))
        assert ds4.shape[0] == 4
        assert ds4.shape[1] == mpiutil.split_local(5)[0]
        mpiutil.barrier()

        # Check the read with a slice along the axis being read
        ds5 = mpiarray.MPIArray.from_hdf5(fname, 'testds', axis=1, sel=(np.s_[:], np.s_[3:15:2]))
        assert ds5.shape[0] == gshape[0]
        assert ds5.shape[1] == mpiutil.split_local(6)[0]
        ds5 = ds5.redistribute(axis=0)
        assert (ds5 == ds[:, 3:15:2]).all()
        mpiutil.barrier()

        # Check the read with a slice along the axis being read
        ds6 = mpiarray.MPIArray.from_hdf5(fname, 'testds', axis=0, sel=(np.s_[:], np.s_[3:15:2]))
        ds6 = ds6.redistribute(axis=0)
        assert (ds6 == ds[:, 3:15:2]).all()
        mpiutil.barrier()

        if mpiutil.rank0 and os.path.exists(fname):
            os.remove(fname)
Example #56
0
    def _generate_mfiles(self, regen=False):

        completed_file = self._mdir + 'COMPLETED_BEAM'
        if os.path.exists(completed_file) and not regen:
            if mpiutil.rank0:
                print()
                print('=' * 80)
                print("******* Beam transfer m-files already generated ********")
            mpiutil.barrier()
            return

        if mpiutil.rank0:
            print()
            print('=' * 80)
            print('Create beam transfer m-files...')

        st = time.time()

        # Calculate the Beam Transfer Matrices
        nfreq = self.telescope.nfreq
        nbl = self.telescope.nbase
        npol = self.telescope.num_pol_sky
        ntheta = self.telescope.theta_size
        nphi = self.telescope.phi_size

        # create file to save beam transfer matrices
        dsize = (nfreq, nbl, npol, ntheta)
        csize = (nfreq, 1, npol, ntheta)
        mmax = self.telescope.mmax
        ms = np.concatenate([np.arange(0, mmax+1), np.arange(-mmax, 0)])
        # get local section of m'th
        for ind, mi in enumerate(mpiutil.mpilist(ms, method='con')):
            with h5py.File(self._mfile(mi), 'w') as f:
                f.create_dataset('beam_m', dsize, chunks=csize, compression='lzf', dtype=np.complex128)
                f.attrs['m'] = mi

        # calculate the total memory needed for the transfer matrix
        total_memory = nfreq * nbl * npol * ntheta * nphi * 16.0 # Bytes, 16 for complex128
        limit = 1.0 # GB, memory limit for each process
        # make each process have maximum `limit` GB
        single_memory = limit * 2**30 # Bytes
        # how many chunks
        num_chunks = int(np.ceil(total_memory / (mpiutil.size * single_memory)))
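        # Illustrative numbers: nfreq=32, nbl=512, npol=4, ntheta=nphi=256 give
        # total_memory = 64 GiB; with 16 processes and a 1 GiB limit each,
        # num_chunks = ceil(64 / 16) = 4.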

        # split bls to num_chunks sections
        if nbl < num_chunks:
            warnings.warn('Could not split to %d chunks for %d baselines' % (num_chunks, nbl))
        num_chunks = min(num_chunks, nbl)
        num, start, end = mpiutil.split_m(nbl, num_chunks)
        for ci in range(num_chunks):
            if mpiutil.rank0:
                print "Starting chunk %i of %i" % (ci+1, num_chunks)

            tarray = self.telescope.transfer_matrices(np.arange(start[ci], end[ci]), np.arange(nfreq))
            tarray = MPIArray.wrap(tarray, axis=0)
            # redistribute along different m
            tarray = tarray.redistribute(axis=3)

            # save beam transfer matrices to file
            for ind, mi in enumerate(mpiutil.mpilist(ms, method='con')):
                with h5py.File(self._mfile(mi), 'r+') as f:
                    f['beam_m'][:, start[ci]:end[ci]] = tarray[..., ind].view(np.ndarray).reshape(nfreq, num[ci], npol, ntheta)

        mpiutil.barrier()

        et = time.time()

        if mpiutil.rank0:

            # Make file marker that the m's have been correctly generated:
            open(completed_file, 'a').close()

            # Print out timing
            print "=== Create beam transfer m-files took %f s ===" % (et - st)