Example #1
def create_observations(args, comm, schedules):
    """ Create and distribute TOAST observations for every CES in schedules.

    """
    log = Logger.get()
    timer = Timer()
    timer.start()

    data = Data(comm)

    # Loop over the schedules, distributing each schedule evenly across
    # the process groups.  For now, we'll assume that each schedule has
    # the same number of operational days and the number of process groups
    # matches the number of operational days.  Relaxing these constraints
    # will cause the season break to occur on different process groups
    # for different schedules and prevent splitting the communicator.

    for schedule in schedules:

        telescope = schedule.telescope
        all_ces = schedule.ceslist
        nces = len(all_ces)

        breaks = get_breaks(comm, all_ces, args)

        groupdist = distribute_uniform(nces, comm.ngroups, breaks=breaks)
        group_firstobs = groupdist[comm.group][0]
        group_numobs = groupdist[comm.group][1]

        for ices in range(group_firstobs, group_firstobs + group_numobs):
            obs = create_observation(args, comm, telescope, all_ces[ices])
            data.obs.append(obs)

    if comm.comm_world is None or comm.comm_group.rank == 0:
        log.info("Group # {:4} has {} observations.".format(comm.group, len(data.obs)))

    if len(data.obs) == 0:
        raise RuntimeError(
            "Too many tasks. Every MPI task must "
            "be assigned to at least one observation."
        )

    if comm.comm_world is not None:
        comm.comm_world.barrier()
    timer.stop()
    if comm.world_rank == 0:
        timer.report("Simulated scans")

    # Split the data object for each telescope for separate mapmaking.
    # We could also split by site.

    if len(schedules) > 1:
        telescope_data = data.split("telescope")
        if len(telescope_data) == 1:
            # Only one telescope available
            telescope_data = []
    else:
        telescope_data = []
    telescope_data.insert(0, ("all", data))
    return data, telescope_data
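Note: the per-group observation ranges above come from toast's distribute_uniform helper. A minimal sketch of that splitting, assuming toast.dist.distribute_uniform(totalsize, groups, breaks=None) returns one (first, nelem) pair per process group; the numbers are made up.

from toast.dist import distribute_uniform

nces = 10      # hypothetical number of CES in one schedule
ngroups = 3    # hypothetical number of process groups
groupdist = distribute_uniform(nces, ngroups)
for group, (first, nobs) in enumerate(groupdist):
    # Each group would create observations for CES indices first .. first + nobs - 1
    print("group", group, "gets CES", first, "to", first + nobs - 1)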
Example #2
    def _observe_sso(self, sso_az, sso_el, sso_dist, sso_dia, tod, comm,
                     prefix):
        """
        Observe the SSO with each detector in tod
        """
        log = Logger.get()
        rank = 0
        if comm is not None:
            rank = comm.rank
        tmr = Timer()
        if self._report_timing:
            if comm is not None:
                comm.Barrier()
            tmr.start()

        nsamp = tod.local_samples[1]

        if rank == 0:
            log.info("{}Observing the SSO signal".format(prefix))

        for det in tod.local_dets:
            # Cache the output signal
            cachename = "{}_{}".format(self._out, det)
            if tod.cache.exists(cachename):
                ref = tod.cache.reference(cachename)
            else:
                ref = tod.cache.create(cachename, np.float64, (nsamp, ))

            try:
                # Some TOD classes provide a shortcut to Az/El
                az, el = tod.read_azel(detector=det)
            except Exception as e:
                azelquat = tod.read_pntg(detector=det, azel=True)
                # Convert Az/El quaternion of the detector back into
                # angles for the simulation.
                theta, phi = qa.to_position(azelquat)
                # Azimuth is measured in the opposite direction
                # than longitude
                az = 2 * np.pi - phi
                el = np.pi / 2 - theta

            beam, radius = self._get_beam_map(det, sso_dia)

            # Interpolate the beam map at appropriate locations
            x = (az - sso_az) * np.cos(el)
            y = el - sso_el
            r = np.sqrt(x**2 + y**2)
            good = r < radius
            sig = beam(x[good], y[good], grid=False)
            ref[:][good] += sig

            del ref, sig, beam

        if self._report_timing:
            if comm is not None:
                comm.Barrier()
            if rank == 0:
                tmr.stop()
                tmr.report("{}OpSimSSO: Observe signal".format(prefix))
        return
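Note: the fallback branch above recovers horizontal coordinates from the Az/El pointing quaternion. A minimal sketch of the angle convention, assuming qa.to_position returns (co-latitude theta, longitude phi) in radians; the input values are made up.

import numpy as np

theta = np.radians([30.0, 60.0])   # hypothetical co-latitudes from to_position
phi = np.radians([10.0, 350.0])    # hypothetical longitudes from to_position
az = (2 * np.pi - phi) % (2 * np.pi)   # azimuth runs opposite to longitude
el = np.pi / 2 - theta                 # elevation is 90 degrees minus co-latitude
print(np.degrees(az), np.degrees(el))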
Example #3
def export_TOD(args,
               comm,
               data,
               totalname,
               schedules,
               other=None,
               verbose=True):
    if args.export is None:
        return

    log = Logger.get()
    timer = Timer()

    # Only import spt3g if we are writing out so3g files
    from spt3g import core as core3g
    from ..data.toast_export import ToastExport

    path = os.path.abspath(args.export)

    key = args.export_key
    if key is not None:
        prefix = "{}_{}".format(args.bands, key)
        det_groups = {}
        for schedule in schedules:
            for (
                    det_name,
                    det_data,
            ) in schedule.telescope.focalplane.detector_data.items():
                value = det_data[key]
                if value not in det_groups:
                    det_groups[value] = []
                det_groups[value].append(det_name)
    else:
        prefix = args.bands
        det_groups = None

    if comm.world_rank == 0 and verbose:
        log.info("Exporting data to directory tree at {}".format(path))

    timer.start()
    export = ToastExport(
        path,
        prefix=prefix,
        use_intervals=True,
        cache_name=totalname,
        cache_copy=other,
        mask_flag_common=TODGround.TURNAROUND,
        filesize=2**30,
        units=core3g.G3TimestreamUnits.Tcmb,
        detgroups=det_groups,
        compress=args.compress,
    )
    export.exec(data)
    if comm.comm_world is not None:
        comm.comm_world.Barrier()
    timer.stop()
    if comm.world_rank == 0 and verbose:
        timer.report("Wrote simulated data to {}:{}" "".format(path, "total"))

    return
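Note: when args.export_key is set, det_groups simply buckets detector names by the value of that key in the focalplane detector data. A standalone sketch of the grouping with hypothetical detector metadata ("wafer" stands in for the export key).

detector_data = {
    "det00": {"wafer": "w01"},
    "det01": {"wafer": "w01"},
    "det02": {"wafer": "w02"},
}
det_groups = {}
for det_name, det_data in detector_data.items():
    det_groups.setdefault(det_data["wafer"], []).append(det_name)
print(det_groups)  # {'w01': ['det00', 'det01'], 'w02': ['det02']}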
Example #4
def get_elevation_noise(args, comm, data, key="noise"):
    """ Insert elevation-dependent noise

    """
    timer = Timer()
    timer.start()
    # fsample = args.sample_rate
    for obs in data.obs:
        tod = obs["tod"]
        fp = obs["focalplane"]
        noise = obs[key]
        for det in tod.local_dets:
            if det not in noise.keys:
                raise RuntimeError(
                    'Detector "{}" does not have a PSD in the noise object'.
                    format(det))
            A = fp[det]["A"]
            C = fp[det]["C"]
            psd = noise.psd(det)
            try:
                # Some TOD classes provide a shortcut to Az/El
                _, el = tod.read_azel(detector=det)
            except Exception:
                azelquat = tod.read_pntg(detector=det, azel=True)
                # Convert Az/El quaternion of the detector back into
                # angles for the simulation.
                theta, _ = qa.to_position(azelquat)
                el = np.pi / 2 - theta
            el = np.median(el)
            # Scale the analytical noise PSD. Pivot is at el = 50 deg.
            psd[:] *= (A / np.sin(el) + C)**2
    timer.stop()
    if comm.world_rank == 0:
        timer.report("Elevation noise")
    return
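Note: the loop above rescales each detector PSD by (A / sin(el) + C)**2, with the pivot chosen so the factor is unity at 50 degrees elevation. A small numeric sketch with hypothetical coefficients:

import numpy as np

A = 0.5                                 # hypothetical atmospheric loading term
C = 1.0 - A / np.sin(np.radians(50.0))  # chosen so the factor is 1 at the 50 deg pivot
for el_deg in (30.0, 50.0, 70.0):
    factor = (A / np.sin(np.radians(el_deg)) + C) ** 2
    print("el = {:2.0f} deg  PSD scale = {:.3f}".format(el_deg, factor))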
Example #5
def load_focalplane(args, comm):
    """Load focalplane information
    """
    timer = Timer()
    timer.start()
    gain = None
    fp = None
    if comm.world_rank == 0:
        if args.focalplane is None:
            # in this case, create a fake detector at the boresight
            # with a pure white noise spectrum.
            fake = {}
            fake["quat"] = np.array([0.0, 0.0, 1.0, 0.0])
            fake["fwhm"] = 30.0
            fake["fknee"] = 0.0
            fake["fmin"] = 1.0e-5
            fake["alpha"] = 1.0
            fake["NET"] = 1.0
            fake["polangle_deg"] = 0
            fake["color"] = "r"
            fp = {}
            fp["bore"] = fake
        else:
            with open(args.focalplane, "rb") as p:
                fp = pickle.load(p)

        if args.gain is not None:
            gain = {}
            with fits.open(args.gain) as f:
                gain["TIME"] = np.array(f["TIME"].data["TIME"])
                for i_det, det_name in enumerate(f["DETECTORS"].data["DETECTORS"]):
                    gain[det_name] = np.array(f["GAINS"].data[i_det, :])

    if comm.comm_world is not None:
        if args.gain is not None:
            gain = comm.comm_world.bcast(gain, root=0)
        fp = comm.comm_world.bcast(fp, root=0)

    timer.stop()
    if comm.world_rank == 0:
        timer.report("Create focalplane ({} dets)".format(len(fp.keys())))

    if args.debug:
        if comm.world_rank == 0:
            outfile = os.path.join(args.outdir, "focalplane.png")
            set_backend()
            dquats = {x: fp[x]["quat"] for x in fp.keys()}
            dfwhm = {x: fp[x]["fwhm"] for x in fp.keys()}
            plot_focalplane(dquats, 10.0, 10.0, outfile, fwhm=dfwhm)

    # For purposes of this simulation, we use detector noise
    # weights based on the NET (white noise level).  If the destriping
    # baseline is too long, this will not be the best choice.

    detweights = {}
    for d in fp.keys():
        net = fp[d]["NET"]
        detweights[d] = 1.0 / (args.sample_rate * net * net)

    return fp, gain, detweights
Example #6
    def exec(self, data):
        """ Apply the OpSignalSim operator on data

        """
        for obs in data.obs:
            tod = obs["tod"]
            nsamp = tod.local_samples[1]
            for det in tod.local_dets:
                timer = Timer()
                timer.start()
                if self._global_rank == 0:
                    print("Processing {}".format(det), flush=True)

                quat = tod.local_pointing(det, margin=self._margin)
                self._check_len(quat, nsamp, "detector quaternions")
                iquweights = tod.local_weights(det)
                self._check_len(iquweights, nsamp, "detector weights")

                sampled = self._sample_maps(tod, det, quat, iquweights)

                if self._dipoler is not None:
                    if self._global_rank == 0:
                        print("  Adding dipole", flush=True)
                    if self.skip_reproc:
                        velocity = None
                    else:
                        velocity = tod.local_velocity(margin=self._margin)
                        self._check_len(velocity, nsamp, "velocity")
                    sampled += self._dipoler.dipole(quat,
                                                    velocity=velocity,
                                                    det=det)

                if self._fsl and not self.skip_reproc:
                    if self._global_rank == 0:
                        print("  Adding FSL", flush=True)
                    local_fsl = tod.local_fsl(det, margin=self._margin)
                    self._check_len(local_fsl, nsamp, "FSL")
                    sampled += local_fsl

                if self._out is not None:
                    cachename = "{}_{}".format(self._out, det)
                    if not tod.cache.exists(cachename):
                        tod.cache.create(cachename, np.float64,
                                         (nsamp + 2 * self._margin, ))
                local_signal = tod.local_signal(det,
                                                name=self._out,
                                                margin=self._margin)
                self._check_len(local_signal, nsamp, "signal")
                if self._add:
                    local_signal += sampled
                else:
                    local_signal[:] = sampled
                del local_signal
                timer.stop()
                if self._global_rank == 0:
                    timer.report("Process {}".format(det))
        return
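Note: this operator, like the SSO example above, relies on the TOD cache pattern of creating a named float64 buffer once and referencing it on later passes. A minimal sketch, assuming toast.cache.Cache with create(name, dtype, shape), exists(name) and reference(name); the name and length are made up.

import numpy as np
from toast.cache import Cache

cache = Cache()
name = "signal_det0"   # hypothetical cache object name
nsamp = 100
if cache.exists(name):
    ref = cache.reference(name)
else:
    ref = cache.create(name, np.float64, (nsamp,))
ref[:] += 1.0          # accumulate into the cached timestream
del ref                # drop the reference when done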
Example #7
def apply_filter(args, data):
    """ Apply extra filter to signal
    """
    if args.filterfile is None:
        return
    timer = Timer()
    timer.start()
    convolver = tp.OpConvolvePlanck(args.filterfile)
    convolver.exec(data)
    data.comm.comm_world.barrier()
    timer.stop()
    if data.comm.comm_world.rank == 0:
        timer.report("Convolve with {}".format(args.filterfile))
    return
Example #8
def load_focalplanes(args, comm, schedules):
    """ Attach a focalplane to each of the schedules.

    Args:
        schedules (list) :  List of Schedule instances.
            Each schedule has two members, telescope
            and ceslist, a list of CES objects.
    Returns:
        detweights (dict) : Inverse variance noise weights for every
            detector across all focal planes. In [K_CMB^-2].
            They can be used to bin the TOD.
    """
    timer = Timer()
    timer.start()

    # Load focalplane information

    focalplanes = []
    if comm.world_rank == 0:
        for fpfile in args.focalplane.split(","):
            focalplanes.append(
                pipeline_tools.Focalplane(
                    fname_pickle=fpfile,
                    sample_rate=args.sample_rate,
                    radius_deg=args.focalplane_radius_deg,
                )
            )
    if comm.comm_world is not None:
        focalplanes = comm.comm_world.bcast(focalplanes)

    if len(focalplanes) == 1 and len(schedules) > 1:
        focalplanes *= len(schedules)
    if len(focalplanes) != len(schedules):
        raise RuntimeError(
            "Number of focalplanes must equal number of schedules or be 1."
        )

    # Append a focal plane and telescope to each entry in the schedules
    # list and assemble a detector weight dictionary that represents all
    # detectors in all focalplanes
    detweights = {}
    for schedule, focalplane in zip(schedules, focalplanes):
        schedule.telescope.focalplane = focalplane
        detweights.update(schedule.telescope.focalplane.detweights)

    timer.stop()
    if comm.world_rank == 0:
        timer.report("Loading focalplanes")
    return detweights
Example #9
    def _load_alm(self):
        """ Load the alm expansion and place it in the node-shared memory

        """
        if self._rank == 0:
            timer = Timer()
            timer.start()
            alm, mmax = hp.read_alm(self._almfile, return_mmax=True)
            nalm = len(alm)
            lmax = hp.Alm.getlmax(nalm, mmax)
            alm = [alm]
            if self._pol:
                for hdu in [2, 3]:
                    alm.append(hp.read_alm(self._almfile, hdu=hdu))
            alm = np.vstack(alm)
            nalm = len(alm)
            # If necessary, truncate the expansion to sufficient lmax
            self._lmax = min(lmax, self._lmax)
            self._mmax = min(mmax, self._lmax)
            if self._lmax < lmax:
                sz = hp.Alm.getsize(self._lmax, self._mmax)
                new_alm = np.zeros([nalm, sz], dtype=np.complex128)
                for ell in range(self._lmax + 1):
                    for m in range(min(ell, self._mmax) + 1):
                        i = hp.Alm.getidx(self._lmax, ell, m)
                        j = hp.Alm.getidx(lmax, ell, m)
                        new_alm[:, i] = alm[:, j]
                alm = new_alm
                lmax = self._lmax
                mmax = self._mmax
            # Suppress any primordial monopole or dipole
            for ell in range(min(2, lmax + 1)):
                for m in range(min(ell + 1, mmax + 1)):
                    ind = hp.Alm.getidx(lmax, ell, m)
                    alm[0, ind] = 0
            timer.stop()
            timer.report("load CMB alm")
        else:
            alm, lmax, mmax = None, None, None
        self._alm = self._comm.bcast(alm)
        self._lmax = self._comm.bcast(lmax)
        self._mmax = self._comm.bcast(mmax)
        return
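Note: the truncation above copies a_lm coefficients from the original (lmax, mmax) layout into a smaller one using healpy's index helpers. A serial sketch of the same index bookkeeping with hypothetical sizes:

import numpy as np
import healpy as hp

lmax_old = 512
lmax_new = mmax_new = 128
alm_old = np.zeros(hp.Alm.getsize(lmax_old), dtype=np.complex128)   # placeholder expansion
alm_new = np.zeros(hp.Alm.getsize(lmax_new, mmax_new), dtype=np.complex128)
for ell in range(lmax_new + 1):
    for m in range(min(ell, mmax_new) + 1):
        i = hp.Alm.getidx(lmax_new, ell, m)
        j = hp.Alm.getidx(lmax_old, ell, m)
        alm_new[i] = alm_old[j]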
Example #10
def apply_groundfilter(args, comm, data, cache_name=None, verbose=True):
    if not args.apply_groundfilter:
        return
    log = Logger.get()
    timer = Timer()
    timer.start()
    if comm.world_rank == 0 and verbose:
        log.info("Ground-filtering signal")
    groundfilter = OpGroundFilter(
        filter_order=args.ground_order,
        name=cache_name,
        common_flag_mask=args.common_flag_mask,
    )
    groundfilter.exec(data)
    if comm.comm_world is not None:
        comm.comm_world.barrier()
    timer.stop()
    if comm.world_rank == 0 and verbose:
        timer.report("Ground filtering")
    return
Example #11
def apply_polyfilter(args, comm, data, cache_name=None, verbose=True):
    """ Apply the polynomial filter to data under `cache_name`.
    """
    if not args.apply_polyfilter:
        return
    log = Logger.get()
    timer = Timer()
    timer.start()
    if comm.world_rank == 0 and verbose:
        log.info("Polyfiltering signal")
    polyfilter = OpPolyFilter(order=args.poly_order,
                              name=cache_name,
                              common_flag_mask=args.common_flag_mask)
    polyfilter.exec(data)
    if comm.comm_world is not None:
        comm.comm_world.barrier()
    timer.stop()
    if comm.world_rank == 0 and verbose:
        timer.report("Polynomial filtering")
    return
Example #12
def load_focalplanes(args, comm, schedules, verbose=False):
    """ Attach a focalplane to each of the schedules.

    Args:
        schedules (list) :  List of Schedule instances.
            Each schedule has two members, telescope
            and ceslist, a list of CES objects.
    Returns:
        detweights (dict) : Inverse variance noise weights for every
            detector across all focal planes. In [K_CMB^-2].
            They can be used to bin the TOD.
    """
    # log = Logger.get()
    timer = Timer()
    timer.start()

    # Load focalplane information

    timer1 = Timer()
    timer1.start()
    hw, telescope, det_index = get_hardware(args, comm, verbose=verbose)
    focalplane = get_focalplane(args, comm, hw, det_index, verbose=verbose)
    telescope.focalplane = focalplane

    if comm.world_rank == 0 and verbose:
        timer1.report_clear("Collect focalplane information")

    for schedule in schedules:
        # Replace the telescope created from reading the observing schedule but
        # keep the weather object
        weather = schedule.telescope.site.weather
        schedule.telescope = telescope
        schedule.telescope.site.weather = weather

    detweights = telescope.focalplane.detweights

    timer.stop()
    if (comm.comm_world is None or comm.world_rank == 0) and verbose:
        timer.report("Loading focalplane")
    return detweights
Example #13
def main():
    log = Logger.get()
    gt = GlobalTimers.get()
    gt.start("toast_planck_reduce (total)")

    mpiworld, procs, rank, comm = get_comm()

    # This is the 2-level toast communicator.  By default,
    # there is just one group which spans MPI_COMM_WORLD.
    comm = toast.Comm()

    if comm.comm_world.rank == 0:
        print(
            "Running with {} processes at {}".format(
                procs, str(datetime.datetime.now())
            )
        )

    parser = argparse.ArgumentParser(
        description="Simple on-the-fly signal convolution + MADAM Mapmaking",
        fromfile_prefix_chars="@",
    )
    parser.add_argument("--lmax", required=True, type=np.int, help="Simulation lmax")
    parser.add_argument(
        "--fwhm", required=True, type=np.float, help="Sky fwhm [arcmin] to deconvolve"
    )
    parser.add_argument("--beammmax", required=True, type=np.int, help="Beam mmax")
    parser.add_argument("--order", default=11, type=np.int, help="Iteration order")
    parser.add_argument(
        "--pxx",
        required=False,
        default=False,
        action="store_true",
        help="Beams are in Pxx frame, not Dxx",
    )
    parser.add_argument(
        "--normalize",
        required=False,
        default=False,
        action="store_true",
        help="Normalize the beams",
    )
    parser.add_argument(
        "--skyfile",
        required=True,
        help="Path to sky alm files. Tag DETECTOR will be "
        "replaced with detector name.",
    )
    parser.add_argument(
        "--remove_monopole",
        required=False,
        default=False,
        action="store_true",
        help="Remove the sky monopole before convolution",
    )
    parser.add_argument(
        "--remove_dipole",
        required=False,
        default=False,
        action="store_true",
        help="Remove the sky dipole before convolution",
    )
    parser.add_argument(
        "--beamfile",
        required=True,
        help="Path to beam alm files. Tag DETECTOR will be "
        "replaced with detector name.",
    )
    parser.add_argument("--rimo", required=True, help="RIMO file")
    parser.add_argument("--freq", required=True, type=np.int, help="Frequency")
    parser.add_argument(
        "--dets", required=False, default=None, help="Detector list (comma separated)"
    )
    parser.add_argument(
        "--effdir", required=True, help="Input Exchange Format File directory"
    )
    parser.add_argument(
        "--effdir_pntg",
        required=False,
        help="Input Exchange Format File directory " "for pointing",
    )
    parser.add_argument(
        "--effdir_out", required=False, help="Output directory for convolved TOD"
    )
    parser.add_argument(
        "--obtmask", required=False, default=1, type=np.int, help="OBT flag mask"
    )
    parser.add_argument(
        "--flagmask", required=False, default=1, type=np.int, help="Quality flag mask"
    )
    parser.add_argument("--ringdb", required=True, help="Ring DB file")
    parser.add_argument(
        "--odfirst", required=False, default=None, type=np.int, help="First OD to use"
    )
    parser.add_argument(
        "--odlast", required=False, default=None, type=np.int, help="Last OD to use"
    )
    parser.add_argument(
        "--ringfirst",
        required=False,
        default=None,
        type=int,
        help="First ring to use",
    )
    parser.add_argument(
        "--ringlast", required=False, default=None, type=np.int, help="Last ring to use"
    )
    parser.add_argument(
        "--obtfirst",
        required=False,
        default=None,
        type=float,
        help="First OBT to use",
    )
    parser.add_argument(
        "--obtlast", required=False, default=None, type=np.float, help="Last OBT to use"
    )
    parser.add_argument("--madam_prefix", required=False, help="map prefix")
    parser.add_argument(
        "--madampar", required=False, default=None, help="Madam parameter file"
    )
    parser.add_argument(
        "--obtmask_madam", required=False, type=np.int, help="OBT flag mask for Madam"
    )
    parser.add_argument(
        "--flagmask_madam",
        required=False,
        type=int,
        help="Quality flag mask for Madam",
    )
    parser.add_argument(
        "--skip_madam",
        required=False,
        default=False,
        action="store_true",
        help="Do not run Madam on the convolved timelines",
    )
    parser.add_argument("--out", required=False, default=".", help="Output directory")

    try:
        args = parser.parse_args()
    except SystemExit:
        sys.exit(0)

    timer = Timer()
    timer.start()

    odrange = None
    if args.odfirst is not None and args.odlast is not None:
        odrange = (args.odfirst, args.odlast)

    ringrange = None
    if args.ringfirst is not None and args.ringlast is not None:
        ringrange = (args.ringfirst, args.ringlast)

    obtrange = None
    if args.obtfirst is not None and args.obtlast is not None:
        obtrange = (args.obtfirst, args.obtlast)

    detectors = None
    if args.dets is not None:
        detectors = re.split(",", args.dets)

    # This is the distributed data, consisting of one or
    # more observations, each distributed over a communicator.
    data = toast.Data(comm)

    # Ensure output directory exists

    if not os.path.isdir(args.out) and comm.comm_world.rank == 0:
        os.makedirs(args.out)

    # Read in madam parameter file

    # Allow more than one entry, gather into a list
    repeated_keys = ["detset", "detset_nopol", "survey"]
    pars = {}

    if comm.comm_world.rank == 0:
        pars["kfirst"] = False
        pars["temperature_only"] = True
        pars["base_first"] = 60.0
        pars["nside_map"] = 512
        pars["nside_cross"] = 512
        pars["nside_submap"] = 16
        pars["write_map"] = False
        pars["write_binmap"] = True
        pars["write_matrix"] = False
        pars["write_wcov"] = False
        pars["write_hits"] = True
        pars["kfilter"] = False
        pars["info"] = 3
        if args.madampar:
            pat = re.compile(r"\s*(\S+)\s*=\s*(\S+(\s+\S+)*)\s*")
            comment = re.compile(r"^#.*")
            with open(args.madampar, "r") as f:
                for line in f:
                    if not comment.match(line):
                        result = pat.match(line)
                        if result:
                            key, value = result.group(1), result.group(2)
                            if key in repeated_keys:
                                if key not in pars:
                                    pars[key] = []
                                pars[key].append(value)
                            else:
                                pars[key] = value
        # Command line parameters override the ones in the madam parameter file
        if "file_root" not in pars:
            pars["file_root"] = "madam"
        if args.madam_prefix is not None:
            pars["file_root"] = args.madam_prefix
        sfreq = "{:03}".format(args.freq)
        if sfreq not in pars["file_root"]:
            pars["file_root"] += "_" + sfreq
        try:
            fsample = {30: 32.51, 44: 46.55, 70: 78.77}[args.freq]
        except Exception:
            fsample = 180.3737
        pars["fsample"] = fsample
        pars["path_output"] = args.out

        print("All parameters:")
        print(args, flush=True)

    pars = comm.comm_world.bcast(pars, root=0)

    memreport("after parameters", MPI.COMM_WORLD)

    # madam only supports a single observation.  Normally
    # we would have multiple observations with some subset
    # assigned to each process group.

    # create the TOD for this observation

    tod = tp.Exchange(
        comm=comm.comm_group,
        detectors=detectors,
        ringdb=args.ringdb,
        effdir_in=args.effdir,
        effdir_pntg=args.effdir_pntg,
        obt_range=obtrange,
        ring_range=ringrange,
        od_range=odrange,
        freq=args.freq,
        RIMO=args.rimo,
        obtmask=args.obtmask,
        flagmask=args.flagmask,
        do_eff_cache=False,
    )

    # normally we would get the intervals from somewhere else, but since
    # the Exchange TOD already had to get that information, we can
    # get it from there.

    ob = {}
    ob["name"] = "mission"
    ob["id"] = 0
    ob["tod"] = tod
    ob["intervals"] = tod.valid_intervals
    ob["baselines"] = None
    ob["noise"] = tod.noise

    # Add the bare minimum focal plane information for the conviqt operator
    focalplane = {}
    for det in tod.detectors:
        if args.pxx:
            # Beam is in the polarization basis.
            # No extra rotations are needed
            psipol = tod.rimo[det].psi_pol
        else:
            # Beam is in the detector basis. Convolver needs to remove
            # the last rotation into the polarization sensitive frame.
            psipol = tod.rimo[det].psi_uv + tod.rimo[det].psi_pol
        focalplane[det] = {
            "pol_leakage" : tod.rimo[det].epsilon,
            "pol_angle_deg" : psipol,
        }
    ob["focalplane"] = focalplane

    data.obs.append(ob)

    comm.comm_world.barrier()
    if comm.comm_world.rank == 0:
        timer.report_clear("Metadata queries")

    loader = tp.OpInputPlanck(
        commonflags_name="common_flags", flags_name="flags", margin=0
    )

    loader.exec(data)

    comm.comm_world.barrier()
    if comm.comm_world.rank == 0:
        timer.report_clear("Data read and cache")
        tod.cache.report()

    memreport("after loading", mpiworld)

    # make a planck Healpix pointing matrix
    mode = "IQU"
    if pars["temperature_only"] == "T":
        mode = "I"
    nside = int(pars["nside_map"])
    pointing = tp.OpPointingPlanck(
        nside=nside,
        mode=mode,
        RIMO=tod.RIMO,
        margin=0,
        apply_flags=False,
        keep_vel=False,
        keep_pos=False,
        keep_phase=False,
        keep_quats=True,
    )
    pointing.exec(data)

    comm.comm_world.barrier()
    if comm.comm_world.rank == 0:
        timer.report_clear("Pointing Matrix took, mode = {}".format(mode))

    memreport("after pointing", mpiworld)

    # simulate the TOD by convolving the sky with the beams

    if comm.comm_world.rank == 0:
        print("Convolving TOD", flush=True)

    for pattern in args.beamfile.split(","):
        skyfiles = {}
        beamfiles = {}
        for det in tod.detectors:
            freq = "{:03}".format(tp.utilities.det2freq(det))
            if "LFI" in det:
                psmdet = "{}_{}".format(freq, det[3:])
                if det.endswith("M"):
                    arm = "y"
                else:
                    arm = "x"
                graspdet = "{}_{}_{}".format(freq[1:], det[3:5], arm)
            else:
                psmdet = det.replace("-", "_")
                graspdet = det
            skyfile = (
                args.skyfile.replace("FREQ", freq)
                .replace("PSMDETECTOR", psmdet)
                .replace("DETECTOR", det)
            )
            skyfiles[det] = skyfile
            beamfile = pattern.replace("GRASPDETECTOR", graspdet).replace(
                "DETECTOR", det
            )
            beamfiles[det] = beamfile
            if comm.comm_world.rank == 0:
                print("Convolving {} with {}".format(skyfile, beamfile), flush=True)

        conviqt = OpSimConviqt(
            comm.comm_world,
            skyfiles,
            beamfiles,
            lmax=args.lmax,
            beammmax=args.beammmax,
            pol=True,
            fwhm=args.fwhm,
            order=args.order,
            calibrate=True,
            dxx=True,
            out="conviqt_tod",
            apply_flags=False,
            remove_monopole=args.remove_monopole,
            remove_dipole=args.remove_dipole,
            verbosity=1,
            normalize_beam=args.normalize,
        )
        conviqt.exec(data)

    comm.comm_world.barrier()
    if comm.comm_world.rank == 0:
        timer.report_clear("Convolution")

    memreport("after conviqt", mpiworld)

    if args.effdir_out is not None:
        if comm.comm_world.rank == 0:
            print("Writing TOD", flush=True)

        tod.set_effdir_out(args.effdir_out, None)
        writer = tp.OpOutputPlanck(
            signal_name="conviqt_tod",
            flags_name="flags",
            commonflags_name="common_flags",
        )
        writer.exec(data)

        comm.comm_world.barrier()
        if comm.comm_world.rank == 0:
            timer.report_clear("Conviqt output")

        memreport("after writing", mpiworld)

    # for now, we pass in the noise weights from the RIMO.
    detweights = {}
    for d in tod.detectors:
        net = tod.rimo[d].net
        fsample = tod.rimo[d].fsample
        detweights[d] = 1.0 / (fsample * net * net)

    if not args.skip_madam:
        if comm.comm_world.rank == 0:
            print("Calling Madam", flush=True)

        try:
            if args.obtmask_madam is None:
                obtmask = args.obtmask
            else:
                obtmask = args.obtmask_madam
            if args.flagmask_madam is None:
                flagmask = args.flagmask
            else:
                flagmask = args.flagmask_madam
            madam = OpMadam(
                params=pars,
                detweights=detweights,
                name="conviqt_tod",
                flag_name="flags",
                purge=True,
                name_out="madam_tod",
                common_flag_mask=obtmask,
                flag_mask=flagmask,
            )
        except Exception as e:
            raise Exception(
                "{:4} : ERROR: failed to initialize Madam: {}".format(
                    comm.comm_world.rank, e
                )
            )
        madam.exec(data)

        comm.comm_world.barrier()
        if comm.comm_world.rank == 0:
            timer.report_clear("Madam took {:.3f} s")

        memreport("after madam", mpiworld)

    gt.stop_all()
    if mpiworld is not None:
        mpiworld.barrier()
    timer = Timer()
    timer.start()
    alltimers = gather_timers(comm=mpiworld)
    if comm.world_rank == 0:
        out = os.path.join(args.out, "timing")
        dump_timing(alltimers, out)
        timer.stop()
        timer.report("Gather and dump timing info")
    return
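Note: both here and in Example #5 the detector weights are the inverse white-noise variance of a single sample, 1 / (fsample * NET**2). A small numeric sketch with hypothetical values:

net = 50.0e-6      # hypothetical NET in K * sqrt(s)
fsample = 78.77    # hypothetical sampling rate in Hz
sample_var = fsample * net * net   # white-noise variance per sample [K^2]
detweight = 1.0 / sample_var       # inverse-variance weight [1 / K^2]
print(sample_var, detweight)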
Example #14
    def exec(self, data):
        """Generate timestreams.
        Args:
            data (toast.Data): The distributed data.
        Returns:
            None
        """

        log = Logger.get()
        group = data.comm.group
        for obs in data.obs:
            try:
                obsname = obs["name"]
            except Exception:
                obsname = "observation"

            observer = ephem.Observer()
            observer.lon = obs['site'].lon
            observer.lat = obs['site'].lat
            observer.elevation = obs['site'].alt  # In meters
            observer.epoch = "2000"
            observer.temp = 0  # in Celsius
            observer.compute_pressure()

            prefix = "{} : {} : ".format(group, obsname)
            tod = obs['tod']
            comm = tod.mpicomm
            rank = 0
            if comm is not None:
                rank = comm.rank
            site = obs['site'].id

            if comm is not None:
                comm.Barrier()
            if rank == 0:
                log.info("{}Setting up SSO simulation".format(prefix))

            # Get the observation time span and compute the horizontal
            # position of the SSO
            times = tod.local_times()
            sso_az, sso_el, sso_dist, sso_dia = self._get_sso_position(
                times, observer)

            tmr = Timer()
            if self._report_timing:
                if comm is not None:
                    comm.Barrier()
                tmr.start()

            self._observe_sso(sso_az, sso_el, sso_dist, sso_dia, tod, comm,
                              prefix)

            del sso_az, sso_el, sso_dist

        if self._report_timing:
            if comm is not None:
                comm.Barrier()
            if rank == 0:
                tmr.stop()
                tmr.report(
                    "{}Simulated and observed SSO signal".format(prefix))
        return
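Note: the SSO position used above comes from a pyephem Observer configured with the site coordinates. A minimal pyephem sketch with made-up site values and Jupiter standing in for the simulated SSO:

import ephem

observer = ephem.Observer()
observer.lon = "-67.79"        # hypothetical site longitude (degrees, as a string)
observer.lat = "-22.96"        # hypothetical site latitude (degrees, as a string)
observer.elevation = 5200.0    # meters
observer.epoch = "2000"
observer.temp = 0              # Celsius
observer.compute_pressure()
observer.date = "2023/1/1 12:00:00"

target = ephem.Jupiter()
target.compute(observer)
print(float(target.az), float(target.alt), target.earth_distance)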
    def _synthesize_map(self, fn_cmb, there):
        """ Synthesize the stored alm expansion into a map
        and place the map in node-shared memory.

        """
        timer = Timer()
        timer.start()
        if not there:
            # Use libsharp to perform the synthesis across the communicator
            if self._quickpolbeam is None:
                beam = hp.gauss_beam(fwhm=self._fwhm,
                                     lmax=self._lmax,
                                     pol=True)
                beam = beam[:, 0:3].copy()
            else:
                beam = np.array(hp.read_cl(self._quickpolbeam))
                if beam.ndim == 1:
                    beam = np.vstack([beam, beam, beam])
                beam = beam[:, :self._lmax + 1].T.copy()
            almT = self._alm[0].reshape(1, 1, -1)
            self._alminfo.almxfl(almT, np.ascontiguousarray(beam[:, 0:1]))
            my_outmap = synthesis(self._grid,
                                  self._alminfo,
                                  almT,
                                  spin=0,
                                  comm=self._comm)[0]
            my_outmap = [my_outmap]
            if self._pol:
                almP = self._alm[1:3].reshape(1, 2, -1)
                self._alminfo.almxfl(almP, np.ascontiguousarray(beam[:,
                                                                     (1, 2)]))
                my_outmap.append(
                    synthesis(self._grid,
                              self._alminfo,
                              almP,
                              spin=2,
                              comm=self._comm)[0])
            # Discard the a_lm
            del self._alm
            my_outmap = np.vstack(my_outmap)
            my_pixels = self._dist_rings.local_pixels
            my_maptemp = np.zeros([self._nnz, self._npix], dtype=np.float64)
            maptemp = np.zeros([self._nnz, self._npix], dtype=np.float64)
            my_maptemp[:, my_pixels] = my_outmap
            self._comm.Reduce(my_maptemp, maptemp)
            del my_maptemp
            maptemp = hp.reorder(maptemp, r2n=True)
            timer.stop()
            if self._global_rank == 0:
                timer.report("synthesize CMB map")
                # Save the CMB map
                os.makedirs(CMBCACHE, exist_ok=True)
                header = [("fwhm", np.degrees(self._fwhm),
                           "gaussian smoothing (deg)")]
                hp.write_map(fn_cmb,
                             maptemp,
                             extra_header=header,
                             overwrite=True,
                             nest=True)
                print("CMB map saved in {}".format(fn_cmb), flush=True)
        else:
            if self._global_rank == 0:
                print("Loading cached CMB map from {}".format(fn_cmb),
                      flush=True)
            if self._rank == 0:
                maptemp = hp.read_map(fn_cmb,
                                      None,
                                      nest=True,
                                      verbose=False,
                                      dtype=np.float32)
                if not self._pol:
                    maptemp = maptemp[0]
            else:
                maptemp = None
        self.mapsampler = MapSampler(
            None,
            pol=self._pol,
            comm=self._comm,
            preloaded_map=maptemp,
            nest=True,
            plug_holes=False,
            use_shmem=True,
        )
        del maptemp
        return
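Note: the synthesis above applies a beam window to the a_lm before transforming to a map. A serial healpy sketch of the same Gaussian smoothing for a temperature-only expansion, with hypothetical resolution parameters:

import numpy as np
import healpy as hp

lmax, nside = 128, 64
fwhm = np.radians(1.0)                      # hypothetical 1 degree beam
cl = np.ones(lmax + 1)                      # hypothetical flat input spectrum
alm = hp.synalm(cl, lmax=lmax)
bl = hp.gauss_beam(fwhm=fwhm, lmax=lmax)    # beam transfer function B_ell
alm_smooth = hp.almxfl(alm, bl)
smoothed_map = hp.alm2map(alm_smooth, nside, lmax=lmax)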
def main():
    log = Logger.get()
    gt = GlobalTimers.get()
    gt.start("toast_planck_reduce (total)")

    mpiworld, procs, rank, comm = get_comm()

    if comm.world_rank == 0:
        print("Running with {} processes at {}".format(
            procs, str(datetime.datetime.now())))

    parser = argparse.ArgumentParser(description='Simple MADAM Mapmaking',
                                     fromfile_prefix_chars='@')
    parser.add_argument('--skip_madam',
                        dest='skip_madam',
                        default=False,
                        action='store_true',
                        help='Do not make maps with Madam.')
    parser.add_argument('--skip_noise',
                        dest='skip_noise',
                        default=False,
                        action='store_true',
                        help='Do not add simulated noise to the TOD.')
    parser.add_argument('--rimo', required=True, help='RIMO file')
    parser.add_argument('--freq', required=True, type=int, help='Frequency')
    parser.add_argument('--debug',
                        dest='debug',
                        default=False,
                        action='store_true',
                        help='Write data distribution info to file')
    parser.add_argument('--dets',
                        required=False,
                        default=None,
                        help='Detector list (comma separated)')
    parser.add_argument('--effdir',
                        required=True,
                        help='Input Exchange Format File directory')
    parser.add_argument('--effdir2',
                        required=False,
                        help='Additional input Exchange Format File directory')
    parser.add_argument('--effdir_pntg',
                        required=False,
                        help='Input Exchange Format File directory for '
                        'pointing')
    parser.add_argument('--effdir_fsl',
                        required=False,
                        help='Input Exchange Format File directory for '
                        'straylight')
    parser.add_argument('--obtmask',
                        required=False,
                        default=1,
                        type=int,
                        help='OBT flag mask')
    parser.add_argument('--flagmask',
                        required=False,
                        default=1,
                        type=int,
                        help='Quality flag mask')
    parser.add_argument('--pntflagmask',
                        required=False,
                        default=0,
                        type=int,
                        help='Which OBT flag bits to raise for HCM maneuvers')
    parser.add_argument('--bad_intervals',
                        required=False,
                        help='Path to bad interval file.')
    parser.add_argument('--ringdb', required=True, help='Ring DB file')
    parser.add_argument('--odfirst',
                        required=False,
                        default=None,
                        help='First OD to use')
    parser.add_argument('--odlast',
                        required=False,
                        default=None,
                        help='Last OD to use')
    parser.add_argument('--ringfirst',
                        required=False,
                        default=None,
                        help='First ring to use')
    parser.add_argument('--ringlast',
                        required=False,
                        default=None,
                        help='Last ring to use')
    parser.add_argument('--obtfirst',
                        required=False,
                        default=None,
                        help='First OBT to use')
    parser.add_argument('--obtlast',
                        required=False,
                        default=None,
                        help='Last OBT to use')
    parser.add_argument('--read_eff',
                        dest='read_eff',
                        default=False,
                        action='store_true',
                        help='Read and co-add the signal from effdir')
    parser.add_argument('--decalibrate',
                        required=False,
                        help='Path to calibration file to decalibrate with. '
                        'You can use python string formatting, assuming '
                        '.format(mc)')
    parser.add_argument('--calibrate',
                        required=False,
                        help='Path to calibration file to calibrate with. '
                        'You can use python string formatting, assuming '
                        '.format(mc)')
    parser.add_argument('--madampar',
                        required=False,
                        default=None,
                        help='Madam parameter file')
    parser.add_argument('--nside',
                        required=False,
                        default=None,
                        type=int,
                        help='Madam resolution')
    parser.add_argument('--out',
                        required=False,
                        default='.',
                        help='Output directory')
    parser.add_argument('--madam_prefix', required=False, help='map prefix')
    parser.add_argument('--make_rings',
                        dest='make_rings',
                        default=False,
                        action='store_true',
                        help='Compile ringsets.')
    parser.add_argument('--nside_ring',
                        required=False,
                        default=128,
                        type=int,
                        help='Ringset resolution')
    parser.add_argument('--ring_root',
                        required=False,
                        default='ringset',
                        help='Root filename for ringsets (setting to empty '
                        'disables ringset output).')
    parser.add_argument('--MC_start',
                        required=False,
                        default=0,
                        type=int,
                        help='First Monte Carlo noise realization')
    parser.add_argument('--MC_count',
                        required=False,
                        default=1,
                        type=int,
                        help='Number of Monte Carlo noise realizations')
    # noise parameters
    parser.add_argument('--noisefile',
                        required=False,
                        default='RIMO',
                        help='Path to noise PSD files for noise filter. '
                        'Tag DETECTOR will be replaced with detector name.')
    parser.add_argument('--noisefile_simu',
                        required=False,
                        default='RIMO',
                        help='Path to noise PSD files for noise simulation. '
                        'Tag DETECTOR will be replaced with detector name.')
    # Dipole parameters
    dipogroup = parser.add_mutually_exclusive_group()
    dipogroup.add_argument('--dipole',
                           dest='dipole',
                           required=False,
                           default=False,
                           action='store_true',
                           help='Simulate dipole')
    dipogroup.add_argument('--solsys_dipole',
                           dest='solsys_dipole',
                           required=False,
                           default=False,
                           action='store_true',
                           help='Simulate solar system dipole')
    dipogroup.add_argument('--orbital_dipole',
                           dest='orbital_dipole',
                           required=False,
                           default=False,
                           action='store_true',
                           help='Simulate orbital dipole')
    dipo_parameters_group = parser.add_argument_group('dipole_parameters')
    dipo_parameters_group.add_argument(
        '--solsys_speed',
        required=False,
        type=float,
        default=DEFAULT_PARAMETERS["solsys_speed"],
        help='Solar system speed wrt. CMB rest frame in km/s. Default is '
        'Planck 2015 best fit value')
    dipo_parameters_group.add_argument(
        '--solsys_glon',
        required=False,
        type=float,
        default=DEFAULT_PARAMETERS["solsys_glon"],
        help='Solar system velocity direction longitude in degrees')
    dipo_parameters_group.add_argument(
        '--solsys_glat',
        required=False,
        type=float,
        default=DEFAULT_PARAMETERS["solsys_glat"],
        help='Solar system velocity direction latitude in degrees')

    try:
        args = parser.parse_args()
    except SystemExit:
        sys.exit(0)

    if comm.world_rank == 0:
        print('All parameters:')
        print(args, flush=True)

    if args.MC_count < 1:
        raise RuntimeError('MC_count = {} < 1. Nothing done.'
                           ''.format(args.MC_count))

    timer = Timer()
    timer.start()

    nrange = 1

    odranges = None
    if args.odfirst is not None and args.odlast is not None:
        odranges = []
        firsts = [int(i) for i in str(args.odfirst).split(',')]
        lasts = [int(i) for i in str(args.odlast).split(',')]
        for odfirst, odlast in zip(firsts, lasts):
            odranges.append((odfirst, odlast))
        nrange = len(odranges)

    ringranges = None
    if args.ringfirst is not None and args.ringlast is not None:
        ringranges = []
        firsts = [int(i) for i in str(args.ringfirst).split(',')]
        lasts = [int(i) for i in str(args.ringlast).split(',')]
        for ringfirst, ringlast in zip(firsts, lasts):
            ringranges.append((ringfirst, ringlast))
        nrange = len(ringranges)

    obtranges = None
    if args.obtfirst is not None and args.obtlast is not None:
        obtranges = []
        firsts = [float(i) for i in str(args.obtfirst).split(',')]
        lasts = [float(i) for i in str(args.obtlast).split(',')]
        for obtfirst, obtlast in zip(firsts, lasts):
            obtranges.append((obtfirst, obtlast))
        nrange = len(obtranges)

    if odranges is None:
        odranges = [None] * nrange

    if ringranges is None:
        ringranges = [None] * nrange

    if obtranges is None:
        obtranges = [None] * nrange

    detectors = None
    if args.dets is not None:
        detectors = re.split(',', args.dets)

    # create the TOD for this observation

    if args.noisefile != 'RIMO' or args.noisefile_simu != 'RIMO':
        do_eff_cache = True
    else:
        do_eff_cache = False

    tods = []

    for obtrange, ringrange, odrange in zip(obtranges, ringranges, odranges):
        # create the TOD for this observation
        tods.append(
            tp.Exchange(comm=comm.comm_group,
                        detectors=detectors,
                        ringdb=args.ringdb,
                        effdir_in=args.effdir,
                        extra_effdirs=[args.effdir2, args.effdir_fsl],
                        effdir_pntg=args.effdir_pntg,
                        obt_range=obtrange,
                        ring_range=ringrange,
                        od_range=odrange,
                        freq=args.freq,
                        RIMO=args.rimo,
                        obtmask=args.obtmask,
                        flagmask=args.flagmask,
                        pntflagmask=args.pntflagmask,
                        do_eff_cache=do_eff_cache))

    # Make output directory

    if not os.path.isdir(args.out) and comm.world_rank == 0:
        os.makedirs(args.out)

    # Read in madam parameter file
    # Allow more than one entry, gather into a list
    repeated_keys = ['detset', 'detset_nopol', 'survey']
    pars = {}

    if comm.world_rank == 0:
        pars['kfirst'] = False
        pars['temperature_only'] = True
        pars['base_first'] = 60.0
        pars['nside_submap'] = 16
        pars['write_map'] = False
        pars['write_binmap'] = True
        pars['write_matrix'] = False
        pars['write_wcov'] = False
        pars['write_hits'] = True
        pars['kfilter'] = False
        pars['info'] = 3
        if args.madampar:
            pat = re.compile(r'\s*(\S+)\s*=\s*(\S+(\s+\S+)*)\s*')
            comment = re.compile(r'^#.*')
            with open(args.madampar, 'r') as f:
                for line in f:
                    if not comment.match(line):
                        result = pat.match(line)
                        if result:
                            key, value = result.group(1), result.group(2)
                            if key in repeated_keys:
                                if key not in pars:
                                    pars[key] = []
                                pars[key].append(value)
                            else:
                                pars[key] = value
        # Command line parameters override the ones in the madam parameter file
        if 'file_root' not in pars:
            pars['file_root'] = 'madam'
        if args.madam_prefix is not None:
            pars['file_root'] = args.madam_prefix
        sfreq = '{:03}'.format(args.freq)
        if sfreq not in pars['file_root']:
            pars['file_root'] += '_' + sfreq
        try:
            fsample = {30: 32.51, 44: 46.55, 70: 78.77}[args.freq]
        except Exception:
            fsample = 180.3737
        pars['fsample'] = fsample
        pars['path_output'] = args.out

    pars = comm.comm_world.bcast(pars, root=0)

    madam_mcmode = True
    if 'nsubchunk' in pars and int(pars['nsubchunk']) > 1:
        madam_mcmode = False

    if args.noisefile != 'RIMO' or args.noisefile_simu != 'RIMO':
        # We split MPI_COMM_WORLD into single process groups, each of
        # which is assigned one or more observations (rings)
        comm = toast.Comm(groupsize=1)

    # This is the distributed data, consisting of one or
    # more observations, each distributed over a communicator.
    data = toast.Data(comm)

    for iobs, tod in enumerate(tods):
        if args.noisefile != 'RIMO' or args.noisefile_simu != 'RIMO':
            # Use a toast helper method to optimally distribute rings between
            # processes.
            dist = distribute_discrete(tod.ringsizes, comm.world_size)
            my_first_ring, my_n_ring = dist[comm.world_rank]

            for my_ring in range(my_first_ring, my_first_ring + my_n_ring):
                ringtod = tp.Exchange.from_tod(
                    tod,
                    my_ring,
                    comm.comm_group,
                    noisefile=args.noisefile,
                    noisefile_simu=args.noisefile_simu)
                ob = {}
                ob['name'] = 'ring{:05}'.format(ringtod.globalfirst_ring)
                ob['id'] = ringtod.globalfirst_ring
                ob['tod'] = ringtod
                ob['intervals'] = ringtod.valid_intervals
                ob['baselines'] = None
                ob['noise'] = ringtod.noise
                ob['noise_simu'] = ringtod.noise_simu
                data.obs.append(ob)
        else:
            ob = {}
            ob['name'] = 'observation{:04}'.format(iobs)
            ob['id'] = 0
            ob['tod'] = tod
            ob['intervals'] = tod.valid_intervals
            ob['baselines'] = None
            ob['noise'] = tod.noise
            ob['noise_simu'] = tod.noise

            data.obs.append(ob)

    rimo = tods[0].rimo

    if mpiworld is not None:
        mpiworld.barrier()
    if comm.world_rank == 0:
        timer.report_clear("Metadata queries")

    # Always read the signal and flags, even if the signal is later
    # overwritten.  There is no overhead for the signal because it is
    # interlaced with the flags.

    tod_name = 'signal'
    timestamps_name = 'timestamps'
    flags_name = 'flags'
    common_flags_name = 'common_flags'
    reader = tp.OpInputPlanck(signal_name=tod_name,
                              flags_name=flags_name,
                              timestamps_name=timestamps_name,
                              commonflags_name=common_flags_name)
    if comm.world_rank == 0:
        print('Reading input signal from {}'.format(args.effdir), flush=True)
    reader.exec(data)
    if mpiworld is not None:
        mpiworld.barrier()
    if comm.world_rank == 0:
        timer.report_clear("Read")

    # Clear the signal if we don't need it

    if not args.read_eff:
        eraser = tp.OpCacheMath(in1=tod_name,
                                in2=0,
                                multiply=True,
                                out=tod_name)
        if comm.world_rank == 0:
            print('Erasing TOD', flush=True)
        eraser.exec(data)
        if mpiworld is not None:
            mpiworld.barrier()
        if comm.world_rank == 0:
            timer.report_clear("Erase")

    # Optionally flag bad intervals

    if args.bad_intervals is not None:
        flagger = tp.OpBadIntervals(path=args.bad_intervals)
        flagger.exec(data)
        if mpiworld is not None:
            mpiworld.barrier()
        if comm.world_rank == 0:
            timer.report_clear("Apply {}".format(args.bad_intervals))

    # Now read an optional second TOD to add with the first

    if args.effdir2 is not None:
        # Read the extra TOD and add it to the first one
        reader = tp.OpInputPlanck(signal_name='signal2',
                                  flags_name=None,
                                  timestamps_name=None,
                                  commonflags_name=None,
                                  effdir=args.effdir2)
        if comm.world_rank == 0:
            print('Reading extra TOD from {}'.format(args.effdir2), flush=True)
        reader.exec(data)
        if mpiworld is not None:
            mpiworld.barrier()
        if comm.world_rank == 0:
            print("Reading took {:.3f} s".format(elapsed), flush=True)

        adder = tp.OpCacheMath(in1=tod_name,
                               in2='signal2',
                               add=True,
                               out=tod_name)
        if comm.world_rank == 0:
            print('Adding TODs', flush=True)
        adder.exec(data)

        # Erase the extra cache object
        for ob in data.obs:
            tod = ob['tod']
            tod.cache.clear('signal2_.*')

    if args.effdir_fsl is not None:
        # Read the straylight signal into the tod cache under
        # "fsl_<detector>"
        reader = tp.OpInputPlanck(signal_name='fsl',
                                  flags_name=None,
                                  timestamps_name=None,
                                  commonflags_name=None,
                                  effdir=args.effdir_fsl)
        if comm.world_rank == 0:
            print('Reading straylight signal from {}'.format(args.effdir_fsl),
                  flush=True)
        reader.exec(data)
        if mpiworld is not None:
            mpiworld.barrier()
        if comm.world_rank == 0:
            timer.report_clear("Read FSL")
        do_fsl = True
    else:
        do_fsl = False

    # make a planck Healpix pointing matrix
    mode = 'IQU'
    if pars['temperature_only'] == 'T':
        mode = 'I'

    if args.nside is None:
        if 'nside_map' in pars:
            nside = int(pars['nside_map'])
        else:
            raise RuntimeError(
                'Nside must be set either in the Madam parameter file or on '
                'the command line')
    else:
        nside = args.nside
        pars['nside_map'] = nside
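    # The destriping (cross) resolution must never exceed the map resolution.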
    if ('nside_cross' not in pars
            or int(pars['nside_cross']) > int(pars['nside_map'])):
        pars['nside_cross'] = pars['nside_map']

    do_dipole = args.dipole or args.solsys_dipole or args.orbital_dipole

    pointing = tp.OpPointingPlanck(nside=nside,
                                   mode=mode,
                                   RIMO=rimo,
                                   margin=0,
                                   apply_flags=True,
                                   keep_vel=do_dipole,
                                   keep_pos=False,
                                   keep_phase=False,
                                   keep_quats=do_dipole)
    pointing.exec(data)
    if mpiworld is not None:
        mpiworld.barrier()
    if comm.world_rank == 0:
        timer.report_clear("Pointing Matrix")

    flags_name = 'flags'
    common_flags_name = 'common_flags'

    # for now, we pass in the noise weights from the RIMO.
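    # Each detector weight is the inverse white-noise variance per sample,
    # 1 / (fsample * NET ** 2).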
    detweights = {}
    for d in tod.detectors:
        net = tod.rimo[d].net
        fsample = tod.rimo[d].fsample
        detweights[d] = 1.0 / (fsample * net * net)

    if args.debug:
        with open("debug_planck_exchange_madam.txt", "w") as f:
            data.info(f)

    if do_dipole:
        # Simulate the dipole
        if args.dipole:
            dipomode = 'total'
        elif args.solsys_dipole:
            dipomode = 'solsys'
        else:
            dipomode = 'orbital'
        dipo = tp.OpDipolePlanck(args.freq,
                                 solsys_speed=args.solsys_speed,
                                 solsys_glon=args.solsys_glon,
                                 solsys_glat=args.solsys_glat,
                                 mode=dipomode,
                                 output='dipole',
                                 keep_quats=False)
        dipo.exec(data)
        if mpiworld is not None:
            mpiworld.barrier()
        if comm.world_rank == 0:
            timer.report_clear("Dipole")

    # Loop over Monte Carlos

    madam = None

    for mc in range(args.MC_start, args.MC_start + args.MC_count):

        out = "{}/{:05d}".format(args.out, mc)
        if comm.world_rank == 0:
            if not os.path.isdir(out):
                os.makedirs(out)

        # clear all noise data from the cache, so that we can generate
        # new noise timestreams.

        for ob in data.obs:
            ob['tod'].cache.clear("noise_.*")
        tod_name = 'signal'
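        # tod_name tracks which cache object holds the current working copy of
        # the timestream; the operators below switch it to 'noise' once they
        # write their output there.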

        if do_dipole:
            adder = tp.OpCacheMath(in1=tod_name,
                                   in2='dipole',
                                   add=True,
                                   out='noise')
            adder.exec(data)
            if mpiworld is not None:
                mpiworld.barrier()
            if comm.world_rank == 0:
                timer.report_clear("MC {}:  Add dipole".format(mc))
            tod_name = 'noise'

        # Simulate noise

        if not args.skip_noise:
            tod_name = 'noise'
            nse = toast.tod.OpSimNoise(out=tod_name,
                                       realization=mc,
                                       component=0,
                                       noise='noise_simu',
                                       rate=fsample)
            if comm.world_rank == 0:
                print('Simulating noise from {}'.format(args.noisefile_simu),
                      flush=True)
            nse.exec(data)
            if mpiworld is not None:
                mpiworld.barrier()
            if comm.world_rank == 0:
                timer.report_clear("MC {}:  Noise simulation".format(mc))

            # If we didn't add the dipole, we need to add the input
            # signal with the noise we just simulated

            if args.read_eff and not do_dipole:
                adder = tp.OpCacheMath(in1=tod_name,
                                       in2='signal',
                                       add=True,
                                       out=tod_name)
                adder.exec(data)
                if mpiworld is not None:
                    mpiworld.barrier()
                if comm.world_rank == 0:
                    timer.report_clear("MC {}:  Add input signal".format(mc))

        # Make rings

        if args.make_rings:
            ringmaker = tp.OpRingMaker(args.nside_ring,
                                       nside,
                                       signal=tod_name,
                                       fileroot=args.ring_root,
                                       out=out,
                                       commonmask=args.obtmask,
                                       detmask=args.flagmask)
            ringmaker.exec(data)
            if mpiworld is not None:
                mpiworld.barrier()
            if comm.world_rank == 0:
                timer.report_clear("MC {}:  Ringmaking".format(mc))

        # Apply calibration errors

        if args.decalibrate is not None:
            fn = args.decalibrate
            try:
                fn = fn.format(mc)
            except Exception:
                pass
            if comm.world_rank == 0:
                print('Decalibrating with {}'.format(fn), flush=True)
            decalibrator = tp.OpCalibPlanck(signal_in=tod_name,
                                            signal_out='noise',
                                            file_gain=fn,
                                            decalibrate=True)
            decalibrator.exec(data)
            if mpiworld is not None:
                mpiworld.barrier()
            if comm.world_rank == 0:
                timer.report_clear("MC {}:  Decalibrate".format(mc))
            tod_name = 'noise'

        if args.calibrate is not None:
            fn = args.calibrate
            try:
                fn = fn.format(mc)
            except Exception:
                pass
            if comm.world_rank == 0:
                print('Calibrating with {}'.format(fn), flush=True)
            calibrator = tp.OpCalibPlanck(signal_in=tod_name,
                                          signal_out='noise',
                                          file_gain=fn)
            calibrator.exec(data)
            if mpiworld is not None:
                mpiworld.barrier()
            if comm.world_rank == 0:
                timer.report_clear("MC {}:  Calibrate".format(mc))
            tod_name = 'noise'

        # Subtract the dipole and straylight

        if do_dipole:
            subtractor = tp.OpCacheMath(in1=tod_name,
                                        in2='dipole',
                                        subtract=True,
                                        out='noise')
            subtractor.exec(data)
            if mpiworld is not None:
                mpiworld.barrier()
            if comm.world_rank == 0:
                timer.report_clear("MC {}:  Subtract dipole".format(mc))
            tod_name = 'noise'

        if do_fsl:
            subtractor = tp.OpCacheMath(in1=tod_name,
                                        in2='fsl',
                                        subtract=True,
                                        out='noise')
            subtractor.exec(data)
            if mpiworld is not None:
                mpiworld.barrier()
            if comm.world_rank == 0:
                timer.report_clear("MC {}:  Subtract straylight".format(mc))
            tod_name = 'noise'

        # Make the map

        if not args.skip_madam:
            # Make maps
            if madam is None:
                try:
                    madam = toast.todmap.OpMadam(params=pars,
                                                 detweights=detweights,
                                                 purge_tod=True,
                                                 name=tod_name,
                                                 apply_flags=False,
                                                 name_out=None,
                                                 noise='noise',
                                                 mcmode=madam_mcmode,
                                                 translate_timestamps=False)
                except Exception as e:
                    raise Exception(
                        '{:4} : ERROR: failed to initialize Madam: '
                        '{}'.format(comm.world_rank, e))
            madam.params['path_output'] = out
            madam.exec(data)
            if mpiworld is not None:
                mpiworld.barrier()
            if comm.world_rank == 0:
                timer.report_clear("MC {}:  Mapmaking".format(mc))

    gt.stop_all()
    if mpiworld is not None:
        mpiworld.barrier()
    timer = Timer()
    timer.start()
    alltimers = gather_timers(comm=mpiworld)
    if comm.world_rank == 0:
        out = os.path.join(args.out, "timing")
        dump_timing(alltimers, out)
        timer.stop()
        timer.report("Gather and dump timing info")
    return
Exemple #17
0
    def load_frames(self):
        log = Logger.get()
        rank = 0
        if self.mpicomm is not None:
            rank = self.mpicomm.rank

        # Timestamps
        self.cache.create(self.TIMESTAMP_NAME, np.float64,
                          (self.local_samples[1], ))

        # Boresight pointing
        self.cache.create("boresight_radec", np.float64,
                          (self.local_samples[1], 4))
        self.cache.create("boresight_azel", np.float64,
                          (self.local_samples[1], 4))
        self.cache.create(self.HWP_ANGLE_NAME, np.float64,
                          (self.local_samples[1], ))

        # Common flags
        self.cache.create(self.COMMON_FLAG_NAME, np.uint8,
                          (self.local_samples[1], ))

        # Telescope position and velocity
        self.cache.create(self.POSITION_NAME, np.float64,
                          (self.local_samples[1], 3))
        self.cache.create(self.VELOCITY_NAME, np.float64,
                          (self.local_samples[1], 3))

        # Detector data and flags
        for det in self.local_dets:
            name = "{}_{}".format(self.SIGNAL_NAME, det)
            self.cache.create(name, np.float64, (self.local_samples[1], ))
            name = "{}_{}".format(self.FLAG_NAME, det)
            self.cache.create(name, np.uint8, (self.local_samples[1], ))

        timer = Timer()
        for ffile in self._file_names:
            fnf = self._file_nframes[ffile]
            frame_offsets = self._frame_sample_offs[ffile]
            frame_sizes = self._frame_sizes[ffile]
            if rank == 0:
                log.debug("Loading {} frames from {}".format(fnf, ffile))
            # Loop over all frames- only the root process will actually
            # read data from disk.
            if rank == 0:
                gfile = core3g.G3File(ffile)
            else:
                gfile = [None] * fnf
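                # The placeholder list keeps the zip() below in step on
                # non-root ranks; frame_to_tod is expected to broadcast the
                # frame contents from the root process.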

            timer.clear()
            timer.start()
            for fdata, frame_offset, frame_size in zip(gfile, frame_offsets,
                                                       frame_sizes):
                is_scan = True
                if rank == 0:
                    if fdata.type != core3g.G3FrameType.Scan:
                        is_scan = False
                if self.mpicomm is not None:
                    is_scan = self.mpicomm.bcast(is_scan, root=0)
                if not is_scan:
                    continue

                frame_to_tod(
                    self,
                    frame_offset,
                    frame_size,
                    frame_data=fdata,
                    all_flavors=self._all_flavors,
                )
                if self.mpicomm is not None:
                    self.mpicomm.barrier()
            timer.stop()
            if rank == 0:
                log.debug("Translated frames in {}s".format(timer.seconds()))
            del gfile
        return
Exemple #18
0
def main():
    log = Logger.get()
    gt = GlobalTimers.get()
    gt.start("toast_planck_reduce (total)")

    mpiworld, procs, rank, comm = get_comm()

    # This is the 2-level toast communicator.  By default,
    # there is just one group which spans MPI_COMM_WORLD.
    comm = toast.Comm()

    if comm.world_rank == 0:
        print("Running with {} processes at {}".format(
            procs, str(datetime.datetime.now())))

    parser = argparse.ArgumentParser(
        description='Accumulate polarization moments',
        fromfile_prefix_chars='@')
    parser.add_argument('--rimo', required=True, help='RIMO file')
    parser.add_argument('--freq', required=True, type=int, help='Frequency')
    parser.add_argument('--nside',
                        required=False,
                        type=int,
                        default=512,
                        help='Map resolution')
    parser.add_argument('--smax',
                        required=False,
                        type=int,
                        default=6,
                        help='Highest moment')
    parser.add_argument('--debug',
                        dest='debug',
                        default=False,
                        action='store_true',
                        help='Write data distribution info to file')
    parser.add_argument('--dets',
                        required=False,
                        default=None,
                        help='Detector list (comma separated)')
    parser.add_argument('--effdir',
                        required=True,
                        help='Input Exchange Format File directory')
    parser.add_argument('--effdir_in_diode0',
                        required=False,
                        default=None,
                        help='Input Exchange Format File directory, '
                        'LFI diode 0')
    parser.add_argument('--effdir_in_diode1',
                        required=False,
                        default=None,
                        help='Input Exchange Format File directory, '
                        'LFI diode 1')
    parser.add_argument('--effdir_pntg',
                        required=False,
                        help='Input Exchange Format File directory '
                        'for pointing')
    parser.add_argument('--obtmask',
                        required=False,
                        default=1,
                        type=int,
                        help='OBT flag mask')
    parser.add_argument('--flagmask',
                        required=False,
                        default=1,
                        type=int,
                        help='Quality flag mask')
    parser.add_argument('--pntflagmask',
                        required=False,
                        default=0,
                        type=int,
                        help='Pointing flag mask')
    parser.add_argument('--ringdb', required=True, help='Ring DB file')
    parser.add_argument('--odfirst',
                        required=False,
                        default=None,
                        type=int,
                        help='First OD to use')
    parser.add_argument('--odlast',
                        required=False,
                        default=None,
                        type=int,
                        help='Last OD to use')
    parser.add_argument('--ringfirst',
                        required=False,
                        default=None,
                        help='First ring to use (can be a list)')
    parser.add_argument('--ringlast',
                        required=False,
                        default=None,
                        help='Last ring to use (can be a list)')
    parser.add_argument('--obtfirst',
                        required=False,
                        default=None,
                        type=float,
                        help='First OBT to use')
    parser.add_argument('--obtlast',
                        required=False,
                        default=None,
                        type=float,
                        help='Last OBT to use')
    parser.add_argument('--out',
                        required=False,
                        default='.',
                        help='Output directory')
    parser.add_argument('--prefix',
                        required=False,
                        default='spins',
                        help='map prefix')

    try:
        args = parser.parse_args()
    except SystemExit:
        sys.exit(0)

    if comm.world_rank == 0:
        print('All parameters:')
        print(args, flush=True)

    timer = Timer()
    timer.start()

    nrange = 1

    odranges = None
    if args.odfirst is not None and args.odlast is not None:
        odranges = []
        firsts = [int(i) for i in str(args.odfirst).split(',')]
        lasts = [int(i) for i in str(args.odlast).split(',')]
        for odfirst, odlast in zip(firsts, lasts):
            odranges.append((odfirst, odlast))
        nrange = len(odranges)

    ringranges = None
    if args.ringfirst is not None and args.ringlast is not None:
        ringranges = []
        firsts = [int(i) for i in str(args.ringfirst).split(',')]
        lasts = [int(i) for i in str(args.ringlast).split(',')]
        for ringfirst, ringlast in zip(firsts, lasts):
            ringranges.append((ringfirst, ringlast))
        nrange = len(ringranges)

    obtranges = None
    if args.obtfirst is not None and args.obtlast is not None:
        obtranges = []
        firsts = [float(i) for i in str(args.obtfirst).split(',')]
        lasts = [float(i) for i in str(args.obtlast).split(',')]
        for obtfirst, obtlast in zip(firsts, lasts):
            obtranges.append((obtfirst, obtlast))
        nrange = len(obtranges)
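
    # Only one kind of range selection (OD, ring or OBT) is normally given;
    # whichever was specified last sets nrange, and the unspecified
    # selections are padded with None below.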

    if odranges is None:
        odranges = [None] * nrange

    if ringranges is None:
        ringranges = [None] * nrange

    if obtranges is None:
        obtranges = [None] * nrange

    detectors = None
    if args.dets is not None:
        detectors = re.split(',', args.dets)

    # create the TOD for this observation

    tods = []

    for obtrange, ringrange, odrange in zip(obtranges, ringranges, odranges):
        tods.append(
            tp.Exchange(comm=comm.comm_group,
                        detectors=detectors,
                        ringdb=args.ringdb,
                        effdir_in=args.effdir,
                        effdir_in_diode0=args.effdir_in_diode0,
                        effdir_in_diode1=args.effdir_in_diode1,
                        effdir_pntg=args.effdir_pntg,
                        obt_range=obtrange,
                        ring_range=ringrange,
                        od_range=odrange,
                        freq=args.freq,
                        RIMO=args.rimo,
                        obtmask=args.obtmask,
                        flagmask=args.flagmask,
                        pntflagmask=args.pntflagmask,
                        do_eff_cache=False,
                        noisefile='RIMO'))

    rimo = tods[0].rimo

    # Make output directory

    if not os.path.isdir(args.out) and comm.comm_world.rank == 0:
        os.makedirs(args.out)

    # This is the distributed data, consisting of one or
    # more observations, each distributed over a communicator.
    data = toast.Data(comm)

    for iobs, tod in enumerate(tods):
        ob = {}
        ob['name'] = 'observation{:04}'.format(iobs)
        ob['id'] = 0
        ob['tod'] = tod
        ob['intervals'] = tod.valid_intervals
        ob['baselines'] = None
        ob['noise'] = tod.noise

        data.obs.append(ob)

    if mpiworld is not None:
        mpiworld.barrier()
    if comm.comm_world.rank == 0:
        timer.report_clear("Metadata queries")

    # Accumulate and save the moment maps
    polmoments = tp.OpPolMomentsPlanck(nside=args.nside,
                                       RIMO=rimo,
                                       margin=0,
                                       keep_vel=False,
                                       keep_pos=False,
                                       keep_phase=False,
                                       keep_quats=False,
                                       smax=args.smax,
                                       prefix=os.path.join(
                                           args.out, args.prefix))

    polmoments.exec(data)

    if mpiworld is not None:
        mpiworld.barrier()
    if comm.comm_world.rank == 0:
        timer.report_clear("Accumulate moment maps")

    gt.stop_all()
    if mpiworld is not None:
        mpiworld.barrier()
    timer = Timer()
    timer.start()
    alltimers = gather_timers(comm=mpiworld)
    if comm.world_rank == 0:
        out = os.path.join(args.out, "timing")
        dump_timing(alltimers, out)
        timer.stop()
        timer.report("Gather and dump timing info")
    return
Exemple #19
0
def apply_mappraiser(
    args,
    comm,
    data,
    params,
    signalname,
    noisename,
    time_comms=None,
    telescope_data=None,
    verbose=True,
):
    """ Use libmappraiser to run the ML map-making

    Args:
        time_comms (iterable) :  Series of disjoint communicators that
            map, e.g., seasons and days.  Each entry is a tuple of
            the form (`name`, `communicator`)
        telescope_data (iterable) : series of disjoint TOAST data
            objects.  Each entry is tuple of the form (`name`, `data`).
    """
    if comm.comm_world is None:
        raise RuntimeError("Mappraiser requires MPI")

    log = Logger.get()
    total_timer = Timer()
    total_timer.start()
    if comm.world_rank == 0 and verbose:
        log.info("Making maps")

    mappraiser = OpMappraiser(
        params=params,
        purge=True,
        name=signalname,
        noise_name=noisename,
        conserve_memory=args.conserve_memory,
    )

    if time_comms is None:
        time_comms = [("all", comm.comm_world)]

    if telescope_data is None:
        telescope_data = [("all", data)]

    timer = Timer()
    for time_name, time_comm in time_comms:
        for tele_name, tele_data in telescope_data:
            if len(time_name.split("-")) == 3:
                # Special rules for daily maps
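                # (a time_name with three dash-separated fields is a calendar
                # date, i.e. a daily map)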
                if not args.do_daymaps:
                    continue
                if len(telescope_data) > 1 and tele_name == "all":
                    # Skip daily maps over multiple telescopes
                    continue

            timer.start()
            # N.B.: the code below is for Madam but may be useful to copy into
            # Mappraiser
            # once we start doing multiple maps in one run
            # madam.params["file_root"] = "{}_telescope_{}_time_{}".format(
            #     file_root, tele_name, time_name
            # )
            # if time_comm == comm.comm_world:
            #     madam.params["info"] = info
            # else:
            #     # Cannot have verbose output from concurrent mapmaking
            #     madam.params["info"] = 0
            # if (time_comm is None or time_comm.rank == 0) and verbose:
            #     log.info("Mapping {}".format(madam.params["file_root"]))
            mappraiser.exec(tele_data, time_comm)

            if time_comm is not None:
                time_comm.barrier()
            if comm.world_rank == 0 and verbose:
                timer.report_clear("Mapping {}_telescope_{}_time_{}".format(
                args.outpath,
                tele_name,
                time_name,
                ))

    if comm.comm_world is not None:
        comm.comm_world.barrier()
    total_timer.stop()
    if comm.world_rank == 0 and verbose:
        total_timer.report("Mappraiser total")

    return
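
# A minimal usage sketch, not part of the original example: it shows how the
# `time_comms` and `telescope_data` arguments documented above might be built
# for a single-communicator, single-telescope run.  The objects `args`,
# `comm`, `data`, `params` and the cache names "signal"/"noise" are assumed to
# come from the surrounding pipeline.
def _example_mappraiser_call(args, comm, data, params):
    time_comms = [("all", comm.comm_world)]   # one entry: map everything together
    telescope_data = [("all", data)]          # one entry: all telescopes combined
    apply_mappraiser(args, comm, data, params,
                     signalname="signal", noisename="noise",
                     time_comms=time_comms, telescope_data=telescope_data)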
Exemple #20
0
    def cache_effdirs(self, effdir_in, effdir_in_diode0, effdir_in_diode1,
                      effdir_out, effdir_dark, effdir_pntg, effdir_fsl,
                      extra_effdirs, effdir_flags):
        """ Cache the metadata so we don't need to look for files
        while reading and writing

        """
        if effdir_in is not None and PATTERN_SEPARATOR in effdir_in:
            self.effdir_in, self.effdir_in_pattern = effdir_in.split(
                PATTERN_SEPARATOR)
        else:
            self.effdir_in, self.effdir_in_pattern = effdir_in, None
        self.effdir_in_diode0 = effdir_in_diode0
        self.effdir_in_diode1 = effdir_in_diode1
        if effdir_out is not None and PATTERN_SEPARATOR in effdir_out:
            self.effdir_out, self.effdir_out_pattern = effdir_out.split(
                PATTERN_SEPARATOR)
        else:
            self.effdir_out, self.effdir_out_pattern = effdir_out, None
        if effdir_dark is not None:
            self.effdir_dark = effdir_dark
        else:
            self.effdir_dark = self.effdir_in
        if effdir_pntg is not None:
            self.effdir_pntg = effdir_pntg
        else:
            self.effdir_pntg = self.effdir_in
        self.effdir_fsl = effdir_fsl
        self.extra_effdirs = extra_effdirs
        if effdir_flags is None:
            self.effdir_flags = self.effdir_in
            self.effdir_flags_pattern = self.effdir_in_pattern
        else:
            if PATTERN_SEPARATOR in effdir_flags:
                (self.effdir_flags, self.effdir_flags_pattern
                 ) = effdir_flags.split(PATTERN_SEPARATOR)
            else:
                (self.effdir_flags,
                 self.effdir_flags_pattern) = effdir_flags, None

        if self.rank == 0:
            all_effdirs = [
                self.effdir_in, self.effdir_out, self.effdir_pntg,
                self.effdir_dark, self.effdir_fsl, self.effdir_flags,
                self.effdir_in_diode0, self.effdir_in_diode1
            ]
            if self.extra_effdirs is not None:
                for effdir in self.extra_effdirs:
                    all_effdirs.append(effdir)

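            # filenames_cache is assumed to be a module-level dictionary shared
            # by all instances, so each effdir is listed only once per process.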
            for effdir in all_effdirs:
                if effdir is None:
                    continue
                if effdir in filenames_cache:
                    continue
                print('Building a list of files under {} ...'.format(effdir),
                      end='',
                      flush=True)
                timer = Timer()
                timer.start()
                filenames_cache[effdir] = sorted(list_files(effdir))
                timer.stop()
                timer.report("List files")
        if self.comm is None:
            self.filenames = filenames_cache
        else:
            self.filenames = self.comm.bcast(filenames_cache, root=0)
        return
Exemple #21
0
def main():
    log = Logger.get()
    gt = GlobalTimers.get()
    gt.start("toast_so_sim (total)")
    timer0 = Timer()
    timer0.start()

    mpiworld, procs, rank, comm = toast_tools.get_comm()

    memreport("at the beginning of the pipeline", comm.comm_world)

    args, comm = parse_arguments(comm)

    if args.use_madam:
        # Initialize madam parameters
        madampars = toast_tools.setup_madam(args)
    else:
        madampars = None

    if args.import_dir is not None:
        schedules = None
        data, telescope_data, detweights = so_tools.load_observations(
            args, comm)
        memreport("after load", comm.comm_world)
        totalname = "signal"
    else:
        # Load and broadcast the schedule file

        schedules = toast_tools.load_schedule(args, comm)

        # Load the weather and append to schedules

        toast_tools.load_weather(args, comm, schedules)

        # load or simulate the focalplane

        detweights = so_tools.load_focalplanes(args, comm, schedules)

        # Create the TOAST data object to match the schedule.  This will
        # include simulating the boresight pointing.

        data, telescope_data = so_tools.create_observations(
            args, comm, schedules)

        memreport("after creating observations", comm.comm_world)

    # Optionally rewrite the noise PSDs in each observation to include
        # elevation-dependence
        so_tools.get_elevation_noise(args, comm, data)

        totalname = "total"

    # Split the communicator for day and season mapmaking

    time_comms = toast_tools.get_time_communicators(args, comm, data)

    # Rotate the LAT focalplane around the boresight based on co-rotator position

    so_tools.rotate_focalplane(args, data, comm)

    # Expand boresight quaternions into detector pointing weights and
    # pixel numbers

    toast_tools.expand_pointing(args, comm, data)

    # Flag Solar system objects

    so_tools.apply_flag_sso(args, comm, data)

    # Optionally, output h_n maps

    so_tools.compute_h_n(args, comm, data)

    # Optionally, output crosslinking map

    so_tools.compute_crosslinking(args, comm, data, detweights)

    # Optionally, output cadence map

    so_tools.compute_cadence_map(args, comm, data)

    # Only purge the pointing if we are NOT going to export the
    # data to a TIDAS volume
    if (args.tidas is None) and (args.export is None):
        for ob in data.obs:
            tod = ob["tod"]
            try:
                tod.free_radec_quats()
            except AttributeError:
                # These TOD objects do not have RA/Dec quaternions
                pass

    memreport("after pointing", comm.comm_world)

    # Set up objects to take copies of the TOD at appropriate times

    if args.pysm_model:
        if schedules is not None:
            focalplanes = [
                s.telescope.focalplane.detector_data for s in schedules
            ]
        else:
            focalplanes = [telescope.focalplane.detector_data]
        signalname = so_tools.simulate_sky_signal(args, comm, data,
                                                  focalplanes)
    else:
        signalname = toast_tools.scan_sky_signal(args, comm, data)

    memreport("after PySM", comm.comm_world)

    # Loop over Monte Carlos

    firstmc = int(args.MC_start)
    nmc = int(args.MC_count)

    for mc in range(firstmc, firstmc + nmc):

        if comm.world_rank == 0:
            log.info("Processing MC = {}".format(mc))

        toast_tools.draw_weather(args, comm, data, mc)

        toast_tools.simulate_atmosphere(args, comm, data, mc, totalname)

        #so_tools.scale_atmosphere_by_bandpass(args, comm, data, totalname, mc)
        toast_tools.scale_atmosphere_by_frequency(
            args,
            comm,
            data,
            cache_name=totalname,
            mc=mc,
        )

        memreport("after atmosphere", comm.comm_world)

        so_tools.simulate_hwpss(args, comm, data, mc, totalname)

        # update_atmospheric_noise_weights(args, comm, data, freq, mc)

        toast_tools.add_signal(args,
                               comm,
                               data,
                               totalname,
                               signalname,
                               purge=(mc == firstmc + nmc - 1))

        memreport("after adding sky", comm.comm_world)

        toast_tools.simulate_sss(args, comm, data, mc, totalname)

        memreport("after simulating SSS", comm.comm_world)

        toast_tools.simulate_noise(args, comm, data, mc, totalname)

        memreport("after simulating noise", comm.comm_world)

        so_tools.apply_sim_sso(args, comm, data, mc, totalname)

        memreport("after simulating SSO", comm.comm_world)

        so_tools.convolve_time_constant(args, comm, data, totalname)

        memreport("after convolving with time constant", comm.comm_world)

        # DEBUG begin
        """
        import matplotlib.pyplot as plt
        tod = data.obs[0]['tod']
        times = tod.local_times()
        for det in tod.local_dets:
            sig = tod.local_signal(det, totalname)
            plt.plot(times, sig, label=det)
        plt.legend(loc='best')
        fnplot = 'debug_{}.png'.format(args.madam_prefix)
        plt.savefig(fnplot)
        plt.close()
        print('DEBUG plot saved in', fnplot)
        return
        """
        # DEBUG end

        toast_tools.scramble_gains(args, comm, data, mc, totalname)

        so_tools.deconvolve_time_constant(args,
                                          comm,
                                          data,
                                          totalname,
                                          realization=mc)

        memreport("after deconvolving time constant", comm.comm_world)

        if mc == firstmc:
            # For the first realization and frequency, optionally
            # export the timestream data.
            toast_tools.output_tidas(args, comm, data, totalname)
            so_tools.export_TOD(args, comm, data, totalname, schedules)

            memreport("after export", comm.comm_world)

        if args.no_maps:
            continue

        outpath = setup_output(args, comm, mc)

        # Optionally demodulate signal

        so_tools.demodulate(args, comm, data, totalname, detweights, madampars)

        # Bin and destripe maps

        if args.use_madam:
            toast_tools.apply_madam(
                args,
                comm,
                data,
                madampars,
                outpath,
                detweights,
                totalname,
                time_comms=time_comms,
                telescope_data=telescope_data,
                first_call=(mc == firstmc),
            )
        else:
            toast_tools.apply_mapmaker(
                args,
                comm,
                data,
                outpath,
                totalname,
                time_comms=time_comms,
                telescope_data=telescope_data,
                first_call=(mc == firstmc),
            )

        memreport("after destriper", comm.comm_world)

        if (args.filterbin_ground_order is not None
                or args.filterbin_poly_order is not None):
            toast_tools.apply_filterbin(
                args,
                comm,
                data,
                outpath,
                totalname,
                time_comms=time_comms,
                telescope_data=telescope_data,
                first_call=(mc == firstmc),
            )

        if args.apply_polyfilter or args.apply_groundfilter:

            # Filter signal

            toast_tools.apply_polyfilter(args, comm, data, totalname)

            memreport("after polyfilter", comm.comm_world)

            # Ground filter

            memreport("after demodulation", comm.comm_world)

            toast_tools.apply_groundfilter(args, comm, data, totalname)

            memreport("after groundfilter", comm.comm_world)

            # Bin maps

            if args.use_madam:
                toast_tools.apply_madam(
                    args,
                    comm,
                    data,
                    madampars,
                    outpath,
                    detweights,
                    totalname,
                    time_comms=time_comms,
                    telescope_data=telescope_data,
                    first_call=args.demodulate,
                    extra_prefix="filtered",
                    bin_only=True,
                )
            else:
                toast_tools.apply_mapmaker(
                    args,
                    comm,
                    data,
                    outpath,
                    totalname,
                    time_comms=time_comms,
                    telescope_data=telescope_data,
                    first_call=False,
                    extra_prefix="filtered",
                    bin_only=True,
                )

            memreport("after filter & bin", comm.comm_world)

            if args.demodulate and args.MC_count > 1:
                if comm.world_rank == 0:
                    log.info("WARNING: demodulation and MC iterations are "
                             "incompatible.  Terminating after first MC.")
                break

    if comm.comm_world is not None:
        comm.comm_world.barrier()

    memreport("at the end of the pipeline", comm.comm_world)

    gt.stop_all()
    if mpiworld is not None:
        mpiworld.barrier()
    timer = Timer()
    timer.start()
    alltimers = gather_timers(comm=mpiworld)
    if rank == 0:
        out = os.path.join(args.outdir, "timing")
        dump_timing(alltimers, out)
        timer.stop()
        timer.report("Gather and dump timing info")
    timer0.stop()
    if comm.world_rank == 0:
        timer0.report("toast_so_sim.py pipeline")
    return
Exemple #22
0
def get_analytic_noise(args, comm, focalplane, verbose=True):
    """ Create a TOAST noise object.

    Create a noise object from the 1/f noise parameters contained in the
    focalplane database.

    """
    timer = Timer()
    timer.start()
    detectors = sorted(focalplane.keys())
    fmins = {}
    fknees = {}
    alphas = {}
    NETs = {}
    rates = {}
    indices = {}
    for d in detectors:
        rates[d] = args.sample_rate
        fmins[d] = focalplane[d]["fmin"]
        fknees[d] = focalplane[d]["fknee"]
        alphas[d] = focalplane[d]["alpha"]
        NETs[d] = focalplane[d]["NET"]
        indices[d] = focalplane[d]["index"]

    if args.common_mode_noise:
        # Add an extra "virtual" detector for common mode noise for
        # every optics tube
        fmin, fknee, alpha, net = np.array(
            args.common_mode_noise.split(",")).astype(np.float64)
        hw = hardware.get_example()
        for itube, tube in enumerate(sorted(hw.data["tubes"].keys())):
            d = "common_mode_{}".format(tube)
            detectors.append(d)
            rates[d] = args.sample_rate
            fmins[d] = fmin
            fknees[d] = fknee
            alphas[d] = alpha
            NETs[d] = net
            indices[d] = 100000 + itube

    noise = AnalyticNoise(
        rate=rates,
        fmin=fmins,
        detectors=detectors,
        fknee=fknees,
        alpha=alphas,
        NET=NETs,
        indices=indices,
    )

    if args.common_mode_noise:
        # Update the mixing matrix in the noise operator
        mixmatrix = {}
        keys = set()
        for det in focalplane.keys():
            tube = focalplane[det]["tube"]
            common = "common_mode_{}".format(tube)
            mixmatrix[det] = {det: 1, common: 1}
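            # Each detector sees its own noise stream plus the common-mode
            # stream of its optics tube, both with unit weight.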
            keys.add(det)
            keys.add(common)
        # There should probably be an accessor method to update the
        # mixmatrix in the TOAST Noise object.
        if noise._mixmatrix is not None:
            raise RuntimeError("Did not expect non-empty mixing matrix")
        noise._mixmatrix = mixmatrix
        noise._keys = list(sorted(keys))

    timer.stop()
    if comm.world_rank == 0 and verbose:
        timer.report("Creating noise model")
    return noise
Exemple #23
0
def parse_arguments(comm):
    timer = Timer()
    timer.start()
    log = Logger.get()

    parser = argparse.ArgumentParser(
        description="Simulate ground-based boresight pointing.  Simulate "
        "atmosphere and make maps for some number of noise Monte Carlos.",
        fromfile_prefix_chars="@",
    )

    add_dist_args(parser)
    add_debug_args(parser)
    add_todground_args(parser)
    add_pointing_args(parser)
    add_polyfilter_args(parser)
    add_groundfilter_args(parser)
    add_atmosphere_args(parser)
    add_noise_args(parser)
    add_gainscrambler_args(parser)
    add_madam_args(parser)
    add_sky_map_args(parser)
    add_pysm_args(parser)
    add_sss_args(parser)
    add_tidas_args(parser)
    add_spt3g_args(parser)
    add_mc_args(parser)

    parser.add_argument("--outdir",
                        required=False,
                        default="out",
                        help="Output directory")

    parser.add_argument(
        "--focalplane",
        required=False,
        default=None,
        help="Pickle file containing a dictionary of detector "
        "properties.  The keys of this dict are the detector "
        "names, and each value is also a dictionary with keys "
        '"quat" (4 element ndarray), "fwhm" (float, arcmin), '
        '"fknee" (float, Hz), "alpha" (float), and '
        '"NET" (float).',
    )
    parser.add_argument(
        "--freq",
        required=True,
        help="Comma-separated list of frequencies with identical focal planes."
        "  They override the bandpasses in the focalplane for the purpose of"
        " scaling the atmospheric signal but not for simulating the sky signal.",
    )

    try:
        args = parser.parse_args()
    except SystemExit:
        sys.exit(0)

    if args.tidas is not None:
        if not tidas_available:
            raise RuntimeError("TIDAS not found- cannot export")

    if args.spt3g is not None:
        if not spt3g_available:
            raise RuntimeError("SPT3G not found- cannot export")

    if len(args.freq.split(",")) != 1:
        # Multi frequency run.  We don't support multiple copies of
        # scanned signal.
        if args.input_map:
            raise RuntimeError(
                "Multiple frequencies are not supported when scanning from a map"
            )

    if args.simulate_atmosphere and args.weather is None:
        raise RuntimeError(
            "Cannot simulate atmosphere without a TOAST weather file")

    if comm.world_rank == 0:
        log.info("All parameters:")
        for ag in vars(args):
            log.info("{} = {}".format(ag, getattr(args, ag)))

    if args.group_size:
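        # Re-create the TOAST communicator with the requested group size; the
        # caller receives this new communicator in the return value below.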
        comm = Comm(groupsize=args.group_size)

    if comm.world_rank == 0:
        os.makedirs(args.outdir, exist_ok=True)

    timer.stop()
    if comm.world_rank == 0:
        timer.report("Parsed parameters")

    return args, comm
Exemple #24
0
def main():
    log = Logger.get()
    gt = GlobalTimers.get()
    gt.start("toast_ground_sim (total)")
    timer0 = Timer()
    timer0.start()

    mpiworld, procs, rank, comm = get_comm()

    args, comm = parse_arguments(comm)

    # Initialize madam parameters

    madampars = setup_madam(args)

    # Load and broadcast the schedule file

    schedules = load_schedule(args, comm)

    # Load the weather and append to schedules

    load_weather(args, comm, schedules)

    # load or simulate the focalplane

    detweights = load_focalplanes(args, comm, schedules)

    # Create the TOAST data object to match the schedule.  This will
    # include simulating the boresight pointing.

    data, telescope_data = create_observations(args, comm, schedules)

    # Split the communicator for day and season mapmaking

    time_comms = get_time_communicators(args, comm, data)

    # Expand boresight quaternions into detector pointing weights and
    # pixel numbers

    expand_pointing(args, comm, data)

    # Purge the pointing if we are NOT going to export the
    # data to a TIDAS volume
    if (args.tidas is None) and (args.spt3g is None):
        for ob in data.obs:
            tod = ob["tod"]
            tod.free_radec_quats()

    # Prepare auxiliary information for distributed map objects

    _, localsm, subnpix = get_submaps(args, comm, data)

    if args.pysm_model:
        focalplanes = [s.telescope.focalplane.detector_data for s in schedules]
        signalname = simulate_sky_signal(args, comm, data, focalplanes,
                                         subnpix, localsm, "signal")
    else:
        signalname = scan_sky_signal(args, comm, data, localsm, subnpix,
                                     "signal")

    # Set up objects to take copies of the TOD at appropriate times

    totalname, totalname_freq = setup_sigcopy(args)

    # Loop over Monte Carlos

    firstmc = args.MC_start
    nsimu = args.MC_count

    freqs = [float(freq) for freq in args.freq.split(",")]
    nfreq = len(freqs)

    for mc in range(firstmc, firstmc + nsimu):

        simulate_atmosphere(args, comm, data, mc, totalname)

        # Loop over frequencies with identical focal planes and identical
        # atmospheric noise.

        for ifreq, freq in enumerate(freqs):

            if comm.world_rank == 0:
                log.info("Processing frequency {}GHz {} / {}, MC = {}".format(
                    freq, ifreq + 1, nfreq, mc))

            # Make a copy of the atmosphere so we can scramble the gains and apply
            # frequency-dependent scaling.
            copy_signal(args, comm, data, totalname, totalname_freq)

            scale_atmosphere_by_frequency(args,
                                          comm,
                                          data,
                                          freq=freq,
                                          mc=mc,
                                          cache_name=totalname_freq)

            update_atmospheric_noise_weights(args, comm, data, freq, mc)

            # Add previously simulated sky signal to the atmospheric noise.

            add_signal(args,
                       comm,
                       data,
                       totalname_freq,
                       signalname,
                       purge=(nsimu == 1))

            mcoffset = ifreq * 1000000

            simulate_noise(args, comm, data, mc + mcoffset, totalname_freq)

            simulate_sss(args, comm, data, mc + mcoffset, totalname_freq)

            scramble_gains(args, comm, data, mc + mcoffset, totalname_freq)

            if (mc == firstmc) and (ifreq == 0):
                # For the first realization and frequency, optionally
                # export the timestream data.
                output_tidas(args, comm, data, totalname)
                output_spt3g(args, comm, data, totalname)

            outpath = setup_output(args, comm, mc + mcoffset, freq)

            # Bin and destripe maps

            apply_madam(
                args,
                comm,
                data,
                madampars,
                outpath,
                detweights,
                totalname_freq,
                freq=freq,
                time_comms=time_comms,
                telescope_data=telescope_data,
                first_call=(mc == firstmc),
            )

            if args.apply_polyfilter or args.apply_groundfilter:

                # Filter signal

                apply_polyfilter(args, comm, data, totalname_freq)

                apply_groundfilter(args, comm, data, totalname_freq)

                # Bin filtered maps

                apply_madam(
                    args,
                    comm,
                    data,
                    madampars,
                    outpath,
                    detweights,
                    totalname_freq,
                    freq=freq,
                    time_comms=time_comms,
                    telescope_data=telescope_data,
                    first_call=False,
                    extra_prefix="filtered",
                    bin_only=True,
                )

    gt.stop_all()
    if mpiworld is not None:
        mpiworld.barrier()
    timer = Timer()
    timer.start()
    alltimers = gather_timers(comm=mpiworld)
    if comm.world_rank == 0:
        out = os.path.join(args.outdir, "timing")
        dump_timing(alltimers, out)
        timer.stop()
        timer.report("Gather and dump timing info")
        timer0.report_clear("toast_ground_sim.py")
    return
Exemple #25
0
def main():
    env = Environment.get()
    env.enable_function_timers()

    log = Logger.get()
    gt = GlobalTimers.get()
    gt.start("toast_benchmark (total)")

    mpiworld, procs, rank = get_world()

    if rank == 0:
        log.info("TOAST version = {}".format(env.version()))
        log.info("Using a maximum of {} threads per process".format(env.max_threads()))
    if mpiworld is None:
        log.info("Running serially with one process at {}".format(str(datetime.now())))
    else:
        if rank == 0:
            log.info(
                "Running with {} processes at {}".format(procs, str(datetime.now()))
            )

    cases = {
        "tiny": 5000000,  # O(1) GB RAM
        "xsmall": 50000000,  # O(10) GB RAM
        "small": 500000000,  # O(100) GB RAM
        "medium": 5000000000,  # O(1) TB RAM
        "large": 50000000000,  # O(10) TB RAM
        "xlarge": 500000000000,  # O(100) TB RAM
        "heroic": 5000000000000,  # O(1000) TB RAM
    }

    args, comm, n_nodes, n_detector, case, group_seconds, n_group = job_config(
        mpiworld, cases
    )

    # Note:  The number of "days" here will just be an approximation of the desired
    # data volume since we are doing a realistic schedule for a real observing site.

    n_days = int(2.0 * (group_seconds * n_group) / (24 * 3600))
    if n_days == 0:
        n_days = 1

    if rank == 0:
        log.info(
            "Using {} detectors for approximately {} days".format(n_detector, n_days)
        )

    # Create the schedule file and input maps on one process
    if rank == 0:
        create_schedules(args, group_seconds, n_days)
        create_input_maps(args)
    if mpiworld is not None:
        mpiworld.barrier()

    if args.dry_run is not None:
        if rank == 0:
            log.info("Exit from dry run")
        # We are done!
        sys.exit(0)

    gt.start("toast_benchmark (science work)")

    # Load and broadcast the schedule file

    schedules = pipeline_tools.load_schedule(args, comm)

    # Load the weather and append to schedules

    pipeline_tools.load_weather(args, comm, schedules)

    # Simulate the focalplane

    detweights = create_focalplanes(args, comm, schedules, n_detector)

    # Create the TOAST data object to match the schedule.  This will
    # include simulating the boresight pointing.

    data, telescope_data, total_samples = create_observations(args, comm, schedules)

    # handle = None
    # if comm.world_rank == 0:
    #     handle = open(os.path.join(args.outdir, "distdata.txt"), "w")
    # data.info(handle)
    # if comm.world_rank == 0:
    #     handle.close()
    # if comm.comm_world is not None:
    #     comm.comm_world.barrier()

    # Split the communicator for day and season mapmaking

    time_comms = pipeline_tools.get_time_communicators(args, comm, data)

    # Expand boresight quaternions into detector pointing weights and
    # pixel numbers

    pipeline_tools.expand_pointing(args, comm, data)

    # Optionally rewrite the noise PSDs in each observation to include
    # elevation-dependence

    pipeline_tools.get_elevation_noise(args, comm, data)

    # Purge the pointing if we are NOT going to export the
    # data to a TIDAS volume
    if (args.tidas is None) and (args.spt3g is None):
        for ob in data.obs:
            tod = ob["tod"]
            tod.free_radec_quats()

    # Prepare auxiliary information for distributed map objects

    signalname = pipeline_tools.scan_sky_signal(args, comm, data, "signal")

    # Set up objects to take copies of the TOD at appropriate times

    totalname, totalname_freq = setup_sigcopy(args)

    # Loop over Monte Carlos

    firstmc = args.MC_start
    nsimu = args.MC_count

    freqs = [float(freq) for freq in args.freq.split(",")]
    nfreq = len(freqs)

    for mc in range(firstmc, firstmc + nsimu):

        pipeline_tools.simulate_atmosphere(args, comm, data, mc, totalname)

        # Loop over frequencies with identical focal planes and identical
        # atmospheric noise.

        for ifreq, freq in enumerate(freqs):

            if comm.world_rank == 0:
                log.info(
                    "Processing frequency {}GHz {} / {}, MC = {}".format(
                        freq, ifreq + 1, nfreq, mc
                    )
                )

            # Make a copy of the atmosphere so we can scramble the gains and apply
            # frequency-dependent scaling.
            pipeline_tools.copy_signal(args, comm, data, totalname, totalname_freq)

            pipeline_tools.scale_atmosphere_by_frequency(
                args, comm, data, freq=freq, mc=mc, cache_name=totalname_freq
            )

            pipeline_tools.update_atmospheric_noise_weights(args, comm, data, freq, mc)

            # Add previously simulated sky signal to the atmospheric noise.

            pipeline_tools.add_signal(
                args, comm, data, totalname_freq, signalname, purge=(nsimu == 1)
            )

            mcoffset = ifreq * 1000000

            pipeline_tools.simulate_noise(
                args, comm, data, mc + mcoffset, totalname_freq
            )

            pipeline_tools.scramble_gains(
                args, comm, data, mc + mcoffset, totalname_freq
            )

            outpath = setup_output(args, comm, mc + mcoffset, freq)

            # Bin and destripe maps

            pipeline_tools.apply_mapmaker(
                args,
                comm,
                data,
                outpath,
                totalname_freq,
                time_comms=time_comms,
                telescope_data=telescope_data,
                first_call=(mc == firstmc),
            )

            if args.apply_polyfilter or args.apply_groundfilter:

                # Filter signal

                pipeline_tools.apply_polyfilter(args, comm, data, totalname_freq)

                pipeline_tools.apply_groundfilter(args, comm, data, totalname_freq)

                # Bin filtered maps

                pipeline_tools.apply_mapmaker(
                    args,
                    comm,
                    data,
                    outpath,
                    totalname_freq,
                    time_comms=time_comms,
                    telescope_data=telescope_data,
                    first_call=False,
                    extra_prefix="filtered",
                    bin_only=True,
                )

    gt.stop_all()
    if mpiworld is not None:
        mpiworld.barrier()

    runtime = gt.seconds("toast_benchmark (science work)")
    prefactor = 1.0e-3
    kilo_samples = 1.0e-3 * total_samples
    sample_factor = 1.2
    det_factor = 2.0
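    # Empirical weighting of the benchmark: detector count is credited
    # quadratically (det_factor = 2.0), sample volume slightly super-linearly
    # (sample_factor = 1.2), normalized by node-seconds of runtime.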
    metric = (
        prefactor
        * n_detector ** det_factor
        * kilo_samples ** sample_factor
        / (n_nodes * runtime)
    )
    if rank == 0:
        msg = "Science Metric: {:0.1e} * ({:d}**{:0.2f}) * ({:0.3e}**{:0.3f}) / ({:0.1f} * {}) = {:0.2f}".format(
            prefactor,
            n_detector,
            det_factor,
            kilo_samples,
            sample_factor,
            runtime,
            n_nodes,
            metric,
        )
        log.info("")
        log.info(msg)
        log.info("")
        with open(os.path.join(args.outdir, "log"), "a") as f:
            f.write(msg)
            f.write("\n\n")

    timer = Timer()
    timer.start()
    alltimers = gather_timers(comm=mpiworld)
    if comm.world_rank == 0:
        out = os.path.join(args.outdir, "timing")
        dump_timing(alltimers, out)
        with open(os.path.join(args.outdir, "log"), "a") as f:
            f.write("Copy of Global Timers:\n")
            with open("{}.csv".format(out), "r") as t:
                f.write(t.read())
        timer.stop()
        timer.report("Gather and dump timing info")
    return
Exemple #26
0
    def _observe_sso(self, sso_az, sso_el, sso_dist, sso_dia, tod, comm,
                     prefix, focalplane):
        """
        Observe the SSO with each detector in tod
        """
        log = Logger.get()
        rank = 0
        if comm is not None:
            rank = comm.rank
        tmr = Timer()
        if self._report_timing:
            if comm is not None:
                comm.Barrier()
            tmr.start()

        nsamp = tod.local_samples[1]

        if rank == 0:
            log.info("{}Observing the SSO signal".format(prefix))

        # FIXME: we should get the center frequency from the bandpass
        band_dict = {'f030': 27, 'f040': 39, 'f090': 93,
                     'f150': 145, 'f230': 225, 'f290': 285}

        freq = None
        for band in band_dict.keys():
            if band in prefix:
                # FIXME we use the same, approximate center frequency for
                # SAT and LAT
                freq = band_dict[band]
                break

        for det in tod.local_dets:
            # Cache the output signal
            cachename = "{}_{}".format(self._out, det)
            if tod.cache.exists(cachename):
                ref = tod.cache.reference(cachename)
            else:
                ref = tod.cache.create(cachename, np.float64, (nsamp,))

            try:
                # Some TOD classes provide a shortcut to Az/El
                az, el = tod.read_azel(detector=det)
            except Exception as e:
                azelquat = tod.read_pntg(detector=det, azel=True)
                # Convert Az/El quaternion of the detector back into
                # angles for the simulation.
                theta, phi = qa.to_position(azelquat)
                # Azimuth is measured in the opposite direction
                # than longitude
                az = 2 * np.pi - phi
                el = np.pi / 2 - theta

            if "bandpass_transmission" in focalplane[det]:
                # We have full bandpasses for the detector
                bandpass_freqs = focalplane[det]["bandpass_freq_ghz"]
                bandpass = focalplane[det]["bandpass_transmission"]
            else:
                if "bandcenter_ghz" in focalplane[det]:
                    # Use detector bandpass from the focalplane
                    center = focalplane[det]["bandcenter_ghz"]
                    width = focalplane[det]["bandwidth_ghz"]
                else:
                    # Use default values for the entire focalplane
                    if freq is None:
                        raise RuntimeError(
                            "You must supply the nominal frequency if bandpasses "
                            "are not available"
                        )
                    center = freq
                    width = 0.2 * freq
                bandpass_freqs = np.array([center - width / 2, center + width / 2])
                bandpass = np.ones(2)

            nstep = 1001
            fmin, fmax = bandpass_freqs[0], bandpass_freqs[-1]
            det_freqs = np.linspace(fmin, fmax, nstep)
            det_bandpass = np.interp(det_freqs, bandpass_freqs, bandpass)
            det_bandpass /= np.sum(det_bandpass)

            self._get_planet_temp(self.sso_name)
            ttemp_det = np.interp(det_freqs, self.t_freqs, self.ttemp)
            ttemp_det = np.sum(ttemp_det * det_bandpass)
            beam, radius = self._get_beam_map(det, sso_dia, ttemp_det)

            # Interpolate the beam map at appropriate locations
            x = (az - sso_az) * np.cos(el)
            y = el - sso_el
            r = np.sqrt(x ** 2 + y ** 2)
            good = r < radius
            sig = beam(x[good], y[good], grid=False)
            ref[:][good] += sig

            del ref, sig, beam

        if self._report_timing:
            if comm is not None:
                comm.Barrier()
            if rank == 0:
                tmr.stop()
                tmr.report("{}OpSimSSO: Observe signal".format(prefix))
        return
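The bandpass handling above reduces to a weighted average of the source temperature over the detector passband. Below is a self-contained sketch of just that step, using a hypothetical top-hat bandpass; the frequency grid and temperature spectrum are made up for illustration and stand in for the focalplane and _get_planet_temp inputs.

import numpy as np

# Hypothetical top-hat bandpass: 93 GHz center, 20% fractional width.
center, width = 93.0, 0.2 * 93.0
bandpass_freqs = np.array([center - width / 2, center + width / 2])
bandpass = np.ones(2)

# Resample the bandpass on a fine grid and normalize to unit sum,
# mirroring the nstep / np.interp logic in _observe_sso above.
nstep = 1001
det_freqs = np.linspace(bandpass_freqs[0], bandpass_freqs[-1], nstep)
det_bandpass = np.interp(det_freqs, bandpass_freqs, bandpass)
det_bandpass /= np.sum(det_bandpass)

# Made-up brightness temperature spectrum of the source on a coarse grid
# (stand-in for self.t_freqs / self.ttemp).
t_freqs = np.array([30.0, 90.0, 150.0, 300.0])
ttemp = np.array([200.0, 210.0, 220.0, 240.0])

# Band-averaged temperature seen by this detector.
ttemp_det = np.sum(np.interp(det_freqs, t_freqs, ttemp) * det_bandpass)
print("band-averaged temperature: {:.2f} K".format(ttemp_det))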
Example #27
0
def simulate_sky_signal(args, comm, data, focalplanes, signalname=None, mc=0):
    """ Use PySM to simulate smoothed sky signal.

    """
    log = Logger.get()
    timer = Timer()
    timer.start()
    # Convolve a signal TOD from PySM
    if comm.world_rank == 0:
        log.info("Simulating sky signal with PySM")

    map_dist = (None if comm is None else pysm.MapDistribution(
        nside=args.nside, mpi_comm=comm.comm_rank))
    pysm_component_objects = []
    pysm_model = []
    for model_tag in args.pysm_model.split(","):

        if not model_tag.startswith("SO"):
            pysm_model.append(model_tag)
        else:
            if so_pysm_models is None:
                raise RuntimeError(
                    "{} requires so_pysm_models".format(model_tag))
            if model_tag == "SO_x1_cib":
                pysm_component_objects.append(
                    so_pysm_models.WebSkyCIB(
                        websky_version="0.3",
                        interpolation_kind="linear",
                        nside=args.nside,
                        map_dist=map_dist,
                    ))
            elif model_tag == "SO_x1_ksz":
                pysm_component_objects.append(
                    so_pysm_models.WebSkySZ(
                        version="0.3",
                        nside=args.nside,
                        map_dist=map_dist,
                        sz_type="kinetic",
                    ))
            elif model_tag == "SO_x1_tsz":
                pysm_component_objects.append(
                    so_pysm_models.WebSkySZ(
                        version="0.3",
                        nside=args.nside,
                        map_dist=map_dist,
                        sz_type="thermal",
                    ))
            elif model_tag.startswith("SO_x1_cmb"):
                lensed = "unlensed" not in model_tag
                include_solar_dipole = "solar" in model_tag
                pysm_component_objects.append(
                    so_pysm_models.WebSkyCMBMap(
                        websky_version="0.3",
                        lensed=lensed,
                        include_solar_dipole=include_solar_dipole,
                        seed=1,
                        nside=args.nside,
                        map_dist=map_dist,
                    ))
            else:
                if not model_tag.endswith("s") and args.nside > 512:
                    model_tag += "s"
                pysm_component_objects.append(
                    so_pysm_models.get_so_models(model_tag,
                                                 args.nside,
                                                 map_dist=map_dist))

    if signalname is None:
        signalname = "pysmsignal"
    op_sim_pysm = OpSimPySM(
        data,
        comm=comm.comm_rank,
        out=signalname,
        pysm_model=pysm_model,
        pysm_component_objects=pysm_component_objects,
        focalplanes=focalplanes,
        apply_beam=args.pysm_apply_beam,
        coord="G",  # setting G doesn't perform any rotation
        map_dist=map_dist,
    )
    assert args.coord in "CQ", "Input SO models are always in Equatorial coordinates"
    op_sim_pysm.exec(data)
    if comm.comm_world is not None:
        comm.comm_world.barrier()
    timer.stop()
    if comm.world_rank == 0:
        timer.report("PySM")

    return signalname
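The model-string handling above splits a comma-separated tag list into native PySM tags and SO-specific tags that require so_pysm_models. A minimal sketch of just the splitting logic, with the construction of the SO component objects omitted:

def split_pysm_tags(pysm_model):
    """Split a comma-separated model string into native PySM tags and
    tags that need so_pysm_models (sketch; SO objects are not built here).
    """
    native_tags = []
    so_tags = []
    for model_tag in pysm_model.split(","):
        if model_tag.startswith("SO"):
            so_tags.append(model_tag)
        else:
            native_tags.append(model_tag)
    return native_tags, so_tags

# Example: two native PySM models plus one WebSky CIB component.
print(split_pysm_tags("d1,s1,SO_x1_cib"))
# -> (['d1', 's1'], ['SO_x1_cib'])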
Example #28
0
def main():
    env = Environment.get()
    log = Logger.get()
    gt = GlobalTimers.get()
    gt.start("toast_satellite_sim (total)")
    timer0 = Timer()
    timer0.start()

    mpiworld, procs, rank, comm = pipeline_tools.get_comm()
    args, comm, groupsize = parse_arguments(comm, procs)

    # Parse options

    tmr = Timer()
    tmr.start()

    if comm.world_rank == 0:
        os.makedirs(args.outdir, exist_ok=True)

    focalplane, gain, detweights = load_focalplane(args, comm)
    if comm.world_rank == 0:
        tmr.report_clear("Load focalplane")

    data = create_observations(args, comm, focalplane, groupsize)
    if comm.world_rank == 0:
        tmr.report_clear("Create observations")

    pipeline_tools.expand_pointing(args, comm, data)
    if comm.world_rank == 0:
        tmr.report_clear("Expand pointing")

    signalname = None
    if args.pysm_model:
        skyname = pipeline_tools.simulate_sky_signal(args, comm, data,
                                                     [focalplane], "signal")
    else:
        skyname = pipeline_tools.scan_sky_signal(args, comm, data, "signal")
    if skyname is not None:
        signalname = skyname
    if comm.world_rank == 0:
        tmr.report_clear("Simulate sky signal")

    # NOTE: Conviqt could use different input file names for different
    # Monte Carlo indices, but the operator would need to be invoked within
    # the Monte Carlo loop.
    skyname = pipeline_tools.apply_conviqt(
        args,
        comm,
        data,
        "signal",
        mc=args.MC_start,
    )
    if skyname is not None:
        signalname = skyname
    if comm.world_rank == 0:
        tmr.report_clear("Apply beam convolution")

    diponame = pipeline_tools.simulate_dipole(args, comm, data, "signal")
    if diponame is not None:
        signalname = diponame
    if comm.world_rank == 0:
        tmr.report_clear("Simulate dipole")

    # in debug mode, print out data distribution information
    if args.debug:
        handle = None
        if comm.world_rank == 0:
            handle = open(os.path.join(args.outdir, "distdata.txt"), "w")
        data.info(handle)
        if comm.world_rank == 0:
            handle.close()
        if comm.comm_world is not None:
            comm.comm_world.barrier()
        if comm.world_rank == 0:
            tmr.report_clear("Dumping data distribution")

    # Mapmaking.

    if args.use_madam:
        # Initialize madam parameters
        madampars = pipeline_tools.setup_madam(args)
        if comm.comm_world is not None:
            comm.comm_world.barrier()
        if comm.world_rank == 0:
            tmr.report_clear("Initialize madam map-making")

    # Loop over Monte Carlos

    firstmc = args.MC_start
    nmc = args.MC_count

    for mc in range(firstmc, firstmc + nmc):
        mctmr = Timer()
        mctmr.start()

        # create output directory for this realization
        outpath = os.path.join(args.outdir, "mc_{:03d}".format(mc))

        pipeline_tools.simulate_noise(args,
                                      comm,
                                      data,
                                      mc,
                                      "tot_signal",
                                      overwrite=True)
        if comm.comm_world is not None:
            comm.comm_world.barrier()
        if comm.world_rank == 0:
            tmr.report_clear("    Simulate noise {:04d}".format(mc))

        # add sky signal
        pipeline_tools.add_signal(args, comm, data, "tot_signal", signalname)
        if comm.comm_world is not None:
            comm.comm_world.barrier()
        if comm.world_rank == 0:
            tmr.report_clear("    Add sky signal {:04d}".format(mc))

        if gain is not None:
            op_apply_gain = OpApplyGain(gain, name="tot_signal")
            op_apply_gain.exec(data)
            if comm.comm_world is not None:
                comm.comm_world.barrier()
            if comm.world_rank == 0:
                tmr.report_clear("    Apply gains {:04d}".format(mc))

        if mc == firstmc:
            # For the first realization, optionally export the
            # timestream data.  If we had observation intervals defined,
            # we could pass "use_interval=True" to the export operators,
            # which would ensure breaks in the exported data at
            # acceptable places.
            pipeline_tools.output_tidas(args, comm, data, "tot_signal")
            pipeline_tools.output_spt3g(args, comm, data, "tot_signal")
            if comm.comm_world is not None:
                comm.comm_world.barrier()
            if comm.world_rank == 0:
                tmr.report_clear("    Write TOD snapshot {:04d}".format(mc))

        if args.use_madam:
            pipeline_tools.apply_madam(args, comm, data, madampars, outpath,
                                       detweights, "tot_signal")
        else:
            pipeline_tools.apply_mapmaker(args, comm, data, outpath,
                                          "tot_signal")

        if comm.comm_world is not None:
            comm.comm_world.barrier()
        if comm.world_rank == 0:
            tmr.report_clear("  Map-making {:04d}".format(mc))

        if comm.comm_world is not None:
            comm.comm_world.barrier()
        if comm.world_rank == 0:
            mctmr.report_clear("  Monte Carlo loop {:04d}".format(mc))

    gt.stop_all()
    if comm.comm_world is not None:
        comm.comm_world.barrier()
    tmr.stop()
    tmr.clear()
    tmr.start()
    alltimers = gather_timers(comm=comm.comm_world)
    if comm.world_rank == 0:
        out = os.path.join(args.outdir, "timing")
        dump_timing(alltimers, out)
        tmr.stop()
        tmr.report("Gather and dump timing info")
        timer0.report_clear("toast_satellite_sim.py")
    return
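The Monte Carlo structure is the same in each of these pipelines: every realization gets its own seed offset and its own output directory named mc_NNN. A stripped-down sketch of that pattern, with the per-realization work (noise simulation, signal addition, mapmaking) replaced by a placeholder callback:

import os

def run_monte_carlo(outdir, mc_start, mc_count, process_realization):
    """Sketch of the per-realization loop used above.  The callback stands
    in for noise simulation, signal addition and mapmaking for one index.
    """
    for mc in range(mc_start, mc_start + mc_count):
        outpath = os.path.join(outdir, "mc_{:03d}".format(mc))
        os.makedirs(outpath, exist_ok=True)
        process_realization(mc, outpath)

# Example with a trivial placeholder callback.
run_monte_carlo("out", 0, 3, lambda mc, path: print(mc, path))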
Example #29
0
def main():
    env = Environment.get()
    log = Logger.get()
    gt = GlobalTimers.get()
    gt.start("toast_satellite_sim (total)")
    timer0 = Timer()
    timer0.start()

    mpiworld, procs, rank, comm = get_comm()
    args, comm, groupsize = parse_arguments(comm, procs)

    # Parse options

    tmr = Timer()
    tmr.start()

    if comm.world_rank == 0:
        os.makedirs(args.outdir, exist_ok=True)

    focalplane, gain, detweights = load_focalplane(args, comm)

    data = create_observations(args, comm, focalplane, groupsize)

    expand_pointing(args, comm, data)

    localpix, localsm, subnpix = get_submaps(args, comm, data)

    signalname = None
    skyname = simulate_sky_signal(args, comm, data, [focalplane], subnpix,
                                  localsm, "signal")
    if skyname is not None:
        signalname = skyname

    diponame = simulate_dipole(args, comm, data, "signal")
    if diponame is not None:
        signalname = diponame

    # Mapmaking.

    if not args.use_madam:
        if comm.world_rank == 0:
            log.info("Not using Madam, will only make a binned map")

        npp, zmap = init_binner(args,
                                comm,
                                data,
                                detweights,
                                subnpix=subnpix,
                                localsm=localsm)

        # Loop over Monte Carlos

        firstmc = args.MC_start
        nmc = args.MC_count

        for mc in range(firstmc, firstmc + nmc):
            mctmr = Timer()
            mctmr.start()

            outpath = os.path.join(args.outdir, "mc_{:03d}".format(mc))

            simulate_noise(args, comm, data, mc, "tot_signal", overwrite=True)

            # add sky signal
            add_signal(args, comm, data, "tot_signal", signalname)

            if gain is not None:
                timer = Timer()
                timer.start()
                op_apply_gain = OpApplyGain(gain, name="tot_signal")
                op_apply_gain.exec(data)
                if comm.world_rank == 0:
                    timer.report_clear("  Apply gains {:04d}".format(mc))

            if mc == firstmc:
                # For the first realization, optionally export the
                # timestream data.  If we had observation intervals defined,
                # we could pass "use_interval=True" to the export operators,
                # which would ensure breaks in the exported data at
                # acceptable places.
                output_tidas(args, comm, data, "tot_signal")
                output_spt3g(args, comm, data, "tot_signal")

            apply_binner(args, comm, data, npp, zmap, detweights, outpath,
                         "tot_signal")

            if comm.world_rank == 0:
                mctmr.report_clear("  Map-making {:04d}".format(mc))
    else:

        # Initialize madam parameters

        madampars = setup_madam(args)

        # in debug mode, print out data distribution information
        if args.debug:
            handle = None
            if comm.world_rank == 0:
                handle = open(os.path.join(args.outdir, "distdata.txt"), "w")
            data.info(handle)
            if comm.world_rank == 0:
                handle.close()
            if comm.comm_world is not None:
                comm.comm_world.barrier()
            if comm.world_rank == 0:
                tmr.report_clear("Dumping data distribution")

        # Loop over Monte Carlos

        firstmc = args.MC_start
        nmc = args.MC_count

        for mc in range(firstmc, firstmc + nmc):
            mctmr = Timer()
            mctmr.start()

            # create output directory for this realization
            outpath = os.path.join(args.outdir, "mc_{:03d}".format(mc))

            simulate_noise(args, comm, data, mc, "tot_signal", overwrite=True)

            # add sky signal
            add_signal(args, comm, data, "tot_signal", signalname)

            if gain is not None:
                op_apply_gain = OpApplyGain(gain, name="tot_signal")
                op_apply_gain.exec(data)

            if comm.comm_world is not None:
                comm.comm_world.barrier()
            if comm.world_rank == 0:
                tmr.report_clear("  Apply gains {:04d}".format(mc))

            apply_madam(args, comm, data, madampars, outpath, detweights,
                        "tot_signal")

            if comm.comm_world is not None:
                comm.comm_world.barrier()
            if comm.world_rank == 0:
                mctmr.report_clear("  Map-making {:04d}".format(mc))

    gt.stop_all()
    if comm.comm_world is not None:
        comm.comm_world.barrier()
    tmr.stop()
    tmr.clear()
    tmr.start()
    alltimers = gather_timers(comm=comm.comm_world)
    if comm.world_rank == 0:
        out = os.path.join(args.outdir, "timing")
        dump_timing(alltimers, out)
        tmr.stop()
        tmr.report("Gather and dump timing info")
        timer0.report_clear("toast_satellite_sim.py")
    return
Example #30
0
def main():
    log = Logger.get()
    gt = GlobalTimers.get()
    gt.start("toast_planck_reduce (total)")

    mpiworld, procs, rank, comm = get_comm()

    # This is the 2-level toast communicator.  By default,
    # there is just one group which spans MPI_COMM_WORLD.
    comm = toast.Comm()

    if comm.comm_world.rank == 0:
        print('Running with {} processes at {}'
              ''.format(procs, str(datetime.datetime.now())))

    parser = argparse.ArgumentParser(description='Planck Ringset making',
                                     fromfile_prefix_chars='@')
    parser.add_argument('--rimo', required=True, help='RIMO file')
    parser.add_argument('--freq', required=True, type=int, help='Frequency')
    parser.add_argument('--dets',
                        required=False,
                        default=None,
                        help='Detector list (comma separated)')
    parser.add_argument('--nosingle',
                        dest='nosingle',
                        required=False,
                        default=False,
                        action='store_true',
                        help='Do not compute single detector PSDs')
    parser.add_argument('--effdir',
                        required=True,
                        help='Input Exchange Format File directory')
    parser.add_argument('--effdir_pntg',
                        required=False,
                        help='Input Exchange Format File directory '
                        'for pointing')
    parser.add_argument('--obtmask',
                        required=False,
                        default=1,
                        type=int,
                        help='OBT flag mask')
    parser.add_argument('--flagmask',
                        required=False,
                        default=1,
                        type=int,
                        help='Quality flag mask')
    parser.add_argument('--skymask', required=False, help='Pixel mask file')
    parser.add_argument('--skymap', required=False, help='Sky estimate file')
    parser.add_argument('--skypol',
                        dest='skypol',
                        required=False,
                        default=False,
                        action='store_true',
                        help='Sky estimate is polarized')
    parser.add_argument('--no_spin_harmonics',
                        dest='no_spin_harmonics',
                        required=False,
                        default=False,
                        action='store_true',
                        help='Do not include PSD bins with spin harmonics')
    parser.add_argument('--calibrate',
                        required=False,
                        help='Path to calibration file to calibrate with.')
    parser.add_argument('--calibrate_signal_estimate',
                        dest='calibrate_signal_estimate',
                        required=False,
                        default=False,
                        action='store_true',
                        help='Calibrate '
                        'the signal estimate using linear regression.')
    parser.add_argument('--ringdb', required=True, help='Ring DB file')
    parser.add_argument('--odfirst',
                        required=False,
                        default=None,
                        type=int,
                        help='First OD to use')
    parser.add_argument('--odlast',
                        required=False,
                        default=None,
                        type=int,
                        help='Last OD to use')
    parser.add_argument('--ringfirst',
                        required=False,
                        default=None,
                        type=int,
                        help='First ring to use')
    parser.add_argument('--ringlast',
                        required=False,
                        default=None,
                        type=int,
                        help='Last ring to use')
    parser.add_argument('--obtfirst',
                        required=False,
                        default=None,
                        type=float,
                        help='First OBT to use')
    parser.add_argument('--obtlast',
                        required=False,
                        default=None,
                        type=float,
                        help='Last OBT to use')
    parser.add_argument('--out',
                        required=False,
                        default='.',
                        help='Output directory')
    parser.add_argument('--nbin_psd',
                        required=False,
                        default=1000,
                        type=int,
                        help='Number of logarithmically '
                        'distributed spectral bins to write.')
    parser.add_argument('--lagmax',
                        required=False,
                        default=100000,
                        type=int,
                        help='Maximum lag to evaluate for the '
                        'autocovariance function [samples].')
    parser.add_argument('--stationary_period',
                        required=False,
                        default=86400.,
                        type=float,
                        help='Length of a stationary interval [seconds].')
    # Dipole parameters
    dipogroup = parser.add_mutually_exclusive_group()
    dipogroup.add_argument('--dipole',
                           dest='dipole',
                           required=False,
                           default=False,
                           action='store_true',
                           help='Simulate dipole')
    dipogroup.add_argument('--solsys_dipole',
                           dest='solsys_dipole',
                           required=False,
                           default=False,
                           action='store_true',
                           help='Simulate solar system dipole')
    dipogroup.add_argument('--orbital_dipole',
                           dest='orbital_dipole',
                           required=False,
                           default=False,
                           action='store_true',
                           help='Simulate orbital dipole')
    # Extra filter
    parser.add_argument('--filterfile',
                        required=False,
                        help='Extra filter file.')

    try:
        args = parser.parse_args()
    except SystemExit:
        sys.exit(0)

    if comm.comm_world.rank == 0:
        print('All parameters:')
        print(args, flush=True)

    timer = Timer()
    timer.start()

    odrange = None
    if args.odfirst is not None and args.odlast is not None:
        odrange = (args.odfirst, args.odlast)

    ringrange = None
    if args.ringfirst is not None and args.ringlast is not None:
        ringrange = (args.ringfirst, args.ringlast)

    obtrange = None
    if args.obtfirst is not None and args.obtlast is not None:
        obtrange = (args.obtfirst, args.obtlast)

    detectors = None
    if args.dets is not None:
        detectors = re.split(',', args.dets)

    if args.nosingle and (detectors is None or len(detectors) != 2):
        raise RuntimeError('You cannot skip the single-detector PSDs '
                           'unless exactly two detectors are specified.')

    # This is the distributed data, consisting of one or
    # more observations, each distributed over a communicator.
    data = toast.Data(comm)

    # Make output directory

    if not os.path.isdir(args.out) and comm.comm_world.rank == 0:
        os.mkdir(args.out)

    # create the TOD for this observation

    tod = tp.Exchange(
        comm=comm.comm_group,
        detectors=detectors,
        ringdb=args.ringdb,
        effdir_in=args.effdir,
        effdir_pntg=args.effdir_pntg,
        obt_range=obtrange,
        ring_range=ringrange,
        od_range=odrange,
        freq=args.freq,
        RIMO=args.rimo,
        obtmask=args.obtmask,
        flagmask=args.flagmask,
        do_eff_cache=False,
    )

    rimo = tod.rimo

    ob = {}
    ob['name'] = 'mission'
    ob['id'] = 0
    ob['tod'] = tod
    ob['intervals'] = tod.valid_intervals
    ob['baselines'] = None
    ob['noise'] = tod.noise

    data.obs.append(ob)

    comm.comm_world.barrier()
    if comm.comm_world.rank == 0:
        timer.report_clear("Metadata queries")

    # Read the signal

    tod_name = 'signal'
    flags_name = 'flags'

    reader = tp.OpInputPlanck(signal_name=tod_name, flags_name=flags_name)
    if comm.comm_world.rank == 0:
        print('Reading input signal from {}'.format(args.effdir), flush=True)
    reader.exec(data)
    comm.comm_world.barrier()
    if comm.comm_world.rank == 0:
        timer.report_clear("Reading")

    if args.calibrate is not None:
        fn = args.calibrate
        if comm.comm_world.rank == 0:
            print('Calibrating with {}'.format(fn), flush=True)
        calibrator = tp.OpCalibPlanck(signal_in=tod_name,
                                      signal_out=tod_name,
                                      file_gain=fn)
        calibrator.exec(data)
        comm.comm_world.barrier()
        if comm.comm_world.rank == 0:
            timer.report_clear("Calibrate")

    # Optionally subtract the dipole

    do_dipole = (args.dipole or args.solsys_dipole or args.orbital_dipole)

    if do_dipole:
        if args.dipole:
            dipomode = 'total'
        elif args.solsys_dipole:
            dipomode = 'solsys'
        else:
            dipomode = 'orbital'

        dipo = tp.OpDipolePlanck(args.freq,
                                 mode=dipomode,
                                 output='dipole',
                                 keep_quats=True)
        dipo.exec(data)

        comm.comm_world.barrier()
        if comm.comm_world.rank == 0:
            timer.report_clear("Dipole")

        subtractor = tp.OpCacheMath(in1=tod_name,
                                    in2='dipole',
                                    subtract=True,
                                    out=tod_name)
        if comm.comm_world.rank == 0:
            print('Subtracting dipole', flush=True)
        subtractor.exec(data)

        comm.comm_world.barrier()
        if comm.comm_world.rank == 0:
            timer.report_clear("Dipole subtraction")

    # Optionally filter the signal

    apply_filter(args, data)
    timer.clear()

    # Estimate noise

    noise_estimator = tp.OpNoiseEstim(
        signal=tod_name,
        flags=flags_name,
        detmask=args.flagmask,
        commonmask=args.obtmask,
        maskfile=args.skymask,
        mapfile=args.skymap,
        out=args.out,
        rimo=rimo,
        pol=args.skypol,
        nbin_psd=args.nbin_psd,
        lagmax=args.lagmax,
        stationary_period=args.stationary_period,
        nosingle=args.nosingle,
        no_spin_harmonics=args.no_spin_harmonics,
        calibrate_signal_estimate=args.calibrate_signal_estimate)

    noise_estimator.exec(data)

    comm.comm_world.barrier()
    if comm.comm_world.rank == 0:
        timer.report_clear("Noise estimation")

    gt.stop_all()
    if mpiworld is not None:
        mpiworld.barrier()
    timer = Timer()
    timer.start()
    alltimers = gather_timers(comm=mpiworld)
    if comm.world_rank == 0:
        out = os.path.join(args.out, "timing")
        dump_timing(alltimers, out)
        timer.stop()
        timer.report("Gather and dump timing info")
    return
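Since the parser above sets fromfile_prefix_chars='@', long option lists can be kept in a parameter file (one token per line) and passed as a single @file argument. A short sketch of that usage with a reduced set of the options defined above; the script and file names here are illustrative.

import argparse

parser = argparse.ArgumentParser(
    description='Planck ringset making (reduced sketch)',
    fromfile_prefix_chars='@')
parser.add_argument('--rimo', required=True, help='RIMO file')
parser.add_argument('--freq', required=True, type=int, help='Frequency')
parser.add_argument('--out', required=False, default='.',
                    help='Output directory')

# With a file params.txt containing, one token per line:
#   --rimo
#   RIMO.fits
#   --freq
#   143
# the script could be run as:  python noise_estimation.py @params.txt
# An explicit argument list is parsed here to keep the sketch self-contained.
args = parser.parse_args(['--rimo', 'RIMO.fits', '--freq', '143'])
print(args)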