Example 1
def main():

    # This is the 2-level toast communicator.  By default,
    # there is just one group which spans MPI_COMM_WORLD.
    comm = toast.Comm()

    # Create an argparse and add custom arguments
    parser = argparse.ArgumentParser(description="...")
    parser.add_argument('--groupsize',
                        required=False, type=np.int,
                        help='Size of a process group assigned to a CES')

    # Pass the argparse object to the timing module, which will add its own
    # timing arguments and return the parser.parse_args() result after
    # handling the timing-specific options.
    args = timing.add_arguments_and_parse(parser, timing.FILE(noquotes=True))
    # create the primary auto timer for the entire script
    autotimer = timing.auto_timer(timing.FILE())
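
A minimal sketch of the non-default case, assuming mpi4py and the toast package are available: --groupsize (handled in the later examples) splits MPI_COMM_WORLD into process groups, and each group is then assigned its own observations.

from mpi4py import MPI
import toast

# Four processes per group; each group handles a subset of the observations.
comm = toast.Comm(world=MPI.COMM_WORLD, groupsize=4)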
Example 2
def main():

    args, start_timestamp, stop_timestamp = parse_args()

    autotimer = timing.auto_timer(timing.FILE())

    patches = parse_patches(args)

    build_schedule(args, start_timestamp, stop_timestamp,
                   args.sun_el_max * degree, args.sun_avoidance_angle * degree,
                   args.sun_angle_min * degree, args.moon_angle_min * degree,
                   args.el_min * degree, args.el_max * degree,
                   args.fp_radius * degree, patches)
Example 3
def main():

    parser = argparse.ArgumentParser(
        description="Simulate fake hexagonal focalplane.",
        fromfile_prefix_chars='@')

    parser.add_argument("--minpix",
                        required=False,
                        type=int,
                        default=100,
                        help="minimum number of pixels to use")

    parser.add_argument("--out",
                        required=False,
                        default="fp_fake",
                        help="Root name of output pickle file")

    parser.add_argument("--fwhm",
                        required=False,
                        type=float,
                        default=5.0,
                        help="beam FWHM in arcmin")

    parser.add_argument("--fwhm_sigma",
                        required=False,
                        type=float,
                        default=0,
                        help="Relative beam FWHM distribution width")

    parser.add_argument("--fov",
                        required=False,
                        type=float,
                        default=5.0,
                        help="Field of View in degrees")

    parser.add_argument("--psd_fknee",
                        required=False,
                        type=float,
                        default=0.05,
                        help="Detector noise model f_knee in Hz")

    parser.add_argument("--psd_NET",
                        required=False,
                        type=float,
                        default=60.0e-6,
                        help="Detector noise model NET in K*sqrt(sec)")

    parser.add_argument("--psd_alpha",
                        required=False,
                        type=float,
                        default=1.0,
                        help="Detector noise model slope")

    parser.add_argument("--psd_fmin",
                        required=False,
                        type=float,
                        default=1.0e-5,
                        help="Detector noise model f_min in Hz")

    parser.add_argument("--bandcenter_ghz",
                        required=False,
                        type=float,
                        help="Band center frequency [GHz]")

    parser.add_argument("--bandcenter_sigma",
                        required=False,
                        type=float,
                        default=0,
                        help="Relative band center distribution width")

    parser.add_argument("--bandwidth_ghz",
                        required=False,
                        type=float,
                        help="Bandwidth [GHz]")

    parser.add_argument("--bandwidth_sigma",
                        required=False,
                        type=float,
                        default=0,
                        help="Relative bandwidth distribution width")

    parser.add_argument("--random_seed",
                        required=False,
                        type=np.int,
                        default=123456,
                        help="Random number generator seed for randomized "
                        "detector parameters")

    args = timing.add_arguments_and_parse(parser, timing.FILE(noquotes=True))

    # Guard against being called with multiple processes
    if MPI.COMM_WORLD.rank == 0:
        # Make one big hexagon layout at the center of the focalplane.
        # Compute the number of pixels that is at least the number requested.

        test = args.minpix - 1
        nrings = 0
        while (test - 6 * nrings) > 0:
            test -= 6 * nrings
            nrings += 1

        npix = 1
        for r in range(1, nrings + 1):
            npix += 6 * r
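
        # For example, the default minpix=100 gives nrings=6 and npix=127
        # (254 detectors), since npix = 1 + 6*(1 + 2 + ... + nrings)
        #                              = 1 + 3 * nrings * (nrings + 1).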

        print("using {} pixels ({} detectors)".format(npix, npix * 2))

        # Translate the field-of-view into the distance between the flat
        # sides of the hexagon.
        angwidth = args.fov * np.cos(30 * degree)

        Apol = tt.hex_pol_angles_qu(npix, offset=0.0)
        Bpol = tt.hex_pol_angles_qu(npix, offset=90.0)

        Adets = tt.hex_layout(npix, angwidth, "fake_", "A", Apol)
        Bdets = tt.hex_layout(npix, angwidth, "fake_", "B", Bpol)

        dets = Adets.copy()
        dets.update(Bdets)

        np.random.seed(args.random_seed)

        for indx, d in enumerate(sorted(dets.keys())):
            dets[d]["fknee"] = args.psd_fknee
            dets[d]["fmin"] = args.psd_fmin
            dets[d]["alpha"] = args.psd_alpha
            dets[d]["NET"] = args.psd_NET
            # This is in degrees, but the input is in arcmin.
            dets[d]["fwhm_deg"] = (args.fwhm / 60.0)  \
                * (1 + np.random.randn()*args.fwhm_sigma)
            # This is a fixed value, in arcmin.
            dets[d]["fwhm"] = args.fwhm
            if args.bandcenter_ghz:
                dets[d]["bandcenter_ghz"] \
                    = args.bandcenter_ghz * (1+np.random.randn()*args.bandcenter_sigma)
            if args.bandwidth_ghz:
                dets[d]["bandwidth_ghz"] \
                    = args.bandwidth_ghz * (1+np.random.randn()*args.bandwidth_sigma)
            dets[d]["index"] = indx

        outfile = "{}_{}".format(args.out, npix)
        qdets = {x: y["quat"] for x, y in dets.items()}
        beams = {x: (60.0 * y["fwhm_deg"]) for x, y in dets.items()}
        tt.plot_focalplane(qdets,
                           args.fov,
                           args.fov,
                           "{}.png".format(outfile),
                           fwhm=beams)

        with open("{}.pkl".format(outfile), "wb") as p:
            pickle.dump(dets, p)

    return
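
The pickle written above is the same kind of focalplane file that example 10 loads through its --fp option.  A minimal sketch of reading it back, assuming the default --out and --minpix values (which produce fp_fake_127.pkl):

import pickle

# Load the focalplane dictionary keyed by detector name.
with open("fp_fake_127.pkl", "rb") as p:
    fp = pickle.load(p)

# Each entry carries the per-detector properties assigned in the loop above.
print(len(fp), fp[sorted(fp.keys())[0]]["fwhm"])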
Example 4
def parse_arguments(comm):

    parser = argparse.ArgumentParser(
        description="Simulate ground-based boresight pointing.  Simulate "
        "and map astrophysical signal.",
        fromfile_prefix_chars='@')
    parser.add_argument('--groupsize',
                        required=False,
                        type=np.int,
                        help='Size of a process group assigned to a CES')

    parser.add_argument('--timezone',
                        required=False,
                        type=np.int,
                        default=0,
                        help='Offset to apply to MJD to separate days [hours]')
    parser.add_argument('--coord',
                        required=False,
                        default='C',
                        help='Sky coordinate system [C,E,G]')
    parser.add_argument('--schedule',
                        required=True,
                        help='CES schedule file from toast_ground_schedule.py')
    parser.add_argument('--samplerate',
                        required=False,
                        default=100.0,
                        type=np.float,
                        help='Detector sample rate (Hz)')
    parser.add_argument('--scanrate',
                        required=False,
                        default=1.0,
                        type=np.float,
                        help='Scanning rate [deg / s]')
    parser.add_argument('--scan_accel',
                        required=False,
                        default=1.0,
                        type=np.float,
                        help='Scanning rate change [deg / s^2]')
    parser.add_argument('--sun_angle_min',
                        required=False,
                        default=30.0,
                        type=np.float,
                        help='Minimum azimuthal distance between the Sun and '
                        'the boresight [deg]')

    parser.add_argument('--polyorder',
                        required=False,
                        type=np.int,
                        help='Polynomial order for the polyfilter')

    parser.add_argument('--wbin_ground',
                        required=False,
                        type=np.float,
                        help='Ground template bin width [degrees]')

    parser.add_argument('--gain_sigma',
                        required=False,
                        type=np.float,
                        help='Gain error distribution')

    parser.add_argument('--hwprpm',
                        required=False,
                        default=0.0,
                        type=np.float,
                        help='The rate (in RPM) of the HWP rotation')
    parser.add_argument('--hwpstep',
                        required=False,
                        default=None,
                        help='For stepped HWP, the angle in degrees '
                        'of each step')
    parser.add_argument('--hwpsteptime',
                        required=False,
                        default=0.0,
                        type=np.float,
                        help='For stepped HWP, the time in seconds '
                        'between steps')

    parser.add_argument('--input_map',
                        required=False,
                        help='Input map for signal')

    parser.add_argument('--skip_bin',
                        required=False,
                        default=False,
                        action='store_true',
                        help='Disable binning the map.')
    parser.add_argument('--skip_hits',
                        required=False,
                        default=False,
                        action='store_true',
                        help='Do not save the 3x3 matrices and hitmaps')

    parser.add_argument('--fp_radius',
                        required=False,
                        default=1,
                        type=np.float,
                        help='Focal plane radius assumed in the atmospheric '
                        'simulation.')

    parser.add_argument('--outdir',
                        required=False,
                        default='out',
                        help='Output directory')
    parser.add_argument('--zip',
                        required=False,
                        default=False,
                        action='store_true',
                        help='Compress the output fits files')
    parser.add_argument('--debug',
                        required=False,
                        default=False,
                        action='store_true',
                        help='Write diagnostics')
    parser.add_argument('--flush',
                        required=False,
                        default=False,
                        action='store_true',
                        help='Flush every print statement.')
    parser.add_argument('--nside',
                        required=False,
                        default=512,
                        type=np.int,
                        help='Healpix NSIDE')
    parser.add_argument('--madam_iter_max',
                        required=False,
                        default=1000,
                        type=np.int,
                        help='Maximum number of CG iterations in Madam')
    parser.add_argument('--madam_baseline_length',
                        required=False,
                        default=10000.0,
                        type=np.float,
                        help='Destriping baseline length (seconds)')
    parser.add_argument('--madam_baseline_order',
                        required=False,
                        default=0,
                        type=np.int,
                        help='Destriping baseline polynomial order')
    parser.add_argument('--madam_noisefilter',
                        required=False,
                        default=False,
                        action='store_true',
                        help='Destripe with the noise filter enabled')
    parser.add_argument('--madam',
                        required=False,
                        default=False,
                        action='store_true',
                        help='If specified, use libmadam for map-making')
    parser.add_argument('--madampar',
                        required=False,
                        default=None,
                        help='Madam parameter file')
    parser.add_argument('--madam_allreduce',
                        required=False,
                        default=False,
                        action='store_true',
                        help='Use allreduce communication in Madam')
    parser.add_argument('--common_flag_mask',
                        required=False,
                        default=1,
                        type=np.uint8,
                        help='Common flag mask')
    parser.add_argument(
        '--fp',
        required=False,
        default=None,
        help='Pickle file containing a dictionary of detector '
        'properties.  The keys of this dict are the detector '
        'names, and each value is also a dictionary with keys '
        '"quat" (4 element ndarray), "fwhm" (float, arcmin), '
        '"fknee" (float, Hz), "alpha" (float), and '
        '"NET" (float).  For optional plotting, the key "color"'
        ' can specify a valid matplotlib color string.')
    parser.add_argument('--tidas',
                        required=False,
                        default=None,
                        help='Output TIDAS export path')

    args = timing.add_arguments_and_parse(parser, timing.FILE(noquotes=True))

    if args.tidas is not None:
        if not tt.tidas_available:
            raise RuntimeError("TIDAS not found- cannot export")

    if comm.comm_world.rank == 0:
        print('\nAll parameters:')
        print(args, flush=args.flush)
        print('')

    if args.groupsize:
        comm = toast.Comm(groupsize=args.groupsize)

    if comm.comm_world.rank == 0:
        if not os.path.isdir(args.outdir):
            try:
                os.makedirs(args.outdir)
            except FileExistsError:
                pass

    return args, comm
Example 5
def main():

    # This is the 2-level toast communicator.  By default,
    # there is just one group which spans MPI_COMM_WORLD.
    comm = toast.Comm()

    if comm.comm_world.rank == 0:
        print('Running with {} processes at {}'.format(comm.comm_world.size,
                                                       str(datetime.now())),
              flush=True)

    global_timer = timing.simple_timer('Total time')
    global_timer.start()

    args, comm = parse_arguments(comm)

    autotimer = timing.auto_timer("@{}".format(timing.FILE()))

    # Load and broadcast the schedule file

    site, all_ces = load_schedule(args, comm)

    # load or simulate the focalplane

    fp, detweights = load_fp(args, comm)

    # Create the TOAST data object to match the schedule.  This will
    # include simulating the boresight pointing.

    data = create_observations(args, comm, fp, all_ces, site)

    # Expand boresight quaternions into detector pointing weights and
    # pixel numbers

    expand_pointing(args, comm, data)

    # Prepare auxiliary information for distributed map objects

    localpix, localsm, subnpix = get_submaps(args, comm, data)

    # Scan input map

    signalname = scan_signal(args, comm, data, localsm, subnpix)

    # Set up objects to take copies of the TOD at appropriate times

    signalname_madam, sigcopy_madam, sigclear \
        = setup_sigcopy(args, comm, signalname)

    common_flag_name = None
    flag_name = None

    invnpp, zmap, invnpp_group, zmap_group, flag_name, common_flag_name \
        = build_npp(args, comm, data, localsm, subnpix, detweights,
                    flag_name, common_flag_name)

    madampars = setup_madam(args, comm)

    output_tidas(args, comm, data, signalname, common_flag_name, flag_name)

    outpath = setup_output(args, comm)

    # Make a copy of the signal for Madam

    copy_signal_madam(args, comm, data, sigcopy_madam)

    # Bin unprocessed signal for reference

    bin_maps(args, comm, data, 'binned', zmap, invnpp, zmap_group,
             invnpp_group, detweights, signalname, flag_name, common_flag_name,
             outpath)

    # Filter signal

    apply_polyfilter(args, comm, data, signalname)

    apply_groundfilter(args, comm, data, signalname)

    # Bin the filtered signal

    if args.polyorder or args.wbin_ground:
        bin_maps(args, comm, data, 'filtered', zmap, invnpp, zmap_group,
                 invnpp_group, detweights, signalname, flag_name,
                 common_flag_name, outpath)

    clear_signal(args, comm, data, sigclear)

    # Now run Madam on the unprocessed copy of the signal

    apply_madam(args, comm, data, madampars, outpath, detweights,
                signalname_madam, flag_name, common_flag_name)

    comm.comm_world.barrier()
    global_timer.stop()
    if comm.comm_world.rank == 0:
        global_timer.report()
Example 6
def main():

    comm = MPI.COMM_WORLD

    if comm.rank == 0:
        print("Running with {} processes".format(comm.size))

    parser = argparse.ArgumentParser( description='Read a toast covariance matrix and invert it.' )
    parser.add_argument( '--input', required=True, default=None, help='The input covariance FITS file' )
    parser.add_argument( '--output', required=False, default=None, help='The output inverse covariance FITS file.' )
    parser.add_argument( '--rcond', required=False, default=None, help='Optionally write the inverse condition number map to this file.' )
    parser.add_argument( '--single', required=False, default=False, action='store_true', help='Write the output in single precision.' )
    parser.add_argument( '--threshold', required=False, default=1e-3, type=np.float, help='Reciprocal condition number threshold' )
    
    args = timing.add_arguments_and_parse(parser, timing.FILE(noquotes=True))

    autotimer = timing.auto_timer(timing.FILE())

    # get options

    infile = args.input
    outfile = None
    if args.output is not None:
        outfile = args.output
    else:
        inmat = re.match(r'(.*)\.fits', infile)
        if inmat is None:
            print("input file should have .fits extension")
            sys.exit(0)
        inroot = inmat.group(1)
        outfile = "{}_inv.fits".format(inroot)

    # We need to read the header to get the size of the matrix.
    # This would be a trivial function call in astropy.fits or
    # fitsio, but we don't want to bring in a whole new dependency
    # just for that.  Instead, we open the file with healpy in memmap
    # mode so that nothing is actually read except the header.
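    # (For reference, a minimal sketch of the same header read with astropy,
    # if that dependency were acceptable:
    #     from astropy.io import fits
    #     with fits.open(infile) as hdul:
    #         nside = int(hdul[1].header["NSIDE"])
    #         ncovnz = int(hdul[1].header["TFIELDS"])
    # )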

    nside = 0
    ncovnz = 0
    if comm.rank == 0:
        fake, head = hp.read_map(infile, h=True, memmap=True)
        for key, val in head:
            if key == 'NSIDE':
                nside = int(val)
            if key == 'TFIELDS':
                ncovnz = int(val)
    nside = comm.bcast(nside, root=0)
    ncovnz = comm.bcast(ncovnz, root=0)
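    # The covariance stores only the upper triangle of the symmetric
    # nnz x nnz per-pixel block, so ncovnz = nnz * (nnz + 1) / 2; the
    # expression below inverts that relation (with rounding) to recover nnz.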

    nnz = int( ( (np.sqrt(8.0*ncovnz) - 1.0) / 2.0 ) + 0.5 )

    npix = 12 * nside**2
    subnside = int(nside / 16)
    if subnside == 0:
        subnside = 1
    subnpix = 12 * subnside**2
    nsubmap = int( npix / subnpix )

    # divide the submaps as evenly as possible among processes

    dist = toast.distribute_uniform(nsubmap, comm.size)
    local = np.arange(dist[comm.rank][0], dist[comm.rank][0] + dist[comm.rank][1])

    if comm.rank == 0:
        if os.path.isfile(outfile):
            os.remove(outfile)
    comm.barrier()

    # create the covariance and inverse condition number map

    cov = None
    invcov = None
    rcond = None

    cov = tm.DistPixels(comm=comm, dtype=np.float64, size=npix, nnz=ncovnz, submap=subnpix, local=local)
    if args.single:
        invcov = tm.DistPixels(comm=comm, dtype=np.float32, size=npix, nnz=ncovnz, submap=subnpix, local=local)
    else:
        invcov = cov
    if args.rcond is not None:
        rcond = tm.DistPixels(comm=comm, dtype=np.float64, size=npix, nnz=nnz, submap=subnpix, local=local)

    # read the covariance

    cov.read_healpix_fits(infile)

    # every process computes its local piece

    tm.covariance_invert(cov, args.threshold, rcond=rcond)

    if args.single:
        invcov.data[:] = cov.data.astype(np.float32)

    # write the inverted covariance

    invcov.write_healpix_fits(outfile)

    # write the condition number

    if args.rcond is not None:
        rcond.write_healpix_fits(args.rcond)

    return
Example 7
def main():

    if MPI.COMM_WORLD.rank == 0:
        print("Running with {} processes".format(MPI.COMM_WORLD.size),
              flush=True)

    global_start = MPI.Wtime()

    parser = argparse.ArgumentParser(
        description="Read existing data and make a simple map.",
        fromfile_prefix_chars="@",
    )

    parser.add_argument(
        "--groupsize",
        required=False,
        type=int,
        default=0,
        help="size of processor groups used to distribute "
        "observations",
    )

    parser.add_argument(
        "--hwprpm",
        required=False,
        type=float,
        default=0.0,
        help="The rate (in RPM) of the HWP rotation",
    )

    parser.add_argument(
        "--samplerate",
        required=False,
        default=100.0,
        type=np.float,
        help="Detector sample rate (Hz)",
    )

    parser.add_argument("--outdir",
                        required=False,
                        default="out",
                        help="Output directory")

    parser.add_argument("--nside",
                        required=False,
                        type=int,
                        default=64,
                        help="Healpix NSIDE")

    parser.add_argument(
        "--subnside",
        required=False,
        type=int,
        default=8,
        help="Distributed pixel sub-map NSIDE",
    )

    parser.add_argument("--coord",
                        required=False,
                        default="E",
                        help="Sky coordinate system [C,E,G]")

    parser.add_argument(
        "--baseline",
        required=False,
        type=float,
        default=60.0,
        help="Destriping baseline length (seconds)",
    )

    parser.add_argument(
        "--noisefilter",
        required=False,
        default=False,
        action="store_true",
        help="Destripe with the noise filter enabled",
    )

    parser.add_argument(
        "--madam",
        required=False,
        default=False,
        action="store_true",
        help="If specified, use libmadam for map-making",
    )

    parser.add_argument("--madampar",
                        required=False,
                        default=None,
                        help="Madam parameter file")

    parser.add_argument(
        "--polyorder",
        required=False,
        type=int,
        help="Polynomial order for the polyfilter",
    )

    parser.add_argument(
        "--wbin_ground",
        required=False,
        type=float,
        help="Ground template bin width [degrees]",
    )

    parser.add_argument(
        "--flush",
        required=False,
        default=False,
        action="store_true",
        help="Flush every print statement.",
    )

    parser.add_argument("--tidas",
                        required=False,
                        default=None,
                        help="Input TIDAS volume")

    parser.add_argument("--tidas_detgroup",
                        required=False,
                        default=None,
                        help="TIDAS detector group")

    parser.add_argument("--spt3g",
                        required=False,
                        default=None,
                        help="Input SPT3G data directory")

    parser.add_argument(
        "--spt3g_prefix",
        required=False,
        default=None,
        help="SPT3G data frame file prefix",
    )

    parser.add_argument(
        "--common_flag_mask",
        required=False,
        default=0,
        type=np.uint8,
        help="Common flag mask",
    )

    parser.add_argument(
        "--debug",
        required=False,
        default=False,
        action="store_true",
        help="Write data distribution info and focalplane plot",
    )

    args = timing.add_arguments_and_parse(parser, timing.FILE(noquotes=True))
    # args = parser.parse_args(sys.argv)

    autotimer = timing.auto_timer("@{}".format(timing.FILE()))

    if (args.tidas is not None) and (args.spt3g is not None):
        raise RuntimeError("Cannot read two datasets!")

    if (args.tidas is None) and (args.spt3g is None):
        raise RuntimeError("No dataset specified!")

    if args.tidas is not None:
        if not tt.tidas_available:
            raise RuntimeError("TIDAS not found- cannot load")

    if args.spt3g is not None:
        if not tt.spt3g_available:
            raise RuntimeError("SPT3G not found- cannot load")

    groupsize = args.groupsize
    if groupsize == 0:
        groupsize = MPI.COMM_WORLD.size

    # Pixelization

    nside = args.nside
    npix = 12 * args.nside * args.nside
    subnside = args.subnside
    if subnside > nside:
        subnside = nside
    subnpix = 12 * subnside * subnside

    # This is the 2-level toast communicator.

    if MPI.COMM_WORLD.size % groupsize != 0:
        if MPI.COMM_WORLD.rank == 0:
            print(
                "WARNING:  process groupsize does not evenly divide into "
                "total number of processes",
                flush=True,
            )
    comm = toast.Comm(world=MPI.COMM_WORLD, groupsize=groupsize)

    # Create output directory

    mtime = MPI.Wtime()

    if comm.comm_world.rank == 0:
        if not os.path.isdir(args.outdir):
            os.makedirs(args.outdir)

    mtime = elapsed(comm.comm_world, mtime, "Creating output directory")

    # The distributed timestream data

    data = None

    if args.tidas is not None:
        if args.tidas_detgroup is None:
            raise RuntimeError("you must specify the detector group")
        data = tds.load_tidas(
            comm,
            comm.group_size,
            args.tidas,
            "r",
            args.tidas_detgroup,
            tds.TODTidas,
            group_dets=args.tidas_detgroup,
            distintervals="chunks",
        )

    if args.spt3g is not None:
        if args.spt3g_prefix is None:
            raise RuntimeError("you must specify the frame file prefix")
        data = s3g.load_spt3g(
            comm,
            comm.group_size,
            args.spt3g,
            args.spt3g_prefix,
            s3g.obsweight_spt3g,
            s3g.TOD3G,
        )

    mtime = elapsed(comm.comm_world, mtime, "Distribute data")

    # In debug mode, print out data distribution information

    if args.debug:
        handle = None
        if comm.comm_world.rank == 0:
            handle = open("{}_distdata.txt".format(args.outdir), "w")
        data.info(handle)
        if comm.comm_world.rank == 0:
            handle.close()
        mtime = elapsed(comm.comm_world, mtime,
                        "Dumping debug data distribution")
        if comm.comm_world.rank == 0:
            outfile = "{}_focalplane.png".format(args.outdir)
            set_backend()
            # Just plot the dets from the first TOD
            temptod = data.obs[0]["tod"]
            # FIXME: change this once we store det info in the metadata.
            dfwhm = {x: 10.0 for x in temptod.detectors}
            tt.plot_focalplane(temptod.detoffset(),
                               10.0,
                               10.0,
                               outfile,
                               fwhm=dfwhm)
        comm.comm_world.barrier()
        mtime = elapsed(comm.comm_world, mtime, "Plotting debug focalplane")

    # Compute pointing matrix

    pointing = tt.OpPointingHpix(nside=args.nside,
                                 nest=True,
                                 mode="IQU",
                                 hwprpm=args.hwprpm)
    pointing.exec(data)

    mtime = elapsed(comm.comm_world, mtime, "Expand pointing")

    # Mapmaking.

    # FIXME:  We potentially have a different noise model for every
    # observation.  We need to have both spt3g and tidas format Noise
    # classes which read the information from disk.  Then the mapmaking
    # operators need to get these noise weights from each observation.
    detweights = {d: 1.0 for d in data.obs[0]["tod"].detectors}

    if not args.madam:
        if comm.comm_world.rank == 0:
            print("Not using Madam, will only make a binned map!", flush=True)

        # Filter data if desired

        if args.polyorder:
            polyfilter = tt.OpPolyFilter(
                order=args.polyorder, common_flag_mask=args.common_flag_mask)
            polyfilter.exec(data)
            mtime = elapsed(comm.comm_world, mtime, "Polynomial filtering")

        if args.wbin_ground:
            groundfilter = tt.OpGroundFilter(
                wbin=args.wbin_ground, common_flag_mask=args.common_flag_mask)
            groundfilter.exec(data)
            mtime = elapsed(comm.comm_world, mtime,
                            "Ground template filtering")

        # Compute pixel space distribution

        lc = tm.OpLocalPixels()
        localpix = lc.exec(data)
        if localpix is None:
            raise RuntimeError(
                "Process {} has no hit pixels. Perhaps there are fewer "
                "detectors than processes in the group?".format(
                    comm.comm_world.rank))
        localsm = np.unique(np.floor_divide(localpix, subnpix))
        mtime = elapsed(comm.comm_world, mtime, "Compute local submaps")

        # construct distributed maps to store the covariance,
        # noise weighted map, and hits
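        # (nnz per pixel: 6 = upper triangle of the 3x3 IQU pixel covariance,
        # 1 = integer hit count, 3 = the IQU noise-weighted map.)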

        mtime = MPI.Wtime()
        invnpp = tm.DistPixels(
            comm=comm.comm_world,
            size=npix,
            nnz=6,
            dtype=np.float64,
            submap=subnpix,
            local=localsm,
        )
        hits = tm.DistPixels(
            comm=comm.comm_world,
            size=npix,
            nnz=1,
            dtype=np.int64,
            submap=subnpix,
            local=localsm,
        )
        zmap = tm.DistPixels(
            comm=comm.comm_world,
            size=npix,
            nnz=3,
            dtype=np.float64,
            submap=subnpix,
            local=localsm,
        )

        # compute the hits and covariance.

        invnpp.data.fill(0.0)
        hits.data.fill(0)

        build_invnpp = tm.OpAccumDiag(
            detweights=detweights,
            invnpp=invnpp,
            hits=hits,
            common_flag_mask=args.common_flag_mask,
        )
        build_invnpp.exec(data)

        invnpp.allreduce()
        hits.allreduce()
        mtime = elapsed(comm.comm_world, mtime, "Building hits and N_pp^-1")

        hits.write_healpix_fits("{}_hits.fits".format(args.outdir))
        invnpp.write_healpix_fits("{}_invnpp.fits".format(args.outdir))
        mtime = elapsed(comm.comm_world, mtime, "Writing hits and N_pp^-1")

        # invert it
        tm.covariance_invert(invnpp, 1.0e-3)
        mtime = elapsed(comm.comm_world, mtime, "Inverting N_pp^-1")

        invnpp.write_healpix_fits("{}_npp.fits".format(args.outdir))
        mtime = elapsed(comm.comm_world, mtime, "Writing N_pp")

        zmap.data.fill(0.0)
        build_zmap = tm.OpAccumDiag(zmap=zmap,
                                    detweights=detweights,
                                    common_flag_mask=args.common_flag_mask)
        build_zmap.exec(data)
        zmap.allreduce()
        mtime = elapsed(comm.comm_world, mtime, "Building noise weighted map")

        tm.covariance_apply(invnpp, zmap)
        mtime = elapsed(comm.comm_world, mtime, "Computing binned map")

        zmap.write_healpix_fits(os.path.join(args.outdir, "binned.fits"))
        mtime = elapsed(comm.comm_world, mtime, "Writing binned map")

    else:
        # Set up MADAM map making.

        pars = {}
        pars["temperature_only"] = "F"
        pars["force_pol"] = "T"
        pars["kfirst"] = "T"
        pars["concatenate_messages"] = "T"
        pars["write_map"] = "T"
        pars["write_binmap"] = "T"
        pars["write_matrix"] = "T"
        pars["write_wcov"] = "T"
        pars["write_hits"] = "T"
        pars["nside_cross"] = nside // 2
        pars["nside_submap"] = subnside

        if args.madampar is not None:
            pat = re.compile(r"\s*(\S+)\s*=\s*(\S+(\s+\S+)*)\s*")
            comment = re.compile(r"^#.*")
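            # Each non-comment line of the parameter file is expected to look
            # like "key = value"; group(1) is the key and group(2) the
            # (possibly multi-word) value.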
            with open(args.madampar, "r") as f:
                for line in f:
                    if comment.match(line) is None:
                        result = pat.match(line)
                        if result is not None:
                            key, value = result.group(1), result.group(2)
                            pars[key] = value

        pars["base_first"] = args.baseline
        pars["nside_map"] = nside
        if args.noisefilter:
            pars["kfilter"] = "T"
        else:
            pars["kfilter"] = "F"
        pars["fsample"] = args.samplerate

        madam = tm.OpMadam(params=pars,
                           detweights=detweights,
                           common_flag_mask=args.common_flag_mask)
        madam.exec(data)
        mtime = elapsed(comm.comm_world, mtime, "Madam mapmaking")

    comm.comm_world.barrier()
    stop = MPI.Wtime()
    dur = stop - global_start
    if comm.comm_world.rank == 0:
        print("Total Time:  {:.2f} seconds".format(dur), flush=True)
    return
Example 8
                    help="Bandwidth [GHz]")

parser.add_argument("--bandwidth_sigma",
                    required=False,
                    type=float,
                    default=0,
                    help="Relative bandwidth distribution width")

parser.add_argument("--random_seed",
                    required=False,
                    type=np.int,
                    default=123456,
                    help="Random number generator seed for randomized detector"
                    " parameters")

args = timing.add_arguments_and_parse(parser, timing.FILE(noquotes=True))

autotimer = timing.auto_timer(timing.FILE())

# Make one big hexagon layout at the center of the focalplane.
# Compute the number of pixels that is at least the number requested.

test = args.minpix - 1
nrings = 0
while (test - 6 * nrings) > 0:
    test -= 6 * nrings
    nrings += 1

npix = 1
for r in range(1, nrings + 1):
    npix += 6 * r
Example 9
def parse_args():

    parser = argparse.ArgumentParser(
        description='Generate ground observation schedule.',
        fromfile_prefix_chars='@')

    parser.add_argument('--site_name',
                        required=False,
                        default='LBL',
                        help='Observing site name')
    parser.add_argument('--telescope',
                        required=False,
                        default='Telescope',
                        help='Observing telescope name')
    parser.add_argument('--site_lon',
                        required=False,
                        default='-122.247',
                        help='Observing site longitude [PyEphem string]')
    parser.add_argument('--site_lat',
                        required=False,
                        default='37.876',
                        help='Observing site latitude [PyEphem string]')
    parser.add_argument('--site_alt',
                        required=False,
                        default=100.0,
                        type=np.float,
                        help='Observing site altitude [meters]')
    parser.add_argument('--patch',
                        required=True,
                        action='append',
                        help='Patch definition: '
                        'name,weight,lon1,lat1,lon2,lat2 ... '
                        'OR name,weight,lon,lat,width')
    parser.add_argument('--patch_coord',
                        required=False,
                        default='C',
                        help='Sky patch coordinate system [C,E,G]')
    parser.add_argument('--el_min',
                        required=False,
                        default=30.0,
                        type=np.float,
                        help='Minimum elevation for a CES')
    parser.add_argument('--el_max',
                        required=False,
                        default=80.0,
                        type=np.float,
                        help='Maximum elevation for a CES')
    parser.add_argument('--fp_radius',
                        required=False,
                        default=0.0,
                        type=np.float,
                        help='Focal plane radius [deg]')
    parser.add_argument('--sun_avoidance_angle',
                        required=False,
                        default=-15.0,
                        type=np.float,
                        help='Solar elevation above which to apply '
                        'sun_angle_min [deg]')
    parser.add_argument('--sun_angle_min',
                        required=False,
                        default=30.0,
                        type=np.float,
                        help='Minimum distance between the Sun and '
                        'the boresight [deg]')
    parser.add_argument('--moon_angle_min',
                        required=False,
                        default=20.0,
                        type=np.float,
                        help='Minimum distance between the Moon and '
                        'the boresight [deg]')
    parser.add_argument('--sun_el_max',
                        required=False,
                        default=90.0,
                        type=np.float,
                        help='Maximum allowed sun elevation [deg]')
    parser.add_argument('--start',
                        required=False,
                        default='2000-01-01 00:00:00',
                        help='UTC start time of the schedule')
    parser.add_argument('--stop',
                        required=False,
                        help='UTC stop time of the schedule')
    parser.add_argument('--operational_days',
                        required=False,
                        type=np.int,
                        help='Number of operational days to schedule '
                        '(empty days do not count)')
    parser.add_argument('--timezone',
                        required=False,
                        type=np.int,
                        default=0,
                        help='Offset to apply to MJD to separate operational '
                        'days [hours]')
    parser.add_argument('--gap',
                        required=False,
                        default=100,
                        type=np.float,
                        help='Gap between CESes [seconds]')
    parser.add_argument('--gap_small',
                        required=False,
                        default=10,
                        type=np.float,
                        help='Gap between split CESes [seconds]')
    parser.add_argument('--one_scan_per_day',
                        required=False,
                        default=False,
                        action='store_true',
                        help='Pad each operational day to have only one CES')
    parser.add_argument('--ces_max_time',
                        required=False,
                        default=900,
                        type=np.float,
                        help='Maximum length of a CES [seconds]')
    parser.add_argument('--debug',
                        required=False,
                        default=False,
                        action='store_true',
                        help='Write diagnostics')
    parser.add_argument('--pole_mode',
                        required=False,
                        default=False,
                        action='store_true',
                        help='Pole scheduling mode (no drift scan)')
    parser.add_argument('--pole_el_step',
                        required=False,
                        default=0.25,
                        type=np.float,
                        help='Elevation step in pole scheduling mode [deg]')
    parser.add_argument('--pole_ces_time',
                        required=False,
                        default=3000,
                        type=np.float,
                        help='Time to scan at constant elevation in pole '
                        'mode [seconds]')
    parser.add_argument('--out',
                        required=False,
                        default='schedule.txt',
                        help='Output filename')

    args = timing.add_arguments_and_parse(parser, timing.FILE(noquotes=True))

    if args.operational_days is None and args.stop is None:
        raise RuntimeError('You must provide --stop or --operational_days')

    stop_time = None
    if args.start.endswith('Z'):
        start_time = dateutil.parser.parse(args.start)
        if args.stop is not None:
            if not args.stop.endswith('Z'):
                raise RuntimeError('Either both or neither times must be '
                                   'given in UTC')
            stop_time = dateutil.parser.parse(args.stop)
    else:
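        # Local (non-UTC) start/stop times: turn the integer --timezone into
        # a UTC offset string, e.g. timezone=-8 -> '-0800', and append it so
        # dateutil parses an offset-aware time.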
        if args.timezone < 0:
            tz = '-{:02}00'.format(-args.timezone)
        else:
            tz = '+{:02}00'.format(args.timezone)
        start_time = dateutil.parser.parse(args.start + tz)
        if args.stop is not None:
            if args.stop.endswith('Z'):
                raise RuntimeError('Either both or neither times must be '
                                   'given in UTC')
            stop_time = dateutil.parser.parse(args.stop + tz)

    start_timestamp = start_time.timestamp()
    if stop_time is None:
        # Keep scheduling until the desired number of operational days is full.
        stop_timestamp = 2**60
    else:
        stop_timestamp = stop_time.timestamp()

    return args, start_timestamp, stop_timestamp
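
The schedule file written here is what --schedule in example 4 expects (that example names this script as toast_ground_schedule.py).  A minimal invocation sketch, with hypothetical values for the name,weight,lon,lat,width patch form described in the --patch help:

toast_ground_schedule.py \
    --patch example_patch,1.0,15.0,-42.0,10.0 \
    --start '2000-01-01 00:00:00' \
    --operational_days 1 \
    --out schedule.txt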
Example 10
def main():

    if MPI.COMM_WORLD.rank == 0:
        print("Running with {} processes".format(MPI.COMM_WORLD.size),
            flush=True)

    global_start = MPI.Wtime()

    parser = argparse.ArgumentParser( description="Simulate satellite "
        "boresight pointing and make a noise map.", fromfile_prefix_chars="@" )

    parser.add_argument( "--groupsize", required=False, type=int, default=0,
        help="size of processor groups used to distribute observations" )

    parser.add_argument( "--samplerate", required=False, type=float,
        default=40.0, help="Detector sample rate (Hz)" )

    parser.add_argument( "--starttime", required=False, type=float,
        default=0.0, help="The overall start time of the simulation" )

    parser.add_argument( "--spinperiod", required=False, type=float,
        default=10.0, help="The period (in minutes) of the rotation about the "
        "spin axis" )
    parser.add_argument( "--spinangle", required=False, type=float,
        default=30.0, help="The opening angle (in degrees) of the boresight "
        "from the spin axis" )

    parser.add_argument( "--precperiod", required=False, type=float,
        default=50.0, help="The period (in minutes) of the rotation about the "
        "precession axis" )
    parser.add_argument( "--precangle", required=False, type=float,
        default=65.0, help="The opening angle (in degrees) of the spin axis "
        "from the precession axis" )

    parser.add_argument( "--hwprpm", required=False, type=float,
        default=0.0, help="The rate (in RPM) of the HWP rotation" )
    parser.add_argument( "--hwpstep", required=False, default=None,
        help="For stepped HWP, the angle in degrees of each step" )
    parser.add_argument( "--hwpsteptime", required=False, type=float,
        default=0.0, help="For stepped HWP, the the time in seconds between "
        "steps" )

    parser.add_argument( "--obs", required=False, type=float, default=1.0,
        help="Number of hours in one science observation" )
    parser.add_argument( "--gap", required=False, type=float, default=0.0,
        help="Cooler cycle time in hours between science obs" )
    parser.add_argument( "--numobs", required=False, type=int, default=1,
        help="Number of complete observations" )

    parser.add_argument( "--outdir", required=False, default="out",
        help="Output directory" )
    parser.add_argument( "--debug", required=False, default=False,
        action="store_true", help="Write diagnostics" )

    parser.add_argument( "--nside", required=False, type=int, default=64,
        help="Healpix NSIDE" )
    parser.add_argument( "--subnside", required=False, type=int, default=4,
        help="Distributed pixel sub-map NSIDE" )

    parser.add_argument( "--baseline", required=False, type=float,
        default=60.0, help="Destriping baseline length (seconds)" )
    parser.add_argument( "--noisefilter", required=False, default=False,
        action="store_true", help="Destripe with the noise filter enabled" )

    parser.add_argument( "--madam", required=False, default=False,
        action="store_true", help="If specified, use libmadam for map-making" )
    parser.add_argument( "--madampar", required=False, default=None,
        help="Madam parameter file" )

    parser.add_argument('--flush',
                        required=False, default=False, action='store_true',
                        help='Flush every print statement.')

    parser.add_argument( "--MC_start", required=False, type=int, default=0,
        help="First Monte Carlo noise realization" )
    parser.add_argument( "--MC_count", required=False, type=int, default=1,
        help="Number of Monte Carlo noise realizations" )

    parser.add_argument( "--fp", required=False, default=None,
        help="Pickle file containing a dictionary of detector properties.  "
        "The keys of this dict are the detector names, and each value is also "
        "a dictionary with keys \"quat\" (4 element ndarray), \"fwhm\" "
        "(float, arcmin), \"fknee\" (float, Hz), \"alpha\" (float), and \"NET\" "
        "(float).  For optional plotting, the key \"color\" can specify a "
        "valid matplotlib color string." )

    parser.add_argument('--tidas',
                        required=False, default=None,
                        help='Output TIDAS export path')

    parser.add_argument('--input_map', required=False,
                        help='Input map for signal')
    parser.add_argument('--input_pysm_model', required=False,
                        help='Comma separated models for on-the-fly PySM '
                        'simulation, e.g. s3,d6,f1,a2')
    parser.add_argument('--apply_beam', required=False, action='store_true',
                        help='Apply beam convolution to input map with gaussian '
                        'beam parameters defined in focalplane')

    parser.add_argument('--input_dipole', required=False,
                        help='Simulate dipole, possible values are '
                        'total, orbital, solar')
    parser.add_argument('--input_dipole_solar_speed_kms', required=False,
                        help='Solar system speed [km/s]', type=float,
                        default=369.0)
    parser.add_argument('--input_dipole_solar_gal_lat_deg', required=False,
                        help='Solar system speed galactic latitude [degrees]',
                        type=float, default=48.26)
    parser.add_argument('--input_dipole_solar_gal_lon_deg', required=False,
                        help='Solar system speed galactic longitude [degrees]',
                        type=float, default=263.99)

    args = timing.add_arguments_and_parse(parser, timing.FILE(noquotes=True))

    autotimer = timing.auto_timer("@{}".format(timing.FILE()))

    if args.tidas is not None:
        if not tt.tidas_available:
            raise RuntimeError("TIDAS not found- cannot export")

    groupsize = args.groupsize
    if groupsize == 0:
        groupsize = MPI.COMM_WORLD.size

    # This is the 2-level toast communicator.

    if MPI.COMM_WORLD.size % groupsize != 0:
        if MPI.COMM_WORLD.rank == 0:
            print("WARNING:  process groupsize does not evenly divide into "
                "total number of processes", flush=True)
    comm = toast.Comm(world=MPI.COMM_WORLD, groupsize=groupsize)

    # get options

    hwpstep = None
    if args.hwpstep is not None:
        hwpstep = float(args.hwpstep)

    npix = 12 * args.nside * args.nside

    subnside = args.subnside
    if subnside > args.nside:
        subnside = args.nside
    subnpix = 12 * subnside * subnside

    start = MPI.Wtime()

    fp = None

    # Load focalplane information

    if comm.comm_world.rank == 0:
        if args.fp is None:
            # in this case, create a fake detector at the boresight
            # with a pure white noise spectrum.
            fake = {}
            fake["quat"] = np.array([0.0, 0.0, 1.0, 0.0])
            fake["fwhm"] = 30.0
            fake["fknee"] = 0.0
            fake["alpha"] = 1.0
            fake["NET"] = 1.0
            fake["color"] = "r"
            fp = {}
            fp["bore"] = fake
        else:
            with open(args.fp, "rb") as p:
                fp = pickle.load(p)
    fp = comm.comm_world.bcast(fp, root=0)

    stop = MPI.Wtime()
    elapsed = stop - start
    if comm.comm_world.rank == 0:
        print("Create focalplane:  {:.2f} seconds".format(stop-start),
            flush=True)
    start = stop

    if args.debug:
        if comm.comm_world.rank == 0:
            outfile = "{}_focalplane.png".format(args.outdir)
            set_backend()
            tt.plot_focalplane(fp, 10.0, 10.0, outfile)

    # Since we are simulating noise timestreams, we want
    # them to be contiguous and reproducible over the whole
    # observation.  We distribute data by detector within an
    # observation, so ensure that our group size is not larger
    # than the number of detectors we have.

    if groupsize > len(fp.keys()):
        if comm.comm_world.rank == 0:
            print("process group is too large for the number of detectors",
                flush=True)
            comm.comm_world.Abort()

    # Detector information from the focalplane

    detectors = sorted(fp.keys())
    detquats = {}
    detindx = None
    if "index" in fp[detectors[0]]:
        detindx = {}

    for d in detectors:
        detquats[d] = fp[d]["quat"]
        if detindx is not None:
            detindx[d] = fp[d]["index"]

    # Distribute the observations uniformly

    groupdist = toast.distribute_uniform(args.numobs, comm.ngroups)

    # Compute global time and sample ranges of all observations

    obsrange = tt.regular_intervals(args.numobs, args.starttime, 0,
        args.samplerate, 3600*args.obs, 3600*args.gap)

    # Create the noise model used for all observations

    fmin = {}
    fknee = {}
    alpha = {}
    NET = {}
    rates = {}
    for d in detectors:
        rates[d] = args.samplerate
        fmin[d] = fp[d]["fmin"]
        fknee[d] = fp[d]["fknee"]
        alpha[d] = fp[d]["alpha"]
        NET[d] = fp[d]["NET"]

    noise = tt.AnalyticNoise(rate=rates, fmin=fmin, detectors=detectors,
        fknee=fknee, alpha=alpha, NET=NET)

    mem_counter = tt.OpMemoryCounter()

    # The distributed timestream data

    data = toast.Data(comm)

    # Every process group creates its observations

    group_firstobs = groupdist[comm.group][0]
    group_numobs = groupdist[comm.group][1]

    for ob in range(group_firstobs, group_firstobs + group_numobs):
        tod = tt.TODSatellite(
            comm.comm_group,
            detquats,
            obsrange[ob].samples,
            firsttime=obsrange[ob].start,
            rate=args.samplerate,
            spinperiod=args.spinperiod,
            spinangle=args.spinangle,
            precperiod=args.precperiod,
            precangle=args.precangle,
            detindx=detindx,
            detranks=comm.group_size
        )

        obs = {}
        obs["name"] = "science_{:05d}".format(ob)
        obs["tod"] = tod
        obs["intervals"] = None
        obs["baselines"] = None
        obs["noise"] = noise
        obs["id"] = ob

        data.obs.append(obs)

    stop = MPI.Wtime()
    elapsed = stop - start
    if comm.comm_world.rank == 0:
        print("Read parameters, compute data distribution:  "
            "{:.2f} seconds".format(stop-start), flush=True)
    start = stop

    # we set the precession axis now, which will trigger calculation
    # of the boresight pointing.

    for ob in range(group_numobs):
        curobs = data.obs[ob]
        tod = curobs["tod"]

        # Get the global sample offset from the original distribution of
        # intervals
        obsoffset = obsrange[group_firstobs + ob].first

        # Constantly slewing precession axis
        degday = 360.0 / 365.25
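        # ~0.9856 deg / day, i.e. one full revolution of the precession axis
        # per year.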
        precquat = tt.slew_precession_axis(nsim=tod.local_samples[1],
            firstsamp=obsoffset, samplerate=args.samplerate,
            degday=degday)

        tod.set_prec_axis(qprec=precquat)

    stop = MPI.Wtime()
    elapsed = stop - start
    if comm.comm_world.rank == 0:
        print("Construct boresight pointing:  "
            "{:.2f} seconds".format(stop-start), flush=True)
    start = stop

    # make a Healpix pointing matrix.

    pointing = tt.OpPointingHpix(nside=args.nside, nest=True, mode="IQU",
        hwprpm=args.hwprpm, hwpstep=hwpstep, hwpsteptime=args.hwpsteptime)
    pointing.exec(data)

    comm.comm_world.barrier()
    stop = MPI.Wtime()
    elapsed = stop - start
    if comm.comm_world.rank == 0:
        print("Pointing generation took {:.3f} s".format(elapsed), flush=True)
    start = stop

    localpix, localsm, subnpix = get_submaps(args, comm, data)

    signalname = "signal"
    if args.input_pysm_model:
        simulate_sky_signal(args, comm, data, mem_counter, [fp], subnpix,
                            localsm, signalname=signalname)

    if args.input_dipole:
        print("Simulating dipole")
        op_sim_dipole = tt.OpSimDipole(mode=args.input_dipole,
                solar_speed=args.input_dipole_solar_speed_kms,
                solar_gal_lat=args.input_dipole_solar_gal_lat_deg,
                solar_gal_lon=args.input_dipole_solar_gal_lon_deg,
                out=signalname,
                keep_quats=True,
                keep_vel=False,
                subtract=False,
                coord="G",
                freq=0,  # we could use frequency for quadrupole correction
                flag_mask=255, common_flag_mask=255)
        op_sim_dipole.exec(data)

    # Mapmaking.  For purposes of this simulation, we use detector noise
    # weights based on the NET (white noise level).  If the destriping
    # baseline is too long, this will not be the best choice.
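    # With NET in K*sqrt(s), samplerate * NET**2 is the white-noise variance
    # of a single sample, so each detector weight below is the inverse of
    # that variance.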

    detweights = {}
    for d in detectors:
        net = fp[d]["NET"]
        detweights[d] = 1.0 / (args.samplerate * net * net)

    if not args.madam:
        if comm.comm_world.rank == 0:
            print("Not using Madam, will only make a binned map!", flush=True)

        # get locally hit pixels
        lc = tm.OpLocalPixels()
        localpix = lc.exec(data)

        # find the locally hit submaps.
        localsm = np.unique(np.floor_divide(localpix, subnpix))

        # construct distributed maps to store the covariance,
        # noise weighted map, and hits

        invnpp = tm.DistPixels(comm=comm.comm_world, size=npix, nnz=6,
            dtype=np.float64, submap=subnpix, local=localsm)
        hits = tm.DistPixels(comm=comm.comm_world, size=npix, nnz=1,
            dtype=np.int64, submap=subnpix, local=localsm)
        zmap = tm.DistPixels(comm=comm.comm_world, size=npix, nnz=3,
            dtype=np.float64, submap=subnpix, local=localsm)

        # compute the hits and covariance once, since the pointing and noise
        # weights are fixed.

        invnpp.data.fill(0.0)
        hits.data.fill(0)

        build_invnpp = tm.OpAccumDiag(detweights=detweights, invnpp=invnpp,
            hits=hits)
        build_invnpp.exec(data)

        invnpp.allreduce()
        hits.allreduce()
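        # The allreduce sums the per-process accumulations so that each
        # process now holds fully accumulated values for its local submaps.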

        comm.comm_world.barrier()
        stop = MPI.Wtime()
        elapsed = stop - start
        if comm.comm_world.rank == 0:
            print("Building hits and N_pp^-1 took {:.3f} s".format(elapsed),
                flush=True)
        start = stop

        hits.write_healpix_fits("{}_hits.fits".format(args.outdir))
        invnpp.write_healpix_fits("{}_invnpp.fits".format(args.outdir))

        comm.comm_world.barrier()
        stop = MPI.Wtime()
        elapsed = stop - start
        if comm.comm_world.rank == 0:
            print("Writing hits and N_pp^-1 took {:.3f} s".format(elapsed),
                flush=True)
        start = stop

        # Invert the pixel noise covariance.  The threshold is a condition
        # number cut: poorly conditioned pixels are set to zero.
        tm.covariance_invert(invnpp, 1.0e-3)

        comm.comm_world.barrier()
        stop = MPI.Wtime()
        elapsed = stop - start
        if comm.comm_world.rank == 0:
            print("Inverting N_pp^-1 took {:.3f} s".format(elapsed),
                flush=True)
        start = stop

        invnpp.write_healpix_fits("{}_npp.fits".format(args.outdir))

        comm.comm_world.barrier()
        stop = MPI.Wtime()
        elapsed = stop - start
        if comm.comm_world.rank == 0:
            print("Writing N_pp took {:.3f} s".format(elapsed),
                flush=True)
        start = stop

        # in debug mode, print out data distribution information
        if args.debug:
            handle = None
            if comm.comm_world.rank == 0:
                handle = open("{}_distdata.txt".format(args.outdir), "w")
            data.info(handle)
            if comm.comm_world.rank == 0:
                handle.close()

            comm.comm_world.barrier()
            stop = MPI.Wtime()
            elapsed = stop - start
            if comm.comm_world.rank == 0:
                print("Dumping debug data distribution took "
                    "{:.3f} s".format(elapsed), flush=True)
            start = stop

        mcstart = start

        # Loop over Monte Carlos

        firstmc = int(args.MC_start)
        nmc = int(args.MC_count)

        for mc in range(firstmc, firstmc+nmc):
            # create output directory for this realization
            outpath = "{}_{:03d}".format(args.outdir, mc)
            if comm.comm_world.rank == 0:
                if not os.path.isdir(outpath):
                    os.makedirs(outpath)

            comm.comm_world.barrier()
            stop = MPI.Wtime()
            elapsed = stop - start
            if comm.comm_world.rank == 0:
                print("Creating output dir {:04d} took {:.3f} s".format(mc,
                    elapsed), flush=True)
            start = stop

            # Clear the accumulated signal timestreams from the cache of
            # every observation, so that new noise can be generated for
            # this realization.
            for obs in data.obs:
                obs["tod"].cache.clear("tot_signal_.*")

            # simulate noise
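            # OpSimNoise draws timestreams from each detector's noise PSD;
            # the realization index is part of the RNG key, so successive
            # Monte Carlo iterations produce independent noise.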

            nse = tt.OpSimNoise(out="tot_signal", realization=mc)
            nse.exec(data)

            # add sky signal
            add_sky_signal(args, comm, data, totalname="tot_signal", signalname=signalname)

            if mc == firstmc:
                # For the first realization, optionally export the
                # timestream data to a TIDAS volume.
                if args.tidas is not None:
                    from toast.tod.tidas import OpTidasExport
                    tidas_path = os.path.abspath(args.tidas)
                    export = OpTidasExport(tidas_path, name="tot_signal")
                    export.exec(data)

            comm.comm_world.barrier()
            stop = MPI.Wtime()
            elapsed = stop - start
            if comm.comm_world.rank == 0:
                print("  Noise simulation {:04d} took {:.3f} s".format(mc,
                    elapsed), flush=True)
            start = stop

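            # Accumulate the noise-weighted map Z = P^T N^-1 d for this
            # realization.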
            zmap.data.fill(0.0)
            build_zmap = tm.OpAccumDiag(zmap=zmap, name="tot_signal",
                                        detweights=detweights)
            build_zmap.exec(data)
            zmap.allreduce()

            comm.comm_world.barrier()
            stop = MPI.Wtime()
            elapsed = stop - start
            if comm.comm_world.rank == 0:
                print("  Building noise weighted map {:04d} took {:.3f} s".format(
                    mc, elapsed), flush=True)
            start = stop

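            # Apply the inverted covariance in place: the binned map is
            # m = (P^T N^-1 P)^-1 P^T N^-1 d.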
            tm.covariance_apply(invnpp, zmap)

            comm.comm_world.barrier()
            stop = MPI.Wtime()
            elapsed = stop - start
            if comm.comm_world.rank == 0:
                print("  Computing binned map {:04d} took {:.3f} s".format(mc,
                    elapsed), flush=True)
            start = stop

            zmap.write_healpix_fits(os.path.join(outpath, "binned.fits"))

            comm.comm_world.barrier()
            stop = MPI.Wtime()
            elapsed = stop - start
            if comm.comm_world.rank == 0:
                print("  Writing binned map {:04d} took {:.3f} s".format(mc,
                    elapsed), flush=True)
            elapsed = stop - mcstart
            if comm.comm_world.rank == 0:
                print("  Mapmaking {:04d} took {:.3f} s".format(mc, elapsed),
                    flush=True)
            start = stop

    else:

        # Set up MADAM map making.

        pars = {}

        cross = args.nside // 2
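        # Madam solves the destriping baselines at this lower resolution
        # (nside_cross), here half of the output map nside.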

        pars[ "temperature_only" ] = "F"
        pars[ "force_pol" ] = "T"
        pars[ "kfirst" ] = "T"
        pars[ "concatenate_messages" ] = "T"
        pars[ "write_map" ] = "T"
        pars[ "write_binmap" ] = "T"
        pars[ "write_matrix" ] = "T"
        pars[ "write_wcov" ] = "T"
        pars[ "write_hits" ] = "T"
        pars[ "nside_cross" ] = cross
        pars[ "nside_submap" ] = subnside

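        # Optionally read additional "key = value" pairs from a Madam
        # parameter file; lines starting with '#' are skipped.  For example,
        # a line such as "nside_cross = 1024" would override the value set
        # above.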
        if args.madampar is not None:
            pat = re.compile(r"\s*(\S+)\s*=\s*(\S+(\s+\S+)*)\s*")
            comment = re.compile(r"^#.*")
            with open(args.madampar, "r") as f:
                for line in f:
                    if comment.match(line) is None:
                        result = pat.match(line)
                        if result is not None:
                            key, value = result.group(1), result.group(2)
                            pars[key] = value

        pars[ "base_first" ] = args.baseline
        pars[ "nside_map" ] = args.nside
        if args.noisefilter:
            pars[ "kfilter" ] = "T"
        else:
            pars[ "kfilter" ] = "F"
        pars[ "fsample" ] = args.samplerate

        # Loop over Monte Carlos

        firstmc = int(args.MC_start)
        nmc = int(args.MC_count)

        for mc in range(firstmc, firstmc+nmc):
            # Clear the accumulated total signal timestreams from the cache
            # of every observation, so that new noise can be generated for
            # this realization.
            for obs in data.obs:
                obs["tod"].cache.clear("tot_signal_.*")

            # simulate noise

            nse = tt.OpSimNoise(out="tot_signal", realization=mc)
            nse.exec(data)

            # add sky signal
            add_sky_signal(args, comm, data, totalname="tot_signal", signalname=signalname)

            comm.comm_world.barrier()
            stop = MPI.Wtime()
            elapsed = stop - start
            if comm.comm_world.rank == 0:
                print("Noise simulation took {:.3f} s".format(elapsed),
                    flush=True)
            start = stop

            # create output directory for this realization
            pars[ "path_output" ] = "{}_{:03d}".format(args.outdir, mc)
            if comm.comm_world.rank == 0:
                if not os.path.isdir(pars["path_output"]):
                    os.makedirs(pars["path_output"])

            # in debug mode, print out data distribution information
            if args.debug:
                handle = None
                if comm.comm_world.rank == 0:
                    handle = open(os.path.join(pars["path_output"],
                        "distdata.txt"), "w")
                data.info(handle)
                if comm.comm_world.rank == 0:
                    handle.close()

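            # Run Madam on the cached "tot_signal" timestreams; the maps,
            # hits and covariances selected by the write_* flags above are
            # written under pars["path_output"].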
            madam = tm.OpMadam(params=pars, detweights=detweights,
                name="tot_signal")
            madam.exec(data)

            comm.comm_world.barrier()
            stop = MPI.Wtime()
            elapsed = stop - start
            if comm.comm_world.rank == 0:
                print("Mapmaking took {:.3f} s".format(elapsed), flush=True)

    comm.comm_world.barrier()
    stop = MPI.Wtime()
    elapsed = stop - global_start
    if comm.comm_world.rank == 0:
        print("Total Time:  {:.2f} seconds".format(elapsed), flush=True)