Example No. 1
# Imports assumed from the full script (module paths per TOAST 2.x):
import argparse

from toast.mpi import Comm, get_world
from toast.utils import Environment, Logger
def main():
    env = Environment.get()
    log = Logger.get()

    parser = argparse.ArgumentParser(
        description="Test the TOAST runtime environment.",
        fromfile_prefix_chars="@")

    parser.add_argument(
        "--groupsize",
        required=False,
        type=int,
        default=0,
        help="size of processor groups used to distribute observations",
    )

    try:
        args = parser.parse_args()
    except SystemExit:
        return

    mpiworld, procs, rank = get_world()
    if rank == 0:
        print(env)
        # numba_threading_layer is assumed to be set by the enclosing module,
        # which detects the Numba threading layer at import time.
        log.info(
            "Numba threading layer set to '{}'".format(numba_threading_layer))
    if mpiworld is None:
        log.info("Running serially with one process")
    else:
        if rank == 0:
            log.info("Running with {} processes".format(procs))

    groupsize = args.groupsize
    if groupsize <= 0:
        groupsize = procs

    if rank == 0:
        log.info("Using group size of {} processes".format(groupsize))

    comm = Comm(world=mpiworld, groupsize=groupsize)

    log.info(
        "Process {}:  world rank {}, group {} of {}, group rank {}".format(
            rank, comm.world_rank, comm.group + 1, comm.ngroups,
            comm.group_rank))

    return
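The Comm construction above is the core pattern: the world communicator is
split into groups of groupsize processes, and each group is assigned a
disjoint set of observations.  A minimal sketch of the same pattern in
isolation (it runs serially, with a single group, when MPI is not available):

from toast.mpi import Comm, get_world

mpiworld, procs, rank = get_world()
comm = Comm(world=mpiworld, groupsize=procs)  # one group spanning all processes
print("group {} of {}, group rank {}".format(
    comm.group + 1, comm.ngroups, comm.group_rank))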
Example No. 2
# (Fragment: the end of main() from toast_satellite_sim.py plus the script
# entry point.  tmr and timer0 are Timer instances created earlier in main().)
    if comm.comm_world is not None:
        comm.comm_world.barrier()
    tmr.stop()
    tmr.clear()
    tmr.start()
    alltimers = gather_timers(comm=comm.comm_world)
    if comm.world_rank == 0:
        out = os.path.join(args.outdir, "timing")
        dump_timing(alltimers, out)
        tmr.stop()
        tmr.report("Gather and dump timing info")
        timer0.report_clear("toast_satellite_sim.py")
    return

# Imports assumed from the full script:
import sys
import traceback

from toast.mpi import get_world


if __name__ == "__main__":
    try:
        main()
    except Exception:
        # We have an unhandled exception on at least one process.  Print a stack
        # trace for this process and then abort so that all processes terminate.
        mpiworld, procs, rank = get_world()
        if procs == 1:
            raise
        exc_type, exc_value, exc_traceback = sys.exc_info()
        lines = traceback.format_exception(exc_type, exc_value, exc_traceback)
        lines = ["Proc {}: {}".format(rank, x) for x in lines]
        print("".join(lines), flush=True)
        if mpiworld is not None:
            mpiworld.Abort(6)
Example No. 3
# Imports assumed from the full toast_benchmark script; helper functions such
# as job_config(), create_schedules(), create_input_maps(), create_focalplanes(),
# create_observations(), setup_sigcopy(), and setup_output() are defined
# elsewhere in that file.
import os
import sys
from datetime import datetime

from toast import pipeline_tools
from toast.mpi import get_world
from toast.timing import GlobalTimers, Timer, dump_timing, gather_timers
from toast.utils import Environment, Logger
def main():
    env = Environment.get()
    env.enable_function_timers()

    log = Logger.get()
    gt = GlobalTimers.get()
    gt.start("toast_benchmark (total)")

    mpiworld, procs, rank = get_world()

    if rank == 0:
        log.info("TOAST version = {}".format(env.version()))
        log.info("Using a maximum of {} threads per process".format(env.max_threads()))
    if mpiworld is None:
        log.info("Running serially with one process at {}".format(str(datetime.now())))
    else:
        if rank == 0:
            log.info(
                "Running with {} processes at {}".format(procs, str(datetime.now()))
            )

    cases = {
        "tiny": 5000000,  # O(1) GB RAM
        "xsmall": 50000000,  # O(10) GB RAM
        "small": 500000000,  # O(100) GB RAM
        "medium": 5000000000,  # O(1) TB RAM
        "large": 50000000000,  # O(10) TB RAM
        "xlarge": 500000000000,  # O(100) TB RAM
        "heroic": 5000000000000,  # O(1000) TB RAM
    }

    args, comm, n_nodes, n_detector, case, group_seconds, n_group = job_config(
        mpiworld, cases
    )

    # Note:  The number of "days" here will just be an approximation of the desired
    # data volume since we are doing a realistic schedule for a real observing site.

    n_days = int(2.0 * (group_seconds * n_group) / (24 * 3600))
    if n_days == 0:
        n_days = 1
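    # For example, four groups observing 86400 s each give
    # n_days = int(2.0 * (86400 * 4) / 86400) = 8.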

    if rank == 0:
        log.info(
            "Using {} detectors for approximately {} days".format(n_detector, n_days)
        )

    # Create the schedule file and input maps on one process
    if rank == 0:
        create_schedules(args, group_seconds, n_days)
        create_input_maps(args)
    if mpiworld is not None:
        mpiworld.barrier()

    if args.dry_run is not None:
        if rank == 0:
            log.info("Exit from dry run")
        # We are done!
        sys.exit(0)

    gt.start("toast_benchmark (science work)")

    # Load and broadcast the schedule file

    schedules = pipeline_tools.load_schedule(args, comm)

    # Load the weather and append to schedules

    pipeline_tools.load_weather(args, comm, schedules)

    # Simulate the focalplane

    detweights = create_focalplanes(args, comm, schedules, n_detector)

    # Create the TOAST data object to match the schedule.  This will
    # include simulating the boresight pointing.

    data, telescope_data, total_samples = create_observations(args, comm, schedules)

    # handle = None
    # if comm.world_rank == 0:
    #     handle = open(os.path.join(args.outdir, "distdata.txt"), "w")
    # data.info(handle)
    # if comm.world_rank == 0:
    #     handle.close()
    # if comm.comm_world is not None:
    #     comm.comm_world.barrier()

    # Split the communicator for day and season mapmaking

    time_comms = pipeline_tools.get_time_communicators(args, comm, data)

    # Expand boresight quaternions into detector pointing weights and
    # pixel numbers

    pipeline_tools.expand_pointing(args, comm, data)

    # Optionally rewrite the noise PSDs in each observation to include
    # elevation dependence

    pipeline_tools.get_elevation_noise(args, comm, data)

    # Purge the pointing if we are NOT going to export the
    # data to a TIDAS volume
    if (args.tidas is None) and (args.spt3g is None):
        for ob in data.obs:
            tod = ob["tod"]
            tod.free_radec_quats()

    # Prepare auxiliary information for distributed map objects

    signalname = pipeline_tools.scan_sky_signal(args, comm, data, "signal")

    # Set up objects to take copies of the TOD at appropriate times

    totalname, totalname_freq = setup_sigcopy(args)

    # Loop over Monte Carlos

    firstmc = args.MC_start
    nsimu = args.MC_count

    freqs = [float(freq) for freq in args.freq.split(",")]
    nfreq = len(freqs)

    for mc in range(firstmc, firstmc + nsimu):

        pipeline_tools.simulate_atmosphere(args, comm, data, mc, totalname)

        # Loop over frequencies with identical focal planes and identical
        # atmospheric noise.

        for ifreq, freq in enumerate(freqs):

            if comm.world_rank == 0:
                log.info(
                    "Processing frequency {}GHz {} / {}, MC = {}".format(
                        freq, ifreq + 1, nfreq, mc
                    )
                )

            # Make a copy of the atmosphere so we can scramble the gains and apply
            # frequency-dependent scaling.
            pipeline_tools.copy_signal(args, comm, data, totalname, totalname_freq)

            pipeline_tools.scale_atmosphere_by_frequency(
                args, comm, data, freq=freq, mc=mc, cache_name=totalname_freq
            )

            pipeline_tools.update_atmospheric_noise_weights(args, comm, data, freq, mc)

            # Add previously simulated sky signal to the atmospheric noise.

            pipeline_tools.add_signal(
                args, comm, data, totalname_freq, signalname, purge=(nsimu == 1)
            )

            mcoffset = ifreq * 1000000
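            # Offsetting the MC index by frequency ensures that each frequency
            # band draws an independent noise realization and gain scramble.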

            pipeline_tools.simulate_noise(
                args, comm, data, mc + mcoffset, totalname_freq
            )

            pipeline_tools.scramble_gains(
                args, comm, data, mc + mcoffset, totalname_freq
            )

            outpath = setup_output(args, comm, mc + mcoffset, freq)

            # Bin and destripe maps

            pipeline_tools.apply_mapmaker(
                args,
                comm,
                data,
                outpath,
                totalname_freq,
                time_comms=time_comms,
                telescope_data=telescope_data,
                first_call=(mc == firstmc),
            )

            if args.apply_polyfilter or args.apply_groundfilter:

                # Filter signal

                pipeline_tools.apply_polyfilter(args, comm, data, totalname_freq)

                pipeline_tools.apply_groundfilter(args, comm, data, totalname_freq)

                # Bin filtered maps

                pipeline_tools.apply_mapmaker(
                    args,
                    comm,
                    data,
                    outpath,
                    totalname_freq,
                    time_comms=time_comms,
                    telescope_data=telescope_data,
                    first_call=False,
                    extra_prefix="filtered",
                    bin_only=True,
                )

    gt.stop_all()
    if mpiworld is not None:
        mpiworld.barrier()

    runtime = gt.seconds("toast_benchmark (science work)")
    prefactor = 1.0e-3
    kilo_samples = 1.0e-3 * total_samples
    sample_factor = 1.2
    det_factor = 2.0
    metric = (
        prefactor
        * n_detector ** det_factor
        * kilo_samples ** sample_factor
        / (n_nodes * runtime)
    )
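    # In closed form:
    #   metric = 1.0e-3 * n_detector**2.0 * (total_samples / 1000)**1.2
    #            / (n_nodes * runtime)
    # i.e. the score rewards processing more detectors and samples per
    # node-second of runtime.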
    if rank == 0:
        msg = "Science Metric: {:0.1e} * ({:d}**{:0.2f}) * ({:0.3e}**{:0.3f}) / ({:0.1f} * {}) = {:0.2f}".format(
            prefactor,
            n_detector,
            det_factor,
            kilo_samples,
            sample_factor,
            runtime,
            n_nodes,
            metric,
        )
        log.info("")
        log.info(msg)
        log.info("")
        with open(os.path.join(args.outdir, "log"), "a") as f:
            f.write(msg)
            f.write("\n\n")

    timer = Timer()
    timer.start()
    alltimers = gather_timers(comm=mpiworld)
    if comm.world_rank == 0:
        out = os.path.join(args.outdir, "timing")
        dump_timing(alltimers, out)
        with open(os.path.join(args.outdir, "log"), "a") as f:
            f.write("Copy of Global Timers:\n")
            with open("{}.csv".format(out), "r") as t:
                f.write(t.read())
        timer.stop()
        timer.report("Gather and dump timing info")
    return
Example No. 4
# Imports assumed from the full script (module paths per TOAST 2.x):
import argparse
import os
import re

import healpy as hp
import numpy as np

from toast.dist import distribute_uniform
from toast.map import DistPixels, covariance_invert
from toast.mpi import get_world
from toast.utils import Logger
def main():
    log = Logger.get()

    parser = argparse.ArgumentParser(
        description="Read a toast covariance matrix and invert it.")

    parser.add_argument("--input",
                        required=True,
                        default=None,
                        help="The input covariance FITS file")

    parser.add_argument(
        "--output",
        required=False,
        default=None,
        help="The output inverse covariance FITS file.",
    )

    parser.add_argument(
        "--rcond",
        required=False,
        default=None,
        help="Optionally write the inverse condition number map to this file.",
    )

    parser.add_argument(
        "--single",
        required=False,
        default=False,
        action="store_true",
        help="Write the output in single precision.",
    )

    parser.add_argument(
        "--threshold",
        required=False,
        default=1e-3,
        type=float,
        help="Reciprocal condition number threshold",
    )

    try:
        args = parser.parse_args()
    except SystemExit:
        return

    # get options

    infile = args.input
    outfile = None
    if args.output is not None:
        outfile = args.output
    else:
        inmat = re.match(r"(.*)\.fits", infile)
        if inmat is None:
            log.error("input file should have .fits extension")
            return
        inroot = inmat.group(1)
        outfile = "{}_inv.fits".format(inroot)

    # Get the default communicator
    mpiworld, procs, rank = get_world()

    # We need to read the header to get the size of the matrix.
    # This would be a trivial function call in astropy.fits or
    # fitsio, but we don't want to bring in a whole new dependency
    # just for that.  Instead, we open the file with healpy in memmap
    # mode so that nothing is actually read except the header.

    nside = 0
    ncovnz = 0
    if rank == 0:
        fake, head = hp.read_map(infile, h=True, memmap=True)
        for key, val in head:
            if key == "NSIDE":
                nside = int(val)
            if key == "TFIELDS":
                ncovnz = int(val)
    if mpiworld is not None:
        nside = mpiworld.bcast(nside, root=0)
        ncovnz = mpiworld.bcast(ncovnz, root=0)

    nnz = int(((np.sqrt(8.0 * ncovnz) - 1.0) / 2.0) + 0.5)
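    # The covariance table stores the upper triangle of an nnz x nnz block per
    # pixel, so ncovnz = nnz * (nnz + 1) / 2.  Inverting the quadratic gives
    # nnz = (sqrt(8 * ncovnz + 1) - 1) / 2; the expression above approximates
    # this, and the +0.5 rounds to the nearest integer.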

    npix = 12 * nside**2
    subnside = int(nside / 16)
    if subnside == 0:
        subnside = 1
    subnpix = 12 * subnside**2
    nsubmap = int(npix / subnpix)

    # divide the submaps as evenly as possible among processes

    dist = distribute_uniform(nsubmap, procs)
    local = np.arange(dist[rank][0], dist[rank][0] + dist[rank][1])
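    # distribute_uniform(nsubmap, procs) returns one (offset, nelem) pair per
    # process; e.g. 10 submaps over 3 processes would yield something like
    # [(0, 4), (4, 3), (7, 3)].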

    if rank == 0:
        if os.path.isfile(outfile):
            os.remove(outfile)

    if mpiworld is not None:
        mpiworld.barrier()

    # create the covariance and inverse condition number map

    cov = None
    invcov = None
    rcond = None

    cov = DistPixels(
        comm=mpiworld,
        dtype=np.float64,
        size=npix,
        nnz=ncovnz,
        submap=subnpix,
        local=local,
    )

    if args.single:
        invcov = DistPixels(
            comm=mpiworld,
            dtype=np.float32,
            size=npix,
            nnz=ncovnz,
            submap=subnpix,
            local=local,
        )
    else:
        invcov = cov

    if args.rcond is not None:
        rcond = DistPixels(
            comm=mpiworld,
            dtype=np.float64,
            size=npix,
            nnz=nnz,
            submap=subnpix,
            local=local,
        )

    # read the covariance
    if rank == 0:
        log.info("Reading covariance from {}".format(infile))
    cov.read_healpix_fits(infile)

    # every process computes its local piece
    if rank == 0:
        log.info("Inverting covariance")
    covariance_invert(cov, args.threshold, rcond=rcond)

    if args.single:
        invcov.data[:] = cov.data.astype(np.float32)

    # write the inverted covariance
    if rank == 0:
        log.info("Writing inverted covariance to {}".format(outfile))
    invcov.write_healpix_fits(outfile)

    # write the condition number

    if args.rcond is not None:
        if rank == 0:
            log.info("Writing condition number map")
        rcond.write_healpix_fits(args.rcond)

    return
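The comment in the example above notes that reading just the header would be a
trivial call in astropy.  For reference, a minimal sketch of that alternative
(hypothetical file name; healpy writes the map as a binary table in HDU 1):

from astropy.io import fits

with fits.open("cov.fits", memmap=True) as hdul:
    head = hdul[1].header
    nside = int(head["NSIDE"])
    ncovnz = int(head["TFIELDS"])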
Example No. 5
# (Test fixture fragment: setUp lives inside a unittest.TestCase subclass; the
# full module imports os, toast, get_world, create_outdir, get_example,
# sim_telescope_detectors, TODGround, and AnalyticNoise.)
    def setUp(self):
        fixture_name = os.path.splitext(os.path.basename(__file__))[0]
        if not toast_available:
            print("toast cannot be imported- skipping unit tests", flush=True)
            return

        self.comm, self.procs, self.rank = get_world()

        self.outdir = create_outdir(fixture_name, comm=self.comm)

        toastcomm = toast.Comm()
        self.data = toast.Data(toastcomm)

        # Focalplane
        hwfull = get_example()
        dets = sim_telescope_detectors(hwfull, "SAT4")
        hwfull.data["detectors"] = dets
        hw = hwfull.select(match={
            "wafer_slot": "w42",
            "band": "f030",
            "pixel": "00[01]"
        })
        print(hw.data["detectors"], flush=True)
        detquats = {k: v["quat"] for k, v in hw.data["detectors"].items()}

        # Samples per observation
        self.totsamp = 10000

        # Pixelization
        nside = 512
        self.sim_nside = nside
        self.map_nside = nside

        # Scan properties
        self.site_lon = '-67:47:10'
        self.site_lat = '-22:57:30'
        self.site_alt = 5200.
        self.coord = 'C'
        self.azmin = 45
        self.azmax = 55
        self.el = 60
        self.scanrate = 1.0
        self.scan_accel = 0.1
        self.CES_start = None

        # Noise properties
        self.rate = 100.0
        self.NET = 5.0
        self.epsilon = 0.0
        self.fmin = 1.0e-5
        self.alpha = 1.0
        self.fknee = 0.05

        tod = TODGround(self.data.comm.comm_group,
                        detquats,
                        self.totsamp,
                        detranks=self.data.comm.group_size,
                        firsttime=0.0,
                        rate=self.rate,
                        site_lon=self.site_lon,
                        site_lat=self.site_lat,
                        site_alt=self.site_alt,
                        azmin=self.azmin,
                        azmax=self.azmax,
                        el=self.el,
                        coord=self.coord,
                        scanrate=self.scanrate,
                        scan_accel=self.scan_accel,
                        CES_start=self.CES_start)

        # Analytic noise model
        detnames = list(detquats.keys())
        drate = {x: self.rate for x in detnames}
        dfmin = {x: self.fmin for x in detnames}
        dfknee = {x: self.fknee for x in detnames}
        dalpha = {x: self.alpha for x in detnames}
        dnet = {x: self.NET for x in detnames}
        nse = AnalyticNoise(rate=drate,
                            fmin=dfmin,
                            detectors=detnames,
                            fknee=dfknee,
                            alpha=dalpha,
                            NET=dnet)

        # Single observation
        obs = dict()
        obs["tod"] = tod
        obs["noise"] = nse
        obs["id"] = 12345
        obs["intervals"] = tod.subscans
        obs["site"] = "SimonsObs"
        obs["telescope"] = "SAT4"
        obs["site_id"] = 1
        obs["telescope_id"] = 4
        obs["fpradius"] = 5.0
        obs["start_time"] = 0
        obs["altitude"] = self.site_alt
        obs["name"] = "test"

        # Add the observation to the dataset
        self.data.obs.append(obs)
        return
Example No. 6
# Imports assumed from the full script (module paths per TOAST 2.x):
import argparse
import pickle

import numpy as np

from toast.mpi import get_world
from toast.tod import hex_layout, hex_pol_angles_qu, plot_focalplane
from toast.utils import Logger

degree = np.pi / 180.0
def main():
    log = Logger.get()

    parser = argparse.ArgumentParser(
        description="Simulate fake hexagonal focalplane.",
        fromfile_prefix_chars="@")

    parser.add_argument(
        "--minpix",
        required=False,
        type=int,
        default=100,
        help="minimum number of pixels to use",
    )

    parser.add_argument(
        "--out",
        required=False,
        default="fp_fake",
        help="Root name of output pickle file",
    )

    parser.add_argument("--fwhm",
                        required=False,
                        type=float,
                        default=5.0,
                        help="beam FWHM in arcmin")

    parser.add_argument(
        "--fwhm_sigma",
        required=False,
        type=float,
        default=0,
        help="Relative beam FWHM distribution width",
    )

    parser.add_argument(
        "--fov",
        required=False,
        type=float,
        default=5.0,
        help="Field of View in degrees",
    )

    parser.add_argument(
        "--psd_fknee",
        required=False,
        type=float,
        default=0.05,
        help="Detector noise model f_knee in Hz",
    )

    parser.add_argument(
        "--psd_NET",
        required=False,
        type=float,
        default=60.0e-6,
        help="Detector noise model NET in K*sqrt(sec)",
    )

    parser.add_argument(
        "--psd_alpha",
        required=False,
        type=float,
        default=1.0,
        help="Detector noise model slope",
    )

    parser.add_argument(
        "--psd_fmin",
        required=False,
        type=float,
        default=1.0e-5,
        help="Detector noise model f_min in Hz",
    )

    parser.add_argument(
        "--bandcenter_ghz",
        required=False,
        type=float,
        help="Band center frequency [GHz]",
    )

    parser.add_argument(
        "--bandcenter_sigma",
        required=False,
        type=float,
        default=0,
        help="Relative band center distribution width",
    )

    parser.add_argument("--bandwidth_ghz",
                        required=False,
                        type=float,
                        help="Bandwidth [GHz]")

    parser.add_argument(
        "--bandwidth_sigma",
        required=False,
        type=float,
        default=0,
        help="Relative bandwidth distribution width",
    )

    parser.add_argument(
        "--random_seed",
        required=False,
        type=int,
        default=123456,
        help="Random number generator seed for randomized "
        "detector parameters",
    )

    try:
        args = parser.parse_args()
    except SystemExit:
        return

    # Get the default communicator
    mpiworld, procs, rank = get_world()

    # Only the root process generates and writes the focalplane
    if rank == 0:
        # Make one big hexagon layout at the center of the focalplane.
        # Compute the number of pixels that is at least the number requested.

        test = args.minpix - 1
        nrings = 0
        while (test - 6 * nrings) > 0:
            test -= 6 * nrings
            nrings += 1

        npix = 1
        for r in range(1, nrings + 1):
            npix += 6 * r
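        # Equivalent closed form: npix = 1 + 3 * nrings * (nrings + 1)
        # (one center pixel plus 6 * r pixels in ring r).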

        log.info("using {} pixels ({} detectors)".format(npix, npix * 2))

        # Translate the field-of-view into the distance between flat sides
        angwidth = args.fov * np.cos(30 * degree)

        Apol = hex_pol_angles_qu(npix, offset=0.0)
        Bpol = hex_pol_angles_qu(npix, offset=90.0)

        Adets = hex_layout(npix, angwidth, "fake_", "A", Apol)
        Bdets = hex_layout(npix, angwidth, "fake_", "B", Bpol)

        dets = Adets.copy()
        dets.update(Bdets)

        np.random.seed(args.random_seed)

        for indx, d in enumerate(sorted(dets.keys())):
            dets[d]["fknee"] = args.psd_fknee
            dets[d]["fmin"] = args.psd_fmin
            dets[d]["alpha"] = args.psd_alpha
            dets[d]["NET"] = args.psd_NET
            # This is in degrees, but the input is in arcmin.
            dets[d]["fwhm_deg"] = (args.fwhm / 60.0) * (
                1 + np.random.randn() * args.fwhm_sigma)
            # This is a fixed value, in arcmin.
            dets[d]["fwhm"] = args.fwhm
            if args.bandcenter_ghz:
                dets[d]["bandcenter_ghz"] = args.bandcenter_ghz * (
                    1 + np.random.randn() * args.bandcenter_sigma)
            if args.bandwidth_ghz:
                dets[d]["bandwidth_ghz"] = args.bandwidth_ghz * (
                    1 + np.random.randn() * args.bandwidth_sigma)
            dets[d]["index"] = indx

        outfile = "{}_{}".format(args.out, npix)
        qdets = {x: y["quat"] for x, y in dets.items()}
        beams = {x: (60.0 * y["fwhm_deg"]) for x, y in dets.items()}
        plot_focalplane(qdets,
                        args.fov,
                        args.fov,
                        "{}.png".format(outfile),
                        fwhm=beams)

        with open("{}.pkl".format(outfile), "wb") as p:
            pickle.dump(dets, p)

    return
Example No. 7
# The future import keeps the module-level annotations below unevaluated at
# runtime, so the TYPE_CHECKING guard on the typing imports is sufficient.
from __future__ import annotations

from typing import TYPE_CHECKING

import h5py
import numpy as np
import toast
from numba import jit
from toast.mpi import get_world
from toast.op import Operator
from toast.utils import Logger

if TYPE_CHECKING:
    from typing import Optional, List

COMM: Optional[toast.mpi.Comm]
PROCS: int
RANK: int
COMM, PROCS, RANK = get_world()
LOGGER = Logger.get()
IS_SERIAL = PROCS == 1

H5_CREATE_KW = {
    'compression': 'gzip',
    # shuffle minimize the output size
    'shuffle': True,
    # checksum for data integrity
    'fletcher32': True,
    # turn off track_times so that identical output gives the same md5sum
    'track_times': False
}
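
# A minimal sketch (hypothetical file and dataset names) showing how
# H5_CREATE_KW would be applied when writing a dataset with h5py:
def _example_write(path):
    with h5py.File(path, "w") as f:
        f.create_dataset("tod", data=np.zeros(1024), **H5_CREATE_KW)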


@jit(nopython=True, nogil=True, cache=False)
Example No. 8
# (Test fixture fragment, as in Example No. 5: setUp lives inside a
# unittest.TestCase subclass, with the same imports assumed.)
    def setUp(self):
        fixture_name = os.path.splitext(os.path.basename(__file__))[0]
        if not toast_available:
            print(
                "toast cannot be imported ({})- skipping unit test".format(
                    toast_import_error),
                flush=True,
            )
            return

        self.comm, self.procs, self.rank = get_world()

        self.outdir = create_outdir(fixture_name, comm=self.comm)

        toastcomm = toast.Comm(world=self.comm)
        self.data = toast.Data(toastcomm)

        # Focalplane
        hwfull = get_example()
        dets = sim_telescope_detectors(hwfull, "SAT4")
        hwfull.data["detectors"] = dets
        hw = hwfull.select(match={
            "wafer_slot": "w42",
            "band": "SAT_f030",
            "pixel": "00[01]"
        })
        # print(hw.data["detectors"], flush=True)
        detquats = {k: v["quat"] for k, v in hw.data["detectors"].items()}

        # Samples per observation
        self.totsamp = 10000

        # Scan properties
        self.site_lon = '-67:47:10'
        self.site_lat = '-22:57:30'
        self.site_alt = 5200.
        self.coord = 'C'
        self.azmin = 45
        self.azmax = 55
        self.el = 60
        self.scanrate = 1.0
        self.scan_accel = 0.1
        self.CES_start = None

        # Noise properties
        self.rate = 100.0
        self.NET = 1e-3  # 1 mK NET
        self.epsilon = 0.0
        self.fmin = 1.0e-5
        self.alpha = 1.0
        self.fknee = 0.05

        for ob in range(3):
            ftime = (self.totsamp / self.rate) * ob + 1564015655.88
            tod = TODGround(self.data.comm.comm_group,
                            detquats,
                            self.totsamp,
                            detranks=self.data.comm.group_size,
                            firsttime=ftime,
                            rate=self.rate,
                            site_lon=self.site_lon,
                            site_lat=self.site_lat,
                            site_alt=self.site_alt,
                            azmin=self.azmin,
                            azmax=self.azmax,
                            el=self.el,
                            coord=self.coord,
                            scanrate=self.scanrate,
                            scan_accel=self.scan_accel,
                            CES_start=self.CES_start)

            # Analytic noise model
            detnames = list(detquats.keys())
            drate = {x: self.rate for x in detnames}
            dfmin = {x: self.fmin for x in detnames}
            dfknee = {x: self.fknee for x in detnames}
            dalpha = {x: self.alpha for x in detnames}
            dnet = {x: self.NET for x in detnames}
            nse = AnalyticNoise(rate=drate,
                                fmin=dfmin,
                                detectors=detnames,
                                fknee=dfknee,
                                alpha=dalpha,
                                NET=dnet)

            # Single observation
            obs = dict()
            obs["tod"] = tod
            obs["noise"] = nse
            obs["id"] = 12345
            obs["intervals"] = tod.subscans
            obs["site"] = "SimonsObs"
            obs["telescope"] = "SAT4"
            obs["site_id"] = 1
            obs["telescope_id"] = 4
            obs["fpradius"] = 5.0
            obs["start_time"] = ftime
            obs["altitude"] = self.site_alt
            obs["name"] = "test_{:02}".format(ob)

            # Add a focalplane dictionary with just the detector index
            focalplane = {}
            for idet, det in enumerate(detnames):
                focalplane[det] = {"index": idet}
            obs["focalplane"] = focalplane

            # Add the observation to the dataset
            self.data.obs.append(obs)

        nse = toast.tod.OpSimNoise(out="signal", realization=0)
        nse.exec(self.data)
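        # OpSimNoise draws a per-detector noise realization from the analytic
        # model attached to each observation and stores it in each TOD cache
        # under the "signal" prefix.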

        return