Example 1
def create_observations(args, comm, schedules):
    """ Create and distribute TOAST observations for every CES in schedules.

    """
    log = Logger.get()
    timer = Timer()
    timer.start()

    data = Data(comm)

    # Loop over the schedules, distributing each schedule evenly across
    # the process groups.  For now, we assume that each schedule has the
    # same number of operational days and that the number of process
    # groups matches the number of operational days.  Relaxing these
    # constraints would cause the season break to occur on different
    # process groups for different schedules and prevent splitting the
    # communicator.  (A sketch of this break-aware distribution follows
    # this example.)

    for schedule in schedules:

        telescope = schedule.telescope
        all_ces = schedule.ceslist
        nces = len(all_ces)

        breaks = get_breaks(comm, all_ces, args)

        groupdist = distribute_uniform(nces, comm.ngroups, breaks=breaks)
        group_firstobs = groupdist[comm.group][0]
        group_numobs = groupdist[comm.group][1]

        for ices in range(group_firstobs, group_firstobs + group_numobs):
            obs = create_observation(args, comm, telescope, all_ces[ices])
            data.obs.append(obs)

    if comm.comm_world is None or comm.comm_group.rank == 0:
        log.info("Group # {:4} has {} observations.".format(comm.group, len(data.obs)))

    if len(data.obs) == 0:
        raise RuntimeError(
            "Too many tasks. Every MPI task must "
            "be assigned to at least one observation."
        )

    if comm.comm_world is not None:
        comm.comm_world.barrier()
    timer.stop()
    if comm.world_rank == 0:
        timer.report("Simulated scans")

    # Split the data object for each telescope for separate mapmaking.
    # We could also split by site.

    if len(schedules) > 1:
        telescope_data = data.split("telescope")
        if len(telescope_data) == 1:
            # Only one telescope available
            telescope_data = []
    else:
        telescope_data = []
    telescope_data.insert(0, ("all", data))
    return data, telescope_data
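
distribute_uniform() is used throughout these examples but not shown. Below is a minimal sketch of the behavior the examples rely on, assuming it returns one (first_index, n_items) pair per process group and that every index listed in breaks coincides with a group boundary. This illustrates the semantics only; sketch_distribute_uniform is a hypothetical name, not the TOAST implementation.

def sketch_distribute_uniform(nitems, ngroups, breaks=None):
    # Even split first: boundary i sits at floor(i * nitems / ngroups).
    bounds = [(i * nitems) // ngroups for i in range(ngroups + 1)]
    if breaks and ngroups > 1:
        for b in breaks:
            # Snap the nearest interior boundary onto the mandatory break.
            i = min(range(1, ngroups), key=lambda j: abs(bounds[j] - b))
            bounds[i] = b
        bounds = sorted(bounds)
    return [(bounds[i], bounds[i + 1] - bounds[i]) for i in range(ngroups)]

# Ten observations over three groups, with a season break at index 4:
print(sketch_distribute_uniform(10, 3, breaks=[4]))
# [(0, 4), (4, 2), (6, 4)]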
Example 2
def create_observations(args, comm, focalplane, groupsize):
    """ Create and distribute TOAST observations for a satellite simulation.

    """
    log = Logger.get()
    timer = Timer()
    timer.start()

    # Since we are simulating noise timestreams, we want them to be
    # contiguous and reproducible over the whole observation.  We
    # distribute data by detector within an observation, so ensure that
    # our group size is not larger than the number of detectors we have.

    if groupsize > len(focalplane.keys()):
        if comm.world_rank == 0:
            log.error("process group is too large for the number of detectors")
            comm.comm_world.Abort()

    # Detector information from the focalplane

    detectors = sorted(focalplane.keys())
    detquats = {}
    detindx = None
    if "index" in focalplane[detectors[0]]:
        detindx = {}

    for d in detectors:
        detquats[d] = focalplane[d]["quat"]
        if detindx is not None:
            detindx[d] = focalplane[d]["index"]

    # Distribute the observations uniformly

    groupdist = distribute_uniform(args.obs_num, comm.ngroups)

    # Compute global time and sample ranges of all observations

    obsrange = regular_intervals(
        args.obs_num,
        args.start_time,
        0,
        args.sample_rate,
        3600 * args.obs_time_h,
        3600 * args.gap_h,
    )
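    # The positional arguments above are, in order: the number of
    # observations, the start time [s], the first sample index, the
    # sample rate [Hz], the observation length [s] and the gap length [s].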

    noise = get_analytic_noise(args, comm, focalplane)

    # The distributed timestream data

    data = Data(comm)

    # Every process group creates its observations

    group_firstobs = groupdist[comm.group][0]
    group_numobs = groupdist[comm.group][1]

    # Half-wave plate parameters.  These are undefined in this excerpt;
    # we assume they come from the command line with TOAST-style argument
    # names (an assumption, not part of the original code).
    hwprpm = args.hwp_rpm
    hwpstep = None
    if args.hwp_step_deg is not None:
        hwpstep = float(args.hwp_step_deg)
    hwpsteptime = args.hwp_step_time_s

    for ob in range(group_firstobs, group_firstobs + group_numobs):
        tod = TODSatellite(
            comm.comm_group,
            detquats,
            obsrange[ob].samples,
            coord=args.coord,
            firstsamp=obsrange[ob].first,
            firsttime=obsrange[ob].start,
            rate=args.sample_rate,
            spinperiod=args.spin_period_min,
            spinangle=args.spin_angle_deg,
            precperiod=args.prec_period_min,
            precangle=args.prec_angle_deg,
            detindx=detindx,
            detranks=comm.group_size,
            hwprpm=hwprpm,
            hwpstep=hwpstep,
            hwpsteptime=hwpsteptime,
        )

        obs = {}
        obs["name"] = "science_{:05d}".format(ob)
        obs["tod"] = tod
        obs["intervals"] = None
        obs["baselines"] = None
        obs["noise"] = noise
        obs["id"] = ob

        data.obs.append(obs)

    if comm.world_rank == 0:
        timer.report_clear("Read parameters, compute data distribution")

    # Set the precession axis now, which will trigger calculation of the
    # boresight pointing.

    for ob in range(group_numobs):
        curobs = data.obs[ob]
        tod = curobs["tod"]

        # Get the global sample offset from the original distribution of
        # intervals
        obsoffset = obsrange[group_firstobs + ob].first

        # Constantly slewing precession axis
        degday = 360.0 / 365.25
        precquat = np.empty(4 * tod.local_samples[1],
                            dtype=np.float64).reshape((-1, 4))
        slew_precession_axis(
            precquat,
            firstsamp=(obsoffset + tod.local_samples[0]),
            samplerate=args.sample_rate,
            degday=degday,
        )

        tod.set_prec_axis(qprec=precquat)
        del precquat

    if comm.world_rank == 0:
        timer.report_clear("Construct boresight pointing")

    return data
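
The layout produced by regular_intervals() above drives the whole distribution: each observation owns a contiguous, gap-separated slice of the global timeline. Here is a minimal sketch of those semantics, assuming only the .start, .first and .samples attributes used in the example; sketch_regular_intervals is a hypothetical stand-in, not the TOAST function.

from collections import namedtuple

Interval = namedtuple("Interval", ["start", "first", "samples"])

def sketch_regular_intervals(n, start_time, first_sample, rate, duration, gap):
    # Each observation spans `duration` seconds of data followed by a
    # `gap` second hole; everything is quantized to whole samples.
    stride = duration + gap
    return [
        Interval(
            start=start_time + i * stride,
            first=first_sample + int(i * stride * rate),
            samples=int(duration * rate),
        )
        for i in range(n)
    ]

# Two one-hour observations at 10 Hz separated by a 600 s gap:
for ival in sketch_regular_intervals(2, 0.0, 0, 10.0, 3600.0, 600.0):
    print(ival)
# Interval(start=0.0, first=0, samples=36000)
# Interval(start=4200.0, first=42000, samples=36000)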
Example 3
def create_observations(args, comm, schedule):
    """Simulate constant elevation scans.

    Simulate constant elevation scans at "site" matching entries in
    "all_ces".  Each operational day is assigned to a different
    process group to allow making day maps.

    """
    timer = Timer()
    timer.start()
    log = Logger.get()

    data = Data(comm)

    telescope = schedule.telescope
    site = telescope.site
    focalplane = telescope.focalplane
    all_ces = schedule.ceslist
    nces = len(all_ces)

    breaks = get_breaks(comm, all_ces, args)

    groupdist = distribute_uniform(nces, comm.ngroups, breaks=breaks)
    group_firstobs = groupdist[comm.group][0]
    group_numobs = groupdist[comm.group][1]

    if comm.comm_group is not None:
        ndetrank = comm.comm_group.size
    else:
        ndetrank = 1

    for ices in range(group_firstobs, group_firstobs + group_numobs):
        ces = all_ces[ices]
        totsamples = int((ces.stop_time - ces.start_time) * args.sample_rate)

        # create the single TOD for this observation

        try:
            tod = TODGround(
                comm.comm_group,
                focalplane.detquats,
                totsamples,
                detranks=ndetrank,
                firsttime=ces.start_time,
                rate=args.sample_rate,
                site_lon=site.lon,
                site_lat=site.lat,
                site_alt=site.alt,
                azmin=ces.azmin,
                azmax=ces.azmax,
                el=ces.el,
                scanrate=args.scan_rate,
                scan_accel=args.scan_accel,
                cosecant_modulation=args.scan_cosecant_modulate,
                CES_start=None,
                CES_stop=None,
                sun_angle_min=args.sun_angle_min,
                coord=args.coord,
                sampsizes=None,
                report_timing=args.debug,
            )
        except RuntimeError as e:
            raise RuntimeError(
                "Failed to create the CES scan: {}".format(e)
            ) from e

        # Create the (single) observation

        ob = {}
        ob["name"] = "CES-{}-{}-{}".format(ces.name, ces.scan, ces.subscan)
        ob["tod"] = tod
        if len(tod.subscans) > 0:
            ob["intervals"] = tod.subscans
        else:
            raise RuntimeError("{} has no valid intervals".format(ob["name"]))
        ob["baselines"] = None
        ob["noise"] = focalplane.noise
        ob["id"] = int(ces.mjdstart * 10000)

        data.obs.append(ob)

    # Release the cached Az/El pointing quaternions, which are only needed
    # while the TODs are constructed, to reduce the memory footprint.
    for ob in data.obs:
        tod = ob["tod"]
        tod.free_azel_quats()

    if comm.comm_world is None or comm.comm_group.rank == 0:
        log.info("Group # {:4} has {} observations.".format(
            comm.group, len(data.obs)))

    if len(data.obs) == 0:
        raise RuntimeError("Too many tasks. Every MPI task must "
                           "be assigned to at least one observation.")

    if comm.world_rank == 0:
        timer.report_clear("Simulate scans")

    return data
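
get_breaks() is not shown in these examples. Its job is to return the indices in all_ces at which distribute_uniform() must place a group boundary, so that each operational day lands in a single process group (which is what makes the day maps mentioned in the docstring possible). A hypothetical sketch of that contract, assuming each CES carries the mjdstart field used above and ignoring the comm and args arguments of the real helper:

def sketch_get_breaks(all_ces):
    # A new operational day starts whenever the integer part of the MJD
    # start time changes; recording index i forces a distribution break
    # just before entry i.
    breaks = []
    for i in range(1, len(all_ces)):
        if int(all_ces[i].mjdstart) != int(all_ces[i - 1].mjdstart):
            breaks.append(i)
    return breaks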
Example 4
def main():
    log = Logger.get()

    parser = argparse.ArgumentParser(
        description="Read a toast covariance matrix and invert it."
    )

    parser.add_argument(
        "--input",
        required=True,
        default=None,
        help="The input covariance FITS file.",
    )

    parser.add_argument(
        "--output",
        required=False,
        default=None,
        help="The output inverse covariance FITS file.",
    )

    parser.add_argument(
        "--rcond",
        required=False,
        default=None,
        help="Optionally write the inverse condition number map to this file.",
    )

    parser.add_argument(
        "--single",
        required=False,
        default=False,
        action="store_true",
        help="Write the output in single precision.",
    )

    parser.add_argument(
        "--threshold",
        required=False,
        default=1e-3,
        type=float,
        help="Reciprocal condition number threshold",
    )

    try:
        args = parser.parse_args()
    except SystemExit:
        return

    # get options

    infile = args.input
    outfile = None
    if args.output is not None:
        outfile = args.output
    else:
        inmat = re.match(r"(.*)\.fits", infile)
        if inmat is None:
            log.error("input file should have .fits extension")
            return
        inroot = inmat.group(1)
        outfile = "{}_inv.fits".format(inroot)

    # Get the default communicator
    mpiworld, procs, rank = get_world()

    # We need to read the header to get the size of the matrix.
    # This would be a trivial function call in astropy.fits or
    # fitsio, but we don't want to bring in a whole new dependency
    # just for that.  Instead, we open the file with healpy in memmap
    # mode so that nothing is actually read except the header.

    nside = 0
    ncovnz = 0
    if rank == 0:
        fake, head = hp.read_map(infile, h=True, memmap=True)
        for key, val in head:
            if key == "NSIDE":
                nside = int(val)
            if key == "TFIELDS":
                ncovnz = int(val)
    if mpiworld is not None:
        nside = mpiworld.bcast(nside, root=0)
        ncovnz = mpiworld.bcast(ncovnz, root=0)

    nnz = int(((np.sqrt(8.0 * ncovnz) - 1.0) / 2.0) + 0.5)
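    # The FITS file stores the upper triangle of the symmetric nnz x nnz
    # covariance block of each pixel, so ncovnz = nnz * (nnz + 1) / 2.
    # Inverting that relation gives nnz = (sqrt(8 * ncovnz + 1) - 1) / 2;
    # the + 0.5 above rounds the slightly low sqrt(8 * ncovnz) estimate to
    # the nearest integer, which is exact for any true triangular number.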

    npix = 12 * nside**2
    subnside = int(nside / 16)
    if subnside == 0:
        subnside = 1
    subnpix = 12 * subnside**2
    nsubmap = int(npix / subnpix)
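    # Example: nside = 512 gives subnside = 32, subnpix = 12288 and
    # nsubmap = (512 / 32) ** 2 = 256 submaps to share among processes.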

    # divide the submaps as evenly as possible among processes

    dist = distribute_uniform(nsubmap, procs)
    local = np.arange(dist[rank][0], dist[rank][0] + dist[rank][1])

    if rank == 0:
        if os.path.isfile(outfile):
            os.remove(outfile)

    if mpiworld is not None:
        mpiworld.barrier()

    # create the covariance and inverse condition number map

    rcond = None

    cov = DistPixels(
        comm=mpiworld,
        dtype=np.float64,
        size=npix,
        nnz=ncovnz,
        submap=subnpix,
        local=local,
    )

    if args.single:
        invcov = DistPixels(
            comm=mpiworld,
            dtype=np.float32,
            size=npix,
            nnz=ncovnz,
            submap=subnpix,
            local=local,
        )
    else:
        invcov = cov

    if args.rcond is not None:
        # The inverse condition number map stores one value per pixel.
        rcond = DistPixels(
            comm=mpiworld,
            dtype=np.float64,
            size=npix,
            nnz=1,
            submap=subnpix,
            local=local,
        )

    # read the covariance
    if rank == 0:
        log.info("Reading covariance from {}".format(infile))
    cov.read_healpix_fits(infile)

    # every process computes its local piece
    if rank == 0:
        log.info("Inverting covariance")
    covariance_invert(cov, args.threshold, rcond=rcond)

    if args.single:
        invcov.data[:] = cov.data.astype(np.float32)

    # write the inverted covariance
    if rank == 0:
        log.info("Writing inverted covariance to {}".format(outfile))
    invcov.write_healpix_fits(outfile)

    # write the condition number

    if args.rcond is not None:
        if rank == 0:
            log.info("Writing condition number map")
        rcond.write_healpix_fits(args.rcond)

    return
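
Assuming this main() is exposed as a script entry point (the file name below is hypothetical), a typical parallel invocation might look like:

    mpirun -np 4 python toast_cov_invert.py --input cov.fits --rcond rcond.fits --single

which, following the default naming logic above, writes cov_inv.fits alongside the input, together with the requested inverse condition number map.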