def main():
    """Planck point-source extraction pipeline.

    Reads Exchange Format data, builds a Healpix pointing matrix,
    optionally simulates and subtracts the dipole, then extracts the
    TOD around every source in a catalog.  Intended to run under MPI;
    all configuration comes from the command line (or an ``@parfile``).

    Fixes relative to the previous revision:
    * ``np.int`` / ``np.float`` argparse converters replaced with the
      builtin ``int`` / ``float`` (the NumPy aliases were deprecated in
      NumPy 1.20 and removed in 1.24).
    * rank check normalized to ``comm.world_rank`` throughout (one spot
      used ``comm.comm_world.rank``).
    """
    timer0 = Timer()
    timer0.start()
    log = Logger.get()
    gt = GlobalTimers.get()
    gt.start("toast_planck_reduce (total)")
    mpiworld, procs, rank, comm = get_comm()
    memreport("At start of pipeline", mpiworld)
    if comm.world_rank == 0:
        print("Running with {} processes at {}".format(
            procs, str(datetime.datetime.now())))

    parser = argparse.ArgumentParser(description='Simple MADAM Mapmaking',
                                     fromfile_prefix_chars='@')
    parser.add_argument('--rimo', required=True, help='RIMO file')
    parser.add_argument('--freq', required=True, type=int, help='Frequency')
    parser.add_argument('--debug', dest='debug', default=False,
                        action='store_true',
                        help='Write data distribution info to file')
    parser.add_argument('--dets', required=False, default=None,
                        help='Detector list (comma separated)')
    parser.add_argument('--effdir', required=True,
                        help='Input Exchange Format File directory')
    parser.add_argument('--effdir_pntg', required=False,
                        help='Input Exchange Format File directory '
                        'for pointing')
    parser.add_argument('--coord', default='G',
                        help='Coordinate system, "G", "E" or "C"')
    parser.add_argument('--obtmask', required=False, default=1, type=int,
                        help='OBT flag mask')
    parser.add_argument('--flagmask', required=False, default=1, type=int,
                        help='Quality flag mask')
    parser.add_argument('--pntflagmask', required=False, default=0, type=int,
                        help='Pointing flag mask')
    parser.add_argument('--bad_intervals', required=False,
                        help='Path to bad interval file.')
    parser.add_argument('--ringdb', required=True, help='Ring DB file')
    parser.add_argument('--odfirst', required=False, default=None, type=int,
                        help='First OD to use')
    parser.add_argument('--odlast', required=False, default=None, type=int,
                        help='Last OD to use')
    parser.add_argument('--ringfirst', required=False, default=None,
                        help='First ring to use (can be a list)')
    parser.add_argument('--ringlast', required=False, default=None,
                        help='Last ring to use (can be a list)')
    parser.add_argument('--obtfirst', required=False, default=None,
                        type=float, help='First OBT to use')
    parser.add_argument('--obtlast', required=False, default=None,
                        type=float, help='Last OBT to use')
    parser.add_argument('--out', required=False, default='.',
                        help='Output directory')
    parser.add_argument('--catalog', required=True,
                        help='Target catalog file')
    parser.add_argument('--radius', required=True, type=float,
                        help='Search radius about the source [arc min]')
    parser.add_argument('--mask', required=False,
                        help='Mask defining region of the sky to accept')
    parser.add_argument('--bg', required=False,
                        help='Background map to subtract')
    parser.add_argument('--recalib_bg', dest='recalib_bg', default=False,
                        action='store_true',
                        help='Recalibrate bg map for each ring.')
    parser.add_argument('--full_rings', dest='full_rings', default=False,
                        action='store_true',
                        help='Extract impacted rings entirely.')
    # noise parameters
    parser.add_argument('--noisefile', required=False, default='RIMO',
                        help='Path to noise PSD files for noise filter. '
                        'Tag DETECTOR will be replaced with detector name.')
    # Dipole parameters
    dipogroup = parser.add_mutually_exclusive_group()
    dipogroup.add_argument('--dipole', dest='dipole', required=False,
                           default=False, action='store_true',
                           help='Simulate dipole')
    dipogroup.add_argument('--solsys_dipole', dest='solsys_dipole',
                           required=False, default=False,
                           action='store_true',
                           help='Simulate solar system dipole')
    dipogroup.add_argument('--orbital_dipole', dest='orbital_dipole',
                           required=False, default=False,
                           action='store_true',
                           help='Simulate orbital dipole')

    try:
        args = parser.parse_args()
    except SystemExit:
        # parse_args already printed usage/help; exit cleanly on all ranks.
        sys.exit(0)

    if comm.world_rank == 0:
        print('All parameters:')
        print(args, flush=True)

    data = create_observations(args, comm)
    rimo = data.obs[0]["tod"].rimo
    memreport("After create observations", mpiworld)

    # Read in the signal
    timer = Timer()
    timer.start()
    reader = tp.OpInputPlanck(signal_name='signal', flags_name='flags')
    if comm.world_rank == 0:
        print('Reading input signal from {}'.format(args.effdir),
              flush=True)
    reader.exec(data)
    if mpiworld is not None:
        mpiworld.barrier()
    if comm.world_rank == 0:
        timer.report_clear("Reading")
    tod_name = 'signal'
    flags_name = 'flags'
    memreport("After read", mpiworld)

    # Optionally flag bad intervals
    if args.bad_intervals is not None:
        flagger = tp.OpBadIntervals(path=args.bad_intervals)
        flagger.exec(data)
        if comm.world_rank == 0:
            timer.report_clear("Applying {}".format(args.bad_intervals))

    do_dipole = (args.dipole or args.solsys_dipole or args.orbital_dipole)

    # make a planck Healpix pointing matrix.  Velocities are only kept
    # when a dipole template will be needed later.
    pointing = tp.OpPointingPlanck(nside=1024, mode='IQU', RIMO=rimo,
                                   margin=0, apply_flags=False,
                                   keep_vel=do_dipole, keep_pos=False,
                                   keep_phase=True, keep_quats=True)
    pointing.exec(data)
    memreport("After pointing", mpiworld)
    if mpiworld is not None:
        mpiworld.barrier()
    if comm.world_rank == 0:
        timer.report_clear("Pointing Matrix")

    # Optionally subtract the dipole
    if do_dipole:
        if args.dipole:
            dipomode = 'total'
        elif args.solsys_dipole:
            dipomode = 'solsys'
        else:
            dipomode = 'orbital'
        dipo = tp.OpDipolePlanck(args.freq, mode=dipomode,
                                 output='dipole', keep_quats=True)
        dipo.exec(data)
        if mpiworld is not None:
            mpiworld.barrier()
        if comm.world_rank == 0:
            timer.report_clear("Dipole")
        subtractor = tp.OpCacheMath(in1=tod_name, in2='dipole',
                                    subtract=True, out=tod_name)
        # Consistency fix: use comm.world_rank like every other rank
        # check in this function (was comm.comm_world.rank).
        if comm.world_rank == 0:
            print('Subtracting dipole', flush=True)
        subtractor.exec(data)
        if mpiworld is not None:
            mpiworld.barrier()
        if comm.world_rank == 0:
            timer.report_clear("Dipole subtraction")
        memreport("After dipole", mpiworld)

    extract = tp.OpExtractPlanck(rimo, args.catalog, args.radius, mpiworld,
                                 common_flag_mask=args.obtmask,
                                 flag_mask=args.flagmask,
                                 maskfile=args.mask, bg=args.bg,
                                 full_rings=args.full_rings,
                                 recalibrate_bg=args.recalib_bg,
                                 out=args.out)
    extract.exec(data)
    memreport("After extract", mpiworld)
    if mpiworld is not None:
        mpiworld.barrier()
    if comm.world_rank == 0:
        timer.report_clear("Extraction")

    gt.stop_all()
    if mpiworld is not None:
        mpiworld.barrier()
    timer = Timer()
    timer.start()
    alltimers = gather_timers(comm=mpiworld)
    if comm.world_rank == 0:
        out = os.path.join(args.out, "timing")
        dump_timing(alltimers, out)
        timer.report_clear("Gather and dump timing info")
    timer0.report_clear("Full pipeline")
    return
def main():
    """Run the toast_benchmark pipeline and report a "science metric".

    Creates a synthetic observing schedule sized to the requested case,
    simulates atmosphere/noise/sky signal per Monte Carlo realization and
    frequency, makes maps, and finally reports a throughput metric and
    timing information.  Exits early (sys.exit) on a dry run.
    """
    env = Environment.get()
    env.enable_function_timers()
    log = Logger.get()
    gt = GlobalTimers.get()
    gt.start("toast_benchmark (total)")
    mpiworld, procs, rank = get_world()
    if rank == 0:
        log.info("TOAST version = {}".format(env.version()))
        log.info("Using a maximum of {} threads per process".format(env.max_threads()))
    if mpiworld is None:
        log.info("Running serially with one process at {}".format(str(datetime.now())))
    else:
        if rank == 0:
            log.info(
                "Running with {} processes at {}".format(procs, str(datetime.now()))
            )
    # Benchmark cases keyed by total sample count (approximate memory cost noted).
    cases = {
        "tiny": 5000000,  # O(1) GB RAM
        "xsmall": 50000000,  # O(10) GB RAM
        "small": 500000000,  # O(100) GB RAM
        "medium": 5000000000,  # O(1) TB RAM
        "large": 50000000000,  # O(10) TB RAM
        "xlarge": 500000000000,  # O(100) TB RAM
        "heroic": 5000000000000,  # O(1000) TB RAM
    }
    args, comm, n_nodes, n_detector, case, group_seconds, n_group = job_config(
        mpiworld, cases
    )
    # Note: The number of "days" here will just be an approximation of the desired
    # data volume since we are doing a realistic schedule for a real observing site.
    n_days = int(2.0 * (group_seconds * n_group) / (24 * 3600))
    if n_days == 0:
        n_days = 1
    if rank == 0:
        log.info(
            "Using {} detectors for approximately {} days".format(n_detector, n_days)
        )
    # Create the schedule file and input maps on one process
    if rank == 0:
        create_schedules(args, group_seconds, n_days)
        create_input_maps(args)
    if mpiworld is not None:
        mpiworld.barrier()
    if args.dry_run is not None:
        if rank == 0:
            log.info("Exit from dry run")
        # We are done!
        sys.exit(0)
    gt.start("toast_benchmark (science work)")
    # Load and broadcast the schedule file
    schedules = pipeline_tools.load_schedule(args, comm)
    # Load the weather and append to schedules
    pipeline_tools.load_weather(args, comm, schedules)
    # Simulate the focalplane
    detweights = create_focalplanes(args, comm, schedules, n_detector)
    # Create the TOAST data object to match the schedule.  This will
    # include simulating the boresight pointing.
    data, telescope_data, total_samples = create_observations(args, comm, schedules)
    # handle = None
    # if comm.world_rank == 0:
    #     handle = open(os.path.join(args.outdir, "distdata.txt"), "w")
    # data.info(handle)
    # if comm.world_rank == 0:
    #     handle.close()
    # if comm.comm_world is not None:
    #     comm.comm_world.barrier()
    # Split the communicator for day and season mapmaking
    time_comms = pipeline_tools.get_time_communicators(args, comm, data)
    # Expand boresight quaternions into detector pointing weights and
    # pixel numbers
    pipeline_tools.expand_pointing(args, comm, data)
    # Optionally rewrite the noise PSD:s in each observation to include
    # elevation-dependence
    pipeline_tools.get_elevation_noise(args, comm, data)
    # Purge the pointing if we are NOT going to export the
    # data to a TIDAS volume
    if (args.tidas is None) and (args.spt3g is None):
        for ob in data.obs:
            tod = ob["tod"]
            tod.free_radec_quats()
    # Prepare auxiliary information for distributed map objects
    signalname = pipeline_tools.scan_sky_signal(args, comm, data, "signal")
    # Set up objects to take copies of the TOD at appropriate times
    totalname, totalname_freq = setup_sigcopy(args)
    # Loop over Monte Carlos
    firstmc = args.MC_start
    nsimu = args.MC_count
    freqs = [float(freq) for freq in args.freq.split(",")]
    nfreq = len(freqs)
    for mc in range(firstmc, firstmc + nsimu):
        # Atmosphere is simulated once per MC and shared across frequencies.
        pipeline_tools.simulate_atmosphere(args, comm, data, mc, totalname)
        # Loop over frequencies with identical focal planes and identical
        # atmospheric noise.
        for ifreq, freq in enumerate(freqs):
            if comm.world_rank == 0:
                log.info(
                    "Processing frequency {}GHz {} / {}, MC = {}".format(
                        freq, ifreq + 1, nfreq, mc
                    )
                )
            # Make a copy of the atmosphere so we can scramble the gains and apply
            # frequency-dependent scaling.
            pipeline_tools.copy_signal(args, comm, data, totalname, totalname_freq)
            pipeline_tools.scale_atmosphere_by_frequency(
                args, comm, data, freq=freq, mc=mc, cache_name=totalname_freq
            )
            pipeline_tools.update_atmospheric_noise_weights(args, comm, data, freq, mc)
            # Add previously simulated sky signal to the atmospheric noise.
            pipeline_tools.add_signal(
                args, comm, data, totalname_freq, signalname, purge=(nsimu == 1)
            )
            # Offset the noise realization per frequency so realizations
            # do not repeat across the frequency loop.
            mcoffset = ifreq * 1000000
            pipeline_tools.simulate_noise(
                args, comm, data, mc + mcoffset, totalname_freq
            )
            pipeline_tools.scramble_gains(
                args, comm, data, mc + mcoffset, totalname_freq
            )
            outpath = setup_output(args, comm, mc + mcoffset, freq)
            # Bin and destripe maps
            pipeline_tools.apply_mapmaker(
                args,
                comm,
                data,
                outpath,
                totalname_freq,
                time_comms=time_comms,
                telescope_data=telescope_data,
                first_call=(mc == firstmc),
            )
            if args.apply_polyfilter or args.apply_groundfilter:
                # Filter signal
                pipeline_tools.apply_polyfilter(args, comm, data, totalname_freq)
                pipeline_tools.apply_groundfilter(args, comm, data, totalname_freq)
                # Bin filtered maps
                pipeline_tools.apply_mapmaker(
                    args,
                    comm,
                    data,
                    outpath,
                    totalname_freq,
                    time_comms=time_comms,
                    telescope_data=telescope_data,
                    first_call=False,
                    extra_prefix="filtered",
                    bin_only=True,
                )
    gt.stop_all()
    if mpiworld is not None:
        mpiworld.barrier()
    # Throughput metric: favors more detectors/samples processed per
    # node-second of "science work" wall time.
    runtime = gt.seconds("toast_benchmark (science work)")
    prefactor = 1.0e-3
    kilo_samples = 1.0e-3 * total_samples
    sample_factor = 1.2
    det_factor = 2.0
    metric = (
        prefactor
        * n_detector ** det_factor
        * kilo_samples ** sample_factor
        / (n_nodes * runtime)
    )
    if rank == 0:
        msg = "Science Metric: {:0.1e} * ({:d}**{:0.2f}) * ({:0.3e}**{:0.3f}) / ({:0.1f} * {}) = {:0.2f}".format(
            prefactor,
            n_detector,
            det_factor,
            kilo_samples,
            sample_factor,
            runtime,
            n_nodes,
            metric,
        )
        log.info("")
        log.info(msg)
        log.info("")
        with open(os.path.join(args.outdir, "log"), "a") as f:
            f.write(msg)
            f.write("\n\n")
    timer = Timer()
    timer.start()
    alltimers = gather_timers(comm=mpiworld)
    if comm.world_rank == 0:
        out = os.path.join(args.outdir, "timing")
        dump_timing(alltimers, out)
        # Append the global timer CSV to the log file for convenience.
        with open(os.path.join(args.outdir, "log"), "a") as f:
            f.write("Copy of Global Timers:\n")
            with open("{}.csv".format(out), "r") as t:
                f.write(t.read())
        timer.stop()
        timer.report("Gather and dump timing info")
    return
def get_analytic_noise(args, comm, focalplane, verbose=True):
    """Create a TOAST noise object.

    Build an AnalyticNoise model from the 1/f noise parameters stored in
    the focalplane database.  When ``args.common_mode_noise`` is set, one
    extra "virtual" detector per optics tube carries a shared common-mode
    component, and the noise object's mixing matrix is patched so every
    real detector couples to its tube's common-mode stream.
    """
    timer = Timer()
    timer.start()
    detectors = sorted(focalplane.keys())
    # Per-detector 1/f parameters pulled straight from the focalplane DB.
    rates = {name: args.sample_rate for name in detectors}
    fmins = {name: focalplane[name]["fmin"] for name in detectors}
    fknees = {name: focalplane[name]["fknee"] for name in detectors}
    alphas = {name: focalplane[name]["alpha"] for name in detectors}
    NETs = {name: focalplane[name]["NET"] for name in detectors}
    indices = {name: focalplane[name]["index"] for name in detectors}
    if args.common_mode_noise:
        # Add an extra "virtual" detector for common mode noise for
        # every optics tube
        fmin, fknee, alpha, net = np.array(
            args.common_mode_noise.split(",")).astype(np.float64)
        hw = hardware.get_example()
        for itube, tube in enumerate(sorted(hw.data["tubes"].keys())):
            virtual = "common_mode_{}".format(tube)
            detectors.append(virtual)
            rates[virtual] = args.sample_rate
            fmins[virtual] = fmin
            fknees[virtual] = fknee
            alphas[virtual] = alpha
            NETs[virtual] = net
            indices[virtual] = 100000 + itube
    noise = AnalyticNoise(
        rate=rates,
        fmin=fmins,
        detectors=detectors,
        fknee=fknees,
        alpha=alphas,
        NET=NETs,
        indices=indices,
    )
    if args.common_mode_noise:
        # Update the mixing matrix in the noise operator: each real
        # detector sees its own stream plus its tube's common mode.
        mixmatrix = {}
        keys = set()
        for det in focalplane.keys():
            common = "common_mode_{}".format(focalplane[det]["tube"])
            mixmatrix[det] = {det: 1, common: 1}
            keys.update((det, common))
        # There should probably be an accessor method to update the
        # mixmatrix in the TOAST Noise object.
        if noise._mixmatrix is not None:
            raise RuntimeError("Did not expect non-empty mixing matrix")
        noise._mixmatrix = mixmatrix
        noise._keys = list(sorted(keys))
    timer.stop()
    if comm.world_rank == 0 and verbose:
        timer.report("Creating noise model")
    return noise
def main():
    """Planck Exchange-Format Monte Carlo noise + Madam mapmaking pipeline.

    Distributes Planck rings over MPI processes, reads (or erases) the
    input signal, optionally adds dipole/straylight templates, simulates
    noise per Monte Carlo realization, applies (de)calibration, and runs
    Madam to produce maps for each realization.

    Fixes relative to the previous revision:
    * ``np.int`` / ``np.float`` argparse converters replaced with builtin
      ``int`` / ``float`` (NumPy aliases deprecated in 1.20, removed in 1.24).
    * ``--skip_madam`` help-string typo ("D not" -> "Do not").
    * The ``--effdir2`` branch referenced an undefined ``elapsed`` variable
      (NameError) and read the extra TOD into cache ``'tod2'`` while adding
      and clearing ``'signal2'`` — the extra TOD was never actually added.
      Both are fixed: timing now uses the running Timer, and all three
      references use the ``'signal2'`` cache name.
    """
    log = Logger.get()
    gt = GlobalTimers.get()
    gt.start("toast_planck_reduce (total)")
    mpiworld, procs, rank, comm = get_comm()
    if comm.world_rank == 0:
        print("Running with {} processes at {}".format(
            procs, str(datetime.datetime.now())))

    parser = argparse.ArgumentParser(description='Simple MADAM Mapmaking',
                                     fromfile_prefix_chars='@')
    parser.add_argument('--skip_madam', dest='skip_madam', default=False,
                        action='store_true',
                        help='Do not make maps with Madam.')
    parser.add_argument('--skip_noise', dest='skip_noise', default=False,
                        action='store_true',
                        help='Do not add simulated noise to the TOD.')
    parser.add_argument('--rimo', required=True, help='RIMO file')
    parser.add_argument('--freq', required=True, type=int, help='Frequency')
    parser.add_argument('--debug', dest='debug', default=False,
                        action='store_true',
                        help='Write data distribution info to file')
    parser.add_argument('--dets', required=False, default=None,
                        help='Detector list (comma separated)')
    parser.add_argument('--effdir', required=True,
                        help='Input Exchange Format File directory')
    parser.add_argument('--effdir2', required=False,
                        help='Additional input Exchange Format File directory')
    parser.add_argument('--effdir_pntg', required=False,
                        help='Input Exchange Format File directory for '
                        'pointing')
    parser.add_argument('--effdir_fsl', required=False,
                        help='Input Exchange Format File directory for '
                        'straylight')
    parser.add_argument('--obtmask', required=False, default=1, type=int,
                        help='OBT flag mask')
    parser.add_argument('--flagmask', required=False, default=1, type=int,
                        help='Quality flag mask')
    parser.add_argument('--pntflagmask', required=False, default=0, type=int,
                        help='Which OBT flag bits to raise for HCM maneuvers')
    parser.add_argument('--bad_intervals', required=False,
                        help='Path to bad interval file.')
    parser.add_argument('--ringdb', required=True, help='Ring DB file')
    parser.add_argument('--odfirst', required=False, default=None,
                        help='First OD to use')
    parser.add_argument('--odlast', required=False, default=None,
                        help='Last OD to use')
    parser.add_argument('--ringfirst', required=False, default=None,
                        help='First ring to use')
    parser.add_argument('--ringlast', required=False, default=None,
                        help='Last ring to use')
    parser.add_argument('--obtfirst', required=False, default=None,
                        help='First OBT to use')
    parser.add_argument('--obtlast', required=False, default=None,
                        help='Last OBT to use')
    parser.add_argument('--read_eff', dest='read_eff', default=False,
                        action='store_true',
                        help='Read and co-add the signal from effdir')
    parser.add_argument('--decalibrate', required=False,
                        help='Path to calibration file to decalibrate with. '
                        'You can use python string formatting, assuming '
                        '.format(mc)')
    parser.add_argument('--calibrate', required=False,
                        help='Path to calibration file to calibrate with. '
                        'You can use python string formatting, assuming '
                        '.format(mc)')
    parser.add_argument('--madampar', required=False, default=None,
                        help='Madam parameter file')
    parser.add_argument('--nside', required=False, default=None, type=int,
                        help='Madam resolution')
    parser.add_argument('--out', required=False, default='.',
                        help='Output directory')
    parser.add_argument('--madam_prefix', required=False, help='map prefix')
    parser.add_argument('--make_rings', dest='make_rings', default=False,
                        action='store_true', help='Compile ringsets.')
    parser.add_argument('--nside_ring', required=False, default=128,
                        type=int, help='Ringset resolution')
    parser.add_argument('--ring_root', required=False, default='ringset',
                        help='Root filename for ringsets (setting to empty '
                        'disables ringset output).')
    parser.add_argument('--MC_start', required=False, default=0, type=int,
                        help='First Monte Carlo noise realization')
    parser.add_argument('--MC_count', required=False, default=1, type=int,
                        help='Number of Monte Carlo noise realizations')
    # noise parameters
    parser.add_argument('--noisefile', required=False, default='RIMO',
                        help='Path to noise PSD files for noise filter. '
                        'Tag DETECTOR will be replaced with detector name.')
    parser.add_argument('--noisefile_simu', required=False, default='RIMO',
                        help='Path to noise PSD files for noise simulation. '
                        'Tag DETECTOR will be replaced with detector name.')
    # Dipole parameters
    dipogroup = parser.add_mutually_exclusive_group()
    dipogroup.add_argument('--dipole', dest='dipole', required=False,
                           default=False, action='store_true',
                           help='Simulate dipole')
    dipogroup.add_argument('--solsys_dipole', dest='solsys_dipole',
                           required=False, default=False,
                           action='store_true',
                           help='Simulate solar system dipole')
    dipogroup.add_argument('--orbital_dipole', dest='orbital_dipole',
                           required=False, default=False,
                           action='store_true',
                           help='Simulate orbital dipole')
    dipo_parameters_group = parser.add_argument_group('dipole_parameters')
    dipo_parameters_group.add_argument(
        '--solsys_speed', required=False, type=float,
        default=DEFAULT_PARAMETERS["solsys_speed"],
        help='Solar system speed wrt. CMB rest frame in km/s. Default is '
        'Planck 2015 best fit value')
    dipo_parameters_group.add_argument(
        '--solsys_glon', required=False, type=float,
        default=DEFAULT_PARAMETERS["solsys_glon"],
        help='Solar system velocity direction longitude in degrees')
    dipo_parameters_group.add_argument(
        '--solsys_glat', required=False, type=float,
        default=DEFAULT_PARAMETERS["solsys_glat"],
        help='Solar system velocity direction latitude in degrees')

    try:
        args = parser.parse_args()
    except SystemExit:
        sys.exit(0)

    if comm.world_rank == 0:
        print('All parameters:')
        print(args, flush=True)

    if args.MC_count < 1:
        raise RuntimeError('MC_count = {} < 1. Nothing done.'
                           ''.format(args.MC_count))

    timer = Timer()
    timer.start()

    # Parse the (possibly comma-separated) OD / ring / OBT range limits.
    nrange = 1
    odranges = None
    if args.odfirst is not None and args.odlast is not None:
        odranges = []
        firsts = [int(i) for i in str(args.odfirst).split(',')]
        lasts = [int(i) for i in str(args.odlast).split(',')]
        for odfirst, odlast in zip(firsts, lasts):
            odranges.append((odfirst, odlast))
        nrange = len(odranges)

    ringranges = None
    if args.ringfirst is not None and args.ringlast is not None:
        ringranges = []
        firsts = [int(i) for i in str(args.ringfirst).split(',')]
        lasts = [int(i) for i in str(args.ringlast).split(',')]
        for ringfirst, ringlast in zip(firsts, lasts):
            ringranges.append((ringfirst, ringlast))
        nrange = len(ringranges)

    obtranges = None
    if args.obtfirst is not None and args.obtlast is not None:
        obtranges = []
        firsts = [float(i) for i in str(args.obtfirst).split(',')]
        lasts = [float(i) for i in str(args.obtlast).split(',')]
        for obtfirst, obtlast in zip(firsts, lasts):
            obtranges.append((obtfirst, obtlast))
        nrange = len(obtranges)

    if odranges is None:
        odranges = [None] * nrange
    if ringranges is None:
        ringranges = [None] * nrange
    if obtranges is None:
        obtranges = [None] * nrange

    detectors = None
    if args.dets is not None:
        detectors = re.split(',', args.dets)

    # create the TOD for this observation
    if args.noisefile != 'RIMO' or args.noisefile_simu != 'RIMO':
        do_eff_cache = True
    else:
        do_eff_cache = False

    tods = []
    for obtrange, ringrange, odrange in zip(obtranges, ringranges, odranges):
        # create the TOD for this observation
        tods.append(
            tp.Exchange(comm=comm.comm_group, detectors=detectors,
                        ringdb=args.ringdb, effdir_in=args.effdir,
                        extra_effdirs=[args.effdir2, args.effdir_fsl],
                        effdir_pntg=args.effdir_pntg, obt_range=obtrange,
                        ring_range=ringrange, od_range=odrange,
                        freq=args.freq, RIMO=args.rimo,
                        obtmask=args.obtmask, flagmask=args.flagmask,
                        pntflagmask=args.pntflagmask,
                        do_eff_cache=do_eff_cache))

    # Make output directory
    if not os.path.isdir(args.out) and comm.world_rank == 0:
        os.makedirs(args.out)

    # Read in madam parameter file
    # Allow more than one entry, gather into a list
    repeated_keys = ['detset', 'detset_nopol', 'survey']
    pars = {}
    if comm.world_rank == 0:
        pars['kfirst'] = False
        pars['temperature_only'] = True
        pars['base_first'] = 60.0
        pars['nside_submap'] = 16
        pars['write_map'] = False
        pars['write_binmap'] = True
        pars['write_matrix'] = False
        pars['write_wcov'] = False
        pars['write_hits'] = True
        pars['kfilter'] = False
        pars['info'] = 3
        if args.madampar:
            pat = re.compile(r'\s*(\S+)\s*=\s*(\S+(\s+\S+)*)\s*')
            comment = re.compile(r'^#.*')
            with open(args.madampar, 'r') as f:
                for line in f:
                    if not comment.match(line):
                        result = pat.match(line)
                        if result:
                            key, value = result.group(1), result.group(2)
                            if key in repeated_keys:
                                if key not in pars:
                                    pars[key] = []
                                pars[key].append(value)
                            else:
                                pars[key] = value
        # Command line parameters override the ones in the madam parameter file
        if 'file_root' not in pars:
            pars['file_root'] = 'madam'
        if args.madam_prefix is not None:
            pars['file_root'] = args.madam_prefix
        sfreq = '{:03}'.format(args.freq)
        if sfreq not in pars['file_root']:
            pars['file_root'] += '_' + sfreq
        try:
            fsample = {30: 32.51, 44: 46.55, 70: 78.77}[args.freq]
        except Exception:
            fsample = 180.3737
        pars['fsample'] = fsample
        pars['path_output'] = args.out
    pars = comm.comm_world.bcast(pars, root=0)

    # Madam cannot cache auxiliary data between calls when subchunking.
    madam_mcmode = True
    if 'nsubchunk' in pars and int(pars['nsubchunk']) > 1:
        madam_mcmode = False

    if args.noisefile != 'RIMO' or args.noisefile_simu != 'RIMO':
        # We split MPI_COMM_WORLD into single process groups, each of
        # which is assigned one or more observations (rings)
        comm = toast.Comm(groupsize=1)

    # This is the distributed data, consisting of one or
    # more observations, each distributed over a communicator.
    data = toast.Data(comm)
    for iobs, tod in enumerate(tods):
        if args.noisefile != 'RIMO' or args.noisefile_simu != 'RIMO':
            # Use a toast helper method to optimally distribute rings between
            # processes.
            dist = distribute_discrete(tod.ringsizes, comm.world_size)
            my_first_ring, my_n_ring = dist[comm.world_rank]
            for my_ring in range(my_first_ring, my_first_ring + my_n_ring):
                ringtod = tp.Exchange.from_tod(
                    tod, my_ring, comm.comm_group, noisefile=args.noisefile,
                    noisefile_simu=args.noisefile_simu)
                ob = {}
                ob['name'] = 'ring{:05}'.format(ringtod.globalfirst_ring)
                ob['id'] = ringtod.globalfirst_ring
                ob['tod'] = ringtod
                ob['intervals'] = ringtod.valid_intervals
                ob['baselines'] = None
                ob['noise'] = ringtod.noise
                ob['noise_simu'] = ringtod.noise_simu
                data.obs.append(ob)
        else:
            ob = {}
            ob['name'] = 'observation{:04}'.format(iobs)
            ob['id'] = 0
            ob['tod'] = tod
            ob['intervals'] = tod.valid_intervals
            ob['baselines'] = None
            ob['noise'] = tod.noise
            # NOTE(review): with RIMO noise files the simulation noise
            # model is the same object as the filtering noise model.
            ob['noise_simu'] = tod.noise
            data.obs.append(ob)

    rimo = tods[0].rimo

    if mpiworld is not None:
        mpiworld.barrier()
    if comm.world_rank == 0:
        timer.report_clear("Metadata queries")

    # Always read the signal and flags, even if the signal is later
    # overwritten.  There is no overhead for the signal because it is
    # interlaced with the flags.
    tod_name = 'signal'
    timestamps_name = 'timestamps'
    flags_name = 'flags'
    common_flags_name = 'common_flags'
    reader = tp.OpInputPlanck(signal_name=tod_name, flags_name=flags_name,
                              timestamps_name=timestamps_name,
                              commonflags_name=common_flags_name)
    if comm.world_rank == 0:
        print('Reading input signal from {}'.format(args.effdir),
              flush=True)
    reader.exec(data)
    if mpiworld is not None:
        mpiworld.barrier()
    if comm.world_rank == 0:
        timer.report_clear("Read")

    # Clear the signal if we don't need it
    if not args.read_eff:
        eraser = tp.OpCacheMath(in1=tod_name, in2=0, multiply=True,
                                out=tod_name)
        if comm.world_rank == 0:
            print('Erasing TOD', flush=True)
        eraser.exec(data)
        if mpiworld is not None:
            mpiworld.barrier()
        if comm.world_rank == 0:
            timer.report_clear("Erase")

    # Optionally flag bad intervals
    if args.bad_intervals is not None:
        flagger = tp.OpBadIntervals(path=args.bad_intervals)
        flagger.exec(data)
        if mpiworld is not None:
            mpiworld.barrier()
        if comm.world_rank == 0:
            timer.report_clear("Apply {}".format(args.bad_intervals))

    # Now read an optional second TOD to add with the first
    if args.effdir2 is not None:
        # Read the extra TOD and add it to the first one.
        # BUGFIX: read into the same 'signal2' cache name that the adder
        # and the cache clear below reference (was 'tod2').
        reader = tp.OpInputPlanck(signal_name='signal2', flags_name=None,
                                  timestamps_name=None,
                                  commonflags_name=None,
                                  effdir=args.effdir2)
        if comm.world_rank == 0:
            print('Reading extra TOD from {}'.format(args.effdir2),
                  flush=True)
        reader.exec(data)
        if mpiworld is not None:
            mpiworld.barrier()
        # BUGFIX: the original referenced an undefined 'elapsed' variable
        # here; report the running timer instead.
        if comm.world_rank == 0:
            timer.report_clear("Read extra TOD")
        adder = tp.OpCacheMath(in1=tod_name, in2='signal2', add=True,
                               out=tod_name)
        if comm.world_rank == 0:
            print('Adding TODs', flush=True)
        adder.exec(data)
        # Erase the extra cache object
        for ob in data.obs:
            tod = ob['tod']
            tod.cache.clear('signal2_.*')

    if args.effdir_fsl is not None:
        # Read the straylight signal into the tod cache under
        # "fsl_<detector>"
        reader = tp.OpInputPlanck(signal_name='fsl', flags_name=None,
                                  timestamps_name=None,
                                  commonflags_name=None,
                                  effdir=args.effdir_fsl)
        if comm.world_rank == 0:
            print('Reading straylight signal from {}'.format(
                args.effdir_fsl), flush=True)
        reader.exec(data)
        if mpiworld is not None:
            mpiworld.barrier()
        if comm.world_rank == 0:
            timer.report_clear("Read FSL")
        do_fsl = True
    else:
        do_fsl = False

    # make a planck Healpix pointing matrix
    mode = 'IQU'
    if pars['temperature_only'] == 'T':
        mode = 'I'
    if args.nside is None:
        if 'nside_map' in pars:
            nside = int(pars['nside_map'])
        else:
            raise RuntimeError(
                'Nside must be set either in the Madam parameter file or on '
                'the command line')
    else:
        nside = args.nside
        pars['nside_map'] = nside
    if 'nside_cross' not in pars or pars['nside_cross'] > pars['nside_map']:
        pars['nside_cross'] = pars['nside_map']

    do_dipole = args.dipole or args.solsys_dipole or args.orbital_dipole

    pointing = tp.OpPointingPlanck(nside=nside, mode=mode, RIMO=rimo,
                                   margin=0, apply_flags=True,
                                   keep_vel=do_dipole, keep_pos=False,
                                   keep_phase=False, keep_quats=do_dipole)
    pointing.exec(data)
    if mpiworld is not None:
        mpiworld.barrier()
    if comm.world_rank == 0:
        timer.report_clear("Pointing Matrix")

    flags_name = 'flags'
    common_flags_name = 'common_flags'

    # for now, we pass in the noise weights from the RIMO.
    # NOTE(review): 'tod' here is the last TOD from the loop above; this
    # assumes all observations share the same detector set.
    detweights = {}
    for d in tod.detectors:
        net = tod.rimo[d].net
        fsample = tod.rimo[d].fsample
        detweights[d] = 1.0 / (fsample * net * net)

    if args.debug:
        with open("debug_planck_exchange_madam.txt", "w") as f:
            data.info(f)

    if do_dipole:
        # Simulate the dipole
        if args.dipole:
            dipomode = 'total'
        elif args.solsys_dipole:
            dipomode = 'solsys'
        else:
            dipomode = 'orbital'
        dipo = tp.OpDipolePlanck(args.freq, solsys_speed=args.solsys_speed,
                                 solsys_glon=args.solsys_glon,
                                 solsys_glat=args.solsys_glat,
                                 mode=dipomode, output='dipole',
                                 keep_quats=False)
        dipo.exec(data)
        if mpiworld is not None:
            mpiworld.barrier()
        if comm.world_rank == 0:
            timer.report_clear("Dipole")

    # Loop over Monte Carlos
    madam = None
    for mc in range(args.MC_start, args.MC_start + args.MC_count):
        out = "{}/{:05d}".format(args.out, mc)
        if comm.world_rank == 0:
            if not os.path.isdir(out):
                os.makedirs(out)
        # clear all noise data from the cache, so that we can generate
        # new noise timestreams.
        for ob in data.obs:
            ob['tod'].cache.clear("noise_.*")
        tod_name = 'signal'

        if do_dipole:
            adder = tp.OpCacheMath(in1=tod_name, in2='dipole',
                                   add=True, out='noise')
            adder.exec(data)
            if mpiworld is not None:
                mpiworld.barrier()
            if comm.world_rank == 0:
                timer.report_clear("MC {}: Add dipole".format(mc))
            tod_name = 'noise'

        # Simulate noise
        if not args.skip_noise:
            tod_name = 'noise'
            nse = toast.tod.OpSimNoise(out=tod_name, realization=mc,
                                       component=0, noise='noise_simu',
                                       rate=fsample)
            if comm.world_rank == 0:
                print('Simulating noise from {}'.format(args.noisefile_simu),
                      flush=True)
            nse.exec(data)
            if mpiworld is not None:
                mpiworld.barrier()
            if comm.world_rank == 0:
                timer.report_clear("MC {}: Noise simulation".format(mc))
            # If we didn't add the dipole, we need to add the input
            # signal with the noise we just simulated
            if args.read_eff and not do_dipole:
                adder = tp.OpCacheMath(in1=tod_name, in2='signal',
                                       add=True, out=tod_name)
                adder.exec(data)
                if mpiworld is not None:
                    mpiworld.barrier()
                if comm.world_rank == 0:
                    timer.report_clear("MC {}: Add input signal".format(mc))

        # Make rings
        if args.make_rings:
            ringmaker = tp.OpRingMaker(args.nside_ring, nside,
                                       signal=tod_name,
                                       fileroot=args.ring_root, out=out,
                                       commonmask=args.obtmask,
                                       detmask=args.flagmask)
            ringmaker.exec(data)
            if mpiworld is not None:
                mpiworld.barrier()
            if comm.world_rank == 0:
                timer.report_clear("MC {}: Ringmaking".format(mc))

        # Apply calibration errors
        if args.decalibrate is not None:
            fn = args.decalibrate
            try:
                fn = fn.format(mc)
            except Exception:
                # Path without an {} placeholder: use it for every MC.
                pass
            if comm.world_rank == 0:
                print('Decalibrating with {}'.format(fn), flush=True)
            decalibrator = tp.OpCalibPlanck(signal_in=tod_name,
                                            signal_out='noise',
                                            file_gain=fn, decalibrate=True)
            decalibrator.exec(data)
            if mpiworld is not None:
                mpiworld.barrier()
            if comm.world_rank == 0:
                timer.report_clear("MC {}: Decalibrate".format(mc))
            tod_name = 'noise'

        if args.calibrate is not None:
            fn = args.calibrate
            try:
                fn = fn.format(mc)
            except Exception:
                pass
            if comm.world_rank == 0:
                print('Calibrating with {}'.format(fn), flush=True)
            calibrator = tp.OpCalibPlanck(signal_in=tod_name,
                                          signal_out='noise', file_gain=fn)
            calibrator.exec(data)
            if mpiworld is not None:
                mpiworld.barrier()
            if comm.world_rank == 0:
                timer.report_clear("MC {}: Calibrate".format(mc))
            tod_name = 'noise'

        # Subtract the dipole and straylight
        if do_dipole:
            subtractor = tp.OpCacheMath(in1=tod_name, in2='dipole',
                                        subtract=True, out='noise')
            subtractor.exec(data)
            if mpiworld is not None:
                mpiworld.barrier()
            if comm.world_rank == 0:
                timer.report_clear("MC {}: Subtract dipole".format(mc))
            tod_name = 'noise'

        if do_fsl:
            subtractor = tp.OpCacheMath(in1=tod_name, in2='fsl',
                                        subtract=True, out='noise')
            subtractor.exec(data)
            if mpiworld is not None:
                mpiworld.barrier()
            if comm.world_rank == 0:
                timer.report_clear("MC {}: Subtract straylight".format(mc))
            tod_name = 'noise'

        # Make the map
        if not args.skip_madam:
            # Make maps
            if madam is None:
                try:
                    madam = toast.todmap.OpMadam(params=pars,
                                                 detweights=detweights,
                                                 purge_tod=True,
                                                 name=tod_name,
                                                 apply_flags=False,
                                                 name_out=None,
                                                 noise='noise',
                                                 mcmode=madam_mcmode,
                                                 translate_timestamps=False)
                except Exception as e:
                    raise Exception(
                        '{:4} : ERROR: failed to initialize Madam: '
                        '{}'.format(comm.world_rank, e))
            madam.params['path_output'] = out
            madam.exec(data)
            if mpiworld is not None:
                mpiworld.barrier()
            if comm.world_rank == 0:
                timer.report_clear("MC {}: Mapmaking".format(mc))

    gt.stop_all()
    if mpiworld is not None:
        mpiworld.barrier()
    timer = Timer()
    timer.start()
    alltimers = gather_timers(comm=mpiworld)
    if comm.world_rank == 0:
        out = os.path.join(args.out, "timing")
        dump_timing(alltimers, out)
        timer.stop()
        timer.report("Gather and dump timing info")
    return
def main():
    """Entry point: on-the-fly beam convolution (conviqt) + Madam mapmaking.

    Parses command-line options, builds a single Planck Exchange-format
    observation, convolves the sky with per-detector beams, optionally
    writes the convolved TOD, and (unless --skip_madam) runs Madam.
    """
    log = Logger.get()
    gt = GlobalTimers.get()
    gt.start("toast_planck_reduce (total)")

    mpiworld, procs, rank, comm = get_comm()

    # This is the 2-level toast communicator. By default,
    # there is just one group which spans MPI_COMM_WORLD.
    comm = toast.Comm()

    if comm.comm_world.rank == 0:
        print(
            "Running with {} processes at {}".format(
                procs, str(datetime.datetime.now())
            )
        )

    parser = argparse.ArgumentParser(
        description="Simple on-the-fly signal convolution + MADAM Mapmaking",
        fromfile_prefix_chars="@",
    )
    # NOTE: argparse types use builtin int/float; the old np.int/np.float
    # aliases were removed in NumPy 1.24 and crashed at startup.
    parser.add_argument("--lmax", required=True, type=int, help="Simulation lmax")
    parser.add_argument(
        "--fwhm", required=True, type=float, help="Sky fwhm [arcmin] to deconvolve"
    )
    parser.add_argument("--beammmax", required=True, type=int, help="Beam mmax")
    parser.add_argument("--order", default=11, type=int, help="Iteration order")
    parser.add_argument(
        "--pxx",
        required=False,
        default=False,
        action="store_true",
        help="Beams are in Pxx frame, not Dxx",
    )
    parser.add_argument(
        "--normalize",
        required=False,
        default=False,
        action="store_true",
        help="Normalize the beams",
    )
    parser.add_argument(
        "--skyfile",
        required=True,
        help="Path to sky alm files. Tag DETECTOR will be "
        "replaced with detector name.",
    )
    parser.add_argument(
        "--remove_monopole",
        required=False,
        default=False,
        action="store_true",
        help="Remove the sky monopole before convolution",
    )
    parser.add_argument(
        "--remove_dipole",
        required=False,
        default=False,
        action="store_true",
        help="Remove the sky dipole before convolution",
    )
    parser.add_argument(
        "--beamfile",
        required=True,
        help="Path to beam alm files. Tag DETECTOR will be "
        "replaced with detector name.",
    )
    parser.add_argument("--rimo", required=True, help="RIMO file")
    parser.add_argument("--freq", required=True, type=int, help="Frequency")
    parser.add_argument(
        "--dets", required=False, default=None, help="Detector list (comma separated)"
    )
    parser.add_argument(
        "--effdir", required=True, help="Input Exchange Format File directory"
    )
    parser.add_argument(
        "--effdir_pntg",
        required=False,
        help="Input Exchange Format File directory " "for pointing",
    )
    parser.add_argument(
        "--effdir_out", required=False, help="Output directory for convolved TOD"
    )
    parser.add_argument(
        "--obtmask", required=False, default=1, type=int, help="OBT flag mask"
    )
    parser.add_argument(
        "--flagmask", required=False, default=1, type=int, help="Quality flag mask"
    )
    parser.add_argument("--ringdb", required=True, help="Ring DB file")
    parser.add_argument(
        "--odfirst", required=False, default=None, type=int, help="First OD to use"
    )
    parser.add_argument(
        "--odlast", required=False, default=None, type=int, help="Last OD to use"
    )
    parser.add_argument(
        "--ringfirst",
        required=False,
        default=None,
        type=int,
        help="First ring to use",
    )
    parser.add_argument(
        "--ringlast", required=False, default=None, type=int, help="Last ring to use"
    )
    parser.add_argument(
        "--obtfirst",
        required=False,
        default=None,
        type=float,
        help="First OBT to use",
    )
    parser.add_argument(
        "--obtlast", required=False, default=None, type=float, help="Last OBT to use"
    )
    parser.add_argument("--madam_prefix", required=False, help="map prefix")
    parser.add_argument(
        "--madampar", required=False, default=None, help="Madam parameter file"
    )
    parser.add_argument(
        "--obtmask_madam", required=False, type=int, help="OBT flag mask for Madam"
    )
    parser.add_argument(
        "--flagmask_madam",
        required=False,
        type=int,
        help="Quality flag mask for Madam",
    )
    parser.add_argument(
        "--skip_madam",
        required=False,
        default=False,
        action="store_true",
        help="Do not run Madam on the convolved timelines",
    )
    parser.add_argument("--out", required=False, default=".", help="Output directory")

    try:
        args = parser.parse_args()
    except SystemExit:
        sys.exit(0)

    timer = Timer()
    timer.start()

    odrange = None
    if args.odfirst is not None and args.odlast is not None:
        odrange = (args.odfirst, args.odlast)

    ringrange = None
    if args.ringfirst is not None and args.ringlast is not None:
        ringrange = (args.ringfirst, args.ringlast)

    obtrange = None
    if args.obtfirst is not None and args.obtlast is not None:
        obtrange = (args.obtfirst, args.obtlast)

    detectors = None
    if args.dets is not None:
        detectors = re.split(",", args.dets)

    # This is the distributed data, consisting of one or
    # more observations, each distributed over a communicator.
    data = toast.Data(comm)

    # Ensure output directory exists
    if not os.path.isdir(args.out) and comm.comm_world.rank == 0:
        os.makedirs(args.out)

    # Read in madam parameter file
    # Allow more than one entry, gather into a list
    repeated_keys = ["detset", "detset_nopol", "survey"]
    pars = {}
    if comm.comm_world.rank == 0:
        # Defaults, possibly overridden by the parameter file below
        pars["kfirst"] = False
        pars["temperature_only"] = True
        pars["base_first"] = 60.0
        pars["nside_map"] = 512
        pars["nside_cross"] = 512
        pars["nside_submap"] = 16
        pars["write_map"] = False
        pars["write_binmap"] = True
        pars["write_matrix"] = False
        pars["write_wcov"] = False
        pars["write_hits"] = True
        pars["kfilter"] = False
        pars["info"] = 3
        if args.madampar:
            pat = re.compile(r"\s*(\S+)\s*=\s*(\S+(\s+\S+)*)\s*")
            comment = re.compile(r"^#.*")
            with open(args.madampar, "r") as f:
                for line in f:
                    if not comment.match(line):
                        result = pat.match(line)
                        if result:
                            key, value = result.group(1), result.group(2)
                            if key in repeated_keys:
                                if key not in pars:
                                    pars[key] = []
                                pars[key].append(value)
                            else:
                                pars[key] = value
        # Command line parameters override the ones in the madam parameter file
        if "file_root" not in pars:
            pars["file_root"] = "madam"
        if args.madam_prefix is not None:
            pars["file_root"] = args.madam_prefix
        sfreq = "{:03}".format(args.freq)
        if sfreq not in pars["file_root"]:
            pars["file_root"] += "_" + sfreq
        try:
            fsample = {30: 32.51, 44: 46.55, 70: 78.77}[args.freq]
        except Exception:
            # HFI frequencies all share a single sampling rate
            fsample = 180.3737
        pars["fsample"] = fsample
        pars["path_output"] = args.out

        print("All parameters:")
        print(args, flush=True)

    pars = comm.comm_world.bcast(pars, root=0)

    memreport("after parameters", MPI.COMM_WORLD)

    # madam only supports a single observation.  Normally
    # we would have multiple observations with some subset
    # assigned to each process group.

    # create the TOD for this observation
    tod = tp.Exchange(
        comm=comm.comm_group,
        detectors=detectors,
        ringdb=args.ringdb,
        effdir_in=args.effdir,
        effdir_pntg=args.effdir_pntg,
        obt_range=obtrange,
        ring_range=ringrange,
        od_range=odrange,
        freq=args.freq,
        RIMO=args.rimo,
        obtmask=args.obtmask,
        flagmask=args.flagmask,
        do_eff_cache=False,
    )

    # normally we would get the intervals from somewhere else, but since
    # the Exchange TOD already had to get that information, we can
    # get it from there.

    ob = {}
    ob["name"] = "mission"
    ob["id"] = 0
    ob["tod"] = tod
    ob["intervals"] = tod.valid_intervals
    ob["baselines"] = None
    ob["noise"] = tod.noise

    # Add the bare minimum focal plane information for the conviqt operator
    focalplane = {}
    for det in tod.detectors:
        if args.pxx:
            # Beam is in the polarization basis.
            # No extra rotations are needed
            psipol = tod.rimo[det].psi_pol
        else:
            # Beam is in the detector basis. Convolver needs to remove
            # the last rotation into the polarization sensitive frame.
            psipol = tod.rimo[det].psi_uv + tod.rimo[det].psi_pol
        focalplane[det] = {
            "pol_leakage": tod.rimo[det].epsilon,
            "pol_angle_deg": psipol,
        }
    ob["focalplane"] = focalplane

    data.obs.append(ob)

    comm.comm_world.barrier()
    if comm.comm_world.rank == 0:
        timer.report_clear("Metadata queries")

    loader = tp.OpInputPlanck(
        commonflags_name="common_flags", flags_name="flags", margin=0
    )
    loader.exec(data)

    comm.comm_world.barrier()
    if comm.comm_world.rank == 0:
        timer.report_clear("Data read and cache")
        tod.cache.report()

    memreport("after loading", mpiworld)

    # make a planck Healpix pointing matrix
    mode = "IQU"
    # NOTE(review): defaults set temperature_only to bool True, but a madam
    # parameter file yields the string "T" -- only the string triggers I-mode.
    if pars["temperature_only"] == "T":
        mode = "I"
    nside = int(pars["nside_map"])
    pointing = tp.OpPointingPlanck(
        nside=nside,
        mode=mode,
        RIMO=tod.RIMO,
        margin=0,
        apply_flags=False,
        keep_vel=False,
        keep_pos=False,
        keep_phase=False,
        keep_quats=True,
    )
    pointing.exec(data)

    comm.comm_world.barrier()
    if comm.comm_world.rank == 0:
        timer.report_clear("Pointing Matrix took, mode = {}".format(mode))

    memreport("after pointing", mpiworld)

    # simulate the TOD by convolving the sky with the beams
    if comm.comm_world.rank == 0:
        print("Convolving TOD", flush=True)

    for pattern in args.beamfile.split(","):
        skyfiles = {}
        beamfiles = {}
        for det in tod.detectors:
            freq = "{:03}".format(tp.utilities.det2freq(det))
            if "LFI" in det:
                psmdet = "{}_{}".format(freq, det[3:])
                if det.endswith("M"):
                    arm = "y"
                else:
                    arm = "x"
                graspdet = "{}_{}_{}".format(freq[1:], det[3:5], arm)
            else:
                psmdet = det.replace("-", "_")
                graspdet = det
            skyfile = (
                args.skyfile.replace("FREQ", freq)
                .replace("PSMDETECTOR", psmdet)
                .replace("DETECTOR", det)
            )
            skyfiles[det] = skyfile
            beamfile = pattern.replace("GRASPDETECTOR", graspdet).replace(
                "DETECTOR", det
            )
            beamfiles[det] = beamfile
            if comm.comm_world.rank == 0:
                print("Convolving {} with {}".format(skyfile, beamfile), flush=True)

        conviqt = OpSimConviqt(
            comm.comm_world,
            skyfiles,
            beamfiles,
            lmax=args.lmax,
            beammmax=args.beammmax,
            pol=True,
            fwhm=args.fwhm,
            order=args.order,
            calibrate=True,
            dxx=True,
            out="conviqt_tod",
            apply_flags=False,
            remove_monopole=args.remove_monopole,
            remove_dipole=args.remove_dipole,
            verbosity=1,
            normalize_beam=args.normalize,
        )
        conviqt.exec(data)

    comm.comm_world.barrier()
    if comm.comm_world.rank == 0:
        timer.report_clear("Convolution")

    memreport("after conviqt", mpiworld)

    if args.effdir_out is not None:
        if comm.comm_world.rank == 0:
            print("Writing TOD", flush=True)
        tod.set_effdir_out(args.effdir_out, None)
        writer = tp.OpOutputPlanck(
            signal_name="conviqt_tod",
            flags_name="flags",
            commonflags_name="common_flags",
        )
        writer.exec(data)
        comm.comm_world.barrier()
        if comm.comm_world.rank == 0:
            timer.report_clear("Conviqt output")
        memreport("after writing", mpiworld)

    # for now, we pass in the noise weights from the RIMO.
    detweights = {}
    for d in tod.detectors:
        net = tod.rimo[d].net
        fsample = tod.rimo[d].fsample
        detweights[d] = 1.0 / (fsample * net * net)

    if not args.skip_madam:
        if comm.comm_world.rank == 0:
            print("Calling Madam", flush=True)
        try:
            # Madam-specific flag masks fall back to the read-time masks
            if args.obtmask_madam is None:
                obtmask = args.obtmask
            else:
                obtmask = args.obtmask_madam
            if args.flagmask_madam is None:
                flagmask = args.flagmask
            else:
                flagmask = args.flagmask_madam
            madam = OpMadam(
                params=pars,
                detweights=detweights,
                name="conviqt_tod",
                flag_name="flags",
                purge=True,
                name_out="madam_tod",
                common_flag_mask=obtmask,
                flag_mask=flagmask,
            )
        except Exception as e:
            raise Exception(
                "{:4} : ERROR: failed to initialize Madam: {}".format(
                    comm.comm_world.rank, e
                )
            )
        madam.exec(data)

        comm.comm_world.barrier()
        if comm.comm_world.rank == 0:
            # Fixed: old label carried a stray, never-filled "{:.3f}"
            # placeholder; report_clear prints the elapsed time itself.
            timer.report_clear("Madam")
        memreport("after madam", mpiworld)

    gt.stop_all()
    if mpiworld is not None:
        mpiworld.barrier()
    timer = Timer()
    timer.start()
    alltimers = gather_timers(comm=mpiworld)
    if comm.world_rank == 0:
        out = os.path.join(args.out, "timing")
        dump_timing(alltimers, out)
        timer.stop()
        timer.report("Gather and dump timing info")
    return
def simulate_sky_signal(args, comm, data, focalplanes, signalname=None, mc=0):
    """ Use PySM to simulate smoothed sky signal.

    Args:
        args : pipeline arguments; args.pysm_model is a comma-separated list
            of model tags.  Tags starting with "SO" are resolved through
            so_pysm_models; everything else is passed to PySM directly.
        comm : toast.Comm (or None for serial use).
        data : toast.Data to fill.
        focalplanes (list) : focalplane dictionaries for beam smoothing.
        signalname (str) : cache prefix for the output signal; defaults
            to "pysmsignal".
        mc (int) : Monte Carlo index (currently unused here).

    Returns:
        str : the cache name the simulated signal was written to.
    """
    log = Logger.get()
    timer = Timer()
    timer.start()
    # Convolve a signal TOD from PySM
    if comm.world_rank == 0:
        log.info("Simulating sky signal with PySM")

    # Distribute the sky model over the processes with the same rank
    # in each group (toast "rank" communicator).
    map_dist = (
        None if comm is None
        else pysm.MapDistribution(nside=args.nside, mpi_comm=comm.comm_rank))
    pysm_component_objects = []
    pysm_model = []
    for model_tag in args.pysm_model.split(","):
        if not model_tag.startswith("SO"):
            # Plain PySM model string
            pysm_model.append(model_tag)
        else:
            if so_pysm_models is None:
                raise RuntimeError(
                    "{} requires so_pysm_models".format(model_tag))
            if model_tag == "SO_x1_cib":
                pysm_component_objects.append(
                    so_pysm_models.WebSkyCIB(
                        websky_version="0.3",
                        interpolation_kind="linear",
                        nside=args.nside,
                        map_dist=map_dist,
                    ))
            elif model_tag == "SO_x1_ksz":
                pysm_component_objects.append(
                    so_pysm_models.WebSkySZ(
                        version="0.3",
                        nside=args.nside,
                        map_dist=map_dist,
                        sz_type="kinetic",
                    ))
            elif model_tag == "SO_x1_tsz":
                pysm_component_objects.append(
                    so_pysm_models.WebSkySZ(
                        version="0.3",
                        nside=args.nside,
                        map_dist=map_dist,
                        sz_type="thermal",
                    ))
            elif model_tag.startswith("SO_x1_cmb"):
                # Tag may encode lensing and solar-dipole options
                lensed = "unlensed" not in model_tag
                include_solar_dipole = "solar" in model_tag
                pysm_component_objects.append(
                    so_pysm_models.WebSkyCMBMap(
                        websky_version="0.3",
                        lensed=lensed,
                        include_solar_dipole=include_solar_dipole,
                        seed=1,
                        nside=args.nside,
                        map_dist=map_dist,
                    ))
            else:
                # High-resolution variants carry an "s" suffix
                if not model_tag.endswith("s") and args.nside > 512:
                    model_tag += "s"
                pysm_component_objects.append(
                    so_pysm_models.get_so_models(
                        model_tag, args.nside, map_dist=map_dist))

    if signalname is None:
        signalname = "pysmsignal"
    op_sim_pysm = OpSimPySM(
        data,
        comm=comm.comm_rank,
        out=signalname,
        pysm_model=pysm_model,
        pysm_component_objects=pysm_component_objects,
        focalplanes=focalplanes,
        apply_beam=args.pysm_apply_beam,
        coord="G",  # setting G doesn't perform any rotation
        map_dist=map_dist,
    )

    # Fixed: was `args.coord in "CQ"`, a substring test that also accepted
    # the empty string; use explicit membership instead.
    assert args.coord in ("C", "Q"), \
        "Input SO models are always in Equatorial coordinates"

    op_sim_pysm.exec(data)
    if comm.comm_world is not None:
        comm.comm_world.barrier()
    timer.stop()
    if comm.world_rank == 0:
        timer.report("PySM")

    return signalname
def _observe_sso(self, sso_az, sso_el, sso_dist, sso_dia, tod, comm, prefix,
                 focalplane):
    """ Observe the SSO with each detector in tod

    Adds the beam-convolved solar-system-object signal to the cached
    output timestream of every local detector.

    Args:
        sso_az, sso_el : SSO azimuth/elevation per sample [radians].
        sso_dist : SSO distance (unused here; kept for interface).
        sso_dia : SSO angular diameter, passed to the beam model.
        tod : TOD object providing pointing and the sample cache.
        comm : MPI communicator or None.
        prefix : log/telescope prefix; also used to pick the band.
        focalplane : per-detector properties (bandpass, etc.).
    """
    log = Logger.get()
    rank = 0
    if comm is not None:
        rank = comm.rank
    tmr = Timer()
    if self._report_timing:
        if comm is not None:
            comm.Barrier()
        tmr.start()

    nsamp = tod.local_samples[1]

    if rank == 0:
        log.info("{}Observing the SSO signal".format(prefix))

    # FIXME: we should get the center frequency from the bandpass
    band_dict = {'f030': 27, 'f040': 39, 'f090': 93, 'f150': 145,
                 'f230': 225, 'f290': 285}
    # Fixed: freq must be initialized -- if no band matches the prefix the
    # later `if freq is None` check would otherwise raise NameError.
    freq = None
    for band in band_dict.keys():
        if band in prefix:
            # FIXME we use the same, approximate center frequency for
            # SAT and LAT
            # Fixed: was band_dict[band[4:]]; the keys are 4 characters
            # long, so band[4:] is always "" and raised KeyError.
            freq = band_dict[band]
            break

    for det in tod.local_dets:
        # Cache the output signal
        cachename = "{}_{}".format(self._out, det)
        if tod.cache.exists(cachename):
            ref = tod.cache.reference(cachename)
        else:
            ref = tod.cache.create(cachename, np.float64, (nsamp,))
        try:
            # Some TOD classes provide a shortcut to Az/El
            az, el = tod.read_azel(detector=det)
        except Exception as e:
            azelquat = tod.read_pntg(detector=det, azel=True)
            # Convert Az/El quaternion of the detector back into
            # angles for the simulation.
            theta, phi = qa.to_position(azelquat)
            # Azimuth is measured in the opposite direction
            # than longitude
            az = 2 * np.pi - phi
            el = np.pi / 2 - theta

        if "bandpass_transmission" in focalplane[det]:
            # We have full bandpasses for the detector
            bandpass_freqs = focalplane[det]["bandpass_freq_ghz"]
            bandpass = focalplane[det]["bandpass_transmission"]
        else:
            if "bandcenter_ghz" in focalplane[det]:
                # Use detector bandpass from the focalplane
                center = focalplane[det]["bandcenter_ghz"]
                width = focalplane[det]["bandwidth_ghz"]
            else:
                # Use default values for the entire focalplane
                if freq is None:
                    raise RuntimeError(
                        "You must supply the nominal frequency if bandpasses "
                        "are not available"
                    )
                center = freq
                width = 0.2 * freq
            # Two-point top-hat bandpass
            bandpass_freqs = np.array([center - width / 2, center + width / 2])
            bandpass = np.ones(2)

        nstep = 1001
        fmin, fmax = bandpass_freqs[0], bandpass_freqs[-1]
        det_freqs = np.linspace(fmin, fmax, nstep)
        det_bandpass = np.interp(det_freqs, bandpass_freqs, bandpass)
        det_bandpass /= np.sum(det_bandpass)

        # Bandpass-averaged SSO brightness temperature for this detector
        self._get_planet_temp(self.sso_name)
        ttemp_det = np.interp(det_freqs, self.t_freqs, self.ttemp)
        ttemp_det = np.sum(ttemp_det * det_bandpass)

        beam, radius = self._get_beam_map(det, sso_dia, ttemp_det)

        # Interpolate the beam map at appropriate locations
        x = (az - sso_az) * np.cos(el)
        y = el - sso_el
        r = np.sqrt(x ** 2 + y ** 2)
        good = r < radius
        sig = beam(x[good], y[good], grid=False)
        ref[:][good] += sig

        del ref, sig, beam

    if self._report_timing:
        if comm is not None:
            comm.Barrier()
        if rank == 0:
            tmr.stop()
            tmr.report("{}OpSimSSO: Observe signal".format(prefix))
    return
def main():
    """Entry point for toast_satellite_sim: simulate sky + noise and map it.

    Builds observations, expands pointing, simulates sky/dipole signal once,
    then loops over Monte Carlo realizations adding noise and running the
    mapmaker (Madam or the native mapmaker) for each realization.
    """
    env = Environment.get()
    log = Logger.get()
    gt = GlobalTimers.get()
    gt.start("toast_satellite_sim (total)")
    timer0 = Timer()
    timer0.start()

    mpiworld, procs, rank, comm = pipeline_tools.get_comm()

    args, comm, groupsize = parse_arguments(comm, procs)

    # Parse options
    tmr = Timer()
    tmr.start()

    if comm.world_rank == 0:
        os.makedirs(args.outdir, exist_ok=True)

    focalplane, gain, detweights = load_focalplane(args, comm)
    if comm.world_rank == 0:
        tmr.report_clear("Load focalplane")

    data = create_observations(args, comm, focalplane, groupsize)
    if comm.world_rank == 0:
        tmr.report_clear("Create observations")

    pipeline_tools.expand_pointing(args, comm, data)
    if comm.world_rank == 0:
        tmr.report_clear("Expand pointing")

    signalname = None
    if args.pysm_model:
        skyname = pipeline_tools.simulate_sky_signal(
            args, comm, data, [focalplane], "signal"
        )
    else:
        skyname = pipeline_tools.scan_sky_signal(args, comm, data, "signal")
    if skyname is not None:
        signalname = skyname
    if comm.world_rank == 0:
        tmr.report_clear("Simulate sky signal")

    # NOTE: Conviqt could use different input file names for different
    # Monte Carlo indices, but the operator would need to be invoked within
    # the Monte Carlo loop.
    skyname = pipeline_tools.apply_conviqt(
        args,
        comm,
        data,
        "signal",
        mc=args.MC_start,
    )
    if skyname is not None:
        signalname = skyname
    if comm.world_rank == 0:
        tmr.report_clear("Apply beam convolution")

    diponame = pipeline_tools.simulate_dipole(args, comm, data, "signal")
    if diponame is not None:
        signalname = diponame
    if comm.world_rank == 0:
        tmr.report_clear("Simulate dipole")

    # in debug mode, print out data distribution information
    # (Fixed: this block was duplicated verbatim; the second copy only
    # re-wrote the same distdata.txt file and is removed.)
    if args.debug:
        handle = None
        if comm.world_rank == 0:
            handle = open(os.path.join(args.outdir, "distdata.txt"), "w")
        data.info(handle)
        if comm.world_rank == 0:
            handle.close()
        if comm.comm_world is not None:
            comm.comm_world.barrier()
        if comm.world_rank == 0:
            tmr.report_clear("Dumping data distribution")

    # Mapmaking.
    if args.use_madam:
        # Initialize madam parameters
        madampars = pipeline_tools.setup_madam(args)
        if comm.comm_world is not None:
            comm.comm_world.barrier()
        if comm.world_rank == 0:
            tmr.report_clear("Initialize madam map-making")

    # Loop over Monte Carlos
    firstmc = args.MC_start
    nmc = args.MC_count
    for mc in range(firstmc, firstmc + nmc):
        mctmr = Timer()
        mctmr.start()

        # create output directory for this realization
        outpath = os.path.join(args.outdir, "mc_{:03d}".format(mc))

        pipeline_tools.simulate_noise(
            args, comm, data, mc, "tot_signal", overwrite=True
        )
        if comm.comm_world is not None:
            comm.comm_world.barrier()
        if comm.world_rank == 0:
            tmr.report_clear(" Simulate noise {:04d}".format(mc))

        # add sky signal
        pipeline_tools.add_signal(args, comm, data, "tot_signal", signalname)
        if comm.comm_world is not None:
            comm.comm_world.barrier()
        if comm.world_rank == 0:
            tmr.report_clear(" Add sky signal {:04d}".format(mc))

        if gain is not None:
            op_apply_gain = OpApplyGain(gain, name="tot_signal")
            op_apply_gain.exec(data)
            if comm.comm_world is not None:
                comm.comm_world.barrier()
            if comm.world_rank == 0:
                tmr.report_clear(" Apply gains {:04d}".format(mc))

        if mc == firstmc:
            # For the first realization, optionally export the
            # timestream data.  If we had observation intervals defined,
            # we could pass "use_interval=True" to the export operators,
            # which would ensure breaks in the exported data at
            # acceptable places.
            pipeline_tools.output_tidas(args, comm, data, "tot_signal")
            pipeline_tools.output_spt3g(args, comm, data, "tot_signal")
            if comm.comm_world is not None:
                comm.comm_world.barrier()
            if comm.world_rank == 0:
                tmr.report_clear(" Write TOD snapshot {:04d}".format(mc))

        if args.use_madam:
            pipeline_tools.apply_madam(
                args, comm, data, madampars, outpath, detweights, "tot_signal"
            )
        else:
            pipeline_tools.apply_mapmaker(args, comm, data, outpath, "tot_signal")

        if comm.comm_world is not None:
            comm.comm_world.barrier()
        if comm.world_rank == 0:
            tmr.report_clear(" Map-making {:04d}".format(mc))

        if comm.comm_world is not None:
            comm.comm_world.barrier()
        if comm.world_rank == 0:
            mctmr.report_clear(" Monte Carlo loop {:04d}".format(mc))

    gt.stop_all()
    if comm.comm_world is not None:
        comm.comm_world.barrier()
    tmr.stop()
    tmr.clear()
    tmr.start()
    alltimers = gather_timers(comm=comm.comm_world)
    if comm.world_rank == 0:
        out = os.path.join(args.outdir, "timing")
        dump_timing(alltimers, out)
        tmr.stop()
        tmr.report("Gather and dump timing info")
        timer0.report_clear("toast_satellite_sim.py")
    return
def main():
    """Entry point: accumulate polarization moment maps from Planck data.

    Builds one or more Exchange-format observations from the requested
    OD/ring/OBT ranges and runs OpPolMomentsPlanck to accumulate and save
    the spin moment maps.
    """
    log = Logger.get()
    gt = GlobalTimers.get()
    gt.start("toast_planck_reduce (total)")

    mpiworld, procs, rank, comm = get_comm()

    # This is the 2-level toast communicator. By default,
    # there is just one group which spans MPI_COMM_WORLD.
    comm = toast.Comm()

    if comm.world_rank == 0:
        print("Running with {} processes at {}".format(
            procs, str(datetime.datetime.now())))

    parser = argparse.ArgumentParser(
        description='Accumulate polarization moments',
        fromfile_prefix_chars='@')
    # NOTE: argparse types use builtin int/float; the old np.int/np.float
    # aliases were removed in NumPy 1.24 and crashed at startup.
    parser.add_argument('--rimo', required=True, help='RIMO file')
    parser.add_argument('--freq', required=True, type=int, help='Frequency')
    parser.add_argument('--nside', required=False, type=int,
                        default=512, help='Map resolution')
    parser.add_argument('--smax', required=False, type=int,
                        default=6, help='Highest moment')
    parser.add_argument('--debug', dest='debug', default=False,
                        action='store_true',
                        help='Write data distribution info to file')
    parser.add_argument('--dets', required=False, default=None,
                        help='Detector list (comma separated)')
    parser.add_argument('--effdir', required=True,
                        help='Input Exchange Format File directory')
    parser.add_argument('--effdir_in_diode0', required=False, default=None,
                        help='Input Exchange Format File directory, '
                        'LFI diode 0')
    parser.add_argument('--effdir_in_diode1', required=False, default=None,
                        help='Input Exchange Format File directory, '
                        'LFI diode 1')
    parser.add_argument('--effdir_pntg', required=False,
                        help='Input Exchange Format File directory '
                        'for pointing')
    parser.add_argument('--obtmask', required=False, default=1, type=int,
                        help='OBT flag mask')
    parser.add_argument('--flagmask', required=False, default=1, type=int,
                        help='Quality flag mask')
    parser.add_argument('--pntflagmask', required=False, default=0, type=int,
                        help='Pointing flag mask')
    parser.add_argument('--ringdb', required=True, help='Ring DB file')
    parser.add_argument('--odfirst', required=False, default=None, type=int,
                        help='First OD to use')
    parser.add_argument('--odlast', required=False, default=None, type=int,
                        help='Last OD to use')
    parser.add_argument('--ringfirst', required=False, default=None,
                        help='First ring to use (can be a list)')
    parser.add_argument('--ringlast', required=False, default=None,
                        help='Last ring to use (can be a list)')
    parser.add_argument('--obtfirst', required=False, default=None,
                        type=float, help='First OBT to use')
    parser.add_argument('--obtlast', required=False, default=None,
                        type=float, help='Last OBT to use')
    parser.add_argument('--out', required=False, default='.',
                        help='Output directory')
    parser.add_argument('--prefix', required=False, default='spins',
                        help='map prefix')

    try:
        args = parser.parse_args()
    except SystemExit:
        sys.exit(0)

    if comm.world_rank == 0:
        print('All parameters:')
        print(args, flush=True)

    timer = Timer()
    timer.start()

    # The OD/ring/OBT limits may each be comma-separated lists; every
    # (first, last) pair becomes one observation range.
    nrange = 1

    odranges = None
    if args.odfirst is not None and args.odlast is not None:
        odranges = []
        firsts = [int(i) for i in str(args.odfirst).split(',')]
        lasts = [int(i) for i in str(args.odlast).split(',')]
        for odfirst, odlast in zip(firsts, lasts):
            odranges.append((odfirst, odlast))
        nrange = len(odranges)

    ringranges = None
    if args.ringfirst is not None and args.ringlast is not None:
        ringranges = []
        firsts = [int(i) for i in str(args.ringfirst).split(',')]
        lasts = [int(i) for i in str(args.ringlast).split(',')]
        for ringfirst, ringlast in zip(firsts, lasts):
            ringranges.append((ringfirst, ringlast))
        nrange = len(ringranges)

    obtranges = None
    if args.obtfirst is not None and args.obtlast is not None:
        obtranges = []
        firsts = [float(i) for i in str(args.obtfirst).split(',')]
        lasts = [float(i) for i in str(args.obtlast).split(',')]
        for obtfirst, obtlast in zip(firsts, lasts):
            obtranges.append((obtfirst, obtlast))
        nrange = len(obtranges)

    if odranges is None:
        odranges = [None] * nrange

    if ringranges is None:
        ringranges = [None] * nrange

    if obtranges is None:
        obtranges = [None] * nrange

    detectors = None
    if args.dets is not None:
        detectors = re.split(',', args.dets)

    # create the TOD for this observation
    tods = []
    for obtrange, ringrange, odrange in zip(obtranges, ringranges, odranges):
        tods.append(
            tp.Exchange(comm=comm.comm_group,
                        detectors=detectors,
                        ringdb=args.ringdb,
                        effdir_in=args.effdir,
                        effdir_in_diode0=args.effdir_in_diode0,
                        effdir_in_diode1=args.effdir_in_diode1,
                        effdir_pntg=args.effdir_pntg,
                        obt_range=obtrange,
                        ring_range=ringrange,
                        od_range=odrange,
                        freq=args.freq,
                        RIMO=args.rimo,
                        obtmask=args.obtmask,
                        flagmask=args.flagmask,
                        pntflagmask=args.pntflagmask,
                        do_eff_cache=False,
                        noisefile='RIMO'))

    rimo = tods[0].rimo

    # Make output directory
    if not os.path.isdir(args.out) and comm.comm_world.rank == 0:
        os.makedirs(args.out)

    # This is the distributed data, consisting of one or
    # more observations, each distributed over a communicator.
    data = toast.Data(comm)

    for iobs, tod in enumerate(tods):
        ob = {}
        ob['name'] = 'observation{:04}'.format(iobs)
        ob['id'] = 0
        ob['tod'] = tod
        ob['intervals'] = tod.valid_intervals
        ob['baselines'] = None
        ob['noise'] = tod.noise
        data.obs.append(ob)

    if mpiworld is not None:
        mpiworld.barrier()
    if comm.comm_world.rank == 0:
        timer.report_clear("Metadata queries")

    # Accumulate and save the moment maps
    polmoments = tp.OpPolMomentsPlanck(nside=args.nside, RIMO=rimo,
                                       margin=0, keep_vel=False,
                                       keep_pos=False, keep_phase=False,
                                       keep_quats=False, smax=args.smax,
                                       prefix=os.path.join(
                                           args.out, args.prefix))
    polmoments.exec(data)

    if mpiworld is not None:
        mpiworld.barrier()
    if comm.comm_world.rank == 0:
        timer.report_clear("Accumulate moment maps")

    gt.stop_all()
    if mpiworld is not None:
        mpiworld.barrier()
    timer = Timer()
    timer.start()
    alltimers = gather_timers(comm=mpiworld)
    if comm.world_rank == 0:
        out = os.path.join(args.out, "timing")
        dump_timing(alltimers, out)
        timer.stop()
        timer.report("Gather and dump timing info")
    return
def export_TOD(args, comm, data, totalname, schedules, other=None, verbose=True):
    """Export the cached timestreams to so3g files under args.export.

    No-op when args.export is None.  When args.export_key is set, detectors
    are grouped by that focalplane key and each group is written with a
    "<bands>_<key>" prefix; otherwise all detectors share the args.bands
    prefix.

    Args:
        args : pipeline arguments (export, export_key, bands, compress).
        comm : toast communicator wrapper.
        data : distributed toast data.
        totalname (str) : cache name of the signal to export.
        schedules : unused in this function body -- NOTE(review): confirm
            whether it can be dropped from callers or is kept for interface
            symmetry with the other pipeline_tools functions.
        other : optional additional cache names to copy alongside the signal.
        verbose (bool) : log progress on the world root process.
    """
    if args.export is None:
        return

    log = Logger.get()
    timer = Timer()

    # Only import spt3g if we are writing out so3g files
    from spt3g import core as core3g
    from ..export import ToastExport

    path = os.path.abspath(args.export)

    key = args.export_key
    if key is not None:
        # Validate the grouping key before touching any data
        if key not in ALLOWED_KEYS:
            raise RuntimeError(
                f"Cannot export data, --export-key='{key}' not in {ALLOWED_KEYS}"
            )
        prefix = "{}_{}".format(args.bands, key)
        # Group detector names by their value of the chosen focalplane key
        det_groups = {}
        for obs in data.obs:
            for (det_name, det_data) in obs["focalplane"].items():
                value = det_data[key]
                if value not in det_groups:
                    det_groups[value] = []
                det_groups[value].append(det_name)
    else:
        prefix = args.bands
        det_groups = None

    if comm.world_rank == 0 and verbose:
        log.info("Exporting data to directory tree at {}".format(path))

    timer.start()
    export = ToastExport(
        path,
        prefix=prefix,
        use_intervals=True,
        cache_name=totalname,
        cache_copy=other,
        # Samples flagged as turnarounds are masked in the common flags
        mask_flag_common=TODGround.TURNAROUND,
        filesize=2**30,  # target ~1 GiB per output file
        units=core3g.G3TimestreamUnits.Tcmb,
        detgroups=det_groups,
        compress=args.compress,
    )
    export.exec(data)
    if comm.comm_world is not None:
        comm.comm_world.Barrier()
    timer.stop()
    if comm.world_rank == 0 and verbose:
        # NOTE(review): the message hard-codes "total" rather than using
        # totalname -- confirm whether the cache name should be reported.
        timer.report("Wrote simulated data to {}:{}"
                     "".format(path, "total"))

    return
def load_focalplanes(args, comm, schedules, verbose=False):
    """ Attach a focalplane to each of the schedules.

    Args:
        schedules (list) :  List of Schedule instances.  Each schedule
            has two members, telescope and ceslist, a list of CES objects.
    Returns:
        detweights (dict) :  Inverse variance noise weights for every
            detector across all focal planes.  In [K_CMB^-2].  They can be
            used to bin the TOD.
    """
    total_timer = Timer()
    total_timer.start()

    # Build the hardware model and focalplane once; all schedules share it.
    fp_timer = Timer()
    fp_timer.start()
    hw, telescope, det_index = get_hardware(args, comm, verbose=verbose)
    telescope.focalplane = get_focalplane(
        args, comm, hw, det_index, verbose=verbose)
    if verbose and comm.world_rank == 0:
        fp_timer.report_clear("Collect focaplane information")

    # Swap in the shared telescope, but keep the weather object that the
    # observing schedule originally carried.
    for sched in schedules:
        site_weather = sched.telescope.site.weather
        sched.telescope = telescope
        sched.telescope.site.weather = site_weather

    detweights = telescope.focalplane.detweights

    total_timer.stop()
    if verbose and (comm.comm_world is None or comm.world_rank == 0):
        total_timer.report("Loading focalplane")
    return detweights
def _synthesize_map(self, fn_cmb, there):
    """ Synthesize the stored alm expansion into a map and
    place the map in node-shared memory.

    Args:
        fn_cmb (str) : path of the cached CMB map file.
        there (bool) : True when fn_cmb already exists; load it instead
            of synthesizing.
    """
    timer = Timer()
    timer.start()
    if not there:
        # Use libsharp to perform the synthesis across the communicator
        if self._quickpolbeam is None:
            beam = hp.gauss_beam(fwhm=self._fwhm, lmax=self._lmax, pol=True)
            beam = beam[:, 0:3].copy()
        else:
            beam = np.array(hp.read_cl(self._quickpolbeam))
            if beam.ndim == 1:
                beam = np.vstack([beam, beam, beam])
            beam = beam[:, :self._lmax + 1].T.copy()
        # Temperature synthesis (spin 0)
        almT = self._alm[0].reshape(1, 1, -1)
        self._alminfo.almxfl(almT, np.ascontiguousarray(beam[:, 0:1]))
        my_outmap = synthesis(self._grid, self._alminfo, almT, spin=0,
                              comm=self._comm)[0]
        my_outmap = [my_outmap]
        if self._pol:
            # Polarization synthesis (spin 2) from the E/B alm
            almP = self._alm[1:3].reshape(1, 2, -1)
            self._alminfo.almxfl(almP, np.ascontiguousarray(beam[:, (1, 2)]))
            my_outmap.append(
                synthesis(self._grid, self._alminfo, almP, spin=2,
                          comm=self._comm)[0])
        # Discard the a_lm
        del self._alm
        my_outmap = np.vstack(my_outmap)
        my_pixels = self._dist_rings.local_pixels
        # Assemble the full map on the root process.
        # Fixed: dtype was np.float (alias of builtin float, removed in
        # NumPy 1.24); np.float64 is the exact equivalent.
        my_maptemp = np.zeros([self._nnz, self._npix], dtype=np.float64)
        maptemp = np.zeros([self._nnz, self._npix], dtype=np.float64)
        my_maptemp[:, my_pixels] = my_outmap
        self._comm.Reduce(my_maptemp, maptemp)
        del my_maptemp
        maptemp = hp.reorder(maptemp, r2n=True)
        timer.stop()
        if self._global_rank == 0:
            timer.report("synthesize CMB map")
        # Save the CMB map
        # NOTE(review): write_map/print are not rank-guarded here; after the
        # Reduce only the root holds the full map -- confirm intent.
        os.makedirs(CMBCACHE, exist_ok=True)
        header = [("fwhm", np.degrees(self._fwhm), "gaussian smoothing (deg)")]
        hp.write_map(fn_cmb, maptemp, extra_header=header, overwrite=True,
                     nest=True)
        print("CMB map saved in {}".format(fn_cmb), flush=True)
    else:
        if self._global_rank == 0:
            print("Loading cached CMB map from {}".format(fn_cmb), flush=True)
        # Only the communicator root reads the file; MapSampler broadcasts.
        if self._rank == 0:
            maptemp = hp.read_map(fn_cmb, None, nest=True, verbose=False,
                                  dtype=np.float32)
            if not self._pol:
                maptemp = maptemp[0]
        else:
            maptemp = None
    self.mapsampler = MapSampler(
        None,
        pol=self._pol,
        comm=self._comm,
        preloaded_map=maptemp,
        nest=True,
        plug_holes=False,
        use_shmem=True,
    )
    del maptemp
    return
def cache_effdirs(self, effdir_in, effdir_in_diode0, effdir_in_diode1,
                  effdir_out, effdir_dark, effdir_pntg, effdir_fsl,
                  extra_effdirs, effdir_flags):
    """Cache the metadata so we don't need to look for files while
    reading and writing.

    Each directory argument may carry an optional pattern appended
    after PATTERN_SEPARATOR; the pattern is split off and stored in the
    matching `*_pattern` attribute.  The dark, pointing and flag
    directories fall back to `effdir_in` when not supplied.  The root
    process builds the file listings and broadcasts them to all ranks.
    """
    if effdir_in is not None and PATTERN_SEPARATOR in effdir_in:
        self.effdir_in, self.effdir_in_pattern = effdir_in.split(
            PATTERN_SEPARATOR)
    else:
        self.effdir_in, self.effdir_in_pattern = effdir_in, None
    self.effdir_in_diode0 = effdir_in_diode0
    self.effdir_in_diode1 = effdir_in_diode1
    if effdir_out is not None and PATTERN_SEPARATOR in effdir_out:
        self.effdir_out, self.effdir_out_pattern = effdir_out.split(
            PATTERN_SEPARATOR)
    else:
        self.effdir_out, self.effdir_out_pattern = effdir_out, None
    # BUG FIX: removed `self.effdir_out = effdir_out`, which clobbered
    # the pattern-stripped value assigned just above whenever a pattern
    # was attached to `effdir_out`.
    if effdir_dark is not None:
        self.effdir_dark = effdir_dark
    else:
        self.effdir_dark = self.effdir_in
    if effdir_pntg is not None:
        self.effdir_pntg = effdir_pntg
    else:
        self.effdir_pntg = self.effdir_in
    self.effdir_fsl = effdir_fsl
    self.extra_effdirs = extra_effdirs
    if effdir_flags is None:
        # Flags default to the (already pattern-stripped) input dir.
        self.effdir_flags = self.effdir_in
        self.effdir_flags_pattern = self.effdir_in_pattern
    else:
        if PATTERN_SEPARATOR in effdir_flags:
            (self.effdir_flags,
             self.effdir_flags_pattern) = effdir_flags.split(
                 PATTERN_SEPARATOR)
        else:
            (self.effdir_flags,
             self.effdir_flags_pattern) = effdir_flags, None
    if self.rank == 0:
        # Only the root process scans the file system.
        all_effdirs = [
            self.effdir_in, self.effdir_out, self.effdir_pntg,
            self.effdir_dark, self.effdir_fsl, self.effdir_flags,
            self.effdir_in_diode0, self.effdir_in_diode1
        ]
        if self.extra_effdirs is not None:
            for effdir in self.extra_effdirs:
                all_effdirs.append(effdir)
        for effdir in all_effdirs:
            if effdir is None:
                continue
            if effdir in filenames_cache:
                # Already listed in a previous call (module-level cache).
                continue
            print('Building a list of files under {} ...'.format(effdir),
                  end='', flush=True)
            timer = Timer()
            timer.start()
            filenames_cache[effdir] = sorted(list_files(effdir))
            timer.stop()
            timer.report("List files")
    if self.comm is None:
        self.filenames = filenames_cache
    else:
        self.filenames = self.comm.bcast(filenames_cache, root=0)
    return
def create_observations(args, comm):
    """Build the distributed TOAST data object from command line arguments.

    Splits the optional OD/ring/OBT range arguments (comma-separated
    lists) into per-observation ranges, creates one ``tp.Exchange`` TOD
    per range, and wraps each in an observation dictionary.  When a
    noise file other than 'RIMO' is requested, the data are
    redistributed ring-by-ring across single-process groups.

    Returns:
        Data: the populated toast Data object.
    """
    timer = Timer()
    timer.start()
    nrange = 1
    odranges = None
    if args.odfirst is not None and args.odlast is not None:
        odranges = []
        firsts = [int(i) for i in str(args.odfirst).split(',')]
        lasts = [int(i) for i in str(args.odlast).split(',')]
        for odfirst, odlast in zip(firsts, lasts):
            odranges.append((odfirst, odlast))
        nrange = len(odranges)
    ringranges = None
    if args.ringfirst is not None and args.ringlast is not None:
        ringranges = []
        firsts = [int(i) for i in str(args.ringfirst).split(',')]
        lasts = [int(i) for i in str(args.ringlast).split(',')]
        for ringfirst, ringlast in zip(firsts, lasts):
            ringranges.append((ringfirst, ringlast))
        nrange = len(ringranges)
    obtranges = None
    if args.obtfirst is not None and args.obtlast is not None:
        obtranges = []
        firsts = [float(i) for i in str(args.obtfirst).split(',')]
        lasts = [float(i) for i in str(args.obtlast).split(',')]
        for obtfirst, obtlast in zip(firsts, lasts):
            obtranges.append((obtfirst, obtlast))
        nrange = len(obtranges)
    # Pad unspecified range types so all three lists have one entry per
    # observation.
    if odranges is None:
        odranges = [None] * nrange
    if ringranges is None:
        ringranges = [None] * nrange
    if obtranges is None:
        obtranges = [None] * nrange
    detectors = None
    if args.dets is not None:
        detectors = re.split(',', args.dets)
    # create the TOD for this observation
    if args.noisefile != 'RIMO':
        do_eff_cache = True
    else:
        do_eff_cache = False
    tods = []
    for obtrange, ringrange, odrange in zip(obtranges, ringranges, odranges):
        tods.append(
            tp.Exchange(
                comm=comm.comm_group,
                detectors=detectors,
                ringdb=args.ringdb,
                effdir_in=args.effdir,
                effdir_pntg=args.effdir_pntg,
                obt_range=obtrange,
                ring_range=ringrange,
                od_range=odrange,
                freq=args.freq,
                RIMO=args.rimo,
                obtmask=args.obtmask,
                flagmask=args.flagmask,
                pntflagmask=args.pntflagmask,
                do_eff_cache=do_eff_cache,
                coord=args.coord,
            ))
    # NOTE(review): `rimo` is never used in this function — confirm it
    # can be removed.
    rimo = tods[0].rimo
    # Make output directory
    if comm.world_rank == 0:
        os.makedirs(args.out, exist_ok=True)
    if args.noisefile != 'RIMO':
        # We split MPI_COMM_WORLD into single process groups, each of
        # which is assigned one or more observations (rings)
        comm = Comm(groupsize=1)
    # This is the distributed data, consisting of one or
    # more observations, each distributed over a communicator.
    data = Data(comm)
    for iobs, tod in enumerate(tods):
        if args.noisefile != 'RIMO':
            # Use a toast helper method to optimally distribute rings between
            # processes.
            dist = distribute_discrete(tod.ringsizes, comm.world_size)
            my_first_ring, my_n_ring = dist[comm.world_rank]
            for my_ring in range(my_first_ring, my_first_ring + my_n_ring):
                ringtod = tp.Exchange.from_tod(
                    tod, my_ring, comm.comm_group, noisefile=args.noisefile)
                ob = {}
                ob['name'] = 'ring{:05}'.format(ringtod.globalfirst_ring)
                ob['id'] = ringtod.globalfirst_ring
                ob['tod'] = ringtod
                ob['intervals'] = ringtod.valid_intervals
                ob['baselines'] = None
                ob['noise'] = ringtod.noise
                data.obs.append(ob)
        else:
            ob = {}
            ob['name'] = 'observation{:04}'.format(iobs)
            ob['id'] = 0
            ob['tod'] = tod
            ob['intervals'] = tod.valid_intervals
            ob['baselines'] = None
            ob['noise'] = tod.noise
            data.obs.append(ob)
    # NOTE(review): `mpiworld` is not defined in this function —
    # presumably a module-level global set from get_comm(); confirm.
    if mpiworld is not None:
        mpiworld.barrier()
    if comm.world_rank == 0:
        timer.report_clear("Metadata queries")
    return data
def main():
    """Top-level driver for the ground simulation pipeline.

    Parses arguments, builds the observations, simulates signal and
    noise, optionally filters and bins the timestreams, and finally
    runs Madam map-making.  All substantive work is delegated to
    pipeline helper functions.
    """
    # NOTE(review): `log` is unused here — confirm it can be removed.
    log = Logger.get()
    gt = GlobalTimers.get()
    gt.start("toast_ground_sim (total)")
    mpiworld, procs, rank, comm = get_comm()
    # parse_arguments may re-create `comm` with a new group size.
    args, comm = parse_arguments(comm)
    # Initialize madam parameters
    madampars = setup_madam(args)
    # Load and broadcast the schedule file
    schedule = load_schedule(args, comm)[0]
    # load or simulate the focalplane
    detweights = load_focalplane(args, comm, schedule)
    # Create the TOAST data object to match the schedule.  This will
    # include simulating the boresight pointing.
    data = create_observations(args, comm, schedule)
    # Expand boresight quaternions into detector pointing weights and
    # pixel numbers
    expand_pointing(args, comm, data)
    # Scan input map
    signalname = scan_sky_signal(args, comm, data, "signal")
    # Simulate noise
    if signalname is None:
        # No input map was scanned; the noise simulation creates the
        # cache objects under this name.
        signalname = "signal"
    mc = 0
    simulate_noise(args, comm, data, mc, signalname)
    # Set up objects to take copies of the TOD at appropriate times
    signalname_madam, sigcopy_madam, sigclear = setup_sigcopy(
        args, comm, signalname)
    npp, zmap = init_binner(args, comm, data, detweights)
    output_tidas(args, comm, data, signalname)
    outpath = setup_output(args, comm)
    # Make a copy of the signal for Madam
    copy_signal_madam(args, comm, data, sigcopy_madam)
    # Bin unprocessed signal for reference
    apply_binner(args, comm, data, npp, zmap, detweights, outpath,
                 signalname)
    if args.apply_polyfilter or args.apply_groundfilter:
        # Filter signal
        apply_polyfilter(args, comm, data, signalname)
        apply_groundfilter(args, comm, data, signalname)
        # Bin the filtered signal
        apply_binner(
            args,
            comm,
            data,
            npp,
            zmap,
            detweights,
            outpath,
            signalname,
            prefix="filtered",
        )
    # Report cache usage before and after clearing the filtered signal.
    data.obs[0]["tod"].cache.report()
    clear_signal(args, comm, data, sigclear)
    data.obs[0]["tod"].cache.report()
    # Now run Madam on the unprocessed copy of the signal
    if args.use_madam:
        apply_madam(args, comm, data, madampars, outpath, detweights,
                    signalname_madam)
    gt.stop_all()
    if mpiworld is not None:
        mpiworld.barrier()
    timer = Timer()
    timer.start()
    alltimers = gather_timers(comm=mpiworld)
    if comm.world_rank == 0:
        out = os.path.join(args.outdir, "timing")
        dump_timing(alltimers, out)
        timer.report_clear("Gather and dump timing info")
    return
def main():
    """Top-level driver for the ground simulation Monte Carlo pipeline.

    Simulates boresight pointing from observing schedules, then for a
    range of Monte Carlo realizations and observing frequencies
    simulates atmosphere, sky, noise and systematics timestreams,
    optionally filters them, and maps everything with Madam.
    """
    log = Logger.get()
    gt = GlobalTimers.get()
    gt.start("toast_ground_sim (total)")
    timer0 = Timer()
    timer0.start()
    mpiworld, procs, rank, comm = get_comm()
    # parse_arguments may re-create `comm` with a new group size.
    args, comm = parse_arguments(comm)
    # Initialize madam parameters
    madampars = setup_madam(args)
    # Load and broadcast the schedule file
    schedules = load_schedule(args, comm)
    # Load the weather and append to schedules
    load_weather(args, comm, schedules)
    # load or simulate the focalplane
    detweights = load_focalplanes(args, comm, schedules)
    # Create the TOAST data object to match the schedule.  This will
    # include simulating the boresight pointing.
    data, telescope_data = create_observations(args, comm, schedules)
    # Split the communicator for day and season mapmaking
    time_comms = get_time_communicators(args, comm, data)
    # Expand boresight quaternions into detector pointing weights and
    # pixel numbers
    expand_pointing(args, comm, data)
    # Purge the pointing if we are NOT going to export the
    # data to a TIDAS volume
    if (args.tidas is None) and (args.spt3g is None):
        for ob in data.obs:
            tod = ob["tod"]
            tod.free_radec_quats()
    # Prepare auxiliary information for distributed map objects
    _, localsm, subnpix = get_submaps(args, comm, data)
    if args.pysm_model:
        focalplanes = [
            s.telescope.focalplane.detector_data for s in schedules
        ]
        signalname = simulate_sky_signal(args, comm, data, focalplanes,
                                         subnpix, localsm, "signal")
    else:
        signalname = scan_sky_signal(args, comm, data, localsm, subnpix,
                                     "signal")
    # Set up objects to take copies of the TOD at appropriate times
    totalname, totalname_freq = setup_sigcopy(args)
    # Loop over Monte Carlos
    firstmc = args.MC_start
    nsimu = args.MC_count
    freqs = [float(freq) for freq in args.freq.split(",")]
    nfreq = len(freqs)
    for mc in range(firstmc, firstmc + nsimu):
        # Atmosphere is simulated once per realization and shared by
        # all frequencies.
        simulate_atmosphere(args, comm, data, mc, totalname)
        # Loop over frequencies with identical focal planes and identical
        # atmospheric noise.
        for ifreq, freq in enumerate(freqs):
            if comm.world_rank == 0:
                log.info(
                    "Processing frequency {}GHz {} / {}, MC = {}".format(
                        freq, ifreq + 1, nfreq, mc))
            # Make a copy of the atmosphere so we can scramble the gains and apply
            # frequency-dependent scaling.
            copy_signal(args, comm, data, totalname, totalname_freq)
            scale_atmosphere_by_frequency(args, comm, data, freq=freq,
                                          mc=mc,
                                          cache_name=totalname_freq)
            update_atmospheric_noise_weights(args, comm, data, freq, mc)
            # Add previously simulated sky signal to the atmospheric noise.
            add_signal(args, comm, data, totalname_freq, signalname,
                       purge=(nsimu == 1))
            # Offset the realization index per frequency so the noise
            # streams are independent across frequencies.
            mcoffset = ifreq * 1000000
            simulate_noise(args, comm, data, mc + mcoffset, totalname_freq)
            simulate_sss(args, comm, data, mc + mcoffset, totalname_freq)
            scramble_gains(args, comm, data, mc + mcoffset, totalname_freq)
            if (mc == firstmc) and (ifreq == 0):
                # For the first realization and frequency, optionally
                # export the timestream data.
                output_tidas(args, comm, data, totalname)
                output_spt3g(args, comm, data, totalname)
            outpath = setup_output(args, comm, mc + mcoffset, freq)
            # Bin and destripe maps
            apply_madam(
                args,
                comm,
                data,
                madampars,
                outpath,
                detweights,
                totalname_freq,
                freq=freq,
                time_comms=time_comms,
                telescope_data=telescope_data,
                first_call=(mc == firstmc),
            )
            if args.apply_polyfilter or args.apply_groundfilter:
                # Filter signal
                apply_polyfilter(args, comm, data, totalname_freq)
                apply_groundfilter(args, comm, data, totalname_freq)
                # Bin filtered maps
                apply_madam(
                    args,
                    comm,
                    data,
                    madampars,
                    outpath,
                    detweights,
                    totalname_freq,
                    freq=freq,
                    time_comms=time_comms,
                    telescope_data=telescope_data,
                    first_call=False,
                    extra_prefix="filtered",
                    bin_only=True,
                )
    gt.stop_all()
    if mpiworld is not None:
        mpiworld.barrier()
    timer = Timer()
    timer.start()
    alltimers = gather_timers(comm=mpiworld)
    if comm.world_rank == 0:
        out = os.path.join(args.outdir, "timing")
        dump_timing(alltimers, out)
        timer.stop()
        timer.report("Gather and dump timing info")
        timer0.report_clear("toast_ground_sim.py")
    return
def parse_arguments(comm): timer = Timer() log = Logger.get() parser = argparse.ArgumentParser( description="Simulate ground-based boresight pointing. Simulate " "and map astrophysical signal.", fromfile_prefix_chars="@", ) add_dist_args(parser) add_debug_args(parser) add_todground_args(parser) add_pointing_args(parser) add_polyfilter_args(parser) add_groundfilter_args(parser) add_gainscrambler_args(parser) add_noise_args(parser) add_sky_map_args(parser) add_tidas_args(parser) parser.add_argument("--outdir", required=False, default="out", help="Output directory") add_madam_args(parser) add_binner_args(parser) parser.add_argument( "--madam", required=False, action="store_true", help="Use libmadam for map-making", dest="use_madam", ) parser.add_argument( "--no-madam", required=False, action="store_false", help="Do not use libmadam for map-making [default]", dest="use_madam", ) parser.set_defaults(use_madam=False) parser.add_argument( "--focalplane", required=False, default=None, help="Pickle file containing a dictionary of detector " "properties. The keys of this dict are the detector " "names, and each value is also a dictionary with keys " '"quat" (4 element ndarray), "fwhm" (float, arcmin), ' '"fknee" (float, Hz), "alpha" (float), and ' '"NET" (float). For optional plotting, the key "color"' " can specify a valid matplotlib color string.", ) try: args = parser.parse_args() except SystemExit: sys.exit(0) if args.tidas is not None: if not tidas_available: raise RuntimeError("TIDAS not found- cannot export") if comm.comm_world is None or comm.world_rank == 0: log.info("All parameters:") for ag in vars(args): log.info("{} = {}".format(ag, getattr(args, ag))) if args.group_size: comm = Comm(groupsize=args.group_size) if comm.comm_world is None or comm.comm_world.rank == 0: os.makedirs(args.outdir, exist_ok=True) if comm.comm_world is None or comm.world_rank == 0: timer.report_clear("Parsed parameters") return args, comm
def parse_arguments(comm):
    """Parse command line arguments for the atmosphere Monte Carlo
    pipeline.

    Validates export/atmosphere/multi-frequency constraints and
    returns (args, comm); the communicator is re-created when
    --group-size is given.
    """
    timer = Timer()
    timer.start()
    log = Logger.get()
    parser = argparse.ArgumentParser(
        description="Simulate ground-based boresight pointing. Simulate "
        "atmosphere and make maps for some number of noise Monte Carlos.",
        fromfile_prefix_chars="@",
    )
    add_dist_args(parser)
    add_debug_args(parser)
    add_todground_args(parser)
    add_pointing_args(parser)
    add_polyfilter_args(parser)
    add_groundfilter_args(parser)
    add_atmosphere_args(parser)
    add_noise_args(parser)
    add_gainscrambler_args(parser)
    add_madam_args(parser)
    add_sky_map_args(parser)
    add_pysm_args(parser)
    add_sss_args(parser)
    add_tidas_args(parser)
    add_spt3g_args(parser)
    add_mc_args(parser)
    parser.add_argument("--outdir", required=False, default="out",
                        help="Output directory")
    parser.add_argument(
        "--focalplane",
        required=False,
        default=None,
        help="Pickle file containing a dictionary of detector "
        "properties. The keys of this dict are the detector "
        "names, and each value is also a dictionary with keys "
        '"quat" (4 element ndarray), "fwhm" (float, arcmin), '
        '"fknee" (float, Hz), "alpha" (float), and '
        '"NET" (float).',
    )
    parser.add_argument(
        "--freq",
        required=True,
        help="Comma-separated list of frequencies with identical focal planes."
        " They override the bandpasses in the focalplane for the purpose of"
        " scaling the atmospheric signal but not for simulating the sky signal.",
    )
    try:
        args = parser.parse_args()
    except SystemExit:
        # argparse exits on --help or a parse error; exit cleanly.
        sys.exit(0)
    if args.tidas is not None:
        if not tidas_available:
            raise RuntimeError("TIDAS not found- cannot export")
    if args.spt3g is not None:
        if not spt3g_available:
            raise RuntimeError("SPT3G not found- cannot export")
    if len(args.freq.split(",")) != 1:
        # Multi frequency run.  We don't support multiple copies of
        # scanned signal.
        if args.input_map:
            raise RuntimeError(
                "Multiple frequencies are not supported when scanning from a map"
            )
    if args.simulate_atmosphere and args.weather is None:
        raise RuntimeError(
            "Cannot simulate atmosphere without a TOAST weather file")
    if comm.world_rank == 0:
        log.info("All parameters:")
        for ag in vars(args):
            log.info("{} = {}".format(ag, getattr(args, ag)))
    if args.group_size:
        # Re-create the communicator with the requested group size.
        comm = Comm(groupsize=args.group_size)
    if comm.world_rank == 0:
        os.makedirs(args.outdir, exist_ok=True)
    timer.stop()
    if comm.world_rank == 0:
        timer.report("Parsed parameters")
    return args, comm
def apply_mappraiser(
    args,
    comm,
    data,
    params,
    signalname,
    noisename,
    time_comms=None,
    telescope_data=None,
    verbose=True,
):
    """ Use libmappraiser to run the ML map-making

    Args:
        time_comms (iterable) :  Series of disjoint communicators that
            map, e.g., seasons and days.  Each entry is a tuple of the
            form (`name`, `communicator`)
        telescope_data (iterable) : series of disjoint TOAST data
            objects.  Each entry is tuple of the form (`name`, `data`).
    """
    if comm.comm_world is None:
        raise RuntimeError("Mappraiser requires MPI")
    log = Logger.get()
    total_timer = Timer()
    total_timer.start()
    if comm.world_rank == 0 and verbose:
        log.info("Making maps")
    # OpMappraiser consumes (and purges) the cached timestreams named
    # by `signalname` / `noisename`.
    mappraiser = OpMappraiser(
        params=params,
        purge=True,
        name=signalname,
        noise_name=noisename,
        conserve_memory=args.conserve_memory,
    )
    if time_comms is None:
        # Default: a single map over the full mission and communicator.
        time_comms = [("all", comm.comm_world)]
    if telescope_data is None:
        telescope_data = [("all", data)]
    timer = Timer()
    for time_name, time_comm in time_comms:
        for tele_name, tele_data in telescope_data:
            if len(time_name.split("-")) == 3:
                # Special rules for daily maps
                # NOTE(review): skipping when args.do_daymaps is *set*
                # looks inverted — confirm the intended condition.
                if args.do_daymaps:
                    continue
                if len(telescope_data) > 1 and tele_name == "all":
                    # Skip daily maps over multiple telescopes
                    continue
            timer.start()
            # N.B: code below is for Madam but may be useful to copy in
            # Mappraiser once we start doing multiple maps in one run
            # madam.params["file_root"] = "{}_telescope_{}_time_{}".format(
            #     file_root, tele_name, time_name
            # )
            # if time_comm == comm.comm_world:
            #     madam.params["info"] = info
            # else:
            #     # Cannot have verbose output from concurrent mapmaking
            #     madam.params["info"] = 0
            # if (time_comm is None or time_comm.rank == 0) and verbose:
            #     log.info("Mapping {}".format(madam.params["file_root"]))
            mappraiser.exec(tele_data, time_comm)
            if time_comm is not None:
                time_comm.barrier()
            if comm.world_rank == 0 and verbose:
                timer.report_clear("Mapping {}_telescope_{}_time_{}".format(
                    args.outpath,
                    tele_name,
                    time_name,
                ))
    if comm.comm_world is not None:
        comm.comm_world.barrier()
    total_timer.stop()
    if comm.world_rank == 0 and verbose:
        total_timer.report("Mappraiser total")
    return
type=np.int, help="Last ring to use") parser.add_argument("--obtfirst", required=False, default=None, type=np.float, help="First OBT to use") parser.add_argument("--obtlast", required=False, default=None, type=np.float, help="Last OBT to use") args = parser.parse_args() timer = Timer() timer.start() odrange = None if args.odfirst is not None and args.odlast is not None: odrange = (args.odfirst, args.odlast) ringrange = None if args.ringfirst is not None and args.ringlast is not None: ringrange = (args.ringfirst, args.ringlast) obtrange = None if args.obtfirst is not None and args.obtlast is not None: obtrange = (args.obtfirst, args.obtlast) detectors = None
def load_frames(self): log = Logger.get() rank = 0 if self.mpicomm is not None: rank = self.mpicomm.rank # Timestamps self.cache.create(self.TIMESTAMP_NAME, np.float64, (self.local_samples[1], )) # Boresight pointing self.cache.create("boresight_radec", np.float64, (self.local_samples[1], 4)) self.cache.create("boresight_azel", np.float64, (self.local_samples[1], 4)) self.cache.create(self.HWP_ANGLE_NAME, np.float64, (self.local_samples[1], )) # Common flags self.cache.create(self.COMMON_FLAG_NAME, np.uint8, (self.local_samples[1], )) # Telescope position and velocity self.cache.create(self.POSITION_NAME, np.float64, (self.local_samples[1], 3)) self.cache.create(self.VELOCITY_NAME, np.float64, (self.local_samples[1], 3)) # Detector data and flags for det in self.local_dets: name = "{}_{}".format(self.SIGNAL_NAME, det) self.cache.create(name, np.float64, (self.local_samples[1], )) name = "{}_{}".format(self.FLAG_NAME, det) self.cache.create(name, np.uint8, (self.local_samples[1], )) timer = Timer() for ffile in self._file_names: fnf = self._file_nframes[ffile] frame_offsets = self._frame_sample_offs[ffile] frame_sizes = self._frame_sizes[ffile] if rank == 0: log.debug("Loading {} frames from {}".format(fnf, ffile)) # Loop over all frames- only the root process will actually # read data from disk. if rank == 0: gfile = core3g.G3File(ffile) else: gfile = [None] * fnf timer.clear() timer.start() for fdata, frame_offset, frame_size in zip(gfile, frame_offsets, frame_sizes): is_scan = True if rank == 0: if fdata.type != core3g.G3FrameType.Scan: is_scan = False if self.mpicomm is not None: is_scan = self.mpicomm.bcast(is_scan, root=0) if not is_scan: continue frame_to_tod( self, frame_offset, frame_size, frame_data=fdata, all_flavors=self._all_flavors, ) if self.mpicomm is not None: self.mpicomm.barrier() timer.stop() if rank == 0: log.debug("Translated frames in {}s".format(timer.seconds())) del gfile return
def main():
    """Top-level driver for Planck ringset / noise estimation.

    Builds a tp.Exchange TOD for the requested mission span, reads the
    signal, optionally calibrates, subtracts the dipole and filters,
    then runs the noise estimation operator.
    """
    # NOTE(review): `log` is unused here — confirm it can be removed.
    log = Logger.get()
    gt = GlobalTimers.get()
    gt.start("toast_planck_reduce (total)")
    mpiworld, procs, rank, comm = get_comm()
    # This is the 2-level toast communicator. By default,
    # there is just one group which spans MPI_COMM_WORLD.
    comm = toast.Comm()
    if comm.comm_world.rank == 0:
        print('Running with {} processes at {}'
              ''.format(procs, str(datetime.datetime.now())))
    # BUG FIX throughout the parser below: `np.int` and `np.float` were
    # removed from NumPy (1.24); use the builtin `int`/`float`, which
    # the aliases pointed to anyway.
    parser = argparse.ArgumentParser(description='Planck Ringset making',
                                     fromfile_prefix_chars='@')
    parser.add_argument('--rimo', required=True, help='RIMO file')
    parser.add_argument('--freq', required=True, type=int,
                        help='Frequency')
    parser.add_argument('--dets', required=False, default=None,
                        help='Detector list (comma separated)')
    parser.add_argument('--nosingle', dest='nosingle', required=False,
                        default=False, action='store_true',
                        help='Do not compute single detector PSDs')
    parser.add_argument('--effdir', required=True,
                        help='Input Exchange Format File directory')
    parser.add_argument('--effdir_pntg', required=False,
                        help='Input Exchange Format File directory '
                        'for pointing')
    parser.add_argument('--obtmask', required=False, default=1, type=int,
                        help='OBT flag mask')
    parser.add_argument('--flagmask', required=False, default=1, type=int,
                        help='Quality flag mask')
    parser.add_argument('--skymask', required=False,
                        help='Pixel mask file')
    parser.add_argument('--skymap', required=False,
                        help='Sky estimate file')
    parser.add_argument('--skypol', dest='skypol', required=False,
                        default=False, action='store_true',
                        help='Sky estimate is polarized')
    parser.add_argument('--no_spin_harmonics', dest='no_spin_harmonics',
                        required=False, default=False, action='store_true',
                        help='Do not include PSD bins with spin harmonics')
    parser.add_argument('--calibrate', required=False,
                        help='Path to calibration file to calibrate with.')
    parser.add_argument('--calibrate_signal_estimate',
                        dest='calibrate_signal_estimate', required=False,
                        default=False, action='store_true',
                        help='Calibrate '
                        'the signal estimate using linear regression.')
    parser.add_argument('--ringdb', required=True, help='Ring DB file')
    parser.add_argument('--odfirst', required=False, default=None,
                        type=int, help='First OD to use')
    parser.add_argument('--odlast', required=False, default=None,
                        type=int, help='Last OD to use')
    parser.add_argument('--ringfirst', required=False, default=None,
                        type=int, help='First ring to use')
    parser.add_argument('--ringlast', required=False, default=None,
                        type=int, help='Last ring to use')
    parser.add_argument('--obtfirst', required=False, default=None,
                        type=float, help='First OBT to use')
    parser.add_argument('--obtlast', required=False, default=None,
                        type=float, help='Last OBT to use')
    parser.add_argument('--out', required=False, default='.',
                        help='Output directory')
    parser.add_argument('--nbin_psd', required=False, default=1000,
                        type=int, help='Number of logarithmically '
                        'distributed spectral bins to write.')
    parser.add_argument('--lagmax', required=False, default=100000,
                        type=int, help='Maximum lag to evaluate for the '
                        'autocovariance function [samples].')
    parser.add_argument('--stationary_period', required=False,
                        default=86400., type=float,
                        help='Length of a stationary interval [seconds].')
    # Dipole parameters
    dipogroup = parser.add_mutually_exclusive_group()
    dipogroup.add_argument('--dipole', dest='dipole', required=False,
                           default=False, action='store_true',
                           help='Simulate dipole')
    dipogroup.add_argument('--solsys_dipole', dest='solsys_dipole',
                           required=False, default=False,
                           action='store_true',
                           help='Simulate solar system dipole')
    dipogroup.add_argument('--orbital_dipole', dest='orbital_dipole',
                           required=False, default=False,
                           action='store_true',
                           help='Simulate orbital dipole')
    # Extra filter
    parser.add_argument('--filterfile', required=False,
                        help='Extra filter file.')
    try:
        args = parser.parse_args()
    except SystemExit:
        # argparse exits on --help or a parse error; exit cleanly.
        sys.exit(0)
    if comm.comm_world.rank == 0:
        print('All parameters:')
        print(args, flush=True)
    timer = Timer()
    timer.start()
    odrange = None
    if args.odfirst is not None and args.odlast is not None:
        odrange = (args.odfirst, args.odlast)
    ringrange = None
    if args.ringfirst is not None and args.ringlast is not None:
        ringrange = (args.ringfirst, args.ringlast)
    obtrange = None
    if args.obtfirst is not None and args.obtlast is not None:
        obtrange = (args.obtfirst, args.obtlast)
    detectors = None
    if args.dets is not None:
        detectors = re.split(',', args.dets)
        if args.nosingle and len(detectors) != 2:
            raise RuntimeError('You cannot skip the single detectors PSDs '
                               'without multiple detectors.')
    # This is the distributed data, consisting of one or
    # more observations, each distributed over a communicator.
    data = toast.Data(comm)
    # Make output directory
    if not os.path.isdir(args.out) and comm.comm_world.rank == 0:
        os.mkdir(args.out)
    # create the TOD for this observation
    tod = tp.Exchange(
        comm=comm.comm_group,
        detectors=detectors,
        ringdb=args.ringdb,
        effdir_in=args.effdir,
        effdir_pntg=args.effdir_pntg,
        obt_range=obtrange,
        ring_range=ringrange,
        od_range=odrange,
        freq=args.freq,
        RIMO=args.rimo,
        obtmask=args.obtmask,
        flagmask=args.flagmask,
        do_eff_cache=False,
    )
    rimo = tod.rimo
    ob = {}
    ob['name'] = 'mission'
    ob['id'] = 0
    ob['tod'] = tod
    ob['intervals'] = tod.valid_intervals
    ob['baselines'] = None
    ob['noise'] = tod.noise
    data.obs.append(ob)
    comm.comm_world.barrier()
    if comm.comm_world.rank == 0:
        timer.report_clear("Metadata queries")
    # Read the signal
    tod_name = 'signal'
    flags_name = 'flags'
    reader = tp.OpInputPlanck(signal_name=tod_name, flags_name=flags_name)
    if comm.comm_world.rank == 0:
        print('Reading input signal from {}'.format(args.effdir),
              flush=True)
    reader.exec(data)
    comm.comm_world.barrier()
    if comm.comm_world.rank == 0:
        timer.report_clear("Reading")
    if args.calibrate is not None:
        fn = args.calibrate
        if comm.comm_world.rank == 0:
            print('Calibrating with {}'.format(fn), flush=True)
        calibrator = tp.OpCalibPlanck(signal_in=tod_name,
                                      signal_out=tod_name, file_gain=fn)
        calibrator.exec(data)
        comm.comm_world.barrier()
        if comm.comm_world.rank == 0:
            timer.report_clear("Calibrate")
    # Optionally subtract the dipole
    do_dipole = (args.dipole or args.solsys_dipole or args.orbital_dipole)
    if do_dipole:
        if args.dipole:
            dipomode = 'total'
        elif args.solsys_dipole:
            dipomode = 'solsys'
        else:
            dipomode = 'orbital'
        dipo = tp.OpDipolePlanck(args.freq, mode=dipomode, output='dipole',
                                 keep_quats=True)
        dipo.exec(data)
        comm.comm_world.barrier()
        if comm.comm_world.rank == 0:
            timer.report_clear("Dipole")
        subtractor = tp.OpCacheMath(in1=tod_name, in2='dipole',
                                    subtract=True, out=tod_name)
        if comm.comm_world.rank == 0:
            print('Subtracting dipole', flush=True)
        subtractor.exec(data)
        comm.comm_world.barrier()
        if comm.comm_world.rank == 0:
            timer.report_clear("Dipole subtraction")
    # Optionally filter the signal
    apply_filter(args, data)
    timer.clear()
    # Estimate noise
    noise_estimator = tp.OpNoiseEstim(
        signal=tod_name,
        flags=flags_name,
        detmask=args.flagmask,
        commonmask=args.obtmask,
        maskfile=args.skymask,
        mapfile=args.skymap,
        out=args.out,
        rimo=rimo,
        pol=args.skypol,
        nbin_psd=args.nbin_psd,
        lagmax=args.lagmax,
        stationary_period=args.stationary_period,
        nosingle=args.nosingle,
        no_spin_harmonics=args.no_spin_harmonics,
        calibrate_signal_estimate=args.calibrate_signal_estimate)
    noise_estimator.exec(data)
    comm.comm_world.barrier()
    if comm.comm_world.rank == 0:
        timer.report_clear("Noise estimation")
    gt.stop_all()
    if mpiworld is not None:
        mpiworld.barrier()
    timer = Timer()
    timer.start()
    alltimers = gather_timers(comm=mpiworld)
    if comm.world_rank == 0:
        out = os.path.join(args.out, "timing")
        dump_timing(alltimers, out)
        timer.stop()
        timer.report("Gather and dump timing info")
    return
def exec(self, data): """Generate timestreams. Args: data (toast.Data): The distributed data. Returns: None """ log = Logger.get() group = data.comm.group for obs in data.obs: try: obsname = obs["name"] except Exception: obsname = "observation" observer = ephem.Observer() observer.lon = obs['site'].lon observer.lat = obs['site'].lat observer.elevation = obs['site'].alt # In meters observer.epoch = "2000" observer.temp = 0 # in Celcius observer.compute_pressure() prefix = "{} : {} : ".format(group, obsname) tod = obs['tod'] comm = tod.mpicomm rank = 0 if comm is not None: rank = comm.rank site = obs['site'].id if comm is not None: comm.Barrier() if rank == 0: log.info("{}Setting up SSO simulation".format(prefix)) # Get the observation time span and compute the horizontal # position of the SSO times = tod.local_times() sso_az, sso_el, sso_dist, sso_dia = self._get_sso_position( times, observer) tmr = Timer() if self._report_timing: if comm is not None: comm.Barrier() tmr.start() self._observe_sso(sso_az, sso_el, sso_dist, sso_dia, tod, comm, prefix) del sso_az, sso_el, sso_dist if self._report_timing: if comm is not None: comm.Barrier() if rank == 0: tmr.stop() tmr.report( "{}Simulated and observed SSO signal".format(prefix)) return
def main():
    """Top-level toast_so_sim pipeline.

    Loads (or simulates) observations, simulates sky/atmosphere/noise
    signal for a number of Monte Carlo realizations, optionally filters,
    and produces maps with either libmadam or the native mapmaker.
    """
    log = Logger.get()
    gt = GlobalTimers.get()
    gt.start("toast_so_sim (total)")
    timer0 = Timer()
    timer0.start()

    mpiworld, procs, rank, comm = toast_tools.get_comm()

    memreport("at the beginning of the pipeline", comm.comm_world)

    args, comm = parse_arguments(comm)

    if args.use_madam:
        # Initialize madam parameters
        madampars = toast_tools.setup_madam(args)
    else:
        madampars = None

    if args.import_dir is not None:
        # Import existing observations instead of simulating pointing.
        schedules = None
        data, telescope_data, detweights = so_tools.load_observations(
            args, comm)
        memreport("after load", comm.comm_world)
        totalname = "signal"
    else:
        # Load and broadcast the schedule file
        schedules = toast_tools.load_schedule(args, comm)

        # Load the weather and append to schedules
        toast_tools.load_weather(args, comm, schedules)

        # load or simulate the focalplane
        detweights = so_tools.load_focalplanes(args, comm, schedules)

        # Create the TOAST data object to match the schedule.  This will
        # include simulating the boresight pointing.
        data, telescope_data = so_tools.create_observations(
            args, comm, schedules)
        memreport("after creating observations", comm.comm_world)

        # Optionally rewrite the noise PSD:s in each observation to include
        # elevation-dependence
        so_tools.get_elevation_noise(args, comm, data)

        totalname = "total"

    # Split the communicator for day and season mapmaking
    time_comms = toast_tools.get_time_communicators(args, comm, data)

    # Rotate the LAT focalplane around the boresight based on co-rotator
    # position
    so_tools.rotate_focalplane(args, data, comm)

    # Expand boresight quaternions into detector pointing weights and
    # pixel numbers
    toast_tools.expand_pointing(args, comm, data)

    # Flag Solar system objects
    so_tools.apply_flag_sso(args, comm, data)

    # Optionally, output h_n maps
    so_tools.compute_h_n(args, comm, data)

    # Optionally, output crosslinking map
    so_tools.compute_crosslinking(args, comm, data, detweights)

    # Optionally, output cadence map
    so_tools.compute_cadence_map(args, comm, data)

    # Only purge the pointing if we are NOT going to export the
    # data to a TIDAS volume.
    # FIX: the condition previously read
    #     `if not (args.tidas is None) and (args.export is None):`
    # which purged the RA/Dec quaternions exactly when a TIDAS path WAS
    # given -- the opposite of the comment's intent, and it would destroy
    # the pointing needed by the export below.
    if (args.tidas is None) and (args.export is None):
        for ob in data.obs:
            tod = ob["tod"]
            try:
                tod.free_radec_quats()
            except AttributeError:
                # These TOD objects do not have RA/Dec quaternions
                pass

    memreport("after pointing", comm.comm_world)

    # Set up objects to take copies of the TOD at appropriate times
    if args.pysm_model:
        if schedules is not None:
            focalplanes = [
                s.telescope.focalplane.detector_data for s in schedules
            ]
        else:
            # NOTE(review): `telescope` is not defined in this function when
            # args.import_dir is set -- this branch would raise NameError.
            # Presumably it should come from the imported data; TODO confirm.
            focalplanes = [telescope.focalplane.detector_data]
        signalname = so_tools.simulate_sky_signal(args, comm, data,
                                                  focalplanes)
    else:
        signalname = toast_tools.scan_sky_signal(args, comm, data)

    memreport("after PySM", comm.comm_world)

    # Loop over Monte Carlos
    firstmc = int(args.MC_start)
    nmc = int(args.MC_count)

    for mc in range(firstmc, firstmc + nmc):
        if comm.world_rank == 0:
            log.info("Processing MC = {}".format(mc))

        toast_tools.draw_weather(args, comm, data, mc)

        toast_tools.simulate_atmosphere(args, comm, data, mc, totalname)

        #so_tools.scale_atmosphere_by_bandpass(args, comm, data, totalname, mc)
        toast_tools.scale_atmosphere_by_frequency(
            args, comm, data, cache_name=totalname, mc=mc,
        )

        memreport("after atmosphere", comm.comm_world)

        so_tools.simulate_hwpss(args, comm, data, mc, totalname)

        # update_atmospheric_noise_weights(args, comm, data, freq, mc)

        # Purge the scanned sky signal on the last realization since it
        # will not be needed again.
        toast_tools.add_signal(args, comm, data, totalname, signalname,
                               purge=(mc == firstmc + nmc - 1))

        memreport("after adding sky", comm.comm_world)

        toast_tools.simulate_sss(args, comm, data, mc, totalname)

        memreport("after simulating SSS", comm.comm_world)

        toast_tools.simulate_noise(args, comm, data, mc, totalname)

        memreport("after simulating noise", comm.comm_world)

        so_tools.apply_sim_sso(args, comm, data, mc, totalname)

        memreport("after simulating SSO", comm.comm_world)

        so_tools.convolve_time_constant(args, comm, data, totalname)

        memreport("after convolving with time constant", comm.comm_world)

        toast_tools.scramble_gains(args, comm, data, mc, totalname)

        so_tools.deconvolve_time_constant(args, comm, data, totalname,
                                          realization=mc)

        memreport("after deconvolving time constant", comm.comm_world)

        if mc == firstmc:
            # For the first realization and frequency, optionally
            # export the timestream data.
            toast_tools.output_tidas(args, comm, data, totalname)
            so_tools.export_TOD(args, comm, data, totalname, schedules)

            memreport("after export", comm.comm_world)

        if args.no_maps:
            continue

        outpath = setup_output(args, comm, mc)

        # Optionally demodulate signal
        so_tools.demodulate(args, comm, data, totalname, detweights,
                            madampars)

        # Bin and destripe maps
        if args.use_madam:
            toast_tools.apply_madam(
                args, comm, data, madampars, outpath, detweights, totalname,
                time_comms=time_comms,
                telescope_data=telescope_data,
                first_call=(mc == firstmc),
            )
        else:
            toast_tools.apply_mapmaker(
                args, comm, data, outpath, totalname,
                time_comms=time_comms,
                telescope_data=telescope_data,
                first_call=(mc == firstmc),
            )

        memreport("after destriper", comm.comm_world)

        if (args.filterbin_ground_order is not None
                or args.filterbin_poly_order is not None):
            toast_tools.apply_filterbin(
                args, comm, data, outpath, totalname,
                time_comms=time_comms,
                telescope_data=telescope_data,
                first_call=(mc == firstmc),
            )

        if args.apply_polyfilter or args.apply_groundfilter:
            # Filter signal
            toast_tools.apply_polyfilter(args, comm, data, totalname)
            memreport("after polyfilter", comm.comm_world)

            # Ground filter
            memreport("after demodulation", comm.comm_world)
            toast_tools.apply_groundfilter(args, comm, data, totalname)
            memreport("after groundfilter", comm.comm_world)

            # Bin maps
            if args.use_madam:
                toast_tools.apply_madam(
                    args, comm, data, madampars, outpath, detweights,
                    totalname,
                    time_comms=time_comms,
                    telescope_data=telescope_data,
                    first_call=args.demodulate,
                    extra_prefix="filtered",
                    bin_only=True,
                )
            else:
                toast_tools.apply_mapmaker(
                    args, comm, data, outpath, totalname,
                    time_comms=time_comms,
                    telescope_data=telescope_data,
                    first_call=False,
                    extra_prefix="filtered",
                    bin_only=True,
                )

            memreport("after filter & bin", comm.comm_world)

        if args.demodulate and args.MC_count > 1:
            if comm.world_rank == 0:
                log.info("WARNING: demodulation and MC iterations are "
                         "incompatible. Terminating after first MC.")
            break

    if comm.comm_world is not None:
        comm.comm_world.barrier()

    memreport("at the end of the pipeline", comm.comm_world)

    # Gather and report the timing of all processes.
    gt.stop_all()
    if mpiworld is not None:
        mpiworld.barrier()
    timer = Timer()
    timer.start()
    alltimers = gather_timers(comm=mpiworld)
    if rank == 0:
        out = os.path.join(args.outdir, "timing")
        dump_timing(alltimers, out)
    timer.stop()
    timer.report("Gather and dump timing info")
    timer0.stop()
    if comm.world_rank == 0:
        timer0.report("toast_so_sim.py pipeline")
    return
def create_observations(args, comm, schedules):
    """Create and distribute TOAST observations for every CES in schedules.

    Args:
        args: parsed pipeline arguments.
        comm (toast.Comm): the TOAST communicator.
        schedules (iterable): a list of Schedule objects.

    Returns:
        (tuple): ``(data, telescope_data, total_samples)`` where
            ``telescope_data`` is a list of ``(name, Data)`` splits with
            the full data set prepended as ``("all", data)``.

    Raises:
        RuntimeError: if some process group was assigned no observations.
    """
    log = Logger.get()
    timer = Timer()
    timer.start()

    data = Data(comm)

    # Loop over the schedules, distributing each schedule evenly across
    # the process groups.  For now, we'll assume that each schedule has
    # the same number of operational days and the number of process groups
    # matches the number of operational days.  Relaxing these constraints
    # will cause the season break to occur on different process groups
    # for different schedules and prevent splitting the communicator.
    total_samples = 0
    group_samples = 0
    for schedule in schedules:
        telescope = schedule.telescope
        all_ces = schedule.ceslist
        breaks = pipeline_tools.get_breaks(comm, all_ces, args)
        # Weight each CES by its duration so groups get balanced work.
        ces_weights = [x.stop_time - x.start_time for x in all_ces]
        groupdist = distribute_discrete(ces_weights, comm.ngroups,
                                        breaks=breaks)
        group_firstobs = groupdist[comm.group][0]
        group_numobs = groupdist[comm.group][1]
        for ices in range(group_firstobs, group_firstobs + group_numobs):
            obs = create_observation(args, comm, telescope, all_ces[ices])
            group_samples += obs["tod"].total_samples
            data.obs.append(obs)

    if comm.comm_rank is not None:
        # Reduce the per-group sample counts across the group leaders
        # (comm.comm_rank spans processes with the same in-group rank),
        # then share the total within each group.
        if comm.comm_group.rank == 0:
            total_samples = comm.comm_rank.allreduce(group_samples,
                                                     op=MPI.SUM)
        total_samples = comm.comm_group.bcast(total_samples, root=0)
    else:
        # FIX: in serial mode total_samples was previously left at 0;
        # the single group's samples ARE the global total.
        total_samples = group_samples
    if comm.comm_world is None or comm.comm_group.rank == 0:
        log.info("Group # {:4} has {} observations.".format(
            comm.group, len(data.obs)))

    if len(data.obs) == 0:
        raise RuntimeError(
            "Too many tasks. Every MPI task must "
            "be assigned to at least one observation."
        )

    if comm.comm_world is not None:
        comm.comm_world.barrier()
    timer.stop()
    if comm.world_rank == 0:
        timer.report("Simulated scans")

    # Split the data object for each telescope for separate mapmaking.
    # We could also split by site.
    if len(schedules) > 1:
        telescope_data = data.split("telescope_name")
        if len(telescope_data) == 1:
            # Only one telescope available
            telescope_data = []
    else:
        telescope_data = []
    telescope_data.insert(0, ("all", data))
    return data, telescope_data, total_samples
def parse_arguments(comm):
    """Parse the command line and finalize the communicator.

    Args:
        comm (toast.Comm): the initial TOAST communicator.

    Returns:
        (tuple): ``(args, comm)`` -- the parsed arguments and a communicator
            that is re-created with ``args.group_size`` when requested.

    Raises:
        RuntimeError: on unsupported multi-frequency configuration or a
            missing weather file.
    """
    timer = Timer()
    timer.start()
    log = Logger.get()

    parser = argparse.ArgumentParser(
        description="Simulate ground-based boresight pointing. Simulate "
        "atmosphere and make maps for some number of noise Monte Carlos.",
        fromfile_prefix_chars="@",
    )

    toast_tools.add_dist_args(parser)
    toast_tools.add_todground_args(parser)
    toast_tools.add_pointing_args(parser)
    toast_tools.add_polyfilter_args(parser)
    toast_tools.add_groundfilter_args(parser)
    toast_tools.add_atmosphere_args(parser)
    toast_tools.add_noise_args(parser)
    toast_tools.add_gainscrambler_args(parser)
    toast_tools.add_madam_args(parser)
    toast_tools.add_mapmaker_args(parser)
    toast_tools.add_filterbin_args(parser)
    toast_tools.add_sky_map_args(parser)
    toast_tools.add_sss_args(parser)
    toast_tools.add_tidas_args(parser)
    toast_tools.add_mc_args(parser)
    so_tools.add_corotator_args(parser)
    so_tools.add_time_constant_args(parser)
    so_tools.add_demodulation_args(parser)
    so_tools.add_h_n_args(parser)
    so_tools.add_crosslinking_args(parser)
    so_tools.add_cadence_map_args(parser)
    so_tools.add_hw_args(parser)
    so_tools.add_so_noise_args(parser)
    so_tools.add_pysm_args(parser)
    so_tools.add_export_args(parser)
    toast_tools.add_debug_args(parser)
    so_tools.add_import_args(parser)
    so_tools.add_sim_sso_args(parser)
    so_tools.add_flag_sso_args(parser)
    so_tools.add_sim_hwpss_args(parser)

    parser.add_argument(
        "--no-maps",
        required=False,
        default=False,
        action="store_true",
        help="Disable all mapmaking.",
    )

    parser.add_argument(
        "--outdir", required=False, default="out", help="Output directory"
    )

    parser.add_argument(
        "--madam",
        required=False,
        action="store_true",
        help="Use libmadam for map-making",
        dest="use_madam",
    )
    parser.add_argument(
        "--no-madam",
        required=False,
        action="store_false",
        help="Do not use libmadam for map-making [default]",
        dest="use_madam",
    )
    parser.set_defaults(use_madam=True)

    try:
        args = parser.parse_args()
    except SystemExit as e:
        # FIX: previously this was `sys.exit()`, which converted argparse's
        # error status (2) -- and any other exit code -- into success (0),
        # hiding bad command lines from the scheduler.  Propagate the code.
        sys.exit(e.code)

    if len(args.bands.split(",")) != 1:
        # Multi frequency run.  We don't support multiple copies of
        # scanned signal.
        if args.input_map:
            raise RuntimeError(
                "Multiple frequencies are not supported when scanning "
                "from a map"
            )

    if args.weather is None:
        raise RuntimeError("You must provide a TOAST weather file")

    if comm.world_rank == 0:
        log.info("\n")
        log.info("All parameters:")
        for ag in vars(args):
            log.info("{} = {}".format(ag, getattr(args, ag)))
        log.info("\n")

    if args.group_size:
        # Re-create the communicator with the requested group size.
        comm = Comm(groupsize=args.group_size)

    if comm.world_rank == 0:
        # exist_ok avoids the create/race window of a try/except around
        # makedirs (other ranks or jobs may create the directory first).
        os.makedirs(args.outdir, exist_ok=True)
        timer.report_clear("Parse arguments")

    return args, comm
def get_analytic_noise(args, comm, focalplane, verbose=True):
    """ Create a TOAST noise object.

    Create a noise object from the 1/f noise parameters contained in the
    focalplane database.  Optionally add thermal common modes.

    Args:
        args: parsed pipeline arguments (provides ``sample_rate``,
            ``common_mode_noise`` and ``common_mode_only``).
        comm: the TOAST communicator (used only for rank-0 timing output).
        focalplane: focalplane object; indexed per-detector for the 1/f
            parameters and mutated at the end (its ``_noise`` is set).
        verbose (bool): when True, rank 0 reports the construction time.

    Returns:
        AnalyticNoise: the constructed noise model (also attached to
        ``focalplane._noise``).
    """
    timer = Timer()
    timer.start()
    detectors = sorted(focalplane.detector_data.keys())
    # Per-detector 1/f model parameters, keyed by detector name.
    fmins = {}
    fknees = {}
    alphas = {}
    NETs = {}
    rates = {}
    indices = {}
    for d in detectors:
        rates[d] = args.sample_rate
        fmins[d] = focalplane[d]["fmin"]
        fknees[d] = focalplane[d]["fknee"]
        alphas[d] = focalplane[d]["alpha"]
        NETs[d] = focalplane[d]["NET"]
        indices[d] = focalplane[d]["index"]
    ncommon = 0
    coupling_strength_distributions = []
    common_modes = []
    if args.common_mode_noise:
        # Add an extra "virtual" detector for common mode noise for
        # every optics tube.  Each ';'-separated entry describes one
        # common mode as comma-separated floats.
        for common_mode in args.common_mode_noise.split(";"):
            ncommon += 1
            # Entries may carry 6 values (with coupling center/width) or
            # 4 values (coupling defaults to center=1, width=0).
            try:
                fmin, fknee, alpha, net, center, width = np.array(
                    common_mode.split(",")).astype(np.float64)
            except ValueError:
                fmin, fknee, alpha, net = np.array(
                    common_mode.split(",")).astype(np.float64)
                center, width = 1, 0
            coupling_strength_distributions.append([center, width])
            hw = get_example()
            for itube, tube_slot in enumerate(
                    sorted(hw.data["tube_slots"].keys())):
                # One virtual detector per (common mode, optics tube).
                d = "common_mode_{}_{}".format(ncommon - 1, tube_slot)
                detectors.append(d)
                common_modes.append(d)
                rates[d] = args.sample_rate
                fmins[d] = fmin
                fknees[d] = fknee
                alphas[d] = alpha
                NETs[d] = net
                # Offset virtual-detector indices well away from the real
                # detectors so noise realizations stay independent.
                indices[d] = ncommon * 100000 + itube
    noise = AnalyticNoise(
        rate=rates,
        fmin=fmins,
        detectors=detectors,
        fknee=fknees,
        alpha=alphas,
        NET=NETs,
        indices=indices,
    )
    if args.common_mode_noise:
        mixmatrix = {}
        keys = set()
        # With --common_mode_only the physical detectors carry no weight
        # of their own noise stream -- only the common mode is observed.
        if args.common_mode_only:
            detweight = 0
        else:
            detweight = 1
        for icommon in range(ncommon):
            # Update the mixing matrix in the noise operator
            center, width = coupling_strength_distributions[icommon]
            # Fixed seed so the drawn coupling strengths are reproducible
            # across runs and processes.
            np.random.seed(1001 + icommon)
            couplings = center + np.random.randn(1000000) * width
            for det in focalplane.detector_data.keys():
                if det not in mixmatrix:
                    mixmatrix[det] = {det: detweight}
                    keys.add(det)
                tube_slot = focalplane[det]["tube_slot"]
                common = "common_mode_{}_{}".format(icommon, tube_slot)
                index = focalplane[det]["index"]
                # Each detector couples to its tube's common mode with a
                # strength indexed by the detector's running index.
                mixmatrix[det][common] = couplings[index]
                keys.add(common)
        # Add a diagonal entries, even if we wouldn't usually ask for
        # the common mode alone.
        for common in common_modes:
            mixmatrix[common] = {common: 1}
        # There should probably be an accessor method to update the
        # mixmatrix in the TOAST Noise object.
        # NOTE(review): this reaches into AnalyticNoise private attributes
        # (_mixmatrix, _keys) -- fragile against TOAST API changes.
        if noise._mixmatrix is not None:
            raise RuntimeError("Did not expect non-empty mixing matrix")
        noise._mixmatrix = mixmatrix
        noise._keys = list(sorted(keys))
    # Attach the noise model to the focalplane (private attribute).
    focalplane._noise = noise
    if comm.world_rank == 0 and verbose:
        timer.report_clear("Creating noise model")
    return noise
def create_observations(args, comm, schedule):
    """Simulate constant elevation scans.

    Simulate constant elevation scans at "site" matching entries in
    "all_ces".  Each operational day is assigned to a different process
    group to allow making day maps.

    Args:
        args: parsed pipeline arguments.
        comm (toast.Comm): the TOAST communicator.
        schedule: a Schedule object providing the telescope and CES list.

    Returns:
        toast.Data: the distributed data object with one observation
            per assigned CES.

    Raises:
        RuntimeError: if a CES scan cannot be created, a CES has no valid
            intervals, or some process group got no observations.
    """
    timer = Timer()
    # FIX: the timer was constructed but never started, so the final
    # `timer.report_clear` operated on a never-started timer.
    timer.start()
    log = Logger.get()

    data = Data(comm)

    telescope = schedule.telescope
    site = telescope.site
    focalplane = telescope.focalplane
    all_ces = schedule.ceslist
    nces = len(all_ces)

    breaks = get_breaks(comm, all_ces, args)

    groupdist = distribute_uniform(nces, comm.ngroups, breaks=breaks)
    group_firstobs = groupdist[comm.group][0]
    group_numobs = groupdist[comm.group][1]

    # Distribute data within each group by detector.
    if comm.comm_group is not None:
        ndetrank = comm.comm_group.size
    else:
        ndetrank = 1

    for ices in range(group_firstobs, group_firstobs + group_numobs):
        ces = all_ces[ices]
        totsamples = int((ces.stop_time - ces.start_time) * args.sample_rate)

        # create the single TOD for this observation
        try:
            tod = TODGround(
                comm.comm_group,
                focalplane.detquats,
                totsamples,
                detranks=ndetrank,
                firsttime=ces.start_time,
                rate=args.sample_rate,
                site_lon=site.lon,
                site_lat=site.lat,
                site_alt=site.alt,
                azmin=ces.azmin,
                azmax=ces.azmax,
                el=ces.el,
                scanrate=args.scan_rate,
                scan_accel=args.scan_accel,
                cosecant_modulation=args.scan_cosecant_modulate,
                CES_start=None,
                CES_stop=None,
                sun_angle_min=args.sun_angle_min,
                coord=args.coord,
                sampsizes=None,
                report_timing=args.debug,
            )
        except RuntimeError as e:
            # FIX: chain the original exception so the underlying cause's
            # traceback is preserved.
            raise RuntimeError(
                "Failed to create the CES scan: {}".format(e)) from e

        # Create the (single) observation
        ob = {}
        ob["name"] = "CES-{}-{}-{}".format(ces.name, ces.scan, ces.subscan)
        ob["tod"] = tod
        if len(tod.subscans) > 0:
            ob["intervals"] = tod.subscans
        else:
            raise RuntimeError("{} has no valid intervals".format(ob["name"]))
        ob["baselines"] = None
        ob["noise"] = focalplane.noise
        # Encode the CES start time into the observation ID for
        # reproducible noise realizations.
        ob["id"] = int(ces.mjdstart * 10000)

        data.obs.append(ob)

    # The Az/El quaternions were only needed to build the scans.
    for ob in data.obs:
        tod = ob["tod"]
        tod.free_azel_quats()

    if comm.comm_world is None or comm.comm_group.rank == 0:
        log.info("Group # {:4} has {} observations.".format(
            comm.group, len(data.obs)))

    if len(data.obs) == 0:
        raise RuntimeError("Too many tasks. Every MPI task must "
                           "be assigned to at least one observation.")

    if comm.world_rank == 0:
        timer.report_clear("Simulate scans")

    return data
def create_observations(args, comm, focalplane, groupsize):
    """Create satellite observations distributed over the process groups.

    Args:
        args: parsed pipeline arguments.
        comm (toast.Comm): the TOAST communicator.
        focalplane (dict): per-detector properties, including "quat" and
            optionally "index".
        groupsize (int): size of each process group; must not exceed the
            number of detectors since data is distributed by detector.

    Returns:
        toast.Data: the distributed data with boresight pointing set.
    """
    timer = Timer()
    timer.start()
    # FIX: `log` was used below without ever being defined in this
    # function, so the too-large-group error path raised NameError
    # instead of logging and aborting.
    log = Logger.get()

    if groupsize > len(focalplane.keys()):
        if comm.world_rank == 0:
            log.error("process group is too large for the number of "
                      "detectors")
            comm.comm_world.Abort()

    # Detector information from the focalplane
    detectors = sorted(focalplane.keys())
    detquats = {}
    detindx = None
    if "index" in focalplane[detectors[0]]:
        detindx = {}

    for d in detectors:
        detquats[d] = focalplane[d]["quat"]
        if detindx is not None:
            detindx[d] = focalplane[d]["index"]

    # Distribute the observations uniformly
    groupdist = distribute_uniform(args.obs_num, comm.ngroups)

    # Compute global time and sample ranges of all observations
    obsrange = regular_intervals(
        args.obs_num,
        args.start_time,
        0,
        args.sample_rate,
        3600 * args.obs_time_h,
        3600 * args.gap_h,
    )

    noise = get_analytic_noise(args, comm, focalplane)

    # The distributed timestream data
    data = Data(comm)

    # Every process group creates its observations
    group_firstobs = groupdist[comm.group][0]
    group_numobs = groupdist[comm.group][1]

    for ob in range(group_firstobs, group_firstobs + group_numobs):
        # NOTE(review): hwprpm, hwpstep and hwpsteptime are not defined in
        # this function -- presumably module-level globals set elsewhere in
        # the file.  TODO: confirm or pass them in explicitly.
        tod = TODSatellite(
            comm.comm_group,
            detquats,
            obsrange[ob].samples,
            coord=args.coord,
            firstsamp=obsrange[ob].first,
            firsttime=obsrange[ob].start,
            rate=args.sample_rate,
            spinperiod=args.spin_period_min,
            spinangle=args.spin_angle_deg,
            precperiod=args.prec_period_min,
            precangle=args.prec_angle_deg,
            detindx=detindx,
            detranks=comm.group_size,
            hwprpm=hwprpm,
            hwpstep=hwpstep,
            hwpsteptime=hwpsteptime,
        )

        obs = {}
        obs["name"] = "science_{:05d}".format(ob)
        obs["tod"] = tod
        obs["intervals"] = None
        obs["baselines"] = None
        obs["noise"] = noise
        obs["id"] = ob

        data.obs.append(obs)

    if comm.world_rank == 0:
        timer.report_clear("Read parameters, compute data distribution")

    # Since we are simulating noise timestreams, we want them to be
    # contiguous and reproducible over the whole observation.  We
    # distribute data by detector within an observation, so ensure that
    # our group size is not larger than the number of detectors we have.
    # We set the precession axis now, which will trigger calculation
    # of the boresight pointing.
    for ob in range(group_numobs):
        curobs = data.obs[ob]
        tod = curobs["tod"]

        # Get the global sample offset from the original distribution of
        # intervals
        obsoffset = obsrange[group_firstobs + ob].first

        # Constantly slewing precession axis
        degday = 360.0 / 365.25

        precquat = np.empty(4 * tod.local_samples[1],
                            dtype=np.float64).reshape((-1, 4))

        slew_precession_axis(
            precquat,
            firstsamp=(obsoffset + tod.local_samples[0]),
            samplerate=args.sample_rate,
            degday=degday,
        )

        tod.set_prec_axis(qprec=precquat)
        del precquat

    if comm.world_rank == 0:
        timer.report_clear("Construct boresight pointing")

    return data
def get_hardware(args, comm, verbose=False):
    """ Get the hardware configuration, either from file or by simulating.
    Then trim it down to the bands that were selected.

    The configuration is built (or loaded) on world rank 0 only and then
    broadcast to the other processes.

    Args:
        args: parsed pipeline arguments (hardware, bands, wafer_slots,
            tube_slots, thinfp).
        comm: the TOAST communicator.
        verbose (bool): forwarded to ``get_telescope``.

    Returns:
        (tuple): ``(hw, telescope, det_index)`` where ``det_index`` maps
            every detector name to a running integer index.

    Raises:
        RuntimeError: if no detectors match the selection criteria.
    """
    log = Logger.get()
    telescope = get_telescope(args, comm, verbose=verbose)
    timer = Timer()
    # All loading/selection work happens on rank 0; other ranks receive
    # the result via broadcast below.
    if comm.world_rank == 0:
        timer.start()
        if args.hardware:
            log.info("Loading hardware configuration from {}..."
                     "".format(args.hardware))
            hw = Hardware(args.hardware)
            timer.report_clear("Load hardware map")
        else:
            log.info("Simulating default hardware configuration")
            hw = get_example()
            hw.data["detectors"] = sim_telescope_detectors(hw, telescope.name)
            timer.report_clear("Simulate hardware map")
        # Construct a running index for all detectors across all
        # telescopes for independent noise realizations
        det_index = {}
        for idet, det in enumerate(sorted(hw.data["detectors"])):
            det_index[det] = idet
        # Band selection is a regex alternation over the requested bands.
        match = {"band": args.bands.replace(",", "|")}
        tube_slots = None
        # Wafer selection takes precedence over tube selection.
        if args.wafer_slots is not None:
            match["wafer_slot"] = args.wafer_slots.split(",")
        elif args.tube_slots is not None:
            tube_slots = args.tube_slots.split(",")
        # If one provides both telescopes and tube_slots, the tube_slots matching *either*
        # will be concatenated
        #hw = hw.select(telescopes=[telescope.name], tube_slots=tube_slots, match=match)
        hw = hw.select(tube_slots=tube_slots, match=match)
        if args.thinfp:
            # Only accept a fraction of the detectors for
            # testing and development.  Detectors are kept in groups of
            # four (pixel pairs) based on their running index.
            delete_detectors = []
            for det_name in hw.data["detectors"].keys():
                if (det_index[det_name] // 4) % args.thinfp != 0:
                    delete_detectors.append(det_name)
            for det_name in delete_detectors:
                del hw.data["detectors"][det_name]

        ndetector = len(hw.data["detectors"])
        if ndetector == 0:
            raise RuntimeError("No detectors match query: telescope={}, "
                               "tube_slots={}, match={}".format(
                                   telescope.name, tube_slots, match))
        log.info(
            f"Telescope = {telescope.name} tube_slots = {args.tube_slots}, "
            f"wafer_slots = {args.wafer_slots}, bands = {args.bands}, "
            f"thinfp = {args.thinfp} matches {ndetector} detectors")
        timer.report_clear("Select detectors")
    else:
        # Non-root ranks receive the hardware map via broadcast.
        hw = None
        det_index = None
    if comm.comm_world is not None:
        hw = comm.comm_world.bcast(hw)
        det_index = comm.comm_world.bcast(det_index)
        if comm.world_rank == 0:
            timer.report_clear("Broadcast hardware map")
    return hw, telescope, det_index