def main():
    env = Environment.get()
    env.enable_function_timers()

    log = Logger.get()
    gt = GlobalTimers.get()
    gt.start("toast_benchmark (total)")

    mpiworld, procs, rank = get_world()

    if rank == 0:
        log.info("TOAST version = {}".format(env.version()))
        log.info("Using a maximum of {} threads per process".format(env.max_threads()))
    if mpiworld is None:
        log.info("Running serially with one process at {}".format(str(datetime.now())))
    else:
        if rank == 0:
            log.info(
                "Running with {} processes at {}".format(procs, str(datetime.now()))
            )

    cases = {
        "tiny": 5000000,          # O(1) GB RAM
        "xsmall": 50000000,       # O(10) GB RAM
        "small": 500000000,       # O(100) GB RAM
        "medium": 5000000000,     # O(1) TB RAM
        "large": 50000000000,     # O(10) TB RAM
        "xlarge": 500000000000,   # O(100) TB RAM
        "heroic": 5000000000000,  # O(1000) TB RAM
    }

    args, comm, n_nodes, n_detector, case, group_seconds, n_group = job_config(
        mpiworld, cases
    )

    # Note: The number of "days" here will just be an approximation of the desired
    # data volume since we are doing a realistic schedule for a real observing site.
    n_days = int(2.0 * (group_seconds * n_group) / (24 * 3600))
    if n_days == 0:
        n_days = 1

    if rank == 0:
        log.info(
            "Using {} detectors for approximately {} days".format(n_detector, n_days)
        )

    # Create the schedule file and input maps on one process
    if rank == 0:
        create_schedules(args, group_seconds, n_days)
        create_input_maps(args)
    if mpiworld is not None:
        mpiworld.barrier()

    if args.dry_run is not None:
        if rank == 0:
            log.info("Exit from dry run")
        # We are done!
        sys.exit(0)

    gt.start("toast_benchmark (science work)")

    # Load and broadcast the schedule file
    schedules = pipeline_tools.load_schedule(args, comm)

    # Load the weather and append to schedules
    pipeline_tools.load_weather(args, comm, schedules)

    # Simulate the focalplane
    detweights = create_focalplanes(args, comm, schedules, n_detector)

    # Create the TOAST data object to match the schedule.  This will
    # include simulating the boresight pointing.
    data, telescope_data, total_samples = create_observations(args, comm, schedules)

    # handle = None
    # if comm.world_rank == 0:
    #     handle = open(os.path.join(args.outdir, "distdata.txt"), "w")
    # data.info(handle)
    # if comm.world_rank == 0:
    #     handle.close()
    # if comm.comm_world is not None:
    #     comm.comm_world.barrier()

    # Split the communicator for day and season mapmaking
    time_comms = pipeline_tools.get_time_communicators(args, comm, data)

    # Expand boresight quaternions into detector pointing weights and
    # pixel numbers
    pipeline_tools.expand_pointing(args, comm, data)

    # Optionally rewrite the noise PSD:s in each observation to include
    # elevation-dependence
    pipeline_tools.get_elevation_noise(args, comm, data)

    # Purge the pointing if we are NOT going to export the
    # data to a TIDAS volume
    if (args.tidas is None) and (args.spt3g is None):
        for ob in data.obs:
            tod = ob["tod"]
            tod.free_radec_quats()

    # Prepare auxiliary information for distributed map objects
    signalname = pipeline_tools.scan_sky_signal(args, comm, data, "signal")

    # Set up objects to take copies of the TOD at appropriate times
    totalname, totalname_freq = setup_sigcopy(args)

    # Loop over Monte Carlos
    firstmc = args.MC_start
    nsimu = args.MC_count

    freqs = [float(freq) for freq in args.freq.split(",")]
    nfreq = len(freqs)

    for mc in range(firstmc, firstmc + nsimu):

        pipeline_tools.simulate_atmosphere(args, comm, data, mc, totalname)

        # Loop over frequencies with identical focal planes and identical
        # atmospheric noise.
        for ifreq, freq in enumerate(freqs):

            if comm.world_rank == 0:
                log.info(
                    "Processing frequency {}GHz {} / {}, MC = {}".format(
                        freq, ifreq + 1, nfreq, mc
                    )
                )

            # Make a copy of the atmosphere so we can scramble the gains and
            # apply frequency-dependent scaling.
            pipeline_tools.copy_signal(args, comm, data, totalname, totalname_freq)

            pipeline_tools.scale_atmosphere_by_frequency(
                args, comm, data, freq=freq, mc=mc, cache_name=totalname_freq
            )

            pipeline_tools.update_atmospheric_noise_weights(args, comm, data, freq, mc)

            # Add previously simulated sky signal to the atmospheric noise.
            pipeline_tools.add_signal(
                args, comm, data, totalname_freq, signalname, purge=(nsimu == 1)
            )

            mcoffset = ifreq * 1000000

            pipeline_tools.simulate_noise(args, comm, data, mc + mcoffset, totalname_freq)

            pipeline_tools.scramble_gains(args, comm, data, mc + mcoffset, totalname_freq)

            outpath = setup_output(args, comm, mc + mcoffset, freq)

            # Bin and destripe maps
            pipeline_tools.apply_mapmaker(
                args, comm, data, outpath, totalname_freq,
                time_comms=time_comms, telescope_data=telescope_data,
                first_call=(mc == firstmc),
            )

            if args.apply_polyfilter or args.apply_groundfilter:

                # Filter signal
                pipeline_tools.apply_polyfilter(args, comm, data, totalname_freq)
                pipeline_tools.apply_groundfilter(args, comm, data, totalname_freq)

                # Bin filtered maps
                pipeline_tools.apply_mapmaker(
                    args, comm, data, outpath, totalname_freq,
                    time_comms=time_comms, telescope_data=telescope_data,
                    first_call=False, extra_prefix="filtered", bin_only=True,
                )

    gt.stop_all()
    if mpiworld is not None:
        mpiworld.barrier()

    runtime = gt.seconds("toast_benchmark (science work)")
    prefactor = 1.0e-3
    kilo_samples = 1.0e-3 * total_samples
    sample_factor = 1.2
    det_factor = 2.0
    metric = (
        prefactor * n_detector ** det_factor * kilo_samples ** sample_factor
        / (n_nodes * runtime)
    )
    if rank == 0:
        msg = (
            "Science Metric: {:0.1e} * ({:d}**{:0.2f}) * ({:0.3e}**{:0.3f}) "
            "/ ({:0.1f} * {}) = {:0.2f}".format(
                prefactor, n_detector, det_factor, kilo_samples, sample_factor,
                runtime, n_nodes, metric,
            )
        )
        log.info("")
        log.info(msg)
        log.info("")
        with open(os.path.join(args.outdir, "log"), "a") as f:
            f.write(msg)
            f.write("\n\n")

    timer = Timer()
    timer.start()
    alltimers = gather_timers(comm=mpiworld)
    if comm.world_rank == 0:
        out = os.path.join(args.outdir, "timing")
        dump_timing(alltimers, out)
        with open(os.path.join(args.outdir, "log"), "a") as f:
            f.write("Copy of Global Timers:\n")
            with open("{}.csv".format(out), "r") as t:
                f.write(t.read())
    timer.stop()
    timer.report("Gather and dump timing info")

    return
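
# --- Illustrative example (not part of the original script) -------------------
# A hypothetical helper that evaluates the same Science Metric formula used at
# the end of main() above, so the scaling is easy to check by hand.  With the
# constants fixed there (prefactor = 1.0e-3, det_factor = 2.0,
# sample_factor = 1.2), doubling the detector count quadruples the metric,
# while doubling either the runtime or the node count halves it.
def example_science_metric(n_detector, total_samples, n_nodes, runtime):
    prefactor = 1.0e-3
    det_factor = 2.0
    sample_factor = 1.2
    kilo_samples = 1.0e-3 * total_samples
    return (
        prefactor * n_detector ** det_factor * kilo_samples ** sample_factor
        / (n_nodes * runtime)
    )

# For example, 1000 detectors, 1.0e9 total samples, 16 nodes and 3600 s of
# science work give roughly
#     1.0e-3 * 1000**2 * (1.0e6)**1.2 / (16 * 3600) ≈ 2.8e5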
def main():
    log = Logger.get()
    gt = GlobalTimers.get()
    gt.start("toast_so_sim (total)")
    timer0 = Timer()
    timer0.start()

    mpiworld, procs, rank, comm = toast_tools.get_comm()

    memreport("at the beginning of the pipeline", comm.comm_world)

    args, comm = parse_arguments(comm)

    if args.use_madam:
        # Initialize madam parameters
        madampars = toast_tools.setup_madam(args)
    else:
        madampars = None

    if args.import_dir is not None:
        schedules = None

        data, telescope_data, detweights = so_tools.load_observations(args, comm)
        memreport("after load", comm.comm_world)

        totalname = "signal"
    else:
        # Load and broadcast the schedule file
        schedules = toast_tools.load_schedule(args, comm)

        # Load the weather and append to schedules
        toast_tools.load_weather(args, comm, schedules)

        # Load or simulate the focalplane
        detweights = so_tools.load_focalplanes(args, comm, schedules)

        # Create the TOAST data object to match the schedule.  This will
        # include simulating the boresight pointing.
        data, telescope_data = so_tools.create_observations(args, comm, schedules)
        memreport("after creating observations", comm.comm_world)

        # Optionally rewrite the noise PSD:s in each observation to include
        # elevation-dependence
        so_tools.get_elevation_noise(args, comm, data)

        totalname = "total"

    # Split the communicator for day and season mapmaking
    time_comms = toast_tools.get_time_communicators(args, comm, data)

    # Rotate the LAT focalplane around the boresight based on co-rotator position
    so_tools.rotate_focalplane(args, data, comm)

    # Expand boresight quaternions into detector pointing weights and
    # pixel numbers
    toast_tools.expand_pointing(args, comm, data)

    # Flag Solar system objects
    so_tools.apply_flag_sso(args, comm, data)

    # Optionally, output h_n maps
    so_tools.compute_h_n(args, comm, data)

    # Optionally, output crosslinking map
    so_tools.compute_crosslinking(args, comm, data, detweights)

    # Optionally, output cadence map
    so_tools.compute_cadence_map(args, comm, data)

    # Only purge the pointing if we are NOT going to export the
    # data to a TIDAS volume
    if (args.tidas is None) and (args.export is None):
        for ob in data.obs:
            tod = ob["tod"]
            try:
                tod.free_radec_quats()
            except AttributeError:
                # These TOD objects do not have RA/Dec quaternions
                pass

    memreport("after pointing", comm.comm_world)

    # Set up objects to take copies of the TOD at appropriate times
    if args.pysm_model:
        if schedules is not None:
            focalplanes = [s.telescope.focalplane.detector_data for s in schedules]
        else:
            focalplanes = [telescope.focalplane.detector_data]
        signalname = so_tools.simulate_sky_signal(args, comm, data, focalplanes)
    else:
        signalname = toast_tools.scan_sky_signal(args, comm, data)

    memreport("after PySM", comm.comm_world)

    # Loop over Monte Carlos
    firstmc = int(args.MC_start)
    nmc = int(args.MC_count)

    for mc in range(firstmc, firstmc + nmc):

        if comm.world_rank == 0:
            log.info("Processing MC = {}".format(mc))

        toast_tools.draw_weather(args, comm, data, mc)

        toast_tools.simulate_atmosphere(args, comm, data, mc, totalname)

        # so_tools.scale_atmosphere_by_bandpass(args, comm, data, totalname, mc)
        toast_tools.scale_atmosphere_by_frequency(
            args, comm, data, cache_name=totalname, mc=mc
        )

        memreport("after atmosphere", comm.comm_world)

        so_tools.simulate_hwpss(args, comm, data, mc, totalname)

        # update_atmospheric_noise_weights(args, comm, data, freq, mc)

        toast_tools.add_signal(
            args, comm, data, totalname, signalname, purge=(mc == firstmc + nmc - 1)
        )

        memreport("after adding sky", comm.comm_world)

        toast_tools.simulate_sss(args, comm, data, mc, totalname)

        memreport("after simulating SSS", comm.comm_world)

        toast_tools.simulate_noise(args, comm, data, mc, totalname)

        memreport("after simulating noise", comm.comm_world)

        so_tools.apply_sim_sso(args, comm, data, mc, totalname)

        memreport("after simulating SSO", comm.comm_world)

        so_tools.convolve_time_constant(args, comm, data, totalname)

        memreport("after convolving with time constant", comm.comm_world)

        # DEBUG begin
        """
        import matplotlib.pyplot as plt
        tod = data.obs[0]['tod']
        times = tod.local_times()
        for det in tod.local_dets:
            sig = tod.local_signal(det, totalname)
            plt.plot(times, sig, label=det)
        plt.legend(loc='best')
        fnplot = 'debug_{}.png'.format(args.madam_prefix)
        plt.savefig(fnplot)
        plt.close()
        print('DEBUG plot saved in', fnplot)
        return
        """
        # DEBUG end

        toast_tools.scramble_gains(args, comm, data, mc, totalname)

        so_tools.deconvolve_time_constant(args, comm, data, totalname, realization=mc)

        memreport("after deconvolving time constant", comm.comm_world)

        if mc == firstmc:
            # For the first realization and frequency, optionally
            # export the timestream data.
            toast_tools.output_tidas(args, comm, data, totalname)
            so_tools.export_TOD(args, comm, data, totalname, schedules)

            memreport("after export", comm.comm_world)

        if args.no_maps:
            continue

        outpath = setup_output(args, comm, mc)

        # Optionally demodulate signal
        so_tools.demodulate(args, comm, data, totalname, detweights, madampars)

        # Bin and destripe maps
        if args.use_madam:
            toast_tools.apply_madam(
                args, comm, data, madampars, outpath, detweights, totalname,
                time_comms=time_comms, telescope_data=telescope_data,
                first_call=(mc == firstmc),
            )
        else:
            toast_tools.apply_mapmaker(
                args, comm, data, outpath, totalname,
                time_comms=time_comms, telescope_data=telescope_data,
                first_call=(mc == firstmc),
            )

        memreport("after destriper", comm.comm_world)

        if (
            args.filterbin_ground_order is not None
            or args.filterbin_poly_order is not None
        ):
            toast_tools.apply_filterbin(
                args, comm, data, outpath, totalname,
                time_comms=time_comms, telescope_data=telescope_data,
                first_call=(mc == firstmc),
            )

        if args.apply_polyfilter or args.apply_groundfilter:

            # Filter signal
            toast_tools.apply_polyfilter(args, comm, data, totalname)

            memreport("after polyfilter", comm.comm_world)

            # Ground filter
            memreport("after demodulation", comm.comm_world)

            toast_tools.apply_groundfilter(args, comm, data, totalname)

            memreport("after groundfilter", comm.comm_world)

            # Bin maps
            if args.use_madam:
                toast_tools.apply_madam(
                    args, comm, data, madampars, outpath, detweights, totalname,
                    time_comms=time_comms, telescope_data=telescope_data,
                    first_call=args.demodulate, extra_prefix="filtered",
                    bin_only=True,
                )
            else:
                toast_tools.apply_mapmaker(
                    args, comm, data, outpath, totalname,
                    time_comms=time_comms, telescope_data=telescope_data,
                    first_call=False, extra_prefix="filtered", bin_only=True,
                )

            memreport("after filter & bin", comm.comm_world)

        if args.demodulate and args.MC_count > 1:
            if comm.world_rank == 0:
                log.info(
                    "WARNING: demodulation and MC iterations are "
                    "incompatible. Terminating after first MC."
                )
            break

    if comm.comm_world is not None:
        comm.comm_world.barrier()

    memreport("at the end of the pipeline", comm.comm_world)

    gt.stop_all()
    if mpiworld is not None:
        mpiworld.barrier()

    timer = Timer()
    timer.start()
    alltimers = gather_timers(comm=mpiworld)
    if rank == 0:
        out = os.path.join(args.outdir, "timing")
        dump_timing(alltimers, out)
    timer.stop()
    timer.report("Gather and dump timing info")

    timer0.stop()
    if comm.world_rank == 0:
        timer0.report("toast_so_sim.py pipeline")

    return
def main():
    log = Logger.get()
    gt = GlobalTimers.get()
    gt.start("toast_ground_sim (total)")
    timer0 = Timer()
    timer0.start()

    mpiworld, procs, rank, comm = pipeline_tools.get_comm()

    args, comm = parse_arguments(comm)

    if args.use_madam:
        # Initialize madam parameters
        madampars = pipeline_tools.setup_madam(args)

    # Load and broadcast the schedule file
    schedules = pipeline_tools.load_schedule(args, comm)

    # Load the weather and append to schedules
    pipeline_tools.load_weather(args, comm, schedules)

    # Load or simulate the focalplane
    detweights = load_focalplanes(args, comm, schedules)

    # Create the TOAST data object to match the schedule.  This will
    # include simulating the boresight pointing.
    data, telescope_data = create_observations(args, comm, schedules)

    # Split the communicator for day and season mapmaking
    time_comms = pipeline_tools.get_time_communicators(args, comm, data)

    # Expand boresight quaternions into detector pointing weights and
    # pixel numbers
    pipeline_tools.expand_pointing(args, comm, data)

    # Optionally rewrite the noise PSD:s in each observation to include
    # elevation-dependence
    pipeline_tools.get_elevation_noise(args, comm, data)

    # Purge the pointing if we are NOT going to export the
    # data to a TIDAS volume
    if (args.tidas is None) and (args.spt3g is None):
        for ob in data.obs:
            tod = ob["tod"]
            tod.free_radec_quats()

    # Prepare auxiliary information for distributed map objects
    if args.pysm_model:
        focalplanes = [s.telescope.focalplane.detector_data for s in schedules]
        signalname = pipeline_tools.simulate_sky_signal(
            args, comm, data, focalplanes, "signal"
        )
    else:
        signalname = pipeline_tools.scan_sky_signal(args, comm, data, "signal")

    # Set up objects to take copies of the TOD at appropriate times
    totalname, totalname_freq = setup_sigcopy(args)

    # Loop over Monte Carlos
    firstmc = args.MC_start
    nsimu = args.MC_count

    freqs = [float(freq) for freq in args.freq.split(",")]
    nfreq = len(freqs)

    for mc in range(firstmc, firstmc + nsimu):

        pipeline_tools.simulate_atmosphere(args, comm, data, mc, totalname)

        # Loop over frequencies with identical focal planes and identical
        # atmospheric noise.
        for ifreq, freq in enumerate(freqs):

            if comm.world_rank == 0:
                log.info(
                    "Processing frequency {}GHz {} / {}, MC = {}".format(
                        freq, ifreq + 1, nfreq, mc
                    )
                )

            # Make a copy of the atmosphere so we can scramble the gains and
            # apply frequency-dependent scaling.
            pipeline_tools.copy_signal(args, comm, data, totalname, totalname_freq)

            pipeline_tools.scale_atmosphere_by_frequency(
                args, comm, data, freq=freq, mc=mc, cache_name=totalname_freq
            )

            pipeline_tools.update_atmospheric_noise_weights(args, comm, data, freq, mc)

            # Add previously simulated sky signal to the atmospheric noise.
            pipeline_tools.add_signal(
                args, comm, data, totalname_freq, signalname, purge=(nsimu == 1)
            )

            mcoffset = ifreq * 1000000

            pipeline_tools.simulate_noise(args, comm, data, mc + mcoffset, totalname_freq)

            pipeline_tools.simulate_sss(args, comm, data, mc + mcoffset, totalname_freq)

            pipeline_tools.scramble_gains(args, comm, data, mc + mcoffset, totalname_freq)

            if (mc == firstmc) and (ifreq == 0):
                # For the first realization and frequency, optionally
                # export the timestream data.
                pipeline_tools.output_tidas(args, comm, data, totalname)
                pipeline_tools.output_spt3g(args, comm, data, totalname)

            outpath = setup_output(args, comm, mc + mcoffset, freq)

            # Bin and destripe maps
            if args.use_madam:
                pipeline_tools.apply_madam(
                    args, comm, data, madampars, outpath, detweights, totalname_freq,
                    freq=freq, time_comms=time_comms, telescope_data=telescope_data,
                    first_call=(mc == firstmc),
                )
            else:
                pipeline_tools.apply_mapmaker(
                    args, comm, data, outpath, totalname_freq,
                    time_comms=time_comms, telescope_data=telescope_data,
                    first_call=(mc == firstmc),
                )

            if args.apply_polyfilter or args.apply_groundfilter:

                # Filter signal
                pipeline_tools.apply_polyfilter(args, comm, data, totalname_freq)
                pipeline_tools.apply_groundfilter(args, comm, data, totalname_freq)

                # Bin filtered maps
                if args.use_madam:
                    pipeline_tools.apply_madam(
                        args, comm, data, madampars, outpath, detweights,
                        totalname_freq, freq=freq, time_comms=time_comms,
                        telescope_data=telescope_data, first_call=False,
                        extra_prefix="filtered", bin_only=True,
                    )
                else:
                    pipeline_tools.apply_mapmaker(
                        args, comm, data, outpath, totalname_freq,
                        time_comms=time_comms, telescope_data=telescope_data,
                        first_call=False, extra_prefix="filtered", bin_only=True,
                    )

    gt.stop_all()
    if mpiworld is not None:
        mpiworld.barrier()

    timer = Timer()
    timer.start()
    alltimers = gather_timers(comm=mpiworld)
    if comm.world_rank == 0:
        out = os.path.join(args.outdir, "timing")
        dump_timing(alltimers, out)
    timer.stop()
    timer.report("Gather and dump timing info")

    timer0.report_clear("toast_ground_sim.py")

    return
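
# --- Illustrative example (not part of the original script) -------------------
# The `mcoffset = ifreq * 1000000` convention used in the frequency loop above
# reserves a disjoint block of one million realization indices per frequency
# band, so the simulated noise, SSS and gain-scrambling realizations stay
# statistically independent across bands (assuming fewer than one million
# Monte Carlo realizations per run).
def example_realization_index(mc, ifreq):
    # Hypothetical helper mirroring the indexing used in the loop above.
    return mc + ifreq * 1000000

# example_realization_index(5, 0) -> 5
# example_realization_index(5, 1) -> 1000005
# example_realization_index(5, 2) -> 2000005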
def main():
    env = Environment.get()

    log = Logger.get()
    gt = GlobalTimers.get()
    gt.start("toast_satellite_sim (total)")
    timer0 = Timer()
    timer0.start()

    mpiworld, procs, rank, comm = pipeline_tools.get_comm()

    # Parse options
    args, comm, groupsize = parse_arguments(comm, procs)

    tmr = Timer()
    tmr.start()

    if comm.world_rank == 0:
        os.makedirs(args.outdir, exist_ok=True)

    focalplane, gain, detweights = load_focalplane(args, comm)
    if comm.world_rank == 0:
        tmr.report_clear("Load focalplane")

    data = create_observations(args, comm, focalplane, groupsize)
    if comm.world_rank == 0:
        tmr.report_clear("Create observations")

    pipeline_tools.expand_pointing(args, comm, data)
    if comm.world_rank == 0:
        tmr.report_clear("Expand pointing")

    signalname = None
    if args.pysm_model:
        skyname = pipeline_tools.simulate_sky_signal(
            args, comm, data, [focalplane], "signal"
        )
    else:
        skyname = pipeline_tools.scan_sky_signal(args, comm, data, "signal")
    if skyname is not None:
        signalname = skyname
    if comm.world_rank == 0:
        tmr.report_clear("Simulate sky signal")

    # NOTE: Conviqt could use different input file names for different
    # Monte Carlo indices, but the operator would need to be invoked within
    # the Monte Carlo loop.
    skyname = pipeline_tools.apply_conviqt(args, comm, data, "signal", mc=args.MC_start)
    if skyname is not None:
        signalname = skyname
    if comm.world_rank == 0:
        tmr.report_clear("Apply beam convolution")

    diponame = pipeline_tools.simulate_dipole(args, comm, data, "signal")
    if diponame is not None:
        signalname = diponame
    if comm.world_rank == 0:
        tmr.report_clear("Simulate dipole")

    # In debug mode, print out data distribution information
    if args.debug:
        handle = None
        if comm.world_rank == 0:
            handle = open(os.path.join(args.outdir, "distdata.txt"), "w")
        data.info(handle)
        if comm.world_rank == 0:
            handle.close()
        if comm.comm_world is not None:
            comm.comm_world.barrier()
        if comm.world_rank == 0:
            tmr.report_clear("Dumping data distribution")

    # Mapmaking.
    if args.use_madam:
        # Initialize madam parameters
        madampars = pipeline_tools.setup_madam(args)
        if comm.comm_world is not None:
            comm.comm_world.barrier()
        if comm.world_rank == 0:
            tmr.report_clear("Initialize madam map-making")

    # Loop over Monte Carlos
    firstmc = args.MC_start
    nmc = args.MC_count

    for mc in range(firstmc, firstmc + nmc):
        mctmr = Timer()
        mctmr.start()

        # Create output directory for this realization
        outpath = os.path.join(args.outdir, "mc_{:03d}".format(mc))

        pipeline_tools.simulate_noise(args, comm, data, mc, "tot_signal", overwrite=True)
        if comm.comm_world is not None:
            comm.comm_world.barrier()
        if comm.world_rank == 0:
            tmr.report_clear("  Simulate noise {:04d}".format(mc))

        # Add sky signal
        pipeline_tools.add_signal(args, comm, data, "tot_signal", signalname)
        if comm.comm_world is not None:
            comm.comm_world.barrier()
        if comm.world_rank == 0:
            tmr.report_clear("  Add sky signal {:04d}".format(mc))

        if gain is not None:
            op_apply_gain = OpApplyGain(gain, name="tot_signal")
            op_apply_gain.exec(data)
            if comm.comm_world is not None:
                comm.comm_world.barrier()
            if comm.world_rank == 0:
                tmr.report_clear("  Apply gains {:04d}".format(mc))

        if mc == firstmc:
            # For the first realization, optionally export the timestream data.
            # If we had observation intervals defined, we could pass
            # "use_interval=True" to the export operators, which would ensure
            # breaks in the exported data at acceptable places.
            pipeline_tools.output_tidas(args, comm, data, "tot_signal")
            pipeline_tools.output_spt3g(args, comm, data, "tot_signal")
            if comm.comm_world is not None:
                comm.comm_world.barrier()
            if comm.world_rank == 0:
                tmr.report_clear("  Write TOD snapshot {:04d}".format(mc))

        if args.use_madam:
            pipeline_tools.apply_madam(
                args, comm, data, madampars, outpath, detweights, "tot_signal"
            )
        else:
            pipeline_tools.apply_mapmaker(args, comm, data, outpath, "tot_signal")
        if comm.comm_world is not None:
            comm.comm_world.barrier()
        if comm.world_rank == 0:
            tmr.report_clear("  Map-making {:04d}".format(mc))

        if comm.comm_world is not None:
            comm.comm_world.barrier()
        if comm.world_rank == 0:
            mctmr.report_clear("  Monte Carlo loop {:04d}".format(mc))

    gt.stop_all()
    if comm.comm_world is not None:
        comm.comm_world.barrier()

    tmr.stop()
    tmr.clear()
    tmr.start()
    alltimers = gather_timers(comm=comm.comm_world)
    if comm.world_rank == 0:
        out = os.path.join(args.outdir, "timing")
        dump_timing(alltimers, out)
    tmr.stop()
    tmr.report("Gather and dump timing info")

    timer0.report_clear("toast_satellite_sim.py")

    return
# print(data.obs[0]['tod'].cache.keys())
# print(data.obs[0]['tod'].cache['pixels_13.13_155.150B'])

for mc in range(args.mc_start, args.mc_start + args.nsims):
    total_prefix = sims_prefix + str(mc)
    print(f'Processing {total_prefix}')
    outpath = "{}/{}".format(args.outpath, mc)

    # tpt.simulate_atmosphere(args, comm, data, mc, total_prefix)
    # tpt.scale_atmosphere_by_frequency(
    #     args, comm, data, freq=None, mc=mc, cache_name=total_prefix)

    tpt.scan_sky_signal(args, comm, data, total_prefix, mc=mc)

    # tpt.simulate_noise(args, comm, data, mc, total_prefix)
    # tpt.simulate_sss(args, comm, data, mc, total_prefix)
    # high_pass_filter(data, total_prefix)
    # tpt.apply_polyfilter(args, comm, data, total_prefix)
    # tpt.apply_groundfilter(args, comm, data, total_prefix)

    tpt.apply_mapmaker(args, comm, data, outpath, total_prefix, bin_only=True)

# if rank == 0:
#     print(data.obs[0]['tod'].cache['mc0_13.13_135.150B'])
#     print(data.obs[0]['tod'].cache['mc1_13.13_135.150B'])
# if rank == 1:
#     print(data.obs[0]['tod'].cache['mc2_13.13_135.150B'])
#     print(data.obs[0]['tod'].cache['mc3_13.13_135.150B'])

# sa_tpt.add_suffix_to_detname(data, sims_prefix, data_prefix, suffix='-I')


def high_pass_filter(data, total_prefix):
    print('High pass filtering')
    sos = signal.butter(2, 0.1, 'hp', fs=152.58789, output='sos')
    for obs in data.obs:
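        # --- Hedged sketch: the original loop body is truncated here ----------
        # A minimal completion, assuming the per-detector timestreams are cached
        # under the `total_prefix` name (as in the simulation calls above) and
        # that filtering them in place is acceptable.
        tod = obs['tod']
        for det in tod.local_dets:
            sig = tod.local_signal(det, total_prefix)
            # Zero-phase high-pass filter using the SOS coefficients defined above.
            sig[:] = signal.sosfiltfilt(sos, sig)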