def main(cfg, run_number, scratch):
    """Run the detector-simulation step for a single run.

    Loads the YAML config, resolves the input/output file paths and runs
    the IceTray ``DetectorSim`` segment on the photon-level input file.

    Fix: the bare ``yaml.load(stream)`` call raises a TypeError on
    PyYAML >= 6 (the ``Loader`` argument became mandatory) and only warns
    on PyYAML 5.  Use the same version-compatible load used by the other
    processing steps in this file.

    Parameters
    ----------
    cfg : str
        Path to the YAML configuration file.
    run_number : int
        Run number of this job; also substituted into the file patterns.
    scratch : bool
        If True, write the output to the scratch file pattern.
    """
    with open(cfg, 'r') as stream:
        if int(yaml.__version__[0]) < 5:
            # backwards compatibility for yaml versions before version 5
            cfg = yaml.load(stream)
        else:
            cfg = yaml.full_load(stream)
    cfg['run_number'] = run_number
    cfg['run_folder'] = get_run_folder(run_number)

    infile = cfg['infile_pattern'].format(**cfg)
    # file patterns zero-pad numbers via ' ' -> '0'
    infile = infile.replace(' ', '0')

    tray = I3Tray()
    tray.context['I3FileStager'] = dataio.get_stagers()

    random_service, _, run_id = create_random_services(
        dataset_number=cfg['dataset_number'],
        run_number=cfg['run_number'],
        seed=cfg['seed'])
    tray.context['I3RandomService'] = random_service

    tray.Add('I3Reader', FilenameList=[cfg['gcd_pass2'], infile])

    # Keep full MC information for the first runs of the dataset
    # (overrides the per-key settings from the config).
    if run_number < cfg['det_pass2_keep_all_upto']:
        cfg['det_keep_mc_hits'] = True
        cfg['det_keep_propagated_mc_tree'] = True
        cfg['det_keep_mc_pulses'] = True

    tray.AddSegment(segments.DetectorSim, "Detector5Sim",
                    RandomService='I3RandomService',
                    RunID=run_id,
                    GCDFile=cfg['gcd_pass2'],
                    KeepMCHits=cfg['det_keep_mc_hits'],
                    KeepPropagatedMCTree=cfg['det_keep_propagated_mc_tree'],
                    KeepMCPulses=cfg['det_keep_mc_pulses'],
                    SkipNoiseGenerator=cfg['det_skip_noise_generation'],
                    LowMem=cfg['det_low_mem'],
                    InputPESeriesMapName=MCPE_SERIES_MAP,
                    BeaconLaunches=cfg['det_add_beacon_launches'],
                    FilterTrigger=cfg['det_filter_trigger'])

    if scratch:
        outfile = cfg['scratchfile_pattern'].format(**cfg)
    else:
        outfile = cfg['outfile_pattern'].format(**cfg)
    outfile = outfile.replace(' ', '0')
    print(outfile)
    print(cfg['outfile_pattern'])

    tray.AddModule("I3Writer", "EventWriter",
                   filename=outfile,
                   Streams=[icetray.I3Frame.DAQ,
                            icetray.I3Frame.Physics,
                            icetray.I3Frame.TrayInfo,
                            icetray.I3Frame.Simulation])
    tray.AddModule("TrashCan", "the can")
    tray.Execute()
    tray.Finish()
def main(cfg, run_number, scratch):
    """Generate primary particles from a skymap and propagate their muons.

    Creates events with ``ParticleFactory`` (directions drawn from the
    configured skymap, smeared by ``smearing_angle``), propagates the
    resulting muons, and writes DAQ + 'M' frames to the output file.

    Parameters
    ----------
    cfg : str
        Path to the YAML configuration file.
    run_number : int
        Run number of this job; also substituted into the file patterns.
    scratch : bool
        If True, write the output to the scratch file pattern.
    """
    with open(cfg, 'r') as stream:
        if int(yaml.__version__[0]) < 5:
            # backwards compatibility for yaml versions before version 5
            cfg = yaml.load(stream)
        else:
            cfg = yaml.full_load(stream)
    cfg['run_number'] = run_number
    cfg['run_folder'] = get_run_folder(run_number)

    if scratch:
        outfile = cfg['scratchfile_pattern'].format(**cfg)
    else:
        outfile = cfg['outfile_pattern'].format(**cfg)
    # file patterns zero-pad numbers via ' ' -> '0'
    outfile = outfile.replace(' ', '0')

    click.echo('Run: {}'.format(run_number))
    click.echo('ParticleType: {}'.format(cfg['particle_type']))
    click.echo('Outfile: {}'.format(outfile))
    click.echo('n_events_per_run: {}'.format(cfg['n_events_per_run']))
    click.echo('smearing_angle: {}'.format(cfg['smearing_angle']))
    click.echo('skymap_path: {}'.format(cfg['skymap_path']))

    tray = I3Tray()

    # two independent random services: [0] for event generation,
    # [1] for muon propagation
    random_services, _ = create_random_services(
        dataset_number=cfg['dataset_number'],
        run_number=cfg['run_number'],
        seed=cfg['seed'],
        n_services=2)

    tray.AddModule(
        'I3InfiniteSource',
        'source',
        # Prefix=gcdfile,
        Stream=icetray.I3Frame.DAQ)

    tray.AddModule(ParticleFactory,
                   'make_particles',
                   particle_type=cfg['particle_type'],
                   map_filename=cfg['skymap_path'],
                   num_events=cfg['n_events_per_run'],
                   smearing_angle=cfg['smearing_angle'] * I3Units.deg,
                   random_state=cfg['seed'],
                   random_service=random_services[0])

    tray.AddSegment(segments.PropagateMuons,
                    'propagate_muons',
                    RandomService=random_services[1],
                    InputMCTreeName='I3MCTree',
                    **cfg['muon_propagation_config'])

    tray.AddModule('I3Writer', 'write',
                   Filename=outfile,
                   Streams=[icetray.I3Frame.DAQ,
                            icetray.I3Frame.Stream('M')])

    tray.AddModule('TrashCan', 'trash')
    tray.Execute()
    tray.Finish()

    del tray
def main(cfg, run_number, scratch):
    """Apply the DNN reconstruction to an input i3 file.

    Masks the configured pulse map (excluding the configured DOMs), runs
    ``DeepLearningReco`` on the masked pulses, and writes the result.

    Parameters
    ----------
    cfg : str
        Path to the YAML configuration file.
    run_number : int
        Run number of this job; also substituted into the file patterns.
    scratch : bool
        If True, write the output to the scratch file pattern.
    """
    with open(cfg, 'r') as stream:
        cfg = yaml.load(stream, Loader=yaml.Loader)
    cfg['run_number'] = run_number
    cfg['run_folder'] = get_run_folder(run_number)

    infile = cfg['infile_pattern'].format(**cfg)
    # file patterns zero-pad numbers via ' ' -> '0'
    infile = infile.replace(' ', '0')

    if scratch:
        outfile = cfg['scratchfile_pattern'].format(**cfg)
    else:
        outfile = cfg['outfile_pattern'].format(**cfg)
    outfile = outfile.replace(' ', '0')

    tray = I3Tray()

    tray.AddModule('I3Reader',
                   'i3 reader',
                   FilenameList=[cfg['gcd'], infile])

    # --------------------------------------------------
    # Mask Pulses
    # --------------------------------------------------
    # only run on frames that actually contain the configured pulse key
    tray.AddModule(get_valid_pulse_map, 'get_valid_pulse_map',
                   pulse_key=cfg['DNN_pulse_key'],
                   excluded_doms=cfg['DNN_excluded_doms'],
                   partial_exclusion=cfg['DNN_partial_exclusion'],
                   verbose=True,
                   If=lambda f: cfg['DNN_pulse_key'] in f)

    # --------------------------------------------------
    # Apply DNN_reco
    # --------------------------------------------------
    tray.AddModule(
        DeepLearningReco, 'DeepLearningReco',
        PulseMapString=cfg['DNN_pulse_key'] + '_masked',
        OutputBaseName=cfg['DNN_output_base_name'],
        DNNModel=cfg['DNN_model'],
        DNNModelDirectory=cfg['DNN_model_directory'],
        MeasureTime=cfg['DNN_measure_time'],
        # CPU budget for this step comes from the resource config
        ParallelismThreads=cfg['resources']['cpus'][cfg['step']],
    )

    tray.AddModule("I3Writer", "EventWriter",
                   filename=outfile,
                   Streams=[icetray.I3Frame.DAQ,
                            icetray.I3Frame.Physics,
                            icetray.I3Frame.TrayInfo,
                            icetray.I3Frame.Simulation],
                   DropOrphanStreams=[icetray.I3Frame.DAQ])
    tray.Execute()
    del tray
def main(cfg, run_number, scratch):
    """Run Muon Level3 processing on (renamed pass2) Level2 files.

    Fix: the trailing ``outfile.replace(' ', '0')`` and
    ``outfile.replace('2012_pass2', '2012')`` statements were duplicated
    verbatim after the print; the replacements are idempotent, so the
    redundant second pass has been removed without behavior change.

    Parameters
    ----------
    cfg : str
        Path to the YAML configuration file.
    run_number : int
        Run number of this job; also substituted into the file patterns.
    scratch : bool
        If True, write the output to the scratch file pattern.
    """
    with open(cfg, 'r') as stream:
        if int(yaml.__version__[0]) < 5:
            # backwards compatibility for yaml versions before version 5
            cfg = yaml.load(stream)
        else:
            cfg = yaml.full_load(stream)

    icetray.logging.set_level("WARN")

    cfg['run_number'] = run_number
    cfg['run_folder'] = get_run_folder(run_number)

    infile = cfg['infile_pattern'].format(**cfg)
    # file patterns zero-pad numbers via ' ' -> '0'
    infile = infile.replace(' ', '0')
    infile = infile.replace('Level0.{}'.format(cfg['previous_step']),
                            'Level2')
    infile = infile.replace('2012_pass2', '2012')

    if scratch:
        outfile = cfg['scratchfile_pattern'].format(**cfg)
    else:
        outfile = cfg['outfile_pattern'].format(**cfg)
    outfile = outfile.replace('Level0.{}'.format(cfg['step']), 'Level3')
    outfile = outfile.replace(' ', '0')
    outfile = outfile.replace('2012_pass2', '2012')
    print('Outfile != $FINAL_OUT clean up for crashed scripts not possible!')

    tray = I3Tray()

    photonics_dir = os.path.join(PHOTON_TABLES, 'SPICEMie')
    photonics_driver_dir = os.path.join(photonics_dir, 'driverfiles')

    tray.AddSegment(
        MuonL3,
        gcdfile=cfg['gcd'],
        infiles=infile,
        output_i3=outfile,
        output_hd5=None,
        output_root=None,
        photonicsdir=photonics_dir,
        photonicsdriverdir=photonics_driver_dir,
        photonicsdriverfile=DRIVER_FILE,
        infmuonampsplinepath=os.path.join(
            SPLINE_TABLES, 'InfBareMu_mie_abs_z20a10.fits'),
        infmuonprobsplinepath=os.path.join(
            SPLINE_TABLES, 'InfBareMu_mie_prob_z20a10.fits'),
        cascadeampsplinepath=os.path.join(
            SPLINE_TABLES, 'ems_mie_z20_a10.abs.fits'),
        cascadeprobsplinepath=os.path.join(
            SPLINE_TABLES, 'ems_mie_z20_a10.prob.fits'))

    tray.AddModule("TrashCan", "Bye")
    tray.Execute()
    tray.Finish()
def main(cfg, run_number, scratch):
    """Run the offline (Level2) filter on pass2 Level1 files for one run.

    Parameters
    ----------
    cfg : str
        Path to the YAML configuration file.
    run_number : int
        Run number of this job; also substituted into the file patterns.
    scratch : bool
        If True, write the output to the scratch file pattern.
    """
    with open(cfg, 'r') as stream:
        if int(yaml.__version__[0]) < 5:
            # backwards compatibility for yaml versions before version 5
            cfg = yaml.load(stream)
        else:
            cfg = yaml.full_load(stream)
    cfg['run_number'] = run_number
    cfg['run_folder'] = get_run_folder(run_number)

    infile = cfg['infile_pattern'].format(**cfg)
    # file patterns zero-pad numbers via ' ' -> '0'
    infile = infile.replace(' ', '0')
    # NOTE(review): 'previous_step % 10' appears to strip a leading digit
    # from the step encoding -- confirm against the pattern definitions.
    infile = infile.replace('Level0.{}'.format(cfg['previous_step']),
                            'Level0.{}'.format(cfg['previous_step'] % 10))
    infile = infile.replace('2012_pass2', 'pass2')

    if scratch:
        outfile = cfg['scratchfile_pattern'].format(**cfg)
    else:
        outfile = cfg['outfile_pattern'].format(**cfg)
    outfile = outfile.replace('Level0.{}'.format(cfg['step']), 'Level2')
    outfile = outfile.replace(' ', '0')
    outfile = outfile.replace('2012_pass2', 'pass2')
    print('Outfile != $FINAL_OUT clean up for crashed scripts not possible!')

    tray = I3Tray()

    tray.AddModule('I3Reader',
                   'i3 reader',
                   FilenameList=[cfg['gcd_pass2'], infile])

    # standard offline (L2) filtering on simulated data
    tray.AddSegment(OfflineFilter,
                    "OfflineFilter",
                    dstfile=None,
                    mc=True,
                    doNotQify=True,
                    photonicsdir=PHOTONICS_DIR)

    tray.AddModule("I3Writer", "EventWriter",
                   filename=outfile,
                   Streams=[icetray.I3Frame.DAQ,
                            icetray.I3Frame.Physics,
                            icetray.I3Frame.TrayInfo,
                            icetray.I3Frame.Simulation,
                            icetray.I3Frame.Stream('m'),
                            icetray.I3Frame.Stream('M')],
                   DropOrphanStreams=[icetray.I3Frame.DAQ])
    tray.AddModule("TrashCan", "the can")
    tray.Execute()
    tray.Finish()
    del tray
def main(cfg, run_number, scratch):
    """Run photon propagation, optionally split into distance streams.

    Without distance splits the whole file is processed in one call.  With
    splits, each distance stream is processed in its own subprocess (with
    its own DOM oversize factor) and the per-stream outputs are merged.

    Parameters
    ----------
    cfg : str
        Path to the YAML configuration file.
    run_number : int
        Run number of this job; also substituted into the file patterns.
    scratch : bool
        If True, write the output to the scratch file pattern.
    """
    with open(cfg, 'r') as stream:
        if int(yaml.__version__[0]) < 5:
            # backwards compatibility for yaml versions before version 5
            cfg = yaml.load(stream)
        else:
            cfg = yaml.full_load(stream)
    cfg['run_number'] = run_number
    cfg['run_folder'] = get_run_folder(run_number)

    infile = cfg['infile_pattern'].format(**cfg)
    # file patterns zero-pad numbers via ' ' -> '0'
    infile = infile.replace(' ', '0')

    if scratch:
        outfile = cfg['scratchfile_pattern'].format(**cfg)
    else:
        outfile = cfg['outfile_pattern'].format(**cfg)
    outfile = outfile.replace(' ', '0')

    if cfg.get('distance_splits', False):
        distance_splits = np.atleast_1d(cfg['distance_splits'])
        dom_limits = np.atleast_1d(cfg['threshold_doms'])
        # a single DOM limit applies to every distance split
        if len(dom_limits) == 1:
            dom_limits = np.ones_like(distance_splits) * cfg['threshold_doms']
        oversize_factors = np.atleast_1d(cfg['oversize_factors'])
        # process streams in order of increasing split distance
        order = np.argsort(distance_splits)
        stream_objects = generate_stream_object(distance_splits[order],
                                                dom_limits[order],
                                                oversize_factors[order])
        # progress is communicated to the worker via function attributes
        process_single_stream.n_streams = len(stream_objects)
        for stream_i in stream_objects:
            infile_i = stream_i.transform_filepath(infile)
            outfile_i = stream_i.transform_filepath(outfile)
            # each stream uses its own DOM oversize factor
            cfg['clsim_dom_oversize'] = stream_i.oversize_factor
            # run in a separate process; abort the job on any failure
            proc = ExecProcess(target=process_single_stream,
                               args=(cfg, infile_i, outfile_i))
            proc.start()
            proc.join()
            process_single_stream.i_th_stream += 1
            if proc.exception:
                error, traceback = proc.exception
                print(traceback)
                print(error)
                sys.exit(1)
        # merge the per-stream outputs into the final outfile
        infiles = [stream_i.transform_filepath(outfile)
                   for stream_i in stream_objects]
        merge(infiles, outfile)
    else:
        process_single_stream(cfg, infile, outfile)
def main(cfg, run_number, scratch):
    """Propagate muons in the input file and write the result.

    Fix: the body referenced ``random_service_prop`` although the code
    creating the random services was commented out, so the function
    raised a NameError at runtime.  The random-service setup (matching
    the sibling scripts in this file) has been restored.

    Parameters
    ----------
    cfg : str
        Path to the YAML configuration file.
    run_number : int
        Run number of this job; also substituted into the file patterns.
    scratch : bool
        If True, write the output to the scratch file pattern.
    """
    with open(cfg, 'r') as stream:
        cfg = yaml.load(stream, Loader=yaml.Loader)
    cfg['run_number'] = run_number
    cfg['run_folder'] = get_run_folder(run_number)

    infile = cfg['infile_pattern'].format(**cfg)
    # file patterns zero-pad numbers via ' ' -> '0'
    infile = infile.replace(' ', '0')

    if scratch:
        outfile = cfg['scratchfile_pattern'].format(**cfg)
    else:
        outfile = cfg['outfile_pattern'].format(**cfg)
    outfile = outfile.replace(' ', '0')

    tray = I3Tray()
    tray.Add('I3Reader', FilenameList=[cfg['gcd'], infile])

    # two random services: one for the tray context, one for propagation
    random_services, _ = create_random_services(
        dataset_number=cfg['dataset_number'],
        run_number=cfg['run_number'],
        seed=cfg['seed'],
        n_services=2)
    random_service, random_service_prop = random_services
    tray.context['I3RandomService'] = random_service

    # --------------------------------------
    # Propagate Muons
    # --------------------------------------
    tray.AddSegment(
        segments.PropagateMuons,
        "PropagateMuons",
        RandomService=random_service_prop,
        **cfg['muon_propagation_config'])

    click.echo('Output: {}'.format(outfile))
    tray.AddModule("I3Writer", "writer",
                   Filename=outfile,
                   Streams=[icetray.I3Frame.DAQ,
                            icetray.I3Frame.Physics,
                            icetray.I3Frame.Stream('S'),
                            icetray.I3Frame.Stream('M')])
    click.echo('Scratch: {}'.format(scratch))
    tray.Execute()
def main(cfg, run_number, scratch):
    """Run Muon Level3 processing for one run.

    Fix: the bare ``yaml.load(stream)`` call raises a TypeError on
    PyYAML >= 6 (the ``Loader`` argument became mandatory).  Use the same
    version-compatible load used by the other processing steps in this
    file.

    Parameters
    ----------
    cfg : str
        Path to the YAML configuration file.
    run_number : int
        Run number of this job; also substituted into the file patterns.
    scratch : bool
        If True, write the output to the scratch file pattern.
    """
    with open(cfg, 'r') as stream:
        if int(yaml.__version__[0]) < 5:
            # backwards compatibility for yaml versions before version 5
            cfg = yaml.load(stream)
        else:
            cfg = yaml.full_load(stream)

    icetray.logging.set_level("WARN")

    cfg['run_number'] = run_number
    cfg['run_folder'] = get_run_folder(run_number)

    infile = cfg['infile_pattern'].format(**cfg)
    # file patterns zero-pad numbers via ' ' -> '0'
    infile = infile.replace(' ', '0')

    if scratch:
        outfile = cfg['scratchfile_pattern'].format(**cfg)
    else:
        outfile = cfg['outfile_pattern'].format(**cfg)
    outfile = outfile.replace(' ', '0')

    tray = I3Tray()

    photonics_dir = os.path.join(PHOTON_TABLES, 'SPICEMie')
    photonics_driver_dir = os.path.join(photonics_dir, 'driverfiles')

    tray.AddSegment(
        MuonL3,
        gcdfile=cfg['gcd'],
        infiles=infile,
        output_i3=outfile,
        output_hd5=None,
        output_root=None,
        photonicsdir=photonics_dir,
        photonicsdriverdir=photonics_driver_dir,
        photonicsdriverfile=DRIVER_FILE,
        infmuonampsplinepath=os.path.join(
            SPLINE_TABLES, 'InfBareMu_mie_abs_z20a10.fits'),
        infmuonprobsplinepath=os.path.join(
            SPLINE_TABLES, 'InfBareMu_mie_prob_z20a10.fits'),
        cascadeampsplinepath=os.path.join(
            SPLINE_TABLES, 'ems_mie_z20_a10.abs.fits'),
        cascadeprobsplinepath=os.path.join(
            SPLINE_TABLES, 'ems_mie_z20_a10.prob.fits'))

    tray.AddModule("TrashCan", "Bye")
    tray.Execute()
    tray.Finish()
def main(cfg, run_number, scratch):
    """Run the SnowStorm propagation step for one run.

    Loads the YAML config, resolves the input/output paths and delegates
    the actual work to ``run_snowstorm_propagation``.  Distance splits are
    not implemented for this step.

    Parameters
    ----------
    cfg : str
        Path to the YAML configuration file.
    run_number : int
        Run number of this job; also substituted into the file patterns.
    scratch : bool
        If True, write the output to the scratch file pattern.
    """
    with open(cfg, 'r') as config_stream:
        cfg = yaml.full_load(config_stream)
    cfg['run_number'] = run_number
    cfg['run_folder'] = get_run_folder(run_number)

    # resolve file patterns; ' ' -> '0' zero-pads the formatted numbers
    infile = cfg['infile_pattern'].format(**cfg).replace(' ', '0')
    pattern_key = 'scratchfile_pattern' if scratch else 'outfile_pattern'
    outfile = cfg[pattern_key].format(**cfg).replace(' ', '0')

    if cfg.get('distance_splits', False):
        raise NotImplementedError('Distance splits are not supported!')
    run_snowstorm_propagation(cfg, infile, outfile)
def main(cfg, run_number, scratch):
    """Run Muon Level3 processing with table paths taken from the config.

    Parameters
    ----------
    cfg : str
        Path to the YAML configuration file.
    run_number : int
        Run number of this job; also substituted into the file patterns.
    scratch : bool
        If True, write the output to the scratch file pattern.
    """
    with open(cfg, 'r') as stream:
        cfg = yaml.load(stream, Loader=yaml.Loader)

    icetray.logging.set_level("WARN")

    cfg['run_number'] = run_number
    cfg['run_folder'] = get_run_folder(run_number)

    infile = cfg['infile_pattern'].format(**cfg)
    # file patterns zero-pad numbers via ' ' -> '0'
    infile = infile.replace(' ', '0')

    if scratch:
        outfile = cfg['scratchfile_pattern'].format(**cfg)
    else:
        outfile = cfg['outfile_pattern'].format(**cfg)
    outfile = outfile.replace(' ', '0')

    photonics_dir = os.path.join(cfg['photon_tables_dir'], 'SPICEMie')
    photonics_driver_dir = os.path.join(photonics_dir, 'driverfiles')

    tray = I3Tray()

    tray.AddSegment(
        MuonL3,
        gcdfile=cfg['gcd'],
        infiles=infile,
        output_i3=outfile,
        output_hd5="",
        output_root="",
        photonicsdir=photonics_dir,
        photonicsdriverdir=photonics_driver_dir,
        photonicsdriverfile='mu_photorec.list',
        infmuonampsplinepath=os.path.join(
            cfg['spline_table_dir'], cfg['mu_amplitude_spline_table']),
        infmuonprobsplinepath=os.path.join(
            cfg['spline_table_dir'], cfg['mu_timing_spline_table']),
        cascadeampsplinepath=os.path.join(
            cfg['spline_table_dir'], cfg['cascade_amplitude_spline_table']),
        cascadeprobsplinepath=os.path.join(
            cfg['spline_table_dir'], cfg['cascade_timing_spline_table']),
        restore_timewindow_forMC=True)

    tray.Execute()
    del tray
def main(cfg, run_number, scratch):
    """Run the offline (Level2) filter on Level1 files for one run.

    Fix: the bare ``yaml.load(stream)`` call raises a TypeError on
    PyYAML >= 6 (the ``Loader`` argument became mandatory).  Use the same
    version-compatible load used by the other processing steps in this
    file.

    Parameters
    ----------
    cfg : str
        Path to the YAML configuration file.
    run_number : int
        Run number of this job; also substituted into the file patterns.
    scratch : bool
        If True, write the output to the scratch file pattern.
    """
    with open(cfg, 'r') as stream:
        if int(yaml.__version__[0]) < 5:
            # backwards compatibility for yaml versions before version 5
            cfg = yaml.load(stream)
        else:
            cfg = yaml.full_load(stream)
    cfg['run_number'] = run_number
    cfg['run_folder'] = get_run_folder(run_number)

    infile = cfg['infile_pattern'].format(**cfg)
    # file patterns zero-pad numbers via ' ' -> '0'
    infile = infile.replace(' ', '0')

    tray = I3Tray()

    tray.AddModule('I3Reader',
                   'i3 reader',
                   FilenameList=[cfg['gcd_pass2'], infile])

    # standard offline (L2) filtering on simulated data
    tray.AddSegment(OfflineFilter,
                    "OfflineFilter",
                    dstfile=None,
                    mc=True,
                    doNotQify=True,
                    photonicsdir=PHOTONICS_DIR)

    if scratch:
        outfile = cfg['scratchfile_pattern'].format(**cfg)
    else:
        outfile = cfg['outfile_pattern'].format(**cfg)
    outfile = outfile.replace(' ', '0')

    tray.AddModule("I3Writer", "EventWriter",
                   filename=outfile,
                   Streams=[icetray.I3Frame.DAQ,
                            icetray.I3Frame.Physics,
                            icetray.I3Frame.TrayInfo,
                            icetray.I3Frame.Simulation],
                   DropOrphanStreams=[icetray.I3Frame.DAQ])
    tray.AddModule("TrashCan", "the can")
    tray.Execute()
    tray.Finish()
    del tray
def main(cfg, run_number, scratch, do_merging_if_necessary):
    """Run Level1 pulse extraction on simulated events for one run.

    Extracts pulses, drops an unneeded sub-event stream, optionally merges
    oversampled events, prunes the frame content to a keep list, and
    writes the result.

    Parameters
    ----------
    cfg : str
        Path to the YAML configuration file.
    run_number : int
        Run number of this job; also substituted into the file patterns.
    scratch : bool
        If True, write the output to the scratch file pattern.
    do_merging_if_necessary : bool
        If True (and oversampling is configured), merge oversampled events.
    """
    with open(cfg, 'r') as stream:
        if int(yaml.__version__[0]) < 5:
            # backwards compatibility for yaml versions before version 5
            cfg = yaml.load(stream)
        else:
            cfg = yaml.full_load(stream)
    cfg['run_number'] = run_number
    cfg['run_folder'] = get_run_folder(run_number)

    infile = cfg['infile_pattern'].format(**cfg)
    # file patterns zero-pad numbers via ' ' -> '0'
    infile = infile.replace(' ', '0')
    # NOTE(review): 'previous_step % 10' appears to strip a leading digit
    # from the step encoding -- confirm against the pattern definitions.
    infile = infile.replace('Level0.{}'.format(cfg['previous_step']),
                            'Level0.{}'.format(cfg['previous_step'] % 10))
    infile = infile.replace('2012_pass2', 'pass2')

    if scratch:
        outfile = cfg['scratchfile_pattern'].format(**cfg)
    else:
        outfile = cfg['outfile_pattern'].format(**cfg)
    outfile = outfile.replace('Level0.{}'.format(cfg['step']),
                              'Level0.{}'.format(cfg['step'] % 10))
    outfile = outfile.replace(' ', '0')
    outfile = outfile.replace('2012_pass2', 'pass2')
    print('Outfile != $FINAL_OUT clean up for crashed scripts not possible!')

    tray = I3Tray()

    tray.AddModule('I3Reader',
                   'i3 reader',
                   FilenameList=[cfg['gcd_pass2'], infile])

    # get pulses
    tray.AddSegment(
        GetPulses, "GetPulses",
        decode=False,
        simulation=True,
    )

    # Throw out unneeded streams and keys
    if 'oversampling_keep_keys' not in cfg:
        cfg['oversampling_keep_keys'] = []
    elif cfg['oversampling_keep_keys'] is None:
        cfg['oversampling_keep_keys'] = []

    # choose which sub-event stream to wipe
    if cfg['L1_keep_untriggered']:
        stream_name = filter_globals.InIceSplitter
    else:
        stream_name = filter_globals.NullSplitter
    tray.AddModule("KeepFromSubstream", "DeleteSubstream",
                   StreamName=stream_name,
                   KeepKeys=['do_not_keep_anything'])

    # merge oversampled events: calculate average hits
    if cfg['oversampling_factor'] is not None and do_merging_if_necessary:
        if 'oversampling_merge_events' in cfg:
            merge_events = cfg['oversampling_merge_events']
        else:
            # backward compability
            merge_events = True
        if merge_events:
            tray.AddModule(MergeOversampledEvents, 'MergeOversampledEvents',
                           OversamplingFactor=cfg['oversampling_factor'])

    # frame keys that survive the final Keep
    keys_to_keep = [
        'TimeShift',
        'I3MCTree_preMuonProp',
        'I3MCTree',
        'I3MCWeightDict',
        'CorsikaWeightMap',
        'MMCTrackList',
        'I3EventHeader',
        'I3SuperDST',
        'RNGState',
        'oversampling',
        'AggregatedPulses',
        'InIceDSTPulses',
        'InIceDSTPulsesTimeRange',
        'CalibrationErrata',
        'SaturationWindows',
        'SplitUncleanedInIcePulses',
        'SplitUncleanedInIcePulsesTimeRange',
        'SplitUncleanedInIceDSTPulsesTimeRange',
        'I3TriggerHierarchy',
        'GCFilter_GCFilterMJD',
    ]
    keys_to_keep += filter_globals.inice_split_keeps + \
        filter_globals.onlinel2filter_keeps

    # also keep keys imported from external events (possibly renamed)
    if 'event_import_settings' in cfg:
        import_keys = [
            cfg['event_import_settings']['rename_dict'].get(k, k)
            for k in cfg['event_import_settings']['keys_to_import']
        ]
        keys_to_keep += import_keys

    tray.AddModule("Keep", "keep_before_merge",
                   keys=keys_to_keep + cfg['oversampling_keep_keys'])

    tray.AddModule("I3Writer", "EventWriter",
                   filename=outfile,
                   Streams=[icetray.I3Frame.DAQ,
                            icetray.I3Frame.Physics,
                            icetray.I3Frame.TrayInfo,
                            icetray.I3Frame.Simulation,
                            icetray.I3Frame.Stream('M')])
    tray.AddModule("TrashCan", "the can")
    tray.Execute()
    tray.Finish()
def main(cfg, run_number, scratch):
    """Inject cascade events and (optionally) propagate their muons.

    Builds events with ``CascadeFactory``, optionally propagates muons,
    multiplies DAQ frames for oversampling after proposal, and writes the
    output -- either into one file or split by distance streams.

    Parameters
    ----------
    cfg : str
        Path to the YAML configuration file.
    run_number : int
        Run number of this job; also substituted into the file patterns.
    scratch : bool
        If True, write the output to the scratch file pattern.
    """
    with open(cfg, 'r') as stream:
        cfg = yaml.full_load(stream)
    cfg['run_number'] = run_number
    cfg['run_folder'] = get_run_folder(run_number)
    if scratch:
        outfile = cfg['scratchfile_pattern'].format(**cfg)
    else:
        outfile = cfg['outfile_pattern'].format(**cfg)
    # file patterns zero-pad numbers via ' ' -> '0'
    outfile = outfile.replace(' ', '0')

    click.echo('Run: {}'.format(run_number))
    click.echo('Outfile: {}'.format(outfile))
    click.echo('Azimuth: [{},{}]'.format(*cfg['azimuth_range']))
    click.echo('Zenith: [{},{}]'.format(*cfg['zenith_range']))
    click.echo('Energy: [{},{}]'.format(*cfg['primary_energy_range']))
    click.echo('Vertex x: [{},{}]'.format(*cfg['x_range']))
    click.echo('Vertex y: [{},{}]'.format(*cfg['y_range']))
    click.echo('Vertex z: [{},{}]'.format(*cfg['z_range']))

    # crate random services
    if 'random_service_use_gslrng' not in cfg:
        cfg['random_service_use_gslrng'] = False
    random_services, _ = create_random_services(
        dataset_number=cfg['dataset_number'],
        run_number=cfg['run_number'],
        seed=cfg['seed'],
        n_services=2,
        use_gslrng=cfg['random_service_use_gslrng'])

    # --------------------------------------
    # Build IceTray
    # --------------------------------------
    tray = I3Tray()
    tray.AddModule(
        'I3InfiniteSource',
        'source',
        # Prefix=gcdfile,
        Stream=icetray.I3Frame.DAQ)

    # defaults for settings that were added after older configs were written
    if 'max_vertex_distance' not in cfg:
        cfg['max_vertex_distance'] = None
    if 'constant_vars' not in cfg:
        cfg['constant_vars'] = None
    if 'sample_uniformly_on_sphere' not in cfg:
        cfg['sample_uniformly_on_sphere'] = False

    # oversample either at injection time or after muon proposal
    if 'oversample_after_proposal' in cfg and \
            cfg['oversample_after_proposal']:
        oversampling_factor_injection = None
        oversampling_factor_photon = cfg['oversampling_factor']
    else:
        oversampling_factor_injection = cfg['oversampling_factor']
        oversampling_factor_photon = None

    tray.AddModule(
        CascadeFactory,
        'make_cascades',
        azimuth_range=cfg['azimuth_range'],
        zenith_range=cfg['zenith_range'],
        sample_uniformly_on_sphere=cfg['sample_uniformly_on_sphere'],
        primary_energy_range=cfg['primary_energy_range'],
        fractional_energy_in_hadrons_range=cfg[
            'fractional_energy_in_hadrons_range'],
        time_range=cfg['time_range'],
        x_range=cfg['x_range'],
        y_range=cfg['y_range'],
        z_range=cfg['z_range'],
        max_vertex_distance=cfg['max_vertex_distance'],
        flavors=cfg['flavors'],
        interaction_types=cfg['interaction_types'],
        num_events=cfg['n_events_per_run'],
        oversampling_factor=oversampling_factor_injection,
        random_state=cfg['seed'],
        random_service=random_services[0],
        constant_vars=cfg['constant_vars'],
    )

    # propagate muons if config exists in config
    # Note: Snowstorm may perform muon propagation internally
    if 'muon_propagation_config' in cfg:
        tray.AddSegment(segments.PropagateMuons,
                        'propagate_muons',
                        RandomService=random_services[1],
                        **cfg['muon_propagation_config'])
    else:
        # In this case we are not propagating the I3MCTree yet, but
        # are letting this be done by snowstorm propagation.
        # We need to add a key named 'I3MCTree', since snowstorm expects
        # this.  It will propagate the particles for us.
        tray.AddModule(DummyMCTreeRenaming, 'DummyMCTreeRenaming')

    tray.AddModule(DAQFrameMultiplier, 'DAQFrameMultiplier',
                   oversampling_factor=oversampling_factor_photon)

    # --------------------------------------
    # Distance Splits
    # --------------------------------------
    if cfg['distance_splits'] is not None:
        click.echo('SplittingDistance: {}'.format(cfg['distance_splits']))
        distance_splits = np.atleast_1d(cfg['distance_splits'])
        dom_limits = np.atleast_1d(cfg['threshold_doms'])
        # a single DOM limit applies to every distance split
        if len(dom_limits) == 1:
            dom_limits = np.ones_like(distance_splits) * cfg['threshold_doms']
        oversize_factors = np.atleast_1d(cfg['oversize_factors'])
        # process streams in order of increasing split distance
        order = np.argsort(distance_splits)

        distance_splits = distance_splits[order]
        dom_limits = dom_limits[order]
        oversize_factors = oversize_factors[order]

        stream_objects = generate_stream_object(distance_splits,
                                                dom_limits,
                                                oversize_factors)
        tray.AddModule(OversizeSplitterNSplits,
                       "OversizeSplitterNSplits",
                       thresholds=distance_splits,
                       thresholds_doms=dom_limits,
                       oversize_factors=oversize_factors)
        # one writer per distance stream
        for stream_i in stream_objects:
            outfile_i = stream_i.transform_filepath(outfile)
            tray.AddModule("I3Writer",
                           "writer_{}".format(stream_i.stream_name),
                           Filename=outfile_i,
                           Streams=[icetray.I3Frame.DAQ,
                                    icetray.I3Frame.Physics,
                                    icetray.I3Frame.Stream('S'),
                                    icetray.I3Frame.Stream('M')],
                           If=stream_i)
            click.echo('Output ({}): {}'.format(stream_i.stream_name,
                                                outfile_i))
    else:
        click.echo('Output: {}'.format(outfile))
        tray.AddModule("I3Writer", "writer",
                       Filename=outfile,
                       Streams=[icetray.I3Frame.DAQ,
                                icetray.I3Frame.Physics,
                                icetray.I3Frame.Stream('S'),
                                icetray.I3Frame.Stream('M')])
    # --------------------------------------

    click.echo('Scratch: {}'.format(scratch))
    tray.AddModule("TrashCan", "the can")
    tray.Execute()
    tray.Finish()
def main(cfg, run_number, scratch):
    """Run Level1 (pass2) online filtering on simulated events for one run.

    Applies the online filter segment, generates filter masks, prunes
    frame content in several stages (pre-merge keeps, SuperDST-only
    keeps, DST-only keeps, per-split keeps), cleans raw data for events
    failing the conventional filters, and writes the result.

    Parameters
    ----------
    cfg : str
        Path to the YAML configuration file.
    run_number : int
        Run number of this job; also substituted into the file patterns.
    scratch : bool
        If True, write the output to the scratch file pattern.
    """
    with open(cfg, 'r') as stream:
        if int(yaml.__version__[0]) < 5:
            # backwards compatibility for yaml versions before version 5
            cfg = yaml.load(stream)
        else:
            cfg = yaml.full_load(stream)
    cfg['run_number'] = run_number
    cfg['run_folder'] = get_run_folder(run_number)

    infile = cfg['infile_pattern'].format(**cfg)
    # file patterns zero-pad numbers via ' ' -> '0'
    infile = infile.replace(' ', '0')
    # NOTE(review): 'previous_step % 10' appears to strip a leading digit
    # from the step encoding -- confirm against the pattern definitions.
    infile = infile.replace('Level0.{}'.format(cfg['previous_step']),
                            'Level0.{}'.format(cfg['previous_step'] % 10))
    infile = infile.replace('2012_pass2', 'pass2')

    if scratch:
        outfile = cfg['scratchfile_pattern'].format(**cfg)
    else:
        outfile = cfg['outfile_pattern'].format(**cfg)
    outfile = outfile.replace('Level0.{}'.format(cfg['step']),
                              'Level0.{}'.format(cfg['step'] % 10))
    outfile = outfile.replace(' ', '0')
    outfile = outfile.replace('2012_pass2', 'pass2')
    print('Outfile != $FINAL_OUT clean up for crashed scripts not possible!')

    # collect keys which are to be kept in a addition to the official keep
    # keys of the standard L1 and L2 processing
    if 'additional_keep_keys' in cfg:
        additional_keep_keys = cfg['additional_keep_keys'] + muongun_keys
    else:
        additional_keep_keys = muongun_keys
    additional_keep_keys += [
        'BiasedMuonWeighter',
        'BiasedMuonCorridorWeighter',
        'BiasedMESCHotspotWeighter',
        'BiasedSimulationWeight',
        'PROPOSALStorm',
        'PROPOSALStormUniformRanges',
        'MCVetoMuonInjectionInfo',
        'MMCTrackListVetoMuon',
        'CombinedMuonVetoI3MCTree',
        'I3MCTreeVetoMuon',
        'I3MCTreeVetoMuon_preMuonProp',
        'I3MCTreeVetoMuon_preMuonProp_RNGState',
    ]

    tray = I3Tray()

    tray.AddModule('I3Reader',
                   'i3 reader',
                   FilenameList=[cfg['gcd_pass2'], infile])

    # run online filters
    online_kwargs = {}
    if SPLINE_TABLES:
        online_kwargs.update({
            'SplineRecoAmplitudeTable': os.path.join(
                SPLINE_TABLES, 'InfBareMu_mie_abs_z20a10.fits'),
            'SplineRecoTimingTable': os.path.join(
                SPLINE_TABLES, 'InfBareMu_mie_prob_z20a10.fits'),
            # 'alert_followup_base_GCD_filename': cfg['gcd_pass2'],
        })
    if cfg['L1_pass2_run_gfu'] is not None:
        online_kwargs['gfu_enabled'] = cfg['L1_pass2_run_gfu']
    if 'L1_needs_wavedeform_spe_corr' not in cfg:
        cfg['L1_needs_wavedeform_spe_corr'] = False
    tray.AddSegment(OnlineFilter, "OnlineFilter",
                    decode=False, simulation=True,
                    vemcal_enabled=False,
                    alert_followup=False,
                    needs_wavedeform_spe_corr=cfg[
                        'L1_needs_wavedeform_spe_corr'],
                    **online_kwargs
                    )

    # make random service
    _, seed = create_random_services(
        dataset_number=cfg['dataset_number'],
        run_number=cfg['run_number'],
        seed=cfg['seed'],
        n_services=0)
    filter_mask_randoms = phys_services.I3GSLRandomService(seed)

    # override MinBias Prescale
    filterconfigs = filter_globals.filter_pairs + filter_globals.sdst_pairs
    print(cfg['L1_min_bias_prescale'])
    if cfg['L1_min_bias_prescale']:
        for i, filtertuple in enumerate(filterconfigs):
            if filtertuple[0] == filter_globals.FilterMinBias:
                # replace the default MinBias prescale with the configured one
                del filterconfigs[i]
                filterconfigs.append((filtertuple[0],
                                      cfg['L1_min_bias_prescale']))
                break
    print(filterconfigs)

    # Generate filter Masks for all P frames
    tray.AddModule(filter_tools.FilterMaskMaker, "MakeFilterMasks",
                   OutputMaskName=filter_globals.filter_mask,
                   FilterConfigs=filterconfigs,
                   RandomService=filter_mask_randoms)

    # Merge the FilterMasks
    tray.AddModule("OrPframeFilterMasks", "make_q_filtermask",
                   InputName=filter_globals.filter_mask,
                   OutputName=filter_globals.qfilter_mask)

    # Q+P frame specific keep module needs to go first, as KeepFromSubstram
    # will rename things, let's rename post keep.
    def is_Q(frame):
        # True only for DAQ (Q) frames
        return frame.Stop == frame.DAQ

    simulation_keeps = [
        'BackgroundI3MCTree',
        'BackgroundI3MCTreePEcounts',
        'BackgroundI3MCPESeriesMap',
        'BackgroundI3MCTree_preMuonProp',
        'BackgroundI3MCTree_preMuonProp_RNGState',
        'BackgroundMMCTrackList',
        'BeaconLaunches',
        'CorsikaInteractionHeight',
        'CorsikaWeightMap',
        'EventProperties',
        'GenerationSpec',
        'I3LinearizedMCTree',
        'I3MCTree',
        'I3MCTreePEcounts',
        'I3MCTree_preMuonProp',
        'I3MCTree_preMuonProp_RNGState',
        'I3MCPESeriesMap',
        'I3MCPESeriesMapWithoutNoise',
        'I3MCPESeriesMapParticleIDMap',
        'I3MCPulseSeriesMap',
        'I3MCPulseSeriesMapParticleIDMap',
        'I3MCPulseSeriesMapPrimaryIDMap',
        'I3MCWeightDict',
        'LeptonInjectorProperties',
        'MCHitSeriesMap',
        'MCPrimary',
        'MCPrimaryInfo',
        'MMCTrackList',
        'PolyplopiaInfo',
        'PolyplopiaPrimary',
        'RNGState',
        'SignalI3MCPEs',
        'SimTrimmer',  # for SimTrimmer flag
        'TimeShift',  # the time shift amount
        'WIMP_params',  # Wimp-sim
        'noise_weight',  # weights for noise-only vuvuzela simulations
        'I3GENIEResultDict'  # weight information for GENIE simulations
    ] + additional_keep_keys

    keep_before_merge = filter_globals.q_frame_keeps + [
        'InIceDSTPulses',  # keep DST pulse masks
        'IceTopDSTPulses',
        'CalibratedWaveformRange',  # keep calibration info
        'UncleanedInIcePulsesTimeRange',
        'SplitUncleanedInIcePulses',
        'SplitUncleanedInIcePulsesTimeRange',
        'SplitUncleanedInIceDSTPulsesTimeRange',
        'CalibrationErrata',
        'SaturationWindows',
        'InIceRawData',  # keep raw data for now
        'IceTopRawData',
    ] + simulation_keeps

    tray.AddModule("Keep", "keep_before_merge",
                   keys=keep_before_merge,
                   If=is_Q)

    # second set of prekeeps, conditional on filter content, based on newly
    # created Qfiltermask
    # Determine if we should apply harsh keep for events that failed to pass
    # any filter
    # Note: excluding the sdst_streams entries
    tray.AddModule("I3IcePickModule<FilterMaskFilter>", "filterMaskCheckAll",
                   FilterNameList=filter_globals.filter_streams,
                   FilterResultName=filter_globals.qfilter_mask,
                   DecisionName="PassedAnyFilter",
                   DiscardEvents=False,
                   Streams=[icetray.I3Frame.DAQ])

    def do_save_just_superdst(frame):
        # True when the event failed every filter (keep only SuperDST data)
        if frame.Has("PassedAnyFilter"):
            if not frame["PassedAnyFilter"].value:
                return True    # <- Event failed to pass any filter.
            else:
                return False   # <- Event passed some filter
        else:
            icetray.logging.log_error("Failed to find key frame Bool!!")
            return False

    keep_only_superdsts = filter_globals.keep_nofilterpass + [
        'PassedAnyFilter',
        'InIceDSTPulses',
        'IceTopDSTPulses',
        'SplitUncleanedInIcePulses',
        'SplitUncleanedInIcePulsesTimeRange',
        'SplitUncleanedInIceDSTPulsesTimeRange',
        'RNGState'] + simulation_keeps
    tray.AddModule("Keep", "KeepOnlySuperDSTs",
                   keys=keep_only_superdsts,
                   If=do_save_just_superdst)

    # Now clean up the events that not even the SuperDST filters passed on
    tray.AddModule("I3IcePickModule<FilterMaskFilter>", "filterMaskCheckSDST",
                   FilterNameList=filter_globals.sdst_streams,
                   FilterResultName=filter_globals.qfilter_mask,
                   DecisionName="PassedKeepSuperDSTOnly",
                   DiscardEvents=False,
                   Streams=[icetray.I3Frame.DAQ])

    def dont_save_superdst(frame):
        # True when the event passed neither a regular nor an SDST filter
        if frame.Has("PassedKeepSuperDSTOnly") and \
                frame.Has("PassedAnyFilter"):
            if frame["PassedAnyFilter"].value:
                return False   # <- these passed a regular filter, keeper
            elif not frame["PassedKeepSuperDSTOnly"].value:
                return True    # <- Event failed to pass SDST filter.
            else:
                return False   # <- Event passed some SDST filter
        else:
            icetray.logging.log_error("Failed to find key frame Bool!!")
            return False

    # backward compatibility
    if 'L1_keep_untriggered' in cfg and cfg['L1_keep_untriggered']:
        discard_substream_and_keys = False
    else:
        discard_substream_and_keys = True

    if discard_substream_and_keys:
        tray.AddModule(
            "Keep", "KeepOnlyDSTs",
            keys=filter_globals.keep_dst_only + [
                "PassedAnyFilter", "PassedKeepSuperDSTOnly",
                filter_globals.eventheader] + additional_keep_keys,
            If=dont_save_superdst)

    # Frames should now contain only what is needed.  Now flatten,
    # write/send to server.
    # Squish P frames back to single Q frame, one for each split:
    tray.AddModule("KeepFromSubstream", "null_stream",
                   StreamName=filter_globals.NullSplitter,
                   KeepKeys=filter_globals.null_split_keeps)

    in_ice_keeps = filter_globals.inice_split_keeps + \
        filter_globals.onlinel2filter_keeps
    in_ice_keeps = in_ice_keeps + [
        'I3EventHeader',
        'SplitUncleanedInIcePulses',
        'SplitUncleanedInIcePulsesTimeRange',
        'TriggerSplitterLaunchWindow',
        'I3TriggerHierarchy',
        'GCFilter_GCFilterMJD'] + additional_keep_keys
    tray.AddModule("Keep", "inice_keeps",
                   keys=in_ice_keeps,
                   If=which_split(split_name=filter_globals.InIceSplitter),)

    tray.AddModule("KeepFromSubstream", "icetop_split_stream",
                   StreamName=filter_globals.IceTopSplitter,
                   KeepKeys=filter_globals.icetop_split_keeps)

    # Apply small keep list (SuperDST/SmallTrig/DST/FilterMask for non-filter
    # passers
    # Remove I3DAQData object for events not passing one of the
    # 'filters_keeping_allraw'
    tray.AddModule("I3IcePickModule<FilterMaskFilter>", "filterMaskCheck",
                   FilterNameList=filter_globals.filters_keeping_allraw,
                   FilterResultName=filter_globals.qfilter_mask,
                   DecisionName="PassedConventional",
                   DiscardEvents=False,
                   Streams=[icetray.I3Frame.DAQ])

    # Clean out the Raw Data when not passing conventional filter
    def I3RawDataCleaner(frame):
        if not (('PassedConventional' in frame and
                 frame['PassedConventional'].value == True) or
                ('SimTrimmer' in frame and
                 frame['SimTrimmer'].value == True)):
            frame.Delete('InIceRawData')
            frame.Delete('IceTopRawData')

    tray.AddModule(I3RawDataCleaner, "CleanErrataForConventional",
                   Streams=[icetray.I3Frame.DAQ])

    tray.AddModule("I3Writer", "EventWriter",
                   filename=outfile,
                   Streams=[icetray.I3Frame.DAQ,
                            icetray.I3Frame.Physics,
                            icetray.I3Frame.TrayInfo,
                            icetray.I3Frame.Simulation,
                            icetray.I3Frame.Stream('m'),
                            icetray.I3Frame.Stream('M')])
    tray.AddModule("TrashCan", "the can")
    tray.Execute()
    tray.Finish()
def main(cfg, run_number, scratch):
    """Re-run pole base processing plus the OnlineL2 and GFU filters.

    Reads the run configuration from the YAML file at path ``cfg``,
    re-splits the Q frames, redoes the base reconstructions (LineFit,
    SPE Pandel), then applies the Muon, OnlineL2 and GFU filters in
    sequence, discarding events that fail each stage (unless
    ``OnlineL2_keep_all_L2`` is set). The surviving frames are written
    to the configured output file.

    Parameters
    ----------
    cfg : str
        Path to the YAML config file; rebound to the parsed dict below.
    run_number : int
        Run number used to fill the in-/outfile patterns.
    scratch : bool
        If True, write to the scratch file pattern instead of the final
        output pattern.
    """
    with open(cfg, 'r') as stream:
        # NOTE(review): yaml.load without an explicit Loader is deprecated
        # (and unsafe on untrusted input) since PyYAML 5 -- other entry
        # points in this file use a version check / full_load; confirm.
        cfg = yaml.load(stream)
    cfg['run_number'] = run_number
    cfg['run_folder'] = get_run_folder(run_number)
    click.echo('Keep all OnlineL2: {}'.format(cfg['OnlineL2_keep_all_L2']))
    click.echo('Keep time residuals: {}'.format(
        cfg['OnlineL2_keep_time_residuals']))

    # Build the input path; blanks in the pattern are zero-padding.
    infile = cfg['infile_pattern'].format(**cfg)
    infile = infile.replace(' ', '0')
    infile = infile.replace('Level0.{}'.format(cfg['previous_step']),
                            'Level2')
    # NOTE(review): the pattern below was already consumed by the previous
    # replace, so this second substitution looks like dead code -- confirm.
    infile = infile.replace('Level0.{}'.format(cfg['previous_step']),
                            'Level0.{}'.format(cfg['previous_step'] % 10))
    infile = infile.replace('2012_pass2', '2012')

    # Build the output path analogously.
    if scratch:
        outfile = cfg['scratchfile_pattern'].format(**cfg)
    else:
        outfile = cfg['outfile_pattern'].format(**cfg)
    outfile = outfile.replace('Level0.{}'.format(cfg['step']),
                              '2017OnlineL2')
    outfile = outfile.replace(' ', '0')
    outfile = outfile.replace('2012_pass2', '2012')
    print('Outfile != $FINAL_OUT clean up for crashed scripts not possible!')

    # build tray
    tray = I3Tray()
    tray.context['I3FileStager'] = dataio.get_stagers()
    # For CORSIKA input the (potentially huge) I3MCTree is skipped on read.
    tray.Add('I3Reader',
             FilenameList=[cfg['gcd_pass2'], infile],
             SkipKeys=['I3MCTree'] if 'corsika' in infile.lower() else [])

    # drop exisiting P-Frames (will do our own splitting later)
    tray.Add(lambda f: False, Streams=[icetray.I3Frame.Physics])

    ############################################################################
    # the following modules repeat what is done in the base processing at Pole #
    ############################################################################

    # resplit Q frame
    icetray.load('trigger-splitter', False)
    tray.AddModule('I3TriggerSplitter', filter_globals.InIceSplitter,
                   TrigHierName='DSTTriggers',
                   TriggerConfigIDs=[
                       filter_globals.deepcoreconfigid,
                       filter_globals.inicesmtconfigid,
                       filter_globals.inicestringconfigid,
                       filter_globals.volumetriggerconfigid
                   ],
                   SubEventStreamName=filter_globals.InIceSplitter,
                   InputResponses=['InIceDSTPulses'],
                   OutputResponses=[filter_globals.SplitUncleanedInIcePulses],
                   WriteTimeWindow=True)

    # evaluate TriggerHierarchy
    tray.AddModule(
        "TriggerCheck_13", "BaseProc_Trigchecker",
        I3TriggerHierarchy=filter_globals.triggerhierarchy,
        InIceSMTFlag=filter_globals.inicesmttriggered,
        IceTopSMTFlag=filter_globals.icetopsmttriggered,
        InIceStringFlag=filter_globals.inicestringtriggered,
        DeepCoreSMTFlag=filter_globals.deepcoresmttriggered,
        DeepCoreSMTConfigID=filter_globals.deepcoreconfigid,
        VolumeTriggerFlag=filter_globals.volumetrigtriggered,
        SlowParticleFlag=filter_globals.slowparticletriggered,
        FixedRateTriggerFlag=filter_globals.fixedratetriggered,
    )

    # run SRT and TW Cleaning from the Base Processing
    from icecube.STTools.seededRT.configuration_services import I3DOMLinkSeededRTConfigurationService
    seededRTConfig = I3DOMLinkSeededRTConfigurationService(
        ic_ic_RTRadius=150.0 * I3Units.m,
        ic_ic_RTTime=1000.0 * I3Units.ns,
        treat_string_36_as_deepcore=False,
        useDustlayerCorrection=False,
        allowSelfCoincidence=True)
    tray.AddModule(
        'I3SeededRTCleaning_RecoPulseMask_Module', 'BaseProc_seededrt',
        InputHitSeriesMapName=filter_globals.SplitUncleanedInIcePulses,
        OutputHitSeriesMapName=filter_globals.SplitRTCleanedInIcePulses,
        STConfigService=seededRTConfig,
        SeedProcedure='HLCCoreHits',
        NHitsThreshold=2,
        MaxNIterations=3,
        Streams=[icetray.I3Frame.Physics],
        If=which_split(split_name=filter_globals.InIceSplitter))
    tray.AddModule("I3TimeWindowCleaning<I3RecoPulse>", "TimeWindowCleaning",
                   InputResponse=filter_globals.SplitRTCleanedInIcePulses,
                   OutputResponse=filter_globals.CleanedMuonPulses,
                   TimeWindow=6000 * I3Units.ns,
                   If=which_split(split_name=filter_globals.InIceSplitter))

    # Improved LineFit as first-guess track.
    tray.AddSegment(linefit.simple, "BaseProc_imprv_LF",
                    inputResponse=filter_globals.CleanedMuonPulses,
                    fitName=filter_globals.muon_linefit,
                    If=which_split(split_name=filter_globals.InIceSplitter))

    # Muon LLH SimpleFitter from GulliverSuite with LineFit seed.
    tray.AddSegment(lilliput.segments.I3SinglePandelFitter,
                    filter_globals.muon_llhfit,
                    seeds=[filter_globals.muon_linefit],
                    pulses=filter_globals.CleanedMuonPulses,
                    If=which_split(split_name=filter_globals.InIceSplitter))

    # run MuonFilter
    tray.Add(MuonFilter, 'MuonFilter',
             pulses=filter_globals.CleanedMuonPulses,
             If=which_split(split_name=filter_globals.InIceSplitter))

    tray.AddModule(
        "I3FirstPulsifier", "BaseProc_first-pulsify",
        InputPulseSeriesMapName=filter_globals.CleanedMuonPulses,
        OutputPulseSeriesMapName='FirstPulseMuonPulses',
        KeepOnlyFirstCharge=False,   # default
        UseMask=False,               # default
        If=which_split(split_name=filter_globals.InIceSplitter))

    # discard events not passing the MuonFilter
    tray.Add(lambda f: f.Has(filter_globals.MuonFilter) and f[
        filter_globals.MuonFilter].value)

    # run OnlineL2 filter
    tray.Add(TimerStart, timerName='OnlineL2',
             If=which_split(split_name=filter_globals.InIceSplitter))
    tray.AddSegment(OnlineL2Filter, "OnlineL2",
                    If=which_split(split_name=filter_globals.InIceSplitter))
    tray.Add(TimerStop, timerName='OnlineL2')

    # discard events not passing the OnlineL2 filter
    tray.Add(lambda f: f.Has(filter_globals.OnlineL2Filter) and f[
        filter_globals.OnlineL2Filter].value)

    # run GFU filter
    tray.Add(TimerStart, timerName='GFU')
    tray.AddSegment(GammaFollowUp, "GFU",
                    OnlineL2SegmentName="OnlineL2",
                    KeepDetails=cfg['OnlineL2_keep_time_residuals'],
                    angular_error=True)
    tray.Add(TimerStop, timerName='GFU')

    # discard events not passing the GFU filter
    if not cfg['OnlineL2_keep_all_L2']:
        tray.Add(lambda f: f.Has(filter_globals.GFUFilter) and f[
            filter_globals.GFUFilter].value)

        # in this case, also run splineMPE with maximum settings for comparison
        TEestis = [
            'OnlineL2_SplineMPE_TruncatedEnergy_AllDOMS_Muon',
            'OnlineL2_SplineMPE_TruncatedEnergy_DOMS_Muon',
            'OnlineL2_SplineMPE_TruncatedEnergy_AllBINS_Muon',
            'OnlineL2_SplineMPE_TruncatedEnergy_BINS_Muon',
            'OnlineL2_SplineMPE_TruncatedEnergy_ORIG_Muon'
        ]
        tray.Add(CustomSplineMPE, 'SplineMPEmax',
                 configuration='max',
                 pulses='OnlineL2_CleanedMuonPulses',
                 trackSeeds=['OnlineL2_SplineMPE'],
                 enEstis=TEestis,
                 paraboloid=True)

    # For MC weighting, keep the neutrino primary.
    if 'corsika' not in infile.lower():
        # Some CORSIKA files have I3MCTree objects much larger than 100 MB.
        # Loading them takes too long... instead use
        # CorsikaWeightMap.PrimaryEnergy / PrimaryType for weighting.
        tray.AddModule(get_weighted_primary, 'get_weighted_primary',
                       MCPrimary='I3MCPrimary')

    # For MC studies, store information about the muon from CC interaction
    if 'neutrino-generator' in infile.lower():
        # store muon intersection points
        tray.Add(AddMuon)
        tray.Add(AddMuonIntersection)
        # store deposited energy in detector
        tray.Add(AddDepositedEnergy)

    tray.AddModule("I3Writer", "EventWriter",
                   filename=outfile,
                   Streams=[
                       icetray.I3Frame.DAQ,
                       icetray.I3Frame.Physics,
                       icetray.I3Frame.TrayInfo,
                       icetray.I3Frame.Simulation
                   ],
                   DropOrphanStreams=[icetray.I3Frame.DAQ])
    tray.AddModule("TrashCan", "the can")
    tray.Execute()
    del tray
def main(cfg, run_number, scratch):
    """Generate cosmic-ray muons with MuonGun and apply bias filters.

    Reads the run configuration from the YAML file at path ``cfg``,
    injects cosmic-ray muons, optionally filters/biases them on track
    geometry and muon loss profile, propagates the muons (or defers
    propagation to Snowstorm), and writes the resulting frames, with
    optional DOM-distance-based stream splitting of the output.

    Parameters
    ----------
    cfg : str
        Path to the YAML config file; rebound to the parsed dict below.
    run_number : int
        Run number used to fill the outfile pattern.
    scratch : bool
        If True, write to the scratch file pattern instead of the final
        output pattern.
    """
    with open(cfg, 'r') as stream:
        cfg = yaml.full_load(stream)
    cfg['run_number'] = run_number
    cfg['run_folder'] = get_run_folder(run_number)
    if scratch:
        outfile = cfg['scratchfile_pattern'].format(**cfg)
    else:
        outfile = cfg['outfile_pattern'].format(**cfg)
    outfile = outfile.replace(' ', '0')  # zero-pad run/folder numbers
    click.echo('Run: {}'.format(run_number))
    click.echo('Outfile: {}'.format(outfile))
    # Echo the per-stage settings dicts for the log.
    for setting_key in (
            'GenerateCosmicRayMuonsSettings',
            'MuonGeometryFilterSettings',
            'MuonLossProfileFilterSettings',
    ):
        if cfg[setting_key]:
            click.echo('{}:'.format(setting_key))
            for setting, value in cfg[setting_key].items():
                click.echo('\t{}: {}'.format(setting, value))

    # create random services
    if 'random_service_use_gslrng' not in cfg:
        cfg['random_service_use_gslrng'] = False
    random_services, _ = create_random_services(
        dataset_number=cfg['dataset_number'],
        run_number=cfg['run_number'],
        seed=cfg['seed'],
        n_services=2,
        use_gslrng=cfg['random_service_use_gslrng'])

    # --------------------------------------
    # Build IceTray
    # --------------------------------------
    tray = I3Tray()

    # add random generator to tray context
    tray.context['I3RandomService'] = random_services[0]

    tray.AddModule('I3InfiniteSource', 'source',
                   Prefix=cfg['gcd'],
                   Stream=icetray.I3Frame.DAQ)

    # Decide whether DAQ-frame oversampling happens before or after
    # muon propagation (PROPOSAL); exactly one of the two factors is set.
    if 'oversampling_factor' not in cfg:
        cfg['oversampling_factor'] = None
    if 'oversample_after_proposal' in cfg and \
            cfg['oversample_after_proposal']:
        oversampling_factor_injection = None
        oversampling_factor_photon = cfg['oversampling_factor']
    else:
        oversampling_factor_injection = cfg['oversampling_factor']
        oversampling_factor_photon = None

    tray.AddSegment(segments.GenerateCosmicRayMuons,
                    'GenerateCosmicRayMuons',
                    num_events=cfg['n_events_per_run'],
                    **cfg['GenerateCosmicRayMuonsSettings'])

    # add filter to bias simulation based on track geometry
    tray.AddModule(MuonGeometryFilter, 'MuonGeometryFilter',
                   **cfg['MuonGeometryFilterSettings'])

    # add corridor MuonGun bias module:
    # Events can be biased while keeping ability to properly weight events.
    # This is not the case for the filter module above.
    bias_mesc_hotspot_muons(tray, cfg)
    bias_corridor_muons(tray, cfg)

    tray.AddModule(DAQFrameMultiplier, 'PreDAQFrameMultiplier',
                   oversampling_factor=oversampling_factor_injection,
                   mctree_keys=['I3MCTree_preMuonProp'])

    # propagate muons if config exists in config
    # Note: Snowstorm may perform muon propagation internally
    if 'muon_propagation_config' in cfg:
        tray.AddSegment(segments.PropagateMuons,
                        'propagate_muons',
                        RandomService=random_services[1],
                        **cfg['muon_propagation_config'])
    else:
        # In this case we are not propagating the I3MCTree yet, but
        # are letting this be done by snowstorm propagation
        # We need to add a key named 'I3MCTree', since snowstorm expects this
        # It will propagate the particles for us.
        tray.AddModule('DummyMCTreeRenaming', 'DummyMCTreeRenaming')

    # add filter to bias simulation based on muon loss profile
    tray.AddModule(MuonLossProfileFilter, 'MuonLossProfileFilter',
                   **cfg['MuonLossProfileFilterSettings'])

    # add MuonGun bias module:
    # Events can be biased while keeping ability to properly weight events.
    # This is not the case for the filter modules above.
    bias_muongun_events(tray, cfg)

    tray.AddModule(DAQFrameMultiplier, 'PostDAQFrameMultiplier',
                   oversampling_factor=oversampling_factor_photon,
                   mctree_keys=['I3MCTree'])

    # --------------------------------------
    # Distance Splits
    # --------------------------------------
    if cfg['distance_splits'] is not None:
        # Route events into per-distance output streams/files.
        click.echo('SplittingDistance: {}'.format(cfg['distance_splits']))
        distance_splits = np.atleast_1d(cfg['distance_splits'])
        dom_limits = np.atleast_1d(cfg['threshold_doms'])
        if len(dom_limits) == 1:
            # single DOM limit applies to every split distance
            dom_limits = np.ones_like(distance_splits) * cfg['threshold_doms']
        oversize_factors = np.atleast_1d(cfg['oversize_factors'])
        # sort all three arrays by ascending split distance
        order = np.argsort(distance_splits)
        distance_splits = distance_splits[order]
        dom_limits = dom_limits[order]
        oversize_factors = oversize_factors[order]
        stream_objects = generate_stream_object(distance_splits,
                                                dom_limits,
                                                oversize_factors)
        tray.AddModule(OversizeSplitterNSplits,
                       "OversizeSplitterNSplits",
                       thresholds=distance_splits,
                       thresholds_doms=dom_limits,
                       oversize_factors=oversize_factors)
        # one writer per split stream, each with its own output file
        for stream_i in stream_objects:
            outfile_i = stream_i.transform_filepath(outfile)
            tray.AddModule("I3Writer",
                           "writer_{}".format(stream_i.stream_name),
                           Filename=outfile_i,
                           Streams=[
                               icetray.I3Frame.DAQ,
                               icetray.I3Frame.Physics,
                               icetray.I3Frame.Stream('S'),
                               icetray.I3Frame.Stream('M')
                           ],
                           If=stream_i)
            click.echo('Output ({}): {}'.format(stream_i.stream_name,
                                                outfile_i))
    else:
        click.echo('Output: {}'.format(outfile))
        tray.AddModule("I3Writer", "writer",
                       Filename=outfile,
                       Streams=[
                           icetray.I3Frame.DAQ,
                           icetray.I3Frame.Physics,
                           icetray.I3Frame.Stream('S'),
                           icetray.I3Frame.Stream('M')
                       ])
    # --------------------------------------

    click.echo('Scratch: {}'.format(scratch))
    tray.AddModule("TrashCan", "the can")
    tray.Execute()
    tray.Finish()
def main(cfg, run_number, scratch):
    """Import events from existing i3 files and re-propagate/oversample.

    Reads the run configuration from the YAML file at path ``cfg``,
    collects the input files matching the configured glob pattern(s),
    imports selected keys from them, optionally propagates muons, and
    writes the frames out with optional DOM-distance stream splitting.

    Parameters
    ----------
    cfg : str
        Path to the YAML config file; rebound to the parsed dict below.
    run_number : int
        Run number used to fill file patterns and glob templates.
    scratch : bool
        If True, write to the scratch file pattern instead of the final
        output pattern.
    """
    with open(cfg, 'r') as stream:
        cfg = yaml.full_load(stream)
    cfg['run_number'] = run_number
    cfg['run_folder'] = get_run_folder(run_number)
    if scratch:
        outfile = cfg['scratchfile_pattern'].format(**cfg)
    else:
        outfile = cfg['outfile_pattern'].format(**cfg)
    outfile = outfile.replace(' ', '0')  # zero-pad run/folder numbers

    # ------------------------------
    # get list of files for this run
    # ------------------------------
    import_cfg = cfg['event_import_settings']
    glob_files = import_cfg['input_file_glob_list']
    if isinstance(glob_files, str):
        # single string provided
        files = glob.glob(glob_files.format(run_number=run_number))
    else:
        # list of file globs provided
        files = []
        for file_pattern in glob_files:
            files.extend(glob.glob(file_pattern.format(
                run_number=run_number)))
    # sort files for a deterministic import order
    files = sorted(files)
    # ------------------------------

    click.echo('Run: {}'.format(run_number))
    click.echo('Outfile: {}'.format(outfile))
    click.echo('Keys to import: {}'.format(import_cfg['keys_to_import']))
    click.echo('input Files:')
    for file in files:
        click.echo('\t{}'.format(file))

    # create random services
    if 'random_service_use_gslrng' not in cfg:
        cfg['random_service_use_gslrng'] = False
    random_services, _ = create_random_services(
        dataset_number=cfg['dataset_number'],
        run_number=cfg['run_number'],
        seed=cfg['seed'],
        n_services=2,
        use_gslrng=cfg['random_service_use_gslrng'])

    # --------------------------------------
    # Build IceTray
    # --------------------------------------
    tray = I3Tray()

    # import events from another I3-file
    if 'num_events' not in import_cfg:
        import_cfg['num_events'] = None
    tray.AddModule(
        ImportEvents,
        'ImportEvents',
        files=files,
        num_events=import_cfg['num_events'],
        keys_to_import=import_cfg['keys_to_import'],
        rename_dict=import_cfg['rename_dict'],
        mctree_name=import_cfg['mctree_name'],
    )

    # propagate muons if config exists in config
    # Note: Snowstorm may perform muon propagation internally
    if 'muon_propagation_config' in cfg:
        tray.AddSegment(segments.PropagateMuons,
                        'propagate_muons',
                        RandomService=random_services[1],
                        **cfg['muon_propagation_config'])

    tray.AddModule(DAQFrameMultiplier, 'DAQFrameMultiplier',
                   oversampling_factor=cfg['oversampling_factor'],
                   mctree_keys=[import_cfg['mctree_name']])

    # --------------------------------------
    # Distance Splits
    # --------------------------------------
    if cfg['distance_splits'] is not None:
        # Route events into per-distance output streams/files.
        click.echo('SplittingDistance: {}'.format(cfg['distance_splits']))
        distance_splits = np.atleast_1d(cfg['distance_splits'])
        dom_limits = np.atleast_1d(cfg['threshold_doms'])
        if len(dom_limits) == 1:
            # single DOM limit applies to every split distance
            dom_limits = np.ones_like(distance_splits) * cfg['threshold_doms']
        oversize_factors = np.atleast_1d(cfg['oversize_factors'])
        # sort all three arrays by ascending split distance
        order = np.argsort(distance_splits)
        distance_splits = distance_splits[order]
        dom_limits = dom_limits[order]
        oversize_factors = oversize_factors[order]
        stream_objects = generate_stream_object(distance_splits,
                                                dom_limits,
                                                oversize_factors)
        tray.AddModule(OversizeSplitterNSplits,
                       "OversizeSplitterNSplits",
                       thresholds=distance_splits,
                       thresholds_doms=dom_limits,
                       oversize_factors=oversize_factors)
        # one writer per split stream, each with its own output file
        for stream_i in stream_objects:
            outfile_i = stream_i.transform_filepath(outfile)
            tray.AddModule("I3Writer",
                           "writer_{}".format(stream_i.stream_name),
                           Filename=outfile_i,
                           Streams=[
                               icetray.I3Frame.DAQ,
                               icetray.I3Frame.Physics,
                               icetray.I3Frame.Stream('S'),
                               icetray.I3Frame.Stream('M')
                           ],
                           If=stream_i)
            click.echo('Output ({}): {}'.format(stream_i.stream_name,
                                                outfile_i))
    else:
        click.echo('Output: {}'.format(outfile))
        tray.AddModule("I3Writer", "writer",
                       Filename=outfile,
                       Streams=[
                           icetray.I3Frame.DAQ,
                           icetray.I3Frame.Physics,
                           icetray.I3Frame.Stream('S'),
                           icetray.I3Frame.Stream('M')
                       ])
    # --------------------------------------

    click.echo('Scratch: {}'.format(scratch))
    tray.AddModule("TrashCan", "the can")
    tray.Execute()
    tray.Finish()
def main(cfg, run_number, scratch):
    """Run PPC photon propagation on an input i3 file.

    Reads the run configuration from the YAML file at path ``cfg``,
    exports the PPC environment variables (ice model tables, GPU
    selection), imports ppc *after* the environment is set, runs the
    ``i3ppc`` module and writes the result to the configured output
    file.

    Parameters
    ----------
    cfg : str
        Path to the YAML config file; rebound to the parsed dict below.
    run_number : int
        Run number used to fill the in-/outfile patterns.
    scratch : bool
        If True, write to the scratch file pattern instead of the final
        output pattern.
    """
    with open(cfg, 'r') as stream:
        cfg = yaml.full_load(stream)
    cfg['run_number'] = run_number
    cfg['run_folder'] = get_run_folder(run_number)
    if scratch:
        outfile = cfg['scratchfile_pattern'].format(**cfg)
    else:
        outfile = cfg['outfile_pattern'].format(**cfg)
    outfile = outfile.replace(' ', '0')  # zero-pad run/folder numbers
    infile = cfg['infile_pattern'].format(**cfg)
    infile = infile.replace(' ', '0')
    click.echo('Run: {}'.format(run_number))
    click.echo('Outfile: {}'.format(outfile))

    # -----------------------------
    # Set PPC environment variables
    # -----------------------------
    ppc_config = cfg['ppc_config']

    # define default Environment variables
    ice_base = '$I3_BUILD/ice-models/resources/models/'
    default_ppc_environment_variables = {
        'PPCTABLESDIR': ice_base + 'spice_bfr-dv1_complete',
        'OGPU': '1',  # makes sure only GPUs are used (with OpenCL version)
    }
    ppc_environment_variables = dict(default_ppc_environment_variables)
    ppc_environment_variables.update(ppc_config['environment_variables'])

    # define default PPC arguments
    default_ppc_arguments = {
        'MCTree': 'I3MCTree',
    }
    if 'CUDA_VISIBLE_DEVICES' in os.environ:
        default_ppc_arguments['gpu'] = int(os.environ['CUDA_VISIBLE_DEVICES'])
    ppc_arguments = dict(default_ppc_arguments)
    ppc_arguments.update(ppc_config['arguments'])

    click.echo('PPC Settings:')
    for key, value in ppc_environment_variables.items():
        expanded = os.path.expandvars(value)
        click.echo('\t{}: {}'.format(key, expanded))
        # BUGFIX: use os.environ assignment instead of os.putenv().
        # Direct putenv() calls do not update os.environ, so code reading
        # os.environ (as this function does above) would not see these
        # values; os.environ assignment calls putenv() internally.
        os.environ[key] = expanded

    click.echo('PPC Arguments:')
    for key, value in ppc_arguments.items():
        click.echo('\t{}: {}'.format(key, value))

    # importing ppc must be done *after* setting the environment variables
    from icecube import ppc

    # ------------------------------
    # get random service
    # (default the gsl-rng switch like the sibling entry points do)
    if 'random_service_use_gslrng' not in cfg:
        cfg['random_service_use_gslrng'] = False
    random_services, _ = create_random_services(
        dataset_number=cfg['dataset_number'],
        run_number=cfg['run_number'],
        seed=cfg['seed'],
        n_services=1,
        use_gslrng=cfg['random_service_use_gslrng'])
    random_service = random_services[0]

    # --------------------------------------
    # Build IceTray
    # --------------------------------------
    tray = I3Tray()
    tray.AddModule('I3Reader', 'i3 reader',
                   FilenameList=[cfg['gcd_pass2'], infile])

    # run PPC
    tray.context["I3RandomService"] = random_service
    tray.AddModule("i3ppc", 'ppc', **ppc_arguments)

    # rename MCPESeriesMap to I3MCPESeriesMap
    tray.Add("Rename", keys=["MCPESeriesMap", "I3MCPESeriesMap"])

    click.echo('Output: {}'.format(outfile))
    tray.AddModule("I3Writer", "writer",
                   Filename=outfile,
                   Streams=[
                       icetray.I3Frame.TrayInfo,
                       icetray.I3Frame.Simulation,
                       icetray.I3Frame.Stream('M'),
                       icetray.I3Frame.Stream('S'),
                       icetray.I3Frame.DAQ,
                       icetray.I3Frame.Physics
                   ])
    # --------------------------------------

    click.echo('Scratch: {}'.format(scratch))
    tray.AddModule("TrashCan", "the can")
    tray.Execute()
    tray.Finish()
def main(cfg, run_number, scratch): with open(cfg, 'r') as stream: cfg = yaml.load(stream, Loader=yaml.Loader) cfg['run_number'] = run_number cfg['run_folder'] = get_run_folder(run_number) infile = cfg['infile_pattern'].format(**cfg) infile = infile.replace(' ', '0') if scratch: outfile = cfg['scratchfile_pattern'].format(**cfg) else: outfile = cfg['outfile_pattern'].format(**cfg) outfile = outfile.replace(' ', '0') tray = I3Tray() """The main script""" tray.AddModule('I3Reader', 'i3 reader', FilenameList=[cfg['gcd'], infile]) detectorsurface = MuonGun.Cylinder( length=1600.*icetray.I3Units.m, radius=800.*icetray.I3Units.m, center=dataclasses.I3Position(0.*icetray.I3Units.m, 0.*icetray.I3Units.m, 0.*icetray.I3Units.m,)) # filter secondaries that are not in detector volume tray.AddModule(MuonRemoveChildren, 'MuonRemoveChildren', Detector=detectorsurface, Output='I3MCTree') #-------------------------------------------------- # Add MC Labels mirco trained his DNN on #-------------------------------------------------- add_cascade_labels = False if add_cascade_labels: tray.AddModule(modules.MCLabelsCascades, 'MCLabelsCascade', PulseMapString='InIcePulses', PrimaryKey='MCPrimary1', OutputKey='LabelsDeepLearning') else: tray.AddModule(modules.MCLabelsDeepLearning, 'MCLabelsDeepLearning', PulseMapString='InIcePulses', PrimaryKey='MCPrimary1', MCPESeriesMapName=cfg['mcpe_series_map'], OutputKey='LabelsDeepLearning', IsMuonGun=True) tray.AddModule("I3Writer", "EventWriter", filename=outfile, Streams=[icetray.I3Frame.DAQ, icetray.I3Frame.Physics, icetray.I3Frame.TrayInfo, icetray.I3Frame.Simulation], DropOrphanStreams=[icetray.I3Frame.DAQ]) tray.Execute() del tray
def main(cfg, run_number, scratch):
    """Generate muons with one of several generators and propagate them.

    Reads the run configuration from the YAML file at path ``cfg``,
    builds one of three injectors depending on ``cfg['MuonGenerator']``
    ('MuonGunSinglemuons', 'MuonGunGeneral' or 'MuonResimulation'),
    propagates the muons, and writes the frames out with optional
    DOM-distance stream splitting.

    Parameters
    ----------
    cfg : str
        Path to the YAML config file; rebound to the parsed dict below.
    run_number : int
        Run number used to fill the outfile pattern.
    scratch : bool
        If True, write to the scratch file pattern instead of the final
        output pattern.
    """
    with open(cfg, 'r') as stream:
        cfg = yaml.load(stream, Loader=yaml.Loader)
    cfg['run_number'] = run_number
    cfg['run_folder'] = get_run_folder(run_number)
    if scratch:
        outfile = cfg['scratchfile_pattern'].format(**cfg)
    else:
        outfile = cfg['outfile_pattern'].format(**cfg)
    outfile = outfile.replace(' ', '0')  # zero-pad run/folder numbers
    if cfg['distance_splits'] is not None:
        click.echo('SplittingDistances: {}'.format(cfg['distance_splits']))
        click.echo('Oversizefactors: {}'.format(cfg['oversize_factors']))
    click.echo('NEvents: {}'.format(cfg['n_events_per_run']))
    click.echo('EMin: {}'.format(cfg['e_min']))
    click.echo('EMax: {}'.format(cfg['e_max']))
    click.echo('EBreak: {}'.format(cfg['muongun_e_break']))
    click.echo('Gamma: {}'.format(cfg['gamma']))
    click.echo('ZenithMin: {}'.format(cfg['zenith_min']))
    click.echo('ZenithMax: {}'.format(cfg['zenith_max']))

    # create convex hull
    if 'use_convex_hull' in cfg and cfg['use_convex_hull']:
        # hardcode icecube corner points
        # ToDo: read from geometry file
        points = [
            [-570.90002441, -125.13999939, 501],  # string 31
            [-256.14001465, -521.08001709, 501],  # string 1
            [361., -422.82998657, 501],           # string 6
            [576.36999512, 170.91999817, 501],    # string 50
            [338.44000244, 463.72000122, 501],    # string 74
            [101.04000092, 412.79000854, 501],    # string 72
            [22.11000061, 509.5, 501],            # string 78
            [-347.88000488, 451.51998901, 501],   # string 75
            [-570.90002441, -125.13999939, -502],  # string 31
            [-256.14001465, -521.08001709, -502],  # string 1
            [361., -422.82998657, -502],           # string 6
            [576.36999512, 170.91999817, -502],    # string 50
            [338.44000244, 463.72000122, -502],    # string 74
            [101.04000092, 412.79000854, -502],    # string 72
            [22.11000061, 509.5, -502],            # string 78
            [-347.88000488, 451.51998901, -502],   # string 75
        ]
        convex_hull = ConvexHull(points)
    else:
        convex_hull = None

    if 'extend_past_hull' not in cfg:
        cfg['extend_past_hull'] = 0.0

    random_services, _ = create_random_services(
        dataset_number=cfg['dataset_number'],
        run_number=cfg['run_number'],
        seed=cfg['seed'],
        n_services=2)
    random_service, random_service_prop = random_services

    # create muon
    # NOTE(review): `muon` is only consumed by the 'MuonResimulation'
    # branch below; for the MuonGun branches it is created but unused.
    muon = create_muon(
        azimuth_range=[cfg['azimuth_min'], cfg['azimuth_max']],
        zenith_range=[cfg['zenith_min'], cfg['zenith_max']],
        energy_range=[cfg['e_min'], cfg['e_max']],
        anchor_time_range=cfg['anchor_time_range'],
        anchor_x_range=cfg['anchor_x_range'],
        anchor_y_range=cfg['anchor_y_range'],
        anchor_z_range=cfg['anchor_z_range'],
        length_to_go_back=cfg['length_to_go_back'],
        convex_hull=convex_hull,
        extend_past_hull=cfg['extend_past_hull'],
        random_service=random_services[0],
    )

    tray = I3Tray()
    tray.context['I3RandomService'] = random_service
    tray.AddModule("I3InfiniteSource", "TheSource",
                   Prefix=cfg['gcd'],
                   Stream=icetray.I3Frame.DAQ)

    if cfg['MuonGenerator'] == 'MuonGunSinglemuons':
        tray.AddSegment(segments.GenerateSingleMuons,
                        "GenerateCosmicRayMuons",
                        NumEvents=cfg['n_events_per_run'],
                        FromEnergy=cfg['e_min'] * icetray.I3Units.GeV,
                        ToEnergy=cfg['e_max'] * icetray.I3Units.GeV,
                        BreakEnergy=cfg['muongun_e_break'] *
                        icetray.I3Units.GeV,
                        GammaIndex=cfg['gamma'],
                        ZenithRange=[
                            cfg['zenith_min'] * icetray.I3Units.deg,
                            cfg['zenith_max'] * icetray.I3Units.deg
                        ])

    elif cfg['MuonGenerator'] == 'MuonGunGeneral':
        model = MuonGun.load_model(cfg['muongun_model'])
        model.flux.min_multiplicity = cfg['muongun_min_multiplicity']
        model.flux.max_multiplicity = cfg['muongun_max_multiplicity']
        # NOTE(review): e_min is passed both as the offset and as the
        # minimum energy of the power law -- possibly intentional, but
        # confirm against the MuonGun.OffsetPowerLaw signature.
        spectrum = MuonGun.OffsetPowerLaw(cfg['gamma'],
                                          cfg['e_min'] *
                                          icetray.I3Units.GeV,
                                          cfg['e_min'] *
                                          icetray.I3Units.GeV,
                                          cfg['e_max'] *
                                          icetray.I3Units.GeV)
        surface = MuonGun.Cylinder(1600, 800,
                                   dataclasses.I3Position(31.25, 19.64, 0))

        if cfg['muongun_generator'] == 'energy':
            scale = MuonGun.BasicSurfaceScalingFunction()
            scale.SetSideScaling(4., 17266, 3.41, 1.74)
            scale.SetCapScaling(4., 23710, 3.40, 1.88)
            generator = MuonGun.EnergyDependentSurfaceInjector(
                surface, model.flux, spectrum, model.radius, scale)
        elif cfg['muongun_generator'] == 'static':
            generator = MuonGun.StaticSurfaceInjector(
                surface, model.flux, spectrum, model.radius)
        elif cfg['muongun_generator'] == 'floodlight':
            generator = MuonGun.Floodlight(
                surface=surface,
                energyGenerator=spectrum,
                cosMin=cfg['muongun_floodlight_min_cos'],
                cosMax=cfg['muongun_floodlight_max_cos'],
            )
        else:
            err_msg = 'MuonGun generator {} is not known.'
            err_msg += " Must be 'energy','static' or 'floodlight"
            raise ValueError(err_msg.format(cfg['muongun_generator']))

        tray.Add(MuonGun.segments.GenerateBundles, 'MuonGenerator',
                 Generator=generator,
                 NEvents=cfg['n_events_per_run'],
                 GCDFile=cfg['gcd'])
        # PropagateMuons below expects the unpropagated tree under this key
        tray.Add("Rename", keys=["I3MCTree", "I3MCTree_preMuonProp"])

    elif cfg['MuonGenerator'] == 'MuonResimulation':
        tray.AddModule(ParticleMultiplier, 'make_particles',
                       num_events=cfg['n_events_per_run'],
                       primary=muon)
    else:
        err_msg = 'MuonGenerator {} is not known.'
        err_msg += " Must be 'MuonGunSinglemuons','MuonGunGeneral' or 'MuonResimulation"
        raise ValueError(err_msg.format(cfg['MuonGenerator']))

    # --------------------------------------
    # Propagate Muons
    # --------------------------------------
    tray.AddSegment(segments.PropagateMuons,
                    "PropagateMuons",
                    RandomService=random_service_prop,
                    **cfg['muon_propagation_config'])

    # --------------------------------------
    # Distance Splits
    # --------------------------------------
    if cfg['distance_splits'] is not None:
        # Route events into per-distance output streams/files.
        click.echo('SplittingDistance: {}'.format(cfg['distance_splits']))
        distance_splits = np.atleast_1d(cfg['distance_splits'])
        dom_limits = np.atleast_1d(cfg['threshold_doms'])
        if len(dom_limits) == 1:
            # single DOM limit applies to every split distance
            dom_limits = np.ones_like(distance_splits) * cfg['threshold_doms']
        oversize_factors = np.atleast_1d(cfg['oversize_factors'])
        # sort all three arrays by ascending split distance
        order = np.argsort(distance_splits)
        distance_splits = distance_splits[order]
        dom_limits = dom_limits[order]
        oversize_factors = oversize_factors[order]
        stream_objects = generate_stream_object(distance_splits,
                                                dom_limits,
                                                oversize_factors)
        tray.AddModule(OversizeSplitterNSplits,
                       "OversizeSplitterNSplits",
                       thresholds=distance_splits,
                       thresholds_doms=dom_limits,
                       oversize_factors=oversize_factors)
        # one writer per split stream, each with its own output file
        for stream_i in stream_objects:
            outfile_i = stream_i.transform_filepath(outfile)
            tray.AddModule("I3Writer",
                           "writer_{}".format(stream_i.stream_name),
                           Filename=outfile_i,
                           Streams=[
                               icetray.I3Frame.DAQ,
                               icetray.I3Frame.Physics,
                               icetray.I3Frame.Stream('S'),
                               icetray.I3Frame.Stream('M')
                           ],
                           If=stream_i)
            click.echo('Output ({}): {}'.format(stream_i.stream_name,
                                                outfile_i))
    else:
        click.echo('Output: {}'.format(outfile))
        tray.AddModule("I3Writer", "writer",
                       Filename=outfile,
                       Streams=[
                           icetray.I3Frame.DAQ,
                           icetray.I3Frame.Physics,
                           icetray.I3Frame.Stream('S'),
                           icetray.I3Frame.Stream('M')
                       ])

    click.echo('Scratch: {}'.format(scratch))
    tray.Execute()
    del tray
def main(cfg, run_number, scratch):
    """Generate single muons with MuonGun, propagate them, and write frames.

    Reads the run configuration from the YAML file at path ``cfg``,
    injects single muons with ``segments.GenerateSingleMuons``,
    propagates them, and writes the frames out with optional
    DOM-distance stream splitting.

    Fixes vs. the previous revision:
    * the output path was computed twice (the first computation, which
      also lacked the blank->'0' padding, was dead code) -- it is now
      computed once;
    * ``yaml.load`` without a Loader is deprecated since PyYAML 5 -- the
      version-checked pattern used by the other entry points in this
      file is applied.

    Parameters
    ----------
    cfg : str
        Path to the YAML config file; rebound to the parsed dict below.
    run_number : int
        Run number used to fill the outfile pattern.
    scratch : bool
        If True, write to the scratch file pattern instead of the final
        output pattern.
    """
    with open(cfg, 'r') as stream:
        if int(yaml.__version__[0]) < 5:
            # backwards compatibility for yaml versions before version 5
            cfg = yaml.load(stream)
        else:
            cfg = yaml.full_load(stream)
    cfg['run_number'] = run_number
    cfg['run_folder'] = get_run_folder(run_number)

    if cfg['distance_splits'] is not None:
        click.echo('SplittingDistances: {}'.format(cfg['distance_splits']))
        click.echo('Oversizefactors: {}'.format(cfg['oversize_factors']))
    click.echo('NEvents: {}'.format(cfg['n_events_per_run']))
    click.echo('EMin: {}'.format(cfg['e_min']))
    click.echo('EMax: {}'.format(cfg['e_max']))
    click.echo('EBreak: {}'.format(cfg['muongun_e_break']))
    click.echo('Gamma: {}'.format(cfg['gamma']))
    click.echo('ZenithMin: {}'.format(cfg['zenith_min']))
    click.echo('ZenithMax: {}'.format(cfg['zenith_max']))

    tray = I3Tray()
    random_service, random_service_prop, _ = create_random_services(
        dataset_number=cfg['dataset_number'],
        run_number=cfg['run_number'],
        seed=cfg['seed'])
    tray.context['I3RandomService'] = random_service

    tray.AddModule("I3InfiniteSource", "TheSource",
                   Prefix=cfg['gcd'],
                   Stream=icetray.I3Frame.DAQ)

    tray.AddSegment(segments.GenerateSingleMuons,
                    "GenerateCosmicRayMuons",
                    NumEvents=cfg['n_events_per_run'],
                    FromEnergy=cfg['e_min'] * icetray.I3Units.GeV,
                    ToEnergy=cfg['e_max'] * icetray.I3Units.GeV,
                    BreakEnergy=cfg['muongun_e_break'] * icetray.I3Units.GeV,
                    GammaIndex=cfg['gamma'],
                    ZenithRange=[
                        cfg['zenith_min'] * icetray.I3Units.deg,
                        cfg['zenith_max'] * icetray.I3Units.deg
                    ])

    tray.AddSegment(segments.PropagateMuons,
                    "PropagateMuons",
                    RandomService=random_service_prop)

    # Compute the output path once; blanks in the pattern are zero-padding.
    if scratch:
        outfile = cfg['scratchfile_pattern'].format(**cfg)
    else:
        outfile = cfg['outfile_pattern'].format(**cfg)
    outfile = outfile.replace(' ', '0')

    if cfg['distance_splits'] is not None:
        # Route events into per-distance output streams/files.
        click.echo('SplittingDistance: {}'.format(cfg['distance_splits']))
        distance_splits = np.atleast_1d(cfg['distance_splits'])
        dom_limits = np.atleast_1d(cfg['threshold_doms'])
        if len(dom_limits) == 1:
            # single DOM limit applies to every split distance
            dom_limits = np.ones_like(distance_splits) * cfg['threshold_doms']
        oversize_factors = np.atleast_1d(cfg['oversize_factors'])
        # sort all three arrays by ascending split distance
        order = np.argsort(distance_splits)
        distance_splits = distance_splits[order]
        dom_limits = dom_limits[order]
        oversize_factors = oversize_factors[order]
        stream_objects = generate_stream_object(distance_splits,
                                                dom_limits,
                                                oversize_factors)
        tray.AddModule(OversizeSplitterNSplits,
                       "OversizeSplitterNSplits",
                       thresholds=distance_splits,
                       thresholds_doms=dom_limits,
                       oversize_factors=oversize_factors)
        # one writer per split stream, each with its own output file
        for stream_i in stream_objects:
            outfile_i = stream_i.transform_filepath(outfile)
            tray.AddModule("I3Writer",
                           "writer_{}".format(stream_i.stream_name),
                           Filename=outfile_i,
                           Streams=[
                               icetray.I3Frame.DAQ,
                               icetray.I3Frame.Physics,
                               icetray.I3Frame.Stream('S'),
                               icetray.I3Frame.Stream('M')
                           ],
                           If=stream_i)
            click.echo('Output ({}): {}'.format(stream_i.stream_name,
                                                outfile_i))
    else:
        click.echo('Output: {}'.format(outfile))
        tray.AddModule("I3Writer", "writer",
                       Filename=outfile,
                       Streams=[
                           icetray.I3Frame.DAQ,
                           icetray.I3Frame.Physics,
                           icetray.I3Frame.Stream('S'),
                           icetray.I3Frame.Stream('M')
                       ])

    click.echo('Scratch: {}'.format(scratch))
    tray.AddModule("TrashCan", "the can")
    tray.Execute()
    tray.Finish()
def main(cfg, run_number, scratch):
    """Generate neutrino events, optionally bias them, and write frames.

    Reads the run configuration from the YAML file at path ``cfg``,
    injects neutrinos with ``segments.GenerateNeutrinos``, propagates
    muons (or defers propagation to Snowstorm), optionally applies the
    base-simulation bias module, and writes the frames out with optional
    DOM-distance stream splitting.

    Fix vs. the previous revision: the 'Apply simulation bias' log line
    printed 'True' in *both* branches; the else branch now correctly
    prints 'False'.

    Parameters
    ----------
    cfg : str
        Path to the YAML config file; rebound to the parsed dict below.
    run_number : int
        Run number used to fill the outfile pattern and the generator
        RunID.
    scratch : bool
        If True, write to the scratch file pattern instead of the final
        output pattern.
    """
    with open(cfg, 'r') as stream:
        if int(yaml.__version__[0]) < 5:
            # backwards compatibility for yaml versions before version 5
            cfg = yaml.load(stream)
        else:
            cfg = yaml.full_load(stream)
    cfg['run_number'] = run_number
    cfg['run_folder'] = get_run_folder(run_number)
    if scratch:
        outfile = cfg['scratchfile_pattern'].format(**cfg)
    else:
        outfile = cfg['outfile_pattern'].format(**cfg)
    outfile = outfile.replace(' ', '0')  # zero-pad run/folder numbers

    click.echo('NEvents: {}'.format(cfg['n_events_per_run']))
    click.echo('EMin: {}'.format(cfg['e_min']))
    click.echo('EMax: {}'.format(cfg['e_max']))
    click.echo('Gamma: {}'.format(cfg['gamma']))
    click.echo('ZenithMin: {}'.format(cfg['zenith_min']))
    click.echo('ZenithMax: {}'.format(cfg['zenith_max']))
    click.echo('AzimuthMin: {}'.format(cfg['azimuth_min']))
    click.echo('AzimuthMax: {}'.format(cfg['azimuth_max']))
    if cfg['neutrino_flavor'] is None:
        click.echo('NeutrinoTypes: {}'.format(cfg['neutrino_types']))
        click.echo('PrimaryTypeRatio: {}'.format(cfg['primary_type_ratio']))
    else:
        click.echo('NeutrinoFlavor: {}'.format(cfg['neutrino_flavor']))
    apply_bias = ('ApplyBaseSimulationBias' in cfg and
                  cfg['ApplyBaseSimulationBias'])
    if apply_bias:
        click.echo('Apply simulation bias: True')
    else:
        # BUGFIX: previously printed 'True' in both branches.
        click.echo('Apply simulation bias: False')

    tray = I3Tray()

    # the bias module consumes an extra random service
    if apply_bias:
        n_services = 3
    else:
        n_services = 2

    # default the gsl-rng switch like the sibling entry points do
    if 'random_service_use_gslrng' not in cfg:
        cfg['random_service_use_gslrng'] = False
    random_services, _ = create_random_services(
        dataset_number=cfg['dataset_number'],
        run_number=cfg['run_number'],
        seed=cfg['seed'],
        n_services=n_services,
        use_gslrng=cfg['random_service_use_gslrng'],
    )
    random_service, random_service_prop = random_services[:2]
    tray.context['I3RandomService'] = random_service

    tray.AddModule("I3InfiniteSource", "TheSource",
                   Prefix=cfg['gcd'],
                   Stream=icetray.I3Frame.DAQ)

    tray.AddSegment(segments.GenerateNeutrinos,
                    "GenerateNeutrinos",
                    RunID=run_number,
                    RandomService=random_service,
                    NumEvents=cfg['n_events_per_run'],
                    FromEnergy=cfg['e_min'] * icetray.I3Units.GeV,
                    ToEnergy=cfg['e_max'] * icetray.I3Units.GeV,
                    ZenithRange=[
                        cfg['zenith_min'] * icetray.I3Units.deg,
                        cfg['zenith_max'] * icetray.I3Units.deg
                    ],
                    AzimuthRange=[
                        cfg['azimuth_min'] * icetray.I3Units.deg,
                        cfg['azimuth_max'] * icetray.I3Units.deg
                    ],
                    **cfg['additional_GenerateNeutrinos_settings'])

    # propagate muons if config exists in config
    # Note: Snowstorm may perform muon propagation internally
    if 'muon_propagation_config' in cfg:
        tray.AddSegment(segments.PropagateMuons,
                        'propagate_muons',
                        RandomService=random_service_prop,
                        **cfg['muon_propagation_config'])
    else:
        # In this case we are not propagating the I3MCTree yet, but
        # are letting this be done by snowstorm propagation
        # We need to add a key named 'I3MCTree', since snowstorm expects this
        # It will propagate the particles for us.
        tray.AddModule('Rename', keys=['I3MCTree_preMuonProp', 'I3MCTree'])

    # Bias simulation if desired
    if apply_bias:
        tray.AddModule(BaseSimulationBias, 'BaseSimulationBias',
                       random_service=random_services[2],
                       **cfg['BaseSimulationBiasSettings'])

    if cfg['distance_splits'] is not None:
        import dom_distance_cut as dom_cut
        click.echo('Oversizestreams')
        stream_objects = dom_cut.generate_stream_object(
            cut_distances=cfg['distance_splits'],
            dom_limits=cfg['threshold_doms'],
            oversize_factors=cfg['oversize_factors'])
        # NOTE: 'simulaton_type' matches the module's (misspelled)
        # parameter name -- do not "fix" the spelling here.
        tray.AddModule(dom_cut.OversizeSplitterNSplits,
                       "OversizeSplitterNSplits",
                       thresholds=cfg['distance_splits'],
                       thresholds_doms=cfg['threshold_doms'],
                       oversize_factors=cfg['oversize_factors'],
                       simulaton_type=cfg['neutrino_flavor'].lower())
        # one writer per split stream, each with its own output file
        for stream_i in stream_objects:
            outfile_i = stream_i.transform_filepath(outfile)
            click.echo('\t{}'.format(stream_i))
            click.echo('\tOutfile: {}'.format(outfile_i))
            tray.AddModule("I3Writer",
                           "writer_{}".format(stream_i.stream_name),
                           Filename=outfile_i,
                           Streams=[
                               icetray.I3Frame.DAQ,
                               icetray.I3Frame.Physics,
                               icetray.I3Frame.Stream('S'),
                               icetray.I3Frame.Stream('M')
                           ],
                           If=stream_i)
    else:
        click.echo('Output: {}'.format(outfile))
        tray.AddModule("I3Writer", "writer",
                       Filename=outfile,
                       Streams=[
                           icetray.I3Frame.DAQ,
                           icetray.I3Frame.Physics,
                           icetray.I3Frame.Stream('S'),
                           icetray.I3Frame.Stream('M')
                       ])

    click.echo('Scratch: {}'.format(scratch))
    tray.AddModule("TrashCan", "the can")
    tray.Execute()
    tray.Finish()
def main(cfg, run_number, scratch):
    """Generate and propagate MuonGun muon bundles (energies in GeV).

    Reads the YAML config, builds a MuonGun generator selected by
    ``cfg['muongun_generator']`` ('energy', 'static' or 'floodlight'),
    propagates the muons, and writes the frames to an i3 file —
    optionally split into oversize streams by DOM distance.

    Parameters
    ----------
    cfg : str
        Path to a YAML configuration file (rebound to the parsed dict).
    run_number : int
        Run number of this job; used for seeding and file patterns.
    scratch : bool
        If True, write to the scratch file pattern instead of the
        final output pattern.
    """
    with open(cfg, 'r') as stream:
        if int(yaml.__version__[0]) < 5:
            # backwards compatibility for yaml versions before version 5
            cfg = yaml.load(stream)
        else:
            cfg = yaml.full_load(stream)
    cfg['run_number'] = run_number
    cfg['run_folder'] = get_run_folder(run_number)

    tray = I3Tray()

    # Two independent RNG streams: one for generation, one for propagation.
    random_services, _ = create_random_services(
        dataset_number=cfg['dataset_number'],
        run_number=cfg['run_number'],
        seed=cfg['seed'],
        n_services=2)
    random_service, random_service_prop = random_services
    tray.context['I3RandomService'] = random_service

    model = MuonGun.load_model(cfg['muongun_model'])
    model.flux.min_multiplicity = cfg['muongun_min_multiplicity']
    model.flux.max_multiplicity = cfg['muongun_max_multiplicity']
    # OffsetPowerLaw(gamma, offset, min, max): e_min is used both as the
    # offset and the lower bound — assumed intentional; TODO confirm.
    spectrum = MuonGun.OffsetPowerLaw(cfg['gamma'],
                                      cfg['e_min'] * icetray.I3Units.GeV,
                                      cfg['e_min'] * icetray.I3Units.GeV,
                                      cfg['e_max'] * icetray.I3Units.GeV)
    surface = MuonGun.Cylinder(1600, 800,
                               dataclasses.I3Position(31.25, 19.64, 0))

    if cfg['muongun_generator'] == 'energy':
        scale = MuonGun.BasicSurfaceScalingFunction()
        scale.SetSideScaling(4., 17266, 3.41, 1.74)
        scale.SetCapScaling(4., 23710, 3.40, 1.88)
        generator = MuonGun.EnergyDependentSurfaceInjector(
            surface, model.flux, spectrum, model.radius, scale)
    elif cfg['muongun_generator'] == 'static':
        generator = MuonGun.StaticSurfaceInjector(
            surface, model.flux, spectrum, model.radius)
    elif cfg['muongun_generator'] == 'floodlight':
        generator = MuonGun.Floodlight(
            surface=surface,
            energyGenerator=spectrum,
            cosMin=cfg['muongun_floodlight_min_cos'],
            cosMax=cfg['muongun_floodlight_max_cos'],
        )
    else:
        # Fixed: original message was missing the closing quote around
        # 'floodlight'.
        err_msg = 'MuonGun generator {} is not known.'
        err_msg += " Must be 'energy', 'static' or 'floodlight'."
        raise ValueError(err_msg.format(cfg['muongun_generator']))

    tray.Add(MuonGun.segments.GenerateBundles, 'MuonGenerator',
             Generator=generator,
             NEvents=cfg['n_events_per_run'],
             GCDFile=cfg['gcd'])
    # PropagateMuons expects the unpropagated tree under this name.
    tray.Add("Rename", keys=["I3MCTree", "I3MCTree_preMuonProp"])
    tray.AddSegment(segments.PropagateMuons,
                    "PropagateMuons",
                    RandomService=random_service_prop,
                    **cfg['muon_propagation_config'])

    if scratch:
        outfile = cfg['scratchfile_pattern'].format(**cfg)
    else:
        outfile = cfg['outfile_pattern'].format(**cfg)
    outfile = outfile.replace(' ', '0')

    if cfg['distance_splits'] is not None:
        # Split the output into oversize streams based on the distance
        # of the closest DOM.
        click.echo('SplittingDistance: {}'.format(cfg['distance_splits']))
        distance_splits = np.atleast_1d(cfg['distance_splits'])
        dom_limits = np.atleast_1d(cfg['threshold_doms'])
        if len(dom_limits) == 1:
            # A single DOM limit applies to every split distance.
            dom_limits = np.ones_like(distance_splits) * cfg['threshold_doms']
        oversize_factors = np.atleast_1d(cfg['oversize_factors'])
        # Sort all three arrays consistently by split distance.
        order = np.argsort(distance_splits)
        distance_splits = distance_splits[order]
        dom_limits = dom_limits[order]
        oversize_factors = oversize_factors[order]
        stream_objects = generate_stream_object(distance_splits,
                                                dom_limits,
                                                oversize_factors)
        tray.AddModule(OversizeSplitterNSplits,
                       "OversizeSplitterNSplits",
                       thresholds=distance_splits,
                       thresholds_doms=dom_limits,
                       oversize_factors=oversize_factors)
        # One writer per oversize stream, each with its own output path.
        for stream_i in stream_objects:
            outfile_i = stream_i.transform_filepath(outfile)
            tray.AddModule("I3Writer",
                           "writer_{}".format(stream_i.stream_name),
                           Filename=outfile_i,
                           Streams=[icetray.I3Frame.DAQ,
                                    icetray.I3Frame.Physics,
                                    icetray.I3Frame.Stream('S'),
                                    icetray.I3Frame.Stream('M')],
                           If=stream_i)
            click.echo('Output ({}): {}'.format(stream_i.stream_name,
                                                outfile_i))
    else:
        click.echo('Output: {}'.format(outfile))
        tray.AddModule("I3Writer", "writer",
                       Filename=outfile,
                       Streams=[icetray.I3Frame.DAQ,
                                icetray.I3Frame.Physics,
                                icetray.I3Frame.Stream('S'),
                                icetray.I3Frame.Stream('M')])

    click.echo('Scratch: {}'.format(scratch))
    tray.AddModule("TrashCan", "the can")
    tray.Execute()
    tray.Finish()
def main(cfg, run_number, scratch):
    """Generate and propagate MuonGun muon bundles (energies in TeV).

    Same structure as the GeV MuonGun script, but energies are
    interpreted in TeV and the output is written to a single file
    (no oversize splitting); a trailing '.bz2' is stripped from the
    output path.

    Parameters
    ----------
    cfg : str
        Path to a YAML configuration file (rebound to the parsed dict).
    run_number : int
        Run number of this job; used for seeding and file patterns.
    scratch : bool
        If True, write to the scratch file pattern instead of the
        final output pattern.
    """
    with open(cfg, 'r') as stream:
        # Fixed: use the version-guarded load, consistent with the other
        # entry points in this file (bare yaml.load without a Loader is
        # deprecated/unsafe since PyYAML 5.1).
        if int(yaml.__version__[0]) < 5:
            # backwards compatibility for yaml versions before version 5
            cfg = yaml.load(stream)
        else:
            cfg = yaml.full_load(stream)
    cfg['run_number'] = run_number
    cfg['run_folder'] = get_run_folder(run_number)

    tray = I3Tray()

    # Separate RNG streams for generation and propagation.
    random_service, random_service_prop, _ = create_random_services(
        dataset_number=cfg['dataset_number'],
        run_number=cfg['run_number'],
        seed=cfg['seed'])
    tray.context['I3RandomService'] = random_service

    model = MuonGun.load_model(cfg['muongun_model'])
    model.flux.min_multiplicity = cfg['muongun_min_multiplicity']
    model.flux.max_multiplicity = cfg['muongun_max_multiplicity']
    # OffsetPowerLaw(gamma, offset, min, max): e_min is used both as the
    # offset and the lower bound — assumed intentional; TODO confirm.
    spectrum = MuonGun.OffsetPowerLaw(
        cfg['gamma'],
        cfg['e_min'] * icetray.I3Units.TeV,
        cfg['e_min'] * icetray.I3Units.TeV,
        cfg['e_max'] * icetray.I3Units.TeV)
    surface = MuonGun.Cylinder(1600, 800,
                               dataclasses.I3Position(31.25, 19.64, 0))

    if cfg['muongun_generator'] == 'energy':
        scale = MuonGun.BasicSurfaceScalingFunction()
        scale.SetSideScaling(4., 17266, 3.41, 1.74)
        scale.SetCapScaling(4., 23710, 3.40, 1.88)
        generator = MuonGun.EnergyDependentSurfaceInjector(
            surface, model.flux, spectrum, model.radius, scale)
    elif cfg['muongun_generator'] == 'static':
        generator = MuonGun.StaticSurfaceInjector(
            surface, model.flux, spectrum, model.radius)
    elif cfg['muongun_generator'] == 'floodlight':
        generator = MuonGun.Floodlight(
            surface=surface,
            energyGenerator=spectrum,
            cosMin=cfg['muongun_floodlight_min_cos'],
            cosMax=cfg['muongun_floodlight_max_cos'],
        )
    else:
        # Fixed: original message was missing the closing quote around
        # 'floodlight'.
        err_msg = 'MuonGun generator {} is not known.'
        err_msg += " Must be 'energy', 'static' or 'floodlight'."
        raise ValueError(err_msg.format(cfg['muongun_generator']))

    tray.Add(MuonGun.segments.GenerateBundles, 'MuonGenerator',
             Generator=generator,
             NEvents=cfg['n_events_per_run'],
             GCDFile=cfg['gcd'])
    # PropagateMuons expects the unpropagated tree under this name.
    tray.Add("Rename", keys=["I3MCTree", "I3MCTree_preMuonProp"])
    tray.AddSegment(
        segments.PropagateMuons,
        "PropagateMuons",
        RandomService=random_service_prop)

    if scratch:
        outfile = cfg['scratchfile_pattern'].format(**cfg)
    else:
        outfile = cfg['outfile_pattern'].format(**cfg)
    outfile = outfile.replace(' ', '0')
    # Write uncompressed; drop any '.bz2' suffix from the pattern.
    outfile = outfile.replace('.bz2', '')

    tray.AddModule("I3Writer", "writer",
                   Filename=outfile,
                   Streams=[icetray.I3Frame.DAQ,
                            icetray.I3Frame.Physics,
                            icetray.I3Frame.Stream('S'),
                            icetray.I3Frame.Stream('M')])
    tray.AddModule("TrashCan", "the can")
    tray.Execute()
    tray.Finish()
def main(cfg, run_number, scratch):
    """Generate neutrino events with NuGen and propagate their muons.

    Reads the YAML config, echoes the generation parameters, runs
    ``segments.GenerateNeutrinos`` followed by muon propagation, and
    writes the frames — optionally split into oversize streams by DOM
    distance.

    Parameters
    ----------
    cfg : str
        Path to a YAML configuration file (rebound to the parsed dict).
    run_number : int
        Run number of this job; used for seeding and file patterns.
    scratch : bool
        If True, write to the scratch file pattern instead of the
        final output pattern.
    """
    with open(cfg, 'r') as stream:
        # Fixed: use the version-guarded load, consistent with the other
        # entry points in this file (bare yaml.load without a Loader is
        # deprecated/unsafe since PyYAML 5.1).
        if int(yaml.__version__[0]) < 5:
            # backwards compatibility for yaml versions before version 5
            cfg = yaml.load(stream)
        else:
            cfg = yaml.full_load(stream)
    cfg['run_number'] = run_number
    cfg['run_folder'] = get_run_folder(run_number)

    if scratch:
        outfile = cfg['scratchfile_pattern'].format(**cfg)
    else:
        outfile = cfg['outfile_pattern'].format(**cfg)
    outfile = outfile.replace(' ', '0')

    # Echo the generation settings for the job log.
    click.echo('NEvents: {}'.format(cfg['n_events_per_run']))
    click.echo('EMin: {}'.format(cfg['e_min']))
    click.echo('EMax: {}'.format(cfg['e_max']))
    click.echo('Gamma: {}'.format(cfg['gamma']))
    click.echo('ZenithMin: {}'.format(cfg['zenith_min']))
    click.echo('ZenithMax: {}'.format(cfg['zenith_max']))
    click.echo('AzimuthMin: {}'.format(cfg['azimuth_min']))
    click.echo('AzimuthMax: {}'.format(cfg['azimuth_max']))
    if cfg['neutrino_flavor'] is None:
        click.echo('NeutrinoTypes: {}'.format(cfg['neutrino_types']))
        click.echo('PrimaryTypeRatio: {}'.format(cfg['primary_type_ratio']))
    else:
        click.echo('NeutrinoFlavor: {}'.format(cfg['neutrino_flavor']))
    click.echo('CrossSections: {}'.format(cfg['cross_sections']))
    if cfg['cross_sections_path'] is not None:
        click.echo('CrossSectionsPath: {}'.format(cfg['cross_sections_path']))

    tray = I3Tray()

    # Two independent RNG streams: one for generation, one for propagation.
    random_services, _ = create_random_services(
        dataset_number=cfg['dataset_number'],
        run_number=cfg['run_number'],
        seed=cfg['seed'],
        n_services=2)
    random_service, random_service_prop = random_services
    tray.context['I3RandomService'] = random_service

    tray.AddModule("I3InfiniteSource", "TheSource",
                   Prefix=cfg['gcd'],
                   Stream=icetray.I3Frame.DAQ)

    tray.AddSegment(
        segments.GenerateNeutrinos, "GenerateNeutrinos",
        RandomService=random_service,
        NumEvents=cfg['n_events_per_run'],
        SimMode=cfg['simulation_mode'],
        VTXGenMode=cfg['vertex_generation_mode'],
        InjectionMode=cfg['injection_mode'],
        CylinderParams=cfg['cylinder_params'],
        AutoExtendMuonVolume=cfg['auto_extend_muon_volume'],
        Flavor=cfg['neutrino_flavor'],
        # NuTypes=cfg['neutrino_types'],  # Only in newer simprod versions
        # PrimaryTypeRatio=cfg['primary_type_ratio'],  # Only in newer
        # simprod versions
        GammaIndex=cfg['gamma'],
        FromEnergy=cfg['e_min'] * icetray.I3Units.GeV,
        ToEnergy=cfg['e_max'] * icetray.I3Units.GeV,
        ZenithRange=[
            cfg['zenith_min'] * icetray.I3Units.deg,
            cfg['zenith_max'] * icetray.I3Units.deg
        ],
        AzimuthRange=[
            cfg['azimuth_min'] * icetray.I3Units.deg,
            cfg['azimuth_max'] * icetray.I3Units.deg
        ],
        # UseDifferentialXsection=cfg['use_diff_cross_section'],
        # Only in newer simprod versions
        CrossSections=cfg['cross_sections'],
        CrossSectionsPath=cfg['cross_sections_path'],
        # ZenithSamplingMode=cfg['zenith_sampling_mode'],
        # Only in newer simprod versions
    )

    tray.AddSegment(segments.PropagateMuons,
                    "PropagateMuons",
                    RandomService=random_service_prop,
                    **cfg['muon_propagation_config'])

    if cfg['distance_splits'] is not None:
        # Split the output into oversize streams based on the distance
        # of the closest DOM.
        import dom_distance_cut as dom_cut
        click.echo('Oversizestreams')
        stream_objects = dom_cut.generate_stream_object(
            cut_distances=cfg['distance_splits'],
            dom_limits=cfg['threshold_doms'],
            oversize_factors=cfg['oversize_factors'])
        tray.AddModule(dom_cut.OversizeSplitterNSplits,
                       "OversizeSplitterNSplits",
                       thresholds=cfg['distance_splits'],
                       thresholds_doms=cfg['threshold_doms'],
                       oversize_factors=cfg['oversize_factors'],
                       # NOTE(review): 'simulaton_type' looks misspelled but
                       # may match the module's actual keyword — confirm
                       # against dom_distance_cut before renaming.
                       simulaton_type=cfg['neutrino_flavor'].lower())
        for stream_i in stream_objects:
            outfile_i = stream_i.transform_filepath(outfile)
            click.echo('\t{}'.format(stream_i))
            click.echo('\tOutfile: {}'.format(outfile_i))
            tray.AddModule("I3Writer",
                           "writer_{}".format(stream_i.stream_name),
                           Filename=outfile_i,
                           Streams=[
                               icetray.I3Frame.DAQ,
                               icetray.I3Frame.Physics,
                               icetray.I3Frame.Stream('S'),
                               icetray.I3Frame.Stream('M')
                           ],
                           If=stream_i)
    else:
        click.echo('Output: {}'.format(outfile))
        tray.AddModule("I3Writer", "writer",
                       Filename=outfile,
                       Streams=[
                           icetray.I3Frame.DAQ,
                           icetray.I3Frame.Physics,
                           icetray.I3Frame.Stream('S'),
                           icetray.I3Frame.Stream('M')
                       ])

    click.echo('Scratch: {}'.format(scratch))
    tray.AddModule("TrashCan", "the can")
    tray.Execute()
    tray.Finish()
def main(cfg, run_number, scratch):
    """The main L1 script: run online filtering on pass2 simulation.

    Reads the detector-level i3 file, runs the OnlineFilter segment,
    builds filter masks, prunes frame keys down to SuperDST/DST level
    depending on which filters passed, and writes the result.

    Parameters
    ----------
    cfg : str
        Path to a YAML configuration file (rebound to the parsed dict).
    run_number : int
        Run number of this job; used for seeding and file patterns.
    scratch : bool
        If True, write to the scratch file pattern instead of the
        final output pattern.
    """
    with open(cfg, 'r') as stream:
        # Fixed: use the version-guarded load, consistent with the other
        # entry points in this file (bare yaml.load without a Loader is
        # deprecated/unsafe since PyYAML 5.1).
        if int(yaml.__version__[0]) < 5:
            # backwards compatibility for yaml versions before version 5
            cfg = yaml.load(stream)
        else:
            cfg = yaml.full_load(stream)
    cfg['run_number'] = run_number
    cfg['run_folder'] = get_run_folder(run_number)
    infile = cfg['infile_pattern'].format(**cfg)
    infile = infile.replace(' ', '0')

    tray = I3Tray()
    tray.AddModule('I3Reader', 'i3 reader',
                   FilenameList=[cfg['gcd_pass2'], infile])

    # run online filters
    online_kwargs = {}
    if SPLINE_TABLES:
        online_kwargs.update({
            'SplineRecoAmplitudeTable': os.path.join(
                SPLINE_TABLES, 'InfBareMu_mie_abs_z20a10.fits'),
            'SplineRecoTimingTable': os.path.join(
                SPLINE_TABLES, 'InfBareMu_mie_prob_z20a10.fits'),
            'hese_followup_base_GCD_filename': cfg['gcd_pass2'],
        })
    if cfg['L1_pass2_run_gfu'] is not None:
        online_kwargs['gfu_enabled'] = cfg['L1_pass2_run_gfu']
    tray.AddSegment(OnlineFilter, "OnlineFilter",
                    decode=False, simulation=True,
                    vemcal_enabled=False,
                    **online_kwargs)

    # make random service
    seed = cfg['seed'] + run_number
    filter_mask_randoms = phys_services.I3GSLRandomService(seed)

    # override MinBias Prescale
    filterconfigs = filter_globals.filter_pairs + filter_globals.sdst_pairs
    print(cfg['L1_min_bias_prescale'])
    if cfg['L1_min_bias_prescale']:
        for i, filtertuple in enumerate(filterconfigs):
            if filtertuple[0] == filter_globals.FilterMinBias:
                del filterconfigs[i]
                filterconfigs.append(
                    (filtertuple[0], cfg['L1_min_bias_prescale']))
                break
    print(filterconfigs)

    # Generate filter Masks for all P frames
    tray.AddModule(filter_tools.FilterMaskMaker, "MakeFilterMasks",
                   OutputMaskName=filter_globals.filter_mask,
                   FilterConfigs=filterconfigs,
                   RandomService=filter_mask_randoms)

    # Merge the FilterMasks
    tray.AddModule("OrPframeFilterMasks", "make_q_filtermask",
                   InputName=filter_globals.filter_mask,
                   OutputName=filter_globals.qfilter_mask)

    # Q+P frame specific keep module needs to go first, as KeepFromSubstram
    # will rename things, let's rename post keep.
    def is_Q(frame):
        return frame.Stop == frame.DAQ

    # Simulation-only keys that must survive all pruning stages.
    simulation_keeps = [
        'BackgroundI3MCTree',
        'BackgroundI3MCTreePEcounts',
        'BackgroundI3MCPESeriesMap',
        'BackgroundI3MCTree_preMuonProp',
        'BackgroundMMCTrackList',
        'BeaconLaunches',
        'CorsikaInteractionHeight',
        'CorsikaWeightMap',
        'EventProperties',
        'GenerationSpec',
        'I3LinearizedMCTree',
        'I3MCTree',
        'I3MCTreePEcounts',
        'I3MCTree_preMuonProp',
        'I3MCPESeriesMap',
        'I3MCPulseSeriesMap',
        'I3MCPulseSeriesMapParticleIDMap',
        'I3MCWeightDict',
        'LeptonInjectorProperties',
        'MCHitSeriesMap',
        'MCPrimary',
        'MCPrimaryInfo',
        'MMCTrackList',
        'PolyplopiaInfo',
        'PolyplopiaPrimary',
        'RNGState',
        'SignalI3MCPEs',
        'SimTrimmer',
        'TimeShift',
        'WIMP_params'
    ] + muongun_keys

    keep_before_merge = filter_globals.q_frame_keeps + [
        'InIceDSTPulses',
        'IceTopDSTPulses',
        'CalibratedWaveformRange',
        'UncleanedInIcePulsesTimeRange',
        'SplitUncleanedInIcePulses',
        'SplitUncleanedInIcePulsesTimeRange',
        'SplitUncleanedInIceDSTPulsesTimeRange',
        'CalibrationErrata',
        'SaturationWindows',
        'InIceRawData',
        'IceTopRawData'
    ] + simulation_keeps

    tray.AddModule("Keep", "keep_before_merge",
                   keys=keep_before_merge,
                   If=is_Q)

    tray.AddModule("I3IcePickModule<FilterMaskFilter>", "filterMaskCheckAll",
                   FilterNameList=filter_globals.filter_streams,
                   FilterResultName=filter_globals.qfilter_mask,
                   DecisionName="PassedAnyFilter",
                   DiscardEvents=False,
                   Streams=[icetray.I3Frame.DAQ])

    def do_save_just_superdst(frame):
        # True only for events that failed every filter: those are
        # reduced to SuperDST-level content.
        if frame.Has("PassedAnyFilter"):
            if not frame["PassedAnyFilter"].value:
                return True
            else:
                return False
        else:
            print("Failed to find key frame Bool!!")
            return False

    keep_only_superdsts = filter_globals.keep_nofilterpass + [
        'PassedAnyFilter',
        'InIceDSTPulses',
        'IceTopDSTPulses',
        'SplitUncleanedInIcePulses',
        'SplitUncleanedInIcePulsesTimeRange',
        'SplitUncleanedInIceDSTPulsesTimeRange',
        'RNGState'
    ] + simulation_keeps
    tray.AddModule("Keep", "KeepOnlySuperDSTs",
                   keys=keep_only_superdsts,
                   If=do_save_just_superdst)

    tray.AddModule("I3IcePickModule<FilterMaskFilter>", "filterMaskCheckSDST",
                   FilterNameList=filter_globals.sdst_streams,
                   FilterResultName=filter_globals.qfilter_mask,
                   DecisionName="PassedKeepSuperDSTOnly",
                   DiscardEvents=False,
                   Streams=[icetray.I3Frame.DAQ])

    def dont_save_superdst(frame):
        # True for events that also failed every SDST filter: those are
        # reduced to DST-only content.
        if frame.Has("PassedKeepSuperDSTOnly") and \
                frame.Has("PassedAnyFilter"):
            if frame["PassedAnyFilter"].value:
                return False
            elif not frame["PassedKeepSuperDSTOnly"].value:
                return True
            else:
                return False
        else:
            print("Failed to find key frame Bool!!")
            return False

    # Fixed: key was misspelled "assedKeepSuperDSTOnly", so the decision
    # bool written by filterMaskCheckSDST above was being dropped here
    # (the sibling 2012 script spells it "PassedKeepSuperDSTOnly").
    tray.AddModule("Keep", "KeepOnlyDSTs",
                   keys=filter_globals.keep_dst_only + [
                       "PassedAnyFilter",
                       "PassedKeepSuperDSTOnly",
                       filter_globals.eventheader
                   ] + muongun_keys,
                   If=dont_save_superdst)

    # Squish the null-split P frames back into their Q frame.
    tray.AddModule("KeepFromSubstream", "null_stream",
                   StreamName=filter_globals.NullSplitter,
                   KeepKeys=filter_globals.null_split_keeps)

    in_ice_keeps = filter_globals.inice_split_keeps + \
        filter_globals.onlinel2filter_keeps
    in_ice_keeps = in_ice_keeps + [
        'I3EventHeader',
        'SplitUncleanedInIcePulses',
        'TriggerSplitterLaunchWindow',
        'I3TriggerHierarchy',
        'GCFilter_GCFilterMJD'
    ] + muongun_keys
    tray.AddModule(
        "Keep",
        "inice_keeps",
        keys=in_ice_keeps,
        If=which_split(split_name=filter_globals.InIceSplitter),
    )

    tray.AddModule("KeepFromSubstream", "icetop_split_stream",
                   StreamName=filter_globals.IceTopSplitter,
                   KeepKeys=filter_globals.icetop_split_keeps)

    tray.AddModule("I3IcePickModule<FilterMaskFilter>", "filterMaskCheck",
                   FilterNameList=filter_globals.filters_keeping_allraw,
                   FilterResultName=filter_globals.qfilter_mask,
                   DecisionName="PassedConventional",
                   DiscardEvents=False,
                   Streams=[icetray.I3Frame.DAQ])

    def I3RawDataCleaner(frame):
        # Drop raw launches unless a conventional filter passed or the
        # SimTrimmer flag asks to keep them.
        if not (('PassedConventional' in frame and
                 frame['PassedConventional'].value == True) or
                ('SimTrimmer' in frame and
                 frame['SimTrimmer'].value == True)):
            frame.Delete('InIceRawData')
            frame.Delete('IceTopRawData')

    tray.AddModule(I3RawDataCleaner, "CleanErrataForConventional",
                   Streams=[icetray.I3Frame.DAQ])

    if scratch:
        outfile = cfg['scratchfile_pattern'].format(**cfg)
    else:
        outfile = cfg['outfile_pattern'].format(**cfg)
    outfile = outfile.replace(' ', '0')

    tray.AddModule("I3Writer", "EventWriter",
                   filename=outfile,
                   Streams=[
                       icetray.I3Frame.DAQ,
                       icetray.I3Frame.Physics,
                       icetray.I3Frame.TrayInfo,
                       icetray.I3Frame.Simulation
                   ])
    tray.AddModule("TrashCan", "the can")
    tray.Execute()
    tray.Finish()
def main(config_file, run_number, scratch):
    """Run the 2012 L1 filtering chain on rehydrated pass2 simulation.

    Reads the input i3 file (skipping pole reconstructions), optionally
    re-triggers, runs the full jeb_filter_2012 filter suite, builds
    filter masks, and prunes frame keys to SuperDST/DST level before
    writing the output.

    Fixed: the two "Failed to find key frame Bool!!" diagnostics were
    Python-2 print statements (syntax errors under Python 3); they are
    now function calls, which behave identically on both versions.

    Parameters
    ----------
    config_file : str
        Path to a YAML configuration file.
    run_number : int
        Run number of this job; used for seeding and file patterns.
    scratch : bool
        If True, write to the scratch file pattern instead of the
        final output pattern.
    """
    with open(config_file, 'r') as stream:
        if int(yaml.__version__[0]) < 5:
            # backwards compatibility for yaml versions before version 5
            cfg = yaml.load(stream)
        else:
            cfg = yaml.full_load(stream)
    if 'dictitems' in cfg.keys():
        cfg = cfg['dictitems']
    cfg['run_number'] = run_number
    cfg['run_folder'] = get_run_folder(run_number)

    infile = cfg['infile_pattern'].format(**cfg)
    infile = infile.replace(' ', '0')
    infile = infile.replace('Level0.{}'.format(cfg['previous_step']),
                            'Level0.{}'.format(cfg['previous_step'] % 10))
    infile = infile.replace('2012_pass2', '2012')

    if scratch:
        outfile = cfg['scratchfile_pattern'].format(**cfg)
    else:
        outfile = cfg['outfile_pattern'].format(**cfg)
    outfile = outfile.replace('Level0.{}'.format(cfg['step']),
                              'Level0.{}'.format(cfg['step'] % 10))
    outfile = outfile.replace(' ', '0')
    outfile = outfile.replace('2012_pass2', '2012')
    print('Outfile != $FINAL_OUT clean up for crashed scripts not possible!')

    # Only the derived seed is needed here; no RNG services are created.
    _, seed = create_random_services(dataset_number=cfg['dataset_number'],
                                     run_number=cfg['run_number'],
                                     seed=cfg['seed'],
                                     n_services=0)

    tray = I3Tray()
    tray.AddModule('I3Reader', 'reader',
                   FilenameList=[cfg['gcd_2012'], infile],
                   SkipKeys=[
                       'I3DST11',
                       'I3SuperDST',
                       'I3VEMCalData',
                       'PoleMuonLlhFit',
                       'PoleMuonLlhFitCutsFirstPulseCuts',
                       'PoleMuonLlhFitFitParams',
                       'CramerRaoPoleL2IpdfGConvolute_2itParams',
                       'CramerRaoPoleL2MPEFitParams',
                       'PoleL2IpdfGConvolute_2it',
                       'PoleL2IpdfGConvolute_2itFitParams',
                       'PoleL2MPEFit',
                       'PoleL2MPEFitCuts',
                       'PoleL2MPEFitFitParams',
                       'PoleL2MPEFitMuE',
                   ])

    class SkipSFrames(icetray.I3ConditionalModule):
        # Swallows S frames; currently unused (see commented AddModule).
        S_stream = icetray.I3Frame.Stream('S')

        def __init__(self, context):
            icetray.I3ConditionalModule.__init__(self, context)

        def Configure(self):
            self.Register(self.S_stream, self.SFrame)

        def SFrame(self, frame):
            pass

    # tray.AddModule(SkipSFrames,
    #                "Skip I Frames")

    def check_driving_time(frame):
        # Older files may lack DrivingTime; synthesize it from the header.
        if 'DrivingTime' not in frame:
            frame['DrivingTime'] = dataclasses.I3Time(
                frame['I3EventHeader'].start_time)
        return True

    tray.AddModule(check_driving_time, 'DrivingTimeCheck',
                   Streams=[icetray.I3Frame.DAQ])

    # move that old filterMask out of the way
    tray.AddModule("Rename", "filtermaskmover",
                   Keys=["FilterMask", "OrigFilterMask"])

    if cfg['L1_2012_qify']:
        tray.AddModule("QConverter", "qify", WritePFrame=False)

    def MissingITCheck(frame):
        # print("Fixing IceTop RO")
        if "IceTopRawData" not in frame:
            itrd = dataclasses.I3DOMLaunchSeriesMap()
            frame["IceTopRawData"] = itrd

    tray.AddModule(MissingITCheck, 'L1_AddIceTopPulses',
                   Streams=[icetray.I3Frame.DAQ])

    if cfg['L1_2012_retrigger']:
        # some cleanup first
        tray.AddModule("Delete", "delete_triggerHierarchy",
                       Keys=["I3TriggerHierarchy", "TimeShift"])
        gcd_file = dataio.I3File(cfg['gcd_2012'])
        tray.AddSegment(trigger_sim.TriggerSim, "trig", gcd_file=gcd_file)

    tray.AddSegment(jeb_filter_2012.BaseProcessing, "BaseProc",
                    pulses=filter_globals.CleanedMuonPulses,
                    decode=False, simulation=True,
                    DomLauncher=(not cfg['L1_2012_dom_simulator']))

    tray.AddSegment(jeb_filter_2012.MuonFilter, "MuonFilter",
                    pulses=filter_globals.CleanedMuonPulses,
                    If=which_split(split_name=filter_globals.InIceSplitter))

    tray.AddSegment(jeb_filter_2012.CascadeFilter, "CascadeFilter",
                    pulses=filter_globals.CleanedMuonPulses,
                    muon_llhfit_name=filter_globals.muon_llhfit,
                    If=which_split(split_name=filter_globals.InIceSplitter))

    tray.AddSegment(jeb_filter_2012.FSSFilter, "FSSFilter",
                    pulses=filter_globals.SplitUncleanedInIcePulses,
                    If=which_split(split_name=filter_globals.InIceSplitter))

    tray.AddSegment(jeb_filter_2012.LowUpFilter, "LowUpFilter",
                    If=which_split(split_name=filter_globals.InIceSplitter))

    tray.AddSegment(jeb_filter_2012.ShadowFilter, "ShawdowFilters",
                    If=which_split(split_name=filter_globals.InIceSplitter))

    # use the PID as a seed. Good enough?
    tray.AddSegment(jeb_filter_2012.GCFilter, "GCFilter",
                    mcseed=seed,
                    If=which_split(split_name=filter_globals.InIceSplitter))

    tray.AddSegment(jeb_filter_2012.VEFFilter, "VEFFilter",
                    pulses=filter_globals.CleanedMuonPulses,
                    If=which_split(split_name=filter_globals.InIceSplitter))

    if PHOTONICS_DIR is not None:
        photonicstabledirmu = os.path.join(PHOTONICS_DIR, 'SPICE1')
        photonicsdriverfilemu = os.path.join('driverfiles',
                                             'mu_photorec.list')
    else:
        photonicstabledirmu = None
        photonicsdriverfilemu = None

    tray.AddSegment(jeb_filter_2012.OnlineL2Filter, "OnlineL2",
                    pulses=filter_globals.CleanedMuonPulses,
                    llhfit_name=filter_globals.muon_llhfit,
                    improved_linefit=True,
                    paraboloid=False,
                    PhotonicsTabledirMu=photonicstabledirmu,
                    PhotonicsDriverfileMu_Spice1=photonicsdriverfilemu,
                    If=which_split(split_name=filter_globals.InIceSplitter))

    tray.AddSegment(jeb_filter_2012.DeepCoreFilter, "DeepCoreFilter",
                    pulses=filter_globals.SplitUncleanedInIcePulses,
                    If=which_split(split_name=filter_globals.InIceSplitter))

    tray.AddSegment(jeb_filter_2012.EHEFilter, "EHEFilter",
                    If=which_split(split_name=filter_globals.NullSplitter))

    tray.AddSegment(jeb_filter_2012.MinBiasFilters, "MinBias",
                    If=which_split(split_name=filter_globals.NullSplitter))

    tray.AddSegment(jeb_filter_2012.SlopFilters, "SLOP",
                    If=which_split(split_name=filter_globals.NullSplitter))

    tray.AddSegment(jeb_filter_2012.FixedRateTrigFilter, "FixedRate",
                    If=which_split(split_name=filter_globals.NullSplitter))

    tray.AddSegment(jeb_filter_2012.CosmicRayFilter, "CosmicRayFilter",
                    pulseMask=filter_globals.SplitUncleanedITPulses,
                    If=which_split(split_name=filter_globals.IceTopSplitter))

    tray.AddSegment(jeb_filter_2012.DST, "DSTFilter",
                    dstname="I3DST12",
                    pulses=filter_globals.CleanedMuonPulses,
                    If=which_split(split_name=filter_globals.InIceSplitter))

    # make random service
    filter_mask_randoms = phys_services.I3GSLRandomService(seed)

    # override MinBias Prescale
    filterconfigs = filter_globals.filter_pairs + filter_globals.sdst_pairs
    if cfg['L1_min_bias_prescale'] is not None:
        for i, filtertuple in enumerate(filterconfigs):
            if filtertuple[0] == filter_globals.FilterMinBias:
                del filterconfigs[i]
                filterconfigs.append(
                    (filtertuple[0], cfg['L1_min_bias_prescale']))
                break
    click.echo(filterconfigs)

    # Generate filter Masks for all P frames
    tray.AddModule(filter_tools.FilterMaskMaker, "MakeFilterMasks",
                   OutputMaskName=filter_globals.filter_mask,
                   FilterConfigs=filterconfigs,
                   RandomService=filter_mask_randoms)

    # Merge the FilterMasks
    tray.AddModule("OrPframeFilterMasks", "make_q_filtermask",
                   InputName=filter_globals.filter_mask,
                   OutputName=filter_globals.qfilter_mask)

    # Q+P frame specific keep module needs to go first, as KeepFromSubstram
    # will rename things, let's rename post keep.
    def is_Q(frame):
        return frame.Stop == frame.DAQ

    tray.AddModule(
        "Keep",
        "keep_before_merge",
        keys=filter_globals.q_frame_keeps + [
            'InIceDSTPulses',  # keep DST pulse masks
            'IceTopDSTPulses',
            'CalibratedWaveformRange',  # keep calibration info
            'UncleanedInIcePulsesTimeRange',
            'SplitUncleanedInIcePulsesTimeRange',
            'SplitUncleanedInIceDSTPulsesTimeRange',
            'CalibrationErrata',
            'SaturationWindows',
            'InIceRawData',  # keep raw data for now
            'IceTopRawData',
            'CorsikaWeightMap',  # sim keys
            'I3MCWeightDict',
            'MCHitSeriesMap',
            'MMCTrackList',
            'I3MCTree',
            'I3LinearizedMCTree',
            'MCPrimary',
            'MCPrimaryInfo',
            'TimeShift',  # the time shift amount
            'WIMP_params',  # Wimp-sim
            'SimTrimmer',  # for SimTrimmer flag
            'I3MCPESeriesMap',
            'I3MCPulseSeriesMap',
            'I3MCPulseSeriesMapParticleIDMap',
        ] + muongun_keys,
        If=is_Q)

    tray.AddModule("I3IcePickModule<FilterMaskFilter>", "filterMaskCheckAll",
                   FilterNameList=filter_globals.filter_streams,
                   FilterResultName=filter_globals.qfilter_mask,
                   DecisionName="PassedAnyFilter",
                   DiscardEvents=False,
                   Streams=[icetray.I3Frame.DAQ])

    def do_save_just_superdst(frame):
        if frame.Has("PassedAnyFilter"):
            if not frame["PassedAnyFilter"].value:
                return True  # <- Event failed to pass any filter.
            else:
                return False  # <- Event passed some filter
        else:
            print("Failed to find key frame Bool!!")
            return False

    tray.AddModule(
        "Keep",
        "KeepOnlySuperDSTs",
        keys=filter_globals.keep_nofilterpass + [
            'PassedAnyFilter',
            'InIceDSTPulses',
            'IceTopDSTPulses',
            'SplitUncleanedInIcePulses',
            'SplitUncleanedInIcePulsesTimeRange',
            'SplitUncleanedInIceDSTPulsesTimeRange',
            'CorsikaWeightMap',  # sim keys
            'I3MCWeightDict',
            'MCHitSeriesMap',
            'MMCTrackList',
            'I3MCTree',
            'I3LinearizedMCTree',
            'MCPrimary',
            'MCPrimaryInfo',
            'TimeShift',  # the time shift amount
            'WIMP_params',  # Wimp-sim
            'I3MCPESeriesMap',
            'I3MCPulseSeriesMap',
            'I3MCPulseSeriesMapParticleIDMap',
        ] + muongun_keys,
        If=do_save_just_superdst)

    ## Now clean up the events that not even the SuperDST filters passed on.
    tray.AddModule("I3IcePickModule<FilterMaskFilter>", "filterMaskCheckSDST",
                   FilterNameList=filter_globals.sdst_streams,
                   FilterResultName=filter_globals.qfilter_mask,
                   DecisionName="PassedKeepSuperDSTOnly",
                   DiscardEvents=False,
                   Streams=[icetray.I3Frame.DAQ])

    def dont_save_superdst(frame):
        if frame.Has("PassedKeepSuperDSTOnly") and frame.Has(
                "PassedAnyFilter"):
            if frame["PassedAnyFilter"].value:
                return False  # <- these passed a regular filter, keeper
            elif not frame["PassedKeepSuperDSTOnly"].value:
                return True  # <- Event failed to pass SDST filter.
            else:
                return False  # <- Event passed some SDST filter
        else:
            print("Failed to find key frame Bool!!")
            return False

    # backward compatibility
    if 'L1_keep_untriggered' in cfg and cfg['L1_keep_untriggered']:
        discard_substream_and_keys = False
    else:
        discard_substream_and_keys = True

    # NOTE(review): exact extent of this guarded region was ambiguous in
    # the (whitespace-mangled) original; the flag name suggests both the
    # key-discard and the substream squish are guarded — confirm.
    if discard_substream_and_keys:
        tray.AddModule("Keep", "KeepOnlyDSTs",
                       keys=filter_globals.keep_dst_only + [
                           "PassedAnyFilter",
                           "PassedKeepSuperDSTOnly",
                           filter_globals.eventheader
                       ] + muongun_keys,
                       If=dont_save_superdst)

        ## Frames should now contain only what is needed.
        ## now flatten, write/send to server
        ## Squish P frames back to single Q frame, one for each split:
        tray.AddModule(
            "KeepFromSubstream",
            "null_stream",
            StreamName=filter_globals.NullSplitter,
            KeepKeys=filter_globals.null_split_keeps,
        )

    # Keep the P frames for InIce intact
    # tray.AddModule("KeepFromSubstream","inice_split_stream",
    #                StreamName = filter_globals.InIceSplitter,
    #                KeepKeys = filter_globals.inice_split_keeps +
    #                           filter_globals.onlinel2filter_keeps,
    #                )

    in_ice_keeps = filter_globals.inice_split_keeps + \
        filter_globals.onlinel2filter_keeps
    in_ice_keeps = in_ice_keeps + [
        'I3EventHeader',
        'SplitUncleanedInIcePulses',
        'SplitUncleanedInIcePulsesTimeRange',
        'SplitUncleanedInIceDSTPulsesTimeRange',
        'I3TriggerHierarchy',
        'GCFilter_GCFilterMJD'
    ]
    tray.AddModule(
        "Keep",
        "inice_keeps",
        keys=in_ice_keeps + muongun_keys,
        If=which_split(split_name=filter_globals.InIceSplitter),
    )

    tray.AddModule(
        "KeepFromSubstream",
        "icetop_split_stream",
        StreamName=filter_globals.IceTopSplitter,
        KeepKeys=filter_globals.icetop_split_keeps,
    )

    tray.AddModule("I3IcePickModule<FilterMaskFilter>", "filterMaskCheck",
                   FilterNameList=filter_globals.filters_keeping_allraw,
                   FilterResultName=filter_globals.qfilter_mask,
                   DecisionName="PassedConventional",
                   DiscardEvents=False,
                   Streams=[icetray.I3Frame.DAQ])

    ## Clean out the Raw Data when not passing conventional filter
    def I3RawDataCleaner(frame):
        if not (('PassedConventional' in frame and
                 frame['PassedConventional'].value == True) or
                ('SimTrimmer' in frame and
                 frame['SimTrimmer'].value == True)):
            frame.Delete('InIceRawData')
            frame.Delete('IceTopRawData')

    tray.AddModule(I3RawDataCleaner, "CleanErrataForConventional",
                   Streams=[icetray.I3Frame.DAQ])

    tray.AddModule("I3Writer", "EventWriter",
                   filename=outfile,
                   Streams=[
                       icetray.I3Frame.DAQ,
                       icetray.I3Frame.Physics,
                       icetray.I3Frame.TrayInfo,
                       icetray.I3Frame.Stream('S')
                   ])
    tray.AddModule("TrashCan", "the can")
    tray.Execute()
    tray.Finish()
def main(cfg, run_number, scratch, do_merging_if_necessary):
    # Extract MC pulses from the noiseless PE series and optionally merge
    # oversampled events, then strip bulky MC keys and write the output.
    #
    # Parameters:
    #   cfg: path to a YAML configuration file (rebound to the parsed dict).
    #   run_number: run number of this job; used for file patterns.
    #   scratch: if True, write to the scratch file pattern.
    #   do_merging_if_necessary: if False, skip merging even when the
    #       config defines an oversampling factor.
    with open(cfg, 'r') as stream:
        if int(yaml.__version__[0]) < 5:
            # backwards compatibility for yaml versions before version 5
            cfg = yaml.load(stream)
        else:
            cfg = yaml.full_load(stream)
    cfg['run_number'] = run_number
    cfg['run_folder'] = get_run_folder(run_number)

    # Build the input path; blank-padded run numbers become zero-padded,
    # and the level suffix is reduced modulo 10.
    infile = cfg['infile_pattern'].format(**cfg)
    infile = infile.replace(' ', '0')
    infile = infile.replace('Level0.{}'.format(cfg['previous_step']),
                            'Level0.{}'.format(cfg['previous_step'] % 10))
    infile = infile.replace('2012_pass2', 'pass2')

    # Same path normalization for the output file.
    if scratch:
        outfile = cfg['scratchfile_pattern'].format(**cfg)
    else:
        outfile = cfg['outfile_pattern'].format(**cfg)
    outfile = outfile.replace('Level0.{}'.format(cfg['step']),
                              'Level0.{}'.format(cfg['step'] % 10))
    outfile = outfile.replace(' ', '0')
    outfile = outfile.replace('2012_pass2', 'pass2')
    print('Outfile != $FINAL_OUT clean up for crashed scripts not possible!')

    tray = I3Tray()
    tray.AddModule('I3Reader', 'i3 reader',
                   FilenameList=[cfg['gcd_pass2'], infile])

    # get MC pulses from the noise-free PE series map
    tray.AddModule(GetMCPulses, "GetMCPulses",
                   I3MCPESeriesMap='I3MCPESeriesMapWithoutNoise',
                   OutputKey='MCPulses',
                   CreatePFrames=True)

    # merge oversampled events: calculate average hits
    if cfg['oversampling_factor'] is not None and do_merging_if_necessary:
        if 'oversampling_merge_events' in cfg:
            merge_events = cfg['oversampling_merge_events']
        else:
            # backward compatibility: older configs did not have this key
            # and always merged
            merge_events = True
        if merge_events:
            tray.AddModule(MergeOversampledEvents, 'MergeOversampledEvents',
                           OversamplingFactor=cfg['oversampling_factor'],
                           PulseKey='MCPulses')

    # Make space and delete unneeded (bulky) MC and raw-data keys
    keys_to_delete = [
        'I3MCPESeriesMap',
        'I3MCPulseSeriesMap',
        'I3MCPESeriesMapWithoutNoise',
        'I3MCPulseSeriesMapParticleIDMap',
        'I3MCPulseSeriesMapPrimaryIDMap',
        'InIceRawData',
        'IceTopRawData',
    ]
    tray.AddModule('Delete', 'DeleteKeys', keys=keys_to_delete)

    tray.AddModule("I3Writer", "EventWriter",
                   filename=outfile,
                   Streams=[icetray.I3Frame.DAQ,
                            icetray.I3Frame.Physics,
                            icetray.I3Frame.TrayInfo,
                            icetray.I3Frame.Simulation,
                            icetray.I3Frame.Stream('M')])
    tray.AddModule("TrashCan", "the can")
    tray.Execute()
    tray.Finish()
def main(cfg, run_number, scratch):
    """Run offline Level-2 processing (rehydration, recalibration, recos).

    Reads the yaml config, resolves in/out file names from the configured
    patterns, then runs the Level-1 rehydration, hit cleaning and the
    per-working-group Level-2 reconstructions before writing the output.

    Parameters:
        cfg: path to a yaml configuration file; rebound to the parsed dict.
        run_number: run number substituted into the file-name patterns.
        scratch: if True, write to the scratch file pattern instead of the
            final outfile pattern.
    """
    with open(cfg, 'r') as stream:
        if int(yaml.__version__[0]) < 5:
            # backwards compatibility for yaml versions before version 5
            cfg = yaml.load(stream)
        else:
            cfg = yaml.full_load(stream)
    # Some yaml dumps wrap the payload in a 'dictitems' key; unwrap it.
    if 'dictitems' in cfg.keys():
        cfg = cfg['dictitems']
    cfg['run_number'] = run_number
    cfg['run_folder'] = get_run_folder(run_number)

    # Fill and normalize the input path: zero-pad, single-digit Level0
    # step, and drop the pass2 suffix from the year tag.
    infile = cfg['infile_pattern'].format(**cfg)
    infile = infile.replace(' ', '0')
    infile = infile.replace('Level0.{}'.format(cfg['previous_step']),
                            'Level0.{}'.format(cfg['previous_step'] % 10))
    infile = infile.replace('2012_pass2', '2012')

    if scratch:
        outfile = cfg['scratchfile_pattern'].format(**cfg)
    else:
        outfile = cfg['outfile_pattern'].format(**cfg)
    # Output is labeled Level2 regardless of the numeric step.
    outfile = outfile.replace('Level0.{}'.format(cfg['step']), 'Level2')
    outfile = outfile.replace(' ', '0')
    outfile = outfile.replace('2012_pass2', '2012')
    print('Outfile != $FINAL_OUT clean up for crashed scripts not possible!')

    tray = I3Tray()
    tray.AddModule('I3Reader', 'i3 reader',
                   FilenameList=[cfg['gcd'], infile])

    # NOTE(review): this module is defined but not added to the tray in
    # the visible code — confirm whether it is still needed.
    class EmptyIceTopBadLists(icetray.I3ConditionalModule):
        # Writes empty IceTop bad-DOM/bad-tank lists into D frames.
        def __init__(self, context):
            icetray.I3ConditionalModule.__init__(self, context)

        def Configure(self):
            self.Register(icetray.I3Frame.DetectorStatus, self.Detector)

        def Detector(self, frame):
            frame['IceTopBadDOMs'] = dataclasses.I3VectorOMKey()
            frame['IceTopBadTanks'] = dataclasses.TankKey.I3VectorTankKey()
            self.PushFrame(frame)

    ##################################################################
    #########                 Level 1                      ###########
    ######### IF SIM, do L1 that was done on PnF           ###########
    ######### IF DATA, Rehydrate, recalibrate              ###########
    ######### FOR BOTH, recal, resplit IT                  ###########
    ##################################################################
    tray.AddSegment(Rehydration, 'rehydrator',
                    dstfile=None,
                    mc=True)
    ## relic of redoing pole fits. That got taken out.
    ## but need to keep doing SRT cleaning for all the filters
    tray.AddModule(
        "I3SeededRTHitMaskingModule", 'North_seededrt',
        MaxIterations=3,
        Seeds='HLCcore',
        InputResponse='SplitInIcePulses',
        OutputResponse='SRTInIcePulses',
        # Only for InIceSplit frames belonging to one of these working
        # groups.
        If=lambda f: (which_split(f, split_name='InIceSplit') and
                      (deepcore_wg(f) or wimp_wg(f) or muon_wg(f) or
                       cascade_wg(f) or ehe_wg(f) or fss_wg(f) or
                       icetop_wg_coic_inice(f))))

    ## Counter to keep track of the differences between PnF and offline split
    # tray.AddModule("I3PQEventCounter", "countme")(
    #     ("Substreams", ["InIceSplit"]),
    #     ("Bools", ["NFramesIsDifferent"]),
    # )

    ## IceTop pules calibration
    tray.AddSegment(CalibrateAndExtractIceTop, 'CalibrateAndExtractIceTop',
                    Pulses='IceTopPulses')

    ## EHE Calibration
    tray.AddSegment(EHECalibration, 'ehecalib',
                    inPulses='CleanInIceRawData',
                    outATWD='EHECalibratedATWD_Wave',
                    outFADC='EHECalibratedFADC_Wave',
                    If=lambda f: ehe_wg_Qstream(f))

    ###################################################################
    ########### HIT CLEANING #######################################
    ###################################################################
    # icetop hitcleaning & splitting #
    tray.AddSegment(IceTopCoincTWCleaning, 'IceTopCoincTWCleaning',
                    VEMPulses='CleanedHLCTankPulses',
                    OfflinePulses='InIcePulses')

    # deepcore hitcleaning #
    tray.AddSegment(
        DeepCoreHitCleaning, 'DCHitCleaning',
        If=lambda f: (which_split(f, split_name='InIceSplit') and
                      deepcore_wg(f)))

    # wimp & FSS hitcleaning #
    tray.AddSegment(
        WimpHitCleaning, "WIMPstuff",
        If=lambda f: (which_split(f, split_name='InIceSplit') and
                      (wimp_wg(f) or fss_wg_finiteReco(f))),
        suffix='_WIMP',
    )

    # cascade hit cleaning #
    tray.AddSegment(
        CascadeHitCleaning, 'CascadeHitCleaning',
        If=lambda f: (which_split(f, split_name='InIceSplit') and
                      cascade_wg(f)),
    )

    # ehe hit cleaning #
    tray.AddSegment(
        HitCleaningEHE, 'eheclean',
        inATWD='EHECalibratedATWD_Wave',
        inFADC='EHECalibratedFADC_Wave',
        If=lambda f: which_split(f, split_name='InIceSplit') and ehe_wg(f))

    ###################################################################
    ########### RECONSTRUCTIONS/CALCULATIONS ##########################
    ###################################################################
    # load tables #
    tray.AddSegment(InstallTables, 'InstallPhotonTables',
                    PhotonicsDir=PHOTONICS_DIR)

    # muon, cascade, wimp, fss #
    tray.AddSegment(
        OfflineMuonReco, 'OfflineMuonRecoSLC',
        Pulses="SRTInIcePulses",
        If=lambda f: ((muon_wg(f) or icetop_wg_coic_inice(f) or
                       cascade_wg(f) or wimp_wg(f) or fss_wg(f)) and
                      which_split(f, split_name='InIceSplit')),
        suffix="",  # null? copied from level2_globals supplied
        # photonics_service_mu_spice1 = Globals.photonics_service_mu_spice1,
        # photonics_service_mu_spicemie = Globals.photonics_service_mu_spicemie
    )

    # icetop #
    tray.AddSegment(ReconstructIceTop, 'ReconstructIceTop',
                    Pulses='CleanedHLCTankPulses',
                    CoincPulses='CleanedCoincOfflinePulses',
                    If=lambda f: which_split(f, split_name='ice_top'))

    # deepcore #
    tray.AddSegment(
        OfflineDeepCoreReco, 'DeepCoreL2Reco',
        pulses='SRTTWOfflinePulsesDC',
        If=lambda f: (which_split(f, split_name='InIceSplit') and
                      deepcore_wg(f)),
        suffix='_DC')

    # wimp, fss #
    tray.AddSegment(
        WimpReco, "WIMPreco",
        If=lambda f: (which_split(f, split_name='InIceSplit') and
                      (wimp_wg(f) or fss_wg_finiteReco(f))),
        suffix='_WIMP',
    )

    # cascade #
    tray.AddSegment(
        OfflineCascadeReco, 'CascadeL2Reco',
        Pulses='TWOfflinePulsesHLC',
        TopoPulses='OfflinePulsesHLC',
        PhotonicsServiceName=Globals.photonics_service_cscd,
        If=lambda f: (which_split(f, split_name='InIceSplit') and
                      cascade_wg(f)),
        suffix='_L2')

    # slop #
    tray.AddSegment(SLOPLevel2, "slop_me",
                    If=lambda f: which_split(f, split_name='SLOPSplit'))

    # ehe #
    tray.AddSegment(
        ReconstructionEHE, 'ehereco',
        Pulses='EHETWCInIcePulsesSRT',
        suffix='EHE',
        LineFit='LineFit',
        SPEFitSingle='SPEFitSingle',
        SPEFit='SPEFit12',
        N_iter=12,
        If=lambda f: which_split(f, split_name='InIceSplit') and ehe_wg(f))

    tray.AddModule("I3Writer", "EventWriter",
                   filename=outfile,
                   Streams=[
                       icetray.I3Frame.DAQ,
                       icetray.I3Frame.Physics,
                       icetray.I3Frame.TrayInfo,
                       icetray.I3Frame.Stream('S')
                   ],
                   # Drop Q frames that end up with no surviving P frame.
                   DropOrphanStreams=[icetray.I3Frame.DAQ])
    tray.AddModule("TrashCan", "the can")
    tray.Execute()
    tray.Finish()
def main(cfg, run_number, scratch):
    """Run the Aachen diffuse-numu final-level filter (L4/L5 + millipede).

    Reads the yaml config, resolves in/out file names from the configured
    patterns, loads the photon-table paths from the package's paths.cfg,
    verifies the tables against their checksums, then runs the L4/L5
    selection, millipede losses and paraboloid segments and writes the
    output file.

    Parameters
    ----------
    cfg : str
        Path to the yaml configuration file (rebound to the parsed dict).
    run_number : int
        Run number substituted into the file-name patterns.
    scratch : bool
        If True, write to the scratch file pattern instead of the final
        outfile pattern.

    Raises
    ------
    RuntimeError
        If the md5 checksum verification of the tables fails.
    """
    with open(cfg, 'r') as stream:
        cfg = yaml.load(stream, Loader=yaml.Loader)
    cfg['run_number'] = run_number
    cfg['run_folder'] = get_run_folder(run_number)

    infile = cfg['infile_pattern'].format(**cfg)
    infile = infile.replace(' ', '0')
    if scratch:
        outfile = cfg['scratchfile_pattern'].format(**cfg)
    else:
        outfile = cfg['outfile_pattern'].format(**cfg)
    outfile = outfile.replace(' ', '0')

    # Package data directory inside the build tree (paths.cfg, checksums).
    lib_dir = os.path.join(os.path.expandvars('$I3_BUILD'),
                           'lib/icecube/finallevel_filter_diffusenumu')

    parser = ConfigParser()
    # Fix: use a context manager so the handle is closed, and read_file
    # instead of the deprecated readfp (removed in Python 3.12).
    with open(os.path.join(lib_dir, 'paths.cfg')) as paths_file:
        parser.read_file(paths_file)
    paths = dict(parser.items("main"))

    # Check if somebody messed with the tables
    ret = os.system("md5sum -c {}".format(
        os.path.join(lib_dir, 'checksums')))
    if ret != 0:
        raise RuntimeError("Tables are corrupt")

    tray = I3Tray()

    tray.Add(level4.IC12L4,
             gcdfile=cfg['gcd'],
             infiles=infile,
             table_paths=paths,
             is_numu=cfg['aachen_diffuse_numu_isNuMu'])

    # BDT classifier files shipped with the project's resources.
    bdt_dir = os.path.join(os.path.expandvars('$I3_BUILD'),
                           'finallevel_filter_diffusenumu/resources/bdt')
    tray.Add(level5.segments.Scorer, "doLevel5",
             CutFunc=level5.segments.CutFunc,
             CascCut=0.5,
             ClfPath=os.path.join(
                 bdt_dir, cfg['aachen_diffuse_numu_ClfPath']),
             CascClfPath=os.path.join(
                 bdt_dir, cfg['aachen_diffuse_numu_CascClfPath']))

    tray.Add(level5.segments.millipede_segment, "MillipedeLosses",
             table_paths=paths)
    tray.Add(level5.segments.paraboloid_segment, "Paraboloid",
             table_paths=paths)

    # write output
    tray.AddModule("I3Writer", "EventWriter",
                   filename=outfile,
                   Streams=[
                       icetray.I3Frame.DAQ,
                       icetray.I3Frame.Physics,
                       icetray.I3Frame.TrayInfo,
                       icetray.I3Frame.Simulation
                   ],
                   DropOrphanStreams=[icetray.I3Frame.DAQ])

    tray.Execute()

    # Report per-module resource usage after the run.
    usagemap = tray.Usage()
    for mod in usagemap:
        print(mod)