Example #1
def main(cfg, run_number, scratch):
    with open(cfg, 'r') as stream:
        cfg = yaml.load(stream)
    cfg['run_number'] = run_number
    cfg['run_folder'] = get_run_folder(run_number)

    infile = cfg['infile_pattern'].format(**cfg)
    infile = infile.replace(' ', '0')

    tray = I3Tray()

    tray.context['I3FileStager'] = dataio.get_stagers()

    random_service, _, run_id = create_random_services(
        dataset_number=cfg['dataset_number'],
        run_number=cfg['run_number'],
        seed=cfg['seed'])

    tray.context['I3RandomService'] = random_service

    tray.Add('I3Reader', FilenameList=[cfg['gcd_pass2'], infile])

    if run_number < cfg['det_pass2_keep_all_upto']:
        cfg['det_keep_mc_hits'] = True
        cfg['det_keep_propagated_mc_tree'] = True
        cfg['det_keep_mc_pulses'] = True

    tray.AddSegment(segments.DetectorSim,
                    "Detector5Sim",
                    RandomService='I3RandomService',
                    RunID=run_id,
                    GCDFile=cfg['gcd_pass2'],
                    KeepMCHits=cfg['det_keep_mc_hits'],
                    KeepPropagatedMCTree=cfg['det_keep_propagated_mc_tree'],
                    KeepMCPulses=cfg['det_keep_mc_pulses'],
                    SkipNoiseGenerator=cfg['det_skip_noise_generation'],
                    LowMem=cfg['det_low_mem'],
                    InputPESeriesMapName=MCPE_SERIES_MAP,
                    BeaconLaunches=cfg['det_add_beacon_launches'],
                    FilterTrigger=cfg['det_filter_trigger'])

    if scratch:
        outfile = cfg['scratchfile_pattern'].format(**cfg)
    else:
        outfile = cfg['outfile_pattern'].format(**cfg)
    outfile = outfile.replace(' ', '0')

    print(outfile)
    print(cfg['outfile_pattern'])
    tray.AddModule("I3Writer",
                   "EventWriter",
                   filename=outfile,
                   Streams=[
                       icetray.I3Frame.DAQ, icetray.I3Frame.Physics,
                       icetray.I3Frame.TrayInfo, icetray.I3Frame.Simulation
                   ])
    tray.AddModule("TrashCan", "the can")
    tray.Execute()
    tray.Finish()
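For context, a minimal sketch of how such an entry point is typically exposed on the command line with click (the decorators, option names, and the __main__ guard below are an assumption and not part of the snippet above):

import click

@click.command()
@click.argument('cfg', type=click.Path(exists=True))    # YAML steering file
@click.argument('run_number', type=int)                  # run to process
@click.option('--scratch/--no-scratch', default=True)    # write to the scratch pattern
def cli(cfg, run_number, scratch):
    main(cfg, run_number, scratch)

if __name__ == '__main__':
    cli()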
Example #2
def merge(infiles, outfile):
    tray = I3Tray()
    tray.context['I3FileStager'] = dataio.get_stagers()
    tray.Add('I3Reader', FilenameList=infiles)
    tray.AddModule(filter_S_frame,
                   'S Frame Filter',
                   Streams=[icetray.I3Frame.Stream('S')])
    tray.AddModule("I3Writer",
                   "writer",
                   Filename=outfile,
                   Streams=[
                       icetray.I3Frame.DAQ, icetray.I3Frame.Physics,
                       icetray.I3Frame.Stream('S'),
                       icetray.I3Frame.Stream('M')
                   ])
    tray.AddModule("TrashCan", "the can")
    tray.Execute()
    tray.Finish()
    for file_i in infiles:
        click.echo('Removing {}:'.format(file_i))
        os.remove(file_i)
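For illustration, a hypothetical call that merges a set of per-run files into one output file; note that merge() deletes its input files after writing (the paths and glob pattern below are placeholders):

import glob

infiles = sorted(glob.glob('/data/sim/run_00001/*.i3.bz2'))   # placeholder input location
merge(infiles, '/data/sim/run_00001_merged.i3.bz2')           # writes the merged file, then removes the inputs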
Example #3
    def create_frames(self, files):
        """Create a list of frames to import from the provided files

        Parameters
        ----------
        files : list of str
            The list of file paths from which to import the events.
        """
        frame_list = []
        tray = I3Tray()
        tray.context['I3FileStager'] = dataio.get_stagers()
        tray.Add('I3Reader', FilenameList=files)
        tray.Add(
            export_frame,
            'export_frame',
            frame_list=frame_list,
            mctree_name=self.mctree_name,
            keys_to_export=self.keys_to_import,
            rename_dict=self.rename_dict,
        )
        tray.Execute()
        tray.Finish()

        return frame_list
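The export_frame callable booked above is not included in the snippet. Since icetray turns a Python function's keyword arguments (those with default values) into module parameters, a minimal sketch of what such a function could look like is shown below; the names and behavior are assumptions:

def export_frame(frame, frame_list=None, mctree_name='I3MCTree',
                 keys_to_export=None, rename_dict=None):
    """Collect selected objects from each frame into frame_list."""
    exported = {}
    for key in (keys_to_export or []):
        if key in frame:
            # rename_dict maps original key names to the names used downstream
            out_name = (rename_dict or {}).get(key, key)
            exported[out_name] = frame[key]
    if mctree_name in frame:
        exported[mctree_name] = frame[mctree_name]
    if frame_list is not None:
        frame_list.append(exported)
    return True  # keep the frame in the stream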
    help="Name of pulse map from which to reconstruct.  Default is %(default)s",
    dest="pulse")

args = parser.parse_args()

##############CONFIGURATION STUFF###########################

PulsesForReco = args.pulse
sub_event_streams = ["IC86_SMT8", "GEN2"]

############################################################
from I3Tray import I3Tray
from icecube import icetray, dataio, dataclasses, hdfwriter

tray = I3Tray()
tray.context['I3FileStager'] = dataio.get_stagers()
tray.Add("I3Reader", "read", FilenameList=[args.gcd] + [args.infile])

from icecube.gen2_sim.segments.BaseReco import BaseReco

keys = tray.Add(BaseReco,
                PulsesForReco=PulsesForReco,
                SplineMPE=args.with_spline_mpe,
                FitPrefix=args.fitprefix)

hdf_keys = keys + [
    'I3EventHeader', 'I3MCWeightDict', 'NuGPrimary', 'MostEnergeticMuon',
    'MCMuon', 'MuonEffectiveArea', 'SplineMPE_recommendedFitParams',
    'SplineMPEMuEXDifferential'
]
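The snippet stops before hdf_keys is used. Given the hdfwriter import and the sub_event_streams list defined above, a plausible continuation (an assumption, not shown in the original) books those keys into an HDF5 file:

from icecube.hdfwriter import I3HDFWriter

tray.AddSegment(I3HDFWriter, 'hdfwriter',
                Output=args.infile + '.hd5',        # hypothetical output path
                Keys=hdf_keys,
                SubEventStreams=sub_event_streams)
tray.Execute()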
Example #5
def process_single_stream(cfg, infile, outfile):
    click.echo('Input: {}'.format(infile))
    hybrid_mode = (cfg['clsim_hybrid_mode']
                   and cfg['icemodel'].lower() != 'spicelea')
    ignore_muon_light = (cfg['clsim_ignore_muon_light']
                         and cfg['clsim_hybrid_mode'])
    click.echo('UseGPUs: {}'.format(cfg['clsim_usegpus']))
    click.echo('IceModel: {}'.format(cfg['icemodel']))
    if cfg['icemodel_location'] is not None:
        click.echo('IceModelLocation: {}'.format(cfg['icemodel_location']))
    click.echo('DomOversize {}'.format(cfg['clsim_dom_oversize']))
    click.echo('UnshadowedFraction: {0:.2f}'.format(
        cfg['clsim_unshadowed_fraction']))
    click.echo('HybridMode: {}'.format(hybrid_mode))
    click.echo('IgnoreMuonLight: {}'.format(ignore_muon_light))
    click.echo('KeepMCPE: {}'.format(cfg['clsim_keep_mcpe']))
    click.echo('Output: {}'.format(outfile))

    tray = I3Tray()
    tray.context['I3FileStager'] = dataio.get_stagers()
    random_services, _ = create_random_services(
        dataset_number=cfg['dataset_number'],
        run_number=cfg['run_number'],
        seed=cfg['seed'],
        n_services=process_single_stream.n_streams)

    random_service = random_services[process_single_stream.i_th_stream]
    tray.context['I3RandomService'] = random_service
    tray.Add('I3Reader', FilenameList=[cfg['gcd'], infile])

    if hybrid_mode:
        cascade_tables = segments.LoadCascadeTables(IceModel=cfg['icemodel'],
                                                    TablePath=SPLINE_TABLES)
    else:
        cascade_tables = None

    if cfg['clsim_usegpus']:
        use_gpus = True
        use_cpus = False
    else:
        # fall back to CPUs when GPUs are not requested
        use_gpus = False
        use_cpus = True

    tray.AddSegment(segments.PropagatePhotons,
                    "PropagatePhotons",
                    RandomService=random_service,
                    MaxParallelEvents=MAX_PARALLEL_EVENTS,
                    KeepIndividualMaps=cfg['clsim_keep_mcpe'],
                    IceModel=cfg['icemodel'],
                    IceModelLocation=cfg['icemodel_location'],
                    UnshadowedFraction=cfg['clsim_unshadowed_fraction'],
                    IgnoreMuons=ignore_muon_light,
                    HybridMode=hybrid_mode,
                    UseGPUs=use_gpus,
                    UseAllCPUCores=use_cpus,
                    DOMOversizeFactor=cfg['clsim_dom_oversize'],
                    CascadeService=cascade_tables)

    outfile = outfile.replace(' ', '0')
    tray.AddModule("I3Writer",
                   "writer",
                   Filename=outfile,
                   Streams=[
                       icetray.I3Frame.DAQ, icetray.I3Frame.Physics,
                       icetray.I3Frame.Stream('S'),
                       icetray.I3Frame.Stream('M')
                   ])
    tray.AddModule("TrashCan", "the can")
    tray.Execute()
    tray.Finish()
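process_single_stream reads the total number of streams and its own stream index from function attributes (process_single_stream.n_streams and process_single_stream.i_th_stream). A sketch of how a caller might set them before invoking the function, with placeholder values:

process_single_stream.n_streams = 10    # total number of parallel streams
process_single_stream.i_th_stream = 3   # index of the stream handled by this job
process_single_stream(cfg, infile, outfile)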
Example #6
def main(cfg, run_number, scratch):
    with open(cfg, 'r') as stream:
        cfg = yaml.load(stream)
    cfg['run_number'] = run_number
    cfg['run_folder'] = get_run_folder(run_number)

    click.echo('Keep all OnlineL2: {}'.format(cfg['OnlineL2_keep_all_L2']))
    click.echo('Keep time residuals: {}'.format(
        cfg['OnlineL2_keep_time_residuals']))

    infile = cfg['infile_pattern'].format(**cfg)
    infile = infile.replace(' ', '0')
    infile = infile.replace('Level0.{}'.format(cfg['previous_step']), 'Level2')
    infile = infile.replace('Level0.{}'.format(cfg['previous_step']),
                            'Level0.{}'.format(cfg['previous_step'] % 10))
    infile = infile.replace('2012_pass2', '2012')

    if scratch:
        outfile = cfg['scratchfile_pattern'].format(**cfg)
    else:
        outfile = cfg['outfile_pattern'].format(**cfg)
    outfile = outfile.replace('Level0.{}'.format(cfg['step']), '2017OnlineL2')
    outfile = outfile.replace(' ', '0')
    outfile = outfile.replace('2012_pass2', '2012')
    print('Outfile != $FINAL_OUT clean up for crashed scripts not possible!')

    # build tray
    tray = I3Tray()
    tray.context['I3FileStager'] = dataio.get_stagers()
    tray.Add('I3Reader',
             FilenameList=[cfg['gcd_pass2'], infile],
             SkipKeys=['I3MCTree'] if 'corsika' in infile.lower() else [])

    # drop existing P-frames (we will do our own splitting later)
    tray.Add(lambda f: False, Streams=[icetray.I3Frame.Physics])

    ############################################################################
    # the following modules repeat what is done in the base processing at Pole #
    ############################################################################

    # resplit Q frame
    icetray.load('trigger-splitter', False)
    tray.AddModule('I3TriggerSplitter',
                   filter_globals.InIceSplitter,
                   TrigHierName='DSTTriggers',
                   TriggerConfigIDs=[
                       filter_globals.deepcoreconfigid,
                       filter_globals.inicesmtconfigid,
                       filter_globals.inicestringconfigid,
                       filter_globals.volumetriggerconfigid
                   ],
                   SubEventStreamName=filter_globals.InIceSplitter,
                   InputResponses=['InIceDSTPulses'],
                   OutputResponses=[filter_globals.SplitUncleanedInIcePulses],
                   WriteTimeWindow=True)

    # evaluate TriggerHierarchy
    tray.AddModule(
        "TriggerCheck_13",
        "BaseProc_Trigchecker",
        I3TriggerHierarchy=filter_globals.triggerhierarchy,
        InIceSMTFlag=filter_globals.inicesmttriggered,
        IceTopSMTFlag=filter_globals.icetopsmttriggered,
        InIceStringFlag=filter_globals.inicestringtriggered,
        DeepCoreSMTFlag=filter_globals.deepcoresmttriggered,
        DeepCoreSMTConfigID=filter_globals.deepcoreconfigid,
        VolumeTriggerFlag=filter_globals.volumetrigtriggered,
        SlowParticleFlag=filter_globals.slowparticletriggered,
        FixedRateTriggerFlag=filter_globals.fixedratetriggered,
    )

    # run SRT and TW Cleaning from the Base Processing
    from icecube.STTools.seededRT.configuration_services import I3DOMLinkSeededRTConfigurationService
    seededRTConfig = I3DOMLinkSeededRTConfigurationService(
        ic_ic_RTRadius=150.0 * I3Units.m,
        ic_ic_RTTime=1000.0 * I3Units.ns,
        treat_string_36_as_deepcore=False,
        useDustlayerCorrection=False,
        allowSelfCoincidence=True)

    tray.AddModule(
        'I3SeededRTCleaning_RecoPulseMask_Module',
        'BaseProc_seededrt',
        InputHitSeriesMapName=filter_globals.SplitUncleanedInIcePulses,
        OutputHitSeriesMapName=filter_globals.SplitRTCleanedInIcePulses,
        STConfigService=seededRTConfig,
        SeedProcedure='HLCCoreHits',
        NHitsThreshold=2,
        MaxNIterations=3,
        Streams=[icetray.I3Frame.Physics],
        If=which_split(split_name=filter_globals.InIceSplitter))

    tray.AddModule("I3TimeWindowCleaning<I3RecoPulse>",
                   "TimeWindowCleaning",
                   InputResponse=filter_globals.SplitRTCleanedInIcePulses,
                   OutputResponse=filter_globals.CleanedMuonPulses,
                   TimeWindow=6000 * I3Units.ns,
                   If=which_split(split_name=filter_globals.InIceSplitter))

    tray.AddSegment(linefit.simple,
                    "BaseProc_imprv_LF",
                    inputResponse=filter_globals.CleanedMuonPulses,
                    fitName=filter_globals.muon_linefit,
                    If=which_split(split_name=filter_globals.InIceSplitter))

    # Muon LLH SimpleFitter from GulliverSuite with LineFit seed.
    tray.AddSegment(lilliput.segments.I3SinglePandelFitter,
                    filter_globals.muon_llhfit,
                    seeds=[filter_globals.muon_linefit],
                    pulses=filter_globals.CleanedMuonPulses,
                    If=which_split(split_name=filter_globals.InIceSplitter))

    # run MuonFilter
    tray.Add(MuonFilter,
             'MuonFilter',
             pulses=filter_globals.CleanedMuonPulses,
             If=which_split(split_name=filter_globals.InIceSplitter))
    tray.AddModule(
        "I3FirstPulsifier",
        "BaseProc_first-pulsify",
        InputPulseSeriesMapName=filter_globals.CleanedMuonPulses,
        OutputPulseSeriesMapName='FirstPulseMuonPulses',
        KeepOnlyFirstCharge=False,  # default
        UseMask=False,  # default
        If=which_split(split_name=filter_globals.InIceSplitter))

    # discard events not passing the MuonFilter
    tray.Add(lambda f: f.Has(filter_globals.MuonFilter) and f[
        filter_globals.MuonFilter].value)

    # run OnlineL2 filter
    tray.Add(TimerStart,
             timerName='OnlineL2',
             If=which_split(split_name=filter_globals.InIceSplitter))
    tray.AddSegment(OnlineL2Filter,
                    "OnlineL2",
                    If=which_split(split_name=filter_globals.InIceSplitter))
    tray.Add(TimerStop, timerName='OnlineL2')

    # discard events not passing the OnlineL2 filter
    tray.Add(lambda f: f.Has(filter_globals.OnlineL2Filter) and f[
        filter_globals.OnlineL2Filter].value)

    # run GFU filter
    tray.Add(TimerStart, timerName='GFU')
    tray.AddSegment(GammaFollowUp,
                    "GFU",
                    OnlineL2SegmentName="OnlineL2",
                    KeepDetails=cfg['OnlineL2_keep_time_residuals'],
                    angular_error=True)
    tray.Add(TimerStop, timerName='GFU')

    # discard events not passing the GFU filter
    if not cfg['OnlineL2_keep_all_L2']:
        tray.Add(lambda f: f.Has(filter_globals.GFUFilter) and f[
            filter_globals.GFUFilter].value)

        # in this case, also run splineMPE with maximum settings for comparison
        TEestis = [
            'OnlineL2_SplineMPE_TruncatedEnergy_AllDOMS_Muon',
            'OnlineL2_SplineMPE_TruncatedEnergy_DOMS_Muon',
            'OnlineL2_SplineMPE_TruncatedEnergy_AllBINS_Muon',
            'OnlineL2_SplineMPE_TruncatedEnergy_BINS_Muon',
            'OnlineL2_SplineMPE_TruncatedEnergy_ORIG_Muon'
        ]
        tray.Add(CustomSplineMPE,
                 'SplineMPEmax',
                 configuration='max',
                 pulses='OnlineL2_CleanedMuonPulses',
                 trackSeeds=['OnlineL2_SplineMPE'],
                 enEstis=TEestis,
                 paraboloid=True)

    # For MC weighting, keep the neutrino primary.
    if 'corsika' not in infile.lower():
        # Some CORSIKA files have I3MCTree objects much larger than 100 MB.
        # Loading them takes too long... instead use CorsikaWeightMap.PrimaryEnergy / PrimaryType for weighting.
        tray.AddModule(get_weighted_primary,
                       'get_weighted_primary',
                       MCPrimary='I3MCPrimary')

    # For MC studies, store information about the muon from CC interaction
    if 'neutrino-generator' in infile.lower():
        # store muon intersection points
        tray.Add(AddMuon)
        tray.Add(AddMuonIntersection)
        # store deposited energy in detector
        tray.Add(AddDepositedEnergy)

    tray.AddModule("I3Writer",
                   "EventWriter",
                   filename=outfile,
                   Streams=[
                       icetray.I3Frame.DAQ, icetray.I3Frame.Physics,
                       icetray.I3Frame.TrayInfo, icetray.I3Frame.Simulation
                   ],
                   DropOrphanStreams=[icetray.I3Frame.DAQ])
    tray.AddModule("TrashCan", "the can")
    tray.Execute()
    del tray
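The weighting comment in the CORSIKA branch above points to CorsikaWeightMap rather than the I3MCTree. Purely as an illustration (not part of this script), reading the primary energy and type from a Q-frame could look like this:

def print_corsika_primary(frame):
    # CorsikaWeightMap is a string-to-double map written by the CORSIKA reader
    wmap = frame['CorsikaWeightMap']
    print('PrimaryEnergy: {:.3e} GeV, PrimaryType: {:.0f}'.format(
        wmap['PrimaryEnergy'], wmap['PrimaryType']))
    return True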
Example #7
    # Keys to write to frame
    keys = []
    keys += ['I3EventHeader', filter_mask]
    keys += ['ShowerPlane', 'ShowerPlaneParams']
    keys += ['LoudestStation', 'LoudestOnEdge']
    keys += ['SaturationList', 'SaturatedOnEdge']
    keys += ['Q1', 'Q2', 'Q3', 'Q4']
    keys += ['NStations']
    for comp in LLH_tables.keys():
        keys += ['ShowerLLH_' + comp, 'ShowerLLHParams_' + comp]

    t0 = time.time()

    tray = I3Tray()
    tray.context['I3FileStager'] = dataio.get_stagers(staging_directory=os.environ['_CONDOR_SCRATCH_DIR'])
    tray.AddModule('I3Reader', FileNameList=args.files)
    hdf = I3HDFTableService(args.outFile)

    #====================================================================
    # Clean up events

    tray.AddModule(PruneIceTop,
                   it_stream=it_stream)

    #====================================================================
    # Cut information

    tray.AddModule(GetStations,
                   InputITpulses=recoPulses,
                   output='NStations')
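This snippet creates the I3HDFTableService but is cut off before the service is booked. A plausible continuation (an assumption, using names defined earlier in the truncated script) writes the collected keys through tableio:

    from icecube.tableio import I3TableWriter

    tray.AddModule(I3TableWriter, 'table_writer',
                   tableservice=[hdf],          # the I3HDFTableService created above
                   keys=keys,
                   SubEventStreams=[it_stream])
    tray.Execute()
    print('Time taken: {:.1f} s'.format(time.time() - t0))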
Example #8
def main(args, outputLevel=2):
    from I3Tray import I3Tray
    from icecube import dataio, icetop_Level3_scripts, dataclasses, phys_services, frame_object_diff
    from icecube.icetop_Level3_scripts import icetop_globals

    icetray.I3Logger.global_logger.set_level(icetray.I3LogLevel.LOG_ERROR)
    icetray.I3Logger.global_logger.set_level_for_unit(
        "MakeQualityCuts", icetray.I3LogLevel.LOG_INFO)

    if not args.L3_gcdfile:
        if args.isMC:
            gcdfile = [
                "/data/ana/CosmicRay/IceTop_level3/sim/%s/GCD/Level3_%i_GCD.i3.gz"
                % (args.detector, args.dataset)
            ]
        else:
            gcdfile = glob.glob(
                "/data/ana/CosmicRay/IceTop_level3/data/%s/GCD/Level3_%s_data_Run00%i_????_GCD.i3.gz"
                % (args.detector, args.detector, args.run))
    else:
        gcdfile = [args.L3_gcdfile]

    # Instantiate a tray
    tray = I3Tray()
    tray.context['I3FileStager'] = dataio.get_stagers(
        staging_directory=os.environ['_CONDOR_SCRATCH_DIR'])
    tray.AddModule('I3Reader',
                   'Reader',
                   FilenameList=gcdfile + args.inputFiles)

    #---------------------------------------------------------------------------------------------------------------

    from icecube.frame_object_diff.segments import uncompress

    # If the L2 GCD file is not specified, use the base_filename that was used for compression, after checking that it exists.
    # If the L2 GCD file is provided (e.g. when running on your own cluster where you copied the diff and L2 GCDs),
    # use it, but first check that its filename matches the base_filename used for compression.
    def CheckL2GCD(frame):
        geodiff = frame["I3GeometryDiff"]
        if args.L2_gcdfile:
            L2_GCD = args.L2_gcdfile
            if os.path.basename(L2_GCD) != os.path.basename(
                    geodiff.base_filename):
                icetray.logging.log_fatal(
                    '''The provided L2 GCD seems not suited to use for uncompressing the L3 GCD.
                                           It needs to have the same filename as the L2 GCD used to create the diff.'''
                )
        else:
            L2_GCD = geodiff.base_filename
        if not os.path.exists(L2_GCD):
            icetray.logging.log_fatal("L2 GCD file %s not found" % L2_GCD)

    tray.AddModule(CheckL2GCD, 'CheckL2CD', Streams=[icetray.I3Frame.Geometry])

    tray.Add(
        uncompress,
        base_filename=args.L2_gcdfile)  # works correctly if L2_gcdfile is None

    #---------------------------------------------------------------------------------------------------------------

    tray.AddSegment(icetop_Level3_scripts.segments.level3_IceTop,
                    "level3_IceTop",
                    detector=args.detector,
                    do_select=args.select,
                    isMC=args.isMC,
                    add_jitter=args.add_jitter)

    #---------------------------------------------------------------------------------------------------------------

    if args.do_inice:
        itpulses = 'IceTopHLCSeedRTPulses'

        tray.AddSegment(
            icetop_Level3_scripts.segments.level3_Coinc,
            "level3_Coinc",
            Detector=args.detector,
            isMC=args.isMC,
            do_select=args.select,
            IceTopTrack='Laputop',
            IceTopPulses=itpulses,
        )

    if args.waveforms:
        from icecube.icetop_Level3_scripts.functions import count_stations

        tray.AddModule(
            icetop_Level3_scripts.modules.FilterWaveforms,
            'FilterWaveforms',  #Puts IceTopWaveformWeight in the frame.
            pulses=icetop_globals.icetop_hlc_pulses,
            If=lambda frame: icetop_globals.icetop_hlc_pulses in frame and
            count_stations(
                dataclasses.I3RecoPulseSeriesMap.from_frame(
                    frame, icetop_globals.icetop_hlc_pulses)) >= 5)

        tray.AddSegment(icetop_Level3_scripts.segments.ExtractWaveforms,
                        'IceTop',
                        If=lambda frame: "IceTopWaveformWeight" in frame and
                        frame["IceTopWaveformWeight"].value != 0)

    #---------------------------------------------------------------------------------------------------------------

    ## Which keys to keep:
    wanted_general = [
        'I3EventHeader', icetop_globals.filtermask, 'I3TriggerHierarchy'
    ]

    if args.isMC:
        wanted_general += [
            'MCPrimary',
            'MCPrimaryInfo',
            'AirShowerComponents',
            'IceTopComponentPulses_Electron',
            'IceTopComponentPulses_ElectronFromChargedMesons',
            'IceTopComponentPulses_Gamma',
            'IceTopComponentPulses_GammaFromChargedMesons',
            'IceTopComponentPulses_Muon',
            'IceTopComponentPulses_Hadron',
        ]

    wanted_icetop_filter = ['IceTop_EventPrescale', 'IceTop_StandardFilter']

    wanted_icetop_pulses = [
        icetop_globals.icetop_hlc_pulses,
        icetop_globals.icetop_slc_pulses,
        icetop_globals.icetop_clean_hlc_pulses,
        icetop_globals.icetop_tank_pulse_merger_excluded_tanks,
        icetop_globals.icetop_cluster_cleaning_excluded_tanks,
        icetop_globals.icetop_HLCseed_clean_hlc_pulses,
        icetop_globals.icetop_HLCseed_excluded_tanks,
        icetop_globals.icetop_HLCseed_clean_hlc_pulses + '_SnowCorrected',
        'TankPulseMergerExcludedSLCTanks',
        'IceTopLaputopSeededSelectedHLC',
        'IceTopLaputopSeededSelectedSLC',
        'IceTopLaputopSmallSeededSelectedHLC',
        'IceTopLaputopSmallSeededSelectedSLC',
    ]

    wanted_icetop_waveforms = [
        'IceTopVEMCalibratedWaveforms', 'IceTopWaveformWeight'
    ]

    wanted_icetop_reco = [
        'ShowerCOG', 'ShowerPlane', 'ShowerPlaneParams', 'Laputop',
        'LaputopParams', 'LaputopSnowDiagnostics', 'LaputopSmall',
        'LaputopSmallParams', 'IsSmallShower'
    ]

    wanted_icetop_cuts = [
        'Laputop_FractionContainment',
        'Laputop_OnionContainment',
        'Laputop_NearestStationIsInfill',
        'StationDensity',
        'IceTopMaxSignal',
        'IceTopMaxSignalInEdge',
        'IceTopMaxSignalTank',
        'IceTopMaxSignalString',
        'IceTopNeighbourMaxSignal',
        'IT73AnalysisIceTopQualityCuts',
    ]

    wanted = wanted_general + wanted_icetop_filter + wanted_icetop_pulses + wanted_icetop_waveforms + wanted_icetop_reco + wanted_icetop_cuts

    if args.do_inice:
        wanted_inice_filter = [
            'IceTopInIce_EventPrescale', 'IceTopInIce_StandardFilter'
        ]

        wanted_inice_pulses = [
            icetop_globals.inice_pulses, icetop_globals.inice_coinc_pulses,
            icetop_globals.inice_clean_coinc_pulses,
            icetop_globals.inice_clean_coinc_pulses + "TimeRange",
            icetop_globals.inice_clean_coinc_pulses + "_Balloon",
            "SaturationWindows", "CalibrationErrata",
            'SRT' + icetop_globals.inice_coinc_pulses
        ]

        wanted_inice_reco = [
            "Millipede", "MillipedeFitParams", "Millipede_dEdX", "Stoch_Reco",
            "Stoch_Reco2", "I3MuonEnergyLaputopCascadeParams",
            "I3MuonEnergyLaputopParams"
        ]

        wanted_inice_cuts = [
            'NCh_' + icetop_globals.inice_clean_coinc_pulses,
            'IT73AnalysisInIceQualityCuts'
        ]

        wanted_inice_muon = [
            'CoincMuonReco_LineFit', 'CoincMuonReco_SPEFit2',
            'CoincMuonReco_LineFitParams', 'CoincMuonReco_SPEFit2FitParams',
            'CoincMuonReco_MPEFit', 'CoincMuonReco_MPEFitFitParams',
            'CoincMuonReco_MPEFitMuEX', 'CoincMuonReco_CVMultiplicity',
            'CoincMuonReco_CVStatistics',
            'CoincMuonReco_MPEFitCharacteristics',
            'CoincMuonReco_SPEFit2Characteristics',
            'CoincMuonReco_MPEFitTruncated_BINS_Muon',
            'CoincMuonReco_MPEFitTruncated_AllBINS_Muon',
            'CoincMuonReco_MPEFitTruncated_ORIG_Muon',
            'CoincMuonReco_SPEFit2_D4R_CascadeParams',
            'CoincMuonReco_SPEFit2_D4R_Params',
            'CoincMuonReco_MPEFitDirectHitsC'
        ]

        wanted = wanted + wanted_inice_filter + wanted_inice_pulses + wanted_inice_reco + wanted_inice_cuts + wanted_inice_muon

    tray.AddModule("Keep", 'DropObjects', Keys=wanted)

    if args.output.replace('.bz2', '').replace('.gz', '')[-3:] == '.i3':
        tray.AddModule(
            "I3Writer",
            "i3-writer",
            Filename=args.output,
            DropOrphanStreams=[icetray.I3Frame.DAQ],
            streams=[icetray.I3Frame.DAQ, icetray.I3Frame.Physics],
        )
    else:
        raise Exception('I do not know how to handle files with extension %s' %
                        args.output.replace('.bz2', '').replace('.gz', '')[-3:])

    # Execute the Tray
    if args.n is None:
        tray.Execute()
    else:
        tray.Execute(args.n)

    if args.print_usage:
        tray.PrintUsage(fraction=1.0)
    tray.Finish()
Example #9
def save_pixels(broker,
                auth_token,
                topic_in,
                filename_out,
                nsides_to_wait_for,
                npixel_for_nside=None,
                delete_from_queue=True):
    if npixel_for_nside is None:
        npixel_for_nside = {}

    # connect to pulsar
    client_service = PulsarClientService(
        BrokerURL=broker,
        AuthToken=auth_token,
    )

    receiver_service = ReceiverService(
        client_service=client_service,
        topic=topic_in,
        subscription_name='skymap-saver-sub',
        force_single_consumer=True,
    )

    ########## the tray
    tray = I3Tray()

    tray.context['I3FileStager'] = dataio.get_stagers()

    tray.Add(
        ReceivePFrameWithMetadata,
        "ReceivePFrameWithMetadata",
        ReceiverService=receiver_service,
    )

    tray.Add(WaitForNumberOfPFrames,
             "WaitForNumberOfPFrames",
             SuspendAfterTheseNSides=nsides_to_wait_for,
             NPixelForNSide=npixel_for_nside)

    tray.Add(uncompress,
             "GCD_uncompress",
             keep_compressed=False,
             base_path=config.base_GCD_path)
    tray.AddModule(get_reco_losses_inside, "get_reco_losses_inside")

    tray.Add("I3Writer",
             "writer",
             Filename=filename_out,
             SkipKeys=['__msgid', '__msgtopic'],
             Streams=[
                 icetray.I3Frame.Geometry, icetray.I3Frame.Calibration,
                 icetray.I3Frame.DetectorStatus, icetray.I3Frame.DAQ,
                 icetray.I3Frame.Stream('p'), icetray.I3Frame.Physics
             ])

    if delete_from_queue:
        # only acknowledge receipt (i.e. delete from the queue) if requested
        tray.Add(AcknowledgeReceivedPFrame,
                 "AcknowledgeReceivedPFrame",
                 ReceiverService=receiver_service)

    tray.Execute()
    del tray

    del receiver_service
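A hypothetical invocation; the broker URL, topic, output filename, and nside list are placeholders, not taken from the snippet:

save_pixels(
    broker='pulsar://localhost:6650',
    auth_token=None,
    topic_in='persistent://icecube/skymap/recos',
    filename_out='event_scan_output.i3.zst',
    nsides_to_wait_for=[8, 64, 512],
)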
Example #10
def main():
    op = optparse.OptionParser()
    op.add_option(
        '-s',
        '--sequential',
        dest='sequential',
        action='store_true',
        help=
        'Force sequential file API (quick initial load, slower random access, supports compressed files)'
    )
    op.add_option(
        '-b',
        '--browsable',
        dest='browsable',
        action='store_true',
        help=
        'Force browsable file API (slower initial load, fast random access, uncompressed files)'
    )
    op.add_option('-c',
                  '--cachesize',
                  dest='cachesize',
                  type=int,
                  help='Specify a frame cache size (implies sequential API)')
    op.add_option(
        '-l',
        '--libs',
        dest='libs',
        type="string",
        action='callback',
        callback=libs_callback,
        help='Comma-separated list of additional icecube libraries to load')
    op.add_option('--colors',
                  dest='colors',
                  default=term_colors,
                  action='store',
                  type='int',
                  help='Display in (16,256) colors')
    op.add_option('--ascii',
                  dest='ascii',
                  default=False,
                  action='store_true',
                  help='Display in ascii only (unicode disabled)')
    (opts, args) = op.parse_args(sys.argv)

    if len(args) != 2:
        op.error('Need exactly one argument: a file to load')

    if opts.browsable and (opts.sequential or opts.cachesize):
        op.error('Contradictory file loading specifiers')

    if opts.libs is not None:
        print('Importing libraries...')
        for lib in opts.libs:
            __import__('icecube.' + lib)

    print('Loading file...')
    default_sequential_cache = 256
    i3file = args[1]
    stager = dataio.get_stagers()
    handle = stager.GetReadablePath(i3file)
    i3file = str(handle)
    if opts.sequential or opts.cachesize:
        opts.ensure_value('cachesize', default_sequential_cache)
        i3file = fileadaptor.I3SequentialAdaptor(dataio.I3File(i3file),
                                                 opts.cachesize)
    elif opts.browsable:
        ibf = dataio.I3BrowsableFile()
        ibf.open_file(i3file)
        i3file = fileadaptor.I3BrowsableAdaptor(ibf)
    else:
        # file type will be inferred
        i3file = fileadaptor.I3FileAdaptor(
            i3file, sequential_cache_size=default_sequential_cache)
    print('Creating GUI...')
    run_viewer(args[1], i3file, colors=opts.colors, ascii_only=opts.ascii)
Example #11
    keys += ['NChannels_1_6', 'InIce_charge_1_6', 'max_qfrac_1_6']
    keys += ['InIce_FractionContainment', 'IceTop_FractionContainment']
    keys += ['Laputop_InIce_FractionContainment',
             'Laputop_IceTop_FractionContainment']
    keys += ['Laputop', 'LaputopParams']
    keys += ['Laputop_fitstatus_ok']

    t0 = time.time()

    # Construct list of non-truncated files to process
    # icetray.set_log_level(icetray.I3LogLevel.LOG_DEBUG)
    good_file_list = []
    for test_file in args.files:
        try:
            test_tray = I3Tray()
            test_tray.context['I3FileStager'] = dataio.get_stagers(
                staging_directory=os.environ['_CONDOR_SCRATCH_DIR'])
            test_tray.Add('I3Reader', FileName=test_file)
            test_tray.Add(uncompress, 'uncompress')
            test_tray.Execute()
            test_tray.Finish()
            good_file_list.append(test_file)
        except:
            print('file {} is truncated'.format(test_file))
            pass
    del test_tray

    tray = I3Tray()
    tray.context['I3FileStager'] = dataio.get_stagers(
        staging_directory=os.environ['_CONDOR_SCRATCH_DIR'])
    # icetray.logging.log_debug('good_file_list = {}'.format(good_file_list))
    tray.Add('I3Reader', FileNameList=good_file_list)
Example #12
def process_single_stream(cfg, infile, outfile):
    click.echo('Input: {}'.format(infile))
    hybrid_mode = (cfg['clsim_hybrid_mode']
                   and cfg['icemodel'].lower() != 'spicelea')
    ignore_muon_light = (cfg['clsim_ignore_muon_light']
                         and cfg['clsim_hybrid_mode'])
    click.echo('UseGPUs: {}'.format(cfg['clsim_usegpus']))
    click.echo('IceModel: {}'.format(cfg['icemodel']))
    if cfg['icemodel_location'] is not None:
        click.echo('IceModelLocation: {}'.format(cfg['icemodel_location']))
    click.echo('DomOversize {}'.format(cfg['clsim_dom_oversize']))
    click.echo('UnshadowedFraction: {0:.2f}'.format(
        cfg['clsim_unshadowed_fraction']))
    click.echo('HybridMode: {}'.format(hybrid_mode))
    click.echo('IgnoreMuonLight: {}'.format(ignore_muon_light))
    click.echo('KeepMCPE: {}'.format(cfg['clsim_keep_mcpe']))
    click.echo('Output: {}'.format(outfile))

    tray = I3Tray()
    tray.context['I3FileStager'] = dataio.get_stagers()
    random_services, _ = create_random_services(
        dataset_number=cfg['dataset_number'],
        run_number=cfg['run_number'],
        seed=cfg['seed'],
        n_services=process_single_stream.n_streams)

    random_service = random_services[process_single_stream.i_th_stream]
    tray.context['I3RandomService'] = random_service
    tray.Add('I3Reader', FilenameList=[cfg['gcd'], infile])

    if hybrid_mode:
        cascade_tables = segments.LoadCascadeTables(
            IceModel=cfg['icemodel'], TablePath=cfg['spline_table_dir'])
    else:
        cascade_tables = None

    if cfg['clsim_usegpus']:
        use_gpus = True
        use_cpus = False
    else:
        # fall back to CPUs when GPUs are not requested
        use_gpus = False
        use_cpus = True

    if 'additional_clsim_params' in cfg:
        additional_clsim_params = cfg['additional_clsim_params']
    else:
        additional_clsim_params = {}

    if not cfg['clsim_input_is_sliced']:
        MCTreeName = "I3MCTree"
        MMCTrackListName = "MMCTrackList"
    else:
        MCTreeName = "I3MCTree_sliced"
        MMCTrackListName = None
    #use_gpus=False
    #use_cpus=True
    tray.AddSegment(segments.PropagatePhotons,
                    "PropagatePhotons",
                    GCDFile=cfg['gcd'],
                    RandomService=random_service,
                    KeepIndividualMaps=cfg['clsim_keep_mcpe'],
                    IceModel=cfg['icemodel'],
                    IceModelLocation=cfg['icemodel_location'],
                    UnshadowedFraction=cfg['clsim_unshadowed_fraction'],
                    IgnoreMuons=ignore_muon_light,
                    HybridMode=hybrid_mode,
                    UseGPUs=use_gpus,
                    UseAllCPUCores=use_cpus,
                    DOMOversizeFactor=cfg['clsim_dom_oversize'],
                    CascadeService=cascade_tables,
                    **additional_clsim_params)

    # tray.AddSegment(clsim.I3CLSimMakeHits, "makeCLSimHits",
    #     GCDFile = cfg['gcd'],
    #     PhotonSeriesName = cfg['photonSeriesName'],
    #     MCTreeName = MCTreeName,
    #     MMCTrackListName = MMCTrackListName,
    #     RandomService = random_service,
    #     MCPESeriesName = cfg['mcpe_series_map'],
    #     UnshadowedFraction = cfg['clsim_unshadowed_fraction'],
    #     UseGPUs = use_gpus,
    #     UseCPUs = use_cpus,
    #     IceModelLocation = os.path.expandvars("$I3_BUILD/ice-models/resources/models/spice_lea"),
    #     )

    outfile = outfile.replace(' ', '0')
    tray.AddModule("I3Writer",
                   "writer",
                   Filename=outfile,
                   Streams=[
                       icetray.I3Frame.DAQ, icetray.I3Frame.Physics,
                       icetray.I3Frame.Stream('S'),
                       icetray.I3Frame.Stream('M')
                   ])

    tray.Execute()
    del tray
Example #13
def main(cfg, run_number, scratch):
    with open(cfg, 'r') as stream:
        if int(yaml.__version__[0]) < 5:
            # backwards compatibility for yaml versions before version 5
            cfg = yaml.load(stream)
        else:
            cfg = yaml.full_load(stream)
    cfg['run_number'] = run_number
    cfg['run_folder'] = get_run_folder(run_number)
    infile = cfg['infile_pattern'].format(**cfg)
    infile = infile.replace(' ', '0')
    infile = infile.replace('Level0.{}'.format(cfg['previous_step']),
                            'Level0.{}'.format(cfg['previous_step'] % 10))

    if scratch:
        outfile = cfg['scratchfile_pattern'].format(**cfg)
    else:
        outfile = cfg['outfile_pattern'].format(**cfg)
    outfile = outfile.replace('Level0.{}'.format(cfg['step']),
                              'Level0.{}'.format(cfg['step'] % 10))
    outfile = outfile.replace(' ', '0')
    outfile = outfile.replace('2012_pass2', 'pass2')
    print('Outfile != $FINAL_OUT clean up for crashed scripts not possible!')

    tray = I3Tray()

    tray.context['I3FileStager'] = dataio.get_stagers()

    random_services, run_id = create_random_services(
        dataset_number=cfg['dataset_number'],
        run_number=cfg['run_number'],
        seed=cfg['seed'],
        n_services=1)
    random_service = random_services[0]
    tray.context['I3RandomService'] = random_service

    tray.Add('I3Reader', FilenameList=[cfg['gcd_pass2'], infile])

    if run_number < cfg['det_pass2_keep_all_upto']:
        cfg['det_keep_mc_hits'] = True
        cfg['det_keep_propagated_mc_tree'] = True
        cfg['det_keep_mc_pulses'] = True

    tray.AddSegment(segments.DetectorSim,
                    "Detector5Sim",
                    RandomService='I3RandomService',
                    RunID=run_id,
                    GCDFile=cfg['gcd_pass2'],
                    KeepMCHits=cfg['det_keep_mc_hits'],
                    KeepPropagatedMCTree=cfg['det_keep_propagated_mc_tree'],
                    KeepMCPulses=cfg['det_keep_mc_pulses'],
                    SkipNoiseGenerator=cfg['det_skip_noise_generation'],
                    LowMem=cfg['det_low_mem'],
                    InputPESeriesMapName=MCPE_SERIES_MAP,
                    BeaconLaunches=cfg['det_add_beacon_launches'],
                    FilterTrigger=cfg['det_filter_trigger'])
    tray.AddModule("I3Writer",
                   "EventWriter",
                   filename=outfile,
                   Streams=[
                       icetray.I3Frame.DAQ, icetray.I3Frame.Physics,
                       icetray.I3Frame.TrayInfo, icetray.I3Frame.Simulation
                   ])
    tray.AddModule("TrashCan", "the can")
    tray.Execute()
    tray.Finish()
Example #14
def producer(eventURL, broker, auth_token, topic, metadata_topic_base, event_name, nside, area_center_nside=None, area_center_pixel=None, area_num_pixels=None, pixel_list=None):
    """
    Handle incoming events and perform a full scan.
    """
    if (area_center_nside is not None or area_center_pixel is not None or area_num_pixels is not None) and \
       (area_center_nside is None or area_center_pixel is None or area_num_pixels is None):
       raise RuntimeError("You have to either set none of the three options area_center_nside,area_center_pixel,area_num_pixels or all of them")

    try:
        # figure out if this is supposed to be JSON or .i3:
        url_file_path = urlparse(eventURL).path
        file_name, file_ext = os.path.splitext(url_file_path)
        if file_ext == '.json':
            file_format = 'json'
        elif file_ext == '.i3':
            file_format = 'i3'
        elif file_ext in ['.zst', '.gz', '.bz2', '.xz']:
            file_name, file_ext2 = os.path.splitext(file_name)
            if file_ext2 == '.i3':
                file_format = 'i3'
            else:
                raise RuntimeError("File format {}.{} is unknown (url={})".format(file_ext2, file_ext, eventURL))
        else:
            raise RuntimeError("File format {} is unknown (url={})".format(file_ext, eventURL))
            
        # load JSON
        if file_format == 'json':
            # get a file stager
            stagers = dataio.get_stagers()

            print('Skymap scanner is starting. Reading event information from JSON blob at `{0}`.'.format(eventURL))

            print("reading JSON blob from {0}".format( eventURL ))
            json_blob_handle = stagers.GetReadablePath( eventURL )
            if not os.path.isfile( str(json_blob_handle) ):
                print("problem reading JSON blob from {0}".format( eventURL ))
                raise RuntimeError("problem reading JSON blob from {0}".format( eventURL ))
            with open( str(json_blob_handle) ) as json_data:
                json_event = json.load(json_data)
            del json_blob_handle

            # extract the JSON message
            print('Event loaded. I am extracting it now...')
            GCDQp_packet = extract_json_message(json_event)
            
            # Note: the online messages do not use pulse cleaning, so we will need to work with
            # "SplitUncleanedInIcePulses" instead of "SplitInIcePulses" as the P-frame pulse map.
            # (Setting `pulsesName` will make sure "SplitInIcePulses" gets created and just points
            # to "SplitUncleanedInIcePulses".)
            pulsesName="SplitUncleanedInIcePulses"
        else: # file_format == 'i3'
            print('Skymap scanner is starting. Reading event information from i3 file at `{0}`.'.format(eventURL))
            GCDQp_packet = extract_i3_file( eventURL )

            pulsesName="SplitInIcePulses"
        
        # rename frame objects we might recreate
        GCDQp_packet = clean_old_frame_objects(GCDQp_packet)
        
        # (re-)create the online alert information
        GCDQp_packet = calculate_online_alert_dict(GCDQp_packet, pulsesName=pulsesName)
        
        # This step will create missing frame objects if necessary.
        print('Event extracted. I will now perform some simple tasks like the HESE veto calculation...')
        GCDQp_packet = prepare_frames(GCDQp_packet, pulsesName=pulsesName)
        print('Done.')
        
        
        # get the event id
        event_id = get_event_id(GCDQp_packet)

        # get the event time
        time = get_event_time(GCDQp_packet)

        print("Event `{0}` happened at `{1}`.".format(event_id, str(time)))

        print("Publishing events to   {}".format(topic))
        print("Publishing metadata to {}<...>".format(metadata_topic_base))

        print("Submitting scan...")
        send_scan(
            frame_packet=GCDQp_packet,
            broker=broker, 
            auth_token=auth_token,
            topic=topic,
            metadata_topic_base=metadata_topic_base,
            event_name=event_name,
            nside=nside,
            area_center_nside=area_center_nside,
            area_center_pixel=area_center_pixel,
            area_num_pixels=area_num_pixels,
            pixel_list=pixel_list
            )

        print("All scans for `{0}` are submitted.".format(event_id))
    except:
        exception_message = str(sys.exc_info()[0])+'\n'+str(sys.exc_info()[1])+'\n'+str(sys.exc_info()[2])
        print('Something went wrong while scanning the event (python caught an exception): ```{0}```'.format(exception_message))
        raise # re-raise exceptions
Example #15
def extract_i3_file(url, stop_after_first_p_frame=True):
    possible_baseline_gcds = glob.glob(config.base_GCD_path + "/*.i3")

    # get a file stager
    stagers = dataio.get_stagers()

    with TemporaryDirectory() as temp_dir:
        uncompressed_filename = os.path.join(temp_dir, "working.i3")

        blob_handle = stagers.GetReadablePath(url)
        if not os.path.isfile(str(blob_handle)):
            print("problem reading i3 file from {0}".format(url))
            raise RuntimeError("problem reading i3 file from {0}".format(url))

        print("Uncompressing {} to {}".format(str(blob_handle),
                                              uncompressed_filename))

        _, file_ext = os.path.splitext(str(blob_handle))

        if file_ext == '.zst':
            ret = subprocess.call([
                '/usr/bin/zstd', '-d',
                str(blob_handle), '-o', uncompressed_filename
            ])
            if ret != 0:
                raise RuntimeError(
                    "Could not decompress .zst file {}".format(url))
        elif file_ext == '.bz2':
            with open(uncompressed_filename, 'wb') as f:
                ret = subprocess.call(
                    ['/bin/bzip2', '-k', '-d', '--stdout',
                     str(blob_handle)],
                    stdout=f)
            if ret != 0:
                raise RuntimeError(
                    "Could not decompress .bz2 file {}".format(url))
        elif file_ext == '.xz':
            with open(uncompressed_filename, 'wb') as f:
                ret = subprocess.call(
                    ['/usr/bin/xz', '-k', '-d', '--stdout',
                     str(blob_handle)],
                    stdout=f)
            if ret != 0:
                raise RuntimeError(
                    "Could not decompress .xz file {}".format(url))
        elif file_ext == '.gz':
            with open(uncompressed_filename, 'wb') as f:
                ret = subprocess.call(
                    ['/bin/gzip', '-k', '-d', '--stdout',
                     str(blob_handle)],
                    stdout=f)
            if ret != 0:
                raise RuntimeError(
                    "Could not decompress .gz file {}".format(url))
        else:
            shutil.copyfile(str(blob_handle), uncompressed_filename)

        del blob_handle

        print("Reading the original file to judge its size..")
        frame_packet = extract_i3_file_gcd_diff(
            uncompressed_filename,
            baseline_gcd=None,
            stop_after_first_p_frame=stop_after_first_p_frame)
        original_size = 0
        for frame in frame_packet:
            original_size += len(frame.dumps())
        del frame_packet
        print("Done. It is {}MiB in size".format(original_size / 1024 / 1024))

        print(
            "Applying each available GCD diff to this undiffed data to see which one works best..."
        )
        serialized_sizes = {}
        with tqdm(possible_baseline_gcds) as pbar:
            for baseline_gcd in pbar:
                _, baseline_gcd_file = os.path.split(baseline_gcd)
                pbar.set_postfix(GCD_file=baseline_gcd_file)

                frame_packet = extract_i3_file_gcd_diff(
                    uncompressed_filename,
                    baseline_gcd=baseline_gcd_file,
                    stop_after_first_p_frame=stop_after_first_p_frame)

                this_size = 0
                for frame in frame_packet:
                    this_size += len(frame.dumps())

                serialized_sizes[this_size] = (baseline_gcd, frame_packet)

    sizes = sorted(serialized_sizes.keys())
    _, best_baseline_gcd = os.path.split(serialized_sizes[sizes[0]][0])
    best_frame_packet = serialized_sizes[sizes[0]][1]
    del serialized_sizes

    print(
        "Best GCD baseline file for this data is {} and yields a size of {}MiB. The worst one is {} kiB larger."
        .format(best_baseline_gcd, sizes[0] / 1024 / 1024,
                (sizes[-1] - sizes[0]) / 1024))

    return best_frame_packet
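For illustration, a call with a placeholder URL; the function stages and decompresses the file, then returns the frame packet diffed against the best-matching baseline GCD:

frame_packet = extract_i3_file('https://example.org/events/event_12345.i3.zst')
print('Extracted {} frames'.format(len(frame_packet)))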
Example #16
def main(cfg, run_number, scratch):
    with open(cfg, 'r') as stream:
        cfg = yaml.full_load(stream)
    cfg['run_number'] = run_number
    cfg['run_folder'] = get_run_folder(run_number)
    infile = cfg['infile_pattern'].format(**cfg)
    infile = infile.replace(' ', '0')
    infile = infile.replace('Level0.{}'.format(cfg['previous_step']),
                            'Level0.{}'.format(cfg['previous_step'] % 10))

    if scratch:
        outfile = cfg['scratchfile_pattern'].format(**cfg)
    else:
        outfile = cfg['outfile_pattern'].format(**cfg)
    outfile = outfile.replace('Level0.{}'.format(cfg['step']),
                              'Level0.{}'.format(cfg['step'] % 10))
    outfile = outfile.replace(' ', '0')
    outfile = outfile.replace('2012_pass2', 'pass2')
    print('Outfile != $FINAL_OUT clean up for crashed scripts not possible!')

    tray = I3Tray()

    tray.context['I3FileStager'] = dataio.get_stagers()

    random_services, run_id = create_random_services(
        dataset_number=cfg['dataset_number'],
        run_number=cfg['run_number'],
        seed=cfg['seed'],
        n_services=1,
        use_gslrng=cfg['random_service_use_gslrng'])
    random_service = random_services[0]
    tray.context['I3RandomService'] = random_service

    tray.Add('I3Reader', FilenameList=[cfg['gcd_pass2'], infile])

    """
    Perform Detector simulation:
        https://code.icecube.wisc.edu/projects/icecube/browser/IceCube/
        meta-projects/combo/stable/simprod-scripts/python/segments/
        DetectorSim.py
    """

    # Combine MCPEs from both detectors
    if cfg['det_is_genie_simulation']:
        tray.Add("Rename", Keys=[MCPE_SERIES_MAP, 'GenieMCPEs'])
        tray.Add("I3CombineMCPE",
                 InputResponses=["GenieMCPEs", "BackgroundMCPEs"],
                 OutputResponse=MCPE_SERIES_MAP)
        tray.Add("Delete", Keys=['BackgroundMCPEs', 'GenieMCPEs'])
    if cfg['det_is_icetop_simulation']:
        tray.Add("Rename", Keys=[MCPE_SERIES_MAP, 'InIceMCPEs'])
        tray.Add("I3CombineMCPE",
                 InputResponses=["IceTopMCPEs", "InIceMCPEs"],
                 OutputResponse=MCPE_SERIES_MAP)
        tray.Add("Delete", Keys=['InIceMCPEs', 'IceTopMCPEs'])

    # Sample a different efficiency
    sample_eff = cfg['det_dom_eff_resmapling_sample_efficiency']
    generated_eff = cfg['det_dom_eff_resmapling_generated_efficiency']
    if sample_eff > 0.0:
        if sample_eff > generated_eff:
            msg = 'Cannot upscale from GeneratedEfficiency %s to '
            msg += 'SampleEfficiency %s'
            icecube.icetray.logging.log_fatal(
                msg % (generated_eff, sample_eff))
        tray.AddSegment(segments.MultiDomEffSample, "resample",
                        GeneratedEfficiency=generated_eff,
                        SampleEfficiencies=[sample_eff],
                        InputSeriesName=MCPE_SERIES_MAP,
                        DeleteOriginalSeries=True,
                        OverwriteOriginalSeries=True)

    if run_number < cfg['det_keep_all_upto']:
        cfg['det_keep_mc_hits'] = True
        cfg['det_keep_propagated_mc_tree'] = True
        cfg['det_keep_mc_pulses'] = True

    tray.AddSegment(segments.DetectorSim, "DetectorSim",
                    RandomService='I3RandomService',
                    RunID=run_id,
                    GCDFile=cfg['gcd_pass2'],
                    KeepMCHits=cfg['det_keep_mc_hits'],
                    KeepPropagatedMCTree=cfg['det_keep_propagated_mc_tree'],
                    KeepMCPulses=cfg['det_keep_mc_pulses'],
                    SkipNoiseGenerator=cfg['det_skip_noise_generation'],
                    LowMem=cfg['det_low_mem'],
                    InputPESeriesMapName=MCPE_SERIES_MAP,
                    BeaconLaunches=cfg['det_add_beacon_launches'],
                    FilterTrigger=cfg['det_filter_trigger'],
                    TimeShiftSkipKeys=[
                        "SnowstormParameterRanges",
                        "SnowstormParameters",
                        "SnowstormParametrizations",
                        "SnowstormProposalDistribution",
                        "WavelengthAcceptance",
                        "WavelengthGenerationBias",
                        "LeptonInjectorProperties",
                        "EventProperties",
                        "MediumProperties",
                    ],
                    )

    if not cfg['det_keep_mc_pulses']:
        tray.Add("Delete", Keys=['I3MCPulseSeriesMapPrimaryIDMap'])

    if not cfg['det_keep_mc_hits']:
        tray.Add("Delete", Keys=['I3MCPESeriesMapParticleIDMap'])

    if cfg['det_remove_keys_from_m_frame']:
        tray.Add("Delete", Keys=cfg['det_remove_keys_from_m_frame'])

    if cfg['det_convert_to_linear_tree']:
        tray.AddModule(segments.ConvertToLinearizedMCTree,
                       "lineartree", streams=[icetray.I3Frame.DAQ])

    tray.AddModule("I3Writer", "EventWriter",
                   filename=outfile,
                   Streams=[icetray.I3Frame.DAQ,
                            icetray.I3Frame.Physics,
                            icetray.I3Frame.TrayInfo,
                            icetray.I3Frame.Simulation,
                            icetray.I3Frame.Stream('m'),
                            icetray.I3Frame.Stream('M')])
    tray.AddModule("TrashCan", "the can")
    tray.Execute()
    tray.Finish()