def gulliver_commons(tray, params, photonics=True, photosplines=True):
    """Install the infrastructure shared by all Gulliver reconstructions.

    Adds a random-number service plus photonics table and/or photospline
    services to *tray*, with table locations taken from *params*.

    :param tray: the I3Tray to add services to
    :param params: object carrying the photonics table/driver path attributes
    :param photonics: install the RNG and the cascade table-photonics service
    :param photosplines: install the cascade and muon photospline services
    """
    for lib in ("libphotonics-service", "libphys-services"):
        load(lib, False)

    if photonics:
        tray.AddService('I3GSLRandomServiceFactory', 'I3RandomService')
        table_opts = dict(
            PhotonicsTopLevelDirectory=params.PhotonicsTabledirCscd,
            DriverFileDirectory=params.PhotonicsDriverdirCscd,
            PhotonicsLevel2DriverFile=params.PhotonicsDriverfileCscd,
            PhotonicsTableSelection=2,
            ServiceName='CascadeTablePhotonicsService',
        )
        tray.AddService('I3PhotonicsServiceFactory', 'cscd_table_photonics',
                        **table_opts)

    if photosplines:
        # One spline service each for cascades and muons; table attribute
        # names on `params` differ only by the Cscd/Muon suffix.
        for inst, tag, service in (
                ('cscd_spline_photonics', 'Cscd', 'CascadeSplinePhotonicsService'),
                ('muon_spline_photonics', 'Muon', 'MuonSplinePhotonicsService')):
            tray.AddService(
                'I3PhotoSplineServiceFactory', inst,
                AmplitudeTable=getattr(params, 'PhotoSplineAmplitudeTable' + tag),
                TimingTable=getattr(params, 'PhotoSplineTimingTable' + tag),
                TimingSigma=0.0,  # No convolution for now.
                ServiceName=service)
def gulliver_commons(tray, params, photonics=True, photosplines=True):
    """Set up the common Gulliver reconstruction services.

    Random number service plus photonics/photospline table services are
    added to *tray*; table locations come from attributes of *params*.
    """
    load("libphotonics-service", False)
    load("libphys-services", False)

    if photonics:
        # GSL-backed RNG used by the table-based reconstructions.
        tray.AddService('I3GSLRandomServiceFactory', 'I3RandomService')
        tray.AddService(
            'I3PhotonicsServiceFactory', 'cscd_table_photonics',
            PhotonicsTopLevelDirectory=params.PhotonicsTabledirCscd,
            DriverFileDirectory=params.PhotonicsDriverdirCscd,
            PhotonicsLevel2DriverFile=params.PhotonicsDriverfileCscd,
            PhotonicsTableSelection=2,
            ServiceName='CascadeTablePhotonicsService')

    if photosplines:
        no_smearing = dict(TimingSigma=0.0)  # No convolution for now.
        tray.AddService(
            'I3PhotoSplineServiceFactory', 'cscd_spline_photonics',
            AmplitudeTable=params.PhotoSplineAmplitudeTableCscd,
            TimingTable=params.PhotoSplineTimingTableCscd,
            ServiceName='CascadeSplinePhotonicsService',
            **no_smearing)
        tray.AddService(
            'I3PhotoSplineServiceFactory', 'muon_spline_photonics',
            AmplitudeTable=params.PhotoSplineAmplitudeTableMuon,
            TimingTable=params.PhotoSplineTimingTableMuon,
            ServiceName='MuonSplinePhotonicsService',
            **no_smearing)
def CascadeLlhVertexFit(tray, name, CascadeLlh, Pulses='TWNFEMergedPulses',
                        If=lambda frame: True):
    """Run a CLast first-guess vertex fit and, if *CascadeLlh* is set,
    the CscdLlh vertex fit seeded from it.

    :param CascadeLlh: when true, also run the I3CscdLlhModule fit
    :param Pulses: name of the reco-pulse series to fit
    :returns: list of frame keys associated with this fit
    """
    icetray.load('clast', False)
    icetray.load('cscd-llh', False)

    # Settings from std-processing/releases/11-02-00/scripts/IC79/level2_DoCascadeReco.py
    free_param = '1.0, 0.0, 0.0, false'
    llh_fitter = icetray.module_altconfig(
        'I3CscdLlhModule',
        InputType='RecoPulse',
        MinHits=5,
        Minimizer='Powell',
        PDF='UPandel',
        ParamT=free_param,
        ParamX=free_param,
        ParamY=free_param,
        ParamZ=free_param,
    )

    seed = name + '_CLastSeed'
    tray.AddModule('I3CLastModule', name + 'Clast',
                   Name=seed,
                   InputReadout=Pulses,
                   If=If,
                   MinHits=5)

    if CascadeLlh:
        tray.AddSegment(llh_fitter, name + 'CscdLlh',
                        RecoSeries=Pulses,
                        SeedKey=seed,
                        ResultName=name + "_CscdLlh",
                        If=If)

    # NOTE(review): the returned keys use bare `name`, while the llh result
    # is written under name + "_CscdLlh" -- verify callers expect this.
    return [seed, seed + 'Params', name, name + 'Params']
def CosmicRayFilter(tray, name, Pulses=None, If=lambda f: True):
    """Run the cosmic-ray filter on the IceTop split.

    :param tray: the I3Tray to add the filter module to
    :param name: base name for the module instance
    :param Pulses: IceTop pulse-mask key; defaults to
        ``filter_globals.CleanedHLCTankPulses`` when not given
    :param If: frame condition under which the filter runs
    """
    from icecube.filterscripts import filter_globals
    icetray.load('filterscripts', False)
    # Resolve the default here rather than in the signature: the original
    # default expression referenced filter_globals at function-definition
    # time, before the local import above could run, which only works if
    # the module happens to be imported at file scope as well.
    if Pulses is None:
        Pulses = filter_globals.CleanedHLCTankPulses
    # Perform CR filtering on the IceTopSplit
    tray.AddModule('I3FilterModule<I3CosmicRayFilter_13>',
                   name + '_CosmicRayFilter',
                   TriggerEvalList=[
                       filter_globals.inicesmttriggered,
                       filter_globals.icetopsmttriggered
                   ],
                   DecisionName=filter_globals.CosmicRayFilter,
                   TriggerKey=filter_globals.triggerhierarchy,
                   IceTopPulseMaskKey=Pulses,
                   If=If)
def load(libname):
    """Load the library (via dlopen) into the running tray.

    This is primarily used for libraries that don't have python bindings
    (eventually all libraries should have at least stub python bindings,
    making them loadable via the standard python *import*, and this
    should be obsolete).

    :param libname: the name of the file to load including the leading
        ``lib``, but *not* including the trailing ``.so`` or ``.dylib``,
        eg::

            load("libdataio")
            load("libexamples")
    """
    try:
        icetray.load(libname)
    except Exception:
        # Narrowed from a bare ``except`` so KeyboardInterrupt/SystemExit
        # are not swallowed; log_fatal aborts on any real load failure.
        icetray.logging.log_fatal("Failed to load library (%s): %s" \
            % (sys.exc_info()[0], sys.exc_info()[1]), "I3Tray")
def load(libname):
    """Load the library (via dlopen) into the running tray.

    This is primarily used for libraries that don't have python bindings
    (eventually all libraries should have at least stub python bindings,
    making them loadable via the standard python *import*, and this
    should be obsolete).

    :param libname: the name of the file to load including the leading
        ``lib``, but *not* including the trailing ``.so`` or ``.dylib``,
        eg::

            load("libdataio")
            load("libexamples")
    """
    try:
        icetray.load(libname)
    except Exception:
        # Narrowed from a bare ``except`` so KeyboardInterrupt/SystemExit
        # are not swallowed; log_fatal aborts on any real load failure.
        icetray.logging.log_fatal("Failed to load library (%s): %s" \
            % (sys.exc_info()[0], sys.exc_info()[1]), "I3Tray")
from icecube import icetray icetray.load("PulseCore", False)
print("output dir is %s" % outdir)
# NOTE(review): '%' binds tighter than '+', so this formats outdir first and
# then appends outfile -- probably meant "%s" % (outdir + outfile). The
# printed text is the same either way, so left as-is.
print("output file is %s" % outdir + outfile)
########################
from I3Tray import *
from os.path import expandvars
import os
import sys
from icecube import icetray, dataclasses, dataio, phys_services
from icecube.sim_services import bad_dom_list_static
from icecube import WaveCalibrator
#from icecube import DomTools
icetray.load("DomTools", False)
from icecube import wavedeform

# Build the tray and read the input file (infile/outdir/options are defined
# earlier in this script, outside this chunk).
tray = I3Tray()
tray.AddModule("I3Reader", "reader", Filename=infile)

if options.DYNAMICBADDOMS:
    # Query the live bad-DOM database instead of only the static list.
    dbserver = "dbs2.icecube.wisc.edu"
    username = "******"
    from icecube.sim_services.bad_dom_list_static import IC86_static_bad_dom_list, IC86_static_bad_dom_list_HLC
    xmlfile = os.path.expandvars(
        '$I3_SRC') + '/BadDomList/resources/scripts/QueryConfiguration.xml'
    staticBadDomListOnly = False
from icecube import icetray icetray.load('double-muon', False)
def main():
    """Generate MuonGun bundles, propagate muons and photons, simulate the
    detector, run the VHESelfVeto-based veto, and write passing events."""
    #arguement parser
    parser = argparse.ArgumentParser(
        description='Generates muons in IceCube with varying multiplicity')
    parser.add_argument('--nseed', default=1, type=int,
                        help='seed for randomization')
    #muongun args
    parser.add_argument('--model', default='GaisserH4a_atmod12_SIBYLL',
                        type=str)
    parser.add_argument('--multiplicity', default=1000, type=int,
                        help='Maximum muon bundle multiplcity')
    parser.add_argument('--emin', default=1e1, type=float,
                        help='Muon min energy (GeV)')
    parser.add_argument('--emax', default=1e6, type=float,
                        help='Muon max energy (GeV)')
    parser.add_argument('--nevents', default=100, type=int,
                        help='Number of events')
    parser.add_argument('--out', default='muongun.i3.gz', help='Output file')
    parser.add_argument('--runnum', default=1, type=int,
                        help='Run number for this sim')
    #detector args
    parser.add_argument(
        '--gcd',
        default=os.path.expandvars(
            '$I3_TESTDATA/sim/GeoCalibDetectorStatus_IC86.55697_corrected_V2.i3.gz'
        ),
        type=str,
        help='gcd file')
    # NOTE(review): with action="store_false" and default=False, args.hybrid
    # is False whether or not --no-hybrid is passed; the default was very
    # likely meant to be True. Left unchanged -- confirm intent before fixing.
    parser.add_argument(
        '--no-hybrid',
        action="store_false",
        default=False,
        dest='hybrid',
        help='do not perform a hybrid simulation (i.e. use clsim only)')
    parser.add_argument('--use-gpu', action='store_true', default=False,
                        help='simulate using GPUs instead of CPU cores')
    args = parser.parse_args()

    #setup muongun parameters
    gcdFile = args.gcd
    model = load_model(args.model)
    model.flux.max_multiplicity = args.multiplicity
    surface = Cylinder(1600 * I3Units.m, 800 * I3Units.m,
                       dataclasses.I3Position(0, 0, 0))
    surface_det = MuonGun.ExtrudedPolygon.from_file(gcdFile)
    spectrum = OffsetPowerLaw(2, 0 * I3Units.TeV, args.emin, args.emax)
    #spectrum = OffsetPowerLaw(2.65, 0*I3Units.TeV, args.emin, args.emax)
    generator = StaticSurfaceInjector(surface, model.flux, spectrum,
                                      model.radius)

    #setup reconstruction parameters
    icetray.load('VHESelfVeto')
    pulses = 'InIcePulses'
    HLCpulses = 'HLCInIcePulses'

    #setup I3Tray
    tray = I3Tray()
    tray.context['I3RandomService'] = phys_services.I3GSLRandomService(
        seed=args.nseed)

    #generate events
    tray.AddSegment(GenerateBundles, 'MuonGenerator',
                    Generator=generator,
                    NEvents=args.nevents,
                    GCDFile=gcdFile)

    #propagate particles
    tray.Add(segments.PropagateMuons, 'PropagateMuons',
             RandomService='I3RandomService',
             InputMCTreeName="I3MCTree",
             OutputMCTreeName="I3MCTree")
    tray.Add(segments.PropagatePhotons, 'PropagatePhotons',
             RandomService='I3RandomService',
             HybridMode=args.hybrid,
             MaxParallelEvents=100,
             UseAllCPUCores=True,
             UseGPUs=args.use_gpu)

    #detector stuff
    tray.Add(DetectorSim, "DetectorSim",
             RandomService='I3RandomService',
             RunID=args.runnum,
             KeepPropagatedMCTree=True,
             KeepMCHits=True,
             KeepMCPulses=True,
             SkipNoiseGenerator=True,
             GCDFile=gcdFile,
             InputPESeriesMapName="I3MCPESeriesMap")
    #tray.Add(header, streams=[icetray.I3Frame.DAQ])
    #tray.Add("I3NullSplitter")

    #clean the pulses
    tray.AddModule('I3MuonGun::WeightCalculatorModule', 'Weight',
                   Model=model,
                   Generator=generator)
    tray.AddModule('I3LCPulseCleaning', 'cleaning',
                   OutputHLC=HLCpulses,
                   OutputSLC='',
                   Input=pulses)

    #now do the veto
    from icecube.filterscripts import filter_globals
    icetray.load("filterscripts", False)
    icetray.load("cscd-llh", False)
    tray.Add(find_primary)
    tray.Add(todet, surface=surface_det)
    # Charge and veto on the uncleaned ("dirty") pulses...
    tray.AddModule('HomogenizedQTot', 'qtot_totalDirty',
                   Pulses=pulses,
                   Output='HomogenizedQTotDirty',
                   If=lambda frame: frame.Has('EnteringMuon_0'))
    tray.AddModule("VHESelfVeto", 'selfveto_3Dirty',
                   VetoThreshold=3,
                   VertexThreshold=3,
                   pulses=pulses,
                   OutputBool="VHESelfVeto_3Dirty",
                   OutputVertexPos="VHESelfVetoVertexPos_3Dirty",
                   OutputVertexTime="VHESelfVetoVertexTime_3Dirty",
                   If=lambda frame: frame.Has('EnteringMuon_0'))
    # ...and again on the HLC-cleaned pulses.
    tray.AddModule('HomogenizedQTot', 'qtot_totalClean',
                   Pulses=HLCpulses,
                   Output='HomogenizedQTotClean',
                   If=lambda frame: frame.Has('EnteringMuon_0'))
    tray.AddModule("VHESelfVeto", 'selfveto_3Clean',
                   VetoThreshold=3,
                   VertexThreshold=3,
                   pulses=HLCpulses,
                   OutputBool="VHESelfVeto_3Clean",
                   OutputVertexPos="VHESelfVetoVertexPos_3Clean",
                   OutputVertexTime="VHESelfVetoVertexTime_3Clean",
                   If=lambda frame: frame.Has('EnteringMuon_0'))
    #tray.Add(printer, If = lambda frame:frame.Has('EnteringMuon_0'))
    #tray.Add(print_gen, generator=generator)

    #write everything to file
    tray.AddModule(
        'I3Writer', 'writer',
        Streams=[
            icetray.I3Frame.Physics,
            #icetray.I3Frame.DAQ
        ],
        filename=args.out)
    tray.Execute()
    # NOTE(review): tray.Finish() is deprecated in modern icetray (Execute
    # already finalizes); harmless here.
    tray.Finish()
parser.add_option("-n", "--numevents", type = "int", action = "store", default = -1, help = "Number of events to process (default: all)", ) # get parsed args (options,args) = parser.parse_args() tray = I3Tray() icetray.load('libDomTools') icetray.load('libmcsummary') icetray.load('libneutrinoflux') icetray.load('libatmo-weights') icetray.load('libtrigger-splitter') icetray.load('libjeb-filter-2012') if len(options.GCD) > 1: runfiles = [options.GCD,options.INPUT] else: runfiles = [options.INPUT] writefile = options.OUTPUT #outfile = runfiles[1].replace('.i3','.AtmoWeightCorrect.hdf5') #tabler = hdfwriter.I3HDFTableService(outfile,1)
from icecube import icetray import os icetray.load('filter-tools', False) from icecube.filter_tools.FilterMaskMaker import FilterMaskMaker from icecube.filter_tools.PnfResplitter import PnfResplitter from icecube.filter_tools.PnfResplitter import AnalysisClientRehydrate
# the L3 cuts with the option to filter. ######################################################### from optparse import OptionParser from os.path import expandvars import time from I3Tray import * from icecube import icetray, dataclasses, dataio from icecube import tensor_of_inertia, mcsummary, improvedLinefit,SeededRTCleaning from icecube import tableio,hdfwriter from icecube.DeepCore_L3_2012 import level3_segments from icecube.std_processing import level2_globals,level2_HitCleaning,level2_DeepCoreReco icetray.load("DomTools",False) #---------------------------------------------------------------------- # Allows the parser to take lists as inputs. Cleans out any white # spaces that may be included in the names of the Official or # Personal scripts to be run. Also removes the .py which is required # to load a python module. def list_callback(option, opt, value, parser): noWhiteSpace = value.replace(' ', '') noPyAppend = noWhiteSpace.replace('.py', '') cleanList = noPyAppend.split(',') setattr(parser.values, option.dest, cleanList) def string_callback(option, opt, value, parser): lower_case = value.lower() no_hyphens = lower_case.replace('-', '')
def main(cfg, run_number, scratch):
    """Re-run the pole base processing plus the OnlineL2/GFU filters on
    pass2 Level2 files and write surviving events to *outfile*."""
    with open(cfg, 'r') as stream:
        # NOTE(review): yaml.load without an explicit Loader is deprecated
        # (PyYAML >= 5.1) and unsafe on untrusted input; consider
        # yaml.safe_load.
        cfg = yaml.load(stream)
    cfg['run_number'] = run_number
    cfg['run_folder'] = get_run_folder(run_number)
    click.echo('Keep all OnlineL2: {}'.format(cfg['OnlineL2_keep_all_L2']))
    click.echo('Keep time residuals: {}'.format(
        cfg['OnlineL2_keep_time_residuals']))

    # Derive the input file name from the configured pattern.
    infile = cfg['infile_pattern'].format(**cfg)
    infile = infile.replace(' ', '0')
    infile = infile.replace('Level0.{}'.format(cfg['previous_step']),
                            'Level2')
    # NOTE(review): this second replace searches for the same substring the
    # previous line already replaced, so it can never match -- dead code or
    # a copy/paste slip; confirm intent.
    infile = infile.replace('Level0.{}'.format(cfg['previous_step']),
                            'Level0.{}'.format(cfg['previous_step'] % 10))
    infile = infile.replace('2012_pass2', '2012')

    # Derive the output file name.
    if scratch:
        outfile = cfg['scratchfile_pattern'].format(**cfg)
    else:
        outfile = cfg['outfile_pattern'].format(**cfg)
    outfile = outfile.replace('Level0.{}'.format(cfg['step']),
                              '2017OnlineL2')
    outfile = outfile.replace(' ', '0')
    outfile = outfile.replace('2012_pass2', '2012')
    print('Outfile != $FINAL_OUT clean up for crashed scripts not possible!')

    # build tray
    tray = I3Tray()
    tray.context['I3FileStager'] = dataio.get_stagers()
    tray.Add('I3Reader',
             FilenameList=[cfg['gcd_pass2'], infile],
             SkipKeys=['I3MCTree'] if 'corsika' in infile.lower() else [])

    # drop exisiting P-Frames (will do our own splitting later)
    tray.Add(lambda f: False, Streams=[icetray.I3Frame.Physics])

    ############################################################################
    # the following modules repeat what is done in the base processing at Pole #
    ############################################################################

    # resplit Q frame
    icetray.load('trigger-splitter', False)
    tray.AddModule('I3TriggerSplitter', filter_globals.InIceSplitter,
                   TrigHierName='DSTTriggers',
                   TriggerConfigIDs=[
                       filter_globals.deepcoreconfigid,
                       filter_globals.inicesmtconfigid,
                       filter_globals.inicestringconfigid,
                       filter_globals.volumetriggerconfigid
                   ],
                   SubEventStreamName=filter_globals.InIceSplitter,
                   InputResponses=['InIceDSTPulses'],
                   OutputResponses=[filter_globals.SplitUncleanedInIcePulses],
                   WriteTimeWindow=True)

    # evaluate TriggerHierarchy
    tray.AddModule(
        "TriggerCheck_13", "BaseProc_Trigchecker",
        I3TriggerHierarchy=filter_globals.triggerhierarchy,
        InIceSMTFlag=filter_globals.inicesmttriggered,
        IceTopSMTFlag=filter_globals.icetopsmttriggered,
        InIceStringFlag=filter_globals.inicestringtriggered,
        DeepCoreSMTFlag=filter_globals.deepcoresmttriggered,
        DeepCoreSMTConfigID=filter_globals.deepcoreconfigid,
        VolumeTriggerFlag=filter_globals.volumetrigtriggered,
        SlowParticleFlag=filter_globals.slowparticletriggered,
        FixedRateTriggerFlag=filter_globals.fixedratetriggered,
    )

    # run SRT and TW Cleaning from the Base Processing
    from icecube.STTools.seededRT.configuration_services import I3DOMLinkSeededRTConfigurationService
    seededRTConfig = I3DOMLinkSeededRTConfigurationService(
        ic_ic_RTRadius=150.0 * I3Units.m,
        ic_ic_RTTime=1000.0 * I3Units.ns,
        treat_string_36_as_deepcore=False,
        useDustlayerCorrection=False,
        allowSelfCoincidence=True)
    tray.AddModule(
        'I3SeededRTCleaning_RecoPulseMask_Module', 'BaseProc_seededrt',
        InputHitSeriesMapName=filter_globals.SplitUncleanedInIcePulses,
        OutputHitSeriesMapName=filter_globals.SplitRTCleanedInIcePulses,
        STConfigService=seededRTConfig,
        SeedProcedure='HLCCoreHits',
        NHitsThreshold=2,
        MaxNIterations=3,
        Streams=[icetray.I3Frame.Physics],
        If=which_split(split_name=filter_globals.InIceSplitter))
    tray.AddModule("I3TimeWindowCleaning<I3RecoPulse>", "TimeWindowCleaning",
                   InputResponse=filter_globals.SplitRTCleanedInIcePulses,
                   OutputResponse=filter_globals.CleanedMuonPulses,
                   TimeWindow=6000 * I3Units.ns,
                   If=which_split(split_name=filter_globals.InIceSplitter))

    tray.AddSegment(linefit.simple, "BaseProc_imprv_LF",
                    inputResponse=filter_globals.CleanedMuonPulses,
                    fitName=filter_globals.muon_linefit,
                    If=which_split(split_name=filter_globals.InIceSplitter))

    # Muon LLH SimpleFitter from GulliverSuite with LineFit seed.
    tray.AddSegment(lilliput.segments.I3SinglePandelFitter,
                    filter_globals.muon_llhfit,
                    seeds=[filter_globals.muon_linefit],
                    pulses=filter_globals.CleanedMuonPulses,
                    If=which_split(split_name=filter_globals.InIceSplitter))

    # run MuonFilter
    tray.Add(MuonFilter, 'MuonFilter',
             pulses=filter_globals.CleanedMuonPulses,
             If=which_split(split_name=filter_globals.InIceSplitter))

    tray.AddModule(
        "I3FirstPulsifier", "BaseProc_first-pulsify",
        InputPulseSeriesMapName=filter_globals.CleanedMuonPulses,
        OutputPulseSeriesMapName='FirstPulseMuonPulses',
        KeepOnlyFirstCharge=False,  # default
        UseMask=False,  # default
        If=which_split(split_name=filter_globals.InIceSplitter))

    # discard events not passing the MuonFilter
    tray.Add(lambda f: f.Has(filter_globals.MuonFilter) and f[
        filter_globals.MuonFilter].value)

    # run OnlineL2 filter
    tray.Add(TimerStart, timerName='OnlineL2',
             If=which_split(split_name=filter_globals.InIceSplitter))
    tray.AddSegment(OnlineL2Filter, "OnlineL2",
                    If=which_split(split_name=filter_globals.InIceSplitter))
    tray.Add(TimerStop, timerName='OnlineL2')

    # discard events not passing the OnlineL2 filter
    tray.Add(lambda f: f.Has(filter_globals.OnlineL2Filter) and f[
        filter_globals.OnlineL2Filter].value)

    # run GFU filter
    tray.Add(TimerStart, timerName='GFU')
    tray.AddSegment(GammaFollowUp, "GFU",
                    OnlineL2SegmentName="OnlineL2",
                    KeepDetails=cfg['OnlineL2_keep_time_residuals'],
                    angular_error=True)
    tray.Add(TimerStop, timerName='GFU')

    # discard events not passing the GFU filter
    if not cfg['OnlineL2_keep_all_L2']:
        tray.Add(lambda f: f.Has(filter_globals.GFUFilter) and f[
            filter_globals.GFUFilter].value)

        # in this case, also run splineMPE with maximum settings for comparison
        TEestis = [
            'OnlineL2_SplineMPE_TruncatedEnergy_AllDOMS_Muon',
            'OnlineL2_SplineMPE_TruncatedEnergy_DOMS_Muon',
            'OnlineL2_SplineMPE_TruncatedEnergy_AllBINS_Muon',
            'OnlineL2_SplineMPE_TruncatedEnergy_BINS_Muon',
            'OnlineL2_SplineMPE_TruncatedEnergy_ORIG_Muon'
        ]
        tray.Add(CustomSplineMPE, 'SplineMPEmax',
                 configuration='max',
                 pulses='OnlineL2_CleanedMuonPulses',
                 trackSeeds=['OnlineL2_SplineMPE'],
                 enEstis=TEestis,
                 paraboloid=True)

    # For MC weighting, keep the neutrino primary.
    if 'corsika' not in infile.lower():
        # Some CORSIKA files have I3MCTree objects much larger than 100 MB.
        # Loading them takes too long... instead use
        # CorsikaWeightMap.PrimaryEnergy / PrimaryType for weighting.
        tray.AddModule(get_weighted_primary, 'get_weighted_primary',
                       MCPrimary='I3MCPrimary')

    # For MC studies, store information about the muon from CC interaction
    if 'neutrino-generator' in infile.lower():
        # store muon intersection points
        tray.Add(AddMuon)
        tray.Add(AddMuonIntersection)
        # store deposited energy in detector
        tray.Add(AddDepositedEnergy)

    tray.AddModule("I3Writer", "EventWriter",
                   filename=outfile,
                   Streams=[
                       icetray.I3Frame.DAQ,
                       icetray.I3Frame.Physics,
                       icetray.I3Frame.TrayInfo,
                       icetray.I3Frame.Simulation
                   ],
                   DropOrphanStreams=[icetray.I3Frame.DAQ])
    # NOTE(review): "TrashCan" is obsolete in modern icetray and can simply
    # be removed once the project is updated.
    tray.AddModule("TrashCan", "the can")
    tray.Execute()
    del tray
#!/usr/bin/env python import os from I3Tray import * from os.path import * import sys from icecube import tableio from icecube.tableio import I3TableWriter from icecube import icetray, dataclasses, dataio from icecube import WaveCalibrator from icecube.icetray import I3Units from icecube import STTools icetray.load("wavedeform", False) @icetray.traysegment def CalibrationAndCleaning(tray, name): class check_raw_data(icetray.I3ConditionalModule): def __init__(self, ctx): icetray.I3ConditionalModule.__init__(self, ctx) def Configure(self): pass def DAQ(self, frame): if frame.Has("InIceRawData"): self.PushFrame(frame) else: return
# NOTE(review): this chunk begins inside a function defined before it
# (presumably make_pseudoflux, judging by the call below).
    elif kind == "Standard":
        flux_components = FiveComponent(nevents=nevents, emin=6e2, emax=1e11)
    return flux_components

# Set up fluxes to weight to
target_fluxes = dict()
for k in "GaisserH3a", "GaisserH4a", "Hoerandel5":
    target_fluxes[k] = getattr(fluxes, k)()
for k in "CascadeOptimized", "Standard":
    target_fluxes[k + "5Comp"] = make_pseudoflux(k)

tray = I3Tray()

from os.path import expandvars
icetray.load('ucr-icetray')
# Build the UCR command line: binary path, input files, then the fixed
# cylinder geometry and the oversampling factor.
ucr_opts = expandvars('$I3_BUILD/bin/ucr-icetray-ucr ')
for fn in infiles:
    ucr_opts += fn + ' '
ucr_opts += ("-DEPTH=1950 -LENGTH=1600 -RADIUS=800 -over=%d" %
             (opts.oversample))
print('ucr_opts: "%s"' % ucr_opts)
tray.AddModule('I3InfiniteSource', 'driver')
tray.AddModule('I3GeneratorUCR', 'reader', EventsToIssue=int(1e9),
               UCROpts=ucr_opts)
from icecube.MuonGun import MuonPropagator, Crust, Sphere
import random
from icecube.icetray import load load('StartingTrackVetoLE', False) del load
import icecube.recclasses # import dependencies from icecube.icetray import load load("libCascadeVariables", False)
from icecube import icetray, dataclasses
from icecube import linefit, dipolefit, clast, cscd_llh, fill_ratio, tensor_of_inertia, CascadeVariables
from icecube.icetray import I3Units

icetray.load('double-muon', False)  #! This has the pulse map splitter module
'''
NB: This is basically just a reduced version of the OfflineCascadeReco
Because I don't need all of it...
https://github.com/icecube/icetray/blob/main/filterscripts/python/offlineL2/level2_Reconstruction_Cascade.py
'''


@icetray.traysegment
def OfflineCascadeReco(
        tray, name, If=lambda f: True,
        suffix='',
        SRTPulses='',
        Pulses='',
        TopoPulses='',
        # Output frame-key names for the individual fits:
        CascadeLineFit='CascadeLineFit',
        CascadeDipoleFit='CascadeDipoleFit',
        CascadeLast='CascadeLast',
        CascadeLlhVertexFit='CascadeLlhVertexFit',
        CascadeLlhVertexFitSplit='CascadeLlhVertexFitSplit',
        BadDOMListName='BadDomsList',
        CascadeFillRatio='CascadeFillRatio',
        CascadeSplitPulses='CascadeSplitPulses',
        CascadeLineFitSplit='CascadeLineFitSplit',
        CascadeToISplit='CascadeToISplit',
        # NOTE(review): the parameter list continues beyond this chunk.
# processing, although the hit series used can be deleted after use # if writePulses is set to False #----------------------------------------------------------------- tray.AddSegment(NoiseEngine.WithCleaners,"example", HitSeriesName = "OfflinePulses", OutputName = "NoiseEngine_bool", writePulses = True) #----------------------------------------------------------------- #Run as separate modules # This illustrates the hit cleaning used by NoiseEngine's tray # segment. The static time window cleaning runs first to remove # spurious noise hits from the leading and trailing edges of the # frame. #----------------------------------------------------------------- icetray.load("libstatic-twc") tray.AddModule( "I3StaticTWC<I3RecoPulseSeries>", "StaticTWC", InputResponse = "OfflinePulses", OutputResponse = "OfflinePulses_StaticTWC", WindowMinus = 3000.0, WindowPlus = 2000.0, TriggerName = "I3TriggerHierarchy") #----------------------------------------------------------------- # Next, the RT hit cleaning module runs in an effort to focus # the NoiseEngine algorithm on those hits most likely to be due # to physics in the detector. #----------------------------------------------------------------- seededRTConfigService_nodust = I3DOMLinkSeededRTConfigurationService( useDustlayerCorrection = False, ic_ic_RTTime = 750*I3Units.ns,
from icecube import icetray icetray.load("daq-decode")
from icecube.icetray import load load('bayesian-priors', False) del load
def GenerateAtmosphericNeutrinos(tray, name, Files, GCDFile="",
                                 AutoExtendMuonVolume=False,
                                 EnergyBiasPower=1,
                                 FlavorBias=[30, 1, 1],
                                 CylinderRadius=600 * I3Units.m,
                                 CylinderHeight=1200 * I3Units.m,
                                 CrossSections='csms',
                                 NEvents=-1,
                                 MakeNeutrino=True):
    r"""
    Read CORSIKA showers containing neutrinos, and force exactly one neutrino to interact.

    NB: this segment is deprecated. Use GenerateAirShowers, SelectNeutrino,
    and PropagateMuons instead.

    :param Files: a list of CORSIKA files to read
    :param GCDFile: I3 file with GCD information to read in before the CORSIKA files
    :param AutoExtendMuonVolume: allow :math:`\nu_{\mu}` to interact far before they reach the detector
    :param EnergyBiasPower: select a neutrino from the bundle with probability proportional to E^power
    :param FlavorBias: scale selection probability for :math:`\nu_{e}/\nu_{\mu}/\nu_{\tau}`
                       by these factors. The default value is appropriate for
                       equal sampling of conventional atmospheric
                       :math:`\nu_{e}/\nu_{\mu}`.
    :param CylinderRadius: radius of upright-cylinder target volume
    :param CylinderHeight: full height of simulation volume
    :param CrossSections: cross-section tables to use ('cteq5', 'css', or 'csms')
    """
    # NOTE(review): the mutable default FlavorBias=[30, 1, 1] is only passed
    # through below, but a tuple default would be safer. MakeNeutrino is not
    # used in the visible body -- confirm whether it is dead.
    import warnings
    warnings.warn('GenerateAtmosphericNeutrinos is deprecated. Use GenerateAirShowers, SelectNeutrino, and PropagateMuons instead')
    from operator import add
    from icecube import neutrino_generator, sim_services, MuonGun
    from icecube.sim_services.propagation import get_propagators
    icetray.load('corsika-reader', False)

    # NOTE(review): `random` is fetched but not used in the visible body.
    random = tray.context['I3RandomService']
    surface = MuonGun.Cylinder(CylinderHeight, CylinderRadius)
    tray.Add('I3CORSIKAReader', 'reader',
             filenamelist=Files,
             NEvents=NEvents,
             CylinderHeight=surface.length,
             CylinderRadius=surface.radius,
             Prefix=GCDFile)

    # Drop showers where no particles reach the observation level
    tray.Add(lambda frame: len(frame['I3MCTree']) > 1,
             Streams=[icetray.I3Frame.DAQ])

    # Remove low-energy muons that can't reach the detector
    tray.Add('I3InIceCORSIKATrimmer')

    tray.Add(SelectNeutrino,
             AutoExtendMuonVolume=AutoExtendMuonVolume,
             EnergyBiasPower=EnergyBiasPower,
             FlavorBias=FlavorBias,
             CylinderRadius=CylinderRadius,
             CylinderHeight=CylinderHeight,
             CrossSections=CrossSections)

    # Merge the default propagators into the tray's propagator map.
    base_propagators = get_propagators()
    propagators = tray.context['I3ParticleTypePropagatorServiceMap']
    for k in base_propagators.keys():
        propagators[k] = base_propagators[k]
    tray.Add('Rename', Keys=['I3MCTree', 'I3MCTree_preMuonProp'])
    tray.Add('I3PropagatorModule',
             PropagatorServices=propagators,
             InputMCTreeName="I3MCTree_preMuonProp",
             OutputMCTreeName="I3MCTree",
             RandomService='I3RandomService')
from icecube.load_pybindings import load_pybindings from icecube import icetray import os load_pybindings(__name__, __path__) icetray.load("libDOMLauncher", False) from icecube.DOMLauncher.domlauncher import DetectorResponse from icecube.DOMLauncher.domlauncher import launch_splitter
# (The closing parenthesis below completes a parser.add_option call begun
# before this chunk.)
)
parser.add_option("-t", "--tableoutput",
                  action="store_true",
                  default=False,
                  dest="hdfout",
                  help="Generate hdf5 file?")
parser.add_option(
    "--filter",
    action="store_true",
    default=False,
    dest="filter",
    help="If true, only events passing specified branch will be written.",
)
(options, args) = parser.parse_args()

tray = I3Tray()
icetray.load("libneutrinoflux")
icetray.load("libatmo-weights")
icetray.load("libDomTools")

### For BS processing ###
inputfile = options.inputfile
# NOTE(review): output path is hard-coded and sliced with magic indices
# [76:-6]; options.outputfile is ignored (commented out below).
outputfile = "/data/user/daughjd/data/systematic_sets/10562/" + inputfile[76:-6] + "L4Out.i3"
# outputfile = options.outputfile
gcd = options.gcdfile
if inputfile == outputfile:
    # NOTE(review): Python 2 print statement -- this script is not Python 3
    # compatible as written.
    print "Input same as Output Name!!! Exiting..."
    exit()
# gcdlist = pickle.load(open('/nv/hp11/jdaughhetee3/runners/burn_sample_process/gcdlist.pkl','r'))
from icecube.icetray import load load('TopologicalSplitter', False) del load
# (These keyword arguments complete a parser.add_option call begun before
# this chunk.)
    type="int",
    action="store",
    default=1,
    help="Process which filter branches (1=Both,2=StdDCFilter,3=ExpFidFilter)",
)
parser.add_option("-t", "--tableoutput",
                  action="store_true", default=False, dest="hdfout",
                  help="Generate hdf5 file?")
parser.add_option("--filter",
                  action="store_true", default=False, dest='filter',
                  help="If true, only events passing specified branch will be written.")
(options, args) = parser.parse_args()

tray = I3Tray()
icetray.load('libneutrinoflux')
icetray.load('libatmo-weights')
icetray.load('libDomTools')

### For BS processing ###
inputfile = options.inputfile
# NOTE(review): output path is hard-coded with magic slice indices [76:-6];
# options.outputfile is ignored (commented out below).
outputfile = '/data/user/daughjd/data/systematic_sets/10561/' + inputfile[76:-6] + 'L4Out.i3'
#outputfile = options.outputfile
gcd = options.gcdfile
if inputfile == outputfile:
    # NOTE(review): Python 2 print statement -- not Python 3 compatible.
    print "Input same as Output Name!!! Exiting..."
    exit()
#gcdlist = pickle.load(open('/nv/hp11/jdaughhetee3/runners/burn_sample_process/gcdlist.pkl','r'))
def DetectorSim(tray, name,
                RandomService=None,
                RunID=None,
                GCDFile=None,
                KeepMCHits=False,
                KeepPropagatedMCTree=False,
                KeepMCPulses=False,
                SkipNoiseGenerator=False,
                LowMem=False,
                InputPESeriesMapName="I3MCPESeriesMap",
                BeaconLaunches=True):
    """Simulate the detector response: optional vuvuzela noise, PMT/DOM
    simulation, waveform calibration and pulse extraction, followed by
    cleanup of MC intermediates according to the Keep* flags.

    :param RandomService: name of the RNG service in the tray context (required)
    :param RunID: run number for the produced events (required)
    :param SkipNoiseGenerator: when true, no noise PEs are merged in
    """
    from I3Tray import I3Units
    from icecube import DOMLauncher
    from icecube import topsimulator

    # Both of these are mandatory in production -- fail fast if unset.
    if RunID is None:
        icetray.logging.log_fatal("You *must* set a RunID in production.")
    if not RandomService:
        icetray.logging.log_fatal("You *must* set a RandomService name.")

    # Frame keys that may need deleting at the end, depending on flags.
    MCPESeriesMapNames = [
        InputPESeriesMapName, "BackgroundI3MCPESeriesMap", "SignalI3MCPEs"
    ]
    MCPulseSeriesMapNames = [
        "I3MCPulseSeriesMap", "I3MCPulseSeriesMapParticleIDMap"
    ]
    MCTreeNames = ["I3MCTree", "BackgroundI3MCTree"]
    MCPMTResponseMapNames = []

    if not SkipNoiseGenerator:
        # Rename the signal-only PE map, then write the noise-merged map
        # back under the original name.
        InputPESeriesMapName_withoutNoise = InputPESeriesMapName + "WithoutNoise"
        tray.Add("Rename", "RenamePESeriesMap",
                 Keys=[InputPESeriesMapName, InputPESeriesMapName_withoutNoise])
        MCPESeriesMapNames.append(InputPESeriesMapName_withoutNoise)

        from icecube import vuvuzela
        tray.AddSegment(vuvuzela.AddNoise, name + "_vuvuzela",
                        OutputName=InputPESeriesMapName,
                        InputName=InputPESeriesMapName_withoutNoise,
                        StartTime=-10. * I3Units.microsecond,
                        EndTime=10. * I3Units.microsecond,
                        RandomServiceName=RandomService,
                        )

    # PMT + DOM simulation: PEs -> MC pulses -> DOM launches.
    tray.AddSegment(DOMLauncher.DetectorResponse, "DetectorResponse",
                    pmt_config={
                        'Input': InputPESeriesMapName,
                        'Output': "I3MCPulseSeriesMap",
                        'MergeHits': True,
                        'LowMem': LowMem,
                        'RandomServiceName': RandomService
                    },
                    dom_config={
                        'Input': 'I3MCPulseSeriesMap',
                        'Output': "I3DOMLaunchSeriesMap",
                        'UseTabulatedPT': True,
                        'RandomServiceName': RandomService,
                        'BeaconLaunches': BeaconLaunches
                    })

    # `header` is defined elsewhere in this file (not visible in this chunk).
    tray.Add(header, streams=[icetray.I3Frame.DAQ])
    tray.Add("I3NullSplitter")  #literally converts Q to P frame

    from icecube import WaveCalibrator
    tray.AddModule("I3WaveCalibrator", "calibrate",
                   Waveforms='CalibratedWaveforms',
                   CorrectDroop=True,
                   )
    tray.AddModule("I3PMTSaturationFlagger", "find_saturation",
                   Waveforms='CalibratedWaveforms',
                   Output="PMTSaturation",
                   )
    icetray.load('wavedeform', False)
    tray.AddModule("I3Wavedeform", "deform",
                   Waveforms='CalibratedWaveforms',
                   Output='InIcePulses')

    # Drop bookkeeping objects and, depending on flags, MC intermediates.
    tray.AddModule("Delete", name + "_cleanup",
                   Keys=[
                       "MCTimeIncEventID",
                       "MCPMTResponseMap",
                   ])
    if not KeepMCPulses:
        tray.AddModule("Delete", name + "_cleanup_2",
                       Keys=MCPulseSeriesMapNames + MCPMTResponseMapNames)
    if not KeepMCHits:
        tray.AddModule("Delete", name + "_cleanup_I3MCHits_2",
                       Keys=MCPESeriesMapNames)
    if not KeepPropagatedMCTree:  # Always keep original tree
        tray.AddModule("Delete", name + "_cleanup_I3MCTree_3",
                       Keys=MCTreeNames)
from icecube import icetray, DomTools, STTools from icecube.icetray import I3Units, I3Frame from . import DOMS, DOMList from icecube.STTools.seededRT.configuration_services import I3DOMLinkSeededRTConfigurationService icetray.load('libDeepCore_Filter', False) @icetray.traysegment def RunFilter(tray, name, pulses="InIcePulses", OutputName="DCVeto", DetectorConfig="IC86EDC", If=lambda f: True): ### Perform SeededRT using HLC instead of HLCCore ### SRTParams = { "allowSelfCoincidence": False, "useDustlayerCorrection": True, "dustlayerUpperZBoundary": 0 * I3Units.m, "dustlayerLowerZBoundary": -150 * I3Units.m, "ic_ic_RTRadius": 150. * I3Units.m, "ic_ic_RTTime": 1000. * I3Units.ns, "dc_dc_RTRadius": 75. * I3Units.m, "dc_dc_RTTime": 500. * I3Units.ns, "treat_string_36_as_deepcore": True } stConfigService = I3DOMLinkSeededRTConfigurationService(**SRTParams)
PhotonSeriesName=photonSeriesName, ParallelEvents=options.MAXPARALLELEVENTS, RandomService=randomService, UseGPUs=setGPUs, UseCPUs=setCPUs, UnWeightedPhotons=options.UNWEIGHTEDPHOTONS, DOMOversizeFactor=options.DOMOVERSIZEFACTOR, UnshadowedFraction=options.UNSHADOWEDFRACTION, IceModelLocation=options.ICEMODEL, FlasherInfoVectName="I3FlasherInfo") else: print("PPC mode, use GPUs") os.putenv("PPCTABLESDIR", expandvars("$I3_BUILD/ppc/resources/ice/lea")) os.putenv("OGPU", "1") # makes sure only GPUs are used (with OpenCL version) icetray.load("libxppc") icetray.load("libppc") fstr = options.FSTR nph = 2.75 * 2.5e9 # fitted p_y in SPICE Lea * photon bunch nph /= 0.1315 # DOM acceptance nph /= 0.9 * 0.85 # shadowing * disc. threshold loss nph /= 6 * 0.977316 # number of LEDs * correction for bri=127 wid=124 (as used in SPICE) # do not modify the above lines unless you think they contain an error! nph *= (0.0006753 + 0.00005593 * options.FLASHERBRIGHTNESS) * ( options.FLASHERWIDTH + 13.9 - 57.5 / (1 + options.FLASHERBRIGHTNESS / 34.4)) nph *= nled nph *= 0.1315
# Check whether the optional g4-tankresponse library is available in this
# installation. Downstream code should branch on `have_g4tankresponse`
# instead of failing hard at import time.
try:
    from icecube import icetray
    icetray.load("libg4-tankresponse", False)
    have_g4tankresponse = True
except Exception:
    # Narrowed from a bare `except:` — a bare except would also swallow
    # KeyboardInterrupt/SystemExit, masking a user abort during import.
    have_g4tankresponse = False
#!/usr/bin/env python import numpy #import dashi #import pycorsika from icecube import icetray, dataclasses, dataio from icecube.icetray import I3Units from icecube import phys_services, c2j_icetray, mmc_icetray from icecube import MuonGun icetray.load('corsika-reader') from I3Tray import I3Tray from os.path import expandvars # See: # Y. Becherini, A. Margiotta, M. Sioli, and M. Spurio. A parameterisation of single and multiple muons in the deep water or ice. Astroparticle Physics, 25(1):1 - 13, 2006. # http://dx.doi.org/10.1016/j.astropartphys.2005.10.005 def MMCFactory( length=10 * I3Units.m, seed=12345, mediadef=expandvars('$I3_BUILD/MuonGun/resources/iceworld-mediadef')): jvmOpts = icetray.vector_string( ) # fill this with parameters passed directly to the JavaVM jvmOpts.append(expandvars("-Djava.class.path=$I3_BUILD/lib/mmc.jar")) jvmOpts.append("-Xms256m") jvmOpts.append("-Xmx512m") jvmOpts.append("-XX:-HeapDumpOnOutOfMemoryError")
default=False, action="store_true", help="Print names of files being checked to stderr") parser.add_option( "--detailed", default=False, action="store_true", help="Perform detailed check of particle blocks (much slower)") opts, args = parser.parse_args() if len(args) == 0: parser.error("You must specify at least one CORSIKA file to read!") from icecube import icetray, dataclasses from I3Tray import I3Tray import sys icetray.load('corsika-reader', False) if not opts.verbose: icetray.logging.I3Logger.global_logger = icetray.I3NullLogger() else: icetray.logging.set_level_for_unit('I3Tray', 'WARN') icetray.logging.set_level_for_unit('Python', 'INFO') infiles = [] from glob import glob for arg in args: if '*' in arg: infiles += sorted(glob(arg)) else: infiles.append(arg) icetray.logging.log_info('%d arguments expanded to %d files' %
from icecube.filter_2012.level2_HitCleaning_DeepCore import DeepCoreHitCleaning from icecube.filter_2012.level2_HitCleaning_WIMP import WimpHitCleaning from icecube.filter_2012.level2_HitCleaning_Cascade import CascadeHitCleaning from icecube.filter_2012.PhotonTables import InstallTables from icecube.filter_2012.level2_Reconstruction_Muon import OfflineMuonReco from icecube.filter_2012.level2_HitCleaning_EHE import HitCleaningEHE from icecube.filter_2012.level2_Reconstruction_IceTop import ReconstructIceTop from icecube.filter_2012.level2_Reconstruction_DeepCore import OfflineDeepCoreReco from icecube.filter_2012.level2_Reconstruction_WIMP import WimpReco from icecube.filter_2012.Rehydration import Rehydration #from icecube.filter_2012.level2_Reconstruction_FSS import OfflineFSSReco from icecube.filter_2012.level2_Reconstruction_Cascade import OfflineCascadeReco from icecube.filter_2012.level2_Reconstruction_SLOP import SLOPLevel2 from icecube.filter_2012.level2_Reconstruction_EHE import ReconstructionEHE from icecube.filter_2012 import SpecialWriter icetray.load("SeededRTCleaning", False) PHOTONICS_DIR = '/cvmfs/icecube.opensciencegrid.org/data/photon-tables' @click.command() @click.argument('cfg', type=click.Path(exists=True)) @click.argument('run_number', type=int) @click.option('--scratch/--no-scratch', default=True) def main(cfg, run_number, scratch): with open(cfg, 'r') as stream: if int(yaml.__version__[0]) < 5: # backwards compatibility for yaml versions before version 5 cfg = yaml.load(stream) else: cfg = yaml.full_load(stream)
from icecube import icetray, dataclasses, dataio, phys_services icetray.load('libtrigger-splitter', False) @icetray.traysegment def PnfResplitter(tray, name, doInIceSplit=True, doNullSplit=True, doIceTopSplit=True): # Script should be as simple as: # #tray.AddModule( "I3Reader", "Reader") #tray.AddSegment(filter_tools.PnfResplitter, "resplit") #tray.AddModule( "I3Writer", "EventWriter2" ) # def MaskMaker(frame): if frame.Has('I3SuperDST') and frame.Has('DSTTriggers'): # save only filtered frames (traditional filter + SuperDST filter) pulses = dataclasses.I3RecoPulseSeriesMap.from_frame(frame, 'I3SuperDST') ii_mask = dataclasses.I3RecoPulseSeriesMapMask(frame, 'I3SuperDST') it_mask = dataclasses.I3RecoPulseSeriesMapMask(frame, 'I3SuperDST') for omkey in pulses.keys(): ii_mask.set(omkey, omkey.om <= 60) it_mask.set(omkey, omkey.om > 60) frame['InIceDSTPulses'] = ii_mask frame['IceTopDSTPulses'] = it_mask else: return 0 tray.AddModule("QConverter", name+"_convert")( ("WritePFrame", 0) )
# Package init for NoiseEngine: loads the compiled library and re-exports
# the user-facing tray segment.
import icecube
from icecube import icetray
# NOTE(review): load_pybindings is imported but not called in this visible
# block — presumably needed for its import-time side effects; confirm.
from icecube.load_pybindings import load_pybindings
from .NoiseEngine import WithCleaners

# Load the NoiseEngine shared library (second arg presumably suppresses
# verbose load output — TODO confirm against icetray.load's signature).
icetray.load('NoiseEngine', False)

# Keep the bare 'icecube' name out of this package's public namespace.
del icecube
def PnfResplitter(tray, name, doInIceSplit=True, doNullSplit=True, doIceTopSplit=True):
    """
    Re-split online (PnF) frames into the standard offline sub-event streams.

    Intended usage is as simple as::

        tray.AddModule("I3Reader", "Reader")
        tray.AddSegment(filter_tools.PnfResplitter, "resplit")
        tray.AddModule("I3Writer", "EventWriter2")

    :param tray: the I3Tray being configured.
    :param name: prefix for module instance names added by this segment.
    :param doInIceSplit: run the in-ice trigger splitter.
    :param doNullSplit: run the null splitter (one P frame per event).
    :param doIceTopSplit: run the IceTop trigger splitter.
    """

    def MaskMaker(frame):
        # Keep only frames carrying SuperDST + trigger data; derive separate
        # in-ice / IceTop pulse masks from the combined I3SuperDST pulses.
        if frame.Has('I3SuperDST') and frame.Has('DSTTriggers'):
            # save only filtered frames (traditional filter + SuperDST filter)
            pulses = dataclasses.I3RecoPulseSeriesMap.from_frame(frame, 'I3SuperDST')
            ii_mask = dataclasses.I3RecoPulseSeriesMapMask(frame, 'I3SuperDST')
            it_mask = dataclasses.I3RecoPulseSeriesMapMask(frame, 'I3SuperDST')
            for omkey in pulses.keys():
                # OMs 1-60 are in-ice DOMs; higher OM numbers are IceTop.
                ii_mask.set(omkey, omkey.om <= 60)
                it_mask.set(omkey, omkey.om > 60)
            frame['InIceDSTPulses'] = ii_mask
            frame['IceTopDSTPulses'] = it_mask
        else:
            return 0  # drop frames without SuperDST/trigger payload

    # Demote incoming frames to Q frames; the splitters below create the
    # new P frames (WritePFrame=0 suppresses QConverter's own P frames).
    tray.AddModule("QConverter", name+"_convert")(
        ("WritePFrame", 0)
        )

    # Slip in a "EndQ/E" frame, act to prevent PacketModules from caching.
    try:
        icetray.load('pfauxiliary', False)
        tray.AddModule("PFEmitFlushFrame", 'emit_flush')
    except Exception:
        # Narrowed from a bare `except:` so KeyboardInterrupt/SystemExit
        # are not swallowed; the module is optional, so only warn.
        icetray.logging.log_warn("PFEmitFlushFrame module couldn't be loaded (not installed). It's not used.")

    tray.AddModule(MaskMaker, name+'_maskme', Streams=[icetray.I3Frame.DAQ])

    # Online bookkeeping objects that are meaningless offline.
    tray.AddModule("Delete", name+'_deleteme')(
        ("Keys", ['JEBEventInfo', 'PFContinuity'])
        )

    if doInIceSplit:
        tray.AddModule('I3TriggerSplitter', 'InIceSplit')(
            ("TrigHierName", 'DSTTriggers'),
            ('InputResponses', ['InIceDSTPulses']),
            ('OutputResponses', ['SplitUncleanedInIcePulses']),
            )

    if doIceTopSplit:
        # I don't know the IT processing to get from IceTopDSTPulses to
        # IceTopPulses_HLC; the icetop working group need to do this in the
        # P-frame. For now, splitting on IceTopPulses.
        tray.AddModule('I3TriggerSplitter', 'IceTopSplit')(
            ("TrigHierName", 'DSTTriggers'),
            ('TriggerConfigIDs', [102, 1006, 1011]),
            ('InputResponses', ['IceTopDSTPulses']),  # on PnF this is 'IceTopPulses_HLC' (HLC from tpx)
            ('OutputResponses', ['SplitUncleanedITPulses']),
            )

    if doNullSplit:
        tray.AddModule("I3NullSplitter", 'NullSplit')

        def TriggerHierarchyForNullSplit(frame):
            # NullSplit P frames need their own copy of the trigger hierarchy.
            if frame['I3EventHeader'].sub_event_stream == 'NullSplit':
                frame['I3TriggerHierarchy'] = dataclasses.I3TriggerHierarchy.from_frame(frame, 'DSTTriggers')
        tray.AddModule(TriggerHierarchyForNullSplit, 'trigHierWriter')

    # Tell the distributor which sub-event streams were actually created.
    substreamnames = []
    if doNullSplit:
        substreamnames += ['NullSplit']
    if doInIceSplit:
        substreamnames += ['InIceSplit']
    if doIceTopSplit:
        substreamnames += ['IceTopSplit']

    tray.AddModule("DistributePnFObjects", name+'_distribute')(
        ("SubstreamNames", substreamnames)
        )