Code example #1
def create_dataset(outfile, infiles):
    """
    Creates a dataset in hdf5 format.

    Parameters:
    -----------
    outfile : str
        Path to the hdf5 file.
    infiles : list
        A list of input i3 files.
    """
    global event_offset
    global distances_offset
    event_offset, distances_offset = 0, 0
    tray = I3Tray()
    tray.AddModule('I3Reader',
                FilenameList = infiles)
    tray.AddModule(process_frame, 'process_frame')
    tray.AddModule(get_weight_by_flux, 'get_weight_by_flux')
    tray.AddModule(I3TableWriter, 'I3TableWriter', keys = vertex_features + [
        # Meta data
        'PDGEncoding', 'InteractionType', 'NumberChannels', 'NeutrinoEnergy', 
        'CascadeEnergy', 'MuonEnergy', 'TrackLength', 'DeltaLLH', 'DCFiducialPE',
        'RunID', 'EventID',
        # Lookups
        'NumberVertices',
        # Coordinates and pairwise distances
        'VertexX', 'VertexY', 'VertexZ', 
        'COGCenteredVertexX', 'COGCenteredVertexY', 'COGCenteredVertexZ',
        # Auxiliary targets
        'PrimaryX', 'PrimaryY', 'PrimaryZ', 
        'COGCenteredPrimaryX', 'COGCenteredPrimaryY', 'COGCenteredPrimaryZ', 
        'PrimaryAzimuth', 'PrimaryZenith', 'PrimaryEnergy',
        # Reconstruction
        'RecoX', 'RecoY', 'RecoZ', 
        'COGCenteredRecoX', 'COGCenteredRecoY', 'COGCenteredRecoZ',
        'RecoAzimuth', 'RecoZenith',
        # Flux weights
        'NuMuFlux', 'NueFlux', 'NoFlux',
        # Debug stuff
        'PrimaryXOriginal', 'PrimaryYOriginal', 'PrimaryZOriginal',
        'CMeans', 'COGCenteredCMeans',
        ], 
                TableService=I3HDFTableService(outfile),
                SubEventStreams=['TTrigger'],
                BookEverything=False
                )
    tray.Execute()
    tray.Finish()
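A minimal usage sketch for the function above (the paths are placeholders; it assumes the surrounding module already defines process_frame, get_weight_by_flux and vertex_features and that the icetray environment is set up):

from glob import glob

# Hypothetical invocation; input/output paths are placeholders.
infiles = sorted(glob('/data/sim/level2/*.i3.bz2'))
create_dataset('vertex_dataset.hd5', infiles)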
Code example #2
File: extractor.py  Project: AlexHarn/bfrv1_icetray
def ExtractDST13(tray, name, 
	dst_output_filename  = "I3DST13.hdf5",
	dstname      = "I3DST13",
	extract_to_frame = True,
	If = lambda f: True):
	
	"""
	Record in compact form limited information from reconstructions, triggers and cut
	parameters for every triggered event.
	"""
	import time
	from icecube import dst
	from icecube import phys_services
	from icecube.tableio import I3TableWriter
	from . import TDSTConverter
	
	# Open output file
	if dst_output_filename.endswith('.root'):
	   from icecube.rootwriter import I3ROOTTableService
	   table_service = I3ROOTTableService(filename= dst_output_filename,  
                                   master= "dst", #Default name: "MasterTree".
                                   #mode=RECREATE,     
                                   )
	elif dst_output_filename.endswith('.hdf5') or dst_output_filename.endswith('.hd5'):
	   from icecube.hdfwriter import I3HDFTableService
	   table_service = I3HDFTableService(dst_output_filename, 6)
	else:
	   raise ValueError("Unsupported output file extension: " + dst_output_filename)

	if "I3RandomService" not in tray.context:
	   dstRng = phys_services.I3GSLRandomService(int(time.time()))
	   tray.context["I3RandomService"]=dstRng


	tray.AddModule('I3DSTExtractor13', 'UnpackDST',
                SubEventStreamName = 'TDST13',
                FileName        = dst_output_filename,
                DSTName         = dstname,
                DSTHeaderName   = dstname+"Header",
                EventHeaderName = 'I3EventHeader',
                ExtractToFrame  = extract_to_frame,
                TriggerName     = 'I3TriggerHierarchy',
               )

	tray.AddModule(I3TableWriter, "writer",
               TableService = table_service,
               SubEventStreams= ['TDST13'],           
               Keys = [ "CutDST", "TDSTTriggers"],  
               )
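A minimal tray sketch using ExtractDST13 (the input filename is a placeholder; it assumes the original extractor module is importable and that the segment is registered with icetray.traysegment as in the project source):

from I3Tray import I3Tray

tray = I3Tray()
tray.AddModule('I3Reader', 'reader', FilenameList=['Level2_dst.i3.gz'])  # placeholder input
tray.AddSegment(ExtractDST13, 'extract_dst13',
                dst_output_filename='I3DST13.hdf5',
                extract_to_frame=True)
tray.Execute()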
Code example #3
def create_dataset(outfile, infiles, gcdfile='/cvmfs/icecube.opensciencegrid.org/data/GCD/GeoCalibDetectorStatus_2013.56429_V0.i3.gz'):
    """
    Creates a dataset in hdf5 format.

    Parameters:
    -----------
    outfile : str
        Path to the hdf5 file.
    infiles : list
        A list of input i3 files.
    gcdfile : str
        Path to a gcd file.
    """
    tray = I3Tray()
    tray.AddModule('I3Reader',
                FilenameList = infiles)
    tray.AddModule(lambda frame: process_frame(frame, gcdfile=gcdfile), 'process_frame')
    tray.AddModule(I3TableWriter, 'I3TableWriter', keys = vertex_features + [
        # Meta data
        #'PDGEncoding', 'InteractionType', 
        'RunID', 'EventID',
        # Lookups
        'NumberVertices',
        # Coordinates and pairwise distances
        'VertexX', 'VertexY', 'VertexZ', 
        # Auxiliary targets
        'PrimaryX', 'PrimaryY', 'PrimaryZ', 
        'PrimaryAzimuth', 'PrimaryZenith', 'PrimaryEnergy',
        # Class label
        'classification',
        ], 
                TableService=I3HDFTableService(outfile),
                SubEventStreams=['InIceSplit'],
                BookEverything=False
                )
    tray.Execute()
    tray.Finish()
Code example #4
            true_nu_coszen) * norm * 0.5 / 0.7
    else:
        nue_flux_vector = flux_service.getFlux(
            dataclasses.I3Particle.NuEBar, true_nu_energy,
            true_nu_coszen) * norm * 0.5 / 0.3
        numu_flux_vector = flux_service.getFlux(
            dataclasses.I3Particle.NuMuBar, true_nu_energy,
            true_nu_coszen) * norm * 0.5 / 0.3


#    print true_nu_energy, true_nu_coszen, norm, numu_flux_vector, nue_flux_vector
    frame["I3MCWeightDict"]["no_flux"] = norm
    frame["I3MCWeightDict"]["numu_flux"] = numu_flux_vector
    frame["I3MCWeightDict"]["nue_flux"] = nue_flux_vector

tray = I3Tray()
tray.AddModule('I3Reader', 'reader', FilenameList=args.INFILE)

hdf = I3HDFTableService(args.OUTFILE)

tray.AddModule(GetFlux, "GetFlux", Streams=[icetray.I3Frame.DAQ])

tray.AddModule(
    I3TableWriter,
    tableservice=[hdf],
    #               BookEverything = True,
    keys=['I3EventHeader', 'I3MCTree', 'I3MCWeightDict', 'FilterMask'],
    SubEventStreams=["InIceSplit"])

tray.Execute()
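For context, the fragment above starts mid-way through its flux-weighting function. A hedged sketch of how such an atmospheric-flux service is typically created and queried (it assumes the NewNuFlux project is available; the model name and kinematics are placeholders, not values from the snippet):

from icecube import dataclasses, NewNuFlux

flux_service = NewNuFlux.makeFlux('honda2006')   # placeholder flux model
energy_gev, cos_zen = 100.0, 0.5                 # placeholder kinematics
numu_flux = flux_service.getFlux(dataclasses.I3Particle.NuMu, energy_gev, cos_zen)
nue_flux = flux_service.getFlux(dataclasses.I3Particle.NuE, energy_gev, cos_zen)
print(numu_flux, nue_flux)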
Code example #5
File: root2hdf.py  Project: mattmeehan/cr-composition
#!/usr/bin/env python

from icecube import icetray, tableio
from icecube.hdfwriter import I3HDFTableService
from icecube.rootwriter import I3ROOTTableService

from icecube.tableio import I3TableTranscriber

outservice = I3HDFTableService('ldf_proton_1PeV.hdf')
inservice = I3ROOTTableService('ldf_proton_1PeV.root', 'r')

scribe = I3TableTranscriber(inservice, outservice)

scribe.Execute()
Code example #6
File: segments.py  Project: wardVD/IceSimV05
def SimIceTop(tray, name,
              input,
              output=None,
              gcd=None,
              log_level=2,
              samples=1,
              tanks=[],
              x=0.0,
              y=0.0,
              r=0.0,
              seed=0,
              rng="gsl",
              injector="normal",
              response="g4",
              new=True,
              pecompress=2,
              weighted=False,
              calibrate=False,
              trigger=False):
    """
    Simulate IceTop response.

    Input can be a list of CORSIKA files or a tuple of ``(str, kwargs)``,
    which are passed to ``tray.AddService(str, "particle-injector", **kwargs)``
    to create a custom particle injector. RandomServiceName should not be
    specified, it is overridden internally.

    The option "weighted" only takes effect if the input are CORSIKA files.
    """
    import os
    import re
    from icecube import icetray, dataclasses, dataio
    from icecube.sim_services.sim_utils.gcd_utils import get_time
    from icecube.icetray import I3Units

    custom_injector = False
    if len(input) == 2 and isinstance(input[0], str) and isinstance(input[1], dict):
        custom_injector = True

    if gcd is None:
        raise ValueError("Need to specify a GCD file")
    if not os.path.exists(gcd):
        raise IOError("Specified GCD file does not exist")

    icetray.set_log_level_for_unit('I3TopSimulator', icetray.I3LogLevel(log_level))
    icetray.set_log_level_for_unit('I3CorsikaReader', icetray.I3LogLevel(log_level))
    icetray.set_log_level_for_unit('I3CorsikaInjector', icetray.I3LogLevel(log_level))
    icetray.set_log_level_for_unit('I3G4TankResponse', icetray.I3LogLevel(log_level))
    icetray.set_log_level_for_unit('I3ParamTankResponse', icetray.I3LogLevel(log_level))

    # to be used by injector
    for tag in ("inj", "resp", "other"):
        if rng == "gsl":
            tray.AddService("I3GSLRandomServiceFactory", "gslrandom_" + tag,
                            Seed=seed,
                            InstallServiceAs='random_' + tag)
        elif rng == "root":
            tray.AddService("I3TRandomServiceFactory", "rootrandom_" + tag,
                            Seed=seed,
                            InstallServiceAs='random_' + tag)
        elif rng == "sprng":
            tray.AddService("I3SPRNGRandomServiceFactory", "sprngrandom_" + tag,
                            Seed=seed,
                            NStreams=1,
                            StreamNum=0,
                            InstallServiceAs='random_' + tag)
        else:
            raise ValueError("Unknown randon number generator: " + rng)


    if custom_injector:
        tray.AddService(input[0], "particle-injector",
                        RandomServiceName = 'random_inj',
                        **input[1])
    else:
        # CORSIKA injector
        if weighted:
            if injector == "normal":
                tray.AddService("I3InjectorFactory<I3CorsikaInjector>", "particle-injector",
                                FileNameList = input,
                                RandomServiceName = 'random_inj',
                                NumSamples = samples,     # <-- Number of re-samples of the same shower
                                ImportanceSampling = True,
                                #PartDistr = os.path.join(os.path.dirname(options.output),
                                #                         'on_regions_' + os.path.basename(options.output).replace(extension, 'root')),
                                Tanks = tanks,
                                IgnoreParticleTypes = [75, 76, 85, 86, 95, 96]
                                )
            else:
                raise ValueError("option weighted requires normal injector")
        else:
            if injector == "normal":
                tray.AddService("I3InjectorFactory<I3CorsikaInjector>", "particle-injector",
                                FileNameList = input,
                                RandomServiceName = 'random_inj',
                                NumSamples = samples,     # <-- Number of re-samples of the same shower
                                RelocationX = x,  # <-- x-coordinate of core or resampling center (if Relocation R > 0)
                                RelocationY = y, # <-- y-coordinate of core or resampling center (if Relocation R > 0)
                                RelocationR = r,   # <-- Re-sampling radius (if zero --> fixed core location)
                                Tanks = tanks,
                                IgnoreParticleTypes = [75, 76, 85, 86, 95, 96]
                                )
            elif injector == "unthin":
                tray.AddService("I3InjectorFactory<I3CorsikaThinnedInjector>", "particle-injector",
                      FileNameList = input,
                      RandomServiceName = 'random_inj',
                      NumSamples = samples,     # <-- Number of re-samples of the same shower
                      RelocationX = x,  # <-- x-coordinate of core or resampling center (if Relocation R > 0)
                      RelocationY = y, # <-- y-coordinate of core or resampling center (if Relocation R > 0)
                      RelocationR = r,   # <-- Re-sampling radius (if zero --> fixed core location)
                      )
            else:
                raise ValueError("unknown injector option")

    if response == 'g4':
        from icecube import g4_tankresponse
        tray.AddService("I3IceTopResponseFactory<I3G4TankResponse>", "topresponse",
                        RandomServiceName =  "random_resp",
                        ChargeScale =  1.02
                        )
    elif response == 'param':
        tray.AddService("I3IceTopResponseFactory<I3ParamTankResponse>", "topresponse",
                        RandomServiceName =  "random_resp",
                        UseSnowParam = True
                        )
    else:
        raise ValueError("Unknown IceTop tank response: " + response)

    tray.AddModule("I3InfiniteSource", "source",
                   prefix = gcd,
                   stream = icetray.I3Frame.DAQ )

    time = get_time(dataio.I3File(gcd))
    tray.AddModule("I3MCEventHeaderGenerator","time-gen",
                   Year = time.utc_year,
                   DAQTime = time.utc_daq_time)

    if new:
        tray.AddSegment(SimulateNew, 'new_simulation',
                        InjectorServiceName = "particle-injector",
                        ResponseServiceName = "topresponse",
                        RandomServiceName = "random_other",
                        InIceMCTreeName = '',
                        Tanks = tanks,
                        CompressPEs=pecompress
                        )
    else:
        tray.AddSegment(SimulateOld, 'old_simulation',
                        InjectorServiceName = "particle-injector",
                        ResponseServiceName = "topresponse",
                        InIceMCTreeName = '',
                        Tanks = tanks
                        )

    if calibrate:
        from icecube import topsimulator
        tray.AddSegment(CalibrateAndExtract, 'CalibrateExtract',
                        Launches = 'IceTopRawData',
                        )
        tray.AddModule('I3TopAddComponentWaveforms', 'AddComponentWaveforms')

    if output is not None:  # writing of output is requested
        if output.endswith(".h5") or output.endswith(".root"):
            # write tables

            keep_keys = ['I3EventHeader',
                         'MCTopCherenkovPulses',
                         'MCTopHitSeriesMap',
                         'IceTopPulses_HLC',
                         'IceTopPulses_SLC',
                         'IceTopHLCVEMPulses',
                         'IceTopSLCVEMPulses']

            if custom_injector:
                keep_keys += ['I3MCTreeIT',
                              'MCTopPulses']
            else:
                keep_keys += ['MCPrimary',
                              'MCPrimaryInfo',
                              'SamplingWeight',
                              'Samples']

            tray.AddModule(DummySubEventSplit, 'split')

            from icecube.tableio import I3TableWriter
            if output.endswith('.h5'):
                from icecube.hdfwriter import I3HDFTableService
                hdf_service = I3HDFTableService(output)

                tray.AddModule(I3TableWriter, "writer",
                               Keys = keep_keys,
                               TableService = [ hdf_service ],
                               SubEventStreams = ['IceTop'],
                               )
            elif output.endswith('.root'):
                from icecube.rootwriter import I3ROOTTableService
                root_service = I3ROOTTableService(output)

                tray.AddModule(I3TableWriter, "writer",
                               Keys = keep_keys,
                               TableService = [ root_service ],
                               SubEventStreams = ['IceTop'],
                               )
        else:
            # write standard output format i3
            tray.AddModule("I3Writer", "i3-writer",
                           Filename = output,
                           streams = [icetray.I3Frame.DAQ]
                           )
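A minimal tray sketch for the SimIceTop segment above (the CORSIKA and GCD paths are placeholders; it assumes the segment is used from an icetray script as in the original project):

from I3Tray import I3Tray

tray = I3Tray()
tray.AddSegment(SimIceTop, 'sim_icetop',
                input=['DAT000001'],                  # placeholder CORSIKA file
                gcd='GeoCalibDetectorStatus.i3.gz',   # placeholder GCD file
                output='icetop_sim.h5',
                samples=1,
                response='param',
                new=True)
tray.Execute()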
Code example #7
File: merge.py  Project: wardVD/IceSimV05
from icecube.tableio import I3TableTranscriber
if 'hdf5' in [iformat, oformat]:
    from icecube.hdfwriter import I3HDFTableService
if 'root' in [iformat, oformat]:
    from icecube.rootwriter import I3ROOTTableService
if 'csv' in [iformat, oformat]:
    from icecube.textwriter import I3CSVTableService

if iformat == 'hdf5':
    inservices = [(I3HDFTableService, (infile, 1, 'r')) for infile in infiles]
elif iformat == 'root':
    inservices = [(I3ROOTTableService, (infile, 'r')) for infile in infiles]
else:
    raise "Unknown input format '%s'" % iformat

if oformat == 'hdf5':
    outservice = I3HDFTableService(outfile, options.compress, 'w')
elif oformat == 'root':
    outservice = I3ROOTTableService(outfile,
                                    compression_level=options.compress)
elif oformat == 'csv':
    outservice = I3CSVTableService(outfile)
else:
    raise "Unknown out format '%s'" % oformat

for ctor, args in inservices:
    print('Merging %s' % args[0])
    inservice = ctor(*args)
    scribe = I3TableTranscriber(inservice, outservice)
    if options.nframes is not None:
        scribe.Execute(options.nframes)
    else:
        scribe.Execute()
Code example #8
File: extractor.py  Project: AlexHarn/bfrv1_icetray
def ExtractDST(tray, name, 
	dst_output_filename  = "I3DST.hdf5",
	dstname      = "I3DST",
	simulation = False,
	extract_to_frame = False,
	remove_filter_stream = True,
	cut_data = False,
	If = lambda f: True):
	
	"""
	Record in compact form limited information from reconstructions, triggers and cut
	parameters for every triggered event.
	"""
	import time
	from icecube import dst
	from icecube import phys_services
	from icecube.tableio import I3TableWriter
	from . import TDSTConverter

	# Open output file
	if dst_output_filename.endswith('.root'):
	   from icecube.rootwriter import I3ROOTTableService
	   table_service = I3ROOTTableService(filename= dst_output_filename,  
                                   master= "dst", #Default name: "MasterTree".
                                   #mode=RECREATE,     
                                   )
	elif dst_output_filename.endswith('.hdf5'):
	   from icecube.hdfwriter import I3HDFTableService
	   table_service = I3HDFTableService(dst_output_filename, 6)
	else:
	   raise ValueError("Unsupported output file extension: " + dst_output_filename)

	if "I3RandomService" not in tray.context:
	   dstRng = phys_services.I3GSLRandomService(int(time.time()))
	   tray.context["I3RandomService"]=dstRng

	if simulation:
	   from icecube import filter_tools
	   tray.AddModule("KeepFromSubstream","dst_stream",
	      StreamName = "InIceSplit",
	      KeepKeys = ["I3DST"],
	      KeepStream=True,
	      )


	tray.AddModule('I3DSTExtractor16', 'UnpackDST',
                SubEventStreamName = 'TDST',
                FileName        = dst_output_filename,
                DSTName         = dstname,
                DSTHeaderName   = "I3DSTHeader",
                EventHeaderName = 'I3EventHeader',
                ExtractToFrame  = extract_to_frame,
                TriggerName     = 'I3TriggerHierarchy',
                Cut             = cut_data,
               )
	
	tray.AddModule(I3TableWriter, "writer",
               TableService = table_service,
               SubEventStreams= ['TDST'],           
               Keys = [ "CutDST", "TDSTTriggers"],  
               )

	if remove_filter_stream:
	   tray.AddModule(filterStream,name+'_stream_filter', StreamName='TDST')
Code example #9
#tray.AddModule('I3Reader','reader',FilenameList = ["/data/user/mamday/icetray/data/GeometryFiles/GeoCalibDetectorStatus_IC86.55697_corrected_V2.i3",infile], SkipKeys = ["TimeShift"])
#tray.AddModule('I3Reader','reader',FilenameList = ["/data/user/mamday/icetray/data/GeometryFiles/GeoCalibDetectorStatus_pingu_V36_Zezel_40_s22_d3.i3",infile], SkipKeys = ["TimeShift"])
tray.AddModule('I3Reader','reader',FilenameList = ["/data/user/mamday/icetray/data/GeometryFiles/GeoCalibDetectorStatus_IC86.55697_corrected_V2.i3",infile], SkipKeys = ["TimeShift"])

#out_num = int(sys.argv[1])
#g_ind = infile.index('genie')
g_ind = infile.index('Level2')
#g_ind = infile.index('data_')
i3_ind = infile.index('i3')
out_sys = infile[g_ind:i3_ind-1] 


#hdf = I3HDFTableService('/data/user/mamday/newSQuIDS/nuSQuIDS/resources/python/bindings/HD5Files/Weights/%s.hd5' % out_sys)
#hdf = I3HDFTableService('/data/user/mamday/newSQuIDS/nuSQuIDS/resources/python/bindings/HD5Files/Weights/JPIC863-Muons.hd5')
hdf = I3HDFTableService('/data/user/mamday/newSQuIDS/nuSQuIDS/resources/python/bindings/HD5Files/PRDWeights/%s.hd5' % out_sys)
#hdf = I3HDFTableService('/data/user/mamday/nuSQuIDS/nuSQuIDS/resources/python/bindings/HD5PhysFiles/NuMu/PINGU-NuMu-MC-ChiSquared-%04d.hd5' % out_num)
#Juan Pablo's
####numu_nfiles = 4000
#nue_nfiles = 2700
####nutau_nfiles = 1400

#nfiles = int(sys.argv[4])
nfiles = 1 

#PINGU
#numu_nfiles = 192 

def DoPCuts(frame):
  if(frame["Cuts_V5_Step1"].value==1 and frame["Cuts_V5_Step2"].value==1):
    return True
Code example #10
    print("  Layer: %2d OM no.: %2d Containment variable: %2d" % (veto.maxDomChargeLayer,veto.maxDomChargeOM,veto.maxDomChargeContainment))

    return True


#####################################################################
# BOOT INTO ICETRAY
#####################################################################

tray = I3Tray()

#####################################################################
# SERVICES
#####################################################################

hdfService = I3HDFTableService(options.OUTPUT)

hdfKeys = ["I3EventHeader",
           #
           # reco pulses
           options.PULSES,
           #
           # fits
           #
           # fit parameters
           #
           # doubles
           #
           # containment veto
           "Veto",
           #
Code example #11
		
fill_frame.objects = dict()
tray = I3Tray()

tray.Add("I3InfiniteSource")
tray.Add(fake_event_header, Streams=[icetray.I3Frame.DAQ])
tray.Add("I3NullSplitter", "nullsplit")
tray.Add(fill_frame)

from icecube.tableio import I3BroadcastTableService

tablers = [tableio.I3CSVTableService('test_converters')]
outfiles = ['test_converters']
try:
	from icecube.hdfwriter import I3HDFTableService
	tablers.append(I3HDFTableService("test_converters.hdf5", 6, 'w'))
	outfiles.append('test_converters.hdf5')
except ImportError:
	pass
try:
	from icecube.rootwriter import I3ROOTTableService
	tablers.append(I3ROOTTableService("test_converters.root"))
	outfiles.append('test_converters.root')
except ImportError:
	pass
if len(tablers) == 1:
	tabler = tablers[0]
else:
	tabler = I3BroadcastTableService(tuple(tablers))

tray.Add(tableio.I3TableWriter,
Code example #12
File: save_sim.py  Project: jrbourbeau/cr-composition
            test_tray.Execute()
            test_tray.Finish()
            good_file_list.append(test_file)
        except:
            print('file {} is truncated'.format(test_file))
            pass
    del test_tray

    tray = I3Tray()
    tray.context['I3FileStager'] = dataio.get_stagers(
        staging_directory=os.environ['_CONDOR_SCRATCH_DIR'])
    # icetray.logging.log_debug('good_file_list = {}'.format(good_file_list))
    tray.Add('I3Reader', FileNameList=good_file_list)
    # Uncompress Level3 diff files
    tray.Add(uncompress, 'uncompress')
    hdf = I3HDFTableService(args.outfile)

    # Filter out non-coincident P frames
    def filter_noncoincident(frame):
        if frame.Has('IceTopInIce_StandardFilter'):
            return frame['IceTopInIce_StandardFilter'].value
        else:
            return False

    tray.Add(filter_noncoincident)
    # tray.Add(lambda frame: frame['IceTopInIce_StandardFilter'].value)

    def get_nstations(frame):
        nstation = 0
        if IT_pulses in frame:
            nstation = count_stations(
Code example #13
                 If=lambda frame: check_keys(frame, 'I3Geometry', 'MCPrimary') )

        # Add Laputop fitstatus ok boolean to frame
        tray.Add(icetray_software.lap_fitstatus_ok,
                 If=lambda frame: 'Laputop' in frame)

        # Add opening angle between Laputop and MCPrimary for angular resolution calculation
        tray.Add(icetray_software.add_opening_angle,
                 particle1='MCPrimary', particle2='Laputop',
                 key='angle_MCPrimary_Laputop',
                 If=lambda frame: 'MCPrimary' in frame and 'Laputop' in frame)

        #====================================================================
        # Finish

        hdf = I3HDFTableService(output)
        keys = {key: tableio.default for key in keys}
        if args.type == 'data':
            keys['Laputop'] = [dataclasses.converters.I3ParticleConverter(),
                               astro.converters.I3AstroConverter()]

        tray.Add(I3TableWriter,
                 tableservice=hdf,
                 keys=keys,
                 SubEventStreams=['ice_top'])

        tray.Execute()
        tray.Finish()

    print('Time taken: {}'.format(time.time() - t0))
Code example #14
File: weighting.py  Project: WortPixel/nuance
    if options.type == 'i3':
        output_filename = options.output_file
        if not '.i3' in options.output_file:
            output_filename += '.i3.gz'
        else:
            if not '.gz' in output_filename:
                output_filename += '.gz'
        tray.AddModule('I3Writer',
                       'EventWriter',
                       Filename=output_filename,
                       Streams=[icetray.I3Frame.Physics, icetray.I3Frame.DAQ])
    elif options.type == 'hd5':
        from icecube.hdfwriter import I3HDFTableService
        from icecube.tableio import I3TableWriter

        # store everything in hdf5-file
        service = I3HDFTableService(options.output_file + '.hdf5')
        tray.AddModule(I3TableWriter,
                       'writer',
                       tableservice=[service],
                       BookEverything=True,
                       SubEventStreams=[options.sub_event_stream])
    else:
        print('Please use a supported type. Currently supported: i3, hd5')

    tray.Add('TrashCan', 'trash')

    tray.Execute()
    tray.Finish()
Code example #15
File: converter.py  Project: WortPixel/nuance
def convert(inputpath,
            outputfile,
            file_type='hdf5',
            sub_event_stream='InIceSplit',
            verbose=False,
            generateID=False):
    '''
    Convert files from i3 to hdf5 or root

    Args:
        inputpath: Path scanned for i3 files; a single input file also works
        outputfile: Outputpath for the converted file
        file_type: Choose the output format between root and hdf5
        sub_event_stream: Provide the i3 subeventstream to use
        verbose: Provide verbose output
        generateID: Generate a new unique event id

    Returns:
        Nothing
    '''
    # give output if requested
    if verbose:
        print('Input folder is "{}"'.format(inputpath))
        print('Output file is "{}"'.format(outputfile))
        print('Output format is "{}"'.format(file_type))

        if generateID:
            print('P-frame-based ID will be added to "I3EventHeader".')

    # list of possible i3 endings
    i3_endings = ['i3', 'i3.gz', 'i3.bz2']

    tray = I3Tray()

    # create list of all files of input path
    i3_files = []

    # differentiate between a given path or filename
    if os.path.isdir(inputpath):
        root, subdirs, _ = next(os.walk(inputpath))
        if verbose:
            print("Subdirs:")
            print(subdirs)
            print("Filenames:")
        for subdir in subdirs:
            for filename in os.listdir(os.path.join(root, subdir)):
                if verbose:
                    print(filename)
                if any([filename.endswith(ending) for ending in i3_endings]):
                    i3_files.append(os.path.join(*[root, subdir, filename]))
        tray.AddModule('I3Reader', 'reader', FilenameList=i3_files)
    else:
        # seems to be a single file
        outputfile = os.path.join(
            os.path.dirname(outputfile),
            os.path.basename(inputpath[:inputpath.find('.i3')]))
        print(outputfile)
        if any([inputpath.endswith(ending) for ending in i3_endings]):
            tray.AddModule('I3Reader', 'reader', Filename=inputpath)
        else:
            print('File format not supported')

    # create output path if necessary
    if not os.path.isdir(os.path.dirname(outputfile)):
        os.makedirs(os.path.dirname(outputfile))

    # choose output file_type
    if file_type == "root":
        service = I3ROOTTableService(outputfile + '.root', 'master_tree')
        if verbose:
            print('Storing in ' + outputfile + '.root')
    elif file_type in ["h5", "hd5", "hdf5"]:
        service = I3HDFTableService(outputfile + '.hd5')
        if verbose:
            print('Storing in ' + outputfile + '.hd5')
    else:
        service = I3HDFTableService(outputfile + '.hd5')
        if verbose:
            print('Using default file_type: hd5.')
            print('Storing in ' + outputfile + '.hd5')

    if generateID:
        tray.AddModule(generic_attributes.create_event_id, 'HeaderModifier')

    # write chosen attributes
    tray.AddModule(I3TableWriter,
                   'writer',
                   tableservice=[service],
                   BookEverything=True,
                   SubEventStreams=[sub_event_stream])

    # close file
    tray.AddModule('TrashCan', 'can')
    tray.Execute()
    tray.Finish()
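A minimal usage sketch for convert() (paths are placeholders; it assumes the converter module above is importable in an icetray environment):

# Hypothetical call; paths are placeholders.
convert('/data/user/me/level3/',           # directory scanned for i3 files
        '/data/user/me/level3_table',      # output basename (extension is appended)
        file_type='hdf5',
        sub_event_stream='InIceSplit',
        verbose=True)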
Code example #16
                       ModificationSettings=mod[2])
        args.pulsemap.append(out_key)
    tray.AddModule(DeepLearningModule, "DeepLearningMod",
                   pulsemap=args.pulsemap,
                   batch_size=args.batch_size,
                   add_truth=False,
                   benchmark=True)
    tray.AddModule(print_info, 'printer',
                   pulsemap = args.pulsemap,
                   Streams=[icetray.I3Frame.Physics])
    '''

    #Save 
    if os.path.exists(args.outfile):
         os.remove(args.outfile)
    hdf = I3HDFTableService(args.outfile, mode='w+')
    olist = ["TUM_dnn_classification_" + p for p in args.pulsemap]
    olist.extend(["classification", "corsika_weight",
                  "I3MCWeightDict", "QFilterMaks",
                  "I3EventHeader", "track_length",
                  "MCPrimary1", "signature",
                  "conv", "depE", "inelasticity",
                  "IC_hit_doms","BrightDOMs",
                  "PolyplopiaCount", "visible_track",
                  "first_interaction_pos", 'multiplicity',
                  "primary_nu" ])
    print('\n Out Keys: {} \n'.format(olist))
    tray.AddModule(I3TableWriter,'writer',
               tableservice = hdf,
               keys         = olist,
               SubEventStreams=['InIceSplit'],)
Code example #17
def main():
    # Make sure args are valid, and provide help, before importing all the
    # IceCube dreck
    args = parse_args()
    assert (args.outfile.lower().endswith('.hdf5')
            or args.outfile.lower().endswith('.hdf')
            or args.outfile.lower().endswith('.h5')
            or args.outfile.lower().endswith('.hd5'))

    from icecube import icetray

    # One or more of the following imports is actually necessary, but many may
    # not be necessary to convert files. Rather than try to figure out which is
    # which, all available imports (that don't fail) are included here.

    from icecube import (
        dataio,
        tableio,
        dataclasses,
        simclasses,
        dst,
        millipede,
        multinest_icetray,
        gulliver_modules,
        lilliput,
        linefit,
        hdfwriter,
        improvedLinefit,
        interfaces,
        HiveSplitter,
        cscd_llh,
        IceHive,
        spline_reco, # slow import!,
        dipolefit,
        paraboloid,
        wavedeform,
        common_variables,
        gulliver,
        phys_services,
        wavereform, # slow import!
        common_variables__direct_hits,
        finiteReco,
        payload_parsing,
        clast,
    )

    from icecube import trigger_sim
    #from icecube import AtmCscdEnergyReco
    #from icecube import production_histograms
    from icecube import BadDomList
    from icecube import pybdtmodule
    from icecube import CascadeVariables
    from icecube import icepick
    #from icecube import recclasses
    from icecube import CoincSuite
    from icecube import rootwriter
    from icecube import DeepCore_Filter
    #from icecube import core_removal
    from icecube import shield
    from icecube import DomTools
    from icecube import cramer_rao
    #from icecube import shovelart
    from icecube import credo
    from icecube import ipdf
    #from icecube import shovelio
    #from icecube import level3_filter_cascade
    from icecube import KalmanFilter
    from icecube import daq_decode
    #from icecube import level3_filter_lowen
    from icecube import NoiseEngine
    #from icecube import level3_filter_muon
    from icecube import static_twc
    from icecube import SLOPtools
    from icecube import steamshovel
    from icecube import STTools
    #from icecube import SeededRTCleaning
    from icecube import double_muon
    from icecube import load_pybindings
    from icecube import tensor_of_inertia
    from icecube import TopologicalSplitter
    #from icecube import test_unregistered
    from icecube import VHESelfVeto
    from icecube import fill_ratio
    #from icecube import mue
    #from icecube import topeventcleaning
    from icecube import WaveCalibrator
    from icecube import filter_tools
    #from icecube import ophelia
    #from icecube import toprec
    #from icecube import astro
    #from icecube import filterscripts
    from icecube import tpx
    #from icecube import bayesian_priors
    #from icecube import frame_object_diff
    from icecube import photonics_service
    from icecube import trigger_splitter
    from icecube import coinc_twc
    #from icecube import full_event_followup
    from icecube import photospline
    #from icecube import gulliver_bootstrap
    #from icecube import portia

    hasGENIE=True
    try:
        from ExtractGENIE import ExtractGENIESystematics
        from ExtractGENIE import ExtractGENIEType
        from ExtractGENIE import ExtractGENIEIterInfo
        from ExtractGENIE import ExtractGENIEIterTarget
    except ImportError:
        print("Could not load GENIE extractors. Not doing any extraction!")
        hasGENIE=False
    
    print "hasGENIE = ", hasGENIE

    if args.keys:
        keys = list(loadtxt(args.keys, dtype=str))
        book_everything = False
        num_keys = len(keys)
    else:
        keys = [] #['I3EventHeader']
        book_everything = True
        num_keys = 'all'
        print('You specified no key list. Writing everything!')
        #print 'You specified no key list. Writing only the I3EventHeaders.'

    print('')
    print('=' * 79)
    print('Will read %d i3 file(s) and write %s key(s) to %s'
          % (len(args.infiles), num_keys, args.outfile))
    print('=' * 79)
    print('')

    outdir = os.path.dirname(args.outfile)
    if outdir not in ['', '.', './']:
        mkdir(outdir, warn=False)

    tray = I3Tray()
    tray.AddModule('I3Reader', 'reader', filenamelist=args.infiles)
    hdf_service = I3HDFTableService(args.outfile)
    ### GENIE info ####
    if hasGENIE:
        tray.AddModule(ExtractGENIEType       , "ExtractGENIEType_mod",
                GENIEResultDict_Name = "I3GENIEResultDict",
                Output_Name = "GENIE_InteractionType",
                )
    
        tray.AddModule(ExtractGENIEIterInfo   , "ExtractGENIEIterInfo_mod",
                GENIEResultDict_Name = "I3GENIEResultDict",
                Output_Name = "GENIE_InteractionInfo",
                )
    
        tray.AddModule(ExtractGENIEIterTarget , "ExtractGENIEIterTarget_mod",
                GENIEResultDict_Name = "I3GENIEResultDict",
                Output_Name = "GENIE_InteractionTarget",
                )
    
        tray.AddModule(ExtractGENIESystematics, "ExtractGENIESystematics_mod",
                GENIEResultDict_Name = "I3GENIEResultDict",
                Output_Name = "GENIE_SystematicsReweight",
                )
    
    def get_true_neutrino(frame):
        i3mctree = frame['I3MCTree']
        count = 0
        for idx in range(0, len(i3mctree)):
            i3particle = i3mctree[idx]
            if i3particle.is_primary:
                count += 1
                if frame.Has('trueNeutrino'):
                    del frame['trueNeutrino']
                frame['trueNeutrino'] = i3particle
            assert(count<=1)
    
    # only use this for where trueNeutrino needs to be rewritten 
    #tray.AddModule(get_true_neutrino, "true_nu", Streams=[icetray.I3Frame.Physics])

    tray.AddModule(
        I3TableWriter, 'writer',
        tableservice=hdf_service,
        keys=keys,
        BookEverything=book_everything,
        SubEventStreams=['fullevent', 'SLOPSplit', 'InIceSplit', 'in_ice',
                         'nullsplitter']
    )
    tray.AddModule('TrashCan', 'byebye')
    tray.Execute()
    tray.Finish()
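The snippet ends inside main(); a standard entry-point guard (assumed here, not shown in the original) would run it when the script is invoked directly:

if __name__ == '__main__':
    main()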