def get_weight_by_flux(frame):
    """ Calculates the weight of an event using the fluxes.
    
    Parameters:
    -----------
    frame : I3Frame
        The frame to process.
    """
    true_neutrino = dataclasses.get_most_energetic_neutrino(frame["I3MCTree"])
    true_nu_energy = true_neutrino.energy
    true_nu_coszen = np.cos(true_neutrino.dir.zenith)
    norm = (frame["I3MCWeightDict"]['OneWeight'] /
            frame["I3MCWeightDict"]['NEvents']) * 2.
    if (true_neutrino.type > 0):
        nue_flux = flux_service.getFlux(dataclasses.I3Particle.NuE,
                                        true_nu_energy,
                                        true_nu_coszen) * norm * 0.5 / 0.7
        numu_flux = flux_service.getFlux(dataclasses.I3Particle.NuMu,
                                         true_nu_energy,
                                         true_nu_coszen) * norm * 0.5 / 0.7
    else:
        nue_flux = flux_service.getFlux(dataclasses.I3Particle.NuEBar,
                                        true_nu_energy,
                                        true_nu_coszen) * norm * 0.5 / 0.3
        numu_flux = flux_service.getFlux(dataclasses.I3Particle.NuMuBar,
                                         true_nu_energy,
                                         true_nu_coszen) * norm * 0.5 / 0.3
    frame['NuMuFlux'] = dataclasses.I3Double(numu_flux)
    frame['NueFlux'] = dataclasses.I3Double(nue_flux)
    frame['NoFlux'] = dataclasses.I3Double(norm)
    return True
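get_weight_by_flux expects a module-level flux_service plus the numpy and dataclasses imports; a minimal sketch of how it might be wired into an icetray tray, assuming the NewNuFlux Honda tables are available (the flux model name and input file below are only illustrative):

import numpy as np
from I3Tray import I3Tray
from icecube import icetray, dataio, dataclasses, NewNuFlux

# assumed atmospheric flux model; its getFlux(type, E, cos(zenith)) is what the function above calls
flux_service = NewNuFlux.makeFlux("honda2006")

tray = I3Tray()
tray.AddModule("I3Reader", "reader", Filename="Level5_genie_nue.i3.zst")  # hypothetical input file
tray.AddModule(get_weight_by_flux, "get_weight_by_flux")
tray.Execute()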
Example 2
def GetFlux(frame):

    true_neutrino = get_most_energetic_neutrino(frame["I3MCTree"])
    true_nu_energy = true_neutrino.energy
    true_nu_coszen = cos(true_neutrino.dir.zenith)
    norm = (frame["I3MCWeightDict"]['OneWeight'] /
            frame["I3MCWeightDict"]['NEvents']) * 2.

    if (true_neutrino.type > 0):
        nue_flux_vector = flux_service.getFlux(
            dataclasses.I3Particle.NuE, true_nu_energy,
            true_nu_coszen) * norm * 0.5 / 0.7
        numu_flux_vector = flux_service.getFlux(
            dataclasses.I3Particle.NuMu, true_nu_energy,
            true_nu_coszen) * norm * 0.5 / 0.7
    else:
        nue_flux_vector = flux_service.getFlux(
            dataclasses.I3Particle.NuEBar, true_nu_energy,
            true_nu_coszen) * norm * 0.5 / 0.3
        numu_flux_vector = flux_service.getFlux(
            dataclasses.I3Particle.NuMuBar, true_nu_energy,
            true_nu_coszen) * norm * 0.5 / 0.3


    # print(true_nu_energy, true_nu_coszen, norm, numu_flux_vector, nue_flux_vector)
    frame["I3MCWeightDict"]["no_flux"] = norm
    frame["I3MCWeightDict"]["numu_flux"] = numu_flux_vector
    frame["I3MCWeightDict"]["nue_flux"] = nue_flux_vector
Example 3
def process_frame(frame, gcdfile='/cvmfs/icecube.opensciencegrid.org/data/GCD/GeoCalibDetectorStatus_2013.56429_V0.i3.gz', charge_scale=1.0, time_scale=1e-3):
    """ Processes a frame to create an event graph and metadata out of it.
    
    Parameters:
    -----------
    frame : I3Frame
        The data frame to extract an event graph with features from.
    gcdfile : str
        Path to the gcd file.
    charge_scale : float
        The normalization constant for charge.
    time_scale : float
        The normalization constant for time.
    """

    ### Meta data of the event for analysis of the classifier and creation of ground truth
    primary = dataclasses.get_most_energetic_neutrino(frame['I3MCTree'])
    if primary is None:
        get_weighted_primary(frame, MCPrimary='MCPrimary')
        primary = frame['MCPrimary']

    # Obtain the PDG Encoding for ground truth
    #frame['PDGEncoding'] = dataclasses.I3Double(primary.pdg_encoding)
    #frame['InteractionType'] = dataclasses.I3Double(frame['I3MCWeightDict']['InteractionType'])

    frame['RunID'] = icetray.I3Int(frame['I3EventHeader'].run_id)
    frame['EventID'] = icetray.I3Int(frame['I3EventHeader'].event_id)
    frame['PrimaryEnergy'] = dataclasses.I3Double(primary.energy)

    ### Create features for each event 
    features, coordinates, _ = get_events_from_frame(frame, charge_scale=charge_scale, time_scale=time_scale)
    for feature_name in vertex_features:
        frame[feature_name] = dataclasses.I3VectorFloat(features[feature_name])
    
    ### Create offset lookups for the flattened feature arrays per event
    frame['NumberVertices'] = icetray.I3Int(len(next(iter(features.values()))))

    ### Create coordinates for each vertex
    C = np.vstack(coordinates.values()).T
    #C, C_mean, C_std = normalize_coordinates(C, weights=None, copy=True)
    #C_cog, C_mean_cog, C_std_cog = normalize_coordinates(C, weights=features['TotalCharge'], copy=True)


    frame['VertexX'] = dataclasses.I3VectorFloat(C[:, 0])
    frame['VertexY'] = dataclasses.I3VectorFloat(C[:, 1])
    frame['VertexZ'] = dataclasses.I3VectorFloat(C[:, 2])

    ### Output centering and true debug information
    frame['PrimaryX'] = dataclasses.I3Double(primary.pos.x)
    frame['PrimaryY'] = dataclasses.I3Double(primary.pos.y)
    frame['PrimaryZ'] = dataclasses.I3Double(primary.pos.z)
    frame['PrimaryAzimuth'] = dataclasses.I3Double(primary.dir.azimuth)
    frame['PrimaryZenith'] = dataclasses.I3Double(primary.dir.zenith)

    ### Apply labeling
    classify_wrapper(frame, None, gcdfile=gcdfile)
    return True
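The scalar and vector keys that process_frame adds are typically booked to HDF5 afterwards; a sketch using the hdfwriter tableio segment, assuming the helpers the snippet relies on (get_events_from_frame, classify_wrapper, vertex_features, get_weighted_primary) are defined in the same module (key list, stream name, and file names are only examples):

from I3Tray import I3Tray
from icecube import icetray, dataio, dataclasses
from icecube.hdfwriter import I3HDFWriter

tray = I3Tray()
tray.AddModule("I3Reader", "reader", Filename="input.i3.zst")  # hypothetical input
tray.AddModule(process_frame, "process_frame",
               gcdfile="/path/to/GeoCalibDetectorStatus_2013.56429_V0.i3.gz")
tray.AddSegment(I3HDFWriter, "writer",
                Output="features.hdf5",
                Keys=["RunID", "EventID", "PrimaryEnergy", "NumberVertices",
                      "VertexX", "VertexY", "VertexZ"],
                SubEventStreams=["InIceSplit"])  # assumed sub-event stream name
tray.Execute()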
Example 4
def load_file(path, s_parms, nu_frac=1.):
    """
    Get nu parameters from an I3 file
    """
    print('Loading I3 file {0}'.format(path))
    infile = dataio.I3File(path, 'r')

    parms = {}
    for p in s_parms:
        parms[p] = []

    while infile.more():
        frame = infile.pop_daq()
        if frame is None:
            infile.rewind()
            break

        mctree = frame['I3MCTree']
        nu = dataclasses.get_most_energetic_neutrino(mctree)
        energy = nu.energy
        coszen = np.cos(nu.dir.zenith)
        azimuth = nu.dir.azimuth * 180. / np.pi
        ptype = nu.type.real

        ow = frame['I3MCWeightDict']['OneWeight']
        nevents = frame['I3MCWeightDict']['NEvents']
        oneweight = ow / nevents
        if ptype > 0:
            oneweight /= nu_frac
        else:
            oneweight /= (1 - nu_frac)

        interaction = frame['I3MCWeightDict']['InteractionType']
        volume = frame['I3MCWeightDict']['GeneratorVolume']

        GENIE_x = frame['I3GENIEResultDict']['x']
        GENIE_y = frame['I3GENIEResultDict']['y']

        parms['energy'].append(energy)
        parms['coszen'].append(coszen)
        parms['azimuth'].append(azimuth)
        parms['ptype'].append(ptype)
        parms['oneweight'].append(oneweight)
        parms['interaction'].append(interaction)
        parms['volume'].append(volume)
        parms['GENIE_x'].append(GENIE_x)
        parms['GENIE_y'].append(GENIE_y)
    return parms
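The dictionary returned by load_file holds one flat list per parameter name; a small usage sketch (the input file name is hypothetical) that converts a few of them to numpy arrays and builds a OneWeight-weighted energy histogram:

import numpy as np

s_parms = ['energy', 'coszen', 'azimuth', 'ptype', 'oneweight',
           'interaction', 'volume', 'GENIE_x', 'GENIE_y']
parms = load_file('genie_numu_level5.i3.zst', s_parms)  # hypothetical file

energy = np.asarray(parms['energy'])
oneweight = np.asarray(parms['oneweight'])

# weighted distribution of log10(E_true); the binning is arbitrary
counts, edges = np.histogram(np.log10(energy), bins=40, weights=oneweight)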
Example 5
    def testSequence(self):
        sun_from_timestamp = astro.sun_dir(
            self.frame['I3EventHeader'].start_time.mod_julian_day_double)
        sun_from_wimpparams = astro.sun_dir(
            self.frame['WIMP_params'].time.mod_julian_day_double)
        nu = dataclasses.get_most_energetic_neutrino(self.frame['I3MCTree'])
        sun_from_nu = nu.dir.zenith, nu.dir.azimuth
        icetray.logging.log_info("Sunpos from TimeStamp  :" +
                                 str(sun_from_timestamp))
        icetray.logging.log_info("Sunpos from WimpParams :" +
                                 str(sun_from_wimpparams))
        icetray.logging.log_info("Sunpos from neutrino   :" + str(sun_from_nu))

        self.assert_(
            sun_from_wimpparams[0] <= sun_from_nu[0] + 0.01 * I3Units.deg
            and sun_from_wimpparams[0] >= sun_from_nu[0] - 0.01 * I3Units.deg,
            "zenith match within precision 0.02deg")
        self.assert_(
            sun_from_wimpparams[1] <= sun_from_nu[1] + 0.01 * I3Units.deg
            and sun_from_wimpparams[1] >= sun_from_nu[1] - 0.01 * I3Units.deg,
            "azimuth match within precision 0.02deg")
Example 6
    def DAQ(self, frame):
        if frame.Has(self.wimpparams_name) and frame.Has("I3MCTree"):
            wp = frame[self.wimpparams_name]
            nu = dataclasses.get_most_energetic_neutrino(frame["I3MCTree"])
            nu_sigma = self.xsNeutrino(nu)
            if nu.type == dataclasses.I3Particle.NuMu:
                self.nu_nu_coll += wp.nu_weight
                self.lep_nu_coll += wp.lep_weight
                self.nuv_nu_coll += wp.vgen * wp.nu_weight
                self.lepv_nu_coll += wp.vgen * wp.lep_weight
                self.nuflux_nu_coll += wp.nu_weight / nu_sigma
                self.lepflux_nu_coll += wp.lep_weight / nu_sigma
            elif nu.type == dataclasses.I3Particle.NuMuBar:
                self.nu_nubar_coll += wp.nu_weight
                self.lep_nubar_coll += wp.lep_weight
                self.nuv_nubar_coll += wp.vgen * wp.nu_weight
                self.lepv_nubar_coll += wp.vgen * wp.lep_weight
                self.nuflux_nubar_coll += wp.nu_weight / nu_sigma
                self.lepflux_nubar_coll += wp.lep_weight / nu_sigma
        else:
            icetray.logging.log_fatal(
                "The key " + self.wimpparams_name + " or I3MCTree is not valid")
        self.PushFrame(frame)
        return
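DAQ assumes that the per-flavor accumulators, the parameter self.wimpparams_name, and a cross-section lookup self.xsNeutrino already exist on the module. A minimal sketch of the kind of setup it presupposes (the module and parameter names are assumptions; the attribute names are taken from the code above):

from icecube import icetray

class WimpWeightCollector(icetray.I3Module):  # hypothetical module name
    def __init__(self, context):
        icetray.I3Module.__init__(self, context)
        self.AddParameter("WIMPparamsName", "Frame key of the WIMP parameters", "WIMP_params")
        self.AddOutBox("OutBox")

    def Configure(self):
        self.wimpparams_name = self.GetParameter("WIMPparamsName")
        # per-flavor accumulators summed up in DAQ()
        for attr in ("nu_nu_coll", "lep_nu_coll", "nuv_nu_coll", "lepv_nu_coll",
                     "nuflux_nu_coll", "lepflux_nu_coll",
                     "nu_nubar_coll", "lep_nubar_coll", "nuv_nubar_coll",
                     "lepv_nubar_coll", "nuflux_nubar_coll", "lepflux_nubar_coll"):
            setattr(self, attr, 0.0)

    def xsNeutrino(self, nu):
        # placeholder cross-section lookup; the real implementation is not shown here
        raise NotImplementedError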
Example 7
def Primary(frame, FitName, GenieOrCorsika="Genie"):
    if frame.Has(FitName):
        fit = copy.deepcopy(frame[FitName])
        if not fit.fit_status == dataclasses.I3Particle.OK:
            print "Bad Primary Seed"
            return False

        if frame.Has("I3MCTree"):
            if GenieOrCorsika == "Genie":
                prim = dataclasses.get_most_energetic_neutrino(
                    frame['I3MCTree'])
            elif GenieOrCorsika == "Corsika":
                prim = dataclasses.get_most_energetic_muon(frame['I3MCTree'])
            else:
                print "Specify if neutrino or muons"
                return False
            fit.pos = prim.pos
            fit.dir = prim.dir
            fit.time = prim.time
            frame['Primary'] = fit
            return True

    print "No Primary"
    return False
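Primary is meant to be added to a tray as a Physics-frame function; a hedged usage sketch (the reader file and seed fit name are only examples):

from I3Tray import I3Tray
from icecube import icetray, dataio, dataclasses
import copy

tray = I3Tray()
tray.AddModule("I3Reader", "reader", Filename="level3_nugen.i3.zst")  # hypothetical input
tray.AddModule(Primary, "seed_primary",
               FitName="SPEFit2",            # assumed seed fit present in the input files
               GenieOrCorsika="Genie")
tray.Execute()

Because Primary returns False when the seed is bad or missing, those Physics frames are dropped from the downstream stream.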
Example 8
from icecube import dataclasses as dc
from icecube.icetray import I3Units, I3Test
from icecube.dataclasses import get_most_energetic_primary
from icecube.dataclasses import get_most_energetic_cascade
from icecube.dataclasses import get_most_energetic_inice
from icecube.dataclasses import get_most_energetic_track
from icecube.dataclasses import get_most_energetic_neutrino
from icecube.dataclasses import get_most_energetic_muon
from icecube.dataclasses import get_most_energetic_nucleus

primary = dc.I3Particle()
primary.energy = 10 * I3Units.TeV
tree = dc.I3MCTree()
tree.add_primary(primary)


mep = get_most_energetic_primary(tree)
I3Test.ENSURE(mep.id == primary.id, "got the wrong particle.")

I3Test.ENSURE(not get_most_energetic_cascade(tree), "got a particle, but shouldn't.")
I3Test.ENSURE(not get_most_energetic_inice(tree), "got a particle, but shouldn't.")
I3Test.ENSURE(not get_most_energetic_track(tree), "got a particle, but shouldn't.")
I3Test.ENSURE(not get_most_energetic_neutrino(tree), "got a particle, but shouldn't.")
I3Test.ENSURE(not get_most_energetic_muon(tree), "got a particle, but shouldn't.")
I3Test.ENSURE(not get_most_energetic_nucleus(tree), "got a particle, but shouldn't.")


primary2 = dc.I3Particle()
primary2.energy = 9 * I3Units.TeV

cascade = dc.I3Particle()
cascade.type = dc.I3Particle.EMinus

inice = dc.I3Particle()
inice.location_type = dc.I3Particle.InIce

track = dc.I3Particle()
track.energy = 10 * I3Units.TeV
Example 9
def process_frame(frame,
                  charge_scale=1.0,
                  time_scale=1e-3,
                  append_coordinates_to_features=False):
    """ Processes a frame to create an event graph and metadata out of it.
    
    Parameters:
    -----------
    frame : I3Frame
        The data frame to extract an event graph with features from.
    charge_scale : float
        The normalization constant for charge.
    time_scale : float
        The normalization constant for time.
    append_coordinates_to_features : bool
        If the normalized coordinates should be appended to the feature matrix.
    """
    global event_offset
    global distances_offset

    ### Meta data of the event for analysis of the classifier and creation of ground truth
    nu = dataclasses.get_most_energetic_neutrino(frame['I3MCTree'])

    # Obtain the PDG Encoding for ground truth
    frame['PDGEncoding'] = dataclasses.I3Double(nu.pdg_encoding)
    frame['InteractionType'] = dataclasses.I3Double(
        frame['I3MCWeightDict']['InteractionType'])
    frame['NumberChannels'] = dataclasses.I3Double(
        frame['IC86_Dunkman_L3_Vars']['NchCleaned'])
    frame['DCFiducialPE'] = dataclasses.I3Double(
        frame['IC86_Dunkman_L3_Vars']['DCFiducialPE'])
    frame['NeutrinoEnergy'] = dataclasses.I3Double(
        frame['trueNeutrino'].energy)
    # Some rare events do not produce a cascade
    try:
        frame['CascadeEnergy'] = dataclasses.I3Double(
            frame['trueCascade'].energy)
    except:
        frame['CascadeEnergy'] = dataclasses.I3Double(np.nan)
    try:
        # Apparently frames without a primary muon can also contain this key, so try to
        # access it and let the resulting exception mark those events
        frame['MuonEnergy'] = dataclasses.I3Double(frame['trueMuon'].energy)
        frame['TrackLength'] = dataclasses.I3Double(frame['trueMuon'].length)
    except:
        frame['MuonEnergy'] = dataclasses.I3Double(np.nan)
        frame['TrackLength'] = dataclasses.I3Double(np.nan)
    frame['DeltaLLH'] = dataclasses.I3Double(
        frame['IC86_Dunkman_L6']['delta_LLH'])  # Used for a baseline classification
    frame['RunID'] = icetray.I3Int(frame['I3EventHeader'].run_id)
    frame['EventID'] = icetray.I3Int(frame['I3EventHeader'].event_id)
    frame['PrimaryEnergy'] = dataclasses.I3Double(nu.energy)

    ### Create features for each event
    features, coordinates = get_events_from_frame(frame,
                                                  charge_scale=charge_scale,
                                                  time_scale=time_scale)
    for feature_name in vertex_features:
        frame[feature_name] = dataclasses.I3VectorFloat(features[feature_name])

    ### Create offset lookups for the flattened feature arrays per event
    frame['NumberHits'] = icetray.I3Int(len(next(iter(features.values()))))
    #frame['Offset'] = icetray.I3Int(event_offset)
    event_offset += len(next(iter(features.values())))

    ### Create coordinates for each vertex
    C = np.vstack(coordinates.values()).T
    C, C_mean, C_std = normalize_coordinates(C, weights=None, copy=True)
    C_cog, C_mean_cog, C_std_cog = normalize_coordinates(
        C, weights=features['TotalCharge'], copy=True)

    frame['VertexX'] = dataclasses.I3VectorFloat(C[:, 0])
    frame['VertexY'] = dataclasses.I3VectorFloat(C[:, 1])
    frame['VertexZ'] = dataclasses.I3VectorFloat(C[:, 2])
    frame['COGCenteredVertexX'] = dataclasses.I3VectorFloat(C_cog[:, 0])
    frame['COGCenteredVertexY'] = dataclasses.I3VectorFloat(C_cog[:, 1])
    frame['COGCenteredVertexZ'] = dataclasses.I3VectorFloat(C_cog[:, 2])

    ### Output centering and true debug information
    frame['PrimaryXOriginal'] = dataclasses.I3Double(nu.pos.x)
    frame['PrimaryYOriginal'] = dataclasses.I3Double(nu.pos.y)
    frame['PrimaryZOriginal'] = dataclasses.I3Double(nu.pos.z)
    frame['CMeans'] = dataclasses.I3VectorFloat(C_mean)
    frame['COGCenteredCMeans'] = dataclasses.I3VectorFloat(C_mean_cog)

    ### Compute targets for possible auxiliary tasks, i.e. position and direction of the interaction
    frame['PrimaryX'] = dataclasses.I3Double((nu.pos.x - C_mean[0]) / C_std[0])
    frame['PrimaryY'] = dataclasses.I3Double((nu.pos.y - C_mean[1]) / C_std[1])
    frame['PrimaryZ'] = dataclasses.I3Double((nu.pos.z - C_mean[2]) / C_std[2])

    frame['COGCenteredPrimaryX'] = dataclasses.I3Double(
        (nu.pos.x - C_mean_cog[0]) / C_std_cog[0])
    frame['COGCenteredPrimaryY'] = dataclasses.I3Double(
        (nu.pos.y - C_mean_cog[1]) / C_std_cog[1])
    frame['COGCenteredPrimaryZ'] = dataclasses.I3Double(
        (nu.pos.z - C_mean_cog[2]) / C_std_cog[2])
    frame['PrimaryAzimuth'] = dataclasses.I3Double(nu.dir.azimuth)
    frame['PrimaryZenith'] = dataclasses.I3Double(nu.dir.zenith)

    ### Compute possible reco inputs that apply to entire event sets
    track_reco = frame['IC86_Dunkman_L6_PegLeg_MultiNest8D_Track']
    frame['RecoX'] = dataclasses.I3Double(
        (track_reco.pos.x - C_mean[0]) / C_std[0])
    frame['RecoY'] = dataclasses.I3Double(
        (track_reco.pos.y - C_mean[1]) / C_std[1])
    frame['RecoZ'] = dataclasses.I3Double(
        (track_reco.pos.z - C_mean[2]) / C_std[2])
    frame['COGCenteredRecoX'] = dataclasses.I3Double(
        (track_reco.pos.x - C_mean_cog[0]) / C_std_cog[0])
    frame['COGCenteredRecoY'] = dataclasses.I3Double(
        (track_reco.pos.y - C_mean_cog[1]) / C_std_cog[1])
    frame['COGCenteredRecoZ'] = dataclasses.I3Double(
        (track_reco.pos.z - C_mean_cog[2]) / C_std_cog[2])
    frame['RecoAzimuth'] = dataclasses.I3Double(track_reco.dir.azimuth)
    frame['RecoZenith'] = dataclasses.I3Double(track_reco.dir.zenith)
    return True
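This version of process_frame calls a normalize_coordinates helper that is not shown in the snippet. A minimal sketch of what such a helper could look like, assuming it simply centers and scales the (N, 3) coordinate array with optional per-vertex charge weights and returns the mean and standard deviation it used:

import numpy as np

def normalize_coordinates(C, weights=None, copy=True):
    """Hypothetical helper: center and scale an (N, 3) coordinate array.

    With weights it uses the charge-weighted mean/std (centre of gravity);
    returns the normalized coordinates plus the mean and std that were used.
    """
    if copy:
        C = np.array(C, dtype=float)
    mean = np.average(C, axis=0, weights=weights)
    var = np.average((C - mean) ** 2, axis=0, weights=weights)
    std = np.sqrt(var) + 1e-12  # avoid division by zero for degenerate events
    C -= mean
    C /= std
    return C, mean, std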
Example 10
def Make_Image(frame):
    global data 
    global geometry
    global st_info

    #Log id info 
    id = np.zeros(1,dtype = id_dtype)
    H = frame["I3EventHeader"]
    id[["run_id","sub_run_id","event_id","sub_event_id"]] = (H.run_id,H.sub_run_id,H.event_id,H.sub_event_id)
    
    #Log Weight info
    weight = np.zeros(1, dtype=weight_dtype)
    if data_type in ['genie', 'corsika']:
        w = dict(frame[WEIGHT_KEY])
        weight[list(w.keys())] = tuple(w.values())
    
    #Log MCTree info
    primary = np.zeros(1, dtype=particle_dtype)
    prim_daughter = np.zeros(1, dtype=particle_dtype)
    if not data_type == 'data':
        #find primary particle
        mctree = frame[MCTREE_KEY]
        daughter = None
        if data_type == 'genie':
            prim = dataclasses.get_most_energetic_neutrino(mctree)
            max_energy = 0
            for part in mctree.children(prim.id):
                if part.energy > max_energy:
                    max_energy = part.energy
                    daughter = part
        else:
            prim = dataclasses.get_most_energetic_primary(mctree)
            daughter = dataclasses.get_most_energetic_muon(mctree)
        
        #if no children, then daughter is a duplicate of primary
        if daughter is None:
            print("MCTree has no primary children")
            primary[["tree_id","pdg","energy","position","direction","time","length"]] =\
            ([prim.id.majorID, prim.id.minorID], prim.pdg_encoding, prim.energy,\
             [prim.pos.x,prim.pos.y,prim.pos.z],\
             [prim.dir.zenith,prim.dir.azimuth],prim.time, prim.length)

            prim_daughter[["tree_id","pdg","energy","position","direction","time","length"]] =\
            ([prim.id.majorID, prim.id.minorID], prim.pdg_encoding, prim.energy,\
             [prim.pos.x,prim.pos.y,prim.pos.z],\
             [prim.dir.zenith,prim.dir.azimuth], prim.time, prim.length)

        #if there are children, daughter is the child with highest energy
        else:
            primary[["tree_id","pdg","energy","position","direction","time","length"]] =\
            ([prim.id.majorID, prim.id.minorID], prim.pdg_encoding, prim.energy,\
             [prim.pos.x,prim.pos.y,prim.pos.z],\
             [prim.dir.zenith,prim.dir.azimuth],prim.time, prim.length)
        
            prim_daughter[["tree_id","pdg","energy","position","direction","time","length"]] =\
            ([daughter.id.majorID, daughter.id.minorID], daughter.pdg_encoding,daughter.energy,\
             [daughter.pos.x,daughter.pos.y,daughter.pos.z],\
             [daughter.dir.zenith,daughter.dir.azimuth],daughter.time,daughter.length)
    
    #Log HESE veto parameters
    hese = np.zeros(1,dtype = hese_dtype)
    hese_vheselfveto = True
    hese_pos =[-9999,-9999,-9999]
    hese_time = -999
    if frame.Has("HESE3_VHESelfVeto"):
        hese_vheselfveto = frame["HESE3_VHESelfVeto"].value
        hese_pos = [frame["HESE3_VHESelfVetoVertexPos"].x,frame["HESE3_VHESelfVetoVertexPos"].y,frame["HESE3_VHESelfVetoVertexPos"].z]
        hese_time = frame["HESE3_VHESelfVetoVertexTime"].value
    hese[["vheselfveto","vheselfvetovertexpos","vheselfvetovertextime"]][0] =\
    (hese_vheselfveto,hese_pos,hese_time) 
   
    #Log logan's veto parameters
    veto = np.zeros(1,dtype = veto_dtype)
    veto_cas_rlogl = -999
    veto_spe_rlogl = 999
    veto_cas_rlogl_ndc = -999
    veto_spe_rlogl_ndc = 999
    veto_fh_z = -999
    veto_svv_z = -999
    veto_ldp = -999
    
    if frame.Has('HESE3_VHESelfVetoVertexPos') and frame.Has('SPEFit32_DPFitParams') and frame.Has('CascadeLlhVertexFit_DPParams')\
    and frame.Has('SPEFit32_noDC_DPFitParams') and frame.Has('CascadeLlhVertexFit_noDC_DPParams') and frame.Has('depthFirstHit')\
    and frame.Has("LeastDistanceToPolygon_Veto"):
         
        veto_cas_rlogl = frame['CascadeLlhVertexFit_DPParams'].ReducedLlh
        veto_spe_rlogl = frame['SPEFit32_DPFitParams'].rlogl
        veto_cas_rlogl_ndc = frame['CascadeLlhVertexFit_noDC_DPParams'].ReducedLlh
        veto_spe_rlogl_ndc = frame['SPEFit32_noDC_DPFitParams'].rlogl
        veto_fh_z = frame['depthFirstHit'].value
        veto_svv_z = frame['HESE3_VHESelfVetoVertexPos'].z
        veto_ldp = frame["LeastDistanceToPolygon_Veto"].value
        trck = frame['CascadeLlhVertexFit_DP']
        cscd = frame['SPEFit32_DP']
    
    veto[["SPE_rlogl","Cascade_rlogl","SPE_rlogl_noDC", "Cascade_rlogl_noDC","FirstHitZ","VHESelfVetoVertexPosZ","LeastDistanceToPolygon_Veto"]] =\
    (veto_spe_rlogl,veto_cas_rlogl,veto_spe_rlogl_ndc,veto_cas_rlogl_ndc,veto_fh_z,veto_svv_z,veto_ldp)                     
    pulses= dataclasses.I3RecoPulseSeriesMap.from_frame(frame, PULSES_KEY)
    wf_map = frame["CalibratedWaveformsHLCATWD"]   


    #make image from raw waveforms 
    wfms = []
    wf_times = [] #storage for waveforms starting times
    wf_widths = [] #storage for waveform bin widths
    
    for img_ch, (q, stnum, dist) in enumerate(st_info):
        for omkey in wf_map.keys():
            if omkey.string == stnum:
                for wf in wf_map.get(omkey, []):
                    if wf.status == 0 and wf.source_index == 0:
                        wf_times.append(wf.time)
                        wf_widths.append(wf.bin_width)
                        wfms.append({
                                'wfm': wf.waveform,
                                'time': wf.time,  
                                'width': wf.bin_width,
                                'dom_idx': omkey.om - 1,
                                'img_ch': img_ch,
                                'om_pos': [geometry[omkey].position.x,geometry[omkey].position.y,geometry[omkey].position.z]
                                })


    im = np.zeros(shape=(N_X_BINS, N_Y_BINS, N_CHANNELS))
    wf_times_arr = np.zeros(shape=(N_Y_BINS, N_CHANNELS))
    wf_pos_arr = np.zeros(shape=(3,N_Y_BINS, N_CHANNELS))
    

    if len(wfms) <= 4:
        print("FAILED only %d WF" %len(wfms) )
        return False
    
    #we need to prevent early noise hits from shifting the actual
    #interaction out of the image time frame
    #first work out when the first waveform starts
    wf_times = np.array(wf_times)
    wf_times = wf_times[wf_times.argsort()]

    #find the biggest difference between starting times
    diff_times = np.diff(wf_times[:N_NOISE_HITS])
    max_diff_pos = np.argmax(diff_times)
    
    #check if the images needs to be shifted and work out the shift
    if diff_times[max_diff_pos] > MAX_TIME_SHIFT:
        min_time = wf_times[max_diff_pos+1]
    else:
        min_time = wf_times[0]

    

    #make images
    for wfm in wfms:
        wf_shift = 0
        start_ind = min(N_X_BINS, int((wfm['time'] - min_time) / wfm['width']))
        if start_ind >= 0:
            end_ind = min(N_X_BINS, start_ind + len(wfm['wfm']))
            wfm_vals = wfm['wfm'][0:end_ind-start_ind]
        else: 
            wf_shift = abs(start_ind)
            start_ind = 0
            end_ind = min(N_X_BINS, len(wfm['wfm'])-wf_shift)
            if end_ind <0:
                end_ind = 0
            wfm_vals = wfm['wfm'][wf_shift:len(wfm['wfm'])]

        im[start_ind:end_ind, wfm['dom_idx'], wfm['img_ch']] = wfm_vals
        wf_times_arr[wfm['dom_idx'], wfm['img_ch']] = wfm['time']
        wf_pos_arr[0:3,wfm['dom_idx'], wfm['img_ch']] = wfm['om_pos']
        
        
        if wf_shift > 0:
            print("the images were shifted by {0:.3f}".format(wf_shift))
                
    im = np.true_divide(im, 10**(-8))
    im = im.astype(np.float32)
    
    if np.sum(im[:,:,0])==0:
        print("FAILED no image 0")
        return False
    if np.sum(im[:,:,1])==0:
        print("FAILED no image 1")
        return False
    if np.sum(im[:,:,2])==0:
        print("FAILED no image 2")
        return False
    
    #Log all the event info    
    event = np.zeros(1,dtype = info_dtype)    
    event[["id","image","qtot","qst","primary","prim_daughter","logan_veto","hese","weight_dict"]]=\
           (id[0], im, qtot, st_info, primary[0], prim_daughter[0], veto[0],hese[0], weight[0])
    data.append(event)
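Make_Image appends one structured record per event to the module-level data list; after the tray has run, the records can be stacked and written out, for example (the output file name is arbitrary):

import numpy as np

# after tray.Execute() has finished
events = np.concatenate(data)           # list of shape-(1,) structured arrays -> one array
np.save("waveform_images.npy", events)  # hypothetical output file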
Example 11
def store_primary(frame, mctree_name):
    if frame.Has(mctree_name):
        p = dataclasses.get_most_energetic_neutrino(frame[mctree_name])
        frame['NuPrimary'] = copy.copy(p)
                              nue_flux_service = Honda2014SPLFluxIP,
                              numu_flux_service = Honda2014SPLFluxIP,
                              delta_m21_squared = 7.6e-5,             #< eV^2                
                              delta_m31_squared = 2.426e-3,           #< eV^2                 
                              theta_12 = math.asin(math.sqrt(0.312)), #< rad                    
                              theta_23 = math.asin(math.sqrt(0.42)),  #< rad
                              theta_13 = math.asin(math.sqrt(0.025)), #< rad
                              delta_cp = 0.,                          #< rad
                              cache_base = "Ocelot",
                              output_name = "DummyNeutrinoWeights")

                osc_w = frame["DummyNeutrinoWeights"]["OscillatedRate"] / float(len(filenames['msu'])/3.0)
                unosc_w = frame["DummyNeutrinoWeights"]["UnoscillatedRate"] / float(len(filenames['msu'])/3.0)

                # fix the nu/nubar junk from ocelot, which is just wrong
                p = dataclasses.get_most_energetic_neutrino(frame['I3MCTree'])
                if p.pdg_encoding > 0:
                    osc_w *= 0.7/2.0
                    unosc_w *= 0.7/2.0
                else:
                    osc_w *= 0.3/2.0
                    unosc_w *= 0.3/2.0

                eventid = file_num + str(frame["I3EventHeader"].event_id)
                uniqueid = str(eventid)+'_'+str(p.energy)+'_'+str(numpy.cos(p.dir.zenith))
            
                isCC = frame["I3MCWeightDict"]["InteractionType"]==1
                isneutrino = p.pdg_encoding > 0.0

                if isCC:
                    for case in requirements.keys():