示例#1
0
    def Execute(self, stats):
        """Split a single I3 file round-robin into `noutputs` output files.

        Module parameters:
          inputfile     -- path to the source .i3 file (must exist)
          outfileprefix -- prefix for generated output file names
          noutputs      -- number of output files (must be > 1)
          outfilesuffix -- suffix for generated output file names

        Returns 0 on success (IPBaseClass convention).
        """
        if not ipmodule.IPBaseClass.Execute(self, stats): return 0

        from icecube import icetray, dataclasses, dataio

        infile = self.GetParameter('inputfile')
        if not os.path.isfile(infile):
            raise Exception('input file does not exist')
        prefix = self.GetParameter('outfileprefix')
        noutputs = self.GetParameter('noutputs')
        if noutputs <= 1:
            raise Exception('noutputs must be greater than 1')
        suffix = self.GetParameter('outfilesuffix')

        # Zero-pad the file index just wide enough for `noutputs` files.
        pattern = '%s%0' + ('%d' % math.ceil(math.log10(noutputs))) + 'd%s'

        # Open all outputs up front.  BUGFIX: use range — xrange is Python 2
        # only and the surrounding code base already uses Python 3 idioms.
        outfiles = [dataio.I3File(pattern % (prefix, i, suffix), 'w')
                    for i in range(noutputs)]

        # Distribute frames round-robin across the outputs.
        for n, frame in enumerate(dataio.I3File(infile)):
            outfiles[n % noutputs].push(frame)

        # Close output files (renamed loop variable: `file` shadows a builtin).
        for out in outfiles:
            out.close()

        return 0
示例#2
0
    def Execute(self, stats):
        """Interleave frames from `ninputs` numbered input files into one output.

        Module parameters:
          outputfile   -- path of the combined .i3 file to write
          infileprefix -- prefix of the numbered input files
          ninputs      -- number of input files (must be > 1)
          infilesuffix -- suffix of the numbered input files

        Returns 0 on success (IPBaseClass convention).
        """
        if not ipmodule.IPBaseClass.Execute(self, stats): return 0

        from icecube import icetray, dataclasses, dataio

        outfile = self.GetParameter('outputfile')
        prefix = self.GetParameter('infileprefix')
        ninputs = self.GetParameter('ninputs')
        if ninputs <= 1:
            raise Exception('ninputs must be greater than 1')
        suffix = self.GetParameter('infilesuffix')

        # make list of input files (names are zero-padded to fit ninputs)
        infiles = []
        pattern = '%s%0' + ('%d' % math.ceil(math.log10(ninputs))) + 'd%s'
        for i in range(ninputs):
            finput = pattern % (prefix, i, suffix)
            if not os.path.isfile(finput):
                raise Exception('file %s not found' % finput)
            infiles.append(dataio.I3File(finput))

        # Round-robin one frame at a time from each input into the output.
        output = dataio.I3File(outfile, 'w')
        try:
            while infiles:
                # try to read another frame off each file
                exhausted = []
                for n, infile in enumerate(infiles):
                    frame = None
                    try:
                        frame = infile.pop_frame()
                    except:
                        # must be out of frames
                        infile.close()
                        exhausted.append(n)
                    else:
                        if not frame:
                            # must be out of frames
                            infile.close()
                            exhausted.append(n)
                        else:
                            output.push(frame)
                # BUGFIX: delete the highest indices first.  Deleting in
                # ascending order shifts the remaining indices, so the
                # original removed the wrong files whenever two or more
                # inputs were exhausted in the same pass.
                for n in reversed(exhausted):
                    del infiles[n]
        except Exception:
            print('Exception when combining')
            raise
        finally:
            # close output file even if combining failed
            output.close()

        return 0
示例#3
0
def main(n_frames=200):
    """Draw reco-colored geometry views for physics frames.

    Command-line inputs:
      sys.argv[1] -- I3 file containing the Geometry frame
      sys.argv[2] -- I3 file containing the physics frames to draw

    n_frames: number of physics frames to process.  Default 200 preserves
    the previously hard-coded behavior.
    """
    i3file = dataio.I3File(sys.argv[1])
    gframe = i3file.pop_frame(icetray.I3Frame.Geometry)
    omgeo, coords = gather_geom(gframe)

    i3file2 = dataio.I3File(sys.argv[2])

    for i in range(n_frames):
        print('Frame', i)
        pframe = i3file2.pop_physics()
        colors = gather_reco_colors(pframe)
        draw_geometry(omgeo, coords, i, colors)
示例#4
0
def countEvents(i3fname, debug=False):
    """Count the number of events (number of Q frames, actually) in an i3 file

    Parameters
    ----------
    i3fname : string
        Path to the I3 file to scan.
    debug : bool
        If truthy, stop after the first 50 Q frames.

    Returns
    -------
    frameCount : int
        Number of DAQ (Q) frames encountered.

    """
    from icecube import dataio, icetray  # pylint: disable=import-error

    input_i3 = dataio.I3File(i3fname, 'r')
    # BUGFIX: initialize so `del frame` does not raise NameError when the
    # file contains no frames at all.
    frame = None
    try:
        frameCount = 0
        while input_i3.more():
            frame = input_i3.pop_frame()
            if frame.Stop == icetray.I3Frame.DAQ:
                frameCount += 1

            # Debug mode stops after first 50 Q frames
            if debug > 0 and frameCount >= 50:
                break
        # release the (potentially large) last frame before returning
        del frame
    finally:
        input_i3.close()

    return frameCount
示例#5
0
def get_keys(i3fname):
    """Return the complete set of keys present in any frame in the I3 file.

    Only DAQ (Q) and Physics (P) frames contribute keys.

    Parameters
    ----------
    i3fname : string
        Path to I3 file

    Returns
    -------
    keys : list of strings
        All unique keys from any frame in the file, sorted.

    """
    from icecube import dataio, icetray  # pylint: disable=import-error

    input_i3 = dataio.I3File(i3fname, 'r')

    keys = set()
    # BUGFIX: initialize so `del i3frame` does not raise NameError when the
    # file contains no frames at all.
    i3frame = None
    try:
        while input_i3.more():
            i3frame = input_i3.pop_frame()
            if i3frame.Stop not in [
                    icetray.I3Frame.DAQ, icetray.I3Frame.Physics
            ]:
                continue
            keys = keys.union(i3frame.keys())
        del i3frame
    finally:
        input_i3.close()

    return sorted(keys)
示例#6
0
def checkFiles(flist):
    """Return the subset of `flist` whose frames can be inspected without
    raising; prints a summary and the list of faulty files.

    A file is marked faulty as soon as touching a frame's key list or its
    'I3EventHeader' lookup raises (e.g. truncated/corrupt i3 files).
    Uses Python 2 print statements, matching this snippet's vintage.
    """
    out_list = []
    faulty_list = []
    for one_file in flist:
        infile = dataio.I3File(one_file)
        faulty_file = False
        while infile.more():
            frame = infile.pop_frame()
            # Physics frames are skipped; only other frames are probed.
            if frame.Stop == frame.Physics:
                continue
            # Touch the key list to force deserialization of the frame index.
            frame.keys()
            try:
                dummy = frame.Has('I3EventHeader')
            except:
                # Deliberate best-effort catch-all: any failure while probing
                # marks the whole file as faulty and stops scanning it.
                faulty_file = True
                break
        if not faulty_file:
            out_list.append(one_file)
        else:
            faulty_list.append(one_file)

    print 'Files tested. Good files ', len(out_list), '/', len(flist)
    print 'Files with errors: '
    print faulty_list
    return out_list
示例#7
0
def read(fname='muongun_serialization_test.i3'):
    """Pop the first frame from `fname`, close the file, and assert that the
    deserialized 'Generator' has the same surface as the global `generator`."""
    handle = dataio.I3File(fname)
    frame = handle.pop_frame()
    handle.close()

    restored = frame['Generator']
    assert restored.surface == generator.surface
示例#8
0
def i3Counts(file_name, evt_dict):
    """Record every DAQ event's (run_id, event_id, sub_event_id) in `evt_dict`.

    Each tuple key is mapped to 1; `evt_dict` is mutated in place.
    """
    in_file = dataio.I3File(file_name)
    count = 0
    # BUGFIX: the original popped a frame before the `more()` check and only
    # popped again at the bottom of the loop, so the final DAQ frame of the
    # file was never recorded.  Pop-then-process instead.
    while in_file.more():
        frame = in_file.pop_daq()
        if frame is None:
            # remaining frames are not DAQ frames
            break
        count += 1
        header = frame["I3EventHeader"]
        evt_dict[(header.run_id, header.event_id, header.sub_event_id)] = 1
示例#9
0
def do_one(Type, name, gen, checksum):
    """Serialize a container of type `Type` filled from `gen` to '<name>.i3'
    and verify the file's MD5 digest against `checksum`.

    Sets the module-level `is_fail` flag on a digest mismatch.
    """
    print("Writing %s" % Type)
    global is_fail
    name += '.i3'
    i3f = dataio.I3File(name, dataio.I3File.Mode.Writing)

    tinst = Type()
    count = 0
    for value in gen:
        tinst.append(value)
        count += 1
    print("%d entries" % count)
    frame = icetray.I3Frame()
    frame[name] = tinst

    i3f.push(frame)
    i3f.close()

    # BUGFIX: use a context manager — the original leaked the file handle.
    with open(name, 'rb') as f:
        data = f.read()
    hsh = hashlib.md5()
    hsh.update(data)
    hd = hsh.hexdigest()
    if (hd != checksum):
        print("****************** ERRORZ ERRORZ ***********************")
        print("%s != %s (file %s, %u bytes)" % (hd, checksum, name, len(data)))
        is_fail = True
示例#10
0
def harvest_generators(infiles):
    """
    Harvest serialized generator configurations from a set of I3 files.

    Pops the S (simulation) frame from each file and sums every
    MuonGun.GenerationProbability found there into a single generator.
    Returns None if no generator was found in any file.
    """
    import icecube
    import icecube.icetray
    from icecube import dataclasses, dataio, icetray
    import icecube.MuonGun
    from icecube.icetray.i3logging import log_info as log
    generator = None
    for fname in infiles:
        # BUGFIX: Python 2 print statement — the rest of this code base uses
        # the Python 3 print function; single-argument form behaves the same.
        print(fname)
        f = dataio.I3File(str(fname))
        fr = f.pop_frame(icetray.I3Frame.Stream('S'))
        f.close()
        if fr is not None:
            for k in fr.keys():
                v = fr[k]
                if isinstance(v, icecube.MuonGun.GenerationProbability):
                    if generator is None:
                        generator = v
                    else:
                        generator += v
    return generator
示例#11
0
def i3Counts(file_name, evt_dict, pick):
    """Match each physics frame's reconstructed/true energies against the
    pickled reference `pick` and append the match results into `evt_dict`.

    Parameters
    ----------
    file_name : str
        Path to the i3 file to scan.
    evt_dict : dict
        Nested dict with 'CC'/'NC' sub-dicts whose lists are appended to
        in place ('match_e', 'vtxz', plus every key present in pick['NC']).
    pick : dict
        Reference data with 'CC'/'NC' sub-dicts containing 'reco_energy'
        and 'energy' sequences; events are located by exact energy match.
    """
    in_file = dataio.I3File(file_name)
    #frame = in_file.pop_physics()
    while (in_file.more()):
        frame = in_file.pop_physics()
        evt_id = frame["I3EventHeader"].event_id
        sub_evt_id = frame["I3EventHeader"].sub_event_id
        run_id = frame["I3EventHeader"].run_id
        hlc_z = frame["FirstHLCvertex"].pos.z
        mm_energy = frame["SANTA_Muon"].energy
        mm_zenith = frame["SANTA_Muon"].dir.zenith
        mc_energy = frame["SANTA_Cascade"].energy
        tot_energy = mm_energy + mc_energy
        tn_energy = frame["trueNeutrino"].energy
        tn_zenith = frame["trueNeutrino"].dir.zenith
        #tm_energy = frame["trueMuon"].energy
        #tc_energy = frame["trueCascade"].energy
        #tmm_zenith = frame["trueMuon"].dir.zenith
        #CC=1 NC =2
        int_type = frame["I3MCWeightDict"]["InteractionType"]
        p_tenergy = 0
        p_nenergy = 0
        # Sentinel indices differ (-1 vs -2) so an unmatched event can never
        # accidentally satisfy pte_ind == pne_ind below.
        pte_ind = -1
        pne_ind = -2
        if (int_type == 1):
            p_tenergy = list(pick['CC']['reco_energy'])
            p_nenergy = list(pick['CC']['energy'])
            if (tot_energy in p_tenergy):
                pte_ind = p_tenergy.index(tot_energy)
            if (tn_energy in p_nenergy):
                pne_ind = p_nenergy.index(tn_energy)
        if (int_type == 2):
            p_tenergy = list(pick['NC']['reco_energy'])
            p_nenergy = list(pick['NC']['energy'])
            if (tot_energy in p_tenergy):
                pte_ind = p_tenergy.index(tot_energy)
            #ptz_ind = p_tzen.index(mm_zenith)
            if (tn_energy in p_nenergy):
                pne_ind = p_nenergy.index(tn_energy)
            #pnz_ind = p_nenergy.index(tn_zenith)
        #print 'Check Inds',pte_ind==pne_ind
        #if((pte_ind==pne_ind) and hlc_z<-250):
        #if((pte_ind==pne_ind) and hlc_z<-250):
        # Only events matched by true-neutrino energy are recorded; m_bool
        # flags whether the reco-energy match points at the same event.
        if ((pne_ind > 0)):
            m_bool = (pte_ind == pne_ind)
            if (int_type == 1):
                if (not (m_bool)):
                    print pick['CC']['reco_energy'][pne_ind] - tot_energy
                evt_dict['CC']['match_e'].append(m_bool)
                evt_dict['CC']['vtxz'].append(hlc_z)
            if (int_type == 2):
                if (not (m_bool)):
                    print pick['NC']['reco_energy'][pne_ind] - tot_energy
                evt_dict['NC']['match_e'].append(m_bool)
                evt_dict['NC']['vtxz'].append(hlc_z)
            # Copy every pickled column for the matched index into evt_dict.
            for key in pick['NC'].keys():
                if (int_type == 1):
                    evt_dict['CC'][key].append(pick['CC'][key][pne_ind])
                if (int_type == 2):
                    evt_dict['NC'][key].append(pick['NC'][key][pne_ind])
def main():
    """Compute hit-multiplicity values for a test event and store the result
    back into the frame under 'MyHitMultiplicityValues'."""
    import os

    from icecube import icetray, dataclasses, dataio
    from icecube.common_variables import hit_multiplicity

    # Locate the downloaded test data; bail out with instructions if absent.
    base_dir = os.path.expandvars('$I3_TESTDATA')
    if not os.path.exists(base_dir):
        raise RuntimeError('No test data has been downloaded, yet! '
                           'Type "cd $I3_BUILD; make rsync" to get it!')

    data_file = dataio.I3File(os.path.join(
        base_dir, 'event-viewer', 'Level3aGCD_IC79_EEData_Run00115990.i3'))

    frame = data_file.pop_physics()

    pulses_map_name = 'MaskedOfflinePulses'

    print('Calculating hit multiplicity values for "%s" pulses.' %
          pulses_map_name)

    values = hit_multiplicity.calculate_hit_multiplicity(
        frame['I3Geometry'], frame[pulses_map_name].apply(frame))

    print("Calculation results:")
    print("NHitStrings     : %d" % values.n_hit_strings)
    print("NHitDoms        : %d" % values.n_hit_doms)
    print("NHitDomsOnePulse: %d" % values.n_hit_doms_one_pulse)
    print("NPulses         : %d" % values.n_pulses)

    # Put values into the frame.
    frame["MyHitMultiplicityValues"] = values
示例#13
0
def check_oneweight(dataset):
    """Cross-check OneWeight against the `weighting` generator for `dataset`.

    Pulls one random file from the dataset, reads its first DAQ frame, and
    asserts that TotalInteractionProbabilityWeight normalized by the
    generator agrees with OneWeight normalized per (anti)neutrino count.
    Logs and returns early if the file cannot be read.
    """
    generator = weighting.from_simprod(dataset)

    url = get_random_filename(dataset)
    try:
        if 'pnfs' in url:
            raise RuntimeError("Can't get %s from convey" % url)
        # BUGFIX: keep a handle so the file is closed (it was leaked before).
        f = dataio.I3File(url)
        try:
            frame = f.pop_daq()
        finally:
            f.close()
    except RuntimeError as e:
        icetray.logging.log_error(str(e))
        return
    if frame is None:
        icetray.logging.log_error('Could not read ' + url)
        return
    else:
        icetray.logging.log_info("Got " + url)

    # The (single) primary neutrino of the event.
    nu = [p for p in frame['I3MCTree'].primaries if p.is_neutrino][0]
    icetray.logging.log_info(str(nu))
    wdict = frame['I3MCWeightDict']
    mine = wdict['TotalInteractionProbabilityWeight'] / generator(
        nu.energy, nu.type, math.cos(nu.dir.zenith))
    # OneWeight is in units of cm^2, and must be normalized to the number of
    # neutrinos or antineutrinos ()
    theirs = wdict['OneWeight'] / (1e4 * wdict['NEvents'] / 2.)

    assert_array_almost_equal_nulp(mine, theirs, 4)
示例#14
0
def I3FileAdaptor(i3file, **kw):
    '''
    Return a sequence-like adaptor to an I3File.  The returned
    object will support the len() operation, the iterator protocol,
    and bracket access.

    The argument may be an I3File or I3BrowsableFile object, or a
    path to a readable file on disk.  File paths that appear to be
    uncompressed i3 files will be opened as I3BrowsableFile.

    Clients should expect compressed files to be slow if accessed
    in anything other than a sequential manner.

    Keyword args:
        sequential_cache_size: Cache size to pass to I3SequentialAdaptor,
                               if that adaptor is used
    '''
    # A string argument is a path: open uncompressed .i3 files as browsable,
    # anything else (e.g. compressed) as a plain sequential I3File.
    if isinstance(i3file, str):
        if i3file.endswith('.i3'):
            opened = dataio.I3BrowsableFile()
            opened.open_file(i3file)
        else:
            opened = dataio.I3File(i3file)
    else:
        opened = i3file

    if isinstance(opened, dataio.I3File):
        return I3SequentialAdaptor(opened, kw.get('sequential_cache_size', None))
    return I3BrowsableAdaptor(opened)
示例#15
0
    def FramePacket(self, frames):
        """Push a run's GCD frames ahead of its event frames.

        The first time a run_id is seen, the run's GCD file is looked up in
        the good-run list (self.GRL) and its frames — minus the stop types in
        self.ignoreTypeList — are pushed before the event frames.  Packets
        for an already-seen run pass straight through.
        """
        eh = frames[0]["I3EventHeader"]
        # Experimental runs have 6-digit run ids; lower values indicate MC.
        if eh.run_id <= 99999:
            icetray.logging.log_fatal(
                "Events seem not to be experimental data (run_id <= 99999)")

        # Same run as last packet: no GCD insertion needed.
        if self.last_seen_run == eh.run_id:
            for frame in frames:
                self.PushFrame(frame)
            return
        icetray.logging.log_info(
            "Event with a new run_id encountered: %d; pushing in GCD-frames!" %
            (eh.run_id))

        if eh.run_id not in self.GRL:
            icetray.logging.log_fatal(
                "Goodrun-lists do not contain an entry for this run (%d); cannot infer GCD-file path"
                % (eh.run_id))

        #clip in the gcd-file content
        gcd_file = dataio.I3File(self.GRL[eh.run_id].get_gcd_file())
        while (gcd_file.more()):
            gcd_frame = gcd_file.pop_frame()
            if gcd_frame.Stop not in self.ignoreTypeList:
                self.PushFrame(gcd_frame)

        self.last_seen_run = eh.run_id
        #push all other frames
        for frame in frames:
            self.PushFrame(frame)
        return
def most_recorded_pulse_data(dir,n):
    """Extract (charge, time, width, flags) arrays for the DOM with the n-th
    highest pulse count in the first physics frame of the file at `dir`.

    Parameters:
        dir: path to an i3 file (parameter name shadows the `dir` builtin;
             kept as-is for backward compatibility with callers).
        n:   rank by recorded pulse count (1 = most pulses).

    Side effect: saves the arrays to 'data_<key>.npy' via np.save.
    Returns the tuple of four numpy arrays.
    """

    f = dataio.I3File(dir)
    fr = f.pop_physics()
    offline_pulses = fr["OfflinePulsesHLC"]

    # A mask must be applied to the frame to obtain the concrete pulse map.
    if type(offline_pulses) == dataclasses.I3RecoPulseSeriesMapMask:
            offline_pulses = offline_pulses.apply(fr)

    # Pulse count per DOM, in keys() order.
    records=[]
    for i in range(len(offline_pulses)):
        records.append(len(offline_pulses[offline_pulses.keys()[i]]))
    records=np.array(records)

    def calc_time_charge(pulsemap):
        # Unpack one pulse series into parallel numpy arrays.
        charge,time,width,flag=[],[],[],[]

        for i in range(len(pulsemap)):
            charge.append(pulsemap[i].charge)
            time.append(pulsemap[i].time)
            width.append(pulsemap[i].width)
            flag.append(pulsemap[i].flags)

        return np.array(charge),np.array(time),np.array(width),np.array(flag)

    #np.argwhere(records>160)   ##see which one do you want and select the key (the most recorded for example)
    # argsort is ascending, so index [-n] picks the n-th largest count.
    key=np.argsort(records)[-n]
    #key=83

    pulsemap=offline_pulses[offline_pulses.keys()[key]]
    data=calc_time_charge(pulsemap)
    np.save("data_{}".format(key),data)
    return data
示例#17
0
def get_losses_of_i3files(file_list):
    """Collect per-event Millipede energy losses inside the detector volume.

    For every frame containing the Millipede key, records (in order) each
    positive loss whose position lies inside the IceCube Delaunay hull,
    zero-padded into a length-150 array.  Assumes at most 150 qualifying
    losses per event, as the original did (IndexError otherwise).

    Returns the list of per-event arrays; prints summary statistics.
    """
    losses_list = []
    longest_event = 0
    miliped_key = 'SplineMPE_MillipedeHighEnergyMIE'
    delaunay = create_icecube_delaunay()

    for input_file in tqdm(file_list):
        i3file = dataio.I3File(input_file)
        while (i3file.more()):
            frame = i3file.pop_frame()
            # check if end of file (BUGFIX: identity check instead of == None)
            if frame is None:
                break
            # skip gcd/daq/physics frames without the Millipede output
            if miliped_key not in frame:
                continue

            event_losses_inside = np.zeros(150)
            loss_idx = 0  # next free slot in the fixed-size loss buffer
            milipede_list = frame[miliped_key]
            for loss_bin in milipede_list:
                if loss_bin.energy > 0:
                    if points_in_detector(
                            delaunay,
                        [loss_bin.pos.x, loss_bin.pos.y, loss_bin.pos.z]):
                        event_losses_inside[loss_idx] = loss_bin.energy
                        loss_idx += 1

            losses_list.append(event_losses_inside)
            if np.count_nonzero(event_losses_inside) > longest_event:
                longest_event = np.count_nonzero(event_losses_inside)
        # BUGFIX: release the file handle before opening the next input
        i3file.close()

    print('num events: ', len(losses_list))
    print('most nonzero bins: ', longest_event)
    return losses_list
示例#18
0
    def runTest(self):
        """Run DeepCoreLabels over the GCD + data files, verify the label key
        'cc_in_deepcore' appears in the first physics frame of the output,
        then generate top and side detector plots from the labeled file."""
        output_file = 'test/output.i3.gz'
        tray = I3Tray()
        tray.AddModule('I3Reader',
                       'reader',
                       Filenamelist=[self.gcd_file, self.i3_file])
        tray.AddModule(dl.DeepCoreLabels, 'labelmaker')
        tray.AddModule('I3Writer',
                       'writer',
                       Filename=os.path.join(self.path, output_file),
                       Streams=[icetray.I3Frame.Physics, icetray.I3Frame.DAQ])
        tray.AddModule('TrashCan', 'can')
        tray.Execute()
        # Re-open the file just written and inspect its first physics frame.
        i3_file = dataio.I3File(os.path.join(self.path, output_file))
        p_frame = i3_file.pop_physics()

        self.assertTrue('cc_in_deepcore' in p_frame, 'cc_in_deepcore was not'\
        ' found in i3_file after module execution')

        # cleaning up
        del tray
        del i3_file
        del p_frame

        # create plots
        self.i3_file = output_file
        self._get_detector(self.gcd_file)
        self._get_data(self.i3_file)
        self._plot_view(view='top')
        self._plot_view(view='side')
0
    def _get_data(self, i3_file):
        ''' Reads interaction type and position of a given i3 file

            Args: 
                i3_file: path to an i3 file (passed to icecube.dataio.I3File)
                    I3 File with i3 frames to read.
            Stores:
                Pandas DataFrame with x, y, z, type, label columns of the
                interactions in self._data (nothing is returned).

            NOTE(review): a frame is popped BEFORE each `more()` check, so
            the final physics frame of the file is never processed -- confirm
            whether this off-by-one is intentional before relying on counts.
        '''
        interactions = []
        positions = []
        label = []

        # open i3 file
        i3_file = dataio.I3File(i3_file)
        # get interactions and positions of all frames (including all daughters)
        pframe = i3_file.pop_physics()

        while i3_file.more():
            # obtain primary
            primary = get_primary(pframe)
            # CC, NC or other?
            interactions.append(get_interaction_type(pframe, primary))
            positions.append(get_position(pframe, primary))
            label.append(pframe['cc_in_deepcore'].value)
            pframe = i3_file.pop_physics()

        # Assemble per-event rows into a DataFrame; swapaxes turns the
        # column-major stack into (n_events, 5).
        positions = np.array(positions)
        label = np.array(label)
        frames = pd.DataFrame(np.array([
            positions[:, 0], positions[:, 1], positions[:, 2], interactions,
            label
        ]).swapaxes(0, 1),
                              columns=['x', 'y', 'z', 'type', 'label'])
        self._data = frames
示例#20
0
    def Configure(self):
        """Load the G/C/D frames from the configured GCD file and fetch the
        reference checksums from the service URL.

        NOTE(review): the parameter is stored in self.gcd_filename but every
        use below reads self.filename -- unless self.filename is set by a
        base class, this looks like a bug; confirm which attribute is meant.
        """
        self.gcd_filename = self.GetParameter('GCDFilename')
        self.prescale = self.GetParameter('Prescale')

        if not os.path.exists(self.filename):
            icetray.logging.log_fatal("GCD file %s not found." % self.filename)

        with dataio.I3File(self.filename) as f:
            # Remember the most recent frame object of each kind seen.
            self.geometry_frame = None
            self.calibration_frame = None
            self.detector_status_frame = None

            while f.more():
                frame = f.pop_frame()
                if 'I3Geometry' in frame:
                    self.geometry_frame = frame['I3Geometry']
                if 'I3Calibration' in frame:
                    self.calibration_frame = frame['I3Calibration']
                if 'I3DetectorStatus' in frame:
                    self.detector_status_frame = frame['I3DetectorStatus']

        # We'll need to get the checksums from some location
        try:
            url = self.__url + 'checksums'
            response = urllib.urlopen(url)
            # NOTE(review): pickle.loads on data fetched over the network is
            # unsafe unless the source is fully trusted.
            self.checksums = pickle.loads(response.read())
        except:
            # Broad catch is tolerated only because log_fatal aborts anyway.
            icetray.logging.log_fatal("Something went wrong checking/loading checksums")
示例#21
0
def MakeModels(outfile, infile, ThePerturber):
    """Write NModels perturbed ice-model frames, each followed by
    EventsPerModel DAQ events tagged with that model number, to `outfile`.

    Relies on module-level globals: InputEvents (an open I3File),
    initial_model, NModels and EventsPerModel.
    NOTE(review): the `infile` argument is never used here -- confirm
    whether it was meant to seed InputEvents.
    """

    # Open a file for output
    outf = dataio.I3File(outfile, 'w')

    # Send the simulation frame to the outfile.
    frame = InputEvents.pop_frame(icetray.I3Frame.Simulation)
    outf.push(frame)

    #### Select model range here #####
    ModelNumbers = range(initial_model, initial_model + NModels)

    ##### By definition, a IceXModelNumber = 0 corresponds to the central model

    # Start Off-Central Model Generator Loop
    for IceXModelNumber in ModelNumbers:

        # S-frame describing this perturbed model, then its events.
        frame = ThePerturber(IceXModelNumber)
        outf.push(frame)

        for k in range(0, EventsPerModel):
            InputEvent = InputEvents.pop_daq()
            # Tag each event with the model it was generated under.
            InputEvent['IceXModelNumber'] = frame['IceXModelNumber']
            outf.push(InputEvent)
    outf.close()
    del outf
示例#22
0
  def FramePacket(self, frames):
    """Push a run's GCD frames ahead of its event frames.

    On the first packet of a new run_id, looks the run up in the good-run
    list, globs the unique GCD file from the run-info path, and pushes its
    frames (minus self.ignoreTypeList stops) before the event frames.
    Packets for an already-seen run pass straight through.
    """
    eh = frames[0]["I3EventHeader"]
    # Experimental runs have 6-digit run ids; lower values indicate MC.
    if eh.run_id <= 99999:
      icetray.logging.log_fatal("Events seem not to be experimental data (run_id <= 99999)")

    if self.last_seen_run==eh.run_id:
      for frame in frames:
        self.PushFrame(frame)
      return
    icetray.logging.log_info("Event with a new run_id encountered: %d; pushing in GCD-frames!"%(eh.run_id))
    ri = self.GRL.get_run_info(eh.run_id)

    if ri.run_id == -1:
      # BUGFIX: this call was missing its opening parenthesis (SyntaxError).
      icetray.logging.log_fatal("Goodrun-lists do not contain an entry for this run (%d); cannot infer GCD-file path"%(eh.run_id))

    icetray.logging.log_trace("searching GCD : %s", os.path.join(ri.path,'*%s*GCD*.i3*'%(eh.run_id)))
    gcd_path = glob.glob(os.path.join(ri.path,'*%s*GCD*.i3*'%(eh.run_id)))

    if len(gcd_path) != 1:
      icetray.logging.log_fatal("Cannot infer unique GCD-file path for run %d; Please report this error!"%(eh.run_id))

    #clip in the gcd-file content
    gcd_file = dataio.I3File(gcd_path[0])
    while (gcd_file.more()):
      gcd_frame = gcd_file.pop_frame()
      if gcd_frame.Stop not in self.ignoreTypeList:
        self.PushFrame(gcd_frame)

    self.last_seen_run = eh.run_id
    #push all other frames
    for frame in frames:
      self.PushFrame(frame)
    return
示例#23
0
    def harvest_generators(sinfiles):
        """Sum every MuonGun GenerationProbability found in the S frames of
        the first input file; return None if none are found."""
        from icecube.icetray.i3logging import log_info as log
        generator = None
        source = dataio.I3File(sinfiles[0])
        while True:
            # pop_frame may raise at end of file; treat that as exhaustion
            try:
                s_frame = source.pop_frame(icetray.I3Frame.Stream('S'))
            except RuntimeError as err:
                log('Caught the following exception:', str(err))
                s_frame = None
            if s_frame is None:
                break
            for key in s_frame.keys():
                candidate = s_frame[key]
                if isinstance(candidate, MuonGun.GenerationProbability):
                    log('%s: found "%s" (%s)' %
                        (sinfiles[0], key, type(candidate).__name__),
                        unit="MuonGun")
                    if generator is None:
                        generator = candidate
                    else:
                        generator += candidate
        source.close()
        return generator
示例#24
0
def main():
    """Compute time-characteristics values for a test event and store the
    result back into the frame under 'TrackCharacteristicsValues'."""
    import os

    from icecube import icetray, dataclasses, dataio
    from icecube.icetray import I3Units
    from icecube.common_variables import time_characteristics

    # Locate the downloaded test data; bail out with instructions if absent.
    base_dir = os.path.expandvars('$I3_TESTDATA')
    if not os.path.exists(base_dir):
        raise RuntimeError('No test data has been downloaded, yet! '
                           'Type "cd $I3_BUILD; make rsync" to get it!')

    data_file = dataio.I3File(os.path.join(
        base_dir, 'event-viewer', 'Level3aGCD_IC79_EEData_Run00115990.i3'))

    frame = data_file.pop_physics()

    pulses_map_name = 'MaskedOfflinePulses'
    reco_particle_name = 'MPEFit_SLC'
    time_cylinder_radius = 150. * I3Units.m

    print('Calculating time characteristics for "%s" pulses' %
          pulses_map_name)

    values = time_characteristics.calculate_time_characteristics_values(
        frame['I3Geometry'],
        frame[pulses_map_name].apply(frame),
    )

    print("Calculation results: %s" % values)

    # Put the calculated values into the frame.
    frame["TrackCharacteristicsValues"] = values
示例#25
0
def process_frames(frames):
    """Round-trip `frames` through worker_file_helper.py via temporary files.

    Writes the frames to a temp input i3 file, invokes the helper script,
    reads back every frame it produced, and returns them.  The temp
    directory is always removed; elapsed time is printed.
    """
    start = time.time()
    tmpdir = tempfile.mkdtemp(dir=os.getcwd())
    out_frames = []
    try:
        infilename = os.path.join(tmpdir, 'in.i3')
        outfilename = os.path.join(tmpdir, 'out.i3')
        with dataio.I3File(infilename, 'w') as sink:
            for fr in frames:
                sink.push(fr)
        subprocess.check_call(['python', 'worker_file_helper.py', infilename, outfilename])
        out_frames.extend(dataio.I3File(outfilename))
    finally:
        shutil.rmtree(tmpdir)
    print('time: ', time.time() - start)
    return out_frames
示例#26
0
def load_frames(infile):
    """Read every frame from the i3 file `infile` and return them as a list."""
    frames = []
    source = dataio.I3File(infile)
    while source.more():
        frames.append(source.pop_frame())
    return frames
示例#27
0
def write(fname='muongun_serialization_test.i3'):
    """Serialize the module-level `generator` into a single-frame I3 file."""
    out = dataio.I3File(fname, 'w')
    frame = icetray.I3Frame()
    frame['Generator'] = generator
    out.push(frame)
    out.close()
示例#28
0
    def testRoundTrip(self):
        """I3Matrix serializes and deserializes properly."""
        fname = 'i3matrix_test.i3'
        orig = numpy.pi * numpy.ones((3, 3))
        frame = icetray.I3Frame()
        frame['foo'] = I3Matrix(orig)
        f = dataio.I3File(fname, 'w')
        f.push(frame)
        f.close()

        # Read the matrix back and compare it as a numpy array.
        frame = dataio.I3File(fname).pop_frame()
        view = numpy.asarray(frame['foo'])
        # BUGFIX: assertEquals/assert_ are long-deprecated unittest aliases
        # (removed in Python 3.12); use assertEqual/assertTrue.
        self.assertEqual(view.shape, orig.shape)
        self.assertTrue((view == orig).all())

        os.unlink(fname)
示例#29
0
def main(file_list, keys):
    """Collect `keys` ("Attribute.column" strings) from every physics frame.

    Returns a dict mapping each key string to the list of values gathered
    across all physics frames of every file in `file_list`.
    """
    # BUGFIX: dict.fromkeys(keys, []) shared ONE list object between all
    # keys, so every value landed in every key's list.
    data = {key: [] for key in keys}
    for file_name in file_list:
        i3_file = dataio.I3File(file_name)
        p_frame = i3_file.pop_physics()
        while p_frame is not None:
            for key in keys:
                attribute, column = key.split('.')
                data[key].append(p_frame[attribute].get(column))
            # BUGFIX: the original never popped the next frame inside the
            # loop, so any non-empty file spun forever on its first frame.
            p_frame = i3_file.pop_physics() if i3_file.more() else None
    return data
示例#30
0
 def test_04(self):
     # Exercise the I3File iterator protocol: __iter__ on the file and on
     # the resulting iterator must both succeed, and the file must still be
     # fully iterable after rewind().
     # test iter
     f = dataio.I3File(self.name)
     iter = f.__iter__()
     iter2 = iter.__iter__()
     f.rewind()
     for fr in f:
         pass
     f.close()