Example #1
0
def GetDataFrame(mytreename, mybranches, filelist):
    '''Read the requested branches from every ROOT file in filelist
    (tree mytreename) and return the combined data as a pandas DataFrame.'''
    processor = rp.ROOTProcessor(treename=mytreename)
    for fname in filelist:
        processor.addROOTFile(fname, branches_to_get=mybranches)
    return pd.DataFrame(processor.getProcessedData())
Example #2
0
def process_ROOTFile(infilename,
                     data_branches,
                     output_filenamebase,
                     data_branch="phaseII",
                     save_json=False,
                     save_numpy=True):
    '''
    Process a single ROOT file into the numpy data format used downstream,
    optionally saving the intermediate JSON and the final numpy arrays.

    Parameters
    ----------
    infilename : str
        Path to the ROOT file to process.
    data_branches : list of str
        Branch names to read from the tree.
    output_filenamebase : str
        Base name (no extension) for any files written to disk.
    data_branch : str
        Name of the tree to read (default "phaseII").
    save_json : bool
        If True, dump the processed data as JSON under
        ../data/Processed/JSON_Data/.
    save_numpy : bool
        If True, save the input/output numpy arrays under
        ../data/Processed/nparray_Data/.
    '''
    #Test processing data
    myROOTProcessor = rp.ROOTProcessor(treename=data_branch)
    myROOTProcessor.processROOTFile(infilename, branches_to_get=data_branches)
    data_injson = myROOTProcessor.getProcessedData()
    #BUGFIX: honor the save_json parameter; the old code checked a global
    #SAVE_JSON flag, silently ignoring the parameter (and raising NameError
    #when the global was undefined).
    if save_json:
        with open(
                "../data/Processed/JSON_Data/" + output_filenamebase + ".json",
                "w") as f:
            json.dump(data_injson, f)

    #Generate a pixel map from all PMTs hit in events in this file
    #FIXME: We could also just have the pixel map stored in a JSON file, so
    #we don't have to generate the map over and over again (plus, if a PMT was
    #never hit in the file, it would be left out of the map).
    myPixelMapper = pm.PixelMapper(jsondata=data_injson)
    pixel_map, numxpixels, numypixels = myPixelMapper.MapPositionsToPixels(
        pmt_only=True, ycut=130)
    myPixelMapper.PlotPixelMap(pixel_map)

    #Process the JSON data into our correct numpy data format
    myJSONProcessor = jp.JSONProcessor()
    myJSONProcessor.loadJSON(data_injson)
    myJSONProcessor.loadPixelMap(pixel_map, numxpixels, numypixels)
    #Truth-level quantities emitted as the output labels
    OUTINDS = [
        'PiPlusCount', 'Pi0Count', 'PiMinusCount', 'trueVtxX', 'trueVtxY',
        'trueVtxZ', 'trueDirX', 'trueDirY', 'trueDirZ', 'trueVtxTime'
    ]

    input_data, output_data = myJSONProcessor.processData(timewindowmin=0,
                                                          timewindowmax=20,
                                                          numwindows=5,
                                                          outdata=OUTINDS)
    print("TESTING: printing a single event's data out")
    print(input_data[0])
    print("single event's data in the x-pixel row")
    print(input_data[0][0])
    print("single event's charge data at x-pixel=0, y-pixel=3")
    print(input_data[0][0][3])
    #BUGFIX: likewise honor the save_numpy parameter instead of the global
    #SAVE_NUMPYBIN flag.
    if save_numpy:
        np.save(
            "../data/Processed/nparray_Data/" + output_filenamebase +
            "_input.npy", input_data)
        np.save(
            "../data/Processed/nparray_Data/" + output_filenamebase +
            "_output.npy", output_data)
Example #3
0
def EstimateLivetime(filelist):
    '''
    Estimate live time by summing, over the given files, the spread between
    the smallest and largest event time stamps in each file.  One or two
    events are being set to an unphysically small or large number though,
    have to investigate.
    '''
    total_time = 0
    wanted = ['eventTimeTank']
    for fname in filelist:
        processor = rp.ROOTProcessor(treename="phaseIITriggerTree")
        processor.addROOTFile(fname, branches_to_get=wanted)
        frame = pd.DataFrame(processor.getProcessedData())
        times = frame["eventTimeTank"]
        #Cuts guard against the unphysical outlier stamps noted above;
        #divide by 1E9 to convert ns -> s.
        early_time = np.min(frame.loc[times > 1E6].values) / 1E9
        late_time = np.max(frame.loc[times < 2.0E18].values) / 1E9
        print("EARLY_TIME: " + str(early_time))
        print("LATE_TIME: " + str(late_time))
        print("LATE - EARLY TIME: " + str(late_time - early_time))
        total_time += (late_time - early_time)
    return total_time
Example #4
0
if __name__ == '__main__':
    #Collect the signal and background ntuple file lists
    slist = glob.glob(SIGNAL_DIR + "*.ntuple.root")
    blist = glob.glob(BKG_DIR + "*.ntuple.root")

    #Report the estimated live time of each sample
    livetime_estimate = es.EstimateLivetime(slist)
    print("SIGNAL LIVETIME ESTIMATE IN SECONDS IS: " + str(livetime_estimate))
    livetime_estimate = es.EstimateLivetime(blist)
    print("BKG LIVETIME ESTIMATE IN SECONDS IS: " + str(livetime_estimate))

    #Branches to read for the cluster-level analysis
    mybranches = [
        'eventNumber', 'eventTimeTank', 'clusterTime', 'SiPMhitQ', 'SiPMNum',
        'SiPMhitT', 'hitT', 'hitQ', 'hitPE', 'hitDetID',
        'clusterChargeBalance', 'clusterPE', 'SiPM1NPulses', 'SiPM2NPulses'
    ]

    #Load the tank-cluster tree from the signal files
    SProcessor = rp.ROOTProcessor(treename="phaseIITankClusterTree")
    for fname in slist:
        SProcessor.addROOTFile(fname, branches_to_get=mybranches)
    Sdata = SProcessor.getProcessedData()
    Sdf = pd.DataFrame(Sdata)

    #Load the tank-cluster tree from the background files
    BProcessor = rp.ROOTProcessor(treename="phaseIITankClusterTree")
    for fname in blist:
        BProcessor.addROOTFile(fname, branches_to_get=mybranches)
    Bdata = BProcessor.getProcessedData()
    Bdf = pd.DataFrame(Bdata)

    #Reload the signal files, this time from the trigger tree
    SProcessor = rp.ROOTProcessor(treename="phaseIITriggerTree")
    for fname in slist:
        SProcessor.addROOTFile(fname, branches_to_get=mybranches)
    Sdata = SProcessor.getProcessedData()
Example #5
0
            print("CHARGE NEAR SIPM: " + str(ClusterQ))
            LateClusterPEs.append(ClusterQ)

    labels = {'title': 'Comparison of total nearby PE to late SiPM Charge \n (Position 0, AmBe source installed)', 
            'xlabel': 'Cluster PE', 'ylabel': 'Total SiPM charge [nC]'}
    ranges = {'xbins': 30, 'ybins':20, 'xrange':[0,40],'yrange':[0,0.5],'promptTime':2000}
    #abp.MakeHexJointPlot(Sdf,'clusterPE','clusterChargeBalance',labels,ranges)
    plt.hist2d(LateClusterPEs,LateClusterQs, bins=(ranges['xbins'],ranges['ybins']),
            range=[ranges['xrange'],ranges['yrange']],
            cmap = plt.cm.inferno)
    plt.colorbar()
    plt.title(labels['title'])
    plt.xlabel(labels['xlabel'])
    plt.ylabel(labels['ylabel'])
    plt.show()

if __name__=='__main__':
    #Gather every signal ntuple in the signal directory
    slist = glob.glob(SIGNAL_DIR + "*.ntuple.root")

    #Trigger-tree branches needed by the two-pulse plot
    mybranches = [
        'eventNumber', 'eventTimeTank', 'clusterTime', 'SiPMhitT',
        'SiPMhitQ', 'SiPMhitAmplitude', 'clusterChargeBalance', 'clusterPE',
        'SiPM1NPulses', 'SiPM2NPulses', 'SiPMNum', 'clusterHits', 'hitPE',
        'hitT'
    ]

    SProcessor = rp.ROOTProcessor(treename="phaseIITriggerTree")
    for fname in slist:
        SProcessor.addROOTFile(fname, branches_to_get=mybranches)
    Sdata = SProcessor.getProcessedData()
    Sdf_trig = pd.DataFrame(Sdata)

    TwoPulsePlot(Sdf_trig)


Example #6
0
        Pandas DataFrame output from the MapPositionsToPixels method.
        '''
        pmp = pixel_map.pivot(index="ypixel", columns="xpixel", values="id")
        ax = sns.heatmap(pmp)
        plt.show()


if __name__ == '__main__':

    #BUGFIX: previously sys.argv[1] was read unconditionally, so running the
    #script with no arguments raised IndexError instead of printing usage.
    if len(sys.argv) < 2 or str(sys.argv[1]) == "--help":
        print("USAGE: python PMTMap.py [file1.root] ")
        sys.exit(0)
    f1 = str(sys.argv[1])
    #Process data into JSON objects
    mybranches = ['digitX', 'digitY', 'digitZ', 'digitType', 'digitDetID']
    f1Processor = rp.ROOTProcessor(treename="phaseII")
    f1Processor.addROOTFile(f1, branches_to_get=mybranches)
    f1data = f1Processor.getProcessedData()
    #Unpack each requested branch into its own numpy array
    diX = np.array(f1data['digitX'])
    diY = np.array(f1data['digitY'])
    diZ = np.array(f1data['digitZ'])
    diType = np.array(f1data['digitType'])
    diID = np.array(f1data['digitDetID'])

    ids, ys, thetas = YVSTheta(diX, diY, diZ, diType, diID)
    #Now, create the map of IDs to pixel indices
    pixel_map = PositionsToPixels(ids, ys, thetas)
    print(pixel_map)
    #with open("./pixel_map.json","w") as f:
    #    json.dump(pixel_map,f,indent=4,sort_keys=True)
Example #7
0
File: main.py  Project: jujuhub/annie
#Run numbers whose ntuples are treated as background data
bkg_runs = [64,65,66,67,68,69,70,71,72,73,74,75,76,77]
#Branches to read from each ntuple; names suggest hit-, cluster-, and
#SiPM-level quantities — confirm against the tree definition
mybranches = ['eventTimeTank', 'hitQ', 'hitT', 'hitPE', 'hitDetID', 'clusterPE', 'clusterHits', 'clusterCharge', 'SiPMNum', 'SiPMhitT']
#mybranches = ['eventTimeTank', 'hitQ', 'hitT', 'hitPE', 'hitDetID']



#main
if __name__=='__main__':
  flist = glob.glob(DATADIR + "NTuple_*.root")

  #load the data
  hasBkg = False   #hardcoded rn
  hasSrc = False
  hasLed = True
  if ((not HASCSV) and (not HASLED)):
    bkgProcessor = rp.ROOTProcessor("phaseIITankClusterTree")
    print("  ..for bkg data")
    srcProcessor = rp.ROOTProcessor("phaseIITankClusterTree")
    print("  ..for src data")
  nrun = -69
  for f in flist:
    #print(" > current file: " + f)
    nrun = f[-7:-5]   #extract run number
    #print(" > nrun = " + nrun)
    if (int(nrun) in bkg_runs):
      if ((not HASCSV) and (not HASLED)):
        bkgProcessor.addROOTFile(f, branches_to_get=mybranches)
      hasBkg = True
    elif (int(nrun) in src_runs):
      if ((not HASCSV) and (not HASLED)):
        srcProcessor.addROOTFile(f, branches_to_get=mybranches)