Example #1
channel_map[region_names['signal_antiIso']] = region_names['signal']

input_channels = []

#### open the input file and re-make its directory structure in the output file
inputFile = TFile(condor_dir + "/" + data_dataset + ".root")
inputFile.cd()
for key in inputFile.GetListOfKeys():
    if (key.GetClassName() != "TDirectoryFile"):
        continue
    outputFile.cd()
    rootDirectory = key.GetName()
    outputFile.mkdir(rootDirectory)

    inputFile.cd(rootDirectory)
    for key2 in gDirectory.GetListOfKeys():
        if (key2.GetClassName() != "TDirectoryFile"):
            continue
        current_channel_name = key2.GetName()
        if current_channel_name in channel_map:
            input_channels.append(current_channel_name)
            outputFile.cd(rootDirectory)
            gDirectory.mkdir(channel_map[current_channel_name])

# now handle the cutflow histograms in the current directory
inputFile.cd(rootDirectory)
for key in gDirectory.GetListOfKeys():  # loop over histograms in the current directory
    if not re.match(r"TH[123]", key.GetClassName()):
        continue
    histogramName = key.GetName()
Example #2
def GetKeyNames(self, dir=""):
    self.cd(dir)
    return [key.GetName() for key in gDirectory.GetListOfKeys()]
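# A hedged usage sketch for the GetKeyNames helper above: since it takes `self`,
# it is meant to be attached to TFile as a method. The file and directory names
# below are placeholders, not taken from the original code.
from ROOT import TFile
TFile.GetKeyNames = GetKeyNames
exampleFile = TFile.Open("histograms.root")
print exampleFile.GetKeyNames("")             # keys in the top-level directory
print exampleFile.GetKeyNames("someSubDir")   # keys inside a subdirectory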
Example #3
def writeNewFile(infile, outfile, regions):
    infile.cd()
    allregions = []
    nextregionkey = TIter(gDirectory.GetListOfKeys())
    regionkey = nextregionkey.Next()
    histograms = {}
    while regionkey and regionkey.GetTitle():
        if regionkey.IsFolder() == 1:
            regionname = regionkey.GetName()
            newregionname = getNewRegionName(regionname)
            histograms[newregionname] = {}
            allregions.append(regionname)
            inregiondir = infile.Get(regionname)
            inregiondir.cd()
            nextsamplekey = TIter(gDirectory.GetListOfKeys())
            samplekey = nextsamplekey.Next()
            while samplekey:
                if samplekey.IsFolder() == 1:
                    samplename = samplekey.GetName()
                    insampledir = inregiondir.Get(samplename)
                    insampledir.cd()
                    hist = insampledir.Get("nominal")
                    if samplename in histograms[newregionname]:
                        histograms[newregionname][samplename].Add(hist)
                    else:
                        histograms[newregionname][samplename] = hist
                samplekey = nextsamplekey.Next()
        regionkey = nextregionkey.Next()

    #get the binning
    binning = {}
    for newregion in histograms.keys():
        binning[newregion] = getBinning(histograms[newregion])
        print "Binning for region ", newregion, " -> ", binning[newregion]

    #now write output file
    infile.cd()
    nextregionkey = TIter(gDirectory.GetListOfKeys())
    regionkey = nextregionkey.Next()
    while regionkey and regionkey.GetTitle():
        if regionkey.IsFolder() == 1:
            regionname = regionkey.GetName()
            newregionname = getNewRegionName(regionname)
            outfile.cd()
            outfile.mkdir(regionname)
            outregiondir = outfile.Get(regionname)
            infile.cd()
            inregiondir = infile.Get(regionname)
            inregiondir.cd()
            nextsamplekey = TIter(gDirectory.GetListOfKeys())
            samplekey = nextsamplekey.Next()
            while samplekey:
                if samplekey.IsFolder() == 1:
                    samplename = samplekey.GetName()
                    outregiondir.cd()
                    outregiondir.mkdir(samplename)
                    outsampledir = outregiondir.Get(samplename)
                    inregiondir.cd()
                    insampledir = inregiondir.Get(samplename)
                    insampledir.cd()
                    nextsystkey = TIter(gDirectory.GetListOfKeys())
                    systkey = nextsystkey.Next()
                    while systkey:
                        obj = systkey.ReadObj()
                        if obj.IsA().InheritsFrom("TH1"):
                            systname = systkey.GetName()
                            outsampledir.cd()
                            outhist = rebin(obj, binning[newregionname],
                                            outsampledir)
                            outhist.Write()
                        systkey = nextsystkey.Next()
                else:  #take care of lumi histogram
                    obj = samplekey.ReadObj()
                    if obj.IsA().InheritsFrom("TH1"):
                        newobj = obj.Clone()
                        outregiondir.cd()
                        newobj.SetDirectory(outregiondir)
                        newobj.Write()
                samplekey = nextsamplekey.Next()
        regionkey = nextregionkey.Next()
Example #4
def GetAllKeys(self):
    keylist = []
    for key in gDirectory.GetListOfKeys():
        mypath = ""
        self.filterKey(key, mypath, keylist)
    return keylist
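# filterKey() is not shown in this example; a minimal sketch of what it might do,
# assuming it recursively descends into TDirectoryFile keys and collects the full
# path of every non-directory key (names and behaviour here are assumptions):
def filterKey(self, key, path, keylist):
    name = path + "/" + key.GetName() if path else key.GetName()
    if key.GetClassName() == "TDirectoryFile":
        # recurse into the subdirectory's own keys
        for subkey in key.ReadObj().GetListOfKeys():
            self.filterKey(subkey, name, keylist)
    else:
        keylist.append(name)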
Example #5
import os
import sys

from ROOT import TFile, TDirectory, gDirectory, gROOT
gROOT.SetBatch(True)

from Samples76.Samples import MiniAOD76Samples as samples
for sample in samples:
    sample.MakeJobs( nFilesPerJob , "%s/%s" % (OutPath , prefix) )

f = TFile.Open(samples[0].Jobs[0].Output)

from Haamm.HaNaMiniAnalyzer.Plotter import *
hcft = Histogram( samples , f.GetDirectory("Hamb/CutFlowTable/") )

f.cd("Hamb")
AllProps = {}
for dir in gDirectory.GetListOfKeys() :
    if dir.IsFolder() and dir.GetName() == "CutFlowTable":
        AllProps[ dir.GetName() ] = Histogram( samples , f.GetDirectory("Hamb/%s/" % (dir.GetName() )) )

f.Close()

for sample in samples:
    for Job in sample.Jobs :
        finame = Job.Output
        sys.stdout.write("\r%s : %d of %d" % (sample.Name , Job.Index , len(sample.Jobs)))
        sys.stdout.flush()
        ff = None
        if os.path.isfile( finame ):
            ff = TFile.Open(finame)
        else:
            print "File %d of sample %s doesn't exist, skip it" % (Job.Index , sample.Name)
Example #6
def merge_root_file(target, source_list):
    """
    Merge next file from the source list with the target file.
    Function called recursively for each element of the list.

    :param TFile target: the result ROOT file
    :param TList source_list: list of input files to merge
    """
    logger = get_logger()
    raw_path = target.GetPath()
    path = raw_path[raw_path.find(":") + 1:]

    first_source = source_list.First()
    first_source.cd(path)
    current_source_dir = gDirectory
    # save time: do not auto-register new histograms in ROOT's in-memory directory
    status = TH1.AddDirectoryStatus()
    TH1.AddDirectory(False)

    # loop over all keys in this directory
    #global_chain = TChain()
    next_key = TIter(current_source_dir.GetListOfKeys())
    #key = TKey()
    #TKey old_key = None
    key = next_key()
    while key:
        # keep only the highest cycle number for each key
        #if old_key and not old_key.GetName() == key.GetName():
        #    continue
        # read object from first source file
        first_source.cd(path)
        obj = key.ReadObj()

        if obj.IsA().InheritsFrom(TH1.Class()):
            # descendant of TH1 -> merge it
            logger.info("Merging histogram %s", obj.GetName())
            h1 = obj  # TH1 is abstract, so work directly with the object read from the first source

            # loop over all source files and add the content of the
            # corresponding histogram to the one pointed to by "h1"
            next_source = source_list.After(first_source)
            while next_source:
                # make sure we are at the correct directory level by cd'ing to path
                next_source.cd(path)
                key2 = gDirectory.GetListOfKeys().FindObject(h1.GetName())
                if key2:
                    h2 = key2.ReadObj()
                    h1.Add(h2)
                    #del h2
                next_source = source_list.After(next_source)

        elif obj.IsA().InheritsFrom(TTree.Class()):
            logger.info("Merging tree %s", obj.GetName())
            # loop over all source files and create a chain of Trees "global_chain"
            obj_name = obj.GetName()
            global_chain = TChain(obj_name)
            global_chain.Add(first_source.GetName())
            next_source = source_list.After(first_source)
            while next_source:
                global_chain.Add(next_source.GetName())
                next_source = source_list.After(next_source)

        elif obj.IsA().InheritsFrom(TDirectory.Class()):
            logger.info("Found subdirectory %s", obj.GetName())
            # create a new subdir of same name and title in the target file
            target.cd()
            new_dir = target.mkdir(obj.GetName(), obj.GetTitle())
            # new_dir is now the starting point of another round of merging;
            # new_dir still knows its depth within the target file via
            # GetPath(), so we can still figure out where we are in the recursion
            merge_root_file(new_dir, source_list)

        else:
            logger.info("Unknown object type, name: %s, title: %s",
                        obj.GetName(), obj.GetTitle())

        # now write the merged histogram (which is "in" obj) to the target file
        # note that this will just store obj in the current directory level,
        # which is not persistent until the complete directory itself is stored
        # by "target.Write()" below
        if obj is not None:
            target.cd()
            # if the object is a tree, it is stored in global_chain...
            if obj.IsA().InheritsFrom(TTree.Class()):
                global_chain.Merge(target.GetFile(), 0, "keep")
            else:
                obj.Write(key.GetName())

        # move to the next element
        key = next_key()

    # save modifications to target file
    target.SaveSelf(True)
    TH1.AddDirectory(status)
    target.Write()
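# A hedged usage sketch for merge_root_file(); the file names below are placeholders.
# The function expects an open, writable target TFile and a TList of open source files:
from ROOT import TFile, TList
target = TFile("merged.root", "RECREATE")
sources = TList()
sources.Add(TFile.Open("input_a.root"))
sources.Add(TFile.Open("input_b.root"))
merge_root_file(target, sources)
target.Close()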
Example #7
def GetKeyNames(file, dir=""):
    file.cd(dir)
    return [key.GetName() for key in gDirectory.GetListOfKeys()]
Example #8
testFile.cd(channelDirectory)

if arguments.savePDFs:
    try:
        shutil.rmtree ("comparison_histograms_pdfs")
    except OSError:
        pass
    os.mkdir ("comparison_histograms_pdfs")


if arguments.generic:
    outputFile.cd()
    gDirectory.mkdir(channelDirectory)
    LoopOverKeys(channelDirectory, testFile, outputFile)
else:
    for key in gDirectory.GetListOfKeys():  # Loop over directories in same way as in makePlots.py
        if re.match ('TH1', key.GetClassName()):  #found a 1D histogram
            MakeOnePlot("", key.GetName())

        if (key.GetClassName() != "TDirectoryFile"):
            continue
        if arguments.verbose:
            print "Checking key: ", key.GetName()

        histogramDirectory = key.GetName()
        outputFile.cd()
        gDirectory.mkdir(histogramDirectory)
        outputFile.cd(key.GetName())
        testFile.cd(channelDirectory + "/" + histogramDirectory)
        for key2 in gDirectory.GetListOfKeys():
            if re.match ('TH1', key2.GetClassName()):  #found a 1D histogram
Example #9
for ic,_ in enumerate(pcoords):
    if len(pcoords[ic]) != len(pad_title):
        print(ic, len(pcoords[ic]), len(pad_title))
        raise ValueError('The number of pads has to be consistent.')

myfile = TFile.Open(FLAGS.root_file, "READ")
online_dir = '/eos/user/b/bfontana/www/'
maind = 'outpath'
layers = [x for x in range(1,FLAGS.nlayers+1)]
subd_names = ['layer'+str(layers[x]) for x in range(len(layers))]
for isd,sd in enumerate(subd_names):
    directory = os.path.join(online_dir,FLAGS.directory,sd)
    SystemUtils.createDir(os.path.join(online_dir,FLAGS.directory))
    SystemUtils.createDir(directory)
    myfile.cd(os.path.join(maind,sd))
    kmap = keyTitleMap(gDirectory.GetListOfKeys())
    titleUsed = []
    for htitle,key in kmap.items():
        h = key.ReadObj()
        uvN = getUVFromTitle(htitle)
        title1 = uvN[0]+','+uvN[1]+','+uvN[2]+',RecHits'
        title2 = uvN[0]+','+uvN[1]+','+uvN[2]+',Geom'
        if title1 in titleUsed or title2 in titleUsed:
            continue
        titleUsed.extend([title1, title2])
        if htitle==title1:
            histos = [h, kmap[title2].ReadObj()]
        elif htitle==title2:
            histos = [kmap[title1].ReadObj(), h]
        else:
            raise ValueError('There is a problem with the title of the histogram.')
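# keyTitleMap() is not defined in this snippet; a minimal sketch of the assumed
# helper, mapping each histogram title to its TKey so that kmap[title].ReadObj()
# works as used above:
def keyTitleMap(keys):
    return {key.GetTitle(): key for key in keys}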
Example #10
def func(file, run, mode, argument):
    wholeDict = {}
    process = "TIMING"
    tfile = TFile(file)
    dirname = "DQMData/Run %s/HLT/Run summary/TimerService/" % (run)
    gDirectory.cd(dirname)
    dirnameModule = "process %s modules" % (process)
    gDirectory.cd(dirnameModule)

    totalSum = 0
    for everyKey in gDirectory.GetListOfKeys():
        keyName = everyKey.GetName()
        if (keyName[0].islower() and keyName.endswith("time_real")):
            hist = tfile.Get(dirname + dirnameModule + "/" + keyName)
            totalSum = totalSum + hist.GetMean()
            keyName = keyName.split()[0]
            wholeDict.update({keyName.split()[0]: hist.GetMean()})

    dirnamePath = "process %s paths" % (process)
    gDirectory.cd("../")
    gDirectory.cd(dirnamePath)

    countTotalPaths = 0
    countIncludedPaths = 0
    excludedDict = wholeDict.copy()
    for everyPath in gDirectory.GetListOfKeys():
        everyPathName = everyPath.GetName()
        if everyPathName.startswith("path ") or everyPathName.startswith(
                "endpath "):
            countTotalPaths += 1
            if not strainer(everyPath, mode, argument):
                countIncludedPaths += 1
                hist = tfile.Get(dirname + dirnamePath + "/" + everyPathName +
                                 "/module_time_real_total")
                nbins = hist.GetNbinsX()
                for moduleIndex in range(1, nbins + 1):
                    labelModule = hist.GetXaxis().GetBinLabel(moduleIndex)
                    if labelModule in excludedDict:
                        del excludedDict[labelModule]
    # print the sum of the mean times
    excludeSum = 0
    for everyKey in excludedDict:
        excludeSum += excludedDict[everyKey]

    f = io.open(unicode("ExtractPaths" + file.split('_')[1].strip(".csv") +
                        ".csv"),
                'w',
                encoding='utf8')

    f.write(
        unicode("Actual Mean Event Real Time:, %f\n" %
                (tfile.Get(dirname + "event time_real").GetMean())))
    f.write(unicode("Total Paths:, %i\n" % (cuontTotalPaths)))
    f.write(unicode("Total Modules:, %i\n" % (len(wholeDict))))
    f.write(unicode("Total Mean Modules Sum:, %f\n" % (totalSum)))
    f.write(
        unicode("Excluded Paths:, %i\n" %
                (countTotalPaths - countIncludedPaths)))
    f.write(unicode("Excluded Modules:, %i\n" % (len(excludedDict))))
    f.write(unicode("Excluded Mean Modules Sum:, %f\n" % (excludeSum)))
    f.write(unicode("Net Paths:, %i\n" % (countIncludedPaths)))
    f.write(
        unicode("Net Modules:, %i\n" % (len(wholeDict) - len(excludedDict))))
    f.write(unicode("Net Mean Modules Sum:, %f\n" % (totalSum - excludeSum)))
Example #11
def save_particle_to_root(filename):
    filename_base, filename_ext = os.path.splitext(filename)
    tf_out = TFile('{}.trigger{}'.format(filename_base, filename_ext),
                   'RECREATE')

    tree = TTree('tree', 'tree')
    particle = Particle()
    tree.Branch(
        'particle', particle,
        'is_noise/I:event_id:track_id:x/F:y:z:t:px:py:pz:pdg_id:parent_id:' +
        'spill_number/I:' +
        'present_tof_us/I:x_tof_us/F:y_tof_us:z_tof_us:t_tof_us:px_tof_us:py_tof_us:pz_tof_us:'
        +
        'present_wire_chamber_1/I:x_wire_chamber_1/F:y_wire_chamber_1:z_wire_chamber_1:t_wire_chamber_1:px_wire_chamber_1:py_wire_chamber_1:pz_wire_chamber_1:'
        +
        'present_wire_chamber_2/I:x_wire_chamber_2/F:y_wire_chamber_2:z_wire_chamber_2:t_wire_chamber_2:px_wire_chamber_2:py_wire_chamber_2:pz_wire_chamber_2:'
        +
        'present_wire_chamber_3/I:x_wire_chamber_3/F:y_wire_chamber_3:z_wire_chamber_3:t_wire_chamber_3:px_wire_chamber_3:py_wire_chamber_3:pz_wire_chamber_3:'
        +
        'present_wire_chamber_4/I:x_wire_chamber_4/F:y_wire_chamber_4:z_wire_chamber_4:t_wire_chamber_4:px_wire_chamber_4:py_wire_chamber_4:pz_wire_chamber_4:'
        +
        'present_cerenkov/I:x_cerenkov/F:y_cerenkov:z_cerenkov:t_cerenkov:px_cerenkov:py_cerenkov:pz_cerenkov:'
        +
        'present_tof_ds/I:x_tof_ds/F:y_tof_ds:z_tof_ds:t_tof_ds:px_tof_ds:py_tof_ds:pz_tof_ds'
    )

    tf_in = TFile(filename)
    pid_momentums = {}
    particles = []
    noise_particles = []

    keys = [key.GetName() for key in gDirectory.GetListOfKeys()]
    for key in keys:
        print('key = {}'.format(key))
        track_count = 0
        for track in tf_in.Get(key):
            track_count += 1
            pass_all = track.TrackPresenttof_us and \
                       track.TrackPresentwire_chamber_1_detector and \
                       track.TrackPresentwire_chamber_2_detector and \
                       track.TrackPresentwire_chamber_3_detector and \
                       track.TrackPresentwire_chamber_4_detector and \
                       track.TrackPresenttof_ds and \
                       track.TrackPresentcherenkov and \
                       track.TrackPresentnova

            if track_count % 100000 == 0:
                print('track_count = {}'.format(track_count))

            if track.TrackPresentnova:
                if pass_all:
                    particle.is_noise = 0
                else:
                    particle.is_noise = 1
                particle.event_id = track.EventID
                particle.track_id = track.TrackID
                particle.x = track.xnova
                particle.y = track.ynova
                particle.z = track.znova
                particle.t = track.tnova
                particle.px = track.Pxnova
                particle.py = track.Pynova
                particle.pz = track.Pznova
                particle.pdg_id = track.PDGidnova
                particle.parent_id = track.ParentIDnova

                particle.spill_number = track.SpillID

                particle.present_tof_us = track.TrackPresenttof_us
                particle.x_tof_us = track.xtof_us
                particle.y_tof_us = track.ytof_us
                particle.z_tof_us = track.ztof_us
                particle.t_tof_us = track.ttof_us
                particle.px_tof_us = track.Pxtof_us
                particle.py_tof_us = track.Pytof_us
                particle.pz_tof_us = track.Pztof_us

                particle.present_wire_chamber_1 = track.TrackPresentwire_chamber_1_detector
                particle.x_wire_chamber_1 = track.xwire_chamber_1_detector
                particle.y_wire_chamber_1 = track.ywire_chamber_1_detector
                particle.z_wire_chamber_1 = track.zwire_chamber_1_detector
                particle.t_wire_chamber_1 = track.twire_chamber_1_detector
                particle.px_wire_chamber_1 = track.Pxwire_chamber_1_detector
                particle.py_wire_chamber_1 = track.Pywire_chamber_1_detector
                particle.pz_wire_chamber_1 = track.Pzwire_chamber_1_detector

                particle.present_wire_chamber_2 = track.TrackPresentwire_chamber_2_detector
                particle.x_wire_chamber_2 = track.xwire_chamber_2_detector
                particle.y_wire_chamber_2 = track.ywire_chamber_2_detector
                particle.z_wire_chamber_2 = track.zwire_chamber_2_detector
                particle.t_wire_chamber_2 = track.twire_chamber_2_detector
                particle.px_wire_chamber_2 = track.Pxwire_chamber_2_detector
                particle.py_wire_chamber_2 = track.Pywire_chamber_2_detector
                particle.pz_wire_chamber_2 = track.Pzwire_chamber_2_detector

                particle.present_wire_chamber_3 = track.TrackPresentwire_chamber_3_detector
                particle.x_wire_chamber_3 = track.xwire_chamber_3_detector
                particle.y_wire_chamber_3 = track.ywire_chamber_3_detector
                particle.z_wire_chamber_3 = track.zwire_chamber_3_detector
                particle.t_wire_chamber_3 = track.twire_chamber_3_detector
                particle.px_wire_chamber_3 = track.Pxwire_chamber_3_detector
                particle.py_wire_chamber_3 = track.Pywire_chamber_3_detector
                particle.pz_wire_chamber_3 = track.Pzwire_chamber_3_detector

                particle.present_wire_chamber_4 = track.TrackPresentwire_chamber_4_detector
                particle.x_wire_chamber_4 = track.xwire_chamber_4_detector
                particle.y_wire_chamber_4 = track.ywire_chamber_4_detector
                particle.z_wire_chamber_4 = track.zwire_chamber_4_detector
                particle.t_wire_chamber_4 = track.twire_chamber_4_detector
                particle.px_wire_chamber_4 = track.Pxwire_chamber_4_detector
                particle.py_wire_chamber_4 = track.Pywire_chamber_4_detector
                particle.pz_wire_chamber_4 = track.Pzwire_chamber_4_detector

                particle.present_cerenkov = track.TrackPresentcherenkov
                particle.x_cerenkov = track.xcherenkov
                particle.y_cerenkov = track.ycherenkov
                particle.z_cerenkov = track.zcherenkov
                particle.t_cerenkov = track.tcherenkov
                particle.px_cerenkov = track.Pxcherenkov
                particle.py_cerenkov = track.Pycherenkov
                particle.pz_cerenkov = track.Pzcherenkov

                particle.present_tof_ds = track.TrackPresenttof_ds
                particle.x_tof_ds = track.xtof_ds
                particle.y_tof_ds = track.ytof_ds
                particle.z_tof_ds = track.ztof_ds
                particle.t_tof_ds = track.ttof_ds
                particle.px_tof_ds = track.Pxtof_ds
                particle.py_tof_ds = track.Pytof_ds
                particle.pz_tof_ds = track.Pztof_ds

                tree.Fill()

            # if track_count == 5000:
            #     break
        # break
    tf_in.Close()

    tf_out.cd()
    tree.Write()
    tf_out.Close()
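# Particle() is not defined in this snippet; a hedged sketch of how such a struct
# can be made known to ROOT (the same gROOT.ProcessLine pattern appears in
# Example #14 below). Only the first few members are listed here; the real struct
# must declare every leaf of the branch descriptor above, in the same order:
from ROOT import gROOT
gROOT.ProcessLine(
    'struct Particle {'
    '  Int_t is_noise; Int_t event_id; Int_t track_id;'
    '  Float_t x; Float_t y; Float_t z; Float_t t;'
    '  Float_t px; Float_t py; Float_t pz;'
    '  Float_t pdg_id; Float_t parent_id;'
    '};')
from ROOT import Particle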
Example #12
    def onefile(self, file):

        import ROOT
        from ROOT import gDirectory
        from ROOT import TH1
        from ROOT import TDirectory

        if self.verbose:
            print "look at file:", file
        tfile = ROOT.TFile(file)
        if tfile.IsZombie():
            print "WARNING: ", file, " is missing or empty"
            return self.error

        # get list of directories
        tDir = tfile.GetDirectory("")
        bDirs = tDir.GetListOfKeys()
        baseList = []
        for bdir in bDirs:
            baseList.append(bdir.GetName())

        # first try and extract total number of events
        total_events = -1
        #print self.totalEvHist
        for dir in self.totalEvDirs:
            # check if dir is in list of base directories
            if dir in baseList:
                #print dir
                # trap an errors
                try:
                    tfile.cd(dir)
                    hist = gDirectory.Get(self.totalEvHist)
                    if hist:
                        hist_entries = int(hist.GetEntries())
                        #print "hist_entries",hist_entries, total_events
                        if total_events == -1:
                            total_events = hist_entries
                            self.grandTotalEvents += hist_entries
                            #print "grandTotalEvents", self.grandTotalEvents
                        else:
                            if hist_entries != total_events:
                                print "WARNING: total events in different directories don't match"
                        if total_events != -1:
                            print "Found %s events" % total_events
                            break
                    tfile.cd("..")
                except:
                    print "ERROR: cound not cd to directory: ", dir
            else:
                print "WARNING: direcory ", dir, " not found, could be normal if only HLT is run"

        found = False
        fileList = []
        chainList = []
        dummy = []
        for dir in self.checkDir:
            # check if dir is in list of base directories
            if dir in baseList:
                if self.verbose:
                    print dir
                try:
                    tfile.cd(dir)
                    for histName in self.checkHist:
                        #print "trying dir,histName",dir,histName
                        hist = 0
                        if histName in gDirectory.GetListOfKeys():
                            hist = gDirectory.Get(histName)
                            # else:
                            #   print "trying backup",backupName[histName]
                            #   hist = gDirectory.Get(backupName[histName])
                        if hist:
                            if dir in histDict and histName in histDict:
                                textFileName = histDict[dir] + histDict[
                                    histName] + ".txt"
                            else:
                                textFileName = dir + histName + ".txt"
                            histDump = self.hstlist(hist, textFileName)
                            chainList = [textFileName, histDump]
                            fileList += [chainList]
                            found = True
                        else:
                            #not really useful to print a warning for not working dir/hist combinations
                            #print "WARNING:  missing L2 or EF resutls (normal if only HLT is run)"
                            #print "nope combination ain't working - but fine"
                            continue
                    tfile.cd("..")
                except:
                    print "ERROR: cound not cd to directory: ", dir
            else:
                print "WARNING: direcory ", dir, " not found"

        self.results += [[fileList, file, total_events]]
        #print "DMS len:",len(self.results)

        # return error or not
        if found:
            return self.success
        else:
            print "ERROR: no histograms found"
            return self.error
Example #13
def readroot():
    rls = []
    bxlist = []
    allmeas = {}

    DIRES = [
        'X0', 'Y0', 'Z0', 'width_X0', 'Width_Y0', 'Sigma_Z0', 'dxdz', 'dydz'
    ]
    # DIRES=['X0']
    rootfile = "BxAnalysis_Fill_" + FILL + ".root"
    filein = TFile(rootfile)
    for dire in DIRES:
        filein.cd(dire)
        # get list of histograms
        histolist = gDirectory.GetListOfKeys()
        iter = histolist.MakeIterator()
        key = iter.Next()
        while key:
            if key.GetClassName() == 'TH1F':
                td = key.ReadObj()
                histoname = td.GetName()
                if "bx" in histoname:
                    #                    print histoname
                    bx = histoname.split('_')[-1]
                    if bx not in bxlist:
                        # this is to be removed
                        #                        if len(bxlist)>=2:
                        #                            key = iter.Next()
                        #                            continue
                        # end to be removed
                        bxlist.append(bx)
                        allmeas[bx] = {}


#                    print bx,histoname
                    histo = gDirectory.Get(histoname)
                    nbin = histo.GetNbinsX()

                    thisbx = allmeas[bx]

                    for bin in range(1, nbin + 1):
                        label = histo.GetXaxis().GetBinLabel(bin)
                        label = label.strip()
                        if ":" not in label:
                            # not a valid label of type run:lumi-lumi, skip it
                            continue

                        cont = histo.GetBinContent(bin)
                        if cont != cont:
                            # it's a nan
                            cont = -999.0
                        err = histo.GetBinError(bin)
                        if err != err:
                            err = -999.0
                        #                        if len(bxlist)==1:
                        #                            rls.append(label)
                        #                            print label
                        #                        else:
                        if label not in rls:
                            print "New range:", label, " found in ", histoname
                            rls.append(label)

                        if label in thisbx.keys():
                            thismeas = thisbx[label]
                        else:
                            thisbx[label] = bsmeas()
                            thismeas = thisbx[label]
                        #  now filling up
                        if dire == 'X0':
                            thismeas.x = cont
                            thismeas.ex = err
                        if dire == 'Y0':
                            thismeas.y = cont
                            thismeas.ey = err
                        if dire == 'Z0':
                            thismeas.z = cont
                            thismeas.ez = err
                        if dire == 'width_X0':
                            thismeas.wx = cont
                            thismeas.ewx = err
                        if dire == 'Width_Y0':
                            thismeas.wy = cont
                            thismeas.ewy = err
                        if dire == 'Sigma_Z0':
                            thismeas.wz = cont
                            thismeas.ewz = err
                        if dire == 'dxdz':
                            thismeas.dxdz = cont
                            thismeas.edxdz = err
                        if dire == 'dydz':
                            thismeas.dydz = cont
                            thismeas.edydz = err

            key = iter.Next()

    #    for name in pippo:
    #        print name

    filein.Close()

    # let's try to show it
    #    for bx in allmeas.keys():
    #        print "bx=",bx
    #        bxmeas=allmeas[bx]
    #        for meas in bxmeas.keys():
    #            print "meas time=",meas
    #            thismeas=bxmeas[meas]
    #            print thismeas.x,thismeas.ex,thismeas.y,thismeas.ey,thismeas.z,thismeas.ez
    #            print thismeas.wx,thismeas.ewx,thismeas.wy,thismeas.ewy,thismeas.wz,thismeas.ewz
    #            print thismeas.dxdz,thismeas.edxdz,thismeas.dydz,thismeas.edydz
    return allmeas
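# bsmeas() is not defined in this snippet; a minimal sketch of the per-bunch
# beamspot measurement container assumed by readroot(). The attribute names come
# from the assignments above; the default values are an assumption:
class bsmeas(object):
    def __init__(self):
        self.x, self.ex = -999.0, -999.0      # X0 and its error
        self.y, self.ey = -999.0, -999.0      # Y0 and its error
        self.z, self.ez = -999.0, -999.0      # Z0 and its error
        self.wx, self.ewx = -999.0, -999.0    # width_X0 and its error
        self.wy, self.ewy = -999.0, -999.0    # Width_Y0 and its error
        self.wz, self.ewz = -999.0, -999.0    # Sigma_Z0 and its error
        self.dxdz, self.edxdz = -999.0, -999.0
        self.dydz, self.edydz = -999.0, -999.0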
Example #14
    def run(self):
        if not os.path.isfile(self.input_filename):
            print("Cannot find %s.  Exiting.".format(self.input_filename))
            sys.exit()
        tfile_input = ROOT.TFile(self.input_filename)

        tfile_input.cd()
        # gDirectory.cd('VirtualDetector')
        gDirectory.cd('Detector')
        keys = [key.GetName() for key in gDirectory.GetListOfKeys()]

        detector_events = {}
        for key in gDirectory.GetListOfKeys():
            detector_events[key.GetName()] = key.ReadObj()

        for detector, events in detector_events.items():
            events.BuildIndex('EventID', 'TrackID')

        # ROOT prevents building a single tree that is too large.
        for category, detectors in self.category_detectors.items():
            line_to_process = 'struct ' + category + ' {Int_t EventID; Int_t TrackID; '
            for variable in self.variables:
                data_type = 'Float_t'
                if variable == 'EventID' or variable == 'TrackID':
                    data_type = 'Int_t'
                for detector in detectors:
                    line_to_process += data_type + ' ' + variable + detector + '; '
            line_to_process += '};'
            ROOT.gROOT.ProcessLine(line_to_process)

        category_structs = {}
        for category in self.category_detectors.keys():
            category_structs[category] = eval('ROOT.' + category + '()')

        for category, detectors in self.category_detectors.items():
            for detector in detectors:
                for variable in self.variables:
                    events = detector_events[detector]
                    events.SetBranchAddress(
                        variable,
                        ROOT.AddressOf(category_structs[category],
                                       variable + detector))

        tfile_output = ROOT.TFile(self.output_filename, 'RECREATE')
        spill_trees = {}
        pointers = {}
        track_count = 0
        spill_count = 0

        for event in detector_events[self.starter_tree]:
            track_count += 1
            (event_id, track_id) = (int(event.EventID), int(event.TrackID))
            spill = 1 + (event_id // self.spill_size)

            if self.gamma_cutoff > 0. and event.PDGid == 22:
                energy = (event.Px**2 + event.Py**2 + event.Pz**2)**0.5
                if energy < self.gamma_cutoff:
                    continue

            if spill not in spill_trees:
                spill_count += 1
                if spill_count > self.max_spill > 0:
                    break
                self.add_tree(spill_trees, spill, pointers)

            pointers[spill, 'SpillID'][0] = spill
            pointers[spill, 'EventID'][0] = event_id
            pointers[spill, 'TrackID'][0] = track_id

            for detector, events in detector_events.items():
                entry_number = events.GetEntryWithIndex(event_id, track_id)
                track_present = not (entry_number == -1)
                pointers[spill, 'TrackPresent' + detector][0] = track_present

                for variable in self.variables:
                    if variable == 'EventID' or variable == 'TrackID':
                        continue

                    variable_detector = variable + detector
                    value = MergeTree.DEFAULT_VALUE
                    if track_present:
                        value = getattr(
                            category_structs[
                                self.detector_categories[detector]],
                            variable_detector)
                        if variable == 't':
                            random.seed(event_id)
                            value = value * 1.e-9 + self.spill_interval * float(
                                spill) + self.random_offset_seconds()

                    pointers[spill, variable_detector][0] = value

            spill_trees[spill].Fill()

        print('{} total tracks in {}'.format(track_count, self.starter_tree))
        print('{} total spills'.format(spill_count - 1))

        tfile_output.cd()
        for tree in spill_trees.values():
            tree.Write()
        tfile_output.Close()
        tfile_input.Close()
Example #15
def writeNewFile(infile, outfile, regions):
    infile.cd()
    allregions = []
    nextregionkey = TIter(gDirectory.GetListOfKeys())
    regionkey = nextregionkey.Next()
    histograms = {}
    while regionkey and regionkey.GetTitle():
        print regionkey
        if regionkey.IsFolder() == 1:
            regionname = regionkey.GetName()
            newregionname = getNewRegionName(regionname)
            histograms[newregionname] = {}
            allregions.append(regionname)
            inregiondir = infile.Get(regionname)
            inregiondir.cd()
            nextsamplekey = TIter(gDirectory.GetListOfKeys())
            samplekey = nextsamplekey.Next()
            while samplekey:
                if samplekey.IsFolder() == 1:
                    samplename = samplekey.GetName()
                    insampledir = inregiondir.Get(samplename)
                    insampledir.cd()
                    hist = insampledir.Get("nominal")
                    if samplename in histograms[newregionname]:
                        histograms[newregionname][samplename].Add(hist)
                    else:
                        histograms[newregionname][samplename] = hist
                samplekey = nextsamplekey.Next()
        regionkey = nextregionkey.Next()

    #get the binning
    binning = {}
    for newregion in histograms.keys():
        #binning[newregion]=getBinning(histograms[newregion]) #based on the binning algorithm
        #        #ICHEP binning
        #        binning["lhAll_cba_boost_loose_signal"]  = [30.0, 70.0, 80.0, 90.0, 100.0, 110.0, 120.0, 140.0, 230.0]
        #        binning["lhAll_cba_boost_tight_signal"] = [30.0, 70.0, 80.0, 90.0, 100.0, 110.0, 120.0, 140.0, 230.0]
        #        binning["lhAll_cba_vbf_loose_signal"] = [30.0, 70.0, 80.0,  90.0, 100.0, 110.0, 130.0, 150.0, 230.0]
        #        binning["lhAll_cba_vbf_tight_signal"] = [30.0, 75.0, 95.0, 115.0, 125.0, 135.0, 150.0, 230.0]
        #        binning["lhAll_mva_boost_signal"]  = [-1.0, -0.9, -0.8, -0.7, -0.6, -0.5, -0.4, -0.3, -0.2, -0.1, 0.0, 0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9, 1.0]
        #        binning["lhAll_mva_vbf_signal"] =  [-1.0, -0.9, -0.8, -0.7, -0.6, -0.5, -0.4, -0.3, -0.2, -0.1, 0.0, 0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 1.0]
        #Optimised binning
        binning["lhAll_cba_boost_loose_signal"] = [
            30.0, 70.0, 80.0, 90.0, 100.0, 110.0, 120.0, 140.0, 150.0, 180.0,
            230.0
        ]  #Optimisation MMC Ver7
        binning["lhAll_cba_boost_tight_signal"] = [
            30.0, 70.0, 80.0, 90.0, 100.0, 110.0, 120.0, 140.0, 150.0, 180.0,
            230.0
        ]  #Optimisation MMC Ver7
        binning["lhAll_cba_vbf_loose_signal"] = [
            30.0, 70.0, 80.0, 90.0, 100.0, 115.0, 135.0, 150.0, 230.0
        ]  #Optimisation MMC Ver7
        binning["lhAll_cba_vbf_tight_signal"] = [
            30.0, 70.0, 80.0, 90.0, 100.0, 115.0, 135.0, 150.0, 230.0
        ]  #Optimisation MMC Ver7
        binning["lhAll_mva_boost_signal"] = [
            -1.0, -0.9, -0.8, -0.7, -0.6, -0.5, -0.4, -0.3, -0.2, -0.1, 0.0,
            0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.775, 0.85, 0.925, 1.0
        ]  #Ver2
        binning["lhAll_mva_vbf_signal"] = [
            -1.0, -0.85, -0.75, -0.65, -0.55, -0.45, -0.35, -0.25, -0.15,
            -0.05, 0.05, 0.15, 0.25, 0.35, 0.45, 0.55, 0.65, 0.75, 1.0
        ]  #Ver2
        #TopCRs for LH
        binning["lhAll_cba_boost_top"] = [0, 1]
        binning["lhAll_cba_vbf_top"] = [0, 1]
        binning["lhAll_mva_boost_top"] = [0, 1]
        binning["lhAll_mva_vbf_top"] = [0, 1]
        ##binning["lhAll_vbf_tight_signal"] = [30.0, 80.0, 95.0, 110.0, 130.0, 150.0, 230.0] #1 kyle fix
        print "Binning for region ", newregion, " -> ", binning[newregion]

    #now write output file
    infile.cd()
    nextregionkey = TIter(gDirectory.GetListOfKeys())
    regionkey = nextregionkey.Next()
    while regionkey and regionkey.GetTitle():
        print regionkey
        if regionkey.IsFolder() == 1:
            regionname = regionkey.GetName()
            newregionname = getNewRegionName(regionname)
            outfile.cd()
            outfile.mkdir(regionname)
            outregiondir = outfile.Get(regionname)
            infile.cd()
            inregiondir = infile.Get(regionname)
            inregiondir.cd()
            nextsamplekey = TIter(gDirectory.GetListOfKeys())
            samplekey = nextsamplekey.Next()
            while samplekey:
                if samplekey.IsFolder() == 1:
                    samplename = samplekey.GetName()
                    outregiondir.cd()
                    outregiondir.mkdir(samplename)
                    outsampledir = outregiondir.Get(samplename)
                    inregiondir.cd()
                    insampledir = inregiondir.Get(samplename)
                    insampledir.cd()
                    nextsystkey = TIter(gDirectory.GetListOfKeys())
                    systkey = nextsystkey.Next()
                    while systkey:
                        obj = systkey.ReadObj()
                        if obj.IsA().InheritsFrom("TH1"):
                            systname = systkey.GetName()
                            outsampledir.cd()
                            #print binning[newregionname], newregionname
                            #if 'tight' or 'high' in newregionname:
                            #    goodbin = [float(x) for x in range(30,240,20)]
                            #    goodbin = [30.0, 90.0, 230.0]
                            #    print goodbin
                            #    outhist=rebin(obj,goodbin,outsampledir)
                            #elif 'loose' or 'low' in newregionname:
                            #    outhist=rebin(obj,goodbin,outsampledir)
                            #else:
                            #if "vbf" in newregionname and "tight" in newregionname:
                            outhist = rebin(obj, binning[newregionname],
                                            outsampledir)
                            #else: outhist = obj
                            outhist.Write()
                        systkey = nextsystkey.Next()
                else:  #take care of lumi histogram
                    obj = samplekey.ReadObj()
                    if obj.IsA().InheritsFrom("TH1"):
                        newobj = obj.Clone()
                        outregiondir.cd()
                        newobj.SetDirectory(outregiondir)
                        newobj.Write()
                samplekey = nextsamplekey.Next()
        regionkey = nextregionkey.Next()
Example #16
nameDir = "run_"+sys.argv[1]+"/CaloMonitoring/LArCellMon_NoTrigSel/Sporadic20GeV/"+sys.argv[3]

myFile = TRFIOFile(nameFile)

# General numbers
hNbEvts = myFile.Get("run_"+sys.argv[1]+"/LAr/FEBMon/perPartitionData/Eventtype")
print "This stream contains %d events"%hNbEvts.GetEntries()

myFile.cd(nameDir)

# Compile all channels 
if len(sys.argv) == 4:
   h=[]
   q=[]
   hLB=[]
   listOfKeys = gDirectory.GetListOfKeys()
   for key in listOfKeys:
      name = key.GetName()
      type = key.GetClassName()
      if type == "TH1F" and name.find("EN") != -1 :
         h.append(myFile.Get(nameDir+"/"+name))
      if type == "TH1F" and name.find("Quality") != -1 :
         q.append(myFile.Get(nameDir+"/"+name))
      if type == "TH2F" and name.find("ENLB") != -1 :
         hLB.append(myFile.Get(nameDir+"/"+name))

   nhists = len(h)
   print "retrieved %i histos"%nhists

   for i in range(0, nhists):
      if h[i].Integral(61,h[i].GetNbinsX()+1) >= 20:
Example #17
def saveForLimit(TightIsoOS, prefixLabel, mass, massType, fine_binning,
                 category, susy):

    TightIsoOS.Hist('WW').Add(TightIsoOS.Hist('ZZ'))
    TightIsoOS.Hist('WW').Add(TightIsoOS.Hist('WZ'))

    noscomp = {
        'DYJets': 'ZTT',
        'DYJets_Electron': 'ZL',
        'DYJets_Fakes': 'ZJ',
        'WJets': 'W',
        'TTJets': 'TT',
        'WW': 'VV',
        'QCDdata': 'QCD',
        'Data': 'data_obs'
    }

    if susy:
        TightIsoOS.Hist('HiggsGGH125').Add(
            TightIsoOS.Hist('HiggsVBF125'))  ## adding SM Higgs as a bkg
        TightIsoOS.Hist('HiggsGGH125').Add(
            TightIsoOS.Hist('HiggsVH125'))  ## adding SM Higgs as a bkg
        noscomp.update({'HiggsGGH125': 'ggH_SM125+qqH_SM125+VH_SM125'})
        sigcomp = {
            'HiggsSUSYBB' + str(mass): 'bbH' + str(mass),
            'HiggsSUSYGluGlu' + str(mass): 'ggH' + str(mass),
        }
    else:
        sigcomp = {
            'HiggsGGH' + str(mass): 'ggH' + str(mass),
            'HiggsVBF' + str(mass): 'qqH' + str(mass),
            'HiggsVH' + str(mass): 'VH' + str(mass),
        }

    allcomp = {}

    if fine_binning:
        fbnoscomp = {}
        fbsigcomp = {}
        for k in noscomp.keys():
            fbnoscomp.update({k: noscomp[k] + '_fine_binning'})
        for k in sigcomp.keys():
            fbsigcomp.update({k: sigcomp[k] + '_fine_binning'})
        allcomp.update(fbnoscomp)
        allcomp.update(fbsigcomp)
    else:
        allcomp.update(noscomp)
        allcomp.update(sigcomp)

    fileName = '/'.join([
        os.getcwd(), prefixLabel, prefixLabel + '_tauTau_' + category + '_' +
        TightIsoOS.varName + '.root'
    ])

    if TightIsoOS.varName == massType:

        if not os.path.isfile(fileName):
            rootfile = TFile(fileName, 'recreate')
            channel = rootfile.mkdir('tauTau_' + category)
            for comp in allcomp.keys():
                TightIsoOS.Hist(comp).weighted.SetName(allcomp[comp])
                channel.Add(TightIsoOS.Hist(comp).weighted)
            channel.Write()

        else:
            rootfile = TFile(fileName, 'update')
            rootfile.cd('tauTau_' + category)

            alreadyIn = []
            dirList = gDirectory.GetListOfKeys()
            for k2 in dirList:
                h2 = k2.ReadObj()
                alreadyIn.append(h2.GetName())

            for comp in allcomp.keys():
                if allcomp[comp] in alreadyIn: continue  # skip histograms already present in the file
                TightIsoOS.Hist(comp).weighted.SetName(allcomp[comp])
                TightIsoOS.Hist(comp).weighted.Write()
            gDirectory.cd('..')

    rootfile.Close()
Example #18
#### check which datasets were processed and collect the ones with output files
# (the beginning of this fragment is missing; the enclosing loop is assumed to look like this)
for dataset in datasets:
    fileName = "%s/%s.root" % (condor_dir, dataset)
    if not os.path.isfile(fileName):
        continue
    testFile = TFile(fileName)
    if not (testFile.IsZombie()):
        processed_datasets.append(dataset)

#### exit if no datasets found
if len(processed_datasets) == 0:
    print datasets
    sys.exit("Can't find any output root files for the given list of datasets")

#open the first ROOT file and get the list of channels
channels = []
dataset_file = "%s/%s.root" % (condor_dir, processed_datasets[0])
inputFile = TFile(dataset_file)

for key in gDirectory.GetListOfKeys():
    if (key.GetClassName() != "TDirectoryFile"):
        continue
    if "CutFlowPlotter" not in key.GetName():
        continue
    channels.append(key.GetName())

#get and store the yields and errors for each dataset
yields = {}
stat_errors = {}
sys_errors = {}
bgMCSum = {}
bgMCStatErrSquared = {}
bgMCSysErrSquared = {}
processed_datasets_channels = {}
Example #19
def saveForLimit(TightIsoOS, prefixLabel, mass, massType, fine_binning, category, susy):

    #TightIsoOS.Hist('WW').Add(TightIsoOS.Hist('ZZ'))
    #TightIsoOS.Hist('WW').Add(TightIsoOS.Hist('WZ'))

    TightIsoOS.Hist('WWJetsTo2L2Nu').Add(TightIsoOS.Hist('WZJetsTo2L2Q'))
    TightIsoOS.Hist('WWJetsTo2L2Nu').Add(TightIsoOS.Hist('WZJetsTo3LNu'))
    TightIsoOS.Hist('WWJetsTo2L2Nu').Add(TightIsoOS.Hist('ZZJetsTo4L'))
    TightIsoOS.Hist('WWJetsTo2L2Nu').Add(TightIsoOS.Hist('ZZJetsTo2L2Nu'))
    TightIsoOS.Hist('WWJetsTo2L2Nu').Add(TightIsoOS.Hist('ZZJetsTo2L2Q'))

    ## we add single top to dibosons
    TightIsoOS.Hist('WWJetsTo2L2Nu').Add(TightIsoOS.Hist('T_tW'))
    TightIsoOS.Hist('WWJetsTo2L2Nu').Add(TightIsoOS.Hist('Tbar_tW'))
    #TightIsoOS.Hist('T_tW').Add(TightIsoOS.Hist('Tbar_tW'))

    TightIsoOS.Hist('TTJetsFullLept').Add(TightIsoOS.Hist('TTJetsSemiLept'))
    TightIsoOS.Hist('TTJetsFullLept').Add(TightIsoOS.Hist('TTJetsHadronic'))
    
    if susy :
      TightIsoOS.Hist('DYJets').Add(TightIsoOS.Hist('TTJets_emb'))

    noscomp = {
        'DYJets'          :'ZTT',
        'DYJets_ZL'       :'ZL',
        'DYJets_ZJ'       :'ZJ',
        'WJets'           :'W',
        #'TTJets'        :'TT',
        'TTJetsFullLept'  :'TT',
        #'T_tW'          :'T',
        #'WW'            :'VV',
        'WWJetsTo2L2Nu'   :'VV',
        'QCDdata'         :'QCD',
        'Data'            :'data_obs'        
        #'HiggsGGHtoWW125' :'ggH_hww_SM125',
        #'HiggsVBFtoWW125' :'qqH_hww_SM125',
        #'HiggsVHtoWW125'  :'VH_hww_SM125' ,
        }        

    if susy :
      sigcomp = {
          'HiggsSUSYBB'    +str(mass):'bbH'+str(mass),
          'HiggsSUSYGluGlu'+str(mass):'ggH'+str(mass),
          }
      noscomp.update({'HiggsGGH125':'ggH_SM125'})
      noscomp.update({'HiggsVBF125':'qqH_SM125'})
      noscomp.update({'HiggsVH125' :'VH_SM125' })
    else :
      sigcomp = {
          'HiggsGGH'+str(mass):'ggH'+str(mass),
          'HiggsVBF'+str(mass):'qqH'+str(mass),
          'HiggsVH' +str(mass):'VH' +str(mass),        
          'HiggsGGH'+str(mass)+'_pthUp'  :'ggH'+str(mass)+'_QCDscale_ggH1inUp'  ,
          #'HiggsGGH'+str(mass)+'_pthNom' :'ggH'+str(mass)+'_QCDscale_ggH1inNom' ,
          'HiggsGGH'+str(mass)+'_pthDown':'ggH'+str(mass)+'_QCDscale_ggH1inDown',
          }

    allcomp = {}

    if fine_binning :
      fbnoscomp = {}
      fbsigcomp = {}
      for k in noscomp.keys() :
        fbnoscomp.update({k:noscomp[k]+'_fine_binning'})   
      for k in sigcomp.keys() :
        fbsigcomp.update({k:sigcomp[k]+'_fine_binning'})   
      allcomp.update(fbnoscomp)
      allcomp.update(fbsigcomp)
    else :
      allcomp.update(noscomp)
      allcomp.update(sigcomp)
             
    fileName = '/'.join([os.getcwd(),prefixLabel,prefixLabel+'_tauTau_'+category+'_'+TightIsoOS.varName+'.root'])

    if TightIsoOS.varName == massType :
     
      if not os.path.isfile(fileName) :
        rootfile = TFile(fileName,'recreate')
        channel  = rootfile.mkdir('tauTau_'+category)
        print TightIsoOS.histos
        for comp in allcomp.keys() :
          TightIsoOS.Hist(comp).weighted.SetName(allcomp[comp])
          channel.Add(TightIsoOS.Hist(comp).weighted)
        channel.Write()
       
      else : 
        rootfile = TFile(fileName,'update')
        rootfile.cd('tauTau_'+category)
        
        alreadyIn = []
        dirList = gDirectory.GetListOfKeys()
        for k2 in dirList:
          h2 = k2.ReadObj()
          alreadyIn.append(h2.GetName())

        for comp in allcomp.keys() :
          if allcomp[comp] in alreadyIn : continue  # skip histograms already present in the file
          TightIsoOS.Hist(comp).weighted.SetName(allcomp[comp])
          TightIsoOS.Hist(comp).weighted.Write()
                
        gDirectory.cd('..')    

    rootfile.Close()