Пример #1
0
 def __init__(self,
              name,
              projection,
              nx,
              xmin,
              xmax,
              ny,
              ymin,
              ymax,
              side=0):
     '''Create a display pane backed by a ROOT TH2F histogram.

     @param name: title name for the pane
     @param projection: name of the projection (string)
                        eg one of 'xy', 'yz', 'xz' ,'ECAL_thetaphi', 'HCAL_thetaphi'
     @param nx: points on x axis
     @param xmin: lower left corner x for ROOT hists
     @param xmax: upper right corner x for ROOT hists
     @param ny: points on y axis
     @param ymin: lower left corner y for ROOT hists
     @param ymax: upper right corner y for ROOT hists
     @param side: documents which subscreen we are in 0 = main or left, 1 = right
     '''
     self.side = side
     self.projection = projection
     # Disable ROOT's automatic directory registration while booking, so the
     # histogram is owned by this object rather than the current TFile.
     TH1.AddDirectory(False)
     self.hist = TH2F(name, name, nx, xmin, xmax, ny, ymin, ymax)
     TH1.AddDirectory(True)
     self.hist.Draw()
     self.hist.SetStats(False)
     # name -> object registries for elements shown on this pane
     self.registered = dict()
     self.locked = dict()
Пример #2
0
 def __init__(self, name, projection, nx, xmin, xmax, ny, ymin, ymax,
              dx=600, dy=600):
     """Open a new canvas holding a TH2F, offset to the right of any
     previously created views (tracked via the class-level ``nviews``)."""
     self.projection = projection
     # Place each new canvas to the right of the ones opened before it.
     x_origin = 50 + self.__class__.nviews * (dx + 10)
     y_origin = 50
     self.canvas = TCanvas(name, name, x_origin, y_origin, dx, dy)
     # Keep the histogram out of ROOT's current directory while booking it.
     TH1.AddDirectory(False)
     self.hist = TH2F(name, name, nx, xmin, xmax, ny, ymin, ymax)
     TH1.AddDirectory(True)
     self.hist.Draw()
     self.hist.SetStats(False)
     # name -> object registries for elements shown on this view
     self.registered = {}
     self.locked = {}
     self.__class__.nviews += 1
Пример #3
0
def scale_factors(forward, central, output):
    """Build per-bin forward/central correction factors.

    :param forward: ROOT histogram for the forward region
    :param central: ROOT histogram for the central region
    :param output: output file name (currently unused; canvas saving disabled)
    :returns: histogram of forward/central ratios, normalised so the average
              content over the regular bins is 1; bins where either input is
              empty are flagged with content -1 and error 0
    """
    TH1.AddDirectory(ROOT.kFALSE)

    can = ROOT.TCanvas()

    correction_hist = forward.Clone('corrections')
    for mc_bin in xrange(1, forward.GetNcells() - 1):
        a = forward.GetBinContent(mc_bin)
        b = central.GetBinContent(mc_bin)
        # BUG FIX: this condition was indented with a tab (a TabError on
        # Python 3, fragile tab/space mixing on Python 2); re-indented.
        if b == 0. or a == 0.:
            # Flag bins that cannot yield a ratio.
            correction_hist.SetBinContent(mc_bin, -1)
            correction_hist.SetBinError(mc_bin, 0)
            continue
        b_err = central.GetBinError(mc_bin)
        a_err = forward.GetBinError(mc_bin)
        factor = a / b
        # Standard error propagation for the ratio a/b (squared error here,
        # sqrt taken below when stored).
        error = ((1 / b * a_err) ** 2 + (a / (b * b) * b_err) ** 2)

        correction_hist.SetBinContent(mc_bin, factor)
        correction_hist.SetBinError(mc_bin, math.sqrt(error))

    # Normalise so the mean correction over the regular bins is 1
    # (GetNcells includes under/overflow, hence the -2).
    correction_hist.Scale((correction_hist.GetNcells() - 2) /
                          correction_hist.GetSumOfWeights())

    correction_hist.SetAxisRange(0.5, 4, "Y")
    correction_hist.Draw()
    #can.SaveAs(output)
    return correction_hist
Пример #4
0
def ReadHistogram(fitdir, region, process):
    """Return the bin-width-weighted integral of one fit histogram.

    Looks up ``<fitdir>/<region>/<process>`` in the module-level open ROOT
    file ``fin`` (defined elsewhere in this module -- assumed open here).
    Returns 0.0 when the lookup yields a bare TObject (i.e. not found).
    """
    #hist_ = []
    ## construct histogram name
    histname = fitdir + '/' + region + '/' + process
    TH1.AddDirectory(0)

    ## get histo from the rootfile
    hist_ = fin.Get(histname)

    # NOTE(review): AddDirectory is disabled twice; the second call was
    # possibly meant to restore the default (AddDirectory(1)) -- confirm.
    TH1.AddDirectory(0)

    ## This will take into account of underfow and overflow
    ## "width" will multiply the bin-content with bin-width. This is to get the correct integral for bin-width normalised histogram.
    integral_ = 0.0
    if type(hist_) is TObject:
        integral_ = 0.0
    elif type(hist_) is TH1F:
        integral_ = hist_.Integral(0, -1, "width")
    print 'histname = ', histname, 'integral =', integral_

    return integral_
Пример #5
0
def main(argv=None):
    """Command-line entry point: chain the input files, compile the
    RunFemto macro in batch mode, and run it over the chain."""
    argv = sys.argv[1:] if argv is None else argv

    args = arg_parser().parse_args(argv)

    from ROOT import TChain, gROOT, gSystem, TH1

    # Batch mode, out-of-tree ACLiC build dir, detached histograms.
    gSystem.SetBuildDir(".rootbuild")
    gROOT.SetBatch(True)
    TH1.AddDirectory(False)

    chain = TChain("particles")
    for input_name in args.file:
        chain.Add(input_name)

    compile_status = gROOT.LoadMacro("RunFemto.C+")
    assert compile_status >= 0, compile_status

    from ROOT import RunFemto
    RunFemto(chain, args.output, args.limit)
Пример #6
0
__version__ = "$Id"

# Import numpy and the ROOT bindings up front; fail loudly (re-raise) if
# either is missing so the module never half-loads.
try:
    import numpy as np

    import ROOT
    from ROOT import gDirectory, TChain, TCanvas, TFile, TProfile, TNtuple
    from ROOT import TH1, TH1D, TH2F, TF1, TH1F, TGraph, TGraphErrors, TLine
    from ROOT import gROOT, gStyle, gBenchmark, gRandom, gSystem, gDirectory
    from ROOT import gPad, TText, TLatex, TMarker, TColor, TNamed
except ImportError as e:
    print e
    raise ImportError

# Batch mode (no graphics windows) and detach histograms from files.
ROOT.gROOT.SetBatch(True)
TH1.AddDirectory(False)


def objExists(infile, objname, subdir='', verbose=False):
    """
    Test if object exists in input file
    objExists(input file, object name, sub-directory='', verbose=False)
    """
    # NOTE(review): the function body appears truncated in this chunk -- the
    # file is opened and verified below, but no lookup of `objname`/`subdir`
    # is visible here; confirm against the full source.
    # Try to Open File
    fin = ROOT.TFile(infile, "READ")
    file_open = fin.IsOpen()
    err_mess = "\n***Can not open ROOT file %s***\n" % (infile)
    assert (file_open), err_mess

    if (verbose):
        print "\n============= Opening %s\n" % infile
def map_projection(sample_histo_filename,
                   params,
                   open_file=None,
                   open_tree=None):
    """
    Map histogram projection to a root file

    :param sample_histo_filename:   (str) e.g. ``'mysample myhisto /nfs/path/to/file.root'``
    :param params:                  dictionary with parameters (see below)
    :param open_file:               open TFile instance (can be None)
    :param open_tree:               TTree instance to be used (can be None)

    The param dict must have these contents:

    ======================= ================================================================
    histos                  dict of histoname -> tuple(title, n_bins, low bound, high bound)
                            IMPORTANT: the name of the histogram is also the plotted quantity
                            If another quantity should be plotted, it can be passed as the first
                            item in the tuple: tuple(quantity, title, n_bins, low bound, high bound)
    treename                name of the TTree in the ROOT File (not needed when open_tree is given)
    selection (optional)    selection string for TTree.Draw
    nm1 (optional)          create N-1 plots (not placing a selection on the plotted variable)
    weight (optional)       used in selection string for TTree.Draw
    aliases (optional)      dict alias -> function to be used with TTree.SetAlias
    ======================= ================================================================
    """
    from ROOT import TFile, TH1, TH1F, TTree

    sample, histoname, filename = sample_histo_filename.split()
    histoargs = params['histos'][histoname]
    selection = params.get('selection')
    if len(histoargs) == 5:
        # A five-tuple carries the plotted quantity as its first item.
        quantity, histoargs = histoargs[0], histoargs[1:]
    else:
        quantity = histoname

    # A list/tuple selection is a set of cuts to be AND-ed together.
    # (isinstance with a tuple replaces the original any(...) construct.)
    if isinstance(selection, (list, tuple)):
        if params.get('nm1', True):
            # N-1 instruction: don't cut the plotted variable
            selection = list(s for s in selection if quantity not in s)
        selection = ' && '.join(selection)

    selection = '%s*(%s)' % (params.get('weight') or '1', selection or '1')
    histo_draw_cmd = '%s>>+%s' % (quantity, 'new_histo')
    input_file = open_tree or open_file or TFile(filename)

    try:
        if input_file.IsZombie():
            raise RuntimeError('input_file.IsZombie(): %s' % input_file)

        TH1.AddDirectory(True)
        histo = TH1F('new_histo', *histoargs)

        tree = open_tree or input_file.Get(params['treename'])
        if not isinstance(tree, TTree):
            raise RuntimeError(
                'There seems to be no tree named "%s" in file "%s"' %
                (params['treename'], input_file))

        # .items() instead of .iteritems(): works on Python 2 and 3 alike.
        for alias, fcn in params.get('aliases', {}).items():
            if not tree.SetAlias(alias, fcn):
                raise RuntimeError(
                    'Error in TTree::SetAlias: it did not understand %s.' %
                    alias)

        n_selected = tree.Draw(histo_draw_cmd, selection, 'goff')
        if n_selected < 0:
            raise RuntimeError(
                'Error in TTree::Project. Are variables, selections and '
                'weights are properly defined? cmd, selection: %s, %s' %
                (histo_draw_cmd, selection))

        # Detach so the histogram survives input_file.Close().
        histo.SetDirectory(0)
        histo.SetName(histoname)

    finally:
        TH1.AddDirectory(False)
        if not (open_file or open_tree):
            input_file.Close()

    yield sample + ' ' + histoname, histo
Пример #8
0
import ROOT
ROOT.gROOT.SetStyle("Plain")
ROOT.gStyle.SetOptStat(000000000)
ROOT.gStyle.SetOptTitle(0)

from ROOT import TFile, TH1, TObject, TF1, TLegend,TH1F

import sys
sys.path.insert(0, '../limits/')

from injection_merge import *
from signal_background_uncertainty import *
TH1.AddDirectory(ROOT.kFALSE);

import scipy.stats as stats


def scale_factors(forward, central, output):
    # NOTE(review): truncated duplicate of the scale_factors defined earlier
    # in this file -- only the normalisation prologue is visible here.
    TH1.AddDirectory(ROOT.kFALSE);
    #rootfile = TFile(ROOTFile,"READ")
    #forward = rootfile.Get(forward_name).Clone()
    #central = rootfile.Get(central_name).Clone()

    #print "loaded", forward_name, "and", central_name

    central_norm = central.GetSumOfWeights()
    forward_norm = forward.GetSumOfWeights()

    #for i in xrange(1,forward.GetNcells()-1):
    #    forward.SetBinError(i,forward.GetBinError(i)/forward.GetBinContent(i))
Пример #9
0
def create_merged_hists(input_fname, output_fname, postfix, region=['', '']):
    TH1.AddDirectory(ROOT.kFALSE)
    print '=' * 10
    print 'doing the merging'
    print 'input file', input_fname
    print 'output file', output_fname
    in_file = TFile(input_fname)
    keys = in_file.GetListOfKeys()
    hist_names = []
    histos = []
    cloned = []

    for key in keys:
        key = str(key.GetName())
        #print key
        category = key.split("__")[0]
        systematic = False
        if len(key.split("__")) > 3: continue  # and not 'scaleWeight' in key:
        #continue
        #if 'scaleWeight' in key:
        #    systematic = True
        new_name = category + '__' + postfix
        #if systematic:
        #        new_name +='__scaleWeight__'
        #        if 'plus' in key: new_name+='plus'
        #        else: new_name+='minus'
        if region[0]:
            new_name = new_name.replace(region[0], region[1])
        if not new_name in hist_names:
            hist_names.append(new_name)
            histos.append(in_file.Get(key).Clone(new_name))
            histos[-1].Sumw2()
            cloned.append(key)

    #print hist_names
    #print cloned

    for key in keys:
        key = str(key.GetName())
        splitkey = key.split('__')
        category = splitkey[0]
        sys = ''
        if len(splitkey) > 2: sys = splitkey[2] + '__' + splitkey[3]
        for i, name in enumerate(hist_names):
            if category in name and sys in name:
                if key not in cloned:
                    #print 'merging histogram',key,'to',histos[i].GetName()#,cloned[i]
                    hist = in_file.Get(key).Clone(category + '__' + postfix)
                    if type(hist) is not type(histos[i]):
                        print 'can not merge', key, '. it is not of the same type'
                    histos[i].Add(hist)
                    break
                else:
                    break

    in_file.Close()
    out_file = TFile(output_fname, "RECREATE")
    out_file.cd()
    for item in histos:
        item.Write()
    out_file.Close()
    del hist_names
    del histos
    del cloned

    print 'done with merging'
    print '=' * 10
    return output_fname
Пример #10
0
def add_signal_background_uncer(background,
                                signal_region_mc,
                                background_region_mc,
                                output_fname,
                                store_other_histos=False,
                                debug=None):
    TH1.AddDirectory(ROOT.kFALSE)
    print '=' * 10
    print 'Adding the uncertainty of the signal to background region'
    print 'background', background
    print 'signal region', signal_region_mc, 'background region', background_region_mc
    print 'output file', output_fname

    signal_mc_file = TFile(signal_region_mc)
    background_mc_file = TFile(background_region_mc)
    back = TFile(background)

    keys = back.GetListOfKeys()

    hist_names = []
    histos = []
    cloned = []
    other_histos = []
    debug_histos = []

    for key in keys:
        key = str(key.GetName())
        if not 'Background' in key:
            if (store_other_histos): other_histos.append(back.Get(key).Clone())
            continue
        print 'going to use', key, 'and', key.replace(
            'Background', 'DATA'), 'to create signal/background mc uncertainty'
        histos.append(back.Get(key).Clone())
        diff_hist = signal_mc_file.Get(key.replace('Background',
                                                   'DATA')).Clone(key + "diff")
        sig_norm = diff_hist.GetSumOfWeights()
        diff_hist.Scale(1 / sig_norm)  #Integral())
        back_mc_h = background_mc_file.Get(key).Clone()
        back_norm = back_mc_h.GetSumOfWeights()
        back_mc_h.Scale(1 / back_norm)  #Integral())
        #diff_hist.Divide(back_mc_h)
        diff_hist.Add(back_mc_h, -1)

        #fit_function = TF1("linear","1+[0]*x")
        #diff_hist.Fit(fit_function)
        #print fit_function.GetParameter(0)

        plus = histos[-1].Clone()  #back.Get(key).Clone(key+"__mcregion__plus")
        minus = histos[-1].Clone(
        )  #back.Get(key).Clone(key+"__mcregion__minus")

        binsum = 0
        for i in xrange(1, histos[-1].GetNcells() - 1):
            #factor = 1.0/fit_function.Eval(histos[-1].GetXaxis().GetBinCenter(i))
            #histos[-1].SetBinContent(i,histos[-1].GetBinContent(i)*factor)
            #if factor > 1: plus.SetBinContent(i,plus.GetBinContent(i)*factor*factor)
            #else:  minus.SetBinContent(i,minus.GetBinContent(i)*factor*factor)
            binsum += diff_hist.GetBinContent(i)
            #diff_hist.SetBinContent(i, math.fabs(diff_hist.GetBinContent(i)))
            #diff_hist.SetBinContent(i, diff_hist.GetBinContent(i)*-1)

        diff_hist.Scale(histos[-1].GetSumOfWeights())

        if binsum >= 0:
            plus.Add(diff_hist)
            minus.Add(diff_hist, -1)
        else:
            plus.Add(diff_hist, -1)
            minus.Add(diff_hist)

        histos.append(plus)
        histos.append(minus)

        continue

        #diff_hist.Add(back_mc_h,-1)
        print 'entries background', back.Get(
            key).GetSumOfWeights(), 'signal mc', signal_mc_file.Get(
                key.replace('Background', 'DATA')).GetSumOfWeights(
                ), 'back mc', background_mc_file.Get(key).GetSumOfWeights()
        if debug:
            debug_histos.append(diff_hist.Clone())
            debug_histos.append(back_mc_h.Clone())
            #debug_histos[-1].Scale(back_norm )
            #debug_histos[-2].Scale(back_norm )
            debug_histos.append(
                signal_mc_file.Get(key.replace('Background', 'DATA')).Clone())
            debug_histos[-1].Scale(1 / sig_norm)  # back_norm/sig_norm)
            #debug_histos[-1].Chi2Test(debug_histos[-2],"P")

#print diff_hist.GetBinContent(i)
        diff_hist.Scale(histos[-1].GetSumOfWeights())  #Integral())

        #histos[-1].Add(diff_hist)
        plus = histos[-1].Clone()  #back.Get(key).Clone(key+"__mcregion__plus")
        minus = histos[-1].Clone(
        )  #back.Get(key).Clone(key+"__mcregion__minus")

        #plus.Add(diff_hist)
        #minus.Add(diff_hist,-1)

        binsum = 0.0
        for i in xrange(1, diff_hist.GetNcells() - 1):
            binsum += diff_hist.GetBinContent(i)

        if binsum >= 0:
            plus.Add(diff_hist, 2)
            minus.Add(diff_hist, -2)
        else:
            minus.Add(diff_hist, 2)
            plus.Add(diff_hist, -2)
        """
        binsum = 0.0;
        for i in xrange(1,diff_hist.GetNcells()-1):
	    binsum +=diff_hist.GetBinContent(i)
            #diff_hist.SetBinContent(i, math.fabs(diff_hist.GetBinContent(i)))
        """
        #print binsum
        #if binsum>=0:

        #diff_hist.Scale(0.0001)
        #minus.Add(diff_hist,-1)
        #else:
        #   plus.Add(diff_hist,-1)
        #   minus.Add(diff_hist)

        #histos.append(diff_hist)
        histos.append(plus)
        histos.append(minus)
        if debug:
            debug_histos.append(plus.Clone())
            debug_histos.append(minus.Clone())

    out_file = TFile(output_fname, "RECREATE")
    out_file.cd()
    for item in histos:
        item.Write()

    if (store_other_histos):
        for item in other_histos:
            item.Write()
    out_file.Close()
    signal_mc_file.Close()
    background_mc_file.Close()
    back.Close()
    debug_file = None
    if debug:
        debug_file = TFile(debug, 'recreate')
        debug_file.cd()
        for item in debug_histos:
            item.Write()

    print 'done with uncertainty'
    print '=' * 10
Пример #11
0
def map_projection(key_histo_filename, params, open_file=None, open_tree=None):
    """
    Map histogram projection to a root file

    :param key_histo_filename:      (str) e.g. ``'mysample myhisto /nfs/path/to/file.root'``
    :param params:                  dictionary with parameters (see below)
    :param open_file:               open TFile instance (can be None)
    :param open_tree:               TTree instance to be used (can be None)

    The param dict must have these contents:

    ======================= ================================================================
    histos                  dict of histoname -> tuple(title,n_bins,low bound,high bound)
                            IMPORTANT: the name of the histogram is also the plotted quantity
                            If another quantity should be plotted, it can be passed as the first
                            item in the tuple: tuple(quantity,title,n_bins,low bound,high bound).
                            If a six-tuple is provided, the first item is interpreted as a special
                            weight-expression for this histogram: tuple(weight,quantity,title,...).
    treename                name of the TTree in the ROOT File (not needed when open_tree is given)
    selection (optional)    selection string for TTree.Draw
    nm1 (optional)          create N-1 plots (not placing a selection on the plotted variable)
    weight (optional)       used in selection string for TTree.Draw
    aliases (optional)      dict alias -> function to be used with TTree.SetAlias
    ======================= ================================================================
    """
    from ROOT import TFile, TH1, TH1F, TTree

    key, histoname, filename = key_histo_filename.split()
    histoargs = params['histos'][histoname]
    # Unpack the optional per-histogram weight / quantity prefix items.
    if len(histoargs) == 6:
        weight, quantity, histoargs = histoargs[0], histoargs[1], histoargs[2:]
    elif len(histoargs) == 5:
        weight, quantity, histoargs = '', histoargs[0], histoargs[1:]
    else:
        weight, quantity = '', histoname

    histo_draw_cmd = '%s>>+%s' % (quantity, 'new_histo')
    input_file = open_tree or open_file or TFile(filename)
    selection = _prepare_selection(params, quantity, weight)

    try:
        if input_file.IsZombie():
            raise RuntimeError('input_file.IsZombie(): %s' % input_file)

        TH1.AddDirectory(True)
        histo_factory = params.get('histo_factory', TH1F)
        histo = histo_factory(histoname, *histoargs)
        histo.SetName('new_histo')

        tree = open_tree or input_file.Get(params['treename'])
        if not isinstance(tree, TTree):
            raise RuntimeError(
                'There seems to be no tree named "%s" in file "%s"' %
                (params['treename'], input_file))

        # .items() instead of .iteritems(): works on Python 2 and 3 alike.
        for alias, fcn in params.get('aliases', {}).items():
            if not tree.SetAlias(alias, fcn):
                raise RuntimeError(
                    'Error in TTree::SetAlias: it did not understand %s.' %
                    alias)

        tree_prep = params.get('tree_prep')
        if tree_prep:
            tree = tree_prep(tree) or tree

        n_selected = tree.Draw(histo_draw_cmd, selection, 'goff')
        if n_selected < 0:
            raise RuntimeError(
                'Error in TTree::Project. Are variables, selections and '
                'weights are properly defined? cmd, selection: %s, %s' %
                (histo_draw_cmd, selection))

        # Detach so the histogram survives input_file.Close().
        histo.SetDirectory(0)
        histo.SetName(histoname)

    finally:
        TH1.AddDirectory(False)
        if not (open_file or open_tree):
            input_file.Close()

    yield key + ' ' + histoname, histo
Пример #12
0
def merge_root_file(target, source_list):
    """
    Merge next file from the source list with the target file.
    Function called recursively for each element of the list.

    :param TFile target: the result ROOT file
    :param TList source_list: list of input files to merge

    Histograms are summed, trees are chained and merged, subdirectories are
    recursed into. Relies on ROOT names (gDirectory, TIter, TChain,
    TDirectory, TTree) imported at module level.
    """
    logger = get_logger()
    # Strip the "file.root:" prefix to get the in-file directory path.
    raw_path = target.GetPath()
    path = raw_path[raw_path.find(":") + 1:]

    first_source = source_list.First()
    first_source.cd(path)
    current_source_dir = gDirectory
    # gain time, do not add the objects in the list in memory
    status = TH1.AddDirectoryStatus()
    TH1.AddDirectory(False)

    # loop over all keys in this directory
    #global_chain = TChain()
    next_key = TIter(current_source_dir.GetListOfKeys())
    #key = TKey()
    #TKey old_key = None
    key = next_key()
    while key:
        # keep only the highest cycle number for each key
        #if old_key and not old_key.GetName() == key.GetName():
        #    continue
        # read object from first source file
        first_source.cd(path)
        obj = key.ReadObj()

        if obj.IsA().InheritsFrom(TH1.Class()):
            # descendant of TH1 -> merge it
            logger.info("Merging histogram %s", obj.GetName())
            # NOTE(review): TH1 is abstract in ROOT; constructing TH1(obj)
            # directly looks like a C++-tutorial translation artifact --
            # confirm this runs (Clone() would be the usual PyROOT idiom).
            h1 = TH1(obj)

            # loop over all source files and add the content of the
            # correspondant histogram to the one pointed to by "h1"
            next_source = source_list.After(first_source)
            while next_source:
                # make sure we are at the correct directory level by cd'ing to path
                next_source.cd(path)
                key2 = gDirectory.GetListOfKeys().FindObject(h1.GetName())
                if key2:
                    h2 = TH1(key2.ReadObj())
                    h1.Add(h2)
                    #del h2
                next_source = source_list.After(next_source)

        elif obj.IsA().InheritsFrom(TTree.Class()):
            logger.info("Merging tree %s", obj.GetName())
            # loop over all source files and create a chain of Trees "global_chain"
            obj_name = obj.GetName()
            global_chain = TChain(obj_name)
            global_chain.Add(first_source.GetName())
            next_source = source_list.After(first_source)
            while next_source:
                global_chain.Add(next_source.GetName())
                next_source = source_list.After(next_source)

        elif obj.IsA().InheritsFrom(TDirectory.Class()):
            logger.info("Found subdirectory %s", obj.GetName())
            # create a new subdir of same name and title in the target file
            target.cd()
            new_dir = target.mkdir(obj.GetName(), obj.GetTitle())
            # newdir is now the starting point of another round of merging
            # newdir still knows its depth within the target file via
            # GetPath(), so we can still figure out where we are in the recursion
            merge_root_file(new_dir, source_list)

        else:
            logger.info("Unknown object type, name: %s, title: %s",
                        obj.GetName(), obj.GetTitle())

        # now write the merged histogram (which is "in" obj) to the target file
        # note that this will just store obj in the current directory level,
        # which is not persistent until the complete directory itself is stored
        # by "target.Write()" below
        if obj is not None:
            target.cd()
            # if the object is a tree, it is stored in global_chain...
            if obj.IsA().InheritsFrom(TTree.Class()):
                global_chain.Merge(target.GetFile(), 0, "keep")
            else:
                obj.Write(key.GetName())

        # move to the next element
        key = next_key()

    # save modifications to target file
    target.SaveSelf(True)
    # restore the AddDirectory state saved at function entry
    TH1.AddDirectory(status)
    target.Write()
Пример #13
0
def loadHistos(inputdata, inputMC, region, weights, weights2, trackType,
               mcIsData, dataIsMC):
    """Load dilepton-mass-vs-pT 2D histograms for data and MC, optionally
    combining weighted samples, and slice them into per-pT-bin 1D
    projections.

    :param inputdata: single file name (dataIsMC False) or list of names
    :param inputMC: single file name (mcIsData True) or list of names
    :param region: selects barrel ('BB'/'barrel') or endcap ('BE'/'endcap')
    :param weights / weights2: per-sample weights for MC / data lists
    :param trackType: substring of the histogram directory name
    :param mcIsData / dataIsMC: swap the single-file vs list handling
    :returns: (data projections, mc projections, ptda means, ptmc means)

    NOTE(review): relies on module-level names ``ptbins``, ``rebinFactor``,
    ``getBinRange`` and ROOT imports (TFile, TH2F, TH1D, TH1, kFALSE) --
    confirm they are defined in the enclosing module.
    """
    _fileDATA = []
    if not dataIsMC:
        _fileDATA.append(TFile(inputdata))
    else:
        for data in inputdata:
            _fileDATA.append(TFile(data))
    _fileMC = []
    if mcIsData:
        _fileMC.append(TFile(inputMC))
    else:
        for mc in inputMC:
            _fileMC.append(TFile(mc))

    hdata = TH2F()
    hmc = TH2F()

    # Detach from any file so the histograms survive the Close() calls below.
    hdata.SetDirectory(0)
    hmc.SetDirectory(0)
    TH1.AddDirectory(kFALSE)

    reg = ""
    if ("BB" in region or "barrel" in region): reg = "_BB"
    elif ("BE" in region or "endcap" in region): reg = "_BE"

    if not dataIsMC:
        hdata = _fileDATA[0].Get(
            "Our2017MuonsPlusMuonsMinus%sResolution/DileptonMass_2d_vsPt%s" %
            (trackType, reg)).Clone()
    else:
        # Sum the (optionally weighted) data-like MC samples.
        for k, data in enumerate(inputdata):
            tmp = _fileDATA[k].Get(
                "Our2017MuonsPlusMuonsMinus%sResolution/DileptonMass_2d_vsPt%s"
                % (trackType, reg)).Clone()
            if k == 0 and not weights2:
                hdata = tmp
            elif k == 0 and weights2:
                nEvents = _fileDATA[k].Get(
                    "EventCounter/Events").GetBinContent(1)
                print("Weighting with %s " % (40000 * weights2[k] / nEvents))
                tmp.Scale(40000 * weights2[k] / nEvents)
                hdata = tmp
            elif not weights2:
                hdata.Add(tmp)
            else:
                nEvents = _fileDATA[k].Get(
                    "EventCounter/Events").GetBinContent(1)
                print("Weighting with %s " % (40000 * weights2[k] / nEvents))
                tmp.Scale(40000 * weights2[k] / nEvents)
                hdata.Add(tmp)

    if mcIsData:
        hmc = _fileMC[0].Get(
            "Our2017MuonsPlusMuonsMinus%sResolution/DileptonMass_2d_vsPt%s" %
            (trackType, reg)).Clone()
    else:
        # Sum the (optionally weighted) MC samples.
        for k, mc in enumerate(inputMC):
            #print mc
            tmp = _fileMC[k].Get(
                "Our2017MuonsPlusMuonsMinus%sResolution/DileptonMass_2d_vsPt%s"
                % (trackType, reg)).Clone()

            if k == 0 and not weights:
                hmc = tmp
            elif k == 0 and weights:
                nEvents = _fileMC[k].Get("EventCounter/Events").GetBinContent(
                    1)
                print("Weighting with %s " % (40000 * weights[k] / nEvents))
                tmp.Scale(40000 * weights[k] / nEvents)
                hmc = tmp
            elif not weights:
                hmc.Add(tmp)
            else:
                nEvents = _fileMC[k].Get("EventCounter/Events").GetBinContent(
                    1)
                print("Weighting with %s " % (40000 * weights[k] / nEvents))
                tmp.Scale(40000 * weights[k] / nEvents)
                hmc.Add(tmp)

    for f in _fileDATA:
        f.Close()
    for f in _fileMC:
        f.Close()

    # Endcap uses one fewer pT bin than barrel.
    if "BB" in reg:
        data = [TH1D() for x in range(len(ptbins) - 1)]
        mc = [TH1D() for x in range(len(ptbins) - 1)]
        ptda = [0 for x in range(len(ptbins) - 1)]
        ptmc = [0 for x in range(len(ptbins) - 1)]
    else:
        data = [TH1D() for x in range(len(ptbins) - 2)]
        mc = [TH1D() for x in range(len(ptbins) - 2)]
        ptda = [0 for x in range(len(ptbins) - 2)]
        ptmc = [0 for x in range(len(ptbins) - 2)]

    # NOTE(review): AddDirectory is global state; calling it once would
    # suffice -- the per-histogram calls below are redundant but harmless.
    for h in data:
        h.SetDirectory(0)
        TH1.AddDirectory(kFALSE)
    for h in mc:
        h.SetDirectory(0)
        TH1.AddDirectory(kFALSE)

    for i, h in enumerate(data):
        # Mean pT per slice, then project the mass axis for that pT window.
        ymin, ymax = getBinRange(hdata, ptbins[i], ptbins[i + 1], reg)
        hdata.GetYaxis().SetRangeUser(ptbins[i], ptbins[i + 1])
        hmc.GetYaxis().SetRangeUser(ptbins[i], ptbins[i + 1])
        ptda[i] = hdata.GetMean(2)
        ptmc[i] = hmc.GetMean(2)
        hdata.GetYaxis().SetRange()
        hmc.GetYaxis().SetRange()

        data[i] = hdata.ProjectionX("datapy%s%s" % (ptbins[i], region), ymin,
                                    ymax)
        mc[i] = hmc.ProjectionX("mcpy%s%s" % (ptbins[i], region), ymin, ymax)

        #~ data[i].Rebin(1)
        #~ mc  [i].Rebin(1)

        # Coarser rebinning for low-statistics slices.
        if (data[i].Integral() < 1500):
            data[i].Rebin(6)
        elif (data[i].Integral() < 2500):
            data[i].Rebin(4)
        else:
            data[i].Rebin(rebinFactor)

        if (mc[i].Integral() < 1500):
            mc[i].Rebin(6)
        elif (mc[i].Integral() < 2500):
            mc[i].Rebin(4)
        else:
            mc[i].Rebin(rebinFactor)

        #~ if mcIsData:
        #~ mc[i].Rebin(2)
        #~ else:
        #~ mc[i].Rebin(2)


#        if (ptbins[i]==200 or ptbins[i]==152) and "BE" in region:
#            mc[i].Rebin(2)

    return data, mc, ptda, ptmc
Пример #14
0
def AddFFcorr(infname,
              intreename,
              outfname,
              outtreename,
              Lcstate,
              leptname,
              q2True_branchname,
              costhlTrue_branchname,
              nentries_to_read=1000000000,
              chunksize=10000):
    """Compute per-event form-factor (FF) correction weights and write them,
    together with the true q2 and cos(theta_l), to a new ROOT file.

    Arguments:
    ---------
    infname:    path to the input ROOT file
    intreename: name of the TTree in the input file
    outfname:   path of the output ROOT file to create
    outtreename: name of the TTree to create in the output file
    Lcstate:    'Lc', 'Lc2595' or 'Lc2625' — selects the correction table
    leptname:   'mu' or 'tau' — selects the lepton branches and histogram
    q2True_branchname:     name of the output branch holding true q2
    costhlTrue_branchname: name of the output branch holding true cos(theta_l)
    nentries_to_read: maximum number of candidates to process
    chunksize:  pandas chunk size used when iterating over the input tree

    Raises:
    ------
    Exception if Lcstate or leptname is not one of the recognised values.

    Output:
    ------
    A ROOT file `outfname` with tree `outtreename` containing runNumber,
    eventNumber, Event_FFcorr and the two truth kinematics branches.
    """

    # Keep histograms detached from the current ROOT directory so the clone
    # below survives closing its file.
    TH1.AddDirectory(kFALSE)

    # Select the ratio-table file and the q2 unit conversion factor for the
    # requested Lc state (the Lc* tables are binned in GeV^2, hence 1e-6).
    if Lcstate == 'Lc':
        perfname = './CorrectionTables/LcFFratios.root'
        q2factor = 1.
    elif Lcstate == 'Lc2595' or Lcstate == 'Lc2625':
        perfname = './CorrectionTables/LcstFFratios.root'
        q2factor = 1e-6
    else:
        raise Exception('Lc state not recognised', Lcstate)

    if leptname != 'mu' and leptname != 'tau':
        raise Exception('Lepton name not recognised', leptname)

    print('Using the histname', Lcstate + leptname + "_ratio")

    #variables to get from file
    varsdf = ['runNumber', 'eventNumber']
    varsdf += ['Lb_TRUEP_X', 'Lb_TRUEP_Y', 'Lb_TRUEP_Z', 'Lb_TRUEP_E']
    varsdf += ['Lc_TRUEP_X', 'Lc_TRUEP_Y', 'Lc_TRUEP_Z', 'Lc_TRUEP_E']
    varsdf += [
        'Lb_True' + leptname.capitalize() + '_PX',
        'Lb_True' + leptname.capitalize() + '_PY',
        'Lb_True' + leptname.capitalize() + '_PZ',
        'Lb_True' + leptname.capitalize() + '_PE'
    ]
    varsdf += [
        'Lb_TrueNeutrino_PX', 'Lb_TrueNeutrino_PY', 'Lb_TrueNeutrino_PZ',
        'Lb_TrueNeutrino_PE'
    ]

    # Load the 2D correction histogram (q2 vs cos(theta_l) ratio) and clone it
    # so it stays valid after the file is closed.
    File = TFile.Open(perfname, "read")
    Histg = File.Get(Lcstate + leptname + "_ratio")
    perfHist = Histg.Clone(Lcstate + leptname + "_rationew")
    File.Close()
    Xmin = perfHist.GetXaxis().GetXmin()
    Xmax = perfHist.GetXaxis().GetXmax()
    Ymin = perfHist.GetYaxis().GetXmin()
    Ymax = perfHist.GetYaxis().GetXmax()
    Limits = (Xmin, Xmax, Ymin, Ymax)
    print(Limits, perfHist.Integral())

    #variables to store in the new ttree
    # NOTE(review): np.int is deprecated in recent NumPy releases; kept here
    # to preserve the stored branch types — confirm before upgrading NumPy.
    varstoStore = {
        'runNumber': np.int,
        'eventNumber': np.int,
        'Event_FFcorr': np.float64,
        costhlTrue_branchname: np.float64,
        q2True_branchname: np.float64
    }

    aliases = {}
    #create a new rootfile
    with uproot3.recreate(outfname) as f:
        f[outtreename] = uproot3.newtree(varstoStore)

        #loop over the old rootfile chunkwise
        events_read = 0
        if chunksize >= nentries_to_read: chunksize = nentries_to_read
        for df_data in uproot4.iterate(infname + ':' + intreename,
                                       varsdf,
                                       aliases=aliases,
                                       cut=None,
                                       library="pd",
                                       step_size=chunksize):
            if events_read >= nentries_to_read: break

            #Compute q2 and cosThetaL
            pxl = df_data['Lb_True' + leptname.capitalize() + '_PX']
            pxnu = df_data['Lb_TrueNeutrino_PX']
            pyl = df_data['Lb_True' + leptname.capitalize() + '_PY']
            pynu = df_data['Lb_TrueNeutrino_PY']
            pzl = df_data['Lb_True' + leptname.capitalize() + '_PZ']
            pznu = df_data['Lb_TrueNeutrino_PZ']
            pel = df_data['Lb_True' + leptname.capitalize() + '_PE']
            penu = df_data['Lb_TrueNeutrino_PE']
            if (Lcstate == 'Lc2595' or Lcstate == 'Lc2625'):
                # The Lc* momentum is not stored directly, so reconstruct it
                # by momentum conservation: p(Lc*) = p(Lb) - p(l) - p(nu).
                # (Bug fix: the Y, Z and E components previously all read
                # 'Lb_TRUEP_X'.)
                pxlc = df_data['Lb_TRUEP_X'] - pxl - pxnu
                pylc = df_data['Lb_TRUEP_Y'] - pyl - pynu
                pzlc = df_data['Lb_TRUEP_Z'] - pzl - pznu
                pelc = df_data['Lb_TRUEP_E'] - pel - penu
            elif Lcstate == 'Lc':
                pxlc = df_data['Lc_TRUEP_X']
                pylc = df_data['Lc_TRUEP_Y']
                pzlc = df_data['Lc_TRUEP_Z']
                pelc = df_data['Lc_TRUEP_E']

            PLc_lab = LorentzVector(
                Vector(pxlc, pylc,
                       pzlc), pelc)  #Format of LorentzVector(Vector(X,Y,Z), E)
            Pl_lab = LorentzVector(Vector(pxl, pyl, pzl), pel)
            PNu_lab = LorentzVector(Vector(pxnu, pynu, pznu), penu)
            # Lb momentum rebuilt from its decay products rather than taken
            # from the Lb_TRUEP branches.
            PLb_lab = PLc_lab + Pl_lab + PNu_lab
            qsq, cthl = return_phasespace(PLb_lab, PLc_lab, Pl_lab)
            #print(qsq,cthl)
            df_data[q2True_branchname] = qsq
            df_data[costhlTrue_branchname] = cthl

            #get the corrections
            applyvars = [q2True_branchname, costhlTrue_branchname
                         ]  #has to be in correct order like in histogram
            df_data['Event_FFcorr'] = df_data[applyvars].apply(
                storeeff2D, args=[perfHist, Limits, q2factor], axis=1)

            #get only the things that need to be stored and write them to the file
            branch_dict = {
                vartostore: df_data[vartostore].to_numpy()
                for vartostore in list(varstoStore.keys())
            }
            f[outtreename].extend(branch_dict)
            events_read += df_data.shape[0]
            print('Events read', events_read)
Пример #15
0
def add_signal_background_uncer(background,
                                signal_region_mc,
                                background_region_mc,
                                output_fname="",
                                store_other_histos=False,
                                debug=None):
    """Propagate the MC signal-region / control-region ratio onto the
    data-driven background estimate and write the corrected histograms,
    including up/down shape variations, to a new ROOT file.

    Arguments:
    background           -- ROOT file with the data-driven background
                            histograms (keys containing 'Background')
    signal_region_mc     -- ROOT file with the signal-region MC; the
                            histogram is looked up under the key with
                            'Background' replaced by 'DATA'
    background_region_mc -- ROOT file with the control-region MC
    output_fname         -- output file name; defaults to the background
                            file name with a '_unc.root' suffix
    store_other_histos   -- if True, histograms whose key does not contain
                            'Background' are copied through unchanged
    debug                -- optional file name for debug histograms
                            (NOTE(review): debug_histos is never filled,
                            so this file is always written empty)

    Returns the output file name.
    """
    # Detach histograms from ROOT's directory bookkeeping so clones survive
    # closing their source files.
    TH1.AddDirectory(ROOT.kFALSE)

    if not output_fname:
        output_fname = background.replace('.root', '_unc.root')
    # File that records the rebinned SR/CR histograms and the bin-by-bin
    # correction factors, alongside the main output.
    correction = output_fname.replace('.root', '_correction.root')

    print '=' * 10
    print 'Adding the uncertainty of the signal to background region'
    print 'background', background
    print 'signal region', signal_region_mc, 'background region', background_region_mc
    print 'output file', output_fname
    print 'correction file', correction

    correction_file = TFile(correction, 'RECREATE')

    signal_mc_file = TFile(signal_region_mc)
    background_mc_file = TFile(background_region_mc)
    back = TFile(background)

    keys = back.GetListOfKeys()

    hist_names = []
    histos = []
    cloned = []
    other_histos = []
    # NOTE(review): debug_histos is declared but nothing is ever appended.
    debug_histos = []

    # NOTE(review): 'scale' is set but never read — dead variable.
    scale = False

    for key in keys:
        key = str(key.GetName())
        # Keys with more than one '__' separator are systematic variations —
        # skip them; only nominal histograms are processed.
        if len(key.split('__')) > 2: continue
        #print key
        if not 'Background' in key:
            if (store_other_histos): other_histos.append(back.Get(key).Clone())
            continue
        print 'going to use', key, 'and', key.replace(
            'Background', 'DATA'), 'to create signal/background mc uncertainty'

        category = key.split('__')[0]

        data_back = back.Get(key).Clone()
        data_norm = data_back.GetSumOfWeights()

        signal_mc_h = signal_mc_file.Get(key.replace(
            'Background', 'DATA')).Clone("signalregion")
        back_mc_h = background_mc_file.Get(key).Clone("background")

        # Rebin twice: first to limit the relative error of the signal-region
        # MC to 10%, then again to limit the control-region MC to 5%. Both
        # histograms always share the same binning.
        new_sig_binning = update(signal_mc_h, 0.10)
        print new_sig_binning
        signal_mc_h = signal_mc_h.Rebin(
            len(new_sig_binning) - 1, category + "__SR",
            array.array('d', new_sig_binning))
        back_mc_h = back_mc_h.Rebin(
            len(new_sig_binning) - 1, category + "__CR",
            array.array('d', new_sig_binning))
        new_sig_binning = update(back_mc_h, 0.05)
        signal_mc_h = signal_mc_h.Rebin(
            len(new_sig_binning) - 1, category + "__SR",
            array.array('d', new_sig_binning))
        back_mc_h = back_mc_h.Rebin(
            len(new_sig_binning) - 1, category + "__CR",
            array.array('d', new_sig_binning))
        print len(get_binning(signal_mc_h))
        print get_binning(signal_mc_h)

        sig_norm = signal_mc_h.GetSumOfWeights()
        back_norm = back_mc_h.GetSumOfWeights()

        # Normalise the signal-region MC to the control-region MC via a fit
        # (above 700, presumably in the x-axis variable — confirm with
        # get_scale's definition).
        sig_fit_scale = get_scale(back_mc_h, signal_mc_h, back_norm / sig_norm,
                                  700)
        signal_mc_h.Scale(sig_fit_scale)
        correction_file.cd()

        signal_mc_h.Write()
        back_mc_h.Write()

        # Bin-by-bin SR/CR ratio with errors propagated from both histograms.
        correction_hist = signal_mc_h.Clone(category + '__corrections')
        for mc_bin in xrange(1, signal_mc_h.GetNcells() - 1):
            b = back_mc_h.GetBinContent(mc_bin)
            b_err = back_mc_h.GetBinError(mc_bin)
            a = signal_mc_h.GetBinContent(mc_bin)
            a_err = signal_mc_h.GetBinError(mc_bin)
            factor = a / b
            error = ((1 / b * a_err)**2 + (a / (b * b) * b_err)**2)
            correction_hist.SetBinContent(mc_bin, factor)
            correction_hist.SetBinError(mc_bin, math.sqrt(error))

        # Normalise the correction histogram so its average bin content is 1
        # (number of real bins divided by the sum of weights).
        scaling = (correction_hist.GetNcells() -
                   2) / correction_hist.GetSumOfWeights()
        correction_hist.Scale(scaling)
        correction_hist.Write()
        draw_error_hist(
            correction_hist,
            output_fname.split('/')[0] + '/' + key + '_correction.eps', key)

        # Systematic name: category with the lepton-flavour tag stripped so
        # electron and muon channels share one nuisance parameter.
        name = "__mcR_" + key.split("__")[0]

        name = name.replace('Mu', '')
        name = name.replace('Ele', '')

        # 'plus' keeps the uncorrected shape (data_back is cloned before it
        # is modified below); 'minus' gets the correction applied twice —
        # together they bracket the corrected nominal as a shape systematic.
        plus = data_back.Clone(
            key + name +
            "__plus")  #back.Get(key).Clone(key+"__mcregion__plus")
        minus = data_back.Clone(
            key + name +
            "__minus")  #back.Get(key).Clone(key+"__mcregion__minus")

        scaling = 1
        for i in xrange(1, data_back.GetNcells() - 1):
            content = data_back.GetBinContent(i)
            error = data_back.GetBinError(i)**2
            bin_center = data_back.GetBinCenter(i)
            # Map this (finer) data bin onto the rebinned MC histograms.
            mc_bin = signal_mc_h.GetXaxis().FindBin(bin_center)

            b = back_mc_h.GetBinContent(mc_bin)
            a = signal_mc_h.GetBinContent(mc_bin)
            b_err = back_mc_h.GetBinError(mc_bin)
            a_err = signal_mc_h.GetBinError(mc_bin)

            #print 'a',a,'+-',a_err,'b',b,'+-',b_err
            factor = a / b * scaling
            bin_err = ((1 / b * a_err)**2 + (a /
                                             (b * b) * b_err)**2) * scaling**2
            #factor = 1/factor
            #print 'bin',i,'center', bin_center,'content', content, 'new content', content*factor, 'correction factor', factor,'+-', math.sqrt(bin_err)
            #print 'stat  unc',math.sqrt(error)
            # Add the correction-factor uncertainty in quadrature to the
            # statistical error of the bin.
            error = error + content**2 * bin_err
            #print 'stat + sys',math.sqrt(error)

            data_back.SetBinContent(i, content * factor)
            data_back.SetBinError(i, math.sqrt(error))
            minus.SetBinContent(i, content * factor * factor)

        # method with shape uncertainties
        histos.append(data_back)
        histos.append(plus)
        histos.append(minus)

    out_file = TFile(output_fname, "RECREATE")
    out_file.cd()
    for item in histos:
        item.Write()

    if (store_other_histos):
        for item in other_histos:
            item.Write()
    out_file.Close()
    signal_mc_file.Close()
    background_mc_file.Close()
    back.Close()
    debug_file = None
    if debug:
        debug_file = TFile(debug, 'recreate')
        debug_file.cd()
        for item in debug_histos:
            item.Write()

    print 'done with uncertainty'
    print '=' * 10
    return output_fname
Пример #16
0
def AddPIDCalibWeights(
        infname,
        intreename,
        outfname,
        magtype,
        year='2016',
        perfhistpath='/disk/lhcb_data2/amathad/Lb2Lclnu_analysis/perfhist/RLc',
        nentries_to_read=1000000000,
        chunksize=10000):
    """
    Function:
    ---------
    Adds PIDCalib weights for following cuts
    Proton: [DLLp>0]
    Kaon  : [DLLK>4]
    Pion  : [DLLK<2]
    Muon  : [DLLmu>2&&DLLmu-DLLK>2&&DLLmu-DLLp>2&&IsMuon==1&&MC15TuneV1_ProbNNghost<0.2] (NB: This is an 'offline' cut on Muon not Stripping cut).

    Arguments:
    ---------
    infname:    full path to input file tfile including it's name
    intreename: ttree name in the input file
    outfname:   full path to the output file including it's name where it will be written
    magtype :   MagDown or MagUp
    year :      2016
    perfhistpath: path where the perfomance histogram are stored (these are created using custom binning scheme). Default is set to '/disk/lhcb_data2/amathad/Lb2Lclnu_analysis/perfhist/RLc'.
    nentries_to_read: number of candidates to read. Set to root default i.e. 1000000000
    chunksize: Pandas data frame chunksize to read

    Output:
    ------
    Stores a root file with the same TTree name as the input. The file contains eventNumber, runNumber and PIDCalib weights (Event_PIDCalibEffWeight).
    """
    # Keep histograms detached from ROOT's directory bookkeeping so the
    # clones below survive closing their files.
    TH1.AddDirectory(kFALSE)
    yr = "Turbo" + year[-2:]

    def _branch(trck):
        # Branch prefix in the ntuple: kaons are stored as 'K', all other
        # tracks under the lower-case track name (p, pi, mu).
        return trck if trck == 'K' else trck.lower()

    varsdf = ['runNumber', 'eventNumber', 'nTracks']
    perfHist = {}
    limits = {}
    for trck_PIDCalib in trcks_PIDCalib:
        varsdf += [
            _branch(trck_PIDCalib) + "_P",
            _branch(trck_PIDCalib) + "_PT"
        ]

        # Build the performance-histogram file name from track, year,
        # magnet polarity and binning scheme.
        prefix = perfhistpath + "/PerfHists_" + trck_PIDCalib + "_" + yr + "_" + magtype
        binningname = "binning-" + trck_PIDCalib + "-" + yr + "-" + magtype
        suffix = "_".join(leafs_PIDCalib)
        perfname = prefix + "_" + binningname + "_" + suffix + ".root"
        File = TFile.Open(perfname, "read")
        Histg = File.Get(perfhistname[trck_PIDCalib])
        perfHist[trck_PIDCalib] = Histg.Clone(trck_PIDCalib + "new")
        File.Close()

        # Axis limits are fixed per histogram: compute them once here instead
        # of recomputing them for every data chunk.
        h = perfHist[trck_PIDCalib]
        limits[trck_PIDCalib] = (h.GetXaxis().GetXmin(),
                                 h.GetXaxis().GetXmax(),
                                 h.GetYaxis().GetXmin(),
                                 h.GetYaxis().GetXmax(),
                                 h.GetZaxis().GetXmin(),
                                 h.GetZaxis().GetXmax())

    varstoStore = ['runNumber', 'eventNumber', 'Event_PIDCalibEffWeight']
    if os.path.exists(outfname): os.remove(outfname)
    if nentries_to_read <= chunksize: chunksize = nentries_to_read
    events_read = 0
    for df_data in read_root(infname,
                             intreename,
                             chunksize=chunksize,
                             columns=varsdf):
        print('Events read', events_read)
        if events_read >= nentries_to_read: break
        # Per-track efficiency from the (P, PT, nTracks) performance
        # histogram lookup.
        for trck_PIDCalib in trcks_PIDCalib:
            col = _branch(trck_PIDCalib)
            applyvars = [col + '_P', col + '_PT', 'nTracks']
            df_data[col + '_PIDCalibeff'] = df_data[applyvars].apply(
                storeeff,
                args=[perfHist[trck_PIDCalib], limits[trck_PIDCalib]],
                axis=1)

        # Event weight = product of the four per-track efficiencies.
        df_data['Event_PIDCalibEffWeight'] = df_data[
            'K_PIDCalibeff'] * df_data['p_PIDCalibeff'] * df_data[
                'pi_PIDCalibeff'] * df_data['mu_PIDCalibeff']
        df_data[varstoStore].to_root(outfname,
                                     key=intreename,
                                     mode='a',
                                     store_index=False)
        events_read += df_data.shape[0]
Пример #17
0
from ROOT import TFile, TH1
import collections

TH1.AddDirectory(0)

for year in ['2016', '2017', '2018']:

    print year

    ###############################
    # Calculate top QCDscale norm #
    ###############################

    samples = collections.OrderedDict()
    handle = open('../Full%s_v7/njets/NormFact/samples.py' % (year), 'r')
    exec(handle)
    handle.close()

    f0 = TFile(
        "../Full{year}_v7/njets/NormFact/rootFile/plots_WW{year}_v7_njets_NormFact.root"
        .format(year=year))

    print "QCDscale_top"

    nnom = [0] * 4
    nup = [0] * 4
    ndown = [0] * 4
    for i in xrange(4):
        hnom = f0.Get("ww2l2v_13TeV_B%d/events/histo_top" % i)
        hup = f0.Get("ww2l2v_13TeV_B%d/events/histo_top_QCDscale_topUp" % i)
        hdown = f0.Get("ww2l2v_13TeV_B%d/events/histo_top_QCDscale_topDown" %
Пример #18
0
    def fhadd(self, prefix="", force=False, verbose=False, slow=True):
        """ taken from https://root.cern.ch/phpBB3/viewtopic.php?t=14881
        This function will merge objects from a list of root files and write them    
        to a target root file. The target file is newly created and must not
        exist, or if -f ("force") is given, must not be one of the source files.
        
        IMPORTANT: It is required that all files have the same content!

        Fast but memory hungry alternative to ROOT's hadd.
        
        Arguments:

        target -- name of the target root file
        sources -- list of source root files
        classname -- restrict merging to objects inheriting from classname
        force -- overwrite target file if exists
        """

        target = prefix + self.Name + ".root"
        sources = [j.Output for j in self.Jobs]

        TH1.AddDirectory(False)
        # check if target file exists and exit if it does and not in force mode
        if not force and os.path.exists(target):
            raise RuntimeError("target file %s exists" % target)

        # open the target file
        print "fhadd Target file:", target
        outfile = TFile(target, "RECREATE")

        # open the seed file - contents is looked up from here
        seedfilename = sources[0]
        print "fhadd Source file 1", seedfilename
        seedfile = TFile(seedfilename)

        # get contents of seed file
        print "looping over seed file"
        contents = self.loop(seedfile)
        print "done %d objects are ready to be merged" % len(contents)
        if (verbose):
            for c in contents:
                print c

        # open remaining files
        otherfiles = []
        for n, f in enumerate(sources[1:]):
            print "fhadd Source file %d: %s" % (n + 2, f)
            otherfiles.append(TFile(f))

        # loop over contents and merge objects from other files to seed file objects
        for n, (path, hname) in enumerate(contents):

            print "fhadd Target object: %s" % os.path.join(path, hname)
            obj_path = os.path.join(path, hname)
            obj_ = seedfile.Get(obj_path[1:])

            outfile.cd('/')
            # create target directory structure
            for d in path.split('/')[1:]:
                directory = gDirectory.GetDirectory(d)
                if not directory:
                    gDirectory.mkdir(d).cd()
                else:
                    gDirectory.cd(d)
            obj = None
            if obj_.InheritsFrom("TTree"):
                obj = obj_.CloneTree()
            else:
                obj = obj_.Clone()

            # merge objects
            l = TList()
            for o in [of.Get(obj_path[1:]) for of in otherfiles]:
                l.Add(o)
            obj.Merge(l)

            # delete objects if in slow mode
            if slow:
                print "Deleting %d object(s)", l.GetEntries()
                l.Delete()

            # write object to target
            obj.Write(obj.GetName(), TObject.kOverwrite)

        print "Writing and closing file"

        # let ROOT forget about open files - prevents deletion of TKeys
        for f in [outfile, seedfile] + otherfiles:
            gROOT.GetListOfFiles().Remove(f)

        outfile.Write()
        outfile.Close()

        for f in [seedfile] + otherfiles:
            f.Close()