def setUp(self):
        """Create dictionaries needed by combiners."""

        dirname = os.path.join(os.path.dirname(__file__), 'fixtures')
        orfdata = TFile(
            os.path.abspath(os.path.join(dirname, 'dqm_data.root')),
            'read'
        )
        orfref = TFile(
            os.path.abspath(os.path.join(dirname, 'dqm_ref.root')),
            'read'
        )

        # valid ROOT files
        assert(not orfdata.IsZombie())
        assert(not orfref.IsZombie())

        self.tdir = tempfile.mkdtemp()
        self.rfdata = TFile(os.path.join(self.tdir, 'fdata.root'), 'recreate')
        self.rfref = TFile(os.path.join(self.tdir, 'fref.root'), 'recreate')

        configfile = os.path.abspath(
            os.path.join(os.path.dirname(__file__), 'analysis_config_test.py'))
        with open(configfile, 'r') as inputFile:
            exec(inputFile.read())
        config = AnalysisConfigWrapper((analysis_config_branches, analysis_config_leaves)) 
        self.mycombiner = config.getTrunk(orfdata.GetName(), orfref.GetName(), r"pedestal\S*")
        self.mycombiner.evaluate()
Example #2
def add_points(graph, directory, layer, usePU):

    ipt = graph.GetN()

    # List runs
    for root, directories, files in os.walk(directory):
        for rundir in sorted(directories):
            if "run_" in rundir:
                # start to process run
                run = rundir[4:]
                #print "processing run ", run

                lumi = 0
                lumi_err = 0

                # Get information for a given run
                frun = TFile(directory + "/" + rundir +
                             "/withMasking/rootfile/SiStripHitEffHistos_run" +
                             run + ".root")
                fdir = frun.GetDirectory("SiStripHitEff")

                # for efficiency
                hfound = fdir.Get("found")
                htotal = fdir.Get("all")

                if htotal == None:
                    print '  Missing histogram in file ' + frun.GetName()
                    continue

                # lumi
                if usePU == 0: hlumi = fdir.Get("instLumi")
                else: hlumi = fdir.Get("PU")
                if hlumi == None:
                    print '  Missing lumi/pu histogram in file ' + frun.GetName()
                    continue
                lumi = hlumi.GetMean()
                lumi_err = hlumi.GetRMS()
                #print "lumi (avg+/-rms): ", lumi, "+/-", lumi_err

                # efficiency for a given layer
                found = hfound.GetBinContent(layer)
                total = htotal.GetBinContent(layer)
                if total > 0: eff = found / total
                else: eff = 0
                #print run, eff, lumi, lumi_err

                # skip runs without lumi information
                if lumi > 1:
                    graph.SetPoint(ipt, lumi, eff)
                    low = TEfficiency.Bayesian(total, found, .683, 1, 1, False)
                    up = TEfficiency.Bayesian(total, found, .683, 1, 1, True)
                    if eff - low > 0.01:
                        print 'large error bar for run', run, 'layer', layer, \
                            'eff:', '{:.4f}'.format(eff), 'err:', '{:.4f}'.format(eff - low)
                    #if lumi_err > lumi/3.: print 'wide lumi range for run', run, 'layer', layer, 'eff:', '{:.4f}'.format(eff), 'lumi/pu:', '{:.4f}'.format(lumi), 'rms:', '{:.4f}'.format(lumi_err)
                    graph.SetPointError(ipt, lumi_err, lumi_err, eff - low, up - eff)
                    ipt += 1
                frun.Close()
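
For context, a minimal sketch of how a helper like this might be driven; the graph name, harvest directory, layer number, and PU flag below are placeholders, and the directory is assumed to contain run_* subfolders laid out as in the loop above.

from ROOT import TCanvas, TGraphAsymmErrors

eff_vs_lumi = TGraphAsymmErrors()           # graph to be filled, one point per run
harvest_dir = "harvest"                     # hypothetical directory of run_* folders
add_points(eff_vs_lumi, harvest_dir, 1, 0)  # layer 1, instantaneous lumi on the x axis

canvas = TCanvas("c_eff", "efficiency vs lumi")
eff_vs_lumi.Draw("AP")
canvas.SaveAs("eff_vs_lumi_layer1.pdf")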
Example #3
    def setUp(self):
        """Create dictionaries needed by combiners."""

        dirname = os.path.join(os.path.dirname(__file__), 'fixtures')
        orfdata = TFile(
            os.path.abspath(os.path.join(dirname, 'dqm_data.root')), 'read')
        orfref = TFile(os.path.abspath(os.path.join(dirname, 'dqm_ref.root')),
                       'read')

        # valid ROOT files
        assert (not orfdata.IsZombie())
        assert (not orfref.IsZombie())

        self.tdir = tempfile.mkdtemp()
        self.rfdata = TFile(os.path.join(self.tdir, 'fdata.root'), 'recreate')
        self.rfref = TFile(os.path.join(self.tdir, 'fref.root'), 'recreate')

        hist_recipes = [
            (get_avg_trend, 'Vetra/NoiseMon/ADCCMSuppressed',
             ('RMSNoise_vs_ChipChannel', 'AvgRMSNoise_trend')),
            (get_avg_hist, 'Vetra/NoiseMon/ADCCMSuppressed',
             ('RMSNoise_vs_ChipChannel', 'AvgRMSNoise_all')),
            (get_avg_hist, 'Vetra/NoiseMon/ADCCMSuppressed',
             ('RMSNoise_vs_ChipChannel', 'AvgRMSNoise_R', 'r')),
            (get_avg_hist, 'Vetra/NoiseMon/ADCCMSuppressed',
             ('RMSNoise_vs_ChipChannel', 'AvgRMSNoise_Phi', 'p')),
            # (get_avg_hist, 'Vetra/VeloPedestalSubtractorMoni',
            #  ('Ped_Sub_ADCs_Profile', 'Ped_Sub_ADCs_all'))
        ]
        # histograms: make, save, and cleanup
        for recipe in hist_recipes:
            href = recipe[0](orfref.GetDirectory(recipe[1]), *recipe[2])
            self.rfref.WriteTObject(href)
            del href
            hdata = recipe[0](orfdata.GetDirectory(recipe[1]), *recipe[2])
            self.rfdata.WriteTObject(hdata)
            del hdata
        self.rfref.Close()
        self.rfdata.Close()

        # Write DQ database to temp directory (rather than run directory)
        Config().dq_db_file_path = os.path.join(self.tdir, Config().dq_db)

        configfile = os.path.join(os.path.dirname(__file__),
                                  'analysis_config_test.py')
        with open(configfile, 'r') as inputFile:
            exec(inputFile.read())
        config = AnalysisConfigWrapper(
            (analysis_config_branches, analysis_config_leaves))
        self.mycombiner = config.getTrunk(orfdata.GetName(), orfref.GetName())

        # Results to compare against
        self.results = {"score": Score(70.62594356001006), "lvl": ERROR}
def main():
    # Open input files
    file_data   = TFile("ZMassLessThan10GeV_TimePerf-plots.root", "read")
    file_signal = TFile("ZMassLessThan5GeV_TimePerf-plots.root", "read")
    file_backgd = TFile("ZMass_MoreThan5GeV_TimePerf-plots.root", "read")
    
    # Get input TH1F
    hist_data = file_data.Get('EBEB/seed time')
    hist_McSig = file_signal.Get('EBEB/seed time')
    hist_McBkg = file_backgd.Get('EBEB/seed time')

    # Determine amount of signal/background in data
    num_sig, num_sig_error, num_bkg, num_bkg_error  = \
        get_num_sig_bkg(hist_data, hist_McSig, hist_McBkg,0.006,0.02)
    
    # Save scaled MC plots to output root file
    file_output = TFile("Hists_purityFits.root","recreate")
    file_output.cd()
    hist_McBkg.Scale(num_bkg/hist_McBkg.Integral())
    hist_McSig.Scale(num_sig/hist_McSig.Integral())
    hist_data.SetName("data")
    hist_McBkg.SetName("MC_bgd")
    hist_McSig.SetName("MC_sig")
    hist_data.Write()
    hist_McBkg.Write()
    hist_McSig.Write()
    file_output.Close()
    
    print "Created %s" % file_output.GetName()
def getIntegral(sample, hist, xlo, xhi):
    inputFile = TFile("condor/" + condor_dir + "/" + sample + ".root")
    #print "Reading file:  " + inputFile.GetName()

    HistogramObj = inputFile.Get("OSUAnalysis/" + channel + "/" + hist)
    if not HistogramObj:
        print "WARNING: Could not find histogram " + "OSUAnalysis/" + channel + "/" + hist + " in file " + sample + ".root" + ". Will skip it and continue."
        return
    histogram = HistogramObj.Clone()
    histogram.SetDirectory(0)
    inputFile.Close()

    xloBin = histogram.GetXaxis().FindBin(float(xlo))
    xhiBin = histogram.GetXaxis().FindBin(float(xhi))
    xlo = histogram.GetXaxis().GetBinLowEdge(xloBin)      # lo edge is the left edge of the first bin
    xhi = histogram.GetXaxis().GetBinLowEdge(xhiBin + 1)  # hi edge is the left edge of the bin to the right of the last bin
    intError = Double(0.0)
    integral = histogram.IntegralAndError(xloBin, xhiBin, intError)

    line = "Integral of " + hist + " in " + inputFile.GetName(
    ) + " from " + str(xlo) + " to " + str(xhi) + ": " + str(
        integral) + " +- " + str(intError)
    print line
    return integral
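
A sketch of how getIntegral might be called; condor_dir and channel are module-level globals in the original script, so the values below, like the sample and histogram names, are placeholders.

condor_dir = "2016_final"   # hypothetical condor directory
channel = "Preselection"    # hypothetical channel name

integral = getIntegral("QCD", "metPt", 100, 300)
if integral is not None:
    print("Total integral for QCD metPt: " + str(integral))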
Example #6
def main():
    # Open input files
    file_data = TFile("Hists_Data.root", "read")
    file_signal = TFile("Hists_PhotonJet.root", "read")
    file_backgd = TFile("Hists_QCD.root", "read")

    # Get input TH1F
    hist_data = file_data.Get('g_pass_Liso_barrel/h_photon_sieie')
    hist_McSig = file_signal.Get('g_pass_Liso_barrel/h_photon_sieie')
    hist_McBkg = file_backgd.Get('g_pass_Liso_barrel/h_photon_sieie')

    # Determine amount of signal/background in data
    num_sig, num_sig_error, num_bkg, num_bkg_error  = \
        get_num_sig_bkg(hist_data, hist_McSig, hist_McBkg,0.006,0.02)

    # Save scaled MC plots to output root file
    file_output = TFile("Hists_purityFits.root", "recreate")
    file_output.cd()
    hist_McBkg.Scale(num_bkg / hist_McBkg.Integral())
    hist_McSig.Scale(num_sig / hist_McSig.Integral())
    hist_data.SetName("data")
    hist_McBkg.SetName("MC_bgd")
    hist_McSig.SetName("MC_sig")
    hist_data.Write()
    hist_McBkg.Write()
    hist_McSig.Write()
    file_output.Close()

    print "Created %s" % file_output.GetName()
Example #7
    def GetListObjects(self):

        afile = TFile(self.Filename)

        if afile.IsZombie():
            print(" error trying to open file: " + self.Filename)
            sys.exit()

        if self.XML:

            print('''
<cuy>
''')
            print('  <validation type=\"' + afile.GetName() + '\" file=\"' +
                  self.Filename + '\" release=\"x.y.z\">')

        self.dir = ROOT.gDirectory
        self.Loop()

        if self.XML:

            print('''
  </validation>

</cuy>
''')
def main():
    # Open input files
    file_data = TFile("data_bg_file.root", "read")
    file_signal = TFile("sig_gmbs600.root", "read")
    file_backgd = TFile("data_bg_file.root", "read")

    # Get input TH1F
    hist_data = file_data.Get('h_dataTime')
    hist_McSig = file_signal.Get('h_sgTime__ctau6000_hehb')
    hist_McBkg = file_backgd.Get('h_bgTime')

    # Determine amount of signal/background in data
    num_sig, num_sig_error, num_bkg, num_bkg_error  = \
        get_num_sig_bkg(hist_data, hist_McSig, hist_McBkg,0.006,0.02)

    # Save scaled MC plots to output root file
    file_output = TFile("Hists_purityFits.root", "recreate")
    file_output.cd()
    hist_McBkg.Scale(num_bkg / hist_McBkg.Integral())
    hist_McSig.Scale(num_sig / hist_McSig.Integral())
    hist_data.SetName("data")
    hist_McBkg.SetName("MC_bgd")
    hist_McSig.SetName("MC_sig")
    hist_data.Write()
    hist_McBkg.Write()
    hist_McSig.Write()
    file_output.Close()

    print "Created %s" % file_output.GetName()
Example #9
def create_tree(filename="test_tree.root"):
    outfile = TFile(filename, 'recreate')
    tree = Tree('test_tree', 'A test tree')
    tree.var('var1')
    for i in range(100):
        tree.fill('var1', i)
        tree.tree.Fill()
    print 'creating a tree', tree.tree.GetName(),\
        tree.tree.GetEntries(), 'entries in',\
        outfile.GetName()
    outfile.Write()
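
A sketch of reading the output back with plain ROOT; it assumes the Tree wrapper above writes an ordinary TTree named 'test_tree' with a branch 'var1', and that the file has been flushed and closed by the time it is reopened.

from ROOT import TFile

create_tree("test_tree.root")

rfile = TFile("test_tree.root")
rtree = rfile.Get("test_tree")
total = sum(entry.var1 for entry in rtree)  # each entry exposes the stored branch value
print("sum of var1 over %d entries: %g" % (rtree.GetEntries(), total))
rfile.Close()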
Example #10
def SetData(file, name, lumi):
  tmp = {}
  f = TFile(file)
  fnames = f.GetName().split('.')
  fname = fnames[0]

  tmp["file"] = f
  tmp["hname"] = [x.GetName() for x in f.GetListOfKeys()]
  tmp["hname"].remove("EventInfo")
  tmp["lumi"] = lumi 
  tmp["name"] = name
  datasamples[fname] = tmp
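
SetData fills the module-level datasamples dictionary; a sketch assuming that dictionary is defined alongside it and that the input file contains an EventInfo histogram among its keys (file name and lumi value are placeholders).

datasamples = {}  # defined at module level in the original script

SetData("SingleMuon.root", "Data", 35900.0)  # hypothetical file name and lumi
for fname, sample in datasamples.items():
    print("%s: %d histograms, lumi = %g" % (fname, len(sample["hname"]), sample["lumi"]))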
Example #11
def copyOneFile(dataset):
    # If the input and output files are defined outside the loop, histograms after the first instance are not found.
    # I did not track down what the cause of this behavior was.  --Wells

    fin = TFile(condor_dir + "/pu.root", "READ")
    fout = TFile(
        os.environ['CMSSW_BASE'] +
        "/src/OSUT3Analysis/Configuration/data/pu.root", "UPDATE")
    print "Copying histogram " + dataset + " from " + fin.GetName(
    ) + " to " + fout.GetName()

    fin.cd()
    h = fin.Get(dataset)
    if not h:
        print "  Could not find hist named " + dataset + " in " + fin.GetName()
        return

    fout.cd()
    if dataset != h.GetName():
        # print "  Resetting name from " + h.GetName() + " to " + dataset
        h.SetName(dataset)

    # Check if the histogram already exists in the destination file
    h2 = fout.Get(dataset)
    if h2:
        print "  The histogram " + h2.GetName(
        ) + " already exists in the destination file."
        overwrite = raw_input("  Do you want to overwrite it? (y/n): ")
        if not (overwrite is "y"):
            print "  Will not overwrite existing histogram."
            return  # Only overwrite if the user enters "y"

        # delete all previously existing instances of the histogram
        print "  Will overwrite existing histogram."
        fout.Delete(dataset + ";*")

    h.Write()

    fin.Close()
    fout.Close()
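
A sketch of driving copyOneFile over several datasets; condor_dir is a global in the original script and the dataset names are placeholders (the function is interactive, since it may ask before overwriting).

condor_dir = "puDistributions_2016"  # hypothetical condor directory
for dataset in ["TTJets", "WJetsToLNu", "DYJetsToLL"]:
    copyOneFile(dataset)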
Example #12
class TreeProducer(object):
    """Base class to create and prepare a custom output file & tree for analysis modules."""
    def __init__(self, filename, module, **kwargs):
        self.filename = filename
        self.module = module
        self.outfile = TFile(filename, 'RECREATE')
        ncuts = kwargs.get('ncuts', 25)
        self.cutflow = Cutflow('cutflow', ncuts)
        self.display = kwargs.get('display_cutflow', True)
        self.pileup = TH1D('pileup', 'pileup', 100, 0, 100)
        self.tree = TTree('tree', 'tree')

    def addBranch(self,
                  name,
                  dtype='f',
                  default=None,
                  title=None,
                  arrname=None):
        """Add branch with a given name, and create an array of the same name as address."""
        if hasattr(self, name):
            raise IOError("Branch of name '%s' already exists!" % (name))
        if not arrname:
            arrname = name
        if isinstance(dtype, str):  # Map ROOT-style type codes to numpy dtypes:
            if dtype == 'F':  # numpy's 'F' means 'complex64', which does not work for float branches
                dtype = 'float32'  # 'float32' corresponds to a Float_t ('F') leaf
            elif dtype == 'D':  # numpy's 'D' means 'complex128', which does not work for double branches
                dtype = 'float64'  # 'float64' corresponds to a Double_t ('D') leaf
        setattr(self, arrname, np.zeros(1, dtype=dtype))
        branch = self.tree.Branch(name, getattr(self, arrname),
                                  '%s/%s' % (name, root_dtype[dtype]))
        if default is not None:
            getattr(self, name)[0] = default
        if title:
            branch.SetTitle(title)
        return branch

    def fill(self):
        """Fill tree."""
        return self.tree.Fill()

    def endJob(self):
        """Write and close files after the job ends."""
        if self.display:
            self.cutflow.display()
            print ">>> Write %s..." % (self.outfile.GetName())
        self.outfile.Write()
        self.outfile.Close()
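
A minimal usage sketch. Cutflow and root_dtype come from the module's own imports (not shown here), so the branch type codes and the display_cutflow flag below are assumptions about that framework rather than a definitive recipe.

producer = TreeProducer("analysis.root", module=None, ncuts=5, display_cutflow=False)
producer.addBranch('pt_1', 'f', default=-1.0, title='leading lepton pT')
producer.addBranch('m_vis', 'f', default=-1.0)

for ievt in range(10):
    producer.pt_1[0] = 20.0 + ievt   # the arrays created by addBranch act as branch addresses
    producer.m_vis[0] = 91.2
    producer.fill()

producer.endJob()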
Example #13
def create_tree(filename=FNAME, nentries=None):
    if not nentries:
        if os.path.isfile(filename):
            #default number of entries, file exists
            return filename
        else:
            nentries = 200
    nentries = int(nentries)
    outfile = TFile(filename, 'recreate')
    tree = Tree('test_tree', 'A test tree')
    tree.var('var1')
    for i in range(nentries):
        tree.fill('var1', i)
        tree.tree.Fill()
    outfile.Write()
    outfile.Close()
    return outfile.GetName()
Example #14
    def plot_focal_surface(self, gtu_num):
        """
        plot the focal surface for a given gtu_num
        """
        # initialise
        pcd = np.zeros((1, 1, self._rows, self._cols), dtype='B')
        datafile = TFile(self.filename)
        datafile.tevent.SetBranchAddress("photon_count_data", pcd)
        focal_surface = np.zeros((self._rows, self._cols), dtype='B')

        # get the pixel values
        datafile.tevent.GetEntry(gtu_num)
        focal_surface[:, :] = pcd[0][0]

        fig = plt.figure(figsize=(6, 6))
        ax = fig.add_subplot(111)
        p = ax.imshow(focal_surface, interpolation='nearest')
        plt.xlabel('pixel X')
        plt.ylabel('pixel Y')
        plt.title(datafile.GetName() + "\n" + 'GTU: ' + str(gtu_num), y=1.1)

        # Set ticks
        major_ticks = np.arange(-.5, self._rows, 8)
        minor_ticks = np.arange(-.5, self._rows, 1)
        #ax.set_xlim(0.0,47.0)
        ax.set_xticks(major_ticks)
        ax.set_xticks(minor_ticks, minor=True)
        ax.set_yticks(major_ticks)
        ax.set_yticks(minor_ticks, minor=True)
        ax.set_xticklabels(np.arange(0, self._rows + 1, 8))
        ax.set_yticklabels(np.arange(self._cols, 0, -8))

        # set grid
        #ax.grid(which='minor', alpha=0.2)
        ax.grid(color='k', linestyle='-', linewidth=2)
        ax.grid(which='major', alpha=0.4)

        # add colourbar
        fig.subplots_adjust(right=0.8)
        cbar_ax = fig.add_axes([0.9, 0.1, 0.05, 0.8])
        cbar = fig.colorbar(p, cax=cbar_ax)
        cbar.set_label('# of counts', labelpad=1)
        cbar.formatter.set_powerlimits((0, 0))

        datafile.Close()
def add_points(graph, directory, subdir, layer):

    ipt = graph.GetN()
    labels = []

    # List runs
    for root, directories, files in os.walk(directory):
        for rundir in sorted(directories):
            if "run_" in rundir:
                # start to process run
                run = rundir[4:]
                #print "processing run ", run

                # for efficiency
                frun = TFile(directory + "/" + rundir + "/" + subdir +
                             "/rootfile/SiStripHitEffHistos_run" + run +
                             ".root")
                fdir = frun.GetDirectory("SiStripHitEff")
                hfound = fdir.Get("found")
                htotal = fdir.Get("all")

                if htotal == None:
                    print '  Missing histogram in file ' + frun.GetName()
                    continue

                # efficiency for a given layer
                found = hfound.GetBinContent(int(layer))
                total = htotal.GetBinContent(int(layer))
                if total > 0: eff = found / total
                else: eff = 0
                #print run, eff

                graph.SetPoint(ipt, ipt + 1, eff)
                labels.append(run)
                low = TEfficiency.Bayesian(total, found, .683, 1, 1, False)
                up = TEfficiency.Bayesian(total, found, .683, 1, 1, True)
                #eff_vs_run.SetPointError(ipt, 0, 0, eff-low, up-eff)
                ipt += 1
                frun.Close()

    axis = graph.GetXaxis()
    for i in range(graph.GetN()):
        axis.SetBinLabel(axis.FindBin(i + 1), labels[i])
        #print i, axis.FindBin(i+1), labels[i]
    return labels
Example #16
def create_tree(filename=FNAME, mean=0, sigma=1, nentries=None):
    '''Create the test tree in file FNAME.'''
    if not nentries:
        if os.path.isfile(filename):
            #default number of entries, file exists
            return filename
        else:
            nentries = 200
    nentries = int(nentries)
    outfile = TFile(filename, 'recreate')
    tree = Tree('test_tree', 'A test tree')
    tree.var('x')
    tree.var('iev')
    for i in range(nentries):
        tree.fill('x', random.gauss(mean, sigma))
        tree.fill('iev', i)
        tree.tree.Fill()
    outfile.Write()
    outfile.Close()
    return outfile.GetName()
Example #17
def AddBkg(file, name, color, xsection):
    tmp = {}
    f = TFile(file)
    fnames = f.GetName().split('.')
    fname = fnames[0]

    nevt = 1

    tmp["file"] = f
    tmp["hname"] = [x.GetName() for x in f.GetListOfKeys()]
    if xsection != 1:
        tmp["hname"].remove("EventInfo")
        h = f.Get("EventInfo")
        nevt = h.GetBinContent(2)

    tmp["total"] = nevt
    tmp["col"] = color
    tmp["xsection"] = xsection
    tmp["name"] = name
    bkgsamples[fname] = tmp
Example #18
def pruneBaby(fname, dir, prunedir, pruneBranches):

    fullname = dir + "/" + fname
    if "pnfs" in fullname:
        fullname = "dcap://t3se01.psi.ch:22125//" + fullname
    file = TFile.Open(fullname)
    tree = file.Get("mt2")

    print "-> Pruning " + fullname

    for i in pruneBranches:
        tree.SetBranchStatus(i + "_*", 0)
        tree.SetBranchStatus("n" + i, 0)

    newfilename = fname.split(".root")[0] + "_prune.root"
    newfile = TFile(prunedir + "/" + newfilename, "recreate")
    print "newfile: " + newfile.GetName()
    newtree = tree.CloneTree()
    newtree.Write("", 5)
    newfile.Close()
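
A sketch of running pruneBaby over every ROOT file in a directory; the paths and the collections to drop are placeholders (each entry removes the branches <name>_* and n<name>).

import os

indir = "babies/skim"             # hypothetical input directory
outdir = "babies/skim_prune"      # hypothetical output directory
drop = ["isoTrack", "genStat23"]  # hypothetical collections to prune

for fname in sorted(os.listdir(indir)):
    if fname.endswith(".root"):
        pruneBaby(fname, indir, outdir, drop)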
def getIntegral(sample, condor_dir, channel, hist, xlo, xhi, hwts):
    inputFile = TFile("condor/" + condor_dir + "/" + sample + ".root")
    #print "Reading file:  " + inputFile.GetName()

    HistogramObj = inputFile.Get("OSUAnalysis/" + channel + "/" + hist)
    if not HistogramObj:
        print "WARNING: Could not find histogram " + "OSUAnalysis/" + channel + "/" + hist + " in file " + sample + ".root" + ". Will skip it and continue."
        return
    histogram = HistogramObj.Clone()
    histogram.SetDirectory(0)
    inputFile.Close()

    xloBin = histogram.GetXaxis().FindBin(float(xlo))
    xhiBin = histogram.GetXaxis().FindBin(float(xhi))
    xlo = histogram.GetXaxis().GetBinLowEdge(xloBin)      # lo edge is the left edge of the first bin
    xhi = histogram.GetXaxis().GetBinLowEdge(xhiBin + 1)  # hi edge is the left edge of the bin to the right of the last bin

    if hwts:
        for i in range(1, histogram.GetNbinsX() + 1):
            val = histogram.GetBinContent(i)
            err = histogram.GetBinError(i)
            binCtr = histogram.GetBinCenter(i)
            wt = hwts.GetBinContent(hwts.FindBin(binCtr)) + 1.0  # add 1.0 to convert from (data-MC)/MC to data/MC
            #            print "Debug:  bin ", i, ": wt=", wt
            histogram.SetBinContent(i, val * wt)
            histogram.SetBinError(i, err * wt)
    intError = Double(0.0)
    integral = histogram.IntegralAndError(xloBin, xhiBin, intError)

    line = "Integral of " + hist + " in " + inputFile.GetName(
    ) + " from " + str(xlo) + " to " + str(xhi) + ": " + str(
        integral) + " +- " + str(intError)
    #    print line
    return integral
Example #20
def AddBkg(file, name, color, xsection):
  tmp = {}
  f = TFile(file)
  fnames = f.GetName().split('.')
  fname = fnames[0]
 
  tmp["file"] = f
  tmp["hname"] = [x.GetName() for x in f.GetListOfKeys()]
  tmp["hname"].remove("EventInfo")

  #debug
  #N_h = len(tmp["hname"])
  tmp_h = f.Get(tmp["hname"][0])
  tmp_n = tmp_h.Integral()
  print fname, " : ", name, " : ", tmp["hname"][0], " : " , tmp_n

  h = f.Get("EventInfo")
  nevt = h.GetBinContent(2)
  tmp["total"] = nevt 
  tmp["col"] = color
  tmp["xsection"] = xsection
  tmp["name"] = name
  bkgsamples[fname] = tmp
Example #21
def create_tree(filename=FNAME, nentries=None):
    '''Create the test tree in file FNAME.'''
    if not nentries:
        file_good = False
        if os.path.isfile(filename):
            rfile = TFile(filename)
            if not rfile.IsZombie():
                file_good = True
        if file_good:
            return filename
        else:
            # file needs to be regenerated so setting default
            # number of entries
            nentries = 200
    nentries = int(nentries)
    outfile = TFile(filename, 'recreate')
    tree = Tree('test_tree', 'A test tree')
    tree.var('var1')
    for i in range(nentries):
        tree.fill('var1', i)
        tree.tree.Fill()
    outfile.Write()
    outfile.Close()
    return outfile.GetName()
        break

###################
## load the data dict from processed root file
###################
Data = {}
Data['file_name'] = []
# initial Data first with the branch name
for (_, new_pandas_branch_name) in BranchesToKeep:
    Data[new_pandas_branch_name] = []
for i in range(NumEventsInData):
    if (i + 1) % 100 == 0:
        print("==== processed_file: " + str(i + 1) +
              " events finished loading")
    processed_tree.GetEntry(i)
    Data['file_name'].append(pfile2.GetName())
    for (root_branch_name, new_pandas_branch_name) in BranchesToKeep:
        Data[new_pandas_branch_name].append(
            getattr(processed_tree, root_branch_name))

######################
## Convert to data format in pandas
######################
processedPandasData = {}
for item in Data:
    processedPandasData[item] = pd.Series(Data[item])
df = pd.DataFrame(processedPandasData)

#####################
## Merge the new dictionary to existing dataframe
#####################
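
The merge announced above is not shown in this fragment; a minimal sketch, assuming the rows of the new dataframe df should simply be appended to an existing one (existing_df is a placeholder name).

import pandas as pd

existing_df = pd.DataFrame()  # hypothetical dataframe accumulated from earlier files
existing_df = pd.concat([existing_df, df], ignore_index=True)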
Example #23
def matchAndSplitTrees(inputFiles_reco,
                       inputFiles_truth,
                       inputFiles_sumw,
                       outputName,
                       truthLevel='parton',
                       treename='nominal',
                       saveUnmatchedReco=True,
                       saveUnmatchedTruth=True):

    ##########
    print("Read input trees and build index")
    # Reco
    print("Reco level")
    try:
        tree_reco = getTrees(inputFiles_reco, treename, True)
    except RuntimeError as err:
        #tree_reco = None
        print("Failed to get reco level trees: {}".format(err))
        return

    # MC truth level
    print(truthLevel.capitalize() + " level")
    try:
        tree_truth = getTrees(inputFiles_truth, treename, True)
    except RuntimeError as err:
        print("Failed to get parton level trees: {}".format(err))
        return

    # Sum weights
    print("Read sumWeights")
    try:
        tree_sumw = getTrees(inputFiles_sumw, 'sumWeights', False)
    except RuntimeError as err:
        print("Failed to get sumWeights: {}".format(err))
        return

    sumWeights = getSumWeights(tree_sumw)
    print("sum weights = ", sumWeights)

    ##########
    # Output trees
    print("Create output trees")
    #####
    # reco branches
    if truthLevel == "parton":
        reco_prefix_thad = "klfitter_bestPerm_topHad"
        reco_prefix_tlep = "klfitter_bestPerm_topLep"
        reco_prefix_ttbar = "klfitter_bestPerm_ttbar"
    else:  # particle level
        reco_prefix_thad = "PseudoTop_Reco_top_had"
        reco_prefix_tlep = "PseudoTop_Reco_top_lep"
        reco_prefix_ttbar = "PseudoTop_Reco_ttbar"

    # truth branches
    if truthLevel == "parton":
        truth_prefix_thad = "MC_thad_afterFSR"
        truth_prefix_tlep = "MC_tlep_afterFSR"
        truth_prefix_ttbar = "MC_ttbar_afterFSR"
    else:  # particle levels
        truth_prefix_thad = "PseudoTop_Particle_top_had"
        truth_prefix_tlep = "PseudoTop_Particle_top_lep"
        truth_prefix_ttbar = "PseudoTop_Particle_ttbar"

    #####
    # e+jets
    outfile_ej = TFile('{}_{}_ejets.root'.format(outputName, truthLevel),
                       'recreate')
    print("Create output file: {}".format(outfile_ej.GetName()))

    # add extra branches
    # reco
    extra_variables_reco_ej = varsExtra(reco_prefix_thad,
                                        reco_prefix_tlep,
                                        reco_prefix_ttbar,
                                        compute_energy=True,
                                        sum_weights=sumWeights)

    newtree_reco_ej = prepareOutputTree(tree_reco, 'reco')
    extra_variables_reco_ej.set_up_branches(newtree_reco_ej)

    # truth
    extra_variables_truth_ej = varsExtra(truth_prefix_thad,
                                         truth_prefix_tlep,
                                         truth_prefix_ttbar,
                                         compute_energy=truthLevel != "parton")

    newtree_truth_ej = prepareOutputTree(tree_truth, truthLevel)
    extra_variables_truth_ej.set_up_branches(newtree_truth_ej)

    #####
    # mu+jets
    outfile_mj = TFile('{}_{}_mjets.root'.format(outputName, truthLevel),
                       'recreate')
    print("Create output file: {}".format(outfile_mj.GetName()))

    # add extra branches
    # reco
    extra_variables_reco_mj = varsExtra(reco_prefix_thad,
                                        reco_prefix_tlep,
                                        reco_prefix_ttbar,
                                        compute_energy=True,
                                        sum_weights=sumWeights)

    newtree_reco_mj = prepareOutputTree(tree_reco, 'reco')
    extra_variables_reco_mj.set_up_branches(newtree_reco_mj)

    # truth
    extra_variables_truth_mj = varsExtra(truth_prefix_thad,
                                         truth_prefix_tlep,
                                         truth_prefix_ttbar,
                                         compute_energy=truthLevel != "parton")

    newtree_truth_mj = prepareOutputTree(tree_truth, truthLevel)
    extra_variables_truth_mj.set_up_branches(newtree_truth_mj)

    ##########
    print("Iterate through events in reco trees")
    unmatched_reco_entries = []

    for i in range(tree_reco.GetEntries()):
        if not i % 10000:
            print("processing event #{}".format(i))
        tree_reco.GetEntry(i)

        eventID = (tree_reco.runNumber, tree_reco.eventNumber)

        ####
        # try matching truth level events
        truth_entry = tree_truth.GetEntryNumberWithIndex(*eventID)
        if truth_entry < 0:
            # found no matched event
            unmatched_reco_entries.append(i)
        else:
            # found matched event
            # write reco events
            isEle = isEJets(tree_reco)
            if isEle:
                extra_variables_reco_ej.set_match_flag(1)
                extra_variables_reco_ej.write_event(tree_reco)
                newtree_reco_ej.Fill()
            else:
                extra_variables_reco_mj.set_match_flag(1)
                extra_variables_reco_mj.write_event(tree_reco)
                newtree_reco_mj.Fill()

            # write truth events
            tree_truth.GetEntry(truth_entry)
            if isEle:
                extra_variables_truth_ej.set_match_flag(1)
                extra_variables_truth_ej.write_event(tree_truth)
                newtree_truth_ej.Fill()
            else:
                extra_variables_truth_mj.set_match_flag(1)
                extra_variables_truth_mj.write_event(tree_truth)
                newtree_truth_mj.Fill()

    if saveUnmatchedReco:
        # append unmatched reco events
        print("Append unmatched reco events")
        for ievt, ireco_unmatched in enumerate(unmatched_reco_entries):
            if not ievt % 10000:
                print("processing unmatched reco event {}".format(ievt))

            tree_reco.GetEntry(ireco_unmatched)

            isEle = isEJets(tree_reco)
            if isEle:
                extra_variables_reco_ej.set_match_flag(0)
                extra_variables_reco_ej.write_event(tree_reco)
                newtree_reco_ej.Fill()
            else:
                extra_variables_reco_mj.set_match_flag(0)
                extra_variables_reco_mj.write_event(tree_reco)
                newtree_reco_mj.Fill()

    if saveUnmatchedTruth:
        # append unmatched truth events
        print("Append unmatched {} events".format(truthLevel))
        for j in range(tree_truth.GetEntries()):
            if not j % 10000:
                print("processing {} event {}".format(truthLevel, j))

            tree_truth.GetEntry(j)
            reco_entry = tree_reco.GetEntryNumberWithIndex(
                tree_truth.runNumber, tree_truth.eventNumber)

            if reco_entry >= 0:
                # found matched reco event.
                # skip since it has already been written.
                continue

            if isEJets(tree_truth):
                extra_variables_truth_ej.set_match_flag(0)
                extra_variables_truth_ej.write_event(tree_truth)
                newtree_truth_ej.Fill()
            else:
                extra_variables_truth_mj.set_match_flag(0)
                extra_variables_truth_mj.write_event(tree_truth)
                newtree_truth_mj.Fill()

    # Write and close output files
    outfile_ej.Write()
    outfile_ej.Close()

    outfile_mj.Write()
    outfile_mj.Close()
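
The reco/truth matching above relies on TTree::GetEntryNumberWithIndex(runNumber, eventNumber), which only works if an index has been built on those branches. The getTrees helper is not shown; a hypothetical sketch of what it is assumed to do:

from ROOT import TChain

def getTrees(input_files, treename, build_index):
    """Hypothetical reconstruction of the helper used above: chain the input
    files and, if requested, build the run/event index needed for matching."""
    chain = TChain(treename)
    for fname in input_files:
        chain.Add(fname)
    if chain.GetEntries() == 0:
        raise RuntimeError("no entries found for tree '%s'" % treename)
    if build_index:
        # index the tree by (runNumber, eventNumber) so GetEntryNumberWithIndex works
        chain.BuildIndex("runNumber", "eventNumber")
    return chain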
Example #24
def GetYieldAndError(condor_dir, process, channel):
    inputFile = TFile("condor/" + condor_dir + "/" + process + ".root")
    hist = inputFile.Get(channel + "/" + integrateHistogramName)
    if not hist:
        print "Could not find hist " + channel + "/" + integrateHistogramName + " in " + inputFile.GetName(
        )
    hist.SetDirectory(0)
    inputFile.Close()
    yieldAndErrorList = {}
    nBinsX = hist.GetNbinsX()

    intError = Double(0.0)
    integral = hist.IntegralAndError(0, nBinsX + 1, intError)
    fracError = 1.0 + (intError / integral) if integral > 0.0 else 1.0

    raw_integral = hist.GetEntries()

    yieldAndErrorList['yield'] = integral
    yieldAndErrorList['rawYield'] = raw_integral
    yieldAndErrorList['error'] = fracError
    yieldAndErrorList['absError'] = intError
    yieldAndErrorList['weight'] = integral / raw_integral if raw_integral > 0.0 else 0.0
    return yieldAndErrorList
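
A sketch of looping over processes; integrateHistogramName is a global in the original script, so its value and the directory, process, and channel names are placeholders.

integrateHistogramName = "Met Plots/metNoMu"  # hypothetical histogram path inside each channel
for process in ["Diboson", "TTJets", "QCD"]:
    result = GetYieldAndError("2016_final", process, "Preselection")
    print("%s: %.1f +- %.1f (raw %.0f)" % (process, result['yield'],
                                           result['absError'], result['rawYield']))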
Example #25
def rewtOneHist(dataset, hwts):
    fileName = condor_dir + "/" + dataset + ".root"
    if not os.path.exists(fileName):
        print "WARNING: didn't find ", fileName
        return
    print "About to reweight histogram in " + fileName
    inFile = TFile(fileName, "UPDATE")
    if inFile.IsZombie() or not inFile.GetNkeys():
        return
    inFile.cd()
    h = inFile.Get(str(arguments.histToBeReWeighted)).Clone()
    if not h:
        print "  Could not find hist named " + arguments.histToBeReWeighted + " in " + inFile.GetName(
        )
        return
    h.SetDirectory(0)
    newName = h.GetName() + str(arguments.suffixRename)
    h.SetName(newName)
    dir = arguments.histToBeReWeighted
    dir = dir[:dir.rfind("/")]
    print "Will write hist to directory " + dir
    inFile.cd(dir)
    tdir = inFile.GetDirectory(dir)
    tdir.Delete(newName + ";*")
    for i in range(1, h.GetNbinsX() + 1):
        val = h.GetBinContent(i)
        err = h.GetBinError(i)
        binCtr = h.GetBinCenter(i)
        wt = hwts.GetBinContent(hwts.FindBin(binCtr))
        h.SetBinContent(i, val * wt)
        h.SetBinError(i, err * wt)
    h.Write()

    inFile.Close()
Example #26
def GetYieldAndError(condor_dir, process, channel):
    inputFile = TFile("condor/" + condor_dir + "/" + process + ".root")
    hist = inputFile.Get(channel + "/" + integrateHistogramName)
    if not hist:
        print "Could not find hist " + channel + "/" + integrateHistogramName + " in " + inputFile.GetName(
        )
    hist.SetDirectory(0)
    nGenerated = inputFile.Get(
        channel.replace('Plotter/Met Plots',
                        'CutFlowPlotter/eventCounter')).GetEntries()
    inputFile.Close()

    intError = Double(0.0)
    integral = hist.IntegralAndError(0, hist.GetNbinsX() + 1, intError)
    fracError = 1.0 + (intError / integral) if integral > 0.0 else 1.0

    raw_integral = hist.GetEntries()

    # don't need cross section uncertainties, since that is its own nuisance parameter
    crossSectionWeight = 1.0
    if process != data_dataset:
        crossSectionWeight *= lumi / nGenerated
        if arguments.limitType == "higgsino":
            crossSectionWeight *= float(signal_cross_sections_higgsino[
                process.split('_')[1][:-3]]['value'])
        elif arguments.limitType == "wino":
            crossSectionWeight *= float(
                signal_cross_sections[process.split('_')[2][:-3]]['value'])

    if process != data_dataset:
        datasetInfo = open("condor/" + condor_dir + "/" + process +
                           "/datasetInfo_" + process + "_cfg.py")
        xsec = [
            x for x in list(datasetInfo) if x.startswith('crossSection = ')
        ]
        datasetInfo.close()
        if len(xsec) > 0:
            xsec = float(xsec[-1].split('=')[-1])
        if xsec < 0:
            integral *= crossSectionWeight
            intError *= crossSectionWeight

    acceptance = integral / nGenerated / crossSectionWeight

    yieldAndError = {
        'yield': integral,
        'rawYield': raw_integral,
        'error': fracError,
        'absError': intError,
        'weight': integral / raw_integral if raw_integral > 0.0 else 0.0,
        'acceptance': integral / nGenerated / crossSectionWeight,
        'acceptanceError': (integral if integral > 0.0 else 1.1) / nGenerated / crossSectionWeight,
    }

    return yieldAndError
Example #27
from ROOT import TFile
from PhysicsTools.HeppyCore.statistics.tree import Tree

outfile = TFile('test_tree.root', 'recreate')

tree = Tree('test_tree', 'A test tree')
tree.var('var1')

for i in range(100):
    tree.fill('var1', i)
    tree.tree.Fill()

print 'creating a tree', tree.tree.GetName(),\
      tree.tree.GetEntries(), 'entries in',\
      outfile.GetName()

outfile.Write()
Example #28
    print(arg)
    f = TFile(arg)
    phi = float(re.sub(r"\.root$", "", arg))
    for h in f.GetListOfKeys():
        h = h.ReadObj()
        name = h.GetName()
        print(name)
        M = tuple(float(x) for x in
                  re.search(r"hj_mass\[([0-9,\.]*)\)", name).group(1).split(','))

        fs = h.GetListOfFunctions()
        d[M][phi] = float(
            next(f for f in fs if "-logl" in f.GetName()).GetTitle().split('=')[1])

import matplotlib.pyplot as plt
from matplotlib.backends.backend_pdf import PdfPages

pages = PdfPages('scan.pdf')

for M, d_phi in d.items():
    print("hj_mass:", M)
    logl0 = None
    x = []
    y = []
    for phi, logl in sorted(d_phi.items()):
        if logl0 is None: logl0 = logl
        logl -= logl0
        print("{}: {}".format(phi, logl))
Example #29
    nFn = channel[cc].replace('.root',
                              '_' + str(num_of_jets) + 'j_skimed.root')
    ss = path + channel[cc]
name = ss
newFileName = nFn
#-----------===============================
#-----------------------------------------------------------------------------------------------------------
if 1:
    oldFile = TFile(name, "READ")
    oldTree = oldFile.Get("ntuple/tree")
    NofEntries = oldTree.GetEntriesFast()
    numOfEntriesToScan_local = NofEntries
    if NofEntries > nLimit: numOfEntriesToScan_local = nLimit
    if testOn == 1: numOfEntriesToScan_local = numOfEntriesToScan
    #locate and register the Jet branches of the old ttree
    print '\nskimming file', oldFile.GetName(), '\tevents =', oldTree.GetEntries(), '\tweight =', oldTree.GetWeight(), '\n'
    newFile = TFile(newFileName,
                    "RECREATE")  #('Skim/' + newFileName, "RECREATE")
    newFile.cd()
    newTree = TTree(treeName, treeName)

    if lola_on == 0:
        forBDT.branchLeafStrGen()
        newTree.Branch(JetName, Jets1, forBDT.branchLeafStr)
    elif lola_on == 1:
        forLola.branchLeafStrGen()
        newTree.Branch(JetName, Jets1, forLola.branchLeafStr)
    # this attribute list must exactly match (the order of) the features in the header file!!!!

    attr = forBDT.preList + forBDT.attrTypeList
Example #30
def skim(name):

    oldFile = TFile(name, "READ")
    oldTree = oldFile.Get("ntuple/tree")
    oldTree.SetBranchAddress("Lepton1", AddressOf(Lepton1, "pt"))
    oldTree.SetBranchAddress("Lepton2", AddressOf(Lepton2, "pt"))
    oldTree.SetBranchAddress("FatJet1", AddressOf(FatJet1, "pt"))
    oldTree.SetBranchAddress("V", AddressOf(V, "pt"))
    oldTree.SetBranchAddress("X", AddressOf(X, "pt"))
    oldTree.SetBranchAddress("MEt", AddressOf(MEt, "pt"))

    print 'skimming file', oldFile.GetName(), '\tevents =', oldTree.GetEntries(), '\tweight =', oldTree.GetWeight()

    newFile = TFile("Skim/" + name, "RECREATE")
    newFile.cd()
    newTree = TTree("alpha", "alpha")

    EventNumberBranch = newTree.Branch('EventNumber', EventNumber,
                                       'EventNumber/F')
    EventWeightBranch = newTree.Branch('EventWeight', EventWeight,
                                       'EventWeight/F')
    RunNumberBranch = newTree.Branch('RunNumber', RunNumber, 'RunNumber/F')
    LumiNumberBranch = newTree.Branch('LumiNumber', LumiNumber, 'LumiNumber/F')
    isZtoEEBranch = newTree.Branch('isZtoEE', isZtoEE, 'isZtoEE/O')
    isZtoMMBranch = newTree.Branch('isZtoMM', isZtoMM, 'isZtoMM/O')
    isZtoNNBranch = newTree.Branch('isZtoNN', isZtoNN, 'isZtoNN/O')
    isMCBranch = newTree.Branch('isMC', isMC, 'isMC/O')
    FatJet1_ptBranch = newTree.Branch('FatJet1_pt', FatJet1_pt, 'FatJet1_pt/F')
    FatJet1_softdropPuppiMassBranch = newTree.Branch(
        'FatJet1_softdropPuppiMass', FatJet1_softdropPuppiMass,
        'FatJet1_softdropPuppiMass/F')
    FatJet1_softdropPuppiMassCorrBranch = newTree.Branch(
        'FatJet1_softdropPuppiMassCorr', FatJet1_softdropPuppiMassCorr,
        'FatJet1_softdropPuppiMassCorr/F')
    FatJet1_softdropPuppiMassCorrNotSmearedBranch = newTree.Branch(
        'FatJet1_softdropPuppiMassCorrNotSmeared',
        FatJet1_softdropPuppiMassCorrNotSmeared,
        'FatJet1_softdropPuppiMassCorrNotSmeared/F')
    FatJet1_puppiTau21Branch = newTree.Branch('FatJet1_puppiTau21',
                                              FatJet1_puppiTau21,
                                              'FatJet1_puppiTau21/F')
    FatJet1_ddtTau21Branch = newTree.Branch('FatJet1_ddtTau21',
                                            FatJet1_ddtTau21,
                                            'FatJet1_ddtTau21/F')
    FatJet1_CSVBranch = newTree.Branch('FatJet1_CSV', FatJet1_CSV,
                                       'FatJet1_CSV/F')
    FatJet1_CSVRBranch = newTree.Branch('FatJet1_CSVR', FatJet1_CSVR,
                                        'FatJet1_CSVR/F')
    FatJet1_CSV1Branch = newTree.Branch('FatJet1_CSV1', FatJet1_CSV1,
                                        'FatJet1_CSV1/F')
    FatJet1_CSV2Branch = newTree.Branch('FatJet1_CSV2', FatJet1_CSV2,
                                        'FatJet1_CSV2/F')
    FatJet1_CSVR1Branch = newTree.Branch('FatJet1_CSVR1', FatJet1_CSVR1,
                                         'FatJet1_CSVR1/F')
    FatJet1_CSVR2Branch = newTree.Branch('FatJet1_CSVR2', FatJet1_CSVR2,
                                         'FatJet1_CSVR2/F')
    V_massBranch = newTree.Branch('V_mass', V_mass, 'V_mass/F')
    V_ptBranch = newTree.Branch('V_pt', V_pt, 'V_pt/F')
    MEt_ptBranch = newTree.Branch('MEt_pt', MEt_pt, 'MEt_pt/F')
    X_massBranch = newTree.Branch('X_mass', X_mass, 'X_mass/F')
    X_tmassBranch = newTree.Branch('X_tmass', X_tmass, 'X_tmass/F')

    theweight = oldTree.GetWeight()

    for event in range(oldTree.GetEntries()):  # loop over all entries
        oldTree.GetEntry(event)

        # Alpha selections

        # Channel
        if not oldTree.isZtoMM and not oldTree.isZtoEE and not oldTree.isZtoNN:
            continue

        # Trigger
        if not oldTree.isMC:
            if oldTree.isZtoMM:
                #if not ( oldTree.HLT_TkMu50_v or oldTree.HLT_Mu50_v ): continue
                if not (oldTree.HLT_Mu45_eta2p1_v): continue
            elif oldTree.isZtoEE:
                if not (oldTree.HLT_Ele105_CaloIdVT_GsfTrkIdT_v
                        or oldTree.HLT_Ele115_CaloIdVT_GsfTrkIdT_v):
                    continue
            elif oldTree.isZtoNN:
                if not (oldTree.HLT_PFMETNoMu90_PFMHTNoMu90_IDTight_v
                        or oldTree.HLT_PFMETNoMu110_PFMHTNoMu110_IDTight_v
                        or oldTree.HLT_PFMETNoMu120_PFMHTNoMu120_IDTight_v
                        or oldTree.HLT_PFMET170_NoiseCleaned_v
                        or oldTree.HLT_PFMET170_JetIdCleaned_v
                        or oldTree.HLT_PFMET170_HBHECleaned_v):
                    continue
            else:
                continue
        # Leptons
        if oldTree.isZtoMM and not (
            ((Lepton1.isHighPt and Lepton2.isHighPt) or
             (Lepton1.isTrackerHighPt and Lepton2.isHighPt) or
             (Lepton1.isHighPt and Lepton2.isTrackerHighPt))
                and Lepton1.pt > 55 and Lepton2.pt > 20
                and abs(Lepton1.eta) < 2.1 and abs(Lepton2.eta) < 2.1
                and not (Lepton1.pt > 500 and abs(Lepton1.eta) > 1.2)
                and not (Lepton2.pt > 500 and abs(Lepton2.eta) > 1.2)
                and Lepton1.trkIso < 0.1 and Lepton2.trkIso < 0.1):
            continue

        if oldTree.isZtoEE and not (Lepton1.pt > 135 and Lepton2.pt > 35
                                    and Lepton1.isLoose and Lepton2.isLoose):
            continue
        # No Leptons
        if oldTree.isZtoNN and not (
                MEt.pt > 200 and oldTree.nLooseMuons == 0
                and oldTree.nVetoElectrons == 0 and oldTree.nPhotons == 0
                and oldTree.nTaus == 0 and oldTree.MinJetMetDPhi > 0.5
                and oldTree.Flag_EcalDeadCellTriggerPrimitiveFilter
                and oldTree.Flag_HBHENoiseFilter
                and oldTree.Flag_HBHENoiseIsoFilter
                and oldTree.Flag_globalTightHalo2016Filter
                and oldTree.Flag_goodVertices and oldTree.Flag_BadPFMuon
                and oldTree.Flag_BadChCand):
            continue
        if not oldTree.isMC:
            if oldTree.isZtoNN and not oldTree.Flag_eeBadScFilter: continue
        # Boost and Z
        if (oldTree.isZtoEE
                or oldTree.isZtoMM) and not (V.pt > 170 and FatJet1.pt > 170
                                             and V.mass > 70 and V.mass < 110):
            continue
        # Boost and Cleaning for Z invisible
        if oldTree.isZtoNN and not (FatJet1.pt > 200 and FatJet1.isTight
                                    and FatJet1.nhf < 0.8 and FatJet1.chf > 0.2
                                    and oldTree.MaxJetBTag < 0.5426
                                    and X.dPhi > 2):
            continue
        # Grooming
        if not (FatJet1.softdropPuppiMassCorr > 30): continue

        # Copy relevant variables
        EventNumber[0] = oldTree.EventNumber
        EventWeight[0] = oldTree.EventWeight * theweight
        RunNumber[0] = oldTree.RunNumber
        LumiNumber[0] = oldTree.LumiNumber
        isZtoEE[0] = oldTree.isZtoEE
        isZtoMM[0] = oldTree.isZtoMM
        isZtoNN[0] = oldTree.isZtoNN
        isMC[0] = oldTree.isMC
        FatJet1_pt[0] = FatJet1.pt
        FatJet1_softdropPuppiMass[0] = FatJet1.softdropPuppiMass
        FatJet1_softdropPuppiMassCorr[0] = FatJet1.softdropPuppiMassCorr
        FatJet1_softdropPuppiMassCorrNotSmeared[0] = FatJet1.softdropPuppiMassCorrNotSmeared
        FatJet1_puppiTau21[0] = FatJet1.puppiTau21
        FatJet1_ddtTau21[0] = FatJet1.ddtTau21
        FatJet1_CSV[0] = FatJet1.CSV
        FatJet1_CSVR[0] = FatJet1.CSVR
        FatJet1_CSV1[0] = FatJet1.CSV1
        FatJet1_CSV2[0] = FatJet1.CSV2
        FatJet1_CSVR1[0] = FatJet1.CSVR1
        FatJet1_CSVR2[0] = FatJet1.CSVR2
        V_mass[0] = V.mass
        V_pt[0] = V.pt
        MEt_pt[0] = MEt.pt
        X_mass[0] = X.mass
        X_tmass[0] = X.tmass

        newTree.Fill()

    print 'produced skimmed file', newFile.GetName(), '\tevents =', newTree.GetEntries(), '\tweight =', newTree.GetWeight()

    newFile.cd()
    newTree.Write()
    newFile.Close()
    oldFile.Close()
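
skim() relies on module-level branch buffers (EventNumber, FatJet1_pt, ...) and on struct objects (Lepton1, Lepton2, FatJet1, V, X, MEt) bound with AddressOf, typically declared elsewhere via gROOT.ProcessLine; neither is shown above. A sketch of how the scalar buffers are commonly declared (the use of array('f', ...) for the /F leaves and 'B' for the /O boolean leaves is an assumption):

from array import array

# float ('F') leaves
EventNumber = array('f', [0.])
EventWeight = array('f', [0.])
RunNumber = array('f', [0.])
LumiNumber = array('f', [0.])
FatJet1_pt = array('f', [0.])
V_mass = array('f', [0.])
V_pt = array('f', [0.])
MEt_pt = array('f', [0.])
X_mass = array('f', [0.])
X_tmass = array('f', [0.])
# boolean ('O') leaves, stored as unsigned char
isZtoEE = array('B', [0])
isZtoMM = array('B', [0])
isZtoNN = array('B', [0])
isMC = array('B', [0])
# ...the remaining FatJet1_* buffers follow the same float pattern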