Example #1
def split_in_bins_n_save (root_file, saving_dir, withTurbo, mother_particle = "Lc"):

	ybins = Imports.getYbins() #Rapidity bins
    
	ptbins = Imports.getPTbins()

	#Vestige of the Turbo output: Turbo candidates are already separated by species, so only the requested mother particle needs to be split off
	if not withTurbo:
		particles = ["Lc", "Xic"]
	else:
		particles = []
		particles.append(mother_particle)
    
	if not os.path.exists(saving_dir + "ybins/"):
		os.mkdir(saving_dir + "ybins/")
	if not os.path.exists(saving_dir + "ptbins/"):
		os.mkdir(saving_dir + "ptbins/")
	if not os.path.exists(saving_dir + "y_ptbins/"):
		os.mkdir(saving_dir + "y_ptbins/")

	extra_variables = [""]
	tree = root_file
	for particle in particles:
		if particle == "Lc":
			mass_cuts = "lcplus_MM < 2375"
		if particle == "Xic":
			mass_cuts = "lcplus_MM > 2375"
			
		print("Particle: " + particle)
		for ybin in ybins:
			
			ycuts = "lcplus_RAPIDITY >= {0} && lcplus_RAPIDITY < {1}".format(ybin[0], ybin[1])
			allcuts = " {0} && {1}".format(ycuts, mass_cuts)
			
			strip_n_save(0,0, allcuts, "", saving_dir + "ybins/" + particle + "_ybin_{0}-{1}.root".format(ybin[0], ybin[1]), extra_variables,particle, bins = True, tree = tree)
			
			n = len(ptbins)
			i = 0
			print("Files with y({0})".format(ybin))
			for ptbin in ptbins:
				#FOR THE PROGRESSION BAR
				if i < n:
					j = (i + 1) / n
					sys.stdout.write('\r')
					sys.stdout.write("[%-20s] %d%%" % ('='*int(20*j), 100*j))
					sys.stdout.flush()
					i += 1
				
				ptcuts = "lcplus_PT >= {0} && lcplus_PT < {1}".format(ptbin[0], ptbin[1])
				if (ybin[0] == 2.0):
					allcuts = " {0} && {1}".format(ptcuts, mass_cuts)
					strip_n_save(0,0, allcuts, "", saving_dir + "ptbins/" + particle + "_ptbin_{0}-{1}.root".format(ptbin[0], ptbin[1]), extra_variables, particle, bins = True,tree = tree)
				yptcut = ycuts + " && " + ptcuts
				allcuts = " {0} && {1}".format(yptcut, mass_cuts)
				strip_n_save(0,0, allcuts, "", saving_dir + "y_ptbins/" + particle + "_ybin_{0}-{1}_ptbin_{2}-{3}.root".format(ybin[0],ybin[1],ptbin[0],ptbin[1]), extra_variables, particle, bins = True, tree = tree)
			print("\n")
Example #2
def compariuse():
    Imports.import_ref()
    if plt_len_entry.get() != '':
        cfg.plt_len = int(plt_len_entry.get())
    if success_criteria_entry.get() != str(cfg.success_criteria):
        cfg.success_criteria = int(success_criteria_entry.get())
    if romi_ver_ent.get() != cfg.romi_ver:
        cfg.romi_ver = romi_ver_ent.get()
    if bp_ent_mean_dia.get() != '':
        cfg.bp_mean_dia = float(bp_ent_mean_dia.get())
    if bp_ent_mean_sys.get() != '':
        cfg.bp_mean_sys = float(bp_ent_mean_sys.get())
    if bp_ent_std_sys.get() != '':
        cfg.bp_std_sys = float(bp_ent_std_sys.get())
    if bp_ent_std_dia.get() != '':
        cfg.bp_std_dia = float(bp_ent_std_dia.get())
    if auth_ent_subjects.get() != '':
        cfg.auth_subjects = int(auth_ent_subjects.get())
    if auth_ent_fp.get() != '':
        cfg.auth_fp = float(auth_ent_fp.get())
    if auth_ent_fn.get() != '':
        cfg.auth_fn = float(auth_ent_fn.get())
    if int(rr_pct_ent.get()) != cfg.devs['RR'][0]:
        cfg.devs['RR'][0] = int(rr_pct_ent.get())
    if int(rr_unit_ent.get()) != cfg.devs['RR'][1]:
        cfg.devs['RR'][1] = int(rr_unit_ent.get())
    if int(hr_pct_ent.get()) != cfg.devs['HR'][0]:
        cfg.devs['HR'][0] = int(hr_pct_ent.get())
    if int(hr_unit_ent.get()) != cfg.devs['HR'][1]:
        cfg.devs['HR'][1] = int(hr_unit_ent.get())
    if title_entry.get() != '':
        cfg.title = title_entry.get()
    if sub_title_entry.get() != '':
        cfg.sub_title = sub_title_entry.get()
    if show_annot.get() == 0:
        cfg.show_dev = False
    if count_annot.get() == 0:
        cfg.count_dev = False
    if report_37.get() == 0:
        cfg.report_37 = False
    if show_sum.get() == 0:
        cfg.show_summary = False
    plot = build_plot_dic()
    print('Compariusing...')
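Almost every branch of compariuse() follows the same pattern: read a Tkinter entry, skip it if empty, cast it, and store it on cfg. A hedged sketch of a helper that captures that pattern (the set_if_filled name is an assumption; cfg and the entry widgets come from the surrounding script):

def set_if_filled(entry, attr, cast=str):
    # Copy a non-empty Tkinter entry value onto cfg, applying the given cast.
    text = entry.get()
    if text != '':
        setattr(cfg, attr, cast(text))

# e.g. set_if_filled(plt_len_entry, 'plt_len', int)
#      set_if_filled(bp_ent_mean_dia, 'bp_mean_dia', float)
#      set_if_filled(title_entry, 'title')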
Example #3
def split_in_bins_n_save(root_file, saving_dir, run, mother_particle="Lc"):

    ybins = Imports.getYbins()  #Rapidity bins

    ptbins = Imports.getPTbins()

    if run == 1:
        particles = ["Lc", "Xic"]
    else:
        particles = []
        particles.append(mother_particle)

    os.mkdir(saving_dir + "ybins/")
    os.mkdir(saving_dir + "ptbins/")
    os.mkdir(saving_dir + "y_ptbins/")

    tree = root_file
    for particle in particles:
        if particle == "Lc":
            mass_cuts = "lcplus_MM < 2375"
        if particle == "Xic":
            mass_cuts = "lcplus_MM > 2375"
        for ybin in ybins:
            ycuts = "lcplus_RAPIDITY >= {0} && lcplus_RAPIDITY < {1}".format(
                ybin[0], ybin[1])
            allcuts = " {0} && {1}".format(ycuts, mass_cuts)
            Strip.strip_n_save(0,
                               0,
                               allcuts,
                               "",
                               saving_dir + "ybins/" + particle +
                               "_ybin_{0}-{1}.root".format(ybin[0], ybin[1]),
                               extra_variables=[""],
                               bins=True,
                               tree=tree)
            for ptbin in ptbins:
                ptcuts = "lcplus_PT >= {0} && lcplus_PT < {1}".format(
                    ptbin[0], ptbin[1])
                if (ybin[0] == 2.0):
                    allcuts = " {0} && {1}".format(ptcuts, mass_cuts)
                    Strip.strip_n_save(
                        0,
                        0,
                        allcuts,
                        "",
                        saving_dir + "ptbins/" + particle +
                        "_ptbin_{0}-{1}.root".format(ptbin[0], ptbin[1]),
                        extra_variables=[""],
                        particle=mother_particle,
                        bins=True,
                        tree=tree)
                yptcut = ycuts + " && " + ptcuts
                allcuts = " {0} && {1}".format(yptcut, mass_cuts)
                Strip.strip_n_save(0,
                                   0,
                                   allcuts,
                                   "",
                                   saving_dir + "y_ptbins/" + particle +
                                   "_ybin_{0}-{1}_ptbin_{2}-{3}.root".format(
                                       ybin[0], ybin[1], ptbin[0], ptbin[1]),
                                   extra_variables=[""],
                                   particle=mother_particle,
                                   bins=True,
                                   tree=tree)
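Unlike Example #1, this version calls os.mkdir unconditionally, so rerunning it on an existing saving_dir raises FileExistsError. A minimal alternative, assuming it is acceptable to reuse existing bin directories:

import os

for sub in ("ybins/", "ptbins/", "y_ptbins/"):
    # exist_ok avoids the FileExistsError a plain os.mkdir raises on re-runs
    os.makedirs(saving_dir + sub, exist_ok=True)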
Example #4
#!/usr/bin/python3
# run a DESC on the table to get the values and automate the oM.query
import gi
import Imports

gi.require_version("Gtk", "3.0")
from gi.repository import Gtk

oM = Imports.Mysqli()
cursor = oM.Con.cursor()


class Faculdade:
    def __init__(self):
        builder = Gtk.Builder()
        builder.add_from_file("Faculdade.glade")

        self.window1 = builder.get_object("window1")
        self.entryNome = builder.get_object("entryNome")
        self.entrySobrenome = builder.get_object("entrySobrenome")
        self.rbtSexo = builder.get_object("rbtSexoM")
        self.entryEmail = builder.get_object("entryEmail")
        self.entryTelefone = builder.get_object("entryTelefone")
        self.entryCurso = builder.get_object("entryCurso")

        self.btnCadastrar = builder.get_object("btnCadastrar")
        self.btnLimpar = builder.get_object("btnLimpar")

        self.window1.show()

        builder.connect_signals({
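The snippet is cut off inside the connect_signals call. For orientation only, here is a minimal, hypothetical wiring of Glade handler names to Python callables followed by the GTK main loop; the handler names below are invented and are not taken from Faculdade.glade:

import gi
gi.require_version("Gtk", "3.0")
from gi.repository import Gtk

builder = Gtk.Builder()
builder.add_from_file("Faculdade.glade")
builder.connect_signals({
    # keys must match the handler names declared in the .glade file
    "on_btnCadastrar_clicked": lambda *args: print("cadastrar"),
    "on_btnLimpar_clicked": lambda *args: print("limpar"),
    "on_window1_destroy": Gtk.main_quit,
})
builder.get_object("window1").show()
Gtk.main()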
Example #5
#################

import ROOT, Imports, sys
from Imports import *
sys.path.append('./MassFitting/')
from fittingDict import fittingDict

getData = True  # Load data.
makesWeights = True  # Generate sWeights, write to workspace. Requires getData.
makeFriendTree = True  # create friend tree for simple future sweight plotting. Requires makesWeights.
plotVariable = False  # make an sPlot using sWeights in RooDataSet from workspace.
testFriendTree = True  # test sWeights from friend tree to do an sPlot.

inputdir = "/dcache/bfys/scalo/binned_files/"
outputdir = "/dcache/bfys/cpawley/sWeights/"
cuts = Imports.getDataCuts()

#folders_dict = {"39":["2018_MagDown",2155] , "31":["2017_MagDown", 1843], "40":["2016_MagDown",1859], "41":["2015_MagDown", 579], "42":["2012_MagDown", 1155], "43":["2011_MagDown", 907], "45":["2011_MagUp", 817], "46":["2012_MagUp", 1342], "47":["2015_MagUp", 370], "48":["2016_MagUp", 1771], "49":["2017_MagUp", 1839], "50":["2018_MagUp", 2298] } #a dictionary containing the details of the all the years' data according to joblog.txt

folders_dict = {
    "42": ["2012_MagDown", 1155],
    "46": ["2012_MagUp", 1342]
}  #a dictionary containing the details of all the years' data according to joblog.txt

y_bins = ["_y2.0-2.5", "_y2.5-3.0", "_y3.0-3.5", "_y3.5-4.0"]
pt_bins = [
    "_pt3000-4000", "_pt4000-5000", "_pt5000-6000", "_pt6000-7000",
    "_pt7000-8000", "_pt8000-10000", "_pt10000-20000"
]
particle_types = ["Lc", "Xic"]
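folders_dict, y_bins, pt_bins, and particle_types define the grid of binned files under inputdir. A hedged sketch of enumerating that grid (the exact file layout and naming are assumptions based on the bin labels above):

import itertools, os

for key, (tag, n_subjobs) in folders_dict.items():
    for particle, ybin, ptbin in itertools.product(particle_types, y_bins, pt_bins):
        # assumed layout, e.g. .../binned_files/2012_MagDown/Lc_y2.0-2.5_pt3000-4000.root
        fname = os.path.join(inputdir, tag, particle + ybin + ptbin + ".root")
        print(fname)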
import ROOT, os
from ROOT import TChain, TCanvas, TH1
import Imports

#directory = "/data/bfys/jdevries/gangadir/workspace/jdevries/LocalXML/"
#directory = "/dcache/bfys/jdevries/ntuples/LcAnalysis/ganga/"
#job = "78"
ID = ""

ybins = Imports.getYbins()
ptbins = Imports.getPTbins()

#cuts = "lcplus_P < 300000 && lcplus_OWNPV_CHI2 < 80 && pplus_ProbNNp > 0.5 && kminus_ProbNNk > 0.4 && piplus_ProbNNpi > 0.5 && pplus_P < 120000 && kminus_P < 115000 && piplus_P < 80000 && pplus_PIDp > 0 && kminus_PIDK > 0"
cuts = "lcplus_P < 300000 && lcplus_OWNPV_CHI2 < 80 &&  pplus_P < 120000 && kminus_P < 115000 && piplus_P < 80000"
#101:[285, 2017, "MagDown"]
#dictionary = {95:[284, 2017, "MagUp", 2003842], 96:[284, 2017, "MagUp", 2006193], 97:[283, 2018, "MagUp", 2004731], 98:[278, 2018, "MagUp", 2007066], 102:[281, 2017, "MagDown", 2002494], 103:[279, 2018, "MagDown", 2000268], 104:[277, 2018, "MagDown", 2005446], 105:[281, 2016, "MagUp", 2007242], 106:[286, 2016, "MagUp", 2006283], 107:[290, 2016, "MagDown", 2009238], 108:[282, 2016, "MagDown", 2052337]}
dictionary = {30:[27, 2012, "MagDown", 551509], 88:[25, 2012, "MagDown", 519998]}

f_text = open("/dcache/bfys/scalo/run1_Selection_Eff_output_v2.txt", "w+")

for job in dictionary:
    particle = ""
    #cuts = "lcplus_L0HadronDecision_TOS==1 && lcplus_Hlt1TrackAllL0Decision_TOS==1"
    n_subjobs = dictionary[job][0]
    year = dictionary[job][1]
    MagPol = dictionary[job][2]
    tot_entries = dictionary[job][3]
    if (job == 30):
        particle = "Xic"
        #ID = "26103090"
		ID = "25103029"
import ROOT, os, sys, Imports
from ROOT import TChain, TFile
sys.path.append(
    './tuples_prep_scripts/'
)  #step necessary to import the scripts placed in the subdirectory of the current directory
import Strip, SetBranch, SplitScript

folders_dict = Imports.getFoldersDict()  #a dictionary containing the details of all the years' data according to joblog.txt

dir = "/dcache/bfys/scalo/"

particle = "Lc"

for element in folders_dict:
    #These commented lines should be uncommented in case nTracks is a variable needed for the PID efficiency calculation
    if int(element) < 47:
        extra_variables = [
            "lcplus_Hlt1TrackAllL0Decision_TOS",
            "lcplus_Hlt2CharmHadD2HHHDecision_TOS"
        ]
        run = 1
    else:
        extra_variables = ["nTracks", "lcplus_Hlt1TrackMVADecision_TOS"]
        run = 2
    if (int(element) > 114 and int(element) < 118):
        particle = "Xic"
    else:
        particle = "Lc"
    cuts = Imports.getDataCuts(run)
    name = folders_dict[element][0]
particle = "Lc"  # valid types :- Xic or Lc (For MC studies)
# Define if you want to add sWeights
addsWeights = True

#Input dir is where the reduced tuples are; output is where we make our plots, and our friend trees are in sweightsdir
inputdir = "/dcache/bfys/jtjepkem/binned_files/"
sweightsdir = "/data/bfys/cpawley/sWeights/"
outputdir = "/data/bfys/cpawley/dalitz/"

#Years, Mag pol and part. types hardcoded
years = [2011, 2012, 2015, 2016, 2017, 2018]
magPol = ["MagUp", "MagDown"]
particle_types = ["Lc", "Xic"]

#y and pt bins may vary - so we import them
y_bin_temp = Imports.getYbins()
y_bin = []
for y in y_bin_temp:
    y_bin.append("{}-{}".format(y[0], y[1]))

pt_bin_temp = Imports.getPTbins()
pt_bin = []
for pt in pt_bin_temp:
    pt_bin.append("{}-{}".format(pt[0], pt[1]))


def invariantMass(p1, p2):
    # build invariant mass string
    m1 = p1 + "_M"
    ptot1 = p1 + "_P"
    px1 = p1 + "_PX"
def main(argv):

    ROOT.gROOT.SetBatch(True)  #STOP SHOWING THE GRAPH FOR ROOT

    try:
        opts, args = getopt.getopt(argv, "hstp")
    except getopt.GetoptError:
        print("The arguments are wrong")
        sys.exit(2)

    options = []
    arguments = []

    for opt, arg in opts:
        options.append(opt)
        arguments.append(arg)

    if not options:
        options = ["-s", "-t", "-p"]

    if "-h" in options:
        print(
            textwrap.dedent("""\

			Welcome to the efficiencies.py script.

			The parameters are
				-h : help
				-s : Selection
				-t : Trigger
				-p : PID

			Running with no parameter will output all the data at once.
			"""))

        sys.exit()

    for opt in options:

        if opt == "-s":

            selecEffDict = {}
            years = []

            print("\nCreation of the selection efficiency files")

            n = len(MC_jobs_Dict)
            i = 0

            for job in MC_jobs_Dict:
                #FOR THE PROGRESSION BAR
                if i < n:
                    j = (i + 1) / n
                    sys.stdout.write('\r')
                    sys.stdout.write("[%-20s] %d%%" %
                                     ('=' * int(20 * j), 100 * j))
                    sys.stdout.flush()
                    i += 1
                if job == "NA":
                    continue

                particle = MC_jobs_Dict[job][3]
                year = MC_jobs_Dict[job][0]
                pol = MC_jobs_Dict[job][1]
                subjobs = MC_jobs_Dict[job][2]
                identifier = MC_jobs_Dict[job][4]

                if year not in years:
                    years.append(year)

                filename = "MC_Lc2pKpiTuple_" + identifier + ".root"

                if int(year) <= 2012:
                    run = 1
                    cuts = Imports.getDataCuts(run)
                else:
                    run = 2
                    cuts = Imports.getDataCuts(run)

                Lc_MC_tree = TChain(
                    "tuple_Lc2pKpi/DecayTree"
                )  # !!! QUESTION : NOT BETTER INSTEAD OF CHAIN; JUST GETENTRIES FROM EACH ONE BY ONE, ONCE WITHOUT CUT AND ONCE WITH?

                for subjob in os.listdir(RAW_TUPLE_PATH + job):
                    Lc_MC_tree.Add(RAW_TUPLE_PATH + job + "/" + subjob + "/" +
                                   filename)

                N = float(Lc_MC_tree.GetEntries()
                          )  #WHY DID SIMON USE A HARDCODED NUMBER OF ENTRIES??
                #k = float(Lc_MC_tree.GetEntries(cuts + " && " + turbo)) SIMON VERSION
                k = float(Lc_MC_tree.GetEntries(cuts))
                eff = float(k / N)
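                # binomial uncertainty on the efficiency k/N: sigma = sqrt(k*(1 - k/N)) / N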
                binom_error = (1 / N) * ((k * (1 - k / N))**(0.5))

                selecEffDict[particle + "_" + str(year) + "_" + pol] = {
                    'val': eff,
                    'err': binom_error
                }

            print("\nSelection efficiency calculations are done!")

            latexTable(selecEffDict, years, "Selection")

            prettyEffDict = pprint.pformat(selecEffDict)
            dictF = open(dict_path + "Selection_Eff_Dict.py", "w")
            dictF.write("selDict = " + str(prettyEffDict))
            dictF.close()

        elif opt == "-t":

            print("\nCreation of the trigger efficiency files")

            trigEffDict = {}
            years = []

            n = len(MC_jobs_Dict)
            i = 0

            for job in MC_jobs_Dict:
                #FOR THE PROGRESSION BAR
                if i < n:
                    j = (i + 1) / n
                    sys.stdout.write('\r')
                    sys.stdout.write("[%-20s] %d%%" %
                                     ('=' * int(20 * j), 100 * j))
                    sys.stdout.flush()
                    i += 1
                if job == "NA":
                    continue

                particle = MC_jobs_Dict[job][3]
                year = MC_jobs_Dict[job][0]
                pol = MC_jobs_Dict[job][1]
                subjobs = MC_jobs_Dict[job][2]
                identifier = MC_jobs_Dict[job][4]

                if year not in years:
                    years.append(year)

                filename = "MC_Lc2pKpiTuple_" + identifier + ".root"

                if int(year) <= 2012:
                    run = 1
                    cuts = "lcplus_L0HadronDecision_TOS == 1 && lcplus_Hlt1TrackAllL0Decision_TOS == 1"
                    turbo = "lcplus_Hlt2CharmHadD2HHHDecision_TOS==1"
                else:
                    run = 2
                    cuts = Imports.getDataCuts(run)
                    turbo = "lcplus_Hlt1TrackMVADecision_TOS==1"

                #WHAT DO I NEED FOR TRIGGER CUTS?!!

                Lc_MC_tree = TChain(
                    "tuple_Lc2pKpi/DecayTree"
                )  # !!! QUESTION : NOT BETTER INSTEAD OF CHAIN; JUST GETENTRIES FROM EACH ONE BY ONE, ONCE WITHOUT CUT AND ONCE WITH?

                for subjob in os.listdir(RAW_TUPLE_PATH + job):
                    Lc_MC_tree.Add(RAW_TUPLE_PATH + job + "/" + subjob + "/" +
                                   filename)

                N = float(
                    Lc_MC_tree.GetEntries(
                        turbo + " && lcplus_L0HadronDecision_TOS==1"))
                k = float(Lc_MC_tree.GetEntries(cuts + " && " + turbo))
                eff = float(k / N)
                binom_error = (1 / N) * ((k * (1 - k / N))**(0.5))

                trigEffDict[particle + "_" + str(year) + "_" + pol] = {
                    'val': eff,
                    'err': binom_error
                }

            print("\nTrigger efficiency calculations are done!")

            latexTable(trigEffDict, years, "Trigger")

            prettyEffDict = pprint.pformat(trigEffDict)
            dictF = open(dict_path + "Trigger_Eff_Dict.py", "w")
            dictF.write("trigDict = " + str(prettyEffDict))
            dictF.close()

        elif opt == "-p":
            pass

        else:
            sys.exit()
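main() parses its arguments with getopt, so the usual entry point would pass it everything after the script name; a minimal sketch, assuming that convention:

if __name__ == "__main__":
    # e.g. "python efficiencies.py -s -t"
    main(sys.argv[1:])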
Example #10
import ROOT, Imports, os
from ROOT import TChain, TCanvas, TH1

particle = "Lc"

if particle == "Lc":
    mass_range = [2240, 2340]
    peak_range = [2288, 2280, 2290]
    peak_width = [2, 0, 10]
    normalisation_factor = 6

    Imports.Lc_MC_datatree()
    MC_tree = Imports.Lc_MC_tree

if particle == "Xic":
    mass_range = [2420, 2520]
    peak_range = [2469, 2460, 2480]
    peak_width = [8, 0, 20]
    normalisation_factor = 4

    Imports.Xic_MC_datatree_1()
    MC_tree = Imports.Xic_MC_tree_1

c1 = ROOT.TCanvas("c1")

histogram1 = ROOT.TH1F("histogram1", "hist 1", 300, mass_range[0], mass_range[1])
histogram1.SetLineColor(9)
histogram1.SetLineWidth(1)

mass = ROOT.RooRealVar("mass", "Mass", mass_range[0], mass_range[1],
                       "MeV/c^{2}")
Example #11
def main():
	#a dictionary containing the details of all the years' data according to joblog.txt
	#Run 1 is automatically Lc, and Run 2 has particle specified.
	folders_dict = DATA_jobs_Dict

	path = [TUPLE_PATH_NOTRIG,TUPLE_PATH]
	
	for PATH in path:
		if PATH == TUPLE_PATH:
			trig = True
		else:
			trig = False
		
		#vestige of using turbo output for run 2 data. Still present in case we want to switch back to using Turbo line.
		withTurbo = False
		
		for element in folders_dict:
			if int(element) > 41 and int(element) < 47:
				extra_variables = [
					"lcplus_Hlt1TrackAllL0Decision_TOS",
					"lcplus_Hlt2CharmHadD2HHHDecision_TOS",
					"lcplus_L0HadronDecision_TOS",
					]
				cutsRun = 1
				particle = "Lc"
			else:
				extra_variables = [
					"nSPDHits",
					"nTracks",
					"lcplus_L0HadronDecision_TOS",
					"lcplus_Hlt1TrackMVADecision_TOS",
					"lcplus_Hlt2CharmHadXicpToPpKmPipTurboDecision_TOS",
					"lcplus_Hlt2CharmHadLcpToPpKmPipTurboDecision_TOS",
					]
				particle = "Lc"
				cutsRun = 2
			   
			name = folders_dict[element][0]
			subjobs = folders_dict[element][1]
			saving_directory = PATH + name + "_clusters/"
			
			cuts = Imports.getDataCuts(cutsRun, trig)
			
			if not os.path.exists(saving_directory):
				os.makedirs(saving_directory)
			   
			file_directory = RAW_TUPLE_PATH + element
			
			print ("\nStarting process for " + name)
				
			step = subjobs//20 #carry out the process in 20 clusters of datafiles to avoid memory overflow
			Max = step
			Min = 0

			# Loop used to apply global cuts on the data
			print("Creation of Clusters")
			n = 20
			i = 0
			while (Max <= subjobs):
				#FOR THE PROGRESSION BAR
				if i < n:
					j = (i + 1) / n
					sys.stdout.write('\r')
					sys.stdout.write("[%-20s] %d%%" % ('='*int(20*j), 100*j))
					sys.stdout.flush()
					i += 1
				
				if Max == Min:
					break
				strip_n_save(Min, Max, cuts, file_directory, saving_directory, extra_variables, particle)
				temp = Max
				if (Max+step > subjobs):
					Max = subjobs
				else:
					Max += step
				Min = temp

			clusters = os.listdir(saving_directory)
		
			print("\n\nTChaining the clusters")
			final_chain = TChain("DecayTree")
			n = len(clusters)
			i = 0
			for element in clusters:
				if i < n:
					j = (i + 1) / n
					sys.stdout.write('\r')
					sys.stdout.write("[%-20s] %d%%" % ('='*int(20*j), 100*j))
					sys.stdout.flush()
					i += 1
				final_chain.Add(saving_directory + element)

			
			if not os.path.exists(PATH + name + "/bins"):
			   os.makedirs(PATH + name + "/bins")
			saving_dir = PATH + name + "/bins/"
			print("\n\nCreating the final files")
			split_in_bins_n_save(final_chain, saving_dir, withTurbo, particle) # split the datafile into mass-y-pt bins

			print ("\nProcess completed for " + name)
	
		#CREATION OF THE TOTAL YEAR DATA FILES (e.g. 2012_MagUp/Xic_total.root)
		print("\Creation of the Total Year data files")
		mother_particle = ["Xic", "Lc"]
		BASE_PATH = PATH
		
		n = len(os.listdir(BASE_PATH))
		p = 0
		for i in os.listdir(BASE_PATH):
			if p < n:
					j = (p + 1) / n
					sys.stdout.write('\r')
					sys.stdout.write("[%-20s] %d%%" % ('='*int(20*j), 100*j))
					sys.stdout.flush()
					p += 1
					
			if "cluster" in i:
				continue
		
			for part in mother_particle:
				totfile = ROOT.TFile.Open(BASE_PATH + i + "/{}_total.root".format(part),"RECREATE")
				totfile.cd()
				tree = TChain("DecayTree")

				for j in os.listdir(BASE_PATH + i +"/bins/ybins"):
						if part in j:
								tree.Add(BASE_PATH + i +"/bins/ybins/"+j)
				tree.Write()
				totfile.Close()
				del totfile
		
		print("\nDeleting Clusters")
		os.system("rm -rf {}*_clusters".format(BASE_PATH))
		
		print("\nNTuple Preparation is done, happy analysis!")
Example #12
import ROOT, os, Imports
from ROOT import TChain, TCanvas, TH1

Imports.datatree()
Imports.Lc_MC_datatree()

c1 = ROOT.TCanvas("c1")

masshist = ROOT.TH1F("masshist", "Histogram of L_{c}^{+} mass", 300, 2200,
                     2600)
masshist.GetXaxis().SetTitle("M(L_{c}^{+}) [MeV/c^{2}]")
masshist.GetYaxis().SetTitle("Number of events")

DataCuts = "1==1"
Background_cuts = "(lcplus_MM > 2320 && lcplus_MM < 2350) || (lcplus_MM > 2220 && lcplus_MM < 2260)"  #to select area of background on both sides
IDcuts = "abs(pplus1_ID)==211 && abs(kminus_ID)==321 && abs(pplus0_ID)==2212 && abs(lcplus_ID)==4122 && (lcplus_BKGCAT == 0 || lcplus_BKGCAT == 50)"  #cuts to ensure right particles in the Monte Carlo
#IDcuts = "(lcplus_MM > 2320 && lcplus_MM < 2350) || (lcplus_MM > 2220 && lcplus_MM < 2260)"

variables_to_plot = [
    "lcplus_P", "lcplus_OWNPV_CHI2", "pplus0_ProbNNp", "kminus_ProbNNk",
    "pplus1_ProbNNpi", "pplus0_P", "kminus_P", "pplus1_P", "kminus_PIDK",
    "pplus0_PIDp", "lcplus_IPCHI2_OWNPV", "lcplus_ETA", "lcplus_PT",
    "lcplus_TAU", "lcplus_PVConstrainedDTF_chi2"
]

name = "Lc_vs_MC"

histogram1 = ROOT.TH1F("masshist", "Histogram of L_{c} mass", 300, 2200, 2600)
histogram2 = ROOT.TH1F("masshist", "Histogram of L_{c} mass", 300, 2200, 2600)

histogram1.SetLineColor(2)  # red for real background data
Example #13
def split_in_bins_and_save(root_file, saving_directory, run, useful_variables, mother_particle = "Lc"):
    
    #Rapidity and transverse momentum
    ybins = Imports.getYbins()
    ptbins = Imports.getPTbins()
    
    if (run==1):
        particles = ["Lc","Xic"]
    else:
        particles = []
        particles.append(mother_particle)
        
    if not os.path.exists(saving_directory + "ybins/"):
        os.makedirs(saving_directory + "ybins/")
        
    if not os.path.exists(saving_directory + "ptbins/"):
        os.makedirs(saving_directory + "ptbins/")
        
    if not os.path.exists(saving_directory + "y_ptbins/"):
        os.makedirs(saving_directory + "y_ptbins/")
    
    tree = root_file
    
    for particle in particles:
        if particle == "Lc":
            mass_cuts = "lcplus_MM < 2375"
        if particle == "Xic":
            mass_cuts = "lcplus_MM > 2375"

        #Bin each particle separately in y, pt and y-pt
        for ybin in ybins:

            ycuts = "lcplus_RAPIDITY >= {0} && lcplus_RAPIDITY < {1}".format(ybin[0], ybin[1])
            allcuts = " {0} && {1}".format(ycuts, mass_cuts)

            strip_and_save(0, 0, allcuts, "", saving_directory + "ybins/" + particle + "_ybin_{0}-{1}.root".format(ybin[0], ybin[1]), useful_variables, particle, bins = True, tree = tree)

            n = len(ptbins)
            i = 0

            print("Files with y({0})".format(ybin))

            for ptbin in ptbins:
                #Progress bar
                if (i < n):
                    j = (i + 1) / n
                    sys.stdout.write('\r')
                    sys.stdout.write("[%-20s] %d%%" % ('='*int(20*j), 100*j))
                    sys.stdout.flush()
                    i += 1

                ptcuts = "lcplus_PT >= {0} && lcplus_PT < {1}".format(ptbin[0], ptbin[1])

                if (ybin[0] == 2.0):
                    allcuts = " {0} && {1}".format(ptcuts, mass_cuts)
                    strip_and_save(0, 0, allcuts, "", saving_directory + "ptbins/" + particle + "_ptbin_{0}-{1}.root".format(ptbin[0], ptbin[1]), useful_variables, particle, bins = True, tree = tree)

                ypt_cut = ycuts + " && " + ptcuts
                allcuts = "{0} && {1}".format(ypt_cut, mass_cuts)

                strip_and_save(0, 0, allcuts, "", saving_directory + "y_ptbins/" + particle + "_ybin_{0}-{1}_ptbin_{2}-{3}.root".format(ybin[0], ybin[1], ptbin[0], ptbin[1]), useful_variables, particle, bins = True, tree = tree)

            print("\n")
Example #14
def prepMC():
    print("Prepping MC")

    MC_PATH = MC_TUPLE_PATH

    RAW_TUPLES = RAW_TUPLE_PATH

    TESTING = True

    if not os.path.exists(MC_PATH):
        os.makedirs(MC_PATH)
    
    blind_data = False

    useful_variables = []
    
    if(blind_data):

        variables = ["lcplus_MM", 
                       "lcplus_P", 
                       "lcplus_PT", 
                       "lcplus_ETA",
                       "lcplus_RAPIDITY", 
                       "lcplus_TIP", 
                       "lcplus_IPCHI2_OWNPV", 
                       "lcplus_OWNPV_CHI2", 
                       "lcplus_TAU",
                       "lcplus_IP_OWNPV",
                       "lcplus_L0HadronDecision_TOS", 
                       "lcplus_FD_OWNPV",
                       "lcplus_ENDVERTEX_CHI2",
                       "pplus_M", 
                       "pplus_P", 
                       "pplus_PT",
                       "pplus_RAPIDITY", 
                       "pplus_ETA",
                       "pplus_ProbNNp",
                       "pplus_OWNPV_CHI2",
                       "kminus_OWNPV_CHI2",
                       "piplus_OWNPV_CHI2",
                       "piplus_M",
                       "piplus_P", 
                       "piplus_PT", 
                       "piplus_RAPIDITY",
                       "piplus_ETA",
                       "piplus_ProbNNpi",
                       "piplus_IP_OWNPV",
                       "pplus_PIDp",
                       "kminus_M",
                       "kminus_P", 
                       "kminus_PT", 
                       "kminus_RAPIDITY",
                       "kminus_ETA",
                       "kminus_ProbNNk", 
                       "kminus_PIDK", 
                       "PVNTRACKS",
                       "piplus_PX", 
                       "pplus_PX", 
                       "kminus_PX", 
                       "piplus_PY", 
                       "pplus_PY", 
                       "kminus_PY", 
                       "piplus_PZ", 
                       "pplus_PZ", 
                       "kminus_PZ",
                       "pplus_IP_OWNPV",
                       "kminus_IP_OWNPV",
                       "kminus_IPCHI2_OWNPV",
                       "piplus_IPCHI2_OWNPV",
                       "pplus_IPCHI2_OWNPV",
                       "pplus_TRACK_PCHI2",
                       "piplus_TRACK_PCHI2",
		       "kminus_TRACK_PCHI2"]

        useful_variables = variables
        
    else:
        variables = ["lcplus_MM", 
                     "lcplus_P", 
                     "lcplus_PT", 
                     "lcplus_IPCHI2_OWNPV", 
                     "lcplus_OWNPV_CHI2", 
                     "lcplus_IP_OWNPV",
                     "lcplus_L0HadronDecision_TOS", 
                     "lcplus_FD_OWNPV",
                     "lcplus_ENDVERTEX_CHI2",
                     "lcplus_ENDVERTEX_NDOF",
                     "pplus_M", 
                     "pplus_P", 
                     "pplus_PT",
                     "pplus_ProbNNp",
                     "pplus_OWNPV_CHI2",
                     "kminus_OWNPV_CHI2",
                     "piplus_OWNPV_CHI2",
                     "piplus_M",
                     "piplus_P", 
                     "piplus_PT", 
                     "piplus_PIDK",
                     "piplus_PIDp",
                     "piplus_ProbNNpi",
                     "piplus_IP_OWNPV",
                     "pplus_PIDp",
                     "pplus_PIDK",
                     "kminus_M",
                     "kminus_P", 
                     "kminus_PT", 
                     "kminus_ProbNNk", 
                     "kminus_PIDK", 
                     "kminus_PIDp",
                     "PVNTRACKS",
                     "pplus_IP_OWNPV",
                     "kminus_IP_OWNPV",
                     "kminus_IPCHI2_OWNPV",
                     "piplus_IPCHI2_OWNPV",
                     "pplus_IPCHI2_OWNPV",
                     "pplus_TRACK_PCHI2",
                     "piplus_TRACK_PCHI2",
                     "kminus_TRACK_PCHI2",
                     "pplus_MC15TuneV1_ProbNNp",
                     "pplus_MC15TuneV1_ProbNNk",
                     "pplus_MC15TuneV1_ProbNNpi",
                     "pplus_MC15TuneV1_ProbNNghost",
                     "kminus_MC15TuneV1_ProbNNp",
                     "kminus_MC15TuneV1_ProbNNk",
                     "kminus_MC15TuneV1_ProbNNpi",
                     "kminus_MC15TuneV1_ProbNNghost",
                     "piplus_MC15TuneV1_ProbNNp",
                     "piplus_MC15TuneV1_ProbNNk",
                     "piplus_MC15TuneV1_ProbNNpi",
                     "piplus_MC15TuneV1_ProbNNghost", 
                     "piplus_ID",
                     "kminus_ID",
                     "pplus_ID",
                     "lcplus_ID",
                     "lcplus_RAPIDITY",
                     "piplus_RAPIDITY",
                     "pplus_RAPIDITY",
                     "kminus_RAPIDITY",]

        useful_variables = variables


    if(TESTING):
        dictionary = {"108":["2016","MagDown", 282,"Xic","26103090"]}
    else:
        dictionary = MC_jobs_Dict

    for element in dictionary:
        print("Currently working on "+dictionary[element][0] +"_"+ dictionary[element][1])
        if int(element) > 41 and int(element) < 47:
            particle = "Lc"
            extra_variables = ["lcplus_Hlt1TrackAllL0Decision_TOS", "lcplus_Hlt2CharmHadD2HHHDecision_TOS","*L0*","*Hlt*","*HLT*","lcplus_Hlt2CharmHad{}pToPpKmPipTurboDecision_TOS".format(particle)]
            run = 1
            
        else:
            particle = dictionary[element][3]
            extra_variables = ["nSPDHits", "nTracks", "lcplus_Hlt1TrackMVADecision_TOS","lcplus_Hlt2CharmHad{}pToPpKmPipTurboDecision_TOS".format(particle)]
            run = 2

        for extra_variable in extra_variables:
            if not (extra_variable == ""):
                useful_variables.append(extra_variable)

        if(blind_data):
            saving_directory = MC_PATH + dictionary[element][0] +"_"+ dictionary[element][1]+"_blinded/"
        else:
            saving_directory = MC_PATH + dictionary[element][0] +"_"+ dictionary[element][1]+"/"

        if not os.path.exists(saving_directory):
            os.makedirs(saving_directory)

        tfile = ROOT.TFile.Open(saving_directory+particle+"_MC_total.root","RECREATE")

        tree = ROOT.TChain("tuple_Lc2pKpi/DecayTree")

        subjobs = dictionary[element][2]

        print("Adding files to TChain")
        for i in range(subjobs):
            mc_file = RAW_TUPLES+element+"/"+str(i)+"/MC_Lc2pKpiTuple_"+dictionary[element][4]+".root"
            if os.path.exists(RAW_TUPLES+element+"/"+str(i)+"/"):
                tree.Add(mc_file)

        if(tree.GetEntries() == 0) or (tree.GetEntries() == -1):
            print("Stopped creation of "+dictionary[element][0] +"_"+ dictionary[element][1]+" as there were 0 entries")
            tfile.Close()
            del tfile
            os.system("rm -rf {}".format(saving_directory+particle+"_MC_total.root"))
            continue

        print("Activating useful branches on the tree")
        tree = setBranch_function(tree, useful_variables)
        cuts = Imports.getMCCuts(particle,run)

        tfile.cd()
        print("Skimming tree and writing to a new root file")
        new_tree = tree.CopyTree(cuts)
        new_tree.Write("", ROOT.TObject.kOverwrite)
        tfile.Write("", ROOT.TObject.kOverwrite)
        tfile.Close()

    print("Finished prepping MC")
Example #15
def main(script_run):
    print ("Starting main")

    #If you want to test on a small portion of data, then enable it here and add the data to the dictionary below
    TESTING = True

    if(TESTING):
        folders_dict = {"115":["2016_MagDown",186,"Xic"]}
    else:
        #Dictionary for all the data
        folders_dict = DATA_jobs_Dict
    
    #Path to save the tuples
    PATH = TUPLE_PATH+script_run+"/"

    if not os.path.exists(PATH):
        os.makedirs(PATH)
    
    blind_data = True

    makeFile(PATH, script_run, dictionary = folders_dict, blinded = blind_data)

    useful_variables = []
    
    if(blind_data):

        variables = ["lcplus_MM", 
                     "lcplus_P", 
                     "lcplus_PT", 
                     "lcplus_IPCHI2_OWNPV", 
                     "lcplus_OWNPV_CHI2", 
                     "lcplus_IP_OWNPV",
                     "lcplus_L0HadronDecision_TOS", 
                     "lcplus_FD_OWNPV",
                     "lcplus_ENDVERTEX_CHI2",
                     "lcplus_ENDVERTEX_NDOF",
                     "lcplus_RAPIDITY",
                     "piplus_RAPIDITY",
                     "pplus_RAPIDITY",
                     "kminus_RAPIDITY",
                     "pplus_M", 
                     "pplus_P", 
                     "pplus_PT",
                     "pplus_ProbNNp",
                     "pplus_OWNPV_CHI2",
                     "kminus_OWNPV_CHI2",
                     "piplus_OWNPV_CHI2",
                     "piplus_M",
                     "piplus_P", 
                     "piplus_PT", 
                     "piplus_PIDK",
                     "piplus_PIDp",
                     "piplus_ProbNNpi",
                     "piplus_IP_OWNPV",
                     "pplus_PIDp",
                     "pplus_PIDK",
                     "kminus_M",
                     "kminus_P", 
                     "kminus_PT", 
                     "kminus_ProbNNk", 
                     "kminus_PIDK", 
                     "kminus_PIDp",
                     "PVNTRACKS",
                     "pplus_IP_OWNPV",
                     "kminus_IP_OWNPV",
                     "kminus_IPCHI2_OWNPV",
                     "piplus_IPCHI2_OWNPV",
                     "pplus_IPCHI2_OWNPV",
                     "pplus_TRACK_PCHI2",
                     "piplus_TRACK_PCHI2",
                     "kminus_TRACK_PCHI2",
                     "pplus_MC15TuneV1_ProbNNp",
                     "pplus_MC15TuneV1_ProbNNk",
                     "pplus_MC15TuneV1_ProbNNpi",
                     "pplus_MC15TuneV1_ProbNNghost",
                     "kminus_MC15TuneV1_ProbNNp",
                     "kminus_MC15TuneV1_ProbNNk",
                     "kminus_MC15TuneV1_ProbNNpi",
                     "kminus_MC15TuneV1_ProbNNghost",
                     "piplus_MC15TuneV1_ProbNNp",
                     "piplus_MC15TuneV1_ProbNNk",
                     "piplus_MC15TuneV1_ProbNNpi",
                     "piplus_MC15TuneV1_ProbNNghost"]

        useful_variables = variables
        
    else:
        variables = ["lcplus_MM", 
                       "lcplus_P", 
                       "lcplus_PT", 
                       "lcplus_ETA",
                       "lcplus_RAPIDITY", 
                       "lcplus_TIP", 
                       "lcplus_IPCHI2_OWNPV", 
                       "lcplus_OWNPV_CHI2", 
                       "lcplus_TAU",
                       "lcplus_L0HadronDecision_TOS", 
                       "lcplus_FD_OWNPV",
                       "pplus_M", 
                       "pplus_P", 
                       "pplus_PT",
                       "pplus_RAPIDITY", 
                       "pplus_ETA",
                       "pplus_ProbNNp",
                       "piplus_M",
                       "piplus_P", 
                       "piplus_PT", 
                       "piplus_RAPIDITY",
                       "piplus_ETA",
                       "piplus_ProbNNpi",
                       "pplus_PIDp",
                       "kminus_M",
                       "kminus_P", 
                       "kminus_PT", 
                       "kminus_RAPIDITY",
                       "piplus_IP_OWNPV",
                       "kminus_ETA",
                       "kminus_ProbNNk", 
                       "kminus_PIDK",
                       "PVNTRACKS", 
                       "piplus_PX", 
                       "pplus_PX", 
                       "kminus_PX", 
                       "piplus_PY",
                       "pplus_PY", 
                       "kminus_PY", 
                       "piplus_PZ",
                       "pplus_PZ", 
                       "kminus_PZ",
                       "pplus_IP_OWNPV",
                       "kminus_IP_OWNPV",
                       "kminus_IPCHI2_OWNPV",
                       "piplus_IPCHI2_OWNPV",
                       "pplus_IPCHI2_OWNPV"]

        useful_variables = variables
        

    appendVars(PATH, script_run, variables = useful_variables)
    
    for element in folders_dict:
        if int(element) > 41 and int(element) < 47:
            extra_variables = ["lcplus_Hlt1TrackAllL0Decision_TOS", "lcplus_Hlt2CharmHadD2HHHDecision_TOS","*L0*","*Hlt*","*HLT*"]
            run = 1
            particle = "Lc"
            
        else:
            extra_variables = ["nSPDHits", "nTracks", "lcplus_Hlt1TrackMVADecision_TOS"]
            particle = folders_dict[element][2]
            run = 2

        for extra_variable in extra_variables:
            if not (extra_variable == ""):
                #If an extra variable is needed, it will be appended
                useful_variables.append(extra_variable)
        
        
        subjobs = folders_dict[element][1]
        
        if (blind_data):
            name = folders_dict[element][0] + "_blinded"
        else:
            name = folders_dict[element][0]
        
        saving_directory = PATH+name+"_clusters/"

        cuts = Imports.getDataCuts(run, blinded = blind_data)
        
        if not os.path.exists(saving_directory):
            os.makedirs(saving_directory)
        
        file_directory = RAW_TUPLE_PATH+element
        
        print("\nStarting process for "+name)
        
        #Carries out the process in steps of 20
        step = subjobs//20
        Max = step
        Min = 0
        
        #Clusters are created here
        print("Creation of clusters")
        n = 20 
        i = 0 
        
        while (Max<=subjobs):
            #Progress bar
            if (i<n):
                j = (i+1)/n
                sys.stdout.write("\r")
                sys.stdout.write("[%-20s] %d%%" % ('='*int(20*j), 100*j))
                sys.stdout.flush()
                i+=1
                
            if (Max == Min):
                break
                
            strip_and_save(Min, Max, cuts, file_directory, saving_directory, useful_variables, particle)
            
            temp = Max
            
            if (Max+step > subjobs):
                Max = subjobs
            
            else:
                Max+=step
                
            Min = temp
            
        clusters = os.listdir(saving_directory)
        
        print("\n\nTChaining the clusters")
        
        final_chain = TChain("DecayTree")
        
        n = len(clusters)
        i = 0
        
        for element in clusters:
            if (i<n):
                j = (i+1)/n 
                sys.stdout.write('\r')
                sys.stdout.write("[%-20s] %d%%" % ('='*int(20*j), 100*j))
                sys.stdout.flush()
                i += 1
            final_chain.Add(saving_directory+element)
            
        if not os.path.exists(PATH+name+"/bins"):
            os.makedirs(PATH+name+"/bins")
            
        saving_directory = PATH+name+"/bins/"
        
        print("\n\nCreating the final files")
        
        split_in_bins_and_save(final_chain, saving_directory, run, useful_variables, particle)
        
        print("\nProcess completed for "+name)
        
    #Creation of the total Year Data files 
    print("\nCreation of the total year data files")
    
    mother_particle = ["Xic","Lc"]
    
    BASE_PATH = TUPLE_PATH+script_run+"/"
    
    n = len(os.listdir(BASE_PATH))
    p = 0 
    
    for i in os.listdir(BASE_PATH):

        if (p < n):
            j = (p + 1) / n
            sys.stdout.write('\r')
            sys.stdout.write("[%-20s] %d%%" % ('='*int(20*j), 100*j))
            sys.stdout.flush()
            p += 1

        if "description" in i:
            continue
            
        if "cluster" in i:
            continue
            
        for particle in mother_particle:
                
            totfile = ROOT.TFile.Open(BASE_PATH+i+"/{}_total.root".format(particle),"RECREATE")
            totfile.cd()
            
            tree = TChain("DecayTree")
                
            for j in os.listdir(BASE_PATH+i+"/bins/ybins"):
                if particle in j:
                    tree.Add(BASE_PATH+i+"/bins/ybins/"+j)
            
            tree.Write()
            totfile.Close()
            
            del totfile

    print("\nDeleting clusters")

    os.system("rm -rf {}*_clusters".format(BASE_PATH))
            
    print("\nNTuple preparation is done")