Code example #1
def main():
    set_root_defaults()
    options, _ = parse_arguments()
    variable = "ST"
    config_7TeV = XSectionConfig(7)
    config_8TeV = XSectionConfig(8)
    path_to_JSON_7TeV = options.path + "/7TeV/" + variable + "/"
    path_to_JSON_8TeV = options.path + "/8TeV/" + variable + "/"
    # we need the generators
    # and the central samples + errors
    results_7TeV, _ = read_xsection_measurement_results(
        path_to_JSON_7TeV,
        variable,
        bin_edges,
        category="central",
        channel="combined",
        k_values={"combined": config_7TeV.k_values_combined},
    )
    results_8TeV, _ = read_xsection_measurement_results(
        path_to_JSON_8TeV,
        variable,
        bin_edges,
        category="central",
        channel="combined",
        k_values={"combined": config_8TeV.k_values_combined},
    )
    plot_results(results_7TeV, results_8TeV, variable)
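
The parse_arguments() helper called at the top of this example (and of examples #3 and #4 below) is not shown here; bin_edges is presumably the dictionary imported from config.variable_binning, as in the imports of code example #15. A minimal sketch of such a helper, assuming it only needs to supply options.path, could look like the following (the flag name and default path are assumptions for illustration):

from optparse import OptionParser

def parse_arguments():
    # Sketch only: the real helper may define more options; here it merely
    # provides options.path pointing at the folder with the JSON results.
    parser = OptionParser()
    parser.add_option("-p", "--path", dest="path",
                      default='data/absolute_eta_M3_angle_bl/',
                      help="set path to JSON files")
    return parser.parse_args()
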
Code example #2
def main():
    set_root_defaults()
    # prevent directory ownership of ROOT histograms (python does the garbage
    # collection)
    parser = OptionParser()
    parser.add_option("-n", "--n_toy_mc",
                      dest="n_toy_mc", default=300,
                      help="number of toy MC to create", type=int)
    parser.add_option("-o", "--output",
                      dest="output_folder", default='data/toy_mc/',
                      help="output folder for toy MC")
#     parser.add_option("-v", "--variable", dest="variable", default='MET',
#                       help="set the variable to analyse (MET, HT, ST, MT, WPT)")
    parser.add_option("-m", "--metType", dest="metType", default='type1',
                      help="set MET type for analysis of MET, ST or MT")
    parser.add_option("-c", "--centre-of-mass-energy", dest="CoM", default=13,
                      help="set the centre of mass energy for analysis. Default = 13 [TeV]", type=int)
    parser.add_option('-V', '--verbose', dest="verbose", action="store_true",
                      help="Print the event number, reco and gen variable value")

    (options, _) = parser.parse_args()

    measurement_config = XSectionConfig(options.CoM)
#     variable = options.variable
    met_type = measurement_config.translate_options[options.metType]

    create_toy_mc(input_file=measurement_config.unfolding_central,
                  output_folder=options.output_folder,
#                   variable=variable,
                  n_toy=options.n_toy_mc,
                  centre_of_mass=options.CoM,
                  ttbar_xsection=measurement_config.ttbar_xsection,
                  met_type=met_type)
Code example #3
def main():
    set_root_defaults()
    options, _ = parse_arguments()
    variable = 'ST'
    config_7TeV = XSectionConfig(7)
    config_8TeV = XSectionConfig(8)
    path_to_JSON_7TeV = options.path + '/7TeV/' + variable + '/'
    path_to_JSON_8TeV = options.path + '/8TeV/' + variable + '/'
    # we need the generators
    # and the central samples + errors
    results_7TeV, _ = read_xsection_measurement_results(
        path_to_JSON_7TeV,
        variable,
        bin_edges,
        category='central',
        channel='combined',
        k_values={'combined': config_7TeV.k_values_combined})
    results_8TeV, _ = read_xsection_measurement_results(
        path_to_JSON_8TeV,
        variable,
        bin_edges,
        category='central',
        channel='combined',
        k_values={'combined': config_8TeV.k_values_combined})
    plot_results(results_7TeV, results_8TeV, variable)
Code example #4
def main():
    set_root_defaults()
    options, _ = parse_arguments()
    variable = 'ST'
    config_7TeV = XSectionConfig(7)
    config_8TeV = XSectionConfig(8)
    path_to_JSON_7TeV = options.path + '/7TeV/' + variable + '/'
    path_to_JSON_8TeV = options.path + '/8TeV/' + variable + '/'
    # we need the generators
    # and the central samples + errors
    results_7TeV, _ = read_xsection_measurement_results( path_to_JSON_7TeV,
                                                     variable,
                                                     bin_edges_full,
                                                     category = 'central',
                                                     channel = 'combined',
                                                     k_values = {
                                                                 'combined': config_7TeV.k_values_combined}
                                                     )
    results_8TeV, _ = read_xsection_measurement_results( path_to_JSON_8TeV,
                                                     variable,
                                                     bin_edges_full,
                                                     category = 'central',
                                                     channel = 'combined',
                                                     k_values = {
                                                                 'combined': config_8TeV.k_values_combined}
                                                     )
    plot_results(results_7TeV, results_8TeV, variable)
Code example #5
def main():
    '''
        Main function for this script
    '''
    set_root_defaults(msg_ignore_level=3001)

    parser = OptionParser()
    parser.add_option("-o", "--output",
                      dest="output_folder", default='data/pull_data/',
                      help="output folder for pull data files")
    parser.add_option("-n", "--n_input_mc", type=int,
                      dest="n_input_mc", default=100,
                      help="number of toy MC used for the tests")
    parser.add_option("-k", "--k_value", type=int,
                      dest="k_value", default=3,
                      help="k-value for SVD unfolding")
    parser.add_option("--tau", type='float',
                      dest="tau_value", default=-1.,
                      help="tau-value for SVD unfolding")
    parser.add_option("-m", "--method", type='string',
                      dest="method", default='RooUnfoldSvd',
                      help="unfolding method")
    parser.add_option("-f", "--file", type='string',
                      dest="file", default='data/toy_mc/unfolding_toy_mc.root',
                      help="file with toy MC")
    parser.add_option("-v", "--variable", dest="variable", default='MET',
                      help="set the variable to analyse (MET, HT, ST, MT, WPT)")
    parser.add_option("-s", "--centre-of-mass-energy", dest="CoM", default=13,
                      help='''set the centre of mass energy for analysis.
                      Default = 13 [TeV]''', type=int)
    parser.add_option("-c", "--channel", type='string',
                      dest="channel", default='combined',
                      help="channel to be analysed: electron|muon|combined")

    parser.add_option("--offset_toy_mc", type=int,
                      dest="offset_toy_mc", default=0,
                      help="offset of the toy MC used to response matrix")
    parser.add_option("--offset_toy_data", type=int,
                      dest="offset_toy_data", default=0,
                      help="offset of the toy MC used as data for unfolding")
    (options, _) = parser.parse_args()

    centre_of_mass = options.CoM
    make_folder_if_not_exists(options.output_folder)

    # set the number of toy MC for error calculation
    k_value = options.k_value
    tau_value = options.tau_value
    use_n_toy = options.n_input_mc
    offset_toy_mc = options.offset_toy_mc
    offset_toy_data = options.offset_toy_data
    method = options.method
    variable = options.variable

    create_unfolding_pull_data(options.file, method, options.channel,
                               centre_of_mass, variable, use_n_toy, use_n_toy,
                               options.output_folder, offset_toy_mc,
                               offset_toy_data, k_value, tau_value)
Code example #6
def main():
    '''
        Main function for this script
    '''
    set_root_defaults(msg_ignore_level=3001)

    parser = OptionParser()
    parser.add_option("-o", "--output",
                      dest="output_folder", default='data/pull_data/',
                      help="output folder for pull data files")
    parser.add_option("-n", "--n_input_mc", type=int,
                      dest="n_input_mc", default=100,
                      help="number of toy MC used for the tests")
    parser.add_option("--tau", type='float',
                      dest="tau_value", default=-1.,
                      help="tau-value for SVD unfolding")
    parser.add_option("-m", "--method", type='string',
                      dest="method", default='TUnfold',
                      help="unfolding method")
    parser.add_option("-f", "--file", type='string',
                      dest="file", default='data/toy_mc/unfolding_toy_mc.root',
                      help="file with toy MC")
    parser.add_option("-v", "--variable", dest="variable", default='MET',
                      help="set the variable to analyse (defined in config/variable_binning.py)")
    parser.add_option("--com", "--centre-of-mass-energy", dest="CoM", default=13,
                      help='''set the centre of mass energy for analysis.
                      Default = 13 [TeV]''', type=int)
    parser.add_option("-c", "--channel", type='string',
                      dest="channel", default='combined',
                      help="channel to be analysed: electron|muon|combined")
    parser.add_option("-s", type='string',
                      dest="sample", default='madgraph',
                      help="channel to be analysed: electron|muon|combined")

    (options, _) = parser.parse_args()

    centre_of_mass = options.CoM
    measurement_config = XSectionConfig(centre_of_mass)
    make_folder_if_not_exists(options.output_folder)

    use_n_toy = options.n_input_mc
    method = options.method
    variable = options.variable
    sample = options.sample
    tau_value = options.tau_value

    create_unfolding_pull_data(options.file, method, options.channel,
                               centre_of_mass, variable,
                               sample,
                               measurement_config.unfolding_central,
                               use_n_toy,
                               options.output_folder,
                               tau_value)
Code example #7
    def run(self):
        '''
            Run the workload
        '''
        import src.unfolding_tests.create_unfolding_pull_data as pull
        from tools.ROOT_utils import set_root_defaults
        set_root_defaults(msg_ignore_level=3001)
        pulls_file_name = pull.create_unfolding_pull_data(self.input_file_name,
                                                          self.method,
                                                          self.channel_to_run,
                                                          self.centre_of_mass,
                                                          self.variable_to_run,
                                                          self.sample_to_run,
                                                          self.response,
                                                          self.n_toy_data,
                                                          self.output_folder,
                                                          self.tau_value_to_run
                                                          )
Code example #8
def create_unfolding_pull_data(input_file_name, method, channel,
                               centre_of_mass, variable,
                               sample,
                               responseFile,
                               n_toy_data,
                               output_folder,
                               tau_value,
                               run_matrix=None):
    '''
        Sets up all variables for check_multiple_data_multiple_unfolding
    '''
    set_root_defaults(msg_ignore_level=3001)
    timer = Timer()
    input_file = File(input_file_name, 'read')
    folder_template = '{path}/{centre_of_mass}TeV/{variable}/{sample}/'

    msg_template = 'Producing unfolding pull data for {variable},'
    msg_template += ' tau-value {value}'
    inputs = {
        'path': output_folder,
        'centre_of_mass': centre_of_mass,
        'variable': variable,
        'sample': sample,
        'value': round(tau_value,4),
    }

    h_response = get_response_histogram(responseFile, variable, channel)
    output_folder = folder_template.format(**inputs)
    make_folder_if_not_exists(output_folder)
    print(msg_template.format(**inputs))
    print('Output folder: {0}'.format(output_folder))
    print('Response here :', h_response)
    output_file_name = check_multiple_data_multiple_unfolding(
                                input_file, method, channel, variable, 
                                h_response,
                                n_toy_data,
                                output_folder, 
                                tau_value,
                            )
    print('Runtime', timer.elapsed_time())

    return output_file_name
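
The get_response_histogram() helper called above is not defined in any of these snippets. A minimal sketch is given below; the folder and histogram names inside the ROOT file are assumptions for illustration, and only the rootpy File usage mirrors the surrounding code.

from rootpy.io import File

def get_response_histogram(responseFile, variable, channel):
    # Sketch only: the internal path 'unfolding_.../response_without_fakes'
    # is an assumed naming convention, not taken from the snippets above.
    input_file = File(responseFile, 'read')
    path = 'unfolding_{0}_analyser_{1}_channel/response_without_fakes'.format(
        variable, channel)
    h_response = input_file.Get(path).Clone()
    h_response.SetDirectory(0)  # keep the histogram alive after the file closes
    input_file.Close()
    return h_response
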
Code example #9
def main():
    set_root_defaults()
    # prevent directory ownership of ROOT histograms (python does the garbage
    # collection)
    parser = OptionParser()
    parser.add_option("-n", "--n_toy_mc",
                      dest="n_toy_mc", default=300,
                      help="number of toy MC to create", type=int)
    parser.add_option("-o", "--output",
                      dest="output_folder", default='data/toy_mc/',
                      help="output folder for toy MC")
    parser.add_option("-s", dest="sample", default='madgraph',
                        help='set underlying sample for creating the toy MC.  Possible options : madgraph, powhegPythia, amcatnlo.  Default is madgraph')
    parser.add_option("-c", "--centre-of-mass-energy", dest="CoM", default=13,
                      help="set the centre of mass energy for analysis. Default = 13 [TeV]", type=int)
    parser.add_option('-V', '--verbose', dest="verbose", action="store_true",
                      help="Print the event number, reco and gen variable value")

    (options, _) = parser.parse_args()

    measurement_config = XSectionConfig(options.CoM)

    input_file = None
    if options.sample == 'madgraph':
        input_file = measurement_config.unfolding_madgraphMLM
    elif options.sample == 'powhegPythia':
        input_file = measurement_config.unfolding_central
    elif options.sample == 'amcatnlo':
        input_file = measurement_config.unfolding_amcatnlo


    create_toy_mc(input_file=input_file,
                  sample=options.sample,
                  output_folder=options.output_folder,
#                   variable=variable,
                  n_toy=options.n_toy_mc,
                  centre_of_mass=options.CoM,
                  ttbar_xsection=measurement_config.ttbar_xsection
                  )
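
The if/elif chain above that maps the -s option to an input file can equally be written as a dictionary lookup (a small sketch using the same configuration attributes; an unknown sample name then fails with a KeyError instead of silently leaving input_file as None):

    # Equivalent to the if/elif chain above, inside main()
    input_files = {
        'madgraph': measurement_config.unfolding_madgraphMLM,
        'powhegPythia': measurement_config.unfolding_central,
        'amcatnlo': measurement_config.unfolding_amcatnlo,
    }
    input_file = input_files[options.sample]
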
Code example #10
    plot_data_total.Sumw2()
    plot_ttbar_passed.Sumw2()
    plot_ttbar_total.Sumw2()
    
    bin_edge_array = get_binning(trigger_under_study)
    n_bins = len(bin_edge_array) - 1
    
    plot_data_passed = asrootpy(plot_data_passed.Rebin(n_bins, 'truth', bin_edge_array))
    plot_data_total = asrootpy(plot_data_total.Rebin(n_bins, 'truth', bin_edge_array))
    plot_ttbar_passed = asrootpy(plot_ttbar_passed.Rebin(n_bins, 'truth', bin_edge_array))
    plot_ttbar_total = asrootpy(plot_ttbar_total.Rebin(n_bins, 'truth', bin_edge_array))
    
    return plot_data_passed, plot_data_total, plot_ttbar_passed, plot_ttbar_total

if __name__ == '__main__':
    set_root_defaults()
    
    CMS.title['fontsize'] = 40
    CMS.x_axis_title['fontsize'] = 50
    CMS.y_axis_title['fontsize'] = 50
    CMS.axis_label_major['labelsize'] = 40
    CMS.axis_label_minor['labelsize'] = 40
    CMS.legend_properties['size'] = 40
    
    output_formats = ['png', 'pdf']
    output_folder = '/storage/TopQuarkGroup/results/plots/Trigger/'
    
    triggers = [
                'HLT_Ele25_CaloIdVT_TrkIdT_TriCentralJet30',
                'HLT_Ele25_CaloIdVT_CaloIsoT_TrkIdT_TrkIsoT_TriCentralJet30',
                'HLT_Ele25_CaloIdVT_CaloIsoT_TrkIdT_TrkIsoT_TriCentralPFJet30',
Code example #11
            m[xbin, ybin] = probMatrix.GetBinContent( xbin, ybin)
    svd = TDecompSVD( m )
    svd.Decompose()
    svd.Print()
    sig = svd.GetSig()
    sig.Print()
    nSig = len(sig)
    sigmaMax = sig[0]
    sigmaMin = sig[nSig-2]
    condition = sigmaMax / max(0,sigmaMin)
    # condition = 1
    print condition
    return condition

def print_results_to_screen(result_dict):
    '''
        Print the results to the screen
        Can copy straight into config
    '''

    for com in result_dict.keys():
        for channel in result_dict[com].keys():
            print "\nCHANNEL : ", channel
            for variable in result_dict[com][channel].keys():
                print '"{0}" : {1},'.format(variable, result_dict[com][channel][variable])
        

if __name__ == '__main__':
    set_root_defaults( set_batch = True, msg_ignore_level = 3001 )
    main()
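
As a cross-check of the TDecompSVD-based calculation above, the condition number can also be computed with numpy (a sketch; the probability matrix is assumed to be available as a plain 2D array, and note that the snippet above divides by the second-smallest singular value rather than the smallest):

import numpy as np

def condition_number(matrix):
    # standard 2-norm condition number: largest over smallest singular value
    return np.linalg.cond(np.asarray(matrix))
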
Code example #12
def main():
    "Main Function"
    set_root_defaults()

    parser = OptionParser(
        "Script to check progress of CRAB jobs in creating nTuples. Run as: python check_CRAB_jobs.py -p projectFolder -n numberOfJobs >&check.log &"
    )
    parser.add_option("-p",
                      "--projectFolder",
                      dest="projectFolder",
                      help="specify project")
    parser.add_option("-n",
                      "--numberOfJobs",
                      dest="numberOfJobs",
                      help="specify project")

    (options, _) = parser.parse_args()

    #make sure the project option has been specified
    if not options.projectFolder:
        parser.error(
            'Please enter a project folder as the -p option: /gpfs_phys/storm/cms/user/...'
        )

    #normalise the projectFolder filepath and add a "/" at the end
    projectFolder = os.path.normpath(options.projectFolder) + os.sep

    #list the items in the CRAB output folder on the Bristol Storage Element.
    storageElementList = glob.glob(projectFolder + "*.root")
    if storageElementList:
        pass
    else:
        print "Location Error: Specified project folder does not exist on the Bristol Storage Element, signifying that the CRAB job has probably not started running yet or you forgot to include the full path /gpfs_storm/cms/user/..."
        sys.exit()

    #The following section has been commented out because if it is the first time this script is being run in a session, a grid password will be needed which will cause the script
    #to not be able to finish. Since the only purpose of this following CRAB command is to obtain the number of jobs, for the time being the number of jobs has been entered as an option to
    #the script which should be manually entered by the user.

    #get the status of the crab jobs and extract the number of output files expected on the Bristol Storage Element.


#	projectFolder = options.projectFolder.split("/")[6]
#	status = commands.getstatusoutput("crab -status -c " + projectFolder)
#	statusFormatted = status[1].split("\n")
#	for line in statusFormatted:
#		if "crab:" in line and "Total Jobs" in line:
#			words = line.split()
#			numberOfJobs = int(words[1])

#Now, check that all job root files are present in Bristol Storage Element folder:

    missingOrBrokenTemp = []
    missingOrBroken = []
    goodFilesTemp = []
    goodFiles = []
    presentJobList = []
    duplicatesToDelete = []

    #make list of all the job numbers which should be present.
    jobList = range(1, int(options.numberOfJobs) + 1)

    #try opening all files in Bristol Storage Element folder and add to missing list if they cannot be opened.
    for f in storageElementList:
        #make list of all jobs numbers in the Bristol Storage Element folder
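        # the [-4] index assumes CRAB output names of the form
        # <name>_<jobNumber>_<retry>_<hash>.root once split on non-word characters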
        jobNumber = int((re.split('[\W+,_]', f))[-4])
        presentJobList.append(jobNumber)

        #check if files are corrupt or not
        try:
            rootFile = File(f)
            rootFile.Close()
        except:
            print "Adding Job Number", jobNumber, "to missingOrBroken list because file is corrupted."
            missingOrBrokenTemp.append(jobNumber)
        else:
            goodFilesTemp.append(jobNumber)

    #now add any absent files to the missing list:
    for job in jobList:
        if job not in presentJobList:
            print "Adding Job Number", job, "to missingOrBroken list because it doesn't exist on the Storage Element."
            missingOrBrokenTemp.append(job)

    #Remove any job numbers from missingOrBroken which appear in both goodFiles and missingOrBroken lists
    for job in missingOrBrokenTemp:
        if job not in goodFilesTemp:
            missingOrBroken.append(job)
        else:
            print "Removing", job, "from missingOrBroken list because there is at least one duplicate good output file."

    #Remove any job numbers from goodFiles which appear more than once in goodFiles
    for job in goodFilesTemp:
        if job not in goodFiles:
            goodFiles.append(job)
        else:
            duplicatesToDelete.append(job)

    print "\n The following", len(
        goodFiles
    ), "good output files were found in the Bristol Storage Element folder:"
    print str(goodFiles).replace(" ", "")
    print "\n The following", len(
        duplicatesToDelete
    ), "job numbers have multiple good files on the Bristol Storage Element folder which can be deleted:"
    print str(duplicatesToDelete).replace(" ", "")
    print "\n The following", len(
        missingOrBroken
    ), "job numbers could not be found in the Bristol Storage Element folder:"
    print str(missingOrBroken).replace(" ", "")
Code example #13
        upper_edge_pt = jet_pt_bins[i+1]
        pt_bin_range = 'pt_' + str(lower_edge_pt) + '_' + str(upper_edge_pt)
        dictionary[pt_bin_range] = {}

        data_efficiency_in_bin = data_efficiency.GetEfficiency( i+1 )
        data_efficiency_in_bin_error_up = data_efficiency.GetEfficiencyErrorUp( i+1 )
        data_efficiency_in_bin_error_down = data_efficiency.GetEfficiencyErrorLow( i+1 )
        dictionary[pt_bin_range]['data'] = { 'efficiency' : data_efficiency_in_bin,
                                                  'err_up' : data_efficiency_in_bin_error_up,
                                                  'err_down' : data_efficiency_in_bin_error_down,
                                                }

    pickle.dump( dictionary, output_pickle )

if __name__ == '__main__':
    set_root_defaults( msg_ignore_level = 3001 )
    parser = OptionParser()
    parser.add_option("-p", "--path", dest="path", default='/hdfs/TopQuarkGroup/trigger_BLT_ntuples/',
                  help="set path to input BLT ntuples")
    parser.add_option("-o", "--output_folder", dest="output_plots_folder", default='plots/2011/hadron_leg/',
                  help="set path to save tables")

    (options, args) = parser.parse_args()
    input_path = options.path
    output_folder = options.output_plots_folder
    output_pickle_folder = './pickle_files/'
    channel = 'electron'
    centre_of_mass = 7

    make_folder_if_not_exists(output_folder)
    make_folder_if_not_exists(output_pickle_folder)
Code example #14
def main():
    '''
        Main function for this script
    '''
    set_root_defaults(msg_ignore_level=3001)

    parser = OptionParser()
    parser.add_option("-o",
                      "--output",
                      dest="output_folder",
                      default='data/pull_data/',
                      help="output folder for pull data files")
    parser.add_option("-n",
                      "--n_input_mc",
                      type=int,
                      dest="n_input_mc",
                      default=100,
                      help="number of toy MC used for the tests")
    parser.add_option("-k",
                      "--k_value",
                      type=int,
                      dest="k_value",
                      default=3,
                      help="k-value for SVD unfolding")
    parser.add_option("--tau",
                      type='float',
                      dest="tau_value",
                      default=-1.,
                      help="tau-value for SVD unfolding")
    parser.add_option("-m",
                      "--method",
                      type='string',
                      dest="method",
                      default='RooUnfoldSvd',
                      help="unfolding method")
    parser.add_option("-f",
                      "--file",
                      type='string',
                      dest="file",
                      default='data/toy_mc/unfolding_toy_mc.root',
                      help="file with toy MC")
    parser.add_option(
        "-v",
        "--variable",
        dest="variable",
        default='MET',
        help="set the variable to analyse (MET, HT, ST, MT, WPT)")
    parser.add_option("-s",
                      "--centre-of-mass-energy",
                      dest="CoM",
                      default=13,
                      help='''set the centre of mass energy for analysis.
                      Default = 13 [TeV]''',
                      type=int)
    parser.add_option("-c",
                      "--channel",
                      type='string',
                      dest="channel",
                      default='combined',
                      help="channel to be analysed: electron|muon|combined")

    parser.add_option("--offset_toy_mc",
                      type=int,
                      dest="offset_toy_mc",
                      default=0,
                      help="offset of the toy MC used to response matrix")
    parser.add_option("--offset_toy_data",
                      type=int,
                      dest="offset_toy_data",
                      default=0,
                      help="offset of the toy MC used as data for unfolding")
    (options, _) = parser.parse_args()

    centre_of_mass = options.CoM
    make_folder_if_not_exists(options.output_folder)

    # set the number of toy MC for error calculation
    k_value = options.k_value
    tau_value = options.tau_value
    use_n_toy = options.n_input_mc
    offset_toy_mc = options.offset_toy_mc
    offset_toy_data = options.offset_toy_data
    method = options.method
    variable = options.variable

    create_unfolding_pull_data(options.file, method, options.channel,
                               centre_of_mass, variable, use_n_toy, use_n_toy,
                               options.output_folder, offset_toy_mc,
                               offset_toy_data, k_value, tau_value)
Code example #15
from optparse import OptionParser
from config.latex_labels import b_tag_bins_latex, samples_latex, fit_variables_latex, fit_variables_units_latex, variables_latex, control_plots_latex
from config.variable_binning import fit_variable_bin_edges, bin_edges, control_plots_bins
from config import XSectionConfig
from tools.file_utilities import read_data_from_JSON, make_folder_if_not_exists
from tools.plotting import make_data_mc_comparison_plot, Histogram_properties, \
make_control_region_comparison
from tools.hist_utilities import prepare_histograms
from tools.ROOT_utils import get_histograms_from_files, set_root_defaults, get_histograms_from_trees

channels = ['EPlusJets', 'MuPlusJets']
if __name__ == '__main__':

    set_root_defaults()
    parser = OptionParser()
    parser.add_option("-p",
                      "--path",
                      dest="path",
                      default='data/absolute_eta_M3_angle_bl/',
                      help="set path to JSON files")
    parser.add_option("-o",
                      "--output_folder",
                      dest="output_folder",
                      default='plots/control_plots/',
                      help="set path to save plots")
    parser.add_option(
        "-c",
        "--centre-of-mass-energy",
        dest="CoM",
        default=13,
        type=int,
Code example #16
def main():
	"Main Function"
	set_root_defaults()

	parser = OptionParser("Script to check progress of CRAB jobs in creating nTuples. Run as: python check_CRAB_jobs.py -p projectFolder -n numberOfJobs >&check.log &")
	parser.add_option("-p", "--projectFolder", dest="projectFolder", help="specify project")
	parser.add_option("-n", "--numberOfJobs", dest="numberOfJobs",
		help="specify project")

	(options, _) = parser.parse_args()

	#make sure the project option has been specified
	if not options.projectFolder:
		parser.error('Please enter a project folder as the -p option: /gpfs_phys/storm/cms/user/...')

	#normalise the projectFolder filepath and add a "/" at the end
	projectFolder = os.path.normpath(options.projectFolder) + os.sep

	#list the items in the CRAB output folder on the Bristol Storage Element.
	storageElementList=glob.glob(projectFolder + "*.root")
	if storageElementList:
		pass
	else:
		print "Location Error: Specified project folder does not exist on the Bristol Storage Element, signifying that the CRAB job has probably not started running yet or you forgot to include the full path /gpfs_storm/cms/user/..."
		sys.exit()

	#The following section has been commented out because if it is the first time this script is being run in a session, a grid password will be needed which will cause the script
	#to not be able to finish. Since the only purpose of this following CRAB command is to obtain the number of jobs, for the time being the number of jobs has been entered as an option to
	#the script which should be manually entered by the user.

	#get the status of the crab jobs and extract the number of output files expected on the Bristol Storage Element.
#	projectFolder = options.projectFolder.split("/")[6]
#	status = commands.getstatusoutput("crab -status -c " + projectFolder)
#	statusFormatted = status[1].split("\n")
#	for line in statusFormatted:
#		if "crab:" in line and "Total Jobs" in line:
#			words = line.split()
#			numberOfJobs = int(words[1])


	#Now, check that all job root files are present in Bristol Storage Element folder:

	missingOrBrokenTemp = []
	missingOrBroken = []
	goodFilesTemp = []
	goodFiles = []
	presentJobList = []
	duplicatesToDelete = []

	#make list of all the job numbers which should be present.
	jobList = range(1,int(options.numberOfJobs)+1)

	#try opening all files in Bristol Storage Element folder and add to missing list if they cannot be opened.
	for f in storageElementList:
		#make list of all jobs numbers in the Bristol Storage Element folder
		jobNumber = int((re.split('[\W+,_]',f))[-4])
		presentJobList.append(jobNumber)

		#check if files are corrupt or not
		try:
			rootFile = File(f)
			rootFile.Close()
		except:
			print "Adding Job Number", jobNumber, "to missingOrBroken list because file is corrupted."
			missingOrBrokenTemp.append(jobNumber)
		else:
			goodFilesTemp.append(jobNumber)

	#now add any absent files to the missing list:
	for job in jobList:
		if job not in presentJobList:
			print "Adding Job Number", job, "to missingOrBroken list because it doesn't exist on the Storage Element."
			missingOrBrokenTemp.append(job)

	#Remove any job numbers from missingOrBroken which appear in both goodFiles and missingOrBroken lists
	for job in missingOrBrokenTemp:
		if job not in goodFilesTemp:
			missingOrBroken.append(job)
		else:
			print "Removing", job, "from missingOrBroken list because there is at least one duplicate good output file."

	#Remove any job numbers from goodFiles which appear more than once in goodFiles
	for job in goodFilesTemp:
		if job not in goodFiles:
			goodFiles.append(job)
		else:
			duplicatesToDelete.append(job)

	print "\n The following", len(goodFiles), "good output files were found in the Bristol Storage Element folder:"
	print str(goodFiles).replace(" ", "")  
	print "\n The following", len(duplicatesToDelete), "job numbers have multiple good files on the Bristol Storage Element folder which can be deleted:"
	print str(duplicatesToDelete).replace(" ", "")
	print "\n The following", len(missingOrBroken), "job numbers could not be found in the Bristol Storage Element folder:"
	print str(missingOrBroken).replace(" ", "")
Code example #17
def generate_toy(n_toy, n_input_mc, config, output_folder, start_at=0, split=1):
    from progressbar import Percentage, Bar, ProgressBar, ETA
    set_root_defaults()
    genWeight = '( EventWeight * {0})'.format(config.luminosity_scale)
    file_name = config.ttbar_category_templates_trees['central']
    make_folder_if_not_exists(output_folder)
    outfile = get_output_file_name(
        output_folder, n_toy, start_at, n_input_mc, config.centre_of_mass_energy)

    variable_bins = bin_edges.copy()
    
    widgets = ['Progress: ', Percentage(), ' ', Bar(),
           ' ', ETA()]
    
    with root_open(file_name, 'read') as f_in, root_open(outfile, 'recreate') as f_out:
        tree = f_in.Get("TTbar_plus_X_analysis/Unfolding/Unfolding")
        n_events = tree.GetEntries()
        print("Number of entries in tree : ", n_events)
        for channel in ['electron', 'muon']:
            print('Channel :', channel)
            gen_selection, gen_selection_vis = '', ''
            if channel == 'muon':
                gen_selection = '( isSemiLeptonicMuon == 1 )'
                gen_selection_vis = '( isSemiLeptonicMuon == 1 && passesGenEventSelection )'
            else:
                gen_selection = '( isSemiLeptonicElectron == 1 )'
                gen_selection_vis = '( isSemiLeptonicElectron == 1 && passesGenEventSelection )'

            selection = '( {0} ) * ( {1} )'.format(genWeight, gen_selection)
            selection_vis = '( {0} ) * ( {1} )'.format(genWeight,
                                                       gen_selection_vis)
            weighted_entries = get_weighted_entries(tree, selection)
            weighted_entries_vis = get_weighted_entries(tree, selection_vis)
            pbar = ProgressBar(widgets=widgets, maxval=n_input_mc).start()

            toy_mc_sets = []
            for variable in ['MET', 'HT', 'ST', 'WPT']:  # variable_bins:
                toy_mc = ToySet(f_out, variable, channel, n_toy)
                toy_mc_sets.append(toy_mc)
            count = 0
            for event in tree:
                # generate 300 weights for each event
                mc_weights = get_mc_weight(weighted_entries, n_toy)
                mc_weights_vis = get_mc_weight(weighted_entries_vis, n_toy)

                if count >= n_input_mc:
                    break
                count += 1
                if count < start_at:
                    continue
#                 weight = event.EventWeight * config.luminosity_scale
#                 # rescale to N input events
#                 weight *= n_events / n_input_mc / split
                weight = 1

                for toy_mc in toy_mc_sets:
                    toy_mc.fill(event, weight, mc_weights, mc_weights_vis)
                if count % 1000 == 1:
                    pbar.update(count)
                    print('Processed {0} events'.format(count))
            pbar.finish()
            for toy_mc in toy_mc_sets:
                toy_mc.write()
    print('Toy MC was saved to file:', outfile)
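
The get_mc_weight() helper used above is not shown. A plausible sketch, under the assumption that each toy weight is a Poisson fluctuation of the total weighted event count normalised by that count (so the weights average to one), is:

from numpy.random import poisson

def get_mc_weight(weighted_entries, n_toy):
    # Sketch only: one Poisson-fluctuated, normalised weight per toy experiment.
    return poisson(weighted_entries, n_toy) / float(weighted_entries)
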
Code example #18
        normalised_xsection['massup'] = massup_normalised_xsection

    file_template = '{path_to_JSON}/{category}/normalised_xsection_{channel}_{method}.txt'
    filename = file_template.format(
                path_to_JSON = path_to_JSON,
                category = category,
                channel = channel,
                method = method,
                )

    if normalise_to_one:
        filename = filename.replace( 'normalised_xsection', 'normalised_to_one_xsection' )
    write_data_to_JSON( normalised_xsection, filename )

if __name__ == '__main__':
    set_root_defaults( msg_ignore_level = 3001 )
    # setup
    parser = OptionParser()
    parser.add_option( "-p", "--path", dest = "path", default = 'data/M3_angle_bl/',
                      help = "set path to JSON files" )
    parser.add_option( "-v", "--variable", dest = "variable", default = 'MET',
                      help = "set the variable to analyse (MET, HT, ST, MT)" )
    parser.add_option( "-b", "--bjetbin", dest = "bjetbin", default = '2m',
                      help = "set b-jet multiplicity for analysis. Options: exclusive: 0-3, inclusive (N or more): 0m, 1m, 2m, 3m, 4m" )
    parser.add_option( "-m", "--metType", dest = "metType", default = 'type1',
                      help = "set MET type for analysis of MET, ST or MT" )
    parser.add_option( "-f", "--load_fakes", dest = "load_fakes", action = "store_true",
                      help = "Load fakes histogram and perform manual fake subtraction in TSVDUnfold" )
    parser.add_option( "-u", "--unfolding_method", dest = "unfolding_method", default = 'RooUnfoldSvd',
                      help = "Unfolding method: RooUnfoldSvd (default), TSVDUnfold, RooUnfoldTUnfold, RooUnfoldInvert, RooUnfoldBinByBin, RooUnfoldBayes" )
    parser.add_option( "-e", "--error_treatment", type = 'int',
Code example #19
def main():
    set_root_defaults()
    # prevent directory ownership of ROOT histograms (python does the garbage collection)
    TH1F.AddDirectory( False )
    parser = OptionParser()
    parser.add_option( "-n", "--n_toy_mc",
                      dest = "n_toy_mc", default = 300,
                      help = "number of toy MC to create", type = int )
    parser.add_option( "-o", "--output",
                      dest = "output_folder", default = 'data/toy_mc/',
                      help = "output folder for toy MC" )
    parser.add_option( "-v", "--variable", dest = "variable", default = 'MET',
                      help = "set the variable to analyse (MET, HT, ST, MT, WPT)" )
    parser.add_option( "-m", "--metType", dest = "metType", default = 'type1',
                      help = "set MET type for analysis of MET, ST or MT" )
    parser.add_option( "-c", "--centre-of-mass-energy", dest = "CoM", default = 8,
                      help = "set the centre of mass energy for analysis. Default = 8 [TeV]", type = int )
    parser.add_option( '-V', '--verbose', dest = "verbose", action = "store_true",
                      help = "Print the event number, reco and gen variable value" )

    ( options, _ ) = parser.parse_args()
    measurement_config = XSectionConfig( options.CoM )

    centre_of_mass = options.CoM
    ttbar_xsection = measurement_config.ttbar_xsection
    variable = options.variable
    met_type = measurement_config.translate_options[options.metType]
    n_toy_mc = options.n_toy_mc
    make_folder_if_not_exists( options.output_folder )
    
    # get histograms
    input_file_hists = File( measurement_config.unfolding_madgraph )
    # define output file
    out_file_template = '%s/toy_mc_%s_N_%d_%dTeV.root'
    out_file_name = out_file_template % (options.output_folder, variable, n_toy_mc, centre_of_mass)
    output = File( out_file_name, 'recreate' )
    
    for channel in ['electron', 'muon']:
        # first get the weights
        h_truth, h_measured, h_response, _ = get_unfold_histogram_tuple( input_file_hists,
                                                                                       variable,
                                                                                       channel,
                                                                                       met_type,
                                                                                       centre_of_mass,
                                                                                       ttbar_xsection,
                                                                                       load_fakes = False )
        # create directories
        directory = output.mkdir( channel )
        mkdir = directory.mkdir
        cd = directory.cd
        cd()
        # generate toy MC
        for i in range( 1, n_toy_mc + 1 ):
            mkdir( 'toy_%d' % i )
            cd( 'toy_%d' % i )
            # create histograms
            # add tuples (truth, measured, response) of histograms
            truth = generate_toy_MC_from_distribution(h_truth)
            measured = generate_toy_MC_from_distribution(h_measured)
            response = generate_toy_MC_from_2Ddistribution(h_response)
            
            truth.SetName('truth')
            measured.SetName('measured')
            response.SetName('response')
            
            truth.Write()
            measured.Write()
            response.Write()
    output.Write()
    output.Close()
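
generate_toy_MC_from_distribution() and generate_toy_MC_from_2Ddistribution() are not defined in these snippets. A common implementation, assumed in this sketch, draws the content of each bin from a Poisson distribution around the input histogram (the 2D version would loop over both axes in the same way):

from numpy.random import poisson

def generate_toy_MC_from_distribution(histogram):
    # Sketch only: fluctuate each bin independently with a Poisson distribution.
    toy = histogram.Clone()
    for i in range(1, histogram.GetNbinsX() + 1):
        toy.SetBinContent(i, poisson(histogram.GetBinContent(i)))
    return toy
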