def finalizeOutput(job_script_dir, output_dir, flat_ntuples):
    # first, check the output is all there
    pass_check_output = CheckOutput.checkOutput(job_script_dir, output_dir)
    print pass_check_output

    if not pass_check_output:
        print 'did not pass output check, bailiing before merging the files'
        return False
    print 'passed output check... proceding to file merging'

    # next, merge the the files in output directory
    RunHelpers.mergeOutputFiles(output_dir, flat_ntuples)
# Example #2
# 0
def main():
    """
    Grid entry point: read run flags from the command line and run the
    TNT maker over the provided input file list.

    Expected argv: [1] is_data flag, [2] is_full_sim flag, [3] grid input list.
    """
    # NOTE(review): the argv values are kept as raw strings; any non-empty
    # string (including 'False' or '0') is truthy downstream -- confirm the
    # grid submission passes values the callee interprets correctly
    is_data = sys.argv[1]
    is_full_sim = sys.argv[2]
    input_list = sys.argv[3]

    print 'is_data: %s' % is_data
    print 'is_full_sim: %s' % is_full_sim
    print 'input_list: %s' % input_list

    # expand the grid input string into an actual list of input files
    file_list = RunHelpers.getFileListFromGridInput(input_list)

    print 'file_list: %s' % file_list

    runTntMaker( file_list
               , is_data = is_data
               , is_full_sim = is_full_sim
               )
def runBMinusLAnalysis( file_list
                      , is_data
                      , is_egamma_stream
                      , is_full_sim
                      , syst_struct           = None
                      , tree_name             = 'susy'
                      , dsid                  = 1
                      , out_file_special_name = None
                      , is_tnt                = False
                      , fancy_progress_bar    = True
                      , job_num               = 0
                      , total_num_jobs        = 1
                      , total_num_events      = 0
                      , total_num_entries     = 0
                      , sum_mc_event_weights  = 0
                      , out_dir               = './'
                      ):
    """
    Configure and run one PennSusyFrame BMinusLAnalysis job over file_list.

    The input files are chained into a TChain (tree tree_name), a
    BMinusLAnalysis object is configured (data/MC flags, cross-section info
    for MC, systematics, job splitting, output file name, critical cuts and
    cut values) and its event loop is run.

    file_list             -- list of input ROOT file paths
    is_data               -- truthy for data, falsy for MC
    is_egamma_stream      -- data only: egamma vs. muon stream flag
    is_full_sim           -- truthy for full-sim MC
    syst_struct           -- optional systematics configuration object
    tree_name             -- name of the input TTree
    dsid                  -- MC dataset id used for the cross-section lookup
    out_file_special_name -- optional tag folded into the output file name
    is_tnt                -- running over TNTs (triggers num-event lookup)
    fancy_progress_bar    -- NOTE(review): accepted but ignored; the progress
                             bar is hard-coded off below
    job_num               -- this job's index when split across several jobs
    total_num_jobs        -- total number of parallel jobs for this sample
    total_num_events      -- number of generated events (MC bookkeeping)
    total_num_entries     -- total entries across all files (job splitting)
    sum_mc_event_weights  -- sum of MC event weights (MC bookkeeping)
    out_dir               -- directory for the output histogram file

    Returns None; returns early if the cross-section lookup fails.
    """
    # ==============================================================================
    # If the num events are not set and we are running over TNTs, get the total
    #   NumEvents
    print 'total num events: ', total_num_events
    if total_num_events == 0 and is_tnt:
        print 'Getting total num unskimmed events'
        print '  -- this is slow. you should do this once per data set - not for each stream!'
        total_num_events = getTotalNumEvents(file_list, is_tnt)

    print "Adding files to TChain"
    print '  Tree name: ' , tree_name
    t = RunHelpers.getTChain(file_list, tree_name)
    print t
    print t.GetEntries()

    # ==============================================================================
    print 'Creating BMinusLAnalysis object'
    bmla = ROOT.PennSusyFrame.BMinusLAnalysis(t)

    print 'configuring BMinusLAnalysis object'
    # label this process with the special name plus its job index so parallel
    # jobs can be told apart in the logs
    if out_file_special_name is not None:
        process_label = ''.join( [ out_file_special_name
                                 , '__'
                                 , str(job_num)
                                 , '_of_'
                                 , str(total_num_jobs)
                                 ]
                               )
        bmla.setProcessLabel(process_label)

    # NOTE(review): the fancy_progress_bar argument is ignored here
    bmla.setFancyProgressBar(False)

    # set is data or MC
    #   if MC, we need to set various other things like cross section, k-factor, ...
    if is_data:
        bmla.setIsData()

        if is_egamma_stream:
            bmla.setIsEgammaStream()
        else:
            bmla.setIsMuonStream()
    else:
        bmla.setIsMC()

        # abort the job entirely if the cross section for this dsid is unknown
        xsec_dict = CrossSectionReader.getCrossSection(dsid)
        if xsec_dict is None:
            return
        bmla.setCrossSection(xsec_dict['xsec'])
        bmla.setKFactor(     xsec_dict['kfac'])
        bmla.setFilterEff(   xsec_dict['eff'])

        bmla.setTotalNumEntries(    total_num_entries    )
        bmla.setNumGeneratedEvents( total_num_events     )
        bmla.setSumMCEventWeights(  sum_mc_event_weights )

    # set is full sim/fast sim
    if is_full_sim:
        bmla.setFullSim()

    # set unblind!
    bmla.setIsBlind(False)

    # turn on systematics
    syst_tag = ''
    if syst_struct:
        print 'turning on systematics'
        syst_struct.configureAnalysisObject(bmla)

        # the systematic run name is folded into the output file name below
        syst_tag = ''.join([syst_struct.getRunName(), '.'])

    # set start entry and max number events
    # entries are split evenly (rounded up) across total_num_jobs; this job
    # processes the slice beginning at job_num * this_job_events
    if total_num_jobs > 1:
        print 'total num jobs (', total_num_jobs, ') > 1'
        this_job_events = int( math.ceil( float(total_num_entries)
                                        / total_num_jobs
                                        )
                             )
        this_job_start = job_num*this_job_events

        print 'total num entries: ', total_num_entries
        print 'setting max num events: ', this_job_events
        print type(this_job_events)
        bmla.setMaxNumEvents(this_job_events)
        print 'setting start entry: ', this_job_start
        bmla.setStartEntry(this_job_start)

    # set out histogram file name
    # base name
    out_ntup_file_name = [out_dir, '/', 'BMinusL.', syst_tag]

    # append any special tags
    if out_file_special_name is not None:
        out_ntup_file_name.extend([out_file_special_name, '.'])

    # append 'hist' tag
    out_ntup_file_name.append('hist')

    # append job number
    if total_num_jobs > 1:
        out_ntup_file_name.extend( ['.'
                                   , str(job_num)
                                   , '_of_'
                                   , str(total_num_jobs)
                                   ]
                                 )

    # this is a root file
    out_ntup_file_name.append('.root')

    # set output file name in analyzer
    bmla.setOutHistFileName(''.join(out_ntup_file_name))

    # Set critical cuts
    # NOTE(review): setCritCutBadJetVeto is called twice below with the same
    # value -- the second call is redundant
    bmla.setCritCutGrl(            1)
    bmla.setCritCutIncompleteEvent(1)
    bmla.setCritCutLarError(       1)
    bmla.setCritCutTileError(      1)
    bmla.setCritCutTileHotSpot(    1)
    bmla.setCritCutTileTrip(       1)
    bmla.setCritCutBadJetVeto(     1)
    bmla.setCritCutCaloProblemJet( 1)
    bmla.setCritCutPrimaryVertex(  1)
    bmla.setCritCutBadMuonVeto(    1)
    bmla.setCritCutCosmicMuonVeto( 1)
    bmla.setCritCutHFOR(           1)
    bmla.setCritCutMcOverlap(      1)
    bmla.setCritCutGe2Lepton(      1)
    bmla.setCritCut2Lepton(        0)
    bmla.setCritCut2SignalLepton(  0)
    bmla.setCritCut2BJets(         0)
    bmla.setCritCutBadJetVeto(     1)
    bmla.setCritCutBLPairing(      0)

    # Set cut values
    # NOTE(review): lep_pt_cut, jet_pt_cut and btag_working_point are
    # module-level globals not visible in this block -- confirm they are
    # defined before this function is called
    bmla.setElPtCut(  lep_pt_cut, -1 )
    bmla.setMuPtCut(  lep_pt_cut, -1 )
    bmla.setBJetPtCut(jet_pt_cut, -1 )
    bmla.setMV1Cut(btag_working_point)

    # Turn on/off Z fudge factor
    bmla.setDoZKFactor(True)
    # bmla.setDoZKFactor(False)

    # Turn on/off detailed B-L histograms
    bmla.setDoDetailedBLHists(False)

    # prepare tools and run analysis loop
    print 'preparing tools'
    bmla.prepareTools()
    print 'looping -- ', out_file_special_name
    bmla.Loop()
    print 'done looping -- ', out_file_special_name

    # ==============================================================================
    print ''
    print ''
    # NOTE(review): everything from here to the end of this function appears
    # to be spliced in from a different top-level driver script -- it uses
    # names that are not defined in this function (data_set_dicts,
    # egamma_data_samples, muon_data_samples, full_sim_mc_samples,
    # fast_sim_mc_samples, RunBMinusLAnalysis, run_local) and the block ends
    # with a dangling 'if run_local:'. This code is broken as-is and needs
    # to be restored from the original source.
    systematic_runs = itertools.chain([None], RunHelpers.syst_list)
    for syst in systematic_runs:
        syst_struct = RunHelpers.SystematicStruct()
        if syst is not None:
            syst_struct.setSyst(syst, True)
        syst_tag = syst_struct.getRunName()

        syst_struct.printInfo()
        print syst_tag


        data_set_dicts[syst_tag] = RunHelpers.addAllSamplesToList(
                egamma_data_samples = egamma_data_samples,
                muon_data_samples   = muon_data_samples,
                full_sim_mc_samples = full_sim_mc_samples,
                fast_sim_mc_samples = fast_sim_mc_samples,
                file_list_path_base = 'EosFileLists/tnt_107/tnt_107',
                out_dir             = '__'.join([out_dir, syst_tag]),
                syst_struct         = syst_struct)

    # - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
    for syst, the_dicts in data_set_dicts.items():
        print syst, ' -- ', the_dicts

        this_out_dir = '__'.join([out_dir, syst])
        this_sym_link_name = ''.join(['./NextPlotDir.BMinusL.', syst])
        print 'this sym link name: ', this_sym_link_name

        this_run_analysis_fun = RunBMinusLAnalysis.runBMinusLAnalysisFun

        if run_local:
def runIsrAnalysis( file_list
                  , is_data
                  , is_full_sim
                  , tree_name             = 'susy'
                  , dsid                  = 1
                  , out_file_special_name = None
                  , is_tnt                = False
                  , fancy_progress_bar    = True
                  , job_num               = 0
                  , total_num_jobs        = 1
                  , total_num_events      = 0
                  , total_num_entries     = 0
                  , out_dir               = './'
                  ):
    """
    Configure and run one PennSusyFrame IsrAnalysis job over file_list.

    The input files are chained into a TChain (tree tree_name), an
    IsrAnalysis object is configured (data/MC flags, cross-section info for
    MC, job splitting, output file name) and its event loop is run.

    file_list             -- list of input ROOT file paths
    is_data               -- truthy for data, falsy for MC
    is_full_sim           -- truthy for full-sim MC
    tree_name             -- name of the input TTree
    dsid                  -- MC dataset id used for the cross-section lookup
    out_file_special_name -- optional tag folded into the output file name
    is_tnt                -- running over TNTs (triggers num-event lookup)
    fancy_progress_bar    -- NOTE(review): accepted but ignored; the progress
                             bar is hard-coded off below
    job_num               -- this job's index when split across several jobs
    total_num_jobs        -- total number of parallel jobs for this sample
    total_num_events      -- number of generated events (MC bookkeeping)
    total_num_entries     -- total entries across all files (job splitting)
    out_dir               -- directory for the output histogram file

    Returns None; returns early if the cross-section lookup fails.
    """
    # ==============================================================================
    # If the num events are not set and we are running over TNTs, get the total NumEvents
    print 'total num events: %s' % total_num_events
    if total_num_events == 0 and is_tnt:
        print 'Getting total num unskimmed events'
        print '  -- this is slow. you should do this once per data set - not for each stream!'
        total_num_events = getTotalNumEvents(file_list, is_tnt)

    print "Adding files to TChain"
    t = RunHelpers.getTChain(file_list, tree_name)

    # ==============================================================================
    print 'Creating IsrAnalysis object'
    isra = ROOT.PennSusyFrame.IsrAnalysis(t)

    print 'configuring IsrAnalysis object'
    if out_file_special_name is not None:
        isra.setProcessLabel(out_file_special_name)
    # NOTE(review): the fancy_progress_bar argument is ignored here
    isra.setFancyProgressBar(False)

    # set is data or MC
    if is_data:
        isra.setIsData()
    else:
        isra.setIsMC()

        # abort the job entirely if the cross section for this dsid is unknown
        xsec_dict = CrossSectionReader.getCrossSection(dsid)
        if xsec_dict is None:
            return
        isra.setCrossSection(xsec_dict['xsec'])
        isra.setKFactor(     xsec_dict['kfac'])
        isra.setFilterEff(   xsec_dict['eff'])

        isra.setTotalNumEntries(    total_num_entries )
        isra.setNumGeneratedEvents( total_num_events  )

    # set is full sim/fast sim
    if is_full_sim:
        isra.setFullSim()

    # set start entry and max number events
    # entries are split evenly (rounded up) across total_num_jobs; this job
    # processes the slice beginning at job_num * this_job_events
    if total_num_jobs > 1:
        print 'total num jobs (%s) > 1' % total_num_jobs
        this_job_events = int(math.ceil( float(total_num_entries) / total_num_jobs ))
        this_job_start = job_num*this_job_events

        # NOTE(review): message typo -- ';' should be ':'; also the
        # type(...) print below looks like leftover debug output
        print 'total num entries; %s' % total_num_entries
        print 'setting max num events: %s' % this_job_events
        print type(this_job_events)
        isra.setMaxNumEvents(this_job_events)
        print 'setting start entry: %s' % this_job_start
        isra.setStartEntry(this_job_start)

    # set out histogram file name
    # pattern: <out_dir>/Isr.[<special name>.]hists[.<job>_of_<total>].root
    print 'setting histogram names'
    out_hist_file_name = '%s/Isr.' % out_dir
    if out_file_special_name is not None:
        out_hist_file_name += '%s.' % out_file_special_name
    out_hist_file_name += 'hists'
    if total_num_jobs > 1:
        out_hist_file_name += '.%d_of_%d' % (job_num, total_num_jobs)
    out_hist_file_name += '.root'
    isra.setOutHistFileName(out_hist_file_name)

    # Set critical cuts
    print 'setting critical cuts'
    # isra.setCritCutGrl(            1)
    # isra.setCritCutIncompleteEvent(1)
    # isra.setCritCutLarError(       1)
    # isra.setCritCutTileError(      1)
    # isra.setCritCutTileHotSpot(    1)
    # isra.setCritCutTileTrip(       1)
    # isra.setCritCutBadJetVeto(     1)
    # isra.setCritCutCaloProblemJet( 1)
    # isra.setCritCutPrimaryVertex(  1)
    # isra.setCritCutBadMuonVeto(    1)
    # isra.setCritCutCosmicMuonVeto( 1)
    # isra.setCritCutHFOR(           1)
    # isra.setCritCutMcOverlap(      1)
    # isra.setCritCutGe2Lepton(      1)
    # isra.setCritCut2Lepton(        1)
    # isra.setCritCut2SignalLepton(  1)

    # prepare tools and run analysis loop
    print 'preparing tools'
    isra.prepareTools()
    print 'looping -- %s' % out_file_special_name
    isra.Loop()
    print 'done looping -- %s' % out_file_special_name

    # ==============================================================================
    print ''
    print ''
    # NOTE(review): the two assignments below appear spliced in from a
    # different top-level script -- 'user_input' is not defined anywhere in
    # this function, so this line would raise NameError if reached. Restore
    # from the original source.
    num_processes = int(user_input)
    run_local = True

# timestamped (to the minute) output directory for histograms, rooted at the
# shell's working directory
today_date = datetime.datetime.now()
out_dir = '%s/hists/bminusl_hists_%04d_%02d_%02d__%02d_%02d' % ( os.environ['PWD']
                                                               , today_date.year
                                                               , today_date.month
                                                               , today_date.day
                                                               , today_date.hour
                                                               , today_date.minute
                                                               )
print out_dir

# ==============================================================================
if __name__ == '__main__':
    RunHelpers.safeMakeDir(out_dir)

    print 'getting file list'

    # no data samples configured for this run
    data_samples = {
                   }
    # full-sim MC samples: dsid -> {label, number of parallel jobs}
    # NOTE(review): this dict literal is never closed in this chunk -- the
    # file appears truncated/garbled here and the remaining entries plus the
    # closing brace need to be restored from the original source
    full_sim_mc_samples = {
                            117050:{'label':'117050.PowhegPythia_P2011C_ttbar'                 , 'num_jobs':30}
                          , 105200:{'label':'105200.McAtNloJimmy_CT10_ttbar_LeptonFilter'      , 'num_jobs':30}
                          , 200332:{'label':'200332.AlpgenPythia_Auto_P2011C_ZeebbNp0'         , 'num_jobs':20}
                          , 200333:{'label':'200333.AlpgenPythia_Auto_P2011C_ZeebbNp1'         , 'num_jobs':10}
                          , 200334:{'label':'200334.AlpgenPythia_Auto_P2011C_ZeebbNp2'         , 'num_jobs':3}
                          , 200335:{'label':'200335.AlpgenPythia_Auto_P2011C_ZeebbNp3incl'     , 'num_jobs':1}
                          , 200340:{'label':'200340.AlpgenPythia_Auto_P2011C_ZmumubbNp0'       , 'num_jobs':20}
                          , 200341:{'label':'200341.AlpgenPythia_Auto_P2011C_ZmumubbNp1'       , 'num_jobs':10}
                          , 200342:{'label':'200342.AlpgenPythia_Auto_P2011C_ZmumubbNp2'       , 'num_jobs':3}
import RunTntMaker

sys.path.append('%s/RunHelpers/' % os.environ['BASE_WORK_DIR'])
import RunHelpers

# ==============================================================================
if __name__ == '__main__':
    # NOTE(review): base_work_dir is assigned but never used below -- it may
    # be an intentional early failure if BASE_WORK_DIR is unset; confirm
    base_work_dir = os.environ['BASE_WORK_DIR']
    print 'getting file list'

    # register the single local signal sample; addSamplesToList appends a
    # per-sample job-configuration dict to data_set_dicts
    data_set_dicts = []
    RunHelpers.addSamplesToList( sample_dict = {'label':'202641.MadGraphPythia_AUET2B_CTEQ6L1_SM_TT_directBL_1000' , 'num_jobs':1}
                               , data_set_dicts = data_set_dicts
                               # , file_list_path = '%s/EosFileLists/local/raw_d3pd.202641.MadGraphPythia_AUET2B_CTEQ6L1_SM_TT_directBL_1000.txt' % os.environ['BASE_WORK_DIR']
                               , file_list_path = 'EosFileLists/local/raw_d3pd.202641.MadGraphPythia_AUET2B_CTEQ6L1_SM_TT_directBL_1000.txt'
                               , is_data = False
                               , is_egamma_stream = False
                               , is_full_sim = False
                               , dsid = 202641
                               , out_dir = 'local_tnt'
                               )

    print ''
    print data_set_dicts
    print ''
    # run the TNT maker over the first (and only) registered sample
    RunTntMaker.runTntMaker( file_list = data_set_dicts[0]['file_list']
                           , is_data = False
                           , is_full_sim = False
                           )

    # # file_list = RunTntMaker.getFileListFromFile('%s/EosFileLists/local/raw_d3pd.202641.MadGraphPythia_AUET2B_CTEQ6L1_SM_TT_directBL_1000.txt' % os.environ['BASE_WORK_DIR'])
    # # file_list = file_list[:1]
from multiprocessing import Pool
import time
import sys
import datetime

sys.path.append('%s/EwkAnalysis/RunScripts/' % os.environ['BASE_WORK_DIR'])
import RunIsrAnalysis

sys.path.append('%s/RunHelpers/' % os.environ['BASE_WORK_DIR'])
import RunHelpers

# ------------------------------------------------------------------------------
# get number of parallel processes from command line inputs
# (defaults to 1 when no argument is given)
num_processes = int(sys.argv[1]) if len(sys.argv) > 1 else 1

# timestamped output directory for the ISR histograms
out_dir = 'hists/isr_hists%s' % RunHelpers.getDateTimeLabel(True, False)

# ==============================================================================
if __name__ == '__main__':
    RunHelpers.safeMakeDir(out_dir)

    print 'getting file list'
    data_set_dicts = []
    data_samples = {}

    # full-sim MC samples; every entry is currently commented out
    # NOTE(review): this dict literal is never closed in this chunk and the
    # block jumps straight to 'run_local = True' -- the file appears
    # truncated/garbled here and needs to be restored from the original
    full_sim_mc_samples = {##  117650:{'label':'117650.AlpgenPythia_P2011C_ZeeNp0'         , 'num_jobs':10}
                          ## , 117651:{'label':'117651.AlpgenPythia_P2011C_ZeeNp1'       , 'num_jobs':10}
                          ## , 117652:{'label':'117652.AlpgenPythia_P2011C_ZeeNp2'       , 'num_jobs':5}
                          ## , 117653:{'label':'117653.AlpgenPythia_P2011C_ZeeNp3'       , 'num_jobs':1}
                          ## , 117654:{'label':'117654.AlpgenPythia_P2011C_ZeeNp4'       , 'num_jobs':1}
                          ## , 117655:{'label':'117655.AlpgenPythia_P2011C_ZeeNp5'       , 'num_jobs':1}
    run_local = True

# timestamped (to the minute) output directory for the optimization ntuples,
# rooted at the shell's working directory
today_date = datetime.datetime.now()
out_dir = '%s/hists/bminusl_opt_ntup_%04d_%02d_%02d__%02d_%02d' % ( os.environ['PWD']
                                                                  , today_date.year
                                                                  , today_date.month
                                                                  , today_date.day
                                                                  , today_date.hour
                                                                  , today_date.minute
                                                                  )
print out_dir

# ==============================================================================

if __name__ == '__main__':
    RunHelpers.safeMakeDir(out_dir)

    print 'getting file list'

    # egamma-stream data samples: period -> {label, number of parallel jobs}
    egamma_data_samples = {
                            'periodA_egamma':{'label':'periodA_egamma', 'num_jobs':2}
                          , 'periodB_egamma':{'label':'periodB_egamma', 'num_jobs':2}
                          , 'periodC_egamma':{'label':'periodC_egamma', 'num_jobs':2}
                          , 'periodD_egamma':{'label':'periodD_egamma', 'num_jobs':2}
                          , 'periodE_egamma':{'label':'periodE_egamma', 'num_jobs':2}
                          , 'periodG_egamma':{'label':'periodG_egamma', 'num_jobs':2}
                          , 'periodH_egamma':{'label':'periodH_egamma', 'num_jobs':2}
                          , 'periodI_egamma':{'label':'periodI_egamma', 'num_jobs':2}
                          , 'periodJ_egamma':{'label':'periodJ_egamma', 'num_jobs':2}
                          , 'periodL_egamma':{'label':'periodL_egamma', 'num_jobs':2}
                          }
# ------------------------------------------------------------------------------
# get number of parallel processes from command line inputs
# (defaults to 1 when no argument is given)
num_processes = int(sys.argv[1]) if len(sys.argv) > 1 else 1

# timestamped (to the minute) output directory for the histograms
today_date = datetime.datetime.now()
out_dir = 'hists/bminusl_hists_%04d_%02d_%02d__%02d_%02d' % ( today_date.year
                                                            , today_date.month
                                                            , today_date.day
                                                            , today_date.hour
                                                            , today_date.minute
                                                            )
print out_dir

# ==============================================================================
if __name__ == '__main__':
    RunHelpers.safeMakeDir(out_dir)

    print 'getting file list'
    data_set_dicts = []

    # no data or full-sim MC samples configured for this run
    data_samples = {
                   }
    full_sim_mc_samples = {
                          }
    # single fast-sim signal sample: dsid -> {label, number of parallel jobs}
    fast_sim_mc_samples = {
                            202641:{'label':'202641.MadGraphPythia_AUET2B_CTEQ6L1_SM_TT_directBL_1000' , 'num_jobs':1}
                          }

    print fast_sim_mc_samples

    # - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
def runBMinusLTTNTMaker( file_list
                       , is_data
                       , is_egamma_stream
                       , is_full_sim
                       , tree_name             = 'susy'
                       , dsid                  = 1
                       , out_file_special_name = None
                       , is_tnt                = False
                       , fancy_progress_bar    = True
                       , job_num               = 0
                       , total_num_jobs        = 1
                       , total_num_events      = 0
                       , total_num_entries     = 0
                       , sum_mc_event_weights  = 0
                       , out_dir               = './'
                       ):
    """
    Configure and run one PennSusyFrame BMinusLTTNTMaker job over file_list.

    The input files are chained into a TChain (tree tree_name), a
    BMinusLTTNTMaker object is configured (data/MC flags, cross-section info
    for MC, job splitting, output ntuple name, cut values) and its event
    loop is run, producing a skimmed TNT ntuple.

    file_list             -- list of input ROOT file paths
    is_data               -- truthy for data, falsy for MC
    is_egamma_stream      -- NOTE(review): accepted but never used in this
                             function -- confirm against sibling functions
    is_full_sim           -- truthy for full-sim MC
    tree_name             -- name of the input TTree
    dsid                  -- MC dataset id used for the cross-section lookup
    out_file_special_name -- optional tag folded into the output file name
    is_tnt                -- running over TNTs (triggers num-event lookup)
    fancy_progress_bar    -- NOTE(review): accepted but ignored; the progress
                             bar is hard-coded off below
    job_num               -- this job's index when split across several jobs
    total_num_jobs        -- total number of parallel jobs for this sample
    total_num_events      -- number of generated events (MC bookkeeping)
    total_num_entries     -- total entries across all files (job splitting)
    sum_mc_event_weights  -- sum of MC event weights (MC bookkeeping)
    out_dir               -- directory for the output ntuple file

    Returns None; returns early if the cross-section lookup fails.
    """
    # ==============================================================================
    # If the num events are not set and we are running over TNTs, get the total NumEvents
    print 'total num events: %s' % total_num_events
    if total_num_events == 0 and is_tnt:
        print 'Getting total num unskimmed events'
        print '  -- this is slow. you should do this once per data set - not for each stream!'
        total_num_events = getTotalNumEvents(file_list, is_tnt)

    print "Adding files to TChain"
    t = RunHelpers.getTChain(file_list, tree_name)

    # ==============================================================================
    print 'Creating BMinusLTTNTMaker object'
    bmlttntm = ROOT.PennSusyFrame.BMinusLTTNTMaker(t)

    print 'configuring BMinusLTTNTMaker object'
    if out_file_special_name is not None:
        bmlttntm.setProcessLabel(out_file_special_name)
    # NOTE(review): the fancy_progress_bar argument is ignored here
    bmlttntm.setFancyProgressBar(False)

    # set is data or MC
    if is_data:
        bmlttntm.setIsData()
    else:
        bmlttntm.setIsMC()

        # abort the job entirely if the cross section for this dsid is unknown
        xsec_dict = CrossSectionReader.getCrossSection(dsid)
        if xsec_dict is None:
            return
        bmlttntm.setCrossSection(xsec_dict['xsec'])
        bmlttntm.setKFactor(     xsec_dict['kfac'])
        bmlttntm.setFilterEff(   xsec_dict['eff'])

        print 'Setting TotalNumEntries:   '  , total_num_entries
        print 'Setting NumGeneratedEvents: ' , total_num_events
        print 'Setting SumMCEventWeights:  ' , sum_mc_event_weights

        bmlttntm.setTotalNumEntries(    total_num_entries )
        bmlttntm.setNumGeneratedEvents( total_num_events  )
        bmlttntm.setSumMCEventWeights(  sum_mc_event_weights )

    # set is full sim/fast sim
    if is_full_sim:
        bmlttntm.setFullSim()

    # set start entry and max number events
    # entries are split evenly (rounded up) across total_num_jobs; this job
    # processes the slice beginning at job_num * this_job_events
    if total_num_jobs > 1:
        print 'total num jobs (%s) > 1' % total_num_jobs
        this_job_events = int(math.ceil( float(total_num_entries) / total_num_jobs ))
        this_job_start = job_num*this_job_events

        # NOTE(review): message typo -- ';' should be ':'; also the
        # type(...) print below looks like leftover debug output
        print 'total num entries; %s' % total_num_entries
        print 'setting max num events: %s' % this_job_events
        print type(this_job_events)
        bmlttntm.setMaxNumEvents(this_job_events)
        print 'setting start entry: %s' % this_job_start
        bmlttntm.setStartEntry(this_job_start)

    # set out histogram file name
    # pattern: <out_dir>/BMinusL.[<special name>.]tnt[.<job>_of_<total>].root
    print 'setting histogram names'
    out_ntup_file_name = '%s/BMinusL.' % out_dir
    if out_file_special_name is not None:
        out_ntup_file_name += '%s.' % out_file_special_name
    out_ntup_file_name += 'tnt'
    if total_num_jobs > 1:
        out_ntup_file_name += '.%d_of_%d' % (job_num, total_num_jobs)
    out_ntup_file_name += '.root'
    bmlttntm.setOutNtupleFileName(out_ntup_file_name)

    # # Set critical cuts
    # print 'setting critical cuts'
    # bmlttntm.setCritCutGrl(            1)
    # bmlttntm.setCritCutIncompleteEvent(1)
    # bmlttntm.setCritCutLarError(       1)
    # bmlttntm.setCritCutTileError(      1)
    # bmlttntm.setCritCutTileHotSpot(    1)
    # bmlttntm.setCritCutTileTrip(       1)
    # bmlttntm.setCritCutBadJetVeto(     1)
    # bmlttntm.setCritCutCaloProblemJet( 1)
    # bmlttntm.setCritCutPrimaryVertex(  1)
    # bmlttntm.setCritCutBadMuonVeto(    1)
    # bmlttntm.setCritCutCosmicMuonVeto( 1)
    # bmlttntm.setCritCutHFOR(           1)
    # bmlttntm.setCritCutMcOverlap(      1)
    # bmlttntm.setCritCutGe2Lepton(      1)
    # bmlttntm.setCritCut2Lepton(        0)
    # bmlttntm.setCritCut2SignalLepton(  0)
    # bmlttntm.setCritCut2BJets(         0)
    # bmlttntm.setCritCutBadJetVeto(     1)
    # bmlttntm.setCritCutBLPairing(      0)

    # Set cut values
    # NOTE(review): lep_pt_cut, jet_pt_cut and btag_working_point are
    # module-level globals not visible in this block -- confirm they are
    # defined before this function is called
    bmlttntm.setElPtCut(  lep_pt_cut, -1     )
    bmlttntm.setMuPtCut(  lep_pt_cut, -1     )
    bmlttntm.setBJetPtCut(jet_pt_cut, -1     )
    bmlttntm.setMV1Cut(btag_working_point)

    # prepare tools and run analysis loop
    print 'preparing tools'
    bmlttntm.prepareTools()
    print 'looping -- %s' % out_file_special_name
    bmlttntm.Loop()
    print 'done looping -- %s' % out_file_special_name

    # ==============================================================================
    print ''
    print ''
# ------------------------------------------------------------------------------
# get number of parallel processes from command line inputs
# (defaults to 1 when no argument is given)
num_processes = int(sys.argv[1]) if len(sys.argv) > 1 else 1

# timestamped (to the minute) output directory for the histograms
today_date = datetime.datetime.now()
out_dir = 'hists/bminusl_hists_%04d_%02d_%02d__%02d_%02d' % ( today_date.year
                                                            , today_date.month
                                                            , today_date.day
                                                            , today_date.hour
                                                            , today_date.minute
                                                            )
print out_dir

# ==============================================================================
if __name__ == '__main__':
    RunHelpers.safeMakeDir(out_dir)

    print 'getting file list'
    data_set_dicts = []
    data_samples = {
                   }
    # full_sim_mc_samples = { # 110820:{'label':'110820.AlpgenPythia_P2011C_ZeebbNp3'    , 'num_jobs':1}
    #                         # 200334:{'label':'200334.AlpgenPythia_Auto_P2011C_ZeebbNp2'         , 'num_jobs':3}
    #                       }
    # fast_sim_mc_samples = { 202641:{'label':'202641.MadGraphPythia_AUET2B_CTEQ6L1_SM_TT_directBL_1000', 'num_jobs':1}
    #                       }
    full_sim_mc_samples = { }

    # fast-sim signal points: dsid -> {label, number of parallel jobs}
    # NOTE(review): this dict literal continues past the end of the visible
    # chunk -- the remaining entries and closing brace are not shown here
    fast_sim_mc_samples = { # 105200:{'label':'105200.McAtNloJimmy_CT10_ttbar_LeptonFilter', 'num_jobs':15}
                             202632:{'label':'202632.MadGraphPythia_AUET2B_CTEQ6L1_SM_TT_directBL_100' , 'num_jobs':1}
                           , 202633:{'label':'202633.MadGraphPythia_AUET2B_CTEQ6L1_SM_TT_directBL_200' , 'num_jobs':1}