def create_config(is_mc):
    """
    Build the default CRAB configuration used to run the framework on the grid.

    :param is_mc: True for MC samples (file-based splitting), False for data
        (lumi-based splitting).
    :return: a populated CRABClient configuration object.
    """

    from CRABClient.UserUtilities import config, getUsernameFromSiteDB
    cfg = config()

    # General: keep every task under a common work area and bring back both
    # the job outputs and the log files.
    cfg.General.workArea = 'tasks'
    cfg.General.transferOutputs = True
    cfg.General.transferLogs = True

    # JobType: the framework runs through a wrapper script with a dummy pset;
    # output collection is handled explicitly via outputFiles.
    cfg.JobType.pluginName = 'Analysis'
    cfg.JobType.psetName = '../python/dummy_pset.py'
    cfg.JobType.scriptExe = '../bin/runFrameworkOnGrid.sh'
    cfg.JobType.sendPythonFolder = True
    cfg.JobType.disableAutomaticOutputCollection = True
    cfg.JobType.allowUndistributedCMSSW = True
    cfg.JobType.inputFiles = ['../python/runFrameworkOnGrid.py']
    cfg.JobType.outputFiles = ['output.root']

    # Data: global DBS instance; the splitting strategy depends on sample type.
    cfg.Data.inputDBS = 'global'
    cfg.Data.splitting = 'FileBased' if is_mc else 'LumiBased'
    cfg.Data.outLFNDirBase = '/store/user/%s/' % (getUsernameFromSiteDB())
    cfg.Data.publication = False

    cfg.Site.storageSite = 'T2_BE_UCL'

    return cfg
def create_config():
    """
    Build a default CRAB configuration with no job-specific pset or scripts.

    :return: a populated CRABClient configuration object.
    """

    from CRABClient.UserUtilities import config, getUsernameFromSiteDB
    cfg = config()

    # Common task bookkeeping: shared work area, transfer outputs and logs.
    cfg.General.workArea = 'tasks'
    cfg.General.transferOutputs = True
    cfg.General.transferLogs = True

    # Plain analysis jobs; output collection is handled by hand and starts
    # with an empty output-file list.
    cfg.JobType.pluginName = 'Analysis'
    cfg.JobType.disableAutomaticOutputCollection = True
    cfg.JobType.outputFiles = []
    cfg.JobType.allowUndistributedCMSSW = True

    # Input data and splitting.
    cfg.Data.inputDBS = 'global'
    cfg.Data.splitting = 'LumiBased'
    cfg.Data.outLFNDirBase = '/store/user/%s/' % (getUsernameFromSiteDB())
    cfg.Data.publication = False

    cfg.Site.storageSite = 'T2_BE_UCL'

    return cfg
# Example #3
def submission():
	"""Submit one CRAB skimming task per nickname found in the sample config.

	Each task is submitted from a child Process running the module-level
	`submit` helper. Relies on module-level names: check_path,
	read_grid_control_includes, isData, get_sample_by_nick, submit,
	`date` and `Process`.
	"""
	# Fix: getUsernameFromSiteDB is called below but was missing from the import.
	from CRABClient.UserUtilities import config, getUsernameFromSiteDB
	config = config()
	config.General.workArea = '/nfs/dust/cms/user/%s/crab_kappa_skim-%s'%(getUsernameFromSiteDB(), date)
	check_path(config.General.workArea)
	config.General.transferOutputs = True
	config.General.transferLogs = True
	# Special dcms VO group (better fair share at German grid sites).
	config.User.voGroup = 'dcms'

	config.JobType.pluginName = 'Analysis'
	config.JobType.psetName = 'kSkimming_run2_cfg.py'
	#config.JobType.inputFiles = ['Summer15_V5_MC.db']
	config.JobType.allowUndistributedCMSSW = True
	config.Site.blacklist = ["T2_BR_SPRACE"]
	config.Data.inputDBS = 'global'
	config.Data.splitting = 'FileBased'
	config.Data.unitsPerJob = 1
	config.Data.outLFNDirBase = '/store/user/%s/higgs-kit/skimming/%s'%(getUsernameFromSiteDB(), date)
	config.Data.publication = False

	config.Site.storageSite = "T2_DE_DESY"
	# Load nicknames from gc-style config files into a flat list.
	nicknames = read_grid_control_includes(["samples/13TeV/Fall15_SM_Analysis.conf"])
	#nicknames = ['SUSYGluGluToHToTauTauM160_RunIIFall15MiniAODv2_76X_13TeV_MINIAOD_pythia8']

	# Loop over datasets: one task and one output file per nickname.
	for nickname in nicknames:
		config.General.requestName = nickname
		config.JobType.pyCfgParams = ['globalTag=76X_dataRun2_16Dec2015_v0' if isData(nickname) else 'globalTag=76X_mcRun2_asymptotic_RunIIFall15DR76_v1' ,'kappaTag=KAPPA_2_1_0','nickname=%s'%(nickname),'outputfilename=kappa_%s.root'%(nickname),'testsuite=False']
		config.JobType.outputFiles = ['kappa_%s.root'%(nickname)]
		config.Data.inputDataset = get_sample_by_nick(nickname)
		# Submit in a child process (isolates each submission from the next).
		p = Process(target=submit, args=(config,))
		p.start()
		p.join()
# Example #4
def submission(events_per_job):
	from CRABClient.UserUtilities import config
	config = config()
	config.General.workArea = '/nfs/dust/cms/user/%s/kappa/crab_kappa_skim80X-%s'%(getUsernameFromSiteDB(), date)
	#config.General.workArea = '/net/scratch_cms/institut_3b/%s/kappa/crab_kappa_skim-%s'%(getUsernameFromSiteDB(), date)
	#config.General.workArea = '/nfs/dust/cms/user/<your-NAF-username>/kappa/crab_kappa_skim80X-%s'% date  #if CERN-username != NAF-username
	check_path(config.General.workArea)
	config.General.transferOutputs = True
	config.General.transferLogs = True
	config.User.voGroup = 'dcms'
	
	config.JobType.pluginName = 'Analysis'
	config.JobType.psetName = 'kSkimming_run2_cfg.py'
	#config.JobType.inputFiles = ['Spring16_25nsV6_DATA.db', 'Spring16_25nsV6_MC.db']
	config.JobType.allowUndistributedCMSSW = True
	config.Site.blacklist = ["T2_BR_SPRACE"]
	config.Data.splitting = 'FileBased'
	config.Data.unitsPerJob = 1
	config.Data.outLFNDirBase = '/store/user/%s/higgs-kit/skimming/80X_%s'%(getUsernameFromSiteDB(), date)
	config.Data.publication = False
	
	config.Site.storageSite = "T2_DE_DESY"
	# load nicknames form gc-style config files and write them to a flat nicknames list
	nicknames = read_grid_control_includes(["samples/13TeV/Summer16_SM_Analysis.conf"])
	#nicknames = read_grid_control_includes(["samples/13TeV/Spring16_SM_Higgs_CPmixing_2.conf"])
	#nicknames = read_grid_control_includes(["samples/13TeV/2016B_Data.conf"])
	#nicknames = ['SUSYGluGluToHToTauTauM160_RunIIFall15MiniAODv2_76X_13TeV_MINIAOD_pythia8']

	# loop over datasets and get repsective nicks
	for nickname in nicknames:
		config.General.requestName = nickname[:100]
		config.Data.inputDBS = get_inputDBS_by_nick(nickname)
		config.Data.unitsPerJob = 1
		nfiles = get_n_files_from_nick(nickname)
		if events_per_job:
			nevents = get_n_generated_events_from_nick(nickname)
			try:
				if int(nfiles) > 0 and int(nevents) > 0:
					files_per_job = int(events_per_job) * int(nfiles) / int(nevents)
					if files_per_job > 1:
						config.Data.unitsPerJob = int(files_per_job)
			except:
				print "Its not possilbe to make ",events_per_job," events/job for ",nickname," which has Nevents:",nevents," and Nfiles",nfiles," in the database. Just make one file per job"
		if float(config.Data.unitsPerJob) > 0 and float(nfiles)/float(config.Data.unitsPerJob) >= job_submission_limit:
			files_per_job = ceil(float(nfiles)/job_submission_limit)
			if files_per_job > 1:
				config.Data.unitsPerJob = int(files_per_job)

		config.JobType.pyCfgParams = ['globalTag=80X_dataRun2_2016SeptRepro_v7' if isData(nickname) else 'globalTag=80X_mcRun2_asymptotic_2016_TrancheIV_v8' if "PUMoriond17" in getScenario(nickname) else 'globalTag=80X_mcRun2_asymptotic_2016_miniAODv2_v1' ,'kappaTag=KAPPA_2_1_0','nickname=%s'%(nickname),'outputfilename=kappa_%s.root'%(nickname),'testsuite=False']
		config.JobType.outputFiles = ['kappa_%s.root'%(nickname)]
		config.Data.inputDataset = get_sample_by_nick(nickname)
		#config.Data.lumiMask = '/nfs/dust/cms/user/<NAF-username>/kappa/crab_kappa_skim80X-<campaign-date>/results/missingLumis.json' # for running of a subset of lumi sections
		p = Process(target=submit, args=(config,))
		p.start()
		p.join()
def submission():
	"""Submit a single CRAB task producing a Kappa tuple from AOD input.

	The dataset, request name and all job parameters are hard-coded below;
	submission happens through a child Process running the module-level
	`submit` helper. Relies on module-level names: check_path, date, Process,
	submit.
	"""
	from CRABClient.UserUtilities import config, getUsernameFromSiteDB
	config = config()

	##-- Your name of the crab project
	config.General.requestName = 'KAPPA_FROM_AOD_SUSYGluGlu_Sync2015'
	#config.General.workArea = 'crab_projects'
	config.General.workArea = '/net/scratch_cms/institut_3b/%s/kappa/crab_kappa_skim-%s'%(getUsernameFromSiteDB(), date)

	##-- Transfer root files as well as log files; "cmsRun -j FrameworkJobReport.xml" (log file = FrameworkJobReport.xml)
	check_path(config.General.workArea)
	config.General.transferOutputs = True
	config.General.transferLogs = True

	##-- We want to have the special dcms role (better fair share at German grid sites).
	config.User.voGroup = 'dcms'

	##-- The scripts that are executed (Analysis means EDM input): psetName is the cmsRun config and scriptExe is a shell script which should include "cmsRun -j FrameworkJobReport.xml -p PSet.py" (PSet.py is the renamed config.JobType.psetName)
	config.JobType.pluginName = 'Analysis'
	config.JobType.sendPythonFolder = True
	config.JobType.psetName = 'AODtoMiniAOD_cfg.py'
	config.JobType.scriptExe = 'kappaWorkflow_privateMiniAOD.sh'
	#config.JobType.maxJobRuntimeMin = 2750
	#config.JobType.maxMemoryMB = 6000

	##-- Instead of collecting the output file automatically, use the result of the pset and rename it; this bypasses the is-an-EDM-file test and allows publishing the data
	config.JobType.disableAutomaticOutputCollection = True
	config.JobType.outputFiles = ['kappaTuple.root']

	##-- The dataset you want to process:

	config.Data.inputDataset = '/SUSYGluGluToHToTauTau_M-160_TuneCUETP8M1_13TeV-pythia8/RunIIFall15DR76-PU25nsData2015v1_76X_mcRun2_asymptotic_v12-v1/AODSIM'
	#'/DYJetsToLL_M-50_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/RunIIFall15DR76-PU25nsData2015v1_76X_mcRun2_asymptotic_v12_ext1-v1/AODSIM'
	config.Data.inputDBS = 'global'
	config.Data.splitting = 'FileBased'
	config.Data.unitsPerJob = 1
	##-- If you want to run test jobs, set totalUnits to a small number and publication to False
	#config.Data.totalUnits = 10
	config.Data.publication = False

	##-- the output storage element
	config.Site.storageSite = 'T2_DE_DESY'
	config.Data.outLFNDirBase = '/store/user/%s/higgs-kit/skimming/80X_%s'%(getUsernameFromSiteDB(), date)

	##-- Run in xrootd mode (which allows you to run the jobs on all possible sites)
	#config.Data.ignoreLocality = True
	#config.Site.whitelist = ['T2_CH_CERN','T2_DE_DESY','T1_DE_KIT','T2_DE_RWTH','T2_UK_London_IC', 'T2_US_MIT']


	# Submit in a child process running the module-level `submit` helper.
	p = Process(target=submit, args=(config,))
	p.start()
	p.join()
def get_config(args):
    """Build a CRAB configuration from the crabSubmit command-line arguments.

    Supports two job modes: a wrapper-script mode (args.scriptExe) that runs
    a fixed PSet plus the user script, and a plain cmsRun mode driven by
    args.cfg and args.cmsRunArgs.
    """
    from CRABClient.UserUtilities import config

    cfg = config()

    cfg.General.workArea = get_crab_workArea(args)
    cfg.General.transferOutputs = True

    cfg.JobType.pluginName = 'Analysis'
    if args.scriptExe:
        # Wrapper-script mode: CRAB runs a stub PSet and executes the script,
        # which is expected to produce crab.root.
        cfg.JobType.psetName = '{0}/src/DevTools/Utilities/test/PSet.py'.format(os.environ['CMSSW_BASE'])
        cfg.JobType.scriptExe = args.cfg
        cfg.JobType.scriptArgs = args.cmsRunArgs
        cfg.JobType.outputFiles = ['crab.root']
    else:
        # Plain cmsRun mode: the user config is the pset itself.
        cfg.JobType.psetName = args.cfg
        cfg.JobType.pyCfgParams = args.cmsRunArgs
    cfg.JobType.sendPythonFolder = True
    cfg.JobType.numCores = args.numCores
    cfg.JobType.maxMemoryMB = args.maxMemoryMB
    cfg.JobType.maxJobRuntimeMin = args.maxJobRuntimeMin
    if args.external:
        cfg.JobType.sendExternalFolder = True

    # Data handling: file-based splitting sized by the CLI argument.
    cfg.Data.inputDBS = args.inputDBS
    cfg.Data.useParent = args.useParent
    cfg.Data.splitting = 'FileBased'
    cfg.Data.unitsPerJob = args.filesPerJob
    cfg.Data.outLFNDirBase = '/store/user/{0}/{1}/'.format(args.user,args.jobName)
    cfg.Data.publication = args.publish
    cfg.Data.outputDatasetTag = args.jobName
    if args.applyLumiMask:
        cfg.Data.lumiMask = getJson(args.applyLumiMask)
    if args.allowNonValid:
        cfg.Data.allowNonValidInputDataset = True

    cfg.Site.storageSite = args.site

    return cfg
def main():
    """
    Build a base CRAB configuration (lumi-based splitting, locality ignored)
    from the command-line options and define a `submit` helper that catches
    HTTP errors from the CRAB server.

    NOTE(review): Python 2 only (print statements, httplib, `except X, e`);
    `submit` is defined but never called within this function's visible body.
    """

    options = getOptions()

    #from WMCore.Configuration import Configuration
    from CRABClient.UserUtilities import config
    config = config()  # replace the imported factory with a config instance

    from CRABAPI.RawCommand import crabCommand
    from httplib import HTTPException  # Python 2 module; http.client in Py3

    # We want to put all the CRAB project directories from the tasks we submit here into one common directory.
    # That's why we need to set this parameter (here or above in the configuration file, it does not matter, we will not overwrite it).
    config.section_("General")
    config.General.workArea = options.dir
    config.General.transferLogs = True

    config.section_("JobType")
    config.JobType.pluginName = 'Analysis'
    config.JobType.psetName = options.config
    config.JobType.allowUndistributedCMSSW = True
    # config.JobType.pyCfgParams = ['DataProcessing=MC25ns_MiniAODv2','lheLabel=externalLHEProducer']
    config.section_("Data")
    config.Data.inputDataset = None  # presumably overwritten per dataset before submission — not visible here
    # config.Data.inputDBS = 'phys03' #to be commented in case of global#
    config.Data.splitting = 'LumiBased'#
    config.Data.unitsPerJob = 10
    #config.Data.lumiMask = './JSON.txt'
    #config.Data.splitting = 'FileBased'
    #config.Data.unitsPerJob = 1
    config.Data.ignoreLocality = True  # allow jobs to run away from the data-hosting sites
    config.Data.publication = False
    config.Data.outLFNDirBase = '/store/user/cgalloni/Pxl_res'  # hard-coded user output path

    config.section_("Site")
    config.Site.storageSite = 'T3_CH_PSI'
    #config.Site.blacklist=['T2_US_Nebraska','T2_US_Wisconsin','T2_FR_IPHC','T2_EE_Estonia','T2_DE_RWTH']
    #config.Site.whitelist=['T2_US_Nebraska','T2_US_Wisconsin','T2_FR_IPHC','T2_EE_Estonia',
    print 'Using config ' + options.config
    print 'Writing to directory ' + options.dir


    def submit(config):
        # Swallow HTTP errors from the CRAB server so a failed submission
        # does not abort the caller; other exceptions still propagate.
        try:
            crabCommand('submit', config = config)
        except HTTPException, hte:
            print 'Cannot execute command'
            print hte.headers
# Example #8
def submission():
    """Submit one CRAB skimming task per dataset listed in datasets.json.

    datasets.json maps a nickname to {globaltag, dataset, is_data}; each
    entry becomes one task submitted in a child Process. Relies on
    module-level names: check_path, json, sys, Process, submit.
    NOTE(review): Python 2 only (print statements).
    """
    from CRABClient.UserUtilities import config
    config = config()

    # Hard-coded work area and output path for one user/campaign.
    config.General.workArea = '/nfs/dust/cms/user/gsieber/crab_kappa_skim-2015-11-29_8TEV'
    check_path(config.General.workArea)
    config.General.transferOutputs = True
    config.General.transferLogs = True
    # Special dcms VO group (better fair share at German grid sites).
    config.User.voGroup = 'dcms'

    config.JobType.pluginName = 'Analysis'
    config.JobType.psetName = 'skim_pfjets.py'
    # Ship these .db files with the job — presumably JEC payload databases; TODO confirm
    config.JobType.inputFiles = ['Winter14_V5_DATA.db','Winter14_V5_MC.db']
    config.JobType.allowUndistributedCMSSW = True

    config.Data.inputDBS = 'global'
    config.Data.splitting = 'FileBased'
    config.Data.unitsPerJob = 10
    config.Data.outLFNDirBase = '/store/user/sieber/SKIMS_JETS_2015/2015-11-29_8TEV'
    config.Data.publication = False

    config.Site.storageSite = "T2_DE_DESY"

    # Nickname -> sample metadata mapping; abort on malformed JSON.
    with open('datasets.json') as json_file:
        try:
            datasets = json.load(json_file)
        except ValueError:
            print 'Failed to parse json file.'
            sys.exit(1)

    # Loop over datasets and submit one task per nickname.
    for nickname in datasets.keys():
        print nickname
        print datasets[nickname]['globaltag']
        print datasets[nickname]['dataset']
        config.General.requestName = nickname
        config.JobType.pyCfgParams = [str('globaltag=%s'%(datasets[nickname]['globaltag'])), 
                                      str('outputfilename=kappa_%s.root'%(nickname)),
                                      str('data={0}'.format(datasets[nickname]['is_data']))
                                      ]
        print config.JobType.pyCfgParams
        config.JobType.outputFiles = [str('kappa_%s.root'%(nickname))]
        config.Data.inputDataset = datasets[nickname]['dataset']
        # Submit in a child process running the module-level `submit` helper.
        p = Process(target=submit, args=(config,))
        p.start()
        p.join()
# Example #9
	def crab_default_cfg(self):
		"""Return a CRAB config pre-filled with the common skimming defaults.

		Reads self.workdir, self.configfile, self.storage_for_output and the
		cached username lookup self.getUsernameFromSiteDB_cache(). Per-sample
		fields (dataset, request name, ...) are left for the caller to fill.
		"""
		from CRABClient.UserUtilities import config
		config = config()
		config.General.workArea = self.workdir
		config.General.transferOutputs = True
		config.General.transferLogs = True
		# Special dcms VO group (better fair share at German grid sites).
		config.User.voGroup = 'dcms'
		config.JobType.pluginName = 'Analysis'
		# The pset lives inside the CMSSW checkout; CMSSW_BASE must be set.
		config.JobType.psetName = os.path.join(os.environ.get("CMSSW_BASE"), "src/Kappa/Skimming/higgsTauTau/", self.configfile)
		#config.JobType.inputFiles = ['Spring16_25nsV6_DATA.db', 'Spring16_25nsV6_MC.db']
		config.JobType.maxMemoryMB = 2500
		config.JobType.allowUndistributedCMSSW = True
		# Blacklisted sites — presumably known-problematic for these jobs; TODO confirm
		config.Site.blacklist = ["T3_FR_IPNL", "T3_US_UCR", "T2_BR_SPRACE", "T1_RU_*", "T2_RU_*", "T3_US_UMiss", "T2_US_Vanderbilt", "T2_EE_Estonia", "T2_TW_*"]
		config.Data.splitting = 'FileBased'
		# Output LFN keyed by the username and the work-dir basename.
		config.Data.outLFNDirBase = '/store/user/%s/higgs-kit/skimming/%s'%(self.getUsernameFromSiteDB_cache(), os.path.basename(self.workdir.rstrip("/")))
		config.Data.publication = False
		config.Site.storageSite = self.storage_for_output
		return config
# Example #10
def get_config(args):
    """Build a CRAB configuration from the crabSubmit command-line arguments.

    Plain cmsRun jobs only; output goes under the submitting user's store
    area at T2_US_Wisconsin.
    """
    uname = os.environ['USER']
    from CRABClient.UserUtilities import config

    cfg = config()

    cfg.General.workArea = get_crab_workArea(args)
    cfg.General.transferOutputs = True

    # Job definition: the user config is the pset, parameters pass through.
    cfg.JobType.pluginName = 'Analysis'
    cfg.JobType.psetName = args.cfg
    cfg.JobType.pyCfgParams = args.cmsRunArgs
    cfg.JobType.sendPythonFolder = True

    # Data handling: file-based splitting sized by the CLI argument.
    cfg.Data.inputDBS = args.inputDBS
    cfg.Data.splitting = 'FileBased'
    cfg.Data.unitsPerJob = args.filesPerJob
    cfg.Data.outLFNDirBase = '/store/user/{0}/{1}/'.format(uname,args.jobName)
    cfg.Data.publication = args.publish
    cfg.Data.outputDatasetTag = args.jobName
    if args.applyLumiMask:
        # Fixed 2015 golden-JSON certification file.
        cfg.Data.lumiMask = '/afs/cern.ch/cms/CAF/CMSCOMM/COMM_DQM/certification/'\
                            'Collisions15/13TeV/'\
                            'Cert_246908-260627_13TeV_PromptReco_Collisions15_25ns_JSON_v2.txt'

    cfg.Site.storageSite = 'T2_US_Wisconsin'

    return cfg
# Example #11
def main():
    """
    Build a base CRAB configuration (file-based splitting, capped at 3000
    units) from the command-line options and define a `submit` helper that
    catches HTTP errors from the CRAB server.

    NOTE(review): Python 2 only (print statements, httplib, `except X, e`);
    `submit` is defined but never called within this function's visible body.
    """

    options = getOptions()

    #from WMCore.Configuration import Configuration
    from CRABClient.UserUtilities import config
    config = config()  # replace the imported factory with a config instance

    from CRABAPI.RawCommand import crabCommand
    from httplib import HTTPException  # Python 2 module; http.client in Py3

    # We want to put all the CRAB project directories from the tasks we submit here into one common directory.
    # That's why we need to set this parameter (here or above in the configuration file, it does not matter, we will not overwrite it).
    config.section_("General")
    config.General.workArea = options.dir
    config.General.transferLogs = True
    config.section_("JobType")
    config.JobType.pluginName = 'Analysis'
    config.JobType.psetName = options.config
    config.section_("Data")
    config.Data.inputDataset = None  # presumably overwritten per dataset before submission — not visible here
    config.Data.inputDBS      = 'global'
    config.Data.splitting     = 'FileBased'
    config.Data.totalUnits    = 3000
    config.Data.unitsPerJob   = 10
    config.Data.outLFNDirBase = '/store/user/aspiezia/v6'  # hard-coded user output path
    config.section_("Site")
    config.Site.storageSite = 'T2_CN_Beijing'

    print 'Using config ' + options.config
    print 'Writing to directory ' + options.dir


    def submit(config):
        # Swallow HTTP errors from the CRAB server so a failed submission
        # does not abort the caller; other exceptions still propagate.
        try:
            crabCommand('submit', config = config)
        except HTTPException, hte:
            print 'Cannot execute command'
            print hte.headers
# Example #12
def main():
    """
    Build a base CRAB configuration (lumi-based splitting with a 2015
    certification lumi-mask) from the command-line options and define a
    `submit` helper that catches HTTP errors from the CRAB server.

    NOTE(review): Python 2 only (print statements, httplib, `except X, e`);
    `submit` is defined but never called within this function's visible body.
    """

    options = getOptions()

    #from WMCore.Configuration import Configuration
    from CRABClient.UserUtilities import config
    config = config()  # replace the imported factory with a config instance

    from CRABAPI.RawCommand import crabCommand
    from httplib import HTTPException  # Python 2 module; http.client in Py3

    # We want to put all the CRAB project directories from the tasks we submit here into one common directory.
    # That's why we need to set this parameter (here or above in the configuration file, it does not matter, we will not overwrite it).
    config.section_("General")
    config.General.workArea = options.dir
    config.General.transferLogs = True
    config.section_("JobType")
    config.JobType.pluginName = 'Analysis'
    config.JobType.psetName = options.config
    config.section_("Data")
    config.Data.inputDataset = None  # presumably overwritten per dataset before submission — not visible here
    config.Data.inputDBS      = 'global'
    config.Data.splitting     = 'LumiBased'
    config.Data.unitsPerJob   = 60
    # Hard-coded path to a user's checkout of the 2015 golden-JSON file.
    config.Data.lumiMask      = '/afs/cern.ch/work/a/aspiezia/Ntuplizer/CMSSW_7_4_14/src/BSMFramework/BSM3G_TNT_Maker/data/JSON/Cert_246908-260627_13TeV_PromptReco_Collisions15_25ns_JSON_v2.txt'
    config.Data.outLFNDirBase = '/store/user/aspiezia/v6'  # hard-coded user output path
    config.section_("Site")
    config.Site.storageSite = 'T2_CN_Beijing'

    print 'Using config ' + options.config
    print 'Writing to directory ' + options.dir


    def submit(config):
        # Swallow HTTP errors from the CRAB server so a failed submission
        # does not abort the caller; other exceptions still propagate.
        try:
            crabCommand('submit', config = config)
        except HTTPException, hte:
            print 'Cannot execute command'
            print hte.headers
def main():
    """
    Drive CRAB for the MTD ``runHits`` analysis.

    With ``--crabCmd submit`` a base configuration is built and one task is
    submitted per dataset in ``inputDatasets``; any other CRAB command is
    applied to every task directory found under ``options.workArea``.

    NOTE(review): the trailing section beginning at
    ``cfg_original = cfgFile_original.read()`` references names that are never
    defined in this function (``cfgFile_original``, ``sampleOption``,
    ``sampleName``, ``version``, ``submissionDirectoryCrab``,
    ``cfgFileName_original``) — it looks like an unrelated fragment appended to
    the end and would raise NameError if reached; verify against the original
    script.
    """

    options = getOptions()
    # The submit command needs special treatment.
    if options.crabCmd == 'submit':

        #--------------------------------------------------------
        # This is the base config:
        #--------------------------------------------------------
        from CRABClient.UserUtilities import config
        config = config()

        config.General.requestName = 'runHits_analysis_v10_r2'
        #config.General.requestName = 'runHitsRelValSingleMuFlatPt0p7to10noPU-v2V7'
        config.General.workArea = options.workArea
        config.General.transferOutputs = True
        config.General.transferLogs = False

        config.JobType.pluginName = 'Analysis'
        config.JobType.psetName = 'runHits_cfg.py'
        #config.JobType.pyCfgParams = ['useMTDTrack=True','crysLayout=barzflat','output=DumpHits.root']
        #config.JobType.pyCfgParams = ['useMTDTrack=True','crysLayout=barzflat','output=DumpHits.root','dumpRecHits=True'] # BBT, 01-22-19
        config.JobType.pyCfgParams = [
            'useMTDTrack=False', 'crysLayout=barzflat', 'output=DumpHits.root',
            'dumpRecHits=True'
        ]  # BBT, 01-22-19

        config.Data.inputDataset = None
        #config.Data.inputDBS = 'phys03'
        config.Data.inputDBS = 'https://cmsweb.cern.ch/dbs/prod/global/DBSReader/'  ###Probably wont need to change

        #config.Data.splitting = 'FileBased'
        #config.Data.unitsPerJob = 1
        config.Data.splitting = 'Automatic'

        #config.Data.outLFNDirBase = '/store/user/meridian/MTD'
        #config.Data.outLFNDirBase = '/store/user/b/btannenw/MTD/10_4_0_mtd3/SingleMu_FlatPt_BTL_barz_v2/'
        config.Data.outLFNDirBase = '/store/user/btannenw/'
        config.Data.publication = False
        #config.Data.outputDatasetTag = '10_4_0_mtd3_runHits_analysis_v7'
        #config.Data.outputDatasetTag = '10_4_0_mtd3_runHitsRelValSingleMuFlatPt0p7to10noPU-v2V7_r0'
        config.Data.outputDatasetTag = '10_4_0_mtd5_runHits_analysis_v10_r2'
        config.Data.allowNonValidInputDataset = True
        config.Data.useParent = True

        #config.Site.storageSite = 'T2_CH_CERN'
        #config.User.voRole = 'priorityuser'
        config.Site.storageSite = 'T3_US_FNALLPC'

        #--------------------------------------------------------

        # Will submit one task for each of these input datasets.
        inputDatasets = [
            #'/RelValNuGun/CMSSW_10_4_0_mtd5-PU25ns_103X_upgrade2023_realistic_v2_2023D35PU200-v1/GEN-SIM-RECO', # 200PU
            #'/RelValSinglePiFlatPt_0p7to10_pythia8_cfi/CMSSW_10_4_0_mtd5-103X_upgrade2023_realistic_v2_2023D35noPU-v1/GEN-SIM-RECO', # single pion, no PU
            #'/RelValSingleMuFlatPt_0p7to10_pythia8/CMSSW_10_4_0_mtd5-103X_upgrade2023_realistic_v2_2023D35noPU-v1/GEN-SIM-RECO' # single muon, no PU
            '/MinBias_TuneCP5_14TeV-pythia8/PhaseIIMTDTDRAutumn18DR-NoPU_103X_upgrade2023_realistic_v2-v1/FEVT'  # minBias, no PU
            # ^^ TDR RelVals ^^

            #            '/RelValDYToLL_M_50_14TeV/meridian-CMSSW_10_4_0_mtd2_patch1-103X_upgrade2023_realistic_v2_2023D35noPU-v1-479d09f3e9ff3659dd49d9006e28a0a3/USER',
            #            '/RelValSingleMuFlatPt_0p7to10_pythia8/meridian-RelValSingleMuFlatPt0p7to10pythia8CMSSW1040mtd2patch1-103Xupgrade2023realisticv22023D35noPU-v2-479d09f3e9ff3659dd49d9006e28a0a3/USER',
            #            '/RelValMinBias_14TeV/meridian-RelValMinBias14TeVCMSSW1040mtd2patch1-103Xupgrade2023realisticv22023D35noPU-v1-479d09f3e9ff3659dd49d9006e28a0a3/USER',
            #            '/RelValSinglePiFlatPt_0p7to10_pythia8_cfi/meridian-RelValSinglePiFlatPt0p7to10pythia8cfiCMSSW1040mtd2patch1-103Xupgrade2023realisticv22023D35noPU-v2-479d09f3e9ff3659dd49d9006e28a0a3/USER',
            #            '/RelValSingleKaonFlatPt_0p7to10/meridian-RelValSingleKaonFlatPt0p7to10CMSSW1040mtd2patch1-103Xupgrade2023realisticv22023D35noPU-v1-479d09f3e9ff3659dd49d9006e28a0a3/USER'
            #### V2 ### chi2 cut @ 50
            #            '/RelValSinglePiFlatPt_0p7to10_pythia8_cfi/meridian-RelValSinglePiFlatPt0p7to10pythia8cfiCMSSW1040mtd2patch1-103Xupgrade2023realisticv22023D35noPU-v2V2-479d09f3e9ff3659dd49d9006e28a0a3/USER',
            #### V3 ### chi2 cut @ 1000
            #            '/RelValSinglePiFlatPt_0p7to10_pythia8_cfi/meridian-RelValSinglePiFlatPt0p7to10pythia8cfiCMSSW1040mtd2patch1-103Xupgrade2023realisticv22023D35noPU-v2V3-479d09f3e9ff3659dd49d9006e28a0a3/USER',
            #            '/RelValDYToLL_M_50_14TeV/meridian-RelValDYToLLM5014TeVCMSSW1040mtd2patch1-103Xupgrade2023realisticv22023D35noPU-v1V3-479d09f3e9ff3659dd49d9006e28a0a3/USER',
            #            '/RelValMinBias_14TeV/meridian-RelValMinBias14TeVCMSSW1040mtd2patch1-103Xupgrade2023realisticv22023D35noPU-v1V3-479d09f3e9ff3659dd49d9006e28a0a3/USER',
            #            '/RelValSingleMuFlatPt_0p7to10_pythia8/meridian-RelValSingleMuFlatPt0p7to10pythia8CMSSW1040mtd2patch1-103Xupgrade2023realisticv22023D35noPU-v2V3-479d09f3e9ff3659dd49d9006e28a0a3/USER',
            #            '/RelValSingleKaonFlatPt_0p7to10/meridian-RelValSingleKaonFlatPt0p7to10CMSSW1040mtd2patch1-103Xupgrade2023realisticv22023D35noPU-v1V3-479d09f3e9ff3659dd49d9006e28a0a3/USER',
            #            '/RelValSingleProtonFlatPt_0p7to10/meridian-RelValSingleProtonFlatPt0p7to10CMSSW1040mtd2patch1-103Xupgrade2023realisticv22023D35noPU-v1V3-479d09f3e9ff3659dd49d9006e28a0a3/USER',
            #            '/RelValDYToLL_M_50_14TeV/meridian-RelValDYToLLM5014TeVCMSSW1040mtd2patch1-PU25ns103Xupgrade2023realisticv22023D35PU200-v1V3-479d09f3e9ff3659dd49d9006e28a0a3/USER',
            #            '/DYToLL_M-50_14TeV_pythia8/meridian-DYToLLM-5014TeVpythia8PhaseIIMTDTDRAutumn18DR-PU200pilot103Xupgrade2023realisticv2ext2-v2V3-479d09f3e9ff3659dd49d9006e28a0a3/USER',
            #            '/RelValNuGun/meridian-RelValNuGunCMSSW1040mtd2patch1-PU25ns103Xupgrade2023realisticv22023D35PU200-v1V3-479d09f3e9ff3659dd49d9006e28a0a3/USE',
            #            '/RelValSingleMuFlatPt_0p7to10/meridian-RelValSingleMuFlatPt0p7to10CMSSW1040mtd2patch1-PU25ns103Xupgrade2023realisticv22023D35PU200-v1V3-479d09f3e9ff3659dd49d9006e28a0a3/USER'
            ### V4 chi2=50
            #            '/RelValSingleProtonFlatPt_0p7to10/meridian-RelValSingleProtonFlatPt0p7to10CMSSW1040mtd2patch1-103Xupgrade2023realisticv22023D35noPU-v1V4-479d09f3e9ff3659dd49d9006e28a0a3/USER',
            #            '/RelValSinglePiFlatPt_0p7to10_pythia8_cfi/meridian-RelValSinglePiFlatPt0p7to10pythia8cfiCMSSW1040mtd2patch1-103Xupgrade2023realisticv22023D35noPU-v2V4-479d09f3e9ff3659dd49d9006e28a0a3/USER',
            #            '/RelValSingleMuFlatPt_0p7to10_pythia8/meridian-RelValSingleMuFlatPt0p7to10pythia8CMSSW1040mtd2patch1-103Xupgrade2023realisticv22023D35noPU-v2V4-479d09f3e9ff3659dd49d9006e28a0a3/USER',
            #            '/RelValSingleMuFlatPt_0p7to10/meridian-RelValSingleMuFlatPt0p7to10CMSSW1040mtd2patch1-PU25ns103Xupgrade2023realisticv22023D35PU200-v1V4-479d09f3e9ff3659dd49d9006e28a0a3/USER',
            #            '/RelValSingleKaonFlatPt_0p7to10/meridian-RelValSingleKaonFlatPt0p7to10CMSSW1040mtd2patch1-103Xupgrade2023realisticv22023D35noPU-v1V4-479d09f3e9ff3659dd49d9006e28a0a3/USER',
            #            '/RelValNuGun/meridian-RelValNuGunCMSSW1040mtd2patch1-PU25ns103Xupgrade2023realisticv22023D35PU200-v1V4-479d09f3e9ff3659dd49d9006e28a0a3/USER',
            #            '/RelValMinBias_14TeV/meridian-RelValMinBias14TeVCMSSW1040mtd2patch1-103Xupgrade2023realisticv22023D35noPU-v1V4-479d09f3e9ff3659dd49d9006e28a0a3/USER',
            #            '/RelValDYToLL_M_50_14TeV/meridian-RelValDYToLLM5014TeVCMSSW1040mtd2patch1-PU25ns103Xupgrade2023realisticv22023D35PU200-v1V4-479d09f3e9ff3659dd49d9006e28a0a3/USER',
            #            '/RelValDYToLL_M_50_14TeV/meridian-RelValDYToLLM5014TeVCMSSW1040mtd2patch1-103Xupgrade2023realisticv22023D35noPU-v1V4-479d09f3e9ff3659dd49d9006e28a0a3/USER'
            #            '/DYToLL_M-50_14TeV_pythia8/meridian-DYToLLM-5014TeVpythia8PhaseIIMTDTDRAutumn18DR-PU200pilot103Xupgrade2023realisticv2ext2-v2V4-479d09f3e9ff3659dd49d9006e28a0a3/USER'
            ### V5 chi2=500 but use only bestChi2 in layer
            #            '/RelValSingleProtonFlatPt_0p7to10/meridian-RelValSingleProtonFlatPt0p7to10CMSSW1040mtd2patch1-103Xupgrade2023realisticv22023D35noPU-v1V5-5bbb48e34bbdc7eb02c41abac102606e/USER',
            #            '/RelValSinglePiFlatPt_0p7to10_pythia8_cfi/meridian-RelValSinglePiFlatPt0p7to10pythia8cfiCMSSW1040mtd2patch1-103Xupgrade2023realisticv22023D35noPU-v2V5-5bbb48e34bbdc7eb02c41abac102606e/USER',
            #            '/RelValSingleMuFlatPt_0p7to10_pythia8/meridian-RelValSingleMuFlatPt0p7to10pythia8CMSSW1040mtd2patch1-103Xupgrade2023realisticv22023D35noPU-v2V5-5bbb48e34bbdc7eb02c41abac102606e/USER',
            #            '/RelValSingleMuFlatPt_0p7to10/meridian-RelValSingleMuFlatPt0p7to10CMSSW1040mtd2patch1-PU25ns103Xupgrade2023realisticv22023D35PU200-v1V5-5bbb48e34bbdc7eb02c41abac102606e/USER',
            #            '/RelValSingleKaonFlatPt_0p7to10/meridian-RelValSingleKaonFlatPt0p7to10CMSSW1040mtd2patch1-103Xupgrade2023realisticv22023D35noPU-v1V5-5bbb48e34bbdc7eb02c41abac102606e/USER',
            #            '/RelValNuGun/meridian-RelValNuGunCMSSW1040mtd2patch1-PU25ns103Xupgrade2023realisticv22023D35PU200-v1V5-5bbb48e34bbdc7eb02c41abac102606e/USER',
            #            '/RelValMinBias_14TeV/meridian-RelValMinBias14TeVCMSSW1040mtd2patch1-103Xupgrade2023realisticv22023D35noPU-v1V5-5bbb48e34bbdc7eb02c41abac102606e/USER',
            #            '/RelValDYToLL_M_50_14TeV/meridian-RelValDYToLLM5014TeVCMSSW1040mtd2patch1-103Xupgrade2023realisticv22023D35noPU-v1V5-5bbb48e34bbdc7eb02c41abac102606e/USER',
            #            '/DYToLL_M-50_14TeV_pythia8/meridian-DYToLLM-5014TeVpythia8PhaseIIMTDTDRAutumn18DR-PU200pilot103Xupgrade2023realisticv2ext2-v2V5-5bbb48e34bbdc7eb02c41abac102606e/USER'
            ### V7 chi2=500, use only bestChi2, clusterTimeThreshold=10
            #             '/RelValSingleProtonFlatPt_0p7to10/meridian-RelValSingleProtonFlatPt0p7to10CMSSW1040mtd2patch1-103Xupgrade2023realisticv22023D35noPU-v1V7-45dc70b98f9b98b0807197db02f5776e/USER',
            #             '/RelValSinglePiFlatPt_0p7to10_pythia8_cfi/meridian-RelValSinglePiFlatPt0p7to10pythia8cfiCMSSW1040mtd2patch1-103Xupgrade2023realisticv22023D35noPU-v2V7-45dc70b98f9b98b0807197db02f5776e/USER',
            #             '/RelValSingleMuFlatPt_0p7to10_pythia8/meridian-RelValSingleMuFlatPt0p7to10pythia8CMSSW1040mtd2patch1-103Xupgrade2023realisticv22023D35noPU-v2V7-45dc70b98f9b98b0807197db02f5776e/USER',
            #             '/RelValSingleMuFlatPt_0p7to10/meridian-RelValSingleMuFlatPt0p7to10CMSSW1040mtd2patch1-PU25ns103Xupgrade2023realisticv22023D35PU200-v1V7-45dc70b98f9b98b0807197db02f5776e/USER',
            #             '/RelValSingleKaonFlatPt_0p7to10/meridian-RelValSingleKaonFlatPt0p7to10CMSSW1040mtd2patch1-103Xupgrade2023realisticv22023D35noPU-v1V7-45dc70b98f9b98b0807197db02f5776e/USER',
            #             '/RelValNuGun/meridian-RelValNuGunCMSSW1040mtd2patch1-PU25ns103Xupgrade2023realisticv22023D35PU200-v1V7-45dc70b98f9b98b0807197db02f5776e/USER',
            #             '/RelValMinBias_14TeV/meridian-RelValMinBias14TeVCMSSW1040mtd2patch1-103Xupgrade2023realisticv22023D35noPU-v1V7-45dc70b98f9b98b0807197db02f5776e/USER',
            #             '/RelValDYToLL_M_50_14TeV/meridian-RelValDYToLLM5014TeVCMSSW1040mtd2patch1-103Xupgrade2023realisticv22023D35noPU-v1V7-45dc70b98f9b98b0807197db02f5776e/USER',
            #             '/DYToLL_M-50_14TeV_pythia8/meridian-DYToLLM-5014TeVpythia8PhaseIIMTDTDRAutumn18DR-PU200pilot103Xupgrade2023realisticv2ext2-v2V7-45dc70b98f9b98b0807197db02f5776e/USER',
            ### V8 chi2=100, use only bestChi2, clusterTimeThreshold=10
            #             '/RelValSingleProtonFlatPt_0p7to10/meridian-RelValSingleProtonFlatPt0p7to10CMSSW1040mtd2patch1-103Xupgrade2023realisticv22023D35noPU-v1V8-45dc70b98f9b98b0807197db02f5776e/USER',
            #             '/RelValSinglePiFlatPt_0p7to10_pythia8_cfi/meridian-RelValSinglePiFlatPt0p7to10pythia8cfiCMSSW1040mtd2patch1-103Xupgrade2023realisticv22023D35noPU-v2V8-45dc70b98f9b98b0807197db02f5776e/USER',
            #             '/RelValSingleMuFlatPt_0p7to10_pythia8/meridian-RelValSingleMuFlatPt0p7to10pythia8CMSSW1040mtd2patch1-103Xupgrade2023realisticv22023D35noPU-v2V8-45dc70b98f9b98b0807197db02f5776e/USER',
            #             '/RelValSingleMuFlatPt_0p7to10/meridian-RelValSingleMuFlatPt0p7to10CMSSW1040mtd2patch1-PU25ns103Xupgrade2023realisticv22023D35PU200-v1V8-45dc70b98f9b98b0807197db02f5776e/USER',
            #             '/RelValSingleKaonFlatPt_0p7to10/meridian-RelValSingleKaonFlatPt0p7to10CMSSW1040mtd2patch1-103Xupgrade2023realisticv22023D35noPU-v1V8-45dc70b98f9b98b0807197db02f5776e/USER',
            #             '/RelValNuGun/meridian-RelValNuGunCMSSW1040mtd2patch1-PU25ns103Xupgrade2023realisticv22023D35PU200-v1V8-45dc70b98f9b98b0807197db02f5776e/USER',
            #             '/RelValMinBias_14TeV/meridian-RelValMinBias14TeVCMSSW1040mtd2patch1-103Xupgrade2023realisticv22023D35noPU-v1V8-45dc70b98f9b98b0807197db02f5776e/USER',
            #             '/RelValDYToLL_M_50_14TeV/meridian-RelValDYToLLM5014TeVCMSSW1040mtd2patch1-103Xupgrade2023realisticv22023D35noPU-v1V8-45dc70b98f9b98b0807197db02f5776e/USER',
            #             '/DYToLL_M-50_14TeV_pythia8/meridian-DYToLLM-5014TeVpythia8PhaseIIMTDTDRAutumn18DR-PU200pilot103Xupgrade2023realisticv2ext2-v2V8-45dc70b98f9b98b0807197db02f5776e/USER',
            ### V9 chi2=500, use only bestChi2, clusterTimeThreshold=5
            #             '/RelValSingleProtonFlatPt_0p7to10/meridian-RelValSingleProtonFlatPt0p7to10CMSSW1040mtd2patch1-103Xupgrade2023realisticv22023D35noPU-v1V9-bf30672fb8f2d5421b922079d4e20bce/USER',
            #             '/RelValSinglePiFlatPt_0p7to10_pythia8_cfi/meridian-RelValSinglePiFlatPt0p7to10pythia8cfiCMSSW1040mtd2patch1-103Xupgrade2023realisticv22023D35noPU-v2V9-bf30672fb8f2d5421b922079d4e20bce/USER',
            #             '/RelValSingleMuFlatPt_0p7to10_pythia8/meridian-RelValSingleMuFlatPt0p7to10pythia8CMSSW1040mtd2patch1-103Xupgrade2023realisticv22023D35noPU-v2V9-bf30672fb8f2d5421b922079d4e20bce/USER',
            #            '/RelValSingleMuFlatPt_0p7to10/meridian-RelValSingleMuFlatPt0p7to10CMSSW1040mtd2patch1-PU25ns103Xupgrade2023realisticv22023D35PU200-v1V9-bf30672fb8f2d5421b922079d4e20bce/USER',
            #             '/RelValSingleKaonFlatPt_0p7to10/meridian-RelValSingleKaonFlatPt0p7to10CMSSW1040mtd2patch1-103Xupgrade2023realisticv22023D35noPU-v1V9-bf30672fb8f2d5421b922079d4e20bce/USER',
            #             '/RelValNuGun/meridian-RelValNuGunCMSSW1040mtd2patch1-PU25ns103Xupgrade2023realisticv22023D35PU200-v1V9-bf30672fb8f2d5421b922079d4e20bce/USER',
            #             '/RelValMinBias_14TeV/meridian-RelValMinBias14TeVCMSSW1040mtd2patch1-103Xupgrade2023realisticv22023D35noPU-v1V9-bf30672fb8f2d5421b922079d4e20bce/USER',
            #             '/RelValDYToLL_M_50_14TeV/meridian-RelValDYToLLM5014TeVCMSSW1040mtd2patch1-103Xupgrade2023realisticv22023D35noPU-v1V9-bf30672fb8f2d5421b922079d4e20bce/USER',
            #             '/DYToLL_M-50_14TeV_pythia8/meridian-DYToLLM-5014TeVpythia8PhaseIIMTDTDRAutumn18DR-PU200pilot103Xupgrade2023realisticv2ext2-v2V9-bf30672fb8f2d5421b922079d4e20bce/USER'

            ### V10 chi2=100, use only bestChi2, clusterTimeThreshold=10
            #'/DYToLL_M-50_14TeV_pythia8/meridian-DYToLLM-5014TeVpythia8PhaseIIMTDTDRAutumn18DR-PU200pilot103Xupgrade2023realisticv2ext2-v2V10-45dc70b98f9b98b0807197db02f5776e/USER'
        ]

        for inDS in inputDatasets:
            # inDS is of the form /A/B/C. Since B is unique for each inDS, use this in the CRAB request name.
            config.General.requestName = 'runHits_%s-v10_r2' % (
                inDS.split('/')[1])
            # NOTE(review): str.translate(None, '_') is the Python 2 form that
            # deletes underscores from the request name; it raises on Python 3.
            config.General.requestName = config.General.requestName.translate(
                None, '_')
            config.Data.inputDataset = inDS
            config.Data.outputDatasetTag = '%s_v10_r2' % (
                config.General.requestName)
            # Submit.
            try:
                print "Submitting for input dataset %s" % (inDS)
                crabCommand(options.crabCmd,
                            config=config,
                            *options.crabCmdOpts.split())
            except HTTPException as hte:
                print "Submission for input dataset %s failed: %s" % (
                    inDS, hte.headers)
            except ClientException as cle:
                print "Submission for input dataset %s failed: %s" % (inDS,
                                                                      cle)

    # All other commands can be simply executed.
    elif options.workArea:

        for dir in os.listdir(options.workArea):
            projDir = os.path.join(options.workArea, dir)
            if not os.path.isdir(projDir):
                continue
            # Execute the crab command.
            msg = "Executing (the equivalent of): crab %s --dir %s %s" % (
                options.crabCmd, projDir, options.crabCmdOpts)
            print "-" * len(msg)
            print msg
            print "-" * len(msg)
            try:
                crabCommand(options.crabCmd,
                            dir=projDir,
                            *options.crabCmdOpts.split())
            except HTTPException as hte:
                print "Failed executing command %s for task %s: %s" % (
                    options.crabCmd, projDir, hte.headers)
            except ClientException as cle:
                print "Failed executing command %s for task %s: %s" % (
                    options.crabCmd, projDir, cle)
    # NOTE(review): everything below uses undefined names — see docstring.
    cfg_original = cfgFile_original.read()
    cfgFile_original.close()

    # Activate template lines ("#__" prefix) and substitute the sample type.
    cfg_modified = cfg_original.replace("#__", "")
    cfg_modified = cfg_modified.replace("#type#", "'%s'" % sampleOption['type'])

    cfgFileName_modified = os.path.join(submissionDirectoryCrab, cfgFileName_original.replace("_cfg.py", "_%s_%s_cfg.py" % (sampleName, version)))
    if not os.path.exists(os.path.dirname(cfgFileName_modified)): os.makedirs(os.path.dirname(cfgFileName_modified))
    cfgFile_modified = open(cfgFileName_modified, "w")
    cfgFile_modified.write(cfg_modified)
    cfgFile_modified.close()

    output_files = [ "antiElectronDiscrMVATrainingNtuple.root" ]
        
    # create config file for Crab
    crabConfig = config()
    crabConfig.General.workArea = os.path.join(submissionDirectoryCrab, "crabdir_%s_%s" % (sampleName, version))
    #check_path(crabConfig.General.workArea)
    crabConfig.General.transferOutputs = True
    crabConfig.General.transferLogs = True
    crabConfig.General.requestName = sampleName
    crabConfig.User.voGroup = 'dcms'

    crabConfig.JobType.pluginName = 'Analysis'
    crabConfig.JobType.psetName = cfgFileName_modified
    crabConfig.JobType.allowUndistributedCMSSW = True
    crabConfig.JobType.outputFiles = [ "antiElectronDiscrMVATrainingNtuple.root" ]

    crabConfig.Data.inputDBS = 'global'
    crabConfig.Data.inputDataset = sampleOption['datasetpath']
    crabConfig.Data.splitting = 'FileBased'
# CRAB configuration TEMPLATE: the %%TOKEN%% placeholders are substituted by a
# driver script before this file becomes valid Python — do not run it as-is.
from CRABClient.UserUtilities import config
configure = config()

# Variables
kind = "tuple"
process = "%%PROCESS%%"           # "qcdp", "sq150to4j", etc.
subprocess = "%%SUBPROCESS%%"     # "qcdp50", "sq150to4j", etc. This is used in the output name.
generation = "%%GENERATION%%"
suffix = "%%SUFFIX%%"
n = 200000                         # Number of events per job.
#units = %%UNITS%%                 # The number of events to run over (only necessary when generating!)
# /Variables

configure.General.requestName = '{0}_{1}_{2}_{3}'.format(kind, subprocess, generation, suffix)
configure.General.workArea = 'crab_projects'
configure.General.transferLogs = True
configure.JobType.pluginName = 'Analysis'
configure.JobType.psetName = '%%CMSSWCONFIG%%'       # The CMSSW configuration file name
configure.JobType.pyCfgParams = %%LISTOFPARAMS%%     # The parameters passed to the CMSSW configuration file
configure.JobType.inputFiles = ["jec_data"]          # Files and directories that CRAB has access to.

configure.Data.inputDataset = '%%DATASET%%'          # Input dataset name
configure.Data.inputDBS = '%%INSTANCE%%'             # "global" (official), "phys03" (private), etc.

configure.Data.splitting = 'EventAwareLumiBased'
configure.Data.unitsPerJob = n
#configure.Data.totalUnits = units
configure.Data.outLFNDirBase = '/store/user/elhughes'         # Only other option: "/store/group/<groupname>/<subdir>"
configure.Data.publication = False
configure.Data.outputDatasetTag = '{0}_{1}_{2}_{3}'.format(kind, subprocess, generation, suffix)
%%MASK%%
def submitLimitCrab(tag,h,amasses,**kwargs):
    """
    Submit combine limit calculations for Higgs mass ``h`` and the list of
    pseudoscalar masses ``amasses`` as a CRAB task.

    The mass points are smuggled to the grid as fake "input files"
    (``Data.userInputFiles``); the bash wrapper reads them back out of PSet.py
    and runs the combine commands once per mass. A dummy FrameworkJobReport.xml
    is emitted so CRAB counts the job as successful.

    Keyword arguments (all optional, via **kwargs):
        dryrun       -- pass --dryrun to crab submit (default False)
        jobName      -- label used for work/output directories (default None)
        pointsPerJob -- mass points per CRAB job (default 10)
        parametric   -- use the parametric 'X' datacard instead of per-mass ones
    Remaining kwargs are forwarded to getCommands().

    Returns None; logs and returns early if the submit directory already exists.
    """
    dryrun = kwargs.get('dryrun',False)
    jobName = kwargs.get('jobName',None)
    pointsPerJob = kwargs.get('pointsPerJob',10)
    parametric = kwargs.get('parametric',False)

    # Literal shell variable, expanded by the per-mass loop in the bash script.
    a = '${A}'

    datacard = 'datacards_shape/MuMuTauTau/mmmt_{}_HToAAH{}A{}.txt'.format(tag,h,'X' if parametric else '${A}')

    combineCommands = getCommands(**kwargs)

    sample_dir = '/{}/{}/crab_projects/{}/{}/{}'.format(scratchDir,pwd.getpwuid(os.getuid())[0], jobName, tag, h)
    python_mkdir(sample_dir)

    # create submit dir
    submit_dir = '{}/crab'.format(sample_dir)
    if os.path.exists(submit_dir):
        logging.warning('Submission directory exists for {0}.'.format(jobName))
        return

    # create bash script
    bash_name = '{}/script.sh'.format(sample_dir)
    bashScript = '#!/bin/bash\n'
    bashScript += 'eval `scramv1 runtime -sh`\n'
    bashScript += 'ls\n'
    bashScript += 'printenv\n'
    bashScript += 'mkdir datacards_shape\n'
    bashScript += 'mv MuMuTauTau datacards_shape/MuMuTauTau\n'
    # Recover the mass points that were packed into PSet's fileNames.
    bashScript += 'files=`python -c "import PSet; print \' \'.join(list(PSet.process.source.fileNames))"`\n'
    bashScript += 'echo $files\n'
    bashScript += 'for A in $files; do\n'
    for cc in combineCommands:
        bashScript += cc.format(datacard=datacard,h=h,a=a,tag=tag)+'\n'
    bashScript += 'done\n'
    # Fake job report so CRAB's post-processing accepts the job.
    bashScript += """echo '''<FrameworkJobReport>\
<ReadBranches>\n
</ReadBranches>\n
<PerformanceReport>\n
  <PerformanceSummary Metric="StorageStatistics">\n
    <Metric Name="Parameter-untracked-bool-enabled" Value="true"/>\n
    <Metric Name="Parameter-untracked-bool-stats" Value="true"/>\n
    <Metric Name="Parameter-untracked-string-cacheHint" Value="application-only"/>\n
    <Metric Name="Parameter-untracked-string-readHint" Value="auto-detect"/>\n
    <Metric Name="ROOT-tfile-read-totalMegabytes" Value="0"/>\n
    <Metric Name="ROOT-tfile-write-totalMegabytes" Value="0"/>\n
  </PerformanceSummary>\n
</PerformanceReport>\n
<GeneratorInfo>\n
</GeneratorInfo>\n
</FrameworkJobReport>''' > FrameworkJobReport.xml\n"""
    with open(bash_name,'w') as file:
        file.write(bashScript)
    os.system('chmod +x {0}'.format(bash_name))

    # setup crab config
    from CRABClient.UserUtilities import config

    config = config()

    config.General.workArea         = submit_dir
    config.General.transferOutputs  = True

    config.JobType.pluginName       = 'Analysis'
    config.JobType.psetName         = '{0}/src/DevTools/Utilities/test/PSet.py'.format(os.environ['CMSSW_BASE'])
    config.JobType.scriptExe        = bash_name
    config.JobType.outputFiles      = []
    config.JobType.inputFiles       = ['datacards_shape/MuMuTauTau']

    config.Data.outLFNDirBase       = '/store/user/{}/{}/{}/{}'.format(UNAME, jobName, tag, h)
    config.Data.outputDatasetTag    = jobName
    # Each "file" is really a pseudoscalar mass value; FileBased splitting then
    # gives pointsPerJob masses per job.
    config.Data.userInputFiles      = [str(a) for a in amasses]
    config.Data.splitting           = 'FileBased'
    config.Data.unitsPerJob         = pointsPerJob
    config.Data.outputPrimaryDataset= 'Limits'

    config.Site.storageSite         = 'T2_US_Wisconsin'

    # submit
    submitArgs = ['--config',config]
    if dryrun: submitArgs += ['--dryrun']

    from CRABClient.ClientExceptions import ClientException
    from CRABClient.ClientUtilities import initLoggers
    from httplib import HTTPException
    import CRABClient.Commands.submit as crabClientSubmit

    tblogger, logger, memhandler = initLoggers()
    tblogger.setLevel(logging.INFO)
    logger.setLevel(logging.INFO)
    memhandler.setLevel(logging.INFO)

    try:
        logging.info('Submitting {}/{}/{}'.format(jobName,tag,h))
        crabClientSubmit.submit(logger,submitArgs)()
    except HTTPException as hte:
        logging.info("Submission failed: {}".format(hte.headers))
    except ClientException as cle:
        logging.info("Submission failed: {}".format(cle))
# Beispiel #17
# 0
def main():
    """
    Drive CRAB for the 2018 B-Parking ntuple production.

    With ``--crabCmd submit`` a base CRAB configuration is built and one task
    is submitted per dataset in ``inputDatasets``. Any other CRAB command is
    applied to every task directory found under ``options.workArea``.
    """

    options = getOptions()
    # The submit command needs special treatment.
    if options.crabCmd == 'submit':

        #--------------------------------------------------------
        # This is the base config:
        #--------------------------------------------------------
        from CRABClient.UserUtilities import config
        config = config()

        # Request name and input dataset are filled in per dataset below.
        config.General.requestName = None
        #config.General.workArea = 'ZMuondecay'
        config.General.workArea = 'testParkedCB3'
        config.General.transferOutputs = True
        config.General.transferLogs = False

        config.JobType.pluginName = 'Analysis'
        config.JobType.psetName = '/afs/cern.ch/work/g/gayalasa/public/B0Analysis/CMSSW_10_6_12/src/myAnalyzers/BtoKsMuMu/test/Psiks0Rootupler.py'  #Parked configfile
        config.JobType.allowUndistributedCMSSW = True

        config.Data.inputDataset = None
        config.Data.inputDBS = 'global'
        #     config.Data.splitting = 'Automatic'
        config.Data.splitting = 'FileBased'
        config.Data.unitsPerJob = 20
        #     config.Data.totalUnits = 30
        #config.Data.lumiMask = '' # no idea
        config.Data.publication = True
        config.Data.outputDatasetTag = None
        #	config.Data.outLFNDirBase = '/store/user/%s/parkedFullTest/' % ("gayalasa")
        #	config.Site.storageSite = 'T3_US_FNALLPC'
        config.Site.storageSite = 'T3_CH_CERNBOX'
        #config.Site.storageSite = None # Choose your site.
        # Run only on whitelisted US T2 sites, ignoring data locality.
        config.Site.whitelist = ['T2_US*']
        config.Data.ignoreLocality = True
        #--------------------------------------------------------

        # Will submit one task for each of these input datasets.
        inputDatasets = [
            '/ParkingBPH1/Run2018A-05May2019-v1/MINIAOD',  # BParked A 
            '/ParkingBPH2/Run2018A-05May2019-v1/MINIAOD',
            '/ParkingBPH3/Run2018A-05May2019-v1/MINIAOD',
            '/ParkingBPH4/Run2018A-05May2019-v1/MINIAOD',
            '/ParkingBPH5/Run2018A-05May2019-v1/MINIAOD',
            '/ParkingBPH6/Run2018A-05May2019-v1/MINIAOD',
            '/ParkingBPH1/Run2018B-05May2019-v2/MINIAOD',
            '/ParkingBPH2/Run2018B-05May2019-v2/MINIAOD',
            '/ParkingBPH3/Run2018B-05May2019-v2/MINIAOD',
            '/ParkingBPH4/Run2018B-05May2019-v2/MINIAOD',
            '/ParkingBPH5/Run2018B-05May2019-v2/MINIAOD',
            '/ParkingBPH6/Run2018B-05May2019-v2/MINIAOD',
            '/ParkingBPH1/Run2018C-05May2019-v1/MINIAOD',
            '/ParkingBPH2/Run2018C-05May2019-v1/MINIAOD',
            '/ParkingBPH3/Run2018C-05May2019-v1/MINIAOD',
            '/ParkingBPH4/Run2018C-05May2019-v1/MINIAOD',
            '/ParkingBPH5/Run2018C-05May2019-v1/MINIAOD',
            '/ParkingBPH1/Run2018D-05May2019promptD-v1/MINIAOD',
            # BUG FIX: a comma was missing after the BPH2 entry below, so
            # Python's implicit string-literal concatenation fused the BPH2 and
            # BPH3 Run2018D names into one bogus dataset and silently dropped
            # BPH3 from the submission.
            '/ParkingBPH2/Run2018D-05May2019promptD-v1/MINIAOD',
            '/ParkingBPH3/Run2018D-05May2019promptD-v1/MINIAOD',
            '/ParkingBPH4/Run2018D-05May2019promptD-v1/MINIAOD',
            '/ParkingBPH5/Run2018D-05May2019promptD-v1/MINIAOD'
        ]

        for inDS in inputDatasets:
            # inDS is of the form /A/B/C. Since B is unique for each inDS, use this in the CRAB request name.
            config.General.requestName = inDS.split('/')[1] + '-' + inDS.split(
                '/')[2]
            config.Data.inputDataset = inDS
            config.Data.outputDatasetTag = '%s_%s' % (
                config.General.workArea, config.General.requestName)
            # Submit.
            try:
                print("Submitting for input dataset %s" % (inDS))
                crabCommand(options.crabCmd,
                            config=config,
                            *options.crabCmdOpts.split())
            except HTTPException as hte:
                print("Submission for input dataset %s failed: %s" % (
                    inDS, hte.headers))
            except ClientException as cle:
                print("Submission for input dataset %s failed: %s" % (inDS,
                                                                      cle))

    # All other commands can be simply executed.
    elif options.workArea:

        # 'task_dir' (was 'dir') avoids shadowing the builtin dir().
        for task_dir in os.listdir(options.workArea):
            projDir = os.path.join(options.workArea, task_dir)
            if not os.path.isdir(projDir):
                continue
            # Execute the crab command.
            msg = "Executing (the equivalent of): crab %s --dir %s %s" % (
                options.crabCmd, projDir, options.crabCmdOpts)
            print("-" * len(msg))
            print(msg)
            print("-" * len(msg))
            try:
                crabCommand(options.crabCmd,
                            dir=projDir,
                            *options.crabCmdOpts.split())
            except HTTPException as hte:
                print("Failed executing command %s for task %s: %s" % (
                    options.crabCmd, projDir, hte.headers))
            except ClientException as cle:
                print("Failed executing command %s for task %s: %s" % (
                    options.crabCmd, projDir, cle))
# Beispiel #18
# 0
    'HNL_majorana_all_ctau1p0e04_massHNL3p0_Vall3p107e-04_tarball.tar.xz',
    'HNL_majorana_all_ctau1p0e04_massHNL4p5_Vall1p016e-04_tarball.tar.xz',
]
'''

#gridpacks = gridpacks[0:30]
# One CRAB job per gridpack: map a cleaned-up job name to a list of callables
# that patch the per-job configuration with the gridpack's xrootd location.
myJobs = {}
for gridpack in gridpacks:
    myJobs[gridpack.replace('+', '').replace('_tarball.tar.xz', '')] = [
        # 'gridpack=gridpack' binds the current loop value at definition time,
        # deliberately avoiding the late-binding-closure pitfall.
        lambda cfg, gridpack=gridpack: cfg.JobType.pyCfgParams.append(
            'gridpack=root://gfe02.grid.hep.ph.ic.ac.uk//pnfs/hep.ph.ic.ac.uk/data/cms/store/user/mkomm/HNL/gridpacksv3/'
            + gridpack)
    ]

userName = '******'  #getUsernameFromSiteDB()
# Template CRAB configuration shared by every gridpack job (private MC
# production through the LHE2PAT.sh wrapper).
configTmpl = config()

configTmpl.section_('General')
configTmpl.General.transferOutputs = True
configTmpl.General.transferLogs = False

configTmpl.section_('JobType')
configTmpl.JobType.psetName = "lhe2pu_2018.py"
configTmpl.JobType.pluginName = 'PrivateMC'
configTmpl.JobType.outputFiles = []
configTmpl.JobType.allowUndistributedCMSSW = True
configTmpl.JobType.maxJobRuntimeMin = 18 * 60  # 18-hour runtime cap
configTmpl.JobType.scriptExe = 'LHE2PAT.sh'
configTmpl.JobType.pyCfgParams = []
configTmpl.JobType.inputFiles = ['LHE2PAT.sh']
configTmpl.JobType.maxMemoryMB = 3500
def main():

    options = getOptions()

    skimmer_base_dir = os.path.join(os.environ['CMSSW_BASE'],
                                    'src/iDMAnalysis/skimmer')

    isData = False
    if options.sampleType == 'data' or options.sampleType == 'NoBPTX' or options.sampleType == 'trig':
        isData = True

    year = options.year

    # The submit command needs special treatment.
    if options.crabCmd == 'submit':

        #--------------------------------------------------------
        # This is the base config:
        #--------------------------------------------------------
        from CRABClient.UserUtilities import config  #, getUsernameFromSiteDB
        config = config()

        if not options.workArea:
            config.General.workArea = 'crab'
        else:
            config.General.workArea = options.workArea
        config.General.transferOutputs = True
        config.General.transferLogs = True

        config.JobType.pluginName = 'Analysis'
        config.JobType.psetName = os.path.join(skimmer_base_dir,
                                               'test/run_ntuplizer_cfg.py')
        config.JobType.allowUndistributedCMSSW = True
        #config.JobType.psetName = 'python/iDMAnalyzer_cfg.py'
        #config.JobType.maxMemoryMB = 4000
        #config.JobType.numCores = 1

        config.Data.splitting = 'Automatic'
        if isData == True:
            #config.Data.splitting = 'LumiBased'
            #config.Data.unitsPerJob = 100
            if options.sampleType == 'NoBPTX':
                config.Data.lumiMask = os.path.join(
                    skimmer_base_dir,
                    'data/CosmicJSON_E_D_UGMT_bottomOnly.txt')
            elif year == '2018':
                config.Data.lumiMask = 'https://cms-service-dqm.web.cern.ch/cms-service-dqm/CAF/certification/Collisions18/13TeV/ReReco/Cert_314472-325175_13TeV_17SeptEarlyReReco2018ABC_PromptEraD_Collisions18_JSON.txt'
            elif year == '2017':
                config.Data.lumiMask = 'https://cms-service-dqm.web.cern.ch/cms-service-dqm/CAF/certification/Collisions17/13TeV/ReReco/Cert_294927-306462_13TeV_EOY2017ReReco_Collisions17_JSON_v1.txt'
            elif year == '2016':
                config.Data.lumiMask = 'https://cms-service-dqm.web.cern.ch/cms-service-dqm/CAF/certification/Collisions16/13TeV/ReReco/Final/Cert_271036-284044_13TeV_ReReco_07Aug2017_Collisions16_JSON.txt'

        if isData == True:
            config.Data.outLFNDirBase = '/store/group/lpcmetx/iDM/Ntuples/%s/data_fourteenthrun' % year
        else:
            if options.sampleType == 'signal':
                config.Data.outLFNDirBase = '/store/group/lpcmetx/iDM/Ntuples/%s/signal_fourteenthrun' % year
            else:
                config.Data.outLFNDirBase = '/store/group/lpcmetx/iDM/Ntuples/%s/backgrounds_fourteenthrun' % year

        config.Data.publication = False
        config.Data.ignoreLocality = True

        if options.sampleType == 'cosmics':
            config.Data.inputDBS = 'phys03'

        #config.Site.ignoreGlobalBlacklist = True
        #config.Site.whitelist = ['T2_RU_ITEP']
        config.Site.whitelist = [
            'T2_US_*', 'T2_DE_*', 'T2_EE_*', 'T2_ES_*', 'T2_GR_*', 'T2_HU_*',
            'T2_IT_*', 'T2_RU_*', 'T2_UK_*'
        ]
        config.Site.blacklist = ['T2_TW_NCHC', 'T2_BE_IIHE', 'T2_FR_GRIF_LLR']
        #config.Site.blac = ['T3_RU_FIAN', 'T3_US_MIT', 'T3_US_UCD',
        #'T3_CO_Uniandes', 'T3_US_NotreDame',
        #'T3_IN_PUHEP', 'T3_UK_ScotGrid_ECDF', 'T3_BY_NCPHEP',
        #'T2_CH_CERN_HLT', 'T3_CH_CERN_HelixNebula', 'T3_IN_TIFRCloud',
        #'T0_CH_CERN', 'T3_GR_IASA', 'T3_CN_PKU', 'T0_CH_CSCS_HPC',
        #'T3_IR_IPM', 'T2_RU_ITEP', 'T3_US_JHU', 'T3_US_Kansas',
        #'T3_US_FSU', 'T3_KR_UOS', 'T3_CH_PSI']
        #config.Site.whitelist = ["T3_US_FNALLPC"]
        config.Site.storageSite = 'T3_US_FNALLPC'

        #--------------------------------------------------------

        total = {}

        with open(
                os.path.join(skimmer_base_dir,
                             'data/dataset_db_%s.json' % year), 'r') as db:
            data = json.load(db)

            QCD = data['QCD_' + year]

            TTbar = data['TTbar_' + year]
            for it in [
                    'TTTo2L2Nu', 'TTSemiLeptonic', 'TTToHadronic', 'TT_diLept'
            ]:
                if it in TTbar:
                    del TTbar[it]

            SingleTop = data['SingleTop_' + year]
            #for it in ['ST_t-channel_top_5f', 'ST_t-channel_antitop_5f']:
            if year != '2016':
                for it in ['ST_t-channel_top_4f', 'ST_t-channel_antitop_4f']:
                    if it in SingleTop:
                        del SingleTop[it]

            WJets = data['WJets_' + year]
            for it in ['WJetsToLnu', 'WJetsToLNu_HT-70To100']:
                if it in WJets:
                    del WJets[it]

            ZJets = data['ZJets_' + year]

            DY = data['DY_' + year]
            for it in ['DYJetsToTauTau', 'DYJetsToLL_M-5to50']:
                if it in DY:
                    del DY[it]
            if year == '2017':
                for it in ['DYJetsToLL_M-50toInf']:
                    if it in DY:
                        del DY[it]
            elif year == '2016':
                for it in ['DYJetsToLL_M-50toInf_NLO']:
                    if it in DY:
                        del DY[it]

            Diboson = data['Diboson_' + year]
            for it in [
                    'WZTo3LNu', 'ZZTo2L2Nu', 'ZZTo2L2Nu_ext2', 'WWTo2L2Nu',
                    'WWJJToLNuLNu'
            ]:
                if it in Diboson:
                    del Diboson[it]

            Triboson = data['Triboson_' + year]

            total_MC = merge_dicts(QCD, TTbar, SingleTop, ZJets, WJets, DY,
                                   Diboson, Triboson)

            for key, val in total_MC.items():
                total_MC[key + '_' + year] = val
                del total_MC[key]

            total_Data = data['Data_MET_' + year]
            #del total_Data["MET_2017RunB"]
            #del total_Data["MET_Run2018A"]
            #del total_Data["MET_2016RunB"]

            total_Trig = data['Data_SingleMu_' + year]

            total_NoBPTX = data['Data_NoBPTX_' + year]
            for it in [
                    'NoBPTX_2016RunB', 'NoBPTX_2016RunC', 'NoBPTX_2016RunD',
                    'NoBPTX_2016RunF', 'NoBPTX_2016RunG', 'NoBPTX_2016RunH'
            ]:
                if it in total_NoBPTX:
                    del total_NoBPTX[it]

            #total_cosmics = data['CosmicsMC_' + year]

            total_signal = data['signal_' + year]
            for key, val in total_signal.items():
                total_signal[key + '_' + year] = val
                del total_signal[key]

            if options.sampleType == 'data':
                total = merge_dicts(total, total_Data)
            elif options.sampleType == 'signal':
                total = merge_dicts(total, total_signal)
            elif options.sampleType == 'trig':
                total = merge_dicts(total, total_Trig)
            elif options.sampleType == 'NoBPTX':
                total = merge_dicts(total, total_NoBPTX)
            elif options.sampleType == 'cosmics':
                total = merge_dicts(total, total_cosmics)
            elif options.sampleType == 'MC':
                total = merge_dicts(total, total_MC)
            elif options.sampleType == 'all':
                total = merge_dicts(total, total_MC, total_Data, total_Trig)
            elif options.sampleType == 'custom':
                #total = {'ST_tW_top': '/ST_tW_top_5f_inclusiveDecays_13TeV-powheg-pythia8_TuneCUETP8M1/RunIISummer16DR80Premix-PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6_ext1-v1/AODSIM',
                #        'WWW': '/WWW_4F_TuneCUETP8M1_13TeV-amcatnlo-pythia8/RunIISummer16DR80Premix-PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/AODSIM',
                #        'WW': '/WW_TuneCUETP8M1_13TeV-pythia8/RunIISummer16DR80Premix-PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/AODSIM'
                #        }
                #total = {'ST_tW_top_2018': '/ST_tW_top_5f_inclusiveDecays_TuneCP5_13TeV-powheg-pythia8/RunIIAutumn18DRPremix-102X_upgrade2018_realistic_v15_ext1-v1/AODSIM',
                #        'ST_t-channel_antitop_5f_2018': '/ST_t-channel_antitop_5f_TuneCP5_13TeV-powheg-pythia8/RunIIAutumn18DRPremix-102X_upgrade2018_realistic_v15-v1/AODSIM',
                #        'ST_t-channel_top_5f_2018': '/ST_t-channel_top_5f_TuneCP5_13TeV-powheg-pythia8/RunIIAutumn18DRPremix-102X_upgrade2018_realistic_v15-v1/AODSIM',
                #        'ZJetsToNuNu_HT-100To200_2018': '/ZJetsToNuNu_HT-100To200_13TeV-madgraph/RunIIAutumn18DRPremix-102X_upgrade2018_realistic_v15-v1/AODSIM',
                #        'WJetsToLNu_HT-400To600_2018': '/WJetsToLNu_HT-400To600_TuneCP5_13TeV-madgraphMLM-pythia8/RunIIAutumn18DRPremix-102X_upgrade2018_realistic_v15-v1/AODSIM',
                #        'WJetsToLNu_HT-200To400_2018': '/WJetsToLNu_HT-200To400_TuneCP5_13TeV-madgraphMLM-pythia8/RunIIAutumn18DRPremix-102X_upgrade2018_realistic_v15-v1/AODSIM',
                #        'DYJetsToLL_M-50toInf_2018': '/DYJetsToLL_M-50_TuneCP5_13TeV-madgraphMLM-pythia8/RunIIAutumn18DRPremix-102X_upgrade2018_realistic_v15-v1/AODSIM',
                #        'WWZ_2018': '/WWZ_TuneCP5_13TeV-amcatnlo-pythia8/RunIIAutumn18DRPremix-102X_upgrade2018_realistic_v15_ext1-v2/AODSIM'
                #        }
                #total = {'QCD_HT500To700_2017': '/QCD_HT500to700_TuneCP5_13TeV-madgraph-pythia8/RunIIFall17DRPremix-PU2017_94X_mc2017_realistic_v11-v2/AODSIM'}
                #total = {'DYJetsToLL_M-50_HT1200to2500_2017': '/DYJetsToLL_M-50_HT-1200to2500_TuneCP5_13TeV-madgraphMLM-pythia8/RunIIFall17DRPremix-94X_mc2017_realistic_v11-v1/AODSIM'}
                #total = {'ZJetsToNuNu_HT-1200To2500_2018': '/ZJetsToNuNu_HT-1200To2500_13TeV-madgraph/RunIIAutumn18DRPremix-102X_upgrade2018_realistic_v15-v1/AODSIM'}
                #total = {'QCD_HT200To300_2018': '/QCD_HT200to300_TuneCP5_13TeV-madgraphMLM-pythia8/RunIIAutumn18DRPremix-102X_upgrade2018_realistic_v15-v1/AODSIM'}
                #total = {'DYJetsToLL_M-50toInf_2016': '/DYJetsToLL_M-50_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/RunIISummer16DR80Premix-PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6_ext2-v1/AODSIM'}
                #total = {'WJetsToLNu_HT-2500ToInf_2018': '/WJetsToLNu_HT-2500ToInf_TuneCP5_13TeV-madgraphMLM-pythia8/RunIIAutumn18DRPremix-102X_upgrade2018_realistic_v15-v1/AODSIM',
                #        'ZJetsToNuNu_HT-200To400_2018': '/ZJetsToNuNu_HT-200To400_13TeV-madgraph/RunIIAutumn18DRPremix-102X_upgrade2018_realistic_v15-v1/AODSIM'
                #        }
                #total = {'ST_t-channel_antitop_4f_2016': '/ST_t-channel_antitop_4f_inclusiveDecays_13TeV-powhegV2-madspin-pythia8_TuneCUETP8M1/RunIISummer16DR80Premix-PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/AODSIM'}
                #total = {'QCD_HT100To200_2018': '/QCD_HT100to200_TuneCP5_13TeV-madgraphMLM-pythia8/RunIIAutumn18DRPremix-102X_upgrade2018_realistic_v15-v1/AODSIM'}
                #total = {'ST_s-channel_2016': '/ST_s-channel_4f_leptonDecays_13TeV-amcatnlo-pythia8_TuneCUETP8M1/RunIISummer16DR80Premix-PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/AODSIM'}
                #total = {'WW_2017': '/WW_TuneCP5_13TeV-pythia8/RunIIFall17DRPremix-PU2017_94X_mc2017_realistic_v11-v1/AODSIM',
                #        'WWZ_2017': '/WWZ_4F_TuneCP5_13TeV-amcatnlo-pythia8/RunIIFall17DRPremix-PU2017_94X_mc2017_realistic_v11-v2/AODSIM',
                #        'WZZ_2017': '/WZZ_TuneCP5_13TeV-amcatnlo-pythia8/RunIIFall17DRPremix-PU2017_94X_mc2017_realistic_v11-v1/AODSIM',
                #        'WWW_2017': '/WWW_4F_TuneCP5_13TeV-amcatnlo-pythia8/RunIIFall17DRPremix-PU2017_94X_mc2017_realistic_v11-v2/AODSIM',
                #        'DYJetsToLL_M-50_HT100to200_2017': '/DYJetsToLL_M-50_HT-100to200_TuneCP5_13TeV-madgraphMLM-pythia8/RunIIFall17DRPremix-PU2017_94X_mc2017_realistic_v11-v1/AODSIM'
                #        }
                #total = {'DYJetsToLL_M-50toInf_2016': '/DYJetsToLL_M-50_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/RunIISummer16DR80Premix-PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6_ext1-v1/AODSIM'}
                #total = {'DYJetsToLL_M-50_HT-400to600_2017': '/DYJetsToLL_M-50_HT-400to600_TuneCP5_13TeV-madgraphMLM-pythia8/RunIIFall17DRPremix-94X_mc2017_realistic_v10_ext1-v1/AODSIM'}
                #total = {'ZJetsToNuNu_HT-200To400_2018': '/ZJetsToNuNu_HT-200To400_13TeV-madgraph/RunIIAutumn18DRPremix-102X_upgrade2018_realistic_v15-v1/AODSIM'}
                #total = {'WJetsToLNu_HT-2500ToInf_2018': '/WJetsToLNu_HT-2500ToInf_TuneCP5_13TeV-madgraphMLM-pythia8/RunIIAutumn18DRPremix-102X_upgrade2018_realistic_v15-v1/AODSIM'}
                #total = {'QCD_HT500to700_2018': '/QCD_HT500to700_TuneCP5_13TeV-madgraphMLM-pythia8/RunIIAutumn18DRPremix-102X_upgrade2018_realistic_v15-v1/AODSIM'}
                #total = {'DYJetsToLL_M-50toInfo_2017': '/DYJetsToLL_M-50_TuneCP5_13TeV-madgraphMLM-pythia8/RunIIFall17DRPremix-RECOSIMstep_94X_mc2017_realistic_v10_ext1-v1/AODSIM'}
                #total = {'WJetsToLNu_HT-1200To2500': '/WJetsToLNu_HT-1200To2500_TuneCP5_13TeV-madgraphMLM-pythia8/RunIIAutumn18DRPremix-102X_upgrade2018_realistic_v15-v1/AODSIM'}
                #total = {'ZJetsToNuNu_HT-200To400': '/ZJetsToNuNu_HT-200To400_13TeV-madgraph/RunIIAutumn18DRPremix-102X_upgrade2018_realistic_v15-v1/AODSIM',
                #        'ZJetsToNuNu_HT-600To800': '/ZJetsToNuNu_HT-600To800_13TeV-madgraph/RunIIAutumn18DRPremix-102X_upgrade2018_realistic_v15-v1/AODSIM',
                #        'ZJetsToNuNu_HT-2500ToInf': '/ZJetsToNuNu_HT-2500ToInf_13TeV-madgraph/RunIIAutumn18DRPremix-102X_upgrade2018_realistic_v15-v1/AODSIM'}
                #total = {'QCD_HT500to700_2018': '/QCD_HT500to700_TuneCP5_13TeV-madgraphMLM-pythia8/RunIIAutumn18DRPremix-102X_upgrade2018_realistic_v15-v1/AODSIM'}
                total = {
                    'ZJetsToNuNu_HT-400To600_2017':
                    '/ZJetsToNuNu_HT-400To600_13TeV-madgraph/RunIIFall17DRPremix-94X_mc2017_realistic_v10-v1/AODSIM'
                }
                #total = {'DYJetsToLL_M-50toInfo_2016': '/DYJetsToLL_M-50_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/RunIISummer16DR80Premix-PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6_ext2-v1/AODSIM'}
                #total = {
                #        'ST_s-channel_2018': '/ST_s-channel_4f_leptonDecays_TuneCP5_13TeV-madgraph-pythia8/RunIIAutumn18DRPremix-102X_upgrade2018_realistic_v15_ext1-v4/AODSIM',
                #        'ST_t-channel_top_2018': '/ST_t-channel_top_5f_TuneCP5_13TeV-powheg-pythia8/RunIIAutumn18DRPremix-102X_upgrade2018_realistic_v15-v1/AODSIM',
                #        'ST_t-channel_antitop_2018': '/ST_t-channel_antitop_5f_TuneCP5_13TeV-powheg-pythia8/RunIIAutumn18DRPremix-102X_upgrade2018_realistic_v15-v1/AODSIM'
                #        }
                # total = {
                #         'ZJetsToNuNu_HT-800To1200_2016': '/ZJetsToNuNu_HT-800To1200_13TeV-madgraph/RunIISummer16DR80Premix-PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/AODSIM',
                #         'WJetsToLNu_HT-200To400_2016': '/WJetsToLNu_HT-200To400_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/RunIISummer16DR80Premix-PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6_ext2-v1/AODSIM',
                #         'WJetsToLNu_HT-1200To2500_2016': '/WJetsToLNu_HT-1200To2500_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/RunIISummer16DR80Premix-PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/AODSIM'
                #         }
                #total = merge_dicts() # -------------------------------> put here the custom samples you want!!!
            else:
                print "ERROR! SampleType option %s not recoginzed." % options.sampleType
                sys.exit()

        if len(total) == 0:
            print "ERROR! No samples selected to be processed."
            sys.exit()

        for sample, dataset in total.items():

            isRun2018D = False
            if sample == 'MET_Run2018D' or sample == 'NoBPTX_2018D':
                isRun2018D = True

            config.JobType.pyCfgParams = [
                'data={}'.format(isData), 'Run2018D={}'.format(isRun2018D),
                'numThreads={}'.format(1), 'year={}'.format(year)
            ]
            config.JobType.numCores = 1

            config.Data.inputDataset = dataset
            config.General.requestName = 'iDMAnalysis_' + sample
            #config.Data.outputDatasetTag = sample

            # If we need to pull input files from a list file instead of CRAB:
            #config.Data.userInputFiles = open(basedir + sample + '.list').readlines()

            # Submit.
            try:
                print "Submitting for input dataset %s with options %s" % (
                    sample, options.crabCmdOpts)
                crabCommand(options.crabCmd,
                            config=config,
                            *options.crabCmdOpts.split())
                #p = Process(target=crabCommand, args=(options.crabCmd, config, options.crabCmdOpts.split(),))
                #p.start()
                #p.join()
            except HTTPException as hte:
                print "Submission for input dataset %s failed: %s" % (
                    sample, hte.headers)
            except ClientException as cle:
                print "Submission for input dataset %s failed: %s" % (sample,
                                                                      cle)

    # All other commands can be simply executed.
    elif options.workArea:

        for dir in os.listdir(options.workArea):
            projDir = os.path.join(options.workArea, dir)
            if not os.path.isdir(projDir):
                continue
            # Execute the crab command.
            msg = "Executing (the equivalent of): crab %s --dir %s %s" % (
                options.crabCmd, projDir, options.crabCmdOpts)
            print "-" * len(msg)
            print msg
            print "-" * len(msg)
            try:
                crabCommand(options.crabCmd,
                            dir=projDir,
                            *options.crabCmdOpts.split())
            except HTTPException as hte:
                print "Failed executing command %s for task %s: %s" % (
                    options.crabCmd, projDir, hte.headers)
            except ClientException as cle:
                print "Failed executing command %s for task %s: %s" % (
                    options.crabCmd, projDir, cle)
Beispiel #20
0
def submission(events_per_job):
    from CRABClient.UserUtilities import config
    config = config()
    config.General.workArea = '/nfs/dust/cms/user/%s/kappa/crab_kappa_skim80X-%s' % (
        getUsernameFromSiteDB(), date)
    #config.General.workArea = '/net/scratch_cms/institut_3b/%s/kappa/crab_kappa_skim-%s'%(getUsernameFromSiteDB(), date)
    #config.General.workArea = '/nfs/dust/cms/user/<your-NAF-username>/kappa/crab_kappa_skim80X-%s'% date  #if CERN-username != NAF-username
    check_path(config.General.workArea)
    config.General.transferOutputs = True
    config.General.transferLogs = True
    config.User.voGroup = 'dcms'

    config.JobType.pluginName = 'Analysis'
    config.JobType.psetName = 'kSkimming_run2_cfg.py'
    #config.JobType.inputFiles = ['Spring16_25nsV6_DATA.db', 'Spring16_25nsV6_MC.db']
    config.JobType.allowUndistributedCMSSW = True
    config.Site.blacklist = ["T2_BR_SPRACE"]
    config.Data.splitting = 'FileBased'
    config.Data.unitsPerJob = 1
    config.Data.outLFNDirBase = '/store/user/%s/higgs-kit/skimming/80X_%s' % (
        getUsernameFromSiteDB(), date)
    config.Data.publication = False

    config.Site.storageSite = "T2_DE_DESY"
    # load nicknames form gc-style config files and write them to a flat nicknames list
    nicknames = read_grid_control_includes(
        ["samples/13TeV/Summer16_SM_Analysis.conf"])
    #nicknames = read_grid_control_includes(["samples/13TeV/Spring16_SM_Higgs_CPmixing_2.conf"])
    #nicknames = read_grid_control_includes(["samples/13TeV/2016B_Data.conf"])
    #nicknames = ['SUSYGluGluToHToTauTauM160_RunIIFall15MiniAODv2_76X_13TeV_MINIAOD_pythia8']

    # loop over datasets and get repsective nicks
    for nickname in nicknames:
        config.General.requestName = nickname[:100]
        config.Data.inputDBS = get_inputDBS_by_nick(nickname)
        config.Data.unitsPerJob = 1
        nfiles = get_n_files_from_nick(nickname)
        if events_per_job:
            nevents = get_n_generated_events_from_nick(nickname)
            try:
                if int(nfiles) > 0 and int(nevents) > 0:
                    files_per_job = int(events_per_job) * int(nfiles) / int(
                        nevents)
                    if files_per_job > 1:
                        config.Data.unitsPerJob = int(files_per_job)
            except:
                print "Its not possilbe to make ", events_per_job, " events/job for ", nickname, " which has Nevents:", nevents, " and Nfiles", nfiles, " in the database. Just make one file per job"
        if float(config.Data.unitsPerJob) > 0 and float(nfiles) / float(
                config.Data.unitsPerJob) >= job_submission_limit:
            files_per_job = ceil(float(nfiles) / job_submission_limit)
            if files_per_job > 1:
                config.Data.unitsPerJob = int(files_per_job)

        config.JobType.pyCfgParams = [
            'nickname=%s' % (nickname),
            'outputfilename=kappa_%s.root' % (nickname), 'mode=crab'
        ]
        config.JobType.outputFiles = ['kappa_%s.root' % (nickname)]
        config.Data.inputDataset = get_sample_by_nick(nickname)
        #config.Data.lumiMask = '/nfs/dust/cms/user/<NAF-username>/kappa/crab_kappa_skim80X-<campaign-date>/results/missingLumis.json' # for running of a subset of lumi sections
        p = Process(target=submit, args=(config, ))
        p.start()
        p.join()
Beispiel #21
0
def main():

    options = getOptions()

    # The submit command needs special treatment.
    if options.crabCmd == 'submit':

        #--------------------------------------------------------
        # This is the base config:
        #--------------------------------------------------------
        from CRABClient.UserUtilities import config
        config = config()
        
        config.General.requestName = None
        #config.General.workArea = None
        
        config.General.transferOutputs = True
        config.General.transferLogs = False
        
        config.JobType.pluginName = 'Analysis'
        config.JobType.psetName = 'PSet.py'
        config.JobType.scriptExe = 'crab_script.sh'
        config.JobType.inputFiles = ['example_postproc_2017_MC.py','../scripts/haddnano.py'] #hadd nano will not be needed once nano tools are in cmssw
        config.JobType.sendPythonFolder	 = True
        config.JobType.allowUndistributedCMSSW = True
        
        config.Data.inputDataset = None
        config.Data.inputDBS = 'global'
        config.Data.splitting = 'FileBased' # 'Automatic' #'LumiBased'
        #config.Data.splitting = 'EventAwareLumiBased'
        config.Data.unitsPerJob = 2
        config.Data.totalUnits = 10
        
        config.Data.outLFNDirBase = '/store/user/ssawant/NanoAODPostProc_%d' % era 
        config.Data.publication = False
        config.Data.outputDatasetTag = None ####
        #config.Data.useParent = True
        
        config.Site.storageSite = 'T2_IN_TIFR' # Choose your site. 
        #--------------------------------------------------------
        
        # Will submit one task for each of these input datasets.
        inputDatasets = []
        
        # Read input NanoAOD dataset from samples.py file
        for sample_name, sample_info in samples.items():
            #print "\n{}: {}".format(sample_name, sample_info)
            if sample_name == 'sum_events'  or  sample_info["type"] == "data":
                continue
            
            if sample_info["use_it"] == False:
                continue
            
            if 'NANOAODSIM' in sample_info['NanoAOD']:
                if not sample_info["process_name_specific"] in ["signal_ggf_spin0_400_hh_4t", "signal_ggf_spin0_400_hh_2v2t", "signal_ggf_spin0_400_hh_4v"]:
                    continue
                inputDatasets.append(sample_info['NanoAOD'])
        
        
        
        
        for inDS in inputDatasets: 
            config.General.requestName = inDS.split('/')[1] # sample name
            #config.General.workArea = config.General.requestName
            config.Data.inputDataset = inDS
            config.Data.outputDatasetTag = '%s' % (inDS.split('/')[2]) # Campaign GT details
            
            # Submit.
            try:
                print "Submitting for input dataset %s" % (inDS)
                crabCommand(options.crabCmd, config = config, *options.crabCmdOpts.split())
            except HTTPException as hte:
                print "Submission for input dataset %s failed: %s" % (inDS, hte.headers)
            except ClientException as cle:
                print "Submission for input dataset %s failed: %s" % (inDS, cle)
        
    # resubmit FAILED jobs
    elif options.crabCmd == 'resubmit' and options.workArea:
        
        for dir in os.listdir(options.workArea):
            projDir = os.path.join(options.workArea, dir)
            if not os.path.isdir(projDir):
                continue
            # Execute the crab status.
            msg = "Executing (the equivalent of): crab status --dir %s %s" % (projDir, options.crabCmdOpts)
            print "-"*len(msg)
            print msg
            print "-"*len(msg)
         
            result = crabCommand("status", dir = projDir, *options.crabCmdOpts.split())
            print "\n\nresult of the crab resubmit: ",result
            print "result['status']: ",result['status']
            print "result['dagStatus']: ",result['dagStatus']
            if not result['status'] in ['FAILED', 'SUBMITFAILED']:
                continue
            print "crab_resubmit: %s  *******" % projDir
            
            # resubmit
            try:
                crabCommand("resubmit", dir = projDir, *options.crabCmdOpts.split())
            except HTTPException as hte:
                print "Failed executing command %s for task %s: %s" % (options.crabCmd, projDir, hte.headers)
            except ClientException as cle:
                print "Failed executing command %s for task %s: %s" % (options.crabCmd, projDir, cle)
            
            
    # All other commands can be simply executed.
    elif options.workArea:
        
        for dir in os.listdir(options.workArea):
            projDir = os.path.join(options.workArea, dir)
            if not os.path.isdir(projDir):
                continue
            # Execute the crab command.
            msg = "Executing (the equivalent of): crab %s --dir %s %s" % (options.crabCmd, projDir, options.crabCmdOpts)
            print "-"*len(msg)
            print msg
            print "-"*len(msg)
            try:
                crabCommand(options.crabCmd, dir = projDir, *options.crabCmdOpts.split())
            except HTTPException as hte:
                print "Failed executing command %s for task %s: %s" % (options.crabCmd, projDir, hte.headers)
            except ClientException as cle:
                print "Failed executing command %s for task %s: %s" % (options.crabCmd, projDir, cle)
def main():
    options = getOptions()
    # The submit command needs special treatment.
    if options.crabCmd == 'submit':
        #--------------------------------------------------------
        # This is the base config:
        #--------------------------------------------------------
        from CRABClient.UserUtilities import config
        config = config()

        config.General.requestName = None
        config.General.workArea = 'crab_projects'
        config.General.transferOutputs = True
        config.General.transferLogs = True
        config.JobType.pluginName = 'Analysis'
        config.JobType.psetName = 'Run_QCD_test_miaod_v2_94x_mc_cfg.py'
        #               config.Data.ignoreLocality = True
        config.Data.inputDBS = 'global'
        config.JobType.inputFiles = [
            "/afs/cern.ch/work/s/sukundu/private/ESV_charge_CMSSW/Uncertainty2017/Fall17_17Nov2017F_V6_DATA/Fall17_17Nov2017F_V6_DATA_UncertaintySources_AK4PFchs.txt",
            "/afs/cern.ch/work/s/sukundu/private/ESV_charge_CMSSW/Uncertainty2017/Fall17_17Nov2017E_V6_DATA/Fall17_17Nov2017E_V6_DATA_UncertaintySources_AK4PFchs.txt",
            "/afs/cern.ch/work/s/sukundu/private/ESV_charge_CMSSW/Uncertainty2017/Fall17_17Nov2017D_V6_DATA/Fall17_17Nov2017D_V6_DATA_UncertaintySources_AK4PFchs.txt",
            "/afs/cern.ch/work/s/sukundu/private/ESV_charge_CMSSW/Uncertainty2017/Fall17_17Nov2017C_V6_DATA/Fall17_17Nov2017C_V6_DATA_UncertaintySources_AK4PFchs.txt",
            "/afs/cern.ch/work/s/sukundu/private/ESV_charge_CMSSW/Uncertainty2017/Fall17_17Nov2017B_V6_DATA/Fall17_17Nov2017B_V6_DATA_UncertaintySources_AK4PFchs.txt",
            "/afs/cern.ch/work/s/sukundu/private/ESV_charge_CMSSW/Uncertainty2017/MC17_12Apr2018/Fall17_V3b_MC_PtResolution_AK4PFchs.txt",
            "/afs/cern.ch/work/s/sukundu/private/ESV_charge_CMSSW/Uncertainty2017/MC17_12Apr2018/Fall17_V3b_MC_SF_AK4PFchs.txt",
            "/afs/cern.ch/work/s/sukundu/private/ESV_charge_CMSSW/Uncertainty2017/MC17_12Apr2018/Fall17_17Nov2017_V32_MC_Uncertainty_AK4PFchs.txt",
            "/afs/cern.ch/work/s/sukundu/private/ESV_charge_CMSSW/Uncertainty2017/MC17_12Apr2018/Fall17_17Nov2017_V32_MC_UncertaintySources_AK4PFchs.txt"
        ]

        config.JobType.maxMemoryMB = 2500
        #               config.JobType.priority = 9999
        #               config.Data.splitting = 'EventAwareLumiBased'
        config.Data.splitting = 'FileBased'
        config.Data.unitsPerJob = 1
        config.Data.useParent = True
        config.Data.inputDataset = None
        #               config.Data.splitting = 'LumiBased'
        #               config.Data.splitting = 'Automatic'
        #               config.Data.unitsPerJob = 20
        #               config.Data.totalUnits = 30
        config.Data.outputDatasetTag = None
        #                config.Data.lumiMask = '/afs/cern.ch/cms/CAF/CMSCOMM/COMM_DQM/certification/Collisions18/13TeV/ReReco/Cert_314472-325175_13TeV_17SeptEarlyReReco2018ABC_PromptEraD_Collisions18_JSON.txt'
        #               config.Data.outLFNDirBase = '/store/user/%s/' % (getUsernameFromSiteDB())
        config.Data.publication = True
        config.JobType.allowUndistributedCMSSW = True
        config.Site.storageSite = 'T2_IN_TIFR'

        # Will submit one task for each of these input datasets.
        inputDatasets = [
            '/QCD_Pt_15to30_TuneCP5_13TeV_pythia8/RunIISummer19UL17MiniAOD-106X_mc2017_realistic_v6-v2/MINIAODSIM',
            '/QCD_Pt_30to50_TuneCP5_13TeV_pythia8/RunIISummer19UL17MiniAOD-106X_mc2017_realistic_v6-v2/MINIAODSIM',
            '/QCD_Pt_50to80_TuneCP5_13TeV_pythia8/RunIISummer19UL17MiniAOD-106X_mc2017_realistic_v6-v2/MINIAODSIM',
            '/QCD_Pt_80to120_TuneCP5_13TeV_pythia8/RunIISummer19UL17MiniAOD-106X_mc2017_realistic_v6-v2/MINIAODSIM',
            '/QCD_Pt_120to170_TuneCP5_13TeV_pythia8/RunIISummer19UL17MiniAOD-106X_mc2017_realistic_v6-v2/MINIAODSIM',
            '/QCD_Pt_170to300_TuneCP5_13TeV_pythia8/RunIISummer19UL17MiniAOD-106X_mc2017_realistic_v6-v2/MINIAODSIM',
            '/QCD_Pt_300to470_TuneCP5_13TeV_pythia8/RunIISummer19UL17MiniAOD-106X_mc2017_realistic_v6-v2/MINIAODSIM',
            '/QCD_Pt_470to600_TuneCP5_13TeV_pythia8/RunIISummer19UL17MiniAOD-106X_mc2017_realistic_v6-v2/MINIAODSIM',
            '/QCD_Pt_600to800_TuneCP5_13TeV_pythia8/RunIISummer19UL17MiniAOD-106X_mc2017_realistic_v6-v2/MINIAODSIM',
            '/QCD_Pt_800to1000_TuneCP5_13TeV_pythia8/RunIISummer19UL17MiniAOD-106X_mc2017_realistic_v6-v2/MINIAODSIM',
            '/QCD_Pt_1000to1400_TuneCP5_13TeV_pythia8/RunIISummer19UL17MiniAOD-106X_mc2017_realistic_v6-v2/MINIAODSIM',
            '/QCD_Pt_1400to1800_TuneCP5_13TeV_pythia8/RunIISummer19UL17MiniAOD-106X_mc2017_realistic_v6-v2/MINIAODSIM',
            '/QCD_Pt_1800to2400_TuneCP5_13TeV_pythia8/RunIISummer19UL17MiniAOD-106X_mc2017_realistic_v6-v2/MINIAODSIM',
            '/QCD_Pt_2400to3200_TuneCP5_13TeV_pythia8/RunIISummer19UL17MiniAOD-106X_mc2017_realistic_v6-v2/MINIAODSIM',
            '/QCD_Pt_3200toInf_TuneCP5_13TeV_pythia8/RunIISummer19UL17MiniAOD-106X_mc2017_realistic_v6-v2/MINIAODSIM'
        ]

        requestName = [
            'Pt_15to30_TuneCP5', 'Pt_30to50_TuneCP5', 'Pt_50to80_TuneCP5',
            'Pt_80to120_TuneCP5', 'Pt_120to170_TuneCP5', 'Pt_170to300_TuneCP5',
            'Pt_300to470_TuneCP5', 'Pt_470to600_TuneCP5',
            'Pt_600to800_TuneCP5', 'Pt_800to1000_TuneCP5',
            'Pt_1000to1400_TuneCP5', 'Pt_1400to1800_TuneCP5',
            'Pt_1800to2400_TuneCP5', 'Pt_2400to3200_TuneCP5',
            'Pt_3200toinf_TuneCP5'
        ]
        ireq = 0
        for inDS in inputDatasets:
            config.General.requestName = requestName[ireq]
            ireq += 1
            config.Data.inputDataset = inDS
            config.Data.outputDatasetTag = '%s_%s' % (
                config.General.workArea, config.General.requestName)
            # Submit.
            try:
                print "Submitting for input dataset %s" % (inDS)
                crabCommand(options.crabCmd,
                            config=config,
                            *options.crabCmdOpts.split())
            except HTTPException as hte:
                print "Submission for input dataset %s failed: %s" % (
                    inDS, hte.headers)
            except ClientException as cle:
                print "Submission for input dataset %s failed: %s" % (inDS,
                                                                      cle)
    elif options.workArea:
        for dir in os.listdir(options.workArea):
            projDir = os.path.join(options.workArea, dir)
            if not os.path.isdir(projDir):
                continue
            # Execute the crab command.
            msg = "Executing (the equivalent of): crab %s --dir %s %s" % (
                options.crabCmd, projDir, options.crabCmdOpts)
            print "-" * len(msg)
            print msg
            print "-" * len(msg)
            try:
                crabCommand(options.crabCmd,
                            dir=projDir,
                            *options.crabCmdOpts.split())
            except HTTPException as hte:
                print "Failed executing command %s for task %s: %s" % (
                    options.crabCmd, projDir, hte.headers)
            except ClientException as cle:
                print "Failed executing command %s for task %s: %s" % (
                    options.crabCmd, projDir, cle)
def main():
    """Submit 2017-data CRAB tasks (one per dataset below) when the crab
    command is 'submit'; otherwise run the given crab command on every
    project directory inside options.workArea.
    """

    options = getOptions()

    # The submit command needs special treatment.
    if options.crabCmd == 'submit':

        #--------------------------------------------------------
        # This is the base config:
        #--------------------------------------------------------
        from CRABClient.UserUtilities import config
        config = config()

        # NOTE(review): the tab-indented lines below mix tabs with spaces;
        # this only parses under Python 2. Re-indent with care.
        config.General.requestName = None
        #config.General.workArea = 'ZMuondecay'
        config.General.workArea = '2017DT_psi_ee_v7_4'
	config.General.transferOutputs = True
	config.General.transferLogs = False

        config.JobType.pluginName = 'Analysis'
	config.JobType.psetName = '/afs/cern.ch/work/g/gayalasa/public/Zll/z_sl7/ZtoJpsill/CMSSW_10_6_3/src/AnalizeZll/ZtoJpsileplep/test/runV7Elec17.py' #2018 DT configfile
	config.JobType.allowUndistributedCMSSW = True

        config.Data.inputDataset = None
	config.Data.inputDBS = 'global'
   #     config.Data.splitting = 'Automatic'
   #     config.Data.splitting = 'LumiBased'
        config.Data.splitting = 'FileBased'
   #     config.Data.unitsPerJob = 30
        config.Data.unitsPerJob = 1
   #     config.Data.totalUnits = 30
	config.Data.lumiMask = 'https://cms-service-dqm.web.cern.ch/cms-service-dqm/CAF/certification/Collisions17/13TeV/Final/Cert_294927-306462_13TeV_PromptReco_Collisions17_JSON.txt'
	config.Data.publication = True
        config.Data.outputDatasetTag = None
	config.Data.outLFNDirBase = '/store/user/%s/Zpsi_ee17_v7_4/' % ("gayalasa")
	config.Site.storageSite = 'T3_US_FNALLPC'
        #config.Site.storageSite = None # Choose your site. 
        #--------------------------------------------------------

        # Will submit one task for each of these input datasets.
        inputDatasets = [ 
                           '/DoubleEG/Run2017B-31Mar2018-v1/MINIAOD',       # DoubleEG
                           '/DoubleEG/Run2017C-31Mar2018-v1/MINIAOD',
                           '/DoubleEG/Run2017D-31Mar2018-v1/MINIAOD',
                           '/DoubleEG/Run2017E-31Mar2018-v1/MINIAOD',
                           '/DoubleEG/Run2017F-31Mar2018-v1/MINIAOD',

                           '/MuonEG/Run2017B-31Mar2018-v1/MINIAOD',         # MuonEG
                           '/MuonEG/Run2017C-31Mar2018-v1/MINIAOD',
                           '/MuonEG/Run2017D-31Mar2018-v1/MINIAOD',
                           '/MuonEG/Run2017E-31Mar2018-v1/MINIAOD',
                           '/MuonEG/Run2017F-31Mar2018-v1/MINIAOD',

                           '/SingleElectron/Run2017B-31Mar2018-v1/MINIAOD', # SingleElectron
                           '/SingleElectron/Run2017C-31Mar2018-v1/MINIAOD',
                           '/SingleElectron/Run2017D-31Mar2018-v1/MINIAOD',
                           '/SingleElectron/Run2017E-31Mar2018-v1/MINIAOD',
                           '/SingleElectron/Run2017F-31Mar2018-v1/MINIAOD',

                           '/SingleMuon/Run2017B-31Mar2018-v1/MINIAOD',     # SingleMuon
                           '/SingleMuon/Run2017C-31Mar2018-v1/MINIAOD',
                           '/SingleMuon/Run2017D-31Mar2018-v1/MINIAOD',
                           '/SingleMuon/Run2017E-31Mar2018-v1/MINIAOD',
                           '/SingleMuon/Run2017F-31Mar2018-v1/MINIAOD',

                           '/DoubleMuon/Run2017B-31Mar2018-v1/MINIAOD',     # DoubleMuon
                           '/DoubleMuon/Run2017C-31Mar2018-v1/MINIAOD',
                           '/DoubleMuon/Run2017D-31Mar2018-v1/MINIAOD',
                           '/DoubleMuon/Run2017E-31Mar2018-v1/MINIAOD',
                           '/DoubleMuon/Run2017F-31Mar2018-v1/MINIAOD'
                 	]
 
        for inDS in inputDatasets:
             # inDS is of the form /A/B/C. Since B is unique for each inDS, use this in the CRAB request name.
            config.General.requestName = inDS.split('/')[1]+inDS.split('/')[2]
            config.Data.inputDataset = inDS
            config.Data.outputDatasetTag = '%s_%s' % (config.General.workArea, config.General.requestName)
            # Submit.
            try:
                print "Submitting for input dataset %s" % (inDS)
                crabCommand(options.crabCmd, config = config, *options.crabCmdOpts.split())
            except HTTPException as hte:
                print "Submission for input dataset %s failed: %s" % (inDS, hte.headers)
            except ClientException as cle:
                print "Submission for input dataset %s failed: %s" % (inDS, cle)

    # All other commands can be simply executed.
    elif options.workArea:

        for dir in os.listdir(options.workArea):
            projDir = os.path.join(options.workArea, dir)
            if not os.path.isdir(projDir):
                continue
            # Execute the crab command.
            msg = "Executing (the equivalent of): crab %s --dir %s %s" % (options.crabCmd, projDir, options.crabCmdOpts)
            print "-"*len(msg)
            print msg
            print "-"*len(msg)
            try:
                crabCommand(options.crabCmd, dir = projDir, *options.crabCmdOpts.split())
            except HTTPException as hte:
                print "Failed executing command %s for task %s: %s" % (options.crabCmd, projDir, hte.headers)
            except ClientException as cle:
                print "Failed executing command %s for task %s: %s" % (options.crabCmd, projDir, cle)
Beispiel #24
0
def main():
    """Submit one CRAB task per (dataset, global tag, OOT-photon flag) entry,
    or forward any other crab command to every project directory found in
    the work area.
    """

    options = getOptions()

    if options.crabCmd == 'submit':

        # External files needed by CRAB
        inputDir = '/afs/cern.ch/user/k/kmcdermo/public/input/'
        inputPaths = 'HLTpaths.txt'
        inputFilters = 'HLTfilters.txt'
        inputJSON = 'golden2017-nov30.json'

        # ----- base config shared by every submission -----
        from CRABClient.UserUtilities import config
        config = config()

        config.General.workArea = options.workArea
        config.General.requestName = None

        config.JobType.pluginName = 'Analysis'
        config.JobType.psetName = 'hltplots.py'
        config.JobType.pyCfgParams = None
        config.JobType.inputFiles = [inputDir + inputPaths,
                                     inputDir + inputFilters]

        config.Data.inputDataset = None
        config.Data.lumiMask = inputDir + inputJSON
        config.Data.splitting = 'EventAwareLumiBased'
        config.Data.unitsPerJob = 1000000
        config.Data.outputDatasetTag = None
        config.Data.publication = False

        config.Site.storageSite = 'T2_CH_CERN'
        config.Data.outLFNDirBase = '/store/user/kmcdermo/'
        # --------------------------------------------------

        # One task per (dataset, globalTag, useOOTPhotons) triple.
        inputDataAndOpts = [
            ['/SingleMuon/Run2017B-PromptReco-v1/MINIAOD', '92X_dataRun2_Prompt_v4', 'False'],
            ['/SingleMuon/Run2017B-PromptReco-v2/MINIAOD', '92X_dataRun2_Prompt_v5', 'False'],
            ['/SingleMuon/Run2017C-PromptReco-v1/MINIAOD', '92X_dataRun2_Prompt_v6', 'True'],
            ['/SingleMuon/Run2017C-PromptReco-v2/MINIAOD', '92X_dataRun2_Prompt_v7', 'True'],
            ['/SingleMuon/Run2017C-PromptReco-v3/MINIAOD', '92X_dataRun2_Prompt_v8', 'True'],
            ['/SingleMuon/Run2017D-PromptReco-v1/MINIAOD', '92X_dataRun2_Prompt_v8', 'True'],
            ['/SingleMuon/Run2017E-PromptReco-v1/MINIAOD', '92X_dataRun2_Prompt_v9', 'True'],
            ['/SingleMuon/Run2017F-PromptReco-v1/MINIAOD', '92X_dataRun2_Prompt_v9', 'True'],
        ]

        for dataset, globalTag, useOOT in inputDataAndOpts:
            # Dataset is /A/B/C; B is unique, so it serves as the request name.
            config.General.requestName = dataset.split('/')[2]
            config.JobType.pyCfgParams = [
                'globalTag=' + globalTag,
                'useOOTPhotons=' + useOOT,
                'applyTriggerPS=True',
                'psPath=HLT_IsoMu27_v',
                'inputPaths=' + inputPaths,
                'inputFilters=' + inputFilters,
            ]
            config.Data.inputDataset = dataset
            config.Data.outputDatasetTag = '%s_%s' % (config.General.workArea,
                                                      config.General.requestName)
            try:
                print("Submitting for input dataset %s" % (dataset))
                crabCommand(options.crabCmd, config=config,
                            *options.crabCmdOpts.split())
            except HTTPException as hte:
                print("Submission for input dataset %s failed: %s" % (dataset, hte.headers))
            except ClientException as cle:
                print("Submission for input dataset %s failed: %s" % (dataset, cle))

    # Every other crab command is applied to each project directory.
    elif options.workArea:

        for entry in os.listdir(options.workArea):
            projDir = os.path.join(options.workArea, entry)
            if not os.path.isdir(projDir):
                continue
            msg = "Executing (the equivalent of): crab %s --dir %s %s" % (
                options.crabCmd, projDir, options.crabCmdOpts)
            banner = "-" * len(msg)
            print(banner)
            print(msg)
            print(banner)
            try:
                crabCommand(options.crabCmd, dir=projDir,
                            *options.crabCmdOpts.split())
            except HTTPException as hte:
                print("Failed executing command %s for task %s: %s" % (
                    options.crabCmd, projDir, hte.headers))
            except ClientException as cle:
                print("Failed executing command %s for task %s: %s" % (
                    options.crabCmd, projDir, cle))
Beispiel #25
0
def main():
    """Build a CRAB config for the Wprime tree production and define submit().

    Reads the dataset list file named by options.datasets, assembles cmsRun
    parameters and JEC/b-tag input files, and fills a CRAB config object.
    NOTE(review): in this visible portion jobsLines/jobs are populated but
    never consumed, and submit() is defined but not called — the driver loop
    presumably follows further down; confirm against the full file.
    """

    options = getOptions()
    print "IsData? ", options.isData
    # One job description per line; parsed elsewhere (see note above).
    datasetsFile = open( options.datasets )
    jobsLines = datasetsFile.readlines()
    jobs = []

    from CRABClient.UserUtilities import config
    config = config()

    from CRABAPI.RawCommand import crabCommand
    from httplib import HTTPException

    config.section_("General")
    config.General.workArea = options.dir
   
    config.section_("JobType")
    config.JobType.pluginName = 'Analysis'
    config.JobType.psetName = options.config
    config.JobType.allowUndistributedCMSSW = True
#    config.JobType.pyCfgParams = ['isData=' + str(options.isData), 'changeJECs=True', 'channel=wzjets']
    # Start from any user-supplied cmsRun parameters, then force the
    # data/MC flag, the JEC switch, and crab mode.
    cfgparams = []
    if options.pyCfgParams != None:
        cfgparams=options.pyCfgParams
    cfgparams.append('isData=' + str(options.isData))
    cfgparams.append('changeJECs=True')
    cfgparams.append('mode=crab')
#    cfgparams.append('channel=wprime')
    print "======> Config bef loop: ", cfgparams
        
    
    #config.JobType.inputFiles = ['Fall15_25nsV2_DATA.db', 'Fall15_25nsV2_MC.db']
    #config.JobType.inputFiles = ["Fall15_25nsV2_MC_L1FastJet_AK4PFchs.txt", "Fall15_25nsV2_MC_L1RC_AK4PFchs.txt","Fall15_25nsV2_MC_L2Relative_AK4PFchs.txt", "Fall15_25nsV2_MC_L3Absolute_AK4PFchs.txt","Fall15_25nsV2_MC_L2L3Residual_AK4PFchs.txt", "Fall15_25nsV2_DATA_L1FastJet_AK4PFchs.txt","Fall15_25nsV2_DATA_L1RC_AK4PFchs.txt","Fall15_25nsV2_DATA_L2Relative_AK4PFchs.txt","Fall15_25nsV2_DATA_L3Absolute_AK4PFchs.txt",  "Fall15_25nsV2_DATA_L2L3Residual_AK4PFchs.txt"]
    
    #Input part:

    
    # Collect the JEC text files matching the requested version via `ls`.
    # NOTE(review): the `commands` module is Python 2 only (removed in Py3).
    wildcard= options.jecVersion+"*txt"
    lscmd = "ls JECs/"+wildcard 
    outs=commands.getstatusoutput(lscmd)
    #print "status: ", outs[0]," result: ",outs[1]
    inputs=[]
    for l in outs[1].split("\n"):
        inputs.append(l)

    # Extra resolution and b-tagging calibration files shipped with each job.
    inputs.append('Spring16_25nsV10_MC_PtResolution_AK8PFchs.txt')
    inputs.append('DeepCSV_94XSF_V3_B_F.csv')
    inputs.append('CSVv2_Moriond17_B_H.csv')
    inputs.append('CSVv2_ichep.csv')
    inputs.append('cMVAv2_Moriond17_B_H.csv')
    inputs.append('cMVAv2_ichep.csv')
    inputs.append('btagging_cmva.root')
    
    
    config.JobType.inputFiles = inputs
#    print "inputs are", config.JobType.inputFiles
    # Superseded hard-coded Summer16 JEC file list; unused, kept for reference.
    oldinputs = [
        'Spring16_25nsV10_MC_PtResolution_AK8PFchs.txt',
        'Summer16_23Sep2016BCDV4_DATA_L1FastJet_AK4PFchs.txt',
        'Summer16_23Sep2016BCDV4_DATA_L1FastJet_AK8PFchs.txt',
        'Summer16_23Sep2016BCDV4_DATA_L1RC_AK4PFchs.txt',
        'Summer16_23Sep2016BCDV4_DATA_L1RC_AK8PFchs.txt',
        'Summer16_23Sep2016BCDV4_DATA_L2L3Residual_AK4PFchs.txt',
        'Summer16_23Sep2016BCDV4_DATA_L2L3Residual_AK8PFchs.txt',
        'Summer16_23Sep2016BCDV4_DATA_L2Relative_AK4PFchs.txt',
        'Summer16_23Sep2016BCDV4_DATA_L2Relative_AK8PFchs.txt',
        'Summer16_23Sep2016BCDV4_DATA_L2Residual_AK4PFchs.txt',
        'Summer16_23Sep2016BCDV4_DATA_L3Absolute_AK4PFchs.txt',
        'Summer16_23Sep2016BCDV4_DATA_L3Absolute_AK8PFchs.txt',
        'Summer16_23Sep2016BCDV4_DATA_UncertaintySources_AK4PFchs.txt',
        'Summer16_23Sep2016BCDV4_DATA_UncertaintySources_AK8PFchs.txt',
        'Summer16_23Sep2016BCDV4_DATA_Uncertainty_AK4PFchs.txt',
        'Summer16_23Sep2016BCDV4_DATA_Uncertainty_AK8PFchs.txt',
        'Summer16_23Sep2016EFV4_DATA_L1FastJet_AK4PFchs.txt',
        'Summer16_23Sep2016EFV4_DATA_L1FastJet_AK8PFchs.txt',
        'Summer16_23Sep2016EFV4_DATA_L1RC_AK4PFchs.txt',
        'Summer16_23Sep2016EFV4_DATA_L1RC_AK8PFchs.txt',
        'Summer16_23Sep2016EFV4_DATA_L2L3Residual_AK4PFchs.txt',
        'Summer16_23Sep2016EFV4_DATA_L2L3Residual_AK8PFchs.txt',
        'Summer16_23Sep2016EFV4_DATA_L2Relative_AK4PFchs.txt',
        'Summer16_23Sep2016EFV4_DATA_L2Relative_AK8PFchs.txt',
        'Summer16_23Sep2016EFV4_DATA_L2Residual_AK4PFchs.txt',
        'Summer16_23Sep2016EFV4_DATA_L3Absolute_AK4PFchs.txt',
        'Summer16_23Sep2016EFV4_DATA_L3Absolute_AK8PFchs.txt',
        'Summer16_23Sep2016EFV4_DATA_UncertaintySources_AK4PFchs.txt',
        'Summer16_23Sep2016EFV4_DATA_UncertaintySources_AK8PFchs.txt',
        'Summer16_23Sep2016EFV4_DATA_Uncertainty_AK4PFchs.txt',
        'Summer16_23Sep2016EFV4_DATA_Uncertainty_AK8PFchs.txt',
        'Summer16_23Sep2016GV4_DATA_L1FastJet_AK4PFchs.txt',
        'Summer16_23Sep2016GV4_DATA_L1FastJet_AK8PFchs.txt',
        'Summer16_23Sep2016GV4_DATA_L1RC_AK4PFchs.txt',
        'Summer16_23Sep2016GV4_DATA_L1RC_AK8PFchs.txt',
        'Summer16_23Sep2016GV4_DATA_L2L3Residual_AK4PFchs.txt',
        'Summer16_23Sep2016GV4_DATA_L2L3Residual_AK8PFchs.txt',
        'Summer16_23Sep2016GV4_DATA_L2Relative_AK4PFchs.txt',
        'Summer16_23Sep2016GV4_DATA_L2Relative_AK8PFchs.txt',
        'Summer16_23Sep2016GV4_DATA_L2Residual_AK4PFchs.txt',
        'Summer16_23Sep2016GV4_DATA_L3Absolute_AK4PFchs.txt',
        'Summer16_23Sep2016GV4_DATA_L3Absolute_AK8PFchs.txt',
        'Summer16_23Sep2016GV4_DATA_UncertaintySources_AK4PFchs.txt',
        'Summer16_23Sep2016GV4_DATA_UncertaintySources_AK8PFchs.txt',
        'Summer16_23Sep2016GV4_DATA_Uncertainty_AK4PFchs.txt',
        'Summer16_23Sep2016GV4_DATA_Uncertainty_AK8PFchs.txt',
        'Summer16_23Sep2016HV4_DATA_L1FastJet_AK4PFchs.txt',
        'Summer16_23Sep2016HV4_DATA_L1FastJet_AK8PFchs.txt',
        'Summer16_23Sep2016HV4_DATA_L1RC_AK4PFchs.txt',
        'Summer16_23Sep2016HV4_DATA_L1RC_AK8PFchs.txt',
        'Summer16_23Sep2016HV4_DATA_L2L3Residual_AK4PFchs.txt',
        'Summer16_23Sep2016HV4_DATA_L2L3Residual_AK8PFchs.txt',
        'Summer16_23Sep2016HV4_DATA_L2Relative_AK4PFchs.txt',
        'Summer16_23Sep2016HV4_DATA_L2Relative_AK8PFchs.txt',
        'Summer16_23Sep2016HV4_DATA_L2Residual_AK4PFchs.txt',
        'Summer16_23Sep2016HV4_DATA_L3Absolute_AK4PFchs.txt',
        'Summer16_23Sep2016HV4_DATA_L3Absolute_AK8PFchs.txt',
        'Summer16_23Sep2016HV4_DATA_UncertaintySources_AK4PFchs.txt',
        'Summer16_23Sep2016HV4_DATA_UncertaintySources_AK8PFchs.txt',
        'Summer16_23Sep2016HV4_DATA_Uncertainty_AK4PFchs.txt',
        'Summer16_23Sep2016HV4_DATA_Uncertainty_AK8PFchs.txt',
        'Summer16_23Sep2016V4_MC_L1FastJet_AK4PFchs.txt',
        'Summer16_23Sep2016V4_MC_L1FastJet_AK8PFchs.txt',
        'Summer16_23Sep2016V4_MC_L1RC_AK4PFchs.txt',
        'Summer16_23Sep2016V4_MC_L1RC_AK8PFchs.txt',
        'Summer16_23Sep2016V4_MC_L2L3Residual_AK4PFchs.txt',
        'Summer16_23Sep2016V4_MC_L2L3Residual_AK8PFchs.txt',
        'Summer16_23Sep2016V4_MC_L2Relative_AK4PFchs.txt',
        'Summer16_23Sep2016V4_MC_L2Relative_AK8PFchs.txt',
        'Summer16_23Sep2016V4_MC_L3Absolute_AK4PFchs.txt',
        'Summer16_23Sep2016V4_MC_L3Absolute_AK8PFchs.txt',
        'Summer16_23Sep2016V4_MC_Uncertainty_AK4PFchs.txt',
        'Summer16_23Sep2016V4_MC_Uncertainty_AK8PFchs.txt',
        ]
    config.section_("Data")
    config.Data.ignoreLocality = True
    config.Data.inputDataset = None
    config.Data.inputDBS = 'phys03'
    config.Data.splitting = 'FileBased' 
#    config.Data.splitting = 'Automatic' 
    config.Data.unitsPerJob = 10
    config.Data.publication = False    
    #    config.Data.outLFNDirBase = '/store/user/cgiuglia/trees/May12/'
    #    config.Data.outLFNDirBase = '/store/user/oiorio/ttDM/trees/2018/May28/'
    config.Data.outLFNDirBase = '/store/user/oiorio/Wprime/2019/July/July18/'
#    config.Data.outLFNDirBase = '/store/user/oiorio/Tprime/trees/2019/94X2016Mar14/'
    
    config.section_("Site")
#    config.Site.storageSite = 'T2_IT_Pisa'
    config.Site.storageSite = 'T2_CH_CSCS'
    config.Site.whitelist = ['T2_CH_CERN','T2_IT_*','T2_DE_*','T2_CH_*']
    #config.Site.blacklist = ['T2_DE_RWTH']

    print 'Using config ' + options.config
    print 'Writing to directory ' + options.dir

    
    def submit(config):
        # Submit a single task; HTTP failures are reported, not raised.
        try:
            crabCommand('submit', config = config)
        except HTTPException, hte:
            print 'Cannot execute command'
            print hte.headers
Beispiel #26
0
        )
        samples += [samp]
    return samples, skipped

if __name__ == "__main__":
    parser = argparse.ArgumentParser(description='Submits crab jobs')
    parser.add_argument('--indir', action="store", help="path to samples", type=str, default="samples/eth")
    parser.add_argument('--out', action="store", required=True, help="output site, e.g. T2_CH_CSCS", type=str)
    parser.add_argument('--tag', action="store", required=True, help="unique tag for processing", type=str)
    parser.add_argument('--user', action="store", help="username on grid", type=str, default=getUsernameFromSiteDB())
    args = parser.parse_args()
   
    samples, skipped = make_samples(args.indir)
    
    for sample in samples:
        cfg = config()
        
        cfg.section_("General")
        cfg.General.requestName = 'MEM_{0}_{1}'.format(args.tag, sample.name)
        cfg.General.workArea = 'crab_projects'
        cfg.General.transferLogs = True
        
        cfg.section_("JobType")
        cfg.JobType.pluginName = 'Analysis'
        cfg.JobType.psetName = 'PSet.py'
        cfg.JobType.scriptExe = 'wrapper.sh'
        cfg.JobType.sendPythonFolder = True
        cfg.JobType.maxMemoryMB = 1000
        cfg.JobType.inputFiles = [
            cfg.JobType.scriptExe,
            'mem.py',
Beispiel #27
0
def main():
    """Submit one CRAB task per 2018 ZZ->4l MC dataset when the crab command
    is 'submit'; otherwise apply the given crab command to every project
    directory under options.workArea.
    """

    options = getOptions()

    if options.crabCmd == 'submit':

        # ----- base config shared by all tasks -----
        from CRABClient.UserUtilities import config
        config = config()

        config.General.requestName = None
        config.General.workArea = '2018MC_4l_mm_v7_1'
        config.General.transferOutputs = True
        config.General.transferLogs = False

        config.JobType.pluginName = 'Analysis'
        # 2018 MC cmsRun configuration
        config.JobType.psetName = '/afs/cern.ch/work/g/gayalasa/public/Zll/z_sl7/ZtoJpsill/CMSSW_10_6_3/src/AnalizeZll/ZtoJpsileplep/test/runV7Muon_4l.py'
        config.JobType.allowUndistributedCMSSW = True

        config.Data.inputDataset = None
        config.Data.inputDBS = 'global'
        config.Data.splitting = 'FileBased'
        config.Data.unitsPerJob = 1
        # No lumi mask: these are simulation datasets.
        config.Data.publication = True
        config.Data.outputDatasetTag = None
        config.Data.outLFNDirBase = '/store/user/gayalasa/Z4l_mm18mc_v7_1/'
        config.Site.storageSite = 'T3_US_FNALLPC'
        # -------------------------------------------

        # One CRAB task will be submitted for each of these datasets.
        inputDatasets = [
            '/ZZTo4L_TuneCP5_13TeV_powheg_pythia8/RunIIAutumn18MiniAOD-102X_upgrade2018_realistic_v15_ext1-v2/MINIAODSIM',
            '/ZZTo4L_TuneCP5_13TeV_powheg_pythia8/RunIIAutumn18MiniAOD-102X_upgrade2018_realistic_v15_ext2-v2/MINIAODSIM',
        ]

        for dataset in inputDatasets:
            # Dataset is /A/B/C; B is unique, so derive the request name from
            # its leading campaign token plus its trailing version token.
            middle = dataset.split('/')[2]
            config.General.requestName = middle.split('-')[0] + middle.split('_')[-1]
            config.Data.inputDataset = dataset
            config.Data.outputDatasetTag = '%s_%s' % (config.General.workArea,
                                                      config.General.requestName)
            try:
                print("Submitting for input dataset %s" % (dataset))
                crabCommand(options.crabCmd, config=config,
                            *options.crabCmdOpts.split())
            except HTTPException as hte:
                print("Submission for input dataset %s failed: %s" % (dataset, hte.headers))
            except ClientException as cle:
                print("Submission for input dataset %s failed: %s" % (dataset, cle))

    # Every other crab command is run against each project directory.
    elif options.workArea:

        for entry in os.listdir(options.workArea):
            projDir = os.path.join(options.workArea, entry)
            if not os.path.isdir(projDir):
                continue
            msg = "Executing (the equivalent of): crab %s --dir %s %s" % (
                options.crabCmd, projDir, options.crabCmdOpts)
            banner = "-" * len(msg)
            print(banner)
            print(msg)
            print(banner)
            try:
                crabCommand(options.crabCmd, dir=projDir,
                            *options.crabCmdOpts.split())
            except HTTPException as hte:
                print("Failed executing command %s for task %s: %s" % (
                    options.crabCmd, projDir, hte.headers))
            except ClientException as cle:
                print("Failed executing command %s for task %s: %s" % (
                    options.crabCmd, projDir, cle))
def main():
    """Submit MTD RECO CRAB tasks for a fixed list of RelVal datasets, or
    forward any other crab command to every project directory in the
    work area.
    """

    options = getOptions()

    if options.crabCmd == 'submit':

        # ----- base config shared by all tasks -----
        from CRABClient.UserUtilities import config
        config = config()

        config.General.requestName = 'MTDRECO'
        config.General.workArea = options.workArea
        config.General.transferOutputs = True
        config.General.transferLogs = False

        config.JobType.pluginName = 'Analysis'
        config.JobType.psetName = 'runMTD_cfg.py'
        config.JobType.pyCfgParams = [
            'runMTDReco=True',
            'crysLayout=barzflat',
            'output=mtdReco.root',
        ]

        config.Data.inputDataset = None
        config.Data.inputDBS = 'global'
        config.Data.splitting = 'FileBased'
        config.Data.unitsPerJob = 1
        config.Data.outLFNDirBase = '/store/group/dpg_mtd/comm_mtd/meridian/10_4_0_mtd3'
        config.Data.publication = True
        config.Data.outputDatasetTag = None
        config.Data.allowNonValidInputDataset = True

        config.Site.storageSite = 'T2_CH_CERN'
        #config.Site.whitelist = ['T2_CH_CERN', 'T2_US_Nebraska', 'T2_US_Wisconsin']

        config.User.voRole = 'priorityuser'
        # -------------------------------------------

        # One CRAB task will be submitted for each of these datasets.
        inputDatasets = [
            '/RelValSingleMuFlatPt_0p7to10/CMSSW_10_4_0_mtd2_patch1-PU25ns_103X_upgrade2023_realistic_v2_2023D35PU200-v1/GEN-SIM-RECO',
            '/RelValNuGun/CMSSW_10_4_0_mtd2_patch1-PU25ns_103X_upgrade2023_realistic_v2_2023D35PU200-v1/GEN-SIM-RECO',
            '/RelValMinBias_14TeV/CMSSW_10_4_0_mtd2_patch1-103X_upgrade2023_realistic_v2_2023D35noPU-v1/GEN-SIM-RECO',
            '/RelValSingleMuFlatPt_0p7to10_pythia8/CMSSW_10_4_0_mtd2_patch1-103X_upgrade2023_realistic_v2_2023D35noPU-v2/GEN-SIM-RECO',
            '/RelValSinglePiFlatPt_0p7to10_pythia8_cfi/CMSSW_10_4_0_mtd2_patch1-103X_upgrade2023_realistic_v2_2023D35noPU-v2/GEN-SIM-RECO',
            '/RelValSingleKaonFlatPt_0p7to10/CMSSW_10_4_0_mtd2_patch1-103X_upgrade2023_realistic_v2_2023D35noPU-v1/GEN-SIM-RECO',
            '/RelValSingleProtonFlatPt_0p7to10/CMSSW_10_4_0_mtd2_patch1-103X_upgrade2023_realistic_v2_2023D35noPU-v1/GEN-SIM-RECO',
            '/DYToLL_M-50_14TeV_pythia8/PhaseIIMTDTDRAutumn18DR-PU200_pilot_103X_upgrade2023_realistic_v2_ext2-v2/GEN-SIM-RECO',
        ]

        for dataset in inputDatasets:
            # Dataset is /A/B/C: build "A_B_V4", then strip every underscore
            # (same effect as the original str.translate(None, '_')).
            rawName = '%s_%s_V4' % (dataset.split('/')[1], dataset.split('/')[2])
            config.General.requestName = rawName.replace('_', '')
            config.Data.inputDataset = dataset
            config.Data.outputDatasetTag = '%s' % (config.General.requestName)
            try:
                print("Submitting for input dataset %s" % (dataset))
                crabCommand(options.crabCmd, config=config,
                            *options.crabCmdOpts.split())
            except HTTPException as hte:
                print("Submission for input dataset %s failed: %s" % (dataset, hte.headers))
            except ClientException as cle:
                print("Submission for input dataset %s failed: %s" % (dataset, cle))

    # Every other crab command is applied to each project directory.
    elif options.workArea:

        for entry in os.listdir(options.workArea):
            projDir = os.path.join(options.workArea, entry)
            if not os.path.isdir(projDir):
                continue
            msg = "Executing (the equivalent of): crab %s --dir %s %s" % (
                options.crabCmd, projDir, options.crabCmdOpts)
            banner = "-" * len(msg)
            print(banner)
            print(msg)
            print(banner)
            try:
                crabCommand(options.crabCmd, dir=projDir,
                            *options.crabCmdOpts.split())
            except HTTPException as hte:
                print("Failed executing command %s for task %s: %s" % (
                    options.crabCmd, projDir, hte.headers))
            except ClientException as cle:
                print("Failed executing command %s for task %s: %s" % (
                    options.crabCmd, projDir, cle))
Beispiel #29
0
     "QCD_run3"),
    ("/RelValNuGun/CMSSW_11_0_0_pre12-PU_110X_mcRun3_2021_realistic_v5-v1/GEN-SIM-DIGI-RAW",
     "NuGun_run3"),
    ("/RelValTTbar_14TeV/CMSSW_11_0_0_pre12-PU_110X_mcRun3_2021_realistic_v5-v1/GEN-SIM-DIGI-RAW",
     "TTbar_run3"),
    #("/RelValTTbar_14TeV/CMSSW_11_0_0_pre12-PU25ns_110X_mcRun4_realistic_v2_2026D41PU140-v1/GEN-SIM-DIGI-RAW", "TTbar_run4_pu140"),
    #("/RelValTTbar_14TeV/CMSSW_11_0_0_pre12-PU25ns_110X_mcRun4_realistic_v2_2026D41PU200-v1/GEN-SIM-DIGI-RAW", "TTbar_run4_pu200")
]

if __name__ == "__main__":
    for dataset, name in samples:

        if os.path.isfile("step3_dump.pyc"):
            os.remove("step3_dump.pyc")

        conf = config()

        conf.General.requestName = name
        conf.General.transferLogs = True
        conf.General.workArea = 'crab_projects'
        conf.JobType.pluginName = 'Analysis'
        conf.JobType.psetName = 'step3_dump.py'
        conf.JobType.maxJobRuntimeMin = 8 * 60
        conf.JobType.allowUndistributedCMSSW = True
        conf.JobType.outputFiles = [
            "step3_inMINIAODSIM.root", "step3_AOD.root"
        ]
        conf.JobType.maxMemoryMB = 6000
        conf.JobType.numCores = 2

        conf.Data.inputDataset = dataset
Beispiel #30
0
    def init_config(self,inputDataset,cmsrun_cfg_file='test.py'):
        """Create a fresh CRAB config for *inputDataset*.

        Builds an empty config, applies this class's defaults via
        fill_config_defaults(), and points the job at *cmsrun_cfg_file*
        (the cmsRun configuration to execute).
        NOTE(review): the psetName line below is tab-indented while its
        neighbours use spaces — Python 2 only; re-indent with care.
        """
        self.config = config() ## new empty crab config 
        self.fill_config_defaults()
	self.config.JobType.psetName = cmsrun_cfg_file  
        self.config.Data.inputDataset = inputDataset
Beispiel #31
0
def main():

    options = getOptions()
    # The submit command needs special treatment.
    if options.crabCmd == 'submit':

        #--------------------------------------------------------
        # This is the base config:
        #--------------------------------------------------------
        from CRABClient.UserUtilities import config
        config = config()

        config.General.requestName = 'runHits_analysis'
        config.General.workArea = options.workArea
        config.General.transferOutputs = True
        config.General.transferLogs = False

        config.JobType.pluginName = 'Analysis'
        config.JobType.psetName = 'runHits_cfg.py'
        config.JobType.pyCfgParams = [
            'useMTDTrack=False', 'crysLayout=barzflat',
            'output=DumpHits_noMTD.root'
        ]

        config.Data.inputDataset = None
        config.Data.inputDBS = 'phys03'
        config.Data.splitting = 'FileBased'
        config.Data.unitsPerJob = 1
        config.Data.outLFNDirBase = '/store/user/meridian/MTD'
        config.Data.publication = False
        config.Data.outputDatasetTag = '10_4_0_mtd3_runHits_analysis_v7'
        config.Data.allowNonValidInputDataset = True
        config.Data.useParent = True

        config.Site.storageSite = 'T2_CH_CERN'
        config.User.voRole = 'priorityuser'

        #--------------------------------------------------------

        # Will submit one task for each of these input datasets.
        inputDatasets = [
            #            '/RelValDYToLL_M_50_14TeV/meridian-CMSSW_10_4_0_mtd2_patch1-103X_upgrade2023_realistic_v2_2023D35noPU-v1-479d09f3e9ff3659dd49d9006e28a0a3/USER',
            #            '/RelValSingleMuFlatPt_0p7to10_pythia8/meridian-RelValSingleMuFlatPt0p7to10pythia8CMSSW1040mtd2patch1-103Xupgrade2023realisticv22023D35noPU-v2-479d09f3e9ff3659dd49d9006e28a0a3/USER',
            #            '/RelValMinBias_14TeV/meridian-RelValMinBias14TeVCMSSW1040mtd2patch1-103Xupgrade2023realisticv22023D35noPU-v1-479d09f3e9ff3659dd49d9006e28a0a3/USER',
            #            '/RelValSinglePiFlatPt_0p7to10_pythia8_cfi/meridian-RelValSinglePiFlatPt0p7to10pythia8cfiCMSSW1040mtd2patch1-103Xupgrade2023realisticv22023D35noPU-v2-479d09f3e9ff3659dd49d9006e28a0a3/USER',
            #            '/RelValSingleKaonFlatPt_0p7to10/meridian-RelValSingleKaonFlatPt0p7to10CMSSW1040mtd2patch1-103Xupgrade2023realisticv22023D35noPU-v1-479d09f3e9ff3659dd49d9006e28a0a3/USER'
            #### V2 ### chi2 cut @ 50
            #            '/RelValSinglePiFlatPt_0p7to10_pythia8_cfi/meridian-RelValSinglePiFlatPt0p7to10pythia8cfiCMSSW1040mtd2patch1-103Xupgrade2023realisticv22023D35noPU-v2V2-479d09f3e9ff3659dd49d9006e28a0a3/USER',
            #### V3 ### chi2 cut @ 1000
            #            '/RelValSinglePiFlatPt_0p7to10_pythia8_cfi/meridian-RelValSinglePiFlatPt0p7to10pythia8cfiCMSSW1040mtd2patch1-103Xupgrade2023realisticv22023D35noPU-v2V3-479d09f3e9ff3659dd49d9006e28a0a3/USER',
            #            '/RelValDYToLL_M_50_14TeV/meridian-RelValDYToLLM5014TeVCMSSW1040mtd2patch1-103Xupgrade2023realisticv22023D35noPU-v1V3-479d09f3e9ff3659dd49d9006e28a0a3/USER',
            #            '/RelValMinBias_14TeV/meridian-RelValMinBias14TeVCMSSW1040mtd2patch1-103Xupgrade2023realisticv22023D35noPU-v1V3-479d09f3e9ff3659dd49d9006e28a0a3/USER',
            #            '/RelValSingleMuFlatPt_0p7to10_pythia8/meridian-RelValSingleMuFlatPt0p7to10pythia8CMSSW1040mtd2patch1-103Xupgrade2023realisticv22023D35noPU-v2V3-479d09f3e9ff3659dd49d9006e28a0a3/USER',
            #            '/RelValSingleKaonFlatPt_0p7to10/meridian-RelValSingleKaonFlatPt0p7to10CMSSW1040mtd2patch1-103Xupgrade2023realisticv22023D35noPU-v1V3-479d09f3e9ff3659dd49d9006e28a0a3/USER',
            #            '/RelValSingleProtonFlatPt_0p7to10/meridian-RelValSingleProtonFlatPt0p7to10CMSSW1040mtd2patch1-103Xupgrade2023realisticv22023D35noPU-v1V3-479d09f3e9ff3659dd49d9006e28a0a3/USER',
            #            '/RelValDYToLL_M_50_14TeV/meridian-RelValDYToLLM5014TeVCMSSW1040mtd2patch1-PU25ns103Xupgrade2023realisticv22023D35PU200-v1V3-479d09f3e9ff3659dd49d9006e28a0a3/USER',
            #            '/DYToLL_M-50_14TeV_pythia8/meridian-DYToLLM-5014TeVpythia8PhaseIIMTDTDRAutumn18DR-PU200pilot103Xupgrade2023realisticv2ext2-v2V3-479d09f3e9ff3659dd49d9006e28a0a3/USER',
            #            '/RelValNuGun/meridian-RelValNuGunCMSSW1040mtd2patch1-PU25ns103Xupgrade2023realisticv22023D35PU200-v1V3-479d09f3e9ff3659dd49d9006e28a0a3/USE',
            #            '/RelValSingleMuFlatPt_0p7to10/meridian-RelValSingleMuFlatPt0p7to10CMSSW1040mtd2patch1-PU25ns103Xupgrade2023realisticv22023D35PU200-v1V3-479d09f3e9ff3659dd49d9006e28a0a3/USER'
            ### V4 chi2=50
            '/RelValSingleProtonFlatPt_0p7to10/meridian-RelValSingleProtonFlatPt0p7to10CMSSW1040mtd2patch1-103Xupgrade2023realisticv22023D35noPU-v1V4-479d09f3e9ff3659dd49d9006e28a0a3/USER',
            '/RelValSinglePiFlatPt_0p7to10_pythia8_cfi/meridian-RelValSinglePiFlatPt0p7to10pythia8cfiCMSSW1040mtd2patch1-103Xupgrade2023realisticv22023D35noPU-v2V4-479d09f3e9ff3659dd49d9006e28a0a3/USER',
            '/RelValSingleMuFlatPt_0p7to10_pythia8/meridian-RelValSingleMuFlatPt0p7to10pythia8CMSSW1040mtd2patch1-103Xupgrade2023realisticv22023D35noPU-v2V4-479d09f3e9ff3659dd49d9006e28a0a3/USER',
            '/RelValSingleMuFlatPt_0p7to10/meridian-RelValSingleMuFlatPt0p7to10CMSSW1040mtd2patch1-PU25ns103Xupgrade2023realisticv22023D35PU200-v1V4-479d09f3e9ff3659dd49d9006e28a0a3/USER',
            '/RelValSingleKaonFlatPt_0p7to10/meridian-RelValSingleKaonFlatPt0p7to10CMSSW1040mtd2patch1-103Xupgrade2023realisticv22023D35noPU-v1V4-479d09f3e9ff3659dd49d9006e28a0a3/USER',
            '/RelValNuGun/meridian-RelValNuGunCMSSW1040mtd2patch1-PU25ns103Xupgrade2023realisticv22023D35PU200-v1V4-479d09f3e9ff3659dd49d9006e28a0a3/USER',
            '/RelValMinBias_14TeV/meridian-RelValMinBias14TeVCMSSW1040mtd2patch1-103Xupgrade2023realisticv22023D35noPU-v1V4-479d09f3e9ff3659dd49d9006e28a0a3/USER',
            '/RelValDYToLL_M_50_14TeV/meridian-RelValDYToLLM5014TeVCMSSW1040mtd2patch1-PU25ns103Xupgrade2023realisticv22023D35PU200-v1V4-479d09f3e9ff3659dd49d9006e28a0a3/USER',
            '/RelValDYToLL_M_50_14TeV/meridian-RelValDYToLLM5014TeVCMSSW1040mtd2patch1-103Xupgrade2023realisticv22023D35noPU-v1V4-479d09f3e9ff3659dd49d9006e28a0a3/USER'
            '/DYToLL_M-50_14TeV_pythia8/meridian-DYToLLM-5014TeVpythia8PhaseIIMTDTDRAutumn18DR-PU200pilot103Xupgrade2023realisticv2ext2-v2V4-479d09f3e9ff3659dd49d9006e28a0a3/USER'
        ]

        for inDS in inputDatasets:
            # inDS is of the form /A/B/C. Since B is unique for each inDS, use this in the CRAB request name.
            config.General.requestName = 'runHits_%s' % (inDS.split('/')[1])
            config.General.requestName = config.General.requestName.translate(
                None, '_')
            config.Data.inputDataset = inDS
            config.Data.outputDatasetTag = '%s_v4_noMTDTrack' % (
                config.General.requestName)
            # Submit.
            try:
                print "Submitting for input dataset %s" % (inDS)
                crabCommand(options.crabCmd,
                            config=config,
                            *options.crabCmdOpts.split())
            except HTTPException as hte:
                print "Submission for input dataset %s failed: %s" % (
                    inDS, hte.headers)
            except ClientException as cle:
                print "Submission for input dataset %s failed: %s" % (inDS,
                                                                      cle)

    # All other commands can be simply executed.
    elif options.workArea:

        for dir in os.listdir(options.workArea):
            projDir = os.path.join(options.workArea, dir)
            if not os.path.isdir(projDir):
                continue
            # Execute the crab command.
            msg = "Executing (the equivalent of): crab %s --dir %s %s" % (
                options.crabCmd, projDir, options.crabCmdOpts)
            print "-" * len(msg)
            print msg
            print "-" * len(msg)
            try:
                crabCommand(options.crabCmd,
                            dir=projDir,
                            *options.crabCmdOpts.split())
            except HTTPException as hte:
                print "Failed executing command %s for task %s: %s" % (
                    options.crabCmd, projDir, hte.headers)
            except ClientException as cle:
                print "Failed executing command %s for task %s: %s" % (
                    options.crabCmd, projDir, cle)
Beispiel #32
0
def main():

    options = getOptions()

    # The submit command needs special treatment.
    if options.crabCmd == 'submit':

        #--------------------------------------------------------
        # This is the base config:
        #--------------------------------------------------------
        from CRABClient.UserUtilities import config
        config = config()

        config.General.requestName = None
        #config.General.workArea = 'ZMuondecay'
        config.General.workArea = '2017DT_psi_mm_v7_4CB'
	config.General.transferOutputs = True
	config.General.transferLogs = False

        config.JobType.pluginName = 'Analysis'
	config.JobType.psetName = '/afs/cern.ch/work/g/gayalasa/public/Zll/z_sl7/ZtoJpsill/CMSSW_10_6_3/src/AnalizeZll/ZtoJpsileplep/test/runV7Muon.py' #2018 DT configfile
	config.JobType.allowUndistributedCMSSW = True

        config.Data.inputDataset = None
	config.Data.inputDBS = 'global'
   #     config.Data.splitting = 'Automatic'
   #     config.Data.splitting = 'LumiBased'
        config.Data.splitting = 'FileBased'
   #     config.Data.unitsPerJob = 30
        config.Data.unitsPerJob = 1
   #     config.Data.totalUnits = 30
	config.Data.lumiMask = 'https://cms-service-dqm.web.cern.ch/cms-service-dqm/CAF/certification/Collisions17/13TeV/Final/Cert_294927-306462_13TeV_PromptReco_Collisions17_JSON_MuonPhys.txt' #has nosence in Mc
	config.Data.publication = True
        config.Data.outputDatasetTag = None
	#config.Data.outLFNDirBase = '/store/user/%s/Zpsi_mm17_v7_4/' % ("gayalasa")
	#config.Site.storageSite = 'T3_US_FNALLPC'
    config.Site.storageSite = 'T3_CH_CERNBOX'
	#config.Data.outLFNDirBase = 'gsiftp://eosuserftp.cern.ch/eos/user/g/%s/Zpsi_mm17_v7_4' %("gayalasa")
        #config.Site.storageSite = None # Choose your site. 
        #--------------------------------------------------------

        # Will submit one task for each of these input datasets.
        inputDatasets = [ 
                           '/DoubleEG/Run2017B-31Mar2018-v1/MINIAOD',       # DoubleEG
                           '/DoubleEG/Run2017C-31Mar2018-v1/MINIAOD',
                           '/DoubleEG/Run2017D-31Mar2018-v1/MINIAOD',
                           '/DoubleEG/Run2017E-31Mar2018-v1/MINIAOD',
                           '/DoubleEG/Run2017F-31Mar2018-v1/MINIAOD',

                           '/MuonEG/Run2017B-31Mar2018-v1/MINIAOD',         # MuonEG
                           '/MuonEG/Run2017C-31Mar2018-v1/MINIAOD',
                           '/MuonEG/Run2017D-31Mar2018-v1/MINIAOD',
                           '/MuonEG/Run2017E-31Mar2018-v1/MINIAOD',
                           '/MuonEG/Run2017F-31Mar2018-v1/MINIAOD',

                           '/SingleElectron/Run2017B-31Mar2018-v1/MINIAOD', # SingleElectron
                           '/SingleElectron/Run2017C-31Mar2018-v1/MINIAOD',
                           '/SingleElectron/Run2017D-31Mar2018-v1/MINIAOD',
                           '/SingleElectron/Run2017E-31Mar2018-v1/MINIAOD',
                           '/SingleElectron/Run2017F-31Mar2018-v1/MINIAOD',

                           '/SingleMuon/Run2017B-31Mar2018-v1/MINIAOD',     # SingleMuon
                           '/SingleMuon/Run2017C-31Mar2018-v1/MINIAOD',
                           '/SingleMuon/Run2017D-31Mar2018-v1/MINIAOD',
                           '/SingleMuon/Run2017E-31Mar2018-v1/MINIAOD',
                           '/SingleMuon/Run2017F-31Mar2018-v1/MINIAOD',

                           '/DoubleMuon/Run2017B-31Mar2018-v1/MINIAOD',     # DoubleMuon
                           '/DoubleMuon/Run2017C-31Mar2018-v1/MINIAOD',
                           '/DoubleMuon/Run2017D-31Mar2018-v1/MINIAOD',
                           '/DoubleMuon/Run2017E-31Mar2018-v1/MINIAOD',
                           '/DoubleMuon/Run2017F-31Mar2018-v1/MINIAOD'
                 	]
 
        for inDS in inputDatasets:
             # inDS is of the form /A/B/C. Since B is unique for each inDS, use this in the CRAB request name.
            config.General.requestName = inDS.split('/')[1]+inDS.split('/')[2]
            #config.General.requestName = 'mumu17Test'
            config.Data.inputDataset = inDS
            config.Data.outputDatasetTag = '%s_%s' % (config.General.workArea, config.General.requestName)
            # Submit.
            try:
                print "Submitting for input dataset %s" % (inDS)
                crabCommand(options.crabCmd, config = config, *options.crabCmdOpts.split())
            except HTTPException as hte:
                print "Submission for input dataset %s failed: %s" % (inDS, hte.headers)
            except ClientException as cle:
                print "Submission for input dataset %s failed: %s" % (inDS, cle)
Beispiel #33
0
# CRAB3 submission configuration for the W-helicity ElMu analysis over the
# ZZ pythia8 MiniAOD sample. Used directly via `crab submit <this file>`.
from CRABClient.UserUtilities import config, getUsernameFromSiteDB
config = config()

# Task bookkeeping: task name under the crab_projects work area; keep job
# outputs, skip log transfer.
config.General.requestName   = 'ZZ27Feb'
#config.General.requestName   = 'TTJetsDiLepOut04'
#config.General.requestName   = 'DataMuEG'
config.General.workArea = 'crab_projects'
config.General.transferOutputs = True
config.General.transferLogs = False

# Job payload: cmsRun pset plus the JER/scale-factor/b-tag auxiliary files
# shipped into each job's sandbox; each job returns a tree and check histos.
config.JobType.pluginName = 'Analysis'
config.JobType.psetName = 'test/whelicity_pythia_ElMu_cfg.py'
config.JobType.inputFiles  = ['Spring16_25nsV10_MC_PhiResolution_AK4PFchs.txt','Spring16_25nsV10_MC_PtResolution_AK4PFchs.txt','Spring16_25nsV10_MC_SF_AK4PFchs.txt','egammaEffi.txt_EGM2D.root','ISOEfficienciesAndSF_BCDEF.root','IDEfficienciesAndSF_BCDEF.root','TkegammaEffi.txt_EGM2D.root','Tracking_EfficienciesAndSF_BCDEFGH.root','CSVv2_Moriond17_B_H.csv']
config.JobType.outputFiles = ['tree.root','sanityCheckHistos.root']
config.Data.inputDataset = '/ZZ_TuneCUETP8M1_13TeV-pythia8/RunIISummer16MiniAODv2-PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/MINIAODSIM'

# Input handling: one file per job; ignoreLocality allows jobs to run away
# from the data's host sites. Output lands under the user's /store/user area.
# NOTE(review): getUsernameFromSiteDB was removed in newer CRAB clients
# (getUsernameFromCRIC replaces it, cf. its use elsewhere in this file) —
# confirm against the deployed CRABClient version.
config.Data.inputDBS = 'global'
config.Data.splitting = 'FileBased'
config.Data.unitsPerJob = 1
config.Data.ignoreLocality  = True
config.Data.outLFNDirBase = '/store/user/%s/' % (getUsernameFromSiteDB())
#config.Data.publication = True
config.Data.outputDatasetTag = 'ElMuMC'

# Destination storage element for the outputs.
config.Site.storageSite = 'T3_IR_IPM'
Beispiel #34
0
    
    # loop over samples
    for sample, info in doc['samples'].iteritems():
      print("\n\n*** Sample {} ***".format(sample))
      # Given we have repeated datasets check for different parts
      parts = info['parts'] if 'parts' in info else [None]
      for part in parts:
        name = sample % part if part is not None else sample
        
        # filter names according to what we need
        if not fnmatch(name, args.filter): continue
        print 'submitting', name

        isMC = info['isMC']

        this_config = config()
        this_config.section_('General')
        this_config.General.transferOutputs = True
        this_config.General.transferLogs = True
        this_config.General.workArea = 'BParkingNANO_%s' % production_tag

        this_config.section_('Data')
        this_config.Data.publication = False
        #this_config.Data.outLFNDirBase = '/store/group/cmst3/group/bpark/%s' % (this_config.General.workArea)
        this_config.Data.outLFNDirBase = '/store/user/{}/BParkingNANO/{}/'.format(getUsernameFromCRIC(), skim_version)

        this_config.Data.inputDBS = 'global'

        this_config.section_('JobType')
        this_config.JobType.pluginName = 'Analysis'
        this_config.JobType.psetName = '../test/run_nano_FFR_AllJpsiMuMu_cfg.py'
def main():

    options = getOptions()

    # The submit command needs special treatment.
    if options.crabCmd == 'submit':

        #--------------------------------------------------------
        # This is the base config:
        #--------------------------------------------------------
        from CRABClient.UserUtilities import config
        config = config()

        config.General.requestName = None
        #config.General.workArea = 'ZMuondecay'
        config.General.workArea = '2016MC_psi_ee_v1'
        config.General.transferOutputs = True
        config.General.transferLogs = False

        config.JobType.pluginName = 'Analysis'
        config.JobType.psetName = '/afs/cern.ch/work/g/gayalasa/public/Zll/z_sl7/jspielec16mc/CMSSW_9_4_13/src/AnalyzeZll/OniaRootupler/test/runMcElec16.py'  #2016 MC configfile
        config.JobType.allowUndistributedCMSSW = True

        config.Data.inputDataset = None
        config.Data.inputDBS = 'global'
        #     config.Data.splitting = 'Automatic'
        config.Data.splitting = 'LumiBased'
        config.Data.unitsPerJob = 30
        #     config.Data.totalUnits = 30
        #config.Data.lumiMask = '' #has nosence in Mc
        config.Data.publication = True
        config.Data.outputDatasetTag = None
        config.Data.outLFNDirBase = '/store/user/%s/Zpsi_ee16MC_v1/' % (
            getUsernameFromSiteDB())
        config.Site.storageSite = 'T3_US_FNALLPC'
        #config.Site.storageSite = None # Choose your site.
        #--------------------------------------------------------

        # Will submit one task for each of these input datasets.
        inputDatasets = [
            '/ZToJPsiEE_TuneCUEP8M1_13TeV-pythia8/RunIISummer16MiniAODv2-PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/MINIAODSIM'  # DoubleMuon
        ]

        for inDS in inputDatasets:
            # inDS is of the form /A/B/C. Since B is unique for each inDS, use this in the CRAB request name.
            config.General.requestName = 'ZToJPsiEE_TuneCUEP8M1_13TeV-pythia8'  # hardcoded because is to big inDS.split('/')[1]+inDS.split('/')[2]
            config.Data.inputDataset = inDS
            config.Data.outputDatasetTag = '%s_%s' % (
                config.General.workArea, config.General.requestName)
            # Submit.
            try:
                print "Submitting for input dataset %s" % (inDS)
                crabCommand(options.crabCmd,
                            config=config,
                            *options.crabCmdOpts.split())
            except HTTPException as hte:
                print "Submission for input dataset %s failed: %s" % (
                    inDS, hte.headers)
            except ClientException as cle:
                print "Submission for input dataset %s failed: %s" % (inDS,
                                                                      cle)

    # All other commands can be simply executed.
    elif options.workArea:

        for dir in os.listdir(options.workArea):
            projDir = os.path.join(options.workArea, dir)
            if not os.path.isdir(projDir):
                continue
            # Execute the crab command.
            msg = "Executing (the equivalent of): crab %s --dir %s %s" % (
                options.crabCmd, projDir, options.crabCmdOpts)
            print "-" * len(msg)
            print msg
            print "-" * len(msg)
            try:
                crabCommand(options.crabCmd,
                            dir=projDir,
                            *options.crabCmdOpts.split())
            except HTTPException as hte:
                print "Failed executing command %s for task %s: %s" % (
                    options.crabCmd, projDir, hte.headers)
            except ClientException as cle:
                print "Failed executing command %s for task %s: %s" % (
                    options.crabCmd, projDir, cle)
from CRABClient.UserUtilities import config, getUsernameFromSiteDB

config = config()
config.General.requestName = 'b2gtreeV4_ST_tW_antitop_5f_inclusiveDecays_RunIISpring16MiniAODv2'
config.General.transferOutputs = True
config.General.transferLogs = True
config.JobType.pluginName = 'Analysis'
config.JobType.psetName = 'run_B2GTTbarTreeMaker_MC2_Toolbox.py'
config.JobType.maxJobRuntimeMin = 2750
config.Data.inputDataset = '/ST_tW_antitop_5f_inclusiveDecays_13TeV-powheg-pythia8_TuneCUETP8M1/RunIISpring16MiniAODv2-PUSpring16_80X_mcRun2_asymptotic_2016_miniAODv2_v0-v1/MINIAODSIM'
config.Data.inputDBS = 'global'
config.JobType.inputFiles = [
'PUweight_Nov4JSON_Nov6PiileupJSON_Xsec72383_MCRunIISpring16MiniAODv2.root',
'Spring16_25nsV6_MC_L1FastJet_AK8PFchs.txt',
'Spring16_25nsV6_MC_L2Relative_AK8PFchs.txt',
'Spring16_25nsV6_MC_L3Absolute_AK8PFchs.txt',
'Spring16_25nsV6_MC_L2L3Residual_AK8PFchs.txt',
'Spring16_25nsV6_MC_Uncertainty_AK8PFchs.txt',
'Spring16_25nsV6_MC_L1FastJet_AK4PFchs.txt',
'Spring16_25nsV6_MC_L2Relative_AK4PFchs.txt',
'Spring16_25nsV6_MC_L3Absolute_AK4PFchs.txt',
'Spring16_25nsV6_MC_L2L3Residual_AK4PFchs.txt',
'Spring16_25nsV6_MC_Uncertainty_AK4PFchs.txt',
'Spring16_25nsV6_MC_L1FastJet_AK8PFPuppi.txt',
'Spring16_25nsV6_MC_L2Relative_AK8PFPuppi.txt',
'Spring16_25nsV6_MC_L3Absolute_AK8PFPuppi.txt',
'Spring16_25nsV6_MC_L2L3Residual_AK8PFPuppi.txt',
'Spring16_25nsV6_MC_Uncertainty_AK8PFPuppi.txt',
'Spring16_25nsV6_MC_L1FastJet_AK4PFPuppi.txt',
'Spring16_25nsV6_MC_L2Relative_AK4PFPuppi.txt',
'Spring16_25nsV6_MC_L3Absolute_AK4PFPuppi.txt',
Beispiel #37
0
def main():
    """
    Submit dispho ntupling CRAB tasks, or forward any other crab command to
    every task directory under options.workArea.

    For 'submit': builds one base config, then loops over inputDataAndOpts,
    overriding the per-dataset fields before each crabCommand call.
    """
    options = getOptions()

    # The submit command needs special treatment.
    if options.crabCmd == 'submit':

        # External files needed by CRAB
        inputDir = '/afs/cern.ch/user/k/kmcdermo/public/input/'
        inputPaths = 'HLTpathsWExtras.txt'
        inputFilters = 'HLTfilters.txt'
        inputFlags = 'METflags.txt'

        #--------------------------------------------------------
        # This is the base config:
        #--------------------------------------------------------
        from CRABClient.UserUtilities import config
        config = config()

        config.General.workArea = options.workArea
        config.General.requestName = None  # filled per dataset in the loop below

        config.JobType.pluginName = 'Analysis'
        config.JobType.psetName = 'dispho.py'
        config.JobType.numCores = 8
        config.JobType.pyCfgParams = None  # filled per dataset in the loop below
        # Ship the HLT path/filter and MET flag lists into every job sandbox.
        config.JobType.inputFiles = [
            inputDir + inputPaths, inputDir + inputFilters,
            inputDir + inputFlags
        ]

        config.Data.inputDBS = None        # filled per dataset in the loop below
        config.Data.inputDataset = None    # filled per dataset in the loop below
        config.Data.splitting = 'EventAwareLumiBased'
        config.Data.unitsPerJob = None     # filled per dataset in the loop below

        config.Data.outputDatasetTag = None  # filled per dataset in the loop below
        config.Data.publication = False
        config.Site.storageSite = 'T2_CH_CERN'
        config.Data.outLFNDirBase = '/store/user/kmcdermo/nTuples/unskimmed/ootID'
        #--------------------------------------------------------

        # Will submit one task for each of these input datasets.
        # Each entry is [dataset, xsec, filterEff, BR, sample-flag name,
        # unitsPerJob (events per job), DBS instance] — consumed by index in
        # the loop below.
        inputDataAndOpts = [
            [
                '/GMSB_L200TeV_CTau400cm_930/kmcdermo-GMSB_L200TeV_CTau400cm_930_step3-23134fac048c68b5122d77328802e60f/USER',
                '0.04', '1', '0.81418', 'isGMSB', 10000, 'phys03'
            ],
            [
                '/GJet_Pt-15To6000_TuneCP5-Flat_13TeV_pythia8/RunIIFall17MiniAOD-94X_mc2017_realistic_v10-v1/MINIAODSIM',
                '283200', '1', '1', 'isBkgd', 500000, 'global'
            ],
            [
                '/ADDmonoPhoton_MD-1_d-5_TuneCUETP8M1_13TeV-pythia8/RunIISummer17MiniAOD-NZSFlatPU28to62_92X_upgrade2017_realistic_v10-v1/MINIAODSIM',
                '0.9701', '1', '1', 'isADD', 10000, 'global'
            ],
            [
                '/ADDmonoPhoton_MD-1_d-3_TuneCUETP8M1_13TeV-pythia8/RunIISummer17MiniAOD-NZSFlatPU28to62_92X_upgrade2017_realistic_v10-v1/MINIAODSIM',
                '0.4108', '1', '1', 'isADD', 10000, 'global'
            ],
        ]

        for inDO in inputDataAndOpts:
            # inDO[0] is of the form /A/B/C. Since A is unique for each inDO for Monte Carlo, use this in the CRAB request name.
            config.General.requestName = inDO[0].split('/')[1]
            # Forward the per-sample physics parameters to the cmsRun pset;
            # inDO[4] + '=True' switches on the matching sample flag.
            config.JobType.pyCfgParams = [
                'globalTag=94X_mc2017_realistic_v10', 'storeRecHits=False',
                'nThreads=' + str(config.JobType.numCores), 'xsec=' + inDO[1],
                'filterEff=' + inDO[2], 'BR=' + inDO[3], inDO[4] + '=True',
                'inputPaths=' + inputPaths, 'inputFilters=' + inputFilters,
                'inputFlags=' + inputFlags
            ]
            config.Data.unitsPerJob = inDO[5]
            config.Data.inputDBS = inDO[6]
            config.Data.inputDataset = inDO[0]
            config.Data.outputDatasetTag = '%s_%s' % (
                config.General.workArea, config.General.requestName)
            # Submit.
            try:
                print "Submitting for input dataset %s" % (inDO[0])
                crabCommand(options.crabCmd,
                            config=config,
                            *options.crabCmdOpts.split())
            except HTTPException as hte:
                print "Submission for input dataset %s failed: %s" % (
                    inDO[0], hte.headers)
            except ClientException as cle:
                print "Submission for input dataset %s failed: %s" % (inDO[0],
                                                                      cle)

    # All other commands can be simply executed.
    elif options.workArea:

        for dir in os.listdir(options.workArea):
            projDir = os.path.join(options.workArea, dir)
            if not os.path.isdir(projDir):
                continue
            # Execute the crab command.
            msg = "Executing (the equivalent of): crab %s --dir %s %s" % (
                options.crabCmd, projDir, options.crabCmdOpts)
            print "-" * len(msg)
            print msg
            print "-" * len(msg)
            try:
                crabCommand(options.crabCmd,
                            dir=projDir,
                            *options.crabCmdOpts.split())
            except HTTPException as hte:
                print "Failed executing command %s for task %s: %s" % (
                    options.crabCmd, projDir, hte.headers)
            except ClientException as cle:
                print "Failed executing command %s for task %s: %s" % (
                    options.crabCmd, projDir, cle)