Example #1
CMSSW_VERSION = os.getenv('CMSSW_VERSION', 'CMSSW_VERSION')
if CMSSW_VERSION == 'CMSSW_VERSION':
    print 'please set up your CMSSW environment'
    sys.exit(0)

if len(sys.argv) == 1:
    print "Please pass in argument a number between 0 and 2"
    sys.exit()

elif sys.argv[1] == '0':
    print 'Build Efficiency maps'
    FarmDirectory = "FARM"
    JobName = "HscpBuildEffMaps"
    LaunchOnCondor.Jobs_RunHere = 1
    LaunchOnCondor.SendCluster_Create(FarmDirectory, JobName)
    for m in SStauMassPoints:
        LaunchOnCondor.SendCluster_Push([
            "FWLITE",
            os.getcwd() + "/GetEfficiencyMaps.C", '"SStau' + str(m) + '"', "1",
            '"root://eoscms//eos/cms/store/user/querten/ModelIndepSept/ModelIndep_SingleStau'
            + str(m) + '.root"'
        ])
    LaunchOnCondor.SendCluster_Submit()

elif sys.argv[1] == '1':
    print 'Merge efficiencies'
    fileList = ''
    for m in SStauMassPoints:
        fileList += ' pictures/Histos_SStau' + str(m) + '.root'
    os.system('hadd -f pictures/Histos.root' + fileList)
Example #2
            'mkdir -p ~/x509_user_proxy; voms-proxy-init --voms cms -valid 192:00 --out ~/x509_user_proxy/x509_proxy'
        )  #all must be done in the same command to avoid environment problems. Note that the first sourcing is only needed in Louvain


if sys.argv[1] == '1':
    if UseRemoteSamples:
        initProxy()
    print("compile the Stability code")
    os.system("sh " + os.getcwd() + "/StabilityCheck.sh ")  #just compile

    print 'STABILITY'
    FarmDirectory = "FARM"
    JobName = "HSCPStability"
    LaunchOnCondor.Jobs_RunHere = 0
    LaunchOnCondor.Jobs_Queue = "8nh"
    LaunchOnCondor.SendCluster_Create(FarmDirectory, JobName)
    #LaunchOnCondor.SendCluster_Push(["FWLITE", os.getcwd()+"/StabilityCheck.C", '"ANALYSE"'])
    #LaunchOnCondor.SendCluster_Push(["BASH", "sh " + os.getcwd()+"/StabilityCheck.sh " + os.getcwd()+"/pictures"])

    #NJobs = 500
    #for Job in range(0,NJobs) :
    #      LaunchOnCondor.SendCluster_Push(["BASH", "sh " + os.getcwd()+"/StabilityCheck.sh " + os.getcwd()+"/pictures " + str(Job) +" " + str(NJobs)])
    #LaunchOnCondor.SendCluster_Submit()

    cwd = '%s/src/SUSYBSMAnalysis/HSCP/test/UsefulScripts/StabilityCheck' % os.environ[
        'CMSSW_BASE']
    f = open('%s/../../AnalysisCode/Analysis_Samples.txt' % cwd, 'r')
    #   f= open('Analysis_Samples_tmp.txt','r')
    index = -1
    for line in f:
        index += 1
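
#Hedged sketch: initProxy() is called above but its definition is not part of
#this excerpt. The body below is reassembled from the visible pieces in these
#examples (the proxy-age check in a later excerpt, Example #12, and the
#voms-proxy-init command at the top of this one); the exact condition in the
#real script may differ. It assumes os and time are already imported, as
#elsewhere in the script.
def initProxy():
    proxyPath = os.path.expanduser('~/x509_user_proxy/x509_proxy')
    if (not os.path.isfile(proxyPath)
            or ((time.time() - os.path.getmtime(proxyPath)) > 600)):
        print "You are going to run on a grid sample using either CRAB or the AAA protocol, so your grid certificate needs to be initialized"
        os.system(
            'mkdir -p ~/x509_user_proxy; voms-proxy-init --voms cms -valid 192:00 --out ~/x509_user_proxy/x509_proxy'
        )  #all must be done in the same command to avoid environment problems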
Example #3
   FarmDirectory = "MERGECrab"
   MergeTemplateName = "Merge_Template_cfg.py"
   EndPath = ""

   if TransferDirectlyToStorage:
      print "Grid certificate is needed for the final lcg-cp command ..."
      initProxy()
      EndPath = "%s/HSCP2016" % StorageDir
   else:
      EndPath = "%s/%s/outputs" % (os.getcwd(), FarmDirectory)

   if not os.path.isdir(EndPath):
      os.system("mkdir -p %s" % EndPath)
   runs = getRunList(AllLumisFile)
   createMergeConfigTemplate(MergeTemplateName)
   LaunchOnCondor.SendCluster_Create(FarmDirectory, "HSCPEdmMerge")
   for run in runs:
      paths = ["%s/DoubleMuon/crab_Run%s_DoubleMuon/*/0000/" % (StorageDir, run),
               "%s/MET/crab_Run%s_MET/*/0000/" % (StorageDir, run),
               "%s/SingleMuon/crab_Run%s_SingleMuon/*/0000/" % (StorageDir, run)]
      createToMergeList(paths)

      LaunchOnCondor.Jobs_InitCmds   = ['export HOME=%s' % os.environ['HOME'], 'export X509_USER_PROXY=$HOME/x509_user_proxy/x509_proxy']
      LaunchOnCondor.Jobs_FinalCmds  = ['rm -f %s/Run2016_%s.root' % (EndPath, run)]


      if TransferDirectlyToStorage:
         LaunchOnCondor.Jobs_FinalCmds += ["lcg-cp -v -n 10 -D srmv2 -b file://${PWD}/Run2016_%s.root srm://ingrid-se02.cism.ucl.ac.be:8444/srm/managerv2\?SFN=%s/Run2016_%s.root && rm -f Run2016_%s.root" % (run, EndPath, run, run)] # if you do not use zsh, change '\?' to '?'
      else:
         LaunchOnCondor.Jobs_FinalCmds += ["mv Run2016_%s.root %s" % (run, EndPath)]
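
#Hedged sketch: getRunList() is defined elsewhere in the real script. Assuming
#AllLumisFile is a JSON lumi mask in the usual CMS format (keys are run
#numbers), a minimal implementation could look like this; the real helper may
#differ.
import json

def getRunList(lumisFile):
   with open(lumisFile) as f:
      lumiMask = json.load(f)
   return sorted(lumiMask.keys(), key=int)  #run numbers as strings, e.g. '273158'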
Example #4
    )
    initProxy()

    datasetList = []
    for DATASETMASK in DATASETMASKS:
        command_out = commands.getstatusoutput(
            'das_client --limit=0 --query "dataset=' + DATASETMASK + '"')
        print 'das_client --limit=0 --query "dataset=' + DATASETMASK + '"'
        print command_out
        datasetList += command_out[1].split()

    #get the list of samples to process from a local file
    #datasetList= open('DatasetList','r')
    JobName = "HSCPEdmProd"
    FarmDirectory = "FARM"
    LaunchOnCondor.SendCluster_Create(FarmDirectory, JobName)
    LaunchOnCondor.Jobs_Queue = '8nh'

    os.system("mkdir -p out")
    for DATASET in datasetList:
        DATASET = DATASET.replace('\n', '')
        FILELIST = filesFromDataset(DATASET)
        LaunchOnCondor.Jobs_InitCmds = []
        if (not ISLOCAL):
            LaunchOnCondor.Jobs_InitCmds = [
                'export HOME=%s' % os.environ['HOME'],
                'export X509_USER_PROXY=~/x509_user_proxy/x509_proxy; voms-proxy-init --noregen;'
            ]

        print DATASET + " : "
        for RUN in FILELIST:
Example #5
command_out = commands.getstatusoutput(
    'das_client.py --limit=0 --query "dataset=' + DATASETMASK + '"')
datasetList = command_out[1].split()

#get the list of samples to process from a local file
#datasetList= open('DatasetList','r')
for DATASET in datasetList:
    DATASET = DATASET.replace('\n', '')
    NAME = nameFromDataset(DATASET)
    FILELIST = filesFromDataset(DATASET)
    print DATASET + " --> " + NAME + " --> " + str(FILELIST)

    JobName = NAME
    FarmDirectory = "FARM_EDM"
    LaunchOnCondor.SendCluster_Create(FarmDirectory, JobName)
    LaunchOnCondor.Jobs_InitCmds = []
    if (not ISLOCAL):
        LaunchOnCondor.Jobs_InitCmds = [
            'export X509_USER_PROXY=~/x509_user_proxy/x509_proxy; voms-proxy-init --noregen;'
        ]

    f = open("HSCPEDM_cfg.py", "w")
    f.write("import sys, os\n")
    f.write("import FWCore.ParameterSet.Config as cms\n")
    f.write("\n")
    if ('HSCP' in DATASET):
        f.write("isSignal = True\n")
        f.write("isBckg = False\n")
    else:
        f.write("isSignal = False\n")
Example #6
import sys

import SUSYBSMAnalysis.HSCP.LaunchOnCondor as LaunchOnCondor

SAMPLES = [
    'MU', 'MET', 'ELEC', 'Gluino300', 'Gluino1000', 'Gluino1500', 'GMStau126',
    'GMStau494'
]
PT = [20.0, 40.0, 60.0, 100]
NH = [3, 5, 7]
DEDX = [2.7, 3.0, 3.3]

if sys.argv[1] == '1':
    JobName = "SkimEff"
    FarmDirectory = "FARM_SkimEff"
    LaunchOnCondor.Jobs_NEvent = 100000
    LaunchOnCondor.Jobs_Skip = 0
    LaunchOnCondor.SendCluster_Create(FarmDirectory, JobName)
    for s in SAMPLES:
        for p in PT:
            for n in NH:
                for d in DEDX:
                    LaunchOnCondor.SendCluster_Push([
                        "CMSSW", "HSCPSkim_cfg.py", "XXX_SAMPLE_XXX",
                        str(s), "XXX_PT_XXX",
                        str(p), "XXX_NH_XXX",
                        str(n), "XXX_DEDX_XXX",
                        str(d)
                    ])
    LaunchOnCondor.SendCluster_Submit()
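
    #Note (sketch): the four nested loops above enumerate the full SAMPLES x PT
    #x NH x DEDX grid; an equivalent, behaviour-preserving form uses
    #itertools.product:
    #
    #  import itertools
    #  for s, p, n, d in itertools.product(SAMPLES, PT, NH, DEDX):
    #      LaunchOnCondor.SendCluster_Push([
    #          "CMSSW", "HSCPSkim_cfg.py", "XXX_SAMPLE_XXX", str(s),
    #          "XXX_PT_XXX", str(p), "XXX_NH_XXX", str(n),
    #          "XXX_DEDX_XXX", str(d)
    #      ])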

if sys.argv[1] == '2':
    #LaunchOnCondor.runInteractively = True
Example #7
            f.write("\n")
            f.write("InputFileList.extend(['file:" + "step2.root" + "'])\n")

            f.write("\n")
            f.write(
                "#main EDM tuple cfg that depends on the above parameters\n")
            f.write(
                "execfile( os.path.expandvars('${CMSSW_BASE}/src/SUSYBSMAnalysis/HSCP/test/MakeEDMtuples/HSCParticleProducer_cfg.py') )\n"
            )
            f.close()

        JobName = S[0] + "_SIMEDM"
        FarmDirectory = "FARM_" + JobName
        LaunchOnCondor.Jobs_NEvent = S[3]
        LaunchOnCondor.Jobs_Skip = 0
        LaunchOnCondor.SendCluster_Create(FarmDirectory, JobName)
        LaunchOnCondor.Jobs_InitCmds = [
            'export X509_USER_PROXY=~/x509_user_proxy/x509_proxy; voms-proxy-init --noregen;'
        ]
        for i in range(0, S[2]):
            LaunchOnCondor.Jobs_Count = i
            LaunchOnCondor.Jobs_Skip += LaunchOnCondor.Jobs_NEvent
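            #note: Jobs_Skip is advanced before the push, so by the time the first
            #job (i = 0) is pushed, Jobs_Skip already equals Jobs_NEvent; the same
            #pattern appears in the SIMAOD loop of Example #14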
            LaunchOnCondor.SendCluster_Push([
                "CMSSW",
                [
                    "GEN_SIM_Template_cfg.py", "RECO_Template_cfg.py",
                    "HSCPEDM_Template_cfg.py"
                ]
            ])
            LaunchOnCondor.Jobs_FinalCmds = ['rm step1.root; rm step2.root']
        LaunchOnCondor.SendCluster_Submit()
Example #8
    initProxy()

    print("compile the MuonTimingStudy code")
    os.system("sh " + os.getcwd() + "/MuonTimingStudy.sh ")  #just compile

    command_out = commands.getstatusoutput(
        'das_client --limit=0 --query "dataset=' + DATASETMASK + '"')
    print 'das_client --limit=0 --query "dataset=' + DATASETMASK + '"'
    print command_out
    datasetList = command_out[1].split()

    #get the list of samples to process from a local file
    #datasetList= open('DatasetList','r')
    JobName = "CSCTimeStudy"
    FarmDirectory = "FARM_TEMP"
    LaunchOnCondor.SendCluster_Create(FarmDirectory, JobName)
    LaunchOnCondor.Jobs_Queue = '8nh'

    os.system("mkdir -p out")
    for DATASET in datasetList:
        DATASET = DATASET.replace('\n', '')
        FILELIST = filesFromDataset(DATASET)
        LaunchOnCondor.Jobs_InitCmds = []
        if (not ISLOCAL):
            LaunchOnCondor.Jobs_InitCmds = [
                'export X509_USER_PROXY=~/x509_user_proxy/x509_proxy; voms-proxy-init --noregen;'
            ]

        print DATASET + " : "
        for RUN in FILELIST:
            print str(RUN) + " --> %i files to process" % len(FILELIST[RUN])
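
#Hedged sketch: filesFromDataset() is defined elsewhere in the real scripts.
#Judging from the loop above (FILELIST[RUN] is the list of files for each run),
#it returns a dict mapping run number to files. One possible das_client based
#implementation is sketched below; the query strings are an assumption, not the
#actual helper.
def filesFromDataset(dataset):
    filesByRun = {}
    runs = commands.getstatusoutput(
        'das_client --limit=0 --query "run dataset=' + dataset + '"')[1].split()
    for run in runs:
        files = commands.getstatusoutput(
            'das_client --limit=0 --query "file dataset=' + dataset +
            ' run=' + run + '"')[1].split()
        filesByRun[run] = files
    return filesByRun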
Example #9
LoadJson(JSON)

if args.STEP == 0:
    os.system("rm -f *.d *.so *.pcm *.root *.pyc")
    os.system("rm -rdf FARM_TEMP out pictures")

if args.STEP == 1:
    print 'proxy initialization..\n==============='
    #initProxy()
    print 'splitting json by run....\n=============='
    splitLumiFileByRuns(JSON)

    os.system("mkdir -p out")
    JobName = "HSCPEdmProd"
    FarmDirectory = "FARM"
    LaunchOnCondor.SendCluster_Create(FarmDirectory, JobName)
    LaunchOnCondor.Jobs_Queue = '8nh'
    LaunchOnCondor.Jobs_InitCmds = []
    for lumiFile in LumiFiles:

        matchedDatasets = findDatasetsForRun(lumiFile, PrimaryDatasetsToMatch)
        matchedDatasets.sort()
        runN = getRunNum(lumiFile)
        print matchedDatasets

        FilesByRun = []
        for dataset in matchedDatasets:
            filesFromDataset(dataset)

        if (not ISLOCAL):
            LaunchOnCondor.Jobs_InitCmds = [
Example #10
                run, MET, correctMET, singleMuon, correctSingleMuon,
                doubleMuon, correctDoubleMuon
            ])
    if not printNumberOfFiles:
        for entry in runsToPrint:
            print entry[0], finalJudgementString
    else:
        for entry in runsToPrint:
            print "Run %s %s: MET (%i/%i), SingleMuon (%i/%i), DoubleMuon (%i/%i)" % (
                entry[0], finalJudgementString, entry[1], entry[2], entry[3],
                entry[4], entry[5], entry[6])

    if printOnlySuccess and mergeAvailableRuns:

        FarmDirectory = 'MERGEAvailable'
        LaunchOnCondor.SendCluster_Create(FarmDirectory, "HSCPEdmMergeAvail")
        LaunchOnCondor.Jobs_Queue = '8nh'
        AlreadyMergingRuns = []

        if mergeOnTheFly:
            runningScripts = os.popen(
                'condor_q %s -long | grep Cmd | grep HSCPEdmMergeAvail' %
                (os.environ['USER'])).read()
            runningScripts = runningScripts.split('\n')
            runningScripts.pop()
            for script in runningScripts:
                if script.find(os.getcwd()) == -1: continue
                script = script.split('=')[1]
                script = script.replace('"', '')
                script = script.split('/')[-1]
                script = script.split('_')[2]
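                #the Cmd value reported by condor_q is reduced here to the token
                #embedded in the submitted script name (presumably the run number),
                #so that runs already being merged can be recognized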
Example #11
isLocal = False  #when False, data stored in Louvain is accessed from remote sites
if commands.getstatusoutput("hostname -f")[1].find("ucl.ac.be") != -1: isLocal = True
os.system('rm -rf ~/x509_user_proxy/x509_proxy')


if sys.argv[1] == '1':
    os.system("sh " + os.getcwd() + "/DeDxStudy.sh ")  #just compile

    for DATASET in datasetList:
        outdir = os.getcwd() + "/Histos/" + DATASET[0] + "/"
        os.system('mkdir -p ' + outdir)

        JobName = "DEDXHISTO_" + DATASET[0]
        FarmDirectory = "FARM_DEDXHISTO_" + DATASET[0]
        LaunchOnCondor.Jobs_Queue = '8nh'
        LaunchOnCondor.SendCluster_Create(FarmDirectory, JobName)

        FILELIST = []
        if DATASET[1][-1] == '/':  #file path is a directory, consider all files from the directory
            if isLocal:
                FILELIST = LaunchOnCondor.GetListOfFiles('', DATASET[1] + '/*.root', '')
            else:
                initProxy()
                initCommand = 'export X509_USER_PROXY=~/x509_user_proxy/x509_proxy; voms-proxy-init --noregen;'
                LaunchOnCondor.Jobs_InitCmds = [initCommand]
                lsCommand = initCommand + 'lcg-ls -b -D srmv2 "srm://ingrid-se02.cism.ucl.ac.be:8444/srm/managerv2?SFN=' + DATASET[1] + '" | xargs -I {} basename {}'
                print lsCommand
                print commands.getstatusoutput(lsCommand)
                LocalFileList = commands.getstatusoutput(lsCommand)[1].split('\n')
                for f in LocalFileList:
                    if f[-5:].find('.root') == -1: continue  #only .root files considered
                    FILELIST += ["root://cms-xrd-global.cern.ch/" + DATASET[1].replace('/storage/data/cms/store/', '/store/') + f]
Example #12
            or ((time.time() - os.path.getmtime(
                os.path.expanduser('~/x509_user_proxy/x509_proxy'))) > 600)):
        print "You are going to run on a sample over grid using either CRAB or the AAA protocol, it is therefore needed to initialize your grid certificate"
        os.system(
            'mkdir -p ~/x509_user_proxy; voms-proxy-init --voms cms -valid 192:00 --out ~/x509_user_proxy/x509_proxy'
        )  #all must be done in the same command to avoid environement problems.  Note that the first sourcing is only needed in Louvain


if sys.argv[1] == '1':
    if UseRemoteSamples:
        initProxy()
    print 'ANALYSIS'
    FarmDirectory = "FARM"
    JobName = "HscpAnalysis"
    LaunchOnCondor.Jobs_RunHere = 0
    LaunchOnCondor.SendCluster_Create(FarmDirectory, JobName)
    f = open('Analysis_Samples.txt', 'r')
    index = -1
    for line in f:
        index += 1
        if (line.startswith('#')): continue
        vals = line.split(',')
        if ((vals[0].replace('"', '')) in CMSSW_VERSION):
            for Type in AnalysesToRun:
                #LaunchOnCondor.Jobs_FinalCmds = ['mv *.root %s/src/SUSYBSMAnalysis/HSCP/test/AnalysisCode/Results/Type%i/' % (os.environ['CMSSW_BASE'], Type)]
                LaunchOnCondor.Jobs_FinalCmds = [
                    'cp -r Results %s/src/SUSYBSMAnalysis/HSCP/test/AnalysisCode/ && rm -rf Results'
                    % (os.environ['CMSSW_BASE'])
                ]
                if (UseRemoteSamples):
                    LaunchOnCondor.Jobs_InitCmds = [
Example #13
    #["RunD_*_204801_205100","RunD_204801_205100"],
    #["RunD_*_205101_205400","RunD_205101_205400"],
    #["RunD_*_205401_205700","RunD_205401_205700"],
    #["RunD_*_205701_206000","RunD_205701_206000"],
    #["RunD_*_206001_206300","RunD_206001_206300"],
    #["RunD_*_206301_206600","RunD_206301_206600"],
    #["RunD_*_206601_206900","RunD_206601_206900"],
    #["RunD_*_206901_207200","RunD_206901_207200"],
    #["RunD_*_207201_207500","RunD_207201_207500"],
    #["RunD_*_207501_207800","RunD_207501_207800"],
    #["RunD_*_207801_208100","RunD_207801_208100"],
    #["RunD_*_208101_208357","RunD_208101_208357"],
    ["RunD_*_208358_208686", "RunD_208358_208686"]
]

FarmDirectory = "MERGE"
for j in Jobs:
    LaunchOnCondor.ListToFile(
        LaunchOnCondor.GetListOfFiles(
            '"dcache:',
            '/pnfs/cms/WAX/11/store/user/farrell3/HSCPEDMUpdateData2012_30Nov2012/'
            + j[0] + '/HSCP_*.root', '",'), FarmDirectory + "InputFile.txt")
    #LaunchOnCondor.SendCMSJobs(FarmDirectory, j[1], "Merge_cfg.py", FarmDirectory + "InputFile.txt", 1, ['XXX_SAVEPATH_XXX','file:/storage/data/cms/users/quertenmont/HSCP/CMSSW_4_2_8/12_08_16/'])
    LaunchOnCondor.SendCMSJobs(
        FarmDirectory, j[1], "Merge_cfg.py", FarmDirectory + "InputFile.txt",
        1, [
            'XXX_SAVEPATH_XXX',
            '/uscmst1b_scratch/lpc1/3DayLifetime/farrell/2012Data_04Sep2012'
        ])
os.system("rm " + FarmDirectory + "InputFile.txt")
Example #14
            repla += '\n'
            repla += '# Path and EndPath definitions\n'

            lines = lines.replace('# Path and EndPath definitions', repla)
            lines = lines.replace('cms.Path(process.psim)',
                                  'cms.Path(process.psim * process.shifter)')

        with open("GEN_SIM_Template_cfg.py", "w") as f:
            f.write(lines)
            f.close()

        JobName = S[0] + "_SIMAOD"
        FarmDirectory = "FARM_" + JobName
        LaunchOnCondor.Jobs_NEvent = S[3]
        LaunchOnCondor.Jobs_Skip = 0
        LaunchOnCondor.SendCluster_Create(FarmDirectory, JobName)
        for i in range(0, S[2]):
            LaunchOnCondor.Jobs_Count = i
            LaunchOnCondor.Jobs_Skip += LaunchOnCondor.Jobs_NEvent
            LaunchOnCondor.SendCluster_Push(
                ["CMSSW", ["GEN_SIM_Template_cfg.py", "RECO_Template_cfg.py"]])
            #LaunchOnCondor.Jobs_FinalCmds = ['mv step1.root ' + os.getcwd()+"/FARM_"+S[0]+"_SIMAOD/outputs/"+S[0]+"_SIM_%04i.root" % i]
        LaunchOnCondor.SendCluster_Submit()

elif sys.argv[1] == '2':  #AOD --> EDM files
    for S in samples:
        JobName = S[0]
        FarmDirectory = "FARM_EDM"
        LaunchOnCondor.SendCluster_Create(FarmDirectory, JobName)

        f = open("HSCPEDM_cfg.py", "w")