Example #1
def test2():
    dataset_names = [
        # "/TT_TuneCUETP8M2T4_13TeV-powheg-pythia8/RunIISummer17MiniAOD-92X_upgrade2017_realistic_v10_ext1-v1/MINIAODSIM",
        "/Dummy_test_StopBabyMaker_v25/CMS4",
    ]

    # Make a base directory
    basedir = "/hadoop/cms/store/user/{0}/metis_test/example/".format(os.getenv("USER"))
    MutableFile(basedir).touch()

    # Make a directory sample, giving it the location and a dataset name for bookkeeping purposes
    # The globber must be customized (by default, it is *.root) in order to pick up the text files
    ds = DirectorySample(location=basedir, dataset="/TEST/Examplev1/TEST", globber="*.txt")

    # Make a CondorTask (this will create 3 condor jobs in total, one per input file)
    task = CondorTask(
            sample = ds,
            files_per_output = 1,
            tag = "v0",
            output_name = "ttbar_powheg_pythia8_92X.root",
            executable = "condor_executable.sh",
            cmssw_version = "CMSSW_9_3_1",
            scram_arch = "slc6_amd64_gcc700",
            arguments = "testarg1",
            tarfile = "input.tar.gz",
            condor_submit_params = {"sites": "UAF,T2_US_UCSD,UCSB"},
            no_load_from_backup = True, # for the purpose of the example, don't use a backup
    )
    # do_cmd("rm -rf {0}".format(task.get_outputdir()))

    # Process and sleep until complete
    is_complete = False
    for t in [5.0, 5.0, 10.0, 15.0, 20.0]:
        task.process()
        print("Sleeping for {0} seconds".format(int(t)))
        time.sleep(t)
        is_complete = task.complete()
        if is_complete: break

    # If it's complete, make a dummy sample out of the output directory
    # in order to pick up the files. Then cat out the contents and sum
    # them up. This should be 3*2*10 = 60
    if is_complete:
        print("Job completed! Checking outputs...")
        outsamp = DirectorySample(location=task.get_outputdir(), dataset="/Blah/blah/BLAH", globber="*.txt")
        tot = 0
        for f in outsamp.get_files():
            mf = MutableFile(f.get_name())
            tot += int(mf.cat())
        print("It looks like we found 3*2*10 = {0}".format(tot))
Example #2
def test_instantiation(self):
    dsname = "/blah/blah/BLAH/"
    dirsamp = DirectorySample(dataset=dsname, location="/dummy/dir/")
    self.assertEqual(len(dirsamp.get_files()), 0)
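
# A minimal companion sketch to the test above (hypothetical paths and file
# names; the imports assume the usual metis module layout): once the location
# actually contains files matching the globber, get_files() picks them up,
# as in Example #1.
import os
from metis.Sample import DirectorySample
from metis.File import MutableFile

demo_dir = "/tmp/metis_dirsample_demo/"  # hypothetical scratch directory
MutableFile(demo_dir).touch()  # as in Example #1, this makes the base directory
for i in range(3):
    MutableFile(os.path.join(demo_dir, "input_{}.txt".format(i))).touch()

demo_samp = DirectorySample(dataset="/Demo/demo/TEST", location=demo_dir, globber="*.txt")
print([f.get_name() for f in demo_samp.get_files()])  # expect the three *.txt paths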
Example #3
def main():

    if data_year == "2016":
        samples_to_run = dataset.hww_samples_to_run_2016
        samples_short_name = dataset.samples_short_name_2016
        dslocs = dataset.dslocscms4_2016_allpf

    # file/dir paths
    main_dir = os.path.dirname(os.path.abspath(__file__))
    metis_path = os.path.dirname(os.path.dirname(metis.__file__))
    tar_path = os.path.join(metis_path, "package.tar")
    tar_gz_path = tar_path + ".gz"
    metis_dashboard_path = os.path.join(metis_path, "dashboard")
    exec_path = os.path.join(main_dir, "metis.sh")
    merge_exec_path = os.path.join(main_dir, "merge.sh")
    hadoop_path = "metis/baby/{}".format(
        job_tag
    )  # The output goes to /hadoop/cms/store/user/$USER/"hadoop_path"
    if job_tag.find("HWW") != -1:
        args = "0"  # HWWBaby

    # Create tarball
    os.chdir(main_dir)
    os.system(
        "tar -chzf {} localsetup.sh processBaby *.so *.pcm rooutil/lib*.so coreutil/data coreutil/lib*.so *.txt btagsf MVAinput jetCorrections leptonSFs puWeight2016.root pileup_jul21_nominalUpDown.root ../CORE/Tools/ mergeHadoopFiles.C rooutil/hadd.py fastjet/fastjet-install/lib"
        .format(tar_gz_path))

    # Change directory to metis
    os.chdir(metis_path)

    total_summary = {}

    # Loop over datasets to submit
    while True:

        all_tasks_complete = True

        for sample in samples_to_run:

            loc = dslocs[sample]

            # define the task
            maker_sample_name = "/MAKER_" + sample[1:]
            maker_task = CondorTask(
                sample=DirectorySample(dataset=maker_sample_name,
                                       location=loc),
                tag=job_tag,
                arguments=args,
                executable=exec_path,
                tarfile=tar_gz_path,
                special_dir=hadoop_path,
                output_name="output.root",
                files_per_output=1,
                condor_submit_params={"sites": "T2_US_UCSD"},
                open_dataset=False,
                flush=True,
                #min_completion_fraction = 0.5,
                #no_load_from_backup  = True,
            )

            # process the job (either submits, checks for resubmit, or finishes etc.)
            maker_task.process()

            # save some information for the dashboard
            total_summary[maker_task.get_sample().get_datasetname()] = maker_task.get_task_summary()

            # define the task
            merge_sample_name = "/MERGE_" + sample[1:]
            merge_task = CondorTask(
                sample=DirectorySample(dataset=merge_sample_name,
                                       location=maker_task.get_outputdir()),
                tag=job_tag,
                executable=merge_exec_path,
                tarfile=tar_gz_path,
                files_per_output=100000,
                output_dir=maker_task.get_outputdir() + "/merged",
                output_name=samples_short_name[sample] + ".root",
                condor_submit_params={"sites": "T2_US_UCSD"},
                open_dataset=False,
                flush=True,
                output_is_tree=True,
                cmssw_version="CMSSW_9_2_0",
                scram_arch="slc6_amd64_gcc530",
                #no_load_from_backup  = True,
            )

            if maker_task.complete():

                # process the job (either submits, checks for resubmit, or finishes etc.)
                merge_task.process()

                # save some information for the dashboard
                total_summary[merge_task.get_sample().get_datasetname()] = merge_task.get_task_summary()

            # Aggregate whether all maker tasks are complete
            all_tasks_complete = all_tasks_complete and maker_task.complete()

        # parse the total summary and write out the dashboard
        StatsParser(data=total_summary, webdir=metis_dashboard_path).do()

        # Print the msummary table so I don't have to load up the website
        os.system("msummary -r | tee summary.txt")
        os.system("chmod -R 755 {}".format(metis_dashboard_path))

        # If all done, exit the loop
        if all_tasks_complete:
            print("")
            print("Job={} finished".format(job_tag))
            print("")
            break

        # Neat trick to keep the script alive while allowing a forced update
        print("Press Ctrl-C to force an update, otherwise will sleep for 300 seconds")
        try:
            for i in range(300):
                sleep(1)  # could use a backward counter to be pretty :)
        except KeyboardInterrupt:
            input("Press Enter to force an update, or Ctrl-C to quit.")
            print("Force updating...")
Example #4
File: wwwmetis.py Project: sgnoohc/www
os.chdir(scriptsdir)
os.system("tar -rf {} *.sh *.C ".format(tar_path))
os.chdir(wwwdir)
os.system("gzip -f {}".format(tar_path))

# Configurations
baby_version = "16"
exec_path = os.path.join(scriptsdir, "run.sh")
args = "WWW_ScanChain.C output.root t -1 doskim"
total_summary = {}

while True:
    task = CondorTask(
            sample = DirectorySample(
                dataset="/WWW_v0_1_{}".format(baby_version),
                location="/hadoop/cms/store/user/bhashemi/AutoTwopler_babies/merged/VVV/WWW_v0.1.16/skim/",
                globber="*.root"
                ),
            open_dataset = False,
            flush = True,
            files_per_output = 1,
            output_name = "merged.root",
            tag = job_tag,
            arguments = args,
            executable = exec_path,
            tarfile = targzpath,
            special_dir = hadoop_path,
            condor_submit_params = {"sites" : "UAF,T2_US_UCSD"}
            )
    task.process()
    # save some information for the dashboard
Example #5
def test_get_globaltag(self):
    dirsamp = DirectorySample(dataset="/blah/blah/BLAH/",
                              location="/dummy/dir/")
    dirsamp.info["gtag"] = "dummygtag"
    self.assertEqual(dirsamp.get_globaltag(), dirsamp.info["gtag"])
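
# As Example #7 below shows, the global tag can also be passed directly at
# construction via the gtag keyword; a minimal sketch of the equivalent check:
dirsamp = DirectorySample(dataset="/blah/blah/BLAH/",
                          location="/dummy/dir/",
                          gtag="dummygtag")
assert dirsamp.get_globaltag() == "dummygtag"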
Example #6
### Check that the specified tag actually exists in the repository
repo_name = '%s/NanoAOD-tools' % args.user

g = Github(cred)
repo = g.get_repo(repo_name)
tags = [ x.name for x in repo.get_tags() ]
if tag not in tags:
    print ("The specified tag %s was not found in the repository: %s"%(tag, repo_name))
    print ("Exiting. Nothing was submitted.")
    exit()
else:
    print ("Yay, located tag %s in repository %s. Will start creating tasks now."%(tag, repo_name) )

# example
sample = DirectorySample(dataset='TTWJetsToLNu_Autumn18v4', location='/hadoop/cms/store/user/dspitzba/nanoAOD/TTWJetsToLNu_TuneCP5_13TeV-amcatnloFXFX-madspin-pythia8__RunIIAutumn18NanoAODv6-Nano25Oct2019_102X_upgrade2018_realistic_v20_ext1-v1/')

outDir = os.path.join(os.path.expandvars(cfg['meta']['localSkim']), tag+'_'+skim if skim=='trilep' else tag)

print ("Output will be here: %s"%outDir)

maker_tasks = []
merge_tasks = []

sample_list = list(samples.keys()) if not args.small else list(samples.keys())[:2]

if args.only:
    sample_list = [x for x in sample_list if args.only in x]

print ("Will run over the following samples:")
print (sample_list)
print ()
Example #7
def get_tasks():

    samples = [

        # DirectorySample(
        #     location = "/hadoop/cms/store/user/mliu/mcProduction/MINIAODSIM/www/",
        #     globber = "*_MiniAODv2.root",
        #     dataset = "/PrivateWWW/www-cms4-Private80X-v1/MINIAODSIM",
        #     gtag = "80X_mcRun2_asymptotic_2016_TrancheIV_v6",
        #     ),

        # DirectorySample(
        #     location = "/hadoop/cms/store/user/mliu/mcProduction/MINIAODSIM/www_ext/",
        #     globber = "www_ext_*_MiniAODv2.root",
        #     dataset = "/PrivateWWW/wwwext-cms4-Private80X-v1/MINIAODSIM",
        #     gtag = "80X_mcRun2_asymptotic_2016_TrancheIV_v6",
        #     ),

        # DirectorySample(
        #     location = "/hadoop/cms/store/user/mliu/mcProduction/MINIAODSIM/TChiWH_HToVVTauTau_HToBB_mChargino200_mLSP50/",
        #     globber = "*_MiniAODv2.root",
        #     dataset = "/TChiWH_HToVVTauTau_HToBB_mChargino200_mLSP50/cms4-Private80X-v1/MINIAODSIM",
        #     gtag = "80X_mcRun2_asymptotic_2016_TrancheIV_v6",
        #     ),
        DirectorySample(
            location="/hadoop/cms/store/user/bhashemi/mcProduction/MINIAODSIM/wh_ext/",
            globber="*MiniAODv2.root",
            dataset="/TChiWH_HToVVTauTau_HToBB_mChargino200_mLSP50/cms4-Private80X-v1/MINIAODSIM",
            gtag="80X_mcRun2_asymptotic_2016_TrancheIV_v6",
        ),
    ]

    tasks = []

    for sample in samples:

        pset_args = "data=False"
        cmsswver = "CMSSW_8_0_26_patch1"
        tarfile = "/nfs-7/userdata/libCMS3/lib_CMS4_V00-00-02_2017Sep27.tar.gz"

        task = CMSSWTask(
            sample=sample,
            files_per_output=30,
            output_name="merged_ntuple.root",
            tag="CMS4_V00-00-02_2017Sep27",
            pset="psets/pset_moriondremc.py",
            pset_args=pset_args,
            # condor_submit_params = {"use_xrootd":True},
            condor_submit_params={"sites": "T2_US_UCSD"},
            cmssw_version=cmsswver,
            tarfile=tarfile,
            special_dir="run2_moriond17_cms4/ProjectMetis",
            min_completion_fraction=0.90,
            publish_to_dis=True,
        )

        tasks.append(task)

    return tasks
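
# A driver loop in the same spirit as Examples #3 and #10 (the webdir path is
# illustrative, and the StatsParser import assumes the usual metis layout):
import time
from metis.StatsParser import StatsParser

while True:
    tasks = get_tasks()
    total_summary = {}
    for task in tasks:
        task.process()
        total_summary[task.get_sample().get_datasetname()] = task.get_task_summary()
    StatsParser(data=total_summary, webdir="~/public_html/dump/metis/").do()
    if all(task.complete() for task in tasks):
        break
    time.sleep(600)  # re-check every 10 minutes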
Example #8
def get_tasks(samples_dictionary,
              year,
              baby_type,
              baby_version_tag,
              dotestrun=False):

    job_tag = "{}{}_{}".format(baby_type, year, baby_version_tag)

    # file/dir paths
    main_dir = os.path.dirname(os.path.abspath(__file__))
    metis_path = os.path.dirname(os.path.dirname(metis.__file__))
    tar_path = os.path.join(metis_path, "package_{}.tar".format(job_tag))
    tar_gz_path = tar_path + ".gz"
    exec_path = os.path.join(main_dir, "metis.sh")
    merge_exec_path = os.path.join(main_dir, "merge.sh")
    hadoop_path = "metis/{}/{}".format(
        hadoop_dirname, job_tag
    )  # The output goes to /hadoop/cms/store/user/$USER/"hadoop_path"

    # Extra arguments passed on to ./processBaby to specify which baby to create from the babymaker binary executable
    args = ""
    if job_tag.find("WVZ") != -1:
        args = "1"
    if job_tag.find("Dilep") != -1:
        args = "2"
    if job_tag.find("Trilep") != -1:
        args = "3"
    if job_tag.find("WVZMVA") != -1:
        args = "4"
    if job_tag.find("Truth") != -1:
        args = "5"
    if job_tag.find("WVZAll") != -1:
        args = "6"

    # Change directory to metis
    os.chdir(metis_path)

    tasks = []

    # BEGIN Sample Loop -------->
    # loop over the samples
    for sample in sorted(samples_dictionary.keys()):

        #
        # Job 1 : Creating baby
        #

        # define the task
        maker_task = CondorTask(
            sample=SNTSample(
                dataset=sample,
                # exclude_tag_pattern="CMS4_V08-*", # ignore new samples by sicheng for 2016
                exclude_tag_pattern="*v516*",  # ignore new samples by sicheng for 2016
            ),
            tag=job_tag,
            arguments=args,
            executable=exec_path,
            tarfile=tar_gz_path,
            special_dir=hadoop_path,
            output_name="output.root",
            files_per_output=1,
            condor_submit_params={"sites": "T2_US_UCSD"},
            # condor_submit_params = {"sites" : "UAF,T2_US_Wisconsin,T2_US_Florida,T2_US_Nebraska,T2_US_Caltech,T2_US_MIT,T2_US_Purdue"},
            # condor_submit_params = {"sites" : "UAF,T2_US_Wisconsin,T2_US_Florida,T2_US_Nebraska,T2_US_Caltech,T2_US_MIT"},
            # condor_submit_params = {"sites" : "UAF"},
            open_dataset=False,
            flush=True,
            max_jobs=5 if dotestrun else 0,
            # min_completion_fraction = 1.0 if "Run201" in sample else 0.9,
            # min_completion_fraction = 0.9,
            #no_load_from_backup  = True,
        )

        print(sample, job_tag)

        tasks.append(maker_task)

        #
        # Job 2 : Merging baby outputs
        #

        if maker_task.complete() and not dotestrun:

            merge_sample_name = "/MERGE_" + sample[1:]

            merge_task = CondorTask(
                sample=DirectorySample(dataset=merge_sample_name,
                                       location=maker_task.get_outputdir()),
                # open_dataset         = True, flush = True,
                executable=merge_exec_path,
                tarfile=tar_gz_path,
                files_per_output=1,
                output_dir=maker_task.get_outputdir() + "/merged",
                output_name=samples_dictionary[sample] + ".root",
                condor_submit_params={"sites": "UAF"},
                output_is_tree=True,
                # check_expectedevents = True,
                tag=job_tag,
                cmssw_version="CMSSW_9_2_0",
                scram_arch="slc6_amd64_gcc530",
                #no_load_from_backup    = True,
                max_jobs=1,
            )
            merge_task.reset_io_mapping()
            merge_task.update_mapping()

            tasks.append(merge_task)

    # <------ END Sample Loop

    return tasks
Example #9
File: submit.py Project: aminnj/scouting
    #         tag = tag,
    #         cmssw_version = "CMSSW_10_2_5",
    #         scram_arch = "slc6_amd64_gcc700",
    #         tarfile = "inputs_nm1.tar.gz",
    #         executable = "condor_exe.sh",
    #         condor_submit_params = {
    #             "sites":"T2_US_UCSD",
    #             },
    #         )
    # tasks.append(task)

    tag = "histsv3"
    task = CondorTask(
        sample=DirectorySample(
            location="/hadoop/cms/store/user/namin/ProjectMetis/ScoutingCaloMuon_Run201*_v13_RAW_v25/",
            dataset="/Scouting/main/HISTS",
        ),
        files_per_output=2,
        output_name="output.root",
        tag=tag,
        cmssw_version="CMSSW_10_2_5",
        scram_arch="slc6_amd64_gcc700",
        tarfile="inputs_main.tar.gz",
        executable="condor_exe.sh",
        condor_submit_params={
            "sites": "T2_US_UCSD",
        },
    )
    tasks.append(task)
Example #10
def runall(special_dir, tag, total_nevents, events_per_output, config):
    #def runall(special_dir, tags, total_nevents, events_per_output, configs):

    for _ in range(2500):

        #  for i in range(len(configs)):
        #    config = configs[i]
        #    special_dir = special_dirs[i]
        #    tag = tags[i]

        proc_tag = "v1"
        #special_dir = "workflowtest/ProjectMetis"
        #special_dir = "miniaod_runII/JHUSample_ttH"

        cmssw_v_gensim = config["cmssw_v_gensim"]
        pset_gensim = config["pset_gensim"]
        scram_arch_gensim = config["scram_arch_gensim"]

        cmssw_v_aodsim = config["cmssw_v_aodsim"]
        pset_aodsim = config["pset_aodsim"]
        scram_arch_aodsim = config["scram_arch_aodsim"]

        pset_aodsim2 = config["pset_aodsim2"]
        cmssw_v_aodsim2 = cmssw_v_aodsim  # config["cmssw_v_aodsim2"]
        scram_arch_aodsim2 = scram_arch_aodsim  # config["scram_arch_aodsim2"]

        cmssw_v_miniaodsim = config["cmssw_v_miniaodsim"]
        pset_miniaodsim = config["pset_miniaodsim"]
        scram_arch_miniaodsim = config["scram_arch_miniaodsim"]

        step1 = CMSSWTask(
            # Change dataset to something more meaningful (but keep STEP1, as we use this
            # for string replacement later); keep N=1
            sample=DummySample(N=1, dataset="/" + tag + "_STEP1"),
            # A unique identifier
            tag=proc_tag,
            special_dir=special_dir,
            # Probably want to beef up the below two numbers to control splitting,
            # but note that step2 is the bottleneck, so don't put too many events
            # in one output file here
            events_per_output=events_per_output,
            total_nevents=total_nevents,
            #events_per_output = 50,
            #total_nevents = 1000,
            # We have one input dummy file, so this must be True
            split_within_files=True,
            pset="psets/" + pset_gensim,
            cmssw_version=cmssw_v_gensim,
            scram_arch=scram_arch_gensim,
            condor_submit_params={
                "sites": "T2_US_UCSD",
                "classads": [[
                    "SingularityImage",
                    "/cvmfs/singularity.opensciencegrid.org/cmssw/cms:rhel6-m202006",
                ]],
            })

        step2 = CMSSWTask(
            sample=DirectorySample(
                location=step1.get_outputdir(),
                dataset=step1.get_sample().get_datasetname().replace(
                    "STEP1", "STEP2"),
            ),
            tag=proc_tag,
            special_dir=special_dir,
            open_dataset=True,
            files_per_output=1,
            pset="psets/" + pset_aodsim,
            cmssw_version=cmssw_v_aodsim,
            scram_arch=scram_arch_aodsim,
            condor_submit_params={
                "sites": "T2_US_UCSD",
                "classads": [[
                    "SingularityImage",
                    "/cvmfs/singularity.opensciencegrid.org/cmssw/cms:rhel6-m202006",
                ]],
            })

        step3 = CMSSWTask(
            sample=DirectorySample(
                location=step2.get_outputdir(),
                dataset=step2.get_sample().get_datasetname().replace(
                    "STEP2", "STEP3"),
            ),
            tag=proc_tag,
            special_dir=special_dir,
            open_dataset=True,
            files_per_output=1,
            pset="psets/" + pset_aodsim2,
            cmssw_version=cmssw_v_aodsim2,
            scram_arch=scram_arch_aodsim2,
            condor_submit_params={
                "sites": "T2_US_UCSD",
                "classads": [[
                    "SingularityImage",
                    "/cvmfs/singularity.opensciencegrid.org/cmssw/cms:rhel6-m202006",
                ]],
            })

        step4 = CMSSWTask(
            sample=DirectorySample(
                location=step3.get_outputdir(),
                dataset=step3.get_sample().get_datasetname().replace(
                    "STEP3", "STEP4"),
            ),
            tag=proc_tag,
            special_dir=special_dir,
            open_dataset=True,
            files_per_output=1,
            output_name="step4.root",
            pset="psets/" + pset_miniaodsim,
            cmssw_version=cmssw_v_miniaodsim,
            scram_arch=scram_arch_miniaodsim,
            condor_submit_params={
                "sites": "T2_US_UCSD",
                "classads": [[
                    "SingularityImage",
                    "/cvmfs/singularity.opensciencegrid.org/cmssw/cms:rhel6-m202006",
                ]],
            }
            # condor_submit_params = {"sites":"UAF,UCSD"},
        )
        '''
        step5 = CMSSWTask(
                sample = DirectorySample(
                    location = step4.get_outputdir(),
                    dataset = step4.get_sample().get_datasetname().replace("STEP4","STEP5"),
                    ),
                tag = proc_tag,
                special_dir = special_dir,
                open_dataset = True,
                files_per_output = 1,
                pset = "psets/TOP-RunIIFall17NanoAODv7-00001_1_cfg.py",
                # The below two lines should match output file names in the pset
                output_name = "step5.root",
                #other_outputs = ["step3_inMINIAODSIM.root","step3_inDQM.root"],
                cmssw_version = "CMSSW_10_2_22",
                scram_arch = "slc6_amd64_gcc700",
                # condor_submit_params = {"sites":"UAF,UCSD"},
                )
        '''
        #for _ in range(25):
        total_summary = {}
        for task in [step1, step2, step3, step4]:
            task.process()
            summary = task.get_task_summary()
            total_summary[task.get_sample().get_datasetname()] = summary
        StatsParser(data=total_summary,
                    webdir="~/public_html/dump/metis/").do()

        time.sleep(2000)
Example #11
    # Make a directory sample for each dataset, giving it the location and a
    # dataset name for bookkeeping purposes

    # Make one CondorTask per input dataset
    maker_tasks = []
    merge_tasks = []

    for dsname, samploc in cms4_samples.items():
        cmsswver = "CMSSW_9_4_1"
        scramarch = "slc6_amd64_gcc630"
        tarfile = "input.tar.gz"
        tag = "v25_10"
        maker_task = CondorTask(
            sample = DirectorySample(
                dataset=dsname,
                location=samploc,
            ),
            # open_dataset = True if dsname.startswith("data_2016H_singleel") else False,
            files_per_output = 20 if dsname.startswith("data") else 1,
            tag = tag,
            cmssw_version = cmsswver,
            scram_arch = scramarch,
            tarfile = tarfile,
            executable = "condor_executable.sh",
            outdir_name = "stopBaby_" + dsname,
            output_name = "stopbaby.root",
            arguments = "1" if dsname.startswith("SMS") else "0", # isFastsim
            condor_submit_params = {"sites": "UAF"},
            # condor_submit_params = {"sites": "UAF,T2_US_UCSD,UCSB"},
            # no_load_from_backup = True,
        )
Example #12
                ]]
            },
            #"SingularityImage":"/cvmfs/singularity.opensciencegrid.org/cmssw/cms:rhel6-m202006"},
            #condor_submit_params = {"sites" : "T2_US_UCSD,T2_US_CALTECH,T2_US_MIT,T2_US_WISCONSIN,T2_US_Nebraska,T2_US_Purdue,T2_US_Vanderbilt,T2_US_Florida"},
            special_dir=hadoop_path,
            arguments=args.replace(" ", "|"))
        task.process()
        allcomplete = allcomplete and task.complete()
        # save some information for the dashboard
        total_summary[ds] = task.get_task_summary()
        with open("summary.json", "w") as f_out:
            json.dump(total_summary, f_out, indent=4, sort_keys=True)

    # Loop through local samples
    for ds, loc, fpo, args in local_sets[:]:
        sample = DirectorySample(dataset=ds, location=loc)
        files = [f.name for f in sample.get_files()]
        print "For sample %s in directory %s, there are %d input files" % (
            ds, loc, len(files))
        #for file in files:
        #    print file

        task = CondorTask(
            sample=sample,
            open_dataset=True,
            files_per_output=fpo,
            output_name="test_nanoaod.root",
            tag=job_tag,
            cmssw_version=cmssw_ver,
            executable=exec_path,
            tarfile="./package.tar.gz",
Example #13
        # in one output file here
        events_per_output=200,
        total_nevents=500000,
        # We have one input dummy file, so this must be True
        split_within_files=True,
        pset="psets/HIG-RunIIFall17wmLHEGS-01343_1_cfg_jhu_even.py",
        #cmssw_version = "CMSSW_9_3_6_patch2",
        cmssw_version="CMSSW_9_4_6_patch1",
        condor_submit_params={"sites": "T2_US_UCSD"},
        scram_arch="slc6_amd64_gcc630",
    )

    step2 = CMSSWTask(
        sample=DirectorySample(
            location=step1.get_outputdir(),
            dataset=step1.get_sample().get_datasetname().replace(
                "STEP1", "STEP2"),
        ),
        tag=proc_tag,
        special_dir=special_dir,
        open_dataset=True,
        files_per_output=1,
        pset="psets/HIG-RunIIFall17DRPremix-01240_1_cfg_jhu.py",
        cmssw_version="CMSSW_9_4_4",
        scram_arch="slc6_amd64_gcc630",
    )

    step3 = CMSSWTask(
        sample=DirectorySample(
            location=step2.get_outputdir(),
            dataset=step2.get_sample().get_datasetname().replace(
Example #14
    print(dict["xs"] * 1000.) / (dict["n_events"] * (1 -
                                                     (2 * dict["frac_neg"])))
    return (dict["xs"] * 1000.) / (dict["n_events"] * (1 -
                                                       (2 * dict["frac_neg"])))
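
# Quick sanity check of the formula above with made-up numbers (assuming the
# body shown is the scale1fb() called below):
#   scale1fb = xs[pb] * 1000 / (n_events * (1 - 2 * frac_neg))
#            = 0.5 * 1000 / (100000 * (1 - 0.2)) = 0.00625
print(scale1fb({"xs": 0.5, "n_events": 100000, "frac_neg": 0.1}))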


basepath = "/hadoop/cms/store/user/smay/ProjectMetis/"

all_jobs_done = False
total_summary = {}
while True:
    all_jobs_done = True
    for key, info in mc.items():
        if args.test_run:
            continue
        sample = DirectorySample(dataset=key,
                                 location=basepath + info["globber"])
        job_args = '%s %s %.12f' % ("none" if args.selection == "2" else
                                    "Reweight", args.selection, scale1fb(info))
        output_name = "Zll_histograms.root"
        task = CondorTask(
            sample=sample,
            open_dataset=False,
            flush=True,
            files_per_output=1,
            #files_per_output = info["fpo"] if args.selection == 1 or args.selection == 3 else info["fpo"] * 3,
            output_name=output_name,
            tag=job_tag,
            cmssw_version="CMSSW_9_4_9",  # doesn't do anything
            arguments=job_args,
            executable=exec_path,
            tarfile=tar_path,
Example #15
File: nanometis.py Project: sgnoohc/nano
    [
        "WZ_TuneCUETP8M1_13TeV-pythia8_RunIISummer17MiniAOD-92X_upgrade2017_realistic_v10-v2_MINIAODSIM_NanoAODv1",
        "/hadoop/cms/store/user/namin/NanoAODv1/ProjectMetis/WZ_TuneCUETP8M1_13TeV-pythia8_RunIISummer17MiniAOD-92X_upgrade2017_realistic_v10-v2_MINIAODSIM_NanoAODv1"
    ],
    [
        "ZZ_TuneCUETP8M1_13TeV-pythia8_RunIISummer17MiniAOD-92X_upgrade2017_realistic_v10-v2_MINIAODSIM_NanoAODv1",
        "/hadoop/cms/store/user/namin/NanoAODv1/ProjectMetis/ZZ_TuneCUETP8M1_13TeV-pythia8_RunIISummer17MiniAOD-92X_upgrade2017_realistic_v10-v2_MINIAODSIM_NanoAODv1"
    ],
]

total_summary = {}
while True:

    allcomplete = True
    for ds, loc in dslocs:
        task = CondorTask(sample=DirectorySample(dataset=ds, location=loc),
                          open_dataset=False,
                          flush=True,
                          files_per_output=1,
                          output_name="merged.root",
                          tag=job_tag,
                          arguments=args,
                          executable=exec_path,
                          tarfile=targzpath,
                          special_dir=hadoop_path,
                          condor_submit_params={"sites": "UAF,T2_US_UCSD"})
        task.process()
        allcomplete = allcomplete and task.complete()
        # save some information for the dashboard
        total_summary[ds] = task.get_task_summary()
    # parse the total summary and write out the dashboard
Example #16
NTUPLE_TAG = "v3"
exec_path = "condor_exe.sh"
tar_path = "input.tar.gz"
input_dir = "/hadoop/cms/store/user/ryan/beammuons/beammuons_PROC_{0}_FOURVECS_{1}".format(
    FOURVEC_TAG, SIM_TAG)
hadoop_path = "milliqan/milliq_mcgen/geant_ntuples"

proc_types = ["qcd", "qcd_nonbc", "w", "dy"]

total_summary = {}
while True:
    allcomplete = True
    for proc in proc_types:
        ds = "/beammuons_{0}/{1}_{2}/GEANT".format(proc, FOURVEC_TAG, SIM_TAG)
        sample = DirectorySample(dataset=ds,
                                 location=input_dir.replace("PROC", proc),
                                 globber="*.root")
        task = CondorTask(
            sample=sample,
            open_dataset=True,
            files_per_output=1,
            output_name="output.root",
            tag=NTUPLE_TAG,
            executable=exec_path,
            tarfile=tar_path,
            condor_submit_params={
                "sites": "T2_US_UCSD",
                "container": "/cvmfs/singularity.opensciencegrid.org/bbockelm/cms:rhel7",