Ejemplo n.º 1
0
def submit_run(workdir: str, runnumber: int):
    """Submit a trending-tree extraction job for a single run via slurm.

    Builds the command line for runExtractTrendingTree.sh and hands it to
    the slurm submitter together with a run-specific job name and logfile.
    """
    script = os.path.join(repo, "Trending", "runExtractTrendingTree.sh")
    command = "{} {} {} {}".format(script, repo, workdir, runnumber)
    slurm.submit(command,
                 "TD_{}".format(runnumber),
                 os.path.join(workdir, "trendingExtractor.log"))
Ejemplo n.º 2
0
    def submit_download(self, sample: str) -> int:
        """Submit a batch download-and-merge job for a MC sample.

        Creates the sample output directory if needed and submits
        runDownloadAndMergeMCBatch.sh to the download partition.
        Returns the slurm job ID, or None when no alien token is available.
        """
        key = self._tokens["key"]
        cert = self._tokens["cert"]
        if not (key and cert):
            logging.error("Alien token not provided - cannot download ...")
            return None
        outputdir = os.path.join(self._outputbase, sample)
        if not os.path.exists(outputdir):
            os.makedirs(outputdir, 0o755)
        script = os.path.join(self._repo, "downloader",
                              "runDownloadAndMergeMCBatch.sh")
        # Argument order expected by the shell script:
        # repo, outputdir, dataset, legotrain/trainid, cert, key
        command = " ".join([
            script, self._repo, outputdir, sample,
            "{}/{}".format(self._legotrain, self._trainrun), cert, key
        ])
        return submit(command=command,
                      jobname="down_{}".format(sample),
                      logfile=os.path.join(outputdir, "download.log"),
                      partition=self._partitionDownload,
                      numnodes=1,
                      numtasks=4,
                      maxtime=self._maxtime)
Ejemplo n.º 3
0
 def submit_final(self, wait_jobid: int = 0) -> int:
     """Submit the final merging job, optionally dependent on another job.

     :param wait_jobid: job ID the final merge must wait for (0 = no dependency)
     :return: slurm job ID of the final merge, or -1 on submission errors
     """
     executable = os.path.join(self.__repo, "processMergeFinal.sh")
     command = "{EXECUTABLE} {OUTPUTDIR} {FILENAME} {REPO} {CHECK}".format(
         EXECUTABLE=executable,
         OUTPUTDIR=self.__outputdir,
         FILENAME=self.__filename,
         REPO=os.path.dirname(self.__repo),
         CHECK=1 if self.__check else 0)
     logfile = "{OUTPUTDIR}/mergefinal.log".format(
         OUTPUTDIR=self.__outputdir)
     jobname = "mergefinal"
     try:
         jobid = submit(command=command,
                        jobname=jobname,
                        logfile=logfile,
                        partition=self.__partition,
                        dependency=wait_jobid,
                        maxtime=self.__maxtime)
         return jobid
     except (UnknownClusterException, PartitionException) as e:
         # Both failure modes are handled identically, so catch as a tuple
         # instead of two duplicated except clauses.
         logging.error("Submission error: %s", e)
         return -1
Ejemplo n.º 4
0
 def submit_pthardbins(self, wait_jobid: int = 0) -> int:
     """Submit the per-pt-hard-bin merging job as a slurm job array (bins 1-20).

     :param wait_jobid: job ID the array must wait for (0 = no dependency)
     :return: slurm job ID of the array, or -1 on submission errors
     """
     executable = os.path.join(self.__repo, "processMergeRun.sh")
     # fixed local typo: "commmand" -> "command"
     command = "{EXECUTABLE} {INPUTDIR} {OUTPUTDIR} {FILENAME}".format(
         EXECUTABLE=executable,
         INPUTDIR=self.__inputdir,
         OUTPUTDIR=self.__outputdir,
         FILENAME=self.__filename)
     # %a is expanded by slurm to the array task ID
     logfile = "{OUTPUTDIR}/joboutput_%a.log".format(
         OUTPUTDIR=self.__outputdir)
     jobname = "mergebins"
     try:
         jobid = submit(command=command,
                        jobname=jobname,
                        logfile=logfile,
                        partition=self.__partition,
                        jobarray=[1, 20],
                        dependency=wait_jobid,
                        maxtime=self.__maxtime)
         return jobid
     except (UnknownClusterException, PartitionException) as e:
         # Both failure modes are handled identically, so catch as a tuple
         # instead of two duplicated except clauses.
         logging.error("Submission error: %s", e)
         return -1
 def submit(self):
     """Submit one runwise download job per data sample of the configured year.

     :return: dict mapping period name -> slurm job ID, or None if the
              alien token is missing (bare return if the train run is unset)
     :raises LaunchHandler.SampleException: if no samples exist for the year
     """
     if not self.__trainrun:
         logging.error("Failed initializing train run")
         return
     cert = self.__tokens["cert"]
     key = self.__tokens["key"]
     if not key or not cert:
         logging.error("Alien token not provided - cannot download ...")
         return None
     executable = os.path.join(self.__repo, "downloader", "runDownloadRunwise.sh")
     samples = {}
     try:
         samples = self.__sampleDB.getDataSamples(self.__year)
     except SampleDB.YearNotFoundException as e:
         logging.error(e)
         # chain the cause so the original lookup failure stays visible
         raise LaunchHandler.SampleException(self.__year) from e
     jobids = {}
     for period, recpass in samples.items():
         logging.info("Downloading period %s, reconstruction pass %s", period, recpass)
         perioddir = os.path.join(self.__outputbase, period)
         if not os.path.exists(perioddir):
             os.makedirs(perioddir, 0o755)
         jobname = "down_{PERIOD}".format(PERIOD=period)
         logfile = os.path.join(perioddir, "download.log")
         # "NONE" is the sentinel passed to the shell script for unset values;
         # use "is not None" (PEP 8) instead of "!= None"
         filenamestr = self.__filename if self.__filename is not None else "NONE"
         aodprodstr = self.__aodset if self.__aodset is not None else "NONE"
         downloadcmd = "{EXE} {DOWNLOADREPO} {ALIEN_CERT} {ALIEN_KEY} {OUTPUTDIR} {TRAINRUN} {LEGOTRAIN} {DATASET} {RECPASS} {AODPROD} {FILENAME}".format(
             EXE=executable,
             DOWNLOADREPO=self.__repo,
             ALIEN_CERT=cert,
             ALIEN_KEY=key,
             OUTPUTDIR=perioddir,
             TRAINRUN=self.__trainrun,
             LEGOTRAIN=self.__legotrain,
             DATASET=period,
             RECPASS=recpass,
             AODPROD=aodprodstr,
             FILENAME=filenamestr)
         logging.debug("Download command: %s", downloadcmd)
         jobid = submit(command=downloadcmd, jobname=jobname, logfile=logfile,
                        partition=self.__partitionDownload, numnodes=1, numtasks=1)
         # lazy %-formatting: message only rendered when INFO is enabled
         logging.info("Submitting download job for period %s: %s", period, jobid)
         jobids[period] = jobid
     return jobids
Ejemplo n.º 6
0
 def launch(self):
     """Submit one isolated-fastor filtering job per run of the year.

     Creates the per-year output and log directories if needed, then
     submits runFindIsolated.sh once for every run in the run lists.
     """
     print("launching jobs for {}".format(self.__year))
     script = os.path.join(self.__repo, "EMCALTrigger",
                           "FastorAcceptance", "runFindIsolated.sh")
     outputdir = os.path.join(self.__outputdir, "%d" % self.__year)
     logdir = os.path.join(outputdir, "logs")
     for directory in (outputdir, logdir):
         if not os.path.exists(directory):
             os.makedirs(directory, 0o755)
     for run in self.read_runlists():
         submit("{} {} {} {}".format(script, self.__repo, outputdir, run),
                "{}_{}".format(self.__year, run),
                os.path.join(logdir, "filter_{}".format(run)))
Ejemplo n.º 7
0
 def submit(self, year: int):
     """Submit a batch download-and-merge job for all data of a year.

     :param year: year of the datasets to download
     :return: slurm job ID, or None when the alien token is missing
              (bare return if the train run was not initialized)
     """
     if not self.__trainrun:
         logging.error("Failed initializing train run")
         return
     cert = self.__tokens["cert"]
     key = self.__tokens["key"]
     if not key or not cert:
         logging.error("Alien token not provided - cannot download ...")
         return None
     executable = os.path.join(self.__repo, "downloader", "runDownloadAndMergeDataBatch.sh")
     jobname = "down_{YEAR}".format(YEAR=year)
     logfile = os.path.join(self.__outputbase, "download.log")
     downloadcmd = "{EXE} {DOWNLOADREPO} {OUTPUTDIR} {YEAR} {TRAINRUN} {ALIEN_CERT} {ALIEN_KEY}".format(
         EXE=executable,
         DOWNLOADREPO=self.__repo,
         OUTPUTDIR=self.__outputbase,
         YEAR=year,
         TRAINRUN=self.__trainrun,
         ALIEN_CERT=cert,
         ALIEN_KEY=key)
     jobid = submit(command=downloadcmd, jobname=jobname, logfile=logfile,
                    partition=self.__partitionDownload, numnodes=1, numtasks=1,
                    maxtime=self.__maxtime)
     # lazy %-formatting: message only rendered when INFO is enabled
     logging.info("Submitting download job: %s", jobid)
     return jobid
def merge_submitter_datasets(repo: str, inputdir: str, filename: str,
                             partition: str, maxtime: str, wait: list,
                             check: bool) -> dict:
    """Submit the per-bin merge job array and the dependent final merge job.

    The per-bin merge runs as a slurm array (bins 1-20) once the jobs in
    *wait* finish; the final merge waits for the array itself.

    :return: dict with the submitted job IDs: {"pthard": ..., "final": ...}
    """
    outputbase = os.path.join(inputdir, "merged")
    if not os.path.exists(outputbase):
        os.makedirs(outputbase, 0o755)
    binscript = os.path.join(repo, "processMergeMCDatasets.sh")
    finalscript = os.path.join(repo, "processMergeFinal.sh")
    bincmd = "{} {} {} {}".format(binscript, inputdir, outputbase, filename)
    # %a is expanded by slurm to the array task ID
    binjob = submit_dependencies(bincmd,
                                 "mergebins",
                                 os.path.join(outputbase, "joboutput_%a.log"),
                                 partition,
                                 jobarray=[1, 20],
                                 dependency=wait,
                                 maxtime=maxtime)
    print("Submitted merge job under JobID %d" % binjob)
    finalcmd = "{} {} {} {} {}".format(finalscript, outputbase, filename,
                                       os.path.dirname(repo),
                                       1 if check else 0)
    finaljob = submit(finalcmd,
                      "mergefinal",
                      "{}/mergefinal.log".format(outputbase),
                      partition,
                      dependency=binjob,
                      maxtime=maxtime)
    print("Submitted final merging job under JobID %d" % finaljob)
    return {"pthard": binjob, "final": finaljob}
Ejemplo n.º 9
0
     EXE=unfoldingexecutable,
     REPO=repo,
     WDIR=args.workdir,
     DATAFILE=args.datafile,
     MCFILE=args.mcfile,
     SYSVAR=args.sysvar,
     MACRO=args.macro)
 logfile = "joboutput_R0%a.log"
 workdir = os.path.abspath(args.workdir)
 if not os.path.exists(workdir):
     os.makedirs(workdir, 0o755)
 os.chdir(workdir)
 unfoldingjob = slurm.submit(unfoldingcmd,
                             args.jobtag,
                             logfile,
                             args.queue,
                             1,
                             1, [2, 6],
                             maxtime=args.time)
 print("Submitting processing job under %d" % unfoldingjob)
 mergeexecutable = os.path.join(repo, "unfolding", "postprocess1D.sh")
 mergecmd = "{EXE} {WORKDIR}".format(EXE=mergeexecutable,
                                     WORKDIR=os.getcwd())
 logfile = "merge"
 mergejob = slurm.submit(mergecmd,
                         "merge_{TAG}".format(TAG=args.jobtag),
                         logfile,
                         args.queue,
                         1,
                         1,
                         None,
Ejemplo n.º 10
0
#! /usr/bin/env python3

from argparse import ArgumentParser
from SubstructureHelpers.slurm import submit
import os

if __name__ == "__main__":
    # Repository root is taken from the environment set up by the project
    repo = os.path.abspath(os.getenv("SUBSTRUCTURE_ROOT"))
    parser = ArgumentParser("submitRepack.py", description="Submitter for repacking")
    parser.add_argument("inputdir", metavar="INPUTDIR", type=str, help="Input directory")
    parser.add_argument("-p", "--partition", metavar="PARTITION", type=str, default="short", help="Partition")
    args = parser.parse_args()

    # Run the standalone repack script on the requested input directory
    repackscript = os.path.join(repo, "downloader", "runRepackStandalone.sh")
    submit("{} {}".format(repackscript, args.inputdir),
           "repack",
           os.path.join(args.inputdir, "repack.log"),
           args.partition)
                        type=float,
                        default=1.3,
                        help="Reweighting factor")
    args = parser.parse_args()

    repo = os.path.abspath(os.path.dirname(sys.argv[0]))
    unfoldingexecutable = os.path.join(
        repo, "runUnfolding1D_local_SysAngularity.sh")
    reweighthigh = 1 if args.reweighthigh else 0
    angularityweight = args.angularityweight
    unfoldingcmd = "{EXE} {WDIR} {DATAFILE} {MCFILE} {SYSVAR} {MACRO} {REWEIGHTHIGH} {WEIGHT}".format(
        EXE=unfoldingexecutable,
        WDIR=args.workdir,
        DATAFILE=args.datafile,
        MCFILE=args.mcfile,
        SYSVAR=args.sysvar,
        MACRO=args.macro,
        REWEIGHTHIGH=reweighthigh,
        WEIGHT=angularityweight)
    logfile = "joboutput_R0%a.log"
    os.chdir(args.workdir)
    unfoldingjob = slurm.submit(unfoldingcmd, args.jobtag, logfile, args.queue,
                                1, 1, [2, 6])
    print("Submitting processing job under %d" % unfoldingjob)
    mergeexecutable = os.path.join(repo, "postprocess1D.sh")
    mergecmd = "{EXE} {WORKDIR}".format(EXE=mergeexecutable,
                                        WORKDIR=os.getcwd())
    logfile = "merge"
    mergejob = slurm.submit(mergecmd, "merge_{TAG}".format(TAG=args.jobtag),
                            logfile, args.queue, 1, 1, None, unfoldingjob)
    print("Submitting merging job under %d" % mergejob)