def runAll(parserDict):
    """
    Runs Madgraph, Pythia and the SLHA creator for a given set of options.

    :param parserDict: a dictionary with the parser options.
    :return: False if any step fails, otherwise a string with the finish time.
    """

    t0 = time.time()

    parser = ConfigParserExt()
    parser.read_dict(parserDict)

    #Run MadGraph and get path to the LHE file
    mg5outFolder = None  # only bound when MadGraph actually runs
    if parser.get('options','runMG'):
        mg5outFolder = Run_MG5(parser)
        inputFile = os.path.join(mg5outFolder,"Events/run_01/unweighted_events.lhe.gz")
    else:
        if not parser.has_option("slhaCreator","inputFile") or not parser.get("slhaCreator","inputFile"):
            inputFile = None
        else:
            inputFile = parser.get("slhaCreator","inputFile")

    #Create SLHA file
    slhaFile = None  # only bound when the SLHA creator runs successfully
    if parser.get("options","runSlhaCreator"):
        if not inputFile:
            logger.error("Input LHE file not defined. The path to the LHE file must be given in [slhaCreator][inputFile] if runMG=False")
            return False
        elif not os.path.isfile(inputFile):
            logger.error("Input file %s for SLHA creator not found" %inputFile)
            return False
        else:
            slhaFile = getSLHAFile(parser,inputFile)
            if not slhaFile or not os.path.isfile(slhaFile):
                logger.error("Error creating SLHA file")
                return False
            else:
                logger.debug("File %s created" %slhaFile)

    #Clean output:
    if parser.get("options","cleanOutFolders"):
        logger.info("Cleaning output")
        #Copy the LHE file next to the SLHA file before removing the MG folder.
        #Guard on inputFile/slhaFile: either may be None if the corresponding
        #step was skipped (previously caused a NameError/TypeError here).
        if parser.get("options","keepLHE") and inputFile and slhaFile:
            shutil.copy(inputFile,slhaFile.replace('.slha','.lhe.gz'))
        #mg5outFolder is None when runMG=False; guard avoids a NameError.
        if mg5outFolder and os.path.isdir(mg5outFolder):
            shutil.rmtree(mg5outFolder)

    logger.info("Done in %3.2f min" %((time.time()-t0)/60.))
    now = datetime.datetime.now()

    return "Finished run at %s" %(now.strftime("%Y-%m-%d %H:%M"))
# Example #2
def main(parfile, verbose):
    """
    Submit parallel jobs using the parameter file.

    :param parfile: name of the parameter file.
    :param verbose: level of debugging messages
                    ("debug", "info", "warn", "warning" or "error").
    """
    #Bug fix: use the 'verbose' parameter, not the out-of-scope 'args' object.
    level = verbose.lower()
    levels = {
        "debug": logging.DEBUG,
        "info": logging.INFO,
        "warn": logging.WARNING,
        "warning": logging.WARNING,
        "error": logging.ERROR
    }
    if level not in levels:
        logger.error("Unknown log level ``%s'' supplied!" % level)
        sys.exit()
    logger.setLevel(level=levels[level])

    parser = ConfigParserExt()
    ret = parser.read(parfile)
    if not ret:
        logger.error("No such file or directory: '%s'" % parfile)
        sys.exit()

    #Resolve the input option into a list of absolute file paths.
    if not parser.has_option('options', 'input'):
        logger.error("An input file or folder must be defined.")
        sys.exit()
    else:
        inputF = parser.get('options', 'input')
        if os.path.isfile(inputF):
            inputFiles = [os.path.abspath(inputF)]
        elif "*" in inputF:
            #Wildcard pattern: expand with glob.
            inputFiles = [os.path.abspath(f) for f in glob.glob(inputF)]
        elif os.path.isdir(inputF):
            #Folder: take every regular file directly inside it.
            inputFiles = [
                os.path.abspath(os.path.join(inputF, f))
                for f in os.listdir(inputF)
                if os.path.isfile(os.path.join(inputF, f))
            ]
        else:
            logger.error("Input format %s not accepted" % inputF)
            sys.exit()

    #Build one parser (job configuration) per input file.
    parserList = []
    for f in inputFiles:
        newParser = ConfigParserExt()
        newParser.read_dict(parser.toDict(raw=True))
        newParser.set("CheckMateParameters", "SLHAFile", f)
        newParser.set("CheckMateParameters", "Name",
                      os.path.splitext(os.path.basename(f))[0])
        newParser.set(
            "CheckMateParameters", "OutputDirectory",
            os.path.abspath(
                parser.get("CheckMateParameters", "OutputDirectory")))
        #Get tags of processes (every section except the bookkeeping ones):
        processTags = [
            tag for tag in newParser.sections() if
            (tag.lower() != 'options' and tag.lower() != 'checkmateparameters')
        ]

        #Get xsec dictionary (optionally read cross-sections from the SLHA file):
        useSLHA = False
        unit = 'PB'
        xsecDict = {}
        if newParser.has_option("options", "xsecUnit"):
            unit = newParser.get("options", "xsecUnit")
        if newParser.has_option("options", "useSLHAxsecs"):
            useSLHA = newParser.get("options", "useSLHAxsecs")
            if not isinstance(useSLHA, dict):
                logger.error(
                    "useSLHAxsecs should be defined as dictionary with a key for each CheckMate process."
                )
                sys.exit()

            xsecsAll = pyslha.readSLHAFile(f).xsections
            for pTag, xsecTuple in useSLHA.items():
                if xsecTuple not in xsecsAll:
                    continue
                #Keep the highest-QCD-order cross-section for this process.
                xsecs = sorted(xsecsAll[xsecTuple].xsecs,
                               key=lambda xsec: xsec.qcd_order,
                               reverse=True)
                xsecDict[pTag] = xsecs[0]

        for pTag in processTags:
            pName = newParser.get(pTag, "Name")
            newParser.set(pTag, "MGparam", f)
            if useSLHA:
                #A match on the process Name takes precedence over the tag,
                #since it is set last.
                if pTag in xsecDict:
                    newParser.set(pTag, "XSect",
                                  "%1.5g %s" % (xsecDict[pTag].value, unit))
                if pName in xsecDict:
                    newParser.set(pTag, "XSect",
                                  "%1.5g %s" % (xsecDict[pName].value, unit))

        parserList.append(newParser)

    #Determine the number of worker processes (ncpu < 0 means "all cores").
    ncpus = int(parser.get("options", "ncpu"))
    if ncpus < 0:
        ncpus = multiprocessing.cpu_count()
    ncpus = min(ncpus, len(parserList))
    pool = multiprocessing.Pool(processes=ncpus)
    children = []
    #Loop over parsers and submit jobs
    logger.info("Submitting %i jobs over %i cores" % (len(parserList), ncpus))
    for newParser in parserList:
        logger.debug("Submitting job for file %s" %
                     (newParser.get("CheckMateParameters", "SLHAFile")))
        parserDict = newParser.toDict(
            raw=False)  #Must convert to dictionary for pickling
        p = pool.apply_async(RunCheckMate, args=(parserDict, ))
        children.append(p)
        #Stagger submissions to avoid clashing on shared resources.
        time.sleep(10)

    #Wait for jobs to finish and release the pool workers:
    pool.close()
    output = [p.get() for p in children]
    pool.join()
    for out in output:
        print(out)