# Imports used across the snippets below; the project-specific helpers
# (ConfigParserExt, generateProcesses, Run_MG5, getSLHAFile, Run_pythia,
# Run_rivet, getCheckMateCard) come from the surrounding package and are
# not reproduced here.
import os
import sys
import glob
import time
import shutil
import logging
import tempfile
import datetime
import subprocess
import multiprocessing

import pyslha

logger = logging.getLogger(__name__)


def main(parfile, verbose):
    """
    Submit parallel jobs using the parameter file.

    :param parfile: name of the parameter file.
    :param verbose: level of debugging messages.
    """
    level = verbose
    levels = { "debug": logging.DEBUG, "info": logging.INFO,
               "warn": logging.WARNING,
               "warning": logging.WARNING, "error": logging.ERROR }
    if level not in levels:
        logger.error("Unknown log level '%s' supplied!" % level)
        sys.exit()
    logger.setLevel(level=levels[level])

    parser = ConfigParserExt()   
    ret = parser.read(parfile)
    if ret == []:
        logger.error( "No such file or directory: '%s'" % args.parfile)
        sys.exit()
            
    #Get a list of parsers (in case loops have been defined)    
    parserList = parser.expandLoops()

    ncpus = parser.getint("options","ncpu")
    if ncpus  < 0:
        ncpus =  multiprocessing.cpu_count()

    pool = multiprocessing.Pool(processes=ncpus)
    children = []
    #Loop over model parameters and submit jobs
    firstRun = True
    for newParser in parserList:
        if firstRun:
            if newParser.get('options','runMG') and not os.path.isdir(newParser.get('MadGraphPars','processFolder')):
                generateProcesses(newParser)
                
            if not newParser.has_option('slhaCreator','outputFolder') or not newParser.get('slhaCreator','outputFolder'):
                slhaFolder = tempfile.mkdtemp(dir='./', prefix='slha_')
                logger.info('SLHA output folder not defined. Files will be saved to %s' % slhaFolder)
            else:
                slhaFolder = newParser.get('slhaCreator','outputFolder')       
            firstRun = False
            
        newParser.set('slhaCreator','outputFolder',slhaFolder)             
        parserDict = newParser.toDict(raw=False) #Must convert to dictionary for pickling
        p = pool.apply_async(runAll, args=(parserDict,))            
        children.append(p)
        
    #Wait for jobs to finish:
    output = [p.get() for p in children]
    return output
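

# A minimal sketch of a command-line entry point for main() above; the flag
# names (-p/--parfile, -v/--verbose) and the default log level are assumptions
# (the fragment in Example #6 below uses args.parfile and args.loglevel).
if __name__ == "__main__":
    import argparse

    ap = argparse.ArgumentParser(
        description="Run MadGraph and the SLHA creator for the points defined in the parameter file.")
    ap.add_argument("-p", "--parfile", required=True,
                    help="parameter file with the run options")
    ap.add_argument("-v", "--verbose", default="info",
                    help="log level: debug, info, warning or error")
    args = ap.parse_args()

    main(args.parfile, args.verbose)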
Example #2
def runAll(parserDict):
    """
    Runs Madgraph, Pythia and the SLHA creator for a given set of options.
    :param parserDict: a dictionary with the parser options.
    """
    
    t0 = time.time() 
    
    parser = ConfigParserExt()
    parser.read_dict(parserDict)
    
    #Run MadGraph and get path to the LHE file
    if parser.get('options','runMG'):
        mg5outFolder = Run_MG5(parser) 
        inputFile = os.path.join(mg5outFolder,"Events/run_01/unweighted_events.lhe.gz")
    else:
        if not parser.has_option("slhaCreator","inputFile") or not parser.get("slhaCreator","inputFile"):
            inputFile = None
        else:      
            inputFile = parser.get("slhaCreator","inputFile")

    #Create SLHA file
    if parser.get("options","runSlhaCreator"):
        if not inputFile:
            logger.error("Input LHE file not defined. The path to the LHE file must be given in [slhaCreator][inputFile] if runMG=False")
            return False
        elif not os.path.isfile(inputFile):
            logger.error("Input file %s for SLHA creator not found" %inputFile)
            return False
        else:
            slhaFile = getSLHAFile(parser,inputFile)
            if not slhaFile or not os.path.isfile(slhaFile):
                logger.error("Error creating SLHA file")
                return False
            else:
                logger.debug("File %s created" %slhaFile)
                
    #Clean output (mg5outFolder is only defined when MadGraph was run):
    if parser.get("options","cleanOutFolders"):
        logger.info("Cleaning output")
        if parser.get('options','runMG') and os.path.isdir(mg5outFolder):
            shutil.rmtree(mg5outFolder)
          
    logger.info("Done in %3.2f min" %((time.time()-t0)/60.))
    now = datetime.datetime.now()
    
    return "Finished run at %s" %(now.strftime("%Y-%m-%d %H:%M"))
Example #3
def runAll(parserDict):
    """
    Runs Pythia and Rivet for a given set of options.
    :param parserDict: a dictionary with the parser options.
    """

    t0 = time.time()

    parser = ConfigParserExt()
    parser.read_dict(parserDict)

    #Run Pythia
    if parser.getboolean("options", "runPythia"):
        inputFile = parser.getstr("PythiaOptions", "inputFile")
        if not os.path.isfile(inputFile):
            logger.error("Input file %s for Pythia not found" % inputFile)
        else:
            Run_pythia(parser, inputFile)
            logger.debug("Pythia run submitted")

    #Run Rivet:
    if parser.getboolean("options", "runRivet"):
        Run_rivet(parser)
        logger.debug("Rivet run submitted")

    logger.info("Done in %3.2f min" % ((time.time() - t0) / 60.))
    now = datetime.datetime.now()

    return "Finished run at %s" % (now.strftime("%Y-%m-%d %H:%M"))


def RunCheckMate(parserDict):
    """
    Run CheckMATE using the parameters given in parserDict.

    :param parserDict: a dictionary with all the parameters needed.
    """
    t0 = time.time()
    parser = ConfigParserExt()
    parser.read_dict(parserDict)

    pars = parser.toDict(raw=False)["options"]

    outputFolder = os.path.abspath(
        parser.get("CheckMateParameters", "OutputDirectory"))
    resultFolder = os.path.join(outputFolder,
                                parser.get("CheckMateParameters", "Name"))
    if os.path.isdir(resultFolder):
        logger.info("Results folder %s found." % resultFolder)
        if parser.get("CheckMateParameters", "OutputExists") == 'overwrite':
            logger.info("Overwriting")
            shutil.rmtree(resultFolder)
        else:
            logger.info("Skipping" % resultFolder)
            return "---- %s skipped" % resultFolder
    cardFile = getCheckMateCard(parser)
    logger.debug('Steering card %s created' % cardFile)

    #Create output dirs, if they do not exist:
    try:
        os.makedirs(outputFolder)
    except OSError:
        pass

    #Run CheckMate
    checkmatePath = os.path.abspath(pars['checkmateFolder'])
    checkmateBin = os.path.join(checkmatePath, 'bin')
    logger.info('Running checkmate with steering card: %s ' % cardFile)
    logger.debug('Running: python2 ./CheckMATE %s at %s' %
                 (cardFile, checkmateBin))
    run = subprocess.Popen('python2 ./CheckMATE %s' % (cardFile),
                           shell=True,
                           stdout=subprocess.PIPE,
                           stderr=subprocess.PIPE,
                           cwd=checkmateBin)
    output, errorMsg = run.communicate()
    logger.debug('CheckMATE error:\n %s \n' % errorMsg)
    logger.debug('CheckMATE output:\n %s \n' % output)

    os.remove(cardFile)

    logger.info("Done in %3.2f min" % ((time.time() - t0) / 60.))

    #Remove parton level events:
    if pars['cleanUp'] is True:
        mg5folder = os.path.join(resultFolder, 'mg5amcatnlo')
        if os.path.isdir(mg5folder):
            logger.debug('Removing data from: %s \n' % mg5folder)
            for f in os.listdir(mg5folder):
                file_path = os.path.join(mg5folder, f)
                if os.path.isdir(file_path):
                    shutil.rmtree(file_path)
        analysisfolder = os.path.join(resultFolder, 'analysis')
        if os.path.isfile(
                os.path.join(analysisfolder,
                             'analysisstdout_atlas_1712_02118_ew.log')):
            os.remove(
                os.path.join(analysisfolder,
                             'analysisstdout_atlas_1712_02118_ew.log'))

    now = datetime.datetime.now()

    return "Finished running CheckMATE at %s" % (
        now.strftime("%Y-%m-%d %H:%M"))
def main(parfile, verbose):
    """
    Submit parallel jobs using the parameter file.

    :param parfile: name of the parameter file.
    :param verbose: level of debugging messages.
    """
    level = verbose.lower()
    levels = {
        "debug": logging.DEBUG,
        "info": logging.INFO,
        "warn": logging.WARNING,
        "warning": logging.WARNING,
        "error": logging.ERROR
    }
    if level not in levels:
        logger.error("Unknown log level '%s' supplied!" % level)
        sys.exit()
    logger.setLevel(level=levels[level])

    parser = ConfigParserExt()
    ret = parser.read(parfile)
    if ret == []:
        logger.error("No such file or directory: '%s'" % parfile)
        sys.exit()

    if not parser.has_option('options', 'input'):
        logger.error("An input file or folder must be defined.")
        sys.exit()
    else:
        inputF = parser.get('options', 'input')
        if os.path.isfile(inputF):
            inputFiles = [os.path.abspath(inputF)]
        elif "*" in inputF:
            inputFiles = [os.path.abspath(f) for f in glob.glob(inputF)]
        elif os.path.isdir(inputF):
            inputFiles = [
                os.path.abspath(os.path.join(inputF, f))
                for f in os.listdir(inputF)
                if os.path.isfile(os.path.join(inputF, f))
            ]
        else:
            logger.error("Input format %s not accepted" % inputF)
            sys.exit()

    parserList = []
    for f in inputFiles:
        newParser = ConfigParserExt()
        newParser.read_dict(parser.toDict(raw=True))
        newParser.set("CheckMateParameters", "SLHAFile", f)
        newParser.set("CheckMateParameters", "Name",
                      os.path.splitext(os.path.basename(f))[0])
        newParser.set(
            "CheckMateParameters", "OutputDirectory",
            os.path.abspath(
                parser.get("CheckMateParameters", "OutputDirectory")))
        #Get tags of processes:
        processTags = [
            tag for tag in newParser.sections() if
            (tag.lower() != 'options' and tag.lower() != 'checkmateparameters')
        ]

        #Get xsec dictionary:
        useSLHA = False
        unit = 'PB'
        xsecDict = {}
        if newParser.has_option("options", "xsecUnit"):
            unit = newParser.get("options", "xsecUnit")
        if newParser.has_option("options", "useSLHAxsecs"):
            useSLHA = newParser.get("options", "useSLHAxsecs")
            if not isinstance(useSLHA, dict):
                logger.error(
                    "useSLHAxsecs should be defined as dictionary with a key for each CheckMate process."
                )
                sys.exit()

            xsecsAll = pyslha.readSLHAFile(f).xsections
            for pTag, xsecTuple in useSLHA.items():
                if xsecTuple not in xsecsAll:
                    continue
                xsecs = xsecsAll[xsecTuple].xsecs
                xsecs = sorted(xsecs,
                               key=lambda xsec: xsec.qcd_order,
                               reverse=True)
                xsecDict[pTag] = xsecs[0]

        for pTag in processTags:
            pName = newParser.get(pTag, "Name")
            newParser.set(pTag, "MGparam", f)
            if useSLHA:
                if pTag in xsecDict:
                    newParser.set(pTag, "XSect",
                                  "%1.5g %s" % (xsecDict[pTag].value, unit))
                if pName in xsecDict:
                    newParser.set(pTag, "XSect",
                                  "%1.5g %s" % (xsecDict[pName].value, unit))

        parserList.append(newParser)

    ncpus = int(parser.get("options", "ncpu"))
    if ncpus < 0:
        ncpus = multiprocessing.cpu_count()
    ncpus = min(ncpus, len(parserList))
    pool = multiprocessing.Pool(processes=ncpus)
    children = []
    #Loop over parsers and submit jobs
    logger.info("Submitting %i jobs over %i cores" % (len(parserList), ncpus))
    for newParser in parserList:
        logger.debug("Submitting job for file %s" %
                     (newParser.get("CheckMateParameters", "SLHAFile")))
        parserDict = newParser.toDict(
            raw=False)  #Must convert to dictionary for pickling
        p = pool.apply_async(RunCheckMate, args=(parserDict, ))
        children.append(p)
        time.sleep(10)

    #Wait for jobs to finish:
    output = [p.get() for p in children]
    for out in output:
        print(out)
Example #6
    args = ap.parse_args()

    level = args.loglevel.lower()
    levels = {
        "debug": logging.DEBUG,
        "info": logging.INFO,
        "warn": logging.WARNING,
        "warning": logging.WARNING,
        "error": logging.ERROR
    }
    if level not in levels:
        logger.error("Unknown log level '%s' supplied!" % level)
        sys.exit()
    logger.setLevel(level=levels[level])

    parser = ConfigParserExt(inline_comment_prefixes=(';', ))
    ret = parser.read('eff_parameters_default.ini')
    ret = parser.read(args.parfile)
    if ret == []:
        logger.error("No such file or directory: '%s'" % args.parfile)
        sys.exit()

    ncpus = parser.getint("options", "ncpu")
    if ncpus < 0:
        ncpus = multiprocessing.cpu_count()

    pool = multiprocessing.Pool(processes=ncpus)
    children = []
    #Loop over model parameters and submit jobs
    inputFiles = parser.get("PythiaOptions", "inputFile")
    if os.path.isfile(inputFiles):
Example #7
    def testConfigParserWrapper(self):

        parFile = 'test.ini'
        parser = ConfigParserExt()
        parser.read(parFile)
        rawDict = {
            'MadGraphPars': {
                'processFolder': 'outputDir/MG5_output/proc-littleHiggs',
                'ncores': '1',
                'runcard': 'inputCards/run_card.dat',
                'paramcard': 'inputCards/param_card-littleHiggs.dat',
                'mg5out':
                '"outputDir/MG5_output/MHd_%.1f_%iTeV" %(${MadGraphSet:MHd},int((${MadGraphSet:ebeam1}+${MadGraphSet:ebeam2})/1000.))',
                'MG5path': 'MG5'
            },
            'slhaCreator': {
                'inputFile':
                "${MadGraphPars:mg5out}/Events/run_01/unweighted_events.lhe.gz",
                'slhaout':
                '"MHd_%.1f_%iTeV.slha" %(${MadGraphSet:MHd},int((${MadGraphSet:ebeam1}+${MadGraphSet:ebeam2})/1000.))',
                'outputFolder': "'./'"
            },
            'options': {
                'runSlhaCreator': 'True',
                'runMG': 'True',
                'computeXsecsFor': '[8880001, 8880002]',
                'ncpu': '-1',
                'cleanOutFolders': 'True',
                'modelFolder': 'UFO_LittleHiggs',
                'computeWidths': 'all --body_decay=2'
            },
            'MadGraphSet': {
                'MHd': '2000.0',
                'ebeam2': '${ebeam1}',
                'ebeam1': '4000',
                'MHu': '${MHd}/2.',
                'MHdd': '${MHu}+50',
                'MHe': '${options:computeXsecsFor}[0]',
                'Mhve': '${MadGraphSet:MHe}*sqrt(4.)'
            }
        }
        valDict = {
            'MadGraphPars': {
                'processFolder': 'outputDir/MG5_output/proc-littleHiggs',
                'ncores': 1,
                'runcard': 'inputCards/run_card.dat',
                'paramcard': 'inputCards/param_card-littleHiggs.dat',
                'mg5out': 'outputDir/MG5_output/MHd_2000.0_8TeV',
                'MG5path': 'MG5'
            },
            'slhaCreator': {
                'inputFile':
                'outputDir/MG5_output/MHd_2000.0_8TeV/Events/run_01/unweighted_events.lhe.gz',
                'slhaout': 'MHd_2000.0_8TeV.slha',
                'outputFolder': './'
            },
            'options': {
                'runSlhaCreator': True,
                'runMG': True,
                'computeXsecsFor': [8880001, 8880002],
                'ncpu': -1,
                'cleanOutFolders': True,
                'modelFolder': 'UFO_LittleHiggs',
                'computeWidths': 'all --body_decay=2'
            },
            'MadGraphSet': {
                'MHd': 2000.0,
                'ebeam2': 4000,
                'ebeam1': 4000,
                'MHu': 1000.0,
                'MHdd': 1050.0,
                'MHe': 8880001,
                'Mhve': 17760002.0
            }
        }

        newDict = parser.toDict(raw=True)
        oldDict = rawDict
        #         newDict = parser.toDict(raw=False)
        #         oldDict = valDict

        for s in newDict:
            for opt in newDict[s]:
                if newDict[s][opt] != oldDict[s][opt]:
                    print(s, opt)
                    print('\t', newDict[s][opt], oldDict[s][opt])


#                     break

        self.assertEqual(parser.toDict(raw=True), rawDict)
        self.assertEqual(parser.toDict(raw=False), valDict)
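

# A minimal sketch of the interpolation the test above checks: the same nested
# mapping can be fed back through read_dict (as main() does with
# parser.toDict(raw=True)), and raw vs. evaluated access then differ only in
# whether the ${...} templates are resolved. rawDict here stands for the
# dictionary defined inside the test.
def _interpolationSketch(rawDict):
    parser = ConfigParserExt()
    parser.read_dict(rawDict)
    print(parser.toDict(raw=True)['MadGraphSet']['MHu'])   # '${MHd}/2.'
    print(parser.toDict(raw=False)['MadGraphSet']['MHu'])  # 1000.0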