class ProcessListSimpleTestCase(unittest.TestCase):
  """Tests for the ProcessList methods that do not need a sane CFG file."""

  def setUp(self):
    """Create a ProcessList whose backing file does not exist."""
    with patch('%s.os.path.exists' % MODULE_NAME, new=Mock(return_value=False)):
      self.prol = ProcessList('myTestProcess.list')

  def test_constructor(self):
    """Constructing from an existing location always yields a 'Processes' section."""
    import DIRAC
    exists_patch = patch('%s.os.path.exists' % MODULE_NAME, new=Mock(return_value=True))
    load_patch = patch.object(DIRAC.Core.Utilities.CFG.CFG, 'loadFromFile', return_value=True)
    with exists_patch, load_patch:
      empty_process_list = ProcessList('existent_location')
      self.assertTrue(empty_process_list.cfg.existsKey('Processes'))
      self.assertTrue(empty_process_list.isOK())

    def replace_load(self, _):  # pylint: disable=missing-docstring
      # Stands in for CFG.loadFromFile: populate the CFG with some sections.
      self.createNewSection('myTestSection', 'testComment')
      self.createNewSection('Processes', 'testProcesses')

    exists_patch = patch('%s.os.path.exists' % MODULE_NAME, new=Mock(return_value=True))
    load_patch = patch.object(DIRAC.Core.Utilities.CFG.CFG, 'loadFromFile',
                              side_effect=replace_load, autospec=True)
    with exists_patch, load_patch:
      other_process_list = ProcessList('existent_location')
      self.assertTrue(other_process_list.cfg.existsKey('Processes'))
    # The instance from setUp was built from a missing file and stays not-OK.
    self.assertFalse(self.prol.isOK())

  def test_addentry(self):
    """_addEntry accepts a process dict once its section exists."""
    self.prol.cfg.createNewSection('Processes')
    self.prol.cfg.createNewSection('Processes/123')
    self.prol._addEntry('123', STD_PROC_DICT)
class ProcessListSimpleTestCase(unittest.TestCase):
    """Exercise the ProcessList methods that work without a valid CFG."""

    def setUp(self):
        # The target file must not exist so ProcessList starts out empty.
        with patch('%s.os.path.exists' % MODULE_NAME, new=Mock(return_value=False)):
            self.prol = ProcessList('myTestProcess.list')

    def test_constructor(self):
        """Cover both the trivial and the side-effect loadFromFile paths."""
        import DIRAC
        with patch('%s.os.path.exists' % MODULE_NAME, new=Mock(return_value=True)), \
             patch.object(DIRAC.Core.Utilities.CFG.CFG, 'loadFromFile', return_value=True):
            pristine = ProcessList('existent_location')
            self.assertTrue(pristine.cfg.existsKey('Processes'))
            self.assertTrue(pristine.isOK())

        def replace_load(self, _):  # pylint: disable=missing-docstring
            # Replacement for CFG.loadFromFile that fills in two sections.
            self.createNewSection('myTestSection', 'testComment')
            self.createNewSection('Processes', 'testProcesses')

        with patch('%s.os.path.exists' % MODULE_NAME, new=Mock(return_value=True)), \
             patch.object(DIRAC.Core.Utilities.CFG.CFG, 'loadFromFile',
                          side_effect=replace_load, autospec=True):
            populated = ProcessList('existent_location')
            self.assertTrue(populated.cfg.existsKey('Processes'))
        # The setUp instance was created from a missing file: not OK.
        self.assertFalse(self.prol.isOK())

    def test_addentry(self):
        """_addEntry stores a process dictionary under an existing section."""
        self.prol.cfg.createNewSection('Processes')
        self.prol.cfg.createNewSection('Processes/123')
        self.prol._addEntry('123', STD_PROC_DICT)
def test_constructor(self):
  """Check ProcessList construction against a patched CFG.loadFromFile."""
  import DIRAC
  exists_true = patch('%s.os.path.exists' % MODULE_NAME, new=Mock(return_value=True))
  plain_load = patch.object(DIRAC.Core.Utilities.CFG.CFG, 'loadFromFile', return_value=True)
  with exists_true, plain_load:
    empty_process_list = ProcessList('existent_location')
    self.assertTrue(empty_process_list.cfg.existsKey('Processes'))
    self.assertTrue(empty_process_list.isOK())

  def replace_load(self, _):  # pylint: disable=missing-docstring
    # Fake loadFromFile: populate the CFG instead of reading a file.
    self.createNewSection('myTestSection', 'testComment')
    self.createNewSection('Processes', 'testProcesses')

  exists_true = patch('%s.os.path.exists' % MODULE_NAME, new=Mock(return_value=True))
  fake_load = patch.object(DIRAC.Core.Utilities.CFG.CFG, 'loadFromFile',
                           side_effect=replace_load, autospec=True)
  with exists_true, fake_load:
    other_process_list = ProcessList('existent_location')
    self.assertTrue(other_process_list.cfg.existsKey('Processes'))
  self.assertFalse(self.prol.isOK())
def test_constructor(self):
    """Verify the constructor under two different loadFromFile behaviours."""
    import DIRAC
    with patch('%s.os.path.exists' % MODULE_NAME, new=Mock(return_value=True)), \
         patch.object(DIRAC.Core.Utilities.CFG.CFG, 'loadFromFile', return_value=True):
        created = ProcessList('existent_location')
        self.assertTrue(created.cfg.existsKey('Processes'))
        self.assertTrue(created.isOK())

    def replace_load(self, _):  # pylint: disable=missing-docstring
        # Substitute for loadFromFile: add sections directly to the CFG.
        self.createNewSection('myTestSection', 'testComment')
        self.createNewSection('Processes', 'testProcesses')

    with patch('%s.os.path.exists' % MODULE_NAME, new=Mock(return_value=True)), \
         patch.object(DIRAC.Core.Utilities.CFG.CFG, 'loadFromFile',
                      side_effect=replace_load, autospec=True):
        reloaded = ProcessList('existent_location')
        self.assertTrue(reloaded.cfg.existsKey('Processes'))
    self.assertFalse(self.prol.isOK())
def obtainProcessList(self):
  """Internal function.

  Fetch the process list from storage when whizard.in was not provided.

  :return: S_OK(), S_ERROR()
  """
  location = self.ops.getValue("/ProcessList/Location", "")
  if not location:
    return S_ERROR("No process list found")
  local_name = os.path.basename(location)
  # Only download when the file is not already in the working directory.
  if not os.path.exists(local_name):
    fetch = self.datMan.getFile(location)
    if not fetch['OK']:
      self.log.error('Could not get processlist: %s' % fetch['Message'])
      return fetch
  self.processlist = ProcessList(local_name)
  return S_OK()
def obtainProcessList(self):
  """Internal function.

  Get the process list from storage if whizard.in was not provided.

  :return: S_OK(), S_ERROR()
  """
  csEntry = self.ops.getValue("/ProcessList/Location", "")
  if not csEntry:
    return S_ERROR("No process list found")
  # Download only if the file is not already present locally.
  localFile = os.path.basename(csEntry)
  if not os.path.exists(localFile):
    getRes = self.rm.getFile(csEntry)
    if not getRes['OK']:
      self.log.error('Could not get processlist: %s' % getRes['Message'])
      return getRes
  self.processlist = ProcessList(localFile)
  return S_OK()
def getProcessList(self):
  """ Get the :mod:`ProcessList <ILCDIRAC.Core.Utilities.ProcessList.ProcessList>` needed by
  :mod:`Whizard <ILCDIRAC.Interfaces.API.NewInterface.Applications.Whizard>`.

  :return: process list object
  """
  processlistpath = gConfig.getValue("/LocalSite/ProcessListPath", "")
  if not processlistpath:
    gLogger.info('Will download the process list locally. To gain time, please put it somewhere and add to \
your dirac.cfg the entry /LocalSite/ProcessListPath pointing to the file')
    pathtofile = self.ops.getValue("/ProcessList/Location", "")
    if not pathtofile:
      gLogger.error("Could not get path to process list")
      processlist = ""
    else:
      datMan = DataManager()
      # FIX: the download result used to be silently discarded; report a
      # failure so the user knows the local process list may be missing.
      res = datMan.getFile(pathtofile)
      if not res['OK']:
        gLogger.error("Failed to download the process list", res.get('Message', ''))
      processlist = os.path.basename(pathtofile)
  else:
    processlist = processlistpath
  # ProcessList tolerates a missing file (it then starts empty).
  self.processList = ProcessList(processlist)
  return self.processList
def doTheWhizardInstallation():
  """Perform the installation of a new whizard version.

  Copies the libraries, creates the tarball, uploads the processList file
  and adds the corresponding entry in the configuration system.
  """
  # Both environment checks abort the whole script on failure.
  res = checkSLCVersion()
  if not res['OK']:
    gLogger.error(res['Message'])
    dexit(1)
  res = checkGFortranVersion()
  if not res['OK']:
    gLogger.error(res['Message'])
    dexit(1)

  cliParams = Params()
  cliParams.registerSwitches()
  Script.parseCommandLine( ignoreErrors= False)

  whizardResultFolder = cliParams.path
  platform = cliParams.platform
  whizard_version = cliParams.version
  appVersion = whizard_version
  beam_spectra_version = cliParams.beam_spectra

  # All three switches are mandatory.
  if not whizardResultFolder or not whizard_version or not beam_spectra_version:
    Script.showHelp()
    dexit(2)

  # Imports deferred until after parseCommandLine, as DIRAC scripts require.
  from ILCDIRAC.Core.Utilities.ProcessList import ProcessList
  from DIRAC.ConfigurationSystem.Client.Helpers.Operations import Operations
  from DIRAC.Interfaces.API.DiracAdmin import DiracAdmin
  from ILCDIRAC.Core.Utilities.FileUtils import upload
  from DIRAC.DataManagementSystem.Client.DataManager import DataManager

  diracAdmin = DiracAdmin()
  modifiedCS = False
  softwareSection = "/Operations/Defaults/AvailableTarBalls"
  processlistLocation = "ProcessList/Location"
  appName = "whizard"

  ops = Operations()
  path_to_process_list = ops.getValue(processlistLocation, "")
  if not path_to_process_list:
    gLogger.error("Could not find process list location in CS")
    dexit(2)

  gLogger.verbose("Getting process list from file catalog")
  datMan = DataManager()
  res = datMan.getFile(path_to_process_list)
  if not res['OK']:
    gLogger.error("Error while getting process list from storage")
    dexit(2)
  gLogger.verbose("done")

  ## just the name of the local file in the current working directory
  processlist = os.path.basename(path_to_process_list)
  if not os.path.exists(processlist):
    gLogger.error("Process list does not exist locally")
    dexit(2)
  pl = ProcessList(processlist)

  startDir = os.getcwd()
  inputlist = {}
  os.chdir(whizardResultFolder)
  folderlist = os.listdir(whizardResultFolder)

  # The result folder must contain the executable plus its .prc and .mdl files.
  whiz_here = folderlist.count("whizard")
  if whiz_here == 0:
    gLogger.error("whizard executable not found in %s, please check" % whizardResultFolder)
    os.chdir(startDir)
    dexit(2)

  whizprc_here = folderlist.count("whizard.prc")
  if whizprc_here == 0:
    gLogger.error("whizard.prc not found in %s, please check" % whizardResultFolder)
    os.chdir(startDir)
    dexit(2)

  whizmdl_here = folderlist.count("whizard.mdl")
  if whizmdl_here == 0:
    gLogger.error("whizard.mdl not found in %s, please check" % whizardResultFolder)
    os.chdir(startDir)
    dexit(2)

  gLogger.verbose("Preparing process list")

  ## FIXME:: What is this doing exactly? Is this necessary? -- APS, JFS
  # Parse every *.in template: pick up the decay description and, for each
  # process id found after it, look up model/generator/restrictions in
  # whizard.prc via getDetailsFromPRC.
  for f in folderlist:
    if f.count(".in"):
      infile = open(f, "r")
      found_detail = False
      for line in infile:
        if line.count("decay_description"):
          currprocess = f.split(".template.in")[0]
          inputlist[currprocess] = {}
          inputlist[currprocess]["InFile"] = f.rstrip("~")
          inputlist[currprocess]["Detail"] = line.split("\"")[1]
          found_detail = True
        if line.count("process_id") and found_detail:
          process_id = line.split("\"")[1]
          inputlist[currprocess]["Model"] = ""
          inputlist[currprocess]["Generator"] = ""
          inputlist[currprocess]["Restrictions"] = ""
          for process in process_id.split():
            print "Looking for detail of process %s" % (process)
            process_detail = getDetailsFromPRC("whizard.prc", process)
            inputlist[currprocess]["Model"] = process_detail["Model"]
            inputlist[currprocess]["Generator"] = process_detail["Generator"]
            # Concatenate restrictions of all process ids of this template.
            if len(inputlist[currprocess]["Restrictions"]):
              inputlist[currprocess]["Restrictions"] = inputlist[currprocess]["Restrictions"] + ", " + process_detail["Restrictions"]
            else:
              inputlist[currprocess]["Restrictions"] = process_detail["Restrictions"]
  #if len(inputlist[currprocess].items()):
  #  inputlist.append(processdict)
  ## END FIXEME

  ## Update inputlist with what was found looking in the prc file
  processes = readPRCFile("whizard.prc")
  inputlist.update(processes)

  ## Get from cross section files the cross sections for the processes in inputlist
  # Need full process list
  for f in folderlist:
    if f.count("cross_sections_"):
      crossfile = open(f, "r")
      for line in crossfile:
        line = line.rstrip().lstrip()
        # Skip blanks, comments ('#' or '!') and malformed lines.
        if not len(line):
          continue
        if line[0] == "#" or line[0] == "!":
          continue
        if len(line.split()) < 2:
          continue
        currprocess = line.split()[0]
        if currprocess in inputlist:
          inputlist[currprocess]['CrossSection'] = line.split()[1]

  gLogger.notice("Preparing Tarball")

  ## Make a folder in the current directory of the user to store the whizard libraries, executable et al.
  localWhizardFolderRel = ("whizard" + whizard_version) # relative path
  localWhizardFolder = os.path.join(startDir, localWhizardFolderRel)
  if not os.path.exists(localWhizardFolder):
    os.makedirs(localWhizardFolder)

  localWhizardLibFolder = os.path.join(localWhizardFolder,'lib')
  if os.path.exists(localWhizardLibFolder):
    shutil.rmtree(localWhizardLibFolder)
  os.makedirs(localWhizardLibFolder) ## creates the lib folder

  whizardLibraries = getListOfLibraries(os.path.join(whizardResultFolder, "whizard"))
  copyLibsCall = ["rsync","-avzL"]
  for lib in whizardLibraries:
    copyLibsCall.append(lib)
  copyLibsCall.append(localWhizardLibFolder)
  # NOTE(review): Popen is never waited on, so the rsync may still be running
  # when the tarball is built below -- confirm this is intended.
  subprocess.Popen(copyLibsCall, stdout=subprocess.PIPE, stderr=subprocess.PIPE)

  for fileName in folderlist:
    shutil.copy(fileName, localWhizardFolder)

  ## Get the list of md5 sums for all the files in the folder to be tarred
  os.chdir( localWhizardFolder )
  subprocess.call(["find . -type f -exec md5sum {} > ../md5_checksum.md5 \\; && mv ../md5_checksum.md5 ."], shell=True)
  os.chdir(startDir)

  ## Create the Tarball
  gLogger.notice("Creating Tarball...")
  appTar = localWhizardFolder + ".tgz"
  myappTar = tarfile.open(appTar, "w:gz")
  myappTar.add(localWhizardFolderRel)
  myappTar.close()

  # Checksum of the whole tarball, registered in the CS next to the entry.
  md5sum = md5.md5(open( appTar, 'r' ).read()).hexdigest()

  gLogger.notice("...Done")

  gLogger.notice("Registering new Tarball in CS")
  tarballurl = {}

  av_platforms = gConfig.getSections(softwareSection, [])
  if av_platforms['OK']:
    if platform not in av_platforms['Value']:
      gLogger.error("Platform %s unknown, available are %s." % (platform, ", ".join(av_platforms['Value'])))
      gLogger.error("If yours is missing add it in CS")
      dexit(255)
  else:
    gLogger.error("Could not find all platforms available in CS")
    dexit(255)

  av_apps = gConfig.getSections("%s/%s" % (softwareSection, platform), [])
  if not av_apps['OK']:
    gLogger.error("Could not find all applications available in CS")
    dexit(255)

  if appName.lower() in av_apps['Value']:
    versions = gConfig.getSections("%s/%s/%s" % (softwareSection, platform, appName.lower()), [])
    if not versions['OK']:
      gLogger.error("Could not find all versions available in CS")
      dexit(255)
    if appVersion in versions['Value']:
      # Nothing to register: this exact version is already in the CS.
      gLogger.error('Application %s %s for %s already in CS, nothing to do' % (appName.lower(), appVersion, platform))
      dexit(0)
    else:
      result = diracAdmin.csSetOption("%s/%s/%s/%s/TarBall" % (softwareSection, platform, appName.lower(), appVersion),
                                      os.path.basename(appTar))
      if result['OK']:
        modifiedCS = True
        tarballurl = gConfig.getOption("%s/%s/%s/TarBallURL" % (softwareSection, platform, appName.lower()), "")
        if len(tarballurl['Value']) > 0:
          res = upload(tarballurl['Value'], appTar)
          if not res['OK']:
            gLogger.error("Upload to %s failed" % tarballurl['Value'])
            dexit(255)
      result = diracAdmin.csSetOption("%s/%s/%s/%s/Md5Sum" % (softwareSection, platform, appName.lower(), appVersion),
                                      md5sum)
      if result['OK']:
        modifiedCS = True
      result = diracAdmin.csSetOption("%s/%s/%s/%s/Dependencies/beam_spectra/version" % (softwareSection, platform, appName.lower(), appVersion),
                                      beam_spectra_version)
  else:
    # The application itself is new for this platform: create all entries.
    result = diracAdmin.csSetOption("%s/%s/%s/%s/TarBall" % (softwareSection, platform, appName.lower(), appVersion),
                                    os.path.basename(appTar))
    if result['OK']:
      modifiedCS = True
      tarballurl = gConfig.getOption("%s/%s/%s/TarBallURL" % (softwareSection, platform, appName.lower()), "")
      if len(tarballurl['Value']) > 0:
        res = upload(tarballurl['Value'], appTar)
        if not res['OK']:
          gLogger.error("Upload to %s failed" % tarballurl['Value'])
          dexit(255)
    result = diracAdmin.csSetOption("%s/%s/%s/%s/Md5Sum" % (softwareSection, platform, appName.lower(), appVersion),
                                    md5sum)
    result = diracAdmin.csSetOption("%s/%s/%s/%s/Dependencies/beam_spectra/version" % (softwareSection, platform, appName.lower(), appVersion),
                                    beam_spectra_version)

  gLogger.verbose("Done uploading the tar ball")
  os.remove(appTar)

  ## Set for all new processes the TarBallURL
  for process in inputlist.keys():
    inputlist[process]['TarBallCSPath'] = tarballurl['Value'] + os.path.basename(appTar)
  pl.updateProcessList(inputlist)
  pl.writeProcessList()

  raw_input("Do you want to upload the process list? Press ENTER to proceed or CTRL-C to abort!")
  pl.uploadProcessListToFileCatalog(path_to_process_list, appVersion)

  ## Commit the changes if nothing has failed and the CS has been modified
  if modifiedCS:
    result = diracAdmin.csCommitChanges(False)
    gLogger.verbose(result)
  gLogger.notice('All done OK!')
  dexit(0)
def _extractXsec(addinfo):
  """Return the summed cross section from a file's additional info, or None when absent."""
  if 'xsection' in addinfo:
    if 'sum' in addinfo['xsection']:
      if 'xsection' in addinfo['xsection']['sum']:
        return addinfo['xsection']['sum']['xsection']
  return None


def _getProductionSummary():
  """Build an HTML summary (tables.html) of the selected productions.

  Collects transformation metadata, file counts, events and cross sections
  from the file catalog and writes per-detector tables. Exits the script
  when done.
  """
  clip = _Params()
  clip.registerSwitch()
  Script.parseCommandLine()
  from ILCDIRAC.Core.Utilities.HTML import Table
  from ILCDIRAC.Core.Utilities.ProcessList import ProcessList
  from DIRAC.TransformationSystem.Client.TransformationClient import TransformationClient
  from DIRAC.Resources.Catalog.FileCatalogClient import FileCatalogClient
  from DIRAC import gConfig, gLogger
  prod = clip.prod
  full_detail = clip.full_det
  fc = FileCatalogClient()

  processlist = gConfig.getValue('/LocalSite/ProcessListPath')
  prl = ProcessList(processlist)
  processesdict = prl.getProcessesDict()

  trc = TransformationClient()
  prodids = []
  if not prod:
    # No explicit production list given: query by status (and optionally type).
    conddict = {}
    conddict['Status'] = clip.statuses
    if clip.ptypes:
      conddict['Type'] = clip.ptypes
    res = trc.getTransformations(conddict)
    if res['OK']:
      for transfs in res['Value']:
        prodids.append(transfs['TransformationID'])
  else:
    prodids.extend(prod)

  metadata = []
  gLogger.info("Will run on prods %s" % str(prodids))

  for prodID in prodids:
    if prodID < clip.minprod:
      continue
    meta = {}
    meta['ProdID'] = prodID
    res = trc.getTransformation(str(prodID))
    if not res['OK']:
      gLogger.error("Error getting transformation %s" % prodID)
      continue
    prodtype = res['Value']['Type']
    proddetail = res['Value']['Description']
    # Map the production type onto the Datatype stored in the catalog.
    if prodtype == 'MCReconstruction' or prodtype == 'MCReconstruction_Overlay':
      meta['Datatype'] = 'DST'
    elif prodtype == 'MCGeneration':
      meta['Datatype'] = 'gen'
    elif prodtype == 'MCSimulation':
      meta['Datatype'] = 'SIM'
    elif prodtype in ['Split', 'Merge']:
      gLogger.warn("Invalid query for %s productions" % prodtype)
      continue
    else:
      gLogger.error("Unknown production type %s" % prodtype)
      continue
    res = fc.findFilesByMetadata(meta)
    if not res['OK']:
      gLogger.error(res['Message'])
      continue
    lfns = res['Value']
    nb_files = len(lfns)
    if not lfns:
      gLogger.warn("No files found for prod %s" % prodID)
      continue
    path = os.path.dirname(lfns[0])
    res = fc.getDirectoryUserMetadata(path)
    if not res['OK']:
      gLogger.warn('No meta data found for %s' % path)
      continue
    dirmeta = {}
    dirmeta['proddetail'] = proddetail
    dirmeta['prodtype'] = prodtype
    dirmeta['nb_files'] = nb_files
    dirmeta.update(res['Value'])

    lumi = 0.
    nbevts = 0
    addinfo = None
    files = 0
    xsec = 0.0
    if not full_detail:
      # Fast path: inspect one file and extrapolate to the whole production.
      lfn = lfns[0]
      info = _getFileInfo(lfn)
      nbevts = info[1] * len(lfns)
      lumi = info[0] * len(lfns)
      addinfo = info[2]
      fxsec = _extractXsec(addinfo)
      if fxsec is not None:
        xsec += fxsec
        files += 1
    else:
      # Full detail: accumulate luminosity/events/xsection over every file.
      for lfn in lfns:
        info = _getFileInfo(lfn)
        lumi += info[0]
        nbevts += info[1]
        addinfo = info[2]
        fxsec = _extractXsec(addinfo)
        if fxsec is not None:
          xsec += fxsec
          files += 1

    if not lumi:
      # No luminosity on the files themselves: look it up on the ancestors.
      xsec = 0
      files = 0
      depthDict = {}
      depSet = set()
      res = fc.getFileAncestors(lfns, [1, 2, 3, 4])
      temp_ancestorlist = []
      if res['OK']:
        for ancLfn, ancestorsDict in res['Value']['Successful'].items():
          for ancestor, dep in ancestorsDict.items():
            depthDict.setdefault(dep, [])
            if ancestor not in temp_ancestorlist:
              depthDict[dep].append(ancestor)
              depSet.add(dep)
              temp_ancestorlist.append(ancestor)
      depList = sorted(depSet)
      # FIX: guard against an empty ancestor query result, which used to
      # raise an IndexError on depList[-1].
      if depList:
        for ancestor in depthDict[depList[-1]]:
          info = _getFileInfo(ancestor)
          lumi += info[0]
          addinfo = info[2]
          fxsec = _extractXsec(addinfo)
          if fxsec is not None:
            xsec += fxsec
            files += 1

    if xsec and files:
      xsec /= files
      dirmeta['CrossSection'] = xsec
    else:
      dirmeta['CrossSection'] = 0.0
    if nbevts:
      dirmeta['NumberOfEvents'] = nbevts
    if 'NumberOfEvents' not in dirmeta:
      dirmeta['NumberOfEvents'] = 0

    dirmeta['detail'] = ''
    # FIX: 'detail' could previously be left unbound when the event type had
    # no entry (or no 'Detail' key) in the process list, crashing in
    # _translate below. Fall back to the raw event type.
    detail = dirmeta['EvtType']
    if dirmeta['EvtType'] in processesdict and 'Detail' in processesdict[dirmeta['EvtType']]:
      detail = processesdict[dirmeta['EvtType']]['Detail']

    if not prodtype == 'MCGeneration':
      res = trc.getTransformationInputDataQuery(str(prodID))
      if res['OK']:
        if 'ProdID' in res['Value']:
          dirmeta['MomProdID'] = res['Value']['ProdID']
    if 'MomProdID' not in dirmeta:
      dirmeta['MomProdID'] = 0
    dirmeta['detail'] = _translate(detail)
    metadata.append(dirmeta)

  # Sort the channels by detector and by production type.
  detectors = {}
  corres = {"MCGeneration": 'gen',
            "MCSimulation": 'SIM',
            "MCReconstruction": "REC",
            "MCReconstruction_Overlay": "REC"}
  detectors['ILD'] = {'SIM': [], 'REC': []}
  detectors['SID'] = {'SIM': [], 'REC': []}
  detectors['sid'] = {'SIM': [], 'REC': []}
  detectors['gen'] = []
  for channel in metadata:
    if 'DetectorType' not in channel:
      detectors['gen'].append((channel['detail'],
                               channel['Energy'],
                               channel['ProdID'],
                               channel['nb_files'],
                               channel['NumberOfEvents'] / channel['nb_files'],
                               channel['NumberOfEvents'],
                               channel['CrossSection'],
                               str(channel['proddetail'])))
    else:
      if not channel['DetectorType'] in detectors:
        gLogger.error("This is unknown detector", channel['DetectorType'])
        continue
      detectors[channel['DetectorType']][corres[channel['prodtype']]].append((channel['detail'],
                                                                              channel['Energy'],
                                                                              channel['DetectorType'],
                                                                              channel['ProdID'],
                                                                              channel['nb_files'],
                                                                              channel['NumberOfEvents'] / channel['nb_files'],
                                                                              channel['NumberOfEvents'],
                                                                              channel['CrossSection'],
                                                                              channel['MomProdID'],
                                                                              str(channel['proddetail'])))

  with open("tables.html", "w") as of:
    of.write("""<!DOCTYPE html>
<html>
<head>
<title> Production summary </title>
</head>
<body>
""")
    if len(detectors['gen']):
      of.write("<h1>gen prods</h1>\n")
      table = Table(header_row=('Channel', 'Energy', 'ProdID', 'Tasks', 'Average Evts/task',
                                'Statistics', 'Cross Section (fb)', 'Comment'))
      for item in detectors['gen']:
        table.rows.append(item)
      of.write(str(table))
      gLogger.info("Gen prods")
      gLogger.info(str(table))
    if len(detectors['ILD']):
      of.write("<h1>ILD prods</h1>\n")
      for ptype in detectors['ILD'].keys():
        if len(detectors['ILD'][ptype]):
          of.write("<h2>%s</h2>\n" % ptype)
          table = Table(header_row=('Channel', 'Energy', 'Detector', 'ProdID', 'Number of Files',
                                    'Events/File', 'Statistics', 'Cross Section (fb)',
                                    'Origin ProdID', 'Comment'))
          for item in detectors['ILD'][ptype]:
            table.rows.append(item)
          of.write(str(table))
          gLogger.info("ILC CDR prods %s" % ptype)
          gLogger.info(str(table))
    if len(detectors['SID']):
      of.write("<h1>SID prods</h1>\n")
      for ptype in detectors['SID'].keys():
        if len(detectors['SID'][ptype]):
          of.write("<h2>%s</h2>\n" % ptype)
          table = Table(header_row=('Channel', 'Energy', 'Detector', 'ProdID', 'Number of Files',
                                    'Events/File', 'Statistics', 'Cross Section (fb)',
                                    'Origin ProdID', 'Comment'))
          for item in detectors['SID'][ptype]:
            table.rows.append(item)
          of.write(str(table))
          gLogger.info("SID CDR prods %s" % ptype)
          gLogger.info(str(table))
    if len(detectors['sid']):
      of.write("<h1>sid dbd prods</h1>\n")
      for ptype in detectors['sid'].keys():
        if len(detectors['sid'][ptype]):
          of.write("<h2>%s</h2>\n" % ptype)
          table = Table(header_row=('Channel', 'Energy', 'Detector', 'ProdID', 'Number of Files',
                                    'Events/File', 'Statistics', 'Cross Section (fb)',
                                    'Origin ProdID', 'Comment'))
          for item in detectors['sid'][ptype]:
            table.rows.append(item)
          of.write(str(table))
          gLogger.info("sid DBD prods %s" % ptype)
          gLogger.info(str(table))
    of.write("""
</body>
</html>
""")
  gLogger.notice("Check ./tables.html in any browser for the results")
  dexit(0)
class WhizardAnalysis(ModuleBase):
  """Specific Module to run a Whizard job.

  Resolves the steering (whizard.in) file, prepares the runtime environment
  (beam spectra, grid files, LesHouches model files), generates and executes a
  shell script that runs ``whizard``, then parses the produced log and
  ``whizard.out`` for status, luminosity and cross sections.
  """

  def __init__(self):
    super(WhizardAnalysis, self).__init__()
    self.enable = True
    self.STEP_NUMBER = ''          # step identifier used in script / status names
    self.debug = True              # when False, whizard stderr is sent to /dev/null
    self.SteeringFile = ''         # name of the whizard.in (or template) file
    self.OutputFile = ''           # final stdhep file name
    self.NumberOfEvents = 1
    self.Lumi = 0
    self.applicationName = 'whizard'
    self.evttype = ""              # process/event type; also the output base name
    self.RandomSeed = 0
    self.getProcessInFile = False  # True => fetch whizard.in from the process list
    self.datMan = DataManager()
    self.processlist = None        # ProcessList instance, set by obtainProcessList()
    self.parameters = {}           # substitutions for template steering files
    self.susymodel = 0             # 1 or 2 selects a bundled LesHouches file
    self.Model = ''
    self.genmodel = GeneratorModels()
    # Strings that mark log lines worth keeping/reporting.
    self.eventstring = ['! ', 'Fatal error:', 'PYSTOP', 'No matrix element available',
                        'Floating point exception', 'Event generation finished.',
                        " n_events", "luminosity", " sum "]
    self.excludeAllButEventString = False
    self.steeringparameters = ''   # ";"-separated KEY=VALUE pairs
    self.options = None            # WhizardOptions, only set when optionsdict is used
    self.optionsdict = {}
    self.OptionsDictStr = ''       # stringified dict, eval'ed in applicationSpecificInputs
    self.GenLevelCutDictStr = ''   # stringified dict of generator-level cuts
    self.genlevelcuts = {}
    self.willCut = False
    self.useGridFiles = False

  def obtainProcessList(self):
    """Internal function

    Get the process list from storage if whizard.in was not provided

    :return: S_OK(), S_ERROR()
    """
    res = self.ops.getValue("/ProcessList/Location", "")
    if not res:
      return S_ERROR("No process list found")
    processlistloc = res
    # Download only if the file is not already in the working directory.
    if not os.path.exists(os.path.basename(processlistloc)):
      res = self.datMan.getFile(processlistloc)
      if not res['OK']:
        LOG.error('Could not get processlist: %s' % res['Message'])
        return res
    self.processlist = ProcessList(os.path.basename(processlistloc))
    return S_OK()

  def applicationSpecificInputs(self):
    """Resolve module input

    Fills ``self.parameters`` (template substitutions), decodes the options and
    generator-cut dictionaries, and derives the output file name for
    production jobs.

    :return: S_OK(), S_ERROR()
    """
    self.parameters['ENERGY'] = self.energy

    if not self.RandomSeed and self.jobID:
      self.RandomSeed = self.jobID
    # Production jobs get a reproducible seed built from prod and job IDs.
    if 'IS_PROD' in self.workflow_commons or 'IS_DBD_GEN_PROD' in self.workflow_commons:
      self.RandomSeed = int(str(int(self.workflow_commons["PRODUCTION_ID"])) + str(int(self.workflow_commons["JOB_ID"])))

    self.parameters['SEED'] = self.RandomSeed
    self.parameters['NBEVTS'] = self.NumberOfEvents
    self.parameters['LUMI'] = self.Lumi  ##EVER USED???

    if 'SusyModel' in self.step_commons:
      self.susymodel = self.step_commons['SusyModel']

    self.SteeringFile = os.path.basename(self.step_commons.get("InputFile", self.SteeringFile))
    # "whizard.in" is the name the generated script expects to create itself,
    # so a user-supplied file with that name is moved aside.
    if self.SteeringFile == "whizard.in":
      os.rename(self.SteeringFile, "whizardnew.in")
      self.SteeringFile = "whizardnew.in"

    self.parameters['PROCESS'] = self.evttype

    listofparams = self.steeringparameters.split(";")
    for param in listofparams:
      if param.count("="):
        self.parameters[param.split("=")[0]] = param.split("=")[1]

    if self.OptionsDictStr:
      LOG.info("Will use whizard.in definition from WhizardOptions.")
      try:
        # NOTE(review): eval of a workflow-supplied string; ast.literal_eval
        # would be the safe equivalent for plain dict literals.
        self.optionsdict = eval(self.OptionsDictStr)
        if 'integration_input' not in self.optionsdict:
          self.optionsdict['integration_input'] = {}
        if 'seed' not in self.optionsdict['integration_input']:
          self.optionsdict['integration_input']['seed'] = int(self.RandomSeed)
        if 'process_input' in self.optionsdict:
          if 'sqrts' in self.optionsdict['process_input']:
            self.energy = self.optionsdict['process_input']['sqrts']
      except:  # NOTE(review): bare except; should at least be `except Exception:`
        return S_ERROR("Could not convert string to dictionary for optionsdict")

    if self.GenLevelCutDictStr:
      LOG.info("Found generator level cuts")
      try:
        self.genlevelcuts = eval(self.GenLevelCutDictStr)
      except:  # NOTE(review): bare except; should at least be `except Exception:`
        return S_ERROR("Could not convert the generator level cuts back to dictionary")

    # Without a steering file or an options dict, whizard.in must come from
    # the process list (see runIt / obtainProcessList).
    if not len(self.SteeringFile) and not self.optionsdict:
      self.getProcessInFile = True

    if "IS_PROD" in self.workflow_commons:
      if self.workflow_commons["IS_PROD"] and not self.willCut:
        #self.OutputFile = getProdFilename(self.OutputFile,int(self.workflow_commons["PRODUCTION_ID"]),
        #                                  int(self.workflow_commons["JOB_ID"]))
        if 'ProductionOutputData' in self.workflow_commons:
          outputlist = self.workflow_commons['ProductionOutputData'].split(";")
          for obj in outputlist:
            if obj.lower().count("_gen_"):
              self.OutputFile = os.path.basename(obj)
              break
      else:
        #This is because most likely there is stdhepcut running after
        self.OutputFile = "willcut.stdhep"
        #getProdFilename(self.OutputFile,int(self.workflow_commons["PRODUCTION_ID"]),
        #                int(self.workflow_commons["JOB_ID"]))

    if "IS_DBD_GEN_PROD" in self.workflow_commons and self.workflow_commons["IS_DBD_GEN_PROD"]:
      #self.OutputFile = getProdFilename(self.OutputFile,int(self.workflow_commons["PRODUCTION_ID"]),
      #                                  int(self.workflow_commons["JOB_ID"]))
      if 'ProductionOutputData' in self.workflow_commons:
        outputlist = self.workflow_commons['ProductionOutputData'].split(";")
        for obj in outputlist:
          self.OutputFile = os.path.basename(obj)
          break
      else:
        self.OutputFile = getProdFilename(self.OutputFile,
                                          int(self.workflow_commons["PRODUCTION_ID"]),
                                          int(self.workflow_commons["JOB_ID"]))
    return S_OK()

  def runIt(self):
    """ Called by Agent

    Executes the following
      - resolve input variables
      - resolve installation location
      - resolve dependencies location (beam_spectra)
      - get processlist if needed
      - define output file name
      - prepare whizard.in
      - make magic

    :return: S_OK(), S_ERROR()
    """
    self.result = S_OK()
    if not self.platform:
      self.result = S_ERROR('No ILC platform selected')
    elif not self.applicationLog:
      self.result = S_ERROR('No Log file provided')
    if not self.result['OK']:
      LOG.error("Failed to resolve input parameters:", self.result["Message"])
      return self.result
    if not self.workflowStatus['OK'] or not self.stepStatus['OK']:
      LOG.verbose('Workflow status = %s, step status = %s' % (self.workflowStatus['OK'], self.stepStatus['OK']))
      return S_OK('Whizard should not proceed as previous step did not end properly')

    #if self.debug:
    #  self.excludeAllButEventString = False

    res = getSoftwareFolder(self.platform, self.applicationName, self.applicationVersion)
    if not res['OK']:
      LOG.error("Failed getting software folder", res['Message'])
      self.setApplicationStatus('Failed finding software')
      return res
    mySoftDir = res['Value']

    ###Remove libc
    removeLibc(mySoftDir + "/lib")

    ##Need to fetch the new LD_LIBRARY_PATH
    new_ld_lib_path = getNewLDLibs(self.platform, self.applicationName, self.applicationVersion)
    #Don't forget to prepend the application's libs
    new_ld_lib_path = mySoftDir + "/lib:" + new_ld_lib_path

    ### Resolve dependencies (look for beam_spectra)
    deps = resolveDeps(self.platform, self.applicationName, self.applicationVersion)
    path_to_beam_spectra = ""
    path_to_gridfiles = ""
    for dep in deps:
      res = getSoftwareFolder(self.platform, dep["app"], dep['version'])
      if not res['OK']:
        LOG.error("Failed getting software folder", res['Message'])
        self.setApplicationStatus('Failed finding software')
        return res
      depfolder = res['Value']
      if dep["app"] == "beam_spectra":
        path_to_beam_spectra = depfolder
      elif dep["app"] == "gridfiles":
        path_to_gridfiles = depfolder

    ##Env variables needed to run whizard: avoids hard coded locations
    os.environ['LUMI_LINKER'] = path_to_beam_spectra + "/lumi_linker_000"
    os.environ['PHOTONS_B1'] = path_to_beam_spectra + "/photons_beam1_linker_000"
    os.environ['PHOTONS_B2'] = path_to_beam_spectra + "/photons_beam2_linker_000"
    os.environ['EBEAM'] = path_to_beam_spectra + "/ebeam_in_linker_000"
    os.environ['PBEAM'] = path_to_beam_spectra + "/pbeam_in_linker_000"
    os.environ['LUMI_EE_LINKER'] = path_to_beam_spectra + "/lumi_ee_linker_000"
    os.environ['LUMI_EG_LINKER'] = path_to_beam_spectra + "/lumi_eg_linker_000"
    os.environ['LUMI_GE_LINKER'] = path_to_beam_spectra + "/lumi_ge_linker_000"
    os.environ['LUMI_GG_LINKER'] = path_to_beam_spectra + "/lumi_gg_linker_000"

    list_of_gridfiles = []
    if path_to_gridfiles and self.useGridFiles:
      tmp_list_of_gridfiles = [os.path.join(path_to_gridfiles, item) for item in os.listdir(path_to_gridfiles)]
      gridfilesfound = False
      for path in tmp_list_of_gridfiles:
        if os.path.isdir(path) and path.count(str(self.energy)):
          #Here look for a sub directory for the energy related grid files
          list_of_gridfiles = [os.path.join(path, item) for item in os.listdir(path)]
          gridfilesfound = True
          LOG.info('Found grid files specific for energy %s' % self.energy)
          break
      if not gridfilesfound:
        LOG.info("Will use generic grid files found, hope the energy is set right")
        list_of_gridfiles = [item for item in glob.glob(os.path.join(path_to_gridfiles, "*.grb")) + glob.glob(os.path.join(path_to_gridfiles, "*.grc"))]

    template = False
    if self.SteeringFile.count("template"):
      template = True

    ## Get from process file the proper whizard.in file
    if self.getProcessInFile:
      whizardin = ""
      res = self.obtainProcessList()
      if not res['OK']:
        LOG.error("Could not obtain process list")
        self.setApplicationStatus('Failed getting processlist')
        return res
      whizardin = self.processlist.getInFile(self.evttype)
      if not whizardin:
        LOG.error("Whizard input file was not found in process list, cannot proceed")
        self.setApplicationStatus('Whizard input file was not found')
        return S_ERROR("Error while resolving whizard input file")
      if whizardin.count("template"):
        template = True
      try:
        shutil.copy("%s/%s" % (mySoftDir, whizardin), "./whizardnew.in")
        self.SteeringFile = "whizardnew.in"
      except EnvironmentError:
        LOG.error("Could not copy %s from %s" % (whizardin, mySoftDir))
        self.setApplicationStatus('Failed getting whizard.in file')
        return S_ERROR("Failed to obtain %s" % whizardin)

    ##Check existence of Les Houches input file
    leshouchesfiles = ''
    if not os.path.exists("LesHouches.msugra_1.in"):
      if self.susymodel:
        if self.susymodel == 1:
          if os.path.exists("%s/LesHouches_slsqhh.msugra_1.in" % (mySoftDir)):
            leshouchesfiles = "%s/LesHouches_slsqhh.msugra_1.in" % (mySoftDir)
        if self.susymodel == 2:
          if os.path.exists("%s/LesHouches_chne.msugra_1.in" % (mySoftDir)):
            leshouchesfiles = "%s/LesHouches_chne.msugra_1.in" % (mySoftDir)
      if self.Model:
        if self.genmodel.hasModel(self.Model)['OK']:
          if self.genmodel.getFile(self.Model)['OK']:
            if os.path.exists("%s/%s" % (mySoftDir, self.genmodel.getFile(self.Model)['Value'])):
              leshouchesfiles = "%s/%s" % (mySoftDir, self.genmodel.getFile(self.Model)['Value'])
            else:
              LOG.error("Request LesHouches file is missing, cannot proceed")
              self.setApplicationStatus("LesHouches file missing")
              return S_ERROR("The LesHouches file was not found. Probably you are using a wrong version of whizard.")
          else:
            LOG.warn("No file found attached to model %s" % self.Model)
        else:
          LOG.error("Model undefined:", self.Model)
          self.setApplicationStatus("Model undefined")
          return S_ERROR("No Model %s defined" % self.Model)
    else:
      leshouchesfiles = "LesHouches.msugra_1.in"

    outputfilename = self.evttype

    # Three ways to obtain whizard.in: explicit options dict, a plain steering
    # file, or a template with parameter substitution.
    if self.optionsdict:
      LOG.info("Using: %s" % self.optionsdict)
      self.options = WhizardOptions(self.Model)
      res = self.options.changeAndReturn(self.optionsdict)
      if not res['OK']:
        return res
      res = self.options.toWhizardDotIn("whizard.in")
    elif not template:
      res = prepareWhizardFile(self.SteeringFile, outputfilename, self.energy, self.RandomSeed, self.NumberOfEvents, self.Lumi, "whizard.in")
    else:
      res = prepareWhizardFileTemplate(self.SteeringFile, outputfilename, self.parameters, "whizard.in")
    if not res['OK']:
      LOG.error('Something went wrong with input file generation')
      self.setApplicationStatus('Whizard: something went wrong with input file generation')
      return S_ERROR('Something went wrong with whizard.in file generation')
    foundproceesinwhizardin = res['Value']

    scriptName = 'Whizard_%s_Run_%s.sh' % (self.applicationVersion, self.STEP_NUMBER)
    if os.path.exists(scriptName):
      os.remove(scriptName)
    script = open(scriptName, 'w')
    script.write('#!/bin/sh \n')
    script.write('#####################################################################\n')
    script.write('# Dynamically generated script to run a production or analysis job. #\n')
    script.write('#####################################################################\n')
    script.write('declare -x PATH=%s:$PATH\n' % mySoftDir)
    script.write('declare -x LD_LIBRARY_PATH=%s\n' % new_ld_lib_path)
    script.write('env | sort >> localEnv.log\n')
    script.write('echo =============================\n')
    script.write('echo Printing content of whizard.in \n')
    script.write('cat whizard.in\n')
    script.write('echo =============================\n')
    script.write('cp %s/whizard.mdl ./\n' % mySoftDir)
    if leshouchesfiles:
      if not leshouchesfiles == 'LesHouches.msugra_1.in':
        script.write('cp %s ./LesHouches.msugra_1.in\n' % (leshouchesfiles))
      # fort.71 is the unit whizard reads the LesHouches file from.
      script.write('ln -s LesHouches.msugra_1.in fort.71\n')
    if len(list_of_gridfiles):
      for gridfile in list_of_gridfiles:
        script.write('cp %s ./\n' % (gridfile))
    script.write('cp %s/whizard.prc ./\n' % mySoftDir)
    if self.genlevelcuts:
      res = self.makeWhizardDotCut1()
      if not res['OK']:
        script.close()
        LOG.error("Could not create the cut1 file")
        return S_ERROR("Could not create the cut1 file")
    script.write('echo =============================\n')
    script.write('echo Printing content of whizard.prc \n')
    script.write('cat whizard.prc\n')
    script.write('echo =============================\n')
    extracmd = ""
    if not self.debug:
      extracmd = "2>/dev/null"
    comm = ""
    if foundproceesinwhizardin:
      comm = 'whizard --simulation_input \'write_events_file = \"%s\"\'' % (outputfilename)
    else:
      comm = 'whizard --process_input \'process_id =\"%s\"\' --simulation_input \'write_events_file = \"%s\"\' ' % (self.evttype, outputfilename)
    comm = "%s %s %s\n" % (comm, self.extraCLIarguments, extracmd)
    LOG.info("Will run %s" % comm)
    script.write(comm)
    script.write('declare -x appstatus=$?\n')
    script.write('exit $appstatus\n')
    script.close()

    if os.path.exists(self.applicationLog):
      os.remove(self.applicationLog)

    os.chmod(scriptName, 0o755)
    comm = 'sh -c "./%s"' % (scriptName)
    self.setApplicationStatus('Whizard %s step %s' % (self.applicationVersion, self.STEP_NUMBER))
    self.stdError = ''
    self.result = shellCall(0, comm, callbackFunction=self.redirectLogOutput, bufferLimit=209715200)
    #self.result = {'OK':True,'Value':(0,'Disabled Execution','')}
    if not self.result['OK']:
      LOG.error("Failed with error %s" % self.result['Message'])
    if not os.path.exists(self.applicationLog):
      LOG.error("Something went terribly wrong, the log file is not present")
      self.setApplicationStatus('%s failed terribly, you are doomed!' % (self.applicationName))
      if not self.ignoreapperrors:
        return S_ERROR('%s did not produce the expected log' % (self.applicationName))

    lumi = ''
    message = ""
    success = False
    ###Analyse log file: fatal markers abort the scan; "Event generation
    ###finished." marks success.
    with open(self.applicationLog) as logfile:
      for line in logfile:
        if line.count('! Event sample corresponds to luminosity'):
          elems = line.split()
          lumi = elems[-1]
        if line.count("*** Fatal error:"):
          status = 1
          message = line
          break
        elif line.count("PYSTOP"):
          status = 1
          message = line
          break
        elif line.count("No matrix element available"):
          status = 1
          message = line
          break
        elif line.count("Floating point exception"):
          status = 1
          message = line
          break
        elif line.count("Event generation finished."):
          success = True
        else:
          status = 0
    if success:
      status = 0
    else:
      status = 1
    LOG.info('The sample generated has an equivalent luminosity of %s' % lumi)
    if lumi:
      self.workflow_commons['Luminosity'] = float(lumi)
    else:
      status = 1

    ##Now care for the cross sections
    info = {}
    # NOTE(review): self.options is only assigned when self.optionsdict was
    # used above; on the steering-file/template paths it is still None here
    # and getAsDict() would raise AttributeError — TODO confirm upstream.
    res = self.options.getAsDict()
    if os.path.exists("whizard.out") and res['OK']:
      full_opts_dict = res['Value']
      processes = full_opts_dict['process_input']['process_id'].split()
      info = {}
      info['xsection'] = {}
      processes.append('sum')
      with open("whizard.out", "r") as inf:
        for line in inf:
          line = line.rstrip()
          for process in processes:
            if not process:
              continue
            if line.count(" %s " % process):
              info['xsection'][process] = {}
              line = line.lstrip()
              crosssection = line.split()[1]
              err_crosssection = line.split()[2]
              frac = line.split()[4]
              info['xsection'][process]['xsection'] = float(crosssection)
              info['xsection'][process]['err_xsection'] = float(err_crosssection)
              info['xsection'][process]['fraction'] = float(frac)
    if info:
      if 'Info' not in self.workflow_commons:
        self.workflow_commons['Info'] = info
      else:
        self.workflow_commons['Info'].update(info)

    LOG.info("Status after the application execution is %s" % str(status))
    messageout = 'Whizard %s Successful' % (self.applicationVersion)
    failed = False
    if status != 0:
      LOG.error("Whizard execution completed with errors:")
      failed = True
    else:
      LOG.info("Whizard execution completed successfully")
      ###Deal with output file
      if len(self.OutputFile):
        if os.path.exists(outputfilename + ".001.stdhep"):
          LOG.notice("Looking for output files")
          ofnames = glob.glob(outputfilename + '*.stdhep')
          if len(ofnames) > 1:
            # Several samples produced: number them from the OutputFile stem.
            basename = self.OutputFile.split(".stdhep")[0]
            i = 0
            for of in ofnames:
              i += 1
              name = basename + "_" + str(i) + ".stdhep"
              os.rename(of, name)
          else:
            os.rename(outputfilename + ".001.stdhep", self.OutputFile)
        else:
          LOG.error("Whizard execution did not produce a stdhep file")
          self.setApplicationStatus('Whizard %s Failed to produce STDHEP file' % (self.applicationVersion))
          messageout = 'Whizard Failed to produce STDHEP file'
          if not self.ignoreapperrors:
            return S_ERROR(messageout)

    if failed is True:
      LOG.error("==================================\n StdError:\n")
      LOG.error(message)
      self.setApplicationStatus('%s Exited With Status %s' % (self.applicationName, status))
      LOG.error('Whizard Exited With Status %s' % (status))
      messageout = 'Whizard Exited With Status %s' % (status)
      if not self.ignoreapperrors:
        return S_ERROR(messageout)
    else:
      self.setApplicationStatus(messageout)
    return S_OK( { "OutputFile": self.OutputFile } )

  def makeWhizardDotCut1(self):
    """ When users need whizard cuts, this is called to prepare the file

    Writes one ``process <key>`` header per entry of ``self.genlevelcuts``,
    followed by the cut lines for that process.

    :return: S_OK()
    """
    cutf = open("whizard.cut1","w")
    for key, values in self.genlevelcuts.items():
      cutf.write("process %s\n" % key)
      for val in values:
        cutf.write(" %s\n" % val)
    cutf.close()
    return S_OK()
# NOTE(review): this chunk is the interior of an installation routine;
# `path_to_process_list` and `whizard_location` are bound earlier, outside
# this view — confirm against the enclosing function.

# Fetch the process list and open it locally.
gLogger.verbose("Getting process list from storage")
rm = ReplicaManager()
res = rm.getFile(path_to_process_list)
if not res['OK']:
  gLogger.error("Error while getting process list from storage")
  dexit(2)
gLogger.verbose("done")
processlist = os.path.basename(path_to_process_list)
if not os.path.exists(processlist):
  gLogger.error("Process list does not exist locally")
  dexit(2)
pl = ProcessList(processlist)

# Sanity-check the whizard installation directory: both the executable and
# whizard.prc must be present before proceeding.
startdir = os.getcwd()
inputlist = {}
os.chdir(whizard_location)
folderlist = os.listdir(os.getcwd())
whiz_here = folderlist.count("whizard")
if whiz_here == 0:
  gLogger.error("whizard executable not found in %s, please check" % whizard_location)
  os.chdir(startdir)
  dexit(2)
whizprc_here = folderlist.count("whizard.prc")
if whizprc_here == 0:
  gLogger.error("whizard.prc not found in %s, please check" % whizard_location)
  os.chdir(startdir)
  dexit(2)
if __name__=="__main__":
  # CLI entry point: parse switches, then collect the transformation IDs to
  # report on — either those given with --prod, or all transformations
  # matching the requested statuses/types.
  clip = Params()
  clip.registerSwitch()
  Script.parseCommandLine()
  # Imports are deferred until after parseCommandLine, per DIRAC convention.
  from ILCDIRAC.Core.Utilities.HTML import Table
  from ILCDIRAC.Core.Utilities.ProcessList import ProcessList
  from DIRAC.TransformationSystem.Client.TransformationClient import TransformationClient
  from DIRAC.Resources.Catalog.FileCatalogClient import FileCatalogClient
  from DIRAC import gConfig, gLogger
  prod = clip.prod
  full_detail = clip.full_det
  fc = FileCatalogClient()
  processlist = gConfig.getValue('/LocalSite/ProcessListPath')
  prl = ProcessList(processlist)
  processesdict = prl.getProcessesDict()
  trc = TransformationClient()
  prodids = []
  if not prod:
    # No explicit production list: query transformations by status (and
    # optionally by type).
    conddict = {}
    conddict['Status'] = clip.statuses
    if clip.ptypes:
      conddict['Type'] = clip.ptypes
    res = trc.getTransformations( conddict )
    if res['OK']:
      for transfs in res['Value']:
        prodids.append(transfs['TransformationID'])
  else:
    prodids.extend(prod)
class WhizardAnalysis(ModuleBase):
  """Specific Module to run a Whizard job.

  Legacy (Python 2) variant of the module — note the octal literal ``0755``
  and the instance-level ``self.log`` sub-logger. Prepares the whizard.in
  steering file and runtime environment, runs ``whizard`` through a generated
  shell script, and parses the log and ``whizard.out`` for status, luminosity
  and cross sections.
  """

  def __init__(self):
    super(WhizardAnalysis, self).__init__()
    self.enable = True
    self.STEP_NUMBER = ''          # step identifier used in script / status names
    self.debug = True              # when False, whizard stderr goes to /dev/null
    self.log = gLogger.getSubLogger("WhizardAnalysis")
    self.SteeringFile = ''         # name of the whizard.in (or template) file
    self.OutputFile = ''           # final stdhep file name
    self.NumberOfEvents = 1
    self.Lumi = 0
    self.applicationName = 'whizard'
    self.evttype = ""              # process/event type; also the output base name
    self.RandomSeed = 0
    self.getProcessInFile = False  # True => fetch whizard.in from the process list
    self.datMan = DataManager()
    self.processlist = None        # ProcessList instance, set by obtainProcessList()
    self.parameters = {}           # substitutions for template steering files
    self.susymodel = 0             # 1 or 2 selects a bundled LesHouches file
    self.Model = ''
    self.genmodel = GeneratorModels()
    # Strings that mark log lines worth keeping/reporting.
    self.eventstring = ['! ', 'Fatal error:', 'PYSTOP', 'No matrix element available',
                        'Floating point exception', 'Event generation finished.',
                        " n_events", "luminosity", " sum "]
    self.excludeAllButEventString = False
    self.steeringparameters = ''   # ";"-separated KEY=VALUE pairs
    self.options = None            # WhizardOptions, only set when optionsdict is used
    self.optionsdict = {}
    self.OptionsDictStr = ''       # stringified dict, eval'ed in applicationSpecificInputs
    self.GenLevelCutDictStr = ''   # stringified dict of generator-level cuts
    self.genlevelcuts = {}
    self.willCut = False
    self.useGridFiles = False

  def obtainProcessList(self):
    """Internal function

    Get the process list from storage if whizard.in was not provided

    :return: S_OK(), S_ERROR()
    """
    res = self.ops.getValue("/ProcessList/Location", "")
    if not res:
      return S_ERROR("No process list found")
    processlistloc = res
    # Download only if the file is not already in the working directory.
    if not os.path.exists(os.path.basename(processlistloc)):
      res = self.datMan.getFile(processlistloc)
      if not res['OK']:
        self.log.error('Could not get processlist: %s' % res['Message'])
        return res
    self.processlist = ProcessList(os.path.basename(processlistloc))
    return S_OK()

  def applicationSpecificInputs(self):
    """Resolve module input

    Fills ``self.parameters``, decodes the options and generator-cut
    dictionaries, and derives the output file name for production jobs.

    :return: S_OK(), S_ERROR()
    """
    self.parameters['ENERGY'] = self.energy

    if not self.RandomSeed and self.jobID:
      self.RandomSeed = self.jobID
    # Production jobs get a reproducible seed built from prod and job IDs.
    if 'IS_PROD' in self.workflow_commons or 'IS_DBD_GEN_PROD' in self.workflow_commons:
      self.RandomSeed = int(str(int(self.workflow_commons["PRODUCTION_ID"])) + str(int(self.workflow_commons["JOB_ID"])))

    self.parameters['SEED'] = self.RandomSeed
    self.parameters['NBEVTS'] = self.NumberOfEvents
    self.parameters['LUMI'] = self.Lumi  ##EVER USED???

    if 'SusyModel' in self.step_commons:
      self.susymodel = self.step_commons['SusyModel']

    self.SteeringFile = os.path.basename(self.step_commons.get("InputFile", self.SteeringFile))
    # "whizard.in" is the name the generated script expects to create itself,
    # so a user-supplied file with that name is moved aside.
    if self.SteeringFile == "whizard.in":
      os.rename(self.SteeringFile, "whizardnew.in")
      self.SteeringFile = "whizardnew.in"

    self.parameters['PROCESS'] = self.evttype

    listofparams = self.steeringparameters.split(";")
    for param in listofparams:
      if param.count("="):
        self.parameters[param.split("=")[0]] = param.split("=")[1]

    if self.OptionsDictStr:
      self.log.info("Will use whizard.in definition from WhizardOptions.")
      try:
        # NOTE(review): eval of a workflow-supplied string; ast.literal_eval
        # would be the safe equivalent for plain dict literals.
        self.optionsdict = eval(self.OptionsDictStr)
        if 'integration_input' not in self.optionsdict:
          self.optionsdict['integration_input'] = {}
        if 'seed' not in self.optionsdict['integration_input']:
          self.optionsdict['integration_input']['seed'] = int(self.RandomSeed)
        if 'process_input' in self.optionsdict:
          if 'sqrts' in self.optionsdict['process_input']:
            self.energy = self.optionsdict['process_input']['sqrts']
      except:  # NOTE(review): bare except; should at least be `except Exception:`
        return S_ERROR("Could not convert string to dictionary for optionsdict")

    if self.GenLevelCutDictStr:
      self.log.info("Found generator level cuts")
      try:
        self.genlevelcuts = eval(self.GenLevelCutDictStr)
      except:  # NOTE(review): bare except; should at least be `except Exception:`
        return S_ERROR("Could not convert the generator level cuts back to dictionary")

    # Without a steering file or an options dict, whizard.in must come from
    # the process list (see runIt / obtainProcessList).
    if not len(self.SteeringFile) and not self.optionsdict:
      self.getProcessInFile = True

    if "IS_PROD" in self.workflow_commons:
      if self.workflow_commons["IS_PROD"] and not self.willCut:
        #self.OutputFile = getProdFilename(self.OutputFile,int(self.workflow_commons["PRODUCTION_ID"]),
        #                                  int(self.workflow_commons["JOB_ID"]))
        if 'ProductionOutputData' in self.workflow_commons:
          outputlist = self.workflow_commons['ProductionOutputData'].split(";")
          for obj in outputlist:
            if obj.lower().count("_gen_"):
              self.OutputFile = os.path.basename(obj)
              break
      else:
        #This is because most likely there is stdhepcut running after
        self.OutputFile = "willcut.stdhep"
        #getProdFilename(self.OutputFile,int(self.workflow_commons["PRODUCTION_ID"]),
        #                int(self.workflow_commons["JOB_ID"]))

    if "IS_DBD_GEN_PROD" in self.workflow_commons and self.workflow_commons["IS_DBD_GEN_PROD"]:
      #self.OutputFile = getProdFilename(self.OutputFile,int(self.workflow_commons["PRODUCTION_ID"]),
      #                                  int(self.workflow_commons["JOB_ID"]))
      if 'ProductionOutputData' in self.workflow_commons:
        outputlist = self.workflow_commons['ProductionOutputData'].split(";")
        for obj in outputlist:
          self.OutputFile = os.path.basename(obj)
          break
      else:
        self.OutputFile = getProdFilename(self.OutputFile,
                                          int(self.workflow_commons["PRODUCTION_ID"]),
                                          int(self.workflow_commons["JOB_ID"]))
    return S_OK()

  def runIt(self):
    """ Called by Agent

    Executes the following
      - resolve input variables
      - resolve installation location
      - resolve dependencies location (beam_spectra)
      - get processlist if needed
      - define output file name
      - prepare whizard.in
      - make magic

    :return: S_OK(), S_ERROR()
    """
    self.result = S_OK()
    if not self.platform:
      self.result = S_ERROR('No ILC platform selected')
    elif not self.applicationLog:
      self.result = S_ERROR('No Log file provided')
    if not self.result['OK']:
      self.log.error("Failed to resolve input parameters:", self.result["Message"])
      return self.result
    if not self.workflowStatus['OK'] or not self.stepStatus['OK']:
      self.log.verbose('Workflow status = %s, step status = %s' % (self.workflowStatus['OK'], self.stepStatus['OK']))
      return S_OK('Whizard should not proceed as previous step did not end properly')

    #if self.debug:
    #  self.excludeAllButEventString = False

    res = getSoftwareFolder(self.platform, self.applicationName, self.applicationVersion)
    if not res['OK']:
      self.log.error("Failed getting software folder", res['Message'])
      self.setApplicationStatus('Failed finding software')
      return res
    mySoftDir = res['Value']

    ###Remove libc
    removeLibc(mySoftDir + "/lib")

    ##Need to fetch the new LD_LIBRARY_PATH
    new_ld_lib_path = getNewLDLibs(self.platform, self.applicationName, self.applicationVersion)
    #Don't forget to prepend the application's libs
    new_ld_lib_path = mySoftDir + "/lib:" + new_ld_lib_path

    ### Resolve dependencies (look for beam_spectra)
    deps = resolveDeps(self.platform, self.applicationName, self.applicationVersion)
    path_to_beam_spectra = ""
    path_to_gridfiles = ""
    for dep in deps:
      res = getSoftwareFolder(self.platform, dep["app"], dep['version'])
      if not res['OK']:
        self.log.error("Failed getting software folder", res['Message'])
        self.setApplicationStatus('Failed finding software')
        return res
      depfolder = res['Value']
      if dep["app"] == "beam_spectra":
        path_to_beam_spectra = depfolder
      elif dep["app"] == "gridfiles":
        path_to_gridfiles = depfolder

    ##Env variables needed to run whizard: avoids hard coded locations
    os.environ['LUMI_LINKER'] = path_to_beam_spectra + "/lumi_linker_000"
    os.environ['PHOTONS_B1'] = path_to_beam_spectra + "/photons_beam1_linker_000"
    os.environ['PHOTONS_B2'] = path_to_beam_spectra + "/photons_beam2_linker_000"
    os.environ['EBEAM'] = path_to_beam_spectra + "/ebeam_in_linker_000"
    os.environ['PBEAM'] = path_to_beam_spectra + "/pbeam_in_linker_000"
    os.environ['LUMI_EE_LINKER'] = path_to_beam_spectra + "/lumi_ee_linker_000"
    os.environ['LUMI_EG_LINKER'] = path_to_beam_spectra + "/lumi_eg_linker_000"
    os.environ['LUMI_GE_LINKER'] = path_to_beam_spectra + "/lumi_ge_linker_000"
    os.environ['LUMI_GG_LINKER'] = path_to_beam_spectra + "/lumi_gg_linker_000"

    list_of_gridfiles = []
    if path_to_gridfiles and self.useGridFiles:
      tmp_list_of_gridfiles = [os.path.join(path_to_gridfiles, item) for item in os.listdir(path_to_gridfiles)]
      gridfilesfound = False
      for path in tmp_list_of_gridfiles:
        if os.path.isdir(path) and path.count(str(self.energy)):
          #Here look for a sub directory for the energy related grid files
          list_of_gridfiles = [os.path.join(path, item) for item in os.listdir(path)]
          gridfilesfound = True
          self.log.info('Found grid files specific for energy %s' % self.energy)
          break
      if not gridfilesfound:
        self.log.info("Will use generic grid files found, hope the energy is set right")
        list_of_gridfiles = [item for item in glob.glob(os.path.join(path_to_gridfiles, "*.grb")) + glob.glob(os.path.join(path_to_gridfiles, "*.grc"))]

    template = False
    if self.SteeringFile.count("template"):
      template = True

    ## Get from process file the proper whizard.in file
    if self.getProcessInFile:
      whizardin = ""
      res = self.obtainProcessList()
      if not res['OK']:
        self.log.error("Could not obtain process list")
        self.setApplicationStatus('Failed getting processlist')
        return res
      whizardin = self.processlist.getInFile(self.evttype)
      if not whizardin:
        self.log.error("Whizard input file was not found in process list, cannot proceed")
        self.setApplicationStatus('Whizard input file was not found')
        return S_ERROR("Error while resolving whizard input file")
      if whizardin.count("template"):
        template = True
      try:
        shutil.copy("%s/%s" % (mySoftDir, whizardin), "./whizardnew.in")
        self.SteeringFile = "whizardnew.in"
      except EnvironmentError:
        self.log.error("Could not copy %s from %s" % (whizardin, mySoftDir))
        self.setApplicationStatus('Failed getting whizard.in file')
        return S_ERROR("Failed to obtain %s" % whizardin)

    ##Check existence of Les Houches input file
    leshouchesfiles = ''
    if not os.path.exists("LesHouches.msugra_1.in"):
      if self.susymodel:
        if self.susymodel == 1:
          if os.path.exists("%s/LesHouches_slsqhh.msugra_1.in" % (mySoftDir)):
            leshouchesfiles = "%s/LesHouches_slsqhh.msugra_1.in" % (mySoftDir)
        if self.susymodel == 2:
          if os.path.exists("%s/LesHouches_chne.msugra_1.in" % (mySoftDir)):
            leshouchesfiles = "%s/LesHouches_chne.msugra_1.in" % (mySoftDir)
      if self.Model:
        if self.genmodel.hasModel(self.Model)['OK']:
          if self.genmodel.getFile(self.Model)['OK']:
            if os.path.exists("%s/%s" % (mySoftDir, self.genmodel.getFile(self.Model)['Value'])):
              leshouchesfiles = "%s/%s" % (mySoftDir, self.genmodel.getFile(self.Model)['Value'])
            else:
              self.log.error("Request LesHouches file is missing, cannot proceed")
              self.setApplicationStatus("LesHouches file missing")
              return S_ERROR("The LesHouches file was not found. Probably you are using a wrong version of whizard.")
          else:
            self.log.warn("No file found attached to model %s" % self.Model)
        else:
          self.log.error("Model undefined:", self.Model)
          self.setApplicationStatus("Model undefined")
          return S_ERROR("No Model %s defined" % self.Model)
    else:
      leshouchesfiles = "LesHouches.msugra_1.in"

    outputfilename = self.evttype

    # Three ways to obtain whizard.in: explicit options dict, a plain steering
    # file, or a template with parameter substitution.
    if self.optionsdict:
      self.log.info("Using: %s" % self.optionsdict)
      self.options = WhizardOptions(self.Model)
      res = self.options.changeAndReturn(self.optionsdict)
      if not res['OK']:
        return res
      res = self.options.toWhizardDotIn("whizard.in")
    elif not template:
      res = prepareWhizardFile(self.SteeringFile, outputfilename, self.energy, self.RandomSeed, self.NumberOfEvents, self.Lumi, "whizard.in")
    else:
      res = prepareWhizardFileTemplate(self.SteeringFile, outputfilename, self.parameters, "whizard.in")
    if not res['OK']:
      self.log.error('Something went wrong with input file generation')
      self.setApplicationStatus('Whizard: something went wrong with input file generation')
      return S_ERROR('Something went wrong with whizard.in file generation')
    foundproceesinwhizardin = res['Value']

    scriptName = 'Whizard_%s_Run_%s.sh' % (self.applicationVersion, self.STEP_NUMBER)
    if os.path.exists(scriptName):
      os.remove(scriptName)
    script = open(scriptName, 'w')
    script.write('#!/bin/sh \n')
    script.write('#####################################################################\n')
    script.write('# Dynamically generated script to run a production or analysis job. #\n')
    script.write('#####################################################################\n')
    script.write('declare -x PATH=%s:$PATH\n' % mySoftDir)
    script.write('declare -x LD_LIBRARY_PATH=%s\n' % new_ld_lib_path)
    script.write('env | sort >> localEnv.log\n')
    script.write('echo =============================\n')
    script.write('echo Printing content of whizard.in \n')
    script.write('cat whizard.in\n')
    script.write('echo =============================\n')
    script.write('cp %s/whizard.mdl ./\n' % mySoftDir)
    if leshouchesfiles:
      if not leshouchesfiles == 'LesHouches.msugra_1.in':
        script.write('cp %s ./LesHouches.msugra_1.in\n' % (leshouchesfiles))
      # fort.71 is the unit whizard reads the LesHouches file from.
      script.write('ln -s LesHouches.msugra_1.in fort.71\n')
    if len(list_of_gridfiles):
      for gridfile in list_of_gridfiles:
        script.write('cp %s ./\n' % (gridfile))
    script.write('cp %s/whizard.prc ./\n' % mySoftDir)
    if self.genlevelcuts:
      res = self.makeWhizardDotCut1()
      if not res['OK']:
        script.close()
        self.log.error("Could not create the cut1 file")
        return S_ERROR("Could not create the cut1 file")
    script.write('echo =============================\n')
    script.write('echo Printing content of whizard.prc \n')
    script.write('cat whizard.prc\n')
    script.write('echo =============================\n')
    extracmd = ""
    if not self.debug:
      extracmd = "2>/dev/null"
    comm = ""
    if foundproceesinwhizardin:
      comm = 'whizard --simulation_input \'write_events_file = \"%s\"\'' % (outputfilename)
    else:
      comm = 'whizard --process_input \'process_id =\"%s\"\' --simulation_input \'write_events_file = \"%s\"\' ' % (self.evttype, outputfilename)
    comm = "%s %s %s\n" % (comm, self.extraCLIarguments, extracmd)
    self.log.info("Will run %s" % comm)
    script.write(comm)
    script.write('declare -x appstatus=$?\n')
    script.write('exit $appstatus\n')
    script.close()

    if os.path.exists(self.applicationLog):
      os.remove(self.applicationLog)

    # NOTE(review): 0755 is Python 2 octal syntax — this module variant is
    # Python 2 only (would be 0o755 in Python 3).
    os.chmod(scriptName, 0755)
    comm = 'sh -c "./%s"' % (scriptName)
    self.setApplicationStatus('Whizard %s step %s' % (self.applicationVersion, self.STEP_NUMBER))
    self.stdError = ''
    self.result = shellCall(0, comm, callbackFunction=self.redirectLogOutput, bufferLimit=209715200)
    #self.result = {'OK':True,'Value':(0,'Disabled Execution','')}
    if not self.result['OK']:
      self.log.error("Failed with error %s" % self.result['Message'])
    if not os.path.exists(self.applicationLog):
      self.log.error("Something went terribly wrong, the log file is not present")
      self.setApplicationStatus('%s failed terribly, you are doomed!' % (self.applicationName))
      if not self.ignoreapperrors:
        return S_ERROR('%s did not produce the expected log' % (self.applicationName))

    lumi = ''
    message = ""
    success = False
    ###Analyse log file: fatal markers abort the scan; "Event generation
    ###finished." marks success.
    with open(self.applicationLog) as logfile:
      for line in logfile:
        if line.count('! Event sample corresponds to luminosity'):
          elems = line.split()
          lumi = elems[-1]
        if line.count("*** Fatal error:"):
          status = 1
          message = line
          break
        elif line.count("PYSTOP"):
          status = 1
          message = line
          break
        elif line.count("No matrix element available"):
          status = 1
          message = line
          break
        elif line.count("Floating point exception"):
          status = 1
          message = line
          break
        elif line.count("Event generation finished."):
          success = True
        else:
          status = 0
    if success:
      status = 0
    else:
      status = 1
    self.log.info('The sample generated has an equivalent luminosity of %s' % lumi)
    if lumi:
      self.workflow_commons['Luminosity'] = float(lumi)
    else:
      status = 1

    ##Now care for the cross sections
    info = {}
    # NOTE(review): self.options is only assigned when self.optionsdict was
    # used above; on the steering-file/template paths it is still None here
    # and getAsDict() would raise AttributeError — TODO confirm upstream.
    res = self.options.getAsDict()
    if os.path.exists("whizard.out") and res['OK']:
      full_opts_dict = res['Value']
      processes = full_opts_dict['process_input']['process_id'].split()
      info = {}
      info['xsection'] = {}
      processes.append('sum')
      with open("whizard.out", "r") as inf:
        for line in inf:
          line = line.rstrip()
          for process in processes:
            if not process:
              continue
            if line.count(" %s " % process):
              info['xsection'][process] = {}
              line = line.lstrip()
              crosssection = line.split()[1]
              err_crosssection = line.split()[2]
              frac = line.split()[4]
              info['xsection'][process]['xsection'] = float(crosssection)
              info['xsection'][process]['err_xsection'] = float(err_crosssection)
              info['xsection'][process]['fraction'] = float(frac)
    if info:
      if 'Info' not in self.workflow_commons:
        self.workflow_commons['Info'] = info
      else:
        self.workflow_commons['Info'].update(info)

    self.log.info("Status after the application execution is %s" % str(status))
    messageout = 'Whizard %s Successful' % (self.applicationVersion)
    failed = False
    if status != 0:
      self.log.error("Whizard execution completed with errors:")
      failed = True
    else:
      self.log.info("Whizard execution completed successfully")
      ###Deal with output file
      if len(self.OutputFile):
        if os.path.exists(outputfilename + ".001.stdhep"):
          self.log.notice("Looking for output files")
          ofnames = glob.glob(outputfilename + '*.stdhep')
          if len(ofnames) > 1:
            # Several samples produced: number them from the OutputFile stem.
            basename = self.OutputFile.split(".stdhep")[0]
            i = 0
            for of in ofnames:
              i += 1
              name = basename + "_" + str(i) + ".stdhep"
              os.rename(of, name)
          else:
            os.rename(outputfilename + ".001.stdhep", self.OutputFile)
        else:
          self.log.error("Whizard execution did not produce a stdhep file")
          self.setApplicationStatus('Whizard %s Failed to produce STDHEP file' % (self.applicationVersion))
          messageout = 'Whizard Failed to produce STDHEP file'
          if not self.ignoreapperrors:
            return S_ERROR(messageout)

    if failed is True:
      self.log.error("==================================\n StdError:\n")
      self.log.error(message)
      self.setApplicationStatus('%s Exited With Status %s' % (self.applicationName, status))
      self.log.error('Whizard Exited With Status %s' % (status))
      messageout = 'Whizard Exited With Status %s' % (status)
      if not self.ignoreapperrors:
        return S_ERROR(messageout)
    else:
      self.setApplicationStatus(messageout)
    return S_OK({"OutputFile": self.OutputFile})

  def makeWhizardDotCut1(self):
    """ When users need whizard cuts, this is called to prepare the file

    Writes one ``process <key>`` header per entry of ``self.genlevelcuts``,
    followed by the cut lines for that process.

    :return: S_OK()
    """
    cutf = open("whizard.cut1", "w")
    for key, values in self.genlevelcuts.items():
      cutf.write("process %s\n" % key)
      for val in values:
        cutf.write(" %s\n" % val)
    cutf.close()
    return S_OK()
def setUp(self):
  """Build the ProcessList under test; the backing file is made to look absent."""
  fake_exists = Mock(return_value=False)
  with patch('%s.os.path.exists' % MODULE_NAME, new=fake_exists):
    self.prol = ProcessList('myTestProcess.list')
def setUp(self):
  """Create a ProcessList with no backing file and give it a 'Processes' section."""
  path_exists_stub = Mock(return_value=False)
  patcher = patch('%s.os.path.exists' % MODULE_NAME, new=path_exists_stub)
  patcher.start()
  try:
    self.prol = ProcessList('myTestProcess.list')
  finally:
    patcher.stop()
  self.prol.cfg.createNewSection('Processes')
def doTheWhizardInstallation():
  """Do the installation for a new whizard version.

  Checks the build prerequisites (SLC and gfortran versions), fetches the
  current process list from the file catalog, scans the whizard build folder
  for process definitions and cross sections, packs libraries and executables
  into a tarball, registers the tarball in the Configuration System and
  finally uploads the updated process list.

  Exits the interpreter through dexit() on every fatal condition; never
  returns a value to the caller.
  """
  # --- environment sanity checks -------------------------------------------
  res = checkSLCVersion()
  if not res['OK']:
    gLogger.error(res['Message'])
    dexit(1)
  res = checkGFortranVersion()
  if not res['OK']:
    gLogger.error(res['Message'])
    dexit(1)
  # --- command-line handling ------------------------------------------------
  cliParams = Params()
  cliParams.registerSwitches()
  Script.parseCommandLine( ignoreErrors= False)
  whizardResultFolder = cliParams.path
  platform = cliParams.platform
  whizard_version = cliParams.version
  appVersion = whizard_version
  beam_spectra_version = cliParams.beam_spectra
  if not whizardResultFolder or not whizard_version or not beam_spectra_version:
    Script.showHelp()
    dexit(2)
  # Imports are deferred until after parseCommandLine, as DIRAC requires.
  from ILCDIRAC.Core.Utilities.ProcessList import ProcessList
  from DIRAC.ConfigurationSystem.Client.Helpers.Operations import Operations
  from DIRAC.Interfaces.API.DiracAdmin import DiracAdmin
  from ILCDIRAC.Core.Utilities.FileUtils import upload
  from DIRAC.DataManagementSystem.Client.DataManager import DataManager
  diracAdmin = DiracAdmin()
  modifiedCS = False
  softwareSection = "/Operations/Defaults/AvailableTarBalls"
  processlistLocation = "ProcessList/Location"
  appName = "whizard"
  # --- fetch the current process list from the catalog ----------------------
  ops = Operations()
  path_to_process_list = ops.getValue(processlistLocation, "")
  if not path_to_process_list:
    gLogger.error("Could not find process list location in CS")
    dexit(2)
  gLogger.verbose("Getting process list from file catalog")
  datMan = DataManager()
  res = datMan.getFile(path_to_process_list)
  if not res['OK']:
    gLogger.error("Error while getting process list from storage")
    dexit(2)
  gLogger.verbose("done")
  ##just the name of the local file in current working directory
  processlist = os.path.basename(path_to_process_list)
  if not os.path.exists(processlist):
    gLogger.error("Process list does not exist locally")
    dexit(2)
  pl = ProcessList(processlist)
  startDir = os.getcwd()
  inputlist = {}
  os.chdir(whizardResultFolder)
  folderlist = os.listdir(whizardResultFolder)
  # --- verify the build folder contains the mandatory artefacts -------------
  whiz_here = folderlist.count("whizard")
  if whiz_here == 0:
    gLogger.error("whizard executable not found in %s, please check" % whizardResultFolder)
    os.chdir(startDir)
    dexit(2)
  whizprc_here = folderlist.count("whizard.prc")
  if whizprc_here == 0:
    gLogger.error("whizard.prc not found in %s, please check" % whizardResultFolder)
    os.chdir(startDir)
    dexit(2)
  whizmdl_here = folderlist.count("whizard.mdl")
  if whizmdl_here == 0:
    gLogger.error("whizard.mdl not found in %s, please check" % whizardResultFolder)
    os.chdir(startDir)
    dexit(2)
  gLogger.verbose("Preparing process list")
  ## FIXME:: What is this doing exactly? Is this necessary? -- APS, JFS
  # Scan the *.in template files for decay descriptions / process ids and
  # collect per-process metadata (Model, Generator, Restrictions).
  for f in folderlist:
    if f.count(".in"):
      # NOTE(review): count(".in") matches the substring anywhere in the
      # name, not just a ".in" extension — presumably intended; confirm.
      infile = open(f, "r")
      found_detail = False
      for line in infile:
        if line.count("decay_description"):
          currprocess = f.split(".template.in")[0]
          inputlist[currprocess] = {}
          inputlist[currprocess]["InFile"] = f.rstrip("~")
          inputlist[currprocess]["Detail"] = line.split("\"")[1]
          found_detail = True
        if line.count("process_id") and found_detail:
          process_id = line.split("\"")[1]
          inputlist[currprocess]["Model"] = ""
          inputlist[currprocess]["Generator"] = ""
          inputlist[currprocess]["Restrictions"] = ""
          for process in process_id.split():
            print("Looking for detail of process %s" % (process))
            process_detail = getDetailsFromPRC("whizard.prc", process)
            inputlist[currprocess]["Model"] = process_detail["Model"]
            inputlist[currprocess]["Generator"] = process_detail["Generator"]
            # Restrictions of all sub-processes are concatenated, comma separated.
            if len(inputlist[currprocess]["Restrictions"]):
              inputlist[currprocess]["Restrictions"] = inputlist[currprocess]["Restrictions"] + ", " + process_detail["Restrictions"]
            else:
              inputlist[currprocess]["Restrictions"] = process_detail["Restrictions"]
      #if len(inputlist[currprocess].items()):
      #  inputlist.append(processdict)
  ## END FIXEME
  ##Update inputlist with what was found looking in the prc file
  processes = readPRCFile("whizard.prc")
  inputlist.update(processes)
  ##get from cross section files the cross sections for the processes in inputlist
  #Need full process list
  for f in folderlist:
    if f.count("cross_sections_"):
      crossfile = open(f, "r")
      for line in crossfile:
        line = line.rstrip().lstrip()
        # Skip blanks, comments ('#' or '!') and malformed lines.
        if not len(line):
          continue
        if line[0] == "#" or line[0] == "!":
          continue
        if len(line.split()) < 2:
          continue
        currprocess = line.split()[0]
        if currprocess in inputlist:
          inputlist[currprocess]['CrossSection'] = line.split()[1]
  gLogger.notice("Preparing Tarball")
  ##Make a folder in the current directory of the user to store the whizard libraries, executable et al.
  localWhizardFolderRel = ("whizard" + whizard_version) # relative path
  localWhizardFolder = os.path.join(startDir, localWhizardFolderRel)
  if not os.path.exists(localWhizardFolder):
    os.makedirs(localWhizardFolder)
  localWhizardLibFolder = os.path.join(localWhizardFolder,'lib')
  if os.path.exists(localWhizardLibFolder):
    shutil.rmtree(localWhizardLibFolder)
  os.makedirs(localWhizardLibFolder) ##creates the lib folder
  whizardLibraries = getListOfLibraries(os.path.join(whizardResultFolder, "whizard"))
  copyLibsCall = ["rsync","-avzL"]
  for lib in whizardLibraries:
    copyLibsCall.append(lib)
  copyLibsCall.append(localWhizardLibFolder)
  # NOTE(review): the rsync child process is never waited on or checked, so
  # the library copy may still be in flight when the tarball is created
  # below — confirm whether a .wait()/subprocess.call was intended.
  subprocess.Popen(copyLibsCall, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
  for fileName in folderlist:
    shutil.copy(fileName, localWhizardFolder)
  ##Get the list of md5 sums for all the files in the folder to be tarred
  os.chdir( localWhizardFolder )
  subprocess.call(["find . -type f -exec md5sum {} > ../md5_checksum.md5 \\; && mv ../md5_checksum.md5 ."], shell=True)
  os.chdir(startDir)
  ##Create the Tarball
  gLogger.notice("Creating Tarball...")
  appTar = localWhizardFolder + ".tgz"
  myappTar = tarfile.open(appTar, "w:gz")
  myappTar.add(localWhizardFolderRel)
  myappTar.close()
  # NOTE(review): the tarball is opened in text mode ('r') for hashing and
  # the handle is never closed; 'rb' plus a context manager would be safer.
  md5sum = md5.md5(open( appTar, 'r' ).read()).hexdigest()
  gLogger.notice("...Done")
  # --- register the tarball in the Configuration System ---------------------
  gLogger.notice("Registering new Tarball in CS")
  tarballurl = {}
  av_platforms = gConfig.getSections(softwareSection, [])
  if av_platforms['OK']:
    if platform not in av_platforms['Value']:
      gLogger.error("Platform %s unknown, available are %s." % (platform, ", ".join(av_platforms['Value'])))
      gLogger.error("If yours is missing add it in CS")
      dexit(255)
  else:
    gLogger.error("Could not find all platforms available in CS")
    dexit(255)
  av_apps = gConfig.getSections("%s/%s" % (softwareSection, platform), [])
  if not av_apps['OK']:
    gLogger.error("Could not find all applications available in CS")
    dexit(255)
  if appName.lower() in av_apps['Value']:
    versions = gConfig.getSections("%s/%s/%s" % (softwareSection, platform, appName.lower()), [])
    if not versions['OK']:
      gLogger.error("Could not find all versions available in CS")
      dexit(255)
    if appVersion in versions['Value']:
      gLogger.error('Application %s %s for %s already in CS, nothing to do' % (appName.lower(), appVersion, platform))
      dexit(0)
    else:
      result = diracAdmin.csSetOption("%s/%s/%s/%s/TarBall" % (softwareSection, platform, appName.lower(), appVersion), os.path.basename(appTar))
      if result['OK']:
        modifiedCS = True
        tarballurl = gConfig.getOption("%s/%s/%s/TarBallURL" % (softwareSection, platform, appName.lower()), "")
        if len(tarballurl['Value']) > 0:
          res = upload(tarballurl['Value'], appTar)
          if not res['OK']:
            gLogger.error("Upload to %s failed" % tarballurl['Value'])
            dexit(255)
      result = diracAdmin.csSetOption("%s/%s/%s/%s/Md5Sum" % (softwareSection, platform, appName.lower(), appVersion), md5sum)
      if result['OK']:
        modifiedCS = True
      result = diracAdmin.csSetOption("%s/%s/%s/%s/Dependencies/beam_spectra/version" % (softwareSection, platform, appName.lower(), appVersion), beam_spectra_version)
  else:
    # Application not yet in CS at all: create the whole entry.
    result = diracAdmin.csSetOption("%s/%s/%s/%s/TarBall" % (softwareSection, platform, appName.lower(), appVersion), os.path.basename(appTar))
    if result['OK']:
      modifiedCS = True
      tarballurl = gConfig.getOption("%s/%s/%s/TarBallURL" % (softwareSection, platform, appName.lower()), "")
      if len(tarballurl['Value']) > 0:
        res = upload(tarballurl['Value'], appTar)
        if not res['OK']:
          gLogger.error("Upload to %s failed" % tarballurl['Value'])
          dexit(255)
    result = diracAdmin.csSetOption("%s/%s/%s/%s/Md5Sum" % (softwareSection, platform, appName.lower(), appVersion), md5sum)
    result = diracAdmin.csSetOption("%s/%s/%s/%s/Dependencies/beam_spectra/version" % (softwareSection, platform, appName.lower(), appVersion), beam_spectra_version)
  gLogger.verbose("Done uploading the tar ball")
  os.remove(appTar)
  #Set for all new processes the TarBallURL
  # NOTE(review): if csSetOption failed above, tarballurl is still the empty
  # dict and ['Value'] raises KeyError here — confirm this is acceptable for
  # an operator-driven script.
  for process in inputlist.keys():
    inputlist[process]['TarBallCSPath'] = tarballurl['Value'] + os.path.basename(appTar)
  pl.updateProcessList(inputlist)
  pl.writeProcessList()
  raw_input("Do you want to upload the process list? Press ENTER to proceed or CTRL-C to abort!")
  pl.uploadProcessListToFileCatalog(path_to_process_list, appVersion)
  #Commit the changes if nothing has failed and the CS has been modified
  if modifiedCS:
    result = diracAdmin.csCommitChanges(False)
    gLogger.verbose(result)
  gLogger.notice('All done OK!')
  dexit(0)
class ProcessListComplexTestCase(unittest.TestCase):
  """ Test the different methods of the class, providing a usable CFG """

  def setUp(self):
    """Create a ProcessList (backing file absent) with a 'Processes' section."""
    with patch('%s.os.path.exists' % MODULE_NAME, new=Mock(return_value=False)):
      self.prol = ProcessList('myTestProcess.list')
    self.prol.cfg.createNewSection('Processes')

  def test_updateproclist_and_getters(self):
    """updateProcessList replaces existing entries and the getters see them."""
    self.prol.cfg.createNewSection('Processes/myTestProcDeleteMe')
    self.prol.cfg.setOption('Processes/myTestProcDeleteMe/someRandomOption', True)
    dict_1 = {'CrossSection': 'someCross'}
    dict_1.update(STD_PROC_DICT)
    dict_2 = {'CrossSection': 'some_other_val'}
    dict_2.update(STD_PROC_DICT)
    process_dict = {}
    process_dict['MytestProcess'] = dict_1
    process_dict['myTestProcDeleteMe'] = dict_2
    result = self.prol.updateProcessList(process_dict)
    assertDiracSucceeds(result, self)
    conf = self.prol.cfg
    # The pre-existing section must have been wiped by the update.
    self.assertFalse(conf.existsKey('Processes/myTestProcDeleteMe/someRandomOption'))
    options = ['Processes/MytestProcess/CrossSection',
               'Processes/myTestProcDeleteMe/CrossSection']
    assertEqualsImproved((map(conf.getOption, options)),
                         (['someCross', 'some_other_val']), self)
    assertEqualsImproved(
        (self.prol.getCSPath('myTestProcDeleteMe'),
         self.prol.getInFile('myTestProcDeleteMe'),
         self.prol.existsProcess('myTestProcDeleteMe'),
         self.prol.existsProcess(''),
         self.prol.existsProcess('invalidProcess'),
         self.prol.existsProcess('myTestProcDeleteMeToo')),
        ('/test/cs/path/ball.tar', 'my/file.in', S_OK(True), S_OK(True),
         S_OK(False), S_OK(False)), self)
    assertListContentEquals(self.prol.getProcesses(),
                            ['myTestProcDeleteMe', 'MytestProcess'], self)
    all_processes_dict = self.prol.getProcessesDict()
    assertEqualsImproved(len(all_processes_dict), 2, self)
    assertEqualsImproved(
        ('myTestProcDeleteMe' in all_processes_dict,
         'MytestProcess' in all_processes_dict), (True, True), self)
    self.prol.printProcesses()

  def test_writeproclist(self):
    """Happy path: the CFG dump is written to a temp file and moved in place."""
    # NOTE(review): internal whitespace of this expected string may have been
    # collapsed by formatting — verify against the CFG serializer output.
    expected_write = 'Processes\n{\n mytestprocess123\n {\n TarBallCSPath = /test/cs/path/bal.tarr\n Detail = TestNoDetails\n Generator = mytestGen21\n Model = testmodel3001\n Restrictions = \n InFile = my/file.in\n CrossSection = 0\n }\n}\n'
    self.prol._addEntry('mytestprocess123', {
        'TarBallCSPath': '/test/cs/path/bal.tarr', 'Detail': 'TestNoDetails',
        'Generator': 'mytestGen21', 'Model': 'testmodel3001',
        'Restrictions': '', 'InFile': 'my/file.in'})
    exists_dict = {'/temp/dir': False, '/temp/dir/mytempfile.txt': True,
                   '/my/folder/testpath.xml': True}
    fhandle_mock = Mock()
    # '__builtin__.file' is the Python 2 file type used by writeProcessList.
    with patch('tempfile.mkstemp', new=Mock(return_value=('handle', '/temp/dir/mytempfile.txt'))), \
         patch('__builtin__.file', new=Mock(return_value=fhandle_mock)) as file_mock, \
         patch('os.makedirs') as mkdir_mock, \
         patch('os.path.exists', new=Mock(side_effect=lambda path: exists_dict[path])), \
         patch('shutil.move') as move_mock, \
         patch('os.close') as close_mock:
      assertDiracSucceedsWith_equals(
          self.prol.writeProcessList('/my/folder/testpath.xml'),
          '/my/folder/testpath.xml', self)
      mkdir_mock.assert_called_once_with('/temp/dir')
      file_mock.assert_called_once_with('/temp/dir/mytempfile.txt', 'w')
      fhandle_mock.write.assert_called_once_with(expected_write)
      close_mock.assert_called_once_with('handle')
      move_mock.assert_called_once_with('/temp/dir/mytempfile.txt',
                                        '/my/folder/testpath.xml')

  def test_writeproclist_notwritten(self):
    """writeToFile fails: the temp file exists and must be removed."""
    exists_dict = {'myTmpNameTestme': True}
    cfg_mock = Mock()
    cfg_mock.writeToFile.return_value = False
    self.prol.cfg = cfg_mock
    # No argument to writeProcessList: falls back to self.prol.location.
    self.prol.location = '/my/folder/testpath2.txt'
    with patch('os.close') as close_mock, \
         patch('os.path.exists', new=Mock(side_effect=lambda path: exists_dict[path])), \
         patch('os.remove') as remove_mock, \
         patch('tempfile.mkstemp', new=Mock(return_value=('myhandle', 'myTmpNameTestme'))):
      assertDiracFailsWith(self.prol.writeProcessList(),
                           'failed to write repo', self)
      close_mock.assert_called_once_with('myhandle')
      remove_mock.assert_called_once_with('myTmpNameTestme')

  def test_writeproclist_notwritten_noremove(self):
    """writeToFile fails and the temp file is gone: nothing to remove."""
    exists_dict = {'myTmpNameTestme': False}
    cfg_mock = Mock()
    cfg_mock.writeToFile.return_value = False
    self.prol.cfg = cfg_mock
    with patch('os.close') as close_mock, \
         patch('os.path.exists', new=Mock(side_effect=lambda path: exists_dict[path])), \
         patch('os.remove') as remove_mock, \
         patch('tempfile.mkstemp', new=Mock(return_value=('myhandle', 'myTmpNameTestme'))):
      assertDiracFailsWith(
          self.prol.writeProcessList('/my/folder/testpath2.txt'),
          'failed to write repo', self)
      close_mock.assert_called_once_with('myhandle')
      self.assertFalse(remove_mock.called)

  def test_writeproclist_move_fails(self):
    """shutil.move raising OSError is reported as a write failure."""
    exists_dict = {'/my/folder/testpath2.txt': False}
    cfg_mock = Mock()
    cfg_mock.writeToFile.return_value = True
    self.prol.cfg = cfg_mock
    with patch('os.close') as close_mock, \
         patch('os.path.exists', new=Mock(side_effect=lambda path: exists_dict[path])), \
         patch('os.remove') as remove_mock, \
         patch('tempfile.mkstemp', new=Mock(return_value=('myhandle', 'myTmpNameTestme'))), \
         patch('shutil.move', new=Mock(side_effect=OSError('mytestErr_os'))):
      assertDiracFailsWith(
          self.prol.writeProcessList('/my/folder/testpath2.txt'),
          'failed to write repo', self)
      close_mock.assert_called_once_with('myhandle')
      self.assertFalse(remove_mock.called)

  def test_uploadproclist(self):
    """Happy path: the list is copied to AFS + local path and committed to svn."""
    import sys
    import DIRAC
    datman_mock = Mock()
    datman_mock.removeFile.return_value = S_OK('something')
    datmodule_mock = Mock()
    datmodule_mock.DataManager.return_value = datman_mock
    fileutil_mock = Mock()
    fileutil_mock.upload.return_value = S_OK('something')
    conf_mock = Mock()
    conf_mock.getOption.return_value = S_OK('/local/path/proc.list')
    # The DataManager / FileUtils modules are swapped out wholesale because
    # uploadProcessListToFileCatalog imports them lazily.
    mocked_modules = {'DIRAC.DataManagementSystem.Client.DataManager': datmodule_mock,
                      'ILCDIRAC.Core.Utilities.FileUtils': fileutil_mock}
    module_patcher = patch.dict(sys.modules, mocked_modules)
    module_patcher.start()
    backup_conf = DIRAC.gConfig
    DIRAC.gConfig = conf_mock
    with patch('shutil.copy') as copy_mock, \
         patch('subprocess.call') as proc_mock:
      self.prol.uploadProcessListToFileCatalog(
          '/my/secret/path/processlist.whiz', 'v120')
      assertMockCalls(
          copy_mock,
          [('myTestProcess.list', '/afs/cern.ch/eng/clic/software/whizard/whizard_195/'),
           ('myTestProcess.list', '/local/path/proc.list')], self)
      proc_mock.assert_called_once_with(
          ['svn', 'ci',
           '/afs/cern.ch/eng/clic/software/whizard/whizard_195/proc.list',
           "-m'Process list for whizard version v120'"], shell=False)
    DIRAC.gConfig = backup_conf
    module_patcher.stop()

  def test_uploadproclist_remove_fails(self):
    """Failing removeFile aborts via DIRAC.exit (stubbed to raise)."""
    import sys
    import DIRAC
    datman_mock = Mock()
    datman_mock.removeFile.return_value = S_ERROR('my_test_err')
    datmodule_mock = Mock()
    datmodule_mock.DataManager.return_value = datman_mock
    fileutil_mock = Mock()
    conf_mock = Mock()
    conf_mock.getOption.return_value = S_OK('somepath')
    mocked_modules = {'DIRAC.DataManagementSystem.Client.DataManager': datmodule_mock,
                      'ILCDIRAC.Core.Utilities.FileUtils': fileutil_mock}
    module_patcher = patch.dict(sys.modules, mocked_modules)
    module_patcher.start()
    backup_conf = DIRAC.gConfig
    DIRAC.gConfig = conf_mock
    # abort_test raises KeyboardInterrupt('abort_my_test') in place of exit.
    DIRAC.exit = abort_test
    with self.assertRaises(KeyboardInterrupt) as ki:
      self.prol.uploadProcessListToFileCatalog('asd', 'v1')
    key_interrupt = ki.exception
    assertEqualsImproved(key_interrupt.args, ('abort_my_test', ), self)
    DIRAC.gConfig = backup_conf
    module_patcher.stop()

  def test_uploadproclist_upload_fails(self):
    """Failing upload aborts via DIRAC.exit (stubbed to raise)."""
    import sys
    import DIRAC
    datman_mock = Mock()
    datman_mock.removeFile.return_value = S_OK('something')
    datmodule_mock = Mock()
    datmodule_mock.DataManager.return_value = datman_mock
    fileutil_mock = Mock()
    fileutil_mock.upload.return_value = S_ERROR('something')
    conf_mock = Mock()
    conf_mock.getOption.return_value = S_OK('somepath')
    mocked_modules = {'DIRAC.DataManagementSystem.Client.DataManager': datmodule_mock,
                      'ILCDIRAC.Core.Utilities.FileUtils': fileutil_mock}
    module_patcher = patch.dict(sys.modules, mocked_modules)
    module_patcher.start()
    backup_conf = DIRAC.gConfig
    DIRAC.gConfig = conf_mock
    DIRAC.exit = abort_test
    with self.assertRaises(KeyboardInterrupt) as ki:
      self.prol.uploadProcessListToFileCatalog('asd', 'v1')
    key_interrupt = ki.exception
    assertEqualsImproved(key_interrupt.args, ('abort_my_test', ), self)
    DIRAC.gConfig = backup_conf
    module_patcher.stop()

  def test_uploadproclist_copy_and_commit_fail(self):
    """OSError from copy/svn commit is tolerated (best-effort mirroring)."""
    import sys
    import DIRAC
    datman_mock = Mock()
    datman_mock.removeFile.return_value = S_OK('something')
    datmodule_mock = Mock()
    datmodule_mock.DataManager.return_value = datman_mock
    fileutil_mock = Mock()
    fileutil_mock.upload.return_value = S_OK('something')
    conf_mock = Mock()
    conf_mock.getOption.return_value = S_OK('somepath')
    mocked_modules = {'DIRAC.DataManagementSystem.Client.DataManager': datmodule_mock,
                      'ILCDIRAC.Core.Utilities.FileUtils': fileutil_mock}
    module_patcher = patch.dict(sys.modules, mocked_modules)
    module_patcher.start()
    backup_conf = DIRAC.gConfig
    DIRAC.gConfig = conf_mock
    DIRAC.exit = abort_test
    with patch('shutil.copy', new=Mock(side_effect=OSError('oserr_testme_keeprunning'))), \
         patch('subprocess.call', new=Mock(side_effect=OSError('subproc_test_err'))):
      # Must not raise despite both side effects firing.
      self.prol.uploadProcessListToFileCatalog(
          '/my/secret/path/processlist.whiz', 'v120')
    DIRAC.gConfig = backup_conf
    module_patcher.stop()

  def test_uploadproclist_skip_copy(self):
    """Empty local-path option: neither copy nor svn commit may be attempted."""
    import sys
    import DIRAC
    datman_mock = Mock()
    datman_mock.removeFile.return_value = S_OK('something')
    datmodule_mock = Mock()
    datmodule_mock.DataManager.return_value = datman_mock
    fileutil_mock = Mock()
    fileutil_mock.upload.return_value = S_OK('something')
    conf_mock = Mock()
    conf_mock.getOption.return_value = S_OK('')
    mocked_modules = {'DIRAC.DataManagementSystem.Client.DataManager': datmodule_mock,
                      'ILCDIRAC.Core.Utilities.FileUtils': fileutil_mock}
    module_patcher = patch.dict(sys.modules, mocked_modules)
    module_patcher.start()
    backup_conf = DIRAC.gConfig
    DIRAC.gConfig = conf_mock
    DIRAC.exit = abort_test
    # Side effects would blow up the test if the code path were taken.
    with patch('shutil.copy', new=Mock(side_effect=IOError('dont_call_me'))), \
         patch('subprocess.call', new=Mock(side_effect=IOError('dont_call_me_either'))):
      self.prol.uploadProcessListToFileCatalog(
          '/my/secret/path/processlist.whiz', 'v120')
    DIRAC.gConfig = backup_conf
    module_patcher.stop()
def _getProductionSummary():
  """Write an HTML summary (tables.html) for the selected productions.

  Selects productions either from the explicit IDs given on the command line
  or by status/type query against the TransformationSystem, gathers file and
  directory metadata from the FileCatalog, computes statistics / luminosity /
  cross sections, and emits one HTML table per detector and production type.

  Exits the interpreter through dexit(0); never returns a value.
  """
  clip = _Params()
  clip.registerSwitch()
  Script.parseCommandLine()
  # Imports deferred until after parseCommandLine, as DIRAC requires.
  from ILCDIRAC.Core.Utilities.HTML import Table
  from ILCDIRAC.Core.Utilities.ProcessList import ProcessList
  from DIRAC.TransformationSystem.Client.TransformationClient import TransformationClient
  from DIRAC.Resources.Catalog.FileCatalogClient import FileCatalogClient
  from DIRAC import gConfig, gLogger
  prod = clip.prod
  full_detail = clip.full_det
  fc = FileCatalogClient()
  processlist = gConfig.getValue('/LocalSite/ProcessListPath')
  prl = ProcessList(processlist)
  processesdict = prl.getProcessesDict()
  trc = TransformationClient()
  prodids = []
  if not prod:
    # No explicit IDs: query the TransformationSystem by status (and type).
    conddict = {}
    conddict['Status'] = clip.statuses
    if clip.ptypes:
      conddict['Type'] = clip.ptypes
    res = trc.getTransformations(conddict)
    if res['OK']:
      for transfs in res['Value']:
        prodids.append(transfs['TransformationID'])
  else:
    prodids.extend(prod)
  metadata = []
  gLogger.info("Will run on prods %s" % str(prodids))
  for prodID in prodids:
    if prodID < clip.minprod:
      continue
    meta = {}
    meta['ProdID'] = prodID
    res = trc.getTransformation(str(prodID))
    if not res['OK']:
      gLogger.error("Error getting transformation %s" % prodID)
      continue
    prodtype = res['Value']['Type']
    proddetail = res['Value']['Description']
    # Map production type to the Datatype metadata used for the file query.
    if prodtype == 'MCReconstruction' or prodtype == 'MCReconstruction_Overlay':
      meta['Datatype'] = 'DST'
    elif prodtype == 'MCGeneration':
      meta['Datatype'] = 'gen'
    elif prodtype == 'MCSimulation':
      meta['Datatype'] = 'SIM'
    elif prodtype in ['Split', 'Merge']:
      gLogger.warn("Invalid query for %s productions" % prodtype)
      continue
    else:
      gLogger.error("Unknown production type %s" % prodtype)
      continue
    res = fc.findFilesByMetadata(meta)
    if not res['OK']:
      gLogger.error(res['Message'])
      continue
    lfns = res['Value']
    nb_files = len(lfns)
    path = ""
    if not len(lfns):
      gLogger.warn("No files found for prod %s" % prodID)
      continue
    path = os.path.dirname(lfns[0])
    res = fc.getDirectoryUserMetadata(path)
    if not res['OK']:
      gLogger.warn('No meta data found for %s' % path)
      continue
    dirmeta = {}
    dirmeta['proddetail'] = proddetail
    dirmeta['prodtype'] = prodtype
    dirmeta['nb_files'] = nb_files
    dirmeta.update(res['Value'])
    lumi = 0.
    nbevts = 0
    addinfo = None
    files = 0
    xsec = 0.0
    if not full_detail:
      # Fast path: inspect only the first file and extrapolate.
      lfn = lfns[0]
      info = _getFileInfo(lfn)
      nbevts = info[1] * len(lfns)
      lumi = info[0] * len(lfns)
      addinfo = info[2]
      if 'xsection' in addinfo:
        if 'sum' in addinfo['xsection']:
          if 'xsection' in addinfo['xsection']['sum']:
            xsec += addinfo['xsection']['sum']['xsection']
            files += 1
    else:
      for lfn in lfns:
        info = _getFileInfo(lfn)
        lumi += info[0]
        nbevts += info[1]
        addinfo = info[2]
        if 'xsection' in addinfo:
          if 'sum' in addinfo['xsection']:
            if 'xsection' in addinfo['xsection']['sum']:
              xsec += addinfo['xsection']['sum']['xsection']
              files += 1
    if not lumi:
      # The files carry no luminosity: fall back to the deepest ancestor
      # generation (up to 4 levels back) and accumulate from there.
      xsec = 0
      files = 0
      depthDict = {}
      depSet = set()
      res = fc.getFileAncestors(lfns, [1, 2, 3, 4])
      temp_ancestorlist = []
      if res['OK']:
        for lfn, ancestorsDict in res['Value']['Successful'].items():
          for ancestor, dep in ancestorsDict.items():
            depthDict.setdefault(dep, [])
            if ancestor not in temp_ancestorlist:
              depthDict[dep].append(ancestor)
              depSet.add(dep)
              temp_ancestorlist.append(ancestor)
      depList = list(depSet)
      depList.sort()
      # FIX: guard against an empty ancestor set, which previously raised
      # IndexError on depList[-1].
      if depList:
        for ancestor in depthDict[depList[-1]]:
          info = _getFileInfo(ancestor)
          lumi += info[0]
          addinfo = info[2]
          if 'xsection' in addinfo:
            if 'sum' in addinfo['xsection']:
              if 'xsection' in addinfo['xsection']['sum']:
                xsec += addinfo['xsection']['sum']['xsection']
                files += 1
    if xsec and files:
      xsec /= files
      dirmeta['CrossSection'] = xsec
    else:
      dirmeta['CrossSection'] = 0.0
    if nbevts:
      dirmeta['NumberOfEvents'] = nbevts
    if 'NumberOfEvents' not in dirmeta:
      dirmeta['NumberOfEvents'] = 0
    dirmeta['detail'] = ''
    # FIX: 'detail' was left unbound (NameError, or a stale value from the
    # previous loop iteration) when the event type was present in the process
    # list but had no 'Detail' entry. Default to the event type itself and
    # only override when a Detail is available.
    detail = dirmeta['EvtType']
    if dirmeta['EvtType'] in processesdict and 'Detail' in processesdict[dirmeta['EvtType']]:
      detail = processesdict[dirmeta['EvtType']]['Detail']
    if not prodtype == 'MCGeneration':
      # Non-generation productions record their parent production ID.
      res = trc.getTransformationInputDataQuery(str(prodID))
      if res['OK']:
        if 'ProdID' in res['Value']:
          dirmeta['MomProdID'] = res['Value']['ProdID']
    if 'MomProdID' not in dirmeta:
      dirmeta['MomProdID'] = 0
    dirmeta['detail'] = _translate(detail)
    metadata.append(dirmeta)
  # --- group the collected channels per detector and production type --------
  detectors = {}
  detectors['ILD'] = {}
  corres = {"MCGeneration": 'gen', "MCSimulation": 'SIM',
            "MCReconstruction": "REC", "MCReconstruction_Overlay": "REC"}
  detectors['ILD']['SIM'] = []
  detectors['ILD']['REC'] = []
  detectors['SID'] = {}
  detectors['SID']['SIM'] = []
  detectors['SID']['REC'] = []
  detectors['sid'] = {}
  detectors['sid']['SIM'] = []
  detectors['sid']['REC'] = []
  detectors['gen'] = []
  for channel in metadata:
    if 'DetectorType' not in channel:
      # Generator-level production: no detector attached.
      detectors['gen'].append((channel['detail'],
                               channel['Energy'],
                               channel['ProdID'],
                               channel['nb_files'],
                               channel['NumberOfEvents'] / channel['nb_files'],
                               channel['NumberOfEvents'],
                               channel['CrossSection'],
                               str(channel['proddetail'])))
    else:
      if not channel['DetectorType'] in detectors:
        gLogger.error("This is unknown detector", channel['DetectorType'])
        continue
      detectors[channel['DetectorType']][corres[channel['prodtype']]].append((
          channel['detail'],
          channel['Energy'],
          channel['DetectorType'],
          channel['ProdID'],
          channel['nb_files'],
          channel['NumberOfEvents'] / channel['nb_files'],
          channel['NumberOfEvents'],
          channel['CrossSection'],
          channel['MomProdID'],
          str(channel['proddetail'])))
  # --- emit the HTML report --------------------------------------------------
  with open("tables.html", "w") as of:
    of.write("""<!DOCTYPE html>
<html>
 <head>
<title> Production summary </title>
</head>
<body>
""")
    if len(detectors['gen']):
      of.write("<h1>gen prods</h1>\n")
      table = Table(header_row=('Channel', 'Energy', 'ProdID', 'Tasks',
                                'Average Evts/task', 'Statistics',
                                'Cross Section (fb)', 'Comment'))
      for item in detectors['gen']:
        table.rows.append(item)
      of.write(str(table))
      gLogger.info("Gen prods")
      gLogger.info(str(table))
    if len(detectors['ILD']):
      of.write("<h1>ILD prods</h1>\n")
      for ptype in detectors['ILD'].keys():
        if len(detectors['ILD'][ptype]):
          of.write("<h2>%s</h2>\n" % ptype)
          table = Table(header_row=('Channel', 'Energy', 'Detector', 'ProdID',
                                    'Number of Files', 'Events/File',
                                    'Statistics', 'Cross Section (fb)',
                                    'Origin ProdID', 'Comment'))
          for item in detectors['ILD'][ptype]:
            table.rows.append(item)
          of.write(str(table))
          gLogger.info("ILC CDR prods %s" % ptype)
          gLogger.info(str(table))
    if len(detectors['SID']):
      of.write("<h1>SID prods</h1>\n")
      for ptype in detectors['SID'].keys():
        if len(detectors['SID'][ptype]):
          of.write("<h2>%s</h2>\n" % ptype)
          table = Table(header_row=('Channel', 'Energy', 'Detector', 'ProdID',
                                    'Number of Files', 'Events/File',
                                    'Statistics', 'Cross Section (fb)',
                                    'Origin ProdID', 'Comment'))
          for item in detectors['SID'][ptype]:
            table.rows.append(item)
          of.write(str(table))
          gLogger.info("SID CDR prods %s" % ptype)
          gLogger.info(str(table))
    if len(detectors['sid']):
      of.write("<h1>sid dbd prods</h1>\n")
      for ptype in detectors['sid'].keys():
        if len(detectors['sid'][ptype]):
          of.write("<h2>%s</h2>\n" % ptype)
          table = Table(header_row=('Channel', 'Energy', 'Detector', 'ProdID',
                                    'Number of Files', 'Events/File',
                                    'Statistics', 'Cross Section (fb)',
                                    'Origin ProdID', 'Comment'))
          for item in detectors['sid'][ptype]:
            table.rows.append(item)
          of.write(str(table))
          gLogger.info("sid DBD prods %s" % ptype)
          gLogger.info(str(table))
    of.write("""
</body>
</html>
""")
  gLogger.notice("Check ./tables.html in any browser for the results")
  dexit(0)
def setUp(self):
  """Each test gets a fresh ProcessList whose backing file appears absent."""
  exists_target = '%s.os.path.exists' % MODULE_NAME
  absent = Mock(return_value=False)
  with patch(exists_target, new=absent):
    self.prol = ProcessList('myTestProcess.list')
class ProcessListComplexTestCase( unittest.TestCase ):
  """ Test the different methods of the class, providing a usable CFG """

  def setUp( self ):
    """Create a ProcessList with no backing file and give it an empty 'Processes' section."""
    # os.path.exists is forced to False so the constructor does not try to load a real file.
    with patch('%s.os.path.exists' % MODULE_NAME, new=Mock(return_value=False)):
      self.prol = ProcessList( 'myTestProcess.list' )
    self.prol.cfg.createNewSection( 'Processes' )

  def test_updateproclist_and_getters( self ):
    """updateProcessList should replace pre-existing process sections; then exercise the getters."""
    # Pre-existing section with a stray option -- the update is expected to drop it.
    self.prol.cfg.createNewSection( 'Processes/myTestProcDeleteMe' )
    self.prol.cfg.setOption( 'Processes/myTestProcDeleteMe/someRandomOption', True )
    dict_1 = { 'CrossSection' : 'someCross' }
    dict_1.update( STD_PROC_DICT )
    dict_2 = { 'CrossSection' : 'some_other_val' }
    dict_2.update( STD_PROC_DICT )
    process_dict = {}
    process_dict[ 'MytestProcess' ] = dict_1
    process_dict[ 'myTestProcDeleteMe' ] = dict_2
    result = self.prol.updateProcessList( process_dict )
    assertDiracSucceeds( result, self )
    conf = self.prol.cfg
    # The stray option must be gone after the update.
    self.assertFalse( conf.existsKey( 'Processes/myTestProcDeleteMe/someRandomOption' ) )
    options = [ 'Processes/MytestProcess/CrossSection', 'Processes/myTestProcDeleteMe/CrossSection' ]
    assertEqualsImproved( ( map( conf.getOption, options ) ), ( [ 'someCross', 'some_other_val' ] ), self )
    # Expected CS path / infile values presumably come from STD_PROC_DICT (defined elsewhere) -- confirm there.
    assertEqualsImproved( ( self.prol.getCSPath( 'myTestProcDeleteMe' ), self.prol.getInFile( 'myTestProcDeleteMe' ),
                            self.prol.existsProcess( 'myTestProcDeleteMe' ), self.prol.existsProcess( '' ),
                            self.prol.existsProcess( 'invalidProcess' ), self.prol.existsProcess( 'myTestProcDeleteMeToo' ) ),
                          ( '/test/cs/path/ball.tar', 'my/file.in', S_OK(True), S_OK(True), S_OK(False), S_OK(False) ), self )
    assertListContentEquals( self.prol.getProcesses(), [ 'myTestProcDeleteMe', 'MytestProcess' ], self )
    all_processes_dict = self.prol.getProcessesDict()
    assertEqualsImproved( len(all_processes_dict), 2, self )
    assertEqualsImproved( ('myTestProcDeleteMe' in all_processes_dict, 'MytestProcess' in all_processes_dict),
                          ( True, True ), self )
    self.prol.printProcesses()

  def test_writeproclist( self ):
    """writeProcessList serialises the CFG to a temp file and moves it to the target path."""
    # Exact serialisation expected for the single entry added below.
    expected_write = 'Processes\n{\n  mytestprocess123\n  {\n    TarBallCSPath = /test/cs/path/bal.tarr\n    Detail = TestNoDetails\n    Generator = mytestGen21\n    Model = testmodel3001\n    Restrictions = \n    InFile = my/file.in\n    CrossSection = 0\n  }\n}\n'
    self.prol._addEntry( 'mytestprocess123', { 'TarBallCSPath' : '/test/cs/path/bal.tarr', 'Detail' : 'TestNoDetails',
                                               'Generator' : 'mytestGen21', 'Model' : 'testmodel3001',
                                               'Restrictions' : '', 'InFile' : 'my/file.in' } )
    # Path -> exists answer; '/temp/dir' missing forces the makedirs branch.
    exists_dict = { '/temp/dir' : False, '/temp/dir/mytempfile.txt' : True, '/my/folder/testpath.xml' : True }
    fhandle_mock = Mock()
    file_mock = Mock(return_value=fhandle_mock)
    # '__builtin__' patches are Python 2: both file() and open() return the same mocked handle.
    with patch('tempfile.mkstemp', new=Mock(return_value=('handle', '/temp/dir/mytempfile.txt'))), \
         patch('__builtin__.file', new=file_mock), \
         patch('__builtin__.open', new=file_mock), \
         patch('os.makedirs') as mkdir_mock, \
         patch('os.path.exists', new=Mock(side_effect=lambda path: exists_dict[path])), \
         patch('shutil.move') as move_mock, \
         patch('os.close') as close_mock:
      assertDiracSucceedsWith_equals( self.prol.writeProcessList( '/my/folder/testpath.xml' ),
                                      '/my/folder/testpath.xml', self )
      mkdir_mock.assert_called_once_with( '/temp/dir' )
      file_mock.assert_called_once_with( '/temp/dir/mytempfile.txt', 'w' )
      fhandle_mock.write.assert_called_once_with( expected_write )
      close_mock.assert_called_once_with( 'handle' )
      move_mock.assert_called_once_with( '/temp/dir/mytempfile.txt', '/my/folder/testpath.xml' )

  def test_writeproclist_notwritten( self ):
    """If CFG.writeToFile fails, the temp file is removed and an error result is returned."""
    exists_dict = { 'myTmpNameTestme' : True }
    cfg_mock = Mock()
    cfg_mock.writeToFile.return_value = False
    self.prol.cfg = cfg_mock
    # No argument passed to writeProcessList below: it falls back to self.prol.location.
    self.prol.location = '/my/folder/testpath2.txt'
    with patch('os.close') as close_mock, \
         patch('os.path.exists', new=Mock(side_effect=lambda path: exists_dict[path])), \
         patch('os.remove') as remove_mock, \
         patch('tempfile.mkstemp', new=Mock(return_value=('myhandle', 'myTmpNameTestme'))):
      assertDiracFailsWith( self.prol.writeProcessList(), 'failed to write repo', self )
      close_mock.assert_called_once_with( 'myhandle' )
      remove_mock.assert_called_once_with( 'myTmpNameTestme')

  def test_writeproclist_notwritten_noremove( self ):
    """Write failure with the temp file already gone: os.remove must not be called."""
    exists_dict = { 'myTmpNameTestme' : False }
    cfg_mock = Mock()
    cfg_mock.writeToFile.return_value = False
    self.prol.cfg = cfg_mock
    with patch('os.close') as close_mock, \
         patch('os.path.exists', new=Mock(side_effect=lambda path: exists_dict[path])), \
         patch('os.remove') as remove_mock, \
         patch('tempfile.mkstemp', new=Mock(return_value=('myhandle', 'myTmpNameTestme'))):
      assertDiracFailsWith( self.prol.writeProcessList( '/my/folder/testpath2.txt' ), 'failed to write repo', self )
      close_mock.assert_called_once_with( 'myhandle' )
      self.assertFalse( remove_mock.called )

  def test_writeproclist_move_fails( self ):
    """shutil.move raising OSError is reported as a write failure; no remove is attempted."""
    exists_dict = { '/my/folder/testpath2.txt' : False }
    cfg_mock = Mock()
    cfg_mock.writeToFile.return_value = True
    self.prol.cfg = cfg_mock
    with patch('os.close') as close_mock, \
         patch('os.path.exists', new=Mock(side_effect=lambda path: exists_dict[path])), \
         patch('os.remove') as remove_mock, \
         patch('tempfile.mkstemp', new=Mock(return_value=('myhandle', 'myTmpNameTestme'))), \
         patch('shutil.move', new=Mock(side_effect=OSError('mytestErr_os'))):
      assertDiracFailsWith( self.prol.writeProcessList( '/my/folder/testpath2.txt' ), 'failed to write repo', self )
      close_mock.assert_called_once_with( 'myhandle' )
      self.assertFalse( remove_mock.called )

  def test_uploadproclist( self ):
    """Successful upload: list copied to AFS and to the configured local path, then committed via svn."""
    import sys
    import DIRAC
    datman_mock = Mock()
    datman_mock.removeFile.return_value = S_OK('something')
    datmodule_mock = Mock()
    datmodule_mock.DataManager.return_value = datman_mock
    fileutil_mock = Mock()
    fileutil_mock.upload.return_value = S_OK('something')
    conf_mock = Mock()
    conf_mock.getOption.return_value = S_OK( '/local/path/proc.list' )
    # Inject mocks into sys.modules so the lazy imports inside
    # uploadProcessListToFileCatalog resolve to them.
    mocked_modules = { 'DIRAC.DataManagementSystem.Client.DataManager' : datmodule_mock,
                       'ILCDIRAC.Core.Utilities.FileUtils' : fileutil_mock }
    module_patcher = patch.dict( sys.modules, mocked_modules )
    module_patcher.start()
    backup_conf = DIRAC.gConfig
    DIRAC.gConfig = conf_mock
    with patch('shutil.copy') as copy_mock, \
         patch('subprocess.call') as proc_mock:
      self.prol.uploadProcessListToFileCatalog( '/my/secret/path/processlist.whiz', 'v120' )
      assertMockCalls( copy_mock, [ ( 'myTestProcess.list', '/afs/cern.ch/eng/clic/software/whizard/whizard_195/' ),
                                    ( 'myTestProcess.list', '/local/path/proc.list' ) ], self )
      proc_mock.assert_called_once_with( [ 'svn', 'ci', '/afs/cern.ch/eng/clic/software/whizard/whizard_195/proc.list',
                                           "-m'Process list for whizard version v120'" ], shell=False )
    DIRAC.gConfig = backup_conf
    module_patcher.stop()

  def test_uploadproclist_remove_fails( self ):
    """Failing removeFile aborts via DIRAC.exit (abort_test raises KeyboardInterrupt, as asserted below)."""
    import sys
    import DIRAC
    datman_mock = Mock()
    datman_mock.removeFile.return_value = S_ERROR('my_test_err')
    datmodule_mock = Mock()
    datmodule_mock.DataManager.return_value = datman_mock
    fileutil_mock = Mock()
    conf_mock = Mock()
    conf_mock.getOption.return_value = S_OK( 'somepath' )
    mocked_modules = { 'DIRAC.DataManagementSystem.Client.DataManager' : datmodule_mock,
                       'ILCDIRAC.Core.Utilities.FileUtils' : fileutil_mock }
    module_patcher = patch.dict( sys.modules, mocked_modules )
    module_patcher.start()
    backup_conf = DIRAC.gConfig
    DIRAC.gConfig = conf_mock
    DIRAC.exit = abort_test  # NOTE(review): DIRAC.exit is replaced but never restored -- leaks into later tests.
    with self.assertRaises( KeyboardInterrupt ) as ki:
      self.prol.uploadProcessListToFileCatalog( 'asd', 'v1' )
    key_interrupt = ki.exception
    assertEqualsImproved( key_interrupt.args, ( 'abort_my_test', ), self )
    DIRAC.gConfig = backup_conf
    module_patcher.stop()

  def test_uploadproclist_upload_fails( self ):
    """Failing FileUtils.upload likewise aborts through DIRAC.exit."""
    import sys
    import DIRAC
    datman_mock = Mock()
    datman_mock.removeFile.return_value = S_OK('something')
    datmodule_mock = Mock()
    datmodule_mock.DataManager.return_value = datman_mock
    fileutil_mock = Mock()
    fileutil_mock.upload.return_value = S_ERROR('something')
    conf_mock = Mock()
    conf_mock.getOption.return_value = S_OK( 'somepath' )
    mocked_modules = { 'DIRAC.DataManagementSystem.Client.DataManager' : datmodule_mock,
                       'ILCDIRAC.Core.Utilities.FileUtils' : fileutil_mock }
    module_patcher = patch.dict( sys.modules, mocked_modules )
    module_patcher.start()
    backup_conf = DIRAC.gConfig
    DIRAC.gConfig = conf_mock
    DIRAC.exit = abort_test  # NOTE(review): not restored afterwards.
    with self.assertRaises( KeyboardInterrupt ) as ki:
      self.prol.uploadProcessListToFileCatalog( 'asd', 'v1' )
    key_interrupt = ki.exception
    assertEqualsImproved( key_interrupt.args, ( 'abort_my_test', ), self )
    DIRAC.gConfig = backup_conf
    module_patcher.stop()

  def test_uploadproclist_copy_and_commit_fail( self ):
    """OSError from shutil.copy and subprocess.call must be swallowed -- the upload keeps running."""
    import sys
    import DIRAC
    datman_mock = Mock()
    datman_mock.removeFile.return_value = S_OK('something')
    datmodule_mock = Mock()
    datmodule_mock.DataManager.return_value = datman_mock
    fileutil_mock = Mock()
    fileutil_mock.upload.return_value = S_OK('something')
    conf_mock = Mock()
    conf_mock.getOption.return_value = S_OK( 'somepath' )
    mocked_modules = { 'DIRAC.DataManagementSystem.Client.DataManager' : datmodule_mock,
                       'ILCDIRAC.Core.Utilities.FileUtils' : fileutil_mock }
    module_patcher = patch.dict( sys.modules, mocked_modules )
    module_patcher.start()
    backup_conf = DIRAC.gConfig
    DIRAC.gConfig = conf_mock
    DIRAC.exit = abort_test  # NOTE(review): not restored afterwards.
    with patch('shutil.copy', new=Mock(side_effect=OSError('oserr_testme_keeprunning'))), \
         patch('subprocess.call', new=Mock(side_effect=OSError('subproc_test_err'))):
      self.prol.uploadProcessListToFileCatalog( '/my/secret/path/processlist.whiz', 'v120' )
    DIRAC.gConfig = backup_conf
    module_patcher.stop()

  def test_uploadproclist_skip_copy( self ):
    """Empty configured path skips the local copy/commit: the IOError mocks would fail the test if called."""
    import sys
    import DIRAC
    datman_mock = Mock()
    datman_mock.removeFile.return_value = S_OK('something')
    datmodule_mock = Mock()
    datmodule_mock.DataManager.return_value = datman_mock
    fileutil_mock = Mock()
    fileutil_mock.upload.return_value = S_OK('something')
    conf_mock = Mock()
    conf_mock.getOption.return_value = S_OK('')
    mocked_modules = { 'DIRAC.DataManagementSystem.Client.DataManager' : datmodule_mock,
                       'ILCDIRAC.Core.Utilities.FileUtils' : fileutil_mock }
    module_patcher = patch.dict( sys.modules, mocked_modules )
    module_patcher.start()
    backup_conf = DIRAC.gConfig
    DIRAC.gConfig = conf_mock
    DIRAC.exit = abort_test  # NOTE(review): not restored afterwards.
    with patch('shutil.copy', new=Mock(side_effect=IOError('dont_call_me'))), \
         patch('subprocess.call', new=Mock(side_effect=IOError('dont_call_me_either'))):
      self.prol.uploadProcessListToFileCatalog( '/my/secret/path/processlist.whiz', 'v120' )
    DIRAC.gConfig = backup_conf
    module_patcher.stop()
def setUp(self):
  """Build the fixture: a ProcessList without a backing file, plus an empty 'Processes' section."""
  # Report the process-list file as missing so construction starts from an empty CFG.
  fake_exists = Mock(return_value=False)
  with patch('%s.os.path.exists' % MODULE_NAME, new=fake_exists):
    self.prol = ProcessList('myTestProcess.list')
  self.prol.cfg.createNewSection('Processes')