def test_constructor(self):
  """Check ProcessList construction when the backing file appears to exist.

  Two cases are exercised:

  * ``loadFromFile`` is mocked to succeed without adding anything: the
    'Processes' section must still be present afterwards and the list is OK.
  * ``loadFromFile`` is replaced by ``replace_load`` which fills the CFG with
    its own sections (including 'Processes').
  """
  import DIRAC
  # os.path.exists -> True so the constructor takes the load-from-file path.
  with patch('%s.os.path.exists' % MODULE_NAME, new=Mock(return_value=True)), \
       patch.object(DIRAC.Core.Utilities.CFG.CFG, 'loadFromFile', return_value=True):
    empty_process_list = ProcessList('existent_location')
    self.assertTrue(empty_process_list.cfg.existsKey('Processes'))
    self.assertTrue(empty_process_list.isOK())

  def replace_load(self, _):  # pylint: disable=missing-docstring
    # With autospec=True the mock passes the CFG instance as `self`, so these
    # sections are created on the ProcessList's own CFG object.
    self.createNewSection('myTestSection', 'testComment')
    self.createNewSection('Processes', 'testProcesses')
  with patch('%s.os.path.exists' % MODULE_NAME, new=Mock(return_value=True)), \
       patch.object(DIRAC.Core.Utilities.CFG.CFG, 'loadFromFile', side_effect=replace_load, autospec=True):
    other_process_list = ProcessList('existent_location')
    self.assertTrue(other_process_list.cfg.existsKey('Processes'))
    # NOTE(review): this asserts on self.prol (created in setUp), not on
    # other_process_list — confirm that is intentional and not a typo.
    self.assertFalse(self.prol.isOK())
def obtainProcessList(self):
  """Fetch the process list from storage when no whizard.in was provided.

  The CS option /ProcessList/Location gives the storage path; the file is
  downloaded only if a local copy (same basename, in the CWD) is missing.

  :return: S_OK(), S_ERROR()
  """
  processlistloc = self.ops.getValue("/ProcessList/Location", "")
  if not processlistloc:
    return S_ERROR("No process list found")
  localName = os.path.basename(processlistloc)
  # Only hit the storage element when the file is not already here.
  if not os.path.exists(localName):
    fetchRes = self.datMan.getFile(processlistloc)
    if not fetchRes['OK']:
      self.log.error('Could not get processlist: %s' % fetchRes['Message'])
      return fetchRes
  self.processlist = ProcessList(localName)
  return S_OK()
def getProcessList(self):
  """Get the :mod:`ProcessList <ILCDIRAC.Core.Utilities.ProcessList.ProcessList>` needed by
  :mod:`Whizard <ILCDIRAC.Interfaces.API.NewInterface.Applications.Whizard>`.

  Uses /LocalSite/ProcessListPath when set; otherwise downloads the file
  pointed to by /ProcessList/Location into the current directory.

  :return: process list object
  """
  processlist = gConfig.getValue("/LocalSite/ProcessListPath", "")
  if not processlist:
    gLogger.info('Will download the process list locally. To gain time, please put it somewhere and add to your dirac.cfg the entry /LocalSite/ProcessListPath pointing to the file')
    remotePath = self.ops.getValue("/ProcessList/Location", "")
    if remotePath:
      # Best effort download; ProcessList is built from the local basename.
      DataManager().getFile(remotePath)
      processlist = os.path.basename(remotePath)
    else:
      gLogger.error("Could not get path to process list")
      processlist = ""
  self.processList = ProcessList(processlist)
  return self.processList
def _getProductionSummary():
  """Build an HTML summary (``tables.html``) of the selected productions.

  For every production ID (from the command line or a TransformationClient
  query) the matching files are looked up in the FileCatalog, per-file info
  (luminosity, events, cross section) is accumulated, and one row per channel
  is appended to per-detector tables written to ./tables.html.
  Exits the process via dexit().
  """
  clip = _Params()
  clip.registerSwitch()
  Script.parseCommandLine()
  from ILCDIRAC.Core.Utilities.HTML import Table
  from ILCDIRAC.Core.Utilities.ProcessList import ProcessList
  from DIRAC.TransformationSystem.Client.TransformationClient import TransformationClient
  from DIRAC.Resources.Catalog.FileCatalogClient import FileCatalogClient
  from DIRAC import gConfig, gLogger
  prod = clip.prod
  full_detail = clip.full_det
  fc = FileCatalogClient()
  processlist = gConfig.getValue('/LocalSite/ProcessListPath')
  prl = ProcessList(processlist)
  processesdict = prl.getProcessesDict()
  trc = TransformationClient()
  prodids = []
  # No explicit prod IDs given: query transformations by status (and type).
  if not prod:
    conddict = {}
    conddict['Status'] = clip.statuses
    if clip.ptypes:
      conddict['Type'] = clip.ptypes
    res = trc.getTransformations(conddict)
    if res['OK']:
      for transfs in res['Value']:
        prodids.append(transfs['TransformationID'])
  else:
    prodids.extend(prod)
  metadata = []
  gLogger.info("Will run on prods %s" % str(prodids))
  for prodID in prodids:
    if prodID < clip.minprod:
      continue
    meta = {}
    meta['ProdID'] = prodID
    res = trc.getTransformation(str(prodID))
    if not res['OK']:
      gLogger.error("Error getting transformation %s" % prodID)
      continue
    prodtype = res['Value']['Type']
    proddetail = res['Value']['Description']
    # Map the production type to the Datatype metadata used for the FC query.
    if prodtype == 'MCReconstruction' or prodtype == 'MCReconstruction_Overlay':
      meta['Datatype'] = 'DST'
    elif prodtype == 'MCGeneration':
      meta['Datatype'] = 'gen'
    elif prodtype == 'MCSimulation':
      meta['Datatype'] = 'SIM'
    elif prodtype in ['Split', 'Merge']:
      gLogger.warn("Invalid query for %s productions" % prodtype)
      continue
    else:
      gLogger.error("Unknown production type %s" % prodtype)
      continue
    res = fc.findFilesByMetadata(meta)
    if not res['OK']:
      gLogger.error(res['Message'])
      continue
    lfns = res['Value']
    nb_files = len(lfns)
    path = ""
    if not len(lfns):
      gLogger.warn("No files found for prod %s" % prodID)
      continue
    # All files of a prod share a directory; use its user metadata.
    path = os.path.dirname(lfns[0])
    res = fc.getDirectoryUserMetadata(path)
    if not res['OK']:
      gLogger.warn('No meta data found for %s' % path)
      continue
    dirmeta = {}
    dirmeta['proddetail'] = proddetail
    dirmeta['prodtype'] = prodtype
    dirmeta['nb_files'] = nb_files
    dirmeta.update(res['Value'])
    lumi = 0.
    nbevts = 0
    addinfo = None
    files = 0
    xsec = 0.0
    if not full_detail:
      # Fast path: sample one file and extrapolate to the whole prod.
      lfn = lfns[0]
      info = _getFileInfo(lfn)
      nbevts = info[1] * len(lfns)
      lumi = info[0] * len(lfns)
      addinfo = info[2]
      if 'xsection' in addinfo:
        if 'sum' in addinfo['xsection']:
          if 'xsection' in addinfo['xsection']['sum']:
            xsec += addinfo['xsection']['sum']['xsection']
            files += 1
    else:
      # Full detail: accumulate over every file of the production.
      for lfn in lfns:
        info = _getFileInfo(lfn)
        lumi += info[0]
        nbevts += info[1]
        addinfo = info[2]
        if 'xsection' in addinfo:
          if 'sum' in addinfo['xsection']:
            if 'xsection' in addinfo['xsection']['sum']:
              xsec += addinfo['xsection']['sum']['xsection']
              files += 1
    if not lumi:
      # No luminosity on the files themselves: climb the ancestry (up to
      # depth 4) and take the cross sections from the deepest ancestors.
      xsec = 0
      files = 0
      depthDict = {}
      depSet = set()
      res = fc.getFileAncestors(lfns, [1, 2, 3, 4])
      temp_ancestorlist = []
      if res['OK']:
        for lfn, ancestorsDict in res['Value']['Successful'].items():
          for ancestor, dep in ancestorsDict.items():
            depthDict.setdefault(dep, [])
            if ancestor not in temp_ancestorlist:
              depthDict[dep].append(ancestor)
              depSet.add(dep)
              temp_ancestorlist.append(ancestor)
      depList = list(depSet)
      depList.sort()
      for ancestor in depthDict[depList[-1]]:
        info = _getFileInfo(ancestor)
        lumi += info[0]
        addinfo = info[2]
        if 'xsection' in addinfo:
          if 'sum' in addinfo['xsection']:
            if 'xsection' in addinfo['xsection']['sum']:
              xsec += addinfo['xsection']['sum']['xsection']
              files += 1
    # Average the accumulated cross section over the contributing files.
    if xsec and files:
      xsec /= files
      dirmeta['CrossSection'] = xsec
    else:
      dirmeta['CrossSection'] = 0.0
    if nbevts:
      dirmeta['NumberOfEvents'] = nbevts
    if 'NumberOfEvents' not in dirmeta:
      dirmeta['NumberOfEvents'] = 0
    dirmeta['detail'] = ''
    if dirmeta['EvtType'] in processesdict:
      if 'Detail' in processesdict[dirmeta['EvtType']]:
        detail = processesdict[dirmeta['EvtType']]['Detail']
      else:
        detail = dirmeta['EvtType']
    # NOTE(review): if EvtType is not in processesdict, `detail` may be
    # unbound when _translate(detail) is called below — confirm upstream
    # guarantees the key is always present.
    if not prodtype == 'MCGeneration':
      # Non-generation prods have a parent production: record its ID.
      res = trc.getTransformationInputDataQuery(str(prodID))
      if res['OK']:
        if 'ProdID' in res['Value']:
          dirmeta['MomProdID'] = res['Value']['ProdID']
    if 'MomProdID' not in dirmeta:
      dirmeta['MomProdID'] = 0
    dirmeta['detail'] = _translate(detail)
    metadata.append(dirmeta)
  # Bucket the collected channels per detector type and data level.
  detectors = {}
  detectors['ILD'] = {}
  corres = {"MCGeneration": 'gen',
            "MCSimulation": 'SIM',
            "MCReconstruction": "REC",
            "MCReconstruction_Overlay": "REC"}
  detectors['ILD']['SIM'] = []
  detectors['ILD']['REC'] = []
  detectors['SID'] = {}
  detectors['SID']['SIM'] = []
  detectors['SID']['REC'] = []
  detectors['sid'] = {}
  detectors['sid']['SIM'] = []
  detectors['sid']['REC'] = []
  detectors['gen'] = []
  for channel in metadata:
    if 'DetectorType' not in channel:
      # Generator-level prods carry no DetectorType.
      detectors['gen'].append((channel['detail'],
                               channel['Energy'],
                               channel['ProdID'],
                               channel['nb_files'],
                               channel['NumberOfEvents'] / channel['nb_files'],
                               channel['NumberOfEvents'],
                               channel['CrossSection'],
                               str(channel['proddetail'])))
    else:
      if not channel['DetectorType'] in detectors:
        gLogger.error("This is unknown detector", channel['DetectorType'])
        continue
      detectors[channel['DetectorType']][corres[channel['prodtype']]].append((channel['detail'],
                                                                             channel['Energy'],
                                                                             channel['DetectorType'],
                                                                             channel['ProdID'],
                                                                             channel['nb_files'],
                                                                             channel['NumberOfEvents'] / channel['nb_files'],
                                                                             channel['NumberOfEvents'],
                                                                             channel['CrossSection'],
                                                                             channel['MomProdID'],
                                                                             str(channel['proddetail'])))
  # Render one HTML table per non-empty detector/production-type bucket.
  with open("tables.html", "w") as of:
    of.write("""<!DOCTYPE html>
<html>
<head>
<title> Production summary </title>
</head>
<body>
""")
    if len(detectors['gen']):
      of.write("<h1>gen prods</h1>\n")
      table = Table(header_row=('Channel', 'Energy', 'ProdID', 'Tasks', 'Average Evts/task', 'Statistics', 'Cross Section (fb)', 'Comment'))
      for item in detectors['gen']:
        table.rows.append(item)
      of.write(str(table))
      gLogger.info("Gen prods")
      gLogger.info(str(table))
    if len(detectors['ILD']):
      of.write("<h1>ILD prods</h1>\n")
      for ptype in detectors['ILD'].keys():
        if len(detectors['ILD'][ptype]):
          of.write("<h2>%s</h2>\n" % ptype)
          table = Table(header_row=('Channel', 'Energy', 'Detector', 'ProdID', 'Number of Files', 'Events/File', 'Statistics', 'Cross Section (fb)', 'Origin ProdID', 'Comment'))
          for item in detectors['ILD'][ptype]:
            table.rows.append(item)
          of.write(str(table))
          gLogger.info("ILC CDR prods %s" % ptype)
          gLogger.info(str(table))
    if len(detectors['SID']):
      of.write("<h1>SID prods</h1>\n")
      for ptype in detectors['SID'].keys():
        if len(detectors['SID'][ptype]):
          of.write("<h2>%s</h2>\n" % ptype)
          table = Table(header_row=('Channel', 'Energy', 'Detector', 'ProdID', 'Number of Files', 'Events/File', 'Statistics', 'Cross Section (fb)', 'Origin ProdID', 'Comment'))
          for item in detectors['SID'][ptype]:
            table.rows.append(item)
          of.write(str(table))
          gLogger.info("SID CDR prods %s" % ptype)
          gLogger.info(str(table))
    if len(detectors['sid']):
      of.write("<h1>sid dbd prods</h1>\n")
      for ptype in detectors['sid'].keys():
        if len(detectors['sid'][ptype]):
          of.write("<h2>%s</h2>\n" % ptype)
          table = Table(header_row=('Channel', 'Energy', 'Detector', 'ProdID', 'Number of Files', 'Events/File', 'Statistics', 'Cross Section (fb)', 'Origin ProdID', 'Comment'))
          for item in detectors['sid'][ptype]:
            table.rows.append(item)
          of.write(str(table))
          gLogger.info("sid DBD prods %s" % ptype)
          gLogger.info(str(table))
    of.write("""
</body>
</html>
""")
  gLogger.notice("Check ./tables.html in any browser for the results")
  dexit(0)
def setUp(self):
  """Create a ProcessList whose backing file is absent and give it a 'Processes' section."""
  # os.path.exists -> False so the constructor does not try to load a file.
  absentFile = Mock(return_value=False)
  with patch('%s.os.path.exists' % MODULE_NAME, new=absentFile):
    self.prol = ProcessList('myTestProcess.list')
    self.prol.cfg.createNewSection('Processes')
def setUp(self):
  """Create the ProcessList under test with a non-existent backing file."""
  # os.path.exists -> False so the constructor does not try to load a file.
  absentFile = Mock(return_value=False)
  with patch('%s.os.path.exists' % MODULE_NAME, new=absentFile):
    self.prol = ProcessList('myTestProcess.list')
def doTheWhizardInstallation():
  """Do the installation for a new whizard version.

  Copy the libraries, create the tarball, upload the processList file
  and add the entry in the configuration system.
  Exits the process via dexit().
  """
  res = checkSLCVersion()
  if not res['OK']:
    gLogger.error(res['Message'])
    dexit(1)
  res = checkGFortranVersion()
  if not res['OK']:
    gLogger.error(res['Message'])
    dexit(1)
  cliParams = Params()
  cliParams.registerSwitches()
  Script.parseCommandLine(ignoreErrors=False)
  whizardResultFolder = cliParams.path
  platform = cliParams.platform
  whizard_version = cliParams.version
  appVersion = whizard_version
  beam_spectra_version = cliParams.beam_spectra
  if not whizardResultFolder or not whizard_version or not beam_spectra_version:
    Script.showHelp()
    dexit(2)
  from ILCDIRAC.Core.Utilities.ProcessList import ProcessList
  from DIRAC.ConfigurationSystem.Client.Helpers.Operations import Operations
  from DIRAC.Interfaces.API.DiracAdmin import DiracAdmin
  from ILCDIRAC.Core.Utilities.FileUtils import upload
  from DIRAC.DataManagementSystem.Client.DataManager import DataManager
  diracAdmin = DiracAdmin()
  modifiedCS = False
  softwareSection = "/Operations/Defaults/AvailableTarBalls"
  processlistLocation = "ProcessList/Location"
  appName = "whizard"
  ops = Operations()
  # Locate and download the current process list; it is updated at the end.
  path_to_process_list = ops.getValue(processlistLocation, "")
  if not path_to_process_list:
    gLogger.error("Could not find process list location in CS")
    dexit(2)
  gLogger.verbose("Getting process list from file catalog")
  datMan = DataManager()
  res = datMan.getFile(path_to_process_list)
  if not res['OK']:
    gLogger.error("Error while getting process list from storage")
    dexit(2)
  gLogger.verbose("done")
  ## just the name of the local file in current working directory
  processlist = os.path.basename(path_to_process_list)
  if not os.path.exists(processlist):
    gLogger.error("Process list does not exist locally")
    dexit(2)
  pl = ProcessList(processlist)
  startDir = os.getcwd()
  inputlist = {}
  os.chdir(whizardResultFolder)
  folderlist = os.listdir(whizardResultFolder)
  # The whizard build folder must contain the executable and its prc/mdl files.
  whiz_here = folderlist.count("whizard")
  if whiz_here == 0:
    gLogger.error("whizard executable not found in %s, please check" % whizardResultFolder)
    os.chdir(startDir)
    dexit(2)
  whizprc_here = folderlist.count("whizard.prc")
  if whizprc_here == 0:
    gLogger.error("whizard.prc not found in %s, please check" % whizardResultFolder)
    os.chdir(startDir)
    dexit(2)
  whizmdl_here = folderlist.count("whizard.mdl")
  if whizmdl_here == 0:
    gLogger.error("whizard.mdl not found in %s, please check" % whizardResultFolder)
    os.chdir(startDir)
    dexit(2)
  gLogger.verbose("Preparing process list")
  ## FIXME:: What is this doing exactly? Is this necessary? -- APS, JFS
  # Scan the *.in template files for decay_description/process_id lines and
  # collect per-process details from whizard.prc.
  for f in folderlist:
    if f.count(".in"):
      infile = open(f, "r")
      found_detail = False
      for line in infile:
        if line.count("decay_description"):
          currprocess = f.split(".template.in")[0]
          inputlist[currprocess] = {}
          inputlist[currprocess]["InFile"] = f.rstrip("~")
          inputlist[currprocess]["Detail"] = line.split("\"")[1]
          found_detail = True
        if line.count("process_id") and found_detail:
          process_id = line.split("\"")[1]
          inputlist[currprocess]["Model"] = ""
          inputlist[currprocess]["Generator"] = ""
          inputlist[currprocess]["Restrictions"] = ""
          for process in process_id.split():
            print "Looking for detail of process %s" % (process)
            process_detail = getDetailsFromPRC("whizard.prc", process)
            inputlist[currprocess]["Model"] = process_detail["Model"]
            inputlist[currprocess]["Generator"] = process_detail["Generator"]
            # Concatenate restrictions from every process id of the template.
            if len(inputlist[currprocess]["Restrictions"]):
              inputlist[currprocess]["Restrictions"] = inputlist[currprocess]["Restrictions"] + ", " + process_detail["Restrictions"]
            else:
              inputlist[currprocess]["Restrictions"] = process_detail["Restrictions"]
  ## END FIXEME
  ## Update inputlist with what was found looking in the prc file
  processes = readPRCFile("whizard.prc")
  inputlist.update(processes)
  ## get from cross section files the cross sections for the processes in inputlist
  # Need full process list
  for f in folderlist:
    if f.count("cross_sections_"):
      crossfile = open(f, "r")
      for line in crossfile:
        line = line.rstrip().lstrip()
        # Skip blanks, comments ('#'/'!') and lines without a value column.
        if not len(line):
          continue
        if line[0] == "#" or line[0] == "!":
          continue
        if len(line.split()) < 2:
          continue
        currprocess = line.split()[0]
        if currprocess in inputlist:
          inputlist[currprocess]['CrossSection'] = line.split()[1]
  gLogger.notice("Preparing Tarball")
  ## Make a folder in the current directory of the user to store the whizard libraries, executable et al.
  localWhizardFolderRel = ("whizard" + whizard_version)  # relative path
  localWhizardFolder = os.path.join(startDir, localWhizardFolderRel)
  if not os.path.exists(localWhizardFolder):
    os.makedirs(localWhizardFolder)
  localWhizardLibFolder = os.path.join(localWhizardFolder, 'lib')
  if os.path.exists(localWhizardLibFolder):
    shutil.rmtree(localWhizardLibFolder)
  os.makedirs(localWhizardLibFolder)  ## creates the lib folder
  whizardLibraries = getListOfLibraries(os.path.join(whizardResultFolder, "whizard"))
  copyLibsCall = ["rsync", "-avzL"]
  for lib in whizardLibraries:
    copyLibsCall.append(lib)
  copyLibsCall.append(localWhizardLibFolder)
  # NOTE(review): Popen is not waited on — the rsync may still be running when
  # the tarball is created below; confirm this race is acceptable.
  subprocess.Popen(copyLibsCall, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
  for fileName in folderlist:
    shutil.copy(fileName, localWhizardFolder)
  ## Get the list of md5 sums for all the files in the folder to be tarred
  os.chdir(localWhizardFolder)
  subprocess.call(["find . -type f -exec md5sum {} > ../md5_checksum.md5 \\; && mv ../md5_checksum.md5 ."], shell=True)
  os.chdir(startDir)
  ## Create the Tarball
  gLogger.notice("Creating Tarball...")
  appTar = localWhizardFolder + ".tgz"
  myappTar = tarfile.open(appTar, "w:gz")
  myappTar.add(localWhizardFolderRel)
  myappTar.close()
  md5sum = md5.md5(open(appTar, 'r').read()).hexdigest()
  gLogger.notice("...Done")
  gLogger.notice("Registering new Tarball in CS")
  tarballurl = {}
  # The platform must already be declared in the CS software section.
  av_platforms = gConfig.getSections(softwareSection, [])
  if av_platforms['OK']:
    if platform not in av_platforms['Value']:
      gLogger.error("Platform %s unknown, available are %s." % (platform, ", ".join(av_platforms['Value'])))
      gLogger.error("If yours is missing add it in CS")
      dexit(255)
  else:
    gLogger.error("Could not find all platforms available in CS")
    dexit(255)
  av_apps = gConfig.getSections("%s/%s" % (softwareSection, platform), [])
  if not av_apps['OK']:
    gLogger.error("Could not find all applications available in CS")
    dexit(255)
  if appName.lower() in av_apps['Value']:
    versions = gConfig.getSections("%s/%s/%s" % (softwareSection, platform, appName.lower()), [])
    if not versions['OK']:
      gLogger.error("Could not find all versions available in CS")
      dexit(255)
    if appVersion in versions['Value']:
      gLogger.error('Application %s %s for %s already in CS, nothing to do' % (appName.lower(), appVersion, platform))
      dexit(0)
    else:
      # New version of a known application: register tarball, upload, checksum.
      result = diracAdmin.csSetOption("%s/%s/%s/%s/TarBall" % (softwareSection, platform, appName.lower(), appVersion), os.path.basename(appTar))
      if result['OK']:
        modifiedCS = True
        tarballurl = gConfig.getOption("%s/%s/%s/TarBallURL" % (softwareSection, platform, appName.lower()), "")
        if len(tarballurl['Value']) > 0:
          res = upload(tarballurl['Value'], appTar)
          if not res['OK']:
            gLogger.error("Upload to %s failed" % tarballurl['Value'])
            dexit(255)
      result = diracAdmin.csSetOption("%s/%s/%s/%s/Md5Sum" % (softwareSection, platform, appName.lower(), appVersion), md5sum)
      if result['OK']:
        modifiedCS = True
      result = diracAdmin.csSetOption("%s/%s/%s/%s/Dependencies/beam_spectra/version" % (softwareSection, platform, appName.lower(), appVersion), beam_spectra_version)
  else:
    # Application not yet known for this platform: same registration steps.
    result = diracAdmin.csSetOption("%s/%s/%s/%s/TarBall" % (softwareSection, platform, appName.lower(), appVersion), os.path.basename(appTar))
    if result['OK']:
      modifiedCS = True
      tarballurl = gConfig.getOption("%s/%s/%s/TarBallURL" % (softwareSection, platform, appName.lower()), "")
      if len(tarballurl['Value']) > 0:
        res = upload(tarballurl['Value'], appTar)
        if not res['OK']:
          gLogger.error("Upload to %s failed" % tarballurl['Value'])
          dexit(255)
    result = diracAdmin.csSetOption("%s/%s/%s/%s/Md5Sum" % (softwareSection, platform, appName.lower(), appVersion), md5sum)
    result = diracAdmin.csSetOption("%s/%s/%s/%s/Dependencies/beam_spectra/version" % (softwareSection, platform, appName.lower(), appVersion), beam_spectra_version)
  gLogger.verbose("Done uploading the tar ball")
  os.remove(appTar)
  # Set for all new processes the TarBallURL
  for process in inputlist.keys():
    inputlist[process]['TarBallCSPath'] = tarballurl['Value'] + os.path.basename(appTar)
  pl.updateProcessList(inputlist)
  pl.writeProcessList()
  raw_input("Do you want to upload the process list? Press ENTER to proceed or CTRL-C to abort!")
  pl.uploadProcessListToFileCatalog(path_to_process_list, appVersion)
  # Commit the changes if nothing has failed and the CS has been modified
  if modifiedCS:
    result = diracAdmin.csCommitChanges(False)
    gLogger.verbose(result)
  gLogger.notice('All done OK!')
  dexit(0)