def getFile(lfn, se=""): dm = DataManager() download_ok = 0 get_active_replicas_ok = False lfn_on_se = False error_msg = "" if se: for i in range(0, 5): result = dm.getActiveReplicas(lfn) if result["OK"] and result["Value"]["Successful"]: get_active_replicas_ok = True lfnReplicas = result["Value"]["Successful"] if se in lfnReplicas[lfn]: lfn_on_se = True break time.sleep(3) print "- Get replicas for %s failed, try again" % lfn if not get_active_replicas_ok: return S_ERROR("Get replicas error: %s" % lfn) if lfn_on_se: se = StorageElement(se) # try 5 times for j in range(0, 5): result = se.getFile(lfn) if result["OK"] and result["Value"]["Successful"] and result["Value"]["Successful"].has_key(lfn): break time.sleep(random.randint(180, 600)) print "- %s getStorageFile(%s) failed, try again" % (lfn, se) if result["OK"]: if result["Value"]["Successful"] and result["Value"]["Successful"].has_key(lfn): download_ok = 1 else: error_msg = "Downloading %s from SE %s error!" % (lfn, se) else: error_msg = result["Message"] else: if se: print 'File %s not found on SE "%s" after %s tries, trying other SE' % (lfn, se, i + 1) # try 5 times for j in range(0, 5): result = dm.getFile(lfn) if result["OK"] and result["Value"]["Successful"] and result["Value"]["Successful"].has_key(lfn): break time.sleep(random.randint(180, 600)) print "- getFile(%s) failed, try again" % lfn if result["OK"]: if result["Value"]["Successful"] and result["Value"]["Successful"].has_key(lfn): download_ok = 2 else: error_msg = "Downloading %s from random SE error!" % lfn else: error_msg = result["Message"] if download_ok: return S_OK({lfn: {"DownloadOK": download_ok, "Retry": j + 1}}) return S_ERROR(error_msg)
def _get_file(self, lfn):
    """Fetch *lfn* into the current directory via the DataManager.

    Returns the DataManager result dict on success; S_ERROR when the
    call itself failed or when the file landed in the 'Failed' map.
    """
    manager = DataManager()
    outcome = manager.getFile(lfn, "")
    if not outcome['OK']:
        return S_ERROR(outcome['Message'])
    if outcome['Value']['Failed']:
        return S_ERROR(outcome['Value'])
    return outcome
def web_getSelectedFiles(self):
    """Download the LFNs listed in the request's ``path`` argument, pack
    them (with their catalog directory structure) into a zip archive and
    stream the archive back to the client as an attachment.
    """
    self.set_header('Content-type', 'text/plain')
    arguments = self.request.arguments
    gLogger.always("submit: incoming arguments %s to getSelectedFiles" % arguments)
    # Unique scratch directory per request so concurrent downloads don't clash.
    tmpdir = '/tmp/eiscat/' + str(time.time()) + str(random.random())
    dataMgr = DataManager(vo=self.vo)
    lfnStr = str(arguments['path'][0])
    if not os.path.isdir(tmpdir):
        os.makedirs(tmpdir)
    os.chdir(tmpdir)
    for lfn in lfnStr.split(','):
        gLogger.always("Data manager get file %s" % lfn)
        last_slash = lfn.rfind("/")
        # Skip the first three "/"-separated components of the LFN to build
        # the directory the file will occupy inside the zip.
        pos_relative = lfn.find("/")
        pos_relative = lfn.find("/", pos_relative + 1)
        pos_relative = lfn.find("/", pos_relative + 1)
        pathInZip = lfn[pos_relative:last_slash]
        tmpPathInZip = tmpdir + pathInZip
        gLogger.always("path in zip %s" % tmpPathInZip)
        if not os.path.isdir(tmpPathInZip):
            os.makedirs(tmpPathInZip)
        result = dataMgr.getFile(str(lfn), destinationDir=str(tmpPathInZip))
        if not result["OK"]:
            # best effort: log and continue with the remaining files
            gLogger.error("getSelectedFiles: %s" % result["Message"])
    #make zip file
    zipname = tmpdir.split('/')[-1] + '.zip'
    zf = zipfile.ZipFile('/tmp/eiscat/' + zipname, "w")
    gLogger.always("zip file /tmp/eiscat/%s" % zipname)
    gLogger.always("start walk in tmpdir %s" % tmpdir)
    for absolutePath, dirs, files in os.walk(tmpdir):
        gLogger.always("absolute path %s" % absolutePath)
        gLogger.always("files %s" % files)
        for filename in files:
            # relative path from tmpdir (the current working directory)
            pos_relative = absolutePath.find("/")
            pos_relative = absolutePath.find("/", pos_relative + 1)
            pos_relative = absolutePath.find("/", pos_relative + 1)
            pos_relative = absolutePath.find("/", pos_relative + 1)
            pos_relative = pos_relative + 1
            relativePath = absolutePath[pos_relative:]
            gLogger.always("relativePath %s, file %s" % (relativePath, filename))
            zf.write(os.path.join(relativePath, filename))
    zf.close()
    #read zip file
    with open('/tmp/eiscat/' + zipname, "rb") as f:
        obj = f.read()
    #cleanup
    shutil.rmtree(tmpdir)
    os.remove('/tmp/eiscat/' + zipname)
    # FIX: the filename parameter was missing its closing double quote
    self.set_header('Content-Disposition', 'attachment; filename="' + zipname + '"')
    self.write(obj)
def getProcessList(self):
    """ Get the :mod:`ProcessList <ILCDIRAC.Core.Utilities.ProcessList.ProcessList>` needed by
    :mod:`Whizard <ILCDIRAC.Interfaces.API.NewInterface.Applications.Whizard>`.

    :return: process list object
    """
    processlistpath = gConfig.getValue("/LocalSite/ProcessListPath", "")
    if not processlistpath:
        gLogger.info('Will download the process list locally. To gain time, please put it somewhere and add to \
your dirac.cfg the entry /LocalSite/ProcessListPath pointing to the file')
        pathtofile = self.ops.getValue("/ProcessList/Location", "")
        if not pathtofile:
            gLogger.error("Could not get path to process list")
            processlist = ""
        else:
            datMan = DataManager()
            # FIX: the download result used to be ignored silently, hiding failures
            res = datMan.getFile(pathtofile)
            if not res['OK']:
                gLogger.error("Failed to download the process list: %s" % res['Message'])
            processlist = os.path.basename(pathtofile)
    else:
        processlist = processlistpath
    self.processList = ProcessList(processlist)
    return self.processList
def downloadFile(lfn):
    """Download a file using DMS.

    Keyword arguments:
    lfn -- a logical file name
    """
    DIRAC.gLogger.info('Downloading ', lfn)
    outcome = DataManager().getFile(lfn)
    if not outcome['OK']:
        # Fatal: report both the low-level message and the LFN, then exit.
        DIRAC.gLogger.error(outcome['Message'])
        DIRAC.gLogger.error('Could not download %s' % lfn)
        DIRAC.exit(-1)
    return DIRAC.S_OK()
def getProcessList(self):
    """ Get the :mod:`ProcessList <ILCDIRAC.Core.Utilities.ProcessList.ProcessList>` needed by
    :mod:`Whizard <ILCDIRAC.Interfaces.API.NewInterface.Applications.Whizard>`.

    :return: process list object
    """
    processlistpath = gConfig.getValue("/LocalSite/ProcessListPath", "")
    if not processlistpath:
        gLogger.info('Will download the process list locally. To gain time, please put it somewhere and add to \
your dirac.cfg the entry /LocalSite/ProcessListPath pointing to the file')
        pathtofile = self.ops.getValue("/ProcessList/Location", "")
        if not pathtofile:
            gLogger.error("Could not get path to process list")
            processlist = ""
        else:
            datMan = DataManager()
            # FIX: check the download result instead of discarding it
            res = datMan.getFile(pathtofile)
            if not res['OK']:
                gLogger.error("Failed to download the process list: %s" % res['Message'])
            processlist = os.path.basename(pathtofile)
    else:
        processlist = processlistpath
    self.processList = ProcessList(processlist)
    return self.processList
def __prepareFileForHTTP(self, lfn, key):
    """ Prepare proxied file for HTTP: stage *lfn* into the per-key cache
    directory under HTTP_PATH and return the DataManager result with the
    cache path attached under 'CachePath'.
    """
    global HTTP_PATH
    res = self.__prepareSecurityDetails()
    if not res['OK']:
        return res
    # Local cache directory for this key
    getFileDir = "%s/%s" % (HTTP_PATH, key)
    # FIX: os.makedirs raises OSError when the directory is left over from a
    # previous request for the same key; tolerate an existing cache dir.
    if not os.path.isdir(getFileDir):
        os.makedirs(getFileDir)
    # Get the file to the cache
    from DIRAC.DataManagementSystem.Client.DataManager import DataManager
    dataMgr = DataManager()
    result = dataMgr.getFile(lfn, destinationDir=getFileDir)
    # CachePath is attached even on failure so callers can clean up the dir
    result['CachePath'] = getFileDir
    return result
def __prepareFileForHTTP(self, lfn, key):
    """Proxied: stage *lfn* into the per-key HTTP cache directory."""
    global HTTP_PATH
    security = self.__prepareSecurityDetails()
    if not security['OK']:
        return security
    # Clear the local cache
    cacheDir = "%s/%s" % (HTTP_PATH, key)
    os.makedirs(cacheDir)
    # Fetch the file into the cache
    from DIRAC.DataManagementSystem.Client.DataManager import DataManager
    fetched = DataManager().getFile(lfn, destinationDir=cacheDir)
    fetched['CachePath'] = cacheDir
    return fetched
def downloadFile(tarballURL, app_tar, folder_name):
    """ Get the file locally.

    :param str tarballURL: base URL (http(s) server or grid catalog path)
    :param str app_tar: tarball name/path appended to the base URL
    :param str folder_name: name used only for the debug message
    :returns: S_OK() on success, S_ERROR / DataManager error dict on failure
    """
    # need to make sure the url ends with /, otherwise concatenation below
    # returns a bad url (endswith also copes with an empty string)
    if not tarballURL.endswith("/"):
        tarballURL += "/"
    app_tar_base = os.path.basename(app_tar)
    # FIX: the old `find("http://") > -1` missed https URLs and matched
    # "http://" anywhere in the string, not just at the start
    if tarballURL.startswith(("http://", "https://")):
        try:
            gLogger.debug("Downloading software", '%s' % (folder_name))
            #Copy the file locally, don't try to read from remote, soooo slow
            #Use string conversion %s%s to set the address, makes the system more stable
            urllib.urlretrieve("%s%s" % (tarballURL, app_tar), app_tar_base)
        except IOError as err:
            gLogger.exception(str(err))
            return S_ERROR('Exception during url retrieve: %s' % str(err))
    else:
        datMan = DataManager()
        resget = datMan.getFile("%s%s" % (tarballURL, app_tar))
        if not resget['OK']:
            gLogger.error("File could not be downloaded from the grid")
            return resget
    return S_OK()
def downloadFile(tarballURL, app_tar, folder_name):
    """ Get the file locally. """
    # The concatenation below needs a trailing slash on the base URL.
    if tarballURL[-1] != "/":
        tarballURL = tarballURL + "/"
    app_tar_base = os.path.basename(app_tar)
    fullURL = "%s%s" % (tarballURL, app_tar)
    if tarballURL.find("http://") > -1:
        try:
            gLogger.debug("Downloading software", '%s' % (folder_name))
            #Copy the file locally, don't try to read from remote, soooo slow
            #Use string conversion %s%s to set the address, makes the system more stable
            urllib.urlretrieve(fullURL, app_tar_base)
        except IOError as err:
            gLogger.exception(str(err))
            return S_ERROR('Exception during url retrieve: %s' % str(err))
        return S_OK()
    fetched = DataManager().getFile(fullURL)
    if not fetched['OK']:
        gLogger.error("File could not be downloaded from the grid")
        return fetched
    return S_OK()
def doTheWhizardInstallation():
  """Do the installation for a new whizard version.

  Copies the libraries, creates the tarball, uploads the processList file
  and adds the corresponding entry in the configuration system.
  Exits the process via ``dexit`` on any fatal error.
  """
  # Pre-flight checks: build platform must match what the grid expects.
  res = checkSLCVersion()
  if not res['OK']:
    gLogger.error(res['Message'])
    dexit(1)
  res = checkGFortranVersion()
  if not res['OK']:
    gLogger.error(res['Message'])
    dexit(1)
  cliParams = Params()
  cliParams.registerSwitches()
  Script.parseCommandLine( ignoreErrors= False)
  whizardResultFolder = cliParams.path
  platform = cliParams.platform
  whizard_version = cliParams.version
  appVersion = whizard_version
  beam_spectra_version = cliParams.beam_spectra
  if not whizardResultFolder or not whizard_version or not beam_spectra_version:
    Script.showHelp()
    dexit(2)
  # Imports are deferred until after parseCommandLine, as required by DIRAC scripts.
  from ILCDIRAC.Core.Utilities.ProcessList import ProcessList
  from DIRAC.ConfigurationSystem.Client.Helpers.Operations import Operations
  from DIRAC.Interfaces.API.DiracAdmin import DiracAdmin
  from ILCDIRAC.Core.Utilities.FileUtils import upload
  from DIRAC.DataManagementSystem.Client.DataManager import DataManager
  diracAdmin = DiracAdmin()
  modifiedCS = False
  softwareSection = "/Operations/Defaults/AvailableTarBalls"
  processlistLocation = "ProcessList/Location"
  appName = "whizard"
  ops = Operations()
  path_to_process_list = ops.getValue(processlistLocation, "")
  if not path_to_process_list:
    gLogger.error("Could not find process list location in CS")
    dexit(2)
  gLogger.verbose("Getting process list from file catalog")
  datMan = DataManager()
  res = datMan.getFile(path_to_process_list)
  if not res['OK']:
    gLogger.error("Error while getting process list from storage")
    dexit(2)
  gLogger.verbose("done")
  ##just the name of the local file in current working directory
  processlist = os.path.basename(path_to_process_list)
  if not os.path.exists(processlist):
    gLogger.error("Process list does not exist locally")
    dexit(2)
  pl = ProcessList(processlist)
  startDir = os.getcwd()
  inputlist = {}
  os.chdir(whizardResultFolder)
  folderlist = os.listdir(whizardResultFolder)
  # Sanity check: the build folder must contain the executable and both
  # whizard steering/model files before anything gets packaged.
  whiz_here = folderlist.count("whizard")
  if whiz_here == 0:
    gLogger.error("whizard executable not found in %s, please check" % whizardResultFolder)
    os.chdir(startDir)
    dexit(2)
  whizprc_here = folderlist.count("whizard.prc")
  if whizprc_here == 0:
    gLogger.error("whizard.prc not found in %s, please check" % whizardResultFolder)
    os.chdir(startDir)
    dexit(2)
  whizmdl_here = folderlist.count("whizard.mdl")
  if whizmdl_here == 0:
    gLogger.error("whizard.mdl not found in %s, please check" % whizardResultFolder)
    os.chdir(startDir)
    dexit(2)
  gLogger.verbose("Preparing process list")
  ## FIXME:: What is this doing exactly? Is this necessary? -- APS, JFS
  # Scan the *.in template files for decay_description / process_id lines and
  # collect per-process metadata (Model, Generator, Restrictions) from whizard.prc.
  for f in folderlist:
    if f.count(".in"):
      infile = open(f, "r")
      found_detail = False
      for line in infile:
        if line.count("decay_description"):
          currprocess = f.split(".template.in")[0]
          inputlist[currprocess] = {}
          inputlist[currprocess]["InFile"] = f.rstrip("~")
          inputlist[currprocess]["Detail"] = line.split("\"")[1]
          found_detail = True
        if line.count("process_id") and found_detail:
          process_id = line.split("\"")[1]
          inputlist[currprocess]["Model"] = ""
          inputlist[currprocess]["Generator"] = ""
          inputlist[currprocess]["Restrictions"] = ""
          for process in process_id.split():
            print "Looking for detail of process %s" % (process)
            process_detail = getDetailsFromPRC("whizard.prc", process)
            inputlist[currprocess]["Model"] = process_detail["Model"]
            inputlist[currprocess]["Generator"] = process_detail["Generator"]
            if len(inputlist[currprocess]["Restrictions"]):
              inputlist[currprocess]["Restrictions"] = inputlist[currprocess]["Restrictions"] + ", " + process_detail["Restrictions"]
            else:
              inputlist[currprocess]["Restrictions"] = process_detail["Restrictions"]
  #if len(inputlist[currprocess].items()):
  #  inputlist.append(processdict)
  ## END FIXEME
  ##Update inputlist with what was found looking in the prc file
  processes = readPRCFile("whizard.prc")
  inputlist.update(processes)
  ##get from cross section files the cross sections for the processes in inputlist
  #Need full process list
  for f in folderlist:
    if f.count("cross_sections_"):
      crossfile = open(f, "r")
      for line in crossfile:
        line = line.rstrip().lstrip()
        # skip blanks, comment lines (# or !) and lines without two columns
        if not len(line):
          continue
        if line[0] == "#" or line[0] == "!":
          continue
        if len(line.split()) < 2:
          continue
        currprocess = line.split()[0]
        if currprocess in inputlist:
          inputlist[currprocess]['CrossSection'] = line.split()[1]
  gLogger.notice("Preparing Tarball")
  ##Make a folder in the current directory of the user to store the whizard libraries, executable et al.
  localWhizardFolderRel = ("whizard" + whizard_version) # relative path
  localWhizardFolder = os.path.join(startDir, localWhizardFolderRel)
  if not os.path.exists(localWhizardFolder):
    os.makedirs(localWhizardFolder)
  localWhizardLibFolder = os.path.join(localWhizardFolder,'lib')
  if os.path.exists(localWhizardLibFolder):
    shutil.rmtree(localWhizardLibFolder)
  os.makedirs(localWhizardLibFolder) ##creates the lib folder
  whizardLibraries = getListOfLibraries(os.path.join(whizardResultFolder, "whizard"))
  copyLibsCall = ["rsync","-avzL"]
  for lib in whizardLibraries:
    copyLibsCall.append(lib)
  copyLibsCall.append(localWhizardLibFolder)
  # NOTE(review): Popen is not waited on here, so the rsync may still be
  # running while the files below are copied — confirm this is intended.
  subprocess.Popen(copyLibsCall, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
  for fileName in folderlist:
    shutil.copy(fileName, localWhizardFolder)
  ##Get the list of md5 sums for all the files in the folder to be tarred
  os.chdir( localWhizardFolder )
  subprocess.call(["find . -type f -exec md5sum {} > ../md5_checksum.md5 \\; && mv ../md5_checksum.md5 ."], shell=True)
  os.chdir(startDir)
  ##Create the Tarball
  gLogger.notice("Creating Tarball...")
  appTar = localWhizardFolder + ".tgz"
  myappTar = tarfile.open(appTar, "w:gz")
  myappTar.add(localWhizardFolderRel)
  myappTar.close()
  # md5 of the tarball itself, stored in the CS next to the TarBall entry
  md5sum = md5.md5(open( appTar, 'r' ).read()).hexdigest()
  gLogger.notice("...Done")
  gLogger.notice("Registering new Tarball in CS")
  tarballurl = {}
  av_platforms = gConfig.getSections(softwareSection, [])
  if av_platforms['OK']:
    if platform not in av_platforms['Value']:
      gLogger.error("Platform %s unknown, available are %s." % (platform, ", ".join(av_platforms['Value'])))
      gLogger.error("If yours is missing add it in CS")
      dexit(255)
  else:
    gLogger.error("Could not find all platforms available in CS")
    dexit(255)
  av_apps = gConfig.getSections("%s/%s" % (softwareSection, platform), [])
  if not av_apps['OK']:
    gLogger.error("Could not find all applications available in CS")
    dexit(255)
  if appName.lower() in av_apps['Value']:
    versions = gConfig.getSections("%s/%s/%s" % (softwareSection, platform, appName.lower()), [])
    if not versions['OK']:
      gLogger.error("Could not find all versions available in CS")
      dexit(255)
    if appVersion in versions['Value']:
      gLogger.error('Application %s %s for %s already in CS, nothing to do' % (appName.lower(), appVersion, platform))
      dexit(0)
    else:
      # New version of an already-known application: register the tarball,
      # upload it, then store its md5 and the beam_spectra dependency.
      result = diracAdmin.csSetOption("%s/%s/%s/%s/TarBall" % (softwareSection, platform, appName.lower(), appVersion), os.path.basename(appTar))
      if result['OK']:
        modifiedCS = True
        tarballurl = gConfig.getOption("%s/%s/%s/TarBallURL" % (softwareSection, platform, appName.lower()), "")
        if len(tarballurl['Value']) > 0:
          res = upload(tarballurl['Value'], appTar)
          if not res['OK']:
            gLogger.error("Upload to %s failed" % tarballurl['Value'])
            dexit(255)
      result = diracAdmin.csSetOption("%s/%s/%s/%s/Md5Sum" % (softwareSection, platform, appName.lower(), appVersion), md5sum)
      if result['OK']:
        modifiedCS = True
      result = diracAdmin.csSetOption("%s/%s/%s/%s/Dependencies/beam_spectra/version" % (softwareSection, platform, appName.lower(), appVersion), beam_spectra_version)
  else:
    # Application not yet known for this platform: same registration sequence.
    result = diracAdmin.csSetOption("%s/%s/%s/%s/TarBall" % (softwareSection, platform, appName.lower(), appVersion), os.path.basename(appTar))
    if result['OK']:
      modifiedCS = True
      tarballurl = gConfig.getOption("%s/%s/%s/TarBallURL" % (softwareSection, platform, appName.lower()), "")
      if len(tarballurl['Value']) > 0:
        res = upload(tarballurl['Value'], appTar)
        if not res['OK']:
          gLogger.error("Upload to %s failed" % tarballurl['Value'])
          dexit(255)
    result = diracAdmin.csSetOption("%s/%s/%s/%s/Md5Sum" % (softwareSection, platform, appName.lower(), appVersion), md5sum)
    result = diracAdmin.csSetOption("%s/%s/%s/%s/Dependencies/beam_spectra/version" % (softwareSection, platform, appName.lower(), appVersion), beam_spectra_version)
  gLogger.verbose("Done uploading the tar ball")
  os.remove(appTar)
  #Set for all new processes the TarBallURL
  for process in inputlist.keys():
    inputlist[process]['TarBallCSPath'] = tarballurl['Value'] + os.path.basename(appTar)
  pl.updateProcessList(inputlist)
  pl.writeProcessList()
  # Interactive confirmation before overwriting the shared process list.
  raw_input("Do you want to upload the process list? Press ENTER to proceed or CTRL-C to abort!")
  pl.uploadProcessListToFileCatalog(path_to_process_list, appVersion)
  #Commit the changes if nothing has failed and the CS has been modified
  if modifiedCS:
    result = diracAdmin.csCommitChanges(False)
    gLogger.verbose(result)
  gLogger.notice('All done OK!')
  dexit(0)
class Whizard2Analysis(ModuleBase):
  """ Specific Module to run a Whizard2 job. """

  def __init__(self):
    super(Whizard2Analysis, self).__init__()
    self.enable = True
    self.STEP_NUMBER = ''                 # step number inside the workflow, set externally
    self.result = S_ERROR()               # overall module result, updated by runIt
    self.applicationName = 'whizard2'
    self.startFrom = 0
    self.randomSeed = -1                  # -1 means "not set yet", see _determineRandomSeed
    self.whizard2SinFile = ''             # user-provided sindarin steering content
    self.eventstring = ['+++ Generating event']
    self.decayProc = ['decay_proc']       # process name(s) passed to the simulate() block
    self.integratedProcess = ''           # optional pre-integrated process tarball key
    self.datMan = DataManager()

  def applicationSpecificInputs(self):
    """ Resolve all input variables for the module here.

    :return: S_OK()
    """
    self.randomSeed = self._determineRandomSeed()
    # In production the output file name is derived from production/job IDs.
    if "IS_PROD" in self.workflow_commons and self.workflow_commons["IS_PROD"]:
      self.OutputFile = getProdFilename(self.OutputFile,
                                        int(self.workflow_commons["PRODUCTION_ID"]),
                                        int(self.workflow_commons["JOB_ID"]),
                                        self.workflow_commons,
                                        )
    return S_OK('Parameters resolved')

  def resolveIntegratedProcess(self):
    """Check if integrated process is set and act accordingly.

    If the integrated process was given as a tarball it should already be
    available in the working directory and we do nothing.
    """
    if not self.integratedProcess:
      return S_OK()
    # integratedProcess is set, check CVMFS or filecatalog
    processes = self.ops.getOptionsDict('/AvailableTarBalls/%s/whizard2/%s/integrated_processes/processes' %
                                        ('x86_64-slc5-gcc43-opt', self.applicationVersion))
    if not processes['OK']:
      LOG.error('Could not resolve known integrated processes', processes['Message'])
      return processes
    options = self.ops.getOptionsDict('/AvailableTarBalls/%s/whizard2/%s/integrated_processes' %
                                      ('x86_64-slc5-gcc43-opt', self.applicationVersion))
    if not options['OK']:
      LOG.error('Failed to get integrated processes options', options['Message'])
      return options
    cvmfsPath = options['Value'].get('CVMFSPath', '')
    tarballURL = options['Value'].get('TarBallURL', '')
    processTarball = processes['Value'].get(self.integratedProcess, '')
    # Prefer the tarball on CVMFS; fall back to downloading it from the grid.
    localTarball = os.path.join(cvmfsPath, processTarball)
    if os.path.exists(localTarball):
      LOG.info('Tarball found on cvmfs: %r' % localTarball)
      return extractTarball(localTarball, os.getcwd())
    tarballLFN = os.path.join(tarballURL, processTarball)
    LOG.info('Trying to download tarball', tarballLFN)
    getFile = self.datMan.getFile(tarballLFN)
    if not getFile['OK']:
      LOG.error('Failed to download tarball', getFile['Message'])
      return getFile
    return extractTarball(os.path.split(tarballLFN)[1], os.getcwd())

  def runIt(self):
    """ Called by JobAgent

    Execute the following:
      - get the environment variables that should have been set during installation
      - prepare the steering file and command line parameters
      - run Whizard2 on this steering file and catch the exit status

    :rtype: :func:`~DIRAC.Core.Utilities.ReturnValues.S_OK`, :func:`~DIRAC.Core.Utilities.ReturnValues.S_ERROR`
    """
    self.result = S_OK()
    if not self.platform:
      self.result = S_ERROR( 'No ILC platform selected' )
    elif not self.applicationLog:
      self.result = S_ERROR( 'No Log file provided' )
    if not self.result['OK']:
      LOG.error("Failed to resolve input parameters:", self.result['Message'])
      return self.result
    # Do not run if a previous workflow step already failed.
    if not self.workflowStatus['OK'] or not self.stepStatus['OK']:
      LOG.verbose('Workflow status = %s, step status = %s' % (self.workflowStatus['OK'], self.stepStatus['OK']))
      return S_OK('Whizard2 should not proceed as previous step did not end properly')
    resIntProc = self.resolveIntegratedProcess()
    if not resIntProc['OK']:
      return resIntProc
    # get the enviroment script
    res = getEnvironmentScript(self.platform, self.applicationName, self.applicationVersion, S_ERROR("No init script provided in CVMFS!"))
    if not res['OK']:
      LOG.error("Could not obtain the environment script: ", res["Message"])
      return res
    envScriptPath = res["Value"]
    # Build the sindarin steering file: seed, user content, event count and
    # the simulate() block with the requested output sample format.
    whizard2SteerName = 'Whizard2_%s_Steer_%s.sin' % (self.applicationVersion, self.STEP_NUMBER)
    if os.path.exists(whizard2SteerName):
      os.remove(whizard2SteerName)
    whizard2Steer = []
    whizard2Steer.append('!Seed set via API')
    whizard2Steer.append('seed = %s' % self.randomSeed)
    whizard2Steer.append('')
    whizard2Steer.append('!Parameters set via whizard2SinFile')
    whizard2Steer.append('')
    whizard2Steer.append(self.whizard2SinFile)
    whizard2Steer.append('')
    whizard2Steer.append('!Number of events set via API')
    whizard2Steer.append('')
    whizard2Steer.append('n_events = %s' % self.NumberOfEvents)
    whizard2Steer.append('')
    whizard2Steer.append('simulate (%s) {' % ",".join(self.decayProc))
    whizard2Steer.append(' $sample = "%s"' % self.OutputFile.rsplit('.',1)[0] )
    # slcio output needs the dedicated lcio sample format; any other
    # extension is passed through as the whizard sample_format directly.
    if self.OutputFile.rsplit('.',1)[-1] == 'slcio':
      whizard2Steer.append(' sample_format = lcio')
      whizard2Steer.append(' $extension_lcio = "slcio"')
    else:
      whizard2Steer.append(' sample_format = %s' % self.OutputFile.rsplit('.',1)[-1] )
      whizard2Steer.append(' $extension_{st} = "{st}"'.format(st=self.OutputFile.rsplit('.',1)[-1]))
    whizard2Steer.append('}')
    with open(whizard2SteerName, 'w') as steerFile:
      steerFile.write( "\n".join(whizard2Steer) )
    # Wrapper shell script: source the environment, log it, run whizard and
    # propagate the application exit status.
    scriptName = 'Whizard2_%s_Run_%s.sh' % (self.applicationVersion, self.STEP_NUMBER)
    if os.path.exists(scriptName):
      os.remove(scriptName)
    script = []
    script.append('#!/bin/bash')
    script.append('#####################################################################')
    script.append('# Dynamically generated script to run a production or analysis job. #')
    script.append('#####################################################################')
    script.append('source %s' % envScriptPath)
    script.append('echo =========')
    script.append('env | sort >> localEnv.log')
    script.append('echo whizard:`which whizard`')
    script.append('echo =========')
    script.append('whizard %s' % whizard2SteerName )
    script.append('declare -x appstatus=$?')
    script.append('exit $appstatus')
    with open(scriptName, 'w') as scriptFile:
      scriptFile.write( "\n".join(script) )
    if os.path.exists(self.applicationLog):
      os.remove(self.applicationLog)
    os.chmod(scriptName, 0o755)
    comm = 'bash "./%s"' % scriptName
    self.setApplicationStatus('Whizard2 %s step %s' % (self.applicationVersion, self.STEP_NUMBER))
    self.stdError = ''
    self.result = shellCall(0, comm, callbackFunction = self.redirectLogOutput, bufferLimit = 20971520)
    resultTuple = self.result['Value']
    if not os.path.exists(self.applicationLog):
      LOG.error("Something went terribly wrong, the log file is not present")
      self.setApplicationStatus('%s failed to produce log file' % (self.applicationName))
      if not self.ignoreapperrors:
        return S_ERROR('%s did not produce the expected log %s' % (self.applicationName, self.applicationLog))
    status = resultTuple[0]
    LOG.info("Status after the application execution is %s" % status)
    return self.finalStatusReport(status)

  def _determineRandomSeed(self):
    """determine what the randomSeed should be, depends on production or not

    .. Note:: Whizard2 we use *randomSeed* and not *RandomSeed* as in the other workflow modules
    """
    if self.randomSeed == -1:
      self.randomSeed = self.jobID
    # Production jobs get a reproducible seed built from the production and job IDs.
    if "IS_PROD" in self.workflow_commons:
      self.randomSeed = int(str(int(self.workflow_commons["PRODUCTION_ID"])) + str(int(self.workflow_commons["JOB_ID"])))
    return self.randomSeed
class ReplicaManagerTestCase(unittest.TestCase):
  """ Base class for the Replica Manager test cases.

  These are integration tests: every test uploads a throw-away local file
  to a real Storage Element (GRIDKA-RAW) and removes it again afterwards.
  """

  def setUp(self):
    # A small local file with unique (timestamp) content to upload.
    self.dataManager = DataManager()
    self.fileName = '/tmp/temporaryLocalFile'
    file = open(self.fileName,'w')
    file.write("%s" % time.time())
    file.close()

  def test_putAndRegister(self):
    print '\n\n#########################################################################\n\n\t\t\tPut and register test\n'
    # Unique LFN per run so repeated executions never collide in the catalog.
    lfn = '/lhcb/test/unit-test/ReplicaManager/putAndRegister/testFile.%s' % time.time()
    diracSE = 'GRIDKA-RAW'
    putRes = self.dataManager.putAndRegister(lfn, self.fileName, diracSE)
    removeRes = self.dataManager.removeFile(lfn)
    # Check that the put was successful
    self.assert_(putRes['OK'])
    self.assert_(putRes['Value'].has_key('Successful'))
    self.assert_(putRes['Value']['Successful'].has_key(lfn))
    self.assert_(putRes['Value']['Successful'][lfn])
    # Check that the removal was successful
    self.assert_(removeRes['OK'])
    self.assert_(removeRes['Value'].has_key('Successful'))
    self.assert_(removeRes['Value']['Successful'].has_key(lfn))
    self.assert_(removeRes['Value']['Successful'][lfn])

  def test_putAndRegisterReplicate(self):
    print '\n\n#########################################################################\n\n\t\t\tReplication test\n'
    lfn = '/lhcb/test/unit-test/ReplicaManager/putAndRegisterReplicate/testFile.%s' % time.time()
    diracSE = 'GRIDKA-RAW'
    putRes = self.dataManager.putAndRegister(lfn, self.fileName, diracSE)
    replicateRes = self.dataManager.replicateAndRegister(lfn,'CNAF-DST') #,sourceSE='',destPath='',localCache='')
    removeRes = self.dataManager.removeFile(lfn)
    # Check that the put was successful
    self.assert_(putRes['OK'])
    self.assert_(putRes['Value'].has_key('Successful'))
    self.assert_(putRes['Value']['Successful'].has_key(lfn))
    self.assert_(putRes['Value']['Successful'][lfn])
    # Check that the replicate was successful
    self.assert_(replicateRes['OK'])
    self.assert_(replicateRes['Value'].has_key('Successful'))
    self.assert_(replicateRes['Value']['Successful'].has_key(lfn))
    self.assert_(replicateRes['Value']['Successful'][lfn])
    # Check that the removal was successful
    self.assert_(removeRes['OK'])
    self.assert_(removeRes['Value'].has_key('Successful'))
    self.assert_(removeRes['Value']['Successful'].has_key(lfn))
    self.assert_(removeRes['Value']['Successful'][lfn])

  def test_putAndRegisterGetReplicaMetadata(self):
    print '\n\n#########################################################################\n\n\t\t\tGet metadata test\n'
    lfn = '/lhcb/test/unit-test/ReplicaManager/putAndRegisterGetReplicaMetadata/testFile.%s' % time.time()
    diracSE = 'GRIDKA-RAW'
    putRes = self.dataManager.putAndRegister(lfn, self.fileName, diracSE)
    metadataRes = self.dataManager.getReplicaMetadata(lfn,diracSE)
    removeRes = self.dataManager.removeFile(lfn)
    # Check that the put was successful
    self.assert_(putRes['OK'])
    self.assert_(putRes['Value'].has_key('Successful'))
    self.assert_(putRes['Value']['Successful'].has_key(lfn))
    self.assert_(putRes['Value']['Successful'][lfn])
    # Check that the metadata query was successful
    self.assert_(metadataRes['OK'])
    self.assert_(metadataRes['Value'].has_key('Successful'))
    self.assert_(metadataRes['Value']['Successful'].has_key(lfn))
    self.assert_(metadataRes['Value']['Successful'][lfn])
    # The replica metadata must expose the cache/migration state and size.
    metadataDict = metadataRes['Value']['Successful'][lfn]
    self.assert_(metadataDict.has_key('Cached'))
    self.assert_(metadataDict.has_key('Migrated'))
    self.assert_(metadataDict.has_key('Size'))
    # Check that the removal was successful
    self.assert_(removeRes['OK'])
    self.assert_(removeRes['Value'].has_key('Successful'))
    self.assert_(removeRes['Value']['Successful'].has_key(lfn))
    self.assert_(removeRes['Value']['Successful'][lfn])

  # NOTE: "Regsiter" typo is kept — renaming the method would change the
  # test identifiers reported by the runner.
  def test_putAndRegsiterGetAccessUrl(self):
    print '\n\n#########################################################################\n\n\t\t\tGet Access Url test\n'
    lfn = '/lhcb/test/unit-test/ReplicaManager/putAndRegisterGetAccessUrl/testFile.%s' % time.time()
    diracSE = 'GRIDKA-RAW'
    putRes = self.dataManager.putAndRegister(lfn, self.fileName, diracSE)
    getAccessUrlRes = self.dataManager.getReplicaAccessUrl(lfn,diracSE)
    print getAccessUrlRes
    removeRes = self.dataManager.removeFile(lfn)
    # Check that the put was successful
    self.assert_(putRes['OK'])
    self.assert_(putRes['Value'].has_key('Successful'))
    self.assert_(putRes['Value']['Successful'].has_key(lfn))
    self.assert_(putRes['Value']['Successful'][lfn])
    # Check that the access url was successful
    self.assert_(getAccessUrlRes['OK'])
    self.assert_(getAccessUrlRes['Value'].has_key('Successful'))
    self.assert_(getAccessUrlRes['Value']['Successful'].has_key(lfn))
    self.assert_(getAccessUrlRes['Value']['Successful'][lfn])
    # Check that the removal was successful
    self.assert_(removeRes['OK'])
    self.assert_(removeRes['Value'].has_key('Successful'))
    self.assert_(removeRes['Value']['Successful'].has_key(lfn))
    self.assert_(removeRes['Value']['Successful'][lfn])

  def test_putAndRegisterRemoveReplica(self):
    print '\n\n#########################################################################\n\n\t\t\tRemove replica test\n'
    lfn = '/lhcb/test/unit-test/ReplicaManager/putAndRegisterRemoveReplica/testFile.%s' % time.time()
    diracSE = 'GRIDKA-RAW'
    putRes = self.dataManager.putAndRegister(lfn, self.fileName, diracSE)
    removeReplicaRes = self.dataManager.removeReplica(diracSE,lfn)
    removeRes = self.dataManager.removeFile(lfn)
    # Check that the put was successful
    self.assert_(putRes['OK'])
    self.assert_(putRes['Value'].has_key('Successful'))
    self.assert_(putRes['Value']['Successful'].has_key(lfn))
    self.assert_(putRes['Value']['Successful'][lfn])
    # Check that the replica removal was successful
    self.assert_(removeReplicaRes['OK'])
    self.assert_(removeReplicaRes['Value'].has_key('Successful'))
    self.assert_(removeReplicaRes['Value']['Successful'].has_key(lfn))
    self.assert_(removeReplicaRes['Value']['Successful'][lfn])
    # Check that the removal was successful
    self.assert_(removeRes['OK'])
    self.assert_(removeRes['Value'].has_key('Successful'))
    self.assert_(removeRes['Value']['Successful'].has_key(lfn))
    self.assert_(removeRes['Value']['Successful'][lfn])

  def test_registerFile(self):
    # Register a purely fictitious PFN in the catalog only (no data upload).
    lfn = '/lhcb/test/unit-test/ReplicaManager/registerFile/testFile.%s' % time.time()
    physicalFile = 'srm://host:port/srm/managerv2?SFN=/sa/path%s' % lfn
    fileSize = 10000
    storageElementName = 'CERN-RAW'
    fileGuid = makeGuid()
    fileTuple = (lfn,physicalFile,fileSize,storageElementName,fileGuid)
    registerRes = self.dataManager.registerFile(fileTuple)
    removeCatalogReplicaRes = self.dataManager.removeCatalogReplica(storageElementName,lfn)
    removeFileRes = self.dataManager.removeFile(lfn)
    # Check that the file registration was done correctly
    self.assert_(registerRes['OK'])
    self.assert_(registerRes['Value'].has_key('Successful'))
    self.assert_(registerRes['Value']['Successful'].has_key(lfn))
    self.assert_(registerRes['Value']['Successful'][lfn])
    # Check that the replica removal was successful
    self.assert_(removeCatalogReplicaRes['OK'])
    self.assert_(removeCatalogReplicaRes['Value'].has_key('Successful'))
    self.assert_(removeCatalogReplicaRes['Value']['Successful'].has_key(lfn))
    self.assert_(removeCatalogReplicaRes['Value']['Successful'][lfn])
    # Check that the removal was successful
    self.assert_(removeFileRes['OK'])
    self.assert_(removeFileRes['Value'].has_key('Successful'))
    self.assert_(removeFileRes['Value']['Successful'].has_key(lfn))
    self.assert_(removeFileRes['Value']['Successful'][lfn])

  def test_registerReplica(self):
    print '\n\n#########################################################################\n\n\t\t\tRegister replica test\n'
    # Register a catalog-only file, then add a second catalog replica for it.
    lfn = '/lhcb/test/unit-test/ReplicaManager/registerReplica/testFile.%s' % time.time()
    physicalFile = 'srm://host:port/srm/managerv2?SFN=/sa/path%s' % lfn
    fileSize = 10000
    storageElementName = 'CERN-RAW'
    fileGuid = makeGuid()
    fileTuple = (lfn,physicalFile,fileSize,storageElementName,fileGuid)
    registerRes = self.dataManager.registerFile(fileTuple)
    seName = 'GRIDKA-RAW'
    replicaTuple = (lfn,physicalFile,seName)
    registerReplicaRes = self.dataManager.registerReplica(replicaTuple)
    removeCatalogReplicaRes1 = self.dataManager.removeCatalogReplica(storageElementName,lfn)
    removeCatalogReplicaRes2 = self.dataManager.removeCatalogReplica(seName,lfn)
    removeFileRes = self.dataManager.removeFile(lfn)
    # Check that the file registration was done correctly
    self.assert_(registerRes['OK'])
    self.assert_(registerRes['Value'].has_key('Successful'))
    self.assert_(registerRes['Value']['Successful'].has_key(lfn))
    self.assert_(registerRes['Value']['Successful'][lfn])
    # Check that the replica registration was successful
    self.assert_(registerReplicaRes['OK'])
    self.assert_(registerReplicaRes['Value'].has_key('Successful'))
    self.assert_(registerReplicaRes['Value']['Successful'].has_key(lfn))
    self.assert_(registerReplicaRes['Value']['Successful'][lfn])
    # Check that the replica removal was successful
    self.assert_(removeCatalogReplicaRes1['OK'])
    self.assert_(removeCatalogReplicaRes1['Value'].has_key('Successful'))
    self.assert_(removeCatalogReplicaRes1['Value']['Successful'].has_key(lfn))
    self.assert_(removeCatalogReplicaRes1['Value']['Successful'][lfn])
    # Check that the replica removal was successful
    self.assert_(removeCatalogReplicaRes2['OK'])
    self.assert_(removeCatalogReplicaRes2['Value'].has_key('Successful'))
    self.assert_(removeCatalogReplicaRes2['Value']['Successful'].has_key(lfn))
    self.assert_(removeCatalogReplicaRes2['Value']['Successful'][lfn])
    # Check that the removal was successful
    self.assert_(removeFileRes['OK'])
    self.assert_(removeFileRes['Value'].has_key('Successful'))
    self.assert_(removeFileRes['Value']['Successful'].has_key(lfn))
    self.assert_(removeFileRes['Value']['Successful'][lfn])

  def test_putAndRegisterGet(self):
    print '\n\n#########################################################################\n\n\t\t\tGet file test\n'
    lfn = '/lhcb/test/unit-test/ReplicaManager/putAndRegisterGet/testFile.%s' % time.time()
    diracSE = 'GRIDKA-RAW'
    putRes = self.dataManager.putAndRegister(lfn, self.fileName, diracSE)
    getRes = self.dataManager.getFile(lfn)
    removeRes = self.dataManager.removeFile(lfn)
    # The download lands in the current working directory; clean it up.
    localFilePath = "%s/%s" % (os.getcwd(),os.path.basename(lfn))
    if os.path.exists(localFilePath):
      os.remove(localFilePath)
    # Check that the put was successful
    self.assert_(putRes['OK'])
    self.assert_(putRes['Value'].has_key('Successful'))
    self.assert_(putRes['Value']['Successful'].has_key(lfn))
    self.assert_(putRes['Value']['Successful'][lfn])
    # Check that the get was successful (returns the local download path)
    self.assert_(getRes['OK'])
    self.assert_(getRes['Value'].has_key('Successful'))
    self.assert_(getRes['Value']['Successful'].has_key(lfn))
    self.assertEqual(getRes['Value']['Successful'][lfn],localFilePath)
    # Check that the removal was successful
    self.assert_(removeRes['OK'])
    self.assert_(removeRes['Value'].has_key('Successful'))
    self.assert_(removeRes['Value']['Successful'].has_key(lfn))
    self.assert_(removeRes['Value']['Successful'][lfn])
class ReplicaManagerTestCase(unittest.TestCase):
  """Integration tests for the high-level ``DataManager`` file operations.

  Every test uploads a freshly created local file to a storage element,
  exercises one DataManager call and removes the file again.  NOTE: these
  are *integration* tests: they need a configured DIRAC installation, a
  valid proxy and access to the storage elements used below (GRIDKA-RAW,
  CNAF-DST, CERN-RAW).
  """

  def setUp(self):
    """Create the DataManager and a small, unique local payload file."""
    self.dataManager = DataManager()
    self.fileName = '/tmp/temporaryLocalFile'
    # A timestamp payload makes the file content unique for every run.
    with open(self.fileName, 'w') as payload:
      payload.write("%s" % time.time())

  def tearDown(self):
    """Remove the local payload file (best effort)."""
    try:
      os.remove(self.fileName)
    except OSError:
      pass

  def _assertSuccessful(self, res, lfn):
    """Assert *res* is S_OK and lists *lfn* under 'Successful' with a truthy
    value.  Factors out the four-line check repeated in every test."""
    self.assertTrue(res['OK'])
    self.assertTrue('Successful' in res['Value'])
    self.assertTrue(lfn in res['Value']['Successful'])
    self.assertTrue(res['Value']['Successful'][lfn])

  def test_putAndRegister(self):
    """Upload a file, register it, then remove it."""
    print('\n\n#########################################################################\n\n\t\t\tPut and register test\n')
    lfn = '/lhcb/test/unit-test/ReplicaManager/putAndRegister/testFile.%s' % time.time()
    diracSE = 'GRIDKA-RAW'
    putRes = self.dataManager.putAndRegister(lfn, self.fileName, diracSE)
    removeRes = self.dataManager.removeFile(lfn)
    # Check that the put was successful
    self._assertSuccessful(putRes, lfn)
    # Check that the removal was successful
    self._assertSuccessful(removeRes, lfn)

  def test_putAndRegisterReplicate(self):
    """Upload a file, replicate it to a second SE, then remove it."""
    print('\n\n#########################################################################\n\n\t\t\tReplication test\n')
    lfn = '/lhcb/test/unit-test/ReplicaManager/putAndRegisterReplicate/testFile.%s' % time.time()
    diracSE = 'GRIDKA-RAW'
    putRes = self.dataManager.putAndRegister(lfn, self.fileName, diracSE)
    replicateRes = self.dataManager.replicateAndRegister(lfn, 'CNAF-DST')  # sourceSE='', destPath='', localCache=''
    removeRes = self.dataManager.removeFile(lfn)
    self._assertSuccessful(putRes, lfn)
    self._assertSuccessful(replicateRes, lfn)
    self._assertSuccessful(removeRes, lfn)

  def test_putAndRegisterGetReplicaMetadata(self):
    """Upload a file and query its replica metadata."""
    print('\n\n#########################################################################\n\n\t\t\tGet metadata test\n')
    lfn = '/lhcb/test/unit-test/ReplicaManager/putAndRegisterGetReplicaMetadata/testFile.%s' % time.time()
    diracSE = 'GRIDKA-RAW'
    putRes = self.dataManager.putAndRegister(lfn, self.fileName, diracSE)
    metadataRes = self.dataManager.getReplicaMetadata(lfn, diracSE)
    removeRes = self.dataManager.removeFile(lfn)
    self._assertSuccessful(putRes, lfn)
    self._assertSuccessful(metadataRes, lfn)
    # The metadata record must carry the standard storage attributes.
    metadataDict = metadataRes['Value']['Successful'][lfn]
    for key in ('Cached', 'Migrated', 'Size'):
      self.assertTrue(key in metadataDict)
    self._assertSuccessful(removeRes, lfn)

  def test_putAndRegsiterGetAccessUrl(self):
    """Upload a file and resolve an access URL for it.

    NOTE: the historical typo in the method name ('Regsiter') is kept on
    purpose so the test keeps its identity in existing selections/reports.
    """
    print('\n\n#########################################################################\n\n\t\t\tGet Access Url test\n')
    lfn = '/lhcb/test/unit-test/ReplicaManager/putAndRegisterGetAccessUrl/testFile.%s' % time.time()
    diracSE = 'GRIDKA-RAW'
    putRes = self.dataManager.putAndRegister(lfn, self.fileName, diracSE)
    getAccessUrlRes = self.dataManager.getReplicaAccessUrl(lfn, diracSE)
    print(getAccessUrlRes)
    removeRes = self.dataManager.removeFile(lfn)
    self._assertSuccessful(putRes, lfn)
    self._assertSuccessful(getAccessUrlRes, lfn)
    self._assertSuccessful(removeRes, lfn)

  def test_putAndRegisterRemoveReplica(self):
    """Upload a file, remove its replica from the SE, then remove the file."""
    print('\n\n#########################################################################\n\n\t\t\tRemove replica test\n')
    lfn = '/lhcb/test/unit-test/ReplicaManager/putAndRegisterRemoveReplica/testFile.%s' % time.time()
    diracSE = 'GRIDKA-RAW'
    putRes = self.dataManager.putAndRegister(lfn, self.fileName, diracSE)
    removeReplicaRes = self.dataManager.removeReplica(diracSE, lfn)
    removeRes = self.dataManager.removeFile(lfn)
    self._assertSuccessful(putRes, lfn)
    self._assertSuccessful(removeReplicaRes, lfn)
    self._assertSuccessful(removeRes, lfn)

  def test_registerFile(self):
    """Register a catalog entry for a physical file, then clean it up."""
    lfn = '/lhcb/test/unit-test/ReplicaManager/registerFile/testFile.%s' % time.time()
    physicalFile = 'srm://host:port/srm/managerv2?SFN=/sa/path%s' % lfn
    fileSize = 10000
    storageElementName = 'CERN-RAW'
    fileGuid = makeGuid()
    fileTuple = (lfn, physicalFile, fileSize, storageElementName, fileGuid)
    registerRes = self.dataManager.registerFile(fileTuple)
    removeCatalogReplicaRes = self.dataManager.removeCatalogReplica(storageElementName, lfn)
    removeFileRes = self.dataManager.removeFile(lfn)
    self._assertSuccessful(registerRes, lfn)
    self._assertSuccessful(removeCatalogReplicaRes, lfn)
    self._assertSuccessful(removeFileRes, lfn)

  def test_registerReplica(self):
    """Register a file plus an additional replica, then clean both up."""
    print('\n\n#########################################################################\n\n\t\t\tRegister replica test\n')
    lfn = '/lhcb/test/unit-test/ReplicaManager/registerReplica/testFile.%s' % time.time()
    physicalFile = 'srm://host:port/srm/managerv2?SFN=/sa/path%s' % lfn
    fileSize = 10000
    storageElementName = 'CERN-RAW'
    fileGuid = makeGuid()
    fileTuple = (lfn, physicalFile, fileSize, storageElementName, fileGuid)
    registerRes = self.dataManager.registerFile(fileTuple)
    seName = 'GRIDKA-RAW'
    replicaTuple = (lfn, physicalFile, seName)
    registerReplicaRes = self.dataManager.registerReplica(replicaTuple)
    removeCatalogReplicaRes1 = self.dataManager.removeCatalogReplica(storageElementName, lfn)
    removeCatalogReplicaRes2 = self.dataManager.removeCatalogReplica(seName, lfn)
    removeFileRes = self.dataManager.removeFile(lfn)
    self._assertSuccessful(registerRes, lfn)
    self._assertSuccessful(registerReplicaRes, lfn)
    self._assertSuccessful(removeCatalogReplicaRes1, lfn)
    self._assertSuccessful(removeCatalogReplicaRes2, lfn)
    self._assertSuccessful(removeFileRes, lfn)

  def test_putAndRegisterGet(self):
    """Upload a file and download it back into the working directory."""
    print('\n\n#########################################################################\n\n\t\t\tGet file test\n')
    lfn = '/lhcb/test/unit-test/ReplicaManager/putAndRegisterGet/testFile.%s' % time.time()
    diracSE = 'GRIDKA-RAW'
    putRes = self.dataManager.putAndRegister(lfn, self.fileName, diracSE)
    getRes = self.dataManager.getFile(lfn)
    removeRes = self.dataManager.removeFile(lfn)
    localFilePath = "%s/%s" % (os.getcwd(), os.path.basename(lfn))
    if os.path.exists(localFilePath):
      os.remove(localFilePath)
    self._assertSuccessful(putRes, lfn)
    # The download must report the local destination path for the LFN.
    self.assertTrue(getRes['OK'])
    self.assertTrue('Successful' in getRes['Value'])
    self.assertTrue(lfn in getRes['Value']['Successful'])
    self.assertEqual(getRes['Value']['Successful'][lfn], localFilePath)
    self._assertSuccessful(removeRes, lfn)
class SoftwareManager(object):
    """Manage software setup.

    Locates software packages either on CVMFS or as tarballs registered in
    the DIRAC file catalog, and installs tarballs locally.
    """

    def __init__(self, soft_category):
        """Constructor.

        :param dict soft_category: mapping of package name -> software category
        """
        self.CVMFS_DIR = '/cvmfs/sw.cta-observatory.org/software'
        self.LFN_ROOT = '/vo.cta.in2p3.fr/software'
        self.SOFT_CATEGORY_DICT = soft_category
        self.dm = DataManager()

    def _search_software(self, package, version, compiler, use_cvmfs):
        """Look for a software package.

        Depending on *use_cvmfs* the package is searched either as a CVMFS
        directory or as a tarball in the DIRAC file catalog.  NOTE(review):
        when CVMFS is enabled but the package is missing there, no tarball
        fallback is attempted -- this mirrors the historical behaviour.

        :return: S_OK({'Source': ..., 'Path': ...}) or S_ERROR
        """
        # software package category
        category = self.SOFT_CATEGORY_DICT[package]
        if use_cvmfs:
            # look for software on cvmfs
            package_dir = os.path.join(self.CVMFS_DIR, 'centos7', compiler,
                                       category, package, version)
            if os.path.isdir(package_dir):
                DIRAC.gLogger.notice('Found package %s version %s at:\n%s'
                                     % (package, version, package_dir))
                return DIRAC.S_OK({'Source': 'cvmfs', 'Path': package_dir})
            DIRAC.gLogger.warn('%s\n not found on cvmfs' % package_dir)
        else:
            # look for tarball in the Dirac file catalog
            package_dir = os.path.join(self.LFN_ROOT, 'centos7', compiler,
                                       category, package, version)
            DIRAC.gLogger.notice('Looking for tarball in %s' % package_dir)
            results = self.dm.getFilesFromDirectory(package_dir)
            try:
                first_file_path = results['Value'][0]
                # only a gzipped tarball with an active replica counts
                if first_file_path.endswith('.tar.gz'):
                    results = self.dm.getActiveReplicas(first_file_path)
                    if results['OK']:
                        return DIRAC.S_OK({'Source': 'tarball', 'Path': package_dir})
            except (KeyError, IndexError, TypeError):
                # getFilesFromDirectory failed or the directory is empty
                DIRAC.gLogger.warn('No usual tarball found in the directory')
        return DIRAC.S_ERROR('Could not find package %s / %s / %s in any location'
                             % (package, version, compiler))

    def find_software(self, package, version, compiler='gcc48_default'):
        """Check if the software package is installed in any software area.

        Keyword arguments:
        package -- package name as the directory name
        version -- software version as the directory name
        compiler -- compiler version and configuration
        """
        # first check if cvmfs is available
        ops_helper = Operations()
        use_cvmfs = ops_helper.getValue('SoftwarePolicy/UseCvmfs', bool)
        DIRAC.gLogger.notice('SoftwarePolicy for UseCvmfs is:', use_cvmfs)

        # get platform and cpu information
        try:
            os_name, cpu_name, inst = get_os_and_cpu_info()
            DIRAC.gLogger.notice('Running %s on a %s ' % (os_name, cpu_name))
        except Exception:
            # be conservative: assume all processors have at least sse4
            inst = 'sse4'
            DIRAC.gLogger.warn('Could not determine platform and cpu information')

        if compiler == 'gcc48_matchcpu':
            compiler = 'gcc48_%s' % inst

        # builds requiring specific CPU instruction sets; unsupported CPUs
        # fall back to the sse4 build (assumed available everywhere)
        required_inst = {'gcc48_avx': ('avx', 'avx2', 'avx512'),
                         'gcc48_avx2': ('avx2', 'avx512'),
                         'gcc48_avx512': ('avx512',)}

        if compiler in required_inst:
            # BUG FIX: the original compared with 'is' ('inst is "avx512"'),
            # which tests object identity, not string equality
            if inst not in required_inst[compiler]:
                instruction_set = compiler.split('_', 1)[1]
                DIRAC.gLogger.warn('CPU has no %s instructions, running sse4 version'
                                   % instruction_set)
                compiler = 'gcc48_sse4'
        elif compiler not in ('gcc48_default', 'gcc48_sse4'):
            # BUG FIX: the original built this S_ERROR without returning it
            return DIRAC.S_ERROR('Unknown compiler specified: %s' % compiler)

        return self._search_software(package, version, compiler, use_cvmfs)

    def install_dirac_scripts(self, package_dir):
        """Copy the package's DIRAC scripts into the current directory.

        :return: S_OK() or S_ERROR
        """
        dirac_scripts = glob.glob(os.path.join(package_dir, 'dirac_*'))
        try:
            for one_file in dirac_scripts:
                shutil.copy2(one_file, '.')
            return DIRAC.S_OK()
        except shutil.Error as error:
            return DIRAC.S_ERROR('Failed to install DIRAC scripts:\n%s' % error)

    def dump_setup_script_path(self, package_dir, textfilename='setup_script_path.txt'):
        """Dump the path to setupPackage.sh into a one-line ascii file,
        to be read and sourced by the following script.

        :return: S_OK()
        """
        script_path = os.path.join(package_dir, 'setupPackage.sh')
        with open(textfilename, 'w') as textfile:
            textfile.write(script_path + '\n')
        return DIRAC.S_OK()

    def install_software(self, tar_lfn, target_dir='.'):
        """Download and unpack a software tarball into *target_dir*.

        :return: S_OK(target_dir) or S_ERROR
        """
        DIRAC.gLogger.notice('Installing package at %s' % tar_lfn)
        # Download the tar file
        DIRAC.gLogger.notice('Trying to download package:', tar_lfn)
        res = self.dm.getFile(tar_lfn)
        if not res['OK']:
            return res
        if tar_lfn not in res['Value']['Successful']:
            # BUG FIX: the original passed a tuple to S_ERROR by writing
            # "error = 'msg:', tar_lfn"
            return DIRAC.S_ERROR('Failed to download package: %s' % tar_lfn)
        DIRAC.gLogger.notice(' Package downloaded successfully:', tar_lfn)
        # Extract the tar file to the target directory; close the handle even
        # if extraction of a member fails
        tar = tarfile.open(tar_lfn, "r|*")
        try:
            for tar_info in tar:
                tar.extract(tar_info, target_dir)
        finally:
            tar.close()
        os.unlink(tar_lfn)
        # Done
        DIRAC.gLogger.notice('Package %s installed successfully at:\n%s'
                             % (tar_lfn, target_dir))
        return DIRAC.S_OK(target_dir)
class WhizardAnalysis(ModuleBase):
  """Specific Module to run a Whizard job.

  Prepares a whizard.in steering file (either from a template, a process
  list, or a WhizardOptions dictionary), generates a shell script that runs
  the whizard binary, executes it, parses the produced log for errors and
  cross-section information, and renames the resulting stdhep file(s).
  """

  def __init__(self):
    super(WhizardAnalysis, self).__init__()
    self.enable = True
    self.STEP_NUMBER = ''
    self.debug = True
    self.SteeringFile = ''
    self.OutputFile = ''
    self.NumberOfEvents = 1
    self.Lumi = 0
    self.applicationName = 'whizard'
    self.evttype = ""
    self.RandomSeed = 0
    self.getProcessInFile = False
    self.datMan = DataManager()
    self.processlist = None
    self.parameters = {}
    self.susymodel = 0
    self.Model = ''
    self.genmodel = GeneratorModels()
    # Markers scanned for in the application log output.
    self.eventstring = ['! ', 'Fatal error:', 'PYSTOP', 'No matrix element available',
                        'Floating point exception', 'Event generation finished.',
                        " n_events", "luminosity", " sum "]
    self.excludeAllButEventString = False
    self.steeringparameters = ''
    self.options = None
    self.optionsdict = {}
    self.OptionsDictStr = ''
    self.GenLevelCutDictStr = ''
    self.genlevelcuts = {}
    self.willCut = False
    self.useGridFiles = False

  def obtainProcessList(self):
    """Internal function

    Get the process list from storage if whizard.in was not provided

    :return: S_OK(), S_ERROR()
    """
    res = self.ops.getValue("/ProcessList/Location", "")
    if not res:
      return S_ERROR("No process list found")
    processlistloc = res
    # Only download if the file is not already in the working directory.
    if not os.path.exists(os.path.basename(processlistloc)):
      res = self.datMan.getFile(processlistloc)
      if not res['OK']:
        LOG.error('Could not get processlist: %s' % res['Message'])
        return res
    self.processlist = ProcessList(os.path.basename(processlistloc))
    return S_OK()

  def applicationSpecificInputs(self):
    """Resolve module input

    :return: S_OK()
    """
    self.parameters['ENERGY'] = self.energy
    if not self.RandomSeed and self.jobID:
      self.RandomSeed = self.jobID
    # In production, derive a reproducible seed from production and job IDs.
    if 'IS_PROD' in self.workflow_commons or 'IS_DBD_GEN_PROD' in self.workflow_commons:
      self.RandomSeed = int(str(int(self.workflow_commons["PRODUCTION_ID"])) + str(int(self.workflow_commons["JOB_ID"])))
    self.parameters['SEED'] = self.RandomSeed
    self.parameters['NBEVTS'] = self.NumberOfEvents
    self.parameters['LUMI'] = self.Lumi
    # NOTE(review): is SusyModel ever used? Kept for backward compatibility.
    if 'SusyModel' in self.step_commons:
      self.susymodel = self.step_commons['SusyModel']
    self.SteeringFile = os.path.basename(self.step_commons.get("InputFile", self.SteeringFile))
    # Rename to avoid clobbering the whizard.in that this module generates.
    if self.SteeringFile == "whizard.in":
      os.rename(self.SteeringFile, "whizardnew.in")
      self.SteeringFile = "whizardnew.in"
    self.parameters['PROCESS'] = self.evttype
    # Extra parameters arrive as a ';'-separated list of key=value pairs.
    listofparams = self.steeringparameters.split(";")
    for param in listofparams:
      if param.count("="):
        self.parameters[param.split("=")[0]] = param.split("=")[1]
    if self.OptionsDictStr:
      LOG.info("Will use whizard.in definition from WhizardOptions.")
      # NOTE(review): eval() on a workflow-supplied string is a code-injection
      # hazard; kept as-is because the format predates safer serialization.
      try:
        self.optionsdict = eval(self.OptionsDictStr)
        if 'integration_input' not in self.optionsdict:
          self.optionsdict['integration_input'] = {}
        if 'seed' not in self.optionsdict['integration_input']:
          self.optionsdict['integration_input']['seed'] = int(self.RandomSeed)
        if 'process_input' in self.optionsdict:
          if 'sqrts' in self.optionsdict['process_input']:
            self.energy = self.optionsdict['process_input']['sqrts']
      except:
        return S_ERROR("Could not convert string to dictionary for optionsdict")
    if self.GenLevelCutDictStr:
      LOG.info("Found generator level cuts")
      try:
        self.genlevelcuts = eval(self.GenLevelCutDictStr)
      except:
        return S_ERROR("Could not convert the generator level cuts back to dictionary")
    # Without a steering file or options dict, fall back to the process list.
    if not len(self.SteeringFile) and not self.optionsdict:
      self.getProcessInFile = True
    if "IS_PROD" in self.workflow_commons:
      if self.workflow_commons["IS_PROD"] and not self.willCut:
        #self.OutputFile = getProdFilename(self.OutputFile,int(self.workflow_commons["PRODUCTION_ID"]),
        #                                  int(self.workflow_commons["JOB_ID"]))
        if 'ProductionOutputData' in self.workflow_commons:
          outputlist = self.workflow_commons['ProductionOutputData'].split(";")
          for obj in outputlist:
            if obj.lower().count("_gen_"):
              self.OutputFile = os.path.basename(obj)
              break
        else:
          #This is because most likely there is stdhepcut running after
          self.OutputFile = "willcut.stdhep"
          #getProdFilename(self.OutputFile,int(self.workflow_commons["PRODUCTION_ID"]),
          #                int(self.workflow_commons["JOB_ID"]))
    if "IS_DBD_GEN_PROD" in self.workflow_commons and self.workflow_commons["IS_DBD_GEN_PROD"]:
      #self.OutputFile = getProdFilename(self.OutputFile,int(self.workflow_commons["PRODUCTION_ID"]),
      #                                  int(self.workflow_commons["JOB_ID"]))
      if 'ProductionOutputData' in self.workflow_commons:
        outputlist = self.workflow_commons['ProductionOutputData'].split(";")
        for obj in outputlist:
          self.OutputFile = os.path.basename(obj)
          break
      else:
        self.OutputFile = getProdFilename(self.OutputFile,
                                          int(self.workflow_commons["PRODUCTION_ID"]),
                                          int(self.workflow_commons["JOB_ID"]))
    return S_OK()

  def runIt(self):
    """ Called by Agent

    Executes the following
      - resolve input variables
      - resolve installation location
      - resolve dependencies location (beam_spectra)
      - get processlist if needed
      - define output file name
      - prepare whizard.in
      - make magic

    :return: S_OK(), S_ERROR()
    """
    self.result = S_OK()
    if not self.platform:
      self.result = S_ERROR( 'No ILC platform selected' )
    elif not self.applicationLog:
      self.result = S_ERROR( 'No Log file provided' )
    if not self.result['OK']:
      LOG.error("Failed to resolve input parameters:", self.result["Message"])
      return self.result
    # Do not run if a previous workflow step already failed.
    if not self.workflowStatus['OK'] or not self.stepStatus['OK']:
      LOG.verbose('Workflow status = %s, step status = %s' % (self.workflowStatus['OK'], self.stepStatus['OK']))
      return S_OK('Whizard should not proceed as previous step did not end properly')
    #if self.debug:
    #  self.excludeAllButEventString = False
    res = getSoftwareFolder(self.platform, self.applicationName, self.applicationVersion)
    if not res['OK']:
      LOG.error("Failed getting software folder", res['Message'])
      self.setApplicationStatus('Failed finding software')
      return res
    mySoftDir = res['Value']
    ###Remove libc
    removeLibc(mySoftDir + "/lib")
    ##Need to fetch the new LD_LIBRARY_PATH
    new_ld_lib_path = getNewLDLibs(self.platform, self.applicationName, self.applicationVersion)
    #Don't forget to prepend the application's libs
    new_ld_lib_path = mySoftDir + "/lib:" + new_ld_lib_path
    ### Resolve dependencies (look for beam_spectra)
    deps = resolveDeps(self.platform, self.applicationName, self.applicationVersion)
    path_to_beam_spectra = ""
    path_to_gridfiles = ""
    for dep in deps:
      res = getSoftwareFolder(self.platform, dep[ "app" ], dep['version'])
      if not res['OK']:
        LOG.error("Failed getting software folder", res['Message'])
        self.setApplicationStatus('Failed finding software')
        return res
      depfolder = res['Value']
      if dep["app"] == "beam_spectra":
        path_to_beam_spectra = depfolder
      elif dep["app"] == "gridfiles":
        path_to_gridfiles = depfolder
    ##Env variables needed to run whizard: avoids hard coded locations
    os.environ['LUMI_LINKER'] = path_to_beam_spectra + "/lumi_linker_000"
    os.environ['PHOTONS_B1'] = path_to_beam_spectra + "/photons_beam1_linker_000"
    os.environ['PHOTONS_B2'] = path_to_beam_spectra + "/photons_beam2_linker_000"
    os.environ['EBEAM'] = path_to_beam_spectra + "/ebeam_in_linker_000"
    os.environ['PBEAM'] = path_to_beam_spectra + "/pbeam_in_linker_000"
    os.environ['LUMI_EE_LINKER'] = path_to_beam_spectra + "/lumi_ee_linker_000"
    os.environ['LUMI_EG_LINKER'] = path_to_beam_spectra + "/lumi_eg_linker_000"
    os.environ['LUMI_GE_LINKER'] = path_to_beam_spectra + "/lumi_ge_linker_000"
    os.environ['LUMI_GG_LINKER'] = path_to_beam_spectra + "/lumi_gg_linker_000"
    list_of_gridfiles = []
    if path_to_gridfiles and self.useGridFiles:
      tmp_list_of_gridfiles = [os.path.join(path_to_gridfiles, item) for item in os.listdir(path_to_gridfiles)]
      gridfilesfound = False
      for path in tmp_list_of_gridfiles:
        if os.path.isdir(path) and path.count(str(self.energy)):
          #Here look for a sub directory for the energy related grid files
          list_of_gridfiles = [os.path.join(path, item) for item in os.listdir(path)]
          gridfilesfound = True
          LOG.info('Found grid files specific for energy %s' % self.energy)
          break
      if not gridfilesfound:
        LOG.info("Will use generic grid files found, hope the energy is set right")
        list_of_gridfiles = [item for item in glob.glob(os.path.join(path_to_gridfiles, "*.grb")) + glob.glob(os.path.join(path_to_gridfiles, "*.grc"))]
    template = False
    if self.SteeringFile.count("template"):
      template = True
    ## Get from process file the proper whizard.in file
    if self.getProcessInFile:
      whizardin = ""
      res = self.obtainProcessList()
      if not res['OK']:
        LOG.error("Could not obtain process list")
        self.setApplicationStatus('Failed getting processlist')
        return res
      whizardin = self.processlist.getInFile(self.evttype)
      if not whizardin:
        LOG.error("Whizard input file was not found in process list, cannot proceed")
        self.setApplicationStatus('Whizard input file was not found')
        return S_ERROR("Error while resolving whizard input file")
      if whizardin.count("template"):
        template = True
      try:
        shutil.copy("%s/%s" % (mySoftDir, whizardin), "./whizardnew.in")
        self.SteeringFile = "whizardnew.in"
      except EnvironmentError:
        LOG.error("Could not copy %s from %s" % (whizardin, mySoftDir))
        self.setApplicationStatus('Failed getting whizard.in file')
        return S_ERROR("Failed to obtain %s" % whizardin)
    ##Check existence of Les Houches input file
    leshouchesfiles = ''
    if not os.path.exists("LesHouches.msugra_1.in"):
      if self.susymodel:
        if self.susymodel == 1:
          if os.path.exists("%s/LesHouches_slsqhh.msugra_1.in" % (mySoftDir)):
            leshouchesfiles = "%s/LesHouches_slsqhh.msugra_1.in" % (mySoftDir)
        if self.susymodel == 2:
          if os.path.exists("%s/LesHouches_chne.msugra_1.in" % (mySoftDir)):
            leshouchesfiles = "%s/LesHouches_chne.msugra_1.in" % (mySoftDir)
      if self.Model:
        # Model-specific LesHouches file shipped with the software install.
        if self.genmodel.hasModel(self.Model)['OK']:
          if self.genmodel.getFile(self.Model)['OK']:
            if os.path.exists("%s/%s" % (mySoftDir, self.genmodel.getFile(self.Model)['Value'])):
              leshouchesfiles = "%s/%s" % (mySoftDir, self.genmodel.getFile(self.Model)['Value'])
            else:
              LOG.error("Request LesHouches file is missing, cannot proceed")
              self.setApplicationStatus("LesHouches file missing")
              return S_ERROR("The LesHouches file was not found. Probably you are using a wrong version of whizard.")
          else:
            LOG.warn("No file found attached to model %s" % self.Model)
        else:
          LOG.error("Model undefined:", self.Model)
          self.setApplicationStatus("Model undefined")
          return S_ERROR("No Model %s defined" % self.Model)
    else:
      leshouchesfiles = "LesHouches.msugra_1.in"
    outputfilename = self.evttype
    # Three ways to produce whizard.in: an options dict, a plain steering
    # file, or a template steering file with parameter substitution.
    if self.optionsdict:
      LOG.info("Using: %s" % self.optionsdict)
      self.options = WhizardOptions(self.Model)
      res = self.options.changeAndReturn(self.optionsdict)
      if not res['OK']:
        return res
      res = self.options.toWhizardDotIn("whizard.in")
    elif not template:
      res = prepareWhizardFile(self.SteeringFile, outputfilename, self.energy,
                               self.RandomSeed, self.NumberOfEvents, self.Lumi,
                               "whizard.in")
    else:
      res = prepareWhizardFileTemplate(self.SteeringFile, outputfilename, self.parameters, "whizard.in")
    if not res['OK']:
      LOG.error('Something went wrong with input file generation')
      self.setApplicationStatus('Whizard: something went wrong with input file generation')
      return S_ERROR('Something went wrong with whizard.in file generation')
    foundproceesinwhizardin = res['Value']
    # Generate the wrapper shell script that actually runs whizard.
    scriptName = 'Whizard_%s_Run_%s.sh' % (self.applicationVersion, self.STEP_NUMBER)
    if os.path.exists(scriptName):
      os.remove(scriptName)
    script = open(scriptName, 'w')
    script.write('#!/bin/sh \n')
    script.write('#####################################################################\n')
    script.write('# Dynamically generated script to run a production or analysis job. #\n')
    script.write('#####################################################################\n')
    script.write('declare -x PATH=%s:$PATH\n' % mySoftDir)
    script.write('declare -x LD_LIBRARY_PATH=%s\n' % new_ld_lib_path)
    script.write('env | sort >> localEnv.log\n')
    script.write('echo =============================\n')
    script.write('echo Printing content of whizard.in \n')
    script.write('cat whizard.in\n')
    script.write('echo =============================\n')
    script.write('cp %s/whizard.mdl ./\n' % mySoftDir)
    if leshouchesfiles:
      if not leshouchesfiles == 'LesHouches.msugra_1.in':
        script.write('cp %s ./LesHouches.msugra_1.in\n' % (leshouchesfiles))
      # whizard reads the LesHouches file through fortran unit 71
      script.write('ln -s LesHouches.msugra_1.in fort.71\n')
    if len(list_of_gridfiles):
      for gridfile in list_of_gridfiles:
        script.write('cp %s ./\n' % (gridfile))
    script.write('cp %s/whizard.prc ./\n' % mySoftDir)
    if self.genlevelcuts:
      res = self.makeWhizardDotCut1()
      if not res['OK']:
        script.close()
        LOG.error("Could not create the cut1 file")
        return S_ERROR("Could not create the cut1 file")
    script.write('echo =============================\n')
    script.write('echo Printing content of whizard.prc \n')
    script.write('cat whizard.prc\n')
    script.write('echo =============================\n')
    extracmd = ""
    if not self.debug:
      extracmd = "2>/dev/null"
    comm = ""
    if foundproceesinwhizardin:
      comm = 'whizard --simulation_input \'write_events_file = \"%s\"\'' % (outputfilename)
    else:
      comm = 'whizard --process_input \'process_id =\"%s\"\' --simulation_input \'write_events_file = \"%s\"\' ' % (self.evttype,
                                                                                                                   outputfilename)
    comm = "%s %s %s\n" % (comm, self.extraCLIarguments, extracmd)
    LOG.info("Will run %s" % comm)
    script.write(comm)
    script.write('declare -x appstatus=$?\n')
    script.write('exit $appstatus\n')
    script.close()
    if os.path.exists(self.applicationLog):
      os.remove(self.applicationLog)
    os.chmod(scriptName, 0o755)
    comm = 'sh -c "./%s"' % (scriptName)
    self.setApplicationStatus('Whizard %s step %s' % (self.applicationVersion, self.STEP_NUMBER))
    self.stdError = ''
    self.result = shellCall(0, comm, callbackFunction = self.redirectLogOutput, bufferLimit = 209715200)
    #self.result = {'OK':True,'Value':(0,'Disabled Execution','')}
    if not self.result['OK']:
      LOG.error("Failed with error %s" % self.result['Message'])
    if not os.path.exists(self.applicationLog):
      LOG.error("Something went terribly wrong, the log file is not present")
      self.setApplicationStatus('%s failed terribly, you are doomed!' % (self.applicationName))
      if not self.ignoreapperrors:
        return S_ERROR('%s did not produce the expected log' % (self.applicationName))
    lumi = ''
    message = ""
    success = False
    ###Analyse log file
    with open(self.applicationLog) as logfile:
      for line in logfile:
        if line.count('! Event sample corresponds to luminosity'):
          elems = line.split()
          lumi = elems[-1]
        # Any of the following markers means whizard failed; stop scanning.
        if line.count("*** Fatal error:"):
          status = 1
          message = line
          break
        elif line.count("PYSTOP"):
          status = 1
          message = line
          break
        elif line.count("No matrix element available"):
          status = 1
          message = line
          break
        elif line.count("Floating point exception"):
          status = 1
          message = line
          break
        elif line.count("Event generation finished."):
          success = True
        else:
          status = 0
    if success:
      status = 0
    else:
      status = 1
    LOG.info('The sample generated has an equivalent luminosity of %s' % lumi)
    if lumi:
      self.workflow_commons['Luminosity'] = float(lumi)
    else:
      status = 1
    ##Now care for the cross sections
    info = {}
    res = self.options.getAsDict()
    if os.path.exists("whizard.out") and res['OK']:
      full_opts_dict = res['Value']
      processes = full_opts_dict['process_input']['process_id'].split()
      info = {}
      info['xsection'] = {}
      processes.append('sum')
      with open("whizard.out", "r") as inf:
        for line in inf:
          line = line.rstrip()
          for process in processes:
            if not process:
              continue
            if line.count(" %s " % process):
              info['xsection'][process] = {}
              line = line.lstrip()
              crosssection = line.split()[1]
              err_crosssection = line.split()[2]
              frac = line.split()[4]
              info['xsection'][process]['xsection'] = float(crosssection)
              info['xsection'][process]['err_xsection'] = float(err_crosssection)
              info['xsection'][process]['fraction'] = float(frac)
    if info:
      if 'Info' not in self.workflow_commons:
        self.workflow_commons['Info'] = info
      else:
        self.workflow_commons['Info'].update(info)
    LOG.info("Status after the application execution is %s" % str(status))
    messageout = 'Whizard %s Successful' % (self.applicationVersion)
    failed = False
    if status != 0:
      LOG.error("Whizard execution completed with errors:")
      failed = True
    else:
      LOG.info("Whizard execution completed successfully")
    ###Deal with output file
    if len(self.OutputFile):
      if os.path.exists(outputfilename + ".001.stdhep"):
        LOG.notice("Looking for output files")
        ofnames = glob.glob(outputfilename + '*.stdhep')
        if len(ofnames) > 1:
          # Several event files: number them off the requested base name.
          basename = self.OutputFile.split(".stdhep")[0]
          i = 0
          for of in ofnames:
            i += 1
            name = basename + "_" + str(i) + ".stdhep"
            os.rename(of, name)
        else:
          os.rename(outputfilename + ".001.stdhep", self.OutputFile)
      else:
        LOG.error("Whizard execution did not produce a stdhep file")
        self.setApplicationStatus('Whizard %s Failed to produce STDHEP file' % (self.applicationVersion))
        messageout = 'Whizard Failed to produce STDHEP file'
        if not self.ignoreapperrors:
          return S_ERROR(messageout)
    if failed is True:
      LOG.error("==================================\n StdError:\n")
      LOG.error(message)
      self.setApplicationStatus('%s Exited With Status %s' % (self.applicationName, status))
      LOG.error('Whizard Exited With Status %s' % (status))
      messageout = 'Whizard Exited With Status %s' % (status)
      if not self.ignoreapperrors:
        return S_ERROR(messageout)
    else:
      self.setApplicationStatus(messageout)
    return S_OK( { "OutputFile": self.OutputFile } )

  def makeWhizardDotCut1(self):
    """ When users need whizard cuts, this is called to prepare the file

    :return: S_OK()
    """
    cutf = open("whizard.cut1", "w")
    for key, values in self.genlevelcuts.items():
      cutf.write("process %s\n" % key)
      for val in values:
        cutf.write(" %s\n" % val)
    cutf.close()
    return S_OK()
class OverlayInput (ModuleBase):
  """ Download the files for overlay.

  The candidate LFNs are resolved either from a user-supplied catalog path
  (``pathToOverlayFiles``) or from metadata lookups in the FileCatalog.
  The files themselves are fetched with site-specific copy helpers (EOS,
  Lyon, Imperial, RAL, KEK) with a fallback to the DIRAC DataManager.
  """

  def __init__(self):
    super(OverlayInput, self).__init__()
    self.enable = True
    self.STEP_NUMBER = ''
    self.applicationName = 'OverlayInput'
    self.curdir = os.getcwd()
    self.applicationLog = ''
    self.printoutflag = ''
    self.prodid = 0
    self.detector = '' ##needed for backward compatibility
    self.detectormodel = ""
    self.energytouse = ''
    self.energy = 0
    # Events contained in one background file (overridden from Operations CS).
    self.nbofeventsperfile = 100
    self.lfns = []
    self.nbfilestoget = 0
    # Background event type; metaEventType may be remapped from the CS later.
    self.BkgEvtType = 'gghad'
    self.metaEventType = self.BkgEvtType
    self.BXOverlay = 0
    # Average number of gamma-gamma -> hadrons interactions per bunch crossing.
    self.ggtohadint = 3.2
    self.nbsigeventsperfile = 0
    self.nbinputsigfile = 1
    self.NbSigEvtsPerJob = 0
    self.datMan = DataManager()
    self.fcc = FileCatalogClient()
    self.site = DIRAC.siteName()
    self.useEnergyForFileLookup = True
    self.machine = 'clic_cdr'
    self.pathToOverlayFiles = ''
    self.processorName = ''

  def applicationSpecificInputs(self):
    """Resolve module parameters from ``step_commons`` and validate them.

    :return: S_OK on success, S_ERROR when a mandatory parameter is missing.
    """
    self.pathToOverlayFiles = self.step_commons.get("pathToOverlayFiles", self.pathToOverlayFiles)
    if 'Detector' in self.step_commons:
      self.detectormodel = self.step_commons['Detector']
    # A detector (model) is only mandatory when no explicit file path is given.
    if not self.detectormodel and not self.detector and not self.pathToOverlayFiles:
      return S_ERROR('Detector model not defined')
    if 'Energy' in self.step_commons:
      self.energytouse = self.step_commons['Energy']
    if self.energy:
      self.energytouse = energyWithLowerCaseUnit( self.energy )
    if not self.energytouse and not self.pathToOverlayFiles:
      return S_ERROR("Energy not set anywhere!")
    if 'BXOverlay' in self.step_commons:
      self.BXOverlay = self.step_commons['BXOverlay']
    if not self.BXOverlay:
      return S_ERROR("BXOverlay parameter not defined")
    if 'ggtohadint' in self.step_commons:
      self.ggtohadint = self.step_commons['ggtohadint']
    if 'ProdID' in self.step_commons:
      self.prodid = self.step_commons['ProdID']
    if 'NbSigEvtsPerJob' in self.step_commons:
      self.NbSigEvtsPerJob = self.step_commons['NbSigEvtsPerJob']
    if 'BkgEvtType' in self.step_commons:
      self.BkgEvtType = self.step_commons['BkgEvtType']
    self.metaEventType = self.BkgEvtType
    res = allowedBkg(self.BkgEvtType, self.energytouse, detector = self.detector,
                     detectormodel = self.detectormodel, machine = self.machine)
    if not res['OK']:
      return res
    if res['Value'] < 0 and not self.pathToOverlayFiles:
      return S_ERROR("No suitable ProdID")
    #if 'Site' in self.workflow_commons:
    #  self.site = self.workflow_commons['Site']
    self.useEnergyForFileLookup = self.step_commons.get("useEnergyForFileLookup", self.useEnergyForFileLookup)
    if self.InputData:
      if self.NumberOfEvents:
        self.nbsigeventsperfile = self.NumberOfEvents
      else:
        return S_ERROR("Number of events in the signal file is missing")
      self.nbinputsigfile = len(self.InputData)
    LOG.info("Signal Events Per Job: %d " % self.NbSigEvtsPerJob)
    LOG.info("Background Event Type: %s " % self.BkgEvtType)
    LOG.info("Meta Event Type: %s " % self.metaEventType)
    LOG.info("Background Events per bunch crossing: %3.2f" % self.ggtohadint)
    LOG.info("SignalEventsPerFile: %d " % self.nbsigeventsperfile)
    if not self.NbSigEvtsPerJob and not self.nbsigeventsperfile:
      return S_ERROR("Could not determine the number of signal events per input file")
    return S_OK("Input variables resolved")

  def __getFilesFromFC(self):
    """ Get the list of files from the FileCatalog.

    Builds a metadata query (energy, event type, detector, machine, ProdID)
    where the ProdID / NbEvts / EvtType are read from the Operations CS.
    :return: result of ``FileCatalogClient.findFilesByMetadata``
    """
    meta = {}
    if self.energy and self.useEnergyForFileLookup:
      meta['Energy'] = str(int(self.energy))
    meta['EvtType'] = self.BkgEvtType
    meta['Datatype'] = 'SIM'
    if self.detectormodel:
      meta['DetectorModel'] = self.detectormodel
    if self.machine == 'ilc_dbd':
      meta['Machine'] = 'ilc'
    if self.machine == 'clic_cdr':
      meta['Machine'] = 'clic'
    res = None
    if self.detector:
      # NOTE(review): the ProdID path uses (machine, detector, energy) while
      # NbEvts/EvtType use (machine, energy, detector) — the argument order
      # differs between the three lookups; confirm against the CS layout.
      res = self.ops.getValue("/Overlay/%s/%s/%s/%s/ProdID" % (self.machine, self.detector,
                                                               self.energytouse, self.BkgEvtType), 0)
      self.nbofeventsperfile = self.ops.getValue("/Overlay/%s/%s/%s/%s/NbEvts" % (self.machine, self.energytouse,
                                                                                  self.detector, self.BkgEvtType), 100)
      self.metaEventType = self.ops.getValue( "/Overlay/%s/%s/%s/%s/EvtType" % ( self.machine, self.energytouse,
                                                                                 self.detector, self.BkgEvtType),
                                              self.BkgEvtType)
    else:
      res = self.ops.getValue("/Overlay/%s/%s/%s/%s/ProdID" % (self.machine, self.energytouse,
                                                               self.detectormodel, self.BkgEvtType), 0)
      self.nbofeventsperfile = self.ops.getValue("/Overlay/%s/%s/%s/%s/NbEvts" % (self.machine, self.energytouse,
                                                                                  self.detectormodel, self.BkgEvtType), 100)
      self.metaEventType = self.ops.getValue( "/Overlay/%s/%s/%s/%s/EvtType" % ( self.machine, self.energytouse,
                                                                                 self.detectormodel, self.BkgEvtType),
                                              self.BkgEvtType)
    LOG.info("Number of Events Per BackgroundFile: %d " % self.nbofeventsperfile)
    meta['EvtType'] = self.metaEventType
    meta['ProdID'] = res
    if self.prodid:
      # An explicit ProdID from the step parameters overrides the CS value.
      meta['ProdID'] = self.prodid
    LOG.info("Using %s as metadata" % (meta))
    return self.fcc.findFilesByMetadata(meta)

  def __getFilesFromPath(self):
    """ Get the list of files from the FileCatalog via the user specified path.
    """
    meta = {}
    return self.fcc.findFilesByMetadata(meta, self.pathToOverlayFiles)

  def __getFilesFromLyon(self, meta):
    """ List the files present at Lyon, not used.
    """
    prodID = meta['ProdID']
    prod = str(prodID).zfill(8)
    energy = meta['Energy']
    bkg = meta["EvtType"]
    detector = meta["DetectorType"]
    path = "/ilc/prod/clic/%s/%s/%s/SIM/%s/" % (energy, bkg, detector, prod)
    comm = ["nsls", "%s" % path]
    res = subprocess.Popen(comm, stdout = subprocess.PIPE).communicate()
    dirlist = res[0].rstrip().split("\n")
    mylist = []
    for mydir in dirlist:
      # Skip DIRAC bookkeeping placeholder entries.
      if mydir.count("dirac_directory"):
        continue
      curdir = path + mydir
      comm2 = ["nsls", curdir]
      res = subprocess.Popen(comm2, stdout = subprocess.PIPE).communicate()
      for oFile in res[0].rstrip().split("\n"):
        if oFile.count("dirac_directory"):
          continue
        mylist.append(path + mydir + "/" + oFile)
    if not mylist:
      return S_ERROR("File list is empty")
    return S_OK(mylist)

  def __getFilesFromCastor(self, meta):
    """ Get the available files (list) from the CERN castor storage
    """
    prodID = meta['ProdID']
    prod = str(prodID).zfill(8)
    energy = meta['Energy']
    bkg = meta["EvtType"]
    detector = meta["DetectorType"]
    path = "/castor/cern.ch/grid/ilc/prod/%s/%s/%s/%s/SIM/%s/" % (self.machine, energy, bkg, detector, prod)
    comm = ["nsls", "%s" % path]
    res = subprocess.Popen(comm, stdout = subprocess.PIPE).communicate()
    dirlist = res[0].rstrip().split("\n")
    mylist = []
    for mydir in dirlist:
      # Skip DIRAC bookkeeping placeholder entries.
      if mydir.count("dirac_directory"):
        continue
      curdir = path + mydir
      comm2 = ["nsls", curdir]
      res = subprocess.Popen(comm2, stdout = subprocess.PIPE).communicate()
      for oFile in res[0].rstrip().split("\n"):
        if oFile.count("dirac_directory"):
          continue
        mylist.append(path + mydir + "/" + oFile)
    if not mylist:
      return S_ERROR("File list is empty")
    return S_OK(mylist)

  def __getFilesLocaly(self):
    """ Download the files.

    Throttles via the Overlay service (``canRun``), then downloads a random
    sample of the LFNs into ``./overlayinput_<metaEventType>``, using
    site-specific transfer helpers where available.
    """
    numberofeventstoget = ceil(self.BXOverlay * self.ggtohadint)
    nbfiles = len(self.lfns)
    availableevents = nbfiles * self.nbofeventsperfile
    if availableevents < numberofeventstoget:
      return S_ERROR("Number of %s events available is less than requested" % ( self.BkgEvtType ))
    if not self.NbSigEvtsPerJob:
      ##Compute Nsignal events
      self.NbSigEvtsPerJob = self.nbinputsigfile * self.nbsigeventsperfile
    if not self.NbSigEvtsPerJob:
      return S_ERROR('Could not determine the number of signal events per job')
    LOG.verbose("There are %s signal event" % self.NbSigEvtsPerJob)
    ##Now determine how many files are needed to cover all signal events
    totnboffilestoget = int(ceil(self.NbSigEvtsPerJob * numberofeventstoget / self.nbofeventsperfile))
    ##Limit ourself to some configuration maximum
    levels = [self.machine, self.energytouse, self.detectormodel, self.BkgEvtType]
    maxNbFilesToGet = getOptionValue(ops=self.ops, basePath="/Overlay", optionName="MaxNbFilesToGet",
                                     defaultValue=20, levels=levels)
    if totnboffilestoget > maxNbFilesToGet:
      totnboffilestoget = maxNbFilesToGet
    self.__disableWatchDog()
    overlaymon = OverlaySystemClient()
    ##Now need to check that there are not that many concurrent jobs getting the overlay at the same time
    error_count = 0
    count = 0
    while 1:
      if error_count > 10 :
        LOG.error('OverlayDB returned too many errors')
        return S_ERROR('Failed to get number of concurrent overlay jobs')
      res = overlaymon.canRun(self.site)
      if not res['OK']:
        error_count += 1
        time.sleep(60)
        continue
      error_count = 0
      #if running < max_concurrent_running:
      if res['Value']:
        break
      else:
        count += 1
        # Poll once a minute; give up after ~5 hours of standby.
        if count > 300:
          return S_ERROR("Waited too long: 5h, so marking job as failed")
        if count % 10 == 0 :
          self.setApplicationStatus("Overlay standby number %s" % count)
        time.sleep(60)
    self.__enableWatchDog()
    self.setApplicationStatus('Getting overlay files')
    LOG.info('Will obtain %s files for overlay' % totnboffilestoget)
    # All downloads land in a dedicated sub-directory; restored to curdir below.
    os.mkdir("./overlayinput_" + self.metaEventType)
    os.chdir("./overlayinput_" + self.metaEventType)
    filesobtained = []
    usednumbers = []
    fail = False
    fail_count = 0
    max_fail_allowed = self.ops.getValue("/Overlay/MaxFailedAllowed", 20)
    while not len(filesobtained) == totnboffilestoget:
      if fail_count > max_fail_allowed:
        fail = True
        break
      # Sample without replacement from the LFN list.
      fileindex = random.randrange(nbfiles)
      if fileindex in usednumbers:
        continue
      usednumbers.append(fileindex)
      triedDataManager = False
      if self.site == 'LCG.CERN.ch':
        res = self.getEOSFile(self.lfns[fileindex])
      elif self.site == 'LCG.IN2P3-CC.fr':
        res = self.getLyonFile(self.lfns[fileindex])
      elif self.site == 'LCG.UKI-LT2-IC-HEP.uk':
        res = self.getImperialFile(self.lfns[fileindex])
      elif self.site == 'LCG.RAL-LCG2.uk':
        res = self.getRALFile(self.lfns[fileindex])
      elif self.site == 'LCG.KEK.jp':
        res = self.getKEKFile(self.lfns[fileindex])
      else:
        self.__disableWatchDog()
        res = self.datMan.getFile(self.lfns[fileindex])
        triedDataManager = True
      # In case the specific copying did not work (mostly because the files do
      # not exist locally) try again to get the file via the DataManager
      if (not res['OK']) and (not triedDataManager):
        res = self.datMan.getFile(self.lfns[fileindex])
      if not res['OK']:
        LOG.warn('Could not obtain %s' % self.lfns[fileindex])
        fail_count += 1
        # Wait for a random time around 3 minutes
        LOG.verbose("Waste happily some CPU time (on average 3 minutes)")
        resWaste = wasteCPUCycles(60 * random.gauss(3, 0.1))
        if not resWaste['OK']:
          LOG.error("Could not waste as much CPU time as wanted, but whatever!")
        continue
      filesobtained.append(self.lfns[fileindex])
      # If no file could be obtained, need to make sure the job fails
      if len(usednumbers) == nbfiles and not filesobtained:
        fail = True
        break
    ## Remove all scripts remaining
    scripts = glob.glob("*.sh")
    for script in scripts:
      os.remove(script)
    ##Print the file list
    mylist = os.listdir(os.getcwd())
    LOG.info("List of Overlay files:")
    LOG.info("\n".join(mylist))
    os.chdir(self.curdir)
    res = overlaymon.jobDone(self.site)
    if not res['OK']:
      LOG.error("Could not declare the job as finished getting the files")
    if fail:
      LOG.error("Did not manage to get all files needed, too many errors")
      return S_ERROR("Failed to get files")
    LOG.info('Got all files needed.')
    return S_OK()

  def getCASTORFile(self, lfn):
    """ USe xrdcp or rfcp to get the files from castor

    Generates and runs a small shell script so the transfer inherits the
    required STAGE_* environment; falls back to rfcp when xrdcp produced an
    empty/missing file.
    """
    prependpath = "/castor/cern.ch/grid"
    if not lfn.count("castor/cern.ch"):
      lfile = prependpath + lfn
    else:
      lfile = lfn
    LOG.info("Getting %s" % lfile)
    #command = "rfcp %s ./"%file
    basename = os.path.basename(lfile)
    if os.path.exists("overlayinput.sh"):
      os.unlink("overlayinput.sh")
    with open("overlayinput.sh","w") as script:
      script.write('#!/bin/sh \n')
      script.write('###############################\n')
      script.write('# Dynamically generated scrip #\n')
      script.write('###############################\n')
      if 'X509_USER_PROXY' in os.environ:
        script.write("cp %s /tmp/x509up_u%s \n" % (os.environ['X509_USER_PROXY'], os.getuid()))
      script.write('declare -x STAGE_SVCCLASS=ilcdata\n')
      script.write('declare -x STAGE_HOST=castorpublic\n')
      script.write(r"xrdcp -s root://castorpublic.cern.ch/%s ./ -OSstagerHost=castorpublic\&svcClass=ilcdata\n" % lfile.rstrip())
      #script.write("/usr/bin/rfcp 'rfio://cgenstager.ads.rl.ac.uk:9002?svcClass=ilcTape&path=%s' %s\n"%(lfile,basename))
      script.write("""
if [ ! -s %s ]; then
  echo "Using rfcp instead"
  rfcp %s ./
fi\n""" % (basename, lfile))
      script.write('declare -x appstatus=$?\n')
      script.write('exit $appstatus\n')
    os.chmod("overlayinput.sh", 0o755)
    comm = 'sh -c "./overlayinput.sh"'
    self.result = shellCall(600, comm, callbackFunction = self.redirectLogOutput, bufferLimit = 20971520)
    localfile = os.path.basename(lfile)
    # Success criterion is simply the presence of the local file.
    if os.path.exists(localfile):
      return S_OK(localfile)
    return S_ERROR("Failed")

  def getEOSFile(self, lfn):
    """ Use xrdcp to get the files from EOS
    """
    prependpath = "/eos/experiment/clicdp/grid"
    if not lfn.startswith(prependpath):
      lfile = prependpath + lfn
    else:
      lfile = lfn
    LOG.info("Getting %s" % lfile)
    if os.path.exists("overlayinput.sh"):
      os.unlink("overlayinput.sh")
    with open("overlayinput.sh","w") as script:
      script.write('#!/bin/sh \n')
      script.write('################################\n')
      script.write('# Dynamically generated script #\n')
      script.write('################################\n')
      if 'X509_USER_PROXY' in os.environ:
        script.write("cp %s /tmp/x509up_u%s \n" % (os.environ['X509_USER_PROXY'], os.getuid()))
      script.write("xrdcp -s root://eospublic.cern.ch/%s ./ \n" % lfile.rstrip() )
      script.write('declare -x appstatus=$?\n')
      script.write('exit $appstatus\n')
    os.chmod("overlayinput.sh", 0o755)
    comm = 'sh -c "./overlayinput.sh"'
    self.result = shellCall(600, comm, callbackFunction = self.redirectLogOutput, bufferLimit = 20971520)
    localfile = os.path.basename(lfile)
    if os.path.exists(localfile):
      return S_OK(localfile)
    return S_ERROR("Failed")

  def getLyonFile(self, lfn):
    """ Use xrdcp to get the files from Lyon
    """
    prependpath = '/pnfs/in2p3.fr/data'
    if not lfn.count('in2p3.fr/data'):
      lfile = prependpath + lfn
    else:
      lfile = lfn
    LOG.info("Getting %s" % lfile)
    #command = "rfcp %s ./"%file
    #comm = []
    #comm.append("cp $X509_USER_PROXY /tmp/x509up_u%s"%os.getuid())
    if os.path.exists("overlayinput.sh"):
      os.unlink("overlayinput.sh")
    with open("overlayinput.sh", "w") as script:
      script.write('#!/bin/sh \n')
      script.write('###############################\n')
      script.write('# Dynamically generated scrip #\n')
      script.write('###############################\n')
      script.write("cp %s /tmp/x509up_u%s \n" % (os.environ['X509_USER_PROXY'], os.getuid()))
      script.write(". /afs/in2p3.fr/grid/profiles/lcg_env.sh\n")
      script.write("xrdcp root://ccdcacsn179.in2p3.fr:1094%s ./ -s\n" % lfile.rstrip())
      #script.write("/usr/bin/rfcp 'rfio://cgenstager.ads.rl.ac.uk:9002?svcClass=ilcTape&path=%s' %s\n"%(lfile,basename))
      #script.write("""
      #if [ ! -s %s ]; then
      #  rfcp %s ./
      #fi\n"""%(basename,lfile))
      script.write('declare -x appstatus=$?\n')
      script.write('exit $appstatus\n')
    os.chmod("overlayinput.sh", 0o755)
    comm = 'sh -c "./overlayinput.sh"'
    self.result = shellCall(600, comm, callbackFunction = self.redirectLogOutput, bufferLimit = 20971520)
    localfile = os.path.basename(lfile)
    if os.path.exists(localfile):
      return S_OK(localfile)
    return S_ERROR("Failed")

  def getImperialFile(self, lfn):
    """ USe dccp to get the files from the Imperial SE
    """
    prependpath = '/pnfs/hep.ph.ic.ac.uk/data'
    if not lfn.count('hep.ph.ic.ac.uk/data'):
      lfile = prependpath + lfn
    else:
      lfile = lfn
    LOG.info("Getting %s" % lfile)
    ###Don't check for CPU time as other wise, job can get killed
    self.__disableWatchDog()
    if os.path.exists("overlayinput.sh"):
      os.unlink("overlayinput.sh")
    with open("overlayinput.sh","w") as script:
      script.write('#!/bin/sh \n')
      script.write('###############################\n')
      script.write('# Dynamically generated scrip #\n')
      script.write('###############################\n')
      script.write("dccp dcap://%s%s ./\n" % (os.environ['VO_ILC_DEFAULT_SE'], lfile.rstrip()))
      #script.write("/usr/bin/rfcp 'rfio://cgenstager.ads.rl.ac.uk:9002?svcClass=ilcTape&path=%s' %s\n"%(lfile,basename))
      #script.write("""
      #if [ ! -s %s ]; then
      #  rfcp %s ./
      #fi\n"""%(basename,lfile))
      script.write('declare -x appstatus=$?\n')
      script.write('exit $appstatus\n')
    os.chmod("overlayinput.sh", 0o755)
    comm = 'sh -c "./overlayinput.sh"'
    self.result = shellCall(600, comm, callbackFunction = self.redirectLogOutput, bufferLimit = 20971520)
    localfile = os.path.basename(lfile)
    if os.path.exists(localfile):
      return S_OK(localfile)
    return S_ERROR("Failed")

  def getRALFile(self, lfn):
    """ Use rfcp to get the files from RAL castor

    Exports the CASTOR staging environment (CNS_HOST / STAGE_*) into this
    process before running the generated transfer script.
    """
    prependpath = '/castor/ads.rl.ac.uk/prod'
    if not lfn.count('ads.rl.ac.uk/prod'):
      lfile = prependpath + lfn
    else:
      lfile = lfn
    LOG.info("Getting %s" % lfile)
    ###Don't check for CPU time as other wise, job can get killed
    self.__disableWatchDog()
    #command = "rfcp %s ./"%file
    #comm = []
    #comm.append("cp $X509_USER_PROXY /tmp/x509up_u%s"%os.getuid())
    if 'X509_USER_PROXY' in os.environ:
      comm2 = ["cp", os.environ['X509_USER_PROXY'], "/tmp/x509up_u%s" % os.getuid()]
      res = subprocess.Popen(comm2, stdout = subprocess.PIPE).communicate()
      print(res)
    #comm.append("xrdcp root://ccdcacsn179.in2p3.fr:1094%s ./ -s"%file)
    #command = string.join(comm,";")
    #logfile = file(self.applicationLog,"w")
    os.environ['CNS_HOST'] = 'castorns.ads.rl.ac.uk'
    #comm4= ['declare','-x','CNS_HOST=castorns.ads.rl.ac.uk']
    #res = subprocess.Popen(comm4,stdout=logfile,stderr=subprocess.STDOUT)
    #print res
    os.environ['STAGE_SVCCLASS'] = 'ilcTape'
    # comm5= ['declare','-x','STAGE_SVCCLASS=ilcTape']
    # res = subprocess.call(comm5)
    # print res
    os.environ['STAGE_HOST'] = 'cgenstager.ads.rl.ac.uk'
    # comm6=['declare','-x','STAGE_HOST=cgenstager.ads.rl.ac.uk']
    # res = subprocess.call(comm6)
    # print res
    basename = os.path.basename(lfile)
    if os.path.exists("overlayinput.sh"):
      os.unlink("overlayinput.sh")
    with open("overlayinput.sh","w") as script:
      script.write('#!/bin/sh \n')
      script.write('###############################\n')
      script.write('# Dynamically generated scrip #\n')
      script.write('###############################\n')
      script.write("/usr/bin/rfcp 'rfio://cgenstager.ads.rl.ac.uk:9002?svcClass=ilcTape&path=%s' %s\n" % (lfile, basename))
      script.write('declare -x appstatus=$?\n')
      script.write('exit $appstatus\n')
    os.chmod("overlayinput.sh", 0o755)
    comm = 'sh -c "./overlayinput.sh"'
    self.result = shellCall(600, comm, callbackFunction = self.redirectLogOutput, bufferLimit = 20971520)
    localfile = os.path.basename(lfile)
    if os.path.exists(localfile):
      return S_OK(localfile)
    return S_ERROR("Failed")

  def getKEKFile(self, lfn):
    """ Use cp to get the files from kek-se
    """
    prependpath = '/grid'
    lfile = prependpath + lfn
    LOG.info("Getting %s" % lfile)
    self.__disableWatchDog()
    if os.path.exists("overlayinput.sh"):
      os.unlink("overlayinput.sh")
    with open("overlayinput.sh", "w") as script:
      script.write('#!/bin/sh \n')
      script.write('###############################\n')
      script.write('# Dynamically generated scrip #\n')
      script.write('###############################\n')
      script.write("cp %s ./ -s\n" % lfile.rstrip())
      script.write('declare -x appstatus=$?\n')
      script.write('exit $appstatus\n')
    os.chmod("overlayinput.sh", 0o755)
    comm = 'sh -c "./overlayinput.sh"'
    self.result = shellCall(600, comm, callbackFunction = self.redirectLogOutput, bufferLimit = 20971520)
    localfile = os.path.basename(lfile)
    if os.path.exists(localfile):
      return S_OK(localfile)
    return S_ERROR("Failed")

  def execute(self):
    """ Run the module, called rom Workflow

    Resolves the parameters, obtains the LFN list, downloads the files and
    publishes the overlay bookkeeping into ``workflow_commons``.
    """
    self.result = self.resolveInputVariables()
    if not self.result['OK']:
      LOG.error("Failed to resolve input parameters:", self.result['Message'])
      return self.result
    LOG.info("Information after resolveInputVariables:")
    LOG.info("Signal Events Per Job: %d " % self.NbSigEvtsPerJob)
    LOG.info("Background Event Type: %s " % self.BkgEvtType)
    LOG.info("Meta Event Type: %s " % self.metaEventType)
    LOG.info("Background Events per bunch crossing: %3.2f" % self.ggtohadint)
    LOG.info("SignalEventsPerFile: %d " % self.nbsigeventsperfile)
    if not self.applicationLog:
      self.applicationLog = 'Overlay_input.log'
    self.applicationLog = os.path.join(os.getcwd(), self.applicationLog)
    # Do nothing when an earlier step already failed.
    if not self.workflowStatus['OK'] or not self.stepStatus['OK']:
      LOG.verbose('Workflow status = %s, step status = %s' % (self.workflowStatus['OK'], self.stepStatus['OK']))
      return S_OK('OverlayInput should not proceed as previous step did not end properly')
    self.setApplicationStatus('Starting up Overlay')
    if self.pathToOverlayFiles:
      res = self.__getFilesFromPath()
    else:
      res = self.__getFilesFromFC()
    if not res['OK']:
      LOG.error("Failed to get the file list from the catalog:", res["Message"])
      self.setApplicationStatus('OverlayProcessor failed to get file list')
      return res
    else:
      LOG.debug("Found these files: %s" % res)
    self.lfns = res['Value']
    if not self.lfns:
      LOG.error("No Overlay LFNs found")
      self.setApplicationStatus('OverlayProcessor got an empty list')
      return S_ERROR('OverlayProcessor got an empty list')
    res = self.__getFilesLocaly()
    ###Now that module is finished,resume CPU time checks
    self.__enableWatchDog()
    if not res['OK']:
      LOG.error("Overlay failed with", res['Message'])
      self.setApplicationStatus('OverlayInput failed to get files locally with message %s' % res['Message'])
      return S_ERROR('OverlayInput failed to get files locally')
    self.setApplicationStatus('OverlayInput finished getting all files successfully')
    ## add overlay background information to workflow_commons
    stepNumber = int( self.step_commons['STEP_NUMBER'] )
    self.workflow_commons["OI_%i_eventType" % stepNumber] = self.metaEventType
    self.workflow_commons["OI_%i_eventsPerBackgroundFile" % stepNumber] = self.nbofeventsperfile
    self.workflow_commons["OI_%i_processorName" % stepNumber] = self.processorName
    return S_OK('OverlayInput finished successfully')

  def __disableWatchDog( self ):
    """create the watchdog disable if it does not exists"""
    watchDogFilename = 'DISABLE_WATCHDOG_CPU_WALLCLOCK_CHECK'
    fullPath = os.path.join( self.curdir, watchDogFilename )
    if not os.path.exists( fullPath ):
      with open( fullPath, 'w' ) as checkFile:
        checkFile.write('Dont look at cpu')

  def __enableWatchDog( self ):
    """remove the watchdog disable file if it exists"""
    watchDogFilename = 'DISABLE_WATCHDOG_CPU_WALLCLOCK_CHECK'
    fullPath = os.path.join( self.curdir, watchDogFilename )
    if os.path.exists( fullPath ):
      os.remove( fullPath )
class Prod3SoftwareManager(object):
    """ Manage software setup for prod3.

    Locates, installs and removes software packages in the known software
    areas (cvmfs and the VO shared area) and downloads package tarballs
    through the DIRAC DataManager.
    """

    def __init__(self, soft_category=None):
        """ Constructor

        :param dict soft_category: maps package name to its category
            sub-directory. Defaults to
            ``{"corsika_simhessarray": "simulations"}``. A ``None`` sentinel
            is used instead of a dict literal so the default is not a shared
            mutable object across instances.
        """
        if soft_category is None:
            soft_category = {"corsika_simhessarray": "simulations"}
        # Environment variable that points to the VO shared software area.
        self.SW_SHARED_DIR = 'VO_VO_CTA_IN2P3_FR_SW_DIR'
        self.CVMFS_DIR = '/cvmfs/cta.in2p3.fr/software'
        # Root of the software LFN namespace in the file catalog.
        self.LFN_ROOT = '/vo.cta.in2p3.fr/software'
        self.SOFT_CATEGORY_DICT = soft_category
        self.dm = DataManager()

    def installDIRACScripts(self, package_dir):
        """ copy prod3 DIRAC scripts in the current directory

        :param str package_dir: installed package directory holding dirac_* scripts
        :return: S_OK / S_ERROR
        """
        cmd = 'cp ' + os.path.join(package_dir, 'dirac_*') + ' .'
        # os.system returns 0 on success.
        if not os.system(cmd):
            return DIRAC.S_OK()
        return DIRAC.S_ERROR('Failed to install DIRAC scripts')

    def dumpSetupScriptPath(self, package_dir, textfilename='setup_script_path.txt'):
        """ dump the path to setupPackage.sh in a one line ascii file
        to be read and sourced by the following script

        :param str package_dir: installed package directory
        :param str textfilename: output file name
        :return: S_OK
        """
        script_path = os.path.join(package_dir, 'setupPackage.sh')
        # Use a context manager so the handle is closed deterministically
        # (the original leaked the file object).
        with open(textfilename, 'w') as textfile:
            textfile.write(script_path + '\n')
        return DIRAC.S_OK()

    def installSoftwarePackage(self, package, version, arch="sl6-gcc44", installDir='.'):
        """ install software package in the current directory

        Downloads ``<LFN_ROOT>/<package>/<version>/<package>.tar.gz`` and
        extracts it into *installDir*.

        :return: S_OK(installDir) / S_ERROR
        """
        DIRAC.gLogger.notice('Installing package %s version %s' % (package, version))
        tarFile = package + '.tar.gz'
        tarLFN = os.path.join(self.LFN_ROOT, package, version, tarFile)
        ########## download the tar file #######################
        DIRAC.gLogger.notice('Trying to download package:', tarLFN)
        res = self.dm.getFile(tarLFN)
        if not res['OK']:
            return res
        if tarLFN in res['Value']['Successful']:
            DIRAC.gLogger.notice(' Package downloaded successfully:', tarLFN)
        else:
            # Bug fix: the original built a tuple ('Failed...', tarLFN)
            # instead of an error string.
            error = 'Failed to download package: %s' % tarLFN
            return DIRAC.S_ERROR(error)
        ########## extract the tar file #######################
        tarMode = "r|*"
        with tarfile.open(tarFile, tarMode) as tar:
            for tarInfo in tar:
                tar.extract(tarInfo, installDir)
        os.unlink(tarFile)
        DIRAC.gLogger.notice(
            'Package %s version %s installed successfully at:\n%s' % (package, version, installDir))
        return DIRAC.S_OK(installDir)

    def _getSoftwareAreas(self):
        """ get the list of available software areas (shared area, cvmfs)

        :return: list of area paths (possibly empty)
        """
        areaList = []
        opsHelper = Operations()
        # NOTE: passing `bool` is the DIRAC getValue idiom to request a
        # boolean-typed option with no explicit default.
        UseCvmfs = opsHelper.getValue('SoftwarePolicy/UseCvmfs', bool)
        DIRAC.gLogger.notice('SoftwarePolicy for UseCvmfs is:', UseCvmfs)
        if UseCvmfs:
            areaList.append(self.CVMFS_DIR)
        # `in` instead of the Python2-only dict.has_key.
        if self.SW_SHARED_DIR in os.environ:
            shared_area = os.path.join(os.environ[self.SW_SHARED_DIR], 'software')
            areaList.append(shared_area)
        else:
            DIRAC.gLogger.warn('Shared area not found')
        return areaList

    def _getSharedArea(self):
        """ get Shared Area

        :return: S_OK(shared_area_path) / S_ERROR
        """
        if self.SW_SHARED_DIR in os.environ:
            shared_area = os.path.join(os.environ[self.SW_SHARED_DIR], 'software')
        else:
            return DIRAC.S_ERROR('Shared area not found')
        return DIRAC.S_OK(shared_area)

    def _getPackageDir(self, area, arch, package, version):
        """ get Package directory: <area>/<arch>/<category>/<package>/<version> """
        package_dir = os.path.join(area, arch, self.SOFT_CATEGORY_DICT[package], package, version)
        return package_dir

    def removeSoftwarePackage(self, packagedir):
        """ remove Software Package

        :return: S_OK / S_ERROR
        """
        cmd = 'rm -Rf ' + packagedir
        if not os.system(cmd):
            return DIRAC.S_OK()
        error = 'Failed to remove %s' % packagedir
        return DIRAC.S_ERROR(error)

    def checkSoftwarePackage(self, package, version, arch="sl6-gcc44", area=None):
        """ check if the software package is installed in any software area

        Keyword arguments:
        package -- package name as the directory name
        version -- software version as the directory name
        arch -- architecture as the directory name
        area -- if set (truthy), restrict the search to the shared area

        :return: S_OK(package_dir) when found, S_ERROR otherwise
        """
        if area:
            # Bug fix: _getSharedArea returns an S_OK/S_ERROR dict; the
            # original iterated the dict itself (i.e. the keys 'OK'/'Value'),
            # so the shared-area-only lookup could never succeed.
            res = self._getSharedArea()
            if res['OK']:
                areaList = [res['Value']]
            else:
                areaList = []
        else:
            areaList = self._getSoftwareAreas()
        if len(areaList) == 0:
            DIRAC.gLogger.warn('No software area is available')
        # ## look for the package directory in the software areas
        for area in areaList:
            # Consistency: reuse the helper instead of re-joining inline.
            package_dir = self._getPackageDir(area, arch, package, version)
            if os.path.isdir(package_dir):
                DIRAC.gLogger.notice('Found package %s version %s at:\n%s' % (package, version, area))
                return DIRAC.S_OK(package_dir)
        return DIRAC.S_ERROR(
            'Could not find package %s version %s in any location' % (package, version))
class OverlayInput (ModuleBase): """ Download the files for overlay. """ def __init__(self): super(OverlayInput, self).__init__() self.enable = True self.STEP_NUMBER = '' self.log = gLogger.getSubLogger( "OverlayInput" ) self.applicationName = 'OverlayInput' self.curdir = os.getcwd() self.applicationLog = '' self.printoutflag = '' self.prodid = 0 self.detector = '' ##needed for backward compatibility self.detectormodel = "" self.energytouse = '' self.energy = 0 self.nbofeventsperfile = 100 self.lfns = [] self.nbfilestoget = 0 self.BkgEvtType = 'gghad' self.metaEventType = self.BkgEvtType self.BXOverlay = 0 self.ggtohadint = 3.2 self.nbsigeventsperfile = 0 self.nbinputsigfile = 1 self.NbSigEvtsPerJob = 0 self.datMan = DataManager() self.fcc = FileCatalogClient() self.site = DIRAC.siteName() self.useEnergyForFileLookup = True self.machine = 'clic_cdr' self.pathToOverlayFiles = '' self.processorName = '' def applicationSpecificInputs(self): self.pathToOverlayFiles = self.step_commons.get("pathToOverlayFiles", self.pathToOverlayFiles) if 'Detector' in self.step_commons: self.detectormodel = self.step_commons['Detector'] if not self.detectormodel and not self.detector and not self.pathToOverlayFiles: return S_ERROR('Detector model not defined') if 'Energy' in self.step_commons: self.energytouse = self.step_commons['Energy'] if self.energy: self.energytouse = energyWithLowerCaseUnit( self.energy ) if not self.energytouse and not self.pathToOverlayFiles: return S_ERROR("Energy not set anywhere!") if 'BXOverlay' in self.step_commons: self.BXOverlay = self.step_commons['BXOverlay'] if not self.BXOverlay: return S_ERROR("BXOverlay parameter not defined") if 'ggtohadint' in self.step_commons: self.ggtohadint = self.step_commons['ggtohadint'] if 'ProdID' in self.step_commons: self.prodid = self.step_commons['ProdID'] if 'NbSigEvtsPerJob' in self.step_commons: self.NbSigEvtsPerJob = self.step_commons['NbSigEvtsPerJob'] if 'BkgEvtType' in self.step_commons: self.BkgEvtType = 
self.step_commons['BkgEvtType'] self.metaEventType = self.BkgEvtType res = allowedBkg(self.BkgEvtType, self.energytouse, detector = self.detector, detectormodel = self.detectormodel, machine = self.machine) if not res['OK']: return res if res['Value'] < 0 and not self.pathToOverlayFiles: return S_ERROR("No suitable ProdID") #if 'Site' in self.workflow_commons: # self.site = self.workflow_commons['Site'] self.useEnergyForFileLookup = self.step_commons.get("useEnergyForFileLookup", self.useEnergyForFileLookup) if self.InputData: if self.NumberOfEvents: self.nbsigeventsperfile = self.NumberOfEvents else: return S_ERROR("Number of events in the signal file is missing") self.nbinputsigfile = len(self.InputData) self.log.info( "Signal Events Per Job: %d " % self.NbSigEvtsPerJob ) self.log.info( "Background Event Type: %s " % self.BkgEvtType ) self.log.info( "Meta Event Type: %s " % self.metaEventType ) self.log.info( "Background Events per bunch crossing: %3.2f" % self.ggtohadint ) self.log.info( "SignalEventsPerFile: %d " % self.nbsigeventsperfile ) if not self.NbSigEvtsPerJob and not self.nbsigeventsperfile: return S_ERROR("Could not determine the number of signal events per input file") return S_OK("Input variables resolved") def __getFilesFromFC(self): """ Get the list of files from the FileCatalog. 
""" meta = {} if self.energy and self.useEnergyForFileLookup: meta['Energy'] = str(int(self.energy)) meta['EvtType'] = self.BkgEvtType meta['Datatype'] = 'SIM' if self.detectormodel: meta['DetectorModel'] = self.detectormodel if self.machine == 'ilc_dbd': meta['Machine'] = 'ilc' if self.machine == 'clic_cdr': meta['Machine'] = 'clic' res = None if self.detector: res = self.ops.getValue("/Overlay/%s/%s/%s/%s/ProdID" % (self.machine, self.detector, self.energytouse, self.BkgEvtType), 0) self.nbofeventsperfile = self.ops.getValue("/Overlay/%s/%s/%s/%s/NbEvts" % (self.machine, self.energytouse, self.detector, self.BkgEvtType), 100) self.metaEventType = self.ops.getValue( "/Overlay/%s/%s/%s/%s/EvtType" % ( self.machine, self.energytouse, self.detector, self.BkgEvtType), self.BkgEvtType) else: res = self.ops.getValue("/Overlay/%s/%s/%s/%s/ProdID" % (self.machine, self.energytouse, self.detectormodel, self.BkgEvtType), 0) self.nbofeventsperfile = self.ops.getValue("/Overlay/%s/%s/%s/%s/NbEvts" % (self.machine, self.energytouse, self.detectormodel, self.BkgEvtType), 100) self.metaEventType = self.ops.getValue( "/Overlay/%s/%s/%s/%s/EvtType" % ( self.machine, self.energytouse, self.detectormodel, self.BkgEvtType), self.BkgEvtType) self.log.info( "Number of Events Per BackgroundFile: %d " % self.nbofeventsperfile ) meta['EvtType'] = self.metaEventType meta['ProdID'] = res if self.prodid: meta['ProdID'] = self.prodid self.log.info("Using %s as metadata" % (meta)) return self.fcc.findFilesByMetadata(meta) def __getFilesFromPath(self): """ Get the list of files from the FileCatalog via the user specified path. """ meta = {} return self.fcc.findFilesByMetadata(meta, self.pathToOverlayFiles) def __getFilesFromLyon(self, meta): """ List the files present at Lyon, not used. 
""" prodID = meta['ProdID'] prod = str(prodID).zfill(8) energy = meta['Energy'] bkg = meta["EvtType"] detector = meta["DetectorType"] path ="/ilc/prod/clic/%s/%s/%s/SIM/%s/" % (energy, bkg, detector, prod) comm = ["nsls", "%s" % path] res = subprocess.Popen(comm, stdout = subprocess.PIPE).communicate() dirlist = res[0].rstrip().split("\n") mylist = [] for mydir in dirlist: if mydir.count("dirac_directory"): continue curdir = path + mydir comm2 = ["nsls", curdir] res = subprocess.Popen(comm2, stdout = subprocess.PIPE).communicate() for oFile in res[0].rstrip().split("\n"): if oFile.count("dirac_directory"): continue mylist.append(path + mydir + "/" + oFile) if not mylist: return S_ERROR("File list is empty") return S_OK(mylist) def __getFilesFromCastor(self, meta): """ Get the available files (list) from the CERN castor storage """ prodID = meta['ProdID'] prod = str(prodID).zfill(8) energy = meta['Energy'] bkg = meta["EvtType"] detector = meta["DetectorType"] path = "/castor/cern.ch/grid/ilc/prod/%s/%s/%s/%s/SIM/%s/" % (self.machine, energy, bkg, detector, prod) comm = ["nsls", "%s" % path] res = subprocess.Popen(comm, stdout = subprocess.PIPE).communicate() dirlist = res[0].rstrip().split("\n") mylist = [] for mydir in dirlist: if mydir.count("dirac_directory"): continue curdir = path + mydir comm2 = ["nsls", curdir] res = subprocess.Popen(comm2, stdout = subprocess.PIPE).communicate() for oFile in res[0].rstrip().split("\n"): if oFile.count("dirac_directory"): continue mylist.append(path + mydir + "/" + oFile) if not mylist: return S_ERROR("File list is empty") return S_OK(mylist) def __getFilesLocaly(self): """ Download the files. 
""" numberofeventstoget = ceil(self.BXOverlay * self.ggtohadint) nbfiles = len(self.lfns) availableevents = nbfiles * self.nbofeventsperfile if availableevents < numberofeventstoget: return S_ERROR("Number of %s events available is less than requested" % ( self.BkgEvtType )) if not self.NbSigEvtsPerJob: ##Compute Nsignal events self.NbSigEvtsPerJob = self.nbinputsigfile * self.nbsigeventsperfile if not self.NbSigEvtsPerJob: return S_ERROR('Could not determine the number of signal events per job') self.log.verbose("There are %s signal event" % self.NbSigEvtsPerJob) ##Now determine how many files are needed to cover all signal events totnboffilestoget = int(ceil(self.NbSigEvtsPerJob * numberofeventstoget / self.nbofeventsperfile)) ##Limit ourself to some configuration maximum levels = [self.machine, self.energytouse, self.detectormodel, self.BkgEvtType] maxNbFilesToGet = getOptionValue(ops=self.ops, basePath="/Overlay", optionName="MaxNbFilesToGet", defaultValue=20, levels=levels) if totnboffilestoget > maxNbFilesToGet: totnboffilestoget = maxNbFilesToGet # res = self.ops.getOption("/Overlay/MaxConcurrentRunning",200) # self.log.verbose("Will allow only %s concurrent running"%res['Value']) # max_concurrent_running = res['Value'] # # jobpropdict = {} # jobpropdict['ApplicationStatus'] = 'Getting overlay files' # res = self.ops.getSections("/Overlay/Sites/") # sites = [] # if res['OK']: # sites = res['Value'] # self.log.verbose("Found the following sites to restrain: %s"%sites) # if self.site in sites: # res = self.ops.getOption("/Overlay/Sites/%s/MaxConcurrentRunning"%self.site,200) # self.log.verbose("Will allow only %s concurrent running at %s"%(res['Value'],self.site)) # jobpropdict['Site']=self.site # max_concurrent_running = res['Value'] self.__disableWatchDog() overlaymon = RPCClient('Overlay/Overlay', timeout=60) ##Now need to check that there are not that many concurrent jobs getting the overlay at the same time error_count = 0 count = 0 while 1: if 
error_count > 10 : self.log.error('OverlayDB returned too many errors') return S_ERROR('Failed to get number of concurrent overlay jobs') #jobMonitor = RPCClient('WorkloadManagement/JobMonitoring',timeout=60) #res = jobMonitor.getCurrentJobCounters(jobpropdict) #if not res['OK']: # error_count += 1 # time.sleep(60) # continue #running = 0 #if 'Running' in res['Value']: # running = res['Value']['Running'] res = overlaymon.canRun(self.site) if not res['OK']: error_count += 1 time.sleep(60) continue error_count = 0 #if running < max_concurrent_running: if res['Value']: break else: count += 1 if count > 300: return S_ERROR("Waited too long: 5h, so marking job as failed") if count % 10 == 0 : self.setApplicationStatus("Overlay standby number %s" % count) time.sleep(60) self.__enableWatchDog() self.setApplicationStatus('Getting overlay files') self.log.info('Will obtain %s files for overlay' % totnboffilestoget) os.mkdir("./overlayinput_" + self.metaEventType) os.chdir("./overlayinput_" + self.metaEventType) filesobtained = [] usednumbers = [] fail = False fail_count = 0 max_fail_allowed = self.ops.getValue("/Overlay/MaxFailedAllowed", 20) while not len(filesobtained) == totnboffilestoget: if fail_count > max_fail_allowed: fail = True break fileindex = random.randrange(nbfiles) if fileindex not in usednumbers: usednumbers.append(fileindex) triedDataManager = False if self.site == 'LCG.CERN.ch': res = self.getEOSFile(self.lfns[fileindex]) elif self.site == 'LCG.IN2P3-CC.fr': res = self.getLyonFile(self.lfns[fileindex]) elif self.site == 'LCG.UKI-LT2-IC-HEP.uk': res = self.getImperialFile(self.lfns[fileindex]) elif self.site == 'LCG.RAL-LCG2.uk': res = self.getRALFile(self.lfns[fileindex]) elif self.site == 'LCG.KEK.jp': res = self.getKEKFile(self.lfns[fileindex]) else: self.__disableWatchDog() res = self.datMan.getFile(self.lfns[fileindex]) triedDataManager = True #in case the specific copying did not work (mostly because the fileqs do #not exist locally) try again to get 
the file via the DataManager if (not res['OK']) and (not triedDataManager): res = self.datMan.getFile(self.lfns[fileindex]) if not res['OK']: self.log.warn('Could not obtain %s' % self.lfns[fileindex]) fail_count += 1 continue filesobtained.append(self.lfns[fileindex]) print "files now",filesobtained ##If no file could be obtained, need to make sure the job fails if len(usednumbers) == nbfiles and not filesobtained: fail = True break if len(filesobtained) < totnboffilestoget: ##Now wait for a random time around 3 minutes ###Actually, waste CPU time !!! self.log.verbose("Waste happily some CPU time (on average 3 minutes)") res = wasteCPUCycles(60 * random.gauss(3, 0.1)) if not res['OK']: self.log.error("Could not waste as much CPU time as wanted, but whatever!") ## Remove all scripts remaining scripts = glob.glob("*.sh") for script in scripts: os.remove(script) ##Print the file list mylist = os.listdir(os.getcwd()) self.log.info("List of Overlay files:") self.log.info("\n".join(mylist)) os.chdir(self.curdir) res = overlaymon.jobDone(self.site) if not res['OK']: self.log.error("Could not declare the job as finished getting the files") if fail: self.log.error("Did not manage to get all files needed, too many errors") return S_ERROR("Failed to get files") self.log.info('Got all files needed.') return S_OK() def getCASTORFile(self, lfn): """ USe xrdcp or rfcp to get the files from castor """ prependpath = "/castor/cern.ch/grid" if not lfn.count("castor/cern.ch"): lfile = prependpath + lfn else: lfile = lfn self.log.info("Getting %s" % lfile) #command = "rfcp %s ./"%file basename = os.path.basename(lfile) if os.path.exists("overlayinput.sh"): os.unlink("overlayinput.sh") with open("overlayinput.sh","w") as script: script.write('#!/bin/sh \n') script.write('###############################\n') script.write('# Dynamically generated scrip #\n') script.write('###############################\n') if 'X509_USER_PROXY' in os.environ: script.write("cp %s /tmp/x509up_u%s \n" % 
(os.environ['X509_USER_PROXY'], os.getuid())) script.write('declare -x STAGE_SVCCLASS=ilcdata\n') script.write('declare -x STAGE_HOST=castorpublic\n') script.write(r"xrdcp -s root://castorpublic.cern.ch/%s ./ -OSstagerHost=castorpublic\&svcClass=ilcdata\n" % lfile.rstrip()) #script.write("/usr/bin/rfcp 'rfio://cgenstager.ads.rl.ac.uk:9002?svcClass=ilcTape&path=%s' %s\n"%(lfile,basename)) script.write(""" if [ ! -s %s ]; then echo "Using rfcp instead" rfcp %s ./ fi\n""" % (basename, lfile)) script.write('declare -x appstatus=$?\n') script.write('exit $appstatus\n') os.chmod("overlayinput.sh", 0755) comm = 'sh -c "./overlayinput.sh"' self.result = shellCall(600, comm, callbackFunction = self.redirectLogOutput, bufferLimit = 20971520) localfile = os.path.basename(lfile) if os.path.exists(localfile): return S_OK(localfile) return S_ERROR("Failed") def getEOSFile(self, lfn): """ Use xrdcp to get the files from EOS """ prependpath = "/eos/experiment/clicdp/grid" if not lfn.startswith(prependpath): lfile = prependpath + lfn else: lfile = lfn self.log.info("Getting %s" % lfile) if os.path.exists("overlayinput.sh"): os.unlink("overlayinput.sh") with open("overlayinput.sh","w") as script: script.write('#!/bin/sh \n') script.write('################################\n') script.write('# Dynamically generated script #\n') script.write('################################\n') if 'X509_USER_PROXY' in os.environ: script.write("cp %s /tmp/x509up_u%s \n" % (os.environ['X509_USER_PROXY'], os.getuid())) script.write("xrdcp -s root://eospublic.cern.ch/%s ./ \n" % lfile.rstrip() ) script.write('declare -x appstatus=$?\n') script.write('exit $appstatus\n') os.chmod("overlayinput.sh", 0755) comm = 'sh -c "./overlayinput.sh"' self.result = shellCall(600, comm, callbackFunction = self.redirectLogOutput, bufferLimit = 20971520) localfile = os.path.basename(lfile) if os.path.exists(localfile): return S_OK(localfile) return S_ERROR("Failed") def getLyonFile(self, lfn): """ Use xrdcp to get the 
files from Lyon """ prependpath = '/pnfs/in2p3.fr/data' if not lfn.count('in2p3.fr/data'): lfile = prependpath + lfn else: lfile = lfn self.log.info("Getting %s" % lfile) #command = "rfcp %s ./"%file #comm = [] #comm.append("cp $X509_USER_PROXY /tmp/x509up_u%s"%os.getuid()) if os.path.exists("overlayinput.sh"): os.unlink("overlayinput.sh") with open("overlayinput.sh", "w") as script: script.write('#!/bin/sh \n') script.write('###############################\n') script.write('# Dynamically generated scrip #\n') script.write('###############################\n') script.write("cp %s /tmp/x509up_u%s \n" % (os.environ['X509_USER_PROXY'], os.getuid())) script.write(". /afs/in2p3.fr/grid/profiles/lcg_env.sh\n") script.write("xrdcp root://ccdcacsn179.in2p3.fr:1094%s ./ -s\n" % lfile.rstrip()) #script.write("/usr/bin/rfcp 'rfio://cgenstager.ads.rl.ac.uk:9002?svcClass=ilcTape&path=%s' %s\n"%(lfile,basename)) #script.write(""" #if [ ! -s %s ]; then # rfcp %s ./ #fi\n"""%(basename,lfile)) script.write('declare -x appstatus=$?\n') script.write('exit $appstatus\n') os.chmod("overlayinput.sh", 0755) comm = 'sh -c "./overlayinput.sh"' self.result = shellCall(600, comm, callbackFunction = self.redirectLogOutput, bufferLimit = 20971520) localfile = os.path.basename(lfile) if os.path.exists(localfile): return S_OK(localfile) return S_ERROR("Failed") def getImperialFile(self, lfn): """ USe dccp to get the files from the Imperial SE """ prependpath = '/pnfs/hep.ph.ic.ac.uk/data' if not lfn.count('hep.ph.ic.ac.uk/data'): lfile = prependpath + lfn else: lfile = lfn self.log.info("Getting %s" % lfile) ###Don't check for CPU time as other wise, job can get killed self.__disableWatchDog() if os.path.exists("overlayinput.sh"): os.unlink("overlayinput.sh") with open("overlayinput.sh","w") as script: script.write('#!/bin/sh \n') script.write('###############################\n') script.write('# Dynamically generated scrip #\n') script.write('###############################\n') script.write("dccp 
dcap://%s%s ./\n" % (os.environ['VO_ILC_DEFAULT_SE'], lfile.rstrip())) #script.write("/usr/bin/rfcp 'rfio://cgenstager.ads.rl.ac.uk:9002?svcClass=ilcTape&path=%s' %s\n"%(lfile,basename)) #script.write(""" #if [ ! -s %s ]; then # rfcp %s ./ #fi\n"""%(basename,lfile)) script.write('declare -x appstatus=$?\n') script.write('exit $appstatus\n') os.chmod("overlayinput.sh", 0755) comm = 'sh -c "./overlayinput.sh"' self.result = shellCall(600, comm, callbackFunction = self.redirectLogOutput, bufferLimit = 20971520) localfile = os.path.basename(lfile) if os.path.exists(localfile): return S_OK(localfile) return S_ERROR("Failed") def getRALFile(self, lfn): """ Use rfcp to get the files from RAL castor """ prependpath = '/castor/ads.rl.ac.uk/prod' if not lfn.count('ads.rl.ac.uk/prod'): lfile = prependpath + lfn else: lfile = lfn self.log.info("Getting %s" % lfile) ###Don't check for CPU time as other wise, job can get killed self.__disableWatchDog() #command = "rfcp %s ./"%file #comm = [] #comm.append("cp $X509_USER_PROXY /tmp/x509up_u%s"%os.getuid()) if 'X509_USER_PROXY' in os.environ: comm2 = ["cp", os.environ['X509_USER_PROXY'],"/tmp/x509up_u%s" % os.getuid()] res = subprocess.Popen(comm2, stdout = subprocess.PIPE).communicate() print res #comm.append("xrdcp root://ccdcacsn179.in2p3.fr:1094%s ./ -s"%file) #command = string.join(comm,";") #logfile = file(self.applicationLog,"w") os.environ['CNS_HOST'] = 'castorns.ads.rl.ac.uk' #comm4= ['declare','-x','CNS_HOST=castorns.ads.rl.ac.uk'] #res = subprocess.Popen(comm4,stdout=logfile,stderr=subprocess.STDOUT) #print res os.environ['STAGE_SVCCLASS'] = 'ilcTape' # comm5= ['declare','-x','STAGE_SVCCLASS=ilcTape'] # res = subprocess.call(comm5) # print res os.environ['STAGE_HOST'] = 'cgenstager.ads.rl.ac.uk' # comm6=['declare','-x','STAGE_HOST=cgenstager.ads.rl.ac.uk'] # res = subprocess.call(comm6) # print res basename = os.path.basename(lfile) if os.path.exists("overlayinput.sh"): os.unlink("overlayinput.sh") with 
open("overlayinput.sh","w") as script: script.write('#!/bin/sh \n') script.write('###############################\n') script.write('# Dynamically generated scrip #\n') script.write('###############################\n') script.write("/usr/bin/rfcp 'rfio://cgenstager.ads.rl.ac.uk:9002?svcClass=ilcTape&path=%s' %s\n" % (lfile, basename)) script.write('declare -x appstatus=$?\n') script.write('exit $appstatus\n') os.chmod("overlayinput.sh", 0755) comm = 'sh -c "./overlayinput.sh"' self.result = shellCall(600, comm, callbackFunction = self.redirectLogOutput, bufferLimit = 20971520) localfile = os.path.basename(lfile) if os.path.exists(localfile): return S_OK(localfile) return S_ERROR("Failed") def getKEKFile(self, lfn): """ Use cp to get the files from kek-se """ prependpath = '/grid' lfile = prependpath + lfn self.log.info("Getting %s" % lfile) self.__disableWatchDog() if os.path.exists("overlayinput.sh"): os.unlink("overlayinput.sh") with open("overlayinput.sh", "w") as script: script.write('#!/bin/sh \n') script.write('###############################\n') script.write('# Dynamically generated scrip #\n') script.write('###############################\n') script.write("cp %s ./ -s\n" % lfile.rstrip()) script.write('declare -x appstatus=$?\n') script.write('exit $appstatus\n') os.chmod("overlayinput.sh", 0755) comm = 'sh -c "./overlayinput.sh"' self.result = shellCall(600, comm, callbackFunction = self.redirectLogOutput, bufferLimit = 20971520) localfile = os.path.basename(lfile) if os.path.exists(localfile): return S_OK(localfile) return S_ERROR("Failed") def execute(self): """ Run the module, called rom Workflow """ self.result = self.resolveInputVariables() if not self.result['OK']: self.log.error("Failed to resolve input parameters:", self.result['Message']) return self.result self.log.info( "Information after resolveInputVariables:" ) self.log.info( "Signal Events Per Job: %d " % self.NbSigEvtsPerJob ) self.log.info( "Background Event Type: %s " % self.BkgEvtType ) 
self.log.info( "Meta Event Type: %s " % self.metaEventType ) self.log.info( "Background Events per bunch crossing: %3.2f" % self.ggtohadint ) self.log.info( "SignalEventsPerFile: %d " % self.nbsigeventsperfile ) if not self.applicationLog: self.applicationLog = 'Overlay_input.log' self.applicationLog = os.path.join(os.getcwd(), self.applicationLog) if not self.workflowStatus['OK'] or not self.stepStatus['OK']: self.log.verbose('Workflow status = %s, step status = %s' % (self.workflowStatus['OK'], self.stepStatus['OK'])) return S_OK('OverlayInput should not proceed as previous step did not end properly') self.setApplicationStatus('Starting up Overlay') if self.pathToOverlayFiles: res = self.__getFilesFromPath() else: res = self.__getFilesFromFC() if not res['OK']: self.log.error("Failed to get the file list from the catalog:", res["Message"]) self.setApplicationStatus('OverlayProcessor failed to get file list') return res else: self.log.debug("Found these files: %s" % res) self.lfns = res['Value'] if not self.lfns: self.log.error("No Overlay LFNs found") self.setApplicationStatus('OverlayProcessor got an empty list') return S_ERROR('OverlayProcessor got an empty list') res = self.__getFilesLocaly() ###Now that module is finished,resume CPU time checks self.__enableWatchDog() if not res['OK']: self.log.error("Overlay failed with", res['Message']) self.setApplicationStatus('OverlayInput failed to get files locally with message %s' % res['Message']) return S_ERROR('OverlayInput failed to get files locally') self.setApplicationStatus('OverlayInput finished getting all files successfully') ## add overlay background information to workflow_commons stepNumber = int( self.step_commons['STEP_NUMBER'] ) self.workflow_commons["OI_%i_eventType" % stepNumber] = self.metaEventType self.workflow_commons["OI_%i_eventsPerBackgroundFile" % stepNumber] = self.nbofeventsperfile self.workflow_commons["OI_%i_processorName" % stepNumber] = self.processorName return S_OK('OverlayInput 
finished successfully') def __disableWatchDog( self ): """create the watchdog disable if it does not exists""" watchDogFilename = 'DISABLE_WATCHDOG_CPU_WALLCLOCK_CHECK' fullPath = os.path.join( self.curdir, watchDogFilename ) if not os.path.exists( fullPath ): with open( fullPath, 'w' ) as checkFile: checkFile.write('Dont look at cpu') def __enableWatchDog( self ): """remove the watchdog disable file if it exists""" watchDogFilename = 'DISABLE_WATCHDOG_CPU_WALLCLOCK_CHECK' fullPath = os.path.join( self.curdir, watchDogFilename ) if os.path.exists( fullPath ): os.remove( fullPath )
class WhizardAnalysis(ModuleBase):
  """
  Specific Module to run a Whizard job.
  """
  def __init__(self):
    super(WhizardAnalysis, self).__init__()
    self.enable = True
    self.STEP_NUMBER = ''
    self.debug = True
    self.log = gLogger.getSubLogger("WhizardAnalysis")
    self.SteeringFile = ''
    self.OutputFile = ''
    self.NumberOfEvents = 1
    self.Lumi = 0
    self.applicationName = 'whizard'
    self.evttype = ""
    self.RandomSeed = 0
    # When True, the whizard.in file is taken from the process list instead
    # of being provided by the user (see applicationSpecificInputs).
    self.getProcessInFile = False
    self.datMan = DataManager()
    self.processlist = None
    self.parameters = {}
    self.susymodel = 0
    self.Model = ''
    self.genmodel = GeneratorModels()
    # Log lines matching any of these markers are kept/echoed by the
    # redirectLogOutput callback.
    self.eventstring = ['! ', 'Fatal error:', 'PYSTOP', 'No matrix element available',
                        'Floating point exception', 'Event generation finished.', " n_events",
                        "luminosity", " sum "]
    self.excludeAllButEventString = False
    self.steeringparameters = ''
    self.options = None
    self.optionsdict = {}
    self.OptionsDictStr = ''
    self.GenLevelCutDictStr = ''
    self.genlevelcuts = {}
    self.willCut = False
    self.useGridFiles = False

  def obtainProcessList(self):
    """Internal function

    Get the process list from storage if whizard.in was not provided

    :return: S_OK(), S_ERROR()
    """
    res = self.ops.getValue("/ProcessList/Location", "")
    if not res:
      return S_ERROR("No process list found")
    processlistloc = res
    # Only download when no local copy of the file is already present.
    if not os.path.exists(os.path.basename(processlistloc)):
      res = self.datMan.getFile(processlistloc)
      if not res['OK']:
        self.log.error('Could not get processlist: %s' % res['Message'])
        return res
    self.processlist = ProcessList(os.path.basename(processlistloc))
    return S_OK()

  def applicationSpecificInputs(self):
    """Resolve module input

    Derives the random seed, the steering parameters, the options/cuts
    dictionaries and the output file name from the workflow/step commons.

    :return: S_OK() or S_ERROR when a dictionary string cannot be parsed
    """
    self.parameters['ENERGY'] = self.energy

    if not self.RandomSeed and self.jobID:
      self.RandomSeed = self.jobID
    # Production jobs get a reproducible seed built from prod and job IDs.
    if 'IS_PROD' in self.workflow_commons or 'IS_DBD_GEN_PROD' in self.workflow_commons:
      self.RandomSeed = int(str(int(self.workflow_commons["PRODUCTION_ID"])) + str(int(self.workflow_commons["JOB_ID"])))
    self.parameters['SEED'] = self.RandomSeed
    self.parameters['NBEVTS'] = self.NumberOfEvents
    self.parameters['LUMI'] = self.Lumi

    ##EVER USED???
    if 'SusyModel' in self.step_commons:
      self.susymodel = self.step_commons['SusyModel']

    self.SteeringFile = os.path.basename(self.step_commons.get("InputFile", self.SteeringFile))
    # Rename a user-provided whizard.in so it cannot clash with the one this
    # module generates later.
    if self.SteeringFile == "whizard.in":
      os.rename(self.SteeringFile, "whizardnew.in")
      self.SteeringFile = "whizardnew.in"

    self.parameters['PROCESS'] = self.evttype

    # Extra parameters come in as a ";"-separated list of key=value pairs.
    listofparams = self.steeringparameters.split(";")
    for param in listofparams:
      if param.count("="):
        self.parameters[param.split("=")[0]] = param.split("=")[1]

    if self.OptionsDictStr:
      self.log.info("Will use whizard.in definition from WhizardOptions.")
      # NOTE(review): eval() of a workflow-supplied string -- assumes the
      # workflow definition is trusted input.
      try:
        self.optionsdict = eval(self.OptionsDictStr)
        if 'integration_input' not in self.optionsdict:
          self.optionsdict['integration_input'] = {}
        if 'seed' not in self.optionsdict['integration_input']:
          self.optionsdict['integration_input']['seed'] = int(self.RandomSeed)
        if 'process_input' in self.optionsdict:
          if 'sqrts' in self.optionsdict['process_input']:
            self.energy = self.optionsdict['process_input']['sqrts']
      except:
        return S_ERROR("Could not convert string to dictionary for optionsdict")

    if self.GenLevelCutDictStr:
      self.log.info("Found generator level cuts")
      try:
        self.genlevelcuts = eval(self.GenLevelCutDictStr)
      except:
        return S_ERROR("Could not convert the generator level cuts back to dictionary")

    # No steering file and no options dict: fetch whizard.in from the
    # process list in runIt.
    if not len(self.SteeringFile) and not self.optionsdict:
      self.getProcessInFile = True

    if "IS_PROD" in self.workflow_commons:
      if self.workflow_commons["IS_PROD"] and not self.willCut:
        #self.OutputFile = getProdFilename(self.OutputFile,int(self.workflow_commons["PRODUCTION_ID"]),
        #                                  int(self.workflow_commons["JOB_ID"]))
        if 'ProductionOutputData' in self.workflow_commons:
          outputlist = self.workflow_commons['ProductionOutputData'].split(";")
          for obj in outputlist:
            if obj.lower().count("_gen_"):
              self.OutputFile = os.path.basename(obj)
              break
        else:
          #This is because most likely there is stdhepcut running after
          self.OutputFile = "willcut.stdhep"
          #getProdFilename(self.OutputFile,int(self.workflow_commons["PRODUCTION_ID"]),
          #                int(self.workflow_commons["JOB_ID"]))

    if "IS_DBD_GEN_PROD" in self.workflow_commons and self.workflow_commons["IS_DBD_GEN_PROD"]:
      #self.OutputFile = getProdFilename(self.OutputFile,int(self.workflow_commons["PRODUCTION_ID"]),
      #                                  int(self.workflow_commons["JOB_ID"]))
      if 'ProductionOutputData' in self.workflow_commons:
        outputlist = self.workflow_commons['ProductionOutputData'].split(";")
        for obj in outputlist:
          self.OutputFile = os.path.basename(obj)
          break
      else:
        self.OutputFile = getProdFilename(self.OutputFile, int(self.workflow_commons["PRODUCTION_ID"]),
                                          int(self.workflow_commons["JOB_ID"]))
    return S_OK()

  def runIt(self):
    """ Called by Agent

    Executes the following
      - resolve input variables
      - resolve installation location
      - resolve dependencies location (beam_spectra)
      - get processlist if needed
      - define output file name
      - prepare whizard.in
      - make magic

    :return: S_OK(), S_ERROR()
    """
    self.result = S_OK()
    if not self.platform:
      self.result = S_ERROR('No ILC platform selected')
    elif not self.applicationLog:
      self.result = S_ERROR('No Log file provided')
    if not self.result['OK']:
      self.log.error("Failed to resolve input parameters:", self.result["Message"])
      return self.result

    if not self.workflowStatus['OK'] or not self.stepStatus['OK']:
      self.log.verbose('Workflow status = %s, step status = %s' % (self.workflowStatus['OK'], self.stepStatus['OK']))
      return S_OK('Whizard should not proceed as previous step did not end properly')

    #if self.debug:
    #  self.excludeAllButEventString = False

    res = getSoftwareFolder(self.platform, self.applicationName, self.applicationVersion)
    if not res['OK']:
      self.log.error("Failed getting software folder", res['Message'])
      self.setApplicationStatus('Failed finding software')
      return res
    mySoftDir = res['Value']

    ###Remove libc
    removeLibc(mySoftDir + "/lib")

    ##Need to fetch the new LD_LIBRARY_PATH
    new_ld_lib_path = getNewLDLibs(self.platform, self.applicationName, self.applicationVersion)
    #Don't forget to prepend the application's libs
    new_ld_lib_path = mySoftDir + "/lib:" + new_ld_lib_path

    ### Resolve dependencies (look for beam_spectra)
    deps = resolveDeps(self.platform, self.applicationName, self.applicationVersion)
    path_to_beam_spectra = ""
    path_to_gridfiles = ""
    for dep in deps:
      res = getSoftwareFolder(self.platform, dep["app"], dep['version'])
      if not res['OK']:
        self.log.error("Failed getting software folder", res['Message'])
        self.setApplicationStatus('Failed finding software')
        return res
      depfolder = res['Value']
      if dep["app"] == "beam_spectra":
        path_to_beam_spectra = depfolder
      elif dep["app"] == "gridfiles":
        path_to_gridfiles = depfolder

    ##Env variables needed to run whizard: avoids hard coded locations
    os.environ['LUMI_LINKER'] = path_to_beam_spectra + "/lumi_linker_000"
    os.environ['PHOTONS_B1'] = path_to_beam_spectra + "/photons_beam1_linker_000"
    os.environ['PHOTONS_B2'] = path_to_beam_spectra + "/photons_beam2_linker_000"
    os.environ['EBEAM'] = path_to_beam_spectra + "/ebeam_in_linker_000"
    os.environ['PBEAM'] = path_to_beam_spectra + "/pbeam_in_linker_000"
    os.environ['LUMI_EE_LINKER'] = path_to_beam_spectra + "/lumi_ee_linker_000"
    os.environ['LUMI_EG_LINKER'] = path_to_beam_spectra + "/lumi_eg_linker_000"
    os.environ['LUMI_GE_LINKER'] = path_to_beam_spectra + "/lumi_ge_linker_000"
    os.environ['LUMI_GG_LINKER'] = path_to_beam_spectra + "/lumi_gg_linker_000"

    list_of_gridfiles = []
    if path_to_gridfiles and self.useGridFiles:
      tmp_list_of_gridfiles = [os.path.join(path_to_gridfiles, item) for item in os.listdir(path_to_gridfiles)]
      gridfilesfound = False
      for path in tmp_list_of_gridfiles:
        if os.path.isdir(path) and path.count(str(self.energy)):
          #Here look for a sub directory for the energy related grid files
          list_of_gridfiles = [os.path.join(path, item) for item in os.listdir(path)]
          gridfilesfound = True
          self.log.info('Found grid files specific for energy %s' % self.energy)
          break
      if not gridfilesfound:
        self.log.info("Will use generic grid files found, hope the energy is set right")
        list_of_gridfiles = [item for item in glob.glob(os.path.join(path_to_gridfiles, "*.grb")) + glob.glob(os.path.join(path_to_gridfiles, "*.grc"))]

    template = False
    if self.SteeringFile.count("template"):
      template = True

    ## Get from process file the proper whizard.in file
    if self.getProcessInFile:
      whizardin = ""
      res = self.obtainProcessList()
      if not res['OK']:
        self.log.error("Could not obtain process list")
        self.setApplicationStatus('Failed getting processlist')
        return res
      whizardin = self.processlist.getInFile(self.evttype)
      if not whizardin:
        self.log.error("Whizard input file was not found in process list, cannot proceed")
        self.setApplicationStatus('Whizard input file was not found')
        return S_ERROR("Error while resolving whizard input file")
      if whizardin.count("template"):
        template = True
      try:
        shutil.copy("%s/%s" % (mySoftDir, whizardin), "./whizardnew.in")
        self.SteeringFile = "whizardnew.in"
      except EnvironmentError:
        self.log.error("Could not copy %s from %s" % (whizardin, mySoftDir))
        self.setApplicationStatus('Failed getting whizard.in file')
        return S_ERROR("Failed to obtain %s" % whizardin)

    ##Check existence of Les Houches input file
    leshouchesfiles = ''
    if not os.path.exists("LesHouches.msugra_1.in"):
      if self.susymodel:
        if self.susymodel == 1:
          if os.path.exists("%s/LesHouches_slsqhh.msugra_1.in" % (mySoftDir)):
            leshouchesfiles = "%s/LesHouches_slsqhh.msugra_1.in" % (mySoftDir)
        if self.susymodel == 2:
          if os.path.exists("%s/LesHouches_chne.msugra_1.in" % (mySoftDir)):
            leshouchesfiles = "%s/LesHouches_chne.msugra_1.in" % (mySoftDir)
      if self.Model:
        if self.genmodel.hasModel(self.Model)['OK']:
          if self.genmodel.getFile(self.Model)['OK']:
            if os.path.exists("%s/%s" % (mySoftDir, self.genmodel.getFile(self.Model)['Value'])):
              leshouchesfiles = "%s/%s" % (mySoftDir, self.genmodel.getFile(self.Model)['Value'])
            else:
              self.log.error("Request LesHouches file is missing, cannot proceed")
              self.setApplicationStatus("LesHouches file missing")
              return S_ERROR("The LesHouches file was not found. Probably you are using a wrong version of whizard.")
          else:
            self.log.warn("No file found attached to model %s" % self.Model)
        else:
          self.log.error("Model undefined:", self.Model)
          self.setApplicationStatus("Model undefined")
          return S_ERROR("No Model %s defined" % self.Model)
    else:
      leshouchesfiles = "LesHouches.msugra_1.in"

    outputfilename = self.evttype

    # Three ways to produce whizard.in: options dictionary, plain steering
    # file, or a template steering file.
    if self.optionsdict:
      self.log.info("Using: %s" % self.optionsdict)
      self.options = WhizardOptions(self.Model)
      res = self.options.changeAndReturn(self.optionsdict)
      if not res['OK']:
        return res
      res = self.options.toWhizardDotIn("whizard.in")
    elif not template:
      res = prepareWhizardFile(self.SteeringFile, outputfilename, self.energy, self.RandomSeed,
                               self.NumberOfEvents, self.Lumi, "whizard.in")
    else:
      res = prepareWhizardFileTemplate(self.SteeringFile, outputfilename, self.parameters, "whizard.in")
    if not res['OK']:
      self.log.error('Something went wrong with input file generation')
      self.setApplicationStatus('Whizard: something went wrong with input file generation')
      return S_ERROR('Something went wrong with whizard.in file generation')
    foundproceesinwhizardin = res['Value']

    scriptName = 'Whizard_%s_Run_%s.sh' % (self.applicationVersion, self.STEP_NUMBER)
    if os.path.exists(scriptName):
      os.remove(scriptName)
    script = open(scriptName, 'w')
    script.write('#!/bin/sh \n')
    script.write('#####################################################################\n')
    script.write('# Dynamically generated script to run a production or analysis job. #\n')
    script.write('#####################################################################\n')
    script.write('declare -x PATH=%s:$PATH\n' % mySoftDir)
    script.write('declare -x LD_LIBRARY_PATH=%s\n' % new_ld_lib_path)
    script.write('env | sort >> localEnv.log\n')
    script.write('echo =============================\n')
    script.write('echo Printing content of whizard.in \n')
    script.write('cat whizard.in\n')
    script.write('echo =============================\n')
    script.write('cp %s/whizard.mdl ./\n' % mySoftDir)
    if leshouchesfiles:
      if not leshouchesfiles == 'LesHouches.msugra_1.in':
        script.write('cp %s ./LesHouches.msugra_1.in\n' % (leshouchesfiles))
      # whizard reads the Les Houches file through fortran unit 71
      script.write('ln -s LesHouches.msugra_1.in fort.71\n')
    if len(list_of_gridfiles):
      for gridfile in list_of_gridfiles:
        script.write('cp %s ./\n' % (gridfile))
    script.write('cp %s/whizard.prc ./\n' % mySoftDir)
    if self.genlevelcuts:
      res = self.makeWhizardDotCut1()
      if not res['OK']:
        script.close()
        self.log.error("Could not create the cut1 file")
        return S_ERROR("Could not create the cut1 file")
    script.write('echo =============================\n')
    script.write('echo Printing content of whizard.prc \n')
    script.write('cat whizard.prc\n')
    script.write('echo =============================\n')
    extracmd = ""
    if not self.debug:
      extracmd = "2>/dev/null"

    comm = ""
    if foundproceesinwhizardin:
      comm = 'whizard --simulation_input \'write_events_file = \"%s\"\'' % (outputfilename)
    else:
      comm = 'whizard --process_input \'process_id =\"%s\"\' --simulation_input \'write_events_file = \"%s\"\' ' % (self.evttype, outputfilename)
    comm = "%s %s %s\n" % (comm, self.extraCLIarguments, extracmd)
    self.log.info("Will run %s" % comm)
    script.write(comm)
    script.write('declare -x appstatus=$?\n')
    script.write('exit $appstatus\n')
    script.close()

    if os.path.exists(self.applicationLog):
      os.remove(self.applicationLog)

    os.chmod(scriptName, 0755)
    comm = 'sh -c "./%s"' % (scriptName)
    self.setApplicationStatus('Whizard %s step %s' % (self.applicationVersion, self.STEP_NUMBER))
    self.stdError = ''
    self.result = shellCall(0, comm, callbackFunction=self.redirectLogOutput, bufferLimit=209715200)
    #self.result = {'OK':True,'Value':(0,'Disabled Execution','')}
    if not self.result['OK']:
      self.log.error("Failed with error %s" % self.result['Message'])
    if not os.path.exists(self.applicationLog):
      self.log.error("Something went terribly wrong, the log file is not present")
      self.setApplicationStatus('%s failed terribly, you are doomed!' % (self.applicationName))
      if not self.ignoreapperrors:
        return S_ERROR('%s did not produce the expected log' % (self.applicationName))

    lumi = ''
    message = ""
    success = False
    ###Analyse log file
    # Scan the application log for the generated luminosity and for any of
    # the known fatal-error markers; first fatal marker wins.
    with open(self.applicationLog) as logfile:
      for line in logfile:
        if line.count('! Event sample corresponds to luminosity'):
          elems = line.split()
          lumi = elems[-1]
        if line.count("*** Fatal error:"):
          status = 1
          message = line
          break
        elif line.count("PYSTOP"):
          status = 1
          message = line
          break
        elif line.count("No matrix element available"):
          status = 1
          message = line
          break
        elif line.count("Floating point exception"):
          status = 1
          message = line
          break
        elif line.count("Event generation finished."):
          success = True
        else:
          status = 0
    if success:
      status = 0
    else:
      status = 1
    self.log.info('The sample generated has an equivalent luminosity of %s' % lumi)
    if lumi:
      self.workflow_commons['Luminosity'] = float(lumi)
    else:
      status = 1

    ##Now care for the cross sections
    info = {}
    # NOTE(review): self.options is only assigned when optionsdict was used;
    # when whizard.in came from a steering file this getAsDict() call runs on
    # the None set in __init__ -- confirm the intended behaviour here.
    res = self.options.getAsDict()
    if os.path.exists("whizard.out") and res['OK']:
      full_opts_dict = res['Value']
      processes = full_opts_dict['process_input']['process_id'].split()
      info = {}
      info['xsection'] = {}
      processes.append('sum')
      with open("whizard.out", "r") as inf:
        for line in inf:
          line = line.rstrip()
          for process in processes:
            if not process:
              continue
            if line.count(" %s " % process):
              info['xsection'][process] = {}
              line = line.lstrip()
              crosssection = line.split()[1]
              err_crosssection = line.split()[2]
              frac = line.split()[4]
              info['xsection'][process]['xsection'] = float(crosssection)
              info['xsection'][process]['err_xsection'] = float(err_crosssection)
              info['xsection'][process]['fraction'] = float(frac)
    if info:
      if 'Info' not in self.workflow_commons:
        self.workflow_commons['Info'] = info
      else:
        self.workflow_commons['Info'].update(info)

    self.log.info("Status after the application execution is %s" % str(status))

    messageout = 'Whizard %s Successful' % (self.applicationVersion)
    failed = False
    if status != 0:
      self.log.error("Whizard execution completed with errors:")
      failed = True
    else:
      self.log.info("Whizard execution completed successfully")
      ###Deal with output file
      if len(self.OutputFile):
        if os.path.exists(outputfilename + ".001.stdhep"):
          self.log.notice("Looking for output files")
          ofnames = glob.glob(outputfilename + '*.stdhep')
          # Several output samples: number them; single sample: rename to
          # the requested OutputFile.
          if len(ofnames) > 1:
            basename = self.OutputFile.split(".stdhep")[0]
            i = 0
            for of in ofnames:
              i += 1
              name = basename + "_" + str(i) + ".stdhep"
              os.rename(of, name)
          else:
            os.rename(outputfilename + ".001.stdhep", self.OutputFile)
        else:
          self.log.error("Whizard execution did not produce a stdhep file")
          self.setApplicationStatus('Whizard %s Failed to produce STDHEP file' % (self.applicationVersion))
          messageout = 'Whizard Failed to produce STDHEP file'
          if not self.ignoreapperrors:
            return S_ERROR(messageout)

    if failed is True:
      self.log.error("==================================\n StdError:\n")
      self.log.error(message)
      self.setApplicationStatus('%s Exited With Status %s' % (self.applicationName, status))
      self.log.error('Whizard Exited With Status %s' % (status))
      messageout = 'Whizard Exited With Status %s' % (status)
      if not self.ignoreapperrors:
        return S_ERROR(messageout)
    else:
      self.setApplicationStatus(messageout)
    return S_OK({"OutputFile": self.OutputFile})

  def makeWhizardDotCut1(self):
    """ When users need whizard cuts, this is called to prepare the file

    Writes one ``process`` section per entry of ``self.genlevelcuts`` into
    ``whizard.cut1``.

    :return: S_OK()
    """
    cutf = open("whizard.cut1", "w")
    for key, values in self.genlevelcuts.items():
      cutf.write("process %s\n" % key)
      for val in values:
        cutf.write(" %s\n" % val)
    cutf.close()
    return S_OK()
def doTheWhizardInstallation():
  """Do the installation for a new whizard version.

  Copy libraries, create tarball, upload processList file,
  add entry in configuration system.

  Exits the process via dexit() on any failure; never returns normally.
  """
  # Environment sanity checks; both helpers return S_OK/S_ERROR dicts.
  res = checkSLCVersion()
  if not res['OK']:
    gLogger.error(res['Message'])
    dexit(1)
  res = checkGFortranVersion()
  if not res['OK']:
    gLogger.error(res['Message'])
    dexit(1)
  # Command-line switches: build folder, platform, whizard version, beam spectra version.
  cliParams = Params()
  cliParams.registerSwitches()
  Script.parseCommandLine( ignoreErrors= False)
  whizardResultFolder = cliParams.path
  platform = cliParams.platform
  whizard_version = cliParams.version
  appVersion = whizard_version
  beam_spectra_version = cliParams.beam_spectra
  if not whizardResultFolder or not whizard_version or not beam_spectra_version:
    Script.showHelp()
    dexit(2)
  # Imports deferred until after parseCommandLine (usual DIRAC script convention).
  from ILCDIRAC.Core.Utilities.ProcessList import ProcessList
  from DIRAC.ConfigurationSystem.Client.Helpers.Operations import Operations
  from DIRAC.Interfaces.API.DiracAdmin import DiracAdmin
  from ILCDIRAC.Core.Utilities.FileUtils import upload
  from DIRAC.DataManagementSystem.Client.DataManager import DataManager
  diracAdmin = DiracAdmin()
  modifiedCS = False  # set True once any CS option was written; gates the final commit
  softwareSection = "/Operations/Defaults/AvailableTarBalls"
  processlistLocation = "ProcessList/Location"
  appName = "whizard"
  # Fetch the current process list file from the catalog into the working directory.
  ops = Operations()
  path_to_process_list = ops.getValue(processlistLocation, "")
  if not path_to_process_list:
    gLogger.error("Could not find process list location in CS")
    dexit(2)
  gLogger.verbose("Getting process list from file catalog")
  datMan = DataManager()
  res = datMan.getFile(path_to_process_list)
  if not res['OK']:
    gLogger.error("Error while getting process list from storage")
    dexit(2)
  gLogger.verbose("done")
  ##just the name of the local file in current working directory
  processlist = os.path.basename(path_to_process_list)
  if not os.path.exists(processlist):
    gLogger.error("Process list does not exist locally")
    dexit(2)
  pl = ProcessList(processlist)
  startDir = os.getcwd()
  inputlist = {}
  os.chdir(whizardResultFolder)
  folderlist = os.listdir(whizardResultFolder)
  # The build folder must contain the executable plus the process/model files.
  whiz_here = folderlist.count("whizard")
  if whiz_here == 0:
    gLogger.error("whizard executable not found in %s, please check" % whizardResultFolder)
    os.chdir(startDir)
    dexit(2)
  whizprc_here = folderlist.count("whizard.prc")
  if whizprc_here == 0:
    gLogger.error("whizard.prc not found in %s, please check" % whizardResultFolder)
    os.chdir(startDir)
    dexit(2)
  whizmdl_here = folderlist.count("whizard.mdl")
  if whizmdl_here == 0:
    gLogger.error("whizard.mdl not found in %s, please check" % whizardResultFolder)
    os.chdir(startDir)
    dexit(2)
  gLogger.verbose("Preparing process list")
  ## FIXME:: What is this doing exactly? Is this necessary? -- APS, JFS
  # Scan the *.in template files for decay_description / process_id lines and
  # build inputlist, a dict keyed by process name.
  for f in folderlist:
    if f.count(".in"):
      # NOTE(review): file handle is never closed — consider 'with open(...)'.
      infile = open(f, "r")
      found_detail = False
      for line in infile:
        if line.count("decay_description"):
          currprocess = f.split(".template.in")[0]
          inputlist[currprocess] = {}
          inputlist[currprocess]["InFile"] = f.rstrip("~")
          inputlist[currprocess]["Detail"] = line.split("\"")[1]
          found_detail = True
        if line.count("process_id") and found_detail:
          process_id = line.split("\"")[1]
          inputlist[currprocess]["Model"] = ""
          inputlist[currprocess]["Generator"] = ""
          inputlist[currprocess]["Restrictions"] = ""
          for process in process_id.split():
            print("Looking for detail of process %s" % (process))
            process_detail = getDetailsFromPRC("whizard.prc", process)
            inputlist[currprocess]["Model"] = process_detail["Model"]
            inputlist[currprocess]["Generator"] = process_detail["Generator"]
            if len(inputlist[currprocess]["Restrictions"]):
              inputlist[currprocess]["Restrictions"] = inputlist[currprocess]["Restrictions"] + ", " + process_detail["Restrictions"]
            else:
              inputlist[currprocess]["Restrictions"] = process_detail["Restrictions"]
      #if len(inputlist[currprocess].items()):
      #  inputlist.append(processdict)
  ## END FIXEME
  ##Update inputlist with what was found looking in the prc file
  processes = readPRCFile("whizard.prc")
  inputlist.update(processes)
  ##get from cross section files the cross sections for the processes in inputlist
  #Need full process list
  for f in folderlist:
    if f.count("cross_sections_"):
      # NOTE(review): file handle is never closed — consider 'with open(...)'.
      crossfile = open(f, "r")
      for line in crossfile:
        line = line.rstrip().lstrip()
        # Skip blanks, comment lines (# or !) and lines without a value column.
        if not len(line):
          continue
        if line[0] == "#" or line[0] == "!":
          continue
        if len(line.split()) < 2:
          continue
        currprocess = line.split()[0]
        if currprocess in inputlist:
          inputlist[currprocess]['CrossSection'] = line.split()[1]
  gLogger.notice("Preparing Tarball")
  ##Make a folder in the current directory of the user to store the whizard libraries, executable et al.
  localWhizardFolderRel = ("whizard" + whizard_version) # relative path
  localWhizardFolder = os.path.join(startDir, localWhizardFolderRel)
  if not os.path.exists(localWhizardFolder):
    os.makedirs(localWhizardFolder)
  localWhizardLibFolder = os.path.join(localWhizardFolder,'lib')
  if os.path.exists(localWhizardLibFolder):
    shutil.rmtree(localWhizardLibFolder)
  os.makedirs(localWhizardLibFolder) ##creates the lib folder
  whizardLibraries = getListOfLibraries(os.path.join(whizardResultFolder, "whizard"))
  copyLibsCall = ["rsync","-avzL"]
  for lib in whizardLibraries:
    copyLibsCall.append(lib)
  copyLibsCall.append(localWhizardLibFolder)
  # NOTE(review): Popen is launched but never waited on — the rsync may still be
  # running when the md5/tar steps below execute; consider subprocess.call or
  # Popen(...).communicate() to serialize. TODO confirm intended behavior.
  subprocess.Popen(copyLibsCall, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
  for fileName in folderlist:
    shutil.copy(fileName, localWhizardFolder)
  ##Get the list of md5 sums for all the files in the folder to be tarred
  os.chdir( localWhizardFolder )
  subprocess.call(["find . -type f -exec md5sum {} > ../md5_checksum.md5 \\; && mv ../md5_checksum.md5 ."], shell=True)
  os.chdir(startDir)
  ##Create the Tarball
  gLogger.notice("Creating Tarball...")
  appTar = localWhizardFolder + ".tgz"
  myappTar = tarfile.open(appTar, "w:gz")
  myappTar.add(localWhizardFolderRel)
  myappTar.close()
  # NOTE(review): the Python 2 'md5' module is deprecated; hashlib.md5 is the
  # modern equivalent. Whole tarball is read into memory for the digest.
  md5sum = md5.md5(open( appTar, 'r' ).read()).hexdigest()
  gLogger.notice("...Done")
  gLogger.notice("Registering new Tarball in CS")
  tarballurl = {}
  # Validate the target platform against the CS software section.
  av_platforms = gConfig.getSections(softwareSection, [])
  if av_platforms['OK']:
    if platform not in av_platforms['Value']:
      gLogger.error("Platform %s unknown, available are %s." % (platform, ", ".join(av_platforms['Value'])))
      gLogger.error("If yours is missing add it in CS")
      dexit(255)
  else:
    gLogger.error("Could not find all platforms available in CS")
    dexit(255)
  av_apps = gConfig.getSections("%s/%s" % (softwareSection, platform), [])
  if not av_apps['OK']:
    gLogger.error("Could not find all applications available in CS")
    dexit(255)
  if appName.lower() in av_apps['Value']:
    versions = gConfig.getSections("%s/%s/%s" % (softwareSection, platform, appName.lower()), [])
    if not versions['OK']:
      gLogger.error("Could not find all versions available in CS")
      dexit(255)
    if appVersion in versions['Value']:
      gLogger.error('Application %s %s for %s already in CS, nothing to do' % (appName.lower(), appVersion, platform))
      dexit(0)
    else:
      # New version of an already-known application: register the tarball name,
      # upload the tarball, then store md5 sum and beam_spectra dependency.
      result = diracAdmin.csSetOption("%s/%s/%s/%s/TarBall" % (softwareSection, platform, appName.lower(), appVersion), os.path.basename(appTar))
      if result['OK']:
        modifiedCS = True
        tarballurl = gConfig.getOption("%s/%s/%s/TarBallURL" % (softwareSection, platform, appName.lower()), "")
        if len(tarballurl['Value']) > 0:
          res = upload(tarballurl['Value'], appTar)
          if not res['OK']:
            gLogger.error("Upload to %s failed" % tarballurl['Value'])
            dexit(255)
      result = diracAdmin.csSetOption("%s/%s/%s/%s/Md5Sum" % (softwareSection, platform, appName.lower(), appVersion), md5sum)
      if result['OK']:
        modifiedCS = True
      result = diracAdmin.csSetOption("%s/%s/%s/%s/Dependencies/beam_spectra/version" % (softwareSection, platform, appName.lower(), appVersion), beam_spectra_version)
  else:
    # Application not yet present for this platform: same registration steps.
    result = diracAdmin.csSetOption("%s/%s/%s/%s/TarBall" % (softwareSection, platform, appName.lower(), appVersion), os.path.basename(appTar))
    if result['OK']:
      modifiedCS = True
      tarballurl = gConfig.getOption("%s/%s/%s/TarBallURL" % (softwareSection, platform, appName.lower()), "")
      if len(tarballurl['Value']) > 0:
        res = upload(tarballurl['Value'], appTar)
        if not res['OK']:
          gLogger.error("Upload to %s failed" % tarballurl['Value'])
          dexit(255)
    result = diracAdmin.csSetOption("%s/%s/%s/%s/Md5Sum" % (softwareSection, platform, appName.lower(), appVersion), md5sum)
    result = diracAdmin.csSetOption("%s/%s/%s/%s/Dependencies/beam_spectra/version" % (softwareSection, platform, appName.lower(), appVersion), beam_spectra_version)
  gLogger.verbose("Done uploading the tar ball")
  os.remove(appTar)
  #Set for all new processes the TarBallURL
  for process in inputlist.keys():
    inputlist[process]['TarBallCSPath'] = tarballurl['Value'] + os.path.basename(appTar)
  pl.updateProcessList(inputlist)
  pl.writeProcessList()
  # Interactive confirmation before touching the shared process list in the catalog.
  raw_input("Do you want to upload the process list? Press ENTER to proceed or CTRL-C to abort!")
  pl.uploadProcessListToFileCatalog(path_to_process_list, appVersion)
  #Commit the changes if nothing has failed and the CS has been modified
  if modifiedCS:
    result = diracAdmin.csCommitChanges(False)
    gLogger.verbose(result)
  gLogger.notice('All done OK!')
  dexit(0)
def getOutputData(baseDir, logLevel="INFO"): gLogger.setLevel(logLevel) exitCode = 0 res = getProxyInfo(False, False) if not res["OK"]: gLogger.error("Failed to get client proxy information.", res["Message"]) DIRAC.exit(71) print "Will search for files in %s" % baseDir activeDirs = [baseDir] # ######################################################################################################## # # before is from dirac-dms-user-lfns rm = DataManager() allFiles = [] while len(activeDirs) > 0: currentDir = activeDirs[0] res = rm.getFilesFromDirectory(currentDir) activeDirs.remove(currentDir) if not res["OK"]: gLogger.error("Error retrieving directory contents", "%s %s" % (currentDir, res["Message"])) else: allFiles = res["Value"] # ######################################################################################################## # # get file ntries = 5 # getFile supports bulk requests files_to_transfer = sortList(allFiles) successful_files = [] failed_files = [] while ntries > 0: if len(failed_files): files_to_transfer = failed_files gLogger.info("getting the following *list* of files %s" % str(files_to_transfer)) result = rm.getFile(files_to_transfer) if not result["OK"]: gLogger.error("Could not complete DataManager request") gLogger.error(str(result["Message"])) gLogger.info("sleep for 10s and re-try") time.sleep(10) break # next is to check what files we got successful_files = result["Value"]["Successful"].keys() failed_files = result["Value"]["Failed"].keys() if len(failed_files): gLogger.info("Could not retrieve one or more files") for key in failed_files: gLogger.error("%s:%s" % (key, result["Value"]["Failed"][key])) for s in successful_files: files_to_transfer.remove(s) for f in failed_files: gLogger.verbose("could not retrieve: %s" % f) else: break ntries -= 1 if len(failed_files): gLogger.error("ERROR could not get all files after %i trials. 
Giving up :(" % ntries) exitCode = 23 if exitCode: return {"OK": False, "Message": "Failed to finish operations.", "RC": exitCode} return S_OK(successful_files)
def getFile(lfn, se=''):
  """Download a single LFN, preferring storage element *se* when given.

  :param lfn: logical file name to download
  :param se: optional storage element name; when set, the file is fetched
             directly from that SE if it holds a replica, otherwise (or when
             empty) the DataManager picks any SE
  :return: S_OK({lfn: {'DownloadOK': code, 'Retry': attempts}}) on success,
           where code 1 = downloaded from the requested SE and
           code 2 = downloaded from an SE chosen by the DataManager;
           S_ERROR(message) otherwise
  """
  dm = DataManager()
  download_ok = 0               # 0 = failed, 1 = from requested SE, 2 = from random SE
  get_active_replicas_ok = False
  lfn_on_se = False             # True when the requested SE holds a replica
  error_msg = ''
  if se:
    # Query the active replicas (up to 5 attempts) to learn whether the
    # requested SE actually holds the file.
    for i in range(0, 5):
      result = dm.getActiveReplicas(lfn)
      if result['OK'] and result['Value']['Successful']:
        get_active_replicas_ok = True
        lfnReplicas = result['Value']['Successful']
        if se in lfnReplicas[lfn]:
          lfn_on_se = True
          break
      # NOTE(review): this branch also sleeps and prints "failed" when the
      # lookup succeeded but the SE simply has no replica — the retry cannot
      # change that outcome.
      time.sleep(3)
      print '- Get replicas for %s failed, try again' % lfn
    if not get_active_replicas_ok:
      return S_ERROR('Get replicas error: %s' % lfn)
  if lfn_on_se:
    se = StorageElement(se)
    # try 5 times to fetch directly from the requested SE
    for j in range(0, 5):
      result = se.getFile(lfn)
      if result['OK'] and result['Value']['Successful'] and result['Value']['Successful'].has_key(lfn):
        break
      # NOTE(review): backs off 3-10 minutes between attempts — including
      # after the final failed attempt, where the sleep serves no purpose.
      time.sleep(random.randint(180, 600))
      print '- %s getStorageFile(%s) failed, try again' % (lfn, se)
    if result['OK']:
      if result['Value']['Successful'] and result['Value']['Successful'].has_key(lfn):
        download_ok = 1
      else:
        error_msg = 'Downloading %s from SE %s error!' % (lfn, se)
    else:
      error_msg = result['Message']
  else:
    if se:
      # i is defined here because the replica-lookup loop above ran (se truthy).
      print 'File %s not found on SE "%s" after %s tries, trying other SE' % (lfn, se, i + 1)
    # try 5 times, letting the DataManager choose the source SE
    for j in range(0, 5):
      result = dm.getFile(lfn)
      if result['OK'] and result['Value']['Successful'] and result['Value']['Successful'].has_key(lfn):
        break
      time.sleep(random.randint(180, 600))
      print '- getFile(%s) failed, try again' % lfn
    if result['OK']:
      if result['Value']['Successful'] and result['Value']['Successful'].has_key(lfn):
        download_ok = 2
      else:
        error_msg = 'Downloading %s from random SE error!' % lfn
    else:
      error_msg = result['Message']
  if download_ok:
    # j + 1 = number of download attempts actually made in the loop that ran.
    return S_OK({lfn: {'DownloadOK': download_ok, 'Retry': j + 1}})
  return S_ERROR(error_msg)