def getWNInjectedScript(self, outputFiles, indent, patternsToZip, postProcessLocationsFP):
    """ Returns the script that has to be injected into the jobscript for postprocessing on the WN """
    import inspect
    script_path = os.path.dirname(os.path.abspath(inspect.getfile(inspect.currentframe())))
    script_location = os.path.join(script_path, 'uploadScript.py')
    from Ganga.GPIDev.Lib.File import FileUtils
    upload_script = FileUtils.loadScript(script_location, '')

    WNscript_location = os.path.join(script_path, 'WNInjectTemplate.py')
    script = FileUtils.loadScript(WNscript_location, '###INDENT###')

    selfConstructedLFNs = False

    if self.remoteDir == '' and self.lfn == '':
        import datetime
        t = datetime.datetime.now()
        this_date = t.strftime("%H.%M_%A_%d_%B_%Y")
        self.lfn = os.path.join(configDirac['DiracLFNBase'], 'GangaFiles_%s' % this_date)
        selfConstructedLFNs = True

    if self.remoteDir == '' and self.lfn != '':
        self.remoteDir = configDirac['DiracLFNBase']

    if self.remoteDir[:4] == 'LFN:':
        lfn_base = self.remoteDir[4:]
    else:
        lfn_base = self.remoteDir

    for this_file in outputFiles:
        isCompressed = this_file.namePattern in patternsToZip

        if regex.search(this_file.namePattern) is not None:
            script += self._WN_wildcard_script(this_file.namePattern, lfn_base, str(isCompressed))
        else:
            script += '###INDENT###print("Uploading: %s as: %s")\n' % (this_file.namePattern, str(os.path.join(lfn_base, this_file.namePattern)))
            script += '###INDENT###processes.append(uploadFile("%s", "%s", %s))\n' % (this_file.namePattern, lfn_base, str(isCompressed))

    if stripProxy(self)._parent is not None and stripProxy(self).getJobObject() and getName(stripProxy(self).getJobObject().backend) != 'Dirac':
        script_env = self._getDiracEnvStr()
    else:
        script_env = str(None)

    script = '\n'.join([str('###INDENT###' + str(line)) for line in script.split('\n')])

    replace_dict = {'###UPLOAD_SCRIPT###': upload_script,
                    '###STORAGE_ELEMENTS###': str(configDirac['allDiracSE']),
                    '###INDENT###': indent,
                    '###LOCATIONSFILE###': postProcessLocationsFP,
                    '###DIRAC_ENV###': script_env}

    for k, v in replace_dict.iteritems():
        script = script.replace(str(k), str(v))

    return script
def getWNInjectedScript(self, outputFiles, indent, patternsToZip, postProcessLocationsFP):
    """ Returns the script that has to be injected into the jobscript for postprocessing on the WN """
    script_path = os.path.dirname(os.path.abspath(inspect.getfile(inspect.currentframe())))
    script_location = os.path.join(script_path, 'uploadScript.py')
    from Ganga.GPIDev.Lib.File import FileUtils
    upload_script = FileUtils.loadScript(script_location, '')

    WNscript_location = os.path.join(script_path, 'WNInjectTemplate.py')
    script = FileUtils.loadScript(WNscript_location, '')

    if not self.remoteDir:
        try:
            job = self.getJobObject()
            lfn_folder = os.path.join("GangaUploadedFiles", "GangaJob_%s" % job.getFQID('.'))
        except AssertionError:
            t = datetime.datetime.now()
            this_date = t.strftime("%H.%M_%A_%d_%B_%Y")
            lfn_folder = os.path.join("GangaUploadedFiles", 'GangaFiles_%s' % this_date)
        self.lfn = os.path.join(DiracFile.diracLFNBase(), lfn_folder, self.namePattern)

    if self.remoteDir == '':
        self.remoteDir = DiracFile.diracLFNBase()

    if self.remoteDir[:4] == 'LFN:':
        lfn_base = self.remoteDir[4:]
    else:
        lfn_base = self.remoteDir

    for this_file in outputFiles:
        isCompressed = this_file.namePattern in patternsToZip

        if regex.search(this_file.namePattern) is not None:
            script += self._WN_wildcard_script(this_file.namePattern, lfn_base, str(isCompressed))
        else:
            script += '###INDENT###print("Uploading: %s as: %s")\n' % (this_file.namePattern, str(os.path.join(lfn_base, this_file.namePattern)))
            script += '###INDENT###processes.append(uploadFile("%s", "%s", %s))\n' % (this_file.namePattern, lfn_base, str(isCompressed))

    if stripProxy(self)._parent is not None and stripProxy(self).getJobObject() and getName(stripProxy(self).getJobObject().backend) != 'Dirac':
        script_env = self._getDiracEnvStr()
    else:
        script_env = str(None)

    script = '\n'.join([str('###INDENT###' + str(line)) for line in script.split('\n')])

    replace_dict = {'###UPLOAD_SCRIPT###': upload_script,
                    '###STORAGE_ELEMENTS###': str(configDirac['allDiracSE']),
                    '###INDENT###': indent,
                    '###LOCATIONSFILE###': postProcessLocationsFP,
                    '###DIRAC_ENV###': script_env}

    for k, v in replace_dict.iteritems():
        script = script.replace(str(k), str(v))

    return script
def getWNInjectedScript(self, outputFiles, indent, patternsToZip, postProcessLocationsFP):
    """ Returns the script that has to be injected into the jobscript for postprocessing on the WN """
    script_path = os.path.dirname(os.path.abspath(inspect.getfile(inspect.currentframe())))
    script_location = os.path.join(script_path, 'uploadScript.py.template')
    upload_script = FileUtils.loadScript(script_location, '')

    WNscript_location = os.path.join(script_path, 'WNInjectTemplate.py.template')
    script = FileUtils.loadScript(WNscript_location, '')

    if not self.remoteDir:
        try:
            job = self.getJobObject()
            lfn_folder = os.path.join("GangaJob_%s" % job.getFQID('.'), "OutputFiles")
        except AssertionError:
            t = datetime.datetime.now()
            this_date = t.strftime("%H.%M_%A_%d_%B_%Y")
            lfn_folder = os.path.join('GangaFiles_%s' % this_date)
        lfn_base = os.path.join(DiracFile.diracLFNBase(self.credential_requirements), lfn_folder)
    else:
        lfn_base = os.path.join(DiracFile.diracLFNBase(self.credential_requirements), self.remoteDir)

    for this_file in outputFiles:
        isCompressed = this_file.namePattern in patternsToZip

        if regex.search(this_file.namePattern) is not None:
            script += self._WN_wildcard_script(this_file.namePattern, lfn_base, str(isCompressed))
        else:
            script += '###INDENT###print("Uploading: %s as: %s")\n' % (this_file.namePattern, str(os.path.join(lfn_base, this_file.namePattern)))
            script += '###INDENT###processes.append(uploadFile("%s", "%s", %s))\n' % (this_file.namePattern, lfn_base, str(isCompressed))

    if stripProxy(self)._parent is not None and stripProxy(self).getJobObject() and getName(stripProxy(self).getJobObject().backend) != 'Dirac':
        script_env = self._getDiracEnvStr()
    else:
        script_env = str(None)

    script = '\n'.join([str('###INDENT###' + str(line)) for line in script.split('\n')])

    replace_dict = {'###UPLOAD_SCRIPT###': upload_script,
                    '###STORAGE_ELEMENTS###': str(configDirac['allDiracSE']),
                    '###INDENT###': indent,
                    '###LOCATIONSFILE###': postProcessLocationsFP,
                    '###DIRAC_ENV###': script_env}

    for k, v in replace_dict.iteritems():
        script = script.replace(str(k), str(v))

    return script
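All three variants above inject their template by substituting ###KEY### markers from a replace_dict. A minimal standalone sketch of that substitution pattern follows; the template text, marker names and values here are illustrative only, not the real contents of WNInjectTemplate.py.

def render_template(template, replace_dict):
    # Substitute every ###KEY### marker, exactly as the replace_dict loops above do.
    for k, v in replace_dict.items():
        template = template.replace(str(k), str(v))
    return template

print(render_template('###INDENT###print("Uploading: ###NAME### as: ###LFN_BASE###/###NAME###")',
                      {'###INDENT###': '    ',
                       '###NAME###': 'ntuple.root',                        # hypothetical file name
                       '###LFN_BASE###': '/lhcb/user/s/someuser/GangaFiles'}))  # hypothetical LFN base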
def getWNScriptDownloadCommand(self, indent):
    import inspect
    script_location = os.path.join(os.path.dirname(os.path.abspath(inspect.getfile(inspect.currentframe()))), "downloadScript.py")
    from Ganga.GPIDev.Lib.File import FileUtils
    download_script = FileUtils.loadScript(script_location, "")

    script = """\n
download_script='''\n###DOWNLOAD_SCRIPT###'''
import subprocess
dirac_env=###DIRAC_ENV###
subprocess.Popen('''python -c "import sys\nexec(sys.stdin.read())"''', shell=True, env=dirac_env, stdin=subprocess.PIPE).communicate(download_script)
"""
    script = "\n".join([str(indent + str(line)) for line in script.split("\n")])

    replace_dict = {
        "###DOWNLOAD_SCRIPT###": download_script,
        "###DIRAC_ENV###": self._getDiracEnvStr(),
        "###LFN###": self.lfn,
    }

    for k, v in replace_dict.iteritems():
        script = script.replace(str(k), str(v))

    return script
def getWNInjectedScript(self, outputFiles, indent, patternsToZip, postProcessLocationsFP):
    """ Returns the script that has to be injected into the jobscript for postprocessing on the WN """
    lcgCommands = []
    for outputFile in outputFiles:
        lcgCommands.append('lcgse %s %s %s' % (outputFile.namePattern, outputFile.lfc_host, outputFile.getUploadCmd()))
        logger.debug("OutputFile (%s) cmd for WN script is: %s" % (outputFile.namePattern, outputFile.getUploadCmd()))

    import inspect
    script_location = os.path.join(os.path.dirname(os.path.abspath(inspect.getfile(inspect.currentframe()))), 'scripts/LCGSEFileWNScript.py')

    from Ganga.GPIDev.Lib.File import FileUtils
    script = FileUtils.loadScript(script_location, '###INDENT###')

    script = script.replace('###LCGCOMMANDS###', str(lcgCommands))
    script = script.replace('###PATTERNSTOZIP###', str(patternsToZip))
    script = script.replace('###INDENT###', indent)
    script = script.replace('###POSTPROCESSLOCATIONSFP###', postProcessLocationsFP)

    return script
def getXMLSummaryScript(indent=''):
    '''Returns the necessary script to parse and make sense of the XMLSummary data'''
    import inspect
    from GangaLHCb.Lib.Applications.AppsBaseUtils import activeSummaryItems

    script = "###INDENT#### Parsed XMLSummary data extraction methods\n"

    for summaryItem in activeSummaryItems().values():
        script += ''.join(['###INDENT###' + line for line in inspect.getsourcelines(summaryItem)[0]])
    script += ''.join(['###INDENT###' + line for line in inspect.getsourcelines(activeSummaryItems)[0]])

    script_location = os.path.join(os.path.dirname(os.path.abspath(inspect.getfile(inspect.currentframe()))), 'XMLWorkerScript.py')

    from Ganga.GPIDev.Lib.File import FileUtils
    xml_script = FileUtils.loadScript(script_location, '###INDENT###')
    script += xml_script

    return script.replace('###INDENT###', indent)
def getWNScriptDownloadCommand(self, indent):
    script_location = os.path.join(os.path.dirname(os.path.abspath(inspect.getfile(inspect.currentframe()))), 'downloadScript.py.template')
    download_script = FileUtils.loadScript(script_location, '')

    script = """\n
download_script='''\n###DOWNLOAD_SCRIPT###'''
import subprocess
dirac_env=###DIRAC_ENV###
subprocess.Popen('''python -c "import sys\nexec(sys.stdin.read())"''', shell=True, env=dirac_env, stdin=subprocess.PIPE).communicate(download_script)
"""
    script = '\n'.join([str(indent + str(line)) for line in script.split('\n')])

    replace_dict = {'###DOWNLOAD_SCRIPT###': download_script,
                    '###DIRAC_ENV###': self._getDiracEnvStr(),
                    '###LFN###': self.lfn}

    for k, v in replace_dict.iteritems():
        script = script.replace(str(k), str(v))

    return script
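Both download-command variants above prefix every line of the generated snippet with the caller-supplied indent so the block nests correctly when spliced into an already-indented job script. A standalone sketch of that line-by-line prefixing step, with an illustrative snippet:

def indent_block(snippet, indent):
    # Mirror of the '\n'.join([indent + line ...]) step used above.
    return '\n'.join([indent + str(line) for line in snippet.split('\n')])

print(indent_block('import subprocess\nprint("downloading ###LFN###")', '    '))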
def getWNCodeForOutputPostprocessing(job, indent):

    # dict containing the list of outputfiles that need to be processed on the
    # WN for every file type
    outputFilesProcessedOnWN = {}
    patternsToZip = []

    if len(job.outputfiles) == 0:
        return ""
    else:
        for outputFile in job.outputfiles:
            outputfileClassName = getName(outputFile)
            backendClassName = getName(job.backend)

            if outputFile.compressed:
                patternsToZip.append(outputFile.namePattern)

            if outputfileClassName not in outputFilesProcessedOnWN.keys():
                outputFilesProcessedOnWN[outputfileClassName] = []

            if outputFilePostProcessingOnWN(job, outputfileClassName):
                outputFilesProcessedOnWN[outputfileClassName].append(outputFile)

    if not patternsToZip:
        if not any(outputFilesProcessedOnWN.values()):
            return ""

    logger.debug("Process: '%s' on WN" % str(outputFilePostProcessingOnWN))

    shortScript = """\n
import os, glob
for patternToZip in ###PATTERNSTOZIP###:
    for currentFile in glob.glob(os.path.join(os.getcwd(),patternToZip)):
        if os.path.isfile(currentFile):
            os.system("gzip %s" % currentFile)

postprocesslocations = open(os.path.join(os.getcwd(), '###POSTPROCESSLOCATIONSFILENAME###'), 'a+')
"""
    from Ganga.GPIDev.Lib.File import FileUtils
    shortScript = FileUtils.indentScript(shortScript, '###INDENT###')

    insertScript = shortScript
    insertScript = insertScript.replace('###PATTERNSTOZIP###', str(patternsToZip))
    insertScript = insertScript.replace('###POSTPROCESSLOCATIONSFILENAME###', getConfig('Output')['PostProcessLocationsFileName'])

    for outputFileName in outputFilesProcessedOnWN.keys():
        if len(outputFilesProcessedOnWN[outputFileName]) > 0:
            insertScript += outputFilesProcessedOnWN[outputFileName][0].getWNInjectedScript(outputFilesProcessedOnWN[outputFileName], indent, patternsToZip, 'postprocesslocations')

    insertScript += """\n
###INDENT###postprocesslocations.close()
"""
    insertScript = insertScript.replace('###INDENT###', indent)

    return insertScript
def getWNCodeForDownloadingInputFiles(job, indent):
    """
    Generate the code to be run on the WN to download input files
    """
    from Ganga.GPIDev.Lib.Dataset.GangaDataset import GangaDataset

    if (job.inputfiles is None or len(job.inputfiles) == 0 and (not job.inputdata or ((not isType(job.inputdata, GangaDataset)) or not job.inputdata.treat_as_inputfiles))):
        return ""

    insertScript = """\n
"""

    # first, go over any LocalFiles in GangaDatasets to be transferred
    # The LocalFiles in inputfiles have already been dealt with
    if job.inputdata and isType(job.inputdata, GangaDataset) and job.inputdata.treat_as_inputfiles:
        for inputFile in job.inputdata.files:
            inputfileClassName = getName(inputFile)
            if inputfileClassName == "LocalFile":
                # special case for LocalFile
                if getName(job.backend) in ["Localhost", "Batch", "LSF", "Condor", "PBS"]:
                    # create symlink
                    shortScript = """
# create symbolic links for LocalFiles
for f in ###FILELIST###:
    os.symlink(f, os.path.basename(f))
"""
                    shortScript = FileUtils.indentScript(shortScript, "###INDENT####")

                    insertScript += shortScript
                    insertScript = insertScript.replace("###FILELIST###", "%s" % inputFile.getFilenameList())

    # if GangaDataset is used, check if they want the inputfiles transferred
    inputfiles_list = job.inputfiles
    if job.inputdata and isType(job.inputdata, GangaDataset) and job.inputdata.treat_as_inputfiles:
        inputfiles_list += job.inputdata.files

    for inputFile in inputfiles_list:
        inputfileClassName = getName(inputFile)
        if outputFilePostProcessingOnWN(job, inputfileClassName):
            inputFile.processWildcardMatches()
            if inputFile.subfiles:
                for subfile in inputFile.subfiles:
                    insertScript += subfile.getWNScriptDownloadCommand(indent)
            else:
                insertScript += inputFile.getWNScriptDownloadCommand(indent)

    insertScript = insertScript.replace("###INDENT###", indent)

    return insertScript
def preparejob(self, jobconfig, master_input_sandbox):

    job = self.getJobObject()
    # print str(job.backend_output_postprocess)
    mon = job.getMonitoringService()
    import Ganga.Core.Sandbox as Sandbox
    subjob_input_sandbox = job.createPackedInputSandbox(jobconfig.getSandboxFiles() + Sandbox.getGangaModulesAsSandboxFiles(Sandbox.getDefaultModules()))

    appscriptpath = [jobconfig.getExeString()] + jobconfig.getArgStrings()
    if self.nice:
        appscriptpath = ['nice', '-n %d' % self.nice] + appscriptpath
    if self.nice < 0:
        logger.warning('increasing process priority is often not allowed, your job may fail due to this')

    sharedoutputpath = job.getOutputWorkspace().getPath()
    ## FIXME DON'T just use the blind list here, request the list of files to be in the output from a method.
    outputpatterns = jobconfig.outputbox
    environment = dict() if jobconfig.env is None else jobconfig.env

    import tempfile
    workdir = tempfile.mkdtemp(dir=config['location'])

    import inspect
    script_location = os.path.join(os.path.dirname(os.path.abspath(inspect.getfile(inspect.currentframe()))), 'LocalHostExec.py')

    from Ganga.GPIDev.Lib.File import FileUtils
    script = FileUtils.loadScript(script_location, '')

    script = script.replace('###INLINEMODULES###', inspect.getsource(Sandbox.WNSandbox))

    from Ganga.GPIDev.Lib.File.OutputFileManager import getWNCodeForOutputSandbox, getWNCodeForOutputPostprocessing, getWNCodeForDownloadingInputFiles, getWNCodeForInputdataListCreation
    from Ganga.Utility.Config import getConfig

    jobidRepr = repr(job.getFQID('.'))

    script = script.replace('###OUTPUTSANDBOXPOSTPROCESSING###', getWNCodeForOutputSandbox(job, ['stdout', 'stderr', '__syslog__'], jobidRepr))
    script = script.replace('###OUTPUTUPLOADSPOSTPROCESSING###', getWNCodeForOutputPostprocessing(job, ''))
    script = script.replace('###DOWNLOADINPUTFILES###', getWNCodeForDownloadingInputFiles(job, ''))
    script = script.replace('###CREATEINPUTDATALIST###', getWNCodeForInputdataListCreation(job, ''))

    script = script.replace('###APPLICATION_NAME###', repr(job.application._name))
    script = script.replace('###INPUT_SANDBOX###', repr(subjob_input_sandbox + master_input_sandbox))
    script = script.replace('###SHAREDOUTPUTPATH###', repr(sharedoutputpath))
    script = script.replace('###APPSCRIPTPATH###', repr(appscriptpath))
    script = script.replace('###OUTPUTPATTERNS###', str(outputpatterns))
    script = script.replace('###JOBID###', jobidRepr)
    script = script.replace('###ENVIRONMENT###', repr(environment))
    script = script.replace('###WORKDIR###', repr(workdir))
    script = script.replace('###INPUT_DIR###', repr(job.getStringInputDir()))

    self.workdir = workdir

    script = script.replace('###GANGADIR###', repr(getConfig('System')['GANGA_PYTHONPATH']))

    wrkspace = job.getInputWorkspace()
    scriptPath = wrkspace.writefile(FileBuffer('__jobscript__', script), executable=1)

    return scriptPath
def preparejob(self, jobconfig, master_input_sandbox):

    job = self.getJobObject()
    # print str(job.backend_output_postprocess)
    mon = job.getMonitoringService()
    import Ganga.Core.Sandbox as Sandbox
    subjob_input_sandbox = job.createPackedInputSandbox(jobconfig.getSandboxFiles() + Sandbox.getGangaModulesAsSandboxFiles(Sandbox.getDefaultModules()))

    appscriptpath = [jobconfig.getExeString()] + jobconfig.getArgStrings()
    if self.nice:
        appscriptpath = ['nice', '-n %d' % self.nice] + appscriptpath
    if self.nice < 0:
        logger.warning('increasing process priority is often not allowed, your job may fail due to this')

    sharedoutputpath = job.getOutputWorkspace().getPath()
    ## FIXME DON'T just use the blind list here, request the list of files to be in the output from a method.
    outputpatterns = jobconfig.outputbox
    environment = dict() if jobconfig.env is None else jobconfig.env

    import tempfile
    workdir = tempfile.mkdtemp(dir=config['location'])

    import inspect
    script_location = os.path.join(os.path.dirname(os.path.abspath(inspect.getfile(inspect.currentframe()))), 'LocalHostExec.py')

    from Ganga.GPIDev.Lib.File import FileUtils
    script = FileUtils.loadScript(script_location, '')

    script = script.replace('###INLINEMODULES###', inspect.getsource(Sandbox.WNSandbox))

    from Ganga.GPIDev.Lib.File.OutputFileManager import getWNCodeForOutputSandbox, getWNCodeForOutputPostprocessing, getWNCodeForDownloadingInputFiles, getWNCodeForInputdataListCreation
    from Ganga.Utility.Config import getConfig

    jobidRepr = repr(job.getFQID('.'))

    script = script.replace('###OUTPUTSANDBOXPOSTPROCESSING###', getWNCodeForOutputSandbox(job, ['stdout', 'stderr', '__syslog__'], jobidRepr))
    script = script.replace('###OUTPUTUPLOADSPOSTPROCESSING###', getWNCodeForOutputPostprocessing(job, ''))
    script = script.replace('###DOWNLOADINPUTFILES###', getWNCodeForDownloadingInputFiles(job, ''))
    script = script.replace('###CREATEINPUTDATALIST###', getWNCodeForInputdataListCreation(job, ''))

    script = script.replace('###APPLICATION_NAME###', repr(getName(job.application)))
    script = script.replace('###INPUT_SANDBOX###', repr(subjob_input_sandbox + master_input_sandbox))
    script = script.replace('###SHAREDOUTPUTPATH###', repr(sharedoutputpath))
    script = script.replace('###APPSCRIPTPATH###', repr(appscriptpath))
    script = script.replace('###OUTPUTPATTERNS###', str(outputpatterns))
    script = script.replace('###JOBID###', jobidRepr)
    script = script.replace('###ENVIRONMENT###', repr(environment))
    script = script.replace('###WORKDIR###', repr(workdir))
    script = script.replace('###INPUT_DIR###', repr(job.getStringInputDir()))

    self.workdir = workdir

    script = script.replace('###GANGADIR###', repr(getConfig('System')['GANGA_PYTHONPATH']))

    wrkspace = job.getInputWorkspace()
    scriptPath = wrkspace.writefile(FileBuffer('__jobscript__', script), executable=1)

    return scriptPath
def getWNCodeForOutputPostprocessing(job, indent):

    # dict containing the list of outputfiles that need to be processed on the
    # WN for every file type
    outputFilesProcessedOnWN = {}
    patternsToZip = []

    if len(job.outputfiles) == 0:
        return ""
    else:
        for outputFile in job.outputfiles:
            outputfileClassName = getName(outputFile)
            backendClassName = getName(job.backend)

            if outputFile.compressed:
                if outputfileClassName == 'LocalFile' and backendClassName not in ['Localhost', 'LSF', 'Interactive']:
                    patternsToZip.append(outputFile.namePattern)
                elif outputfileClassName != 'LocalFile' and outputFilePostProcessingOnWN(job, outputfileClassName):
                    patternsToZip.append(outputFile.namePattern)
                elif outputfileClassName != 'LocalFile' and outputFilePostProcessingOnClient(job, outputfileClassName) and backendClassName not in ['Localhost', 'LSF', 'Interactive']:
                    patternsToZip.append(outputFile.namePattern)

            if outputfileClassName not in outputFilesProcessedOnWN.keys():
                outputFilesProcessedOnWN[outputfileClassName] = []

            if outputFilePostProcessingOnWN(job, outputfileClassName):
                outputFilesProcessedOnWN[outputfileClassName].append(outputFile)

    shortScript = """\n
import os, glob
for patternToZip in ###PATTERNSTOZIP###:
    for currentFile in glob.glob(os.path.join(os.getcwd(),patternToZip)):
        os.system("gzip %s" % currentFile)

postprocesslocations = file(os.path.join(os.getcwd(), '###POSTPROCESSLOCATIONSFILENAME###'), 'w')
"""
    shortScript = FileUtils.indentScript(shortScript, '###INDENT###')

    insertScript = shortScript
    insertScript = insertScript.replace('###PATTERNSTOZIP###', str(patternsToZip))
    insertScript = insertScript.replace('###POSTPROCESSLOCATIONSFILENAME###', getConfig('Output')['PostProcessLocationsFileName'])

    for outputFileName in outputFilesProcessedOnWN.keys():
        if len(outputFilesProcessedOnWN[outputFileName]) > 0:
            insertScript += outputFilesProcessedOnWN[outputFileName][0].getWNInjectedScript(outputFilesProcessedOnWN[outputFileName], indent, patternsToZip, 'postprocesslocations')

    insertScript += """\n
###INDENT###postprocesslocations.close()
"""
    insertScript = insertScript.replace('###INDENT###', indent)

    return insertScript
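For reference, once ###PATTERNSTOZIP### and ###POSTPROCESSLOCATIONSFILENAME### are substituted, the preamble produced by the two getWNCodeForOutputPostprocessing variants reduces to a worker-node snippet along these lines; the pattern list and locations file name below are illustrative stand-ins, not the configured values.

import os, glob

patterns_to_zip = ['*.log', 'ntuple.root']  # illustrative stand-in for ###PATTERNSTOZIP###
for patternToZip in patterns_to_zip:
    for currentFile in glob.glob(os.path.join(os.getcwd(), patternToZip)):
        if os.path.isfile(currentFile):
            os.system("gzip %s" % currentFile)

# illustrative stand-in for ###POSTPROCESSLOCATIONSFILENAME###
postprocesslocations = open(os.path.join(os.getcwd(), '__postprocesslocations__'), 'a+')
postprocesslocations.close()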
def downloadWrapper(app):
    from os.path import join, split
    from Ganga.GPIDev.Lib.File import FileBuffer
    import string
    from Ganga.GPIDev.Lib.File import getSharedPath

    rootsys = join('.', 'root')
    rootenv = {'ROOTSYS': rootsys}

    script = app.script
    if script == File():
        if not app.usepython:
            script = File(defaultScript())
        else:
            script = File(defaultPyRootScript())
    else:
        script = File(os.path.join(os.path.join(Ganga.GPIDev.Lib.File.getSharedPath(), app.is_prepared.name), os.path.basename(app.script.name)))

    commandline = ''
    scriptPath = join('.', script.subdir, split(script.name)[1])
    if not app.usepython:
        # Arguments to the ROOT script need to be a comma separated list
        # enclosed in (). Strings should be enclosed in escaped double quotes.
        arglist = []
        for arg in app.args:
            if isinstance(arg, str):
                arglist.append('\\\'' + arg + '\\\'')
            else:
                arglist.append(arg)
        rootarg = '\(\"' + string.join([str(s) for s in arglist], ',') + '\"\)'

        # use root
        commandline = 'root.exe -b -q ' + scriptPath + rootarg + ''
    else:
        # use python
        pyarg = string.join([str(s) for s in app.args], ' ')
        commandline = '\'%(PYTHONCMD)s ' + scriptPath + ' ' + pyarg + ' -b \''

    logger.debug("Command line: %s: ", commandline)

    # Write a wrapper script that installs ROOT and runs script
    script_location = os.path.join(os.path.dirname(os.path.abspath(inspect.getfile(inspect.currentframe()))), 'wrapperScriptTemplate.py')

    from Ganga.GPIDev.Lib.File import FileUtils
    wrapperscript = FileUtils.loadScript(script_location, '')

    wrapperscript = wrapperscript.replace('###COMMANDLINE###', commandline)
    wrapperscript = wrapperscript.replace('###ROOTVERSION###', app.version)
    wrapperscript = wrapperscript.replace('###SCRIPTPATH###', scriptPath)
    wrapperscript = wrapperscript.replace('###USEPYTHON###', str(app.usepython))

    logger.debug('Script to run on worker node\n' + wrapperscript)

    scriptName = "rootwrapper_generated_%s.py" % randomString()
    runScript = FileBuffer(scriptName, wrapperscript, executable=1)

    inputsandbox = app._getParent().inputsandbox + [script]
    return runScript, inputsandbox, rootenv
def getWNCodeForDownloadingInputFiles(job, indent):
    """
    Generate the code to be run on the WN to download input files
    """
    from Ganga.GPIDev.Lib.Dataset.GangaDataset import GangaDataset

    if job.inputfiles is None or len(job.inputfiles) == 0 and\
        (not job.inputdata or ((not isType(job.inputdata, GangaDataset)) or\
            not job.inputdata.treat_as_inputfiles)):
        return ""

    insertScript = """\n
"""

    # first, go over any LocalFiles in GangaDatasets to be transferred
    # The LocalFiles in inputfiles have already been dealt with
    if job.inputdata and isType(job.inputdata, GangaDataset) and job.inputdata.treat_as_inputfiles:
        for inputFile in job.inputdata.files:
            inputfileClassName = getName(inputFile)
            if inputfileClassName == "LocalFile":
                # special case for LocalFile
                if getName(job.backend) in ['Localhost', 'Batch', 'LSF', 'Condor', 'PBS']:
                    # create symlink
                    shortScript = """
# create symbolic links for LocalFiles
for f in ###FILELIST###:
    os.symlink(f, os.path.basename(f))
"""
                    shortScript = FileUtils.indentScript(shortScript, '###INDENT####')

                    insertScript += shortScript
                    insertScript = insertScript.replace('###FILELIST###', "%s" % inputFile.getFilenameList())

    # if GangaDataset is used, check if they want the inputfiles transferred
    inputfiles_list = job.inputfiles
    if job.inputdata and isType(job.inputdata, GangaDataset) and job.inputdata.treat_as_inputfiles:
        inputfiles_list += job.inputdata.files

    for inputFile in inputfiles_list:
        inputfileClassName = getName(inputFile)
        if outputFilePostProcessingOnWN(job, inputfileClassName):
            inputFile.processWildcardMatches()
            if inputFile.subfiles:
                for subfile in inputFile.subfiles:
                    insertScript += subfile.getWNScriptDownloadCommand(indent)
            else:
                insertScript += inputFile.getWNScriptDownloadCommand(indent)

    insertScript = insertScript.replace('###INDENT###', indent)

    return insertScript
def getWNInjectedScript(self, outputFiles, indent, patternsToZip, postProcessLocationsFP):
    """ Returns the script that has to be injected into the jobscript for postprocessing on the WN """
    massStorageCommands = []

    massStorageConfig = getConfig('Output')['MassStorageFile']['uploadOptions']

    for outputFile in outputFiles:
        outputfilenameformat = 'None'
        if outputFile.outputfilenameformat != None and outputFile.outputfilenameformat != '':
            outputfilenameformat = outputFile.outputfilenameformat
        massStorageCommands.append(['massstorage', outputFile.namePattern, outputfilenameformat,
                                    massStorageConfig['mkdir_cmd'], massStorageConfig['cp_cmd'],
                                    massStorageConfig['ls_cmd'], massStorageConfig['path']])

    import inspect
    script_location = os.path.join(os.path.dirname(os.path.abspath(inspect.getfile(inspect.currentframe()))), 'scripts/MassStorageFileWNScript.py')

    from Ganga.GPIDev.Lib.File import FileUtils
    script = FileUtils.loadScript(script_location, '###INDENT###')

    jobfqid = self.getJobObject().fqid

    jobid = jobfqid
    subjobid = ''

    if (jobfqid.find('.') > -1):
        jobid = jobfqid.split('.')[0]
        subjobid = jobfqid.split('.')[1]

    replace_dict = {'###MASSSTORAGECOMMANDS###': repr(massStorageCommands),
                    '###PATTERNSTOZIP###': str(patternsToZip),
                    '###INDENT###': indent,
                    '###POSTPROCESSLOCATIONSFP###': postProcessLocationsFP,
                    '###FULLJOBDIR###': str(jobfqid.replace('.', os.path.sep)),
                    '###JOBDIR###': str(jobid),
                    '###SUBJOBDIR###': str(subjobid)}

    for k, v in replace_dict.iteritems():
        script = script.replace(str(k), str(v))

    return script
def defaultScript():
    tmpdir = tempfile.mktemp()
    os.mkdir(tmpdir)
    fname = os.path.join(tmpdir, 'test.C')

    script_location = os.path.join(os.path.dirname(os.path.abspath(inspect.getfile(inspect.currentframe()))), 'defaultRootScript.C')

    from Ganga.GPIDev.Lib.File import FileUtils
    script = FileUtils.loadScript(script_location, '')

    with open(fname, 'w') as f:
        f.write(script)

    return fname
def gaudi_script_template():
    '''Creates the script that will be executed by the DIRAC job. '''
    import inspect
    script_location = os.path.join(os.path.dirname(os.path.abspath(inspect.getfile(inspect.currentframe()))), 'GaudiTemplate.py')
    from Ganga.GPIDev.Lib.File import FileUtils
    script_template = FileUtils.loadScript(script_location, '')

    return script_template
def getWNCodeForDownloadingInputFiles(job, indent):
    """
    Generate the code to be run on the WN to download input files
    """
    if len(job.inputfiles) == 0 and (not job.inputdata or job.inputdata._name != "GangaDataset" or not job.inputdata.treat_as_inputfiles):
        return ""

    insertScript = """\n
"""

    # first, go over any LocalFiles in GangaDatasets to be transferred
    # The LocalFiles in inputfiles have already been dealt with
    if job.inputdata and job.inputdata._name == "GangaDataset" and job.inputdata.treat_as_inputfiles:
        for inputFile in job.inputdata.files:
            inputfileClassName = stripProxy(inputFile).__class__.__name__
            if inputfileClassName == "LocalFile":
                # special case for LocalFile
                if stripProxy(job.backend).__class__.__name__ in ['Localhost', 'Batch', 'LSF', 'Condor', 'PBS']:
                    # create symlink
                    shortScript = """
# create symbolic links for LocalFiles
for f in ###FILELIST###:
    os.symlink(f, os.path.basename(f))
"""
                    shortScript = FileUtils.indentScript(shortScript, '###INDENT####')

                    insertScript += shortScript
                    insertScript = insertScript.replace('###FILELIST###', "%s" % inputFile.getFilenameList())

    # if GangaDataset is used, check if they want the inputfiles transferred
    inputfiles_list = job.inputfiles
    if job.inputdata and job.inputdata._name == "GangaDataset" and job.inputdata.treat_as_inputfiles:
        inputfiles_list += job.inputdata.files

    for inputFile in inputfiles_list:
        inputfileClassName = stripProxy(inputFile).__class__.__name__
        if outputFilePostProcessingOnWN(job, inputfileClassName):
            inputFile.processWildcardMatches()
            if inputFile.subfiles:
                for subfile in inputFile.subfiles:
                    insertScript += subfile.getWNScriptDownloadCommand(indent)
            else:
                insertScript += inputFile.getWNScriptDownloadCommand(indent)

    insertScript = insertScript.replace('###INDENT###', indent)

    return insertScript
def diracAPI_script_template():
    """ Generate and return the DiracAPI job submission template """
    import inspect
    import os.path
    script_location = os.path.join(os.path.dirname(os.path.abspath(inspect.getfile(inspect.currentframe()))), 'DiracRTHScript.py')
    from Ganga.GPIDev.Lib.File import FileUtils
    script_template = FileUtils.loadScript(script_location, '')
    return script_template
def diracAPI_script_template():
    # NOTE setOutputData(replicate) replicate keyword only for LHCbDirac. must
    # move there when get a chance.
    import inspect
    import os.path
    script_location = os.path.join(os.path.dirname(os.path.abspath(inspect.getfile(inspect.currentframe()))), 'DiracRTHScript.py')
    from Ganga.GPIDev.Lib.File import FileUtils
    script_template = FileUtils.loadScript(script_location, '')
    return script_template
def diracAPI_script_template():
    """ Generate and return the DiracAPI job submission template """
    import inspect
    import os.path
    script_location = os.path.join(os.path.dirname(os.path.abspath(inspect.getfile(inspect.currentframe()))), 'DiracRTHScript.py.template')
    from Ganga.GPIDev.Lib.File import FileUtils
    script_template = FileUtils.loadScript(script_location, '')
    return script_template
def _WN_wildcard_script(self, namePattern, lfnBase, compressed):
    wildcard_str = """
for f in glob.glob('###NAME_PATTERN###'):
    processes.append(uploadFile(os.path.basename(f), '###LFN_BASE###', ###COMPRESSED###, '###NAME_PATTERN###'))
"""
    wildcard_str = FileUtils.indentScript(wildcard_str, '###INDENT###')

    replace_dict = {'###NAME_PATTERN###': namePattern,
                    '###LFN_BASE###': lfnBase,
                    '###COMPRESSED###': compressed}

    for k, v in replace_dict.iteritems():
        wildcard_str = wildcard_str.replace(str(k), str(v))

    return wildcard_str
def wrapper(self, regexp, version, timeout, kernel):
    """Write a wrapper Python script that executes the notebooks"""

    wrapperscript = FileUtils.loadScript(self.templatelocation(), '')

    wrapperscript = wrapperscript.replace('###NBFILES###', str(regexp))
    wrapperscript = wrapperscript.replace('###VERSION###', str(version))
    wrapperscript = wrapperscript.replace('###TIMEOUT###', str(timeout))
    wrapperscript = wrapperscript.replace('###KERNEL###', str(kernel))
    wrapperscript = wrapperscript.replace('###UUID###', str(uuid.uuid4()))

    logger.debug('Script to run on worker node\n' + wrapperscript)

    scriptName = "notebook_wrapper_generated.py"
    runScript = FileBuffer(scriptName, wrapperscript, executable=1)

    return runScript
def getWNScriptDownloadCommand(self, indent):
    # create symlink
    shortScript = """
# create symbolic links for LocalFiles
for f in ###FILELIST###:
    if not os.path.exists(os.path.basename(f)):
        os.symlink(f, os.path.basename(f))
"""
    from Ganga.GPIDev.Lib.File import FileUtils
    shortScript = FileUtils.indentScript(shortScript, '###INDENT###')

    shortScript = shortScript.replace('###FILELIST###', "%s" % self.getFilenameList())

    return shortScript
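Once ###FILELIST### is substituted, the LocalFile download command above expands to a worker-node snippet roughly like the following; the file paths are illustrative only.

import os

file_list = ['/data/inputs/events.root', '/data/inputs/extra.root']  # illustrative stand-in for ###FILELIST###
for f in file_list:
    # symlink each input file into the working directory, skipping names that already exist
    if not os.path.exists(os.path.basename(f)):
        os.symlink(f, os.path.basename(f))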
def create_runscript(useCmake=False):
    """ Return a run script for Gaudi Apps """
    from GangaLHCb.Lib.Applications.EnvironFunctions import construct_run_environ
    environ_script = construct_run_environ(useCmake)

    import inspect
    script_location = os.path.join(os.path.dirname(os.path.abspath(inspect.getfile(inspect.currentframe()))), 'WorkerScript.py.template')
    from Ganga.GPIDev.Lib.File import FileUtils
    worker_script = FileUtils.loadScript(script_location, '')

    worker_script = worker_script.replace('###CONSTRUCT_ENVIRON###', environ_script)

    return worker_script
def defaultPyRootScript():
    tmpdir = tempfile.mktemp()
    os.mkdir(tmpdir)
    fname = os.path.join(tmpdir, "test.py")

    script_location = os.path.join(os.path.dirname(os.path.abspath(inspect.getfile(inspect.currentframe()))), "defaultPyRootScript.py")

    from Ganga.GPIDev.Lib.File import FileUtils
    default_script = FileUtils.loadScript(script_location, "")

    with open(fname, "w") as f:
        f.write(default_script)

    return fname
def _WN_wildcard_script(self, namePattern, lfnBase, compressed):
    wildcard_str = """
import glob, hashlib
for f in glob.glob('###NAME_PATTERN###'):
    processes.append(uploadFile(os.path.basename(f), '###LFN_BASE###', ###COMPRESSED###, '###NAME_PATTERN###'))
"""
    from Ganga.GPIDev.Lib.File import FileUtils
    wildcard_str = FileUtils.indentScript(wildcard_str, "###INDENT###")

    replace_dict = {"###NAME_PATTERN###": namePattern,
                    "###LFN_BASE###": lfnBase,
                    "###COMPRESSED###": compressed}

    for k, v in replace_dict.iteritems():
        wildcard_str = wildcard_str.replace(str(k), str(v))

    return wildcard_str
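After substitution, the wildcard snippet from the two _WN_wildcard_script variants runs on the worker node roughly as sketched below. uploadFile and processes are provided by the injected WN template (uploadScript.py), so they are stubbed here only to make the sketch self-contained; the pattern and LFN base are illustrative.

import glob, os

processes = []

def uploadFile(name, lfn_base, compressed, wildcard=''):
    # Stub standing in for the uploadFile defined by the injected WN template.
    print("would upload %s to %s (compressed=%s)" % (name, lfn_base, compressed))

for f in glob.glob('*.root'):  # illustrative stand-in for ###NAME_PATTERN###
    processes.append(uploadFile(os.path.basename(f), '/lhcb/user/s/someuser/GangaFiles', False, '*.root'))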
def create_runscript(useCmake=False):
    """ Return a run script for Gaudi Apps """
    from GangaLHCb.Lib.Applications.EnvironFunctions import construct_run_environ
    environ_script = construct_run_environ(useCmake)

    import inspect
    script_location = os.path.join(os.path.dirname(os.path.abspath(inspect.getfile(inspect.currentframe()))), 'WorkerScript.py')
    from Ganga.GPIDev.Lib.File import FileUtils
    worker_script = FileUtils.loadScript(script_location, '')

    worker_script = worker_script.replace('###CONSTRUCT_ENVIRON###', environ_script)

    return worker_script
def getXMLSummaryScript(indent=''):
    '''Returns the necessary script to parse and make sense of the XMLSummary data'''
    import inspect
    from GangaLHCb.Lib.Applications.AppsBaseUtils import activeSummaryItems

    script = "###INDENT#### Parsed XMLSummary data extraction methods\n"

    for summaryItem in activeSummaryItems().values():
        script += ''.join(['###INDENT###' + line for line in inspect.getsourcelines(summaryItem)[0]])
    script += ''.join(['###INDENT###' + line for line in inspect.getsourcelines(activeSummaryItems)[0]])

    script_location = os.path.join(os.path.dirname(os.path.abspath(inspect.getfile(inspect.currentframe()))), 'XMLWorkerScript.py.template')

    from Ganga.GPIDev.Lib.File import FileUtils
    xml_script = FileUtils.loadScript(script_location, '###INDENT###')
    script += xml_script

    return script.replace('###INDENT###', indent)
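Both getXMLSummaryScript variants inline the source of each summary helper with inspect.getsourcelines and prefix every line with the ###INDENT### marker before the final substitution. A self-contained sketch of that inlining step; the helper below is a placeholder, not a real activeSummaryItems() entry.

import inspect

def example_summary_item(summary):
    # Placeholder standing in for one activeSummaryItems() helper.
    return summary.get('success', 0)

script = "###INDENT#### Parsed XMLSummary data extraction methods\n"
script += ''.join(['###INDENT###' + line for line in inspect.getsourcelines(example_summary_item)[0]])
print(script.replace('###INDENT###', '    '))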
def prepare(self, app, appsubconfig, appmasterconfig, jobmasterconfig):
    logger.debug("Prepare")

    inputsandbox, outputsandbox = sandbox_prepare(app, appsubconfig, appmasterconfig, jobmasterconfig)

    job = app.getJobObject()

    logger.debug("Loading pickle files")

    #outputfiles=set([file.namePattern for file in job.outputfiles]).difference(set(getOutputSandboxPatterns(job)))
    # Can't wait to get rid of this when people no-longer specify
    # inputdata in options file
    #######################################################################
    # splitters ensure that subjobs pick up inputdata from job over that in
    # optsfiles but need to take care of unsplit jobs
    if not job.master:
        share_path = os.path.join(get_share_path(app), 'inputdata', 'options_data.pkl')

        if not job.inputdata:
            if os.path.exists(share_path):
                f = open(share_path, 'r+b')
                job.inputdata = pickle.load(f)
                f.close()

    #######################################################################
    # Can't wait to get rid of this when people no-longer specify
    # outputsandbox or outputdata in options file
    #######################################################################
    share_path = os.path.join(get_share_path(app), 'output', 'options_parser.pkl')

    logger.debug("Adding info from pickle files")

    if os.path.exists(share_path):
        f = open(share_path, 'r+b')
        parser = pickle.load(f)
        f.close()

        outbox, outdata = parser.get_output(job)

        from Ganga.GPIDev.Lib.File import FileUtils
        from Ganga.GPIDev.Base.Filters import allComponentFilters

        fileTransform = allComponentFilters['gangafiles']
        job.non_copyable_outputfiles.extend([fileTransform(this_file, None) for this_file in outdata if not FileUtils.doesFileExist(this_file, job.outputfiles)])
        job.non_copyable_outputfiles.extend([fileTransform(this_file, None) for this_file in outbox if not FileUtils.doesFileExist(this_file, job.outputfiles)])

    outputsandbox = [f.namePattern for f in job.non_copyable_outputfiles]
    outputsandbox.extend([f.namePattern for f in job.outputfiles])
    outputsandbox = unique(outputsandbox)
    #######################################################################

    logger.debug("Doing XML Catalog stuff")

    data = job.inputdata
    data_str = ''
    if data:
        logger.debug("Returning options String")
        data_str = data.optionsString()
        if data.hasLFNs():
            logger.debug("Returning Catalogue")
            inputsandbox.append(FileBuffer('catalog.xml', data.getCatalog()))
            cat_opts = '\nfrom Gaudi.Configuration import FileCatalog\nFileCatalog().Catalogs = ["xmlcatalog_file:catalog.xml"]\n'
            data_str += cat_opts

    logger.debug("Doing splitter_data stuff")
    if hasattr(job, '_splitter_data'):
        data_str += job._splitter_data

    inputsandbox.append(FileBuffer('data.py', data_str))

    logger.debug("Doing GaudiPython stuff")

    cmd = 'python ./gaudipython-wrapper.py'
    opts = ''
    if is_gaudi_child(job.application):
        opts = 'options.pkl'
        cmd = 'gaudirun.py ' + ' '.join(job.application.args) + ' %s data.py' % opts

    logger.debug("Setting up script")

    script = script_generator(create_runscript(job.application.newStyleApp),
                              remove_unreplaced=False,
                              OPTS=opts,
                              PROJECT_OPTS=job.application.setupProjectOptions,
                              APP_NAME=job.application.appname,
                              APP_VERSION=job.application.version,
                              APP_PACKAGE=job.application.package,
                              PLATFORM=job.application.platform,
                              CMDLINE=cmd,
                              XMLSUMMARYPARSING=getXMLSummaryScript())  # ,
                              # OUTPUTFILESINJECTEDCODE = getWNCodeForOutputPostprocessing(job, ''))

    logger.debug("Returning StandardJobConfig")
    return StandardJobConfig(FileBuffer('gaudi-script.py', script, executable=1),
                             inputbox=unique(inputsandbox),
                             outputbox=unique(outputsandbox))
def prepare(self, app, appsubconfig, appmasterconfig, jobmasterconfig):

    inputsandbox, outputsandbox = sandbox_prepare(app, appsubconfig, appmasterconfig, jobmasterconfig)

    job = app.getJobObject()

    if job.inputdata:
        if not job.splitter:
            if len(job.inputdata) > 100:
                raise BackendError("You're submitting a job to Dirac with no splitter and more than 100 files, please add a splitter and try again!")

    outputfiles = [this_file for this_file in job.outputfiles if isType(this_file, DiracFile)]

    data_str = "import os\n"
    data_str += "execfile('data.py')\n"

    if hasattr(job, "_splitter_data"):
        data_str += job._splitter_data
    inputsandbox.append(FileBuffer("data-wrapper.py", data_str))

    input_data = []

    # Can't wait to get rid of this when people no-longer specify
    # inputdata in options file
    #######################################################################
    # splitters ensure that subjobs pick up inputdata from job over that in
    # optsfiles but need to take care of unsplit jobs
    if not job.master:
        share_path = os.path.join(get_share_path(app), "inputdata", "options_data.pkl")

        if not job.inputdata:
            if os.path.exists(share_path):
                f = open(share_path, "r+b")
                job.inputdata = pickle.load(f)
                f.close()

    #######################################################################
    # Can't wait to get rid of this when people no-longer specify
    # outputsandbox or outputdata in options file
    #######################################################################
    share_path = os.path.join(get_share_path(app), "output", "options_parser.pkl")

    if os.path.exists(share_path):
        # if not os.path.exists(share_path):
        # raise GangaException('could not find the parser')
        f = open(share_path, "r+b")
        parser = pickle.load(f)
        f.close()

        outbox, outdata = parser.get_output(job)

        from Ganga.GPIDev.Lib.File import FileUtils
        from Ganga.GPIDev.Base.Filters import allComponentFilters

        fileTransform = allComponentFilters["gangafiles"]
        outdata_files = [fileTransform(this_file, None) for this_file in outdata if not FileUtils.doesFileExist(this_file, job.outputfiles)]
        job.non_copyable_outputfiles.extend([output_file for output_file in outdata_files if not isType(output_file, DiracFile)])
        outbox_files = [fileTransform(this_file, None) for this_file in outbox if not FileUtils.doesFileExist(this_file, job.outputfiles)]
        job.non_copyable_outputfiles.extend([outbox_file for outbox_file in outbox_files if not isType(outbox_file, DiracFile)])

    outputsandbox = [f.namePattern for f in job.non_copyable_outputfiles]
    outputsandbox.extend([f.namePattern for f in job.outputfiles if not isType(f, DiracFile)])
    outputsandbox = unique(outputsandbox)  # + outbox[:])
    #######################################################################

    input_data_dirac, parametricinput_data = dirac_inputdata(job.application)

    if input_data_dirac is not None:
        for f in input_data_dirac:
            if isType(f, DiracFile):
                input_data.append(f.lfn)
            elif isType(f, str):
                input_data.append(f)
            else:
                raise ApplicationConfigurationError("Don't know how to handle anything other than DiracFiles or strings to LFNs!")

    commandline = "python ./gaudipython-wrapper.py"
    if is_gaudi_child(app):
        commandline = "gaudirun.py "
        commandline += " ".join([str(arg) for arg in app.args])
        commandline += " options.pkl data-wrapper.py"

    logger.debug("Command line: %s: ", commandline)

    gaudi_script_path = os.path.join(job.getInputWorkspace().getPath(), "gaudi-script.py")

    script_generator(
        gaudi_script_template(),
        # remove_unreplaced = False,
        outputfile_path=gaudi_script_path,
        PLATFORM=app.platform,
        COMMAND=commandline,
        XMLSUMMARYPARSING=getXMLSummaryScript()  # ,
        # OUTPUTFILESINJECTEDCODE = getWNCodeForOutputPostprocessing(job, ' ')
    )

    # logger.debug( "input_data %s" % str( input_data ) )

    # We want to propagate the ancestor depth to DIRAC when we have
    # inputdata set
    if job.inputdata is not None and isType(job.inputdata, LHCbDataset):

        # As the RT Handler we already know we have a Dirac backend
        if type(job.backend.settings) is not dict:
            raise ApplicationConfigurationError(None, "backend.settings should be a dict")

        if "AncestorDepth" in job.backend.settings:
            ancestor_depth = job.backend.settings["AncestorDepth"]
        else:
            ancestor_depth = job.inputdata.depth
    else:
        ancestor_depth = 0

    lhcbdirac_script_template = lhcbdiracAPI_script_template()

    lhcb_dirac_outputfiles = lhcbdirac_outputfile_jdl(outputfiles)

    # not necessary to use lhcbdiracAPI_script_template any more as doing our own uploads to Dirac
    # remove after Ganga6 release
    # NOTE special case for replicas: replicate string must be empty for no
    # replication
    dirac_script = script_generator(
        lhcbdirac_script_template,
        DIRAC_IMPORT="from LHCbDIRAC.Interfaces.API.DiracLHCb import DiracLHCb",
        DIRAC_JOB_IMPORT="from LHCbDIRAC.Interfaces.API.LHCbJob import LHCbJob",
        DIRAC_OBJECT="DiracLHCb()",
        JOB_OBJECT="LHCbJob()",
        NAME=mangle_job_name(app),
        APP_NAME=app.appname,
        APP_VERSION=app.version,
        APP_SCRIPT=gaudi_script_path,
        APP_LOG_FILE="Ganga_%s_%s.log" % (app.appname, app.version),
        INPUTDATA=input_data,
        PARAMETRIC_INPUTDATA=parametricinput_data,
        OUTPUT_SANDBOX=API_nullifier(outputsandbox),
        OUTPUTFILESSCRIPT=lhcb_dirac_outputfiles,
        # job.fqid,#outputdata_path,
        OUTPUT_PATH="",
        OUTPUT_SE=getConfig("DIRAC")["DiracOutputDataSE"],
        SETTINGS=diracAPI_script_settings(job.application),
        DIRAC_OPTS=job.backend.diracOpts,
        PLATFORM=app.platform,
        REPLICATE="True" if getConfig("DIRAC")["ReplicateOutputData"] else "",
        ANCESTOR_DEPTH=ancestor_depth,
        ## This is to be modified in the final 'submit' function in the backend
        ## The backend also handles the inputfiles DiracFiles as appropriate
        INPUT_SANDBOX="##INPUT_SANDBOX##",
    )

    logger.debug("prepare: LHCbGaudiDiracRunTimeHandler")

    return StandardJobConfig(dirac_script, inputbox=unique(inputsandbox), outputbox=unique(outputsandbox))
def prepare(self, app, appsubconfig, appmasterconfig, jobmasterconfig):

    inputsandbox, outputsandbox = sandbox_prepare(app, appsubconfig, appmasterconfig, jobmasterconfig)

    job = stripProxy(app).getJobObject()

    if job.inputdata:
        if not job.splitter:
            if len(job.inputdata) > 100:
                raise BackendError("You're submitting a job to Dirac with no splitter and more than 100 files, please add a splitter and try again!")

    outputfiles = [this_file for this_file in job.outputfiles if isType(this_file, DiracFile)]

    data_str = 'import os\n'
    data_str += 'execfile(\'data.py\')\n'

    if hasattr(job, '_splitter_data'):
        data_str += job._splitter_data
    inputsandbox.append(FileBuffer('data-wrapper.py', data_str))

    input_data = []

    # Can't wait to get rid of this when people no-longer specify
    # inputdata in options file
    #######################################################################
    # splitters ensure that subjobs pick up inputdata from job over that in
    # optsfiles but need to take care of unsplit jobs
    if not job.master:
        share_path = os.path.join(get_share_path(app), 'inputdata', 'options_data.pkl')

        if not job.inputdata:
            if os.path.exists(share_path):
                f = open(share_path, 'r+b')
                job.inputdata = pickle.load(f)
                f.close()

    #######################################################################
    # Can't wait to get rid of this when people no-longer specify
    # outputsandbox or outputdata in options file
    #######################################################################
    share_path = os.path.join(get_share_path(app), 'output', 'options_parser.pkl')

    if os.path.exists(share_path):
        # if not os.path.exists(share_path):
        # raise GangaException('could not find the parser')
        f = open(share_path, 'r+b')
        parser = pickle.load(f)
        f.close()

        outbox, outdata = parser.get_output(job)

        from Ganga.GPIDev.Lib.File import FileUtils
        from Ganga.GPIDev.Base.Filters import allComponentFilters

        fileTransform = allComponentFilters['gangafiles']
        outdata_files = [fileTransform(this_file, None) for this_file in outdata if not FileUtils.doesFileExist(this_file, job.outputfiles)]
        job.non_copyable_outputfiles.extend([output_file for output_file in outdata_files if not isType(output_file, DiracFile)])
        outbox_files = [fileTransform(this_file, None) for this_file in outbox if not FileUtils.doesFileExist(this_file, job.outputfiles)]
        job.non_copyable_outputfiles.extend([outbox_file for outbox_file in outbox_files if not isType(outbox_file, DiracFile)])

    outputsandbox = [f.namePattern for f in job.non_copyable_outputfiles]
    outputsandbox.extend([f.namePattern for f in job.outputfiles if not isType(f, DiracFile)])
    outputsandbox = unique(outputsandbox)  # + outbox[:])
    #######################################################################

    input_data_dirac, parametricinput_data = dirac_inputdata(job.application)

    if input_data_dirac is not None:
        for f in input_data_dirac:
            if isType(f, DiracFile):
                input_data.append(f.lfn)
            elif isType(f, str):
                input_data.append(f)
            else:
                raise ApplicationConfigurationError("Don't know how to handle anything other than DiracFiles or strings to LFNs!")

    commandline = "python ./gaudipython-wrapper.py"
    if is_gaudi_child(app):
        commandline = 'gaudirun.py '
        commandline += ' '.join([str(arg) for arg in app.args])
        commandline += ' options.pkl data-wrapper.py'

    logger.debug('Command line: %s: ', commandline)

    gaudi_script_path = os.path.join(job.getInputWorkspace().getPath(), "gaudi-script.py")

    script_generator(
        gaudi_script_template(),
        #remove_unreplaced = False,
        outputfile_path=gaudi_script_path,
        PLATFORM=app.platform,
        COMMAND=commandline,
        XMLSUMMARYPARSING=getXMLSummaryScript()  # ,
        #OUTPUTFILESINJECTEDCODE = getWNCodeForOutputPostprocessing(job, ' ')
    )

    #logger.debug( "input_data %s" % str( input_data ) )

    # We want to propagate the ancestor depth to DIRAC when we have
    # inputdata set
    if job.inputdata is not None and isType(job.inputdata, LHCbDataset):

        # As the RT Handler we already know we have a Dirac backend
        if type(job.backend.settings) is not dict:
            raise ApplicationConfigurationError(None, 'backend.settings should be a dict')

        if 'AncestorDepth' in job.backend.settings:
            ancestor_depth = job.backend.settings['AncestorDepth']
        else:
            ancestor_depth = job.inputdata.depth
    else:
        ancestor_depth = 0

    lhcbdirac_script_template = lhcbdiracAPI_script_template()

    lhcb_dirac_outputfiles = lhcbdirac_outputfile_jdl(outputfiles)

    # not necessary to use lhcbdiracAPI_script_template any more as doing our own uploads to Dirac
    # remove after Ganga6 release
    # NOTE special case for replicas: replicate string must be empty for no
    # replication
    dirac_script = script_generator(
        lhcbdirac_script_template,
        DIRAC_IMPORT='from LHCbDIRAC.Interfaces.API.DiracLHCb import DiracLHCb',
        DIRAC_JOB_IMPORT='from LHCbDIRAC.Interfaces.API.LHCbJob import LHCbJob',
        DIRAC_OBJECT='DiracLHCb()',
        JOB_OBJECT='LHCbJob()',
        NAME=mangle_job_name(app),
        APP_NAME=stripProxy(app).appname,
        APP_VERSION=app.version,
        APP_SCRIPT=gaudi_script_path,
        APP_LOG_FILE='Ganga_%s_%s.log' % (stripProxy(app).appname, app.version),
        INPUTDATA=input_data,
        PARAMETRIC_INPUTDATA=parametricinput_data,
        OUTPUT_SANDBOX=API_nullifier(outputsandbox),
        OUTPUTFILESSCRIPT=lhcb_dirac_outputfiles,
        # job.fqid,#outputdata_path,
        OUTPUT_PATH="",
        OUTPUT_SE=getConfig('DIRAC')['DiracOutputDataSE'],
        SETTINGS=diracAPI_script_settings(job.application),
        DIRAC_OPTS=job.backend.diracOpts,
        PLATFORM=app.platform,
        REPLICATE='True' if getConfig('DIRAC')['ReplicateOutputData'] else '',
        ANCESTOR_DEPTH=ancestor_depth,
        ## This is to be modified in the final 'submit' function in the backend
        ## The backend also handles the inputfiles DiracFiles as appropriate
        INPUT_SANDBOX='##INPUT_SANDBOX##')

    logger.debug("prepare: LHCbGaudiDiracRunTimeHandler")

    return StandardJobConfig(dirac_script, inputbox=unique(inputsandbox), outputbox=unique(outputsandbox))
def preparejob(self, jobconfig, master_input_sandbox):

    job = self.getJobObject()
    # print str(job.backend_output_postprocess)
    mon = job.getMonitoringService()
    import Ganga.Core.Sandbox as Sandbox
    subjob_input_sandbox = job.createPackedInputSandbox(
        jobconfig.getSandboxFiles()
        + Sandbox.getGangaModulesAsSandboxFiles(Sandbox.getDefaultModules())
        + Sandbox.getGangaModulesAsSandboxFiles(mon.getSandboxModules())
    )

    appscriptpath = [jobconfig.getExeString()] + jobconfig.getArgStrings()
    if self.nice:
        appscriptpath = ["nice", "-n %d" % self.nice] + appscriptpath
    if self.nice < 0:
        logger.warning("increasing process priority is often not allowed, your job may fail due to this")

    sharedoutputpath = job.getOutputWorkspace().getPath()
    outputpatterns = jobconfig.outputbox
    environment = dict() if jobconfig.env is None else jobconfig.env

    import tempfile
    workdir = tempfile.mkdtemp(dir=config["location"])

    import inspect
    script_location = os.path.join(os.path.dirname(os.path.abspath(inspect.getfile(inspect.currentframe()))), "LocalHostExec.py")

    from Ganga.GPIDev.Lib.File import FileUtils
    script = FileUtils.loadScript(script_location, "")

    script = script.replace("###INLINEMODULES###", inspect.getsource(Sandbox.WNSandbox))

    from Ganga.GPIDev.Lib.File.OutputFileManager import (
        getWNCodeForOutputSandbox,
        getWNCodeForOutputPostprocessing,
        getWNCodeForDownloadingInputFiles,
        getWNCodeForInputdataListCreation,
    )
    from Ganga.Utility.Config import getConfig

    jobidRepr = repr(job.getFQID("."))

    script = script.replace("###OUTPUTSANDBOXPOSTPROCESSING###", getWNCodeForOutputSandbox(job, ["stdout", "stderr", "__syslog__"], jobidRepr))
    script = script.replace("###OUTPUTUPLOADSPOSTPROCESSING###", getWNCodeForOutputPostprocessing(job, ""))
    script = script.replace("###DOWNLOADINPUTFILES###", getWNCodeForDownloadingInputFiles(job, ""))
    script = script.replace("###CREATEINPUTDATALIST###", getWNCodeForInputdataListCreation(job, ""))

    script = script.replace("###APPLICATION_NAME###", repr(job.application._name))
    script = script.replace("###INPUT_SANDBOX###", repr(subjob_input_sandbox + master_input_sandbox))
    script = script.replace("###SHAREDOUTPUTPATH###", repr(sharedoutputpath))
    script = script.replace("###APPSCRIPTPATH###", repr(appscriptpath))
    script = script.replace("###OUTPUTPATTERNS###", repr(outputpatterns))
    script = script.replace("###JOBID###", jobidRepr)
    script = script.replace("###ENVIRONMENT###", repr(environment))
    script = script.replace("###WORKDIR###", repr(workdir))
    script = script.replace("###INPUT_DIR###", repr(job.getStringInputDir()))
    script = script.replace("###MONITORING_SERVICE###", job.getMonitoringService().getWrapperScriptConstructorText())

    self.workdir = workdir

    script = script.replace("###GANGADIR###", repr(getConfig("System")["GANGA_PYTHONPATH"]))

    wrkspace = job.getInputWorkspace()
    scriptPath = wrkspace.writefile(FileBuffer("__jobscript__", script), executable=1)

    return scriptPath
def prepare(self, app, appsubconfig, appmasterconfig, jobmasterconfig):
    logger.debug("Prepare")

    inputsandbox, outputsandbox = sandbox_prepare(app, appsubconfig, appmasterconfig, jobmasterconfig)

    job = app.getJobObject()

    logger.debug("Loading pickle files")

    #outputfiles=set([file.namePattern for file in job.outputfiles]).difference(set(getOutputSandboxPatterns(job)))
    # Can't wait to get rid of this when people no-longer specify
    # inputdata in options file
    #######################################################################
    # splitters ensure that subjobs pick up inputdata from job over that in
    # optsfiles but need to take care of unsplit jobs
    if not job.master:
        share_path = os.path.join(get_share_path(app), 'inputdata', 'options_data.pkl')

        if not job.inputdata:
            if os.path.exists(share_path):
                f = open(share_path, 'r+b')
                job.inputdata = pickle.load(f)
                f.close()

    #######################################################################
    # Can't wait to get rid of this when people no-longer specify
    # outputsandbox or outputdata in options file
    #######################################################################
    share_path = os.path.join(get_share_path(app), 'output', 'options_parser.pkl')

    logger.debug("Adding info from pickle files")

    if os.path.exists(share_path):
        f = open(share_path, 'r+b')
        parser = pickle.load(f)
        f.close()

        outbox, outdata = parser.get_output(job)

        from Ganga.GPIDev.Lib.File import FileUtils
        from Ganga.GPIDev.Base.Filters import allComponentFilters

        fileTransform = allComponentFilters['gangafiles']
        job.non_copyable_outputfiles.extend([fileTransform(this_file, None) for this_file in outdata if not FileUtils.doesFileExist(this_file, job.outputfiles)])
        job.non_copyable_outputfiles.extend([fileTransform(this_file, None) for this_file in outbox if not FileUtils.doesFileExist(this_file, job.outputfiles)])

    outputsandbox.extend([f.namePattern for f in job.non_copyable_outputfiles])
    outputsandbox.extend([f.namePattern for f in job.outputfiles])
    outputsandbox = unique(outputsandbox)
    #######################################################################

    logger.debug("Doing XML Catalog stuff")

    data = job.inputdata
    data_str = ''
    if data:
        logger.debug("Returning options String")
        data_str = data.optionsString()
        if data.hasLFNs():
            logger.debug("Returning Catalogue")
            inputsandbox.append(FileBuffer('catalog.xml', data.getCatalog()))
            cat_opts = '\nfrom Gaudi.Configuration import FileCatalog\nFileCatalog().Catalogs = ["xmlcatalog_file:catalog.xml"]\n'
            data_str += cat_opts

    logger.debug("Doing splitter_data stuff")
    if hasattr(job, '_splitter_data'):
        data_str += job._splitter_data

    inputsandbox.append(FileBuffer('data.py', data_str))

    logger.debug("Doing GaudiPython stuff")

    cmd = 'python ./gaudipython-wrapper.py'
    opts = ''
    if is_gaudi_child(job.application):
        opts = 'options.pkl'
        cmd = 'gaudirun.py ' + ' '.join(job.application.args) + ' %s data.py' % opts

    logger.debug("Setting up script")

    script = script_generator(
        create_runscript(job.application.newStyleApp),
        remove_unreplaced=False,
        OPTS=opts,
        PROJECT_OPTS=job.application.setupProjectOptions,
        APP_NAME=job.application.appname,
        APP_VERSION=job.application.version,
        APP_PACKAGE=job.application.package,
        PLATFORM=job.application.platform,
        CMDLINE=cmd,
        XMLSUMMARYPARSING=getXMLSummaryScript())  # ,
        # OUTPUTFILESINJECTEDCODE = getWNCodeForOutputPostprocessing(job, ''))

    logger.debug("Returning StandardJobConfig")
    return StandardJobConfig(FileBuffer('gaudi-script.py', script, executable=1),
                             inputbox=unique(inputsandbox),
                             outputbox=unique(outputsandbox))
def preparejob(self, jobconfig, master_input_sandbox):
    """Method for preparing job script"""

    job = self.getJobObject()

    inputfiles = jobconfig.getSandboxFiles()
    inbox = job.createPackedInputSandbox(inputfiles)

    inbox.extend(master_input_sandbox)
    inpDir = job.getInputWorkspace(create=True).getPath()
    outDir = job.getOutputWorkspace(create=True).getPath()

    workdir = tempfile.mkdtemp()
    self.workdir = workdir

    exeString = jobconfig.getExeString()
    argList = jobconfig.getArgStrings()
    argString = " ".join(map(lambda x: " %s " % x, argList))

    outputSandboxPatterns = jobconfig.outputbox
    patternsToZip = []
    wnCodeForPostprocessing = ''
    wnCodeToDownloadInputFiles = ''

    if len(job.outputfiles) > 0:
        from Ganga.GPIDev.Lib.File.OutputFileManager import getOutputSandboxPatternsForInteractive, getWNCodeForOutputPostprocessing
        (outputSandboxPatterns, patternsToZip) = getOutputSandboxPatternsForInteractive(job)
        wnCodeForPostprocessing = 'def printError(message):pass\ndef printInfo(message):pass' + \
            getWNCodeForOutputPostprocessing(job, '')

    all_inputfiles = [this_file for this_file in job.inputfiles]
    if job.master:
        all_inputfiles.extend([this_file for this_file in job.master.inputfiles])

    wnCodeToDownloadInputFiles = ''
    if len(all_inputfiles) > 0:
        from Ganga.GPIDev.Lib.File.OutputFileManager import outputFilePostProcessingOnWN

        for inputFile in all_inputfiles:

            inputfileClassName = getName(inputFile)

            logger.debug("name: %s" % inputfileClassName)
            logger.debug("result: %s" % str(outputFilePostProcessingOnWN(job, inputfileClassName)))

            # Reset per file: either the wildcard-expanded subfiles supply their
            # own download commands, or the file itself is downloaded.
            getFromFile = False
            if outputFilePostProcessingOnWN(job, inputfileClassName):
                inputFile.processWildcardMatches()
                if inputFile.subfiles:
                    for subfile in inputFile.subfiles:
                        wnCodeToDownloadInputFiles += subfile.getWNScriptDownloadCommand('')
                else:
                    getFromFile = True
            else:
                getFromFile = True

            if getFromFile:
                wnCodeToDownloadInputFiles += inputFile.getWNScriptDownloadCommand('')

    wnCodeToDownloadInputData = ''
    if job.inputdata and (len(job.inputdata) > 0):
        from Ganga.GPIDev.Lib.File.OutputFileManager import getWNCodeForDownloadingInputFiles
        wnCodeToDownloadInputData = getWNCodeForDownloadingInputFiles(job, '')

    import inspect

    replace_dict = {
        '###CONSTRUCT_TIME###' : (time.strftime("%c")),
        '###WNSANDBOX_SOURCE###' : inspect.getsource(Sandbox.WNSandbox),
        '###GANGA_PYTHONPATH###' : getConfig("System")["GANGA_PYTHONPATH"],
        '###OUTPUTDIR###' : outDir,
        '###WORKDIR###' : workdir,
        '###IN_BOX###' : inbox,
        '###WN_INPUTFILES###' : wnCodeToDownloadInputFiles,
        '###WN_INPUTDATA###' : wnCodeToDownloadInputData,
        '###JOBCONFIG_ENV###' : jobconfig.env if jobconfig.env is not None else dict(),
        '###EXE_STRING###' : exeString,
        '###ARG_STRING###' : argString,
        '###WN_POSTPROCESSING###' : wnCodeForPostprocessing,
        '###PATTERNS_TO_ZIP###' : patternsToZip,
        '###OUTPUT_SANDBOX_PATTERNS###' : outputSandboxPatterns
    }

    script_location = os.path.join(os.path.dirname(os.path.abspath(inspect.getfile(inspect.currentframe()))),
                                   'InteractiveScriptTemplate.py')

    from Ganga.GPIDev.Lib.File import FileUtils
    commandString = FileUtils.loadScript(script_location, '')

    for k, v in replace_dict.iteritems():
        commandString = commandString.replace(str(k), str(v))

    return job.getInputWorkspace().writefile(FileBuffer("__jobscript__", commandString), executable=1)
def preparejob(self, jobconfig, master_input_sandbox):
    """Method for preparing job script"""

    job = self.getJobObject()
    mon = job.getMonitoringService()

    import Ganga.Core.Sandbox as Sandbox
    import Ganga.Utility as Utility
    import Ganga.Utility.files
    import inspect
    from Ganga.GPIDev.Lib.File import File
    from Ganga.Core.Sandbox.WNSandbox import PYTHON_DIR

    fileutils = File(inspect.getsourcefile(Ganga.Utility.files), subdir=PYTHON_DIR)

    subjob_input_sandbox = job.createPackedInputSandbox(jobconfig.getSandboxFiles() + [fileutils])

    appscriptpath = [jobconfig.getExeString()] + jobconfig.getArgStrings()
    sharedoutputpath = job.getOutputWorkspace().getPath()
    ## FIXME Check this isn't a GangaList
    outputpatterns = jobconfig.outputbox
    environment = jobconfig.env if jobconfig.env is not None else {}

    script_location = os.path.join(os.path.dirname(os.path.abspath(inspect.getfile(inspect.currentframe()))),
                                   'BatchScriptTemplate.py')

    from Ganga.GPIDev.Lib.File import FileUtils
    text = FileUtils.loadScript(script_location, '')

    from Ganga.Utility.Config import getConfig
    from Ganga.GPIDev.Lib.File.OutputFileManager import getWNCodeForOutputSandbox, getWNCodeForOutputPostprocessing, getWNCodeForDownloadingInputFiles

    jobidRepr = repr(job.getFQID('.'))

    replace_dict = {
        '###OUTPUTSANDBOXPOSTPROCESSING###' : getWNCodeForOutputSandbox(job, ['__syslog__'], jobidRepr),
        '###OUTPUTUPLOADSPOSTPROCESSING###' : getWNCodeForOutputPostprocessing(job, ''),
        '###DOWNLOADINPUTFILES###' : getWNCodeForDownloadingInputFiles(job, ''),
        '###INLINEMODULES###' : inspect.getsource(Sandbox.WNSandbox),
        '###INLINEHOSTNAMEFUNCTION###' : inspect.getsource(Utility.util.hostname),
        '###APPSCRIPTPATH###' : repr(appscriptpath),
        # '###SHAREDINPUTPATH###' : repr(sharedinputpath),
        '###INPUT_SANDBOX###' : repr(subjob_input_sandbox + master_input_sandbox),
        '###SHAREDOUTPUTPATH###' : repr(sharedoutputpath),
        '###OUTPUTPATTERNS###' : repr(outputpatterns),
        '###JOBID###' : jobidRepr,
        '###ENVIRONMENT###' : repr(environment),
        '###PREEXECUTE###' : self.config['preexecute'],
        '###POSTEXECUTE###' : self.config['postexecute'],
        '###JOBIDNAME###' : self.config['jobid_name'],
        '###QUEUENAME###' : self.config['queue_name'],
        '###HEARTBEATFREQUENCE###' : self.config['heartbeat_frequency'],
        '###INPUT_DIR###' : repr(job.getStringInputDir()),
        '###GANGADIR###' : repr(getConfig('System')['GANGA_PYTHONPATH'])
    }

    for k, v in replace_dict.iteritems():
        text = text.replace(str(k), str(v))

    logger.debug('subjob input sandbox %s ', subjob_input_sandbox)
    logger.debug('master input sandbox %s ', master_input_sandbox)

    from Ganga.GPIDev.Lib.File import FileBuffer

    return job.getInputWorkspace().writefile(FileBuffer('__jobscript__', text), executable=1)
def preparejob(self, jobconfig, master_input_sandbox):
    """Method for preparing job script"""

    job = self.getJobObject()

    from Ganga.GPIDev.Lib.File import File
    from Ganga.Core.Sandbox.WNSandbox import PYTHON_DIR
    import Ganga.Utility.files
    import inspect

    fileutils = File(inspect.getsourcefile(Ganga.Utility.files), subdir=PYTHON_DIR)

    inputfiles = jobconfig.getSandboxFiles() + [fileutils]
    inbox = job.createPackedInputSandbox(inputfiles)

    inbox.extend(master_input_sandbox)
    inpDir = job.getInputWorkspace(create=True).getPath()
    outDir = job.getOutputWorkspace(create=True).getPath()

    workdir = tempfile.mkdtemp()
    self.workdir = workdir

    exeString = jobconfig.getExeString()
    argList = jobconfig.getArgStrings()
    argString = " ".join(map(lambda x: " %s " % x, argList))

    outputSandboxPatterns = jobconfig.outputbox
    patternsToZip = []
    wnCodeForPostprocessing = ''
    wnCodeToDownloadInputFiles = ''

    if len(job.outputfiles) > 0:
        from Ganga.GPIDev.Lib.File.OutputFileManager import getOutputSandboxPatternsForInteractive, getWNCodeForOutputPostprocessing
        (outputSandboxPatterns, patternsToZip) = getOutputSandboxPatternsForInteractive(job)
        wnCodeForPostprocessing = 'def printError(message):pass\ndef printInfo(message):pass' + \
            getWNCodeForOutputPostprocessing(job, '')

    all_inputfiles = [this_file for this_file in job.inputfiles]
    if job.master is not None:
        all_inputfiles.extend([this_file for this_file in job.master.inputfiles])

    wnCodeToDownloadInputFiles = ''
    if len(all_inputfiles) > 0:
        from Ganga.GPIDev.Lib.File.OutputFileManager import outputFilePostProcessingOnWN

        for inputFile in all_inputfiles:

            inputfileClassName = getName(inputFile)

            logger.debug("name: %s" % inputfileClassName)
            logger.debug("result: %s" % str(outputFilePostProcessingOnWN(job, inputfileClassName)))

            # Reset per file: either the wildcard-expanded subfiles supply their
            # own download commands, or the file itself is downloaded.
            getFromFile = False
            if outputFilePostProcessingOnWN(job, inputfileClassName):
                inputFile.processWildcardMatches()
                if inputFile.subfiles:
                    for subfile in inputFile.subfiles:
                        wnCodeToDownloadInputFiles += subfile.getWNScriptDownloadCommand('')
                else:
                    getFromFile = True
            else:
                getFromFile = True

            if getFromFile:
                wnCodeToDownloadInputFiles += inputFile.getWNScriptDownloadCommand('')

    wnCodeToDownloadInputData = ''
    if job.inputdata and (len(job.inputdata) > 0):
        from Ganga.GPIDev.Lib.File.OutputFileManager import getWNCodeForDownloadingInputFiles
        wnCodeToDownloadInputData = getWNCodeForDownloadingInputFiles(job, '')

    import inspect

    replace_dict = {
        '###CONSTRUCT_TIME###': (time.strftime("%c")),
        '###WNSANDBOX_SOURCE###': inspect.getsource(Sandbox.WNSandbox),
        '###GANGA_PYTHONPATH###': getConfig("System")["GANGA_PYTHONPATH"],
        '###OUTPUTDIR###': outDir,
        '###WORKDIR###': workdir,
        '###IN_BOX###': inbox,
        '###WN_INPUTFILES###': wnCodeToDownloadInputFiles,
        '###WN_INPUTDATA###': wnCodeToDownloadInputData,
        '###JOBCONFIG_ENV###': jobconfig.env if jobconfig.env is not None else dict(),
        '###EXE_STRING###': exeString,
        '###ARG_STRING###': argString,
        '###WN_POSTPROCESSING###': wnCodeForPostprocessing,
        '###PATTERNS_TO_ZIP###': patternsToZip,
        '###OUTPUT_SANDBOX_PATTERNS###': outputSandboxPatterns
    }

    script_location = os.path.join(os.path.dirname(os.path.abspath(inspect.getfile(inspect.currentframe()))),
                                   'InteractiveScriptTemplate.py.template')

    from Ganga.GPIDev.Lib.File import FileUtils
    commandString = FileUtils.loadScript(script_location, '')

    for k, v in replace_dict.iteritems():
        commandString = commandString.replace(str(k), str(v))

    return job.getInputWorkspace().writefile(FileBuffer("__jobscript__", commandString), executable=1)
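# ---------------------------------------------------------------------------
# Illustrative aside (not Ganga code): both Interactive preparejob() variants
# above accumulate worker-node download commands per input file, choosing
# between the file itself and its wildcard-expanded subfiles. The stripped-down
# sketch below isolates that decision with stand-in classes; every name here is
# hypothetical. Note that get_from_file is reset for every file so no stale
# value leaks between loop iterations.
# ---------------------------------------------------------------------------

class _FakeInputFile(object):
    """Stand-in for a Ganga input file; 'subfiles' mimics wildcard expansion."""

    def __init__(self, name, subfiles=None, postprocess_on_wn=True):
        self.name = name
        self.subfiles = subfiles or []
        self.postprocess_on_wn = postprocess_on_wn

    def getWNScriptDownloadCommand(self, indent):
        return '%sdownload("%s")\n' % (indent, self.name)

def build_download_commands(input_files):
    """Concatenate the download commands to inject into the worker-node script."""
    script = ''
    for input_file in input_files:
        get_from_file = False  # reset for every file
        if input_file.postprocess_on_wn:
            if input_file.subfiles:
                # wildcard pattern: each matched subfile supplies its own command
                for subfile in input_file.subfiles:
                    script += subfile.getWNScriptDownloadCommand('')
            else:
                get_from_file = True
        else:
            get_from_file = True
        if get_from_file:
            script += input_file.getWNScriptDownloadCommand('')
    return script

if __name__ == '__main__':
    print(build_download_commands([
        _FakeInputFile('data_*.root', subfiles=[_FakeInputFile('data_1.root'), _FakeInputFile('data_2.root')]),
        _FakeInputFile('steering.txt', postprocess_on_wn=False),
    ]))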