def genDataFiles(job):
    """
    Generating a data.py file which contains the data we want gaudirun to use
    Args:
        job (Job): This is the job object which contains everything useful for generating the code
    """
    logger.debug("Doing XML Catalog stuff")

    inputsandbox = []
    data = job.inputdata
    if data:
        logger.debug("Returning options String")
        data_str = data.optionsString()
        if data.hasLFNs():
            logger.info("Generating Data catalog for job: %s" % job.fqid)
            logger.debug("Returning Catalogue")
            inputsandbox.append(FileBuffer('catalog.xml', data.getCatalog()))
            cat_opts = '\nfrom Gaudi.Configuration import FileCatalog\nFileCatalog().Catalogs = ["xmlcatalog_file:catalog.xml"]\n'
            data_str += cat_opts

        inputsandbox.append(FileBuffer(GaudiExecDiracRTHandler.data_file, data_str))
    else:
        inputsandbox.append(FileBuffer(GaudiExecDiracRTHandler.data_file,
                                       '#dummy_data_file\n' + LHCbDataset().optionsString()))

    return inputsandbox
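# Illustrative sketch only (not part of the original handler): genDataFiles()
# builds FileBuffer objects in memory, so a caller still has to persist them.
# The helper below assumes a prepared `job` object and uses
# getInputWorkspace().writefile(), the same call other handlers in this section
# use to write buffers into the job's input workspace.
def _example_persist_data_files(job):
    for data_buffer in genDataFiles(job):
        job.getInputWorkspace().writefile(data_buffer, executable=0)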
def setUp(self):
    j = Job(application=DaVinci())
    j.prepare()
    from Ganga.Utility.Config import getConfig
    if getConfig('Output')['ForbidLegacyInput']:
        j.inputfiles = [LocalFile(name='dummy.in')]
    else:
        j.inputsandbox = [File(name='dummy.in')]
    self.app = j.application._impl
    # self.extra = GaudiExtras()
    # self.extra.master_input_buffers['master.buffer'] = '###MASTERBUFFER###'
    # self.extra.master_input_files = [File(name='master.in')]
    # self.extra.input_buffers['subjob.buffer'] = '###SUBJOBBUFFER###'
    self.input_files = [
        File(name='subjob.in'),
        File(FileBuffer('subjob.buffer', '###SUBJOBBUFFER###').create().name)
    ]
    self.appmasterconfig = StandardJobConfig(inputbox=[
        File(name='master.in'),
        File(FileBuffer('master.buffer', '###MASTERBUFFER###').create().name)
    ])
    j.outputfiles = ['dummy1.out', 'dummy2.out', 'dummy3.out']
    self.rth = LHCbGaudiRunTimeHandler()
def testSubmitJobWithInputFile(self):
    """
    This test adds a dummy inputfile into the job and tests that it is returned when the job is completed
    """
    from Ganga.GPI import LocalFile

    tempName = 'testGaudiExecFile.txt'
    tempName2 = 'testGaudiExecFile2.txt'
    tempContent = '12345'
    tempContent2 = '67890'

    j = TestExternalGaudiExec._constructJob()

    tempFile = path.join(TestExternalGaudiExec.tmpdir_release, tempName)
    tempFile2 = path.join(TestExternalGaudiExec.tmpdir_release, tempName2)
    FileBuffer(tempName, tempContent).create(tempFile)
    FileBuffer(tempName2, tempContent2).create(tempFile2)

    j.inputfiles = [tempFile, LocalFile(tempFile2)]
    j.outputfiles = [LocalFile(tempName), LocalFile(tempName2)]

    j.submit()

    run_until_completed(j)

    assert j.status == 'completed'

    outputDir = stripProxy(j).getOutputWorkspace(create=False).getPath()

    assert path.isfile(tempFile)
    assert path.isfile(tempFile2)

    assert tempContent in open(tempFile).read()
    assert tempContent2 in open(tempFile2).read()
def generateJobScripts(app, appendJobScripts):
    """
    Construct the DIRAC scripts, which must be unique to each job in order to have a unique checksum.
    This generates a unique file, uploads it to DIRAC and then stores the LFN in app.uploadedInput
    Args:
        app (GaudiExec): This expects a GaudiExec app to be passed so that the per-job scripts can be constructed
        appendJobScripts (bool): Should we add the job scripts to the script archive? (Only makes sense on backends which auto-extract tarballs before running)
    """

    job = app.getJobObject()

    if not job.master:
        rjobs = job.subjobs or [job]
    else:
        rjobs = [job]

    tmp_dir = tempfile.gettempdir()

    # First create the extraOpts files needed, 1 per subjob
    for this_job in rjobs:
        logger.debug("RTHandler Making Scripts: %s" % this_job.fqid)
        this_job.application.constructExtraFiles(this_job)

    if not job.master and job.subjobs:
        for sj in rjobs:
            sj.application.jobScriptArchive = sj.master.application.jobScriptArchive

    master_job = job.master or job

    # Now get the name of this tar file
    scriptArchive = os.path.join(master_job.application.jobScriptArchive.localDir,
                                 master_job.application.jobScriptArchive.namePattern)

    if appendJobScripts:
        # Add the job scripts to this archive and, when requested, the extra options to generate the summary.xml
        with tarfile.open(scriptArchive, 'a') as tar_file:
            if app.getMetadata:
                summaryScript = "\nfrom Gaudi.Configuration import *\nfrom Configurables import LHCbApp\nLHCbApp().XMLSummary='summary.xml'"
                summaryPath = os.path.join(job.getInputWorkspace().getPath(), 'summary.py')
                summaryFile = FileBuffer(summaryPath, summaryScript)
                summaryFile.create()
                tar_file.add(summaryPath, arcname='summary.py')
            for this_job in rjobs:
                this_app = this_job.application
                wnScript = generateWNScript(prepareCommand(this_app), this_app)
                this_script = os.path.join(tmp_dir, wnScript.name)
                wnScript.create(this_script)
                tar_file.add(this_script, arcname=os.path.join(wnScript.subdir, wnScript.name))
                os.unlink(this_script)

    gzipFile(scriptArchive, scriptArchive + '.gz', True)
    app.jobScriptArchive.namePattern = app.jobScriptArchive.namePattern + '.gz'
def master_prepare(self, app, appmasterconfig):
    """
    Prepare the RTHandler for the master job so that the application can be submitted
    Args:
        app (GaudiExec): This application is only expected to handle GaudiExec Applications here
        appmasterconfig (unknown): Output passed from the application master configuration call
    """
    inputsandbox, outputsandbox = master_sandbox_prepare(app, appmasterconfig)

    if isinstance(app.jobScriptArchive, LocalFile):
        app.jobScriptArchive = None

    generateJobScripts(app, appendJobScripts=True)

    scriptArchive = os.path.join(app.jobScriptArchive.localDir, app.jobScriptArchive.namePattern)

    inputsandbox.append(File(name=scriptArchive))

    if app.getMetadata:
        logger.info("Adding options to make the summary.xml")
        inputsandbox.append(FileBuffer('summary.py',
                                       "\nfrom Gaudi.Configuration import *\nfrom Configurables import LHCbApp\nLHCbApp().XMLSummary='summary.xml'"))

    return StandardJobConfig(inputbox=unique(inputsandbox), outputbox=unique(outputsandbox))
def testPrepareJob(self):
    from Ganga.GPI import Job, LocalFile, prepareGaudiExec

    import os
    if os.path.exists(TestExternalGaudiExec.tmpdir_release):
        os.system("rm -rf %s/*" % TestExternalGaudiExec.tmpdir_release)

    j = Job(application=prepareGaudiExec('DaVinci', latestDaVinci(), TestExternalGaudiExec.tmpdir_release))

    myHelloOpts = path.join(TestExternalGaudiExec.tmpdir_release, 'hello.py')

    FileBuffer('hello.py', 'print("Hello")').create(myHelloOpts)

    assert path.isfile(myHelloOpts)

    j.application.options = [LocalFile(myHelloOpts)]

    j.prepare()

    assert j.application.is_prepared.name

    assert path.isdir(j.application.is_prepared.path())
def configure(self, master_appconfig):
    ## strip leading and trailing blanks from arguments
    self.arguments = [a.strip() for a in self.arguments]

    ## strip leading and trailing blanks from the command
    self.commands = [a.strip() for a in self.commands]

    ## the script layout
    the_script = layout.format(
        scripts=[os.path.join(f.subdir, os.path.basename(f.name)) for f in self.scripts],
        imports=[os.path.join(f.subdir, os.path.basename(f.name)) for f in self.imports],
        arguments=self.arguments,
        command=self.commands)

    print 'SCRIPT:\n', the_script

    # add summary.xml
    outputsandbox_temp = XMLPostProcessor._XMLJobFiles()
    outputsandbox_temp += unique(self.getJobObject().outputsandbox)
    outputsandbox = unique(outputsandbox_temp)

    input_files = []
    input_files += [FileBuffer('gaudipython-wrapper.py', the_script)]
    logger.debug("Returning StandardJobConfig")
    return (None, StandardJobConfig(inputbox=input_files, outputbox=outputsandbox))
def generateWrapperScript(app):
    """
    This generates the wrapper script which is run for non GaudiExec type apps
    Args:
        app (GaudiExec): GaudiExec instance which contains the script to run on the WN
    """
    return FileBuffer(name=app.getWrapperScriptName(), contents=app.getWNPythonContents())
def generateWNScript(commandline, app):
    """
    Generate the script as a file buffer and return it
    Args:
        commandline (str): This is the command-line argument the script is wrapping
        app (GaudiExec): This is the app object which contains everything useful for generating the code
    """
    job = app.getJobObject()
    exe_script_name = getScriptName(app)

    return FileBuffer(name=exe_script_name,
                      contents=script_generator(gaudiRun_script_template(),
                                                COMMAND=commandline,
                                                OUTPUTFILESINJECTEDCODE=getWNCodeForOutputPostprocessing(job, ' ')),
                      subdir='jobScript',
                      executable=True)
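# Illustrative sketch only (not part of the original module): how the FileBuffer
# returned by generateWNScript() is typically written out to disk before being
# shipped, mirroring the usage inside generateJobScripts() above. It assumes a
# prepared GaudiExec instance `app`; prepareCommand() and generateWNScript() are
# the helpers defined in this section.
def _example_materialise_wn_script(app):
    import os
    import tempfile
    wnScript = generateWNScript(prepareCommand(app), app)
    script_path = os.path.join(tempfile.gettempdir(), wnScript.name)
    # FileBuffer.create(path) writes the in-memory contents to a real file
    wnScript.create(script_path)
    return script_path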
def testParseInputFile(self):
    """
    Test that we can parse a fake opts file and get the inputdata from it
    """
    from Ganga.GPI import jobs

    j = jobs[-1]

    myOptsFile = path.join(TestExternalGaudiExec.tmpdir_release, 'myOpts.py')

    FileBuffer('myOpts.py', inputOptsFile()).create(myOptsFile)

    assert path.isfile(myOptsFile)

    j.application.readInputData(myOptsFile)

    assert len(j.inputdata) == 1
def testSubmitJobDiracWithInput(self):
    j = TestExternalGaudiExec._constructJob()

    from Ganga.GPI import LocalFile, Dirac, DiracProxy

    j.backend = Dirac(credential_requirements=DiracProxy(group='lhcb_user', encodeDefaultProxyFileName=False))

    tempName = 'testGaudiExecFile.txt'
    tempContent = '12345'

    tempFile = path.join(TestExternalGaudiExec.tmpdir_release, tempName)

    FileBuffer(tempName, tempContent).create(tempFile)

    j.inputfiles = [tempFile]
    j.outputfiles = [LocalFile(tempName)]

    j.submit()

    assert j.status == "submitted"
def configure(self, master_appconfig):
    # self._configure()
    name = join('.', self.script[0].subdir, split(self.script[0].name)[-1])
    script = "from Gaudi.Configuration import *\n"
    if self.args:
        script += 'import sys\nsys.argv += %s\n' % str(self.args)
    script += "importOptions('data.py')\n"
    script += "execfile(\'%s\')\n" % name

    # add summary.xml
    outputsandbox_temp = XMLPostProcessor._XMLJobFiles()
    outputsandbox_temp += unique(self.getJobObject().outputsandbox)
    outputsandbox = unique(outputsandbox_temp)

    input_files = []
    input_files += [FileBuffer('gaudipython-wrapper.py', script)]
    logger.debug("Returning Job Configuration")
    return (None, StandardJobConfig(inputbox=input_files, outputbox=outputsandbox))
def _constructJob():
    """
    This is a helper method to construct a new GaudiExec job object for submission testing.
    This just helps reduce repeated code between tests
    """
    import os
    if os.path.exists(TestExternalGaudiExec.tmpdir_release):
        os.system("rm -fr %s/" % TestExternalGaudiExec.tmpdir_release)

    from Ganga.GPI import Job, LocalFile, prepareGaudiExec

    j = Job(application=prepareGaudiExec('DaVinci', latestDaVinci(), TestExternalGaudiExec.tmpdir_release))

    myOpts = path.join(TestExternalGaudiExec.tmpdir_release, 'testfile.py')

    FileBuffer('testfile.py', 'print("ThisIsATest")').create(myOpts)

    j.application.options = [LocalFile(myOpts)]

    return j
def testSubmitJobComplete(self):
    """
    Test that the job completes successfully
    """
    from Ganga.GPI import jobs
    from Ganga.GPI import Job, LocalFile, prepareGaudiExec

    import os
    if os.path.exists(TestExternalGaudiExec.tmpdir_release):
        os.system("rm -rf %s/*" % TestExternalGaudiExec.tmpdir_release)

    j = Job(application=prepareGaudiExec('DaVinci', latestDaVinci(), TestExternalGaudiExec.tmpdir_release))

    myOpts = path.join(TestExternalGaudiExec.tmpdir_release, 'testfile.py')

    FileBuffer('testfile.py', 'print("ThisIsATest")').create(myOpts)

    j.application.options = [LocalFile(myOpts)]

    j.submit()

    run_until_completed(j)

    assert j.status == 'completed'

    outputfile = path.join(j.outputdir, 'stdout')

    assert path.isfile(outputfile)

    assert 'testfile.py' in open(outputfile).read()
    assert 'data.py' in open(outputfile).read()
    assert 'ThisIsATest' in open(outputfile).read()
    assert j.application.platform in open(outputfile).read()
def configure(self, master_appconfig):
    # self._configure()
    modulename = split(self.module.name)[-1].split('.')[0]
    script = """
from copy import deepcopy
from Gaudi.Configuration import *
importOptions('data.py')
import %s as USERMODULE
EventSelectorInput = deepcopy(EventSelector().Input)
FileCatalogCatalogs = deepcopy(FileCatalog().Catalogs)
EventSelector().Input=[]
FileCatalog().Catalogs=[]\n""" % modulename

    script_configure = "USERMODULE.configure(EventSelectorInput,FileCatalogCatalogs%s)\n"
    if self.params:
        param_string = ",params=%s" % self.params
    else:
        param_string = ""

    script_configure = script_configure % param_string
    script += script_configure

    script += "USERMODULE.run(%d)\n" % self.events
    script += getXMLSummaryScript()

    # add summary.xml
    outputsandbox_temp = XMLPostProcessor._XMLJobFiles()
    outputsandbox_temp += unique(self.getJobObject().outputsandbox)
    outputsandbox = unique(outputsandbox_temp)

    input_files = []
    input_files += [FileBuffer('gaudipython-wrapper.py', script)]
    logger.debug("Returning StandardJobConfig")
    return (None, StandardJobConfig(inputbox=input_files, outputbox=outputsandbox))
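# For illustration, with a hypothetical user module 'MyAnalysis', events=100 and
# no params, the gaudipython-wrapper.py assembled above would begin roughly as:
#
#     from copy import deepcopy
#     from Gaudi.Configuration import *
#     importOptions('data.py')
#     import MyAnalysis as USERMODULE
#     EventSelectorInput = deepcopy(EventSelector().Input)
#     FileCatalogCatalogs = deepcopy(FileCatalog().Catalogs)
#     EventSelector().Input=[]
#     FileCatalog().Catalogs=[]
#     USERMODULE.configure(EventSelectorInput,FileCatalogCatalogs)
#     USERMODULE.run(100)
#
# followed by the XML summary snippet returned by getXMLSummaryScript().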
def preparejob(self, jobconfig, master_input_sandbox):
    """Prepare the script to create the job on the remote host"""

    import tempfile
    workdir = tempfile.mkdtemp()
    job = self.getJobObject()

    script = """#!/usr/bin/env python
from __future__ import print_function
#-----------------------------------------------------
# This job wrapper script is automatically created by
# GANGA Remote backend handler.
#
# It controls:
# 1. unpack input sandbox
# 2. create the new job
# 3. submit it
#-----------------------------------------------------
import os,os.path,shutil,tempfile
import sys,popen2,time,traceback
import tarfile

############################################################################################
###INLINEMODULES###
############################################################################################

j = Job()

output_sandbox = ###OUTPUTSANDBOX###
input_sandbox = ###INPUTSANDBOX###
appexec = ###APPLICATIONEXEC###
appargs = ###APPLICATIONARGS###
back_end = ###BACKEND###
ganga_dir = ###GANGADIR###
code = ###CODE###
environment = ###ENVIRONMENT###
user_env = ###USERENV###

if user_env != None:
    for env_var in user_env:
        environment[env_var] = user_env[env_var]

j.outputsandbox = output_sandbox
j.backend = back_end

# Unpack the input sandboxes
shutil.move(os.path.expanduser(ganga_dir + "/__subjob_input_sbx__" + code), j.inputdir+"/__subjob_input_sbx__")
shutil.move(os.path.expanduser(ganga_dir + "/__master_input_sbx__" + code), j.inputdir+"/__master_input_sbx__")

# Add the files in the sandbox to the job
inputsbx = []
fullsbxlist = []
try:
    tar = tarfile.open(j.inputdir+"/__master_input_sbx__")
    filelist = tar.getnames()
    print(filelist)
    for f in filelist:
        fullsbxlist.append( f )
        inputsbx.append( j.inputdir + "/" + f )
except:
    print("Unable to open master input sandbox")

try:
    tar = tarfile.open(j.inputdir+"/__subjob_input_sbx__")
    filelist = tar.getnames()
    for f in filelist:
        fullsbxlist.append( f )
        inputsbx.append( j.inputdir + "/" + f )
except:
    print("Unable to open subjob input sandbox")

# sort out the path of the exe
if appexec in fullsbxlist:
    j.application = Executable ( exe = File(os.path.join(j.inputdir, appexec)), args = appargs, env = environment )
    print("Script found: %s" % appexec)
else:
    j.application = Executable ( exe = appexec, args = appargs, env = environment )

j.inputsandbox = inputsbx

getPackedInputSandbox(j.inputdir+"/__subjob_input_sbx__", j.inputdir + "/.")
getPackedInputSandbox(j.inputdir+"/__master_input_sbx__", j.inputdir + "/.")

# submit the job
j.submit()

# Start pickle token
print("***_START_PICKLE_***")

# pickle the job
import pickle
print(j.outputdir)
print(pickle.dumps(j._impl))

# print a finished token
print("***_END_PICKLE_***")
print("***_FINISHED_***")
"""

    import inspect
    import Ganga.Core.Sandbox as Sandbox
    script = script.replace('###ENVIRONMENT###', repr(jobconfig.env))
    script = script.replace('###USERENV###', repr(self.environment))
    script = script.replace('###INLINEMODULES###', inspect.getsource(Sandbox.WNSandbox))
    script = script.replace('###OUTPUTSANDBOX###', repr(jobconfig.outputbox))
    script = script.replace('###APPLICATIONEXEC###', repr(os.path.basename(jobconfig.getExeString())))
    script = script.replace('###APPLICATIONARGS###', repr(jobconfig.getArgStrings()))

    # get a string describing the required backend
    import cStringIO
    be_out = cStringIO.StringIO()
    job.backend.remote_backend.printTree(be_out, "copyable")
    be_str = be_out.getvalue()
    script = script.replace('###BACKEND###', be_str)

    script = script.replace('###GANGADIR###', repr(self.ganga_dir))
    script = script.replace('###CODE###', repr(self._code))

    sandbox_list = jobconfig.getSandboxFiles()
    str_list = "[ "
    for fname in sandbox_list:
        str_list += "j.inputdir + '/' + " + repr(os.path.basename(fname.name))
        str_list += ", "
    str_list += "j.inputdir + '/__master_input_sbx__' ]"

    script = script.replace('###INPUTSANDBOX###', str_list)

    return job.getInputWorkspace().writefile(FileBuffer('__jobscript__.py', script), executable=0)
def _parse_options(self):
    try:
        parser = self._get_parser()
    except ApplicationConfigurationError as err:
        logger.debug("_get_parser Error:\n%s" % str(err))
        raise err

    share_dir = os.path.join(
        expandfilename(getConfig('Configuration')['gangadir']),
        'shared',
        getConfig('Configuration')['user'],
        self.is_prepared.name)
    # Need to remember to create the buffer as the prepare method's returns
    # are merely copied to the inputsandbox so must already exist.
    # share_path = os.path.join(share_dir,'inputsandbox')
    # if not os.path.isdir(share_path): os.makedirs(share_path)
    fillPackedSandbox([FileBuffer('options.pkl', parser.opts_pkl_str)],
                      os.path.join(share_dir, 'inputsandbox',
                                   '_input_sandbox_%s.tar' % self.is_prepared.name))
    # FileBuffer(os.path.join(share_path,'options.pkl'), parser.opts_pkl_str).create()
    # self.prep_inputbox.append(File(os.path.join(share_dir,'options.pkl')))

    # Check in any input datasets defined in optsfiles and allow them to be
    # read into the job
    inputdata = parser.get_input_data()
    if len(inputdata.files) > 0:
        logger.warning('Found inputdataset defined in optsfile, '
                       'this will get pickled up and stored in the '
                       'prepared state. Any change to the options/data will '
                       'therefore require an unprepare first.')
        logger.warning('NOTE: the preferred way of working '
                       'is to define inputdata in the job.inputdata field. ')
        logger.warning('Data defined in job.inputdata will supersede optsfile data!')
        logger.warning('Inputdata can be transferred from optsfiles to the job.inputdata field '
                       'using job.inputdata = job.application.readInputData(optsfiles)')
        share_path = os.path.join(share_dir, 'inputdata')
        if not os.path.isdir(share_path):
            os.makedirs(share_path)
        f = open(os.path.join(share_path, 'options_data.pkl'), 'w+b')
        pickle.dump(inputdata, f)
        f.close()

    # Store the outputsandbox/outputdata defined in the options file.
    # Can remove this when we no longer need to define outputdata in optsfiles.
    # Can remove the "if job:" when we look into how to do prepare for a standalone app;
    # move into the RuntimeHandler, or maybe move the whole parsing into options?
    # Try to get the job object (not present if preparing a standalone app);
    # must change this as prepare should be separate from the job.inputdata
    share_path = os.path.join(share_dir, 'output')
    if not os.path.isdir(share_path):
        os.makedirs(share_path)
    f = open(os.path.join(share_path, 'options_parser.pkl'), 'w+b')
    pickle.dump(parser, f)
    f.close()
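# Illustrative sketch only: the dataset pickled above can later be loaded back
# from the shared area with plain pickle (the path is an assumption based on the
# share_dir layout used in this method, not part of the original code):
#
#     import os, pickle
#     with open(os.path.join(share_dir, 'inputdata', 'options_data.pkl'), 'rb') as pkl:
#         stored_inputdata = pickle.load(pkl)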
def preparejob(self, jobconfig, master_input_sandbox):
    """Prepare Condor description file"""

    job = self.getJobObject()
    inbox = job.createPackedInputSandbox(jobconfig.getSandboxFiles())

    inpDir = job.getInputWorkspace().getPath()
    outDir = job.getOutputWorkspace().getPath()

    infileList = []

    exeString = jobconfig.getExeString().strip()
    quotedArgList = []
    for arg in jobconfig.getArgStrings():
        quotedArgList.append("\\'%s\\'" % arg)
    exeCmdString = " ".join([exeString] + quotedArgList)

    for filePath in inbox:
        if not filePath in infileList:
            infileList.append(filePath)

    for filePath in master_input_sandbox:
        if not filePath in infileList:
            infileList.append(filePath)

    fileList = []
    for filePath in infileList:
        fileList.append(os.path.basename(filePath))

    if job.name:
        name = job.name
    else:
        name = job.application._name
    name = "_".join(name.split())
    wrapperName = "_".join(["Ganga", str(job.id), name])

    commandList = [
        "#!/usr/bin/env python",
        "from __future__ import print_function",
        "# Condor job wrapper created by Ganga",
        "# %s" % (time.strftime("%c")),
        "",
        inspect.getsource(Sandbox.WNSandbox),
        "",
        "import os",
        "import time",
        "",
        "startTime = time.strftime"
        + "( '%a %d %b %H:%M:%S %Y', time.gmtime( time.time() ) )",
        "",
        "for inFile in %s:" % str(fileList),
        "    getPackedInputSandbox( inFile )",
        "",
        "exePath = '%s'" % exeString,
        "if os.path.isfile( '%s' ):" % os.path.basename(exeString),
        "    os.chmod( '%s', 0755 )" % os.path.basename(exeString),
        "wrapperName = '%s_bash_wrapper.sh'" % wrapperName,
        "wrapperFile = open( wrapperName, 'w' )",
        "wrapperFile.write( '#!/bin/bash\\n' )",
        "wrapperFile.write( 'echo \"\"\\n' )",
        "wrapperFile.write( 'echo \"Hostname: $(hostname -f)\"\\n' )",
        "wrapperFile.write( 'echo \"\\${BASH_ENV}: ${BASH_ENV}\"\\n' )",
        "wrapperFile.write( 'if ! [ -z \"${BASH_ENV}\" ]; then\\n' )",
        "wrapperFile.write( '  if ! [ -f \"${BASH_ENV}\" ]; then\\n' )",
        "wrapperFile.write( '    echo \"*** Warning: "
        + "\\${BASH_ENV} file not found ***\"\\n' )",
        "wrapperFile.write( '  fi\\n' )",
        "wrapperFile.write( 'fi\\n' )",
        "wrapperFile.write( 'echo \"\"\\n' )",
        "wrapperFile.write( '%s\\n' )" % exeCmdString,
        "wrapperFile.write( 'exit ${?}\\n' )",
        "wrapperFile.close()",
        "os.chmod( wrapperName, 0755 )",
        "result = os.system( './%s' % wrapperName )",
        "os.remove( wrapperName )",
        "",
        "endTime = time.strftime"
        + "( '%a %d %b %H:%M:%S %Y', time.gmtime( time.time() ) )",
        "print('\\nJob start: ' + startTime)",
        "print('Job end: ' + endTime)",
        "print('Exit code: %s' % str( result ))"
    ]

    commandString = "\n".join(commandList)
    wrapper = job.getInputWorkspace().writefile(
        FileBuffer(wrapperName, commandString), executable=1)

    infileString = ",".join(infileList)
    outfileString = ",".join(jobconfig.outputbox)

    cdfDict = {
        'universe': self.universe,
        'on_exit_remove': 'True',
        'should_transfer_files': 'YES',
        'when_to_transfer_output': 'ON_EXIT_OR_EVICT',
        'executable': wrapper,
        'transfer_executable': 'True',
        'notification': 'Never',
        'rank': self.rank,
        'initialdir': outDir,
        'error': 'stderr',
        'output': 'stdout',
        'log': 'condorLog',
        'stream_output': 'false',
        'stream_error': 'false',
        'getenv': self.getenv
    }

    envList = []
    if self.env:
        for key in self.env.keys():
            value = self.env[key]
            if isinstance(value, str):
                value = os.path.expandvars(value)
            else:
                value = str(value)
            envList.append("=".join([key, value]))
    envString = ";".join(envList)

    if jobconfig.env:
        for key in jobconfig.env.keys():
            value = jobconfig.env[key]
            if isinstance(value, str):
                value = os.path.expandvars(value)
            else:
                value = str(value)
            envList.append("=".join([key, value]))
        envString = ";".join(envList)

    if envString:
        cdfDict['environment'] = envString

    if infileString:
        cdfDict['transfer_input_files'] = infileString

    if self.globusscheduler:
        cdfDict['globusscheduler'] = self.globusscheduler

    if self.globus_rsl:
        cdfDict['globus_rsl'] = self.globus_rsl

    if outfileString:
        cdfDict['transfer_output_files'] = outfileString

    cdfList = [
        "# Condor Description File created by Ganga",
        "# %s" % (time.strftime("%c")),
        ""]
    for key, value in cdfDict.iteritems():
        cdfList.append("%s = %s" % (key, value))
    cdfList.append(self.requirements.convert())
    cdfList.append("queue")
    cdfString = "\n".join(cdfList)

    return job.getInputWorkspace().writefile(FileBuffer("__cdf__", cdfString))
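# For illustration, the "__cdf__" file written above is a plain "key = value"
# Condor description file consumed by condor_submit. With the settings in
# cdfDict it would look roughly like the sketch below (paths, values and the
# timestamp are invented examples, not output from a real run, and the key
# order is arbitrary since it comes from a dict):
#
#     # Condor Description File created by Ganga
#     # <timestamp>
#
#     universe = <self.universe>
#     executable = <input workspace>/Ganga_<id>_<name>
#     initialdir = <output workspace>
#     transfer_executable = True
#     should_transfer_files = YES
#     when_to_transfer_output = ON_EXIT_OR_EVICT
#     error = stderr
#     output = stdout
#     log = condorLog
#     <requirements line from self.requirements.convert()>
#     queue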