def prepare(self):
    """Set up a minimal 'Hello World' echo job on the backend."""
    echo_job = JobObject(self.si)
    echo_job.setUniqueJobname("echo_job1")
    echo_job.setCommandline('echo "Hello World"')
    # Register the job on the backend before submission.
    echo_job.createJob()
    self.job = echo_job
def prepare(self):
    """Prepare a simple echo job and store it on the instance."""
    new_job = JobObject(self.si)
    new_job.setUniqueJobname("echo_job1")
    new_job.setCommandline('echo "Hello World"')
    new_job.createJob()
    self.job = new_job
def prepare(self):
    """Create an echo job that stages in one local text file."""
    staged = JobObject(self.si)
    staged.setUniqueJobname("echo_job1")
    staged.setCommandline('echo "Hello World"')
    # Input file is uploaded from the local filesystem when the job is created.
    staged.addInputFileUrl("/home/markus/tmp/text0.txt")
    staged.createJob()
    self.job = staged
def prepare(self):
    """Build an echo job with a single staged-in text file."""
    j = JobObject(self.si)
    j.setUniqueJobname("echo_job1")
    j.setCommandline('echo "Hello World"')
    j.addInputFileUrl("/home/markus/tmp/text0.txt")
    j.createJob()
    self.job = j
def prepare(self):
    """Create an echo job that stages in a large binary input file.

    Uses the generic application and an explicit submission location,
    so no MDS lookup is needed.
    """
    job_obj = JobObject(self.si)
    job_obj.setUniqueJobname("echo_job1")
    job_obj.setCommandline('echo "Hello World"')
    job_obj.addInputFileUrl("/home/markus/tmp/46mbInput0.bin")
    job_obj.setApplication(Constants.GENERIC_APPLICATION_NAME)
    job_obj.setSubmissionLocation(self.subLoc)
    job_obj.createJob()
    self.job = job_obj
def prepare(self):
    """Prepare an echo job staging a large binary input to this action's queue."""
    upload_job = JobObject(self.si)
    upload_job.setUniqueJobname("echo_job1")
    upload_job.setCommandline('echo "Hello World"')
    upload_job.addInputFileUrl("/home/markus/tmp/46mbInput0.bin")
    # Generic application: submission location must be given explicitly.
    upload_job.setApplication(Constants.GENERIC_APPLICATION_NAME)
    upload_job.setSubmissionLocation(self.subLoc)
    upload_job.createJob()
    self.job = upload_job
class simpleStageJob(action):
    """Minimal staged 'hello world' job using the generic application."""

    def __init__(self, si, subLoc=defaultSubLoc):
        super(simpleStageJob, self).__init__(si, subLoc)
        # Command and placement are fixed at construction time.
        self.job = JobObject(self.si)
        self.job.setCommandline('echo "hello world"')
        self.job.setApplication(Constants.GENERIC_APPLICATION_NAME)
        self.job.setSubmissionLocation(self.subLoc)

    def prepare(self):
        """Register the job on the backend under this action's unique name."""
        self.job.setUniqueJobname(self.name())
        self.job.createJob()

    def execute(self):
        """Submit the previously created job."""
        self.job.submitJob()
def prepare(self):
    """Create an echo job that stages in five remote text files.

    The five input URLs differ only by index, so they are generated in a
    loop instead of five copy-pasted addInputFileUrl() calls (the original
    also carried stray semicolons).
    """
    job = JobObject(self.si)
    job.setUniqueJobname("echo_job1")
    job.setCommandline('echo "Hello World"')
    # Stage in text0.txt .. text4.txt from the remote gridftp server.
    for i in range(5):
        job.addInputFileUrl(
            "gsiftp://ng2.canterbury.ac.nz/home/gridcloud061/tmp/text%d.txt" % i)
    job.setApplication(Constants.GENERIC_APPLICATION_NAME)
    job.setSubmissionLocation(self.subLoc)
    job.createJob()
    self.job = job
class simpleStageJob(action):
    """Action that creates and submits a fixed 'hello world' echo job."""

    def __init__(self, si, subLoc=defaultSubLoc):
        super(simpleStageJob, self).__init__(si, subLoc)
        self.job = JobObject(self.si)
        self.job.setCommandline('echo "hello world"')
        # Generic application + explicit submission location skips MDS lookup.
        self.job.setApplication(Constants.GENERIC_APPLICATION_NAME)
        self.job.setSubmissionLocation(self.subLoc)

    def prepare(self):
        # Jobnames must be unique per backend; derive from the action name.
        self.job.setUniqueJobname(self.name())
        self.job.createJob()

    def execute(self):
        self.job.submitJob()
def prepare(self):
    """Build an echo job staging in five indexed remote input files.

    Replaces the five near-identical addInputFileUrl() calls with a loop;
    the staged URLs and their order are unchanged.
    """
    job = JobObject(self.si)
    job.setUniqueJobname("echo_job1")
    job.setCommandline('echo "Hello World"')
    for idx in range(5):
        job.addInputFileUrl(
            "gsiftp://ng2.canterbury.ac.nz/home/gridcloud061/tmp/text%d.txt"
            % idx)
    job.setApplication(Constants.GENERIC_APPLICATION_NAME)
    job.setSubmissionLocation(self.subLoc)
    job.createJob()
    self.job = job
def submit(self): if self.jobs: print 'There is already a run with the name "'+self.jobname+'". Exiting...' sys.exit(1) # uploading input file once, so we don't need to do it for every job again and again fm.cp(self.filepath, 'gsiftp://pan.nesi.org.nz/~/inputfiles/'+self.jobname, True) for i in range(0,self.runs): # create the job object job = JobObject(si); # set a unique jobname number = str(i+1).zfill(4) job.setUniqueJobname(self.jobname+"_run_"+number) # set the commandline that needs to be executed job.setCommandline(megabeast_path+' '+remote_home_dir+'/inputfiles/'+self.jobname+'/'+self.filename) job.setSubmissionLocation('pan:pan.nesi.org.nz') job.setCpus(self.cpus) job.setWalltime(self.walltime) job.setMemory(self.memory) job.setApplication('UltraBEAST') job.setApplicationVersion('0.1') #job.addInputFileUrl(self.filepath) # create the job on the backend and specify the VO to use temp_jobname = job.createJob("/nz/nesi") print "created job: '"+temp_jobname+"', submitting..." # submit the job job.submitJob() print "submission finished: " + temp_jobname
if not si: LoginManager.initEnvironment() si = LoginManager.login('bestgrid', True) filemanager = GrisuRegistryManager.getDefault(si).getFileManager() job = JobObject(si) job.setSubmissionLocation('pan:pan.nesi.org.nz') job.setTimestampJobname(jobname_template) job.setCommandline(commandline) # add input files for file in files_to_upload: job.addInputFileUrl(file) jobname = job.createJob('/nz/nesi') print 'Submitting job...' job.submitJob() print 'Jobname: ' + jobname print 'Waiting for job to finish...' job.waitForJobToFinish(jobstate_check_intervall) job_directory = job.getJobDirectoryUrl() print 'Job finished, jobdirectory: ' + job_directory print 'Downloading results' target = filemanager.downloadUrl(job_directory, File(target_dir), False) print 'Download finished, download folder: ' + target.getPath()
from grisu.frontend.control.login import LoginManager from grisu.frontend.model.job import JobObject si = LoginManager.loginCommandline("BeSTGRID-DEV") print "Logged in." job = JobObject(si) job.setUniqueJobname("cat_job", si) job.setCommandline("cat text0.txt") job.addInputFileUrl("/home/markus/tmp/text0.txt") job.createJob("/nz/nesi") # job.setSubmissionLocation('[email protected]:ng2.auckland.ac.nz') job.submitJob() print "Job submitted." job.waitForJobToFinish(10) print "Job finished. Status: " + job.getStatusString(False) print "Stdout: " + job.getStdOutContent() print "Stderr: " + job.getStdErrContent() job.kill(True)
from grisu.frontend.control.login import LoginManager from grisu.frontend.model.job import JobObject from grisu.model import GrisuRegistryManager si = LoginManager.loginCommandline("LOCAL") appInfo = GrisuRegistryManager.getDefault(si).getApplicationInformation( "ImageJ") print 'app: ' + appInfo.getApplicationName() for subloc in appInfo.getAvailableAllSubmissionLocations(): print subloc job = JobObject(si) job.setTimestampJobname("imageJ") job.setApplication("java") job.setApplication("ImageJ") job.setCommandline("echo Hello") job.setSubmissionLocation("normal:ng2.ivec.org") job.createJob("/ARCS/StartUp") job.submitJob() job.waitForJobToFinish(3) print "Stdout: " + job.getStdOutContent() print "Stderr: " + job.getStdErrContent()
LoginManager.initEnvironment() si = LoginManager.login('bestgrid', True) filemanager = GrisuRegistryManager.getDefault(si).getFileManager() job = JobObject(si) job.setSubmissionLocation('pan:pan.nesi.org.nz') job.setTimestampJobname(jobname_template) job.setCommandline(commandline) # add input files for file in files_to_upload: job.addInputFileUrl(file) jobname = job.createJob('/nz/nesi') print 'Submitting job...' job.submitJob() print 'Jobname: '+jobname print 'Waiting for job to finish...' job.waitForJobToFinish(jobstate_check_intervall) job_directory = job.getJobDirectoryUrl() print 'Job finished, jobdirectory: '+job_directory print 'Downloading results' target = filemanager.downloadUrl(job_directory, File(target_dir), False) print 'Download finished, download folder: '+target.getPath()
# better make that unique for each run, so we can resume workflows easier if necessary (this introduces quite a bit more complexity though) jobname_base = 'workflow_test' for total in range(1, amount_of_jobs_total + 1): job = JobObject(si) job.setJobname(jobname_base + '_' + str(total)) # always good to set the application if you know it, processing the job will be faster job.setApplication('UnixCommands') # also good to set queue if you know where you want to submit your job, not necessary, but processing of the job will be faster job.setSubmissionLocation('default:gram5.ceres.auckland.ac.nz') # job sleeps for a random time random_sleep = random.randrange(5, 75) job.setCommandline('sleep ' + str(random_sleep)) job.createJob('/nz/nesi') print 'created job: ' + job.getJobname() + ' (sleeptime: ' + str( random_sleep) + ')' created_jobs.append(job) finished = False while not finished: # submit another bunch of jobs while there are some if len(created_jobs) > 0: print 'still ' + str(len(created_jobs)) + ' jobs to submit...' while len(submitted_jobs) < amount_of_jobs_concurrent: if len(created_jobs) <= 0: break
job = JobObject(si); # set a unique jobname job.setUniqueJobname("echo_job1") print 'Set jobname to: '+ job.getJobname() # set the name of the application like it is published in mds. "generic" means not to use mds for the lookup. job.setApplication("generic") # since we are using a "generic" job, we need to specify a submission location. I'll make that easier later on... job.setSubmissionLocation("dque@edda-m:ng2.vpac.org") # set the commandline that needs to be executed job.setCommandline("echo \"Hello World\"") job.addInputFileUrl('/home/markus/test/singleJobFile_0.txt'); # create the job on the backend and specify the VO to use job.createJob("/ARCS/NGAdmin") print 'Submitting job...' # submit the job job.submitJob() print 'Waiting for the job to finish...' # this waits until the job is finished. Checks every 10 seconds (which would be too often for a real job) finished = job.waitForJobToFinish(10) if not finished: print "not finished yet." # kill the job on the backend anyway job.kill(True); else: print 'Job finished. Status: '+job.getStatusString(False) # download and cache the jobs' stdout and display it's content
"2.4") # Set the application version, note this is an exact match # job.setSubmissionLocation("all.q:ng2.scenzgrid.org#SGE") # Set the location the job will be submitted to job.addInputFileUrl(os.path.join(current_dir, "helloworld.py")) # Add a file job.setCommandline("python helloworld.py") # Set the command to be run print "INFO: job " + job.getJobname() + " defined" jobs.append(job) # Submit the jobs to be run # Note the exception catching to give more information about a job failing for job in jobs: time_start = time.time() try: print "INFO: Creating job " + job.getJobname( ) + " on " + backend + " backend, with " + group + " group" job.createJob(group) print "INFO: Submitting job " + job.getJobname() job.submitJob() except (JobsException), error: print "HALT: Exception submitting job!" print "Job: " + job.getJobname() + ", Error: " + error.getFailures( ).get(job).getLocalizedMessage() print "========================" time.sleep(3) error.printStackTrace() sys.exit(1) except (BackendException), error: print "HALT: Exception from grisu backend!" print "Job: " + job.getJobname() + ", Error: " + error.getFailures( ).get(job).getLocalizedMessage() print "========================"
print 'Creating job...' # create the job object job = JobObject(si); # set a unique jobname job.setTimestampJobname("diff_job") print 'Set jobname to: '+ job.getJobname() # set the commandline that needs to be executed job.setCommandline('diff ' + file1Name+ ' ' + file2Name) job.addInputFileUrl(file1url); job.addInputFileUrl(file2url); # create the job on the backend job.createJob() print 'Submitting job...' # submit the job job.submitJob() print 'Waiting for the job to finish...' # this waits until the job is finished. Checks every 10 seconds (which would be too often for a real job) job.waitForJobToFinish(10) print 'Job finished. Status: '+job.getStatusString(False) # download and cache the jobs' stdout and display it's content print "Stdout: " + job.getStdOutContent() # download and cache the jobs' stderr and display it's content print "Stderr: " + job.getStdErrContent() # kill and clean the job on the backend job.kill(True)
# better make that unique for each run, so we can resume workflows easier if necessary (this introduces quite a bit more complexity though) jobname_base = 'workflow_test' for total in range(1, amount_of_jobs_total+1): job = JobObject(si) job.setJobname(jobname_base+'_'+str(total)) # always good to set the application if you know it, processing the job will be faster job.setApplication('UnixCommands') # also good to set queue if you know where you want to submit your job, not necessary, but processing of the job will be faster job.setSubmissionLocation('default:gram5.ceres.auckland.ac.nz') # job sleeps for a random time random_sleep = random.randrange(5, 75) job.setCommandline('sleep '+str(random_sleep)) job.createJob('/nz/nesi') print 'created job: '+ job.getJobname()+' (sleeptime: '+str(random_sleep)+')' created_jobs.append(job) finished = False while not finished: # submit another bunch of jobs while there are some if len(created_jobs) > 0: print 'still '+str(len(created_jobs))+' jobs to submit...' while len(submitted_jobs) < amount_of_jobs_concurrent: if len(created_jobs) <= 0: break
except: print "Cannot stage nesi job script" sys.exit(-5) #open job file #stage in the job file for inputs in input_files: try: job.addInputFileUrl(inputs) print "input: " + inputs except Exception, e: print "Cannot stage in: " + arg print e job.kill(True) sys.exit(-3) job.createJob(group) print "Submitting job..." try: job.submitJob() except Exception, e: # Just catch all exceptions for time being. TODO print "Cannot submit job currently." print e job.kill(True) sys.exit(1) # That's all folks! sys.exit(0)
job = JobObject(si) # set a unique jobname job.setTimestampJobname("diff_job") print "Set jobname to: " + job.getJobname() # setting the application. this means that grisu can figure out the submission location and # you don't have to do that manually job.setApplication("UnixCommands") # set the commandline that needs to be executed job.setCommandline("diff " + file1Name + " " + file2Name) job.addInputFileUrl(file1url) job.addInputFileUrl(file2url) # create the job on the backend and specify the VO to use job.createJob("/ARCS/StartUp") print "Submitting job..." # submit the job job.submitJob() print "Waiting for the job to finish..." # this waits until the job is finished. Checks every 10 seconds (which would be too often for a real job) finished = job.waitForJobToFinish(10) if not finished: print "not finished yet." # kill the job on the backend anyway job.kill(True) else: print "Job finished. Status: " + job.getStatusString(False) # download and cache the jobs' stdout and display it's content
job = JobObject(si) # set a unique jobname job.setUniqueJobname("echo_job1") print 'Set jobname to: ' + job.getJobname() # set the name of the application like it is published in mds. "generic" means not to use mds for the lookup. job.setApplication("generic") # since we are using a "generic" job, we need to specify a submission location. I'll make that easier later on... job.setSubmissionLocation("dque@edda-m:ng2.vpac.org") # set the commandline that needs to be executed job.setCommandline("echo \"Hello World\"") job.addInputFileUrl('/home/markus/test/singleJobFile_0.txt') # create the job on the backend and specify the VO to use job.createJob("/ARCS/NGAdmin") print 'Submitting job...' # submit the job job.submitJob() print 'Waiting for the job to finish...' # this waits until the job is finished. Checks every 10 seconds (which would be too often for a real job) finished = job.waitForJobToFinish(10) if not finished: print "not finished yet." # kill the job on the backend anyway job.kill(True) else: print 'Job finished. Status: ' + job.getStatusString(False) # download and cache the jobs' stdout and display it's content
original = folder + 'original.txt' script = folder + 'wrap.sh' job = JobObject(si); job.setUniqueJobname('diff_wrap') job.setApplication('UnixCommands') job.setCommandline('sh wrap.sh 0 9') job.addInputFileUrl(original) job.addInputFileUrl(script) for i in range(10): file = folder+'test'+str(i)+'.txt' job.addInputFileUrl(file) job.createJob("/ARCS/BeSTGRID") job.submitJob() finished = job.waitForJobToFinish(10) print 'Job: '+job.getJobname() print print 'Stdout:' print job.getStdOutContent() print print 'Stderr:' print job.getStdErrContent() print print
from grisu.frontend.control.login import LoginManager from grisu.frontend.model.job import JobObject si = LoginManager.loginCommandline("BeSTGRID-DEV") print 'Logged in.' job = JobObject(si) job.setUniqueJobname("cat_job", si) job.setCommandline("cat text0.txt") job.addInputFileUrl('/home/markus/tmp/text0.txt') job.createJob("/nz/nesi") #job.setSubmissionLocation('[email protected]:ng2.auckland.ac.nz') job.submitJob() print 'Job submitted.' job.waitForJobToFinish(10) print 'Job finished. Status: ' + job.getStatusString(False) print "Stdout: " + job.getStdOutContent() print "Stderr: " + job.getStdErrContent() job.kill(True)
job = JobObject(service_interface) job.setJobname("echo_job-1") # job name must be unique print 'Set jobname to: ' + job.getJobname() # set the name of the application as it is published in MDS. # "generic" means not to use MDS for the lookup. job.setApplication("generic") # "generic" jobs require a submission location to be specified job.setSubmissionLocation("all.q:ng2.scenzgrid.org#SGE") # set the command that needs to be executed job.setCommandline("echo \"Hello World\"") # create the job on the backend and specify the VO to use job.createJob("/ARCS/BeSTGRID") print 'Submitting job...' # submit the job job.submitJob() print 'Waiting for the job to finish...' # this waits until the job is finished. Checks every 10 seconds (which would be too often for a real job) finished = job.waitForJobToFinish(10) print 'Job finished. Status: ' + job.getStatusString(False) # download and cache the jobs' stdout and display it's content print "Stdout: " + job.getStdOutContent() # download and cache the jobs' stderr and display it's content print "Stderr: " + job.getStdErrContent() # kill and clean the job on the backend job.kill(True)