def prepare(self):
    job = JobObject(self.si)
    job.setUniqueJobname("echo_job1")
    job.setCommandline('echo "Hello World"')
    job.addInputFileUrl("/home/markus/tmp/46mbInput0.bin")
    job.setApplication(Constants.GENERIC_APPLICATION_NAME)
    job.setSubmissionLocation(self.subLoc)
    job.createJob()
    self.job = job
class simpleStageJob(action):

    def __init__(self, si, subLoc=defaultSubLoc):
        super(simpleStageJob, self).__init__(si, subLoc)
        self.job = JobObject(self.si)
        self.job.setCommandline('echo "hello world"')
        self.job.setApplication(Constants.GENERIC_APPLICATION_NAME)
        self.job.setSubmissionLocation(self.subLoc)

    def prepare(self):
        self.job.setUniqueJobname(self.name())
        self.job.createJob()

    def execute(self):
        self.job.submitJob()
def prepare(self):
    job = JobObject(self.si)
    job.setUniqueJobname("echo_job1")
    job.setCommandline('echo "Hello World"')
    job.addInputFileUrl("gsiftp://ng2.canterbury.ac.nz/home/gridcloud061/tmp/text0.txt")
    job.addInputFileUrl("gsiftp://ng2.canterbury.ac.nz/home/gridcloud061/tmp/text1.txt")
    job.addInputFileUrl("gsiftp://ng2.canterbury.ac.nz/home/gridcloud061/tmp/text2.txt")
    job.addInputFileUrl("gsiftp://ng2.canterbury.ac.nz/home/gridcloud061/tmp/text3.txt")
    job.addInputFileUrl("gsiftp://ng2.canterbury.ac.nz/home/gridcloud061/tmp/text4.txt")
    job.setApplication(Constants.GENERIC_APPLICATION_NAME)
    job.setSubmissionLocation(self.subLoc)
    job.createJob()
    self.job = job
def submit(self):
    if self.jobs:
        print 'There is already a run with the name "' + self.jobname + '". Exiting...'
        sys.exit(1)

    # upload the input file once, so we don't need to do it again for every single job
    fm.cp(self.filepath, 'gsiftp://pan.nesi.org.nz/~/inputfiles/' + self.jobname, True)

    for i in range(0, self.runs):
        # create the job object
        job = JobObject(si)
        # set a unique jobname
        number = str(i + 1).zfill(4)
        job.setUniqueJobname(self.jobname + "_run_" + number)
        # set the commandline that needs to be executed
        job.setCommandline(megabeast_path + ' ' + remote_home_dir + '/inputfiles/' + self.jobname + '/' + self.filename)
        job.setSubmissionLocation('pan:pan.nesi.org.nz')
        job.setCpus(self.cpus)
        job.setWalltime(self.walltime)
        job.setMemory(self.memory)
        job.setApplication('UltraBEAST')
        job.setApplicationVersion('0.1')
        # job.addInputFileUrl(self.filepath)

        # create the job on the backend and specify the VO to use
        temp_jobname = job.createJob("/nz/nesi")
        print "created job: '" + temp_jobname + "', submitting..."
        # submit the job
        job.submitJob()
        print "submission finished: " + temp_jobname
start = 30
end = 40

pathToInputFiles = batchJob.pathToInputFiles()
inputFile1relPath = pathToInputFiles + 'inputFile1.txt'
inputFile2relPath = pathToInputFiles + 'inputFile2.txt'

for i in range(start, end):
    # create a unique jobname for every job
    jobname = batchJobName + "_" + str(i)

    print 'Creating job: ' + jobname

    # create the single job
    job = JobObject(si)
    job.setJobname(jobname)
    # better to set the application explicitly; that way we don't need to query MDS (faster)
    job.setApplication('UnixCommands')
    job.setCommandline('cat ' + inputFile1relPath + ' ' + inputFile2relPath)
    job.setWalltimeInSeconds(60)

    # add the job to the multi-job
    batchJob.addJob(job)

# only start the newly added jobs and wait for the restart to finish
batchJob.restart(False, False, True, True)

# don't forget to exit properly; this cleans up possibly existing threads/executors
sys.exit()
'''

from grisu.frontend.control.login import LoginManager
from grisu.frontend.model.job import JobObject
import sys

si = LoginManager.loginCommandline()

print 'Creating job...'

# create the job object
job = JobObject(si)
# set a unique jobname
job.setUniqueJobname("echo_job1")
print 'Set jobname to: ' + job.getJobname()

# set the name of the application as it is published in MDS. "generic" means not to use MDS for the lookup.
job.setApplication("generic")
# since we are using a "generic" job, we need to specify a submission location. I'll make that easier later on...
job.setSubmissionLocation("dque@edda-m:ng2.vpac.org")
# set the commandline that needs to be executed
job.setCommandline('echo "Hello World"')

job.addInputFileUrl('/home/markus/test/singleJobFile_0.txt')

# create the job on the backend and specify the VO to use
job.createJob("/ARCS/NGAdmin")

print 'Submitting job...'
# submit the job
job.submitJob()

print 'Waiting for the job to finish...'
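# The snippet above is cut off at this point. In the other examples in this collection
# the next step is a blocking wait on the job; the 10-second poll interval below is the
# value those examples use, not something fixed by the API.
finished = job.waitForJobToFinish(10)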
# Create a blacklist of sites to exclude.
# Currently the AUT location is not behaving, so always exclude it.
exclude_sites.append("AUT")
batch_jobs.setLocationsToExclude(exclude_sites)

print "INFO: Adding common files to Batch Job Object " + batch_job_name
batch_jobs.addInputFile(os.path.join(current_dir, dictionary_path))
batch_jobs.addInputFile(os.path.join(current_dir, "countbacon.py"))

print "INFO: Defining jobs from input directory"
job_count = 0
for file_name in os.listdir(input_path):
    print "INFO: Defining job for " + file_name
    job_name = base_job_name + "-" + file_name
    job = JobObject(service_interface)
    job.setJobname(job_name)
    job.setApplication("python")      # Set the application being run
    job.setApplicationVersion("2.4")  # Set the application version; note this is an exact match
    job.addInputFileUrl(os.path.join(current_dir, input_path, file_name))
    job.setCommandline("python ../countbacon.py ../" + dictionary_path + " " + file_name)
    print "INFO: " + job.getJobname() + " defined"
    batch_jobs.addJob(job)
    print "INFO: " + job.getJobname() + " added to batch " + batch_jobs.getJobname()
    job_count += 1
print "INFO: " + str(job_count) + " jobs defined"

print "INFO: Sending batch " + batch_jobs.getJobname() + " to " + backend + " and staging files..."
try:
    batch_jobs.prepareAndCreateJobs(False)
except JobsException, error:
    print "HALT: Exception submitting jobs from BatchJobObject " + batch_jobs.getJobname() + "!"
    for job in error.getFailures().keySet():
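        # The original snippet is cut off at the loop header above. A hedged guess at the
        # kind of body such a loop usually has: getFailures() is used as a map here, so
        # Map.get() should work, but the exact key type (job object vs. jobname string)
        # is an assumption, hence the defensive str() calls.
        print "HALT:   " + str(job) + ": " + str(error.getFailures().get(job))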
from grisu.frontend.control.login import LoginManager
from grisu.frontend.model.job import JobObject
import sys

# create a service interface to the BeSTGRID backend
service_interface = LoginManager.loginCommandline("BeSTGRID")

print 'Creating job object...'
job = JobObject(service_interface)
job.setJobname("echo_job-1")  # job name must be unique
print 'Set jobname to: ' + job.getJobname()

# set the name of the application as it is published in MDS.
# "generic" means not to use MDS for the lookup.
job.setApplication("generic")
# "generic" jobs require a submission location to be specified
job.setSubmissionLocation("all.q:ng2.scenzgrid.org#SGE")
# set the command that needs to be executed
job.setCommandline('echo "Hello World"')

# create the job on the backend and specify the VO to use
job.createJob("/ARCS/BeSTGRID")

print 'Submitting job...'
# submit the job
job.submitJob()

print 'Waiting for the job to finish...'
# this waits until the job is finished, checking every 10 seconds
# (which would be too often for a real job)
finished = job.waitForJobToFinish(10)
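# A minimal sketch of what one might do once the wait above returns, assuming the same
# JobObject calls used in the ImageJ example further down (getStdOutContent() and
# getStdErrContent()); treating a False return value as "still running" is an assumption.
if finished:
    print 'Job finished.'
    print 'Stdout: ' + job.getStdOutContent()
    print 'Stderr: ' + job.getStdErrContent()
else:
    print 'Job did not finish within the wait; check again later.'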
print "Parsing commandline arguments..."
file1url = sys.argv[1]
file1Name = FileManager.getFilename(file1url)
file2url = sys.argv[2]
file2Name = FileManager.getFilename(file2url)

print "Creating job..."
# create the job object
job = JobObject(si)
# set a unique jobname
job.setTimestampJobname("diff_job")
print "Set jobname to: " + job.getJobname()

# setting the application; this means that grisu can figure out the submission location
# and you don't have to do that manually
job.setApplication("UnixCommands")
# set the commandline that needs to be executed
job.setCommandline("diff " + file1Name + " " + file2Name)
job.addInputFileUrl(file1url)
job.addInputFileUrl(file2url)

# create the job on the backend and specify the VO to use
job.createJob("/ARCS/StartUp")

print "Submitting job..."
# submit the job
job.submitJob()

print "Waiting for the job to finish..."
# this waits until the job is finished, checking every 10 seconds
# (which would be too often for a real job)
# Since there may be many jobs submitted in the workshop, let's make the names a bit
# unique to avoid clashes.
base_job_name = str(random.randint(10000, 99999)) + '-hello-'
print "INFO: Base job name is " + base_job_name

# There are three stages: creating the jobs, submitting the jobs, and then, after they
# have finished, retrieving the job outputs.

# Creating a list of jobs
jobs = []
print "INFO: Defining " + str(job_count) + " helloworld jobs"
for i in range(1, job_count + 1):
    print "INFO: Defining job " + str(i) + " of " + str(job_count)
    # The next lines define the actual job's parameters
    job = JobObject(service_interface)      # Create a job
    job.setJobname(base_job_name + str(i))  # Give it a (hopefully) unique name
    job.setApplication("python")            # Set the application being run
    job.setApplicationVersion("2.4")        # Set the application version; note this is an exact match
    # job.setSubmissionLocation("all.q:ng2.scenzgrid.org#SGE")  # Set the location the job will be submitted to
    job.addInputFileUrl(os.path.join(current_dir, "helloworld.py"))  # Add a file
    job.setCommandline("python helloworld.py")  # Set the command to be run
    print "INFO: job " + job.getJobname() + " defined"
    jobs.append(job)

# Submit the jobs to be run.
# Note the exception catching to give more information about a job failing.
for job in jobs:
    time_start = time.time()
    try:
        print "INFO: Creating job " + job.getJobname()
amount_of_jobs_total = 10
amount_of_jobs_concurrent = 4

created_jobs = []
submitted_jobs = []
finished_jobs = []

# better to make this unique for each run, so we can resume workflows more easily if
# necessary (although that introduces quite a bit more complexity)
jobname_base = 'workflow_test'

for total in range(1, amount_of_jobs_total + 1):
    job = JobObject(si)
    job.setJobname(jobname_base + '_' + str(total))
    # always good to set the application if you know it; processing the job will be faster
    job.setApplication('UnixCommands')
    # also good to set the queue if you know where you want to submit your job; not
    # necessary, but processing of the job will be faster
    job.setSubmissionLocation('default:gram5.ceres.auckland.ac.nz')
    # the job sleeps for a random time
    random_sleep = random.randrange(5, 75)
    job.setCommandline('sleep ' + str(random_sleep))

    job.createJob('/nz/nesi')

    print 'created job: ' + job.getJobname() + ' (sleeptime: ' + str(random_sleep) + ')'
    created_jobs.append(job)

finished = False
while not finished:
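# The original submit/monitor loop is cut off at the "while not finished:" header above.
# Below is a simplified, blocking alternative (not the original author's polling logic)
# that keeps at most amount_of_jobs_concurrent jobs in flight, using only calls that
# appear elsewhere in these snippets.
while created_jobs:
    batch = created_jobs[:amount_of_jobs_concurrent]
    created_jobs = created_jobs[amount_of_jobs_concurrent:]
    # submit one batch of jobs
    for job in batch:
        job.submitJob()
        submitted_jobs.append(job)
    # wait for the whole batch before submitting the next one
    for job in batch:
        # blocks until this job is done, checking every 10 seconds
        job.waitForJobToFinish(10)
        finished_jobs.append(job)
    print 'finished jobs so far: ' + str(len(finished_jobs))

print 'all ' + str(len(finished_jobs)) + ' jobs finished'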
from grisu.frontend.control.login import LoginManager
from grisu.frontend.model.job import JobObject
from grisu.model import GrisuRegistryManager

si = LoginManager.loginCommandline("LOCAL")

appInfo = GrisuRegistryManager.getDefault(si).getApplicationInformation("ImageJ")

print 'app: ' + appInfo.getApplicationName()

for subloc in appInfo.getAvailableAllSubmissionLocations():
    print subloc

job = JobObject(si)
job.setTimestampJobname("imageJ")
job.setApplication("ImageJ")
job.setCommandline("echo Hello")
job.setSubmissionLocation("normal:ng2.ivec.org")

job.createJob("/ARCS/StartUp")
job.submitJob()

job.waitForJobToFinish(3)

print "Stdout: " + job.getStdOutContent()
print "Stderr: " + job.getStdErrContent()