def prepare(self):
    """Build a minimal echo job with one local input file and create it on the backend."""
    echo_job = JobObject(self.si)
    # unique name avoids clashing with jobs from earlier runs
    echo_job.setUniqueJobname("echo_job1")
    echo_job.setCommandline('echo "Hello World"')
    echo_job.addInputFileUrl("/home/markus/tmp/text0.txt")
    echo_job.createJob()
    self.job = echo_job
def prepare(self):
    """Assemble the echo test job (one staged-in text file) and create it."""
    j = JobObject(self.si)
    j.setUniqueJobname("echo_job1")
    j.setCommandline("echo \"Hello World\"")
    # local file to be staged into the job directory
    j.addInputFileUrl("/home/markus/tmp/text0.txt")
    j.createJob()
    self.job = j
def prepare(self):
    """Create an echo job that stages in a 46 MB binary input file."""
    job = JobObject(self.si)
    job.setUniqueJobname("echo_job1")
    job.setCommandline("echo \"Hello World\"")
    # large binary stage-in to exercise file transfer
    job.addInputFileUrl("/home/markus/tmp/46mbInput0.bin")
    # generic application: no mds lookup, so the submission location
    # has to be provided explicitly
    job.setApplication(Constants.GENERIC_APPLICATION_NAME)
    job.setSubmissionLocation(self.subLoc)
    job.createJob()
    self.job = job
def prepare(self):
    """Set up the 46 MB-input echo job on the configured submission location."""
    big_input_job = JobObject(self.si)
    big_input_job.setUniqueJobname("echo_job1")
    big_input_job.setCommandline('echo "Hello World"')
    big_input_job.addInputFileUrl("/home/markus/tmp/46mbInput0.bin")
    # skip the mds application lookup; self.subLoc decides where the job runs
    big_input_job.setApplication(Constants.GENERIC_APPLICATION_NAME)
    big_input_job.setSubmissionLocation(self.subLoc)
    big_input_job.createJob()
    self.job = big_input_job
def prepare(self):
    """Define an echo job that stages in five remote text files via gsiftp."""
    job = JobObject(self.si)
    job.setUniqueJobname("echo_job1")
    job.setCommandline("echo \"Hello World\"")
    # stage in text0.txt .. text4.txt from the Canterbury gridftp server
    for n in range(5):
        job.addInputFileUrl(
            "gsiftp://ng2.canterbury.ac.nz/home/gridcloud061/tmp/text"
            + str(n) + ".txt")
    # generic application -> submission location must be given explicitly
    job.setApplication(Constants.GENERIC_APPLICATION_NAME)
    job.setSubmissionLocation(self.subLoc)
    job.createJob()
    self.job = job
def prepare(self):
    """Create an echo job whose inputs are five gsiftp-hosted text files."""
    job = JobObject(self.si)
    job.setUniqueJobname("echo_job1")
    job.setCommandline("echo \"Hello World\"")
    base_url = "gsiftp://ng2.canterbury.ac.nz/home/gridcloud061/tmp/text"
    for index in (0, 1, 2, 3, 4):
        job.addInputFileUrl(base_url + str(index) + ".txt")
    # no mds lookup for "generic" jobs, so pin the submission location
    job.setApplication(Constants.GENERIC_APPLICATION_NAME)
    job.setSubmissionLocation(self.subLoc)
    job.createJob()
    self.job = job
batch_job_name = "test_batch"; # create the batchjob batch_job = BatchJobObject(si, batch_job_name, "/nz/nesi", "UnixCommands", Constants.NO_VERSION_INDICATOR_STRING); # now we can calculate the relative path (from every job directory) to the common input file folder pathToInputFiles = batch_job.pathToInputFiles() for i in range(0, numberOfJobs): # create the single job job = JobObject(si) # better to set the application to use explicitely because in that case we don't need to use mds (faster) job.setCommandline('cat ' + pathToInputFiles+'commonJobFile.txt ' + 'singleJobFile.txt') # adding a job-specific input file job.addInputFileUrl("/home/markus/tmp/singleJobFile.txt") # adding the job to the multijob batch_job.addJob(job) # now we are adding a file that can be used by all of the child jobs. it needs to be referenced via the pathToInputFiles() method shown above batch_job.addInputFile('/home/markus/tmp/commonJobFile.txt') batch_job.setDefaultNoCpus(1); batch_job.setDefaultWalltimeInSeconds(60); batch_job.setLocationsToExclude(["gt5test:ng1.canterbury.ac.nz"]) try: print "Creating jobs on the backend and staging files..." # by specifying "True" we tell the backend to automatically distribute the jobs to all available submission locations # this can be finetuned by exluding or including sites. another option would be to specifying the submission location # for every single job and setting "False" below (this would make job submission faster since jobs don't need to be re-distributed/moved on the backend).
# Example (truncated): define, create and submit a batch of simple Python
# "helloworld" jobs, one JobObject per job.
# NOTE(review): indentation reconstructed from context -- confirm nesting.
print "INFO: Base job name is " + base_job_name
# There are three stages: creating the jobs, submitting the jobs, then after
# they have finished, retrieving the job outputs.
# Creating a list of jobs
jobs = []
print "INFO: Defining " + str(job_count) + " helloworld jobs"
for i in range(1, job_count + 1):
    print "INFO: Defining job " + str(i) + " of " + str(job_count)
    # The next lines define the actual job's parameters
    job = JobObject(service_interface)  # Create a job
    job.setJobname(base_job_name + str(i))  # Give it a (hopefully) unique name
    job.setApplication("python")  # Set the application being run
    job.setApplicationVersion("2.4")  # Set the application version, note this is an exact match
    # job.setSubmissionLocation("all.q:ng2.scenzgrid.org#SGE") # Set the location the job will be submitted to
    job.addInputFileUrl(os.path.join(current_dir, "helloworld.py"))  # Add a file
    job.setCommandline("python helloworld.py")  # Set the command to be run
    print "INFO: job " + job.getJobname() + " defined"
    jobs.append(job)
# Submit the jobs to be run
# Note the exception catching to give more information about a job failing
for job in jobs:
    time_start = time.time()
    try:
        print "INFO: Creating job " + job.getJobname() + " on " + backend + " backend, with " + group + " group"
        job.createJob(group)
        print "INFO: Submitting job " + job.getJobname()
        job.submitJob()
    except (JobsException), error:
        print "HALT: Exception submitting job!"
        # NOTE(review): truncated here -- the rest of the error handling is not visible.
from grisu.frontend.control.login import LoginManager from grisu.frontend.model.job import JobObject si = LoginManager.loginCommandline("BeSTGRID-DEV") print "Logged in." job = JobObject(si) job.setUniqueJobname("cat_job", si) job.setCommandline("cat text0.txt") job.addInputFileUrl("/home/markus/tmp/text0.txt") job.createJob("/nz/nesi") # job.setSubmissionLocation('[email protected]:ng2.auckland.ac.nz') job.submitJob() print "Job submitted." job.waitForJobToFinish(10) print "Job finished. Status: " + job.getStatusString(False) print "Stdout: " + job.getStdOutContent() print "Stderr: " + job.getStdErrContent() job.kill(True)
batch_job = BatchJobObject(si, batch_job_name, "/nz/nesi", "cat", Constants.NO_VERSION_INDICATOR_STRING) # now we can calculate the relative path (from every job directory) to the common input file folder pathToInputFiles = batchJob.pathToInputFiles() for i in range(0, numberOfJobs): # create the single job job = JobObject(si) # better to set the application to use explicitely because in that case we don't need to use mds (faster) job.setCommandline('cat ' + pathToInputFiles + 'commonFile.txt ' + 'singleJobFile.txt') # adding a job-specific input file job.addInputFileUrl("/home/markus/tmp/singleJobFile.txt") # adding the job to the multijob batch_job.addJob(job) # now we are adding a file that can be used by all of the child jobs. it needs to be referenced via the pathToInputFiles() method shown above batch_job.addInputFile('/home/markus/tmp/commonJobFile.txt') batch_job.setDefaultNoCpus(1) batch_job.setDefaultWalltimeInSeconds(60) try: print "Creating jobs on the backend and staging files..." # by specifying "True" we tell the backend to automatically distribute the jobs to all available submission locations # this can be finetuned by exluding or including sites. another option would be to specifying the submission location # for every single job and setting "False" below (this would make job submission faster since jobs don't need to be re-distributed/moved on the backend). batch_job.prepareAndCreateJobs(True) except (JobsException), error:
# Fragment (truncated at the start): tail of the read_files() helper, then the
# definition and submission of a NONMEM wrapper job on the 'pan' cluster.
# NOTE(review): the 'else:' belongs to an 'if' outside this view; indentation
# below is reconstructed -- confirm nesting.
    else:
        # listed path is not a regular file: abort the whole submission
        print 'Not a file: '+line
        sys.exit(1)
    return files_to_upload

files_to_upload = read_files(files_file)
filemanager = GrisuRegistryManager.getDefault(si).getFileManager()
job = JobObject(si)
job.setSubmissionLocation('pan:pan.nesi.org.nz')
# timestamp suffix keeps repeated runs from colliding
job.setTimestampJobname(jobname_template)
job.setCommandline(commandline)
job.setCpus(cpus)
job.addInputFileUrl('nonmem_wrap.sh')
# add input files
for file in files_to_upload:
    job.addInputFileUrl(file)
jobname = job.createJob('/nz/nesi')
print 'Submitting job...'
job.submitJob()
print 'Jobname: '+jobname
print 'Waiting for job to finish...'
job.waitForJobToFinish(jobstate_check_intervall)
job_directory = job.getJobDirectoryUrl()
# Fragment (truncated at both ends): scans the command's arguments and stages
# in any argument that is an existing file as a job input file.
# NOTE(review): the leading 'except:' matches a 'try' outside this view;
# indentation below is reconstructed -- confirm nesting.
except:
    # NOTE(review): bare 'except' swallows every error (incl. SystemExit /
    # KeyboardInterrupt); narrowing it (e.g. to IOError) would be safer.
    print "Cannot write jobname to file"
    sys.exit(-2)
command_arguments = command.split()
print input_files
new_commandline = ""
# debug log of every argument seen
file = open("/home/jamesboocock/blah.txt", 'a')
for arg in command_arguments:
    file.write(arg + '\n')
    # strip quoting before testing whether the argument is a path
    arg=arg.replace('"','')
    print("arg: " + arg)
    # plain input file: stage it in directly
    if ((os.path.exists(arg)) or (os.path.isfile(arg)==True)) and (arg not in input_files) and ("_file" not in arg):
        try:
            job.addInputFileUrl(arg)
            print "Stagin in: " + arg
            file.write("stagin in 1" + arg + '\n')
        except Exception, e:
            print "Cannot stage in: " + arg
            print e
            # abort the whole job if any stage-in fails
            job.kill(True)
            sys.exit(-3)
    # '_file' arguments are remapped into the working directory first
    elif ((os.path.exists(arg)) or (os.path.isfile(arg)==True)) and (arg not in input_files) and ("_file" in arg):
        try:
            folder=arg.split('/')[len(arg.split('/'))-2]
            fil= arg.split('/')[len(arg.split('/'))-1]
            argupdate=os.path.join(working_directory,os.path.join((folder.split('.')[0]), fil))
            print "argupdate " + argupdate
            if(os.path.isfile(argupdate)):
                print "Stagin in: " + argupdate
                # NOTE(review): truncated here -- the rest of this branch is not visible.
# There are three stages, creating the jobs, submitting the jobs, then after they have finished, retrieving the job outputs # Creating a list of jobs jobs = [] print "INFO: Defining " + str(job_count) + " helloworld jobs" for i in range(1, job_count + 1): print "INFO: Defining job " + str(i) + " of " + str(job_count) #The next lines define the actual job's parameters job = JobObject(service_interface) # Create a job job.setJobname(base_job_name + str(i)) # Give it a (hopefully) unique name job.setApplication("python") # Set the application being run job.setApplicationVersion( "2.4") # Set the application version, note this is an exact match # job.setSubmissionLocation("all.q:ng2.scenzgrid.org#SGE") # Set the location the job will be submitted to job.addInputFileUrl(os.path.join(current_dir, "helloworld.py")) # Add a file job.setCommandline("python helloworld.py") # Set the command to be run print "INFO: job " + job.getJobname() + " defined" jobs.append(job) # Submit the jobs to be run # Note the exception catching to give more information about a job failing for job in jobs: time_start = time.time() try: print "INFO: Creating job " + job.getJobname( ) + " on " + backend + " backend, with " + group + " group" job.createJob(group) print "INFO: Submitting job " + job.getJobname() job.submitJob() except (JobsException), error:
print 'Creating job...' # create the job object job = JobObject(si) # set a unique jobname job.setUniqueJobname("echo_job1") print 'Set jobname to: ' + job.getJobname() # set the name of the application like it is published in mds. "generic" means not to use mds for the lookup. job.setApplication("generic") # since we are using a "generic" job, we need to specify a submission location. I'll make that easier later on... job.setSubmissionLocation("dque@edda-m:ng2.vpac.org") # set the commandline that needs to be executed job.setCommandline("echo \"Hello World\"") job.addInputFileUrl('/home/markus/test/singleJobFile_0.txt') # create the job on the backend and specify the VO to use job.createJob("/ARCS/NGAdmin") print 'Submitting job...' # submit the job job.submitJob() print 'Waiting for the job to finish...' # this waits until the job is finished. Checks every 10 seconds (which would be too often for a real job) finished = job.waitForJobToFinish(10) if not finished: print "not finished yet." # kill the job on the backend anyway job.kill(True)
file2Name = FileManager.getFilename(file2url) print 'Creating job...' # create the job object job = JobObject(si) # set a unique jobname job.setTimestampJobname("diff_job") print 'Set jobname to: ' + job.getJobname() # setting the application. this means that grisu can figure out the submission location and # you don't have to do that manually job.setApplication("UnixCommands") # set the commandline that needs to be executed job.setCommandline('diff ' + file1Name + ' ' + file2Name) job.addInputFileUrl(file1url) job.addInputFileUrl(file2url) # create the job on the backend and specify the VO to use job.createJob("/ARCS/StartUp") print 'Submitting job...' # submit the job job.submitJob() print 'Waiting for the job to finish...' # this waits until the job is finished. Checks every 10 seconds (which would be too often for a real job) finished = job.waitForJobToFinish(10) if not finished: print "not finished yet." # kill the job on the backend anyway
# Fragment (truncated at the start): tail of read_files(), then definition and
# submission of a NONMEM wrapper job on the 'pan' cluster.
# NOTE(review): these first lines sit inside an unseen function/branch;
# indentation is reconstructed -- confirm nesting.
        # listed path is not a regular file: abort the whole submission
        print 'Not a file: ' + line
        sys.exit(1)
    return files_to_upload

files_to_upload = read_files(files_file)
filemanager = GrisuRegistryManager.getDefault(si).getFileManager()
job = JobObject(si)
job.setSubmissionLocation('pan:pan.nesi.org.nz')
# timestamp suffix keeps repeated runs from colliding
job.setTimestampJobname(jobname_template)
job.setCommandline(commandline)
job.setCpus(cpus)
job.addInputFileUrl('nonmem_wrap.sh')
# add input files
for file in files_to_upload:
    job.addInputFileUrl(file)
jobname = job.createJob('/nz/nesi')
print 'Submitting job...'
job.submitJob()
print 'Jobname: ' + jobname
print 'Waiting for job to finish...'
job.waitForJobToFinish(jobstate_check_intervall)
job_directory = job.getJobDirectoryUrl()
print 'Job finished, jobdirectory: ' + job_directory
print "Creating job..." # create the job object job = JobObject(si) # set a unique jobname job.setTimestampJobname("diff_job") print "Set jobname to: " + job.getJobname() # setting the application. this means that grisu can figure out the submission location and # you don't have to do that manually job.setApplication("UnixCommands") # set the commandline that needs to be executed job.setCommandline("diff " + file1Name + " " + file2Name) job.addInputFileUrl(file1url) job.addInputFileUrl(file2url) # create the job on the backend and specify the VO to use job.createJob("/ARCS/StartUp") print "Submitting job..." # submit the job job.submitJob() print "Waiting for the job to finish..." # this waits until the job is finished. Checks every 10 seconds (which would be too often for a real job) finished = job.waitForJobToFinish(10) if not finished: print "not finished yet." # kill the job on the backend anyway
si = LoginManager.loginCommandline("Local") folder = '/home/markus/test/batch/' original = folder + 'original.txt' jobnames = [] for i in range(10): file = folder+'test'+str(i)+'.txt' job = JobObject(si); job.setUniqueJobname('diff_'+str(i)) job.setApplication('UnixCommands') job.setCommandline('diff original.txt test'+str(i)+'.txt') job.createJob("/ARCS/BeSTGRID") job.addInputFileUrl(file) job.addInputFileUrl(original) job.submitJob() jobnames.append(job.getJobname()) for jobname in jobnames: finished = job.waitForJobToFinish(10) print 'Job: '+jobname print print 'Stdout:' print job.getStdOutContent() print
# Currently the AUT location is not behaving, so always exclude it print "INFO: Adding common files to Batch Job Object " + batch_job_name batch_jobs.addInputFile(os.path.join(current_dir, dictionary_path)) batch_jobs.addInputFile(os.path.join(current_dir, "countbacon.py")) print "INFO: Defining jobs from input directory" job_count = 0 for file_name in os.listdir(input_path): print "INFO: Defining job for " + file_name job_name = base_job_name + "-" + file_name job = JobObject(service_interface) job.setJobname(job_name) job.setApplication("python") # Set the application being run job.setApplicationVersion("2.4") # Set the application version, note this is an exact match job.addInputFileUrl(os.path.join(current_dir, input_path, file_name)) job.setCommandline("python ../countbacon.py ../" + dictionary_path + " " + file_name) print "INFO: " + job.getJobname() + " defined" batch_jobs.addJob(job) print "INFO: " + job.getJobname() + " added to batch " + batch_jobs.getJobname() job_count += 1 print "INFO: " + str(job_count) + " jobs defined" print "INFO: Sending batch " + batch_jobs.getJobname() + " to " + backend + " and staging files..." try: batch_jobs.prepareAndCreateJobs(False) except (JobsException), error: print ("HALT: Exception submitting jobs from BatchJobObject " + batch_jobs.getJobname() + "!") for job in error.getFailures().keySet(): print "Job: " + job.getJobname() + ", Error: " + error.getFailures().get(job).getLocalizedMessage() sys.exit(1)
from grisu.frontend.control.login import LoginManager from grisu.frontend.model.job import JobObject import sys si = LoginManager.loginCommandline("Local") folder = '/home/markus/test/batchWrap/' original = folder + 'original.txt' script = folder + 'wrap.sh' job = JobObject(si); job.setUniqueJobname('diff_wrap') job.setApplication('UnixCommands') job.setCommandline('sh wrap.sh 0 9') job.addInputFileUrl(original) job.addInputFileUrl(script) for i in range(10): file = folder+'test'+str(i)+'.txt' job.addInputFileUrl(file) job.createJob("/ARCS/BeSTGRID") job.submitJob() finished = job.waitForJobToFinish(10) print 'Job: '+job.getJobname() print print 'Stdout:'
print 'Creating job...' # create the job object job = JobObject(si); # set a unique jobname job.setUniqueJobname("echo_job1") print 'Set jobname to: '+ job.getJobname() # set the name of the application like it is published in mds. "generic" means not to use mds for the lookup. job.setApplication("generic") # since we are using a "generic" job, we need to specify a submission location. I'll make that easier later on... job.setSubmissionLocation("dque@edda-m:ng2.vpac.org") # set the commandline that needs to be executed job.setCommandline("echo \"Hello World\"") job.addInputFileUrl('/home/markus/test/singleJobFile_0.txt'); # create the job on the backend and specify the VO to use job.createJob("/ARCS/NGAdmin") print 'Submitting job...' # submit the job job.submitJob() print 'Waiting for the job to finish...' # this waits until the job is finished. Checks every 10 seconds (which would be too often for a real job) finished = job.waitForJobToFinish(10) if not finished: print "not finished yet." # kill the job on the backend anyway job.kill(True);
from grisu.frontend.control.login import LoginManager from grisu.frontend.model.job import JobObject si = LoginManager.loginCommandline("BeSTGRID-DEV") print 'Logged in.' job = JobObject(si) job.setUniqueJobname("cat_job", si) job.setCommandline("cat text0.txt") job.addInputFileUrl('/home/markus/tmp/text0.txt') job.createJob("/nz/nesi") #job.setSubmissionLocation('[email protected]:ng2.auckland.ac.nz') job.submitJob() print 'Job submitted.' job.waitForJobToFinish(10) print 'Job finished. Status: ' + job.getStatusString(False) print "Stdout: " + job.getStdOutContent() print "Stderr: " + job.getStdErrContent() job.kill(True)
files_to_upload = read_files(files_file) if not si: LoginManager.initEnvironment() si = LoginManager.login('bestgrid', True) filemanager = GrisuRegistryManager.getDefault(si).getFileManager() job = JobObject(si) job.setSubmissionLocation('pan:pan.nesi.org.nz') job.setTimestampJobname(jobname_template) job.setCommandline(commandline) # add input files for file in files_to_upload: job.addInputFileUrl(file) jobname = job.createJob('/nz/nesi') print 'Submitting job...' job.submitJob() print 'Jobname: '+jobname print 'Waiting for job to finish...' job.waitForJobToFinish(jobstate_check_intervall) job_directory = job.getJobDirectoryUrl() print 'Job finished, jobdirectory: '+job_directory print 'Downloading results' target = filemanager.downloadUrl(job_directory, File(target_dir), False)
from grisu.frontend.control.login import LoginManager from grisu.frontend.model.job import JobObject si = LoginManager.loginCommandline("BeSTGRID") print 'Logged in.' job = JobObject(si); job.setUniqueJobname("cat_job", si) job.setCommandline("cat text0.txt") job.addInputFileUrl('/home/markus/tmp/text0.txt'); job.createJob("/nz/nesi") #job.setSubmissionLocation('[email protected]:ng2.auckland.ac.nz') job.submitJob() print 'Job submitted.' job.waitForJobToFinish(10) print 'Job finished. Status: '+job.getStatusString(False) print "Stdout: " + job.getStdOutContent() print "Stderr: " + job.getStdErrContent() job.kill(True)
files_to_upload = read_files(files_file) if not si: LoginManager.initEnvironment() si = LoginManager.login('bestgrid', True) filemanager = GrisuRegistryManager.getDefault(si).getFileManager() job = JobObject(si) job.setSubmissionLocation('pan:pan.nesi.org.nz') job.setTimestampJobname(jobname_template) job.setCommandline(commandline) # add input files for file in files_to_upload: job.addInputFileUrl(file) jobname = job.createJob('/nz/nesi') print 'Submitting job...' job.submitJob() print 'Jobname: ' + jobname print 'Waiting for job to finish...' job.waitForJobToFinish(jobstate_check_intervall) job_directory = job.getJobDirectoryUrl() print 'Job finished, jobdirectory: ' + job_directory print 'Downloading results' target = filemanager.downloadUrl(job_directory, File(target_dir), False)
# Example (truncated): build a batch of countbacon.py jobs, one per file in
# the input directory, then create the batch on the backend.
# NOTE(review): indentation reconstructed from context -- confirm nesting.
print "INFO: Adding common files to Batch Job Object " + batch_job_name
# files shared by every child job
batch_jobs.addInputFile(os.path.join(current_dir, dictionary_path))
batch_jobs.addInputFile(os.path.join(current_dir, "countbacon.py"))
print "INFO: Defining jobs from input directory"
job_count = 0
for file_name in os.listdir(input_path):
    print "INFO: Defining job for " + file_name
    job_name = base_job_name + "-" + file_name
    job = JobObject(service_interface)
    job.setJobname(job_name)
    job.setApplication("python")  # Set the application being run
    job.setApplicationVersion(
        "2.4")  # Set the application version, note this is an exact match
    # per-job input file
    job.addInputFileUrl(os.path.join(current_dir, input_path, file_name))
    job.setCommandline("python ../countbacon.py ../" + dictionary_path + " " + file_name)
    print "INFO: " + job.getJobname() + " defined"
    batch_jobs.addJob(job)
    print "INFO: " + job.getJobname(
    ) + " added to batch " + batch_jobs.getJobname()
    job_count += 1
print "INFO: " + str(job_count) + " jobs defined"
print "INFO: Sending batch " + batch_jobs.getJobname(
) + " to " + backend + " and staging files..."
try:
    # "False": jobs keep their individually-set submission locations
    batch_jobs.prepareAndCreateJobs(False)
except (JobsException), error:
    print("HALT: Exception submitting jobs from BatchJobObject " +
    # NOTE(review): truncated mid-expression here -- the rest of the error
    # reporting (see the sibling example) is not visible in this view.