Example #1
    def __init__(self, si, subLoc=defaultSubLoc):
        super(simpleStageJob, self).__init__(si, subLoc)

        self.job = JobObject(self.si)
        self.job.setCommandline("echo \"hello world\"")
        self.job.setApplication(Constants.GENERIC_APPLICATION_NAME)
        self.job.setSubmissionLocation(self.subLoc)
Example #2
    def prepare(self):

        job = JobObject(self.si)
        job.setUniqueJobname("echo_job1")
        job.setCommandline("echo \"Hello World\"")
        job.addInputFileUrl("/home/markus/tmp/text0.txt")

        job.createJob()
        self.job = job
Example #3
    def prepare(self):

        job = JobObject(self.si)
        job.setUniqueJobname("echo_job1")
        job.setCommandline('echo "Hello World"')
        job.addInputFileUrl("/home/markus/tmp/text0.txt")

        job.createJob()
        self.job = job
Example #4
    def prepare(self):
        job = JobObject(self.si)
        job.setUniqueJobname("echo_job1")
        job.setCommandline("echo \"Hello World\"")

        job.createJob()
        self.job = job
Example #5
    def __init__(self, si, subLoc=defaultSubLoc):
        super(simpleStageJob, self).__init__(si, subLoc)

        self.job = JobObject(self.si)
        self.job.setCommandline('echo "hello world"')
        self.job.setApplication(Constants.GENERIC_APPLICATION_NAME)
        self.job.setSubmissionLocation(self.subLoc)
Example #6
    def prepare(self):
        job = JobObject(self.si)
        job.setUniqueJobname("echo_job1")
        job.setCommandline('echo "Hello World"')

        job.createJob()
        self.job = job
Example #7
numberOfJobs = 10

# the (unique) name of the batchjob
batch_job_name = "test_batch"

# create the batchjob
batch_job = BatchJobObject(si, batch_job_name, "/nz/nesi", "UnixCommands",
                           Constants.NO_VERSION_INDICATOR_STRING)

# now we can calculate the relative path (from every job directory) to the common input file folder
pathToInputFiles = batch_job.pathToInputFiles()

for i in range(0, numberOfJobs):

    # create the single job
    job = JobObject(si)
    # better to set the application to use explicitly because in that case we don't need to use mds (faster)
    job.setCommandline('cat ' + pathToInputFiles + 'commonJobFile.txt ' +
                       'singleJobFile.txt')
    # adding a job-specific input file
    job.addInputFileUrl("/home/markus/tmp/singleJobFile.txt")
    # adding the job to the multijob
    batch_job.addJob(job)

# now we are adding a file that can be used by all of the child jobs. it needs to be referenced via the pathToInputFiles() method shown above
batch_job.addInputFile('/home/markus/tmp/commonJobFile.txt')
batch_job.setDefaultNoCpus(1)
batch_job.setDefaultWalltimeInSeconds(60)

batch_job.setLocationsToExclude(["gt5test:ng1.canterbury.ac.nz"])
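# --- Editor's note: hedged follow-up, not part of the scraped example above ---
# Example #7 only defines and configures the batch job. Other entries in this
# listing (Examples #9, #40 and #49) continue by creating the child jobs on the
# backend with prepareAndCreateJobs(); the boolean argument is the variable named
# "redistribute" in Examples #40/#49. A minimal sketch, assuming the batch_job
# object from above and that JobsException and sys are imported:
try:
    batch_job.prepareAndCreateJobs(False)
except (JobsException), error:
    # presumably raised when one or more of the child jobs could not be created
    print "ERROR: could not create batch job: " + str(error)
    sys.exit(1)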
Example #8
batch_jobs.setDefaultWalltimeInSeconds(300)
# Set the maximum walltime to 5 minutes
batch_jobs.setLocationsToExclude(["AUT"])  # Create a blacklist of sites to exclude
# Currently the AUT location is not behaving, so always exclude it

print "INFO: Adding common files to Batch Job Object " + batch_job_name
batch_jobs.addInputFile(os.path.join(current_dir, dictionary_path))
batch_jobs.addInputFile(os.path.join(current_dir, "countbacon.py"))

print "INFO: Defining jobs from input directory"
job_count = 0
for file_name in os.listdir(input_path):
    print "INFO: Defining job for " + file_name
    job_name = base_job_name + "-" + file_name
    job = JobObject(service_interface)
    job.setJobname(job_name)
    job.setApplication("python")  # Set the application being run
    job.setApplicationVersion(
        "2.4")  # Set the application version, note this is an exact match
    job.addInputFileUrl(os.path.join(current_dir, input_path, file_name))
    job.setCommandline("python ../countbacon.py ../" + dictionary_path + " " +
                       file_name)
    print "INFO: " + job.getJobname() + " defined"
    batch_jobs.addJob(job)
    print "INFO: " + job.getJobname() + " added to batch " + batch_jobs.getJobname()
    job_count += 1
print "INFO: " + str(job_count) + " jobs defined"

print "INFO: Sending batch " + batch_jobs.getJobname() + " to " + backend + " and staging files..."
Example #9
# Set the maximum walltime to 5 minutes
exclude_sites = list()
exclude_sites.append("AUT")
batch_jobs.setLocationsToExclude(exclude_sites)  # Create a blacklist of sites to exclude
# Currently the AUT location is not behaving, so always exclude it

print "INFO: Adding common files to Batch Job Object " + batch_job_name
batch_jobs.addInputFile(os.path.join(current_dir, dictionary_path))
batch_jobs.addInputFile(os.path.join(current_dir, "countbacon.py"))

print "INFO: Defining jobs from input directory"
job_count = 0
for file_name in os.listdir(input_path):
    print "INFO: Defining job for " + file_name
    job_name = base_job_name + "-" + file_name
    job = JobObject(service_interface)
    job.setJobname(job_name)
    job.setApplication("python")  # Set the application being run
    job.setApplicationVersion("2.4")  # Set the application version, note this is an exact match
    job.addInputFileUrl(os.path.join(current_dir, input_path, file_name))
    job.setCommandline("python ../countbacon.py ../" + dictionary_path + " " + file_name)
    print "INFO: " + job.getJobname() + " defined"
    batch_jobs.addJob(job)
    print "INFO: " + job.getJobname() + " added to batch " + batch_jobs.getJobname()
    job_count += 1
print "INFO: " + str(job_count) + " jobs defined"

print "INFO: Sending batch " + batch_jobs.getJobname() + " to " + backend + " and staging files..."
try:
    batch_jobs.prepareAndCreateJobs(False)
except (JobsException), error:
Example #10
    def prepare(self):
        job = JobObject(self.si)
        job.setUniqueJobname("echo_job1")
        job.setCommandline("echo \"Hello World\"")
        job.addInputFileUrl("/home/markus/tmp/text0.txt")
        job.addInputFileUrl("/home/markus/tmp/text1.txt")
        job.addInputFileUrl("/home/markus/tmp/text2.txt")
        job.addInputFileUrl("/home/markus/tmp/text3.txt")
        job.addInputFileUrl("/home/markus/tmp/text4.txt")

        job.setApplication(Constants.GENERIC_APPLICATION_NAME)
        job.setSubmissionLocation(self.subLoc)

        job.createJob()
        self.job = job
Example #11
from grisu.frontend.control.login import LoginManager
from grisu.frontend.model.job import JobObject
from grisu.model import FileManager
import sys

si = LoginManager.loginCommandline()

print 'Parsing commandline arguments...'
file1url = sys.argv[1]
file1Name = FileManager.getFilename(file1url)
file2url = sys.argv[2]
file2Name = FileManager.getFilename(file2url)

print 'Creating job...'
# create the job object
job = JobObject(si)
# set a unique jobname
job.setTimestampJobname("diff_job")
print 'Set jobname to: ' + job.getJobname()
# setting the application. this means that grisu can figure out the submission location and
# you don't have to do that manually
job.setApplication("UnixCommands")

# set the commandline that needs to be executed
job.setCommandline('diff ' + file1Name + ' ' + file2Name)

job.addInputFileUrl(file1url)
job.addInputFileUrl(file2url)

# create the job on the backend and specify the VO to use
job.createJob("/ARCS/StartUp")
Example #12
from grisu.frontend.control.login import LoginManager
from grisu.frontend.model.job import JobObject
from grisu.model import GrisuRegistryManager

si = LoginManager.loginCommandline("LOCAL")

appInfo = GrisuRegistryManager.getDefault(si).getApplicationInformation(
    "ImageJ")

print 'app: ' + appInfo.getApplicationName()

for subloc in appInfo.getAvailableAllSubmissionLocations():
    print subloc

job = JobObject(si)
job.setTimestampJobname("imageJ")
job.setApplication("java")
job.setApplication("ImageJ")
job.setCommandline("echo Hello")

job.setSubmissionLocation("normal:ng2.ivec.org")

job.createJob("/ARCS/StartUp")
job.submitJob()

job.waitForJobToFinish(3)

print "Stdout: " + job.getStdOutContent()
print "Stderr: " + job.getStdErrContent()
Example #13
from grisu.frontend.control.login import LoginManager
from grisu.frontend.model.job import JobObject
import sys

# create a service interface to the BeSTGRID backend
service_interface = LoginManager.loginCommandline("BeSTGRID")

print 'Creating job object...'

job = JobObject(service_interface);

job.setJobname("echo_job-1") # job name must be unique
print 'Set jobname to: '+ job.getJobname()
# set the name of the application as it is published in MDS.
# "generic" means not to use MDS for the lookup.
job.setApplication("generic")
# "generic" jobs require a submission location to be specified
job.setSubmissionLocation("all.q:ng2.scenzgrid.org#SGE")


# set the command that needs to be executed
job.setCommandline("echo \"Hello World\"")

# create the job on the backend and specify the VO to use
job.createJob("/ARCS/BeSTGRID")
print 'Submitting job...'
# submit the job
job.submitJob()

print 'Waiting for the job to finish...'
# this waits until the job is finished. Checks every 10 seconds (which would be too often for a real job)
Example #14
   

# to see what's going on we add a simple event listener. Hm. This doesn't seem to work reliably in jython.
#SystemOutMultiJobLogger(multiJobName)

# create the multipart job 
batch_job = BatchJobObject(si, batch_job_name, "/nz/nesi", "cat", Constants.NO_VERSION_INDICATOR_STRING);

# now we can calculate the relative path (from every job directory) to the common input file folder
pathToInputFiles = batch_job.pathToInputFiles()

for i in range(0, numberOfJobs):

    # create the single job
    job = JobObject(si)

    # better to set the application to use explicitly because in that case we don't need to use mds (faster)
    job.setCommandline('cat ' + pathToInputFiles+'commonFile.txt ' + 'singleJobFile.txt')
    # adding a job-specific input file
    job.addInputFileUrl("/home/markus/tmp/singleJobFile.txt")
    # adding the job to the multijob
    batch_job.addJob(job)
    
# now we are adding a file that can be used by all of the child jobs. it needs to be referenced via the pathToInputFiles() method shown above
batch_job.addInputFile('/home/markus/tmp/commonJobFile.txt')
batch_job.setDefaultNoCpus(1);
batch_job.setDefaultWalltimeInSeconds(60);   
    
    
try:
Example #15
working_directory = sys.argv[7]
input_files     = list()

job_header="""#!/bin/sh
%s
"""
if group == '':
    group = DEFAULT_GROUP
if queue == '':
    queue = DEFAULT_QUEUE

for f in sys.argv[8:]:
    input_files.append(f)

try:
    job = JobObject(si) 
    job.setSubmissionLocation(queue)
    job.setTimestampJobname("galaxy_" + galaxy_job_id)

    job.setMemory(DEFAULT_MEMORY)
    job.setWalltimeInSeconds(DEFAULT_WALLTIME)

    # stop annoying stats from being written to stderr
    job.addEnvironmentVariable("SUPPRESS_STATS", "true")

#create the job script#

except:
    print "Cannot setup the job environment"
    sys.exit(-4)
Example #16
# Setting the number of jobs to be submitted
job_count = 5

# Since there may be many jobs submitted in the workshop, let's make them a bit unique to avoid issues.
base_job_name = str(random.randint(10000, 99999)) + '-hello-'
print "INFO: Base job name is " + base_job_name

# There are three stages, creating the jobs, submitting the jobs, then after they have finished, retrieving the job outputs

# Creating a list of jobs
jobs = []
print "INFO: Defining " + str(job_count) + " helloworld jobs"
for i in range(1, job_count + 1):
    print "INFO: Defining job " + str(i) + " of " + str(job_count)
    #The next lines define the actual job's parameters
    job = JobObject(service_interface)  # Create a job
    job.setJobname(base_job_name + str(i))  # Give it a (hopefully) unique name
    job.setApplication("python")  # Set the application being run
    job.setApplicationVersion(
        "2.4")  # Set the application version, note this is an exact match
    # job.setSubmissionLocation("all.q:ng2.scenzgrid.org#SGE")        # Set the location the job will be submitted to
    job.addInputFileUrl(os.path.join(current_dir,
                                     "helloworld.py"))  # Add a file
    job.setCommandline("python helloworld.py")  # Set the command to be run
    print "INFO: job " + job.getJobname() + " defined"
    jobs.append(job)

# Submit the jobs to be run
# Note the exception catching to give more information about a job failing
for job in jobs:
    time_start = time.time()
Example #17
'''
Created on 17/11/2009

@author: markus
'''

from grisu.frontend.control.login import LoginManager
from grisu.frontend.model.job import JobObject
import sys

HttpProxyManager.useSystemHttpProxy()

si = LoginManager.loginCommandline("bestgrid")

# create the job object
job = JobObject(si)
# set a unique jobname
job.setUniqueJobname("echo_job1")

# set the commandline that needs to be executed
job.setCommandline("echo \"Hello World\"")

# create the job on the backend and specify the VO to use
job.createJob("/nz/nesi")
# submit the job
job.submitJob()

# this waits until the job is finished. Checks every 10 seconds (which would be too often for a real job)
finished = job.waitForJobToFinish(10)

if not finished:
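    # --- Editor's note: the scraped snippet is cut off here. A hedged
    # completion, loosely following Examples #25 and #44 in this listing: ---
    print "Job did not finish."
else:
    # print the job's output, as the other echo examples do
    print "Stdout: " + job.getStdOutContent()
    print "Stderr: " + job.getStdErrContent()

# Examples #25 and #44 call kill(True) afterwards to clean up the job
job.kill(True)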
Example #18
from grisu.Grython import serviceInterface as si
from grisu.frontend.model.job import JobObject
from grisu.model import FileManager
import sys


print 'Parsing commandline arguments...'
file1url = sys.argv[1]
file1Name = FileManager.getFilename(file1url)
file2url = sys.argv[2]
file2Name = FileManager.getFilename(file2url);


print 'Creating job...'
# create the job object
job = JobObject(si);
# set a unique jobname
job.setTimestampJobname("diff_job")
print 'Set jobname to: '+ job.getJobname()

# set the commandline that needs to be executed
job.setCommandline('diff ' + file1Name+ ' ' + file2Name)

job.addInputFileUrl(file1url);
job.addInputFileUrl(file2url);

# create the job on the backend
job.createJob()
print 'Submitting job...'
# submit the job
job.submitJob()
Example #19
    def submit(self):
        
        if self.jobs:
            print 'There is already a run with the name "'+self.jobname+'". Exiting...'
            sys.exit(1)
            
        # uploading input file once, so we don't need to do it for every job again and again
        fm.cp(self.filepath, 'gsiftp://pan.nesi.org.nz/~/inputfiles/'+self.jobname, True)
        
        for i in range(0,self.runs):
            
            # create the job object
            job = JobObject(si);
            # set a unique jobname
            number = str(i+1).zfill(4)
            job.setUniqueJobname(self.jobname+"_run_"+number)
            # set the commandline that needs to be executed
            job.setCommandline(megabeast_path+' '+remote_home_dir+'/inputfiles/'+self.jobname+'/'+self.filename)
            job.setSubmissionLocation('pan:pan.nesi.org.nz')
            
            job.setCpus(self.cpus)

            job.setWalltime(self.walltime)
            
            job.setMemory(self.memory)
            
            job.setApplication('UltraBEAST')
            job.setApplicationVersion('0.1')
            
            #job.addInputFileUrl(self.filepath)
            
            # create the job on the backend and specify the VO to use
            temp_jobname = job.createJob("/nz/nesi")
            print "created job: '"+temp_jobname+"', submitting..."
            # submit the job
            job.submitJob()
            print "submission finished: " + temp_jobname
Example #20
'''
Created on 17/11/2009

@author: markus
'''

from grisu.frontend.control.login import LoginManager
from grisu.frontend.model.job import JobObject
import sys

si = LoginManager.loginCommandline()

print 'Creating job...'
# create the job object
job = JobObject(si)
# set a unique jobname
job.setUniqueJobname("echo_job1")
print 'Set jobname to: ' + job.getJobname()
# set the name of the application like it is published in mds. "generic" means not to use mds for the lookup.
job.setApplication("generic")
# since we are using a "generic" job, we need to specify a submission location. I'll make that easier later on...
job.setSubmissionLocation("dque@edda-m:ng2.vpac.org")

# set the commandline that needs to be executed
job.setCommandline("echo \"Hello World\"")

job.addInputFileUrl('/home/markus/test/singleJobFile_0.txt')

# create the job on the backend and specify the VO to use
job.createJob("/ARCS/NGAdmin")
print 'Submitting job...'
Example #21
batch_job_name = "test_batch"

# to see what's going on we add a simple event listener. Hm. This doesn't seem to work reliably in jython.
#SystemOutMultiJobLogger(multiJobName)

# create the multipart job
batch_job = BatchJobObject(si, batch_job_name, "/nz/nesi", "cat",
                           Constants.NO_VERSION_INDICATOR_STRING)

# now we can calculate the relative path (from every job directory) to the common input file folder
pathToInputFiles = batch_job.pathToInputFiles()

for i in range(0, numberOfJobs):

    # create the single job
    job = JobObject(si)

    # better to set the application to use explicitly because in that case we don't need to use mds (faster)
    job.setCommandline('cat ' + pathToInputFiles + 'commonFile.txt ' +
                       'singleJobFile.txt')
    # adding a job-specific input file
    job.addInputFileUrl("/home/markus/tmp/singleJobFile.txt")
    # adding the job to the multijob
    batch_job.addJob(job)

# now we are adding a file that can be used by all of the child jobs. it needs to be referenced via the pathToInputFiles() method shown above
batch_job.addInputFile('/home/markus/tmp/commonJobFile.txt')
batch_job.setDefaultNoCpus(1)
batch_job.setDefaultWalltimeInSeconds(60)

try:
Example #22
import random
import sys

# si stands for serviceInterface and holds all session information
si = LoginManager.loginCommandline('bestgrid')

amount_of_jobs_total = 10
amount_of_jobs_concurrent = 4

submitted_jobs = []

# better make that unique for each run, so we can resume workflows easier if necessary (this introduces quite a bit more complexity though)
jobname_base = 'staging_test'

for total in range(1, amount_of_jobs_total+1):
    job = JobObject(si)
    job.setJobname(jobname_base+'_'+str(total))
    # always good to set the application if you know it, processing the job will be faster
    job.setApplication('UnixCommands')
    # also good to set queue if you know where you want to submit your job, not necessary, but processing of the job will be faster
    job.setSubmissionLocation('default:gram5.ceres.auckland.ac.nz')
    # create a random sized outfile
    mbsize = 1024*1024
    random_size = random.randrange(10, 100)
    size_string = str(mbsize*random_size)
    job.setCommandline('dd if=/dev/zero of=outfile bs='+size_string+' count=1')
    
    job.createJob('/nz/nesi')
    job.submitJob()
    print 'created and submitted job: '+ job.getJobname()+' (size: '+str(random_size)+'mb)'
    
Example #23
# si stands for serviceInterface and holds all session information
si = LoginManager.loginCommandline('bestgrid-test')

amount_of_jobs_total = 10
amount_of_jobs_concurrent = 4

created_jobs = []
submitted_jobs = []
finished_jobs = []

# better make that unique for each run, so we can resume workflows easier if necessary (this introduces quite a bit more complexity though)
jobname_base = 'workflow_test'

for total in range(1, amount_of_jobs_total + 1):
    job = JobObject(si)
    job.setJobname(jobname_base + '_' + str(total))
    # always good to set the application if you know it, processing the job will be faster
    job.setApplication('UnixCommands')
    # also good to set queue if you know where you want to submit your job, not necessary, but processing of the job will be faster
    job.setSubmissionLocation('default:gram5.ceres.auckland.ac.nz')
    # job sleeps for a random time
    random_sleep = random.randrange(5, 75)
    job.setCommandline('sleep ' + str(random_sleep))

    job.createJob('/nz/nesi')
    print 'created job: ' + job.getJobname() + ' (sleeptime: ' + str(
        random_sleep) + ')'

    created_jobs.append(job)
Example #24
from grisu.frontend.control.login import LoginManager
from grisu.frontend.model.job import JobObject
from grisu.model import GrisuRegistryManager

si = LoginManager.loginCommandline("LOCAL")

appInfo = GrisuRegistryManager.getDefault(si).getApplicationInformation("ImageJ")

print 'app: '+appInfo.getApplicationName()

for subloc in appInfo.getAvailableAllSubmissionLocations():
    print subloc

job = JobObject(si);
job.setTimestampJobname("imageJ");
job.setApplication("java");
job.setApplication("ImageJ");
job.setCommandline("echo Hello");

job.setSubmissionLocation("normal:ng2.ivec.org");

job.createJob("/ARCS/StartUp");
job.submitJob();

job.waitForJobToFinish(3);

print "Stdout: "+job.getStdOutContent()
print "Stderr: "+job.getStdErrContent()
Example #25
from grisu.frontend.control.login import LoginManager
from grisu.frontend.model.job import JobObject

si = LoginManager.loginCommandline("BeSTGRID-DEV")

print "Logged in."

job = JobObject(si)
job.setUniqueJobname("cat_job", si)
job.setCommandline("cat text0.txt")
job.addInputFileUrl("/home/markus/tmp/text0.txt")


job.createJob("/nz/nesi")
# job.setSubmissionLocation('[email protected]:ng2.auckland.ac.nz')
job.submitJob()

print "Job submitted."

job.waitForJobToFinish(10)

print "Job finished. Status: " + job.getStatusString(False)
print "Stdout: " + job.getStdOutContent()
print "Stderr: " + job.getStdErrContent()

job.kill(True)
Example #26
    def prepare(self):
        job = JobObject(self.si)
        job.setUniqueJobname("echo_job1")
        job.setCommandline('echo "Hello World"')
        job.addInputFileUrl("/home/markus/tmp/text0.txt")
        job.addInputFileUrl("/home/markus/tmp/text1.txt")
        job.addInputFileUrl("/home/markus/tmp/text2.txt")
        job.addInputFileUrl("/home/markus/tmp/text3.txt")
        job.addInputFileUrl("/home/markus/tmp/text4.txt")

        job.setApplication(Constants.GENERIC_APPLICATION_NAME)
        job.setSubmissionLocation(self.subLoc)

        job.createJob()
        self.job = job
Example #27
si = LoginManager.loginCommandline('bestgrid')

uem = GrisuRegistryManager.getDefault(si).getUserEnvironmentManager()
fm = GrisuRegistryManager.getDefault(si).getFileManager()

allJobs = uem.getCurrentJobs(False)

myJobs = []
finishedJobs = []

# getting all the jobs for the run we are interested in
for job in allJobs: 
    
    name = job.jobname()
    if name.startswith('staging_test'):
        tempJob = JobObject(si, name)
        myJobs.append(tempJob)
    
fileUrls = []
    
# we could just wait for all the jobs, but looping makes more sense
# since we can stage out finished jobs while some other jobs of the batch 
# are still running
while len(myJobs) > 0:
    
    for job in myJobs:
        
        if job.isFinished():
            if job.isFailed(False):
                print 'Job '+job.getJobname()+' failed. Not doing anything...'
                myJobs.remove(job)
Example #28
inputFile1relPath = pathToInputFiles+'inputFile1.txt ' 

inputFile2Url = 'gsiftp://ng2.vpac.org/home/grid-vpac/DC_au_DC_org_DC_arcs_DC_slcs_O_VPAC_CN_Markus_Binsteiner_qTrDzHY7L1aKo3WSy8623-7bjgM/inputFile2.txt'
inputFile2relPath = pathToInputFiles+'inputFile2.txt' 

inputFile3Url = '/home/markus/test/errorFile.txt'
inputFile3relPath = pathToInputFiles + 'errorFile.txt'

for i in range(0, numberOfJobs):
    # create a unique jobname for every job
    jobname = batchJobName+"_"+ str(i)
    
    print 'Creating job: '+jobname
    
    # create the single job
    job = JobObject(si)
    job.setJobname(jobname)
    # better to set the application to use explicitly because in that case we don't need to use mds (faster)
    job.setApplication('UnixCommands')
    if i == 3 or i == 13:
        # this is just to demonstrate how to restart a failed job later on
        job.setCommandline('cat '+inputFile3relPath)
    else:
        job.setCommandline('cat '+ inputFile1relPath + ' ' + inputFile2relPath)

    job.setWalltimeInSeconds(60)
    # adding the job to the multijob
    batchJob.addJob(job)

# this should be set because it's used for the matchmaking/metascheduling
batchJob.setDefaultNoCpus(1);
Example #29
# get the files that i should have
outfile         = sys.argv[1]
errfile         = sys.argv[2]
error_codefile  = sys.argv[3]
job_name        = sys.argv[4]
working_directory = sys.argv[5]
output_files    = list()

# get list of output files for this job
for f in sys.argv[6:]:
    output_files.append(f)


print output_files
job = JobObject(si, job_name)

# Save stdout and stderr to files to be read by galaxy
try:
    out = open(outfile, "w")
    out.write(job.getStdOutContent())
    out.close()
except:
    print "Cannot open files to write results to"
    sys.exit(-2)
try:
    err = open(errfile, "w")
    err.write(job.getStdErrContent())
except:
    # There is no stderr, so just write a blank file
    print "No stderr, so just writing a blank file"
Example #30
# Setting the number of jobs to be submitted
job_count = 5

# Since there may be many jobs submitted in the workshop, let's make them a bit unique to avoid issues.
base_job_name = str(random.randint(10000, 99999)) + '-hello-'
print "INFO: Base job name is " + base_job_name

# There are three stages, creating the jobs, submitting the jobs, then after they have finished, retrieving the job outputs

# Creating a list of jobs
jobs = []
print "INFO: Defining " + str(job_count) + " helloworld jobs"
for i in range(1, job_count + 1):
    print "INFO: Defining job " + str(i) + " of " + str(job_count)
    #The next lines define the actual job's parameters
    job = JobObject(service_interface)                                # Create a job
    job.setJobname(base_job_name + str(i))                            # Give it a (hopefully) unique name
    job.setApplication("python")                                    # Set the application being run
    job.setApplicationVersion("2.4")                                # Set the application version, note this is an exact match
    # job.setSubmissionLocation("all.q:ng2.scenzgrid.org#SGE")        # Set the location the job will be submitted to 
    job.addInputFileUrl(os.path.join(current_dir, "helloworld.py"))  # Add a file
    job.setCommandline("python helloworld.py")                      # Set the command to be run
    print "INFO: job " + job.getJobname() + " defined"
    jobs.append(job)

# Submit the jobs to be run
# Note the exception catching to give more information about a job failing
for job in jobs:
    time_start = time.time()
    try:
        print "INFO: Creating job " + job.getJobname() + " on " + backend + " backend, with " + group + " group"
Example #31
class simpleStageJob(action):
    def __init__(self, si, subLoc=defaultSubLoc):
        super(simpleStageJob, self).__init__(si, subLoc)

        self.job = JobObject(self.si)
        self.job.setCommandline("echo \"hello world\"")
        self.job.setApplication(Constants.GENERIC_APPLICATION_NAME)
        self.job.setSubmissionLocation(self.subLoc)

    def prepare(self):

        self.job.setUniqueJobname(self.name())
        self.job.createJob()

    def execute(self):
        self.job.submitJob()
Example #32
from grisu.frontend.control.login import LoginManager
from grisu.frontend.model.job import JobObject
import sys


si = LoginManager.loginCommandline("Local")
folder = "/home/markus/test/batch/"
original = folder + "original.txt"

jobnames = []

for i in range(10):

    file = folder + "test" + str(i) + ".txt"

    job = JobObject(si)
    job.setUniqueJobname("diff_" + str(i))
    job.setApplication("UnixCommands")
    job.setCommandline("diff original.txt test" + str(i) + ".txt")
    job.createJob("/ARCS/BeSTGRID")
    job.addInputFileUrl(file)
    job.addInputFileUrl(original)
    job.submitJob()

    jobnames.append(job.getJobname())


for jobname in jobnames:

    # look up each job by its name (as in Example #27) before waiting on it
    job = JobObject(si, jobname)
    finished = job.waitForJobToFinish(10)
Example #33
# si stands for serviceInterface and holds all session information
si = LoginManager.loginCommandline('bestgrid-test')

amount_of_jobs_total = 10
amount_of_jobs_concurrent = 4

created_jobs = []
submitted_jobs = []
finished_jobs = []

# better make that unique for each run, so we can resume workflows easier if necessary (this introduces quite a bit more complexity though)
jobname_base = 'workflow_test'

for total in range(1, amount_of_jobs_total+1):
    job = JobObject(si)
    job.setJobname(jobname_base+'_'+str(total))
    # always good to set the application if you know it, processing the job will be faster
    job.setApplication('UnixCommands')
    # also good to set queue if you know where you want to submit your job, not necessary, but processing of the job will be faster
    job.setSubmissionLocation('default:gram5.ceres.auckland.ac.nz')
    # job sleeps for a random time
    random_sleep = random.randrange(5, 75)
    job.setCommandline('sleep '+str(random_sleep))
    
    job.createJob('/nz/nesi')
    print 'created job: '+ job.getJobname()+' (sleeptime: '+str(random_sleep)+')'
    
    created_jobs.append(job)
    
finished = False
Example #34
    def prepare(self):

        job = JobObject(self.si);
        job.setUniqueJobname("echo_job1")
        job.setCommandline("echo \"Hello World\"")
        job.addInputFileUrl("gsiftp://ng2.canterbury.ac.nz/home/gridcloud061/tmp/text0.txt");
        job.addInputFileUrl("gsiftp://ng2.canterbury.ac.nz/home/gridcloud061/tmp/text1.txt");
        job.addInputFileUrl("gsiftp://ng2.canterbury.ac.nz/home/gridcloud061/tmp/text2.txt");
        job.addInputFileUrl("gsiftp://ng2.canterbury.ac.nz/home/gridcloud061/tmp/text3.txt");
        job.addInputFileUrl("gsiftp://ng2.canterbury.ac.nz/home/gridcloud061/tmp/text4.txt");
        job.setApplication(Constants.GENERIC_APPLICATION_NAME)
        job.setSubmissionLocation(self.subLoc)

        job.createJob()
        
        self.job = job        
Example #35
from grisu.frontend.model.job import JobObject
from grisu.model import FileManager
import sys

si = LoginManager.loginCommandline()

print "Parsing commandline arguments..."
file1url = sys.argv[1]
file1Name = FileManager.getFilename(file1url)
file2url = sys.argv[2]
file2Name = FileManager.getFilename(file2url)


print "Creating job..."
# create the job object
job = JobObject(si)
# set a unique jobname
job.setTimestampJobname("diff_job")
print "Set jobname to: " + job.getJobname()
# setting the application. this means that grisu can figure out the submission location and
# you don't have to do that manually
job.setApplication("UnixCommands")

# set the commandline that needs to be executed
job.setCommandline("diff " + file1Name + " " + file2Name)

job.addInputFileUrl(file1url)
job.addInputFileUrl(file2url)

# create the job on the backend and specify the VO to use
job.createJob("/ARCS/StartUp")
Example #36
from grisu.frontend.control.login import LoginManager
from grisu.frontend.model.job import JobObject
import sys


si = LoginManager.loginCommandline("Local")
folder = '/home/markus/test/batch/'
original = folder + 'original.txt'

jobnames = []

for i in range(10):
    
    file = folder+'test'+str(i)+'.txt'
    
    job = JobObject(si);
    job.setUniqueJobname('diff_'+str(i))
    job.setApplication('UnixCommands')
    job.setCommandline('diff original.txt test'+str(i)+'.txt')
    job.createJob("/ARCS/BeSTGRID")
    job.addInputFileUrl(file)
    job.addInputFileUrl(original)
    job.submitJob()
    
    jobnames.append(job.getJobname())


for jobname in jobnames:

    # look up each job by its name (as in Example #27) before waiting on it
    job = JobObject(si, jobname)
    finished = job.waitForJobToFinish(10)
    
Example #37
class simpleStageJob(action):
    def __init__(self, si, subLoc=defaultSubLoc):
        super(simpleStageJob, self).__init__(si, subLoc)

        self.job = JobObject(self.si)
        self.job.setCommandline('echo "hello world"')
        self.job.setApplication(Constants.GENERIC_APPLICATION_NAME)
        self.job.setSubmissionLocation(self.subLoc)

    def prepare(self):

        self.job.setUniqueJobname(self.name())
        self.job.createJob()

    def execute(self):
        self.job.submitJob()
Example #38
'''
@author: markus
'''

from grisu.frontend.control.login import LoginManager
from grisu.frontend.model.job import JobObject
import sys


si = LoginManager.loginCommandline("Local")
folder = '/home/markus/test/batchWrap/'
original = folder + 'original.txt'
script = folder + 'wrap.sh'

job = JobObject(si);
job.setUniqueJobname('diff_wrap')
job.setApplication('UnixCommands')
job.setCommandline('sh wrap.sh 0 9')
job.addInputFileUrl(original)
job.addInputFileUrl(script)
    
for i in range(10):
    
    file = folder+'test'+str(i)+'.txt'
    job.addInputFileUrl(file)
    
job.createJob("/ARCS/BeSTGRID")
job.submitJob()

finished = job.waitForJobToFinish(10)
    
Example #39
Created on 17/11/2009

@author: markus
"""

from grisu.frontend.control.login import LoginManager
from grisu.frontend.model.job import JobObject
import sys

HttpProxyManager.useSystemHttpProxy()

si = LoginManager.loginCommandline("bestgrid")


# create the job object
job = JobObject(si)
# set a unique jobname
job.setUniqueJobname("echo_job1")

# set the commandline that needs to be executed
job.setCommandline('echo "Hello World"')

# create the job on the backend and specify the VO to use
job.createJob("/nz/nesi")
# submit the job
job.submitJob()

# this waits until the job is finished. Checks every 10 seconds (which would be too often for a real job)
finished = job.waitForJobToFinish(10)

if not finished:
Example #40
print 'starting job creation...'

group = '/nz/nesi'

#sub_loc = '[email protected]:ng2.auckland.ac.nz'

batch_job = BatchJobObject(si, basename, group, 'R',
                           Constants.NO_VERSION_INDICATOR_STRING)

batch_job_name = batch_job.getJobname()
print 'jobname on backend: ' + batch_job_name

path_to_inputfile = batch_job.pathToInputFiles() + inputfilename

for i in range(1, gen_jobs + 1):
    job = JobObject(si)
    job.setEmail_address(email)
    job.setEmail_on_job_finish(True)

    job.setCommandline('R --no-readline --no-restore --no-save -f ' +
                       path_to_inputfile)

    batch_job.addJob(job)

batch_job.addInputFile('/home/markus/Desktop/R/' + inputfilename)
batch_job.setDefaultNoCpus(1)
batch_job.setDefaultWalltimeInSeconds(walltime)

print 'preparing jobs on backend...'

batch_job.prepareAndCreateJobs(redistribute)
Example #41
inputFile1relPath = pathToInputFiles + 'inputFile1.txt '

inputFile2Url = 'gsiftp://ng2.vpac.org/home/grid-vpac/DC_au_DC_org_DC_arcs_DC_slcs_O_VPAC_CN_Markus_Binsteiner_qTrDzHY7L1aKo3WSy8623-7bjgM/inputFile2.txt'
inputFile2relPath = pathToInputFiles + 'inputFile2.txt'

inputFile3Url = '/home/markus/test/errorFile.txt'
inputFile3relPath = pathToInputFiles + 'errorFile.txt'

for i in range(0, numberOfJobs):
    # create a unique jobname for every job
    jobname = batchJobName + "_" + str(i)

    print 'Creating job: ' + jobname

    # create the single job
    job = JobObject(si)
    job.setJobname(jobname)
    # better to set the application to use explicitly because in that case we don't need to use mds (faster)
    job.setApplication('UnixCommands')
    if i == 3 or i == 13:
        # this is just to demonstrate how to restart a failed job later on
        job.setCommandline('cat ' + inputFile3relPath)
    else:
        job.setCommandline('cat ' + inputFile1relPath + ' ' +
                           inputFile2relPath)

    job.setWalltimeInSeconds(60)
    # adding the job to the multijob
    batchJob.addJob(job)

# this should be set because it's used for the matchmaking/metascheduling
Example #42
start = 30
end = 40

pathToInputFiles = batchJob.pathToInputFiles()

inputFile1relPath = pathToInputFiles + 'inputFile1.txt '
inputFile2relPath = pathToInputFiles + 'inputFile2.txt'

for i in range(start, end):
    # create a unique jobname for every job
    jobname = batchJobName + "_" + str(i)

    print 'Creating job: ' + jobname

    # create the single job
    job = JobObject(si)
    job.setJobname(jobname)
    # better to set the application to use explicitly because in that case we don't need to use mds (faster)
    job.setApplication('UnixCommands')
    job.setCommandline('cat ' + inputFile1relPath + ' ' + inputFile2relPath)

    job.setWalltimeInSeconds(60)
    # adding the job to the multijob
    batchJob.addJob(job)

# only start the newly added jobs and wait for the restart to finish
batchJob.restart(False, False, True, True)

# don't forget to exit properly. this cleans up possible existing threads/executors
sys.exit()
Example #43
'''
Created on 17/11/2009

@author: markus
'''

from grisu.frontend.control.login import LoginManager
from grisu.frontend.model.job import JobObject
import sys

si = LoginManager.loginCommandline()
    
print 'Creating job...'
# create the job object
job = JobObject(si);
# set a unique jobname
job.setUniqueJobname("echo_job1")
print 'Set jobname to: '+ job.getJobname()
# set the name of the application like it is published in mds. "generic" means not to use mds for the lookup.
job.setApplication("generic")
# since we are using a "generic" job, we need to specify a submission location. I'll make that easier later on...
job.setSubmissionLocation("dque@edda-m:ng2.vpac.org")

# set the commandline that needs to be executed
job.setCommandline("echo \"Hello World\"")

job.addInputFileUrl('/home/markus/test/singleJobFile_0.txt');

# create the job on the backend and specify the VO to use
job.createJob("/ARCS/NGAdmin")
print 'Submitting job...'
Example #44
from grisu.frontend.control.login import LoginManager
from grisu.frontend.model.job import JobObject

si = LoginManager.loginCommandline("BeSTGRID-DEV")

print 'Logged in.'

job = JobObject(si)
job.setUniqueJobname("cat_job", si)
job.setCommandline("cat text0.txt")
job.addInputFileUrl('/home/markus/tmp/text0.txt')

job.createJob("/nz/nesi")
#job.setSubmissionLocation('[email protected]:ng2.auckland.ac.nz')
job.submitJob()

print 'Job submitted.'

job.waitForJobToFinish(10)

print 'Job finished. Status: ' + job.getStatusString(False)
print "Stdout: " + job.getStdOutContent()
print "Stderr: " + job.getStdErrContent()

job.kill(True)
Example #45
start = 30
end = 40

pathToInputFiles = batchJob.pathToInputFiles()

inputFile1relPath = pathToInputFiles+'inputFile1.txt ' 
inputFile2relPath = pathToInputFiles+'inputFile2.txt' 

for i in range(start, end):
    # create a unique jobname for every job
    jobname = batchJobName+"_"+ str(i)
    
    print 'Creating job: '+jobname
    
    # create the single job
    job = JobObject(si)
    job.setJobname(jobname)
    # better to set the application to use explicitly because in that case we don't need to use mds (faster)
    job.setApplication('UnixCommands')
    job.setCommandline('cat '+ inputFile1relPath + ' ' + inputFile2relPath)

    job.setWalltimeInSeconds(60)
    # adding the job to the multijob
    batchJob.addJob(job)

# only start the newly added jobs and wait for the restart to finish
batchJob.restart(False, False, True, True)


# don't forget to exit properly. this cleans up possible existing threads/executors
sys.exit()
Example #46
# how many jobs do we want
numberOfJobs = 10

# the (unique) name of the batchjob
batch_job_name = "test_batch";

# create the batchjob 
batch_job = BatchJobObject(si, batch_job_name, "/nz/nesi", "UnixCommands", Constants.NO_VERSION_INDICATOR_STRING);

# now we can calculate the relative path (from every job directory) to the common input file folder
pathToInputFiles = batch_job.pathToInputFiles()

for i in range(0, numberOfJobs):

    # create the single job
    job = JobObject(si)
    # better to set the application to use explicitly because in that case we don't need to use mds (faster)
    job.setCommandline('cat ' + pathToInputFiles+'commonJobFile.txt ' + 'singleJobFile.txt')
    # adding a job-specific input file
    job.addInputFileUrl("/home/markus/tmp/singleJobFile.txt")
    # adding the job to the multijob
    batch_job.addJob(job)
    
# now we are adding a file that can be used by all of the child jobs. it needs to be referenced via the pathToInputFiles() method shown above
batch_job.addInputFile('/home/markus/tmp/commonJobFile.txt')
batch_job.setDefaultNoCpus(1);
batch_job.setDefaultWalltimeInSeconds(60);   
   
batch_job.setLocationsToExclude(["gt5test:ng1.canterbury.ac.nz"])
    
try:
Example #47
            if isfile(line):
                files_to_upload.append(line)
            else: 
                print 'Not a file: '+line
                sys.exit(1)
    return files_to_upload

files_to_upload = read_files(files_file)

if not si:
    LoginManager.initEnvironment()
    si = LoginManager.login('bestgrid', True)

filemanager = GrisuRegistryManager.getDefault(si).getFileManager()

job = JobObject(si)
job.setSubmissionLocation('pan:pan.nesi.org.nz')
job.setTimestampJobname(jobname_template)
job.setCommandline(commandline)

# add input files
for file in files_to_upload:
    job.addInputFileUrl(file)
    

jobname = job.createJob('/nz/nesi')
print 'Submitting job...'
job.submitJob()
print 'Jobname: '+jobname

print 'Waiting for job to finish...'
Example #48
from grisu.frontend.control.login import LoginManager
from grisu.frontend.model.job import JobObject
import sys

# create a service interface to the BeSTGRID backend
service_interface = LoginManager.loginCommandline("BeSTGRID")

print 'Creating job object...'

job = JobObject(service_interface)

job.setJobname("echo_job-1")  # job name must be unique
print 'Set jobname to: ' + job.getJobname()
# set the name of the application as it is published in MDS.
# "generic" means not to use MDS for the lookup.
job.setApplication("generic")
# "generic" jobs require a submission location to be specified
job.setSubmissionLocation("all.q:ng2.scenzgrid.org#SGE")

# set the command that needs to be executed
job.setCommandline("echo \"Hello World\"")

# create the job on the backend and specify the VO to use
job.createJob("/ARCS/BeSTGRID")
print 'Submitting job...'
# submit the job
job.submitJob()

print 'Waiting for the job to finish...'
# this waits until the job is finished. Checks every 10 seconds (which would be too often for a real job)
finished = job.waitForJobToFinish(10)
Example #49
print 'starting job creation...'

group = '/nz/nesi'

#sub_loc = '[email protected]:ng2.auckland.ac.nz'

batch_job = BatchJobObject(si, basename, group, 'R', Constants.NO_VERSION_INDICATOR_STRING)

batch_job_name = batch_job.getJobname()
print 'jobname on backend: '+batch_job_name

path_to_inputfile = batch_job.pathToInputFiles()+inputfilename

for i in range(1,gen_jobs+1):
    job = JobObject(si)
    job.setEmail_address(email)
    job.setEmail_on_job_finish(True)

    job.setCommandline('R --no-readline --no-restore --no-save -f '+path_to_inputfile)

    batch_job.addJob(job)
    
batch_job.addInputFile('/home/markus/Desktop/R/'+inputfilename)
batch_job.setDefaultNoCpus(1)
batch_job.setDefaultWalltimeInSeconds(walltime)


print 'preparing jobs on backend...'

batch_job.prepareAndCreateJobs(redistribute)
Example #50
import random
import sys

# si stands for serviceInterface and holds all session information
si = LoginManager.loginCommandline('bestgrid')

amount_of_jobs_total = 10
amount_of_jobs_concurrent = 4

submitted_jobs = []

# better make that unique for each run, so we can resume workflows easier if necessary (this introduces quite a bit more complexity though)
jobname_base = 'staging_test'

for total in range(1, amount_of_jobs_total + 1):
    job = JobObject(si)
    job.setJobname(jobname_base + '_' + str(total))
    # always good to set the application if you know it, processing the job will be faster
    job.setApplication('UnixCommands')
    # also good to set queue if you know where you want to submit your job, not necessary, but processing of the job will be faster
    job.setSubmissionLocation('default:gram5.ceres.auckland.ac.nz')
    # create a random sized outfile
    mbsize = 1024 * 1024
    random_size = random.randrange(10, 100)
    size_string = str(mbsize * random_size)
    job.setCommandline('dd if=/dev/zero of=outfile bs=' + size_string +
                       ' count=1')

    job.createJob('/nz/nesi')
    job.submitJob()
    print 'created and submitted job: ' + job.getJobname() + ' (size: ' + str(
        random_size) + 'mb)'
Example #51
                files_to_upload.append(line)
            else:
                print 'Not a file: ' + line
                sys.exit(1)
    return files_to_upload


files_to_upload = read_files(files_file)

if not si:
    LoginManager.initEnvironment()
    si = LoginManager.login('bestgrid', True)

filemanager = GrisuRegistryManager.getDefault(si).getFileManager()

job = JobObject(si)
job.setSubmissionLocation('pan:pan.nesi.org.nz')
job.setTimestampJobname(jobname_template)
job.setCommandline(commandline)

# add input files
for file in files_to_upload:
    job.addInputFileUrl(file)

jobname = job.createJob('/nz/nesi')
print 'Submitting job...'
job.submitJob()
print 'Jobname: ' + jobname

print 'Waiting for job to finish...'
job.waitForJobToFinish(jobstate_check_intervall)
Example #52
'''
@author: markus
'''

from grisu.frontend.control.login import LoginManager
from grisu.frontend.model.job import JobObject
import sys

si = LoginManager.loginCommandline("Local")
folder = '/home/markus/test/batchWrap/'
original = folder + 'original.txt'
script = folder + 'wrap.sh'

job = JobObject(si)
job.setUniqueJobname('diff_wrap')
job.setApplication('UnixCommands')
job.setCommandline('sh wrap.sh 0 9')
job.addInputFileUrl(original)
job.addInputFileUrl(script)

for i in range(10):

    file = folder + 'test' + str(i) + '.txt'
    job.addInputFileUrl(file)

job.createJob("/ARCS/BeSTGRID")
job.submitJob()

finished = job.waitForJobToFinish(10)

print 'Job: ' + job.getJobname()