Example no. 1
from grisu.frontend.control.login import LoginManager
from grisu.frontend.model.job import BatchJobObject, JobObject
import sys

batchJobName = sys.argv[1]

# display commandline login menu if no local proxy exists
si = LoginManager.loginCommandline()

batchJob = BatchJobObject(si, batchJobName, False)

start = 30
end = 40

pathToInputFiles = batchJob.pathToInputFiles()

inputFile1relPath = pathToInputFiles+'inputFile1.txt ' 
inputFile2relPath = pathToInputFiles+'inputFile2.txt' 

for i in range(start, end):
    # create a unique jobname for every job
    jobname = batchJobName + "_" + str(i)

    print 'Creating job: ' + jobname
    
    # create the single job
    job = JobObject(si)
    job.setJobname(jobname)
    # better to set the application explicitly; that way we don't need to query MDS (faster)
    job.setApplication('UnixCommands')
    job.setCommandline('cat ' + inputFile1relPath + ' ' + inputFile2relPath)
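
Example no. 1 breaks off inside the loop. Going by the other examples in this listing, each newly created job would be added to the batch, and the batch then prepared and submitted. The sketch below shows that plausible continuation; prepareAndCreateJobs() and submit() are assumed method names that do not appear anywhere in these snippets.

    # add the job to the batch job, as the other examples in this listing do
    batchJob.addJob(job)

# assumed submission calls, not shown in any snippet of this listing
print 'preparing and submitting batchjob...'
batchJob.prepareAndCreateJobs(True)
batchJob.submit()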
Example no. 2
gen_jobs = 40

inputdir = '/home/markus/Desktop/R/'
#inputfilename = 'Evaluation_Markov-ADF-Test-2011-05-09-mc50.r'
inputfilename = 'Evaluation_Markov-ADF-Test-2011-05-09-mc50-test.r'

print 'logging in...'
si = LoginManager.loginCommandline(backend)

print 'starting job creation...'

group = '/nz/nesi'

#sub_loc = '[email protected]:ng2.auckland.ac.nz'

batch_job = BatchJobObject(si, basename, group, 'R', Constants.NO_VERSION_INDICATOR_STRING)

batch_job_name = batch_job.getJobname()
print 'jobname on backend: '+batch_job_name

path_to_inputfile = batch_job.pathToInputFiles()+inputfilename

for i in range(1,gen_jobs+1):
    job = JobObject(si)
    job.setEmail_address(email)
    job.setEmail_on_job_finish(True)

    job.setCommandline('R --no-readline --no-restore --no-save -f '+path_to_inputfile)

    batch_job.addJob(job)
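
Example no. 2 is an excerpt: backend, basename and email are presumably defined further up in the original script, and the snippet never shows how the shared R script reaches the batch job's common input folder. A plausible continuation, reusing the batch-level addInputFile() call from Examples no. 8 and 13, might look like this; prepareAndCreateJobs() and submit() are assumed method names.

# attach the shared R script so it ends up in the batch job's common input
# folder, the same way addInputFile() is used in Examples no. 8 and 13
batch_job.addInputFile(inputdir + inputfilename)

# assumed submission calls, not shown in any snippet of this listing
batch_job.prepareAndCreateJobs(True)
batch_job.submit()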
    
Example no. 3
from grisu.frontend.control.login import LoginManager
from grisu.frontend.model.job import JobObject, BatchJobObject, \
    JobsException
import sys
import time

si = LoginManager.loginCommandline("BeSTGRID-DEV")

# how many jobs do we want
numberOfJobs = 10

# the (unique) name of the batchjob
batch_job_name = "test_batch"

# create the batchjob
batch_job = BatchJobObject(si, batch_job_name, "/nz/nesi", "UnixCommands",
                           Constants.NO_VERSION_INDICATOR_STRING)

# now we can calculate the relative path (from every job directory) to the common input file folder
pathToInputFiles = batch_job.pathToInputFiles()

for i in range(0, numberOfJobs):

    # create the single job
    job = JobObject(si)
    # better to set the application explicitly; that way we don't need to query MDS (faster)
    job.setCommandline('cat ' + pathToInputFiles + 'commonJobFile.txt ' +
                       'singleJobFile.txt')
    # adding a job-specific input file
    job.addInputFileUrl("/home/markus/tmp/singleJobFile.txt")
    # adding the job to the multijob
    batch_job.addJob(job)
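
The commandline above refers to commonJobFile.txt relative to the batch job's common input folder, but the snippet never attaches that file. Borrowing the batch-level addInputFile() call from Examples no. 8 and 13, the missing step might look like the sketch below; the local path as well as prepareAndCreateJobs() and submit() are assumptions.

# attach the shared input file once at batch level; every job then reaches it
# via the relative path returned by pathToInputFiles()
batch_job.addInputFile("/home/markus/tmp/commonJobFile.txt")  # assumed local path

# assumed submission calls, not shown in any snippet of this listing
batch_job.prepareAndCreateJobs(True)
batch_job.submit()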
Example no. 4
from grisu.frontend.control.login import LoginManager
from grisu.frontend.model.job import JobObject, BatchJobObject

import sys
import time

si = LoginManager.loginCommandline("BeSTGRID-DEV")

# how many jobs do we want
numberOfJobs = 10

# the (unique) name of the multijob
batch_job_name = "test_batch"

# to see what's going on we could add a simple event listener, but this doesn't seem to work reliably in Jython
#SystemOutMultiJobLogger(multiJobName)

# create the multipart job
batch_job = BatchJobObject(si, batch_job_name, "/nz/nesi", "cat",
                           Constants.NO_VERSION_INDICATOR_STRING)

# now we can calculate the relative path (from every job directory) to the common input file folder
pathToInputFiles = batch_job.pathToInputFiles()

for i in range(0, numberOfJobs):

    # create the single job
    job = JobObject(si)

    # better to set the application explicitly; that way we don't need to query MDS (faster)
    job.setCommandline('cat ' + pathToInputFiles + 'commonFile.txt ' +
                       'singleJobFile.txt')
    # adding a job-specific input file
    job.addInputFileUrl("/home/markus/tmp/singleJobFile.txt")
    # adding the job to the multijob
    batch_job.addJob(job)
Example no. 5
from grisu.control import DefaultResubmitPolicy
from grisu.frontend.control.login import LoginManager
from grisu.frontend.model.job import BatchJobObject
from grisu.frontend.model.job import JobException
import sys
import time

batchJobname = sys.argv[1]

si = LoginManager.loginCommandline()

# load (but not refresh yet) batchjob, this might take a while
batchJob = BatchJobObject(si, batchJobname, False)

while not batchJob.isFinished(True):
    
    print batchJob.getProgress()
    
    print str(batchJob.getNumberOfFailedJobs())
    
    if batchJob.getNumberOfFailedJobs() > 0:
        
        print str(batchJob.getNumberOfFailedJobs()) + ' failed jobs found. restarting...'
        failedpolicy = DefaultResubmitPolicy()
        batchJob.restart(failedpolicy, True)
        print 'Restart finished.'
        
    time.sleep(5)

jobsToRestart = []
Example no. 6
    BatchJobDialog
import sys
import time

# display commandline login menu if no local proxy exists
si = LoginManager.loginCommandline()

# how many jobs do we want
numberOfJobs = 20

# the (unique) name of the multijob
batchJobName = JobnameHelpers.calculateTimestampedJobname('exampleBatchJob')

print 'Creating batchjob ' + batchJobName
# create the multipart job
batchJob = BatchJobObject(si, batchJobName, '/ARCS/NGAdmin', 'UnixCommands',
                          Constants.NO_VERSION_INDICATOR_STRING)

# now we can calculate the relative path (from every job directory) to the common input file folder
pathToInputFiles = batchJob.pathToInputFiles()

inputFile1Url = '/home/markus/test/inputFile1.txt'
inputFile1relPath = pathToInputFiles + 'inputFile1.txt '

inputFile2Url = 'gsiftp://ng2.vpac.org/home/grid-vpac/DC_au_DC_org_DC_arcs_DC_slcs_O_VPAC_CN_Markus_Binsteiner_qTrDzHY7L1aKo3WSy8623-7bjgM/inputFile2.txt'
inputFile2relPath = pathToInputFiles + 'inputFile2.txt'

inputFile3Url = '/home/markus/test/errorFile.txt'
inputFile3relPath = pathToInputFiles + 'errorFile.txt'

for i in range(0, numberOfJobs):
    # create a unique jobname for every job
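
Example no. 6 breaks off inside the loop. Reconstructing it from the per-job loop of Example no. 1 and the batch-level addInputFile() calls of Examples no. 8 and 13, the continuation would plausibly run along these lines; prepareAndCreateJobs() and submit() at the end are assumed method names.

    jobname = batchJobName + "_" + str(i)
    job = JobObject(si)
    job.setJobname(jobname)
    job.setCommandline('cat ' + inputFile1relPath + ' ' + inputFile2relPath)
    batchJob.addJob(job)

# attach the shared input files once at batch level
batchJob.addInputFile(inputFile1Url)
batchJob.addInputFile(inputFile2Url)
batchJob.addInputFile(inputFile3Url)

# assumed submission calls, not shown in any snippet of this listing
batchJob.prepareAndCreateJobs(True)
batchJob.submit()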
Example no. 7
from grisu.frontend.control.login import LoginManager
from grisu.frontend.model.job import JobObject, BatchJobObject, \
    JobsException
import sys
import time

si = LoginManager.loginCommandline("BeSTGRID-DEV")

# how many jobs do we want
numberOfJobs = 10

# the (unique) name of the batchjob
batch_job_name = "test_batch"

# create the batchjob
batch_job = BatchJobObject(si, batch_job_name, "/nz/nesi", "UnixCommands",
                           Constants.NO_VERSION_INDICATOR_STRING)

# now we can calculate the relative path (from every job directory) to the common input file folder
pathToInputFiles = batch_job.pathToInputFiles()

for i in range(0, numberOfJobs):

    # create the single job
    job = JobObject(si)
    # better to set the application explicitly; that way we don't need to query MDS (faster)
    job.setCommandline('cat ' + pathToInputFiles + 'commonJobFile.txt ' + 'singleJobFile.txt')
    # adding a job-specific input file
    job.addInputFileUrl("/home/markus/tmp/singleJobFile.txt")
    # adding the job to the multijob
    batch_job.addJob(job)
    
Example no. 8

import os
import random

from grisu.frontend.control.login import LoginManager
from grisu.frontend.model.job import BatchJobObject

service_interface = LoginManager.loginCommandline(backend)
print "INFO: Service interface to " + backend + " Created."
print "INFO: Service Interface connected as: " + service_interface.getDN()

# Create some base strings to build jobs with
base_job_name = "bacon"
batch_job_name = str(random.randint(10000, 99999)) + "-" + base_job_name
print "INFO: Base job name is " + base_job_name
print "INFO: Batch job name is " + batch_job_name

# Set some job settings
application = "python"
version = "2.4"

print "INFO: Creating a Batch Job Object called " + batch_job_name
batch_jobs = BatchJobObject(service_interface, batch_job_name, group, application, version)
batch_jobs.setConcurrentInputFileUploadThreads(5)  # Set the number of concurrent input file uploads
batch_jobs.setConcurrentJobCreationThreads(5)  # Set the number of concurrent job creation threads
batch_jobs.setDefaultNoCpus(1)  # Set the number of CPUs required
batch_jobs.setDefaultWalltimeInSeconds(300)  # Set the maximum walltime to 5 minutes
# Currently the AUT location is not behaving, so always exclude it
exclude_sites = list()
exclude_sites.append("AUT")
batch_jobs.setLocationsToExclude(exclude_sites)  # Create a blacklist of sites to exclude

print "INFO: Adding common files to Batch Job Object " + batch_job_name
batch_jobs.addInputFile(os.path.join(current_dir, dictionary_path))
batch_jobs.addInputFile(os.path.join(current_dir, "countbacon.py"))
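
Example no. 8 stops right after the batch-level setup. Based on the per-job loops in the other examples, the job-creation part would plausibly continue as sketched below; the job count, the countbacon.py commandline and the prepareAndCreateJobs()/submit() calls are all assumptions, not taken from the snippet.

from grisu.frontend.model.job import JobObject

print "INFO: Creating jobs for " + batch_job_name
for i in range(0, 10):  # assumed number of jobs
    job = JobObject(service_interface)
    job.setJobname(batch_job_name + "-" + str(i))
    # assumed commandline; the real arguments of countbacon.py are not shown above
    job.setCommandline("python countbacon.py")
    batch_jobs.addJob(job)

# assumed submission calls, not shown in any snippet of this listing
batch_jobs.prepareAndCreateJobs(True)
batch_jobs.submit()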
Example no. 9
from grisu.frontend.control.login import LoginManager
from grisu.frontend.model.job import BatchJobObject, JobObject
import sys

batchJobName = sys.argv[1]

# display commandline login menu if no local proxy exists
si = LoginManager.loginCommandline()

batchJob = BatchJobObject(si, batchJobName, False)

start = 30
end = 40

pathToInputFiles = batchJob.pathToInputFiles()

inputFile1relPath = pathToInputFiles + 'inputFile1.txt '
inputFile2relPath = pathToInputFiles + 'inputFile2.txt'

for i in range(start, end):
    # create a unique jobname for every job
    jobname = batchJobName + "_" + str(i)

    print 'Creating job: ' + jobname

    # create the single job
    job = JobObject(si)
    job.setJobname(jobname)
    # better to set the application explicitly; that way we don't need to query MDS (faster)
    job.setApplication('UnixCommands')
    job.setCommandline('cat ' + inputFile1relPath + ' ' + inputFile2relPath)
Example no. 10
gen_jobs = 40

inputdir = '/home/markus/Desktop/R/'
#inputfilename = 'Evaluation_Markov-ADF-Test-2011-05-09-mc50.r'
inputfilename = 'Evaluation_Markov-ADF-Test-2011-05-09-mc50-test.r'

print 'logging in...'
si = LoginManager.loginCommandline(backend)

print 'starting job creation...'

group = '/nz/nesi'

#sub_loc = '[email protected]:ng2.auckland.ac.nz'

batch_job = BatchJobObject(si, basename, group, 'R',
                           Constants.NO_VERSION_INDICATOR_STRING)

batch_job_name = batch_job.getJobname()
print 'jobname on backend: ' + batch_job_name

path_to_inputfile = batch_job.pathToInputFiles() + inputfilename

for i in range(1, gen_jobs + 1):
    job = JobObject(si)
    job.setEmail_address(email)
    job.setEmail_on_job_finish(True)

    job.setCommandline('R --no-readline --no-restore --no-save -f ' +
                       path_to_inputfile)

    batch_job.addJob(job)
Example no. 11
from grisu.frontend.control.login import LoginManager
from grisu.frontend.model.job import JobObject, BatchJobObject

si = LoginManager.loginCommandline("BeSTGRID-DEV")

# how many jobs do we want
numberOfJobs = 10

# the (unique) name of the multijob
batch_job_name = "test_batch"

# to see what's going on we could add a simple event listener, but this doesn't seem to work reliably in Jython
#SystemOutMultiJobLogger(multiJobName)

# create the multipart job
batch_job = BatchJobObject(si, batch_job_name, "/nz/nesi", "cat",
                           Constants.NO_VERSION_INDICATOR_STRING)

# now we can calculate the relative path (from every job directory) to the common input file folder
pathToInputFiles = batch_job.pathToInputFiles()

for i in range(0, numberOfJobs):

    # create the single job
    job = JobObject(si)

    # better to set the application explicitly; that way we don't need to query MDS (faster)
    job.setCommandline('cat ' + pathToInputFiles + 'commonFile.txt ' + 'singleJobFile.txt')
    # adding a job-specific input file
    job.addInputFileUrl("/home/markus/tmp/singleJobFile.txt")
    # adding the job to the multijob
    batch_job.addJob(job)
Example no. 12
from grisu.frontend.control.login import LoginManager
from grisu.frontend.model.job import BatchJobObject

import sys
import time

# display commandline login menu if no local proxy exists
si = LoginManager.loginCommandline()

# how many jobs do we want
numberOfJobs = 20

# the (unique) name of the multijob
batchJobName = JobnameHelpers.calculateTimestampedJobname('exampleBatchJob')

print 'Creating batchjob ' + batchJobName
# create the multipart job
batchJob = BatchJobObject(si, batchJobName, '/ARCS/NGAdmin', 'UnixCommands',
                          Constants.NO_VERSION_INDICATOR_STRING)

# now we can calculate the relative path (from every job directory) to the common input file folder
pathToInputFiles = batchJob.pathToInputFiles()

inputFile1Url = '/home/markus/test/inputFile1.txt'
inputFile1relPath = pathToInputFiles + 'inputFile1.txt '

inputFile2Url = 'gsiftp://ng2.vpac.org/home/grid-vpac/DC_au_DC_org_DC_arcs_DC_slcs_O_VPAC_CN_Markus_Binsteiner_qTrDzHY7L1aKo3WSy8623-7bjgM/inputFile2.txt'
inputFile2relPath = pathToInputFiles + 'inputFile2.txt'

inputFile3Url = '/home/markus/test/errorFile.txt'
inputFile3relPath = pathToInputFiles + 'errorFile.txt'

for i in range(0, numberOfJobs):
    # create a unique jobname for every job
Example no. 13
import os
import random

from grisu.frontend.control.login import LoginManager
from grisu.frontend.model.job import BatchJobObject

service_interface = LoginManager.loginCommandline(backend)
print "INFO: Service interface to " + backend + " Created."
print "INFO: Service Interface connected as: " + service_interface.getDN()

# Create some base strings to build jobs with
base_job_name = "bacon"
batch_job_name = str(random.randint(10000, 99999)) + "-" + base_job_name
print "INFO: Base job name is " + base_job_name
print "INFO: Batch job name is " + batch_job_name

# Set some job settings
application = "python"
version = "2.4"

print "INFO: Creating a Batch Job Object called " + batch_job_name
batch_jobs = BatchJobObject(service_interface, batch_job_name, group,
                            application, version)
batch_jobs.setConcurrentInputFileUploadThreads(5)  # Set the number of concurrent input file uploads
batch_jobs.setConcurrentJobCreationThreads(5)  # Set the number of concurrent job creation threads
batch_jobs.setDefaultNoCpus(1)  # Set the number of CPUs required
batch_jobs.setDefaultWalltimeInSeconds(300)  # Set the maximum walltime to 5 minutes
# Currently the AUT location is not behaving, so always exclude it
batch_jobs.setLocationsToExclude(["AUT"])  # Create a blacklist of sites to exclude

print "INFO: Adding common files to Batch Job Object " + batch_job_name
batch_jobs.addInputFile(os.path.join(current_dir, dictionary_path))
batch_jobs.addInputFile(os.path.join(current_dir, "countbacon.py"))
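
Once a batch like this has been created and submitted, its progress can be followed with the calls that Example no. 5 already uses. A minimal monitoring sketch for the batch_jobs object above could look like this; it assumes the batch has been submitted first.

import time

# poll the backend until every job in the batch has finished
while not batch_jobs.isFinished(True):
    print batch_jobs.getProgress()
    time.sleep(60)

print "INFO: Batch finished, failed jobs: " + str(batch_jobs.getNumberOfFailedJobs())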