Example #1
0
    # --- one child job (this is the body of a loop over input files;
    #     the loop header lies above this excerpt) ---
    # create the single job
    job = JobObject(si)
    # better to set the application to use explicitly because in that case we don't need to use mds (faster)
    # NOTE(review): assumes pathToInputFiles already ends with a path separator -- confirm
    job.setCommandline('cat ' + pathToInputFiles + 'commonJobFile.txt ' +
                       'singleJobFile.txt')
    # adding a job-specific input file (staged for this job only)
    job.addInputFileUrl("/home/markus/tmp/singleJobFile.txt")
    # adding the job to the multijob
    batch_job.addJob(job)

# Stage a shared input file: every child job can reference it via the
# pathToInputFiles() prefix shown above.
batch_job.addInputFile('/home/markus/tmp/commonJobFile.txt')

# Resource defaults applied to every child job.
batch_job.setDefaultNoCpus(1)
batch_job.setDefaultWalltimeInSeconds(60)

# Blacklist a submission location that should not receive any jobs.
excluded_locations = ["gt5test:ng1.canterbury.ac.nz"]
batch_job.setLocationsToExclude(excluded_locations)

try:
    print "Creating jobs on the backend and staging files..."
    # by specifying "True" we tell the backend to automatically distribute the jobs to all available submission locations
    # this can be finetuned by exluding or including sites. another option would be to specifying the submission location
    # for every single job and setting "False" below (this would make job submission faster since jobs don't need to be re-distributed/moved on the backend).
    batch_job.prepareAndCreateJobs(True)
except (JobsException), error:
    for job in error.getFailures().keySet():
        print "Job: " + job.getJobname() + ", Error: " + error.getFailures(
        ).get(job).getLocalizedMessage()

    sys.exit()

print "Job distribution:"
# Set some job settings
application = "python"
version = "2.4"

print "INFO: Creating a Batch Job Object called " + batch_job_name
batch_jobs = BatchJobObject(service_interface, batch_job_name, group, application, version)
batch_jobs.setConcurrentInputFileUploadThreads(5)  # Set the number of concurrent uploads
batch_jobs.setConcurrentJobCreationThreads(5)  # Set the number of concurrent jobs
batch_jobs.setDefaultNoCpus(1)
# Set the number of CPUs required
batch_jobs.setDefaultWalltimeInSeconds(300)
# Set the maximum walltime to 5 minutes
exclude_sites = list()
exclude_sites.append("AUT")
batch_jobs.setLocationsToExclude(exclude_sites)  # Create a blacklist of sites to exclude
# Currently the AUT location is not behaving, so always exclude it

print "INFO: Adding common files to Batch Job Object " + batch_job_name
batch_jobs.addInputFile(os.path.join(current_dir, dictionary_path))
batch_jobs.addInputFile(os.path.join(current_dir, "countbacon.py"))

print "INFO: Defining jobs from input directory"
job_count = 0
# One job per file found in input_path. NOTE(review): this excerpt is
# truncated -- the loop body continues past the end of this snippet.
for file_name in os.listdir(input_path):
    print "INFO: Defining job for " + file_name
    job_name = base_job_name + "-" + file_name
    job = JobObject(service_interface)
    job.setJobname(job_name)
    job.setApplication("python")  # Set the application being run
    job.setApplicationVersion("2.4")  # Set the application version, note this is an exact match
Example #3
0
    # Build one child job and attach it to the batch.
    job = JobObject(si)
    # Setting the commandline explicitly avoids an mds lookup (faster).
    cmd = 'cat ' + pathToInputFiles + 'commonJobFile.txt ' + 'singleJobFile.txt'
    job.setCommandline(cmd)
    # This input file belongs to this job only.
    job.addInputFileUrl("/home/markus/tmp/singleJobFile.txt")
    # Register the job with the enclosing multijob.
    batch_job.addJob(job)
    
# Stage a file shared by all child jobs; they reference it via the
# pathToInputFiles() method shown above.
batch_job.addInputFile('/home/markus/tmp/commonJobFile.txt')
# Resource defaults for every child job (dropped the stray semicolons and
# trailing whitespace -- not idiomatic Python).
batch_job.setDefaultNoCpus(1)
batch_job.setDefaultWalltimeInSeconds(60)

# Never submit to this location.
batch_job.setLocationsToExclude(["gt5test:ng1.canterbury.ac.nz"])
    
try:
    print "Creating jobs on the backend and staging files..."
    # by specifying "True" we tell the backend to automatically distribute the jobs to all available submission locations
    # this can be finetuned by exluding or including sites. another option would be to specifying the submission location 
    # for every single job and setting "False" below (this would make job submission faster since jobs don't need to be re-distributed/moved on the backend).
    batch_job.prepareAndCreateJobs(True)
except (JobsException), error:
    for job in error.getFailures().keySet():
        print "Job: "+job.getJobname()+", Error: "+error.getFailures().get(job).getLocalizedMessage()

    sys.exit()

print "Job distribution:"
print batch_job.getOptimizationResult()
Example #4
0
print "INFO: Base job name is " + base_job_name
print "INFO: Batch job name is " + batch_job_name

# Set some job settings
application = "python"
version = "2.4"

print "INFO: Creating a Batch Job Object called " + batch_job_name
batch_jobs = BatchJobObject(service_interface, batch_job_name, group, application, version)
batch_jobs.setConcurrentInputFileUploadThreads(5)  # Set the number of concurrent uploads
batch_jobs.setConcurrentJobCreationThreads(5)  # Set the number of concurrent jobs
batch_jobs.setDefaultNoCpus(1)
# Set the number of CPUs required
batch_jobs.setDefaultWalltimeInSeconds(300)
# Set the maximum walltime to 5 minutes
batch_jobs.setLocationsToExclude(["AUT"])  # Create a blacklist of sites to exclude
# Currently the AUT location is not behaving, so always exclude it

print "INFO: Adding common files to Batch Job Object " + batch_job_name
batch_jobs.addInputFile(os.path.join(current_dir, dictionary_path))
batch_jobs.addInputFile(os.path.join(current_dir, "countbacon.py"))

print "INFO: Defining jobs from input directory"
job_count = 0
# One job per input file. NOTE(review): this excerpt is truncated -- the
# loop body continues beyond this snippet.
for file_name in os.listdir(input_path):
    print "INFO: Defining job for " + file_name
    job_name = base_job_name + "-" + file_name
    job = JobObject(service_interface)
    job.setJobname(job_name)
    job.setApplication("python")  # Set the application being run
    job.setApplicationVersion("2.4")  # Set the application version, note this is an exact match
Example #5
0
# Set some job settings
application = "python"
version = "2.4"

print "INFO: Creating a Batch Job Object called " + batch_job_name
batch_jobs = BatchJobObject(service_interface, batch_job_name, group,
                            application, version)
batch_jobs.setConcurrentInputFileUploadThreads(
    5)  # Set the number of concurrent uploads
batch_jobs.setConcurrentJobCreationThreads(
    5)  # Set the number of concurrent jobs
batch_jobs.setDefaultNoCpus(1)
# Set the number of CPUs required
batch_jobs.setDefaultWalltimeInSeconds(300)
# Set the maximum walltime to 5 minutes
batch_jobs.setLocationsToExclude(["AUT"
                                  ])  # Create a blacklist of sites to exclude
# Currently the AUT location is not behaving, so always exclude it

print "INFO: Adding common files to Batch Job Object " + batch_job_name
batch_jobs.addInputFile(os.path.join(current_dir, dictionary_path))
batch_jobs.addInputFile(os.path.join(current_dir, "countbacon.py"))

print "INFO: Defining jobs from input directory"
job_count = 0
# One job per input file. NOTE(review): this snippet is cut off mid-statement
# below -- the setApplicationVersion(...) call continues past the excerpt.
for file_name in os.listdir(input_path):
    print "INFO: Defining job for " + file_name
    job_name = base_job_name + "-" + file_name
    job = JobObject(service_interface)
    job.setJobname(job_name)
    job.setApplication("python")  # Set the application being run
    job.setApplicationVersion(