import sys

# set the commandline that needs to be executed
job.setCommandline('diff ' + file1Name + ' ' + file2Name)
job.addInputFileUrl(file1url)
job.addInputFileUrl(file2url)

# create the job on the backend and specify the VO to use
job.createJob("/ARCS/StartUp")

print 'Submitting job...'
# submit the job
job.submitJob()

print 'Waiting for the job to finish...'
# this waits until the job is finished, checking every 10 seconds
# (which would be too often for a real job)
finished = job.waitForJobToFinish(10)

if not finished:
    print "not finished yet."
    # kill the job on the backend anyway
    job.kill(True)
else:
    print 'Job finished. Status: ' + job.getStatusString(False)
    # download and cache the job's stdout and display its content
    print "Stdout: " + job.getStdOutContent()
    # download and cache the job's stderr and display its content
    print "Stderr: " + job.getStdErrContent()
    # kill and clean the job on the backend
    job.kill(True)

# don't forget to exit properly. this cleans up possible existing threads/executors
sys.exit()
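# The snippet above assumes a Grisu service interface login and a JobObject
# have already been set up. A minimal sketch of that preamble follows; the
# import paths and the loginCommandline() call are assumptions based on
# typical Grisu Jython scripts, so check them against your installation.
import sys
from grisu.frontend.control.login import LoginManager
from grisu.frontend.model.job import JobObject

si = LoginManager.loginCommandline()      # assumed interactive login helper

# hypothetical input files for the diff job
file1Name = 'file1.txt'
file2Name = 'file2.txt'
file1url = 'file:///home/user/file1.txt'  # hypothetical local paths
file2url = 'file:///home/user/file2.txt'

# job object bound to the backend session
job = JobObject(si)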
print "Job distribution for restarted jobs:" for subLoc in multiPartJob.getOptimizationResult().keySet(): resubmitted = True print subLoc + " : " +multiPartJob.getOptimizationResult().get(subLoc) else: print "Job not restarted (yet)." print "Job not finished yet. Waiting..." time.sleep(3) print "Multipartjob "+multiPartJob.getBatchJobname()+" finished." # finally, everything is ready. We could do a lot more here, but you get the idea... for job in multiPartJob.getJobs(): print "Job: "+job.getJobname()+", Status: "+job.getStatusString(False) print print "Stdout: " print job.getStdOutContent() print print "Stderr: " print job.getStdErrContent() print print print "Finished." # don't forget to exit properly. this cleans up possible existing threads/executors sys.exit()