            # Sync job statuses with the queue and move any finished results.
            job.update_jobs_status_from_queue()
            pipeline_utils.move_results()
            job.update_jobs_status_from_queue()
            job.status()
            job.rotate()
            #pipeline_utils.archive_logs()
        except Exception, e:
            # On a crash, optionally email a traceback before re-raising.
            if config.email.send_on_crash:
                msg = '*** Job pooler has crashed! ***\n\n'
                msg += 'Fatal error occurred while running job pool: %s\n\n' % str(e)
                msg += ''.join(traceback.format_exception(*sys.exc_info()))
                notification = mailer.ErrorMailer(msg, subject="Job Pooler crash!")
                notification.send()
            sys.stderr.write("Fatal error occurred!\n")
            raise
        # Sleep before the next iteration of the pooler loop.
        time.sleep(config.background.sleep)


if __name__ == '__main__':
    parser = pipeline_utils.PipelineOptions(usage="%prog [OPTIONS]", \
                                description="Start the job pooler.")
    options, args = parser.parse_args()

    try:
        main()
    except KeyboardInterrupt:
        print "Exiting..."
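# Example invocation (the script name is hypothetical; the pooler runs until
# interrupted, and Ctrl-C is caught by the KeyboardInterrupt handler above):
#   python job_pooler.py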
    for fn in fns:
        fn = os.path.abspath(fn)
        rows = jobtracker.query("SELECT * FROM files " \
                                "WHERE filename='%s' " \
                                "AND status IN ('added', 'downloaded')" % fn)
        if not len(rows):
            print "Cannot remove %s. Either file isn't tracked, " \
                  "or it doesn't have status 'added' or 'downloaded'." % fn
            continue
        rows = jobtracker.query("SELECT * " \
                                "FROM job_files, files " \
                                "WHERE job_files.file_id=files.id " \
                                "AND files.filename='%s'" % fn)
        if len(rows):
            print "Cannot remove %s. It is part of a job." % fn
            continue
        pipeline_utils.remove_file(fn)


if __name__ == '__main__':
    parser = pipeline_utils.PipelineOptions(usage="%prog FILE [FILE ...]", \
                        description="Remove files that are not " \
                                    "part of a job.")
    parser.add_option('-f', '--file', dest='files', action='append', \
                        help="File that should be removed.", default=[])
    options, args = parser.parse_args()
    main()
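# Example invocations (script name and path are hypothetical; files can be given
# as positional arguments or via -f/--file):
#   python remove_files.py /data/staging/old_beam.fits
#   python remove_files.py -f /data/staging/old_beam.fits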
def usage():
    exit("\nUsage: python add_files.py [directory to pick up file from]\n")


def main():
    files = args  # Leftover arguments on command line
    for g in options.fileglobs:
        files += glob.glob(g)

    for fn in files:
        if check_file(fn):
            try:
                id = create_download(fn)
                if type(id) == types.IntType:
                    print "File entry created (ID=%d): %s" % (id, fn)
            except Exception, e:
                print "Couldn't create an entry for: %s \n\t%s" % (fn, str(e))


if __name__ == "__main__":
    parser = pipeline_utils.PipelineOptions(usage="%prog [OPTIONS] FILES ...", \
                        description="Add files to the 'files' " \
                                    "table in the job-tracker database.")
    parser.add_option('-g', '--glob', dest='fileglobs', action='append', \
                        help="A (properly quoted) glob expression identifying " \
                             "files to add to the job-tracker DB.", \
                        default=[])
    options, args = parser.parse_args()
    main()
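# Example invocation (add_files.py is the name used in usage() above; the path and
# glob are hypothetical, and the glob is quoted so the shell doesn't expand it):
#   python add_files.py /data/incoming/beam0001.fits -g '/data/incoming/*.fits'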
    jobtracker.query(queries)

    if isrunning:
        print "Stopping job: %s" % job_submits[0]['queue_id']
        try:
            config.jobpooler.queue_manager.delete(job_submits[0]['queue_id'])
        except pipeline_utils.PipelineError, e:
            print "PipelineError: %s" % str(e)


if __name__ == "__main__":
    parser = pipeline_utils.PipelineOptions(usage="%prog [OPTIONS] QUEUE_ID [QUEUE_ID ...]", \
                        description="Stop a job running in the queue. " \
                                    "There are two ways to stop jobs: " \
                                    "1) Failing the job (i.e. the submission " \
                                    "counts towards the job's number of retries), " \
                                    "and 2) Removing the job (the submission " \
                                    "doesn't count towards retries). Both " \
                                    "possibilities are done safely, with respect " \
                                    "to the job-tracker DB. The default is to " \
                                    "remove the jobs (not fail).")
    parser.add_option('-q', '--queue-id', dest='queue_ids', action='append', \
                        help="A queue_id of a job to stop. Many -q/--queue-id " \
                             "options can be provided.", \
                        default=[])
    parser.add_option('-s', '--submit-id', dest='submit_ids', action='append', \
                        help="A jobsubmit_id of a job to stop. Many -s/--submit-id " \
                             "options can be provided.", \
                        default=[])
    parser.add_option('-f', '--fail', dest='fail', action='store_true', \
                        help="Remove jobs from the queue and mark them " \
                             "as 'failed' in the job-tracker database. " \
    fig.canvas.mpl_connect("key_press_event", \
                lambda e: (e.key in ('q', 'Q') and fig.close()))
    timer = fig.canvas.new_timer(10 * 1000)  # Time interval in milliseconds
    timer.add_callback(fig.update)
    timer.start()

    if options.plot_file:
        plt.savefig(options.plot_file)
    if not options.noninteractive:
        plt.show()


if __name__ == '__main__':
    parser = pipeline_utils.PipelineOptions(usage="%prog [OPTIONS]")
    parser.add_option("-n", "--noninteractive", action="store_true",
                        dest="noninteractive", help="Don't plot interactively",
                        default=False)
    parser.add_option("-f", "--file", dest="plot_file",
                        help="File to save plot to.", default=None)
    options, args = parser.parse_args()
    main(options)
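# Example invocations (script name and output file are hypothetical):
#   python monitor_status.py                    # show the interactive, self-refreshing plot
#   python monitor_status.py -n -f status.png   # save the plot without opening a window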
"WHERE id=%d" % jobid, \ fetchone=True) if row['status'] in ['new', 'retrying']: jobtracker.query("UPDATE jobs " \ "SET status='terminal_failure', " \ "updated_at='%s', " \ "details='Job was killed manually' " \ "WHERE id=%d" % \ (jobtracker.nowstr(), jobid)) print "Job's status has been set to 'terminal_failure'" pipeline_utils.clean_up(jobid) else: print "Only jobs whose status is 'waiting' or 'retrying' " \ "can be killed. (Current status of job %d: %s)" % \ (jobid, row['status']) if __name__ == '__main__': parser = pipeline_utils.PipelineOptions(usage="%prog ID [ID ...]", \ description="Kill a job. That is set its " \ "status as 'terminal_failure', and " \ "clean up its datafiles (if applicable). ") parser.add_option('-f', '--file', dest='files', action='append', \ help="File belonging to a job that should be killed.", default=[]) parser.add_option('-i', '--id', dest='jobids', action='append', type='int', \ help="ID number of a job that should be killed.", \ default=[]) options, args = parser.parse_args() main()