def aux(args):
    # Worker log output goes to a colorized stream handler.
    logger = logging.getLogger('mq.Worker')
    logger.setLevel(logging.INFO)
    hdlr = ColorStreamHandler()
    logger.addHandler(hdlr)

    # Values from the config module win; CLI arguments are the fallback.
    config = config_dict(args.config)
    tags = config.get('TAGS', ()) or args.tags
    queues = config.get('QUEUES', ()) or args.queues

    factory = MTQConnection.from_config(config)
    worker = factory.new_worker(queues=queues, tags=tags,
                                log_worker_output=args.log_output,
                                poll_interval=args.poll_interval)

    # Optional hooks supplied by the config module.
    if config.get('exception_handler'):
        worker.push_exception_handler(config['exception_handler'])
    if config.get('pre_call'):
        worker._pre_call = config.get('pre_call')
    if config.get('post_call'):
        worker._post_call = config.get('post_call')

    # With --job-id, run that single job and exit; otherwise enter the work loop.
    if args.job_id:
        job = factory.get_job(args.job_id)
        if job is None:
            worker.logger.error('No job %s' % args.job_id)
            return
        return worker.process_job(job)

    return worker.work(one=args.one, batch=args.batch)
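# --- Example (not part of mtq): a minimal settings module for aux() ---------
# aux() loads a Python module via config_dict(args.config) and reads the keys
# used above (TAGS, QUEUES, exception_handler, pre_call, post_call); the same
# module is handed to MTQConnection.from_config(), which reads its own
# connection settings from it.  The hook signatures below are assumptions for
# illustration only; check the Worker implementation for the arguments it
# actually passes.
#
#     # mtq_settings.py (hypothetical module passed via --config)
#     TAGS = ('gpu',)                   # worker only takes jobs carrying these tags
#     QUEUES = ('default', 'images')    # queues this worker consumes
#
#     def exception_handler(job, exc_info):   # hypothetical signature
#         'Registered through worker.push_exception_handler(...)'
#
#     def pre_call(job):                       # hypothetical signature
#         'Assigned to worker._pre_call; runs before each job'
#
#     def post_call(job):                      # hypothetical signature
#         'Assigned to worker._post_call; runs after each job'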
def main():
    parser = ArgumentParser(description=__doc__, version='0.0')
    group = parser.add_mutually_exclusive_group(required=True)
    group.add_argument('-j', '--job-id', type=ObjectId)
    group.add_argument('-w', '--worker-name')
    group.add_argument('--worker-id', type=ObjectId)
    parser.add_argument('-f', '--follow', action='store_true',
                        help=("Don't stop when the end of the file is reached, "
                              'but rather wait for additional data to be '
                              'appended to the input.'))

    args = parser.parse_args()
    factory = MTQConnection.from_config()

    # Tail either a single job's log or a worker's log.
    if args.job_id:
        stream = factory.job_stream(args.job_id)
    elif args.worker_name or args.worker_id:
        stream = factory.worker_stream(args.worker_name, args.worker_id)

    try:
        for line in stream.loglines(args.follow):
            sys.stdout.write(line)
    except KeyboardInterrupt:
        pass
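# --- Example (not part of mtq): tailing a job's log from Python -------------
# A minimal sketch using only the calls exercised by main() above; the
# ObjectId value is a placeholder and, as above, from_config() is assumed to
# fall back to default settings when called without arguments.
#
#     factory = MTQConnection.from_config()
#     stream = factory.job_stream(ObjectId('...'))   # placeholder job id
#     for line in stream.loglines(True):             # True == follow, like -f
#         sys.stdout.write(line)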
def main():
    parser = ArgumentParser(description=__doc__,
                            version='Mongo Task Queue (mtq) v%s' % mtq.__version__)
    parser.add_argument('-c', '--config',
                        help='Python module containing MTQ settings.')
    sp = parser.add_subparsers()

    wparser = sp.add_parser('working',
                            help=('Set all workers as not working. '
                                  'Live workers will reset their working status '
                                  'on next check-in'))
    wparser.set_defaults(main=working)

    fparser = sp.add_parser('failed', help='Flag failed jobs as fixed')
    fparser.set_defaults(main=failed)
    group = fparser.add_mutually_exclusive_group(required=True)
    group.add_argument('-a', '--all', action='store_true', help='All jobs')
    group.add_argument('-f', '--func', help='All jobs with function name')
    group.add_argument('-i', '--id', type=ObjectId, help='Job Id')

    fparser = sp.add_parser('finish',
                            help='Flag unfinished jobs as finished (use with caution)')
    fparser.set_defaults(main=finish)
    fparser.add_argument('-i', '--id', type=ObjectId, required=True, help='Job Id')

    sparser = sp.add_parser('shutdown',
                            help=('Schedule workers for shutdown. '
                                  'Workers will self-terminate on next check-in'))
    sparser.add_argument('-s', '--status', type=int, help='Status code', default=1)
    sparser.set_defaults(main=shutdown)
    group = sparser.add_mutually_exclusive_group(required=True)
    group.add_argument('-a', '--all', action='store_true', help='All workers')
    group.add_argument('-k', '--hostname', help='Workers only running on host')
    group.add_argument('-n', '--name', help='Worker name')
    group.add_argument('-i', '--id', type=ObjectId, help='Worker Id')

    args = parser.parse_args()
    config = config_dict(args.config)
    factory = MTQConnection.from_config(config)
    # Dispatch to the handler selected via set_defaults(main=...).
    args.main(factory, args)
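# --- Example (not part of mtq): how a subcommand handler plugs in -----------
# Each handler registered with set_defaults(main=...) is invoked as
# handler(factory, args) by the dispatch at the end of main().  The 'requeue'
# command below is hypothetical; the real working/failed/finish/shutdown
# handlers are defined elsewhere in mtq.
#
#     def requeue(factory, args):
#         print('would re-enqueue job %s using %r' % (args.id, factory))
#
#     rparser = sp.add_parser('requeue', help='Hypothetical: re-enqueue a job')
#     rparser.add_argument('-i', '--id', type=ObjectId, required=True, help='Job Id')
#     rparser.set_defaults(main=requeue)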
def main():
    parser = ArgumentParser(description=__doc__, version='0.0')
    parser.add_argument('-c', '--config',
                        help='Python module containing MTQ settings.')
    parser.add_argument('-m', '--max-age',
                        help=('Maximum age of jobs (e.g. 1d). The unit may be one of '
                              's (seconds), m (minutes), h (hours) or d (days)'),
                        type=max_age, dest='since', metavar='AGE', default=None)

    group = parser.add_mutually_exclusive_group(required=True)
    group.add_argument('-q', '--queues', action='store_const', const=queue_stats,
                       dest='action', help='print stats on queues')
    group.add_argument('-w', '--worker', action='store_const', const=worker_stats,
                       dest='action', help='print stats on workers')
    group.add_argument('-j', '--jobs', action='store_const', const=print_job_stats,
                       dest='action', help='print stats on jobs')
    group.add_argument('-s', '--storage', '--db', action='store_const',
                       const=print_db_stats, dest='action',
                       help='print stats on database')

    args = parser.parse_args()
    config = config_dict(args.config)
    factory = MTQConnection.from_config(config)
    # Run the stats function selected by the mutually exclusive flags.
    args.action(factory, args)
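# --- Example (not part of mtq): what a max_age-style converter might do -----
# The real max_age helper passed as type= above is defined elsewhere in mtq;
# this self-contained sketch only illustrates the documented format
# ('30s', '15m', '2h', '1d') and assumes the converter yields a cutoff
# timestamp for the 'since' filter.
import re
from datetime import datetime, timedelta


def max_age_example(value):
    # Accept an integer followed by one of s/m/h/d, e.g. '2h' or '1d'.
    match = re.match(r'^(\d+)([smhd])$', value)
    if not match:
        # argparse reports a ValueError from a type= callable as a usage error.
        raise ValueError('expected something like 30s, 15m, 2h or 1d')
    amount, unit = int(match.group(1)), match.group(2)
    seconds_per_unit = {'s': 1, 'm': 60, 'h': 3600, 'd': 86400}[unit]
    return datetime.now() - timedelta(seconds=amount * seconds_per_unit)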