Example #1
0
def main():
    """CLI entry point: read config, set up logging/redis, run an RQ worker."""
    args = parse_args()

    # Prepend user-supplied import paths (colon-separated) to sys.path.
    if args.path:
        sys.path = args.path.split(':') + sys.path

    settings = {}
    if args.config:
        settings = read_config_file(args.config)

    setup_default_arguments(args, settings)

    # Other default arguments: fall back to the config file for the DSN.
    if args.sentry_dsn is None:
        args.sentry_dsn = settings.get('SENTRY_DSN', None)

    # NOTE(review): args.verbose looks like a boolean flag here, while other
    # call sites pass a level string -- confirm setup_loghandlers accepts it.
    setup_loghandlers(args.verbose)
    setup_redis(args)

    try:
        queues = map(Queue, args.queues)
        w = Worker(queues, name=args.name)

        # Should we configure Sentry?
        if args.sentry_dsn:
            # Imported lazily so raven is only required when a DSN is set.
            from raven import Client
            from rq.contrib.sentry import register_sentry
            client = Client(args.sentry_dsn)
            register_sentry(client, w)

        w.work(burst=args.burst)
    except ConnectionError as e:
        # Redis unreachable: report and exit non-zero.
        print(e)
        sys.exit(1)
Example #2
0
def worker(site, queue, type="json", quiet=False, log=None):
    """Run a redis-queue worker pass for *site*.

    Connects to the frappe site, fetches the named redis queue and executes
    a dequeued task; the frappe connection is always torn down afterwards.

    ``type`` is unused here but kept for interface compatibility (and it
    shadows the builtin -- renaming it would break keyword callers).
    """
    global logger
    logger = log
    try:
        frappe.connect(site=site)
        frappe_logger = frappe.logger(__name__, with_more_info=False)
        frappe_logger.info(
            "Redis Worker starting with pid {}. Process will log to {}".format(
                os.getpid(), LOG_FILENAME))

        # empty init is required to get redis_queue from common_site_config.json
        redis_connection = get_redis_conn()
        if os.environ.get('CI'):
            setup_loghandlers('ERROR')
        with Connection(redis_connection):
            logging_level = "INFO"
            if quiet:
                logging_level = "WARNING"
            # BUG FIX: the queue lookup and task execution were dedented out
            # of the `with Connection(...)` block, so they ran outside the
            # redis connection context; they belong inside it.
            q = get_redis_queue(queue)
            q.dequeue_and_execute(logging_level, log, site)
    finally:
        frappe.destroy()
Example #3
0
    def run(self):
        """Arbiter main loop: dequeue jobs and spawn workers until stopped."""
        setup_loghandlers()
        self._install_signal_handlers()
        self.log.info('Arbiter started')
        qnames = self.queue_names()
        self.log.info('*** Listening on %s...' % green(', '.join(qnames)))
        while True:
            active = len(self.WORKERS)
            self.log.info("Loop: %r", active)
            self.log.info("Stopped: %r", self.stopped)
            if self.stopped:
                self.log.info('Stopping on request.')
                break

            # Naive back-off: when every worker slot is busy, sleep a second
            # and re-check rather than dequeueing work we cannot run.
            at_capacity = (active == self.number_of_processes)
            self.log.info("Exceded: %r", at_capacity)
            if at_capacity:
                time.sleep(1)
                continue

            result = self.dequeue_job_and_maintain_ttl(5)
            self.log.info("Result: %r", result)
            if result:
                job, _queue = result
                self.spawn_worker(job)
Example #4
0
    def _work(self, burst=False, logging_level="INFO",
              date_format=DEFAULT_LOGGING_DATE_FORMAT,
              log_format=DEFAULT_LOGGING_FORMAT,
              max_jobs=None, with_scheduler=False):
        """Starts the work loop.

        Pops and performs all jobs on the current list of queues.  When all
        queues are empty, block and wait for new jobs to arrive on any of the
        queues, unless `burst` mode is enabled.

        The return value indicates whether any jobs were processed.

        NOTE(review): ``date_format``, ``log_format``, ``max_jobs`` and
        ``with_scheduler`` are accepted for signature parity but are not used
        anywhere in this body -- confirm whether they should be forwarded.
        """
        setup_loghandlers(logging_level)
        self._install_signal_handlers()

        self.did_perform_work = False
        self.register_birth()
        self.log.info("RQ GEVENT worker (Greenlet pool size={0}) {1!r} started, version {2}".
                      format(self.gevent_pool.size, self.key, VERSION))
        self.set_state(WorkerStatus.STARTED)

        try:
            while True:
                try:
                    self.check_for_suspension(burst)

                    if self.should_run_maintenance_tasks:
                        self.clean_registries()

                    if self._stop_requested:
                        self.log.info('Stopping on request.')
                        break

                    # Block at most (default_worker_ttl - 60)s per dequeue so
                    # the worker heartbeat stays fresh; None in burst mode
                    # (presumably meaning "don't block" -- confirm in rq docs).
                    timeout = None if burst else max(1, self.default_worker_ttl - 60)

                    result = self.dequeue_job_and_maintain_ttl(timeout)
                    if result is None and burst:
                        self.log.info("RQ worker {0!r} done, quitting".format(self.key))

                        try:
                            # Make sure dependent jobs are enqueued before the
                            # final re-check of the queues.
                            get_hub().switch()
                        except LoopExit:
                            pass
                        result = self.dequeue_job_and_maintain_ttl(timeout)

                    if result is None:
                        break
                except StopRequested:
                    break

                job, queue = result
                self.execute_job(job, queue)

        finally:
            # Horses are job subprocesses; only the parent deregisters itself.
            if not self.is_horse:
                self.register_death()
        return self.did_perform_work
    def _work(self, burst=False):
        """Starts the work loop.

        Pops and performs all jobs on the current list of queues.  When all
        queues are empty, block and wait for new jobs to arrive on any of the
        queues, unless `burst` mode is enabled.

        The return value indicates whether any jobs were processed.
        """
        setup_loghandlers()
        self._install_signal_handlers()

        self.did_perform_work = False
        self.register_birth()
        self.log.info(
            "RQ gevent worker (greenlet pool size={0}) {1!r} started, version {2}".format(
                self.gevent_pool.size, self.key, VERSION
            )
        )
        self.set_state(WorkerStatus.STARTED)

        try:
            while True:
                try:
                    self.check_for_suspension(burst)

                    if self.should_run_maintenance_tasks:
                        self.clean_registries()

                    if self._stop_requested:
                        self.log.info("Stopping on request.")
                        break

                    # Block at most (default_worker_ttl - 60)s per dequeue so
                    # the worker heartbeat stays fresh; None in burst mode.
                    timeout = None if burst else max(1, self.default_worker_ttl - 60)

                    result = self.dequeue_job_and_maintain_ttl(timeout)
                    if result is None and burst:
                        self.log.info("RQ worker {0!r} done, quitting".format(self.key))

                        try:
                            # Let outstanding greenlets finish so dependent
                            # jobs get enqueued before the final re-check.
                            gevent.wait(self.gevent_greenlets)
                        except LoopExit:
                            pass
                        result = self.dequeue_job_and_maintain_ttl(timeout)

                    if result is None:
                        break
                except StopRequested:
                    break

                job, queue = result
                self.execute_job(job, queue)

        finally:
            # Horses are job subprocesses; only the parent deregisters itself.
            if not self.is_horse:
                self.register_death()
        return self.did_perform_work
Example #6
0
    def build_worker():
        """Construct an RQ worker bound to the configured queue list.

        The worker log level comes from the RQ_WORKER_LOG_LEVEL environment
        variable, defaulting to WARNING.
        """
        level = os.getenv('RQ_WORKER_LOG_LEVEL', 'WARNING')
        setup_loghandlers(level)
        log.info('worker log level set to {}'.format(level))

        worker = Worker(queue_list, connection=WorkerQueues.connection)
        log.info('worker created')
        return worker
Example #7
0
def setup_loghandlers_from_args(args):
    """Install log handlers at a level derived from --verbose/--quiet flags.

    Raises RuntimeError when both flags are set, since they conflict.
    """
    if args.verbose and args.quiet:
        raise RuntimeError("Flags --verbose and --quiet are mutually exclusive.")

    level = 'DEBUG' if args.verbose else ('WARNING' if args.quiet else 'INFO')
    setup_loghandlers(level)
Example #8
0
def setup_loghandlers_from_args(args):
    """Translate the --verbose/--quiet CLI flags into a logging level.

    Both flags together are contradictory and raise RuntimeError.
    """
    if args.verbose and args.quiet:
        raise RuntimeError("Flags --verbose and --quiet are mutually exclusive.")

    if args.verbose:
        level = 'DEBUG'
    else:
        level = 'WARNING' if args.quiet else 'INFO'
    setup_loghandlers(level)
Example #9
0
def setup_loghandlers_from_args(verbose, quiet, date_format, log_format):
    """Install log handlers at a level derived from the verbosity flags.

    Raises RuntimeError when both verbose and quiet are requested.
    """
    if verbose and quiet:
        raise RuntimeError("Flags --verbose and --quiet are mutually exclusive.")

    level = 'INFO'
    if verbose:
        level = 'DEBUG'
    elif quiet:
        level = 'WARNING'
    setup_loghandlers(level, date_format=date_format, log_format=log_format)
Example #10
0
File: helpers.py Project: nvie/rq
def setup_loghandlers_from_args(verbose, quiet, date_format, log_format):
    """Pick a log level from the verbosity flags and install handlers.

    verbose wins DEBUG, quiet wins WARNING, otherwise INFO; asking for both
    at once raises RuntimeError.
    """
    if verbose and quiet:
        raise RuntimeError("Flags --verbose and --quiet are mutually exclusive.")

    level = 'DEBUG' if verbose else 'WARNING' if quiet else 'INFO'
    setup_loghandlers(level, date_format=date_format, log_format=log_format)
Example #11
0
def start_worker(queue=None):
    """Start an rq worker connected to redis, monitoring the site queues."""
    # empty init is required to get redis_queue from common_site_config.json
    with frappe.init_site():
        redis_connection = get_redis_conn()

    # Log only errors when running under CI.
    if os.environ.get('CI'):
        setup_loghandlers('ERROR')

    with Connection(redis_connection):
        worker_queues = get_queue_list(queue)
        Worker(worker_queues, name=get_worker_name(queue)).work()
Example #12
0
def start_worker(queue=None):
	'''Wrapper to start rq worker. Connects to redis and monitors these queues.'''
	with frappe.init_site():
		# empty init is required to get redis_queue from common_site_config.json
		redis_connection = get_redis_conn()

	# Log only errors when running under CI.
	if os.environ.get('CI'):
		setup_loghandlers('ERROR')

	with Connection(redis_connection):
		queues = get_queue_list(queue)
		Worker(queues, name=get_worker_name(queue)).work()
Example #13
0
    def handle(self, *args, **options):
        """Run an RQ worker as a Django management command.

        Writes a PID file when requested, derives the logging level from
        Django's --verbosity, optionally registers Sentry, then enters the
        work loop. Exits non-zero on redis connection failure or when a DSN
        is given but sentry-sdk is missing.
        """
        pid = options.get('pid')
        if pid:
            with open(os.path.expanduser(pid), "w") as fp:
                fp.write(str(os.getpid()))

        # Verbosity is defined by default in BaseCommand for all commands
        verbosity = options.get('verbosity')
        if verbosity >= 2:
            level = 'DEBUG'
        elif verbosity == 0:
            level = 'WARNING'
        else:
            level = 'INFO'
        setup_loghandlers(level)

        # Command-line DSN wins; fall back to the Django settings module.
        # (This was previously computed twice with slightly different
        # None/falsy handling; the `or` form below was the assignment that
        # actually took effect, so only it is kept.)
        sentry_dsn = options.get('sentry-dsn') or getattr(
            settings, 'SENTRY_DSN', None)
        try:
            # Instantiate a worker
            worker_kwargs = {
                'worker_class': options['worker_class'],
                'queue_class': options['queue_class'],
                'job_class': options['job_class'],
                'name': options['name'],
                'default_worker_ttl': options['worker_ttl'],
            }
            w = get_worker(*args, **worker_kwargs)

            # Call use_connection to push the redis connection into LocalStack
            # without this, jobs using RQ's get_current_job() will fail
            use_connection(w.connection)
            # Close any opened DB connection before any fork
            reset_db_connections()

            if sentry_dsn:
                try:
                    from rq.contrib.sentry import register_sentry
                    register_sentry(sentry_dsn)
                except ImportError:
                    self.stdout.write(
                        self.style.ERROR(
                            "Please install sentry-sdk using `pip install sentry-sdk`"
                        ))
                    sys.exit(1)

            w.work(burst=options.get('burst', False))
        except ConnectionError as e:
            print(e)
            sys.exit(1)
Example #14
0
 def dequeue_and_execute(self, logging_level, log, site):
     """Block until a task is available on this queue, then run it.

     Uses a blocking BRPOP (timeout=0, i.e. wait forever), unpickles the
     payload and processes it. On failure the DB transaction is rolled back
     and the error is logged instead of re-raised; returns the task on
     success, otherwise None.
     """
     setup_loghandlers(logging_level)
     _, serialized_task = self.conn.brpop(self.name, timeout=0)
     if serialized_task:
         # NOTE(review): pickle.loads on queue payloads executes arbitrary
         # code if the redis instance is writable by untrusted parties.
         task = pickle.loads(serialized_task)
         try:
             task.process_task(log, site)
             return task
         except Exception as e:
             # Undo any half-applied DB work and drop pending after-commit
             # enqueues before logging the failure.
             frappe.db.rollback()
             frappe.flags.enqueue_after_commit = []
             frappe.logger().error(str(e))
             frappe.logger().error(frappe.get_traceback())
Example #15
0
def main():
    """Parse CLI options, connect to redis and run the RQ scheduler."""
    parser = argparse.ArgumentParser(description="Runs RQ scheduler")
    parser.add_argument("-H", "--host", default=os.environ.get("RQ_REDIS_HOST", "localhost"), help="Redis host")
    parser.add_argument(
        "-p", "--port", default=int(os.environ.get("RQ_REDIS_PORT", 6379)), type=int, help="Redis port number"
    )
    parser.add_argument("-d", "--db", default=int(os.environ.get("RQ_REDIS_DB", 0)), type=int, help="Redis database")
    parser.add_argument("-P", "--password", default=os.environ.get("RQ_REDIS_PASSWORD"), help="Redis password")
    parser.add_argument("--verbose", "-v", action="store_true", default=False, help="Show more output")
    parser.add_argument(
        "--url",
        "-u",
        default=os.environ.get("RQ_REDIS_URL"),
        help="URL describing Redis connection details. \
            Overrides other connection arguments if supplied.",
    )
    parser.add_argument(
        "-i",
        "--interval",
        default=60,
        type=int,
        help="How often the scheduler checks for new jobs to add to the \
            queue (in seconds).",
    )
    parser.add_argument("--path", default=".", help="Specify the import path.")
    parser.add_argument("--pid", help="A filename to use for the PID file.", metavar="FILE")

    args = parser.parse_args()

    # Prepend user-supplied import paths (colon-separated) to sys.path.
    if args.path:
        sys.path = args.path.split(":") + sys.path

    # Write our PID to the requested file so process managers can track us.
    if args.pid:
        pid = str(os.getpid())
        filename = args.pid
        with open(filename, "w") as f:
            f.write(pid)

    # A full redis URL overrides the individual host/port/db/password options.
    if args.url is not None:
        connection = Redis.from_url(args.url)
    else:
        connection = Redis(args.host, args.port, args.db, args.password)

    if args.verbose:
        level = "DEBUG"
    else:
        level = "INFO"
    setup_loghandlers(level)

    scheduler = Scheduler(connection=connection, interval=args.interval)
    scheduler.run()
Example #16
0
    def work(self, burst=False):
        """Starts the work loop.

        Pops and performs all jobs on the current list of queues.  When all
        queues are empty, block and wait for new jobs to arrive on any of the
        queues, unless `burst` mode is enabled.

        The return value indicates whether any jobs were processed.
        """
        setup_loghandlers()
        self._install_signal_handlers()

        did_perform_work = False
        self.register_birth()
        self.log.info('RQ worker started, version %s' % VERSION)
        self.set_state(WorkerStatus.STARTED)

        try:
            while True:
                if self._stop_requested:
                    self.log.info('Stopping on request.')
                    break

                # Block at most (default_worker_ttl - 60)s per dequeue so the
                # worker TTL can be refreshed; None in burst mode.
                timeout = None if burst else max(1, self.default_worker_ttl -
                                                 60)
                try:
                    result = self.dequeue_job_and_maintain_ttl(timeout)

                    if result is None and burst:
                        try:
                            # Make sure dependent jobs are enqueued before the
                            # final re-check of the queues.
                            get_hub().switch()
                        except LoopExit:
                            pass
                        result = self.dequeue_job_and_maintain_ttl(timeout)

                    if result is None:
                        break
                except StopRequested:
                    break

                job, queue = result
                self.execute_job(job, queue)
                # BUG FIX: this flag was never set, so the method always
                # returned False even after executing jobs, contradicting
                # the documented return value.
                did_perform_work = True

        finally:
            # Horses are job subprocesses; only the parent deregisters itself.
            if not self.is_horse:
                self.register_death()

        return did_perform_work
Example #17
0
def start_worker(queue=None, quiet = False):
	'''Wrapper to start rq worker. Connects to redis and monitors these queues.'''
	with dataent.init_site():
		# empty init is required to get redis_queue from common_site_config.json
		redis_connection = get_redis_conn()

	# Log only errors when running under CI.
	if os.environ.get('CI'):
		setup_loghandlers('ERROR')

	with Connection(redis_connection):
		queues = get_queue_list(queue)
		# quiet raises the worker's logging level from INFO to WARNING.
		logging_level = "INFO"
		if quiet:
			logging_level = "WARNING"
		Worker(queues, name=get_worker_name(queue)).work(logging_level = logging_level)
Example #18
0
    def handle(self, *args, **options):
        """Run an RQ worker as a Django management command.

        Writes a PID file when requested, derives the logging level from
        Django's --verbosity, optionally registers Sentry, then enters the
        work loop.
        """
        pid = options.get('pid')
        if pid:
            with open(os.path.expanduser(pid), "w") as pid_file:
                pid_file.write(str(os.getpid()))

        # Command-line DSN wins; fall back to the Django settings module.
        sentry_dsn = options.get('sentry-dsn')
        if sentry_dsn is None:
            sentry_dsn = getattr(settings, 'SENTRY_DSN', None)

        # Verbosity is defined by default in BaseCommand for all commands
        verbosity = options.get('verbosity')
        if verbosity >= 2:
            level = 'DEBUG'
        else:
            level = 'WARNING' if verbosity == 0 else 'INFO'
        setup_loghandlers(level)

        try:
            # Instantiate a worker
            worker = get_worker(*args, **{
                'worker_class': options['worker_class'],
                'queue_class': options['queue_class'],
                'job_class': options['job_class'],
                'name': options['name'],
                'default_worker_ttl': options['worker_ttl'],
            })

            # Call use_connection to push the redis connection into LocalStack
            # without this, jobs using RQ's get_current_job() will fail
            use_connection(worker.connection)
            # Close any opened DB connection before any fork
            reset_db_connections()

            if sentry_dsn:
                try:
                    from rq.contrib.sentry import register_sentry
                    register_sentry(sentry_dsn)
                except ImportError:
                    self.stdout.write(self.style.ERROR("Please install sentry-sdk using `pip install sentry-sdk`"))
                    sys.exit(1)

            worker.work(burst=options.get('burst', False))
        except ConnectionError as e:
            print(e)
            sys.exit(1)
Example #19
0
    def handle(self, *args, **options):
        """Run an RQ worker as a Django management command.

        Writes a PID file when requested, configures logging from Django's
        --verbosity, optionally configures Sentry, then enters the work loop
        (optionally with the RQ scheduler attached).
        """
        pid = options.get('pid')
        if pid:
            with open(os.path.expanduser(pid), "w") as pid_file:
                pid_file.write(str(os.getpid()))

        # Verbosity is defined by default in BaseCommand for all commands
        verbosity = options.get('verbosity')
        if verbosity >= 2:
            level = 'DEBUG'
        else:
            level = 'WARNING' if verbosity == 0 else 'INFO'
        setup_loghandlers(level)

        # pop (not get): the DSN must be removed before **options is
        # forwarded to configure_sentry below.
        sentry_dsn = options.pop('sentry_dsn')
        if sentry_dsn:
            try:
                configure_sentry(sentry_dsn, **options)
            except ImportError:
                self.stderr.write(
                    "Please install sentry-sdk using `pip install sentry-sdk`")
                sys.exit(1)

        try:
            # Instantiate a worker
            worker = get_worker(*args, **{
                'worker_class': options['worker_class'],
                'queue_class': options['queue_class'],
                'job_class': options['job_class'],
                'name': options['name'],
                'default_worker_ttl': options['worker_ttl'],
            })

            # Call use_connection to push the redis connection into LocalStack
            # without this, jobs using RQ's get_current_job() will fail
            use_connection(worker.connection)
            # Close any opened DB connection before any fork
            reset_db_connections()

            worker.work(burst=options.get('burst', False),
                        with_scheduler=options.get('with_scheduler', False),
                        logging_level=level)
        except ConnectionError as e:
            self.stderr.write(str(e))
            sys.exit(1)
Example #20
0
def main():
    """CLI entry point: read config, set up logging/redis, run an RQ worker."""
    args = parse_args()

    # Prepend user-supplied import paths (colon-separated) to sys.path.
    if args.path:
        sys.path = args.path.split(':') + sys.path

    settings = {}
    if args.config:
        settings = read_config_file(args.config)

    setup_default_arguments(args, settings)

    # Other default arguments: config file first, then the environment.
    if args.sentry_dsn is None:
        args.sentry_dsn = settings.get('SENTRY_DSN',
                                       os.environ.get('SENTRY_DSN', None))

    if args.verbose and args.quiet:
        raise RuntimeError(
            "Flags --verbose and --quiet are mutually exclusive.")

    if args.verbose:
        level = 'DEBUG'
    elif args.quiet:
        level = 'WARNING'
    else:
        level = 'INFO'
    setup_loghandlers(level)
    setup_redis(args)

    cleanup_ghosts()

    try:
        queues = map(Queue, args.queues)
        w = Worker(queues, name=args.name)

        # Should we configure Sentry?
        if args.sentry_dsn:
            # Imported lazily so raven is only required when a DSN is set.
            from raven import Client
            from rq.contrib.sentry import register_sentry
            client = Client(args.sentry_dsn)
            register_sentry(client, w)

        w.work(burst=args.burst)
    except ConnectionError as e:
        # Redis unreachable: report and exit non-zero.
        print(e)
        sys.exit(1)
    def work(self, burst=False):
        """Starts the work loop.

        Pops and performs all jobs on the current list of queues.  When all
        queues are empty, block and wait for new jobs to arrive on any of the
        queues, unless `burst` mode is enabled.

        The return value indicates whether any jobs were processed.
        """
        setup_loghandlers()
        self._install_signal_handlers()

        self.did_perform_work = False
        self.register_birth()
        self.log.info('RQ worker started, version %s' % VERSION)
        self.set_state('starting')
        try:
            while True:
                if self.stopped:
                    self.log.info('Stopping on request.')
                    break

                # Block at most (default_worker_ttl - 60)s per dequeue so the
                # worker TTL can be refreshed; None in burst mode.
                timeout = None if burst else max(1, self.default_worker_ttl - 60)
                try:
                    result = self.dequeue_job_and_maintain_ttl(timeout)

                    if result is None and burst:
                        try:
                            # Let child greenlets finish so dependent jobs get
                            # enqueued before the final re-check of the queues.
                            gevent.wait(self.children)
                        except LoopExit:
                            pass
                        result = self.dequeue_job_and_maintain_ttl(timeout)

                    if result is None:
                        break
                except StopRequested:
                    break

                job, queue = result
                self.execute_job(job, queue)

        finally:
            # Horses are job subprocesses; only the parent deregisters itself.
            if not self.is_horse:
                self.register_death()
        return self.did_perform_work
Example #22
0
def main():
    """CLI entry point: read config, set up logging and redis, run a worker."""
    args = parse_args()

    # Prepend user-supplied import paths (colon-separated) to sys.path.
    if args.path:
        sys.path = args.path.split(':') + sys.path

    settings = read_config_file(args.config) if args.config else {}

    setup_default_arguments(args, settings)

    # Other default arguments: config file first, then the environment.
    if args.sentry_dsn is None:
        args.sentry_dsn = settings.get('SENTRY_DSN',
                                       os.environ.get('SENTRY_DSN', None))

    if args.verbose and args.quiet:
        raise RuntimeError("Flags --verbose and --quiet are mutually exclusive.")

    level = 'DEBUG' if args.verbose else ('WARNING' if args.quiet else 'INFO')
    setup_loghandlers(level)
    setup_redis(args)

    cleanup_ghosts()

    try:
        worker = Worker(map(Queue, args.queues), name=args.name)

        # Should we configure Sentry?
        if args.sentry_dsn:
            # Imported lazily so raven is only required when a DSN is set.
            from raven import Client
            from rq.contrib.sentry import register_sentry
            register_sentry(Client(args.sentry_dsn), worker)

        worker.work(burst=args.burst)
    except ConnectionError as e:
        print(e)
        sys.exit(1)
Example #23
0
def main():
    """Parse CLI options, connect to redis and run the RQ scheduler."""
    parser = argparse.ArgumentParser(description='Runs RQ scheduler')
    parser.add_argument('-H', '--host', default=os.environ.get('RQ_REDIS_HOST', 'localhost'), help="Redis host")
    parser.add_argument('-p', '--port', default=int(os.environ.get('RQ_REDIS_PORT', 6379)), type=int, help="Redis port number")
    parser.add_argument('-d', '--db', default=int(os.environ.get('RQ_REDIS_DB', 0)), type=int, help="Redis database")
    parser.add_argument('-P', '--password', default=os.environ.get('RQ_REDIS_PASSWORD'), help="Redis password")
    parser.add_argument('--verbose', '-v', action='store_true', default=False, help='Show more output')
    parser.add_argument('--url', '-u', default=os.environ.get('RQ_REDIS_URL')
        , help='URL describing Redis connection details. \
            Overrides other connection arguments if supplied.')
    parser.add_argument('-i', '--interval', default=60.0, type=float
        , help="How often the scheduler checks for new jobs to add to the \
            queue (in seconds, can be floating-point for more precision).")
    parser.add_argument('--path', default='.', help='Specify the import path.')
    parser.add_argument('--pid', help='A filename to use for the PID file.', metavar='FILE')
    
    args = parser.parse_args()
    
    # Prepend user-supplied import paths (colon-separated) to sys.path.
    if args.path:
        sys.path = args.path.split(':') + sys.path
    
    # Write our PID to the requested file so process managers can track us.
    if args.pid:
        pid = str(os.getpid())
        filename = args.pid
        with open(filename, 'w') as f:
            f.write(pid)
    
    # A full redis URL overrides the individual host/port/db/password options.
    if args.url is not None:
        connection = Redis.from_url(args.url)
    else:
        connection = Redis(args.host, args.port, args.db, args.password)

    if args.verbose:
        level = 'DEBUG'
    else:
        level = 'INFO'
    setup_loghandlers(level)

    scheduler = Scheduler(connection=connection, interval=args.interval)
    scheduler.run()
Example #24
0
def main():
    """Parse CLI options, connect to redis and run the RQ scheduler."""
    parser = argparse.ArgumentParser(description='Runs RQ scheduler')
    parser.add_argument('-H', '--host', default=os.environ.get('RQ_REDIS_HOST', 'localhost'), help="Redis host")
    parser.add_argument('-p', '--port', default=int(os.environ.get('RQ_REDIS_PORT', 6379)), type=int, help="Redis port number")
    parser.add_argument('-d', '--db', default=int(os.environ.get('RQ_REDIS_DB', 0)), type=int, help="Redis database")
    parser.add_argument('-P', '--password', default=os.environ.get('RQ_REDIS_PASSWORD'), help="Redis password")
    parser.add_argument('--verbose', '-v', action='store_true', default=False, help='Show more output')
    parser.add_argument('--url', '-u', default=os.environ.get('RQ_REDIS_URL')
        , help='URL describing Redis connection details. \
            Overrides other connection arguments if supplied.')
    parser.add_argument('-i', '--interval', default=60, type=int
        , help="How often the scheduler checks for new jobs to add to the \
            queue (in seconds).")
    parser.add_argument('--path', default='.', help='Specify the import path.')
    parser.add_argument('--pid', help='A filename to use for the PID file.', metavar='FILE')
    
    args = parser.parse_args()
    
    # Prepend user-supplied import paths (colon-separated) to sys.path.
    if args.path:
        sys.path = args.path.split(':') + sys.path
    
    # Write our PID to the requested file so process managers can track us.
    if args.pid:
        pid = str(os.getpid())
        filename = args.pid
        with open(filename, 'w') as f:
            f.write(pid)
    
    # A full redis URL overrides the individual host/port/db/password options.
    if args.url is not None:
        connection = Redis.from_url(args.url)
    else:
        connection = Redis(args.host, args.port, args.db, args.password)

    if args.verbose:
        level = 'DEBUG'
    else:
        level = 'INFO'
    setup_loghandlers(level)

    scheduler = Scheduler(connection=connection, interval=args.interval)
    scheduler.run()
Example #25
0
def run_scheduler():
    """Run the rq scheduler forever against the configured redis master."""
    setup_loghandlers('DEBUG')
    conn_kwargs = {
        'db': app_settings.config.get('REDIS_DB') or 0,
        'password': app_settings.config.get('REDIS_PWD')
    }
    direct_master = all(
        app_settings.config.get(attr)
        for attr in ['REDIS_MASTER_DNS', 'REDIS_PORT'])
    if direct_master:
        # Explicit master host/port configured: connect to it directly.
        master = StrictRedis(host=app_settings.config['REDIS_MASTER_DNS'],
                             port=app_settings.config['REDIS_PORT'],
                             **conn_kwargs)
    else:
        # Otherwise discover the master through redis sentinel.
        sentinel = Sentinel(app_settings.config['REDIS_SENTINEL'])
        master = sentinel.master_for(app_settings.config['REDIS_MASTER'],
                                     **conn_kwargs)
    scheduler = Scheduler(connection=master)
    while True:
        try:
            scheduler.run()
        except ValueError:
            # Presumably another scheduler instance is already registered
            # (TODO confirm) -- back off and retry in ten minutes.
            sleep(600)
Example #26
0
def main():
    """CLI entry point: read config, set up logging/redis, run an RQ worker."""
    args = parse_args()

    # Prepend user-supplied import paths (colon-separated) to sys.path.
    if args.path:
        sys.path = args.path.split(':') + sys.path

    settings = {}
    if args.config:
        settings = read_config_file(args.config)

    setup_default_arguments(args, settings)

    # Other default arguments: fall back to the config file for the DSN.
    if args.sentry_dsn is None:
        args.sentry_dsn = settings.get('SENTRY_DSN', None)

    # NOTE(review): args.verbose looks like a boolean flag here, while other
    # call sites pass a level string -- confirm setup_loghandlers accepts it.
    setup_loghandlers(args.verbose)
    setup_redis(args)

    cleanup_ghosts()

    try:
        queues = map(Queue, args.queues)
        w = Worker(queues, name=args.name)

        # Should we configure Sentry?
        if args.sentry_dsn:
            # Imported lazily so raven is only required when a DSN is set.
            from raven import Client
            from rq.contrib.sentry import register_sentry
            client = Client(args.sentry_dsn)
            register_sentry(client, w)

        w.work(burst=args.burst)
    except ConnectionError as e:
        # Redis unreachable: report and exit non-zero.
        print(e)
        sys.exit(1)
Example #27
0
def main():
    """Run the rq scheduler, resolving Redis settings from (lowest to highest
    precedence) environment variables, an optional rq config file, and
    command-line arguments.
    """
    # Pass 0: a minimal pre-parser so -c/--config can be read before the
    # real parser (whose defaults depend on the config) is built.
    pre_parser = argparse.ArgumentParser(
        add_help=False  # help will be picked up later when we redfine parser
    )
    pre_parser.add_argument('-c', "--config", help='Use an rq config file')
    args, remaining_argv = pre_parser.parse_known_args()

    # config, pass 1: read environment vars
    config = {
        KEY_HOST: os.environ.get('RQ_REDIS_HOST', 'localhost'),
        KEY_PORT: int(os.environ.get('RQ_REDIS_PORT', 6379)),
        KEY_DB: int(os.environ.get('RQ_REDIS_DB', 0)),
        KEY_PASSWORD: os.environ.get('RQ_REDIS_PASSWORD'),
        KEY_URL: os.environ.get('RQ_REDIS_URL'),
    }

    # config, pass 2: read config file, mapping rq settings onto our keys
    if args.config:
        # bit of a hack, this, but does allow helpers.read_config_file to work...
        sys.path.insert(0, os.path.dirname(os.path.realpath(args.config)))
        rq_config = helpers.read_config_file(args.config)
        for our_key, rq_key in ((KEY_URL, "REDIS_URL"),
                                (KEY_HOST, "REDIS_HOST"),
                                (KEY_PORT, "REDIS_PORT"),
                                (KEY_DB, "REDIS_DB"),
                                (KEY_PASSWORD, "REDIS_PASSWORD")):
            config[our_key] = rq_config.get(rq_key, config[our_key])

    # config, pass 3: read commandline args. overwrites any other config.
    parser = argparse.ArgumentParser(
        parents=[pre_parser]  # inherit the -c/--config option
    )
    parser.add_argument('-H', '--host', default=config[KEY_HOST], help="Redis host")
    parser.add_argument('-p', '--port', default=config[KEY_PORT], type=int, help="Redis port number")
    parser.add_argument('-d', '--db', default=config[KEY_DB], type=int, help="Redis database")
    parser.add_argument('-P', '--password', default=config[KEY_PASSWORD], help="Redis password")
    parser.add_argument('--verbose', '-v', action='store_true', default=False, help='Show more output')
    parser.add_argument('--url', '-u', default=config[KEY_URL]
        , help='URL describing Redis connection details. \
            Overrides other connection arguments if supplied.')
    parser.add_argument('-i', '--interval', default=60.0, type=float
        , help="How often the scheduler checks for new jobs to add to the \
            queue (in seconds, can be floating-point for more precision).")
    parser.add_argument('--path', default='.', help='Specify the import path.')
    parser.add_argument('--pid', help='A filename to use for the PID file.', metavar='FILE')

    args = parser.parse_args(remaining_argv)

    if args.path:
        sys.path = args.path.split(':') + sys.path

    # Optionally record our PID for process supervisors.
    if args.pid:
        with open(args.pid, 'w') as pid_file:
            pid_file.write(str(os.getpid()))

    # A full URL, when given, takes precedence over individual settings.
    if args.url is not None:
        connection = Redis.from_url(args.url)
    else:
        connection = Redis(args.host, args.port, args.db, args.password)

    setup_loghandlers('DEBUG' if args.verbose else 'INFO')

    Scheduler(connection=connection, interval=args.interval).run()
Example #28
0
    def handle(self, *args, **options):
        """Run an RQ worker, optionally writing a PID file and reporting
        job failures to Sentry via raven.

        Raises SystemExit(1) on Redis connection errors, a missing raven
        package, or an invalid Sentry DSN.
        """
        pid = options.get('pid')
        if pid:
            with open(os.path.expanduser(pid), "w") as fp:
                fp.write(str(os.getpid()))

        # Resolve the Sentry DSN once: an explicit --sentry-dsn (including the
        # empty string) wins over settings.SENTRY_DSN, so --sentry-dsn=""
        # disables Sentry as the error message below promises. (A second
        # computation using `or` previously clobbered this and made an empty
        # DSN fall back to settings, defeating the disable switch.)
        sentry_dsn = options.get('sentry-dsn')
        if sentry_dsn is None:
            sentry_dsn = getattr(settings, 'SENTRY_DSN', None)

        # Verbosity is defined by default in BaseCommand for all commands
        verbosity = options.get('verbosity')
        if verbosity >= 2:
            level = 'DEBUG'
        elif verbosity == 0:
            level = 'WARNING'
        else:
            level = 'INFO'
        setup_loghandlers(level)

        try:
            # Instantiate a worker
            worker_kwargs = {
                'worker_class': options['worker_class'],
                'queue_class': options['queue_class'],
                'job_class': options['job_class'],
                'name': options['name'],
                'default_worker_ttl': options['worker_ttl'],
            }
            w = get_worker(*args, **worker_kwargs)

            # Call use_connection to push the redis connection into LocalStack
            # without this, jobs using RQ's get_current_job() will fail
            use_connection(w.connection)
            # Close any opened DB connection before any fork
            reset_db_connections()

            if sentry_dsn:
                try:
                    from raven import Client
                    from raven.exceptions import InvalidDsn
                    from raven.transport.http import HTTPTransport
                    from rq.contrib.sentry import register_sentry
                    try:
                        client = Client(sentry_dsn, transport=HTTPTransport)
                        register_sentry(client, w)
                    except InvalidDsn:
                        self.stdout.write(
                            self.style.ERROR(
                                "Invalid DSN. If you use `sentry-sdk` package you have to disable the django-rq sentry plugin by setting `--sentry-dsn=\"\"`."
                            ))
                        sys.exit(1)
                except ImportError:
                    self.stdout.write(
                        self.style.ERROR(
                            "Please install sentry. For example `pip install raven`"
                        ))
                    sys.exit(1)

            w.work(burst=options.get('burst', False))
        except ConnectionError as e:
            print(e)
            sys.exit(1)
Example #29
0
import sys
import os
from rq import Queue, Worker, Connection
from rq.contrib.sentry import register_sentry
from rq.logutils import setup_loghandlers

# Make the parent of this script's directory importable so the `pyhackers`
# imports below resolve when the script is run directly.
current_dir = os.path.dirname(os.path.abspath(__file__))
source_dir = os.path.dirname(current_dir)

sys.path.insert(0, source_dir)

if __name__ == '__main__':
    # Tell rq what Redis connection to use
    # NOTE(review): start_app(soft=True) is imported lazily so sys.path is
    # already patched; presumably `soft` skips heavyweight setup — confirm.
    from pyhackers.app import start_app
    start_app(soft=True)
    from pyhackers.sentry import sentry_client

    setup_loghandlers("DEBUG")

    # Connection() with no arguments lets rq fall back to its default Redis.
    with Connection():
        q = Queue()  # rq's default queue
        w = Worker(q)

        # Report failed jobs to Sentry through the shared raven client.
        register_sentry(sentry_client, w)
        w.work()  # blocks, processing jobs until the worker is stopped