Example #1
def queue_json_publish(queue_name, event, processor, call_consume_in_tests=False):
    # type: (str, Union[Dict[str, Any], str], Callable[[Any], None], bool) -> None
    # most events are dicts, but zerver.middleware.write_log_line uses a str
    with queue_lock:
        if settings.USING_RABBITMQ:
            get_queue_client().json_publish(queue_name, event)
        elif call_consume_in_tests:
            # Must be imported here: A top section import leads to obscure not-defined-ish errors.
            from zerver.worker.queue_processors import get_worker
            get_worker(queue_name).consume_wrapper(event)  # type: ignore # https://github.com/python/mypy/issues/3360
        else:
            processor(event)
Example #2
def queue_json_publish(queue_name: str,
                       event: Dict[str, Any],
                       processor: Callable[[Any], None] = None) -> None:
    # most events are dicts, but zerver.middleware.write_log_line uses a str
    with queue_lock:
        if settings.USING_RABBITMQ:
            get_queue_client().json_publish(queue_name, event)
        elif processor:
            processor(event)
        else:
            # Must be imported here: A top section import leads to obscure not-defined-ish errors.
            from zerver.worker.queue_processors import get_worker
            get_worker(queue_name).consume_wrapper(event)
Example #3
def queue_json_publish(queue_name: str,
                       event: Union[Dict[str, Any], str],
                       processor: Callable[[Any], None]=None) -> None:
    # most events are dicts, but zerver.middleware.write_log_line uses a str
    with queue_lock:
        if settings.USING_RABBITMQ:
            get_queue_client().json_publish(queue_name, event)
        elif processor:
            processor(event)
        else:
            # Must be imported here: A top section import leads to obscure not-defined-ish errors.
            from zerver.worker.queue_processors import get_worker
            get_worker(queue_name).consume_wrapper(event)
Example #4
def queue_json_publish(
    queue_name: str,
    event: Dict[str, Any],
    processor: Optional[Callable[[Any], None]] = None,
) -> None:
    if settings.USING_RABBITMQ:
        get_queue_client().json_publish(queue_name, event)
    elif processor:
        processor(event)
    else:
        # Must be imported here: A top section import leads to circular imports
        from zerver.worker.queue_processors import get_worker

        get_worker(queue_name).consume_single_event(event)
Example #5
def queue_json_publish(
    queue_name: str,
    event: Dict[str, Any],
    processor: Optional[Callable[[Any], None]] = None,
) -> None:
    with queue_lock:
        if settings.USING_RABBITMQ:
            get_queue_client().json_publish(queue_name, event)
        elif processor:
            processor(event)
        else:
            # Must be imported here: A top section import leads to obscure not-defined-ish errors.
            from zerver.worker.queue_processors import get_worker
            get_worker(queue_name).consume_wrapper(event)
Example #6
    def __init__(self, queue_name: str, logger: logging.Logger) -> None:
        threading.Thread.__init__(self)
        self.logger = logger
        self.queue_name = queue_name

        with log_and_exit_if_exception(logger, queue_name, threaded=True):
            self.worker = get_worker(queue_name)
Example #7
    def handle(self, *args: Any, **options: Any) -> None:
        logging.basicConfig()
        logger = logging.getLogger("process_queue")

        def exit_with_three(signal: int, frame: Optional[FrameType]) -> None:
            """
            This process is watched by Django's autoreload, so exiting
            with status code 3 will cause this process to restart.
            """
            logger.warning("SIGUSR1 received. Restarting this queue processor.")
            sys.exit(3)

        if not settings.USING_RABBITMQ:
            # Make the warning silent when running the tests
            if settings.TEST_SUITE:
                logger.info("Not using RabbitMQ queue workers in the test suite.")
            else:
                logger.error("Cannot run a queue processor when USING_RABBITMQ is False!")
            raise CommandError

        def run_threaded_workers(queues: List[str], logger: logging.Logger) -> None:
            cnt = 0
            for queue_name in queues:
                if not settings.DEVELOPMENT:
                    logger.info("launching queue worker thread %s", queue_name)
                cnt += 1
                td = ThreadedWorker(queue_name, logger)
                td.start()
            assert len(queues) == cnt
            logger.info("%d queue worker threads were launched", cnt)

        if options["all"]:
            signal.signal(signal.SIGUSR1, exit_with_three)
            autoreload.run_with_reloader(run_threaded_workers, get_active_worker_queues(), logger)
        elif options["multi_threaded"]:
            signal.signal(signal.SIGUSR1, exit_with_three)
            queues = options["multi_threaded"]
            autoreload.run_with_reloader(run_threaded_workers, queues, logger)
        else:
            queue_name = options["queue_name"]
            worker_num = options["worker_num"]

            def signal_handler(signal: int, frame: Optional[FrameType]) -> None:
                logger.info("Worker %d disconnecting from queue %s", worker_num, queue_name)
                worker.stop()
                sys.exit(0)

            logger.info("Worker %d connecting to queue %s", worker_num, queue_name)
            with log_and_exit_if_exception(logger, queue_name, threaded=False):
                worker = get_worker(queue_name)
                with configure_scope() as scope:
                    scope.set_tag("queue_worker", queue_name)
                    scope.set_tag("worker_num", worker_num)

                    worker.setup()
                    signal.signal(signal.SIGTERM, signal_handler)
                    signal.signal(signal.SIGINT, signal_handler)
                    signal.signal(signal.SIGUSR1, signal_handler)
                    worker.ENABLE_TIMEOUTS = True
                    worker.start()
Example #8
    def handle(self, *args, **options):
        logging.basicConfig()
        logger = logging.getLogger('process_queue')

        queue_name = options['queue_name']
        worker_num = options['worker_num']

        if not settings.USING_RABBITMQ:
            logger.error(
                "Cannot run a queue processor when USING_RABBITMQ is False!")
            sys.exit(1)

        logger.info("Worker %d connecting to queue %s" %
                    (worker_num, queue_name))
        worker = get_worker(queue_name)

        def signal_handler(signal, frame):
            logger.info("Worker %d disconnecting from queue %s" %
                        (worker_num, queue_name))
            worker.stop()
            sys.exit(0)

        signal.signal(signal.SIGTERM, signal_handler)
        signal.signal(signal.SIGINT, signal_handler)

        worker.start()
Example #9
    def handle(self, *args, **options):
        logging.basicConfig()
        logger = logging.getLogger('process_queue')

        if not settings.USING_RABBITMQ:
            logger.error("Cannot run a queue processor when USING_RABBITMQ is False!")
            sys.exit(1)

        if options['all']:
            for queue_name in get_active_worker_queues():
                logger.info('launching queue worker thread ' + queue_name)
                td = Threaded_worker(queue_name)
                td.start()
        else:
            queue_name = options['queue_name']
            worker_num = options['worker_num']

            logger.info("Worker %d connecting to queue %s" % (worker_num, queue_name))
            worker = get_worker(queue_name)
            worker.setup()

            def signal_handler(signal, frame):
                logger.info("Worker %d disconnecting from queue %s" % (worker_num, queue_name))
                worker.stop()
                sys.exit(0)
            signal.signal(signal.SIGTERM, signal_handler)
            signal.signal(signal.SIGINT, signal_handler)

            worker.start()
Example #10
    def handle(self, *args, **options):
        # type: (*Any, **Any) -> None
        logging.basicConfig()
        logger = logging.getLogger('process_queue')

        def exit_with_three(signal, frame):
            # type: (int, FrameType) -> None
            """
            This process is watched by Django's autoreload, so exiting
            with status code 3 will cause this process to restart.
            """
            logger.warn("SIGUSR1 received. Restarting this queue processor.")
            sys.exit(3)

        if not settings.USING_RABBITMQ:
            # Make the warning silent when running the tests
            if settings.TEST_SUITE:
                logger.info("Not using RabbitMQ queue workers in the test suite.")
            else:
                logger.error("Cannot run a queue processor when USING_RABBITMQ is False!")
            sys.exit(1)

        def run_threaded_workers(queues, logger):
            # type: (List[str], logging.Logger) -> None
            cnt = 0
            for queue_name in queues:
                if not settings.DEVELOPMENT:
                    logger.info('launching queue worker thread ' + queue_name)
                cnt += 1
                td = Threaded_worker(queue_name)
                td.start()
            assert len(queues) == cnt
            logger.info('%d queue worker threads were launched' % (cnt,))

        if options['all']:
            signal.signal(signal.SIGUSR1, exit_with_three)
            autoreload.main(run_threaded_workers, (get_active_worker_queues(), logger))
        elif options['multi_threaded']:
            signal.signal(signal.SIGUSR1, exit_with_three)
            queues = options['multi_threaded']
            autoreload.main(run_threaded_workers, (queues, logger))
        else:
            queue_name = options['queue_name']
            worker_num = options['worker_num']

            logger.info("Worker %d connecting to queue %s" % (worker_num, queue_name))
            worker = get_worker(queue_name)
            worker.setup()

            def signal_handler(signal, frame):
                # type: (int, FrameType) -> None
                logger.info("Worker %d disconnecting from queue %s" % (worker_num, queue_name))
                worker.stop()
                sys.exit(0)
            signal.signal(signal.SIGTERM, signal_handler)
            signal.signal(signal.SIGINT, signal_handler)
            signal.signal(signal.SIGUSR1, signal_handler)

            worker.start()
Example #11
    def handle(self, *args: Any, **options: Any) -> None:
        logging.basicConfig()
        logger = logging.getLogger('process_queue')

        def exit_with_three(signal: int, frame: FrameType) -> None:
            """
            This process is watched by Django's autoreload, so exiting
            with status code 3 will cause this process to restart.
            """
            logger.warning("SIGUSR1 received. Restarting this queue processor.")
            sys.exit(3)

        if not settings.USING_RABBITMQ:
            # Make the warning silent when running the tests
            if settings.TEST_SUITE:
                logger.info("Not using RabbitMQ queue workers in the test suite.")
            else:
                logger.error("Cannot run a queue processor when USING_RABBITMQ is False!")
            raise CommandError

        def run_threaded_workers(queues: List[str], logger: logging.Logger) -> None:
            cnt = 0
            for queue_name in queues:
                if not settings.DEVELOPMENT:
                    logger.info('launching queue worker thread ' + queue_name)
                cnt += 1
                td = Threaded_worker(queue_name)
                td.start()
            assert len(queues) == cnt
            logger.info('%d queue worker threads were launched' % (cnt,))

        if options['all']:
            signal.signal(signal.SIGUSR1, exit_with_three)
            autoreload.main(run_threaded_workers, (get_active_worker_queues(), logger))
        elif options['multi_threaded']:
            signal.signal(signal.SIGUSR1, exit_with_three)
            queues = options['multi_threaded']
            autoreload.main(run_threaded_workers, (queues, logger))
        else:
            queue_name = options['queue_name']
            worker_num = options['worker_num']

            logger.info("Worker %d connecting to queue %s" % (worker_num, queue_name))
            worker = get_worker(queue_name)
            worker.setup()

            def signal_handler(signal: int, frame: FrameType) -> None:
                logger.info("Worker %d disconnecting from queue %s" % (worker_num, queue_name))
                worker.stop()
                sys.exit(0)
            signal.signal(signal.SIGTERM, signal_handler)
            signal.signal(signal.SIGINT, signal_handler)
            signal.signal(signal.SIGUSR1, signal_handler)

            worker.start()
Example #12
    def handle(self, *args, **options):
        # type: (*Any, **Any) -> None
        logging.basicConfig()
        logger = logging.getLogger('process_queue')

        if not settings.USING_RABBITMQ:
            # Make the warning silent when running the tests
            if settings.TEST_SUITE:
                logger.info(
                    "Not using RabbitMQ queue workers in the test suite.")
            else:
                logger.error(
                    "Cannot run a queue processor when USING_RABBITMQ is False!"
                )
            sys.exit(1)

        def run_threaded_workers(logger):
            # type: (logging.Logger) -> None
            cnt = 0
            for queue_name in get_active_worker_queues():
                if not settings.DEVELOPMENT:
                    logger.info('launching queue worker thread ' + queue_name)
                cnt += 1
                td = Threaded_worker(queue_name)
                td.start()
            logger.info('%d queue worker threads were launched' % (cnt, ))

        if options['all']:
            autoreload.main(run_threaded_workers, (logger, ))
        else:
            queue_name = options['queue_name']
            worker_num = options['worker_num']

            logger.info("Worker %d connecting to queue %s" %
                        (worker_num, queue_name))
            worker = get_worker(queue_name)
            worker.setup()

            def signal_handler(signal, frame):
                # type: (int, FrameType) -> None
                logger.info("Worker %d disconnecting from queue %s" %
                            (worker_num, queue_name))
                worker.stop()
                sys.exit(0)

            signal.signal(signal.SIGTERM, signal_handler)
            signal.signal(signal.SIGINT, signal_handler)

            worker.start()
Example #13
    def handle(self, *args, **options):
        # type: (*Any, **Any) -> None
        logging.basicConfig()
        logger = logging.getLogger('process_queue')

        if not settings.USING_RABBITMQ:
            # Make the warning silent when running the tests
            if settings.TEST_SUITE:
                logger.info("Not using RabbitMQ queue workers in the test suite.")
            else:
                logger.error("Cannot run a queue processor when USING_RABBITMQ is False!")
            sys.exit(1)

        def run_threaded_workers(logger):
            # type: (logging.Logger) -> None
            cnt = 0
            for queue_name in get_active_worker_queues():
                if not settings.DEVELOPMENT:
                    logger.info('launching queue worker thread ' + queue_name)
                cnt += 1
                td = Threaded_worker(queue_name)
                td.start()
            logger.info('%d queue worker threads were launched' % (cnt,))

        if options['all']:
            autoreload.main(run_threaded_workers, (logger,))
        else:
            queue_name = options['queue_name']
            worker_num = options['worker_num']

            logger.info("Worker %d connecting to queue %s" % (worker_num, queue_name))
            worker = get_worker(queue_name)
            worker.setup()

            def signal_handler(signal, frame):
                # type: (int, FrameType) -> None
                logger.info("Worker %d disconnecting from queue %s" % (worker_num, queue_name))
                worker.stop()
                sys.exit(0)
            signal.signal(signal.SIGTERM, signal_handler)
            signal.signal(signal.SIGINT, signal_handler)

            worker.start()
Example #14
    def handle(self, *args, **options):
        logging.basicConfig()
        logger = logging.getLogger('process_queue')

        queue_name = options['queue_name']
        worker_num = options['worker_num']

        def signal_handler(signal, frame):
            logger.info("Worker %d disconnecting from queue %s" % (worker_num, queue_name))
            worker.stop()
            sys.exit(0)

        if not settings.USING_RABBITMQ:
            logger.error("Cannot run a queue processor when USING_RABBITMQ is False!")
            sys.exit(1)

        signal.signal(signal.SIGTERM, signal_handler)
        signal.signal(signal.SIGINT, signal_handler)

        logger.info("Worker %d connecting to queue %s" % (worker_num, queue_name))
        worker = get_worker(queue_name)
        worker.start()
Example #15
    def __init__(self, queue_name):
        # type: (str) -> None
        threading.Thread.__init__(self)
        self.worker = get_worker(queue_name)
Example #16
    def __init__(self, queue_name):
        threading.Thread.__init__(self)
        self.worker = get_worker(queue_name)