def test_maximum_queue():
    """The pre-init task queue is capped at MAX_QUEUE_SIZE; extra tasks are dropped."""
    Dispatcher.set_task_queueing(True)

    # Attempt to enqueue more tasks than the queue can hold.
    for _ in range(Dispatcher.MAX_QUEUE_SIZE + 10):
        Dispatcher.launch(lambda: 0)

    # The overflow tasks must not have been queued.
    assert len(Dispatcher._preinit_task_queue) == Dispatcher.MAX_QUEUE_SIZE
def test_queued_tasks_are_executed_in_the_order_they_are_received():
    """Tasks queued before flushing and tasks launched afterwards all run in FIFO order."""
    main_thread_id = threading.get_ident()

    Dispatcher._testing_mode = False
    Dispatcher._queue_initial_tasks = True

    class Job:
        # Class-level (shared) state: execution count and the observed run order.
        thread_counter = [0]
        thread_list = []

        def __init__(self, num):
            self.num = num

        def __lt__(self, other):
            # Deterministic-but-arbitrary ordering so Job instances can sit
            # in ordering-sensitive containers without raising.
            return id(self) < id(other)

        def __call__(self):
            # Every job must run off the main thread.
            assert main_thread_id != threading.get_ident()
            self.thread_counter[0] += 1
            self.thread_list.append(self.num)

    # First half goes into the pre-init queue...
    for num in range(50):
        Dispatcher.launch(Job(num))
    # ...then flushing drains it, and the second half is dispatched directly.
    Dispatcher.flush_queued_initial_tasks()
    for num in range(50, 100):
        Dispatcher.launch(Job(num))

    # Wait for the worker to drain everything.
    Dispatcher._task_worker._queue.join()

    assert Job.thread_list == list(range(100))
    assert Job.thread_counter[0] == 100
def test_queue_tasks_are_flushed_off_the_main_thread():
    """Flushing the pre-init queue executes the queued tasks on the worker thread."""
    main_thread_id = threading.get_ident()
    Dispatcher._testing_mode = False
    Dispatcher._queue_initial_tasks = False

    thread_canary = [0]

    def test_task():
        # Must execute off the main thread.
        assert main_thread_id != threading.get_ident()
        thread_canary[0] += 1

    # Re-enable queueing before launching so the tasks accumulate
    # instead of running immediately.
    Dispatcher._testing_mode = False
    Dispatcher._queue_initial_tasks = True

    for _ in range(3):
        Dispatcher.launch(test_task)

    # Nothing has run yet: the tasks are parked in the pre-init queue.
    assert 3 == len(Dispatcher._preinit_task_queue)
    assert 0 == thread_canary[0]

    Dispatcher.flush_queued_initial_tasks()
    Dispatcher._task_worker._queue.join()

    # All three tasks ran and the queue is now empty.
    assert 3 == thread_canary[0]
    assert 0 == len(Dispatcher._preinit_task_queue)
def task_runner():
    # NOTE(review): this module-level helper references `test_task` and
    # `thread_canary`, which are only defined *inside*
    # test_queue_tasks_are_flushed_off_the_main_thread — calling it at module
    # scope would raise NameError. It looks like an orphaned fragment of that
    # test (possibly a nested closure lifted out during a refactor) — confirm
    # whether it is still needed. It is also shadowed by the second
    # `task_runner` definition later in this file.
    for i in range(3):
        Dispatcher.launch(test_task)
    assert 3 == len(Dispatcher._preinit_task_queue)
    assert 0 == thread_canary[0]
    Dispatcher.flush_queued_initial_tasks()
def task_runner():
    # NOTE(review): references `Job`, which is only defined *inside*
    # test_queued_tasks_are_executed_in_the_order_they_are_received — calling
    # this at module scope would raise NameError. It appears to be an orphaned
    # fragment of that test; confirm whether it is still needed. This
    # definition also shadows the earlier `task_runner` above (duplicate name).
    for i in range(50):
        Dispatcher.launch(Job(i))
    Dispatcher.flush_queued_initial_tasks()
    for i in range(50, 100):
        Dispatcher.launch(Job(i))
def test_tasks_run_off_the_main_thread():
    """A launched task executes exactly once, on the worker thread."""
    main_thread_id = threading.get_ident()
    thread_canary = [False]

    def test_task():
        # Runs on the worker, not the main thread, and only once.
        assert main_thread_id != threading.get_ident()
        assert thread_canary[0] is False
        thread_canary[0] = True

    Dispatcher.launch(test_task)
    Dispatcher._task_worker._queue.join()

    assert thread_canary[0] is True
def test_overflowing_the_task_queue_records_telemetry():
    """Overflowing the pre-init queue increments the overflow count and records
    the total number of attempted launches in the
    `glean.error.preinit_tasks_overflow` metric.
    """
    Dispatcher.set_task_queueing(True)

    # Derive the numbers from MAX_QUEUE_SIZE instead of hard-coding 110/100/10,
    # keeping this test consistent with test_maximum_queue and correct if the
    # limit ever changes.
    overflow = 10
    total = Dispatcher.MAX_QUEUE_SIZE + overflow

    for _ in range(total):
        Dispatcher.launch(lambda: None)

    # The queue is full and the extra launches were counted as overflow.
    assert Dispatcher.MAX_QUEUE_SIZE == len(Dispatcher._preinit_task_queue)
    assert overflow == Dispatcher._overflow_count

    Dispatcher.flush_queued_initial_tasks()

    # The metric records the *total* number of attempted launches.
    assert (
        total
        == _builtins.metrics.glean.error.preinit_tasks_overflow.test_get_value()
    )

    json_content = Glean.test_collect(_builtins.pings.metrics)
    json_tree = json.loads(json_content)
    assert total == json_tree["metrics"]["counter"]["glean.error.preinit_tasks_overflow"]
def test_dispatched_tasks_throwing_exceptions_are_correctly_handled():
    """A task that raises does not kill the worker; later tasks still run."""
    Dispatcher._testing_mode = False
    Dispatcher._queue_initial_tasks = False

    thread_canary = [0]

    def exception_task():
        # Deliberate ZeroDivisionError on the worker thread.
        42 / 0

    Dispatcher.launch(exception_task)

    def working_task():
        thread_canary[0] += 1

    for _ in range(3):
        Dispatcher.launch(working_task)

    Dispatcher._task_worker._queue.join()

    # The three well-behaved tasks all ran despite the earlier exception.
    assert 3 == thread_canary[0]
def test_that_thread_joins_before_directory_is_deleted_in_reset():
    """Glean._reset() must wait for in-flight tasks before deleting the data dir."""
    Dispatcher._testing_mode = False
    Dispatcher._queue_initial_tasks = False

    thread_canary = [0]

    boolean_metric = metrics.BooleanMetricType(
        disabled=False,
        category="telemetry",
        lifetime=Lifetime.APPLICATION,
        name="boolean_metric",
        send_in_pings=["store1"],
    )

    def slow_task():
        time.sleep(1)
        # This will cause a Rust panic if the data directory was deleted in
        # Glean._reset() before this has a chance to run.
        boolean_metric.set(True)
        thread_canary[0] = 1

    Dispatcher.launch(slow_task)
    Glean._reset()

    # The slow task completed before the reset tore down the directory.
    assert thread_canary[0] == 1