def testBadCheckMessages(self):
    """Test check_messages method with non-json data."""
    result = mock.MagicMock()
    result.payload = 'non-json-data'
    self.kombu.queue.get.side_effect = [result, queue.Empty('Empty Queue')]
    self.assertListEqual(self.kombu.check_messages(), [])
def setUp(self):
    request = getTurbiniaRequest()
    self.kombu = celery.TurbiniaKombu('fake_topic')
    result = mock.MagicMock()
    result.payload = request.to_json()
    self.kombu.queue = mock.MagicMock()
    self.kombu.queue.__len__.return_value = 1
    self.kombu.queue.get.side_effect = [result, queue.Empty('Empty Queue')]
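# A minimal, self-contained sketch (not part of the Turbinia suite) of the
# side_effect pattern the two tests above rely on: when an element of a
# side_effect iterable is an exception *instance*, unittest.mock raises it on
# that call instead of returning it, so the second get() behaves like an empty
# queue.
import queue
from unittest import mock

fake_queue = mock.MagicMock()
fake_queue.get.side_effect = ['first-result', queue.Empty('Empty Queue')]

assert fake_queue.get() == 'first-result'   # first call returns the payload
try:
    fake_queue.get()                        # second call raises queue.Empty
except queue.Empty:
    pass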
def test_queue_callback_worker_get_many():
    queue_ = queue.Queue()
    callback = mock.Mock(spec=())
    qct = helper_threads.QueueCallbackWorker(queue_, callback)

    # Set up an appropriate mock for the queue, and call the queue callback
    # thread.
    with mock.patch.object(queue.Queue, 'get') as get:
        get.side_effect = (
            mock.sentinel.A,
            queue.Empty(),
            mock.sentinel.B,
            helper_threads.STOP,
            queue.Empty(),
        )
        qct()

    # Assert that we got the expected calls.
    assert get.call_count == 5
    callback.assert_has_calls(
        [mock.call([mock.sentinel.A]), mock.call([mock.sentinel.B])])
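# Hedged sketch of the loop the QueueCallbackWorker tests exercise (an
# illustrative reimplementation, not the real google-cloud-pubsub helper):
# block for one item, opportunistically drain more without blocking, hand the
# batch to the callback, and exit once the STOP sentinel shows up.
import queue

STOP = object()


def queue_callback_worker(q, callback, max_items=100):
    while True:
        items = [q.get()]  # always block for at least one item
        while len(items) < max_items:
            try:
                items.append(q.get(block=False))
            except queue.Empty:
                break
        saw_stop = STOP in items
        items = [item for item in items if item is not STOP]
        if items:
            callback(items)
        if saw_stop:
            return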
def test_exit_with_stop(self):
    q = mock.create_autospec(queue.Queue, instance=True)
    q.get.side_effect = [_helper_threads.STOP, queue.Empty()]
    rpc = mock.create_autospec(grpc.RpcContext, instance=True)
    rpc.is_active.return_value = True

    generator = _consumer._RequestQueueGenerator(q)
    generator.rpc = rpc

    items = list(generator)

    assert items == []
def test_run_empty(self, m_count):
    events = [mock.sentinel.event1, mock.sentinel.event2]
    group = mock.sentinel.group
    m_queue = mock.Mock()
    m_queue.get.side_effect = events + [six_queue.Empty()]
    m_handler = mock.Mock()
    m_count.return_value = list(range(5))
    async_handler = h_async.Async(m_handler, mock.Mock(), mock.Mock())

    async_handler._run(group, m_queue)

    m_handler.assert_has_calls([mock.call(event) for event in events])
def test_exit_when_inactive_empty(self):
    q = mock.create_autospec(queue.Queue, instance=True)
    q.get.side_effect = queue.Empty()
    rpc = mock.create_autospec(grpc.RpcContext, instance=True)
    rpc.is_active.return_value = False

    generator = _consumer._RequestQueueGenerator(q)
    generator.rpc = rpc

    items = list(generator)

    assert items == []
def test_exit_when_inactive_empty(self):
    q = mock.create_autospec(queue.Queue, instance=True)
    q.get.side_effect = queue.Empty()
    call = mock.create_autospec(grpc.Call, instance=True)
    call.is_active.return_value = False

    generator = bidi._RequestQueueGenerator(q)
    generator.call = call

    items = list(generator)

    assert items == []
def test_yield_initial_and_exit(self):
    q = mock.create_autospec(queue.Queue, instance=True)
    q.get.side_effect = queue.Empty()
    rpc = mock.create_autospec(grpc.RpcContext, instance=True)
    rpc.is_active.return_value = False

    generator = _consumer._RequestQueueGenerator(
        q, initial_request=mock.sentinel.A)
    generator.rpc = rpc

    items = list(generator)

    assert items == [mock.sentinel.A]
def test_yield_initial_callable_and_exit(self):
    q = mock.create_autospec(queue.Queue, instance=True)
    q.get.side_effect = queue.Empty()
    call = mock.create_autospec(grpc.Call, instance=True)
    call.is_active.return_value = False

    generator = bidi._RequestQueueGenerator(
        q, initial_request=lambda: mock.sentinel.A)
    generator.call = call

    items = list(generator)

    assert items == [mock.sentinel.A]
def test_exit_when_inactive_with_item(self):
    q = mock.create_autospec(queue.Queue, instance=True)
    q.get.side_effect = [mock.sentinel.A, queue.Empty()]
    rpc = mock.create_autospec(grpc.RpcContext, instance=True)
    rpc.is_active.return_value = False

    generator = _consumer._RequestQueueGenerator(q)
    generator.rpc = rpc

    items = list(generator)

    assert items == []
    # Make sure it put the item back.
    q.put.assert_called_once_with(mock.sentinel.A)
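# Hedged sketch of the behaviour the _RequestQueueGenerator tests above
# exercise (names are illustrative, not the real google-api-core source): a
# generator that drains a queue.Queue while the call is active and, if the
# call has gone inactive, puts any item it already pulled back on the queue
# before exiting.
import queue


class SimpleRequestQueueGenerator(object):
    def __init__(self, q, initial_request=None):
        self._queue = q
        self._initial_request = initial_request
        self.call = None  # set to an object with is_active() before iterating

    def __iter__(self):
        if self._initial_request is not None:
            yield (self._initial_request()
                   if callable(self._initial_request)
                   else self._initial_request)
        while True:
            try:
                item = self._queue.get(timeout=0.1)
            except queue.Empty:
                if not self.call.is_active():
                    return
                continue
            if not self.call.is_active():
                self._queue.put(item)  # re-queue so the item is not lost
                return
            yield item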
def test_run_stale(self, m_count):
    events = [mock.sentinel.event1, mock.sentinel.event2]
    group = mock.sentinel.group
    m_queue = mock.Mock()
    m_queue.empty.side_effect = [False, True, True]
    m_queue.get.side_effect = events + [six_queue.Empty()]
    m_handler = mock.Mock()
    m_count.return_value = list(range(5))
    async_handler = h_async.Async(m_handler, mock.Mock(), mock.Mock())

    with mock.patch('time.sleep'):
        async_handler._run(group, m_queue)

    m_handler.assert_called_once_with(mock.sentinel.event2)
def test_queue_callback_worker():
    queue_ = queue.Queue()
    callback = mock.Mock(spec=())
    qct = helper_threads.QueueCallbackWorker(queue_, callback)

    # Set up an appropriate mock for the queue, and call the queue callback
    # thread.
    with mock.patch.object(queue.Queue, "get") as get:
        get.side_effect = (mock.sentinel.A, helper_threads.STOP, queue.Empty())
        qct()

    # Assert that we got the expected calls.
    assert get.call_count == 3
    callback.assert_called_once_with([mock.sentinel.A])
def test_run_empty(self, m_count):
    events = [mock.sentinel.event1, mock.sentinel.event2]
    group = mock.sentinel.group
    m_queue = mock.Mock()
    m_queue.empty.return_value = True
    m_queue.get.side_effect = events + [six_queue.Empty()]
    m_handler = mock.Mock()
    m_count.return_value = list(range(5))
    async_handler = h_async.Async(m_handler, mock.Mock(), mock.Mock())

    with mock.patch('time.sleep'):
        async_handler._run(group, m_queue)

    m_handler.assert_has_calls([mock.call(event) for event in events])
    self.assertEqual(len(events), m_handler.call_count)
def _read(self, timeout_seconds, q):
    now = time.monotonic()
    deadline = now + timeout_seconds
    while q.empty() and now <= deadline:
        can_read, _, _ = select.select([self._socket], [], [], deadline - now)
        now = time.monotonic()
        if can_read and self._socket in can_read:
            try:
                new_bytes = seven.bitcast_to_string(self._socket.recv(4096))
                if self._logger and new_bytes and len(new_bytes) > 0:
                    self._logger.debug(
                        "pump received bytes: {}".format(new_bytes))
            except:
                # Likely a closed socket. Done with the pump thread.
                if self._logger:
                    self._logger.debug(
                        "socket read failed, stopping pump read thread\n" +
                        traceback.format_exc(3))
                break
            self._process_new_bytes(new_bytes)
    if q.empty():
        raise queue.Empty()
    return q.get(True)
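# Hedged usage sketch (pump and packet_queue are hypothetical stand-ins for the
# objects around _read above; only _read's signature comes from that code): the
# deadline loop signals "nothing arrived in time" by raising queue.Empty, so
# callers branch on the exception rather than on a sentinel return value.
import queue as _std_queue


def wait_for_packet(pump, packet_queue, timeout_seconds=5.0):
    try:
        return pump._read(timeout_seconds, packet_queue)
    except _std_queue.Empty:
        return None  # no packet before the deadline; caller decides what to do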
def test__thread_main_max_latency(self, time):
    # Note: this test is a bit brittle as it assumes the operation of
    # _get_many invokes queue.get() followed by queue._get(). It fails
    # the "change detector" test in that way. However, this is still a
    # useful test to verify the queue timeout is appropriately calculated.
    from six.moves import queue
    from google.cloud.logging_v2.handlers.transports import background_thread

    # Use monotonically increasing time.
    time.side_effect = range(1, 6)

    worker = self._make_one(
        _Logger(self.NAME), max_latency=2, max_batch_size=10)
    worker._queue = mock.create_autospec(queue.Queue, instance=True)
    worker._queue.get.side_effect = [
        {"info": {"message": "1"}},  # Single record.
        queue.Empty(),  # Emulate a queue.get() timeout.
        {"info": {"message": "1"}},  # Second record.
        background_thread._WORKER_TERMINATOR,  # Stop the thread.
        queue.Empty(),  # Emulate a queue.get() timeout.
    ]

    worker._thread_main()

    self.assertEqual(worker._cloud_logger._num_batches, 2)
    self.assertTrue(worker._cloud_logger._batch.commit_called)
    self.assertEqual(worker._cloud_logger._batch.commit_count, 1)

    # Time should have been called five times.
    #
    # For the first batch, it should have been called:
    #   * Once to get the start time. (1)
    #   * Once to get the elapsed time while grabbing the second item. (2)
    #
    # For the second batch, it should have been called:
    #   * Once to get start time. (3)
    #   * Once to get the elapsed time while grabbing the second item. (3)
    #   * Once to get the elapsed time while grabbing the final item. (4)
    #   * Once final time to get the elapsed time while receiving
    #     the empty queue.
    #
    self.assertEqual(time.call_count, 5)

    # Queue.get should've been called 5 times as well, but with different
    # timeouts due to the monotonically increasing time.
    #
    # For the first batch, it will be called once without a timeout
    # (for the first item) and then with timeout=1, as start will be
    # 1 and now will be 2.
    #
    # For the second batch, it will be called once without a timeout
    # (for the first item) and then with timeout=1, as start will be
    # 3 and now will be 4, and finally with timeout=0 as start will be 3
    # and now will be 5.
    #
    worker._queue.get.assert_has_calls([
        mock.call(),
        mock.call(timeout=1),
        mock.call(),
        mock.call(timeout=1),
        mock.call(timeout=0),
    ])
def blocked_queue_get_nowait(counter_name):
    global blocked_queues
    if counter_name not in blocked_queues:
        raise _queue.Empty()
    return blocked_queues[counter_name].get_nowait()
def peek_nowait(**kwargs):
    if not reference_queue.queue:
        raise queue.Empty()
    return reference_queue.queue[0]
def queue_generator(rpc):
    yield mock.sentinel.A
    yield queue.Empty()
    yield mock.sentinel.B
    rpc.is_active.return_value = False
    yield mock.sentinel.C
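# Note on queue_generator: it is meant to be assigned to a mock's side_effect,
# where unittest.mock raises any exception instance it pulls from the iterable
# (here the queue.Empty()) instead of returning it, and flipping rpc.is_active
# to False mid-stream lets the consumer under test observe the call going
# inactive. Hypothetical wiring, not from the original test module:
#
#     q = mock.create_autospec(queue.Queue, instance=True)
#     rpc = mock.create_autospec(grpc.RpcContext, instance=True)
#     rpc.is_active.return_value = True
#     q.get.side_effect = queue_generator(rpc)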