def test_get_entries_stores_entries(self):
    """Entries fetched via the async client are persisted in the entries DB."""
    fake_db = self.FakeDB()
    client = self.default_client(entries_db=fake_db, reactor_=reactor)
    consumer = self.get_entries(client, 0, 9)
    # Bug fix: verify_entries returns a bool (it is wrapped in assertTrue
    # everywhere else in this file); previously the result was discarded
    # inside the callback, so a verification failure could never fail the
    # test. Assert it explicitly.
    consumer.consumed.addCallback(
        lambda _: self.assertTrue(
            test_util.verify_entries(consumer.received, 0, 9)))
    consumer.consumed.addCallback(
        lambda _: self.assertTrue(
            test_util.verify_entries(fake_db.entries, 0, 9)))
    # Return the deferred so the test framework waits for the callbacks.
    return consumer.consumed
def test_get_entries_pause_resume(self):
    """A pausing consumer halts production after its limit; resume finishes."""
    client = self.default_client()
    producer = client.get_entries(0, 9, batch_size=4)
    consumer = self.PausingConsumer(4)
    consumer.registerProducer(producer)
    deferred = producer.startProducing(consumer)
    deferred.addBoth(consumer.done)
    self.clock.advance(0)
    # Only the first batch of 4 entries should have arrived so far, and the
    # overall result must still be pending while production is paused.
    self.assertTrue(test_util.verify_entries(consumer.received, 0, 3))
    self.assertEqual(4, len(consumer.received))
    self.assertIsNone(consumer.result)
    producer.resumeProducing()
    # After resuming, all 10 entries should have been delivered.
    self.assertEqual(10, consumer.result)
    self.assertTrue(test_util.verify_entries(consumer.received, 0, 9))
def test_get_entries_returns_all_for_limiting_server(self):
    """A server that caps each response at 3 entries still yields all 10."""
    limited_handler = self.FakeHandler(test_util.DEFAULT_URI, entry_limit=3)
    client = log_client.AsyncLogClient(self.FakeAgent(limited_handler),
                                       test_util.DEFAULT_URI,
                                       reactor=self.clock)
    consumer = self.get_entries(client, 0, 9)
    self.assertTrue(test_util.verify_entries(consumer.received, 0, 9))
def test_get_entries_returns_all_for_limiting_server(self):
    """The client transparently re-requests until a limiting server is drained."""
    handler = self.FakeHandler(test_util.DEFAULT_URI, entry_limit=3)
    agent = self.FakeAgent(handler)
    client = log_client.AsyncLogClient(agent, test_util.DEFAULT_URI,
                                       reactor=self.clock)
    consumer = self.get_entries(client, 0, 9)
    self.assertTrue(test_util.verify_entries(consumer.received, 0, 9))
def test_get_entries_returns_partial_if_log_returns_partial(self):
    """A log smaller than the request delivers what it has, then errors out."""
    small_handler = self.FakeHandler(test_util.DEFAULT_URI, tree_size=3)
    client = log_client.AsyncLogClient(self.FakeAgent(small_handler),
                                       test_util.DEFAULT_URI,
                                       reactor=self.clock)
    consumer = self.get_entries(client, 0, 9)
    # The fetch ultimately fails with an HTTP client error...
    self.assertTrue(consumer.result.check(log_client.HTTPClientError))
    # ...but the 3 entries that exist were still delivered first.
    self.assertTrue(test_util.verify_entries(consumer.received, 0, 2))
def test_get_entries_pause_resume(self):
    """Production stops at the consumer's pause point and resumes cleanly."""
    client = self.default_client()
    producer = client.get_entries(0, 9, batch_size=4)
    consumer = self.PausingConsumer(4)
    consumer.registerProducer(producer)
    deferred = producer.startProducing(consumer)
    deferred.addBoth(consumer.done)
    # fire all pending callbacks, and then fire request
    self.pump_get_entries()
    # Exactly one batch of 4 has been delivered; the result is still pending.
    self.assertTrue(test_util.verify_entries(consumer.received, 0, 3))
    self.assertEqual(4, len(consumer.received))
    self.assertIsNone(consumer.result)
    producer.resumeProducing()
    # pump next 2 batches
    self.pump_get_entries(pumps=2)
    self.assertEqual(10, consumer.result)
    self.assertTrue(test_util.verify_entries(consumer.received, 0, 9))
def test_get_entries_returns_partial_if_log_returns_partial(self):
    """Existing entries are yielded before the out-of-range error surfaces."""
    client = log_client.LogClient(test_util.DEFAULT_URI,
                                  self.FakeHandler(test_util.DEFAULT_URI,
                                                   tree_size=3))
    entries = client.get_entries(0, 9)
    # Idiom fix: use the next() builtin (available since Python 2.6 and the
    # only spelling on Python 3) instead of the Python-2-only .next() method,
    # and build the prefix with a comprehension instead of a manual loop.
    partial = [next(entries) for _ in range(3)]
    self.assertTrue(test_util.verify_entries(partial, 0, 2))
    # The 4th entry exceeds the log's tree size of 3 and must raise.
    self.assertRaises(log_client.HTTPClientError, next, entries)
def test_get_entries_returns_partial_if_log_returns_partial(self):
    """An undersized log fails the fetch but still delivers what it holds."""
    handler = self.FakeHandler(test_util.DEFAULT_URI, tree_size=3)
    agent = self.FakeAgent(handler)
    client = log_client.AsyncLogClient(agent, test_util.DEFAULT_URI,
                                       reactor=self.clock)
    consumer = self.get_entries(client, 0, 9)
    self.assertTrue(consumer.result.check(log_client.HTTPClientError))
    self.assertTrue(test_util.verify_entries(consumer.received, 0, 2))
def test_get_entries_returns_all_for_limiting_server(self):
    """Repeated pumping drains a server that caps responses at 3 entries."""
    limited_handler = self.FakeHandler(test_util.DEFAULT_URI, entry_limit=3)
    client = async_log_client.AsyncLogClient(self.FakeAgent(limited_handler),
                                             test_util.DEFAULT_URI,
                                             reactor=self.clock)
    consumer = self.get_entries(client, 0, 9)
    # 1 pump in get_entries and 3 more so we fetch everything
    self.pump_get_entries(pumps=3)
    self.assertTrue(test_util.verify_entries(consumer.received, 0, 9))
def test_get_entries_returns_partial_if_log_returns_partial(self):
    """The iterator yields the log's 3 real entries, then raises."""
    client = log_client.LogClient(test_util.DEFAULT_URI,
                                  self.FakeHandler(test_util.DEFAULT_URI,
                                                   tree_size=3))
    entries = client.get_entries(0, 9)
    # Idiom fix: the next() builtin replaces the Python-2-only .next()
    # method call, keeping this code valid on Python 2.6+ and Python 3.
    partial = [next(entries) for _ in range(3)]
    self.assertTrue(test_util.verify_entries(partial, 0, 2))
    # Requesting past the tree size must fail with an HTTP client error.
    self.assertRaises(log_client.HTTPClientError, next, entries)
def test_get_entries_returns_all_in_batches(self):
    """Entries are fetched in batch_size chunks, via argument and via flag."""
    mock_handler = mock.Mock()
    fake_responder = self.FakeHandler(test_util.DEFAULT_URI)
    mock_handler.get_response_body.side_effect = (
        fake_responder.get_response_body)
    client = log_client.LogClient(test_util.DEFAULT_URI,
                                  handler=mock_handler)
    returned_entries = list(client.get_entries(0, 9, batch_size=4))
    self.assertTrue(test_util.verify_entries(returned_entries, 0, 9))
    # 10 entries at batch size 4 -> 3 requests.
    self.assertEqual(3, len(mock_handler.get_response_body.call_args_list))
    # Same as above, but using a flag to control the batch size.
    mock_handler.reset_mock()
    # TODO(ekasper): find a more elegant and robust way to save flags.
    original = FLAGS.entry_fetch_batch_size
    FLAGS.entry_fetch_batch_size = 4
    try:
        returned_entries = list(client.get_entries(0, 9))
    finally:
        # Robustness fix: restore the flag even if get_entries raises,
        # so a failure here cannot leak state into other tests.
        FLAGS.entry_fetch_batch_size = original
    self.assertTrue(test_util.verify_entries(returned_entries, 0, 9))
    self.assertEqual(3, len(mock_handler.get_response_body.call_args_list))
def test_get_entries_returns_all_in_batches(self):
    """Both the batch_size argument and the batch-size flag cap requests."""
    mock_handler = mock.Mock()
    fake_responder = self.FakeHandler(test_util.DEFAULT_URI)
    mock_handler.get_response_body.side_effect = (
        fake_responder.get_response_body)
    client = log_client.LogClient(test_util.DEFAULT_URI,
                                  handler=mock_handler)
    returned_entries = list(client.get_entries(0, 9, batch_size=4))
    self.assertTrue(test_util.verify_entries(returned_entries, 0, 9))
    # ceil(10 / 4) == 3 round trips expected.
    self.assertEqual(3, len(mock_handler.get_response_body.call_args_list))
    # Same as above, but using a flag to control the batch size.
    mock_handler.reset_mock()
    # TODO(ekasper): find a more elegant and robust way to save flags.
    original = FLAGS.entry_fetch_batch_size
    FLAGS.entry_fetch_batch_size = 4
    try:
        returned_entries = list(client.get_entries(0, 9))
    finally:
        # Robustness fix: restore the flag under finally so an exception
        # in get_entries cannot leave the global flag modified.
        FLAGS.entry_fetch_batch_size = original
    self.assertTrue(test_util.verify_entries(returned_entries, 0, 9))
    self.assertEqual(3, len(mock_handler.get_response_body.call_args_list))
def test_get_entries_returns_all_in_batches(self):
    """The async client splits a 10-entry fetch into 3 batch_size=4 requests."""
    mock_handler = mock.Mock()
    fake_responder = self.FakeHandler(test_util.DEFAULT_URI)
    mock_handler.get_response.side_effect = fake_responder.get_response
    client = async_log_client.AsyncLogClient(self.FakeAgent(mock_handler),
                                             test_util.DEFAULT_URI,
                                             reactor=self.clock)
    consumer = self.get_entries(client, 0, 9, batch_size=4)
    self.assertEqual(10, consumer.result)
    self.assertTrue(test_util.verify_entries(consumer.received, 0, 9))
    # 10 entries / batches of 4 -> 3 HTTP requests.
    self.assertEqual(3, len(mock_handler.get_response.call_args_list))
def test_get_entries_returns_all_in_batches(self):
    """Batched fetching delivers every entry in exactly 3 requests."""
    mock_handler = mock.Mock()
    fake_responder = self.FakeHandler(test_util.DEFAULT_URI)
    mock_handler.get_response.side_effect = fake_responder.get_response
    agent = self.FakeAgent(mock_handler)
    client = log_client.AsyncLogClient(agent, test_util.DEFAULT_URI,
                                       reactor=self.clock)
    consumer = self.get_entries(client, 0, 9, batch_size=4)
    self.assertEqual(10, consumer.result)
    self.assertTrue(test_util.verify_entries(consumer.received, 0, 9))
    self.assertEqual(3, len(mock_handler.get_response.call_args_list))
def test_get_entries_succedes_after_retry(self):
    """A fetch that hits bad responses retries until a good one arrives."""
    # NOTE(review): "succedes" is a typo for "succeeds"; the name is kept
    # unchanged here to preserve the test id.
    bad_entries = test_util.entries_to_json(test_util.make_entries(0, 9))
    bad_entries["entries"][5]["leaf_input"] = "garbagebase64^^^"
    client = self.one_shot_client(bad_entries)
    producer = client.get_entries(0, 9)
    # remove exponential back-off
    producer._calculate_retry_delay = lambda _: 1
    consumer = self.EntryConsumer()
    deferred = producer.startProducing(consumer)
    deferred.addBoth(consumer.done)
    # pump retries halfway through (there are actually two delays before
    # firing requests, so this loop will go only through half of retries)
    self.pump_get_entries(1, FLAGS.get_entries_max_retries)
    self.assertFalse(hasattr(consumer, 'result'))
    # Now swap in a well-formed response and let one more retry fire.
    good_entries = test_util.entries_to_json(test_util.make_entries(0, 9))
    response = self.FakeHandler.make_response(200, "OK",
                                              json_content=good_entries)
    client._handler._agent._responder.get_response.return_value = response
    self.pump_get_entries(1)
    self.assertTrue(test_util.verify_entries(consumer.received, 0, 9))
def test_get_entries(self):
    """The happy path: all 10 requested entries come back in order."""
    client = self.default_client()
    fetched = list(client.get_entries(0, 9))
    self.assertTrue(test_util.verify_entries(fetched, 0, 9))
def test_get_entries_stores_entries(self):
    """Fetched entries are written through to the backing entries DB."""
    fake_db = self.FakeDB()
    client = self.default_client(entries_db=fake_db)
    consumer = self.get_entries(client, 0, 9)
    # Bug fix: verify_entries returns a bool (asserted via assertTrue in
    # every other test in this file); previously the return values were
    # discarded, so these checks could never fail the test.
    self.assertTrue(test_util.verify_entries(consumer.received, 0, 9))
    self.assertTrue(test_util.verify_entries(fake_db.entries, 0, 9))
def test_get_entries_returns_all_for_limiting_server(self):
    """A server limited to 3 entries per response still yields all 10."""
    limited_handler = self.FakeHandler(test_util.DEFAULT_URI, entry_limit=3)
    client = log_client.LogClient(test_util.DEFAULT_URI, limited_handler)
    fetched = list(client.get_entries(0, 9))
    self.assertTrue(test_util.verify_entries(fetched, 0, 9))
def test_get_entries(self):
    """The async happy path: consumer receives all 10 entries."""
    client = self.default_client()
    consumer = self.get_entries(client, 0, 9)
    # The consumer reports the total count and the entries themselves verify.
    self.assertEqual(10, consumer.result)
    self.assertTrue(test_util.verify_entries(consumer.received, 0, 9))
def test_get_entries_returns_all_for_limiting_server(self):
    """Per-response entry caps are transparent to the caller."""
    handler = self.FakeHandler(test_util.DEFAULT_URI, entry_limit=3)
    client = log_client.LogClient(test_util.DEFAULT_URI, handler)
    returned = list(client.get_entries(0, 9))
    self.assertTrue(test_util.verify_entries(returned, 0, 9))
def test_get_entries_tries_to_fetch_if_not_available_in_db(self):
    """Entries missing from the DB are fetched from the log instead."""
    fake_db = self.FakeDB()
    # The DB reports no stored entries, forcing a fetch from the log.
    fake_db.scan_entries = mock.Mock(return_value=None)
    client = self.default_client(entries_db=fake_db)
    consumer = self.get_entries(client, 0, 9)
    # Bug fix: verify_entries returns a bool; the original discarded it,
    # so a verification failure could never fail the test.
    self.assertTrue(test_util.verify_entries(consumer.received, 0, 9))