def test_requires_positive_number_of_processes(self):
    """Show that the number of processes has to be > 0."""
    for invalid_count in (0, -1):
        with pytest.raises(ValueError):
            pool.Pool(None, num_processes=invalid_count)
def test_number_of_processes_can_be_arbitrary(self):
    """Show that the number of processes can be set."""
    for count in (100, 1):
        p = pool.Pool(None, num_processes=count)
        assert p._processes == count
        assert len(p._pool) == count
def test_number_of_processes_can_be_arbitrary(self):
    """Show that the number of processes can be set."""
    for count in (100, 1):
        job_queue = queue.Queue()
        p = pool.Pool(job_queue, num_processes=count)
        assert p._processes == count
        assert len(p._pool) == count
def test_session_is_called(self):
    """Ensure that the session function is called."""
    session = mock.MagicMock()
    jobs = queue.Queue()
    pool.Pool(jobs, num_processes=1, session=session)
    assert session.called is True
    session.assert_called_once_with()
def test_auth_generator_is_called(self):
    """Ensure that the auth_generator function is called."""
    auth_generator = mock.MagicMock()
    jobs = queue.Queue()
    pool.Pool(jobs, num_processes=1, auth_generator=auth_generator)
    assert auth_generator.called is True
    auth_generator.assert_called_once_with(mock.ANY)
def test_initializer_is_called(self):
    """Ensure that the initializer function is called."""
    initializer = mock.MagicMock()
    jobs = queue.Queue()
    pool.Pool(jobs, num_processes=1, initializer=initializer)
    assert initializer.called is True
    initializer.assert_called_once_with(mock.ANY)
def _fetch_versions(self, addon_map):
    """Fetch version listings for every addon in *addon_map*.

    For each GUID, the first versions page is requested in chunks of 500
    URLs; responses from ``_handle_version_responses`` yield "last page"
    URLs, which are then fetched so ``_handle_last_version_responses``
    can record the most recent version of each addon.  Each pool's
    exceptions are retried exactly once.

    :param dict addon_map: mapping keyed by addon GUID.
    """
    logger.info("Processing Version urls")
    q = queue.Queue()
    # NOTE: fixed typo in the original log message ("verson").
    logger.info("Filling initial version page queue")

    def iter_version_urls(guid_map):
        # One versions-listing URL per addon GUID.
        for guid in list(guid_map.keys()):
            yield "https://addons.mozilla.org/api/v3/addons/addon/%s/versions/" % guid

    def chunker(seq, size):
        # Yield successive lists of at most ``size`` items from ``seq``.
        collector = []
        for term in seq:
            collector.append(term)
            if len(collector) == size:
                yield collector
                collector = []
        # Yield any dangling records we collected
        if collector:
            yield collector

    total_processed_addons = 0
    for chunk in chunker(iter_version_urls(addon_map), 500):
        for url in chunk:
            q.put({"method": "GET", "url": url, "timeout": 2.0})
        logger.info("Queue setup - processing initial version page requests")
        # Lazy %-args keep formatting out of the hot path.
        logger.info("%d requests to process", q.qsize())
        p = pool.Pool(q, num_processes=self._max_processes)
        p.join_all()
        logger.info("Pool completed - processing responses")
        last_page_urls = self._handle_version_responses(p)
        logger.info("Captured %d last page urls", len(last_page_urls))
        total_processed_addons += len(last_page_urls)

        # Try processing the exceptions once
        p = pool.Pool.from_exceptions(p.exceptions(),
                                      num_processes=self._max_processes)
        p.join_all()
        last_page_urls.extend(self._handle_version_responses(p))

        # Now fetch the last version of each addon
        logger.info("Processing last page urls: %d", len(last_page_urls))
        p = pool.Pool.from_urls(last_page_urls,
                                num_processes=self._max_processes)
        p.join_all()
        self._handle_last_version_responses(p, addon_map)

        # Try processing exceptions once
        p = pool.Pool.from_exceptions(p.exceptions(),
                                      num_processes=self._max_processes)
        p.join_all()
        self._handle_last_version_responses(p, addon_map)

    logger.info("Processed %d addons with version info",
                total_processed_addons)
def sendRequestQueue(bodies, url):
    """POST each body in *bodies* to *url* via a thread pool and verify 200s."""
    job_queue = queue.Queue()
    for payload in bodies:
        job_queue.put({'method': 'POST',
                       'url': url,
                       'data': json.dumps(payload)})
    request_pool = pool.Pool(job_queue=job_queue)
    request_pool.join_all()
    for response in request_pool.responses():
        assert response.status_code == 200
        if DEBUG:
            print(response.text)
def test_join_all(self):
    """Ensure that all threads are joined properly."""
    spawned_threads = []

    def _track_thread(*args, **kwargs):
        fake_thread = mock.MagicMock()
        spawned_threads.append(fake_thread)
        return fake_thread

    with mock.patch.object(thread, 'SessionThread',
                           side_effect=_track_thread):
        pool.Pool(None).join_all()

    for fake_thread in spawned_threads:
        fake_thread.join.assert_called_once_with()
def test_get_exception_returns_none_when_queue_is_empty(self):
    """Ensure that None is returned when the exception Queue is empty."""
    fake_queues = []

    def _make_empty_queue():
        fake = mock.MagicMock()
        fake.get_nowait.side_effect = queue.Empty()
        fake_queues.append(fake)
        return fake

    with mock.patch.object(queue, 'Queue', side_effect=_make_empty_queue), \
            mock.patch.object(thread, 'SessionThread'):
        p = pool.Pool(None)
        assert len(fake_queues) == 2
        assert p.get_exception() is None
        polled = [fake for fake in fake_queues if fake.get_nowait.called]
        assert len(polled) == 1
def test_get_exception_returns_thread_exception(self):
    """Ensure that a ThreadException is made when there's data."""
    fake_queues = []

    def _make_loaded_queue():
        fake = mock.MagicMock()
        fake.get_nowait.return_value = ({}, None)
        fake_queues.append(fake)
        return fake

    with mock.patch.object(queue, 'Queue', side_effect=_make_loaded_queue), \
            mock.patch.object(thread, 'SessionThread'):
        p = pool.Pool(None)
        assert len(fake_queues) == 2
        assert isinstance(p.get_exception(), pool.ThreadException)
        polled = [fake for fake in fake_queues if fake.get_nowait.called]
        assert len(polled) == 1
def test_lists_are_correctly_returned(self):
    """Ensure that exceptions and responses return correct lists."""
    def _queue_with_one_item():
        loaded = queue.Queue()
        loaded.put(({}, None))
        return loaded

    with mock.patch.object(thread, 'SessionThread'):
        p = pool.Pool(None)
        # Set up real queues.
        p._response_queue = _queue_with_one_item()
        p._exc_queue = _queue_with_one_item()

        exceptions = list(p.exceptions())
        assert len(exceptions) == 1
        assert all(isinstance(exc, pool.ThreadException)
                   for exc in exceptions)

        responses = list(p.responses())
        assert len(responses) == 1
        assert all(isinstance(resp, pool.ThreadResponse)
                   for resp in responses)
} }) q.put({ 'method': 'PUT', 'url': 'https://httpbin.org/put', 'files': { 'foo': ('', 'bar') } }) q.put({ 'method': 'GET', 'url': 'https://httpbin.org/stream/100', 'stream': True }) q.put({'method': 'GET', 'url': 'https://httpbin.org/delay/10', 'timeout': 5.0}) for i in range(30): q.put({ 'method': 'GET', 'url': 'https://httpbin.org/get', 'params': { 'i': str(i) }, }) p = pool.Pool(q) p.join_all() responses = list(p.responses()) exceptions = list(p.exceptions())