def setUp(self):
    """Build a downloader whose requests session is a mock, for isolated tests."""
    self.config = config.DownloaderConfig()
    self.listener = listener.AggregatingEventListener()
    self.session = mock.Mock()
    self.downloader = threaded.HTTPThreadedDownloader(
        self.config, self.listener, session=self.session)
def test_empty_string_proxy_username(self):
    """
    Yoram Hekma submitted a patch[0] that ensured that an empty string in
    the proxy username would not count as the user supplying a username.
    This test ensures that behavior is tested.

    [0] https://github.com/pulp/nectar/pull/47
    """
    kwargs = {'proxy_url': 'https://invalid-proxy.com',
              'proxy_port': 1234,
              'proxy_username': '',
              'proxy_password': ''}
    proxy_host = urllib.splithost(urllib.splittype(kwargs['proxy_url'])[1])[0]

    session = threaded.build_session(config.DownloaderConfig(**kwargs))

    self.assertEqual(session.stream, True)
    self.assertFalse(hasattr(session.auth, 'proxy_username'))
    self.assertFalse(hasattr(session.auth, 'proxy_password'))
    # Since the user provided the empty string for the proxy username, the
    # username and password should be missing in the session proxies.
    expected_proxy = 'https://%s:%d' % (proxy_host, kwargs['proxy_port'])
    self.assertEqual(session.proxies,
                     {'http': expected_proxy, 'https': expected_proxy})
def test_multiple_downloads(self):
    """A mixed batch of good and bogus URLs is reported and written correctly."""
    cfg = config.DownloaderConfig()
    lst = listener.AggregatingEventListener()
    downloader = threaded.HTTPThreadedDownloader(cfg, lst)

    bogus_file_names = ['notme', 'notmeeither']
    all_file_names = self.data_file_names + bogus_file_names

    dest_list = []
    request_list = []
    for name in all_file_names:
        url = 'http://localhost:%d/%s%s' % (
            self.server_port, self.data_directory, name)
        dest = os.path.join(self.download_dir, name)
        dest_list.append(dest)
        request_list.append(request.DownloadRequest(url, dest))

    downloader.download(request_list)

    self.assertEqual(len(lst.succeeded_reports), len(self.data_file_names))
    self.assertEqual(len(lst.failed_reports), len(bogus_file_names))

    good_count = len(self.data_file_names)
    # The real files must exist with the expected sizes ...
    for i, dest in enumerate(dest_list[:good_count]):
        self.assertTrue(os.path.exists(dest))
        self.assertEqual(os.path.getsize(dest), self.data_file_sizes[i])
    # ... and the bogus ones must not have been created at all.
    for dest in dest_list[good_count:]:
        self.assertFalse(os.path.exists(dest))
def test_invalid_scheme(self):
    """
    Test with an invalid scheme in the URL.
    """
    nectar_config = config.DownloaderConfig(max_speed=42)
    # An unrecognized scheme must be rejected with ValueError.
    self.assertRaises(ValueError, Importer.build_downloader,
                      'ftpx://martin.com/test', nectar_config)
def test_configure_session(self):
    """build_session must honor auth, header, SSL cert, and proxy settings."""
    kwargs = {
        'basic_auth_username': '******',
        'basic_auth_password': '******',
        'headers': {'pulp-header': 'awesome!'},
        'ssl_validation': False,
        'ssl_client_cert_path': os.path.join(
            _find_data_directory(), 'pki/bogus/cert.pem'),
        'ssl_client_key_path': os.path.join(
            _find_data_directory(), 'pki/bogus/key.pem'),
        'proxy_url': 'https://invalid-proxy.com',
        'proxy_port': 1234,
        'proxy_username': '******',
        'proxy_password': '******',
    }
    proxy_host = urllib.splithost(urllib.splittype(kwargs['proxy_url'])[1])[0]

    session = threaded.build_session(config.DownloaderConfig(**kwargs))

    self.assertEqual(session.stream, True)
    # Other headers get added by the requests library, so we'll just check
    # for the one we added.
    self.assertEqual(session.headers.get('pulp-header'), 'awesome!')
    self.assertEqual(session.auth.username, kwargs['basic_auth_username'])
    self.assertEqual(session.auth.password, kwargs['basic_auth_password'])
    self.assertEqual(session.auth.proxy_username, kwargs['proxy_username'])
    self.assertEqual(session.auth.proxy_password, kwargs['proxy_password'])
    self.assertEqual(session.cert, (kwargs['ssl_client_cert_path'],
                                    kwargs['ssl_client_key_path']))
    # Proxy username and password are url-encoded before sending the request.
    expected_proxy = 'https://%s:%s@%s:%d' % (
        urllib.quote(kwargs['proxy_username']),
        urllib.quote(kwargs['proxy_password']),
        proxy_host,
        kwargs['proxy_port'])
    self.assertEqual(session.proxies,
                     {'http': expected_proxy, 'https': expected_proxy})
def test_local(self):
    """
    Test with a file:// scheme.
    """
    nectar_config = config.DownloaderConfig(max_concurrent=23)
    downloader = Importer.build_downloader('file:///martin.com/test',
                                           nectar_config)
    # file:// URLs must map to the local-file downloader and keep the config.
    self.assertTrue(isinstance(downloader, local.LocalFileDownloader))
    self.assertEqual(downloader.config.max_concurrent, 23)
def test_http(self):
    """
    Test with http:// as the scheme.
    """
    nectar_config = config.DownloaderConfig(max_speed=42)
    downloader = Importer.build_downloader('http://martin.com/test',
                                           nectar_config)
    # http:// URLs must map to the threaded HTTP downloader and keep the config.
    self.assertTrue(isinstance(downloader, threaded.HTTPThreadedDownloader))
    self.assertEqual(downloader.config.max_speed, 42)
def test_single_download_success(self):
    """Downloading one existing file succeeds and writes the full content."""
    cfg = config.DownloaderConfig()
    lst = listener.AggregatingEventListener()
    downloader = threaded.HTTPThreadedDownloader(cfg, lst)

    name = self.data_file_names[0]
    file_path = os.path.join(self.data_directory, name)
    dest_path = os.path.join(self.download_dir, name)
    url = 'http://localhost:%d/%s' % (self.server_port, file_path)

    downloader.download([request.DownloadRequest(url, dest_path)])

    self.assertTrue(os.path.exists(dest_path))
    self.assertEqual(os.path.getsize(dest_path), self.data_file_sizes[0])
    self.assertEqual(len(lst.succeeded_reports), 1)
    self.assertEqual(len(lst.failed_reports), 0)
def test_single_download_failure(self):
    """Downloading a missing file reports a failure and creates no file."""
    cfg = config.DownloaderConfig()
    lst = listener.AggregatingEventListener()
    downloader = threaded.HTTPThreadedDownloader(cfg, lst)

    file_name = 'idontexistanddontcreateme'
    file_path = os.path.join(self.data_directory, file_name)
    dest_path = os.path.join(self.download_dir, file_name)
    url = 'http://localhost:%d/%s' % (self.server_port, file_path)

    downloader.download([request.DownloadRequest(url, dest_path)])

    self.assertFalse(os.path.exists(dest_path))
    self.assertEqual(len(lst.succeeded_reports), 0)
    self.assertEqual(len(lst.failed_reports), 1)
    # The failure report must carry an error message for the caller.
    self.assertTrue(lst.failed_reports[0].error_msg is not None)
def test_https(self, _process_ssl_settings):
    """
    Test with https:// as the scheme.
    """
    nectar_config = config.DownloaderConfig(
        ssl_ca_cert='CA Cert',
        ssl_ca_cert_path='/path/to/ca.crt',
        ssl_client_cert='Client Cert',
        ssl_client_cert_path='/path/to/client.crt',
        ssl_client_key='Client Cert',
        ssl_client_key_path='/path/to/client.key')

    downloader = Importer.build_downloader('https://martin.com/test',
                                           nectar_config)

    self.assertTrue(isinstance(downloader, threaded.HTTPThreadedDownloader))
    # All three SSL paths must survive into the downloader's config.
    cfg = downloader.config
    self.assertEqual(cfg.ssl_ca_cert_path, '/path/to/ca.crt')
    self.assertEqual(cfg.ssl_client_cert_path, '/path/to/client.crt')
    self.assertEqual(cfg.ssl_client_key_path, '/path/to/client.key')
    # The patched-in helper (see decorator on this test) must run exactly once.
    _process_ssl_settings.assert_called_once_with()
def test_instantiation(self):
    """A downloader built from default config exposes the module defaults.

    Verifies that the config and listener are stored as-is and that
    buffer size, concurrency, and progress interval fall back to the
    ``threaded`` module constants.
    """
    cfg = config.DownloaderConfig()
    lst = listener.DownloadEventListener()
    try:
        downloader = threaded.HTTPThreadedDownloader(cfg, lst)
    except Exception:
        # Narrowed from a bare ``except:`` so KeyboardInterrupt/SystemExit
        # still propagate instead of being reported as a test failure.
        self.fail('instantiation of requests eventlet downloader failed')
    self.assertEqual(cfg, downloader.config)
    self.assertEqual(lst, downloader.event_listener)
    self.assertEqual(downloader.buffer_size, threaded.DEFAULT_BUFFER_SIZE)
    self.assertEqual(downloader.max_concurrent, threaded.DEFAULT_MAX_CONCURRENT)
    self.assertEqual(
        downloader.progress_interval,
        datetime.timedelta(seconds=threaded.DEFAULT_PROGRESS_INTERVAL))
def test_download_unhandled_exception(self):
    """An unexpected worker error cancels the download and gets logged."""
    with mock.patch('nectar.downloaders.threaded._logger') as mock_logger:
        cfg = config.DownloaderConfig()
        lst = listener.AggregatingEventListener()
        downloader = threaded.HTTPThreadedDownloader(cfg, lst,
                                                     session=mock.Mock())
        # Force the per-request fetch to blow up with something unexpected.
        downloader._fetch = mock.Mock(side_effect=OSError)

        downloader.download([mock.Mock()])

        self.assertTrue(downloader.is_canceled)
        expected_log_message = 'Unhandled Exception in Worker Thread'
        # Collect the first positional arg of every logger call made.
        log_calls = [mock_call[1][0] for mock_call in mock_logger.mock_calls]
        self.assertIn(expected_log_message, log_calls[1])
def test_throttling(self):
    """Throttled download of a 500k file at 256000 B/s takes 2–4 seconds.

    Fix: the upper-bound delta was stored in a variable named
    ``three_seconds`` while actually holding four seconds, contradicting
    both its value and the comment below; renamed to ``four_seconds``.
    The timing behavior asserted is unchanged.
    """
    two_seconds = datetime.timedelta(seconds=2)
    four_seconds = datetime.timedelta(seconds=4)
    cfg = config.DownloaderConfig(max_speed=256000)  # 1/2 size of file
    lst = listener.AggregatingEventListener()
    downloader = threaded.HTTPThreadedDownloader(cfg, lst)

    # Use the 500k file; should take >= 2 seconds to download, but < 4.
    file_path = os.path.join(self.data_directory, self.data_file_names[1])
    dest_path = os.path.join(self.download_dir, self.data_file_names[1])
    url = 'http://localhost:%d/%s' % (self.server_port, file_path)
    req = request.DownloadRequest(url, dest_path)

    start = datetime.datetime.now()
    downloader.download([req])
    finish = datetime.datetime.now()

    self.assertTrue(finish - start >= two_seconds)
    self.assertTrue(finish - start < four_seconds)