def test_fetch_with_timeout(self):
        """
        Test that the report state is failed and that the baseurl can be tried again.
        """

        def raise_timeout(*args, **kwargs):
            # Simulate a request that never completes.
            raise Timeout()

        with mock.patch('nectar.downloaders.threaded._logger') as mock_logger:
            url = 'http://pulpproject.org/primary.xml'
            request = DownloadRequest(url, StringIO())

            first_session = threaded.build_session(self.config)
            first_session.get = raise_timeout
            first_report = self.downloader._fetch(request, first_session)

            # The download fails, but a timeout must not blacklist the host.
            self.assertEqual(first_report.state, first_report.DOWNLOAD_FAILED)
            self.assertNotIn('pulpproject.org', self.downloader.failed_netlocs)

            retry_session = threaded.build_session(self.config)
            retry_session.get = mock.MagicMock()
            retry_report = self.downloader._fetch(request, retry_session)

            # A second attempt actually hits the (mocked) network again.
            self.assertEqual(retry_report.state, retry_report.DOWNLOAD_FAILED)
            self.assertEqual(retry_session.get.call_count, 1)

            expected_log_message = "Request Timeout - Connection with " \
                                   "http://pulpproject.org/primary.xml timed out."
            logged = [entry[1][0] for entry in mock_logger.mock_calls]

            self.assertIn(expected_log_message, logged)
    def test_fetch_with_connection_error(self):
        """
        Test that the report state is failed and that the baseurl is not tried again.
        """

        def connection_error(*args, **kwargs):
            # Simulate requests.ConnectionError on every GET attempt.
            raise ConnectionError()

        with mock.patch('nectar.downloaders.threaded._logger') as mock_logger:
            URL = 'http://pulpproject.org/primary.xml'
            req = DownloadRequest(URL, StringIO())
            session = threaded.build_session(self.config)
            session.get = connection_error
            try:
                report = self.downloader._fetch(req, session)
            except ConnectionError:
                # _fetch is expected to swallow the error and record a
                # failure; if it escapes, the test must fail.  (The original
                # message was inverted: it said the error "should be raised".)
                raise AssertionError("ConnectionError should not be raised")

            self.assertEqual(report.state, report.DOWNLOAD_FAILED)
            # A connection error blacklists the netloc.
            self.assertIn('pulpproject.org', self.downloader.failed_netlocs)

            session2 = threaded.build_session(self.config)
            session2.get = mock.MagicMock()
            report2 = self.downloader._fetch(req, session2)

            self.assertEqual(report2.state, report2.DOWNLOAD_FAILED)
            # The blacklisted host must not be contacted again.
            self.assertEqual(session2.get.call_count, 0)

            expected_log_message = "Connection Error - http://pulpproject.org/primary.xml " \
                                   "could not be reached."
            log_calls = [mock_call[1][0] for mock_call in mock_logger.mock_calls]

            self.assertIn(expected_log_message, log_calls)
    def test_fetch_with_timeout(self):
        """
        Test that the report state is failed and that the baseurl can be tried again.
        """

        def timeout(*args, **kwargs):
            # Every GET attempt times out.
            raise Timeout()

        with mock.patch('nectar.downloaders.threaded._logger') as mock_logger:
            req = DownloadRequest('http://pulpproject.org/primary.xml', StringIO())

            session = threaded.build_session(self.config)
            session.get = timeout
            report = self.downloader._fetch(req, session)
            self.assertEqual(report.state, report.DOWNLOAD_FAILED)
            # A timeout must not blacklist the host.
            self.assertNotIn('pulpproject.org', self.downloader.failed_netlocs)

            session2 = threaded.build_session(self.config)
            session2.get = mock.MagicMock()
            report2 = self.downloader._fetch(req, session2)
            self.assertEqual(report2.state, report2.DOWNLOAD_FAILED)
            # The retry really issues a new GET.
            self.assertEqual(session2.get.call_count, 1)

            expected = ("Request Timeout - Connection with "
                        "http://pulpproject.org/primary.xml timed out.")
            self.assertIn(expected,
                          [call[1][0] for call in mock_logger.mock_calls])
    def test_build_session(self):
        kwargs = dict(basic_auth_username='******',
                      basic_auth_password='******',
                      ssl_validation=False,
                      ssl_client_cert_path=os.path.join(_find_data_directory(),
                                                        'pki/bogus/cert.pem'),
                      ssl_client_key_path=os.path.join(_find_data_directory(),
                                                       'pki/bogus/key.pem'),
                      proxy_url='https://invalid-proxy.com',
                      proxy_port=1234,
                      proxy_username='******',
                      proxy_password='******')
        proxy_host = urllib.splithost(urllib.splittype(kwargs['proxy_url'])[1])[0]

        session = threaded.build_session(config.DownloaderConfig(**kwargs))

        credentials = '%s:%s' % (kwargs['proxy_username'], kwargs['proxy_password'])
        location = '%s:%d' % (proxy_host, kwargs['proxy_port'])

        self.assertEqual(session.stream, True)
        self.assertEqual(session.auth,
                         (kwargs['basic_auth_username'], kwargs['basic_auth_password']))
        self.assertEqual(session.cert,
                         (kwargs['ssl_client_cert_path'], kwargs['ssl_client_key_path']))
        # NOTE(review): the original assertion expects the 'http' proxy URL to
        # use the https scheme and vice versa; preserved verbatim.
        self.assertEqual(session.proxies,
                         {'http': 'https://%s@%s' % (credentials, location),
                          'https': 'http://%s@%s' % (credentials, location)})
# Esempio n. 5
# 0
    def test_empty_string_proxy_username(self):
        """
        Yoram Hekma submitted a patch[0] that ensured that an empty string in the proxy username
        would not count as the user supplying a username. This test ensures that behavior is tested.

        [0] https://github.com/pulp/nectar/pull/47
        """
        kwargs = {'proxy_url': 'https://invalid-proxy.com',
                  'proxy_port': 1234,
                  'proxy_username': '',
                  'proxy_password': ''}
        proxy_host = urllib.splithost(urllib.splittype(kwargs['proxy_url'])[1])[0]

        session = threaded.build_session(config.DownloaderConfig(**kwargs))

        self.assertEqual(session.stream, True)
        # An empty username means build_session sets no proxy auth attributes.
        self.assertFalse(hasattr(session.auth, 'proxy_username'))
        self.assertFalse(hasattr(session.auth, 'proxy_password'))

        # With no username, the proxy URLs must carry no credentials either.
        expected = 'https://%s:%d' % (proxy_host, kwargs['proxy_port'])
        self.assertEqual(session.proxies, {'http': expected, 'https': expected})
# Esempio n. 6
# 0
    def test_empty_string_proxy_username(self):
        """
        Yoram Hekma submitted a patch[0] that ensured that an empty string in the proxy username
        would not count as the user supplying a username. This test ensures that behavior is tested.

        [0] https://github.com/pulp/nectar/pull/47
        """
        proxy_url = 'https://invalid-proxy.com'
        proxy_port = 1234
        kwargs = {'proxy_url': proxy_url,
                  'proxy_port': proxy_port,
                  'proxy_username': '',
                  'proxy_password': ''}
        proxy_host = urllib.splithost(urllib.splittype(proxy_url)[1])[0]

        cfg = config.DownloaderConfig(**kwargs)
        session = threaded.build_session(cfg)

        self.assertEqual(session.stream, True)
        # No proxy auth attributes when the username is the empty string.
        self.assertFalse(hasattr(session.auth, 'proxy_username'))
        self.assertFalse(hasattr(session.auth, 'proxy_password'))

        # Username and password are likewise absent from the proxy URLs.
        self.assertEqual(session.proxies,
                         {'http': 'https://%s:%d' % (proxy_host, proxy_port),
                          'https': 'https://%s:%d' % (proxy_host, proxy_port)})
# Esempio n. 7
# 0
    def test_fetch_with_timeout(self):
        """
        Test that the report state is failed and that the baseurl can be tried again.
        """

        with mock.patch('nectar.downloaders.threaded._logger') as mock_logger:
            url = 'http://fakeurl/primary.xml'
            request = DownloadRequest(url, StringIO())
            self.session.get.side_effect = Timeout
            first_report = self.downloader._fetch(request)

            self.assertEqual(first_report.state, first_report.DOWNLOAD_FAILED)
            # Timeouts do not blacklist the netloc.
            self.assertNotIn('fakeurl', self.downloader.failed_netlocs)

            # NOTE(review): this session is built but never handed to _fetch;
            # kept to preserve the original test's side effects.
            unused_session = threaded.build_session(self.config)
            unused_session.get = mock.MagicMock()
            second_report = self.downloader._fetch(request)

            self.assertEqual(second_report.state, second_report.DOWNLOAD_FAILED)
            # Both attempts went through self.session.
            self.assertEqual(self.session.get.call_count, 2)

            expected_log_message = "Request Timeout - Connection with " \
                                   "http://fakeurl/primary.xml timed out."
            logged = [c[1][0] for c in mock_logger.mock_calls]

            self.assertIn(expected_log_message, logged)
    def test_fetch_with_timeout(self):
        """
        Test that the report state is failed and that the baseurl can be tried again.
        """

        with mock.patch('nectar.downloaders.threaded._logger') as mock_logger:
            req = DownloadRequest('http://fakeurl/primary.xml', StringIO())
            self.session.get.side_effect = Timeout

            report = self.downloader._fetch(req)
            self.assertEqual(report.state, report.DOWNLOAD_FAILED)
            # A timeout leaves the host eligible for retry.
            self.assertNotIn('fakeurl', self.downloader.failed_netlocs)

            # The second session is never passed to _fetch; the downloader
            # keeps using self.session (hence call_count == 2 below).
            session2 = threaded.build_session(self.config)
            session2.get = mock.MagicMock()

            report2 = self.downloader._fetch(req)
            self.assertEqual(report2.state, report2.DOWNLOAD_FAILED)
            self.assertEqual(self.session.get.call_count, 2)

            expected = ("Request Timeout - Connection with "
                        "http://fakeurl/primary.xml timed out.")
            log_calls = [mock_call[1][0]
                         for mock_call in mock_logger.mock_calls]
            self.assertIn(expected, log_calls)
    def test_fetch_with_connection_error_badstatusline(self):
        """
        Test that the baseurl is tried again if ConnectionError reason BadStatusLine happened.
        """

        def raise_aborted(*args, **kwargs):
            # requests.ConnectionError wrapping an httplib.BadStatusLine.
            raise ConnectionError('Connection aborted.', httplib.BadStatusLine("''",))

        with mock.patch('nectar.downloaders.threaded._logger') as mock_logger:
            req = DownloadRequest('http://pulpproject.org/primary.xml', StringIO())
            session = threaded.build_session(self.config)
            session.get = mock.MagicMock(side_effect=raise_aborted)

            self.downloader._fetch(req, session)

            # BadStatusLine triggers exactly one retry.
            self.assertEqual(session.get.call_count, 2)

            # The full, ordered log sequence for a retry that hits max retries.
            expected_log_msg = ['Download of http://pulpproject.org/primary.xml failed. Re-trying.',
                                'Re-trying http://pulpproject.org/primary.xml due to remote server '
                                'connection failure.',
                                'Download of http://pulpproject.org/primary.xml failed. Re-trying.',
                                'Download of http://pulpproject.org/primary.xml failed and reached '
                                'maximum retries']
            log_calls = [c[1][0] for c in mock_logger.mock_calls]

            self.assertEqual(expected_log_msg, log_calls)
    def test_configure_session(self):
        kwargs = {'basic_auth_username': '******',
                  'basic_auth_password': '******',
                  'headers': {'pulp-header': 'awesome!'},
                  'ssl_validation': False,
                  'ssl_client_cert_path': os.path.join(_find_data_directory(),
                                                       'pki/bogus/cert.pem'),
                  'ssl_client_key_path': os.path.join(_find_data_directory(),
                                                      'pki/bogus/key.pem'),
                  'proxy_url': 'https://invalid-proxy.com',
                  'proxy_port': 1234,
                  'proxy_username': '******',
                  'proxy_password': '******'}
        proxy_host = urllib.splithost(urllib.splittype(kwargs['proxy_url'])[1])[0]

        session = threaded.build_session(config.DownloaderConfig(**kwargs))

        self.assertEqual(session.stream, True)
        # Requests adds headers of its own; only verify the custom one.
        self.assertEqual(session.headers.get('pulp-header'), 'awesome!')

        self.assertEqual(session.auth.username, kwargs['basic_auth_username'])
        self.assertEqual(session.auth.password, kwargs['basic_auth_password'])
        self.assertEqual(session.auth.proxy_username, kwargs['proxy_username'])
        self.assertEqual(session.auth.proxy_password, kwargs['proxy_password'])

        self.assertEqual(session.cert, (kwargs['ssl_client_cert_path'],
                                        kwargs['ssl_client_key_path']))
        # Proxy username and password are url-encoded before being embedded
        # in the proxy URL.
        quoted_proxy = 'https://%s:%s@%s:%d' % (urllib.quote(kwargs['proxy_username']),
                                                urllib.quote(kwargs['proxy_password']),
                                                proxy_host,
                                                kwargs['proxy_port'])
        self.assertEqual(session.proxies, {'http': quoted_proxy,
                                           'https': quoted_proxy})
# Esempio n. 11
# 0
    def test_request_headers(self):
        url = 'http://pulpproject.org/robots.txt'
        request = DownloadRequest(url, StringIO(),
                                  headers={'pulp_header': 'awesome!'})
        response = Response()
        response.status_code = httplib.OK
        response.raw = StringIO('abc')
        session = threaded.build_session(self.config)
        session.get = mock.MagicMock(return_value=response, spec_set=session.get)

        self.downloader._fetch(request, session)

        # Per-request headers must be forwarded verbatim to session.get.
        session.get.assert_called_once_with(url, headers={'pulp_header': 'awesome!'})
    def test_wrong_content_encoding(self):
        url = 'http://pulpproject.org/primary.xml.gz'
        request = DownloadRequest(url, StringIO())

        response = Response()
        response.status_code = httplib.OK
        response.raw = StringIO('abc')

        session = threaded.build_session(self.config)
        session.get = mock.MagicMock(return_value=response, spec_set=session.get)

        report = self.downloader._fetch(request, session)

        self.assertEqual(report.state, DOWNLOAD_SUCCEEDED)
        self.assertEqual(report.bytes_downloaded, 3)
        # A blank accept-encoding header is sent, presumably to stop requests
        # from transparently decompressing the .gz payload.
        session.get.assert_called_once_with(url, headers={'accept-encoding': ''})
# Esempio n. 13
# 0
    def test_request_headers(self):
        URL = 'http://pulpproject.org/robots.txt'
        req = DownloadRequest(URL, StringIO(),
                              headers={'pulp_header': 'awesome!'})

        response = Response()
        response.status_code = httplib.OK
        response.raw = StringIO('abc')

        session = threaded.build_session(self.config)
        session.get = mock.MagicMock(return_value=response,
                                     spec_set=session.get)

        self.downloader._fetch(req, session)

        # Request-level headers are passed straight through to the session.
        session.get.assert_called_once_with(URL,
                                            headers={'pulp_header': 'awesome!'})
    def test_normal_content_encoding(self):
        url = 'http://pulpproject.org/primary.xml'
        request = DownloadRequest(url, StringIO())
        response = Response()
        response.status_code = httplib.OK
        response.iter_content = mock.MagicMock(return_value=['abc'],
                                               spec_set=response.iter_content)
        session = threaded.build_session(self.config)
        session.get = mock.MagicMock(return_value=response,
                                     spec_set=session.get)

        report = self.downloader._fetch(request, session)

        self.assertEqual(report.state, DOWNLOAD_SUCCEEDED)
        self.assertEqual(report.bytes_downloaded, 3)
        # headers=None lets the requests library supply whatever default
        # headers it thinks are appropriate.
        session.get.assert_called_once_with(url, headers=None)
# Esempio n. 15
# 0
    def test_wrong_content_encoding(self):
        URL = 'http://pulpproject.org/primary.xml.gz'
        req = DownloadRequest(URL, StringIO())

        response = Response()
        response.status_code = httplib.OK
        response.raw = StringIO('abc')

        session = threaded.build_session(self.config)
        session.get = mock.MagicMock(return_value=response, spec_set=session.get)

        report = self.downloader._fetch(req, session)

        # Success, with all three raw bytes counted.
        self.assertEqual(report.state, report.DOWNLOAD_SUCCEEDED)
        self.assertEqual(report.bytes_downloaded, 3)
        session.get.assert_called_once_with(URL, headers={'accept-encoding': ''})
    def test_response_headers(self):
        """
        Make sure that whatever headers come back on the response get added
        to the report.
        """
        req = DownloadRequest('http://pulpproject.org/robots.txt', StringIO(),
                              headers={'pulp_header': 'awesome!'})
        response = Response()
        response.status_code = httplib.OK
        response.headers = {'content-length': '1024'}
        response.raw = StringIO('abc')
        session = threaded.build_session(self.config)
        session.get = mock.MagicMock(return_value=response, spec_set=session.get)

        report = self.downloader._fetch(req, session)

        # The response headers are copied onto the download report.
        self.assertEqual(report.headers['content-length'], '1024')
# Esempio n. 17
# 0
    def test_normal_content_encoding(self):
        URL = 'http://pulpproject.org/primary.xml'
        req = DownloadRequest(URL, StringIO())

        response = Response()
        response.status_code = httplib.OK
        response.iter_content = mock.MagicMock(return_value=['abc'],
                                               spec_set=response.iter_content)

        session = threaded.build_session(self.config)
        session.get = mock.MagicMock(return_value=response,
                                     spec_set=session.get)

        report = self.downloader._fetch(req, session)

        self.assertEqual(report.state, report.DOWNLOAD_SUCCEEDED)
        self.assertEqual(report.bytes_downloaded, 3)
        # An empty headers dict is forwarded, letting requests add its own.
        session.get.assert_called_once_with(URL, headers={})
# Esempio n. 18
# 0
    def test_response_headers(self):
        """
        Make sure that whatever headers come back on the response get added
        to the report.
        """
        URL = 'http://pulpproject.org/robots.txt'
        req = DownloadRequest(URL, StringIO(), headers={'pulp_header': 'awesome!'})

        response = Response()
        response.status_code = httplib.OK
        response.headers = {'content-length': '1024'}
        response.raw = StringIO('abc')

        session = threaded.build_session(self.config)
        session.get = mock.MagicMock(return_value=response, spec_set=session.get)

        report = self.downloader._fetch(req, session)

        # The report exposes the headers the server sent back.
        self.assertEqual(report.headers['content-length'], '1024')
# Esempio n. 19
# 0
    def test_configure_session(self):
        kwargs = {'basic_auth_username': '******',
                  'basic_auth_password': '******',
                  'headers': {'pulp-header': 'awesome!'},
                  'ssl_validation': False,
                  'ssl_client_cert_path': os.path.join(_find_data_directory(),
                                                       'pki/bogus/cert.pem'),
                  'ssl_client_key_path': os.path.join(_find_data_directory(),
                                                      'pki/bogus/key.pem'),
                  'proxy_url': 'https://invalid-proxy.com',
                  'proxy_port': 1234,
                  'proxy_username': '******',
                  'proxy_password': '******'}
        proxy_host = urllib.splithost(urllib.splittype(kwargs['proxy_url'])[1])[0]

        cfg = config.DownloaderConfig(**kwargs)
        session = threaded.build_session(cfg)

        self.assertEqual(session.stream, True)
        # Requests adds its own headers too; check only the one we supplied.
        self.assertEqual(session.headers.get('pulp-header'), 'awesome!')

        # Basic-auth and proxy credentials land on the session auth object.
        for attr, key in (('username', 'basic_auth_username'),
                          ('password', 'basic_auth_password'),
                          ('proxy_username', 'proxy_username'),
                          ('proxy_password', 'proxy_password')):
            self.assertEqual(getattr(session.auth, attr), kwargs[key])

        self.assertEqual(session.cert, (kwargs['ssl_client_cert_path'],
                                        kwargs['ssl_client_key_path']))
        # Proxy credentials are url-encoded before being embedded in the URL.
        proxy_url = 'https://%s:%s@%s:%d' % (urllib.quote(kwargs['proxy_username']),
                                             urllib.quote(kwargs['proxy_password']),
                                             proxy_host,
                                             kwargs['proxy_port'])
        self.assertEqual(session.proxies, {'http': proxy_url, 'https': proxy_url})