    def result(self):

        if self.uri == "https://wayback.archive-it.org/web/19700101000000/http://connectionerror":
            raise ConnectionError("connectionerror")

        if self.uri == "https://wayback.archive-it.org/web/19700101000000/http://toomanyredirects":
            raise TooManyRedirects("toomanyredirects")

        if self.uri == "https://wayback.archive-it.org/web/19700101000000/http://goodmemento":
            mr = MockResponse(self.uri)
            mr.url = self.uri
            return mr

        if self.uri == "https://wayback.archive-it.org/web/19700101000000/http://goodmementowithredirectstart":

            mr = MockResponse(self.uri)

            mr.history = [
                MockResponse("https://wayback.archive-it.org/web/19700101000000/http://goodmementowithredirectstart"),
                MockResponse("https://wayback.archive-it.org/web/19700101000000/http://goodmementowithredirectend")
            ]

            mr.url = "https://wayback.archive-it.org/web/19700101000000/http://goodmementowithredirectend"

            return mr
Example #2
    def test_redirects_exception_is_raised(self):
        register_uri(GET,
                     self.base_url + 'classes/sms',
                     body=json.dumps(TestClient.RESPONSE_BASIC),
                     content_type='text/plain')

        with patch('com.tdigital.sd.admin.client.request') as req_mock:
            req_mock.side_effect = TooManyRedirects('...')
            # Send the request
            self.assertRaises(TooManyRedirects, self.client.get, 'classes/sms')
Example #3
def monkeypatch_test_too_many_redirects_exception(
    self,
    method=None,
    url=None,
    headers=None,
    params=None,
    stream=False,
    proxies=None,
    timeout=None,
):
    raise TooManyRedirects("Test Connect Timeout Error")
Example #4
def test_get_should_invalid_cache_on_too_many_redirects_error(mocker):
    delete_cache = mocker.patch("cachecontrol.caches.file_cache.FileCache.delete")

    response = Response()
    response.encoding = "utf-8"
    response.raw = BytesIO(encode('{"foo": "bar"}'))
    mocker.patch(
        "cachecontrol.adapter.CacheControlAdapter.send",
        side_effect=[TooManyRedirects(), response],
    )
    repository = PyPiRepository()
    repository._get("https://pypi.org/pypi/async-timeout/json")

    assert delete_cache.called
Example #5
def test_get_should_invalid_cache_on_too_many_redirects_error(mocker: MockerFixture):
    delete_cache = mocker.patch("cachecontrol.caches.file_cache.FileCache.delete")

    response = Response()
    response.status_code = 200
    response.encoding = "utf-8"
    response.raw = BytesIO(encode('{"foo": "bar"}'))
    mocker.patch(
        "poetry.utils.authenticator.Authenticator.get",
        side_effect=[TooManyRedirects(), response],
    )
    repository = PyPiRepository()
    repository._get("https://pypi.org/pypi/async-timeout/json")

    assert delete_cache.called
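Both poetry tests above assert the same behaviour: when the underlying transport raises TooManyRedirects, the cached entry is dropped (FileCache.delete is called) and the request is retried. Below is a minimal, generic sketch of that invalidate-and-retry pattern, not poetry's actual implementation; the cache object with a delete() method is an assumption here.

import requests
from requests.exceptions import TooManyRedirects

def get_with_cache_invalidation(session: requests.Session, cache, url: str):
    # Treat TooManyRedirects as a symptom of a stale cached redirect chain:
    # drop the cache entry for this URL and retry the request once.
    try:
        return session.get(url)
    except TooManyRedirects:
        cache.delete(url)
        return session.get(url)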
Example #6
    def test_get_endpoints_uncached_SD_conn_error_should_raise_conn_exception(
            self):
        self.requestGetMock.side_effect = TooManyRedirects()
        library = ServiceDirectory('localhost',
                                   8000,
                                   'v1',
                                   ttr=0.1,
                                   ttl=1.0 / 3600,
                                   timeout=30)
        self.assertRaises(ConnectionException, library.bind_instance,
                          'test_api')
        self.requestGetMock.assert_called_once_with(ANY,
                                                    timeout=30,
                                                    auth=ANY,
                                                    params=ANY)
Example #7
def http_get(url):
    try:
        r = requests.get(url)
        # check if captcha
        if "https://www.wg-gesucht.de/cuba.html" in r.url:
            raise TooManyRedirects("Captcha appeared! Exit")

        if r.status_code == 200:
            return r
        else:
            raise requests.HTTPError(
                f"Request failed with status_code {r.status_code}"
            )
    except requests.ConnectionError as e:
        print(url + " probably offline!")
        raise e
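A caller of http_get above has three outcomes to handle: a usable response, a TooManyRedirects signalling the captcha page, and an HTTPError or re-raised ConnectionError for ordinary failures. Below is a minimal sketch of one way to react; the exit-on-captcha and skip-on-HTTP-error policy is an assumption, not part of the original function.

import sys
import requests
from requests.exceptions import TooManyRedirects

def fetch_or_exit(url):
    # Wraps http_get() from the example above: stop the whole run when the
    # captcha redirect is detected, skip the URL on an HTTP error.
    try:
        return http_get(url)
    except TooManyRedirects as exc:
        sys.exit(str(exc))
    except requests.HTTPError as exc:
        print(f"skipping {url}: {exc}")
        return None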
Example #8
    def test_retrying_on_requests_exception(self, fake_requests):
        fake_response = MagicMock()
        side_effects = [
            Timeout(),
            TooManyRedirects(),
            RequestException(),
            fake_response,  # should not involve a retry
        ]
        fake_requests.get = MagicMock(side_effect=side_effects)
        fake_requests.exceptions.RequestException = RequestException

        conn = SpotifyConnector(session=MagicMock())
        try:
            result = conn._SpotifyConnector__perform_request(url="any")
        except Exception as e:
            self.fail(f"Should not catch exception during request : {e}")

        self.assertEquals(result, fake_response)
Example #9
    def resolve_redirects(self,
                          resp,
                          req,
                          stream=False,
                          timeout=None,
                          verify=True,
                          cert=None,
                          proxies=None,
                          **adapter_kwargs):
        """Receives a Response. Returns a generator of Responses."""

        i = 0
        hist = []  # keep track of history

        while resp.is_redirect:
            prepared_request = req.copy()

            if i > 0:
                # Update history and keep track of redirects.
                hist.append(resp)
                new_hist = list(hist)
                resp.history = new_hist

            try:
                resp.content  # Consume socket so it can be released
            except (ChunkedEncodingError, ContentDecodingError, RuntimeError):
                resp.raw.read(decode_content=False)

            if i >= self.max_redirects:
                raise TooManyRedirects('Exceeded %s redirects.' %
                                       self.max_redirects,
                                       response=resp)

            # Release the connection back into the pool.
            resp.close()

            url = resp.headers['location']

            # Handle redirection without scheme (see: RFC 1808 Section 4)
            if url.startswith('//'):
                parsed_rurl = urlparse(resp.url)
                url = '%s:%s' % (parsed_rurl.scheme, url)

            # The scheme should be lower case...
            parsed = urlparse(url)
            url = parsed.geturl()

            # Facilitate relative 'location' headers, as allowed by RFC 7231.
            # (e.g. '/path/to/resource' instead of 'http://domain.tld/path/to/resource')
            # Compliant with RFC3986, we percent encode the url.
            if not parsed.netloc:
                url = urljoin(resp.url, requote_uri(url))
            else:
                url = requote_uri(url)

            prepared_request.url = to_native_string(url)
            # Cache the url, unless it redirects to itself.
            if resp.is_permanent_redirect and req.url != prepared_request.url:
                self.redirect_cache[req.url] = prepared_request.url

            self.rebuild_method(prepared_request, resp)

            # https://github.com/kennethreitz/requests/issues/1084
            if resp.status_code not in (codes.temporary_redirect,
                                        codes.permanent_redirect):
                if 'Content-Length' in prepared_request.headers:
                    del prepared_request.headers['Content-Length']

                prepared_request.body = None

            headers = prepared_request.headers
            try:
                del headers['Cookie']
            except KeyError:
                pass

            # Extract any cookies sent on the response to the cookiejar
            # in the new request. Because we've mutated our copied prepared
            # request, use the old one that we haven't yet touched.
            extract_cookies_to_jar(prepared_request._cookies, req, resp.raw)
            prepared_request._cookies.update(self.cookies)
            prepared_request.prepare_cookies(prepared_request._cookies)

            # Rebuild auth and proxy information.
            proxies = self.rebuild_proxies(prepared_request, proxies)
            self.rebuild_auth(prepared_request, resp)

            # Override the original request.
            req = prepared_request

            resp = self.send(req,
                             stream=stream,
                             timeout=timeout,
                             verify=verify,
                             cert=cert,
                             proxies=proxies,
                             allow_redirects=False,
                             **adapter_kwargs)

            extract_cookies_to_jar(self.cookies, prepared_request, resp.raw)

            i += 1
            yield resp
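The loop above raises TooManyRedirects as soon as the redirect count reaches Session.max_redirects (30 by default). Below is a minimal sketch of triggering that code path from caller code; it assumes network access to httpbin.org, which is not part of the example above.

import requests
from requests.exceptions import TooManyRedirects

session = requests.Session()
session.max_redirects = 2  # lower the default limit of 30

try:
    # httpbin.org/redirect/5 answers with five consecutive redirects,
    # which exceeds the limit configured above.
    session.get("https://httpbin.org/redirect/5")
except TooManyRedirects as exc:
    print("gave up:", exc)  # "Exceeded 2 redirects."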
Example #10
    def resolve(self, spider, type):
        """This actually executes a request for this URL.

        ``type`` specifies whether a HEAD request suffices, or if you
        need a full request. The trick is that this will cache the
        response, and return the cached response if possible.

        It can therefore be called by different parts of the system
        (the tests, the spider, the mirror) without concern for
        unnecessary network traffic.
        """
        assert type in ('head', 'full')

        # TODO: Consider just raising the error all the way through
        # the rule handling.

        # If we have already tried to resolve this url and there was an
        # error, don't bother again; that is, we skip the
        # upgrade-HEAD-to-GET logic.
        if (self.response and not self.response.ok) or self.exception:
            return self.response

        # Skip if the previous request is sufficient for the requested type
        # (i.e. not a HEAD response when we are asking for a full GET)
        if self.response is not None and (
                    self.response.request.method != 'HEAD' or type=='head'):
            return self.response

        try:
            if type == 'head':
                method = 'HEAD'
            else:
                method = 'POST' if self.post is not None else 'GET'
            request = requests.Request(method, self.original_url, data=self.post)
            if self.previous:
                request.headers['Referer'] = self.previous.original_url
            spider.rules.configure_request(request, self, spider)

            request = spider.session.prepare_request(request)
            response = spider.session.send(
                request,
                # If the url is not saved and not a document, we don't
                # need to access the content. The question is:
                # TODO: Is it better to close() or to keep-alive?
                # This also affects redirects handling, if we don't close
                # we can't use the same connection to resolve redirects.
                stream=True,  # method=='GET'
                # Handle redirects manually
                allow_redirects=False)

            redirects = list(spider.session.resolve_redirects(
                response, request,
                # Important: We do NOT fetch the body of the final url
                # (and hopefully `resolve_redirects` wouldn't waste any
                # time on a large intermediary url either). This is because
                # at this time we only care about the final url. If this
                # url is not to be processed, we will not have wasted
                # bandwidth.
                # TODO: Consider doing the redirect resolving using HEAD.
                stream=True))

            response.redirects = redirects
            if redirects and redirects[-1].url == self.original_url:
                raise TooManyRedirects()

            self.response = response
        except (TooManyRedirects):
            self.response = False
            self.exception = None
        except (ConnectionError, Timeout) as e:
            self.response = False
            self.exception = e

        return self.response
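The caching rule described in the docstring reduces to the check on self.response.request.method: a cached response satisfies a later call unless it came from a HEAD request and a full GET is now needed. Below is a standalone sketch of just that decision, with illustrative names that are not taken from the class above.

def cached_response_suffices(cached_method: str, requested_type: str) -> bool:
    # A cached response is reusable unless it was produced by a HEAD
    # request and the caller now asks for a full body.
    return cached_method != 'HEAD' or requested_type == 'head'

assert cached_response_suffices('GET', 'full')
assert cached_response_suffices('HEAD', 'head')
assert not cached_response_suffices('HEAD', 'full')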
Example #11
    def resolve_redirects(self,
                          resp,
                          req,
                          stream=False,
                          timeout=None,
                          verify=True,
                          cert=None,
                          proxies=None,
                          yield_requests=False,
                          **adapter_kwargs):
        """Receives a Response. Returns a generator of Responses or Requests."""

        hist = []  # keep track of history

        url = self.get_redirect_target(resp)
        while url:
            prepared_request = req.copy()

            # Update history and keep track of redirects.
            # resp.history must ignore the original request in this loop
            hist.append(resp)
            resp.history = hist[1:]

            # Consume socket so it can be released
            resp.content

            if self.max_redirects <= len(resp.history):
                raise TooManyRedirects('Exceeded %s redirects.' %
                                       self.max_redirects,
                                       response=resp)

            # Release the connection
            resp.close()

            # Handle redirection without scheme (see: RFC 1808 Section 4)
            if url.startswith('//'):
                parsed_rurl = urlparse(resp.url)
                url = '%s:%s' % (to_native_string(parsed_rurl.scheme), url)

            # The scheme should be lower case...
            parsed = urlparse(url)
            url = parsed.geturl()

            # Facilitate relative 'location' headers, as allowed by RFC 7231.
            # (e.g. '/path/to/resource' instead of 'http://domain.tld/path/to/resource')
            # Compliant with RFC3986, we percent encode the url.
            if not parsed.netloc:
                url = urljoin(resp.url, requote_uri(url))
            else:
                url = requote_uri(url)

            prepared_request.url = to_native_string(url)

            self.rebuild_method(prepared_request, resp)

            # https://github.com/requests/requests/issues/1084
            if resp.status_code not in (codes.temporary_redirect,
                                        codes.permanent_redirect):
                # https://github.com/requests/requests/issues/3490
                purged_headers = ('Content-Length', 'Content-Type',
                                  'Transfer-Encoding')
                for header in purged_headers:
                    prepared_request.headers.pop(header, None)
                prepared_request.body = None

            headers = prepared_request.headers
            try:
                del headers['Cookie']
            except KeyError:
                pass

            # Extract any cookies sent on the response to the cookiejar
            # in the new request. Because we've mutated our copied prepared
            # request, use the old one that we haven't yet touched.
            prepared_request._cookies.extract_cookies(
                MockResponse(HTTPHeaderDict(resp.headers)), MockRequest(req))
            merge_cookies(prepared_request._cookies, self.cookies)
            prepared_request.prepare_cookies(prepared_request._cookies)

            # Rebuild auth and proxy information.
            proxies = self.rebuild_proxies(prepared_request, proxies)
            self.rebuild_auth(prepared_request, resp)

            # Override the original request.
            req = prepared_request
            req.adapt_prepare()

            if yield_requests:
                yield req
            else:
                resp = self.send(req,
                                 stream=stream,
                                 timeout=timeout,
                                 verify=verify,
                                 cert=cert,
                                 proxies=proxies,
                                 allow_redirects=False,
                                 **adapter_kwargs)

                yield resp

                while not resp.done():
                    yield resp
                resp = resp.result()

                self.cookies.extract_cookies(
                    MockResponse(HTTPHeaderDict(resp.headers)),
                    MockRequest(prepared_request))

                # extract redirect url, if any, for the next loop
                url = self.get_redirect_target(resp)
Example #12
def various_errors(domain):
    various_errors.count += 1
    if various_errors.count < 3:
        raise TooManyRedirects()
    else:
        raise Exception()
Example #13
def error(domain):
    raise TooManyRedirects()
Example #14
    async def send(self, request, **kwargs):
        """Send a given PreparedRequest.

        :rtype: requests.Response
        """
        allow_redirects = kwargs.pop('allow_redirects', True)
        if not allow_redirects:
            return await self._send(request, **kwargs)

        history = []
        while True:
            resp = await self._send(request, **kwargs)
            resp.history = history[:]
            history.append(resp)
            if not resp.is_redirect:
                return resp

            # Release the connection back into the pool.
            await resp.close()

            if len(history) > self.max_redirects:
                raise TooManyRedirects('Exceeded %s redirects.' %
                                       self.max_redirects,
                                       response=resp)

            next_request = request.copy()
            next_request.url = self._get_next_url(resp)
            next_request.method = self._get_next_method(resp)
            logger.debug(
                f'Redirect to: {next_request.method} {next_request.url}')
            headers = next_request.headers

            # https://github.com/requests/requests/issues/1084
            if resp.status_code not in (307, 308):
                # https://github.com/requests/requests/issues/3490
                purged_headers = ('Content-Length', 'Content-Type',
                                  'Transfer-Encoding')
                for header in purged_headers:
                    next_request.headers.pop(header, None)
                next_request.body = None

            # Attempt to rewind consumed file-like object.
            should_rewind = (('Content-Length' in headers
                              or 'Transfer-Encoding' in headers)
                             and isinstance(next_request.body,
                                            (MultipartBody, StreamBody)))
            if should_rewind:
                logger.debug(
                    f'Rewind request body for redirection: {next_request}')
                next_request.body.rewind()

            try:
                del headers['Cookie']
            except KeyError:
                pass

            # Extract any cookies sent on the response to the cookiejar
            # in the new request. Because we've mutated our copied prepared
            # request, use the old one that we haven't yet touched.
            extract_cookies_to_jar(next_request._cookies, request, resp.raw)
            merge_cookies(next_request._cookies, self.cookies)
            next_request.prepare_cookies(next_request._cookies)

            self.rebuild_auth(next_request, resp)

            # Override the original request.
            request = next_request
Example #15
    async def resolve_redirects(self,
                                resp,
                                req,
                                stream=False,
                                timeout=None,
                                verify=True,
                                cert=None,
                                proxies=None,
                                yield_requests=False,
                                **adapter_kwargs):
        """Receives a Response. Returns a generator of Responses or Requests."""
        hist = []  # keep track of history

        url = self.get_redirect_target(resp)
        previous_fragment = urlparse(req.url).fragment
        while url:
            prepared_request = req.copy()

            # Update history and keep track of redirects.
            # resp.history must ignore the original request in this loop
            hist.append(resp)
            resp.history = hist[1:]

            try:
                resp.content  # Consume socket so it can be released
            except (ChunkedEncodingError, ContentDecodingError, RuntimeError):
                resp.raw.read(decode_content=False)

            if len(resp.history) >= self.max_redirects:
                raise TooManyRedirects("Exceeded %s redirects." %
                                       self.max_redirects,
                                       response=resp)

            # Release the connection back into the pool.
            resp.close()

            # Handle redirection without scheme (see: RFC 1808 Section 4)
            if url.startswith("//"):
                parsed_rurl = urlparse(resp.url)
                url = "%s:%s" % (to_native_string(parsed_rurl.scheme), url)

            # Normalize url case and attach previous fragment if needed (RFC 7231 7.1.2)
            parsed = urlparse(url)
            if parsed.fragment == "" and previous_fragment:
                parsed = parsed._replace(fragment=previous_fragment)
            elif parsed.fragment:
                previous_fragment = parsed.fragment
            url = parsed.geturl()

            # Facilitate relative 'location' headers, as allowed by RFC 7231.
            # (e.g. '/path/to/resource' instead of 'http://domain.tld/path/to/resource')
            # Compliant with RFC3986, we percent encode the url.
            if not parsed.netloc:
                url = urljoin(resp.url, requote_uri(url))
            else:
                url = requote_uri(url)

            prepared_request.url = to_native_string(url)

            self.rebuild_method(prepared_request, resp)

            # https://github.com/requests/requests/issues/1084
            if resp.status_code not in (
                    codes.temporary_redirect,
                    codes.permanent_redirect,
            ):
                # https://github.com/requests/requests/issues/3490
                purged_headers = ("Content-Length", "Content-Type",
                                  "Transfer-Encoding")
                for header in purged_headers:
                    prepared_request.headers.pop(header, None)
                prepared_request.body = None

            headers = prepared_request.headers
            try:
                del headers["Cookie"]
            except KeyError:
                pass

            # Extract any cookies sent on the response to the cookiejar
            # in the new request. Because we've mutated our copied prepared
            # request, use the old one that we haven't yet touched.
            extract_cookies_to_jar(prepared_request._cookies, req, resp.raw)
            merge_cookies(prepared_request._cookies, self.cookies)
            prepared_request.prepare_cookies(prepared_request._cookies)

            # Rebuild auth and proxy information.
            proxies = self.rebuild_proxies(prepared_request, proxies)
            self.rebuild_auth(prepared_request, resp)

            # A failed tell() sets `_body_position` to `object()`. This non-None
            # value ensures `rewindable` will be True, allowing us to raise an
            # UnrewindableBodyError, instead of hanging the connection.
            rewindable = prepared_request._body_position is not None and (
                "Content-Length" in headers or "Transfer-Encoding" in headers)

            # Attempt to rewind consumed file-like object.
            if rewindable:
                rewind_body(prepared_request)

            # Override the original request.
            req = prepared_request

            if yield_requests:
                yield req
            else:

                resp = await self.send(req,
                                       stream=stream,
                                       timeout=timeout,
                                       verify=verify,
                                       cert=cert,
                                       proxies=proxies,
                                       allow_redirects=False,
                                       **adapter_kwargs)

                extract_cookies_to_jar(self.cookies, prepared_request,
                                       resp.raw)

                # extract redirect url, if any, for the next loop
                url = self.get_redirect_target(resp)
                yield resp
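The fragment handling above (RFC 7231 7.1.2) carries the previous request's fragment over to a redirect target that has none. The same step can be reproduced with the standard-library urllib.parse instead of requests.compat; the example URLs below are illustrative.

from urllib.parse import urlparse

previous_fragment = urlparse('http://example.com/start#section-2').fragment

parsed = urlparse('http://example.com/next')  # redirect target, no fragment
if parsed.fragment == '' and previous_fragment:
    parsed = parsed._replace(fragment=previous_fragment)

print(parsed.geturl())  # http://example.com/next#section-2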
Example #16
def http_too_many_redirects(url, request):
    ''' Mock TooManyRedirects
    '''
    raise TooManyRedirects('Gazillion')
Example #17
from requests.exceptions import TooManyRedirects
from requests.exceptions import InvalidURL

@pytest.mark.parametrize("payload_a,payload_b,expected", [
    ({ "foo": "bar"}, { "one": "two" }, { "foo": "bar", "one": "two" }),
])
def test_merge_payloads(core_client, payload_a, payload_b, expected):
    merged = core_client.merge_payloads(payload_a, payload_b)
    assert merged == expected

@pytest.mark.parametrize("resource,caught,raised", [
    ("foo", SSLError(), cpauto.SSLError),
    ("foo", ConnectionError(), cpauto.ConnectionError),
    ("foo", HTTPError(), cpauto.HTTPError),
    ("foo", Timeout(), cpauto.Timeout),
    ("foo", TooManyRedirects(), cpauto.TooManyRedirects),
    ("foo", InvalidURL(), cpauto.InvalidURL),
])
def test_http_post_exceptions(core_client, mgmt_server_base_uri, resource, caught, raised):
    endpoint = mgmt_server_base_uri + resource
    with responses.RequestsMock(assert_all_requests_are_fired=False) as rsps:
        rsps.add(responses.POST, endpoint,
                 body=caught, status=200,
                 content_type='application/json')

        with pytest.raises(raised):
            r = core_client.http_post(endpoint=resource, payload={})

@pytest.mark.parametrize("params", [
    ({}),
    ({ "continue-last-session": True}),
Example #18
    def request(self,
                method,
                url,
                params=None,
                data=None,
                headers=None,
                cookies=None,
                files=None,
                auth=None,
                timeout=None,
                allow_redirects=True,
                proxies=None,
                hooks=None,
                stream=None,
                verify=None,
                cert=None,
                json=None,
                **kwargs):
        """Constructs a :class:`Request <Request>`, prepares it and sends it.
        Returns :class:`Response <Response>` object.

        :param method: method for the new :class:`Request` object.
        :param url: URL for the new :class:`Request` object.
        :param params: (optional) Dictionary or bytes to be sent in the query
            string for the :class:`Request`.
        :param data: (optional) Dictionary, bytes, or file-like object to send
            in the body of the :class:`Request`.
        :param json: (optional) json to send in the body of the
            :class:`Request`.
        :param headers: (optional) Dictionary of HTTP Headers to send with the
            :class:`Request`.
        :param cookies: (optional) Dict or CookieJar object to send with the
            :class:`Request`.
        :param files: (optional) Dictionary of ``'filename': file-like-objects``
            for multipart encoding upload.
        :param auth: (optional) Auth tuple or callable to enable
            Basic/Digest/Custom HTTP Auth.
        :param timeout: (optional) How long to wait for the server to send
            data before giving up, as a float, or a :ref:`(connect timeout,
            read timeout) <timeouts>` tuple.
        :type timeout: float or tuple
        :param allow_redirects: (optional) Set to True by default.
        :type allow_redirects: bool
        :param proxies: (optional) Dictionary mapping protocol or protocol and
            hostname to the URL of the proxy.
        :param stream: (optional) whether to immediately download the response
            content. Defaults to ``False``.
        :param verify: (optional) whether the SSL cert will be verified.
            A CA_BUNDLE path can also be provided. Defaults to ``True``.
        :param cert: (optional) if String, path to ssl client cert file (.pem).
            If Tuple, ('cert', 'key') pair.
        :rtype: requests.Response
    """
        #===============================================================================================================
        # add by mz
        error_type = kwargs.get("error_type")
        if error_type:
            from requests.exceptions import InvalidURL, URLRequired, ConnectTimeout, ConnectionError, SSLError, ReadTimeout
            from requests.exceptions import InvalidSchema, MissingSchema, ChunkedEncodingError, ContentDecodingError
            from requests.exceptions import RequestException, HTTPError, ProxyError, Timeout, RetryError, StreamConsumedError
            from requests.exceptions import TooManyRedirects

            get_error = {
                "InvalidURL": InvalidURL(),
                "URLRequired": URLRequired(),
                "ConnectTimeout": ConnectTimeout(),
                "ConnectionError": ConnectionError(),
                "SSLError": SSLError(),
                "ReadTimeout": ReadTimeout(),
                "InvalidSchema": InvalidSchema(),
                "MissingSchema": MissingSchema(),
                "ChunkedEncodingError": ChunkedEncodingError(),
                "ContentDecodingError": ContentDecodingError(),
                "StreamConsumedError": StreamConsumedError(),
                "TooManyRedirects": TooManyRedirects(),
                "RequestException": RequestException(),
                "HTTPError": HTTPError(),
                "ProxyError": ProxyError(),
                "Timeout": Timeout(),
                "RetryError": RetryError
            }

            error_ = get_error[error_type]
            raise error_
        #===============================================================================================================

        # Create the Request
        req = Request(
            method=method.upper(),
            url=url,
            headers=headers,
            files=files,
            data=data or {},
            json=json,
            params=params or {},
            auth=auth,
            cookies=cookies,
            hooks=hooks,
        )
        prep = self.prepare_request(req)

        proxies = proxies or {}

        settings = self.merge_environment_settings(prep.url, proxies, stream,
                                                   verify, cert)

        # Send the request.
        send_kwargs = {
            'timeout': timeout,
            'allow_redirects': allow_redirects,
        }
        send_kwargs.update(settings)
        resp = self.send(prep, **send_kwargs)
        return resp
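The block marked "add by mz" lets callers inject a requests exception by name through an extra error_type keyword, which is handy for driving error-path tests without touching the network. Below is a condensed, runnable stand-in for that idea; ErrorInjectingSession is an assumed name, not the class shown above, and it only wires up TooManyRedirects.

import pytest
import requests
from requests.exceptions import TooManyRedirects

class ErrorInjectingSession(requests.Session):
    """Minimal stand-in: an `error_type` keyword short-circuits the request
    and raises the named exception before anything is sent."""

    def request(self, method, url, **kwargs):
        if kwargs.pop('error_type', None) == 'TooManyRedirects':
            raise TooManyRedirects()
        return super().request(method, url, **kwargs)

def test_injected_too_many_redirects():
    session = ErrorInjectingSession()
    with pytest.raises(TooManyRedirects):
        session.request('GET', 'http://example.com',
                        error_type='TooManyRedirects')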
Example #19
    def test_network_errors(self):
        self.check_network_error(ConnectionError(), 'Connection error')
        self.check_network_error(Timeout(), 'Request timeout')
        self.check_network_error(TooManyRedirects(), 'Too many redirects')
Example #20
def test__resource_too_many_redirects(api, json_data):
    api_get_mock = mock.Mock()
    api_get_mock.side_effect = TooManyRedirects()
    api.session.get = api_get_mock
    with pytest.raises(TooManyRedirects):
        api.artists(artistName="foo")