Example #1
 def proxy_url_for(self, url):
     """Retrirves the corresponding proxy url for a given url. """
     parsed_url = urlparse(url)
     proxy = self._proxies.get(parsed_url.scheme)
     if proxy:
         proxy = self._fix_proxy_url(proxy)
     return proxy
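A rough standalone sketch of the same scheme-keyed lookup (the proxies mapping and the scheme-defaulting fix-up are assumptions standing in for the instance's _proxies and _fix_proxy_url):

from urllib.parse import urlparse

def proxy_url_for(proxies, url):
    # Look up the proxy configured for this URL's scheme, if any.
    proxy = proxies.get(urlparse(url).scheme)
    if proxy and '://' not in proxy:
        # Hypothetical stand-in for _fix_proxy_url: default to http://.
        proxy = 'http://' + proxy
    return proxy

print(proxy_url_for({'https': 'proxy.example.com:8080'},
                    'https://ec2.us-west-2.amazonaws.com/'))
# -> http://proxy.example.com:8080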
Example #2
 def test_copy_snapshot_injects_presigned_url(self):
     self.add_copy_snapshot_response(self.snapshot_id)
     with self.http_stubber:
         result = self.client.copy_snapshot(
             SourceRegion='us-west-2',
             SourceSnapshotId=self.snapshot_id,
         )
     self.assertEqual(result['SnapshotId'], self.snapshot_id)
     self.assertEqual(len(self.http_stubber.requests), 1)
     snapshot_request = self.http_stubber.requests[0]
     body = parse_qs(snapshot_request.body)
     self.assertIn('PresignedUrl', body)
     presigned_url = urlparse(body['PresignedUrl'][0])
     self.assertEqual(presigned_url.scheme, 'https')
     self.assertEqual(presigned_url.netloc, 'ec2.us-west-2.amazonaws.com')
     query_args = parse_qs(presigned_url.query)
     self.assertEqual(query_args['Action'], ['CopySnapshot'])
     self.assertEqual(query_args['Version'], ['2016-11-15'])
     self.assertEqual(query_args['SourceRegion'], ['us-west-2'])
     self.assertEqual(query_args['DestinationRegion'], ['us-east-1'])
     self.assertEqual(query_args['SourceSnapshotId'], [self.snapshot_id])
     self.assertEqual(query_args['X-Amz-Algorithm'], ['AWS4-HMAC-SHA256'])
     expected_credential = 'access_key/20110909/us-west-2/ec2/aws4_request'
     self.assertEqual(query_args['X-Amz-Credential'], [expected_credential])
     self.assertEqual(query_args['X-Amz-Date'], ['20110909T233600Z'])
     self.assertEqual(query_args['X-Amz-Expires'], ['3600'])
     self.assertEqual(query_args['X-Amz-SignedHeaders'], ['host'])
     expected_signature = (
         'a94a6b52afdf3daa34c2e2a38a62b72c8dac129c9904c61aa1a5d86e38628537')
     self.assertEqual(query_args['X-Amz-Signature'], [expected_signature])
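The parsing pattern the test leans on works on any presigned URL; a minimal sketch with a hand-built URL (the host and query arguments are illustrative, not the stubbed response above):

from urllib.parse import urlparse, parse_qs

presigned_url = ('https://ec2.us-west-2.amazonaws.com/'
                 '?Action=CopySnapshot&Version=2016-11-15'
                 '&SourceRegion=us-west-2&DestinationRegion=us-east-1')
parts = urlparse(presigned_url)
query_args = parse_qs(parts.query)
assert parts.netloc == 'ec2.us-west-2.amazonaws.com'
assert query_args['Action'] == ['CopySnapshot']  # parse_qs values are lists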
Example #3
 def _path_url(self, url):
     parsed_url = urlparse(url)
     path = parsed_url.path
     if not path:
         path = '/'
     if parsed_url.query:
         path = path + '?' + parsed_url.query
     return path
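The same origin-form logic works as a free function; a quick sketch with two sanity checks (example.com is just an illustrative host):

from urllib.parse import urlparse

def path_url(url):
    # Origin-form request target: path (defaulting to '/') plus any query.
    parsed_url = urlparse(url)
    path = parsed_url.path or '/'
    if parsed_url.query:
        path = path + '?' + parsed_url.query
    return path

assert path_url('https://example.com') == '/'
assert path_url('https://example.com/a?b=1') == '/a?b=1'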
Example #4
 def _prepare_url(self, original):
     url = original.url
     if original.params:
         url_parts = urlparse(url)
         delim = '&' if url_parts.query else '?'
         params = urlencode(list(original.params.items()), doseq=True)
         url = delim.join((url, params))
     return url
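A free-function sketch of the same delimiter choice (the params mapping here is an assumed plain dict rather than the request object's attribute):

from urllib.parse import urlparse, urlencode

def prepare_url(url, params):
    # Append params with '?' or '&' depending on an existing query string.
    if params:
        delim = '&' if urlparse(url).query else '?'
        url = delim.join((url, urlencode(list(params.items()), doseq=True)))
    return url

assert prepare_url('https://example.com/x', {'a': '1'}) == 'https://example.com/x?a=1'
assert prepare_url('https://example.com/x?a=1', {'b': '2'}) == 'https://example.com/x?a=1&b=2'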
Example #5
def mask_proxy_url(proxy_url):
    """
    Mask proxy url credentials.

    :type proxy_url: str
    :param proxy_url: The proxy url, e.g. https://username:password@proxy.com

    :return: Masked proxy url, e.g. https://***:***@proxy.com
    """
    mask = '*' * 3
    parsed_url = urlparse(proxy_url)
    if parsed_url.username:
        proxy_url = proxy_url.replace(parsed_url.username, mask, 1)
    if parsed_url.password:
        proxy_url = proxy_url.replace(parsed_url.password, mask, 1)
    return proxy_url
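With the function above in scope, a quick check of the masking behavior (proxy.example.com and the credentials are illustrative):

print(mask_proxy_url('https://user:secret@proxy.example.com:8080'))
# -> https://***:***@proxy.example.com:8080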
Example #6
    def _get_request_target(self, url, proxy_url):
        has_proxy = proxy_url is not None

        if not has_proxy:
            return self._path_url(url)

        # HTTP proxies expect the request_target to be the absolute url to know
        # which host to establish a connection to. urllib3 also supports
        # forwarding for HTTPS through the 'use_forwarding_for_https' parameter.
        proxy_scheme = urlparse(proxy_url).scheme
        using_https_forwarding_proxy = (proxy_scheme == 'https'
                                        and self._proxies_kwargs.get(
                                            'use_forwarding_for_https', False))

        if using_https_forwarding_proxy or url.startswith('http:'):
            return url
        else:
            return self._path_url(url)
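Condensed, the branch picks the absolute-form target only when the proxy actually forwards the request; a standalone sketch (path handling is simplified, and proxies_kwargs is an assumed plain dict):

from urllib.parse import urlparse

def request_target(url, proxy_url, proxies_kwargs):
    # No proxy, or a CONNECT-tunneled HTTPS proxy: origin-form path.
    # Plain HTTP via a proxy, or HTTPS forwarding enabled: absolute URL.
    if proxy_url is None:
        return urlparse(url).path or '/'
    forwarding = (urlparse(proxy_url).scheme == 'https'
                  and proxies_kwargs.get('use_forwarding_for_https', False))
    if forwarding or url.startswith('http:'):
        return url
    return urlparse(url).path or '/'

assert request_target('http://example.com/x', 'http://proxy:8080', {}) == 'http://example.com/x'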
Example #7
def should_bypass_proxies(url):
    """
    Returns whether we should bypass proxies or not.
    """
    # NOTE: requests allowed ip/cidr entries in the no_proxy env var, which
    # we don't currently support, as urllib only checks the DNS suffix.
    # If the system proxy settings indicate that this URL should be bypassed,
    # don't proxy.
    # The proxy_bypass function is incredibly buggy on OS X in early versions
    # of Python 2.6, so allow this call to fail. Only catch the specific
    # exceptions we've seen, though: this call failing in other ways can reveal
    # legitimate problems.
    try:
        if proxy_bypass(urlparse(url).netloc):
            return True
    except (TypeError, socket.gaierror):
        pass

    return False
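For a runnable version, the names come from the standard library; a sketch of the imports plus a call (whether the result is True depends on the local proxy settings, e.g. the no_proxy environment variable):

import socket
from urllib.parse import urlparse
from urllib.request import proxy_bypass

print(should_bypass_proxies('http://localhost:8080/'))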
Example #8
    def __call__(self, request):
        # Parse request URL
        url = urlparse(request.url)

        # Prepare AWS request
        awsrequest = AWSRequest(
            method=request.method,
            url=f'{url.scheme}://{url.netloc}{url.path}',
            data=request.body,
            params=dict(parse_qsl(url.query)),
        )

        # Sign request
        self.sigv4.add_auth(awsrequest)

        # Re-add original headers
        for key, val in request.headers.items():
            if key not in awsrequest.headers:
                awsrequest.headers[key] = val

        # Return prepared request
        return awsrequest.prepare()
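For context, a sketch of how the sigv4 signer used above might be constructed with botocore (the service name and region are illustrative assumptions):

from botocore.auth import SigV4Auth
import botocore.session

session = botocore.session.Session()
credentials = session.get_credentials()
# 'execute-api' and 'us-east-1' are placeholders for the target service/region.
sigv4 = SigV4Auth(credentials, 'execute-api', 'us-east-1')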
Example #9
 def _get_auth_from_url(self, url):
     parsed_url = urlparse(url)
     try:
         return unquote(parsed_url.username), unquote(parsed_url.password)
     except (AttributeError, TypeError):
         return None, None
Example #10
def _urlparse(url):
    if isinstance(url, six.binary_type):
        # Not really necessary, but it helps to reduce noise on Python 2.x
        url = url.decode('utf8')
    return urlparse(url)
Example #11
 def _urlparse(self, url):
     if isinstance(url, six.binary_type):
         # Not really necessary, but it helps to reduce noise on Python 2.x
         url = url.decode('utf8')
     return urlparse(url)
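The decode step exists because urlparse on bytes yields bytes components; a quick illustration on Python 3:

from urllib.parse import urlparse

print(urlparse(b'https://example.com/a').netloc)  # b'example.com'
print(urlparse('https://example.com/a').netloc)   # 'example.com'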
Example #12
 def _serialize_url(self, url, event_dict, **kwargs):
     event_dict['Fqdn'] = urlparse(url).netloc
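A minimal check of what the serializer records (the host name is illustrative):

from urllib.parse import urlparse

event_dict = {}
event_dict['Fqdn'] = urlparse('https://sts.us-east-1.amazonaws.com/').netloc
assert event_dict['Fqdn'] == 'sts.us-east-1.amazonaws.com'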
Example #13
 def _urlparse(self, url):
     if isinstance(url, six.binary_type):
         # Not really necessary, but it helps to reduce noise on Python 2.x
         url = url.decode('utf8')
     return dict(urlparse(url)._asdict())  # Needs an unordered dict here
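urlparse returns a namedtuple, so _asdict() exposes its six components by name and the outer dict() converts away the ordered mapping; a quick illustration:

from urllib.parse import urlparse

parts = dict(urlparse('https://example.com/p?q=1#f')._asdict())
print(parts['scheme'], parts['netloc'], parts['path'], parts['query'])
# -> https example.com /p q=1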
Example #14
 def assertDesiredUrl(self, url, base, params):
     self.assertEqual(len(url.splitlines()), 1, "Expects only 1 line")
     self.assertTrue(url.startswith(base), "URL mismatch")
     url = url.strip()  # Otherwise the last param contains a trailing CRLF
     self.assertEqual(parse_qs(urlparse(url).query), params)
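The final assertion relies on parse_qs producing list-valued entries, so an expected params argument would be shaped like this (the URL is illustrative):

from urllib.parse import urlparse, parse_qs

url = 'https://example.com/?Action=AssumeRole&Version=2011-06-15'
assert parse_qs(urlparse(url).query) == {
    'Action': ['AssumeRole'],
    'Version': ['2011-06-15'],
}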
Example #15
    def send(self, request):
        try:
            proxy_url = self._proxy_config.proxy_url_for(request.url)
            manager = self._get_connection_manager(request.url, proxy_url)
            conn = manager.connection_from_url(request.url)
            self._setup_ssl_cert(conn, request.url, self._verify)
            if ensure_boolean(
                    os.environ.get('BOTO_EXPERIMENTAL__ADD_PROXY_HOST_HEADER',
                                   '')):
                # This is currently an "experimental" feature which provides
                # no guarantees of backwards compatibility. It may be subject
                # to change or removal in any patch version. Anyone opting in
                # to this feature should strictly pin botocore.
                host = urlparse(request.url).hostname
                conn.proxy_headers['host'] = host

            request_target = self._get_request_target(request.url, proxy_url)
            urllib_response = conn.urlopen(
                method=request.method,
                url=request_target,
                body=request.body,
                headers=request.headers,
                retries=Retry(False),
                assert_same_host=False,
                preload_content=False,
                decode_content=False,
                chunked=self._chunked(request.headers),
            )

            http_response = botocore.awsrequest.AWSResponse(
                request.url,
                urllib_response.status,
                urllib_response.headers,
                urllib_response,
            )

            if not request.stream_output:
                # Cause the raw stream to be exhausted immediately. We do it
                # this way instead of using preload_content because
                # preload_content will never buffer chunked responses
                http_response.content

            return http_response
        except URLLib3SSLError as e:
            raise SSLError(endpoint_url=request.url, error=e)
        except (NewConnectionError, socket.gaierror) as e:
            raise EndpointConnectionError(endpoint_url=request.url, error=e)
        except ProxyError as e:
            raise ProxyConnectionError(proxy_url=proxy_url, error=e)
        except URLLib3ConnectTimeoutError as e:
            raise ConnectTimeoutError(endpoint_url=request.url, error=e)
        except URLLib3ReadTimeoutError as e:
            raise ReadTimeoutError(endpoint_url=request.url, error=e)
        except ProtocolError as e:
            raise ConnectionClosedError(error=e,
                                        request=request,
                                        endpoint_url=request.url)
        except Exception as e:
            message = 'Exception received when sending urllib3 HTTP request'
            logger.debug(message, exc_info=True)
            raise HTTPClientError(error=e)
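The experimental proxy-host-header branch reduces to copying the request's hostname into the proxy headers when an env flag is set; a standalone sketch (the truthiness test approximates botocore's ensure_boolean, and the headers dict stands in for conn.proxy_headers):

import os
from urllib.parse import urlparse

def maybe_add_proxy_host_header(proxy_headers, url):
    # Opt-in flag, mirroring the experimental branch above.
    flag = os.environ.get('BOTO_EXPERIMENTAL__ADD_PROXY_HOST_HEADER', '')
    if flag.lower() == 'true':  # approximates botocore's ensure_boolean
        proxy_headers['host'] = urlparse(url).hostname
    return proxy_headers

headers = maybe_add_proxy_host_header({}, 'https://sqs.us-east-1.amazonaws.com/')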