def test_no_proxy(self):
    """
    Starting with Agent 5.0.0, there should always be a local forwarder
    running and all payloads should go through it. So we should make sure
    that we pass the no_proxy environment variable that will be used by
    requests (See: https://github.com/kennethreitz/requests/pull/945 )
    """
    from requests.utils import get_environ_proxies
    from os import environ as env

    # Point every proxy-related variable at a dummy local proxy.
    env["http_proxy"] = "http://localhost:3128"
    env["https_proxy"] = env["http_proxy"]
    env["HTTP_PROXY"] = env["http_proxy"]
    env["HTTPS_PROXY"] = env["http_proxy"]

    try:
        # The agent setup is expected to have exported no_proxy for the
        # local endpoints.
        self.assertTrue("no_proxy" in env)
        # assertEqual: assertEquals is a deprecated unittest alias.
        self.assertEqual(env["no_proxy"],
                         "127.0.0.1,localhost,169.254.169.254")
        # The local intake endpoint must bypass the proxy entirely.
        self.assertEqual({},
                         get_environ_proxies("http://localhost:17123/intake"))

        expected_proxies = {
            'http': 'http://localhost:3128',
            'https': 'http://localhost:3128',
            'no': '127.0.0.1,localhost,169.254.169.254'
        }
        environ_proxies = get_environ_proxies("https://www.google.com")
        self.assertEqual(expected_proxies, environ_proxies,
                         (expected_proxies, environ_proxies))
    finally:
        # Always clear the env variables we set, even when an assertion
        # failed, so later tests are not polluted. pop() is also safe on
        # platforms with case-insensitive env var names (e.g. Windows),
        # where deleting "http_proxy" may already remove "HTTP_PROXY".
        env.pop("http_proxy", None)
        env.pop("https_proxy", None)
        env.pop("HTTP_PROXY", None)
        env.pop("HTTPS_PROXY", None)
def test_no_proxy(self):
    """
    Starting with Agent 5.0.0, there should always be a local forwarder
    running and all payloads should go through it. So we should make sure
    that we pass the no_proxy environment variable that will be used by
    requests (See: https://github.com/kennethreitz/requests/pull/945 )
    """
    from requests.utils import get_environ_proxies
    from os import environ as env

    # Point every proxy-related variable at a dummy local proxy.
    env["http_proxy"] = "http://localhost:3128"
    env["https_proxy"] = env["http_proxy"]
    env["HTTP_PROXY"] = env["http_proxy"]
    env["HTTPS_PROXY"] = env["http_proxy"]

    try:
        # The agent setup is expected to have exported no_proxy for the
        # local endpoints.
        self.assertTrue("no_proxy" in env)
        # assertEqual: assertEquals is a deprecated unittest alias.
        self.assertEqual(env["no_proxy"],
                         "127.0.0.1,localhost,169.254.169.254")
        # The local intake endpoint must bypass the proxy entirely.
        self.assertEqual({}, get_environ_proxies(
            "http://localhost:17123/intake"))

        expected_proxies = {
            'http': 'http://localhost:3128',
            'https': 'http://localhost:3128',
            'no': '127.0.0.1,localhost,169.254.169.254'
        }
        environ_proxies = get_environ_proxies("https://www.google.com")
        self.assertEqual(expected_proxies, environ_proxies,
                         (expected_proxies, environ_proxies))
    finally:
        # Always clear the env variables we set, even when an assertion
        # failed, so later tests are not polluted. pop() is also safe on
        # platforms with case-insensitive env var names (e.g. Windows).
        env.pop("http_proxy", None)
        env.pop("https_proxy", None)
        env.pop("HTTP_PROXY", None)
        env.pop("HTTPS_PROXY", None)
def test_get_environ_proxies(self):
    """
    Ensures that IP addresses are correctly matched with ranges in the
    no_proxy variable.
    """
    from requests.utils import get_environ_proxies

    # Remember any pre-existing no_proxy (None means "was not set") so
    # the test does not leak environment state into other tests.
    old_no_proxy = os.environ.get("no_proxy")
    os.environ["no_proxy"] = "127.0.0.1,localhost.localdomain,192.168.0.0/24,172.16.1.1"
    try:
        # Host listed in no_proxy: must bypass any configured proxy.
        assert get_environ_proxies("http://localhost.localdomain:5000/v1.0/") == {}
        # Host not covered by no_proxy: environment proxies still apply.
        assert get_environ_proxies("http://www.requests.com/") != {}
    finally:
        # Restore the environment to its pre-test state.
        if old_no_proxy is None:
            os.environ.pop("no_proxy", None)
        else:
            os.environ["no_proxy"] = old_no_proxy
def test_get_environ_proxies(self):
    """Ensures that IP addresses are correctly matched with ranges in the
    no_proxy variable."""
    from requests.utils import get_environ_proxies

    # Remember any pre-existing no_proxy (None means "was not set") so
    # the test does not leak environment state into other tests.
    old_no_proxy = os.environ.get('no_proxy')
    os.environ['no_proxy'] = "127.0.0.1,localhost.localdomain,192.168.0.0/24,172.16.1.1"
    try:
        # Host listed in no_proxy: must bypass any configured proxy.
        assert get_environ_proxies(
            'http://localhost.localdomain:5000/v1.0/') == {}
        # Host not covered by no_proxy: environment proxies still apply.
        assert get_environ_proxies('http://www.requests.com/') != {}
    finally:
        # Restore the environment to its pre-test state.
        if old_no_proxy is None:
            os.environ.pop('no_proxy', None)
        else:
            os.environ['no_proxy'] = old_no_proxy
def _request(self, request):
    """Send *request* through the session; on network failure, invoke the
    request's response hook with a synthetic FakeResponse instead of
    letting the error propagate (non-network errors are re-raised).
    """
    try:
        prepared = self.session.prepare_request(request)
        # Pick up proxies from the environment for the resolved URL.
        proxies = get_environ_proxies(prepared.url)
        self.session.send(prepared, timeout=self._timeout, proxies=proxies)
    except Exception as exc:
        # Default synthetic status for a failed send.
        code = 400
        exc_tuple = (
            requests.exceptions.ConnectionError,
            requests.exceptions.ReadTimeout,
        )
        # NOTE(review): "is_timeout" also covers connection errors, per
        # the tuple above — the name undersells what it matches.
        is_timeout = isinstance(exc, exc_tuple)
        if is_timeout:
            self.ui.warning(TIMEOUT_WARNING)
            # 499 is used here as a client-side "request aborted" marker.
            code = 499
        else:
            # Unexpected failure: log and propagate to the caller.
            self.ui.debug('Exception {}: {}'.format(type(exc), exc))
            raise
        # Locate the response hook; request objects may expose hooks
        # either via kwargs or via the hooks attribute.
        try:
            callback = request.kwargs['hooks']['response']
        except AttributeError:
            callback = request.hooks['response'][0]
        # Feed the hook a fake response describing the failure.
        response = FakeResponse(code, 'No Response')
        callback(response)
def test_get_environ_proxies_respects_no_proxy(self):
    '''This test confirms that the no_proxy environment setting is
    respected by get_environ_proxies().'''
    # Store the current environment settings; None means "was not set".
    old_http_proxy = os.environ.get('http_proxy')
    old_no_proxy = os.environ.get('no_proxy')

    # Set up some example environment settings.
    os.environ['http_proxy'] = 'http://www.example.com/'
    os.environ['no_proxy'] = r'localhost,.0.0.1:8080'

    # Set up expected proxy return values.
    proxy_yes = {'http': 'http://www.example.com/'}
    proxy_no = {}

    try:
        # Check that we get the right things back.
        self.assertEqual(proxy_yes,
                         get_environ_proxies('http://www.google.com/'))
        self.assertEqual(proxy_no,
                         get_environ_proxies('http://localhost/test'))
        self.assertEqual(proxy_no,
                         get_environ_proxies('http://127.0.0.1:8080/'))
        self.assertEqual(proxy_yes,
                         get_environ_proxies('http://127.0.0.1:8081/'))
    finally:
        # Return the settings to what they were. Compare against None
        # explicitly: the old value may have been the empty string, which
        # is falsy but must still be restored rather than deleted.
        if old_http_proxy is not None:
            os.environ['http_proxy'] = old_http_proxy
        else:
            del os.environ['http_proxy']
        if old_no_proxy is not None:
            os.environ['no_proxy'] = old_no_proxy
        else:
            del os.environ['no_proxy']
def __init__(self, proxies=None):
    """Initialise the wrapper exactly once, recording both the custom
    proxies passed in and whatever proxies the environment provides."""
    # Guard clause: a previously initialised instance is left untouched.
    if self._initialized:
        return
    self.logger = logging.getLogger(__name__)
    self.session = Session()
    self.proxies = proxies
    self.logger.info(
        "Using the following custom proxies: {}".format(proxies))
    system_proxies = utils.get_environ_proxies("https://example.com")
    self.logger.info(
        "Using the following system proxies: {}".format(system_proxies))
    self._initialized = True
def test_no_proxy(self):
    """
    Starting with Agent 5.0.0, there should always be a local forwarder
    running and all payloads should go through it. So we should make sure
    that we pass the no_proxy environment variable that will be used by
    requests (See: https://github.com/kennethreitz/requests/pull/945 )
    """
    from requests.utils import get_environ_proxies
    from os import environ as env

    # Point every proxy-related variable at a dummy local proxy.
    env["http_proxy"] = "http://localhost:3128"
    env["https_proxy"] = env["http_proxy"]
    env["HTTP_PROXY"] = env["http_proxy"]
    env["HTTPS_PROXY"] = env["http_proxy"]

    try:
        self.assertTrue("no_proxy" in env)
        # assertEqual: assertEquals is a deprecated unittest alias.
        self.assertEqual(env["no_proxy"],
                         "127.0.0.1,localhost,169.254.169.254")
        # The local intake endpoint must bypass the proxy entirely.
        self.assertEqual({},
                         get_environ_proxies("http://localhost:18123/intake"))

        expected_proxies = {
            'http': 'http://localhost:3128',
            'https': 'http://localhost:3128',
            'no': '127.0.0.1,localhost,169.254.169.254'
        }
        # items() replaces the Python-2-only iteritems(); both iterate
        # (key, value) pairs, and items() also exists on Python 2 dicts.
        environ_proxies = {
            k: v
            for k, v in get_environ_proxies("https://www.google.com").items()
            if k in ["http", "https", "no"]
        }
        self.assertEqual(expected_proxies, environ_proxies,
                         (expected_proxies, environ_proxies))
    finally:
        # Always clear the env variables we set, even when an assertion
        # failed. pop() avoids deleting the same key twice on platforms
        # (e.g. Windows) where env var names are case-insensitive.
        env.pop("http_proxy", None)
        env.pop("https_proxy", None)
        env.pop("HTTP_PROXY", None)
        env.pop("HTTPS_PROXY", None)
def send_request(method, path, inject_header=True, **kwargs):
    """Send an authenticated API request to the dockercloud REST host and
    return the parsed JSON body (or None for 204 responses).

    Raises ApiError on network/parse/server failures and AuthError on 401.
    NOTE(review): URLs are handled as bytes throughout this variant —
    presumably for a Python 3 host value; confirm rest_host is str.
    """
    json = None
    # Build the absolute URL and normalise it to a trailing slash.
    url = urljoin(dockercloud.rest_host.encode(), path.strip("/").encode())
    if not url.endswith(b"/"):
        url = b"%s/" % url
    # Compose the User-Agent, prepending any caller-provided agent string.
    user_agent = 'python-dockercloud/%s' % dockercloud.__version__
    if dockercloud.user_agent:
        user_agent = "%s %s" % (dockercloud.user_agent, user_agent)
    # construct headers
    headers = {'Content-Type': 'application/json', 'User-Agent': user_agent}
    headers.update(dockercloud.auth.get_auth_header())
    # construct request
    s = get_session()
    request = Request(method, url, headers=headers, **kwargs)
    # get environment proxies (fall back to an empty mapping)
    env_proxies = utils.get_environ_proxies(url) or {}
    kw_args = {'proxies': env_proxies}
    # make the request
    req = s.prepare_request(request)
    logger.info("Prepared Request: %s, %s, %s, %s" %
                (req.method, req.url, req.headers, kwargs))
    response = s.send(req, **kw_args)
    status_code = getattr(response, 'status_code', None)
    logger.info("Response: Status %s, %s, %s" %
                (str(status_code), response.headers, response.text))
    # handle the response
    if not status_code:
        # Most likely network trouble
        raise ApiError("No Response (%s %s)" % (method, url))
    elif 200 <= status_code <= 299:
        # Success
        if status_code != 204:
            # Try to parse the response.
            try:
                json = response.json()
                # Expose the async action URI to callers via the payload.
                if response.headers and inject_header:
                    json["dockercloud_action_uri"] = response.headers.get(
                        "X-DockerCloud-Action-URI", "")
            except TypeError:
                raise ApiError("JSON Parse Error (%s %s). Response: %s" %
                               (method, url, response.text))
        else:
            # 204 No Content carries no body.
            json = None
    else:
        # Server returned an error
        if status_code == 401:
            raise AuthError("Not authorized")
        else:
            raise ApiError("Status %s (%s %s). Response: %s" %
                           (str(status_code), method, url, response.text))
    return json
def connect(self):
    """Open a streaming GET to the source URI and return a URIResponse.

    Merges environment proxies (when trust_env is set), probes server
    support for ranged downloads via the 206 status code, and records the
    live response on the instance for later reads.
    """
    session = self.session
    source_uri = self.source_uri
    proxies = source_uri.proxies or {}
    # Only consult the environment when no proxy is configured for the
    # URI's scheme and the caller opted in via trust_env.
    if not proxies.get(source_uri.scheme):
        if source_uri.kwargs.get('trust_env', False):
            # Set environment's proxies.
            no_proxy = proxies.get(
                'no_proxy') if proxies is not None else None
            env_proxies = get_environ_proxies(source_uri.uri,
                                              no_proxy=no_proxy)
            # setdefault: explicit per-URI proxies win over env ones.
            for (k, v) in env_proxies.items():
                proxies.setdefault(k, v)
    cookies = source_uri.cookies
    verify = source_uri.kwargs.get('verify', True)
    uri, headers = self._build_uri_headers()
    timeout = self.kwargs.get('timeout', None) or HTTPClient.TIMEOUT
    try:
        # stream=True defers body download until the caller reads it.
        resp = requests.get(source_uri.uri,
                            headers=headers,
                            proxies=proxies,
                            cookies=cookies,
                            timeout=timeout,
                            stream=True,
                            verify=verify)
    except requests.exceptions.Timeout as error:
        raise nbdler.error.TimeoutError(f"{uri}") from error
    except BaseException as error:
        # Anything else is treated as unrecoverable for this connection.
        log.debug(f'{error}', format_exc())
        raise nbdler.error.FatalError() from error
    else:
        # Full size comes from Content-Range when the server honoured
        # our range request.
        total_length = content_range_fullsize(
            resp.headers.get('content-range'))
        response = URIResponse(
            str(resp.url), list(resp.headers.items()), resp.status_code,
            resp.reason, total_length,
            content_type_mimetype(resp.headers.get('content-type')),
            self.progress.range, resp.status_code == 206)
        if self.resume_capability is None:
            # First connection: learn whether the server supports ranges.
            if resp.status_code not in (206, 200):
                raise nbdler.error.FatalError(
                    f"[{resp.status_code} {resp.reason}] '{resp.url}'")
            self.resume_capability = resp.status_code == 206
        elif self.resume_capability is True:
            # Subsequent connections must keep honouring ranges.
            if not resp.status_code == 206:
                raise nbdler.error.FatalError(
                    f"[{resp.status_code} {resp.reason}] '{resp.url}'")
    # Keep the session and live response for the download phase.
    self.session = session
    self.resp = resp
    return response
def test_no_proxy(self):
    """
    Starting with Agent 2.0.0, there should always be a local forwarder
    running and all payloads should go through it. So we should make sure
    that we pass the no_proxy environment variable that will be used by
    requests (See: https://github.com/kennethreitz/requests/pull/945 )
    """
    from requests.utils import get_environ_proxies
    from os import environ as env

    # Point every proxy-related variable at a dummy local proxy.
    env["http_proxy"] = "http://localhost:3128"
    env["https_proxy"] = env["http_proxy"]
    env["HTTP_PROXY"] = env["http_proxy"]
    env["HTTPS_PROXY"] = env["http_proxy"]

    try:
        self.assertTrue("no_proxy" in env)
        # assertEqual: assertEquals is a deprecated unittest alias.
        self.assertEqual(env["no_proxy"],
                         "127.0.0.1,localhost,169.254.169.254")
        self.assertEqual({},
                         get_environ_proxies("http://localhost:17123/intake"))

        expected_proxies = {
            'http': 'http://localhost:3128',
            'https': 'http://localhost:3128',
            'no': '127.0.0.1,localhost,169.254.169.254'
        }
        environ_proxies = get_environ_proxies("https://www.google.com")
        if os.environ.get('TRAVIS') and 'travis_apt' in environ_proxies:
            # Travis CI adds a `travis_apt` proxy which breaks this test
            # if it's not removed.
            environ_proxies.pop("travis_apt", None)
        self.assertEqual(expected_proxies, environ_proxies,
                         (expected_proxies, environ_proxies))
    finally:
        # Always clear the env variables we set, even when an assertion
        # failed. pop() avoids deleting the same key twice on platforms
        # (e.g. Windows) where env var names are case-insensitive.
        env.pop("http_proxy", None)
        env.pop("https_proxy", None)
        env.pop("HTTP_PROXY", None)
        env.pop("HTTPS_PROXY", None)
def test_no_proxy(self):
    """
    Starting with Agent 2.0.0, there should always be a local forwarder
    running and all payloads should go through it. So we should make sure
    that we pass the no_proxy environment variable that will be used by
    requests (See: https://github.com/kennethreitz/requests/pull/945 )
    """
    from os import environ as env

    # Point every proxy-related variable at a dummy local proxy.
    env["http_proxy"] = "http://localhost:3128"
    env["https_proxy"] = env["http_proxy"]
    env["HTTP_PROXY"] = env["http_proxy"]
    env["HTTPS_PROXY"] = env["http_proxy"]

    try:
        set_no_proxy_settings()

        self.assertTrue("no_proxy" in env)
        # assertEqual: assertEquals is a deprecated unittest alias.
        self.assertEqual(env["no_proxy"],
                         "127.0.0.1,localhost,169.254.169.254")
        self.assertEqual(
            {}, get_environ_proxies("http://localhost:17123/api/v1/series"))

        expected_proxies = {
            'http': 'http://localhost:3128',
            'https': 'http://localhost:3128',
            'no': '127.0.0.1,localhost,169.254.169.254'
        }
        environ_proxies = get_environ_proxies("https://www.google.com")
        if os.environ.get('TRAVIS') and 'travis_apt' in environ_proxies:
            # Travis CI adds a `travis_apt` proxy which breaks this test
            # if it's not removed.
            environ_proxies.pop("travis_apt", None)
        self.assertEqual(expected_proxies, environ_proxies,
                         (expected_proxies, environ_proxies))
    finally:
        # Clear the env variables set, even when an assertion failed, so
        # later tests are not polluted.
        env.pop("http_proxy", None)
        env.pop("https_proxy", None)
        env.pop("HTTP_PROXY", None)
        env.pop("HTTPS_PROXY", None)
def test_no_proxy(self):
    """
    Starting with Agent 5.0.0, there should always be a local forwarder
    running and all payloads should go through it. So we should make sure
    that we pass the no_proxy environment variable that will be used by
    requests (See: https://github.com/kennethreitz/requests/pull/945 )
    """
    from requests.utils import get_environ_proxies
    from os import environ as env

    # Point every proxy-related variable at a dummy local proxy.
    env["http_proxy"] = "http://localhost:3128"
    env["https_proxy"] = env["http_proxy"]
    env["HTTP_PROXY"] = env["http_proxy"]
    env["HTTPS_PROXY"] = env["http_proxy"]

    try:
        self.assertTrue("no_proxy" in env)
        # assertEqual: assertEquals is a deprecated unittest alias.
        self.assertEqual(env["no_proxy"],
                         "127.0.0.1,localhost,169.254.169.254")
        self.assertEqual({}, get_environ_proxies(
            "http://localhost:17123/intake"))

        expected_proxies = {
            'http': 'http://localhost:3128',
            'https': 'http://localhost:3128',
            'no': '127.0.0.1,localhost,169.254.169.254'
        }
        environ_proxies = get_environ_proxies("https://www.google.com")
        # travis uses the TRAVIS_APT_PROXY variable breaking the test with
        # assertEqual. We only need that the expected_proxies are among the
        # environ_proxies so a set inclusion is enough.
        self.assertLessEqual(set(expected_proxies.values()),
                             set(environ_proxies.values()),
                             (expected_proxies, environ_proxies))
    finally:
        # Always clear the env variables we set, even when an assertion
        # failed. pop() avoids deleting the same key twice on platforms
        # (e.g. Windows) where env var names are case-insensitive.
        env.pop("http_proxy", None)
        env.pop("https_proxy", None)
        env.pop("HTTP_PROXY", None)
        env.pop("HTTPS_PROXY", None)
def test_no_proxy(self):
    """
    Starting with Agent 2.0.0, there should always be a local forwarder
    running and all payloads should go through it. So we should make sure
    that we pass the no_proxy environment variable that will be used by
    requests (See: https://github.com/kennethreitz/requests/pull/945 )
    """
    from requests.utils import get_environ_proxies
    from os import environ as env

    # Point every proxy-related variable at a dummy local proxy.
    env["http_proxy"] = "http://localhost:3128"
    env["https_proxy"] = env["http_proxy"]
    env["HTTP_PROXY"] = env["http_proxy"]
    env["HTTPS_PROXY"] = env["http_proxy"]

    try:
        self.assertTrue("no_proxy" in env)
        # assertEqual: assertEquals is a deprecated unittest alias.
        self.assertEqual(env["no_proxy"],
                         "127.0.0.1,localhost,169.254.169.254")
        self.assertEqual({}, get_environ_proxies(
            "http://localhost:17123/intake"))

        expected_proxies = {
            'http': 'http://localhost:3128',
            'https': 'http://localhost:3128',
            'no': '127.0.0.1,localhost,169.254.169.254'
        }
        environ_proxies = get_environ_proxies("https://www.google.com")
        if os.environ.get('TRAVIS') and 'travis_apt' in environ_proxies:
            # Travis CI adds a `travis_apt` proxy which breaks this test
            # if it's not removed.
            environ_proxies.pop("travis_apt", None)
        self.assertEqual(expected_proxies, environ_proxies,
                         (expected_proxies, environ_proxies))
    finally:
        # Always clear the env variables we set, even when an assertion
        # failed. pop() avoids deleting the same key twice on platforms
        # (e.g. Windows) where env var names are case-insensitive.
        env.pop("http_proxy", None)
        env.pop("https_proxy", None)
        env.pop("HTTP_PROXY", None)
        env.pop("HTTPS_PROXY", None)
def send_request(method, path, inject_header=True, **kwargs):
    """Send an authenticated API request to the dockercloud REST host and
    return the parsed JSON body (or None for 204 responses).

    Raises ApiError on network/parse/server failures and AuthError on 401.
    """
    json = None
    # Build the absolute URL and normalise it to a trailing slash.
    # NOTE(review): path is ASCII-encoded with errors ignored — presumably
    # to drop non-ASCII input; confirm this matches the API's expectations.
    url = urljoin(dockercloud.rest_host.rstrip("/"),
                  path.strip("/").encode("ascii", "ignore"))
    if not url.endswith("/"):
        url = "%s/" % url
    # Compose the User-Agent, prepending any caller-provided agent string.
    user_agent = 'python-dockercloud/%s' % dockercloud.__version__
    if dockercloud.user_agent:
        user_agent = "%s %s" % (dockercloud.user_agent, user_agent)
    # construct headers
    headers = {'Content-Type': 'application/json', 'User-Agent': user_agent}
    headers.update(dockercloud.auth.get_auth_header())
    # construct request
    s = get_session()
    request = Request(method, url, headers=headers, **kwargs)
    # get environment proxies (fall back to an empty mapping)
    env_proxies = utils.get_environ_proxies(url) or {}
    kw_args = {'proxies': env_proxies}
    # make the request
    req = s.prepare_request(request)
    logger.info("Prepared Request: %s, %s, %s, %s" %
                (req.method, req.url, req.headers, kwargs))
    response = s.send(req, **kw_args)
    status_code = getattr(response, 'status_code', None)
    logger.info("Response: Status %s, %s, %s" %
                (str(status_code), response.headers, response.text))
    # handle the response
    if not status_code:
        # Most likely network trouble
        raise ApiError("No Response (%s %s)" % (method, url))
    elif 200 <= status_code <= 299:
        # Success
        if status_code != 204:
            # Try to parse the response.
            try:
                json = response.json()
                # Expose the async action URI to callers via the payload.
                if response.headers and inject_header:
                    json["dockercloud_action_uri"] = response.headers.get("X-DockerCloud-Action-URI", "")
            except TypeError:
                raise ApiError("JSON Parse Error (%s %s). Response: %s" %
                               (method, url, response.text))
        else:
            # 204 No Content carries no body.
            json = None
    else:
        # Server returned an error
        if status_code == 401:
            raise AuthError("Not authorized")
        else:
            raise ApiError("Status %s (%s %s). Response: %s" %
                           (str(status_code), method, url, response.text))
    return json
def send_request(method, path, inject_header=True, **kwargs):
    """Send an authenticated API request to the Tutum service and return
    the parsed JSON body (or None for 204 responses).

    Raises TutumApiError on network/parse/server failures and
    TutumAuthError on 401.
    """
    json = None
    # Build the absolute URL and normalise it to a trailing slash.
    url = urljoin(tutum.base_url, path.strip("/"))
    if not url.endswith("/"):
        url = "%s/" % url
    # Compose the User-Agent, prepending any caller-provided agent string.
    user_agent = 'python-tutum/%s' % tutum.__version__
    if tutum.user_agent:
        user_agent = "%s %s" % (tutum.user_agent, user_agent)
    # construct headers
    headers = {'Content-Type': 'application/json', 'User-Agent': user_agent}
    headers.update(tutum.auth.get_auth_header())
    tutum.logger.info("%s %s %s %s" % (method, url, headers, kwargs))
    # construct request
    s = Session()
    req = Request(method, url, headers=headers, **kwargs)
    # get environment proxies (fall back to an empty mapping)
    env_proxies = utils.get_environ_proxies(url) or {}
    kw_args = {'proxies': env_proxies}
    # make the request
    response = s.send(req.prepare(), **kw_args)
    status_code = getattr(response, 'status_code', None)
    tutum.logger.info("Status: %s" % str(status_code))
    # handle the response
    if not status_code:
        # Most likely network trouble
        raise TutumApiError("No Response (%s %s)" % (method, url))
    elif 200 <= status_code <= 299:
        # Success
        if status_code != 204:
            # Try to parse the response.
            try:
                json = response.json()
                # Expose the async action URI to callers via the payload.
                if response.headers and inject_header:
                    json["tutum_action_uri"] = response.headers.get(
                        "X-Tutum-Action-URI", "")
            except TypeError:
                raise TutumApiError("JSON Parse Error (%s %s). Response: %s" %
                                    (method, url, response.text))
        else:
            # 204 No Content carries no body.
            json = None
    else:
        # Server returned an error.
        if status_code == 401:
            raise TutumAuthError("Not authorized")
        else:
            raise TutumApiError("Status %s (%s %s). Response: %s" %
                                (str(status_code), method, url, response.text))
    tutum.logger.info("Response: %s" % json)
    return json
def test_no_proxy(self):
    """
    Starting with Agent 5.0.0, there should always be a local forwarder
    running and all payloads should go through it. So we should make sure
    that we pass the no_proxy environment variable that will be used by
    requests (See: https://github.com/kennethreitz/requests/pull/945 )
    """
    from os import environ as env

    # Point every proxy-related variable at a dummy local proxy.
    env["http_proxy"] = "http://localhost:3128"
    env["https_proxy"] = env["http_proxy"]
    env["HTTP_PROXY"] = env["http_proxy"]
    env["HTTPS_PROXY"] = env["http_proxy"]

    try:
        set_no_proxy_settings()

        self.assertTrue("no_proxy" in env)
        # assertEqual: assertEquals is a deprecated unittest alias.
        self.assertEqual(env["no_proxy"],
                         "127.0.0.1,localhost,169.254.169.254")
        self.assertEqual({}, get_environ_proxies(
            "http://localhost:18123/api/v1/series"))

        expected_proxies = {
            'http': 'http://localhost:3128',
            'https': 'http://localhost:3128',
            'no': '127.0.0.1,localhost,169.254.169.254'
        }
        # items() replaces the Python-2-only iteritems(); both iterate
        # (key, value) pairs, and items() also exists on Python 2 dicts.
        environ_proxies = {
            k: v
            for k, v in get_environ_proxies("https://www.google.com").items()
            if k in ["http", "https", "no"]
        }
        self.assertEqual(expected_proxies, environ_proxies,
                         (expected_proxies, environ_proxies))
    finally:
        # Clear the env variables set, even when an assertion failed, so
        # later tests are not polluted.
        env.pop("http_proxy", None)
        env.pop("https_proxy", None)
        env.pop("HTTP_PROXY", None)
        env.pop("HTTPS_PROXY", None)
def test_get_environ_proxies_ip_ranges(self):
    """
    Ensures that IP addresses are correctly matched with ranges in the
    no_proxy variable.
    """
    from requests.utils import get_environ_proxies

    # Remember any pre-existing no_proxy (None means "was not set") so
    # the test does not leak environment state into other tests.
    old_no_proxy = os.environ.get('no_proxy')
    os.environ['no_proxy'] = "192.168.0.0/24,127.0.0.1,localhost.localdomain,172.16.1.1"
    try:
        # Addresses inside the 192.168.0.0/24 range bypass the proxy.
        assert get_environ_proxies('http://192.168.0.1:5000/') == {}
        assert get_environ_proxies('http://192.168.0.1/') == {}
        # Exact-IP entries bypass the proxy, with or without a port.
        assert get_environ_proxies('http://172.16.1.1/') == {}
        assert get_environ_proxies('http://172.16.1.1:5000/') == {}
        # Addresses outside the listed range still get proxied.
        assert get_environ_proxies('http://192.168.1.1:5000/') != {}
        assert get_environ_proxies('http://192.168.1.1/') != {}
    finally:
        # Restore the environment to its pre-test state.
        if old_no_proxy is None:
            os.environ.pop('no_proxy', None)
        else:
            os.environ['no_proxy'] = old_no_proxy
def send_request(method, path, inject_header=True, **kwargs):
    """Send an authenticated API request to the Tutum service and return
    the parsed JSON body (or None for 204 responses).

    Raises TutumApiError on network/parse/server failures and
    TutumAuthError on 401.
    """
    json = None
    # Build the absolute URL and normalise it to a trailing slash.
    url = urljoin(tutum.base_url, path.strip("/"))
    if not url.endswith("/"):
        url = "%s/" % url
    # Compose the User-Agent, prepending any caller-provided agent string.
    user_agent = 'python-tutum/%s' % tutum.__version__
    if tutum.user_agent:
        user_agent = "%s %s" % (tutum.user_agent, user_agent)
    # construct headers
    headers = {'Content-Type': 'application/json', 'User-Agent': user_agent}
    headers.update(tutum.auth.get_auth_header())
    tutum.logger.info("%s %s %s %s" % (method, url, headers, kwargs))
    # construct request
    s = Session()
    req = Request(method, url, headers=headers, **kwargs)
    # get environment proxies (fall back to an empty mapping)
    env_proxies = utils.get_environ_proxies(url) or {}
    kw_args = {'proxies': env_proxies}
    # make the request
    response = s.send(req.prepare(), **kw_args)
    status_code = getattr(response, 'status_code', None)
    tutum.logger.info("Status: %s" % str(status_code))
    # handle the response
    if not status_code:
        # Most likely network trouble
        raise TutumApiError("No Response (%s %s)" % (method, url))
    elif 200 <= status_code <= 299:
        # Success
        if status_code != 204:
            # Try to parse the response.
            try:
                json = response.json()
                # Expose the async action URI to callers via the payload.
                if response.headers and inject_header:
                    json["tutum_action_uri"] = response.headers.get("X-Tutum-Action-URI", "")
            except TypeError:
                raise TutumApiError("JSON Parse Error (%s %s). Response: %s" %
                                    (method, url, response.text))
        else:
            # 204 No Content carries no body.
            json = None
    else:
        # Server returned an error.
        if status_code == 401:
            raise TutumAuthError("Not authorized")
        else:
            raise TutumApiError("Status %s (%s %s). Response: %s" %
                                (str(status_code), method, url, response.text))
    tutum.logger.info("Response: %s" % json)
    return json
def status_401(self, r):
    """Handle a 401 response by logging in and replaying the original
    request; returns the replayed response with history attached.

    Raises ValueError if the 401 came from the login URL itself (the
    credentials were rejected).
    """
    login_url = r.json()["urls"]["login_url"]
    if r.request.url == login_url:
        raise ValueError("Log in was not successful.")
    # Authenticate with the credentials held on the session wrapper.
    creds = {'email': self.session.email,
             'password': self.session.password}
    login_r = self.session.post(
        login_url,
        headers={"Content-Type": "application/json"},
        data=json.dumps(creds)
    )
    # Repeat the request now that we've logged in. What a hack.
    # The login cookie is copied onto the original prepared request.
    r.request.headers['Cookie'] = login_r.headers['Set-Cookie']
    # no_proxy=None: rely purely on the environment's proxy settings.
    env_proxies = get_environ_proxies(r.request.url, no_proxy=None)
    r2 = self.session.send(r.request, proxies=env_proxies)
    # Add the previous requests to r.history so e.g. cookies get grabbed.
    r2.history.append(r)
    r2.history.append(login_r)
    return r2
def test_not_bypass(self, url):
    """URLs not matched by no_proxy must still pick up environment proxies."""
    proxies = get_environ_proxies(url, no_proxy=None)
    assert proxies != {}
def test_not_bypass_no_proxy_keyword(self, url, monkeypatch):
    """Verify that the explicit 'no_proxy' argument overrides the
    'no_proxy' environment variable."""
    monkeypatch.setenv('http_proxy', 'http://proxy.example.com:3128/')
    bypass_hosts = '192.168.1.1,requests.com'
    proxies = get_environ_proxies(url, no_proxy=bypass_hosts)
    assert proxies != {}
def test_bypass_no_proxy_keyword(self, url):
    """Hosts matched by the explicit 'no_proxy' argument get no proxies."""
    bypass_hosts = '192.168.1.1,requests.com'
    proxies = get_environ_proxies(url, no_proxy=bypass_hosts)
    assert proxies == {}
def _get_proxies(url):
    """Return the proxy mapping to use for *url*.

    Proxy support is currently taken from the environment only; reading
    proxies from a config file could also be supported later.
    """
    environment_proxies = get_environ_proxies(url)
    return environment_proxies
def __init__(self, requests, router):
    """Store the collaborators and capture the environment's proxy map."""
    self.requests = requests
    self.router = router
    # Fall back to an empty mapping when the environment defines no proxies.
    env_proxies = get_environ_proxies('')
    self.proxies = env_proxies if env_proxies else {}
def test_bypass(self, url):
    """URLs covered by the environment's no_proxy must get no proxies."""
    assert {} == get_environ_proxies(url)