def proxy_settings(self):
    """Retrieve network proxy settings from the system or environment vars.

    System-level settings (also covering macOS / Windows configuration) are
    read first; HTTP_PROXY/HTTPS_PROXY environment variables take precedence
    when present.

    Returns:
        dict or None: mapping of scheme -> proxy URL, or None when no proxy
        could be detected anywhere.
    """
    # first check in the system settings
    proxy_settings = getproxies() or None
    if proxy_settings:
        logger.info("Proxies settings found in the system: {}".format(
            proxy_settings))
    else:
        logger.info("No proxy detected in the system.")

    # then check the environment file
    if environ.get("HTTP_PROXY") or environ.get("HTTPS_PROXY"):
        proxy_settings = {
            "http": environ.get("HTTP_PROXY"),
            "https": environ.get("HTTPS_PROXY"),
        }
        logger.info(
            "Proxies settings found in environment vars (maybe loaded from .env file): {}"
            .format(proxy_settings))
    else:
        # BUGFIX: this branch previously reset proxy_settings to None even
        # when system proxies had been detected above, which made the system
        # lookup useless.  Keep whatever was found in the system.
        logger.info("No proxy settings found in the environment vars.")

    return proxy_settings
def _make_pool(
    self,
    parsed_dsn,  # type: Dsn
    http_proxy,  # type: Optional[str]
    https_proxy,  # type: Optional[str]
    ca_certs,  # type: Optional[Any]
):
    # type: (...) -> Union[PoolManager, ProxyManager]
    """Build the urllib3 pool, routing through a proxy when one applies.

    An explicit empty-string proxy argument disables that scheme's proxy;
    otherwise explicit arguments win over system settings, and system
    settings are skipped for hosts covered by no_proxy.
    """
    in_no_proxy = self._in_no_proxy(parsed_dsn)

    chosen_proxy = None
    # try HTTPS first; "" explicitly disables the https proxy
    if parsed_dsn.scheme == "https" and https_proxy != "":
        chosen_proxy = https_proxy or (not in_no_proxy and getproxies().get("https"))
    # maybe fallback to HTTP proxy, unless explicitly disabled
    if not chosen_proxy and http_proxy != "":
        chosen_proxy = http_proxy or (not in_no_proxy and getproxies().get("http"))

    pool_opts = self._get_pool_options(ca_certs)
    if chosen_proxy:
        return urllib3.ProxyManager(chosen_proxy, **pool_opts)
    return urllib3.PoolManager(**pool_opts)
def fix_set_proxy_env():
    """
    Set http_proxy/https_proxy environment variables (for requests, pip, ...)
    from user-specified settings or, if none, from system settings on OS X
    and from registry on Windos.
    """
    # Remember the proxies detected at first call so that user-driven
    # changes can later be reverted to the system defaults.
    global default_proxies
    if default_proxies is None:
        default_proxies = getproxies()  # can also read windows and macos settings

    settings = QSettings()
    for scheme in set(["http", "https"]) | set(getproxies()):
        env_var = scheme + '_proxy'
        user_value = settings.value("network/" + scheme + "-proxy", "", type=str)
        system_value = default_proxies.get(scheme, "")
        if user_value:
            os.environ[env_var] = user_value
        elif system_value:
            # crucial for windows/macos support
            os.environ[env_var] = system_value
        else:
            os.environ.pop(env_var, "")
def _configure_session(
    self,
    parsed_dsn,  # type: Dsn
    http_proxy,  # type: Optional[str]
    https_proxy,  # type: Optional[str]
):
    # type: (...) -> None
    """Apply proxy settings to the underlying requests session.

    Explicit proxy arguments win; otherwise the system proxies are used
    unless the DSN host is covered by no_proxy.
    """
    bypass = self._in_no_proxy(parsed_dsn)
    system = getproxies()
    self._session.proxies.update({
        "https": https_proxy or (not bypass and system.get("https")),
        "http": http_proxy or (not bypass and system.get("http")),
    })
def test_connect_env_var_used(self, ClientRequestMock):
    """A request built with proxy_from_env=True must pick up the http proxy
    from the environment (getproxies) and route through it."""
    req = ClientRequest('GET', URL('http://www.python.org'),
                        proxy_from_env=True, loop=self.loop)
    # proxy resolved from the environment, not passed explicitly
    self.assertEqual(str(req.proxy), 'http://proxy.example.com')
    # mock all the things!
    connector = aiohttp.TCPConnector(loop=self.loop)
    connector._resolve_host = make_mocked_coro([mock.MagicMock()])
    proto = mock.Mock(**{
        'transport.get_extra_info.return_value': False,
    })
    self.loop.create_connection = make_mocked_coro(
        (proto.transport, proto))
    conn = self.loop.run_until_complete(connector.connect(req))
    self.assertEqual(req.url, URL('http://www.python.org'))
    self.assertIs(conn._protocol, proto)
    self.assertIs(conn.transport, proto.transport)
    # the connector must have created a new request targeting the proxy,
    # keeping the original Host header
    ClientRequestMock.assert_called_with(
        'GET', URL('http://proxy.example.com'),
        auth=None, fingerprint=None,
        headers={'Host': 'www.python.org'},
        ssl_context=None, verify_ssl=None,
        loop=self.loop)
    # sanity check: the test environment actually defines an http proxy
    self.assertIn('http', getproxies())
def test_proxy(self, proxy: Optional[str]) -> None:
    """Check that a proxy (given or taken from the environment) is passed
    through to session.request without breaking connectivity."""
    session = PipSession(trusted_hosts=[])

    if not proxy:
        # if user didn't pass --proxy then try to get it from the system.
        env_proxy = getproxies().get("http", None)
        proxy = urlparse(env_proxy).netloc if env_proxy else None

    if proxy:
        # set proxy scheme to session.proxies
        session.proxies = {scheme: f"{proxy}" for scheme in ("http", "https", "ftp")}

    connection_error = None
    try:
        session.request("GET", "https://pypi.org", timeout=1)
    except requests.exceptions.ConnectionError as exc:
        connection_error = exc

    assert connection_error is None, (
        f"Invalid proxy {proxy} or session.proxies: "
        f"{session.proxies} is not correctly passed to session.request.")
def urlopen(url):
    """Opens an url with urllib2, honouring the Addon Manager proxy prefs.

    Returns the open response object, or None when the request fails.
    """
    timeout = 5
    # Proxy an ssl configuration
    pref = FreeCAD.ParamGet("User parameter:BaseApp/Preferences/Addons")
    if pref.GetBool("NoProxyCheck", True):
        proxies = {}
    elif pref.GetBool("SystemProxyCheck", False):
        proxy = urllib2.getproxies()
        # BUGFIX: the https entry previously reused proxy.get('http')
        proxies = {"http": proxy.get('http'), "https": proxy.get('https')}
    elif pref.GetBool("UserProxyCheck", False):
        proxy = pref.GetString("ProxyUrl", "")
        proxies = {"http": proxy, "https": proxy}
    else:
        # BUGFIX: proxies was left undefined (NameError) when none of the
        # proxy modes matched
        proxies = {}

    handlers = [urllib2.ProxyHandler(proxies)]
    if ssl_ctx:
        # BUGFIX: a bare {} was previously passed to build_opener when
        # ssl_ctx was unset, which build_opener rejects (not a handler)
        handlers.append(urllib2.HTTPSHandler(context=ssl_ctx))
    opener = urllib2.build_opener(*handlers)
    urllib2.install_opener(opener)

    # Url opening
    req = urllib2.Request(url, headers={'User-Agent': "Magic Browser"})
    try:
        return urllib2.urlopen(req, timeout=timeout)
    except Exception:
        return None
def parse_proxy(p: Optional[str] = None):
    """Resolve a proxy specification into a parsed URL.

    With no argument the system https proxy is used; the literal string
    "ignore" disables proxying.  Returns a ParseResult, or None.
    """
    if not p:
        chosen = getproxies().get("https")
    elif p == "ignore":
        chosen = None
    else:
        chosen = p
    return urlparse(chosen) if chosen else None
def check_proxy(self, specific: Optional[dict] = None):
    """Check if proxy settings are set on the OS.

    Returns:
    -- 1 when direct connection works fine
    -- 2 when direct connection fails and any proxy is set in the OS
    -- 3 and settings when direct connection fails but a proxy is set

    see: https://docs.python.org/2/library/urllib.html#urllib.getproxies
    """
    # BUGFIX: `specific` previously used a shared mutable default ({}).
    if specific is None:
        specific = {}
    os_proxies = getproxies()
    # NOTE(review): check_internet_connection is read as an attribute, not
    # called -- confirm it is a property rather than a method.
    if not os_proxies and self.check_internet_connection:
        logger.info("No proxy needed nor set. Direct connection works.")
        return 1
    if not os_proxies and not self.check_internet_connection:
        logger.error("Proxy not set in the OS. Needs to be specified")
        return 2
    # Export the detected proxies and install an opener that uses them.
    env["http_proxy"] = os_proxies.get("http")
    env["https_proxy"] = os_proxies.get("https")
    proxy = ProxyHandler(
        {"http": os_proxies.get("http"), "https": os_proxies.get("https")}
    )
    opener = build_opener(proxy)
    install_opener(opener)
    urlopen("http://www.google.com")
    return 3, os_proxies
def proxies_from_env() -> Dict[str, ProxyInfo]:
    """Collect proxy configuration from the environment.

    Only http/https/ws/wss entries are considered; https and wss proxies are
    not supported and are skipped with a warning.  Credentials embedded in
    the proxy URL, or found in netrc, become a BasicAuth.
    """
    supported = ("http", "https", "ws", "wss")
    netrc_obj = netrc_from_env()
    result = {}
    for scheme, raw in getproxies().items():
        if scheme not in supported:
            continue
        proxy, auth = strip_auth_from_url(URL(raw))
        if proxy.scheme in ("https", "wss"):
            client_logger.warning("%s proxies %s are not supported, ignoring",
                                  proxy.scheme.upper(), proxy)
            continue
        if netrc_obj and auth is None and proxy.host is not None:
            creds = netrc_obj.authenticators(proxy.host)
            if creds is not None:
                # creds is a (`user`, `account`, `password`) tuple,
                # `user` and `account` both can be username,
                # if `user` is None, use `account`
                *logins, password = creds
                login = logins[0] if logins[0] else logins[-1]
                auth = BasicAuth(cast(str, login), cast(str, password))
        result[scheme] = ProxyInfo(proxy, auth)
    return result
def test_connect_env_var_not_overwriting(self, ClientRequestMock):
    """An explicitly passed proxy URL must win over proxy_from_env."""
    req = ClientRequest(
        'GET', URL('http://www.python.org'),
        proxy_from_env=True,
        proxy=URL('http://proxy.example.com'),
        loop=self.loop
    )
    # the explicit proxy wins over whatever the environment defines
    self.assertEqual(str(req.proxy), 'http://proxy.example.com')
    # mock all the things!
    connector = aiohttp.TCPConnector(loop=self.loop)
    connector._resolve_host = make_mocked_coro([mock.MagicMock()])
    proto = mock.Mock()
    self.loop.create_connection = make_mocked_coro(
        (proto.transport, proto))
    conn = self.loop.run_until_complete(connector.connect(req))
    self.assertEqual(req.url, URL('http://www.python.org'))
    self.assertIs(conn._protocol, proto)
    self.assertIs(conn.transport, proto.transport)
    # the proxied request keeps the original Host header
    ClientRequestMock.assert_called_with(
        'GET', URL('http://proxy.example.com'),
        auth=None,
        headers={'Host': 'www.python.org'},
        loop=self.loop)
    # sanity check: an http proxy is present in the environment
    self.assertIn('http', getproxies())
def fix_set_proxy_env():
    """
    Set http_proxy/https_proxy environment variables (for requests, pip, ...)
    from system settings on OS X and from registry on Windos.
    On unix, no-op.
    """
    for scheme, proxy_url in getproxies().items():
        env_name = scheme + '_proxy'
        os.environ[env_name] = proxy_url
def update_proxy(self, proxy, proxy_auth, proxy_from_env):
    """Resolve and validate the proxy configuration for this request.

    When proxy_from_env is set and no explicit proxy is given, the scheme's
    proxy (or a socks4/socks5 fallback) is read from the environment.  Only
    http, socks4 and socks5 proxies are accepted, and the auth object's type
    must match the proxy scheme.
    """
    if proxy_from_env and not proxy:
        env_proxies = getproxies()
        url_str = env_proxies.get(self.original_url.scheme)
        if not url_str:
            url_str = env_proxies.get('socks4') or env_proxies.get('socks5')
        proxy = URL(url_str) if url_str else None

    if proxy and proxy.scheme not in ['http', 'socks4', 'socks5']:
        raise ValueError(
            "Only http, socks4 and socks5 proxies are supported")

    if proxy and proxy_auth:
        # each scheme requires its own auth class
        scheme_to_auth = {
            'http': (aiohttp.BasicAuth,
                     "proxy_auth must be None or "
                     "BasicAuth() tuple for http proxy"),
            'socks4': (Socks4Auth,
                       "proxy_auth must be None or Socks4Auth() "
                       "tuple for socks4 proxy"),
            'socks5': (Socks5Auth,
                       "proxy_auth must be None or Socks5Auth() "
                       "tuple for socks5 proxy"),
        }
        expected_cls, message = scheme_to_auth[proxy.scheme]
        if not isinstance(proxy_auth, expected_cls):
            raise ValueError(message)

    self.proxy = proxy
    self.proxy_auth = proxy_auth
def set_proxy(proxy, user=None, password=""):
    """
    Set the HTTP proxy for Python to download through.

    If ``proxy`` is None then tries to set proxy from environment or system
    settings.

    :param proxy: The HTTP proxy server to use. For example:
        'http://proxy.example.com:3128/'
    :param user: The username to authenticate with. Use None to disable
        authentication.
    :param password: The password to authenticate with.
    :raises ValueError: if proxy is None and no system proxy is configured.
    """
    # NOTE: the unused `from nltk import compat` import was removed.
    if proxy is None:
        # Try and find the system proxy settings
        try:
            proxy = getproxies()["http"]
        except KeyError as err:
            raise ValueError("Could not detect default proxy settings") from err

    # Set up the proxy handler
    proxy_handler = ProxyHandler({"https": proxy, "http": proxy})
    opener = build_opener(proxy_handler)

    if user is not None:
        # Set up basic proxy authentication if provided
        password_manager = HTTPPasswordMgrWithDefaultRealm()
        password_manager.add_password(realm=None, uri=proxy, user=user,
                                      passwd=password)
        opener.add_handler(ProxyBasicAuthHandler(password_manager))
        opener.add_handler(ProxyDigestAuthHandler(password_manager))

    # Override the existing url opener
    install_opener(opener)
def run_osm_update(ctx):
    """Run the osm_update.sh script with the PG/osmosis environment set up.

    System proxy settings are forwarded to osmosis through the
    JAVACMD_OPTIONS variable; aborts when the generated cache is missing.
    """
    update_env = {
        "PG_CONNECTION_STRING":
            f"postgis://{ctx.pg.user}:{ctx.pg.password}@{ctx.pg.host}:{ctx.pg.port}/{ctx.pg.database}",
        "OSMOSIS_WORKING_DIR": ctx.update_tiles_dir,
        "IMPOSM_DATA_DIR": ctx.generated_files_dir,
    }
    if not check_generated_cache(ctx.generated_files_dir):
        sys.exit(1)

    # osmosis reads proxy parameters from JAVACMD_OPTIONS variable
    system_proxies = getproxies()
    java_opts = []
    for scheme in ("http", "https"):
        if system_proxies.get(scheme):
            parsed = urlparse(system_proxies[scheme])
            java_opts.append(
                f"-D{scheme}.proxyHost={parsed.hostname} -D{scheme}.proxyPort={parsed.port} ")
    if java_opts:
        update_env["JAVACMD_OPTIONS"] = "".join(java_opts)

    lock_path = get_import_lock_path(ctx)
    with FileLock(lock_path) as lock:
        ctx.run(
            f"{os.path.join(os.getcwd(), 'osm_update.sh')} --config {ctx.imposm_config_dir}",
            env=update_env,
        )
def callback(request):
    """GitHub OAuth callback view: exchange the code for an access token.

    Stores the token in the config file and renders the callback template
    with either the token payload or an error description.
    """
    code = request.GET.get('code')
    config = ConfigParser()
    # BUGFIX: read and write the same file -- the config was previously read
    # from a hard-coded './config.ini' but written to CONFIG_FILE_NAME.
    config.read(CONFIG_FILE_NAME)
    default_section = config['DEFAULT']
    client_id = default_section['client_id']
    client_secret = default_section['client_secret']
    try:
        res = requests.post('https://github.com/login/oauth/access_token',
                            json={
                                'client_id': client_id,
                                'client_secret': client_secret,
                                'code': code
                            },
                            headers={
                                'Accept': 'application/json'
                            },
                            proxies=getproxies())
        data = res.json()
        token_section = config['TOKEN']
        token_section['access_token'] = data.get('access_token')
        token_section['scope'] = data.get('scope')
        token_section['token_type'] = data.get('token_type')
        token_section['last_update'] = str(datetime.now())
        with open(CONFIG_FILE_NAME, 'w') as file:
            config.write(file)
    except requests.RequestException as e:
        data = {
            'error': 'request error',
            'error_description': str(e),
            'error_uri': None
        }
    is_error = 'error' in data
    return render(request, 'github/callback.html',
                  context={'data': data, 'is_error': is_error})
def fetch_ktools_tar(self, location, url, attempts=3, timeout=15, cooldown=1):
    """Download the ktools tarball from *url* into *location*.

    Retries up to *attempts* times with *cooldown* seconds between tries,
    honouring any system proxy configuration; the last URLError is re-raised
    when every attempt fails.
    """
    last_error = None
    proxy_config = urlrequest.getproxies()
    self.announce('Retrieving ktools from: {}'.format(url), INFO)
    self.announce('Proxy configuration: {}'.format(proxy_config), INFO)

    if proxy_config:
        # Handle Proxy config
        opener = urlrequest.build_opener(urlrequest.ProxyHandler(proxy_config))
        urlrequest.install_opener(opener)

    for attempt in range(attempts):
        try:
            if proxy_config:
                # Proxied connection
                req = urlrequest.urlopen(urlrequest.Request(url), timeout=timeout)
            else:
                # Non proxied connection
                req = urlrequest.urlopen(url, timeout=timeout)
            break
        except URLError as e:
            self.announce('Fetch ktools tar failed: {} (attempt {})'.format(e, (attempt + 1)), WARN)
            last_error = e
            sleep(cooldown)
    else:
        # every attempt failed
        self.announce('Failed to get ktools tar after {} attempts'.format(attempts), ERROR)
        if last_error:
            raise last_error

    with open(location, 'wb') as f:
        f.write(req.read())
def test_connect_env_var_not_overwriting(self, ClientRequestMock):
    """An explicitly passed proxy URL must win over proxy_from_env."""
    req = ClientRequest('GET', URL('http://www.python.org'),
                        proxy_from_env=True,
                        proxy=URL('http://proxy.example.com'),
                        loop=self.loop)
    # the explicit proxy wins over whatever the environment defines
    self.assertEqual(str(req.proxy), 'http://proxy.example.com')
    # mock all the things!
    connector = aiohttp.TCPConnector(loop=self.loop)
    connector._resolve_host = make_mocked_coro([mock.MagicMock()])
    proto = mock.Mock()
    self.loop.create_connection = make_mocked_coro(
        (proto.transport, proto))
    conn = self.loop.run_until_complete(connector.connect(req))
    self.assertEqual(req.url, URL('http://www.python.org'))
    self.assertIs(conn._protocol, proto)
    self.assertIs(conn.transport, proto.transport)
    # the proxied request keeps the original Host header
    ClientRequestMock.assert_called_with(
        'GET', URL('http://proxy.example.com'),
        auth=None,
        headers={'Host': 'www.python.org'},
        loop=self.loop)
    # sanity check: an http proxy is present in the environment
    self.assertIn('http', getproxies())
def get_proxy_url(url):
    """Return the proxy URL configured for *url*, or None.

    Checks scheme://host, scheme, all://host and all entries from the system
    proxy settings, honouring proxy_bypass (no_proxy hostname entries only --
    ip/cidr entries are not supported).
    """
    proxies = getproxies()
    parsed = urlparse.urlparse(url)

    # Set port if not defined explicitly in url.
    port = parsed.port
    if port is None:
        if parsed.scheme == 'http':
            port = 80
        elif parsed.scheme == 'https':
            port = 443

    hostname = parsed.hostname if parsed.hostname is not None else ''

    # Determine if proxy should be used based on no_proxy entries.
    # Note this does not support no_proxy ip or cidr entries.
    if proxy_bypass("%s:%s" % (hostname, port)):
        return None

    for key in (parsed.scheme + '://' + parsed.netloc,
                parsed.scheme,
                'all://' + parsed.netloc,
                'all'):
        if key in proxies:
            return proxies[key]
    return None
def __init__(
    self,
    username: str,
    password: str,
    challenge_type: Optional[str] = "none",
    headers: Optional[CaseInsensitiveDictType] = None,
    proxies: Optional[Proxies] = None,
    **kwargs: Any,
) -> None:
    """Create an authenticated session holder.

    headers/proxies default to the module HEADERS and the system proxy
    settings; challenge_type must be "email", "sms" or "none".
    """
    self.session: requests.Session = requests.session()
    self.session.headers = headers if headers is not None else HEADERS
    self.session.proxies = proxies if proxies is not None else getproxies()
    self.session.verify = certifi.where()

    # some time in the past
    self.expires_at = datetime.strptime("1970", "%Y").replace(tzinfo=pytz.UTC)

    self.username: str = username
    self.password: str = password
    if challenge_type not in ["email", "sms", "none"]:
        raise ValueError("challenge_type must be email, sms, or none")
    self.challenge_type: str = challenge_type

    self.device_token: str = kwargs.pop("device_token", str(uuid.uuid4()))
    # NOTE(review): the kwarg key "ouath" looks like a typo for "oauth",
    # but it is part of the public interface and is kept as-is.
    self.oauth: OAuth = kwargs.pop("ouath", OAuth())
    super().__init__(**kwargs)
def proxies_from_env():
    """Read http/https proxy settings from the environment.

    https proxies are unsupported and skipped with a warning; missing
    credentials are looked up in netrc when available.
    """
    netrc_obj = netrc_from_env()
    ret = {}
    for proto, raw in getproxies().items():
        if proto not in ('http', 'https'):
            continue
        proxy, auth = strip_auth_from_url(URL(raw))
        if proxy.scheme == 'https':
            client_logger.warning(
                "HTTPS proxies %s are not supported, ignoring", proxy)
            continue
        if netrc_obj and auth is None:
            auth_from_netrc = netrc_obj.authenticators(proxy.host)
            if auth_from_netrc is not None:
                # auth_from_netrc is a (`user`, `account`, `password`) tuple,
                # `user` and `account` both can be username,
                # if `user` is None, use `account`
                *logins, password = auth_from_netrc
                auth = BasicAuth(logins[0] if logins[0] else logins[-1],
                                 password)
        ret[proto] = ProxyInfo(proxy, auth)
    return ret
def upload_file_for_scan():
    """Upload the input file to MobSF for scanning.

    On success stores the parsed JSON response in the global
    scan_dictionary; otherwise prints the server error message.
    """
    post_headers = {
        'authorization': api_key,
    }
    try:
        # BUGFIX: the uploaded file handle was never closed; a context
        # manager releases it even when the request raises.
        with open(Constants.INPUT_FILE, 'rb') as input_file:
            files = {
                'file': (Constants.PROPER_NAME, input_file,
                         'application/octet-stream')
            }
            app_upload_request = requests.post(
                mobsf_url_and_endpoints['api_app_upload'],
                files=files,
                headers=post_headers,
                proxies=getproxies(),
                auth=auth)
        if app_upload_request.status_code == 200:
            global scan_dictionary
            scan_dictionary = json.loads(
                app_upload_request.content.decode('utf-8'))
        else:
            print(
                f"Failed upload application, error message : {app_upload_request.content}"
            )
    except Exception as error:
        # NOTE(review): broad catch kept to preserve the original
        # best-effort behaviour; the error is at least reported.
        print(error)
def LyricWikia(artist, title):
    """Fetch lyrics for (artist, title) from lyrics.wikia.com.

    Returns a (lyrics_text, url, timed) tuple; timed is True when the
    synced ("/lrc") variant of the page exists.  On a miss returns
    ("error", "", False).
    """
    proxy = request.getproxies()
    url = 'http://lyrics.wikia.com/api.php?action=lyrics&artist={artist}&song={title}&fmt=json&func=getSong'.format(
        artist=artist, title=title).replace(" ", "%20")
    r = requests.get(url, timeout=15, proxies=proxy)
    # We got some bad formatted JSON data... So we need to fix stuff :/
    returned = r.text
    returned = returned.replace("\'", "\"")
    returned = returned.replace("song = ", "")
    returned = json.loads(returned)
    if returned["lyrics"] != "Not found":
        # set the url to the url we just received, and retrieving it
        timed = True
        url = returned["url"] + "/lrc"
        r = requests.get(url, timeout=15, proxies=proxy)
        if r.status_code == 404:
            # no synced lyrics page; fall back to the plain lyrics page
            timed = False
            url = returned["url"]
            r = requests.get(url, timeout=15, proxies=proxy)
        soup = BeautifulSoup(r.text, 'html.parser')
        soup = soup.find("div", {"class": "lyricbox"})
        # side-effect comprehensions: drop nested divs, turn <br> into newlines
        [elem.extract() for elem in soup.findAll('div')]
        [elem.replaceWith('\n') for elem in soup.findAll('br')]
        # with old BeautifulSoup the following is needed..? For recent versions, this isn't needed/doesn't work
        try:
            # soup = BeautifulSoup(str(soup), convertEntities=BeautifulSoup.HTML_ENTITIES)
            soup = BeautifulSoup(str(soup), 'html.parser')
        except:
            pass
        # strip HTML comments and any scripts before extracting the text
        soup = BeautifulSoup(re.sub(r'(<!--[.\s\S]*-->)', '', str(soup)),
                             'html.parser')
        [elem.extract() for elem in soup.findAll('script')]
        return soup.getText(), url, timed
    else:
        return "error", "", False
def proxies_from_env() -> Dict[str, ProxyInfo]:
    """Build a proto -> ProxyInfo mapping from the system proxy settings.

    Only http/https entries are considered; https proxies are unsupported
    and skipped with a warning.  Credentials come from the proxy URL itself
    or, failing that, from netrc.
    """
    netrc_obj = netrc_from_env()
    ret = {}
    for proto, raw in getproxies().items():
        if proto not in ('http', 'https'):
            continue
        proxy, auth = strip_auth_from_url(URL(raw))
        if proxy.scheme == 'https':
            client_logger.warning(
                "HTTPS proxies %s are not supported, ignoring", proxy)
            continue
        if netrc_obj and auth is None and proxy.host is not None:
            auth_from_netrc = netrc_obj.authenticators(proxy.host)
            if auth_from_netrc is not None:
                # auth_from_netrc is a (`user`, `account`, `password`) tuple,
                # `user` and `account` both can be username,
                # if `user` is None, use `account`
                *logins, password = auth_from_netrc
                login = logins[0] if logins[0] else logins[-1]
                auth = BasicAuth(cast(str, login), cast(str, password))
        ret[proto] = ProxyInfo(proxy, auth)
    return ret
def check_version() -> bool:
    """Return True when the running version is current (or when the
    version check itself fails, so startup is never blocked)."""
    system_proxies = request.getproxies()
    try:
        tags = requests.get(
            "https://api.github.com/repos/SimonIT/spotifylyrics/tags",
            timeout=5, proxies=system_proxies).json()
        return get_version() >= float(tags[0]["name"])
    except Exception:
        return True
class Config:
    # System proxy settings used for outgoing requests.
    PROXY = request.getproxies()
    # Per-platform settings directory: %APPDATA%\SpotifyLyrics on Windows,
    # ~/.SpotifyLyrics/ elsewhere.
    if os.name == "nt":
        SETTINGS_DIR = os.getenv("APPDATA") + "\\SpotifyLyrics\\"
    else:
        SETTINGS_DIR = os.path.expanduser("~") + "/.SpotifyLyrics/"
    DEFAULT_LYRICS_DIR = os.path.join(SETTINGS_DIR, "lyrics")
    # Active lyrics directory; starts at the default location.
    LYRICS_DIR = DEFAULT_LYRICS_DIR
def __init__(self, insert_key, host=None, port=443):
    """Set up the HTTPS connection pool for the insert API.

    insert_key is sent as the Api-Key header; host/port default to the
    class HOST and 443.  A system-configured https proxy is honoured,
    but only when the proxy itself speaks plain http.
    """
    host = host or self.HOST
    headers = self.HEADERS.copy()
    headers.update({
        "Api-Key": insert_key,
        "Content-Encoding": "gzip",
        "Content-Type": "application/json",
    })

    # no automatic retries; redirects are not followed
    retries = urllib3.Retry(
        total=False, connect=None, read=None, redirect=0, status=None)

    # Check if https traffic should be proxied and pass the proxy
    # information to the connectionpool
    proxies = getproxies()
    proxy = proxies.get("https", None)
    proxy_headers = None

    if proxy:
        proxy = parse_url(proxy)
        _logger.info("Using proxy host={0!r} port={1!r}".format(
            proxy.host, proxy.port))
        if proxy.scheme.lower() != "http":
            _logger.warning("Contacting https destinations through "
                            "{} proxies is not supported.".format(
                                proxy.scheme))
            proxy = None
        elif proxy.auth:
            # https://tools.ietf.org/html/rfc7617
            #
            # The username/password encoding is not specified by a standard.
            # "this specification continues to leave the default encoding undefined"
            #
            # parse_url will encode non-ascii characters into a
            # percent-encoded string. As a result, we make the assumption
            # that anything returned from parse_url is utf-8 encoded.
            #
            # This is, of course, not guaranteed to be interpreted
            # correctly by the proxy server, but the failure mode will
            # hopefully be interpreted as an incorrect username/password
            # combination rather than causing a security issue where
            # information may be leaked (control characters, etc.)
            proxy_headers = urllib3.make_headers(
                proxy_basic_auth=proxy.auth)

    self._pool = self.POOL_CLS(
        host=host,
        port=port,
        retries=retries,
        headers=headers,
        strict=True,
        _proxy=proxy,
        _proxy_headers=proxy_headers,
    )
    self._headers = self._pool.headers
def __init__(
    self,
    reactor: ISynapseReactor,
    tls_client_options_factory: Optional[FederationPolicyForHTTPS],
    user_agent: bytes,
    ip_blacklist: IPSet,
    proxy_reactor: Optional[ISynapseReactor] = None,
    _srv_resolver: Optional[SrvResolver] = None,
    _well_known_resolver: Optional[WellKnownResolver] = None,
):
    """Build the federation HTTP agent.

    Sets up a shared connection pool, an Agent backed by the Matrix
    hostname endpoint factory (optionally routed through the system https
    proxy), and a well-known resolver guarded by the IP blacklist.
    """
    self._reactor = reactor
    self._clock = Clock(reactor)
    self._pool = HTTPConnectionPool(reactor)
    self._pool.retryAutomatically = False
    self._pool.maxPersistentPerHost = 5
    self._pool.cachedConnectionTimeout = 2 * 60

    # proxy connections may need a separate reactor (e.g. in tests)
    if proxy_reactor is None:
        self.proxy_reactor = reactor
    else:
        self.proxy_reactor = proxy_reactor

    # system https proxy as bytes, or None when not configured
    proxies = getproxies()
    https_proxy = proxies["https"].encode() if "https" in proxies else None

    self._agent = Agent.usingEndpointFactory(
        self._reactor,
        MatrixHostnameEndpointFactory(
            reactor,
            self.proxy_reactor,
            tls_client_options_factory,
            _srv_resolver,
            https_proxy,
        ),
        pool=self._pool,
    )
    self.user_agent = user_agent

    if _well_known_resolver is None:
        # Note that the name resolver has already been wrapped in a
        # IPBlacklistingResolver by MatrixFederationHttpClient.
        _well_known_resolver = WellKnownResolver(
            self._reactor,
            agent=BlacklistingAgentWrapper(
                ProxyAgent(
                    self._reactor,
                    self.proxy_reactor,
                    pool=self._pool,
                    contextFactory=tls_client_options_factory,
                    use_proxy=True,
                ),
                ip_blacklist=ip_blacklist,
            ),
            user_agent=self.user_agent,
        )
    self._well_known_resolver = _well_known_resolver
def __init__(self):
    """
    Initialize the default options.
    """
    super(Configuration, self).__init__()
    ## checking options
    self["allowedschemes"] = []
    self['cookiefile'] = None
    self['robotstxt'] = True
    self["debugmemory"] = False
    self["localwebroot"] = None
    # parse/download size caps, in bytes
    self["maxfilesizeparse"] = 1 * 1024 * 1024
    self["maxfilesizedownload"] = 5 * 1024 * 1024
    self["maxnumurls"] = None
    self["maxrunseconds"] = None
    self["maxrequestspersecond"] = 10
    self["maxhttpredirects"] = 10
    self["nntpserver"] = os.environ.get("NNTP_SERVER", None)
    # system proxy settings by default
    self["proxy"] = request.getproxies()
    self["sslverify"] = True
    self["threads"] = 10
    self["timeout"] = 60
    self["aborttimeout"] = 300
    # -1 means unlimited recursion depth
    self["recursionlevel"] = -1
    self["useragent"] = UserAgent
    ## authentication
    self["authentication"] = []
    self["loginurl"] = None
    self["loginuserfield"] = "login"
    self["loginpasswordfield"] = "password"
    self["loginextrafields"] = {}
    ## filtering
    self["externlinks"] = []
    self["ignorewarnings"] = []
    self["internlinks"] = []
    self["checkextern"] = False
    self["ignoreclasses"] = []
    ## plugins
    self["pluginfolders"] = get_plugin_folders()
    self["enabledplugins"] = []
    ## output
    self['trace'] = False
    self['quiet'] = False
    self["verbose"] = False
    self["warnings"] = True
    self["fileoutput"] = []
    self['output'] = 'text'
    self["status"] = False
    self["status_wait_seconds"] = 5
    self['logger'] = None
    self.loggers = {}
    # register one config section per known logger class
    from ..logger import LoggerClasses
    for c in LoggerClasses:
        key = c.LoggerName
        self[key] = {}
        self.loggers[key] = c
def _get_proxies(): proxies = getproxies() filtered_proxies = {} for key, val in proxies.items(): if key.startswith('http'): if not val.startswith('http'): filtered_proxies[key] = 'http://%s' % val else: filtered_proxies[key] = val return filtered_proxies
def update_proxy(self, proxy, proxy_auth, proxy_from_env):
    """Set self.proxy/self.proxy_auth, optionally resolving the proxy from
    the environment; only plain http proxies are accepted."""
    if proxy_from_env and not proxy:
        env_proxy = getproxies().get(self.original_url.scheme)
        proxy = URL(env_proxy) if env_proxy else None
    if proxy and proxy.scheme != 'http':
        raise ValueError("Only http proxies are supported")
    if proxy_auth and not isinstance(proxy_auth, helpers.BasicAuth):
        raise ValueError("proxy_auth must be None or BasicAuth() tuple")
    self.proxy = proxy
    self.proxy_auth = proxy_auth
def __init__(self, auth_encoding='latin-1'):
    """Collect system proxies, skipping entries that cannot be parsed."""
    self.auth_encoding = auth_encoding
    self.proxies = {}
    for scheme, proxy_url in getproxies().items():
        # some values such as '/var/run/docker.sock' can't be parsed
        # by _parse_proxy and as such should be skipped
        try:
            parsed = self._get_proxy(proxy_url, scheme)
        except ValueError:
            continue
        self.proxies[scheme] = parsed
def _in_no_proxy(self, parsed_dsn): # type: (Dsn) -> bool no_proxy = getproxies().get("no") if not no_proxy: return False for host in no_proxy.split(","): host = host.strip() if parsed_dsn.host.endswith(host) or parsed_dsn.netloc.endswith(host): return True return False
def function150(self, arg2369, arg455, arg1310):
    """Resolve and validate a proxy configuration (obfuscated update_proxy).

    arg2369: explicit proxy URL or None
    arg455: proxy auth object (must be helpers.BasicAuth or None)
    arg1310: when true and no explicit proxy, read it from the environment
    """
    # BUGFIX: the body referenced the undefined names `proxy_from_env`,
    # `proxy` and `proxy_auth` instead of the actual parameters, raising
    # NameError at runtime.
    if arg1310 and (not arg2369):
        var3723 = getproxies().get(self.attribute683.scheme)
        arg2369 = (URL(var3723) if var3723 else None)
    if arg2369 and (not (arg2369.scheme == 'http')):
        raise ValueError('Only http proxies are supported')
    if arg455 and (not isinstance(arg455, helpers.BasicAuth)):
        raise ValueError('proxy_auth must be None or BasicAuth() tuple')
    self.attribute1478 = arg2369
    self.attribute563 = arg455
def check_profanity(qu):
    """Query the wdyl.com profanity service for *qu* and print the verdict.

    BUGFIXES against the original:
    - urlencode(str(qu)) raised an error (urlencode expects key/value
      pairs); a single query value must be escaped with quote().
    - urlopen returns bytes on Python 3, so the body is decoded before
      the substring checks.
    - the connection is now always closed, even on error.
    """
    from urllib.parse import quote  # local import; file only brought in urlencode

    print(getproxies())
    connection = urlopen(r"http://www.wdyl.com/profanity?q=" + quote(str(qu)))
    try:
        output = connection.read().decode("utf-8", errors="replace")
        if "true" in output:
            print("Profanity Alert!!!")
        elif "false" in output:
            print("This document has no curse words!")
        else:
            print("Please mannualy check for Profanity")
    finally:
        connection.close()
def get_proxy_tuple():
    """
    Return system http proxy as a urlparse tuple or () if unset.
    """
    system_proxies = getproxies()
    candidate = system_proxies.get('http') or system_proxies.get('https') or ''
    return urlparse(candidate) if candidate else ()
def new_requests_session(proxies=True, agent=None):
    """Create a requests Session with a devpi user-agent and, optionally,
    the system proxy settings attached."""
    if agent is None:
        agent = "devpi"
    else:
        agent = "devpi-%s/%s" % agent
    agent = "%s (py%s; %s)" % (agent, sys.version.split()[0], sys.platform)

    session = Session()
    session.headers["user-agent"] = agent
    if proxies:
        session.proxies = getproxies()
    # expose the exception types so callers need not import requests
    session.ConnectionError = ConnectionError
    session.RequestException = RequestException
    return session
def proxies_from_env():
    """Read http/https proxies from the environment; https proxies are not
    supported and are dropped with a warning."""
    ret = {}
    for proto, raw in getproxies().items():
        if proto not in ('http', 'https'):
            continue
        proxy, auth = strip_auth_from_url(URL(raw))
        if proxy.scheme == 'https':
            client_logger.warning(
                "HTTPS proxies %s are not supported, ignoring", proxy)
            continue
        ret[proto] = ProxyInfo(proxy, auth)
    return ret
def fix_set_proxy_env():
    """
    Set http_proxy/https_proxy environment variables (for requests, pip, ...)
    from user-specified settings or, if none, from system settings on OS X
    and from registry on Windos.
    """
    # save default proxies so that setting can be reset
    global default_proxies
    if default_proxies is None:
        # can also read windows and macos settings
        default_proxies = getproxies()
    settings = QSettings()
    for scheme in set(["http", "https"]) | set(getproxies()):
        env_scheme = scheme + '_proxy'
        from_settings = settings.value("network/" + scheme + "-proxy", "", type=str)
        if from_settings:
            os.environ[env_scheme] = from_settings
            continue
        from_default = default_proxies.get(scheme, "")
        if from_default:
            os.environ[env_scheme] = from_default  # crucial for windows/macos support
        else:
            os.environ.pop(env_scheme, "")
def test_connect_env_var_https_ignored(self, ClientRequestMock):
    """An HTTPS proxy env var alone must be ignored for plain http requests."""
    req = ClientRequest(
        'GET', URL('http://www.python.org'),
        proxy_from_env=True,
        loop=self.loop
    )
    # no http proxy in the environment -> no proxy resolved
    self.assertIsNone(req.proxy)
    # mock all the things!
    connector = aiohttp.TCPConnector(loop=self.loop)
    connector._resolve_host = make_mocked_coro([mock.MagicMock()])
    proto = mock.Mock()
    self.loop.create_connection = make_mocked_coro(
        (proto.transport, proto))
    conn = self.loop.run_until_complete(connector.connect(req))
    self.assertEqual(req.url, URL('http://www.python.org'))
    self.assertIs(conn._protocol, proto)
    self.assertIs(conn.transport, proto.transport)
    # we patch only the connector, it should not be called
    ClientRequestMock.assert_not_called()
    # environment sanity: https proxy present, http proxy absent
    self.assertIn('https', getproxies())
    self.assertNotIn('http', getproxies())
def get_proxies(debug=True):
    """Return the system proxies with scheme prefixes and trailing slashes
    stripped; entries that are empty, contain '..' or are implausibly short
    are removed (and reported when *debug* is true)."""
    proxies = getproxies()
    for key, proxy in list(proxies.items()):
        if not proxy or '..' in proxy:
            del proxies[key]
            continue
        # strip a leading "<scheme>://" prefix matching the entry's key
        scheme_prefix = key + '://'
        if proxy.startswith(scheme_prefix):
            proxy = proxy[len(scheme_prefix):]
        # an https proxy may be given with an http:// prefix
        if key == 'https' and proxy.startswith('http://'):
            proxy = proxy[len('http://'):]
        proxy = proxy[:-1] if proxy.endswith('/') else proxy
        if len(proxy) > 4:
            proxies[key] = proxy
        else:
            prints('Removing invalid', key, 'proxy:', proxy)
            del proxies[key]
    if proxies and debug:
        prints('Using proxies:', repr(proxies))
    return proxies
def test_connect_env_var_https_used(self, ClientRequestMock):
    """An https:// request with proxy_from_env=True must pick up the
    environment's https proxy and tunnel through it with CONNECT."""
    # The patched ClientRequest class is what the connector uses for the
    # proxy request; make it return a pre-built request/response pair.
    proxy_req = ClientRequest('GET', URL('http://proxy.example.com'),
                              loop=self.loop)
    ClientRequestMock.return_value = proxy_req
    proxy_resp = ClientResponse('get', URL('http://proxy.example.com'))
    proxy_resp._loop = self.loop
    proxy_req.send = send_mock = mock.Mock()
    send_mock.return_value = proxy_resp
    proxy_resp.start = make_mocked_coro(mock.Mock(status=200))

    connector = aiohttp.TCPConnector(loop=self.loop)
    connector._resolve_host = make_mocked_coro(
        [{'hostname': 'hostname', 'host': '127.0.0.1', 'port': 80,
          'family': socket.AF_INET, 'proto': 0, 'flags': 0}])

    tr, proto = mock.Mock(), mock.Mock()
    self.loop.create_connection = make_mocked_coro((tr, proto))

    req = ClientRequest(
        'GET', URL('https://www.python.org'),
        proxy_from_env=True,
        loop=self.loop,
    )
    self.loop.run_until_complete(connector._create_connection(req))

    # After a successful CONNECT the request path is reset to '/' and the
    # proxy request carries the CONNECT method with the target URL.
    self.assertEqual(req.url.path, '/')
    self.assertEqual(proxy_req.method, 'CONNECT')
    self.assertEqual(proxy_req.url, URL('https://www.python.org'))
    # The raw transport to the proxy is closed once the tunnel is set up,
    # after its underlying socket has been extracted.
    tr.close.assert_called_once_with()
    tr.get_extra_info.assert_called_with('socket', default=None)

    self.loop.run_until_complete(proxy_req.close())
    proxy_resp.close()
    self.loop.run_until_complete(req.close())
    # Sanity check: the environment really advertises an https proxy —
    # presumably patched by a decorator outside this view; confirm.
    self.assertIn('https', getproxies())
# Prefer the Python 3 location of getproxies; fall back to the Python 2
# module name.
try:
    from urllib.request import getproxies
except ImportError:  # py2
    from urllib2 import getproxies

from geopy.compat import urlopen
from geopy.exc import GeocoderServiceError
from geopy.geocoders.base import Geocoder

# Path to the self-signed CA bundle shipped next to this module.
CERT_SELFSIGNED_CA = os.path.join(os.path.dirname(__file__),
                                  'selfsigned_ca.pem')

# Are system proxies set? System proxies are set in:
# - Environment variables (HTTP_PROXY/HTTPS_PROXY) on Unix;
# - System Configuration Framework on macOS;
# - Registry's Internet Settings section on Windows.
WITH_SYSTEM_PROXIES = bool(getproxies())


class DummyGeocoder(Geocoder):
    # Minimal geocoder used for proxy tests: fetch the raw page for
    # `location` and return its body, or None when the body is empty.
    def geocode(self, location):
        geo_request = self._call_geocoder(location, raw=True)
        geo_html = geo_request.read()
        return geo_html if geo_html else None


class ProxyTestCase(unittest.TestCase):
    # Target URLs and per-request timeout used by the proxy tests.
    remote_website_http = "http://example.org/"
    remote_website_https = "https://example.org/"
    timeout = 5

    def setUp(self):
log = getLogger(__name__) # 1. get proxies if needed. a proxy for each protocol # 2. handle authentication # basic, digest, and nltm (windows) authentications should be handled. # 3. handle any protocol # typically http, https, ftp # 1. get the proxies list # urllib can only get proxies on windows and mac. so on linux or if the user # wants to specify the proxy there has to be a way to do that. TODO get proxies # from condarc and overrwrite any system proxies # the proxies are in a dict {'http':'http://proxy:8080'} # protocol:proxyserver proxies_dict = get_proxy_servers() or urllib2.getproxies() #2. handle authentication proxypwdmgr = urllib2.HTTPPasswordMgrWithDefaultRealm() def get_userandpass(proxytype='',realm=''): """a function to get username and password from terminal. can be replaced with anything like some gui""" import getpass uname = input(proxytype + ' proxy username:') pword = getpass.getpass() return uname, pword
else:  # Python 2.x
    import urllib2
    import urlparse

log = getLogger(__name__)

# 1. get proxies if needed. a proxy for each protocol
# 2. handle authentication
#    basic, digest, and nltm (windows) authentications should be handled.
# 3. handle any protocol
#    typically http, https, ftp

# 1. get the proxies list
proxies_dict = urllib2.getproxies()
# urllib can only get proxies on windows and mac. so on linux or if the user
# wants to specify the proxy there has to be a way to do that. TODO get proxies
# from condarc and overrwrite any system proxies
# the proxies are in a dict {'http':'http://proxy:8080'} protocol:proxyserver
if get_proxy_servers():
    # user-specified servers override whatever the system reported
    proxies_dict = get_proxy_servers()

# 2. handle authentication
proxypwdmgr = urllib2.HTTPPasswordMgrWithDefaultRealm()


def get_userandpass(proxytype='', realm=''):
def __init__(self, verbose):
    """Build the Photini main window: logger, config store, network proxy,
    image selector, menus, and the tab/splitter layout."""
    QtGui.QMainWindow.__init__(self)
    self.setWindowTitle("Photini photo metadata editor")
    self.selection = list()
    # logger window
    self.loggerwindow = LoggerWindow(verbose)
    self.logger = logging.getLogger(self.__class__.__name__)
    # config store
    self.config_store = ConfigStore('editor')
    # set network proxy from the system http proxy, if one is configured
    proxies = getproxies()
    if 'http' in proxies:
        parsed = urlparse(proxies['http'])
        QNetworkProxy.setApplicationProxy(
            QNetworkProxy(QNetworkProxy.HttpProxy,
                          parsed.hostname, parsed.port))
    # restore size
    size = self.width(), self.height()
    # NOTE(review): eval() on the stored config value — config file is
    # presumably local/trusted, but ast.literal_eval would be safer; confirm.
    self.resize(*eval(
        self.config_store.get('main_window', 'size', str(size))))
    # image selector
    self.image_list = ImageList(self.config_store)
    self.image_list.selection_changed.connect(self.new_selection)
    self.image_list.new_metadata.connect(self.new_metadata)
    # prepare list of tabs and associated stuff
    self.tab_list = (
        {'name' : '&Descriptive metadata', 'class' : Descriptive},
        {'name' : '&Technical metadata', 'class' : Technical},
        {'name' : 'Map (&Google)', 'class' : GoogleMap},
        {'name' : 'Map (&Bing)', 'class' : BingMap},
        {'name' : 'Map (&OSM)', 'class' : OpenStreetMap},
        {'name' : '&Flickr upload', 'class' : FlickrUploader},
        {'name' : '&Picasa upload', 'class' : PicasaUploader},
        {'name' : '&Import from camera', 'class' : Importer},
        )
    for tab in self.tab_list:
        # derive a config key from the menu label, e.g. "map_google"
        tab['key'] = tab['name'].replace('&', '').replace(' ', '_')
        tab['key'] = tab['key'].replace('(', '').replace(')', '').lower()
        if tab['class']:
            tab['object'] = tab['class'](self.config_store, self.image_list)
        else:
            tab['object'] = None
    # file menu
    file_menu = self.menuBar().addMenu('File')
    open_action = QtGui.QAction('Open images', self)
    open_action.setShortcuts(['Ctrl+O'])
    open_action.triggered.connect(self.image_list.open_files)
    file_menu.addAction(open_action)
    # save/close actions start disabled; enabled once images are loaded
    self.save_action = QtGui.QAction('Save images with new data', self)
    self.save_action.setShortcuts(['Ctrl+S'])
    self.save_action.setEnabled(False)
    self.save_action.triggered.connect(self.image_list.save_files)
    file_menu.addAction(self.save_action)
    self.close_action = QtGui.QAction('Close selected images', self)
    self.close_action.setEnabled(False)
    self.close_action.triggered.connect(self.close_files)
    file_menu.addAction(self.close_action)
    close_all_action = QtGui.QAction('Close all images', self)
    close_all_action.triggered.connect(self.close_all_files)
    file_menu.addAction(close_all_action)
    file_menu.addSeparator()
    quit_action = QtGui.QAction('Quit', self)
    quit_action.setShortcuts(['Ctrl+Q', 'Ctrl+W'])
    quit_action.triggered.connect(
        QtGui.QApplication.instance().closeAllWindows)
    file_menu.addAction(quit_action)
    # options menu
    options_menu = self.menuBar().addMenu('Options')
    settings_action = QtGui.QAction('Settings', self)
    settings_action.triggered.connect(self.edit_settings)
    options_menu.addAction(settings_action)
    options_menu.addSeparator()
    # one checkable action per tab, restoring its visibility from config
    for tab in self.tab_list:
        tab['action'] = QtGui.QAction(tab['name'].replace('&', ''), self)
        tab['action'].setCheckable(True)
        if tab['class']:
            # NOTE(review): eval() of a stored boolean string — same caveat
            # as above; ast.literal_eval would be safer.
            tab['action'].setChecked(
                eval(self.config_store.get('tabs', tab['key'], 'True')))
        else:
            tab['action'].setEnabled(False)
        tab['action'].triggered.connect(self.add_tabs)
        options_menu.addAction(tab['action'])
    # help menu
    help_menu = self.menuBar().addMenu('Help')
    about_action = QtGui.QAction('About Photini', self)
    about_action.triggered.connect(self.about)
    help_menu.addAction(about_action)
    help_menu.addSeparator()
    help_action = QtGui.QAction('Photini documentation', self)
    help_action.triggered.connect(self.open_docs)
    help_menu.addAction(help_action)
    # main application area: tabs above the image list in a vertical splitter
    self.central_widget = QtGui.QSplitter()
    self.central_widget.setOrientation(Qt.Vertical)
    self.central_widget.setChildrenCollapsible(False)
    self.tabs = QtGui.QTabWidget()
    self.tabs.currentChanged.connect(self.new_tab)
    self.add_tabs()
    self.central_widget.addWidget(self.tabs)
    self.central_widget.addWidget(self.image_list)
    size = self.central_widget.sizes()
    # restore the splitter position from config
    self.central_widget.setSizes(eval(
        self.config_store.get('main_window', 'split', str(size))))
    self.central_widget.splitterMoved.connect(self.new_split)
    self.setCentralWidget(self.central_widget)