def urlopen(url, proxies=None, data=None):
    """Open *url* with gzip/deflate support, proxies and authentication.

    Builds (and caches in the module-global ``_opener``) an opener wired
    with proxy, Basic/Digest auth (HTTP and proxy flavours) and the
    gzip-capable HTTP(S) handlers, then opens the request through it.

    @param url: the URL to open.
    @param proxies: mapping of scheme -> proxy URL.  ``None`` means "use
        the system settings"; pass ``{}`` to disable proxies entirely.
    @param data: optional request body (``None`` -> GET).
    @return: file-like response object from the opener.
    """
    global _opener
    # Bug fix: the original `if not proxies:` also overwrote an
    # explicitly-passed empty dict, which is the documented way to
    # *disable* proxy use.  Only fall back to the system settings when
    # the caller passed None.
    if proxies is None:
        proxies = urllib.getproxies()
    headers = {
        'User-Agent': UA_STR,
        'Accept-Encoding': 'gzip;q=1.0, deflate;q=0.9, identity;q=0.5',
    }
    req = urllib2.Request(url, data, headers)
    proxy_support = urllib2.ProxyHandler(proxies)
    if _opener is None:
        pwd_manager = handlepasswd()
        handlers = [
            # NOTE(review): proxy_support is only installed the first
            # time through; later calls with different `proxies` reuse
            # the cached opener.  Preserved as-is -- confirm intent.
            proxy_support,
            urllib2.UnknownHandler,
            HttpWithGzipHandler,
            urllib2.HTTPBasicAuthHandler(pwd_manager),
            urllib2.ProxyBasicAuthHandler(pwd_manager),
            urllib2.HTTPDigestAuthHandler(pwd_manager),
            urllib2.ProxyDigestAuthHandler(pwd_manager),
            urllib2.HTTPDefaultErrorHandler,
            urllib2.HTTPRedirectHandler,
        ]
        # HTTPS support is optional in old httplib builds.
        if hasattr(httplib, 'HTTPS'):
            handlers.append(HttpsWithGzipHandler)
        _opener = urllib2.build_opener(*handlers)
        urllib2.install_opener(_opener)
    return _opener.open(req)
def setUp(self):
    super(ProxyAuthTests, self).setUp()

    # Neutralise any proxy-bypass settings inherited from the caller's
    # environment, restoring the original environment on teardown.
    saved_environ = os.environ.copy()

    def restore_environ(old_environ):
        os.environ.clear()
        os.environ.update(old_environ)

    self.addCleanup(restore_environ, saved_environ)
    os.environ['NO_PROXY'] = ''
    os.environ['no_proxy'] = ''

    # Loopback proxy that challenges with Digest authentication.
    self.digest_auth_handler = DigestAuthHandler()
    self.digest_auth_handler.set_users({self.USER: self.PASSWD})
    self.digest_auth_handler.set_realm(self.REALM)

    def create_fake_proxy_handler(*args, **kwargs):
        return FakeProxyHandler(self.digest_auth_handler, *args, **kwargs)

    self.server = LoopbackHttpServerThread(create_fake_proxy_handler)
    self.server.start()
    self.server.ready.wait()
    self.addCleanup(self.server.stop)

    # Opener that routes through the loopback proxy and can answer its
    # Digest challenges once credentials are registered by the tests.
    proxy_url = "http://127.0.0.1:%d" % self.server.port
    handler = urllib2.ProxyHandler({"http" : proxy_url})
    self.proxy_digest_handler = urllib2.ProxyDigestAuthHandler()
    self.opener = urllib2.build_opener(handler, self.proxy_digest_handler)
def get_opener(user=None, password=None, proxy=None, debuglevel=0):
    """Construct an URL opener object.

    It considers the given credentials and proxy.

    @return: URL opener
    @rtype: urllib2.OpenerDirector
    """
    from . import httputil
    credentials = PasswordManager(user, password)
    chain = [
        urllib2.UnknownHandler,
        httputil.HttpWithGzipHandler(debuglevel=debuglevel),
        urllib2.HTTPBasicAuthHandler(credentials),
        urllib2.HTTPDigestAuthHandler(credentials),
    ]
    if proxy:
        # Route both schemes through the same proxy and be ready to
        # answer proxy authentication challenges with the same creds.
        chain.insert(0, urllib2.ProxyHandler({"http": proxy, "https": proxy}))
        chain.append(urllib2.ProxyBasicAuthHandler(credentials))
        chain.append(urllib2.ProxyDigestAuthHandler(credentials))
    # HTTPS support is optional in old httplib builds.
    if hasattr(httplib, 'HTTPS'):
        chain.append(httputil.HttpsWithGzipHandler(debuglevel=debuglevel))
    return urllib2.build_opener(*chain)
def setUp(self):
    # Register the test credentials with the class-level digest handler.
    credentials = {self.USER: self.PASSWD}
    FakeProxyHandler.digest_auth_handler.set_users(credentials)
    FakeProxyHandler.digest_auth_handler.set_realm(self.REALM)

    # Spin up the loopback proxy and block until it is accepting.
    self.server = LoopbackHttpServerThread(FakeProxyHandler)
    self.server.start()
    self.server.ready.wait()

    # Opener that talks through the proxy and handles Digest challenges.
    proxy_url = "http://127.0.0.1:%d" % self.server.port
    self._digest_auth_handler = urllib2.ProxyDigestAuthHandler()
    self.opener = urllib2.build_opener(
        urllib2.ProxyHandler({"http": proxy_url}),
        self._digest_auth_handler)
def setUp(self):
    # Point the shared digest handler at this test's credentials.
    FakeProxyHandler.digest_auth_handler.set_users({self.USER: self.PASSWD})
    FakeProxyHandler.digest_auth_handler.set_realm(self.REALM)

    # Start the loopback proxy on the fixed test port and wait until it
    # is ready to serve requests.
    self.server = LoopbackHttpServerThread(self.PORT, FakeProxyHandler)
    self.server.start()
    self.server.ready.wait()

    # Build the opener: proxy routing plus Digest challenge handling.
    proxy = urllib2.ProxyHandler({"http": self.PROXY_URL})
    self._digest_auth_handler = urllib2.ProxyDigestAuthHandler()
    self.opener = urllib2.build_opener(proxy, self._digest_auth_handler)
def buildOpener(self):
    ''' Build a URL opener based on the information in options.

    Wires up (in order): an optional proxy (with or without proxy
    credentials) and optional site credentials for self._url, then
    returns the assembled urllib2 opener.
    '''
    handlers = []
    # Proxy server handling
    if self._proxy_user is not None and self._proxy_server is not None:
        # we have already checked to see if proxy_server, proxy_password, and proxy_port are valid
        # Authenticated proxy: register the credentials for both the
        # http and https proxy endpoints so either scheme can answer
        # Basic or Digest challenges.
        proxy_server_http = 'http://' + self._proxy_server + ':' + str(self._proxy_port) + '/'
        proxy_server_https = 'https://' + self._proxy_server + ':' + str(self._proxy_port) + '/'
        proxy_password_mgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
        proxy_password_mgr.add_password(realm=None, uri=proxy_server_http, user=self._proxy_user, passwd=self._proxy_password)
        proxy_password_mgr.add_password(realm=None, uri=proxy_server_https, user=self._proxy_user, passwd=self._proxy_password)
        proxy_basicauth_handler = urllib2.ProxyBasicAuthHandler(proxy_password_mgr)
        proxy_digestauth_handler = urllib2.ProxyDigestAuthHandler(proxy_password_mgr)
        proxy_handler = urllib2.ProxyHandler({'http': proxy_server_http, 'https': proxy_server_https})
        handlers.extend([proxy_handler, proxy_basicauth_handler, proxy_digestauth_handler])
    elif self._proxy_server is not None:
        # we have already checked to see if proxy_server and proxy_port are valid
        # Unauthenticated proxy: routing only, no credential handlers.
        proxy_server_http = 'http://' + self._proxy_server + ':' + str(self._proxy_port) + '/'
        proxy_server_https = 'https://' + self._proxy_server + ':' + str(self._proxy_port) + '/'
        proxy_handler = urllib2.ProxyHandler({'http': proxy_server_http, 'https': proxy_server_https})
        handlers.extend([proxy_handler])
    # HTTP auth handling
    if self._api_user is not None:
        # We have already checked to ensure password is valid.
        # Site (non-proxy) credentials, scoped to self._url.
        site_password_mgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
        site_password_mgr.add_password(realm=None, uri=self._url, user=self._api_user, passwd=self._api_password)
        site_basicauth_handler = urllib2.HTTPBasicAuthHandler(site_password_mgr)
        site_digestauth_handler = urllib2.HTTPDigestAuthHandler(site_password_mgr)
        handlers.extend([site_basicauth_handler, site_digestauth_handler])
    # Debugging
    # NOTE(review): if re-enabled, this must be append(), not extend() --
    # extend() over a single handler object would raise TypeError.
    #handlers.extend(urllib2.HTTPHandler(debuglevel=1))
    return urllib2.build_opener(*handlers)
def main():
    """Fetch a URL through a digest-authenticating HTTP proxy and print it."""
    args = parse_args()

    # Register the credentials the proxy's Digest challenge will need.
    password_manager = urllib2.HTTPPasswordMgrWithDefaultRealm()
    password_manager.add_password(None, args.url, args.user, args.password)
    auth_handler = urllib2.ProxyDigestAuthHandler(password_manager)

    # Install a global opener that routes via the proxy.
    proxy_support = urllib2.ProxyHandler({"http": args.proxy})
    urllib2.install_opener(urllib2.build_opener(proxy_support, auth_handler))

    handle = urllib2.urlopen(args.url)
    page = handle.read()
    # Trailing comma suppresses the newline (Python 2 print statement).
    print(page),
def setUp(self):
    # Loopback proxy that challenges with Digest authentication.
    self.digest_auth_handler = DigestAuthHandler()
    self.digest_auth_handler.set_users({self.USER: self.PASSWD})
    self.digest_auth_handler.set_realm(self.REALM)

    def create_fake_proxy_handler(*args, **kwargs):
        return FakeProxyHandler(self.digest_auth_handler, *args, **kwargs)

    # Start the server and wait until it is accepting connections.
    self.server = LoopbackHttpServerThread(create_fake_proxy_handler)
    self.server.start()
    self.server.ready.wait()

    # Opener routed through the proxy, able to answer its challenges.
    proxy_url = "http://127.0.0.1:%d" % self.server.port
    self.proxy_digest_handler = urllib2.ProxyDigestAuthHandler()
    self.opener = urllib2.build_opener(
        urllib2.ProxyHandler({"http" : proxy_url}),
        self.proxy_digest_handler)
def __build_opener(self, url):
    """ Build the proxy opener

    Returns an opener routed through self.__proxy_handler; when the
    proxy requires authentication, Basic, Digest and NTLM handlers are
    attached sharing a single password manager.
    """
    if not self.need_authentication():
        return urllib2.build_opener(self.__proxy_handler)

    # The proxy wants credentials: register them once and attach every
    # auth scheme we support.
    password_mgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
    password_mgr.add_password(None, url, self.__proxy_user,
                              self.__proxy_pass)
    auth_handlers = [
        urllib2.ProxyBasicAuthHandler(password_mgr),
        urllib2.ProxyDigestAuthHandler(password_mgr),
        HTTPNtlmAuthHandler.HTTPNtlmAuthHandler(password_mgr),
    ]
    return urllib2.build_opener(self.__proxy_handler, *auth_handlers)
def main(): banner() parser = optparse.OptionParser(formatter=optparse.TitledHelpFormatter()) parser.set_usage("python proxy.py -u <remoteurl> -l <localport> [options]") parser.add_option('-u', '--url', help='url of the remote webshell', dest='url', action='store') parser.add_option('-l', '--lport', help='local listening port', dest='local_port', action='store', type='int') #Verbosity parser.add_option('-v', '--verbose', help='Verbose (outputs packet size)', dest='verbose', action='store_true', default=Defaults['verbose']) #Legacy options legacyGroup = optparse.OptionGroup( parser, "No SOCKS Options", "Options are ignored if SOCKS proxy is used") legacyGroup.add_option('-n', '--no-socks', help='Do not use Socks Proxy', dest='useSocks', action='store_false', default=Defaults['useSocks']) legacyGroup.add_option( '-r', '--rport', help='remote port of service for the webshell to connect to', dest='remote_port', action='store', type='int', default=Defaults['remote_port']) legacyGroup.add_option( '-a', '--addr', help='address for remote webshell to connect to (default = 127.0.0.1)', dest='remote_ip', action='store', default=Defaults['remote_ip']) parser.add_option_group(legacyGroup) #Proxy options proxyGroup = optparse.OptionGroup( parser, "Upstream Proxy Options", "Tunnel connection through a local Proxy") proxyGroup.add_option('-x', '--up-proxy', help='Upstream proxy (http://proxyserver.com:3128)', dest='upProxy', action='store', default=Defaults['upProxy']) proxyGroup.add_option('-A', '--auth', help='Upstream proxy requires authentication', dest='upProxyAuth', action='store_true', default=Defaults['upProxyAuth']) parser.add_option_group(proxyGroup) #Advanced options advancedGroup = optparse.OptionGroup(parser, "Advanced Options") parser.add_option( '-b', '--buffer', help='HTTP request size (some webshels have limitations on the size)', dest='bufferSize', action='store', type='int', default=Defaults['bufferSize']) advancedGroup.add_option( '-q', '--ping-interval', 
help='webshprx pinging thread interval (default = 0.5)', dest='ping_delay', action='store', type='float', default=Defaults['ping_delay']) advancedGroup.add_option( '-s', '--start-ping', help= 'Start the pinging thread first - some services send data first (eg. SSH)', dest='start_p_thread', action='store_true', default=Defaults['start_p_thread']) parser.add_option_group(advancedGroup) (args, opts) = parser.parse_args() options = dict(Defaults.items() + vars(args).items() ) if args else Defaults #If missing options use Default if not options['local_port']: parser.print_help() parser.error("Missing local port") if not options['url']: parser.print_help() parser.error("Missing URL") if options['upProxyAuth']: #Upstream Proxy requires authentication username = raw_input("Proxy Authentication\nUsername:"******"Password:"******"Missing Proxy URL") else: from urlparse import urlparse u = urlparse(options['upProxy']) prx = "%s://%s:%s@%s" % (u.scheme, username, passwd, u.netloc) password_mgr = urllib2.HTTPPasswordMgrWithDefaultRealm() password_mgr.add_password(None, prx, username, passwd) proxy_handler = urllib2.ProxyHandler({u.scheme: prx}) proxy_basic_handler = urllib2.ProxyBasicAuthHandler(password_mgr) proxy_digest_handler = urllib2.ProxyDigestAuthHandler(password_mgr) options['upProxyAuth'] = [ proxy_handler, proxy_basic_handler, proxy_digest_handler ] try: T = TunnaClient(options) TunnaThread = threading.Thread(name='TunnaThread', target=T.run(), args=(options, )) TunnaThread.start() while True: sleep(10) except (KeyboardInterrupt, SystemExit) as e: print '[!] Received Interrupt or Something Went Wrong' if DEBUG > 0: import traceback print traceback.format_exc() if 'T' in locals(): T.__del__() if 'TunnaThread' in locals() and TunnaThread.isAlive(): TunnaThread._Thread__stop() sys.exit() except Exception as e: if DEBUG > 0: import traceback print traceback.format_exc() print "General Exception:", e
    @param user: The username to authenticate with. Use C{None} to
        disable authentication.
    @param password: The password to authenticate with.
    """
    # NOTE(review): the enclosing def and the start of its docstring lie
    # outside this chunk; the visible body installs a global urllib2
    # opener that routes HTTP through *proxy* -- confirm signature in
    # the full file.
    import urllib
    import urllib2

    if proxy is None:
        # Try and find the system proxy settings
        try:
            proxy = urllib.getproxies()['http']
        except KeyError:
            raise ValueError('Could not detect default proxy settings')

    # Set up the proxy handler
    proxy_handler = urllib2.ProxyHandler({'http': proxy})
    opener = urllib2.build_opener(proxy_handler)

    if user is not None:
        # Set up basic proxy authentication if provided; the same
        # credentials are registered for Basic and Digest challenges.
        password_manager = urllib2.HTTPPasswordMgrWithDefaultRealm()
        password_manager.add_password(realm=None, uri=proxy, user=user,
                                      passwd=password)
        opener.add_handler(urllib2.ProxyBasicAuthHandler(password_manager))
        opener.add_handler(urllib2.ProxyDigestAuthHandler(password_manager))

    # Override the existing url opener so module-level urlopen() calls
    # go through the proxy from here on.
    urllib2.install_opener(opener)
def download(self, url, error_message, timeout, tries):
    """
    Downloads a URL and returns the contents

    Uses the proxy settings from the Package Control.sublime-settings file,
    however there seem to be a decent number of proxies that this code
    does not work with. Patches welcome!

    :param url:
        The URL to download

    :param error_message:
        A string to include in the console error that is printed
        when an error occurs

    :param timeout:
        The int number of seconds to set the timeout to

    :param tries:
        The int number of times to try and download the URL in the case of
        a timeout or HTTP 503 error

    :return: The string contents of the URL, or False on error
    """
    # Build the proxy handler from the user's settings (empty handler
    # falls back to the environment/system proxy detection).
    http_proxy = self.settings.get('http_proxy')
    https_proxy = self.settings.get('https_proxy')
    if http_proxy or https_proxy:
        proxies = {}
        if http_proxy:
            proxies['http'] = http_proxy
        if https_proxy:
            proxies['https'] = https_proxy
        proxy_handler = urllib2.ProxyHandler(proxies)
    else:
        proxy_handler = urllib2.ProxyHandler()

    # Register the same proxy credentials (if any) for each proxy URL.
    password_manager = urllib2.HTTPPasswordMgrWithDefaultRealm()
    proxy_username = self.settings.get('proxy_username')
    proxy_password = self.settings.get('proxy_password')
    if proxy_username and proxy_password:
        if http_proxy:
            password_manager.add_password(None, http_proxy, proxy_username,
                proxy_password)
        if https_proxy:
            password_manager.add_password(None, https_proxy, proxy_username,
                proxy_password)

    handlers = [proxy_handler]
    # NTLM proxy auth is only attempted on Windows.
    if os.name == 'nt':
        ntlm_auth_handler = ProxyNtlmAuthHandler(password_manager)
        handlers.append(ntlm_auth_handler)

    basic_auth_handler = urllib2.ProxyBasicAuthHandler(password_manager)
    digest_auth_handler = urllib2.ProxyDigestAuthHandler(password_manager)
    handlers.extend([digest_auth_handler, basic_auth_handler])

    debug = self.settings.get('debug')
    if debug:
        console_write(u"Urllib2 Debug Proxy", True)
        console_write(u" http_proxy: %s" % http_proxy)
        console_write(u" https_proxy: %s" % https_proxy)
        console_write(u" proxy_username: %s" % proxy_username)
        console_write(u" proxy_password: %s" % proxy_password)

    # HTTPS URLs get a certificate-validating handler keyed on the
    # domain; plain HTTP gets the debuggable handler.
    secure_url_match = re.match('^https://([^/]+)', url)
    if secure_url_match != None:
        secure_domain = secure_url_match.group(1)
        bundle_path = self.check_certs(secure_domain, timeout)
        if not bundle_path:
            return False
        bundle_path = bundle_path.encode(sys.getfilesystemencoding())
        handlers.append(ValidatingHTTPSHandler(ca_certs=bundle_path,
            debug=debug, passwd=password_manager,
            user_agent=self.settings.get('user_agent')))
    else:
        handlers.append(DebuggableHTTPHandler(debug=debug,
            passwd=password_manager))
    urllib2.install_opener(urllib2.build_opener(*handlers))

    # Retry loop: 503s and timeouts `continue` (retry); any other error
    # falls through to `break` and the method returns False.
    while tries > 0:
        tries -= 1
        try:
            request = urllib2.Request(url, headers={
                "User-Agent": self.settings.get('user_agent'),
                # Don't be alarmed if the response from the server does not
                # select one of these since the server runs a relatively new
                # version of OpenSSL which supports compression on the SSL
                # layer, and Apache will use that instead of HTTP-level
                # encoding.
                "Accept-Encoding": "gzip,deflate"})
            http_file = urllib2.urlopen(request, timeout=timeout)
            self.handle_rate_limit(http_file, url)
            result = http_file.read()
            encoding = http_file.headers.get('Content-Encoding')
            return self.decode_response(encoding, result)

        # NOTE: `except X as (e)` is Python-2-only syntax; kept as-is.
        except (httplib.HTTPException) as (e):
            error_string = u'%s HTTP exception %s (%s) downloading %s.' % (
                error_message, e.__class__.__name__, unicode_from_os(e), url)
            console_write(error_string, True)

        except (urllib2.HTTPError) as (e):
            # Make sure we obey Github's rate limiting headers
            self.handle_rate_limit(e, url)

            # Bitbucket and Github return 503 a decent amount
            if unicode_from_os(e.code) == '503':
                error_string = u'Downloading %s was rate limited, trying again' % url
                console_write(error_string, True)
                continue

            error_string = u'%s HTTP error %s downloading %s.' % (
                error_message, unicode_from_os(e.code), url)
            console_write(error_string, True)

        except (urllib2.URLError) as (e):
            # Bitbucket and Github timeout a decent amount
            if unicode_from_os(e.reason) == 'The read operation timed out' \
                    or unicode_from_os(e.reason) == 'timed out':
                error_string = u'Downloading %s timed out, trying again' % url
                console_write(error_string, True)
                continue

            error_string = u'%s URL error %s downloading %s.' % (
                error_message, unicode_from_os(e.reason), url)
            console_write(error_string, True)

        break
    return False