def test_auth_str_bytes():
    # https://github.com/httplib2/httplib2/pull/115
    # Proxy-Authorization b64encode() TypeError: a bytes-like object is required, not 'str'
    with tests.server_const_http(request_count=2) as uri:
        uri_parsed = urllib.parse.urlparse(uri)
        http = httplib2.Http(
            proxy_info=httplib2.ProxyInfo(
                httplib2.socks.PROXY_TYPE_HTTP,
                proxy_host=uri_parsed.hostname,
                proxy_port=uri_parsed.port,
                proxy_rdns=True,
                proxy_user=u"user_str",
                proxy_pass=u"pass_str",
            )
        )
        response, _ = http.request(uri, "GET")
        assert response.status == 200
    with tests.server_const_http(request_count=2) as uri:
        uri_parsed = urllib.parse.urlparse(uri)
        http = httplib2.Http(
            proxy_info=httplib2.ProxyInfo(
                httplib2.socks.PROXY_TYPE_HTTP,
                proxy_host=uri_parsed.hostname,
                proxy_port=uri_parsed.port,
                proxy_rdns=True,
                proxy_user=b"user_bytes",
                proxy_pass=b"pass_bytes",
            )
        )
        response, _ = http.request(uri, "GET")
        assert response.status == 200

def __init__(self, http_proxy=None, https_proxy=None, cafile=None,
             disable_certificate_validation=False):
    self._http_proxy = http_proxy
    self._https_proxy = https_proxy
    # Initialize httplib2.ProxyInfo used by oauth2client and a proxy option
    # dictionary used to generate the urllib2.ProxyHandler used by suds and
    # urllib2. For the ProxyInfo object, the HTTPS proxy will always be used
    # over the HTTP proxy if it is available.
    self.proxy_info = None
    self._proxy_option = {}
    if self._https_proxy:
        self.proxy_info = httplib2.ProxyInfo(
            socks.PROXY_TYPE_HTTP,
            self._https_proxy.host,
            self._https_proxy.port,
            proxy_user=self._https_proxy.username,
            proxy_pass=self._https_proxy.password)
        self._proxy_option['https'] = str(self._https_proxy)
    if self._http_proxy:
        if not self.proxy_info:
            self.proxy_info = httplib2.ProxyInfo(
                socks.PROXY_TYPE_HTTP,
                self._http_proxy.host,
                self._http_proxy.port,
                proxy_user=self._http_proxy.username,
                proxy_pass=self._http_proxy.password)
        self._proxy_option['http'] = str(self._http_proxy)
    self.disable_certificate_validation = disable_certificate_validation
    self.cafile = None if disable_certificate_validation else cafile
    # Initialize the context used to generate the urllib2.HTTPSHandler (in
    # Python 2.7.9+ and 3.4+) used by suds and urllib2.
    self._ssl_context = self._InitSSLContext(
        self.cafile, self.disable_certificate_validation)

def testSetProxyInfo(self):
    """Tests SetProxyInfo for various proxy use cases in boto file."""
    valid_proxy_types = ['socks4', 'socks5', 'http']
    valid_proxy_host = ['hostname', '1.2.3.4', None]
    valid_proxy_port = [8888, 0]
    valid_proxy_user = ['foo', None]
    valid_proxy_pass = ['Bar', None]
    valid_proxy_rdns = [True, False, None]
    proxy_type_spec = {
        'socks4': httplib2.socks.PROXY_TYPE_SOCKS4,
        'socks5': httplib2.socks.PROXY_TYPE_SOCKS5,
        'http': httplib2.socks.PROXY_TYPE_HTTP,
        'https': httplib2.socks.PROXY_TYPE_HTTP
    }
    # Generate all combinations of input values.
    boto_proxy_config_test_values = [{
        'proxy_host': p_h,
        'proxy_type': p_t,
        'proxy_port': p_p,
        'proxy_user': p_u,
        'proxy_pass': p_s,
        'proxy_rdns': p_d
    } for p_h in valid_proxy_host
      for p_s in valid_proxy_pass
      for p_p in valid_proxy_port
      for p_u in valid_proxy_user
      for p_t in valid_proxy_types
      for p_d in valid_proxy_rdns]
    # Test all combinations of input values.
    with SetEnvironmentForTest({'http_proxy': 'http://host:50'}):
        for test_values in boto_proxy_config_test_values:
            proxy_type = proxy_type_spec.get(test_values.get('proxy_type'))
            proxy_host = test_values.get('proxy_host')
            proxy_port = test_values.get('proxy_port')
            proxy_user = test_values.get('proxy_user')
            proxy_pass = test_values.get('proxy_pass')
            proxy_rdns = bool(test_values.get('proxy_rdns'))
            # Force non-HTTP (socks) proxies not to use rdns, as in SetProxyInfo().
            if not (proxy_type == proxy_type_spec['http']):
                proxy_rdns = False
            expected = httplib2.ProxyInfo(proxy_host=proxy_host,
                                          proxy_type=proxy_type,
                                          proxy_port=proxy_port,
                                          proxy_user=proxy_user,
                                          proxy_pass=proxy_pass,
                                          proxy_rdns=proxy_rdns)
            # Check that environment variable fallbacks are working.
            if not (expected.proxy_host and expected.proxy_port):
                expected = httplib2.ProxyInfo(proxy_type_spec['http'], 'host', 50)
                # Assume proxy_rdns is True if a proxy environment variable exists.
                if test_values.get('proxy_rdns') is None:
                    expected.proxy_rdns = True
            self._AssertProxyInfosEqual(boto_util.SetProxyInfo(test_values),
                                        expected)

def build_http_connection(config, timeout=120):
    """
    @config: dict-like; proxy and account information in the following format
        {
            "username": xx,
            "password": yy,
            "proxy_url": zz,
            "proxy_port": aa,
            "proxy_username": bb,
            "proxy_password": cc,
        }
    @return: httplib2.Http object
    """
    proxy_info = None
    if config.get("proxy_url") and config.get("proxy_port"):
        if config.get("proxy_username") and config.get("proxy_password"):
            proxy_info = httplib2.ProxyInfo(
                proxy_type=httplib2.socks.PROXY_TYPE_HTTP,
                proxy_host=config["proxy_url"],
                proxy_port=config["proxy_port"],
                proxy_user=config["proxy_username"],
                proxy_pass=config["proxy_password"])
        else:
            proxy_info = httplib2.ProxyInfo(
                proxy_type=httplib2.socks.PROXY_TYPE_HTTP,
                proxy_host=config["proxy_url"],
                proxy_port=config["proxy_port"])

    http = httplib2.Http(proxy_info=proxy_info, timeout=timeout,
                         disable_ssl_certificate_validation=True)
    if config.get("username") and config.get("password"):
        http.add_credentials(config["username"], config["password"])
    return http

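# Hedged usage sketch for build_http_connection above: it shows the shape of the
# config dict the docstring describes. The host, port, and credential values are
# illustrative assumptions, not taken from the source.
def _example_build_http_connection():
    config = {
        "username": "api_user",            # account credentials (optional)
        "password": "api_pass",
        "proxy_url": "proxy.example.com",  # proxy settings (optional)
        "proxy_port": 3128,
        "proxy_username": "proxy_user",
        "proxy_password": "proxy_pass",
    }
    # Returns an httplib2.Http routed through the HTTP proxy, with the account
    # credentials registered for HTTP auth challenges.
    return build_http_connection(config, timeout=60)
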
def service(self):
    """Set up the service object."""
    self._check_access_token()
    if self._service is None:
        build_content = _get_build_content(
            self._api_name,
            self._api_version,
            discoveryServiceUrl=self._url,
            cache_discovery=True,
            cache=DiscoveryCache(),
            proxy_info=self.proxy_info,
        )
        self._service = build_from_document(build_content, credentials=self.creds)
        if self.no_verify:
            self._service._http.http.disable_ssl_certificate_validation = True
        if self.proxy_info:
            self._service._http.http.proxy_info = httplib2.ProxyInfo(
                httplib2.socks.PROXY_TYPE_HTTP_NO_TUNNEL,
                self.proxy_info["host"],
                self.proxy_info["port"],
                proxy_user=self.proxy_info["user"],
                proxy_pass=self.proxy_info["password"],
            )
    return self._service

def get_authenticated_service(user):
    """
    Have the user authenticate with Google and return the authenticated HTTP service.
    :param user:
    :return:
    """
    storage = Storage(CredentialsModel, 'id', user, 'credential')
    credential = storage.get()
    if credential is None or credential.invalid is True:
        result = None
    else:
        SETTING_FILE = 'production'
        # When running on the VPS, do not use a proxy.
        if SETTING_FILE == 'production':
            http = httplib2.Http()
            http = credential.authorize(http)
        # When running locally, use a proxy. A socks5 proxy seems to fail here,
        # so try an HTTP proxy instead.
        if SETTING_FILE == 'local':
            myproxy = httplib2.ProxyInfo(
                proxy_type=httplib2.socks.PROXY_TYPE_HTTP,
                proxy_host='127.0.0.1',
                proxy_port=8118)
            http = httplib2.Http(proxy_info=myproxy)
            http = credential.authorize(http)
        service = build(YOUTUBE_API_SERVICE_NAME, YOUTUBE_API_VERSION, http=http)
        result = service
    return result

def _prepare(self, uri, kwargs):
    r = urlparse(uri)
    if r.scheme == "http":
        raise Exception("Refusing to send credentials over http")
    proxyKey = '%s_proxy' % r.scheme
    proxyUri = proxyKey in os.environ and os.environ[proxyKey] or None
    proxyInfo = None
    if proxyUri:
        r = urlparse(proxyUri)
        proxyInfo = httplib2.ProxyInfo(socks.PROXY_TYPE_HTTP,
                                       r.hostname, r.port or 3128)
    http = httplib2.Http(
        ca_certs=self.ca_certs,
        proxy_info=proxyInfo,
        disable_ssl_certificate_validation=self.ssl_no_verify)
    if 'headers' not in kwargs:
        kwargs['headers'] = {}
    # We can't force auth through httplib2, so build the Basic header manually.
    auth = base64.b64encode(
        (self.username + ":" + self.password).encode("utf-8")).decode("ascii")
    kwargs['headers']['Authorization'] = "Basic " + auth
    kwargs['headers']['Accept'] = 'application/json'
    if 'body' in kwargs:
        kwargs['headers']['Content-Type'] = 'application/json'
        kwargs['body'] = json.dumps(kwargs['body'])
    if 'params' in kwargs:
        uri = uri + '?' + urllib.parse.urlencode(kwargs['params'])
        del kwargs['params']
    return http, uri

def GetNewHttp(http_class=httplib2.Http, **kwargs):
    """Creates and returns a new httplib2.Http instance.

    Args:
      http_class: Optional custom Http class to use.
      **kwargs: Arguments to pass to http_class constructor.

    Returns:
      An initialized httplib2.Http instance.
    """
    proxy_info = httplib2.ProxyInfo(
        proxy_type=3,
        proxy_host=boto.config.get('Boto', 'proxy', None),
        proxy_port=boto.config.getint('Boto', 'proxy_port', 0),
        proxy_user=boto.config.get('Boto', 'proxy_user', None),
        proxy_pass=boto.config.get('Boto', 'proxy_pass', None),
        proxy_rdns=boto.config.get('Boto', 'proxy_rdns', False))

    # Some installers don't package a certs file with httplib2, so use the
    # one included with gsutil.
    kwargs['ca_certs'] = GetCertsFile()
    # Use a non-infinite SSL timeout to avoid hangs during network flakiness.
    kwargs['timeout'] = SSL_TIMEOUT
    http = http_class(proxy_info=proxy_info, **kwargs)
    http.disable_ssl_certificate_validation = (not config.getbool(
        'Boto', 'https_validate_certificates'))
    return http

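# Side note on GetNewHttp above: proxy_type=3 is the numeric value of
# httplib2.socks.PROXY_TYPE_HTTP. A minimal hedged sketch of the equivalent,
# more explicit construction; the boto config lookups are replaced here by
# assumed literal values for illustration only.
import httplib2

def _example_explicit_proxy_info():
    return httplib2.ProxyInfo(
        proxy_type=httplib2.socks.PROXY_TYPE_HTTP,  # same constant as proxy_type=3
        proxy_host="proxy.example.com",             # assumed; normally from boto config
        proxy_port=3128,
        proxy_user=None,
        proxy_pass=None,
        proxy_rdns=False)
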
def get_authenticated_service_s2s():
    """
    Obtain an authenticated Google service using OAuth2 server-to-server credentials.
    :return:
    """
    credentials = ServiceAccountCredentials.from_json_keyfile_name(
        GOOGLE_KEY_FILE, SCOPES)
    # SETTING_FILE = 'production'
    # When running on the VPS, do not use a proxy.
    if SETTING_FILE == 'production':
        proxy_http = None
    # When running locally, use an HTTP proxy; remember to convert the socks5
    # proxy into an HTTP proxy with privoxy. No VPN is needed in that case.
    if SETTING_FILE == 'local':
        myproxy = httplib2.ProxyInfo(
            proxy_type=httplib2.socks.PROXY_TYPE_HTTP,
            proxy_host='127.0.0.1',
            proxy_port=8118)
        proxy_http = httplib2.Http(proxy_info=myproxy)
    youtube_service = build(YOUTUBE_API_SERVICE_NAME, YOUTUBE_API_VERSION,
                            credentials=credentials, http=proxy_http)
    return youtube_service

def translate(inputfile, outputfile):
    if config['proxy_host']:
        proxy = httplib2.ProxyInfo(proxy_type=httplib2.socks.PROXY_TYPE_SOCKS5,
                                   proxy_host=config['proxy_host'],
                                   proxy_port=config['proxy_port'])
        http = httplib2.Http(proxy_info=proxy)
    else:
        http = httplib2.Http()
    service = build('translate', 'v2', developerKey=config['api_key'], http=http)
    try:
        subs = pysrt.open(inputfile, encoding='utf-8')
        query = [subs[i].text for i in range(len(subs))]
        result = service.translations().list(source='en', target='zh_CN',
                                             q=query).execute()
        for i in range(len(subs)):
            tmp_text = result[u'translations'][i][u'translatedText']
            tmp_text = tmp_text.replace(u',', ' ')
            tmp_text = tmp_text.replace(u'。', ' ')
            tmp_text = tmp_text.replace(u'?', ' ?')
            tmp_text = tmp_text.replace(u'--', u'——')
            tmp_text = tmp_text.replace(u'-', u'——')
            tmp_text = tmp_text.replace(u'“', u' " ')
            tmp_text = tmp_text.replace(u'”', u' " ')
            subs[i].text = tmp_text + " <<<-- " + subs[i].text + " -->>>"
        subs.save(outputfile, encoding='utf-8')
        print('Translation OK!')
    except HttpError:
        print('Oops! Google service error!')

def __init__(self, sid):
    self.sid = sid
    self.h = httplib2.Http(timeout=15, proxy_info=httplib2.ProxyInfo(
        httplib2.socks.PROXY_TYPE_HTTP_NO_TUNNEL, "127.0.0.1", 23333))
    Thread.__init__(self)

def get_user_id(self, code):
    try:
        if self.HTTP_PROXY:
            proxy = httplib2.ProxyInfo(
                proxy_type=httplib2.socks.PROXY_TYPE_HTTP,
                proxy_host=self.HTTP_PROXY["host"],
                proxy_port=self.HTTP_PROXY["port"],
                proxy_rdns=True,
            )
            http = httplib2.Http(proxy_info=proxy)
        else:
            http = httplib2.Http()
        creds = self.flow.step2_exchange(code, http=http)
        http = creds.authorize(http)
        r = http.request(
            "https://www.googleapis.com/plus/v1/people/me/openIdConnect")
        if len(r) > 1:
            user_profile = json.loads(r[1])
            if user_profile.get("email_verified") == "true":
                return {"email": user_profile["email"]}
            else:
                return {
                    "error": ("Your email is not verified: {}".format(
                        user_profile.get("error", "")))
                }
        else:
            return {"error": "Can't get user's email"}
    except Exception as e:
        self.logger.exception("Can't get user info")
        return {"error": "Can't get your email: {}".format(e)}

def test_headers():
    headers = {'key0': 'val0', 'key1': 'val1'}
    pi = httplib2.ProxyInfo(httplib2.socks.PROXY_TYPE_HTTP, 'localhost', 1234,
                            proxy_headers=headers)
    assert pi.proxy_headers == headers

def build_client(self):
    service_account_data = self.credential.content['service_account']
    service_account_data['private_key'] = service_account_data[
        'private_key'].replace('\\n', '\n')
    credentials = service_account.Credentials.from_service_account_info(
        service_account_data, scopes=self.credential.scopes)

    if HTTP_PROXY:
        _, host, port = HTTP_PROXY.split(':')
        try:
            port = int(port)
        except ValueError:
            raise EnvironmentError('HTTP_PROXY incorrect format')

        proxied_http = httplib2.Http(proxy_info=httplib2.ProxyInfo(
            httplib2.socks.PROXY_TYPE_HTTP, host.replace('//', ''), port))
        authorized_http = google_auth_httplib2.AuthorizedHttp(
            credentials, http=proxied_http)
        service = googleapiclient.discovery.build('compute', 'v1',
                                                  http=authorized_http)
    else:
        service = googleapiclient.discovery.build(
            'compute', 'v1', credentials=credentials,
        )
    return service

def get_http_client_with_proxy(proxy: bool) -> httplib2.Http:
    """
    Create an httplib2.Http client, configured to go through a proxy when requested.

    :param proxy: Whether to use a proxy.
    :return: httplib2.Http object.
    """
    proxy_info = {}
    if proxy:
        proxies = handle_proxy()
        https_proxy = proxies.get("https")
        http_proxy = proxies.get("http")
        proxy_conf = https_proxy if https_proxy else http_proxy
        if proxy_conf:
            if not proxy_conf.startswith("https") and not proxy_conf.startswith("http"):
                proxy_conf = "https://" + proxy_conf
            parsed_proxy = urllib.parse.urlparse(proxy_conf)
            proxy_info = httplib2.ProxyInfo(
                proxy_type=httplib2.socks.PROXY_TYPE_HTTP,
                proxy_host=parsed_proxy.hostname,
                proxy_port=parsed_proxy.port,
                proxy_user=parsed_proxy.username,
                proxy_pass=parsed_proxy.password,
            )
    return httplib2.Http(proxy_info=proxy_info, timeout=TIMEOUT_TIME)

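# Hedged usage sketch for get_http_client_with_proxy above; it assumes the
# surrounding integration code provides handle_proxy() and TIMEOUT_TIME, and the
# target URL below is a placeholder, not taken from the source.
def _example_proxied_request():
    client = get_http_client_with_proxy(proxy=True)
    # httplib2.Http.request returns a (response, content) tuple.
    response, content = client.request("https://example.com/api/ping", "GET")
    return response.status, content
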
def _get_build_content(
    serviceName,
    version,
    discoveryServiceUrl=DISCOVERY_URI,
    cache_discovery=True,
    cache=None,
    proxy_info=None,
):
    params = {"api": serviceName, "apiVersion": version}
    discovery_http = httplib2.Http(timeout=60)
    if proxy_info:
        discovery_http.proxy_info = httplib2.ProxyInfo(
            httplib2.socks.PROXY_TYPE_HTTP_NO_TUNNEL,
            proxy_info["host"],
            proxy_info["port"],
            proxy_user=proxy_info["user"],
            proxy_pass=proxy_info["password"],
        )
        discovery_http.disable_ssl_certificate_validation = True
    for discovery_url in (discoveryServiceUrl, V2_DISCOVERY_URI):
        requested_url = uritemplate.expand(discovery_url, params)
        try:
            return _retrieve_discovery_doc(requested_url, discovery_http,
                                           cache_discovery, cache)
        except HttpError as e:
            if e.resp.status == 404:
                continue
            else:
                raise e
    raise Exception("name: %s version: %s" % (serviceName, version))

def youtube_builder(usingproxy=True):
    import httplib2
    from httplib2 import socks
    from apiclient.discovery import build

    p = httplib2.ProxyInfo(proxy_type=socks.PROXY_TYPE_HTTP,
                           proxy_host='10.62.36.14',
                           proxy_port=80)
    # http connection
    theHttp = httplib2.Http(proxy_info=p)
    # https connection (need to disable ssl)
    theHttp = httplib2.Http(proxy_info=p, disable_ssl_certificate_validation=True)

    # Building youtube connection
    DEVELOPER_KEY = "AIzaSyBBHyzwTo0G-F6q3_aL59fYHshiRNS9Sow"
    YOUTUBE_API_SERVICE_NAME = "youtube"
    YOUTUBE_API_VERSION = "v3"
    if usingproxy:
        youtube = build(YOUTUBE_API_SERVICE_NAME, YOUTUBE_API_VERSION,
                        developerKey=DEVELOPER_KEY, http=theHttp)
    else:
        youtube = build(YOUTUBE_API_SERVICE_NAME, YOUTUBE_API_VERSION,
                        developerKey=DEVELOPER_KEY)
    # Returning youtube object
    return youtube

def get_http_client_with_proxy(proxy: bool, insecure: bool):  # pragma: no cover
    """
    Args:
        proxy (bool): Whether to use a proxy.
        insecure (bool): Whether to disable ssl and use an insecure connection.

    Returns:
        httplib2 object with the proper settings for the google api client.

    Action:
        Create an http client, configured to go through a proxy when requested.
    """
    if proxy:
        proxies = handle_proxy()
        https_proxy = proxies.get('https')
        http_proxy = proxies.get('http')
        proxy_conf = https_proxy if https_proxy else http_proxy
        # if no proxy_conf - ignore proxy
        if proxy_conf:
            if not proxy_conf.startswith('https') and not proxy_conf.startswith('http'):
                proxy_conf = 'https://' + proxy_conf
            parsed_proxy = urllib.parse.urlparse(proxy_conf)
            proxy_info = httplib2.ProxyInfo(
                proxy_type=httplib2.socks.PROXY_TYPE_HTTP,
                proxy_host=parsed_proxy.hostname,
                proxy_port=parsed_proxy.port,
                proxy_user=parsed_proxy.username,
                proxy_pass=parsed_proxy.password)
            return httplib2.Http(proxy_info=proxy_info,
                                 disable_ssl_certificate_validation=insecure)
    return httplib2.Http(disable_ssl_certificate_validation=insecure)

def select_category(request):
    try:
        credentials = get_credentials()
        http = credentials.authorize(httplib2.Http())
        service = discovery.build('drive', 'v3', http=http)
        FoldersData = SearchFolders(service, ColumbiaFintechFolderID)
    except:
        credentials = get_credentials()
        p = httplib2.ProxyInfo(socks.PROXY_TYPE_HTTP, 'proxy.server', 3128)
        http = credentials.authorize(httplib2.Http(proxy_info=p))
        service = discovery.build('drive', 'v3', http=http)
        FoldersData = SearchFolders(service, ColumbiaFintechFolderID)

    Categories_arr = []
    folders_rest = []
    ind = 0
    for Folder in FoldersData:
        ind = ind + 1
        if ind <= 10:
            Categories_arr.append([Folder['name'], Folder['name']])
        else:
            folders_rest.append(Folder['name'])
    Categories_arr.append(["Other reports", folders_rest])
    Categories_arr.append(["Custom search", "custom"])
    context = dict()
    context['Categories'] = Categories_arr
    return render(request, 'FintechExplorerApp/Categories.html', context)

def __init__(self, enable_proxy=False):
    consumer = oauth.Consumer(key=CONSUMER_KEY, secret=CONSUMER_SECRET)
    access_token = oauth.Token(key=ACCESS_KEY, secret=ACCESS_SECRET)
    self.client = oauth.Client(consumer, access_token)
    if enable_proxy:
        self.client.proxy_info = httplib2.ProxyInfo(
            socks.PROXY_TYPE_HTTP, 'localhost', 1080)

def _fetchUrl(self, url):
    '''Fetch a URL

    Args:
      url: The URL to retrieve

    Returns:
      A string containing the body of the response.
    '''
    # Open and return the URL
    try:
        if self._urllib is httplib2:
            params = {'timeout': TIMEOUT}
            if self.proxy_info is not None:
                try:
                    host = self.proxy_info['HOST']
                    port = self.proxy_info['PORT']
                except KeyError, err:
                    raise ValueError(
                        'You should supply a value for HOST and PORT when working with proxies. %s' % err)
                params.update({
                    'proxy_info': httplib2.ProxyInfo(
                        socks.PROXY_TYPE_HTTP_NO_TUNNEL, host, port)
                })
            http = httplib2.Http(**params)
            resp, content = http.request(url)
            url_data = content
        else:

def solr_interface():
    '''Wrapper function to initialize a :class:`sunburnt.SolrInterface`
    based on django settings and environment.  Uses **SOLR_SERVER_URL** and
    **SOLR_CA_CERT_PATH** if one is set.  Additionally, if an **HTTP_PROXY**
    is set in the environment, it will be configured.
    '''
    http_opts = {}
    if hasattr(settings, 'SOLR_CA_CERT_PATH'):
        http_opts['ca_certs'] = settings.SOLR_CA_CERT_PATH
    if getattr(settings, 'SOLR_DISABLE_CERT_CHECK', False):
        http_opts['disable_ssl_certificate_validation'] = True

    # use http proxy if set in ENV
    http_proxy = os.getenv('HTTP_PROXY', None)
    solr_url = urlparse(settings.SOLR_SERVER_URL)
    # NOTE: using Squid with httplib2 requires the no-tunneling proxy option
    # - a non-tunnel proxy does not work with https
    if http_proxy and solr_url.scheme == 'http':
        parsed_proxy = urlparse(http_proxy)
        proxy_info = httplib2.ProxyInfo(
            proxy_type=httplib2.socks.PROXY_TYPE_HTTP_NO_TUNNEL,
            proxy_host=parsed_proxy.hostname,
            proxy_port=parsed_proxy.port)
        http_opts['proxy_info'] = proxy_info

    http = httplib2.Http(**http_opts)
    solr_opts = {'http_connection': http}
    # since we have the schema available, don't bother requesting it
    # from solr every time we initialize a new connection
    if hasattr(settings, 'SOLR_SCHEMA'):
        solr_opts['schemadoc'] = settings.SOLR_SCHEMA

    solr = sunburnt.SolrInterface(settings.SOLR_SERVER_URL, **solr_opts)
    return solr

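# The note in solr_interface above captures a useful distinction: with Squid,
# httplib2 needs PROXY_TYPE_HTTP_NO_TUNNEL, and that non-tunnelling mode only
# works for plain-http targets. A hedged sketch of that choice in isolation;
# solr_interface itself simply skips the proxy for https URLs, so the https
# branch below (falling back to a tunnelling PROXY_TYPE_HTTP) is an assumption.
import httplib2
from urllib.parse import urlparse

def _example_proxy_info_for(target_url, proxy_url):
    no_tunnel = urlparse(target_url).scheme == "http"
    proxy_type = (httplib2.socks.PROXY_TYPE_HTTP_NO_TUNNEL if no_tunnel
                  else httplib2.socks.PROXY_TYPE_HTTP)
    parsed_proxy = urlparse(proxy_url)
    return httplib2.ProxyInfo(proxy_type=proxy_type,
                              proxy_host=parsed_proxy.hostname,
                              proxy_port=parsed_proxy.port)
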
def get_http_client(proxy: bool, verify: bool, timeout: int = 60) -> httplib2.Http:
    """
    Validate proxy settings and prepare an Http object.

    :param proxy: Boolean indicating whether to use a proxy.
    :param verify: Boolean indicating whether to verify the ssl certificate.
    :param timeout: Timeout value for requests.
    :return: httplib2.Http object.
    :raises DemistoException: If there are any other issues while preparing the proxy.
    """
    proxy_info = {}
    proxies = handle_proxy()
    if proxy:
        https_proxy = proxies['https']
        if not https_proxy.startswith('https') and not https_proxy.startswith('http'):
            https_proxy = 'https://' + https_proxy
        parsed_proxy = urllib.parse.urlparse(https_proxy)
        proxy_info = httplib2.ProxyInfo(
            proxy_type=httplib2.socks.PROXY_TYPE_HTTP,
            proxy_host=parsed_proxy.hostname,
            proxy_port=parsed_proxy.port,
            proxy_user=parsed_proxy.username,
            proxy_pass=parsed_proxy.password)
    return httplib2.Http(proxy_info=proxy_info,
                         disable_ssl_certificate_validation=not verify,
                         timeout=timeout)

def get_user_id(self, code):
    try:
        if self.HTTP_PROXY:
            proxy = httplib2.ProxyInfo(
                proxy_type=httplib2.socks.PROXY_TYPE_HTTP,
                proxy_host=self.HTTP_PROXY['host'],
                proxy_port=self.HTTP_PROXY['port'],
                proxy_rdns=True)
            http = httplib2.Http(proxy_info=proxy)
        else:
            http = httplib2.Http()
        creds = self.flow.step2_exchange(code, http=http)
        http = creds.authorize(http)
        r = http.request(
            'https://www.googleapis.com/plus/v1/people/me/openIdConnect')
        if len(r) > 1:
            user_profile = json.loads(r[1])
            if user_profile.get('email_verified') == 'true':
                return {'email': user_profile['email']}
            else:
                return {
                    'error': ('Your email is not verified: {}'.format(
                        user_profile.get('error', '')))
                }
        else:
            return {'error': "Can't get user's email"}
    except Exception as e:
        self.logger.exception("Can't get user info")
        return {'error': "Can't get your email: {}".format(e)}

def main(argv):
    # TODO
    # http_proxy = None
    http_proxy = httplib2.ProxyInfo(socks.PROXY_TYPE_HTTP,
                                    'proxy.jf.intel.com', 911)
    # print http_proxy

    # Authenticate and construct service.
    # service, flags = sample_tools.init(
    service, flags = init(argv, 'calendar', 'v3', __doc__, __file__,
                          scope='https://www.googleapis.com/auth/calendar',
                          http_proxy=http_proxy)

    # Get calendar id by name (summary)
    calendarID = getCalendarID(service, 'WORK')
    # calendarID = getCalendarID(service, 'FUN STUFF')
    if calendarID is None:
        print('No calendar was found')
        return
    calendar = service.calendars().get(calendarId=calendarID).execute()

    listEvents(service, calendarID)
    addEvent(service, calendarID)

def test_headers():
    headers = {"key0": "val0", "key1": "val1"}
    pi = httplib2.ProxyInfo(httplib2.socks.PROXY_TYPE_HTTP, "localhost", 1234,
                            proxy_headers=headers)
    assert pi.proxy_headers == headers

def get_http_client_with_proxy(proxy, insecure):
    """
    Create an http client, configured to go through a proxy when requested.

    :param proxy: Whether to use a proxy.
    :param insecure: Whether to disable ssl and use an insecure connection.
    :return: httplib2.Http object.
    """
    if proxy:
        proxies = handle_proxy()
        https_proxy = proxies.get('https')
        http_proxy = proxies.get('http')
        proxy_conf = https_proxy if https_proxy else http_proxy
        # if no proxy_conf - ignore proxy
        if proxy_conf:
            if not proxy_conf.startswith('https') and not proxy_conf.startswith('http'):
                proxy_conf = 'https://' + proxy_conf
            parsed_proxy = urllib.parse.urlparse(proxy_conf)
            proxy_info = httplib2.ProxyInfo(
                proxy_type=httplib2.socks.PROXY_TYPE_HTTP,
                proxy_host=parsed_proxy.hostname,
                proxy_port=parsed_proxy.port,
                proxy_user=parsed_proxy.username,
                proxy_pass=parsed_proxy.password)
            return httplib2.Http(
                proxy_info=proxy_info,
                disable_ssl_certificate_validation=insecure)
    return httplib2.Http(disable_ssl_certificate_validation=insecure)

def __init__(self, cache_key_base, access_token_cache=None,
             datetime_strategy=datetime.datetime, auth_uri=None,
             token_uri=None, disable_ssl_certificate_validation=False,
             proxy_host=None, proxy_port=None, proxy_user=None,
             proxy_pass=None, ca_certs_file=None):
    # datetime_strategy is used to invoke utcnow() on; it is injected into the
    # constructor for unit testing purposes.
    self.auth_uri = auth_uri
    self.token_uri = token_uri
    self.cache_key_base = cache_key_base
    self.datetime_strategy = datetime_strategy
    self.access_token_cache = access_token_cache or InMemoryTokenCache()
    self.disable_ssl_certificate_validation = disable_ssl_certificate_validation
    self.ca_certs_file = ca_certs_file
    if proxy_host and proxy_port:
        self._proxy_info = httplib2.ProxyInfo(socks.PROXY_TYPE_HTTP,
                                              proxy_host,
                                              proxy_port,
                                              proxy_user=proxy_user,
                                              proxy_pass=proxy_pass,
                                              proxy_rdns=True)
    else:
        self._proxy_info = None

def GetHttpProxyInfo():
    """Get ProxyInfo object or callable to be passed to httplib2.Http.

    httplib2.Http can issue requests through a proxy. That information is
    passed via either ProxyInfo objects or a callback function that receives
    the protocol the request is made on and returns the proxy address. If
    users set the gcloud properties, we create a ProxyInfo object with those
    settings. If users do not set gcloud properties, we return a function
    that can be called to get default settings.

    Returns:
      httplib2 ProxyInfo object or callable function that returns a ProxyInfo
      object given the protocol (http, https)
    """
    proxy_settings = GetProxyProperties()

    if proxy_settings:
        return httplib2.ProxyInfo(proxy_settings['proxy_type'],
                                  proxy_settings['proxy_address'],
                                  proxy_settings['proxy_port'],
                                  proxy_rdns=proxy_settings['proxy_rdns'],
                                  proxy_user=proxy_settings['proxy_user'],
                                  proxy_pass=proxy_settings['proxy_pass'])

    return GetDefaultProxyInfo

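# Hedged sketch of how the return value of GetHttpProxyInfo above is consumed:
# httplib2.Http accepts either a ProxyInfo instance or a callable mapping a
# scheme ('http'/'https') to a ProxyInfo, so both branches plug in the same way.
import httplib2

def _example_http_with_proxy_info():
    proxy_info = GetHttpProxyInfo()  # ProxyInfo or callable, per the docstring
    return httplib2.Http(proxy_info=proxy_info)
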
def init(self, ua):
    import httplib2, httplib
    pi = httplib2.ProxyInfo(3, self.proxy, self.proxyp)
    con = httplib2.Http(proxy_info=pi)
    con.follow_redirects = False
    headers = {'Referer': 'http://' + self.url}
    resp, body = con.request(self.url, 'GET', headers=headers)
    #print 'resp 1', resp.status, resp, body
    if resp.status != 200:
        print "Fail: Session closed by operator"
        return
    cookies = resp['set-cookie']
    headers['User-Agent'] = ua
    headers['Cookie'] = cookies
    headers['Referer'] = self.url
    headers['Origin'] = 'http://moipodpiski.ssl.mts.ru/'
    inputs = {}
    data = {
        'url': self.url,
        'headers': headers,
        'inputs': inputs,
        'proxy': self.proxy,
        'proxyp': self.proxyp
    }
    import pickle
    pickle.dump(data, open('session_mts_%s.subscribe' % self.subid, "w"))
    print " captcha: pass",
    return