def httpPost(self, host, params, needGzip=False):
    """POST ``params`` as a JSON body to ``host`` and return the parsed response.

    :param host: full URL of the endpoint.
    :param params: dict payload; its ``"action"`` entry is also sent as the
        ``Gt-Action`` request header.
    :param needGzip: when True, gzip-compress the request body and expect a
        gzip-compressed response body.
    :return: parsed response dict, or ``None`` if every retry attempt failed.
    """
    if GtConfig.getHttpProxyIp() is not None:
        # Route requests through the configured HTTP proxy ("ip:port").
        ipport = GtConfig.getHttpProxyIp() + ":" + GtConfig.getHttpProxyPort()
        # BUG FIX: ProxyHandler requires a {scheme: proxy} dict; the original
        # passed a set literal {ipport}, so the proxy was never installed.
        # (Also removed a stray py2 debug `print ipport` statement.)
        opener = urllib2.build_opener(
            urllib2.ProxyHandler({'http': ipport, 'https': ipport}),
            urllib2.HTTPHandler(debuglevel=1))
        urllib2.install_opener(opener)

    data_json = json.dumps(params)
    headers = dict()
    headers['Gt-Action'] = params.get("action")
    if needGzip:
        # Compress the JSON body in-memory and advertise gzip both ways.
        out = StringIO.StringIO()
        with gzip.GzipFile(fileobj=out, mode="w") as f:
            f.write(data_json)
        data_json = out.getvalue()
        headers['Content-Encoding'] = 'gzip'
        headers['Accept-Encoding'] = 'gzip'

    req = urllib2.Request(host, data_json, headers)
    retry_time_limit = GtConfig.getHttpTryCount()
    tryTime = 0
    res_stream = None
    # Retry until a response is obtained or the attempt budget is spent.
    while res_stream is None and tryTime < retry_time_limit:
        try:
            if '_create_unverified_context' in dir(ssl):
                # Legacy behaviour: skip TLS certificate verification.
                ct = ssl._create_unverified_context()
                res_stream = urllib2.urlopen(
                    req, timeout=GtConfig.getHttpConnectionTimeOut(),
                    context=ct)
            else:
                res_stream = urllib2.urlopen(
                    req, timeout=GtConfig.getHttpConnectionTimeOut())
        except Exception:
            # Count the failed attempt; best-effort retry, no logging by design.
            tryTime += 1

    if res_stream is None:
        return None
    page_str = res_stream.read()
    if needGzip:
        compressedstream = StringIO.StringIO(page_str)
        with gzip.GzipFile(fileobj=compressedstream) as f:
            page_str = f.read()
    # BUG FIX: parse with json.loads instead of eval(); eval on a server
    # response is a code-injection risk and breaks on true/false/null.
    return json.loads(page_str)
def httpPost(self, host, params, needGzip=False):
    """POST ``params`` as a JSON body to ``host`` and return the parsed response.

    :param host: full URL of the endpoint.
    :param params: dict payload; its ``"action"`` entry is also sent as the
        ``Gt-Action`` request header.
    :param needGzip: when True, gzip-compress the request body and expect a
        gzip-compressed response body.
    :return: parsed response dict, or ``None`` if every retry attempt failed.
    """
    if GtConfig.getHttpProxyIp() is not None:
        # Route requests through the configured HTTP proxy ("ip:port").
        ipport = GtConfig.getHttpProxyIp() + ":" + GtConfig.getHttpProxyPort()
        # BUG FIX: ProxyHandler requires a {scheme: proxy} dict; the original
        # passed a set literal {ipport}, so the proxy was never installed.
        opener = urllib2.build_opener(
            urllib2.ProxyHandler({'http': ipport, 'https': ipport}),
            urllib2.HTTPHandler(debuglevel=1))
        urllib2.install_opener(opener)

    data_json = json.dumps(params)
    headers = dict()
    headers['Gt-Action'] = params.get("action")
    if needGzip:
        # Compress the JSON body in-memory and advertise gzip both ways.
        out = StringIO.StringIO()
        with gzip.GzipFile(fileobj=out, mode="w") as f:
            f.write(data_json)
        data_json = out.getvalue()
        headers['Content-Encoding'] = 'gzip'
        headers['Accept-Encoding'] = 'gzip'

    req = urllib2.Request(host, data_json, headers)
    retry_time_limit = GtConfig.getHttpTryCount()
    tryTime = 0
    res_stream = None
    # Retry until a response is obtained or the attempt budget is spent.
    while res_stream is None and tryTime < retry_time_limit:
        try:
            if '_create_unverified_context' in dir(ssl):
                # Legacy behaviour: skip TLS certificate verification.
                ct = ssl._create_unverified_context()
                res_stream = urllib2.urlopen(
                    req, timeout=GtConfig.getHttpConnectionTimeOut(),
                    context=ct)
            else:
                res_stream = urllib2.urlopen(
                    req, timeout=GtConfig.getHttpConnectionTimeOut())
        except Exception:
            # Count the failed attempt; best-effort retry, no logging by design.
            tryTime += 1

    if res_stream is None:
        return None
    page_str = res_stream.read()
    if needGzip:
        compressedstream = StringIO.StringIO(page_str)
        with gzip.GzipFile(fileobj=compressedstream) as f:
            page_str = f.read()
    # BUG FIX: parse with json.loads instead of eval(); eval on a server
    # response is a code-injection risk and breaks on true/false/null.
    return json.loads(page_str)