def __init__(self,
             userAgent='Mozilla/4.0 (compatible; MSIE 8.0)',
             followLocation=1,   # follow redirects?
             autoReferer=1,      # allow 'referer' to be set normally?
             verifySSL=0,        # tell SSL to verify IDs?
             useCookies=True,    # will hold all pycurl cookies
             useSOCKS=False,     # use SOCKS5 proxy?
             proxy='localhost',  # SOCKS host
             proxyPort=8080,     # SOCKS port
             proxyType=5,        # SOCKS protocol
             verbose=False,
             debug=False,
             ):
    """Create and configure the pycurl.Curl handle stored on self.pco.

    Every keyword argument is also stored as a same-named attribute so the
    configuration can be inspected later.
    """
    import tempfile  # local import: only needed for the cookie jar below

    self.followLocation = followLocation
    self.autoReferer = autoReferer
    self.verifySSL = verifySSL
    self.useCookies = useCookies
    self.useSOCKS = useSOCKS
    self.proxy = proxy
    self.proxyPort = proxyPort
    self.proxyType = proxyType

    self.pco = pycurl.Curl()
    self.pco.setopt(pycurl.CAINFO, os.path.join('.', 'linode', 'cloud-cacerts.pem'))
    self.pco.setopt(pycurl.USERAGENT, userAgent)
    self.pco.setopt(pycurl.FOLLOWLOCATION, followLocation)
    self.pco.setopt(pycurl.MAXREDIRS, 20)
    self.pco.setopt(pycurl.CONNECTTIMEOUT, 30)
    self.pco.setopt(pycurl.AUTOREFERER, autoReferer)
    # SSL verification.  CURLOPT_SSL_VERIFYHOST takes 0 or 2 (1 is invalid
    # in modern libcurl), so map the boolean-ish verifySSL accordingly
    # instead of passing it straight through.
    self.pco.setopt(pycurl.SSL_VERIFYPEER, verifySSL)
    self.pco.setopt(pycurl.SSL_VERIFYHOST, 2 if verifySSL else 0)
    if useCookies:
        # os.tmpfile() was removed in Python 3; NamedTemporaryFile gives a
        # named file libcurl can reopen.  Keep a reference on self so the
        # file is not garbage-collected (and deleted) while the handle
        # still needs it.
        self._cookie_jar = tempfile.NamedTemporaryFile()
        self.pco.setopt(pycurl.COOKIEFILE, self._cookie_jar.name)
        self.pco.setopt(pycurl.COOKIEJAR, self._cookie_jar.name)
    if useSOCKS:
        # if you wish to use SOCKS, it is configured through these parms
        self.pco.setopt(pycurl.PROXY, proxy)
        self.pco.setopt(pycurl.PROXYPORT, proxyPort)
        self.pco.setopt(pycurl.PROXYTYPE, proxyType)
    if verbose:
        self.pco.setopt(pycurl.VERBOSE, 1)
    if debug:
        # Python 2 print statements converted to print() calls.
        print('PyCurl version info:')
        print(pycurl.version_info())
        print()
        self.pco.setopt(pycurl.DEBUGFUNCTION, self.debug)
    return
def initHandle(self):
    """ sets common options to curl handle """
    base_options = (
        (pycurl.FOLLOWLOCATION, 1),
        (pycurl.MAXREDIRS, 10),
        (pycurl.CONNECTTIMEOUT, 30),
        (pycurl.NOSIGNAL, 1),
        (pycurl.NOPROGRESS, 1),
    )
    for option, value in base_options:
        self.c.setopt(option, value)
    if hasattr(pycurl, "AUTOREFERER"):
        self.c.setopt(pycurl.AUTOREFERER, 1)
    self.c.setopt(pycurl.SSL_VERIFYPEER, 0)
    self.c.setopt(pycurl.LOW_SPEED_TIME, 60)
    self.c.setopt(pycurl.LOW_SPEED_LIMIT, 5)
    if hasattr(pycurl, "USE_SSL"):
        self.c.setopt(pycurl.USE_SSL, pycurl.CURLUSESSL_TRY)
    # self.c.setopt(pycurl.VERBOSE, 1)
    self.c.setopt(pycurl.USERAGENT,
                  "Mozilla/5.0 (Windows NT 6.1; WOW64; rv:37.0) Gecko/20100101 Firefox/37.0")
    # version_info()[7] is the libz version string -- truthy when libcurl
    # was built with compression support.
    if pycurl.version_info()[7]:
        self.c.setopt(pycurl.ENCODING, "gzip, deflate")
    self.c.setopt(pycurl.HTTPHEADER,
                  ["Accept: */*",
                   "Accept-Language: en-US, en",
                   "Accept-Charset: ISO-8859-1, utf-8;q=0.7,*;q=0.7",
                   "Connection: keep-alive",
                   "Keep-Alive: 300",
                   "Expect:"])
def __init__(self, base_url="", fakeheaders=()):
    """Configure the underlying curl connection for Sina's HTTPS endpoint.

    Verifies the peer certificate, captures the response body into a
    BytesIO and restricts the cipher list to ones Sina accepts.
    """
    super().__init__(base_url, fakeheaders)
    self.set_option(pycurl.SSL_VERIFYPEER, True)
    self.set_option(pycurl.ENCODING, "")  # accept all encodings
    # workaround buggy pycurl versions before Dec 2013
    self.payload = None
    self.payload_io = BytesIO()
    self.set_option(pycurl.WRITEFUNCTION, self.payload_io.write)

    def header_callback(x):
        # Older pycurl delivers str, newer delivers bytes.
        if isinstance(x, str):  # workaround buggy pycurl versions
            self.hdr += x
        else:
            self.hdr += x.decode("ascii")

    self.set_option(pycurl.HEADERFUNCTION, header_callback)
    ssl_library = pycurl.version_info()[5]
    # use the only one secure cipher that Sina supports
    if "OpenSSL" in ssl_library or "LibreSSL" in ssl_library:
        self.set_option(pycurl.SSL_CIPHER_LIST,
                        "ECDHE-ECDSA-AES256-GCM-SHA384:ECDHE-RSA-AES256-GCM-SHA384")
    elif "GnuTLS".lower() in ssl_library.lower():
        # not sure about the capitalization, use lower case
        self.set_option(pycurl.SSL_CIPHER_LIST, "PFS")
    else:
        # BUG FIX: `raise NotImplemented(...)` raised a TypeError because
        # NotImplemented is a constant, not an exception class.
        raise NotImplementedError("Unsupported SSL/TLS library (%s)!" % ssl_library)
def initHandle(self):
    """ sets common options to curl handle """
    opt = self.c.setopt  # bind once; every line below is a setopt call
    opt(pycurl.FOLLOWLOCATION, 1)
    opt(pycurl.MAXREDIRS, 5)
    opt(pycurl.CONNECTTIMEOUT, 30)
    opt(pycurl.NOSIGNAL, 1)
    opt(pycurl.NOPROGRESS, 1)
    if hasattr(pycurl, "AUTOREFERER"):
        opt(pycurl.AUTOREFERER, 1)
    opt(pycurl.SSL_VERIFYPEER, 0)
    # Interval for low speed, detects connection loss, but can abort dl
    # if hoster stalls the download
    opt(pycurl.LOW_SPEED_TIME, 45)
    opt(pycurl.LOW_SPEED_LIMIT, 5)
    # don't save the cookies
    opt(pycurl.COOKIEFILE, "")
    opt(pycurl.COOKIEJAR, "")
    #opt(pycurl.VERBOSE, 1)
    opt(pycurl.USERAGENT,
        "Mozilla/5.0 (Windows NT 6.1; Win64; x64;en; rv:5.0) Gecko/20110619 Firefox/5.0")
    if pycurl.version_info()[7]:  # libz version string -> compression available
        opt(pycurl.ENCODING, "gzip, deflate")
    opt(pycurl.HTTPHEADER,
        ["Accept: */*",
         "Accept-Language: en-US,en",
         "Accept-Charset: ISO-8859-1,utf-8;q=0.7,*;q=0.7",
         "Connection: keep-alive",
         "Keep-Alive: 300",
         "Expect:"])
def init_handle(self):
    """Sets common options to curl handle."""
    shared_defaults = [
        (pycurl.FOLLOWLOCATION, 1),
        (pycurl.MAXREDIRS, 5),
        (pycurl.CONNECTTIMEOUT, 30),
        (pycurl.NOSIGNAL, 1),
        (pycurl.NOPROGRESS, 1),
    ]
    for option, value in shared_defaults:
        self.setopt(option, value)
    if hasattr(pycurl, 'AUTOREFERER'):
        self.setopt(pycurl.AUTOREFERER, 1)
    self.setopt(pycurl.SSL_VERIFYPEER, 0)
    # Interval for low speed, detects connection loss, but can abort dl if
    # hoster stalls the download
    self.setopt(pycurl.LOW_SPEED_TIME, 45)
    self.setopt(pycurl.LOW_SPEED_LIMIT, 5)
    # do not save the cookies
    self.setopt(pycurl.COOKIEFILE, '')
    self.setopt(pycurl.COOKIEJAR, '')
    # self.setopt(pycurl.VERBOSE, 1)
    self.setopt(
        pycurl.USERAGENT,
        'Mozilla/5.0 (Windows NT 10.0; Win64; rv:53.0) '
        'Gecko/20100101 Firefox/53.0')
    if pycurl.version_info()[7]:
        self.setopt(pycurl.ENCODING, 'gzip,deflate')
    self.headers.update(
        {'Accept': '*/*',
         'Accept-Language': 'en-US,en',
         'Accept-Charset': 'ISO-8859-1,utf-8;q=0.7,*;q=0.7',
         'Connection': 'keep-alive',
         'Keep-Alive': '300',
         'Expect': ''})
def _curl_request(curl, timeout=30, redirect=True, verbose=False):
    """Apply the standard request options to *curl*."""
    setopt = curl.setopt
    setopt(pycurl.FOLLOWLOCATION, int(redirect))
    setopt(pycurl.MAXREDIRS, 5)
    if timeout:
        setopt(pycurl.CONNECTTIMEOUT, timeout)
    setopt(pycurl.NOSIGNAL, 1)
    setopt(pycurl.NOPROGRESS, 1)
    if hasattr(pycurl, 'AUTOREFERER'):
        setopt(pycurl.AUTOREFERER, 1)
    setopt(pycurl.SSL_VERIFYPEER, 0)
    setopt(pycurl.LOW_SPEED_TIME, 30)
    setopt(pycurl.LOW_SPEED_LIMIT, 5)
    if verbose:
        setopt(pycurl.VERBOSE, 1)
    setopt(pycurl.USERAGENT, user_agent)
    # version_info()[7] is the libz version string; compression is only
    # requested when libcurl was built with it.
    if pycurl.version_info()[7]:
        setopt(pycurl.ENCODING, 'gzip, deflate')
def __init__(self, base_url="", fakeheaders=()):
    """Set up the curl connection: verified peer, BytesIO payload capture,
    header accumulation and a Sina-compatible cipher list."""
    super().__init__(base_url, fakeheaders)
    self.set_option(pycurl.SSL_VERIFYPEER, True)
    self.set_option(pycurl.ENCODING, "")  # accept all encodings
    # workaround buggy pycurl versions before Dec 2013
    self.payload = None
    self.payload_io = BytesIO()
    self.set_option(pycurl.WRITEFUNCTION, self.payload_io.write)

    def header_callback(raw):
        # workaround buggy pycurl versions: some hand us str, some bytes
        self.hdr += raw if isinstance(raw, str) else raw.decode("ascii")

    self.set_option(pycurl.HEADERFUNCTION, header_callback)
    # use the only one secure cipher that Sina supports
    if "OpenSSL" in pycurl.version_info()[5]:
        cipher_list = "ECDHE-RSA-AES256-SHA"
    else:
        # Assume GnuTLS. what? You've built libcurl with NSS? Hum...
        cipher_list = "PFS"
    self.set_option(pycurl.SSL_CIPHER_LIST, cipher_list)
def _process_queue(self):
    """Start queued requests on free curl handles until either the free
    list or the request queue is exhausted."""
    with stack_context.NullContext():
        while True:
            started = 0
            while self._free_list and self._requests:
                started += 1
                curl = self._free_list.pop()
                (request, callback) = self._requests.popleft()
                # Per-transfer bookkeeping stashed directly on the handle.
                curl.info = {
                    "headers": httputil.HTTPHeaders(),
                    "buffer": cStringIO.StringIO(),
                    "request": request,
                    "callback": callback,
                    "curl_start_time": monotime(),
                }
                # Disable IPv6 to mitigate the effects of this bug
                # on curl versions <= 7.21.0
                # http://sourceforge.net/tracker/?func=detail&aid=3017819&group_id=976&atid=100976
                if pycurl.version_info()[2] <= 0x71500:  # 7.21.0
                    curl.setopt(pycurl.IPRESOLVE, pycurl.IPRESOLVE_V4)
                _curl_setup_request(curl, request, curl.info["buffer"],
                                    curl.info["headers"])
                self._multi.add_handle(curl)
            if not started:
                break
def is_libcurl_compiled_with_async_dns_resolver():
    """Return True if libcurl was compiled with the async DNS resolver.

    Per this (http://tornado.readthedocs.org/en/latest/httpclient.html),
    if you've configured Tornado to use async curl_httpclient, you'll want
    to make sure that libcurl has been compiled with async DNS resolver.
    The programmatic approach to checking for libcurl being compiled with
    async DNS resolve is a mess of gory details. It was this mess that
    drove the need for this function. Specifically, this function
    implements all the gory details so the caller doesn't have to worry
    about them!

    This function is intended to be used in an application's mainline in
    the following manner:

        #!/usr/bin/env python
        import logging
        from tor_async_util import is_libcurl_compiled_with_async_dns_resolver

        _logger = logging.getLogger(__name__)

        if __name__ == "__main__":
            if not is_libcurl_compiled_with_async_dns_resolver():
                msg = (
                    "libcurl does not appear to have been "
                    "compiled with async dns resolve which "
                    "may result in timeouts on async requests"
                )
                _logger.warning(msg)

    If you really want to understand the details start with the following
    article:

        http://stackoverflow.com/questions/25998063/how-can-i-tell-if-the-libcurl-installed-has-asynchronous-dns-enabled

    Other references that you'll find useful on your quest for
    understanding:

        http://curl.haxx.se/libcurl/
        https://github.com/bagder/curl/blob/master/include/curl/curl.h#L2286

    If you don't care about the implementation details just know that this
    function returns True if libcurl has been compiled with async DNS
    resolver otherwise this function returns False.
    """
    try:
        version_info = pycurl.version_info()
        features = version_info[4]
        # to understand CURL_VERSION_ASYNCHDNS see
        # https://github.com/bagder/curl/blob/master/include/curl/curl.h#L2286
        CURL_VERSION_ASYNCHDNS = 1 << 7
        return (features & CURL_VERSION_ASYNCHDNS) == CURL_VERSION_ASYNCHDNS
    except Exception as ex:
        # Typo fix: the log message said "complied" instead of "compiled".
        fmt = (
            "Error trying to figure out if libcurl is compiled with "
            "async DNS resolver - %s"
        )
        msg = fmt % ex
        _logger.debug(msg)
        return False
def decorated(*args, **kwargs):
    # easier to check that pycurl supports https, although
    # theoretically it is not the same test.
    # pycurl.version_info()[8] is a tuple of protocols supported by libcurl
    supported_protocols = pycurl.version_info()[8]
    if "https" not in supported_protocols:
        raise nose.plugins.skip.SkipTest("libcurl does not support ssl")
    return fn(*args, **kwargs)
def __init__(self, base, _from_transport=None):
    """Initialise the transport, refusing https URLs when the local
    libcurl build has no https support."""
    super(PyCurlTransport, self).__init__(base,
                                          _from_transport=_from_transport)
    if base.startswith('https'):
        # Check availability of https into pycurl supported protocols
        # (pycurl.version_info()[8] is that protocol tuple).
        if 'https' not in pycurl.version_info()[8]:
            raise errors.DependencyNotPresent('pycurl', 'no https support')
    self.cabundle = ca_bundle.get_ca_path()
def __init__(self, url):
    """Prepare a curl handle for the PopDB API at *url*."""
    self.host = url
    self.resource = ''
    self.caller = pycurl.Curl()
    self.validateHostName(url)
    # Identify the client: pycurl + libcurl version, then platform details.
    ua_fields = (pycurl.__name__, pycurl.version_info()[1],
                 platform.system(), platform.release(),
                 platform.processor())
    header = 'PopDB API/1.0 (CMS) %s/%s %s/%s (%s)' % ua_fields
    self.caller.setopt(pycurl.HTTPHEADER, ['User-agent: %s' % header])
    #self.caller.setopt(self.caller.URL, url)
    self.caller.setopt(self.caller.VERBOSE, True)
    # NOTE: peer verification is disabled here.
    self.caller.setopt(self.caller.SSL_VERIFYPEER, 0)
def _queryTier0DataSvc( self, url ):
    """
    Queries Tier0DataSvc.
    url: Tier0DataSvc URL.

    @returns: dictionary, from whence the required information must be retrieved according to the API call.

    Raises if connection error, bad response, or timeout after retries occur.
    """
    # NOTE(review): Python 2 code (cStringIO, tuple-unpacking a
    # pycurl.error instance) -- confirm the target interpreter.
    cHandle = pycurl.Curl()
    # Peer/host verification disabled: encryption only, no identity check.
    cHandle.setopt( cHandle.SSL_VERIFYPEER, 0 )
    cHandle.setopt( cHandle.SSL_VERIFYHOST, 0 )
    cHandle.setopt( cHandle.URL, url )
    cHandle.setopt( cHandle.HTTPHEADER, [ "User-Agent: ConditionWebServices/1.0 python/%d.%d.%d PycURL/%s" % ( sys.version_info[ :3 ] + ( pycurl.version_info()[ 1 ], ) ) , ] )
    cHandle.setopt( cHandle.TIMEOUT, self._timeOut )
    if self._proxy:
        cHandle.setopt( cHandle.PROXY, self._proxy )
    if self._debug:
        cHandle.setopt( cHandle.VERBOSE, 1 )
    retry = 0
    while retry < self._retries:
        try:
            jsonCall = cStringIO.StringIO()
            cHandle.setopt( cHandle.WRITEFUNCTION, jsonCall.write )
            cHandle.perform()
            if cHandle.getinfo( cHandle.RESPONSE_CODE ) != 200:
                _raise_http_error( cHandle, jsonCall.getvalue(), self._proxy )
            # The service responds with single-quoted pseudo-JSON;
            # normalise the quotes before parsing.
            return json.loads( jsonCall.getvalue().replace("'", '"') )
        except pycurl.error as pyCURLerror:
            # Python 2 only: unpack (code, message) from the error.
            errorCode, errorMessage = pyCURLerror
            if self._debug:
                errStr = """Unable to establish connection to Tier0DataSvc from URL \"%s\"""" %( url, )
                if self._proxy:
                    errStr += """ using proxy \"%s\"""" %( str( self._proxy ), )
                errStr += """ with timeout \"%d\".\nThe reason is: \"%s\" (error code \"%d\").""" %( self._timeOut, errorMessage, errorCode )
                logging.error("pycurl.error: %s", errStr)
            retry += 1
            if retry < self._retries: # no sleep in last iteration
                time.sleep( self._retryPeriod )
        except ResponseError as r:
            if self._debug:
                logging.error("ResponseError: %s", r)
            retry += 1
            if retry < self._retries: # no sleep in last iteration
                time.sleep( self._retryPeriod )
        finally:
            jsonCall.close()
    # All retries exhausted: build the failure message and give up.
    errStr = """Unable to get Tier0DataSvc data from URL \"%s\"""" %( url, )
    if self._proxy:
        errStr += """ using proxy \"%s\"""" %( str( self._proxy ), )
    errStr += """ with timeout \"%d\" since maximum 
number of retries \"%d\" with retry period \"%d\" was reached.""" % ( self._timeOut, self._retries, self._retryPeriod )
    raise Tier0Error( errStr )
def _useragent():
    """Build a User-Agent string from libcurl version info, optionally
    appending the distribution identifier found in the TurnKey apt conf.

    Returns a string like "pycurl_wrapper: libcurl/7.x OpenSSL/... host".
    """
    vi = pycurl.version_info()
    ua = "pycurl_wrapper: libcurl/%s %s %s" % (vi[1], vi[5], vi[3])
    try:
        # BUG FIX: the py2 builtin file() is gone in Python 3; use open()
        # with a context manager so the handle is always closed.
        with open("/etc/apt/apt.conf.d/01turnkey") as f:
            apt_ua = f.read()
        m = re.search(r" \((.*?)\)", apt_ua)
        if m:
            # BUG FIX: m.groups(1) returned the tuple of all groups with a
            # default of 1; m.group(1) is the intended first capture.
            ua += " (%s)" % m.group(1)
    except (IOError, OSError):
        # best effort: the apt conf file is optional, so only file-access
        # errors are swallowed (the original bare `except` hid everything)
        pass
    return ua
def __init__(self, url=None, url_data=None, body=None, response_stream=None, timeout=60):
    """Prepare a pycurl request for *url*.

    body: dict (form-encoded), list (JSON-encoded) or raw string POST body.
    url_data: dict of query-string parameters (anything else aborts).
    response_stream: optional stream to collect the response into.
    timeout: connect timeout in seconds.
    """
    # NOTE(review): returning from __init__ leaves a half-initialised
    # object when no url is given -- callers apparently rely on this guard.
    if not (url):
        return None
    self._url = url
    self._r = pycurl.Curl()
    # set options for the request - of note is the fact that we do not verify the peer or the host - because
    # CERN certificates are self-signed, and we only need the encryption from HTTPS, not the certificate checks.
    self._r.setopt(self._r.CONNECTTIMEOUT, timeout)
    user_agent = "User-Agent: ConditionWebServices/1.0 python/%d.%d.%d PycURL/%s" % (
        sys.version_info[:3] + (pycurl.version_info()[1], ))
    self._r.setopt(self._r.USERAGENT, user_agent)
    # we don't need to verify who signed the certificate or who the host is
    self._r.setopt(self._r.SSL_VERIFYPEER, 0)
    self._r.setopt(self._r.SSL_VERIFYHOST, 0)
    self._response = StringIO()
    if body:
        if isinstance(body, dict):
            body = urlencode(body)
        elif isinstance(body, list):
            body = json.dumps(body)
        self._r.setopt(self._r.POSTFIELDS, body)
    if url_data:
        if isinstance(url_data, dict):
            url_data = urlencode(url_data)
        else:
            exit("URL data '%s' for request to URL '%s' was not valid - should be a dictionary." % (str(url_data), url))
    # set the URL with url parameters if they were given
    self._r.setopt(self._r.URL, url + (("?%s" % url_data) if url_data else ""))
    # NOTE(review): a truthy non-StringIO response_stream is *replaced* by
    # a fresh StringIO here, so the caller's object is never written to --
    # confirm this is intentional.
    if response_stream and not isinstance(response_stream, StringIO):
        response_stream = StringIO()
        # copy reference to instance variable
        self._response = response_stream
    elif not (response_stream):
        self._response = StringIO()
    self._r.setopt(self._r.WRITEFUNCTION, self._response.write)
def test_socket_open_bad(self):
    """A failing OPENSOCKETFUNCTION must surface as a pycurl.error."""
    self.curl.setopt(pycurl.OPENSOCKETFUNCTION, socket_open_bad)
    self.curl.setopt(self.curl.URL, 'http://%s:8380/success' % localhost)
    try:
        self.curl.perform()
    except pycurl.error as e:
        libcurl_version = pycurl.version_info()[1].split('.')
        # libcurl 7.38.0 for some reason fails with a timeout
        # (and spends 5 minutes on this test)
        if libcurl_version == ['7', '38', '0']:
            expected = pycurl.E_OPERATION_TIMEDOUT
        else:
            expected = pycurl.E_COULDNT_CONNECT
        self.assertEqual(expected, e.args[0])
    else:
        self.fail('Should have raised')
def set_version_info():
    """Build an object mirroring pycurl.version_info() with named fields.

    A lambda is (ab)used as a cheap attribute bag; the positional tuple
    from pycurl.version_info() is mapped onto named attributes, version
    strings are parsed via version_tuple(), and the `features` bitmask is
    translated into a set of human-readable feature names.
    """
    version_info = lambda x: False  # attribute bag; the callable is never used
    version_info_vector = list(pycurl.version_info())
    version_info_vector.reverse()  # so .pop() below yields original order
    # Fields whose string values should be parsed into version tuples.
    versions = set(['libz_version', 'libidn', 'version', 'ares'])
    # Field names in positional order of the version_info() tuple; newer
    # pycurl builds return more trailing fields than older ones.
    for i in ('age', 'version', 'version_num', 'host', 'features',
              'ssl_version', 'ssl_version_num', 'libz_version', 'protocols',
              'ares', 'ares_num', 'libidn', 'iconv_ver_num',
              'libssh_version', 'brotli_ver_num', 'brotli_version'):
        # When walking of the version_info_vector is done, no more values
        if len(version_info_vector) == 0:
            break
        value = version_info_vector.pop()
        if i in versions:
            setattr(version_info, i, version_tuple(value))
        else:
            setattr(version_info, i, value)
    features = set()
    # Map pycurl feature-flag constant names to curl's display names.
    features_mapping = {
        'VERSION_ASYNCHDNS': 'AsynchDNS',
        'VERSION_GSSNEGOTIATE': 'GSS-Negotiate',
        'VERSION_IDN': 'IDN',
        'VERSION_IPV6': 'IPv6',
        'VERSION_LARGEFILE': 'Largefile',
        'VERSION_NTLM': 'NTLM',
        'VERSION_NTLM_WB': 'NTLM_WB',
        'VERSION_SSL': 'SSL',
        'VERSION_LIBZ': 'libz',
        'VERSION_UNIX_SOCKETS': 'UnixSockets',
        'VERSION_KERBEROS5': 'Kerberos',
        'VERSION_SPNEGO': 'SPNEGO',
        'VERSION_HTTP2': 'HTTP2',
        'VERSION_GSSAPI': 'GSS-API',
        'VERSION_TLSAUTH_SRP': 'TLS-SRP',
        'VERSION_PSL': 'PSL'
    }
    # Only probe flags that this pycurl build actually defines.
    for i in ('VERSION_IPV6', 'VERSION_KERBEROS4', 'VERSION_KERBEROS5',
              'VERSION_SSL', 'VERSION_LIBZ', 'VERSION_NTLM',
              'VERSION_GSSNEGOTIATE', 'VERSION_DEBUG', 'VERSION_CURLDEBUG',
              'VERSION_ASYNCHDNS', 'VERSION_SPNEGO', 'VERSION_LARGEFILE',
              'VERSION_IDN', 'VERSION_SSPI', 'VERSION_GSSAPI',
              'VERSION_CONV', 'VERSION_TLSAUTH_SRP', 'VERSION_NTLM_WB',
              'VERSION_HTTP2', 'VERSION_UNIX_SOCKETS', 'VERSION_PSL',
              'VERSION_HTTPS_PROXY', 'VERSION_MULTI_SSL', 'VERSION_BROTLI'):
        if hasattr(pycurl, i):
            if version_info.features & getattr(pycurl, i) != 0:
                features.add(features_mapping.get(i, i))
    # Replace the raw bitmask with the readable feature-name set.
    version_info.features = features
    return version_info
def blind_try(url, opts=None):
    """Blindly return the http code for a url. Don't ask any questions.

    url: URL to issue a HEAD-style (NOBODY) request against.
    opts: optional dict with keys 'follow' (bool) and 'useragent' (str).
    """
    # BUG FIX: the mutable default `opts={}` was shared across calls;
    # use None as the sentinel instead.
    opts = {} if opts is None else opts
    curl = pycurl.Curl()
    curl.setopt(curl.NOBODY, True)  # no body transfer, headers only
    curl.setopt(curl.URL, url)
    curl.setopt(curl.FOLLOWLOCATION, opts.get("follow", False))
    # Default user agent is the libcurl version string.
    curl.setopt(curl.USERAGENT, opts.get("useragent", pycurl.version_info()[1]))
    curl.perform()
    code = curl.getinfo(pycurl.HTTP_CODE)
    print("> HEAD {url:40}:{code}".format(url=url, code=code))
    return code
def __init__(self,
             userAgent='Mozilla/4.0 (compatible; MSIE 8.0)',
             followLocation=1,   # follow redirects?
             autoReferer=1,      # allow 'referer' to be set normally?
             verifySSL=0,        # tell SSL to verify IDs?
             useCookies=True,    # will hold all pycurl cookies
             useSOCKS=False,     # use SOCKS5 proxy?
             proxy='localhost',  # SOCKS host
             proxyPort=8080,     # SOCKS port
             proxyType=5,        # SOCKS protocol
             verbose=False,
             debug=False,
             ):
    """Create and configure the pycurl.Curl handle stored on self.pco.

    All keyword arguments are also stored as attributes of the same name.
    """
    import tempfile  # local import: only needed for the cookie jar below

    self.followLocation = followLocation
    self.autoReferer = autoReferer
    self.verifySSL = verifySSL
    self.useCookies = useCookies
    self.useSOCKS = useSOCKS
    self.proxy = proxy
    self.proxyPort = proxyPort
    self.proxyType = proxyType

    self.pco = pycurl.Curl()
    self.pco.setopt(pycurl.USERAGENT, userAgent)
    self.pco.setopt(pycurl.FOLLOWLOCATION, followLocation)
    self.pco.setopt(pycurl.MAXREDIRS, 20)
    self.pco.setopt(pycurl.CONNECTTIMEOUT, 30)
    self.pco.setopt(pycurl.AUTOREFERER, autoReferer)
    # SSL verification.  CURLOPT_SSL_VERIFYHOST takes 0 or 2 (1 is invalid
    # in modern libcurl), so map the boolean-ish verifySSL accordingly.
    self.pco.setopt(pycurl.SSL_VERIFYPEER, verifySSL)
    self.pco.setopt(pycurl.SSL_VERIFYHOST, 2 if verifySSL else 0)
    if useCookies:
        # BUG FIX: os.tmpfile() was removed in Python 3 and this file
        # otherwise uses Python 3 syntax.  NamedTemporaryFile also gives a
        # real name libcurl can reopen; keep a reference on self so the
        # file is not garbage-collected while the handle still needs it.
        self._cookie_jar = tempfile.NamedTemporaryFile()
        self.pco.setopt(pycurl.COOKIEFILE, self._cookie_jar.name)
        self.pco.setopt(pycurl.COOKIEJAR, self._cookie_jar.name)
    if useSOCKS:
        # if you wish to use SOCKS, it is configured through these parms
        self.pco.setopt(pycurl.PROXY, proxy)
        self.pco.setopt(pycurl.PROXYPORT, proxyPort)
        self.pco.setopt(pycurl.PROXYTYPE, proxyType)
    if verbose:
        self.pco.setopt(pycurl.VERBOSE, 1)
    if debug:
        print('PyCurl version info:')
        print(pycurl.version_info())
        print()
        self.pco.setopt(pycurl.DEBUGFUNCTION, self.debug)
    return
def get_version_info(url):
    """
    Queries the server-side for the commit hash it is currently using.
    Note: this is the commit hash used by /data/services/common/CondDBFW on the server-side.
    """
    request = pycurl.Curl()
    request.setopt(request.CONNECTTIMEOUT, 60)
    user_agent = "User-Agent: ConditionWebServices/1.0 python/%d.%d.%d PycURL/%s" % (
        sys.version_info[:3] + (pycurl.version_info()[1], ))
    request.setopt(request.USERAGENT, user_agent)
    # we don't need to verify who signed the certificate or who the host is
    request.setopt(request.SSL_VERIFYPEER, 0)
    request.setopt(request.SSL_VERIFYHOST, 0)
    # NOTE(review): under Python 3 pycurl, WRITEFUNCTION delivers bytes,
    # which StringIO.write would reject -- confirm the intended runtime.
    response_buffer = StringIO()
    request.setopt(request.WRITEFUNCTION, response_buffer.write)
    request.setopt(request.URL, url + "conddbfw_version/")
    request.perform()
    return json.loads(response_buffer.getvalue())
def decorated(*args, **kwargs):
    # easier to check that pycurl supports https, although
    # theoretically it is not the same test.
    # pycurl.version_info()[8] is a tuple of protocols supported by libcurl
    if 'https' not in pycurl.version_info()[8]:
        raise nose.plugins.skip.SkipTest('libcurl does not support ssl')
    # XXX move to pycurl library
    # Detect the SSL backend by probing the version string in order.
    current_backend = 'none'
    for marker, backend in (('OpenSSL/', 'openssl'),
                            ('GnuTLS/', 'gnutls'),
                            ('NSS/', 'nss')):
        if marker in pycurl.version:
            current_backend = backend
            break
    if current_backend not in backends:
        raise nose.plugins.skip.SkipTest('SSL backend is %s' % current_backend)
    return fn(*args, **kwargs)
def decorated(*args, **kwargs):
    # easier to check that pycurl supports https, although
    # theoretically it is not the same test.
    # pycurl.version_info()[8] is a tuple of protocols supported by libcurl
    protocols = pycurl.version_info()[8]
    if "https" not in protocols:
        raise nose.plugins.skip.SkipTest("libcurl does not support ssl")
    # XXX move to pycurl library
    version_string = pycurl.version
    if "OpenSSL/" in version_string:
        current_backend = "openssl"
    elif "GnuTLS/" in version_string:
        current_backend = "gnutls"
    elif "NSS/" in version_string:
        current_backend = "nss"
    else:
        current_backend = "none"
    if current_backend not in backends:
        raise nose.plugins.skip.SkipTest("SSL backend is %s" % current_backend)
    return fn(*args, **kwargs)
def init_handle(self):
    """
    Sets common options to curl handle.
    """
    common = (
        (pycurl.FOLLOWLOCATION, 1),
        (pycurl.MAXREDIRS, 5),
        (pycurl.CONNECTTIMEOUT, 30),
        (pycurl.NOSIGNAL, 1),
        (pycurl.NOPROGRESS, 1),
    )
    for option, value in common:
        self.c.setopt(option, value)
    if hasattr(pycurl, "AUTOREFERER"):
        self.c.setopt(pycurl.AUTOREFERER, 1)
    self.c.setopt(pycurl.SSL_VERIFYPEER, 0)
    # Interval for low speed, detects connection loss, but can abort dl if
    # hoster stalls the download
    self.c.setopt(pycurl.LOW_SPEED_TIME, 45)
    self.c.setopt(pycurl.LOW_SPEED_LIMIT, 5)
    # do not save the cookies
    self.c.setopt(pycurl.COOKIEFILE, b"")
    self.c.setopt(pycurl.COOKIEJAR, b"")
    # self.c.setopt(pycurl.VERBOSE, 1)
    self.c.setopt(
        pycurl.USERAGENT,
        "Mozilla/5.0 (Windows NT 6.1; Win64; x64;en; rv:5.0) Gecko/20110619 Firefox/5.0"
    )
    if pycurl.version_info()[7]:  # libz available -> compression OK
        self.c.setopt(pycurl.ENCODING, b"gzip, deflate")
    self.headers.update({
        'Accept': "*/*",
        'Accept-Language': "en-US,en",
        'Accept-Charset': "ISO-8859-1,utf-8;q=0.7,*;q=0.7",
        'Connection': "keep-alive",
        'Keep-Alive': "300",
        'Expect': ""
    })
def __init__(
        self,
        api_cookies,
        web_cookies,
        cipher=None,
        api_args=None,
        api_url='https://ceiba.ntu.edu.tw/course/f03067/app/login.php',
        file_url='https://ceiba.ntu.edu.tw',
        web_url='https://ceiba.ntu.edu.tw'):
    """Set up an HTTPS-only curl session for the Ceiba API and web UI.

    api_cookies / web_cookies: dicts serialised into Cookie header strings.
    cipher: explicit TLS cipher; when None one is chosen per TLS backend.
    """
    self.logger = logging.getLogger(__name__)
    self.curl = pycurl.Curl()
    self.api_cookie = ';'.join(
        map(lambda x: '{}={}'.format(*x), api_cookies.items()))
    self.web_cookie = ';'.join(
        map(lambda x: '{}={}'.format(*x), web_cookies.items()))
    # BUG FIX: the default {'api': '1'} dict was a shared mutable default;
    # build a fresh one per instance instead.
    self.api_args = {'api': '1'} if api_args is None else api_args
    self.api_url = api_url
    self.file_url = file_url
    self.web_url = web_url
    self.api_cache = None
    self.web_cache = dict()
    if not cipher:
        # Pick a cipher spelling matching the TLS backend libcurl uses.
        tls_backend = pycurl.version_info()[5].split('/')[0]
        if tls_backend == 'OpenSSL' or tls_backend == 'LibreSSL':
            cipher = 'ECDHE-RSA-AES128-GCM-SHA256'
        elif tls_backend == 'GnuTLS':
            cipher = 'ECDHE-RSA-AES128-GCM-SHA256'
        elif tls_backend == 'NSS':
            cipher = 'ecdhe_rsa_aes_128_gcm_sha_256'
        else:
            # BUG FIX: `assert False, ...` is stripped under `python -O`;
            # raise the same exception type explicitly instead.
            raise AssertionError('TLS 實作 {} 尚未支援'.format(tls_backend))
    # HTTPS only: forbid any downgrade, even across redirects.
    self.curl.setopt(pycurl.USE_SSL, pycurl.USESSL_ALL)
    self.curl.setopt(pycurl.SSL_CIPHER_LIST, cipher)
    self.curl.setopt(pycurl.PROTOCOLS, pycurl.PROTO_HTTPS)
    self.curl.setopt(pycurl.REDIR_PROTOCOLS, pycurl.PROTO_HTTPS)
    self.curl.setopt(pycurl.DEFAULT_PROTOCOL, 'https')
    self.curl.setopt(pycurl.FOLLOWLOCATION, False)
def init_handle(self):
    """
    sets common options to curl handle.
    """
    opt = self.c.setopt
    opt(pycurl.FOLLOWLOCATION, 1)
    opt(pycurl.MAXREDIRS, 10)
    opt(pycurl.CONNECTTIMEOUT, 30)
    opt(pycurl.NOSIGNAL, 1)
    opt(pycurl.NOPROGRESS, 1)
    if hasattr(pycurl, "AUTOREFERER"):
        opt(pycurl.AUTOREFERER, 1)
    opt(pycurl.SSL_VERIFYPEER, 0)
    opt(pycurl.LOW_SPEED_TIME, 60)
    opt(pycurl.LOW_SPEED_LIMIT, 5)
    if hasattr(pycurl, "USE_SSL"):
        opt(pycurl.USE_SSL, pycurl.USESSL_TRY)
    # opt(pycurl.VERBOSE, 1)
    opt(
        pycurl.USERAGENT,
        b"Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/60.0.3112.113 Safari/537.36",
    )
    if pycurl.version_info()[7]:  # libz version -> compression supported
        opt(pycurl.ENCODING, b"gzip, deflate")
    opt(
        pycurl.HTTPHEADER,
        [
            b"Accept: */*",
            b"Accept-Language: en-US,en",
            b"Accept-Charset: ISO-8859-1,utf-8;q=0.7,*;q=0.7",
            b"Connection: keep-alive",
            b"Keep-Alive: 300",
            b"Expect:",
        ],
    )
# Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. try: import simplejson as json except: import json import logging import pycurl import tempfile from exceptions import * import os _PYCURL_SSL = pycurl.version_info()[5].split('/')[0] log = logging.getLogger(__name__) class PycurlRequest(object): def _set_ssl(self): self.curl_handle.setopt(pycurl.SSL_VERIFYPEER, self.verify) if self.verify: self.curl_handle.setopt(pycurl.SSL_VERIFYHOST, 2) else: self.curl_handle.setopt(pycurl.SSL_VERIFYHOST, 0) if self.ucert: self.curl_handle.setopt(pycurl.SSLCERT, self.ucert) if self.ukey: self.curl_handle.setopt(pycurl.SSLKEY, self.ukey)
self.write(bytestr) self.flush() self.seek(0) try: from io import BytesIO except ImportError: from StringIO import StringIO as BytesIO def base_url(): return "http://localhost/irods-rest/0.9.0/" if [int(x) for x in pycurl.version_info()[1].split('.')][:2] <= [7, 29]: StringIO = old_pycurl_mock_StringIO BytesIO = old_pycurl_mock_BytesIO def authenticate(_user_name, _password, _auth_type): buffer = StringIO() creds = _user_name + ':' + _password buff = creds.encode('ascii') token = base64.b64encode(buff, None) c = pycurl.Curl() c.setopt(pycurl.HTTPHEADER, ['Authorization: Native ' + token]) c.setopt(c.CUSTOMREQUEST, 'POST') url = base_url() + 'auth'
def decorated(*args, **kwargs):
    # pycurl.version_info()[8] is a tuple of protocols supported by libcurl
    supported_protocols = pycurl.version_info()[8]
    if 'telnet' not in supported_protocols:
        raise unittest.SkipTest('libcurl does not support telnet')
    return fn(*args, **kwargs)
# You should have received a copy of the GNU Affero General Public License # along with Shinken. If not, see <http://www.gnu.org/licenses/>. import cPickle import zlib import json # Pycurl part import pycurl pycurl.global_init(pycurl.GLOBAL_ALL) import urllib from StringIO import StringIO from shinken.bin import VERSION from shinken.log import logger PYCURL_VERSION = pycurl.version_info()[1] class HTTPException(Exception): pass HTTPExceptions = (HTTPException,) class FileReader: def __init__(self, fp): self.fp = fp def read_callback(self, size): return self.fp.read(size) class HTTPClient(object):
def decorated(*args, **kwargs):
    # version_info()[4] is the libcurl feature bitmask; VERSION_IPV6 is
    # set when libcurl was built with IPv6 support.
    features = pycurl.version_info()[4]
    if not features & pycurl.VERSION_IPV6:
        raise nose.plugins.skip.SkipTest('libcurl does not support ipv6')
    return fn(*args, **kwargs)
def pycurl_version_less_than(*spec):
    """Return whether the installed libcurl version is older than *spec*."""
    import pycurl

    dotted = pycurl.version_info()[1]  # e.g. "7.64.1"
    version = list(map(int, dotted.split(".")))
    return version_less_than_spec(version, spec)
# NOTE(review): Python 2 syntax (print statements) -- this self-test chunk
# cannot run under Python 3 as written.
import pycurl
from pycurl import Curl, CurlMulti

class opts:
    # default verbosity; "-q" on the command line lowers it
    verbose = 1

if "-q" in sys.argv:
    opts.verbose = opts.verbose - 1

# Show interpreter / pycurl build information before running the checks.
print "Python", sys.version
print "PycURL %s (compiled against 0x%x)" % (
    pycurl.version, pycurl.COMPILE_LIBCURL_VERSION_NUM)
print "PycURL version info", pycurl.version_info()
print " %s, compiled %s" % (pycurl.__file__, pycurl.COMPILE_DATE)

# /***********************************************************************
# // test misc
# ************************************************************************/

if 1:
    # Option constants must be reachable both on handles and on the module.
    c = Curl()
    assert c.URL is pycurl.URL
    del c

# /***********************************************************************
# // test handles
# ************************************************************************/
# NOTE(review): Python 2 print statements; part of the pycurl self-test.
sys.path = get_sys_path()
import pycurl
from pycurl import Curl, CurlMulti

class opts:
    # default verbosity; "-q" on the command line lowers it
    verbose = 1

if "-q" in sys.argv:
    opts.verbose = opts.verbose - 1

# Show interpreter / pycurl build information before running the checks.
print "Python", sys.version
print "pycurl %s (compiled against 0x%x)" % (pycurl.version, pycurl.COMPILE_LIBCURL_VERSION_NUM)
print "pycurl version info", pycurl.version_info()
print " %s, compiled %s" % (pycurl.__file__, pycurl.COMPILE_DATE)

# /***********************************************************************
# // test misc
# ************************************************************************/

if 1:
    # Option constants must be reachable both on handles and on the module.
    c = Curl()
    assert c.URL is pycurl.URL
    del c

# /***********************************************************************
# // test handles
def _queryTier0DataSvc( self, url ):
    """
    Queries Tier0DataSvc.
    url: Tier0DataSvc URL.

    @returns: dictionary, from whence the required information must be retrieved according to the API call.

    Raises if connection error, bad response, or timeout after retries occur.
    """
    userAgent = "User-Agent: ConditionWebServices/1.0 python/%d.%d.%d PycURL/%s" % ( sys.version_info[ :3 ] + ( pycurl.version_info()[ 1 ], ) )
    proxy = ""
    if self._proxy:
        proxy = ' --proxy=%s ' % self._proxy
    debug = " -s -S "
    if self._debug:
        debug = " -v "
    # NOTE(review): shell=True with an interpolated URL -- if `url` can
    # ever come from untrusted input this is a shell-injection risk;
    # confirm all callers pass trusted, internally-built URLs.
    cmd = '/usr/bin/curl -k -L --user-agent "%s" %s --connect-timeout %i --retry %i %s %s ' % (userAgent, proxy, self._timeOut, self._retries, debug, url)
    # time the curl to understand if re-tries have been carried out
    start = time.time()
    process = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    (stdoutdata, stderrdata) = process.communicate()
    retcode = process.returncode
    end = time.time()
    if retcode != 0 or stderrdata:
        # if the first curl has failed, logg its stderror and prepare and independent retry
        msg = "looks like curl returned an error: retcode=%s and took %s seconds" % (retcode,(end-start),)
        msg += ' msg = "'+str(stderrdata)+'"'
        logging.error(msg)
        # brief back-off, then one manual retry with verbose output
        time.sleep(10)
        cmd = '/usr/bin/curl -k -L --user-agent "%s" %s --connect-timeout %i --retry %i %s %s ' % (userAgent, proxy, self._timeOut, self._retries, "-v", url)
        process = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
        (stdoutdata, stderrdata) = process.communicate()
        retcode = process.returncode
        if retcode != 0:
            msg = "looks like curl returned an error for the second time: retcode=%s" % (retcode,)
            msg += ' msg = "'+str(stderrdata)+'"'
            logging.error(msg)
            raise Tier0Error(msg)
        else :
            msg = "curl returned ok upon the second try"
            logging.info(msg)
    # Normalise single quotes and bare None tokens before JSON parsing.
    return json.loads( ''.join(stdoutdata).replace( "'", '"').replace(' None', ' "None"') )
def pycurl_version_less_than(*spec):
    """True when the libcurl version reported by pycurl predates *spec*."""
    import pycurl
    libcurl_version = pycurl.version_info()[1]  # e.g. '7.64.1'
    numeric = [int(piece) for piece in libcurl_version.split('.')]
    return version_less_than_spec(numeric, spec)
# NOTE(review): `fetch`, `test_ssl` and `test_large_post` are methods of a
# test-case class whose header lies outside this chunk; the indentation
# below is reconstructed -- confirm against the original file.
def fetch(self, path, **kwargs):
    """Fetch *path* over https, skipping certificate verification."""
    def disable_cert_check(curl):
        # Our certificate was not signed by a CA, so don't check it
        curl.setopt(pycurl.SSL_VERIFYPEER, 0)
    self.http_client.fetch(self.get_url(path).replace('http', 'https'),
                           self.stop,
                           prepare_curl_callback=disable_cert_check,
                           **kwargs)
    return self.wait()

def test_ssl(self):
    response = self.fetch('/')
    self.assertEqual(response.body, "Hello world")

def test_large_post(self):
    response = self.fetch('/', method='POST', body='A' * 5000)
    self.assertEqual(response.body, "Got 5000 bytes in POST")

if (ssl is None or pycurl is None or
        (pycurl.version_info()[5].startswith('GnuTLS') and
         pycurl.version_info()[2] < 0x71400)):
    # Don't try to run ssl tests if we don't have the ssl module (python 2.5).
    # Additionally, when libcurl (< 7.21.0) is compiled against gnutls
    # instead of openssl (which is the default on at least some versions of
    # ubuntu), libcurl does the ssl handshake in blocking mode. That will
    # cause this test to deadlock as the blocking network ops happen in
    # the same IOLoop as the server.
    del SSLTest
import asyncurl import utils import version import cStringIO WEBVID_USER_AGENT = 'libwebvi/%s %s' % (version.VERSION, pycurl.version) MOZILLA_USER_AGENT = 'Mozilla/5.0 (X11; U; Linux i686 (x86_64); en-US; rv:1.9.1.5) Gecko/20091102 Firefox/3.5.5' try: from libmimms import libmms except ImportError, exc: pass # Mapping from curl error codes to webvi errors. The error constants # are defined only in pycurl 7.16.1 and newer. if pycurl.version_info()[2] >= 0x071001: CURL_ERROR_CODE_MAPPING = \ {pycurl.E_OK: 0, pycurl.E_OPERATION_TIMEOUTED: 408, pycurl.E_OUT_OF_MEMORY: 500, pycurl.E_PARTIAL_FILE: 504, pycurl.E_READ_ERROR: 504, pycurl.E_RECV_ERROR: 504, pycurl.E_REMOTE_FILE_NOT_FOUND: 404, pycurl.E_TOO_MANY_REDIRECTS: 404, pycurl.E_UNSUPPORTED_PROTOCOL: 500, pycurl.E_URL_MALFORMAT: 400, pycurl.E_COULDNT_CONNECT: 506, pycurl.E_COULDNT_RESOLVE_HOST: 506, pycurl.E_COULDNT_RESOLVE_PROXY: 506, pycurl.E_FILE_COULDNT_READ_FILE: 404,
def get_version_info(url):
    """
    Queries the server-side for the commit hash it is currently using.
    Note: this is the commit hash used by /data/services/common/CondDBFW on the server-side.

    :param url: base URL of the service; "conddbfw_version/" is appended.
    :returns: decoded JSON payload from the service.
    """
    request = pycurl.Curl()
    try:
        request.setopt(request.CONNECTTIMEOUT, 60)
        # Identify ourselves with the Python and PycURL/libcurl versions in use.
        user_agent = "User-Agent: ConditionWebServices/1.0 python/%d.%d.%d PycURL/%s" % (sys.version_info[ :3 ] + (pycurl.version_info()[1],))
        request.setopt(request.USERAGENT, user_agent)
        # we don't need to verify who signed the certificate or who the host is
        request.setopt(request.SSL_VERIFYPEER, 0)
        request.setopt(request.SSL_VERIFYHOST, 0)
        response_buffer = StringIO()
        request.setopt(request.WRITEFUNCTION, response_buffer.write)
        request.setopt(request.URL, url + "conddbfw_version/")
        request.perform()
        return json.loads(response_buffer.getvalue())
    finally:
        # Release the libcurl handle even if perform() raises; the original
        # code leaked the handle on every call.
        request.close()
# Timeout not supported Timeout = None # For DEBUGFUNCTION callback CURLINFO_TEXT = 0 CURLINFO_HEADER_IN = 1 CURLINFO_HEADER_OUT = 2 # Loggers LOGGER = logging.getLogger('curl') LOGGER_TEXT = LOGGER.getChild('text') LOGGER_HEADER_IN = LOGGER.getChild('header_in') LOGGER_HEADER_OUT = LOGGER.getChild('header_out') DEBUGFUNCTION_LOGGERS = {LOGGER_TEXT, LOGGER_HEADER_IN, LOGGER_HEADER_OUT} VERSION_INFO = pycurl.version_info() class Request: def __init__(self, prepared, *, curl=None, timeout=None, allow_redirects=True, max_redirects=-1): self.prepared = prepared self.curl = curl or pycurl.Curl() self.timeout = timeout self.allow_redirects = allow_redirects self.max_redirects = max_redirects
import time
import socket
import cPickle
import zlib
import json

# Pycurl part
import pycurl
# Initialise libcurl globally, once, before any Curl handles are created.
pycurl.global_init(pycurl.GLOBAL_ALL)
import urllib
from StringIO import StringIO

from shinken.bin import VERSION
from shinken.log import logger

# libcurl version string of the installed pycurl build.
PYCURL_VERSION = pycurl.version_info()[1]


# Base error raised by this HTTP client; HTTPExceptions is the tuple callers
# should use in except clauses.
class HTTPException(Exception):
    pass


HTTPExceptions = (HTTPException, )


class HTTPClient(object):
    def __init__(self, address='', port=0, use_ssl=False, timeout=3,
def NavigaTor(controller, num_circuits=1, num_rttprobes=1, num_ttfbprobes=1,
              num_bwprobes=1, probesleep=0, num_threads=1, output='probe_',
              network_protection=True):
    """ Configure Tor client and start threads for probing the RTT and/or
        TTFB of Tor circuits.
        "controller": authenticated Tor Controller from stem.control.
        "num_circuits": number of circuits to be probed.
        "num_rttprobes": number of RTT probes to be taken for each circuit.
        "num_ttfbprobes": number of TTFB probes to be taken for each circuit.
        "num_bwprobes": number of bw probes to be taken for each circuit.
        "probesleep": number of seconds to wait between probes.
        "num_threads": number of threads to start that actually do the probing.
        "output": prefix for output file(s).
        "network_protection": Anti-Hammering protection for the Tor network.
    """
    def _version_tuple(version_string):
        # Parse a dotted version string ('7.21.7', '1.4.0', ...) into a tuple
        # of ints so comparisons are numeric.  The original code compared raw
        # strings, which is wrong: '7.9.0' >= '7.21.7' lexically, while
        # '7.100.0' < '7.21.7' lexically.
        parts = []
        for part in version_string.split('.'):
            digits = ''.join(ch for ch in part if ch.isdigit())
            parts.append(int(digits) if digits else 0)
        return tuple(parts)

    # RouterStatusEntryV3 support in Stem
    assert _version_tuple(get_distribution('stem').version) > (1, 4, 0), \
        'Stem module version must be greater then 1.4.0.'
    # socks5 + hostname support has been added in 7.21.7
    assert _version_tuple(pycurl.version_info()[1]) >= (7, 21, 7), \
        'pycurl version (%s) must be >= 7.21.7' % pycurl.version_info()[1]
    # Validate input parameters.
    assert isinstance(controller, Controller), \
        'Controller has wrong type: %s.' % type(controller)
    for i in num_circuits, num_rttprobes, num_ttfbprobes, num_bwprobes,\
            num_threads:
        assert isinstance(i, int), '%s has wrong type: %s.' % (i, type(i))
    # Maximum number of circuits that can be probed is limited by
    # the unique destination IP calculation. Currently there is no need to
    # raise this limit.
    max_circuits = 255 + 255 * 256 + 255 * pow(256, 2) - 1
    # Direct bounds check: "in range(1, max_circuits)" built a ~16M-element
    # list on Python 2 just to test membership.
    assert 1 <= num_circuits < max_circuits, \
        'num_circuits is out of range: %d.' % (num_circuits)
    assert controller.get_version() > Version('0.2.3'), \
        ('Your tor version (%s) is too old. ' % controller.get_version() +
         'Tor version 0.2.3.x is required.')
    assert controller.get_version() < Version('0.2.4'), \
        ('Your tor version (%s) is too new. ' % controller.get_version() +
         'Tor version 0.2.3.x is required.')
    try:
        # Configure tor client
        controller.set_conf("__DisablePredictedCircuits", "1")
        controller.set_conf("__LeaveStreamsUnattached", "1")
        controller.set_conf("MaxClientCircuitsPending", "1024")
        # Workaround ticket 9543. 10s average for each RTT probe and
        # 10s for each TTFB probe should be enough.
        max_dirtiness = (num_rttprobes + num_ttfbprobes) * 10
        if int(controller.get_conf("MaxCircuitDirtiness")) < max_dirtiness:
            controller.set_conf("MaxCircuitDirtiness", str(max_dirtiness))
        # Close all non-internal circuits.
        for circ in controller.get_circuits():
            if not circ.build_flags or 'IS_INTERNAL' not in circ.build_flags:
                controller.close_circuit(circ.id)
        manager = _Manager(controller, num_circuits, num_rttprobes,
                           num_ttfbprobes, num_bwprobes, probesleep,
                           num_threads, output, network_protection)
        # Join with a timeout so KeyboardInterrupt is delivered promptly.
        while True:
            manager.join(1)
            if not manager.is_alive():
                break
    except KeyboardInterrupt:
        pass
    finally:
        # Always undo the client configuration changes made above.
        controller.reset_conf("__DisablePredictedCircuits")
        controller.reset_conf("__LeaveStreamsUnattached")
        controller.reset_conf("MaxCircuitDirtiness")
        controller.reset_conf("MaxClientCircuitsPending")
        controller.close()
#!/usr/bin/env python #-*- coding:utf8 -*- import pycurl import StringIO print pycurl.version_info() html = StringIO.StringIO() url = r'https://mail.qq.com/cgi-bin/loginpage' c = pycurl.Curl() c.setopt(pycurl.URL, url) c.setopt(pycurl.SSL_VERIFYHOST, False) c.setopt(pycurl.SSL_VERIFYPEER, False) c.setopt(pycurl.WRITEFUNCTION, html.write) c.setopt(pycurl.FOLLOWLOCATION, 1) c.perform() print c.getinfo(pycurl.HTTP_CODE), c.getinfo(pycurl.EFFECTIVE_URL) print html.getvalue()
    def _queryTier0DataSvc( self, url ):
        """
        Queries Tier0DataSvc.
        url: Tier0DataSvc URL.
        @returns: dictionary, from whence the required information must be retrieved according to the API call.
        Raises if connection error, bad response, or timeout after retries occur.
        """
        # Identify ourselves with the Python and PycURL/libcurl versions in use.
        userAgent = "User-Agent: DQMIntegration/2.0 python/%d.%d.%d PycURL/%s" % ( sys.version_info[ :3 ] + ( pycurl.version_info()[ 1 ], ) )
        proxy = ""
        if self._proxy:
            proxy = ' --proxy %s ' % self._proxy
        # Default: silent but still show errors (-s -S); fully verbose when debugging.
        debug = " -s -S "
        if self._debug:
            debug = " -v "
        # -k skips certificate verification, -L follows redirects; retries are
        # delegated to curl itself via --retry.
        cmd = '/usr/bin/curl -k -L --user-agent "%s" %s --connect-timeout %i --retry %i %s %s ' % (userAgent, proxy, self._timeOut, self._retries, debug, url)
        process = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
        (stdoutdata, stderrdata) = process.communicate()
        retcode = process.returncode
        # Any stderr output is treated as a failure too, not only a nonzero exit.
        if retcode != 0 or stderrdata:
            msg = "looks like curl returned an error: retcode=%s" % (retcode,)
            msg += ' msg = "'+str(stderrdata)+'"'
            raise Tier0Error(msg)
        # The service payload is Python-repr-like: normalise single quotes and
        # bare None values so it parses as JSON.
        return json.loads( ''.join(stdoutdata).replace( "'", '"').replace(' None', ' "None"') )
import pycurl
from pycurl import Curl, CurlMulti

# Minimal option holder for this self-test script.
class opts:
    verbose = 1

# "-q" on the command line lowers verbosity.
if "-q" in sys.argv:
    opts.verbose = opts.verbose - 1

# Report interpreter and PycURL build details up front.
print "Python", sys.version
print "PycURL %s (compiled against 0x%x)" % (pycurl.version, pycurl.COMPILE_LIBCURL_VERSION_NUM)
print "PycURL version info", pycurl.version_info()
print " %s, compiled %s" % (pycurl.__file__, pycurl.COMPILE_DATE)

# /***********************************************************************
# // test misc
# ************************************************************************/

if 1:
    c = Curl()
    # Option constants exposed on Curl instances are the very same objects as
    # the module-level constants.
    assert c.URL is pycurl.URL
    del c

# /***********************************************************************
# // test handles
# ************************************************************************/
#!/usr/bin/env python
"""Print pycurl's build/version information tuple."""
import pycurl

build_info = pycurl.version_info()
print(build_info)
def version():
    """Return the GetService version banner, including the libcurl version."""
    libcurl_version = pycurl.version_info()[1]
    return "GetService 0.0.1 (Pycurl version %s)" % libcurl_version
    def __init__(self, curl_protocol, host, rootdir, http_parse=None):
        """
        Initialize a CurlDownloader.

        :param curl_protocol: (real) protocol to use
        :type curl_protocol: str (see :py:var:~CurlDownload.ALL_PROTOCOLS)
        :param host: server name
        :type host: str
        :param rootdir: base directory
        :type rootdir: str
        :param http_parse: object used to extract file information from HTML pages
        :type http_parse: py:class:HTTPParse.
        """
        DownloadInterface.__init__(self)
        self.logger.debug('Download')
        # Check for ssh support
        # version_info() mixes scalar fields and tuples (the isinstance check
        # below handles both), so flatten everything before searching for 'sftp'.
        curl_opts_info = pycurl.version_info()
        curl_opts = []
        for opt in curl_opts_info:
            if isinstance(opt, tuple):
                for o in opt:
                    curl_opts.append(o)
            else:
                curl_opts.append(opt)
        if 'sftp' not in curl_opts:
            # Downgrade the class-wide protocol list when libcurl lacks sftp.
            CurlDownload.ALL_PROTOCOLS = CurlDownload.FTP_PROTOCOL_FAMILY + CurlDownload.HTTP_PROTOCOL_FAMILY
            self.logger.warning("sftp not supported by curl: %s" % str(curl_opts_info))
        # Initialize curl_protocol.
        # Note that we don't change that field in set_protocol since this
        # method uses the protocol from the configuration file. It's not clear
        # what to do in this case.
        curl_protocol = curl_protocol.lower()
        if curl_protocol not in self.ALL_PROTOCOLS:
            raise ValueError(
                "curl_protocol must be one of %s (case insensitive). Got %s."
                % (self.ALL_PROTOCOLS, curl_protocol))
        self.curl_protocol = curl_protocol
        # Initialize protocol specific constants
        if self.curl_protocol in self.FTP_PROTOCOL_FAMILY:
            self.protocol_family = "ftp"
            self._parse_result = self._ftp_parse_result
            self.ERRCODE_OK = [221, 226]
        elif self.curl_protocol in self.HTTP_PROTOCOL_FAMILY:
            self.protocol_family = "http"
            self._parse_result = self._http_parse_result
            self.ERRCODE_OK = [200]
        elif self.curl_protocol in self.SFTP_PROTOCOL_FAMILY:
            self.protocol_family = "sftp"
            self._parse_result = self._ftp_parse_result
            self.ERRCODE_OK = [0]
        else:
            # Should not happen since we check before
            raise ValueError("Unknown protocol")
        self.rootdir = rootdir
        self.set_server(host)
        self.headers = {}
        self.http_parse = http_parse
        # Create the cURL object
        # This object is shared by all operations to use the cache.
        # Before using it, call method:`_basic_curl_configuration`.
        self.crl = pycurl.Curl()
        #
        # Initialize options
        #
        # Should we skip SSL verification (cURL -k/--insecure option)
        self.ssl_verifyhost = True
        self.ssl_verifypeer = True
        # Path to the certificate of the server (cURL --cacert option; PEM format)
        self.ssl_server_cert = None
        # Keep alive
        self.tcp_keepalive = 0
        # FTP method (cURL --ftp-method option)
        self.ftp_method = pycurl.FTPMETHOD_DEFAULT  # Use cURL default
        # TODO: Don't store default values in BiomajConfig.DEFAULTS for
        # ssh_hosts_file and ssh_new_hosts
        # known_hosts file
        self.ssh_hosts_file = BiomajConfig.DEFAULTS["ssh_hosts_file"]
        # How to treat unknown host
        self.ssh_new_host = self.VALID_SSH_NEW_HOST[
            BiomajConfig.DEFAULTS["ssh_new_host"]]
        # Allow redirections
        self.allow_redirections = True
    def test_ssl(self):
        # A plain GET over SSL should reach the handler.
        response = self.fetch('/')
        self.assertEqual(response.body, "Hello world")

    def test_large_post(self):
        # A body larger than a single socket read must arrive intact.
        response = self.fetch('/', method='POST', body='A'*5000)
        self.assertEqual(response.body, "Got 5000 bytes in POST")

    def test_non_ssl_request(self):
        # Make sure the server closes the connection when it gets a non-ssl
        # connection, rather than waiting for a timeout or otherwise
        # misbehaving.
        self.http_client.fetch(self.get_url("/"), self.stop,
                               request_timeout=3600,
                               connect_timeout=3600)
        response = self.wait()
        # 599 is the client-side "connection error/closed" pseudo-status.
        self.assertEqual(response.code, 599)

if (ssl is None or pycurl is None or
    (pycurl.version_info()[5].startswith('GnuTLS') and
     pycurl.version_info()[2] < 0x71400)):
    # Don't try to run ssl tests if we don't have the ssl module (python 2.5).
    # Additionally, when libcurl (< 7.21.0) is compiled against gnutls
    # instead of openssl (which is the default on at least some versions of
    # ubuntu), libcurl does the ssl handshake in blocking mode. That will
    # cause this test to deadlock as the blocking network ops happen in
    # the same IOLoop as the server.
    del SSLTest
class CurlDispatcher(object):
    """
    The CurlDispatcher is responsible for connecting, sending, and
    receiving data to a server.
    """
    # Imported at class scope: the class carries its own reference, used below
    # as CurlDispatcher.pycurl.
    import pycurl
    # NOTE(review): return value discarded — this call only exercises the
    # pycurl binding at class-definition time; confirm it is intentional.
    pycurl.version_info()

    def __init__(self, endpoint, username, password, verify_ssl):
        """
        The CurlDispatcher constructor.

        :param endpoint: the server URL
        :type endpoint: str
        :param username: the username for authentication
        :type username: str
        :param password: the password for authentication
        :type password: str
        :param verify_ssl: If True, ssl errors will cause an exception to be
            raised, otherwise, if False, they are ignored.
        :type verify_ssl: bool
        """
        self._endpoint = endpoint
        # Credentials are only set when at least one of username/password is given.
        self._credentials = str.format('{u}:{p}', u=username, p=password) \
            if (username or password) else None
        self._verify_ssl = verify_ssl
        # Defaults: 300s read timeout, 30s connect timeout (see
        # restore_timeout_defaults below).
        self._timeout = 300
        self._connect_timeout = 30

    def timeout(self, timeout_in_sec):
        """
        Set the time to wait for a response before timeout.

        :param timeout_in_sec: the read timeout in seconds.
        :type timeout_in_sec: int
        :raise ValueError: if timeout_in_sec is less than 0
        """
        temp_timeout = int(timeout_in_sec)
        if temp_timeout < 0:
            raise ValueError("Read Timeout less than 0")
        self._timeout = temp_timeout

    def connect_timeout(self, timeout_in_sec):
        """
        Set the time to wait for a connection to be established before timeout.

        :param timeout_in_sec: the connection timeout in seconds.
        :type timeout_in_sec: int
        :raise ValueError: if timeout_in_sec is less than 0
        """
        temp_timeout = int(timeout_in_sec)
        if temp_timeout < 0:
            raise ValueError("Connection Timeout less than 0")
        self._connect_timeout = temp_timeout

    def restore_timeout_defaults(self):
        """
        Restores the Connection and Read Timeout to their original durations
        of 30 seconds for connection timeout and 300 seconds (5 minutes) for
        read timeout.
        """
        self._timeout = 300
        self._connect_timeout = 30

    def post(self, data):
        """
        Post data to the associated endpoint and await the server's response.

        :param data: the data to be posted.
        :type data: str or json
        """
        # BytesIO fallback for interpreters whose io module lacks BytesIO.
        try:
            from io import BytesIO
            assert BytesIO
        except ImportError:
            from io import StringIO as BytesIO
        # closing() guarantees the curl handle is released after the transfer.
        with closing(CurlDispatcher.pycurl.Curl()) as c:
            c.setopt(c.URL, self._endpoint)
            obuffer = BytesIO()
            c.setopt(c.POSTFIELDS, data)
            c.setopt(c.WRITEFUNCTION, obuffer.write)
            c.setopt(c.CONNECTTIMEOUT, self._connect_timeout)
            c.setopt(c.TIMEOUT, self._timeout)
            if self._credentials:
                c.setopt(c.HTTPAUTH, c.HTTPAUTH_BASIC)
                c.setopt(c.USERPWD, self._credentials)
            if not self._verify_ssl:
                # Caller opted out of certificate and hostname verification.
                c.setopt(c.SSL_VERIFYPEER, 0)
                c.setopt(c.SSL_VERIFYHOST, 0)
            c.perform()
            return obuffer.getvalue().decode('utf-8')
def version():
    """Build and return the GetService version string."""
    banner = "GetService 0.0.1 (Pycurl version %s)"
    return banner % (pycurl.version_info()[1],)