Example #1
def encode_username_password(username, password):
    """Encodes a username/password pair in the format used by HTTP auth.

    The return value is a byte string in the form ``username:password``.

    .. versionadded:: 5.1
    """
    if isinstance(username, unicode_type):
        username = unicodedata.normalize('NFC', username)
    if isinstance(password, unicode_type):
        password = unicodedata.normalize('NFC', password)
    return utf8(username) + b":" + utf8(password)
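
A quick check of the contract above (a sketch, assuming Tornado >= 5.1 where
this function is exported as ``tornado.httputil.encode_username_password``)::

    from tornado.httputil import encode_username_password

    # Unicode input is NFC-normalized, then UTF-8 encoded; output is bytes.
    assert encode_username_password("alice", "s3cret") == b"alice:s3cret"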
Example #2
 def resolve(self, host, port, family=0):
     # getHostByName doesn't accept IP addresses, so if the input
     # looks like an IP address just return it immediately.
     if twisted.internet.abstract.isIPAddress(host):
         resolved = host
         resolved_family = socket.AF_INET
     elif twisted.internet.abstract.isIPv6Address(host):
         resolved = host
         resolved_family = socket.AF_INET6
     else:
         deferred = self.resolver.getHostByName(utf8(host))
         fut = Future()
         deferred.addBoth(fut.set_result)
         resolved = yield fut
         if isinstance(resolved, failure.Failure):
             try:
                 resolved.raiseException()
             except twisted.names.error.DomainError as e:
                 raise IOError(e)
         elif twisted.internet.abstract.isIPAddress(resolved):
             resolved_family = socket.AF_INET
         elif twisted.internet.abstract.isIPv6Address(resolved):
             resolved_family = socket.AF_INET6
         else:
             resolved_family = socket.AF_UNSPEC
     if family != socket.AF_UNSPEC and family != resolved_family:
         raise Exception('Requested socket family %d but got %d' %
                         (family, resolved_family))
     result = [
         (resolved_family, (resolved, port)),
     ]
     raise gen.Return(result)
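
The heart of this example is the Deferred-to-Future bridge: the Twisted
``Deferred`` delivers its terminal value (a result or a ``Failure``) into a
Tornado ``Future`` that a coroutine can yield. That step in isolation, with
``deferred`` standing in for any Deferred-returning Twisted call::

    from tornado.concurrent import Future

    def deferred_to_future(deferred):
        fut = Future()
        # addBoth fires on success and failure alike, so a
        # twisted.python.failure.Failure may arrive as the Future's
        # result and must be checked by the caller (as resolve() does).
        deferred.addBoth(fut.set_result)
        return fut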
Example #3
def parse_body_arguments(content_type, body, arguments, files, headers=None):
    """Parses a form request body.

    Supports ``application/x-www-form-urlencoded`` and
    ``multipart/form-data``.  The ``content_type`` parameter should be
    a string and ``body`` should be a byte string.  The ``arguments``
    and ``files`` parameters are dictionaries that will be updated
    with the parsed contents.
    """
    if headers and 'Content-Encoding' in headers:
        gen_log.warning("Unsupported Content-Encoding: %s",
                        headers['Content-Encoding'])
        return
    if content_type.startswith("application/x-www-form-urlencoded"):
        try:
            uri_arguments = parse_qs_bytes(native_str(body), keep_blank_values=True)
        except Exception as e:
            gen_log.warning('Invalid x-www-form-urlencoded body: %s', e)
            uri_arguments = {}
        for name, values in uri_arguments.items():
            if values:
                arguments.setdefault(name, []).extend(values)
    elif content_type.startswith("multipart/form-data"):
        try:
            fields = content_type.split(";")
            for field in fields:
                k, sep, v = field.strip().partition("=")
                if k == "boundary" and v:
                    parse_multipart_form_data(utf8(v), body, arguments, files)
                    break
            else:
                raise ValueError("multipart boundary not found")
        except Exception as e:
            gen_log.warning("Invalid multipart/form-data: %s", e)
Example #4
    def generate(self, writer):
        value = self.value

        # Compress whitespace if requested, with a crude heuristic to avoid
        # altering preformatted whitespace.
        if "<pre>" not in value:
            value = filter_whitespace(self.whitespace, value)

        if value:
            writer.write_line('_tt_append(%r)' % escape.utf8(value), self.line)
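
For reference, the three modes ``filter_whitespace`` accepts (public as
``tornado.template.filter_whitespace`` since Tornado 4.3); the results shown
are what the current implementation produces::

    from tornado.template import filter_whitespace

    filter_whitespace("all", "a\n\n  b")      # unchanged: 'a\n\n  b'
    filter_whitespace("single", "a\n\n  b")   # runs collapse: 'a\nb'
    filter_whitespace("oneline", "a\n\n  b")  # everything to spaces: 'a b'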
Example #5
 def _on_request_token(self, authorize_url, callback_uri, callback,
                       response_fut):
     try:
         response = response_fut.result()
     except Exception as e:
         raise Exception("Could not get request token: %s" % e)
     request_token = _oauth_parse_response(response.body)
     data = (base64.b64encode(escape.utf8(request_token["key"])) + b"|" +
             base64.b64encode(escape.utf8(request_token["secret"])))
     self.set_cookie("_oauth_request_token", data)
     args = dict(oauth_token=request_token["key"])
     if callback_uri == "oob":
         self.finish(authorize_url + "?" + urllib_parse.urlencode(args))
         callback()
         return
     elif callback_uri:
         args["oauth_callback"] = urlparse.urljoin(
             self.request.full_url(), callback_uri)
     self.redirect(authorize_url + "?" + urllib_parse.urlencode(args))
     callback()
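
The cookie written here packs the token as ``b64(key)|b64(secret)``; ``|``
is a safe separator because it never appears in base64 output. A sketch of
the round trip (helper names are illustrative, not Tornado API)::

    import base64

    def pack_token(key, secret):
        return base64.b64encode(key) + b"|" + base64.b64encode(secret)

    def unpack_token(cookie):
        key_b64, secret_b64 = cookie.split(b"|")
        return base64.b64decode(key_b64), base64.b64decode(secret_b64)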
Example #6
def _oauth10a_signature(consumer_token, method, url, parameters={}, token=None):
    """Calculates the HMAC-SHA1 OAuth 1.0a signature for the given request.

    See http://oauth.net/core/1.0a/#signing_process
    """
    parts = urlparse.urlparse(url)
    scheme, netloc, path = parts[:3]
    normalized_url = scheme.lower() + "://" + netloc.lower() + path

    base_elems = []
    base_elems.append(method.upper())
    base_elems.append(normalized_url)
    base_elems.append("&".join("%s=%s" % (k, _oauth_escape(str(v)))
                               for k, v in sorted(parameters.items())))

    base_string = "&".join(_oauth_escape(e) for e in base_elems)
    key_elems = [escape.utf8(urllib_parse.quote(consumer_token["secret"], safe='~'))]
    key_elems.append(escape.utf8(urllib_parse.quote(token["secret"], safe='~') if token else ""))
    key = b"&".join(key_elems)

    hash = hmac.new(key, escape.utf8(base_string), hashlib.sha1)
    return binascii.b2a_base64(hash.digest())[:-1]
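
A hypothetical call, using the token-dict shape Tornado's auth code passes
around (``{"key": ..., "secret": ...}``)::

    consumer = {"key": "my-consumer-key", "secret": "my-consumer-secret"}
    token = {"key": "request-token", "secret": "request-secret"}
    sig = _oauth10a_signature(consumer, "GET",
                              "https://api.example.com/1/resource",
                              {"oauth_nonce": "abc123"}, token)
    # sig is the base64-encoded HMAC-SHA1 digest as bytes
    # (binascii.b2a_base64's trailing newline is stripped by the [:-1])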
Example #7
 def reverse(self, *args):
     if self._path is None:
         raise ValueError("Cannot reverse url regex " + self.regex.pattern)
     assert len(args) == self._group_count, "required number of arguments " \
                                            "not found"
     if not len(args):
         return self._path
     converted_args = []
     for a in args:
         if not isinstance(a, (unicode_type, bytes)):
             a = str(a)
         converted_args.append(url_escape(utf8(a), plus=False))
     return self._path % tuple(converted_args)
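
Reversing in action (a sketch; ``None`` stands in for a handler class and
the route is made up)::

    from tornado.web import url

    spec = url(r"/posts/([0-9]+)/comments/([0-9]+)", None, name="comment")
    spec.reverse(42, 7)  # -> '/posts/42/comments/7'
    # non-string arguments are str()'d, then UTF-8 encoded and percent-escaped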
Example #8
    def get_authenticated_user(self, callback, http_client=None):
        """Gets the OAuth authorized user and access token.

        This method should be called from the handler for your
        OAuth callback URL to complete the registration process. We run the
        callback with the authenticated user dictionary.  This dictionary
        will contain an ``access_key`` which can be used to make authorized
        requests to this service on behalf of the user.  The dictionary will
        also contain other fields such as ``name``, depending on the service
        used.

        .. deprecated:: 5.1

           The ``callback`` argument is deprecated and will be removed in 6.0.
           Use the returned awaitable object instead.
        """
        # Under the auth module's return-future decorator (not shown here),
        # ``callback`` is actually the Future this method resolves.
        future = callback
        request_key = escape.utf8(self.get_argument("oauth_token"))
        oauth_verifier = self.get_argument("oauth_verifier", None)
        request_cookie = self.get_cookie("_oauth_request_token")
        if not request_cookie:
            future.set_exception(AuthError(
                "Missing OAuth request token cookie"))
            return
        self.clear_cookie("_oauth_request_token")
        cookie_key, cookie_secret = [
            base64.b64decode(escape.utf8(i)) for i in request_cookie.split("|")]
        if cookie_key != request_key:
            future.set_exception(AuthError(
                "Request token does not match cookie"))
            return
        token = dict(key=cookie_key, secret=cookie_secret)
        if oauth_verifier:
            token["verifier"] = oauth_verifier
        if http_client is None:
            http_client = self.get_auth_http_client()
        fut = http_client.fetch(self._oauth_access_token_url(token))
        fut.add_done_callback(wrap(functools.partial(self._on_access_token, callback)))
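
Typical call site (a sketch assuming Tornado 5.1, where the auth mixins
return awaitables; ``TwitterMixin`` is one concrete ``OAuthMixin``)::

    class TwitterLoginHandler(tornado.web.RequestHandler,
                              tornado.auth.TwitterMixin):
        async def get(self):
            if self.get_argument("oauth_token", None):
                user = await self.get_authenticated_user()
                # save `user` (it includes the access token), e.g. in a
                # secure cookie
            else:
                await self.authorize_redirect()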
Example #9
 def _format_chunk(self, chunk):
     if self._expected_content_remaining is not None:
         self._expected_content_remaining -= len(chunk)
         if self._expected_content_remaining < 0:
             # Close the stream now to stop further framing errors.
             self.stream.close()
             raise httputil.HTTPOutputError(
                 "Tried to write more data than Content-Length")
     if self._chunking_output and chunk:
         # Don't write out empty chunks because that means END-OF-STREAM
         # with chunked encoding
         return utf8("%x" % len(chunk)) + b"\r\n" + chunk + b"\r\n"
     else:
         return chunk
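
The chunked transfer framing produced above, by hand: each chunk goes out as
``<hex length>\r\n<data>\r\n``, and the zero-length terminator frame is
written separately when the stream finishes::

    chunk = b"hello"
    framed = ("%x" % len(chunk)).encode("ascii") + b"\r\n" + chunk + b"\r\n"
    # framed == b'5\r\nhello\r\n'; the final frame on the wire is b'0\r\n\r\n'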
Example #10
 def environ(request):
     """Converts a `tornado.httputil.HTTPServerRequest` to a WSGI environment.
     """
     hostport = request.host.split(":")
     if len(hostport) == 2:
         host = hostport[0]
         port = int(hostport[1])
     else:
         host = request.host
         port = 443 if request.protocol == "https" else 80
     environ = {
         "REQUEST_METHOD": request.method,
         "SCRIPT_NAME": "",
         "PATH_INFO": to_wsgi_str(
             escape.url_unescape(request.path, encoding=None, plus=False)),
         "QUERY_STRING": request.query,
         "REMOTE_ADDR": request.remote_ip,
         "SERVER_NAME": host,
         "SERVER_PORT": str(port),
         "SERVER_PROTOCOL": request.version,
         "wsgi.version": (1, 0),
         "wsgi.url_scheme": request.protocol,
         "wsgi.input": BytesIO(escape.utf8(request.body)),
         "wsgi.errors": sys.stderr,
         "wsgi.multithread": False,
         "wsgi.multiprocess": True,
         "wsgi.run_once": False,
     }
     if "Content-Type" in request.headers:
         environ["CONTENT_TYPE"] = request.headers.pop("Content-Type")
     if "Content-Length" in request.headers:
         environ["CONTENT_LENGTH"] = request.headers.pop("Content-Length")
     for key, value in request.headers.items():
         environ["HTTP_" + key.replace("-", "_").upper()] = value
     return environ
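
The header translation at the end follows the standard WSGI convention:
every request header becomes an ``HTTP_*`` key with dashes turned into
underscores. In isolation::

    headers = {"User-Agent": "curl/7.58.0", "X-Request-Id": "abc123"}
    environ = {"HTTP_" + k.replace("-", "_").upper(): v
               for k, v in headers.items()}
    # {'HTTP_USER_AGENT': 'curl/7.58.0', 'HTTP_X_REQUEST_ID': 'abc123'}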
Example #11
    def __call__(self, request):
        data = {}
        response = []

        def start_response(status, response_headers, exc_info=None):
            data["status"] = status
            data["headers"] = response_headers
            return response.append

        app_response = self.wsgi_application(WSGIContainer.environ(request),
                                             start_response)
        try:
            response.extend(app_response)
            body = b"".join(response)
        finally:
            if hasattr(app_response, "close"):
                app_response.close()
        if not data:
            raise Exception("WSGI app did not call start_response")

        status_code, reason = data["status"].split(' ', 1)
        status_code = int(status_code)
        headers = data["headers"]
        header_set = set(k.lower() for (k, v) in headers)
        body = escape.utf8(body)
        if status_code != 304:
            if "content-length" not in header_set:
                headers.append(("Content-Length", str(len(body))))
            if "content-type" not in header_set:
                headers.append(("Content-Type", "text/html; charset=UTF-8"))
        if "server" not in header_set:
            headers.append(
                ("Server", "TornadoServer/%s" % tornado_py2.version))

        start_line = httputil.ResponseStartLine("HTTP/1.1", status_code,
                                                reason)
        header_obj = httputil.HTTPHeaders()
        for key, value in headers:
            header_obj.add(key, value)
        request.connection.write_headers(start_line, header_obj, chunk=body)
        request.connection.finish()
        self._log(status_code, request)
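
How this container is normally wired up (a sketch following Tornado's
documented pre-6.0 pattern; ``simple_app`` is a hypothetical WSGI callable)::

    def simple_app(environ, start_response):
        start_response("200 OK", [("Content-Type", "text/plain")])
        return [b"Hello, world\n"]

    container = tornado.wsgi.WSGIContainer(simple_app)
    server = tornado.httpserver.HTTPServer(container)
    server.listen(8888)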
Example #12
def load_translations(directory, encoding=None):
    """Loads translations from CSV files in a directory.

    Translations are strings with optional Python-style named placeholders
    (e.g., ``My name is %(name)s``) and their associated translations.

    The directory should have translation files of the form ``LOCALE.csv``,
    e.g. ``es_GT.csv``. The CSV files should have two or three columns: string,
    translation, and an optional plural indicator. Plural indicators should
    be one of "plural" or "singular". A given string can have both singular
    and plural forms. For example ``%(name)s liked this`` may have a
    different verb conjugation depending on whether %(name)s is one
    name or a list of names. There should be two rows in the CSV file for
    that string, one with plural indicator "singular", and one "plural".
    For strings with no verbs that would change on translation, simply
    use "unknown" or the empty string (or don't include the column at all).

    The file is read using the `csv` module in the default "excel" dialect.
    In this format there should not be spaces after the commas.

    If no ``encoding`` parameter is given, the encoding will be
    detected automatically (among UTF-8 and UTF-16) if the file
    contains a byte-order marker (BOM), defaulting to UTF-8 if no BOM
    is present.

    Example translation ``es_LA.csv``::

        "I love you","Te amo"
        "%(name)s liked this","A %(name)s les gustó esto","plural"
        "%(name)s liked this","A %(name)s le gustó esto","singular"

    .. versionchanged:: 4.3
       Added ``encoding`` parameter. Added support for BOM-based encoding
       detection, UTF-16, and UTF-8-with-BOM.
    """
    global _translations
    global _supported_locales
    _translations = {}
    for path in os.listdir(directory):
        if not path.endswith(".csv"):
            continue
        locale, extension = path.split(".")
        if not re.match("[a-z]+(_[A-Z]+)?$", locale):
            gen_log.error("Unrecognized locale %r (path: %s)", locale,
                          os.path.join(directory, path))
            continue
        full_path = os.path.join(directory, path)
        if encoding is None:
            # Try to autodetect encoding based on the BOM.
            with open(full_path, 'rb') as f:
                data = f.read(len(codecs.BOM_UTF16_LE))
            if data in (codecs.BOM_UTF16_LE, codecs.BOM_UTF16_BE):
                encoding = 'utf-16'
            else:
                # utf-8-sig is "utf-8 with optional BOM". It's discouraged
                # in most cases but is common with CSV files because Excel
                # cannot read utf-8 files without a BOM.
                encoding = 'utf-8-sig'
        if PY3:
            # python 3: csv.reader requires a file open in text mode.
            # Force utf8 to avoid dependence on $LANG environment variable.
            f = open(full_path, "r", encoding=encoding)
        else:
            # python 2: csv can only handle byte strings (in ascii-compatible
            # encodings), which we decode below. Transcode everything into
            # utf8 before passing it to csv.reader.
            f = BytesIO()
            with codecs.open(full_path, "r", encoding=encoding) as infile:
                f.write(escape.utf8(infile.read()))
            f.seek(0)
        _translations[locale] = {}
        for i, row in enumerate(csv.reader(f)):
            if not row or len(row) < 2:
                continue
            row = [escape.to_unicode(c).strip() for c in row]
            english, translation = row[:2]
            if len(row) > 2:
                plural = row[2] or "unknown"
            else:
                plural = "unknown"
            if plural not in ("plural", "singular", "unknown"):
                gen_log.error("Unrecognized plural indicator %r in %s line %d",
                              plural, path, i + 1)
                continue
            _translations[locale].setdefault(plural, {})[english] = translation
        f.close()
    _supported_locales = frozenset(
        list(_translations.keys()) + [_default_locale])
    gen_log.debug("Supported locales: %s", sorted(_supported_locales))
Example #13
 def write_headers(self, start_line, headers, chunk=None, callback=None):
     """Implements `.HTTPConnection.write_headers`."""
     lines = []
     if self.is_client:
         self._request_start_line = start_line
         lines.append(
             utf8('%s %s HTTP/1.1' % (start_line[0], start_line[1])))
         # Client requests with a non-empty body must have either a
         # Content-Length or a Transfer-Encoding.
         self._chunking_output = (start_line.method
                                  in ('POST', 'PUT', 'PATCH')
                                  and 'Content-Length' not in headers
                                  and 'Transfer-Encoding' not in headers)
     else:
         self._response_start_line = start_line
         lines.append(
             utf8('HTTP/1.1 %d %s' % (start_line[1], start_line[2])))
         self._chunking_output = (
             # TODO: should this use
             # self._request_start_line.version or
             # start_line.version?
             self._request_start_line.version == 'HTTP/1.1' and
             # 1xx, 204 and 304 responses have no body (not even a zero-length
             # body), and so should not have either Content-Length or
             # Transfer-Encoding headers.
             start_line.code not in (204, 304)
             and (start_line.code < 100 or start_line.code >= 200) and
             # No need to chunk the output if a Content-Length is specified.
             'Content-Length' not in headers and
             # Applications are discouraged from touching Transfer-Encoding,
             # but if they do, leave it alone.
             'Transfer-Encoding' not in headers)
         # If connection to a 1.1 client will be closed, inform client
         if (self._request_start_line.version == 'HTTP/1.1'
                 and self._disconnect_on_finish):
             headers['Connection'] = 'close'
         # If a 1.0 client asked for keep-alive, add the header.
         if (self._request_start_line.version == 'HTTP/1.0'
                 and self._request_headers.get('Connection',
                                               '').lower() == 'keep-alive'):
             headers['Connection'] = 'Keep-Alive'
     if self._chunking_output:
         headers['Transfer-Encoding'] = 'chunked'
     if (not self.is_client and (self._request_start_line.method == 'HEAD'
                                 or start_line.code == 304)):
         self._expected_content_remaining = 0
     elif 'Content-Length' in headers:
         self._expected_content_remaining = int(headers['Content-Length'])
     else:
         self._expected_content_remaining = None
     # TODO: headers are supposed to be of type str, but we still have some
     # cases that let bytes slip through. Remove these native_str calls when those
     # are fixed.
     header_lines = (native_str(n) + ": " + native_str(v)
                     for n, v in headers.get_all())
     if PY3:
         lines.extend(l.encode('latin1') for l in header_lines)
     else:
         lines.extend(header_lines)
     for line in lines:
         if b'\n' in line:
             raise ValueError('Newline in header: ' + repr(line))
     future = None
     if self.stream.closed():
         future = self._write_future = Future()
         future.set_exception(iostream.StreamClosedError())
         future.exception()
     else:
         if callback is not None:
             warnings.warn(
                 "callback argument is deprecated, use returned Future instead",
                 DeprecationWarning)
             self._write_callback = stack_context.wrap(callback)
         else:
             future = self._write_future = Future()
         data = b"\r\n".join(lines) + b"\r\n\r\n"
         if chunk:
             data += self._format_chunk(chunk)
         self._pending_write = self.stream.write(data)
         future_add_done_callback(self._pending_write,
                                  self._on_write_complete)
     return future
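
The response-side chunking decision is worth restating on its own: chunk only
when the request was HTTP/1.1, the status code permits a body, and the
application has not already fixed the framing itself. A condensed sketch::

    def should_chunk(request_version, code, headers):
        return (request_version == "HTTP/1.1"
                and code not in (204, 304)           # bodiless statuses
                and (code < 100 or code >= 200)      # 1xx have no body either
                and "Content-Length" not in headers  # already length-framed
                and "Transfer-Encoding" not in headers)  # app chose its own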
Example #14
 def body(self, value):
     self._body = utf8(value)
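
``utf8`` (from ``tornado.escape``) passes ``bytes`` and ``None`` through
untouched and UTF-8-encodes text, so this setter accepts either string type::

    from tornado.escape import utf8

    utf8(u"héllo")  # -> b'h\xc3\xa9llo'
    utf8(b"raw")    # -> b'raw' (unchanged)
    utf8(None)      # -> None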
Example #15
    def _curl_setup_request(self, curl, request, buffer, headers):
        curl.setopt(pycurl.URL, native_str(request.url))

        # libcurl's magic "Expect: 100-continue" behavior causes delays
        # with servers that don't support it (which include, among others,
        # Google's OpenID endpoint).  Additionally, this behavior has
        # a bug in conjunction with the curl_multi_socket_action API
        # (https://sourceforge.net/tracker/?func=detail&atid=100976&aid=3039744&group_id=976),
        # which increases the delays.  It's more trouble than it's worth,
        # so just turn off the feature (yes, setting Expect: to an empty
        # value is the official way to disable this)
        if "Expect" not in request.headers:
            request.headers["Expect"] = ""

        # libcurl adds Pragma: no-cache by default; disable that too
        if "Pragma" not in request.headers:
            request.headers["Pragma"] = ""

        curl.setopt(pycurl.HTTPHEADER, [
            "%s: %s" % (native_str(k), native_str(v))
            for k, v in request.headers.get_all()
        ])

        curl.setopt(
            pycurl.HEADERFUNCTION,
            functools.partial(self._curl_header_callback, headers,
                              request.header_callback))
        if request.streaming_callback:

            def write_function(chunk):
                self.io_loop.add_callback(request.streaming_callback, chunk)
        else:
            write_function = buffer.write
        curl.setopt(pycurl.WRITEFUNCTION, write_function)
        curl.setopt(pycurl.FOLLOWLOCATION, request.follow_redirects)
        curl.setopt(pycurl.MAXREDIRS, request.max_redirects)
        curl.setopt(pycurl.CONNECTTIMEOUT_MS,
                    int(1000 * request.connect_timeout))
        curl.setopt(pycurl.TIMEOUT_MS, int(1000 * request.request_timeout))
        if request.user_agent:
            curl.setopt(pycurl.USERAGENT, native_str(request.user_agent))
        else:
            curl.setopt(pycurl.USERAGENT, "Mozilla/5.0 (compatible; pycurl)")
        if request.network_interface:
            curl.setopt(pycurl.INTERFACE, request.network_interface)
        if request.decompress_response:
            curl.setopt(pycurl.ENCODING, "gzip,deflate")
        else:
            curl.setopt(pycurl.ENCODING, "none")
        if request.proxy_host and request.proxy_port:
            curl.setopt(pycurl.PROXY, request.proxy_host)
            curl.setopt(pycurl.PROXYPORT, request.proxy_port)
            if request.proxy_username:
                credentials = httputil.encode_username_password(
                    request.proxy_username, request.proxy_password)
                curl.setopt(pycurl.PROXYUSERPWD, credentials)

            if (request.proxy_auth_mode is None
                    or request.proxy_auth_mode == "basic"):
                curl.setopt(pycurl.PROXYAUTH, pycurl.HTTPAUTH_BASIC)
            elif request.proxy_auth_mode == "digest":
                curl.setopt(pycurl.PROXYAUTH, pycurl.HTTPAUTH_DIGEST)
            else:
                raise ValueError("Unsupported proxy_auth_mode %s" %
                                 request.proxy_auth_mode)
        else:
            curl.setopt(pycurl.PROXY, '')
            curl.unsetopt(pycurl.PROXYUSERPWD)
        if request.validate_cert:
            curl.setopt(pycurl.SSL_VERIFYPEER, 1)
            curl.setopt(pycurl.SSL_VERIFYHOST, 2)
        else:
            curl.setopt(pycurl.SSL_VERIFYPEER, 0)
            curl.setopt(pycurl.SSL_VERIFYHOST, 0)
        if request.ca_certs is not None:
            curl.setopt(pycurl.CAINFO, request.ca_certs)
        else:
            # There is no way to restore pycurl.CAINFO to its default value
            # (Using unsetopt makes it reject all certificates).
            # I don't see any way to read the default value from python so it
            # can be restored later.  We'll have to just leave CAINFO untouched
            # if no ca_certs file was specified, and require that if any
            # request uses a custom ca_certs file, they all must.
            pass

        if request.allow_ipv6 is False:
            # Curl behaves reasonably when DNS resolution gives an ipv6 address
            # that we can't reach, so allow ipv6 unless the user asks to disable.
            curl.setopt(pycurl.IPRESOLVE, pycurl.IPRESOLVE_V4)
        else:
            curl.setopt(pycurl.IPRESOLVE, pycurl.IPRESOLVE_WHATEVER)

        # Set the request method through curl's irritating interface which makes
        # up names for almost every single method
        curl_options = {
            "GET": pycurl.HTTPGET,
            "POST": pycurl.POST,
            "PUT": pycurl.UPLOAD,
            "HEAD": pycurl.NOBODY,
        }
        custom_methods = set(["DELETE", "OPTIONS", "PATCH"])
        for o in curl_options.values():
            curl.setopt(o, False)
        if request.method in curl_options:
            curl.unsetopt(pycurl.CUSTOMREQUEST)
            curl.setopt(curl_options[request.method], True)
        elif request.allow_nonstandard_methods or request.method in custom_methods:
            curl.setopt(pycurl.CUSTOMREQUEST, request.method)
        else:
            raise KeyError('unknown method ' + request.method)

        body_expected = request.method in ("POST", "PATCH", "PUT")
        body_present = request.body is not None
        if not request.allow_nonstandard_methods:
            # Some HTTP methods nearly always have bodies while others
            # almost never do. Fail in this case unless the user has
            # opted out of sanity checks with allow_nonstandard_methods.
            if ((body_expected and not body_present)
                    or (body_present and not body_expected)):
                raise ValueError(
                    'Body must %sbe None for method %s (unless '
                    'allow_nonstandard_methods is true)' %
                    ('not ' if body_expected else '', request.method))

        if body_expected or body_present:
            if request.method == "GET":
                # Even with `allow_nonstandard_methods` we disallow
                # GET with a body (because libcurl doesn't allow it
                # unless we use CUSTOMREQUEST). While the spec doesn't
                # forbid clients from sending a body, it arguably
                # disallows the server from doing anything with them.
                raise ValueError('Body must be None for GET request')
            request_buffer = BytesIO(utf8(request.body or ''))

            def ioctl(cmd):
                if cmd == curl.IOCMD_RESTARTREAD:
                    request_buffer.seek(0)

            curl.setopt(pycurl.READFUNCTION, request_buffer.read)
            curl.setopt(pycurl.IOCTLFUNCTION, ioctl)
            if request.method == "POST":
                curl.setopt(pycurl.POSTFIELDSIZE, len(request.body or ''))
            else:
                curl.setopt(pycurl.UPLOAD, True)
                curl.setopt(pycurl.INFILESIZE, len(request.body or ''))

        if request.auth_username is not None:
            if request.auth_mode is None or request.auth_mode == "basic":
                curl.setopt(pycurl.HTTPAUTH, pycurl.HTTPAUTH_BASIC)
            elif request.auth_mode == "digest":
                curl.setopt(pycurl.HTTPAUTH, pycurl.HTTPAUTH_DIGEST)
            else:
                raise ValueError("Unsupported auth_mode %s" %
                                 request.auth_mode)

            userpwd = httputil.encode_username_password(
                request.auth_username, request.auth_password)
            curl.setopt(pycurl.USERPWD, userpwd)
            curl_log.debug("%s %s (username: %r)", request.method, request.url,
                           request.auth_username)
        else:
            curl.unsetopt(pycurl.USERPWD)
            curl_log.debug("%s %s", request.method, request.url)

        if request.client_cert is not None:
            curl.setopt(pycurl.SSLCERT, request.client_cert)

        if request.client_key is not None:
            curl.setopt(pycurl.SSLKEY, request.client_key)

        if request.ssl_options is not None:
            raise ValueError("ssl_options not supported in curl_httpclient")

        if threading.activeCount() > 1:
            # libcurl/pycurl is not thread-safe by default.  When multiple threads
            # are used, signals should be disabled.  This has the side effect
            # of disabling DNS timeouts in some environments (when libcurl is
            # not linked against ares), so we don't do it when there is only one
            # thread.  Applications that use many short-lived threads may need
            # to set NOSIGNAL manually in a prepare_curl_callback since
            # there may not be any other threads running at the time we call
            # threading.activeCount.
            curl.setopt(pycurl.NOSIGNAL, 1)
        if request.prepare_curl_callback is not None:
            request.prepare_curl_callback(curl)
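
The last branch mentions ``prepare_curl_callback``: it is the escape hatch
for setting any pycurl option this method does not cover. A sketch (the
option is real pycurl; the request itself is illustrative)::

    import pycurl
    from tornado.httpclient import HTTPRequest

    def prepare(curl):
        # e.g. force NOSIGNAL for apps with many short-lived threads,
        # as the threading.activeCount() comment above suggests
        curl.setopt(pycurl.NOSIGNAL, 1)

    req = HTTPRequest("https://example.com/", prepare_curl_callback=prepare)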