def _SetupSSL(path):
  """Point urlfetch at the CA-certificate bundle at *path*.

  Sets the module-level CERT_PATH to *path* when the file exists, or to
  None (with a warning) when it does not.  Also warns once when no ssl
  support is available at all, since certificate validation is then
  impossible regardless of CERT_PATH.
  """
  global CERT_PATH
  # Only trust the bundle if it is actually present on disk.
  CERT_PATH = path if os.path.exists(path) else None
  if CERT_PATH is None:
    logging.warning('%s missing; without this urlfetch will not be able to '
                    'validate SSL certificates.', path)
  # Even with a cert bundle, validation needs ssl support in the runtime.
  if not fancy_urllib.can_validate_certs():
    logging.warning('No ssl package found. urlfetch will not be able to '
                    'validate SSL certificates.')
def _Authenticate(self):
  """Authenticate through the parent class, then persist the cookie jar.

  Warns when certificate validation was requested but the ssl module is
  unavailable, since the connection identity then cannot be verified.
  """
  # Warn up front: a cert file is configured but ssl support is missing.
  missing_ssl = (self.cert_file_available and
                 not fancy_urllib.can_validate_certs())
  if missing_ssl:
    logger.warn("""ssl module not found. Without the ssl module, the identity of the remote host cannot be verified, and connections may NOT be secure. To fix this, please install the ssl module from http://pypi.python.org/pypi/ssl . To learn more, see http://code.google.com/appengine/kb/general.html#rpcssl .""")
  super(HttpRpcServer, self)._Authenticate()
  # Persist cookies only when saving is enabled and a jar file is configured.
  if self.save_cookies and self.cookie_jar.filename is not None:
    logger.info("Saving authentication cookies to %s",
                self.cookie_jar.filename)
    self.cookie_jar.save()
def _SetupSSL(path):
  """Record *path* as the CA-certificate bundle used by urlfetch.

  Clears CERT_PATH (with a warning) when the file does not exist, and
  additionally warns when the runtime lacks ssl support entirely.
  """
  global CERT_PATH
  if not os.path.exists(path):
    # No bundle on disk: disable validation and tell the user why.
    CERT_PATH = None
    logging.warning(
        '%s missing; without this urlfetch will not be able to '
        'validate SSL certificates.', path)
  else:
    CERT_PATH = path
  if not fancy_urllib.can_validate_certs():
    # Without ssl support, the bundle alone cannot validate anything.
    logging.warning('No ssl package found. urlfetch will not be able to '
                    'validate SSL certificates.')
def _Authenticate(self):
  """Run parent-class authentication, then save and sanity-check cookies."""
  # TODO(user): This warning will not fire if the user is already logged
  # in; we may also want to warn on existing connections.
  if self.cert_file_available and not fancy_urllib.can_validate_certs():
    logger.warn("""ssl module not found. Without the ssl module, the identity of the remote host cannot be verified, and connections may NOT be secure. To fix this, please install the ssl module from http://pypi.python.org/pypi/ssl . To learn more, see https://developers.google.com/appengine/kb/general#rpcssl""")
  super(HttpRpcServer, self)._Authenticate()
  # Persist cookies when enabled and a jar file exists, then verify the
  # saved cookie state.
  should_save = (self.cookie_jar.filename is not None and self.save_cookies)
  if should_save:
    logger.debug("Saving authentication cookies to %s",
                 self.cookie_jar.filename)
    self.cookie_jar.save()
    self._CheckCookie()
def _CreateRequest(self, url, data=None):
  """Create a urllib request, attaching CA-certificate info when possible.

  Delegates construction to the parent class; when a certificate file is
  available and the runtime can validate certificates, the request is
  tagged with the CA bundle path so the connection can be verified.
  """
  new_request = super(HttpRpcServer, self)._CreateRequest(url, data)
  can_use_certs = (self.cert_file_available and
                   fancy_urllib.can_validate_certs())
  if can_use_certs:
    new_request.set_ssl_info(ca_certs=self.certpath)
  return new_request
def _CanValidateCerts():
  """Return whether fancy_urllib imported successfully and can validate certs."""
  # Preserve short-circuit semantics: if the import flag is falsy we
  # return it directly without touching fancy_urllib at all.
  result = _successfully_imported_fancy_urllib
  if result:
    result = fancy_urllib.can_validate_certs()
  return result
def _RetrieveURL(self, url, payload, method, headers, request, response,
                 follow_redirects=True, deadline=_API_CALL_DEADLINE,
                 validate_certificate=_API_CALL_VALIDATE_CERTIFICATE_DEFAULT):
  """Retrieves a URL.

  Args:
    url: String containing the URL to access.
    payload: Request payload to send, if any; None if no payload.
    method: HTTP method to use (e.g., 'GET')
    headers: List of additional header objects to use for the request.
    request: Request object from original request.
    response: Response object to populate with the response data.
    follow_redirects: optional setting (defaulting to True) for whether or not
      we should transparently follow redirects (up to MAX_REDIRECTS)
    deadline: Number of seconds to wait for the urlfetch to finish.
    validate_certificate: If true, do not send request to server unless the
      certificate is valid, signed by a trusted CA and the hostname matches
      the certificate.

  Raises:
    Raises an apiproxy_errors.ApplicationError exception with FETCH_ERROR
    in cases where:
      - MAX_REDIRECTS is exceeded
      - The protocol of the redirected URL is bad or missing.
  """
  # Track the previous hop so protocol-relative/host-relative redirects
  # can inherit the last scheme and host.
  last_protocol = ''
  last_host = ''
  # One extra iteration beyond MAX_REDIRECTS so the limit can be detected.
  for redirect_number in xrange(MAX_REDIRECTS + 1):
    parsed = urlparse.urlparse(url)
    protocol, host, path, parameters, query, fragment = parsed
    # Strip any user:pass@ prefix before extracting the port.
    port = urllib.splitport(urllib.splituser(host)[1])[1]
    if not _IsAllowedPort(port):
      # Dev-server leniency: warn (do not fail) about ports that
      # production urlfetch would reject.
      logging.warning(
          'urlfetch received %s ; port %s is not allowed in production!'
          % (url, port))
    if protocol and not host:
      # A scheme with no host is malformed (typically a bad redirect).
      logging.error('Missing host on redirect; target url is %s' % url)
      raise apiproxy_errors.ApplicationError(
          urlfetch_service_pb.URLFetchServiceError.FETCH_ERROR)
    if not host and not protocol:
      # Relative redirect: reuse the previous hop's host and scheme.
      host = last_host
      protocol = last_protocol
    # Baseline headers; user-supplied headers below may extend/override.
    adjusted_headers = {
        'User-Agent':
        'AppEngine-Google; (+http://code.google.com/appengine)',
        'Host': host,
        'Accept-Encoding': 'gzip',
    }
    if payload is not None:
      adjusted_headers['Content-Length'] = len(payload)
    if method == 'POST' and payload:
      adjusted_headers['Content-Type'] = 'application/x-www-form-urlencoded'
    for header in headers:
      if header.key().title().lower() == 'user-agent':
        # User agents are appended to (not replacing) the AppEngine UA.
        adjusted_headers['User-Agent'] = (
            '%s %s' % (header.value(), adjusted_headers['User-Agent']))
      else:
        # .title() canonicalizes header-name casing.
        adjusted_headers[header.key().title()] = header.value()
    logging.debug('Making HTTP request: host = %s, '
                  'url = %s, payload = %s, headers = %s',
                  host, url, payload, adjusted_headers)
    try:
      if protocol == 'http':
        connection = httplib.HTTPConnection(host)
      elif protocol == 'https':
        if (validate_certificate and fancy_urllib.can_validate_certs() and
            CERT_PATH):
          # Certificate-validating connection backed by the CA bundle.
          connection_class = fancy_urllib.create_fancy_connection(
              ca_certs=CERT_PATH)
          connection = connection_class(host)
        else:
          # Plain HTTPS without certificate validation.
          connection = httplib.HTTPSConnection(host)
      else:
        error_msg = 'Redirect specified invalid protocol: "%s"' % protocol
        logging.error(error_msg)
        raise apiproxy_errors.ApplicationError(
            urlfetch_service_pb.URLFetchServiceError.FETCH_ERROR, error_msg)
      last_protocol = protocol
      last_host = host
      if query != '':
        full_path = path + '?' + query
      else:
        full_path = path
      # httplib (Python 2) has no per-request timeout here, so the
      # process-wide default socket timeout is temporarily swapped in
      # and restored in the finally block.
      orig_timeout = socket.getdefaulttimeout()
      try:
        socket.setdefaulttimeout(deadline)
        connection.request(method, full_path, payload, adjusted_headers)
        http_response = connection.getresponse()
        if method == 'HEAD':
          # HEAD responses carry no body by definition.
          http_response_data = ''
        else:
          http_response_data = http_response.read()
      finally:
        socket.setdefaulttimeout(orig_timeout)
        connection.close()
    except (fancy_urllib.InvalidCertificateException,
            fancy_urllib.SSLError), e:
      # Certificate failures get a distinct error code so callers can
      # tell them apart from generic fetch errors.
      raise apiproxy_errors.ApplicationError(
          urlfetch_service_pb.URLFetchServiceError.SSL_CERTIFICATE_ERROR,
          str(e))
    except (httplib.error, socket.error, IOError), e:
      raise apiproxy_errors.ApplicationError(
          urlfetch_service_pb.URLFetchServiceError.FETCH_ERROR, str(e))
    # NOTE(review): the visible portion of this function ends here;
    # http_response_data is read but never consumed in this span, so the
    # response-population and redirect-following tail appears truncated
    # in this copy of the source — confirm against the full file.
def _RetrieveURL(
    self, url, payload, method, headers, request, response,
    follow_redirects=True, deadline=_API_CALL_DEADLINE,
    validate_certificate=_API_CALL_VALIDATE_CERTIFICATE_DEFAULT):
  """Retrieves a URL.

  Args:
    url: String containing the URL to access.
    payload: Request payload to send, if any; None if no payload.
    method: HTTP method to use (e.g., 'GET')
    headers: List of additional header objects to use for the request.
    request: Request object from original request.
    response: Response object to populate with the response data.
    follow_redirects: optional setting (defaulting to True) for whether or not
      we should transparently follow redirects (up to MAX_REDIRECTS)
    deadline: Number of seconds to wait for the urlfetch to finish.
    validate_certificate: If true, do not send request to server unless the
      certificate is valid, signed by a trusted CA and the hostname matches
      the certificate.

  Raises:
    Raises an apiproxy_errors.ApplicationError exception with FETCH_ERROR
    in cases where:
      - MAX_REDIRECTS is exceeded
      - The protocol of the redirected URL is bad or missing.
  """
  # The last hop's scheme/host are remembered so relative redirects can
  # inherit them.
  last_protocol = ''
  last_host = ''
  for redirect_number in xrange(MAX_REDIRECTS + 1):
    parsed = urlparse.urlparse(url)
    protocol, host, path, parameters, query, fragment = parsed
    # splituser() strips user:pass@ before the port is extracted.
    port = urllib.splitport(urllib.splituser(host)[1])[1]
    if not _IsAllowedPort(port):
      # Dev-server behavior: warn rather than fail on ports that
      # production would reject.
      logging.warning(
          'urlfetch received %s ; port %s is not allowed in production!'
          % (url, port))
    if protocol and not host:
      # Scheme without host — malformed redirect target.
      logging.error('Missing host on redirect; target url is %s' % url)
      raise apiproxy_errors.ApplicationError(
          urlfetch_service_pb.URLFetchServiceError.FETCH_ERROR)
    if not host and not protocol:
      # Relative redirect: reuse previous hop's host and scheme.
      host = last_host
      protocol = last_protocol
    adjusted_headers = {
        'User-Agent':
        'AppEngine-Google; (+http://code.google.com/appengine)',
        'Host': host,
        'Accept-Encoding': 'gzip',
    }
    if payload is not None:
      adjusted_headers['Content-Length'] = len(payload)
    if method == 'POST' and payload:
      adjusted_headers[
          'Content-Type'] = 'application/x-www-form-urlencoded'
    for header in headers:
      if header.key().title().lower() == 'user-agent':
        # Caller-supplied UA is prefixed to the AppEngine UA.
        adjusted_headers['User-Agent'] = (
            '%s %s' % (header.value(), adjusted_headers['User-Agent']))
      else:
        adjusted_headers[header.key().title()] = header.value()
    logging.debug(
        'Making HTTP request: host = %s, '
        'url = %s, payload = %s, headers = %s',
        host, url, payload, adjusted_headers)
    try:
      if protocol == 'http':
        connection = httplib.HTTPConnection(host)
      elif protocol == 'https':
        if (validate_certificate and fancy_urllib.can_validate_certs() and
            CERT_PATH):
          # Validating HTTPS connection using the configured CA bundle.
          connection_class = fancy_urllib.create_fancy_connection(
              ca_certs=CERT_PATH)
          connection = connection_class(host)
        else:
          connection = httplib.HTTPSConnection(host)
      else:
        error_msg = 'Redirect specified invalid protocol: "%s"' % protocol
        logging.error(error_msg)
        raise apiproxy_errors.ApplicationError(
            urlfetch_service_pb.URLFetchServiceError.FETCH_ERROR, error_msg)
      last_protocol = protocol
      last_host = host
      if query != '':
        full_path = path + '?' + query
      else:
        full_path = path
      # Python 2 httplib offers no per-request timeout here, so the
      # global default socket timeout is swapped in for the duration of
      # the request and restored in the finally block.
      orig_timeout = socket.getdefaulttimeout()
      try:
        socket.setdefaulttimeout(deadline)
        connection.request(method, full_path, payload, adjusted_headers)
        http_response = connection.getresponse()
        if method == 'HEAD':
          # HEAD responses have no body.
          http_response_data = ''
        else:
          http_response_data = http_response.read()
      finally:
        socket.setdefaulttimeout(orig_timeout)
        connection.close()
    except (fancy_urllib.InvalidCertificateException,
            fancy_urllib.SSLError), e:
      # SSL failures are surfaced under a dedicated error code.
      raise apiproxy_errors.ApplicationError(
          urlfetch_service_pb.URLFetchServiceError.
          SSL_CERTIFICATE_ERROR,
          str(e))
    except (httplib.error, socket.error, IOError), e:
      raise apiproxy_errors.ApplicationError(
          urlfetch_service_pb.URLFetchServiceError.FETCH_ERROR, str(e))
    # NOTE(review): the visible source ends here; http_response_data is
    # never consumed in this span, so the response-population and
    # redirect-following tail of this function appears truncated in this
    # copy — confirm against the full file.