def MakeSyncCall(self, service, call, request, response, request_id=None):
  """The main RPC entry point.

  Validates the service name, request size, and request initialization,
  then dispatches to the matching _Dynamic_<call> handler.

  Args:
    service: Must be the name provided to service_name of the constructor.
    call: A string naming the rpc to make; must be implemented by a
      _Dynamic_<call> method on this stub.
    request: A protocol buffer of the type corresponding to 'call'.
    response: A protocol buffer of the type corresponding to 'call'.
    request_id: A unique string identifying the request associated with
      the API call.
  """
  expected_service = self.__service_name
  assert service == expected_service, (
      'Expected "%s" service name, was "%s"' % (expected_service, service))
  if request.ByteSize() > self.__max_request_size:
    raise apiproxy_errors.RequestTooLargeError(
        'The request to API call %s.%s() was too large.' % (service, call))
  init_errors = []
  assert request.IsInitialized(init_errors), init_errors

  # A configured error takes precedence over actually making the call.
  if self.__error:
    raise self.__error
  handler = getattr(self, '_Dynamic_' + call)
  if self._ACCEPTS_REQUEST_ID:
    handler(request, response, request_id)
  else:
    handler(request, response)
def test_datastore_emulator_request_too_large(self):
  """An oversized Put request must come back as a pickled RequestTooLargeError."""
  oversized_request = datastore_pb.PutRequest()
  # One byte past the limit is enough to trip the size check.
  oversized_request.Encode = (
      lambda: 'x' * (apiproxy_stub.MAX_REQUEST_SIZE + 1))

  expected_error = apiproxy_errors.RequestTooLargeError(
      apiproxy_stub.REQ_SIZE_EXCEEDS_LIMIT_MSG_TEMPLATE %
      ('datastore_v3', 'Put'))
  expected_response = remote_api_pb.Response()
  expected_response.set_exception(pickle.dumps(expected_error))

  self._assert_remote_call(
      expected_response, oversized_request, 'datastore_v3', 'Put')
def CheckRequest(self, service, call, request):
  """Checks that a request satisfies some common restrictions.

  Args:
    service: Must be the name provided to service_name of the constructor.
    call: A string representing the rpc to make.
    request: A protocol buffer of the type corresponding to 'call'.

  Raises:
    apiproxy_errors.RequestTooLargeError: if the serialized request exceeds
      the configured maximum size.
  """
  expected_service = self.__service_name
  assert service == expected_service, (
      'Expected "%s" service name, was "%s"' % (expected_service, service))
  if request.ByteSize() > self.__max_request_size:
    raise apiproxy_errors.RequestTooLargeError(
        REQ_SIZE_EXCEEDS_LIMIT_MSG_TEMPLATE % (service, call))
  init_errors = []
  assert request.IsInitialized(init_errors), init_errors
def MakeSyncCall(self, service, call, request, response, request_id=None,
                 environ=None):
  """The main RPC entry point.

  Args:
    service: Must be the name provided to service_name of the constructor.
    call: A string naming the rpc to make; must be implemented by a
      _Dynamic_<call> method on this stub.
    request: A protocol buffer of the type corresponding to 'call'.
    response: A protocol buffer of the type corresponding to 'call'.
    request_id: A unique string identifying the request associated with
      the API call.
    environ: Optional dict merged into os.environ for urlfetch.Fetch calls.
  """
  if environ and service == 'urlfetch' and call == 'Fetch':
    # Propagate the caller's environment so the fetch sees its settings.
    os.environ.update(environ)

  expected_service = self.__service_name
  assert service == expected_service, (
      'Expected "%s" service name, was "%s"' % (expected_service, service))
  if request.ByteSize() > self.__max_request_size:
    raise apiproxy_errors.RequestTooLargeError(
        'The request to API call %s.%s() was too large.' % (service, call))
  init_errors = []
  assert request.IsInitialized(init_errors), init_errors

  # Per-call injected errors fire probabilistically at their configured
  # frequency; a global injected error fires at self.__error_rate.
  exc_class, freq = self.__error_dict.get(call, (None, None))
  if exc_class and freq and random.random() <= freq:
    raise exc_class
  if self.__error and random.random() <= self.__error_rate:
    raise self.__error

  handler = getattr(self, '_Dynamic_' + call)
  if self._ACCEPTS_REQUEST_ID:
    handler(request, response, request_id)
  else:
    handler(request, response)
def MakeSyncCall(self, service, call, request, response, request_id=None):
  """The main RPC entry point.

  Serializes the call into a remote_api_pb.Request, POSTs it to the
  configured remote endpoint at self._location, and decodes the reply
  into `response`.

  Args:
    service: Must be the name provided to service_name of the constructor.
    call: A string representing the rpc to make.  Must be part of
      the underlying services methods and implemented by _Dynamic_<call>.
    request: A protocol buffer of the type corresponding to 'call'.
    response: A protocol buffer of the type corresponding to 'call'.
    request_id: A unique string identifying the request associated with the
        API call.

  Raises:
    apiproxy_errors.RequestTooLargeError: if the request exceeds the limit.
    apiproxy_errors.ApplicationError: if the remote side reported an
        application-level error.
    Exception: whatever pickled exception the remote side sent back.
  """
  assert service == self._service_name, ('Expected "%s" service name, '
                                         'was "%s"' % (self._service_name,
                                                       service))
  if request.ByteSize() > self._max_request_size:
    raise apiproxy_errors.RequestTooLargeError(
        'The request to API call %s.%s() was too large.' % (service, call))
  messages = []
  assert request.IsInitialized(messages), messages

  remote_api_request = remote_api_pb.Request()
  remote_api_request.set_service_name(service)
  remote_api_request.set_method(call)
  remote_api_request.set_request(request.Encode())
  if request_id is not None:
    remote_api_request.set_request_id(request_id)

  url = 'http://{}'.format(self._location)
  request_handle = urllib2.Request(url, remote_api_request.Encode())
  response_handle = urllib2.urlopen(request_handle)
  # Fix: always close the HTTP connection, even if read() raises; the
  # original leaked the socket until garbage collection.
  try:
    remote_api_response = remote_api_pb.Response(response_handle.read())
  finally:
    response_handle.close()

  if remote_api_response.has_application_error():
    error_pb = remote_api_response.application_error()
    raise apiproxy_errors.ApplicationError(error_pb.code(),
                                           error_pb.detail())
  if remote_api_response.has_exception():
    # NOTE(review): unpickling data received over HTTP is only safe because
    # the endpoint is the trusted local API server — confirm this stub is
    # never pointed at an untrusted address.
    raise pickle.loads(remote_api_response.exception())
  response.ParseFromString(remote_api_response.response())
def _handle_POST(self, environ, start_response):
  """Handles a POST request containing a serialized remote_api_pb.Request.

  Args:
    environ: An environ dict for the request as defined in PEP-333.
    start_response: A start_response function with semantics defined in
      PEP-333.

  Returns:
    A single element list containing the string body of the HTTP response.
  """
  # The status is always 200; failures are communicated inside the
  # serialized remote_api_pb.Response body instead.
  start_response('200 OK', [('Content-Type', 'application/octet-stream')])

  # NOTE(review): start_time is not used in this excerpt — presumably
  # consumed further down (outside this view) for latency logging; confirm.
  start_time = time.time()
  response = remote_api_pb.Response()
  try:
    request = remote_api_pb.Request()
    # NOTE: Exceptions encountered when parsing the PB or handling the request
    # will be propagated back to the caller the same way as exceptions raised
    # by the actual API call.
    if environ.get('HTTP_TRANSFER_ENCODING') == 'chunked':
      # CherryPy concatenates all chunks when 'wsgi.input' is read but v3.2.2
      # will not return even when all of the data in all chunks has been
      # read. See: https://bitbucket.org/cherrypy/cherrypy/issue/1131.
      wsgi_input = environ['wsgi.input'].read(2**32)
    else:
      wsgi_input = environ['wsgi.input'].read(int(environ['CONTENT_LENGTH']))
    request.ParseFromString(wsgi_input)

    service = request.service_name()
    service_stub = apiproxy_stub_map.apiproxy.GetStub(service)

    if isinstance(service_stub, datastore_grpc_stub.DatastoreGrpcStub):
      # The gRPC stub bypasses the normal stub size check, so enforce the
      # request-size limit here before forwarding.
      # len(request.request()) is equivalent to calling ByteSize() on
      # deserialized request.request.
      if len(request.request()) > apiproxy_stub.MAX_REQUEST_SIZE:
        raise apiproxy_errors.RequestTooLargeError(
            apiproxy_stub.REQ_SIZE_EXCEEDS_LIMIT_MSG_TEMPLATE % (
                service, request.method()))
      response = service_stub.MakeSyncCallForRemoteApi(request)
    else:
      if request.has_request_id():
        request_id = request.request_id()
        environ['HTTP_HOST'] = self._balanced_address
        # register_request_id is optional on request_data implementations;
        # only call it when present.
        op = getattr(service_stub.request_data, 'register_request_id', None)
        if callable(op):
          op(environ, request_id)
      api_response = _execute_request(request).Encode()
      response.set_response(api_response)
  except Exception, e:
    if isinstance(e, apiproxy_errors.ApplicationError):
      # ApplicationErrors are expected API-level failures: report them in
      # the structured application_error field and log quietly.
      level = logging.DEBUG
      application_error = response.mutable_application_error()
      application_error.set_code(e.application_error)
      application_error.set_detail(e.error_detail)
    else:
      # If the runtime instance is not Python, it won't be able to unpickle
      # the exception so use level that won't be ignored by default.
      level = logging.ERROR
      # Even if the runtime is Python, the exception may be unpicklable if
      # it requires importing a class blocked by the sandbox so just send
      # back the exception representation.
      # But due to our use of the remote API, at least some apiproxy errors
      # are generated in the Dev App Server main instance and not in the
      # language runtime and wrapping them causes different behavior from
      # prod so don't wrap them.
      if not isinstance(e, apiproxy_errors.Error):
        e = RuntimeError(repr(e))
    # While not strictly necessary for ApplicationError, do this to limit
    # differences with remote_api:handler.py.
    response.set_exception(pickle.dumps(e))
    logging.log(level, 'Exception while handling %s.%s()\n%s',
                request.service_name(), request.method(),
                traceback.format_exc())
  # NOTE(review): the remainder of this handler — encoding `response` and
  # returning the single-element body list promised by the docstring — lies
  # outside this excerpt.
def flush_function_too_large_error(self, *args, **kwargs):
  """Stand-in flush function that always fails with RequestTooLargeError."""
  raise apiproxy_errors.RequestTooLargeError()
def testSet_TooBig(self, mock_cached_pickled_string):
  """Set still persists the value when the cache write is too large."""
  mock_cached_pickled_string.side_effect = (
      apiproxy_errors.RequestTooLargeError('too big!'))
  layered_cache.Set('foo', 'bar')
  # The value must remain readable from the underlying store.
  self.assertEqual('bar', stored_object.Get('foo'))
def _PreHookHandler(self, service, call, request, unused_response):
  """Raises RequestTooLargeError if the serialized request exceeds the limit."""
  request_size = request.ByteSize()
  if request_size > self._max_request_size:
    raise apiproxy_errors.RequestTooLargeError(
        apiproxy_stub.REQ_SIZE_EXCEEDS_LIMIT_MSG_TEMPLATE % (service, call))