def __call__(self, request, metadata=None):
    """
    This is where the actual calls come through when the stub is called, such as
    stub.PostInputs(). They get passed to this method, which makes the request.

    Args:
        request: the proto object for the request. It must be the proper type for the
            request or the server will complain. Note: this doesn't type check the
            incoming request in the client, but it does at least make sure the request
            can be serialized before sending it to the server.
        metadata: not used currently, just added to match grpc.

    Returns:
        response: the proto object that this method returns.
    """
    if metadata is not None:
        raise Exception("No support currently for metadata field.")

    # There is no __self__ attribute on the request_serializer, unfortunately, so
    # validate the request type by comparing message descriptor names.
    expected_object_name = self.request_message_descriptor.name
    if type(request).__name__ != expected_object_name:
        raise Exception("The input request must be of type: %s from %s" %
                        (expected_object_name, self.request_message_descriptor.file.name))

    # Serialize the proto request to a dict, pick the endpoint, and make the HTTP call.
    params = protobuf_to_dict(request)
    url, method = _pick_proper_endpoint(self.resources, params)
    response_json = self.http_client.execute_request(method, params, url)

    # Get the actual message object to construct the response proto.
    message = self.response_deserializer
    result = dict_to_protobuf(message, response_json)
    return result
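# --- Example (not part of the original module) ---
# A minimal stand-in sketch of the proto <-> dict round trip this variant performs
# around the HTTP call, using google.protobuf.json_format in place of this module's
# protobuf_to_dict / dict_to_protobuf helpers. That substitution is an assumption
# about how those helpers behave, not a claim about their implementation.

from google.protobuf import json_format
from google.protobuf.struct_pb2 import Struct

example_request = Struct()
example_request.update({"query": "dogs", "per_page": 5})

# proto -> dict, playing the role of protobuf_to_dict(request)
example_params = json_format.MessageToDict(example_request)

# ... example_params would travel as the JSON body of the HTTP request ...

# dict -> proto, playing the role of dict_to_protobuf(message, response_json)
echoed = json_format.ParseDict(example_params, Struct())
assert echoed == example_request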
def __call__(self, request, metadata=None):
    # type: (Message, tuple) -> Message
    """
    This is where the actual calls come through when the stub is called, such as
    stub.PostInputs(). They get passed to this method, which makes the request.

    Args:
        request: the proto object for the request. It must be the proper type for the
            request or the server will complain. Note: this doesn't type check the
            incoming request in the client, but it does at least make sure the request
            can be serialized before sending it to the server.
        metadata: not used currently, just added to match grpc.

    Returns:
        response: the proto object that this method returns.
    """
    if metadata is not None:
        raise Exception("No support currently for metadata field.")

    # There is no __self__ attribute on the request_serializer, unfortunately, so
    # validate the request type by comparing message descriptor names.
    expected_object_name = self.request_message_descriptor.name
    if type(request).__name__ != expected_object_name:
        raise Exception("The input request must be of type: %s from %s" %
                        (expected_object_name, self.request_message_descriptor.file.name))

    params = protobuf_to_dict(request, use_integers_for_enums=False, ignore_show_empty=True)
    url, method, url_fields = _pick_proper_endpoint(self.resources, params)

    # Fields that were substituted into the URL path must not be repeated in the body.
    for url_field in url_fields:
        del params[url_field]

    response_json = self.http_client.execute_request(method, params, url)

    # Get the actual message object to construct the response proto.
    message = self.response_deserializer
    result = dict_to_protobuf(message, response_json, ignore_unknown_fields=True)
    return result
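# --- Example (not part of the original module) ---
# Illustration of why the url_fields loop above deletes keys from params: any request
# field that was substituted into the URL path must not be repeated in the JSON body.
# The endpoint template, field names, and fill_url_template helper below are
# hypothetical; the real _pick_proper_endpoint logic is not shown in this excerpt.

def fill_url_template(template, params):
    """Substitute matching request fields into a URL template and report which were used."""
    url = template
    url_fields = []
    for field, value in list(params.items()):
        placeholder = "{%s}" % field
        if placeholder in url:
            url = url.replace(placeholder, str(value))
            url_fields.append(field)
    return url, url_fields

example_params = {"model_id": "general", "inputs": [{"data": {"image": {"url": "..."}}}]}
example_url, example_url_fields = fill_url_template("/v2/models/{model_id}/outputs", example_params)
# example_url == "/v2/models/general/outputs", example_url_fields == ["model_id"]
for field in example_url_fields:
    del example_params[field]  # only "inputs" remains to be sent in the request body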
def __call__(self, request, metadata=None):
    """
    This is where the actual calls come through when the stub is called, such as
    stub.PostInputs(). They get passed to this method, which makes the request.

    Args:
        request: the proto object for the request. It must be the proper type for the
            request or the server will complain. Note: this doesn't type check the
            incoming request in the client, but it does at least make sure the request
            can be serialized before sending it to the server.
        metadata: not used currently, just added to match grpc.

    Returns:
        response: the proto object that this method returns.
    """
    if metadata is not None:
        raise Exception("No support currently for metadata field.")

    # There is no __self__ attribute on the request_serializer, unfortunately, so
    # validate the request type by comparing message descriptor names.
    expected_object_name = self.request_message_descriptor.name
    if type(request).__name__ != expected_object_name:
        raise Exception("The input request must be of type: %s from %s" %
                        (expected_object_name, self.request_message_descriptor.file.name))

    params = protobuf_to_dict(request)
    url, method = _pick_proper_endpoint(self.resources, params)

    headers = {
        'Content-Type': 'application/json',
        'X-Clarifai-Client': 'python:%s' % CLIENT_VERSION,
        'Python-Client': '%s:%s' % (OS_VER, PYTHON_VERSION),
        'Authorization': self.headers['Authorization']
    }

    # Log the outgoing request with base64 blobs shortened so the logs stay readable.
    logger.debug("=" * 100)
    succinct_payload = _mangle_base64_values(params)
    logger.debug("%s %s\nHEADERS:\n%s\nPAYLOAD:\n%s", method, url, pformat(headers),
                 pformat(succinct_payload))

    if method == 'GET':
        res = requests.get(url, params=params, headers=headers)
    elif method == "POST":
        res = requests.post(url, data=json.dumps(params), headers=headers)
    elif method == "DELETE":
        res = requests.delete(url, data=json.dumps(params), headers=headers)
    elif method == "PATCH":
        res = requests.patch(url, data=json.dumps(params), headers=headers)
    elif method == "PUT":
        res = requests.put(url, data=json.dumps(params), headers=headers)
    else:
        raise Exception("Unsupported request type: '%s'" % method)

    try:
        response_json = json.loads(res.content.decode('utf-8'))
    except ValueError:
        logger.exception("Could not get valid JSON from server response.")
        logger.debug("\nRESULT:\n%s", pformat(res.content.decode('utf-8')))
        return res
    else:
        logger.debug("\nRESULT:\n%s", pformat(response_json))

    # Any non-2xx status code is surfaced as an ApiError.
    if int(res.status_code / 100) != 2:
        error = ApiError(url, params, method, res)
        logger.warning("%s", str(error))
        raise error

    # Get the actual message object to construct the response proto.
    message = self.response_deserializer
    result = dict_to_protobuf(message, response_json)
    return result
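# --- Example (not part of the original module) ---
# Design note: the if/elif dispatch above keeps GET query parameters separate from the
# JSON bodies used by the other verbs. Below is a sketch of an equivalent dispatch
# using requests.request(), which accepts the HTTP verb as a string; this is an
# alternative shape for the same logic, not the module's code.

import json
import requests

def execute(method, url, params, headers):
    if method == 'GET':
        return requests.get(url, params=params, headers=headers)
    if method in ('POST', 'DELETE', 'PATCH', 'PUT'):
        return requests.request(method, url, data=json.dumps(params), headers=headers)
    raise Exception("Unsupported request type: '%s'" % method)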