def test_w_date(self):
    """DATE columns decode from their ISO-8601 string payload."""
    import datetime
    from google.protobuf.struct_pb2 import Value
    from google.cloud.proto.spanner.v1.type_pb2 import Type, DATE

    today = datetime.date.today()
    value_pb = Value(string_value=today.isoformat())
    self.assertEqual(self._callFUT(value_pb, Type(code=DATE)), today)
def test_log_proto_defaults(self):
    """log_proto with no extras queues the entry with all-None metadata."""
    from google.protobuf.struct_pb2 import Struct, Value

    message = Struct(fields={'foo': Value(bool_value=True)})
    client = _Client(project=self.PROJECT, connection=object())
    batch = self._makeOne(_Logger(), client=client)
    batch.log_proto(message)
    expected = ('proto', message, None, None, None, None)
    self.assertEqual(batch.entries, [expected])
def test_w_int(self):
    """INT64 columns decode from their string payload into Python ints."""
    from google.protobuf.struct_pb2 import Value
    from google.cloud.spanner_v1.proto.type_pb2 import Type, INT64

    expected = 12345
    value_pb = Value(string_value=str(expected))
    self.assertEqual(self._callFUT(value_pb, Type(code=INT64)), expected)
def test_w_float(self):
    """FLOAT64 columns decode from the number_value field."""
    from google.protobuf.struct_pb2 import Value
    from google.cloud.spanner_v1.proto.type_pb2 import Type, FLOAT64

    expected = 3.14159
    value_pb = Value(number_value=expected)
    self.assertEqual(self._callFUT(value_pb, Type(code=FLOAT64)), expected)
def test_w_bytes(self):
    """BYTES columns decode from the string_value field."""
    from google.protobuf.struct_pb2 import Value
    from google.cloud.spanner_v1.proto.type_pb2 import Type, BYTES

    expected = b'Value'
    value_pb = Value(string_value=expected)
    self.assertEqual(self._callFUT(value_pb, Type(code=BYTES)), expected)
def test_w_bool(self):
    """BOOL columns decode from the bool_value field."""
    from google.protobuf.struct_pb2 import Value
    from google.cloud.spanner_v1.proto.type_pb2 import Type, BOOL

    expected = True
    value_pb = Value(bool_value=expected)
    self.assertEqual(self._callFUT(value_pb, Type(code=BOOL)), expected)
def test_w_list_value(self):
    """A list_value Value converts to a plain Python list."""
    from google.protobuf.struct_pb2 import Value

    text = 'STRING'
    pi = 3.1415926
    value_pb = Value()
    # Append one element of each scalar kind, in order.
    for kwargs in ({'string_value': text},
                   {'bool_value': True},
                   {'number_value': pi}):
        value_pb.list_value.values.add(**kwargs)
    self.assertEqual(self._callFUT(value_pb), [text, True, pi])
def test_w_string(self):
    """STRING columns decode from the string_value field unchanged."""
    from google.protobuf.struct_pb2 import Value
    from google.cloud.spanner_v1.proto.type_pb2 import Type, STRING

    expected = u"Value"
    value_pb = Value(string_value=expected)
    self.assertEqual(self._callFUT(value_pb, Type(code=STRING)), expected)
def test_list_entries_with_paging(self):
    """Paged entry listing round-trips a simple string payload."""
    from google.protobuf.struct_pb2 import Struct

    payload = {'message': 'MESSAGE', 'weather': 'sunny'}
    # Build the Struct field-by-field rather than via the constructor.
    struct_pb = Struct()
    for key, value in payload.items():
        struct_pb.fields[key].string_value = value
    self._list_entries_with_paging_helper(payload, struct_pb)
def test_log_proto_explicit(self):
    """log_proto with every option populates a matching ProtobufEntry."""
    import datetime
    from google.cloud.logging.resource import Resource
    from google.cloud.logging.entries import ProtobufEntry
    from google.protobuf.struct_pb2 import Struct, Value

    message = Struct(fields={'foo': Value(bool_value=True)})
    REQUEST = {
        'requestMethod': 'POST',
        'requestUrl': 'https://api.example.com/endpoint',
        'status': '500',
    }
    RESOURCE = Resource(
        type='gae_app',
        labels={'module_id': 'default', 'version_id': 'test'},
    )
    # One keyword set, used both to build the expected entry and to log.
    options = dict(
        labels={'foo': 'bar', 'baz': 'qux'},
        insert_id='IID',
        severity='CRITICAL',
        http_request=REQUEST,
        timestamp=datetime.datetime(2016, 12, 31, 0, 1, 2, 999999),
        resource=RESOURCE,
        trace='12345678-1234-5678-1234-567812345678',
        span_id='000000000000004a',
        trace_sampled=True,
    )
    expected = ProtobufEntry(payload=message, **options)

    client = _Client(project=self.PROJECT, connection=_make_credentials())
    batch = self._make_one(_Logger(), client=client)
    batch.log_proto(message, **options)
    self.assertEqual(batch.entries, [expected])
def _merge_struct(lhs, rhs, type_):
    """Helper for '_merge_by_type': merge two partial STRUCT chunks.

    ``lhs`` carries the fields accumulated so far, ``rhs`` the newly
    received chunk.  The first element of ``rhs`` may continue the last
    element of ``lhs``; when it does, the two are merged according to the
    struct field's declared type.
    """
    fields = type_.struct_type.fields
    lhs, rhs = list(lhs.list_value.values), list(rhs.list_value.values)

    # Sanity check: If either list is empty, short-circuit.
    # This is effectively a no-op.
    # (Idiomatic truthiness test instead of `not len(...)`.)
    if not lhs or not rhs:
        return Value(list_value=ListValue(values=(lhs + rhs)))

    # The field straddling the chunk boundary is the merge candidate.
    candidate_type = fields[len(lhs) - 1].type
    first = rhs.pop(0)
    if first.HasField("null_value") or candidate_type.code in _UNMERGEABLE_TYPES:
        # NULLs and unmergeable types are complete values; just append.
        lhs.append(first)
    else:
        last = lhs.pop()
        lhs.append(_merge_by_type(last, first, candidate_type))
    return Value(list_value=ListValue(values=lhs + rhs))
def test_w_null(self):
    """A null_value Value decodes to None regardless of column type."""
    from google.protobuf.struct_pb2 import Value, NULL_VALUE
    from google.cloud.spanner_v1 import Type
    from google.cloud.spanner_v1 import TypeCode

    field_type = Type(code=TypeCode.STRING)
    value_pb = Value(null_value=NULL_VALUE)
    # assertIsNone is the unittest idiom and gives a clearer failure
    # message than assertEqual(..., None).
    self.assertIsNone(self._callFUT(value_pb, field_type))
def test_commit_w_alternate_client(self):
    """commit(client=...) drains entries through the alternate client's API."""
    import json
    from google.protobuf.json_format import MessageToJson
    from google.protobuf.struct_pb2 import Struct, Value
    from google.cloud.logging.logger import Logger
    from google.cloud.logging.logger import _GLOBAL_RESOURCE

    TEXT = 'This is the entry text'
    STRUCT = {'message': TEXT, 'weather': 'partly cloudy'}
    message = Struct(fields={'foo': Value(bool_value=True)})
    DEFAULT_LABELS = {'foo': 'spam'}
    LABELS = {'foo': 'bar', 'baz': 'qux'}
    SEVERITY = 'CRITICAL'
    REQUEST = {
        'requestMethod': 'POST',
        'requestUrl': 'https://api.example.com/endpoint',
        'status': '500',
    }
    resource_dict = _GLOBAL_RESOURCE._to_dict()
    ENTRIES = [
        {'textPayload': TEXT, 'labels': LABELS,
         'resource': resource_dict},
        {'jsonPayload': STRUCT, 'severity': SEVERITY,
         'resource': resource_dict},
        {'protoPayload': json.loads(MessageToJson(message)),
         'httpRequest': REQUEST, 'resource': resource_dict},
    ]
    client1 = _Client(project=self.PROJECT)
    client2 = _Client(project=self.PROJECT)
    api = client2.logging_api = _DummyLoggingAPI()
    logger = Logger('logger_name', client1, labels=DEFAULT_LABELS)

    batch = self._make_one(logger, client=client1)
    batch.log_text(TEXT, labels=LABELS)
    batch.log_struct(STRUCT, severity=SEVERITY)
    batch.log_proto(message, http_request=REQUEST)
    batch.commit(client=client2)

    self.assertEqual(list(batch.entries), [])
    self.assertEqual(
        api._write_entries_called_with,
        (ENTRIES, logger.full_name, None, DEFAULT_LABELS))
def test_w_numeric(self):
    """NUMERIC columns decode from their string payload into Decimal."""
    import decimal
    from google.protobuf.struct_pb2 import Value
    from google.cloud.spanner_v1.proto.type_pb2 import Type, NUMERIC

    expected = decimal.Decimal("99999999999999999999999999999.999999999")
    value_pb = Value(string_value=str(expected))
    self.assertEqual(self._callFUT(value_pb, Type(code=NUMERIC)), expected)
def test_w_empty_value(self):
    """A Value with no kind set raises ValueError."""
    from google.protobuf.struct_pb2 import Value
    from google.cloud.spanner_v1 import Type
    from google.cloud.spanner_v1 import TypeCode

    with self.assertRaises(ValueError):
        self._callFUT(Value(), Type(code=TypeCode.STRING))
def test_w_unknown_type(self):
    """An unspecified type code raises ValueError."""
    from google.protobuf.struct_pb2 import Value
    from google.cloud.spanner_v1 import Type
    from google.cloud.spanner_v1 import TypeCode

    unspecified = Type(code=TypeCode.TYPE_CODE_UNSPECIFIED)
    with self.assertRaises(ValueError):
        self._callFUT(Value(string_value="Borked"), unspecified)
def get_prediction(instance):
    """Return the first prediction row for *instance*, or None on failure.

    Any exception raised while building the request, calling the endpoint,
    or unpacking the response is logged and swallowed (best-effort call).
    """
    logging.info('Sending prediction request to AI Platform ...')
    try:
        pb_instance = json_format.ParseDict(instance, Value())
        response = aip_client.predict(
            endpoint=aip_endpoint_name, instances=[pb_instance])
        return list(response.predictions[0])
    except Exception as err:
        logging.error(f'Prediction request failed: {type(err)}: {err}')
        return None
def test_w_array_empty(self):
    """An empty ARRAY<INT64> decodes to an empty Python list."""
    from google.protobuf.struct_pb2 import Value, ListValue
    from google.cloud.spanner_v1 import Type
    from google.cloud.spanner_v1 import TypeCode

    field_type = Type(
        code=TypeCode.ARRAY,
        array_element_type=Type(code=TypeCode.INT64),
    )
    value_pb = Value(list_value=ListValue(values=[]))
    self.assertEqual(self._callFUT(value_pb, field_type), [])
def test_create_grpc_response_customdata():
    """An Any payload lands in customData and leaves data unset."""
    user_model = UserObject()
    datadef = scu.array_to_grpc_datadef("ndarray", np.array([[5, 6, 7]]))
    request = prediction_pb2.SeldonMessage(data=datadef)
    raw_response = any_pb2.Any(value=b"testdata")

    sm = scu.construct_response(user_model, True, request, raw_response)

    assert sm.data.WhichOneof("data_oneof") is None
    assert sm.customData != Value()
def test_create_grpc_response_jsondata():
    """A dict payload lands in jsonData and leaves data unset."""
    user_model = UserObject()
    request_data = np.array([[5, 6, 7]])
    datadef = scu.array_to_grpc_datadef("ndarray", request_data)
    request = prediction_pb2.SeldonMessage(data=datadef)
    raw_response = {"output": "data"}
    sm = scu.construct_response(user_model, True, request, raw_response)
    # `is None` is the correct identity check (PEP 8 / E711); `== None`
    # was inconsistent with the sibling customdata test above.
    assert sm.data.WhichOneof("data_oneof") is None
    emptyValue = Value()
    assert sm.jsonData != emptyValue
def test_create_rest_response_jsondata():
    """A dict payload appears under "jsonData" in the REST response."""
    user_model = UserObject()
    datadef = scu.array_to_rest_datadef("ndarray", np.array([[5, 6, 7]]))
    json_request = {"jsonData": datadef}
    raw_response = {"output": "data"}

    json_response = scu.construct_response_json(
        user_model, True, json_request, raw_response)

    assert "data" not in json_response
    assert json_response["jsonData"] != Value()
def test_write_entries_multiple(self):
    """Writing multiple entries round-trips a simple JSON payload."""
    from google.protobuf.struct_pb2 import Struct

    json_payload = {'payload': 'PAYLOAD', 'type': 'json'}
    # Build the Struct field-by-field rather than via the constructor.
    json_struct_pb = Struct()
    for key, value in json_payload.items():
        json_struct_pb.fields[key].string_value = value
    self._write_entries_multiple_helper(json_payload, json_struct_pb)
def test_w_struct_value(self):
    """A struct_value Value converts to a plain Python dict."""
    from google.protobuf.struct_pb2 import Value

    text = 'STRING'
    pi = 3.1415926
    value_pb = Value()
    fields = value_pb.struct_value.fields
    fields['string'].string_value = text
    fields['bool'].bool_value = True
    fields['number'].number_value = pi
    self.assertEqual(
        self._callFUT(value_pb),
        {'string': text, 'bool': True, 'number': pi})
def test_commit_w_alternate_client(self):
    """commit(client=...) sends queued entries via the alternate client."""
    import json
    from google.protobuf.json_format import MessageToJson
    from google.protobuf.struct_pb2 import Struct, Value
    from google.cloud.logging.logger import Logger
    from google.cloud.logging.entries import _GLOBAL_RESOURCE

    TEXT = "This is the entry text"
    STRUCT = {"message": TEXT, "weather": "partly cloudy"}
    message = Struct(fields={"foo": Value(bool_value=True)})
    DEFAULT_LABELS = {"foo": "spam"}
    LABELS = {"foo": "bar", "baz": "qux"}
    SEVERITY = "CRITICAL"
    REQUEST = {
        "requestMethod": "POST",
        "requestUrl": "https://api.example.com/endpoint",
        "status": "500",
    }
    # Each expected entry carries the global resource plus its own extras.
    resource = _GLOBAL_RESOURCE._to_dict()
    ENTRIES = [
        dict(textPayload=TEXT, labels=LABELS, resource=resource),
        dict(jsonPayload=STRUCT, severity=SEVERITY, resource=resource),
        dict(
            protoPayload=json.loads(MessageToJson(message)),
            httpRequest=REQUEST,
            resource=resource,
        ),
    ]
    client1 = _Client(project=self.PROJECT)
    client2 = _Client(project=self.PROJECT)
    api = client2.logging_api = _DummyLoggingAPI()
    logger = Logger("logger_name", client1, labels=DEFAULT_LABELS)

    batch = self._make_one(logger, client=client1)
    batch.log_text(TEXT, labels=LABELS)
    batch.log_struct(STRUCT, severity=SEVERITY)
    batch.log_proto(message, http_request=REQUEST)
    batch.commit(client=client2)

    self.assertEqual(list(batch.entries), [])
    self.assertEqual(
        api._write_entries_called_with,
        (ENTRIES, logger.full_name, None, DEFAULT_LABELS),
    )
def test_log_proto_explicit(self):
    """log_proto with every option queues a matching ProtobufEntry."""
    import datetime
    from google.cloud.logging.resource import Resource
    from google.cloud.logging.entries import ProtobufEntry
    from google.protobuf.struct_pb2 import Struct, Value

    message = Struct(fields={"foo": Value(bool_value=True)})
    REQUEST = {
        "requestMethod": "POST",
        "requestUrl": "https://api.example.com/endpoint",
        "status": "500",
    }
    RESOURCE = Resource(
        type="gae_app",
        labels={"module_id": "default", "version_id": "test"},
    )
    # One keyword set, used both to build the expected entry and to log.
    options = dict(
        labels={"foo": "bar", "baz": "qux"},
        insert_id="IID",
        severity="CRITICAL",
        http_request=REQUEST,
        timestamp=datetime.datetime(2016, 12, 31, 0, 1, 2, 999999),
        resource=RESOURCE,
        trace="12345678-1234-5678-1234-567812345678",
        span_id="000000000000004a",
        trace_sampled=True,
    )
    expected = ProtobufEntry(payload=message, **options)

    client = _Client(project=self.PROJECT, connection=_make_credentials())
    batch = self._make_one(_Logger(), client=client)
    batch.log_proto(message, **options)
    self.assertEqual(batch.entries, [expected])
def test_w_float_str(self):
    """FLOAT64 values carried as strings are parsed into Python floats."""
    from google.protobuf.struct_pb2 import Value
    from google.cloud.spanner_v1 import Type
    from google.cloud.spanner_v1 import TypeCode

    value_pb = Value(string_value="3.14159")
    field_type = Type(code=TypeCode.FLOAT64)
    self.assertEqual(self._callFUT(value_pb, field_type), 3.14159)
def to_value(self: Message) -> Value:
    """Convert this message to a :class:`~google.protobuf.struct_pb2.Value`.

    The conversion round-trips through the JSON-compatible dict form.

    Returns:
        the message as a :class:`~google.protobuf.struct_pb2.Value` object
    """
    as_dict = json_format.MessageToDict(self._pb)
    return json_format.ParseDict(as_dict, Value())
def _merge_float64(lhs, rhs, type_):  # pylint: disable=unused-argument
    """Helper for '_merge_by_type'."""
    lhs_kind = lhs.WhichOneof("kind")
    if lhs_kind == "string_value":
        # Both halves are partial string renderings: concatenate them.
        return Value(string_value=lhs.string_value + rhs.string_value)
    if (
        lhs_kind == "number_value"
        and rhs.WhichOneof("kind") == "string_value"
        and rhs.string_value == ""
    ):
        # lhs is already a complete number and rhs is an empty continuation.
        return lhs
    raise Unmergeable(lhs, rhs, type_)
def test_w_json(self):
    """JSON columns decode to dicts; a JSON null decodes to {}."""
    import json
    from google.protobuf.struct_pb2 import Value
    from google.cloud.spanner_v1 import Type
    from google.cloud.spanner_v1 import TypeCode

    field_type = Type(code=TypeCode.JSON)

    expected = {"id": 27863, "Name": "Anamika"}
    str_repr = json.dumps(expected, sort_keys=True, separators=(",", ":"))
    self.assertEqual(
        self._callFUT(Value(string_value=str_repr), field_type), expected)

    # A serialized JSON null comes back as an empty dict.
    null_repr = json.dumps(None, sort_keys=True, separators=(",", ":"))
    self.assertEqual(
        self._callFUT(Value(string_value=null_repr), field_type), {})
def test_log_proto_w_explicit_client_labels_severity_httpreq(self):
    """log_proto forwards explicit options through the alternate client."""
    import json
    from google.protobuf.json_format import MessageToJson
    from google.protobuf.struct_pb2 import Struct, Value

    message = Struct(fields={'foo': Value(bool_value=True)})
    DEFAULT_LABELS = {'foo': 'spam'}
    LABELS = {'foo': 'bar', 'baz': 'qux'}
    IID = 'IID'
    SEVERITY = 'CRITICAL'
    REQUEST = {
        'requestMethod': 'POST',
        'requestUrl': 'https://api.example.com/endpoint',
        'status': '500',
    }
    log_name = 'projects/%s/logs/%s' % (self.PROJECT, self.LOGGER_NAME)
    ENTRIES = [{
        'logName': log_name,
        'protoPayload': json.loads(MessageToJson(message)),
        'resource': {'type': 'global', 'labels': {}},
        'labels': LABELS,
        'insertId': IID,
        'severity': SEVERITY,
        'httpRequest': REQUEST,
    }]
    client1 = _Client(self.PROJECT)
    client2 = _Client(self.PROJECT)
    api = client2.logging_api = _DummyLoggingAPI()
    logger = self._make_one(
        self.LOGGER_NAME, client=client1, labels=DEFAULT_LABELS)

    logger.log_proto(
        message, client=client2, labels=LABELS, insert_id=IID,
        severity=SEVERITY, http_request=REQUEST)

    self.assertEqual(
        api._write_entries_called_with, (ENTRIES, None, None, None))