def test_registered_type(self):
    from google.protobuf import any_pb2
    from google.protobuf import descriptor_pool
    from google.protobuf.struct_pb2 import Struct
    from google.protobuf.struct_pb2 import Value

    pool = descriptor_pool.Default()
    type_name = "google.protobuf.Struct"
    # Make sure the descriptor is known in the registry.
    descriptor = pool.FindMessageTypeByName(type_name)
    self.assertEqual(descriptor.name, "Struct")

    type_url = "type.googleapis.com/" + type_name
    field_name = "foo"
    field_value = "Bar"
    struct_pb = Struct(fields={field_name: Value(string_value=field_value)})
    any_pb = any_pb2.Any(type_url=type_url, value=struct_pb.SerializeToString())
    entry_pb = LogEntryPB(proto_payload=any_pb, log_name="all-good")
    result = self._call_fut(LogEntryPB.pb(entry_pb))
    expected_proto = {
        "logName": entry_pb.log_name,
        "protoPayload": {
            "@type": type_url,
            "value": {field_name: field_value},
        },
    }
    self.assertEqual(result, expected_proto)

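# Companion sketch (standalone, not part of the test above): the reverse
# direction for a Struct carried inside an Any -- check the type URL, unpack,
# and convert back to a plain dict. Uses only stock protobuf APIs.
from google.protobuf import any_pb2
from google.protobuf.json_format import MessageToDict
from google.protobuf.struct_pb2 import Struct, Value

struct_pb = Struct(fields={"foo": Value(string_value="Bar")})
any_pb = any_pb2.Any()
any_pb.Pack(struct_pb)  # sets type_url and serialized value

unpacked = Struct()
if any_pb.Is(Struct.DESCRIPTOR):
    any_pb.Unpack(unpacked)
assert MessageToDict(unpacked) == {"foo": "Bar"}
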
def to_literal(
    self,
    ctx: FlyteContext,
    python_val: DoltTable,
    python_type: typing.Type[DoltTable],
    expected: LiteralType,
) -> Literal:
    if not isinstance(python_val, DoltTable):
        raise AssertionError(f"Value cannot be converted to a table: {python_val}")

    conf = python_val.config
    if python_val.data is not None and python_val.config.tablename is not None:
        db = dolt.Dolt(conf.db_path)
        with tempfile.NamedTemporaryFile() as f:
            python_val.data.to_csv(f.name, index=False)
            dolt_int.save(
                db=db,
                tablename=conf.tablename,
                filename=f.name,
                branch_conf=conf.branch_conf,
                meta_conf=conf.meta_conf,
                remote_conf=conf.remote_conf,
                save_args=conf.io_args,
            )

    s = Struct()
    s.update(python_val.to_dict())
    return Literal(Scalar(generic=s))

def decode(obj: bytes) -> Message:
    """
    Decode bytes into a message using Protobuf.

    First, try to parse the input as a Protobuf 'Message'; if it fails,
    parse the bytes as struct.
    """
    message_pb = ProtobufMessage()
    message_pb.ParseFromString(obj)
    message_type = message_pb.WhichOneof("message")
    if message_type == "body":
        body = dict(message_pb.body)  # pylint: disable=no-member
        msg = TMessage(_body=body)
        return msg
    if message_type == "dialogue_message":
        dialogue_message_pb = (
            message_pb.dialogue_message  # pylint: disable=no-member
        )
        message_id = dialogue_message_pb.message_id
        target = dialogue_message_pb.target
        dialogue_starter_reference = dialogue_message_pb.dialogue_starter_reference
        dialogue_responder_reference = (
            dialogue_message_pb.dialogue_responder_reference
        )
        body_json = Struct()
        body_json.ParseFromString(dialogue_message_pb.content)
        body = dict(body_json)
        body["message_id"] = message_id
        body["target"] = target
        body["dialogue_reference"] = (
            dialogue_starter_reference,
            dialogue_responder_reference,
        )
        return TMessage(_body=body)
    raise ValueError("Message type not recognized.")  # pragma: nocover

def test_commit_w_bound_client(self):
    import json
    from google.protobuf.json_format import MessageToJson
    from google.protobuf.struct_pb2 import Struct, Value

    TEXT = 'This is the entry text'
    STRUCT = {'message': TEXT, 'weather': 'partly cloudy'}
    message = Struct(fields={'foo': Value(bool_value=True)})
    IID1 = 'IID1'
    IID2 = 'IID2'
    IID3 = 'IID3'
    RESOURCE = {
        'type': 'global',
    }
    ENTRIES = [
        {'textPayload': TEXT, 'insertId': IID1},
        {'jsonPayload': STRUCT, 'insertId': IID2},
        {'protoPayload': json.loads(MessageToJson(message)), 'insertId': IID3},
    ]
    client = _Client(project=self.PROJECT)
    api = client.logging_api = _DummyLoggingAPI()
    logger = _Logger()
    batch = self._makeOne(logger, client=client)

    batch.log_text(TEXT, insert_id=IID1)
    batch.log_struct(STRUCT, insert_id=IID2)
    batch.log_proto(message, insert_id=IID3)
    batch.commit()

    self.assertEqual(list(batch.entries), [])
    self.assertEqual(api._write_entries_called_with,
                     (ENTRIES, logger.full_name, RESOURCE, None))

def execute_partitioned_dml(self, dml, params=None, param_types=None):
    """Execute a partitionable DML statement.

    :type dml: str
    :param dml: DML statement

    :type params: dict, {str -> column value}
    :param params: values for parameter replacement.  Keys must match
                   the names used in ``dml``.

    :type param_types: dict[str -> Union[dict, .types.Type]]
    :param param_types:
        (Optional) maps explicit types for one or more param values;
        required if parameters are passed.

    :rtype: int
    :returns: Count of rows affected by the DML statement.
    """
    if params is not None:
        if param_types is None:
            raise ValueError("Specify 'param_types' when passing 'params'.")
        params_pb = Struct(
            fields={key: _make_value_pb(value) for key, value in params.items()}
        )
    else:
        params_pb = None

    api = self.spanner_api

    txn_options = TransactionOptions(
        partitioned_dml=TransactionOptions.PartitionedDml()
    )

    metadata = _metadata_with_prefix(self.name)

    with SessionCheckout(self._pool) as session:
        txn = api.begin_transaction(session.name, txn_options, metadata=metadata)

        txn_selector = TransactionSelector(id=txn.id)

        restart = functools.partial(
            api.execute_streaming_sql,
            session.name,
            dml,
            transaction=txn_selector,
            params=params_pb,
            param_types=param_types,
            metadata=metadata,
        )

        iterator = _restart_on_unavailable(restart)

        result_set = StreamedResultSet(iterator)
        list(result_set)  # consume all partials

        return result_set.stats.row_count_lower_bound

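# Hedged usage sketch (table and column names are made up; assumes the modern
# google.cloud.spanner_v1 param_types module is available): the caller passes
# plain Python params plus explicit types, and the method above wraps the
# params into a Struct before issuing ExecuteStreamingSql.
def _partitioned_dml_usage_sketch(database):
    from google.cloud.spanner_v1 import param_types

    return database.execute_partitioned_dml(
        "DELETE FROM contacts WHERE active = @active",
        params={"active": False},
        param_types={"active": param_types.BOOL},
    )
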
def test_log_proto_w_default_labels(self):
    import json
    from google.protobuf.json_format import MessageToJson
    from google.protobuf.struct_pb2 import Struct, Value

    message = Struct(fields={'foo': Value(bool_value=True)})
    DEFAULT_LABELS = {'foo': 'spam'}
    ENTRIES = [{
        'logName': 'projects/%s/logs/%s' % (
            self.PROJECT, self.LOGGER_NAME),
        'protoPayload': json.loads(MessageToJson(message)),
        'resource': {
            'type': 'global',
            'labels': {},
        },
        'labels': DEFAULT_LABELS,
    }]
    client = _Client(self.PROJECT)
    api = client.logging_api = _DummyLoggingAPI()
    logger = self._make_one(self.LOGGER_NAME, client=client,
                            labels=DEFAULT_LABELS)

    logger.log_proto(message)

    self.assertEqual(api._write_entries_called_with,
                     (ENTRIES, None, None, None))

def test_log_proto_w_timestamp(self):
    import json
    import datetime
    from google.protobuf.json_format import MessageToJson
    from google.protobuf.struct_pb2 import Struct
    from google.protobuf.struct_pb2 import Value

    message = Struct(fields={'foo': Value(bool_value=True)})
    TIMESTAMP = datetime.datetime(2016, 12, 31, 0, 1, 2, 999999)
    ENTRIES = [{
        'logName': 'projects/%s/logs/%s' % (
            self.PROJECT, self.LOGGER_NAME),
        'protoPayload': json.loads(MessageToJson(message)),
        'timestamp': '2016-12-31T00:01:02.999999Z',
        'resource': {
            'type': 'global',
            'labels': {},
        },
    }]
    client = _Client(self.PROJECT)
    api = client.logging_api = _DummyLoggingAPI()
    logger = self._make_one(self.LOGGER_NAME, client=client)

    logger.log_proto(message, timestamp=TIMESTAMP)

    self.assertEqual(api._write_entries_called_with,
                     (ENTRIES, None, None, None))

def test_from_pb_w_metadata_and_kwargs(self):
    from google.longrunning import operations_pb2
    from google.protobuf.any_pb2 import Any
    from google.protobuf.struct_pb2 import Struct
    from google.protobuf.struct_pb2 import Value
    from google.cloud import operation as MUT
    from google.cloud._testing import _Monkey

    type_url = "type.googleapis.com/%s" % (Struct.DESCRIPTOR.full_name,)
    type_url_map = {type_url: Struct}

    client = _Client()
    meta = Struct(fields={"foo": Value(string_value="Bar")})
    metadata_pb = Any(type_url=type_url, value=meta.SerializeToString())
    operation_pb = operations_pb2.Operation(
        name=self.OPERATION_NAME, metadata=metadata_pb)
    klass = self._get_target_class()

    with _Monkey(MUT, _TYPE_URL_MAP=type_url_map):
        operation = klass.from_pb(operation_pb, client, baz="qux")

    self.assertEqual(operation.name, self.OPERATION_NAME)
    self.assertIs(operation.client, client)
    self.assertEqual(operation.metadata, meta)
    self.assertEqual(operation.caller_metadata, {"baz": "qux"})

def test_from_pb_w_metadata_and_kwargs(self):
    from google.longrunning import operations_pb2
    from google.protobuf.any_pb2 import Any
    from google.protobuf.struct_pb2 import Struct, Value
    from google.cloud import operation as MUT
    from unit_tests._testing import _Monkey

    TYPE_URI = 'type.googleapis.com/%s' % (Struct.DESCRIPTOR.full_name,)
    type_url_map = {TYPE_URI: Struct}

    client = _Client()
    meta = Struct(fields={'foo': Value(string_value=u'Bar')})
    metadata_pb = Any(type_url=TYPE_URI, value=meta.SerializeToString())
    operation_pb = operations_pb2.Operation(
        name=self.OPERATION_NAME, metadata=metadata_pb)
    klass = self._getTargetClass()

    with _Monkey(MUT, _TYPE_URL_MAP=type_url_map):
        operation = klass.from_pb(operation_pb, client, baz='qux')

    self.assertEqual(operation.name, self.OPERATION_NAME)
    self.assertIs(operation.client, client)

    pb_metadata = operation.pb_metadata
    self.assertIsInstance(pb_metadata, Struct)
    self.assertEqual(list(pb_metadata.fields), ['foo'])
    self.assertEqual(pb_metadata.fields['foo'].string_value, 'Bar')

    self.assertEqual(operation.metadata, {'baz': 'qux'})

def test_log_proto_w_default_labels(self):
    import json
    from google.protobuf.json_format import MessageToJson
    from google.protobuf.struct_pb2 import Struct, Value

    message = Struct(fields={"foo": Value(bool_value=True)})
    DEFAULT_LABELS = {"foo": "spam"}
    ENTRIES = [{
        "logName": "projects/%s/logs/%s" % (self.PROJECT, self.LOGGER_NAME),
        "protoPayload": json.loads(MessageToJson(message)),
        "resource": {
            "type": "global",
            "labels": {},
        },
        "labels": DEFAULT_LABELS,
    }]
    client = _Client(self.PROJECT)
    api = client.logging_api = _DummyLoggingAPI()
    logger = self._make_one(self.LOGGER_NAME, client=client,
                            labels=DEFAULT_LABELS)

    logger.log_proto(message)

    self.assertEqual(api._write_entries_called_with,
                     (ENTRIES, None, None, None))

def test_log_proto_w_default_labels(self):
    import json
    from google.protobuf.json_format import MessageToJson
    from google.protobuf.struct_pb2 import Struct, Value

    message = Struct(fields={'foo': Value(bool_value=True)})
    DEFAULT_LABELS = {'foo': 'spam'}
    conn = _Connection({})
    client = _Client(self.PROJECT, conn)
    logger = self._makeOne(self.LOGGER_NAME, client=client,
                           labels=DEFAULT_LABELS)

    logger.log_proto(message)

    self.assertEqual(len(conn._requested), 1)
    req = conn._requested[0]
    SENT = {
        'entries': [{
            'logName': 'projects/%s/logs/%s' % (
                self.PROJECT, self.LOGGER_NAME),
            'protoPayload': json.loads(MessageToJson(message)),
            'resource': {
                'type': 'global',
            },
            'labels': DEFAULT_LABELS,
        }],
    }
    self.assertEqual(req['method'], 'POST')
    self.assertEqual(req['path'], '/entries:write')
    self.assertEqual(req['data'], SENT)

def _make_params_pb(params, param_types):
    """Helper for :meth:`execute_update`.

    :type params: dict, {str -> column value}
    :param params: values for parameter replacement.  Keys must match
                   the names used in ``dml``.

    :type param_types: dict[str -> Union[dict, .types.Type]]
    :param param_types:
        (Optional) maps explicit types for one or more param values;
        required if parameters are passed.

    :rtype: Union[dict, :class:`Struct`]
    :returns: a struct message for the passed params, or an empty dict
              if neither ``params`` nor ``param_types`` is passed.

    :raises ValueError:
        If ``param_types`` is None but ``params`` is not None.
    :raises ValueError:
        If ``params`` is None but ``param_types`` is not None.
    """
    if params is not None:
        if param_types is None:
            raise ValueError("Specify 'param_types' when passing 'params'.")
        return Struct(
            fields={key: _make_value_pb(value) for key, value in params.items()}
        )
    else:
        if param_types is not None:
            raise ValueError("Specify 'params' when passing 'param_types'.")

    return {}

def test_log_proto_explicit(self):
    import datetime
    from google.protobuf.struct_pb2 import Struct
    from google.protobuf.struct_pb2 import Value

    message = Struct(fields={'foo': Value(bool_value=True)})
    LABELS = {'foo': 'bar', 'baz': 'qux'}
    IID = 'IID'
    SEVERITY = 'CRITICAL'
    METHOD = 'POST'
    URI = 'https://api.example.com/endpoint'
    STATUS = '500'
    REQUEST = {
        'requestMethod': METHOD,
        'requestUrl': URI,
        'status': STATUS,
    }
    TIMESTAMP = datetime.datetime(2016, 12, 31, 0, 1, 2, 999999)
    client = _Client(project=self.PROJECT, connection=_make_credentials())
    logger = _Logger()
    batch = self._make_one(logger, client=client)

    batch.log_proto(message, labels=LABELS, insert_id=IID, severity=SEVERITY,
                    http_request=REQUEST, timestamp=TIMESTAMP)

    self.assertEqual(
        batch.entries,
        [('proto', message, LABELS, IID, SEVERITY, REQUEST, TIMESTAMP)])

def awarenessOn(channel):
    # Pack the "enabled" flag into a protobuf Struct and publish it on the
    # gateway's SetAwareness topic.
    message = Message()
    condition = Struct()
    condition["enabled"] = True
    message.pack(condition)
    message.topic = "RobotGateway.0.SetAwareness"
    channel.publish(message)

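# Side-note sketch (standalone, not from the gateway code above): Struct's
# dict-style assignment also accepts nested lists and dicts, which are stored
# as ListValue and nested Struct fields respectively.
from google.protobuf.struct_pb2 import Struct

condition = Struct()
condition["enabled"] = True
condition["sources"] = ["camera.0", "camera.1"]       # becomes a ListValue
condition["limits"] = {"fps": 10.0, "profile": "hd"}  # becomes a nested Struct
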
def document_info_handler(d, message_type):
    # Replace the nested "verified" mapping with an equivalent protobuf Struct.
    verified = d.pop("verified", False)
    if not verified:
        return d
    s = Struct()
    s.update(verified)
    d["verified"] = s
    return d

def decode(obj: bytes) -> Message:
    """Decode bytes into a message using Protobuf."""
    body_json = Struct()
    body_json.ParseFromString(obj)
    body = dict(body_json)
    msg = Message(body=body)
    return msg

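# Hedged companion sketch (not part of the original module): the encode
# direction of the round trip above. 'body' is assumed to hold only
# JSON-compatible values, since Struct can only represent
# null/bool/number/string/list/dict.
def encode(body: dict) -> bytes:
    from google.protobuf.struct_pb2 import Struct

    body_json = Struct()
    body_json.update(body)  # copy plain dict entries into Struct fields
    return body_json.SerializeToString()
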
def activate(self, experiment_key, user_id, attributes):
    # Copy the user attributes into a protobuf Struct for the gRPC request.
    attrs = Struct()
    for k, v in attributes.items():
        attrs.update({k: v})
    user = pb2.User(id=user_id, attributes=attrs)
    activate_request = pb2.ActivateRequest(experiment_key=experiment_key,
                                           user=user)
    return self.stub.Activate(activate_request).variation

def write_dict_to_proto(data: Dict[str, Any], path: str) -> str:
    from google.protobuf.struct_pb2 import Struct

    s = Struct()
    s.update(data)
    with open(path, "wb") as f:
        f.write(s.SerializeToString())
    return path

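# Hypothetical counterpart sketch (not in the original source): read back a
# file written by the function above and recover a plain dict via
# json_format.MessageToDict.
def read_proto_to_dict(path: str) -> dict:
    from google.protobuf.struct_pb2 import Struct
    from google.protobuf.json_format import MessageToDict

    s = Struct()
    with open(path, "rb") as f:
        s.ParseFromString(f.read())
    return MessageToDict(s)
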
def __init__(self, query_plan=None, **query_stats):
    from google.protobuf.struct_pb2 import Struct
    from google.cloud.spanner._helpers import _make_value_pb

    self.query_plan = query_plan
    self.query_stats = Struct(fields={
        key: _make_value_pb(value) for key, value in query_stats.items()
    })

def test_log_proto_defaults(self):
    from google.protobuf.struct_pb2 import Struct, Value

    message = Struct(fields={'foo': Value(bool_value=True)})
    client = _Client(project=self.PROJECT, connection=object())
    logger = _Logger()
    batch = self._make_one(logger, client=client)

    batch.log_proto(message)

    self.assertEqual(batch.entries,
                     [('proto', message, None, None, None, None)])

def test_list_entries_with_paging(self):
    from google.protobuf.struct_pb2 import Struct
    from google.protobuf.struct_pb2 import Value

    payload = {'message': 'MESSAGE', 'weather': 'sunny'}
    struct_pb = Struct(fields={
        key: Value(string_value=value) for key, value in payload.items()
    })
    self._list_entries_with_paging_helper(payload, struct_pb)

async def execute_action(self, game_id, data_dict):
    # Wrap the action payload in a protobuf Struct before issuing the RPC.
    data_struct = Struct()
    data_struct.update(data_dict)
    response = await self.stub.ExecuteAction(
        eda_games_pb2.ExecuteActionRequest(
            idgame=game_id,
            data=data_struct,
        )
    )
    return GameState.from_protobuf_game_state_response(response)

def testGetBuildStatusWithValidId(self):
    """Tests for GetBuildStatus with a valid ID."""
    properties_dict = {
        'cidb_id': '1234',
        'bot_id': 'swarm-cros-34',
        'cbb_branch': 'master',
        'cbb_config': 'sludge-paladin-tryjob',
        'cbb_master_build_id': '4321',
        'platform_version': '11721.0.0',
        'milestone_version': '74',
        'full_version': 'R74-11721.0.0-b3457724',
        'critical': '1',
        'build_type': 'Try',
    }
    start_time = Timestamp()
    start_time.GetCurrentTime()
    fake_properties = Struct(fields={
        key: Value(string_value=value)
        for key, value in properties_dict.items()
    })
    fake_output = build_pb2.Build.Output(properties=fake_properties)
    fake_build = build_pb2.Build(id=1234, start_time=start_time, status=2,
                                 output=fake_output)
    self.PatchObject(buildbucket_v2.BuildbucketV2, 'GetBuild',
                     return_value=fake_build)
    expected_valid_status = {
        'build_config': 'sludge-paladin-tryjob',
        'start_time': datetime.fromtimestamp(start_time.seconds),
        'finish_time': None,
        'id': 1234,
        'status': constants.BUILDER_STATUS_INFLIGHT,
        'chrome_version': None,
        'platform_version': '11721.0.0',
        'milestone_version': '74',
        'full_version': 'R74-11721.0.0-b3457724',
        'important': 1,
        'buildbucket_id': 1234,
        'summary': None,
        'master_build_id': 4321,
        'bot_hostname': 'swarm-cros-34',
        'builder_name': None,
        'build_number': None,
        'buildbot_generation': None,
        'waterfall': None,
        'deadline': None,
        'build_type': 'Try',
        'metadata_url': None,
        'toolchain_url': None,
        'branch': 'master',
    }
    bbv2 = buildbucket_v2.BuildbucketV2()
    status = bbv2.GetBuildStatus(1234)
    self.assertEqual(status, expected_valid_status)

def _make_result_set_stats(query_plan=None, **kw):
    from google.cloud.spanner_v1.proto.result_set_pb2 import ResultSetStats
    from google.protobuf.struct_pb2 import Struct
    from google.cloud.spanner_v1._helpers import _make_value_pb

    query_stats = Struct(
        fields={key: _make_value_pb(value) for key, value in kw.items()})
    return ResultSetStats(query_plan=query_plan, query_stats=query_stats)

def test_log_proto_explicit(self):
    import datetime
    from google.cloud.logging.resource import Resource
    from google.cloud.logging.entries import ProtobufEntry
    from google.protobuf.struct_pb2 import Struct
    from google.protobuf.struct_pb2 import Value

    message = Struct(fields={'foo': Value(bool_value=True)})
    LABELS = {'foo': 'bar', 'baz': 'qux'}
    IID = 'IID'
    SEVERITY = 'CRITICAL'
    METHOD = 'POST'
    URI = 'https://api.example.com/endpoint'
    STATUS = '500'
    TRACE = '12345678-1234-5678-1234-567812345678'
    SPANID = '000000000000004a'
    REQUEST = {
        'requestMethod': METHOD,
        'requestUrl': URI,
        'status': STATUS,
    }
    TIMESTAMP = datetime.datetime(2016, 12, 31, 0, 1, 2, 999999)
    RESOURCE = Resource(
        type='gae_app',
        labels={
            'module_id': 'default',
            'version_id': 'test',
        },
    )
    ENTRY = ProtobufEntry(
        payload=message,
        labels=LABELS,
        insert_id=IID,
        severity=SEVERITY,
        http_request=REQUEST,
        timestamp=TIMESTAMP,
        resource=RESOURCE,
        trace=TRACE,
        span_id=SPANID,
        trace_sampled=True,
    )
    client = _Client(project=self.PROJECT, connection=_make_credentials())
    logger = _Logger()
    batch = self._make_one(logger, client=client)

    batch.log_proto(
        message,
        labels=LABELS,
        insert_id=IID,
        severity=SEVERITY,
        http_request=REQUEST,
        timestamp=TIMESTAMP,
        resource=RESOURCE,
        trace=TRACE,
        span_id=SPANID,
        trace_sampled=True,
    )

    self.assertEqual(batch.entries, [ENTRY])

def _execute_update_helper(self, count=0, query_options=None):
    from google.protobuf.struct_pb2 import Struct
    from google.cloud.spanner_v1.proto.result_set_pb2 import (
        ResultSet,
        ResultSetStats,
    )
    from google.cloud.spanner_v1.proto.transaction_pb2 import TransactionSelector
    from google.cloud.spanner_v1._helpers import (
        _make_value_pb,
        _merge_query_options,
    )

    MODE = 2  # PROFILE
    stats_pb = ResultSetStats(row_count_exact=1)
    database = _Database()
    api = database.spanner_api = self._make_spanner_api()
    api.execute_sql.return_value = ResultSet(stats=stats_pb)
    session = _Session(database)
    transaction = self._make_one(session)
    transaction._transaction_id = self.TRANSACTION_ID
    transaction._execute_sql_count = count

    row_count = transaction.execute_update(
        DML_QUERY_WITH_PARAM,
        PARAMS,
        PARAM_TYPES,
        query_mode=MODE,
        query_options=query_options,
    )

    self.assertEqual(row_count, 1)

    expected_transaction = TransactionSelector(id=self.TRANSACTION_ID)
    expected_params = Struct(
        fields={key: _make_value_pb(value) for (key, value) in PARAMS.items()}
    )
    expected_query_options = database._instance._client._query_options
    if query_options:
        expected_query_options = _merge_query_options(
            expected_query_options, query_options
        )

    api.execute_sql.assert_called_once_with(
        self.SESSION_NAME,
        DML_QUERY_WITH_PARAM,
        transaction=expected_transaction,
        params=expected_params,
        param_types=PARAM_TYPES,
        query_mode=MODE,
        query_options=expected_query_options,
        seqno=count,
        metadata=[("google-cloud-resource-prefix", database.name)],
    )

    self.assertEqual(transaction._execute_sql_count, count + 1)

def execute_sql(self, sql, params=None, param_types=None, query_mode=None,
                resume_token=b''):
    """Perform an ``ExecuteStreamingSql`` API request for rows in a table.

    :type sql: str
    :param sql: SQL query statement

    :type params: dict, {str -> column value}
    :param params: values for parameter replacement.  Keys must match
                   the names used in ``sql``.

    :type param_types: dict
    :param param_types:
        (Optional) maps explicit types for one or more param values;
        required if parameters are passed.

    :type query_mode:
        :class:`google.cloud.proto.spanner.v1.ExecuteSqlRequest.QueryMode`
    :param query_mode: Mode governing return of results / query plan. See
        https://cloud.google.com/spanner/reference/rpc/google.spanner.v1#google.spanner.v1.ExecuteSqlRequest.QueryMode1

    :type resume_token: bytes
    :param resume_token: token for resuming previously-interrupted query

    :rtype: :class:`~google.cloud.spanner.streamed.StreamedResultSet`
    :returns: a result set instance which can be used to consume rows.
    """
    if params is not None:
        if param_types is None:
            raise ValueError("Specify 'param_types' when passing 'params'.")
        params_pb = Struct(fields={
            key: _make_value_pb(value) for key, value in params.items()
        })
    else:
        params_pb = None

    database = self._session._database
    options = _options_with_prefix(database.name)
    transaction = self._make_txn_selector()
    api = database.spanner_api
    iterator = api.execute_streaming_sql(
        self._session.name, sql, transaction=transaction, params=params_pb,
        param_types=param_types, query_mode=query_mode,
        resume_token=resume_token, options=options)

    return StreamedResultSet(iterator)

def execute_update(self, dml, params=None, param_types=None, query_mode=None):
    """Perform an ``ExecuteSql`` API request with DML.

    :type dml: str
    :param dml: SQL DML statement

    :type params: dict, {str -> column value}
    :param params: values for parameter replacement.  Keys must match
                   the names used in ``dml``.

    :type param_types: dict[str -> Union[dict, .types.Type]]
    :param param_types:
        (Optional) maps explicit types for one or more param values;
        required if parameters are passed.

    :type query_mode:
        :class:`google.cloud.spanner_v1.proto.ExecuteSqlRequest.QueryMode`
    :param query_mode: Mode governing return of results / query plan. See
        https://cloud.google.com/spanner/reference/rpc/google.spanner.v1#google.spanner.v1.ExecuteSqlRequest.QueryMode1

    :rtype: int
    :returns: Count of rows affected by the DML statement.
    """
    if params is not None:
        if param_types is None:
            raise ValueError("Specify 'param_types' when passing 'params'.")
        params_pb = Struct(fields={
            key: _make_value_pb(value) for key, value in params.items()
        })
    else:
        params_pb = None

    database = self._session._database
    metadata = _metadata_with_prefix(database.name)
    transaction = self._make_txn_selector()
    api = database.spanner_api
    response = api.execute_sql(
        self._session.name,
        dml,
        transaction=transaction,
        params=params_pb,
        param_types=param_types,
        query_mode=query_mode,
        seqno=self._execute_sql_count,
        metadata=metadata,
    )

    self._execute_sql_count += 1
    return response.stats.row_count_exact

def test_commit_w_alternate_client(self):
    import json
    from google.protobuf.json_format import MessageToJson
    from google.protobuf.struct_pb2 import Struct
    from google.protobuf.struct_pb2 import Value
    from google.cloud.logging.logger import Logger
    from google.cloud.logging.logger import _GLOBAL_RESOURCE

    TEXT = 'This is the entry text'
    STRUCT = {'message': TEXT, 'weather': 'partly cloudy'}
    message = Struct(fields={'foo': Value(bool_value=True)})
    DEFAULT_LABELS = {'foo': 'spam'}
    LABELS = {
        'foo': 'bar',
        'baz': 'qux',
    }
    SEVERITY = 'CRITICAL'
    METHOD = 'POST'
    URI = 'https://api.example.com/endpoint'
    STATUS = '500'
    REQUEST = {
        'requestMethod': METHOD,
        'requestUrl': URI,
        'status': STATUS,
    }
    client1 = _Client(project=self.PROJECT)
    client2 = _Client(project=self.PROJECT)
    api = client2.logging_api = _DummyLoggingAPI()
    logger = Logger('logger_name', client1, labels=DEFAULT_LABELS)
    ENTRIES = [
        {'textPayload': TEXT, 'labels': LABELS,
         'resource': _GLOBAL_RESOURCE._to_dict()},
        {'jsonPayload': STRUCT, 'severity': SEVERITY,
         'resource': _GLOBAL_RESOURCE._to_dict()},
        {'protoPayload': json.loads(MessageToJson(message)),
         'httpRequest': REQUEST,
         'resource': _GLOBAL_RESOURCE._to_dict()},
    ]
    batch = self._make_one(logger, client=client1)

    batch.log_text(TEXT, labels=LABELS)
    batch.log_struct(STRUCT, severity=SEVERITY)
    batch.log_proto(message, http_request=REQUEST)
    batch.commit(client=client2)

    self.assertEqual(list(batch.entries), [])
    self.assertEqual(api._write_entries_called_with,
                     (ENTRIES, logger.full_name, None, DEFAULT_LABELS))

def test_write_entries_multiple(self):
    from google.protobuf.struct_pb2 import Struct
    from google.protobuf.struct_pb2 import Value

    json_payload = {'payload': 'PAYLOAD', 'type': 'json'}
    json_struct_pb = Struct(fields={
        key: Value(string_value=value)
        for key, value in json_payload.items()
    })
    self._write_entries_multiple_helper(json_payload, json_struct_pb)