Example #1
def _log_entry_mapping_to_pb(mapping):
    """Helper for :meth:`write_entries`, et aliae

    Ideally, would use a function from :mod:`protobuf.json_format`, but
    the right one isn't public.  See:
    https://github.com/google/protobuf/issues/1351
    """
    # pylint: disable=too-many-branches
    entry_pb = LogEntry()

    optional_scalar_keys = {
        'logName': 'log_name',
        'insertId': 'insert_id',
        'textPayload': 'text_payload',
    }

    for key, pb_name in optional_scalar_keys.items():
        if key in mapping:
            setattr(entry_pb, pb_name, mapping[key])

    if 'resource' in mapping:
        entry_pb.resource.type = mapping['resource']['type']

    if 'severity' in mapping:
        severity = mapping['severity']
        if isinstance(severity, str):
            severity = LogSeverity.Value(severity)
        entry_pb.severity = severity

    if 'timestamp' in mapping:
        timestamp = _datetime_to_pb_timestamp(mapping['timestamp'])
        entry_pb.timestamp.CopyFrom(timestamp)

    if 'labels' in mapping:
        for key, value in mapping['labels'].items():
            entry_pb.labels[key] = value

    if 'jsonPayload' in mapping:
        for key, value in mapping['jsonPayload'].items():
            entry_pb.json_payload[key] = value

    if 'protoPayload' in mapping:
        Parse(json.dumps(mapping['protoPayload']), entry_pb.proto_payload)

    if 'httpRequest' in mapping:
        _http_request_mapping_to_pb(
            mapping['httpRequest'], entry_pb.http_request)

    if 'operation' in mapping:
        _log_operation_mapping_to_pb(
            mapping['operation'], entry_pb.operation)

    return entry_pb
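A minimal usage sketch for the helper above; the project, log name, and payload values are illustrative assumptions, and it relies on the same module-level imports the helper does (LogEntry, LogSeverity, Parse, and the google.cloud._helpers conversions).

# Hypothetical caller: build a LogEntry protobuf from a JSON-API-style mapping.
import datetime

mapping = {
    'logName': 'projects/my-project/logs/syslog',   # made-up project/log name
    'textPayload': 'something happened',
    'severity': 'WARNING',      # string form is converted via LogSeverity.Value
    'timestamp': datetime.datetime.utcnow(),
    'labels': {'env': 'test'},
}
entry_pb = _log_entry_mapping_to_pb(mapping)
assert entry_pb.text_payload == 'something happened'
assert entry_pb.labels['env'] == 'test'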
Example #2
def _log_entry_mapping_to_pb(mapping):
    """Helper for :meth:`write_entries`, et aliae

    Performs "impedance matching" between the protobuf attrs and the keys
    expected in the JSON API.
    """
    # pylint: disable=too-many-branches
    entry_pb = LogEntry()

    optional_scalar_keys = {
        'logName': 'log_name',
        'insertId': 'insert_id',
        'textPayload': 'text_payload',
    }

    for key, pb_name in optional_scalar_keys.items():
        if key in mapping:
            setattr(entry_pb, pb_name, mapping[key])

    if 'resource' in mapping:
        entry_pb.resource.type = mapping['resource']['type']

    if 'severity' in mapping:
        severity = mapping['severity']
        if isinstance(severity, str):
            severity = LogSeverity.Value(severity)
        entry_pb.severity = severity

    if 'timestamp' in mapping:
        timestamp = _datetime_to_pb_timestamp(mapping['timestamp'])
        entry_pb.timestamp.CopyFrom(timestamp)

    if 'labels' in mapping:
        for key, value in mapping['labels'].items():
            entry_pb.labels[key] = value

    if 'jsonPayload' in mapping:
        for key, value in mapping['jsonPayload'].items():
            entry_pb.json_payload[key] = value

    if 'protoPayload' in mapping:
        Parse(json.dumps(mapping['protoPayload']), entry_pb.proto_payload)

    if 'httpRequest' in mapping:
        _http_request_mapping_to_pb(
            mapping['httpRequest'], entry_pb.http_request)

    if 'operation' in mapping:
        _log_operation_mapping_to_pb(
            mapping['operation'], entry_pb.operation)

    return entry_pb
Example #3
    def test_list_entries_no_paging(self):
        import datetime

        from google.api.monitored_resource_pb2 import MonitoredResource
        from google.gax import INITIAL_PAGE
        from google.logging.v2.log_entry_pb2 import LogEntry

        from google.cloud._helpers import _datetime_to_pb_timestamp
        from google.cloud._helpers import UTC
        from google.cloud._testing import _GAXPageIterator
        from google.cloud.logging import DESCENDING
        from google.cloud.logging.client import Client
        from google.cloud.logging.entries import TextEntry
        from google.cloud.logging.logger import Logger

        TOKEN = 'TOKEN'
        TEXT = 'TEXT'
        resource_pb = MonitoredResource(type='global')
        timestamp = datetime.datetime.utcnow().replace(tzinfo=UTC)
        timestamp_pb = _datetime_to_pb_timestamp(timestamp)
        entry_pb = LogEntry(log_name=self.LOG_PATH,
                            resource=resource_pb,
                            timestamp=timestamp_pb,
                            text_payload=TEXT)
        response = _GAXPageIterator([entry_pb], page_token=TOKEN)
        gax_api = _GAXLoggingAPI(_list_log_entries_response=response)
        client = Client(project=self.PROJECT,
                        credentials=object(),
                        use_gax=True)
        api = self._make_one(gax_api, client)

        iterator = api.list_entries([self.PROJECT], self.FILTER, DESCENDING)
        entries = list(iterator)
        next_token = iterator.next_page_token

        # First check the token.
        self.assertEqual(next_token, TOKEN)
        # Then check the entries returned.
        self.assertEqual(len(entries), 1)
        entry = entries[0]
        self.assertIsInstance(entry, TextEntry)
        self.assertEqual(entry.payload, TEXT)
        self.assertIsInstance(entry.logger, Logger)
        self.assertEqual(entry.logger.name, self.LOG_NAME)
        self.assertIsNone(entry.insert_id)
        self.assertEqual(entry.timestamp, timestamp)
        self.assertIsNone(entry.labels)
        self.assertIsNone(entry.severity)
        self.assertIsNone(entry.http_request)

        projects, filter_, order_by, page_size, options = (
            gax_api._list_log_entries_called_with)
        self.assertEqual(projects, [self.PROJECT])
        self.assertEqual(filter_, self.FILTER)
        self.assertEqual(order_by, DESCENDING)
        self.assertEqual(page_size, 0)
        self.assertIs(options.page_token, INITIAL_PAGE)
Example #4
    def _list_entries_with_paging_helper(self, payload, struct_pb):
        import datetime

        from google.api.monitored_resource_pb2 import MonitoredResource
        from google.logging.v2.log_entry_pb2 import LogEntry
        from google.cloud._helpers import _datetime_to_pb_timestamp
        from google.cloud._helpers import UTC
        from google.cloud._testing import _GAXPageIterator
        from google.cloud.logging.client import Client
        from google.cloud.logging.entries import StructEntry
        from google.cloud.logging.logger import Logger

        SIZE = 23
        TOKEN = 'TOKEN'
        NEW_TOKEN = 'NEW_TOKEN'
        resource_pb = MonitoredResource(type='global')
        timestamp = datetime.datetime.utcnow().replace(tzinfo=UTC)
        timestamp_pb = _datetime_to_pb_timestamp(timestamp)
        entry_pb = LogEntry(log_name=self.LOG_PATH,
                            resource=resource_pb,
                            timestamp=timestamp_pb,
                            json_payload=struct_pb)
        response = _GAXPageIterator([entry_pb], page_token=NEW_TOKEN)
        gax_api = _GAXLoggingAPI(_list_log_entries_response=response)
        client = Client(project=self.PROJECT,
                        credentials=object(),
                        use_gax=True)
        api = self._make_one(gax_api, client)

        iterator = api.list_entries([self.PROJECT],
                                    page_size=SIZE,
                                    page_token=TOKEN)
        entries = list(iterator)
        next_token = iterator.next_page_token

        # First check the token.
        self.assertEqual(next_token, NEW_TOKEN)
        self.assertEqual(len(entries), 1)
        entry = entries[0]
        self.assertIsInstance(entry, StructEntry)
        self.assertEqual(entry.payload, payload)
        self.assertIsInstance(entry.logger, Logger)
        self.assertEqual(entry.logger.name, self.LOG_NAME)
        self.assertIsNone(entry.insert_id)
        self.assertEqual(entry.timestamp, timestamp)
        self.assertIsNone(entry.labels)
        self.assertIsNone(entry.severity)
        self.assertIsNone(entry.http_request)

        projects, filter_, order_by, page_size, options = (
            gax_api._list_log_entries_called_with)
        self.assertEqual(projects, [self.PROJECT])
        self.assertEqual(filter_, '')
        self.assertEqual(order_by, '')
        self.assertEqual(page_size, SIZE)
        self.assertEqual(options.page_token, TOKEN)
Example #5
def _log_entry_mapping_to_pb(mapping):
    """Helper for :meth:`write_entries`, et aliae

    Performs "impedance matching" between the protobuf attrs and
    the keys expected in the JSON API.
    """
    entry_pb = LogEntry()
    if 'timestamp' in mapping:
        mapping['timestamp'] = _datetime_to_rfc3339(mapping['timestamp'])
    ParseDict(mapping, entry_pb)
    return entry_pb
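This variant delegates the key/attribute "impedance matching" to google.protobuf.json_format.ParseDict and only pre-converts the timestamp to an RFC 3339 string, since ParseDict expects timestamps as strings rather than datetime objects. A rough sketch of that delegation, with illustrative values:

# Illustrative only: ParseDict maps camelCase JSON-API keys ('logName',
# 'textPayload', ...) onto the protobuf's snake_case fields.
from google.logging.v2.log_entry_pb2 import LogEntry
from google.protobuf.json_format import ParseDict

entry_pb = LogEntry()
ParseDict({'logName': 'projects/my-project/logs/syslog',
           'textPayload': 'something happened'}, entry_pb)
assert entry_pb.log_name == 'projects/my-project/logs/syslog'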
Example #6
    def test_list_entries_no_paging(self):
        import datetime

        from google.api.monitored_resource_pb2 import MonitoredResource
        from google.gax import INITIAL_PAGE
        from google.logging.v2.log_entry_pb2 import LogEntry

        from google.cloud._helpers import _datetime_to_pb_timestamp
        from google.cloud._testing import _GAXPageIterator
        from google.cloud.logging import DESCENDING

        TOKEN = 'TOKEN'
        TEXT = 'TEXT'
        resource_pb = MonitoredResource(type='global')
        timestamp_pb = _datetime_to_pb_timestamp(datetime.datetime.utcnow())
        entry_pb = LogEntry(log_name=self.LOG_NAME,
                            resource=resource_pb,
                            timestamp=timestamp_pb,
                            text_payload=TEXT)
        response = _GAXPageIterator([entry_pb], TOKEN)
        gax_api = _GAXLoggingAPI(_list_log_entries_response=response)
        api = self._makeOne(gax_api)

        entries, next_token = api.list_entries([self.PROJECT], self.FILTER,
                                               DESCENDING)

        self.assertEqual(len(entries), 1)
        entry = entries[0]
        self.assertIsInstance(entry, dict)
        self.assertEqual(entry['logName'], self.LOG_NAME)
        self.assertEqual(entry['resource'], {'type': 'global'})
        self.assertEqual(entry['textPayload'], TEXT)
        self.assertEqual(next_token, TOKEN)

        projects, filter_, order_by, page_size, options = (
            gax_api._list_log_entries_called_with)
        self.assertEqual(projects, [self.PROJECT])
        self.assertEqual(filter_, self.FILTER)
        self.assertEqual(order_by, DESCENDING)
        self.assertEqual(page_size, 0)
        self.assertIs(options.page_token, INITIAL_PAGE)
Example #7
    def _list_entries_with_paging_helper(self, payload, struct_pb):
        import datetime

        from google.api.monitored_resource_pb2 import MonitoredResource
        from google.logging.v2.log_entry_pb2 import LogEntry
        from google.cloud._testing import _GAXPageIterator
        from google.cloud._helpers import _datetime_to_pb_timestamp

        SIZE = 23
        TOKEN = 'TOKEN'
        NEW_TOKEN = 'NEW_TOKEN'
        resource_pb = MonitoredResource(type='global')
        timestamp_pb = _datetime_to_pb_timestamp(datetime.datetime.utcnow())
        entry_pb = LogEntry(log_name=self.LOG_NAME,
                            resource=resource_pb,
                            timestamp=timestamp_pb,
                            json_payload=struct_pb)
        response = _GAXPageIterator([entry_pb], NEW_TOKEN)
        gax_api = _GAXLoggingAPI(_list_log_entries_response=response)
        api = self._makeOne(gax_api)

        entries, next_token = api.list_entries([self.PROJECT],
                                               page_size=SIZE,
                                               page_token=TOKEN)

        self.assertEqual(len(entries), 1)
        entry = entries[0]
        self.assertIsInstance(entry, dict)
        self.assertEqual(entry['logName'], self.LOG_NAME)
        self.assertEqual(entry['resource'], {'type': 'global'})
        self.assertEqual(entry['jsonPayload'], payload)
        self.assertEqual(next_token, NEW_TOKEN)

        projects, filter_, order_by, page_size, options = (
            gax_api._list_log_entries_called_with)
        self.assertEqual(projects, [self.PROJECT])
        self.assertEqual(filter_, '')
        self.assertEqual(order_by, '')
        self.assertEqual(page_size, SIZE)
        self.assertEqual(options.page_token, TOKEN)
Example #8
    def _make_log_entry_with_extras(self, labels, iid, type_url, now):
        from google.api.monitored_resource_pb2 import MonitoredResource
        from google.logging.v2.log_entry_pb2 import LogEntry
        from google.logging.v2.log_entry_pb2 import LogEntryOperation
        from google.logging.type.http_request_pb2 import HttpRequest
        from google.logging.type.log_severity_pb2 import WARNING
        from google.protobuf.any_pb2 import Any

        from google.cloud._helpers import _datetime_to_pb_timestamp

        resource_pb = MonitoredResource(type='global', labels=labels)
        proto_payload = Any(type_url=type_url)
        timestamp_pb = _datetime_to_pb_timestamp(now)
        request_pb = HttpRequest(
            request_url='http://example.com/requested',
            request_method='GET',
            status=200,
            referer='http://example.com/referer',
            user_agent='AGENT',
            cache_hit=True,
            request_size=256,
            response_size=1024,
            remote_ip='1.2.3.4',
        )
        operation_pb = LogEntryOperation(
            producer='PRODUCER',
            first=True,
            last=True,
            id='OPID',
        )
        entry_pb = LogEntry(log_name=self.LOG_NAME,
                            resource=resource_pb,
                            proto_payload=proto_payload,
                            timestamp=timestamp_pb,
                            severity=WARNING,
                            insert_id=iid,
                            http_request=request_pb,
                            labels=labels,
                            operation=operation_pb)
        return entry_pb