    def _Run(self, args):
        region = properties.VALUES.functions.region.Get()
        log_filter = [
            'resource.type="cloud_function"',
            'resource.labels.region="%s"' % region, 'logName:"cloud-functions"'
        ]

        if args.name:
            log_filter.append('resource.labels.function_name="%s"' % args.name)
        if args.execution_id:
            log_filter.append('labels.execution_id="%s"' % args.execution_id)
        if args.min_log_level:
            log_filter.append('severity>=%s' % args.min_log_level.upper())
        if args.start_time:
            order = 'ASC'
            log_filter.append('timestamp>="%s"' %
                              logging_util.FormatTimestamp(args.start_time))
        else:
            order = 'DESC'
        if args.end_time:
            log_filter.append('timestamp<="%s"' %
                              logging_util.FormatTimestamp(args.end_time))
        log_filter = ' '.join(log_filter)
        # TODO(b/36057251): Consider using paging for listing more than 1000 log
        # entries. However, reversing the order of received latest N entries before
        # a specified timestamp would be problematic with paging.

        entries = logging_common.FetchLogs(log_filter,
                                           order_by=order,
                                           limit=args.limit)

        if order == 'DESC':
            entries = reversed(
                list(entries))  # Force generator expansion with list.

        for entry in entries:
            row = {'log': entry.textPayload}
            if entry.severity:
                severity = str(entry.severity)
                if severity in flags.SEVERITIES:
                    # Use short form (first letter) for expected severities.
                    row['level'] = severity[0]
                else:
                    # Print full form of unexpected severities.
                    row['level'] = severity
            if entry.resource and entry.resource.labels:
                for label in entry.resource.labels.additionalProperties:
                    if label.key == 'function_name':
                        row['name'] = label.value
            if entry.labels:
                for label in entry.labels.additionalProperties:
                    if label.key == 'execution_id':
                        row['execution_id'] = label.value
            if entry.timestamp:
                row['time_utc'] = util.FormatTimestamp(entry.timestamp)
            yield row
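
For reference, this is roughly the filter string the _Run method above produces once the pieces are joined. The values are hypothetical placeholders (not SDK defaults), chosen only to show the shape of the result.

# Hypothetical values for --name, --execution-id and --min-log-level=error;
# the structure mirrors the log_filter list assembled in _Run above.
log_filter = ('resource.type="cloud_function" '
              'resource.labels.region="us-central1" '
              'logName:"cloud-functions" '
              'resource.labels.function_name="my-func" '
              'labels.execution_id="abc123" '
              'severity>=ERROR')
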
Example #2
    def Run(self, args):
        """This is what gets called when the user runs this command.

    Args:
      args: an argparse namespace. All the arguments that were provided to this
        command invocation.

    Yields:
      Objects representing log entries.
    """
        log.warn('This command is deprecated. '
                 'Please use `gcloud preview app logs read` instead.')
        logging_client = self.context['logging_client']
        logging = self.context['logging_messages']

        project = properties.VALUES.core.project.Get(required=True)

        log_filter = (
            'resource.type="cloud_function" '
            'labels."cloudfunctions.googleapis.com/region"="{0}" '.format(
                args.region))
        if args.name:
            log_filter += (
                'labels."cloudfunctions.googleapis.com/function_name"="{0}" '.
                format(args.name))
        if args.execution_id:
            log_filter += 'labels."execution_id"="{0}" '.format(
                args.execution_id)
        if args.min_log_level:
            log_filter += 'severity>={0} '.format(args.min_log_level)
        if args.start_time:
            order = 'asc'
            start_time = args.start_time.strftime('%Y-%m-%dT%H:%M:%S.%fZ')
            log_filter += 'timestamp>="{0}" '.format(start_time)
        else:
            order = 'desc'
        if args.end_time:
            end_time = args.end_time.strftime('%Y-%m-%dT%H:%M:%S.%fZ')
            log_filter += 'timestamp<="{0}" '.format(end_time)
        # TODO(user): Consider using paging for listing more than 1000 log entries.
        # However, reversing the order of received latest N entries before a
        # specified timestamp would be problematic with paging.
        request = logging.ListLogEntriesRequest(
            projectIds=[project],
            filter=log_filter,
            orderBy='timestamp {0}'.format(order),
            pageSize=args.limit)
        response = logging_client.entries.List(request=request)

        entries = response.entries if order == 'asc' else reversed(
            response.entries)
        for entry in entries:
            row = dict(log=entry.textPayload)
            if entry.severity:
                severity = str(entry.severity)
                if severity in GetLogs.SEVERITIES:
                    # Use short form (first letter) for expected severities.
                    row['level'] = severity[0]
                else:
                    # Print full form of unexpected severities.
                    row['level'] = severity
            for label in entry.labels.additionalProperties:
                if label.key == 'cloudfunctions.googleapis.com/function_name':
                    row['name'] = label.value
                if label.key == 'execution_id':
                    row['execution_id'] = label.value
            if entry.timestamp:
                row['time_utc'] = util.FormatTimestamp(entry.timestamp)
            yield row
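
For comparison with the resource-label filter built in the first example above, here is a sketch of the filter this older implementation assembles; the region, function name and execution id are again hypothetical. In this variant the function metadata lives under fully qualified label keys rather than resource labels, and every appended clause keeps a trailing space.

# Hypothetical values for illustration only.
log_filter = ('resource.type="cloud_function" '
              'labels."cloudfunctions.googleapis.com/region"="us-central1" '
              'labels."cloudfunctions.googleapis.com/function_name"="my-func" '
              'labels."execution_id"="abc123" ')
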
Example #3
    def Run(self, args):
        """This is what gets called when the user runs this command.

    Args:
      args: an argparse namespace. All the arguments that were provided to this
        command invocation.

    Yields:
      Objects representing log entries.
    """
        log_filter = [
            'resource.type="cloud_function"',
            'resource.labels.region="%s"' % args.region
        ]

        if args.name:
            log_filter.append('resource.labels.function_name="%s"' % args.name)
        if args.execution_id:
            log_filter.append('labels.execution_id="%s"' % args.execution_id)
        if args.min_log_level:
            log_filter.append('severity>=%s' % args.min_log_level)
        if args.start_time:
            order = 'ASC'
            log_filter.append('timestamp>="%s"' %
                              logging_util.FormatTimestamp(args.start_time))
        else:
            order = 'DESC'
        if args.end_time:
            log_filter.append('timestamp<="%s"' %
                              logging_util.FormatTimestamp(args.end_time))
        log_filter = ' '.join(log_filter)
        # TODO(user): Consider using paging for listing more than 1000 log entries.
        # However, reversing the order of received latest N entries before a
        # specified timestamp would be problematic with paging.

        entries = logging_common.FetchLogs(log_filter,
                                           order_by=order,
                                           limit=args.limit)

        if order == 'DESC':
            entries = reversed(
                list(entries))  # Force generator expansion with list.

        for entry in entries:
            row = {'log': entry.textPayload}
            if entry.severity:
                severity = str(entry.severity)
                if severity in GetLogs.SEVERITIES:
                    # Use short form (first letter) for expected severities.
                    row['level'] = severity[0]
                else:
                    # Print full form of unexpected severities.
                    row['level'] = severity
            if entry.resource:
                for label in entry.resource.labels.additionalProperties:
                    if label.key == 'function_name':
                        row['name'] = label.value
            for label in entry.labels.additionalProperties:
                if label.key == 'execution_id':
                    row['execution_id'] = label.value
            if entry.timestamp:
                row['time_utc'] = util.FormatTimestamp(entry.timestamp)
            yield row
Example #4
    def testFormatTimestamp_unexpectedInput(self):
        formatted = util.FormatTimestamp('2015-12-01 12:34:56')
        self.assertEqual('2015-12-01 12:34:56', formatted)
Example #5
    def testFormatTimestamp_expectedInput(self):
        formatted = util.FormatTimestamp('2015-12-01T12:34:56.789012345Z')
        self.assertEqual('2015-12-01 12:34:56.789', formatted)
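
The two tests above pin down the expected behaviour of util.FormatTimestamp: an RFC 3339 timestamp is trimmed to millisecond precision with the 'T' and 'Z' separators replaced, while unexpected input is returned unchanged. Below is a minimal sketch consistent with those tests; it is an assumption, not the Cloud SDK's actual implementation.

import re


def format_timestamp(timestamp):
    """Sketch of a formatter satisfying the two tests above (assumption)."""
    match = re.match(
        r'(\d{4}-\d{2}-\d{2})T(\d{2}:\d{2}:\d{2})\.(\d{3})\d*Z$', timestamp)
    if not match:
        # Unexpected input is passed through unchanged.
        return timestamp
    # Keep millisecond precision and use a space as the date/time separator.
    return '{} {}.{}'.format(match.group(1), match.group(2), match.group(3))
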
Example #6
    def _Run(self, args):
        region = properties.VALUES.functions.region.Get()
        log_filter = [
            'resource.type="cloud_function"',
            'resource.labels.region="%s"' % region, 'logName:"cloud-functions"'
        ]

        if args.name:
            log_filter.append('resource.labels.function_name="%s"' % args.name)
        if args.execution_id:
            log_filter.append('labels.execution_id="%s"' % args.execution_id)
        if args.min_log_level:
            log_filter.append('severity>=%s' % args.min_log_level.upper())
        if args.start_time:
            order = 'ASC'
            log_filter.append('timestamp>="%s"' %
                              logging_util.FormatTimestamp(args.start_time))
        else:
            order = 'DESC'
        if args.end_time:
            log_filter.append('timestamp<="%s"' %
                              logging_util.FormatTimestamp(args.end_time))
        log_filter = ' '.join(log_filter)

        entries = logging_common.FetchLogs(log_filter,
                                           order_by=order,
                                           limit=args.limit)

        if order == 'DESC':
            entries = reversed(
                list(entries))  # Force generator expansion with list.

        for entry in entries:
            message = entry.textPayload
            if entry.jsonPayload:
                props = [
                    prop.value
                    for prop in entry.jsonPayload.additionalProperties
                    if prop.key == 'message'
                ]
                if len(props) == 1 and hasattr(props[0], 'string_value'):
                    message = props[0].string_value
            row = {'log': message}
            if entry.severity:
                severity = six.text_type(entry.severity)
                if severity in flags.SEVERITIES:
                    # Use short form (first letter) for expected severities.
                    row['level'] = severity[0]
                else:
                    # Print full form of unexpected severities.
                    row['level'] = severity
            if entry.resource and entry.resource.labels:
                for label in entry.resource.labels.additionalProperties:
                    if label.key == 'function_name':
                        row['name'] = label.value
            if entry.labels:
                for label in entry.labels.additionalProperties:
                    if label.key == 'execution_id':
                        row['execution_id'] = label.value
            if entry.timestamp:
                row['time_utc'] = util.FormatTimestamp(entry.timestamp)
            yield row
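
Finally, a minimal consumer sketch for the rows these commands yield. This is an assumption for illustration; in gcloud itself the surrounding command framework is responsible for rendering the yielded dicts.

def print_rows(rows):
    # Each row always carries 'log'; 'level', 'name', 'execution_id' and
    # 'time_utc' are only present when the log entry provided them.
    for row in rows:
        print('{} {} {} {}'.format(
            row.get('time_utc', ''),
            row.get('level', ''),
            row.get('name', ''),
            row['log']))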