def _Run(self, args):
        region = properties.VALUES.functions.region.Get()
        log_filter = [
            'resource.type="cloud_function"',
            'resource.labels.region="%s"' % region, 'logName:"cloud-functions"'
        ]

        if args.name:
            log_filter.append('resource.labels.function_name="%s"' % args.name)
        if args.execution_id:
            log_filter.append('labels.execution_id="%s"' % args.execution_id)
        if args.min_log_level:
            log_filter.append('severity>=%s' % args.min_log_level.upper())
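        # With an explicit start time, fetch oldest-first from that point;
        # otherwise fetch the most recent entries newest-first and reverse
        # them after fetching so the output is always chronological.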
        if args.start_time:
            order = 'ASC'
            log_filter.append('timestamp>="%s"' %
                              logging_util.FormatTimestamp(args.start_time))
        else:
            order = 'DESC'
        if args.end_time:
            log_filter.append('timestamp<="%s"' %
                              logging_util.FormatTimestamp(args.end_time))
        log_filter = ' '.join(log_filter)
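        # The space-joined clauses form an implicit AND in the Cloud Logging
        # filter syntax, e.g. (hypothetical values):
        #   resource.type="cloud_function" resource.labels.region="us-central1"
        #   logName:"cloud-functions" severity>=ERROR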
        # TODO(b/36057251): Consider using paging for listing more than 1000 log
        # entries. However, reversing the order of received latest N entries before
        # a specified timestamp would be problematic with paging.

        entries = logging_common.FetchLogs(log_filter,
                                           order_by=order,
                                           limit=args.limit)

        if order == 'DESC':
            entries = reversed(
                list(entries))  # Force generator expansion with list.

        for entry in entries:
            row = {'log': entry.textPayload}
            if entry.severity:
                severity = str(entry.severity)
                if severity in flags.SEVERITIES:
                    # Use short form (first letter) for expected severities.
                    row['level'] = severity[0]
                else:
                    # Print full form of unexpected severities.
                    row['level'] = severity
            if entry.resource and entry.resource.labels:
                for label in entry.resource.labels.additionalProperties:
                    if label.key == 'function_name':
                        row['name'] = label.value
            if entry.labels:
                for label in entry.labels.additionalProperties:
                    if label.key == 'execution_id':
                        row['execution_id'] = label.value
            if entry.timestamp:
                row['time_utc'] = util.FormatTimestamp(entry.timestamp)
            yield row
Example #2
  def Run(self, args):
    """This is what gets called when the user runs this command.

    Args:
      args: an argparse namespace. All the arguments that were provided to this
        command invocation.

    Returns:
      The list of log entries.
    """
    # Apply the freshness window only when sorting newest-first and the user
    # filter does not already constrain the timestamp.
    if (args.freshness and args.order == 'DESC' and
        (not args.log_filter or 'timestamp' not in args.log_filter)):
      # Argparser returns freshness in seconds.
      freshness = datetime.timedelta(seconds=args.freshness)
      # Cloud Logging uses timestamps in UTC timezone.
      last_timestamp = datetime.datetime.utcnow() - freshness
      # Construct timestamp filter.
      log_filter = ('timestamp>="%s"' % util.FormatTimestamp(last_timestamp))
      # Append any user supplied filters.
      if args.log_filter:
        log_filter += ' AND (%s)' % args.log_filter
    else:
      log_filter = args.log_filter

    parent = None
    if args.organization:
      parent = 'organizations/%s' % args.organization
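    # A parent of None is expected to make FetchLogs default to the active
    # project (an assumption about the common.FetchLogs helper, not shown here).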

    return common.FetchLogs(log_filter,
                            order_by=args.order,
                            limit=args.limit,
                            parent=parent)
Example #3
 def testReadLimit(self):
     default_timestamp = util.FormatTimestamp(fixture.MOCK_UTC_TIME -
                                              datetime.timedelta(days=1))
     self._setExpect('timestamp>="{0}"'.format(default_timestamp),
                     page_size=1)
     generator = self.RunLogging('read --limit 1 --format=disable')
     self.assertEqual(list(generator), self._entries[:1])
Example #4
 def testReadFilters(self):
     custom_timestamp = util.FormatTimestamp(fixture.MOCK_UTC_TIME -
                                             datetime.timedelta(hours=10))
     self._setExpect(
         'timestamp>="{0}" AND severity=INFO logName=my-log'.format(
             custom_timestamp))
     self.RunLogging('read "severity=INFO logName=my-log" --freshness=10h')
Example #5
    def _Run(self, args, is_alpha=False):
        # Apply the freshness window only when sorting newest-first and the
        # user filter does not already constrain the timestamp.
        if (args.freshness and args.order == 'desc' and
            (not args.log_filter or 'timestamp' not in args.log_filter)):
            # Argparser returns freshness in seconds.
            freshness = datetime.timedelta(seconds=args.freshness)
            # Cloud Logging uses timestamps in UTC timezone.
            last_timestamp = datetime.datetime.utcnow() - freshness
            # Construct timestamp filter.
            log_filter = ('timestamp>="%s"' %
                          util.FormatTimestamp(last_timestamp))
            # Append any user supplied filters.
            if args.log_filter:
                log_filter += ' AND %s' % args.log_filter
        else:
            log_filter = args.log_filter

        parent = util.GetParentFromArgs(args)
        if is_alpha and args.IsSpecified('location'):
            parent = util.CreateResourceName(
                util.CreateResourceName(
                    util.CreateResourceName(parent, 'locations', args.location),
                    'buckets', args.bucket),
                'views', args.view)
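            # The nested calls above build a view resource path of the form:
            #   <parent>/locations/<location>/buckets/<bucket>/views/<view>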
        return common.FetchLogs(log_filter,
                                order_by=args.order,
                                limit=args.limit,
                                parent=parent)
Example #6
 def testReadFolderEntries(self):
     default_timestamp = util.FormatTimestamp(fixture.MOCK_UTC_TIME -
                                              datetime.timedelta(days=1))
     default_filter = 'timestamp>="{0}"'.format(default_timestamp)
     self.mock_client_v2.entries.List.Expect(
         self.msgs.ListLogEntriesRequest(resourceNames=['folders/123'],
                                         filter=default_filter,
                                         orderBy='timestamp desc',
                                         pageSize=1000),
         self.msgs.ListLogEntriesResponse(entries=self._entries))
     generator = self.RunLogging('read --folder 123 --format=disable')
     self.assertEqual(list(generator), self._entries)
Example #7
    def Run(self, args):
        """This is what gets called when the user runs this command.

        Args:
          args: an argparse namespace. All the arguments that were provided to
            this command invocation.

        Returns:
          The list of log entries.
        """
        client = self.context['logging_client_v2beta1']
        messages = self.context['logging_messages_v2beta1']
        project = properties.VALUES.core.project.Get(required=True)

        if args.limit is not None and args.limit < 0:
            args.limit = None

        if args.order == 'DESC':
            order_by = 'timestamp desc'
        else:
            order_by = 'timestamp asc'
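        # Cloud Logging sorts on the entry timestamp; 'timestamp desc' returns
        # the newest entries first.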

        # Apply the freshness window only when sorting newest-first and the
        # user filter does not already constrain the timestamp.
        if (args.freshness and args.order == 'DESC' and
            (not args.log_filter or 'timestamp' not in args.log_filter)):
            # Argparser returns freshness in seconds.
            freshness = datetime.timedelta(seconds=args.freshness)
            # Cloud Logging uses timestamps in UTC timezone.
            last_timestamp = datetime.datetime.utcnow() - freshness
            # Construct timestamp filter.
            log_filter = ('timestamp>="%s"' %
                          util.FormatTimestamp(last_timestamp))
            # Append any user supplied filters.
            if args.log_filter:
                log_filter += ' AND (%s)' % args.log_filter
        else:
            log_filter = args.log_filter

        request = messages.ListLogEntriesRequest(projectIds=[project],
                                                 filter=log_filter,
                                                 orderBy=order_by)

        # The backend has an upper limit of 1000 for page_size.
        page_size = args.limit or 1000

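        # YieldFromList pages through the results lazily, mapping batch_size
        # onto the request's pageSize field via batch_size_attribute.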
        return list_pager.YieldFromList(client.entries,
                                        request,
                                        field='entries',
                                        limit=args.limit,
                                        batch_size=page_size,
                                        batch_size_attribute='pageSize')
Example #8
 def testReadFromView(self):
     default_timestamp = util.FormatTimestamp(fixture.MOCK_UTC_TIME -
                                              datetime.timedelta(days=1))
     self.mock_client_v2.entries.List.Expect(
         self.msgs.ListLogEntriesRequest(
             resourceNames=[
                 'projects/my-project/locations/my-location/'
                 'buckets/my-bucket/views/my-view'
             ],
             filter='timestamp>="{0}"'.format(default_timestamp),
             orderBy='timestamp desc',
             pageSize=1000),
         self.msgs.ListLogEntriesResponse(entries=self._entries))
     generator = self.RunLogging(
         'read --format=disable --location=my-location '
         '--bucket=my-bucket --view=my-view')
     self.assertEqual(list(generator), self._entries)
Example #9
def MakeTimestampFilters(args):
    """Create filters for the minimum log timestamp.

    This function creates a lower bound on the timestamp of log entries:
    a filter clause is returned only if order == 'desc' and 'timestamp' does
    not already appear in the log-filter argument.

    Args:
      args: An argparse namespace object.

    Returns:
      A list of strings that are clauses in a Cloud Logging filter expression.
    """
    if (args.order == 'desc'
            and (not args.log_filter or 'timestamp' not in args.log_filter)):
        # Argparser returns freshness in seconds.
        freshness = datetime.timedelta(seconds=args.freshness)
        # Cloud Logging uses timestamps in UTC timezone.
        last_timestamp = datetime.datetime.utcnow() - freshness
        # Construct timestamp filter.
        return ['timestamp>="%s"' % util.FormatTimestamp(last_timestamp)]
    else:
        return []
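
# A minimal usage sketch (args here is a hypothetical argparse namespace; the
# one-day freshness mirrors the defaults exercised by the tests above):
#
#   args = argparse.Namespace(order='desc', log_filter=None, freshness=86400)
#   filters = MakeTimestampFilters(args)
#   # -> ['timestamp>="<RFC 3339 timestamp from 24 hours ago>"']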
Example #10
 def testReadWithDefaultValues(self):
     default_timestamp = util.FormatTimestamp(fixture.MOCK_UTC_TIME -
                                              datetime.timedelta(days=1))
     self._setExpect('timestamp>="{0}"'.format(default_timestamp))
     generator = self.RunLogging('read --format=disable')
     self.assertEqual(list(generator), self._entries)
Example #11
    def Run(self, args):
        """This is what gets called when the user runs this command.

        Args:
          args: an argparse namespace. All the arguments that were provided to
            this command invocation.

        Yields:
          Objects representing log entries.
        """
        log_filter = [
            'resource.type="cloud_function"',
            'resource.labels.region="%s"' % args.region
        ]

        if args.name:
            log_filter.append('resource.labels.function_name="%s"' % args.name)
        if args.execution_id:
            log_filter.append('labels.execution_id="%s"' % args.execution_id)
        if args.min_log_level:
            log_filter.append('severity>=%s' % args.min_log_level)
        if args.start_time:
            order = 'ASC'
            log_filter.append('timestamp>="%s"' %
                              logging_util.FormatTimestamp(args.start_time))
        else:
            order = 'DESC'
        if args.end_time:
            log_filter.append('timestamp<="%s"' %
                              logging_util.FormatTimestamp(args.end_time))
        log_filter = ' '.join(log_filter)
        # TODO(user): Consider using paging for listing more than 1000 log entries.
        # However, reversing the order of received latest N entries before a
        # specified timestamp would be problematic with paging.

        entries = logging_common.FetchLogs(log_filter,
                                           order_by=order,
                                           limit=args.limit)

        if order == 'DESC':
            entries = reversed(
                list(entries))  # Force generator expansion with list.

        for entry in entries:
            row = {'log': entry.textPayload}
            if entry.severity:
                severity = str(entry.severity)
                if severity in GetLogs.SEVERITIES:
                    # Use short form (first letter) for expected severities.
                    row['level'] = severity[0]
                else:
                    # Print full form of unexpected severities.
                    row['level'] = severity
            if entry.resource and entry.resource.labels:
                for label in entry.resource.labels.additionalProperties:
                    if label.key == 'function_name':
                        row['name'] = label.value
            if entry.labels:
                for label in entry.labels.additionalProperties:
                    if label.key == 'execution_id':
                        row['execution_id'] = label.value
            if entry.timestamp:
                row['time_utc'] = util.FormatTimestamp(entry.timestamp)
            yield row
Example #12
def _Run(args, release_track):
    """Display log entries produced by Google Cloud Functions."""
    if args.execution_id:
        raise exceptions.FunctionsError(EXECUTION_ID_NOT_SUPPORTED)

    region = properties.VALUES.functions.region.GetOrFail()
    log_filter = [
        'resource.type="cloud_run_revision"',
        'resource.labels.location="%s"' % region,
        'logName:"run.googleapis.com"'
    ]

    if args.name:
        log_filter.append('resource.labels.service_name="%s"' % args.name)
    if args.min_log_level:
        log_filter.append('severity>=%s' % args.min_log_level.upper())

    log_filter.append('timestamp>="%s"' % logging_util.FormatTimestamp(
        args.start_time
        or datetime.datetime.utcnow() - datetime.timedelta(days=7)))
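    # Default to a seven-day lookback when --start-time is not provided.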

    if args.end_time:
        log_filter.append('timestamp<="%s"' %
                          logging_util.FormatTimestamp(args.end_time))

    log_filter = ' '.join(log_filter)

    entries = list(
        logging_common.FetchLogs(log_filter, order_by='DESC',
                                 limit=args.limit))

    if args.name and not entries:
        # Check if the function even exists in the given region.
        try:
            client = api_util.GetClientInstance(release_track=release_track)
            messages = api_util.GetMessagesModule(release_track=release_track)
            client.projects_locations_functions.Get(
                messages.CloudfunctionsProjectsLocationsFunctionsGetRequest(
                    name='projects/%s/locations/%s/functions/%s' %
                    (properties.VALUES.core.project.GetOrFail(), region,
                     args.name)))
        except (HttpForbiddenError, HttpNotFoundError):
            # The function doesn't exist in the given region.
            log.warning(
                'There is no function named `%s` in region `%s`. Perhaps you '
                'meant to specify `--region` or update the `functions/region` '
                'configuration property?' % (args.name, region))

    for entry in entries:
        message = entry.textPayload
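        # Structured (JSON) entries carry their display text under a 'message'
        # key; prefer it over textPayload when exactly one such value exists.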
        if entry.jsonPayload:
            props = [
                prop.value for prop in entry.jsonPayload.additionalProperties
                if prop.key == 'message'
            ]
            if len(props) == 1 and hasattr(props[0], 'string_value'):
                message = props[0].string_value
        row = {'log': message}
        if entry.severity:
            severity = six.text_type(entry.severity)
            if severity in flags.SEVERITIES:
                # Use short form (first letter) for expected severities.
                row['level'] = severity[0]
            else:
                # Print full form of unexpected severities.
                row['level'] = severity
        if entry.resource and entry.resource.labels:
            for label in entry.resource.labels.additionalProperties:
                if label.key == 'service_name':
                    row['name'] = label.value
        if entry.timestamp:
            row['time_utc'] = api_util.FormatTimestamp(entry.timestamp)
        yield row
Example #13
  def _Run(self, args):
    region = properties.VALUES.functions.region.Get()
    log_filter = [
        'resource.type="cloud_function"',
        'resource.labels.region="%s"' % region, 'logName:"cloud-functions"'
    ]

    if args.name:
      log_filter.append('resource.labels.function_name="%s"' % args.name)
    if args.execution_id:
      log_filter.append('labels.execution_id="%s"' % args.execution_id)
    if args.min_log_level:
      log_filter.append('severity>=%s' % args.min_log_level.upper())

    log_filter.append('timestamp>="%s"' % logging_util.FormatTimestamp(
        args.start_time or
        datetime.datetime.utcnow() - datetime.timedelta(days=7)))

    if args.end_time:
      log_filter.append('timestamp<="%s"' %
                        logging_util.FormatTimestamp(args.end_time))

    log_filter = ' '.join(log_filter)

    entries = list(
        logging_common.FetchLogs(log_filter, order_by='ASC', limit=args.limit))
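    # Entries are fetched oldest-first ('ASC'), so the output is already in
    # chronological order and needs no reversal.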

    if args.name and not entries:
      # Check if the function even exists in the given region.
      try:
        client = util.GetApiClientInstance()
        messages = client.MESSAGES_MODULE
        client.projects_locations_functions.Get(
            messages.CloudfunctionsProjectsLocationsFunctionsGetRequest(
                name='projects/%s/locations/%s/functions/%s' %
                (properties.VALUES.core.project.Get(required=True), region,
                 args.name)))
      except (HttpForbiddenError, HttpNotFoundError):
        # The function doesn't exist in the given region.
        log.warning(
            'There is no function named `%s` in region `%s`. Perhaps you '
            'meant to specify `--region` or update the `functions/region` '
            'configuration property?' % (args.name, region))

    for entry in entries:
      message = entry.textPayload
      if entry.jsonPayload:
        props = [
            prop.value
            for prop in entry.jsonPayload.additionalProperties
            if prop.key == 'message'
        ]
        if len(props) == 1 and hasattr(props[0], 'string_value'):
          message = props[0].string_value
      row = {'log': message}
      if entry.severity:
        severity = six.text_type(entry.severity)
        if severity in flags.SEVERITIES:
          # Use short form (first letter) for expected severities.
          row['level'] = severity[0]
        else:
          # Print full form of unexpected severities.
          row['level'] = severity
      if entry.resource and entry.resource.labels:
        for label in entry.resource.labels.additionalProperties:
          if label.key == 'function_name':
            row['name'] = label.value
      if entry.labels:
        for label in entry.labels.additionalProperties:
          if label.key == 'execution_id':
            row['execution_id'] = label.value
      if entry.timestamp:
        row['time_utc'] = util.FormatTimestamp(entry.timestamp)
      yield row
Example #14
    def _Run(self, args):
        region = properties.VALUES.functions.region.Get()
        log_filter = [
            'resource.type="cloud_function"',
            'resource.labels.region="%s"' % region, 'logName:"cloud-functions"'
        ]

        if args.name:
            log_filter.append('resource.labels.function_name="%s"' % args.name)
        if args.execution_id:
            log_filter.append('labels.execution_id="%s"' % args.execution_id)
        if args.min_log_level:
            log_filter.append('severity>=%s' % args.min_log_level.upper())
        if args.start_time:
            order = 'ASC'
            log_filter.append('timestamp>="%s"' %
                              logging_util.FormatTimestamp(args.start_time))
        else:
            order = 'DESC'
        if args.end_time:
            log_filter.append('timestamp<="%s"' %
                              logging_util.FormatTimestamp(args.end_time))
        log_filter = ' '.join(log_filter)

        entries = logging_common.FetchLogs(log_filter,
                                           order_by=order,
                                           limit=args.limit)

        if order == 'DESC':
            entries = reversed(
                list(entries))  # Force generator expansion with list.

        for entry in entries:
            message = entry.textPayload
            if entry.jsonPayload:
                props = [
                    prop.value
                    for prop in entry.jsonPayload.additionalProperties
                    if prop.key == 'message'
                ]
                if len(props) == 1 and hasattr(props[0], 'string_value'):
                    message = props[0].string_value
            row = {'log': message}
            if entry.severity:
                severity = six.text_type(entry.severity)
                if severity in flags.SEVERITIES:
                    # Use short form (first letter) for expected severities.
                    row['level'] = severity[0]
                else:
                    # Print full form of unexpected severities.
                    row['level'] = severity
            if entry.resource and entry.resource.labels:
                for label in entry.resource.labels.additionalProperties:
                    if label.key == 'function_name':
                        row['name'] = label.value
            if entry.labels:
                for label in entry.labels.additionalProperties:
                    if label.key == 'execution_id':
                        row['execution_id'] = label.value
            if entry.timestamp:
                row['time_utc'] = util.FormatTimestamp(entry.timestamp)
            yield row