def testBasicCalls(self):
  # FetchLogs() returns a generator so we use list() to force its execution.
  self._setExpect(filter_spec=None)
  list(common.FetchLogs())
  self._setExpect(filter_spec='myField=hello')
  list(common.FetchLogs(log_filter='myField=hello'))
def _Run(self, args, is_alpha=False):
  # Take into account freshness only if all requirements are met.
  if (args.freshness and args.order == 'desc' and
      (not args.log_filter or 'timestamp' not in args.log_filter)):
    # Argparser returns freshness in seconds.
    freshness = datetime.timedelta(seconds=args.freshness)
    # Cloud Logging uses timestamps in UTC timezone.
    last_timestamp = datetime.datetime.utcnow() - freshness
    # Construct timestamp filter.
    log_filter = 'timestamp>="%s"' % util.FormatTimestamp(last_timestamp)
    # Append any user supplied filters.
    if args.log_filter:
      log_filter += ' AND %s' % args.log_filter
  else:
    log_filter = args.log_filter
  parent = util.GetParentFromArgs(args)
  if is_alpha and args.IsSpecified('location'):
    parent = util.CreateResourceName(
        util.CreateResourceName(
            util.CreateResourceName(parent, 'locations', args.location),
            'buckets', args.bucket), 'views', args.view)
  return common.FetchLogs(
      log_filter, order_by=args.order, limit=args.limit, parent=parent)
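# A minimal, hedged sketch of the freshness-to-filter translation in _Run
# above. _freshness_filter is a hypothetical helper, and the RFC 3339
# strftime pattern is an illustrative stand-in for util.FormatTimestamp,
# whose exact output format is not shown here.
import datetime


def _freshness_filter(freshness_seconds, user_filter=None):
  """Builds a lower-bound timestamp filter from a freshness window."""
  last_timestamp = (datetime.datetime.utcnow() -
                    datetime.timedelta(seconds=freshness_seconds))
  log_filter = 'timestamp>="%s"' % last_timestamp.strftime(
      '%Y-%m-%dT%H:%M:%S.%fZ')
  if user_filter:
    log_filter += ' AND %s' % user_filter
  return log_filter

# _freshness_filter(3600, 'severity>=ERROR') would yield something like:
#   timestamp>="2024-01-01T11:00:00.000000Z" AND severity>=ERROR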
def Print(self):
  """Print GCL logs to the console."""
  parent = 'projects/{project_id}'.format(project_id=self.project_id)
  log_filter = (
      'logName="projects/{project_id}/logs/cloudbuild" AND '
      'resource.type="build" AND '
      # timestamp needed for faster querying in GCL
      'timestamp>="{timestamp}" AND '
      'resource.labels.build_id="{build_id}"').format(
          project_id=self.project_id,
          timestamp=self.timestamp,
          build_id=self.build_id)
  if self.is_cb4a:
    log_filter = ('labels."k8s-pod/tekton_dev/taskRun"="{build_id}" AND '
                  'timestamp>="{timestamp}"').format(
                      build_id=self.build_id, timestamp=self.timestamp)
  output_logs = common.FetchLogs(
      log_filter=log_filter, order_by='asc', parent=parent)
  self._PrintFirstLine()
  for output in output_logs:
    text = self._ValidateScreenReader(output.textPayload)
    self._PrintLogLine(text)
  self._PrintLastLine()
def Print(self):
  """Print GCL logs to the console."""
  parent = 'projects/{project_id}'.format(project_id=self.project_id)
  log_filter = (
      'logName="projects/{project_id}/logs/cloudbuild" AND '
      'resource.type="build" AND '
      # timestamp needed for faster querying in GCL
      'timestamp>="{timestamp}" AND '
      'resource.labels.build_id="{build_id}"').format(
          project_id=self.project_id,
          timestamp=self.timestamp,
          build_id=self.build_id)
  if self.is_cb4a:
    # The labels starting with 'k8s-pod/' in the log entries from GKE-on-GCP
    # clusters are different from other labels. The dots '.' in the labels
    # are converted to '_'. For example, 'k8s-pod/tekton.dev/taskRun' is
    # converted to 'k8s-pod/tekton_dev/taskRun'.
    log_filter = (
        '(labels."k8s-pod/tekton.dev/taskRun"="{build_id}" OR '
        'labels."k8s-pod/tekton_dev/taskRun"="{build_id}") AND '
        'timestamp>="{timestamp}"').format(
            build_id=self.build_id, timestamp=self.timestamp)
  output_logs = common.FetchLogs(
      log_filter=log_filter, order_by='asc', parent=parent)
  self._PrintFirstLine()
  for output in output_logs:
    text = self._ValidateScreenReader(output.textPayload)
    self._PrintLogLine(text)
  self._PrintLastLine()
def Run(self, args):
  """This is what gets called when the user runs this command.

  The matching log entries are printed to the console, newest last.

  Args:
    args: an argparse namespace. All the arguments that were provided to this
      command invocation.
  """
  printer = logs_util.LogPrinter()
  printer.RegisterFormatter(logs_util.FormatRequestLogEntry)
  printer.RegisterFormatter(logs_util.FormatNginxLogEntry)
  printer.RegisterFormatter(logs_util.FormatAppEntry)
  project = properties.VALUES.core.project.Get(required=True)
  filters = logs_util.GetFilters(project, args.logs, args.service,
                                 args.version, args.level)
  lines = []
  # pylint: disable=g-builtin-op, For the .keys() method
  for entry in common.FetchLogs(log_filter=' AND '.join(filters),
                                order_by='DESC',
                                limit=args.limit):
    lines.append(printer.Format(entry))
  for line in reversed(lines):
    log.out.Print(line)
def Run(self, args):
  """This is what gets called when the user runs this command.

  Args:
    args: an argparse namespace. All the arguments that were provided to this
      command invocation.

  Returns:
    The list of log entries.
  """
  # Take into account freshness only if all requirements are met.
  if (args.freshness and args.order == 'DESC' and
      (not args.log_filter or 'timestamp' not in args.log_filter)):
    # Argparser returns freshness in seconds.
    freshness = datetime.timedelta(seconds=args.freshness)
    # Cloud Logging uses timestamps in UTC timezone.
    last_timestamp = datetime.datetime.utcnow() - freshness
    # Construct timestamp filter.
    log_filter = 'timestamp>="%s"' % util.FormatTimestamp(last_timestamp)
    # Append any user supplied filters.
    if args.log_filter:
      log_filter += ' AND (%s)' % args.log_filter
  else:
    log_filter = args.log_filter
  parent = None
  if args.organization:
    parent = 'organizations/%s' % args.organization
  return common.FetchLogs(
      log_filter, order_by=args.order, limit=args.limit, parent=parent)
def Run(self, args):
  filters = [args.log_filter] if args.IsSpecified('log_filter') else []
  filters.append('resource.labels.service_name = "%s"' % args.service)
  filters += read_logs_lib.MakeTimestampFilters(args)
  return common.FetchLogs(read_logs_lib.JoinFilters(filters),
                          order_by=args.order,
                          limit=args.limit)
def _Run(self, args):
  region = properties.VALUES.functions.region.Get()
  log_filter = [
      'resource.type="cloud_function"',
      'resource.labels.region="%s"' % region,
      'logName:"cloud-functions"'
  ]
  if args.name:
    log_filter.append('resource.labels.function_name="%s"' % args.name)
  if args.execution_id:
    log_filter.append('labels.execution_id="%s"' % args.execution_id)
  if args.min_log_level:
    log_filter.append('severity>=%s' % args.min_log_level.upper())
  if args.start_time:
    order = 'ASC'
    log_filter.append('timestamp>="%s"' %
                      logging_util.FormatTimestamp(args.start_time))
  else:
    order = 'DESC'
  if args.end_time:
    log_filter.append('timestamp<="%s"' %
                      logging_util.FormatTimestamp(args.end_time))
  log_filter = ' '.join(log_filter)

  # TODO(b/36057251): Consider using paging for listing more than 1000 log
  # entries. However, reversing the order of received latest N entries before
  # a specified timestamp would be problematic with paging.

  entries = logging_common.FetchLogs(log_filter, order_by=order,
                                     limit=args.limit)

  if order == 'DESC':
    entries = reversed(list(entries))  # Force generator expansion with list.

  for entry in entries:
    row = {'log': entry.textPayload}
    if entry.severity:
      severity = str(entry.severity)
      if severity in flags.SEVERITIES:
        # Use short form (first letter) for expected severities.
        row['level'] = severity[0]
      else:
        # Print full form of unexpected severities.
        row['level'] = severity
    if entry.resource and entry.resource.labels:
      for label in entry.resource.labels.additionalProperties:
        if label.key == 'function_name':
          row['name'] = label.value
    if entry.labels:
      for label in entry.labels.additionalProperties:
        if label.key == 'execution_id':
          row['execution_id'] = label.value
    if entry.timestamp:
      row['time_utc'] = util.FormatTimestamp(entry.timestamp)
    yield row
def GetLogs(self, log_position):
  """Retrieve a batch of logs."""
  filters = [
      'resource.type="ml_job"',
      'resource.labels.job_id="{0}"'.format(self.job_id),
      log_position.GetFilterLowerBound(),
      log_position.GetFilterUpperBound(datetime.datetime.utcnow())
  ]
  return logging_common.FetchLogs(log_filter=' AND '.join(filters),
                                  order_by='ASC',
                                  limit=self.LOG_BATCH_SIZE)
def Print(self):
  """Print GCL logs to the console."""
  output_logs = common.FetchLogs(
      log_filter=self.log_filter, order_by='asc', parent=self.parent)
  self._PrintFirstLine(' REMOTE RUN OUTPUT ')
  for output in output_logs:
    text = self._ValidateScreenReader(output.textPayload)
    self._PrintLogLine(text)
  self._PrintLastLine()
def GetLogs(self, log_position, utcnow=None):
  """Retrieve a batch of logs."""
  if utcnow is None:
    utcnow = datetime.datetime.utcnow()
  filters = ['resource.type="ml_job"',
             'resource.labels.job_id="{0}"'.format(self.job_id),
             log_position.GetFilterLowerBound(),
             log_position.GetFilterUpperBound(utcnow)]
  if self.task_name:
    filters.append('resource.labels.task_name="{}"'.format(self.task_name))
  return logging_common.FetchLogs(
      log_filter=' AND '.join(filters),
      order_by='ASC',
      limit=self.LOG_BATCH_SIZE)
def _Run(self, args):
  filter_clauses = read_logs_lib.MakeTimestampFilters(args)
  filter_clauses += [args.log_filter] if args.log_filter else []
  parent = util.GetParentFromArgs(args)
  if args.IsSpecified('location'):
    parent = util.CreateResourceName(
        util.CreateResourceName(
            util.CreateResourceName(parent, 'locations', args.location),
            'buckets', args.bucket), 'views', args.view)
  return common.FetchLogs(
      read_logs_lib.JoinFilters(filter_clauses, operator='AND') or None,
      order_by=args.order,
      limit=args.limit,
      parent=parent)
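# For reference, the nested CreateResourceName calls above extend the parent
# one level at a time. Assuming CreateResourceName(parent, type, id) simply
# appends '/type/id' to the parent (an inference from the usage, not from its
# source), the resulting view name would look like:
#   projects/my-project/locations/global/buckets/my-bucket/views/my-view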
def GetLogs(self):
  """Retrieves a batch of logs.

  After we fetch the logs, we ensure that none of the logs have been seen
  before. Along the way, we update the most recent timestamp.

  Returns:
    A list of valid log entries.
  """
  utcnow = datetime.datetime.utcnow()
  lower_filter = self.log_position.GetFilterLowerBound()
  upper_filter = self.log_position.GetFilterUpperBound(utcnow)
  new_filter = self.base_filters + [lower_filter, upper_filter]
  entries = logging_common.FetchLogs(
      log_filter=' AND '.join(new_filter),
      order_by='ASC',
      limit=self.LOG_BATCH_SIZE)
  return [entry for entry in entries
          if self.log_position.Update(entry.timestamp, entry.insertId)]
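# A hedged sketch of the deduplication contract GetLogs depends on:
# log_position.Update() must return True only for entries not seen before,
# advancing the position as it goes. _SimpleLogPosition is a hypothetical
# stand-in inferred from the usage above, not the real implementation.
class _SimpleLogPosition(object):
  """Tracks the newest timestamp seen and the insertIds observed at it."""

  def __init__(self):
    self.timestamp = None
    self.insert_ids = set()

  def Update(self, timestamp, insert_id):
    if timestamp != self.timestamp:
      # A new (later, given ASC ordering) timestamp: reset the seen set.
      self.timestamp = timestamp
      self.insert_ids = {insert_id}
      return True
    if insert_id in self.insert_ids:
      return False  # Duplicate from an overlapping fetch window.
    self.insert_ids.add(insert_id)
    return True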
def _GetTailStartingTimestamp(filters, offset=None):
  """Returns the starting timestamp to start streaming logs from.

  Args:
    filters: [str], existing filters, should not contain timestamp
      constraints.
    offset: int, how many entries ago we should pick the starting timestamp.
      If not provided, unix time zero will be returned.

  Returns:
    str, A timestamp that can be used as lower bound or None if no lower
    bound is necessary.
  """
  if not offset:
    return None
  entries = list(
      logging_common.FetchLogs(log_filter=' AND '.join(filters),
                               order_by='DESC',
                               limit=offset))
  if len(entries) < offset:
    return None
  return entries[-1].timestamp
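# Hypothetical usage of _GetTailStartingTimestamp: anchor tailing ten entries
# back, then fold the lower bound into the filter list (the filter string
# here is illustrative).
filters = ['resource.type="gae_app"']
start_timestamp = _GetTailStartingTimestamp(filters, offset=10)
if start_timestamp:
  filters.append('timestamp>="%s"' % start_timestamp)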
def Run(self, args):
  filters = [args.log_filter] if args.IsSpecified('log_filter') else []
  filters.append('resource.type = %s \n' % 'cloud_run_revision')
  filters.append('resource.labels.service_name = %s \n' % args.service)
  filters.append('resource.labels.location = %s \n' %
                 flags.GetRegion(args, prompt=False))
  filters.append('severity >= DEFAULT')
  filters += read_logs_lib.MakeTimestampFilters(args)
  lines = []
  logs = common.FetchLogs(read_logs_lib.JoinFilters(filters),
                          order_by=args.order,
                          limit=args.limit)
  for log_line in logs:
    output_log = FormatLog(log_line)
    if output_log:
      lines.append(output_log)
  for line in reversed(lines):
    log.out.Print(line)
def Run(self, args):
  """This is what gets called when the user runs this command.

  The matching log entries are printed to the console, newest last.

  Args:
    args: an argparse namespace. All the arguments that were provided to this
      command invocation.
  """
  # Logging API filters, later to be AND-joined.
  filters = ['resource.type="gae_app"']

  # Argument handling.
  if args.service:
    filters.append('resource.labels.module_id="{0}"'.format(args.service))
  if args.version:
    filters.append('resource.labels.version_id="{0}"'.format(args.version))
  if args.level != 'any':
    filters.append('severity>={0}'.format(args.level.upper()))

  printer = logs_util.LogPrinter()
  printer.RegisterFormatter(logs_util.FormatRequestLogEntry)
  printer.RegisterFormatter(logs_util.FormatAppEntry)
  lines = []
  log_id = lambda log_short: 'appengine.googleapis.com/%s' % log_short
  # Already sorted here, so there is no need to sort again below.
  log_ids = sorted(log_id(log_short) for log_short in args.logs)
  # pylint: disable=g-builtin-op, For the .keys() method
  for entry in common.FetchLogs(log_filter=' AND '.join(filters),
                                log_ids=log_ids,
                                order_by='DESC',
                                limit=args.limit):
    lines.append(printer.Format(entry))
  for line in reversed(lines):
    log.out.Print(line)
def testOrdering(self):
  self._setExpect(filter_spec=None)
  list(common.FetchLogs(order_by='desc'))
  self._setExpect(filter_spec=None, order_by='timestamp asc')
  list(common.FetchLogs(order_by='asc'))
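# What the expectations above imply (inferred from the test, not from
# FetchLogs' source): 'desc' matches the backend's default ordering, so no
# orderBy is set on the request, while 'asc' is translated to the Logging
# API's 'timestamp asc'.
_EXPECTED_ORDER_BY = {'desc': None, 'asc': 'timestamp asc'}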
def _Run(args, release_track):
  """Display log entries produced by Google Cloud Functions."""
  if args.execution_id:
    raise exceptions.FunctionsError(EXECUTION_ID_NOT_SUPPORTED)
  region = properties.VALUES.functions.region.GetOrFail()
  log_filter = [
      'resource.type="cloud_run_revision"',
      'resource.labels.location="%s"' % region,
      'logName:"run.googleapis.com"'
  ]
  if args.name:
    log_filter.append('resource.labels.service_name="%s"' % args.name)
  if args.min_log_level:
    log_filter.append('severity>=%s' % args.min_log_level.upper())
  log_filter.append('timestamp>="%s"' % logging_util.FormatTimestamp(
      args.start_time or
      datetime.datetime.utcnow() - datetime.timedelta(days=7)))
  if args.end_time:
    log_filter.append('timestamp<="%s"' %
                      logging_util.FormatTimestamp(args.end_time))
  log_filter = ' '.join(log_filter)

  entries = list(
      logging_common.FetchLogs(log_filter, order_by='DESC', limit=args.limit))

  if args.name and not entries:
    # Check if the function even exists in the given region.
    try:
      client = api_util.GetClientInstance(release_track=release_track)
      messages = api_util.GetMessagesModule(release_track=release_track)
      client.projects_locations_functions.Get(
          messages.CloudfunctionsProjectsLocationsFunctionsGetRequest(
              name='projects/%s/locations/%s/functions/%s' %
              (properties.VALUES.core.project.GetOrFail(), region,
               args.name)))
    except (HttpForbiddenError, HttpNotFoundError):
      # The function doesn't exist in the given region.
      log.warning(
          'There is no function named `%s` in region `%s`. Perhaps you '
          'meant to specify `--region` or update the `functions/region` '
          'configuration property?' % (args.name, region))

  for entry in entries:
    message = entry.textPayload
    if entry.jsonPayload:
      props = [
          prop.value
          for prop in entry.jsonPayload.additionalProperties
          if prop.key == 'message'
      ]
      if len(props) == 1 and hasattr(props[0], 'string_value'):
        message = props[0].string_value
    row = {'log': message}
    if entry.severity:
      severity = six.text_type(entry.severity)
      if severity in flags.SEVERITIES:
        # Use short form (first letter) for expected severities.
        row['level'] = severity[0]
      else:
        # Print full form of unexpected severities.
        row['level'] = severity
    if entry.resource and entry.resource.labels:
      for label in entry.resource.labels.additionalProperties:
        if label.key == 'service_name':
          row['name'] = label.value
    if entry.timestamp:
      row['time_utc'] = api_util.FormatTimestamp(entry.timestamp)
    yield row
def _Run(self, args):
  region = properties.VALUES.functions.region.Get()
  log_filter = [
      'resource.type="cloud_function"',
      'resource.labels.region="%s"' % region,
      'logName:"cloud-functions"'
  ]
  if args.name:
    log_filter.append('resource.labels.function_name="%s"' % args.name)
  if args.execution_id:
    log_filter.append('labels.execution_id="%s"' % args.execution_id)
  if args.min_log_level:
    log_filter.append('severity>=%s' % args.min_log_level.upper())
  log_filter.append('timestamp>="%s"' % logging_util.FormatTimestamp(
      args.start_time or
      datetime.datetime.utcnow() - datetime.timedelta(days=7)))
  if args.end_time:
    log_filter.append('timestamp<="%s"' %
                      logging_util.FormatTimestamp(args.end_time))
  log_filter = ' '.join(log_filter)

  entries = list(
      logging_common.FetchLogs(log_filter, order_by='ASC', limit=args.limit))

  if args.name and not entries:
    # Check if the function even exists in the given region.
    try:
      client = util.GetApiClientInstance()
      messages = client.MESSAGES_MODULE
      client.projects_locations_functions.Get(
          messages.CloudfunctionsProjectsLocationsFunctionsGetRequest(
              name='projects/%s/locations/%s/functions/%s' %
              (properties.VALUES.core.project.Get(required=True), region,
               args.name)))
    except (HttpForbiddenError, HttpNotFoundError):
      # The function doesn't exist in the given region.
      log.warning(
          'There is no function named `%s` in region `%s`. Perhaps you '
          'meant to specify `--region` or update the `functions/region` '
          'configuration property?' % (args.name, region))

  for entry in entries:
    message = entry.textPayload
    if entry.jsonPayload:
      props = [
          prop.value
          for prop in entry.jsonPayload.additionalProperties
          if prop.key == 'message'
      ]
      if len(props) == 1 and hasattr(props[0], 'string_value'):
        message = props[0].string_value
    row = {'log': message}
    if entry.severity:
      severity = six.text_type(entry.severity)
      if severity in flags.SEVERITIES:
        # Use short form (first letter) for expected severities.
        row['level'] = severity[0]
      else:
        # Print full form of unexpected severities.
        row['level'] = severity
    if entry.resource and entry.resource.labels:
      for label in entry.resource.labels.additionalProperties:
        if label.key == 'function_name':
          row['name'] = label.value
    if entry.labels:
      for label in entry.labels.additionalProperties:
        if label.key == 'execution_id':
          row['execution_id'] = label.value
    if entry.timestamp:
      row['time_utc'] = util.FormatTimestamp(entry.timestamp)
    yield row
def Run(self, args):
  """This is what gets called when the user runs this command.

  Args:
    args: an argparse namespace. All the arguments that were provided to this
      command invocation.

  Yields:
    Objects representing log entries.
  """
  log_filter = [
      'resource.type="cloud_function"',
      'resource.labels.region="%s"' % args.region
  ]
  if args.name:
    log_filter.append('resource.labels.function_name="%s"' % args.name)
  if args.execution_id:
    log_filter.append('labels.execution_id="%s"' % args.execution_id)
  if args.min_log_level:
    log_filter.append('severity>=%s' % args.min_log_level)
  if args.start_time:
    order = 'ASC'
    log_filter.append('timestamp>="%s"' %
                      logging_util.FormatTimestamp(args.start_time))
  else:
    order = 'DESC'
  if args.end_time:
    log_filter.append('timestamp<="%s"' %
                      logging_util.FormatTimestamp(args.end_time))
  log_filter = ' '.join(log_filter)

  # TODO(user): Consider using paging for listing more than 1000 log entries.
  # However, reversing the order of received latest N entries before a
  # specified timestamp would be problematic with paging.

  entries = logging_common.FetchLogs(log_filter, order_by=order,
                                     limit=args.limit)

  if order == 'DESC':
    entries = reversed(list(entries))  # Force generator expansion with list.

  for entry in entries:
    row = {'log': entry.textPayload}
    if entry.severity:
      severity = str(entry.severity)
      if severity in GetLogs.SEVERITIES:
        # Use short form (first letter) for expected severities.
        row['level'] = severity[0]
      else:
        # Print full form of unexpected severities.
        row['level'] = severity
    if entry.resource:
      for label in entry.resource.labels.additionalProperties:
        if label.key == 'function_name':
          row['name'] = label.value
    # Guard against entries without labels before iterating them.
    if entry.labels:
      for label in entry.labels.additionalProperties:
        if label.key == 'execution_id':
          row['execution_id'] = label.value
    if entry.timestamp:
      row['time_utc'] = util.FormatTimestamp(entry.timestamp)
    yield row
def testTestLimit(self):
  self._setExpect(filter_spec=None, page_size=5)
  list(common.FetchLogs(limit=5))
  self._setExpect(filter_spec=None, page_size=1000)
  list(common.FetchLogs(limit=1005))
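# The page sizes expected above suggest (inferred from the test, not from
# FetchLogs' source) that each request asks for min(limit, 1000) entries,
# 1000 being the Logging API's maximum page size; a limit of 1005 would
# therefore span two pages, with the last five entries fetched on the second.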
def _Run(self, args):
  region = properties.VALUES.functions.region.Get()
  log_filter = [
      'resource.type="cloud_function"',
      'resource.labels.region="%s"' % region,
      'logName:"cloud-functions"'
  ]
  if args.name:
    log_filter.append('resource.labels.function_name="%s"' % args.name)
  if args.execution_id:
    log_filter.append('labels.execution_id="%s"' % args.execution_id)
  if args.min_log_level:
    log_filter.append('severity>=%s' % args.min_log_level.upper())
  if args.start_time:
    order = 'ASC'
    log_filter.append('timestamp>="%s"' %
                      logging_util.FormatTimestamp(args.start_time))
  else:
    order = 'DESC'
  if args.end_time:
    log_filter.append('timestamp<="%s"' %
                      logging_util.FormatTimestamp(args.end_time))
  log_filter = ' '.join(log_filter)

  entries = logging_common.FetchLogs(log_filter, order_by=order,
                                     limit=args.limit)

  if order == 'DESC':
    entries = reversed(list(entries))  # Force generator expansion with list.

  for entry in entries:
    message = entry.textPayload
    if entry.jsonPayload:
      props = [
          prop.value
          for prop in entry.jsonPayload.additionalProperties
          if prop.key == 'message'
      ]
      if len(props) == 1 and hasattr(props[0], 'string_value'):
        message = props[0].string_value
    row = {'log': message}
    if entry.severity:
      severity = six.text_type(entry.severity)
      if severity in flags.SEVERITIES:
        # Use short form (first letter) for expected severities.
        row['level'] = severity[0]
      else:
        # Print full form of unexpected severities.
        row['level'] = severity
    if entry.resource and entry.resource.labels:
      for label in entry.resource.labels.additionalProperties:
        if label.key == 'function_name':
          row['name'] = label.value
    if entry.labels:
      for label in entry.labels.additionalProperties:
        if label.key == 'execution_id':
          row['execution_id'] = label.value
    if entry.timestamp:
      row['time_utc'] = util.FormatTimestamp(entry.timestamp)
    yield row