def testPeriodicallyWarnsAboutSuppression(self):
  """Suppression notices are surfaced periodically while tailing."""
  # pylint: disable=g-import-not-at-top
  from googlecloudsdk.api_lib.logging import tailing
  # pylint: enable=g-import-not-at-top
  suppression_responses = [
      _ResponseWithSuppression(rate_limited=5),
      _ResponseWithSuppression(not_consumed=8),
      _ResponseWithSuppression(rate_limited=8, not_consumed=11),
      _ResponseWithSuppression(rate_limited=5),
      _ResponseWithSuppression(rate_limited=5),
      _ResponseWithSuppression(rate_limited=8),
  ]
  # Seconds the fake clock advances before each response is delivered.
  delays = [4, 4, 4, 0, 0, 3]
  stub = mock.Mock()
  stub.is_active = True
  position = [0]  # Mutable cell shared with the fake recv below.

  def FakeRecv():
    index = position[0]
    self._advance_time_seconds(delays[index])
    position[0] = index + 1
    if position[0] == len(suppression_responses):
      stub.is_active = False
    return suppression_responses[index]

  stub.recv.side_effect = FakeRecv
  for _ in tailing.TailLogs(stub, ['projects/irrelevant'], 'textPayload:*',
                            **self._kwargs):
    pass
  # Expect that the final three rate limited messages are all combined.
  rate_limited_messages = 3
  not_consumed_messages = 2
  self.assertEqual(
      len(self._error_messages),
      rate_limited_messages + not_consumed_messages)
def testYieldsEntriesFromStream(self):
  """TailLogs yields every entry from every streamed response.

  Fixes relative to the previous version: the comprehension no longer
  shadows the `id` builtin, responses are now served in stream order
  (they were previously served in reverse, which `assertCountEqual`
  masked), and the redundant generator wrapper inside `list(...)` is gone.
  """
  # pylint: disable=g-import-not-at-top
  from googlecloudsdk.api_lib.logging import tailing
  # pylint: enable=g-import-not-at-top
  expected_entries = [_EntryWithId(entry_id) for entry_id in 'abcdefgh']
  responses = [
      _ResponseWithEntries(expected_entries[:3]),
      _ResponseWithEntries(expected_entries[3:5]),
      _ResponseWithEntries(expected_entries[5:]),
  ]
  tail_stub = mock.Mock()
  tail_stub.is_active = True

  def Recv():
    # Deliver responses in order; deactivate the stub after the last one so
    # the tailing loop terminates.
    response = responses[Recv.next_index]
    Recv.next_index += 1
    if Recv.next_index == len(responses):
      tail_stub.is_active = False
    return response

  Recv.next_index = 0
  tail_stub.recv.side_effect = Recv
  entries = list(
      tailing.TailLogs(tail_stub, ['projects/fake-resource'], 'filterstring',
                       **self._kwargs))
  self.assertCountEqual(entries, expected_entries)
def testSendsCorrectRequest(self):
  """The request sent on the stream carries the caller's arguments."""
  # pylint: disable=g-import-not-at-top
  from googlecloudsdk.api_lib.logging import tailing
  from googlecloudsdk.third_party.logging_v2.proto import logging_pb2
  # pylint: enable=g-import-not-at-top
  stub = mock.Mock()
  stub.is_active = True

  def RecvOnce():
    # One empty response, then the stream reports itself closed.
    stub.is_active = False
    return logging_pb2.TailLogEntriesResponse()

  stub.recv.side_effect = RecvOnce
  expected_resource_names = ['blah', 'blah2']
  expected_logs_filter = 'only cool logs'
  expected_buffer_window_seconds = 45
  for _ in tailing.TailLogs(
      stub,
      expected_resource_names,
      expected_logs_filter,
      buffer_window_seconds=expected_buffer_window_seconds,
      **self._kwargs):
    pass
  stub.send.assert_called_once()
  sent_request = stub.send.call_args[0][0]
  self.assertEqual(sent_request.buffer_window.ToTimedelta().total_seconds(),
                   expected_buffer_window_seconds)
  self.assertCountEqual(sent_request.resource_names, expected_resource_names)
  self.assertEqual(sent_request.filter, expected_logs_filter)
def _Run(self, args):
  """Starts a gRPC tail session and returns the resulting entry stream.

  Args:
    args: The parsed command-line arguments.

  Returns:
    An iterable of tailed log entries (see tailing.TailLogs).

  Raises:
    NoGRPCInstalledError: If the grpc-based client modules cannot be
      imported.
    InvalidArgumentException: If --buffer-window is outside [0, 60] seconds.
  """
  try:
    # pylint: disable=g-import-not-at-top
    from googlecloudsdk.api_lib.logging import tailing
    from googlecloudsdk.third_party.logging_v2.gapic.transports.logging_service_v2_grpc_transport import LoggingServiceV2GrpcTransport
    # pylint: enable=g-import-not-at-top
  except ImportError:
    raise NoGRPCInstalledError()
  log.err.Print('Initializing tail session.')
  parent = util.GetParentFromArgs(args)
  if args.IsSpecified('location'):
    # Narrow the parent down to a specific log view:
    # .../locations/L/buckets/B/views/V.
    parent = util.CreateResourceName(
        util.CreateResourceName(
            util.CreateResourceName(parent, 'locations', args.location),
            'buckets', args.bucket), 'views', args.view)

  buffer_window_seconds = None
  # NOTE(review): a buffer window of 0 is treated as unset by this truthiness
  # check — confirm that is intended given the 0s lower bound below.
  if args.buffer_window:
    if args.buffer_window < 0 or args.buffer_window > 60:
      # pylint: disable=g-import-not-at-top
      from googlecloudsdk.calliope import exceptions as calliope_exceptions
      # pylint: enable=g-import-not-at-top
      # Fail fast: previously this only logged an error and then proceeded
      # to send the invalid value to the server anyway.
      raise calliope_exceptions.InvalidArgumentException(
          '--buffer-window',
          'The buffer window must be set between 0s and 1m.')
    buffer_window_seconds = args.buffer_window

  transport = LoggingServiceV2GrpcTransport(
      credentials=StoredCredentials(),
      address='logging.googleapis.com:443')
  # By default and up to the INFO verbosity, all console output is included in
  # the log file. When tailing logs, coarse filters could cause very large
  # files. So, we limit the log file to WARNING logs and above.
  log.SetLogFileVerbosity(logging.WARNING)
  return tailing.TailLogs(
      bidi.BidiRpc(transport.tail_log_entries), [parent],
      args.log_filter or '',
      buffer_window_seconds=buffer_window_seconds)
def testOpensAndClosesStub(self):
  """Tailing opens the bidi stub exactly once and closes it when done."""
  # pylint: disable=g-import-not-at-top
  from googlecloudsdk.api_lib.logging import tailing
  from googlecloudsdk.third_party.logging_v2.proto import logging_pb2
  # pylint: enable=g-import-not-at-top
  stub = mock.Mock()
  stub.is_active = True

  def RecvOnce():
    # Deliver a single empty response and mark the stream finished.
    stub.is_active = False
    return logging_pb2.TailLogEntriesResponse()

  stub.recv.side_effect = RecvOnce
  for _ in tailing.TailLogs(stub, ['projects/irrelevant'], 'textPayload:*',
                            **self._kwargs):
    pass
  stub.open.assert_called_once()
  stub.close.assert_called_once()
def Tail(self):
  """Tail the GCL logs and print any new bytes to the console.

  Streams Cloud Logging entries for this build over a bidi gRPC stub and
  prints each text payload as it arrives. If the grpc modules are not
  installed, prints installation instructions instead of streaming.
  """
  # Scope the query to this project's cloudbuild log for this build only.
  parent = 'projects/{project_id}'.format(project_id=self.project_id)
  log_filter = ('logName="projects/{project_id}/logs/cloudbuild" AND '
                'resource.type="build" AND '
                'resource.labels.build_id="{build_id}"').format(
                    project_id=self.project_id, build_id=self.build_id)
  if self.transport:
    try:
      # pylint: disable=g-import-not-at-top
      from googlecloudsdk.api_lib.logging import tailing
      from google.api_core import bidi
      # pylint: enable=g-import-not-at-top
      # TODO(b/178405272): remove NameError exception handling
    except (ImportError, NameError):
      # grpc is an optional dependency; degrade gracefully with guidance
      # rather than failing the command.
      self._PrintLogLine(
          'To live stream log output for this build, please ensure the'
          ' grpc module is installed. Run:\npip install grpcio\n')
      return
    # NOTE(review): self.stop is presumably toggled from another
    # thread/callback to end tailing early — confirm against callers.
    if not self.stop:
      # Stash the stub on self so a stop request elsewhere can close it.
      self.tail_stub = bidi.BidiRpc(self.transport.tail_log_entries)
      output_logs = tailing.TailLogs(
          self.tail_stub, [parent],
          log_filter,
          buffer_window_seconds=self.buffer_window_seconds)
      self._PrintFirstLine()
      # Blocks until the tail session ends, printing each payload.
      for output in output_logs:
        text = self._ValidateScreenReader(output.text_payload)
        self._PrintLogLine(text)
    self._PrintLastLine(' BUILD FINISHED; TRUNCATING OUTPUT LOGS ')
    if self.log_url:
      self._PrintLogLine(
          'Logs are available at [{log_url}].'.format(
              log_url=self.log_url))
    return
def testFlushesFinalSuppressionCounts(self):
  """Cumulative suppression counts are flushed when tailing ends."""
  # pylint: disable=g-import-not-at-top
  from googlecloudsdk.api_lib.logging import tailing
  # pylint: enable=g-import-not-at-top
  suppression_responses = [
      _ResponseWithSuppression(rate_limited=5),
      _ResponseWithSuppression(not_consumed=8),
      _ResponseWithSuppression(rate_limited=8, not_consumed=11),
      _ResponseWithSuppression(rate_limited=5),
      _ResponseWithSuppression(rate_limited=5),
      _ResponseWithSuppression(rate_limited=8),
  ]
  # Fake-clock advance (seconds) applied before each response is served.
  delays = [4, 4, 4, 0, 0, 3]
  stub = mock.Mock()
  stub.is_active = True
  pending = list(zip(delays, suppression_responses))

  def FakeRecv():
    delay, response = pending.pop(0)
    self._advance_time_seconds(delay)
    if not pending:
      stub.is_active = False
    return response

  stub.recv.side_effect = FakeRecv
  for _ in tailing.TailLogs(stub, ['projects/irrelevant'], 'textPayload:*',
                            **self._kwargs):
    pass
  # Expect just two cumulative messages at the end, one for each suppression
  # type received. Also expect the help message.
  expected_cumulative_count_messages = 2
  expected_help_messages = 1
  self.assertEqual(
      len(self._warning_messages),
      expected_cumulative_count_messages + expected_help_messages)
def testGracefullyHandlesInvalidArgument(self):
  """An INVALID_ARGUMENT RPC error is reported to the user, not raised."""
  # pylint: disable=g-import-not-at-top
  import grpc
  from googlecloudsdk.api_lib.logging import tailing
  # pylint: enable=g-import-not-at-top
  stub = mock.Mock()
  stub.is_active = True

  def RaiseInvalidArgument():
    # Fabricate an RpcError carrying the INVALID_ARGUMENT status code.
    rpc_error = grpc.RpcError()
    rpc_error.code = lambda: grpc.StatusCode.INVALID_ARGUMENT
    rpc_error.details = lambda: 'Did not work...'
    raise rpc_error

  stub.recv.side_effect = RaiseInvalidArgument
  try:
    for _ in tailing.TailLogs(stub, ['projects/irrelevant'], 'textPayload:*',
                              **self._kwargs):
      pass
  except grpc.RpcError:
    self.fail()
  self.assertEqual(len(self._warning_messages), 1)
  self.assertEqual(len(self._debug_messages), 1)