Example #1
0
    def test_must_tail_logs_with_single_data_fetch(self, time_mock):
        """Tail yields the first batch of events, then stops after
        ``max_retries`` consecutive empty polls."""

        self.fetcher.fetch = Mock()

        # One poll returning data, followed by `max_retries` empty polls
        self.fetcher.fetch.side_effect = [self.mock_events1] + [self.mock_events_empty] * 3

        # First call starts at the configured start time; after data with
        # latest timestamp 12 is seen, later polls start at 13 (12 + 1ms).
        empty_poll = call(ANY, start=to_datetime(13), filter_pattern=self.filter_pattern)
        expected_fetch_calls = [
            call(ANY, start=self.start_time, filter_pattern=self.filter_pattern)
        ] + [empty_poll] * 3

        # Tail sleeps exactly once per poll
        expected_sleep_calls = [call(self.poll_interval)] * len(expected_fetch_calls)

        result_itr = self.fetcher.tail(
            self.log_group_name,
            start=self.start_time,
            filter_pattern=self.filter_pattern,
            max_retries=self.max_retries,
            poll_interval=self.poll_interval,
        )

        self.assertEqual(self.mock_events1, list(result_itr))
        self.assertEqual(expected_fetch_calls, self.fetcher.fetch.call_args_list)
        self.assertEqual(expected_sleep_calls, time_mock.sleep.call_args_list)
Example #2
0
    def test_must_tail_logs_with_multiple_data_fetches(self, time_mock):
        """Tail keeps yielding events across multiple non-empty polls and
        advances the fetch start time one millisecond past the newest event
        seen so far."""

        self.fetcher.fetch = Mock()

        self.fetcher.fetch.side_effect = [
            self.mock_events1,

            # Just one empty fetch
            self.mock_events_empty,

            # This fetch returns data
            self.mock_events2,

            # Return empty data for `max_retries` number of polls
            self.mock_events_empty,
            self.mock_events_empty,
            self.mock_events_empty,
        ]

        expected_fetch_calls = [
            # First fetch returns data (latest timestamp 12 -> next start 13)
            call(ANY, start=self.start_time, filter_pattern=self.filter_pattern),

            # This fetch was empty
            call(ANY, start=to_datetime(13), filter_pattern=self.filter_pattern),

            # This fetch returned data (latest timestamp 14 -> next start 15)
            call(ANY, start=to_datetime(13), filter_pattern=self.filter_pattern),

            # Three empty fetches
            call(ANY, start=to_datetime(15), filter_pattern=self.filter_pattern),
            call(ANY, start=to_datetime(15), filter_pattern=self.filter_pattern),
            call(ANY, start=to_datetime(15), filter_pattern=self.filter_pattern),
        ]

        # One sleep per poll
        expected_sleep_calls = [call(self.poll_interval) for _ in expected_fetch_calls]

        result_itr = self.fetcher.tail(
            self.log_group_name,
            start=self.start_time,
            filter_pattern=self.filter_pattern,
            max_retries=self.max_retries,
            poll_interval=self.poll_interval,
        )

        # assertEquals is a deprecated alias (removed in Python 3.12); use assertEqual
        self.assertEqual(self.mock_events1 + self.mock_events2, list(result_itr))
        self.assertEqual(expected_fetch_calls, self.fetcher.fetch.call_args_list)
        self.assertEqual(expected_sleep_calls, time_mock.sleep.call_args_list)
Example #3
0
    def test_without_start_time(self, time_mock):
        """When no start time is given, tail polls from the start of epoch
        (timestamp 0) and yields nothing when every poll is empty."""

        self.fetcher.fetch = Mock()

        self.fetcher.fetch.return_value = self.mock_events_empty

        expected_fetch_calls = [
            # Three empty fetches, all with the default start time (epoch 0)
            call(ANY, start=to_datetime(0), filter_pattern=ANY),
            call(ANY, start=to_datetime(0), filter_pattern=ANY),
            call(ANY, start=to_datetime(0), filter_pattern=ANY),
        ]

        result_itr = self.fetcher.tail(
            self.log_group_name,
            filter_pattern=self.filter_pattern,
            max_retries=self.max_retries,
            poll_interval=self.poll_interval,
        )

        # assertEquals is a deprecated alias (removed in Python 3.12); use assertEqual
        self.assertEqual([], list(result_itr))
        self.assertEqual(expected_fetch_calls, self.fetcher.fetch.call_args_list)
 def test_get_last_event_time_unknown_last_time(self):
     """If describe_stack_events raises KeyError, get_last_event_time should
     fall back to (approximately) the current time, expressed in seconds.

     NOTE(review): ``datetime.utcnow()`` is deprecated since Python 3.12;
     also, comparing field-by-field down to ``second`` is flaky if the clock
     ticks over a second (or minute/hour/day) boundary between the two
     timestamps — TODO confirm whether an epsilon comparison would be safer.
     """
     current_timestamp = datetime.utcnow()
     # Force the stack-events lookup to fail with a KeyError
     self.deployer._client.describe_stack_events = MagicMock(side_effect=KeyError)
     # Convert to milliseconds from seconds
     # (assumes get_last_event_time returns a Unix timestamp in seconds and
     # to_datetime expects milliseconds — verify against their definitions)
     last_stack_event_timestamp = to_datetime(self.deployer.get_last_event_time("test") * 1000)
     self.assertEqual(last_stack_event_timestamp.year, current_timestamp.year)
     self.assertEqual(last_stack_event_timestamp.month, current_timestamp.month)
     self.assertEqual(last_stack_event_timestamp.day, current_timestamp.day)
     self.assertEqual(last_stack_event_timestamp.hour, current_timestamp.hour)
     self.assertEqual(last_stack_event_timestamp.minute, current_timestamp.minute)
     self.assertEqual(last_stack_event_timestamp.second, current_timestamp.second)
Example #5
0
    def test_without_start_time(self, time_mock):
        """When no start time is given, tail polls from the start of epoch
        (timestamp 0) and yields nothing when every poll is empty."""

        self.fetcher.fetch = Mock()

        self.fetcher.fetch.return_value = self.mock_events_empty

        expected_fetch_calls = [
            # Three empty fetches, all with the default start time (epoch 0)
            call(ANY, start=to_datetime(0), filter_pattern=ANY),
            call(ANY, start=to_datetime(0), filter_pattern=ANY),
            call(ANY, start=to_datetime(0), filter_pattern=ANY),
        ]

        result_itr = self.fetcher.tail(
            self.log_group_name,
            filter_pattern=self.filter_pattern,
            max_retries=self.max_retries,
            poll_interval=self.poll_interval,
        )

        # assertEquals is a deprecated alias (removed in Python 3.12); use assertEqual
        self.assertEqual([], list(result_itr))
        self.assertEqual(expected_fetch_calls, self.fetcher.fetch.call_args_list)
Example #6
0
    def setUp(self):
        """Create a LogsFetcher with a mocked client plus shared test fixtures."""
        self.fetcher = LogsFetcher(Mock())

        self.log_group_name = "name"
        self.filter_pattern = "pattern"

        self.start_time = to_datetime(10)
        self.max_retries = 3
        self.poll_interval = 1

        # Two batches of events with increasing timestamps, plus an empty batch
        self.mock_events1 = [
            LogEvent(self.log_group_name, {"timestamp": ts}) for ts in (11, 12)
        ]
        self.mock_events2 = [
            LogEvent(self.log_group_name, {"timestamp": ts}) for ts in (13, 14)
        ]
        self.mock_events_empty = []
Example #7
0
    def setUp(self):
        """Wire up a LogsFetcher around a mocked client and shared fixtures."""
        self.fetcher = LogsFetcher(Mock())

        self.log_group_name = "name"
        self.filter_pattern = "pattern"

        self.start_time = to_datetime(10)
        self.max_retries = 3
        self.poll_interval = 1

        def _events(*timestamps):
            # Helper: build one LogEvent per timestamp for this log group
            return [LogEvent(self.log_group_name, {"timestamp": t}) for t in timestamps]

        self.mock_events1 = _events(11, 12)
        self.mock_events2 = _events(13, 14)
        self.mock_events_empty = []
Example #8
0
    def tail(self, log_group_name, start=None, filter_pattern=None, max_retries=1000, poll_interval=0.3):
        """
        ** This is a long blocking call **

        Fetches logs from CloudWatch logs similar to the ``fetch`` method, but instead of stopping after all logs have
        been fetched, this method continues to poll CloudWatch for new logs. So this essentially simulates the
        ``tail -f`` bash command.

        If no logs are available, then it keeps polling for up to ``max_retries * poll_interval`` seconds before
        exiting. This method polls CloudWatch at around ~3 Calls Per Second to stay below the 5TPS limit.

        Parameters
        ----------
        log_group_name : str
            Name of CloudWatch Logs Group to query.

        start : datetime.datetime
            Optional start time for logs. Defaults to the start of epoch (fetch everything).

        filter_pattern : str
            Expression to filter the logs by. This is passed directly to CloudWatch, so any expression supported by
            CloudWatch Logs API is supported here.

        max_retries : int
            When logs are not available, this value determines the number of times to retry fetching logs before giving
            up. This counter is reset every time new logs are available.

        poll_interval : float
            Number of fractional seconds wait before polling again. Defaults to 300milliseconds.
            If no new logs available, this method will stop polling after ``max_retries * poll_interval`` seconds

        Yields
        ------
        samcli.lib.logs.event.LogEvent
            Object containing the information from each log event returned by CloudWatch Logs
        """

        # On every poll, startTime of the API call is the timestamp of last record observed
        latest_event_time = 0  # Start of epoch, in milliseconds
        if start is not None:
            latest_event_time = to_timestamp(start)

        counter = max_retries
        while counter > 0:

            LOG.debug("Tailing logs from %s starting at %s", log_group_name, str(latest_event_time))

            has_data = False
            counter -= 1
            events_itr = self.fetch(log_group_name,
                                    start=to_datetime(latest_event_time),
                                    filter_pattern=filter_pattern)

            # Find the timestamp of the most recent log event.
            for event in events_itr:
                has_data = True

                if event.timestamp_millis > latest_event_time:
                    latest_event_time = event.timestamp_millis

                # Yield the event back so it behaves similar to ``fetch``
                yield event

            # This poll fetched logs. Reset the retry counter and set the timestamp for next poll
            if has_data:
                counter = max_retries
                latest_event_time += 1  # one extra millisecond to avoid re-fetching the last event

            # We already fetched logs once. Sleep for some time before querying again.
            # This also helps us scoot under the TPS limit for CloudWatch API call.
            time.sleep(poll_interval)