Example #1
0
    def get_log_events(
        self,
        log_stream_name: str,
        start_time: datetime = None,
        end_time: datetime = None,
        start_from_head: bool = False,
        limit: int = None,
        next_token: str = None,
    ):
        """
        Get the log stream events.

        :param log_stream_name: Log stream name
        :param start_time: Start time of interval of interest for log events. ISO 8601 format: YYYY-MM-DDThh:mm:ssTZD
        :param end_time: End time of interval of interest for log events. ISO 8601 format: YYYY-MM-DDThh:mm:ssTZD
        :param start_from_head: If the value is true, the earliest log events are returned first.
            If the value is false, the latest log events are returned first. The default value is false.
        :param limit: The maximum number of log events returned. If you don't specify a value,
            the maximum is as many log events as can fit in a response size of 1 MB, up to 10,000 log events.
        :param next_token: Token for paginated requests.
        :raises NotFoundClusterActionError: if the cluster stack, its log group, or the log stream does not exist.
        """
        # Fail fast when the cluster stack is gone; there is nothing to query.
        if not AWSApi.instance().cfn.stack_exists(self.stack_name):
            raise NotFoundClusterActionError(f"Cluster {self.name} does not exist.")

        try:
            log_events_response = AWSApi.instance().logs.get_log_events(
                log_group_name=self.stack.log_group_name,
                log_stream_name=log_stream_name,
                # CloudWatch Logs expects epoch timestamps; None means "unbounded".
                end_time=datetime_to_epoch(end_time) if end_time else None,
                start_time=datetime_to_epoch(start_time) if start_time else None,
                limit=limit,
                start_from_head=start_from_head,
                next_token=next_token,
            )
            return LogStream(self.stack_name, log_stream_name, log_events_response)
        except AWSClientError as e:
            # Map raw AWS errors onto domain-specific exceptions, chaining the
            # original (PEP 3134, "raise ... from") so the root cause stays in
            # the traceback instead of being silently discarded.
            if e.message.startswith("The specified log group"):
                LOGGER.debug("Log Group %s doesn't exist.", self.stack.log_group_name)
                raise NotFoundClusterActionError(
                    f"CloudWatch logging is not enabled for cluster {self.name}."
                ) from e
            if e.message.startswith("The specified log stream"):
                LOGGER.debug("Log Stream %s doesn't exist.", log_stream_name)
                raise NotFoundClusterActionError(
                    f"The specified log stream {log_stream_name} does not exist."
                ) from e
            raise _cluster_error_mapper(e, f"Unexpected error when retrieving log events: {e}.") from e
Example #2
0
    def validate(self, log_stream_prefix=None):
        """Check filter consistency."""
        # A degenerate or inverted time window can never match anything.
        if self.start_time >= self.end_time:
            raise FiltersParserError("Start time must be earlier than end time.")

        matching_events = AWSApi.instance().logs.filter_log_events(
            log_group_name=self._log_group_name,
            log_stream_name_prefix=log_stream_prefix,
            start_time=datetime_to_epoch(self.start_time),
            end_time=datetime_to_epoch(self.end_time),
        )
        if matching_events:
            return

        # Mention the stream prefix in the error only when one was supplied.
        prefix_note = f", with log stream name prefix '{log_stream_prefix}'" if log_stream_prefix else ""
        raise FiltersParserError(
            f"No log events in the log group {self._log_group_name} in interval starting "
            f"at {self.start_time} and ending at {self.end_time}{prefix_note}"
        )
Example #3
0
    def get_log_events(
        self,
        log_stream_name: str,
        start_time: datetime = None,
        end_time: datetime = None,
        start_from_head: bool = False,
        limit: int = None,
        next_token: str = None,
    ):
        """
        Get the log stream events.

        :param log_stream_name: Log stream name
        :param start_time: Start time of interval of interest for log events. ISO 8601 format: YYYY-MM-DDThh:mm:ssTZD
        :param end_time: End time of interval of interest for log events. ISO 8601 format: YYYY-MM-DDThh:mm:ssTZD
        :param start_from_head: If the value is true, the earliest log events are returned first.
            If the value is false, the latest log events are returned first. The default value is false.
        :param limit: The maximum number of log events returned. If you don't specify a value,
            the maximum is as many log events as can fit in a response size of 1 MB, up to 10,000 log events.
        :param next_token: Token for paginated requests.
        :raises NotFoundImageBuilderActionError: if the log group or the log stream does not exist.
        :raises ImageBuilderActionError: on any other AWS client failure.
        """
        try:
            # get Image Builder log stream events
            log_events_response = AWSApi.instance().logs.get_log_events(
                log_group_name=self._log_group_name,
                log_stream_name=log_stream_name,
                # CloudWatch Logs expects epoch timestamps; None means "unbounded".
                end_time=datetime_to_epoch(end_time) if end_time else None,
                start_time=datetime_to_epoch(start_time) if start_time else None,
                limit=limit,
                start_from_head=start_from_head,
                next_token=next_token,
            )
            return LogStream(self.image_id, log_stream_name, log_events_response)
        except AWSClientError as e:
            # Map raw AWS errors onto domain-specific exceptions, chaining the
            # original (PEP 3134, "raise ... from") so the root cause stays in
            # the traceback instead of being silently discarded.
            if e.message.startswith("The specified log group"):
                LOGGER.debug("Log Group %s doesn't exist.", self._log_group_name)
                raise NotFoundImageBuilderActionError(
                    ("Unable to find image logs, please double check if image id=" f"{self.image_id} is correct.")
                ) from e
            if e.message.startswith("The specified log stream"):
                LOGGER.debug("Log Stream %s doesn't exist.", log_stream_name)
                raise NotFoundImageBuilderActionError(
                    f"The specified log stream {log_stream_name} does not exist."
                ) from e
            raise ImageBuilderActionError(f"Unexpected error when retrieving log events: {e}") from e
Example #4
0
 def create_export_task(
     self,
     log_group_name,
     bucket,
     bucket_prefix=None,
     log_stream_name_prefix=None,
     start_time: datetime.datetime = None,
     end_time: datetime.datetime = None,
 ):
     """Start the task that will export a log group name to an s3 bucket, and return the task ID.

     :param log_group_name: name of the CloudWatch log group to export
     :param bucket: destination S3 bucket name
     :param bucket_prefix: optional key prefix for the exported objects
     :param log_stream_name_prefix: optionally restrict the export to streams with this prefix
     :param start_time: optional start of the export interval
     :param end_time: optional end of the export interval
     :return: the task ID returned by CloudWatch Logs (or None if absent from the response)
     """
     kwargs = {
         "logGroupName": log_group_name,
         "fromTime": start_time and datetime_to_epoch(start_time),
         "to": end_time and datetime_to_epoch(end_time),
         "destination": bucket,
         "destinationPrefix": bucket_prefix,
     }
     if log_stream_name_prefix:
         kwargs["logStreamNamePrefix"] = log_stream_name_prefix
     # botocore's parameter validation rejects None-valued arguments
     # (ParamValidationError), so drop every optional argument that was not
     # provided instead of forwarding it as None.
     kwargs = {key: value for key, value in kwargs.items() if value is not None}
     return self._client.create_export_task(**kwargs).get("taskId")
Example #5
0
def test_datetime_to_epoch(set_tz, time_isoformat, time_zone, expect_output):
    """Verify datetime_to_epoch yields the expected epoch regardless of the process time zone."""
    # Force the process time zone before converting, to prove the result is TZ-independent.
    set_tz(time_zone)
    time.tzset()
    epoch = utils.datetime_to_epoch(utils.to_utc_datetime(time_isoformat))
    assert_that(epoch).is_equal_to(expect_output)