def get_log_events(
    self,
    log_stream_name: str,
    start_time: datetime = None,
    end_time: datetime = None,
    start_from_head: bool = False,
    limit: int = None,
    next_token: str = None,
):
    """
    Fetch events from one log stream of the cluster's CloudWatch log group.

    :param log_stream_name: Log stream name
    :param start_time: Start of the interval of interest for log events.
        ISO 8601 format: YYYY-MM-DDThh:mm:ssTZD
    :param end_time: End of the interval of interest for log events.
        ISO 8601 format: YYYY-MM-DDThh:mm:ssTZD
    :param start_from_head: If true, return the earliest log events first;
        if false (the default), return the latest events first.
    :param limit: Maximum number of log events returned. When omitted, as many
        events as fit in a 1 MB response (up to 10,000) are returned.
    :param next_token: Token for paginated requests.
    :raises NotFoundClusterActionError: if the cluster stack, its log group,
        or the requested log stream does not exist.
    """
    # Fail fast when the backing CloudFormation stack is gone.
    if not AWSApi.instance().cfn.stack_exists(self.stack_name):
        raise NotFoundClusterActionError(f"Cluster {self.name} does not exist.")

    # CloudWatch Logs expects epoch timestamps; convert only the bounds that were given.
    epoch_start = datetime_to_epoch(start_time) if start_time else None
    epoch_end = datetime_to_epoch(end_time) if end_time else None
    try:
        raw_events = AWSApi.instance().logs.get_log_events(
            log_group_name=self.stack.log_group_name,
            log_stream_name=log_stream_name,
            start_time=epoch_start,
            end_time=epoch_end,
            start_from_head=start_from_head,
            limit=limit,
            next_token=next_token,
        )
        return LogStream(self.stack_name, log_stream_name, raw_events)
    except AWSClientError as e:
        # Translate the generic AWS error into cluster-specific exceptions,
        # keyed on the service's error-message prefixes.
        if e.message.startswith("The specified log group"):
            LOGGER.debug("Log Group %s doesn't exist.", self.stack.log_group_name)
            raise NotFoundClusterActionError(
                f"CloudWatch logging is not enabled for cluster {self.name}."
            )
        if e.message.startswith("The specified log stream"):
            LOGGER.debug("Log Stream %s doesn't exist.", log_stream_name)
            raise NotFoundClusterActionError(
                f"The specified log stream {log_stream_name} does not exist."
            )
        raise _cluster_error_mapper(e, f"Unexpected error when retrieving log events: {e}.")
def get_log_events(
    self,
    log_stream_name: str,
    start_time: datetime = None,
    end_time: datetime = None,
    start_from_head: bool = False,
    limit: int = None,
    next_token: str = None,
):
    """
    Fetch events from one Image Builder log stream.

    :param log_stream_name: Log stream name
    :param start_time: Start of the interval of interest for log events.
        ISO 8601 format: YYYY-MM-DDThh:mm:ssTZD
    :param end_time: End of the interval of interest for log events.
        ISO 8601 format: YYYY-MM-DDThh:mm:ssTZD
    :param start_from_head: If true, return the earliest log events first;
        if false (the default), return the latest events first.
    :param limit: Maximum number of log events returned. When omitted, as many
        events as fit in a 1 MB response (up to 10,000) are returned.
    :param next_token: Token for paginated requests.
    :raises NotFoundImageBuilderActionError: if the log group or the requested
        log stream does not exist.
    :raises ImageBuilderActionError: for any other AWS client failure.
    """
    # CloudWatch Logs expects epoch timestamps; convert only the bounds that were given.
    epoch_start = datetime_to_epoch(start_time) if start_time else None
    epoch_end = datetime_to_epoch(end_time) if end_time else None
    try:
        # Retrieve the raw Image Builder log stream events from CloudWatch.
        raw_events = AWSApi.instance().logs.get_log_events(
            log_group_name=self._log_group_name,
            log_stream_name=log_stream_name,
            start_time=epoch_start,
            end_time=epoch_end,
            start_from_head=start_from_head,
            limit=limit,
            next_token=next_token,
        )
        return LogStream(self.image_id, log_stream_name, raw_events)
    except AWSClientError as e:
        # Translate the generic AWS error, keyed on the service's
        # error-message prefixes.
        if e.message.startswith("The specified log group"):
            LOGGER.debug("Log Group %s doesn't exist.", self._log_group_name)
            raise NotFoundImageBuilderActionError(
                "Unable to find image logs, please double check if image id="
                f"{self.image_id} is correct."
            )
        if e.message.startswith("The specified log stream"):
            LOGGER.debug("Log Stream %s doesn't exist.", log_stream_name)
            raise NotFoundImageBuilderActionError(
                f"The specified log stream {log_stream_name} does not exist."
            )
        raise ImageBuilderActionError(f"Unexpected error when retrieving log events: {e}")
def test_execute(self, mocker, set_env, test_datadir, args):
    """
    Run `pcluster get-image-log-events` end to end against a mocked model layer.

    The model's get_log_events is patched to return two populated LogStream
    pages followed by an empty one; the CLI output is compared against the
    expected JSON fixture, and the forwarded keyword arguments are verified.
    """
    # Two identical populated pages, then an empty LogStream, consumed one per
    # call via side_effect.
    mocked_result = [
        LogStream(
            FAKE_ID,
            "logstream",
            {
                "events": [
                    {
                        "timestamp": 1622802790248,
                        "message": (
                            "2021-06-04 10:33:10,248 [DEBUG] CloudFormation client initialized "
                            "with endpoint https://cloudformation.eu-west-1.amazonaws.com"
                        ),
                        "ingestionTime": 1622802842382,
                    },
                    {
                        "timestamp": 1622802790248,
                        "message": (
                            "2021-06-04 10:33:10,248 [DEBUG] Describing resource HeadNodeLaunchTemplate in "
                            "stack test22"
                        ),
                        "ingestionTime": 1622802842382,
                    },
                    {
                        "timestamp": 1622802790390,
                        "message": (
                            "2021-06-04 10:33:10,390 [INFO] -----------------------Starting build"
                            "-----------------------"
                        ),
                        "ingestionTime": 1622802842382,
                    },
                ],
                "nextForwardToken": "f/3618",
                "nextBackwardToken": "b/3619",
                "ResponseMetadata": {},
            },
        )
    ] * 2 + [LogStream(FAKE_ID, "logstream", {})]
    get_image_log_events_mock = mocker.patch(
        "pcluster.api.controllers.image_logs_controller.ImageBuilder.get_log_events",
        side_effect=mocked_result)
    set_env("AWS_DEFAULT_REGION", "us-east-1")
    base_args = ["get-image-log-events"]
    command = base_args + self._build_cli_args({**REQUIRED_ARGS, **args})

    # Pin the timezone so timestamp rendering matches the stored fixture.
    os.environ["TZ"] = "Europe/London"
    time.tzset()
    out = run(command)

    # Compare CLI output against the expected JSON fixture for this datadir.
    expected = json.loads(
        (test_datadir / "pcluster-out.txt").read_text().strip())
    assert_that(expected).is_equal_to(out)
    # NOTE(review): call_args is always a 2-tuple (args, kwargs), so
    # is_length(2) holds unconditionally — presumably call_count or
    # call_args_list was intended; verify.
    assert_that(get_image_log_events_mock.call_args).is_length(2)
    if args.get("limit", None):
        # The CLI must coerce the limit string to an int before forwarding it.
        limit_val = get_image_log_events_mock.call_args[1].get("limit")
        assert_that(limit_val).is_type_of(int)
    # Verify the exact keyword arguments forwarded to the model layer;
    # optional args that the user did not pass are forwarded as None.
    kwargs = {
        "start_time": args.get("start_time", None) and to_utc_datetime(args["start_time"]),
        "end_time": args.get("end_time", None) and to_utc_datetime(args["end_time"]),
        "start_from_head": True if args.get("start_from_head", None) else None,
        "limit": int(args["limit"]) if args.get("limit", None) else None,
        "next_token": args.get("next_token", None),
    }
    get_image_log_events_mock.assert_called_with("log-stream-name", **kwargs)
def test_successful_get_cluster_log_events_request(
    self, client, mocker, mock_cluster_stack, region, next_token, start_from_head, limit, start_time, end_time
):
    """
    Verify a successful GetClusterLogEvents API request.

    Patches Cluster.get_log_events to return a canned LogStream, sends the
    request with the parametrized query arguments, and checks both the
    arguments forwarded to the model and the JSON response mapping
    (message/timestamp per event, next/prev pagination tokens).
    """
    cluster_name = "cluster"
    log_stream_name = "logstream"
    # Raw CloudWatch events wrapped by the mocked LogStream below.
    mock_log_events = [
        {
            "ingestionTime": 1627524017632,
            "message": "Jan 01 00:00:00 ip-10-0-0-1 systemd: Started Session c20325 of " "user root.",
            "timestamp": 1609459200000,
        },
        {
            "ingestionTime": 1627524017632,
            "message": "Jan 01 00:00:00 ip-10-0-0-1 systemd: Removed slice User Slice " "of root.",
            "timestamp": 1609459207000,
        },
    ]
    uid = "00000000-dddd-4444-bbbb-555555555555"
    # Shaped like a real boto3 get_log_events response, including metadata.
    mock_log_events_response = {
        "ResponseMetadata": {
            "HTTPHeaders": {
                "content-length": "12345",
                "content-type": "application/x-amz-json-1.1",
                "date": "Fri, 01 Jan 2021 00:00:00 GMT",
                "x-amzn-requestid": uid,
            },
            "HTTPStatusCode": 200,
            "RequestId": uid,
            "RetryAttempts": 0,
        },
        "events": mock_log_events,
        "nextBackwardToken": "b/123",
        "nextForwardToken": "f/456",
    }
    mock_log_stream = LogStream(cluster_name, log_stream_name, mock_log_events_response)
    # NOTE(review): "auto_spec" is not a recognized mock.patch keyword — it is
    # silently set as an attribute on the mock rather than enabling autospec.
    # Switching to autospec=True would make recorded calls include `self`,
    # breaking assert_called_with below; confirm intent before changing.
    get_log_events_mock = mocker.patch(
        "pcluster.models.cluster.Cluster.get_log_events",
        auto_spec=True,
        return_value=mock_log_stream,
    )
    mock_cluster_stack()
    response = self._send_test_request(
        client, cluster_name, log_stream_name, region, next_token, start_from_head, limit, start_time, end_time
    )
    # The controller must convert ISO 8601 time bounds to UTC datetimes and
    # pass the remaining query parameters straight through.
    expected_args = {
        "start_time": start_time and to_utc_datetime(start_time),
        "end_time": end_time and to_utc_datetime(end_time),
        "limit": limit,
        "start_from_head": start_from_head,
        "next_token": next_token,
    }
    get_log_events_mock.assert_called_with(log_stream_name, **expected_args)
    # Expected response body: events trimmed to message + ISO timestamp, with
    # the forward/backward tokens exposed as nextToken/prevToken.
    expected = {
        "events": [
            {
                "message": "Jan 01 00:00:00 ip-10-0-0-1 systemd: Started Session c20325 of user root.",
                "timestamp": "2021-01-01T00:00:00.000Z",
            },
            {
                "message": "Jan 01 00:00:00 ip-10-0-0-1 systemd: Removed slice User Slice of root.",
                "timestamp": "2021-01-01T00:00:07.000Z",
            },
        ],
        "nextToken": "f/456",
        "prevToken": "b/123",
    }
    assert_that(response.status_code).is_equal_to(200)
    assert_that(response.get_json()).is_equal_to(expected)