def retrieve_cloudwatch_logs(
        log_group,
        info_type,
        do_zip=False,
        specific_log_stream=None,
        cloudwatch_log_source=logs_operations_constants.LOG_SOURCES.INSTANCE_LOG_SOURCE
):
    """
    Retrieve CloudWatch logs for every stream under `log_group` unless
    `specific_log_stream` narrows the selection to a prefix match.

    :param log_group: CloudWatch logGroup to read from
    :param info_type: 'tail': print the most recent events to the terminal
                      'bundle': save every stream's full log to a directory
                      under .elasticbeanstalk/logs/
    :param do_zip: if True, zip the saved logs directory for the user
    :param specific_log_stream: restrict retrieval to streams with this prefix
    :param cloudwatch_log_source: the cloudwatch-log-source to pull from:
                                  instance or environment-health
    """
    stream_names = cloudwatch.get_all_stream_names(
        log_group_name=log_group,
        log_stream_name_prefix=specific_log_stream
    )

    # Anything other than a bundle request is streamed straight to the terminal.
    if info_type != logs_operations_constants.INFORMATION_FORMAT.BUNDLE:
        stream_logs_in_terminal(log_group, stream_names)
        return

    logs_location = _setup_logs_folder(cloudwatch_log_source)
    for stream_name in stream_names:
        events = get_cloudwatch_log_stream_events(log_group, stream_name)
        _write_full_logs_to_file(events, logs_location, stream_name)

    if do_zip:
        _zip_logs_location(logs_location)
    else:
        _attempt_update_symlink_to_latest_logs_retrieved(logs_location)
def test_get_all_stream_names(self, make_api_call_mock):
    """get_all_stream_names surfaces every stream name present in the
    DescribeLogStreams response, in order."""
    make_api_call_mock.return_value = mock_responses.DESCRIBE_LOG_STREAMS_RESPONSE

    expected_stream_names = [
        'archive-health-2018-03-26',
        'archive-health-2018-03-27',
        'archive-health-2018-03-28',
    ]
    actual_stream_names = cloudwatch.get_all_stream_names('some-log-group')

    self.assertEqual(expected_stream_names, actual_stream_names)
def cloudwatch_log_stream_names(log_group, log_stream_name_prefix):
    """
    Return all of the logStream names associated with `log_group`,
    filtered by `log_stream_name_prefix` when one is specified.

    :param log_group: A CloudWatch logGroup whose logStream names to retrieve
    :param log_stream_name_prefix: A prefix to filter logStream names by
    :return: the matching logStream names
    """
    stream_names = cloudwatch.get_all_stream_names(
        log_group_name=log_group,
        log_stream_name_prefix=log_stream_name_prefix
    )
    return stream_names
def cloudwatch_log_stream_names(log_group, log_stream_name_prefix):
    """
    Thin pass-through to `cloudwatch.get_all_stream_names`: returns the
    logStream names under `log_group`, optionally filtered by
    `log_stream_name_prefix`.

    NOTE(review): this definition appears to duplicate an identical
    `cloudwatch_log_stream_names` elsewhere — confirm whether both live in
    the same module (the later one would shadow the earlier) or in
    separate files, and consolidate if possible.

    :param log_group: A CloudWatch logGroup whose logStream names to retrieve
    :param log_stream_name_prefix: A prefix to filter logStream names by
    :return: the matching logStream names
    """
    return cloudwatch.get_all_stream_names(
        log_group_name=log_group,
        log_stream_name_prefix=log_stream_name_prefix
    )
def stream_cloudwatch_logs(env_name, sleep_time=2, log_group=None, instance_id=None):
    """
    Stream CloudWatch logs for `log_group` to the terminal. When multiple
    logStreams are found, each one is tailed on its own daemon thread so
    all streams are interleaved into the same terminal view; the outer
    loop keeps polling for newly created streams.

    :param env_name: environment name
    :param sleep_time: seconds each stream thread sleeps between CloudWatch polls
    :param log_group: cloudwatch log group; defaults to the environment's
                      'awseb-<env>-activity' group
    :param instance_id: log streams are named by instance id, so pass this
                        to follow a single instance's stream
    :raises NotFoundError: if the log group can't be queried or has no streams
    """
    if log_group is None:
        log_group = 'awseb-{0}-activity'.format(env_name)
        log_name = 'eb-activity.log'
    else:
        log_name = get_log_name(log_group)

    streamer = io.get_event_streamer()
    streamer.prompt = ' -- {0} -- (Ctrl+C to exit)'.format(log_name)

    stream_names = []
    jobs = []
    while True:
        try:
            new_names = cloudwatch.get_all_stream_names(log_group, instance_id)
        except Exception:
            # Was a bare `except:`, which also swallowed KeyboardInterrupt /
            # SystemExit — Ctrl+C (the advertised exit path) surfaced as a
            # bogus "not set up" error. Catch Exception only.
            raise NotFoundError(strings['cloudwatch-stream.notsetup'])

        if not new_names:
            raise NotFoundError(strings['cloudwatch-logs.nostreams'].replace(
                '{log_group}', log_group))

        for name in new_names:
            if name in stream_names:
                continue  # already being tailed by an earlier thread
            stream_names.append(name)
            worker = threading.Thread(
                target=_stream_single_stream,
                args=(log_group, name, streamer, sleep_time))
            worker.daemon = True  # don't block interpreter exit on Ctrl+C
            jobs.append(worker)
            worker.start()
            time.sleep(0.2)  # offset threads so their output interleaves cleanly

        time.sleep(10)  # poll for newly created streams every 10 seconds