Example #1
File: logsops.py  Project: mschmutz1/jokic
def _handle_tail_logs(instance_id_list):
    data = []
    for instance_id, url in iteritems(instance_id_list):
        data.append('============= ' + str(instance_id) + ' ==============')
        log_result = utils.get_data_from_url(url)
        data.append(utils.decode_bytes(log_result))
    io.echo_with_pager(os.linesep.join(data))
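The helpers utils.get_data_from_url and utils.decode_bytes are not defined in this listing. A minimal sketch of what they might look like, assuming a plain HTTP GET and UTF-8 decoding with a lenient fallback (the bodies below are an assumption, not the CLI's actual code):

import urllib.request


def get_data_from_url(url, timeout=30):
    # Fetch the raw bytes behind the (typically presigned) log URL.
    with urllib.request.urlopen(url, timeout=timeout) as response:
        return response.read()


def decode_bytes(data):
    # Logs are usually UTF-8; fall back to replacement characters rather
    # than failing on the odd malformed byte.
    if isinstance(data, bytes):
        return data.decode('utf-8', errors='replace')
    return data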
Example #3
def stream_logs_in_terminal(log_group, log_streams):
    """
    Prints the logs of each of the `log_streams` to the terminal using a scrollable pager, as opposed to printing all
    available information at once.
    :param log_group: name of the CloudWatch log group within which to find the `log_streams`
    :param log_streams: the list of log streams belonging to the `log_group` whose events to print to terminal
    :return: None
    """
    all_logs = ''
    for log_stream in log_streams:
        tail_logs = get_cloudwatch_log_stream_events(
            log_group,
            log_stream,
            num_log_events=TAIL_LOG_SIZE
        )
        all_logs += '{linesep}{linesep}============= ' \
                    '{log_stream} - {log_group} ==============' \
                    '{linesep}{linesep}'.format(
                        log_stream=str(log_stream),
                        log_group=log_group,
                        linesep=os.linesep
                    )
        all_logs += tail_logs

    io.echo_with_pager(all_logs)
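io.echo_with_pager itself is not part of this listing. A minimal sketch of a pager-backed echo, assuming the standard library's pydoc.pager is an acceptable stand-in for whatever the CLI really uses:

import pydoc


def echo_with_pager(text):
    # pydoc.pager pipes the text through the user's $PAGER (falling back to
    # a plain print when no pager is available), which is what lets long
    # CloudWatch output be scrolled instead of dumped all at once.
    pydoc.pager(text)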
Example #4
def paginate_cloudwatch_logs(platform_name, version, formatter=None):
    log_group_name = _get_platform_builder_group_name(platform_name)
    next_token = None
    start_time = None

    while True:
        try:
            messages, next_token, start_time = get_cloudwatch_messages(
                log_group_name, version, formatter, next_token, start_time)
            if messages:
                io.echo_with_pager("\n".join(messages))
            else:
                break
        except ServiceError as e:
            # Something went wrong getting the stream;
            # it probably doesn't exist anymore.
            io.log_error(e)
            break
        except Exception as e:
            # Swallow all other exceptions so they are not printed as a
            # stack trace to the console; they are typically connection
            # resets and other transient issues.
            LOG.debug('Exception raised: ' + str(e))
            LOG.debug(traceback.format_exc())
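get_cloudwatch_messages is only called here, not defined. A sketch of how such a paginated fetch could be built directly on boto3's get_log_events, where nextForwardToken drives the pagination; the helper name and return shape mirror the call above, but the body (and the assumption that formatter exposes a format(event) method) is hypothetical:

import boto3

logs_client = boto3.client('logs')


def get_cloudwatch_messages(log_group_name, stream_name, formatter=None,
                            next_token=None, start_time=None):
    # Fetch the next page of events for the stream. CloudWatch hands back
    # the same forward token once there is nothing further to read, which
    # eventually yields an empty page and lets the caller break out.
    kwargs = {
        'logGroupName': log_group_name,
        'logStreamName': stream_name,
        'startFromHead': True,
    }
    if next_token:
        kwargs['nextToken'] = next_token
    if start_time:
        kwargs['startTime'] = start_time

    response = logs_client.get_log_events(**kwargs)
    messages = [
        formatter.format(event) if formatter else event['message']
        for event in response['events']
    ]
    return messages, response['nextForwardToken'], start_time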
Example #5
def print_events(app_name, env_name, follow, platform_arn=None):
    if follow:
        follow_events(app_name, env_name, platform_arn)
    else:
        events = elasticbeanstalk.get_new_events(app_name,
                                                 env_name,
                                                 None,
                                                 platform_arn=platform_arn)

        data = []
        for event in reversed(events):
            data.append(commonops.get_event_string(event, long_format=True))
        io.echo_with_pager(os.linesep.join(data))
Example #6
    def do_command(self):
        verbose = self.app.pargs.verbose
        solution_stacks = platformops.get_all_platforms()
        platform_arns = platformops.list_custom_platform_versions(platform_version='latest')

        if verbose:
            lst = [s.name for s in solution_stacks]
            lst.extend(platform_arns)
        else:
            lst = sorted(set([s.pythonify() for s in solution_stacks]))
            lst.extend([PlatformVersion.get_platform_name(arn) for arn in platform_arns])

        if len(lst) > 20:
            io.echo_with_pager(os.linesep.join(lst))
        else:
            io.echo(*lst, sep=os.linesep)
Example #7
    def do_command(self):
        all_platforms = self.app.pargs.all_platforms
        status = self.app.pargs.status

        if not all_platforms:
            platform_name = fileoperations.get_platform_name()
        else:
            platform_name = None

        versions = platformops.list_custom_platform_versions(
            platform_name=platform_name, status=status, show_status=True)

        if len(versions) > 20:
            io.echo_with_pager(os.linesep.join(versions))
        else:
            io.echo(*versions, sep=os.linesep)
Example #8
    def do_command(self):
        verbose = self.app.pargs.verbose
        solution_stacks = platformops.get_all_platforms()

        if verbose:
            platform_arns = platformops.list_custom_platform_versions()
            lst = [s.name for s in solution_stacks]
            lst.extend(platform_arns)
        else:
            platform_arns = platformops.list_custom_platform_versions(platform_version='latest')
            lst = sorted(set([s.pythonify() for s in solution_stacks]))
            lst.extend([PlatformVersion.get_platform_name(arn) for arn in platform_arns])

        if len(lst) > 20:
            io.echo_with_pager(os.linesep.join(lst))
        else:
            io.echo(*lst, sep=os.linesep)
Example #9
    def do_command(self):
        all_platforms = self.app.pargs.all_platforms
        status = self.app.pargs.status

        if not all_platforms:
            platform_name = fileoperations.get_platform_name()
        else:
            platform_name = None

        versions = platformops.list_custom_platform_versions(
            platform_name=platform_name,
            status=status,
            show_status=True
        )

        if len(versions) > 20:
            io.echo_with_pager(os.linesep.join(versions))
        else:
            io.echo(*versions, sep=os.linesep)
Example #10
def stream_logs_in_terminal(log_group, log_streams):
    """
    Prints the logs of each of the `log_streams` to the terminal using a scrollable pager, as opposed to printing all
    available information at once.
    :param log_group: name of the CloudWatch log group within which to find the `log_streams`
    :param log_streams: the list of log streams belonging to the `log_group` whose events to print to terminal
    :return: None
    """
    all_logs = ''
    for log_stream in log_streams:
        tail_logs = get_cloudwatch_log_stream_events(
            log_group,
            log_stream,
            num_log_events=TAIL_LOG_SIZE
        )
        all_logs += '{linesep}{linesep}============= {log_stream} - {log_group} =============={linesep}{linesep}'.format(
            log_stream=str(log_stream),
            log_group=log_group,
            linesep=os.linesep
        )
        all_logs += tail_logs

    io.echo_with_pager(all_logs)
Example #11
File: logsops.py  Project: mschmutz1/jokic
def paginate_cloudwatch_logs(platform_name, version, formatter=None):
    """
    Periodically polls CloudWatch get_log_events to retrieve the logs for the logStream `version` within the logGroup
    derived from `platform_name`
    :param platform_name: A CloudWatch logGroup in which the logStream `version` exists
    :param version: A CloudWatch logStream to poll
    :param formatter: The object that formats the output to be displayed in the terminal
    :return: None
    """
    log_group_name = _get_platform_builder_group_name(platform_name)
    next_token = None
    start_time = None
    messages_handler = (lambda messages: io.echo_with_pager(os.linesep.join(messages)))

    get_cloudwatch_messages(log_group_name, version, formatter, next_token, start_time, messages_handler, sleep_time=4)
Example #13
def echo(platforms):
    if len(platforms) > 20:
        io.echo_with_pager(os.linesep.join(platforms))
    else:
        io.echo(*platforms, sep=os.linesep)
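A small usage sketch for the helper above: short lists are printed directly, while anything longer than 20 entries is handed to the pager (the platform names are made up for illustration):

echo(['python-3.8', 'docker-20.10', 'go-1.16'])      # printed directly
echo(['platform-%02d' % i for i in range(30)])       # shown in the pager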
Example #14
 def test_echo_with_pager(self, pager_mock):
     io.echo_with_pager('some text')
     pager_mock.assert_called_once_with('some text')
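The snippet above omits the mock.patch decorator that injects pager_mock. A self-contained version might look like the following; both the import path and the patch target are assumptions based on the identifiers used above, since the pager hook that echo_with_pager delegates to is not shown in this listing:

import unittest
from unittest import mock

from ebcli.core import io


class IoTest(unittest.TestCase):
    # 'ebcli.core.io.pydoc.pager' is a hypothetical patch target standing in
    # for whatever pager function echo_with_pager actually calls.
    @mock.patch('ebcli.core.io.pydoc.pager')
    def test_echo_with_pager(self, pager_mock):
        io.echo_with_pager('some text')
        pager_mock.assert_called_once_with('some text')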
Example #15
def get_logs(env_name, info_type, do_zip=False, instance_id=None):
    # Get logs
    result = elasticbeanstalk.retrieve_environment_info(env_name, info_type)
    """
        Results are ordered with latest last, we just want the latest
    """
    log_list = {}
    for log in result['EnvironmentInfo']:
        i_id = log['Ec2InstanceId']
        url = log['Message']
        log_list[i_id] = url

    if instance_id:
        try:
            log_list = {instance_id: log_list[instance_id]}
        except KeyError:
            raise NotFoundError(strings['beanstalk-logs.badinstance'].replace(
                '{instance_id}', instance_id))

    if info_type == 'bundle':
        # save file, unzip, place in logs directory
        logs_folder_name = datetime.now().strftime("%y%m%d_%H%M%S")
        logs_location = fileoperations.get_logs_location(logs_folder_name)
        # get logs for each instance
        for i_id, url in iteritems(log_list):
            zip_location = utils.save_file_from_url(url, logs_location,
                                                    i_id + '.zip')
            instance_folder = os.path.join(logs_location, i_id)
            fileoperations.unzip_folder(zip_location, instance_folder)
            fileoperations.delete_file(zip_location)

        fileoperations.set_user_only_permissions(logs_location)
        if do_zip:
            fileoperations.zip_up_folder(logs_location, logs_location + '.zip')
            fileoperations.delete_directory(logs_location)

            logs_location += '.zip'
            fileoperations.set_user_only_permissions(logs_location)
            io.echo(strings['logs.location'].replace('{location}',
                                                     logs_location))
        else:
            io.echo(strings['logs.location'].replace('{location}',
                                                     logs_location))
            # create symlink to logs/latest
            latest_location = fileoperations.get_logs_location('latest')
            try:
                os.unlink(latest_location)
            except OSError:
                # doesn't exist. Ignore
                pass
            try:
                os.symlink(logs_location, latest_location)
                io.echo('Updated symlink at', latest_location)
            except OSError:
                # We tried. Probably on Windows,
                # or logs/latest is not a symlink.
                pass

    else:
        # print logs
        data = []
        for i_id, url in iteritems(log_list):
            data.append('============= ' + str(i_id) + ' ==============')
            log_result = utils.get_data_from_url(url)
            data.append(utils.decode_bytes(log_result))
        io.echo_with_pager(os.linesep.join(data))
Example #16
def retrieve_cloudwatch_logs(log_group,
                             info_type,
                             do_zip=False,
                             instance_id=None):
    """
    Retrieves CloudWatch logs for every stream under the log group, unless `instance_id` is specified. For the
    'tail' info type only the last 100 lines are retrieved and handed to a pager for the user to scroll through;
    for the 'bundle' info type all of the logs are retrieved and saved to a directory under .elasticbeanstalk/logs/,
    and zipped if `do_zip` is enabled.
    :param log_group: cloudwatch log group
    :param info_type: can be 'tail' or 'bundle'
    :param do_zip: boolean to determine if we should zip the retrieved logs
    :param instance_id: if we only want a single instance we can specify it here
    """
    # Get the log streams, a.k.a. the instance ids in the log group
    log_streams = cloudwatch.describe_log_streams(
        log_group, log_stream_name_prefix=instance_id)
    instance_ids = []

    if len(log_streams['logStreams']) == 0:
        io.log_error(strings['logs.nostreams'])

    for stream in log_streams['logStreams']:
        instance_ids.append(stream['logStreamName'])

    # This is analogous to getting the full logs
    if info_type == 'bundle':
        # Create directory to store logs
        logs_folder_name = datetime.now().strftime("%y%m%d_%H%M%S")
        logs_location = fileoperations.get_logs_location(logs_folder_name)
        os.makedirs(logs_location)
        # Get logs for each instance
        for instance_id in instance_ids:
            full_logs = get_cloudwatch_stream_logs(log_group, instance_id)
            full_filepath = '{0}/{1}.log'.format(logs_location, instance_id)
            with open(full_filepath, 'w+') as log_file:
                log_file.write(full_logs)
            fileoperations.set_user_only_permissions(full_filepath)

        if do_zip:
            fileoperations.zip_up_folder(logs_location, logs_location + '.zip')
            fileoperations.delete_directory(logs_location)

            logs_location += '.zip'
            fileoperations.set_user_only_permissions(logs_location)
            io.echo(strings['logs.location'].replace('{location}',
                                                     logs_location))
        else:
            io.echo(strings['logs.location'].replace('{location}',
                                                     logs_location))
            # create symlink to logs/latest
            latest_location = fileoperations.get_logs_location('latest')
            try:
                os.unlink(latest_location)
            except OSError:
                # doesn't exist. Ignore
                pass
            try:
                os.symlink(logs_location, latest_location)
                io.echo('Updated symlink at', latest_location)
            except OSError:
                # We tried. Probably on Windows,
                # or logs/latest is not a symlink.
                pass

    else:
        # print logs
        all_logs = ""
        for instance_id in instance_ids:
            tail_logs = get_cloudwatch_stream_logs(
                log_group, instance_id, num_log_events=TAIL_LOG_SIZE)
            all_logs += '\n\n============= {0} - {1} ==============\n\n'.format(
                str(instance_id), get_log_name(log_group))
            all_logs += tail_logs
        io.echo_with_pager(all_logs)
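cloudwatch.describe_log_streams is a thin wrapper in this codebase; under the assumption that it maps more or less directly onto the boto3 call of the same name, a sketch looks like this:

import boto3

logs_client = boto3.client('logs')


def describe_log_streams(log_group, log_stream_name_prefix=None):
    # List the streams (one per instance) in the group, optionally narrowed
    # to a single instance id via the prefix filter.
    kwargs = {'logGroupName': log_group}
    if log_stream_name_prefix:
        kwargs['logStreamNamePrefix'] = log_stream_name_prefix
    return logs_client.describe_log_streams(**kwargs)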