Example #1
def _process_logs(data, config_regexps, config_actions, auto_actions):
    logs = {}
    for name, logdata in data.items():
        try:
            # files
            files = _process_log_files(logdata)
            # regexps
            regexps = _process_log_regexps(logdata, name, config_regexps)
            # patterns
            garbage = _unify_patterns(logdata.get('garbage'))
            ignore = _unify_nested_patterns(logdata.get('ignore'))
            # actions
            actions = _process_log_actions(logdata, name, auto_actions,
                                           config_actions)
        except ConfigError as exc:
            raise ConfigError('invalid log definition ({}): {}'.format(
                name, exc)) from exc
        if not actions:
            warning_echo(
                'useless log definition ({}): no actions defined'.format(name))
        logs[name] = {
            'files': tuple(sorted(files)),
            'regexps': regexps,
            'garbage': garbage,
            'ignore': ignore,
            'actions': tuple(sorted(actions))
        }
    return logs
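
The helper builds one dictionary per log definition and returns the complete mapping. The snippet below is a hypothetical illustration of a single resulting entry; the log name, file path, regexp name and action name are invented, and the pattern objects normally produced by _unify_patterns/_unify_nested_patterns are simplified to empty containers:

# Hypothetical shape of one entry in the mapping returned by _process_logs
# (all names below are invented for illustration only):
processed = {
    'auth': {
        'files': ('/var/log/auth.log',),   # sorted tuple from _process_log_files
        'regexps': ('syslog',),            # names resolved by _process_log_regexps
        'garbage': (),                      # patterns from _unify_patterns
        'ignore': {},                       # nested patterns from _unify_nested_patterns
        'actions': ('mail',),               # sorted tuple from _process_log_actions
    }
}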
Example #2
def _write_offset_file(path, inode, offset):
    try:
        with open(path, 'w') as offset_file:
            os.fchmod(offset_file.fileno(), 0o600)
            offset_file.write('{}\n{}\n'.format(inode, offset))  # pragma: no branch
    except OSError as exc:
        warning_echo('Could not write: {} ({})'.format(path, exc))
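
The offset file written here stores the inode on the first line and the byte offset on the second. The project reads it back via _parse_offset_file (used in a later example but not shown); the function below is only a hypothetical sketch of such a reader, derived from the format written above:

def _read_offset_file(path):
    """Hypothetical reader for the two-line offset file format written above.

    Returns an (inode, offset) tuple, or (None, 0) if the file is missing
    or malformed.
    """
    try:
        with open(path) as offset_file:
            inode = int(offset_file.readline())
            offset = int(offset_file.readline())
    except (OSError, ValueError):
        return None, 0
    return inode, offset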
Example #3
def _process_logs(data, config_regexps, config_actions, auto_actions):
    logs = {}
    for name, logdata in data.items():
        try:
            # files
            files = _process_log_files(logdata)
            # regexps
            regexps = _process_log_regexps(logdata, name, config_regexps)
            # patterns
            garbage = _unify_patterns(logdata.get("garbage"))
            ignore = _unify_nested_patterns(logdata.get("ignore"))
            # actions
            actions = _process_log_actions(logdata, name, auto_actions, config_actions)
        except ConfigError as exc:
            raise ConfigError("invalid log definition ({}): {}".format(name, exc)) from exc
        if not actions:
            warning_echo("useless log definition ({}): no actions defined".format(name))
        logs[name] = {
            "files": tuple(sorted(files)),
            "regexps": regexps,
            "garbage": garbage,
            "ignore": ignore,
            "actions": tuple(sorted(actions)),
        }
    return logs
Example #4
def _write_offset_file(path, inode, offset):
    try:
        with open(path, 'w') as offset_file:
            os.fchmod(offset_file.fileno(), 0o600)
            offset_file.write('{}\n{}\n'.format(inode,
                                                offset))  # pragma: no branch
    except OSError as exc:
        warning_echo('Could not write: {} ({})'.format(path, exc))
Example #5
def process_log(name):
    """Let logstapo loose on a specifig log.

    :param name: The name of the log to process
    :return: A ``(lines, failed)`` tuple. `lines` is a list of
            ``(line, data)`` tuples and `failed` is a list of raw
            lines that could not be parsed.
    """
    config = current_config.data  # avoid context lookup all the time
    verbosity = config['verbosity']
    data = config['logs'][name]
    garbage = data['garbage']
    ignore = data['ignore']
    regexps = [config['regexps'][regex_name] for regex_name in data['regexps']]
    if verbosity >= 1:  # pragma: no cover
        verbose_echo(1, "*** Processing log '{}' ({})".format(name, ', '.join(data['files'])))
        if garbage:
            verbose_echo(1, '  Garbage patterns:')
            for pattern in garbage:
                verbose_echo(1, '    - {}'.format(pattern.pattern))
        if ignore:
            verbose_echo(1, '  Ignore patterns:')
            for source, patterns in ignore.items():
                if source.pattern is None:
                    verbose_echo(1, '    - Any source')
                else:
                    verbose_echo(1, '    - Source: {}'.format(source.pattern))
                for pattern in patterns:
                    verbose_echo(1, '      - {}'.format(pattern.pattern))
    lines = itertools.chain.from_iterable(_iter_log_lines(f, config['dry_run']) for f in data['files'])
    invalid = []
    other = []
    garbage_count = 0
    ignored_count = 0
    for line in lines:
        if garbage and any(x.test(line) for x in garbage):
            garbage_count += 1
            debug_echo('garbage: ' + line)
            continue
        parsed = _parse_line(line, regexps)
        if parsed is None:
            warning_echo('[{}] Could not parse: {}'.format(name, line))
            invalid.append(line)
            continue
        if _check_ignored(parsed, ignore):
            ignored_count += 1
            debug_echo('ignored: ' + line)
            continue
        verbose_echo(2, line)
        other.append((line, parsed))
    verbose_echo(1, 'Stats: {} garbage / {} invalid / {} ignored / {} other'.format(garbage_count, len(invalid),
                                                                                    ignored_count, len(other)))
    return other, invalid
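
As described in the docstring, the function returns a (lines, failed) pair: parsed (line, data) tuples and raw lines that could not be parsed. A minimal, hypothetical consumer (the log name 'auth' is invented, and current_config must already be populated) could look like this:

# Hypothetical consumer of process_log's return value ('auth' is an invented log name):
lines, failed = process_log('auth')
for raw, parsed in lines:
    print('parsed fields:', parsed)
for raw in failed:
    print('could not parse:', raw)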
Example #6
def logtail(path, offset_path=None, *, dry_run=False):
    """Yield new lines from a logfile.

    :param path: The path to the file to read from
    :param offset_path: The path to the file where offset/inode
                        information will be stored.  If not set,
                        ``<file>.offset`` will be used.
    :param dry_run: If ``True``, the offset file will not be modified
                    or created.
    """
    if offset_path is None:
        offset_path = path + '.offset'

    try:
        logfile = open(path, encoding='utf-8', errors='replace')
    except OSError as exc:
        warning_echo('Could not read: {} ({})'.format(path, exc))
        return

    closer = ExitStack()
    closer.enter_context(logfile)
    with closer:
        line_iter = iter([])
        stat = os.stat(logfile.fileno())
        debug_echo('logfile inode={}, size={}'.format(stat.st_ino, stat.st_size))
        inode, offset = _parse_offset_file(offset_path)
        if inode is not None:
            if stat.st_ino == inode:
                debug_echo('inodes are the same')
                if offset == stat.st_size:
                    debug_echo('offset points to eof')
                    return
                elif offset > stat.st_size:
                    warning_echo('File shrunk since last read: {} ({} < {})'.format(path, stat.st_size, offset))
                    offset = 0
            else:
                debug_echo('inode changed, checking for rotated file')
                rotated_path = _check_rotated_file(path, inode)
                if rotated_path is not None:
                    try:
                        rotated_file = open(rotated_path, encoding='utf-8', errors='replace')
                    except OSError as exc:
                        warning_echo('Could not read rotated file: {} ({})'.format(rotated_path, exc))
                    else:
                        closer.enter_context(rotated_file)
                        rotated_file.seek(offset)
                        line_iter = itertools.chain(line_iter, iter(rotated_file))
                offset = 0
        logfile.seek(offset)
        line_iter = itertools.chain(line_iter, iter(logfile))
        for line in line_iter:
            line = line.strip()
            yield line
        pos = logfile.tell()
        debug_echo('reached end of logfile at {}'.format(pos))
        if not dry_run:
            debug_echo('writing offset file: ' + offset_path)
            _write_offset_file(offset_path, stat.st_ino, pos)
        else:
            debug_echo('dry run - not writing offset file')
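
Since logtail is a generator, a caller just iterates over it; lines are yielded one by one and the offset file is only written after the end of the file has been reached, and only when dry_run is false. A minimal usage sketch with an invented path:

# Minimal usage sketch ('/var/log/syslog' is just an example path);
# dry_run=True keeps the offset file untouched.
for line in logtail('/var/log/syslog', dry_run=True):
    print(line)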
Example #7
def logtail(path, offset_path=None, *, dry_run=False):
    """Yield new lines from a logfile.

    :param path: The path to the file to read from
    :param offset_path: The path to the file where offset/inode
                        information will be stored.  If not set,
                        ``<file>.offset`` will be used.
    :param dry_run: If ``True``, the offset file will not be modified
                    or created.
    """
    if offset_path is None:
        offset_path = path + '.offset'

    try:
        logfile = open(path, encoding='utf-8', errors='replace')
    except OSError as exc:
        warning_echo('Could not read: {} ({})'.format(path, exc))
        return

    closer = ExitStack()
    closer.enter_context(logfile)
    with closer:
        line_iter = iter([])
        stat = os.stat(logfile.fileno())
        debug_echo('logfile inode={}, size={}'.format(stat.st_ino,
                                                      stat.st_size))
        inode, offset = _parse_offset_file(offset_path)
        if inode is not None:
            if stat.st_ino == inode:
                debug_echo('inodes are the same')
                if offset == stat.st_size:
                    debug_echo('offset points to eof')
                    return
                elif offset > stat.st_size:
                    warning_echo(
                        'File shrunk since last read: {} ({} < {})'.format(
                            path, stat.st_size, offset))
                    offset = 0
            else:
                debug_echo('inode changed, checking for rotated file')
                rotated_path = _check_rotated_file(path, inode)
                if rotated_path is not None:
                    try:
                        rotated_file = open(rotated_path,
                                            encoding='utf-8',
                                            errors='replace')
                    except OSError as exc:
                        warning_echo(
                            'Could not read rotated file: {} ({})'.format(
                                rotated_path, exc))
                    else:
                        closer.enter_context(rotated_file)
                        rotated_file.seek(offset)
                        line_iter = itertools.chain(line_iter,
                                                    iter(rotated_file))
                offset = 0
        logfile.seek(offset)
        line_iter = itertools.chain(line_iter, iter(logfile))
        for line in line_iter:
            line = line.strip()
            yield line
        pos = logfile.tell()
        debug_echo('reached end of logfile at {}'.format(pos))
        if not dry_run:
            debug_echo('writing offset file: ' + offset_path)
            _write_offset_file(offset_path, stat.st_ino, pos)
        else:
            debug_echo('dry run - not writing offset file')
Example #8
def test_warning_echo(mocker):
    secho = mocker.patch('logstapo.util.click.secho')
    util.warning_echo('test')
    assert secho.called
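
A slightly stricter variant of the same test using the mock's own assertion helper; it still only verifies that click.secho was invoked exactly once, without assuming anything about the arguments warning_echo passes along:

def test_warning_echo_called_once(mocker):
    # Same patch as above, but assert exactly one call to click.secho
    secho = mocker.patch('logstapo.util.click.secho')
    util.warning_echo('test')
    secho.assert_called_once()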