def report(rel):
    """Fires if the machine is running Fedora."""

    if "Fedora" in rel.product:
        return make_response("IS_FEDORA")
    else:
        return make_response("IS_NOT_FEDORA")
def report(rel, hostname):
    """Fires if the machine is running Fedora."""

    if "Fedora" in rel.product:
        return make_response(ERROR_KEY_IS_FEDORA,
                             hostname=hostname.hostname,
                             product=rel.product)
    else:
        return make_response(ERROR_KEY_IS_NOT_FEDORA,
                             hostname=hostname.hostname,
                             product=rel.product)
def integration_test():

    input_data = InputData("test_fedora")
    input_data.add(Specs.redhat_release, FEDORA)
    expected = make_response("IS_FEDORA", product="Fedora")

    yield input_data, expected

    input_data = InputData("test_rhel")
    input_data.add(Specs.redhat_release, RHEL)
    expected = make_response("IS_NOT_FEDORA", product="Red Hat Enterprise Linux Server")

    yield input_data, expected
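
# A hypothetical driver for the generator above (not shown in the original
# snippet), assuming the same run_test helper used by the other integration
# tests in this listing and that `report` is the Fedora rule being exercised.
def test_report_integration():
    for input_data, expected in integration_test():
        run_test(report, input_data, expected)
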
Example #4
def test_validate_good_response():
    assert plugins.make_response("a_test", foo="bar") == {
        "type": "rule",
        "error_key": "a_test",
        "foo": "bar"
    }
    assert plugins.make_fail("a_test", foo="bar") == {
        "type": "rule",
        "error_key": "a_test",
        "foo": "bar"
    }
    assert plugins.make_pass("a_test", foo="bar") == {
        "type": "pass",
        "pass_key": "a_test",
        "foo": "bar"
    }
    assert plugins.make_fingerprint("a_test", foo="bar") == {
        "type": "fingerprint",
        "fingerprint_key": "a_test",
        "foo": "bar"
    }
    assert plugins.make_metadata(foo="bar") == {
        "type": "metadata",
        "foo": "bar"
    }
    assert plugins.make_metadata_key("foo", "bar") == {
        "type": "metadata_key",
        "key": "foo",
        "value": "bar"
    }
Example #5
def rsyslog_dropping_messages(shared):
    """
    Use the file_dropped_messages scan to pick up if any messages
    have been dropped.
    """
    msgs = shared[Messages]
    drops_by_process = msgs.dropped_messages

    # If we have an empty dict, because no messages were dropped, skip out now.
    if not drops_by_process:
        return

    # Try to determine the defaults for rate limiting from the configuration
    # file
    interval, limit = shared[find_rate_limiting_params]
    max_burst = max(p['max'] for p in drops_by_process.values())
    if max_burst <= limit:
        # Simple logic - do not recommend reducing the burst lines
        return

    return make_response(
        ERROR_KEY,
        drops_by_process=drops_by_process,
        current_interval=interval,
        current_limit=limit,
        new_limit=max_burst,
        new_config="$SysLogRateLimitBurst {m}".format(m=max_burst),
    )
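
# A worked example of the recommendation logic above, using the values that
# the "bad_default_limit_high_drops" integration test later in this listing
# expects (assumed here purely for illustration):
def _illustrate_burst_recommendation():
    interval, limit = 5, 200                  # defaults read from rsyslog.conf
    drops_by_process = {'55082': {'count': 725, 'max': 245, 'lines': 3}}
    max_burst = max(p['max'] for p in drops_by_process.values())
    assert max_burst > limit                  # 245 > 200, so the rule fires
    assert "$SysLogRateLimitBurst {m}".format(m=max_burst) == "$SysLogRateLimitBurst 245"
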
Example #6
def test_make_response_too_big():
    content = "foo" * 50000
    assert plugins.make_response("TESTING", big=content) == {
        "type": "rule",
        "error_key": "TESTING",
        "max_detail_length_error": len(json.dumps({"error_key": "TESTING", "type": "rule", "big": content}))
    }
def test_vulnerable_kernel_integration():
    comp = vulnerable_kernel.report
    for kernel, i in generate_inputs(VULNERABLE):
        expected = make_response(ERROR_KEY, kernel=kernel)
        run_test(comp, i, expected)

    for _, i in generate_inputs(NOT_VULNERABLE):
        run_test(comp, i, None)
def test_integration_tests():
    comp = insights_heartbeat.is_insights_heartbeat

    input_data = InputData(name="Match: no kernel")
    input_data.add(Specs.hostname, insights_heartbeat.HOST)
    expected = make_response(insights_heartbeat.ERROR_KEY)
    run_test(comp, input_data, expected)

    input_data = InputData(name="No Match: bad hostname")
    input_data.add(Specs.hostname, NON_MATCHING_HOSTNAME)
    run_test(comp, input_data, None)
Example #9
def localhost_in_hosts(shared):
    """
    If 'localhost' is not in the set of host names, then inform the user.
    """
    hosts = shared[Hosts]

    if 'localhost' not in hosts.all_names:
        return make_response(
            ERROR_KEY,
            message=MESSAGE,
            hosts_defined=hosts.all_names,
        )
def report(installed_rpms, sshd_config):
    errors = {}
    errors = check_auth_method(sshd_config, errors)
    errors = check_log_level(sshd_config, errors)
    errors = check_permit_root(sshd_config, errors)
    errors = check_protocol(sshd_config, errors)

    if errors:
        openssh_version = installed_rpms.get_max('openssh')
        return make_response(ERROR_KEY,
                             errors=errors,
                             openssh=openssh_version.package)
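
# The check_* helpers used above are not shown in this listing; a hypothetical
# sketch of one of them, assuming the sshd_config parser exposes a
# last("Keyword") lookup (the real parser API may differ):
def check_protocol(sshd_config, errors):
    value = sshd_config.last("Protocol")
    # Protocol 1 is the insecure legacy setting flagged in the tests below.
    if value == "1":
        errors["Protocol"] = value
    return errors
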
def integration_tests():
    """
    InputData acts as the data source for the parsers
    so that they may execute and then be used as input
    to the rule.  So this is essentially an end-to-end
    test of the component chain.
    """
    input_data = InputData("GOOD_CONFIG")
    input_data.add(LocalSpecs.sshd_config, GOOD_CONFIG)
    input_data.add(Specs.installed_rpms, OPENSSH_RPM)
    yield input_data, None

    input_data = InputData("BAD_CONFIG")
    input_data.add(LocalSpecs.sshd_config, BAD_CONFIG)
    input_data.add(Specs.installed_rpms, OPENSSH_RPM)
    errors = {
        'AuthenticationMethods': 'badkey',
        'LogLevel': 'normal',
        'PermitRootLogin': '******',
        'Protocol': '1'
    }
    expected = make_response(sshd_secure.ERROR_KEY,
                             errors=errors,
                             openssh=EXPECTED_OPENSSH)
    yield input_data, expected

    input_data = InputData("DEFAULT_CONFIG")
    input_data.add(LocalSpecs.sshd_config, DEFAULT_CONFIG)
    input_data.add(Specs.installed_rpms, OPENSSH_RPM)
    errors = {
        'AuthenticationMethods': 'default',
        'LogLevel': 'default',
        'PermitRootLogin': '******'
    }
    expected = make_response(sshd_secure.ERROR_KEY,
                             errors=errors,
                             openssh=EXPECTED_OPENSSH)
    yield input_data, expected
def report(running_browsers):
    """
    Collect all running browser processes; if any of them is run by the root user, issue a warning.
    """
    running_browsers_as_root = set(p["COMMAND"] for p in running_browsers if p["USER"] == "root")

    if 'chromium-browse' in running_browsers_as_root:
        # ps auxcww has a 15-char limit for process name. Convert
        # 'chromium-browse' to 'chromium-browser' just in case.
        running_browsers_as_root.remove('chromium-browse')
        running_browsers_as_root.add('chromium-browser')

    if running_browsers_as_root:
        return make_response(ERROR_KEY, browsers=sorted(running_browsers_as_root))
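
# Why the 15-character rename above matters: ps auxcww truncates command
# names, so the truncated 'chromium-browse' maps back to the full browser name.
assert "chromium-browser"[:15] == "chromium-browse"
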
def integration_tests():
    # Test that should pass
    data = InputData("localhost_in_hosts")
    data.add('hosts', HOSTS_WITH_LOCALHOST)
    yield data, []

    # Test that should fail
    data = InputData("localhost_not_in_hosts")
    data.add('hosts', HOSTS_WITHOUT_LOCALHOST)
    expected = make_response(localhost_in_hosts.ERROR_KEY,
                             message=localhost_in_hosts.MESSAGE,
                             hosts_defined=set({
                                 'fte.example.com', 'nonlocal.example.com',
                                 'nonlocal2.fte.example.com'
                             }))
    yield data, [expected]
def test_vulnerable_kernel():
    for kernel in NOT_VULNERABLE:
        uname_line = UNAME_TEMPLATE % kernel
        result = vulnerable_kernel.report(Uname(context_wrap(uname_line)))
        expected = None
        if not (result == expected):
            print(result)
            print(expected)
            assert result == expected
            assert False
    for kernel in VULNERABLE:
        uname_line = UNAME_TEMPLATE % kernel
        result = vulnerable_kernel.report(Uname(context_wrap(uname_line)))
        expected = make_response(ERROR_KEY, kernel=kernel)
        if not (result == expected):
            print(result)
            print(expected)
            assert result == expected
            assert False
Example #15
def test_missing_error_key():
    with pytest.raises(plugins.ValidationException):
        plugins.make_response(None, foo="bar")
    with pytest.raises(plugins.ValidationException):
        plugins.make_metadata_key(None, "foo")
Example #16
def integration_tests():
    # Test that should pass
    data = InputData("good_test_1")
    data.add('messages', MESSAGES)
    data.add('rsyslog.conf', RSYSLOG_CONF_DEFAULT_LIMITS)
    yield data, []

    data = InputData("good_test_2")
    data.add('messages', MESSAGES_WITH_FEW_DROPS)
    data.add('rsyslog.conf', RSYSLOG_CONF_DEFAULT_LIMITS)
    yield data, []

    # Test that should fail
    data = InputData("bad_default_limit_high_drops")
    data.add('messages', MESSAGES_WITH_MANY_DROPS)
    data.add('rsyslog.conf', RSYSLOG_CONF_DEFAULT_LIMITS)
    expected = make_response(
        rsyslog_dropping_messages.ERROR_KEY,
        drops_by_process={'55082': {
            'count': 725,
            'max': 245,
            'lines': 3
        }},
        current_interval=5,
        current_limit=200,
        new_limit=245,
        new_config="$SysLogRateLimitBurst {m}".format(m=245),
    )
    yield data, [expected]

    data = InputData("bad_low_limit_high_drops")
    data.add('messages', MESSAGES_WITH_MANY_DROPS)
    data.add('rsyslog.conf', RSYSLOG_CONF_LOW_BURST)
    expected = make_response(
        rsyslog_dropping_messages.ERROR_KEY,
        drops_by_process={'55082': {
            'count': 725,
            'max': 245,
            'lines': 3
        }},
        current_interval=5,
        current_limit=10,
        new_limit=245,
        new_config="$SysLogRateLimitBurst {m}".format(m=245),
    )
    yield data, [expected]

    data = InputData("bad_low_limit_few_drops")
    data.add('messages', MESSAGES_WITH_FEW_DROPS)
    data.add('rsyslog.conf', RSYSLOG_CONF_LOW_BURST)
    expected = make_response(
        rsyslog_dropping_messages.ERROR_KEY,
        drops_by_process={'sshd': {
            'count': 23,
            'max': 12,
            'lines': 2
        }},
        current_interval=5,
        current_limit=10,
        new_limit=12,
        new_config="$SysLogRateLimitBurst {m}".format(m=12),
    )
    yield data, [expected]
Example #17
def is_insights_heartbeat(hostname):
    hostname = hostname.hostname
    if hostname == HOST:
        return make_response(ERROR_KEY)
Example #18
def test_disallow_invalid_keys():
    for bad in [[], None, set(), "", 1, lambda x: x]:
        with pytest.raises(plugins.ValidationException):
            plugins.make_response(bad)
        with pytest.raises(plugins.ValidationException):
            plugins.make_metadata_key(bad, "foo")
Example #19
def test_str_without_type():
    d = plugins.make_response("TESTING", foo="bar")
    del d["type"]
    str(d)
    assert True
Example #20
def test_always_fires():
    i = InputData()
    expected = make_response("ALWAYS_FIRES", kernel="this is junk")
    run_test(always_fires.report, i, expected)
def report():
    if True:
        return make_response("ALWAYS_FIRES", kernel="this is junk")
'''.strip()

PS_BAD_1 = 'root 18681  0.4  0.0 100388  1820 ?        S    14:01   0:00 firefox'
PS_BAD_2 = 'root 18681  0.4  0.0 100388  1820 ?        S    14:01   0:00 chrome'
PS_BAD_3 = 'root 18681  0.4  0.0 100388  1820 ?        S    14:01   0:00 chrome-sandbox'
PS_BAD_4 = 'root 18681  0.4  0.0 100388  1820 ?        S    14:01   0:00 chromium-browser'
PS_BAD_5 = PS_BAD_1 + '\n' + PS_BAD_2
PS_BAD_6 = PS_BAD_1 + '\n' + PS_BAD_2 + '\n' + PS_BAD_2
PS_BAD_7 = 'root 18681  0.4  0.0 100388  1820 ?        S    14:01   0:00 chromium-browse'
PS_BAD_8 = PS_BAD_4 + '\n' + PS_BAD_7
PS_GOOD_1 = 'sherr 18681  0.4  0.0 100388  1820 ?        S    14:01   0:00 firefox'
PS_GOOD_2 = 'root 18681  0.4  0.0 100388  1820 ?        S    14:01   0:00 xfsalloc'
PS_MISSING = ''

PS_TESTS = [
    (PS_AUXCWW_LINES.format(PS_BAD_1), [make_response(ERROR_KEY, browsers=['firefox'])]),
    (PS_AUXCWW_LINES.format(PS_BAD_2), [make_response(ERROR_KEY, browsers=['chrome'])]),
    (PS_AUXCWW_LINES.format(PS_BAD_3), [make_response(ERROR_KEY,
                                                      browsers=['chrome-sandbox'])]),
    (PS_AUXCWW_LINES.format(PS_BAD_4), [make_response(ERROR_KEY,
                                                      browsers=['chromium-browser'])]),
    (PS_AUXCWW_LINES.format(PS_BAD_5), [make_response(ERROR_KEY,
                                                      browsers=['chrome', 'firefox'])]),
    (PS_AUXCWW_LINES.format(PS_BAD_6), [make_response(ERROR_KEY,
                                                      browsers=['chrome', 'firefox'])]),
    (PS_AUXCWW_LINES.format(PS_BAD_7), [make_response(ERROR_KEY,
                                                      browsers=['chromium-browser'])]),
    (PS_AUXCWW_LINES.format(PS_BAD_8), [make_response(ERROR_KEY,
                                                      browsers=['chromium-browser'])]),
    (PS_AUXCWW_LINES.format(PS_GOOD_1), []),
    (PS_AUXCWW_LINES.format(PS_GOOD_2), []),
]
Example #23
def test_disallow_type_key():
    with pytest.raises(plugins.ValidationException):
        plugins.make_response("foo", type="dance off")
def report(uname):
    if uname.fixed_by('2.6.32-431.11.2.el6', introduced_in='2.6.32-431.el6'):
        return make_response("VULNERABLE_KERNEL", kernel=uname.kernel)
def test_heartbeat():
    expected_result = make_response(insights_heartbeat.ERROR_KEY)
    assert expected_result == insights_heartbeat.is_insights_heartbeat(good)
    assert insights_heartbeat.is_insights_heartbeat(bad) is None
def report(rel):
    if "Fedora" in rel.product:
        return make_response("IS_FEDORA")