def test_vulnerable_kernel_integration():
    comp = vulnerable_kernel.report
    for kernel, i in generate_inputs(VULNERABLE):
        expected = make_response(ERROR_KEY, kernel=kernel)
        run_test(comp, i, expected)

    for _, i in generate_inputs(NOT_VULNERABLE):
        run_test(comp, i, None)
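
# The loops above assume a generate_inputs helper that yields (kernel,
# InputData) pairs, one per kernel version string in the given list.  A minimal
# sketch of that helper follows; the VULNERABLE/NOT_VULNERABLE version strings
# and the uname line are illustrative assumptions, not values from the module
# under test.
from insights.specs import Specs
from insights.tests import InputData

VULNERABLE = ["3.10.0-123.el7.x86_64"]         # assumed example versions
NOT_VULNERABLE = ["3.10.0-1160.el7.x86_64"]

UNAME_TEMPLATE = ("Linux testhost {kernel} #1 SMP Mon Jan 1 00:00:00 UTC 2021 "
                  "x86_64 x86_64 x86_64 GNU/Linux")


def generate_inputs(kernels):
    """Yield (kernel, InputData) pairs for each kernel version string."""
    for kernel in kernels:
        input_data = InputData("kernel %s" % kernel)
        input_data.add(Specs.uname, UNAME_TEMPLATE.format(kernel=kernel))
        yield kernel, input_data
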
def test_lscpu():
    input_data = InputData().add(Specs.lscpu, LSCPU_1)
    result = run_test(system_profile, input_data)
    assert result["cores_per_socket"] == 1

    input_data = InputData().add(Specs.lscpu, LSCPU_2)
    result = run_test(system_profile, input_data)
    assert result.get("cores_per_socket") is None
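
# The system_profile tests in this collection assume module-level imports along
# these lines.  InputData, run_test and Specs come from insights-core; the
# system_profile component itself ships with the project under test, so its
# import path is an assumption and varies by project.
from insights.specs import Specs
from insights.tests import InputData, run_test

# e.g. (hypothetical module path):
# from myproject.profile import system_profile
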

Example #3

def test_integration_tests():
    comp = insights_heartbeat.is_insights_heartbeat

    input_data = InputData(name="Match: no kernel")
    input_data.add(Specs.hostname, insights_heartbeat.HOST)
    expected = make_fail(insights_heartbeat.ERROR_KEY)
    run_test(comp, input_data, expected)

    input_data = InputData(name="No Match: bad hostname")
    input_data.add(Specs.hostname, NON_MATCHING_HOSTNAME)
    run_test(comp, input_data, None)
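
# A minimal sketch of the rule exercised above, inferred from the test: it
# returns make_fail(ERROR_KEY) when the reported hostname equals
# insights_heartbeat.HOST and stays silent otherwise.  The HOST value and the
# NON_MATCHING_HOSTNAME constant used by the test are assumptions here.
from insights.core.plugins import make_fail, rule
from insights.parsers.hostname import Hostname

ERROR_KEY = "INSIGHTS_HEARTBEAT"
HOST = "insights-heartbeat.example.com"            # assumed sentinel hostname
NON_MATCHING_HOSTNAME = "ordinary-host.example.com"


@rule(Hostname)
def is_insights_heartbeat(hostname):
    if hostname.fqdn == HOST:
        return make_fail(ERROR_KEY)
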
def test_gcp_license_codes():
    input_data = InputData().add(Specs.gcp_license_codes, GCP_LICENSE_CODES_1)
    result = run_test(system_profile, input_data)
    assert result["is_marketplace"] is True

    input_data = InputData().add(Specs.gcp_license_codes, GCP_LICENSE_CODES_2)
    result = run_test(system_profile, input_data)
    assert result["is_marketplace"] is True

    input_data = InputData().add(Specs.gcp_license_codes,
                                 GCP_LICENSE_CODES_BAD)
    result = run_test(system_profile, input_data)
    assert result.get("is_marketplace") is None

Example #5

def test_rpmostree_status_full():
    input_data = InputData().add(Specs.rpm_ostree_status, DATA_1)
    result = run_test(system_profile, input_data)
    deployments = result["rpm_ostree_deployments"]
    assert len(deployments) == 2

    dep = deployments[0]
    assert dep == {
        "id": "fedora-silverblue-63335a77f9853618ba1a5f139c5805e82176a2a040ef5e34d7402e12263af5bb.0",
        "checksum": "63335a77f9853618ba1a5f139c5805e82176a2a040ef5e34d7402e12263af5bb",
        "origin": "fedora/33/x86_64/silverblue",
        "osname": "fedora-silverblue",
        "version": "33.21",
        "booted": True,
        "pinned": False,
    }

    dep = deployments[1]
    assert dep == {
        "id": "fedora-silverblue-775d54e89bc74731ec27db04f12510c0269c8cbab3ad5e39e0a4d693231ef072.0",
        "checksum": "775d54e89bc74731ec27db04f12510c0269c8cbab3ad5e39e0a4d693231ef072",
        "origin": "fedora/33/x86_64/silverblue",
        "osname": "fedora-silverblue",
        "version": "33.17",
        "booted": False,
        "pinned": False,
    }
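
# A plausible shape for DATA_1, reconstructed from the assertions above.  The
# real fixture is raw `rpm-ostree status --json` output; only the fields the
# test checks are sketched here, so treat the exact payload as an assumption.
DATA_1 = """
{
  "deployments": [
    {
      "id": "fedora-silverblue-63335a77f9853618ba1a5f139c5805e82176a2a040ef5e34d7402e12263af5bb.0",
      "checksum": "63335a77f9853618ba1a5f139c5805e82176a2a040ef5e34d7402e12263af5bb",
      "origin": "fedora/33/x86_64/silverblue",
      "osname": "fedora-silverblue",
      "version": "33.21",
      "booted": true,
      "pinned": false
    },
    {
      "id": "fedora-silverblue-775d54e89bc74731ec27db04f12510c0269c8cbab3ad5e39e0a4d693231ef072.0",
      "checksum": "775d54e89bc74731ec27db04f12510c0269c8cbab3ad5e39e0a4d693231ef072",
      "origin": "fedora/33/x86_64/silverblue",
      "osname": "fedora-silverblue",
      "version": "33.17",
      "booted": false,
      "pinned": false
    }
  ]
}
"""
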

Example #6

    def _run_rule(rule, input_data, return_make_none=False):
        """
        Fixture for rule integration testing

        Use this fixture to create an integration test for your rule plugin.

        Sample code::

            def test_myrule(run_rule):
                input_data = InputData('my test name')
                input_data.add(Specs.installed_rpms, RPMS_DATA, path='optional_spec_path')
                expected = make_fail(ERROR_KEY, bad_data=data_expected)
                results = run_rule(my_rule, input_data)
                assert results == expected

        Arguments:
            rule (object): Your rule function object.
            input_data (InputData): InputData object containing all of the
                necessary data for the test.
            return_make_none (bool): Set to True if your test expects
                ``make_none()`` results instead of ``None``.
        """
        result = run_test(rule, input_data, return_make_none=return_make_none)
        # Treat "skip" results, and "none" results unless return_make_none is
        # set, as missing components and return None, for compatibility with
        # the archive_provider decorator.
        if (result is not None and 'type' in result
                and (result['type'] == 'skip' or
                     (result['type'] == 'none' and not return_make_none))):
            return None
        else:
            return result
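
# The nested _run_rule above is the body of a pytest fixture; a conftest.py
# registration along these lines is assumed (the exact decorator arguments are
# an assumption):
import pytest


@pytest.fixture
def run_rule():
    def _run_rule(rule, input_data, return_make_none=False):
        ...  # body as shown above
    return _run_rule
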

Example #7

def test_cpuinfo():
    input_data = InputData().add(Specs.cpuinfo, CPU_INFO_1)
    result = run_test(system_profile, input_data)
    assert result["cpu_flags"] == [
        "fpu", "vme", "de", "pse", "tsc", "msr", "pae", "mce"
    ]
    assert result["cpu_model"] == "Intel(R) Xeon(R) CPU E5-2690 0 @ 2.90GHz"
    assert result["number_of_cpus"] == 2
    assert result["number_of_sockets"] == 2
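
# Illustrative shape for CPU_INFO_1, inferred from the assertions above: two
# processor stanzas on separate sockets (physical id 0 and 1) sharing the same
# model name and flag list.  The real fixture is raw /proc/cpuinfo text, so
# treat this value as an assumption.
CPU_INFO_1 = """
processor       : 0
model name      : Intel(R) Xeon(R) CPU E5-2690 0 @ 2.90GHz
physical id     : 0
flags           : fpu vme de pse tsc msr pae mce

processor       : 1
model name      : Intel(R) Xeon(R) CPU E5-2690 0 @ 2.90GHz
physical id     : 1
flags           : fpu vme de pse tsc msr pae mce
""".strip()
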

Example #8

def test_rpmostree_status_simple():
    input_data = InputData().add(Specs.rpm_ostree_status, DATA_0)
    result = run_test(system_profile, input_data)
    deployments = result["rpm_ostree_deployments"]
    assert len(deployments) == 1
    dep = deployments[0]
    assert dep == {
        "id": "rhel-f0c0294860db563e5906db8c9f257d2bfebe40c93e0320b0e380b879f545e267.0",
        "checksum": "f0c0294860db563e5906db8c9f257d2bfebe40c93e0320b0e380b879f545e267",
        "origin": "edge:rhel/8/x86_64/edge",
        "osname": "rhel",
        "booted": True,
        "pinned": False,
    }

Example #9

    def _run_rule(name, rule, input_data):
        """
        Fixture for rule integration testing

        Use this fixture to create an integration test for your rule plugin.

        Sample code::

            def test_myrule(run_rule):
                input_data = {'spec': Specs.installed_rpms, 'data': RPMS_DATA}
                expected = make_fail(ERROR_KEY, bad_data=data_expected)
                results = run_rule('my test name', my_rule, input_data)
                assert results == expected

        Arguments:
            name (str): Name to identify this test in output.
            rule (object): Your rule function object.
            input_data (list or dict): List of dicts, one for each data spec your
                rule requires to trigger.  If only a single spec is needed, a
                dict can be passed instead.  Each dict must include both ``spec``
                and ``data`` keys, and may optionally include ``path`` if the
                spec requires it.

        Returns:
            Result of the call to make_pass, make_fail, etc., or None.

        Raises:
            KeyError: If a dict is missing either the ``spec`` or ``data`` key.
        """
        idata = InputData(name)
        input_data = input_data if isinstance(input_data, list) else [input_data]
        for d in input_data:
            if 'path' in d:
                idata.add(d['spec'], d['data'], path=d['path'])
            else:
                idata.add(d['spec'], d['data'])
        return run_test(rule, idata)
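
# Usage sketch for the list form this variant accepts when a rule needs more
# than one spec; the rule, spec choices and fixture constants below are
# illustrative assumptions mirroring the docstring's sample code.
def test_myrule_multiple_specs(run_rule):
    input_data = [
        {'spec': Specs.installed_rpms, 'data': RPMS_DATA},
        {'spec': Specs.uname, 'data': UNAME_DATA, 'path': 'optional_spec_path'},
    ]
    expected = make_fail(ERROR_KEY, bad_data=data_expected)
    assert run_rule('my multi-spec test', my_rule, input_data) == expected
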

Example #10

def test_display_name():
    input_data = InputData().add(Specs.display_name, DISPLAY_NAME_1)
    result = run_test(system_profile, input_data)
    assert result["display_name"] == "foo-bar"


def test_ansible_host():
    input_data = InputData().add(Specs.ansible_host, ANSIBLE_HOST_1)
    result = run_test(system_profile, input_data)
    assert result["ansible_host"] == "foo-bar"

Example #12

def test_greenboot_status_red():
    input_data = InputData().add(Specs.greenboot_status, RED)
    result = run_test(system_profile, input_data)
    assert result["greenboot_status"] == "red"
    assert result["greenboot_fallback_detected"] is False

Example #13

def test_always_fires():
    i = InputData()
    expected = make_pass("ALWAYS_FIRES", kernel="this is junk")
    run_test(always_fires.report, i, expected)
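
# A minimal sketch of the always_fires rule the test above exercises, inferred
# from the expected make_pass call; treat the decorator arguments as an
# assumption.
from insights.core.plugins import make_pass, rule


@rule()
def report():
    return make_pass("ALWAYS_FIRES", kernel="this is junk")
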

Example #14

def test_greenboot_status_red():
    input_data = InputData().add(Specs.greenboot_status, RED)
    result = run_test(system_profile, input_data)
    assert result["host_type"] == "edge"

Example #15

def test_uname():
    input_data = InputData().add(Specs.uname, UNAME_1)
    result = run_test(system_profile, input_data)
    assert result["arch"] == "x86_64"
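
# Illustrative value for UNAME_1, assumed to be raw `uname -a` output whose
# machine field is the x86_64 the assertion above checks; the hostname and
# kernel version shown are assumptions.
UNAME_1 = ("Linux testhost 4.18.0-305.el8.x86_64 #1 SMP Thu Apr 29 08:54:30 "
           "EDT 2021 x86_64 x86_64 x86_64 GNU/Linux")
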

Example #16

def test_greenboot_status_fallback():
    input_data = InputData().add(Specs.greenboot_status, FALLBACK)
    result = run_test(system_profile, input_data)
    assert result["greenboot_status"] == "green"
    assert result["greenboot_fallback_detected"] is True

Example #17

def test_integration(component, compare_func, input_data, expected):
    actual = tests.run_test(component, input_data)
    compare_func(actual, expected)
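
# test_integration above is a parametrized driver: each generated case supplies
# a component, a comparison function, an InputData object and an expected
# result.  Cases are typically registered through insights-core's
# archive_provider decorator, roughly as sketched below; the rule, spec, data
# and error-key names are assumptions.
from insights.core.plugins import make_fail
from insights.specs import Specs
from insights.tests import InputData, archive_provider


@archive_provider(my_rule)
def integration_cases():
    input_data = InputData("triggers my_rule")
    input_data.add(Specs.installed_rpms, RPMS_DATA)
    yield input_data, make_fail(ERROR_KEY, bad_data="example")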