def test_authn_client_can_read_and_write(kafka_client: client.KafkaClient,
                                         service_account, setup_principals):
    try:
        sdk_install.uninstall(config.PACKAGE_NAME, config.SERVICE_NAME)
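        # TLS transport encryption with SSL (certificate-based) client authentication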
        service_options = {
            "service": {
                "name": config.SERVICE_NAME,
                "service_account": service_account["name"],
                "service_account_secret": service_account["secret"],
                "security": {
                    "transport_encryption": {
                        "enabled": True
                    },
                    "ssl_authentication": {
                        "enabled": True
                    },
                },
            }
        }
        config.install(
            config.PACKAGE_NAME,
            config.SERVICE_NAME,
            config.DEFAULT_BROKER_COUNT,
            additional_options=service_options,
        )

        topic_name = "tls.topic"
        kafka_client.connect(config.DEFAULT_BROKER_COUNT)
        kafka_client.create_topic(topic_name)
        kafka_client.check_users_can_read_and_write(["kafka-tester"],
                                                    topic_name)
    finally:
        sdk_install.uninstall(config.PACKAGE_NAME, config.SERVICE_NAME)
Example 2
def _configure_kafka_cluster(
        kafka_client: client.KafkaClient, zookeeper_service: typing.Dict,
        allow_access_if_no_acl: bool) -> client.KafkaClient:
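    # Resolve the client endpoint of the custom ZooKeeper ensemble that the Kafka service will use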
    zookeeper_dns = sdk_networks.get_endpoint(
        zookeeper_service["package_name"],
        zookeeper_service["service"]["name"], "clientport")["dns"]

    sdk_install.uninstall(config.PACKAGE_NAME, config.SERVICE_NAME)
    service_options = _get_service_options(allow_access_if_no_acl,
                                           kafka_client.kerberos,
                                           zookeeper_dns)

    config.install(
        config.PACKAGE_NAME,
        config.SERVICE_NAME,
        config.DEFAULT_BROKER_COUNT,
        additional_options=service_options,
    )

    kafka_server = {**service_options, **{"package_name": config.PACKAGE_NAME}}

    sdk_cmd.svc_cli(
        kafka_server["package_name"],
        kafka_server["service"]["name"],
        "topic create {}".format(TOPIC_NAME),
    )

    kafka_client.connect()

    # Clear the ACLs
    kafka_client.remove_acls("authorized", TOPIC_NAME)
    return kafka_client
def test_authz_acls_not_required(kafka_client: client.KafkaClient,
                                 kafka_server: dict,
                                 kerberos: sdk_auth.KerberosEnvironment):

    topic_name = "authz.test"
    sdk_cmd.svc_cli(
        kafka_server["package_name"],
        kafka_server["service"]["name"],
        "topic create {}".format(topic_name),
    )

    kafka_client.connect()

    # Since no ACLs are specified, all users can read and write.
    kafka_client.check_users_can_read_and_write(
        ["authorized", "unauthorized", "super"], topic_name)

    log.info("Writing and reading: Adding acl for authorized user")
    kafka_client.add_acls("authorized", topic_name)

    # After adding ACLs the authorized user and super user should still have access to the topic.
    kafka_client.check_users_can_read_and_write(["authorized", "super"],
                                                topic_name)
    kafka_client.check_users_are_not_authorized_to_read_and_write(
        ["unauthorized"], topic_name)
Example 4
def kafka_server(service_account, kafka_client: client.KafkaClient):
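    # Brokers with TLS transport encryption enabled (no client certificate authentication)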
    service_options = {
        "service": {
            "name": config.SERVICE_NAME,
            "service_account": service_account["name"],
            "service_account_secret": service_account["secret"],
            "security": {
                "transport_encryption": {
                    "enabled": True
                }
            },
        }
    }

    sdk_install.uninstall(config.PACKAGE_NAME, config.SERVICE_NAME)
    try:
        sdk_install.install(
            config.PACKAGE_NAME,
            service_name=config.SERVICE_NAME,
            expected_running_tasks=config.DEFAULT_TASK_COUNT,
            additional_options=service_options,
            timeout_seconds=30 * 60,
        )
        kafka_client.connect(config.DEFAULT_BROKER_COUNT)
        kafka_client.create_topic(config.DEFAULT_TOPIC_NAME)
        kafka_client.check_topic_partition_count(
            config.DEFAULT_TOPIC_NAME, config.DEFAULT_PARTITION_COUNT)

        yield {**service_options, **{"package_name": config.PACKAGE_NAME}}
    finally:
        sdk_install.uninstall(config.PACKAGE_NAME, config.SERVICE_NAME)
Example 5
def test_authz_acls_not_required(kafka_client: client.KafkaClient, kerberos,
                                 service_account, setup_principals):
    try:
        sdk_install.uninstall(config.PACKAGE_NAME, config.SERVICE_NAME)
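        # Kerberized brokers with authorization enabled; "super" is the super user and
        # allow_everyone_if_no_acl_found keeps topics without ACLs open to all users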
        service_options = {
            "service": {
                "name": config.SERVICE_NAME,
                "service_account": service_account["name"],
                "service_account_secret": service_account["secret"],
                "security": {
                    "kerberos": {
                        "enabled": True,
                        "kdc": {
                            "hostname": kerberos.get_host(),
                            "port": int(kerberos.get_port())
                        },
                        "realm": kerberos.get_realm(),
                        "keytab_secret": kerberos.get_keytab_path(),
                    },
                    "transport_encryption": {
                        "enabled": True
                    },
                    "authorization": {
                        "enabled": True,
                        "super_users": "User:{}".format("super"),
                        "allow_everyone_if_no_acl_found": True,
                    },
                },
            }
        }

        config.install(
            config.PACKAGE_NAME,
            config.SERVICE_NAME,
            config.DEFAULT_BROKER_COUNT,
            additional_options=service_options,
        )

        topic_name = "authz.test"
        kafka_client.connect(config.DEFAULT_BROKER_COUNT)
        kafka_client.create_topic(topic_name)

        # Clear the ACLs
        kafka_client.remove_acls("authorized", topic_name)

        # Since no ACLs are specified, all users can read and write.
        kafka_client.check_users_can_read_and_write(
            ["authorized", "unauthorized", "super"], topic_name)

        log.info("Writing and reading: Adding acl for authorized user")
        kafka_client.add_acls("authorized", topic_name)

        # After adding ACLs the authorized user and super user should still have access to the topic.
        kafka_client.check_users_can_read_and_write(["authorized", "super"],
                                                    topic_name)
        kafka_client.check_users_are_not_authorized_to_read_and_write(
            ["unauthorized"], topic_name)

    finally:
        sdk_install.uninstall(config.PACKAGE_NAME, config.SERVICE_NAME)
def test_authn_client_can_read_and_write(kafka_client: client.KafkaClient,
                                         service_account, setup_principals):
    try:
        sdk_install.uninstall(config.PACKAGE_NAME, config.SERVICE_NAME)
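        # TLS with an explicit cipher-suite whitelist plus SSL client authentication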
        service_options = {
            "service": {
                "name": config.SERVICE_NAME,
                "service_account": service_account["name"],
                "service_account_secret": service_account["secret"],
                "security": {
                    "transport_encryption": {
                        "enabled":
                        True,
                        "ciphers":
                        "TLS_RSA_WITH_AES_128_GCM_SHA256,TLS_RSA_WITH_AES_128_CBC_SHA256,TLS_RSA_WITH_AES_256_GCM_SHA384,TLS_RSA_WITH_AES_256_CBC_SHA256,TLS_DHE_RSA_WITH_AES_128_GCM_SHA256,TLS_DHE_RSA_WITH_AES_128_CBC_SHA256,TLS_DHE_RSA_WITH_AES_256_GCM_SHA384,TLS_DHE_RSA_WITH_AES_256_CBC_SHA256,TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256,TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256,TLS_ECDHE_RSA_WITH_AES_256_GCM_SHA384,TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA384",
                    },
                    "ssl_authentication": {
                        "enabled": True
                    },
                },
            }
        }
        config.install(
            config.PACKAGE_NAME,
            config.SERVICE_NAME,
            config.DEFAULT_BROKER_COUNT,
            additional_options=service_options,
        )

        kafka_server = {**service_options, **{"package_name": config.PACKAGE_NAME}}

        topic_name = "tls.topic"
        sdk_cmd.svc_cli(
            kafka_server["package_name"],
            kafka_server["service"]["name"],
            "topic create {}".format(topic_name),
            parse_json=True,
        )

        kafka_client.connect()

        user = "******"
        write_success, read_successes, _ = kafka_client.can_write_and_read(
            user, topic_name)

        assert write_success, "Write failed (user={})".format(user)
        assert read_successes, ("Read failed (user={}): "
                                "MESSAGES={} "
                                "read_successes={}".format(
                                    user, kafka_client.MESSAGES,
                                    read_successes))

    finally:
        sdk_install.uninstall(config.PACKAGE_NAME, config.SERVICE_NAME)
Example 7
def kafka_server(configure_security, kafka_client: client.KafkaClient):
    try:
        sdk_install.uninstall(config.PACKAGE_NAME, config.SERVICE_NAME)
        config.install(config.PACKAGE_NAME, config.SERVICE_NAME,
                       config.DEFAULT_BROKER_COUNT)
        kafka_client.connect(config.DEFAULT_BROKER_COUNT)
        yield
    finally:
        sdk_install.uninstall(config.PACKAGE_NAME, config.SERVICE_NAME)
Example 8
def kafka_server(kafka_client: client.KafkaClient, configure_security):
    try:
        sdk_install.uninstall(config.PACKAGE_NAME, config.SERVICE_NAME)
        config.install(config.PACKAGE_NAME, config.SERVICE_NAME, config.DEFAULT_BROKER_COUNT)
        kafka_client.connect()

        yield {"package_name": config.PACKAGE_NAME, "service": {"name": config.SERVICE_NAME}}
    finally:
        sdk_install.uninstall(config.PACKAGE_NAME, config.SERVICE_NAME)
def test_authz_acls_required(kafka_client: client.KafkaClient,
                             kafka_server: dict,
                             kerberos: sdk_auth.KerberosEnvironment):

    topic_name = "authz.test"
    sdk_cmd.svc_cli(
        kafka_server["package_name"],
        kafka_server["service"]["name"],
        "topic create {}".format(topic_name),
    )

    kafka_client.connect(kafka_server)

    # Since no ACLs are specified, only the super user can read and write
    for user in ["super"]:
        log.info("Checking write / read permissions for user=%s", user)
        write_success, read_successes, _ = kafka_client.can_write_and_read(
            user, kafka_server, topic_name, kerberos)
        assert write_success, "Write failed (user={})".format(user)
        assert read_successes, ("Read failed (user={}): "
                                "MESSAGES={} "
                                "read_successes={}".format(
                                    user, kafka_client.MESSAGES,
                                    read_successes))

    for user in ["authorized", "unauthorized"]:
        log.info("Checking lack of write / read permissions for user=%s", user)
        write_success, _, read_messages = kafka_client.can_write_and_read(
            user, kafka_server, topic_name, kerberos)
        assert not write_success, "Write not expected to succeed (user={})".format(
            user)
        assert auth.is_not_authorized(
            read_messages), "Unauthorized expected (user={}".format(user)

    log.info("Writing and reading: Adding acl for authorized user")
    kafka_client.add_acls("authorized", kafka_server, topic_name)

    # After adding ACLs the authorized user and super user should still have access to the topic.
    for user in ["authorized", "super"]:
        log.info("Checking write / read permissions for user=%s", user)
        write_success, read_successes, _ = kafka_client.can_write_and_read(
            user, kafka_server, topic_name, kerberos)
        assert write_success, "Write failed (user={})".format(user)
        assert read_successes, ("Read failed (user={}): "
                                "MESSAGES={} "
                                "read_successes={}".format(
                                    user, kafka_client.MESSAGES,
                                    read_successes))

    for user in ["unauthorized"]:
        log.info("Checking lack of write / read permissions for user=%s", user)
        write_success, _, read_messages = kafka_client.can_write_and_read(
            user, kafka_server, topic_name, kerberos)
        assert not write_success, "Write not expected to succeed (user={})".format(
            user)
        assert auth.is_not_authorized(
            read_messages), "Unauthorized expected (user={}".format(user)
Example 10
def test_pod_replace_on_overlay(kafka_client: client.KafkaClient):
    test_utils.replace_broker_pod(
        config.PACKAGE_NAME,
        config.SERVICE_NAME,
        config.DEFAULT_POD_TYPE,
        config.DEFAULT_BROKER_COUNT,
    )
    kafka_client.connect(config.DEFAULT_BROKER_COUNT)
    test_overlay_network_deployment_and_endpoints()
def test_forward_kerberos_off_tls_on_plaintext_off(kafka_client: client.KafkaClient):
    update_options = {"service": {"security": {"kerberos": {"enabled": False}}}}

    update_service(config.PACKAGE_NAME, config.SERVICE_NAME, update_options)
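    # Plaintext stays disabled, so a non-TLS connection should fail while TLS keeps working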
    with pytest.raises(AssertionError):
        kafka_client._is_tls = False
        kafka_client.connect(config.DEFAULT_BROKER_COUNT)
    kafka_client._is_tls = True
    assert kafka_client.connect(config.DEFAULT_BROKER_COUNT)
    kafka_client.check_users_can_read_and_write([TLS_USER], TOPIC_NAME)
Example 12
def test_authn_client_can_read_and_write(kafka_client: client.KafkaClient,
                                         service_account, setup_principals):
    try:
        sdk_install.uninstall(config.PACKAGE_NAME, config.SERVICE_NAME)
        service_options = {
            "service": {
                "name": config.SERVICE_NAME,
                "service_account": service_account["name"],
                "service_account_secret": service_account["secret"],
                "security": {
                    "transport_encryption": {
                        "enabled": True
                    },
                    "ssl_authentication": {
                        "enabled": True
                    }
                }
            }
        }
        config.install(config.PACKAGE_NAME,
                       config.SERVICE_NAME,
                       config.DEFAULT_BROKER_COUNT,
                       additional_options=service_options)

        kafka_server = {**service_options, **{"package_name": config.PACKAGE_NAME}}

        topic_name = "tls.topic"
        sdk_cmd.svc_cli(kafka_server["package_name"],
                        kafka_server["service"]["name"],
                        "topic create {}".format(topic_name),
                        json=True)

        kafka_client.connect(kafka_server)

        user = "******"
        write_success, read_successes, _ = kafka_client.can_write_and_read(
            user, kafka_server, topic_name, None)

        assert write_success, "Write failed (user={})".format(user)
        assert read_successes, "Read failed (user={}): " \
                               "MESSAGES={} " \
                               "read_successes={}".format(user,
                                                          kafka_client.MESSAGES,
                                                          read_successes)

    finally:
        sdk_install.uninstall(config.PACKAGE_NAME, config.SERVICE_NAME)
def test_client_can_read_and_write(kafka_client: client.KafkaClient,
                                   kafka_server, kerberos):

    topic_name = "tls.topic"
    sdk_cmd.svc_cli(
        kafka_server["package_name"],
        kafka_server["service"]["name"],
        "topic create {}".format(topic_name),
    )

    kafka_client.connect()

    kafka_client.check_users_can_read_and_write(["client"], topic_name)
Example 14
def test_authz_acls_required(kafka_client: client.KafkaClient, service_account,
                             setup_principals):

    try:
        sdk_install.uninstall(config.PACKAGE_NAME, config.SERVICE_NAME)
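        # TLS client authentication with authorization enabled and "super" as the only super user;
        # without the allow-everyone fallback, topics with no ACLs are restricted to super users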
        service_options = {
            "service": {
                "name": config.SERVICE_NAME,
                "service_account": service_account["name"],
                "service_account_secret": service_account["secret"],
                "security": {
                    "transport_encryption": {
                        "enabled": True
                    },
                    "ssl_authentication": {
                        "enabled": True
                    },
                    "authorization": {
                        "enabled": True,
                        "super_users": "User:{}".format("super")
                    },
                },
            }
        }
        config.install(
            config.PACKAGE_NAME,
            config.SERVICE_NAME,
            config.DEFAULT_BROKER_COUNT,
            additional_options=service_options,
        )

        topic_name = "authz.test"
        kafka_client.connect(config.DEFAULT_BROKER_COUNT)
        kafka_client.create_topic(topic_name)
        # Since no ACLs are specified, only the super user can read and write
        kafka_client.check_users_can_read_and_write(["super"], topic_name)
        kafka_client.check_users_are_not_authorized_to_read_and_write(
            ["authorized", "unauthorized"], topic_name)

        log.info("Writing and reading: Adding acl for authorized user")
        kafka_client.add_acls("authorized", topic_name)

        # After adding ACLs the authorized user and super user should still have access to the topic.
        kafka_client.check_users_can_read_and_write(["authorized", "super"],
                                                    topic_name)

        kafka_client.check_users_are_not_authorized_to_read_and_write(
            ["unauthorized"], topic_name)

    finally:
        sdk_install.uninstall(config.PACKAGE_NAME, config.SERVICE_NAME)
Example 15
def configure_package(configure_security, kafka_client: client.KafkaClient):
    try:
        install.uninstall(config.PACKAGE_NAME, config.SERVICE_NAME)
        config.install(
            config.PACKAGE_NAME,
            config.SERVICE_NAME,
            config.DEFAULT_BROKER_COUNT,
            additional_options=sdk_networks.ENABLE_VIRTUAL_NETWORKS_OPTIONS,
        )

        kafka_client.connect(config.DEFAULT_BROKER_COUNT)
        yield  # let the test session execute
    finally:
        install.uninstall(config.PACKAGE_NAME, config.SERVICE_NAME)
def test_reverse_kerberos_on_tls_on_plaintext_on(kerberized_kafka_client: client.KafkaClient):
    update_options = {
        "service": {
            "security": {"transport_encryption": {"enabled": True, "allow_plaintext": True}}
        }
    }

    update_service(config.PACKAGE_NAME, config.SERVICE_NAME, update_options)
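    # With allow_plaintext enabled alongside TLS, both plaintext and TLS connections should succeed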

    kerberized_kafka_client._is_tls = False
    assert kerberized_kafka_client.connect(config.DEFAULT_BROKER_COUNT)
    kerberized_kafka_client.check_users_can_read_and_write([TLS_USER], TOPIC_NAME)
    kerberized_kafka_client._is_tls = True
    assert kerberized_kafka_client.connect(config.DEFAULT_BROKER_COUNT)
    kerberized_kafka_client.check_users_can_read_and_write([TLS_USER], TOPIC_NAME)
def kafka_server(kerberos, kafka_client: client.KafkaClient):
    """
    A pytest fixture that installs a Kerberized kafka service.

    On teardown, the service is uninstalled.
    """

    super_principal = "super"

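    # Kerberos-enabled brokers with authorization on; topics without ACLs stay open to everyone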
    service_options = {
        "service": {
            "name": config.SERVICE_NAME,
            "security": {
                "kerberos": {
                    "enabled": True,
                    "kdc": {
                        "hostname": kerberos.get_host(),
                        "port": int(kerberos.get_port())
                    },
                    "realm": kerberos.get_realm(),
                    "keytab_secret": kerberos.get_keytab_path(),
                },
                "authorization": {
                    "enabled": True,
                    "super_users": "User:{}".format(super_principal),
                    "allow_everyone_if_no_acl_found": True,
                },
            },
        }
    }

    sdk_install.uninstall(config.PACKAGE_NAME, config.SERVICE_NAME)
    try:
        sdk_install.install(
            config.PACKAGE_NAME,
            config.SERVICE_NAME,
            config.DEFAULT_BROKER_COUNT,
            additional_options=service_options,
            timeout_seconds=30 * 60,
        )

        kafka_client.connect(config.DEFAULT_BROKER_COUNT)
        yield
    finally:
        sdk_install.uninstall(config.PACKAGE_NAME, config.SERVICE_NAME)
def kafka_server(kerberos, service_account, kafka_client: client.KafkaClient):
    """
    A pytest fixture that installs a Kerberized kafka service.

    On teardown, the service is uninstalled.
    """
    service_kerberos_options = {
        "service": {
            "name": config.SERVICE_NAME,
            "service_account": service_account["name"],
            "service_account_secret": service_account["secret"],
            "security": {
                "custom_domain": sdk_hosts.get_crypto_id_domain(),
                "kerberos": {
                    "enabled": True,
                    "kdc": {
                        "hostname": kerberos.get_host(),
                        "port": int(kerberos.get_port())
                    },
                    "realm": kerberos.get_realm(),
                    "keytab_secret": kerberos.get_keytab_path(),
                },
                "transport_encryption": {
                    "enabled": True
                },
            },
        }
    }

    sdk_install.uninstall(config.PACKAGE_NAME, config.SERVICE_NAME)
    try:
        sdk_install.install(
            config.PACKAGE_NAME,
            config.SERVICE_NAME,
            config.DEFAULT_BROKER_COUNT,
            additional_options=service_kerberos_options,
            timeout_seconds=30 * 60,
        )

        kafka_client.connect(config.DEFAULT_BROKER_COUNT)
        yield
    finally:
        sdk_install.uninstall(config.PACKAGE_NAME, config.SERVICE_NAME)
def test_client_can_read_and_write(kafka_client: client.KafkaClient,
                                   kafka_server, kerberos):

    topic_name = "authn.test"
    sdk_cmd.svc_cli(kafka_server["package_name"],
                    kafka_server["service"]["name"],
                    "topic create {}".format(topic_name),
                    json=True)

    kafka_client.connect(kafka_server)

    user = "******"
    write_success, read_successes, _ = kafka_client.can_write_and_read(
        user, kafka_server, topic_name, kerberos)
    assert write_success, "Write failed (user={})".format(user)
    assert read_successes, "Read failed (user={}): " \
                           "MESSAGES={} " \
                           "read_successes={}".format(user,
                                                      kafka_client.MESSAGES,
                                                      read_successes)
def test_reverse_kerberos_on_tls_on_plaintext_off(
    kerberized_kafka_client: client.KafkaClient, kerberos: sdk_auth.KerberosEnvironment
):
    update_options = {
        "service": {
            "security": {
                "kerberos": {
                    "enabled": True,
                    "kdc": {"hostname": kerberos.get_host(), "port": int(kerberos.get_port())},
                    "realm": kerberos.get_realm(),
                    "keytab_secret": kerberos.get_keytab_path(),
                }
            }
        }
    }

    update_service(config.PACKAGE_NAME, config.SERVICE_NAME, update_options)
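    # Plaintext remains disabled, so only the TLS listener should accept connections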
    with pytest.raises(AssertionError):
        kerberized_kafka_client._is_tls = False
        kerberized_kafka_client.connect(config.DEFAULT_BROKER_COUNT)
    kerberized_kafka_client._is_tls = True
    assert kerberized_kafka_client.connect(config.DEFAULT_BROKER_COUNT)
    kerberized_kafka_client.check_users_can_read_and_write([TLS_USER], TOPIC_NAME)
def _configure_kafka_cluster(
        kafka_client: client.KafkaClient, zookeeper_service: typing.Dict,
        allow_access_if_no_acl: bool) -> client.KafkaClient:
    zookeeper_dns = sdk_networks.get_endpoint(
        zookeeper_service["package_name"],
        zookeeper_service["service"]["name"], "clientport")["dns"]

    sdk_install.uninstall(config.PACKAGE_NAME, config.SERVICE_NAME)
    service_options = _get_service_options(allow_access_if_no_acl,
                                           kafka_client.kerberos,
                                           zookeeper_dns)

    config.install(
        config.PACKAGE_NAME,
        config.SERVICE_NAME,
        config.DEFAULT_BROKER_COUNT,
        additional_options=service_options,
    )

    kafka_client.connect(config.DEFAULT_BROKER_COUNT)

    return kafka_client
Example 22
def kafka_server(kerberos, zookeeper_service, kafka_client: client.KafkaClient):

    # Get the zookeeper DNS values
    zookeeper_dns = sdk_networks.get_endpoint(
        zookeeper_service["package_name"], zookeeper_service["service"]["name"], "clientport"
    )["dns"]

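    # Kerberize both the brokers and their ZooKeeper connection, pointing Kafka at the custom ensemble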
    service_options = {
        "service": {
            "name": config.SERVICE_NAME,
            "security": {
                "kerberos": {
                    "enabled": True,
                    "enabled_for_zookeeper": True,
                    "kdc": {"hostname": kerberos.get_host(), "port": int(kerberos.get_port())},
                    "realm": kerberos.get_realm(),
                    "keytab_secret": kerberos.get_keytab_path(),
                }
            },
        },
        "kafka": {"kafka_zookeeper_uri": ",".join(zookeeper_dns)},
    }

    sdk_install.uninstall(config.PACKAGE_NAME, config.SERVICE_NAME)
    try:
        sdk_install.install(
            config.PACKAGE_NAME,
            config.SERVICE_NAME,
            config.DEFAULT_BROKER_COUNT,
            additional_options=service_options,
            timeout_seconds=30 * 60,
        )

        kafka_client.connect(config.DEFAULT_BROKER_COUNT)
        yield
    finally:
        sdk_install.uninstall(config.PACKAGE_NAME, config.SERVICE_NAME)
Example 23
def configure_package(configure_security, kafka_client: client.KafkaClient):
    try:
        sdk_install.uninstall(config.PACKAGE_NAME, FOLDERED_NAME)

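        # Exercise a package upgrade under a foldered service name, with brokers limited to 0.5 CPU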
        sdk_upgrade.test_upgrade(
            config.PACKAGE_NAME,
            FOLDERED_NAME,
            config.DEFAULT_BROKER_COUNT,
            additional_options={
                "service": {
                    "name": FOLDERED_NAME
                },
                "brokers": {
                    "cpus": 0.5
                }
            },
        )

        # wait for brokers to finish registering before starting tests
        kafka_client.connect(config.DEFAULT_BROKER_COUNT)

        yield  # let the test session execute
    finally:
        sdk_install.uninstall(config.PACKAGE_NAME, FOLDERED_NAME)
def test_authz_acls_not_required(kafka_client: client.KafkaClient,
                                 zookeeper_server, kerberos):
    try:
        zookeeper_dns = sdk_networks.get_endpoint(
            zookeeper_server["package_name"],
            zookeeper_server["service"]["name"], "clientport")["dns"]

        sdk_install.uninstall(config.PACKAGE_NAME, config.SERVICE_NAME)
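        # Kerberos for brokers and ZooKeeper, with authorization enabled and open access
        # for topics that have no matching ACL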
        service_options = {
            "service": {
                "name": config.SERVICE_NAME,
                "security": {
                    "kerberos": {
                        "enabled": True,
                        "enabled_for_zookeeper": True,
                        "kdc": {
                            "hostname": kerberos.get_host(),
                            "port": int(kerberos.get_port())
                        },
                        "realm": kerberos.get_realm(),
                        "keytab_secret": kerberos.get_keytab_path(),
                    },
                    "authorization": {
                        "enabled": True,
                        "super_users": "User:{}".format("super"),
                        "allow_everyone_if_no_acl_found": True,
                    },
                },
            },
            "kafka": {
                "kafka_zookeeper_uri": ",".join(zookeeper_dns)
            },
        }

        config.install(
            config.PACKAGE_NAME,
            config.SERVICE_NAME,
            config.DEFAULT_BROKER_COUNT,
            additional_options=service_options,
        )

        kafka_server = {**service_options, **{"package_name": config.PACKAGE_NAME}}

        topic_name = "authz.test"
        sdk_cmd.svc_cli(
            kafka_server["package_name"],
            kafka_server["service"]["name"],
            "topic create {}".format(topic_name),
        )

        kafka_client.connect(kafka_server)

        # Clear the ACLs
        kafka_client.remove_acls("authorized", kafka_server, topic_name)

        # Since no ACLs are specified, all users can read and write.
        for user in ["authorized", "unauthorized", "super"]:
            log.info("Checking write / read permissions for user=%s", user)
            write_success, read_successes, _ = kafka_client.can_write_and_read(
                user, kafka_server, topic_name, kerberos)
            assert write_success, "Write failed (user={})".format(user)
            assert read_successes, ("Read failed (user={}): "
                                    "MESSAGES={} "
                                    "read_successes={}".format(
                                        user, kafka_client.MESSAGES,
                                        read_successes))

        log.info("Writing and reading: Adding acl for authorized user")
        kafka_client.add_acls("authorized", kafka_server, topic_name)

        # After adding ACLs the authorized user and super user should still have access to the topic.
        for user in ["authorized", "super"]:
            log.info("Checking write / read permissions for user=%s", user)
            write_success, read_successes, _ = kafka_client.can_write_and_read(
                user, kafka_server, topic_name, kerberos)
            assert write_success, "Write failed (user={})".format(user)
            assert read_successes, ("Read failed (user={}): "
                                    "MESSAGES={} "
                                    "read_successes={}".format(
                                        user, kafka_client.MESSAGES,
                                        read_successes))

        for user in ["unauthorized"]:
            log.info("Checking lack of write / read permissions for user=%s",
                     user)
            write_success, _, read_messages = kafka_client.can_write_and_read(
                user, kafka_server, topic_name, kerberos)
            assert not write_success, "Write not expected to succeed (user={})".format(
                user)
            assert auth.is_not_authorized(
                read_messages), "Unauthorized expected (user={}".format(user)

    finally:
        # Ensure that we clean up the ZK state.
        kafka_client.remove_acls("authorized", kafka_server, topic_name)

        sdk_install.uninstall(config.PACKAGE_NAME, config.SERVICE_NAME)
Example 25
def test_pod_replace(kafka_client: client.KafkaClient):
    test_utils.replace_broker_pod(config.PACKAGE_NAME, FOLDERED_NAME,
                                  config.DEFAULT_POD_TYPE,
                                  config.DEFAULT_BROKER_COUNT)
    kafka_client.connect(config.DEFAULT_BROKER_COUNT)
def test_authz_acls_required(kafka_client: client.KafkaClient, service_account, setup_principals):

    try:
        sdk_install.uninstall(config.PACKAGE_NAME, config.SERVICE_NAME)
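        # TLS client authentication with authorization enabled and "super" as the super user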
        service_options = {
            "service": {
                "name": config.SERVICE_NAME,
                "service_account": service_account["name"],
                "service_account_secret": service_account["secret"],
                "security": {
                    "transport_encryption": {"enabled": True},
                    "ssl_authentication": {"enabled": True},
                    "authorization": {"enabled": True, "super_users": "User:{}".format("super")},
                },
            }
        }
        config.install(
            config.PACKAGE_NAME,
            config.SERVICE_NAME,
            config.DEFAULT_BROKER_COUNT,
            additional_options=service_options,
        )

        kafka_server = {**service_options, **{"package_name": config.PACKAGE_NAME}}

        topic_name = "authz.test"
        sdk_cmd.svc_cli(
            kafka_server["package_name"],
            kafka_server["service"]["name"],
            "topic create {}".format(topic_name),
        )

        kafka_client.connect(kafka_server)

        # Since no ACLs are specified, only the super user can read and write
        for user in ["super"]:
            log.info("Checking write / read permissions for user=%s", user)
            write_success, read_successes, _ = kafka_client.can_write_and_read(
                user, kafka_server, topic_name, None
            )
            assert write_success, "Write failed (user={})".format(user)
            assert read_successes, (
                "Read failed (user={}): "
                "MESSAGES={} "
                "read_successes={}".format(user, kafka_client.MESSAGES, read_successes)
            )

        for user in ["authorized", "unauthorized"]:
            log.info("Checking lack of write / read permissions for user=%s", user)
            write_success, _, read_messages = kafka_client.can_write_and_read(
                user, kafka_server, topic_name, None
            )
            assert not write_success, "Write not expected to succeed (user={})".format(user)
            assert not write_success, "Write not expected to succeed (user={})".format(user)
            assert auth.is_not_authorized(read_messages), "Unauthorized expected (user={})".format(
                user
            )

        log.info("Writing and reading: Adding acl for authorized user")
        kafka_client.add_acls("authorized", kafka_server, topic_name)

        # After adding ACLs the authorized user and super user should still have access to the topic.
        for user in ["authorized", "super"]:
            log.info("Checking write / read permissions for user=%s", user)
            write_success, read_successes, _ = kafka_client.can_write_and_read(
                user, kafka_server, topic_name, None
            )
            assert write_success, "Write failed (user={})".format(user)
            assert read_successes, (
                "Read failed (user={}): "
                "MESSAGES={} "
                "read_successes={}".format(user, kafka_client.MESSAGES, read_successes)
            )

        for user in ["unauthorized"]:
            log.info("Checking lack of write / read permissions for user=%s", user)
            write_success, _, read_messages = kafka_client.can_write_and_read(
                user, kafka_server, topic_name, None
            )
            assert not write_success, "Write not expected to succeed (user={})".format(user)
            assert auth.is_not_authorized(read_messages), "Unauthorized expected (user={})".format(
                user
            )

    finally:
        sdk_install.uninstall(config.PACKAGE_NAME, config.SERVICE_NAME)
def test_initial_kerberos_off_tls_off_plaintext_off(kafka_client: client.KafkaClient):
    assert kafka_client.connect(config.DEFAULT_BROKER_COUNT)
    kafka_client.create_topic(TOPIC_NAME)
    kafka_client.check_users_can_read_and_write(["default"], TOPIC_NAME)
def test_authz_acls_required(kafka_client: client.KafkaClient, kerberos,
                             service_account, setup_principals):
    try:
        sdk_install.uninstall(config.PACKAGE_NAME, config.SERVICE_NAME)
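        # Kerberos authentication over TLS with an explicit cipher-suite whitelist and authorization enabled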
        service_options = {
            "service": {
                "name": config.SERVICE_NAME,
                "service_account": service_account["name"],
                "service_account_secret": service_account["secret"],
                "security": {
                    "kerberos": {
                        "enabled": True,
                        "kdc": {
                            "hostname": kerberos.get_host(),
                            "port": int(kerberos.get_port())
                        },
                        "realm": kerberos.get_realm(),
                        "keytab_secret": kerberos.get_keytab_path(),
                    },
                    "transport_encryption": {
                        "enabled":
                        True,
                        "ciphers":
                        "TLS_RSA_WITH_AES_128_GCM_SHA256,TLS_RSA_WITH_AES_128_CBC_SHA256,TLS_RSA_WITH_AES_256_GCM_SHA384,TLS_RSA_WITH_AES_256_CBC_SHA256,TLS_DHE_RSA_WITH_AES_128_GCM_SHA256,TLS_DHE_RSA_WITH_AES_128_CBC_SHA256,TLS_DHE_RSA_WITH_AES_256_GCM_SHA384,TLS_DHE_RSA_WITH_AES_256_CBC_SHA256,TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256,TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256,TLS_ECDHE_RSA_WITH_AES_256_GCM_SHA384,TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA384",
                    },
                    "authorization": {
                        "enabled": True,
                        "super_users": "User:{}".format("super")
                    },
                },
            }
        }
        config.install(
            config.PACKAGE_NAME,
            config.SERVICE_NAME,
            config.DEFAULT_BROKER_COUNT,
            additional_options=service_options,
        )

        kafka_server = {**service_options, **{"package_name": config.PACKAGE_NAME}}

        topic_name = "authz.test"
        sdk_cmd.svc_cli(
            kafka_server["package_name"],
            kafka_server["service"]["name"],
            "topic create {}".format(topic_name),
            parse_json=True,
        )

        kafka_client.connect()

        # Clear the ACLs
        kafka_client.remove_acls("authorized", topic_name)

        # Since no ACLs are specified, only the super user can read and write
        for user in ["super"]:
            log.info("Checking write / read permissions for user=%s", user)
            write_success, read_successes, _ = kafka_client.can_write_and_read(
                user, topic_name)
            assert write_success, "Write failed (user={})".format(user)
            assert read_successes, ("Read failed (user={}): "
                                    "MESSAGES={} "
                                    "read_successes={}".format(
                                        user, kafka_client.MESSAGES,
                                        read_successes))

        for user in ["authorized", "unauthorized"]:
            log.info("Checking lack of write / read permissions for user=%s",
                     user)
            write_success, _, read_messages = kafka_client.can_write_and_read(
                user, topic_name)
            assert not write_success, "Write not expected to succeed (user={})".format(
                user)
            assert auth.is_not_authorized(
                read_messages), "Unauthorized expected (user={}".format(user)

        log.info("Writing and reading: Adding acl for authorized user")
        kafka_client.add_acls("authorized", topic_name)

        # After adding ACLs the authorized user and super user should still have access to the topic.
        for user in ["authorized", "super"]:
            log.info("Checking write / read permissions for user=%s", user)
            write_success, read_successes, _ = kafka_client.can_write_and_read(
                user, topic_name)
            assert write_success, "Write failed (user={})".format(user)
            assert read_successes, ("Read failed (user={}): "
                                    "MESSAGES={} "
                                    "read_successes={}".format(
                                        user, kafka_client.MESSAGES,
                                        read_successes))

        for user in ["unauthorized"]:
            log.info("Checking lack of write / read permissions for user=%s",
                     user)
            write_success, _, read_messages = kafka_client.can_write_and_read(
                user, topic_name)
            assert not write_success, "Write not expected to succeed (user={})".format(
                user)
            assert auth.is_not_authorized(
                read_messages), "Unauthorized expected (user={}".format(user)

    finally:
        sdk_install.uninstall(config.PACKAGE_NAME, config.SERVICE_NAME)