def test_authz_acls_not_required(kafka_client: client.KafkaClient,
                                 kafka_server: dict,
                                 kerberos: sdk_auth.KerberosEnvironment):
    """With no ACLs defined and the 'allow everyone if no ACL found' policy,
    every user can produce/consume; adding an ACL for 'authorized' must then
    lock out 'unauthorized' while 'authorized' and 'super' keep access.

    Args:
        kafka_client: connected test client able to produce/consume as users.
        kafka_server: descriptor dict with "package_name" and "service"/"name".
        kerberos: the Kerberos environment fixture (keeps the KDC alive).
    """

    topic_name = "authz.test"
    # Check the CLI exit code (as the other tests in this suite do) so a
    # failed topic creation fails fast here instead of surfacing later as a
    # confusing read/write error.
    rc, _, _ = sdk_cmd.svc_cli(
        kafka_server["package_name"],
        kafka_server["service"]["name"],
        "topic create {}".format(topic_name),
    )
    assert rc == 0, "Topic create command failed"

    kafka_client.connect()

    # Since no ACLs are specified, all users can read and write.
    kafka_client.check_users_can_read_and_write(
        ["authorized", "unauthorized", "super"], topic_name)

    log.info("Writing and reading: Adding acl for authorized user")
    kafka_client.add_acls("authorized", topic_name)

    # After adding ACLs the authorized user and super user should still have access to the topic.
    kafka_client.check_users_can_read_and_write(["authorized", "super"],
                                                topic_name)
    kafka_client.check_users_are_not_authorized_to_read_and_write(
        ["unauthorized"], topic_name)
def test_custom_zookeeper(kafka_client: client.KafkaClient):
    """Point the service at a custom ZooKeeper path via KAFKA_ZOOKEEPER_URI and
    verify that the brokers restart against it, that the advertised 'zookeeper'
    endpoint reflects the new path, and that state (a topic) created against
    the default ZK path is no longer visible afterwards.
    """
    broker_ids = sdk_tasks.get_task_ids(FOLDERED_NAME,
                                        "{}-".format(config.DEFAULT_POD_TYPE))

    # create a topic against the default zk:
    kafka_client.create_topic(config.DEFAULT_TOPIC_NAME)

    marathon_config = sdk_marathon.get_config(FOLDERED_NAME)
    # should be using default path when this envvar is empty/unset:
    assert marathon_config["env"]["KAFKA_ZOOKEEPER_URI"] == ""

    # use a custom zk path that's WITHIN the 'dcos-service-' path, so that it's automatically cleaned up in uninstall:
    zk_path = "master.mesos:2181/{}/CUSTOMPATH".format(
        sdk_utils.get_zk_path(FOLDERED_NAME))
    marathon_config["env"]["KAFKA_ZOOKEEPER_URI"] = zk_path
    sdk_marathon.update_app(marathon_config)

    # The env change restarts the brokers: wait for the old task IDs to be
    # replaced, then for the deployment plan to complete.
    sdk_tasks.check_tasks_updated(FOLDERED_NAME,
                                  "{}-".format(config.DEFAULT_POD_TYPE),
                                  broker_ids)
    sdk_plan.wait_for_completed_deployment(FOLDERED_NAME)

    # wait for brokers to finish registering
    kafka_client.check_broker_count(config.DEFAULT_BROKER_COUNT)

    # The service's advertised 'zookeeper' endpoint must now be the custom path.
    zookeeper = sdk_networks.get_endpoint_string(config.PACKAGE_NAME,
                                                 FOLDERED_NAME, "zookeeper")
    assert zookeeper == zk_path

    # topic created earlier against default zk should no longer be present:
    rc, stdout, _ = sdk_cmd.svc_cli(config.PACKAGE_NAME, FOLDERED_NAME,
                                    "topic list")
    assert rc == 0, "Topic list command failed"

    assert config.DEFAULT_TOPIC_NAME not in json.loads(stdout)
# ---- Example 3 ----
def _configure_kafka_cluster(
        kafka_client: client.KafkaClient, zookeeper_service: typing.Dict,
        allow_access_if_no_acl: bool) -> client.KafkaClient:
    """Reinstall Kafka against the given ZooKeeper ensemble, create the test
    topic, and hand back a connected client with a clean ACL slate.

    Args:
        kafka_client: client fixture; its kerberos env feeds the service options.
        zookeeper_service: descriptor with "package_name" and "service"/"name".
        allow_access_if_no_acl: forwarded to _get_service_options.

    Returns:
        The same kafka_client, connected and with the 'authorized' ACLs removed.
    """
    zk_endpoint = sdk_networks.get_endpoint(
        zookeeper_service["package_name"],
        zookeeper_service["service"]["name"], "clientport")
    zookeeper_dns = zk_endpoint["dns"]

    sdk_install.uninstall(config.PACKAGE_NAME, config.SERVICE_NAME)
    service_options = _get_service_options(allow_access_if_no_acl,
                                           kafka_client.kerberos,
                                           zookeeper_dns)

    config.install(
        config.PACKAGE_NAME,
        config.SERVICE_NAME,
        config.DEFAULT_BROKER_COUNT,
        additional_options=service_options,
    )

    # Build the descriptor the CLI helper expects: the service options plus
    # the package name.
    kafka_server = dict(service_options)
    kafka_server["package_name"] = config.PACKAGE_NAME

    sdk_cmd.svc_cli(
        kafka_server["package_name"],
        kafka_server["service"]["name"],
        "topic create {}".format(TOPIC_NAME),
    )

    kafka_client.connect()

    # Start from a clean slate: drop any ACLs left over from earlier tests.
    kafka_client.remove_acls("authorized", TOPIC_NAME)
    return kafka_client
def test_authn_client_can_read_and_write(kafka_client: client.KafkaClient,
                                         service_account, setup_principals):
    """Install Kafka with TLS transport encryption (explicit cipher list) and
    SSL client authentication, then verify the TLS principal can both produce
    and consume on a fresh topic. The service is uninstalled on exit either way.
    """
    try:
        sdk_install.uninstall(config.PACKAGE_NAME, config.SERVICE_NAME)
        service_options = {
            "service": {
                "name": config.SERVICE_NAME,
                "service_account": service_account["name"],
                "service_account_secret": service_account["secret"],
                "security": {
                    "transport_encryption": {
                        "enabled":
                        True,
                        # Explicit RSA/DHE/ECDHE cipher allowlist for the brokers.
                        "ciphers":
                        "TLS_RSA_WITH_AES_128_GCM_SHA256,TLS_RSA_WITH_AES_128_CBC_SHA256,TLS_RSA_WITH_AES_256_GCM_SHA384,TLS_RSA_WITH_AES_256_CBC_SHA256,TLS_DHE_RSA_WITH_AES_128_GCM_SHA256,TLS_DHE_RSA_WITH_AES_128_CBC_SHA256,TLS_DHE_RSA_WITH_AES_256_GCM_SHA384,TLS_DHE_RSA_WITH_AES_256_CBC_SHA256,TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256,TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256,TLS_ECDHE_RSA_WITH_AES_256_GCM_SHA384,TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA384",
                    },
                    "ssl_authentication": {
                        "enabled": True
                    },
                },
            }
        }
        config.install(
            config.PACKAGE_NAME,
            config.SERVICE_NAME,
            config.DEFAULT_BROKER_COUNT,
            additional_options=service_options,
        )

        # Service descriptor passed to the CLI helper: options + package name.
        kafka_server = {
            **service_options,
            **{
                "package_name": config.PACKAGE_NAME
            }
        }

        topic_name = "tls.topic"
        sdk_cmd.svc_cli(
            kafka_server["package_name"],
            kafka_server["service"]["name"],
            "topic create {}".format(topic_name),
            parse_json=True,
        )

        kafka_client.connect()

        # Credential scrubbed from the published example source.
        user = "******"
        write_success, read_successes, _ = kafka_client.can_write_and_read(
            user, topic_name)

        assert write_success, "Write failed (user={})".format(user)
        assert read_successes, ("Read failed (user={}): "
                                "MESSAGES={} "
                                "read_successes={}".format(
                                    user, kafka_client.MESSAGES,
                                    read_successes))

    finally:
        sdk_install.uninstall(config.PACKAGE_NAME, config.SERVICE_NAME)
# ---- Example 5 ----
def kafka_server(kafka_client: client.KafkaClient, configure_security):
    """Install a stock Kafka service, connect the client, and yield the
    service descriptor; any prior installation (and this one) is removed
    via the finally block on teardown.
    """
    try:
        sdk_install.uninstall(config.PACKAGE_NAME, config.SERVICE_NAME)
        config.install(
            config.PACKAGE_NAME,
            config.SERVICE_NAME,
            config.DEFAULT_BROKER_COUNT,
        )
        kafka_client.connect()

        yield {
            "package_name": config.PACKAGE_NAME,
            "service": {"name": config.SERVICE_NAME},
        }
    finally:
        sdk_install.uninstall(config.PACKAGE_NAME, config.SERVICE_NAME)
def test_pod_replace_on_overlay(kafka_client: client.KafkaClient):
    """Replace one broker pod, wait for the full broker set to return, then
    re-run the overlay-network deployment/endpoint checks."""
    test_utils.replace_broker_pod(config.PACKAGE_NAME, config.SERVICE_NAME,
                                  config.DEFAULT_POD_TYPE,
                                  config.DEFAULT_BROKER_COUNT)
    kafka_client.connect(config.DEFAULT_BROKER_COUNT)
    # The overlay network must still be fully functional after the replace.
    test_overlay_network_deployment_and_endpoints()
# ---- Example 7 ----
def kafka_server(configure_security, kafka_client: client.KafkaClient):
    """Install a stock Kafka service and wait for all brokers to register;
    the service is uninstalled again on teardown."""
    try:
        sdk_install.uninstall(config.PACKAGE_NAME, config.SERVICE_NAME)
        config.install(
            config.PACKAGE_NAME,
            config.SERVICE_NAME,
            config.DEFAULT_BROKER_COUNT,
        )
        kafka_client.connect(config.DEFAULT_BROKER_COUNT)
        yield
    finally:
        sdk_install.uninstall(config.PACKAGE_NAME, config.SERVICE_NAME)
# ---- Example 8 ----
def test_authn_client_can_read_and_write(kafka_client: client.KafkaClient,
                                         service_account, setup_principals):
    """Install Kafka with TLS transport encryption and SSL client
    authentication, then verify the TLS principal can produce and consume on
    a fresh topic. The service is uninstalled on exit either way.
    """
    try:
        sdk_install.uninstall(config.PACKAGE_NAME, config.SERVICE_NAME)
        service_options = {
            "service": {
                "name": config.SERVICE_NAME,
                "service_account": service_account["name"],
                "service_account_secret": service_account["secret"],
                "security": {
                    "transport_encryption": {
                        "enabled": True
                    },
                    "ssl_authentication": {
                        "enabled": True
                    }
                }
            }
        }
        config.install(config.PACKAGE_NAME,
                       config.SERVICE_NAME,
                       config.DEFAULT_BROKER_COUNT,
                       additional_options=service_options)

        # Service descriptor passed to the CLI/client helpers: options + package name.
        kafka_server = {
            **service_options,
            **{
                "package_name": config.PACKAGE_NAME
            }
        }

        topic_name = "tls.topic"
        # NOTE(review): sibling variants of this test pass parse_json=True
        # instead of json=True here — confirm which keyword this revision of
        # sdk_cmd.svc_cli actually accepts.
        sdk_cmd.svc_cli(kafka_server["package_name"],
                        kafka_server["service"]["name"],
                        "topic create {}".format(topic_name),
                        json=True)

        kafka_client.connect(kafka_server)

        # Credential scrubbed from the published example source.
        user = "******"
        write_success, read_successes, _ = kafka_client.can_write_and_read(
            user, kafka_server, topic_name, None)

        assert write_success, "Write failed (user={})".format(user)
        assert read_successes, "Read failed (user={}): " \
                               "MESSAGES={} " \
                               "read_successes={}".format(user,
                                                          kafka_client.MESSAGES,
                                                          read_successes)

    finally:
        sdk_install.uninstall(config.PACKAGE_NAME, config.SERVICE_NAME)
# ---- Example 9 ----
    def permission_test(c: client.KafkaClient, topic_name: str):
        """ACL-required policy check: initially only 'super' may use the
        topic; after an ACL is granted to 'authorized', it gains access while
        'unauthorized' remains locked out.
        """
        # Since no ACLs are specified, only the super user can read and write
        c.check_users_can_read_and_write(["super"], topic_name)
        c.check_users_are_not_authorized_to_read_and_write(
            ["authorized", "unauthorized"], topic_name)

        log.info("Writing and reading: Adding acl for authorized user")
        c.add_acls("authorized", topic_name)

        # After adding ACLs the authorized user and super user should still have access to the topic.
        c.check_users_can_read_and_write(["authorized", "super"], topic_name)
        c.check_users_are_not_authorized_to_read_and_write(["unauthorized"],
                                                           topic_name)
def test_client_can_read_and_write(kafka_client: client.KafkaClient,
                                   kafka_server, kerberos):
    """The 'client' principal can produce to and consume from a freshly
    created topic on the TLS-enabled service."""
    topic = "tls.topic"
    sdk_cmd.svc_cli(kafka_server["package_name"],
                    kafka_server["service"]["name"],
                    "topic create {}".format(topic))

    kafka_client.connect()

    kafka_client.check_users_can_read_and_write(["client"], topic)
# ---- Example 11 ----
def configure_package(configure_security, kafka_client: client.KafkaClient):
    """Install Kafka on the virtual network for the test session; the service
    is uninstalled again when the session ends."""
    try:
        install.uninstall(config.PACKAGE_NAME, config.SERVICE_NAME)
        config.install(config.PACKAGE_NAME,
                       config.SERVICE_NAME,
                       config.DEFAULT_BROKER_COUNT,
                       additional_options=sdk_networks.ENABLE_VIRTUAL_NETWORKS_OPTIONS)

        # Block until every broker has registered before handing over to tests.
        kafka_client.connect(config.DEFAULT_BROKER_COUNT)
        yield  # let the test session execute
    finally:
        install.uninstall(config.PACKAGE_NAME, config.SERVICE_NAME)
def _test_permissions(
    kafka_client: client.KafkaClient,
    zookeeper_service: typing.Dict,
    allow_access_if_no_acl: bool,
    permission_test: typing.Callable[[client.KafkaClient, str], None],
):
    """Provision a cluster with the requested ACL policy, run the supplied
    permission checks, and always clean up the ZK ACL state and the service."""
    try:
        connected_client = _configure_kafka_cluster(
            kafka_client, zookeeper_service, allow_access_if_no_acl)
        permission_test(connected_client, TOPIC_NAME)
    finally:
        # Ensure that we clean up the ZK state.
        kafka_client.remove_acls("authorized", TOPIC_NAME)
        sdk_install.uninstall(config.PACKAGE_NAME, config.SERVICE_NAME)
def test_authz_acls_not_required(kafka_client: client.KafkaClient):
    """With 'allow everyone if no ACL found': all users can use a fresh topic;
    adding an ACL for 'authorized' shuts out only 'unauthorized'."""
    topic = "authz.test"
    kafka_client.create_topic(topic)

    # No ACLs exist yet, so every user is permitted.
    kafka_client.check_users_can_read_and_write(
        ["authorized", "unauthorized", "super"], topic)

    log.info("Writing and reading: Adding acl for authorized user")
    kafka_client.add_acls("authorized", topic)

    # The ACL grants 'authorized'; 'super' retains access; 'unauthorized' loses it.
    kafka_client.check_users_can_read_and_write(["authorized", "super"], topic)
    kafka_client.check_users_are_not_authorized_to_read_and_write(
        ["unauthorized"], topic)
def setup_principals(kafka_client: client.KafkaClient):
    """Create TLS artifacts for every test principal inside the client task."""
    client_task = kafka_client.get_id()
    # Same principals and creation order as before, expressed as a loop.
    for principal in ("kafka-tester", "authorized", "unauthorized", "super"):
        transport_encryption.create_tls_artifacts(cn=principal,
                                                  marathon_task=client_task)
def test_authz_acls_required(kafka_client: client.KafkaClient,
                             kafka_server: dict,
                             kerberos: sdk_auth.KerberosEnvironment):
    """With authorization enabled and no 'allow everyone' fallback: only the
    super user can use a fresh topic; granting an ACL to 'authorized' opens
    access for it while 'unauthorized' stays locked out.

    Args:
        kafka_client: connected test client able to produce/consume as users.
        kafka_server: descriptor dict with "package_name" and "service"/"name".
        kerberos: Kerberos environment used to authenticate each user.
    """

    topic_name = "authz.test"
    sdk_cmd.svc_cli(
        kafka_server["package_name"],
        kafka_server["service"]["name"],
        "topic create {}".format(topic_name),
    )

    kafka_client.connect(kafka_server)

    def assert_users_can_read_and_write(users):
        # Each listed user must both produce and consume all test messages.
        for user in users:
            log.info("Checking write / read permissions for user=%s", user)
            write_success, read_successes, _ = kafka_client.can_write_and_read(
                user, kafka_server, topic_name, kerberos)
            assert write_success, "Write failed (user={})".format(user)
            assert read_successes, ("Read failed (user={}): "
                                    "MESSAGES={} "
                                    "read_successes={}".format(
                                        user, kafka_client.MESSAGES,
                                        read_successes))

    def assert_users_are_not_authorized(users):
        # Each listed user must be denied on write, and reads must surface an
        # authorization error rather than data.
        for user in users:
            log.info("Checking lack of write / read permissions for user=%s",
                     user)
            write_success, _, read_messages = kafka_client.can_write_and_read(
                user, kafka_server, topic_name, kerberos)
            assert not write_success, \
                "Write not expected to succeed (user={})".format(user)
            # Fixed: the original message was missing the closing parenthesis
            # ("Unauthorized expected (user={}").
            assert auth.is_not_authorized(read_messages), \
                "Unauthorized expected (user={})".format(user)

    # Since no ACLs are specified, only the super user can read and write.
    assert_users_can_read_and_write(["super"])
    assert_users_are_not_authorized(["authorized", "unauthorized"])

    log.info("Writing and reading: Adding acl for authorized user")
    kafka_client.add_acls("authorized", kafka_server, topic_name)

    # After adding ACLs the authorized user and super user should still have
    # access to the topic.
    assert_users_can_read_and_write(["authorized", "super"])
    assert_users_are_not_authorized(["unauthorized"])
# ---- Example 16 ----
def test_authn_client_can_read_and_write(kafka_client: client.KafkaClient,
                                         service_account, setup_principals):
    """Install Kafka with TLS transport encryption and SSL client
    authentication, then verify the 'kafka-tester' principal can produce and
    consume on a fresh topic. The service is uninstalled on exit either way.
    """
    try:
        sdk_install.uninstall(config.PACKAGE_NAME, config.SERVICE_NAME)
        service_options = {
            "service": {
                "name": config.SERVICE_NAME,
                "service_account": service_account["name"],
                "service_account_secret": service_account["secret"],
                "security": {
                    "transport_encryption": {
                        "enabled": True
                    },
                    "ssl_authentication": {
                        "enabled": True
                    },
                },
            }
        }
        config.install(
            config.PACKAGE_NAME,
            config.SERVICE_NAME,
            config.DEFAULT_BROKER_COUNT,
            additional_options=service_options,
        )

        # Service descriptor passed to the CLI helper: options + package name.
        kafka_server = {
            **service_options,
            **{
                "package_name": config.PACKAGE_NAME
            }
        }

        topic_name = "tls.topic"
        sdk_cmd.svc_cli(
            kafka_server["package_name"],
            kafka_server["service"]["name"],
            "topic create {}".format(topic_name),
        )

        kafka_client.connect()

        # 'kafka-tester' matches a TLS artifact created by setup_principals.
        kafka_client.check_users_can_read_and_write(["kafka-tester"],
                                                    topic_name)
    finally:
        sdk_install.uninstall(config.PACKAGE_NAME, config.SERVICE_NAME)
def kafka_server(kerberos, kafka_client: client.KafkaClient):
    """
    A pytest fixture that installs a Kerberized kafka service.

    Authorization is enabled with "super" as the super user and
    allow_everyone_if_no_acl_found set to True.

    On teardown, the service is uninstalled.
    """

    super_principal = "super"

    service_options = {
        "service": {
            "name": config.SERVICE_NAME,
            "security": {
                "kerberos": {
                    "enabled": True,
                    "kdc": {
                        "hostname": kerberos.get_host(),
                        "port": int(kerberos.get_port())
                    },
                    "realm": kerberos.get_realm(),
                    "keytab_secret": kerberos.get_keytab_path(),
                },
                "authorization": {
                    "enabled": True,
                    "super_users": "User:{}".format(super_principal),
                    # Without explicit ACLs, everyone is still allowed.
                    "allow_everyone_if_no_acl_found": True,
                },
            },
        }
    }

    sdk_install.uninstall(config.PACKAGE_NAME, config.SERVICE_NAME)
    try:
        sdk_install.install(
            config.PACKAGE_NAME,
            config.SERVICE_NAME,
            config.DEFAULT_BROKER_COUNT,
            additional_options=service_options,
            # Kerberized installs are slow; allow up to 30 minutes.
            timeout_seconds=30 * 60,
        )

        # Wait for every broker to register before yielding to tests.
        kafka_client.connect(config.DEFAULT_BROKER_COUNT)
        yield
    finally:
        sdk_install.uninstall(config.PACKAGE_NAME, config.SERVICE_NAME)
def test_forward_kerberos_on_tls_off_plaintext_off(
    kerberized_kafka_client: client.KafkaClient, kerberos: sdk_auth.KerberosEnvironment
):
    """Enable Kerberos on the running service (TLS off, plaintext off) and
    verify the kerberized client can still connect and read/write."""
    kerberos_settings = {
        "enabled": True,
        "kdc": {"hostname": kerberos.get_host(), "port": int(kerberos.get_port())},
        "realm": kerberos.get_realm(),
        "keytab_secret": kerberos.get_keytab_path(),
    }
    update_options = {"service": {"security": {"kerberos": kerberos_settings}}}

    update_service(config.PACKAGE_NAME, config.SERVICE_NAME, update_options)

    # The kerberized client must reconnect and keep read/write access.
    assert kerberized_kafka_client.connect(config.DEFAULT_BROKER_COUNT)
    kerberized_kafka_client.check_users_can_read_and_write([TLS_USER], TOPIC_NAME)
def kafka_server(kerberos, service_account, kafka_client: client.KafkaClient):
    """
    A pytest fixture that installs a Kerberized kafka service.

    Transport encryption is enabled and a custom crypto-id domain is used.

    On teardown, the service is uninstalled.
    """
    service_kerberos_options = {
        "service": {
            "name": config.SERVICE_NAME,
            "service_account": service_account["name"],
            "service_account_secret": service_account["secret"],
            "security": {
                "custom_domain": sdk_hosts.get_crypto_id_domain(),
                "kerberos": {
                    "enabled": True,
                    "kdc": {
                        "hostname": kerberos.get_host(),
                        "port": int(kerberos.get_port())
                    },
                    "realm": kerberos.get_realm(),
                    "keytab_secret": kerberos.get_keytab_path(),
                },
                "transport_encryption": {
                    "enabled": True
                },
            },
        }
    }

    sdk_install.uninstall(config.PACKAGE_NAME, config.SERVICE_NAME)
    try:
        sdk_install.install(
            config.PACKAGE_NAME,
            config.SERVICE_NAME,
            config.DEFAULT_BROKER_COUNT,
            additional_options=service_kerberos_options,
            # Kerberized installs are slow; allow up to 30 minutes.
            timeout_seconds=30 * 60,
        )

        # Wait for every broker to register before yielding to tests.
        kafka_client.connect(config.DEFAULT_BROKER_COUNT)
        yield
    finally:
        sdk_install.uninstall(config.PACKAGE_NAME, config.SERVICE_NAME)
def test_client_can_read_and_write(kafka_client: client.KafkaClient,
                                   kafka_server, kerberos):
    """The kerberized test user can write to and read back from a new topic."""
    topic_name = "authn.test"
    sdk_cmd.svc_cli(
        kafka_server["package_name"],
        kafka_server["service"]["name"],
        "topic create {}".format(topic_name),
        json=True,
    )

    kafka_client.connect(kafka_server)

    # Credential scrubbed from the published example source.
    user = "******"
    write_success, read_successes, _ = kafka_client.can_write_and_read(
        user, kafka_server, topic_name, kerberos)
    assert write_success, "Write failed (user={})".format(user)
    assert read_successes, (
        "Read failed (user={}): "
        "MESSAGES={} "
        "read_successes={}".format(user, kafka_client.MESSAGES,
                                   read_successes))
def test_forward_kerberos_off_tls_on_plaintext_off(kafka_client: client.KafkaClient):
    """Disable Kerberos on the running service: a plaintext connection must be
    rejected while the TLS path keeps working for the TLS user."""
    update_service(config.PACKAGE_NAME, config.SERVICE_NAME,
                   {"service": {"security": {"kerberos": {"enabled": False}}}})

    # A non-TLS connection attempt is expected to fail its assertions.
    with pytest.raises(AssertionError):
        kafka_client._is_tls = False
        kafka_client.connect(config.DEFAULT_BROKER_COUNT)

    # Over TLS the client reconnects and the TLS user keeps read/write access.
    kafka_client._is_tls = True
    assert kafka_client.connect(config.DEFAULT_BROKER_COUNT)
    kafka_client.check_users_can_read_and_write([TLS_USER], TOPIC_NAME)
# ---- Example 22 ----
def kafka_server(service_account, kafka_client: client.KafkaClient):
    """A pytest fixture that installs Kafka with transport encryption enabled,
    creates the default topic, and checks its partition count before yielding
    the service descriptor. On teardown, the service is uninstalled.
    """
    service_options = {
        "service": {
            "name": config.SERVICE_NAME,
            "service_account": service_account["name"],
            "service_account_secret": service_account["secret"],
            "security": {
                "transport_encryption": {
                    "enabled": True
                }
            },
        }
    }

    sdk_install.uninstall(config.PACKAGE_NAME, config.SERVICE_NAME)
    try:
        sdk_install.install(
            config.PACKAGE_NAME,
            service_name=config.SERVICE_NAME,
            expected_running_tasks=config.DEFAULT_TASK_COUNT,
            additional_options=service_options,
            # TLS installs are slow; allow up to 30 minutes.
            timeout_seconds=30 * 60,
        )
        kafka_client.connect(config.DEFAULT_BROKER_COUNT)
        kafka_client.create_topic(config.DEFAULT_TOPIC_NAME)
        kafka_client.check_topic_partition_count(
            config.DEFAULT_TOPIC_NAME, config.DEFAULT_PARTITION_COUNT)

        # Descriptor handed to tests: the service options plus the package name.
        yield {**service_options, **{"package_name": config.PACKAGE_NAME}}
    finally:
        sdk_install.uninstall(config.PACKAGE_NAME, config.SERVICE_NAME)
def test_reverse_kerberos_on_tls_on_plaintext_on(kerberized_kafka_client: client.KafkaClient):
    """Enable TLS with plaintext still allowed: both the plaintext and the TLS
    listener must accept the kerberized client and allow read/write."""
    update_service(
        config.PACKAGE_NAME,
        config.SERVICE_NAME,
        {
            "service": {
                "security": {"transport_encryption": {"enabled": True, "allow_plaintext": True}}
            }
        },
    )

    # Exercise both listener flavors: plaintext first, then TLS.
    for use_tls in (False, True):
        kerberized_kafka_client._is_tls = use_tls
        assert kerberized_kafka_client.connect(config.DEFAULT_BROKER_COUNT)
        kerberized_kafka_client.check_users_can_read_and_write([TLS_USER], TOPIC_NAME)
def _configure_kafka_cluster(
        kafka_client: client.KafkaClient, zookeeper_service: typing.Dict,
        allow_access_if_no_acl: bool) -> client.KafkaClient:
    """Reinstall Kafka against the given ZooKeeper service's client endpoint
    and return the kafka_client once all brokers are connected."""
    zookeeper_dns = sdk_networks.get_endpoint(
        zookeeper_service["package_name"],
        zookeeper_service["service"]["name"], "clientport")["dns"]

    sdk_install.uninstall(config.PACKAGE_NAME, config.SERVICE_NAME)
    service_options = _get_service_options(allow_access_if_no_acl,
                                           kafka_client.kerberos,
                                           zookeeper_dns)

    config.install(
        config.PACKAGE_NAME,
        config.SERVICE_NAME,
        config.DEFAULT_BROKER_COUNT,
        additional_options=service_options,
    )

    kafka_client.connect(config.DEFAULT_BROKER_COUNT)

    # NOTE(review): the original "Clear the ACLs" comment here was orphaned —
    # unlike the sibling helper of the same name, no remove_acls() call
    # follows it. Confirm that ACL cleanup is handled by the caller
    # (e.g. _test_permissions' finally block).
    return kafka_client
# ---- Example 25 ----
def kafka_server(kerberos, zookeeper_service, kafka_client: client.KafkaClient):
    """A pytest fixture that installs a Kerberized Kafka service wired to an
    external Kerberized ZooKeeper service. On teardown, the service is
    uninstalled.
    """

    # Get the zookeeper DNS values
    zookeeper_dns = sdk_networks.get_endpoint(
        zookeeper_service["package_name"], zookeeper_service["service"]["name"], "clientport"
    )["dns"]

    service_options = {
        "service": {
            "name": config.SERVICE_NAME,
            "security": {
                "kerberos": {
                    "enabled": True,
                    # Kerberize the ZK connection as well as the brokers.
                    "enabled_for_zookeeper": True,
                    "kdc": {"hostname": kerberos.get_host(), "port": int(kerberos.get_port())},
                    "realm": kerberos.get_realm(),
                    "keytab_secret": kerberos.get_keytab_path(),
                }
            },
        },
        # Point the brokers at the external ZK ensemble's client endpoints.
        "kafka": {"kafka_zookeeper_uri": ",".join(zookeeper_dns)},
    }

    sdk_install.uninstall(config.PACKAGE_NAME, config.SERVICE_NAME)
    try:
        sdk_install.install(
            config.PACKAGE_NAME,
            config.SERVICE_NAME,
            config.DEFAULT_BROKER_COUNT,
            additional_options=service_options,
            # Kerberized installs are slow; allow up to 30 minutes.
            timeout_seconds=30 * 60,
        )

        # Wait for every broker to register before yielding to tests.
        kafka_client.connect(config.DEFAULT_BROKER_COUNT)
        yield
    finally:
        sdk_install.uninstall(config.PACKAGE_NAME, config.SERVICE_NAME)
# ---- Example 26 ----
def configure_package(configure_security, kafka_client: client.KafkaClient):
    """Upgrade-install a foldered Kafka service for the test session; the
    service is uninstalled again when the session ends."""
    try:
        sdk_install.uninstall(config.PACKAGE_NAME, FOLDERED_NAME)

        sdk_upgrade.test_upgrade(
            config.PACKAGE_NAME,
            FOLDERED_NAME,
            config.DEFAULT_BROKER_COUNT,
            additional_options={
                "service": {"name": FOLDERED_NAME},
                "brokers": {"cpus": 0.5},
            },
        )

        # wait for brokers to finish registering before starting tests
        kafka_client.connect(config.DEFAULT_BROKER_COUNT)

        yield  # let the test session execute
    finally:
        sdk_install.uninstall(config.PACKAGE_NAME, FOLDERED_NAME)
# ---- Example 27 ----
def test_pod_replace(kafka_client: client.KafkaClient):
    """Replace one broker pod and wait for the full broker set to come back."""
    test_utils.replace_broker_pod(
        config.PACKAGE_NAME,
        FOLDERED_NAME,
        config.DEFAULT_POD_TYPE,
        config.DEFAULT_BROKER_COUNT,
    )
    kafka_client.connect(config.DEFAULT_BROKER_COUNT)
# ---- Example 28 ----
def test_topic_delete_overlay(kafka_client: client.KafkaClient):
    """Delete the ephemeral topic, then verify its partition count via the client."""
    kafka_client.check_topic_deletion(config.EPHEMERAL_TOPIC_NAME)
    kafka_client.check_topic_partition_count(
        config.EPHEMERAL_TOPIC_NAME, config.DEFAULT_PARTITION_COUNT)
# ---- Example 29 ----
def test_authz_acls_required(kafka_client: client.KafkaClient, service_account,
                             setup_principals):
    """Install Kafka with TLS + SSL auth + authorization (super user 'super',
    no 'allow everyone' fallback) and verify: only 'super' can use a fresh
    topic, and granting an ACL to 'authorized' opens access for it while
    'unauthorized' stays locked out. Uninstalls the service on exit.
    """

    try:
        sdk_install.uninstall(config.PACKAGE_NAME, config.SERVICE_NAME)
        service_options = {
            "service": {
                "name": config.SERVICE_NAME,
                "service_account": service_account["name"],
                "service_account_secret": service_account["secret"],
                "security": {
                    "transport_encryption": {
                        "enabled": True
                    },
                    "ssl_authentication": {
                        "enabled": True
                    },
                    "authorization": {
                        "enabled": True,
                        "super_users": "User:{}".format("super")
                    },
                },
            }
        }
        config.install(
            config.PACKAGE_NAME,
            config.SERVICE_NAME,
            config.DEFAULT_BROKER_COUNT,
            additional_options=service_options,
        )

        # Service descriptor passed to the CLI helper: options + package name.
        kafka_server = {
            **service_options,
            **{
                "package_name": config.PACKAGE_NAME
            }
        }

        topic_name = "authz.test"
        sdk_cmd.svc_cli(
            kafka_server["package_name"],
            kafka_server["service"]["name"],
            "topic create {}".format(topic_name),
        )

        kafka_client.connect()

        # Since no ACLs are specified, only the super user can read and write
        kafka_client.check_users_can_read_and_write(["super"], topic_name)
        kafka_client.check_users_are_not_authorized_to_read_and_write(
            ["authorized", "unauthorized"], topic_name)

        log.info("Writing and reading: Adding acl for authorized user")
        kafka_client.add_acls("authorized", topic_name)

        # After adding ACLs the authorized user and super user should still have access to the topic.
        kafka_client.check_users_can_read_and_write(["authorized", "super"],
                                                    topic_name)

        kafka_client.check_users_are_not_authorized_to_read_and_write(
            ["unauthorized"], topic_name)

    finally:
        sdk_install.uninstall(config.PACKAGE_NAME, config.SERVICE_NAME)
# ---- Example 30 ----
def test_client_can_read_and_write(kafka_client: client.KafkaClient):
    """The 'client' user can produce to and consume from a freshly created topic."""
    topic = "authn.test"
    kafka_client.create_topic(topic)
    kafka_client.check_users_can_read_and_write(["client"], topic)