Example #1
def kafka_server(service_account, kafka_client: client.KafkaClient):
    service_options = {
        "service": {
            "name": config.SERVICE_NAME,
            "service_account": service_account["name"],
            "service_account_secret": service_account["secret"],
            "security": {
                "transport_encryption": {
                    "enabled": True
                }
            },
        }
    }

    sdk_install.uninstall(config.PACKAGE_NAME, config.SERVICE_NAME)
    try:
        sdk_install.install(
            config.PACKAGE_NAME,
            service_name=config.SERVICE_NAME,
            expected_running_tasks=config.DEFAULT_TASK_COUNT,
            additional_options=service_options,
            timeout_seconds=30 * 60,
        )
        kafka_client.connect(config.DEFAULT_BROKER_COUNT)
        kafka_client.create_topic(config.DEFAULT_TOPIC_NAME)
        kafka_client.check_topic_partition_count(
            config.DEFAULT_TOPIC_NAME, config.DEFAULT_PARTITION_COUNT)

        yield {**service_options, "package_name": config.PACKAGE_NAME}
    finally:
        sdk_install.uninstall(config.PACKAGE_NAME, config.SERVICE_NAME)
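
This reads as a pytest fixture (the @pytest.fixture decorator is not shown in the listing): the yield hands the merged service options to each test, and the finally block guarantees the service is uninstalled even when setup fails. A minimal sketch of a consuming test; the test name and assertions are hypothetical:

def test_service_options(kafka_server):
    # pytest injects the dict yielded by the kafka_server fixture above.
    assert kafka_server["package_name"] == config.PACKAGE_NAME
    assert kafka_server["service"]["security"]["transport_encryption"]["enabled"]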
Example #2
def test_custom_zookeeper(kafka_client: client.KafkaClient):
    broker_ids = sdk_tasks.get_task_ids(FOLDERED_NAME,
                                        "{}-".format(config.DEFAULT_POD_TYPE))

    # create a topic against the default zk:
    kafka_client.create_topic(config.DEFAULT_TOPIC_NAME)

    marathon_config = sdk_marathon.get_config(FOLDERED_NAME)
    # should be using default path when this envvar is empty/unset:
    assert marathon_config["env"]["KAFKA_ZOOKEEPER_URI"] == ""

    # Use a custom ZK path that is WITHIN the 'dcos-service-' path so that
    # it is automatically cleaned up during uninstall:
    zk_path = "master.mesos:2181/{}/CUSTOMPATH".format(
        sdk_utils.get_zk_path(FOLDERED_NAME))
    marathon_config["env"]["KAFKA_ZOOKEEPER_URI"] = zk_path
    sdk_marathon.update_app(marathon_config)

    sdk_tasks.check_tasks_updated(FOLDERED_NAME,
                                  "{}-".format(config.DEFAULT_POD_TYPE),
                                  broker_ids)
    sdk_plan.wait_for_completed_deployment(FOLDERED_NAME)

    # wait for brokers to finish registering
    kafka_client.check_broker_count(config.DEFAULT_BROKER_COUNT)

    zookeeper = sdk_networks.get_endpoint_string(config.PACKAGE_NAME,
                                                 FOLDERED_NAME, "zookeeper")
    assert zookeeper == zk_path

    # topic created earlier against default zk should no longer be present:
    rc, stdout, _ = sdk_cmd.svc_cli(config.PACKAGE_NAME, FOLDERED_NAME,
                                    "topic list")
    assert rc == 0, "Topic list command failed"

    assert config.DEFAULT_TOPIC_NAME not in json.loads(stdout)
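
The test above follows the config-update pattern used throughout these examples: capture the current broker task IDs, push a Marathon env change, then block until every broker task has been replaced and the deploy plan completes. The same sequence in isolation; the service name and env var are placeholders:

old_ids = sdk_tasks.get_task_ids("my-service", "kafka-")
app = sdk_marathon.get_config("my-service")
app["env"]["SOME_ENV_VAR"] = "new-value"
sdk_marathon.update_app(app)
# Wait for the rolling restart to replace every matching task,
# then for the deploy plan to finish.
sdk_tasks.check_tasks_updated("my-service", "kafka-", old_ids)
sdk_plan.wait_for_completed_deployment("my-service")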
Example #3
def test_authn_client_can_read_and_write(kafka_client: client.KafkaClient,
                                         service_account, setup_principals):
    try:
        sdk_install.uninstall(config.PACKAGE_NAME, config.SERVICE_NAME)
        service_options = {
            "service": {
                "name": config.SERVICE_NAME,
                "service_account": service_account["name"],
                "service_account_secret": service_account["secret"],
                "security": {
                    "transport_encryption": {
                        "enabled": True
                    },
                    "ssl_authentication": {
                        "enabled": True
                    },
                },
            }
        }
        config.install(
            config.PACKAGE_NAME,
            config.SERVICE_NAME,
            config.DEFAULT_BROKER_COUNT,
            additional_options=service_options,
        )

        topic_name = "tls.topic"
        kafka_client.connect(config.DEFAULT_BROKER_COUNT)
        kafka_client.create_topic(topic_name)
        kafka_client.check_users_can_read_and_write(["kafka-tester"],
                                                    topic_name)
    finally:
        sdk_install.uninstall(config.PACKAGE_NAME, config.SERVICE_NAME)
Example #4
def test_authz_acls_not_required(kafka_client: client.KafkaClient, kerberos,
                                 service_account, setup_principals):
    try:
        sdk_install.uninstall(config.PACKAGE_NAME, config.SERVICE_NAME)
        service_options = {
            "service": {
                "name": config.SERVICE_NAME,
                "service_account": service_account["name"],
                "service_account_secret": service_account["secret"],
                "security": {
                    "kerberos": {
                        "enabled": True,
                        "kdc": {
                            "hostname": kerberos.get_host(),
                            "port": int(kerberos.get_port())
                        },
                        "realm": kerberos.get_realm(),
                        "keytab_secret": kerberos.get_keytab_path(),
                    },
                    "transport_encryption": {
                        "enabled": True
                    },
                    "authorization": {
                        "enabled": True,
                        "super_users": "User:{}".format("super"),
                        "allow_everyone_if_no_acl_found": True,
                    },
                },
            }
        }

        config.install(
            config.PACKAGE_NAME,
            config.SERVICE_NAME,
            config.DEFAULT_BROKER_COUNT,
            additional_options=service_options,
        )

        topic_name = "authz.test"
        kafka_client.connect(config.DEFAULT_BROKER_COUNT)
        kafka_client.create_topic(topic_name)

        # Clear the ACLs
        kafka_client.remove_acls("authorized", topic_name)

        # Since no ACLs are specified, all users can read and write.
        kafka_client.check_users_can_read_and_write(
            ["authorized", "unauthorized", "super"], topic_name)

        log.info("Writing and reading: Adding acl for authorized user")
        kafka_client.add_acls("authorized", topic_name)

        # After adding ACLs the authorized user and super user should still have access to the topic.
        kafka_client.check_users_can_read_and_write(["authorized", "super"],
                                                    topic_name)
        kafka_client.check_users_are_not_authorized_to_read_and_write(
            ["unauthorized"], topic_name)

    finally:
        sdk_install.uninstall(config.PACKAGE_NAME, config.SERVICE_NAME)
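
The key setting here is allow_everyone_if_no_acl_found: True, which makes the empty-ACL state permissive; adding the first ACL is what starts gating access (contrast with the ACLs-required variant in the next example). A purely illustrative summary of the access the assertions above expect:

# Illustrative (state, user) -> allowed map; not part of the test suite.
expected_access = {
    ("no_acls", "authorized"): True,        # allow_everyone_if_no_acl_found=True
    ("no_acls", "unauthorized"): True,
    ("no_acls", "super"): True,
    ("acl_for_authorized", "authorized"): True,
    ("acl_for_authorized", "unauthorized"): False,  # the ACL now gates access
    ("acl_for_authorized", "super"): True,          # super users always pass
}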
Example #5
def test_authz_acls_required(kafka_client: client.KafkaClient, service_account,
                             setup_principals):

    try:
        sdk_install.uninstall(config.PACKAGE_NAME, config.SERVICE_NAME)
        service_options = {
            "service": {
                "name": config.SERVICE_NAME,
                "service_account": service_account["name"],
                "service_account_secret": service_account["secret"],
                "security": {
                    "transport_encryption": {
                        "enabled": True
                    },
                    "ssl_authentication": {
                        "enabled": True
                    },
                    "authorization": {
                        "enabled": True,
                        "super_users": "User:{}".format("super")
                    },
                },
            }
        }
        config.install(
            config.PACKAGE_NAME,
            config.SERVICE_NAME,
            config.DEFAULT_BROKER_COUNT,
            additional_options=service_options,
        )

        topic_name = "authz.test"
        kafka_client.connect(config.DEFAULT_BROKER_COUNT)
        kafka_client.create_topic(topic_name)
        # Since no ACLs are specified, only the super user can read and write
        kafka_client.check_users_can_read_and_write(["super"], topic_name)
        kafka_client.check_users_are_not_authorized_to_read_and_write(
            ["authorized", "unauthorized"], topic_name)

        log.info("Writing and reading: Adding acl for authorized user")
        kafka_client.add_acls("authorized", topic_name)

        # After adding ACLs the authorized user and super user should still have access to the topic.
        kafka_client.check_users_can_read_and_write(["authorized", "super"],
                                                    topic_name)

        kafka_client.check_users_are_not_authorized_to_read_and_write(
            ["unauthorized"], topic_name)

    finally:
        sdk_install.uninstall(config.PACKAGE_NAME, config.SERVICE_NAME)
Example #6
def test_authz_acls_not_required(kafka_client: client.KafkaClient):
    topic_name = "authz.test"
    kafka_client.create_topic(topic_name)

    # Since no ACLs are specified, all users can read and write.
    kafka_client.check_users_can_read_and_write(
        ["authorized", "unauthorized", "super"], topic_name)

    log.info("Writing and reading: Adding acl for authorized user")
    kafka_client.add_acls("authorized", topic_name)

    # After adding ACLs the authorized user and super user should still have access to the topic.
    kafka_client.check_users_can_read_and_write(["authorized", "super"],
                                                topic_name)
    kafka_client.check_users_are_not_authorized_to_read_and_write(
        ["unauthorized"], topic_name)
Example #7
def test_client_can_read_and_write(kafka_client: client.KafkaClient):
    topic_name = "authn.test"
    kafka_client.create_topic(topic_name)
    kafka_client.check_users_can_read_and_write(["client"], topic_name)
Example #8
def test_topic_offsets_increase_with_writes(kafka_client: client.KafkaClient):
    package_name = config.PACKAGE_NAME
    service_name = config.SERVICE_NAME

    def offset_is_valid(result) -> bool:
        initial = result[0]
        offsets = result[1]

        LOG.info("Checking validity with initial=%s offsets=%s", initial,
                 offsets)
        has_elements = bool(
            topics.filter_empty_offsets(offsets, additional=initial))
        # A True return value here tells @retrying.retry to try again.
        return not has_elements

    @retrying.retry(
        stop_max_delay=5 * 60 * 1000,
        wait_exponential_multiplier=1000,
        wait_exponential_max=60 * 1000,
        retry_on_result=offset_is_valid,
    )
    def get_offset_change(topic_name, initial_offsets=None):
        """
        Run:
            `dcos kafka topic offsets --time="-1"`
        until the output differs from the specified initial offsets.
        """
        # Avoid a mutable default argument: treat "not provided" as an empty list.
        initial_offsets = initial_offsets or []
        LOG.info("Getting offsets for %s", topic_name)
        rc, stdout, _ = sdk_cmd.svc_cli(
            package_name, service_name,
            'topic offsets --time="-1" {}'.format(topic_name))
        assert rc == 0, "Topic offsets failed"
        offsets = json.loads(stdout)
        LOG.info("offsets=%s", offsets)
        return initial_offsets, offsets

    topic_name = str(uuid.uuid4())
    LOG.info("Creating topic: %s", topic_name)
    kafka_client.create_topic(topic_name)

    _, offset_info = get_offset_change(topic_name)

    # offset_info is a list of {partition index: offset} mappings; sum the
    # integer representations of the offsets.
    initial_offset = sum(
        map(lambda partition: sum(map(int, partition.values())), offset_info))
    LOG.info("Initial offset=%s", initial_offset)

    num_messages = 10
    LOG.info("Sending %s messages", num_messages)
    rc, stdout, _ = sdk_cmd.svc_cli(
        package_name, service_name,
        "topic producer_test {} {}".format(topic_name, num_messages))
    assert rc == 0, "Producer test failed"
    write_info = json.loads(stdout)
    assert len(write_info) == 1
    assert write_info["message"].startswith(
        "Output: {} records sent".format(num_messages))

    _, post_write_offset_info = get_offset_change(topic_name, offset_info)

    post_write_offset = sum(
        map(lambda partition: sum(map(int, partition.values())),
            post_write_offset_info))
    LOG.info("Post-write offset=%s", post_write_offset)

    assert post_write_offset > initial_offset
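
topic offsets returns one {partition: offset} mapping per partition, and summing the integer values yields a single counter that only grows as records are written. A worked example with made-up CLI output:

# Hypothetical output for a three-partition topic with offsets 4, 7 and 9.
offset_info = [{"0": "4"}, {"1": "7"}, {"2": "9"}]
total = sum(map(lambda partition: sum(map(int, partition.values())), offset_info))
assert total == 20  # producing 10 more records would raise this total to 30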
Example #9
def test_initial_kerberos_off_tls_off_plaintext_off(kafka_client: client.KafkaClient):
    assert kafka_client.connect(config.DEFAULT_BROKER_COUNT)
    kafka_client.create_topic(TOPIC_NAME)
    kafka_client.check_users_can_read_and_write(["default"], TOPIC_NAME)