# Example 1 (0)
def test_users_have_appropriate_permissions(hdfs_client, kerberos):
    """Check HDFS directory permissions under Kerberos: the "hdfs" superuser
    provisions alice's home directory, alice can write/read/list it, and bob
    is denied both read and write access (mode 700)."""
    client_id = hdfs_client["id"]

    # Authenticate as the "hdfs" superuser to set up the directory.
    sdk_auth.kinit(client_id, keytab=config.KEYTAB, principal=kerberos.get_principal("hdfs"))

    alice_dir = "/users/alice"
    setup_steps = (
        "mkdir -p {}".format(alice_dir),
        "chown alice:users {}".format(alice_dir),
        "chmod 700 {}".format(alice_dir),
    )
    config.run_client_command(
        " && ".join(config.hdfs_command(step) for step in setup_steps)
    )

    test_filename = "{}/{}".format(alice_dir, config.get_unique_filename("test_kerberos_auth_user_permissions"))

    # alice owns the directory, so she can write, read, and list her file.
    sdk_auth.kdestroy(client_id)
    sdk_auth.kinit(client_id, keytab=config.KEYTAB, principal=kerberos.get_principal("alice"))

    config.hdfs_client_write_data(test_filename)
    config.hdfs_client_read_data(test_filename)
    _, listing, _ = config.hdfs_client_list_files(alice_dir)
    assert test_filename in listing

    # Mode 700 shuts bob out of both operations.
    sdk_auth.kdestroy(client_id)
    sdk_auth.kinit(client_id, keytab=config.KEYTAB, principal=kerberos.get_principal("bob"))

    config.hdfs_client_write_data(test_filename, expect_failure_message="put: Permission denied: user=bob")
    config.hdfs_client_read_data(test_filename, expect_failure_message="cat: Permission denied: user=bob")
def test_users_have_appropriate_permissions(hdfs_client, kerberos):
    """SSL+Kerberos variant: the superuser provisions alice's home directory;
    alice has full owner access while bob is rejected with permission errors."""

    def switch_to(principal_name):
        # Drop the current ticket and authenticate as another principal.
        sdk_auth.kdestroy(hdfs_client["id"])
        sdk_auth.kinit(
            hdfs_client["id"], keytab=config.KEYTAB, principal=kerberos.get_principal(principal_name)
        )

    # The "hdfs" superuser creates and locks down the directory.
    sdk_auth.kinit(
        hdfs_client["id"], keytab=config.KEYTAB, principal=kerberos.get_principal("hdfs")
    )

    alice_dir = "/users/alice"
    combined_cmd = " && ".join(
        config.hdfs_command(step)
        for step in (
            "mkdir -p {}".format(alice_dir),
            "chown alice:users {}".format(alice_dir),
            "chmod 700 {}".format(alice_dir),
        )
    )
    config.run_client_command(combined_cmd)

    test_filename = "{}/{}".format(alice_dir, config.get_unique_filename("test_ssl_kerberos_auth_user_permissions"))

    # Owner access: alice can write, read, and see her file in the listing.
    switch_to("alice")
    config.hdfs_client_write_data(test_filename)
    config.hdfs_client_read_data(test_filename)
    _, stdout, _ = config.hdfs_client_list_files(alice_dir)
    assert test_filename in stdout

    # Non-owner access: bob hits "Permission denied" on both put and cat.
    switch_to("bob")
    config.hdfs_client_write_data(test_filename, expect_failure_message="put: Permission denied: user=bob")
    config.hdfs_client_read_data(test_filename, expect_failure_message="cat: Permission denied: user=bob")
# Example 3 (0)
def test_detect_racks():
    """Verify the cluster reports a fault-domain-derived rack topology."""
    ok, output, _ = config.run_client_command(config.hadoop_command("dfsadmin -printTopology"))
    assert ok

    # First line looks like "Rack: /aws/us-west-2b"; strip the prefix and
    # validate the remainder as a zone name.
    first_line = output.split("\n")[0]
    prefix = "Rack: /"
    assert first_line.startswith(prefix)
    assert sdk_fault_domain.is_valid_zone(first_line[len(prefix):])
# Example 4 (0)
def test_detect_racks():
    """Check that dfsadmin topology output begins with a valid rack/zone."""
    succeeded, topology, _ = config.run_client_command(
        config.hadoop_command("dfsadmin -printTopology"))
    assert succeeded

    # Expecting e.g. "Rack: /aws/us-west-2b\n..." — everything after
    # "Rack: /" on the first line must be a recognized zone.
    head, _, _ = topology.partition("\n")
    assert head.startswith("Rack: /")
    assert sdk_fault_domain.is_valid_zone(head[len("Rack: /"):])
def test_users_have_appropriate_permissions(hdfs_client, kerberos):
    """Check HDFS directory permissions: the "hdfs" superuser provisions
    alice's home directory, alice can write/read/list it, and bob is
    denied access (mode 700).

    Raises:
        Exception: if any of the directory-setup commands fails.
    """
    # "hdfs" is a superuser
    sdk_auth.kinit(hdfs_client["id"],
                   keytab=config.KEYTAB,
                   principal=kerberos.get_principal("hdfs"))

    alice_dir = "/users/alice"
    success = True
    cmd_lists = [
        ["mkdir", "-p", alice_dir],
        ["chown", "alice:users", alice_dir],
        ["chmod", "700", alice_dir],
    ]
    for cmd_list in cmd_lists:
        cmd = config.hdfs_command(" ".join(cmd_list))
        # BUG FIX: run_client_command returns a (success, stdout, stderr)
        # tuple (see its other call sites). The previous code truth-tested
        # the whole tuple, which is always truthy, so failures were
        # silently ignored. Unpack the success flag explicitly.
        cmd_success, _, _ = config.run_client_command(cmd)
        if not cmd_success:
            log.error("Error executing: %s", cmd)
        success = success and cmd_success

    if not success:
        log.error("Error creating %s", alice_dir)
        raise Exception("Error creating user directory")

    # alice has read/write access to her directory
    sdk_auth.kdestroy(hdfs_client["id"])
    sdk_auth.kinit(hdfs_client["id"],
                   keytab=config.KEYTAB,
                   principal=kerberos.get_principal("alice"))

    test_filename = "{}/{}".format(
        alice_dir,
        config.get_unique_filename("test_kerberos_auth_user_permissions"))
    config.hdfs_client_write_data(test_filename)
    config.hdfs_client_read_data(test_filename)
    _, stdout, _ = config.hdfs_client_list_files(alice_dir)
    assert test_filename in stdout

    # bob doesn't have read/write access to alice's directory
    sdk_auth.kdestroy(hdfs_client["id"])
    sdk_auth.kinit(hdfs_client["id"],
                   keytab=config.KEYTAB,
                   principal=kerberos.get_principal("bob"))

    config.hdfs_client_write_data(
        test_filename,
        expect_failure_message="put: Permission denied: user=bob")
    config.hdfs_client_read_data(
        test_filename,
        expect_failure_message="cat: Permission denied: user=bob")
def test_verify_https_ports(hdfs_client, node_type, port):
    """
    Verify that HTTPS port is open name, journal and data node types.
    """
    task_id = "{}-0-node".format(node_type)
    host = sdk_hosts.autoip_host(config.SERVICE_NAME, task_id, port)
    ca_bundle = transport_encryption.fetch_dcos_ca_bundle(hdfs_client["id"])

    curl_cmd = "curl -v --cacert {} https://{}".format(ca_bundle, host)
    ok, stdout, stderr = config.run_client_command(curl_cmd)
    assert ok

    # curl's -v diagnostics land on stderr: cipher, cert verification,
    # and certificate common-name match against the task hostname.
    expected_in_stderr = (
        "SSL connection using TLS1.2 / ECDHE_RSA_AES_128_GCM_SHA256",
        "server certificate verification OK",
        "common name: {}.{} (matched)".format(task_id, config.SERVICE_NAME),
    )
    for fragment in expected_in_stderr:
        assert fragment in stderr

    # In the Kerberos case we expect a 401 error
    assert "401 Authentication required" in stdout
def test_users_have_appropriate_permissions(hdfs_client, kerberos):
    """Check HDFS directory permissions: the "hdfs" superuser provisions
    alice's home directory, alice can write/read/list it, and bob is
    denied access (mode 700).

    Raises:
        Exception: if any of the directory-setup commands fails.
    """
    # "hdfs" is a superuser
    sdk_auth.kinit(
        hdfs_client["id"], keytab=config.KEYTAB, principal=kerberos.get_principal("hdfs")
    )

    alice_dir = "/users/alice"
    success = True
    cmd_lists = [
        ["mkdir", "-p", alice_dir],
        ["chown", "alice:users", alice_dir],
        ["chmod", "700", alice_dir],
    ]
    for cmd_list in cmd_lists:
        cmd = config.hdfs_command(" ".join(cmd_list))
        # BUG FIX: run_client_command returns a (success, stdout, stderr)
        # tuple (see its other call sites). The previous code truth-tested
        # the whole tuple, which is always truthy, so failures were
        # silently ignored. Unpack the success flag explicitly.
        cmd_success, _, _ = config.run_client_command(cmd)
        if not cmd_success:
            log.error("Error executing: %s", cmd)
        success = success and cmd_success

    if not success:
        log.error("Error creating %s", alice_dir)
        raise Exception("Error creating user directory")

    # alice has read/write access to her directory
    sdk_auth.kdestroy(hdfs_client["id"])
    sdk_auth.kinit(
        hdfs_client["id"], keytab=config.KEYTAB, principal=kerberos.get_principal("alice")
    )

    test_filename = "{}/{}".format(alice_dir, config.get_unique_filename("test_kerberos_auth_user_permissions"))
    config.hdfs_client_write_data(test_filename)
    config.hdfs_client_read_data(test_filename)
    _, stdout, _ = config.hdfs_client_list_files(alice_dir)
    assert test_filename in stdout

    # bob doesn't have read/write access to alice's directory
    sdk_auth.kdestroy(hdfs_client["id"])
    sdk_auth.kinit(hdfs_client["id"], keytab=config.KEYTAB, principal=kerberos.get_principal("bob"))

    config.hdfs_client_write_data(test_filename, expect_failure_message="put: Permission denied: user=bob")
    config.hdfs_client_read_data(test_filename, expect_failure_message="cat: Permission denied: user=bob")
def test_verify_https_ports(hdfs_client, node_type, port):
    """
    Verify that HTTPS port is open name, journal and data node types.
    """
    task_id = "{}-0-node".format(node_type)
    endpoint = sdk_hosts.autoip_host(config.SERVICE_NAME, task_id, port)
    bundle_path = transport_encryption.fetch_dcos_ca_bundle(hdfs_client["id"])

    succeeded, body, verbose_log = config.run_client_command(
        "curl -v --cacert {} https://{}".format(bundle_path, endpoint))
    assert succeeded

    # curl's -v output is on stderr: the server cert must verify against
    # the DC/OS CA bundle and its common name must match the task host.
    assert "server certificate verification OK" in verbose_log
    expected_cn = "common name: {}.{} (matched)".format(task_id,
                                                        config.SERVICE_NAME)
    assert expected_cn in verbose_log

    # In the Kerberos case we expect a 401 error
    assert "401 Authentication required" in body
def is_name_node_active(namenode):
    """Return True when the given namenode reports HA service state "active"."""
    cmd = config.hadoop_command("haadmin -getServiceState {}".format(namenode))
    ok, output, _ = config.run_client_command(cmd)
    return ok and output.strip() == "active"