Code example #1
File: test.py Project: zhulibo86/ClickHouse
def test_read_write_storage(started_cluster):
    hdfs_api = HDFSApi("root")

    node1.query("create table SimpleHDFSStorage (id UInt32, name String, weight Float64) ENGINE = HDFS('hdfs://hdfs1:9000/simple_storage', 'TSV')")
    node1.query("insert into SimpleHDFSStorage values (1, 'Mark', 72.53)")
    assert hdfs_api.read_data("/simple_storage") == "1\tMark\t72.53\n"
    assert node1.query("select * from SimpleHDFSStorage") == "1\tMark\t72.53\n"
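All of these snippets rely on the shared ClickHouse integration-test harness: a module-level cluster with an HDFS-enabled instance (node1) and a started_cluster fixture, none of which is shown in the excerpts. A minimal sketch of what that setup typically looks like follows; the helpers module paths and the with_hdfs flag are assumptions based on the usual ClickHouse test layout, not part of the examples above.

import pytest
from helpers.cluster import ClickHouseCluster  # assumed helper path
from helpers.hdfs_api import HDFSApi           # assumed helper path

cluster = ClickHouseCluster(__file__)
# "node1" matches the instance name used in the snippets; with_hdfs brings up the hdfs1 container
node1 = cluster.add_instance("node1", with_hdfs=True)

@pytest.fixture(scope="module")
def started_cluster():
    # Start the cluster once per test module and always tear it down afterwards
    try:
        cluster.start()
        yield cluster
    finally:
        cluster.shutdown()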
Code example #2
File: test.py Project: zz412000428/ClickHouse
def test_read_write_storage_with_globs(started_cluster):
    hdfs_api = HDFSApi("root")

    for i in ["1", "2", "3"]:
        hdfs_api.write_data("/storage" + i, i + "\tMark\t72.53\n")
        assert hdfs_api.read_data("/storage" + i) == i + "\tMark\t72.53\n"

    node1.query(
        "create table HDFSStorageWithRange (id UInt32, name String, weight Float64) ENGINE = HDFS('hdfs://hdfs1:9000/storage{1..5}', 'TSV')"
    )
    node1.query(
        "create table HDFSStorageWithEnum (id UInt32, name String, weight Float64) ENGINE = HDFS('hdfs://hdfs1:9000/storage{1,2,3,4,5}', 'TSV')"
    )
    node1.query(
        "create table HDFSStorageWithQuestionMark (id UInt32, name String, weight Float64) ENGINE = HDFS('hdfs://hdfs1:9000/storage?', 'TSV')"
    )
    node1.query(
        "create table HDFSStorageWithAsterisk (id UInt32, name String, weight Float64) ENGINE = HDFS('hdfs://hdfs1:9000/storage*', 'TSV')"
    )

    assert node1.query("select count(*) from HDFSStorageWithRange") == '3\n'
    assert node1.query("select count(*) from HDFSStorageWithEnum") == '3\n'
    assert node1.query("select count(*) from HDFSStorageWithQuestionMark") == '3\n'
    assert node1.query("select count(*) from HDFSStorageWithAsterisk") == '3\n'
Code example #3
def test_redirect(start_cluster):
    hdfs_api = HDFSApi("root")
    hdfs_api.write_data("/simple_storage", "1\t\n")
    assert hdfs_api.read_data("/simple_storage") == "1\t\n"
    node7.query(
        "CREATE TABLE table_test_7_1 (word String) ENGINE=URL('http://hdfs1:50070/webhdfs/v1/simple_storage?op=OPEN&namenoderpcaddress=hdfs1:9000&offset=0', CSV)")
    assert "not allowed" in node7.query_and_get_error("SET max_http_get_redirects=1; SELECT * from table_test_7_1")
Code example #4
File: test.py Project: zhulibo86/ClickHouse
def test_read_write_table(started_cluster):
    hdfs_api = HDFSApi("root")
    data = "1\tSerialize\t555.222\n2\tData\t777.333\n"
    hdfs_api.write_data("/simple_table_function", data)

    assert hdfs_api.read_data("/simple_table_function") == data

    assert node1.query("select * from hdfs('hdfs://hdfs1:9000/simple_table_function', 'TSV', 'id UInt64, text String, number Float64')") == data
Code example #5
File: test.py Project: zhulibo86/ClickHouse
def test_read_write_table_with_parameter_none(started_cluster):
    hdfs_api = HDFSApi("root")
    data = "1\tHello Jessica\t555.222\n2\tI rolled a joint\t777.333\n"
    hdfs_api.write_data("/simple_table_function.gz", data)

    assert hdfs_api.read_data("/simple_table_function.gz") == data

    assert node1.query("select * from hdfs('hdfs://hdfs1:9000/simple_table_function.gz', 'TSV', 'id UInt64, text String, number Float64', 'none')") == data
Code example #6
File: test.py Project: zhulibo86/ClickHouse
def test_write_table(started_cluster):
    hdfs_api = HDFSApi("root")

    node1.query("create table OtherHDFSStorage (id UInt32, name String, weight Float64) ENGINE = HDFS('hdfs://hdfs1:9000/other_storage', 'TSV')")
    node1.query("insert into OtherHDFSStorage values (10, 'tomas', 55.55), (11, 'jack', 32.54)")

    result = "10\ttomas\t55.55\n11\tjack\t32.54\n"
    assert hdfs_api.read_data("/other_storage") == result
    assert node1.query("select * from OtherHDFSStorage order by id") == result
Code example #7
File: test.py Project: chipitsine/ClickHouse
def test_read_write_storage(started_cluster):

    hdfs_api = HDFSApi("root")
    hdfs_api.write_data("/simple_storage", "1\tMark\t72.53\n")

    assert hdfs_api.read_data("/simple_storage") == "1\tMark\t72.53\n"

    node1.query("create table SimpleHDFSStorage (id UInt32, name String, weight Float64) ENGINE = HDFS('hdfs://hdfs1:9000/simple_storage', 'TSV')")
    assert node1.query("select * from SimpleHDFSStorage") == "1\tMark\t72.53\n"
Code example #8
File: test.py Project: zp672087110/ClickHouse
def test_url_without_redirect(started_cluster):
    hdfs_api = HDFSApi("root")
    hdfs_api.write_data("/simple_storage", "1\tMark\t72.53\n")
    assert hdfs_api.read_data("/simple_storage") == "1\tMark\t72.53\n"

    # access datanode port directly
    node1.query(
        "create table WebHDFSStorage (id UInt32, name String, weight Float64) ENGINE = URL('http://hdfs1:50075/webhdfs/v1/simple_storage?op=OPEN&namenoderpcaddress=hdfs1:9000&offset=0', 'TSV')"
    )
    assert node1.query("select * from WebHDFSStorage") == "1\tMark\t72.53\n"
Code example #9
File: test.py Project: zp672087110/ClickHouse
def test_url_with_redirect_not_allowed(started_cluster):
    hdfs_api = HDFSApi("root")
    hdfs_api.write_data("/simple_storage", "1\tMark\t72.53\n")
    assert hdfs_api.read_data("/simple_storage") == "1\tMark\t72.53\n"

    # access proxy port without allowing redirects
    node1.query(
        "create table WebHDFSStorageWithoutRedirect (id UInt32, name String, weight Float64) ENGINE = URL('http://hdfs1:50070/webhdfs/v1/simple_storage?op=OPEN&namenoderpcaddress=hdfs1:9000&offset=0', 'TSV')"
    )
    with pytest.raises(Exception):
        assert node1.query("select * from WebHDFSStorageWithoutRedirect"
                           ) == "1\tMark\t72.53\n"
Code example #10
File: test.py Project: zp672087110/ClickHouse
def test_url_with_redirect_allowed(started_cluster):
    hdfs_api = HDFSApi("root")
    hdfs_api.write_data("/simple_storage", "1\tMark\t72.53\n")
    assert hdfs_api.read_data("/simple_storage") == "1\tMark\t72.53\n"

    # access proxy port with allowing redirects
    # http://localhost:50070/webhdfs/v1/b?op=OPEN&namenoderpcaddress=hdfs1:9000&offset=0
    node1.query(
        "create table WebHDFSStorageWithRedirect (id UInt32, name String, weight Float64) ENGINE = URL('http://hdfs1:50070/webhdfs/v1/simple_storage?op=OPEN&namenoderpcaddress=hdfs1:9000&offset=0', 'TSV')"
    )
    assert node1.query(
        "SET max_http_get_redirects=1; select * from WebHDFSStorageWithRedirect"
    ) == "1\tMark\t72.53\n"
Code example #11
File: test.py Project: hodgesds/ClickHouse-1
def test_read_write_storage_with_globs(started_cluster):
    hdfs_api = HDFSApi("root")

    node1.query(
        "create table HDFSStorageWithRange (id UInt32, name String, weight Float64) ENGINE = HDFS('hdfs://hdfs1:9000/storage{1..5}', 'TSV')"
    )
    node1.query(
        "create table HDFSStorageWithEnum (id UInt32, name String, weight Float64) ENGINE = HDFS('hdfs://hdfs1:9000/storage{1,2,3,4,5}', 'TSV')"
    )
    node1.query(
        "create table HDFSStorageWithQuestionMark (id UInt32, name String, weight Float64) ENGINE = HDFS('hdfs://hdfs1:9000/storage?', 'TSV')"
    )
    node1.query(
        "create table HDFSStorageWithAsterisk (id UInt32, name String, weight Float64) ENGINE = HDFS('hdfs://hdfs1:9000/storage*', 'TSV')"
    )

    for i in ["1", "2", "3"]:
        hdfs_api.write_data("/storage" + i, i + "\tMark\t72.53\n")
        assert hdfs_api.read_data("/storage" + i) == i + "\tMark\t72.53\n"

    assert node1.query("select count(*) from HDFSStorageWithRange") == "3\n"
    assert node1.query("select count(*) from HDFSStorageWithEnum") == "3\n"
    assert node1.query("select count(*) from HDFSStorageWithQuestionMark") == "3\n"
    assert node1.query("select count(*) from HDFSStorageWithAsterisk") == "3\n"

    try:
        node1.query("insert into HDFSStorageWithEnum values (1, 'NEW', 4.2)")
        assert False, "Exception should have been thrown"
    except Exception as ex:
        print(ex)
        assert "in readonly mode" in str(ex)

    try:
        node1.query(
            "insert into HDFSStorageWithQuestionMark values (1, 'NEW', 4.2)")
        assert False, "Exception should have been thrown"
    except Exception as ex:
        print(ex)
        assert "in readonly mode" in str(ex)

    try:
        node1.query(
            "insert into HDFSStorageWithAsterisk values (1, 'NEW', 4.2)")
        assert False, "Exception should have been thrown"
    except Exception as ex:
        print(ex)
        assert "in readonly mode" in str(ex)