Example #1
def test_create():
    s = fake.session()
    dataset = fake.dataset()

    # four nullable array-of-string sub-attributes for the new Record-typed attribute
    attrs = tuple(
        tc.SubAttribute(
            name=str(i),
            is_nullable=True,
            type=tc.attribute.type.Array(tc.attribute.type.STRING),
        )
        for i in range(4)
    )

    attr = tc.attribute.create(
        s,
        dataset,
        name="attr",
        is_nullable=False,
        type=tc.attribute.type.Record(attributes=attrs),
    )

    assert attr.name == "attr"
    assert not attr.is_nullable
    assert isinstance(attr.type, tc.attribute.type.Record)
    assert attr.type.attributes == attrs
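
All of the examples in this section share the same module-level test setup: a `fake` helper that fabricates a session and a dataset (and mocks the server's responses), the client package under test imported as `tc`, and constants such as `_records_json` and `_response_json`. A minimal sketch of the preamble they assume is shown below; the import paths and the exact shapes of the constants are assumptions inferred from how the tests use them, not definitions taken from the source.

import pandas as pd
import pytest

import tamr_client as tc  # assumption: `tc` most likely aliases the tamr_client package
from tests import fake    # assumption: project-local helpers providing fake.session() and fake.dataset()

# Assumed (hypothetical) shapes for the module-level constants the tests reference.
_records_json = [{"primary_key": 1, "attr": "a"}, {"primary_key": 2, "attr": "b"}]
_records_json_2 = [{"attr": "a"}, {"attr": "b"}]
_records_with_keys_json_2 = [{"primary_key": 1, "attr": "a"}, {"primary_key": 2, "attr": "b"}]
_response_json = {"numCommandsProcessed": 2, "allCommandsSucceeded": True}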
Example #2
def test_upsert_infer_primary_key():
    s = fake.session()
    dataset = fake.dataset()

    df = pd.DataFrame(_records_json)

    response = tc.dataframe.upsert(s, dataset, df)
    assert response == _response_json
Example #3
def test_delete_primary_key_not_found():
    s = fake.session()
    dataset = fake.dataset()

    with pytest.raises(tc.primary_key.NotFound):
        tc.record.delete(s,
                         dataset,
                         _records_json,
                         primary_key_name="wrong_primary_key")
Example #4
def test_delete():
    s = fake.session()
    dataset = fake.dataset()

    response = tc.record.delete(s,
                                dataset,
                                _records_json,
                                primary_key_name="primary_key")
    assert response == _response_json
Example #5
def test_upsert_primary_key_not_found():
    s = fake.session()
    dataset = fake.dataset()

    df = pd.DataFrame(_records_json)

    with pytest.raises(tc.primary_key.NotFound):
        tc.dataframe.upsert(s,
                            dataset,
                            df,
                            primary_key_name="wrong_primary_key")
Example #6
def test_update():
    s = fake.session()
    dataset = fake.dataset()

    updates = [
        tc.record._create_command(record, primary_key_name="primary_key")
        for record in _records_json
    ]

    response = tc.record._update(s, dataset, updates)
    assert response == _response_json
Example #7
def test_upsert_index_column_name_collision():
    s = fake.session()
    dataset = fake.dataset()

    df = pd.DataFrame(_records_json_2)
    df.index.name = "primary_key"

    # create a column in `df` with the same name as the index, making "primary_key" ambiguous
    df.insert(0, df.index.name, df.index)

    with pytest.raises(tc.primary_key.Ambiguous):
        tc.dataframe.upsert(s, dataset, df, primary_key_name="primary_key")
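
A usage note on the example above (an illustration, not part of the original tests): the `Ambiguous` error is raised because the key appears both as the index and as a column, so a caller resolves it by keeping the key in only one place before upserting.

# given a DataFrame whose index and one of whose columns are both named "primary_key",
# drop the index so the key survives only as a column, then upsert as usual
df = df.reset_index(drop=True)
tc.dataframe.upsert(s, dataset, df, primary_key_name="primary_key")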
Example #8
def test_attributes():
    s = fake.session()
    dataset = fake.dataset()

    attrs = tc.dataset.attributes(s, dataset)

    row_num = attrs[0]
    assert row_num.name == "RowNum"
    assert row_num.type == tc.attribute.type.STRING

    geom = attrs[1]
    assert geom.name == "geom"
    assert isinstance(geom.type, tc.attribute.type.Record)
Example #9
def test_materialize_async():
    s = fake.session()
    dataset = fake.dataset()

    op = tc.dataset._materialize_async(s, dataset)

    assert op.type == "SPARK"
    assert op.description == "Materialize views to Elastic"
    assert op.status == {
        "state": "PENDING",
        "startTime": "",
        "endTime": "",
        "message": "Job has not yet been submitted to Spark",
    }
Example #10
def test_upsert_index_as_primary_key():
    s = fake.session()
    dataset = fake.dataset()

    df = pd.DataFrame(
        _records_json_2,
        index=[record["primary_key"] for record in _records_with_keys_json_2],
    )
    df.index.name = "primary_key"

    response = tc.dataframe.upsert(s,
                                   dataset,
                                   df,
                                   primary_key_name="primary_key")
    assert response == _response_json
Example #11
def test_from_resource_id():
    s = fake.session()
    dataset = fake.dataset()

    # expected sub-attributes of the fetched Record-typed attribute
    attrs = tuple(
        tc.SubAttribute(
            name=str(i),
            is_nullable=True,
            type=tc.attribute.type.Array(tc.attribute.type.STRING),
        )
        for i in range(4)
    )

    attr = tc.attribute.from_resource_id(s, dataset, "attr")

    assert attr.name == "attr"
    assert not attr.is_nullable
    assert isinstance(attr.type, tc.attribute.type.Record)
    assert attr.type.attributes == attrs
Example #12
def test_delete_infer_primary_key():
    s = fake.session()
    dataset = fake.dataset()

    response = tc.record.delete(s, dataset, _records_json)
    assert response == _response_json
Example #13
def test_delete_dataset_not_found():
    s = fake.session()
    dataset = fake.dataset()

    with pytest.raises(tc.dataset.NotFound):
        tc.dataset.delete(s, dataset)
Example #14
def test_create_reserved_attribute_name():
    s = fake.session()
    dataset = fake.dataset()

    with pytest.raises(tc.attribute.ReservedName):
        tc.attribute.create(s, dataset, name="clusterId", is_nullable=False)
Example #15
def test_create_attribute_exists():
    s = fake.session()
    dataset = fake.dataset()

    with pytest.raises(tc.attribute.AlreadyExists):
        tc.attribute.create(s, dataset, name="attr", is_nullable=False)
Example #16
def test_from_resource_id_attribute_not_found():
    s = fake.session()
    dataset = fake.dataset()

    with pytest.raises(tc.attribute.NotFound):
        tc.attribute.from_resource_id(s, dataset, "attr")
Example #17
def test_stream():
    s = fake.session()
    dataset = fake.dataset()

    records = tc.record.stream(s, dataset)
    assert list(records) == _records_json
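
A common way to consume the stream (an illustration built only from calls shown in these excerpts plus standard pandas): materialize the lazy record iterator into a DataFrame.

records = tc.record.stream(s, dataset)  # lazy iterator over the dataset's records
df = pd.DataFrame(list(records))        # materialize into a DataFrame for inspection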
Example #18
def test_delete_all():
    s = fake.session()
    dataset = fake.dataset()

    tc.record.delete_all(s, dataset)
Example #19
def test_delete_cascading():
    s = fake.session()
    dataset = fake.dataset()

    tc.dataset.delete(s, dataset, cascade=True)
Example #20
def test_delete():
    s = fake.session()
    dataset = fake.dataset()

    tc.dataset.delete(s, dataset)