# Example no. 1
def test_set_parent():
    """Verify that re-assigning an entity's parent is persisted to file."""

    # Small random point cloud to attach under a group
    coords = np.random.randn(2, 3)
    points_name = "test_points"

    with tempfile.TemporaryDirectory() as tempdir:
        h5file_path = Path(tempdir) / r"testProject.geoh5"
        workspace = Workspace(h5file_path)

        first_group = ContainerGroup.create(workspace)
        points = Points.create(
            workspace, vertices=coords, name=points_name, parent=first_group
        )
        points.add_data({"random": {"values": np.random.randn(coords.shape[0])}})

        # Move the points from the first group to a second one
        second_group = ContainerGroup.create(workspace, name="group_b")
        points.parent = second_group

        # Re-open the project and confirm the new hierarchy round-tripped
        workspace = Workspace(h5file_path)
        group_reload = workspace.get_entity("group_b")[0]
        entity_reload = workspace.get_entity(points_name)[0]
        data_reload = workspace.get_entity("random")[0]

        assert entity_reload.parent == group_reload, "Parent different than expected."
        assert (
            entity_reload in group_reload.children
        ), "Entity not in the list of children."
        assert (
            data_reload in entity_reload.children
        ), "Data not in list of entity children."
# Example no. 2
def test_remove_root():
    """Check that a project stripped of its Root group can still be read back."""

    # Random cloud of points carrying two vertex data sets
    n_data = 12
    vertices = np.random.randn(n_data, 3)

    with tempfile.TemporaryDirectory() as tempdir:
        h5file_path = Path(tempdir) / r"testProject.geoh5"

        # Create a workspace with points, data and a property group
        workspace = Workspace(h5file_path)
        points = Points.create(workspace, vertices=vertices)
        data = points.add_data(
            {
                "DataValues": {
                    "association": "VERTEX",
                    "values": np.random.randn(n_data),
                },
                "DataValues2": {
                    "association": "VERTEX",
                    "values": np.random.randn(n_data),
                },
            }
        )

        group_name = "SomeGroup"
        data_group = points.add_data_to_group(data, group_name)

        workspace.finalize()

        # Strip the Root group and its related entries directly from the file
        with File(h5file_path, "r+") as project:
            base = list(project.keys())[0]
            for key in ("Root", "Groups"):
                del project[base][key]
            del project[base]["Types"]["Group types"]

        # Read the data back in from a fresh workspace
        new_workspace = Workspace(h5file_path)
        rec_points = new_workspace.get_entity(points.name)[0]
        rec_group = rec_points.find_or_create_property_group(name=group_name)
        rec_data = new_workspace.get_entity(data[0].name)[0]

        skip = ["_parent", "_existing_h5_entity"]
        compare_entities(points, rec_points, ignore=skip + ["_property_groups"])
        compare_entities(data[0], rec_data, ignore=skip)
        compare_entities(data_group, rec_group, ignore=skip)
def test_no_data_values():
    """Round-trip data entries with missing, partially-NaN and fully-NaN values.

    Covers four cases: float values with some NaN, integer values stored as
    float with NaN flags, an entry created with no values at all, and an
    entry whose values are reset to None after creation.
    """

    # Generate a random cloud of points
    n_data = 12
    xyz = np.random.randn(n_data, 3)

    # Float values with a couple of NaN entries
    float_values = np.random.randn(n_data)
    float_values[3:5] = np.nan

    # Integer values cast to float so NaN can mark missing entries
    int_values = np.random.randint(n_data, size=n_data).astype(float)
    int_values[2:5] = np.nan

    all_nan = np.ones(n_data)

    with tempfile.TemporaryDirectory() as tempdir:
        h5file_path = Path(tempdir) / r"testProject.geoh5"

        # Create a workspace
        workspace = Workspace(h5file_path)
        points = Points.create(workspace, vertices=xyz)
        data_objs = points.add_data({
            "DataFloatValues": {
                "association": "VERTEX",
                "values": float_values
            },
            "DataIntValues": {
                "values": int_values,
                "type": "INTEGER",
            },
            "NoValues": {
                "association": "VERTEX"
            },
            "AllNanValues": {
                "association": "VERTEX",
                "values": all_nan
            },
        })
        data_objs[-1].values = None  # Reset all values to nan
        workspace.finalize()

        # Read the data back in from a fresh workspace
        new_workspace = Workspace(h5file_path)

        for data in data_objs:
            rec_data = new_workspace.get_entity(data.name)[0]

            if data.values is None:
                # Fixed message typo: "should None" -> "should be None"
                assert rec_data.values is None, "Data 'values' saved should be None"
            else:
                # NaN positions must survive the write/read cycle
                assert all(
                    np.isnan(rec_data.values) == np.isnan(data.values)
                ), "Mismatch between input and recovered data values"
# Example no. 4
def test_create_point_data():
    """Entities created without an explicit parent should attach to Root."""

    with tempfile.TemporaryDirectory() as tempdir:
        h5file_path = Path(tempdir) / r"test.geoh5"
        workspace = Workspace(h5file_path)

        # parent=None passed explicitly
        group = ContainerGroup.create(workspace, parent=None)
        assert (
            group.parent == workspace.root
        ), "Assigned parent=None should default to Root."

        # parent omitted entirely
        group = ContainerGroup.create(workspace)
        assert (
            group.parent == workspace.root
        ), "Creation without parent should default to Root."

        # Explicit parent must be honoured
        points = Points.create(workspace, parent=group)
        assert points.parent == group, "Parent setter did not work."
# Example no. 5
def test_create_point_data():
    """Round-trip points with vertex data, checking attribute edits and StatsCache removal."""

    new_name = "TestName"

    # Generate a random cloud of points
    coords = np.random.randn(12, 3)
    values = np.random.randn(12)

    with tempfile.TemporaryDirectory() as tempdir:
        h5file_path = Path(tempdir) / r"testPoints.geoh5"
        workspace = Workspace(h5file_path)

        points = Points.create(workspace, vertices=coords, allow_move=False)
        data = points.add_data(
            {"DataValues": {
                "association": "VERTEX",
                "values": values
            }})
        tag = points.add_data(
            {"my_comment": {
                "association": "OBJECT",
                "values": "hello_world"
            }})

        # Change some data attributes for testing
        data.allow_delete = False
        data.allow_move = True
        data.allow_rename = False
        data.name = new_name

        # Fake ANALYST creating a StatsCache
        with fetch_h5_handle(h5file_path) as h5file:
            etype_handle = H5Writer.fetch_handle(h5file, data.entity_type)
            etype_handle.create_group("StatsCache")

        # Trigger replace of values
        data.values = values * 2.0
        workspace.finalize()

        # Read the data back in from a fresh workspace
        new_workspace = Workspace(h5file_path)
        rec_obj = new_workspace.get_entity("Points")[0]
        rec_data = new_workspace.get_entity(new_name)[0]
        rec_tag = new_workspace.get_entity("my_comment")[0]

        for original, recovered in ((points, rec_obj), (data, rec_data), (tag, rec_tag)):
            compare_entities(original, recovered)

        # Updating values must have invalidated the StatsCache
        with fetch_h5_handle(h5file_path) as h5file:
            etype_handle = H5Writer.fetch_handle(h5file, rec_data.entity_type)
            assert (
                etype_handle.get("StatsCache") is None
            ), "StatsCache was not properly deleted on update of values"
# Example no. 6
def test_user_comments():
    """Round-trip user comments on objects and groups, with and without an author.

    Object comments keep an explicit author; group comments added without one
    fall back to the workspace contributors list.
    """

    with tempfile.TemporaryDirectory() as tempdir:
        h5file_path = Path(tempdir) / r"group_object_comment.geoh5"

        # Create a workspace
        workspace = Workspace(h5file_path)

        object_base = Points.create(workspace, name="myObject")
        object_comment = "object text comment"
        author = "John Doe"
        object_base.add_comment(object_comment, author=author)

        workspace.finalize()

        # Read the comments back in
        ws2 = Workspace(h5file_path)
        object_base = ws2.get_entity("myObject")[0]
        # Fixed malformed messages: unbalanced quote around 'Author'
        assert (object_base.comments.values[0]["Author"] == author
                ), "Issue with 'Author' of object comments"
        assert (object_base.comments.values[0]["Text"] == object_comment
                ), "Issue with 'Text' of object comments"

        # Repeat with Group comments; no author given -> contributors used
        group = ContainerGroup.create(ws2, name="myGroup")
        group_comment_1 = "group text comment"
        group_comment_2 = "my other comment"
        author = "Jane Doe"
        group.add_comment(group_comment_1)
        group.add_comment(group_comment_2)
        ws2.finalize()

        ws3 = Workspace(h5file_path)
        group_in = ws3.get_entity("myGroup")[0]

        assert group_in.comments.values[0]["Author"] == ",".join(
            ws3.contributors), "Issue with 'Author' of group comments"
        assert (group_in.comments.values[0]["Text"] == group_comment_1
                ), "Issue with 'Text' of group comments"
        assert (group_in.comments.values[1]["Text"] == group_comment_2
                ), "Issue with 'Text' of group comments"
def test_save_modified_properties(
    write_attributes,
    write_coordinates,
    write_data_values,
):
    """Check which H5 writer hooks fire as an object is created and modified.

    The three parameters are patched writer functions whose ``.called`` flag
    records whether the corresponding write path was exercised.
    """
    n_data = 12
    coords = np.random.randn(n_data, 3)

    with tempfile.TemporaryDirectory() as tempdir:
        h5file_path = Path(tempdir) / r"testPoints.geoh5"

        # Creating an empty Points object only writes attributes
        workspace = Workspace(h5file_path)
        points = Points.create(workspace)
        workspace.finalize()

        assert write_attributes.called, f"{write_attributes} was not called."
        assert (
            not write_coordinates.called
        ), f"{write_coordinates} should not have been called."
        assert (
            not write_data_values.called
        ), f"{write_data_values} should not have been called."

        # Assigning vertices flags them as modified; finalize writes them
        points.vertices = coords
        assert (
            "vertices" in points.modified_attributes
        ), "'vertices' should be in list of 'modified_attributes' "

        workspace.finalize()

        assert write_coordinates.called, f"{write_coordinates} should have been called."
        assert (
            not write_data_values.called
        ), f"{write_data_values} should not have been called."

        # Adding data writes values immediately
        points.add_data({"rando": {"values": np.ones(n_data)}})
        assert write_data_values.called, f"{write_data_values} should have been called."

        # Renaming marks the attributes as modified
        points.name = "hello_world"
        assert (
            "attributes" in points.modified_attributes
        ), "'attributes' should be in list of 'modified_attributes' "
# Example no. 8
def test_create_reference_data():
    """Round-trip referenced (categorical) data with a random value map."""

    name = "MyTestPointset"

    # Random integer values plus a map from each unique value to a random label
    n_data = 12
    values = np.random.randint(1, high=8, size=n_data)
    value_map = {
        ref: "".join(random.choice(string.ascii_lowercase) for _ in range(8))
        for ref in np.unique(values)
    }

    with tempfile.TemporaryDirectory() as tempdir:
        h5file_path = Path(tempdir) / r"testPoints.geoh5"

        # Create a workspace
        workspace = Workspace(h5file_path)

        points = Points.create(
            workspace,
            vertices=np.random.randn(n_data, 3),
            name=name,
            allow_move=False,
        )

        data = points.add_data({
            "DataValues": {
                "type": "referenced",
                "values": values,
                "value_map": value_map,
            }
        })

        # Reload from disk and compare against the originals
        new_workspace = Workspace(h5file_path)
        rec_obj = new_workspace.get_entity(name)[0]
        rec_data = new_workspace.get_entity("DataValues")[0]

        compare_entities(points, rec_obj)
        compare_entities(data, rec_data)