# Example 1
def test_insert_drillhole_data():
    """Insert depth data into an existing Drillhole and check that new
    depths within the collocation tolerance are merged onto existing
    vertices, while out-of-tolerance depths create new vertices.
    """

    well_name = "bullseye"
    n_data = 10
    collocation = 1e-5  # default collocation distance for the well

    with tempfile.TemporaryDirectory() as tempdir:
        h5file_path = Path(tempdir) / r"testCurve.geoh5"
        # Create a workspace
        workspace = Workspace(h5file_path)
        max_depth = 100
        well = Drillhole.create(
            workspace,
            collar=np.r_[0.0, 10.0, 10],
            surveys=np.c_[np.linspace(0, max_depth, n_data),
                          np.linspace(-89, -75, n_data),
                          np.ones(n_data) * 45.0, ],
            name=well_name,
            default_collocation_distance=collocation,
        )
        # Add log-data
        data_object = well.add_data({
            "log_values": {
                "depth": np.sort(np.random.rand(n_data) * max_depth),
                "values": np.random.randint(1, high=8, size=n_data),
            }
        })

        workspace.finalize()

        # Add more data with single match
        old_depths = well.get_data("DEPTH")[0].values
        indices = np.where(~np.isnan(old_depths))[0]
        insert = np.random.randint(0, high=len(indices) - 1, size=2)
        new_depths = old_depths[indices[insert]]
        new_depths[0] -= 2e-6  # Out of tolerance
        new_depths[1] -= 5e-7  # Within tolerance

        match_test = well.add_data({
            "match_depth": {
                "depth": new_depths,
                "values": np.random.randint(1, high=8, size=2),
                "collocation_distance": 1e-6,
            }
        })

        # Only the out-of-tolerance depth should have added a vertex
        assert (well.n_vertices == n_data +
                1), "Error adding values with collocated tolerance"
        assert np.isnan(data_object.values[indices[insert][0]]
                        ), "Old values not re-sorted properly after insertion"

        # Non-nan entries of the new data must land at the expected indices;
        # the +1 offset depends on the relative order of the insertion points.
        insert_ind = np.where(~np.isnan(match_test.values))[0]
        if insert[0] <= insert[1]:
            assert all(ind in [indices[insert][0], indices[insert][1] + 1]
                       for ind in insert_ind), "Depth insertion error"
        else:
            assert all(ind in [indices[insert][0], indices[insert][1]]
                       for ind in insert_ind), "Depth insertion error"
def test_octree():
    """Round-trip an Octree mesh through a geoh5 file and compare."""
    mesh_name = "MyTestOctree"

    with tempfile.TemporaryDirectory() as tempdir:
        file_path = Path(tempdir) / r"octree.geoh5"

        # Workspace backed by a temporary file
        workspace = Workspace(file_path)

        # Octree mesh with different cell counts/sizes along u, v, w
        octree = Octree.create(
            workspace,
            name=mesh_name,
            origin=[0, 0, 0],
            u_count=32,
            v_count=16,
            w_count=8,
            u_cell_size=1.0,
            v_cell_size=1.0,
            w_cell_size=2.0,
            rotation=45,
        )

        assert octree.n_cells == 8, "Number of octree cells after base_refine is wrong"

        # Refine
        workspace.save_entity(octree)
        workspace.finalize()

        # Reload from disk through a fresh workspace and compare entities
        reloaded = Workspace(file_path).get_entity(mesh_name)[0]

        compare_entities(octree, reloaded)
# Example 3
def test_xyz_dataype():
    """Geometric x/y/z data types are created once and then reused."""
    # TODO: no file on disk should be required for this test
    #       as workspace does not have to be saved
    with tempfile.TemporaryDirectory() as tempdir:
        the_workspace = Workspace(Path(tempdir) / f"{__name__}.geoh5")

        # (factory, expected-uid) pairs for each geometric axis
        axes = [
            (DataType.for_x_data, GeometricDataConstants.x_datatype_uid),
            (DataType.for_y_data, GeometricDataConstants.y_datatype_uid),
            (DataType.for_z_data, GeometricDataConstants.z_datatype_uid),
        ]
        for factory, expected_uid in axes:
            datatype = factory(the_workspace)
            assert datatype.uid == expected_uid()
            assert DataType.find(the_workspace, expected_uid()) is datatype
            # make sure another call does not re-create another type
            assert factory(the_workspace) is datatype
def test_outside_survey():
    """Desurvey a depth beyond the last survey station: locations must
    follow a straight-line extrapolation from the collar along the final
    azimuth/dip direction.
    """
    # Create a simple well
    dist = np.random.rand(2) * 100.0
    azm = [np.random.randn(1) * 180.0] * 2
    dip = [np.random.randn(1) * 180.0] * 2

    collar = np.r_[0.0, 10.0, 10.0]

    with tempfile.TemporaryDirectory() as tempdir:
        h5file_path = Path(tempdir) / r"testCurve.geoh5"
        workspace = Workspace(h5file_path)
        well = Drillhole.create(workspace,
                                collar=collar,
                                surveys=np.c_[dist, dip, azm])
        depths = [0.0, 1000.0]  # 1000 m is well past the deepest station
        locations = well.desurvey(depths)
        # Expected: collar + depth * direction of the last survey record
        # (450 - azimuth converts compass azimuth to math-convention angle)
        solution = (
            collar[None, :] +
            np.c_[depths * np.cos(np.deg2rad(450.0 - azm[-1] % 360.0)) *
                  np.cos(np.deg2rad(dip[-1])),
                  depths * np.sin(np.deg2rad(450.0 - azm[-1] % 360.0)) *
                  np.cos(np.deg2rad(dip[-1])),
                  depths * np.sin(np.deg2rad(dip[-1])), ])

        np.testing.assert_array_almost_equal(locations, solution, decimal=3)
# Example 5
def test_set_parent():
    """Re-parent an entity and verify the change persists on reload."""
    vertices = np.random.randn(2, 3)
    points_name = "test_points"

    with tempfile.TemporaryDirectory() as tempdir:
        h5file_path = Path(tempdir) / r"testProject.geoh5"
        workspace = Workspace(h5file_path)

        # Points created under group_a, then moved to group_b
        group_a = ContainerGroup.create(workspace)
        points = Points.create(
            workspace, vertices=vertices, name=points_name, parent=group_a)
        points.add_data(
            {"random": {"values": np.random.randn(vertices.shape[0])}})
        group_b = ContainerGroup.create(workspace, name="group_b")
        points.parent = group_b

        # Reload everything from disk with a fresh workspace
        workspace = Workspace(h5file_path)
        group_reload = workspace.get_entity("group_b")[0]
        entity_reload = workspace.get_entity(points_name)[0]
        data_reload = workspace.get_entity("random")[0]

        assert entity_reload.parent == group_reload, "Parent different than expected."
        assert (
            entity_reload in group_reload.children
        ), "Entity not in the list of children."
        assert (
            data_reload in entity_reload.children
        ), "Data not in list of entity children."
def test_create_property_group():
    """Create property groups on a Curve, round-trip them through a file,
    and check that copy(copy_children=False) drops the groups."""
    with tempfile.TemporaryDirectory() as tempdir:
        h5file_path = Path(tempdir) / r"prop_group_test.geoh5"
        # Create a workspace
        workspace = Workspace(h5file_path)
        curve = Curve.create(
            workspace,
            vertices=np.c_[np.linspace(0, 2 * np.pi, 12),
                           np.zeros(12),
                           np.zeros(12)],
        )
        # Add data
        props = []
        for i in range(4):
            values = np.cos(curve.vertices[:, 0] / (i + 1))
            props += [
                curve.add_data({f"Period{i+1}": {
                    "values": values
                }},
                               property_group="myGroup")
            ]

        # Property group object should have been created
        prop_group = curve.find_or_create_property_group(name="myGroup")
        # Create a new group by data name
        single_data_group = curve.add_data_to_group(f"Period{1}", "Singleton")

        assert (workspace.find_data(
            single_data_group.properties[0]).name == f"Period{1}"
                ), "Failed at creating a property group by data name"
        workspace.finalize()

        # Re-open the workspace
        workspace = Workspace(h5file_path)
        rec_object = workspace.get_entity(curve.uid)[0]
        # Read the property_group back in
        rec_prop_group = rec_object.find_or_create_property_group(
            name="myGroup")

        # Compare every mapped attribute of the recovered group to the input
        attrs = rec_prop_group.attribute_map
        check_list = [
            attr for attr in attrs.values()
            if getattr(rec_prop_group, attr) != getattr(prop_group, attr)
        ]
        assert (
            len(check_list) == 0
        ), f"Attribute{check_list} of PropertyGroups in output differ from input"

        # Copy an object without children
        new_curve = rec_object.copy(copy_children=False)

        assert (
            new_curve.property_groups == []
        ), "Property_groups not properly removed on copy without children."
# Example 7
def test_remove_root():
    """Deleting the Root group (and group structure) from the HDF5 file
    must still allow reading objects, data and property groups back."""

    # Generate a random cloud of points
    n_data = 12
    xyz = np.random.randn(n_data, 3)

    with tempfile.TemporaryDirectory() as tempdir:
        h5file_path = Path(tempdir) / r"testProject.geoh5"

        # Create a workspace
        workspace = Workspace(h5file_path)
        points = Points.create(workspace, vertices=xyz)
        data = points.add_data({
            "DataValues": {
                "association": "VERTEX",
                "values": np.random.randn(n_data),
            },
            "DataValues2": {
                "association": "VERTEX",
                "values": np.random.randn(n_data),
            },
        })

        group_name = "SomeGroup"
        data_group = points.add_data_to_group(data, group_name)

        workspace.finalize()

        # Remove the root (simulates a file written without a workspace tree)
        with File(h5file_path, "r+") as project:
            base = list(project.keys())[0]
            del project[base]["Root"]
            del project[base]["Groups"]
            del project[base]["Types"]["Group types"]

        # Read the data back in from a fresh workspace
        new_workspace = Workspace(h5file_path)

        rec_points = new_workspace.get_entity(points.name)[0]
        rec_group = rec_points.find_or_create_property_group(name=group_name)
        rec_data = new_workspace.get_entity(data[0].name)[0]

        # Parent/file-handle attributes necessarily differ after the re-read
        compare_entities(
            points,
            rec_points,
            ignore=["_parent", "_existing_h5_entity", "_property_groups"],
        )
        compare_entities(data[0],
                         rec_data,
                         ignore=["_parent", "_existing_h5_entity"])
        compare_entities(data_group,
                         rec_group,
                         ignore=["_parent", "_existing_h5_entity"])
# Example 8
def test_create_point_data():
    """Create Points with vertex and object-associated data, tweak data
    attributes, and verify a full round-trip through the geoh5 file."""

    new_name = "TestName"

    # Generate a random cloud of points
    xyz = np.random.randn(12, 3)
    values = np.random.randn(12)

    with tempfile.TemporaryDirectory() as tempdir:
        h5file_path = Path(tempdir) / r"testPoints.geoh5"
        workspace = Workspace(h5file_path)
        points = Points.create(workspace, vertices=xyz, allow_move=False)
        data = points.add_data(
            {"DataValues": {
                "association": "VERTEX",
                "values": values
            }})
        tag = points.add_data(
            {"my_comment": {
                "association": "OBJECT",
                "values": "hello_world"
            }})
        # Change some data attributes for testing
        data.allow_delete = False
        data.allow_move = True
        data.allow_rename = False
        data.name = new_name
        # Fake ANALYST creating a StatsCache
        with fetch_h5_handle(h5file_path) as h5file:
            etype_handle = H5Writer.fetch_handle(h5file, data.entity_type)
            etype_handle.create_group("StatsCache")
        # Trigger replace of values
        data.values = values * 2.0
        workspace.finalize()
        # Read the data back in from a fresh workspace
        new_workspace = Workspace(h5file_path)
        rec_obj = new_workspace.get_entity("Points")[0]
        rec_data = new_workspace.get_entity(new_name)[0]
        rec_tag = new_workspace.get_entity("my_comment")[0]
        compare_entities(points, rec_obj)
        compare_entities(data, rec_data)
        compare_entities(tag, rec_tag)
        # Updating the values must have invalidated the cached statistics
        with fetch_h5_handle(h5file_path) as h5file:
            etype_handle = H5Writer.fetch_handle(h5file, rec_data.entity_type)
            assert (
                etype_handle.get("StatsCache") is None
            ), "StatsCache was not properly deleted on update of values"
def test_create_surface_data():
    """Create a Surface with cell data and compare after a reload."""

    with tempfile.TemporaryDirectory() as tempdir:
        h5file_path = Path(tempdir) / r"testSurface.geoh5"

        workspace = Workspace(h5file_path)

        # Grid of points with random elevation
        grid_x, grid_y = np.meshgrid(np.arange(10), np.arange(10))
        flat_x = grid_x.ravel()
        flat_y = grid_y.ravel()
        flat_z = np.random.randn(flat_x.shape[0])

        vertices = np.c_[flat_x, flat_y, flat_z]

        # Random triangulation of the vertices
        simplices = np.unique(
            np.random.randint(0, vertices.shape[0] - 1,
                              (vertices.shape[0], 3)),
            axis=1,
        )

        # Cell values: mean x-coordinate of each triangle's corners
        values = np.mean(
            np.c_[flat_x[simplices[:, 0]],
                  flat_x[simplices[:, 1]],
                  flat_x[simplices[:, 2]]],
            axis=1,
        )

        # Create a geoh5 surface
        surface = Surface.create(
            workspace, name="mySurf", vertices=vertices, cells=simplices)

        data = surface.add_data({"TMI": {"values": values}})

        # Fresh workspace object on the same file
        new_workspace = Workspace(h5file_path)
        rec_obj = new_workspace.get_entity("mySurf")[0]
        rec_data = rec_obj.get_data("TMI")[0]

        compare_entities(surface, rec_obj)
        compare_entities(data, rec_data)
# Example 10
def test_create_reference_data():
    """Round-trip referenced (integer-keyed) data on a Points object."""
    name = "MyTestPointset"

    # Random integer values with a random 8-letter label per unique value
    n_data = 12
    values = np.random.randint(1, high=8, size=n_data)
    value_map = {
        ref: "".join(random.choice(string.ascii_lowercase) for _ in range(8))
        for ref in np.unique(values)
    }

    with tempfile.TemporaryDirectory() as tempdir:
        h5file_path = Path(tempdir) / r"testPoints.geoh5"

        # Create a workspace
        workspace = Workspace(h5file_path)

        points = Points.create(workspace,
                               vertices=np.random.randn(n_data, 3),
                               name=name,
                               allow_move=False)

        data = points.add_data({
            "DataValues": {
                "type": "referenced",
                "values": values,
                "value_map": value_map,
            }
        })

        # Reload from disk and compare with the in-memory entities
        new_workspace = Workspace(h5file_path)
        compare_entities(points, new_workspace.get_entity(name)[0])
        compare_entities(data, new_workspace.get_entity("DataValues")[0])
def test_group_instantiation(group_class):
    """Instantiate a group of the given class and check type bookkeeping:
    the GroupType is shared, findable, and released from the workspace
    only once no entity references it anymore."""
    # TODO: no file on disk should be required for this test
    #       as workspace does not have to be saved
    with tempfile.TemporaryDirectory() as tempdir:
        the_workspace = Workspace(Path(tempdir) / f"{__name__}.geoh5")

        group_type = group_class.find_or_create_type(the_workspace)
        # BUG FIX: the bare isinstance() call was a no-op; assert it instead
        assert isinstance(group_type, GroupType)
        assert group_type.workspace is the_workspace
        assert group_type.uid == group_class.default_type_uid()
        assert the_workspace.find_type(group_type.uid, GroupType) is group_type
        assert GroupType.find(the_workspace, group_type.uid) is group_type

        # searching for the wrong type
        assert the_workspace.find_type(group_type.uid, ObjectType) is None

        # BUG FIX: initialize to False so the check further down cannot
        # raise NameError when the workspace has no root
        type_used_by_root = False
        if the_workspace.root is not None:
            type_used_by_root = the_workspace.root.entity_type is group_type
        created_group = group_class(group_type, name="test group")
        assert created_group.uid is not None
        assert created_group.uid.int != 0
        assert created_group.name == "test group"
        assert created_group.entity_type is group_type

        # should find the type instead of re-creating one
        assert group_class.find_or_create_type(the_workspace) is group_type

        _can_find(the_workspace, created_group)

        # now, make sure that unused data and types do not remain reference in the workspace
        group_type_uid = group_type.uid
        group_type = None  # type: ignore
        # group_type is still referenced by created_group, so it should be tracked by the workspace
        assert the_workspace.find_type(group_type_uid, GroupType) is not None

        created_group_uid = created_group.uid
        created_group = None  # type: ignore
        # no more reference on create_group, so it should be gone from the workspace
        assert the_workspace.find_group(created_group_uid) is None

        if type_used_by_root:
            # type is still used by the workspace root, so still tracked by the workspace
            assert the_workspace.find_type(group_type_uid,
                                           GroupType) is not None
        else:
            # no more reference on group_type, so it should be gone from the workspace
            assert the_workspace.find_type(group_type_uid, GroupType) is None
def test_workspace_context():
    """Nested active_workspace contexts stack and unwind correctly."""
    # TODO: no file on disk should be required for this test
    #       as workspace does not have to be saved
    with tempfile.TemporaryDirectory() as tempdir:
        first = Workspace(Path(tempdir) / "w1.geoh5")
        with active_workspace(first) as ws1:
            assert Workspace.active() == ws1
            second = Workspace(Path(tempdir) / "w2.geoh5")
            with active_workspace(second) as ws2:
                assert Workspace.active() == ws2
            # inner context exited: the first workspace is active again
            assert Workspace.active() == ws1
        # no context left: querying the active workspace must fail
        with pytest.raises(RuntimeError) as error:
            Workspace.active()
        assert "no active workspace" in str(error.value).lower()
# Example 13
def test_no_data_values():
    """Round-trip data containing NaNs, a missing values array, and a
    fully reset (None) array; the no-data states must survive reload."""

    # Generate a random cloud of points
    n_data = 12
    xyz = np.random.randn(n_data, 3)
    float_values = np.random.randn(n_data)
    float_values[3:5] = np.nan

    int_values = np.random.randint(n_data, size=n_data).astype(float)
    int_values[2:5] = np.nan

    all_nan = np.ones(n_data)

    with tempfile.TemporaryDirectory() as tempdir:
        h5file_path = Path(tempdir) / r"testProject.geoh5"

        # Create a workspace
        workspace = Workspace(h5file_path)
        points = Points.create(workspace, vertices=xyz)
        data_objs = points.add_data({
            "DataFloatValues": {
                "association": "VERTEX",
                "values": float_values
            },
            "DataIntValues": {
                "values": int_values,
                "type": "INTEGER",
            },
            "NoValues": {
                "association": "VERTEX"
            },
            "AllNanValues": {
                "association": "VERTEX",
                "values": all_nan
            },
        })
        data_objs[-1].values = None  # Reset all values to nan
        workspace.finalize()

        # Read the data back in from a fresh workspace
        new_workspace = Workspace(h5file_path)

        for data in data_objs:
            rec_data = new_workspace.get_entity(data.name)[0]

            if data.values is None:
                # BUG FIX: assertion message was ungrammatical ("should None")
                assert rec_data.values is None, "Data 'values' saved should be None"
            else:
                # NaN masks must match element-wise between input and output
                assert all(
                    np.isnan(rec_data.values) == np.isnan(data.values)
                ), "Mismatch between input and recovered data values"
# Example 14
def test_default_parent():
    """Entities created with parent=None (or no parent) default to Root.

    BUG FIX: renamed from ``test_create_point_data`` — a test with that
    exact name already exists earlier in this module, so this definition
    shadowed it and the earlier test was silently never collected.
    """
    with tempfile.TemporaryDirectory() as tempdir:
        h5file_path = Path(tempdir) / r"test.geoh5"
        workspace = Workspace(h5file_path)
        group = ContainerGroup.create(workspace, parent=None)
        assert (group.parent == workspace.root
                ), "Assigned parent=None should default to Root."

        group = ContainerGroup.create(workspace)
        assert (group.parent == workspace.root
                ), "Creation without parent should default to Root."

        points = Points.create(workspace, parent=group)

        assert points.parent == group, "Parent setter did not work."
# Example 15
def test_create_grid_2d_data():
    """Round-trip a rotated Grid2D and its cell data through a file."""
    grid_name = "MyTestGrid2D"

    # 2D array of values on an (n_y, n_x) grid
    n_x, n_y = 10, 15
    values, _ = np.meshgrid(np.linspace(0, np.pi, n_x),
                            np.linspace(0, np.pi, n_y))

    with tempfile.TemporaryDirectory() as tempdir:
        h5file_path = Path(tempdir) / r"test2Grid.geoh5"

        # Create a workspace
        workspace = Workspace(h5file_path)

        grid = Grid2D.create(
            workspace,
            origin=[0, 0, 0],
            u_cell_size=20.0,
            v_cell_size=30.0,
            u_count=n_x,
            v_count=n_y,
            name=grid_name,
            allow_move=False,
        )

        data = grid.add_data({"DataValues": {"values": values}})
        # Rotation is applied after the data is attached
        grid.rotation = 45.0

        workspace.finalize()

        # Fresh workspace reads everything back from disk
        reloaded = Workspace(h5file_path)
        compare_entities(grid, reloaded.get_entity(grid_name)[0])
        compare_entities(data, reloaded.get_entity("DataValues")[0])
# Example 16
def test_create_group():
    """Save a ContainerGroup and compare it with the re-read entity."""
    group_name = "MyTestContainer"

    with tempfile.TemporaryDirectory() as tempdir:
        h5file_path = Path(tempdir) / r"testGroup.geoh5"

        # Workspace backed by a temporary geoh5 file
        workspace = Workspace(h5file_path)

        group = ContainerGroup.create(workspace, name=group_name)
        workspace.save_entity(group)
        workspace.finalize()

        # Fetch the group back from the same workspace and compare
        recovered = workspace.get_entity(group_name)[0]
        compare_entities(group, recovered)
# Example 17
def test_data_instantiation(data_class):
    """Instantiate data of the given class and check that its DataType is
    registered, findable, and released once nothing references it."""
    # TODO: no file on disk should be required for this test
    #       as workspace does not have to be saved
    with tempfile.TemporaryDirectory() as tempdir:
        the_workspace = Workspace(Path(tempdir) / f"{__name__}.geoh5")

        data_type = DataType.create(the_workspace, data_class)
        assert data_type.uid is not None
        assert data_type.uid.int != 0
        assert data_type.name == "Entity"
        assert data_type.units is None
        assert data_type.primitive_type == data_class.primitive_type()
        assert the_workspace.find_type(data_type.uid, DataType) is data_type
        assert DataType.find(the_workspace, data_type.uid) is data_type

        # searching for the wrong type
        assert the_workspace.find_type(data_type.uid, ObjectType) is None

        created_data = data_class(data_type,
                                  association=DataAssociationEnum.VERTEX,
                                  name="test")
        assert created_data.uid is not None
        assert created_data.uid.int != 0
        assert created_data.name == "test"
        assert created_data.association == DataAssociationEnum.VERTEX

        _can_find(the_workspace, created_data)

        # now, make sure that unused data and types do not remain reference in the workspace
        data_type_uid = data_type.uid
        data_type = None  # type: ignore
        # data_type is still referenced by created_data, so it should survive in the workspace
        assert the_workspace.find_type(data_type_uid, DataType) is not None

        created_data_uid = created_data.uid
        created_data = None  # type: ignore
        # no more reference on created_data, so it should be gone from the workspace
        assert the_workspace.find_data(created_data_uid) is None

        # no more reference on data_type, so it should be gone from the workspace
        assert the_workspace.find_type(data_type_uid, DataType) is None
# Example 18
def test_object_instantiation(object_class):
    """Instantiate an object of the given class and check that its
    ObjectType is shared, findable, and released once unreferenced."""
    # TODO: no file on disk should be required for this test
    #       as workspace does not have to be saved
    with tempfile.TemporaryDirectory() as tempdir:
        the_workspace = Workspace(Path(tempdir) / f"{__name__}.geoh5")

        object_type = object_class.find_or_create_type(the_workspace)
        isinstance(object_type, ObjectType)
        assert object_type.workspace is the_workspace
        assert object_type.uid == object_class.default_type_uid()
        assert ObjectType.find(the_workspace, object_type.uid) is object_type
        assert the_workspace.find_type(object_type.uid,
                                       ObjectType) is object_type

        # searching for the wrong type
        assert the_workspace.find_type(object_type.uid, GroupType) is None

        created_object = object_class(object_type, name="test")
        assert created_object.uid is not None
        assert created_object.uid.int != 0
        assert created_object.name == "test"
        assert created_object.entity_type is object_type

        # should find the type instead of re-creating one
        assert object_class.find_or_create_type(the_workspace) is object_type

        _can_find(the_workspace, created_object)

        # now, make sure that unused data and types do not remain reference in the workspace
        object_type_uid = object_type.uid
        object_type = None  # type: ignore
        # object_type is still referenced by created_group, so it should be tracked by the workspace
        assert the_workspace.find_type(object_type_uid, ObjectType) is not None

        created_object_uid = created_object.uid
        created_object = None  # type: ignore
        # no more reference on created_object, so it should be gone from the workspace
        assert the_workspace.find_object(created_object_uid) is None

        # no more reference on object_type, so it should be gone from the workspace
        assert the_workspace.find_type(object_type_uid, ObjectType) is None
def test_custom_group_instantiation():
    """A CustomGroup has no predefined type and must be built from a
    custom GroupType; check creation, lookup, and release semantics."""
    with pytest.raises(RuntimeError):
        assert CustomGroup.default_type_uid() is None

    # TODO: no file on disk should be required for this test
    #       as workspace does not have to be saved
    with tempfile.TemporaryDirectory() as tempdir:
        the_workspace = Workspace(Path(tempdir) / f"{__name__}.geoh5")

        with pytest.raises(RuntimeError):
            # cannot get a pre-defined type for a CustomGroup
            CustomGroup.find_or_create_type(the_workspace)

        group_type = GroupType.create_custom(
            the_workspace,
            name="test custom",
            description="test custom description")
        assert group_type.name == "test custom"
        assert group_type.description == "test custom description"

        # BUG FIX: the bare isinstance() call was a no-op; assert it instead
        assert isinstance(group_type, GroupType)
        assert group_type.workspace is the_workspace
        # GroupType.create_custom() uses the generate UUID for the group as its class ID
        assert the_workspace.find_type(group_type.uid, GroupType) is group_type
        assert GroupType.find(the_workspace, group_type.uid) is group_type

        created_group = CustomGroup(group_type, name="test custom group")
        assert created_group.uid is not None
        assert created_group.uid.int != 0
        assert created_group.name == "test custom group"
        assert created_group.entity_type is group_type

        _can_find(the_workspace, created_group)

        # now, make sure that unused data and types do not remain reference in the workspace
        group_type_uid = group_type.uid
        group_type = None
        # group_type is referenced by created_group, so it should survive in the workspace
        assert the_workspace.find_type(group_type_uid, GroupType) is not None

        created_group_uid = created_group.uid
        created_group = None
        # no more reference on created_group, so it should be gone from the workspace
        # (BUG FIX: was find_data() — groups are looked up with find_group(),
        # matching test_group_instantiation; the two comments below were also
        # swapped relative to the assertions they describe)
        assert the_workspace.find_group(created_group_uid) is None
        # no more reference on group_type, so it should be gone from the workspace
        assert the_workspace.find_type(group_type_uid, GroupType) is None
def test_save_modified_properties(
    write_attributes,
    write_coordinates,
    write_data_values,
):
    """Verify (via mocked H5Writer hooks supplied as fixtures) that only
    the writers matching modified properties get invoked on finalize."""
    n_data = 12
    xyz = np.random.randn(n_data, 3)

    with tempfile.TemporaryDirectory() as tempdir:
        h5file_path = Path(tempdir) / r"testPoints.geoh5"

        # Create a workspace
        workspace = Workspace(h5file_path)
        points = Points.create(workspace)
        workspace.finalize()

        # Only attributes were written: no vertices or data exist yet
        assert write_attributes.called, f"{write_attributes} was not called."
        assert (not write_coordinates.called
                ), f"{write_coordinates} should not have been called."
        assert (not write_data_values.called
                ), f"{write_data_values} should not have been called."

        points.vertices = xyz
        assert ("vertices" in points.modified_attributes
                ), "'vertices' should be in list of 'modified_attributes' "

        workspace.finalize()

        # Setting vertices triggers the coordinate writer on finalize
        assert write_coordinates.called, f"{write_coordinates} should have been called."
        assert (not write_data_values.called
                ), f"{write_data_values} should not have been called."

        points.add_data({"rando": {"values": np.ones(n_data)}})

        assert write_data_values.called, f"{write_data_values} should have been called."

        points.name = "hello_world"

        assert ("attributes" in points.modified_attributes
                ), "'attributes' should be in list of 'modified_attributes' "
def test_modify_property_group():
    """Remove members from a property group and verify that the modified
    group round-trips through the geoh5 file."""

    def compare_objects(object_a, object_b, ignore=None):
        # Attribute-by-attribute comparison; private names ("_attr") are
        # read through their public counterpart (attr[1:]).
        if ignore is None:
            ignore = ["_workspace", "_children", "_parent"]
        for attr in object_a.__dict__.keys():
            if attr in ignore:
                continue
            if isinstance(getattr(object_a, attr[1:]), ABC):
                compare_objects(
                    getattr(object_a, attr[1:]), getattr(object_b, attr[1:])
                )
            else:
                assert np.all(
                    getattr(object_a, attr[1:]) == getattr(object_b, attr[1:])
                ), f"Output attribute {attr[1:]} for {object_a} do not match input {object_b}"

    obj_name = "myCurve"
    # Generate a curve with multiple data
    xyz = np.c_[np.linspace(0, 2 * np.pi, 12), np.zeros(12), np.zeros(12)]

    with tempfile.TemporaryDirectory() as tempdir:
        h5file_path = Path(tempdir) / r"prop_group_test.geoh5"

        # Create a workspace
        workspace = Workspace(h5file_path)

        curve = Curve.create(workspace, vertices=xyz, name=obj_name)

        # Add data
        props = []
        for i in range(4):
            values = np.cos(xyz[:, 0] / (i + 1))
            props += [
                curve.add_data(
                    {f"Period{i+1}": {"values": values}}, property_group="myGroup"
                )
            ]

        children_list = curve.get_data_list()
        assert all(
            f"Period{i + 1}" in children_list for i in range(4)
        ), "Missing data children"
        # Property group object should have been created
        prop_group = curve.find_or_create_property_group(name="myGroup")

        # Remove on props from the list
        curve.remove_data_from_group(children_list[0], name="myGroup")
        curve.remove_data_from_group(props[-2:], name="myGroup")

        # 4 added - 1 removed by name - 2 removed by object = 1 remaining
        assert len(prop_group.properties) == 1, "Error removing a property_group"

        workspace.finalize()

        # Re-open the workspace
        workspace = Workspace(h5file_path)

        # Read the property_group back in
        rec_curve = workspace.get_entity(obj_name)[0]
        rec_prop_group = rec_curve.find_or_create_property_group(name="myGroup")
        compare_objects(rec_prop_group, prop_group)

        fetch_group = workspace.fetch_property_groups(rec_curve)
        assert len(fetch_group) == 1, "Issues reading property groups from workspace"
        compare_objects(fetch_group[0], prop_group)
def test_create_survey_dcip():
    """Create a DC-IP survey (current + potential electrodes), validate the
    ab_cell_id and metadata setters, link the two entities both ways, and
    verify a file round-trip.

    NOTE(review): relies on module-level imports from this file (Workspace,
    CurrentElectrode, PotentialElectrode, compare_entities, np, pytest, uuid).
    """

    name = "TestCurrents"
    n_data = 12

    with tempfile.TemporaryDirectory() as tempdir:
        path = Path(tempdir) / r"testDC.geoh5"

        # Create a workspace
        workspace = Workspace(path)

        # Create sources along line
        x_loc, y_loc = np.meshgrid(np.arange(n_data), np.arange(-1, 3))
        vertices = np.c_[x_loc.ravel(), y_loc.ravel(), np.zeros_like(x_loc).ravel()]
        # Four survey lines ("parts"), n_data electrodes each
        parts = np.kron(np.arange(4), np.ones(n_data)).astype("int")
        currents = CurrentElectrode.create(
            workspace, name=name, vertices=vertices, parts=parts
        )
        currents.add_default_ab_cell_id()
        potentials = PotentialElectrode.create(
            workspace, name=name + "_rx", vertices=vertices
        )
        n_dipoles = 9
        dipoles = []
        current_id = []
        # For every A-B cell, build up to n_dipoles receiver dipoles offset
        # down-line; skip any that run past the last vertex or straddle parts.
        for val in currents.ab_cell_id.values:
            cell_id = int(currents.ab_map[val]) - 1

            for dipole in range(n_dipoles):
                dipole_ids = currents.cells[cell_id, :] + 2 + dipole

                if (
                    any(dipole_ids > (potentials.n_vertices - 1))
                    or len(np.unique(parts[dipole_ids])) > 1
                ):
                    continue

                dipoles += [dipole_ids]
                current_id += [val]

        potentials.cells = np.vstack(dipoles).astype("uint32")

        fake_ab = potentials.add_data(
            {"fake_ab": {"values": np.random.randn(potentials.n_cells)}}
        )

        # Assigning a data entity (rather than integer values) must be rejected
        with pytest.raises(TypeError):
            potentials.ab_cell_id = fake_ab

        potentials.ab_cell_id = np.hstack(current_id).astype("int32")

        # Change again only the values
        ab_data = potentials.get_data("A-B Cell ID")[0]
        new_values = ab_data.values
        new_values[0] = 5
        potentials.ab_cell_id = new_values

        # Re-assignment should update the existing data, not create a second one
        assert (
            len(potentials.get_data("A-B Cell ID")) == 1
        ), "Issue with new A-B Cell ID data created"

        # Metadata validation: an extra key must raise ValueError
        fake_meta = {
            "Current Electrodes": uuid.uuid4(),
            "Potential Electrodes": uuid.uuid4(),
            "One too many key": uuid.uuid4(),
        }
        with pytest.raises(ValueError):
            potentials.metadata = fake_meta

        del fake_meta["One too many key"]

        # Unknown current-electrode uid must raise IndexError
        with pytest.raises(IndexError):
            potentials.metadata = fake_meta

        fake_meta["Current Electrodes"] = currents.uid

        # Unknown potential-electrode uid must raise IndexError
        with pytest.raises(IndexError):
            potentials.metadata = fake_meta

        fake_meta["Potential Electrodes"] = potentials.uid

        # Linking from the potentials side should also set the reciprocal link
        potentials.current_electrodes = currents
        assert (
            currents.potential_electrodes == potentials
        ), "Error assigning the potentiel_electrodes."
        assert (
            potentials.current_electrodes == currents
        ), "Error assigning the current_electrodes."

        assert (
            currents.metadata
            == potentials.metadata
            == {
                "Current Electrodes": currents.uid,
                "Potential Electrodes": potentials.uid,
            }
        ), "Error assigning metadata"

        # Repeat the other way
        with pytest.raises(TypeError) as info:
            potentials.current_electrodes = None
        assert info.type == TypeError, "Code did not catch TypeError"

        with pytest.raises(TypeError) as info:
            currents.potential_electrodes = None
        assert info.type == TypeError, "Code did not catch TypeError"

        # Clear the cached links so assignment from the currents side is tested
        setattr(potentials, "_current_electrodes", None)
        setattr(currents, "_potential_electrodes", None)

        currents.potential_electrodes = potentials
        assert (
            currents.potential_electrodes == potentials
        ), "Error assigning the potentiel_electrodes."
        assert (
            potentials.current_electrodes == currents
        ), "Error assigning the current_electrodes."

        assert (
            currents.metadata
            == potentials.metadata
            == {
                "Current Electrodes": currents.uid,
                "Potential Electrodes": potentials.uid,
            }
        ), "Error assigning metadata"
        workspace.finalize()

        # Re-open the workspace and read data back in
        new_workspace = Workspace(path)
        currents_rec = new_workspace.get_entity(name)[0]
        potentials_rec = new_workspace.get_entity(name + "_rx")[0]

        # Check entities
        compare_entities(
            currents, currents_rec, ignore=["_potential_electrodes", "_parent"]
        )
        compare_entities(
            potentials, potentials_rec, ignore=["_current_electrodes", "_parent"]
        )
def test_create_block_model_data():
    """Create a rotated BlockModel with cell-centered data, then read it
    back from a fresh workspace handle and compare the entities."""

    grid_name = "MyTestBlockModel"

    # Core cell counts along u, v and z
    n_x, n_y, n_z = 8, 9, 10

    def _delimiters(n_cells, n_low, n_high):
        # Cell delimiters: geometric padding on both ends of a uniform core.
        base = np.pi / n_cells
        widths = np.r_[
            base * 1.5 ** np.arange(n_low)[::-1],
            np.ones(n_cells) * base,
            base * 1.5 ** np.arange(n_high),
        ]
        return np.r_[0, np.cumsum(widths)]

    nodal_x = _delimiters(n_x, 3, 4)
    nodal_y = _delimiters(n_y, 5, 6)
    nodal_z = -_delimiters(n_z, 7, 8)  # depth axis points down

    with tempfile.TemporaryDirectory() as tempdir:
        h5file_path = Path(tempdir) / r"block_model.geoh5"

        workspace = Workspace(h5file_path)

        grid = BlockModel.create(
            workspace,
            origin=[0, 0, 0],
            u_cell_delimiters=nodal_x,
            v_cell_delimiters=nodal_y,
            z_cell_delimiters=nodal_z,
            name=grid_name,
            rotation=30,
            allow_move=False,
        )

        # Separable smooth field evaluated at the cell centers
        cos_xyz = np.cos(grid.centroids)
        data = grid.add_data(
            {
                "DataValues": {
                    "association": "CELL",
                    "values": cos_xyz[:, 0] * cos_xyz[:, 1] * cos_xyz[:, 2],
                }
            }
        )

        # Read everything back in from a fresh workspace
        new_workspace = Workspace(h5file_path)
        rec_obj = new_workspace.get_entity(grid_name)[0]
        rec_data = new_workspace.get_entity("DataValues")[0]

        compare_entities(grid, rec_obj)
        compare_entities(data, rec_data)
# Esempio n. 24
# 0
def test_copy_survey_dcip():
    """Copy a linked DC-IP survey to new workspaces — once from the currents
    entity, once from the potentials entity — and compare the round-trips.

    NOTE(review): relies on module-level imports from this file (Workspace,
    CurrentElectrode, PotentialElectrode, compare_entities, np).
    """

    name = "TestCurrents"
    n_data = 12

    with tempfile.TemporaryDirectory() as tempdir:
        path = Path(tempdir) / r"testDC.geoh5"

        # Create a workspace
        workspace = Workspace(path)

        # Create sources along line
        x_loc, y_loc = np.meshgrid(np.arange(n_data), np.arange(-1, 3))
        vertices = np.c_[x_loc.ravel(), y_loc.ravel(), np.zeros_like(x_loc).ravel()]
        # Four survey lines ("parts"), n_data electrodes each
        parts = np.kron(np.arange(4), np.ones(n_data)).astype("int")
        currents = CurrentElectrode.create(
            workspace, name=name, vertices=vertices, parts=parts
        )
        currents.add_default_ab_cell_id()
        potentials = PotentialElectrode.create(
            workspace, name=name + "_rx", vertices=vertices
        )
        n_dipoles = 9
        dipoles = []
        current_id = []
        # Build receiver dipoles for every A-B cell; skip ids that run past
        # the last vertex or straddle two survey lines (parts).
        for val in currents.ab_cell_id.values:
            cell_id = int(currents.ab_map[val]) - 1

            for dipole in range(n_dipoles):
                dipole_ids = currents.cells[cell_id, :] + 2 + dipole

                if (
                    any(dipole_ids > (potentials.n_vertices - 1))
                    or len(np.unique(parts[dipole_ids])) > 1
                ):
                    continue

                dipoles += [dipole_ids]
                current_id += [val]

        potentials.cells = np.vstack(dipoles).astype("uint32")
        potentials.add_data(
            {"fake_ab": {"values": np.random.randn(potentials.n_cells)}}
        )
        potentials.ab_cell_id = np.hstack(current_id).astype("int32")
        # Link the two survey entities before copying
        currents.potential_electrodes = potentials
        workspace.finalize()

        # Copy the survey to a new workspace
        path = Path(tempdir) / r"testDC_copy_current.geoh5"
        new_workspace = Workspace(path)
        # Copying the currents is expected to include the linked potentials
        # (both entities are read back below).
        currents.copy(parent=new_workspace)

        # Re-open the workspace and read data back in
        new_workspace = Workspace(path)
        currents_rec = new_workspace.get_entity(name)[0]
        potentials_rec = new_workspace.get_entity(name + "_rx")[0]

        # Check entities
        compare_entities(
            currents, currents_rec, ignore=["_potential_electrodes", "_parent"]
        )
        compare_entities(
            potentials, potentials_rec, ignore=["_current_electrodes", "_parent"]
        )

        # Repeat with potential entity
        path = Path(tempdir) / r"testDC_copy_potential.geoh5"
        new_workspace = Workspace(path)
        potentials.copy(parent=new_workspace)

        # Re-open the workspace and read data back in
        new_workspace = Workspace(path)
        currents_rec = new_workspace.get_entity(name)[0]
        potentials_rec = new_workspace.get_entity(name + "_rx")[0]

        # Check entities
        compare_entities(
            currents, currents_rec, ignore=["_potential_electrodes", "_parent"]
        )
        compare_entities(
            potentials, potentials_rec, ignore=["_current_electrodes", "_parent"]
        )
def test_create_drillhole_data():
    """Create a Drillhole with interval (from-to) and log (depth) data,
    check the vertex/cell bookkeeping, and verify a file round-trip.

    NOTE(review): relies on module-level imports from this file (Workspace,
    Drillhole, compare_entities, np, random, string).
    """

    well_name = "bullseye"
    n_data = 10
    collocation = 1e-5

    with tempfile.TemporaryDirectory() as tempdir:
        h5file_path = Path(tempdir) / r"testCurve.geoh5"
        # Create a workspace
        workspace = Workspace(h5file_path)
        max_depth = 100
        well = Drillhole.create(
            workspace,
            collar=np.r_[0.0, 10.0, 10],
            surveys=np.c_[np.linspace(0, max_depth, n_data),
                          np.linspace(-89, -75, n_data),
                          np.ones(n_data) * 45.0, ],
            name=well_name,
            default_collocation_distance=collocation,
        )
        # Random 8-letter labels for the referenced-data value map
        value_map = {}
        for ref in range(8):
            value_map[ref] = "".join(
                random.choice(string.ascii_lowercase) for i in range(8))

        # Create random from-to
        from_to_a = np.sort(
            np.random.uniform(low=0.05, high=max_depth, size=(50, ))).reshape(
                (-1, 2))
        # from_to_b repeats the first interval of from_to_a (collocated),
        # then adds two new intervals.
        from_to_b = np.vstack([from_to_a[0, :], [30.1, 55.5], [56.5, 80.2]])

        # Add from-to data
        data_objects = well.add_data({
            "interval_values": {
                "values": np.random.randn(from_to_a.shape[0]),
                "from-to": from_to_a,
            },
            "int_interval_list": {
                "values": [1, 2, 3],
                "from-to": from_to_b,
                "value_map": {
                    1: "Unit_A",
                    2: "Unit_B",
                    3: "Unit_C"
                },
                "type": "referenced",
            },
        })

        # Expect from_to_a's intervals plus only 2 extra from from_to_b,
        # since its first interval collocates with from_to_a's first.
        assert well.n_cells == (
            from_to_a.shape[0] +
            2), "Error with number of cells on interval data creation."
        assert well.n_vertices == (
            from_to_a.size +
            4), "Error with number of vertices on interval data creation."
        assert not np.any(np.isnan(
            well.get_data("FROM")[0].values)), "FROM values not fully set."
        assert not np.any(np.isnan(
            well.get_data("TO")[0].values)), "TO values not fully set."
        assert (well.get_data("TO")[0].values.shape[0] ==
                well.get_data("FROM")[0].values.shape[0] ==
                well.n_cells), "Shape or FROM to n_cells differ."

        # Add log-data
        data_objects += [
            well.add_data({
                "log_values": {
                    "depth": np.sort(np.random.rand(n_data) * max_depth),
                    "type": "referenced",
                    "values": np.random.randint(1, high=8, size=n_data),
                    "value_map": value_map,
                }
            })
        ]
        workspace.finalize()

        # Depth (log) data is expected to add one vertex per measurement
        new_count = from_to_a.size + 4 + n_data
        assert well.n_vertices == (
            new_count
        ), "Error with new number of vertices on log data creation."
        # Re-open the workspace and read data back in
        new_workspace = Workspace(h5file_path)
        # Check entities
        compare_entities(
            well,
            new_workspace.get_entity(well_name)[0],
            ignore=["_default_collocation_distance"],
        )
        compare_entities(
            data_objects[0],
            new_workspace.get_entity("interval_values")[0],
            ignore=["_parent"],
        )
        compare_entities(
            data_objects[1],
            new_workspace.get_entity("int_interval_list")[0],
            ignore=["_parent"],
        )
        compare_entities(
            data_objects[2],
            new_workspace.get_entity("log_values")[0],
            ignore=["_parent"],
        )
# Esempio n. 26
# 0
def test_delete_entities():
    """Remove data entities, then a whole object, and verify the workspace
    bookkeeping (children, property groups, types) both live and on re-open."""

    xyz = np.random.randn(12, 3)
    values = np.random.randn(12)

    with tempfile.TemporaryDirectory() as tempdir:
        h5file_path = str(Path(tempdir) / r"testPoints.geoh5")
        workspace = Workspace(h5file_path)

        group = ContainerGroup.create(workspace)
        first_curve = Curve.create(workspace, vertices=xyz, parent=group)
        first_curve.add_data(
            {"DataValues": {"association": "VERTEX", "values": values}}
        )

        # Second object whose first data shares a type with the first curve
        second_curve = Curve.create(workspace, vertices=xyz, parent=group)

        for period in range(4):
            attributes = {"values": np.random.randn(second_curve.n_vertices)}
            if period == 0:  # Share the data type
                attributes["entity_type"] = first_curve.children[0].entity_type
            second_curve.add_data(
                {f"Period{period + 1}": attributes}, property_group="myGroup"
            )

        # uid of the data that becomes children[0] after the first removal
        removed_uid = second_curve.children[1].uid

        workspace.remove_entity(second_curve.children[0])
        workspace.remove_entity(second_curve.children[0])

        prop_group = second_curve.find_or_create_property_group(name="myGroup")
        assert (
            removed_uid not in prop_group.properties
        ), "Data uid was not removed from the property_group"
        assert (
            len(workspace.data) == 3
        ), "Data were not fully removed from the workspace."
        assert (
            len(second_curve.children) == 2
        ), "Data were not fully removed from the parent object."
        assert (
            len(workspace.types) == 6
        ), "Data types were not properly removed from the workspace."

        # Remove entire object with data
        workspace.remove_entity(second_curve)

        del second_curve  # Needed since still referenced in current script
        collect()
        assert (
            len(workspace.groups) == 2
        ), "Group was not fully removed from the workspace."
        assert (
            len(workspace.objects) == 1
        ), "Object was not fully removed from the workspace."
        assert (
            len(workspace.data) == 1
        ), "Data were not properly removed from the workspace."
        assert (
            len(workspace.types) == 4
        ), "Data types were not properly removed from the workspace."

        # Re-open the project and check all was removed
        workspace = Workspace(h5file_path)
        assert (
            len(workspace.groups) == 2
        ), "Groups were not properly written to the workspace."
        assert (
            len(workspace.objects) == 1
        ), "Objects were not properly written to the workspace."
        assert (
            len(workspace.data) == 1
        ), "Data were not properly written to the workspace."
        assert (
            len(workspace.types) == 4
        ), "Types were not properly written to the workspace."
# Esempio n. 27
# 0
def test_create_curve_data():
    """Create a two-part Curve with vertex and cell data, round-trip it
    through the file, modify it, and round-trip it again."""

    curve_name = "TestCurve"
    n_data = 12  # number of random vertices

    with tempfile.TemporaryDirectory() as tempdir:
        h5file_path = Path(tempdir) / r"testCurve.geoh5"

        workspace = Workspace(h5file_path)

        curve = Curve.create(
            workspace, vertices=np.random.randn(n_data, 3), name=curve_name
        )

        # Re-assign the last three vertices to a second part
        part_ids = curve.parts
        part_ids[-3:] = 1
        curve.parts = part_ids

        data_objects = curve.add_data(
            {
                "vertexValues": {"values": np.random.randn(curve.n_vertices)},
                "cellValues": {"values": np.random.randn(curve.n_cells)},
            }
        )

        workspace.finalize()

        # First round-trip: re-open and compare everything
        ws2 = Workspace(h5file_path)
        obj_rec = ws2.get_entity(curve_name)[0]
        data_vert_rec = ws2.get_entity("vertexValues")[0]
        data_cell_rec = ws2.get_entity("cellValues")[0]

        compare_entities(curve, obj_rec)
        compare_entities(data_objects[0], data_vert_rec)
        compare_entities(data_objects[1], data_cell_rec)

        # Modify geometry and vertex values, then write out again
        obj_rec.vertices = np.random.randn(n_data, 3)
        data_vert_rec.values = np.random.randn(n_data)
        ws2.finalize()

        # Second round-trip against the modified entities
        ws3 = Workspace(h5file_path)
        compare_entities(obj_rec, ws3.get_entity(curve_name)[0])
        compare_entities(data_vert_rec, ws3.get_entity("vertexValues")[0])
# Esempio n. 28
# 0
def test_user_comments():
    """Attach text comments to an object and to a group, then read them
    back and check author and text fields."""

    with tempfile.TemporaryDirectory() as tempdir:
        h5file_path = Path(tempdir) / r"group_object_comment.geoh5"

        workspace = Workspace(h5file_path)

        # One comment on a Points object, with an explicit author
        object_comment = "object text comment"
        author = "John Doe"
        object_base = Points.create(workspace, name="myObject")
        object_base.add_comment(object_comment, author=author)

        workspace.finalize()

        # Re-open and verify the object comment round-trips
        ws2 = Workspace(h5file_path)
        object_base = ws2.get_entity("myObject")[0]
        first = object_base.comments.values[0]
        assert first["Author"] == author, "Issue with 'Author of object comments"
        assert first["Text"] == object_comment, "Issue with 'Text' of object comments"

        # Two comments on a group, with no explicit author
        group = ContainerGroup.create(ws2, name="myGroup")
        group_comment_1 = "group text comment"
        group_comment_2 = "my other comment"
        author = "Jane Doe"
        group.add_comment(group_comment_1)
        group.add_comment(group_comment_2)
        ws2.finalize()

        ws3 = Workspace(h5file_path)
        group_in = ws3.get_entity("myGroup")[0]
        recorded = group_in.comments.values

        # Author defaults to the workspace contributors when not supplied
        assert recorded[0]["Author"] == ",".join(
            ws3.contributors
        ), "Issue with 'Author of object comments"
        assert recorded[0]["Text"] == group_comment_1, "Issue with 'Text' of group comments"
        assert recorded[1]["Text"] == group_comment_2, "Issue with 'Text' of group comments"
# Esempio n. 29
# 0
def test_copy_entity():

    # Generate a random cloud of points
    n_data = 12
    xyz = np.random.randn(n_data, 3)

    # Create surface
    cells = np.unique(np.random.randint(0, xyz.shape[0] - 1, (xyz.shape[0], 3)), axis=1)

    objects = {
        Points: {"name": "Something", "vertices": np.random.randn(n_data, 3)},
        Surface: {
            "name": "Surface",
            "vertices": np.random.randn(n_data, 3),
            "cells": cells,
        },
        Curve: {
            "name": "Curve",
            "vertices": np.random.randn(n_data, 3),
        },
        Octree: {
            "origin": [0, 0, 0],
            "u_count": 32,
            "v_count": 16,
            "w_count": 8,
            "u_cell_size": 1.0,
            "v_cell_size": 1.0,
            "w_cell_size": 2.0,
            "rotation": 45,
        },
    }

    with tempfile.TemporaryDirectory() as tempdir:
        h5file_path = Path(tempdir) / r"testProject.geoh5"

        # Create a workspace
        workspace = Workspace(h5file_path)
        for obj, kwargs in objects.items():
            entity = obj.create(workspace, **kwargs)

            if getattr(entity, "vertices", None) is not None:
                values = np.random.randn(entity.n_vertices)
            else:
                values = np.random.randn(entity.n_cells)

            entity.add_data({"DataValues": {"values": values}})

        workspace = Workspace(h5file_path)
        new_workspace = Workspace(Path(tempdir) / r"testProject_2.geoh5")
        for entity in workspace.objects:
            entity.copy(parent=new_workspace)

        # workspace = Workspace(h5file_path)
        for entity in workspace.objects:

            # Read the data back in from a fresh workspace
            rec_entity = new_workspace.get_entity(entity.uid)[0]
            rec_data = new_workspace.get_entity(entity.children[0].uid)[0]

            compare_entities(entity, rec_entity, ignore=["_parent"])
            compare_entities(entity.children[0], rec_data, ignore=["_parent"])