def test_octree():
    """Round-trip an Octree mesh through a geoh5 file and compare."""
    mesh_name = "MyTestOctree"

    with tempfile.TemporaryDirectory() as workdir:
        file_path = Path(workdir) / r"octree.geoh5"

        # Workspace backed by a temporary geoh5 file
        ws = Workspace(file_path)

        # Octree mesh with different cell counts/sizes along each axis
        octree = Octree.create(
            ws,
            name=mesh_name,
            origin=[0, 0, 0],
            u_count=32,
            v_count=16,
            w_count=8,
            u_cell_size=1.0,
            v_cell_size=1.0,
            w_cell_size=2.0,
            rotation=45,
        )

        assert octree.n_cells == 8, "Number of octree cells after base_refine is wrong"

        # Persist the mesh to disk
        ws.save_entity(octree)
        ws.finalize()

        # Re-open the file and fetch the mesh back by name
        reloaded = Workspace(file_path).get_entity(mesh_name)[0]

        compare_entities(octree, reloaded)
# Example #2
def test_create_curve_data():
    """Create a Curve with vertex/cell data, round-trip it, then modify and re-check."""
    label = "TestCurve"

    # Size of the random vertex cloud
    num_pts = 12

    with tempfile.TemporaryDirectory() as workdir:

        file_path = Path(workdir) / r"testCurve.geoh5"

        # Workspace backed by a temporary geoh5 file
        ws = Workspace(file_path)

        curve = Curve.create(
            ws, vertices=np.random.randn(num_pts, 3), name=label
        )

        # Re-assign the last three vertices to a second part
        part_ids = curve.parts
        part_ids[-3:] = 1
        curve.parts = part_ids

        added = curve.add_data(
            {
                "vertexValues": {"values": np.random.randn(curve.n_vertices)},
                "cellValues": {"values": np.random.randn(curve.n_cells)},
            }
        )

        ws.finalize()

        # Fresh workspace on the same file to read everything back
        ws_reload = Workspace(file_path)

        rec_curve = ws_reload.get_entity(label)[0]
        rec_vertex = ws_reload.get_entity("vertexValues")[0]
        rec_cell = ws_reload.get_entity("cellValues")[0]

        # Recovered entities must match what was written
        compare_entities(curve, rec_curve)
        compare_entities(added[0], rec_vertex)
        compare_entities(added[1], rec_cell)

        # Overwrite geometry and vertex data, then persist again
        rec_curve.vertices = np.random.randn(num_pts, 3)
        rec_vertex.values = np.random.randn(num_pts)
        ws_reload.finalize()

        # Third read confirms the modifications stuck
        ws_final = Workspace(file_path)
        final_curve = ws_final.get_entity(label)[0]
        final_vertex = ws_final.get_entity("vertexValues")[0]

        compare_entities(rec_curve, final_curve)
        compare_entities(rec_vertex, final_vertex)
def test_create_group():
    """Write a ContainerGroup to file and read it back by name."""
    label = "MyTestContainer"

    with tempfile.TemporaryDirectory() as workdir:
        file_path = Path(workdir) / r"testGroup.geoh5"

        # Workspace backed by a temporary geoh5 file
        ws = Workspace(file_path)

        container = ContainerGroup.create(ws, name=label)
        ws.save_entity(container)
        ws.finalize()

        # Fetch the group back from the same workspace
        recovered = ws.get_entity(label)[0]

        compare_entities(container, recovered)
# Example #4
def test_create_grid_2d_data():
    """Create a rotated Grid2D with cell data and round-trip it."""
    label = "MyTestGrid2D"

    # Smooth 2D array of values on a 10 x 15 grid
    dim_x, dim_y = 10, 15
    values, _ = np.meshgrid(
        np.linspace(0, np.pi, dim_x), np.linspace(0, np.pi, dim_y)
    )

    with tempfile.TemporaryDirectory() as workdir:
        file_path = Path(workdir) / r"test2Grid.geoh5"

        # Workspace backed by a temporary geoh5 file
        ws = Workspace(file_path)

        grid = Grid2D.create(
            ws,
            origin=[0, 0, 0],
            u_cell_size=20.0,
            v_cell_size=30.0,
            u_count=dim_x,
            v_count=dim_y,
            name=label,
            allow_move=False,
        )

        data = grid.add_data({"DataValues": {"values": values}})
        # Rotate after the data has been attached
        grid.rotation = 45.0

        ws.finalize()

        # Fresh workspace on the same file to read everything back
        ws_reload = Workspace(file_path)

        rec_grid = ws_reload.get_entity(label)[0]

        rec_data = ws_reload.get_entity("DataValues")[0]

        compare_entities(grid, rec_grid)
        compare_entities(data, rec_data)
def test_create_surface_data():
    """Create a triangulated Surface with cell data and read it back."""
    with tempfile.TemporaryDirectory() as workdir:
        file_path = Path(workdir) / r"testSurface.geoh5"

        ws = Workspace(file_path)

        # Regular 10 x 10 grid of points with random elevations
        grid_x, grid_y = np.meshgrid(np.arange(10), np.arange(10))
        grid_x, grid_y = grid_x.ravel(), grid_y.ravel()
        elev = np.random.randn(grid_x.shape[0])

        xyz = np.c_[grid_x, grid_y, elev]

        # Random triangle connectivity over the vertices
        simplices = np.unique(
            np.random.randint(0, xyz.shape[0] - 1, (xyz.shape[0], 3)), axis=1
        )

        # One value per triangle: mean x-coordinate of its corners
        values = np.mean(
            np.c_[
                grid_x[simplices[:, 0]],
                grid_x[simplices[:, 1]],
                grid_x[simplices[:, 2]],
            ],
            axis=1,
        )

        # Build the geoh5 surface and attach the cell data
        surface = Surface.create(
            ws, name="mySurf", vertices=xyz, cells=simplices
        )

        data = surface.add_data({"TMI": {"values": values}})

        # Second workspace object over the same file reads it back
        ws_reload = Workspace(file_path)

        rec_surf = ws_reload.get_entity("mySurf")[0]
        rec_data = rec_surf.get_data("TMI")[0]

        compare_entities(surface, rec_surf)
        compare_entities(data, rec_data)
# Example #6
def test_create_reference_data():
    """Create Points with referenced (value-mapped) data and round-trip them."""
    label = "MyTestPointset"

    # Random integer values plus a random 8-letter name per unique value
    count = 12
    values = np.random.randint(1, high=8, size=count)
    value_map = {
        ref: "".join(random.choice(string.ascii_lowercase) for i in range(8))
        for ref in np.unique(values)
    }

    with tempfile.TemporaryDirectory() as workdir:
        file_path = Path(workdir) / r"testPoints.geoh5"

        # Workspace backed by a temporary geoh5 file
        ws = Workspace(file_path)

        points = Points.create(
            ws,
            vertices=np.random.randn(count, 3),
            name=label,
            allow_move=False,
        )

        data = points.add_data(
            {
                "DataValues": {
                    "type": "referenced",
                    "values": values,
                    "value_map": value_map,
                }
            }
        )

        # Fresh workspace on the same file to read everything back
        ws_reload = Workspace(file_path)
        rec_points = ws_reload.get_entity(label)[0]
        rec_data = ws_reload.get_entity("DataValues")[0]

        compare_entities(points, rec_points)
        compare_entities(data, rec_data)
def test_remove_root():
    """Verify a project whose Root group was deleted can still be loaded."""
    # Random cloud of points
    count = 12
    xyz = np.random.randn(count, 3)

    with tempfile.TemporaryDirectory() as workdir:
        file_path = Path(workdir) / r"testProject.geoh5"

        # Build a workspace with two vertex-data children grouped together
        ws = Workspace(file_path)
        points = Points.create(ws, vertices=xyz)
        data = points.add_data(
            {
                "DataValues": {
                    "association": "VERTEX",
                    "values": np.random.randn(count),
                },
                "DataValues2": {
                    "association": "VERTEX",
                    "values": np.random.randn(count),
                },
            }
        )

        group_name = "SomeGroup"
        prop_group = points.add_data_to_group(data, group_name)

        ws.finalize()

        # Strip the Root group and related structures from the HDF5 file
        with File(file_path, "r+") as project:
            base = list(project.keys())[0]
            del project[base]["Root"]
            del project[base]["Groups"]
            del project[base]["Types"]["Group types"]

        # Re-reading the damaged file must still recover the entities
        ws_reload = Workspace(file_path)

        rec_points = ws_reload.get_entity(points.name)[0]
        rec_group = rec_points.find_or_create_property_group(name=group_name)
        rec_data = ws_reload.get_entity(data[0].name)[0]

        compare_entities(
            points,
            rec_points,
            ignore=["_parent", "_existing_h5_entity", "_property_groups"],
        )
        compare_entities(
            data[0], rec_data, ignore=["_parent", "_existing_h5_entity"]
        )
        compare_entities(
            prop_group, rec_group, ignore=["_parent", "_existing_h5_entity"]
        )
# Example #8
def test_create_point_data():
    """Create Points with vertex and object data, tweak attributes, round-trip."""
    renamed = "TestName"

    # Random cloud of points and matching vertex values
    xyz = np.random.randn(12, 3)
    values = np.random.randn(12)

    with tempfile.TemporaryDirectory() as workdir:
        file_path = Path(workdir) / r"testPoints.geoh5"
        ws = Workspace(file_path)
        points = Points.create(ws, vertices=xyz, allow_move=False)
        data = points.add_data(
            {"DataValues": {"association": "VERTEX", "values": values}}
        )
        tag = points.add_data(
            {"my_comment": {"association": "OBJECT", "values": "hello_world"}}
        )

        # Flip a few attributes so round-trip differences would show up
        data.allow_delete = False
        data.allow_move = True
        data.allow_rename = False
        data.name = renamed

        # Fake ANALYST creating a StatsCache on the entity type
        with fetch_h5_handle(file_path) as h5file:
            etype_handle = H5Writer.fetch_handle(h5file, data.entity_type)
            etype_handle.create_group("StatsCache")

        # Replacing the values should invalidate the StatsCache
        data.values = values * 2.0
        ws.finalize()

        # Fresh workspace on the same file to read everything back
        ws_reload = Workspace(file_path)
        rec_points = ws_reload.get_entity("Points")[0]
        rec_data = ws_reload.get_entity(renamed)[0]
        rec_tag = ws_reload.get_entity("my_comment")[0]
        compare_entities(points, rec_points)
        compare_entities(data, rec_data)
        compare_entities(tag, rec_tag)

        with fetch_h5_handle(file_path) as h5file:
            etype_handle = H5Writer.fetch_handle(h5file, rec_data.entity_type)
            assert (
                etype_handle.get("StatsCache") is None
            ), "StatsCache was not properly deleted on update of values"
def test_create_survey_dcip():
    """Build a DC/IP survey (CurrentElectrode + PotentialElectrode), exercise
    the ab_cell_id and metadata validation paths, link the two entities both
    ways, and round-trip the survey through a geoh5 file.
    """
    name = "TestCurrents"
    n_data = 12

    with tempfile.TemporaryDirectory() as tempdir:
        path = Path(tempdir) / r"testDC.geoh5"

        # Create a workspace
        workspace = Workspace(path)

        # Create sources along line: 4 lines (parts 0-3) of n_data electrodes
        x_loc, y_loc = np.meshgrid(np.arange(n_data), np.arange(-1, 3))
        vertices = np.c_[x_loc.ravel(), y_loc.ravel(), np.zeros_like(x_loc).ravel()]
        parts = np.kron(np.arange(4), np.ones(n_data)).astype("int")
        currents = CurrentElectrode.create(
            workspace, name=name, vertices=vertices, parts=parts
        )
        currents.add_default_ab_cell_id()
        potentials = PotentialElectrode.create(
            workspace, name=name + "_rx", vertices=vertices
        )
        n_dipoles = 9
        dipoles = []
        current_id = []
        # For each source (A-B) cell, build up to n_dipoles receiver dipoles
        # offset down-line, skipping any that run off the vertex range or
        # straddle two different parts (lines).
        for val in currents.ab_cell_id.values:
            cell_id = int(currents.ab_map[val]) - 1

            for dipole in range(n_dipoles):
                dipole_ids = currents.cells[cell_id, :] + 2 + dipole

                if (
                    any(dipole_ids > (potentials.n_vertices - 1))
                    or len(np.unique(parts[dipole_ids])) > 1
                ):
                    continue

                dipoles += [dipole_ids]
                current_id += [val]

        potentials.cells = np.vstack(dipoles).astype("uint32")

        fake_ab = potentials.add_data(
            {"fake_ab": {"values": np.random.randn(potentials.n_cells)}}
        )

        # Assigning a data object (rather than an array of values) is rejected
        with pytest.raises(TypeError):
            potentials.ab_cell_id = fake_ab

        potentials.ab_cell_id = np.hstack(current_id).astype("int32")

        # Change again only the values: must update the existing data entity,
        # not create a second "A-B Cell ID" entry
        ab_data = potentials.get_data("A-B Cell ID")[0]
        new_values = ab_data.values
        new_values[0] = 5
        potentials.ab_cell_id = new_values

        assert (
            len(potentials.get_data("A-B Cell ID")) == 1
        ), "Issue with new A-B Cell ID data created"

        # Metadata with an extra key must be rejected (ValueError)
        fake_meta = {
            "Current Electrodes": uuid.uuid4(),
            "Potential Electrodes": uuid.uuid4(),
            "One too many key": uuid.uuid4(),
        }
        with pytest.raises(ValueError):
            potentials.metadata = fake_meta

        del fake_meta["One too many key"]

        # Keys are right but the uids point to nothing in the workspace
        with pytest.raises(IndexError):
            potentials.metadata = fake_meta

        fake_meta["Current Electrodes"] = currents.uid

        # Still one dangling uid left ("Potential Electrodes")
        with pytest.raises(IndexError):
            potentials.metadata = fake_meta

        fake_meta["Potential Electrodes"] = potentials.uid

        # Linking one side must update the back-reference and both metadata
        potentials.current_electrodes = currents
        assert (
            currents.potential_electrodes == potentials
        ), "Error assigning the potentiel_electrodes."
        assert (
            potentials.current_electrodes == currents
        ), "Error assigning the current_electrodes."

        assert (
            currents.metadata
            == potentials.metadata
            == {
                "Current Electrodes": currents.uid,
                "Potential Electrodes": potentials.uid,
            }
        ), "Error assigning metadata"

        # Repeat the other way; None is not a valid electrode entity
        with pytest.raises(TypeError) as info:
            potentials.current_electrodes = None
        assert info.type == TypeError, "Code did not catch TypeError"

        with pytest.raises(TypeError) as info:
            currents.potential_electrodes = None
        assert info.type == TypeError, "Code did not catch TypeError"

        # Clear the private links directly, then relink from the currents side
        setattr(potentials, "_current_electrodes", None)
        setattr(currents, "_potential_electrodes", None)

        currents.potential_electrodes = potentials
        assert (
            currents.potential_electrodes == potentials
        ), "Error assigning the potentiel_electrodes."
        assert (
            potentials.current_electrodes == currents
        ), "Error assigning the current_electrodes."

        assert (
            currents.metadata
            == potentials.metadata
            == {
                "Current Electrodes": currents.uid,
                "Potential Electrodes": potentials.uid,
            }
        ), "Error assigning metadata"
        workspace.finalize()

        # Re-open the workspace and read data back in
        new_workspace = Workspace(path)
        currents_rec = new_workspace.get_entity(name)[0]
        potentials_rec = new_workspace.get_entity(name + "_rx")[0]

        # Check entities
        compare_entities(
            currents, currents_rec, ignore=["_potential_electrodes", "_parent"]
        )
        compare_entities(
            potentials, potentials_rec, ignore=["_current_electrodes", "_parent"]
        )
def test_create_block_model_data():
    """Create a rotated BlockModel with padded tensor axes and round-trip it."""
    label = "MyTestBlockModel"

    # Core cell counts along each axis
    dim_x, dim_y, dim_z = 8, 9, 10

    def _delimiters(n_core, n_low, n_high):
        # Node positions along one axis: geometric expansion pads on each
        # side of a uniform core, accumulated from zero.
        widths = np.r_[
            np.pi / n_core * 1.5 ** np.arange(n_low)[::-1],
            np.ones(n_core) * np.pi / n_core,
            np.pi / n_core * 1.5 ** np.arange(n_high),
        ]
        return np.r_[0, np.cumsum(widths)]

    nodal_x = _delimiters(dim_x, 3, 4)
    nodal_y = _delimiters(dim_y, 5, 6)
    nodal_z = -_delimiters(dim_z, 7, 8)  # depth axis points down

    with tempfile.TemporaryDirectory() as workdir:
        file_path = Path(workdir) / r"block_model.geoh5"

        # Workspace backed by a temporary geoh5 file
        ws = Workspace(file_path)

        block_model = BlockModel.create(
            ws,
            origin=[0, 0, 0],
            u_cell_delimiters=nodal_x,
            v_cell_delimiters=nodal_y,
            z_cell_delimiters=nodal_z,
            name=label,
            rotation=30,
            allow_move=False,
        )

        # Smooth cell-centred values computed from the cell centroids
        centroids = block_model.centroids
        data = block_model.add_data(
            {
                "DataValues": {
                    "association": "CELL",
                    "values": (
                        np.cos(centroids[:, 0])
                        * np.cos(centroids[:, 1])
                        * np.cos(centroids[:, 2])
                    ),
                }
            }
        )

        # Fresh workspace on the same file to read everything back
        ws_reload = Workspace(file_path)

        rec_grid = ws_reload.get_entity(label)[0]
        rec_data = ws_reload.get_entity("DataValues")[0]

        compare_entities(block_model, rec_grid)
        compare_entities(data, rec_data)
def test_copy_entity():
    """Copy several object types (with their data) into a second workspace."""
    # Random cloud of points
    count = 12
    xyz = np.random.randn(count, 3)

    # Random triangulation used for the surface object
    cells = np.unique(
        np.random.randint(0, xyz.shape[0] - 1, (xyz.shape[0], 3)), axis=1
    )

    # One creation-kwargs entry per object class to exercise
    objects = {
        Points: {"name": "Something", "vertices": np.random.randn(count, 3)},
        Surface: {
            "name": "Surface",
            "vertices": np.random.randn(count, 3),
            "cells": cells,
        },
        Curve: {
            "name": "Curve",
            "vertices": np.random.randn(count, 3),
        },
        Octree: {
            "origin": [0, 0, 0],
            "u_count": 32,
            "v_count": 16,
            "w_count": 8,
            "u_cell_size": 1.0,
            "v_cell_size": 1.0,
            "w_cell_size": 2.0,
            "rotation": 45,
        },
    }

    with tempfile.TemporaryDirectory() as workdir:
        file_path = Path(workdir) / r"testProject.geoh5"

        # Populate a first workspace, attaching one data array per object
        ws = Workspace(file_path)
        for obj_class, kwargs in objects.items():
            entity = obj_class.create(ws, **kwargs)

            # Vertex-based objects get vertex data, grids get cell data
            if getattr(entity, "vertices", None) is not None:
                values = np.random.randn(entity.n_vertices)
            else:
                values = np.random.randn(entity.n_cells)

            entity.add_data({"DataValues": {"values": values}})

        # Re-open the source file, then copy every object to a new project
        ws = Workspace(file_path)
        ws_copy = Workspace(Path(workdir) / r"testProject_2.geoh5")
        for entity in ws.objects:
            entity.copy(parent=ws_copy)

        for entity in ws.objects:

            # Fetch the copies by uid and compare against the originals
            rec_entity = ws_copy.get_entity(entity.uid)[0]
            rec_data = ws_copy.get_entity(entity.children[0].uid)[0]

            compare_entities(entity, rec_entity, ignore=["_parent"])
            compare_entities(entity.children[0], rec_data, ignore=["_parent"])
def test_create_drillhole_data():
    """Create a Drillhole with interval (from-to) and depth-log data, check
    the vertex/cell bookkeeping, and round-trip through a geoh5 file.
    """
    well_name = "bullseye"
    n_data = 10
    # Tolerance for merging near-coincident depth vertices
    collocation = 1e-5

    with tempfile.TemporaryDirectory() as tempdir:
        h5file_path = Path(tempdir) / r"testCurve.geoh5"
        # Create a workspace
        workspace = Workspace(h5file_path)
        max_depth = 100
        # Deviated well: depth, dip and azimuth columns in the surveys array
        well = Drillhole.create(
            workspace,
            collar=np.r_[0.0, 10.0, 10],
            surveys=np.c_[np.linspace(0, max_depth, n_data),
                          np.linspace(-89, -75, n_data),
                          np.ones(n_data) * 45.0, ],
            name=well_name,
            default_collocation_distance=collocation,
        )
        # Random 8-letter label for each reference value 0-7
        value_map = {}
        for ref in range(8):
            value_map[ref] = "".join(
                random.choice(string.ascii_lowercase) for i in range(8))

        # Create random from-to: 25 sorted depth intervals, plus a second
        # set reusing the first interval and adding two new ones
        from_to_a = np.sort(
            np.random.uniform(low=0.05, high=max_depth, size=(50, ))).reshape(
                (-1, 2))
        from_to_b = np.vstack([from_to_a[0, :], [30.1, 55.5], [56.5, 80.2]])

        # Add from-to data
        data_objects = well.add_data({
            "interval_values": {
                "values": np.random.randn(from_to_a.shape[0]),
                "from-to": from_to_a,
            },
            "int_interval_list": {
                "values": [1, 2, 3],
                "from-to": from_to_b,
                "value_map": {
                    1: "Unit_A",
                    2: "Unit_B",
                    3: "Unit_C"
                },
                "type": "referenced",
            },
        })

        # from_to_b shares its first interval with from_to_a, so only its two
        # new intervals add cells (and 4 vertices)
        assert well.n_cells == (
            from_to_a.shape[0] +
            2), "Error with number of cells on interval data creation."
        assert well.n_vertices == (
            from_to_a.size +
            4), "Error with number of vertices on interval data creation."
        # Every interval vertex must have valid FROM/TO depths
        assert not np.any(np.isnan(
            well.get_data("FROM")[0].values)), "FROM values not fully set."
        assert not np.any(np.isnan(
            well.get_data("TO")[0].values)), "TO values not fully set."
        assert (well.get_data("TO")[0].values.shape[0] ==
                well.get_data("FROM")[0].values.shape[0] ==
                well.n_cells), "Shape or FROM to n_cells differ."

        # Add log-data: referenced values at n_data random depths
        data_objects += [
            well.add_data({
                "log_values": {
                    "depth": np.sort(np.random.rand(n_data) * max_depth),
                    "type": "referenced",
                    "values": np.random.randint(1, high=8, size=n_data),
                    "value_map": value_map,
                }
            })
        ]
        workspace.finalize()

        # Each log depth adds one new vertex on top of the interval vertices
        new_count = from_to_a.size + 4 + n_data
        assert well.n_vertices == (
            new_count
        ), "Error with new number of vertices on log data creation."
        # Re-open the workspace and read data back in
        new_workspace = Workspace(h5file_path)
        # Check entities
        compare_entities(
            well,
            new_workspace.get_entity(well_name)[0],
            ignore=["_default_collocation_distance"],
        )
        compare_entities(
            data_objects[0],
            new_workspace.get_entity("interval_values")[0],
            ignore=["_parent"],
        )
        compare_entities(
            data_objects[1],
            new_workspace.get_entity("int_interval_list")[0],
            ignore=["_parent"],
        )
        compare_entities(
            data_objects[2],
            new_workspace.get_entity("log_values")[0],
            ignore=["_parent"],
        )
# Example #13
def test_copy_survey_dcip():
    """Copy a linked DC/IP survey into a new workspace, starting from either
    the currents or the potentials entity, and verify both entities (and
    their link) survive the copy.
    """
    name = "TestCurrents"
    n_data = 12

    with tempfile.TemporaryDirectory() as tempdir:
        path = Path(tempdir) / r"testDC.geoh5"

        # Create a workspace
        workspace = Workspace(path)

        # Create sources along line: 4 lines (parts 0-3) of n_data electrodes
        x_loc, y_loc = np.meshgrid(np.arange(n_data), np.arange(-1, 3))
        vertices = np.c_[x_loc.ravel(), y_loc.ravel(), np.zeros_like(x_loc).ravel()]
        parts = np.kron(np.arange(4), np.ones(n_data)).astype("int")
        currents = CurrentElectrode.create(
            workspace, name=name, vertices=vertices, parts=parts
        )
        currents.add_default_ab_cell_id()
        potentials = PotentialElectrode.create(
            workspace, name=name + "_rx", vertices=vertices
        )
        n_dipoles = 9
        dipoles = []
        current_id = []
        # For each source (A-B) cell, build up to n_dipoles receiver dipoles
        # offset down-line, skipping any that run off the vertex range or
        # straddle two different parts (lines).
        for val in currents.ab_cell_id.values:
            cell_id = int(currents.ab_map[val]) - 1

            for dipole in range(n_dipoles):
                dipole_ids = currents.cells[cell_id, :] + 2 + dipole

                if (
                    any(dipole_ids > (potentials.n_vertices - 1))
                    or len(np.unique(parts[dipole_ids])) > 1
                ):
                    continue

                dipoles += [dipole_ids]
                current_id += [val]

        potentials.cells = np.vstack(dipoles).astype("uint32")
        potentials.add_data(
            {"fake_ab": {"values": np.random.randn(potentials.n_cells)}}
        )
        potentials.ab_cell_id = np.hstack(current_id).astype("int32")
        # Link the two survey entities before copying
        currents.potential_electrodes = potentials
        workspace.finalize()

        # Copy the survey to a new workspace, starting from the currents;
        # the linked potentials entity is expected to come along
        path = Path(tempdir) / r"testDC_copy_current.geoh5"
        new_workspace = Workspace(path)
        currents.copy(parent=new_workspace)

        # Re-open the workspace and read data back in
        new_workspace = Workspace(path)
        currents_rec = new_workspace.get_entity(name)[0]
        potentials_rec = new_workspace.get_entity(name + "_rx")[0]

        # Check entities
        compare_entities(
            currents, currents_rec, ignore=["_potential_electrodes", "_parent"]
        )
        compare_entities(
            potentials, potentials_rec, ignore=["_current_electrodes", "_parent"]
        )

        # Repeat with potential entity as the copy source
        path = Path(tempdir) / r"testDC_copy_potential.geoh5"
        new_workspace = Workspace(path)
        potentials.copy(parent=new_workspace)

        # Re-open the workspace and read data back in
        new_workspace = Workspace(path)
        currents_rec = new_workspace.get_entity(name)[0]
        potentials_rec = new_workspace.get_entity(name + "_rx")[0]

        # Check entities
        compare_entities(
            currents, currents_rec, ignore=["_potential_electrodes", "_parent"]
        )
        compare_entities(
            potentials, potentials_rec, ignore=["_current_electrodes", "_parent"]
        )