import random
import string
import tempfile
import uuid
from abc import ABC
from pathlib import Path

import numpy as np
import pytest
from h5py import File

# Import paths below follow geoh5py's layout at the time of writing;
# adjust them if the package structure has changed.
from geoh5py.groups import ContainerGroup
from geoh5py.io import H5Writer
from geoh5py.objects import (
    BlockModel,
    Curve,
    Drillhole,
    Grid2D,
    Octree,
    Points,
    Surface,
)
from geoh5py.objects.surveys.direct_current import CurrentElectrode, PotentialElectrode
from geoh5py.shared.utils import compare_entities, fetch_h5_handle
from geoh5py.workspace import Workspace


def test_set_parent():
    # Generate a random cloud of points
    xyz = np.random.randn(2, 3)
    name = "test_points"

    with tempfile.TemporaryDirectory() as tempdir:
        h5file_path = Path(tempdir) / r"testProject.geoh5"
        workspace = Workspace(h5file_path)
        group_a = ContainerGroup.create(workspace)
        entity = Points.create(workspace, vertices=xyz, name=name, parent=group_a)
        entity.add_data({"random": {"values": np.random.randn(xyz.shape[0])}})
        group_b = ContainerGroup.create(workspace, name="group_b")
        entity.parent = group_b

        workspace = Workspace(h5file_path)
        group_reload = workspace.get_entity("group_b")[0]
        entity_reload = workspace.get_entity(name)[0]
        data_reload = workspace.get_entity("random")[0]

        assert entity_reload.parent == group_reload, "Parent different than expected."
        assert (
            entity_reload in group_reload.children
        ), "Entity not in the list of children."
        assert (
            data_reload in entity_reload.children
        ), "Data not in list of entity children."
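# The tempdir + Workspace setup above repeats in every test of this module. A
# reusable context manager could factor it out -- a minimal sketch, assuming
# only the standard library and the Workspace class imported above; the name
# `temp_workspace` is hypothetical and not part of geoh5py.
from contextlib import contextmanager


@contextmanager
def temp_workspace(file_name: str):
    """Yield a Workspace backed by a geoh5 file inside a temporary directory."""
    with tempfile.TemporaryDirectory() as tempdir:
        yield Workspace(Path(tempdir) / file_name)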
def test_create_curve_data():
    curve_name = "TestCurve"

    # Generate a random cloud of points
    n_data = 12

    with tempfile.TemporaryDirectory() as tempdir:
        h5file_path = Path(tempdir) / r"testCurve.geoh5"

        # Create a workspace
        workspace = Workspace(h5file_path)
        curve = Curve.create(
            workspace, vertices=np.random.randn(n_data, 3), name=curve_name
        )

        # Get and change the parts
        parts = curve.parts
        parts[-3:] = 1
        curve.parts = parts

        data_objects = curve.add_data(
            {
                "vertexValues": {"values": np.random.randn(curve.n_vertices)},
                "cellValues": {"values": np.random.randn(curve.n_cells)},
            }
        )
        workspace.finalize()

        # Re-open the workspace and read data back in
        ws2 = Workspace(h5file_path)
        obj_rec = ws2.get_entity(curve_name)[0]
        data_vert_rec = ws2.get_entity("vertexValues")[0]
        data_cell_rec = ws2.get_entity("cellValues")[0]

        # Check entities
        compare_entities(curve, obj_rec)
        compare_entities(data_objects[0], data_vert_rec)
        compare_entities(data_objects[1], data_cell_rec)

        # Modify and write
        obj_rec.vertices = np.random.randn(n_data, 3)
        data_vert_rec.values = np.random.randn(n_data)
        ws2.finalize()

        # Read back and compare
        ws3 = Workspace(h5file_path)
        obj = ws3.get_entity(curve_name)[0]
        data_vertex = ws3.get_entity("vertexValues")[0]

        compare_entities(obj_rec, obj)
        compare_entities(data_vert_rec, data_vertex)
def test_remove_root():
    # Generate a random cloud of points
    n_data = 12
    xyz = np.random.randn(n_data, 3)

    with tempfile.TemporaryDirectory() as tempdir:
        h5file_path = Path(tempdir) / r"testProject.geoh5"

        # Create a workspace
        workspace = Workspace(h5file_path)
        points = Points.create(workspace, vertices=xyz)
        data = points.add_data(
            {
                "DataValues": {
                    "association": "VERTEX",
                    "values": np.random.randn(n_data),
                },
                "DataValues2": {
                    "association": "VERTEX",
                    "values": np.random.randn(n_data),
                },
            }
        )
        group_name = "SomeGroup"
        data_group = points.add_data_to_group(data, group_name)
        workspace.finalize()

        # Remove the root
        with File(h5file_path, "r+") as project:
            base = list(project.keys())[0]
            del project[base]["Root"]
            del project[base]["Groups"]
            del project[base]["Types"]["Group types"]

        # Read the data back in from a fresh workspace
        new_workspace = Workspace(h5file_path)
        rec_points = new_workspace.get_entity(points.name)[0]
        rec_group = rec_points.find_or_create_property_group(name=group_name)
        rec_data = new_workspace.get_entity(data[0].name)[0]

        compare_entities(
            points,
            rec_points,
            ignore=["_parent", "_existing_h5_entity", "_property_groups"],
        )
        compare_entities(data[0], rec_data, ignore=["_parent", "_existing_h5_entity"])
        compare_entities(
            data_group, rec_group, ignore=["_parent", "_existing_h5_entity"]
        )
def test_octree():
    name = "MyTestOctree"

    with tempfile.TemporaryDirectory() as tempdir:
        h5file_path = Path(tempdir) / r"octree.geoh5"

        # Create a workspace
        workspace = Workspace(h5file_path)

        # Create an octree mesh with variable dimensions
        mesh = Octree.create(
            workspace,
            name=name,
            origin=[0, 0, 0],
            u_count=32,
            v_count=16,
            w_count=8,
            u_cell_size=1.0,
            v_cell_size=1.0,
            w_cell_size=2.0,
            rotation=45,
        )

        assert mesh.n_cells == 8, "Number of octree cells after base_refine is wrong"

        # Refine
        workspace.save_entity(mesh)
        workspace.finalize()

        # Read the mesh back in
        new_workspace = Workspace(h5file_path)
        rec_obj = new_workspace.get_entity(name)[0]
        compare_entities(mesh, rec_obj)
def test_create_point_data():
    new_name = "TestName"

    # Generate a random cloud of points
    xyz = np.random.randn(12, 3)
    values = np.random.randn(12)

    with tempfile.TemporaryDirectory() as tempdir:
        h5file_path = Path(tempdir) / r"testPoints.geoh5"
        workspace = Workspace(h5file_path)
        points = Points.create(workspace, vertices=xyz, allow_move=False)
        data = points.add_data(
            {"DataValues": {"association": "VERTEX", "values": values}}
        )
        tag = points.add_data(
            {"my_comment": {"association": "OBJECT", "values": "hello_world"}}
        )

        # Change some data attributes for testing
        data.allow_delete = False
        data.allow_move = True
        data.allow_rename = False
        data.name = new_name

        # Fake ANALYST creating a StatsCache
        with fetch_h5_handle(h5file_path) as h5file:
            etype_handle = H5Writer.fetch_handle(h5file, data.entity_type)
            etype_handle.create_group("StatsCache")

        # Trigger replace of values
        data.values = values * 2.0
        workspace.finalize()

        # Read the data back in from a fresh workspace
        new_workspace = Workspace(h5file_path)
        rec_obj = new_workspace.get_entity("Points")[0]
        rec_data = new_workspace.get_entity(new_name)[0]
        rec_tag = new_workspace.get_entity("my_comment")[0]

        compare_entities(points, rec_obj)
        compare_entities(data, rec_data)
        compare_entities(tag, rec_tag)

        with fetch_h5_handle(h5file_path) as h5file:
            etype_handle = H5Writer.fetch_handle(h5file, rec_data.entity_type)
            assert (
                etype_handle.get("StatsCache") is None
            ), "StatsCache was not properly deleted on update of values"
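# The StatsCache checks above reach into the HDF5 structure by hand. A small
# predicate makes the intent explicit -- a sketch; `has_stats_cache` is a
# hypothetical helper that only relies on fetch_h5_handle and
# H5Writer.fetch_handle exactly as they are used in the test above.
def has_stats_cache(h5file_path, data) -> bool:
    """Return True if a StatsCache group exists under the data's entity type."""
    with fetch_h5_handle(h5file_path) as h5file:
        etype_handle = H5Writer.fetch_handle(h5file, data.entity_type)
        return etype_handle.get("StatsCache") is not None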
def test_create_property_group():
    with tempfile.TemporaryDirectory() as tempdir:
        h5file_path = Path(tempdir) / r"prop_group_test.geoh5"

        # Create a workspace
        workspace = Workspace(h5file_path)
        curve = Curve.create(
            workspace,
            vertices=np.c_[np.linspace(0, 2 * np.pi, 12), np.zeros(12), np.zeros(12)],
        )

        # Add data
        props = []
        for i in range(4):
            values = np.cos(curve.vertices[:, 0] / (i + 1))
            props += [
                curve.add_data(
                    {f"Period{i + 1}": {"values": values}}, property_group="myGroup"
                )
            ]

        # Property group object should have been created
        prop_group = curve.find_or_create_property_group(name="myGroup")

        # Create a new group by data name
        single_data_group = curve.add_data_to_group("Period1", "Singleton")

        assert (
            workspace.find_data(single_data_group.properties[0]).name == "Period1"
        ), "Failed at creating a property group by data name"

        workspace.finalize()

        # Re-open the workspace
        workspace = Workspace(h5file_path)
        rec_object = workspace.get_entity(curve.uid)[0]

        # Read the property_group back in
        rec_prop_group = rec_object.find_or_create_property_group(name="myGroup")
        attrs = rec_prop_group.attribute_map
        check_list = [
            attr
            for attr in attrs.values()
            if getattr(rec_prop_group, attr) != getattr(prop_group, attr)
        ]
        assert (
            len(check_list) == 0
        ), f"Attributes {check_list} of PropertyGroup in output differ from input"

        # Copy an object without children
        new_curve = rec_object.copy(copy_children=False)
        assert (
            new_curve.property_groups == []
        ), "Property_groups not properly removed on copy without children."
def test_user_comments():
    with tempfile.TemporaryDirectory() as tempdir:
        h5file_path = Path(tempdir) / r"group_object_comment.geoh5"

        # Create a workspace
        workspace = Workspace(h5file_path)
        object_base = Points.create(workspace, name="myObject")
        object_comment = "object text comment"
        author = "John Doe"
        object_base.add_comment(object_comment, author=author)
        workspace.finalize()

        # Read the comments back in
        ws2 = Workspace(h5file_path)
        object_base = ws2.get_entity("myObject")[0]
        assert (
            object_base.comments.values[0]["Author"] == author
        ), "Issue with 'Author' of object comments"
        assert (
            object_base.comments.values[0]["Text"] == object_comment
        ), "Issue with 'Text' of object comments"

        # Repeat with Group comments
        group = ContainerGroup.create(ws2, name="myGroup")
        group_comment_1 = "group text comment"
        group_comment_2 = "my other comment"
        group.add_comment(group_comment_1)
        group.add_comment(group_comment_2)
        ws2.finalize()

        ws3 = Workspace(h5file_path)
        group_in = ws3.get_entity("myGroup")[0]
        assert group_in.comments.values[0]["Author"] == ",".join(
            ws3.contributors
        ), "Issue with 'Author' of group comments"
        assert (
            group_in.comments.values[0]["Text"] == group_comment_1
        ), "Issue with 'Text' of group comments"
        assert (
            group_in.comments.values[1]["Text"] == group_comment_2
        ), "Issue with 'Text' of group comments"
def test_create_grid_2d_data():
    name = "MyTestGrid2D"

    # Generate a 2D array
    n_x, n_y = 10, 15
    values, _ = np.meshgrid(np.linspace(0, np.pi, n_x), np.linspace(0, np.pi, n_y))

    with tempfile.TemporaryDirectory() as tempdir:
        h5file_path = Path(tempdir) / r"test2Grid.geoh5"

        # Create a workspace
        workspace = Workspace(h5file_path)
        grid = Grid2D.create(
            workspace,
            origin=[0, 0, 0],
            u_cell_size=20.0,
            v_cell_size=30.0,
            u_count=n_x,
            v_count=n_y,
            name=name,
            allow_move=False,
        )
        data = grid.add_data({"DataValues": {"values": values}})
        grid.rotation = 45.0
        workspace.finalize()

        # Read the data back in from a fresh workspace
        new_workspace = Workspace(h5file_path)
        rec_obj = new_workspace.get_entity(name)[0]
        rec_data = new_workspace.get_entity("DataValues")[0]

        compare_entities(grid, rec_obj)
        compare_entities(data, rec_data)
def test_no_data_values():
    # Generate a random cloud of points
    n_data = 12
    xyz = np.random.randn(n_data, 3)
    float_values = np.random.randn(n_data)
    float_values[3:5] = np.nan
    int_values = np.random.randint(n_data, size=n_data).astype(float)
    int_values[2:5] = np.nan
    all_nan = np.ones(n_data)

    with tempfile.TemporaryDirectory() as tempdir:
        h5file_path = Path(tempdir) / r"testProject.geoh5"

        # Create a workspace
        workspace = Workspace(h5file_path)
        points = Points.create(workspace, vertices=xyz)
        data_objs = points.add_data(
            {
                "DataFloatValues": {"association": "VERTEX", "values": float_values},
                "DataIntValues": {"values": int_values, "type": "INTEGER"},
                "NoValues": {"association": "VERTEX"},
                "AllNanValues": {"association": "VERTEX", "values": all_nan},
            }
        )
        data_objs[-1].values = None  # Reset all values to nan
        workspace.finalize()

        # Read the data back in from a fresh workspace
        new_workspace = Workspace(h5file_path)
        for data in data_objs:
            rec_data = new_workspace.get_entity(data.name)[0]
            if data.values is None:
                assert rec_data.values is None, "Data 'values' saved should be None"
            else:
                assert all(
                    np.isnan(rec_data.values) == np.isnan(data.values)
                ), "Mismatch between input and recovered data values"
def test_create_reference_data():
    name = "MyTestPointset"

    # Generate a random cloud of points with reference values
    n_data = 12
    values = np.random.randint(1, high=8, size=n_data)
    refs = np.unique(values)
    value_map = {}
    for ref in refs:
        value_map[ref] = "".join(
            random.choice(string.ascii_lowercase) for i in range(8)
        )

    with tempfile.TemporaryDirectory() as tempdir:
        h5file_path = Path(tempdir) / r"testPoints.geoh5"

        # Create a workspace
        workspace = Workspace(h5file_path)
        points = Points.create(
            workspace, vertices=np.random.randn(n_data, 3), name=name, allow_move=False
        )
        data = points.add_data(
            {
                "DataValues": {
                    "type": "referenced",
                    "values": values,
                    "value_map": value_map,
                }
            }
        )

        new_workspace = Workspace(h5file_path)
        rec_obj = new_workspace.get_entity(name)[0]
        rec_data = new_workspace.get_entity("DataValues")[0]

        compare_entities(points, rec_obj)
        compare_entities(data, rec_data)
def test_create_group():
    group_name = "MyTestContainer"

    with tempfile.TemporaryDirectory() as tempdir:
        h5file_path = Path(tempdir) / r"testGroup.geoh5"

        # Create a workspace
        workspace = Workspace(h5file_path)
        group = ContainerGroup.create(workspace, name=group_name)
        workspace.save_entity(group)
        workspace.finalize()

        # Read the group back in
        rec_obj = workspace.get_entity(group_name)[0]
        compare_entities(group, rec_obj)
def test_create_surface_data():
    with tempfile.TemporaryDirectory() as tempdir:
        h5file_path = Path(tempdir) / r"testSurface.geoh5"
        workspace = Workspace(h5file_path)

        # Create a grid of points and triangulate
        x, y = np.meshgrid(np.arange(10), np.arange(10))
        x, y = x.ravel(), y.ravel()
        z = np.random.randn(x.shape[0])
        xyz = np.c_[x, y, z]
        simplices = np.unique(
            np.random.randint(0, xyz.shape[0] - 1, (xyz.shape[0], 3)), axis=1
        )

        # Create random data
        values = np.mean(
            np.c_[x[simplices[:, 0]], x[simplices[:, 1]], x[simplices[:, 2]]], axis=1
        )

        # Create a geoh5 surface
        surface = Surface.create(
            workspace, name="mySurf", vertices=xyz, cells=simplices
        )
        data = surface.add_data({"TMI": {"values": values}})

        # Read the object from a different workspace object on the same file
        new_workspace = Workspace(h5file_path)
        rec_obj = new_workspace.get_entity("mySurf")[0]
        rec_data = rec_obj.get_data("TMI")[0]

        compare_entities(surface, rec_obj)
        compare_entities(data, rec_data)
def test_copy_entity():
    # Generate a random cloud of points
    n_data = 12
    xyz = np.random.randn(n_data, 3)

    # Create surface
    cells = np.unique(
        np.random.randint(0, xyz.shape[0] - 1, (xyz.shape[0], 3)), axis=1
    )
    objects = {
        Points: {"name": "Something", "vertices": np.random.randn(n_data, 3)},
        Surface: {
            "name": "Surface",
            "vertices": np.random.randn(n_data, 3),
            "cells": cells,
        },
        Curve: {"name": "Curve", "vertices": np.random.randn(n_data, 3)},
        Octree: {
            "origin": [0, 0, 0],
            "u_count": 32,
            "v_count": 16,
            "w_count": 8,
            "u_cell_size": 1.0,
            "v_cell_size": 1.0,
            "w_cell_size": 2.0,
            "rotation": 45,
        },
    }

    with tempfile.TemporaryDirectory() as tempdir:
        h5file_path = Path(tempdir) / r"testProject.geoh5"

        # Create a workspace
        workspace = Workspace(h5file_path)
        for obj, kwargs in objects.items():
            entity = obj.create(workspace, **kwargs)
            if getattr(entity, "vertices", None) is not None:
                values = np.random.randn(entity.n_vertices)
            else:
                values = np.random.randn(entity.n_cells)
            entity.add_data({"DataValues": {"values": values}})

        workspace = Workspace(h5file_path)
        new_workspace = Workspace(Path(tempdir) / r"testProject_2.geoh5")
        for entity in workspace.objects:
            entity.copy(parent=new_workspace)

        for entity in workspace.objects:
            # Read the data back in from a fresh workspace
            rec_entity = new_workspace.get_entity(entity.uid)[0]
            rec_data = new_workspace.get_entity(entity.children[0].uid)[0]

            compare_entities(entity, rec_entity, ignore=["_parent"])
            compare_entities(entity.children[0], rec_data, ignore=["_parent"])
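# test_copy_entity above copies objects one at a time; the same pattern can be
# wrapped for whole projects -- a sketch; `copy_all_objects` is a hypothetical
# helper built only on Workspace.objects and Entity.copy as used above.
def copy_all_objects(source: Workspace, target: Workspace) -> list:
    """Copy every object (with its children) from `source` into `target`."""
    return [entity.copy(parent=target) for entity in source.objects]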
def test_create_survey_dcip():
    name = "TestCurrents"
    n_data = 12

    with tempfile.TemporaryDirectory() as tempdir:
        path = Path(tempdir) / r"testDC.geoh5"

        # Create a workspace
        workspace = Workspace(path)

        # Create sources along line
        x_loc, y_loc = np.meshgrid(np.arange(n_data), np.arange(-1, 3))
        vertices = np.c_[x_loc.ravel(), y_loc.ravel(), np.zeros_like(x_loc).ravel()]
        parts = np.kron(np.arange(4), np.ones(n_data)).astype("int")
        currents = CurrentElectrode.create(
            workspace, name=name, vertices=vertices, parts=parts
        )
        currents.add_default_ab_cell_id()

        potentials = PotentialElectrode.create(
            workspace, name=name + "_rx", vertices=vertices
        )
        n_dipoles = 9
        dipoles = []
        current_id = []
        for val in currents.ab_cell_id.values:
            cell_id = int(currents.ab_map[val]) - 1
            for dipole in range(n_dipoles):
                dipole_ids = currents.cells[cell_id, :] + 2 + dipole
                if (
                    any(dipole_ids > (potentials.n_vertices - 1))
                    or len(np.unique(parts[dipole_ids])) > 1
                ):
                    continue
                dipoles += [dipole_ids]
                current_id += [val]

        potentials.cells = np.vstack(dipoles).astype("uint32")

        fake_ab = potentials.add_data(
            {"fake_ab": {"values": np.random.randn(potentials.n_cells)}}
        )
        with pytest.raises(TypeError):
            potentials.ab_cell_id = fake_ab

        potentials.ab_cell_id = np.hstack(current_id).astype("int32")

        # Change again, but only the values
        ab_data = potentials.get_data("A-B Cell ID")[0]
        new_values = ab_data.values
        new_values[0] = 5
        potentials.ab_cell_id = new_values

        assert (
            len(potentials.get_data("A-B Cell ID")) == 1
        ), "Issue with new A-B Cell ID data created"

        fake_meta = {
            "Current Electrodes": uuid.uuid4(),
            "Potential Electrodes": uuid.uuid4(),
            "One too many key": uuid.uuid4(),
        }
        with pytest.raises(ValueError):
            potentials.metadata = fake_meta

        del fake_meta["One too many key"]
        with pytest.raises(IndexError):
            potentials.metadata = fake_meta

        fake_meta["Current Electrodes"] = currents.uid
        with pytest.raises(IndexError):
            potentials.metadata = fake_meta

        fake_meta["Potential Electrodes"] = potentials.uid
        potentials.current_electrodes = currents

        assert (
            currents.potential_electrodes == potentials
        ), "Error assigning the potential_electrodes."
        assert (
            potentials.current_electrodes == currents
        ), "Error assigning the current_electrodes."
        assert (
            currents.metadata
            == potentials.metadata
            == {
                "Current Electrodes": currents.uid,
                "Potential Electrodes": potentials.uid,
            }
        ), "Error assigning metadata"

        # Repeat the other way
        with pytest.raises(TypeError) as info:
            potentials.current_electrodes = None
        assert info.type == TypeError, "Code did not catch TypeError"

        with pytest.raises(TypeError) as info:
            currents.potential_electrodes = None
        assert info.type == TypeError, "Code did not catch TypeError"

        setattr(potentials, "_current_electrodes", None)
        setattr(currents, "_potential_electrodes", None)
        currents.potential_electrodes = potentials

        assert (
            currents.potential_electrodes == potentials
        ), "Error assigning the potential_electrodes."
        assert (
            potentials.current_electrodes == currents
        ), "Error assigning the current_electrodes."
        assert (
            currents.metadata
            == potentials.metadata
            == {
                "Current Electrodes": currents.uid,
                "Potential Electrodes": potentials.uid,
            }
        ), "Error assigning metadata"

        workspace.finalize()

        # Re-open the workspace and read data back in
        new_workspace = Workspace(path)
        currents_rec = new_workspace.get_entity(name)[0]
        potentials_rec = new_workspace.get_entity(name + "_rx")[0]

        # Check entities
        compare_entities(
            currents, currents_rec, ignore=["_potential_electrodes", "_parent"]
        )
        compare_entities(
            potentials, potentials_rec, ignore=["_current_electrodes", "_parent"]
        )
def test_create_block_model_data():
    name = "MyTestBlockModel"

    # Generate a 3D array
    n_x, n_y, n_z = 8, 9, 10

    nodal_x = np.r_[
        0,
        np.cumsum(
            np.r_[
                np.pi / n_x * 1.5 ** np.arange(3)[::-1],
                np.ones(n_x) * np.pi / n_x,
                np.pi / n_x * 1.5 ** np.arange(4),
            ]
        ),
    ]
    nodal_y = np.r_[
        0,
        np.cumsum(
            np.r_[
                np.pi / n_y * 1.5 ** np.arange(5)[::-1],
                np.ones(n_y) * np.pi / n_y,
                np.pi / n_y * 1.5 ** np.arange(6),
            ]
        ),
    ]
    nodal_z = -np.r_[
        0,
        np.cumsum(
            np.r_[
                np.pi / n_z * 1.5 ** np.arange(7)[::-1],
                np.ones(n_z) * np.pi / n_z,
                np.pi / n_z * 1.5 ** np.arange(8),
            ]
        ),
    ]

    with tempfile.TemporaryDirectory() as tempdir:
        h5file_path = Path(tempdir) / r"block_model.geoh5"

        # Create a workspace
        workspace = Workspace(h5file_path)
        grid = BlockModel.create(
            workspace,
            origin=[0, 0, 0],
            u_cell_delimiters=nodal_x,
            v_cell_delimiters=nodal_y,
            z_cell_delimiters=nodal_z,
            name=name,
            rotation=30,
            allow_move=False,
        )
        data = grid.add_data(
            {
                "DataValues": {
                    "association": "CELL",
                    "values": (
                        np.cos(grid.centroids[:, 0])
                        * np.cos(grid.centroids[:, 1])
                        * np.cos(grid.centroids[:, 2])
                    ),
                }
            }
        )

        # Read the data back in from a fresh workspace
        new_workspace = Workspace(h5file_path)
        rec_obj = new_workspace.get_entity(name)[0]
        rec_data = new_workspace.get_entity("DataValues")[0]

        compare_entities(grid, rec_obj)
        compare_entities(data, rec_data)
def test_modify_property_group():
    def compare_objects(object_a, object_b, ignore=None):
        if ignore is None:
            ignore = ["_workspace", "_children", "_parent"]
        for attr in object_a.__dict__.keys():
            if attr in ignore:
                continue
            if isinstance(getattr(object_a, attr[1:]), ABC):
                compare_objects(
                    getattr(object_a, attr[1:]), getattr(object_b, attr[1:])
                )
            else:
                assert np.all(
                    getattr(object_a, attr[1:]) == getattr(object_b, attr[1:])
                ), f"Output attribute {attr[1:]} for {object_a} does not match input {object_b}"

    obj_name = "myCurve"
    # Generate a curve with multiple data
    xyz = np.c_[np.linspace(0, 2 * np.pi, 12), np.zeros(12), np.zeros(12)]

    with tempfile.TemporaryDirectory() as tempdir:
        h5file_path = Path(tempdir) / r"prop_group_test.geoh5"

        # Create a workspace
        workspace = Workspace(h5file_path)
        curve = Curve.create(workspace, vertices=xyz, name=obj_name)

        # Add data
        props = []
        for i in range(4):
            values = np.cos(xyz[:, 0] / (i + 1))
            props += [
                curve.add_data(
                    {f"Period{i + 1}": {"values": values}}, property_group="myGroup"
                )
            ]

        children_list = curve.get_data_list()
        assert all(
            f"Period{i + 1}" in children_list for i in range(4)
        ), "Missing data children"

        # Property group object should have been created
        prop_group = curve.find_or_create_property_group(name="myGroup")

        # Remove some of the props from the group
        curve.remove_data_from_group(children_list[0], name="myGroup")
        curve.remove_data_from_group(props[-2:], name="myGroup")

        assert len(prop_group.properties) == 1, "Error removing a property_group"

        workspace.finalize()

        # Re-open the workspace
        workspace = Workspace(h5file_path)

        # Read the property_group back in
        rec_curve = workspace.get_entity(obj_name)[0]
        rec_prop_group = rec_curve.find_or_create_property_group(name="myGroup")
        compare_objects(rec_prop_group, prop_group)

        fetch_group = workspace.fetch_property_groups(rec_curve)
        assert len(fetch_group) == 1, "Issues reading property groups from workspace"
        compare_objects(fetch_group[0], prop_group)
def test_copy_survey_dcip():
    name = "TestCurrents"
    n_data = 12

    with tempfile.TemporaryDirectory() as tempdir:
        path = Path(tempdir) / r"testDC.geoh5"

        # Create a workspace
        workspace = Workspace(path)

        # Create sources along line
        x_loc, y_loc = np.meshgrid(np.arange(n_data), np.arange(-1, 3))
        vertices = np.c_[x_loc.ravel(), y_loc.ravel(), np.zeros_like(x_loc).ravel()]
        parts = np.kron(np.arange(4), np.ones(n_data)).astype("int")
        currents = CurrentElectrode.create(
            workspace, name=name, vertices=vertices, parts=parts
        )
        currents.add_default_ab_cell_id()

        potentials = PotentialElectrode.create(
            workspace, name=name + "_rx", vertices=vertices
        )
        n_dipoles = 9
        dipoles = []
        current_id = []
        for val in currents.ab_cell_id.values:
            cell_id = int(currents.ab_map[val]) - 1
            for dipole in range(n_dipoles):
                dipole_ids = currents.cells[cell_id, :] + 2 + dipole
                if (
                    any(dipole_ids > (potentials.n_vertices - 1))
                    or len(np.unique(parts[dipole_ids])) > 1
                ):
                    continue
                dipoles += [dipole_ids]
                current_id += [val]

        potentials.cells = np.vstack(dipoles).astype("uint32")
        potentials.add_data(
            {"fake_ab": {"values": np.random.randn(potentials.n_cells)}}
        )
        potentials.ab_cell_id = np.hstack(current_id).astype("int32")
        currents.potential_electrodes = potentials
        workspace.finalize()

        # Copy the survey to a new workspace
        path = Path(tempdir) / r"testDC_copy_current.geoh5"
        new_workspace = Workspace(path)
        currents.copy(parent=new_workspace)

        # Re-open the workspace and read data back in
        new_workspace = Workspace(path)
        currents_rec = new_workspace.get_entity(name)[0]
        potentials_rec = new_workspace.get_entity(name + "_rx")[0]

        # Check entities
        compare_entities(
            currents, currents_rec, ignore=["_potential_electrodes", "_parent"]
        )
        compare_entities(
            potentials, potentials_rec, ignore=["_current_electrodes", "_parent"]
        )

        # Repeat with the potential entity
        path = Path(tempdir) / r"testDC_copy_potential.geoh5"
        new_workspace = Workspace(path)
        potentials.copy(parent=new_workspace)

        # Re-open the workspace and read data back in
        new_workspace = Workspace(path)
        currents_rec = new_workspace.get_entity(name)[0]
        potentials_rec = new_workspace.get_entity(name + "_rx")[0]

        # Check entities
        compare_entities(
            currents, currents_rec, ignore=["_potential_electrodes", "_parent"]
        )
        compare_entities(
            potentials, potentials_rec, ignore=["_current_electrodes", "_parent"]
        )
def test_create_drillhole_data():
    well_name = "bullseye"
    n_data = 10
    collocation = 1e-5

    with tempfile.TemporaryDirectory() as tempdir:
        h5file_path = Path(tempdir) / r"testCurve.geoh5"

        # Create a workspace
        workspace = Workspace(h5file_path)
        max_depth = 100
        well = Drillhole.create(
            workspace,
            collar=np.r_[0.0, 10.0, 10],
            surveys=np.c_[
                np.linspace(0, max_depth, n_data),
                np.linspace(-89, -75, n_data),
                np.ones(n_data) * 45.0,
            ],
            name=well_name,
            default_collocation_distance=collocation,
        )
        value_map = {}
        for ref in range(8):
            value_map[ref] = "".join(
                random.choice(string.ascii_lowercase) for i in range(8)
            )

        # Create random from-to
        from_to_a = np.sort(
            np.random.uniform(low=0.05, high=max_depth, size=(50,))
        ).reshape((-1, 2))
        from_to_b = np.vstack([from_to_a[0, :], [30.1, 55.5], [56.5, 80.2]])

        # Add from-to data
        data_objects = well.add_data(
            {
                "interval_values": {
                    "values": np.random.randn(from_to_a.shape[0]),
                    "from-to": from_to_a,
                },
                "int_interval_list": {
                    "values": [1, 2, 3],
                    "from-to": from_to_b,
                    "value_map": {1: "Unit_A", 2: "Unit_B", 3: "Unit_C"},
                    "type": "referenced",
                },
            }
        )

        assert well.n_cells == (
            from_to_a.shape[0] + 2
        ), "Error with number of cells on interval data creation."
        assert well.n_vertices == (
            from_to_a.size + 4
        ), "Error with number of vertices on interval data creation."
        assert not np.any(
            np.isnan(well.get_data("FROM")[0].values)
        ), "FROM values not fully set."
        assert not np.any(
            np.isnan(well.get_data("TO")[0].values)
        ), "TO values not fully set."
        assert (
            well.get_data("TO")[0].values.shape[0]
            == well.get_data("FROM")[0].values.shape[0]
            == well.n_cells
        ), "Shape of FROM and TO values differs from n_cells."

        # Add log-data
        data_objects += [
            well.add_data(
                {
                    "log_values": {
                        "depth": np.sort(np.random.rand(n_data) * max_depth),
                        "type": "referenced",
                        "values": np.random.randint(1, high=8, size=n_data),
                        "value_map": value_map,
                    }
                }
            )
        ]
        workspace.finalize()

        new_count = from_to_a.size + 4 + n_data
        assert well.n_vertices == (
            new_count
        ), "Error with new number of vertices on log data creation."

        # Re-open the workspace and read data back in
        new_workspace = Workspace(h5file_path)

        # Check entities
        compare_entities(
            well,
            new_workspace.get_entity(well_name)[0],
            ignore=["_default_collocation_distance"],
        )
        compare_entities(
            data_objects[0],
            new_workspace.get_entity("interval_values")[0],
            ignore=["_parent"],
        )
        compare_entities(
            data_objects[1],
            new_workspace.get_entity("int_interval_list")[0],
            ignore=["_parent"],
        )
        compare_entities(
            data_objects[2],
            new_workspace.get_entity("log_values")[0],
            ignore=["_parent"],
        )
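# The drillhole test above relies on well-formed from-to intervals (each row
# spanning a positive depth range). A quick validator for such tables -- a
# sketch; `valid_intervals` is a hypothetical helper, not part of geoh5py.
def valid_intervals(from_to: np.ndarray) -> bool:
    """Check that every [from, to] row has from < to and non-negative depths."""
    return bool(np.all(from_to[:, 1] > from_to[:, 0]) and np.all(from_to >= 0))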