def small_grid_and_surface(tmp_model: Model) -> Tuple[grr.RegularGrid, rqs.Surface]:
    """Creates a small RegularGrid and a random triangular surface."""
    crs = Crs(tmp_model)
    crs.create_xml()

    extent = 10
    extent_kji = (extent, extent, extent)
    dxyz = (1.0, 1.0, 1.0)
    crs_uuid = crs.uuid
    title = "small_grid"
    grid = grr.RegularGrid(tmp_model,
                           extent_kji=extent_kji,
                           dxyz=dxyz,
                           crs_uuid=crs_uuid,
                           title=title)
    grid.create_xml()

    n_points = 100
    points = np.random.rand(n_points, 3) * extent
    triangles = tri.dt(points)
    surface = rqs.Surface(tmp_model, crs_uuid=crs_uuid, title="small_surface")
    surface.set_from_triangles_and_points(triangles, points)
    surface.triangles_and_points()
    surface.write_hdf5()
    surface.create_xml()

    tmp_model.store_epc()
    return grid, surface
def test_find_faces_to_represent_surface_regular_wrapper(
        small_grid_and_surface: Tuple[RegularGrid, Surface]):
    # Arrange
    grid, surface = small_grid_and_surface
    grid_epc = surface_epc = grid.model.epc_file
    grid_uuid = grid.uuid
    surface_uuid = surface.uuid
    name = "test"
    input_index = 0
    use_index_as_realisation = False

    # Act
    index, success, epc_file, uuid_list = find_faces_to_represent_surface_regular_wrapper(
        input_index, use_index_as_realisation, grid_epc, grid_uuid, surface_epc,
        surface_uuid, name)
    model = Model(epc_file=epc_file)
    rm_tree("tmp_dir")

    # Assert
    assert success is True
    assert index == input_index
    assert len(model.uuids(obj_type='LocalDepth3dCrs')) == 1
    assert len(model.uuids(obj_type='IjkGridRepresentation')) == 1
    assert len(model.uuids(obj_type='TriangulatedSetRepresentation')) == 1
    assert len(model.uuids(obj_type='GridConnectionSetRepresentation')) == 1
    assert len(model.uuids(obj_type='FaultInterpretation')) == 1
    assert len(model.uuids(obj_type='TectonicBoundaryFeature')) == 1
    assert len(model.uuids()) == 6
    assert len(uuid_list) == 3
def test_MdDatum(example_model_and_crs):
    # Set up a new datum
    model, crs = example_model_and_crs
    epc = model.epc_file
    data = dict(
        location=(0, -99999, 3.14),
        md_reference='mean low water',
    )
    datum = resqpy.well.MdDatum(parent_model=model, crs_uuid=crs.uuid, **data)
    uuid = datum.uuid

    # Save to disk and reload
    datum.create_part()
    model.store_epc()
    del model, crs, datum
    model2 = Model(epc_file=epc)
    datum2 = resqpy.well.MdDatum(parent_model=model2, uuid=uuid)

    for key, expected_value in data.items():
        assert getattr(datum2, key) == expected_value, f"Issue with {key}"

    identical = resqpy.well.MdDatum(parent_model=model2, crs_uuid=datum2.crs_uuid, **data)
    data['md_reference'] = 'kelly bushing'
    different = resqpy.well.MdDatum(parent_model=model2, crs_uuid=datum2.crs_uuid, **data)
    assert identical == datum2
    assert different != datum2
def test_volume_multiple_property_collection(example_model_with_properties: Model):
    # Arrange
    grid = example_model_with_properties.grid()
    extent = grid.extent_kji
    property_collection = grid.property_collection
    volume_array_gross = np.random.random(extent)
    property_collection.add_cached_array_to_imported_list(volume_array_gross,
                                                          'test data',
                                                          'DZ',
                                                          property_kind='rock volume',
                                                          facet_type='netgross',
                                                          facet='gross')
    volume_array_net = np.random.random(extent) / 2
    property_collection.add_cached_array_to_imported_list(volume_array_net,
                                                          'test data',
                                                          'DZ',
                                                          property_kind='rock volume',
                                                          facet_type='netgross',
                                                          facet='net')
    property_collection.write_hdf5_for_imported_list()
    property_collection.create_xml_for_imported_list_and_add_parts_to_model()
    if hasattr(grid, 'array_volume'):
        delattr(grid, 'array_volume')

    # Act
    volume = cp.volume(grid, property_collection=property_collection)

    # Assert
    np.testing.assert_array_almost_equal(volume, volume_array_gross)
def test_thickness_property_collection(example_model_with_properties: Model):
    # Arrange
    grid = example_model_with_properties.grid()
    extent = grid.extent_kji
    property_collection = grid.property_collection
    thickness_array = np.random.random(extent)
    property_collection.add_cached_array_to_imported_list(thickness_array,
                                                          'test data',
                                                          'DZ',
                                                          False,
                                                          uom=grid.z_units(),
                                                          property_kind='cell length',
                                                          facet_type='direction',
                                                          indexable_element='cells',
                                                          facet='K')
    property_collection.write_hdf5_for_imported_list()
    property_collection.create_xml_for_imported_list_and_add_parts_to_model()
    if hasattr(grid, 'array_thickness'):
        delattr(grid, 'array_thickness')

    # Act
    thickness = cp.thickness(grid, property_collection=property_collection)

    # Assert
    np.testing.assert_array_almost_equal(thickness, thickness_array)
def test_WellboreFrame(example_model_and_crs):
    # Test that all attributes are correctly saved and loaded from disk

    # --------- Arrange ----------
    # Create a WellboreFrame object in memory
    # Load example model from a fixture
    model, crs = example_model_and_crs
    epc_path = model.epc_file

    # Create a trajectory
    well_name = 'Banoffee'
    elevation = 100
    datum = resqpy.well.MdDatum(parent_model = model,
                                crs_uuid = crs.uuid,
                                location = (0, 0, -elevation),
                                md_reference = 'kelly bushing')
    mds = np.array([300, 310, 330])
    zs = mds - elevation
    source_dataframe = pd.DataFrame({
        'MD': mds,
        'X': [1, 2, 3],
        'Y': [1, 2, 3],
        'Z': zs,
    })
    trajectory = resqpy.well.Trajectory(parent_model = model,
                                        data_frame = source_dataframe,
                                        well_name = well_name,
                                        md_datum = datum,
                                        length_uom = 'm')
    trajectory.write_hdf5()
    trajectory.create_xml()

    # Create a wellbore frame object
    wellbore_frame_mds = np.array([305, 315])
    wellbore_frame = resqpy.well.WellboreFrame(parent_model = model,
                                               trajectory = trajectory,
                                               mds = wellbore_frame_mds,
                                               title = 'WellboreFrame_1',
                                               originator = 'Person_1')

    # ----------- Act ---------
    # Save to disk
    wellbore_frame.write_hdf5()
    wellbore_frame.create_xml()
    wellbore_frame_uuid = wellbore_frame.uuid
    model.store_epc()
    model.h5_release()

    # Clear memory
    del model, datum, trajectory, wellbore_frame

    # Reload from disk
    model2 = Model(epc_file = epc_path)
    wellbore_frame_2 = resqpy.well.WellboreFrame(parent_model = model2, uuid = wellbore_frame_uuid)

    # ----------- Assert ---------
    assert wellbore_frame_2.node_count == 2
    np.testing.assert_equal(wellbore_frame_2.node_mds, wellbore_frame_mds)
def test_face_centre_invalid_axis(example_model_with_properties: Model):
    # Arrange
    grid = example_model_with_properties.grid()
    cell = (1, 1, 1)
    axis = 4
    zero_or_one = 0

    # Act & Assert
    with pytest.raises(ValueError):
        ff.face_centre(grid, cell, axis, zero_or_one)
def test_thickness_from_points(example_model_with_properties: Model):
    # Arrange
    grid = example_model_with_properties.grid()
    if hasattr(grid, 'array_thickness'):
        delattr(grid, 'array_thickness')
    if hasattr(grid, 'property_collection'):
        delattr(grid, 'property_collection')

    # Act
    thickness = cp.thickness(grid)

    # Assert
    np.testing.assert_array_almost_equal(thickness, 20.0)
def test_volume_from_points(example_model_with_properties: Model):
    # Arrange
    # note: guards and delattr calls must name the same attributes,
    # mirroring test_thickness_from_points above
    grid = example_model_with_properties.grid()
    if hasattr(grid, 'array_volume'):
        delattr(grid, 'array_volume')
    if hasattr(grid, 'property_collection'):
        delattr(grid, 'property_collection')

    # Act
    volume = cp.volume(grid)

    # Assert
    np.testing.assert_array_almost_equal(volume, 100000.0)
def test_Trajectory_load_from_xml(example_model_and_crs):
    # --------- Arrange ----------
    model, crs = example_model_and_crs
    epc_path = model.epc_file
    elevation = 100

    # Create a measured depth datum
    datum = resqpy.well.MdDatum(parent_model = model,
                                crs_uuid = crs.uuid,
                                location = (0, 0, -elevation),
                                md_reference = 'kelly bushing')
    datum.create_xml()
    mds = np.array([300, 310, 330, 340])
    zs = mds - elevation
    well_name = 'JubJub'
    source_dataframe = pd.DataFrame({
        'MD': mds,
        'X': [100, 120, 140, 160],
        'Y': [345, 365, 386, 400],
        'Z': zs,
        'WELL': ['JubJub', 'JubJub', 'JubJub', 'JubJub']
    })

    # Create a trajectory from dataframe
    trajectory = resqpy.well.Trajectory(parent_model = model,
                                        data_frame = source_dataframe,
                                        well_name = well_name,
                                        md_datum = datum,
                                        length_uom = 'm')

    # --------- Act ----------
    # Save to disk
    trajectory.write_hdf5()
    trajectory.create_xml()
    trajectory_uuid = trajectory.uuid
    model.store_epc()
    model.h5_release()

    # Reload from disk
    model2 = Model(epc_file = epc_path)
    trajectory2 = resqpy.well.Trajectory(model2, uuid = trajectory_uuid, set_tangent_vectors = True)

    # --------- Assert --------------
    # Check all attributes were loaded from disk correctly
    assert trajectory2 is not None
    assert trajectory2.well_name == well_name
    np.testing.assert_almost_equal(trajectory2.measured_depths, mds)
    assert trajectory2.md_datum == datum
    assert trajectory2.md_uom == 'm'
def example_fine_coarse_model(example_model_and_crs):
    model, crs = example_model_and_crs

    coarse_grid = grr.RegularGrid(parent_model=model,
                                  origin=(0, 0, 0),
                                  extent_kji=(3, 5, 5),
                                  crs_uuid=crs.uuid,
                                  dxyz=(10, 10, 10))
    coarse_grid.cache_all_geometry_arrays()
    coarse_grid.write_hdf5_from_caches(file=model.h5_file_name(file_must_exist=False), mode='w')
    coarse_grid.create_xml(ext_uuid=model.h5_uuid(),
                           title='Coarse',
                           write_geometry=True,
                           add_cell_length_properties=True)

    fine_grid = grr.RegularGrid(parent_model=model,
                                origin=(0, 0, 0),
                                extent_kji=(6, 10, 10),
                                crs_uuid=crs.uuid,
                                dxyz=(5, 5, 5))
    fine_grid.cache_all_geometry_arrays()
    fine_grid.write_hdf5_from_caches(file=model.h5_file_name(file_must_exist=True), mode='a')
    fine_grid.create_xml(ext_uuid=model.h5_uuid(),
                         title='Fine',
                         write_geometry=True,
                         add_cell_length_properties=True)

    model.store_epc()
    model = Model(model.epc_file)

    coarse = grr.Grid(parent_model=model, uuid=coarse_grid.uuid)
    fine = grr.Grid(parent_model=model, uuid=fine_grid.uuid)

    fc = rqfc.FineCoarse(fine_extent_kji=(6, 10, 10), coarse_extent_kji=(3, 5, 5))
    fc.set_all_ratios_constant()
    fc.set_all_proportions_equal()

    return model, coarse, fine, fc
def test_organize_classes(tmp_model, cls, data):
    # Load example model from a fixture
    model = tmp_model
    epc = model.epc_file

    # Create the feature
    obj = cls(parent_model = model, **data)
    uuid = obj.uuid

    # Save to disk
    obj.create_xml()
    model.store_epc()
    model.h5_release()

    # Reload from disk
    del model, obj
    model2 = Model(epc_file = epc)
    obj2 = cls(parent_model = model2, uuid = uuid)

    # Check all attributes were loaded correctly
    for key, expected_value in data.items():
        assert getattr(obj2, key) == expected_value, f"Error for {key}"
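# test_organize_classes above receives `cls` and `data` via pytest parametrisation.
# A minimal sketch of how it might be driven, reusing the feature classes and keyword
# arguments that appear in test_from_dataframe_and_dataframe below; this particular
# parameter list is an illustrative assumption, not the project's own:
#
# @pytest.mark.parametrize("cls, data", [
#     (rqo.TectonicBoundaryFeature, dict(kind = 'fault', feature_name = 'fault_feature_1')),
#     (rqo.GeneticBoundaryFeature, dict(kind = 'horizon', feature_name = 'horizon_feature_1')),
# ])
# def test_organize_classes(tmp_model, cls, data):
#     ...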
def faulted_grid(test_data_path) -> Grid:
    # Locate the repository's test_data folder relative to this source file
    current_filename = os.path.split(getsourcefile(lambda: 0))[0]
    base_folder = os.path.dirname(os.path.dirname(current_filename))
    epc_file = base_folder + '/test_data/wren/wren.epc'
    model = Model(epc_file=epc_file)
    return model.grid(title='faulted grid')
def example_model_with_prop_ts_rels(tmp_path):
    """Model with a grid (5x5x3) and properties.

    Properties:
    - Zone (discrete)
    - VPC (discrete)
    - Fault block (discrete)
    - Facies (discrete)
    - NTG (continuous)
    - POR (continuous)
    - SW (continuous) (recurrent)
    """
    model_path = str(tmp_path / 'test_model.epc')
    model = Model(create_basics=True,
                  create_hdf5_ext=True,
                  epc_file=model_path,
                  new_epc=True)
    model.store_epc(model.epc_file)

    grid = grr.RegularGrid(parent_model=model,
                           origin=(0, 0, 0),
                           extent_kji=(3, 5, 5),
                           crs_uuid=rqet.uuid_for_part_root(model.crs_root),
                           set_points_cached=True)
    grid.cache_all_geometry_arrays()
    grid.write_hdf5_from_caches(file=model.h5_file_name(file_must_exist=False), mode='w')
    grid.create_xml(ext_uuid=model.h5_uuid(),
                    title='grid',
                    write_geometry=True,
                    add_cell_length_properties=False)
    model.store_epc()

    zone = np.ones(shape=(5, 5), dtype='int')
    zone_array = np.array([zone, zone + 1, zone + 2], dtype='int')

    vpc = np.array([[1, 1, 1, 2, 2], [1, 1, 1, 2, 2], [1, 1, 1, 2, 2],
                    [1, 1, 1, 2, 2], [1, 1, 1, 2, 2]], dtype='int')
    vpc_array = np.array([vpc, vpc, vpc], dtype='int')

    facies = np.array([[1, 1, 1, 2, 2], [1, 1, 2, 2, 2], [1, 2, 2, 2, 3],
                       [2, 2, 2, 3, 3], [2, 2, 3, 3, 3]], dtype='int')
    facies_array = np.array([facies, facies, facies], dtype='int')

    perm = np.array([[1, 1, 1, 10, 10], [1, 1, 1, 10, 10], [1, 1, 1, 10, 10],
                     [1, 1, 1, 10, 10], [1, 1, 1, 10, 10]])
    perm_array = np.array([perm, perm, perm], dtype='float')

    fb = np.array([[1, 1, 1, 1, 1], [1, 1, 1, 1, 1], [1, 1, 1, 1, 1],
                   [2, 2, 2, 2, 2], [2, 2, 2, 2, 2]], dtype='int')
    fb_array = np.array([fb, fb, fb], dtype='int')

    ntg = np.array([[0, 0.5, 0, 0.5, 0], [0.5, 0, 0.5, 0, 0.5],
                    [0, 0.5, 0, 0.5, 0], [0.5, 0, 0.5, 0, 0.5],
                    [0, 0.5, 0, 0.5, 0]])
    ntg1_array = np.array([ntg, ntg, ntg])
    ntg2_array = np.array([ntg + 0.1, ntg + 0.1, ntg + 0.1])

    por = np.array([[1, 1, 1, 1, 1], [0.5, 0.5, 0.5, 0.5, 0.5],
                    [1, 1, 1, 1, 1], [0.5, 0.5, 0.5, 0.5, 0.5],
                    [1, 1, 1, 1, 1]])
    por1_array = np.array([por, por, por])
    por2_array = np.array([por - 0.1, por - 0.1, por - 0.1])

    sat = np.array([[1, 0.5, 1, 0.5, 1], [1, 0.5, 1, 0.5, 1],
                    [1, 0.5, 1, 0.5, 1], [1, 0.5, 1, 0.5, 1],
                    [1, 0.5, 1, 0.5, 1]])
    sat1_array = np.array([sat, sat, sat])
    sat2_array = np.array([sat, sat, np.where(sat == 0.5, 0.75, sat)])
    sat3_array = np.array([
        np.where(sat == 0.5, 0.75, sat),
        np.where(sat == 0.5, 0.75, sat),
        np.where(sat == 0.5, 0.75, sat)
    ])

    collection = rqp.GridPropertyCollection()
    collection.set_grid(grid)

    ts = rqts.TimeSeries(parent_model=model, first_timestamp='2000-01-01Z')
    ts.extend_by_days(365)
    ts.extend_by_days(365)
    ts.create_xml()

    lookup = rqp.StringLookup(parent_model=model,
                              int_to_str_dict={
                                  1: 'channel',
                                  2: 'interbedded',
                                  3: 'shale'
                              })
    lookup.create_xml()
    model.store_epc()

    # Add non-varying properties
    for array, name, kind, discrete, facet_type, facet in zip(
        [zone_array, vpc_array, fb_array, perm_array],
        ['Zone', 'VPC', 'Fault block', 'Perm'],
        ['discrete', 'discrete', 'discrete', 'permeability rock'],
        [True, True, True, False],
        [None, None, None, 'direction'],
        [None, None, None, 'J']):
        collection.add_cached_array_to_imported_list(cached_array=array,
                                                     source_info='',
                                                     keyword=name,
                                                     discrete=discrete,
                                                     uom=None,
                                                     time_index=None,
                                                     null_value=None,
                                                     property_kind=kind,
                                                     facet_type=facet_type,
                                                     facet=facet,
                                                     realization=None)
    collection.write_hdf5_for_imported_list()
    collection.create_xml_for_imported_list_and_add_parts_to_model()

    # Add realisation varying properties
    for array, name, kind, rel in zip(
        [ntg1_array, por1_array, ntg2_array, por2_array],
        ['NTG', 'POR', 'NTG', 'POR'],
        ['net to gross ratio', 'porosity', 'net to gross ratio', 'porosity'],
        [0, 0, 1, 1]):
        collection.add_cached_array_to_imported_list(cached_array=array,
                                                     source_info='',
                                                     keyword=name,
                                                     discrete=False,
                                                     uom=None,
                                                     time_index=None,
                                                     null_value=None,
                                                     property_kind=kind,
                                                     facet_type=None,
                                                     facet=None,
                                                     realization=rel)
    collection.write_hdf5_for_imported_list()
    collection.create_xml_for_imported_list_and_add_parts_to_model()

    # Add categorical property
    collection.add_cached_array_to_imported_list(cached_array=facies_array,
                                                 source_info='',
                                                 keyword='Facies',
                                                 discrete=True,
                                                 uom=None,
                                                 time_index=None,
                                                 null_value=None,
                                                 property_kind='discrete',
                                                 facet_type=None,
                                                 facet=None,
                                                 realization=None)
    collection.write_hdf5_for_imported_list()
    collection.create_xml_for_imported_list_and_add_parts_to_model(string_lookup_uuid=lookup.uuid)

    # Add time varying properties
    for array, ts_index in zip([sat1_array, sat2_array, sat3_array], [0, 1, 2]):
        collection.add_cached_array_to_imported_list(cached_array=array,
                                                     source_info='',
                                                     keyword='SW',
                                                     discrete=False,
                                                     uom=None,
                                                     time_index=ts_index,
                                                     null_value=None,
                                                     property_kind='saturation',
                                                     facet_type='what',
                                                     facet='water',
                                                     realization=None)
    collection.write_hdf5_for_imported_list()
    collection.create_xml_for_imported_list_and_add_parts_to_model(time_series_uuid=ts.uuid)
    model.store_epc()

    return model
def example_model_with_properties(tmp_path):
    """Model with a grid (5x5x3) and properties.

    Properties:
    - Zone (discrete)
    - VPC (discrete)
    - Fault block (discrete)
    - Facies (discrete)
    - NTG (continuous)
    - POR (continuous)
    - SW (continuous)
    """
    model_path = str(tmp_path / 'test_no_rels.epc')
    model = Model(create_basics=True,
                  create_hdf5_ext=True,
                  epc_file=model_path,
                  new_epc=True)
    model.store_epc(model.epc_file)

    grid = grr.RegularGrid(parent_model=model,
                           origin=(0, 0, 0),
                           extent_kji=(3, 5, 5),
                           crs_uuid=rqet.uuid_for_part_root(model.crs_root),
                           set_points_cached=True)
    grid.cache_all_geometry_arrays()
    grid.write_hdf5_from_caches(file=model.h5_file_name(file_must_exist=False), mode='w')
    grid.create_xml(ext_uuid=model.h5_uuid(),
                    title='grid',
                    write_geometry=True,
                    add_cell_length_properties=False)
    model.store_epc()

    zone = np.ones(shape=(5, 5))
    zone_array = np.array([zone, zone + 1, zone + 2], dtype='int')

    vpc = np.array([[1, 1, 1, 2, 2], [1, 1, 1, 2, 2], [1, 1, 1, 2, 2],
                    [1, 1, 1, 2, 2], [1, 1, 1, 2, 2]])
    vpc_array = np.array([vpc, vpc, vpc])

    facies = np.array([[1, 1, 1, 2, 2], [1, 1, 2, 2, 2], [1, 2, 2, 2, 3],
                       [2, 2, 2, 3, 3], [2, 2, 3, 3, 3]])
    facies_array = np.array([facies, facies, facies])

    fb = np.array([[1, 1, 1, 1, 1], [1, 1, 1, 1, 1], [1, 1, 1, 1, 1],
                   [2, 2, 2, 2, 2], [2, 2, 2, 2, 2]])
    fb_array = np.array([fb, fb, fb])

    ntg = np.array([[0, 0.5, 0, 0.5, 0], [0.5, 0, 0.5, 0, 0.5],
                    [0, 0.5, 0, 0.5, 0], [0.5, 0, 0.5, 0, 0.5],
                    [0, 0.5, 0, 0.5, 0]])
    ntg_array = np.array([ntg, ntg, ntg])

    por = np.array([[1, 1, 1, 1, 1], [0.5, 0.5, 0.5, 0.5, 0.5],
                    [1, 1, 1, 1, 1], [0.5, 0.5, 0.5, 0.5, 0.5],
                    [1, 1, 1, 1, 1]])
    por_array = np.array([por, por, por])

    sat = np.array([[1, 0.5, 1, 0.5, 1], [1, 0.5, 1, 0.5, 1],
                    [1, 0.5, 1, 0.5, 1], [1, 0.5, 1, 0.5, 1],
                    [1, 0.5, 1, 0.5, 1]])
    sat_array = np.array([sat, sat, sat])

    perm = np.array([[1, 10, 10, 100, 100], [1, 10, 10, 100, 100],
                     [1, 10, 10, 100, 100], [1, 10, 10, 100, 100],
                     [1, 10, 10, 100, 100]])
    perm_array = np.array([perm, perm, perm], dtype='float')
    perm_v_array = perm_array * 0.1

    collection = rqp.GridPropertyCollection()
    collection.set_grid(grid)

    for array, name, kind, discrete, facet_type, facet in zip(
        [zone_array, vpc_array, fb_array, facies_array, ntg_array,
         por_array, sat_array, perm_array, perm_v_array],
        ['Zone', 'VPC', 'Fault block', 'Facies', 'NTG', 'POR', 'SW', 'Perm', 'PERMZ'],
        ['discrete', 'discrete', 'discrete', 'discrete', 'net to gross ratio',
         'porosity', 'saturation', 'rock permeability', 'permeability rock'],
        [True, True, True, True, False, False, False, False, False],
        [None, None, None, None, None, None, None, 'direction', 'direction'],
        [None, None, None, None, None, None, None, 'I', 'K']):
        collection.add_cached_array_to_imported_list(cached_array=array,
                                                     source_info='',
                                                     keyword=name,
                                                     discrete=discrete,
                                                     uom=None,
                                                     time_index=None,
                                                     null_value=None,
                                                     property_kind=kind,
                                                     facet_type=facet_type,
                                                     facet=facet,
                                                     realization=None)
    collection.write_hdf5_for_imported_list()
    collection.create_xml_for_imported_list_and_add_parts_to_model()
    model.store_epc()

    return model
def find_faces_to_represent_surface_regular_wrapper(
    index: int,
    use_index_as_realisation: bool,
    grid_epc: str,
    grid_uuid: Union[UUID, str],
    surface_epc: str,
    surface_uuid: Union[UUID, str],
    name: str,
    title: Optional[str] = None,
    centres: Optional[np.ndarray] = None,
    agitate: bool = False,
    feature_type: str = 'fault',
    progress_fn: Optional[Callable] = None,
    consistent_side: bool = False,
    return_properties: Optional[List[str]] = None,
) -> Tuple[int, bool, str, List[Union[UUID, str]]]:
    """Wrapper function of find_faces_to_represent_surface_regular_optimised.

    Used for multiprocessing to create a new model that is saved in a temporary epc file
    and returns the required values, which are used in the multiprocessing function to
    recombine all the objects into a single epc file.

    Args:
        index (int): the index of the function call from the multiprocessing function.
        use_index_as_realisation (bool): if True, uses the index number as the realization
            number on the property collection.
        grid_epc (str): epc file path where the grid is saved.
        grid_uuid (UUID/str): UUID (universally unique identifier) of the grid object.
        surface_epc (str): epc file path where the surface is saved.
        surface_uuid (UUID/str): UUID (universally unique identifier) of the surface object.
        name (str): the feature name to use in the grid connection set.
        title (str): the citation title to use for the grid connection set; defaults to name.
        centres (np.ndarray, shape (nk, nj, ni, 3)): precomputed cell centre points in local
            grid space, to avoid possible crs issues; required if the grid's crs includes an
            origin (offset).
        agitate (bool): if True, the points of the surface are perturbed by a small random
            offset, which can help if the surface has been built from a regular mesh with a
            periodic resonance with the grid.
        feature_type (str, default 'fault'): one of 'fault', 'horizon', or 'geobody boundary'.
        progress_fn (Callable): a callback function to be called at intervals by this
            function; the argument will progress from 0.0 to 1.0 in unspecified and uneven
            increments.
        consistent_side (bool): if True, the cell pairs will be ordered so that all the first
            cells in each pair are on one side of the surface, and all the second cells on
            the other.
        return_properties (List[str]): if present, a list of property arrays to calculate and
            return as a dictionary; recognised values in the list are 'triangle', 'offset'
            and 'normal vector'; triangle is an index into the surface triangles of the
            triangle detected for the gcs face; offset is a measure of the distance between
            the centre of the cell face and the intersection point of the inter-cell centre
            vector with a triangle in the surface; normal vector is a unit vector normal to
            the surface triangle; each array has an entry for each face in the gcs; the
            returned dictionary has the passed strings as keys and numpy arrays as values.

    Returns:
        Tuple containing:

        - index (int): the index passed to the function.
        - success (bool): whether the function call was successful, whatever that
          definition is.
        - epc_file (str): the epc file path where the objects are stored.
        - uuid_list (List[str]): list of UUIDs of relevant objects.
    """
    surface = Surface(parent_model=Model(surface_epc), uuid=str(surface_uuid))

    # Create a new temporary model holding copies of the grid and surface
    tmp_dir = Path(f"tmp_dir/{uuid.uuid4()}")
    tmp_dir.mkdir(parents=True, exist_ok=True)
    epc_file = f"{tmp_dir}/wrapper.epc"
    model = new_model(epc_file=epc_file)
    model.copy_uuid_from_other_model(Model(grid_epc), uuid=str(grid_uuid))
    model.copy_uuid_from_other_model(surface.model, uuid=str(surface_uuid))
    grid = RegularGrid(parent_model=model, uuid=str(grid_uuid))

    uuid_list = []
    uuid_list.extend([grid_uuid, surface_uuid])

    print("About to call function")
    returns = rqgs.find_faces_to_represent_surface_regular_optimised(
        grid,
        surface,
        name,
        title,
        centres,
        agitate,
        feature_type,
        progress_fn,
        consistent_side,
        return_properties,
    )
    print("Function returned")

    if return_properties is not None:
        gcs = returns[0]
        properties = returns[1]
        realisation = index if use_index_as_realisation else None
        property_collection = PropertyCollection(support=gcs)
        for name, array in properties.items():
            if name == "normal vector":
                property_collection.add_cached_array_to_imported_list(
                    array,
                    "from find_faces function",
                    name,
                    discrete=False,
                    uom="Euc",
                    property_kind="continuous",
                    realization=realisation,
                    indexable_element="faces",
                    points=True,
                )
            elif name == "triangle":
                property_collection.add_cached_array_to_imported_list(
                    array,
                    "from find_faces function",
                    name,
                    discrete=True,
                    null_value=-1,
                    property_kind="discrete",
                    realization=realisation,
                    indexable_element="faces",
                )
            elif name == "offset":
                property_collection.add_cached_array_to_imported_list(
                    array,
                    "from find_faces function",
                    name,
                    discrete=False,
                    uom=grid.crs.z_units,
                    property_kind="continuous",
                    realization=realisation,
                    indexable_element="faces",
                )
        property_collection.write_hdf5_for_imported_list()
        uuids_properties = property_collection.create_xml_for_imported_list_and_add_parts_to_model()
        uuid_list.extend(uuids_properties)
    else:
        gcs = returns

    success = gcs.count > 0

    gcs.write_hdf5()
    gcs.create_xml()
    model.copy_uuid_from_other_model(gcs.model, uuid=gcs.uuid)
    uuid_list.append(gcs.uuid)

    model.store_epc()

    return index, success, epc_file, uuid_list
def test_from_dataframe_and_dataframe(example_model_and_crs):
    # Test that a WellboreMarkerFrame object can be correctly instantiated from a source
    # dataframe and verify that the dataframe generated by the dataframe() method matches
    # the source dataframe

    # --------- Arrange ----------
    # Create a WellboreMarkerFrame object in memory
    # Load example model from a fixture
    model, crs = example_model_and_crs
    epc_path = model.epc_file

    # Create a trajectory
    well_name = 'Banoffee'
    elevation = 100
    datum = resqpy.well.MdDatum(parent_model=model,
                                crs_uuid=crs.uuid,
                                location=(0, 0, -elevation),
                                md_reference='kelly bushing')
    mds = np.array([300.0, 310.0, 330.0])
    zs = mds - elevation
    source_dataframe = pd.DataFrame({
        'MD': mds,
        'X': [150.0, 165.0, 180.0],
        'Y': [240.0, 260.0, 290.0],
        'Z': zs,
    })
    trajectory = resqpy.well.Trajectory(parent_model=model,
                                        data_frame=source_dataframe,
                                        well_name=well_name,
                                        md_datum=datum,
                                        length_uom='m')
    trajectory.write_hdf5()
    trajectory.create_xml()
    trajectory_uuid = trajectory.uuid

    # Create features and interpretations
    horizon_feature_1 = rqo.GeneticBoundaryFeature(parent_model=model,
                                                   kind='horizon',
                                                   feature_name='horizon_feature_1')
    horizon_feature_1.create_xml()
    horizon_interp_1 = rqo.HorizonInterpretation(parent_model=model,
                                                 title='horizon_interp_1',
                                                 genetic_boundary_feature=horizon_feature_1,
                                                 sequence_stratigraphy_surface='flooding',
                                                 boundary_relation_list=['conformable'])
    horizon_interp_1.create_xml()

    woc_feature_1 = rqo.FluidBoundaryFeature(parent_model=model,
                                             kind='water oil contact',
                                             feature_name='woc_1')
    # fluid boundary feature does not have an associated interpretation
    woc_feature_1.create_xml()

    fault_feature_1 = rqo.TectonicBoundaryFeature(parent_model=model,
                                                  kind='fault',
                                                  feature_name='fault_feature_1')
    fault_feature_1.create_xml()
    fault_interp_1 = rqo.FaultInterpretation(parent_model=model,
                                             title='fault_interp_1',
                                             tectonic_boundary_feature=fault_feature_1,
                                             is_normal=True,
                                             maximum_throw=15)
    fault_interp_1.create_xml()

    df = pd.DataFrame({
        'MD': [400.0, 410.0, 430.0],
        'Boundary_Feature_Type': ['horizon', 'water oil contact', 'fault'],
        'Marker_Citation_Title': ['marker_horizon_1', 'marker_woc_1', 'marker_fault_1'],
        'Interp_Citation_Title': ['horizon_interp_1', None, 'fault_interp_1'],
    })

    # Create a wellbore marker frame from a dataframe
    wellbore_marker_frame = resqpy.well.WellboreMarkerFrame.from_dataframe(
        parent_model=model,
        dataframe=df,
        trajectory_uuid=trajectory_uuid,
        title='WBF1',
        originator='Human',
        extra_metadata={'target_reservoir': 'treacle'})

    # --------- Act ----------
    # Save to disk
    wellbore_marker_frame.write_hdf5()
    wellbore_marker_frame.create_xml()
    wmf_uuid = wellbore_marker_frame.uuid  # called after create_xml method as it can alter the uuid

    # get the uuids of each of the markers
    marker_uuids = []
    for marker in wellbore_marker_frame.marker_list:
        marker_uuids.append(marker.uuid)

    model.store_epc()
    model.h5_release()

    # Clear memory
    del model, wellbore_marker_frame, datum, trajectory

    # Reload from disk
    model2 = Model(epc_file=epc_path)
    wellbore_marker_frame2 = resqpy.well.WellboreMarkerFrame(parent_model=model2, uuid=wmf_uuid)

    # Get the uuids of each of the markers
    marker_uuids2 = []
    for marker in wellbore_marker_frame2.marker_list:
        marker_uuids2.append(marker.uuid)

    # Create a dataframe from the attributes of the new wellbore marker frame object
    df2 = wellbore_marker_frame2.dataframe()
    df2_filtered_cols = df2[[
        'MD', 'Boundary_Feature_Type', 'Marker_Citation_Title', 'Interp_Citation_Title'
    ]]

    # --------- Assert ----------
    # test that the attributes were reloaded correctly
    assert bu.matching_uuids(wellbore_marker_frame2.trajectory_uuid, trajectory_uuid)
    assert wellbore_marker_frame2.node_count == len(wellbore_marker_frame2.node_mds) == len(
        wellbore_marker_frame2.marker_list) == 3
    assert wellbore_marker_frame2.title == 'WBF1'
    assert wellbore_marker_frame2.originator == 'Human'
    assert wellbore_marker_frame2.extra_metadata == {'target_reservoir': 'treacle'}
    np.testing.assert_almost_equal(wellbore_marker_frame2.node_mds, np.array([400.0, 410.0, 430.0]))
    for uuid1, uuid2 in zip(marker_uuids, marker_uuids2):
        assert bu.matching_uuids(uuid1, uuid2)

    # test that the generated dataframe contains the same data as the original df
    pd.testing.assert_frame_equal(df, df2_filtered_cols, check_dtype=False)
def function_multiprocessing(
    function: Callable,
    kwargs_list: List[Dict[str, Any]],
    recombined_epc: Union[Path, str],
    cluster,
    consolidate: bool = True,
) -> List[bool]:
    """Calls a function concurrently with the specified arguments.

    A multiprocessing pool is used to call the function multiple times in parallel. Once
    all results are returned, they are combined into a single epc file.

    Args:
        function (Callable): the function to be called. Needs to return:

            - index (int): the index of the kwargs in the kwargs_list.
            - success (bool): whether the function call was successful, whatever that
              definition is.
            - epc_file (Path/str): the epc file path where the objects are stored.
            - uuid_list (List[str]): list of UUIDs of relevant objects.

        kwargs_list (List[Dict[str, Any]]): A list of keyword argument dictionaries that are
            used when calling the function.
        recombined_epc (Path/str): A pathlib Path or path string of where the combined epc
            will be saved.
        cluster (LocalCluster/JobQueueCluster): a LocalCluster is a Dask cluster on a local
            machine. If using a job queuing system, a JobQueueCluster can be used such as an
            SGECluster, SLURMCluster, PBSCluster, LSFCluster etc.
        consolidate (bool): if True and an equivalent part already exists in a model, it is
            not duplicated and the uuids are noted as equivalent.

    Returns:
        success_list (List[bool]): A boolean list of successful function calls.

    Note:
        This function uses the Dask backend to run the given function in parallel, so a Dask
        cluster must be set up and passed as an argument. Dask will need to be installed in
        the Python environment because it is not a dependency of the project. More info can
        be found at https://docs.dask.org/en/latest/deploying.html
    """
    log.info("Multiprocessing function called with %s function.", function.__name__)

    for i, kwargs in enumerate(kwargs_list):
        kwargs["index"] = i

    with parallel_backend("dask"):
        results = Parallel()(delayed(function)(**kwargs) for kwargs in kwargs_list)
    log.info("Function calls complete.")

    # Sorting the results by the original kwargs_list index.
    results = list(sorted(results, key = lambda x: x[0]))

    success_list = [result[1] for result in results]
    epc_list = [result[2] for result in results]
    uuids_list = [result[3] for result in results]
    log.info("Number of successes: %s/%s.", sum(success_list), len(results))

    epc_file = Path(str(recombined_epc))
    if epc_file.is_file():
        model_recombined = Model(epc_file = str(epc_file))
    else:
        model_recombined = new_model(epc_file = str(epc_file))

    log.info("Creating the recombined epc file.")
    for i, epc in enumerate(epc_list):
        if epc is None:
            continue
        # The epc may not be visible on a shared filesystem immediately; retry until found.
        while True:
            try:
                model = Model(epc_file = epc)
                break
            except FileNotFoundError:
                time.sleep(1)
                continue
        uuids = uuids_list[i]
        if uuids is None:
            uuids = model.uuids()
        for uuid in uuids:
            model_recombined.copy_uuid_from_other_model(model, uuid = uuid, consolidate = consolidate)

    # Deleting temporary directory.
    log.info("Deleting the temporary directory")
    rm_tree("tmp_dir")

    model_recombined.store_epc()
    log.info("Recombined epc file complete.")

    return success_list
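# A minimal usage sketch for function_multiprocessing, assuming Dask (with its
# distributed scheduler) is installed. The cluster sizing, file paths and uuid
# arguments below are illustrative placeholders, not values from this project;
# the "index" key is injected by function_multiprocessing itself, so it is
# omitted from each kwargs dict.
def example_function_multiprocessing_usage(grid_epc, grid_uuid, surface_epc, surface_uuids):
    from dask.distributed import Client, LocalCluster

    cluster = LocalCluster(n_workers = 4)  # local machine; swap for a JobQueueCluster on HPC
    client = Client(cluster)  # the joblib "dask" backend picks up this active client

    # one function call per surface, all against the same grid
    kwargs_list = [
        dict(use_index_as_realisation = False,
             grid_epc = grid_epc,
             grid_uuid = grid_uuid,
             surface_epc = surface_epc,
             surface_uuid = surface_uuid,
             name = 'fault_surface')
        for surface_uuid in surface_uuids
    ]
    success_list = function_multiprocessing(find_faces_to_represent_surface_regular_wrapper,
                                            kwargs_list,
                                            recombined_epc = 'combined.epc',
                                            cluster = cluster)
    client.close()
    cluster.close()
    return success_list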
def test_DeviationSurvey(example_model_with_well, tmp_path):
    # Test that all attributes are correctly saved and loaded from disk

    # --------- Arrange ----------
    # Create a Deviation Survey object in memory
    # Load example model from a fixture
    model, well_interp, datum, traj = example_model_with_well
    epc_path = model.epc_file

    # Create 3 copies of the survey, using different initialisers
    data = dict(
        title = 'Majestic Umlaut ö',
        originator = 'Thor, god of sparkles',
        md_uom = 'ft',
        angle_uom = 'rad',
        is_final = True,
    )
    array_data = dict(
        measured_depths = np.array([1, 2, 3], dtype = float) + 1000.0,
        azimuths = np.array([4, 5, 6], dtype = float),
        inclinations = np.array([1, 2, 3], dtype = float),
        first_station = np.array([0, -1, 999], dtype = float),
    )
    survey = resqpy.well.DeviationSurvey(
        parent_model = model,
        represented_interp = well_interp,
        md_datum = datum,
        **data,
        **array_data,
    )
    survey_uuid = survey.uuid

    df = pd.DataFrame(columns = ['MD', 'AZIM_GN', 'INCL', 'X', 'Y', 'Z'])
    for col, key in zip(('MD', 'AZIM_GN', 'INCL'), ('measured_depths', 'azimuths', 'inclinations')):
        df[col] = array_data[key]
    for axis, col in enumerate(('X', 'Y', 'Z')):
        df[col] = np.NaN
        df.loc[0, col] = array_data['first_station'][axis]
    survey_b = resqpy.well.DeviationSurvey.from_data_frame(parent_model = model,
                                                           data_frame = df,
                                                           md_datum = datum,
                                                           md_uom = data['md_uom'],
                                                           angle_uom = data['angle_uom'])
    survey_b_uuid = survey_b.uuid

    csv_file = os.path.join(tmp_path, 'survey_c.csv')
    df.to_csv(csv_file)
    survey_c = resqpy.well.DeviationSurvey.from_ascii_file(parent_model = model,
                                                           deviation_survey_file = csv_file,
                                                           md_datum = datum,
                                                           md_uom = data['md_uom'],
                                                           angle_uom = data['angle_uom'])
    survey_c_uuid = survey_c.uuid

    # ----------- Act ---------
    # Save to disk
    for s in (survey, survey_b, survey_c):
        s.write_hdf5()
        s.create_xml()
    model.store_epc()
    model.h5_release()

    # Clear memory
    del model, well_interp, datum, traj, survey, survey_b, survey_c

    # Reload from disk
    model2 = Model(epc_file = epc_path)
    survey2 = resqpy.well.DeviationSurvey(model2, uuid = survey_uuid)
    survey_b2 = resqpy.well.DeviationSurvey(model2, uuid = survey_b_uuid)
    survey_c2 = resqpy.well.DeviationSurvey(model2, uuid = survey_c_uuid)

    # --------- Assert --------------
    # Check all attributes were loaded from disk correctly
    for key, expected_value in data.items():
        assert getattr(survey2, key) == expected_value, f"Error for {key}"
        if 'uom' in key:
            for s in (survey_b2, survey_c2):
                assert getattr(s, key) == expected_value, f"Error for {key}"
    for s in (survey2, survey_b2, survey_c2):
        for key, expected_value in array_data.items():
            assert_array_almost_equal(getattr(s, key), expected_value, err_msg = f"Error for {key}")
        assert s.station_count == len(array_data['azimuths'])
class ModelContext:
    """Context manager for easy opening and closing of resqpy models.

    When a model is opened this way, any open file handles are safely closed
    when the "with" clause exits. Optionally, the epc can be written back to
    disk upon exit.

    Example::

        with ModelContext("my_model.epc", mode="rw") as model:
            print(model.uuids())

    Note:
        The "write_hdf5" and "create_xml" methods of individual resqpy objects
        still need to be invoked as usual.
    """

    def __init__(self, epc_file, mode="r") -> None:
        """Open a resqml file, safely closing file handles upon exit.

        The modes operate as follows:

        - In "read" mode, an existing epc file is opened. Any changes are not
          saved to disk automatically, but can still be saved by calling
          `model.store_epc()`.
        - In "read/write" mode, changes are written to disk when the context exits.
        - In "create" mode, a new model is created and saved upon exit. Any
          existing model will be deleted.

        Args:
            epc_file (str): path to existing resqml file
            mode (str): one of "read", "read/write", "create", or shorthands
                "r", "rw", "c".
        """
        # Validate mode
        modes_mapping = {"r": "read", "rw": "read/write", "c": "create"}
        mode = modes_mapping.get(mode, mode)
        if mode not in modes_mapping.values():
            raise ValueError(f"Unexpected mode '{mode}'")

        self.epc_file = epc_file
        self.mode = mode
        self._model: Optional[Model] = None

    def __enter__(self) -> Model:
        """Enter the runtime context, return a model."""
        if self.mode in ["read", "read/write"]:
            if not os.path.exists(self.epc_file):
                raise FileNotFoundError(self.epc_file)
            self._model = Model(epc_file=str(self.epc_file))
        else:
            assert self.mode == "create"
            # Delete any existing epc and its associated hdf5 file
            for file in [self.epc_file, self.epc_file[:-4] + '.h5']:
                if os.path.exists(file):
                    os.remove(file)
                    log.info('old file deleted: ' + str(file))
            self._model = new_model(self.epc_file)

        return self._model

    def __exit__(self, exc_type, exc_value, exc_tb):
        """Exit the runtime context, close the model."""
        # Only write to disk if no exception has occurred
        if self.mode in ["read/write", "create"] and exc_type is None:
            self._model.store_epc()

        # Release file handles
        self._model.h5_release()
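# A brief usage sketch of ModelContext in "create" mode, complementing the "rw"
# example in the class docstring. The file name is an illustrative placeholder
# and Crs is assumed to be imported as in the fixtures above. "create" mode
# deletes any existing sketch_model.epc and its matching .h5 file, and
# store_epc() is called automatically on a clean exit.
def example_model_context_usage():
    with ModelContext("sketch_model.epc", mode = "create") as model:
        crs = Crs(model)
        crs.create_xml()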