def small_grid_and_surface( tmp_model: Model) -> Tuple[grr.RegularGrid, rqs.Surface]:
    """Creates a small RegularGrid and a random triangular surface."""
    crs = Crs(tmp_model)
    crs.create_xml()

    grid_extent = 10  # cells along each of the k, j, i axes; unit cell size
    grid = grr.RegularGrid(tmp_model,
                           extent_kji=(grid_extent, grid_extent, grid_extent),
                           dxyz=(1.0, 1.0, 1.0),
                           crs_uuid=crs.uuid,
                           title="small_grid")
    grid.create_xml()

    # 100 random points within the grid's xyz extent, Delaunay-triangulated
    random_points = np.random.rand(100, 3) * grid_extent
    surface = rqs.Surface(tmp_model, crs_uuid=crs.uuid, title="small_surface")
    surface.set_from_triangles_and_points(tri.dt(random_points), random_points)
    surface.triangles_and_points()
    surface.write_hdf5()
    surface.create_xml()

    tmp_model.store_epc()
    return grid, surface
def example_model_with_prop_ts_rels(tmp_path):
    """Model with a grid (5x5x3) and properties.

    Properties:
    - Zone (discrete)
    - VPC (discrete)
    - Fault block (discrete)
    - Facies (discrete)
    - NTG (continuous)
    - POR (continuous)
    - SW (continuous) (recurrent)

    Returns:
        Model: model (also stored to epc under tmp_path) containing the grid,
        a 3-timestamp time series, a facies string lookup, and the properties
        listed above.
    """
    model_path = str(tmp_path / 'test_model.epc')
    model = Model(create_basics=True, create_hdf5_ext=True, epc_file=model_path, new_epc=True)
    model.store_epc(model.epc_file)
    # Regular grid, extent_kji = (3, 5, 5) i.e. 3 layers of 5x5 cells, sharing
    # the model's CRS; geometry is cached, written to hdf5, then registered via xml.
    grid = grr.RegularGrid(parent_model=model,
                           origin=(0, 0, 0),
                           extent_kji=(3, 5, 5),
                           crs_uuid=rqet.uuid_for_part_root(model.crs_root),
                           set_points_cached=True)
    grid.cache_all_geometry_arrays()
    grid.write_hdf5_from_caches(file=model.h5_file_name(file_must_exist=False), mode='w')
    grid.create_xml(ext_uuid=model.h5_uuid(),
                    title='grid',
                    write_geometry=True,
                    add_cell_length_properties=False)
    model.store_epc()
    # Per-layer arrays below are built as a 5x5 (j, i) map stacked 3 times (k).
    # Zone index: layer k holds value k + 1.
    zone = np.ones(shape=(5, 5), dtype='int')
    zone_array = np.array([zone, zone + 1, zone + 2], dtype='int')
    # VPC: three columns of 1 then two columns of 2, identical in every layer.
    vpc = np.array([[1, 1, 1, 2, 2], [1, 1, 1, 2, 2], [1, 1, 1, 2, 2], [1, 1, 1, 2, 2], [1, 1, 1, 2, 2]],
                   dtype='int')
    vpc_array = np.array([vpc, vpc, vpc], dtype='int')
    # Facies: diagonal banding of values 1, 2, 3, identical in every layer.
    facies = np.array([[1, 1, 1, 2, 2], [1, 1, 2, 2, 2], [1, 2, 2, 2, 3], [2, 2, 2, 3, 3], [2, 2, 3, 3, 3]],
                      dtype='int')
    facies_array = np.array([facies, facies, facies], dtype='int')
    # Permeability: 1 mD in the first three columns, 10 mD in the last two.
    perm = np.array([[1, 1, 1, 10, 10], [1, 1, 1, 10, 10], [1, 1, 1, 10, 10], [1, 1, 1, 10, 10],
                     [1, 1, 1, 10, 10]])
    perm_array = np.array([perm, perm, perm], dtype='float')
    # Fault block: rows 0-2 in block 1, rows 3-4 in block 2.
    fb = np.array([[1, 1, 1, 1, 1], [1, 1, 1, 1, 1], [1, 1, 1, 1, 1], [2, 2, 2, 2, 2], [2, 2, 2, 2, 2]],
                  dtype='int')
    fb_array = np.array([fb, fb, fb], dtype='int')
    # NTG checkerboard of 0 and 0.5; second realisation is shifted up by 0.1.
    ntg = np.array([[0, 0.5, 0, 0.5, 0], [0.5, 0, 0.5, 0, 0.5], [0, 0.5, 0, 0.5, 0], [0.5, 0, 0.5, 0, 0.5],
                    [0, 0.5, 0, 0.5, 0]])
    ntg1_array = np.array([ntg, ntg, ntg])
    ntg2_array = np.array([ntg + 0.1, ntg + 0.1, ntg + 0.1])
    # POR alternating rows of 1 and 0.5; second realisation reduced by 0.1.
    por = np.array([[1, 1, 1, 1, 1], [0.5, 0.5, 0.5, 0.5, 0.5], [1, 1, 1, 1, 1], [0.5, 0.5, 0.5, 0.5, 0.5],
                    [1, 1, 1, 1, 1]])
    por1_array = np.array([por, por, por])
    por2_array = np.array([por - 0.1, por - 0.1, por - 0.1])
    # Water saturation at three time indices: the 0.5 cells step up to 0.75 in
    # the top layer at index 1, and in all layers at index 2.
    sat = np.array([[1, 0.5, 1, 0.5, 1], [1, 0.5, 1, 0.5, 1], [1, 0.5, 1, 0.5, 1], [1, 0.5, 1, 0.5, 1],
                    [1, 0.5, 1, 0.5, 1]])
    sat1_array = np.array([sat, sat, sat])
    sat2_array = np.array([sat, sat, np.where(sat == 0.5, 0.75, sat)])
    sat3_array = np.array([
        np.where(sat == 0.5, 0.75, sat),
        np.where(sat == 0.5, 0.75, sat),
        np.where(sat == 0.5, 0.75, sat)
    ])
    collection = rqp.GridPropertyCollection()
    collection.set_grid(grid)
    # Annual time series starting 2000-01-01: three timestamps a year apart.
    ts = rqts.TimeSeries(parent_model=model, first_timestamp='2000-01-01Z')
    ts.extend_by_days(365)
    ts.extend_by_days(365)
    ts.create_xml()
    # String lookup mapping the facies integer codes to names.
    lookup = rqp.StringLookup(parent_model=model,
                              int_to_str_dict={
                                  1: 'channel',
                                  2: 'interbedded',
                                  3: 'shale'
                              })
    lookup.create_xml()
    model.store_epc()
    # Add non-varying properties
    for array, name, kind, discrete, facet_type, facet in zip(
        [zone_array, vpc_array, fb_array, perm_array],
        ['Zone', 'VPC', 'Fault block', 'Perm'],
        ['discrete', 'discrete', 'discrete', 'permeability rock'],
        [True, True, True, False],
        [None, None, None, 'direction'],
        [None, None, None, 'J']):
        collection.add_cached_array_to_imported_list(cached_array=array,
                                                     source_info='',
                                                     keyword=name,
                                                     discrete=discrete,
                                                     uom=None,
                                                     time_index=None,
                                                     null_value=None,
                                                     property_kind=kind,
                                                     facet_type=facet_type,
                                                     facet=facet,
                                                     realization=None)
    collection.write_hdf5_for_imported_list()
    collection.create_xml_for_imported_list_and_add_parts_to_model()
    # Add realisation varying properties (NTG and POR for realisations 0 and 1)
    for array, name, kind, rel in zip(
        [ntg1_array, por1_array, ntg2_array, por2_array],
        ['NTG', 'POR', 'NTG', 'POR'],
        ['net to gross ratio', 'porosity', 'net to gross ratio', 'porosity'],
        [0, 0, 1, 1]):
        collection.add_cached_array_to_imported_list(cached_array=array,
                                                     source_info='',
                                                     keyword=name,
                                                     discrete=False,
                                                     uom=None,
                                                     time_index=None,
                                                     null_value=None,
                                                     property_kind=kind,
                                                     facet_type=None,
                                                     facet=None,
                                                     realization=rel)
    collection.write_hdf5_for_imported_list()
    collection.create_xml_for_imported_list_and_add_parts_to_model()
    # Add categorical property (facies, linked to the string lookup)
    collection.add_cached_array_to_imported_list(cached_array=facies_array,
                                                 source_info='',
                                                 keyword='Facies',
                                                 discrete=True,
                                                 uom=None,
                                                 time_index=None,
                                                 null_value=None,
                                                 property_kind='discrete',
                                                 facet_type=None,
                                                 facet=None,
                                                 realization=None)
    collection.write_hdf5_for_imported_list()
    collection.create_xml_for_imported_list_and_add_parts_to_model(
        string_lookup_uuid=lookup.uuid)
    # Add time varying properties (SW at each of the three time indices)
    for array, ts_index in zip([sat1_array, sat2_array, sat3_array], [0, 1, 2]):
        collection.add_cached_array_to_imported_list(
            cached_array=array,
            source_info='',
            keyword='SW',
            discrete=False,
            uom=None,
            time_index=ts_index,
            null_value=None,
            property_kind='saturation',
            facet_type='what',
            facet='water',
            realization=None)
    collection.write_hdf5_for_imported_list()
    collection.create_xml_for_imported_list_and_add_parts_to_model(
        time_series_uuid=ts.uuid)
    model.store_epc()
    return model
class ModelContext:
    """Context manager for easy opening and closing of resqpy models.

    When a model is opened this way, any open file handles are safely closed
    when the "with" clause exits. Optionally, the epc can be written back to
    disk upon exit.

    Example::

        with ModelContext("my_model.epc", mode="rw") as model:
            print(model.uuids())

    Note:
        The "write_hdf5" and "create_xml" methods of individual resqpy objects
        still need to be invoked as usual.
    """

    def __init__(self, epc_file, mode="r") -> None:
        """Open a resqml file, safely closing file handles upon exit.

        The modes operate as follows:

        - In "read" mode, an existing epc file is opened. Any changes are not
          saved to disk automatically, but can still be saved by calling
          `model.store_epc()`.
        - In "read/write" mode, changes are written to disk when the context
          exits.
        - In "create" mode, a new model is created and saved upon exit. Any
          existing model will be deleted.

        Args:
            epc_file (str): path to existing resqml file
            mode (str): one of "read", "read/write", "create", or shorthands
                "r", "rw", "c".

        Raises:
            ValueError: if mode is not one of the recognised values.
        """
        # Normalise shorthand modes to their long form, then validate.
        modes_mapping = {"r": "read", "rw": "read/write", "c": "create"}
        mode = modes_mapping.get(mode, mode)
        if mode not in modes_mapping.values():
            raise ValueError(f"Unexpected mode '{mode}'")

        self.epc_file = epc_file
        self.mode = mode
        self._model: Optional[Model] = None  # set on __enter__

    def __enter__(self) -> Model:
        """Enter the runtime context, return a model."""
        if self.mode in ["read", "read/write"]:
            if not os.path.exists(self.epc_file):
                raise FileNotFoundError(self.epc_file)
            self._model = Model(epc_file=str(self.epc_file))
        else:
            assert self.mode == "create"
            # Remove any pre-existing epc and its companion hdf5 file.
            # splitext is used instead of slicing off the last four characters,
            # so a path that does not end in '.epc' is not silently mangled.
            for file in [self.epc_file, os.path.splitext(self.epc_file)[0] + '.h5']:
                if os.path.exists(file):
                    os.remove(file)
                    log.info('old file deleted: ' + str(file))
            self._model = new_model(self.epc_file)
        return self._model

    def __exit__(self, exc_type, exc_value, exc_tb):
        """Exit the runtime context, close the model."""
        # Guard against __exit__ being invoked without a successful __enter__
        if self._model is not None:
            # Only write to disk if no exception has occurred
            if self.mode in ["read/write", "create"] and exc_type is None:
                self._model.store_epc()
            # Release file handles
            self._model.h5_release()
def example_model_with_properties(tmp_path):
    """Model with a grid (5x5x3) and properties.

    Properties:
    - Zone (discrete)
    - VPC (discrete)
    - Fault block (discrete)
    - Facies (discrete)
    - NTG (continuous)
    - POR (continuous)
    - SW (continuous)

    Returns:
        Model: model (also stored to epc under tmp_path) containing the grid
        and the properties listed above (plus Perm and PERMZ), without time
        series or string lookup relationships.
    """
    model_path = str(tmp_path / 'test_no_rels.epc')
    model = Model(create_basics=True, create_hdf5_ext=True, epc_file=model_path, new_epc=True)
    model.store_epc(model.epc_file)
    # Regular grid, extent_kji = (3, 5, 5) i.e. 3 layers of 5x5 cells, sharing
    # the model's CRS; geometry is cached, written to hdf5, then registered via xml.
    grid = grr.RegularGrid(parent_model=model,
                           origin=(0, 0, 0),
                           extent_kji=(3, 5, 5),
                           crs_uuid=rqet.uuid_for_part_root(model.crs_root),
                           set_points_cached=True)
    grid.cache_all_geometry_arrays()
    grid.write_hdf5_from_caches(file=model.h5_file_name(file_must_exist=False), mode='w')
    grid.create_xml(ext_uuid=model.h5_uuid(),
                    title='grid',
                    write_geometry=True,
                    add_cell_length_properties=False)
    model.store_epc()
    # Per-layer arrays below are built as a 5x5 (j, i) map stacked 3 times (k).
    # Zone index: layer k holds value k + 1.
    zone = np.ones(shape=(5, 5))
    zone_array = np.array([zone, zone + 1, zone + 2], dtype='int')
    # VPC: three columns of 1 then two columns of 2, identical in every layer.
    vpc = np.array([[1, 1, 1, 2, 2], [1, 1, 1, 2, 2], [1, 1, 1, 2, 2], [1, 1, 1, 2, 2], [1, 1, 1, 2, 2]])
    vpc_array = np.array([vpc, vpc, vpc])
    # Facies: diagonal banding of values 1, 2, 3, identical in every layer.
    facies = np.array([[1, 1, 1, 2, 2], [1, 1, 2, 2, 2], [1, 2, 2, 2, 3], [2, 2, 2, 3, 3], [2, 2, 3, 3, 3]])
    facies_array = np.array([facies, facies, facies])
    # Fault block: rows 0-2 in block 1, rows 3-4 in block 2.
    fb = np.array([[1, 1, 1, 1, 1], [1, 1, 1, 1, 1], [1, 1, 1, 1, 1], [2, 2, 2, 2, 2], [2, 2, 2, 2, 2]])
    fb_array = np.array([fb, fb, fb])
    # NTG checkerboard of 0 and 0.5.
    ntg = np.array([[0, 0.5, 0, 0.5, 0], [0.5, 0, 0.5, 0, 0.5], [0, 0.5, 0, 0.5, 0], [0.5, 0, 0.5, 0, 0.5],
                    [0, 0.5, 0, 0.5, 0]])
    ntg_array = np.array([ntg, ntg, ntg])
    # POR alternating rows of 1 and 0.5.
    por = np.array([[1, 1, 1, 1, 1], [0.5, 0.5, 0.5, 0.5, 0.5], [1, 1, 1, 1, 1], [0.5, 0.5, 0.5, 0.5, 0.5],
                    [1, 1, 1, 1, 1]])
    por_array = np.array([por, por, por])
    # Water saturation alternating columns of 1 and 0.5.
    sat = np.array([[1, 0.5, 1, 0.5, 1], [1, 0.5, 1, 0.5, 1], [1, 0.5, 1, 0.5, 1], [1, 0.5, 1, 0.5, 1],
                    [1, 0.5, 1, 0.5, 1]])
    sat_array = np.array([sat, sat, sat])
    # Permeability increasing with column: 1, 10, 10, 100, 100; vertical
    # permeability is one tenth of horizontal.
    perm = np.array([[1, 10, 10, 100, 100], [1, 10, 10, 100, 100], [1, 10, 10, 100, 100], [1, 10, 10, 100, 100],
                     [1, 10, 10, 100, 100]])
    perm_array = np.array([perm, perm, perm], dtype='float')
    perm_v_array = perm_array * 0.1
    collection = rqp.GridPropertyCollection()
    collection.set_grid(grid)
    # NOTE(review): 'Perm' uses kind 'rock permeability' while 'PERMZ' uses
    # 'permeability rock'; presumably intentional to exercise both spellings —
    # confirm against the property kind handling being tested.
    for array, name, kind, discrete, facet_type, facet in zip(
        [
            zone_array, vpc_array, fb_array, facies_array, ntg_array,
            por_array, sat_array, perm_array, perm_v_array
        ],
        [
            'Zone', 'VPC', 'Fault block', 'Facies', 'NTG', 'POR', 'SW',
            'Perm', 'PERMZ'
        ],
        [
            'discrete', 'discrete', 'discrete', 'discrete',
            'net to gross ratio', 'porosity', 'saturation',
            'rock permeability', 'permeability rock'
        ],
        [True, True, True, True, False, False, False, False, False],
        [None, None, None, None, None, None, None, 'direction', 'direction'],
        [None, None, None, None, None, None, None, 'I', 'K']):
        collection.add_cached_array_to_imported_list(cached_array=array,
                                                     source_info='',
                                                     keyword=name,
                                                     discrete=discrete,
                                                     uom=None,
                                                     time_index=None,
                                                     null_value=None,
                                                     property_kind=kind,
                                                     facet_type=facet_type,
                                                     facet=facet,
                                                     realization=None)
    collection.write_hdf5_for_imported_list()
    collection.create_xml_for_imported_list_and_add_parts_to_model()
    model.store_epc()
    return model
def function_multiprocessing(
    function: Callable,
    kwargs_list: List[Dict[str, Any]],
    recombined_epc: Union[Path, str],
    cluster,
    consolidate: bool = True,
) -> List[bool]:
    """Calls a function concurrently with the specified arguments.

    A multiprocessing pool is used to call the function multiple times in
    parallel. Once all results are returned, they are combined into a single
    epc file.

    Args:
        function (Callable): the function to be called. Needs to return:

            - index (int): the index of the kwargs in the kwargs_list.
            - success (bool): whether the function call was successful,
              whatever that definition is.
            - epc_file (Path/str): the epc file path where the objects are
              stored.
            - uuid_list (List[str]): list of UUIDs of relevant objects.

        kwargs_list (List[Dict[Any]]): A list of keyword argument dictionaries
            that are used when calling the function.
        recombined_epc (Path/str): A pathlib Path or path string of where the
            combined epc will be saved.
        cluster (LocalCluster/JobQueueCluster): a LocalCluster is a Dask
            cluster on a local machine. If using a job queuing system, a
            JobQueueCluster can be used such as an SGECluster, SLURMCluster,
            PBSCluster, LSFCluster etc.
        consolidate (bool): if True and an equivalent part already exists in
            a model, it is not duplicated and the uuids are noted as
            equivalent.

    Returns:
        success_list (List[bool]): A boolean list of successful function
        calls.

    Note:
        This function uses the Dask backend to run the given function in
        parallel, so a Dask cluster must be setup and passed as an argument.
        Dask will need to be installed in the Python environment because it
        is not a dependency of the project. More info can be found at
        https://docs.dask.org/en/latest/deploying.html
    """
    log.info("Multiprocessing function called with %s function.", function.__name__)

    # Tell each call its position in kwargs_list so the (unordered) results
    # can be matched back up afterwards. NOTE: mutates the caller's dicts.
    for i, kwargs in enumerate(kwargs_list):
        kwargs["index"] = i

    with parallel_backend("dask"):
        results = Parallel()(delayed(function)(**kwargs) for kwargs in kwargs_list)

    log.info("Function calls complete.")

    # Sorting the results by the original kwargs_list index; sorted() already
    # returns a list, so no extra list() wrapper is needed.
    results = sorted(results, key = lambda x: x[0])
    success_list = [result[1] for result in results]
    epc_list = [result[2] for result in results]
    uuids_list = [result[3] for result in results]
    log.info("Number of successes: %s/%s.", sum(success_list), len(results))

    # Open (or create) the model that will receive all the parts.
    epc_file = Path(str(recombined_epc))
    if epc_file.is_file():
        model_recombined = Model(epc_file = str(epc_file))
    else:
        model_recombined = new_model(epc_file = str(epc_file))

    log.info("Creating the recombined epc file.")
    for epc, uuids in zip(epc_list, uuids_list):
        if epc is None:
            continue
        model = _open_model_with_retries(epc)
        if uuids is None:
            # No explicit uuid list returned: copy every object in the model.
            uuids = model.uuids()
        for uuid in uuids:
            model_recombined.copy_uuid_from_other_model(model, uuid = uuid, consolidate = consolidate)

    # Deleting temporary directory.
    log.info("Deleting the temporary directory")
    rm_tree("tmp_dir")

    model_recombined.store_epc()
    log.info("Recombined epc file complete.")

    return success_list


def _open_model_with_retries(epc, max_attempts = 60, delay_seconds = 1.0):
    """Open a Model from an epc path, retrying while the file is not yet visible.

    Worker processes may still be flushing files to a shared filesystem, so a
    FileNotFoundError is retried with a short sleep. Unlike an unbounded
    'while True' loop, the error is re-raised after max_attempts so the caller
    cannot hang forever on a file that will never appear.
    """
    for attempt in range(max_attempts):
        try:
            return Model(epc_file = epc)
        except FileNotFoundError:
            if attempt == max_attempts - 1:
                raise
            time.sleep(delay_seconds)