def _inherit_collection(source_grid, grid, inherit_properties, box, inherit_realization, inherit_all_realizations):
    collection = None
    if inherit_properties:
        source_collection = source_grid.extract_property_collection()
        if source_collection is not None:
            # do not inherit the inactive property array by this mechanism
            active_collection = rqp.selective_version_of_collection(source_collection, property_kind='active')
            source_collection.remove_parts_list_from_dict(active_collection.parts())
            inactive_collection = rqp.selective_version_of_collection(
                source_collection,
                property_kind='code',  # for backward compatibility
                facet_type='what',
                facet='inactive')
            source_collection.remove_parts_list_from_dict(inactive_collection.parts())
            collection = rqp.GridPropertyCollection()
            collection.set_grid(grid)
            collection.extend_imported_list_copying_properties_from_other_grid_collection(
                source_collection,
                box=box,
                realization=inherit_realization,
                copy_all_realizations=inherit_all_realizations)
    return collection
def _inherit_properties(source_grid, grid, fine_coarse, inherit_realization, inherit_all_realizations):
    source_collection = source_grid.extract_property_collection()
    collection = None
    if source_collection is not None:
        # do not inherit the inactive property array by this mechanism
        collection = rqp.GridPropertyCollection()
        collection.set_grid(grid)
        collection.extend_imported_list_copying_properties_from_other_grid_collection(
            source_collection,
            refinement=fine_coarse,
            realization=inherit_realization,
            copy_all_realizations=inherit_all_realizations)
    return collection
def _prepare_simple_inheritance(grid, source_grid, inherit_properties, inherit_realization, inherit_all_realizations):
    collection = None
    if inherit_properties:
        source_collection = source_grid.extract_property_collection()
        if source_collection is not None:
            # do not inherit the inactive property array by this mechanism
            collection = rqp.GridPropertyCollection()
            collection.set_grid(grid)
            collection.extend_imported_list_copying_properties_from_other_grid_collection(
                source_collection,
                realization=inherit_realization,
                copy_all_realizations=inherit_all_realizations)
    return collection
def extract_property_collection(grid):
    """Load grid property collection object holding lists of all properties in model that relate to this grid.

    returns:
       resqml_property.GridPropertyCollection object

    note:
       a reference to the grid property collection is cached in this grid object; if the properties change, for
       example by generating some new properties, the property_collection attribute of the grid object would need
       to be reset to None elsewhere before calling this method again
    """

    if grid.property_collection is not None:
        return grid.property_collection
    grid.property_collection = rprop.GridPropertyCollection(grid = grid)
    return grid.property_collection
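
# Hedged usage sketch (not part of the original source): the docstring above notes that the collection is
# cached on the grid object, so code which adds new property parts for the grid needs to clear the cache
# before asking for the collection again; the helper below simply illustrates that pattern.
def _example_refresh_property_collection(grid):
    pc = extract_property_collection(grid)      # first call builds and caches the collection
    # ... suppose new property parts for this grid are added to the model here ...
    grid.property_collection = None             # drop the stale cached collection
    return extract_property_collection(grid)    # rebuilds the collection, now including the new parts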
def _displacement_properties(new_grid, old_grid):
    """Computes cell centre differences in x, y, & z, between old & new grids, and returns a collection of 3 properties."""

    displacement_collection = rqp.GridPropertyCollection()
    displacement_collection.set_grid(new_grid)
    old_grid.centre_point(cache_centre_array=True)
    new_grid.centre_point(cache_centre_array=True)
    displacement = new_grid.array_centre_point - old_grid.array_centre_point
    log.debug('displacement array shape: ' + str(displacement.shape))
    displacement_collection.x_array = displacement[..., 0].copy()
    displacement_collection.y_array = displacement[..., 1].copy()
    displacement_collection.z_array = displacement[..., 2].copy()
    # horizontal_displacement = np.sqrt(x_displacement * x_displacement + y_displacement * y_displacement)
    # todo: create prop collection to hold z_displacement and horizontal_displacement; add them to imported list
    xy_units = new_grid.xy_units()
    z_units = new_grid.z_units()
    # todo: could replace 3 displacement properties with a single points property
    displacement_collection.add_cached_array_to_imported_list(displacement_collection.x_array,
                                                              'easterly displacement from tilt',
                                                              'DX_DISPLACEMENT',
                                                              discrete=False,
                                                              uom=xy_units)
    displacement_collection.add_cached_array_to_imported_list(displacement_collection.y_array,
                                                              'northerly displacement from tilt',
                                                              'DY_DISPLACEMENT',
                                                              discrete=False,
                                                              uom=xy_units)
    displacement_collection.add_cached_array_to_imported_list(displacement_collection.z_array,
                                                              'vertical displacement from tilt',
                                                              'DZ_DISPLACEMENT',
                                                              discrete=False,
                                                              uom=z_units)
    return displacement_collection
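
# Hedged usage sketch (not part of the original source): the collection returned above only holds the three
# displacement arrays on its 'imported' list; a caller would typically persist them along the lines below,
# using the same collection methods that appear elsewhere in this section.
def _example_persist_displacement_properties(new_grid, old_grid):
    dc = _displacement_properties(new_grid, old_grid)
    dc.write_hdf5_for_imported_list()                          # write the three cached arrays to hdf5
    dc.create_xml_for_imported_list_and_add_parts_to_model()   # then create the property parts in the model
    return dc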
def extract_inactive_mask(grid, check_pinchout = False):
    """Returns boolean numpy array indicating which cells are inactive, if (in)active property found in this grid.

    returns:
       numpy array of booleans, of shape (nk, nj, ni) being True for cells which are inactive; False for active

    note:
       RESQML does not have a built-in concept of inactive (dead) cells, though the usage guide advises to use a
       discrete property with a local property kind of 'active'; this resqpy code can maintain an 'inactive'
       attribute for the grid object, which is a boolean numpy array indicating which cells are inactive
    """

    if grid.inactive is not None and not check_pinchout:
        return grid.inactive
    geom_defined = cell_geometry_is_defined_ref(grid)
    if grid.inactive is None:
        if geom_defined is None or geom_defined is True:
            grid.inactive = np.zeros(tuple(grid.extent_kji))  # ie. all active
        else:
            grid.inactive = np.logical_not(cell_geometry_is_defined_ref(grid))
    if check_pinchout:
        grid.inactive = np.logical_or(grid.inactive, grid.pinched_out())
    gpc = grid.extract_property_collection()
    if gpc is None:
        grid.all_inactive = np.all(grid.inactive)
        return grid.inactive

    # note: use of bespoke (local) property kind 'active' as suggested in resqml usage guide
    active_gpc = rprop.GridPropertyCollection()
    active_gpc.inherit_parts_selectively_from_other_collection(other = gpc,
                                                               property_kind = 'active',
                                                               indexable = 'cells',
                                                               continuous = False)
    active_parts = active_gpc.parts()
    if len(active_parts) > 1:
        # try further filtering based on grid's time index data (or filtering out time based arrays)
        grid.extract_geometry_time_index()
        if grid.time_index is not None and grid.time_series_uuid is not None:
            active_gpc = rprop.selective_version_of_collection(active_gpc,
                                                               time_index = grid.time_index,
                                                               time_series_uuid = grid.time_series_uuid)
            active_parts = active_gpc.parts()  # refresh parts list to match the filtered collection
        else:
            active_parts = []
            for part in active_gpc.parts():
                if active_gpc.time_series_uuid_for_part(part) is None and active_gpc.time_index_for_part(part) is None:
                    active_parts.append(part)
    if len(active_parts) > 0:
        if len(active_parts) > 1:
            log.warning('more than one property found with bespoke kind "active", using last encountered')
        active_part = active_parts[-1]
        active_array = active_gpc.cached_part_array_ref(active_part, dtype = 'bool')
        grid.inactive = np.logical_or(grid.inactive, np.logical_not(active_array))
        grid.active_property_uuid = active_gpc.uuid_for_part(active_part)
        active_gpc.uncache_part_array(active_part)
    else:
        # for backward compatibility with earlier versions of resqpy
        inactive_gpc = rprop.GridPropertyCollection()
        inactive_gpc.inherit_parts_selectively_from_other_collection(other = gpc,
                                                                     property_kind = 'code',
                                                                     facet_type = 'what',
                                                                     facet = 'inactive')
        if inactive_gpc.number_of_parts() == 1:
            inactive_part = inactive_gpc.parts()[0]
            inactive_array = inactive_gpc.cached_part_array_ref(inactive_part, dtype = 'bool')
            grid.inactive = np.logical_or(grid.inactive, inactive_array)
            inactive_gpc.uncache_part_array(inactive_part)

    grid.all_inactive = np.all(grid.inactive)
    return grid.inactive
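
# Hedged usage sketch (not part of the original source): the function above looks for a discrete property
# with the bespoke property kind 'active'; this shows how such a property might be added so that a subsequent
# call picks it up (the source info string and mask values are hypothetical, and the argument pattern follows
# the commented-out ACTIVE example later in this section).
def _example_add_active_property(grid, active_mask):
    import resqpy.property as rprop
    pc = rprop.GridPropertyCollection()
    pc.set_grid(grid)
    pc.add_cached_array_to_imported_list(active_mask.astype(bool),
                                         'example active mask',
                                         'ACTIVE',
                                         discrete = True,
                                         property_kind = 'active')
    pc.write_hdf5_for_imported_list()
    pc.create_xml_for_imported_list_and_add_parts_to_model()
    grid.inactive = None                     # clear any cached mask so it is re-derived from the new property
    grid.property_collection = None          # also drop the cached grid property collection
    return extract_inactive_mask(grid)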
def example_model_with_prop_ts_rels(tmp_path):
    """Model with a grid (5x5x3) and properties.

    Properties:
    - Zone (discrete)
    - VPC (discrete)
    - Fault block (discrete)
    - Facies (discrete)
    - NTG (continuous)
    - POR (continuous)
    - SW (continuous) (recurrent)
    """
    model_path = str(tmp_path / 'test_model.epc')
    model = Model(create_basics=True, create_hdf5_ext=True, epc_file=model_path, new_epc=True)
    model.store_epc(model.epc_file)

    grid = grr.RegularGrid(parent_model=model,
                           origin=(0, 0, 0),
                           extent_kji=(3, 5, 5),
                           crs_uuid=rqet.uuid_for_part_root(model.crs_root),
                           set_points_cached=True)
    grid.cache_all_geometry_arrays()
    grid.write_hdf5_from_caches(file=model.h5_file_name(file_must_exist=False), mode='w')
    grid.create_xml(ext_uuid=model.h5_uuid(), title='grid', write_geometry=True, add_cell_length_properties=False)
    model.store_epc()

    zone = np.ones(shape=(5, 5), dtype='int')
    zone_array = np.array([zone, zone + 1, zone + 2], dtype='int')
    vpc = np.array([[1, 1, 1, 2, 2], [1, 1, 1, 2, 2], [1, 1, 1, 2, 2], [1, 1, 1, 2, 2], [1, 1, 1, 2, 2]], dtype='int')
    vpc_array = np.array([vpc, vpc, vpc], dtype='int')
    facies = np.array([[1, 1, 1, 2, 2], [1, 1, 2, 2, 2], [1, 2, 2, 2, 3], [2, 2, 2, 3, 3], [2, 2, 3, 3, 3]],
                      dtype='int')
    facies_array = np.array([facies, facies, facies], dtype='int')
    perm = np.array([[1, 1, 1, 10, 10], [1, 1, 1, 10, 10], [1, 1, 1, 10, 10], [1, 1, 1, 10, 10], [1, 1, 1, 10, 10]])
    perm_array = np.array([perm, perm, perm], dtype='float')
    fb = np.array([[1, 1, 1, 1, 1], [1, 1, 1, 1, 1], [1, 1, 1, 1, 1], [2, 2, 2, 2, 2], [2, 2, 2, 2, 2]], dtype='int')
    fb_array = np.array([fb, fb, fb], dtype='int')
    ntg = np.array([[0, 0.5, 0, 0.5, 0], [0.5, 0, 0.5, 0, 0.5], [0, 0.5, 0, 0.5, 0], [0.5, 0, 0.5, 0, 0.5],
                    [0, 0.5, 0, 0.5, 0]])
    ntg1_array = np.array([ntg, ntg, ntg])
    ntg2_array = np.array([ntg + 0.1, ntg + 0.1, ntg + 0.1])
    por = np.array([[1, 1, 1, 1, 1], [0.5, 0.5, 0.5, 0.5, 0.5], [1, 1, 1, 1, 1], [0.5, 0.5, 0.5, 0.5, 0.5],
                    [1, 1, 1, 1, 1]])
    por1_array = np.array([por, por, por])
    por2_array = np.array([por - 0.1, por - 0.1, por - 0.1])
    sat = np.array([[1, 0.5, 1, 0.5, 1], [1, 0.5, 1, 0.5, 1], [1, 0.5, 1, 0.5, 1], [1, 0.5, 1, 0.5, 1],
                    [1, 0.5, 1, 0.5, 1]])
    sat1_array = np.array([sat, sat, sat])
    sat2_array = np.array([sat, sat, np.where(sat == 0.5, 0.75, sat)])
    sat3_array = np.array([
        np.where(sat == 0.5, 0.75, sat),
        np.where(sat == 0.5, 0.75, sat),
        np.where(sat == 0.5, 0.75, sat)
    ])

    collection = rqp.GridPropertyCollection()
    collection.set_grid(grid)

    ts = rqts.TimeSeries(parent_model=model, first_timestamp='2000-01-01Z')
    ts.extend_by_days(365)
    ts.extend_by_days(365)
    ts.create_xml()

    lookup = rqp.StringLookup(parent_model=model, int_to_str_dict={1: 'channel', 2: 'interbedded', 3: 'shale'})
    lookup.create_xml()

    model.store_epc()

    # Add non-varying properties
    for array, name, kind, discrete, facet_type, facet in zip(
        [zone_array, vpc_array, fb_array, perm_array], ['Zone', 'VPC', 'Fault block', 'Perm'],
        ['discrete', 'discrete', 'discrete', 'permeability rock'], [True, True, True, False],
        [None, None, None, 'direction'], [None, None, None, 'J']):
        collection.add_cached_array_to_imported_list(cached_array=array,
                                                     source_info='', keyword=name,
                                                     discrete=discrete, uom=None,
                                                     time_index=None, null_value=None,
                                                     property_kind=kind,
                                                     facet_type=facet_type, facet=facet,
                                                     realization=None)
        collection.write_hdf5_for_imported_list()
        collection.create_xml_for_imported_list_and_add_parts_to_model()

    # Add realisation varying properties
    for array, name, kind, rel in zip([ntg1_array, por1_array, ntg2_array, por2_array],
                                      ['NTG', 'POR', 'NTG', 'POR'],
                                      ['net to gross ratio', 'porosity', 'net to gross ratio', 'porosity'],
                                      [0, 0, 1, 1]):
        collection.add_cached_array_to_imported_list(cached_array=array,
                                                     source_info='', keyword=name,
                                                     discrete=False, uom=None,
                                                     time_index=None, null_value=None,
                                                     property_kind=kind,
                                                     facet_type=None, facet=None,
                                                     realization=rel)
        collection.write_hdf5_for_imported_list()
        collection.create_xml_for_imported_list_and_add_parts_to_model()

    # Add categorical property
    collection.add_cached_array_to_imported_list(cached_array=facies_array,
                                                 source_info='', keyword='Facies',
                                                 discrete=True, uom=None,
                                                 time_index=None, null_value=None,
                                                 property_kind='discrete',
                                                 facet_type=None, facet=None,
                                                 realization=None)
    collection.write_hdf5_for_imported_list()
    collection.create_xml_for_imported_list_and_add_parts_to_model(string_lookup_uuid=lookup.uuid)

    # Add time varying properties
    for array, ts_index in zip([sat1_array, sat2_array, sat3_array], [0, 1, 2]):
        collection.add_cached_array_to_imported_list(cached_array=array,
                                                     source_info='', keyword='SW',
                                                     discrete=False, uom=None,
                                                     time_index=ts_index, null_value=None,
                                                     property_kind='saturation',
                                                     facet_type='what', facet='water',
                                                     realization=None)
        collection.write_hdf5_for_imported_list()
        collection.create_xml_for_imported_list_and_add_parts_to_model(time_series_uuid=ts.uuid)
    model.store_epc()

    return model
def example_model_with_properties(tmp_path):
    """Model with a grid (5x5x3) and properties.

    Properties:
    - Zone (discrete)
    - VPC (discrete)
    - Fault block (discrete)
    - Facies (discrete)
    - NTG (continuous)
    - POR (continuous)
    - SW (continuous)
    """
    model_path = str(tmp_path / 'test_no_rels.epc')
    model = Model(create_basics=True, create_hdf5_ext=True, epc_file=model_path, new_epc=True)
    model.store_epc(model.epc_file)

    grid = grr.RegularGrid(parent_model=model,
                           origin=(0, 0, 0),
                           extent_kji=(3, 5, 5),
                           crs_uuid=rqet.uuid_for_part_root(model.crs_root),
                           set_points_cached=True)
    grid.cache_all_geometry_arrays()
    grid.write_hdf5_from_caches(file=model.h5_file_name(file_must_exist=False), mode='w')
    grid.create_xml(ext_uuid=model.h5_uuid(), title='grid', write_geometry=True, add_cell_length_properties=False)
    model.store_epc()

    zone = np.ones(shape=(5, 5))
    zone_array = np.array([zone, zone + 1, zone + 2], dtype='int')
    vpc = np.array([[1, 1, 1, 2, 2], [1, 1, 1, 2, 2], [1, 1, 1, 2, 2], [1, 1, 1, 2, 2], [1, 1, 1, 2, 2]])
    vpc_array = np.array([vpc, vpc, vpc])
    facies = np.array([[1, 1, 1, 2, 2], [1, 1, 2, 2, 2], [1, 2, 2, 2, 3], [2, 2, 2, 3, 3], [2, 2, 3, 3, 3]])
    facies_array = np.array([facies, facies, facies])
    fb = np.array([[1, 1, 1, 1, 1], [1, 1, 1, 1, 1], [1, 1, 1, 1, 1], [2, 2, 2, 2, 2], [2, 2, 2, 2, 2]])
    fb_array = np.array([fb, fb, fb])
    ntg = np.array([[0, 0.5, 0, 0.5, 0], [0.5, 0, 0.5, 0, 0.5], [0, 0.5, 0, 0.5, 0], [0.5, 0, 0.5, 0, 0.5],
                    [0, 0.5, 0, 0.5, 0]])
    ntg_array = np.array([ntg, ntg, ntg])
    por = np.array([[1, 1, 1, 1, 1], [0.5, 0.5, 0.5, 0.5, 0.5], [1, 1, 1, 1, 1], [0.5, 0.5, 0.5, 0.5, 0.5],
                    [1, 1, 1, 1, 1]])
    por_array = np.array([por, por, por])
    sat = np.array([[1, 0.5, 1, 0.5, 1], [1, 0.5, 1, 0.5, 1], [1, 0.5, 1, 0.5, 1], [1, 0.5, 1, 0.5, 1],
                    [1, 0.5, 1, 0.5, 1]])
    sat_array = np.array([sat, sat, sat])
    perm = np.array([[1, 10, 10, 100, 100], [1, 10, 10, 100, 100], [1, 10, 10, 100, 100], [1, 10, 10, 100, 100],
                     [1, 10, 10, 100, 100]])
    perm_array = np.array([perm, perm, perm], dtype='float')
    perm_v_array = perm_array * 0.1

    collection = rqp.GridPropertyCollection()
    collection.set_grid(grid)
    for array, name, kind, discrete, facet_type, facet in zip(
        [zone_array, vpc_array, fb_array, facies_array, ntg_array, por_array, sat_array, perm_array, perm_v_array],
        ['Zone', 'VPC', 'Fault block', 'Facies', 'NTG', 'POR', 'SW', 'Perm', 'PERMZ'],
        ['discrete', 'discrete', 'discrete', 'discrete', 'net to gross ratio', 'porosity', 'saturation',
         'rock permeability', 'permeability rock'],
        [True, True, True, True, False, False, False, False, False],
        [None, None, None, None, None, None, None, 'direction', 'direction'],
        [None, None, None, None, None, None, None, 'I', 'K']):
        collection.add_cached_array_to_imported_list(cached_array=array,
                                                     source_info='', keyword=name,
                                                     discrete=discrete, uom=None,
                                                     time_index=None, null_value=None,
                                                     property_kind=kind,
                                                     facet_type=facet_type, facet=facet,
                                                     realization=None)
        collection.write_hdf5_for_imported_list()
        collection.create_xml_for_imported_list_and_add_parts_to_model()
    model.store_epc()

    return model
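
# Hedged usage sketch (not part of the original source): once one of the fixture models above has been built,
# its properties can be filtered from the grid's collection; the property kind, facet and time index values
# used here match those the fixtures pass when adding the arrays.
def _example_query_fixture_properties(model):
    import resqpy.property as rqp
    grid = model.grid()
    pc = grid.extract_property_collection()
    # water saturation parts for the second reporting time index in the recurrent fixture
    sw_t1 = rqp.selective_version_of_collection(pc,
                                                property_kind='saturation',
                                                facet_type='what',
                                                facet='water',
                                                time_index=1)
    # all net to gross ratio parts, regardless of realization
    ntg = rqp.selective_version_of_collection(pc, property_kind='net to gross ratio')
    return sw_t1.number_of_parts(), ntg.number_of_parts()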
def add_one_grid_property_array(epc_file,
                                a,
                                property_kind,
                                grid_uuid=None,
                                source_info='imported',
                                title=None,
                                discrete=False,
                                uom=None,
                                time_index=None,
                                time_series_uuid=None,
                                string_lookup_uuid=None,
                                null_value=None,
                                indexable_element='cells',
                                facet_type=None,
                                facet=None,
                                realization=None,
                                local_property_kind_uuid=None,
                                count_per_element=1,
                                const_value=None,
                                expand_const_arrays=False,
                                points=False,
                                extra_metadata={},
                                new_epc_file=None):
    """Adds a grid property from a numpy array to an existing resqml dataset.

    arguments:
       epc_file (string): file name to load resqml model from (and rewrite to if new_epc_file is None)
       a (3D numpy array): the property array to be added to the model; for a constant array set this None
          and use the const_value argument, otherwise this array is required
       property_kind (string): the resqml property kind
       grid_uuid (uuid object or string, optional): the uuid of the grid to which the property relates;
          if None, the property is attached to the 'main' grid
       source_info (string): typically the name of a file from which the array has been read but can be any
          information regarding the source of the data
       title (string): this will be used as the citation title when a part is generated for the array; for
          simulation models it is desirable to use the simulation keyword when appropriate
       discrete (boolean, default False): if True, the array should contain integer (or boolean) data; if False, float
       uom (string, default None): the resqml units of measure for the data; not relevant to discrete data
       time_index (integer, default None): if not None, the time index to be used when creating a part for the array
       time_series_uuid (uuid object or string, default None): required if time_index is not None
       string_lookup_uuid (uuid object or string, optional): required if the array is to be stored as a categorical
          property; set to None for non-categorical discrete data; only relevant if discrete is True
       null_value (int, default None): if present, this is used in the metadata to indicate that this value is to be
          interpreted as a null value wherever it appears in the data (use for discrete data only)
       indexable_element (string, default 'cells'): the indexable element in the supporting representation (the grid)
       facet_type (string): resqml facet type, or None
       facet (string): resqml facet, or None
       realization (int): realization number, or None
       local_property_kind_uuid (uuid.UUID or string): uuid of local property kind, or None
       count_per_element (int, default 1): the number of values per indexable element; if greater than one then this
          must be the fastest cycling axis in the cached array, ie last index
       const_value (float or int, optional): if present, a constant array is added 'filled' with this value, in which
          case argument a should be None
       expand_const_arrays (bool, default False): if True and a const_value is provided, a fully expanded array is
          added to the model instead of a const array
       points (bool, default False): if True, this is a points property with an extra dimension of extent 3
       extra_metadata (dict, optional): any items in this dictionary are added as extra metadata to the new property
       new_epc_file (string, optional): if None, the source epc_file is extended with the new property object; if
          present, a new epc file (& associated h5 file) is created to contain a copy of the grid and the new property

    returns:
       uuid.UUID of newly created property object
    """

    if new_epc_file and epc_file and (
        (new_epc_file == epc_file) or
        (os.path.exists(new_epc_file) and os.path.exists(epc_file) and os.path.samefile(new_epc_file, epc_file))):
        new_epc_file = None

    # open up model and establish grid object
    model = rq.Model(epc_file)
    if grid_uuid is None:
        grid = model.grid()
        grid_uuid = grid.uuid
    else:
        grid = model.grid_for_uuid_from_grid_list(grid_uuid)
        if grid is None:
            grid = grr.any_grid(model, uuid=grid_uuid, find_properties=False)
    assert grid is not None, 'failed to establish grid object'

    if not discrete:
        string_lookup_uuid = None

    if const_value is not None and expand_const_arrays:
        assert count_per_element == 1 and not points, 'attempt to expand const array for non-standard shape'
        if isinstance(const_value, bool):
            dtype = bool
        elif discrete:
            dtype = int
        else:
            dtype = float
        a = np.full(grid.extent_kji, const_value, dtype=dtype)
        const_value = None

    # create an empty property collection and add the new array to its 'imported' list
    gpc = rqp.GridPropertyCollection()
    gpc.set_grid(grid)
    gpc.add_cached_array_to_imported_list(a,
                                          source_info,
                                          title,
                                          discrete=discrete,
                                          uom=uom,
                                          time_index=time_index,
                                          null_value=null_value,
                                          property_kind=property_kind,
                                          local_property_kind_uuid=local_property_kind_uuid,
                                          facet_type=facet_type,
                                          facet=facet,
                                          realization=realization,
                                          indexable_element=indexable_element,
                                          count=count_per_element,
                                          const_value=const_value,
                                          points=points)

    # write or re-write model
    model.h5_release()
    if new_epc_file:
        grid_title = rqet.citation_title_for_node(grid.root)
        uuid_list = _write_grid(new_epc_file,
                                grid,
                                property_collection=gpc,
                                grid_title=grid_title,
                                mode='w',
                                time_series_uuid=time_series_uuid,
                                string_lookup_uuid=string_lookup_uuid,
                                extra_metadata=extra_metadata)
    else:
        # add arrays to hdf5 file holding source grid geometry
        uuid_list = _write_grid(epc_file,
                                grid,
                                property_collection=gpc,
                                mode='a',
                                geometry=False,
                                time_series_uuid=time_series_uuid,
                                string_lookup_uuid=string_lookup_uuid,
                                extra_metadata=extra_metadata)

    if uuid_list is None or len(uuid_list) == 0:
        return None
    return uuid_list[0]
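
# Hedged usage sketch (not part of the original source): attach a single porosity array to the only grid in
# an existing dataset; the epc file name, array values and uom are hypothetical, and only arguments described
# in the docstring above are used.
def _example_add_one_grid_property_array():
    import numpy as np
    import resqpy.model as rq
    model = rq.Model('existing_model.epc')      # hypothetical existing dataset
    grid = model.grid()                         # assumes exactly one grid in the model
    por = np.full(grid.extent_kji, 0.25)        # constant 25% porosity array, for illustration only
    return add_one_grid_property_array('existing_model.epc',
                                       por,
                                       'porosity',
                                       grid_uuid=grid.uuid,
                                       title='POR',
                                       discrete=False,
                                       uom='m3/m3')   # uom assumed for this sketch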
def add_ab_properties(
        epc_file,  # existing resqml model
        grid_uuid=None,  # optional grid uuid, required if more than one grid in model; todo: handle list of grids?
        ext_uuid=None,  # if None, hdf5 file holding grid geometry will be used
        ab_property_list=None):
    # ab_property_list: list of (file_name, keyword, property_kind, facet_type, facet, uom, time_index, null_value,
    #                            discrete, realization)
    """Process a list of pure binary property array files.

    Adds as parts of model, related to grid (hdf5 file is appended to).
    """

    assert ab_property_list, 'property list is empty or missing'

    model = rq.Model(epc_file=epc_file)
    if grid_uuid is None:
        grid_node = model.root_for_ijk_grid()  # will raise an exception if Model has more than 1 grid
        assert grid_node is not None, 'grid not found in model'
        grid_uuid = rqet.uuid_for_part_root(grid_node)
    grid = grr.any_grid(parent_model=model, uuid=grid_uuid, find_properties=False)

    if ext_uuid is None:
        ext_node = rqet.find_nested_tags(grid.geometry_root, ['Points', 'Coordinates', 'HdfProxy', 'UUID'])
        if ext_node is not None:
            ext_uuid = bu.uuid_from_string(ext_node.text.strip())

    prop_import_collection = rp.GridPropertyCollection()
    prop_import_collection.set_grid(grid)
    for (p_filename, p_keyword, p_property_kind, p_facet_type, p_facet, p_uom, p_time_index, p_null_value, p_discrete,
         p_realization) in ab_property_list:
        prop_import_collection.import_ab_property_to_cache(p_filename,
                                                           p_keyword,
                                                           grid.extent_kji,
                                                           discrete=p_discrete,
                                                           uom=p_uom,
                                                           time_index=p_time_index,
                                                           null_value=p_null_value,
                                                           property_kind=p_property_kind,
                                                           facet_type=p_facet_type,
                                                           facet=p_facet,
                                                           realization=p_realization)
        # todo: property_kind, facet_type & facet are not currently getting passed through the imported_list tuple
        # in resqml_property

    if prop_import_collection is None:
        log.warning('no pure binary grid properties to import')
    else:
        log.info('number of pure binary grid property arrays: ' + str(prop_import_collection.number_of_imports()))

    # append to hdf5 file using arrays cached in grid property collection above
    hdf5_file = model.h5_file_name()
    log.debug('appending to hdf5 file: ' + hdf5_file)
    grid.write_hdf5_from_caches(hdf5_file,
                                mode='a',
                                geometry=False,
                                imported_properties=prop_import_collection,
                                write_active=False)

    # remove cached static property arrays from memory
    if prop_import_collection is not None:
        prop_import_collection.remove_all_cached_arrays()

    # add imported properties parts to model, building property parts list
    if prop_import_collection is not None and prop_import_collection.imported_list is not None:
        prop_import_collection.create_xml_for_imported_list_and_add_parts_to_model(ext_uuid)

    # mark model as modified
    model.set_modified()

    # store new version of model
    log.info('storing model with additional properties in epc file: ' + epc_file)
    model.store_epc(epc_file)

    return model
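
# Hedged usage sketch (not part of the original source): the file names, keywords and property kinds below are
# hypothetical; each tuple follows the (file_name, keyword, property_kind, facet_type, facet, uom, time_index,
# null_value, discrete, realization) layout noted above for ab_property_list.
def _example_add_ab_properties():
    ab_list = [
        ('por.db', 'POR', 'porosity', None, None, 'm3/m3', None, None, False, None),
        ('kx.db', 'KX', 'permeability rock', 'direction', 'I', 'mD', None, None, False, None),
    ]
    return add_ab_properties('existing_model.epc', ab_property_list=ab_list)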
def create_xml(self,
               ext_uuid=None,
               add_as_part=True,
               add_relationships=True,
               set_as_grid_root=True,
               title=None,
               originator=None,
               write_active=True,
               write_geometry=None,
               extra_metadata={},
               expand_const_arrays=False,
               add_cell_length_properties=True):
    """Creates xml for this RegularGrid object; by default the explicit geometry is not included.

    see docstring for Grid.create_xml()

    additional argument:
       add_cell_length_properties (boolean, default True): if True, 3 constant property arrays with cells as
          indexable element are created to hold the lengths of the primary axes of the cells; the xml is created
          for the properties and they are added to the model (no hdf5 write needed)

    :meta common:
    """

    if extra_metadata is None:
        extra_metadata = {}
    if self.crs_uuid is not None:
        extra_metadata['crs uuid'] = str(self.crs_uuid)

    if write_geometry is None:
        write_geometry = (self.grid_representation == 'IjkGrid')

    node = super().create_xml(ext_uuid=ext_uuid,
                              add_as_part=add_as_part,
                              add_relationships=add_relationships,
                              set_as_grid_root=set_as_grid_root,
                              title=title,
                              originator=originator,
                              write_active=write_active,
                              write_geometry=write_geometry,
                              extra_metadata=extra_metadata)

    if add_cell_length_properties:
        axes_lengths_kji = self.axial_lengths_kji()
        dpc = rprop.GridPropertyCollection()
        dpc.set_grid(self)
        for axis in range(3):
            dpc.add_cached_array_to_imported_list(None,
                                                  'regular grid',
                                                  'D' + 'ZYX'[axis],
                                                  discrete=False,
                                                  uom=self.xy_units(),
                                                  property_kind='cell length',
                                                  facet_type='direction',
                                                  facet='KJI'[axis],
                                                  indexable_element='cells',
                                                  count=1,
                                                  const_value=axes_lengths_kji[axis])
        if expand_const_arrays:
            dpc.write_hdf5_for_imported_list(expand_const_arrays=True)
        dpc.create_xml_for_imported_list_and_add_parts_to_model(expand_const_arrays=expand_const_arrays)
        if self.property_collection is None:
            self.property_collection = dpc
        else:
            if self.property_collection.support is None:
                self.property_collection.set_support(support=self)
            self.property_collection.inherit_parts_from_other_collection(dpc)

    return node
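
# Hedged usage sketch (not part of the original source): mirrors the fixture functions earlier in this section,
# but leaves add_cell_length_properties at its default of True so that the three constant 'cell length'
# properties described above are generated alongside the grid xml; import paths and the grid title are assumed.
def _example_regular_grid_with_cell_lengths(model):
    import resqpy.grid as grr
    import resqpy.olio.xml_et as rqet
    grid = grr.RegularGrid(parent_model=model,
                           origin=(0.0, 0.0, 0.0),
                           extent_kji=(3, 5, 5),
                           crs_uuid=rqet.uuid_for_part_root(model.crs_root),
                           set_points_cached=True)
    grid.cache_all_geometry_arrays()
    grid.write_hdf5_from_caches(file=model.h5_file_name(file_must_exist=False), mode='w')
    # add_cell_length_properties defaults to True, so DZ, DY & DX constant properties are created here
    node = grid.create_xml(ext_uuid=model.h5_uuid(), title='example regular grid', write_geometry=True)
    return node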
def import_vdb_ensemble( epc_file, ensemble_run_dir, existing_epc = False, keyword_list = None, property_kind_list = None, vdb_static_properties = True, # if True, static vdb properties are imported vdb_recurrent_properties = True, decoarsen = True, timestep_selection = 'all', create_property_set_per_realization = True, create_property_set_per_timestep = True, create_complete_property_set = False, # remaining arguments only used if existing_epc is False extent_ijk = None, # 3 element numpy vector corp_xy_units = 'm', corp_z_units = 'm', corp_z_inc_down = True, ijk_handedness = 'right', geometry_defined_everywhere = True, treat_as_nan = None, resqml_xy_units = 'm', resqml_z_units = 'm', resqml_z_inc_down = True, shift_to_local = True, local_origin_place = 'centre', # 'centre' or 'minimum' max_z_void = 0.1, # import will fail if vertical void greater than this is encountered split_pillars = True, split_tolerance = 0.01, # applies to each of x, y, z differences progress_fn = None): """Adds properties from all vdb's within an ensemble directory tree to a single RESQML dataset. Referencing a shared grid. args: epc_file (string): filename of epc file to be extended with ensemble properties ensemble_run_dir (string): path of main ensemble run directory; vdb's within this directory tree are source of import existing_epc (boolean, default False): if True, the epc_file must already exist and contain the compatible grid keyword_list (list of strings, optional): if present, only properties for keywords within the list are included property_kind_list (list of strings, optional): if present, only properties which are mapped to these resqml property kinds are included in the import vdb_static_properties (boolean, default True): if False, no static properties are included, regardless of keyword and/or property kind matches vdb_recurrent_properties (boolean, default True): if False, no recurrent properties are included, regardless of keyword and/or property kind matches decoarsen (boolean, default True): if True and ICOARSE property exists for a grid in a case, the associated property data is decoarsened; if False, the property data is as stored in the vdb timestep_selection (string, default 'all'): may be 'first', 'last', 'first and last', or 'all', controlling which reporting timesteps are included when loading recurrent data create_property_set_per_realization (boolean, default True): if True, a property set object is created for each realization create_property_set_per_timestep (boolean, default True): if True, a property set object is created for each timestep included in the recurrent data import create_complete_property_set (boolean, default False): if True, a property set object is created containing all the properties imported; only really useful to differentiate from other properties related to the grid extent_ijk (triple int, optional): this and remaining arguments are only used if existing_epc is False; the extent is only needed in case automatic determination of the extent fails corp_xy_units (string, default 'm'): the units of x & y values in the vdb corp data; should be 'm' (metres) or 'ft' (feet) corp_z_units (string, default 'm'): the units of z values in the vdb corp data; should be 'm' (metres) or 'ft' (feet) corp_z_inc_down (boolean, default True): set to True if corp z values are depth; False if elevation ijk_handedness (string, default 'right'): set to the handedness of the IJK axes in the Nexus model; 'right' or 'left' geometry_defined_everywhere (boolean, default True): set to False 
if inactive cells do not have valid geometry; deprecated - use treat_as_nan argument instead treat_as_nan (string, optional): if not None, one of 'dots', 'ij_dots', 'inactive'; controls which inactive cells have their geometry set to undefined resqml_xy_units (string, default 'm'): the units of x & y values to use in the generated resqml grid; should be 'm' (metres) or 'ft' (feet) resqml_z_units (string, default 'm'): the units of z values to use in the generated resqml grid; should be 'm' (metres) or 'ft' (feet) resqml_z_inc_down (boolean, default True): set to True if resqml z values are to be depth; False for elevations shift_to_local (boolean, default True): if True, the resqml coordinate reference system will use a local origin local_origin_place (string, default 'centre'): where to place the local origin; 'centre' or 'minimum'; only relevant if shift_to_local is True max_z_void (float, default 0.1): the tolerance of voids between layers, in z direction; voids greater than this will cause the grid import to fail split_pillars (boolean, default True): if False, a grid is generated without split pillars split_tolerance (float, default 0.01): the tolerance applied to each of x, y, & z values, beyond which a corner point (and hence pillar) will be split progress_fn (function(float), optional): if present, this function is called at intervals during processing; it must accept one floating point argument which will range from 0.0 to 1.0 returns: resqpy.Model object containing properties for all the realisations; hdf5 and epc files having been updated note: if existing_epc is True, the epc file must already exist and contain one grid (or one grid named ROOT) which must have the correct extent for all realisations within the ensemble; if existing_epc is False, the resqml dataset is created afresh with a grid extracted from the first realisation in the ensemble; either way, the single grid is used as the representative grid in the ensemble resqml dataset being generated; all vdb directories within the directory tree headed by ensemble_run_dir are included in the import; by default all properties will be imported; the keyword_list, property_kind_list, vdb_static_properties, vdb_recurrent_properties and timestep_selection arguments can be used to filter the required properties; if both keyword_list and property_kind_list are provided, a property must match an item in both lists in order to be included; if recurrent properties are being included then all vdb's should contain the same number of reporting steps in their recurrent data and these should relate to the same set of timestamps; timestamp data is extracted from a summary file for the first realisation; no check is made to ensure that reporting timesteps in different realisations are actually for the same date. 
""" assert epc_file.endswith('.epc') assert vdb_static_properties or vdb_recurrent_properties, 'no properties selected for ensemble import' if progress_fn is not None: progress_fn(0.0) # fetch a sorted list of the vdb paths found in the run directory tree ensemble_list = vdb.ensemble_vdb_list(ensemble_run_dir) if len(ensemble_list) == 0: log.error("no vdb's found in run directory tree: " + str(ensemble_run_dir)) return None if not existing_epc: model = import_nexus( epc_file[:-4], # output path and file name without .epc or .h5 extension extent_ijk = extent_ijk, # 3 element numpy vector, in case extent is not automatically determined vdb_file = ensemble_list[0], # vdb input file corp_xy_units = corp_xy_units, corp_z_units = corp_z_units, corp_z_inc_down = corp_z_inc_down, ijk_handedness = ijk_handedness, geometry_defined_everywhere = geometry_defined_everywhere, treat_as_nan = treat_as_nan, resqml_xy_units = resqml_xy_units, resqml_z_units = resqml_z_units, resqml_z_inc_down = resqml_z_inc_down, shift_to_local = shift_to_local, local_origin_place = local_origin_place, # 'centre' or 'minimum' max_z_void = max_z_void, # import will fail if vertical void greater than this is encountered split_pillars = split_pillars, split_tolerance = split_tolerance, # applies to each of x, y, z differences vdb_static_properties = False, vdb_recurrent_properties = False, create_property_set = False) model = rq.Model( epc_file = epc_file) # shouldn't be necessary if just created but it feels safer to re-open the model assert model is not None, 'failed to instantiate model' grid = model.grid() assert grid is not None, 'grid not found' ext_uuid = model.h5_uuid() assert ext_uuid is not None, 'failed to determine uuid for hdf5 file reference' hdf5_file = model.h5_file_name(uuid = ext_uuid) # create reporting timestep time series for recurrent data, if required, based on the first realisation recur_time_series = None recur_ts_uuid = None timestep_list = None if vdb_recurrent_properties: summary_file = ensemble_list[0][:-4] + '.sum' # TODO: check timestep summary file extension, .tssum? 
full_time_series = rts.time_series_from_nexus_summary(summary_file) if full_time_series is None: log.error('failed to extract info from timestep summary file; disabling recurrent property import') vdb_recurrent_properties = False if vdb_recurrent_properties: vdbase = vdb.VDB(ensemble_list[0]) timestep_list = vdbase.list_of_timesteps() if len(timestep_list) == 0: log.warning( 'no ROOT recurrent data found in vdb for first realisation; disabling recurrent property import') vdb_recurrent_properties = False if vdb_recurrent_properties: if timestep_selection == 'all' or ('first' in timestep_selection): fs_index = 0 else: fs_index = -1 first_stamp = full_time_series.timestamp(timestep_list[fs_index]) if first_stamp is None: log.error('first timestamp number selected for import was not found in summary file: ' + str(timestep_list[fs_index])) log.error('disabling recurrent property import') vdb_recurrent_properties = False if vdb_recurrent_properties: recur_time_series = rts.TimeSeries(model, first_timestamp = first_stamp) if timestep_selection == 'all': remaining_list = timestep_list[1:] elif timestep_selection == 'first and last': remaining_list = [timestep_list[-1]] else: remaining_list = [] for timestep_number in remaining_list: stamp = full_time_series.timestamp(timestep_number) if stamp is None: log.error('timestamp number for which recurrent data exists was not found in summary file: ' + str(timestep_number)) log.error('disabling recurrent property import') vdb_recurrent_properties = False recur_time_series = None break recur_time_series.add_timestamp(stamp) if recur_time_series is not None: recur_ts_node = recur_time_series.create_xml(title = 'simulator recurrent array timestep series') recur_ts_uuid = rqet.uuid_for_part_root(recur_ts_node) model.time_series = recur_ts_node # save as the primary time series for the model if create_complete_property_set or create_property_set_per_timestep: complete_collection = rp.GridPropertyCollection() complete_collection.set_grid(grid) else: complete_collection = None # main loop over realisations for realisation in range(len(ensemble_list)): if progress_fn is not None: progress_fn(float(1 + realisation) / float(1 + len(ensemble_list))) vdb_file = ensemble_list[realisation] log.info('processing realisation ' + str(realisation) + ' from: ' + str(vdb_file)) vdbase = vdb.VDB(vdb_file) # case_list = vdbase.cases() # assert len(case_list) > 0, 'no cases found in vdb: ' + str(vdb_file) # if len(case_list) > 1: log.warning('more than one case found in vdb (using first): ' + str(vdb_file)) # vdb_case = case_list[0] # vdbase.set_use_case(vdb_case) vdbase.set_extent_kji(grid.extent_kji) prop_import_collection = rp.GridPropertyCollection(realization = realisation) prop_import_collection.set_grid(grid) decoarsen_array = None if vdb_static_properties: props = vdbase.list_of_static_properties() if len(props) > 0: for keyword in props: if keyword_list is not None and keyword not in keyword_list: continue prop_kind, facet_type, facet = rp.property_kind_and_facet_from_keyword(keyword) if property_kind_list is not None and prop_kind not in property_kind_list and prop_kind not in [ 'active', 'region initialization' ]: continue prop_import_collection.import_vdb_static_property_to_cache(vdbase, keyword, realization = realisation, property_kind = prop_kind, facet_type = facet_type, facet = facet) if decoarsen: decoarsen_array = prop_import_collection.decoarsen_imported_list() if decoarsen_array is not None: log.debug('static properties decoarsened for realisation ' + 
str(realisation)) grid.write_hdf5_from_caches(hdf5_file, mode = 'a', geometry = False, imported_properties = prop_import_collection, write_active = False) prop_import_collection.remove_all_cached_arrays() if vdb_recurrent_properties: r_timestep_list = vdbase.list_of_timesteps() # get list of timesteps for which recurrent files exist if len(r_timestep_list) < recur_time_series.number_of_timestamps(): log.error('insufficient number of reporting timesteps; skipping recurrent data for realisation ' + str(realisation)) else: common_recur_prop_set = None for tni in range(recur_time_series.number_of_timestamps()): if timestep_selection in ['all', 'first']: timestep_number = timestep_list[tni] r_timestep_number = r_timestep_list[tni] elif timestep_selection == 'last' or tni > 0: timestep_number = timestep_list[-1] r_timestep_number = r_timestep_list[-1] else: timestep_number = timestep_list[0] r_timestep_number = r_timestep_list[0] stamp = full_time_series.timestamp(timestep_number) recur_prop_list = vdbase.list_of_recurrent_properties(r_timestep_number) if common_recur_prop_set is None: common_recur_prop_set = set(recur_prop_list) elif recur_prop_list is not None: common_recur_prop_set = common_recur_prop_set.intersection(set(recur_prop_list)) step_import_collection = rp.GridPropertyCollection() step_import_collection.set_grid(grid) # for each property for this timestep, cache array and add to recur prop import collection for this time step if recur_prop_list: for keyword in recur_prop_list: if not keyword or not keyword.isalnum(): continue if keyword_list is not None and keyword not in keyword_list: continue prop_kind, facet_type, facet = rp.property_kind_and_facet_from_keyword(keyword) if property_kind_list is not None and prop_kind not in property_kind_list: continue step_import_collection.import_vdb_recurrent_property_to_cache( vdbase, r_timestep_number, keyword, time_index = tni, # index into recur_time_series realization = realisation, property_kind = prop_kind, facet_type = facet_type, facet = facet) if decoarsen_array is not None: step_import_collection.decoarsen_imported_list(decoarsen_array = decoarsen_array) # extend hdf5 with cached arrays for this timestep # log.info('number of recurrent grid property arrays for timestep: ' + str(timestep_number) + # ' is: ' + str(step_import_collection.number_of_imports())) # log.info('extending hdf5 file with recurrent properties for timestep: ' + str(timestep_number)) grid.write_hdf5_from_caches(hdf5_file, mode = 'a', geometry = False, imported_properties = step_import_collection, write_active = False) # add imported list for this timestep to full imported list prop_import_collection.inherit_imported_list_from_other_collection(step_import_collection) # log.debug('total number of property arrays after timestep: ' + str(timestep_number) + # ' is: ' + str(prop_import_collection.number_of_imports())) # remove cached copies of arrays step_import_collection.remove_all_cached_arrays() if len(prop_import_collection.imported_list) == 0: log.warning('no properties imported for realisation ' + str(realisation)) continue prop_import_collection.create_xml_for_imported_list_and_add_parts_to_model(ext_uuid, time_series_uuid = recur_ts_uuid) if create_property_set_per_realization: prop_import_collection.create_property_set_xml('property set for realization ' + str(realisation)) if complete_collection is not None: complete_collection.inherit_parts_from_other_collection(prop_import_collection) if complete_collection is not None: if create_property_set_per_timestep 
and recur_time_series is not None: for tni in range(recur_time_series.number_of_timestamps()): ts_collection = rp.selective_version_of_collection(complete_collection, time_index = tni) if ts_collection.number_of_parts() > 0: ts_collection.create_property_set_xml('property set for time index ' + str(tni)) if create_complete_property_set: complete_collection.create_property_set_xml('property set for ensemble vdb import') # mark model as modified (will already have happened anyway) model.set_modified() # rewrite epc file log.info('storing updated model in epc file ' + epc_file) model.store_epc(epc_file) if progress_fn is not None: progress_fn(1.0) # return updated resqml model return model
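
# Hedged usage sketch (not part of the original source): the paths and filter lists below are hypothetical;
# the call assumes an epc file already containing the shared ensemble grid, and uses only arguments described
# in the import_vdb_ensemble docstring above.
def _example_import_vdb_ensemble():
    model = import_vdb_ensemble('shared_grid.epc',                 # existing dataset holding the common grid
                                '/data/field_x/ensemble_run',      # hypothetical ensemble run directory tree
                                existing_epc=True,
                                keyword_list=['POR', 'SW'],        # only porosity & water saturation keywords
                                timestep_selection='last',         # recurrent data for the final report step only
                                create_property_set_per_realization=True)
    return model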
def import_nexus( resqml_file_root, # output path and file name without .epc or .h5 extension extent_ijk = None, # 3 element numpy vector vdb_file = None, # vdb input file: either this or corp_file should be not None vdb_case = None, # if None, first case in vdb is used (usually a vdb only holds one case) corp_file = None, # corp ascii input file: nexus corp data without keyword corp_bin_file = None, # corp binary file: nexus corp data in bespoke binary format corp_xy_units = 'm', corp_z_units = 'm', corp_z_inc_down = True, ijk_handedness = 'right', corp_eight_mode = False, geometry_defined_everywhere = True, treat_as_nan = None, active_mask_file = None, use_binary = False, # this refers to pure binary arrays, not corp bin format resqml_xy_units = 'm', resqml_z_units = 'm', resqml_z_inc_down = True, shift_to_local = False, local_origin_place = 'centre', # 'centre' or 'minimum' max_z_void = 0.1, # vertical gaps greater than this will introduce k gaps intp resqml grid split_pillars = True, split_tolerance = 0.01, # applies to each of x, y, z differences property_array_files = None, # actually, list of (filename, keyword, uom, time_index, null_value, discrete) summary_file = None, # used to extract timestep dates when loading recurrent data from vdb vdb_static_properties = True, # if True, static vdb properties are imported (only relevant if vdb_file is not None) vdb_recurrent_properties = False, timestep_selection = 'all', # 'first', 'last', 'first and last', 'all', or list of ints being reporting timestep numbers use_compressed_time_series = True, decoarsen = True, # where ICOARSE is present, redistribute data to uncoarse cells ab_property_list = None, # list of (file_name, keyword, property_kind, facet_type, facet, uom, time_index, null_value, discrete) create_property_set = False, ensemble_case_dirs_root = None, # path upto but excluding realisation number ensemble_property_dictionary = None, # dictionary mapping title (or keyword) to (filename, property_kind, facet_type, facet, # uom, time_index, null_value, discrete) ensemble_size_limit = None, grid_title = 'ROOT', mode = 'w', progress_fn = None): """Read a simulation grid geometry and optionally grid properties. Input may be from nexus ascii input files, or nexus vdb output. Arguments: resqml_file_root (str): output path and file name without .epc or .h5 extension extent_ijk (triple float, optional): ijk extents (fortran ordering) vdb_file (str, optional): vdb input file, either this or corp_file should be not None. Required if importing from a vdb vdb_case (str, optional): required if the vdb contains more than one case. If None, first case in vdb is used corp_file (str, optional): required if importing from corp ascii file. 
corp ascii input file: nexus corp data without keyword corp_bin_file (str, optional): required if importing from corp binary file corp_xy_units (str, default 'm'): xy length units corp_z_units (str, default 'm'): z length units corp_z_inc_down (bool, default True): if True z values increase with depth ijk_handedness (str, default 'right'): 'right' or 'left' corp_eight_mode (bool, default False): if True the ordering of corner point data is in nexus EIGHT mode geometry_defined_everywhere (bool, default True): if False then inactive cells are marked as not having geometry treat_as_nan (float, default None): if a value is provided corner points with this value will be assigned nan active_mask_file (str, default None): ascii property file holding values 0 or 1, with 1 indicating active cells use_binary (bool, default False): if True a cached binary version of ascii files will be used (pure binary, not corp bin format) resqml_xy_units (str, default 'm'): output xy units for resqml file resqml_z_units (str, default 'm'): output z units for resqml file resqml_z_inc_down (bool, default True): if True z values increase with depth for output resqml file shift_to_local (bool, default False): if True then a local origin will be used in the CRS local_origin_place (str, default 'centre'): 'centre' or 'minimum'. If 'centre' the local origin is placed at the centre of the grid; ignored if shift_to_local is False max_z_void (float, default 0.1): maximum z gap between vertically neighbouring corner points. Vertical gaps greater than this will introduce k gaps into resqml grid. Units are corp z units split_pillars (bool, default True): if False an unfaulted grid will be generated split_tolerance (float, default 0.01): maximum distance between neighbouring corner points before a pillar is considered 'split'. Applies to each of x, y, z differences property_array_files (list, default None): list of (filename, keyword, uom, time_index, null_value, discrete) summary_file (str, default None): nexus output summary file, used to extract timestep dates when loading recurrent data from vdb vdb_static_properties (bool, default True): if True, static vdb properties are imported (only relevant if vdb_file is not None) vdb_recurrent_properties (bool, default False): # if True, recurrent vdb properties are imported (only relevant if vdb_file is not None) timestep_selection (str, default 'all): 'first', 'last', 'first and last', 'all', or list of ints being reporting timestep numbers. 
Ignored if vdb_recurrent_properties is False use_compressed_time_series (bool, default True): generates reduced time series containing timesteps with recurrent properties from vdb, rather than full nexus summary time series decoarsen (bool, default True): where ICOARSE is present, redistribute data to uncoarse cells ab_property_list (list, default None): list of (file_name, keyword, property_kind, facet_type, facet, uom, time_index, null_value, discrete) create_property_set (bool, default False): if True a resqml PropertySet is created ensemble_case_dirs_root (str, default None): path up to but excluding realisation number ensemble_property_dictionary (str, default None): dictionary mapping title (or keyword) to (filename, property_kind, facet_type, facet, uom, time_index, null_value, discrete) ensemble_size_limit (int, default None): if present processing of ensemble will terminate after this number of cases is reached grid_title (str, default 'ROOT'): grid citation title mode (str, default 'w'): 'w' or 'a', mode to write or append to hdf5 progress_fn (function, default None): if present function must have one floating argument with value increasing from 0 to 1, and is called at intervals to indicate progress Returns: resqml model in memory & written to disc """ if resqml_file_root.endswith('.epc'): resqml_file_root = resqml_file_root[:-4] assert mode in ['w', 'a'] if vdb_file: using_vdb = True corp_file = corp_bin_file = None grid_title = grid_title.upper() log.info('starting import of Nexus ' + str(grid_title) + ' corp from vdb ' + str(vdb_file)) tm.log_nexus_tm('info') vdbase = vdb.VDB(vdb_file) case_list = vdbase.cases() assert len(case_list) > 0, 'no cases found in vdb' if vdb_case is None: vdb_case = case_list[0] else: assert vdb_case in case_list, 'case ' + vdb_case + ' not found in vdb: ' + vdb_file vdbase.set_use_case(vdb_case) assert grid_title in vdbase.list_of_grids(), 'grid ' + str(grid_title) + ' not found in vdb' if extent_ijk is not None: vdbase.set_extent_kji(tuple(reversed(extent_ijk))) log.debug('using case ' + vdb_case + ' and grid ' + grid_title + ' from vdb') if vdb_recurrent_properties and not summary_file: if vdb_file.endswith('.vdb.zip'): summary_file = vdb_file[:-8] + '.sum' elif vdb_file.endswith('.vdb') or vdb_file.endswith('.zip'): summary_file = vdb_file[:-4] + '.sum' else: sep = vdb_file.rfind(os.sep) dot = vdb_file[sep + 1:].find('.') if dot > 0: summary_file = vdb_file[:sep + 1 + dot] + ',sum' else: summary_file = vdb_file + '.sum' cp_array = vdbase.grid_corp(grid_title) cp_extent_kji = cp_array.shape[:3] if cp_extent_kji[:2] == (1, 1): # auto determination of extent failed assert extent_ijk is not None, 'failed to determine extent of grid from corp data' (ni, nj, nk) = extent_ijk assert cp_extent_kji[2] == ni * nj * nk, 'number of cells in grid corp does not match extent' cp_extent = (nk, nj, ni, 2, 2, 2, 3) # (nk, nj, ni, kp, jp, ip, xyz) cp_array = cp_array.reshape(cp_extent) elif extent_ijk is not None: for axis in range(3): assert cp_extent_kji[axis] == extent_ijk[ 2 - axis], 'extent of grid corp data from vdb does not match that supplied' elif corp_file or corp_bin_file: if corp_bin_file: corp_file = None using_vdb = False # geometry_defined_everywhere = (active_mask_file is None) log.info('starting import of Nexus corp file ' + str(corp_file if corp_file else corp_bin_file)) tm.log_nexus_tm('info') if extent_ijk is None: # auto detect extent extent_kji = None cp_extent = None else: (ni, nj, nk) = extent_ijk extent_kji = np.array((nk, nj, ni), dtype 
= 'int') cp_extent = (nk, nj, ni, 2, 2, 2, 3) # (nk, nj, ni, kp, jp, ip, xyz) log.debug('reading and resequencing corp data') if corp_bin_file: # bespoke nexus corp bin format, not to be confused with pure binary files used below cp_array = ld.load_corp_array_from_file( corp_bin_file, extent_kji, corp_bin = True, comment_char = None, # comment char will be detected automatically data_free_of_comments = False, use_binary = use_binary) else: cp_binary_file = abt.cp_binary_filename( corp_file, nexus_ordering = False) # pure binary, not bespoke corp bin used above recent_binary_exists = ld.file_exists(cp_binary_file, must_be_more_recent_than_file = corp_file) cp_array = None if use_binary and (extent_ijk is not None) and recent_binary_exists: try: cp_array = ld.load_array_from_file(cp_binary_file, cp_extent, use_binary = True) except Exception: cp_array = None if cp_array is None: cp_array = ld.load_corp_array_from_file( corp_file, extent_kji, corp_bin = False, comment_char = None, # comment char will be detected automatically data_free_of_comments = False, use_binary = use_binary) if use_binary: wd.write_pure_binary_data(cp_binary_file, cp_array) # NB: this binary file is resequenced, not in nexus ordering! else: raise ValueError('vdb_file and corp_file are both None in import_nexus() call') if cp_array is None: log.error('failed to create corner point array') return None if extent_ijk is None: cp_extent = cp_array.shape extent_kji = cp_extent[:3] (nk, nj, ni) = extent_kji extent_ijk = (ni, nj, nk) else: ni, nj, nk = extent_ijk # convert units log.debug('Converting units') if corp_xy_units == corp_z_units and resqml_xy_units == resqml_z_units: bwam.convert_lengths(cp_array, corp_xy_units, resqml_xy_units) else: bwam.convert_lengths(cp_array[:, :, :, :, :, :, 0:1], corp_xy_units, resqml_xy_units) bwam.convert_lengths(cp_array[:, :, :, :, :, :, 2], corp_z_units, resqml_z_units) # invert z if required if resqml_z_inc_down != corp_z_inc_down: log.debug('Inverting z values') inversion = np.negative(cp_array[:, :, :, :, :, :, 2]) cp_array[:, :, :, :, :, :, 2] = inversion # read active cell mask log.debug('Setting up active cell mask') active_mask = inactive_mask = None if vdb_file: assert vdbase is not None, 'problem with vdb object' inactive_mask = vdbase.grid_kid_inactive_mask(grid_title) # TODO: check conversion of KID to boolean for LGRs if inactive_mask is not None: log.debug('using kid array as inactive cell mask') active_mask = np.logical_not(inactive_mask) else: log.warning('kid array not found, using unpack array as active cell indicator') unp = vdbase.grid_unpack(grid_title) assert unp is not None, 'failed to load active cell indicator mask from vdb kid or unpack arrays' active_mask = np.empty((nk, nj, ni), dtype = 'bool') active_mask[:] = (unp > 0) inactive_mask = np.logical_not(active_mask) elif active_mask_file: active_mask = ld.load_array_from_file(active_mask_file, extent_kji, data_type = 'bool', use_binary = use_binary) if active_mask is None: log.error('failed to load active cell indicator array from file: ' + active_mask_file) else: inactive_mask = np.logical_not(active_mask) # will crash if active mask load failed # shift grid geometry to local crs local_origin = np.zeros(3) if shift_to_local: log.debug('shifting to local origin at ' + local_origin_place) if local_origin_place == 'centre': local_origin = np.nanmean(cp_array, axis = (0, 1, 2, 3, 4, 5)) elif local_origin_place == 'minimum': local_origin = np.nanmin(cp_array, axis = (0, 1, 2, 3, 4, 5)) - 1.0 # The -1 ensures all 
coords are >0 else: assert (False) cp_array -= local_origin # create empty resqml model log.debug('creating an empty resqml model') if mode == 'w': model = rq.Model(resqml_file_root, new_epc = True, create_basics = True, create_hdf5_ext = True) else: model = rq.Model(resqml_file_root) assert model is not None ext_uuid = model.h5_uuid() assert ext_uuid is not None # create coodinate reference system (crs) in model and set references in grid object log.debug('creating coordinate reference system') crs_uuids = model.uuids(obj_type = 'LocalDepth3dCrs') new_crs = rqc.Crs(model, x_offset = local_origin[0], y_offset = local_origin[1], z_offset = local_origin[2], xy_units = resqml_xy_units, z_units = resqml_z_units, z_inc_down = resqml_z_inc_down) new_crs.create_xml(reuse = True) crs_uuid = new_crs.uuid grid = grid_from_cp(model, cp_array, crs_uuid, active_mask = active_mask, geometry_defined_everywhere = geometry_defined_everywhere, treat_as_nan = treat_as_nan, max_z_void = max_z_void, split_pillars = split_pillars, split_tolerance = split_tolerance, ijk_handedness = ijk_handedness, known_to_be_straight = False) # create hdf5 file using arrays cached in grid above log.info('writing grid geometry to hdf5 file ' + resqml_file_root + '.h5') grid.write_hdf5_from_caches(resqml_file_root + '.h5', mode = mode, write_active = False) # build xml for grid geometry log.debug('building xml for grid') ijk_node = grid.create_xml(ext_uuid = None, title = grid_title, add_as_part = True, add_relationships = True) assert ijk_node is not None, 'failed to create IjkGrid node in xml tree' # impprt property arrays into a collection prop_import_collection = None decoarsen_array = None ts_node = None ts_uuid = None if active_mask is None and grid.inactive is not None: active_mask = np.logical_not(grid.inactive) if using_vdb: prop_import_collection = rp.GridPropertyCollection() if vdb_static_properties: props = vdbase.grid_list_of_static_properties(grid_title) if len(props) > 0: prop_import_collection = rp.GridPropertyCollection() prop_import_collection.set_grid(grid) for keyword in props: prop_import_collection.import_vdb_static_property_to_cache(vdbase, keyword, grid_name = grid_title) # if active_mask is not None: # prop_import_collection.add_cached_array_to_imported_list(active_mask, active_mask_file, 'ACTIVE', property_kind = 'active', # discrete = True, uom = None, time_index = None, null_value = None) elif property_array_files is not None and len(property_array_files) > 0: prop_import_collection = rp.GridPropertyCollection() prop_import_collection.set_grid(grid) for (p_filename, p_keyword, p_uom, p_time_index, p_null_value, p_discrete) in property_array_files: prop_import_collection.import_nexus_property_to_cache(p_filename, p_keyword, grid.extent_kji, discrete = p_discrete, uom = p_uom, time_index = p_time_index, null_value = p_null_value, use_binary = use_binary) # if active_mask is not None: # prop_import_collection.add_cached_array_to_imported_list(active_mask, active_mask_file, 'ACTIVE', property_kind = 'active', # discrete = True, uom = None, time_index = None, null_value = None) # ab_property_list: list of (filename, keyword, property_kind, facet_type, facet, uom, time_index, null_value, discrete) elif ab_property_list is not None and len(ab_property_list) > 0: prop_import_collection = rp.GridPropertyCollection() prop_import_collection.set_grid(grid) for (p_filename, p_keyword, p_property_kind, p_facet_type, p_facet, p_uom, p_time_index, p_null_value, p_discrete) in ab_property_list: 
prop_import_collection.import_ab_property_to_cache(p_filename, p_keyword, grid.extent_kji, discrete = p_discrete, property_kind = p_property_kind, facet_type = p_facet_type, facet = p_facet, uom = p_uom, time_index = p_time_index, null_value = p_null_value) # if active_mask is not None: # prop_import_collection.add_cached_array_to_imported_list(active_mask, active_mask_file, 'ACTIVE', property_kind = 'active', # discrete = True, uom = None, time_index = None, null_value = None) # ensemble_property_dictionary: mapping title (or keyword) to # (filename, property_kind, facet_type, facet, uom, time_index, null_value, discrete) elif ensemble_case_dirs_root and ensemble_property_dictionary: case_path_list = glob.glob(ensemble_case_dirs_root + '*') assert len(case_path_list) > 0, 'no case directories found with path starting: ' + str(ensemble_case_dirs_root) case_number_place = len(ensemble_case_dirs_root) case_zero_used = False case_count = 0 for case_path in case_path_list: if ensemble_size_limit is not None and case_count >= ensemble_size_limit: log.warning('stopping after reaching ensemble size limit') break # NB. import each case individually rather than holding property arrays for whole ensemble in memory at once prop_import_collection = rp.GridPropertyCollection() prop_import_collection.set_grid(grid) tail = case_path[case_number_place:] try: case_number = int(tail) assert case_number >= 0, 'negative case number encountered' if case_number == 0: assert not case_zero_used, 'more than one case number evaluated to zero' case_zero_used = True except Exception: log.error('failed to determine case number for tail: ' + str(tail)) continue for keyword in ensemble_property_dictionary.keys(): (filename, p_property_kind, p_facet_type, p_facet, p_uom, p_time_index, p_null_value, p_discrete) = ensemble_property_dictionary[keyword] p_filename = os.path.join(case_path, filename) if not os.path.exists(p_filename): log.error('missing property file: ' + p_filename) continue prop_import_collection.import_nexus_property_to_cache(p_filename, keyword, grid.extent_kji, discrete = p_discrete, uom = p_uom, time_index = p_time_index, null_value = p_null_value, property_kind = p_property_kind, facet_type = p_facet_type, facet = p_facet, realization = case_number, use_binary = False) if len(prop_import_collection.imported_list) > 0: # create hdf5 file using arrays cached in grid above log.info('writing properties to hdf5 file ' + str(resqml_file_root) + '.h5 for case: ' + str(case_number)) grid.write_hdf5_from_caches(resqml_file_root + '.h5', geometry = False, imported_properties = prop_import_collection, write_active = False) # add imported properties parts to model, building property parts list prop_import_collection.create_xml_for_imported_list_and_add_parts_to_model(ext_uuid, time_series_uuid = ts_uuid) if create_property_set: prop_import_collection.create_property_set_xml('realisation ' + str(case_number)) case_count += 1 # remove cached static property arrays from memory # prop_import_collection.remove_all_cached_arrays() del prop_import_collection prop_import_collection = None log.info(f'Nexus ascii ensemble input processed {case_count} cases') tm.log_nexus_tm('info') # create hdf5 file using arrays cached in grid above if prop_import_collection is not None and len(prop_import_collection.imported_list) > 0: if decoarsen: decoarsen_array = prop_import_collection.decoarsen_imported_list() if decoarsen_array is not None: log.info('static properties decoarsened') 
prop_import_collection.add_cached_array_to_imported_list(decoarsen_array, 'decoarsen', 'DECOARSEN', discrete = True, uom = None, time_index = None, null_value = -1, property_kind = 'discrete') log.info('writing ' + str(len(prop_import_collection.imported_list)) + ' properties to hdf5 file ' + resqml_file_root + '.h5') elif not ensemble_case_dirs_root: log.info('no static grid properties to import') prop_import_collection = None grid.write_hdf5_from_caches(resqml_file_root + '.h5', geometry = False, imported_properties = prop_import_collection, write_active = True) # remove cached static property arrays from memory if prop_import_collection is not None: prop_import_collection.remove_all_cached_arrays() ts_selection = None if using_vdb and vdb_recurrent_properties and timestep_selection is not None and str(timestep_selection) != 'none': if prop_import_collection is None: prop_import_collection = rp.GridPropertyCollection() prop_import_collection.set_grid(grid) # extract timestep dates from summary file (this info might be hidden in the recurrent binary files but I couldn't find it) # todo: create cut down time series from recurrent files and differentiate between reporting time index and mapped time step number full_time_series = rts.time_series_from_nexus_summary(summary_file) if full_time_series is None: log.error('failed to fetch time series from Nexus summary file; recurrent data excluded') tm.log_nexus_tm('error') else: full_time_series.set_model(model) timestep_list = vdbase.grid_list_of_timesteps( grid_title) # get list of timesteps for which recurrent files exist recur_time_series = None for timestep_number in timestep_list: if isinstance(timestep_selection, list): if timestep_number not in timestep_selection: continue else: if timestep_selection == 'first': if timestep_number != timestep_list[0]: break elif timestep_selection == 'last': if timestep_number != timestep_list[-1]: continue elif timestep_selection == 'first and last': if timestep_number != timestep_list[0] and timestep_number != timestep_list[-1]: continue # default to importing all timesteps stamp = full_time_series.timestamp(timestep_number) if stamp is None: log.error('timestamp number for which recurrent data exists was not found in summary file: ' + str(timestep_number)) continue recur_prop_list = vdbase.grid_list_of_recurrent_properties(grid_title, timestep_number) common_recur_prop_set = set() if recur_time_series is None: recur_time_series = rts.TimeSeries(model, first_timestamp = stamp) if recur_prop_list is not None: common_recur_prop_set = set(recur_prop_list) else: recur_time_series.add_timestamp(stamp) if recur_prop_list is not None: common_recur_prop_set = common_recur_prop_set.intersection(set(recur_prop_list)) step_import_collection = rp.GridPropertyCollection() step_import_collection.set_grid(grid) # for each property for this timestep, cache array and add to recur prop import collection for this time step if recur_prop_list: for keyword in recur_prop_list: if not keyword or not keyword.isalnum(): continue prop_kind, facet_type, facet = rp.property_kind_and_facet_from_keyword(keyword) step_import_collection.import_vdb_recurrent_property_to_cache( vdbase, timestep_number, # also used as time_index?
keyword, grid_name = grid_title, property_kind = prop_kind, facet_type = facet_type, facet = facet) # extend hdf5 with cached arrays for this timestep log.info('number of recurrent grid property arrays for timestep: ' + str(timestep_number) + ' is: ' + str(step_import_collection.number_of_imports())) if decoarsen_array is not None: log.info('decoarsening recurrent properties for timestep: ' + str(timestep_number)) step_import_collection.decoarsen_imported_list(decoarsen_array = decoarsen_array) log.info('extending hdf5 file with recurrent properties for timestep: ' + str(timestep_number)) grid.write_hdf5_from_caches(resqml_file_root + '.h5', mode = 'a', geometry = False, imported_properties = step_import_collection, write_active = False) # add imported list for this timestep to full imported list prop_import_collection.inherit_imported_list_from_other_collection(step_import_collection) log.debug('total number of property arrays after timestep: ' + str(timestep_number) + ' is: ' + str(prop_import_collection.number_of_imports())) # remove cached copies of arrays step_import_collection.remove_all_cached_arrays() ts_node = full_time_series.create_xml(title = 'simulator full timestep series') model.time_series = ts_node # save as the primary time series for the model ts_uuid = rqet.uuid_for_part_root(ts_node) # create xml for recur_time_series (as well as for full_time_series) and add as part; not needed? if recur_time_series is not None: rts_node = recur_time_series.create_xml(title = 'simulator recurrent array timestep series') if use_compressed_time_series: ts_uuid = rqet.uuid_for_part_root(rts_node) ts_selection = timestep_list # add imported properties parts to model, building property parts list if prop_import_collection is not None and prop_import_collection.imported_list is not None: prop_import_collection.set_grid(grid) # update to pick up on recently created xml root node for grid prop_import_collection.create_xml_for_imported_list_and_add_parts_to_model( ext_uuid, time_series_uuid = ts_uuid, selected_time_indices_list = ts_selection) if create_property_set: prop_import_collection.create_property_set_xml('property set for import for grid ' + str(grid_title)) # mark model as modified (will already have happened anyway) model.set_modified() # create epc file log.info('storing model in epc file ' + resqml_file_root + '.epc') model.store_epc(resqml_file_root + '.epc') # return resqml model return model
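# Illustrative usage sketch, not part of the original module: the commented lines below show how
# the Nexus corner point import above might be driven from a script. They assume the function is
# exposed as resqpy.rq_import.import_nexus() and that the keyword arguments correspond to the
# local names used in the body above (corp_file, corp_xy_units, shift_to_local, etc.); the file
# names and unit choices are hypothetical, so treat this as a hedged example rather than a
# definitive call signature.
#
# import resqpy.rq_import as rqi
#
# model = rqi.import_nexus(
#     'my_model',                       # resqml_file_root: my_model.epc and my_model.h5 will be written
#     corp_file = 'my_model_corp.dat',  # ascii corner point data exported from Nexus
#     corp_xy_units = 'ft',
#     corp_z_units = 'ft',
#     resqml_xy_units = 'm',
#     resqml_z_units = 'm',
#     shift_to_local = True,
#     local_origin_place = 'centre',
#     use_binary = True)                # allows a pure binary copy of the corp data to be cached and reused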
def coarsened_grid(epc_file, source_grid, fine_coarse, inherit_properties=False, inherit_realization=None, inherit_all_realizations=False, set_parent_window=None, infill_missing_geometry=True, new_grid_title=None, new_epc_file=None): """Generates a coarsened version of an unsplit source grid, optionally inheriting properties. arguments: epc_file (string): file name to rewrite the model's xml to; if source grid is None, model is loaded from this file source_grid (grid.Grid object, optional): if None, the epc_file is loaded and it should contain one ijk grid object (or one 'ROOT' grid) which is used as the source grid fine_coarse (resqpy.olio.fine_coarse.FineCoarse object): the mapping between cells in the fine (source) and coarse (output) grids inherit_properties (boolean, default False): if True, the new grid will have a copy of any properties associated with the source grid, with values upscaled or sampled inherit_realization (int, optional): realization number for which properties will be inherited; ignored if inherit_properties is False inherit_all_realizations (boolean, default False): if True (and inherit_realization is None), properties for all realizations will be inherited; if False, only properties with a realization of None are inherited; ignored if inherit_properties is False or inherit_realization is not None set_parent_window (boolean or str, optional): if True or 'parent', the coarsened grid has its parent window attribute set; if False, the parent window is not set; if None, the default will be True if new_epc_file is None, False otherwise; if 'grandparent' then an intervening parent window with no refinement or coarsening will be skipped and its box used in the parent window for the new grid, relating directly to the original grid infill_missing_geometry (boolean, default True): if True, an attempt is made to generate grid geometry in the source grid wherever it is undefined; if False, any undefined geometry will result in an assertion failure new_grid_title (string): used as the citation title text for the new grid object new_epc_file (string, optional): if None, the source epc_file is extended with the new grid object; if present, a new epc file (& associated h5 file) is created to contain the coarsened grid (& crs) returns: new grid object being the coarsened grid; the epc and hdf5 files are written to as an intentional side effect note: this function coarsens an entire grid; to coarsen a local area of a grid, first use the extract_box function and then use this function on the extracted grid; in such a case, using a value of 'grandparent' for the set_parent_window argument will relate the coarsened grid back to the original """ new_epc_file, model, source_grid = _establish_files_and_model( epc_file, new_epc_file, source_grid) if set_parent_window is None: set_parent_window = (new_epc_file is None) assert fine_coarse is not None and isinstance(fine_coarse, fc.FineCoarse) assert not source_grid.has_split_coordinate_lines, 'coarsening only available for unsplit grids: use other functions to heal faults first' if infill_missing_geometry and ( not source_grid.geometry_defined_for_all_cells() or not source_grid.geometry_defined_for_all_pillars()): log.debug('attempting infill of geometry missing in source grid') source_grid.set_geometry_is_defined(treat_as_nan=None, treat_dots_as_nan=True, complete_partial_pillars=True, nullify_partial_pillars=False, complete_all=True) assert source_grid.geometry_defined_for_all_pillars(), 'coarsening requires geometry to be defined for all pillars'
assert source_grid.geometry_defined_for_all_cells(), 'coarsening requires geometry to be defined for all cells' assert not source_grid.k_gaps, 'coarsening of grids with k gaps not currently supported' assert tuple(fine_coarse.fine_extent_kji) == tuple(source_grid.extent_kji), \ 'fine_coarse mapping fine extent does not match that of source grid' fine_coarse.assert_valid() source_grid.cache_all_geometry_arrays() source_points = source_grid.points_ref().reshape( (source_grid.nk + 1), (source_grid.nj + 1) * (source_grid.ni + 1), 3) # create a new, empty grid object grid = grr.Grid(model) # inherit attributes from source grid grid.grid_representation = 'IjkGrid' grid.extent_kji = fine_coarse.coarse_extent_kji grid.nk, grid.nj, grid.ni = grid.extent_kji[0], grid.extent_kji[1], grid.extent_kji[2] grid.k_direction_is_down = source_grid.k_direction_is_down grid.grid_is_right_handed = source_grid.grid_is_right_handed grid.pillar_shape = source_grid.pillar_shape grid.has_split_coordinate_lines = False grid.split_pillars_count = None # inherit the coordinate reference system used by the grid geometry grid.crs_uuid = source_grid.crs_uuid if source_grid.model is not model: model.duplicate_node(source_grid.model.root_for_uuid(grid.crs_uuid), add_as_part=True) grid.crs = rqc.Crs(model, grid.crs_uuid) coarsened_points = np.empty( (grid.nk + 1, (grid.nj + 1) * (grid.ni + 1), 3)) # note: gets reshaped after being populated k_ratio_constant = fine_coarse.constant_ratios[0] if k_ratio_constant: k_indices = None else: k_indices = np.empty(grid.nk + 1, dtype=int) k_indices[0] = 0 for k in range(grid.nk): k_indices[k + 1] = k_indices[k] + fine_coarse.vector_ratios[0][k] assert k_indices[-1] == source_grid.nk for cjp in range(grid.nj + 1): for cji in range(grid.ni + 1): natural_coarse_pillar = cjp * (grid.ni + 1) + cji natural_fine_pillar = fine_coarse.fine_for_coarse_natural_pillar_index( natural_coarse_pillar) if k_ratio_constant: coarsened_points[:, natural_coarse_pillar, :] = source_points[ 0:source_grid.nk + 1:k_ratio_constant, natural_fine_pillar, :] else: coarsened_points[:, natural_coarse_pillar, :] = source_points[ k_indices, natural_fine_pillar, :] grid.points_cached = coarsened_points.reshape( ((grid.nk + 1), (grid.nj + 1), (grid.ni + 1), 3)) grid.geometry_defined_for_all_pillars_cached = True grid.geometry_defined_for_all_cells_cached = True grid.array_cell_geometry_is_defined = np.full(tuple(grid.extent_kji), True, dtype=bool) collection = None if inherit_properties: source_collection = source_grid.extract_property_collection() if source_collection is not None: collection = rqp.GridPropertyCollection() collection.set_grid(grid) collection.extend_imported_list_copying_properties_from_other_grid_collection( source_collection, coarsening=fine_coarse, realization=inherit_realization, copy_all_realizations=inherit_all_realizations) _set_parent_window_in_grid(set_parent_window, source_grid, grid, fine_coarse) # write grid if new_grid_title is None or len(new_grid_title) == 0: new_grid_title = 'grid coarsened from ' + str( rqet.citation_title_for_node(source_grid.root)) model.h5_release() if new_epc_file: _write_grid(new_epc_file, grid, property_collection=collection, grid_title=new_grid_title, mode='w') else: ext_uuid, _ = model.h5_uuid_and_path_for_node( rqet.find_nested_tags(source_grid.root, ['Geometry', 'Points']), 'Coordinates') _write_grid(epc_file, grid, ext_uuid=ext_uuid, property_collection=collection, grid_title=new_grid_title, mode='a') return grid
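# Illustrative usage sketch, not part of the original module: coarsen an existing grid by a factor
# of two in each direction, inheriting its properties. The epc file name and extents are
# hypothetical; the FineCoarse constructor is assumed to take the fine and coarse extents in kji
# order, and the per-axis ratios must be configured via the resqpy.olio.fine_coarse API (the code
# above relies on the mapping's constant_ratios and vector_ratios being populated and on
# assert_valid() passing).
#
# import resqpy.olio.fine_coarse as fc
#
# fine_coarse_map = fc.FineCoarse((4, 20, 30), (2, 10, 15))  # fine extent kji, coarse extent kji
# # ... configure a ratio of 2 for each of the k, j and i axes here, per the FineCoarse API ...
# fine_coarse_map.assert_valid()
# coarse = coarsened_grid('existing_model.epc', source_grid = None, fine_coarse = fine_coarse_map,
#                         inherit_properties = True, new_grid_title = 'coarsened grid')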