def _establish_files_and_models(epc_file, new_epc_file, trajectory_epc, source_grid):
    """Resolve epc / trajectory file names and load the model(s) and source grid.

    Returns a tuple (new_epc_file, trajectory_epc, model, traj_model, source_grid);
    new_epc_file and trajectory_epc are nulled out when they refer to the same file
    as epc_file, so callers can treat None as 'modify in place'.
    """
    assert epc_file or new_epc_file, 'epc file name not specified'
    # treat new_epc_file as redundant if it names (literally or via the filesystem) the same file as epc_file
    if new_epc_file and epc_file and (
        (new_epc_file == epc_file) or
        (os.path.exists(new_epc_file) and os.path.exists(epc_file) and
         os.path.samefile(new_epc_file, epc_file))):
        new_epc_file = None
    assert epc_file or source_grid is not None, 'neither epc file name nor source grid supplied'
    # a trajectory epc identical to the main epc is equivalent to no separate trajectory model
    if trajectory_epc == epc_file:
        trajectory_epc = None
    if epc_file:
        model = rq.Model(epc_file)
        if source_grid is None:
            source_grid = model.grid()  # requires there to be exactly one grid in model (or one named ROOT)
    else:
        model = source_grid.model
    assert source_grid.grid_representation in ['IjkGrid', 'IjkBlockGrid']
    assert model is not None
    # trajectory parts live either in the main model or in a separately opened model
    if trajectory_epc is None:
        traj_model = model
    else:
        traj_model = rq.Model(trajectory_epc)
    assert traj_model is not None
    return new_epc_file, trajectory_epc, model, traj_model, source_grid
def test_pointset_from_polyline(example_model_and_crs):
    """Round trip: persist a Polyline, build a PointSet from the reloaded line, verify coords."""
    # Set up a PolyLine and save to resqml file
    model, crs = example_model_and_crs
    coords = np.array([[1, 2, 3, 4, 5], [6, 7, 8, 9, 10], [12, 13, 14, 15, 16]]).T
    lines = resqpy.lines.Polyline(parent_model=model,
                                  set_coord=coords,
                                  title='Polylines',
                                  set_crs=crs.uuid,
                                  set_bool=False)
    lines.write_hdf5()
    lines.create_xml()
    model.store_epc()
    # Reload the model, and generate pointset using the polyline
    model = rq.Model(epc_file=model.epc_file)
    reload_lines = resqpy.lines.Polyline(parent_model=model, uuid=lines.uuid)
    points = resqpy.surface.PointSet(parent_model=model, polyline=reload_lines, crs_uuid=crs.uuid)
    points.write_hdf5()
    points.create_xml()
    model.store_epc()
    # Reload the model, and ensure the coordinates are as expected
    model = rq.Model(epc_file=model.epc_file)
    reload = resqpy.surface.PointSet(parent_model=model, uuid=points.uuid)
    assert_array_almost_equal(reload.full_array_ref(), coords)
def test_model_copy_all_parts(example_model_with_properties):
    """Exercise Model.copy_all_parts_from_other_model with and without crs consolidation.

    Also checks that the copied model survives a store / re-open cycle.
    """
    epc = example_model_with_properties.epc_file
    dir = example_model_with_properties.epc_directory
    copied_epc = os.path.join(dir, 'copied.epc')
    # test copying without consolidation
    original = rq.Model(epc)
    assert original is not None
    copied = rq.new_model(copied_epc)
    copied.copy_all_parts_from_other_model(original, consolidate=False)
    assert set(original.uuids()) == set(copied.uuids())
    assert set(original.parts()) == set(copied.parts())
    # test without consolidation of two crs objects
    copied = rq.new_model(copied_epc)
    new_crs = rqc.Crs(copied)
    new_crs.create_xml()
    copied.copy_all_parts_from_other_model(original, consolidate=False)
    # without consolidation, the pre-existing crs is kept alongside the copied one
    assert len(copied.parts()) == len(original.parts()) + 1
    assert set(original.parts()).issubset(set(copied.parts()))
    assert len(copied.parts(obj_type='LocalDepth3dCrs')) == 2
    # test with consolidation of two crs objects
    copied = rq.new_model(copied_epc)
    new_crs = rqc.Crs(copied)
    new_crs.create_xml()
    copied.copy_all_parts_from_other_model(original, consolidate=True)
    assert len(copied.parts()) == len(original.parts())
    assert len(copied.parts(obj_type='LocalDepth3dCrs')) == 1
    crs_uuid = copied.uuid(obj_type='LocalDepth3dCrs')
    # the surviving crs may be either of the equivalent pair
    assert (bu.matching_uuids(crs_uuid, new_crs.uuid) or
            bu.matching_uuids(crs_uuid, original.uuid(obj_type='LocalDepth3dCrs')))
    # test write and re-load of copied model
    copied.store_epc()
    re_opened = rq.Model(copied_epc)
    assert re_opened is not None
    # bug fix: assert against the re-opened model, not the in-memory copy already checked above
    assert len(re_opened.parts()) == len(original.parts())
    crs_uuid = re_opened.uuid(obj_type='LocalDepth3dCrs')
    assert (bu.matching_uuids(crs_uuid, new_crs.uuid) or
            bu.matching_uuids(crs_uuid, original.uuid(obj_type='LocalDepth3dCrs')))
def test_root_for_time_series(example_model_with_prop_ts_rels):
    """Check Model.root_for_time_series with one and with multiple time series present."""
    # test when model has only one time series
    model = example_model_with_prop_ts_rels
    ts_root = model.root_for_time_series()
    assert ts_root is not None
    assert rqet.node_type(ts_root, strip_obj=True) == 'TimeSeries'
    assert model.resolve_time_series_root(ts_root) is ts_root
    assert model.resolve_time_series_root(None) is ts_root
    # test when model has multiple time series
    model = rq.Model(model.epc_file)
    assert model is not None
    oldest_ts_uuid = model.uuid(obj_type='TimeSeries')
    assert oldest_ts_uuid is not None
    for first_timestamp in ['2022-01-01Z', '2023-01-01Z', '2024-01-01Z']:
        # bug fix: use the loop's first_timestamp (the hard coded '2000-01-01Z' made
        # all three added series start on the same date, defeating the variation)
        ts = rqts.TimeSeries(parent_model=model, first_timestamp=first_timestamp)
        for _ in range(3):
            ts.extend_by_days(90)
        ts.create_xml()
    newest_ts_uuid = ts.uuid
    # check that oldest series by creation date is returned by default
    ts_root = model.root_for_time_series()
    assert ts_root is model.root(uuid=oldest_ts_uuid)
    # check that correct time series root is returned when uuid given
    ts_root = model.root_for_time_series(uuid=newest_ts_uuid)
    assert ts_root is not None and ts_root is model.root(uuid=newest_ts_uuid)
def test_model(tmp_path):
    """Basic Model lifecycle: create, add parts with XML-special titles, store, reload, look up."""
    epc = os.path.join(tmp_path, 'model.epc')
    model = rq.new_model(epc)
    assert model is not None
    crs = rqc.Crs(model)
    crs_root = crs.create_xml()  # NOTE(review): crs_root is unused beyond confirming xml creation
    model.store_epc()
    assert os.path.exists(epc)
    # titles deliberately include '&' and '<' to exercise xml escaping
    md_datum_1 = rqw.MdDatum(model, location=(0.0, 0.0, -50.0), crs_uuid=crs.uuid)
    md_datum_1.create_xml(title='Datum & 1')
    md_datum_2 = rqw.MdDatum(model, location=(3.0, 0.0, -50.0), crs_uuid=crs.uuid)
    md_datum_2.create_xml(title='Datum < 2')
    assert len(model.uuids(obj_type='MdDatum')) == 2
    model.store_epc()
    # re-open and confirm both datum parts survived the round trip
    model = rq.Model(epc)
    assert model is not None
    assert len(model.uuids(obj_type='MdDatum')) == 2
    datum_part_1 = model.part(obj_type='MdDatum', title='1', title_mode='ends')
    datum_part_2 = model.part(obj_type='MdDatum', title='2', title_mode='ends')
    assert datum_part_1 is not None and datum_part_2 is not None and datum_part_1 != datum_part_2
    datum_uuid_1 = rqet.uuid_in_part_name(datum_part_1)
    datum_uuid_2 = rqet.uuid_in_part_name(datum_part_2)
    assert not bu.matching_uuids(datum_uuid_1, datum_uuid_2)
    # the uuid-to-part dictionary should agree with the part lookups
    p1 = model.uuid_part_dict[bu.uuid_as_int(datum_uuid_1)]
    p2 = model.uuid_part_dict[bu.uuid_as_int(datum_uuid_2)]
    assert p1 == datum_part_1 and p2 == datum_part_2
def _inherit_well(new_epc_file, grid, traj_model, well_name, trajectory, blocked_well, column_ji0, box):
    """Copy or create well related parts in the model held in new_epc_file, then store it.

    If neither a trajectory nor a blocked well is supplied, a new BlockedWell is created
    for the (box-local) column; otherwise the relevant trajectory part is copied over
    from traj_model.
    """
    newer_model = rq.Model(new_epc_file)
    if trajectory is None and blocked_well is None:
        log.info('creating well objects for column')
        # shift the column indices into the coordinate frame of the extracted box
        box_column_ji0 = (column_ji0[0] - box[0, 1], column_ji0[1] - box[0, 2])
        bw = rqw.BlockedWell(newer_model,
                             grid=grid,
                             column_ji0=box_column_ji0,
                             well_name=well_name,
                             use_face_centres=True)
        bw.write_hdf5(create_for_trajectory_if_needed=True)
        bw.create_xml(create_for_trajectory_if_needed=True, title=well_name)
    elif blocked_well is not None:
        log.info('inheriting trajectory for blocked well')  # recursively copies referenced parts
        newer_model.copy_part_from_other_model(traj_model, traj_model.part(uuid=blocked_well.trajectory.uuid))
    else:
        log.info('inheriting well trajectory')  # recursively copies referenced parts
        newer_model.copy_part_from_other_model(traj_model, traj_model.part(uuid=trajectory.uuid))
    newer_model.h5_release()
    newer_model.store_epc()
def test_without_full_load(example_model_with_prop_ts_rels):
    """Check lazy loading: parts catalogued without xml trees, xml fetched on demand."""
    epc = example_model_with_prop_ts_rels.epc_file
    uuid_list = example_model_with_prop_ts_rels.uuids()
    assert len(uuid_list) > 0
    del example_model_with_prop_ts_rels
    # open model with minimum loading of xml
    model = rq.Model(epc_file=epc, full_load=False, create_basics=False, create_hdf5_ext=False)
    assert model is not None
    assert len(model.parts_forest) >= len(uuid_list)
    # check that xml for parts has not been loaded but part names and uuids are catalogued
    assert np.all([
        p_type is not None and uuid is not None and tree is None
        for (p_type, uuid, tree) in model.parts_forest.values()
    ])
    # see if parts are searchable
    cp_parts = model.parts(obj_type='ContinuousProperty')
    assert cp_parts is not None and len(cp_parts) > 1
    # see if xml is loaded on demand
    cp_tree = model.tree_for_part(cp_parts[0])
    assert cp_tree is not None
    crs_root = model.root(obj_type='LocalDepth3dCrs')
    assert crs_root is not None
    assert rqet.find_tag(crs_root, 'VerticalUom') is not None
def test_regular_grid_with_geometry(tmp_path):
    """Store a RegularGrid with constant-array geometry and check cell sizes after reload."""
    epc = os.path.join(tmp_path, 'concrete.epc')
    model = rq.new_model(epc)
    # create a basic block grid
    dxyz = (55.0, 65.0, 27.0)
    grid = grr.RegularGrid(model,
                           extent_kji=(4, 3, 2),
                           title='concrete',
                           origin=(0.0, 0.0, 1000.0),
                           dxyz=dxyz)
    grid.create_xml(add_cell_length_properties=True)
    grid_uuid = grid.uuid
    # store with constant arrays (no hdf5 data)
    model.store_epc()
    # check that the grid can be read
    model = rq.Model(epc)
    grid = grr.any_grid(model, uuid=grid_uuid)
    # check that the cell size has been preserved
    # note: dxyz is (x, y, z) while block_dxyz_dkji is indexed (k, j, i), hence the 2 - i flip
    expected_dxyz_dkji = np.zeros((3, 3))
    for i in range(3):
        expected_dxyz_dkji[2 - i, i] = dxyz[i]
    assert_array_almost_equal(expected_dxyz_dkji, grid.block_dxyz_dkji)
def test_two_grid_gcs(tmp_path): epc = make_epc_with_abutting_grids(tmp_path) # re-open the model and establish the abutting grid connection set model = rq.Model(epc) gcs_uuid = model.uuid(obj_type='GridConnectionSetRepresentation') assert gcs_uuid is not None gcs = rqf.GridConnectionSet(model, uuid=gcs_uuid) assert gcs is not None gcs.cache_arrays() # check that attributes have been preserved assert gcs.number_of_grids() == 2 assert len(gcs.grid_list) == 2 assert not bu.matching_uuids(gcs.grid_list[0].uuid, gcs.grid_list[1].uuid) assert gcs.count == 6 assert gcs.grid_index_pairs.shape == (6, 2) assert np.all(gcs.grid_index_pairs[:, 0] == 0) assert np.all(gcs.grid_index_pairs[:, 1] == 1) assert gcs.face_index_pairs.shape == (6, 2) assert np.all(gcs.face_index_pairs[:, 0] == gcs.face_index_map[1, 1]) # J+ assert np.all(gcs.face_index_pairs[:, 1] == gcs.face_index_map[1, 0]) # J- assert tuple(gcs.face_index_inverse_map[gcs.face_index_pairs[0, 0]]) == (1, 1) assert tuple(gcs.face_index_inverse_map[gcs.face_index_pairs[0, 1]]) == (1, 0) assert gcs.cell_index_pairs.shape == (6, 2) assert np.all(gcs.cell_index_pairs >= 0) assert np.all(gcs.cell_index_pairs < 60)
def test_add_ab_properties(example_model_with_properties, test_data_path):
    """Import discrete and continuous pure-binary (ab) property files and verify value ranges."""
    # Arrange
    model = example_model_with_properties
    ab_facies = os.path.join(test_data_path, 'facies.ib')
    ab_ntg = os.path.join(test_data_path, 'ntg_355.db')
    # each tuple: (file, title, kind, ..., discrete flag, ...) per add_ab_properties' expected layout
    ab_list = [(ab_facies, 'facies_ab', 'discrete', None, None, None, None, None, True, None),
               (ab_ntg, 'ntg_ab', 'net to gross ratio', None, None, None, None, None, False, None)]
    rqi.add_ab_properties(model.epc_file, ab_property_list=ab_list)
    reload = rq.Model(model.epc_file)
    pc = reload.grid().property_collection
    property_names = [pc.citation_title_for_part(part) for part in pc.parts()]
    assert 'facies_ab' in property_names
    assert 'ntg_ab' in property_names
    for part in pc.parts():
        if pc.citation_title_for_part(part) == 'facies_ab':
            assert not pc.continuous_for_part(part)
            farray = pc.cached_part_array_ref(part)
            assert np.min(farray) == 0
            assert np.max(farray) == 5
        elif pc.citation_title_for_part(part) == 'ntg_ab':
            assert pc.continuous_for_part(part)
            ntgarray = pc.cached_part_array_ref(part)
            assert np.min(ntgarray) > 0.4
            assert np.max(ntgarray) < 0.7
def test_add_surfaces(example_model_and_crs, test_data_path, surfaces, format, rqclass, interp_and_feat, role, newparts):
    """Parametrized import of surface files; checks part counts by representation class.

    surfaces/format/rqclass/interp_and_feat/role/newparts are pytest parametrize arguments.
    """
    model, crs = example_model_and_crs
    model.store_epc()
    surface_paths = [os.path.join(test_data_path, surf) for surf in surfaces]
    rqi.add_surfaces(epc_file=model.epc_file,
                     surface_file_format=format,
                     surface_file_list=surface_paths,
                     surface_role=role,
                     rq_class=rqclass,
                     make_horizon_interpretations_and_features=interp_and_feat)
    model = rq.Model(model.epc_file)
    assert len(model.parts()) == newparts
    # triangulated sets and 2d grids are distinct representation classes
    if rqclass in ['surface', 'TriangulatedSet']:
        assert len(model.parts_list_of_type('obj_TriangulatedSetRepresentation')) == len(surfaces)
    else:
        assert len(model.parts_list_of_type('obj_Grid2dRepresentation')) == len(surfaces)
    if interp_and_feat:
        assert len(model.parts_list_of_type('obj_HorizonInterpretation')) == len(surfaces)
def test_pointset_from_charisma(example_model_and_crs, test_data_path, tmp_path):
    """Load a Charisma points file into a PointSet, round trip via resqml, then export again."""
    # Set up a PointSet and save to resqml file
    model, crs = example_model_and_crs
    charisma_file = test_data_path / "Charisma_points.txt"
    points = resqpy.surface.PointSet(parent_model=model, charisma_file=str(charisma_file), crs_uuid=crs.uuid)
    points.write_hdf5()
    points.create_xml()
    model.store_epc()
    # Test reload from resqml
    model = rq.Model(epc_file=model.epc_file)
    reload = resqpy.surface.PointSet(parent_model=model, uuid=points.uuid)
    assert reload.title == str(charisma_file)
    coords = reload.full_array_ref()
    # first point values taken from the test data file
    assert_array_almost_equal(coords[0], np.array([420691.19624, 6292314.22044, 2799.05591]))
    assert coords.shape == (15, 3), f'Expected shape (15,3), not {coords.shape}'
    # Test write back to file
    out_file = str(tmp_path / "Charisma_points_out.txt")
    reload.convert_to_charisma(out_file)
    assert os.path.exists(out_file)
    with open(out_file, 'r') as f:
        line = f.readline()
    assert line == 'INLINE :\t1 XLINE :\t1\t420691.19624\t6292314.22044\t2799.05591\n', 'Output Charisma file does not look as expected'
def test_lines(example_model_and_crs):
    """Create a Polyline with a fault interpretation and verify both survive a reload."""
    # Set up a Polyline
    title = 'Nazca'
    model, crs = example_model_and_crs
    line = resqpy.lines.Polyline(parent_model=model,
                                 title=title,
                                 set_crs=crs.uuid,
                                 set_bool=True,
                                 set_coord=np.array([[0, 0, 0], [1, 1, 1]]))
    line.write_hdf5()
    line.create_xml()
    # Add a interpretation
    assert line.rep_int_root is None
    line.create_interpretation_and_feature(kind='fault')
    assert line.rep_int_root is not None
    # Check fault can be loaded in again
    model.store_epc()
    model = rq.Model(epc_file=model.epc_file)
    reload = resqpy.lines.Polyline(parent_model=model, uuid=line.uuid)
    assert reload.citation_title == title
    fault_interp = resqpy.organize.FaultInterpretation(model, uuid=line.rep_int_uuid)
    fault_feature = resqpy.organize.TectonicBoundaryFeature(model, uuid=fault_interp.feature_uuid)
    # Check title matches expected title
    assert fault_feature.feature_name == title
def test_pointset_from_irap(example_model_and_crs, test_data_path, tmp_path):
    """Load an IRAP points file into a PointSet, round trip via resqml, then export again."""
    # Set up a PointSet and save to resqml file
    model, crs = example_model_and_crs
    irap_file = test_data_path / "IRAP_points.txt"
    points = resqpy.surface.PointSet(parent_model=model, irap_file=str(irap_file), crs_uuid=crs.uuid)
    points.write_hdf5()
    points.create_xml()
    model.store_epc()
    # Test reload from resqml
    model = rq.Model(epc_file=model.epc_file)
    reload = resqpy.surface.PointSet(parent_model=model, uuid=points.uuid)
    assert reload.title == str(irap_file)
    coords = reload.full_array_ref()
    # first point values taken from the test data file
    assert_array_almost_equal(coords[0], np.array([429450.658333, 6296954.224574, 2403.837646]))
    assert coords.shape == (9, 3), f'Expected shape (9,3), not {coords.shape}'
    # Test write back to file
    out_file = str(tmp_path / "IRAP_points_out.txt")
    reload.convert_to_irap(out_file)
    assert os.path.exists(out_file)
    with open(out_file, 'r') as f:
        line = f.readline()
    assert line == '429450.658333 6296954.224574 2403.837646\n', 'Output IRAP file does not look as expected'
def test_two_fault_gcs(tmp_path): epc = make_epc_with_gcs(tmp_path) # re-open the model and check the gcs model = rq.Model(epc) gcs_uuid = model.uuid(obj_type='GridConnectionSetRepresentation') assert gcs_uuid is not None gcs = rqf.GridConnectionSet(model, uuid=gcs_uuid) assert gcs is not None assert gcs.number_of_features() == 2 feature_names = gcs.list_of_feature_names() assert len(feature_names) == 2 assert 'F1' in feature_names and 'F2' in feature_names fault_names = gcs.list_of_fault_names() assert fault_names == feature_names for fi in (0, 1): assert gcs.feature_name_for_feature_index(fi) in ('F1', 'F2') assert gcs.feature_name_for_feature_index( 0) != gcs.feature_name_for_feature_index(1) fi, f_uuid = gcs.feature_index_and_uuid_for_fault_name('F1') assert fi is not None and fi in (0, 1) assert f_uuid is not None assert gcs.fault_name_for_feature_index(fi) == 'F1' assert gcs.feature_index_for_cell_face((1, 1, 1), 0, 1) is None fi_a = gcs.feature_index_for_cell_face((1, 1, 1), 2, 1) assert fi_a in (0, 1) fi_b = gcs.feature_index_for_cell_face((1, 1, 1), 1, 1) assert fi_b in (0, 1) assert fi_a != fi_b gcs.rework_face_pairs()
def _get_model_details(epc_file, crs_uuid, ext_uuid):
    """Open an existing model and resolve crs uuid, hdf5 ext uuid, file name and open mode.

    Returns (model, crs_uuid, h5_mode, ext_uuid, hdf5_file); h5_mode is 'w' when a new
    hdf5 part had to be created, otherwise 'a' for appending to the existing file.
    """
    log.info('accessing existing resqml model from: ' + epc_file)
    model = rq.Model(epc_file = epc_file)
    assert model, 'failed to read existing resqml model from file: ' + epc_file
    if crs_uuid is None:
        assert model.crs_uuid is not None, 'no crs uuid given and no default in model'
        crs_uuid = model.crs_uuid
    if ext_uuid is None:
        ext_uuid = model.h5_uuid()
    if ext_uuid is None:  # no pre-existing hdf5 part or references in model
        hdf5_file = epc_file[:-4] + '.h5'  # assumes epc_file ends '.epc'
        ext_node = model.create_hdf5_ext(file_name = hdf5_file)
        ext_uuid = rqet.uuid_for_part_root(ext_node)
        h5_mode = 'w'
    else:
        hdf5_file = model.h5_file_name(uuid = ext_uuid)
        h5_mode = 'a'
    assert ext_uuid is not None, 'failed to establish hdf5 uuid'
    # append to hdf5 file using arrays from Surface object's patch(es)
    log.info('will append to hdf5 file: ' + hdf5_file)
    return model, crs_uuid, h5_mode, ext_uuid, hdf5_file
def test_create_property_set_per_timestep_true(tmp_path):
    """Import a vdb ensemble creating one property set per timestep; verify part counts."""
    # Arrange
    current_filename = os.path.split(getsourcefile(lambda: 0))[0]
    base_folder = os.path.dirname(os.path.dirname(current_filename))
    ensemble_dir = f'{base_folder}/test_data/wren'
    epc_file = f'{tmp_path}/test.epc'
    # expected number of parts in each per-timestep property set, indexed by time index
    no_parts_expected = [36, 21, 21, 21]
    # Act
    import_vdb_ensemble(epc_file, ensemble_dir, create_property_set_per_timestep=True)
    model = rq.Model(epc_file)
    grid = model.grid()
    property_set_uuids = model.uuids(obj_type='PropertySet', title='time index', title_mode='contains')
    # Assert
    assert len(property_set_uuids) == 4
    for uuid in property_set_uuids:
        property_set_root = model.root_for_uuid(uuid=uuid)
        # the trailing token of the citation title is the time index
        title = rqet.citation_title_for_node(property_set_root).split()[-1]
        property_set = rqp.PropertyCollection(support=grid, property_set_root=property_set_root)
        assert property_set.number_of_parts() == no_parts_expected[int(title)]
def test_crs_reuse():
    """Check that equivalent crs objects are reused on create_xml unless reuse is disabled."""
    model = rq.Model(new_epc = True, create_basics = True)
    crs_a = rqc.Crs(model)
    crs_a.create_xml()
    crs_b = rqc.Crs(model)
    crs_b.create_xml()
    # second identical crs should be consolidated onto the first
    assert len(model.parts(obj_type = 'LocalDepth3dCrs')) == 1
    assert crs_a == crs_b
    assert bu.matching_uuids(crs_a.uuid, crs_b.uuid)
    # a differing crs must become a new part
    crs_c = rqc.Crs(model, z_inc_down = False)
    crs_c.create_xml()
    assert len(model.parts(obj_type = 'LocalDepth3dCrs')) == 2
    assert crs_c != crs_a
    assert not bu.matching_uuids(crs_c.uuid, crs_a.uuid)
    crs_d = rqc.Crs(model, z_units = 'ft')
    crs_d.create_xml()
    assert len(model.parts(obj_type = 'LocalDepth3dCrs')) == 3
    # equivalent to crs_c, so reused
    crs_e = rqc.Crs(model, z_inc_down = False)
    crs_e.create_xml()
    assert len(model.uuids(obj_type = 'LocalDepth3dCrs')) == 3
    assert crs_e == crs_c
    assert bu.matching_uuids(crs_e.uuid, crs_c.uuid)
    # reuse=False forces a duplicate part even though the crs is equal to crs_a
    crs_f = rqc.Crs(model)
    crs_f.create_xml(reuse = False)
    assert len(model.parts(obj_type = 'LocalDepth3dCrs')) == 4
    assert crs_f == crs_a
    assert not bu.matching_uuids(crs_f.uuid, crs_a.uuid)
def test_pointset_from_polylineset(example_model_and_crs):
    """Build a PolylineSet of two lines, persist, then derive a PointSet and check coords."""
    # Set up a PolyLine and save to resqml file
    model, crs = example_model_and_crs
    coords1 = np.array([[1, 2, 3, 4, 5], [6, 7, 8, 9, 10], [12, 13, 14, 15, 16]]).T
    coords2 = np.array([[11, 12, 13, 14, 15], [16, 17, 18, 19, 110], [112, 113, 114, 115, 116]]).T
    line1 = resqpy.lines.Polyline(parent_model=model,
                                  title='Polyline1',
                                  set_coord=coords1,
                                  set_crs=crs.uuid,
                                  set_bool=False)
    line2 = resqpy.lines.Polyline(parent_model=model,
                                  title='Polyline2',
                                  set_coord=coords2,
                                  set_crs=crs.uuid,
                                  set_bool=False)
    lines = resqpy.lines.PolylineSet(parent_model=model, title='Polylines', polylines=[line1, line2])
    lines.write_hdf5()
    lines.create_xml()
    model.store_epc()
    # Reload the model, and generate pointset using the polylineset
    model = rq.Model(epc_file=model.epc_file)
    reload_lines = resqpy.lines.PolylineSet(parent_model=model, uuid=lines.uuid)
    points = resqpy.surface.PointSet(parent_model=model, polyset=reload_lines, crs_uuid=crs.uuid)
    points.write_hdf5()
    points.create_xml()
    model.store_epc()
    # Reload the model, and ensure the coordinates are as expected
    model = rq.Model(epc_file=model.epc_file)
    reload = resqpy.surface.PointSet(parent_model=model, uuid=points.uuid)
    # point set should hold the concatenation of both polylines' coordinates
    assert_array_almost_equal(reload.full_array_ref(), np.concatenate((coords1, coords2), axis=0))
def test_copy_from(example_model_with_prop_ts_rels):
    """Opening a Model with copy_from must reproduce the source model's parts in order."""
    source_epc = example_model_with_prop_ts_rels.epc_file
    target_epc = source_epc[:-4] + '_copy.epc'
    # snapshot the expected part list (oldest first) before discarding the fixture model
    expected_parts = example_model_with_prop_ts_rels.parts(sort_by='oldest')
    assert len(expected_parts) > 0
    del example_model_with_prop_ts_rels
    duplicate = rq.Model(target_epc, copy_from=source_epc)
    assert duplicate.parts(sort_by='oldest') == expected_parts
def test_regular_grid_half_cell_transmission(tmp_path):
    """Verify half cell transmissibility for regular grids across sizes, permeabilities, ntg,
    Darcy constants, and rotated/dipped geometry; then clean up the scratch epc/h5 files."""

    def try_one_half_t_regular(model,
                               extent_kji=(2, 2, 2),
                               dxyz=(1.0, 1.0, 1.0),
                               perm_kji=(1.0, 1.0, 1.0),
                               ntg=1.0,
                               darcy_constant=1.0,
                               rotate=None,
                               dip=None):
        # build a regular grid (optionally dipped/rotated) and compare computed half cell
        # transmissibilities against the analytic area / length expectation
        ones = np.ones(extent_kji)
        grid = grr.RegularGrid(model, extent_kji=extent_kji, dxyz=dxyz)
        if dip is not None:  # dip positive x axis downwards
            r_matrix = vec.rotation_matrix_3d_axial(1, dip)
            p = grid.points_ref(masked=False)
            p[:] = vec.rotate_array(r_matrix, p)
        if rotate is not None:  # rotate anticlockwise in xy plane (viewed from above)
            r_matrix = vec.rotation_matrix_3d_axial(2, rotate)
            p = grid.points_ref(masked=False)
            p[:] = vec.rotate_array(r_matrix, p)
        half_t = rqtr.half_cell_t(grid,
                                  perm_k=perm_kji[0] * ones,
                                  perm_j=perm_kji[1] * ones,
                                  perm_i=perm_kji[2] * ones,
                                  ntg=ntg * ones,
                                  darcy_constant=darcy_constant)
        # half-cell transmissibility: 2 * darcy * perm * face_area / half_length (ntg for j & i)
        expected = 2.0 * darcy_constant * np.array(
            (perm_kji[0] * dxyz[0] * dxyz[1] / dxyz[2],
             ntg * perm_kji[1] * dxyz[0] * dxyz[2] / dxyz[1],
             ntg * perm_kji[2] * dxyz[1] * dxyz[2] / dxyz[0]))
        assert np.all(np.isclose(half_t, expected.reshape(1, 1, 1, 3)))

    temp_epc = str(os.path.join(tmp_path, f"{bu.new_uuid()}.epc"))
    model = rq.Model(temp_epc, new_epc=True, create_basics=True)
    try_one_half_t_regular(model)
    try_one_half_t_regular(model, extent_kji=(3, 4, 5))
    try_one_half_t_regular(model, dxyz=(127.53, 21.05, 12.6452))
    try_one_half_t_regular(model, perm_kji=(123.23, 512.4, 314.7))
    try_one_half_t_regular(model, ntg=0.7)
    try_one_half_t_regular(model, darcy_constant=0.001127)
    try_one_half_t_regular(model,
                           extent_kji=(5, 4, 3),
                           dxyz=(84.23, 77.31, 15.823),
                           perm_kji=(0.6732, 298.14, 384.2),
                           ntg=0.32,
                           darcy_constant=0.008527)
    try_one_half_t_regular(model, rotate=67.8)
    # should not have written anything, but try clean-up just in case
    try:
        os.remove(temp_epc)
    except Exception:
        pass
    try:
        # bug fix: strip the '.epc' extension with a slice; temp_epc[-4] was a single character
        os.remove(temp_epc[:-4] + '.h5')
    except Exception:
        pass
def _establish_model_and_source_grid(epc_file, source_grid):
    """Return a (model, source_grid) pair, opening the model from epc_file when supplied.

    When epc_file is given without a grid, the model must contain exactly one grid
    (or one named ROOT); when no epc_file is given, the grid's own model is used.
    """
    assert epc_file or source_grid is not None, 'neither epc file name nor source grid supplied'
    if not epc_file:
        # no file: derive everything from the supplied grid object
        return source_grid.model, source_grid
    model = rq.Model(epc_file)
    if source_grid is None:
        source_grid = model.grid()  # requires there to be exactly one grid in model (or one named ROOT)
    return model, source_grid
def _establish_models_and_source_grid(epc_file, new_epc_file, source_grid, source_grid_uuid):
    """Resolve input/output models and the source grid for a grid refinement operation.

    Returns (epc_file, model, model_in, source_grid): model is the model to write
    refined output to (a fresh model when new_epc_file is given, otherwise the input
    model), model_in is the model the source grid is read from.
    """
    assert epc_file or source_grid is not None, 'neither epc file name nor source grid supplied'
    if not epc_file:
        epc_file = source_grid.model.epc_file
        assert epc_file, 'unable to ascertain epc filename from grid object'
    # treat new_epc_file as redundant if it names the same file as epc_file
    if new_epc_file and epc_file and (
        (new_epc_file == epc_file) or
        (os.path.exists(new_epc_file) and os.path.exists(epc_file) and
         os.path.samefile(new_epc_file, epc_file))):
        new_epc_file = None
    model = None
    if new_epc_file:
        log.debug('creating fresh model for refined grid')
        model = rq.Model(epc_file=new_epc_file, new_epc=True, create_basics=True, create_hdf5_ext=True)
    if epc_file:
        model_in = rq.Model(epc_file)
        if source_grid is None:
            if source_grid_uuid is None:
                log.debug('using default source grid from existing epc')
                source_grid = model_in.grid()
            else:
                log.debug('selecting source grid from existing epc based on uuid')
                source_grid = grr.Grid(model_in, uuid=source_grid_uuid)
        else:
            if source_grid_uuid is not None:
                assert bu.matching_uuids(source_grid_uuid, source_grid.uuid)
            grid_uuid = source_grid.uuid
            # re-attach the grid to the freshly opened input model
            log.debug('reloading source grid from existing epc file')
            source_grid = grr.Grid(model_in, uuid=grid_uuid)
        if model is None:
            model = model_in
    else:
        # NOTE(review): unreachable — epc_file is asserted truthy above; kept for safety
        model_in = source_grid.model
    assert model_in is not None
    assert model is not None
    assert source_grid is not None
    assert source_grid.grid_representation in ['IjkGrid', 'IjkBlockGrid']
    return epc_file, model, model_in, source_grid
def test_random_cell(tmp_path):
    """Check random_cell avoids pinched/collapsed cells and determine_corp_extent recovers extent."""
    # --------- Arrange----------
    seed(1923877)  # fixed seed for reproducible random cell draws
    epc = os.path.join(tmp_path, 'grid.epc')
    model = rq.new_model(epc)
    # create a basic block grid
    dxyz = (10.0, 10.0, 100.0)
    grid = grr.RegularGrid(model,
                           extent_kji=(4, 2, 2),
                           title='grid_1',
                           origin=(0.0, 10.0, 1000.0),
                           dxyz=dxyz,
                           as_irregular_grid=True)
    grid_points = grid.points_ref(masked=False)
    # pinch out cells in the k == 2 layer
    grid_points[3] = grid_points[2]
    # collapse cell kji0 == 0,0,0 in the j direction
    grid_points[0:2, 1, 0:2, 1] = 10.  # same as origin y value
    # store grid
    grid.write_hdf5()
    grid.create_xml(add_cell_length_properties=True)
    grid_uuid = grid.uuid
    model.store_epc()
    # check that the grid can be read
    model = rq.Model(epc)
    grid_reloaded = grr.any_grid(model, uuid=grid_uuid)
    corner_points = grid_reloaded.corner_points()
    # --------- Act----------
    # call random_cell function 50 times
    trial_number = 0
    while trial_number < 50:
        (k, j, i) = random_cell(corner_points=corner_points, border=0.0)
        # --------- Assert----------
        assert 0 <= k < 4
        assert 0 <= j < 2
        assert 0 <= i < 2
        # check that none of the k,j,i combinations that correspond to pinched cells are chosen by the random_cell function
        assert (k, j, i) not in [(0, 0, 0), (2, 0, 0), (2, 0, 1), (2, 1, 0), (2, 1, 1)]
        trial_number += 1
    # reshape corner points to corp layout and get the extent of the new grid
    corner_points_reshaped = corner_points.reshape(1, 1, 16, 2, 2, 2, 3)
    new_extent = determine_corp_extent(corner_points=corner_points_reshaped)
    assert np.all(new_extent == np.array([4, 2, 2], dtype=int))
def test_check_transmisibility_output(test_data_path):
    """Regression test: grid transmissibility arrays must match stored snapshot files.

    NOTE(review): 'transmisibility' in the test name is misspelled but renaming would
    change the public test id.
    """
    current_filename = os.path.split(getsourcefile(lambda: 0))[0]
    base_folder = os.path.dirname(os.path.dirname(current_filename))
    resqml_file_root = base_folder + '/example_data/block.epc'
    grid_model = rq.Model(resqml_file_root)
    resqml_grid = grid_model.grid()
    # transmissibility returns per-direction arrays in k, j, i order
    k, j, i = resqml_grid.transmissibility()
    snapshot_filename = current_filename + "/snapshots/transmissibility/"
    check_load_snapshot(i, f'{snapshot_filename}block_i.txt')
    check_load_snapshot(j, f'{snapshot_filename}block_j.txt')
    check_load_snapshot(k, f'{snapshot_filename}block_k.txt')
def test_split_pillars_false(tmp_path):
    """Importing a vdb ensemble with split_pillars=False must yield an unsplit grid."""
    # locate the ensemble test data relative to this source file
    here = os.path.split(getsourcefile(lambda: 0))[0]
    repo_root = os.path.dirname(os.path.dirname(here))
    vdb_dir = f'{repo_root}/test_data/wren'
    epc_path = f'{tmp_path}/test.epc'
    # import without split pillars, then reload and inspect the grid
    import_vdb_ensemble(epc_path, vdb_dir, split_pillars=False)
    imported = rq.Model(epc_path)
    assert not imported.grid().has_split_coordinate_lines
def test_lineset(example_model_and_crs, tmp_path):
    """PolylineSet round trip via resqml plus conversion to/from irap and charisma formats."""
    # Set up a PolylineSet
    title = 'Nazcas'
    model, crs = example_model_and_crs
    line1 = resqpy.lines.Polyline(parent_model=model,
                                  title=title,
                                  set_crs=crs.uuid,
                                  set_bool=True,
                                  set_coord=np.array([[0, 0, 0], [1, 1, 1]], dtype=float))
    line2 = resqpy.lines.Polyline(parent_model=model,
                                  title=title,
                                  set_crs=crs.uuid,
                                  set_bool=True,
                                  set_coord=np.array([[2, 2, 2], [3, 3, 3]], dtype=float))
    lines = resqpy.lines.PolylineSet(parent_model=model, title=title, polylines=[line1, line2])
    lines.write_hdf5()
    lines.create_xml()
    # Check lines can be loaded in again
    model.store_epc()
    model = rq.Model(epc_file=model.epc_file)
    reload = resqpy.lines.PolylineSet(parent_model=model, uuid=lines.uuid)
    assert len(reload.polys) == 2, \
        f'Expected two polylines in the polylineset, found {len(reload.polys)}'
    assert (reload.count_perpol == [2, 2]).all(), \
        f'Expected count per polyline to be [2,2], found {reload.count_perpol}'
    # round trip: polyline set -> list of polylines -> new polyline set
    pl_list = reload.convert_to_polylines()
    assert len(pl_list) == 2
    pl_set_2 = resqpy.lines.PolylineSet(model)
    pl_set_2.combine_polylines(pl_list)
    assert len(pl_set_2.polys) == 2, \
        f'Expected two polylines in the polylineset, found {len(pl_set_2.polys)}'
    assert (pl_set_2.count_perpol == [2, 2]).all(), \
        f'Expected count per polyline to be [2,2], found {pl_set_2.count_perpol}'
    # export to the two ascii formats, then re-import each and check the line count
    irap_file = os.path.join(tmp_path, 'test_irap.dat')
    pl_set_2.convert_to_irap(irap_file)
    charisma_file = os.path.join(tmp_path, 'test_charisma.dat')
    pl_set_2.convert_to_charisma(charisma_file)
    pl_set_3 = resqpy.lines.PolylineSet(model, charisma_file=charisma_file)
    assert pl_set_3 is not None and len(pl_set_3.polys) == 2
    pl_set_4 = resqpy.lines.PolylineSet(model, irap_file=irap_file)
    assert pl_set_4 is not None and len(pl_set_4.polys) == 2
def test_explicit_mesh(example_model_and_crs): model, crs = example_model_and_crs # create some random x,y,z values x = (np.random.random(20) * 10000) y = (np.random.random(20) * 10000) z = (np.random.random(20) * 500 + 2000) # make a 20x20x3 array of values to test array = np.array([x, y, z]).T xyz_values = np.array([ np.multiply(array, np.array([1 + val / 100, 1. + val / 100, 1 + val / 100]).T) for val in np.arange(0, 20) ]) # make an explicit mesh representation mesh = resqpy.surface.Mesh(model, crs_uuid=crs.uuid, mesh_flavour='explicit', xyz_values=xyz_values, title='random explicit mesh', originator='Emma', extra_metadata={'testing mode': 'automated'}) assert mesh is not None mesh.write_hdf5() mesh.create_xml() mesh_uuid = mesh.uuid # fully write model to disc model.store_epc() epc = model.epc_file # re-open model and check the mesh object is there model = rq.Model(epc) assert bu.matching_uuids( model.uuid(obj_type='Grid2dRepresentation', title='random explicit mesh'), mesh_uuid) # establish a resqpy Mesh from the object in the RESQML dataset persistent_mesh = resqpy.surface.Mesh(model, uuid=mesh_uuid) # check some of the metadata assert persistent_mesh.flavour == 'explicit' assert persistent_mesh.ni == len(x) assert persistent_mesh.nj == len(y) # check a fully expanded version of the points assert_array_almost_equal(persistent_mesh.full_array_ref(), mesh.full_array_ref())
def test_keyword_list(tmp_path):
    """Importing a vdb ensemble with a keyword_list must restrict imported properties to it."""
    # Arrange
    current_filename = os.path.split(getsourcefile(lambda: 0))[0]
    base_folder = os.path.dirname(os.path.dirname(current_filename))
    ensemble_dir = f'{base_folder}/test_data/wren'
    epc_file = f'{tmp_path}/test.epc'
    keyword_set = {'PVR', 'MDEP', 'KH', 'SW', 'SO', 'P'}
    # Act
    import_vdb_ensemble(epc_file, ensemble_dir, keyword_list=keyword_set)
    model = rq.Model(epc_file)
    pc = model.grid().property_collection
    # Assert: titles of all imported property parts match the requested keywords exactly
    # (removed unused local that collected pc.title_for_part values but was never checked)
    assert set(model.titles(parts_list=pc.parts())) == keyword_set
def test_time_series_from_list(tmp_path):
    """Build a TimeSeries from a list of dates, persist it, and verify after reload."""
    epc = os.path.join(tmp_path, 'ts_list.epc')
    model = rq.new_model(epc)
    ts_list = ['2022-01-01', '2022-02-01', '2022-03-01', '2022-04-01']
    ts = rqts.time_series_from_list(ts_list, parent_model = model)
    assert ts.number_of_timestamps() == 4
    # Jan + Feb + Mar of a non-leap year
    assert ts.days_between_timestamps(0, 3) == 31 + 28 + 31
    ts.create_xml()
    model.store_epc()
    # re-open and confirm the series is recognised as a human (calendar) timeframe
    model = rq.Model(epc)
    ts_uuid = model.uuid(obj_type = 'TimeSeries')
    assert ts_uuid is not None
    ts = rqts.any_time_series(model, uuid = ts_uuid)
    assert isinstance(ts, rqts.TimeSeries)
    assert ts.number_of_timestamps() == 4
    assert ts.days_between_timestamps(0, 3) == 31 + 28 + 31
    assert ts.timeframe == 'human'