Example no. 1
def test_model_copy_all_parts(example_model_with_properties):

    epc = example_model_with_properties.epc_file
    dir = example_model_with_properties.epc_directory
    copied_epc = os.path.join(dir, 'copied.epc')

    # test copying without consolidation
    original = rq.Model(epc)
    assert original is not None
    copied = rq.new_model(copied_epc)
    copied.copy_all_parts_from_other_model(original, consolidate=False)

    assert set(original.uuids()) == set(copied.uuids())
    assert set(original.parts()) == set(copied.parts())

    # test without consolidation of two crs objects
    copied = rq.new_model(copied_epc)
    new_crs = rqc.Crs(copied)
    new_crs.create_xml()

    copied.copy_all_parts_from_other_model(original, consolidate=False)

    assert len(copied.parts()) == len(original.parts()) + 1
    assert set(original.parts()).issubset(set(copied.parts()))
    assert len(copied.parts(obj_type='LocalDepth3dCrs')) == 2

    # test with consolidation of two crs objects
    copied = rq.new_model(copied_epc)
    new_crs = rqc.Crs(copied)
    new_crs.create_xml()

    copied.copy_all_parts_from_other_model(original, consolidate=True)

    assert len(copied.parts()) == len(original.parts())
    assert len(copied.parts(obj_type='LocalDepth3dCrs')) == 1

    crs_uuid = copied.uuid(obj_type='LocalDepth3dCrs')
    assert (bu.matching_uuids(crs_uuid, new_crs.uuid) or bu.matching_uuids(
        crs_uuid, original.uuid(obj_type='LocalDepth3dCrs')))

    # test write and re-load of copied model
    copied.store_epc()
    re_opened = rq.Model(copied_epc)
    assert re_opened is not None

    assert len(re_opened.parts()) == len(original.parts())

    crs_uuid = re_opened.uuid(obj_type='LocalDepth3dCrs')
    assert (bu.matching_uuids(crs_uuid, new_crs.uuid) or bu.matching_uuids(
        crs_uuid, original.uuid(obj_type='LocalDepth3dCrs')))
Example no. 2
def test_col_headers(tmp_path):
    epc = os.path.join(tmp_path, 'model.epc')
    model = rq.new_model(epc)
    np_df = np.array([[0.0, 0.0, 1.0, 0], [0.04, 0.015, 0.87, np.nan],
                      [0.12, 0.065, 0.683, np.nan],
                      [0.25, 0.205, 0.35, np.nan], [0.45, 0.55, 0.018, np.nan],
                      [0.74, 1.0, 0.0, 1e-06]])
    df_cols1 = ['Sw', 'Krg', 'Kro', 'Pc']
    df_cols2 = ['Sg', 'Krg', 'Krw', 'Pc']
    df_cols3 = ['Sg', 'Krg', 'Pc', 'Kro']
    df_cols4 = ['Sg', 'krg', 'Kro', 'pC']
    df1 = pd.DataFrame(np_df, columns=df_cols1)
    df2 = pd.DataFrame(np_df, columns=df_cols2)
    df3 = pd.DataFrame(np_df, columns=df_cols3)
    df4 = pd.DataFrame(np_df, columns=df_cols4)
    phase_combo1 = 'gas-oil'
    phase_combo2 = None

    with pytest.raises(ValueError) as excval1:
        RelPerm(model=model, df=df1, phase_combo=phase_combo1)
    assert "incorrect saturation column name and/or multiple saturation columns exist" in str(
        excval1.value)

    with pytest.raises(ValueError) as excval2:
        RelPerm(model=model, df=df2, phase_combo=phase_combo1)
    assert "incorrect column name(s) {'Krw'}" in str(excval2.value)

    with pytest.raises(ValueError) as excval3:
        RelPerm(model=model, df=df3, phase_combo=phase_combo1)
    assert "capillary pressure data should be in the last column of the dataframe" in str(
        excval3.value)

    relperm_obj = RelPerm(model=model, df=df4, phase_combo=phase_combo2)
    assert relperm_obj.phase_combo == 'gas-oil'
Example no. 3
def test_regular_grid_with_geometry(tmp_path):
    epc = os.path.join(tmp_path, 'concrete.epc')

    model = rq.new_model(epc)

    # create a basic block grid
    dxyz = (55.0, 65.0, 27.0)
    grid = grr.RegularGrid(model,
                           extent_kji=(4, 3, 2),
                           title='concrete',
                           origin=(0.0, 0.0, 1000.0),
                           dxyz=dxyz)
    grid.create_xml(add_cell_length_properties=True)
    grid_uuid = grid.uuid

    # store with constant arrays (no hdf5 data)
    model.store_epc()

    # check that the grid can be read
    model = rq.Model(epc)
    grid = grr.any_grid(model, uuid=grid_uuid)

    # check that the cell size has been preserved
    expected_dxyz_dkji = np.zeros((3, 3))
    for i in range(3):
        expected_dxyz_dkji[2 - i, i] = dxyz[i]
    assert_array_almost_equal(expected_dxyz_dkji, grid.block_dxyz_dkji)
Example no. 4
def test_dtype_size(tmp_path):

    filenames = ['dtype_16', 'dtype_32', 'dtype_64']
    byte_sizes = [2, 4, 8]
    dtypes = [np.float16, np.float32, np.float64]
    hdf5_sizes = []

    extent_kji = (1000, 100, 100)
    a = np.random.random(extent_kji)

    for filename, dtype in zip(filenames, dtypes):
        epc = os.path.join(tmp_path, filename + '.epc')
        h5_file = epc[:-4] + '.h5'
        model = rq.new_model(epc)
        grid = grr.RegularGrid(model, extent_kji=extent_kji)
        grid.create_xml()
        pc = rqp.PropertyCollection()
        pc.set_support(support_uuid=grid.uuid, model=model)
        pc.add_cached_array_to_imported_list(
            cached_array=a,
            source_info='random',
            keyword='NTG',
            property_kind='net to gross ratio',
            indexable_element='cells',
            uom='m3/m3')
        pc.write_hdf5_for_imported_list(dtype=dtype)
        model.store_epc()
        model.h5_release()
        hdf5_sizes.append(os.path.getsize(h5_file))

    assert hdf5_sizes[0] < hdf5_sizes[1] < hdf5_sizes[2]
    for i, (byte_size, hdf5_size) in enumerate(zip(byte_sizes, hdf5_sizes)):
        array_size = byte_size * a.size
        # following may need to be modified if using hdf5 compression
        assert array_size < hdf5_size < array_size + 100000
Example no. 5
def test_model_context(tmp_path):

    # Create a new model
    epc_path = str(tmp_path / 'tmp_model.epc')
    model = rq.new_model(epc_path)
    crs = rqc.Crs(parent_model=model, title='kuzcotopia')
    crs_uuid = crs.uuid
    crs.create_xml()
    model.store_epc()
    del crs, model

    # Re-open model in read/write mode
    with rq.ModelContext(epc_path, mode="rw") as model2:

        crs2 = rqc.Crs(model2, uuid=crs_uuid)
        assert len(list(model2.iter_crs())) == 1
        assert crs2.title == 'kuzcotopia'

        # Make a change
        crs2.title = 'wabajam'
        crs2.create_xml(reuse=False)

    # Re-open model in read mode
    with rq.ModelContext(epc_path, mode="r") as model3:

        # Check model has loaded correctly
        assert len(list(model3.iter_crs())) == 1
        crs3 = rqc.Crs(model3, uuid=crs_uuid)
        assert crs3.title == 'wabajam'

    # Overwrite model
    with rq.ModelContext(epc_path, mode="create") as model4:
        # Should be empty
        crs_list = list(model4.iter_crs())
        assert len(crs_list) == 0
Example no. 6
def test_model(tmp_path):

    epc = os.path.join(tmp_path, 'model.epc')
    model = rq.new_model(epc)
    assert model is not None
    crs = rqc.Crs(model)
    crs_root = crs.create_xml()
    model.store_epc()
    assert os.path.exists(epc)
    md_datum_1 = rqw.MdDatum(model,
                             location=(0.0, 0.0, -50.0),
                             crs_uuid=crs.uuid)
    md_datum_1.create_xml(title='Datum & 1')
    md_datum_2 = rqw.MdDatum(model,
                             location=(3.0, 0.0, -50.0),
                             crs_uuid=crs.uuid)
    md_datum_2.create_xml(title='Datum < 2')
    assert len(model.uuids(obj_type='MdDatum')) == 2
    model.store_epc()

    model = rq.Model(epc)
    assert model is not None
    assert len(model.uuids(obj_type='MdDatum')) == 2
    datum_part_1 = model.part(obj_type='MdDatum', title='1', title_mode='ends')
    datum_part_2 = model.part(obj_type='MdDatum', title='2', title_mode='ends')
    assert datum_part_1 is not None and datum_part_2 is not None and datum_part_1 != datum_part_2
    datum_uuid_1 = rqet.uuid_in_part_name(datum_part_1)
    datum_uuid_2 = rqet.uuid_in_part_name(datum_part_2)
    assert not bu.matching_uuids(datum_uuid_1, datum_uuid_2)
    p1 = model.uuid_part_dict[bu.uuid_as_int(datum_uuid_1)]
    p2 = model.uuid_part_dict[bu.uuid_as_int(datum_uuid_2)]
    assert p1 == datum_part_1 and p2 == datum_part_2
Example no. 7
def test_relperm_df_none_uuid_none(tmp_path):
    # Arrange
    test_model = rq.new_model(os.path.join(tmp_path, 'test'))

    # Act & Assert
    with pytest.raises(ValueError) as e:
        RelPerm(test_model)
    assert "either a uuid or a dataframe must be provided" in str(e.value)
Example no. 8
def make_epc_with_gcs(tmp_path):
    epc = os.path.join(tmp_path, 'two_fault.epc')
    model = rq.new_model(epc)

    # create a grid
    g = grr.RegularGrid(model, extent_kji=(5, 4, 3), dxyz=(100.0, 100.0, 10.0))
    g.create_xml()

    # create an empty grid connection set
    gcs = rqf.GridConnectionSet(model, grid=g)

    # prepare two named faults as a dataframe
    data = {
        'name': ['F1', 'F2'],
        'face': ['I+', 'J-'],
        'i1': [1, 0],
        'i2': [1, 2],
        'j1': [0, 2],
        'j2': [3, 2],
        'k1': [0, 0],
        'k2': [4, 4],
        'mult': [0.1, 0.05]
    }
    df = pd.DataFrame(data)

    # set grid connection set from dataframe
    gcs.set_pairs_from_faces_df(df,
                                create_organizing_objects_where_needed=True,
                                create_mult_prop=True,
                                fault_tmult_dict=None,
                                one_based_indexing=False)

    # save the grid connection set
    gcs.write_hdf5()
    gcs.create_xml(title='two fault gcs')
    model.store_epc()

    # add some basic grid properties
    porosity_uuid = rqdm.add_one_grid_property_array(epc,
                                                     np.full(
                                                         g.extent_kji, 0.27),
                                                     property_kind='porosity',
                                                     title='porosity',
                                                     uom='m3/m3')
    assert porosity_uuid is not None
    perm_uuid = rqdm.add_one_grid_property_array(
        epc,
        np.full(g.extent_kji, 152.0),
        property_kind='rock permeability',
        uom='mD',
        facet_type='direction',
        facet='IJK',
        title='permeability')
    assert perm_uuid is not None

    return epc
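
A minimal usage sketch (an assumption, not part of the source) of how a test might consume the helper above: re-open the returned epc and check that the grid connection set was stored. The object type string and title lookup follow the patterns used elsewhere in these examples.

def test_make_epc_with_gcs(tmp_path):
    epc = make_epc_with_gcs(tmp_path)
    model = rq.Model(epc)
    # locate the grid connection set created by the helper, by citation title
    gcs_uuid = model.uuid(obj_type='GridConnectionSetRepresentation', title='two fault gcs')
    assert gcs_uuid is not None
    # re-instantiate the connection set and check that face pairs exist for the two faults
    gcs = rqf.GridConnectionSet(model, uuid=gcs_uuid)
    assert gcs.count > 0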
Example no. 9
def test_random_cell(tmp_path):

    # --------- Arrange----------
    seed(1923877)
    epc = os.path.join(tmp_path, 'grid.epc')
    model = rq.new_model(epc)

    # create a basic block grid
    dxyz = (10.0, 10.0, 100.0)
    grid = grr.RegularGrid(model,
                           extent_kji=(4, 2, 2),
                           title='grid_1',
                           origin=(0.0, 10.0, 1000.0),
                           dxyz=dxyz,
                           as_irregular_grid=True)

    grid_points = grid.points_ref(masked=False)

    # pinch out cells in the k == 2 layer
    grid_points[3] = grid_points[2]

    # collapse cell kji0 == 0,0,0 in the j direction
    grid_points[0:2, 1, 0:2, 1] = 10.  # same as origin y value

    # store grid
    grid.write_hdf5()
    grid.create_xml(add_cell_length_properties=True)
    grid_uuid = grid.uuid
    model.store_epc()

    # check that the grid can be read
    model = rq.Model(epc)
    grid_reloaded = grr.any_grid(model, uuid=grid_uuid)
    corner_points = grid_reloaded.corner_points()

    # --------- Act----------
    # call random_cell function 50 times
    trial_number = 0
    while trial_number < 50:
        (k, j, i) = random_cell(corner_points=corner_points, border=0.0)
        # --------- Assert----------
        assert 0 <= k < 4
        assert 0 <= j < 2
        assert 0 <= i < 2
        # check that none of the k,j,i combinations that correspond to pinched cells are chosen by the random_cell function
        assert (k, j, i) not in [(0, 0, 0), (2, 0, 0), (2, 0, 1), (2, 1, 0),
                                 (2, 1, 1)]
        trial_number += 1

    # reshape the corner points and determine the extent of the new grid
    corner_points_reshaped = corner_points.reshape(1, 1, 16, 2, 2, 2, 3)
    new_extent = determine_corp_extent(corner_points=corner_points_reshaped)
    assert np.all(new_extent == np.array([4, 2, 2], dtype=int))
Example no. 10
def test_missing_vals(tmp_path):
    epc = os.path.join(tmp_path, 'model.epc')
    model = rq.new_model(epc)
    np_df = np.array([[0.0, 0.0, 1.0, 0], [0.04, 0.015, 0.87, np.nan],
                      [0.12, np.nan, 0.683, np.nan],
                      [0.25, 0.205, 0.35, np.nan], [0.45, 0.55, 0.018, np.nan],
                      [0.74, 1.0, np.nan, 1e-06]])
    df_cols = ['Sg', 'Krg', 'Kro', 'Pc']
    df = pd.DataFrame(np_df, columns=df_cols)
    phase_combo = 'gas-oil'

    with pytest.raises(Exception) as excval:
        RelPerm(model=model, df=df, phase_combo=phase_combo)
    assert "missing values found in Krg column" in str(excval.value)
Example no. 11
def test_relperm_no_phase_combo(tmp_path, test_cols, test_phase_combo):
    # Arrange
    test_model = rq.new_model(os.path.join(tmp_path, 'test'))
    np_df = np.array([[0.0, 0.0, 1.0, 0], [0.04, 0.015, 0.87, np.nan],
                      [0.12, 0.065, 0.689, np.nan],
                      [0.25, 0.205, 0.35, np.nan], [0.45, 0.55, 0.019, np.nan],
                      [0.74, 1.0, 0.0, 0.000001]])
    test_df = pd.DataFrame(np_df, columns=test_cols)

    # Act
    relperm = RelPerm(test_model, df=test_df)

    # Assert
    assert relperm.phase_combo == test_phase_combo
Example no. 12
def test_range(tmp_path):
    epc = os.path.join(tmp_path, 'model.epc')
    model = rq.new_model(epc)
    np_df = np.array([[0.0, 0.0, 1.0, 0], [0.04, 0.015, 0.87, np.nan],
                      [0.12, 0.065, 0.683, np.nan],
                      [0.25, 0.205, 0.35, np.nan], [0.45, 0.55, 0.018, np.nan],
                      [0.74, 1.1, -0.001, 1e-06]])
    df_cols = ['Sg', 'Krg', 'Kro', 'Pc']
    df = pd.DataFrame(np_df, columns=df_cols)
    phase_combo = 'gas-oil'

    with pytest.raises(ValueError) as excval:
        RelPerm(model=model, df=df, phase_combo=phase_combo)
    assert "Krg is not within the range 0-1" in str(excval.value)
Example no. 13
def test_relperm_no_phase_combo_invalid_first_column(tmp_path):
    # Arrange
    test_model = rq.new_model(os.path.join(tmp_path, 'test'))
    np_df = np.array([[0.0, 0.0, 1.0, 0], [0.04, 0.015, 0.87, np.nan],
                      [0.12, 0.065, 0.689, np.nan],
                      [0.25, 0.205, 0.35, np.nan], [0.45, 0.55, 0.019, np.nan],
                      [0.74, 1.0, 0.0, 0.000001]])
    df_cols = ['Sn', 'Krw', 'Kro', 'Pc']
    test_df = pd.DataFrame(np_df, columns=df_cols)

    # Act & Assert
    with pytest.raises(ValueError) as e:
        RelPerm(test_model, df=test_df)
    assert "incorrect saturation column name and/or multiple saturation columns exist" in str(
        e.value)
Example no. 14
def test_relperm_gas_water_phase_combo_invalid_columns(tmp_path,
                                                       test_phase_combo):
    # Arrange
    test_model = rq.new_model(os.path.join(tmp_path, 'test'))
    np_df = np.array([[0.0, 0.0, 1.0, 0], [0.04, 0.015, 0.87, np.nan],
                      [0.12, 0.065, 0.689, np.nan],
                      [0.25, 0.205, 0.35, np.nan], [0.45, 0.55, 0.019, np.nan],
                      [0.74, 1.0, 0.0, 0.000001]])
    df_cols = ['Sg', 'Krw', 'Kro', 'Pc']
    test_df = pd.DataFrame(np_df, columns=df_cols)

    # Act & Assert
    with pytest.raises(ValueError) as e:
        RelPerm(test_model, df=test_df, phase_combo=test_phase_combo)
    assert "incorrect column name(s) {'Kro'}" in str(e.value)
Example no. 15
def test_relperm_table_index_equal_0(tmp_path):
    # Arrange
    test_model = rq.new_model(os.path.join(tmp_path, 'test'))
    np_df = np.array([[0.0, 0.0, 1.0, 0], [0.04, 0.015, 0.87, np.nan],
                      [0.12, 0.065, 0.689, np.nan],
                      [0.25, 0.205, 0.35, np.nan], [0.45, 0.55, 0.019, np.nan],
                      [0.74, 1.0, 0.0, 0.000001]])
    df_cols = ['Sw', 'Krw', 'Kro', 'Pc']
    test_df = pd.DataFrame(np_df, columns=df_cols)
    test_table_index = 0

    # Act & Assert
    with pytest.raises(ValueError) as e:
        RelPerm(test_model, df=test_df, table_index=test_table_index)
    assert "table_index cannot be less than 1" in str(e.value)
Example no. 16
def test_actual_pillar_shape(tmp_path):

    # --------- Arrange----------
    epc = os.path.join(tmp_path, 'grid.epc')
    model = rq.new_model(epc)

    # create a basic block grid
    dxyz = (10.0, 10.0, 10.0)
    vertical_grid = grr.RegularGrid(model,
                                    extent_kji=(2, 2, 2),
                                    title='vert_grid',
                                    origin=(0.0, 0.0, 0.0),
                                    dxyz=dxyz)
    straight_grid = grr.RegularGrid(model,
                                    extent_kji=(2, 2, 2),
                                    title='straight_grid',
                                    origin=(10.0, 10.0, 0.0),
                                    dxyz=dxyz,
                                    as_irregular_grid=True)
    curved_grid = grr.RegularGrid(model,
                                  extent_kji=(3, 2, 2),
                                  title='curved_grid',
                                  origin=(10.0, 10.0, 0.0),
                                  dxyz=dxyz,
                                  as_irregular_grid=True)

    # shift two corner points of cell kji0 == (0, 0, 0) by -10 x and -10 y units
    straight_grid.corner_points()[0][0][0][0][0][0] = np.array([0., 0., 0])
    straight_grid.corner_points()[0][0][0][0][0][1] = np.array([10., 0., 0])

    # shift 2 corner points of cellkji0 == (1, 0, 0) by -5 x units
    curved_grid.corner_points()[0][0][0][1][0][0] = np.array([5., 10., 10])
    curved_grid.corner_points()[0][0][0][1][0][1] = np.array([15., 10., 10])

    # --------- Act----------
    pillar_shape_vertical = actual_pillar_shape(
        pillar_points=vertical_grid.corner_points())
    pillar_shape_straight = actual_pillar_shape(
        pillar_points=straight_grid.corner_points())
    pillar_shape_curved = actual_pillar_shape(
        pillar_points=curved_grid.corner_points())

    # --------- Assert----------
    assert pillar_shape_vertical == 'vertical'
    assert pillar_shape_straight == 'straight'
    assert pillar_shape_curved == 'curved'
Example no. 17
    def __enter__(self) -> Model:
        """Enter the runtime context, return a model."""

        if self.mode in ["read", "read/write"]:
            if not os.path.exists(self.epc_file):
                raise FileNotFoundError(self.epc_file)
            self._model = Model(epc_file=str(self.epc_file))

        else:
            assert self.mode == "create"
            for file in [self.epc_file, self.epc_file[:-4] + '.h5']:
                if os.path.exists(file):
                    os.remove(file)
                    log.info('old file deleted: ' + str(file))
            self._model = new_model(self.epc_file)

        return self._model
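
Only __enter__ is shown above. For completeness, a minimal sketch of a matching __exit__ (an assumption based on the usual context-manager pattern; the exact resqpy implementation may differ): store the epc on a clean exit of a writable context, then release the hdf5 handle.

    def __exit__(self, exc_type, exc_value, exc_tb):
        """Exit the runtime context, storing the model if appropriate."""

        # assumed behaviour: only write back to disk for writable modes and a clean exit
        if exc_type is None and self.mode in ["read/write", "create"]:
            self._model.store_epc()
        # release any hdf5 file handle held by the model
        self._model.h5_release()
        # do not suppress any exception raised inside the context
        return False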
Example no. 18
def test_time_series_from_list(tmp_path):
    epc = os.path.join(tmp_path, 'ts_list.epc')
    model = rq.new_model(epc)
    ts_list = ['2022-01-01', '2022-02-01', '2022-03-01', '2022-04-01']
    ts = rqts.time_series_from_list(ts_list, parent_model = model)
    assert ts.number_of_timestamps() == 4
    assert ts.days_between_timestamps(0, 3) == 31 + 28 + 31
    ts.create_xml()
    model.store_epc()
    model = rq.Model(epc)
    ts_uuid = model.uuid(obj_type = 'TimeSeries')
    assert ts_uuid is not None
    ts = rqts.any_time_series(model, uuid = ts_uuid)
    assert isinstance(ts, rqts.TimeSeries)
    assert ts.number_of_timestamps() == 4
    assert ts.days_between_timestamps(0, 3) == 31 + 28 + 31
    assert ts.timeframe == 'human'
Example no. 19
def test_regular_grid_no_geometry(tmp_path):
    # issue #222

    epc = os.path.join(tmp_path, 'abstract.epc')

    model = rq.new_model(epc)

    # create a basic block grid
    grid = grr.RegularGrid(model, extent_kji=(4, 3, 2), title='spaced out')
    grid.create_xml(add_cell_length_properties=False)
    grid_uuid = grid.uuid

    model.store_epc()

    # check that the grid can be read
    model = rq.Model(epc)

    grid = grr.any_grid(model, uuid=grid_uuid)
Example no. 20
def make_epc_with_abutting_grids(tmp_path):
    epc = os.path.join(tmp_path, 'abutting_grids.epc')
    model = rq.new_model(epc)

    # create a grid
    g0 = grr.RegularGrid(model,
                         extent_kji=(5, 4, 3),
                         dxyz=(100.0, 100.0, 10.0))
    g0.create_xml()

    g1 = grr.RegularGrid(model,
                         extent_kji=(5, 4, 3),
                         dxyz=(100.0, 100.0, 10.0),
                         origin=(100.0, 400.0, 20.0))
    g1.create_xml()

    # create an empty grid connection set
    gcs = rqf.GridConnectionSet(model, title='abut')

    # populate the grid connection set at low level due to lack of multi-grid methods
    gcs.grid_list = [g0, g1]
    gcs.count = 6
    gcs.grid_index_pairs = np.zeros((6, 2), dtype=int)
    gcs.grid_index_pairs[:, 1] = 1
    gcs.face_index_pairs = np.empty((6, 2), dtype=int)
    gcs.face_index_pairs[:, 0] = gcs.face_index_map[1, 1]  #  J+
    gcs.face_index_pairs[:, 1] = gcs.face_index_map[1, 0]  #  J-
    gcs.cell_index_pairs = np.empty((6, 2), dtype=int)
    cell = 0
    for k in range(3):
        for i in range(2):
            gcs.cell_index_pairs[cell, 0] = g0.natural_cell_index(
                (k + 2, 3, i + 1))
            gcs.cell_index_pairs[cell, 1] = g1.natural_cell_index((k, 0, i))
            cell += 1
    #  leave optional feature list & indices as None

    # save the grid connection set
    gcs.write_hdf5()
    gcs.create_xml()
    model.store_epc()

    return epc
Example no. 21
def test_monotonicity(tmp_path):
    epc = os.path.join(tmp_path, 'model.epc')
    model = rq.new_model(epc)
    np_df = np.array([[0.0, 0.0, 1.0, 0], [0.04, 0.015, 0.87, np.nan],
                      [0.12, 0.065, 0.683, np.nan],
                      [0.25, 0.205, 0.35, np.nan], [0.85, 0.55, 0.018, 1e-06],
                      [0.74, 1.0, 0.0, 1e-07]])
    df_cols = ['Sg', 'Krg', 'Kro', 'Pc']
    df = pd.DataFrame(np_df, columns=df_cols)
    phase_combo = 'gas-oil'

    with pytest.raises(ValueError) as excval:
        RelPerm(model=model, df=df, phase_combo=phase_combo)
    assert "('Sg', 'Krg', 'Kro') combo is not monotonic" in str(excval.value)

    df['Sg'] = [0.0, 0.04, 0.12, 0.25, 0.45, 0.74]
    with pytest.raises(ValueError) as excval1:
        RelPerm(model=model, df=df, phase_combo=phase_combo)
    assert "Pc values are not monotonic" in str(excval1.value)
Example no. 22
def test_determine_corp_extent(tmp_path):

    # --------- Arrange----------
    epc = os.path.join(tmp_path, 'grid.epc')
    model = rq.new_model(epc)

    # create a basic block grid
    dxyz = (55.0, 65.0, 27.0)
    grid = grr.RegularGrid(model,
                           extent_kji=(4, 3, 2),
                           title='concrete',
                           origin=(0.0, 0.0, 1000.0),
                           dxyz=dxyz)
    grid.create_xml(add_cell_length_properties=True)
    corner_points = grid.corner_points()
    corner_points_reshaped = corner_points.reshape(1, 1, 24, 2, 2, 2, 3)
    # --------- Act----------
    [nk, nj, ni] = determine_corp_extent(corner_points=corner_points_reshaped)

    # --------- Assert----------
    assert [nk, nj, ni] == [4, 3, 2]
Example no. 23
def test_time_series_from_args(tmp_path):
    epc = os.path.join(tmp_path, 'ts_args.epc')
    model = rq.new_model(epc)
    ts = rqts.TimeSeries(model,
                         first_timestamp = '1963-08-23',
                         daily = 8,
                         monthly = 4,
                         quarterly = 8,
                         yearly = 5,
                         title = 'late 60s')
    assert ts.number_of_timestamps() == 26
    assert ts.days_between_timestamps(2, 3) == 1
    assert ts.days_between_timestamps(0, 25) == 8 + 4 * 30 + 8 * 90 + 5 * 365
    ts.create_xml()
    model.store_epc()
    model = rq.Model(epc)
    ts_uuid = model.uuid(obj_type = 'TimeSeries')
    assert ts_uuid is not None
    ts = rqts.TimeSeries(model, uuid = ts_uuid)
    assert ts.number_of_timestamps() == 26
    assert ts.days_between_timestamps(2, 3) == 1
    assert ts.days_between_timestamps(0, 25) == 8 + 4 * 30 + 8 * 90 + 5 * 365
Example no. 24
def test_geologic_time_series(tmp_path):
    epc = os.path.join(tmp_path, 'ts_geologic.epc')
    model = rq.new_model(epc)
    # Cretaceous Age start times, in Ma, with random use of sign to check it is ignored
    ma_list = (145, 72.1, -83.6, 86.3, 89.8, 93.9, 100.5, -113, -125, 129.4, 132.9, 139.8)
    ts_list = [int(round(ma * 1000000)) for ma in ma_list]
    ts_list_2 = [int(round(ma * 2000000)) for ma in ma_list]
    ts = rqts.time_series_from_list(ts_list, parent_model = model)
    assert ts.number_of_timestamps() == 12
    ts.create_xml()
    ts_2 = rqts.GeologicTimeSeries.from_year_list(model, year_list = ts_list_2, title = 'using class method')
    ts_2.create_xml()
    model.store_epc()
    model = rq.Model(epc)
    ts_uuids = model.uuids(obj_type = 'TimeSeries')
    assert ts_uuids is not None and len(ts_uuids) == 2
    for ts_uuid in ts_uuids:
        ts = rqts.any_time_series(model, uuid = ts_uuid)
        assert isinstance(ts, rqts.GeologicTimeSeries)
        assert ts.timeframe == 'geologic'
        assert ts.number_of_timestamps() == 12
        assert ((ts.timestamps[0] == -145000000 and ts.timestamps[-1] == -72100000) or
                (ts.timestamps[0] == -145000000 * 2 and ts.timestamps[-1] == -72100000 * 2))
Example no. 25
def tmp_model(tmp_path):
    """Example resqpy model in a temporary directory unique to each test"""

    return new_model(str(tmp_path / 'tmp_model.epc'))
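
A minimal sketch (an assumption, not part of the source) of a test consuming this fixture, which would normally be registered with pytest's fixture decorator in a conftest module:

def test_crs_in_tmp_model(tmp_model):
    crs = rqc.Crs(tmp_model, title='fixture crs')
    crs.create_xml()
    tmp_model.store_epc()
    # the single crs object should now be discoverable in the model
    assert len(tmp_model.uuids(obj_type='LocalDepth3dCrs')) == 1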
Example no. 26
def test_vertical_prism_grid_from_seed_points_and_surfaces(tmp_path):

    seed(23487656)  # to ensure test reproducibility

    epc = os.path.join(tmp_path, 'voronoi_prism_grid.epc')
    model = rq.new_model(epc)
    crs = rqc.Crs(model)
    crs.create_xml()

    # define a boundary polyline:
    b_count = 7
    boundary_points = np.empty((b_count, 3))
    radius = 1000.0
    for i in range(b_count):
        theta = -vec.radians_from_degrees(i * 360.0 / b_count)
        boundary_points[i] = (2.0 * radius * maths.cos(theta),
                              radius * maths.sin(theta), 0.0)
    boundary = rql.Polyline(model,
                            set_coord=boundary_points,
                            set_bool=True,
                            set_crs=crs.uuid,
                            title='rough ellipse')
    boundary.write_hdf5()
    boundary.create_xml()

    # derive a larger area of interest
    aoi = rql.Polyline.from_scaled_polyline(boundary,
                                            1.1,
                                            title='area of interest')
    aoi.write_hdf5()
    aoi.create_xml()
    min_xy = np.min(aoi.coordinates[:, :2], axis=0) - 50.0
    max_xy = np.max(aoi.coordinates[:, :2], axis=0) + 50.0

    print(f'***** min max xy aoi+ : {min_xy} {max_xy}')  # debug

    # create some seed points within boundary
    seed_count = 5
    seeds = rqs.PointSet(model,
                         crs_uuid=crs.uuid,
                         polyline=boundary,
                         random_point_count=seed_count,
                         title='seeds')
    seeds.write_hdf5()
    seeds.create_xml()
    seeds_xy = seeds.single_patch_array_ref(0)

    for seed_xy in seeds_xy:
        assert aoi.point_is_inside_xy(
            seed_xy), f'seed point {seed_xy} outwith aoi'

    print(
        f'***** min max xy seeds : {np.min(seeds_xy, axis = 0)} {np.max(seeds_xy, axis = 0)}'
    )  # debug

    # create some horizon surfaces
    ni, nj = 21, 11
    lattice = rqs.Mesh(model,
                       crs_uuid=crs.uuid,
                       mesh_flavour='regular',
                       ni=ni,
                       nj=nj,
                       origin=(min_xy[0], min_xy[1], 0.0),
                       dxyz_dij=np.array([[
                           (max_xy[0] - min_xy[0]) / (ni - 1), 0.0, 0.0
                       ], [0.0, (max_xy[1] - min_xy[1]) / (nj - 1), 0.0]]))
    lattice.write_hdf5()
    lattice.create_xml()
    horizons = []
    for i in range(4):
        horizon_depths = 1000.0 + 100.0 * i + 20.0 * (np.random.random(
            (nj, ni)) - 0.5)
        horizon_mesh = rqs.Mesh(model,
                                crs_uuid=crs.uuid,
                                mesh_flavour='ref&z',
                                ni=ni,
                                nj=nj,
                                z_values=horizon_depths,
                                z_supporting_mesh_uuid=lattice.uuid,
                                title='h' + str(i))
        horizon_mesh.write_hdf5()
        horizon_mesh.create_xml()
        horizon_surface = rqs.Surface(model,
                                      crs_uuid=crs.uuid,
                                      mesh=horizon_mesh,
                                      quad_triangles=True,
                                      title=horizon_mesh.title)
        horizon_surface.write_hdf5()
        horizon_surface.create_xml()
        horizons.append(horizon_surface)

    # create a re-triangulated Voronoi vertical prism grid
    grid = rug.VerticalPrismGrid.from_seed_points_and_surfaces(
        model, seeds_xy, horizons, aoi, title="giant's causeway")
    assert grid is not None
    grid.write_hdf5()
    grid.create_xml()

    # check cell thicknesses are in expected range
    thick = grid.thickness()
    assert np.all(thick >= 80.0)
    assert np.all(thick <= 120.0)

    model.store_epc()
Example no. 27
def test_vertical_prism_grid_from_surfaces(tmp_path):

    epc = os.path.join(tmp_path, 'vertical_prism.epc')
    model = rq.new_model(epc)
    crs = rqc.Crs(model)
    crs.create_xml()

    # create a point set representing a pentagon with a centre node
    pentagon_points = np.array([[-100.0, -200.0, 1050.0],
                                [-200.0, 0.0, 1050.0], [0.0, 200.0, 1025.0],
                                [200.0, 0.0, 975.0], [100.0, -200.0, 999.0],
                                [0.0, 0.0, 1000.0]])
    pentagon = rqs.PointSet(model,
                            points_array=pentagon_points,
                            crs_uuid=crs.uuid,
                            title='pentagon')
    pentagon.write_hdf5()
    pentagon.create_xml()

    # create a surface from the point set (will make a Delaunay triangulation)
    top_surf = rqs.Surface(model, point_set=pentagon, title='top surface')
    top_surf.write_hdf5()
    top_surf.create_xml()
    surf_list = [top_surf]

    # check the pentagon surface
    pentagon_triangles, pentagon_points = top_surf.triangles_and_points()
    assert pentagon_points.shape == (6, 3)
    assert pentagon_triangles.shape == (5, 3)

    # create a couple of horizontal surfaces at greater depths
    boundary = np.array([[-300.0, -300.0, 0.0], [300.0, 300.0, 0.0]])
    for depth in (1100.0, 1200.0):
        base = rqs.Surface(model)
        base.set_to_horizontal_plane(depth, boundary)
        base.write_hdf5()
        base.create_xml()
        surf_list.append(base)

    # now build a vertical prism grid from the surfaces
    grid = rug.VerticalPrismGrid.from_surfaces(model,
                                               surf_list,
                                               title='the pentagon')
    grid.write_hdf5()
    grid.create_xml()

    model.store_epc()

    # re-open model

    model = rq.Model(epc)
    assert model is not None

    # find grid by title
    grid_uuid = model.uuid(obj_type='UnstructuredGridRepresentation',
                           title='the pentagon')
    assert grid_uuid is not None

    # re-instantiate the grid
    grid = rug.VerticalPrismGrid(model, uuid=grid_uuid)
    assert grid is not None
    assert grid.nk == 2
    assert grid.cell_count == 10
    assert grid.node_count == 18
    assert grid.face_count == 35

    # create a very similar grid using explicit triangulation arguments

    # make the same Delaunay triangulation
    triangles = triangulation.dt(pentagon_points, algorithm="scipy")
    assert triangles.ndim == 2 and triangles.shape[1] == 3

    # slightly shrink pentagon points to be within area of surfaces
    for i in range(len(pentagon_points)):
        if pentagon_points[i, 0] < 0.0:
            pentagon_points[i, 0] += 1.0
        elif pentagon_points[i, 0] > 0.0:
            pentagon_points[i, 0] -= 1.0
        if pentagon_points[i, 1] < 0.0:
            pentagon_points[i, 1] += 1.0
        elif pentagon_points[i, 1] > 0.0:
            pentagon_points[i, 1] -= 1.0

    # load the surfaces
    surf_uuids = model.uuids(obj_type='TriangulatedSetRepresentation',
                             sort_by='oldest')
    surf_list = []
    for surf_uuid in surf_uuids:
        surf_list.append(rqs.Surface(model, uuid=surf_uuid))

    # create a new vertical prism grid using the explicit triangulation arguments
    similar = rug.VerticalPrismGrid.from_surfaces(
        model,
        surf_list,
        column_points=pentagon_points,
        column_triangles=triangles,
        title='similar pentagon')

    # check similarity
    for attr in ('cell_shape', 'nk', 'cell_count', 'node_count', 'face_count'):
        assert getattr(grid, attr) == getattr(similar, attr)
    # for index_attr in ('nodes_per_face', 'nodes_per_face_cl', 'faces_per_cell', 'faces_per_cell_cl'):
    for i, (index_attr, index_attr_cl) in enumerate([
        ('nodes_per_face', 'nodes_per_face_cl'),
        ('faces_per_cell', 'faces_per_cell_cl')
    ]):
        ga_cl = getattr(grid, index_attr_cl)
        sa_cl = getattr(similar, index_attr_cl)
        assert np.all(ga_cl == sa_cl)
        ga = getattr(grid, index_attr)
        sa = getattr(similar, index_attr)
        ip = 0 if i == 0 else ga_cl[i - 1]
        assert set(ga[ip:ga_cl[i]]) == set(sa[ip:ga_cl[i]])
    assert_allclose(grid.points_ref(), similar.points_ref(), atol=2.0)

    # check that isotropic horizontal permeability is preserved
    permeability = 250.0
    primary_k = np.full((grid.cell_count, ), permeability)
    orthogonal_k = primary_k.copy()
    triple_k = grid.triple_horizontal_permeability(primary_k, orthogonal_k,
                                                   37.0)
    assert triple_k.shape == (grid.cell_count, 3)
    assert_array_almost_equal(triple_k, permeability)
    azimuth = np.linspace(0.0, 360.0, num=grid.cell_count)
    triple_k = grid.triple_horizontal_permeability(primary_k, orthogonal_k,
                                                   azimuth)
    assert triple_k.shape == (grid.cell_count, 3)
    assert_array_almost_equal(triple_k, permeability)

    # check that anisotropic horizontal permeability is correctly bounded
    orthogonal_k *= 0.1
    triple_k = grid.triple_horizontal_permeability(primary_k, orthogonal_k,
                                                   azimuth)
    assert triple_k.shape == (grid.cell_count, 3)
    assert np.all(triple_k <= permeability)
    assert np.all(triple_k >= permeability * 0.1)
    assert np.min(triple_k) < permeability / 2.0
    assert np.max(triple_k) > permeability / 2.0

    # set up some properties
    pc = grid.property_collection
    assert pc is not None
    pc.add_cached_array_to_imported_list(cached_array=None,
                                         source_info='unit test',
                                         keyword='NETGRS',
                                         property_kind='net to gross ratio',
                                         discrete=False,
                                         uom='m3/m3',
                                         indexable_element='cells',
                                         const_value=0.75)
    pc.add_cached_array_to_imported_list(cached_array=None,
                                         source_info='unit test',
                                         keyword='PERMK',
                                         property_kind='permeability rock',
                                         facet_type='direction',
                                         facet='K',
                                         discrete=False,
                                         uom='mD',
                                         indexable_element='cells',
                                         const_value=10.0)
    pc.add_cached_array_to_imported_list(cached_array=None,
                                         source_info='unit test',
                                         keyword='PERM',
                                         property_kind='permeability rock',
                                         facet_type='direction',
                                         facet='primary',
                                         discrete=False,
                                         uom='mD',
                                         indexable_element='cells',
                                         const_value=100.0)
    pc.add_cached_array_to_imported_list(cached_array=None,
                                         source_info='unit test',
                                         keyword='PERM',
                                         property_kind='permeability rock',
                                         facet_type='direction',
                                         facet='orthogonal',
                                         discrete=False,
                                         uom='mD',
                                         indexable_element='cells',
                                         const_value=20.0)
    x_min, x_max = grid.xyz_box()[:, 0]
    relative_x = (grid.centre_point()[:, 0] - x_min) * (x_max - x_min)
    azi = relative_x * 90.0 + 45.0
    pc.add_cached_array_to_imported_list(
        cached_array=azi,
        source_info='unit test',
        keyword='primary permeability azimuth',
        property_kind='plane angle',
        facet_type='direction',
        facet='primary',
        discrete=False,
        uom='dega',
        indexable_element='cells')
    pc.write_hdf5_for_imported_list()
    pc.create_xml_for_imported_list_and_add_parts_to_model()

    model.store_epc()

    # test that half cell transmissibilities can be computed
    half_t = grid.half_cell_transmissibility()
    assert np.all(half_t > 0.0)

    # add the half cell transmissibility array as a property
    pc.add_cached_array_to_imported_list(cached_array=half_t.flatten(),
                                         source_info='unit test',
                                         keyword='half transmissibility',
                                         property_kind='transmissibility',
                                         discrete=False,
                                         count=1,
                                         indexable_element='faces per cell')
    pc.write_hdf5_for_imported_list()
    pc.create_xml_for_imported_list_and_add_parts_to_model(
        extra_metadata={'uom': 'm3.cP/(d.kPa)'})

    model.store_epc()
Example no. 28
def test_tetra_grid(tmp_path):

    epc = os.path.join(tmp_path, 'tetra_test.epc')
    model = rq.new_model(epc)
    crs = rqc.Crs(model)
    crs.create_xml()

    # create an empty TetraGrid
    tetra = rug.TetraGrid(model, title='star')
    assert tetra.cell_shape == 'tetrahedral'

    # hand craft all attribute data
    tetra.crs_uuid = model.uuid(obj_type='LocalDepth3dCrs')
    assert tetra.crs_uuid is not None
    assert bu.matching_uuids(tetra.crs_uuid, crs.uuid)
    tetra.set_cell_count(5)
    # faces
    tetra.face_count = 16
    tetra.faces_per_cell_cl = np.arange(4, 4 * 5 + 1, 4, dtype=int)
    tetra.faces_per_cell = np.empty(20, dtype=int)
    tetra.faces_per_cell[:4] = (0, 1, 2, 3)  # cell 0
    tetra.faces_per_cell[4:8] = (0, 4, 5, 6)  # cell 1
    tetra.faces_per_cell[8:12] = (1, 7, 8, 9)  # cell 2
    tetra.faces_per_cell[12:16] = (2, 10, 11, 12)  # cell 3
    tetra.faces_per_cell[16:] = (3, 13, 14, 15)  # cell 4
    # nodes
    tetra.node_count = 8
    tetra.nodes_per_face_cl = np.arange(3, 3 * 16 + 1, 3, dtype=int)
    tetra.nodes_per_face = np.empty(48, dtype=int)
    # internal faces (cell 0)
    tetra.nodes_per_face[:3] = (0, 1, 2)  # face 0
    tetra.nodes_per_face[3:6] = (0, 3, 1)  # face 1
    tetra.nodes_per_face[6:9] = (1, 3, 2)  # face 2
    tetra.nodes_per_face[9:12] = (2, 3, 0)  # face 3
    # external faces (cell 1)
    tetra.nodes_per_face[12:15] = (0, 1, 4)  # face 4
    tetra.nodes_per_face[15:18] = (1, 2, 4)  # face 5
    tetra.nodes_per_face[18:21] = (2, 0, 4)  # face 6
    # external faces (cell 2)
    tetra.nodes_per_face[21:24] = (0, 3, 5)  # face 7
    tetra.nodes_per_face[24:27] = (3, 1, 5)  # face 8
    tetra.nodes_per_face[27:30] = (1, 0, 5)  # face 9
    # external faces (cell 3)
    tetra.nodes_per_face[30:33] = (1, 3, 6)  # face 10
    tetra.nodes_per_face[33:36] = (3, 2, 6)  # face 11
    tetra.nodes_per_face[36:39] = (2, 1, 6)  # face 12
    # external faces (cell 4)
    tetra.nodes_per_face[39:42] = (2, 3, 7)  # face 13
    tetra.nodes_per_face[42:45] = (3, 0, 7)  # face 14
    tetra.nodes_per_face[45:] = (0, 2, 7)  # face 15
    # face handedness
    tetra.cell_face_is_right_handed = np.zeros(20, dtype=bool)  # False for all faces of external cells (1 to 4)
    tetra.cell_face_is_right_handed[:4] = True  # True for all faces of internal cell (0)
    # points
    tetra.points_cached = np.zeros((8, 3))
    # internal cell (0) points
    half_edge = 36.152
    one_over_root_two = 1.0 / maths.sqrt(2.0)
    tetra.points_cached[0] = (-half_edge, 0.0, -half_edge * one_over_root_two)
    tetra.points_cached[1] = (half_edge, 0.0, -half_edge * one_over_root_two)
    tetra.points_cached[2] = (0.0, half_edge, half_edge * one_over_root_two)
    tetra.points_cached[3] = (0.0, -half_edge, half_edge * one_over_root_two)
    # project remaining nodes outwards
    for fi, o_node in enumerate((3, 2, 0, 1)):
        fc = tetra.face_centre_point(fi)
        tetra.points_cached[4 + fi] = fc - (tetra.points_cached[o_node] - fc)

    # basic validity check
    tetra.check_tetra()

    # write arrays, create xml and store model
    tetra.write_hdf5()
    tetra.create_xml()
    model.store_epc()

    # re-open model and establish grid
    model = rq.Model(epc)
    assert model is not None
    tetra_uuid = model.uuid(obj_type='UnstructuredGridRepresentation',
                            title='star')
    assert tetra_uuid is not None
    tetra = rug.TetraGrid(model, uuid=tetra_uuid)
    assert tetra is not None
    # perform basic checks
    assert tetra.cell_count == 5
    assert tetra.cell_shape == 'tetrahedral'
    tetra.check_tetra()

    # test volume calculation
    expected_cell_volume = ((2.0 * half_edge)**3) / (6.0 * maths.sqrt(2.0))
    for cell in range(tetra.cell_count):
        assert maths.isclose(tetra.volume(cell),
                             expected_cell_volume,
                             rel_tol=1.0e-3)
    assert maths.isclose(tetra.grid_volume(), 5.0 * expected_cell_volume)

    # test face area
    expected_area = maths.sqrt(3.0 * half_edge * (half_edge**3))
    area = tetra.area_of_face(0)
    assert maths.isclose(area, expected_area, rel_tol=1.0e-3)

    # test internal / external face lists
    assert np.all(tetra.external_face_indices() == np.arange(4, 16, dtype=int))
    inactive_mask = np.zeros(5, dtype=bool)
    assert np.all(
        tetra.external_face_indices_for_masked_cells(inactive_mask) ==
        tetra.external_face_indices())
    assert np.all(
        tetra.internal_face_indices_for_masked_cells(inactive_mask) ==
        np.arange(4, dtype=int))
    # mask out central cell
    inactive_mask[0] = True
    assert len(tetra.external_face_indices_for_masked_cells(
        inactive_mask)) == tetra.face_count
    assert len(
        tetra.internal_face_indices_for_masked_cells(inactive_mask)) == 0
Example no. 29
def test_crs(tmp_path):
    # create some coordinate reference systems
    model = rq.new_model(os.path.join(tmp_path, 'crs_test.epc'))
    crs_default = rqc.Crs(model)
    assert crs_default.null_transform
    crs_m = rqc.Crs(model, xy_units = 'm', z_units = 'm')
    crs_ft = rqc.Crs(model, xy_units = 'ft', z_units = 'ft')
    crs_mixed = rqc.Crs(model, xy_units = 'm', z_units = 'ft')
    crs_offset = rqc.Crs(model, xy_units = 'm', z_units = 'm', x_offset = 100.0, y_offset = -100.0, z_offset = -50.0)
    assert not crs_offset.null_transform
    crs_elevation = rqc.Crs(model, z_inc_down = False)
    crs_rotate = rqc.Crs(model, rotation = maths.pi / 2.0, rotation_units = 'rad')
    crs_south = rqc.Crs(model, axis_order = 'southing westing')
    crs_time_s = rqc.Crs(model, xy_units = 'm', time_units = 's')
    crs_time_ms = rqc.Crs(model, xy_units = 'm', time_units = 'ms')
    for crs_time in [crs_time_s, crs_time_ms]:
        assert crs_time.resqml_type == 'LocalTime3dCrs'

    # check that distinctiveness is recognised
    assert crs_default.is_equivalent(crs_m)
    assert not crs_m.is_equivalent(crs_ft)
    assert not crs_mixed.is_equivalent(crs_m)
    assert not crs_m.is_equivalent(crs_offset)
    assert not crs_m.is_equivalent(crs_elevation)
    assert not crs_m.is_equivalent(crs_rotate)
    assert not crs_m.is_equivalent(crs_south)
    assert not crs_time_s.is_equivalent(crs_time_ms)
    for depth_crs in [crs_default, crs_m, crs_ft, crs_mixed, crs_offset, crs_elevation, crs_rotate, crs_south]:
        assert depth_crs.resqml_type == 'LocalDepth3dCrs'
        assert not crs_time_s == depth_crs
        assert not crs_time_ms == depth_crs

    # check handedness
    assert not crs_m.is_right_handed_xy()
    assert not crs_m.is_right_handed_xyz()
    assert not crs_elevation.is_right_handed_xy()
    assert crs_elevation.is_right_handed_xyz()
    assert crs_south.is_right_handed_xy()
    assert crs_south.is_right_handed_xyz()

    # create some xml
    for crs in [
            crs_default, crs_m, crs_ft, crs_mixed, crs_offset, crs_elevation, crs_rotate, crs_south, crs_time_s,
            crs_time_ms
    ]:
        crs.create_xml()
    model.store_epc()
    # check re-use of equivalent crs'es
    assert bu.matching_uuids(crs_default.uuid, crs_m.uuid)

    # test conversion
    ft_to_m = 0.3048

    a = np.empty((10, 3))
    a[:, 0] = np.random.random(10) * 5.0e5
    a[:, 1] = np.random.random(10) * 10.0e5
    a[:, 2] = np.random.random(10) * 4.0e3

    b = a.copy()
    crs_m.convert_array_from(crs_default, a)
    assert np.max(np.abs(b - a)) < 1.0e-6
    a[:] = b
    crs_m.convert_array_to(crs_m, a)
    assert np.all(a == b)
    crs_ft.convert_array_from(crs_m, a)
    assert np.max(np.abs(b - a * ft_to_m)) < 1.0e-6
    crs_ft.convert_array_to(crs_m, a)
    assert np.max(np.abs(b - a)) < 1.0e-6
    a[:] = b
    crs_m.local_to_global_array(a)
    assert np.max(np.abs(b - a)) < 1.0e-6
    a[:] = b
    crs_offset.global_to_local_array(a)
    a[:, 0] += 100.0
    a[:, 1] -= 100.0
    a[:, 2] -= 50.0
    assert_array_almost_equal(a, b)

    # test single point conversion
    p = (456.78, 678.90, -1234.56)
    assert_array_almost_equal(p, crs_offset.global_to_local(crs_offset.local_to_global(p)))
    p_ft = crs_m.convert_to(crs_ft, np.array(p))
    assert_array_almost_equal(p, crs_m.convert_from(crs_ft, p_ft))

    #  test time conversion
    pt = (123456.0, 234567.0, 1983.0)
    pt_s = np.array(crs_time_ms.convert_to(crs_time_s, pt))
    pt_s[2] *= 1000.0  # convert from seconds back to milliseconds
    assert_array_almost_equal(pt, pt_s)

    # test rotation
    p = (0.00, 234.00, 5678.90)
    pr = crs_rotate.local_to_global(p)
    assert_array_almost_equal(pr, (234.00, 0.00, 5678.90))
    assert_array_almost_equal(crs_rotate.global_to_local(pr), p)
Example no. 30
def find_faces_to_represent_surface_regular_wrapper(
    index: int,
    use_index_as_realisation: bool,
    grid_epc: str,
    grid_uuid: Union[UUID, str],
    surface_epc: str,
    surface_uuid: Union[UUID, str],
    name: str,
    title: Optional[str] = None,
    centres: Optional[np.ndarray] = None,
    agitate: bool = False,
    feature_type='fault',
    progress_fn: Optional[Callable] = None,
    consistent_side: bool = False,
    return_properties: Optional[List[str]] = None,
) -> Tuple[int, bool, str, List[Union[UUID, str]]]:
    """Wrapper function of find_faces_to_represent_surface_regular_optimised.

    Used for multiprocessing to create a new model that is saved in a temporary epc file
    and returns the required values, which are used in the multiprocessing function to
    recombine all the objects into a single epc file.

    Args:
        index (int): the index of the function call from the multiprocessing function.
        use_index_as_realisation (bool): if True, uses the index number as the realization number on
            the property collection.
        grid_epc (str): epc file path where the grid is saved.
        grid_uuid (UUID/str): UUID (universally unique identifier) of the grid object.
        surface_epc (str): epc file path where the surface is saved.
        surface_uuid (UUID/str): UUID (universally unique identifier) of the surface object.
        name (str): the feature name to use in the grid connection set.
        title (str): the citation title to use for the grid connection set; defaults to name
        centres (np.ndarray, shape (nk, nj, ni, 3)): precomputed cell centre points in
           local grid space, to avoid possible crs issues; required if grid's crs includes an origin (offset)?
        agitate (bool): if True, the points of the surface are perturbed by a small random
           offset, which can help if the surface has been built from a regular mesh with a periodic resonance
           with the grid
        feature_type (str, default 'fault'): one of 'fault', 'horizon', or 'geobody boundary'
        progress_fn (Callable): a callback function to be called at intervals by this function;
           the argument will progress from 0.0 to 1.0 in unspecified and uneven increments
        consistent_side (bool): if True, the cell pairs will be ordered so that all the first
           cells in each pair are on one side of the surface, and all the second cells on the other
        return_properties (List[str]): if present, a list of property arrays to calculate and
           return as a dictionary; recognised values in the list are 'triangle', 'offset' and 'normal vector';
           triangle is an index into the surface triangles of the triangle detected for the gcs face; offset
           is a measure of the distance between the centre of the cell face and the intersection point of the
           inter-cell centre vector with a triangle in the surface; normal vector is a unit vector normal
           to the surface triangle; each array has an entry for each face in the gcs; the returned dictionary
           has the passed strings as keys and numpy arrays as values.

    Returns:
        Tuple containing:

            - index (int): the index passed to the function.
            - success (bool): whether the function call was successful, whatever that
                definition is.
            - epc_file (str): the epc file path where the objects are stored.
            - uuid_list (List[str]): list of UUIDs of relevant objects.
    """
    surface = Surface(parent_model=Model(surface_epc), uuid=str(surface_uuid))

    tmp_dir = Path(f"tmp_dir/{uuid.uuid4()}")
    tmp_dir.mkdir(parents=True, exist_ok=True)
    epc_file = f"{tmp_dir}/wrapper.epc"
    model = new_model(epc_file=epc_file)
    model.copy_uuid_from_other_model(Model(grid_epc), uuid=str(grid_uuid))
    model.copy_uuid_from_other_model(surface.model, uuid=str(surface_uuid))

    grid = RegularGrid(parent_model=model, uuid=str(grid_uuid))

    uuid_list = []
    uuid_list.extend([grid_uuid, surface_uuid])

    print("About to call function")

    returns = rqgs.find_faces_to_represent_surface_regular_optimised(
        grid,
        surface,
        name,
        title,
        centres,
        agitate,
        feature_type,
        progress_fn,
        consistent_side,
        return_properties,
    )

    print("Function returned")
    if return_properties is not None:
        gcs = returns[0]
        properties = returns[1]
        realisation = index if use_index_as_realisation else None
        property_collection = PropertyCollection(support=gcs)
        for name, array in properties.items():
            if name == "normal vector":
                property_collection.add_cached_array_to_imported_list(
                    array,
                    "from find_faces function",
                    name,
                    discrete=False,
                    uom="Euc",
                    property_kind="continuous",
                    realization=realisation,
                    indexable_element="faces",
                    points=True,
                )
            elif name == "triangle":
                property_collection.add_cached_array_to_imported_list(
                    array,
                    "from find_faces function",
                    name,
                    discrete=True,
                    null_value=-1,
                    property_kind="discrete",
                    realization=realisation,
                    indexable_element="faces",
                )
            elif name == "offset":
                property_collection.add_cached_array_to_imported_list(
                    array,
                    "from find_faces function",
                    name,
                    discrete=False,
                    uom=grid.crs.z_units,
                    property_kind="continuous",
                    realization=realisation,
                    indexable_element="faces",
                )
        property_collection.write_hdf5_for_imported_list()
        uuids_properties = (
            property_collection.
            create_xml_for_imported_list_and_add_parts_to_model())
        uuid_list.extend(uuids_properties)
    else:
        gcs = returns

    success = False
    if gcs.count > 0:
        success = True

    gcs.write_hdf5()
    gcs.create_xml()
    model.copy_uuid_from_other_model(gcs.model, uuid=gcs.uuid)
    uuid_list.append(gcs.uuid)

    model.store_epc()

    return index, success, epc_file, uuid_list
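
The docstring above describes how the returned tuple is consumed by a multiprocessing driver that recombines objects into a single epc. A minimal sketch (an assumption, not part of the source) of that recombination step, using only calls that appear in the wrapper itself:

def recombine_wrapper_outputs(combined_epc, results):
    """Copy the objects produced by each wrapper call into a single combined model.

    results is assumed to be a list of (index, success, epc_file, uuid_list) tuples
    as returned by find_faces_to_represent_surface_regular_wrapper.
    """
    combined = new_model(epc_file=combined_epc)
    for index, success, epc_file, uuid_list in results:
        if not success:
            continue
        source_model = Model(epc_file)
        for obj_uuid in uuid_list:
            # copy each relevant object (grid, surface, gcs, properties) into the combined model
            combined.copy_uuid_from_other_model(source_model, uuid=str(obj_uuid))
    combined.store_epc()
    return combined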