Example no. 1
0
def test_find_faces_to_represent_surface_regular_wrapper(
        small_grid_and_surface: Tuple[RegularGrid, Surface]):
    """Wrapper call succeeds and the recombined model holds exactly the expected parts."""
    # Arrange
    grid, surface = small_grid_and_surface
    # Grid and surface live in the same model, so one epc path serves both.
    epc_path = grid.model.epc_file
    input_index = 0
    use_index_as_realisation = False
    name = "test"

    # Act
    index, success, epc_file, uuid_list = find_faces_to_represent_surface_regular_wrapper(
        input_index,
        use_index_as_realisation,
        epc_path,
        grid.uuid,
        epc_path,
        surface.uuid,
        name,
    )
    model = Model(epc_file=epc_file)
    rm_tree("tmp_dir")

    # Assert
    assert success is True
    assert index == input_index
    # One of each expected object type should be present in the output model.
    expected_counts = {
        'LocalDepth3dCrs': 1,
        'IjkGridRepresentation': 1,
        'TriangulatedSetRepresentation': 1,
        'GridConnectionSetRepresentation': 1,
        'FaultInterpretation': 1,
        'TectonicBoundaryFeature': 1,
    }
    for obj_type, expected in expected_counts.items():
        assert len(model.uuids(obj_type=obj_type)) == expected
    assert len(model.uuids()) == 6
    assert len(uuid_list) == 3
Example no. 2
0
def function_multiprocessing(
    function: Callable,
    kwargs_list: List[Dict[str, Any]],
    recombined_epc: Union[Path, str],
    cluster,
    consolidate: bool = True,
) -> List[bool]:
    """Calls a function concurrently with the specified arguments.

    A multiprocessing pool is used to call the function multiple times in parallel. Once
    all results are returned, they are combined into a single epc file.

    Args:
        function (Callable): the function to be called. Needs to return:

            - index (int): the index of the kwargs in the kwargs_list.
            - success (bool): whether the function call was successful, whatever that
                definition is.
            - epc_file (Path/str): the epc file path where the objects are stored.
            - uuid_list (List[str]): list of UUIDs of relevant objects.

        kwargs_list (List[Dict[Any]]): A list of keyword argument dictionaries that are
            used when calling the function.
        recombined_epc (Path/str): A pathlib Path or path string of
            where the combined epc will be saved.
        cluster (LocalCluster/JobQueueCluster): a LocalCluster is a Dask cluster on a
            local machine. If using a job queuing system, a JobQueueCluster can be used
            such as an SGECluster, SLURMCluster, PBSCluster, LSFCluster etc.
        consolidate (bool): if True and an equivalent part already exists in
            a model, it is not duplicated and the uuids are noted as equivalent.

    Returns:
        success_list (List[bool]): A boolean list of successful function calls.

    Raises:
        FileNotFoundError: if an epc file produced by a worker does not become
            visible on the filesystem within the retry window.

    Note:
        This function uses the Dask backend to run the given function in parallel, so a
        Dask cluster must be setup and passed as an argument. Dask will need to be
        installed in the Python environment because it is not a dependency of the
        project. More info can be found at https://docs.dask.org/en/latest/deploying.html
    """
    log.info("Multiprocessing function called with %s function.", function.__name__)

    # Tag each kwargs dict with its position so out-of-order results can be re-sorted.
    for i, kwargs in enumerate(kwargs_list):
        kwargs["index"] = i

    with parallel_backend("dask"):
        results = Parallel()(delayed(function)(**kwargs) for kwargs in kwargs_list)

    log.info("Function calls complete.")

    # Sorting the results by the original kwargs_list index.
    # (sorted already returns a list; the previous list() wrapper was redundant.)
    results = sorted(results, key = lambda result: result[0])

    success_list = [result[1] for result in results]
    epc_list = [result[2] for result in results]
    uuids_list = [result[3] for result in results]
    log.info("Number of successes: %s/%s.", sum(success_list), len(results))

    epc_file = Path(str(recombined_epc))
    if epc_file.is_file():
        model_recombined = Model(epc_file = str(epc_file))
    else:
        model_recombined = new_model(epc_file = str(epc_file))

    log.info("Creating the recombined epc file.")
    # Generous retry window for shared/networked filesystems where a worker's
    # output may not be immediately visible; previously this waited forever.
    max_attempts = 60
    for i, epc in enumerate(epc_list):
        if epc is None:
            continue
        for attempt in range(max_attempts):
            try:
                model = Model(epc_file = epc)
                break
            except FileNotFoundError:
                time.sleep(1)
        else:
            # Exhausted all attempts: fail loudly instead of hanging indefinitely.
            raise FileNotFoundError(
                f"epc file not found after {max_attempts} attempts: {epc}")
        uuids = uuids_list[i]
        if uuids is None:
            # No explicit uuid list supplied: copy every object in the model.
            uuids = model.uuids()
        for uuid in uuids:
            model_recombined.copy_uuid_from_other_model(model, uuid = uuid, consolidate = consolidate)

    # Deleting temporary directory.
    log.info("Deleting the temporary directory")
    rm_tree("tmp_dir")

    model_recombined.store_epc()

    log.info("Recombined epc file complete.")

    return success_list