Example #1
File: ertplot.py Project: pgdr/ert
def main(argv):

    app = QApplication(argv) #Early so that QT is initialized before other imports
    app.setWindowIcon(resourceIcon("application/window_icon_cutout"))

    if len(argv) == 1:
        sys.stderr.write("Missing configuration file")
        sys.exit(1)

    config_file = argv[1]
    strict = True
        
    if not os.path.exists(config_file):
        print("Can not run without a configuration file.")
        sys.exit(1)

    if os.path.isdir(config_file):
        print("The specified configuration file is a directory!")
        sys.exit(1)


    splash = ErtSplash()
    splash.version = "Version %s" % Version.getVersion()
    splash.timestamp = Version.getBuildTime()

    splash.show()
    splash.repaint()

    now = time.time()

    res_config = ResConfig(config_file)
    ert = EnKFMain(res_config, strict=strict, verbose=False)
    ert_gui.configureErtNotifier(ert, config_file)

    window = PlotWindow(ert, None)

    sleep_time = 2 - (time.time() - now)

    if sleep_time > 0:
        time.sleep(sleep_time)

    window.show()
    splash.finish(window)
    window.activateWindow()
    window.raise_()
    finished_code = app.exec_()

    ert.free()

    sys.exit(finished_code)
Example #2
def api(tmpdir, source_root):
    with tmpdir.as_cwd():
        test_data_root = source_root / "test-data" / "local"
        test_data_dir = os.path.join(test_data_root, "snake_oil")
        shutil.copytree(test_data_dir, "test_data")
        os.chdir("test_data")
        config_file = "snake_oil.ert"
        rc = ResConfig(user_config_file=config_file)
        rc.convertToCReference(None)
        ert = EnKFMain(rc)
        facade = LibresFacade(ert)
        api = PlotApi(facade)
        yield api
Example #3
def test_main_entry_point_block_data_calc():
    cos_config = {"CALCULATE_KEYS": {"keys": [{"key": "RFT3"}]}}

    test_data_dir = os.path.join(TEST_DATA_DIR, "Equinor", "config",
                                 "with_RFT")

    shutil.copytree(test_data_dir, "test_data")
    os.chdir(os.path.join("test_data"))

    res_config = ResConfig("config")
    ert = EnKFMain(res_config)
    obs = ert.getObservations()

    obs_vector = obs["RFT3"]

    for index, node in enumerate(obs_vector):
        assert node.getStdScaling(index) == 1.0

    CorrelatedObservationsScalingJob(ert).run(cos_config)

    for index, node in enumerate(obs_vector):
        assert node.getStdScaling(index) == 2.0
Example #4
    def __init__(self, res_config, controls, results):
        """Will create simulator which can be used to run multiple simulations.

        The @res_config argument should be a ResConfig object, representing the
        fully configured state of libres.


        The @controls argument configures which parameters the simulator should
        get when actually simulating. The @controls argument should be a
        dictionary like this:

            controls = {"cmode": ["Well", "Group"], "order": ["W1", "W2", "W3"]}

        In this example the simulator will expect two arrays 'cmode' and
        'order', consisting of two and three elements respectively. When
        actually simulating, these values will be written to json files looking
        like:

             cmode.json = {"Well": 1.0, "Group": 2.0}
             order.json = {"W1": 1, "W2": 1.0, "W3": 1.0}

        When later invoking the start() method the simulator expects to get
        values for all parameters configured with the @controls argument,
        otherwise an exception will be raised. Internally in libres code the
        controls will be implemented as 'ext_param' instances.


        The @results argument is a list of keys of results which the simulator
        expects to be generated by the forward model. If the @results argument
        looks like:

             results = ["CMODE", "order"]

        The simulator will look for the files 'CMODE_0' and 'order_0' in the
        simulation folder. If those files are not produced by the forward model,
        an exception will be raised.
        """
        if not isinstance(res_config, ResConfig):
            raise ValueError("The first argument must be valid ResConfig instance")

        self.res_config = res_config
        self.ert = EnKFMain(self.res_config)
        self.control_keys = tuple(controls.keys())
        self.result_keys = tuple(results)

        ens_config = self.res_config.ensemble_config
        for control_name, variable_names in controls.items():
            ens_config.addNode(EnkfConfigNode.create_ext_param(control_name, variable_names))

        for key in results:
            ens_config.addNode(EnkfConfigNode.create_gen_data(key, "{}_%d".format(key)))
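The docstring above fully specifies the shapes of the @controls and @results arguments. The following is a minimal construction sketch based only on that description; the class name BatchSimulator, its import path and the config file name are assumptions, not taken from the snippet:

from res.enkf import ResConfig

# Assumed class; the snippet above only shows the __init__ body.
# from res.simulator import BatchSimulator

res_config = ResConfig("some_case.ert")  # hypothetical config file

# Two control vectors: 'cmode' with two variables and 'order' with three,
# exactly as in the docstring example above.
controls = {"cmode": ["Well", "Group"], "order": ["W1", "W2", "W3"]}

# The forward model is expected to produce the files CMODE_0 and order_0.
results = ["CMODE", "order"]

# simulator = BatchSimulator(res_config, controls, results)
# When start() is invoked later, values for every control variable must be
# supplied, otherwise an exception is raised (see the docstring above).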
Example #5
def run_cli(args):
    logging.basicConfig(level=logging.INFO, format='%(message)s')
    res_config = ResConfig(args.config)
    os.chdir(res_config.config_path)
    ert = EnKFMain(res_config, strict=True, verbose=args.verbose)
    notifier = ErtCliNotifier(ert, args.config)
    ERT.adapt(notifier)

    if args.mode == WORKFLOW_MODE:
        execute_workflow(args.name)
        return

    model, argument = create_model(args)
    if args.disable_monitoring:
        model.startSimulations(argument)
        if model.hasRunFailed():
            _clear_and_exit(model.getFailMessage())
    else:
        # Test run does not have a current_case
        if "current_case" in args and args.current_case:
            ERT.enkf_facade.select_or_create_new_case(args.current_case)

        if (args.mode in [ENSEMBLE_SMOOTHER_MODE, ITERATIVE_ENSEMBLE_SMOOTHER_MODE, ES_MDA_MODE] and 
            args.target_case == ERT.enkf_facade.get_current_case_name()):
            msg = (
                "ERROR: Target file system and source file system can not be the same. "
                "They were both: {}.".format(args.target_case)
            )
            _clear_and_exit(msg)

        thread = threading.Thread(
            name="ert_cli_simulation_thread",
            target=model.startSimulations,
            args=(argument,)
        )
        thread.start()

        tracker = create_tracker(model, detailed_interval=0)

        monitor = Monitor(color_always=args.color_always)

        try:
            monitor.monitor(tracker)
        except (SystemExit, KeyboardInterrupt):
            print("\nKilling simulations...")
            tracker.request_termination()

        thread.join()

        if model.hasRunFailed():
            _clear_and_exit(1)  # the monitor has already reported the error message
Example #6
 def __enter__(self):
     self._dir_before = os.getcwd()
     os.chdir(self._tmp_dir)
     try:
         config_dir = pathlib.Path(self._model_config).parent
         config = pathlib.Path(self._model_config).name
         copy_tree(config_dir, self._tmp_dir)
         self._res_config = ResConfig(user_config_file=config)
         self._ert = EnKFMain(self._res_config, strict=True)
     except Exception:
         os.chdir(self._dir_before)
         shutil.rmtree(self._tmp_dir)
         raise
     return self
Example #7
    def test_executing_workflow(self):
        with open("test_wf", "w") as wf_file:
            wf_file.write("EXPORT_RUNPATH")

        config_file = "poly.ert"
        with open(config_file, "a") as file:
            file.write("LOAD_WORKFLOW test_wf")

        rc = ResConfig(user_config_file=config_file)
        rc.convertToCReference(None)
        ert = EnKFMain(rc)
        args = Namespace(name="test_wf")
        execute_workflow(ert, args.name)
        assert os.path.isfile(".ert_runpath_list")
Example #8
def test_gen_obs_runtime(monkeypatch, copy_snake_oil, snapshot):
    obs_file = pathlib.Path.cwd() / "observations" / "observations.txt"
    with obs_file.open(mode="a") as fin:
        fin.write(create_general_observation())

    res_config = ResConfig("snake_oil.ert")
    ert = EnKFMain(res_config)

    facade = LibresFacade(ert)

    df = MeasuredData(facade,
                      [f"CUSTOM_DIFF_{restart}" for restart in range(500)])

    snapshot.assert_match(df.data.to_csv(), "snake_oil_gendata_output.csv")
Example #9
def test_ahmanalysis_run(test_data_root):
    """test data_set with only scalar parameters"""
    test_data_dir = os.path.join(test_data_root, "snake_oil")

    shutil.copytree(test_data_dir, "test_data")
    os.chdir(os.path.join("test_data"))

    res_config = ResConfig("snake_oil.ert")
    res_config.convertToCReference(None)
    ert = EnKFMain(res_config)

    ahmanalysis.AhmAnalysisJob(ert).run(prior_name="default_0")

    # assert that this returns/generates a KS csv file
    output_dir = Path("storage/snake_oil/reports/default_0/AhmAnalysisJob")
    group_obs = [
        "FOPR",
        "WOPR_OP1",
        "SNAKE_OIL_WPR_DIFF",
        "All_obs",
        "All_obs-SNAKE_OIL_WPR_DIFF",
        "All_obs-WOPR_OP1",
        "All_obs-FOPR",
    ]
    parameters = [
        "SNAKE_OIL_PARAM:OP1_PERSISTENCE",
        "SNAKE_OIL_PARAM:OP1_OCTAVES",
        "SNAKE_OIL_PARAM:OP1_DIVERGENCE_SCALE",
        "SNAKE_OIL_PARAM:OP1_OFFSET",
        "SNAKE_OIL_PARAM:OP2_PERSISTENCE",
        "SNAKE_OIL_PARAM:OP2_OCTAVES",
        "SNAKE_OIL_PARAM:OP2_DIVERGENCE_SCALE",
        "SNAKE_OIL_PARAM:OP2_OFFSET",
        "SNAKE_OIL_PARAM:BPR_555_PERSISTENCE",
        "SNAKE_OIL_PARAM:BPR_138_PERSISTENCE",
    ]
    assert (output_dir / "ks.csv").is_file()
    ks_df = pd.read_csv(output_dir / "ks.csv")
    for keys in ks_df["Parameters"].tolist():
        assert keys in parameters
    assert ks_df.columns[1:].tolist() == group_obs
    assert ks_df["WOPR_OP1"].max() <= 1
    assert ks_df["WOPR_OP1"].min() >= 0
    assert (output_dir / "active_obs_info.csv").is_file()
    assert (output_dir / "misfit_obs_info.csv").is_file()
    assert (output_dir / "prior.csv").is_file()
    for group in group_obs:
        filename = group + ".csv"
        assert (output_dir / filename).is_file()
Example #10
def main():
    # The ert_cli script should be called from ert.in. The arguments are parsed and verified in ert.in
    if len(sys.argv) != 4:
        raise AssertionError(
            "Required arguments are missing: the config-file, "
            "mode and target case must be provided")

    config_file, mode, target_case = sys.argv[1:]

    config = ResConfig(config_file)
    ert = EnKFMain(config)

    if not ert._real_enkf_main().have_observations(
    ) and mode == 'ensemble_smoother':
        logging.error(
            "No observations loaded. Unable to perform model update.")
        return

    if mode == "test_run":
        _test_run(ert)
    if mode == "ensemble_experiment":
        _experiment_run(ert)
    if mode == "ensemble_smoother":
        _ensemble_smoother_run(ert, target_case)
Example #11
def test_add_observation_vectors():

    valid_config_data = {"UPDATE_KEYS": {"keys": [{"key": "WOPR_OP1_108"}]}}

    schema = job_config.build_schema()
    config = configsuite.ConfigSuite(valid_config_data, schema, deduce_required=True)

    test_data_dir = os.path.join(TEST_DATA_DIR, "local", "snake_oil_field")

    shutil.copytree(test_data_dir, "test_data")
    os.chdir(os.path.join("test_data"))

    res_config = ResConfig("snake_oil.ert")

    ert = EnKFMain(res_config)

    obs = ert.getObservations()

    new_events = create_active_lists(obs, config.snapshot.UPDATE_KEYS.keys)

    keys = [event.key for event in new_events]

    assert "WOPR_OP1_108" in keys
    assert "WOPR_OP1_144" not in keys
Example #12
def test_gen_obs_runtime(monkeypatch, copy_snake_oil):
    obs_file = pathlib.Path.cwd() / "observations" / "observations.txt"
    with obs_file.open(mode="a") as fin:
        fin.write(create_general_observation())

    res_config = ResConfig("snake_oil.ert")
    ert = EnKFMain(res_config)

    facade = LibresFacade(ert)

    df = MeasuredData(facade,
                      [f"CUSTOM_DIFF_{restart}" for restart in range(1, 500)])

    df.remove_inactive_observations()
    assert df.data.shape == (27, 1995)
Example #13
def test_main_entry_point_shielded_data(setup_ert, monkeypatch):
    cos_config = {
        "CALCULATE_KEYS": {"keys": [{"key": "FOPR", "index": [1, 2, 3, 4, 5]}]}
    }

    res_config = setup_ert
    ert = EnKFMain(res_config)
    obs = ert.getObservations()

    obs_vector = obs["FOPR"]

    for index, node in enumerate(obs_vector):
        assert node.getStdScaling(index) == 1.0

    monkeypatch.setattr(
        cos.ObservationScaleFactor, "get_scaling_factor", MagicMock(return_value=1.23)
    )
    CorrelatedObservationsScalingJob(ert).run(cos_config)

    for index, node in enumerate(obs_vector):
        if index in [1, 2, 3, 4, 5]:
            assert node.getStdScaling(index) == 1.23, f"index: {index}"
        else:
            assert node.getStdScaling(index) == 1.0, f"index: {index}"
Example #14
def test_misfit_preprocessor_all_obs(test_data_root, monkeypatch):
    from unittest.mock import MagicMock
    from semeio.workflows.correlated_observations_scaling import cos

    test_data_dir = os.path.join(test_data_root, "snake_oil")

    shutil.copytree(test_data_dir, "test_data")
    os.chdir(os.path.join("test_data"))

    res_config = ResConfig("snake_oil.ert")
    ert = EnKFMain(res_config)

    monkeypatch.setattr(cos.ObservationScaleFactor, "get_scaling_factor",
                        MagicMock(return_value=1.234))

    misfit_preprocessor.MisfitPreprocessorJob(ert).run()

    scaling_factors = []

    obs = ert.getObservations()
    for key in [
            "FOPR",
            "WOPR_OP1_9",
            "WOPR_OP1_36",
            "WOPR_OP1_72",
            "WOPR_OP1_108",
            "WOPR_OP1_144",
            "WOPR_OP1_190",
            "WPR_DIFF_1",
    ]:
        obs_vector = obs[key]
        for index, node in enumerate(obs_vector):
            scaling_factors.append((index, key, node.getStdScaling(index)))

    for index, key, scaling_factor in scaling_factors:
        assert scaling_factor == 1.234, f"{index}, {key}"
Example #15
    def loadAllSummaryData(ert: EnKFMain, case_name, keys=None, realization_index=None):
        """
        @type ert: EnKFMain
        @type case_name: str
        @type keys: list of str
        @rtype: DataFrame
        """

        fs = ert.getEnkfFsManager().getFileSystem(case_name)

        time_map = fs.getTimeMap()
        dates = [time_map[index].datetime() for index in range(1, len(time_map))]

        realizations = SummaryCollector.createActiveList(ert, fs)
        if realization_index is not None:
            if realization_index not in realizations:
                raise IndexError(f"No such realization {realization_index}")
            realizations = [realization_index]

        summary_keys = SummaryCollector.getAllSummaryKeys(ert)
        if keys is not None:
            summary_keys = [
                key for key in keys if key in summary_keys
            ]  # ignore keys that don't exist

        summary_data = _lib.enkf_fs_summary_data.get_summary_data(
            ert.ensembleConfig(), fs, summary_keys, realizations, len(dates)
        )

        multi_index = MultiIndex.from_product(
            [realizations, dates], names=["Realization", "Date"]
        )

        df = DataFrame(data=summary_data, index=multi_index, columns=summary_keys)

        return df
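A usage sketch for the collector method above; the config file, case name and summary key are placeholders taken from the other examples, and the import path for SummaryCollector is an assumption since the snippet does not show it:

from res.enkf import EnKFMain, ResConfig
from res.enkf.export import SummaryCollector  # assumed import path

res_config = ResConfig("snake_oil.ert")  # placeholder config
ert = EnKFMain(res_config)

# All realizations of the case, restricted to a single summary key; keys that
# do not exist are silently ignored by the method above.
df = SummaryCollector.loadAllSummaryData(ert, "default_0", keys=["FOPR"])

# The frame is indexed by (Realization, Date) with one column per key.
print(df.xs(0, level="Realization").head())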
Example #16
def test_fs_init_from_scratch_deprecated():
    res_config = ResConfig("snake_oil.ert")
    ert = EnKFMain(res_config)
    sim_fs = ert.getEnkfFsManager().getFileSystem("new_case")  # new case
    mask = BoolVector.createFromList(25, [0, 1, 2, 3, 4, 5])
    run_context = ErtRunContext.case_init(sim_fs, mask)
    with pytest.warns(DeprecationWarning):
        ert.getEnkfFsManager().initializeFromScratch(
            StringList(["SNAKE_OIL_PARAM"]), run_context)
    assert len(ert.getEnkfFsManager().getStateMapForCase("new_case")) == 6
Example #17
def test_fs_init_from_scratch():
    res_config = ResConfig("snake_oil.ert")
    ert = EnKFMain(res_config)
    sim_fs = ert.getEnkfFsManager().getFileSystem("new_case")
    mask = [True] * 6 + [False] * 19
    run_context = ErtRunContext.case_init(sim_fs, mask)

    ert.getEnkfFsManager().initializeFromScratch(
        StringList(["SNAKE_OIL_PARAM"]), run_context)
    assert len(ert.getEnkfFsManager().getStateMapForCase("new_case")) == 6
Example #18
    def test_executing_workflow(self):
        with open('test_wf', 'w') as wf_file:
            wf_file.write('EXPORT_RUNPATH')

        config_file = 'poly.ert'
        with open(config_file, 'a') as file:
            file.write("LOAD_WORKFLOW test_wf")

        rc = ResConfig(user_config_file=config_file)
        rc.convertToCReference(None)
        ert = EnKFMain(rc)
        notifier = ErtCliNotifier(ert, config_file)
        ERT.adapt(notifier)
        args = Namespace(name="test_wf")
        execute_workflow(args.name)
        assert os.path.isfile(".ert_runpath_list")
Example #19
def test_misfit_preprocessor_skip_clusters_yielding_empty_data_matrixes(
        monkeypatch, test_data_root):
    def raising_scaling_job(data):
        if data == {
                "CALCULATE_KEYS": {
                    "keys": [{
                        "index": [88, 89],
                        "key": "FOPR"
                    }]
                }
        }:
            raise EmptyDatasetException("foo")

    scaling_mock = Mock(return_value=Mock(
        **{"run.side_effect": raising_scaling_job}))
    monkeypatch.setattr(misfit_preprocessor,
                        "CorrelatedObservationsScalingJob", scaling_mock)

    test_data_dir = os.path.join(test_data_root, "snake_oil")

    shutil.copytree(test_data_dir, "test_data")
    os.chdir(os.path.join("test_data"))

    res_config = ResConfig("snake_oil.ert")
    ert = EnKFMain(res_config)

    config = {
        "workflow": {
            "type": "custom_scale",
            "clustering": {
                "fcluster": {
                    "threshold": 1.0
                }
            },
        },
    }
    config_file = "my_config_file.yaml"
    with open(config_file, "w") as f:
        yaml.dump(config, f)

    job = misfit_preprocessor.MisfitPreprocessorJob(ert)

    try:
        job.run(config_file)
    except EmptyDatasetException:
        pytest.fail(
            "EmptyDatasetException was not handled by misfit preprocessor")
Example #20
def main(argv):

    app = QApplication(argv) #Early so that QT is initialized before other imports
    app.setWindowIcon(resourceIcon("application/window_icon_cutout"))

    if len(argv) == 1:
        sys.stderr.write("Missing configuration file")
        sys.exit(1)

    config_file = argv[1]
    strict = True
        
    if not os.path.exists(config_file):
        print("Can not run without a configuration file.")
        sys.exit(1)

    if os.path.isdir(config_file):
        print("The specified configuration file is a directory!")
        sys.exit(1)


    splash = ErtSplash()
    splash.version = "Version %s" % Version.getVersion()
    splash.timestamp = Version.getBuildTime()

    splash.show()
    splash.repaint()

    now = time.time()

    res_config = ResConfig(config_file)
    ert = EnKFMain(res_config, strict=strict, verbose=False)
    ert_gui.configureErtNotifier(ert, config_file)

    window = PlotWindow(ert, None)

    sleep_time = 2 - (time.time() - now)

    if sleep_time > 0:
        time.sleep(sleep_time)

    window.show()
    splash.finish(window)
    window.activateWindow()
    window.raise_()
    finished_code = app.exec_()
    sys.exit(finished_code)
Example #21
def test_main_entry_point_gen_data(monkeypatch, test_data_root):
    run_mock = Mock()
    scal_job = Mock(return_value=Mock(run=run_mock))
    monkeypatch.setattr(sc, "CorrelatedObservationsScalingJob", scal_job)

    test_data_dir = os.path.join(test_data_root, "snake_oil")

    shutil.copytree(test_data_dir, "test_data")
    os.chdir(os.path.join("test_data"))

    res_config = ResConfig("snake_oil.ert")
    ert = EnKFMain(res_config)
    sc.SpearmanCorrelationJob(ert).run(*["-t", "1.0"])

    # call_args represents the clusters; we expect the snake_oil
    # observations to generate this many of them.
    # call_args is a call object, which is itself a tuple of (args, kwargs).
    # Here we take args and then its first element, which is the
    # configuration: a list of configs, one per cluster.
    assert len(list(
        run_mock.call_args)[0][0]) == 47, "wrong number of clusters"

    cor_matrix_file = os.path.join(
        "storage",
        "snake_oil",
        "reports",
        "default_0",
        "SpearmanCorrelationJob",
        "correlation_matrix.csv",
    )

    pd.read_csv(cor_matrix_file, index_col=[0, 1], header=[0, 1])

    clusters_file = os.path.join(
        "storage",
        "snake_oil",
        "reports",
        "default_0",
        "SpearmanCorrelationJob",
        "clusters.json",
    )
    with open(clusters_file) as f:
        cluster_reports = json.load(f)
        assert len(cluster_reports) == 1

        clusters = cluster_reports[0]
        assert len(clusters.keys()) == 47
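The call_args indexing used in the asserts above is easy to misread, so here is a self-contained illustration with an invented payload (not the real cluster configuration):

from unittest.mock import Mock

run_mock = Mock()
# Pretend the job called the mock with a list of two cluster configs and one
# keyword argument; the payload is made up purely for illustration.
fake_clusters = [{"CALCULATE_KEYS": {"keys": [{"key": "FOPR"}]}},
                 {"CALCULATE_KEYS": {"keys": [{"key": "WPR_DIFF_1"}]}}]
run_mock(fake_clusters, reports_dir="reports")

# call_args is a call object that behaves like an (args, kwargs) tuple:
args, kwargs = run_mock.call_args
assert args[0] is fake_clusters
assert kwargs == {"reports_dir": "reports"}

# ...which is why the tests index it as call_args[0][0] to count the clusters:
assert len(list(run_mock.call_args)[0][0]) == 2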
Example #22
def test_main_entry_point_gen_data(monkeypatch):
    scal_job = Mock()
    monkeypatch.setattr(spearman, "scaling_job", scal_job)

    test_data_dir = os.path.join(TEST_DATA_DIR, "local", "snake_oil")

    shutil.copytree(test_data_dir, "test_data")
    os.chdir(os.path.join("test_data"))

    res_config = ResConfig("snake_oil.ert")

    ert = EnKFMain(res_config)
    facade = LibresFacade(ert)

    spearman.spearman_job(facade, 1.0, False)

    assert scal_job.call_count == 71
Example #23
    def getObservationKeyForDataKey(ert: EnKFMain, data_key, data_report_step):
        """
        @type ert: EnKFMain
        @rtype: str
        """
        observation_key = None

        enkf_obs = ert.getObservations()
        for obs_vector in enkf_obs:
            if obs_vector.getImplementationType() == EnkfObservationImplementationType.GEN_OBS:
                report_step = obs_vector.firstActiveStep()
                key = obs_vector.getDataKey()

                if key == data_key and report_step == data_report_step:
                    observation_key = obs_vector.getObservationKey()

        return observation_key
Example #24
    def test_ecl_config_creation(self):
        with TestAreaContext("enkf_library_test") as work_area:
            work_area.copy_directory(self.case_directory)

            res_config = ResConfig("simple_config/minimum_config")
            main = EnKFMain(res_config)

            self.assertIsInstance(main.analysisConfig(), AnalysisConfig)
            self.assertIsInstance(main.eclConfig(), EclConfig)

            with self.assertRaises(AssertionError):  # Null pointer!
                self.assertIsInstance(main.eclConfig().getRefcase(), EclSum)

            file_system = main.getEnkfFsManager().getCurrentFileSystem()
            self.assertEqual(file_system.getCaseName(), "default")
            time_map = file_system.getTimeMap()
            self.assertIsInstance(time_map, TimeMap)

            main.free()
Example #25
def test_snapshot_alpha(setup_case, alpha, expected):
    """
    Note that this is now a snapshot test, so there is no guarantee that the
    snapshots are correct; they just document the current behavior.
    """
    res_config = setup_case("local/snake_oil", "snake_oil.ert")

    obs_file = Path("observations") / "observations.txt"
    with obs_file.open(mode="w") as fin:
        fin.write(
            """
SUMMARY_OBSERVATION LOW_STD
{
   VALUE   = 10;
   ERROR   = 0.1;
   DATE    = 2015-06-23;
   KEY     = FOPR;
};
SUMMARY_OBSERVATION HIGH_STD
{
   VALUE   = 10;
   ERROR   = 1.0;
   DATE    = 2015-06-23;
   KEY     = FOPR;
};
SUMMARY_OBSERVATION EXTREMELY_HIGH_STD
{
   VALUE   = 10;
   ERROR   = 10.0;
   DATE    = 2015-06-23;
   KEY     = FOPR;
};
"""
        )

    ert = EnKFMain(res_config)
    es_update = ESUpdate(ert)
    ert.analysisConfig().selectModule("IES_ENKF")
    fsm = ert.getEnkfFsManager()
    sim_fs = fsm.getFileSystem("default_0")
    target_fs = fsm.getFileSystem("target")
    run_context = ErtRunContext.ensemble_smoother_update(sim_fs, target_fs)
    ert.analysisConfig().setEnkfAlpha(alpha)
    es_update.smootherUpdate(run_context)
    result_snapshot = ert.update_snapshots[run_context.get_id()]
    assert result_snapshot.alpha == alpha
    assert result_snapshot.ministep_snapshots["ALL_ACTIVE"].obs_status == expected
Example #26
def test_run_export():
    with ErtPluginContext():
        config_file = "snake_oil.ert"
        rc = ResConfig(user_config_file=config_file)
        rc.convertToCReference(None)
        ert = EnKFMain(rc)
        ex = Exporter(ert)
        parameters = {
            "output_file": "export.csv",
            "time_index": "raw",
            "column_keys": "FOPR",
        }
        ex.run_export(parameters)

        shutil.rmtree("storage")
        with pytest.raises(UserWarning) as warn:
            ex.run_export(parameters)
        assert ex._export_job in str(warn)
Example #27
def test_misfit_preprocessor_main_entry_point_no_config(monkeypatch):
    run_mock = Mock()
    scal_job = Mock(return_value=Mock(run=run_mock))
    monkeypatch.setattr(
        misfit_preprocessor, "CorrelatedObservationsScalingJob", scal_job,
    )

    test_data_dir = os.path.join(TEST_DATA_DIR, "local", "snake_oil")

    shutil.copytree(test_data_dir, "test_data")
    os.chdir(os.path.join("test_data"))

    res_config = ResConfig("snake_oil.ert")
    ert = EnKFMain(res_config)

    misfit_preprocessor.MisfitPreprocessorJob(ert).run()

    assert len(run_mock.call_args[0][0]) > 1
Example #28
def test_misfit_preprocessor_main_entry_point_gen_data(monkeypatch,
                                                       test_data_root,
                                                       observation,
                                                       expected_nr_clusters):
    run_mock = Mock()
    scal_job = Mock(return_value=Mock(run=run_mock))
    monkeypatch.setattr(
        misfit_preprocessor,
        "CorrelatedObservationsScalingJob",
        scal_job,
    )

    test_data_dir = os.path.join(test_data_root, "snake_oil")

    shutil.copytree(test_data_dir, "test_data")
    os.chdir(os.path.join("test_data"))

    res_config = ResConfig("snake_oil.ert")
    ert = EnKFMain(res_config)

    config = {
        "observations": [observation],
        "workflow": {
            "type": "custom_scale",
            "clustering": {
                "fcluster": {
                    "threshold": 1.0
                }
            },
        },
    }
    config_file = "my_config_file.yaml"
    with open(config_file, "w") as f:
        yaml.dump(config, f)

    misfit_preprocessor.MisfitPreprocessorJob(ert).run(config_file)

    # call_args represents the clusters; we expect the snake_oil
    # observations to generate this many of them.
    # call_args is a call object, which is itself a tuple of (args, kwargs).
    # Here we take args and then its first element, which is the
    # configuration: a list of configs, one per cluster.
    assert (len(list(run_mock.call_args)[0][0]) == expected_nr_clusters
            ), "wrong number of clusters"
Example #29
def test_load_inconsistent_time_map_summary(copy_data, caplog):
    """
    Checking that we don't util_abort; we fail the forward model instead
    """
    cwd = os.getcwd()

    # Get rid of GEN_DATA as we are only interested in SUMMARY
    with fileinput.input("snake_oil.ert", inplace=True) as fin:
        for line in fin:
            if line.startswith("GEN_DATA"):
                continue
            print(line, end="")

    res_config = ResConfig("snake_oil.ert")
    ert = EnKFMain(res_config)
    facade = LibresFacade(ert)
    realisation_number = 0
    assert (facade.get_current_fs().getStateMap()[realisation_number].name ==
            "STATE_HAS_DATA")  # Check prior state

    # Create a result that is incompatible with the refcase
    run_path = Path(
        "storage") / "snake_oil" / "runpath" / "realization-0" / "iter-0"
    os.chdir(run_path)
    ecl_sum = run_simulator(1, datetime(2000, 1, 1))
    ecl_sum.fwrite()
    os.chdir(cwd)

    realizations = BoolVector(default_value=False,
                              initial_size=facade.get_ensemble_size())
    realizations[realisation_number] = True
    with caplog.at_level(logging.ERROR):
        loaded = facade.load_from_forward_model("default_0", realizations, 0)
    assert (
        f"""Inconsistency in time_map - loading SUMMARY from: {run_path.absolute()} failed:
Time mismatch for step: 1, new time: 2000-01-10, reference case: 2010-01-10
""" in caplog.messages)
    assert (
        f"Inconsistent time map for summary data from: {run_path.absolute()}"
        f"/SNAKE_OIL_FIELD, realisation failed" in caplog.messages)
    assert loaded == 0
    assert (facade.get_current_fs().getStateMap()[realisation_number].name ==
            "STATE_LOAD_FAILURE")  # Check that status is as expected
Example #30
    def __init__(self,
                 config,
                 host="localhost",
                 port=0,
                 log_requests=False,
                 verbose_queue=False):
        SimpleXMLRPCServer.__init__(self, (host, port),
                                    allow_none=True,
                                    logRequests=log_requests)
        self._host = host
        self._verbose_queue = verbose_queue
        # https: server.socket = ssl.wrap_socket(srv.socket, ...)

        if isinstance(config, EnKFMain):
            self._config = config
            self._config_file = config.getUserConfigFile()
        else:
            if os.path.exists(config):
                self._config = EnKFMain(config)
                self._config_file = config
            else:
                raise IOError("The ert config file: %s does not exist" %
                              config)

        self._session = Session()

        self.register_function(self.ertVersion)
        self.register_function(self.getTimeMap)
        self.register_function(self.isRunning)
        self.register_function(self.isInitializationCaseAvailable)
        self.register_function(self.startSimulationBatch)
        self.register_function(self.addSimulation)
        self.register_function(self.isRealizationFinished)
        self.register_function(self.didRealizationSucceed)
        self.register_function(self.didRealizationFail)
        self.register_function(self.getGenDataResult)
        self.register_function(self.getCustomKWResult)
        self.register_function(self.isCustomKWKey)
        self.register_function(self.isGenDataKey)
        self.register_function(self.prototypeStorage)
        self.register_function(self.storeGlobalData)
        self.register_function(self.storeSimulationData)
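The commented-out https hint above can be realised with the ssl module; below is a sketch using the SSLContext API instead of the deprecated ssl.wrap_socket, with Python 3 module names and placeholder certificate paths:

import ssl
from xmlrpc.server import SimpleXMLRPCServer  # Python 3 name of the base class above

server = SimpleXMLRPCServer(("localhost", 0), allow_none=True, logRequests=False)

# Wrap the listening socket so every accepted connection is TLS encrypted.
context = ssl.SSLContext(ssl.PROTOCOL_TLS_SERVER)
context.load_cert_chain(certfile="server.crt", keyfile="server.key")  # placeholders
server.socket = context.wrap_socket(server.socket, server_side=True)

# server.serve_forever() would now serve XML-RPC over HTTPS.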
Example #31
def test_main_entry_point_gen_data(monkeypatch):
    run_mock = Mock()
    scal_job = Mock(return_value=Mock(run=run_mock))
    monkeypatch.setattr(sc, "CorrelatedObservationsScalingJob", scal_job)

    test_data_dir = os.path.join(TEST_DATA_DIR, "local", "snake_oil")

    shutil.copytree(test_data_dir, "test_data")
    os.chdir(os.path.join("test_data"))

    res_config = ResConfig("snake_oil.ert")
    ert = EnKFMain(res_config)
    sc.SpearmanCorrelationJob(ert).run(*["-t", "1.0"])

    # call_args represents the clusters; we expect the snake_oil
    # observations to generate this many of them.
    # call_args is a call object, which is itself a tuple of (args, kwargs).
    # Here we take args and then its first element, which is the
    # configuration: a list of configs, one per cluster.
    assert len(run_mock.call_args[0][0]) == 47, "wrong number of clusters"
Example #32
File: sol3.py Project: berland/ert
#!/usr/bin/env python
import sys
import time
from res.enkf import EnKFMain, ResConfig


# This will instantiate the EnKFMain object and create a handle to
# "everything" ert related for this instance.
res_config = ResConfig( sys.argv[1] )
ert = EnKFMain( res_config )
site_config = ert.siteConfig( )

jobs = site_config.get_installed_jobs( )
for job in jobs:
    print(job.name())
    print("   config    : %s" % job.get_config_file())
    print("   executable: %s" % job.get_executable())
    print()
Example #33
def main(argv):
    app = QApplication(argv)  # Early so that QT is initialized before other imports
    app.setWindowIcon(resourceIcon("application/window_icon_cutout"))

    if len(argv) == 1:
        config_file = QFileDialog.getOpenFileName(None, "Open Configuration File")

        config_file = str(config_file)

        if len(config_file) == 0:
            print("-----------------------------------------------------------------")
            print("-- You must supply the name of configuration file as the first --")
            print("-- commandline argument:                                       --")
            print("--                                                             --")
            print("-- bash%  gert <config_file>                                   --")
            print("--                                                             --")
            print("-- If the configuration file does not exist, gert will create  --")
            print("-- create a new configuration file.                            --")
            print("-----------------------------------------------------------------")

            sys.exit(1)
    else:
        config_file = argv[1]

    help_center = HelpCenter("ERT")
    help_center.setHelpLinkPrefix(os.getenv("ERT_SHARE_PATH") + "/gui/help/")
    help_center.setHelpMessageLink("welcome_to_ert")

    strict = True

    verbose = False
    verbose_var = os.getenv("ERT_VERBOSE", "False")
    lower_verbose_var = verbose_var.lower()
    if lower_verbose_var == "true":
        verbose = True

    if not os.path.exists(config_file):
        print("Trying to start new config")
        new_configuration_dialog = NewConfigurationDialog(config_file)
        success = new_configuration_dialog.exec_()
        if not success:
            print("Can not run without a configuration file.")
            sys.exit(1)
        else:
            config_file = new_configuration_dialog.getConfigurationPath()
            dbase_type = new_configuration_dialog.getDBaseType()
            num_realizations = new_configuration_dialog.getNumberOfRealizations()
            storage_path = new_configuration_dialog.getStoragePath()

            EnKFMain.createNewConfig(config_file, storage_path, dbase_type, num_realizations)
            strict = False

    if os.path.isdir(config_file):
        print("The specified configuration file is a directory!")
        sys.exit(1)

    splash = ErtSplash()
    version = ErtVersion( )
    splash.version = "Version %s" % version.versionString()

    splash.timestamp = version.getBuildTime()

    splash.show()
    splash.repaint()

    now = time.time()

    res_config = ResConfig(config_file)
    os.chdir( res_config.config_path )
    ert = EnKFMain(res_config, strict=strict, verbose=verbose)
    ert_gui.configureErtNotifier(ert, config_file)

    window = GertMainWindow()
    window.setWidget(SimulationPanel())

    plugin_handler = PluginHandler(ert, ert.getWorkflowList().getPluginJobs(), window)

    help_tool = HelpTool("ERT", window)

    window.addDock("Configuration Summary", SummaryPanel(), area=Qt.BottomDockWidgetArea)
    window.addTool(IdeTool(os.path.basename(config_file), help_tool))
    window.addTool(PlotTool())
    window.addTool(ExportTool())
    window.addTool(WorkflowsTool())
    window.addTool(ManageCasesTool())
    window.addTool(PluginsTool(plugin_handler))
    window.addTool(RunAnalysisTool())
    window.addTool(LoadResultsTool())
    window.addTool(help_tool)

    sleep_time = 2 - (time.time() - now)

    if sleep_time > 0:
        time.sleep(sleep_time)

    window.show()
    splash.finish(window)
    window.activateWindow()
    window.raise_()
    ResLog.log(3, "Versions ecl:%s  res:%s   ert:%s" % (EclVersion( ), ResVersion( ), ErtVersion( )))
    finished_code = app.exec_()
    ert.free()

    sys.exit(finished_code)
Example #34
File: sol4.py Project: berland/ert
#!/usr/bin/env python
import sys
import time
from res.enkf import EnKFMain, RunArg, NodeId, ResConfig
from res.enkf.data import EnkfNode
from ert.job_queue import JobQueueManager

res_config = ResConfig( sys.argv[1] )
ert = EnKFMain( res_config )
fs_manager = ert.getEnkfFsManager( )
fs = fs_manager.getCurrentFileSystem( )


# Initialize the realisations.
for iens in range( ert.getEnsembleSize()):
    realisation = ert.getRealisation( iens )
    realisation.initialize( fs )


# Fetch out the job_queue from the SiteConfig object. In addition we
# create a JobQueueManager object which wraps the queue. The purpose
# of this manager object is to let the queue run nonblocking in the
# background.
site_config = ert.siteConfig( )
queue_manager = JobQueueManager( site_config.getJobQueue( ) )
queue_manager.startQueue( ert.getEnsembleSize( ) , verbose = False )


# Create a list of RunArg instances, each holding the metadata for one running
# realisation, create the directory where the simulation should run
# and submit the simulation.
Example #35
File: sol1.py Project: berland/ert
#!/usr/bin/env python
import sys
import time
from res.enkf import EnKFMain, ResConfig
from res.enkf.enums import ErtImplType


# This will instantiate the EnKFMain object and create a handle to
# "everything" ert related for this instance.
res_config = ResConfig( sys.argv[1] )
ert = EnKFMain( res_config )


# Ask the EnKFMain instance how many realisations it has. Observe that
# the answer to this question is just the value of the
# NUM_REALISATIONS setting in the configuration file.
print("This instance has %d realisations" % ert.getEnsembleSize())


# Get the ensemble configuration object, and ask for all GEN_KW keys:
ens_config = ert.ensembleConfig( )
for key in ens_config.getKeylistFromImplType(ErtImplType.GEN_KW):
    config_node = ens_config[key]

    # "Downcast" to GEN_KW configuration.
    gen_kw_config = config_node.getModelConfig( )
    print("%s : %s" % (key , gen_kw_config.getKeyWords( )))

Example #36
def main(argv):
    app = QApplication(argv)  # Early so that QT is initialized before other imports
    app.setWindowIcon(resourceIcon("application/window_icon_cutout"))

    # There seems to be a setlocale() call deep down in the initialization of
    # QApplication; if the user has set the LC_NUMERIC environment variable to
    # a locale with a decimal point different from "." the application will
    # fail hard quite quickly.
    current_locale = QLocale()
    decimal_point = str(current_locale.decimalPoint())
    if decimal_point != ".":
        msg = """
** WARNING: You are using a locale with decimalpoint: '{}' - the ert application is
            written with the assumption that '.' is used as decimalpoint, and chances
            are that something will break if you continue with this locale. It is highly
            reccomended that you set the decimalpoint to '.' using one of the environment
            variables 'LANG', LC_ALL', or 'LC_NUMERIC' to either the 'C' locale or
            alternatively a locale which uses '.' as decimalpoint.\n""".format(decimal_point)

        sys.stderr.write(msg)


    if len(argv) == 1:
        config_file = QFileDialog.getOpenFileName(None, "Open Configuration File")

        config_file = str(config_file)

        if len(config_file) == 0:
            print("-----------------------------------------------------------------")
            print("-- You must supply the name of configuration file as the first --")
            print("-- commandline argument:                                       --")
            print("--                                                             --")
            print("-- bash%  gert <config_file>                                   --")
            print("--                                                             --")
            print("-- If the configuration file does not exist, gert will create  --")
            print("-- create a new configuration file.                            --")
            print("-----------------------------------------------------------------")

            sys.exit(1)
    else:
        config_file = argv[1]

    help_center = HelpCenter("ERT")
    help_center.setHelpLinkPrefix(ert_share_path + "/gui/help/")
    help_center.setHelpMessageLink("welcome_to_ert")

    strict = True

    verbose = False
    verbose_var = os.getenv("ERT_VERBOSE", "False")
    lower_verbose_var = verbose_var.lower()
    if lower_verbose_var == "true":
        verbose = True

    if not os.path.exists(config_file):
        print("Trying to start new config")
        new_configuration_dialog = NewConfigurationDialog(config_file)
        success = new_configuration_dialog.exec_()
        if not success:
            print("Can not run without a configuration file.")
            sys.exit(1)
        else:
            config_file = new_configuration_dialog.getConfigurationPath()
            dbase_type = new_configuration_dialog.getDBaseType()
            num_realizations = new_configuration_dialog.getNumberOfRealizations()
            storage_path = new_configuration_dialog.getStoragePath()

            EnKFMain.createNewConfig(config_file, storage_path, dbase_type, num_realizations)
            strict = False

    if os.path.isdir(config_file):
        print("The specified configuration file is a directory!")
        sys.exit(1)

    splash = ErtSplash()
    version = ErtVersion( )
    splash.version = "Version %s" % version.versionString()

    splash.timestamp = version.getBuildTime()

    splash.show()
    splash.repaint()

    now = time.time()

    res_config = ResConfig(config_file)
    os.chdir( res_config.config_path )
    ert = EnKFMain(res_config, strict=strict, verbose=verbose)
    ert_gui.configureErtNotifier(ert, config_file)

    window = GertMainWindow()
    window.setWidget(SimulationPanel())

    plugin_handler = PluginHandler(ert, ert.getWorkflowList().getPluginJobs(), window)

    help_tool = HelpTool("ERT", window)

    window.addDock("Configuration Summary", SummaryPanel(), area=Qt.BottomDockWidgetArea)
    window.addTool(IdeTool(os.path.basename(config_file), help_tool))
    window.addTool(PlotTool())
    window.addTool(ExportTool())
    window.addTool(WorkflowsTool())
    window.addTool(ManageCasesTool())
    window.addTool(PluginsTool(plugin_handler))
    window.addTool(RunAnalysisTool())
    window.addTool(LoadResultsTool())
    window.addTool(help_tool)

    sleep_time = 2 - (time.time() - now)

    if sleep_time > 0:
        time.sleep(sleep_time)

    window.show()
    splash.finish(window)
    window.activateWindow()
    window.raise_()
    ResLog.log(3, "Versions ecl:%s  res:%s   ert:%s" % (EclVersion( ), ResVersion( ), ErtVersion( )))
    finished_code = app.exec_()
    sys.exit(finished_code)