Example #1
def test_read_optimization_iteration(tmp_path):
    path = tmp_path / "test.db"
    database = load_database(path=path)

    # add the optimization_iterations table
    make_optimization_iteration_table(database, first_eval={"output": 0.5})
    iteration_data = [
        {"external_params": np.array([0])},
        {"external_params": np.array([1])},
        {"external_params": np.array([2])},
    ]

    for data in iteration_data:
        append_row(data, "optimization_iterations", database, path, False)

    # add the optimization_problem table
    make_optimization_problem_table(database)
    problem_data = {"params": pd.DataFrame(data=[10], columns=["value"])}
    append_row(problem_data, "optimization_problem", database, path, False)

    first_row_calc = read_optimization_iteration(path, 0)
    assert first_row_calc["rowid"] == 1
    calculated_params = first_row_calc["params"]
    expected_params = pd.DataFrame(data=[0], columns=["value"])
    assert_frame_equal(calculated_params, expected_params, check_dtype=False)

    last_row_calc = read_optimization_iteration(path, -1)
    assert last_row_calc["rowid"] == 3
    calculated_params = last_row_calc["params"]
    expected_params = pd.DataFrame(data=[2], columns=["value"])
    assert_frame_equal(calculated_params, expected_params, check_dtype=False)
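
Note: the snippets on this page omit their module-level imports. As a rough sketch, the examples rely on imports along the following lines; the estimagic module paths are an assumption (the logging helpers have moved between estimagic versions) and may need adjusting:

import numpy as np
import pandas as pd
from numpy.testing import assert_array_equal
from pandas.testing import assert_frame_equal
from sqlalchemy import Float, PickleType

# assumed locations of the estimagic logging helpers
from estimagic.logging.database_utilities import (
    append_row,
    load_database,
    make_optimization_iteration_table,
    make_optimization_problem_table,
    read_last_rows,
    read_new_rows,
    read_table,
    update_row,
)
from estimagic.logging.read_log import read_optimization_iteration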
Example #2
def test_optimization_iteration_table_vector_valued(tmp_path):
    path = tmp_path / "test.db"
    database = load_database(path=path)
    make_optimization_iteration_table(
        database, first_eval={"output": {"contributions": np.ones(3), "value": 0.5}}
    )
    assert isinstance(
        database.tables["optimization_iterations"].columns["contributions"].type,
        PickleType,
    )
Example #3
def _create_and_initialize_database(logging, log_options, first_eval,
                                    problem_data):
    # extract information
    path = Path(logging)
    fast_logging = log_options.get("fast_logging", False)
    if_table_exists = log_options.get("if_table_exists", "extend")
    if_database_exists = log_options.get("if_database_exists", "extend")

    if "if_exists" in log_options and "if_table_exists" not in log_options:
        warnings.warn(
            "The log_option 'if_exists' was renamed to 'if_table_exists'.")

    if path.exists():
        if if_database_exists == "raise":
            raise FileExistsError(
                f"The database {path} already exists and the log_option "
                "'if_database_exists' is set to 'raise'"
            )
        elif if_database_exists == "replace":
            path.unlink()

    database = load_database(path=path, fast_logging=fast_logging)

    # create the optimization_iterations table
    make_optimization_iteration_table(
        database=database,
        first_eval=first_eval,
        if_exists=if_table_exists,
    )

    # create and initialize the steps table; this is always extended if it exists.
    make_steps_table(database, if_exists=if_table_exists)

    # create and initialize the optimization_problem table
    make_optimization_problem_table(database, if_exists=if_table_exists)

    not_saved = [
        "criterion",
        "criterion_kwargs",
        "constraints",
        "derivative",
        "derivative_kwargs",
        "criterion_and_derivative",
        "criterion_and_derivative_kwargs",
    ]
    problem_data = {
        key: val for key, val in problem_data.items() if key not in not_saved
    }

    append_row(problem_data, "optimization_problem", database, path, fast_logging)

    return database
Example #4
def test_optimization_iteration_table_scalar(tmp_path, iteration_data):
    path = tmp_path / "test.db"
    database = load_database(path=path)
    make_optimization_iteration_table(database, first_eval={"output": 0.5})
    append_row(iteration_data, "optimization_iterations", database, path, False)
    res = read_last_rows(database, "optimization_iterations", 1, "list_of_dicts")
    assert isinstance(res, list) and isinstance(res[0], dict)
    res = res[0]
    assert res["rowid"] == 1
    assert_array_equal(res["params"], iteration_data["params"])

    for key in ["value", "timestamp"]:
        assert res[key] == iteration_data[key]
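
Several tests on this page take an iteration_data pytest fixture that is not shown here. Judging from how it is used (a "params" array compared with assert_array_equal, plus "value" and "timestamp" entries, and a "step" key that some tests overwrite), a minimal sketch of such a fixture could look as follows; the concrete values are illustrative assumptions, not the original fixture:

import datetime

import numpy as np
import pytest


@pytest.fixture
def iteration_data():
    # hypothetical payload for one optimization iteration; the real fixture may differ
    return {
        "params": np.ones(1),
        "value": 5.0,
        "timestamp": datetime.datetime(2020, 4, 9, 12, 41, 1),
    }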
Example #5
def test_optimization_iteration_table_dict_valued(tmp_path):
    path = tmp_path / "test.db"
    database = load_database(path=path)
    first_eval = {
        "output": {"contributions": np.ones(3), "value": 5, "bla": pd.DataFrame()}
    }
    make_optimization_iteration_table(database, first_eval=first_eval)
    for col in ["contributions", "bla"]:
        assert isinstance(
            database.tables["optimization_iterations"].columns[col].type, PickleType
        )
    assert isinstance(
        database.tables["optimization_iterations"].columns["value"].type, Float
    )
Example #6
def test_read_table(tmp_path, iteration_data):
    path = tmp_path / "test.db"
    database = load_database(path=path)
    make_optimization_iteration_table(database, first_eval={"output": 0.5})
    for i in range(1, 11):  # sqlalchemy starts counting at 1
        iteration_data["value"] = i
        iteration_data["step"] = i % 2
        append_row(iteration_data, "optimization_iterations", database, path, False)

    table = read_table(
        database=database,
        table_name="optimization_iterations",
        return_type="dict_of_lists",
    )

    assert table["rowid"] == list(range(1, 11))
    assert table["step"] == [1, 0] * 5
Example #7
def test_read_last_rows_stride(tmp_path, iteration_data):
    path = tmp_path / "test.db"
    database = load_database(path=path)
    make_optimization_iteration_table(database, first_eval={"output": 0.5})
    for i in range(1, 11):  # sqlalchemy starts counting at 1
        iteration_data["value"] = i
        append_row(iteration_data, "optimization_iterations", database, path, False)

    res = read_last_rows(
        database=database,
        table_name="optimization_iterations",
        n_rows=3,
        return_type="dict_of_lists",
        stride=2,
    )["value"]

    expected = [6.0, 8.0, 10.0]
    assert res == expected
Example #8
def test_read_last_rows_with_step(tmp_path, iteration_data):
    path = tmp_path / "test.db"
    database = load_database(path=path)
    make_optimization_iteration_table(database, first_eval={"output": 0.5})
    for i in range(1, 11):  # sqlalchemy starts counting at 1
        iteration_data["value"] = i
        iteration_data["step"] = i % 2
        append_row(iteration_data, "optimization_iterations", database, path, False)

    res = read_last_rows(
        database=database,
        table_name="optimization_iterations",
        n_rows=20,
        return_type="dict_of_lists",
        step=0,
    )
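    # step=0 keeps only the rows whose "step" column equals 0, i.e. the even rowids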

    expected = [2, 4, 6, 8, 10]
    assert res["rowid"] == expected
Example #9
def test_update_row(tmp_path, iteration_data):
    path = tmp_path / "test.db"
    database = load_database(path=path)
    make_optimization_iteration_table(database, first_eval={"output": 0.5})
    for i in range(1, 11):  # sqlalchemy starts counting at 1
        iteration_data["value"] = i
        append_row(iteration_data, "optimization_iterations", database, path, False)

    update_row({"value": 20}, 8, "optimization_iterations", database, path, False)

    res = read_new_rows(
        database=database,
        table_name="optimization_iterations",
        last_retrieved=3,
        return_type="dict_of_lists",
    )[0]["value"]

    expected = [4, 5, 6, 7, 20, 9, 10]
    assert res == expected
Example #10
def _create_and_initialize_database(logging, log_options, first_eval,
                                    problem_data):

    # extract information
    path = logging
    fast_logging = log_options.get("fast_logging", False)
    if_exists = log_options.get("if_exists", "extend")
    save_all_arguments = log_options.get("save_all_arguments", False)
    database = load_database(path=path, fast_logging=fast_logging)

    # create the optimization_iterations table
    make_optimization_iteration_table(
        database=database,
        first_eval=first_eval,
        if_exists=if_exists,
    )

    # create and initialize the optimization_status table
    make_optimization_status_table(database, if_exists)
    append_row({"status": "running"}, "optimization_status", database, path,
               fast_logging)

    # create and initialize the optimization_problem table
    make_optimization_problem_table(database, if_exists, save_all_arguments)
    if not save_all_arguments:
        not_saved = [
            "criterion",
            "criterion_kwargs",
            "constraints",
            "derivative",
            "derivative_kwargs",
            "criterion_and_derivative",
            "criterion_and_derivative_kwargs",
        ]
        problem_data = {
            key: val for key, val in problem_data.items() if key not in not_saved
        }
    append_row(problem_data, "optimization_problem", database, path, fast_logging)

    return database