Example #1
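These snippets come from pypesto's test suite and are shown without their import headers. The block below is a sketch of the imports they would plausibly need, reconstructed from the names used across the examples; the test-only helpers (test_objective, rosen_for_sensi, create_optimization_results) belong to the test suite itself and are not part of pypesto's public API.

# Assumed imports, reconstructed from the names used in the snippets below.
import os
import warnings

import numpy as np
import scipy.optimize as so
from numpy.testing import assert_almost_equal

import pypesto
import pypesto.optimize as optimize
import pypesto.profile as profile
import pypesto.sample as sample
import pypesto.visualize as visualize
from pypesto.store import (
    ProfileResultHDF5Reader,
    ProfileResultHDF5Writer,
    read_result,
    write_result,
)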
def test_profile_with_fixed_parameters():
    """Test using profiles with fixed parameters."""
    obj = test_objective.rosen_for_sensi(max_sensi_order=1)['obj']

    lb = -2 * np.ones(5)
    ub = 2 * np.ones(5)
    problem = pypesto.Problem(objective=obj,
                              lb=lb,
                              ub=ub,
                              x_fixed_vals=[0.5, -1.8],
                              x_fixed_indices=[0, 3])

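    # run a short multi-start optimization to obtain an optimum around which to profile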
    optimizer = optimize.ScipyOptimizer(options={'maxiter': 50})
    result = optimize.minimize(problem=problem,
                               optimizer=optimizer,
                               n_starts=2)

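    # compute profiles with each available proposal scheme and check the plots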
    for i_method, next_guess_method in enumerate([
            'fixed_step', 'adaptive_step_order_0', 'adaptive_step_order_1',
            'adaptive_step_regression'
    ]):
        print(next_guess_method)
        profile.parameter_profile(problem=problem,
                                  result=result,
                                  optimizer=optimizer,
                                  next_guess_method=next_guess_method)

        # standard plotting
        axes = visualize.profiles(result, profile_list_ids=i_method)
        assert len(axes) == 3
        visualize.profile_cis(result, profile_list=i_method)
Example #2
    def test_extending_profiles(self):
        # run profiling
        result = profile.parameter_profile(problem=self.problem,
                                           result=self.result,
                                           optimizer=self.optimizer,
                                           next_guess_method='fixed_step')

        # set new bounds (knowing that one parameter stopped at the bounds)
        self.problem.lb_full = -4 * np.ones(2)
        self.problem.ub_full = 4 * np.ones(2)

        # re-run profiling using new bounds
        result = profile.parameter_profile(problem=self.problem,
                                           result=result,
                                           optimizer=self.optimizer,
                                           next_guess_method='fixed_step',
                                           profile_index=np.array([0, 1]),
                                           profile_list=0)
        # check result
        self.assertTrue(
            isinstance(result.profile_result.list[0][0],
                       profile.ProfilerResult))
        self.assertTrue(
            isinstance(result.profile_result.list[0][1],
                       profile.ProfilerResult))
Example #3
def test_profile_with_history():
    objective = rosen_for_sensi(max_sensi_order=2, integrated=False)['obj']

    with warnings.catch_warnings():
        warnings.simplefilter("ignore")
        (problem, result, optimizer) = create_optimization_results(objective,
                                                                   dim_full=5)

    profile_options = profile.ProfileOptions(
        min_step_size=0.0005,
        delta_ratio_max=0.05,
        default_step_size=0.005,
        ratio_min=0.03,
    )

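    # fix parameters 0 and 3 at their values from the best optimization result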
    problem.fix_parameters(
        [0, 3],
        [
            result.optimize_result.list[0].x[0],
            result.optimize_result.list[0].x[3],
        ],
    )
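    # attach an in-memory history so that the objective trace is recorded during profiling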
    problem.objective.history = pypesto.MemoryHistory({'trace_record': True})
    profile.parameter_profile(
        problem=problem,
        result=result,
        optimizer=optimizer,
        profile_index=np.array([0, 2, 4]),
        result_index=0,
        profile_options=profile_options,
        filename=None,
    )
Example #4
    def test_engine_profiling(self):
        # loop over all possible engines
        # engine=None will be used for comparison
        engines = [
            None,
            pypesto.engine.SingleCoreEngine(),
            pypesto.engine.MultiProcessEngine(),
            pypesto.engine.MultiThreadEngine(),
        ]
        for engine in engines:
            # run profiling, profile results get appended
            # in self.result.profile_result
            profile.parameter_profile(
                problem=self.problem,
                result=self.result,
                optimizer=self.optimizer,
                next_guess_method='fixed_step',
                engine=engine,
                filename=None,
            )

        # check results
        for count, _engine in enumerate(engines[1:]):
            for j in range(len(self.result.profile_result.list[0])):
                assert_almost_equal(
                    self.result.profile_result.list[0][j]['x_path'],
                    self.result.profile_result.list[count + 1][j]['x_path'],
                    err_msg='The values of the profiles for'
                    ' the different engines do not match',
                )
Example #5
    def test_selected_profiling(self):
        # create options in order to ensure a short computation time
        options = profile.ProfileOptions(
            default_step_size=0.02,
            min_step_size=0.005,
            max_step_size=1.0,
            step_size_factor=1.5,
            delta_ratio_max=0.2,
            ratio_min=0.3,
            reg_points=5,
            reg_order=2,
        )

        # 1st run of profiling, computing just one out of two profiles
        result = profile.parameter_profile(
            problem=self.problem,
            result=self.result,
            optimizer=self.optimizer,
            profile_index=np.array([1]),
            next_guess_method='fixed_step',
            result_index=1,
            profile_options=options,
            filename=None,
        )

        self.assertIsInstance(result.profile_result.list[0][1],
                              pypesto.ProfilerResult)
        self.assertIsNone(result.profile_result.list[0][0])

        # 2nd run of profiling, appending to an existing list of profiles
        # using another algorithm and another optimum
        result = profile.parameter_profile(
            problem=self.problem,
            result=result,
            optimizer=self.optimizer,
            profile_index=np.array([0]),
            result_index=2,
            profile_list=0,
            profile_options=options,
            filename=None,
        )

        self.assertIsInstance(result.profile_result.list[0][0],
                              pypesto.ProfilerResult)

        # 3rd run of profiling, opening a new list of profiles with the fixed-step method
        result = profile.parameter_profile(
            problem=self.problem,
            result=result,
            optimizer=self.optimizer,
            next_guess_method='fixed_step',
            profile_index=np.array([0]),
            profile_options=options,
            filename=None,
        )
        # check result
        self.assertIsInstance(result.profile_result.list[1][0],
                              pypesto.ProfilerResult)
        self.assertIsNone(result.profile_result.list[1][1])
Example #6
def test_profile_with_history():
    objective = test_objective.rosen_for_sensi(max_sensi_order=2,
                                               integrated=False)['obj']

    with warnings.catch_warnings():
        warnings.simplefilter("ignore")
        (problem, result, optimizer) = \
            create_optimization_results(objective)

    profile_options = profile.ProfileOptions(min_step_size=0.0005,
                                             delta_ratio_max=0.05,
                                             default_step_size=0.005,
                                             ratio_min=0.03)

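    # record the objective trace in memory while the profiles are computed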
    problem.objective.history = pypesto.MemoryHistory({'trace_record': True})
    profile.parameter_profile(problem=problem,
                              result=result,
                              optimizer=optimizer,
                              profile_index=np.array(
                                  [1, 1, 1, 0, 0, 1, 0, 1, 0, 0, 0]),
                              result_index=0,
                              profile_options=profile_options)
Example #7
    def test_default_profiling(self):
        # loop over methods for creating new initial guesses
        method_list = [
            'fixed_step', 'adaptive_step_order_0', 'adaptive_step_order_1',
            'adaptive_step_regression'
        ]
        for i_run, method in enumerate(method_list):
            # run profiling
            result = profile.parameter_profile(problem=self.problem,
                                               result=self.result,
                                               optimizer=self.optimizer,
                                               next_guess_method=method)

            # check result
            self.assertTrue(
                isinstance(result.profile_result.list[i_run][0],
                           profile.ProfilerResult))
            self.assertEqual(len(result.profile_result.list), i_run + 1)
            self.assertEqual(len(result.profile_result.list[i_run]), 2)

            # check whether profiling needed too many steps
            steps = result.profile_result.list[i_run][0]['ratio_path'].size
            if method == 'adaptive_step_regression':
                self.assertTrue(
                    steps < 20, 'Profiling with regression based '
                    'proposal needed too many steps.')
                self.assertTrue(
                    steps > 1, 'Profiling with regression based '
                    'proposal needed too few steps.')
            elif method == 'adaptive_step_order_1':
                self.assertTrue(
                    steps < 25, 'Profiling with 1st order based '
                    'proposal needed too many steps.')
                self.assertTrue(
                    steps > 1, 'Profiling with 1st order based '
                    'proposal needed too few steps.')
            elif method == 'adaptive_step_order_0':
                self.assertTrue(
                    steps < 100, 'Profiling with 0th order based '
                    'proposal needed too many steps.')
                self.assertTrue(
                    steps > 1, 'Profiling with 0th order based '
                    'proposal needed too few steps.')

            # standard plotting
            visualize.profiles(result, profile_list_ids=i_run)
            visualize.profile_cis(result, profile_list=i_run)
Example #8
def test_storage_all():
    """Test `read_result` and `write_result`.

    It currently does not test read/write of the problem as this
    is known to not work completely. It also excludes testing the history
    key of an optimization result.
    """
    objective = pypesto.Objective(fun=so.rosen,
                                  grad=so.rosen_der,
                                  hess=so.rosen_hess)
    dim_full = 10
    lb = -5 * np.ones((dim_full, 1))
    ub = 5 * np.ones((dim_full, 1))
    n_starts = 5
    problem = pypesto.Problem(objective=objective, lb=lb, ub=ub)

    optimizer = optimize.ScipyOptimizer()
    # Optimization
    result = optimize.minimize(
        problem=problem,
        optimizer=optimizer,
        n_starts=n_starts,
        filename=None,
    )
    # Profiling
    result = profile.parameter_profile(
        problem=problem,
        result=result,
        profile_index=[0],
        optimizer=optimizer,
        filename=None,
    )
    # Sampling
    sampler = sample.AdaptiveMetropolisSampler()
    result = sample.sample(
        problem=problem,
        sampler=sampler,
        n_samples=100,
        result=result,
        filename=None,
    )
    # Read and write
    filename = 'test_file.hdf5'
    try:
        write_result(result=result, filename=filename)
        result_read = read_result(filename=filename)

        # test optimize
        for i, opt_res in enumerate(result.optimize_result.list):
            for key in opt_res:
                if key == 'history':
                    continue
                if isinstance(opt_res[key], np.ndarray):
                    np.testing.assert_array_equal(
                        opt_res[key], result_read.optimize_result.list[i][key])
                else:
                    assert (opt_res[key]
                            == result_read.optimize_result.list[i][key])

        # test profile
        for key in result.profile_result.list[0][0].keys():
            if (result.profile_result.list[0][0][key] is None
                    or key == 'time_path'):
                continue
            elif isinstance(result.profile_result.list[0][0][key], np.ndarray):
                np.testing.assert_array_equal(
                    result.profile_result.list[0][0][key],
                    result_read.profile_result.list[0][0][key],
                )
            elif isinstance(result.profile_result.list[0][0][key], int):
                assert (result.profile_result.list[0][0][key] ==
                        result_read.profile_result.list[0][0][key])

        # test sample
        for key in result.sample_result.keys():
            if result.sample_result[key] is None or key == 'time':
                continue
            elif isinstance(result.sample_result[key], np.ndarray):
                np.testing.assert_array_equal(
                    result.sample_result[key],
                    result_read.sample_result[key],
                )
            elif isinstance(result.sample_result[key], (float, int)):
                np.testing.assert_almost_equal(
                    result.sample_result[key],
                    result_read.sample_result[key],
                )
    finally:
        if os.path.exists(filename):
            os.remove(filename)
Example #9
def test_storage_profiling():
    """
    This test tests the saving and loading of profiles
    into HDF5 through pypesto.store.ProfileResultHDF5Writer
    and pypesto.store.ProfileResultHDF5Reader. Tests all entries
    aside from times and message.
    """
    objective = pypesto.Objective(fun=so.rosen,
                                  grad=so.rosen_der,
                                  hess=so.rosen_hess)
    dim_full = 10
    lb = -5 * np.ones((dim_full, 1))
    ub = 5 * np.ones((dim_full, 1))
    n_starts = 5
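    # draw Latin hypercube start points and pass them to the problem as initial guesses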
    startpoints = pypesto.startpoint.latin_hypercube(n_starts=n_starts,
                                                     lb=lb,
                                                     ub=ub)
    problem = pypesto.Problem(objective=objective,
                              lb=lb,
                              ub=ub,
                              x_guesses=startpoints)

    optimizer = optimize.ScipyOptimizer()

    result_optimization = optimize.minimize(
        problem=problem,
        optimizer=optimizer,
        n_starts=n_starts,
        filename=None,
    )
    profile_original = profile.parameter_profile(
        problem=problem,
        result=result_optimization,
        profile_index=[0],
        optimizer=optimizer,
        filename=None,
    )

    fn = 'test_file.hdf5'
    try:
        pypesto_profile_writer = ProfileResultHDF5Writer(fn)
        pypesto_profile_writer.write(profile_original)
        pypesto_profile_reader = ProfileResultHDF5Reader(fn)
        profile_read = pypesto_profile_reader.read()

        for key in profile_original.profile_result.list[0][0].keys():
            if (profile_original.profile_result.list[0][0][key] is None
                    or key == 'time_path'):
                continue
            elif isinstance(profile_original.profile_result.list[0][0][key],
                            np.ndarray):
                np.testing.assert_array_equal(
                    profile_original.profile_result.list[0][0][key],
                    profile_read.profile_result.list[0][0][key],
                )
            elif isinstance(profile_original.profile_result.list[0][0][key],
                            int):
                assert (profile_original.profile_result.list[0][0][key] ==
                        profile_read.profile_result.list[0][0][key])
    finally:
        if os.path.exists(fn):
            os.remove(fn)