Example 1
 def test_fmin(self):
     """
     Test the fmin function which finds the x that gives the minimal output for f(x)
     """
     from math import pi
     fx = lambda x: (x - pi) ** 2 + 2 ** 0.5
     self.assertAlmostEqual(fmin(-10.0, 10.0, fx, 0), pi)
     # compare result of fmin and minimize_scalar
     self.assertAlmostEqual(fmin(-10.0, 10.0, fx, 0), minimize_scalar(fx, bounds=(-10.0, 10.0), method='bounded').x)
Example 2
 def test_fmin(self):
     """
     Test the fmin function which finds the x that gives the minimal output for f(x)
     """
     from math import pi
     fx = lambda x: (x - pi)**2 + 2**0.5
     self.assertAlmostEqual(fmin(-10.0, 10.0, fx, 0), pi)
     # compare result of fmin and minimize_scalar
     self.assertAlmostEqual(
         fmin(-10.0, 10.0, fx, 0),
         minimize_scalar(fx, bounds=(-10.0, 10.0), method='bounded').x)
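
Examples 1 and 2 test an fmin(lower, upper, f, flag) helper whose implementation is not shown on this page. As a rough reference for what such a bounded one-dimensional minimizer can look like, here is a hedged sketch using golden-section search; the name golden_section_fmin and the tolerance argument are illustrative, not the API under test.

from math import pi, sqrt

def golden_section_fmin(lower, upper, f, tol=1e-9):
    # Golden-section search: repeatedly shrink [lower, upper] around the
    # minimum of a unimodal function f.
    invphi = (sqrt(5) - 1) / 2  # 1 / golden ratio, ~0.618
    while upper - lower > tol:
        c = upper - invphi * (upper - lower)
        d = lower + invphi * (upper - lower)
        if f(c) < f(d):
            upper = d  # minimum lies in [lower, d]
        else:
            lower = c  # minimum lies in [c, upper]
    return (lower + upper) / 2

fx = lambda x: (x - pi) ** 2 + 2 ** 0.5
assert abs(golden_section_fmin(-10.0, 10.0, fx) - pi) < 1e-6
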
Example 3
 def fmin(
     self,
     fn,
     space,
     algo,
     max_evals,
     rseed=0,
     verbose=0,
     wait=True,
     pass_expr_memo_ctrl=None,
 ):
     # -- Stop-gap implementation!
     #    fmin should have been a Trials method in the first place
     #    but for now it's still sitting in another file.
     if pass_expr_memo_ctrl is not None:
         raise NotImplementedError()
     if not wait:
         raise NotImplementedError()
     import fmin as fmin_module
     return fmin_module.fmin(fn,
                             space,
                             algo,
                             max_evals,
                             trials=self,
                             rseed=rseed,
                             allow_trials_fmin=False)
Example 4
 def fmin(self, fn, space, algo, max_evals, rseed=0,
     verbose=0,
     wait=True,
     pass_expr_memo_ctrl=None,
     ):
     # -- Stop-gap implementation!
     #    fmin should have been a Trials method in the first place
     #    but for now it's still sitting in another file.
     if pass_expr_memo_ctrl is not None:
         raise NotImplementedError()
     if not wait:
         raise NotImplementedError()
     import fmin as fmin_module
     return fmin_module.fmin(fn, space, algo, max_evals,
         trials=self,
         rseed=rseed,
         allow_trials_fmin=False)
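
Examples 3 and 4 show an early Trials.fmin method that simply forwards to the module-level fmin with allow_trials_fmin=False so the call does not recurse back into the method. For context, a minimal use of hyperopt's module-level fmin with an explicit Trials object looks roughly like this; the toy objective and search space are illustrative, not taken from the examples above.

from hyperopt import fmin, hp, tpe, Trials

trials = Trials()
best = fmin(
    fn=lambda x: (x - 3.0) ** 2,         # toy objective to minimize
    space=hp.uniform('x', -10.0, 10.0),  # one-dimensional search space
    algo=tpe.suggest,
    max_evals=50,
    trials=trials,                       # records every evaluation
)
# best is a dict of the best hyperparameter values found, e.g. {'x': 2.9...}
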
Example 5
    def fmin(
        self,
        fn,
        space,
        algo,
        max_evals,
        rstate=None,
        verbose=0,
        pass_expr_memo_ctrl=None,
        catch_eval_exceptions=False,
        return_argmin=True,
    ):
        """Minimize a function over a hyperparameter space.

        For most parameters, see `hyperopt.fmin.fmin`.

        Parameters
        ----------

        catch_eval_exceptions : bool, default False
            If set to True, exceptions raised by either the evaluation of the
            configuration space from hyperparameters or the execution of `fn`
            , will be caught by fmin, and recorded in self._dynamic_trials as
            error jobs (JOB_STATE_ERROR).  If set to False, such exceptions
            will not be caught, and so they will propagate to calling code.


        """
        # -- Stop-gap implementation!
        #    fmin should have been a Trials method in the first place
        #    but for now it's still sitting in another file.
        import fmin as fmin_module
        return fmin_module.fmin(
            fn,
            space,
            algo,
            max_evals,
            trials=self,
            rstate=rstate,
            verbose=verbose,
            allow_trials_fmin=False,  # -- prevent recursion
            pass_expr_memo_ctrl=pass_expr_memo_ctrl,
            catch_eval_exceptions=catch_eval_exceptions,
            return_argmin=return_argmin)
Example 6
    def fmin(self, fn, space, algo, max_evals,
             rstate=None,
             verbose=0,
             pass_expr_memo_ctrl=None,
             catch_eval_exceptions=False,
             return_argmin=True,
             ):
        """Minimize a function over a hyperparameter space.

        For most parameters, see `hyperopt.fmin.fmin`.

        Parameters
        ----------

        catch_eval_exceptions : bool, default False
            If set to True, exceptions raised by either the evaluation of the
            configuration space from hyperparameters or the execution of `fn`
            , will be caught by fmin, and recorded in self._dynamic_trials as
            error jobs (JOB_STATE_ERROR).  If set to False, such exceptions
            will not be caught, and so they will propagate to calling code.


        """
        # -- Stop-gap implementation!
        #    fmin should have been a Trials method in the first place
        #    but for now it's still sitting in another file.
        import fmin as fmin_module
        return fmin_module.fmin(
            fn, space, algo, max_evals,
            trials=self,
            rstate=rstate,
            verbose=verbose,
            allow_trials_fmin=False,  # -- prevent recursion
            pass_expr_memo_ctrl=pass_expr_memo_ctrl,
            catch_eval_exceptions=catch_eval_exceptions,
            return_argmin=return_argmin)
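
The catch_eval_exceptions flag documented in Examples 5 and 6 decides whether an exception raised while evaluating fn aborts the run or is recorded as a JOB_STATE_ERROR trial. A hedged sketch of how it might be used with the Trials.fmin method above; the objective is illustrative, and the exact error handling depends on the hyperopt version.

from hyperopt import hp, tpe, Trials, STATUS_OK

def objective(x):
    if x < 0:
        # Simulate an occasionally failing evaluation.
        raise ValueError("negative x is not supported")
    return {'loss': (x - 3.0) ** 2, 'status': STATUS_OK}

trials = Trials()
best = trials.fmin(
    fn=objective,
    space=hp.uniform('x', -10.0, 10.0),
    algo=tpe.suggest,
    max_evals=30,
    catch_eval_exceptions=True,  # failed evaluations are stored as error jobs
)
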
Example 7
File: tree.py Project: dwf/hyperopt
    def optimize_in_model(self, max_evals,
                          sub_suggest,
                          thresh_epsilon,
                          logprior_strength,
                          random_draw_fraction,
                          n_seed_pts,
                          n_random_start_pts,
                          plot_contours,
                         ):
        """
        Parameters
        ----------
        sub_suggest : algo for fmin call to optimize in surrogate
        max_evals : max_evals for fmin call to optimize surrogate
        thresh_epsilon : optimize EI better than (min(losses) - epsilon)
        """
        def descend_branch(node, memo, fn):
            # -- node is a node in the regression tree
            if node['node'] == 'split':
                for k, v in memo.items():
                    if k.arg['label'].obj == node['hp']:
                        if v < node['thresh']:
                            return descend_branch(node['below'], memo, fn)
                        else:
                            return descend_branch(node['above'], memo, fn)
                else:
                    raise Exception('did not find node')
            else:
                assert node['node'] == 'leaf'
                return fn(node, memo, EI_thresh)

        def trees_logEI(expr, memo, ctrl):
            assert expr is self.domain.expr
            # -- expr is the search space expression
            # -- memo is a hyperparameter assignment

            logEIs = [descend_branch(tree, memo, self.leaf_node_logEI)
                      for tree in self.trees]
            # XXX is sign on this right?
            logp = logprior(self.config, memo)
            weighted_logp = logprior_strength * logp
            weighted_logEImean = len(self.tids) * np.mean(logEIs)

            #loss = weighted_logEImean + weighted_logp
            loss = np.mean(logEIs)
            #print np.mean(logEIs)

            return {
                'loss': -loss, # -- improvements are (+) and we're minimizing
                'status': 'ok',
            }

        if len(self.losses) > n_random_start_pts:
            ignore_surrogate = self.rng.rand() < random_draw_fraction
            if ignore_surrogate:
                #    TODO: mark the points drawn from the prior, because they
                #    are more useful for online [tree] model evaluation.
                #    and they provide unbiased estimates of Y mean and var
                #    over search space.
                max_evals = 1
                EI_thresh = 0 # -- irrelevant with max_evals == 1
            else:
                EI_thresh = min(self.losses) - thresh_epsilon
        else:
            max_evals = 1
            EI_thresh = 0 # -- irrelevant with max_evals == 1


        # -- initialize a Trials object with the most promising points from the
        #    original space:

        def _suggest_first_from_best_trials(new_ids, _domain, _trials, _seed):
            # -- seed the surrogate search with the best observed points,
            #    then fall back to sub_suggest
            if max_evals == 1:
                return sub_suggest(new_ids, _domain, _trials, _seed)

            if len(_trials.trials) < min(n_seed_pts, len(self.tid_docs_losses)):
                rb = ReplayBest(_domain, _trials, _seed, self.tid_docs_losses)
                return rb(new_ids[0])
            else:
                return sub_suggest(new_ids, _domain, _trials, _seed)

        t0 = time.time()
        rnd_trials = Trials()
        fmin(
            trees_logEI,
            space=self.domain.expr,
            algo=rand.suggest,
            max_evals=max_evals,
            pass_expr_memo_ctrl=True,
            rstate=self.rng,
            trials=rnd_trials,
            )
        tmp_trials = Trials()
        fmin(
            trees_logEI,
            space=self.domain.expr,
            algo=_suggest_first_from_best_trials,
            max_evals=max_evals,
            pass_expr_memo_ctrl=True,
            rstate=self.rng,
            trials=tmp_trials,
            )

        tmp_losses, tmp_docs = (tmp_trials.losses(), tmp_trials.trials)
        assert np.all(np.isfinite(tmp_losses))
        for loss, doc in sorted(zip(tmp_losses, tmp_docs)):
            if doc['misc'].get('ReplayBest', 0):
                continue
            self.best_pt = doc['misc']['vals']
            break


        t1 = time.time()
        if max_evals > 1:
            print 'optimizing surrogate took %f' % (t1 - t0)
            print 'RND', sorted(rnd_trials.losses())[:5]
            print 'ANN', sorted(tmp_trials.losses())[:5]

        if max_evals > 1 and plot_contours:
            # -- PLOT CONTOURS
            xs = np.arange(-15, 15, 0.1)
            apply_node = self.config['x']['node']
            ys = []
            vus = []
            vls = []
            for x in xs:
                memo = {pyll.scope.hyperopt_param('x', apply_node): x}
                meanvars = [descend_branch(tree, memo, self.leaf_node_meanvar)
                            for tree in self.trees]
                treemeans, treevars = map(np.asarray,zip(*meanvars))
                ys.append(treemeans.mean())
                vus.append(max(treemeans + np.sqrt(treevars)))
                vls.append(min(treemeans - np.sqrt(treevars)))

            print 'vus', vus[:10]
            print 'vls', vls[:10]

            import matplotlib.pyplot as plt
            plt.figure(1)
            plt.cla()
            # -- show error bars
            plt.plot(xs, ys)
            plt.axhline(EI_thresh, c='g')
            plt.vlines(xs, vls, vus)
            Xs = [t['misc']['vals']['x'][0] for t in self.trials.trials]
            Ys = [t['result']['loss'] for t in self.trials.trials]
            plt.scatter(Xs, Ys, c='b')
            plt.axvline(self.best_pt['x'][0], linestyle='dashed')
            plt.show()

        return self.best_pt
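
Example 7 optimizes a surrogate model: trees_logEI scores each candidate by averaging per-tree log expected improvement against EI_thresh = min(losses) - thresh_epsilon, and fmin with rand.suggest (or the seeded suggest wrapper) searches over that score. The leaf_node_logEI implementation is not shown here; for a Gaussian leaf prediction with mean mu and standard deviation sigma, log expected improvement for minimization can be computed along these lines (a sketch, not the project's code):

import numpy as np
from scipy.stats import norm

def gaussian_log_ei(mu, sigma, thresh):
    # Expected improvement below thresh for Y ~ N(mu, sigma**2):
    #   EI = (thresh - mu) * Phi(z) + sigma * phi(z),  z = (thresh - mu) / sigma
    z = (thresh - mu) / sigma
    ei = (thresh - mu) * norm.cdf(z) + sigma * norm.pdf(z)
    return np.log(np.maximum(ei, 1e-300))  # clip to keep the log finite
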
Example 8
 def test_fmin(self, array):
     result = fmin(array)
     self.assertEqual(1, result)