Code example #1
File: test_theano_gm.py    Project: cyip/hyperopt
    def test_optimize_20(self):
        self.experiment.bandit_algo.build_helpers()
        HL = self.experiment.bandit_algo.helper_locals
        assert len(HL['Gsamples']) == 1
        Gpseudocounts = HL['Gsamples'][0].vals.owner.inputs[1]
        Bpseudocounts = HL['Bsamples'][0].vals.owner.inputs[1]

        f = self.experiment.bandit_algo._helper
        # compile a second function over the same inputs that returns the
        # intermediate quantities (observations, pseudocounts, the losses on
        # either side of y_thresh) so the test can inspect them
        debug = theano.function(
            [HL['n_to_draw'], HL['n_to_keep'], HL['y_thresh'], HL['yvals']]
                + HL['s_obs'].flatten(),
            (HL['Gobs'].flatten()
                + [Gpseudocounts]
                + [Bpseudocounts]
                + [HL['yvals'][where(HL['yvals'] < HL['y_thresh'])]]
                + [HL['yvals'][where(HL['yvals'] >= HL['y_thresh'])]]
                ),
            allow_input_downcast=True,
            )
        debug_rval = [None]  # one-element list so the closure below can write to it
        def _helper(*args):
            rval = f(*args)
            debug_rval[0] = debug(*args)  # capture the diagnostics of the last call
            return rval
        self.experiment.bandit_algo._helper = _helper
        self.experiment.run(200)

        gobs_idxs, gobs_vals, Gpseudo, Bpseudo, Gyvals, Byvals = debug_rval[0]
        print gobs_idxs
        print 'Gpseudo', Gpseudo
        print 'Bpseudo', Bpseudo

        import matplotlib.pyplot as plt
        plt.subplot(1, 4, 1)
        Xs = [t['x'] for t in self.experiment.trials]
        Ys = self.experiment.losses()
        plt.plot(Ys)
        plt.xlabel('time')
        plt.ylabel('loss')

        plt.subplot(1, 4, 2)
        plt.scatter(Xs, Ys)
        plt.xlabel('X')
        plt.ylabel('loss')

        plt.subplot(1, 4, 3)
        plt.hist(Xs)
        plt.xlabel('X')
        plt.ylabel('freq')

        plt.subplot(1, 4, 4)
        plt.hist(Gyvals, bins=20)
        plt.hist(Byvals, bins=20)

        print self.experiment.losses()
        print 'MIN', min(self.experiment.losses())
        assert min(self.experiment.losses()) < -3.00

        if 0:  # flip to 1 to show the plots when running the test by hand
            plt.show()
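
The noteworthy move in this test is how it captures intermediate values: the algorithm's compiled helper is replaced by a wrapper that also calls a second debug function and writes its result into a one-element list, which the enclosing scope reads after the run. A minimal, Theano-free sketch of that capture pattern (make_debugged and the toy f/debug are hypothetical stand-ins for the compiled functions):

    import math

    def make_debugged(f, debug, store):
        """Wrap f so every call also records debug(*args) into store[0]."""
        def wrapped(*args):
            rval = f(*args)
            store[0] = debug(*args)  # side channel: diagnostics of the last call
            return rval
        return wrapped

    f = lambda x: x * x                  # stand-in for the compiled helper
    debug = lambda x: (x, math.log(x))   # stand-in for the debug function

    debug_rval = [None]                  # mutable cell the closure writes into
    f = make_debugged(f, debug, debug_rval)

    print(f(3.0))         # 9.0
    print(debug_rval[0])  # (3.0, 1.0986...)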
Code example #2
File: theano_gm.py    Project: wqren/hyperopt
    def build_helpers(self):
        s_prior = IdxsValsList.fromlists(self.s_idxs, self.s_vals)
        s_obs = s_prior.new_like_self()

        # y_thresh is the boundary between 'good' and 'bad' regions of the
        # search space.
        y_thresh = tensor.scalar()

        yvals = tensor.vector()
        n_to_draw = self.s_N
        n_to_keep = tensor.iscalar()

        s_rng = montetheano.RandomStreams(self.seed + 9)

        GE = self.good_estimator
        BE = self.bad_estimator

        Gobs = s_obs.symbolic_take(where(yvals < y_thresh))
        Bobs = s_obs.symbolic_take(where(yvals >= y_thresh))

        # To "optimize" EI we just draw a pile of samples from the density
        # of good points and then just take the best of those.
        Gsamples = GE.posterior(s_prior, Gobs, s_rng)
        Bsamples = BE.posterior(s_prior, Bobs, s_rng)

        # score the same candidate points (drawn from G) under both models
        G_ll = GE.log_likelihood(Gsamples,
                                 Gsamples,
                                 llik=tensor.zeros((n_to_draw, )))
        B_ll = BE.log_likelihood(Bsamples,
                                 Gsamples,
                                 llik=tensor.zeros((n_to_draw, )))

        # log EI surrogate: high where G is dense and B is not
        log_EI = G_ll - B_ll
        keep_idxs = argsort(log_EI)[-n_to_keep:]

        # store all these vars for the unit tests
        self.helper_locals = locals()
        del self.helper_locals['self']
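
The closing lines carry the whole algorithm: candidates are drawn from the good-point posterior, scored under both models, and ranked by log_EI = G_ll - B_ll, with argsort keeping the n_to_keep highest scorers. A rough NumPy analogue, using Gaussian log-densities as stand-ins for the estimators' posteriors (all names and numbers here are illustrative):

    import numpy as np

    rng = np.random.default_rng(0)
    n_to_draw, n_to_keep = 1000, 10

    def log_pdf(x, mu, sigma):
        # log of a Gaussian density, our stand-in for the estimators
        return -0.5 * ((x - mu) / sigma) ** 2 - np.log(sigma * np.sqrt(2 * np.pi))

    samples = rng.normal(0.0, 1.0, size=n_to_draw)  # draw from the "good" model
    G_ll = log_pdf(samples, 0.0, 1.0)
    B_ll = log_pdf(samples, 2.0, 1.0)               # "bad" model sits elsewhere

    log_EI = G_ll - B_ll                         # high where G is dense, B is not
    keep_idxs = np.argsort(log_EI)[-n_to_keep:]
    proposals = samples[keep_idxs]               # candidates the algo would try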
Code example #3
File: theano_gm.py    Project: ardila/hyperopt
    def build_helpers(self):
        s_prior = IdxsValsList.fromlists(self.s_idxs, self.s_vals)
        s_obs = s_prior.new_like_self()

        # y_thresh is the boundary between 'good' and 'bad' regions of the
        # search space.
        y_thresh = tensor.scalar()

        yvals = tensor.vector()
        n_to_draw = self.s_N
        n_to_keep = tensor.iscalar()

        s_rng = montetheano.RandomStreams(self.seed + 9)

        GE = self.good_estimator
        BE = self.bad_estimator

        Gobs = s_obs.symbolic_take(where(yvals < y_thresh))
        Bobs = s_obs.symbolic_take(where(yvals >= y_thresh))

        # To "optimize" EI we just draw a pile of samples from the density
        # of good points and then just take the best of those.
        Gsamples = GE.posterior(s_prior, Gobs, s_rng)
        Bsamples = BE.posterior(s_prior, Bobs, s_rng)

        # score the same candidate points (drawn from G) under both models
        G_ll = GE.log_likelihood(Gsamples, Gsamples,
                llik=tensor.zeros((n_to_draw,)))
        B_ll = BE.log_likelihood(Bsamples, Gsamples,
                llik=tensor.zeros((n_to_draw,)))

        # log EI surrogate: high where G is dense and B is not
        log_EI = G_ll - B_ll
        keep_idxs = argsort(log_EI)[-n_to_keep:]

        # store all these vars for the unit tests
        self.helper_locals = locals()
        del self.helper_locals['self']
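
Both variants begin by splitting the observation history at y_thresh, sending trials with yvals < y_thresh to the good estimator and the rest to the bad one. In plain NumPy the split looks like this (np.where stands in for the symbolic where used above; the data and the percentile rule are illustrative):

    import numpy as np

    yvals = np.array([0.9, 0.2, 0.5, 0.1, 0.8])   # losses of past trials
    xvals = np.array([3.0, 1.0, 2.0, 0.5, 2.5])   # parameters that produced them
    y_thresh = np.percentile(yvals, 40)           # e.g. best ~40% count as "good"

    good = xvals[np.where(yvals < y_thresh)]      # low loss -> fit the good model
    bad = xvals[np.where(yvals >= y_thresh)]      # the rest -> fit the bad model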