def _call(self, ds_):
    """Extract feature weights from the trained GPR classifier.

    .. note::
      The input dataset is not actually used; a new dataset is
      constructed from what is known to the classifier.

    Returns the per-feature weights ``1 / length_scale`` obtained by
    maximizing the log marginal likelihood over the hyperparameters.
    """
    clf = self.clf
    # Normalize labels to zero mean / unit variance.
    # NOTE(review): this rebinds clf._train_labels permanently -- the
    # classifier's stored labels are altered as a side effect.
    clf._train_labels = (clf._train_labels - clf._train_labels.mean()) \
                        / clf._train_labels.std()
    # clf._train_fv = (clf._train_fv-clf._train_fv.mean(0)) \
    #                 /clf._train_fv.std(0)
    ds = dataset_wizard(samples=clf._train_fv, targets=clf._train_labels)
    clf.ca.enable("log_marginal_likelihood")
    ms = ModelSelector(clf, ds)
    # Note that some kernels do not have a gradient yet!
    # XXX Make it initialize to clf's current hyperparameter values
    # or maybe add ability to specify starting points in the constructor
    sigma_noise_initial = 1.0e-5
    sigma_f_initial = 1.0
    length_scale_initial = np.ones(ds.nfeatures) * 1.0e4
    # length_scale_initial = np.random.rand(ds.nfeatures)*1.0e4
    hyp_initial_guess = np.hstack(
        [sigma_noise_initial, sigma_f_initial, length_scale_initial])
    # None means: optimize all hyperparameters (a dead boolean-mask
    # assignment that was immediately overwritten has been removed).
    fixedHypers = None
    problem = ms.max_log_marginal_likelihood(
        hyp_initial_guess=hyp_initial_guess,
        optimization_algorithm="scipy_lbfgsb",
        ftol=1.0e-3, fixedHypers=fixedHypers,
        use_gradient=True, logscale=True)
    if __debug__ and 'GPR_WEIGHTS' in debug.active:
        problem.iprint = 1
    # Run the optimization; the returned log marginal likelihood itself
    # is not needed here, only the resulting hyperparameters.
    ms.solve()
    weights = 1.0 / ms.hyperparameters_best[
        2:]  # weight = 1/length_scale
    if __debug__:
        debug(
            "GPR",
            "%s, train: shape %s, labels %s, min:max %g:%g, "
            "sigma_noise %g, sigma_f %g" %
            (clf, clf._train_fv.shape, np.unique(clf._train_labels),
             clf._train_fv.min(), clf._train_fv.max(),
             ms.hyperparameters_best[0], ms.hyperparameters_best[1]))
    return weights
def _call(self, ds_):
    """Extract feature weights from the trained GPR classifier.

    .. note::
      The input dataset is not actually used; a new dataset is
      constructed from what is known to the classifier.

    Returns ``1 / length_scale`` per feature after model selection by
    maximizing the log marginal likelihood.
    """
    clf = self.clf
    # Normalize labels (zero mean, unit variance).
    # NOTE(review): mutates the classifier's stored labels in place.
    clf._train_labels = (clf._train_labels - clf._train_labels.mean()) \
                        / clf._train_labels.std()
    # clf._train_fv = (clf._train_fv-clf._train_fv.mean(0)) \
    #                 /clf._train_fv.std(0)
    ds = dataset_wizard(samples=clf._train_fv, targets=clf._train_labels)
    clf.ca.enable("log_marginal_likelihood")
    ms = ModelSelector(clf, ds)
    # Note that some kernels do not have a gradient yet!
    # XXX Make it initialize to clf's current hyperparameter values
    # or maybe add ability to specify starting points in the constructor
    sigma_noise_initial = 1.0e-5
    sigma_f_initial = 1.0
    length_scale_initial = np.ones(ds.nfeatures) * 1.0e4
    # length_scale_initial = np.random.rand(ds.nfeatures)*1.0e4
    hyp_initial_guess = np.hstack([sigma_noise_initial,
                                   sigma_f_initial,
                                   length_scale_initial])
    # All hyperparameters are free (None); the previous dead boolean-mask
    # assignment, overwritten on the very next line, has been dropped.
    fixedHypers = None
    problem = ms.max_log_marginal_likelihood(
        hyp_initial_guess=hyp_initial_guess,
        optimization_algorithm="scipy_lbfgsb",
        ftol=1.0e-3, fixedHypers=fixedHypers,
        use_gradient=True, logscale=True)
    if __debug__ and 'GPR_WEIGHTS' in debug.active:
        problem.iprint = 1
    # The log marginal likelihood return value is unused; we only need
    # the best hyperparameters stored on the ModelSelector.
    ms.solve()
    weights = 1.0 / ms.hyperparameters_best[2:]  # weight = 1/length_scale
    if __debug__:
        debug("GPR",
              "%s, train: shape %s, labels %s, min:max %g:%g, "
              "sigma_noise %g, sigma_f %g" %
              (clf, clf._train_fv.shape, np.unique(clf._train_labels),
               clf._train_fv.min(), clf._train_fv.max(),
               ms.hyperparameters_best[0], ms.hyperparameters_best[1]))
    return weights
def _call(self, dataset):
    """Extract feature weights from the trained GPR classifier.

    The passed ``dataset`` is rebound to a new ``Dataset`` built from the
    classifier's stored training data, so the caller's argument is not
    actually used. Returns ``1 / length_scale`` per feature after
    maximizing the log marginal likelihood.
    """
    clf = self.clf
    # Normalize labels (zero mean, unit variance).
    # NOTE(review): mutates the classifier's stored labels in place.
    clf._train_labels = (clf._train_labels - clf._train_labels.mean()) \
                        / clf._train_labels.std()
    # clf._train_fv = (clf._train_fv-clf._train_fv.mean(0)) \
    #                 /clf._train_fv.std(0)
    dataset = Dataset(samples=clf._train_fv, labels=clf._train_labels)
    clf.states.enable("log_marginal_likelihood")
    ms = ModelSelector(clf, dataset)
    # Note that some kernels do not have a gradient yet!
    sigma_noise_initial = 1.0e-5
    sigma_f_initial = 1.0
    length_scale_initial = N.ones(dataset.nfeatures) * 1.0e4
    # length_scale_initial = N.random.rand(dataset.nfeatures)*1.0e4
    hyp_initial_guess = N.hstack([sigma_noise_initial,
                                  sigma_f_initial,
                                  length_scale_initial])
    # All hyperparameters are free (None); a dead boolean-mask assignment
    # that was immediately overwritten has been removed.
    fixedHypers = None
    problem = ms.max_log_marginal_likelihood(
        hyp_initial_guess=hyp_initial_guess,
        optimization_algorithm="scipy_lbfgsb",
        ftol=1.0e-3, fixedHypers=fixedHypers,
        use_gradient=True, logscale=True)
    if __debug__ and 'GPR_WEIGHTS' in debug.active:
        problem.iprint = 1
    # The log marginal likelihood return value is unused; only the best
    # hyperparameters recovered from the ModelSelector matter here.
    ms.solve()
    weights = 1.0 / ms.hyperparameters_best[2:]  # weight = 1/length_scale
    if __debug__:
        debug("GPR",
              "%s, train: shape %s, labels %s, min:max %g:%g, "
              "sigma_noise %g, sigma_f %g" %
              (clf, clf._train_fv.shape, N.unique(clf._train_labels),
               clf._train_fv.min(), clf._train_fv.max(),
               ms.hyperparameters_best[0], ms.hyperparameters_best[1]))
    return weights