def __init__(self, data, **kwargs):
    """Initialize the GP core model.

    Args:
        data: dict with keys 'X' (ndp x 2 point locations) and
            'Y' (ndp x nap observations); passed through validate_data.

    Keyword Args:
        all_mac_dict: optional access-point MAC mapping (default None).
        likelihood: an lh.Likelihood instance (default: lh.Gaussian(**kwargs)).
        add_noise_var: additive noise variance (default 5e-4).
        Xtest: test-point grid; built automatically when omitted.
        Xtest_num: number of points per Xtest dimension (default 75).
        Xtest_factor: grid margin, Xtest spans min/max x -+ factor * span
            (default 0.25).
        sampling: sampling routine
            (default sampling.accept_reject_by_regions_map).
        nsamples: number of samples to draw (default 800).
        debug, verbose: diagnostic flags (default False).
    """
    super(GPcore, self).__init__()
    # Main data
    self.name = 'GPcore'
    self.data = validate_data(data)
    self.all_mac_dict = kwargs.get('all_mac_dict', None)
    self.ndp, self.nap = self.data['Y'].shape  # data points, access points
    # Check data: one 2-D location row per observation row.
    assert self.data['X'].shape[0] == self.ndp
    assert self.data['X'].shape[1] == 2
    # Likelihood params. Build the default lazily: passing
    # lh.Gaussian(**kwargs) as kwargs.get()'s default would construct it
    # (and could raise) even when a likelihood is explicitly supplied.
    if 'likelihood' in kwargs:
        self.likelihood = kwargs['likelihood']
    else:
        self.likelihood = lh.Gaussian(**kwargs)
    # Raise if likelihood is not a derived class of Likelihood.
    assert isinstance(self.likelihood, lh.Likelihood)
    self.add_noise_var = kwargs.get('add_noise_var', .0005)
    # Xtest
    self.Xtest = kwargs.get('Xtest', None)
    self.Xtest_num = kwargs.get(
        'Xtest_num', 75)  # num of points for each Xtest dimension
    self.Xtest_factor = kwargs.get(
        'Xtest_factor', .25)  # Xtest = min/max x +- x_factor*span
    if self.Xtest is None:
        # Expand each axis of the data bounding box by Xtest_factor * span,
        # then lay a regular Xtest_num x Xtest_num grid over it.
        x0 = np.min(self.data['X'][:, 0])
        x1 = np.max(self.data['X'][:, 0])
        xspn = x1 - x0
        self.xmin = x0 - self.Xtest_factor * xspn
        self.xmax = x1 + self.Xtest_factor * xspn
        y0 = np.min(self.data['X'][:, 1])
        y1 = np.max(self.data['X'][:, 1])
        yspn = y1 - y0
        self.ymin = y0 - self.Xtest_factor * yspn
        self.ymax = y1 + self.Xtest_factor * yspn
        temp_x = np.linspace(self.xmin, self.xmax, self.Xtest_num)
        temp_y = np.linspace(self.ymin, self.ymax, self.Xtest_num)
        self.Xtest = mesh(temp_x, temp_y)
    # Sampling params
    self.sampling = kwargs.get('sampling',
                               sampling.accept_reject_by_regions_map)
    self.nsamples = kwargs.get('nsamples', 800)
    # Debug params
    self.debug = kwargs.get('debug', False)
    self.verbose = kwargs.get('verbose', False)
    if self.debug:
        # Parenthesized form works identically on Python 2 and 3.
        print('class GP init works')
def __init__(self, data, **kwargs):
    """Initialize the parameter container.

    Args:
        data: dict with at least 'Y' (ndp x nap); passed through
            validate_data.

    Keyword Args:
        debug: diagnostic flag (default 0).
        params_0: optional initial parameter array. A 1-D array of length
            np is reshaped to (1, np); when omitted, self.params_init()
            supplies the initial values. Must have one row per column
            of data['Y'].
    """
    self.data = validate_data(data)
    self.debug = kwargs.get('debug', 0)
    self.params = kwargs.get('params_0', None)
    if self.params is None:
        self.params = self.params_init()
    else:
        if self.params.ndim == 1:
            # casting into [1, np]
            self.params = np.reshape(self.params, (1, self.params.shape[0]))
        # Verify params_0 has adequate format.
        # BUGFIX: original read bare `params` / raw `data`, raising
        # NameError whenever params_0 was supplied; check the stored,
        # validated attributes instead.
        assert self.params.shape[0] == self.data['Y'].shape[1]
def nll(self, dataTest=None):
    """Negative log likelihood of the (bounded) mean estimate.

    dataTest (Optional): if declared, it is validated and used instead
    of self.data for computing the nll.
    """
    data = self.data if dataTest is None else validate_data(dataTest)
    # Bounded estimation at the training/test inputs.
    predictions = self.mean_val(data['X'])
    log_prob = stats.norm.logpdf(predictions,
                                 loc=data['Y'],
                                 scale=data['Var'] ** 0.5)
    return -log_prob.sum()
def cv(self, dataTest=None):
    """Cross validation value.

    Used for comparing different function optimization outputs; can be
    computed with a separate test dataset or with the training dataset.

    <math> nll(testing dataset) / nll(zero function)

    dataTest (Optional): if declared, it is validated and used instead
    of self.data for computing the nll.
    """
    data = self.data if dataTest is None else validate_data(dataTest)
    numerator = self.nll(dataTest=data)
    # Baseline: nll of the constant-zero predictor on the same data.
    zeros = np.zeros_like(data['Y'])
    denominator = -np.sum(stats.norm.logpdf(zeros,
                                            loc=data['Y'],
                                            scale=data['Var'] ** 0.5))
    return numerator / denominator