def __init__(self, X, Y, layer_sizes, L2_reg, model_optimize_restarts=1):
    """Set up a Bayesian-neural-network model trained with black-box
    variational inference (BBVI).

    Args:
        X: (num_data, input_dim) array of inputs; copied defensively.
        Y: training targets (stored by reference).
        layer_sizes: list of layer widths for the underlying NN.
        L2_reg: L2 weight-regularization strength.
        model_optimize_restarts: number of random restarts used by the
            optimization driver (default 1).
    """
    self.X = X.copy()
    self.Y = Y
    self.num_data, self.input_dim = self.X.shape
    self.layer_sizes = layer_sizes

    # RBF nonlinearity: standard-normal pdf applied elementwise.
    rbf = lambda x: norm.pdf(x, 0, 1)
    self.nonlinearity = rbf
    # Alternative sine nonlinearity, kept available but not used below.
    self.sq = lambda x: np.sin(x)

    noise_variance = 0.01  # fixed observation-noise variance of the likelihood
    self.L2_reg = L2_reg
    self.num_weights, self.predictions, self.logprob = \
        self.make_nn_funs(layer_sizes, L2_reg, noise_variance,
                          self.nonlinearity)

    # Unnormalized log-posterior over weights given the stored data
    # (the `t` iteration argument is ignored).
    self.log_posterior = lambda weights, t: self.logprob(weights, self.X, self.Y)

    # Number of MC samples for the BBVI estimator — defined once instead of
    # repeating the literal 20 in two places.
    self.num_samples = 20

    # Build variational objective.
    self.objective, self.gradient, self.unpack_params = \
        self.black_box_variational_inference(self.log_posterior,
                                             self.num_weights,
                                             self.num_samples)

    # Variational parameters: mean and log-std per weight, concatenated.
    self.rs = npr.RandomState(0)
    init_mean = self.rs.randn(self.num_weights)
    init_log_std = -5 * np.ones(self.num_weights)
    self.init_var_params = np.concatenate([init_mean, init_log_std])
    # Independent copy so in-place updates don't clobber the initial params
    # (the original rebuilt the same concatenation twice).
    self.update_param = self.init_var_params.copy()

    # variables used for running the optimization process
    self.optimization_runs = []
    self.model_optimize_restarts = model_optimize_restarts
    self.verbosity = True
    self.reset = True
# NOTE(review): the two defs below appear to be methods of a `Logger` class
# whose `class Logger:` header lies outside this chunk — confirm and keep them
# indented under it in the assembled file.
def __init__(self, filename="Default.log"):
    """Tee-style logger: mirrors everything written to stdout into *filename*."""
    self.terminal = sys.stdout
    # Append mode so repeated runs accumulate in one log file.
    self.log = open(filename, "a")

def write(self, message):
    """Write *message* to both the real terminal and the log file."""
    self.terminal.write(message)
    self.log.write(message)
    # Flush so the on-disk log survives a crash mid-run.
    self.log.flush()

def flush(self):
    """File-object protocol: anything installed as sys.stdout must provide
    flush() (print(..., flush=True) and interpreter shutdown call it)."""
    self.terminal.flush()
    self.log.flush()


if __name__ == '__main__':
    # Capture all printed progress on disk as well as the terminal.
    sys.stdout = Logger("experiment.txt")

    # Specify inference problem by its unnormalized log-posterior.
    rbf = lambda x: norm.pdf(x, 0, 1)  # RBF nonlinearity
    sq = lambda x: np.sin(x)           # alternative sine nonlinearity (unused here)
    num_weights, predictions, logprob = \
        make_nn_funs(layer_sizes=[1, 10, 10, 1], L2_reg=0.01,
                     noise_variance=0.01, nonlinearity=rbf)

    inputs, targets = build_toy_dataset()
    log_posterior = lambda weights, t: logprob(weights, inputs, targets)

    # Build variational objective.
    objective, gradient, unpack_params = \
        black_box_variational_inference(log_posterior, num_weights,
                                        num_samples=20)

    # Set up figure.
    fig = plt.figure(figsize=(8, 8), facecolor='white')