def main(args):
    """Entry point: train/evaluate the VS sampler on a Bayesian logistic-regression posterior.

    Loads a dataset from ``args.data``, builds the posterior distribution,
    constructs the VS sampler from the CLI hyperparameters, and runs the
    experiment, logging to ``args.logdir``.
    """
    # Log the full argument namespace so the run is reproducible from logs
    # (consistent with the sibling HMC/L2HMC entry points).
    print(args)
    print('setting up distribution')

    def load_data(folder, test_frac=0.2):
        # Load features and labels, shuffle them jointly, then split off the
        # last test_frac rows as the test set.
        X = np.load(folder + '/data.npy')
        y = np.load(folder + '/labels.npy')  # assumed shape (N, 1) so axis=1 concat works — TODO confirm
        N, D = X.shape
        data = np.concatenate([X, y], axis=1)
        np.random.shuffle(data)
        cut = int(N * (1 - test_frac))
        train, test = data[:cut], data[cut:]
        return train[:, :-1], train[:, -1], test[:, :-1], test[:, -1]

    X_train, y_train, X_test, y_test = load_data(args.data)
    data_dim = X_train.shape[1] + 1  # +1 presumably for a bias weight — TODO confirm
    dist = BayesLogRegPost(X_train, y_train, X_test, y_test, args.prior)

    print('setting up sampler')
    with tf.variable_scope('sampler', reuse=tf.AUTO_REUSE):
        sampler = VS(dist.log_prob_func(), data_dim, args.aux_dim,
                     args.hidden_units, args.num_layers, args.train_samples,
                     args.num_chains, args.activation, args.num_mix,
                     args.perturb)

    print('setting up and running experiment')
    exp = Experiment(log_dir=args.logdir, sampler=sampler,
                     params=vars(args), dist=dist)
    exp.run()
def main(args):
    """Entry point: run the HMC baseline sampler on a Bayesian logistic-regression posterior.

    Loads a dataset from ``args.data``, builds the posterior, constructs an
    HMC sampler from the CLI hyperparameters, and runs the experiment,
    logging to ``args.logdir``.
    """
    print(args)
    print('setting up distribution')

    def load_data(folder, test_frac=0.2):
        # Load features and labels, shuffle them jointly, then split off the
        # last test_frac rows as the test set.
        X = np.load(folder + '/data.npy')
        y = np.load(folder + '/labels.npy')  # assumed shape (N, 1) so axis=1 concat works — TODO confirm
        N, D = X.shape
        data = np.concatenate([X, y], axis=1)
        np.random.shuffle(data)
        cut = int(N * (1 - test_frac))
        train, test = data[:cut], data[cut:]
        return train[:, :-1], train[:, -1], test[:, :-1], test[:, -1]

    X_train, y_train, X_test, y_test = load_data(args.data)
    data_dim = X_train.shape[1] + 1  # +1 presumably for a bias weight — TODO confirm
    dist = BayesLogRegPost(X_train, y_train, X_test, y_test, args.prior)

    def init_dist(bs):
        # Standard-normal initial chain states, float32 for the TF graph.
        return np.random.randn(bs, data_dim).astype(np.float32)

    print('setting up sampler')
    sampler = HMC(dist.log_prob_func(), init_dist, args.step_size,
                  args.num_leap_steps, data_dim)

    print('setting up and running experiment')
    exp = Experiment(log_dir=args.logdir, sampler=sampler,
                     params=vars(args), dist=dist)
    exp.run()
def main(args):
    """Entry point: run the NICE-based sampler on a Bayesian logistic-regression posterior.

    Loads a dataset from ``args.data``, builds the posterior, constructs a
    NiceSampler whose generator architecture is prefixed with two layers of
    width ``data_dim``, and runs the experiment, logging to ``args.logdir``.
    """
    # Log the full argument namespace so the run is reproducible from logs
    # (consistent with the sibling HMC/L2HMC entry points).
    print(args)
    print('setting up distribution')

    def load_data(folder, test_frac=0.2):
        # Load features and labels, shuffle them jointly, then split off the
        # last test_frac rows as the test set.
        X = np.load(folder + '/data.npy')
        y = np.load(folder + '/labels.npy')  # assumed shape (N, 1) so axis=1 concat works — TODO confirm
        N, D = X.shape
        data = np.concatenate([X, y], axis=1)
        np.random.shuffle(data)
        cut = int(N * (1 - test_frac))
        train, test = data[:cut], data[cut:]
        return train[:, :-1], train[:, -1], test[:, :-1], test[:, -1]

    X_train, y_train, X_test, y_test = load_data(args.data)
    data_dim = X_train.shape[1] + 1  # +1 presumably for a bias weight — TODO confirm
    dist = BayesLogRegPost(X_train, y_train, X_test, y_test, args.prior)

    def noise(bs):
        # Standard-normal initial states for the generator.
        return np.random.normal(0.0, 1.0, [bs, data_dim])

    # Prepend two data_dim-wide layers to the user-supplied generator arch.
    gen_arch = [data_dim, data_dim] + args.gen_arch

    print('setting up sampler')
    sampler = NiceSampler(gen_arch=gen_arch,
                          log_prob_func=dist.log_prob_func(),
                          disc_arch=args.disc_arch,
                          init_dist=noise,
                          b=args.b,
                          m=args.m)

    print('setting up and running experiment')
    exp = Experiment(log_dir=args.logdir, sampler=sampler,
                     params=vars(args), dist=dist, debug=args.debug)
    exp.run()
def main(args):
    """Entry point: run the L2HMC sampler on a Bayesian logistic-regression posterior.

    A negative value for any tunable hyperparameter is a sentinel meaning
    "draw it at random from a hand-picked range" — used for random
    hyperparameter search. The resolved arguments are printed so each run
    is reproducible from its logs.
    """
    # Resolve negative-sentinel hyperparameters by random draws.
    # NOTE: the draw order below is fixed; do not reorder, or seeded runs change.
    if args.learning_rate < 0:
        exponent = np.random.uniform(2.5, 6)
        args.learning_rate = np.float32(np.power(10.0, -exponent))
    if args.scale < 0:
        args.scale = np.float32(np.random.uniform(0.1, 5.0))
    if args.lambda_b < 0:
        args.lambda_b = np.float32(np.random.uniform(0.0, 0.5))
    if args.eps < 0:
        exponent = np.random.uniform(1, 2)
        args.eps = np.float32(np.power(10.0, -exponent))
    if args.leap_steps < 0.0:
        args.leap_steps = np.random.choice([10, 25, 50])
    print(args)

    print('setting up distribution')

    def load_data(folder, test_frac=0.2):
        # Load features and labels, shuffle them jointly, then split off the
        # last test_frac rows as the test set.
        X = np.load(folder + '/data.npy')
        y = np.load(folder + '/labels.npy')  # assumed shape (N, 1) so axis=1 concat works — TODO confirm
        N, D = X.shape
        data = np.concatenate([X, y], axis=1)
        np.random.shuffle(data)
        cut = int(N * (1 - test_frac))
        train, test = data[:cut], data[cut:]
        return train[:, :-1], train[:, -1], test[:, :-1], test[:, -1]

    X_train, y_train, X_test, y_test = load_data(args.data)
    data_dim = X_train.shape[1] + 1  # +1 presumably for a bias weight — TODO confirm

    with tf.name_scope('distribution'):
        dist = BayesLogRegPost(X_train, y_train, X_test, y_test, args.prior)

        def energy(x):
            # L2HMC takes an energy function, i.e. a negated log-probability.
            return -dist.log_prob_func()(x)

    print('setting up sampler')

    def init_dist(bs):
        # Standard-normal initial chain states.
        return np.random.randn(bs, data_dim)

    with tf.name_scope('sampler'):
        sampler = L2HMC(energy_function=energy,
                        arch=args.arch,
                        tar_dim=data_dim,
                        scale=args.scale,
                        init_dist=init_dist,
                        leap_steps=args.leap_steps,
                        leap_size=args.eps,
                        lambda_b=args.lambda_b)

    print('setting up and running experiment')
    exp = Experiment(log_dir=args.logdir, sampler=sampler,
                     params=vars(args), dist=dist, debug=args.debug)
    exp.run()