def main(args):
    """Fit a variational sampler (VS) to a Bayesian logistic-regression posterior."""

    print('setting up distribution')

    def load_data(folder, test_frac=0.2):
        # Load features/labels, shuffle them jointly, and split train/test.
        features = np.load(folder + '/data.npy')
        labels = np.load(folder + '/labels.npy')
        num_rows, _ = features.shape
        combined = np.concatenate([features, labels], axis=1)
        np.random.shuffle(combined)
        cut = int(num_rows * (1 - test_frac))
        train, test = combined[:cut], combined[cut:]
        return train[:, :-1], train[:, -1], test[:, :-1], test[:, -1]

    X_train, y_train, X_test, y_test = load_data(args.data)
    # One extra dimension on top of the feature count -- presumably a bias
    # weight; verify against BayesLogRegPost.
    data_dim = X_train.shape[1] + 1
    dist = BayesLogRegPost(X_train, y_train, X_test, y_test, args.prior)

    print('setting up sampler')
    with tf.variable_scope('sampler', reuse=tf.AUTO_REUSE):
        sampler_args = (dist.log_prob_func(), data_dim, args.aux_dim,
                        args.hidden_units, args.num_layers,
                        args.train_samples, args.num_chains,
                        args.activation, args.num_mix, args.perturb)
        sampler = VS(*sampler_args)

        print('setting up and running experiment')
        exp = Experiment(log_dir=args.logdir, sampler=sampler,
                         params=vars(args), dist=dist)
        exp.run()
# Beispiel #2
# 0
def main(args):
    """Train a NICE-based sampler on a two-component Gaussian mixture."""

    print('setting up distribution')
    dist = MixtureOfGaussians(means=args.means, stds=args.stds, pis=[0.5, 0.5])

    # Second, independent copy of the target, used as the "irrelevant"
    # distribution by the sampler.
    dist_irr = MixtureOfGaussians(means=args.means, stds=args.stds,
                                  pis=[0.5, 0.5])

    def noise(bs):
        # Standard-normal initial states, shape (bs, 2).
        return np.random.normal(loc=0.0, scale=1.0, size=[bs, 2])

    print('setting up sampler')
    sampler = NiceSampler(gen_arch=args.gen_arch,
                          log_prob_func=dist.log_prob_func(),
                          log_prob_func_irr=dist_irr.log_prob_func(),
                          disc_arch=args.disc_arch,
                          init_dist=noise,
                          b=args.b, m=args.m)

    print('setting up and running experiment')
    exp = Experiment(log_dir=args.logdir, sampler=sampler,
                     params=vars(args), dist=dist, debug=args.debug)
    exp.run()
# Beispiel #3
# 0
def main(args):
    """Train a NICE-based sampler on a ring-shaped target distribution."""

    print(args)

    print('setting up distribution')
    dist = Ring(args.scale)

    def noise(bs):
        # Standard-normal initial states, shape (bs, 2).
        return np.random.normal(loc=0.0, scale=1.0, size=[bs, 2])

    print('setting up sampler')
    sampler = NiceSampler(gen_arch=args.gen_arch,
                          log_prob_func=dist.log_prob_func(),
                          disc_arch=args.disc_arch,
                          init_dist=noise,
                          b=args.b, m=args.m)

    print('setting up and running experiment')
    exp = Experiment(log_dir=args.logdir, sampler=sampler,
                     params=vars(args), dist=dist, debug=args.debug)
    exp.run()
def main(args):
    """Run L2HMC (with tempering) on a two-mode, well-separated GMM.

    Any hyperparameter passed as a negative value is drawn at random from a
    sensible range instead.
    """
    if args.learning_rate < 0:
        exponent = np.random.uniform(2.5, 6)
        args.learning_rate = np.float32(np.power(10, -exponent))
    if args.scale < 0:
        args.scale = np.float32(np.random.uniform(0.1, 5.0))
    if args.lambda_b < 0:
        args.lambda_b = np.float32(np.random.uniform(0.0, 0.5))
    if args.leap_size < 0:
        exponent = np.random.uniform(1, 2)
        args.leap_size = np.float32(np.power(10, -exponent))
    if args.leap_steps < 0.0:
        args.leap_steps = np.random.choice([10, 25, 50])
    if args.init_temp < 0.0:
        args.init_temp = np.random.choice([100, 50])

    print(args)

    print('setting up distribution')
    # Two unit-covariance modes placed 20 apart on the x-axis, mixed equally.
    mus = np.array([[-10., 0.], [10., 0.]])
    covs = np.array([np.eye(2), np.eye(2)])
    pis = np.array([0.5, 0.5])
    dist = GMM(mus, covs, pis)

    def init_dist(bs):
        # Standard-normal initial states in the target space.
        return np.random.randn(bs, args.data_dim)

    sampler = L2HMC(energy_function=dist.get_energy_function(),
                    arch=args.arch,
                    tar_dim=args.data_dim,
                    scale=args.scale,
                    init_dist=init_dist,
                    leap_steps=args.leap_steps,
                    leap_size=args.leap_size,
                    lambda_b=args.lambda_b,
                    use_temp=True)

    print('setting up and running experiment')
    exp = Experiment(log_dir=args.logdir, sampler=sampler,
                     params=vars(args), dist=dist)
    exp.run()
# Beispiel #5
# 0
def main(args):
    """Train a NICE-based sampler on a Bayesian feed-forward-net posterior."""

    print('setting up distribution')

    def load_data(folder, test_frac=0.2):
        # Load features/labels, shuffle them jointly, and split train/test.
        features = np.load(folder + '/data.npy')
        labels = np.load(folder + '/labels.npy')
        num_rows, _ = features.shape
        combined = np.concatenate([features, labels], axis=1)
        np.random.shuffle(combined)
        cut = int(num_rows * (1 - test_frac))
        train, test = combined[:cut], combined[cut:]
        return train[:, :-1], train[:, -1], test[:, :-1], test[:, -1]

    X_train, y_train, X_test, y_test = load_data(args.data)
    data_dim = X_train.shape[1] + 1
    arch = [data_dim] + args.arch
    # Total number of weights across consecutive (fully connected) layers.
    target_dim = np.sum([fan_in * fan_out
                         for fan_in, fan_out in zip(arch[:-1], arch[1:])])
    gen_arch = 2 * [target_dim] + args.gen_arch
    dist = FeedForwardNetPost(X_train, y_train, X_test, y_test, arch,
                              prec=args.prior)

    def noise(bs):
        # Standard-normal initial states in weight space.
        return np.random.normal(loc=0.0, scale=1.0, size=[bs, target_dim])

    print('setting up sampler')
    sampler = NiceSampler(gen_arch=gen_arch,
                          log_prob_func=dist.log_prob_func(),
                          disc_arch=args.disc_arch,
                          init_dist=noise,
                          b=args.b, m=args.m)

    print('setting up and running experiment')
    exp = Experiment(log_dir=args.logdir, sampler=sampler,
                     params=vars(args), dist=dist, debug=args.debug)
    exp.run()
# Beispiel #6
# 0
def main(args):
    """Run L2HMC on a Bayesian feed-forward-net posterior."""

    print('setting up distribution')

    def load_data(folder, test_frac=0.2):
        # Load features/labels, shuffle them jointly, and split train/test.
        features = np.load(folder + '/data.npy')
        labels = np.load(folder + '/labels.npy')
        num_rows, _ = features.shape
        combined = np.concatenate([features, labels], axis=1)
        np.random.shuffle(combined)
        cut = int(num_rows * (1 - test_frac))
        train, test = combined[:cut], combined[cut:]
        return train[:, :-1], train[:, -1], test[:, :-1], test[:, -1]

    X_train, y_train, X_test, y_test = load_data(args.data)
    data_dim = X_train.shape[1] + 1
    arch = [data_dim] + args.dist_arch
    # Total number of weights across consecutive (fully connected) layers.
    target_dim = np.sum([fan_in * fan_out
                         for fan_in, fan_out in zip(arch[:-1], arch[1:])])
    dist = FeedForwardNetPost(X_train, y_train, X_test, y_test, arch,
                              prec=args.prior)

    print('setting up sampler')

    def init_dist(bs):
        # Standard-normal initial states in weight space.
        return np.random.randn(bs, target_dim)

    sampler = L2HMC(log_prob_func=dist.log_prob_func(),
                    arch=args.arch,
                    sample_dim=target_dim,
                    scale=args.scale,
                    init_dist=init_dist,
                    leap_steps=args.leap_steps,
                    eps=args.eps)

    print('setting up and running experiment')
    exp = Experiment(log_dir=args.logdir, sampler=sampler,
                     params=vars(args), dist=dist)
    exp.run()
# Beispiel #7
# 0
def main(args):
    """Fit a variational sampler (VS) to a two-component Gaussian mixture."""

    print('setting up distribution')
    dist = MixtureOfGaussians(means=args.means, stds=args.stds, pis=[0.5, 0.5])

    print('setting up sampler')
    with tf.variable_scope('sampler', reuse=tf.AUTO_REUSE):
        sampler_args = (dist.log_prob_func(), args.data_dim, args.aux_dim,
                        args.hidden_units, args.num_layers,
                        args.train_samples, args.num_chains,
                        args.activation, args.num_mix, args.perturb)
        sampler = VS(*sampler_args)

        print('setting up and running experiment')
        exp = Experiment(log_dir=args.logdir, sampler=sampler,
                         params=vars(args), dist=dist)
        exp.run()
def main(args):
    """Run plain HMC on a two-component Gaussian mixture."""

    print('setting up distribution')
    dist = MixtureOfGaussians(means=args.means, stds=args.stds, pis=[0.5, 0.5])

    def init_dist(bs):
        # float32 standard-normal initial states, shape (bs, 2).
        return np.random.randn(bs, 2).astype(np.float32)

    print('setting up sampler')
    sampler = HMC(dist.log_prob_func(), init_dist,
                  args.step_size, args.num_leap_steps, 2)

    print('setting up and running experiment')
    exp = Experiment(log_dir=args.logdir, sampler=sampler,
                     params=vars(args), dist=dist)
    exp.run()
# Beispiel #9
# 0
def main(args):
    """Train a NICE-based sampler on the Australian-credit logistic posterior."""

    print('setting up distribution')

    def load_data(folder, test_frac=0.2):
        # Load features/labels, shuffle them jointly, and split train/test.
        features = np.load(folder + '/data.npy')
        labels = np.load(folder + '/labels.npy')
        num_rows, _ = features.shape
        combined = np.concatenate([features, labels], axis=1)
        np.random.shuffle(combined)
        cut = int(num_rows * (1 - test_frac))
        train, test = combined[:cut], combined[cut:]
        return train[:, :-1], train[:, -1], test[:, :-1], test[:, -1]

    X_train, y_train, X_test, y_test = load_data(args.data)
    data_dim = X_train.shape[1] + 1
    dist = Australian(X_train, y_train, X_test, y_test, args.prior)
    # Independent copy used as the "irrelevant" distribution by the sampler.
    dist_irr = Australian(X_train, y_train, X_test, y_test, args.prior)

    def noise(bs):
        # Standard-normal initial states, shape (bs, data_dim).
        return np.random.normal(loc=0.0, scale=1.0, size=[bs, data_dim])

    gen_arch = [data_dim, data_dim] + args.gen_arch
    print('setting up sampler')
    sampler = NiceSampler(gen_arch=gen_arch,
                          log_prob_func=dist.log_prob_func(),
                          log_prob_func_irr=dist_irr.log_prob_func(),
                          disc_arch=args.disc_arch,
                          init_dist=noise,
                          b=args.b, m=args.m)

    print('setting up and running experiment')
    exp = Experiment(log_dir=args.logdir, sampler=sampler,
                     params=vars(args), dist=dist, debug=args.debug)
    exp.run()
def main(args):
    """Run L2HMC on a ring-shaped target.

    Any hyperparameter passed as a negative value is drawn at random from a
    sensible range instead.
    """
    if args.learning_rate < 0:
        exponent = np.random.uniform(2.5, 6)
        args.learning_rate = np.power(10, -np.float32(exponent))
    if args.scale < 0:
        args.scale = np.float32(np.random.uniform(0.1, 5.0))
    if args.lambda_b < 0:
        args.lambda_b = np.float32(np.random.uniform(0.0, 0.5))
    if args.leap_size < 0:
        exponent = np.float32(np.random.uniform(1, 2))
        args.leap_size = np.float32(np.power(10, -exponent))
    if args.leap_steps < 0.0:
        args.leap_steps = np.random.choice([10, 25, 50])

    print(args)

    print('setting up distribution')
    dist = Ring(args.ring_scale)

    def energy_func(x):
        # Energy is the negative target log-probability.
        return -dist.log_prob_func()(x)

    print('setting up sampler')

    def init_dist(bs):
        # Standard-normal initial states in the target space.
        return np.random.randn(bs, args.data_dim)

    sampler = L2HMC(energy_function=energy_func,
                    arch=args.arch,
                    tar_dim=args.data_dim,
                    scale=args.scale,
                    init_dist=init_dist,
                    leap_steps=args.leap_steps,
                    leap_size=args.leap_size)

    print('setting up and running experiment')
    exp = Experiment(log_dir=args.logdir, sampler=sampler,
                     params=vars(args), dist=dist)
    exp.run()
def main(args):
    """Run a random-walk Metropolis sampler on a two-component mixture."""

    print('setting up distribution')
    dist = MixtureOfGaussians(means=args.means, stds=args.stds, pis=[0.5, 0.5])

    print('setting up sampler')

    def init_dist(bs):
        # Every chain starts at the fixed point (0.75, 0.75).
        return np.ones((bs, 1)) * np.array([[0.75, 0.75]])

    sampler = RwmSampler(dist.log_prob_func(), args.data_dim,
                         init_dist, args.perturb)

    print('setting up and running experiment')
    exp = Experiment(log_dir=args.logdir, sampler=sampler,
                     params=vars(args), dist=dist)
    exp.run()
def main(args):
    """Run plain HMC on a Bayesian logistic-regression posterior."""

    print(args)
    print('setting up distribution')

    def load_data(folder, test_frac=0.2):
        # Load features/labels, shuffle them jointly, and split train/test.
        features = np.load(folder + '/data.npy')
        labels = np.load(folder + '/labels.npy')
        num_rows, _ = features.shape
        combined = np.concatenate([features, labels], axis=1)
        np.random.shuffle(combined)
        cut = int(num_rows * (1 - test_frac))
        train, test = combined[:cut], combined[cut:]
        return train[:, :-1], train[:, -1], test[:, :-1], test[:, -1]

    X_train, y_train, X_test, y_test = load_data(args.data)
    data_dim = X_train.shape[1] + 1
    dist = BayesLogRegPost(X_train, y_train, X_test, y_test, args.prior)

    def init_dist(bs):
        # float32 standard-normal initial states in parameter space.
        return np.random.randn(bs, data_dim).astype(np.float32)

    print('setting up sampler')
    sampler = HMC(dist.log_prob_func(), init_dist,
                  args.step_size, args.num_leap_steps, data_dim)

    print('setting up and running experiment')
    exp = Experiment(log_dir=args.logdir, sampler=sampler,
                     params=vars(args), dist=dist)
    exp.run()
# Beispiel #13
# 0
def main(args):
    """Run L2HMC on a Bayesian logistic-regression posterior.

    Any hyperparameter passed as a negative value is drawn at random from a
    sensible range instead.
    """
    if args.learning_rate < 0:
        exponent = np.random.uniform(2.5, 6)
        args.learning_rate = np.float32(np.power(10.0, -exponent))
    if args.scale < 0:
        args.scale = np.float32(np.random.uniform(0.1, 5.0))
    if args.lambda_b < 0:
        args.lambda_b = np.float32(np.random.uniform(0.0, 0.5))
    if args.eps < 0:
        exponent = np.random.uniform(1, 2)
        args.eps = np.float32(np.power(10.0, -exponent))
    if args.leap_steps < 0.0:
        args.leap_steps = np.random.choice([10, 25, 50])

    print(args)

    print('setting up distribution')

    def load_data(folder, test_frac=0.2):
        # Load features/labels, shuffle them jointly, and split train/test.
        features = np.load(folder + '/data.npy')
        labels = np.load(folder + '/labels.npy')
        num_rows, _ = features.shape
        combined = np.concatenate([features, labels], axis=1)
        np.random.shuffle(combined)
        cut = int(num_rows * (1 - test_frac))
        train, test = combined[:cut], combined[cut:]
        return train[:, :-1], train[:, -1], test[:, :-1], test[:, -1]

    X_train, y_train, X_test, y_test = load_data(args.data)
    data_dim = X_train.shape[1] + 1
    with tf.name_scope('distribution'):
        dist = BayesLogRegPost(X_train, y_train, X_test, y_test, args.prior)

    def energy(x):
        # Energy is the negative target log-probability.
        return -dist.log_prob_func()(x)

    print('setting up sampler')

    def init_dist(bs):
        # Standard-normal initial states in parameter space.
        return np.random.randn(bs, data_dim)

    with tf.name_scope('sampler'):
        sampler = L2HMC(energy_function=energy,
                        arch=args.arch,
                        tar_dim=data_dim,
                        scale=args.scale,
                        init_dist=init_dist,
                        leap_steps=args.leap_steps,
                        leap_size=args.eps,
                        lambda_b=args.lambda_b)

    print('setting up and running experiment')
    exp = Experiment(log_dir=args.logdir, sampler=sampler,
                     params=vars(args), dist=dist, debug=args.debug)
    exp.run()