Example #1
labels = labels[0:n]  # inputs, targets and n come from earlier in the script (not shown)
dataset = data.dataset(inputs, targets, labels)
batches = data.BatchIterator(dataset)

# meta
epochs = 50
learning_rate = 0.1
cd_k = 15
weight_decay = reg.l2(0.0002)
momentum = 0  # 0.1

# Load and stack params from initial pre-training.
# initial_params = dbn.params_from_rbms([parameters.load(OUTPUT_PATH, t)
#                                        for t in (1358445776, 1358451846, 1358456203)])

# Params after first 50 epochs of fine tuning. (lr 0.1, p 0.0, cd_k=10)
initial_params = parameters.load(OUTPUT_PATH, timestamp=1358541318)

# The sampling function used by the top-level RBM.
sample_v_softmax = functools.partial(rbm.sample_v_softmax, k=mnist.NUM_CLASSES)

# Optimization objective.
def f(params, data):
    return contrastive_wake_sleep(params, data, weight_decay, cd_k)

output_dir = utils.make_output_directory(OUTPUT_PATH)
save_params = parameters.save_hook(output_dir)

params = optimize.sgd(f, initial_params, batches, epochs, learning_rate, momentum,
                      post_epoch=save_params)
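
For orientation, here is a minimal sketch of the minibatch SGD loop that the optimize.sgd call above implies. The return convention of f (a cost together with gradients shaped like the parameters), the representation of the parameters as a list of numpy arrays, and the post_epoch call signature are assumptions for illustration; this is not the project's optimize.sgd implementation.

import numpy as np

def sgd_sketch(f, initial_params, batches, epochs, learning_rate,
               momentum=0.0, post_epoch=None):
    # Illustrative only. Assumed: f(params, batch) -> (cost, grads),
    # with grads shaped like params.
    params = [p.copy() for p in initial_params]
    velocity = [np.zeros_like(p) for p in params]
    for epoch in range(epochs):
        for batch in batches:
            cost, grads = f(params, batch)
            for p, v, g in zip(params, velocity, grads):
                v *= momentum               # decay the previous update
                v -= learning_rate * g      # take a gradient step
                p += v                      # update the parameter in place
        if post_epoch is not None:
            post_epoch(params, epoch)       # e.g. a checkpointing hook
    return params
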
Example #2
def generate(dbn_params):
    # Clamp the softmax (label) units of the top-level RBM to the ten
    # one-hot label vectors so that each row of the chain generates a
    # different digit class.
    sample_v_softmax_clamped = functools.partial(sample_v_softmax,
                                                 labels=np.eye(10))

    # Perform an upward pass from the pixels to the visible units of
    # the top-level RBM.
    # initial_v = np.hstack((
    #         np.eye(10),
    #         up_pass(dbn_params, initial_pixels)))

    # Initial visible state of the top-level RBM: the ten one-hot labels
    # alongside random values for the remaining units of its visible layer.
    initial_v = np.hstack(
        (np.eye(10), np.random.random((10, dbn_params[-1].W.shape[1] - 10))))

    # Initialize the Gibbs chain on the top-level RBM.
    gc = rbm.gibbs_chain(initial_v, dbn_params[-1], rbm.sample_h,
                         sample_v_softmax_clamped)

    tile_2_by_5 = functools.partial(utils.tile, grid_shape=(2, 5))

    # Drop the initial state, take 2000 further samples from the chain,
    # keep the visible configuration of each state, propagate it down
    # through the DBN to pixel space, and tile the ten digits from each
    # step into a 2 x 5 grid.
    gen = itertools.islice(gc, 1, None)
    gen = itertools.islice(gen, 2000)
    gen = itertools.imap(operator.itemgetter(1), gen)
    gen = itertools.imap(lambda v: down_pass(dbn_params, v), gen)
    gen = itertools.imap(tile_2_by_5, gen)

    # Save the generated images to a new temporary directory under OUTPUT_PATH.
    utils.save_images(gen, tempfile.mkdtemp(dir=OUTPUT_PATH))


params = parameters.load(OUTPUT_PATH, timestamp=1358586160)
generate(params)
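
The generate function above consumes rbm.gibbs_chain as an unbounded iterator of chain states and keeps the visible configuration of each one. The project's rbm.gibbs_chain is not shown here; a generator shaped roughly as follows would fit the way it is used (alternating Gibbs sampling on the top-level RBM, yielding (hidden, visible) pairs, with the supplied visible sampler keeping the ten softmax label units clamped). The call convention of the sampling functions is an assumption.

def gibbs_chain_sketch(initial_v, rbm_params, sample_h, sample_v):
    # Illustrative only; not the project's rbm.gibbs_chain.
    v = initial_v
    while True:
        h = sample_h(rbm_params, v)  # sample hiddens given visibles (assumed call order)
        v = sample_v(rbm_params, h)  # sample visibles given hiddens; labels stay clamped
        yield h, v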