def test_transformation_config():
    """A Transformation round-trips through get_config/from_config unchanged."""
    original = pre.Transformation(pre.scale, kwargs={'denominator': 2})
    expected = [original.compute(tensor) for tensor in tensors]

    # Serialize, rebuild, and verify the rebuilt transform agrees.
    rebuilt = pre.Transformation.from_config(original.get_config())
    observed = [rebuilt.compute(tensor) for tensor in tensors]

    assert compare_lists(expected, observed)
def test_scale():
    """pre.scale divides by the denominator, both directly and via Transformation."""
    # Direct call: dividing by 2 should equal multiplying by 0.5.
    scaled = [pre.scale(tensor, 2) for tensor in tensors]
    reference = [0.5 * tensor for tensor in tensors]
    assert compare_lists(scaled, reference)

    # Wrapped call: Transformation.compute must match the direct function call.
    wrapper = pre.Transformation(pre.scale, kwargs={'denominator': 2})
    wrapped = [wrapper.compute(tensor) for tensor in tensors]
    assert compare_lists(scaled, wrapped)
def run(num_epochs=10, show_plot=False):
    """Train a deep Boltzmann machine (two Bernoulli hidden layers) with PCD.

    Args:
        num_epochs: number of passes over the training data.
        show_plot: whether to display the diagnostic plots.
    """
    hidden_units = 100
    minibatch_size = 100
    stepsize_schedule = schedules.PowerLawDecay(initial=0.002, coefficient=0.1)
    monte_carlo_steps = 10

    # Minibatch reader over binarized data (95% train / 5% validate split).
    batch_reader = util.create_batch(
        minibatch_size,
        train_fraction=0.95,
        transform=pre.Transformation(pre.binarize_color))

    # Visible layer sized to the data; two equal-width Bernoulli hidden layers.
    model = BoltzmannMachine([
        layers.BernoulliLayer(batch_reader.ncols),
        layers.BernoulliLayer(hidden_units),
        layers.BernoulliLayer(hidden_units),
    ])
    model.initialize(batch_reader)

    # ADAM on a power-law decaying stepsize; SGD fitter drives the updates.
    optimizer = optimizers.ADAM(stepsize=stepsize_schedule)
    trainer = fit.SGD(model, batch_reader)

    print('training with contrastive divergence')
    trainer.train(optimizer, num_epochs, method=fit.pcd,
                  mcsteps=monte_carlo_steps)

    # Diagnostics: metrics, reconstructions, fantasy particles, and weights.
    util.show_metrics(model, trainer.monitor)
    validation = batch_reader.get('validate')
    util.show_reconstructions(model, validation, show_plot, n_recon=10,
                              num_to_avg=10)
    util.show_fantasy_particles(model, validation, show_plot, n_fantasy=5)
    util.show_weights(model, show_plot, n_weights=25)

    # Release the underlying HDF5 store.
    batch_reader.close()
    print("Done")
from paysage import preprocess as pre
from paysage import layers
from paysage.models import BoltzmannMachine
from paysage import fit
from paysage import optimizers
from paysage import backends as be
from paysage import schedules
from paysage import metrics as M

be.set_seed(137)  # for determinism

import mnist_util as util

# Rescale raw pixel values from [0, 255] into [0, 1] (scale divides by the
# denominator).
transform = pre.Transformation(pre.scale, kwargs={'denominator': 255})


def run(num_epochs=20, show_plot=False):
    """Set up a Gaussian-Bernoulli RBM on rescaled data.

    NOTE(review): this definition appears truncated here — only data and
    model setup are visible; optimizer construction and training presumably
    follow. Confirm against the full file.
    """
    num_hidden_units = 200
    batch_size = 100
    mc_steps = 10
    beta_std = 0.95

    # set up the reader to get minibatches
    data = util.create_batch(batch_size, train_fraction=0.95,
                             transform=transform)

    # set up the model and initialize the parameters
    vis_layer = layers.GaussianLayer(data.ncols, center=False)
    hid_layer = layers.BernoulliLayer(num_hidden_units, center=True)
    # Freeze all of the hidden layer's parameters at their initial values.
    hid_layer.set_fixed_params(hid_layer.get_param_names())

    rbm = BoltzmannMachine([vis_layer, hid_layer])
    # PCA-based initialization of the weights (500 epochs of fitting).
    rbm.initialize(data, 'pca', epochs=500, verbose=True)
from paysage import preprocess as pre
from paysage import layers
from paysage.models import BoltzmannMachine
from paysage import fit
from paysage import optimizers
from paysage import backends as be
from paysage import schedules

be.set_seed(137)  # for determinism

import mnist_util as util

# Map pixel intensities to binary {0, 1} values.
transform = pre.Transformation(pre.binarize_color)


def run(num_epochs=10, show_plot=False):
    """Set up a Bernoulli-Gaussian RBM on binarized data.

    NOTE(review): this definition appears truncated here — only data and
    layer setup are visible; model assembly, optimizer construction, and
    training presumably follow. Confirm against the full file.
    """
    num_hidden_units = 256
    batch_size = 100
    learning_rate = schedules.PowerLawDecay(initial=0.001, coefficient=0.1)
    mc_steps = 1

    # set up the reader to get minibatches
    data = util.create_batch(batch_size, train_fraction=0.95,
                             transform=transform)

    # set up the model and initialize the parameters
    vis_layer = layers.BernoulliLayer(data.ncols)
    hid_layer = layers.GaussianLayer(num_hidden_units)