Example #1
import numpy as np

# NOTE: the listing is truncated at the top; the cle imports below are
# assumed from the library's usual layout and may need adjusting.
from cle.cle.layers import InitCell
from cle.cle.layers.feedforward import FullyConnectedLayer
from cle.cle.models import Model
from cle.cle.train import Training
from cle.cle.train.ext import (EpochCount, GradientClipping, Monitoring,
                               Picklize, EarlyStopping)
from cle.cle.train.opt import RMSProp
from cle.cle.utils import error, flatten, predict, OrderedDict
from cle.datasets.mnist import MNIST

# Set your dataset
data_path = '/home/junyoung/data/mnist/mnist.pkl'
save_path = '/home/junyoung/src/cle/saved/'

batch_size = 128
debug = 0

model = Model()
train_data = MNIST(name='train', path=data_path)

valid_data = MNIST(name='valid', path=data_path)

# Choose the random initialization method
init_W = InitCell('randn')
init_b = InitCell('zeros')

# Define nodes: objects
x, y = train_data.theano_vars()
# You must use THEANO_FLAGS="compute_test_value=raise" python -m ipdb
if debug:
    x.tag.test_value = np.zeros((batch_size, 784), dtype=np.float32)
    y.tag.test_value = np.zeros((batch_size, 1), dtype=np.float32)

h1 = FullyConnectedLayer(name='h1',
Example #2
import numpy as np

# Assumed imports (the listing is truncated above this point):
from cle.cle.layers import InitCell
from cle.cle.models import Model
from cle.cle.train.opt import RMSProp
from cle.cle.utils import error, flatten, predict, OrderedDict
from cle.datasets.mnist import MNIST

# Set your dataset
data_path = '/data/lisa/data/mnist/mnist.pkl'
save_path = '/u/chungjun/src/cle/saved/'
#data_path = '/home/junyoung/data/mnist/mnist.pkl'
#save_path = '/home/junyoung/src/cle/saved/'

batch_size = 128
debug = 0

model = Model()
train_data = MNIST(name='train',
                   path=data_path)

valid_data = MNIST(name='valid',
                   path=data_path)

# Choose the random initialization method
init_W = InitCell('rand')
init_b = InitCell('zeros')

# Define nodes: objects
x, y = train_data.theano_vars()
mn_x, mn_y = valid_data.theano_vars()
# You must use THEANO_FLAGS="compute_test_value=raise" python -m ipdb
if debug:
    x.tag.test_value = np.zeros((batch_size, 784), dtype=np.float32)
    y.tag.test_value = np.zeros((batch_size, 1), dtype=np.float32)
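
The x.tag.test_value and y.tag.test_value lines above rely on Theano's compute-test-value mode, which the THEANO_FLAGS comment refers to. As a minimal, self-contained sketch (plain Theano plus NumPy, independent of cle) of how that mechanism behaves:

import numpy as np
import theano
import theano.tensor as T

# Same effect as running with THEANO_FLAGS="compute_test_value=raise":
theano.config.compute_test_value = 'raise'

x = T.fmatrix('x')
# Attach a concrete dummy value; every expression built from x is evaluated
# on it immediately, so shape mismatches raise at graph-construction time.
x.tag.test_value = np.zeros((128, 784), dtype=np.float32)

W = np.zeros((784, 256), dtype=np.float32)
h = T.dot(x, W)
print(h.tag.test_value.shape)   # (128, 256)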
Example #3
import numpy as np

# Assumed imports (the listing is truncated above this point):
from cle.cle.layers import InitCell
from cle.cle.models import Model
from cle.cle.utils.compat import OrderedDict
from cle.datasets.mnist import MNIST


datapath = '/home/junyoung/data/mnist/mnist_binarized_salakhutdinov.pkl'
savepath = '/home/junyoung/repos/cle/saved/'

batch_size = 100
inpsz = 784
latsz = 100
n_steps = 64
debug = 0

model = Model()
data = MNIST(name='train',
             unsupervised=1,
             path=datapath)

init_W = InitCell('rand')
init_U = InitCell('ortho')
init_b = InitCell('zeros')
init_b_sig = InitCell('const', mean=0.6)

x, _ = data.theano_vars()
if debug:
    x.tag.test_value = np.zeros((batch_size, 784), dtype=np.float32)

error = ErrorLayer(name='error',
                   parent=['x'],
                   recurrent=['canvas'],
                   batch_size=batch_size)
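
The init_W, init_U, init_b, and init_b_sig cells above name several initialization schemes ('rand', 'ortho', 'zeros', 'const' with mean=0.6). As a hedged illustration only, with scales that are assumptions rather than cle's actual defaults, rough NumPy analogues would be:

import numpy as np

rng = np.random.RandomState(1234)

# Assumed semantics; cle's InitCell may parameterize these differently.
W_rand = rng.uniform(-0.01, 0.01, size=(784, 100))   # 'rand': uniform noise
W_randn = rng.normal(0.0, 0.01, size=(784, 100))     # 'randn': Gaussian noise
b_zeros = np.zeros(100)                              # 'zeros'
b_const = np.full(100, 0.6)                          # 'const' with mean=0.6
Q, _ = np.linalg.qr(rng.normal(size=(100, 100)))
U_ortho = Q                                          # 'ortho': orthogonal matrix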
Example #4
import numpy as np

# Assumed imports (the listing is truncated above this point):
from cle.cle.layers import InitCell
from cle.cle.models import Model
from cle.datasets.mnist import MNIST

std_dev = 0.001
inp_p = 1.0
inp_scale = 1 / inp_p
int_p = 0.5
int_scale = 1 / int_p


# Set your dataset
data_path = '/data/lisa/data/mnist/mnist.pkl'
save_path = '/u/chungjun/src/cle/saved/'

batch_size = 128
debug = 0

model = Model()
train_data = MNIST(name='train',
                   path=data_path)

valid_data = MNIST(name='valid',
                   path=data_path)

# Define nodes: objects
x, y = train_data.theano_vars()

# You must use THEANO_FLAGS="compute_test_value=raise" python -m ipdb
if debug:
    x.tag.test_value = np.zeros((batch_size, 784), dtype=np.float32)
    y.tag.test_value = np.zeros((batch_size, 1), dtype=np.float32)

# Choose the random initialization method
init_W = InitCell('rand')
init_b = InitCell('zeros')
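
The inp_p and int_p constants at the top of this example read like dropout keep-probabilities, with inp_scale and int_scale as the matching inverse scales. Assuming that reading (it is not confirmed by the snippet itself), a minimal NumPy sketch of inverted dropout shows why the scale is 1 / keep_prob:

import numpy as np

rng = np.random.RandomState(1234)

def inverted_dropout(h, keep_prob, rng):
    # Drop units with probability (1 - keep_prob) and rescale the survivors
    # by 1 / keep_prob so the expected activation stays unchanged.
    mask = rng.binomial(n=1, p=keep_prob, size=h.shape).astype(h.dtype)
    return h * mask / keep_prob

h = np.ones((128, 256), dtype=np.float32)
h_train = inverted_dropout(h, keep_prob=0.5, rng=rng)  # cf. int_p and int_scale
print(h_train.mean())   # close to 1.0 in expectation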
Example #5
import numpy as np

# Assumed imports (the listing is truncated above this point):
from cle.cle.layers import InitCell
from cle.cle.models import Model
from cle.cle.train.opt import Adam
from cle.cle.utils import flatten
from cle.cle.utils.compat import OrderedDict
from cle.datasets.mnist import MNIST

datapath = '/home/junyoung/data/mnist/mnist_binarized_salakhutdinov.pkl'
savepath = '/home/junyoung/repos/cle/saved/'

batch_size = 100
input_dim = 784
latent_dim = 100
n_steps = 64
debug = 0

model = Model()
data = MNIST(name='train', unsupervised=1, path=datapath)

init_W = InitCell('rand')
init_U = InitCell('ortho')
init_b = InitCell('zeros')
init_b_sig = InitCell('const', mean=0.6)

x, _ = data.theano_vars()
if debug:
    x.tag.test_value = np.zeros((batch_size, 784), dtype=np.float32)

error = ErrorLayer(name='error',
                   parent=['x'],
                   recurrent=['canvas'],
                   batch_size=batch_size)
Example #6
import numpy as np

# Assumed imports (the listing is truncated above this point):
from cle.cle.layers import InitCell
from cle.cle.models import Model
from cle.cle.train.opt import RMSProp
from cle.cle.utils import error, predict, OrderedDict
from cle.datasets.mnist import MNIST


# Set your dataset
#data_path = '/data/lisa/data/mnist/mnist.pkl'
#save_path = '/u/chungjun/repos/cle/saved/'
data_path = '/home/junyoung/data/mnist/mnist.pkl'
save_path = '/home/junyoung/repos/cle/saved/'

batch_size = 128
debug = 0

model = Model()
trdata = MNIST(name='train',
               path=data_path)
valdata = MNIST(name='valid',
                path=data_path)

# Choose the random initialization method
init_W = InitCell('randn')
init_b = InitCell('zeros')

# Define nodes: objects
model.inputs = trdata.theano_vars()
x, y = model.inputs
# You must use THEANO_FLAGS="compute_test_value=raise" python -m ipdb
if debug:
    x.tag.test_value = np.zeros((batch_size, 784), dtype=np.float32)
    y.tag.test_value = np.zeros((batch_size, 1), dtype=np.float32)
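
The (batch_size, 784) and (batch_size, 1) test-value shapes used throughout these examples mirror flattened 28x28 MNIST images and their labels. Assuming data_path points at the standard deeplearning.net mnist.pkl, a pickled ((train_x, train_y), (valid_x, valid_y), (test_x, test_y)) tuple, a quick sanity check of those shapes looks like this:

import pickle

data_path = '/home/junyoung/data/mnist/mnist.pkl'  # same path as above

with open(data_path, 'rb') as f:
    # encoding='latin1' lets Python 3 read pickles written by Python 2.
    (train_x, train_y), (valid_x, valid_y), (test_x, test_y) = \
        pickle.load(f, encoding='latin1')

print(train_x.shape, train_y.shape)   # typically (50000, 784) and (50000,)
print(valid_x.shape, test_x.shape)    # typically (10000, 784) each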