Example #1
            unit='tanh',
            init_W=init_W,
            init_U=init_U,
            init_b=init_b)

h4 = FullyConnectedLayer(name='h4',
                         parent=['h1', 'h2', 'h3'],
                         nout=res,
                         unit='sigmoid',
                         init_W=init_W,
                         init_b=init_b)

cost = MSELayer(name='cost', parent=['h4', 'y'])

nodes = [h1, h2, h3, h4, cost]
rnn = Net(inputs=inputs, inputs_dim=inputs_dim, nodes=nodes)
cost = unpack(rnn.build_recurrent_graph(output_args=[cost]))
cost = cost.mean()
cost.name = 'cost'
model.graphs = [rnn]

optimizer = Adam(
    lr=0.001
)

extension = [
    GradientClipping(batch_size=batch_size),
    EpochCount(100),
    Monitoring(freq=100,
               ddout=[cost]),
    Picklize(freq=200, path=save_path)
]
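Example #3 shows this same script continuing into a Training mainloop. A minimal sketch of that wiring, assuming the usual cle pattern; every keyword argument except name, and the data variable, are assumptions rather than code from this snippet:

# Hypothetical completion of the training setup. The keyword arguments
# and the data variable are assumed from the pattern visible in
# Example #3, not confirmed by this snippet.
mainloop = Training(name='toy_bb_gflstm',
                    data=data,            # assumed: dataset defined earlier
                    model=model,
                    optimizer=optimizer,
                    cost=cost,
                    outputs=[cost],
                    extension=extension)
mainloop.run()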
Example #2
File: cifar10.py Project: anirudh9119/cle
# Global average pooling missing
h4 = FullyConnectedLayer(name='h4',
                         parent=['c2'],
                         nout=10,
                         unit='softmax',
                         init_W=init_W,
                         init_b=init_b)

cost = MulCrossEntropyLayer(name='cost', parent=['y', 'h4'])

# You will fill in a list of nodes and feed them to the model constructor
nodes = [c1, c2, h1, h2, h3, h4, cost]

# Your model will build the Theano computational graph
cnn = Net(inputs=inputs, inputs_dim=inputs_dim, nodes=nodes)
cnn.build_graph()

# You can access any output of a node by doing model.nodes[$node_name].out
cost = cnn.nodes['cost'].out
err = error(predict(cnn.nodes['h4'].out), predict(y))
cost.name = 'cost'
err.name = 'error_rate'
model.graphs = [cnn]

# Define your optimizer: Momentum (Nesterov), RMSProp, Adam
optimizer = Adam(
    #lr=0.00005
    lr=0.0005
)
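The "# Global average pooling missing" comment points out that h4 reads directly from c2. For reference, a minimal raw-Theano sketch of what global average pooling computes (this is not cle's layer API; fmap is an illustrative name):

import theano.tensor as T

# A 4D conv output has shape (batch, channels, rows, cols); global
# average pooling collapses each feature map to its spatial mean,
# giving a (batch, channels) matrix that could feed the softmax layer.
fmap = T.tensor4('fmap')       # stands in for the output of c2
gap = fmap.mean(axis=[2, 3])   # average over the two spatial axes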
Example #3
            unit='tanh',
            init_W=init_W,
            init_U=init_U,
            init_b=init_b)

h4 = FullyConnectedLayer(name='h4',
                         parent=['h1', 'h2', 'h3'],
                         nout=res,
                         unit='sigmoid',
                         init_W=init_W,
                         init_b=init_b)

cost = MSELayer(name='cost', parent=['h4', 'y'])

nodes = [h1, h2, h3, h4, cost]
rnn = Net(inputs=inputs, inputs_dim=inputs_dim, nodes=nodes)
cost = unpack(rnn.build_recurrent_graph(output_args=[cost]))
cost = cost.mean()
cost.name = 'cost'
model.graphs = [rnn]

optimizer = Adam(lr=0.001)

extension = [
    GradientClipping(batch_size=batch_size),
    EpochCount(100),
    Monitoring(freq=100, ddout=[cost]),
    Picklize(freq=200, path=save_path)
]

mainloop = Training(name='toy_bb_gflstm',
Example #4
# Global average pooling missing
h4 = FullyConnectedLayer(name='h4',
                         parent=['c2'],
                         nout=10,
                         unit='softmax',
                         init_W=init_W,
                         init_b=init_b)

cost = MulCrossEntropyLayer(name='cost', parent=['y', 'h4'])

# You will fill in a list of nodes and feed them to the model constructor
nodes = [c1, c2, h1, h2, h3, h4, cost]

# Your model will build the Theano computational graph
cnn = Net(inputs=inputs, inputs_dim=inputs_dim, nodes=nodes)
cnn.build_graph()

# You can access any output of a node by doing model.nodes[$node_name].out
cost = cnn.nodes['cost'].out
err = error(predict(cnn.nodes['h4'].out), predict(y))
cost.name = 'cost'
err.name = 'error_rate'
model.graphs = [cnn]

# Define your optimizer: Momentum (Nesterov), RMSProp, Adam
optimizer = Adam(
    #lr=0.00005
    lr=0.0005)

extension = [
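    # Hypothetical completion of this truncated list, following the
    # pattern of Example #1; batch_size and save_path are assumed to
    # be defined earlier in the script, and monitoring err alongside
    # cost is an assumption, not code from the source.
    GradientClipping(batch_size=batch_size),
    EpochCount(100),
    Monitoring(freq=100, ddout=[cost, err]),
    Picklize(freq=200, path=save_path)
]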
Example #5
File: mnist.py Project: anirudh9119/cle
                         init_b=init_b)

h2 = FullyConnectedLayer(name='h2',
                         parent=['h1'],
                         nout=10,
                         unit='softmax',
                         init_W=init_W,
                         init_b=init_b)

cost = MulCrossEntropyLayer(name='cost', parent=['onehot', 'h2'])

# You will fill in a list of nodes and feed them to the model constructor
nodes = [onehot, h1, h2, cost]

# Your model will build the Theano computational graph
mlp = Net(inputs=inputs, inputs_dim=inputs_dim, nodes=nodes)
mlp.build_graph()

# You can access any output of a node by doing model.nodes[$node_name].out
cost = mlp.nodes['cost'].out
err = error(predict(mlp.nodes['h2'].out), predict(mlp.nodes['onehot'].out))
cost.name = 'cost'
err.name = 'error_rate'
model.graphs = [mlp]

# Define your optimizer: Momentum (Nesterov), RMSProp, Adam
optimizer = RMSProp(
    lr=0.001
)

extension = [
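The err line above composes two cle helpers, predict and error. A minimal raw-Theano sketch of their usual semantics (an assumption about the helpers, not cle's actual source):

import theano.tensor as T

# predict: index of the most probable class per row of a softmax output.
def predict(probs):
    return T.argmax(probs, axis=1)

# error: fraction of examples where the predicted and true class
# indices disagree, matching error(predict(...), predict(...)) above.
def error(y_pred, y_true):
    return T.neq(y_pred, y_true).mean()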
Example #6
File: music.py Project: anirudh9119/cle
          batch_size=batch_size,
          nout=50,
          unit='tanh',
          init_W=init_W,
          init_U=init_U,
          init_b=init_b)

h4 = FullyConnectedLayer(name='h4',
                         parent=['h1', 'h2', 'h3'],
                         nout=nlabel,
                         unit='sigmoid',
                         init_W=init_W,
                         init_b=init_b)

nodes = [h1, h2, h3, h4]
rnn = Net(inputs=inputs, inputs_dim=inputs_dim, nodes=nodes)
y_hat = rnn.build_recurrent_graph(output_args=[h4])[0]
masked_y = y[mask.nonzero()]
masked_y_hat = y_hat[mask.nonzero()]
cost = NllBin(masked_y, masked_y_hat).sum()
nll = NllBin(masked_y, masked_y_hat).mean()
cost.name = 'cost'
nll.name = 'nll'
model.graphs = [rnn]

optimizer = RMSProp(
    lr=0.0001,
    mom=0.95
)

extension = [
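NllBin above is a binary negative log-likelihood, and the mask.nonzero() indexing restricts it to the valid timesteps of the padded sequences. A minimal sketch of the quantity it stands for (the formula is standard binary cross-entropy; the exact cle implementation is assumed):

import theano.tensor as T

# Hypothetical equivalent of NllBin: elementwise binary cross-entropy
# between targets y in {0, 1} and predictions y_hat in (0, 1).
def nll_bin(y, y_hat):
    return -(y * T.log(y_hat) + (1 - y) * T.log(1 - y_hat))

# The snippet sums this over all unmasked elements for the training
# cost and averages it for the monitored nll.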