Example #1
h4 = FullyConnectedLayer(name='h4',
                         parent=['h1', 'h2', 'h3'],
                         nout=res,
                         unit='sigmoid',
                         init_W=init_W,
                         init_b=init_b)

cost = MSELayer(name='cost', parent=['h4', 'y'])

nodes = [h1, h2, h3, h4, cost]
rnn = Net(inputs=inputs, inputs_dim=inputs_dim, nodes=nodes)
cost = unpack(rnn.build_recurrent_graph(output_args=[cost]))
cost = cost.mean()
cost.name = 'cost'
model.graphs = [rnn]

optimizer = Adam(
    lr=0.001
)

extension = [
    GradientClipping(batch_size=batch_size),
    EpochCount(100),
    Monitoring(freq=100,
               ddout=[cost]),
    Picklize(freq=200, path=save_path)
]

mainloop = Training(
    name='toy_bb_gflstm',
    data=Iterator(trdata, batch_size),
    model=model,
    optimizer=optimizer,
    cost=cost,
    extension=extension  # extension list defined above; kwarg name assumed
)
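
Adam keeps per-parameter estimates of the gradient's first and second
moments. As a reference for what Adam(lr=0.001) does per step, here is a
minimal NumPy sketch; beta1, beta2, and eps are the usual defaults and an
assumption about this library's internals, not taken from the example.

import numpy as np

def adam_step(param, grad, m, v, t, lr=0.001,
              beta1=0.9, beta2=0.999, eps=1e-8):
    # Running estimates of the gradient mean and uncentered variance.
    m = beta1 * m + (1 - beta1) * grad
    v = beta2 * v + (1 - beta2) * grad ** 2
    # Bias-correct the estimates (they start at zero).
    m_hat = m / (1 - beta1 ** t)
    v_hat = v / (1 - beta2 ** t)
    # Scale the step per parameter by the RMS of recent gradients.
    param = param - lr * m_hat / (np.sqrt(v_hat) + eps)
    return param, m, v

# Usage: zeroed moments, step counter t starting at 1.
w, m, v = (np.zeros(3) for _ in range(3))
w, m, v = adam_step(w, np.array([0.1, -0.2, 0.3]), m, v, t=1)
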
Example #2
cost = MulCrossEntropyLayer(name='cost', parent=['y', 'h4'])

# You will fill in a list of nodes and feed them to the model constructor
nodes = [c1, c2, h1, h2, h3, h4, cost]

# Your model will build the Theano computational graph
cnn = Net(inputs=inputs, inputs_dim=inputs_dim, nodes=nodes)
cnn.build_graph()

# You can access any output of a node by doing model.nodes[$node_name].out
cost = cnn.nodes['cost'].out
err = error(predict(cnn.nodes['h4'].out), predict(y))
cost.name = 'cost'
err.name = 'error_rate'
model.graphs = [cnn]

# Define your optimizer: Momentum (Nesterov), RMSProp, Adam
optimizer = Adam(
    #lr=0.00005
    lr=0.0005
)

extension = [
    GradientClipping(batch_size=batch_size),
    EpochCount(100),
    Monitoring(freq=100,
               ddout=[cost, err],
               data=[Iterator(testdata, batch_size)]),
    Picklize(freq=10000,
             path=save_path)
]
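
The cost and error_rate outputs above presumably pair a softmax output
(h4) with one-hot targets (y), with predict taken as an argmax over
classes; that reading is an assumption, not documented behavior. A NumPy
sketch of the two quantities under that assumption:

import numpy as np

def mul_cross_entropy(probs, onehot):
    # Mean multiclass cross-entropy against one-hot targets.
    return -np.mean(np.sum(onehot * np.log(probs + 1e-8), axis=1))

def error_rate(probs, onehot):
    # Fraction of rows where the argmax class misses the true class.
    return np.mean(np.argmax(probs, axis=1) != np.argmax(onehot, axis=1))

probs = np.array([[0.7, 0.2, 0.1], [0.1, 0.8, 0.1]])
onehot = np.array([[1, 0, 0], [0, 0, 1]])
print(mul_cross_entropy(probs, onehot), error_rate(probs, onehot))
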
Example #3
cost = MulCrossEntropyLayer(name='cost', parent=['y', 'h4'])

# You will fill in a list of nodes and feed them to the model constructor
nodes = [c1, c2, h1, h2, h3, h4, cost]

# Your model will build the Theano computational graph
cnn = Net(inputs=inputs, inputs_dim=inputs_dim, nodes=nodes)
cnn.build_graph()

# You can access any output of a node by doing model.nodes[$node_name].out
cost = cnn.nodes['cost'].out
err = error(predict(cnn.nodes['h4'].out), predict(y))
cost.name = 'cost'
err.name = 'error_rate'
model.graphs = [cnn]

# Define your optimizer: Momentum (Nesterov), RMSProp, Adam
optimizer = Adam(
    #lr=0.00005
    lr=0.0005)

extension = [
    GradientClipping(batch_size=batch_size),
    EpochCount(100),
    Monitoring(freq=100,
               ddout=[cost, err],
               data=[Iterator(test_data, batch_size)]),
    Picklize(freq=10000, path=save_path)
]
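
GradientClipping's exact behavior is not shown in these snippets. A
common scheme is rescaling by the global L2 norm of all gradients, as in
this NumPy sketch; the max_norm threshold here is an illustration
parameter, not the real extension's argument (which takes batch_size
above).

import numpy as np

def clip_by_global_norm(grads, max_norm=1.0):
    # Joint L2 norm over every gradient array in the list.
    total = np.sqrt(sum(np.sum(g ** 2) for g in grads))
    # Scale factor is at most 1, so norms only ever shrink.
    scale = max_norm / max(total, max_norm)
    return [g * scale for g in grads]

grads = [np.array([3.0, 4.0]), np.array([12.0])]   # global norm = 13
print(clip_by_global_norm(grads, max_norm=5.0))    # rescaled to norm 5
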
Example #4
h4 = FullyConnectedLayer(name='h4',
                         parent=['h1', 'h2', 'h3'],
                         nout=res,
                         unit='sigmoid',
                         init_W=init_W,
                         init_b=init_b)

cost = MSELayer(name='cost', parent=['h4', 'y'])

nodes = [h1, h2, h3, h4, cost]
rnn = Net(inputs=inputs, inputs_dim=inputs_dim, nodes=nodes)
cost = unpack(rnn.build_recurrent_graph(output_args=[cost]))
cost = cost.mean()
cost.name = 'cost'
model.graphs = [rnn]

optimizer = Adam(lr=0.001)

extension = [
    GradientClipping(batch_size=batch_size),
    EpochCount(100),
    Monitoring(freq=100, ddout=[cost]),
    Picklize(freq=200, path=save_path)
]

mainloop = Training(name='toy_bb_gflstm',
                    data=Iterator(trdata, batch_size),
                    model=model,
                    optimizer=optimizer,
                    cost=cost,
                    extension=extension)  # extension list defined above; kwarg name assumed
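
The Training object above drives the whole run. As a schematic of what
such a main loop does with the extension list, here is a pure-Python
sketch; the Extension class, hook names, and call order are assumptions
for illustration, not this library's interface.

class Extension:
    # Hypothetical hook fired every freq training steps.
    def __init__(self, freq, fn):
        self.freq, self.fn = freq, fn

def run_training(batches, step_fn, extensions, max_epoch=100):
    t = 0
    for epoch in range(max_epoch):
        for batch in batches:
            cost = step_fn(batch)      # forward/backward + parameter update
            t += 1
            for ext in extensions:
                if t % ext.freq == 0:
                    ext.fn(t, cost)    # e.g. monitor metrics, pickle the model

# Usage with stand-in pieces.
run_training(batches=[1, 2, 3],
             step_fn=lambda b: 0.5,
             extensions=[Extension(2, lambda t, c: print(t, c))],
             max_epoch=1)
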
Example #5
cost = MulCrossEntropyLayer(name='cost', parent=['onehot', 'h2'])

# You will fill in a list of nodes and feed them to the model constructor
nodes = [onehot, h1, h2, cost]

# Your model will build the Theano computational graph
mlp = Net(inputs=inputs, inputs_dim=inputs_dim, nodes=nodes)
mlp.build_graph()

# You can access any output of a node by doing model.nodes[$node_name].out
cost = mlp.nodes['cost'].out
err = error(predict(mlp.nodes['h2'].out), predict(mlp.nodes['onehot'].out))
cost.name = 'cost'
err.name = 'error_rate'
model.graphs = [mlp]

# Define your optimizer: Momentum (Nesterov), RMSProp, Adam
optimizer = RMSProp(
    lr=0.001
)

extension = [
    GradientClipping(),
    EpochCount(40),
    Monitoring(freq=100,
               ddout=[cost, err],
               data=[Iterator(trdata, batch_size),
                     Iterator(valdata, batch_size)]),
    Picklize(freq=200,
             path=save_path)
]
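
Example #5 swaps Adam for RMSProp. For reference, one RMSProp step keeps
a running average of squared gradients and divides the update by its
root; the decay and eps values in this NumPy sketch are the usual
defaults and an assumption about the library's internals.

import numpy as np

def rmsprop_step(param, grad, cache, lr=0.001, decay=0.9, eps=1e-8):
    # Running average of squared gradients.
    cache = decay * cache + (1 - decay) * grad ** 2
    # Divide the step by the RMS gradient, per parameter.
    param = param - lr * grad / (np.sqrt(cache) + eps)
    return param, cache

w, cache = np.zeros(2), np.zeros(2)
w, cache = rmsprop_step(w, np.array([0.5, -1.0]), cache)
print(w)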