Example #1
# Initialize the nodes
for node in nodes:
    node.initialize()

# Collect parameters
params = flatten([node.get_params().values() for node in nodes])

# Build the Theano computational graph
h1_out = h1.fprop([x])
d1_out = d1.fprop([h1_out])
h2_out = h2.fprop([d1_out])
d2_out = d2.fprop([h2_out])
y_hat = output.fprop([d2_out])

# Compute the cost
cost = NllMulInd(y, y_hat).mean()
err = error(predict(y_hat), y)
cost.name = 'cross_entropy'
err.name = 'error_rate'

# Build a separate graph on the monitoring inputs; the dropout nodes are
# switched to mode 1 (evaluation) and left out of this graph, so the
# monitored values are free of dropout noise
d1.set_mode(1)
d2.set_mode(1)
mn_h1_out = h1.fprop([mn_x])
mn_h2_out = h2.fprop([mn_h1_out])
mn_y_hat = output.fprop([mn_h2_out])

mn_cost = NllMulInd(mn_y, mn_y_hat).mean()
mn_err = error(predict(mn_y_hat), mn_y)
mn_cost.name = 'cross_entropy'
mn_err.name = 'error_rate'

monitor_fn = theano.function([mn_x, mn_y], [mn_cost, mn_err])
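A minimal usage sketch of the compiled monitoring function; valid_x and valid_y are assumptions standing in for NumPy arrays shaped like the monitoring inputs:

# Hypothetical call (valid_x, valid_y are assumed NumPy arrays shaped like
# mn_x, mn_y): returns the clean cross-entropy and error rate of the batch.
valid_nll, valid_err = monitor_fn(valid_x, valid_y)
print('valid nll=%.4f, err=%.4f' % (valid_nll, valid_err))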
Example #2
# You will fill in a list of nodes
nodes = [h1, output]

# Initialize the nodes
for node in nodes:
    node.initialize()

# Collect parameters
params = flatten([node.get_params().values() for node in nodes])

# Build the Theano computational graph
h1_out = h1.fprop([x])
y_hat = output.fprop([h1_out])

# Compute the cost
cost = NllMulInd(y, y_hat).mean()
err = error(predict(y_hat), y)
cost.name = 'cross_entropy'
err.name = 'error_rate'

model.inputs = [x, y]
model._params = params
model.nodes = nodes

# Define your optimizer: Momentum (Nesterov), RMSProp, Adam
optimizer = RMSProp(lr=0.001)

extension = [
    GradientClipping(),
    EpochCount(40),
    Monitoring(freq=100,
               ddout=[cost, err]),
]
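These pieces are typically handed to a training main loop; a hedged sketch, where the Training class, its keyword arguments, and trdata are all assumptions not shown in the snippet:

# Hypothetical wiring: the main loop draws batches from the iterator,
# updates the parameters via the optimizer on `cost`, and fires each
# extension at the appropriate point in training.
mainloop = Training(name='mlp',
                    data=Iterator(trdata, batch_size),
                    model=model,
                    optimizer=optimizer,
                    cost=cost,
                    outputs=[cost, err],
                    extension=extension)
mainloop.run()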
Example #3
# Initialize the nodes
params = OrderedDict()
for node in nodes:
    params.update(node.initialize())
params = init_tparams(params)
# Add noise to the parameters (weight noise) for the training graph
nparams = add_noise_params(params, std_dev=std_dev)

# Build the Theano computational graph
d_x = inp_scale * dropout(x, p=inp_p)
h1_out = h1.fprop([d_x], nparams)
d1_out = int_scale * dropout(h1_out, p=int_p)
y_hat = output.fprop([d1_out], nparams)

# Compute the cost
cost = NllMulInd(y, y_hat).mean()
err = error(predict(y_hat), y)
cost.name = 'cross_entropy'
err.name = 'error_rate'

# Separate computational graph to compute monitoring values without
# the noise processes applied
m_h1_out = h1.fprop([x], params)
m_y_hat = output.fprop([m_h1_out], params)

m_cost = NllMulInd(y, m_y_hat).mean()
m_err = error(predict(m_y_hat), y)
m_cost.name = 'cross_entropy'
m_err.name = 'error_rate'

monitor_fn = theano.function([x, y], [m_cost, m_err])
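The scale factors inp_scale and int_scale are not defined in the snippet; a common convention they likely follow is inverted dropout, where kept units are rescaled so the expected activation matches the clean graph. The concrete values below are illustrative assumptions:

# Inverted-dropout convention (illustrative): with drop probability p, each
# unit survives with probability 1 - p, so scaling by 1 / (1 - p) keeps the
# expected pre-activation the same between the noisy and clean graphs.
inp_p, int_p = 0.2, 0.5          # hypothetical drop probabilities
inp_scale = 1.0 / (1.0 - inp_p)  # 1.25
int_scale = 1.0 / (1.0 - int_p)  # 2.0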
Example #4
                         unit='softmax',
                         init_W=init_W,
                         init_b=init_b)

cost = MulCrossEntropyLayer(name='cost', parent=['y', 'h4'])

# You will fill in a list of nodes and feed them to the model constructor
nodes = [c1, c2, h1, h2, h3, h4, cost]

# Your model will build the Theano computational graph
cnn = Net(inputs=inputs, inputs_dim=inputs_dim, nodes=nodes)
cnn.build_graph()

# You can access any output of a node by doing model.nodes[$node_name].out
cost = cnn.nodes['cost'].out
err = error(predict(cnn.nodes['h4'].out), predict(y))
cost.name = 'cost'
err.name = 'error_rate'
model.graphs = [cnn]

# Define your optimizer: Momentum (Nesterov), RMSProp, Adam
optimizer = Adam(
    #lr=0.00005
    lr=0.0005
)

extension = [
    GradientClipping(batch_size=batch_size),
    EpochCount(100),
    Monitoring(freq=100,
               ddout=[cost, err]),
]
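Because every node's output is reachable through cnn.nodes, an intermediate activation can be compiled into its own Theano function; a sketch, assuming inputs holds the symbolic input variables and h3 is a hidden layer of this network:

import theano

# Compile a feature extractor from an intermediate node; on_unused_input
# tolerates inputs (e.g. the labels) that h3's output does not depend on.
feature_fn = theano.function(inputs, cnn.nodes['h3'].out,
                             on_unused_input='ignore')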
Example #5
                         unit='softmax',
                         init_W=init_W,
                         init_b=init_b)

cost = MulCrossEntropyLayer(name='cost', parent=['y', 'h4'])

# You will fill in a list of nodes and feed them to the model constructor
nodes = [c1, c2, h1, h2, h3, h4, cost]

# Your model will build the Theano computational graph
cnn = Net(inputs=inputs, inputs_dim=inputs_dim, nodes=nodes)
cnn.build_graph()

# You can access any output of a node by doing model.nodes[$node_name].out
cost = cnn.nodes['cost'].out
err = error(predict(cnn.nodes['h4'].out), predict(y))
cost.name = 'cost'
err.name = 'error_rate'
model.graphs = [cnn]

# Define your optimizer: Momentum (Nesterov), RMSProp, Adam
optimizer = Adam(
    #lr=0.00005
    lr=0.0005)

extension = [
    GradientClipping(batch_size=batch_size),
    EpochCount(100),
    Monitoring(freq=100,
               ddout=[cost, err],
               data=[Iterator(test_data, batch_size)]),
]
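The data argument of Monitoring takes a list of iterators, so several evaluation streams can be watched at once; a hedged sketch, where valid_data is an assumed held-out set:

# Hypothetical: watch a validation stream alongside the test stream
# (valid_data is assumed); Monitoring evaluates ddout on each iterator.
monitor = Monitoring(freq=100,
                     ddout=[cost, err],
                     data=[Iterator(valid_data, batch_size),
                           Iterator(test_data, batch_size)])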
Example #6
                         unit='softmax',
                         init_W=init_W,
                         init_b=init_b)

cost = MulCrossEntropyLayer(name='cost', parent=['onehot', 'h2'])

# You will fill in a list of nodes and feed them to the model constructor
nodes = [onehot, h1, h2, cost]

# Your model will build the Theano computational graph
mlp = Net(inputs=inputs, inputs_dim=inputs_dim, nodes=nodes)
mlp.build_graph()

# You can access any output of a node by doing model.nodes[$node_name].out
cost = mlp.nodes['cost'].out
err = error(predict(mlp.nodes['h2'].out), predict(mlp.nodes['onehot'].out))
cost.name = 'cost'
err.name = 'error_rate'
model.graphs = [mlp]

# Define your optimizer: Momentum (Nesterov), RMSProp, Adam
optimizer = RMSProp(
    lr=0.001
)

extension = [
    GradientClipping(),
    EpochCount(40),
    Monitoring(freq=100,
               ddout=[cost, err],
               data=[Iterator(trdata, batch_size)]),
]
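The predict and error helpers are not defined in the snippet; the usual reading, taken as an assumption here, is argmax over class scores and mean mismatch rate. A plain NumPy illustration makes that concrete:

import numpy as np

# Illustration of the assumed semantics, not the library's implementation:
# predict == argmax over class scores, error == mean of mismatches.
probs = np.array([[0.1, 0.9],
                  [0.8, 0.2],
                  [0.3, 0.7]])
labels = np.array([1, 0, 0])
pred = probs.argmax(axis=1)          # -> [1, 0, 1]
err_rate = (pred != labels).mean()   # -> 1/3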
Example #7
# You will fill in a list of nodes
nodes = [h1, output]

# Initialize the nodes
params = OrderedDict()
for node in nodes:
    params.update(node.initialize())
params = init_tparams(params)

# Build the Theano computational graph
h1_out = h1.fprop([x], params)
y_hat = output.fprop([h1_out], params)

# Compute the cost
cost = NllMulInd(y, y_hat).mean()
err = error(predict(y_hat), y)
cost.name = 'cross_entropy'
err.name = 'error_rate'

model.inputs = [x, y]
model.params = params
model.nodes = nodes

# Define your optimizer: Momentum (Nesterov), RMSProp, Adam
optimizer = RMSProp(
    lr=0.01
)

extension = [
    GradientClipping(batch_size=batch_size, check_nan=1),
    EpochCount(500),
]
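GradientClipping's internals are not shown; a generic global-norm clipping sketch in plain Theano, under the assumption that this is the kind of rescaling the extension performs (check_nan=1 presumably additionally guards against NaN gradients):

import theano.tensor as T

def clip_by_global_norm(grads, threshold=1.0):
    # Rescale all gradients jointly so their global L2 norm is at most
    # `threshold`; a no-op when the norm is already below the threshold.
    norm = T.sqrt(sum(T.sum(g ** 2) for g in grads))
    scale = T.minimum(1.0, threshold / (norm + 1e-8))
    return [g * scale for g in grads]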