Example #1
def train_prop(self, z):
    # Training-time propagation: unpack the input, apply dropout with the
    # layer's rate self.p and Theano RNG, name the result, and rescale it
    # by the layer's train-time scale factor.
    z = unpack(z)
    z = dropout(z, self.p, self.theano_rng)
    z.name = self.name
    return z * self.train_scale
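
For context, here is a minimal, self-contained NumPy sketch of the kind of train-time dropout step train_prop performs above. Treating p as the drop probability and choosing train_scale = 1 / (1 - p) are assumptions for illustration, not taken from the library's dropout implementation.

import numpy as np

def dropout_train(z, p, rng, train_scale):
    # Keep each unit with probability 1 - p (assumption: p is the drop rate),
    # then multiply by train_scale so the expected activation matches the
    # no-dropout forward pass when train_scale == 1 / (1 - p).
    mask = rng.binomial(n=1, p=1.0 - p, size=z.shape)
    return z * mask * train_scale

rng = np.random.default_rng(0)
z = np.ones((4, 3))
print(dropout_train(z, p=0.5, rng=rng, train_scale=2.0))  # surviving units become 2.0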
Example #2
# Note: the snippet starts mid-call; these are the trailing keyword arguments
# of the softmax output node's constructor.
                             unit='softmax',
                             init_W=init_W,
                             init_b=init_b)

# Fill in the list of nodes that make up the model
nodes = [h1, output]

# Initialize the nodes
params = OrderedDict()
for node in nodes:
    params.update(node.initialize())
params = init_tparams(params)
nparams = add_noise_params(params, std_dev=std_dev)
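# At this point params holds the model's shared variables, while nparams
# (built by add_noise_params with std_dev) presumably holds noise-perturbed
# counterparts, used only in the training-time graph below.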

# Build the Theano computational graph
d_x = inp_scale * dropout(x, p=inp_p)
h1_out = h1.fprop([d_x], nparams)
d1_out = int_scale * dropout(h1_out, p=int_p)
y_hat = output.fprop([d1_out], nparams)

# Compute the cost
cost = NllMulInd(y, y_hat).mean()
err = error(predict(y_hat), y)
cost.name = 'cross_entropy'
err.name = 'error_rate'

# Separate computational graph to compute monitoring values without
# applying the noise processes (dropout and parameter noise)
m_h1_out = h1.fprop([x], params)
m_y_hat = output.fprop([m_h1_out], params)
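
As a follow-up sketch (not part of the original snippet): the two graphs above would typically be compiled into separate Theano functions, one evaluating the noisy training-time graph and one the clean monitoring graph. The names x, y, cost, err, m_y_hat, NllMulInd, error and predict are taken from the example; the monitoring cost and error variables added here are illustrative.

import theano

# Training-time function: evaluates the noisy, dropped-out graph.
train_fn = theano.function([x, y], [cost, err])

# Monitoring function: evaluates the clean graph (no dropout, no parameter noise).
m_cost = NllMulInd(y, m_y_hat).mean()
m_err = error(predict(m_y_hat), y)
m_cost.name = 'monitor_cross_entropy'
m_err.name = 'monitor_error_rate'
monitor_fn = theano.function([x, y], [m_cost, m_err])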