Code example #1
0
 def test_draw_to_notebook_layers(self, net_fitted):
     """Smoke test: drawing a plain layer list to the notebook must not raise."""
     from nolearn.lasagne.visualize import draw_to_notebook
     layer_list = net_fitted.get_all_layers()
     draw_to_notebook(layer_list, output_shape=False)
Code example #2
0
def draw():
    """Render the module-level ``net``: save a PNG layout, then show it inline."""
    global net
    # The layer graph must be materialized before any drawing call.
    net.initialize()
    # Persist the diagram to disk first, then display it in the notebook.
    draw_to_file(net, "layout.png", verbose=True)
    draw_to_notebook(net)
Code example #3
0
 def test_draw_to_notebook_net(self, net_fitted):
     """Smoke test: drawing the fitted net object itself must not raise."""
     from nolearn.lasagne.visualize import draw_to_notebook
     render = draw_to_notebook
     render(net_fitted, output_shape=False)
Code example #4
0
File: mnist_nolearn.py  Project: wwt1990/MNIST
    max_epochs = 10,
    update = adam, # For 'adam', a small learning rate is best
    update_learning_rate = 0.0002,
    objective_l2 = 0.0025, # L2 regularization
    train_split = TrainSplit(eval_size = 0.25),
    verbose = 1
)
# Train the net defined above, then walk through nolearn's diagnostic plots.
# NOTE(review): net0, X_train and y_train are defined earlier in the original
# file; this fragment cannot run on its own.
net0.fit(X_train, y_train)


# visualization
from nolearn.lasagne.visualize import draw_to_notebook, plot_loss
from nolearn.lasagne.visualize import plot_conv_weights, plot_conv_activity
# NOTE(review): plot_saliency is imported but never called below.
from nolearn.lasagne.visualize import plot_occlusion, plot_saliency

# Architecture diagram and train/validation loss curves.
draw_to_notebook(net0)
plot_loss(net0)
#plot helps determine if we are overfitting:
#If the train loss is much lower than the validation loss,
#we should probably do something to regularize the net.

# visualize layer weights
# layers_[1] is presumably the first convolutional layer — confirm against
# the layer list defined earlier in the file.
plot_conv_weights(net0.layers_[1], figsize = (4,4))
#If the weights just look like noise, we might have to do something
#(e.g. use more filters so that each can specialize better).

# visualize layers' activities
x = X_train[0:1] # an image in the bc01 format (so use X[0:1] instead of just X[0]).
plot_conv_activity(net0.layers_[1], x)

# Occlusion maps for the first five training samples: shows which image
# regions the prediction depends on.
plot_occlusion(net0, X_train[:5], y_train[:5])
Code example #5
0
    (layers.DenseLayer, {'num_units': 1, 'nonlinearity': lasagne.nonlinearities.softmax}),
]


# Build a second NeuralNet from the layers0 spec (defined above this fragment)
# and, separately, wire up a raw Theano training function for `network6`.
# NOTE(review): network6, var_in, var_t and timgi are defined elsewhere in the
# original file; this fragment is not self-contained.
net1 = NeuralNet(
    layers=layers0,
    # layer parameters:
    max_epochs=10,
    update=lasagne.updates.sgd,
    update_learning_rate=0.0002,
    objective_l2=0.0025,
   # train_split=TrainSplit(eval_size=0.25),
    verbose=1,
    )

# Render the architecture diagram inline.
draw_to_notebook(net1)
# Clip predictions into [1e-2, 1 - 1e-2] — presumably to keep the
# binary cross-entropy away from log(0); confirm the intended bound.
prediction = lasagne.layers.get_output(network6)
prediction=T.clip(prediction,1e-2, 1.0 - 1e-2)
loss = lasagne.objectives.binary_crossentropy(prediction, var_t).mean()
all_params = lasagne.layers.get_all_params(network6)
updates=lasagne.updates.adagrad(loss,all_params)
    
# Compile a single training step: inputs + targets -> mean loss, with
# Adagrad parameter updates applied as a side effect.
train = theano.function([var_in, var_t], loss, updates=updates)

# Debug-friendly Theano settings: verbose optimizer, test values enabled,
# and the faster (less optimized) compilation mode.
theano.config.optimizer_verbose = 1
theano.config.compute_test_value = 'warn'
theano.config.optimizer='fast_compile'

# Adds two leading axes and selects index 0 of the last axis of timgi,
# i.e. shapes a single image into a 4-D (batch, channel, H, W) input.
varin= timgi[None,None,:,:,0]

# varin = np.transpose(varin, (0,3,1,2))
Code example #6
0
File: test_visualize.py  Project: Lomascolo/nolearn
 def test_draw_to_notebook_layers(self, net_fitted):
     """Drawing the fitted net's layer list inline should complete without error."""
     from nolearn.lasagne.visualize import draw_to_notebook
     all_layers = net_fitted.get_all_layers()
     draw_to_notebook(all_layers, output_shape=False)
Code example #7
0
File: test_visualize.py  Project: Lomascolo/nolearn
 def test_draw_to_notebook_net(self, net_fitted):
     """Drawing the fitted net object inline should complete without error."""
     from nolearn.lasagne.visualize import draw_to_notebook
     draw = draw_to_notebook
     draw(net_fitted, output_shape=False)
Code example #8
0
# Python 2 script fragment: sanity-checks the training arrays, fits net0,
# draws the network, then iterates one batch generator.
# NOTE(review): net0, net, bi, X_train/Y_train/y_train come from earlier in
# the original file; this fragment cannot run on its own.
# from nolearn.lasagne import PrintLayerInfo

# layer_info=PrintLayerInfo()
#################################################################
# type(Y_train)
# Sanity check: highest label value (Python 2 print statement).
print max(Y_train)
# Y_train=np.asarray(Y_train)
# X_train=np.asarray(X_train)
#Y_train=Y_train.astype(np.int32)
###################################################################

# Confirm the array shapes before fitting.
print X_train.shape
print Y_train.shape

###################################################################
net0.fit(X_train, Y_train)
###################################################################

import pydot

# NOTE(review): this draws `net`, not the `net0` that was just fitted —
# verify which network is intended.
visualize.draw_to_notebook(net)

###################################################################
import cv2
# Drain the batch iterator; the body is a no-op, so this only exercises
# the iteration itself. Xb/yb deliberately leak out of the loop.
for Xb, yb in bi(X_train, y_train):
    a = 1
#     print type(Xb)
# all the input array dimensions except for the concatenation axis must match exactly

# Prints the last batch produced by the loop above.
print Xb