def test_affine_wrapper(backend_default):
    """
    Check that the Affine compound layer expands into the expected
    list of layer objects for each bias/activation combination.
    """
    nout = 11

    def check(layers, expected_types):
        # Every Affine() call must yield a plain list whose elements are
        # instances of the expected layer classes, in order.
        assert isinstance(layers, list)
        assert len(layers) == len(expected_types)
        for layer, cls in zip(layers, expected_types):
            assert isinstance(layer, cls)

    # Bare Affine: a single Linear layer carrying the requested width.
    plain = Affine(nout, Uniform())
    check(plain, [Linear])
    assert plain[0].nout == nout

    # With a bias initializer a Bias layer is appended.
    check(Affine(nout, Uniform(), bias=Uniform()), [Linear, Bias])

    # With an activation an Activation layer is appended.
    check(Affine(nout, Uniform(), activation=Rectlin()),
          [Linear, Activation])

    # With both, the order is Linear -> Bias -> Activation.
    check(Affine(nout, Uniform(), bias=Uniform(), activation=Rectlin()),
          [Linear, Bias, Activation])
def test_conv_wrapper(backend_default):
    """
    Check that the Conv compound layer expands into the expected list of
    layer objects; a bias initializer produces the fused Convolution_bias.
    """
    def check(layers, expected_types):
        # Each Conv() call must yield a plain list whose elements are
        # instances of the expected layer classes, in order.
        assert isinstance(layers, list)
        assert len(layers) == len(expected_types)
        for layer, cls in zip(layers, expected_types):
            assert isinstance(layer, cls)

    # Bare Conv: a single Convolution layer.
    check(Conv((4, 4, 3), Uniform()), [Convolution])

    # Bias folds into the fused Convolution_bias layer (still one element).
    check(Conv((4, 4, 3), Uniform(), bias=Uniform()), [Convolution_bias])

    # Activation is appended as a separate layer.
    check(Conv((4, 4, 3), Uniform(), activation=Rectlin()),
          [Convolution, Activation])

    # Bias + activation: fused conv followed by the activation.
    check(Conv((4, 4, 3), Uniform(), bias=Uniform(), activation=Rectlin()),
          [Convolution_bias, Activation])
def create_layers(self, max_action_no):
    """
    Build the network layer stack: three convolutional stages followed by
    a hidden affine layer and a linear output with one unit per action.

    Each stage gets its own initializer scaled by that stage's fan-in via
    self.get_initializer; the same initializer is reused for the bias.
    """
    # (filter shape, stride, fan-in passed to get_initializer)
    conv_specs = [
        ((8, 8, 32), 4, 4 * 8 * 8),
        ((4, 4, 64), 2, 32 * 4 * 4),
        ((3, 3, 64), 1, 64 * 3 * 3),
    ]
    layers = []
    for fshape, stride, fan_in in conv_specs:
        init = self.get_initializer(input_size=fan_in)
        layers.append(
            Conv(fshape=fshape, strides=stride, init=init,
                 bias=init, activation=Rectlin()))

    # Hidden fully-connected layer; fan-in is the flattened conv output.
    hidden_init = self.get_initializer(input_size=7 * 7 * 64)
    layers.append(
        Affine(nout=512, init=hidden_init, bias=hidden_init,
               activation=Rectlin()))

    # Output layer: one unit per action, no activation (raw Q-values).
    output_init = self.get_initializer(input_size=512)
    layers.append(
        Affine(nout=max_action_no, init=output_init, bias=output_init))
    return layers
def test_conv_wrapper(backend_default):
    """
    Check that the Conv compound layer expands into the expected list of
    layer objects. The fused Convolution_bias path is deliberately
    disabled ("temp roll back"), so a bias always adds a separate Bias
    layer for now.
    """
    def check(layers, expected_types):
        # Each Conv() call must yield a plain list whose elements are
        # instances of the expected layer classes, in order.
        assert isinstance(layers, list)
        assert len(layers) == len(expected_types)
        for layer, cls in zip(layers, expected_types):
            assert isinstance(layer, cls)

    # Bare Conv: a single Convolution layer.
    check(Conv((4, 4, 3), Uniform()), [Convolution])

    conv = Conv((4, 4, 3), Uniform(), bias=Uniform())
    # temp roll back conv_bias: the `False and` guard keeps the fused-bias
    # expectation around but unreachable until the rollback is reverted.
    if False and conv[0].be.is_mkl():
        check(conv, [Convolution_bias])
    else:
        check(conv, [Convolution, Bias])

    # Activation is appended as a separate layer.
    check(Conv((4, 4, 3), Uniform(), activation=Rectlin()),
          [Convolution, Activation])

    conv = Conv((4, 4, 3), Uniform(), bias=Uniform(), activation=Rectlin())
    # temp roll back conv_bias (see above).
    if False and conv[0].be.is_mkl():
        check(conv, [Convolution_bias, Activation])
    else:
        check(conv, [Convolution, Bias, Activation])
from neon.layers import Conv, Affine, Pooling
from neon.initializers import Uniform
from neon.transforms.activation import Rectlin, Softmax
from neon.models import Model
from neon.initializers import Kaiming
from neon.optimizers import Adadelta
from neon.layers import GeneralizedCost
from neon.transforms import CrossEntropyMulti
from neon.optimizers import GradientDescentMomentum, RMSProp
from neon.callbacks.callbacks import Callbacks
from neon.data import ArrayIterator
from Readfile import DataSet, readfile

# NOTE(review): gen_backend is used below but not imported in this chunk —
# presumably `from neon.backends import gen_backend` appears earlier in the
# file; confirm.
be = gen_backend(backend='cpu', batch_size=30)

# Uniform weight initializer shared by every layer.
init_uni = Uniform(low=-0.1, high=0.1)

# Three conv -> pool stages, then two fully-connected layers ending in an
# 11-way softmax classifier.
layers = []
for nfilters in (16, 32, 32):
    layers.append(Conv(fshape=(4, 4, nfilters), init=init_uni,
                       activation=Rectlin()))
    layers.append(Pooling(fshape=2, strides=2))
layers.append(Affine(nout=500, init=init_uni, activation=Rectlin()))
layers.append(Affine(nout=11, init=init_uni, activation=Softmax()))

# Restore previously trained weights for inference.
model = Model(layers)
model.load_params('model.pkl')

# Wrap the test images (no labels) in an iterator; lshape is the
# single-channel 200x200 input the network expects.
data = readfile('PreImage', 'label.csv')
X_test = data.test_data
test_set = ArrayIterator(X_test, None, nclass=11, lshape=(1, 200, 200))
test_set = ArrayIterator(Xtest, y_test, nclass=2, lshape=(1, 12, 12))

from neon.layers import Conv, Affine, Pooling, Dropout
from neon.initializers import Uniform, Constant, Gaussian
from neon.transforms.activation import Rectlin, Softmax

# Uniform weights; try Gaussian(loc=0, scale=0.1) as an alternative.
init_uni = Uniform(low=-0.1, high=0.1)
# Small constant bias so ReLU units don't start out dead.
init_cst = Constant(0.1)

# Minimal CNN: one conv + pool stage and a 2-way softmax output.
layers = [
    Conv(fshape=(2, 2, 32), init=init_uni, bias=init_cst, padding=0,
         activation=Rectlin()),
    Pooling(fshape=2, strides=2),
    Affine(nout=2, init=init_uni, activation=Softmax()),
]

from neon.models import Model
model = Model(layers)

from neon.layers import GeneralizedCost
from neon.transforms import CrossEntropyBinary
cost = GeneralizedCost(costfunc=CrossEntropyBinary())

from neon.optimizers import GradientDescentMomentum
optimizer = GradientDescentMomentum(0.1, momentum_coef=0.9)

from neon.callbacks.callbacks import Callbacks
# NOTE(review): train_set is defined outside this chunk — confirm it exists
# before this line runs.
callbacks = Callbacks(model, train_set)