def BuildNetwork(self):
    """Build the action-value network: (action, state) -> scalar.

    Reshapes ``self.ActVar`` to ``(batch, 1, 10, 12)``, concatenates it with
    the ``(batch, 10, 10, 12)`` state input along the channel axis (axis=1),
    then applies a stack of batch-normalized leaky-ReLU dense layers tapering
    2000 -> 1000 -> 500 -> 40 -> 5 -> 1.

    Returns:
        The final lasagne layer (output shape ``(batch, 1)``).
    """
    act_var = T.reshape(self.ActVar, (-1, 1, 10, 12))
    # Fix: both input layers previously shared the name '0', which made layer
    # names ambiguous when inspecting the network.  InputLayers hold no
    # trainable parameters, so renaming them cannot affect saved weights.
    act_in = lasagne.layers.InputLayer(
        shape=(None, 1, 10, 12), input_var=act_var, name='0_act')
    state_in = lasagne.layers.InputLayer(
        shape=(None, 10, 10, 12), input_var=self.StateVar, name='0_state')
    network = lasagne.layers.ConcatLayer([act_in, state_in], axis=1)
    # Each hidden stage: dense layer with He-style ('relu') init and leaky
    # ReLU, followed by batch norm.  Dense/BN names are kept identical to the
    # original so any name-based parameter lookup elsewhere still matches.
    for num_units, tag in ((2000, '1'), (1000, '2'), (500, '3'),
                           (40, '4'), (5, '5'), (1, '6')):
        network = lasagne.layers.DenseLayer(
            network,
            num_units=num_units,
            W=lasagne.init.GlorotUniform(gain='relu'),
            nonlinearity=lasagne.nonlinearities.leaky_rectify,
            name=tag)
        network = BN.batch_norm(network, name=tag)
    # NOTE(review): batch norm is also applied to the final 1-unit output,
    # which normalizes the value estimate across the batch -- confirm this
    # is intentional for a value/Q head.
    return network
def BuildNetwork(self):
    """Build the state network: (batch, 10, 10, 12) -> (batch, 10, 12).

    A stack of batch-normalized leaky-ReLU dense layers
    (1500 -> 1000 -> 500 -> 300) feeds a 120-unit sigmoid head, which is
    batch-normalized and reshaped to a ``(batch, 10, 12)`` output map.

    Returns:
        The final lasagne layer.
    """
    net = lasagne.layers.InputLayer(
        shape=(None, 10, 10, 12), input_var=self.InputVar, name='0')
    # Hidden stack: He-style ('relu') initialized dense layers with leaky
    # ReLU, each followed by batch norm.
    hidden_spec = ((1500, '1'), (1000, '2'), (500, '3'), (300, '4'))
    for width, tag in hidden_spec:
        net = lasagne.layers.DenseLayer(
            net,
            num_units=width,
            W=lasagne.init.GlorotUniform(gain='relu'),
            nonlinearity=lasagne.nonlinearities.leaky_rectify,
            name=tag)
        net = BN.batch_norm(net, name=tag)
    # Output head: 120 sigmoid units (unit gain init), batch-normalized,
    # then reshaped so each batch row becomes a 10x12 map.
    net = lasagne.layers.DenseLayer(
        net,
        num_units=120,
        W=lasagne.init.GlorotUniform(gain=1.0),
        nonlinearity=lasagne.nonlinearities.sigmoid,
        name='5')
    net = BN.batch_norm(net, name='5')
    net = lasagne.layers.ReshapeLayer(net, shape=([0], 10, 12), name='5')
    return net