Example #1
    def test_repeated_reverse_inline_connection(self):
        input_layer_1 = layers.Input(1)
        input_layer_2 = layers.Input(1)
        hidden_layer = layers.Relu(4)
        output_layer = layers.Softmax(5)

        connection_1 = output_layer < hidden_layer < input_layer_1
        connection_2 = output_layer < hidden_layer < input_layer_2

        self.assertListEqual(list(connection_1),
                             [input_layer_1, hidden_layer, output_layer])

        self.assertListEqual(list(connection_2),
                             [input_layer_2, hidden_layer, output_layer])
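
The reversed operator builds the same chain read right-to-left; a minimal sketch of the equivalence that this test and Example #5 exercise together, using the same inline-operator API:

connection_a = layers.Softmax(5) < layers.Relu(4) < layers.Input(1)
connection_b = layers.Input(1) > layers.Relu(4) > layers.Softmax(5)
# Both describe the chain Input(1) -> Relu(4) -> Softmax(5)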
Example #2
    def test_mixture_of_experts(self):
        dataset = datasets.load_diabetes()
        data, target = asfloat(dataset.data), asfloat(dataset.target)
        insize, outsize = data.shape[1], 1

        input_scaler = preprocessing.MinMaxScaler((-1, 1))
        output_scaler = preprocessing.MinMaxScaler()
        x_train, x_test, y_train, y_test = model_selection.train_test_split(
            input_scaler.fit_transform(data),
            output_scaler.fit_transform(target.reshape(-1, 1)),
            train_size=0.8)

        n_epochs = 10
        scaled_y_test = output_scaler.inverse_transform(y_test)
        scaled_y_test = scaled_y_test.reshape((y_test.size, 1))

        # -------------- Train single GradientDescent -------------- #

        bpnet = algorithms.GradientDescent((insize, 20, outsize),
                                           step=0.1,
                                           verbose=False)
        bpnet.train(x_train, y_train, epochs=n_epochs)
        network_output = bpnet.predict(x_test)
        network_error = rmsle(output_scaler.inverse_transform(network_output),
                              scaled_y_test)

        # -------------- Train ensemble -------------- #

        moe = algorithms.MixtureOfExperts(
            networks=[
                algorithms.Momentum((insize, 20, outsize),
                                    step=0.1,
                                    batch_size=1,
                                    verbose=False),
                algorithms.Momentum((insize, 20, outsize),
                                    step=0.1,
                                    batch_size=1,
                                    verbose=False),
            ],
            gating_network=algorithms.Momentum(
                layers.Input(insize) > layers.Softmax(2),
                step=0.1,
                verbose=False))
        moe.train(x_train, y_train, epochs=n_epochs)
        ensemble_output = moe.predict(x_test)

        ensemble_error = rmsle(
            output_scaler.inverse_transform(ensemble_output), scaled_y_test)

        self.assertGreater(network_error, ensemble_error)
Example #3
    def test_mixture_of_experts_repr(self):
        moe = algorithms.MixtureOfExperts(
            networks=[
                algorithms.Momentum((3, 2, 1)),
                algorithms.GradientDescent((3, 2, 1)),
            ],
            gating_network=algorithms.Adadelta(
                layers.Input(3) > layers.Softmax(2)))
        moe_repr = str(moe)

        self.assertIn('MixtureOfExperts', moe_repr)
        self.assertIn('Momentum', moe_repr)
        self.assertIn('GradientDescent', moe_repr)
        self.assertIn('Adadelta', moe_repr)
Example #4
    def test_mixture_of_experts_training_exceptions(self):
        moe = algorithms.MixtureOfExperts(
            # Gating network that expects a single input feature
            networks=self.networks,
            gating_network=algorithms.GradientDescent(
                layers.Input(1) > layers.Softmax(2), verbose=False),
        )
        with self.assertRaises(ValueError):
            # Wrong number of train input features
            moe.train(np.array([[1, 2]]), np.array([[0]]))

        with self.assertRaises(ValueError):
            # Wrong number of train output features
            moe.train(np.array([[1]]), np.array([[0, 0]]))
Example #5
    def test_repeated_inline_connections(self):
        input_layer_1 = layers.Input(1)
        input_layer_2 = layers.Input(1)
        hidden_layer = layers.Relu(17)
        output_layer = layers.Softmax(5)

        connection_1 = input_layer_1 > hidden_layer > output_layer
        connection_2 = input_layer_2 > hidden_layer > output_layer

        self.assertListEqual(list(connection_1),
                             [input_layer_1, hidden_layer, output_layer])

        self.assertListEqual(list(connection_2),
                             [input_layer_2, hidden_layer, output_layer])
Example #6
    def test_mixture_of_experts_init_gating_network_exceptions(self):
        networks = self.networks

        with self.assertRaises(ValueError):
            # Invalid gating error function
            algorithms.MixtureOfExperts(
                networks=networks,
                gating_network=algorithms.GradientDescent(
                    layers.Input(1) > layers.Softmax(2),
                    error='rmsle',
                    verbose=False),
            )

        with self.assertRaises(ValueError):
            # Invalid gating network algorithm
            algorithms.MixtureOfExperts(
                networks=networks,
                gating_network=algorithms.PNN(),
            )

        with self.assertRaises(ValueError):
            # Invalid gating network output layer
            algorithms.MixtureOfExperts(
                networks=networks,
                gating_network=algorithms.GradientDescent(
                    layers.Input(1) > layers.Sigmoid(2),
                    verbose=False,
                ))

        with self.assertRaises(ValueError):
            # Invalid gating network output layer size
            algorithms.MixtureOfExperts(
                networks=networks,
                gating_network=algorithms.GradientDescent(
                    layers.Input(1) > layers.Softmax(1),
                    verbose=False,
                ))
Example #7
    def test_repeated_inline_connections_with_list(self):
        input_layer_1 = layers.Input(1)
        input_layer_2 = layers.Input(1)
        hd1 = layers.Relu(4)
        hd2 = layers.Sigmoid(4)
        output_layer = layers.Softmax(5)

        connection_1 = input_layer_1 > [hd1, hd2] > output_layer
        connection_2 = input_layer_2 > [hd1, hd2] > output_layer

        self.assertListEqual(list(connection_1),
                             [input_layer_1, hd1, hd2, output_layer])

        self.assertListEqual(list(connection_2),
                             [input_layer_2, hd1, hd2, output_layer])
Example #8
	def init_network(self, member):
		# member[1] holds the genome: member[1][0] lists the hidden layer
		# sizes and member[1][1] the activation code for each of them
		network = layers.join(layers.Input(self.inputneurons))
		for index in range(len(member[1][0])):
			if member[1][1][index] == 1:
				network = network > layers.Sigmoid(member[1][0][index])
			elif member[1][1][index] == 2:
				network = network > layers.Relu(member[1][0][index])
			elif member[1][1][index] == 3:
				network = network > layers.Softmax(member[1][0][index])
			elif member[1][1][index] == 4:
				network = network > layers.Tanh(member[1][0][index])
			elif member[1][1][index] == 5:
				network = network > layers.LeakyRelu(member[1][0][index])
		network = network > layers.Sigmoid(self.outputneurons)
		return network
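
For reference, a sketch of the genome layout this method assumes (inferred from the population initializer in Example #16, where each member is stored as [network, [sizes, activation_codes], fitness]; the values below are illustrative):

# Activation codes: 1=Sigmoid, 2=Relu, 3=Softmax, 4=Tanh, 5=LeakyRelu
member = [None, [[12, 6], [2, 4]], 0]
# With inputneurons=4 and outputneurons=1, init_network(member) builds:
# Input(4) > Relu(12) > Tanh(6) > Sigmoid(1)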
Example #9
    def test_gated_average_layer_multi_dimensional_inputs(self):
        input_layer = layers.Input((5, 5, 1))
        network = layers.join([
            input_layer > layers.Reshape() > layers.Softmax(2),
            input_layer > layers.Convolution((2, 2, 3)),
            input_layer > layers.Convolution((2, 2, 3)),
        ], layers.GatedAverage())

        self.assertEqual(network.input_shape, (5, 5, 1))
        self.assertEqual(network.output_shape, (4, 4, 3))

        random_input = asfloat(np.random.random((8, 5, 5, 1)))
        actual_output = self.eval(network.output(random_input))

        self.assertEqual(actual_output.shape, (8, 4, 4, 3))
Example #10
    def test_gated_average_layer_multi_dimensional_inputs(self):
        input_layer = layers.Input((1, 5, 5))
        network = layers.join([
            input_layer > layers.Reshape() > layers.Softmax(2),
            input_layer > layers.Convolution((3, 2, 2)),
            input_layer > layers.Convolution((3, 2, 2)),
        ], layers.GatedAverage())

        self.assertEqual(network.input_shape, (1, 5, 5))
        self.assertEqual(network.output_shape, (3, 4, 4))

        predict = network.compile()
        random_input = asfloat(np.random.random((8, 1, 5, 5)))
        actual_output = predict(random_input)

        self.assertEqual(actual_output.shape, (8, 3, 4, 4))
Example #11
def create_VIN(input_image_shape=(2, 8, 8), n_hidden_filters=150,
               n_state_filters=10, k=10):

    HalfPaddingConv = partial(layers.Convolution, padding='half', bias=None)

    R = layers.join(
        layers.Input(input_image_shape, name='grid-input'),
        layers.Convolution((n_hidden_filters, 3, 3),
                           padding='half',
                           weight=init.Normal(),
                           bias=init.Normal()),
        HalfPaddingConv((1, 1, 1), weight=init.Normal()),
    )

    # Create shared weights
    q_weight = random_weight((n_state_filters, 1, 3, 3))
    fb_weight = random_weight((n_state_filters, 1, 3, 3))

    Q = R > HalfPaddingConv((n_state_filters, 3, 3), weight=q_weight)

    for i in range(k):
        V = Q > GlobalMaxPooling()
        Q = layers.join(
            # Convolve R and V separately and then add
            # outputs together with the Elementwise layer
            [[
                R,
                HalfPaddingConv((n_state_filters, 3, 3), weight=q_weight)
            ], [
                V,
                HalfPaddingConv((n_state_filters, 3, 3), weight=fb_weight)
            ]],
            layers.Elementwise(merge_function=T.add),
        )

    input_state_1 = layers.Input(10, name='state-1-input')
    input_state_2 = layers.Input(10, name='state-2-input')

    # Select the conv-net channels at the state position (S1, S2)
    VIN = [Q, input_state_1, input_state_2] > SelectValueAtStatePosition()

    # Set up softmax layer that predicts actions based on the
    # (S1, S2) position. Each action encodes a specific direction:
    # N, S, E, W, NE, NW, SE, SW (in that order)
    VIN = VIN > layers.Softmax(8, bias=None, weight=init.Normal())

    return VIN
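
Sharing q_weight and fb_weight across all k iterations is what makes the loop behave like value iteration: every pass refines V with the same transition filters. A minimal instantiation sketch (random_weight, GlobalMaxPooling and SelectValueAtStatePosition are helpers defined elsewhere in the original script):

# Build the network for 8x8 grid worlds with 10 value-iteration steps
VIN = create_VIN(input_image_shape=(2, 8, 8), k=10)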
Example #12
    def test_inline_connections_after_exception(self):
        # One possible fix: reset all states in connections/inline.py,
        # and when we assign a new shape in
        # connections/graph.py:connect_layers, catch the error if it
        # happens and destroy the connection between the layers
        input_layer = layers.Input(2)

        with self.assertRaises(LayerConnectionError):
            # it is supposed to fail, because the layers in the parallel
            # connection are specified with different output shapes.
            input_layer > [layers.Sigmoid(20),
                           layers.Sigmoid(10)] > layers.Elementwise()

        # Issue #181. Bug presented in NeuPy versions <= 0.7.2
        network = input_layer > layers.Softmax(5)
        self.assertEqual(network.input_shape, (2, ))
        self.assertEqual(network.output_shape, (5, ))
Example #13
    def test_connection_inside_connection_conv(self):
        connection = [
            layers.Input((1, 28, 28)),
            layers.Convolution((8, 3, 3)) > layers.Relu(),
            layers.Convolution((8, 3, 3)) > layers.Relu(),
            layers.MaxPooling((2, 2)),
            layers.Reshape(),
            layers.Softmax(1),
        ]

        network = algorithms.GradientDescent(connection)
        self.assertEqual(8, len(network.layers))

        self.assertIsInstance(network.layers[1], layers.Convolution)
        self.assertIsInstance(network.layers[2], layers.Relu)
        self.assertIsInstance(network.layers[3], layers.Convolution)
        self.assertIsInstance(network.layers[4], layers.Relu)
        self.assertIsInstance(network.layers[5], layers.MaxPooling)
Example #14
    def test_connection_inside_connection_conv(self):
        connection = layers.join(
            layers.Input((28, 28, 1)),
            layers.Convolution((3, 3, 8)) > layers.Relu(),
            layers.Convolution((3, 3, 8)) > layers.Relu(),
            layers.MaxPooling((2, 2)),
            layers.Reshape(),
            layers.Softmax(1),
        )

        self.assertEqual(8, len(connection))

        expected_order = [
            layers.Input, layers.Convolution, layers.Relu, layers.Convolution,
            layers.Relu, layers.MaxPooling, layers.Reshape, layers.Softmax
        ]
        for actual_layer, expected_layer in zip(connection, expected_order):
            self.assertIsInstance(actual_layer, expected_layer)
Example #15
    def test_gated_average_layer_multi_dimensional_inputs(self):
        network = layers.join(
            layers.Input((5, 5, 1)),
            layers.parallel(
                layers.Reshape() >> layers.Softmax(2),
                layers.Convolution((2, 2, 3)),
                layers.Convolution((2, 2, 3)),
            ),
            layers.GatedAverage(),
        )

        self.assertShapesEqual(network.input_shape, (None, 5, 5, 1))
        self.assertShapesEqual(network.output_shape, (None, 4, 4, 3))

        random_input = asfloat(np.random.random((8, 5, 5, 1)))
        actual_output = self.eval(network.output(random_input))

        self.assertEqual(actual_output.shape, (8, 4, 4, 3))
Example #16
	def __init__(self):
		self.population = []

		self.size_population = 20

		self.inputneurons = 4
		self.outputneurons = 1
		self.data = datasets.load_iris()

		for i in range(self.size_population):
			# connections
			network = layers.join(layers.Input(self.inputneurons))
			num = random.randint(1, 4)
			temp1 = [random.randint(1, 50) for _ in range(num)]
			#print(temp1, end="\n\n")
			temp2 = []
			for neu in temp1:
				n = random.randint(1, 5)
				temp2.append(n)
				if n == 1:
					network = network > layers.Sigmoid(neu)
				elif n == 2:
					network = network > layers.Relu(neu)
				elif n == 3:
					network = network > layers.Softmax(neu)
				elif n == 4:
					network = network > layers.Tanh(neu)
				elif n == 5:
					network = network > layers.LeakyRelu(neu)
				#print(network, end="\n~\n")
			network = network > layers.Sigmoid(self.outputneurons)
			attributes = [temp1, temp2]
			self.population.append([network, attributes, 0]) # 0 --> fitness

		#print(self.population)

		self.run()
		while self.best_members[0][2][0] > 1:
			print("next iteration")
			print(self.population)

			self.run()

		file = open("pickle_bestnet.txt", "w")
Example #17
def train(X, Y):

    environment.reproducible()
    img_size = X.shape[1]
    network = algorithms.Momentum(
        [
            layers.Input(img_size),
            layers.Relu(100),
            layers.Softmax(Y.shape[1]),
        ],
        error='categorical_crossentropy',
        step=0.01,
        verbose=True,
        shuffle_data=True,
        momentum=0.9,
        nesterov=True,
    )
    network.architecture()
    network.train(X, Y, epochs=20)
    return network
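
A hedged usage sketch for this helper, assuming X holds flattened images and Y one-hot encoded labels (the dataset and preprocessing below are illustrative, not part of the original):

import numpy as np
from sklearn import datasets, model_selection

digits = datasets.load_digits()
X = digits.data.astype(np.float32) / 16.  # scale pixel counts to [0, 1]
Y = np.eye(10, dtype=np.float32)[digits.target]  # one-hot labels

x_train, x_test, y_train, y_test = model_selection.train_test_split(
    X, Y, train_size=0.8)

network = train(x_train, y_train)
y_predicted = network.predict(x_test).argmax(axis=1)
accuracy = (y_predicted == y_test.argmax(axis=1)).mean()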
Example #18
	def model_network(self, algorithm='LevenbergMarquardt', model=None, opt=None):

		model = self.decode_model(model)
		if model is None:
			model = [
				[1, 'hidden', 15, 'Linear'],
				[2, 'hidden', 10, 'Linear'],
				[3, 'output', self.output_classes, 'Elu']
			]
			# [Input(4), Elu(1)]
			# [Input(4), Elu(6), Elu(1)] EP: 100
		layer_model = [layers.Input(self.input_features)]
		for layer in model:
			if layer[3] == 'Linear':
				layer_model.append(layers.Linear(layer[2]))
			elif layer[3] == 'Relu':
				layer_model.append(layers.Relu(layer[2]))
			elif layer[3] == 'Sigmoid':
				layer_model.append(layers.Sigmoid(layer[2]))
			elif layer[3] == 'HardSigmoid':
				layer_model.append(layers.HardSigmoid(layer[2]))
			elif layer[3] == 'Step':
				layer_model.append(layers.Step(layer[2]))
			elif layer[3] == 'Tanh':
				layer_model.append(layers.Tanh(layer[2]))
			elif layer[3] == 'Softplus':
				layer_model.append(layers.Softplus(layer[2]))
			elif layer[3] == 'Softmax':
				layer_model.append(layers.Softmax(layer[2]))
			elif layer[3] == 'Elu':
				layer_model.append(layers.Elu(layer[2]))
			elif layer[3] == 'PRelu':
				layer_model.append(layers.PRelu(layer[2]))
			elif layer[3] == 'LeakyRelu':
				layer_model.append(layers.LeakyRelu(layer[2]))

		print('Layer model: ' + str(layer_model))

		self.layers = layer_model
		self.select_algorithm(algorithm, options=opt)
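
A sketch of the decoded model rows this loop consumes (each row reads [index, role, n_units, activation_name], matching the defaults above; the values are illustrative and the encoded form handled by decode_model is not shown):

model = [
    [1, 'hidden', 32, 'Relu'],
    [2, 'hidden', 16, 'Relu'],
    [3, 'output', 3, 'Softmax'],
]
# Produces: [Input(input_features), Relu(32), Relu(16), Softmax(3)]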
Example #19
    def test_inline_connection_with_parallel_connection(self):
        left_branch = layers.join(
            layers.Convolution((32, 3, 3)),
            layers.Relu(),
            layers.MaxPooling((2, 2)),
        )

        right_branch = layers.join(
            layers.Convolution((16, 7, 7)),
            layers.Relu(),
        )

        input_layer = layers.Input((3, 10, 10))
        concat = layers.Concatenate()

        network_concat = input_layer > [left_branch, right_branch] > concat
        network = network_concat > layers.Reshape() > layers.Softmax()

        self.assertEqual(network_concat.input_shape, (3, 10, 10))
        self.assertEqual(network_concat.output_shape, (48, 4, 4))

        self.assertEqual(network.input_shape, (3, 10, 10))
        self.assertEqual(network.output_shape, (768,))
Example #20
def train_network(n_hidden, x_train, x_test, y_train, y_test):
    network = algorithms.Momentum(
        [
            layers.Input(64),
            layers.Relu(n_hidden),
            layers.Softmax(10),
        ],

        # Randomly shuffle dataset before each
        # training epoch.
        shuffle_data=True,

        # Do not show training progress in output
        verbose=False,
        step=0.001,
        batch_size=128,
        error='categorical_crossentropy',
    )
    network.train(x_train, y_train, epochs=100)

    # Calculates categorical cross-entropy error between
    # predicted value for x_test and y_test value
    return network.prediction_error(x_test, y_test)
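
The fixed Input(64)/Softmax(10) pair matches scikit-learn's digits dataset; a hedged sketch of sweeping the hidden-layer size with this helper, reusing the digits preparation from the sketch under Example #17:

for n_hidden in (25, 50, 100, 200):
    error = train_network(n_hidden, x_train, x_test, y_train, y_test)
    print(n_hidden, error)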
Example #21
    def test_inline_network_with_parallel_network(self):
        left_branch = layers.join(
            layers.Convolution((3, 3, 32)),
            layers.Relu(),
            layers.MaxPooling((2, 2)),
        )

        right_branch = layers.join(
            layers.Convolution((7, 7, 16)),
            layers.Relu(),
        )

        input_layer = layers.Input((10, 10, 3))
        concat = layers.Concatenate()

        network_concat = input_layer > (left_branch | right_branch) > concat
        network = network_concat > layers.Reshape() > layers.Softmax()

        self.assertShapesEqual(network_concat.input_shape, (None, 10, 10, 3))
        self.assertShapesEqual(network_concat.output_shape, (None, 4, 4, 48))

        self.assertShapesEqual(network.input_shape, (None, 10, 10, 3))
        self.assertShapesEqual(network.output_shape, (None, 768))
Example #22
    def test_save_link_to_assigned_connections(self):
        # Tree structure:
        #
        #                       Sigmoid(10)
        #                      /
        # Input(10) - Sigmoid(5)
        #                      \
        #                       Softmax(20)
        #
        input_layer = layers.Input(10)
        minimized = input_layer > layers.Sigmoid(5)
        reconstructed = minimized > layers.Sigmoid(10)
        classifier = minimized > layers.Softmax(20)

        x_matrix = asfloat(np.random.random((3, 10)))
        minimized_output = self.eval(minimized.output(x_matrix))
        self.assertEqual((3, 5), minimized_output.shape)

        reconstructed_output = self.eval(reconstructed.output(x_matrix))
        self.assertEqual((3, 10), reconstructed_output.shape)

        classifier_output = self.eval(classifier.output(x_matrix))
        self.assertEqual((3, 20), classifier_output.shape)
Example #23
def train_network(parameters):
    print("Parameters:")
    pprint(parameters)
    print()

    step = parameters['step']
    batch_size = int(parameters['batch_size'])
    proba = parameters['dropout']
    activation_layer = parameters['act_func_type']
    layer_sizes = [int(n) for n in parameters['layers']['n_units_layer']]

    network = layers.Input(784)

    for layer_size in layer_sizes:
        network = network > activation_layer(layer_size)

    network = network > layers.Dropout(proba) > layers.Softmax(10)

    mnet = algorithms.RMSProp(
        network,
        batch_size=batch_size,
        step=step,
        error='categorical_crossentropy',
        shuffle_data=True,
        epoch_end_signal=on_epoch_end,
    )
    mnet.train(x_train, y_train, epochs=50)

    score = mnet.prediction_error(x_test, y_test)

    y_predicted = mnet.predict(x_test).argmax(axis=1)
    accuracy = metrics.accuracy_score(y_test.argmax(axis=1), y_predicted)

    print("Final score: {}".format(score))
    print("Accuracy: {:.2%}".format(accuracy))

    return score
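
A sketch of the parameters dictionary this objective consumes (the key names follow the lookups in the function body; the concrete values are illustrative):

from neupy import layers

parameters = {
    'step': 0.01,
    'batch_size': 128,
    'dropout': 0.2,
    'act_func_type': layers.Relu,  # layer class, called once per hidden layer
    'layers': {'n_units_layer': (500, 300)},
}
score = train_network(parameters)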
Example #24
def train_network(n_hidden, x_train, x_test, y_train, y_test, n_classes,
                  n_dimensionality):
    network = algorithms.Momentum(
        [
            layers.Input(n_dimensionality),  #input dimensionality
            layers.Relu(n_hidden),  #optimisable hyperparam
            layers.Softmax(n_classes),  #class output
        ],

        # Randomly shuffle dataset before each
        # training epoch.
        shuffle_data=True,

        # Do not show training progress in output
        verbose=False,
        step=0.001,
        batch_size=128,
        error='categorical_crossentropy',
    )
    network.train(x_train, y_train, x_test, y_test, epochs=100)

    # Calculates categorical cross-entropy error between
    # predicted value for x_test and y_test value
    return network, network.prediction_error(x_test, y_test)
Example #25
         ]],
        layers.Concatenate(),
    )


googlenet = layers.join(
    layers.Input((3, None, None)),
    layers.Convolution((64, 7, 7), padding='half', stride=2),
    layers.Relu(),
    layers.MaxPooling((3, 3), stride=2),
    layers.LocalResponseNorm(alpha=0.00002, k=1),
    layers.Convolution((64, 1, 1)) > layers.Relu(),
    layers.Convolution((192, 3, 3), padding='half') > layers.Relu(),
    layers.LocalResponseNorm(alpha=0.00002, k=1),
    layers.MaxPooling((3, 3), stride=2),
    Inception((32, 64, 96, 128, 16, 32)),
    Inception((64, 128, 128, 192, 32, 96)),
    layers.MaxPooling((3, 3), stride=2),
    Inception((64, 192, 96, 208, 16, 48)),
    Inception((64, 160, 112, 224, 24, 64)),
    Inception((64, 128, 128, 256, 24, 64)),
    Inception((64, 112, 144, 288, 32, 64)),
    Inception((128, 256, 160, 320, 32, 128)),
    layers.MaxPooling((3, 3), stride=2),
    Inception((128, 256, 160, 320, 32, 128)),
    Inception((128, 384, 192, 384, 48, 128)),
    layers.GlobalPooling(function=T.mean),
    layers.Softmax(1000),
)
plots.layer_structure(googlenet)
Example #26
adanet.train(x_train, y_train)

aresult = adanet.predict(x_test)

aerror = estimators.rmse(aresult, y_test)
'''

network = architectures.mixture_of_experts([
    layers.join(
        layers.Input(58),
        layers.Softmax(22),
        layers.Softmax(1),
    ),
    layers.join(
        layers.Input(58),
        layers.Relu(60),
        layers.Relu(40),
        layers.Softmax(22),
        layers.Softmax(1),
    ),
    layers.join(
        layers.Input(58),
        layers.Tanh(12),
        layers.Tanh(25),
        layers.Tanh(1),
    ),
Example #27
# Note: in newer scikit-learn releases the cross_validation module was
# replaced by model_selection, which provides the same train_test_split
x_train, x_test, y_train, y_test = cross_validation.train_test_split(
    data.astype(np.float32), target.astype(np.float32), train_size=(6 / 7.))

network = algorithms.Adadelta(
    [
        layers.Convolution((32, 1, 3, 3)),
        layers.Relu(),
        layers.Convolution((48, 32, 3, 3)),
        layers.Relu(),
        layers.MaxPooling((2, 2)),
        layers.Dropout(0.2),
        layers.Reshape(),
        layers.Relu(6912),
        layers.Dropout(0.3),
        layers.Softmax(200),
        layers.ArgmaxOutput(10),
    ],
    error='categorical_crossentropy',
    step=1.0,
    verbose=True,
    shuffle_data=True,
    epochs_step_minimizator=8,
    addons=[algorithms.SimpleStepMinimization],
)
network.architecture()
network.train(x_train, y_train, x_test, y_test, epochs=6)

y_predicted = network.predict(x_test)
y_test_labels = np.asarray(y_test.argmax(axis=1)).reshape(len(y_test))
Example #28
    def test_storage_save_dict(self):
        network = layers.join(
            layers.parallel([
                layers.Input(2, name='input-1'),
                layers.PRelu(1, name='prelu')
            ], [
                layers.Input(1, name='input-2'),
                layers.Sigmoid(4, name='sigmoid'),
                layers.BatchNorm(name='batch-norm'),
            ]),
            layers.Concatenate(name='concatenate'),
            layers.Softmax(3, name='softmax'),
        )
        dict_network = storage.save_dict(network)

        expected_keys = ('metadata', 'layers', 'graph')
        self.assertItemsEqual(expected_keys, dict_network.keys())

        expected_metadata_keys = ('created', 'language', 'library', 'version')
        actual_metadata_keys = dict_network['metadata'].keys()
        self.assertItemsEqual(expected_metadata_keys, actual_metadata_keys)

        self.assertEqual(len(dict_network['layers']), 7)

        expected_layers = [{
            'class_name': 'Input',
            'configs': {
                'name': 'input-1',
                'shape': (2, )
            },
            'name': 'input-1',
        }, {
            'class_name': 'PRelu',
            'configs': {
                'alpha_axes': (-1, ),
                'name': 'prelu',
                'n_units': 1
            },
            'name': 'prelu',
        }, {
            'class_name': 'Input',
            'configs': {
                'name': 'input-2',
                'shape': (1, )
            },
            'name': 'input-2',
        }, {
            'class_name': 'Sigmoid',
            'configs': {
                'name': 'sigmoid',
                'n_units': 4
            },
            'name': 'sigmoid',
        }, {
            'class_name': 'BatchNorm',
            'configs': {
                'alpha': 0.1,
                'axes': (0, ),
                'epsilon': 1e-05,
                'name': 'batch-norm'
            },
            'name': 'batch-norm',
        }, {
            'class_name': 'Concatenate',
            'configs': {
                'axis': -1,
                'name': 'concatenate'
            },
            'name': 'concatenate',
        }, {
            'class_name': 'Softmax',
            'configs': {
                'name': 'softmax',
                'n_units': 3
            },
            'name': 'softmax',
        }]
        actual_layers = []
        for i, layer in enumerate(dict_network['layers']):
            self.assertIn('parameters', layer, msg="Layer #" + str(i))

            layer = copy.deepcopy(layer)
            del layer['parameters']
            actual_layers.append(layer)

        self.assertEqual(actual_layers, expected_layers)
Example #29
    def test_storage_load_dict_using_wrong_names(self):
        network = layers.join(
            layers.Input(3),
            layers.Relu(4, name='relu'),
            layers.Linear(5, name='linear') >> layers.Relu(),
            layers.Softmax(6, name='softmax'),
        )

        storage.load_dict(
            network,
            {
                'metadata': {},  # omitted for simplicity
                'graph': {},  # omitted for simplicity
                # Input layer was omitted on purpose
                'layers': [{
                    'name': 'name-1',
                    'class_name': 'Relu',
                    'configs': {},
                    'parameters': {
                        'weight': {
                            'trainable': True,
                            'value': np.ones((3, 4))
                        },
                        'bias': {
                            'trainable': True,
                            'value': np.ones((4, ))
                        },
                    }
                }, {
                    'name': 'name-2',
                    'class_name': 'Relu',
                    'configs': {},
                    'parameters': {
                        'weight': {
                            'trainable': True,
                            'value': np.ones((4, 5))
                        },
                        'bias': {
                            'trainable': True,
                            'value': np.ones((5, ))
                        },
                    }
                }, {
                    'name': 'name-3',
                    'class_name': 'Softmax',
                    'configs': {},
                    'parameters': {
                        'weight': {
                            'trainable': True,
                            'value': np.ones((5, 6))
                        },
                        'bias': {
                            'trainable': True,
                            'value': np.ones((6, ))
                        },
                    }
                }]
            },
            load_by='order',
            skip_validation=False)

        relu = network.layer('relu')
        self.assertEqual(12, np.sum(self.eval(relu.weight)))
        self.assertEqual(4, np.sum(self.eval(relu.bias)))

        linear = network.layer('linear')
        self.assertEqual(20, np.sum(self.eval(linear.weight)))
        self.assertEqual(5, np.sum(self.eval(linear.bias)))

        softmax = network.layer('softmax')
        self.assertEqual(30, np.sum(self.eval(softmax.weight)))
        self.assertEqual(6, np.sum(self.eval(softmax.bias)))
Example #30
SCALE = 70
SENS_SCALE = 195

CONE_TIMER = 200
SAVE_SPAN = 5

fundo_size = 150

esqYpos = 300 + (fundo_size / 2) - 5
dirYpos = 300 - (fundo_size / 2) + 5

nn = layers.join(
    layers.Input(4),
    layers.Sigmoid(12),
    layers.Sigmoid(8),
    layers.Softmax(3),
)

# output -> [0]-accelerate, [1]-left, [2]-right
otimizador = algorithms.Momentum(nn)


class MyGame(arcade.Window):
    """ Main application class. """
    def __init__(self, width, height):
        super().__init__(width, height)
        self.time_elapsed = 0.0
        self.distance_map = 0.0
        self.distance_car = 0.0
        self.maxDistance = 0.0
        self.last_checkUpX = 0.0