Example #1
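This test snippet relies on imports defined elsewhere in the test file. A minimal sketch of the likely imports is shown below; the exact NumPyNet module paths are an assumption based on the package layout and may differ in the installed version.

# assumed imports (module paths inferred from the NumPyNet package layout)
from NumPyNet.network import Network
from NumPyNet.layers.activation_layer import Activation_layer
from NumPyNet.layers.route_layer import Route_layer
from NumPyNet.layers.cost_layer import Cost_layer
from NumPyNet.optimizer import SGD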
    def test_printer(self, b, w, h, c):

        net = Network(batch=b, input_shape=(w, h, c))
        net.add(Activation_layer(activation='relu'))  # layer 1
        net.add(Activation_layer(activation='tanh'))  # layer 2
        net.add(Route_layer(input_layers=(1, 2), by_channels=True))  # layer 3: concatenate layers 1 and 2 along channels
        net.add(
            Cost_layer(cost_type='mse',
                       scale=1.,
                       ratio=0.,
                       noobject_scale=1.,
                       threshold=0.,
                       smoothing=0.))
        net.compile(optimizer=SGD())

        net.summary()
Example #2
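This parity test compares a Keras reference model against the NumPyNet network. The imports below are a hedged sketch of what the full test file would need: the Keras imports assume the tensorflow.keras API (the original may use standalone Keras), and the NumPyNet module paths are inferred from the package layout.

# assumed imports for the Keras / NumPyNet comparison
import numpy as np
from tensorflow.keras.layers import Input, Activation, Concatenate
from tensorflow.keras.models import Model

from NumPyNet.network import Network
from NumPyNet.layers.activation_layer import Activation_layer
from NumPyNet.layers.route_layer import Route_layer
from NumPyNet.layers.cost_layer import Cost_layer
from NumPyNet.optimizer import SGD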
    def test_forward(self, b, w, h, c):

        inpt = np.random.uniform(low=-10., high=10.,
                                 size=(b, w, h, c)).astype(float)

        # init keras model
        inp = Input(batch_shape=(b, w, h, c))
        x = Activation(activation='relu')(inp)
        y = Activation(activation='tanh')(x)
        concat = Concatenate(axis=-1)([x, y])  # concatenation of x and y
        model = Model(inputs=[inp], outputs=concat)
        model.compile(optimizer='sgd', loss='mse')

        # init NumPyNet model
        net = Network(batch=b, input_shape=(w, h, c))
        net.add(Activation_layer(activation='relu'))  # layer 1
        net.add(Activation_layer(activation='tanh'))  # layer 2
        net.add(Route_layer(input_layers=(1, 2), by_channels=True))  # layer 3: concatenate layers 1 and 2 along channels
        net.add(
            Cost_layer(cost_type='mse',
                       scale=1.,
                       ratio=0.,
                       noobject_scale=1.,
                       threshold=0.,
                       smoothing=0.))
        net.compile(optimizer=SGD())

        net.summary()

        assert not net._fitted
        net._fitted = True  # bypass the fitted check so predict() can run without training

        # FORWARD pass through both models

        fwd_out_numpynet = net.predict(X=inpt)
        fwd_out_keras = model.predict(x=inpt, batch_size=b)

        np.testing.assert_allclose(fwd_out_keras,
                                   fwd_out_numpynet,
                                   rtol=1e-5,
                                   atol=1e-8)
Example #3
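This snippet starts mid-script: the training and test arrays (X_train, y_train, X_test, y_test), num_classes, the model object, and the accuracy metric are prepared earlier in the original example. A hedged sketch of the imports it depends on is given below; the module paths are an assumption based on the NumPyNet layout.

# assumed imports (the original script builds the model and defines X_train, y_train,
# X_test, y_test, num_classes and the accuracy metric before this point)
from NumPyNet.network import Network
from NumPyNet.layers.batchnorm_layer import BatchNorm_layer
from NumPyNet.layers.connected_layer import Connected_layer
from NumPyNet.layers.softmax_layer import Softmax_layer
from NumPyNet.optimizer import Adam
from NumPyNet.metrics import mean_accuracy_score
from NumPyNet.utils import from_categorical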
    model.add(BatchNorm_layer())

    model.add(Connected_layer(outputs=num_classes, activation='Linear'))

    model.add(Softmax_layer(spatial=True, groups=1, temperature=1.))
    # model.add(Cost_layer(cost_type=cost_type.mse))

    # model.compile(optimizer=SGD(lr=0.01, decay=0., lr_min=0., lr_max=np.inf))
    model.compile(optimizer=Adam(), metrics=[accuracy])

    print('*************************************')
    print('\n Total input dimension: {}'.format(X_train.shape), '\n')
    print('**************MODEL SUMMARY***********')

    model.summary()

    print('\n***********START TRAINING***********\n')

    # Fit the model on the training set
    model.fit(X=X_train, y=y_train, max_iter=10, verbose=True)

    print('\n***********START TESTING**************\n')

    # Test the prediction with timing
    loss, out = model.evaluate(X=X_test, truth=y_test, verbose=True)

    # convert the one-hot encodings back to class labels and score the predictions
    truth = from_categorical(y_test)
    predicted = from_categorical(out)
    accuracy = mean_accuracy_score(truth, predicted)
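As a small follow-up not present in the original snippet, the computed score could then be reported, for example:

    # hypothetical final report of the test-set accuracy
    print('Accuracy on the test set: {:.3f}'.format(accuracy))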