Example #1
# Assumed imports for this excerpt (not shown in the original snippet)
import seaborn as sns
from tensorflow.keras.datasets import mnist
from tensorflow.keras.models import Sequential
from tensorflow.keras.utils import to_categorical

# Load the MNIST digits as (train, test) splits
(X_train, y_train), (X_test, y_test) = mnist.load_data()

# Use seaborn for plots
sns.set()
sns.set_style("dark")

# Display shape of data on terminal
print('X_train shape: ', X_train.shape)
print('y_train shape: ', y_train.shape)
print('X_test shape: ', X_test.shape)
print('y_test shape: ', y_test.shape)

# Plot a few examples
sample_indices = [1, 2, 4, 8, 16, 32, 65, 1329, 4000]
plot_samples(X_train[sample_indices], 'MNIST samples')

# Convert data to floating point and normalize to range 0-1
X_train = X_train.astype('float32') / 255
X_test = X_test.astype('float32') / 255

# Convert class labels to categorical data/one-hot encoding
y_test = to_categorical(y_test)
y_train = to_categorical(y_train)

# Reshape data for input to Dense layer
X_train = X_train.reshape(-1, 28 * 28)
X_test = X_test.reshape(-1, 28 * 28)

# Create sequential 2-layer model
model = Sequential()
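
All four excerpts call a plot_samples helper that the page does not show. Below is a minimal sketch of what such a helper could look like, assuming matplotlib and a batch of 28x28 grayscale images; the grid layout, the default arguments, and returning the figure (so callers such as Example #3 can save it) are assumptions, not the original implementation.

import matplotlib.pyplot as plt

def plot_samples(images, title='samples', width=3, height=3):
    # Draw a width x height grid of grayscale images; unused cells stay blank.
    fig, axes = plt.subplots(height, width, figsize=(2 * width, 2 * height))
    for k, ax in enumerate(axes.ravel()):
        ax.axis('off')
        if k < len(images):
            ax.imshow(images[k], cmap='gray')
    fig.suptitle(title)
    return fig

With this sketch, Example #1 above would render the nine selected digits in a 3x3 grid under the title 'MNIST samples'.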
Example #2
    if demo == "small":
        n_pot, e_pot, V, E = du.load_dataset("simple_studentScores")

        # Decode: Compute optimal decoding (most likely configuration of the states)
        print(dr.decode(n_pot, e_pot, V, E, rule="exact"))

        # Infer: Compute Vertex and Edge marginals and Normalizing Constant
        print(ir.infer(n_pot, e_pot, V, E, rule="exact"))

        # Sample:
        dep_samples = sr.sample(100, n_pot, e_pot, V, E).T

        # Display New sampling results
        #ind_samples = sample_independent_potentials(node_pot)
        hp.plot_samples(dep_samples)

    elif demo == "chain":
        n_pot, e_pot, V, E = du.load_dataset("chain_CSGrads")

        # Decode: Compute optimal decoding (most likely configuration of the states)
        print(dr.decode(n_pot, e_pot, V, E, rule="viterbi"))

        # Infer: Compute Vertex and Edge marginals and Normalizing Constant
        print(ir.infer(n_pot, e_pot, V, E, rule="forward-backward"))

        # Sample:
        dep_samples = sr.sample(100, n_pot, e_pot, V, E).T

        # Display New sampling results
        #ind_samples = sample_independent_potentials(node_pot)
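
For intuition about what ir.infer(..., rule="exact") returns, the brute-force sketch below enumerates every joint configuration of a small discrete model to obtain the node marginals and the normalizing constant Z. The potential shapes and the edge-potential layout (e_pot[s_i, s_j, k] for the k-th edge in E) are assumptions about the data returned by du.load_dataset, not the library's actual conventions.

import numpy as np
from itertools import product

def brute_force_infer(n_pot, e_pot, E):
    # n_pot: (n_nodes, n_states) node potentials
    # e_pot: (n_states, n_states, n_edges) edge potentials (assumed layout)
    # E:     sequence of (i, j) node-index pairs, one per edge
    n_nodes, n_states = n_pot.shape
    node_marg = np.zeros((n_nodes, n_states))
    Z = 0.0
    for states in product(range(n_states), repeat=n_nodes):
        # Unnormalized probability of this joint configuration
        p = np.prod([n_pot[i, s] for i, s in enumerate(states)])
        for k, (i, j) in enumerate(E):
            p *= e_pot[states[i], states[j], k]
        Z += p
        for i, s in enumerate(states):
            node_marg[i, s] += p
    return node_marg / Z, Z

Enumeration is exponential in the number of nodes, which is why the "chain" demo switches to Viterbi decoding and forward-backward inference.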
Example #3
            sess.run(optimiser_D, feed_dict={Z: z_sampler(batch_size, z_size), X: X_batch})

        # Run one step of the optimiser for G
        sess.run(optimiser_G, feed_dict={Z: z_sampler(batch_size, z_size)})

        err_G = sess.run(error_G, feed_dict={Z: z_sampler(batch_size, z_size)})
        err_D = sess.run(error_D, feed_dict={Z: z_sampler(batch_size, z_size), X: X_batch})

        errors[0].append([epoch, err_G])
        errors[1].append([epoch, err_D])

        # Print progress every 20 epochs
        if epoch % 20 == 0:

            print('Epoch: ', epoch, '\t Generator error:\t {:.4f}'.format(err_G),
                  '\t Discriminator error:\t {:.4f}'.format(err_D))

        # Plot images generated from 64 noise samples and save them to a directory
        if path_to_images and epoch % 500 == 0:
            samples = sess.run(sample, feed_dict={Z: z_sampler(64, z_size)})

            figure = helpers.plot_samples(samples)
            plt.savefig('{}/{}.png'.format(path_to_images, str(epoch)), bbox_inches='tight')
            plt.close()

    plot_errors(errors, nb_epochs)

del sess
sys.exit(0)
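
The training loop above assumes a z_sampler helper that draws a batch of latent noise vectors for the generator. A minimal sketch, assuming uniform noise in [-1, 1] (a common choice; the original sampler may use a Gaussian or a different range):

import numpy as np

def z_sampler(batch_size, z_size):
    # Draw batch_size latent vectors of dimension z_size, uniform in [-1, 1]
    return np.random.uniform(-1.0, 1.0, size=(batch_size, z_size))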
Example #4
# Assumed imports for this excerpt (not shown in the original snippet)
import numpy as np
from tensorflow.keras.utils import to_categorical

# Convert class labels to categorical data/one-hot encoding
y_test = to_categorical(y_test)
y_train = to_categorical(y_train)

model = create_lenet()

# Train model and evaluate training
results = model.fit(X_train, y_train, epochs=10, batch_size=64)
#training_eval(results, 'final')

# Predict and evaluate performance
y_fit = model.predict(X_test, batch_size=128)
performance_eval('final', y_fit.argmax(axis=1), y_test.argmax(axis=1))

save_json(model, 'final')
model.save_weights('models/final_weights.h5')

# Inspect and plot the misclassified digits
mis_index = np.where(y_fit.argmax(axis=1) != y_test.argmax(axis=1))
misclassifieds = X_test[mis_index]
predicted_labels = y_fit.argmax(axis=1)[mis_index]
target_labels = y_test.argmax(axis=1)[mis_index]
print('MNIST misclassifieds - predicted labels')
print(np.resize(predicted_labels, 10 * 10).reshape((10, 10)))
print('\nMNIST misclassifieds - target labels')
print(np.resize(target_labels, 10 * 10).reshape((10, 10)))

plot_samples(misclassifieds.reshape(np.shape(mis_index)[1], 28, 28),
             title='MNIST_misclassifieds',
             width=10,
             height=10)
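
The create_lenet() factory is not shown in this excerpt. Below is a rough LeNet-style sketch in Keras, assuming 28x28x1 MNIST input, ten softmax outputs, and categorical cross-entropy to match the one-hot labels above; the original network may differ in filter counts, activations, and compile settings.

from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import Conv2D, AveragePooling2D, Flatten, Dense

def create_lenet():
    # LeNet-5-like stack: two conv/pool blocks followed by three dense layers
    model = Sequential([
        Conv2D(6, (5, 5), activation='tanh', padding='same', input_shape=(28, 28, 1)),
        AveragePooling2D((2, 2)),
        Conv2D(16, (5, 5), activation='tanh'),
        AveragePooling2D((2, 2)),
        Flatten(),
        Dense(120, activation='tanh'),
        Dense(84, activation='tanh'),
        Dense(10, activation='softmax'),
    ])
    model.compile(optimizer='adam', loss='categorical_crossentropy', metrics=['accuracy'])
    return model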