#from sklearn import svm
from sklearn.svm import SVC
from sklearn.preprocessing import MinMaxScaler
from sklearn.model_selection import train_test_split
from sklearn.linear_model import LogisticRegression
from sklearn.tree import DecisionTreeClassifier

from sklearn.neighbors import KNeighborsClassifier
from sklearn.preprocessing import StandardScaler
from sklearn.decomposition import PCA
from sklearn.pipeline import Pipeline

import time

# --- Fragment 1: scikit-learn KNN pipeline on MNIST (truncated below) ---
print('reading data...', end='')
# NOTE(review): read_mnist is neither imported nor defined in the visible file —
# presumably it comes from a helper module clipped out of this fragment; confirm.
X, y, classes = read_mnist('mnist_public.npy')
print(X)  # X is the image points
print(y)  # y is the answers
print('done!')

# Hold out 10% of the data for testing; fixed random_state for reproducibility.
X_train, X_test, y_train, y_test = train_test_split(X,
                                                    y,
                                                    test_size=0.1,
                                                    train_size=0.9,
                                                    random_state=5)

# Pipeline stages: scale features, (optionally) reduce dimensionality, classify.
scl = MinMaxScaler()
pca = PCA()
# NOTE(review): this call is truncated mid-argument-list — the closing paren and
# any remaining keyword arguments are missing from this fragment of the file.
knn = KNeighborsClassifier(n_neighbors=3,
                           weights='uniform',
                           algorithm='brute',
# NOTE(review): the original file contained the stray lines `Beispiel #2` / `0`
# here — a paste/scrape artifact marking the boundary between two unrelated
# fragments. Everything below belongs to a separate TensorFlow 1.x autoencoder
# script, not to the scikit-learn code above.
    # NOTE(review): the `def make_encoder(images, code_size):` header for this
    # body is missing — it was clipped at the paste boundary above (the call at
    # the bottom of the file passes (inputs, code_size)).
    images = tf.layers.flatten(images)  # flatten per-example dims to a vector
    hidden = tf.layers.dense(images, 512, tf.nn.elu)
    # Project to the latent code; ELU-activated, so this is not a raw mean
    # despite the name — confirm against the original source.
    mean = tf.layers.dense(hidden, code_size, tf.nn.elu)
    return mean


def make_decoder(code, data_shape=(28, 28, 1)):
    """Map a latent code back to an image-shaped tensor.

    Args:
        code: 2-D tensor of latent codes, shape (batch, code_size).
        data_shape: per-example output shape. The default was changed from the
            mutable list ``[28, 28, 1]`` to an equivalent tuple so the shared
            default object cannot be mutated across calls; list arguments are
            still accepted and behave identically.

    Returns:
        Tensor of shape ``[-1] + list(data_shape)`` produced by two dense
        layers followed by a reshape.
    """
    data_shape = list(data_shape)  # normalize tuple/list for concatenation below
    hidden = tf.layers.dense(code, 512, tf.nn.elu)
    # NOTE(review): the final layer applies tf.nn.elu even though the result is
    # named "logit" — confirm an output-layer activation is actually intended
    # (the loss used at the call site is huber_loss, not a logits-based loss).
    logit = tf.layers.dense(hidden, np.prod(data_shape), tf.nn.elu)
    logit = tf.reshape(logit, [-1] + data_shape)
    return logit


if __name__ == '__main__':
    # Load MNIST from a local .npz; flatten=False keeps images as 28x28x1.
    mnist_folder = os.path.join('..', 'data', 'mnist.npz')
    (train_x, train_y), (_, _) = mnist_utils.read_mnist(mnist_folder, flatten=False)
    # Collapse one-hot labels to integer class ids.
    train_y = np.argmax(train_y, axis=1)
    train_x_0 = train_x
    # np.random.shuffle(train_x_0)
    # Cap the working subset at 1000 examples to keep training cheap.
    train_x_0 = train_x_0[:min(len(train_x_0), 1000)]

    code_size = 100  # dimensionality of the latent code
    tf.reset_default_graph()  # TF1 graph-mode API
    inputs = tf.placeholder(tf.float32, [None, 28, 28, 1])

    # Autoencoder: encode to code_size dims, decode back to image shape,
    # train with Huber reconstruction loss and Adam (lr=1e-4).
    encoder = make_encoder(inputs, code_size)
    output = make_decoder(encoder)
    loss = tf.losses.huber_loss(inputs, output)
    optimize = tf.train.AdamOptimizer(learning_rate=0.0001).minimize(loss)

    # NOTE(review): the file is truncated here — the session/training loop that
    # presumably follows is missing from this fragment.
    mnist_utils.safe_mkdir('checkpoints')