Example #1
# Layer, activation, loss and optimizer classes (Sequential, Conv2D, MaxPool2D,
# Flatten, Dense, ReLU, Sigmoid, BinaryCrossEntropy, GradientDescentOptimizer)
# are assumed to be imported from the project's own from-scratch modules.

# Build a small CNN: 8x8 single-channel input, one sigmoid output unit
model = Sequential()
model.add(Conv2D,
          ksize=3,
          stride=1,
          activation=ReLU(),
          input_size=(8, 8, 1),
          filters=7,
          padding=0)
model.add(MaxPool2D, ksize=2, stride=1, padding=0)
model.add(Conv2D,
          ksize=2,
          stride=1,
          activation=ReLU(),
          filters=5,
          padding=0)
model.add(Flatten)
model.add(Dense, units=1, activation=Sigmoid())
model.summary()

# Binary output, so train against binary cross-entropy
model.compile(BinaryCrossEntropy())

# Report the loss before and after training with plain gradient descent
print("Initial Loss", model.evaluate(X, y)[0])
model.fit(X,
          y,
          n_epochs=100,
          batch_size=300,
          learning_rate=0.001,
          optimizer=GradientDescentOptimizer(),
          verbose=1)
print("Final Loss", model.evaluate(X, y)[0])

Example #2

import numpy as np

# Create dataset
X, y, X_test, y_test = create_data_mnist('fashion_mnist_images')
# Shuffle the training dataset
keys = np.array(range(X.shape[0]))
np.random.shuffle(keys)
X = X[keys]
y = y[keys]
# Scale and reshape samples
X = (X.reshape(X.shape[0], -1).astype(np.float32) - 127.5) / 127.5
X_test = (X_test.reshape(X_test.shape[0], -1).astype(np.float32) -
          127.5) / 127.5
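# (each image is flattened to a single vector; pixel values 0..255 are mapped to -1..1)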
# Instantiate the model
model = Sequential()
# Add layers
model.add(Layer_Dense(X.shape[1], 128))
model.add(Activation_ReLU())
model.add(Layer_Dense(128, 128))
model.add(Activation_ReLU())
model.add(Layer_Dense(128, 10))
model.add(Activation_Softmax())
# Set loss, optimizer and accuracy objects
model.compile(loss=Loss_CategoricalCrossentropy(),
              optimizer=Optimizer_Adam(decay=1e-4),
              metrics=Accuracy_Categorical())
# model.fit(X, y, validation_data=(X_test, y_test), epochs=10, batch_size=128, steps_per_epoch=100)
# model.save('fashion_mnist.model')
# model.evaluate(X_test, y_test)
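# Load the previously saved model and evaluate it on the test set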
model = model.load('fashion_mnist.model')
model.evaluate(X_test, y_test)
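
create_data_mnist is assumed to be defined elsewhere in the project. A rough sketch of the kind of directory-based loader it could correspond to (the train/test folder layout, one subdirectory per class label, and the use of OpenCV are assumptions, not the project's confirmed implementation):

import os
import cv2
import numpy as np

def load_mnist_dataset(dataset, path):
    # One subdirectory per class label, each holding grayscale image files
    X, y = [], []
    for label in os.listdir(os.path.join(path, dataset)):
        for file in os.listdir(os.path.join(path, dataset, label)):
            image = cv2.imread(os.path.join(path, dataset, label, file),
                               cv2.IMREAD_UNCHANGED)
            X.append(image)
            y.append(label)
    return np.array(X), np.array(y).astype('uint8')

def create_data_mnist(path):
    # Load the train and test splits from the extracted image folders
    X, y = load_mnist_dataset('train', path)
    X_test, y_test = load_mnist_dataset('test', path)
    return X, y, X_test, y_test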