validation_steps=len(testX) // BS,
              epochs=EPOCHS)

# make predictions on the test set
predictions = model.predict(testX, batch_size=BS)

# for each sample, take the index of the class with the largest probability
predictions = np.argmax(predictions, axis=1)

# show a nicely formatted classification report
print(
    classification_report(testY.argmax(axis=1),
                          predictions,
                          target_names=lb.classes_))
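# A confusion matrix complements the report above; a minimal sketch, assuming
# scikit-learn is available (classification_report is already imported from it)
from sklearn.metrics import confusion_matrix
print(confusion_matrix(testY.argmax(axis=1), predictions))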

# saving the model
model.save(args["model"], save_format="h5")
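# The saved HDF5 file can be reloaded later for inference; a minimal sketch,
# reusing the same args["model"] path
from tensorflow.keras.models import load_model
reloaded = load_model(args["model"])
reloaded.summary()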

# plot the training loss and accuracy
N = EPOCHS
plt.style.use("ggplot")
plt.figure()
plt.plot(np.arange(0, N), H.history["loss"], label="train_loss")
plt.plot(np.arange(0, N), H.history["val_loss"], label="val_loss")
plt.plot(np.arange(0, N), H.history["accuracy"], label="train_acc")
plt.plot(np.arange(0, N), H.history["val_accuracy"], label="val_acc")
plt.title("Training Loss and Accuracy")
plt.xlabel("Epoch #")
plt.ylabel("Loss/Accuracy")
plt.legend(loc="lower left")
plt.savefig(args["plot"])
Example #2
import numpy as np
from tensorflow.keras.datasets import fashion_mnist
from tensorflow.keras.layers import Input, Conv2D, MaxPooling2D, Flatten, Dense
from tensorflow.keras.models import Model
from tensorflow.keras.utils import to_categorical

# load the Fashion-MNIST dataset
(x_train, y_train), (x_test, y_test) = fashion_mnist.load_data()

# add a channel dimension and scale pixel values to [0, 1]
x_train = np.reshape(x_train, (len(x_train), 28, 28, 1)).astype("float32") / 255.0
x_test = np.reshape(x_test, (len(x_test), 28, 28, 1)).astype("float32") / 255.0

# one-hot encode the labels
y_train = to_categorical(y_train)
y_test = to_categorical(y_test)

ip = Input(shape=(28, 28, 1))
x = Conv2D(16, (3, 3), activation='relu', padding='same')(ip)
x = MaxPooling2D((2, 2), padding='same')(x)
x = Conv2D(32, (3, 3), activation='relu', padding='same')(x)
x = MaxPooling2D((2, 2), padding='same')(x)
x = Conv2D(64, (3, 3), activation='relu', padding='same')(x)
x = MaxPooling2D((2, 2), padding='same')(x)
x = Flatten()(x)
x = Dense(512, activation='relu')(x)
x = Dense(10, activation='softmax')(x)
model = Model(ip, x)  # avoid reusing `x` as both a layer tensor and the model

model.compile(optimizer='adam',
              loss='categorical_crossentropy',
              metrics=['accuracy'])
model.fit(x_train,
          y_train,
          epochs=10,
          batch_size=128,
          shuffle=True,
          verbose=1,
          validation_data=(x_test, y_test))
model.save("fashion_mnist2.h5")
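# Quick sanity check that the saved model restores cleanly; a minimal sketch
from tensorflow.keras.models import load_model
restored = load_model("fashion_mnist2.h5")
loss, acc = restored.evaluate(x_test, y_test, verbose=0)
print("restored test accuracy: {:.4f}".format(acc))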
Example #3
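# `preprocess` is used below but not defined in this snippet; a plausible
# minimal sketch, assuming the dataset yields (image, label) pairs of
# uint8 images
import tensorflow as tf

def preprocess(image, label):
    # scale pixel values to [0, 1]
    return tf.cast(image, tf.float32) / 255.0, label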
# shuffle before batching so individual examples (not whole batches) get
# mixed, then batch and prefetch for pipeline throughput
train_data = train_data.map(preprocess) \
    .shuffle(1000) \
    .batch(batch_size).prefetch(1)
test_data = test_data.map(preprocess) \
    .batch(batch_size).prefetch(1)
print('training data after preprocessing: ', train_data)
print('test data after preprocessing: ', test_data)

"""# Saving and Recreating the trained model"""

## Save the whole model
model.save('./trained_CNN/Smart_Truck/my_model_tld1.h5')

## Recreate whole model
new_model=keras.models.load_model('./trained_CNN/Smart_Truck/my_model_tld1.h5')
new_model.summary()

## Save the weights
model.save_weights('./trained_CNN/Smart_Truck/my_weights_tld1.h5')

## Restore the weights
model=create_model()
model.load_weights('./trained_CNN/Smart_Truck/my_weights_tld1.h5')
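# Optional sanity check: the restored weights should match the fully reloaded
# model, assuming both .h5 files came from the same training run
import numpy as np
for w_full, w_restored in zip(new_model.get_weights(), model.get_weights()):
    assert np.allclose(w_full, w_restored)
print("restored weights match the reloaded model")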

"""# DOWNLOAD created files
In this case downloading the previously created model.
    loss="sparse_categorical_crossentropy",
    optimizer=optimizer,
    metrics=["accuracy"]
)
model.summary()

# Train the model
history = model.fit(
    train_data,
    epochs=13,
    validation_data=test_data
)
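# The returned History object can be plotted the same way as in Example #1;
# a minimal sketch
import matplotlib.pyplot as plt
plt.figure()
plt.plot(history.history["accuracy"], label="train_acc")
plt.plot(history.history["val_accuracy"], label="val_acc")
plt.xlabel("Epoch #")
plt.ylabel("Accuracy")
plt.legend(loc="lower left")
plt.show()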

"""# Saving and Recreating the trained model"""

## Save the whole model
model.save('./trained_CNN/imagewoof/my_model_imagewoof.h5')

## Recreate whole model
new_model=keras.models.load_model('./trained_CNN/imagewoof/my_model_imagewoof.h5')
new_model.summary()
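# The recreated model can be used directly for inference; a minimal sketch,
# assuming the preprocessed test_data pipeline from above is still in scope
preds = new_model.predict(test_data)
print("predicted class ids:", preds.argmax(axis=1)[:10])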

"""# DOWNLOAD created files
In this case downloading the previously created model.

Steps for downloading files manually: View -> Contents -> Files (you can also display and download everything generated).
"""

# DOWNLOAD created files (in this case the previously created model)
from google.colab import files
files.download('./trained_CNN/imagewoof/my_model_imagewoof.h5')
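# Alternative to the manual download: mount Google Drive and copy the model
# there so it survives the Colab session; a minimal sketch (the Drive target
# path is an assumption)
import shutil
from google.colab import drive
drive.mount('/content/drive')
shutil.copy('./trained_CNN/imagewoof/my_model_imagewoof.h5',
            '/content/drive/MyDrive/my_model_imagewoof.h5')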