"""use python3 Chapter19/visualize_architecture.py""" from nn.conv.lenet import LeNet from keras.utils import plot_model # initialize LeNet and then write the network architecture # visualization graph to disk model = LeNet.build(28, 28, 1, 10) plot_model(model, to_file="Chapter19/lenet.png", show_shapes=True)
# train LeNet on a small playing-card image dataset and plot the learning curves
# NOTE: adjust the package paths of these imports to match your project layout
from preprocessing.simplepreprocessor import SimplePreprocessor
from preprocessing.imagetoarraypreprocessor import ImageToArrayPreprocessor
from datasets.simpledatasetloader import SimpleDatasetLoader
from nn.conv.lenet import LeNet
from sklearn.preprocessing import LabelBinarizer
from sklearn.model_selection import train_test_split
from sklearn.metrics import classification_report
from keras.optimizers import SGD
from imutils import paths
import matplotlib.pyplot as plt
import numpy as np

# input images are resized to n x n x 3
n = 32
input_size = n * n * 3

# image_paths must list the dataset images on disk ("dataset" below is a placeholder)
image_paths = list(paths.list_images("dataset"))

# load the dataset, resize each image, convert it to a Keras-compatible
# array, and scale the pixel intensities to [0, 1]
sp = SimplePreprocessor(n, n)
iap = ImageToArrayPreprocessor()
sdl = SimpleDatasetLoader(preprocessors=[sp, iap])
(data, labels) = sdl.load(image_paths, verbose=500)
data = data.astype("float") / 255.0

# split into 75% training and 25% testing, then one-hot encode the labels
# (fit the binarizer on the training labels only, reuse it for the test labels)
(train_x, test_x, train_y, test_y) = train_test_split(data, labels,
    test_size=0.25, random_state=42)
lb = LabelBinarizer()
train_y = lb.fit_transform(train_y)
test_y = lb.transform(test_y)

# initialize the optimizer and model
print("[INFO] compiling model...")
opt = SGD(lr=0.005)
model = LeNet.build(width=32, height=32, depth=3, classes=4)
model.compile(loss="categorical_crossentropy", optimizer=opt,
    metrics=["accuracy"])

# train the network
print("[INFO] training network...")
H = model.fit(train_x, train_y, validation_data=(test_x, test_y),
    batch_size=32, epochs=100, verbose=1)

# evaluate the network
print("[INFO] evaluating network...")
predictions = model.predict(test_x, batch_size=32)
print(classification_report(test_y.argmax(axis=1),
    predictions.argmax(axis=1),
    target_names=["diamonds", "hearts", "spades", "three_sisters"]))

# plot the training and validation loss
plt.style.use("ggplot")
plt.figure()
plt.plot(np.arange(0, 100), H.history["loss"], label="train_loss")
plt.plot(np.arange(0, 100), H.history["val_loss"], label="val_loss")
plt.title("Training Loss")
plt.xlabel("Epoch #")
plt.ylabel("Loss")
plt.legend()
plt.show()
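The SimplePreprocessor, ImageToArrayPreprocessor, and SimpleDatasetLoader classes imported above live elsewhere in the project and are not shown. Based only on how the script uses them (resize to n x n, convert to a Keras array, load images and labels from disk), a minimal sketch of their assumed behaviour looks like this; the directory-name labelling convention is an assumption.

# minimal sketches of the helper classes used above (assumed behaviour)
import os
import cv2
import numpy as np
from keras.preprocessing.image import img_to_array

class SimplePreprocessor:
    def __init__(self, width, height, inter=cv2.INTER_AREA):
        # target spatial dimensions and interpolation method
        self.width, self.height, self.inter = width, height, inter

    def preprocess(self, image):
        # resize the image to a fixed size, ignoring the aspect ratio
        return cv2.resize(image, (self.width, self.height),
            interpolation=self.inter)

class ImageToArrayPreprocessor:
    def preprocess(self, image):
        # reorder the channels according to the Keras image_data_format
        return img_to_array(image)

class SimpleDatasetLoader:
    def __init__(self, preprocessors=None):
        self.preprocessors = preprocessors if preprocessors is not None else []

    def load(self, image_paths, verbose=-1):
        data, labels = [], []
        for (i, path) in enumerate(image_paths):
            # assume the layout /path/to/dataset/{class}/{image}.jpg
            image = cv2.imread(path)
            label = path.split(os.path.sep)[-2]
            for p in self.preprocessors:
                image = p.preprocess(image)
            data.append(image)
            labels.append(label)
            if verbose > 0 and i > 0 and (i + 1) % verbose == 0:
                print("[INFO] processed {}/{}".format(i + 1, len(image_paths)))
        return (np.array(data), np.array(labels))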
from keras.utils import plot_model
from nn.conv.lenet import LeNet

# build LeNet and write the architecture visualization to disk
# NOTE: plot_model requires the pydot and graphviz packages to be installed
model = LeNet.build(width=28, height=28, depth=1, classes=10)
plot_model(model, to_file='lenet.png', show_shapes=True)
# train LeNet on the fruit images stored in HDF5 and save the model to disk
# NOTE: adjust the package paths of these imports to match your project layout
import h5py
import numpy as np
import matplotlib.pyplot as plt
from sklearn.preprocessing import LabelBinarizer
from sklearn.model_selection import train_test_split
from sklearn.metrics import classification_report
from keras.optimizers import SGD
from keras import backend as K
from nn.conv.lenet import LeNet

def main():
    # option() is the project's argument parser; it is expected to return a
    # dict with the "model" (serialized model path) and "output" (plot path) keys
    args = option()
    class_name = ['Apple', 'Avocado', 'Banana', 'Coconut', 'Custard_apple',
                  'Dragon_fruit', 'Guava', 'Mango', 'Orange', 'Plum',
                  'Start_fruit', 'Watermelon']
    in_data = 'H5PY/train_normal_128.h5'
    in_label = 'H5PY/labels_train_64_128.h5'

    # import the feature vectors and trained labels
    h5f_data = h5py.File(in_data, 'r')
    h5f_label = h5py.File(in_label, 'r')
    data = np.array(h5f_data['dataset'])
    labels = np.array(h5f_label['dataset'])
    h5f_data.close()
    h5f_label.close()

    # reshape the data matrix to match the Keras channel ordering
    if K.image_data_format() == "channels_first":
        data = data.reshape(data.shape[0], 3, 128, 128)
    else:
        data = data.reshape(data.shape[0], 128, 128, 3)
    print(data.shape)

    # split training: 80%, testing: 20%
    (trainX, testX, trainY, testY) = train_test_split(data, labels,
        test_size=0.20, random_state=42)

    # convert the labels to one-hot vectors
    # (fit the binarizer on the training labels only, reuse it for the test labels)
    lb = LabelBinarizer()
    trainY = lb.fit_transform(trainY)
    testY = lb.transform(testY)

    # initialize the optimizer and model
    print("[INFO] compiling model...")
    opt = SGD(lr=0.05)
    model = LeNet.build(width=128, height=128, depth=3, classes=12)
    model.compile(loss="categorical_crossentropy", optimizer=opt,
        metrics=["accuracy"])

    # train the network
    print("[INFO] training network...")
    H = model.fit(trainX, trainY, validation_data=(testX, testY),
        batch_size=32, epochs=40, verbose=1)

    # save the network to disk
    print("[INFO] serializing network...")
    model.save(args["model"])

    # evaluate the network
    print("[INFO] evaluating network...")
    print("[INFO] shape feature: {}".format(data.shape))
    preds = model.predict(testX)
    print(classification_report(testY.argmax(axis=1), preds.argmax(axis=1),
        target_names=class_name))

    # plot the training loss and accuracy
    plt.style.use("ggplot")
    plt.figure()
    plt.plot(np.arange(0, 40), H.history["loss"], label="train_loss")
    plt.plot(np.arange(0, 40), H.history["val_loss"], label="val_loss")
    plt.plot(np.arange(0, 40), H.history["accuracy"], label="train_acc")
    plt.plot(np.arange(0, 40), H.history["val_accuracy"], label="val_acc")
    plt.title("Training Loss and Accuracy")
    plt.xlabel("Epoch #")
    plt.ylabel("Loss/Accuracy")
    plt.legend()
    plt.savefig(args["output"])
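The option() helper called at the top of main() is not shown in the source. A minimal argparse-based sketch that supplies the "model" and "output" keys used above is given below; the flag names and the entry-point guard are assumptions, not the project's actual parser.

# hypothetical option() helper -- the real parser may differ
import argparse

def option():
    ap = argparse.ArgumentParser()
    ap.add_argument("-m", "--model", required=True,
        help="path to the output serialized model (e.g. fruit_lenet.hdf5)")
    ap.add_argument("-o", "--output", required=True,
        help="path to the output training loss/accuracy plot")
    return vars(ap.parse_args())

if __name__ == "__main__":
    main()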