def train(aug, trainX, trainY, testX, testY, args):
    # initialize the model
    print("[INFO] compiling model...")
    model = LeNet.build(width=norm_size, height=norm_size, depth=3, classes=CLASS_NUM)
    opt = Adam(lr=INIT_LR, decay=INIT_LR / EPOCHS)
    model.compile(loss="categorical_crossentropy", optimizer=opt,
        metrics=["accuracy"])

    # train the network
    print("[INFO] training network...")
    H = model.fit_generator(aug.flow(trainX, trainY, batch_size=BS),
        validation_data=(testX, testY), steps_per_epoch=len(trainX) // BS,
        epochs=EPOCHS, verbose=1)

    # save the model to disk
    print("[INFO] serializing network...")
    model.save(args["model"])
    
    # plot the training loss and accuracy
    plt.style.use("ggplot")
    plt.figure()
    N = EPOCHS
    plt.plot(np.arange(0, N), H.history["loss"], label="train_loss")
    plt.plot(np.arange(0, N), H.history["val_loss"], label="val_loss")
    plt.plot(np.arange(0, N), H.history["acc"], label="train_acc")
    plt.plot(np.arange(0, N), H.history["val_acc"], label="val_acc")
    plt.title("Training Loss and Accuracy on Invoice classifier")
    plt.xlabel("Epoch #")
    plt.ylabel("Loss/Accuracy")
    plt.legend(loc="lower left")
    plt.savefig(args["plot"])
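The aug augmentation generator and the args dictionary passed to train() are created outside the function. A minimal sketch of the kind of setup this example assumes (the command-line flag names and augmentation parameters are illustrative, not taken from the original script):

# hedged sketch: setup assumed by train() above; flag names and parameters are assumptions
import argparse
from keras.preprocessing.image import ImageDataGenerator

ap = argparse.ArgumentParser()
ap.add_argument("-m", "--model", required=True, help="path to output model file")
ap.add_argument("-p", "--plot", default="plot.png", help="path to output loss/accuracy plot")
args = vars(ap.parse_args())

# augmentation generator consumed through aug.flow(trainX, trainY, batch_size=BS)
aug = ImageDataGenerator(rotation_range=30, width_shift_range=0.1, height_shift_range=0.1,
                         shear_range=0.2, zoom_range=0.2, horizontal_flip=True,
                         fill_mode="nearest")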
Example #2
def modelChoice(args):
    print("creating model..." + args.model)
    if args.model == 'googleNet':
        model = googleNet.build(width=args.width,
                                height=args.height,
                                depth=3,
                                classes=CLASS_NUM)
        return model
    elif args.model == 'AlexNet':
        model = AlexNet.build(width=args.width,
                              height=args.height,
                              depth=3,
                              classes=CLASS_NUM)
        return model
    elif args.model == 'VGG16':
        model = VGG16.build(width=args.width,
                            height=args.height,
                            depth=3,
                            classes=CLASS_NUM)
        return model
    else:
        model = LeNet.build(width=args.width,
                            height=args.height,
                            depth=3,
                            classes=CLASS_NUM)
        return model
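A short usage sketch for modelChoice(); the parser below is an assumption, since the original argument definitions are not part of the example:

# hedged sketch: driving modelChoice() from the command line (flag names are assumed)
import argparse

ap = argparse.ArgumentParser()
ap.add_argument("--model", default="LeNet", choices=["googleNet", "AlexNet", "VGG16", "LeNet"])
ap.add_argument("--width", type=int, default=32)
ap.add_argument("--height", type=int, default=32)
args = ap.parse_args()

model = modelChoice(args)
model.summary()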
Example #3
def train(aug, trainX, trainY, testX, testY, args):
    # initialize the model
    print("[INFO] compiling model...")
    model = LeNet.build(width=norm_size, height=norm_size, depth=3, classes=CLASS_NUM)
    opt = Adam(lr=INIT_LR, decay=INIT_LR / EPOCHS)
    model.compile(loss="categorical_crossentropy", optimizer=opt,
        metrics=["accuracy"])

    # train the network
    print("[INFO] training network...")
    H = model.fit_generator(aug.flow(trainX, trainY, batch_size=BS),
        validation_data=(testX, testY), steps_per_epoch=len(trainX) // BS,
        epochs=EPOCHS, verbose=1)

    # save the model to disk
    print("[INFO] serializing network...")
    model.save(args["model"])
    
    # plot the training loss and accuracy
    plt.style.use("ggplot")
    plt.figure()
    N = EPOCHS
    plt.plot(np.arange(0, N), H.history["loss"], label="train_loss")
    plt.plot(np.arange(0, N), H.history["val_loss"], label="val_loss")
    plt.plot(np.arange(0, N), H.history["acc"], label="train_acc")
    plt.plot(np.arange(0, N), H.history["val_acc"], label="val_acc")
    plt.title("Training Loss and Accuracy on traffic-sign classifier")
    plt.xlabel("Epoch #")
    plt.ylabel("Loss/Accuracy")
    plt.legend(loc="lower left")
    plt.savefig(args["plot"])
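Note that the history keys "acc"/"val_acc" used in the plots above belong to older Keras releases; newer Keras and tf.keras report them as "accuracy"/"val_accuracy" when metrics=["accuracy"] is passed. A small sketch that tolerates either naming:

# hedged sketch: look up the accuracy curves under whichever key the installed Keras uses
acc_key = "accuracy" if "accuracy" in H.history else "acc"
plt.plot(np.arange(0, N), H.history[acc_key], label="train_acc")
plt.plot(np.arange(0, N), H.history["val_" + acc_key], label="val_acc")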
Example #4
# USAGE
# python visualize_architecture.py

# import the necessary packages
from net.lenet import LeNet
from keras.utils import plot_model

# initialize LeNet and then write the network architecture
# visualization graph to disk
model = LeNet.build(28, 28, 1, 10)
plot_model(model, to_file="lenet.png", show_shapes=True)
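plot_model needs pydot and Graphviz installed to write the image. The net.lenet module imported above is not shown; the sketch below is a generic LeNet-style build() with the same signature, with filter counts and layer sizes chosen for illustration rather than copied from that module:

# hedged sketch of a LeNet-style build(); layer sizes are illustrative assumptions
from keras.models import Sequential
from keras.layers import Conv2D, MaxPooling2D, Activation, Flatten, Dense

class LeNet:
    @staticmethod
    def build(width, height, depth, classes):
        model = Sequential()
        # first CONV => RELU => POOL block
        model.add(Conv2D(20, (5, 5), padding="same", input_shape=(height, width, depth)))
        model.add(Activation("relu"))
        model.add(MaxPooling2D(pool_size=(2, 2), strides=(2, 2)))
        # second CONV => RELU => POOL block
        model.add(Conv2D(50, (5, 5), padding="same"))
        model.add(Activation("relu"))
        model.add(MaxPooling2D(pool_size=(2, 2), strides=(2, 2)))
        # fully connected head followed by softmax classifier
        model.add(Flatten())
        model.add(Dense(500))
        model.add(Activation("relu"))
        model.add(Dense(classes))
        model.add(Activation("softmax"))
        return model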
Example #5
    model_zoo = [
        'lenet', 'alexnet', 'vgg11', 'vgg16', 'vgg_imageNet', 'googlenetv1',
        'resnet', 'densenet'
    ]
    model_to_train = ['densenet']

    for op in op_to_run:
        if op == optimizer_zoo[0]:
            optimizer = Adam(lr=INIT_LR, decay=INIT_LR / EPOCHS)
        else:
            optimizer = SGD(lr=INIT_LR)

        for model_name in model_to_train:

            if model_name == model_zoo[0]:  # LeNet
                model = LeNet.build(input_shape, CLASS_NUM)
            elif model_name == model_zoo[1]:  # AlexNet
                model = AlexNet.build(input_shape,
                                      CLASS_NUM,
                                      dense_layers=2,
                                      hidden_units=512,
                                      dropout_rate=0.5,
                                      subsample_initial_block=False)
            elif model_name == model_zoo[2]:  # VGG11
                model = Vgg11.build(input_shape,
                                    CLASS_NUM,
                                    dense_layers=2,
                                    hidden_units=512,
                                    dropout_rate=0.5,
                                    sc_ratio=4)
            elif model_name == model_zoo[3]:  # VGG16
Example #6
input_shape = (img_rows, img_cols, 1)
chanDim = -1

x_train = x_train.astype('float32')
x_test = x_test.astype('float32')
x_train /= 255
x_test /= 255
print('x_train shape:', x_train.shape)
print(x_train.shape[0], 'train samples')
print(x_test.shape[0], 'test samples')

# convert class vectors to binary class matrices
y_train = keras.utils.to_categorical(y_train, num_classes)
y_test = keras.utils.to_categorical(y_test, num_classes)

model = LeNet.build(chanDim, input_shape, num_classes)
#model.compile(loss='mse', optimizer=SGD(lr=0.1), metrics=['accuracy'])
#model.compile(loss='categorical_crossentropy', optimizer=SGD(lr=0.1), metrics=['accuracy'])
model.compile(loss='categorical_crossentropy',
              optimizer='adam',
              metrics=['accuracy'])

# start training -----------------------------------
#model.fit(x_train, y_train, batch_size=1000, epochs=20)
model.fit(x_train,
          y_train,
          batch_size=batch_size,
          epochs=epochs,
          verbose=1,
          validation_data=(x_test, y_test))
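After the fit above, the held-out split can be scored directly; a brief sketch using model.evaluate on the same x_test/y_test data:

# hedged sketch: report held-out loss and accuracy for the model trained above
score = model.evaluate(x_test, y_test, verbose=0)
print('Test loss:', score[0])
print('Test accuracy:', score[1])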
# partition the data into training and testing splits using 75% of
# the data for training and the remaining 25% for testing
(trainX, testX, trainY, testY) = train_test_split(data,
                                                  labels,
                                                  test_size=0.25,
                                                  random_state=42)

# convert the labels from integers to vectors
lb = LabelBinarizer().fit(trainY)
trainY = lb.transform(trainY)
testY = lb.transform(testY)

# initialize the model
print("[INFO] compiling model...")
model = LeNet.build(width=28, height=28, depth=1, classes=9)
opt = SGD(lr=0.01)
model.compile(loss="categorical_crossentropy",
              optimizer=opt,
              metrics=["accuracy"])

# train the network
print("[INFO] training network...")
H = model.fit(trainX,
              trainY,
              validation_data=(testX, testY),
              batch_size=32,
              epochs=15,
              verbose=1)

# evaluate the network
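A hedged sketch of how the evaluation step might proceed, assuming scikit-learn's classification_report and the label binarizer fitted above:

# hedged sketch: per-class evaluation of the trained model
from sklearn.metrics import classification_report

predictions = model.predict(testX, batch_size=32)
print(classification_report(testY.argmax(axis=1),
                            predictions.argmax(axis=1),
                            target_names=[str(c) for c in lb.classes_]))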