Example #1
    def train(self):
        batch_size = 32
        epochs = 50
        xTrain, yTrain = self.trainData
        valData = self.valData

        #Set up generator for data augmentation.
        dataGen = augmentor(rotation_range=359, horizontal_flip=True, vertical_flip=True)
        dataGen.fit(xTrain)

        #Set up callbacks list for checkpoints. This will save the model every time validation loss improves.
        callbacks = [ModelCheckpoint(self.model_file, monitor='val_loss', verbose=1, save_best_only=True)]

        #Train the model and save its training progress in a history object.
        history = self.model.fit_generator(dataGen.flow(xTrain, yTrain, batch_size=batch_size),
                                           steps_per_epoch=len(xTrain) // batch_size, epochs=epochs,
                                           verbose=2, validation_data=valData, callbacks=callbacks)
        #Save the per-epoch metric histories to text files.
        a = history.history['acc']
        va = history.history['val_acc']
        l = history.history['loss']
        vl = history.history['val_loss']

        self.modelAccuracy = max(a)

        with open("acc_history.txt", 'w') as acc, \
             open("val_acc_history.txt", 'w') as val_acc, \
             open("loss_history.txt", 'w') as loss, \
             open("val_loss_history.txt", 'w') as val_loss:
            for accVal, vacc, lossVal, vloss in zip(a, va, l, vl):
                acc.write("{}\n".format(accVal))
                val_acc.write("{}\n".format(vacc))
                loss.write("{}\n".format(lossVal))
                val_loss.write("{}\n".format(vloss))

        #Plot and save training and validation accuracy data.
        plt.plot(history.history['acc'])
        plt.plot(history.history['val_acc'])
        plt.title('model accuracy')
        plt.ylabel('accuracy')
        plt.xlabel('epoch')
        plt.legend(['train', 'validation'], loc='lower right')
        plt.savefig("Accuracy.png")
        plt.clf()

        #Plot and save training and validation loss data.
        plt.plot(history.history['loss'])
        plt.plot(history.history['val_loss'])
        plt.title('model loss')
        plt.ylabel('loss')
        plt.xlabel('epoch')
        plt.legend(['train', 'validation'], loc='upper right')
        plt.savefig("Loss.png")
#net = filter_CNN()
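A note on the training call above: fit_generator is deprecated in TensorFlow 2.x, where Model.fit accepts generators directly. A minimal sketch of the equivalent call, assuming the code is ported to tf.keras (all names come from the example above):

        # Sketch only: assumes tf.keras (TensorFlow 2.x), where fit() takes generators.
        history = self.model.fit(dataGen.flow(xTrain, yTrain, batch_size=batch_size),
                                 steps_per_epoch=len(xTrain) // batch_size,
                                 epochs=epochs,
                                 verbose=2,
                                 validation_data=valData,
                                 callbacks=callbacks)

Under tf.keras the history keys are 'accuracy' and 'val_accuracy' rather than 'acc' and 'val_acc', so the logging and plotting code above would need the matching key names.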
Example #2
    def train(self):
        """
        Function to train a fresh model.
        :return: None.
        """

        batch_size = 64
        epochs = 150
        xTrain, yTrain = self.trainData
        valData = self.valData

        #Set up generator for data augmentation.
        dataGen = augmentor(rotation_range=90,
                            horizontal_flip=True,
                            vertical_flip=True)
        dataGen.fit(xTrain)

        #Set up callbacks list for checkpoints. This will save the model every time validation loss improves.
        callbacks = [
            ModelCheckpoint(self.model_file,
                            monitor='val_loss',
                            verbose=1,
                            save_best_only=True)
        ]

        #Train the model and save its training progress in a history object.
        history = self.model.fit_generator(dataGen.flow(xTrain,
                                                        yTrain,
                                                        batch_size=batch_size),
                                           steps_per_epoch=len(xTrain) // batch_size,
                                           epochs=epochs,
                                           verbose=2,
                                           validation_data=valData,
                                           callbacks=callbacks)
        self.plot_training_metrics(history)
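plot_training_metrics is referenced here but not included in the snippet. A minimal sketch of what it might look like, assuming it mirrors the plotting code from Example #1 (an illustration, not the project's actual implementation):

    def plot_training_metrics(self, history):
        #Plot and save training and validation accuracy.
        plt.plot(history.history['acc'])
        plt.plot(history.history['val_acc'])
        plt.title('model accuracy')
        plt.ylabel('accuracy')
        plt.xlabel('epoch')
        plt.legend(['train', 'validation'], loc='lower right')
        plt.savefig("Accuracy.png")
        plt.clf()

        #Plot and save training and validation loss.
        plt.plot(history.history['loss'])
        plt.plot(history.history['val_loss'])
        plt.title('model loss')
        plt.ylabel('loss')
        plt.xlabel('epoch')
        plt.legend(['train', 'validation'], loc='upper right')
        plt.savefig("Loss.png")
        plt.clf()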
Example #3
import numpy as np
from keras.callbacks import ModelCheckpoint
from keras import backend as K
from keras.layers import Conv2D, MaxPooling2D, BatchNormalization, AveragePooling2D, Activation
from keras.layers import Dense, Dropout, Flatten, Input, Add, Concatenate, Reshape, Average
from keras.models import Model, load_model
from keras.optimizers import Adam, Adadelta
from keras.preprocessing.image import ImageDataGenerator as augmentor
from util import Data_Loader as dl
import os

import matplotlib
matplotlib.use('Agg')
import matplotlib.pyplot as plt

dataGen = augmentor(rotation_range=359,
                    horizontal_flip=True,
                    vertical_flip=True)


def multi_input_data_generator(x1, x2, y1, y2, batch_size):
    """Yield paired augmented batches as ([input1, input2], [target1, target2])."""
    genX1 = dataGen.flow(x1, y1, batch_size=batch_size)
    genX2 = dataGen.flow(x2, y2, batch_size=batch_size)
    while True:
        out1 = next(genX1)
        out2 = next(genX2)
        yield [out1[0], out2[0]], [out1[1], out2[1]]
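

# Usage sketch (not part of the original snippet): a two-input, two-output
# Keras model built elsewhere could consume this generator via fit_generator.
# The names model, x1, x2, y1, y2 and the step count below are illustrative
# placeholders, not values defined in this file.
#
#     gen = multi_input_data_generator(x1, x2, y1, y2, batch_size=32)
#     model.fit_generator(gen,
#                         steps_per_epoch=len(x1) // 32,
#                         epochs=10)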


class Reconnet(object):
    def __init__(self, modelNumber):
        if not os.path.exists("data/model_{}".format(modelNumber)):