import sys

sys.path.insert(0, 'class')
import pathconfig
import matplotlib.pyplot as plt
import preprocessing_images
import model
'''
   Script to train the transfer-learning network.
'''

paths = pathconfig.paths()  #instantiate the path configuration
preprocessing = preprocessing_images.Preprocessing_Images(
)  #instantiate the image-preprocessing class
Model = model.Model()  #instantiate the generic model wrapper
model_Tran = model.Transfer_Learning()  #instantiate the transfer-learning CNN
path_model = model_Tran.path_model  #path of the model

path_weights = model_Tran.path_weights  #path of the weights
print('Training set path: {}'.format(
    preprocessing.trainingset))  #path of the training set
print('Test set path: {}'.format(
    preprocessing.testset))  #path of the test set

history = model_Tran.training_model()  #train the CNN model

name = model_Tran.name
Model.plot_history(history, name)  #plot the history of training
#Model.save_model(model,path_model,path_weights)                   #save model
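
# If the trained model is saved (see the commented save_model call above), it
# can be reloaded later with the same wrapper class, as the evaluation script
# further below does:
#   loaded = Model.load_model(path_model, path_weights)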
import preprocessing
import pathconfig

######################################################################
###################### RUN FOR PREPROCESSING  ########################
######################################################################
""" in this file py, is possible run all preprocessing in order to elaborate data for the training of model.
    It use the class Preprocessing and its relative sub-class. Many files were deleted , like Semcor, so is not
    possible run this part of preprocessing"""

paths = pathconfig.paths()  #create instance for paths from config_path file

corpus = preprocessing.Preprocessing.Corpus()  # instantiate the Corpus sub-class

input_x = "input_x"
babelnet_task = 'babelnet'
domains_task = 'domains'
lexnames_task = 'lexnames'
""" Creation of all corpus needs for training sets"""

print('START corpus TRAINING...')
corpus.create_and_save_corpus_x(path_xml_par=paths.SEMCOR_PATH,
                                path_save_corpus_par=paths.CORPUS_X_PATH
                                )  #creates the corpus for input data
corpus.create_and_save_corpus_babelnet_y(
    path_xml_par=paths.SEMCOR_PATH,
    path_gold_key_par=paths.SEMCOR_GOLD_KEY_PATH,
    path_save_corpus_par=paths.CORPUS_BABELNET_LABELS_PATH
)  #creates the fine-grained (BabelNet) label corpus
corpus.create_and_save_corpus_domains_y(
    path_corpus_babelnet_par=paths.CORPUS_BABELNET_LABELS_PATH,
    path_save_corpus_par=paths.CORPUS_DOMAINS_LABELS_PATH  #assumed name; the original excerpt is truncated here
)  #creates the coarse-grained (domains) label corpus
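
# By analogy with the calls above, the coarse-grained lexnames corpus would
# presumably be built in the same way; the method and path names below are
# assumptions, since the original excerpt is truncated at this point.
corpus.create_and_save_corpus_lexnames_y(
    path_corpus_babelnet_par=paths.CORPUS_BABELNET_LABELS_PATH,
    path_save_corpus_par=paths.CORPUS_LEXNAMES_LABELS_PATH
)  #assumed: creates the corpus of lexname labels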
import os
import sys

import numpy as np
import tensorflow as tf
from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import Activation, Dense, Dropout, Flatten
from tensorflow.keras import optimizers
from skimage.io import imread  # assumed source of imread

import pathconfig

# get_bottleneck_features, save_model, load_model, confusion_matrix and
# plots_loss_accuracy_from_training are helpers defined elsewhere in this
# repository


def main(mode, dataset_eva):
    def build_fully_connected(input_shape, num_classes):
        """
            Create a fully-connected model to train or test on UC Merced dataset.
            """
        model = Sequential()
        model.add(Flatten(input_shape=input_shape))
        model.add(Dense(256))
        model.add(Activation('relu'))
        model.add(Dropout(0.5))
        model.add(Dense(num_classes, activation='softmax'))
        return model

    def train_model_VGG(x_train, y_train, x_validate, y_validate, num_classes):
        # Build, compile, and fit the model (using the function parameters
        # rather than the enclosing X/Y dicts)
        model = build_fully_connected(input_shape=x_train.shape[1:],
                                      num_classes=num_classes)
        adam = optimizers.Adam(learning_rate=0.0001)
        model.compile(optimizer=adam,
                      loss='categorical_crossentropy',
                      metrics=['accuracy'])
        model_fit_history = model.fit(x_train,
                                      y_train,
                                      batch_size=64,
                                      epochs=50,
                                      verbose=2,
                                      validation_data=(x_validate,
                                                       y_validate))

        epochs = np.argmin(model_fit_history.history['val_loss']) + 1
        print(f'Lowest validation loss at epoch {epochs}; '
              'retraining for that many epochs')

        plots_loss_accuracy_from_training(
            model_fit_history)  # plot loss and accuracy curves from training

        # Merge training and validation data
        X_train = np.concatenate([x_train,
                                  x_validate])  #merge the input arrays
        Y_train = np.concatenate([y_train,
                                  y_validate])  #merge the label arrays

        # Randomly shuffle X and Y
        shuffle_index = np.random.permutation(len(X_train))
        X_train = X_train[shuffle_index]
        Y_train = Y_train[shuffle_index]
        model = build_fully_connected(input_shape=X_train.shape[1:],
                                      num_classes=num_classes)
        model.compile(optimizer=optimizers.Adam(learning_rate=0.0001),
                      loss='categorical_crossentropy',
                      metrics=['accuracy'])  # fresh optimizer for the new model
        print('Train with Training dataset + Validation dataset as input.')
        model_fit_history = model.fit(
            X_train, Y_train, batch_size=64, epochs=epochs,
            verbose=0)  # train on the merged training + validation data
        save_model(model, path_save_model_vgg, path_save_weight_vgg)

    paths = pathconfig.paths()  #path configuration for this solution

    path_save_model_vgg = paths.MODEL_VGG  #path where the VGG model is saved
    path_save_weight_vgg = paths.WEIGHTS_VGG  #path where the VGG weights are saved

    # Collect class names from directory names in './data/UCMerced_LandUse/Images/'
    sources_dataset = paths.FLOW_TRAIN  #source directory of the dataset
    class_names = sorted(os.listdir(
        sources_dataset))  # sorted to match flow_from_directory's class order
    target_dirs = {
        target: os.path.join(paths.BASE_FLOW, target)
        for target in ['train', 'validate', 'test']
    }
    for target_dir in target_dirs.values():
        if not os.path.isdir(target_dir):
            raise FileNotFoundError(
                'Check that the "flow" folder contains the train/validate/'
                'test datasets: missing {}'.format(target_dir))

    #Calculate the training image means by channel (3)
    means = []
    for root, _, filenames in os.walk(target_dirs['train']):
        for filename in filenames:  # walk the training directory of flow
            filepath = os.path.join(root, filename)
            image = imread(filepath)  #read image
            means.append(np.mean(image, axis=(0, 1)))
    channel_means = np.mean(means, axis=0)
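
    # Illustrative sanity check (assumes 3-channel RGB training images):
    # channel_means holds one mean per channel, and subtracting it from an
    # image broadcasts across all pixels.
    assert channel_means.shape == (3, )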

    # Let's try the VGG16 model from keras
    pretrained_model = tf.keras.applications.VGG16(include_top=False,
                                                   weights='imagenet')

    # Extract bottleneck features from the pretrained model for each dataset
    # split; UC Merced has 21 classes
    num_classes = len(class_names)  #number of classes
    X, Y = dict(), dict()  #inputs and labels per dataset split
    preprocess = lambda x: x - channel_means  #subtract per-channel training means
    for dataset in ['train', 'validate', 'test']:
        X[dataset], Y[dataset] = get_bottleneck_features(
            model=pretrained_model,
            dataset=dataset,
            target_dirs=target_dirs,
            preproc_func=preprocess)
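
    # Note: X[dataset] holds the raw VGG16 bottleneck activations (for 224x224
    # inputs that would be shape (N, 7, 7, 512) -- an assumption here), which
    # is why build_fully_connected starts with a Flatten layer; Y[dataset] is
    # one-hot encoded to match the categorical_crossentropy loss.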

    # Re-train the VGG classifier head by calling main with mode='train'
    if mode == 'train':
        train_model_VGG(X['train'], Y['train'], X['validate'], Y['validate'],
                        num_classes)

    # Load the saved VGG-based model
    loaded_model = load_model(path_save_model_vgg, path_save_weight_vgg)

    # Evaluate the VGG model on the chosen split (confusion matrix)
    if dataset_eva == 'test':
        confusion_matrix(loaded_model, X['test'], Y['test'], class_names)
    elif dataset_eva == 'train':
        confusion_matrix(loaded_model, X['train'], Y['train'], class_names)
    else:
        confusion_matrix(loaded_model, X['validate'], Y['validate'],
                         class_names)
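

# A minimal sketch of what the get_bottleneck_features helper used above
# could look like; the real helper is defined elsewhere in this repository,
# so the generator settings (target size, batch size) and return shapes here
# are assumptions, not the author's implementation.
from tensorflow.keras.preprocessing.image import ImageDataGenerator
from tensorflow.keras.utils import to_categorical


def get_bottleneck_features_sketch(model, dataset, target_dirs, preproc_func):
    datagen = ImageDataGenerator(preprocessing_function=preproc_func)
    generator = datagen.flow_from_directory(
        target_dirs[dataset],
        target_size=(224, 224),  # assumed VGG16 input size
        batch_size=32,
        class_mode='categorical',
        shuffle=False)  # keep file order so labels line up with predictions
    features = model.predict(generator, verbose=1)  # e.g. (N, 7, 7, 512)
    labels = to_categorical(generator.classes, generator.num_classes)
    return features, labels


# An assumed command-line entry point; the excerpt does not show how main()
# is invoked, so the argument handling below is a sketch.
if __name__ == '__main__':
    run_mode = sys.argv[1] if len(sys.argv) > 1 else 'test'  # 'train' re-trains the head
    eval_split = sys.argv[2] if len(sys.argv) > 2 else 'test'  # 'train' | 'validate' | 'test'
    main(run_mode, eval_split)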
import numpy as np
import sys
sys.path.insert(0, 'class')
import model
import pathconfig
import preprocessing_images
import evaluation

'''
   Script to evaluate the transfer-learning network.
'''


paths = pathconfig.paths()


Model = model.Model()
Evaluation = evaluation.Evaluation()
model_Transfer_Learning = model.Transfer_Learning()  #transfer-learning model
path_test_set = paths.PATH_IMAGES_BLIND_TEST  #dataset used for evaluation; to use the standard
                                              #test set instead, change this to paths.PATH_IMAGES_TEST
path_model = model_Transfer_Learning.path_model
path_weights = model_Transfer_Learning.path_weights

loaded_model = Model.load_model(path_model, path_weights)  #renamed to avoid shadowing the imported model module

Preprocessing_Images = preprocessing_images.Preprocessing_Images()
test_datagen, val_steps, classnames = Preprocessing_Images.get_set_from_path(setdata=path_test_set)

predictions = loaded_model.predict(
    test_datagen, verbose=1,
    steps=val_steps)  #predict_generator is deprecated; predict accepts generators
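
# A small assumed follow-up: mapping the softmax outputs back to class names.
# predictions has shape (num_images, num_classes); classnames comes from the
# generator above.
predicted_classes = [classnames[i] for i in np.argmax(predictions, axis=1)]
print('First predictions: {}'.format(predicted_classes[:10]))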