Example #1
0
    def compile_model(self, model=None):
        """Compile ``self.model`` with a weighted categorical cross-entropy
        loss, an Adam optimizer and a class-1 F1 metric, and create the
        EarlyStopping callback that monitors validation F1.

        NOTE(review): ``patience`` and ``weights`` are not defined in this
        method -- presumably module-level globals; verify. ``model`` is
        accepted but unused.
        """
        # Per-class F1 for label 1; its metric name ('f1_score') is what
        # EarlyStopping sees as 'val_f1_score'.
        f1_score = km.categorical_f1_score(label=1)
        self.earlyStopping = EarlyStopping(monitor='val_f1_score',
                                           restore_best_weights=True,
                                           patience=patience,
                                           verbose=0,
                                           mode='max',
                                           min_delta=0.01)
        adam = optimizers.Adam(lr=self.lr, decay=self.lr_decay)
        loss = weighted_categorical_crossentropy(weights)
        # Original line was truncated (unclosed call); completed to match the
        # equivalent compile_model variant elsewhere in this file.
        self.model.compile(optimizer=adam, loss=loss, metrics=[f1_score])

    def create_model(self):
        """Wrap ``self.build_model`` in a scikit-learn compatible classifier."""
        self.model = KerasClassifier(
            build_fn=self.build_model,
            epochs=self.epoch,
            batch_size=self.batch_size,
            verbose=1,
        )

    def make_report(self, i, y_true, y_pred, city, method):
        """Build a per-class classification report with an extra 'auc' row.

        y_true / y_pred are one-hot (or probability) arrays; argmax over the
        last axis recovers the class labels. Returns a DataFrame indexed by
        metric name.
        """
        data_frame = classification_report(y_true.argmax(axis=-1),
                                           y_pred.argmax(axis=-1),
                                           output_dict=True)
        df = pd.DataFrame(data_frame)
        df = df.reset_index()
        roc_dict = self.roc_auc(i, y_true, y_pred, city, method)
        # DataFrame.append was removed in pandas 2.0; pd.concat with a
        # one-row frame is the long-supported equivalent (same column
        # alignment and NaN fill behaviour).
        auc_row = pd.DataFrame([{'index': 'auc', '0': roc_dict[0],
                                 '1': roc_dict[1],
                                 'micro avg': roc_dict['micro'],
                                 'macro avg': roc_dict['macro']}])
        df = pd.concat([df, auc_row], ignore_index=True)
        df = df.set_index('index')
        return df

    def roc_auc(self, i, y_test, y_score, city, method):
        """Return the ROC-AUC dictionary computed by ``roc_auc_compute``.

        ``i``, ``city`` and ``method`` are accepted for interface
        compatibility but not used here; fpr/tpr are discarded.
        """
        _fpr, _tpr, auc_values = roc_auc_compute(y_test, y_score)
        return auc_values
Example #2
0
def hybridModel(embeddingMatrix, maxDataLenght, embeddingVectorLength,
                numAttributes, numNeurons):
    """
    Build and compile a CNN + BiLSTM hybrid classifier over a frozen
    embedding layer.

    embeddingMatrix: pre-trained weights for the Embedding layer.
    maxDataLenght: input sequence length.
    embeddingVectorLength: embedding output dimension.
    numAttributes: vocabulary size (Embedding input_dim).
    numNeurons: LSTM units per direction.

    Returns the compiled Keras model (2-way softmax head, categorical
    cross-entropy, accuracy / F1 / precision / recall metrics).
    """
    model = Sequential()
    # Pre-trained embeddings are frozen (trainable=False).
    model.add(
        Embedding(input_dim=numAttributes,
                  output_dim=embeddingVectorLength,
                  weights=[embeddingMatrix],
                  input_length=maxDataLenght,
                  trainable=False))
    model.add(Dropout(0.2))
    # Local n-gram features, then 4x temporal downsampling.
    model.add(Conv1D(64, 5, activation='relu'))
    model.add(MaxPooling1D(pool_size=4))
    # Forward and backward LSTM outputs are summed, not concatenated.
    model.add(
        Bidirectional(LSTM(numNeurons, return_sequences=False),
                      merge_mode="sum"))
    model.add(
        Dense(2,
              activation='softmax',
              kernel_regularizer=regularizers.l2(0.001)))
    model.compile(loss='categorical_crossentropy',
                  optimizer='adam',
                  metrics=[
                      'accuracy',
                      km.categorical_f1_score(),
                      km.categorical_precision(),
                      km.categorical_recall()
                  ])

    return model
Example #3
0
def get_custom_metrics():
    """Return a name -> callable mapping of the custom objects needed to
    deserialize models that were saved with keras-metrics metrics plus
    the backend ``sin`` and ``abs`` functions."""
    metric_fns = [
        keras_metrics.categorical_f1_score(),
        keras_metrics.categorical_precision(),
        keras_metrics.categorical_recall(),
    ]
    # keras-metrics objects expose the registered name via __name__.
    custom_metrics = {fn.__name__: fn for fn in metric_fns}
    custom_metrics["sin"] = K.sin
    custom_metrics["abs"] = K.abs
    return custom_metrics
Example #4
0
 def compile_model(self, model=None):
     """Compile ``self.model`` with a weighted categorical cross-entropy
     loss, an Adam optimizer and a class-1 F1 metric, and create the
     EarlyStopping callback that monitors validation F1.

     NOTE(review): ``patience`` and ``weights`` are not defined in this
     method -- presumably module-level globals; verify. ``model`` is
     accepted but unused.
     """
     # Per-class F1 for label 1; its metric name ('f1_score') is what
     # EarlyStopping sees as 'val_f1_score'.
     f1_score = km.categorical_f1_score(label=1)
     self.earlyStopping = EarlyStopping(monitor='val_f1_score',
                                        restore_best_weights=True,
                                        patience=patience,
                                        verbose=0,
                                        mode='max')
     adam = optimizers.Adam(lr=self.lr, decay=self.lr_decay)
     loss = weighted_categorical_crossentropy(weights)
     self.model.compile(optimizer=adam, loss=loss, metrics=[f1_score])
Example #5
0
def test_on_images(network, images_dir, models_dir, *args):
    """Evaluate every saved image-only model ('A*' checkpoints) of
    *network* on the validation and test image generators, producing a
    report for each checkpoint and split.

    NOTE(review): the unpack below re-binds ``models_dir``, shadowing the
    parameter of the same name -- confirm callers pass the intended value
    in *args. ``gpu_number`` is read from enclosing scope.
    """
    print('Testing on images...')
    # Extract parameters from args
    img_width, img_height, batch_size, models_dir, figures_dir = args

    # Get image generators
    num_images_val, num_classes_val, val_gen = get_image_generator(
        network, images_dir, 'val', img_width, img_height, batch_size)

    num_images_test, num_classes_test, test_gen = get_image_generator(
        network, images_dir, 'test', img_width, img_height, batch_size)

    # Make sure val/test have the same number of classes
    assert num_classes_val == num_classes_test

    # Get network model for an image input shape
    input_shape = (img_width, img_height, 3)
    main_input = Input(shape=input_shape)
    base_model, last_layer_number = get_cnn_model(network, input_shape,
                                                  main_input)

    # Classification head: global average pooling + softmax over classes.
    x = base_model.output
    x = GlobalAveragePooling2D()(x)
    predictions = Dense(num_classes_val, activation='softmax')(x)
    model = Model(base_model.input, outputs=predictions)

    # Image-only checkpoints are saved with an 'A' prefix.
    path = f'{models_dir}/{network}'
    models_list = glob(f'{path}/A*', )

    net_id = f'{os.path.basename(models_dir)}A_'
    print('Loading model...')
    for h5_model in models_list:
        if gpu_number > 1:
            # Multi-GPU checkpoints must be loaded as full models (with the
            # custom F1 metric registered); the single-GPU template model
            # sits one layer from the end.
            multi_gpu_model = load_model(h5_model,
                                         custom_objects={
                                             'categorical_f1_score':
                                             km.categorical_f1_score()
                                         })
            model = multi_gpu_model.layers[-2]
        else:
            model.load_weights(h5_model)
        print('Validation')
        get_report(val_gen, num_images_val, num_classes_val, batch_size, model,
                   'val', network, net_id, figures_dir)
        print('Test')
        get_report(test_gen, num_images_test, num_classes_test, batch_size,
                   model, 'test', network, net_id, figures_dir)
Example #6
0
def load_model(model_path):
    """Load a saved Keras model from *model_path*, registering the custom
    objects (AdaBound optimizer and keras-metrics metrics) it was saved
    with so deserialization succeeds.

    Returns the loaded, already-compiled model.
    """
    precision = km.categorical_precision()
    recall = km.categorical_recall()
    f1_score = km.categorical_f1_score()
    model = keras.models.load_model(model_path,
                                    custom_objects={
                                        'AdaBound': AdaBound,
                                        'categorical_precision': precision,
                                        'categorical_recall': recall,
                                        'categorical_f1_score': f1_score
                                    })
    # The original also constructed a fresh AdaBound optimizer here but
    # never attached it to the model; that dead code has been removed.
    return model
Example #7
0
def get_metrics_fresh(metrics, nr_classes):
    """
    Re-create per-class metric tensors from a list of (possibly stale)
    metric objects or names.

    After re-compiling a model the metric placeholders have to be
    rebuilt, so this inspects the string representation of each entry in
    *metrics* and instantiates one fresh per-class keras-metrics object
    per class for every metric kind that was present.

    Returns ['accuracy'] followed by the fresh metric objects.
    """
    # Detect which metric families were requested by name.
    wants_f1 = any("f1_score" in str(m) for m in metrics)
    wants_precision = any("precision" in str(m) for m in metrics)
    wants_recall = any("recall" in str(m) for m in metrics)

    fresh = []
    if wants_f1:
        fresh.extend(keras_metrics.categorical_f1_score(label=c)
                     for c in range(nr_classes))
    if wants_precision:
        fresh.extend(keras_metrics.categorical_precision(label=c)
                     for c in range(nr_classes))
    if wants_recall:
        fresh.extend(keras_metrics.categorical_recall(label=c)
                     for c in range(nr_classes))

    return ['accuracy'] + fresh
Example #8
0
def get_model():
    """Build and compile an Xception-based classifier from scratch
    (weights=None) with an AdaBound optimizer and categorical
    precision / recall / F1 metrics."""
    input_tensor = Input(shape=(IMAGE_SIZE[0], IMAGE_SIZE[1], 3))
    base_model = Xception(input_shape=(IMAGE_SIZE[0], IMAGE_SIZE[1], 3),
                          include_top=False,
                          weights=None,
                          input_tensor=input_tensor,
                          pooling='avg',
                          classes=N_CLASSES)

    # Softmax classification head on top of the pooled Xception features.
    predictions = Dense(N_CLASSES, activation='softmax')(base_model.output)
    model = keras.models.Model(inputs=base_model.input, outputs=predictions)

    optimizer = AdaBound(lr=0.01,
                         final_lr=LR_FINAL,
                         gamma=1e-03,
                         weight_decay=LR_FINAL / EPOCHS,
                         amsbound=False)
    model.compile(optimizer=optimizer,
                  loss='categorical_crossentropy',
                  metrics=['accuracy',
                           km.categorical_precision(),
                           km.categorical_recall(),
                           km.categorical_f1_score()])
    return model
Example #9
0
def Build_Model_RNN_Text(word_index,
                         embedding_index,
                         number_of_classes,
                         MAX_SEQUENCE_LENGTH,
                         EMBEDDING_DIM,
                         sparse_categorical,
                         min_hidden_layer_rnn,
                         max_hidden_layer_rnn,
                         min_nodes_rnn,
                         max_nodes_rnn,
                         random_optimizor,
                         dropout,
                         use_cuda=True,
                         use_bidirectional=True,
                         _l2=0.01,
                         lr=1e-3):
    """
    Build a (bidirectional) GRU text classifier with a randomly chosen
    depth and width.

    word_index: dict mapping word -> integer index.
    embedding_index: dict mapping word -> embedding vector (see
        data_helper.py); must contain an 'UNK' entry for unknown words.
    number_of_classes: 2 selects a sigmoid head with binary metrics,
        otherwise a softmax head is used.
    MAX_SEQUENCE_LENGTH: maximum length of the input text sequences.
    sparse_categorical: if truthy (multi-class only), compile with the
        sparse loss/metrics for integer labels.

    Returns (model, model_tmp); model_tmp is the same object as model.
    """

    # CuDNNGRU requires a GPU; fall back to the plain GRU kernel otherwise.
    Recurrent = CuDNNGRU if use_cuda else GRU

    model = Sequential()
    # Candidate widths and depths; one of each is sampled at random.
    values = list(range(min_nodes_rnn, max_nodes_rnn + 1))
    values_layer = list(range(min_hidden_layer_rnn - 1, max_hidden_layer_rnn))

    layer = random.choice(values_layer)
    print(layer)

    # Build the embedding matrix row by row; words missing from the
    # embedding index fall back to the 'UNK' vector.
    embedding_matrix = np.zeros((len(word_index) + 1, EMBEDDING_DIM))
    for word, i in word_index.items():
        embedding_vector = embedding_index.get(word)
        if embedding_vector is not None:
            # words not found in embedding index will be all-zeros.
            embedding_matrix[i] = embedding_vector
        else:
            embedding_matrix[i] = embedding_index['UNK']

    model.add(
        Embedding(len(word_index) + 1,
                  EMBEDDING_DIM,
                  weights=[embedding_matrix],
                  input_length=MAX_SEQUENCE_LENGTH,
                  trainable=True))

    gru_node = random.choice(values)
    print(gru_node)
    # Stacked recurrent layers; all but the last return full sequences.
    for i in range(0, layer):
        if use_bidirectional:
            model.add(
                Bidirectional(
                    Recurrent(gru_node,
                              return_sequences=True,
                              kernel_regularizer=l2(_l2))))
        else:
            model.add(
                Recurrent(gru_node,
                          return_sequences=True,
                          kernel_regularizer=l2(_l2)))
        model.add(Dropout(dropout))
    # Final recurrent layer collapses the sequence to a single vector.
    if use_bidirectional:
        model.add(
            Bidirectional(Recurrent(gru_node, kernel_regularizer=l2(_l2))))
    else:
        model.add(Recurrent(gru_node, kernel_regularizer=l2(_l2)))
    model.add(Dropout(dropout))
    model.add(Dense(256, activation='relu', kernel_regularizer=l2(_l2)))
    if number_of_classes == 2:
        # Binary head: single sigmoid unit with binary keras-metrics.
        model.add(Dense(1, activation='sigmoid', kernel_regularizer=l2(_l2)))
        model_tmp = model
        model.compile(loss='binary_crossentropy',
                      optimizer=optimizors(random_optimizor, lr),
                      metrics=[
                          'accuracy',
                          km.binary_precision(),
                          km.binary_recall(),
                          km.binary_f1_score(),
                          km.binary_true_positive(),
                          km.binary_true_negative(),
                          km.binary_false_positive(),
                          km.binary_false_negative()
                      ])
    else:
        model.add(
            Dense(number_of_classes,
                  activation='softmax',
                  kernel_regularizer=l2(_l2)))
        model_tmp = model
        if sparse_categorical:
            # Integer labels -> sparse loss and sparse metrics.
            model.compile(loss='sparse_categorical_crossentropy',
                          optimizer=optimizors(random_optimizor, lr),
                          metrics=[
                              'accuracy',
                              km.sparse_categorical_precision(),
                              km.sparse_categorical_recall(),
                              km.sparse_categorical_f1_score(),
                              km.sparse_categorical_true_positive(),
                              km.sparse_categorical_true_negative(),
                              km.sparse_categorical_false_positive(),
                              km.sparse_categorical_false_negative()
                          ])
        else:
            # One-hot labels -> categorical loss and metrics.
            model.compile(loss='categorical_crossentropy',
                          optimizer=optimizors(random_optimizor, lr),
                          metrics=[
                              'accuracy',
                              km.categorical_precision(),
                              km.categorical_recall(),
                              km.categorical_f1_score(),
                              km.categorical_true_positive(),
                              km.categorical_true_negative(),
                              km.categorical_false_positive(),
                              km.categorical_false_negative()
                          ])
    return model, model_tmp
Example #10
0
def Build_Model_RNN_Image(shape, number_of_classes, sparse_categorical,
                          min_nodes_rnn, max_nodes_rnn, random_optimizor,
                          dropout):
    """
    Build an image classifier from row/column LSTM encoders with
    randomly chosen widths.

    shape: input shape (w, h, p).
    number_of_classes: 2 selects a sigmoid head with binary metrics,
        otherwise a softmax head is used.
    sparse_categorical: if truthy (multi-class only), compile with the
        sparse loss/metrics for integer labels.

    Returns (model, model_tmp); model_tmp is the same object as model.

    NOTE(review): CuDNNLSTM is given recurrent_dropout, which the CuDNN
    kernels do not implement -- confirm this argument is accepted and has
    the intended effect in the Keras version used.
    """
    # Candidate widths; sampled independently for each encoder.
    values = list(range(min_nodes_rnn - 1, max_nodes_rnn))
    node = random.choice(values)

    x = Input(shape=shape)

    # Encodes a row of pixels using TimeDistributed Wrapper.
    encoded_rows = TimeDistributed(CuDNNLSTM(node,
                                             recurrent_dropout=dropout))(x)
    # A fresh width is sampled for the column encoder.
    node = random.choice(values)
    # Encodes columns of encoded rows.
    encoded_columns = CuDNNLSTM(node, recurrent_dropout=dropout)(encoded_rows)

    # Final predictions and model.
    #prediction = Dense(256, activation='relu')(encoded_columns)
    if number_of_classes == 2:
        prediction = Dense(1, activation='sigmoid')(encoded_columns)
    else:
        prediction = Dense(number_of_classes,
                           activation='softmax')(encoded_columns)

    model = Model(x, prediction)
    model_tmp = model
    if number_of_classes == 2:
        # Binary head: binary loss and binary keras-metrics.
        model.compile(loss='binary_crossentropy',
                      optimizer=optimizors(random_optimizor),
                      metrics=[
                          'accuracy',
                          km.binary_precision(),
                          km.binary_recall(),
                          km.binary_f1_score(),
                          km.binary_true_positive(),
                          km.binary_true_negative(),
                          km.binary_false_positive(),
                          km.binary_false_negative()
                      ])
    else:
        if sparse_categorical:
            # Integer labels -> sparse loss and sparse metrics.
            model.compile(loss='sparse_categorical_crossentropy',
                          optimizer=optimizors(random_optimizor),
                          metrics=[
                              'accuracy',
                              km.sparse_categorical_precision(),
                              km.sparse_categorical_recall(),
                              km.sparse_categorical_f1_score(),
                              km.sparse_categorical_true_positive(),
                              km.sparse_categorical_true_negative(),
                              km.sparse_categorical_false_positive(),
                              km.sparse_categorical_false_negative()
                          ])
        else:
            # One-hot labels -> categorical loss and metrics.
            model.compile(loss='categorical_crossentropy',
                          optimizer=optimizors(random_optimizor),
                          metrics=[
                              'accuracy',
                              km.categorical_precision(),
                              km.categorical_recall(),
                              km.categorical_f1_score(),
                              km.categorical_true_positive(),
                              km.categorical_true_negative(),
                              km.categorical_false_positive(),
                              km.categorical_false_negative()
                          ])
    return model, model_tmp
Example #11
0
def Build_Model_CNN_Image(shape, number_of_classes, sparse_categorical,
                          min_hidden_layer_cnn, max_hidden_layer_cnn,
                          min_nodes_cnn, max_nodes_cnn, random_optimizor,
                          dropout):
    """
    Build a 2-D CNN image classifier with a randomly chosen number of
    conv blocks and a randomly chosen filter count per block.

    shape: input shape (w, h, p).
    number_of_classes: 2 selects a sigmoid head with binary metrics,
        otherwise a softmax head is used.
    sparse_categorical: if truthy (multi-class only), compile with the
        sparse loss/metrics for integer labels.

    Returns (model, model_tmp); model_tmp is the same object as model.
    """

    model = Sequential()
    # Candidate filter counts and depths; sampled at random below.
    values = list(range(min_nodes_cnn, max_nodes_cnn))
    Layers = list(range(min_hidden_layer_cnn, max_hidden_layer_cnn))
    Layer = random.choice(Layers)
    Filter = random.choice(values)
    # Stem: two conv layers at the sampled filter width.
    model.add(Conv2D(Filter, (3, 3), padding='same', input_shape=shape))
    model.add(Activation('relu'))
    model.add(Conv2D(Filter, (3, 3)))
    model.add(Activation('relu'))

    # Conv blocks, each with a freshly sampled filter count.
    for i in range(0, Layer):
        Filter = random.choice(values)
        model.add(Conv2D(Filter, (3, 3), padding='same'))
        model.add(Activation('relu'))
        model.add(MaxPooling2D(pool_size=(2, 2)))
        model.add(Dropout(dropout))

    model.add(Flatten())
    model.add(Dense(256, activation='relu'))
    model.add(Dropout(dropout))
    if number_of_classes == 2:
        # Binary head: single sigmoid unit with binary keras-metrics.
        model.add(Dense(1, activation='sigmoid', kernel_constraint=maxnorm(3)))
        model_tmp = model
        model.compile(loss='binary_crossentropy',
                      optimizer=optimizors(random_optimizor),
                      metrics=[
                          'accuracy',
                          km.binary_precision(),
                          km.binary_recall(),
                          km.binary_f1_score(),
                          km.binary_true_positive(),
                          km.binary_true_negative(),
                          km.binary_false_positive(),
                          km.binary_false_negative()
                      ])
    else:
        model.add(
            Dense(number_of_classes,
                  activation='softmax',
                  kernel_constraint=maxnorm(3)))
        model_tmp = model
        if sparse_categorical:
            # Integer labels -> sparse loss and sparse metrics.
            model.compile(loss='sparse_categorical_crossentropy',
                          optimizer=optimizors(random_optimizor),
                          metrics=[
                              'accuracy',
                              km.sparse_categorical_precision(),
                              km.sparse_categorical_recall(),
                              km.sparse_categorical_f1_score(),
                              km.sparse_categorical_true_positive(),
                              km.sparse_categorical_true_negative(),
                              km.sparse_categorical_false_positive(),
                              km.sparse_categorical_false_negative()
                          ])
        else:
            # One-hot labels -> categorical loss and metrics.
            model.compile(loss='categorical_crossentropy',
                          optimizer=optimizors(random_optimizor),
                          metrics=[
                              'accuracy',
                              km.categorical_precision(),
                              km.categorical_recall(),
                              km.categorical_f1_score(),
                              km.categorical_true_positive(),
                              km.categorical_true_negative(),
                              km.categorical_false_positive(),
                              km.categorical_false_negative()
                          ])
    return model, model_tmp
Example #12
0
def Build_Model_DNN_Text(shape,
                         number_of_classes,
                         sparse_categorical,
                         min_hidden_layer_dnn,
                         max_hidden_layer_dnn,
                         min_nodes_dnn,
                         max_nodes_dnn,
                         random_optimizor,
                         dropout,
                         _l2=0.01,
                         lr=1e-3):
    """
    Build a fully-connected text classifier with a randomly chosen
    depth and per-layer width.

    shape: size of the input feature space (Dense input_dim).
    number_of_classes: 2 selects a sigmoid head with binary metrics,
        otherwise a softmax head is used.
    sparse_categorical: if truthy (multi-class only), compile with the
        sparse loss/metrics for integer labels.

    Returns (model, model_tmp); model_tmp is the same object as model.
    """
    model = Sequential()
    # Candidate depths and widths; sampled at random below.
    layer = list(range(min_hidden_layer_dnn, max_hidden_layer_dnn))
    node = list(range(min_nodes_dnn, max_nodes_dnn))

    Numberof_NOde = random.choice(node)
    nLayers = random.choice(layer)

    Numberof_NOde_old = Numberof_NOde
    model.add(
        Dense(Numberof_NOde,
              input_dim=shape,
              activation='relu',
              kernel_regularizer=l2(_l2)))
    model.add(Dropout(dropout))
    # Hidden stack: each layer gets a freshly sampled width.
    for i in range(0, nLayers):
        Numberof_NOde = random.choice(node)
        model.add(
            Dense(Numberof_NOde,
                  input_dim=Numberof_NOde_old,
                  activation='relu',
                  kernel_regularizer=l2(_l2)))
        model.add(Dropout(dropout))
        Numberof_NOde_old = Numberof_NOde
    if number_of_classes == 2:
        # Binary head: single sigmoid unit with binary keras-metrics.
        model.add(Dense(1, activation='sigmoid', kernel_regularizer=l2(_l2)))
        model_tmp = model
        model.compile(loss='binary_crossentropy',
                      optimizer=optimizors(random_optimizor, lr),
                      metrics=[
                          'accuracy',
                          km.binary_precision(),
                          km.binary_recall(),
                          km.binary_f1_score(),
                          km.binary_true_positive(),
                          km.binary_true_negative(),
                          km.binary_false_positive(),
                          km.binary_false_negative()
                      ])
    else:
        model.add(
            Dense(number_of_classes,
                  activation='softmax',
                  kernel_regularizer=l2(_l2)))
        model_tmp = model
        if sparse_categorical:
            # Integer labels -> sparse loss and sparse metrics.
            model.compile(loss='sparse_categorical_crossentropy',
                          optimizer=optimizors(random_optimizor, lr),
                          metrics=[
                              'accuracy',
                              km.sparse_categorical_precision(),
                              km.sparse_categorical_recall(),
                              km.sparse_categorical_f1_score(),
                              km.sparse_categorical_true_positive(),
                              km.sparse_categorical_true_negative(),
                              km.sparse_categorical_false_positive(),
                              km.sparse_categorical_false_negative()
                          ])
        else:
            # One-hot labels -> categorical loss and metrics.
            model.compile(loss='categorical_crossentropy',
                          optimizer=optimizors(random_optimizor, lr),
                          metrics=[
                              'accuracy',
                              km.categorical_precision(),
                              km.categorical_recall(),
                              km.categorical_f1_score(),
                              km.categorical_true_positive(),
                              km.categorical_true_negative(),
                              km.categorical_false_positive(),
                              km.categorical_false_negative()
                          ])
    return model, model_tmp
Example #13
0
 def __init__(self, opts, output_size=1, filter_length=50, hidden_size=128, kernel_size=2):
     """Build and compile a small 1-D CNN classifier.

     opts: dict; only opts['input_shape'] is read (flat feature count).
     output_size: number of output units.
     filter_length: number of Conv1D filters.
     hidden_size: units of the first Dense layer.
     kernel_size: Conv1D kernel size.

     NOTE(review): with the default output_size=1, a softmax over a
     single unit always outputs 1.0; also the final layer applies
     softmax while the loss uses from_logits=True -- confirm both are
     intended for the caller's setup.
     """
     self.model = tf.keras.models.Sequential()
     # Unreduced per-sample loss values are kept (Reduction.NONE).
     loss = tf.keras.losses.CategoricalCrossentropy(from_logits=True, reduction=tf.losses.Reduction.NONE)
     self.model.add(tf.keras.Input(opts['input_shape']))
     # Treat the flat feature vector as a single channels-first sequence.
     self.model.add(tf.keras.layers.Reshape([1, opts['input_shape']]))
     self.model.add(tf.keras.layers.Conv1D(filter_length, kernel_size, padding='valid', activation='relu', strides=1, data_format='channels_first'))
     self.model.add(tf.keras.layers.GlobalMaxPooling1D(data_format='channels_first'))
     self.model.add(tf.keras.layers.Dense(hidden_size))
     self.model.add(tf.keras.layers.Dropout(0.2))
     self.model.add(tf.keras.layers.Dense(32, activation='relu'))
     self.model.add(tf.keras.layers.Dense(output_size, activation='softmax'))
     self.model.compile(loss=loss, optimizer='adam', metrics=['accuracy', km.categorical_precision(), km.categorical_recall(), km.categorical_f1_score()])
Example #14
0
def test_combined(network, images_dir, csv_dir, csv_data, models_dir, *args):
    """Evaluate every saved combined image+CSV model ('B*' checkpoints)
    of *network* on the validation and test generators, producing a
    report for each checkpoint and split.

    NOTE(review): the unpack below re-binds ``models_dir``, shadowing the
    parameter of the same name -- confirm callers pass the intended value
    in *args. ``gpu_number`` is read from enclosing scope.
    """
    # Extract parameters from args
    img_width, img_height, batch_size, models_dir, figures_dir = args

    # Get combined (image + csv-feature) generators
    num_images_val, num_classes_val, features, val_gen = get_combined_generator(
        network, images_dir, csv_dir, csv_data, 'val', img_width, img_height,
        batch_size)

    num_images_test, num_classes_test, features, test_gen = get_combined_generator(
        network, images_dir, csv_dir, csv_data, 'test', img_width, img_height,
        batch_size)

    # Make sure val/test have the same number of classes
    assert num_classes_val == num_classes_test

    # Get network model for an image input shape
    input_shape = (img_width, img_height, 3)
    main_input = Input(shape=input_shape)
    base_model, last_layer_number = get_cnn_model(network, input_shape,
                                                  main_input)

    x = base_model.output
    x = GlobalAveragePooling2D()(x)
    x = Dense(512, activation='relu')(x)

    # Simple MLP branch over the CSV features
    aux_input = Input(shape=(features, ))
    aux = Dense(512, activation='relu')(aux_input)
    aux = Dropout(0.3)(aux)
    aux = Dense(512, activation='relu')(aux)
    aux = Dropout(0.3)(aux)
    aux = Dense(512, activation='relu')(aux)

    # Merge input models
    merge = concatenate([x, aux])
    predictions = Dense(num_classes_val, activation='softmax')(merge)

    model = Model(inputs=[main_input, aux_input], outputs=predictions)

    # Combined checkpoints are saved with a 'B' prefix.
    path = f'{models_dir}/{network}'
    models_list = glob(f'{path}/B*', )
    net_id = f'{os.path.basename(models_dir)}B_'

    for h5_model in models_list:
        if gpu_number > 1:
            # Multi-GPU checkpoints must be loaded as full models (with the
            # custom F1 metric registered); the template model sits one
            # layer from the end.
            multi_gpu_model = load_model(
                h5_model,
                custom_objects={
                    'categorical_f1_score': km.categorical_f1_score()
                },
            )
            model = multi_gpu_model.layers[-2]
        else:
            model.load_weights(h5_model)

        get_report(val_gen, num_images_val, num_classes_val, batch_size, model,
                   'val', network, net_id, figures_dir)
        # Bug fix: the test-set report previously reused the *validation*
        # counts (num_images_val / num_classes_val) instead of the test ones.
        get_report(test_gen, num_images_test, num_classes_test, batch_size,
                   model, 'test', network, net_id, figures_dir)
Example #15
0
def apply_CNN():
    """Build, compile and train a small 3-class image CNN on the
    'training' / 'test' directories (64x64 RGB images), tracking
    categorical precision / recall / F1.

    NOTE(review): this uses Keras-1-era arguments (Convolution2D(32, 3, 3),
    output_dim=, samples_per_epoch=, nb_epoch=, nb_val_samples=) and a raw
    tf.Session -- it only runs on old Keras 1.x / TF 1.x; verify the
    installed versions before relying on it.
    """
    # Importing the Keras libraries and packages
    from keras.models import Sequential
    from keras.layers import Convolution2D, BatchNormalization, Dropout
    from keras.layers import MaxPooling2D
    from keras.layers import Flatten
    from keras.layers import Dense
    #Imports for collecting metrics
    import keras_metrics as km
    import tensorflow as tf
    #import tensorflow.keras as keras

    # Initialising the CNN
    classifier = Sequential()

    # Step 1 - Convolution
    classifier.add(
        Convolution2D(32, 3, 3, input_shape=(64, 64, 3), activation='relu'))
    classifier.add(BatchNormalization())
    # Step 2 - Pooling
    classifier.add(MaxPooling2D(pool_size=(2, 2), strides=(2, 2)))
    classifier.add(Dropout(0.2))
    # Adding a second convolutional layer
    classifier.add(Convolution2D(32, 3, 3, activation='relu'))
    classifier.add(BatchNormalization())
    classifier.add(MaxPooling2D(pool_size=(2, 2), strides=(2, 2)))
    classifier.add(Dropout(0.5))
    classifier.add(Flatten())
    # Step 4 - Full connection
    classifier.add(Dense(output_dim=128, activation='relu'))
    classifier.add(BatchNormalization())
    classifier.add(Dropout(0.2))
    classifier.add(Dense(output_dim=3, activation='softmax'))  # categorical
    # SET METRICS
    precision = km.categorical_precision()
    recall = km.categorical_recall()
    f1 = km.categorical_f1_score()
    classifier.compile(optimizer='adam',
                       loss='categorical_crossentropy',
                       metrics=['accuracy', precision, recall, f1])

    # Part 2 - Fitting the CNN to the images
    from keras.preprocessing.image import ImageDataGenerator
    train_datagen = ImageDataGenerator()
    test_datagen = ImageDataGenerator()
    seed = 7
    training_set = train_datagen.flow_from_directory(
        'training',
        target_size=(64, 64),
        batch_size=32,
        class_mode='categorical',
        shuffle=True,
        seed=seed)  #,save_to_dir = 'generatedimages') #categorical,binary
    # Test images are streamed the same way (no augmentation on either side).
    test_set = test_datagen.flow_from_directory('test',
                                                target_size=(64, 64),
                                                batch_size=32,
                                                class_mode='categorical',
                                                shuffle=True,
                                                seed=seed)  #categorical,binary
    # TF1-style explicit session: variables must be initialised before fit.
    with tf.Session() as s:
        s.run(tf.global_variables_initializer())
        classifier.fit_generator(training_set,
                                 samples_per_epoch=250,
                                 nb_epoch=35,
                                 validation_data=test_set,
                                 nb_val_samples=90,
                                 shuffle=True,
                                 verbose=2)
    return
Example #16
0
def _build_embedding_matrix(word_index, embedding_index, embedding_dim):
    """Return a (len(word_index) + 1, embedding_dim) embedding matrix.

    Row ``i`` holds the vector for the word whose index is ``i``; words
    missing from ``embedding_index`` fall back to its 'UNK' vector.
    """
    matrix = np.zeros((len(word_index) + 1, embedding_dim))
    for word, idx in word_index.items():
        vector = embedding_index.get(word)
        # Out-of-vocabulary words share the 'UNK' embedding instead of zeros.
        matrix[idx] = vector if vector is not None else embedding_index['UNK']
    return matrix


def _text_loss_and_metrics(number_of_classes, sparse_categorical):
    """Return (loss_name, metrics_list) matching the label encoding.

    Binary tasks use the km.binary_* metric family; multi-class tasks use
    km.sparse_categorical_* or km.categorical_* depending on whether labels
    are integer-encoded.
    """
    if number_of_classes == 2:
        loss, prefix = 'binary_crossentropy', 'binary'
    elif sparse_categorical:
        loss, prefix = 'sparse_categorical_crossentropy', 'sparse_categorical'
    else:
        loss, prefix = 'categorical_crossentropy', 'categorical'
    names = ('precision', 'recall', 'f1_score', 'true_positive',
             'true_negative', 'false_positive', 'false_negative')
    metrics = ['accuracy'] + [getattr(km, '%s_%s' % (prefix, n))()
                              for n in names]
    return loss, metrics


def Build_Model_CNN_Text(word_index,
                         embedding_index,
                         number_of_classes,
                         MAX_SEQUENCE_LENGTH,
                         EMBEDDING_DIM,
                         sparse_categorical,
                         min_hidden_layer_cnn,
                         max_hidden_layer_cnn,
                         min_nodes_cnn,
                         max_nodes_cnn,
                         random_optimizor,
                         dropout,
                         simple_model=False,
                         _l2=0.01,
                         lr=1e-3):
    """Build and compile a randomly-sized CNN for text classification.

    Parameters
    ----------
    word_index : dict
        Maps each word to its integer index (see data_helper.py).
    embedding_index : dict
        Maps each word to its embedding vector; must contain an 'UNK' entry
        used for out-of-vocabulary words (see data_helper.py).
    number_of_classes : int
        Number of target classes; 2 selects a sigmoid head and binary loss.
    MAX_SEQUENCE_LENGTH : int
        Maximum length of the input token sequences.
    EMBEDDING_DIM : int
        Dimensionality of the word embeddings.
    sparse_categorical : bool
        True when labels are integer-encoded rather than one-hot.
    min_hidden_layer_cnn, max_hidden_layer_cnn : int
        Half-open range the random hidden-layer count is drawn from.
    min_nodes_cnn, max_nodes_cnn : int
        Half-open range the random filter/unit counts are drawn from.
    random_optimizor
        Forwarded to ``optimizors`` to pick the optimizer.
    dropout : float
        Dropout rate applied after conv and dense layers.
    simple_model : bool
        True builds a plain sequential Conv1D stack; False builds a
        multi-branch model with parallel filter sizes 2 .. n_branches + 1.
    _l2 : float
        L2 regularization factor (simple model only).
    lr : float
        Learning rate forwarded to ``optimizors``.

    Returns
    -------
    (model, model_tmp)
        Both refer to the same compiled model, matching the historical
        two-value interface.
    """
    embedding_matrix = _build_embedding_matrix(word_index, embedding_index,
                                               EMBEDDING_DIM)
    node_values = list(range(min_nodes_cnn, max_nodes_cnn))
    layer_values = list(range(min_hidden_layer_cnn, max_hidden_layer_cnn))

    if simple_model:
        model = Sequential()
        model.add(
            Embedding(len(word_index) + 1,
                      EMBEDDING_DIM,
                      weights=[embedding_matrix],
                      input_length=MAX_SEQUENCE_LENGTH,
                      trainable=True))
        # A random number of Conv1D -> Dropout -> MaxPooling blocks, each
        # with an independently chosen filter count.
        for _ in range(random.choice(layer_values)):
            model.add(
                Conv1D(random.choice(node_values),
                       5,
                       activation='relu',
                       kernel_regularizer=l2(_l2)))
            model.add(Dropout(dropout))
            model.add(MaxPooling1D(5))
        model.add(Flatten())
        # Two random-width dense layers before the classification head.
        for _ in range(2):
            model.add(
                Dense(random.choice(node_values),
                      activation='relu',
                      kernel_regularizer=l2(_l2)))
            model.add(Dropout(dropout))
        if number_of_classes == 2:
            model.add(
                Dense(1, activation='sigmoid', kernel_regularizer=l2(_l2)))
        else:
            model.add(
                Dense(number_of_classes,
                      activation='softmax',
                      kernel_regularizer=l2(_l2)))
    else:
        # More complex convolutional approach: parallel conv branches with
        # filter sizes 2 .. n_branches + 1, concatenated and followed by
        # further conv/pool/dense stages.
        embedding_layer = Embedding(len(word_index) + 1,
                                    EMBEDDING_DIM,
                                    weights=[embedding_matrix],
                                    input_length=MAX_SEQUENCE_LENGTH,
                                    trainable=True)
        n_branches = random.choice(layer_values)
        print("Filter  ", n_branches)
        node = random.choice(node_values)
        print("Node  ", node)

        sequence_input = Input(shape=(MAX_SEQUENCE_LENGTH, ), dtype='int32')
        embedded_sequences = embedding_layer(sequence_input)
        convs = []
        for fsz in (size + 2 for size in range(n_branches)):
            branch = Conv1D(node, kernel_size=fsz,
                            activation='relu')(embedded_sequences)
            convs.append(MaxPooling1D(5)(branch))

        x = Concatenate(axis=1)(convs)
        x = Conv1D(node, 5, activation='relu')(x)
        x = Dropout(dropout)(x)
        x = MaxPooling1D(5)(x)
        x = Conv1D(node, 5, activation='relu')(x)
        x = Dropout(dropout)(x)
        x = MaxPooling1D(30)(x)
        x = Flatten()(x)
        x = Dense(1024, activation='relu')(x)
        x = Dropout(dropout)(x)
        x = Dense(512, activation='relu')(x)
        x = Dropout(dropout)(x)
        if number_of_classes == 2:
            preds = Dense(1, activation='sigmoid')(x)
        else:
            preds = Dense(number_of_classes, activation='softmax')(x)
        model = Model(sequence_input, preds)

    model_tmp = model
    loss, metrics = _text_loss_and_metrics(number_of_classes,
                                           sparse_categorical)
    model.compile(loss=loss,
                  optimizer=optimizors(random_optimizor, lr),
                  metrics=metrics)
    return model, model_tmp
Example #17
0
 def __init__(self, opts, output_size=1, hidden_size=128):
     """Build and compile an LSTM sequence classifier.

     Args:
         opts: config dict; only ``opts['input_shape']`` (the flat input
             width) is read here.
         output_size: number of output units/classes.
         hidden_size: number of units in the LSTM layer.
     """
     # The final layer applies softmax, so the loss must treat the model
     # output as probabilities; the previous from_logits=True would have
     # applied softmax a second time inside the loss.
     loss = tf.keras.losses.CategoricalCrossentropy(from_logits=False, reduction=tf.losses.Reduction.NONE)
     # NOTE: the original created a throwaway Sequential here and then
     # immediately overwrote it; the dead assignment has been removed.
     self.model = tf.keras.Sequential()
     self.model.add(tf.keras.Input(opts['input_shape']))
     # Reshape the flat input to (timesteps=1, features) for the LSTM.
     self.model.add(tf.keras.layers.Reshape([1, opts['input_shape']]))
     self.model.add(tf.keras.layers.CuDNNLSTM(hidden_size))
     self.model.add(tf.keras.layers.Dropout(0.3))
     self.model.add(tf.keras.layers.Dense(32, activation='relu'))
     self.model.add(tf.keras.layers.Dense(output_size, activation='softmax'))
     self.model.compile(loss=loss, optimizer='adam', metrics=['accuracy', km.categorical_precision(), km.categorical_recall(), km.categorical_f1_score()])
Example #18
0
def Build_Model_DNN_Image(shape, number_of_classes, sparse_categorical,
                          min_hidden_layer_dnn, max_hidden_layer_dnn,
                          min_nodes_dnn, max_nodes_dnn, random_optimizor,
                          dropout):
    '''
    Build and compile a randomly-sized fully-connected (DNN) classifier
    for image input.

    shape: input feature space (passed to Flatten as input_shape)
    number_of_classes: number of target classes; 2 selects a sigmoid head
    sparse_categorical: True when labels are integer-encoded
    min/max_hidden_layer_dnn: half-open range the hidden-layer count is
        drawn from
    min/max_nodes_dnn: half-open range the random layer widths are drawn from
    random_optimizor: forwarded to ``optimizors`` to pick the optimizer
    dropout: dropout rate applied after each hidden layer

    Returns (model, model_tmp); both refer to the same compiled model,
    matching the historical two-value interface.
    '''

    def _loss_and_metrics():
        """Pick (loss_name, metrics_list) matching the label encoding."""
        if number_of_classes == 2:
            loss, prefix = 'binary_crossentropy', 'binary'
        elif sparse_categorical:
            loss, prefix = ('sparse_categorical_crossentropy',
                            'sparse_categorical')
        else:
            loss, prefix = 'categorical_crossentropy', 'categorical'
        names = ('precision', 'recall', 'f1_score', 'true_positive',
                 'true_negative', 'false_positive', 'false_negative')
        return loss, ['accuracy'] + [getattr(km, '%s_%s' % (prefix, n))()
                                     for n in names]

    width_values = list(range(min_nodes_dnn, max_nodes_dnn))
    n_layers = random.choice(
        list(range(min_hidden_layer_dnn, max_hidden_layer_dnn)))
    print(shape)  # debug trace retained from the original

    model = Sequential()
    model.add(Flatten(input_shape=shape))
    # n_layers hidden layers, each with an independently random width.
    for _ in range(n_layers):
        model.add(Dense(random.choice(width_values), activation='relu'))
        model.add(Dropout(dropout))
    if number_of_classes == 2:
        model.add(Dense(1, activation='sigmoid'))
    else:
        model.add(Dense(number_of_classes, activation='softmax'))

    model_tmp = model
    loss, metrics = _loss_and_metrics()
    model.compile(loss=loss,
                  optimizer=optimizors(random_optimizor),
                  metrics=metrics)
    return model, model_tmp
Example #19
0
# NOTE(review): `classifier` is defined earlier in the script; this section
# finishes the network, compiles it, and sets up checkpointing.
#classifier.add(MaxPooling2D(pool_size = (2, 2)))
#classifier.add(Dropout(0.2))
# Step 3 - Flattening
classifier.add(Flatten())

# Step 4 - Full connection
classifier.add(Dense(output_dim=128, activation='relu'))
classifier.add(BatchNormalization())
classifier.add(Dropout(0.2))
#classifier.add(Dense(output_dim =1, activation = 'sigmoid'))# binary
classifier.add(Dense(output_dim=3, activation='softmax'))  # categorical 3-class head
# SET METRICS
# keras-metrics objects for multi-class precision/recall/F1, passed to compile().
precision = km.categorical_precision()
recall = km.categorical_recall()
f1 = km.categorical_f1_score()

# Compiling the CNN
#classifier.compile(optimizer = 'adam', loss = 'binary_crossentropy', metrics = ['accuracy'])
classifier.compile(optimizer='adam',
                   loss='categorical_crossentropy',
                   metrics=['accuracy', precision, recall, f1])
# # checkpoint
# Save weights after any epoch that improves validation accuracy.
filepath = "weights-improvement-{epoch:02d}-{val_acc:.2f}.hdf5"
checkpoint = ModelCheckpoint(filepath,
                             monitor='val_acc',  # old-Keras metric name; 'val_accuracy' in tf.keras >= 2
                             verbose=1,
                             save_best_only=True,
                             mode='max')
callbacks_list = [checkpoint]