def fit_model_siames_soft(train_x, train_emb_x_1, train_emb_x_2, train_y,
                          val_x, val_emb_x_1, val_emb_x_2, val_y, model_train,
                          n_epochs, optimizer, batchsize, loss_weigths, verb):
    """Compile and train a two-output siamese (soft-sharing) model.

    Parameters
    ----------
    train_x, train_emb_x_1, train_emb_x_2 : training inputs — the two
        embedded siamese branches plus the main feature input.
    train_y : training labels, fed to both model outputs.
    val_x, val_emb_x_1, val_emb_x_2, val_y : validation counterparts.
    model_train : Keras model with two outputs; compiled in place.
    n_epochs, optimizer, batchsize : usual training knobs.
    loss_weigths : per-output loss weights (parameter name keeps the
        original typo for caller compatibility).
    verb : verbosity level passed through to ``fit``.

    Returns
    -------
    The Keras ``History`` object produced by ``model_train.fit``.
    """
    # BUG FIX: the original accessed `model_train.Name` here while using
    # `model_train.name` everywhere else; Keras models expose the lowercase
    # `name` attribute, so `.Name` raised AttributeError.
    tensorboard = TensorBoard(log_dir=working_level + "/board_logs/" +
                              model_train.name + "-" + model_name +
                              "-{}".format(time()))
    checkpoint = ModelCheckpoint(
        working_level +
        "/model_checkpoints/{0}-check-{{epoch:02d}}-{{val_main_acc:.2f}}.hdf5".
        format(model_train.name + "-" + model_name),
        save_weights_only=True,
        # Guard against n_epochs < 5, which made the original period 0
        # (an invalid checkpoint interval).
        period=max(1, n_epochs // 5))
    best_model_save = ModelCheckpoint(
        working_level +
        "/model_checkpoints/{0}-best.hdf5".format(model_train.name + "-" +
                                                  model_name),
        monitor='val_main_acc',
        save_weights_only=True,
        save_best_only=True,
        mode='max')
    logger = EpochLogger(display=25)

    model_train.compile(optimizer=optimizer,
                        loss='sparse_categorical_crossentropy',
                        metrics=['accuracy',
                                 km.sparse_categorical_recall()],
                        loss_weights=loss_weigths)
    # Same labels are supplied to both heads; loss_weights balances them.
    return model_train.fit(
        [train_emb_x_1, train_emb_x_2, train_x],
        y=[train_y, train_y],
        verbose=verb,
        validation_data=([val_emb_x_1, val_emb_x_2, val_x], [val_y, val_y]),
        epochs=n_epochs,
        batch_size=batchsize,
        callbacks=[tensorboard, checkpoint, best_model_save,
                   logger])  # starts training
# Esempio n. 2 (example separator left by the source aggregator; stray "0"
# artifact commented out so the file stays parseable)
# 0
def Build_Model_DNN_Image(shape, number_of_classes, sparse_categorical,
                          min_hidden_layer_dnn, max_hidden_layer_dnn,
                          min_nodes_dnn, max_nodes_dnn, random_optimizor,
                          dropout):
    '''
    Build a randomly sized fully connected network for image classification.

    shape is the input feature space; number_of_classes is the number of
    target classes (2 selects a sigmoid head, otherwise softmax).  Hidden
    depth and per-layer width are drawn uniformly from
    [min_hidden_layer_dnn, max_hidden_layer_dnn) and
    [min_nodes_dnn, max_nodes_dnn) respectively.
    Returns (model, model_tmp); both names refer to the same compiled model.
    '''

    def _metrics(prefix):
        # Standard km metric set for the chosen loss family, in the
        # conventional order used throughout this module.
        names = ('precision', 'recall', 'f1_score', 'true_positive',
                 'true_negative', 'false_positive', 'false_negative')
        return ['accuracy'] + [getattr(km, prefix + '_' + n)() for n in names]

    model = Sequential()
    width_choices = list(range(min_nodes_dnn, max_nodes_dnn))
    width = random.choice(width_choices)
    depth = random.choice(list(range(min_hidden_layer_dnn,
                                     max_hidden_layer_dnn)))
    print(shape)
    model.add(Flatten(input_shape=shape))
    model.add(Dense(width, activation='relu'))
    model.add(Dropout(dropout))
    # First hidden layer is already in place, so add depth - 1 more.
    for _ in range(depth - 1):
        model.add(Dense(random.choice(width_choices), activation='relu'))
        model.add(Dropout(dropout))

    if number_of_classes == 2:
        model.add(Dense(1, activation='sigmoid'))
        loss, prefix = 'binary_crossentropy', 'binary'
    elif sparse_categorical:
        model.add(Dense(number_of_classes, activation='softmax'))
        loss, prefix = 'sparse_categorical_crossentropy', 'sparse_categorical'
    else:
        model.add(Dense(number_of_classes, activation='softmax'))
        loss, prefix = 'categorical_crossentropy', 'categorical'
    model_tmp = model
    model.compile(loss=loss,
                  optimizer=optimizors(random_optimizor),
                  metrics=_metrics(prefix))
    return model, model_tmp
# Esempio n. 3 (example separator left by the source aggregator; stray "0"
# artifact commented out so the file stays parseable)
# 0
def Build_Model_CNN_Text(word_index,
                         embedding_index,
                         number_of_classes,
                         MAX_SEQUENCE_LENGTH,
                         EMBEDDING_DIM,
                         sparse_categorical,
                         min_hidden_layer_cnn,
                         max_hidden_layer_cnn,
                         min_nodes_cnn,
                         max_nodes_cnn,
                         random_optimizor,
                         dropout,
                         simple_model=False,
                         _l2=0.01,
                         lr=1e-3):
    """
    Build a CNN text classifier on top of pretrained word embeddings.

    word_index maps word -> integer index; embedding_index maps word ->
    embedding vector and must contain an 'UNK' entry used for words without
    a pretrained vector.  With simple_model=True a Sequential stack of
    Conv1D/MaxPool blocks of random depth and filter count is built;
    otherwise a multi-branch (n-gram style) functional model concatenates
    parallel Conv1D branches with kernel sizes 2..depth+1.
    Returns (model, model_tmp); both names refer to the same compiled model.
    """

    def _embedding_weights():
        # Rows fall back to the 'UNK' vector when a word has no pretrained
        # embedding; row 0 stays all-zero (indices are 1-based).
        matrix = np.zeros((len(word_index) + 1, EMBEDDING_DIM))
        for word, idx in word_index.items():
            vector = embedding_index.get(word)
            matrix[idx] = vector if vector is not None \
                else embedding_index['UNK']
        return matrix

    def _metrics(prefix):
        # Standard km metric set for the chosen loss family.
        names = ('precision', 'recall', 'f1_score', 'true_positive',
                 'true_negative', 'false_positive', 'false_negative')
        return ['accuracy'] + [getattr(km, prefix + '_' + n)() for n in names]

    def _compile(target):
        # Pick the loss/metric family from the class count and label format.
        if number_of_classes == 2:
            loss, prefix = 'binary_crossentropy', 'binary'
        elif sparse_categorical:
            loss, prefix = ('sparse_categorical_crossentropy',
                            'sparse_categorical')
        else:
            loss, prefix = 'categorical_crossentropy', 'categorical'
        target.compile(loss=loss,
                       optimizer=optimizors(random_optimizor, lr),
                       metrics=_metrics(prefix))

    model = Sequential()
    if simple_model:
        model.add(
            Embedding(len(word_index) + 1,
                      EMBEDDING_DIM,
                      weights=[_embedding_weights()],
                      input_length=MAX_SEQUENCE_LENGTH,
                      trainable=True))
        filter_choices = list(range(min_nodes_cnn, max_nodes_cnn))
        depth = random.choice(list(range(min_hidden_layer_cnn,
                                         max_hidden_layer_cnn)))
        for _ in range(depth):
            model.add(
                Conv1D(random.choice(filter_choices),
                       5,
                       activation='relu',
                       kernel_regularizer=l2(_l2)))
            model.add(Dropout(dropout))
            model.add(MaxPooling1D(5))

        model.add(Flatten())
        # Two randomly sized dense layers before the classification head.
        for _ in range(2):
            model.add(
                Dense(random.choice(filter_choices),
                      activation='relu',
                      kernel_regularizer=l2(_l2)))
            model.add(Dropout(dropout))
        if number_of_classes == 2:
            model.add(
                Dense(1, activation='sigmoid', kernel_regularizer=l2(_l2)))
        else:
            model.add(
                Dense(number_of_classes,
                      activation='softmax',
                      kernel_regularizer=l2(_l2)))
        model_tmp = model
        _compile(model)
    else:
        embedding_layer = Embedding(len(word_index) + 1,
                                    EMBEDDING_DIM,
                                    weights=[_embedding_weights()],
                                    input_length=MAX_SEQUENCE_LENGTH,
                                    trainable=True)

        # More complex convolutional approach: parallel branches with
        # increasing kernel sizes, concatenated along the time axis.
        depth = random.choice(list(range(min_hidden_layer_cnn,
                                         max_hidden_layer_cnn)))
        print("Filter  ", depth)
        filter_sizes = [width + 2 for width in range(depth)]

        node = random.choice(list(range(min_nodes_cnn, max_nodes_cnn)))
        print("Node  ", node)
        sequence_input = Input(shape=(MAX_SEQUENCE_LENGTH, ), dtype='int32')
        embedded_sequences = embedding_layer(sequence_input)

        branches = [
            MaxPooling1D(5)(Conv1D(node, kernel_size=fsz,
                                   activation='relu')(embedded_sequences))
            for fsz in filter_sizes
        ]

        x = Concatenate(axis=1)(branches)
        x = Conv1D(node, 5, activation='relu')(x)
        x = Dropout(dropout)(x)
        x = MaxPooling1D(5)(x)
        x = Conv1D(node, 5, activation='relu')(x)
        x = Dropout(dropout)(x)
        x = MaxPooling1D(30)(x)
        x = Flatten()(x)
        x = Dense(1024, activation='relu')(x)
        x = Dropout(dropout)(x)
        x = Dense(512, activation='relu')(x)
        x = Dropout(dropout)(x)
        if number_of_classes == 2:
            preds = Dense(1, activation='sigmoid')(x)
        else:
            preds = Dense(number_of_classes, activation='softmax')(x)
        model = Model(sequence_input, preds)
        model_tmp = model
        _compile(model)
    return model, model_tmp
# Esempio n. 4 (example separator left by the source aggregator; stray "0"
# artifact commented out so the file stays parseable)
# 0
def Build_Model_RNN_Text(word_index,
                         embedding_index,
                         number_of_classes,
                         MAX_SEQUENCE_LENGTH,
                         EMBEDDING_DIM,
                         sparse_categorical,
                         min_hidden_layer_rnn,
                         max_hidden_layer_rnn,
                         min_nodes_rnn,
                         max_nodes_rnn,
                         random_optimizor,
                         dropout,
                         use_cuda=True,
                         use_bidirectional=True,
                         _l2=0.01,
                         lr=1e-3):
    """
    Build a (bi)directional GRU text classifier on pretrained embeddings.

    word_index maps word -> integer index; embedding_index maps word ->
    embedding vector and must contain an 'UNK' entry.  The number of stacked
    recurrent layers and the units per layer are drawn at random; one extra
    recurrent layer (without return_sequences) always terminates the stack.
    Returns (model, model_tmp); both names refer to the same compiled model.
    """

    # CuDNN variant needs a GPU; plain GRU is the CPU fallback.
    Recurrent = CuDNNGRU if use_cuda else GRU

    def _metrics(prefix):
        # Standard km metric set for the chosen loss family.
        names = ('precision', 'recall', 'f1_score', 'true_positive',
                 'true_negative', 'false_positive', 'false_negative')
        return ['accuracy'] + [getattr(km, prefix + '_' + n)() for n in names]

    model = Sequential()
    unit_choices = list(range(min_nodes_rnn, max_nodes_rnn + 1))
    depth = random.choice(list(range(min_hidden_layer_rnn - 1,
                                     max_hidden_layer_rnn)))
    print(depth)

    # Rows fall back to the 'UNK' vector when a word has no pretrained
    # embedding; row 0 stays all-zero (indices are 1-based).
    embedding_matrix = np.zeros((len(word_index) + 1, EMBEDDING_DIM))
    for word, idx in word_index.items():
        vector = embedding_index.get(word)
        embedding_matrix[idx] = vector if vector is not None \
            else embedding_index['UNK']

    model.add(
        Embedding(len(word_index) + 1,
                  EMBEDDING_DIM,
                  weights=[embedding_matrix],
                  input_length=MAX_SEQUENCE_LENGTH,
                  trainable=True))

    # A single width is drawn once and reused for every recurrent layer.
    gru_node = random.choice(unit_choices)
    print(gru_node)

    def _recurrent(**kwargs):
        # One recurrent layer, optionally wrapped in Bidirectional.
        cell = Recurrent(gru_node, kernel_regularizer=l2(_l2), **kwargs)
        return Bidirectional(cell) if use_bidirectional else cell

    for _ in range(depth):
        model.add(_recurrent(return_sequences=True))
        model.add(Dropout(dropout))
    model.add(_recurrent())
    model.add(Dropout(dropout))
    model.add(Dense(256, activation='relu', kernel_regularizer=l2(_l2)))

    if number_of_classes == 2:
        model.add(Dense(1, activation='sigmoid', kernel_regularizer=l2(_l2)))
        loss, prefix = 'binary_crossentropy', 'binary'
    elif sparse_categorical:
        model.add(
            Dense(number_of_classes,
                  activation='softmax',
                  kernel_regularizer=l2(_l2)))
        loss, prefix = 'sparse_categorical_crossentropy', 'sparse_categorical'
    else:
        model.add(
            Dense(number_of_classes,
                  activation='softmax',
                  kernel_regularizer=l2(_l2)))
        loss, prefix = 'categorical_crossentropy', 'categorical'
    model_tmp = model
    model.compile(loss=loss,
                  optimizer=optimizors(random_optimizor, lr),
                  metrics=_metrics(prefix))
    return model, model_tmp
# Esempio n. 5 (example separator left by the source aggregator; stray "0"
# artifact commented out so the file stays parseable)
# 0
def Build_Model_RNN_Image(shape, number_of_classes, sparse_categorical,
                          min_nodes_rnn, max_nodes_rnn, random_optimizor,
                          dropout):
    """
    Build a row/column LSTM classifier for images.

    shape is (w, h, p).  Rows are encoded first via a TimeDistributed
    CuDNNLSTM, then the sequence of row encodings is folded by a second
    CuDNNLSTM; each LSTM's width is drawn independently at random.
    Returns (model, model_tmp); both names refer to the same compiled model.
    """

    def _metrics(prefix):
        # Standard km metric set for the chosen loss family.
        names = ('precision', 'recall', 'f1_score', 'true_positive',
                 'true_negative', 'false_positive', 'false_negative')
        return ['accuracy'] + [getattr(km, prefix + '_' + n)() for n in names]

    unit_choices = list(range(min_nodes_rnn - 1, max_nodes_rnn))

    pixels = Input(shape=shape)

    # Encode each row of pixels independently, then encode the column of
    # row-encodings.
    # NOTE(review): CuDNNLSTM generally does not accept `recurrent_dropout`
    # (it is a cuDNN-fused kernel) — confirm against the Keras version in use.
    encoded_rows = TimeDistributed(
        CuDNNLSTM(random.choice(unit_choices),
                  recurrent_dropout=dropout))(pixels)
    encoded_columns = CuDNNLSTM(random.choice(unit_choices),
                                recurrent_dropout=dropout)(encoded_rows)

    if number_of_classes == 2:
        prediction = Dense(1, activation='sigmoid')(encoded_columns)
        loss, prefix = 'binary_crossentropy', 'binary'
    elif sparse_categorical:
        prediction = Dense(number_of_classes,
                           activation='softmax')(encoded_columns)
        loss, prefix = 'sparse_categorical_crossentropy', 'sparse_categorical'
    else:
        prediction = Dense(number_of_classes,
                           activation='softmax')(encoded_columns)
        loss, prefix = 'categorical_crossentropy', 'categorical'

    model = Model(pixels, prediction)
    model_tmp = model
    model.compile(loss=loss,
                  optimizer=optimizors(random_optimizor),
                  metrics=_metrics(prefix))
    return model, model_tmp
# Esempio n. 6 (example separator left by the source aggregator; stray "0"
# artifact commented out so the file stays parseable)
# 0
def Build_Model_CNN_Image(shape, number_of_classes, sparse_categorical,
                          min_hidden_layer_cnn, max_hidden_layer_cnn,
                          min_nodes_cnn, max_nodes_cnn, random_optimizor,
                          dropout):
    """
    Build a randomly sized Conv2D classifier for images.

    shape is (w, h, p).  Two fixed 3x3 conv layers open the network, then a
    randomly drawn number of conv/pool/dropout blocks follow, each with a
    randomly drawn filter count.  The classification head uses a maxnorm(3)
    kernel constraint.
    Returns (model, model_tmp); both names refer to the same compiled model.
    """

    def _metrics(prefix):
        # Standard km metric set for the chosen loss family.
        names = ('precision', 'recall', 'f1_score', 'true_positive',
                 'true_negative', 'false_positive', 'false_negative')
        return ['accuracy'] + [getattr(km, prefix + '_' + n)() for n in names]

    model = Sequential()
    filter_choices = list(range(min_nodes_cnn, max_nodes_cnn))
    depth = random.choice(list(range(min_hidden_layer_cnn,
                                     max_hidden_layer_cnn)))
    n_filters = random.choice(filter_choices)
    model.add(Conv2D(n_filters, (3, 3), padding='same', input_shape=shape))
    model.add(Activation('relu'))
    model.add(Conv2D(n_filters, (3, 3)))
    model.add(Activation('relu'))

    for _ in range(depth):
        model.add(Conv2D(random.choice(filter_choices), (3, 3),
                         padding='same'))
        model.add(Activation('relu'))
        model.add(MaxPooling2D(pool_size=(2, 2)))
        model.add(Dropout(dropout))

    model.add(Flatten())
    model.add(Dense(256, activation='relu'))
    model.add(Dropout(dropout))

    if number_of_classes == 2:
        model.add(Dense(1, activation='sigmoid', kernel_constraint=maxnorm(3)))
        loss, prefix = 'binary_crossentropy', 'binary'
    elif sparse_categorical:
        model.add(
            Dense(number_of_classes,
                  activation='softmax',
                  kernel_constraint=maxnorm(3)))
        loss, prefix = 'sparse_categorical_crossentropy', 'sparse_categorical'
    else:
        model.add(
            Dense(number_of_classes,
                  activation='softmax',
                  kernel_constraint=maxnorm(3)))
        loss, prefix = 'categorical_crossentropy', 'categorical'
    model_tmp = model
    model.compile(loss=loss,
                  optimizer=optimizors(random_optimizor),
                  metrics=_metrics(prefix))
    return model, model_tmp
# Esempio n. 7 (example separator left by the source aggregator; stray "0"
# artifact commented out so the file stays parseable)
# 0
def Build_Model_DNN_Text(shape,
                         number_of_classes,
                         sparse_categorical,
                         min_hidden_layer_dnn,
                         max_hidden_layer_dnn,
                         min_nodes_dnn,
                         max_nodes_dnn,
                         random_optimizor,
                         dropout,
                         _l2=0.01,
                         lr=1e-3):
    """
    Build a randomly sized, L2-regularised MLP for text classification.

    shape : int — input feature dimension.
    number_of_classes : number of target classes (2 selects a sigmoid head).
    sparse_categorical : True for integer labels, False for one-hot.
    min/max_hidden_layer_dnn, min/max_nodes_dnn : random draw ranges for the
        number of additional hidden layers and the width of each layer.
    random_optimizor, dropout, _l2, lr : optimizer selection flag, dropout
        rate, L2 regularisation factor and learning rate.

    Returns (model, model_tmp); both names refer to the same compiled model.
    """

    def _metrics(prefix):
        # Standard km metric set matching the chosen loss family.
        names = ('precision', 'recall', 'f1_score', 'true_positive',
                 'true_negative', 'false_positive', 'false_negative')
        return ['accuracy'] + [getattr(km, prefix + '_' + n)() for n in names]

    model = Sequential()
    node_choices = list(range(min_nodes_dnn, max_nodes_dnn))
    layer_choices = list(range(min_hidden_layer_dnn, max_hidden_layer_dnn))

    n_nodes = random.choice(node_choices)
    n_layers = random.choice(layer_choices)

    model.add(
        Dense(n_nodes,
              input_dim=shape,
              activation='relu',
              kernel_regularizer=l2(_l2)))
    model.add(Dropout(dropout))
    # FIX: removed the dead `Numberof_NOde_old` bookkeeping and the
    # `input_dim=` argument it fed on hidden layers — Keras only honours the
    # input spec on the first layer of a Sequential model, so passing it on
    # later layers was misleading no-op noise.
    for _ in range(n_layers):
        model.add(
            Dense(random.choice(node_choices),
                  activation='relu',
                  kernel_regularizer=l2(_l2)))
        model.add(Dropout(dropout))

    if number_of_classes == 2:
        model.add(Dense(1, activation='sigmoid', kernel_regularizer=l2(_l2)))
        loss, prefix = 'binary_crossentropy', 'binary'
    elif sparse_categorical:
        model.add(
            Dense(number_of_classes,
                  activation='softmax',
                  kernel_regularizer=l2(_l2)))
        loss, prefix = 'sparse_categorical_crossentropy', 'sparse_categorical'
    else:
        model.add(
            Dense(number_of_classes,
                  activation='softmax',
                  kernel_regularizer=l2(_l2)))
        loss, prefix = 'categorical_crossentropy', 'categorical'
    model_tmp = model
    model.compile(loss=loss,
                  optimizer=optimizors(random_optimizor, lr),
                  metrics=_metrics(prefix))
    return model, model_tmp