# Common imports for the examples below; helpers such as GetTrainWeights, GetOpti,
# GetLearnRate, BuildModel and Histories (Callbacks.py) are project code, sketched
# after the examples that use them.
import time
from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import Dense, Dropout, LSTM
from tensorflow.keras.regularizers import l1, l2
from tensorflow.keras.utils import to_categorical


def MultiFNN(ANNSetup, test, train):
    # One-hot encode the class labels
    TrainMultiClass = to_categorical(train.MultiClass)
    TestMultiClass = to_categorical(test.MultiClass)
    #ClassWeights = GetClassWeights(train.MultiClass,train.Weights)
    TrainWeights = GetTrainWeights(train.MultiClass, train.Weights)

    # Create the model and attach the data the Histories callback reads
    model = Sequential()
    model.Y_test = TestMultiClass[:, 0]
    model.X_train = train.Events
    model.Y_train = TrainMultiClass[:, 0]
    model.W_train = train.Weights  # Original weights!

    model.add(
        Dense(ANNSetup.Neurons[0],
              activation='selu',
              input_dim=ANNSetup.InputDim))
    if ANNSetup.Dropout is not None:
        model.add(Dropout(ANNSetup.Dropout))
    for i in range(1, len(ANNSetup.Neurons)):
        if (i == len(ANNSetup.Neurons) - 1):
            model.add(Dense(ANNSetup.Neurons[i], activation='softmax'))
        else:
            model.add(Dense(ANNSetup.Neurons[i], activation='selu'))

    Opti = GetOpti(ANNSetup.Optimizer, ANNSetup.LearnRate.Lr)
    lrate = GetLearnRate(ANNSetup.LearnRate, ANNSetup.Epochs)
    Roc = Histories()
    Lcallbacks = [Roc] if lrate is None else [Roc, lrate]  # GetLearnRate may return None

    model.compile(optimizer=Opti,
                  loss='categorical_crossentropy',
                  metrics=['accuracy'])
    history = model.fit(train.Events,
                        TrainMultiClass,
                        sample_weight=TrainWeights,
                        validation_data=(test.Events, TestMultiClass,
                                         test.Weights),
                        epochs=int(ANNSetup.Epochs),
                        batch_size=int(ANNSetup.Batch),
                        verbose=2,
                        callbacks=Lcallbacks)

    LAuc = Roc.TestAucs
    LTrainAuc = Roc.TrainAucs
    print("Best Roc {0:.4f} at Epoch {1}".format(max(LAuc),
                                                 LAuc.index(max(LAuc)) + 1))
    print("Train Auc {0:.4f}".format(LTrainAuc[LAuc.index(max(LAuc))]))
    # print("Test Rocs: {0}".format(LAuc))
    # print("Test Loss: {0}".format(Roc.TestLosses))
    # print("Train Rocs: {0}".format(LTrainAuc))
    # print("Train Loss: {0}".format(Roc.TrainLosses))

    model.save(ANNSetup.SavePath)

    return model, Roc
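
Every example on this page calls GetTrainWeights, described in the comments only as a transformation of the Monte Carlo weights for training. Its definition is not part of this listing; a minimal sketch, assuming it rescales the per-event weights so that each class contributes the same total weight to the loss (the balancing scheme itself is an assumption, not the repository's code):

import numpy as np


def GetTrainWeights(Labels, Weights):
    """Hypothetical sketch: rescale MC event weights so every class sums to Total/NClasses."""
    Labels = np.asarray(Labels)
    NewWeights = np.array(Weights, dtype=float, copy=True)
    Classes = np.unique(Labels)
    Total = NewWeights.sum()
    for Cls in Classes:
        Mask = Labels == Cls
        ClsSum = NewWeights[Mask].sum()
        if ClsSum != 0:
            # Scale this class so its summed weight equals Total / NClasses
            NewWeights[Mask] *= Total / (len(Classes) * ClsSum)
    return NewWeights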
Example #2
def LSTMNN(ANNSetup, test, train, VarList):
    TrainWeights = GetTrainWeights(train.OutTrue,train.Weights)

    model = Sequential()
    model.X_train = train.Events
    model.Y_train = train.OutTrue
    model.W_train = train.Weights       #Original weights!
    model.X_test  = test.Events
    model.Y_test  = test.OutTrue
    model.W_test  = test.Weights

    LSTMNeurons  = ANNSetup.Neurons[0]
    DenseNeurons = ANNSetup.Neurons[1]
    width = train.Events.shape[1]
    Seq = train.Events.shape[2]
    model.add(LSTM(LSTMNeurons[0], input_shape=(width, Seq),
                   kernel_regularizer=l1(ANNSetup.Regu), return_sequences=True))
    if ANNSetup.Dropout[0] != 0:
        model.add(Dropout(ANNSetup.Dropout[0]))
    for i in range(1, len(LSTMNeurons)):
        if i == len(LSTMNeurons) - 1:                           # Add last LSTM layer
            model.add(LSTM(LSTMNeurons[i]))
        else:
            model.add(LSTM(LSTMNeurons[i], return_sequences=True,
                           dropout=ANNSetup.Dropout[i],         # dropout both inside the LSTM and as a separate layer below
                           recurrent_regularizer=l2(ANNSetup.Regu)))
        if ANNSetup.Dropout[i] != 0:
            model.add(Dropout(ANNSetup.Dropout[i]))
    for j in range(len(DenseNeurons)):
        if j == len(DenseNeurons) - 1:
            model.add(Dense(DenseNeurons[j], activation='sigmoid'))  # binary_crossentropy needs a sigmoid output; selu can go negative
        else:
            model.add(Dense(DenseNeurons[j], activation='selu'))

    Opti = GetOpti(ANNSetup.Optimizer, ANNSetup.LearnRate.Lr)
    lrate = GetLearnRate(ANNSetup.LearnRate, ANNSetup.Epochs)
    Roc = Histories()
    if lrate is None:
        Lcallbacks = [Roc]
    else:
        Lcallbacks = [Roc, lrate]


    model.summary()
    model.compile(optimizer=Opti, loss='binary_crossentropy', metrics=['accuracy'])
    history = model.fit(train.Events, train.OutTrue, sample_weight=TrainWeights, validation_data=(test.Events, test.OutTrue, test.Weights), epochs=int(ANNSetup.Epochs),
                        batch_size=int(ANNSetup.Batch), verbose=2, callbacks=Lcallbacks)

    LAuc = Roc.TestAucs
    LTrainAuc = Roc.TrainAucs
    print("Best Roc {0:.4f} at Epoch {1}".format(max(LAuc),LAuc.index(max(LAuc))+1)) #0:.4f
    print("Train Auc {0:.4f}".format(LTrainAuc[LAuc.index(max(LAuc))]))
    #print("Test Rocs: {0}".format(LAuc))

    for i in range(len(LAuc)):
        print("Auc at Epoch {0}: {1:.4f} Ov: {2:.3f}".format(i + 1, LAuc[i], 1 - LAuc[i] / LTrainAuc[i]))

    model.save(ANNSetup.SavePath)

    return model, Roc
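
Histories (from Callbacks.py) is also not shown in this listing. Judging from how the binary examples use it, a minimal sketch, assuming it evaluates a weighted ROC AUC on the train/test arrays attached to the model after every epoch (the attribute names are exactly those the examples set):

from sklearn.metrics import roc_auc_score
from tensorflow.keras.callbacks import Callback


class Histories(Callback):
    """Hypothetical sketch: record train/test AUC and loss after each epoch."""

    def on_train_begin(self, logs=None):
        self.TrainAucs, self.TestAucs = [], []
        self.TrainLosses, self.TestLosses = [], []

    def on_epoch_end(self, epoch, logs=None):
        logs = logs or {}
        m = self.model  # the Sequential model carrying X_train, Y_train, W_train, ...
        self.TrainAucs.append(roc_auc_score(m.Y_train, m.predict(m.X_train).ravel(),
                                            sample_weight=m.W_train))
        self.TestAucs.append(roc_auc_score(m.Y_test, m.predict(m.X_test).ravel(),
                                           sample_weight=m.W_test))
        self.TrainLosses.append(logs.get('loss'))
        self.TestLosses.append(logs.get('val_loss'))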
Example #3
def FNN(ANNSetup, test, train):

    # ClassWeights = GetClassWeights(train.OutTrue, train.Weights)
    TrainWeights = GetTrainWeights(train.OutTrue,
                                   train.Weights)  # Defined below

    # tf.debugging.set_log_device_placement(True)                                                                 # Check if system is running on the correct device

    model = Sequential()  # from tensorflow.keras.models
    model.X_train = train.Events
    model.Y_train = train.OutTrue
    model.W_train = train.Weights  # Original weights!
    model.X_test = test.Events
    model.Y_test = test.OutTrue
    model.W_test = test.Weights

    model = BuildModel(ANNSetup, model)  # Defined below

    Opti = GetOpti(ANNSetup.Optimizer, ANNSetup.LearnRate.Lr)  # Defined below

    lrate = GetLearnRate(ANNSetup.LearnRate, ANNSetup.Epochs)  # Defined below

    Roc = Histories()  # Defined in Callbacks.py
    # Roc = RedHistory()

    if lrate is None:
        Lcallbacks = [Roc]
        # Lcallbacks = []
    else:
        Lcallbacks = [Roc, lrate]
        # Lcallbacks = [lrate]

    model.summary()
    model.compile(optimizer=Opti,
                  loss='binary_crossentropy',
                  metrics=['accuracy'])

    start = time.perf_counter()  # time.clock() was removed in Python 3.8

    history = model.fit(train.Events,
                        train.OutTrue,
                        sample_weight=TrainWeights,
                        validation_data=(test.Events, test.OutTrue,
                                         test.Weights),
                        epochs=int(ANNSetup.Epochs),
                        batch_size=int(ANNSetup.Batch),
                        verbose=2,
                        callbacks=Lcallbacks)

    # history = model.fit(train.Events, train.OutTrue, batch_size=4000, epochs=2)

    end = time.perf_counter()

    print("The training took {} seconds".format(end - start))

    LAuc = Roc.TestAucs
    LTrainAuc = Roc.TrainAucs

    print("Best Test Auc {0:.4f} at Epoch {1}".format(
        max(LAuc), (LAuc.index(max(LAuc)) + 1)))  # 0:.4f
    print("Best Train Auc {0:.4f}".format(LTrainAuc[LAuc.index(max(LAuc))]))

    for i in range(len(LAuc)):
        print("Auc at Epoch {0}: {1:.4f} Ov: {2:.3f}".format(
            i + 1, LAuc[i], 1 - LAuc[i] / LTrainAuc[i]))

    model.save(ANNSetup.SavePath)

    return model, Roc
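
BuildModel ("Defined below" in the source comments) does not appear in this listing either. Mirroring the inline network construction in MultiFNN, a plausible sketch for the binary case, assuming the last entry of ANNSetup.Neurons is the single output unit:

from tensorflow.keras.layers import Dense, Dropout


def BuildModel(ANNSetup, model):
    """Hypothetical sketch: selu hidden layers, sigmoid output, optional dropout."""
    model.add(Dense(ANNSetup.Neurons[0], activation='selu',
                    input_dim=ANNSetup.InputDim))
    if ANNSetup.Dropout is not None:
        model.add(Dropout(ANNSetup.Dropout))
    for i in range(1, len(ANNSetup.Neurons)):
        if i == len(ANNSetup.Neurons) - 1:
            model.add(Dense(ANNSetup.Neurons[i], activation='sigmoid'))  # binary output
        else:
            model.add(Dense(ANNSetup.Neurons[i], activation='selu'))
    return model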
Example #4
def LSTMNN(ANNSetup, test, train, VarList):
    """ Bulding a Keras for the Recurrent Neural Networks with LSTM layers """

    TrainWeights = GetTrainWeights(train.OutTrue,train.Weights) # Transformation of the Monte Carlo weights for training

    #Create the model and pass it the data for Callbacks
    model = Sequential()
    model.X_train = train.Events
    model.Y_train = train.OutTrue
    model.W_train = train.Weights       #Original weights!
    model.X_test  = test.Events
    model.Y_test  = test.OutTrue
    model.W_test  = test.Weights

    # Building the model from the predefined configuration (RNN.py)
    LSTMNeurons  = ANNSetup.Neurons[0]
    DenseNeurons = ANNSetup.Neurons[1]
    width = train.Events.shape[1]
    Seq = train.Events.shape[2]
    model.add(LSTM(LSTMNeurons[0], input_shape=(width, Seq),
                   kernel_regularizer=l1(ANNSetup.Regu), return_sequences=True))
    if ANNSetup.Dropout[0] != 0:
        model.add(Dropout(ANNSetup.Dropout[0]))
    for i in range(1, len(LSTMNeurons)):
        if i == len(LSTMNeurons) - 1:                           # Add last LSTM layer
            model.add(LSTM(LSTMNeurons[i]))
        else:
            model.add(LSTM(LSTMNeurons[i], return_sequences=True,
                           dropout=ANNSetup.Dropout[i],         # dropout both inside the LSTM and as a separate layer below
                           recurrent_regularizer=l2(ANNSetup.Regu)))
        if ANNSetup.Dropout[i] != 0:
            model.add(Dropout(ANNSetup.Dropout[i]))
    for j in range(len(DenseNeurons)):
        if j == len(DenseNeurons) - 1:
            model.add(Dense(DenseNeurons[j], activation='sigmoid'))  # binary_crossentropy needs a sigmoid output; selu can go negative
        else:
            model.add(Dense(DenseNeurons[j], activation='selu'))

    Opti = GetOpti(ANNSetup.Optimizer, ANNSetup.LearnRate.Lr)   # Set the optimizer
    lrate = GetLearnRate(ANNSetup.LearnRate, ANNSetup.Epochs)   # Set a learning rate schedule
    Roc = Histories()                                           # Define history for the AUC at each training step
    if lrate is None:
        Lcallbacks = [Roc]
    else:
        Lcallbacks = [Roc, lrate]


    model.summary()
    model.compile(optimizer=Opti, loss='binary_crossentropy', metrics=['accuracy'])
    start = time.perf_counter()                                 # start clock to track training time (time.clock() was removed in Python 3.8)
    history = model.fit(train.Events, train.OutTrue, sample_weight=TrainWeights,
                        validation_data=(test.Events, test.OutTrue, test.Weights),
                        epochs=int(ANNSetup.Epochs),            # 'nb_epoch' was the deprecated Keras 1 spelling
                        batch_size=int(ANNSetup.Batch), verbose=2, callbacks=Lcallbacks)
    end = time.perf_counter()
    print("The training took {} seconds".format(end-start))

    LAuc = Roc.TestAucs
    LTrainAuc = Roc.TrainAucs
    print("Best Roc {0:.4f} at Epoch {1}".format(max(LAuc),LAuc.index(max(LAuc))+1)) #0:.4f
    print("Train Auc {0:.4f}".format(LTrainAuc[LAuc.index(max(LAuc))]))
    #print("Test Rocs: {0}".format(LAuc))

    for i in range(len(LAuc)):
        print("Auc at Epoch {0}: {1:.4f} Ov: {2:.3f}".format(i + 1, LAuc[i], 1 - LAuc[i] / LTrainAuc[i]))

    model.save(ANNSetup.SavePath)

    return model, Roc
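
GetOpti and GetLearnRate are the remaining unlisted helpers. A minimal sketch, assuming GetOpti maps an optimizer name onto a Keras optimizer at the requested learning rate and GetLearnRate returns either a LearningRateScheduler or None for a constant rate (which is why the callers check for None); the Mode and Factor attributes are invented for illustration:

from tensorflow.keras.callbacks import LearningRateScheduler
from tensorflow.keras.optimizers import SGD, Adam, Adagrad, Adadelta


def GetOpti(Optimizer, Lr):
    """Hypothetical sketch: translate an optimizer name into a Keras optimizer."""
    Optis = {'SGD': SGD, 'Adam': Adam, 'Adagrad': Adagrad, 'Adadelta': Adadelta}
    return Optis[Optimizer](learning_rate=Lr)


def GetLearnRate(LearnRate, Epochs):
    """Hypothetical sketch: exponential decay schedule, or None for a constant rate."""
    if getattr(LearnRate, 'Mode', None) != 'decay':  # 'Mode'/'Factor' are assumed names
        return None
    return LearningRateScheduler(lambda epoch: LearnRate.Lr * LearnRate.Factor ** epoch)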
Example #5
def MultiFNN(ANNSetup, test, train):
    """ Bulding a Keras for multi-classifer """

    #One hot encoding
    TrainMultiClass = to_categorical(train.MultiClass)
    TestMultiClass = to_categorical(test.MultiClass)

    #ClassWeights = GetClassWeights(train.MultiClass,train.Weights)
    TrainWeights = GetTrainWeights(
        train.MultiClass, train.Weights
    )  # Transformation of the Monte Carlo weights for training

    #Create the model and pass it the data for Callbacks
    model = Sequential()
    model.Y_test = TestMultiClass[:, 0]
    model.X_train = train.Events
    model.Y_train = TrainMultiClass[:, 0]
    model.W_train = train.Weights  #Original weights!

    # Build model from configuration (set in FNN.py)
    model.add(
        Dense(ANNSetup.Neurons[0],
              activation='selu',
              input_dim=ANNSetup.InputDim))
    if ANNSetup.Dropout is not None:
        model.add(Dropout(ANNSetup.Dropout))
    for i in range(1, len(ANNSetup.Neurons)):
        if (i == len(ANNSetup.Neurons) - 1):
            model.add(Dense(ANNSetup.Neurons[i], activation='softmax'))
        else:
            model.add(Dense(ANNSetup.Neurons[i], activation='selu'))

    Opti = GetOpti(ANNSetup.Optimizer, ANNSetup.LearnRate.Lr)  # Set optimizer
    lrate = GetLearnRate(ANNSetup.LearnRate,
                         ANNSetup.Epochs)  # Set learning rate schedule
    Roc = Histories()  # Create history for AUC during training
    Lcallbacks = [Roc] if lrate is None else [Roc, lrate]  # GetLearnRate may return None

    model.compile(optimizer=Opti,
                  loss='categorical_crossentropy',
                  metrics=['accuracy'])
    history = model.fit(train.Events,
                        TrainMultiClass,
                        sample_weight=TrainWeights,
                        validation_data=(test.Events, TestMultiClass,
                                         test.Weights),
                        epochs=int(ANNSetup.Epochs),
                        batch_size=int(ANNSetup.Batch),
                        verbose=2,
                        callbacks=Lcallbacks)

    LAuc = Roc.TestAucs
    LTrainAuc = Roc.TrainAucs
    print("Best Roc {0:.4f} at Epoch {1}".format(max(LAuc),
                                                 LAuc.index(max(LAuc)) + 1))
    print("Train Auc {0:.4f}".format(LTrainAuc[LAuc.index(max(LAuc))]))
    # print("Test Rocs: {0}".format(LAuc))
    # print("Test Loss: {0}".format(Roc.TestLosses))
    # print("Train Rocs: {0}".format(LTrainAuc))
    # print("Train Loss: {0}".format(Roc.TrainLosses))

    model.save(ANNSetup.SavePath)

    return model, Roc
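
For reference, to_categorical from tensorflow.keras.utils performs the one-hot encoding used above:

from tensorflow.keras.utils import to_categorical

print(to_categorical([0, 2, 1]))
# [[1. 0. 0.]
#  [0. 0. 1.]
#  [0. 1. 0.]]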
Example #6
def FNN(ANNSetup, test, train):
    """ Bulding a Keras for the Feedforward Neural Networks """

    #ClassWeights = GetClassWeights(train.OutTrue,train.Weights)
    TrainWeights = GetTrainWeights(
        train.OutTrue, train.Weights
    )  # Transformation of the Monte Carlo weights for training

    #tf.debugging.set_log_device_placement(True)                   #Check if system is running on the correct device

    #Create the model and pass it the data for Callbacks
    model = Sequential()
    model.X_train = train.Events
    model.Y_train = train.OutTrue
    model.W_train = train.Weights  #Original weights!
    model.X_test = test.Events
    model.Y_test = test.OutTrue
    model.W_test = test.Weights

    model = BuildModel(ANNSetup, model)  # Build the model from the predefined configuration (FNN.py)

    Opti = GetOpti(ANNSetup.Optimizer,
                   ANNSetup.LearnRate.Lr)  # Set the optimizer
    lrate = GetLearnRate(ANNSetup.LearnRate,
                         ANNSetup.Epochs)  # Set a learning rate schedule
    Roc = Histories()  # Define history for the AUC results at each training step
    #Roc = RedHistory()
    if lrate is None:
        Lcallbacks = [Roc]
        #Lcallbacks = []
    else:
        Lcallbacks = [Roc, lrate]
        #Lcallbacks = [lrate]

    model.summary()
    model.compile(optimizer=Opti,
                  loss='binary_crossentropy',
                  metrics=['accuracy'])
    start = time.perf_counter()  # start clock to track training time (time.clock() was removed in Python 3.8)
    history = model.fit(train.Events,
                        train.OutTrue,
                        sample_weight=TrainWeights,
                        validation_data=(test.Events, test.OutTrue,
                                         test.Weights),
                        epochs=int(ANNSetup.Epochs),
                        batch_size=int(ANNSetup.Batch),
                        verbose=2,
                        callbacks=Lcallbacks)
    #history = model.fit(train.Events,train.OutTrue,batch_size=4000,epochs=2)
    end = time.perf_counter()
    print("The training took {} seconds".format(end - start))

    LAuc = Roc.TestAucs
    LTrainAuc = Roc.TrainAucs
    print("Best Test Auc {0:.4f} at Epoch {1}".format(
        max(LAuc), (LAuc.index(max(LAuc)) + 1)))  #0:.4f
    print("Best Train Auc {0:.4f}".format(LTrainAuc[LAuc.index(max(LAuc))]))

    for i in range(len(LAuc)):
        print("Auc at Epoch {0}: {1:.4f} Ov: {2:.3f}".format(
            i + 1, LAuc[i], 1 - LAuc[i] / LTrainAuc[i]))

    model.save(ANNSetup.SavePath)

    return model, Roc
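
Finally, a hypothetical call site for FNN. The real ANNSetup and sample containers live elsewhere in the repository, so every name below is an assumption inferred from the attributes the examples access:

from types import SimpleNamespace

import numpy as np


def MakeSample(n, rng):
    """Hypothetical stand-in for the repository's sample container."""
    return SimpleNamespace(Events=rng.normal(size=(n, 10)),     # input features
                           OutTrue=rng.integers(0, 2, size=n),  # binary truth labels
                           Weights=np.ones(n))                  # MC event weights


rng = np.random.default_rng(0)
Setup = SimpleNamespace(Neurons=[64, 32, 1], InputDim=10, Dropout=None,
                        Optimizer='Adam',
                        LearnRate=SimpleNamespace(Lr=1e-3, Mode='constant'),
                        Epochs=10, Batch=512, SavePath='fnn_model.h5')

model, Roc = FNN(Setup, test=MakeSample(2000, rng), train=MakeSample(8000, rng))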