Example #1
    def train(self):
        x_train, x_test, y_train, y_test, num_vars, eta_test, pt_test, eta_train, pt_train = self.get_data()

        Nbatch = 500
        Nepoch = 10
        
        model = self.build_model(num_vars)
        model.summary()

        # Standardize inputs; apply the same scaler to the test set so
        # model.predict below sees features on the training scale
        scaler  = StandardScaler().fit(x_train)
        x_train = scaler.transform(x_train)
        x_test  = scaler.transform(x_test)

        
        callbacks = all_callbacks(stop_patience=1000, 
                            lr_factor=0.5,
                            lr_patience=10,
                            lr_epsilon=0.000001, 
                            lr_cooldown=2, 
                            lr_minimum=0.0000001,
                            outputDir='/uscms_data/d3/jkrupa/pf_studies/CMSSW_10_5_0_pre2/src/CaloTrigNN/CaloNtupler/test/h5_files')

        print('Fit model...')
        if options.use_weights:
            weights = get_weights(y_train, pt_train, options)
            history = model.fit(x_train, y_train, epochs=Nepoch, batch_size=Nbatch, callbacks=callbacks.callbacks, validation_split=0.0, sample_weight=weights)
        else:
            history = model.fit(x_train, y_train, epochs=Nepoch, batch_size=Nbatch, callbacks=callbacks.callbacks, validation_split=0.0)


        #https://hackernoon.com/simple-guide-on-how-to-generate-roc-plot-for-keras-classifier-2ecc6c73115a
        y_pred                = model.predict(x_test).ravel()

        #inclusive
        fpr, tpr, thresholds  = roc_curve(y_test, y_pred)

        #kinematic binning
        if options.inc:
          lpT  = [1.,10000.0]
          leta = [1.7,3.0]
        else:
          lpT  = [1.,5.,10.,20.,10000.0]
          leta = [1.7,2.0,2.5,3.0]


        if options.makeroc:
          for i0 in range(len(lpT)-1):
            for i1 in range(len(leta)-1):
              make_roc_curve(y_pred,y_test,eta_test, pt_test, lpT[i0],lpT[i0+1],leta[i1],leta[i1+1],options)


        # Convert the TF1 session graph to constants and export a .pb for inference
        frozen_graph = freeze_session(K.get_session(),
                              output_names=[out.op.name for out in model.outputs])
        tf.train.write_graph(frozen_graph, "h5_files", "tf_model.pb", as_text=False)


        print_model_to_json(model,'/uscms_data/d3/jkrupa/pf_studies/CMSSW_10_5_0_pre2/src/CaloTrigNN/CaloNtupler/test/h5_files/model.json')
        model.save_weights('/uscms_data/d3/jkrupa/pf_studies/CMSSW_10_5_0_pre2/src/CaloTrigNN/CaloNtupler/test/h5_files/dense_model_weights.h5')
        json_string = model.to_json()
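
The snippet above exports a frozen TF1 graph; a minimal sketch of loading it back for inference, assuming tf.compat.v1 is available and using hypothetical tensor names ('input_1:0', 'output/Sigmoid:0') that must be read off the real graph:

import tensorflow as tf

with tf.io.gfile.GFile('h5_files/tf_model.pb', 'rb') as f:
    graph_def = tf.compat.v1.GraphDef()
    graph_def.ParseFromString(f.read())

graph = tf.Graph()
with graph.as_default():
    tf.compat.v1.import_graph_def(graph_def, name='')

with tf.compat.v1.Session(graph=graph) as sess:
    # tensor names are hypothetical; list [n.name for n in graph_def.node] to find the real ones
    y_pred = sess.run('output/Sigmoid:0', feed_dict={'input_1:0': x_test})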
Example #2
def getCallbacks():
    callbacks = all_callbacks(stop_patience=1000,
                              lr_factor=0.5,
                              lr_patience=10,
                              lr_epsilon=0.000001,
                              lr_cooldown=2,
                              lr_minimum=0.0000001,
                              outputDir=outdir)
    return callbacks.callbacks
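
A hedged usage sketch for the helper above; it assumes a compiled Keras model, training arrays, and the global outdir that getCallbacks reads:

model.fit(X_train, y_train,
          batch_size=1024,
          epochs=100,
          validation_split=0.25,
          shuffle=True,
          callbacks=getCallbacks())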
Example #3
                                       l1Reg=yamlConfig['L1Reg'],
                                       h5fName=options.dropWeights)

    print_model_to_json(keras_model,
                        options.outputDir + '/' + 'KERAS_model.json')

    startlearningrate = 0.0001
    adam = Adam(lr=startlearningrate)
    keras_model.compile(optimizer=adam,
                        loss=[yamlConfig['KerasLoss']],
                        metrics=['accuracy'])

    # Load pre-trained weights; by_name=True matches weights to layers by
    # layer name, so only identically named layers are initialized
    keras_model.load_weights(options.inputModel, by_name=True)

    callbacks = all_callbacks(stop_patience=1000,
                              lr_factor=0.5,
                              lr_patience=10,
                              lr_epsilon=0.000001,
                              lr_cooldown=2,
                              lr_minimum=0.0000001,
                              outputDir=options.outputDir)

    keras_model.fit(X_train_val,
                    y_train_val,
                    batch_size=1024,
                    epochs=100,
                    validation_split=0.25,
                    shuffle=True,
                    callbacks=callbacks.callbacks)
Example #4
    startlearningrate=yamlparameters["Training_learning_rate"]
    
    adam = Adam(lr=startlearningrate,
                beta_1=yamlparameters["Training_learning_beta1"],
                beta_2=yamlparameters["Training_learning_beta2"],
                amsgrad=True)
    
    keras_model.compile(optimizer=adam, loss='binary_crossentropy', metrics=['binary_accuracy'])
    
    callbacks=all_callbacks(stop_patience=yamlparameters["Training_early_stopping"], 
                            initial_lr=yamlparameters["Training_learning_rate"],
                            lr_factor=yamlparameters["Training_lr_factor"],
                            lr_patience=yamlparameters["Training_lr_patience"],
                            lr_epsilon=yamlparameters["Training_lr_min_delta"], 
                            lr_cooldown=yamlparameters["Training_lr_cooldown"], 
                            lr_minimum=yamlparameters["Training_lr_minimum"],
                            Prune_begin=experiment.get_parameter("pruning_begin_epoch"),
                            Prune_end=experiment.get_parameter("pruning_end_epoch"),
                            prune_lrs=[experiment.get_parameter("pruning_lr_factor_1"),
                                       experiment.get_parameter("pruning_lr_factor_2"),
                                       experiment.get_parameter("pruning_lr_factor_3")],
                            outputDir=yamlparameters["TrainDir"])

    callbacks.callbacks.append(pruning_callbacks.UpdatePruningStep())

    with experiment.train():
    
        keras_model.fit(X_train, y_train,
                        batch_size=yamlparameters["Training_batch_size"],
                        epochs=yamlparameters["Training_epochs"],
                        callbacks=callbacks.callbacks,
                        verbose=1,
                        validation_split=yamlparameters["Training_validation_split"])
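
The pruning callbacks above come from the TensorFlow Model Optimization toolkit; a hedged sketch of the usual export step, stripping the pruning wrappers once training is done:

import tensorflow_model_optimization as tfmot

final_model = tfmot.sparsity.keras.strip_pruning(keras_model)
final_model.save(yamlparameters["TrainDir"] + '/pruned_model.h5')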
Example #5
model1.add(layers.Dense(5, activation='softmax', name='output'))

# ============================= TRAIN MODEL ================================
train = False

if train:
    adam = Adam(lr=0.001)
    model1.compile(optimizer=adam,
                   loss=['categorical_crossentropy'],
                   metrics=['categorical_accuracy'])

    callbacks = all_callbacks(stop_patience=1000,
                              lr_factor=0.5,
                              lr_patience=10,
                              lr_epsilon=0.0001,
                              lr_cooldown=2,
                              lr_minimum=0.0000001,
                              outputDir='model_1')
    model1.fit(x_train,
               y_train_one_hot,
               batch_size=1024,
               epochs=30,
               validation_split=0.25,
               shuffle=True,
               callbacks=callbacks.callbacks)
else:
    from tensorflow.keras.models import load_model
    model1 = load_model('model_1/KERAS_check_best_model.h5')
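
y_train_one_hot used in the fit above is assumed to be a one-hot encoding of integer labels; a minimal sketch producing it, with num_classes=5 matching the softmax output layer:

from tensorflow.keras.utils import to_categorical

y_train_one_hot = to_categorical(y_train, num_classes=5)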

# ============================== PLOTTING =====================================
Example #6
        bits=14,
        ints=2)
    #keras_model = models.dense_model(Input(shape=X_train.shape[1:]), l1Reg=experiment.get_parameter("Regularization"))
    startlearningrate = experiment.get_parameter("learning_rate")
    adam = Adam(lr=startlearningrate,
                beta_1=experiment.get_parameter("learning_beta1"),
                beta_2=experiment.get_parameter("learning_beta2"),
                amsgrad=experiment.get_parameter("Adagrad"))
    keras_model.compile(optimizer=adam,
                        loss='binary_crossentropy',
                        metrics=['binary_accuracy'])

    callbacks = all_callbacks(
        stop_patience=yamlparameters["Training_early_stopping"],
        lr_factor=experiment.get_parameter("Training_lr_factor"),
        lr_patience=experiment.get_parameter("Training_lr_patience"),
        lr_epsilon=yamlparameters["Training_lr_min_delta"],
        lr_cooldown=yamlparameters["Training_lr_cooldown"],
        lr_minimum=yamlparameters["Training_lr_minimum"],
        outputDir="None")
    keras_model.fit(
        X_train,
        y_train,
        batch_size=experiment.get_parameter("batch_size"),
        epochs=experiment.get_parameter("epochs"),
        validation_split=yamlparameters["Training_validation_split"],
        shuffle=True,
        callbacks=callbacks.callbacks,
        verbose=1)

    y_predict = keras_model.predict(X_test, verbose=0)
    loss, binary_accuracy = keras_model.evaluate(X_test, y_test, verbose=0)
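
A hedged follow-up to the evaluation above, assuming scikit-learn is available, summarizing test performance with an AUC:

from sklearn.metrics import roc_auc_score

auc = roc_auc_score(y_test, y_predict.ravel())
print('test loss: %.4f, accuracy: %.4f, AUC: %.4f' % (loss, binary_accuracy, auc))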
Example #7
# The callbacks will decay the learning rate and save the model into a directory 'model_1'
# The model isn't very complex, so this should just take a few minutes even on the CPU.
# If you've restarted the notebook kernel after training once, set `train = False` to load the trained model.

# In[ ]:

train = True
if train:
    adam = Adam(lr=0.0001)
    model.compile(optimizer=adam,
                  loss=['categorical_crossentropy'],
                  metrics=['accuracy'])
    callbacks = all_callbacks(stop_patience=1000,
                              lr_factor=0.5,
                              lr_patience=10,
                              lr_epsilon=0.000001,
                              lr_cooldown=2,
                              lr_minimum=0.0000001,
                              outputDir='test_model')
    model.fit(X_train_val,
              y_train_val,
              batch_size=1024,
              epochs=30,
              validation_split=0.25,
              shuffle=True,
              callbacks=callbacks.callbacks)
else:
    from tensorflow.keras.models import load_model
    model = load_model('test_model/KERAS_check_best_model.h5')

# ## Check performance
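
A minimal sketch of such a performance check, assuming scikit-learn, numpy, and one-hot encoded test labels:

import numpy as np
from sklearn.metrics import accuracy_score

y_pred = model.predict(X_test)
print('test accuracy: %.4f' % accuracy_score(np.argmax(y_test, axis=1), np.argmax(y_pred, axis=1)))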
Example #8
    return predict_test


X_train_val, X_test, y_train_val, y_test, labels = get_features(options,
                                                                yamlConfig,
                                                                test_size=0.2)

adam = Adam(lr=yamlConfig['L1Reg'])  # learning rate is read from the 'L1Reg' config entry in this snippet
model.compile(optimizer=adam,
              loss=[yamlConfig['KerasLoss']],
              metrics=['accuracy'])

callbacks = all_callbacks(stop_patience=1000,
                          lr_factor=0.5,
                          lr_patience=10,
                          lr_epsilon=0.000001,
                          lr_cooldown=2,
                          lr_minimum=0.0000001,
                          outputDir=os.path.join(os.curdir, 'training_callbacks'))

history = model.fit(X_train_val,
                    y_train_val,
                    batch_size=1024,
                    epochs=400,
                    validation_split=0.25,
                    shuffle=True,
                    callbacks=callbacks.callbacks)

y_pred = makeRoc(X_test, labels, y_test, model, os.curdir, '_trained4')

plt.figure()
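
The dangling plt.figure() above presumably begins a plot of the training history captured earlier; a hedged continuation using the standard history.history keys:

plt.plot(history.history['loss'], label='train loss')
plt.plot(history.history['val_loss'], label='val loss')
plt.xlabel('epoch')
plt.ylabel('loss')
plt.legend()
plt.savefig('loss_history.png')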
Example #9
    print_model_to_json(keras_model,yamlparameters["TrainDir"] +'/KERAS_model.json')

    startlearningrate=yamlparameters["Training_learning_rate"]
    
    adam = Adam(lr=startlearningrate,
                beta_1=yamlparameters["Training_learning_beta1"],
                beta_2=yamlparameters["Training_learning_beta2"],
                amsgrad=True)
    
    keras_model.compile(optimizer=adam, loss='binary_crossentropy', metrics=['binary_accuracy'])
    
    callbacks=all_callbacks(stop_patience=yamlparameters["Training_early_stopping"], 
                            initial_lr=yamlparameters["Training_learning_rate"],
                            lr_factor=yamlparameters["Training_lr_factor"],
                            lr_patience=yamlparameters["Training_lr_patience"],
                            lr_epsilon=yamlparameters["Training_lr_min_delta"], 
                            lr_cooldown=yamlparameters["Training_lr_cooldown"], 
                            lr_minimum=yamlparameters["Training_lr_minimum"],
                            Prune_begin=yamlparameters["Pruning_begin_epoch"],
                            Prune_end=yamlparameters["Pruning_end_epoch"],
                            outputDir=yamlparameters["TrainDir"])

    callbacks.callbacks.append(pruning_callbacks.UpdatePruningStep())

    with experiment.train():
    
        keras_model.fit(X_train,y_train,
                        batch_size=yamlparameters["Training_batch_size"],
                        epochs=yamlparameters["Training_epochs"],
                        callbacks=callbacks.callbacks,
                        verbose=1,
                        validation_split=yamlparameters["Training_validation_split"],