Example #1

import numpy as np
from multiprocessing.pool import ThreadPool

# ModelLoader, Utilities, Histories, config, and TRAIN_DATA are provided by
# the surrounding project modules (not shown in this excerpt).

if __name__ == "__main__":
    # Specify number of particles to use and number of features
    nParticles = 60
    #nFeatures=51
    nFeatures = 47
    loader = ModelLoader((nParticles, nFeatures))
    model = loader.load()
    utils = Utilities(nParticles)
    history = Histories()
    history.set_up_config(config=config)
    history.on_train_begin()
    # Build the first training dataset
    print("TRAIN_DATA: ", TRAIN_DATA)
    X_train, Y, W_train, MVA_train = utils.BuildBatch(indir=TRAIN_DATA)

    for epoch in range(100):
        pool_local = ThreadPool(processes=1)
        # Shuffle loaded datasets and begin
        # np.random.shuffle mutates its argument, so it needs an ndarray
        # rather than a range object
        inds = np.arange(len(X_train))
        np.random.shuffle(inds)
        X_epoch, Y_epoch, W_epoch, MVA_epoch = (X_train[inds], Y[inds],
                                                W_train[inds], MVA_train[inds])

        # Check that nothing strange happened in the loaded dataset
        # (comparisons against np.nan are always False, so use np.isnan/np.isinf)
        if np.isnan(np.min(W_train)): continue
        if np.isinf(np.min(W_train)): continue

        ##Save the validation:
        history.set_mode(mode="train")
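The shuffle and weight checks above rely on two NumPy details worth isolating: np.random.shuffle needs a mutable array, and NaN never compares equal to anything, itself included. A minimal, self-contained sketch of the pattern; the array names and shapes are illustrative assumptions, not part of the original code:

import numpy as np

# Toy stand-ins for the loaded batch; shapes are illustrative assumptions.
X_train = np.random.rand(8, 60, 47)   # (events, nParticles, nFeatures)
W_train = np.random.rand(8)           # per-event weights

# Shuffle via an explicit index array: np.random.shuffle mutates its
# argument in place, so pass an ndarray rather than a range object.
inds = np.arange(len(X_train))
np.random.shuffle(inds)
X_epoch, W_epoch = X_train[inds], W_train[inds]

# np.nan == np.nan is False, so finiteness must be checked with
# np.isnan/np.isinf (or np.isfinite), never with equality.
if not np.isfinite(W_epoch).all():
    print("weights contain NaN or inf; skipping this batch")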
Example #2
    # Specify number of particles to use and number of features
    nParticles = 60
    #nFeatures=51
    nFeatures = 47
    loader = ModelLoader((nParticles, nFeatures))
    ## Define Loss for the model:
    from Loss.Loss import multi_weighted_logloss

    utils = Utilities(nParticles)
    #history = Histories()
    #history.set_up_config(config=config)
    #history.on_train_begin()
    # Build the first training dataset
    print("TRAIN_DATA: ", TRAIN_DATA)
    X_train, Y, W_train, MVA_train = utils.BuildBatch(indir=TRAIN_DATA,
                                                      nEvents=50,
                                                      nFiles=10)

    model = loader.load_multiclass(
        ouput_class=4,  # keyword spelled as in the loader's own signature
        loss='categorical_crossentropy')  #,weights=class_weight)

    for epoch in range(1000):
        pool_local = ThreadPool(processes=1)
        # Shuffle loaded datasets and begin
        inds = np.arange(len(X_train))
        np.random.shuffle(inds)
        X_epoch, Y_epoch, W_epoch, MVA_epoch = (X_train[inds], Y[inds],
                                                W_train[inds], MVA_train[inds])
        # Check that nothing strange happened in the loaded dataset
        if np.isnan(np.min(W_train)): continue
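The load_multiclass call above keeps a class_weight argument commented out. For reference, a minimal sketch of how balanced per-class weights are typically computed with scikit-learn; the labels here are assumptions for illustration, not the project's own data:

import numpy as np
from sklearn.utils.class_weight import compute_class_weight

# Hypothetical integer labels for a 4-class problem, mirroring ouput_class=4.
y = np.array([0, 0, 1, 2, 2, 2, 3])

# Balanced weights: n_samples / (n_classes * class_count), so rarer
# classes receive proportionally larger weights.
classes = np.unique(y)
weights = compute_class_weight(class_weight="balanced", classes=classes, y=y)
class_weight = dict(zip(classes, weights))
print(class_weight)  # {0: 0.875, 1: 1.75, 2: 0.583..., 3: 1.75}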
Example #3
    nParticles = 60  # as in the other examples; its definition falls outside this excerpt
    # nFeatures=51
    nFeatures = 47

    Phi_sizes, F_sizes = (50, 50, 12), (50, 50, 50)
    model = PFN(input_dim=nFeatures,
                Phi_sizes=Phi_sizes,
                F_sizes=F_sizes,
                output_dim=1,
                output_act='sigmoid',
                loss='binary_crossentropy')
    plot_model(model, to_file='deepset.png')

    utils = Utilities(nParticles)

    # Build the first training dataset
    X_train, Y, W_train, MVA_train = utils.BuildBatch()
    print(MVA_train.shape)

    for epoch in range(10000):
        # Shuffle loaded datasets and begin
        inds = np.arange(len(X_train))
        np.random.shuffle(inds)
        X_epoch, Y_epoch, W_epoch, MVA_epoch = (X_train[inds], Y[inds],
                                                W_train[inds], MVA_train[inds])
        # Check that nothing strange happened in the loaded dataset
        if np.isnan(np.min(W_train)): continue
        if np.isinf(np.min(W_train)): continue

        model.fit(X_epoch, Y_epoch, epochs=1, batch_size=4 * 512, verbose=1)
        # Dump this epoch's shuffled arrays for offline inspection
        # (pd.DataFrame expects 2-D input, so this assumes flat feature arrays)
        pd.DataFrame(X_epoch).to_csv("X_example.csv", index=False)
        pd.DataFrame(Y_epoch).to_csv("Y_example.csv", index=False)
        pd.DataFrame(MVA_epoch).to_csv("MVA_example.csv", index=False)
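Example #3 builds a Particle Flow Network directly. Assuming the PFN here is the one from the energyflow package (whose constructor matches the Phi_sizes/F_sizes keywords above, which the excerpt's import does not show), a minimal end-to-end sketch on toy data looks like this:

import numpy as np
from energyflow.archs import PFN

nParticles, nFeatures = 60, 47
X = np.random.rand(256, nParticles, nFeatures)  # (events, particles, features)
Y = np.random.randint(0, 2, size=256)           # toy binary labels

# Same architecture hyperparameters as in the example above.
pfn = PFN(input_dim=nFeatures,
          Phi_sizes=(50, 50, 12),
          F_sizes=(50, 50, 50),
          output_dim=1,
          output_act='sigmoid',
          loss='binary_crossentropy')
pfn.fit(X, Y, epochs=1, batch_size=64, verbose=0)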
Example #4
    model = loader.load()
    utils = Utilities(nParticles)
    history = Histories()
    history.set_up_config(config=config)
    history.on_train_begin()
    lr_decay = DecayLR(history.aucs['train'])
    lr_decay.set_model(model)
    log = Logger(history, TRAINING_RES + "train_" + MODEL_NAME + ".log")
    # Build the first training dataset
    print("TRAIN_DATA: ", TRAIN_DATA)

    #print(X_train[0])
    log.on_train_begin()

    # 'epoch' is restored from a checkpoint earlier in the script (not shown),
    # so training resumes at the next epoch and runs through epoch 50
    for epoch in range(int(epoch) + 1, 51):
        X_train, Y, W_train, MVA_train = utils.BuildBatch(indir=TRAIN_DATA,
                                                          nEvents=250)
        pool_local = ThreadPool(processes=1)
        # Shuffle loaded datasets and begin
        inds = np.arange(len(X_train))
        np.random.shuffle(inds)

        X_epoch, Y_epoch, W_epoch, MVA_epoch = (X_train[inds], Y[inds],
                                                W_train[inds], MVA_train[inds])

        # Check that nothing strange happened in the loaded dataset
        if np.isnan(np.min(W_train)): continue
        if np.isinf(np.min(W_train)): continue

        ##Save the validation:
        history.set_mode(mode="train")
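Example #4 attaches a custom DecayLR callback to the training-AUC history via set_model. A sketch of the same idea using Keras's built-in ReduceLROnPlateau as a stand-in for the project's DecayLR; the toy model and data are assumptions for illustration:

import numpy as np
from tensorflow.keras import Sequential
from tensorflow.keras.layers import Dense
from tensorflow.keras.callbacks import ReduceLROnPlateau

# Toy model and data purely for illustration.
model = Sequential([Dense(8, activation='relu', input_shape=(4,)),
                    Dense(1, activation='sigmoid')])
model.compile(optimizer='adam', loss='binary_crossentropy')

X = np.random.rand(128, 4)
Y = np.random.randint(0, 2, size=128)

# Halve the learning rate when the monitored metric stops improving,
# the built-in analogue of wiring DecayLR to a tracked history.
lr_decay = ReduceLROnPlateau(monitor='loss', factor=0.5, patience=3)
model.fit(X, Y, epochs=10, batch_size=32, callbacks=[lr_decay], verbose=0)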