# Example no. 1 (scraped-snippet boundary; original marker: "Esempio n. 1" / vote count "0")
def training(model_name):
    """Train the video model selected by *model_name* and save its artifacts.

    Args:
        model_name: identifier understood by ``ModelLoader`` that selects the
            architecture; also embedded in checkpoint and log file names.

    Side effects:
        Writes epoch checkpoints (``<model_name>-EEE-L.LLL.hdf5``), TensorBoard
        logs and a timestamped CSV log under ``./models/logs``, the final model
        to ``./my_model.h5``, and the training history via ``save_history``.
    """
    # Data parameters
    data_dir = r'/src/'
    seq_length = 40
    n_videos = {'train': 1000, 'validation': 100}
    image_size = (50, 88)

    # Training parameters
    n_epochs = 50
    batch_size = 8
    # Steps needed to cover the full training set once.
    # NOTE(review): fit_generator below still passes a hard-coded 20 steps per
    # epoch — confirm whether that was debug leftover before switching to this.
    steps_per_epoch = n_videos['train'] // batch_size

    # Load data generators
    data = CNN3DDataLoader(data_dir,
                           seq_length=seq_length,
                           n_videos=n_videos,
                           labels=labels_want)
    train_gen = data.sequence_generator('train', batch_size, image_size)
    validation_gen = data.sequence_generator('validation', batch_size,
                                             image_size)

    # Load model
    #optimizer = keras.optimizers.SGD(lr=0.1, momentum=0.9, decay= 1e-6, nesterov=True)
    optimizer = keras.optimizers.Adadelta()
    ml = ModelLoader(data.n_labels,
                     data.seq_length,
                     model_name,
                     image_size=image_size,
                     optimizer=optimizer)
    model = ml.model

    # Define callbacks
    # BUG FIX: the checkpoint path previously concatenated the literal string
    # 'model_name' instead of the model_name argument, so every model
    # overwrote the same "model_name-*.hdf5" files.
    checkpointer = ModelCheckpoint(filepath=model_name +
                                   '-{epoch:03d}-{loss:.3f}.hdf5',
                                   verbose=1,
                                   save_best_only=True)
    tb = TensorBoard(log_dir='./models/logs')
    early_stopper = EarlyStopping(patience=2)
    csv_logger = CSVLogger('./models/logs/' + model_name + '-' + 'training-' +
                           str(time.time()) + '.log')

    callbacks = [tb, early_stopper, csv_logger, checkpointer]

    # Training
    print('Starting training')

    history = model.fit_generator(
        generator=train_gen,
        steps_per_epoch=20,
        epochs=n_epochs,
        verbose=1,
        callbacks=callbacks,
        validation_data=validation_gen,
        validation_steps=10,
    )

    model.save('./my_model.h5')

    # NOTE(review): the history is saved under the fixed tag "c3d" regardless
    # of model_name — confirm that is intended.
    save_history(history, "c3d")
    'learning_rate': 0.01,
    'n_estimators': 20000,
    'max_depth': 5,
    'max_delta_step': 5,
    'colsample_bylevel': 0.9,
    'colsample_bytree': 0.95,
    'subsample': 0.8,
    'gamma': 1.5,
    'max_leaves': 10,
    'min_child_weight': 50,
    'reg_alpha': 0.6,  # L1 regularization
    'reg_lambda': 50,  # L2 regularization
    'seed': 42
}

# Wrap an XGBoost classifier in the project's ModelLoader, forwarding the
# hyper-parameter dict (defined above) as estimator keyword arguments.
model = ModelLoader(xgb.XGBClassifier, model_params, **xgboost_params)

# Keyword arguments forwarded to the estimator's fit()/predict() by model.run.
fit_params = {'early_stopping_rounds': 2500, 'verbose': 1000}
predict_params = {}

# Train/evaluate over the folds supplied by data_loader, scored with ROC AUC.
results = model.run(data_loader,
                    roc_auc_score,
                    fit_params,
                    predict_params,
                    verbose=True)

# Optionally persist predictions/models plus a copy of this script.
# NOTE(review): args presumably comes from argparse elsewhere — verify.
if args.save:
    current_file_path = os.path.abspath(__file__)  # to save this .py file
    model.save(data_loader, results, current_file_path, args.preds,
               args.models)
## << Create and train model
# Example no. 3 (scraped-snippet boundary; original marker: "Esempio n. 3" / vote count "0")
    'online_val': "eval_set"
}
# CatBoost hyper-parameters: GPU training with AUC-monitored,
# iteration-based early stopping.
catboost_params = dict(
    loss_function="Logloss",
    eval_metric="AUC",
    task_type="GPU",
    learning_rate=0.01,
    iterations=70000,
    l2_leaf_reg=50,
    random_seed=42,
    od_type="Iter",
    depth=5,
    early_stopping_rounds=5000,
    border_count=64,
)
# Wrap a CatBoost classifier in the project's ModelLoader with the
# hyper-parameters defined above.
model = ModelLoader(CatBoostClassifier, model_params, **catboost_params)

# Keyword arguments forwarded to the estimator's fit()/predict() by model.run.
fit_params = {'use_best_model': True, 'verbose': 5000, 'plot': True}
predict_params = {}
# Train/evaluate over the folds supplied by data_loader, scored with ROC AUC.
results = model.run(data_loader,
                    roc_auc_score,
                    fit_params,
                    predict_params,
                    verbose=True)

# Optionally persist predictions/models plus a copy of this script.
# NOTE(review): args presumably comes from argparse elsewhere — verify.
if args.save:
    current_file_path = os.path.abspath(__file__)  # to save this .py file
    model.save(data_loader, results, current_file_path, args.preds,
               args.models)
## << Create and train model
# Example no. 4 (scraped-snippet boundary; original marker: "Esempio n. 4" / vote count "0")
            "verbosity": 0,
            "seed": 42
        }

    def fit(self, train, cv):
        """Train a LightGBM booster on *train*, early-stopping on *cv*.

        Args:
            train: ``(features, labels)`` tuple for the training fold.
            cv: ``(features, labels)`` tuple used as the validation fold.

        Side effects:
            Stores the trained booster on ``self.model``.
        """
        x_tr, y_tr = train
        x_cv, y_cv = cv
        trn_data = lgb.Dataset(x_tr, label=y_tr)
        val_data = lgb.Dataset(x_cv, label=y_cv)
        evals_result = {}  # filled in place by lgb.train with per-round metrics
        # 100000 is num_boost_round; early_stopping_rounds=3000 halts training
        # well before that if the validation metric stops improving.
        self.model = lgb.train(self.lgb_params,
                               trn_data,
                               100000,
                               valid_sets=[trn_data, val_data],
                               early_stopping_rounds=3000,
                               verbose_eval=1000,
                               evals_result=evals_result)

    def predict(self, test):
        """Return the trained booster's predictions for *test*."""
        booster = self.model
        return booster.predict(test)


# Wrap the custom LightGbmTrainer (defined above) in the project's ModelLoader.
model = ModelLoader(LightGbmTrainer, model_params)
# Train/evaluate over the folds supplied by data_loader, scored with ROC AUC;
# no extra fit/predict kwargs are needed since the trainer hard-codes them.
results = model.run(data_loader, roc_auc_score, {}, {}, verbose=True)

# Optionally persist predictions/models plus a copy of this script.
# NOTE(review): args presumably comes from argparse elsewhere — verify.
if args.save:
    current_file_path = os.path.abspath(__file__)  # to save this .py file
    model.save(data_loader, results, current_file_path, args.preds,
               args.models)
## << Create and train model
# Example no. 5 (scraped-snippet boundary; original marker: "Esempio n. 5" / vote count "0")
                           n_splits=5,
                           shuffle=True,
                           random_state=42)
## << Read and preprocess data

## >> Create and train model
from sklearn.naive_bayes import GaussianNB
from sklearn.metrics import roc_auc_score

# ModelLoader wiring for a Gaussian naive-Bayes estimator: method names to
# call for training/inference and the probability column to keep.
model_params = dict(
    name="gaussian",
    fit="fit",
    predict="predict_proba",
    pred_col=1,
)
# Wrap a Gaussian naive-Bayes classifier in the project's ModelLoader.
model = ModelLoader(GaussianNB, model_params)

# GaussianNB needs no extra fit/predict keyword arguments.
fit_params = {}
predict_params = {}
# Train/evaluate over the folds supplied by data_loader, scored with ROC AUC.
results = model.run(data_loader,
                    roc_auc_score,
                    fit_params,
                    predict_params,
                    verbose=True)

# Optionally persist predictions/models plus a copy of this script.
# NOTE(review): args presumably comes from argparse elsewhere — verify.
if args.save:
    current_file_path = os.path.abspath(__file__)  # to save this .py file
    model.save(data_loader, results, current_file_path, args.preds,
               args.models)
## << Create and train model
from sklearn.metrics import roc_auc_score

# ModelLoader wiring for logistic regression: method names to call for
# training/inference and the probability column to keep.
model_params = dict(
    name="logreg",
    fit="fit",
    predict="predict_proba",
    pred_col=1,
)
# scikit-learn LogisticRegression hyper-parameters (L2-penalized liblinear,
# one-vs-rest, fixed seed for reproducibility).
logreg_params = dict(
    C=0.9,
    penalty='l2',
    multi_class='ovr',
    solver='liblinear',
    random_state=42,
)
# Wrap a logistic-regression classifier in the project's ModelLoader,
# forwarding the hyper-parameters defined above.
model = ModelLoader(LogisticRegression, model_params, **logreg_params)

# LogisticRegression needs no extra fit/predict keyword arguments.
fit_params = {}
predict_params = {}
# Train/evaluate over the folds supplied by data_loader, scored with ROC AUC.
results = model.run(data_loader,
                    roc_auc_score,
                    fit_params,
                    predict_params,
                    verbose=True)

# Optionally persist predictions/models plus a copy of this script.
# NOTE(review): args presumably comes from argparse elsewhere — verify.
if args.save:
    current_file_path = os.path.abspath(__file__)  # to save this .py file
    model.save(data_loader, results, current_file_path, args.preds,
               args.models)
## << Create and train model
# Example no. 7 (scraped-snippet boundary; original marker: "Esempio n. 7" / vote count "0")
# ModelLoader wiring for the dense neural network: method names to call for
# training/inference and the probability column to keep.
model_params = dict(
    name="dense_nn",
    fit="fit",
    predict="predict_proba",
    pred_col=1,
)

# KerasClassifier wrapper settings; build_fn points at the dense_nn_model
# factory defined elsewhere in the project.
nn_params = dict(
    build_fn=dense_nn_model,
    epochs=25,
    batch_size=256,
    verbose=1,
)

# Wrap the Keras scikit-learn adapter in the project's ModelLoader,
# forwarding the network/training settings defined above.
model = ModelLoader(KerasClassifier, model_params, **nn_params)

# KerasClassifier needs no extra fit/predict keyword arguments here.
fit_params = {}
predict_params = {}
# Train/evaluate over the folds supplied by data_loader, scored with ROC AUC.
results = model.run(data_loader,
                    roc_auc_score,
                    fit_params,
                    predict_params,
                    verbose=True)

# Optionally persist predictions/models plus a copy of this script.
# NOTE(review): args presumably comes from argparse elsewhere — verify.
if args.save:
    current_file_path = os.path.abspath(__file__)  # to save this .py file
    model.save(data_loader, results, current_file_path, args.preds,
               args.models)
## << Create and train model