# Example #1 (notebook export residue — original cell output, commented out so
# the module imports cleanly; the bare name `Example` would raise NameError)
# 0
#        self.meta_model_.fit(out_of_fold_predictions, y)
#        return self
#   
#    #Do the predictions of all base models on the test data and use the averaged predictions as 
#    #meta-features for the final prediction which is done by the meta-model
#    def predict(self, X):
#        meta_features = np.column_stack([
#            np.column_stack([model.predict(X) for model in base_models]).mean(axis=1)
#            for base_models in self.base_models_ ])
#        return self.meta_model_.predict(meta_features)


# In[1]:

# Train a CNN on fold 0 of the pre-split data.
# NOTE(review): X_train/y_train/X_test/y_test are assumed to be lists of
# per-fold arrays defined in an earlier notebook cell — confirm before running.
from models.cnn import CNN
model = CNN()
# Fit on the first fold only; the fold-0 test split is passed as validation data.
model.fit(X_train[0], y_train[0], X_test[0], y_test[0])

# # Make submission

# In[11]:

#import pandas as pd
#sub = pd.DataFrame()


# In[ ]:



def train(train_ds, val_ds, exp_count, overwrite_flag, **params):
    """Train a CNN with the given hyper-parameters and persist the results.

    Builds the optimizer / initializer / loss function from the string keys in
    ``params``, fits the model on ``train_ds`` (validating on ``val_ds``),
    plots the loss curves, and pickles both the config and the trained model
    under ``results/experiment_<tag>``.

    Parameters
    ----------
    train_ds, val_ds :
        Training / validation datasets forwarded to ``CNN.fit``.
    exp_count :
        Experiment tag to reuse when ``overwrite_flag`` is truthy.
    overwrite_flag :
        If truthy, save into ``results/experiment_<exp_count>``; otherwise
        pick the next unused tag based on existing experiment directories.
    **params :
        Hyper-parameters; must contain "num_epochs", "learning_rate",
        "optimizer_type" ("adam"/"sgd"/"rmsprop"), "initializer_type"
        ("xavier"/"random"), "loss_type" ("l1"/"l2"),
        and "early_stop_tolerance".
    """
    if overwrite_flag:
        tag = exp_count
    else:
        # Continue numbering from the highest existing experiment directory.
        # Guard against a missing "results" dir on the very first run
        # (the original os.listdir call raised FileNotFoundError there).
        try:
            existing = os.listdir("results")
        except FileNotFoundError:
            existing = []
        tag_list = [
            int(name.split("_")[-1]) for name in existing if "exp" in name
        ]
        tag = max(tag_list) + 1 if tag_list else 0

    save_dir = os.path.join('results', 'experiment_' + str(tag))
    # exist_ok avoids the isdir/makedirs race of the original check.
    os.makedirs(save_dir, exist_ok=True)

    num_epochs = params["num_epochs"]
    learning_rate = params["learning_rate"]
    optimizer_type = params["optimizer_type"]
    initializer_type = params["initializer_type"]
    loss_type = params["loss_type"]
    early_stop_tolerance = params["early_stop_tolerance"]

    # Dispatch tables map config strings to TF factory callables.
    optimizer_dispatcher = {
        "adam": tf.optimizers.Adam,
        "sgd": tf.optimizers.SGD,
        "rmsprop": tf.optimizers.RMSprop
    }
    initializer_dispatcher = {
        "xavier": tf.initializers.glorot_uniform,
        "random": tf.random_normal_initializer
    }
    loss_dispatcher = {"l2": loss_l2, "l1": loss_l1}

    optimizer = optimizer_dispatcher[optimizer_type](learning_rate)
    initializer = initializer_dispatcher[initializer_type]()
    loss_fun = loss_dispatcher[loss_type]

    model = CNN(initializer=initializer,
                optimizer=optimizer,
                loss_function=loss_fun,
                **params)

    train_loss, val_loss, evaluation_val_loss = model.fit(
        train_ds, val_ds, num_epochs, early_stop_tolerance)

    # Plot and save the loss curves next to the other experiment artifacts.
    plot_loss_curve(train_loss, val_loss, loss_type, save_dir)

    # Copy params so the caller's kwargs dict is not mutated
    # (the original aliased it and injected the loss histories in place).
    save_dict = dict(params)
    save_dict['train_loss'] = train_loss
    save_dict['val_loss'] = val_loss
    save_dict['evaluation_val_loss'] = evaluation_val_loss
    print('Saving...')
    conf_save_path = os.path.join(save_dir, 'config.pkl')
    model_save_path = os.path.join(save_dir, 'model.pkl')
    for save_path, obj in zip([conf_save_path, model_save_path],
                              [save_dict, model]):
        with open(save_path, 'wb') as file:
            pkl.dump(obj, file)