Example #1
0
def run_experiment(experiment_generator, out_dir, test_data, plot_loss_batch=False):
    """Train one model per configuration yielded by *experiment_generator*
    and collect timing, logs and test accuracy for each run.

    Parameters
    ----------
    experiment_generator : callable
        Zero-argument callable yielding ``(model_dict, train_dict, exp_name,
        value)`` tuples; ``model_dict`` builds an ``MlpNet`` and
        ``train_dict`` a ``Trainer``.
    out_dir : str
        Output directory for the per-batch loss plots.
    test_data : tuple
        ``(x_test, y_test)`` pair used for the final accuracy evaluation.
    plot_loss_batch : bool, optional
        When True, plot the training loss per batch of the first epoch.

    Returns
    -------
    collections.defaultdict
        Lists keyed by ``'model_dict'``, ``'train_dict'``, ``'time'``,
        ``'label'``, ``'log_data'`` and ``'test_accuracy'`` — one entry
        per experiment, index-aligned across keys.
    """
    np.random.seed(12345)  # fixed seed so experiment runs are reproducible
    results = defaultdict(list)

    for i, (model_dict, train_dict, exp_name, value) in enumerate(experiment_generator()):
        model = MlpNet(**model_dict)
        trainer = Trainer(model, **train_dict)

        label = f'{exp_name}={value}'
        print(f'{i}. {label}')

        # wall-clock time of the whole training loop
        start_time = time()
        trainer.train_loop()
        time_period = time() - start_time

        log_data = trainer.logger.logging_data

        if plot_loss_batch:
            # plot train loss per batch in first epoch
            filename = exp_name + str(value) + '_loss_one_batch'
            plot_val_loss_per_batch(log_data['loss_batch']['train'], filename, out_dir)

        # calculate accuracy on test data
        acc_metric = LabelAccuracy()
        x_test, y_test = test_data
        accuracy = acc_metric(model.predict_classes(x_test), y_test)
        print(f'Accuracy on test data: {accuracy}')

        results['model_dict'].append(model_dict)
        results['train_dict'].append(train_dict)
        results['time'].append(time_period)
        results['label'].append(label)
        results['log_data'].append(log_data)
        # previously the accuracy was only printed and then discarded;
        # store it so callers can compare configurations programmatically
        results['test_accuracy'].append(accuracy)

    return results
Example #2
0
def get_model(model_name,
              optimizer='SGDMomentum',
              initializer='Xavier',
              activation='sigmoid',
              loss_fun='crossEntropy',
              hidden_units=(512, )):
    """Build and return the model registered under *model_name*.

    Parameters
    ----------
    model_name : str
        Either ``'MlpNet'`` or ``'ConvNet'``.
    optimizer, initializer, activation, loss_fun : str, optional
        Names resolved through the ``get_*`` helper functions.
    hidden_units : tuple, optional
        Hidden-layer sizes; only used by ``MlpNet``.

    Returns
    -------
    The constructed model instance.

    Raises
    ------
    KeyError
        If *model_name* is not a known model (same behavior as before).
    """
    metrics = [LabelAccuracy()]
    optimizer = get_optimizer(optimizer)
    initializer = get_initializer(initializer, activation)
    loss_fun = get_loss_function(loss_fun)
    activation = get_activation(activation)

    # Factories instead of instances: the original constructed *every*
    # model up front, paying construction cost (and any constructor side
    # effects) even for models that were never requested.
    model_factories = dict(
        MlpNet=lambda: MlpNet(optimizer, initializer, metrics, loss_fun,
                              activation, hidden_units),
        ConvNet=lambda: ConvNet(optimizer, initializer, metrics, loss_fun),
    )

    m = model_factories[model_name]()  # KeyError for unknown names, as before
    print("Creating model {} with parameters count: {}".format(
        model_name, m.param_count()))
    return m
Example #3
0
        # calculate accuracy on test data
        acc_metric = LabelAccuracy()
        x_test, y_test = test_data
        accuracy = acc_metric(model.predict_classes(x_test), y_test)
        print('Accuracy on test data: {}'.format(accuracy))

    return results


if __name__ == "__main__":
    train_data, val_data, test_data = load_data(DATA_PATH)

    model_dict = {
        'optimizer': SGD(),
        'initializer': Xavier(),
        'metrics': [LabelAccuracy()],
        'loss_fun': categorical_cross_entropy,
        'activation': Sigmoid,
        'hidden_units': (500, )
    }

    train_dict = {
        'train_data':
        train_data,
        'val_data':
        val_data,
        'epochs':
        30,
        'batch_size':
        50,
        'callbacks': [
def calc_test_accuracy(model, test_data, train_dict):
    """Restore the saved weights and evaluate accuracy on the test set.

    Parameters
    ----------
    model
        Trained model exposing ``load_variables`` and ``predict_classes``.
    test_data : tuple
        ``(x_test, y_test)`` pair.
    train_dict : dict
        Training configuration; its last ``'callbacks'`` entry is expected
        to expose a ``save_path`` with the stored weights.

    Returns
    -------
    The computed test accuracy.
    """
    acc_metric = LabelAccuracy()
    x_test, y_test = test_data
    # reload the weights written by the last callback — presumably a
    # model-checkpoint callback; NOTE(review): confirm against the caller
    model.load_variables(train_dict['callbacks'][-1].save_path)
    accuracy = acc_metric(model.predict_classes(x_test), y_test)
    print('Accuracy on test data: {}'.format(accuracy))
    # previously the value was computed and discarded; return it so
    # callers can use the result programmatically
    return accuracy
Example #5
0
from settings import DATA_PATH, PROJECT_PATH
from training import Trainer
from utils import load_data, ensure_dir_path_exists

if __name__ == "__main__":
    np.random.seed(3)

    train_data, val_data, test_data = load_data(DATA_PATH)

    out_dir = 'my_nets/simple_net'
    ensure_dir_path_exists(os.path.join(PROJECT_PATH, out_dir))

    model_dict = {
        'optimizer': SGD(),
        'initializer': Xavier(),
        'metrics': [LabelAccuracy()],
        'loss_fun': categorical_cross_entropy,
        'activation': Sigmoid,
        'hidden_units': (100, )
    }

    train_dict = {
        'train_data':
        train_data,
        'val_data':
        val_data,
        'epochs':
        10,
        'batch_size':
        50,
        'callbacks': [