Example #1
def createNet(X, Y, ln, loadFile = ""):
    """Build an (untrained) four-hidden-layer regression NeuralNet.

    Parameters
    ----------
    X, Y : training inputs/targets — currently unused here; kept so the
        call signature stays compatible with existing callers.
    ln : float
        Learning rate for the nesterov_momentum update rule.
    loadFile : str, optional
        Path to previously saved parameters. Loading is currently
        disabled (see commented block below).

    Returns
    -------
    The configured nolearn NeuralNet instance.
    """
    net1 = NeuralNet(
        layers=[  # input -> four dense hidden layers -> dense output
            ('input', layers.InputLayer),
            ('hidden', layers.DenseLayer),
            ('hidden1', layers.DenseLayer),
            ('hidden2', layers.DenseLayer),
            ('hidden3', layers.DenseLayer),
            ('output', layers.DenseLayer),
            ],
        # layer parameters: Best 400 400
        input_shape=(None, numInputs),  # numInputs features per sample
        hidden_num_units=400,  # number of units in each hidden layer
        hidden1_num_units=400,
        hidden2_num_units=400,
        hidden3_num_units=400,
        output_nonlinearity=None,  # identity output — plain regression
        output_num_units=numOutputs,

        # optimization method:
        update=nesterov_momentum,
        update_learning_rate=ln,
        update_momentum=0.9,

        regression=True,  # flag to indicate we're dealing with regression problem
        # Originally 1500 here but immediately overwritten to 50 after
        # construction; fold the effective value into the constructor.
        max_epochs=50,
        verbose=1,
        )
    # Parameter loading from loadFile is intentionally disabled; re-enable
    # once a compatible checkpoint format is confirmed.
    #if (loadFile != ""):
        #net1.load_params_from(loadFile)

    return net1
Example #2
        # NOTE(review): the enclosing function's def and the `if` guarding this
        # first print are outside this chunk — fragment annotated in place only.
        print(('train_labels_file = %s' % (train_labels_file)))

    print(('trained_net_file = %s' % (trained_net_file)))

    # Three mutually exclusive modes: retrain from scratch, generate a test
    # submission, or fine-tune a previously trained net.
    if retrain:
        print('retraining net...')
        data, labels = load2d(train_data_file, train_labels_file)
        train_net(data, labels, net13, trained_net_file)
        # Persist the loss curve alongside other plots under root/plots/.
        plot_loss(net13, outfile=join(root, 'plots', 'net13_loss.png'))
    elif test:
        print('loading net from disk...')
        # SECURITY NOTE: pickle.load executes arbitrary code — only safe on
        # trusted, locally produced checkpoint files.
        with open(trained_net_file, 'rb') as ifile:
            net13 = pickle.load(ifile)
        print('loading test data...')
        data, _ = load2d(test_data_file)  # labels unavailable for test split
        names = np.load(test_names_file)
        print('generating kaggle submission...')

        # N=50 — presumably a prediction batch/chunk size; confirm in
        # generate_submission's definition (not visible in this chunk).
        generate_submission(data, names, net13, submission_file, header_file, N=50)
    elif finetune:
        print('loading pre-trained net from disk...')
        with open(trained_net_file, 'rb') as ifile:
            net13 = pickle.load(ifile)
        print('loading training data...')
        data, labels = load2d(train_data_file, train_labels_file)
        # Fine-tuning setup: fixed batch size, drastically reduced learning
        # rate, and a single epoch per invocation.
        net13.batch_iterator_train = BatchIterator(batch_size=128)
        net13.update_learning_rate = 0.00001
        net13.max_epochs = 1
        #net13.on_training_finished = [early_stopping.load_best_weights]
        train_net(data, labels, net13, finetuned_net_file)