def train_test_NN(train, classes, test, **parameters):
    parameters['dense2_nonlinearity'] = parameters['dense1_nonlinearity']  # hack1
    parameters['dense2_init'] = parameters['dense1_init']  # hack2
    net = make_net(**parameters)
    net.fit(train, classes - 1)
    return net.predict_proba(test)
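For reference, a call to this helper could look like the sketch below. The dense1_* parameter names come from the snippets in this listing; the concrete values are placeholders, not taken from the original project.

# Hypothetical usage sketch; values are placeholders.
probabilities = train_test_NN(
    train, classes, test,
    dense1_size=256,
    dense1_nonlinearity='rectify',   # copied into dense2_nonlinearity by hack1
    dense1_init='orthogonal',        # copied into dense2_init by hack2
)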
Example #2
def train_test_NN(train, classes, test, **parameters):

	train, classes = equalize_class_sizes(train, classes)
	train, classes = filter_data(train, classes, cut_outlier_frac = 0.06, method = 'OCSVM')  # remove outliers
	train = normalize_data(train, use_log = True)[0]  # also converts to floats
	test = normalize_data(test, use_log = True)[0]

	parameters['dense2_nonlinearity'] = parameters['dense1_nonlinearity']  # hack1
	parameters['dense2_init'] = parameters['dense1_init']  # hack2
	net = make_net(**parameters)
	net.fit(train, classes - 1)
	return net.predict_proba(test)
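The preprocessing helpers used above (equalize_class_sizes, filter_data, normalize_data) are project-specific. A minimal sketch of what a log-based normalize_data could look like is given below; it is an assumption based on the call sites, not the project's actual implementation.

import numpy as np

def normalize_data_sketch(data, use_log=True):
    # Hypothetical stand-in for the project's normalize_data: optionally apply
    # a log transform, then scale each feature to zero mean and unit variance.
    data = data.astype(np.float64)
    if use_log:
        data = np.log1p(data)          # log(1 + x); safe for zero-valued counts
    means = data.mean(axis=0)
    stds = data.std(axis=0)
    stds[stds == 0] = 1.0              # avoid division by zero for constant features
    return (data - means) / stds, (means, stds)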
Example #3
def train_test_NN(train,
                  labels,
                  test,
                  use_rescale_priors=False,
                  outlier_frac=0,
                  outlier_method='OCSVM',
                  normalize_log=True,
                  use_calibration=False,
                  **parameters):
    net = make_net(**parameters)
    train, test = conormalize_data(train, test, use_log=normalize_log)
    load_knowledge(net, 'results/nnets/optimize_new.log_1000.net.npz')
    prediction = net.predict_proba(test)
    if use_rescale_priors:
        prediction = scale_to_priors(prediction,
                                     priors=bincount(labels)[1:] /
                                     float64(len(labels)))
    return prediction
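scale_to_priors is a project helper; the expression bincount(labels)[1:] / float64(len(labels)) (bincount and float64 are presumably imported from numpy) suggests labels are 1-based and that the class priors are estimated from the training labels. A minimal sketch of such a prior-rescaling step, under those assumptions, could be:

import numpy as np

def scale_to_priors_sketch(prediction, priors):
    # Hypothetical stand-in for the project's scale_to_priors: reweight each
    # class column towards the given priors, then renormalize every row so the
    # per-sample probabilities sum to one again.
    scaled = prediction * priors[np.newaxis, :]
    return scaled / scaled.sum(axis=1, keepdims=True)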
Example #4
def train_NN(train,
             labels,
             test,
             outlier_frac=0,
             outlier_method='OCSVM',
             use_calibration=False,
             normalize_log=True,
             use_rescale_priors=False,
             extra_feature_count=0,
             extra_feature_seed=0,
             test_data_confidence=None,
             test_only=False,
             **parameters):
    """
		Train a neural network, for internal use by other functions in this file.
	"""
    train, labels = expand_from_test(train,
                                     labels,
                                     get_testing_data()[0],
                                     confidence=test_data_confidence)
    train, test = chain_feature_generators(train,
                                           labels,
                                           test,
                                           extra_features=extra_feature_count,
                                           seed=extra_feature_seed)
    train, test = conormalize_data(train, test, use_log=normalize_log)
    if outlier_frac:
        train, labels = filter_data(train,
                                    labels,
                                    cut_outlier_frac=outlier_frac,
                                    method=outlier_method)
    net = make_net(NFEATS=train.shape[1], **parameters)
    if use_calibration:
        net = CalibratedClassifierCV(net,
                                     method='sigmoid',
                                     cv=ShuffleSplit(train.shape[0],
                                                     n_iter=1,
                                                     test_size=0.2))
    if not test_only:
        net.fit(train, labels - 1)
    return net, train, test
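train_NN returns the (possibly calibration-wrapped) network together with the conormalized train and test matrices, so callers are expected to predict on the returned test set rather than the raw one. A hypothetical call could look like this; the argument values are placeholders.

# Hypothetical usage sketch; train, labels and test are assumed numpy arrays
# with 1-based integer labels, and the keyword values are placeholders.
net, train_n, test_n = train_NN(train, labels, test,
                                outlier_frac=0.05,
                                use_calibration=True,
                                normalize_log=True)
probabilities = net.predict_proba(test_n)   # predict on the conormalized test data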
Example #5
	'dense3_size': None,
	'learning_rate': 0.0007,            # initial learning rate (learning rate is effectively higher for higher momentum)
	'learning_rate_scaling': 100,       # progression over time; 0.1 scaled by 10 is 0.01
	'momentum': 0.9,                    # initial momentum
	'momentum_scaling': 10,             # 0.9 scaled by 10 is 0.99
	'dropout1_rate': 0.5,               # [0, 0.5]
	'dropout2_rate': None,              # inherit dropout1_rate if dense2 exists
	'dropout3_rate': None,              # inherit dropout2_rate if dense3 exists
	'weight_decay': 0.00007,            # constrain the weights to avoid overfitting
	'max_epochs': 600,                  # training stops early once it starts overfitting, so this can be left high
	'auto_stopping': True,              # stop training automatically if it seems to be failing
	'save_snapshots_stepsize': None,    # save snapshot of the network every X epochs
}

print '>> making network'
net = make_net(train.shape[1], **params)

pretrain = join(PRETRAIN_DIR, '{0:s}_pretrain_{1:d}_{2:d}_{3:d}.net.npz'.format(params['name'], params['dense1_size'] or 0, params['dense2_size'] or 0, params['dense3_size'] or 0))
if not isfile(pretrain):
	print '>> pretraining network'
	make_pretrain(pretrain, train, labels, extra_feature_count = extra_feature_count, **params)

print '>> loading pretrained network'
load_knowledge(net, pretrain)

print '>> training network'
out = net.fit(train, labels - 1)

print '>> saving network'
save_knowledge(net, join(NNET_STATE_DIR, 'single_trained.net.npz'))
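save_knowledge and load_knowledge are project helpers that snapshot network weights to .npz files. A minimal sketch of that idea is shown below, assuming the network exposes a Lasagne output layer; the output_layer attribute name is an assumption, not taken from the project.

import numpy as np
from lasagne.layers import get_all_param_values, set_all_param_values

def save_knowledge_sketch(net, filename):
    # Assumes net exposes its Lasagne output layer; 'output_layer' is an
    # assumed attribute name used here for illustration only.
    np.savez(filename, *get_all_param_values(net.output_layer))

def load_knowledge_sketch(net, filename):
    # Restore the parameter arrays in the same order they were saved.
    with np.load(filename) as data:
        values = [data['arr_%d' % i] for i in range(len(data.files))]
    set_all_param_values(net.output_layer, values)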