Example #1
import numpy
from keras.callbacks import EarlyStopping

# getDataXY, build_model and preDataStep are project-specific helpers that this
# snippet assumes are in scope (Example #2 imports getDataXY from preprocessData).


def main():
    BATCH_SIZE = 100
    EPOCH = 3
    # load dataset
    trainX, trainY, testX, testY, validX, validY = getDataXY()
    model = build_model()
    trainX, trainY = preDataStep(trainX, trainY)
    validX, validY = preDataStep(validX, validY)
    testX, testY = preDataStep(testX, testY)

    # train model ...
    print "train..."
    earlyStopping = EarlyStopping(monitor='val_acc', patience=2)
    print numpy.shape(testX)

    model.fit(trainX,
              trainY,
              verbose=2,
              shuffle=True,
              callbacks=[earlyStopping],
              epochs=EPOCH,  # named nb_epoch in Keras 1.x
              validation_data=(validX, validY),
              batch_size=BATCH_SIZE)
    loss_and_metrics = model.evaluate(testX,
                                      testY,
                                      verbose=2,
                                      batch_size=BATCH_SIZE)
    print('===============')
    print('loss_metrics:', loss_and_metrics)

    print(numpy.shape(trainX))

    return 0
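
Example #1 calls getDataXY(), build_model() and preDataStep() without showing their bodies. Below is only a minimal sketch of what the two undefined helpers might look like, assuming a small Keras Sequential classifier over flat feature vectors; the names build_model and preDataStep come from the snippet, but the architecture, NUM_FEATURES and NUM_CLASSES here are hypothetical.

import numpy
from keras.models import Sequential
from keras.layers import Dense
from keras.utils import to_categorical

NUM_FEATURES = 64   # hypothetical input width
NUM_CLASSES = 10    # hypothetical number of labels


def build_model():
    # simple fully connected classifier; the real architecture is not shown
    model = Sequential()
    model.add(Dense(32, activation='relu', input_shape=(NUM_FEATURES,)))
    model.add(Dense(NUM_CLASSES, activation='softmax'))
    model.compile(optimizer='adam',
                  loss='categorical_crossentropy',
                  metrics=['accuracy'])
    return model


def preDataStep(X, y):
    # cast features to float arrays and one-hot encode the labels
    X = numpy.asarray(X, dtype='float32')
    y = to_categorical(y, NUM_CLASSES)
    return X, y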
Example #2
import os
from time import time

import joblib  # sklearn.externals.joblib was removed in newer scikit-learn releases
from sklearn.neural_network import MLPClassifier

from preprocessData import getDataXY

trainX, trainY, testX, testY, validX, validY = getDataXY()
print(len(trainX), len(trainY), len(testX), len(testY), len(validX), len(validY))

retrain = 1  # set to 0 to load the previously saved model instead of refitting
X = trainX
y = trainY
# clf = SGDClassifier(loss="hinge", penalty="l2", n_iter=800)
# clf = tree.DecisionTreeClassifier()
# clf = NearestCentroid(metric='manhattan')
# small fully connected classifier trained with the L-BFGS solver
clf = MLPClassifier(solver='lbfgs',
                    alpha=1e-5,
                    hidden_layer_sizes=(13, 5),
                    random_state=1,
                    verbose=True,
                    max_iter=100,
                    early_stopping=True)

if not os.path.isfile('NNP.pkl') or retrain == 1:
    clf.fit(X, y)
else:
    clf = joblib.load('NNP.pkl')
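
Note that the snippet loads NNP.pkl when the file exists and retrain is off, but the training branch never writes that file. A natural follow-up (an assumption, not part of the original code) would be to persist the fitted classifier and report its accuracy on the held-out test split:

joblib.dump(clf, 'NNP.pkl')                        # persist the fitted classifier
print('test accuracy:', clf.score(testX, testY))   # mean accuracy on the test split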