Example #1
0
def trainNetWithAllData():
    unsupervisedData, data, labels = createTrainingSet()

    print "data.shape"
    print data.shape
    print "labels.shape"
    print labels.shape

    data = common.scale(data)
    unsupervisedData = None

    activationFunction = activationfunctions.Rectified()
    rbmActivationFunctionVisible = activationfunctions.Identity()
    rbmActivationFunctionHidden = activationfunctions.RectifiedNoisy()

    unsupervisedLearningRate = 0.0001
    supervisedLearningRate = 0.001
    momentumMax = 0.99

    print "This is input data shape", data.shape
    print labels.shape

    net = db.DBN(4, [1200, 1500, 1000, len(args.emotions)],
                 binary=False,
                 activationFunction=activationFunction,
                 rbmActivationFunctionVisible=rbmActivationFunctionVisible,
                 rbmActivationFunctionHidden=rbmActivationFunctionHidden,
                 unsupervisedLearningRate=unsupervisedLearningRate,
                 supervisedLearningRate=supervisedLearningRate,
                 momentumMax=momentumMax,
                 nesterovMomentum=True,
                 rbmNesterovMomentum=True,
                 rmsprop=True,
                 miniBatchSize=20,
                 hiddenDropout=0.5,
                 visibleDropout=0.8,
                 momentumFactorForLearningRateRBM=False,
                 firstRBMheuristic=False,
                 rbmVisibleDropout=1.0,
                 rbmHiddenDropout=1.0,
                 preTrainEpochs=10,
                 sparsityConstraintRbm=False,
                 sparsityRegularizationRbm=0.001,
                 sparsityTragetRbm=0.01)

    net.train(data,
              labels,
              maxEpochs=200,
              validation=False,
              unsupervisedData=unsupervisedData)

    with open(args.net_file, "wb") as f:
        pickle.dump(net, f)
    return net
Example #2
0
def trainNetWithAllData():
    unsupervisedData, data, labels = createTrainingSet()

    print "data.shape"
    print data.shape
    print "labels.shape"
    print labels.shape

    data = common.scale(data)
    unsupervisedData = None

    activationFunction = activationfunctions.Rectified()
    rbmActivationFunctionVisible = activationfunctions.Identity()
    rbmActivationFunctionHidden = activationfunctions.RectifiedNoisy()

    unsupervisedLearningRate = 0.0001
    supervisedLearningRate = 0.001
    momentumMax = 0.99

    # net = db.DBN(4, [1200, 1500, 1000, len(args.emotions)],
    #            binary=False,
    #            activationFunction=activationFunction,
    #            rbmActivationFunctionVisible=rbmActivationFunctionVisible,
    #            rbmActivationFunctionHidden=rbmActivationFunctionHidden,
    #            unsupervisedLearningRate=unsupervisedLearningRate,
    #            supervisedLearningRate=supervisedLearningRate,
    #            momentumMax=momentumMax,
    #            nesterovMomentum=True,
    #            rbmNesterovMomentum=True,
    #            rmsprop=True,
    #            miniBatchSize=20,
    #            hiddenDropout=0.5,
    #            visibleDropout=0.8,
    #            momentumFactorForLearningRateRBM=False,
    #            firstRBMheuristic=False,
    #            rbmVisibleDropout=1.0,
    #            rbmHiddenDropout=1.0,
    #            preTrainEpochs=10,
    #            sparsityConstraintRbm=False,
    #            sparsityRegularizationRbm=0.001,
    #            sparsityTragetRbm=0.01)
    #
    # net.train(data, labels, maxEpochs=200,
    #           validation=False,
    #           unsupervisedData=unsupervisedData)

    net = cnn.CNN(width=30, height=40, classes=len(args.emotions))

    net.train(data, labels)

    with open(args.net_file, "wb") as f:
        pickle.dump(net, f)
    return net
Example #3
0
def getHyperParamsAndBestNet():
    unsupervisedData, data, labels = createTrainingSet()

    print np.unique(np.argmax(labels, axis=1))

    print "data.shape"
    print data.shape
    print "labels.shape"
    print labels.shape

    print data
    data = common.scale(data)
    unsupervisedData = None

    activationFunction = activationfunctions.Rectified()
    rbmActivationFunctionVisible = activationfunctions.Identity()
    rbmActivationFunctionHidden = activationfunctions.RectifiedNoisy()

    tried_params = []
    percentages = []
    best_index = 0
    index = 0
    best_correct = 0

    # Random data for training and testing
    kf = cross_validation.KFold(n=len(data), n_folds=10)
    for train, test in kf:
        unsupervisedLearningRate = random.uniform(0.0001, 0.2)
        supervisedLearningRate = random.uniform(0.0001, 0.2)
        momentumMax = random.uniform(0.7, 1)

        tried_params += [{
            'unsupervisedLearningRate': unsupervisedLearningRate,
            'supervisedLearningRate': supervisedLearningRate,
            'momentumMax': momentumMax
        }]

        trainData = data[train]
        trainLabels = labels[train]

        # net = db.DBN(4, [1200, 1500, 1000, len(args.emotions)],
        #            binary=False,
        #            activationFunction=activationFunction,
        #            rbmActivationFunctionVisible=rbmActivationFunctionVisible,
        #            rbmActivationFunctionHidden=rbmActivationFunctionHidden,
        #            unsupervisedLearningRate=unsupervisedLearningRate,
        #            supervisedLearningRate=supervisedLearningRate,
        #            momentumMax=momentumMax,
        #            nesterovMomentum=True,
        #            rbmNesterovMomentum=True,
        #            rmsprop=True,
        #            miniBatchSize=20,
        #            hiddenDropout=0.5,
        #            visibleDropout=0.8,
        #            momentumFactorForLearningRateRBM=False,
        #            firstRBMheuristic=False,
        #            rbmVisibleDropout=1.0,
        #            rbmHiddenDropout=1.0,
        #            preTrainEpochs=10,
        #            sparsityConstraintRbm=False,
        #            sparsityRegularizationRbm=0.001,
        #            sparsityTragetRbm=0.01)
        #
        # net.train(trainData, trainLabels, maxEpochs=200,
        #           validation=False,
        #           unsupervisedData=unsupervisedData)
        #
        # probs, predicted = net.classify(data[test])

        net = cnn.CNN(30, 40, len(args.emotions))

        net.train(trainData, trainLabels)

        probs, predicted = net.classify(data[test])

        actualLabels = labels[test]
        correct = 0

        for i in xrange(len(test)):
            actual = actualLabels[i]
            print probs[i]
            if predicted[i] == np.argmax(actual):
                correct += 1

        percentage_correct = correct * 1.0 / len(test)
        print "percentage correct"
        print percentage_correct

        if percentage_correct > best_correct:
            best_index = index
            best_correct = percentage_correct
            with open(args.net_file, "wb") as f:
                pickle.dump(net, f)

        percentages += [percentage_correct]
        index += 1

    print 'best params'
    print tried_params[best_index]
    print 'precision'
    print best_correct
Example #4
0
def trainAndTestNet():
    unsupervisedData, data, labels = createTrainingSet()

    print np.unique(np.argmax(labels, axis=1))

    print "data.shape"
    print data.shape
    print "labels.shape"
    print labels.shape

    # Random data for training and testing
    kf = cross_validation.KFold(n=len(data), k=5)
    for train, test in kf:
        break

    print data
    data = common.scale(data)
    unsupervisedData = None

    activationFunction = activationfunctions.Rectified()
    rbmActivationFunctionVisible = activationfunctions.Identity()
    rbmActivationFunctionHidden = activationfunctions.RectifiedNoisy()

    unsupervisedLearningRate = 0.0001
    supervisedLearningRate = 0.001
    momentumMax = 0.99

    trainData = data[train]
    trainLabels = labels[train]

    # net = db.DBN(4, [1200, 1500, 1000, len(args.emotions)],
    #            binary=False,
    #            activationFunction=activationFunction,
    #            rbmActivationFunctionVisible=rbmActivationFunctionVisible,
    #            rbmActivationFunctionHidden=rbmActivationFunctionHidden,
    #            unsupervisedLearningRate=unsupervisedLearningRate,
    #            supervisedLearningRate=supervisedLearningRate,
    #            momentumMax=momentumMax,
    #            nesterovMomentum=True,
    #            rbmNesterovMomentum=True,
    #            rmsprop=True,
    #            miniBatchSize=20,
    #            hiddenDropout=0.5,
    #            visibleDropout=0.8,
    #            momentumFactorForLearningRateRBM=False,
    #            firstRBMheuristic=False,
    #            rbmVisibleDropout=1.0,
    #            rbmHiddenDropout=1.0,
    #            preTrainEpochs=10,
    #            sparsityConstraintRbm=False,
    #            sparsityRegularizationRbm=0.001,
    #            sparsityTragetRbm=0.01)
    #
    # net.train(trainData, trainLabels, maxEpochs=200,
    #           validation=False,
    #           unsupervisedData=unsupervisedData)
    #
    # probs, predicted = net.classify(data[test])

    net = cnn.CNN(30, 40, len(args.emotions))

    net.train(trainData, trainLabels)

    probs, predicted = net.classify(data[test])

    actualLabels = labels[test]
    correct = 0
    errorCases = []

    for i in xrange(len(test)):
        actual = actualLabels[i]
        print probs[i]
        if predicted[i] == np.argmax(actual):
            correct += 1
        else:
            errorCases.append(i)

    print "correct"
    print correct

    print "percentage correct"
    print correct * 1.0 / len(test)

    confMatrix = confusion_matrix(np.argmax(actualLabels, axis=1), predicted)
    print "confusion matrix"
    print confMatrix

    with open(args.net_file, "wb") as f:
        pickle.dump(net, f)
    return net