Example #1
        #offset = 0
        Xtrain = data[train, 0:-1]
        #Xtrain = Xtrain[1,:,:]
        ytrain = data[train, -1]
        #ytrain = ytrain.ravel()
        Xtest = data[test, 0:-1]
        ytest = data[test, -1]
        #ytest = ytest.ravel()

        #    trainset, testset = splitdataset(dataset)
        #    print('Split {0} rows into train={1} and test={2} rows').format(len(dataset), trainset[0].shape[0], testset[0].shape[0])
        #    nnparams = {'ni': trainset[0].shape[1], 'nh': 32, 'no': 1}
        # Add comments here for testing purposes.

        classalgs = {
            'Random': algs.Classifier(),
            'Linear Regression': algs.LinearRegressionClass(),
            #                    'Naive Bayes': algs.NaiveBayes({'usecolumnones': False})
            'Logistic Regression': algs.LogitReg()
        }

        for learnername, learner in classalgs.items():
            #print 'Running learner = ' + learnername
            # Train model
            learner.learn(Xtrain, ytrain)
            # Test model
            #print ytest
            predictions = learner.predict(Xtest)
            #print
            accuracy = getaccuracy(ytest, predictions)
            if learnername == 'Random':
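
The getaccuracy helper called above is not included in this excerpt. A minimal
sketch of a plausible implementation, assuming it returns the percentage of
correctly predicted test labels (the original script's definition may differ):

def getaccuracy(ytest, predictions):
    # Assumed definition: percentage of test labels predicted correctly.
    correct = sum(1 for y, p in zip(ytest, predictions) if y == p)
    return (correct / float(len(ytest))) * 100.0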
Example #2
def cross_validate(K, X, Y, classalgs):
    # Placeholder: the fold loop below is still a stub.
    for k in range(K):
        for learnername in classalgs:
            print('make this work')

    # Note: this currently returns whichever learner was iterated last,
    # not the best-scoring one; see the sketch below.
    best_algorithm = classalgs[learnername]
    return best_algorithm
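
# A hedged sketch (not from the original source) of one way the stub above
# could be completed: score each learner with K-fold cross-validation and
# return the one with the highest mean held-out accuracy. It assumes each
# learner exposes learn()/predict() and reuses getaccuracy() from Example #1.
def cross_validate_sketch(K, X, Y, classalgs):
    import numpy as np
    folds = np.array_split(np.arange(X.shape[0]), K)
    scores = {name: 0.0 for name in classalgs}
    for k in range(K):
        test_idx = folds[k]
        train_idx = np.hstack([folds[j] for j in range(K) if j != k])
        for learnername, learner in classalgs.items():
            learner.learn(X[train_idx], Y[train_idx])
            predictions = learner.predict(X[test_idx])
            scores[learnername] += getaccuracy(Y[test_idx], predictions) / K
    # Highest mean held-out accuracy wins.
    return classalgs[max(scores, key=scores.get)]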


if __name__ == '__main__':
    trainsize = 5000
    testsize = 5000
    numruns = 10

    classalgs = {'Random': algs.Classifier(),
#                 'Naive Bayes': algs.NaiveBayes({'usecolumnones': False}),
#                 'Naive Bayes Ones': algs.NaiveBayes({'usecolumnones': True}),
                 'Linear Regression': algs.LinearRegressionClass(),
#                 'Logistic Regression': algs.LogitReg(),
#                 'Neural Network': algs.NeuralNet({'epochs': 100})
                }
    numalgs = len(classalgs)

    parameters = (
        {'regwgt': 0.0, 'nh': 4},
        {'regwgt': 0.01, 'nh': 8},
        {'regwgt': 0.05, 'nh': 16},
        {'regwgt': 0.1, 'nh': 32},
    )
    numparams = len(parameters)
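
Example #3 below shows the tail of a loop that tracks besterror/bestparams over
a grid like the parameters tuple above. A hedged sketch, not taken from the
original source, of how such a grid search could look for a single learner,
assuming its constructor accepts a parameter dict (as several constructors in
these examples do) and reusing getaccuracy and numpy-based folds as above:

import numpy as np

def select_parameters_sketch(X, Y, parameters, K=5):
    # Track the lowest average held-out error seen so far.
    besterror = float('inf')
    bestparams = None
    folds = np.array_split(np.arange(X.shape[0]), K)
    for p in parameters:
        foldaccs = []
        for k in range(K):
            test_idx = folds[k]
            train_idx = np.hstack([folds[j] for j in range(K) if j != k])
            learner = algs.LogitReg(p)  # assumed to accept a parameter dict
            learner.learn(X[train_idx], Y[train_idx])
            foldaccs.append(getaccuracy(Y[test_idx], learner.predict(X[test_idx])))
        aveerror = 100.0 - np.mean(foldaccs)
        if aveerror < besterror:
            besterror = aveerror
            bestparams = p
    return bestparams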
Example #3
                besterror = aveerror
                bestparams = p

    best_algorithm = classalgs[p]
    return best_algorithm


if __name__ == '__main__':

    trainsize = 5000
    testsize = 5000
    numruns = 10

    classalgs = {
        'Random': algs.Classifier(),
        'Naive Bayes': algs.NaiveBayes({'usecolumnones': False}),
        'Naive Bayes Ones': algs.NaiveBayes({'usecolumnones': True}),
        'Linear Regression': algs.LinearRegressionClass(),
        'Logistic Regression': algs.LogitReg(),
        'Neural Network': algs.NeuralNet({'epochs': 100}),
        'LinearKernelLogitReg': algs.KernelLogitReg({
            'kernel': 'linear',
            'regwgt': 0.01,
            'regularizer': 'None'