Пример #1
0
        # NOTE(review): fragment begins mid-loop — the enclosing function
        # definition and the initialization of `errs` and `besterr` are not
        # visible here; presumably this iterates over learner names. Confirm.
        # Mean error for this learner (errs[learnername] assumed array-like
        # of per-run errors — TODO confirm against the missing loop header).
        aveerr = np.mean(errs[learnername])
        print('Average error for ' + learnername + ': ' + str(aveerr))
        # Keep the learner with the lowest average error seen so far.
        if aveerr < besterr:
            besterr = aveerr
            best_algorithm = learnername
    # Report and return the name of the best-performing algorithm.
    print('best algorithm is: ', best_algorithm, 'with error: ', besterr)
    return best_algorithm


# Script entry point: configure the experiment and the set of classifiers
# to compare. NOTE(review): this fragment is truncated — the `classalgs`
# dict literal opened below is never closed within the visible source.
if __name__ == '__main__':
    # Experiment configuration.
    trainsize = 5000  # number of training samples
    testsize = 5000   # number of test samples
    numruns = 1       # number of repeated runs to average over

    # Display name -> classifier instance (classes come from the `algs`
    # module, which is imported outside this fragment).
    # NOTE(review): the key 'Linear Regression' appears twice; in a dict
    # literal the later entry silently overwrites the earlier one — this is
    # almost certainly a paste error and one of the two should be removed.
    classalgs = {'Random': algs.Classifier(),
                 'Linear Regression': algs.LinearRegressionClass(),
                 'Naive Bayes': algs.NaiveBayes({'usecolumnones': False}),
                 'Naive Bayes Ones': algs.NaiveBayes({'usecolumnones': True}),
                 'Linear Regression': algs.LinearRegressionClass(),
                 'Logistic Regression': algs.LogitReg(),
                 'Neural Network': algs.NeuralNet({'epochs': 100}),
                 'Neural Network 2': algs.NeuralNet2({'epochs': 100}),
                 'KernelLogitReg': algs.KernelLogitReg(),
                #  'KernelLogitReg': algs.KernelLogitReg({'kernel': 'hamming'})

                ##################### for calling cross validation use these instead ####################

                #  'Neural Network 4 32': algs.NeuralNet({'nh': 4, 'batch_size': 32}),
                #  'Neural Network 16 32': algs.NeuralNet({'nh': 16, 'batch_size': 32}),
                #  'Neural Network 4 128': algs.NeuralNet({'nh': 4, 'batch_size': 128}),
                #  'Neural Network 16 128': algs.NeuralNet({'nh': 16, 'batch_size': 128}),
Пример #2
0
    # NOTE(review): fragment begins mid-function — `learner` is bound in code
    # not visible here (presumably the selected learner from a preceding loop).
    best_algorithm = learner
    return best_algorithm

# Script entry point: compare classification algorithms over several runs.
# NOTE(review): this fragment is truncated — the options dict for the
# 'Hamming kernel Logistic Regression' entry and the closing brace of
# `classalgs` are cut off in the visible source.
if __name__ == '__main__':
    # Experiment configuration.
    trainsize = 5000  # number of training samples
    testsize = 5000   # number of test samples
    numruns = 3       # number of repeated runs to average over

    # Display name -> classifier instance (classes come from the `algs`
    # module, imported outside this fragment).
    classalgs = {  #'Random': algs.Classifier(),
        'Naive Bayes':
        algs.NaiveBayes({'usecolumnones': False}),
        'Naive Bayes Ones':
        algs.NaiveBayes({'usecolumnones': True}),
        'Linear Regression':
        algs.LinearRegressionClass(),
        # L2-regularized logistic regression.
        'Logistic Regression Reg':
        algs.LogitReg({
            'regularizer': 'l2',
            'lamb': 0.001,
            'stepsize': 0.001
        }),
        # Unregularized variant with the same step size.
        'Logistic Regression':
        algs.LogitReg({
            'lamb': 0.001,
            'stepsize': 0.001
        }),
        # Kernel logistic regression with 30 kernel centers
        # (presumably 'k' is the number of centers — TODO confirm in algs).
        'kernel Logistic Regression':
        algs.KernelLogitReg({'k': 30}),
        'Hamming kernel Logistic Regression':
        algs.KernelLogitReg({
Пример #3
0
        
       # NOTE(review): Python 2 code (`xrange`, `dict.iteritems`, `print`
       # statement) and the fragment starts mid-scope — the enclosing
       # definition and the `loadsusy` helper are not visible here.
       trainset, testset = loadsusy()
       """The choice of the number of folds should be user-input"""
       fold=10
    
       # Append the labels as a final column so features and labels can be
       # shuffled together, then split into `fold` cross-validation parts.
       trainlabel=np.reshape(trainset[1],(-1,1))
       trset = np.hstack((trainset[0],trainlabel))
       numinputs = trset.shape[1]-1
       np.random.shuffle(trset)
       # Round-robin split: part i takes every fold-th row starting at row i.
       parts = [trset[i::fold] for i in xrange(fold)]
       obj=[] 
       # NOTE(review): valid only under Python 2 — the expression after
       # `print` is ('...').format(...), so the formatted string is printed.
       # Under Python 3 this would call .format on print()'s return value
       # (None) and raise AttributeError.
       print('Running on train={0} and test={1} samples').format(trainset[0].shape[0], testset[0].shape[0])
       # Per-learner hyperparameters handed to the constructors below.
       parm_pass={'Neural Network':{'ni': trset.shape[1]-1, 'nh': 0, 'no': 1},
               'Logistic Regression':{'regwt':0,'type':"L2"}}
               
       # Display name -> classifier instance (from the `algs` module).
       classalgs = {'Linear Regression': algs.LinearRegressionClass(),
                    'Naive Bayes Ones': algs.NaiveBayes(),
                    'Logistic Regression': algs.LogitReg(parm_pass['Logistic Regression']),
                    'Neural Network': algs.NeuralNet(parm_pass['Neural Network'])
                 }
                  
       # Sort learners by name for a deterministic iteration order.
       classalgs1 = collections.OrderedDict(sorted(classalgs.items())) 
        
       best_parm=[]
       
       for learnername , learner in classalgs1.iteritems():
        
           print 'Running learner = ' + learnername
        
#           # Train model
           # Accuracy per hyperparameter setting (fragment ends mid-loop).
           parm_accuracy={}