if __name__ == '__main__':
    trainsize = 1000
    testsize = 5000
    numruns = 10
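    # trainsize/testsize control how many samples are drawn for each run and
    # numruns how often the experiment is repeated (presumably to average the
    # results); the sampling/averaging loop itself is not part of this excerpt.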

    classalgs = {
        # 'Random': algs.Classifier(),
        # 'Naive Bayes': algs.NaiveBayes({'usecolumnones': False}),
        # 'Naive Bayes Ones': algs.NaiveBayes({'usecolumnones': True}),
        # 'Linear Regression': algs.LinearRegressionClass(),
        # 'Logistic Regression': algs.LogitReg(),
        # 'L1 Logistic Regression': algs.LogitReg({'regularizer': 'l1'}),
        # 'L2 Logistic Regression': algs.LogitReg({'regularizer': 'l2'}),
        # 'Elastic Logistic Regression': algs.LogitReg({'regularizer': 'elastic'}),
        # 'Logistic Alternative': algs.LogitRegAlternative(),
        'Neural Network': algs.NeuralNet({'epochs': 100})
    }
    numalgs = len(classalgs)
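    # classalgs maps a display name to a learner instance from the algs module
    # (its import is outside this excerpt); commented-out entries can simply be
    # uncommented to add those learners to the comparison.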

    parameters = (
        {'regwgt': 0.0, 'nh': 4},
        {'regwgt': 0.01, 'nh': 8},
        {'regwgt': 0.05, 'nh': 16},
    )
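# A minimal, self-contained sketch of how a driver like the one above typically
# uses `classalgs`, `parameters` and `numruns`: each learner is re-created,
# trained and scored for every parameter setting and every run, and the best
# setting per learner is reported. DummyLearner, make_data and get_accuracy are
# stand-ins for the course's algs/dataloader helpers, which are not shown here.
import numpy as np


class DummyLearner:
    """Majority-class baseline standing in for the real learners."""

    def __init__(self, parameters=None):
        self.params = dict(parameters or {})
        self.label = 0

    def learn(self, Xtrain, ytrain):
        self.label = int(np.mean(ytrain) >= 0.5)

    def predict(self, Xtest):
        return np.full(Xtest.shape[0], self.label)


def make_data(n, d=5, seed=0):
    rng = np.random.default_rng(seed)
    X = rng.normal(size=(n, d))
    y = (X[:, 0] + 0.1 * rng.normal(size=n) > 0).astype(int)
    return X, y


def get_accuracy(ytrue, ypred):
    return 100.0 * np.mean(ytrue == ypred)


if __name__ == '__main__':
    parameters = ({'regwgt': 0.0, 'nh': 4}, {'regwgt': 0.01, 'nh': 8})
    classalgs = {'Random': DummyLearner}
    numruns = 3

    for name, Learner in classalgs.items():
        accs = np.zeros((len(parameters), numruns))
        for p, params in enumerate(parameters):
            for r in range(numruns):
                Xtrain, ytrain = make_data(1000, seed=r)
                Xtest, ytest = make_data(500, seed=100 + r)
                learner = Learner(params)
                learner.learn(Xtrain, ytrain)
                accs[p, r] = get_accuracy(ytest, learner.predict(Xtest))
        best = int(np.argmax(accs.mean(axis=1)))
        print('Best parameters for {0}: {1} (mean accuracy {2:.1f}%)'.format(
            name, parameters[best], accs[best].mean()))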
Example #2
            'stepsize': 0.001
        }),
        'Logistic Regression': algs.LogitReg({'lamb': 0.001, 'stepsize': 0.001}),
        'kernel Logistic Regression': algs.KernelLogitReg({'k': 30}),
        'Hamming kernel Logistic Regression': algs.KernelLogitReg({'kernel': 'hamming', 'k': 20}),
        'Neural Network': algs.NeuralNet({'epochs': 100}),
        'Neural Network2': algs.NeuralNet2({'epochs': 100})
    }
    numalgs = len(classalgs)

    cls = {
        'Logistic Regression Regularized': algs.LogitReg({
            'regularizer': 'l2',
            'lamb': 0.001,
            'stepsize': 0.001
        }),
        'Logistic Regression Regularized (variant)': algs.LogitReg({
            'regularizer': 'l2',
    errors = {}
    # accuracies = []
    classalgs = {}
    numparams = 0
    parameters = {}
    for i in range(5):
        smallDataSet = dataset[i * 1300:(i + 1) * 1300]
        classalgs = {
            'Naive Bayes': algs.NaiveBayes({'usecolumnones': False}),
            'Logistic Regression': algs.LogitReg(),
            'Neural Network': algs.NeuralNet({
                'epochs': 100,
                'stepsize': 0.01,
                'nh': 8,
                'ni': 19
            })
            # 'L1 Logistic Regression': algs.LogitReg({'regularizer': 'l1'}),
            # 'L2 Logistic Regression': algs.LogitReg({'regularizer': 'l2'}),
        }
        numalgs = len(classalgs)
        parameters = (
            # {'regwgt': 0.0, 'nh': 4},
            {'regwgt': 0.01, 'nh': 8},
            # {'regwgt': 0.05, 'nh': 16},
            # {'regwgt': 0.1, 'nh': 32},
        )
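# The slicing above (dataset[i * 1300:(i + 1) * 1300]) carves the data into five
# contiguous blocks, i.e. a hand-rolled 5-fold split. Below is a small,
# self-contained sketch of the same idea with shuffling and an arbitrary fold
# count; `dataset` is assumed to be a 2-D numpy array with the label in the
# last column, and the file name in the commented usage line is hypothetical.
import numpy as np


def kfold_indices(n, folds=5, seed=0):
    """Return (train_idx, test_idx) pairs for a simple shuffled k-fold split."""
    rng = np.random.default_rng(seed)
    chunks = np.array_split(rng.permutation(n), folds)
    return [(np.concatenate(chunks[:i] + chunks[i + 1:]), chunks[i])
            for i in range(folds)]


# Usage sketch:
# dataset = np.loadtxt('susysubset.csv', delimiter=',')  # hypothetical file name
# for train_idx, test_idx in kfold_indices(dataset.shape[0], folds=5):
#     trainset = (dataset[train_idx, :-1], dataset[train_idx, -1])
#     testset = (dataset[test_idx, :-1], dataset[test_idx, -1])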
Example #4
if __name__ == '__main__':
    trainsize = 5000
    testsize = 5000
    numruns = 5

    classalgs = {
        'Random': algs.Classifier(),
        #'Naive Bayes': algs.NaiveBayes({'usecolumnones': False}),
        #'Naive Bayes Ones': algs.NaiveBayes({'usecolumnones': True}),
        #'Linear Regression': algs.LinearRegressionClass({'regularizer': 'l2', 'regwgt': 0.0}),
        #'Logistic Regression': algs.LogitReg({'regularizer': 'l2', 'regwgt': 0.0}),
        'Neural Network': algs.NeuralNet({
            'epochs': 150,
            'transfer': 'sigmoid',
            'stepsize': 0.1,
            'nh': 32
        }),
        #'Kernel Logistic Regression': algs.KernelLogitReg({'kernel': 'None', 'num_centers': 10, 'regularizer': 'None', 'regwgt': 0.0}),
        #'Linear Kernel Logistic Regression': algs.KernelLogitReg({'kernel': 'linear', 'num_centers': 100, 'regularizer': 'l2', 'regwgt': 0.0}),
        #'Hamming Kernel Logistic Regression': algs.KernelLogitReg({'kernel': 'hamming', 'num_centers': 100, 'regularizer': 'l2', 'regwgt': 0.0})
    }
    numalgs = len(classalgs)
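    # In the active NeuralNet entry above, 'epochs' is the number of passes over
    # the training data, 'stepsize' the learning rate, 'transfer' the activation
    # function, and 'nh' presumably the number of hidden units (compare the
    # nnparams dict {'ni', 'nh', 'no'} used in a later example).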

    parameters = (
        #{'regwgt': 0.0, 'nh': 4},
        #{'regwgt': 0.01, 'nh': 8},
        #{'regwgt': 0.05, 'nh': 16},
        #{'regwgt': 0.1, 'nh': 32},
        {
            'regwgt': 0.0,
Example #5
    testsetx = np.hstack((datasettest, np.ones((datasettest.shape[0], 1))))
    testset = (testsetx, testlab)

    return trainset, testset
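# Note: the loader above appends a column of ones to the test features
# (np.hstack with np.ones), presumably as a bias/intercept input; the matching
# training-set construction is cut off in this excerpt.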


if __name__ == '__main__':
    trainset, testset = loadsusy()
    #trainset, testset = loadmadelon()
    print('Running on train={0} and test={1} samples'.format(
        trainset[0].shape[0], testset[0].shape[0]))
    nnparams = {'ni': trainset[0].shape[1], 'nh': 64, 'no': 1}
    classalgs = {
        'Random': algs.Classifier(),
        'Linear Regression': algs.LinearRegressionClass(),
        'Naive Bayes': algs.NaiveBayes({'usecolumnones': False}),
        'Naive Bayes Ones': algs.NaiveBayes(),
        'Logistic Regression': algs.LogitReg(),
        'Neural Network': algs.NeuralNet(nnparams),
        'Linear Class': algs.LinearClass()
    }

    for learnername, learner in classalgs.items():
        print('Running learner = ' + learnername)
        # Train model
        learner.learn(trainset[0], trainset[1])
        # Test model
        predictions = learner.predict(testset[0])
        accuracy = getaccuracy(testset[1], predictions)
        print('Accuracy for ' + learnername + ': ' + str(accuracy))
Example #6
    else:
        k_foldStratified = 0
        k_foldClass = 0

    classalgs = {
        'Random': algs.Classifier(),
        # 'Naive Bayes': algs.NaiveBayes({'usecolumnones': False}),
        # 'Naive Bayes Ones': algs.NaiveBayes({'usecolumnones': True}),
        'Linear Regression': algs.LinearRegressionClass(),
        'Logistic Regression': algs.LogitReg(),
        'Neural Network': algs.NeuralNet({'hiddenLayers': 1}),
        'Neural Network 2': algs.NeuralNet({'epochs': 100, 'hiddenLayers': 2}),
        'Kernel Logistic Regression linear': algs.KernelLogitReg({'kernel': 'linear', 'regularizer': 'l2'}),
        'Kernel Logistic Regression hamming': algs.KernelLogitReg({'kernel': 'hamming', 'regularizer': 'l2'})
    }
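# The Kernel Logistic Regression entries above choose a kernel ('linear' or
# 'hamming'); other examples in this file also pass a number of centers
# ('k' / 'num_centers'). A minimal sketch of the usual construction, assuming
# the learner picks some training points as centers and maps each input x to
# the feature vector [K(x, c_1), ..., K(x, c_m)] before running ordinary
# logistic regression; the actual algs.KernelLogitReg code is not shown here.
import numpy as np


def linear_kernel(X, centers):
    # K(x, c) = <x, c>
    return X @ centers.T


def hamming_kernel(X, centers):
    # K(x, c) = number of coordinates where x and c agree (for discrete features)
    return np.array([[np.sum(x == c) for c in centers] for x in X], dtype=float)


def kernel_features(X, centers, kernel='linear'):
    """Transform X into kernel space with respect to the given centers."""
    k = linear_kernel if kernel == 'linear' else hamming_kernel
    return k(X, centers)


# Usage sketch: choose centers from the training set, then fit a plain logistic
# regression on kernel_features(Xtrain, centers) and predict on
# kernel_features(Xtest, centers).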
Example #7
    fold = 10

    # Shuffle features and labels together, then split into `fold` interleaved parts.
    trainlabel = np.reshape(trainset[1], (-1, 1))
    trset = np.hstack((trainset[0], trainlabel))
    numinputs = trset.shape[1] - 1
    np.random.shuffle(trset)
    parts = [trset[i::fold] for i in range(fold)]
    obj = []
    print('Running on train={0} and test={1} samples'.format(
        trainset[0].shape[0], testset[0].shape[0]))
    parm_pass = {
        'Neural Network': {'ni': trset.shape[1] - 1, 'nh': 0, 'no': 1},
        'Logistic Regression': {'regwt': 0, 'type': "L2"}
    }

    classalgs = {
        'Linear Regression': algs.LinearRegressionClass(),
        'Naive Bayes Ones': algs.NaiveBayes(),
        'Logistic Regression': algs.LogitReg(parm_pass['Logistic Regression']),
        'Neural Network': algs.NeuralNet(parm_pass['Neural Network'])
    }

    # Iterate learners in a fixed (sorted) order.
    classalgs1 = collections.OrderedDict(sorted(classalgs.items()))

    best_parm = []

    for learnername, learner in classalgs1.items():

        print('Running learner = ' + learnername)

        # Train model: evaluate each parameter setting for this learner
        # (parm_dict is defined elsewhere in the original script and not shown here).
        parm_accuracy = {}

        for j in range(len(parm_dict[learnername])):
            parm = []