Example #1
def geterror(predictions, ytest):
    # This geterror was changed with the TA's permission
    return l2err(predictions, ytest)**2 / (2 * ytest.shape[0])
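
# geterror above relies on an l2err helper defined elsewhere in the original
# script. A minimal sketch under that assumption (not taken from this example):
# l2err is the Euclidean norm of the residual, so geterror returns half the
# mean squared error.
import numpy as np

def l2err(predictions, ytest):
    # ||predictions - ytest||_2
    return np.linalg.norm(np.subtract(predictions, ytest))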


if __name__ == '__main__':
    trainsize = 1000
    testsize = 5000
    # numruns should be larger than 1 so that a standard deviation (and hence a standard error) can be computed
    numruns = 2
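
    # The comment above requires numruns > 1 so a sample standard deviation
    # exists. A hypothetical sketch (helper name and usage assumed, not taken
    # from this example): standard error = sample std of the per-run test
    # errors divided by sqrt(number of runs).
    import numpy as np

    def standard_error(errors_per_run):
        return np.std(errors_per_run, ddof=1) / np.sqrt(len(errors_per_run))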

    regressionalgs = {
        # 'Random': algs.Regressor(),
        # 'Mean': algs.MeanPredictor(),
        # 'FSLinearRegression5': algs.FSLinearRegression({'features': [1, 2, 3, 4, 5]}),
        'FSLinearRegression50': algs.FSLinearRegression({'features': range(385)}),
        'RidgeLinearRegression': algs.RidgeLinearRegression({'features': range(385)}),
        'LassoRegression': algs.LassoRegression({'features': range(385)}),
        'StochasticGradientDescent': algs.StochasticGradientDescent({'features': range(385)}),
        'BatchGradientDescent': algs.BatchGradientDescent({'features': range(385)}),
        'RMSProp': algs.RMSProp({'features': range(385)}),
        'AMSGrad': algs.AMSGrad({'features': range(385)}),
    }
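
    # A hedged sketch of how a dictionary like this is typically consumed. The
    # learn()/predict() method names and the Xtrain/ytrain/Xtest/ytest arguments
    # are assumptions about the surrounding framework, not taken from this example.
    def run_all(regressionalgs, Xtrain, ytrain, Xtest, ytest):
        errors = {}
        for learnername, learner in regressionalgs.items():
            learner.learn(Xtrain, ytrain)         # fit on the training split
            predictions = learner.predict(Xtest)  # predict on the held-out split
            errors[learnername] = geterror(predictions, ytest)
        return errors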
    numalgs = len(regressionalgs)

    # Select the best parameter setting for each algorithm, so that algorithms
    # can be compared with their best parameter settings
    parameters = (
        {'regwgt': 0.01},
    )
Example #2
def geterror(predictions, ytest):
    # Can change this to other error values
    return l2err(predictions, ytest) / ytest.shape[0]
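
# The comment above notes that other error values can be used. One hedged
# alternative for illustration (not part of this example), assuming l2err is
# the Euclidean norm of the residual as in Example #1: root-mean-squared error.
import numpy as np

def geterror_rmse(predictions, ytest):
    return l2err(predictions, ytest) / np.sqrt(ytest.shape[0])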


if __name__ == '__main__':
    trainsize = 1000
    testsize = 5000
    numruns = 1

    regressionalgs = {
        'Random': algs.Regressor(),
        'Mean': algs.MeanPredictor(),
        'FSLinearRegression5': algs.FSLinearRegression({'features': [1, 2, 3, 4, 5]}),
        'FSLinearRegression50': algs.FSLinearRegression({'features': range(50)}),
        'RidgeLinearRegression': algs.RidgeLinearRegression(),
        'Lasso': algs.Lasso(),
        'GSD': algs.GSD(),
        'BSD': algs.BSD(),
    }
    numalgs = len(regressionalgs)

    # Select the best parameter setting for each algorithm, so that algorithms
    # can be compared with their best parameter settings
Example #3
if __name__ == '__main__':
    trainsize = 1000
    testsize = 5000
    numruns = 1
    """
    By changing the value in feature variable it will change the number of features to be selected
    """
    L = []
    feature = 5
    L.extend(range(feature))
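
    # Quick sanity check of the rule in the docstring above (hypothetical, not
    # part of this example): L now holds the first `feature` column indices.
    assert L == list(range(feature))  # e.g. feature = 5 -> [0, 1, 2, 3, 4]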


    regressionalgs = {
        # 'Random': algs.Regressor(),
        'Mean': algs.MeanPredictor(),
        'FSLinearRegression5': algs.FSLinearRegression({'features': L}),
        'FSLinearRegression50': algs.FSLinearRegression({'features': range(50)}),
        'RidgeLinearRegression': algs.RidgeLinearRegression(),
        'gradientDescent': algs.gradientDescent(),
        'stochasticgradientDescent': algs.stochasticgradientDescent(),
        'LassoLinearRegression': algs.LassoLinearRegression(),
        'RMSPropRegression': algs.RMSPropRegression(),
        'amsGrad': algs.amsGrad(),
    }
    numalgs = len(regressionalgs)

    # Select the best parameter setting for each algorithm, so that algorithms
    # can be compared with their best parameter settings
    parameters = (
        #{'regwgt': 0.0},
        {