import timeit

import numpy as np

# load_spambase, train_test_shuffle_split and AdaBoost are project-local helpers.
def spambase(T=100):
    # Load the spambase data and remap the {0, 1} labels to {-1, +1} as AdaBoost expects.
    train, target = load_spambase()
    target = np.array(map(lambda v: -1.0 if v == 0 else 1.0, target))

    # Hold out one tenth of the rows as the test split.
    train, test, train_target, test_target = train_test_shuffle_split(train, target, len(train) / 10)
    boost = AdaBoost()
    # Run T boosting rounds and report the wall-clock time.
    start = timeit.default_timer()
    boost.boost(train, train_target, test, test_target, T)
    stop = timeit.default_timer()
    print "Total Run Time: %s secs" % (stop - start)
Example 3
def spam(step, loop, converge):
    train, target = load_spambase()

    # Hold out one tenth of the rows as the test split.
    train, test, train_target, test_target = cross_validation.train_test_shuffle_split(train, target, len(train) / 10)
    # Fit the scaler on the training features only, apply the same scaling to the
    # test set, and prepend a constant column of 1.0 as the bias/intercept term.
    scaler = normalize(train)
    train = append_new_column(train, 1.0, 0)
    scaler.scale_test(test)
    test = append_new_column(test, 1.0, 0)

    print '\n============== Logistic Regression - Stochastic Gradient Descent ==============='
    spam_logistic(train, test, train_target, test_target, step, loop, converge)

    print '\n============== Linear Regression - Stochastic Gradient Descent ==============='
    spam_linear(train, test, train_target, test_target, step, loop, converge)

    print '\n============== Linear Regression - Normal Equation ==============='
    spam_normal_equation(train, test, train_target, test_target)

    print '\n============== Decision Tree ===================================='
    spam_decision_tree(train, test, train_target, test_target)
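The spam_logistic helper is likewise project-local. Under the assumption that step is the learning rate, loop the maximum number of epochs and converge a tolerance on the change in the weights, a minimal stochastic-gradient-descent sketch for logistic regression on the 0/1 targets might look like:

import numpy as np

def logistic_sgd_sketch(X, y, step, loop, converge):
    # X is assumed to already carry the bias column prepended by append_new_column.
    w = np.zeros(X.shape[1])
    for _ in range(loop):
        w_prev = w.copy()
        for i in np.random.permutation(len(X)):
            pred = 1.0 / (1.0 + np.exp(-X[i].dot(w)))  # sigmoid of the linear score
            w += step * (y[i] - pred) * X[i]           # gradient step on a single sample
        if np.linalg.norm(w - w_prev) < converge:
            break
    return w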