def testGradient():  # Great success with subset
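    """Sanity check for gradient descent on a small housing subset.

    Uses a 100-row subset of the normalized housing training set, fits
    MEDV from the CRIM and TAX columns, and prints the fitted weights
    together with the model's predictions.
    """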
    test, train = utils.load_and_normalize_housing_set()
    df_full = pd.DataFrame(train)
    subset_size = 100
    df = utils.train_subset(df_full, ['CRIM', 'TAX', 'B', 'MEDV'], n=subset_size)
    dfX = pd.DataFrame([df['CRIM'], df['TAX']]).transpose()
    print len(dfX)
    print dfX
    #raw_input()

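    # Fit weights by gradient descent (third argument assumed to be the
    # learning rate, here 0.5, with up to 300 iterations).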
    fit = gd.gradient(dfX, df['MEDV'].head(subset_size), .5, max_iterations=300)

    print 'real vs. fit'
    print len(dfX)
    print df['MEDV'].head(10)
    print fit
    # Append a constant column (assumed intercept term) and print predictions.
    data = gd.add_col(gd.pandas_to_data(dfX), 1)
    print np.dot(data, fit)


def k_folds_linear_gd(df_test, df_train, Y):
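    """10-fold linear regression via gradient descent.

    Only df_test is used: it is partitioned into k = 10 folds, the first
    k - 1 folds are used for training, and the final fold is held out for
    testing (df_train and Y are currently unused).

    Returns [last_fold_error, test_error].
    """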
    k = 10
    df_test = gd.pandas_to_data(df_test)
    k_folds = partition_folds(df_test, k)
    model = Model_w()
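    # Model_w holds the weight vector, updated after each fold's gradient run.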
    theta = None
    for ki in range(k - 1):
        print "k fold is {}".format(k)
        data, truth = get_data_and_truth(k_folds[ki])
        binary = True
        model.update(gd.gradient(data, np.array(truth), 0.00001, max_iterations=5, binary=binary))
        print model.w
        if theta is None:
            theta, max_acc = get_best_theta(data, truth, model.w, binary, False)
        predict = gd.predict_data(data, model.w, binary, False, theta)
        error = mystats.get_error(predict, truth, binary)
        print "Error for fold {} is {} with theta =  {}".format(k, error, theta)
    test, truth = get_data_and_truth(k_folds[k - 1])
    predict = gd.predict_data(test, model.w, binary, False, theta)
    test_error = mystats.get_error(predict, truth, binary)
    return [error, test_error]
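

# Minimal usage sketch, assuming the module-level imports this file relies on
# (utils, gd, mystats, numpy as np, pandas as pd, plus partition_folds,
# Model_w, get_data_and_truth, get_best_theta) are available; the call below
# is illustrative rather than part of the original script.
if __name__ == '__main__':
    testGradient()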