Example #1
def gradient_descent_linear_regression(train_data, learning_rate):
    # Add bias term in training data
    train_data_with_bias = add_bias_term_in_data(train_data)
    predicted_y = []
    tetas = initialize_tetas(len(train_data_with_bias[0]) - 1)
    if train_data_with_bias:
        cost_arr = []
        iteration = []
        tetas, change_in_tetas = one_training_iteration(
            train_data_with_bias, tetas, learning_rate,
            len(train_data_with_bias))
        for i in range(1000):
            tetas, change_in_tetas = one_training_iteration(
                train_data_with_bias, tetas, learning_rate,
                len(train_data_with_bias))
            print("Iteration %d", (i + 1))
            cost = compute_cost(train_data_with_bias, tetas)
            print("Cost: %(key1)s" % {'key1': cost})
            iteration.append(i + 1)
            cost_arr.append(cost)
        for i in range(len(train_data_with_bias)):
            predicted_y.append(predict_instance(train_data_with_bias[i],
                                                tetas))
            #print("predicted value %(key1)s Actual value %(key2)s"%{'key1':predict_instance(train_data_with_bias[i],tetas),'key2':train_data_with_bias[i][len(train_data_with_bias[0])-1]})
        plot_linear_regression_with_one_variable(train_data, predicted_y)
        plot_cost_function(iteration, cost_arr)
    else:
        print("No training data")
    return predicted_y, tetas
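Example #1 leans on several helpers that are not shown on this page. The following is a minimal sketch of what they might look like, assuming each row is a plain Python list [1, x1, ..., xn, y] after the bias column is prepended, with the label y last; every name and layout here is an assumption, not the original code:

def add_bias_term_in_data(data):
    # Sketch (assumed layout): prepend the constant 1 feature to every row.
    return [[1.0] + list(row) for row in data]


def initialize_tetas(n):
    # Start all n parameters (bias included) at zero.
    return [0.0] * n


def predict_instance(instance, tetas):
    # Hypothesis h(x) = tetas . x; the last column of the row is the label.
    return sum(t * x for t, x in zip(tetas, instance[:-1]))


def compute_cost(data, tetas):
    # Mean squared error J = (1/2m) * sum((h(x) - y)^2).
    m = len(data)
    return sum((predict_instance(row, tetas) - row[-1]) ** 2
               for row in data) / (2 * m)


def one_training_iteration(data, tetas, learning_rate, m):
    # One batch gradient-descent step; returns the updated parameters and
    # the per-parameter change, matching the two-value unpacking above.
    gradients = [0.0] * len(tetas)
    for row in data:
        error = predict_instance(row, tetas) - row[-1]
        for j in range(len(tetas)):
            gradients[j] += error * row[j]
    changes = [learning_rate * g / m for g in gradients]
    new_tetas = [t - c for t, c in zip(tetas, changes)]
    return new_tetas, changes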
Example #2
def gradient_descent_logistic_regression(train_data, learning_rate):
    # Add bias term in training data
    train_data, mean, std_dev = normalize_features(train_data)
    train_data_with_bias = add_bias_term_in_data(train_data)
    predicted_y = []
    tetas = initialize_tetas(len(train_data_with_bias[0]) - 1)
    if train_data_with_bias:
        tetas = one_training_iteration(train_data_with_bias,
                                       tetas, learning_rate,
                                       len(train_data_with_bias))
        print(tetas)
        for i in range(1000):
            tetas = one_training_iteration(train_data_with_bias, tetas,
                                           learning_rate,
                                           len(train_data_with_bias))
            print("Iteration:  %(key1)s" % {'key1': i + 1})
            cost = compute_cost(train_data_with_bias, tetas)
            print("Cost:  %(key1)s" % {'key1': cost})
        legends = ['Admitted', 'Not Admitted', 'Decision Boundary']
        titles = [
            "Exam 1 Score", "Exam 2 Score",
            "Traning Data with decision boundary"
        ]
        plotTrainingData(train_data, tetas, titles, legends)

    else:
        print("No training data")
    return tetas, mean, std_dev
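The logistic example reuses the same scaffolding but swaps the hypothesis and cost, and its one_training_iteration returns only the updated parameters. A hedged sketch of just the pieces that differ from the linear-regression helpers above (again assumptions, not the original code):

import math


def sigmoid(z):
    return 1.0 / (1.0 + math.exp(-z))


def predict_instance(instance, tetas):
    # Logistic hypothesis h(x) = sigmoid(tetas . x); label is the last column.
    return sigmoid(sum(t * x for t, x in zip(tetas, instance[:-1])))


def compute_cost(data, tetas):
    # Cross-entropy cost J = -(1/m) * sum(y*log(h) + (1-y)*log(1-h)).
    m = len(data)
    total = 0.0
    for row in data:
        h = predict_instance(row, tetas)
        total += row[-1] * math.log(h) + (1 - row[-1]) * math.log(1 - h)
    return -total / m


def normalize_features(data):
    # Z-score each feature column, leaving the label untouched; returns the
    # scaled rows plus the per-column mean and standard deviation.
    n = len(data[0]) - 1
    m = len(data)
    means = [sum(row[j] for row in data) / m for j in range(n)]
    stds = [(sum((row[j] - means[j]) ** 2 for row in data) / m) ** 0.5
            for j in range(n)]
    scaled = [[(row[j] - means[j]) / stds[j] for j in range(n)] + [row[-1]]
              for row in data]
    return scaled, means, stds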
Example #3
import numpy as np
from numpy.linalg import inv


def training(train_data):
    # Closed-form fit via the normal equation: tetas = inv(X^T X) X^T Y.
    temp_X = [row[:-1] for row in train_data]
    Y = np.array([row[-1] for row in train_data])
    X = add_bias_term_in_data(temp_X)  # assumed to return a NumPy array
    X_t = X.transpose()
    X_t_mul_X = X_t.dot(X)  # X^T X
    tetas = inv(X_t_mul_X).dot(X_t).dot(Y)
    """print(X)
    print(Y)
    print(tetas)"""
    print(predict_instance([1, 1650, 3], tetas))  # sanity check on one instance (bias, feature1, feature2)
    return tetas
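The explicit inverse above works, but solving the linear system is numerically sturdier. For reference, a sketch (not part of the original snippet) that computes the same normal-equation solution tetas = inv(X^T X) X^T Y without forming the inverse:

import numpy as np


def normal_equation(X, y):
    # Solve (X^T X) tetas = X^T y instead of inverting X^T X.
    X = np.asarray(X, dtype=float)
    y = np.asarray(y, dtype=float)
    return np.linalg.solve(X.T @ X, X.T @ y)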
Example #4
def main():
    train_data = loadData('data/ex1data2.txt')
    #evaluateModels(train_data)
    tetas, mean, std_dev, predicted_y = gradient_descent_linear_regression(
        train_data, 0.5)
    test_data = loadData('data/testdata.txt')
    test_data = normalize_test_data(test_data, mean, std_dev)
    test_data = add_bias_term_in_data(test_data)
    print("Start predicting new instances")
    for i in range(len(test_data)):
        predicted_value = predict_instance(test_data[i], tetas)
        print("value of instance %(key1)s is %(key2)s" % {
            'key1': i + 1,
            'key2': predicted_value
        })
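normalize_test_data is not shown here; presumably it applies the mean and standard deviation learned on the training set to the unseen rows. A minimal sketch under that assumption (per-feature lists, feature-only test rows):

def normalize_test_data(test_data, mean, std_dev):
    # Scale new rows with the training-set statistics, never their own.
    return [[(x - m) / s for x, m, s in zip(row, mean, std_dev)]
            for row in test_data]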
Example #5
def main():
    train_data = loadData('data2/ex2data1.txt')
    legends = ['Admitted', 'Not Admitted']
    titles = ["Exam 1 Score", "Exam 2 Score", "Scatter Plot of training data"]
    plotTrainingData(train_data, [], titles, legends)
    #train_data = [[0,0,0],[0,1,1],[1,0,1],[1,1,1]]
    tetas, mean, std_dev = gradient_descent_logistic_regression(
        train_data, 0.8)
    test_data = [[45, 85, 1]]
    test_data = normalize_test_data(test_data, mean, std_dev)
    test_data = add_bias_term_in_data(test_data)
    for i in range(len(test_data)):
        predicted_value = predict_instance(test_data[i], tetas)
        print("Probability of Test Example is %(key2)s" % {
            'key1': i + 1,
            'key2': predicted_value
        })
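plotTrainingData is likewise not shown. For two features, the decision boundary of the fitted logistic model is the line where tetas[0] + tetas[1]*x1 + tetas[2]*x2 = 0; a hypothetical matplotlib sketch of such a plot, assuming the [x1, x2, y] row layout, could look like this:

import matplotlib.pyplot as plt


def plot_training_data(data, tetas):
    # Sketch, not the original plotTrainingData: scatter both classes, then
    # draw the boundary line x2 = -(tetas[0] + tetas[1] * x1) / tetas[2].
    pos = [row for row in data if row[-1] == 1]
    neg = [row for row in data if row[-1] == 0]
    plt.scatter([r[0] for r in pos], [r[1] for r in pos],
                marker='+', label='Admitted')
    plt.scatter([r[0] for r in neg], [r[1] for r in neg],
                marker='o', label='Not Admitted')
    if len(tetas) == 3:
        x1 = [min(r[0] for r in data), max(r[0] for r in data)]
        x2 = [-(tetas[0] + tetas[1] * v) / tetas[2] for v in x1]
        plt.plot(x1, x2, label='Decision Boundary')
    plt.xlabel('Exam 1 Score')
    plt.ylabel('Exam 2 Score')
    plt.legend()
    plt.show()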
Example #6
def predict_test_data(test_data, tetas):
    test_data_with_bias = add_bias_term_in_data(test_data)
    predicted_y = []
    for i in range(len(test_data_with_bias)):
        predicted_y.append(predict_instance(test_data_with_bias[i], tetas))
    return predicted_y
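A hypothetical call, with made-up parameter values and feature-only test rows (the real predict_instance and row layout may differ per example):

tetas = [0.5, 1.2, -0.7]              # hypothetical trained parameters, bias first
test_rows = [[3.0, 1.5], [0.4, 2.2]]  # two feature-only test rows
print(predict_test_data(test_rows, tetas))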