Example #1
import time

import gradient_descent
import mini_batches
import stochastic


def choose_gradient():
    print("********************************************")
    print("*******Choose your gradient technique*******")
    print("*******************Linear*******************")
    print("********************************************")

    print("(1): Gradient Descent (2): Mini-batches (3): Stochastic")
    technique = 0
    while technique not in range(1, 4):
        try:
            technique = int(input("Which one? "))
        except ValueError:  # non-numeric input
            print("Choose a valid number")
            continue

        if technique == 1:
            print("Starting Gradient Descent....")
            time.sleep(5)
            gradient_descent.run()
        elif technique == 2:
            print("Starting Mini-batches....")
            time.sleep(5)
            mini_batches.run()
        elif technique == 3:
            print("Starting Stochastic...")
            time.sleep(5)
            stochastic.run()
        else:
            print("Choose a valid number")

Example #2

import time

import numpy as np

# compute_newton, compute_total_error, plot_line_data and gd are defined
# elsewhere in this example; they are not shown in this excerpt.


def main():  # main driver function
    init_points = np.array([0.0, 1.0])  # initial points
    print("2nd order optimization starts at " +
          str(time.asctime()))  #start time
    time_t = time.time()  #start time
    newton_points = compute_newton(init_points,
                                   max_iter=100)  #find the solution
    print(newton_points)
    print("b = {0}, m = {1}, error = {2}".format(
        newton_points[1], newton_points[0],
        compute_total_error(newton_points[0], newton_points[1])))
    time_t = time.time() - time_t  # elapsed seconds
    print("2nd order optimization ends at %s and has taken %dms" %
          (str(time.asctime()), time_t * 1000))
    plot_line_data(newton_points[0],
                   newton_points[1])  #plot the line generated
    print("1st order optimization starts at " +
          str(time.asctime()))  #start time
    time_t = time.time()
    m, b = gd.run()
    time_t = time.time() - time_t  # elapsed seconds
    print("1st order optimization ends at %s and has taken %dms" %
          (str(time.asctime()), time_t * 1000))
    plot_line_data(m, b)  #plot the generated line
    return
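
compute_newton, compute_total_error and plot_line_data are not included in this excerpt. As a rough sketch of what a second-order solver for the two-parameter least-squares line could look like, the version below adds an explicit data_points argument (the original takes only the starting point) and assumes each sample row is a (y, x) pair; it is an illustration, not the project's implementation.

import numpy as np


def compute_newton(params, data_points, max_iter=100, tol=1e-12):
    # params = [m, b], matching the ordering printed by main(); rows are (y, x).
    m, b = params
    y, x = data_points[:, 0], data_points[:, 1]
    n = float(len(x))
    for _ in range(max_iter):
        r = y - (m * x + b)                          # residuals
        grad = np.array([-2.0 / n * np.sum(x * r),   # dE/dm
                         -2.0 / n * np.sum(r)])      # dE/db
        hess = np.array([[2.0 / n * np.sum(x * x), 2.0 / n * np.sum(x)],
                         [2.0 / n * np.sum(x),     2.0]])
        step = np.linalg.solve(hess, grad)           # Newton step: H^-1 * grad
        m, b = m - step[0], b - step[1]
        if np.linalg.norm(step) < tol:               # the error is quadratic, so one step suffices
            break
    return np.array([m, b])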

Example #3

import numpy as np
import numexpr as ne
import matplotlib.pyplot as plt
from mpl_toolkits.mplot3d import Axes3D  # noqa: F401  (registers the '3d' projection used below)

import gradient_descent


def save_line_graph(m, b, data_points, out_file):
    # Plot the sample points and the fitted line y = m*x + b, then save the figure.
    fig = plt.figure()
    subplot = fig.add_subplot(111)

    for point in data_points:
        y = point[0]
        x = point[1]
        subplot.plot(x, y, 'bo', clip_on=False)

    x = np.linspace(-2.0, 2.0, 100)
    y = ne.evaluate('m*x + b', local_dict={'x': x, 'm': m, 'b': b})

    subplot.plot(x, y, 'r')
    subplot.grid()
    plt.ylim([-15, 15])
    plt.xlim([-2, 2])

    plt.savefig(out_file, bbox_inches='tight', pad_inches=0.0)


data_points = np.load('example_1_sample_points.npy')

parameters = gradient_descent.run(data_points, initialize_params,
                                  calculate_derivs, calculate_error)
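
The initialize_params, calculate_derivs and calculate_error callbacks passed to gradient_descent.run above are not part of this excerpt. For the straight-line model they might look like the sketch below, which assumes sample rows of the form (y, x) and parameters ordered as [m, b]; the exact signatures expected by gradient_descent.run are an assumption.

import numpy as np


def initialize_params():
    # Starting guess for [m, b].
    return np.array([0.0, 0.0])


def calculate_derivs(data_points, params):
    # Gradient of the mean squared error of y = m*x + b.
    m, b = params
    y, x = data_points[:, 0], data_points[:, 1]
    r = y - (m * x + b)
    n = float(len(x))
    return np.array([-2.0 / n * np.sum(x * r),   # dE/dm
                     -2.0 / n * np.sum(r)])      # dE/db


def calculate_error(data_points, params):
    # Mean squared error of the current fit.
    m, b = params
    y, x = data_points[:, 0], data_points[:, 1]
    return float(np.mean((y - (m * x + b)) ** 2))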

print('plotting error surface and steps...(this could take a few minutes)')
count = 0
for i in range(0, len(parameters)):
    error_fig = plt.figure()
    error_surface = error_fig.add_subplot(111, projection='3d')
    #Plot original error surface
    plot_error_surface(error_surface)
    error_surface.set_xlabel('m')
    error_surface.set_ylabel('b')
    error_surface.set_zlabel('Error')
    error_surface.view_init(elev=40, azim=79)

    err = 0
    for j in range(0, i + 1):
        # parameters[j] holds (parameter estimate, error) for step j; mark every step up to i.
        m = parameters[j][0][0]
        b = parameters[j][0][1]
        err = parameters[j][1]
        error_surface.plot([m], [b], [err], 'yo')

    error_fig.savefig('error_surface-' + str(count) + '.png',
                      bbox_inches='tight', pad_inches=0.0)
    plt.close(error_fig)
    count = count + 1
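
The loop above calls plot_error_surface, which is also not shown in this excerpt. A minimal stand-in that fills the 3D axes with the mean squared error of y = m*x + b over a grid of (m, b) values could look like this; the grid ranges and the (y, x) sample layout are assumptions.

import numpy as np


def plot_error_surface(axes3d, data_points=None):
    # Mean squared error of y = m*x + b evaluated over a grid of (m, b) values.
    if data_points is None:
        data_points = np.load('example_1_sample_points.npy')
    y = data_points[:, 0]
    x = data_points[:, 1]
    m_grid, b_grid = np.meshgrid(np.linspace(-10.0, 10.0, 80),
                                 np.linspace(-10.0, 10.0, 80))
    # Broadcast each (m, b) pair against the samples, then average over the samples.
    error = np.mean((y - (m_grid[..., None] * x + b_grid[..., None])) ** 2, axis=-1)
    axes3d.plot_surface(m_grid, b_grid, error, cmap='viridis', alpha=0.6)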


Example #4

import numpy as np
import numexpr as ne
import matplotlib.pyplot as plt

import gradient_descent


def save_line_graph(params, error, data_points, out_file):
    # Plot the sample points and the cubic fitted with the current parameter estimates.
    a, b, c, d = params[0], params[1], params[2], params[3]
    fig = plt.figure()
    subplot = fig.add_subplot(111)

    min_y = 0
    max_y = 0
    for point in data_points:
        y = point[0]
        x = point[1]
        subplot.plot(x, y, "bo", clip_on=False)
        min_y = min(y, min_y)
        max_y = max(y, max_y)

    x = np.linspace(-2.5, 2.5, 100)
    y = ne.evaluate("a + b*x + c*x**2 + d*x**3",
                    local_dict={"x": x, "a": a, "b": b, "c": c, "d": d})

    subplot.plot(x, y, "r")
    plt.ylim([min_y, max_y])
    plt.xlim([-2.5, 2.5])
    plt.savefig(out_file, bbox_inches="tight", pad_inches=0.0)


data_points = np.load("example_2_sample_points.npy")

parameters = gradient_descent.run(
    data_points, initialize_params, calculate_derivs, calculate_error, learning_rate=0.03, max_steps=100
)

print("plotting line graphs")
count = 0
for params in parameters:
    param = params[0]
    err = params[1]
    save_line_graph(param, err, data_points, "line_graph-" + str(count) + ".png")
    count = count + 1

np.save("parameter_estimates_example_2.npy", parameters)

Example #5
import pandas as pd
import matplotlib.pyplot as plt

import gradient_descent

# initialize_params is defined elsewhere in this example; it is not shown in this excerpt.


def calculate_derivs(datas, params):
    a, b, c, d = params[0], params[1], params[2], params[3]
    derivs = [0, 0, 0, 0]
    long = len(datas.iloc[:, 0])
    for point in range(long):
        y = datas.iloc[point, 2]  # y value of the current row
        x = datas.iloc[point, 0]  # x value of the current row
        inner = y - (a + b * x + c * x ** 2 + d * x ** 3)
        derivs[0] = derivs[0] + inner
        derivs[1] = derivs[1] + x * inner
        derivs[2] = derivs[2] + (x ** 2) * inner
        derivs[3] = derivs[3] + (x ** 3) * inner

    factor = -2.0 / len(datas.iloc[:, 0])
    for i in range(0, len(derivs)):
        derivs[i] = factor * derivs[i]

    return derivs
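
The same derivatives can be computed without the per-row loop. The vectorized form below is just an alternative sketch; it assumes the same column layout (x in column 0, y in column 2) and numeric values in both columns.

import numpy as np


def calculate_derivs_vectorized(datas, params):
    # Same gradient as calculate_derivs above, using whole-column operations.
    a, b, c, d = params[0], params[1], params[2], params[3]
    x = datas.iloc[:, 0].to_numpy()
    y = datas.iloc[:, 2].to_numpy()
    inner = y - (a + b * x + c * x ** 2 + d * x ** 3)
    factor = -2.0 / len(x)
    return [factor * np.sum(inner),
            factor * np.sum(x * inner),
            factor * np.sum(x ** 2 * inner),
            factor * np.sum(x ** 3 * inner)]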


datas = pd.read_csv('/home/melis/Desktop/cwurData.csv')

parameters = gradient_descent.run(datas, initialize_params, calculate_derivs, learning_rate=0.03,
                                  max_steps=30)
x = datas.iloc[:, 0]
plt.scatter(x, datas.iloc[:, 2])
y = parameters[0] + parameters[1] * x + parameters[2] * (x ** 2) + parameters[3] * (x ** 3)
plt.plot(x, y)
plt.show()
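
initialize_params is not shown for this example either, and note that plt.plot above connects points in the order the rows appear, so sorting the data by x first gives a cleaner fitted curve. A minimal initializer, assuming the four cubic coefficients simply start at zero, could be:

def initialize_params():
    # Starting guess for the cubic coefficients [a, b, c, d].
    return [0.0, 0.0, 0.0, 0.0]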