Example #1
def gradient_descent(D,X0,feedback=False,plot=False,**kwargs):
    """\
    Solution to MDS problem using gradient descent

    --- arguments ---
    D : target distance matrix (n x n)
    X0 : initial positions, organized by row (n x p)
    feedback : prints feedback if set to True
    plot : if set to True, records and plots the step/cost history
 
    --- kwargs ---
    rate, max_iters, min_step, max_step, trajectory, costs
    """
    if feedback is True:
        print("\nmds.gradient_descent():")
    if plot is True:
        kwargs['step_history'] = True
        kwargs['cost_history'] = True
        
    df = lambda x: gradient(x,D)
    f = lambda x: stress(x,D)
            
    results = gd.gradient_descent(X0,df,f=f,feedback=feedback,plot_history=plot,
                                  **kwargs)
        
    return results
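
The stress and gradient helpers this wrapper hands to gd.gradient_descent are not shown here. As a rough, purely illustrative sketch (not the module's actual code), the standard raw-stress objective and its gradient can be written in numpy as:

import numpy as np

# Illustrative stand-ins for the mds module's stress() and gradient();
# the real implementations are not included in this example.
def stress(X, D):
    # raw MDS stress: squared mismatch between embedding and target distances
    diff = X[:, None, :] - X[None, :, :]        # (n, n, p) pairwise differences
    d = np.linalg.norm(diff, axis=2)            # (n, n) embedding distances
    return np.sum(np.triu(d - D, k=1) ** 2)     # sum over pairs i < j

def gradient(X, D):
    # gradient of the raw stress with respect to the positions X
    diff = X[:, None, :] - X[None, :, :]
    d = np.linalg.norm(diff, axis=2)
    np.fill_diagonal(d, 1.0)                    # avoid dividing by zero on the diagonal
    w = 2.0 * (d - D) / d
    np.fill_diagonal(w, 0.0)
    return np.sum(w[:, :, None] * diff, axis=1) # (n, p)
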
Example #2
def Xdescent(X0, Qs, Ds, feedback=False, **kwargs):
    """\
    X solution to multiview-MDSq problem using gradient descent.

    --- arguments ---
    X0 : initial positions, organized by row (n x p)
    Qs : list of orthogonal matrices
    Ds : list of target distance matrices (n x n)
    """
    if feedback is True:
        print("\nmmdsq.Xdescent():")

    df = lambda x: Xgradient(x, Qs, Ds)
    f = lambda x: stress(x, Qs, Ds)
    results = gd.gradient_descent(X0, df, f=f, feedback=feedback, **kwargs)

    return results
Example #3
def Qdescent(X, Q0, D, feedback=False, plot=False, **kwargs):
    """\
    MDSq Q optimization

    --- arguments ---
    X = node positions
    Q0 = initial orthogonal matrix
    D = target distances
    feedback = set to True to print feedback
    plot = set to True to return stress plot
    """
    if feedback is True:
        print("\nmdsq.Qdescent():")
        print(f" initial stress = {stress(X,Q0,D):.2e}")
    if plot is True:
        kwargs['cost_history'] = True
        kwargs['step_history'] = True

    df = lambda Q: Qgradient(X, Q, D)
    f = lambda Q: stress(X, Q, D)
    p = lambda Q: nearest_orthogonal(Q)

    results = gd.gradient_descent(Q0,
                                  df,
                                  projection=p,
                                  f=f,
                                  plot_history=plot,
                                  **kwargs)

    if feedback is True:
        Q = results['output']
        print(f" final stress = {stress(X,Q,D):.2e}")
    if plot is True:
        plt.show()

    return results
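
nearest_orthogonal is likewise not shown. Projected gradient descent over orthogonal matrices commonly projects each iterate back onto the orthogonal group via the SVD, which gives the closest orthogonal matrix in Frobenius norm; a minimal sketch under that assumption:

import numpy as np

def nearest_orthogonal(Q):
    # polar/SVD projection: with Q = U S V^T, the nearest orthogonal matrix is U V^T
    U, _, Vt = np.linalg.svd(Q)
    return U @ Vt
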
Example #4
from matplotlib import pyplot as plt
from gd import gradient_descent

if __name__ == '__main__':

    # learning rates to compare; each one produces a separate descent path in the plot
    lr = [0.001, 0.005, 0.01, 0.05, 0.1, 0.5]
    for a in lr:
        path = gradient_descent(50, a, 0.01)
        plt.plot(path[:, 0], path[:, 1], label=str(a))
    plt.legend()
    plt.xlabel("x1")
    plt.ylabel("x2")
    plt.savefig("gd_random_lr_fun.png")
    plt.show()
Example #5
import sys
from matplotlib import pyplot as plt
from gd import gradient_descent

if __name__ == '__main__':

    # read the run parameters from the command line
    n_iter = int(sys.argv[1])
    alpha = float(sys.argv[2])
    epsilon = float(sys.argv[3])

    fig, ax = plt.subplots()
    for i in range(10):
        path = gradient_descent(n_iter, alpha, epsilon)
        ax.plot(path[:, 0], path[:, 1])
    ax.set_xlabel("x1")
    ax.set_ylabel("x2")
    plt.savefig("gd_random_init_fun.png")
    plt.show()
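
Examples #4 and #5 call gd.gradient_descent(iterations, learning rate, epsilon) and plot the returned path, but the gd module itself is not included. A minimal self-contained stand-in with that shape, descending a toy quadratic chosen purely for illustration:

import numpy as np

def gradient_descent(n_iter, alpha, epsilon):
    # Hypothetical stand-in for the gd module: descend f(x1, x2) = x1**2 + x2**2
    # from a random start and return the visited points, one row per step.
    grad = lambda p: 2.0 * p                    # gradient of the toy quadratic
    p = np.random.uniform(-1.0, 1.0, size=2)    # random initial point
    path = [p.copy()]
    for _ in range(n_iter):
        step = alpha * grad(p)
        p = p - step
        path.append(p.copy())
        if np.linalg.norm(step) < epsilon:      # stop once the update is negligible
            break
    return np.array(path)

With this stand-in, path[:, 0] and path[:, 1] are the x1 and x2 coordinates that the scripts above plot.
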
Example #6
import gd
from functools import *
import numpy as np

def residuals(betas, x, y):
    # build the design matrix: a column of ones (intercept) and the feature x
    X = np.full((x.size, 2), 1.0)
    X[:,1] = x

    return y - np.matmul(X, betas)

def ssr_grad(betas, x, y):
    # gradient of the sum of squared residuals w.r.t. [intercept, slope]
    r = residuals(betas, x, y)
    return [-2 * r.sum(), -2 * np.dot(r, x)]

def ssr_loss(betas, x, y):
    # sum of squared residuals (the objective being minimized)
    r = residuals(betas, x, y)
    return np.dot(r, r)

if __name__ == "__main__":
    x = np.linspace(0.0, 16.0, 8)
    y = 1 + 2 * x    # synthetic data on the line y = 1 + 2x, so the true betas are [1, 2]

    search = gd.gradient_descent(
            gradient = lambda b : ssr_grad(b, x, y), 
            start = [0.1, 3.4], 
            learn_rate = 0.0001, 
            n_iter = 100000)
    for it in search:
        print(f'{it.i} {it.step} {it.result}')
Example #7
import gd
from functools import *

if __name__ == "__main__":
    f = lambda x: x * x
    g = lambda x: 2 * x

    print('descending')
    # fold the stream of iterates into one report line per iteration
    r = reduce(lambda v, e: f'{v}\n{e.i} {e.step} {e.result}',
               gd.gradient_descent(g, 4, 0.8, 1000), '')
    print(f'{r}')
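
Both snippets above iterate over gd.gradient_descent(...) and read .i, .step and .result from each item, so the assumed interface is a generator yielding one record per iteration. The actual gd module is not included; a minimal sketch of such a generator, with the record fields guessed from the calling code:

import numpy as np
from collections import namedtuple

# Hypothetical record type and driver; the real gd module may differ.
Iterate = namedtuple('Iterate', ['i', 'step', 'result'])

def gradient_descent(gradient, start, learn_rate, n_iter=100):
    # plain gradient descent: yield the iteration index, the update applied,
    # and the current estimate after the update
    x = np.asarray(start, dtype=float)
    for i in range(n_iter):
        step = learn_rate * np.asarray(gradient(x), dtype=float)
        x = x - step
        yield Iterate(i, step, x)
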
Example #8
import numpy as np
from matplotlib import pyplot as plt

# x, func and gradient_descent are defined or imported earlier in the original script (not shown here)
y = np.linspace(-1.5, 1.5, 50)

X, Y = np.meshgrid(x, y)
zs = np.array([func(x,y) for x,y in zip(np.ravel(X), np.ravel(Y))])
Z = zs.reshape(X.shape)

ax = plt.axes(projection='3d')
ax.plot_surface(X, Y, Z, rstride=1, cstride=1,
                cmap='viridis', edgecolor='none')

x0 = max(x)
y0 = max(y)
learning_rate = 2
epoch = 500

x_gd, y_gd, z_gd = gradient_descent(x0, y0, learning_rate, epoch)


min_point = np.array([max(x), max(y)])
min_point_ = min_point[:, np.newaxis]
ax.plot(*min_point_, func(*min_point_), 'r*', markersize=10)
ax.set_title('surface')
ax.plot(x_gd, y_gd, 'go')
'''for i in range(1, epoch+1):
    ax.annotate('', xy=(x_gd[i], y_gd[i]), xytext=(x_gd[i-1], y_gd[i-1]),
                   arrowprops={'arrowstyle': '->', 'color': 'r', 'lw': 1},
                   va='center', ha='center')
some = list(zs)
#print(some.index(min(some)))
#print(some[2210])'''
plt.show()