# Example #1
def sqerror_ridge_gradient(x: Vector, y: float, beta: Vector,
                           alpha: float) -> Vector:
    """
    Gradient of the i-th squared-error term with the ridge
    penalty included: the plain squared-error gradient plus
    the gradient of the alpha-weighted penalty on beta.
    """
    error_grad = sqerror_gradient(x, y, beta)
    penalty_grad = ridge_penalty_gradient(beta, alpha)
    return add(error_grad, penalty_grad)
# Example #2
def sqerror_ridge_gradient(x: Vector, y: float, beta: Vector,
                           alpha: float) -> Vector:
    """
    The gradient corresponding to the i-th squared-error term,
    including the ridge (L2) penalty on beta weighted by alpha.
    """
    return add(sqerror_gradient(x, y, beta),
               ridge_penalty_gradient(beta, alpha))
# Example #3
def gradient_step(v: Vector, gradient: Vector, step_size: float) -> Vector:
    """Move `step_size` in the `gradient` direction from `v`."""
    assert len(v) == len(gradient)
    step = scalar_multiply(step_size, gradient)
    return add(v, step)  # fixed: original had `retrun`, a SyntaxError
def gradient_step(v: Vector, gradient: Vector, step_size: float) -> Vector:
    """Return the point reached by stepping `step_size` along `gradient` from `v`."""
    assert len(v) == len(gradient)
    return add(v, scalar_multiply(step_size, gradient))
# Example #5
def gradient_step(v: Vector, gradient: Vector, step_size: float) -> Vector:
    """Move `step_size` in the `gradient` direction from `v`."""
    # Guard against mismatched lengths, consistent with the other
    # gradient_step definitions in this file.
    assert len(v) == len(gradient)
    step = scalar_multiply(step_size, gradient)
    return add(v, step)
def gradient_step(v: Vector, gradient: Vector, step_size: float) -> Vector:
    """Take one step of size `step_size` from `v` along `gradient`."""
    assert len(v) == len(gradient)
    displacement = scalar_multiply(step_size, gradient)
    return add(v, displacement)
# Example #7
def sqerror_ridge_gradient(x: Vector, y: float, beta: Vector,
                           alpha: float) -> Vector:
    """
    The gradient corresponding to the i-th squared-error term,
    including the ridge penalty on beta weighted by alpha.
    """
    return add(sqerror_gradient(x, y, beta),
               ridge_penalty_gradient(beta, alpha))
# Example #8

def estimate_gradient(f, v, h=0.00001):
    """
    Estimate the gradient of f at v: one partial difference
    quotient per coordinate of v.

    Fixed: the original body was syntactically broken — a signature's
    type annotations had been pasted into the call site, and the loop
    read `for i, in range(...)`.
    """
    return [partial_difference_quotient(f, v, i, h)
            for i in range(len(v))]

#116p

import random 
from scratch.linear_algebra import distance, add, scalar_multiply

def gradient_step(v: Vector, gradient: Vector, step_size: float) -> Vector:
    """Move `step_size` in the `gradient` direction from `v`."""
    # Fixed: these lines were a function body stranded at module level
    # (`return` outside a function is a SyntaxError); restored the `def`
    # line and corrected the `scalat_multiply` typo.
    assert len(v) == len(gradient)
    step = scalar_multiply(step_size, gradient)
    return add(v, step)


def sum_of_squares_gradient(v: Vector) -> Vector:
    """Gradient of sum(v_i ** 2): each partial derivative is 2 * v_i."""
    return [v_i * 2 for v_i in v]


# Start from a random 3-d point and run 1000 gradient-descent steps
# toward the minimum of sum-of-squares (the zero vector).
v = [random.uniform(-10, 10) for _ in range(3)]

for epoch in range(1000):
    grad = sum_of_squares_gradient(v)    # gradient at the current point
    v = gradient_step(v, grad, -0.01)    # step against the gradient
    print(epoch, v)


#117p
# Example #9
def gradient_step(v: Vector, gradient: Vector, step_size: float) -> Vector:
    """Move `step_size` in the `gradient` direction from point `v`."""
    assert len(v) == len(gradient)
    step = scalar_multiply(step_size, gradient)
    return add(v, step)
# Example #10
def gradient_step(v: np.array, gradient: np.array, step_size: float) -> np.array:
    """Move `step_size` in the `gradient` direction from `v`."""
    scaled = scalar_multiply(step_size, gradient)
    return add(v, scaled)