def run_check_grad(hyperparameters):
    """Performs gradient check on logistic function.
    """

    # This creates small random data with 7 examples and
    # 9 dimensions and checks the gradient on that data.
    num_examples = 7
    num_dimensions = 9

    weights = np.random.randn(num_dimensions + 1, 1)
    data = np.random.randn(num_examples, num_dimensions)
    targets = (np.random.rand(num_examples, 1) > 0.5).astype(int)

    # print(weights)
    # print(data)
    # print(targets)

    diff = check_grad(
        logistic,  # function to check
        weights,
        0.001,  # perturbation
        data,
        targets,
        hyperparameters)

    print "diff =", diff
Example #2
def run_check_grad(hyperparameters):
    """Performs gradient check on logistic_pen function.
    """

    # This creates small random data with 20 examples and
    # 10 dimensions and checks the gradient on that data.
    num_examples = 20
    num_dimensions = 10

    weights = np.random.randn(num_dimensions + 1, 1)
    data = np.random.randn(num_examples, num_dimensions)
    targets = np.round(np.random.rand(num_examples, 1), 0)

    diff = check_grad(
        logistic_pen,  # function to check
        weights,
        0.001,  # perturbation
        data,
        targets,
        hyperparameters)

    print "diff =", diff
Example #3
def run_check_grad(hyperparameters):
    """ Performs gradient check on logistic function.
    :return: None
    """
    # This creates small random data with 20 examples and
    # 10 dimensions and checks the gradient on that data.
    num_examples = 20
    num_dimensions = 10

    weights = np.random.randn(num_dimensions + 1, 1)
    data = np.random.randn(num_examples, num_dimensions)
    targets = np.random.rand(num_examples, 1)

    y, dy = logistic(weights, data, targets, hyperparameters)[:2]

    diff = check_grad(logistic, weights, 0.001, data, targets, hyperparameters)

    print("diff =", diff)
Example #4
def run_check_grad(hyperparameters):
    """Performs gradient check on logistic function.
    """

    # This creates small random data with 20 examples and
    # 10 dimensions and checks the gradient on that data.
    num_examples = 20
    num_dimensions = 10

    weights = np.random.randn(num_dimensions + 1, 1)
    data = np.random.randn(num_examples, num_dimensions)
    targets = np.random.rand(num_examples, 1)

    diff = check_grad(
        logistic,  # function to check
        weights,
        0.001,  # perturbation
        data,
        targets,
        hyperparameters)

    print("diff =", diff)
Example #5
def run_check_grad(parameters):
    """Performs gradient check on logistic function.
    """

    # This creates small random data with 20 examples and
    # 10 dimensions and checks the gradient on that data.
    num_examples = 20
    num_dimensions = 10

    weights = np.random.randn(num_dimensions + 1).reshape(-1, 1)
    data = np.random.randn(num_examples * num_dimensions).reshape(
        num_examples, num_dimensions)
    targets = np.random.randn(num_examples).reshape(-1, 1)

    diff = check_grad(logistic,      # function to check
                      weights,
                      0.001,         # perturbation
                      data,
                      targets,
                      parameters)

    print "diff =", diff
Example #6
# X and y are the design matrix and targets defined earlier in the script.
print(y.shape)

one = np.ones((X.shape[0], 1))

# Prepend a bias column so the intercept is learned as w[0].
Xbar = np.concatenate((one, X), axis=1)

def cost(w):
    """Halved mean squared error of the linear model Xbar.dot(w)."""
    N = Xbar.shape[0]
    return 0.5/N*np.linalg.norm(y - Xbar.dot(w), 2)**2

def grad(w):
    """Gradient of cost with respect to w."""
    N = Xbar.shape[0]
    return 1/N*Xbar.T.dot(Xbar.dot(w) - y)

import check_grad
print(check_grad.check_grad(np.random.rand(Xbar.shape[1], 1), cost, grad))
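
# This local check_grad module takes the point first, then separate cost
# and grad callables, unlike the course utility in the earlier examples.
# A minimal sketch of such a checker (the name, return value, and the
# tolerance are assumptions, not the module's actual code):
def check_grad_sketch(w, cost, grad, eps=1e-6):
    numeric = np.zeros_like(w)
    for i in range(w.size):
        w_plus, w_minus = w.copy(), w.copy()
        w_plus.flat[i] += eps
        w_minus.flat[i] -= eps
        numeric.flat[i] = (cost(w_plus) - cost(w_minus)) / (2 * eps)
    return np.linalg.norm(grad(w) - numeric) < 1e-6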

def GD_NAG(w_init, grad, eta, gamma):
    """Gradient descent with Nesterov accelerated momentum (NAG)."""
    w = [w_init]
    v = [np.zeros_like(w_init)]
    for _ in range(100):
        # Evaluate the gradient at the look-ahead point w - gamma*v.
        v_new = gamma*v[-1] + eta*grad(w[-1] - gamma*v[-1])
        w_new = w[-1] - v_new
        # Stop once the average gradient magnitude is small.
        if np.linalg.norm(grad(w_new)) / len(w_new) < 1e-3:
            break
        w.append(w_new)
        v.append(v_new)

    return w[-1]

w = GD_NAG(np.random.rand(2, 1), grad, 0.1, 0.9)
Example #7
    """

    # TODO: Finish this function

    f,df,y = logistic(weights, data, targets, hyperparameters)

    wlambda = hyperparameters['weight_regularization']

    if wlambda != 0:
        # Penalize every weight except the bias (last entry).
        weights2 = weights[0:weights.size - 1]

        f += (wlambda/2.0)*np.dot(weights2.T, weights2)
        f = f[0]

        df[0:weights.size - 1] += weights2*wlambda

    return f, df, y
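
# logistic and logistic_predict come from the same assignment module; a
# minimal sketch of both, assuming the bias is the last entry of weights
# and targets are 0/1 column vectors:
import numpy as np

def logistic_predict(weights, data):
    """Probability of class 1 for each row of data (bias as last weight)."""
    data_1 = np.hstack((data, np.ones((data.shape[0], 1))))
    return 1.0 / (1.0 + np.exp(-data_1.dot(weights)))

def logistic(weights, data, targets, hyperparameters):
    """Cross-entropy cost, its gradient w.r.t. weights, and predictions."""
    y = logistic_predict(weights, data)
    f = -np.sum(targets * np.log(y) + (1 - targets) * np.log(1 - y))
    data_1 = np.hstack((data, np.ones((data.shape[0], 1))))
    df = data_1.T.dot(y - targets)
    return f, df, y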


if __name__ == '__main__':
    test_weights = np.array([[1, 2, 3]]).T
    test_data = np.array([[1, 0], [0, 1], [0, 0], [1, 1], [2, 2], [1, 2], [-1, -1]])
    test_targets = np.array([[1, 1, 1, 1, 1, 1, 0]]).T

    logistic_predict(test_weights, test_data)

    logistic(test_weights, test_data, test_targets, None)

    check_grad(logistic, test_weights, 0.001, test_data, test_targets, None)
Example #8
### for problem 2
import numpy as np

def grad(w):
    x = w[0].copy()
    y = w[1].copy()
    fx = 4*x*(x**2 + y - 7) + 2*(x - y + 1)
    fy = 2*(x**2 + y - 7) - 2*(x - y + 1)
    return np.asarray([fx, fy])

def cost(w):
    x = w[0].copy()
    y = w[1].copy()
    return (x**2 + y - 7)**2 + (x - y + 1)**2

import check_grad
print(check_grad.check_grad(np.random.rand(2, 1), cost, grad))


def GD(w0, eta=0.1):
    """Plain gradient descent with a fixed learning rate."""
    w = [w0]
    for _ in range(1000):
        new_w = w[-1] - eta * grad(w[-1])
        w.append(new_w)
        # Stop once the average gradient magnitude is small.
        if np.linalg.norm(grad(new_w), 2) / len(new_w) < 1e-3:
            break
    return w[-1]


w0 = np.random.rand(2, 1)
w = GD(w0)
print(w)
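
# Sanity check: setting both squared terms to zero (x**2 + y = 7 and
# x - y = -1) gives the two global minima (2, 3) and (-3, -2), so GD
# should land near one of them.
print(cost(np.array([[2.0], [3.0]])))  # exactly zero at a minimum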