def negative_log_gradient(xs: List[Vector], ys: Vector,
                          beta: Vector) -> Vector:
    "Total 'error' i.e. summing the negative log gradients"
    return vector_sum(
        [_negative_log_gradient(x, y, beta) for x, y in zip(xs, ys)])
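
This example assumes a per-point helper _negative_log_gradient (the gradient of the negative log-likelihood for logistic regression at one data point) and a vector_sum reducer. A minimal, self-contained sketch of what those helpers could look like; the names logistic, dot, and the Vector alias are assumptions here, not necessarily the original module's:

import math
from typing import List

Vector = List[float]

def dot(v: Vector, w: Vector) -> float:
    """v_1*w_1 + ... + v_n*w_n"""
    return sum(v_i * w_i for v_i, w_i in zip(v, w))

def logistic(x: float) -> float:
    """The sigmoid function."""
    return 1.0 / (1.0 + math.exp(-x))

def _negative_log_gradient(x: Vector, y: float, beta: Vector) -> Vector:
    """Gradient of the negative log-likelihood at one (x, y) point:
    the j-th component is -(y - logistic(dot(x, beta))) * x[j]."""
    error = y - logistic(dot(x, beta))
    return [-error * x_j for x_j in x]

def vector_sum(vectors: List[Vector]) -> Vector:
    """Element-wise sum of a list of equal-length vectors."""
    return [sum(v[i] for v in vectors) for i in range(len(vectors[0]))]
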
Example #2
def directional_variance_gradient(X, w):
    return vector_sum(directional_variance_gradient_i(x_i, w) for x_i in X)
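
This example assumes a per-row helper directional_variance_gradient_i. A plausible sketch, using the common simplification of differentiating the squared projection dot(x_i, d)**2 with respect to the unit direction d itself; the dot helper and Vector alias are assumed names:

from typing import List

Vector = List[float]

def dot(v: Vector, w: Vector) -> float:
    return sum(v_i * w_i for v_i, w_i in zip(v, w))

def directional_variance_gradient_i(x_i: Vector, w: Vector) -> Vector:
    """Contribution of one row x_i to the gradient of dot(x_i, d)**2
    with respect to the unit direction: 2 * dot(x_i, d) * x_i."""
    d = [w_j / dot(w, w) ** 0.5 for w_j in w]   # rescale w to unit length
    projection_length = dot(x_i, d)
    return [2 * projection_length * x_ij for x_ij in x_i]
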
Example #3
import linear_algebra as la   # module under test; name taken from the banner printed below

A = [1, 3, 5, 7, 9]
B = [6, 4, 8, 2, 10]

print("*** Test Module <linear_algebra> ***")
print("*** vector ......")

print("vector A = ", A)
print("vector B = ", B)

C = la.vector_add(A, B)
print("A + B = ", C)

C = la.vector_subtract(A, B)
print("A - B = ", C)

C = la.vector_sum([A, B])
print("A and B summary = ", C)

C = la.scalar_multiply(10, A)
print("10 * A = ", C)

C = la.vector_mean([A, B])
print("A and B mean = ", C)

C = la.dot(A, B)
print("A dot B = ", C)

C = la.sum_of_squares(A)
print("A^2's summary = ", C)

C = la.magnitude(A)
print("magnitude of A = ", C)
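
The script above exercises a module imported as la. Below is a minimal sketch of implementations consistent with those calls (assumed code; the actual linear_algebra module may differ):

import math
from typing import List

Vector = List[float]

def vector_add(v: Vector, w: Vector) -> Vector:
    """Element-wise sum of two vectors."""
    return [v_i + w_i for v_i, w_i in zip(v, w)]

def vector_subtract(v: Vector, w: Vector) -> Vector:
    """Element-wise difference of two vectors."""
    return [v_i - w_i for v_i, w_i in zip(v, w)]

def vector_sum(vectors: List[Vector]) -> Vector:
    """Element-wise sum of a list of vectors."""
    result = vectors[0]
    for v in vectors[1:]:
        result = vector_add(result, v)
    return result

def scalar_multiply(c: float, v: Vector) -> Vector:
    """Multiply every element by c."""
    return [c * v_i for v_i in v]

def vector_mean(vectors: List[Vector]) -> Vector:
    """Element-wise mean of a list of vectors."""
    return scalar_multiply(1 / len(vectors), vector_sum(vectors))

def dot(v: Vector, w: Vector) -> float:
    """v_1*w_1 + ... + v_n*w_n"""
    return sum(v_i * w_i for v_i, w_i in zip(v, w))

def sum_of_squares(v: Vector) -> float:
    """v_1*v_1 + ... + v_n*v_n"""
    return dot(v, v)

def magnitude(v: Vector) -> float:
    """Euclidean length of v."""
    return math.sqrt(sum_of_squares(v))
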
Example #4

def test_vector_sum(self):
    self.assertEqual([6, 5, 4],
                     vector_sum([[2, 3, 1], [1, 1, 2], [3, 1, 1]]))
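
The expected value follows from summing each position across the three vectors: 2 + 1 + 3 = 6, 3 + 1 + 1 = 5, and 1 + 2 + 1 = 4. A stand-alone check of the same expectation, using a compact zip-based vector_sum (an assumed implementation, not necessarily the one under test):

def vector_sum(vectors):
    # zip(*vectors) transposes the rows into columns,
    # so summing each column gives the element-wise sum.
    return [sum(column) for column in zip(*vectors)]

assert vector_sum([[2, 3, 1], [1, 1, 2], [3, 1, 1]]) == [6, 5, 4]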