# `Var` and `ops` are assumed to be provided by the surrounding autodiff module;
# they are not defined in this snippet.
def MSE(X, y, m, b):
    """
    Returns the Mean Squared Error where
    the predicted target is mX + b
    and y is the observed target variable.
    """
    loss = Var(0)
    for vec, y_i in zip(X, y):
        # accumulate the squared residual of the linear prediction for one sample
        loss = loss + (ops.sum(m * vec) + b - y_i)**2
    return loss / len(X)
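A minimal usage sketch (the data X, y and the parameter values below are illustrative, not from the source; it assumes rows of X can be multiplied elementwise with the Var m, exactly as the loop in MSE does):
X = [[1.0, 2.0], [2.0, 0.5]]
y = [3.0, 2.5]
m = Var([0.1, 0.1])    # slope vector
b = Var(0.0)           # intercept

loss = MSE(X, y, m, b)
loss.backward()        # backpropagate through the loss graph
dm = loss.grad(m)      # gradient of the loss w.r.t. the slope vector
db = loss.grad(b)      # gradient of the loss w.r.t. the intercept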
def MSE_regularized(X, y, m, b, p=1, C=1):
    """
    Returns a regularized Mean Squared Error with an L-p penalty,
    where the predicted target is mX + b,
    y is the observed target variable
    and C is the weight of the L-p norm penalty on the vector m.
    """
    loss = Var(0)
    for vec, y_i in zip(X, y):
        loss = loss + (ops.sum(m * vec) + b - y_i)**2
    # half the mean squared error plus the L-p penalty on the slope vector
    return loss / (2 * len(X)) + C * ops.norm(m, p=p)**p
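As a sanity check (hypothetical, reusing the illustrative X, y, m, b from the sketch above), the regularized objective is half the plain MSE plus the penalty term:
plain = MSE(X, y, m, b)
reg = MSE_regularized(X, y, m, b, p=2, C=0.1)
penalty = 0.1 * ops.norm(m, p=2)**2
# reg.val should be close to plain.val / 2 + penalty.val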
def ridge_loss(X, y, m, b, C=1):
    """
    Returns the Ridge Regression objective function.
    Reference: https://scikit-learn.org/stable/modules/generated/sklearn.linear_model.Ridge.html
    The predicted target is mX + b,
    y is the observed target variable
    and C is the weight of the L-2 norm penalty on the vector m.
    """
    loss = Var(0)
    for vec, y_i in zip(X, y):
        loss = loss + (ops.sum(m * vec) + b - y_i)**2
    # sum of squared errors (not averaged) plus the squared L-2 penalty
    return loss + C * ops.norm(m, 2)**2
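Unlike MSE, ridge_loss keeps the unscaled sum of squared errors, matching the sklearn Ridge objective ||y - Xw||^2 + alpha * ||w||^2. A hedged check, reusing the illustrative names from above:
ridge = ridge_loss(X, y, m, b, C=0.1)
# ridge.val should be close to
# MSE(X, y, m, b).val * len(X) + 0.1 * (ops.norm(m, 2)**2).val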
def elastic_loss(X, y, m, b, C=1, l1_ratio=0.5):
    """
    Returns the Elastic Net Regression objective function.
    Reference: https://scikit-learn.org/stable/modules/generated/sklearn.linear_model.ElasticNet.html
    The predicted target is mX + b,
    y is the observed target variable,
    C is the overall weight of the penalty on the vector m
    and l1_ratio is the fraction of the penalty assigned to the L-1 norm.
    """
    loss = Var(0)
    for vec, y_i in zip(X, y):
        loss = loss + (ops.sum(m * vec) + b - y_i)**2
    # half the mean squared error plus a convex mix of L-1 and squared L-2 penalties
    return loss / (2 * len(X)) + C * l1_ratio * ops.norm(
        m, p=1) + 0.5 * C * (1 - l1_ratio) * ops.norm(m, 2)**2
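The mixing parameter interpolates between the two penalties; the edge cases below are a hypothetical illustration, reusing the names from the earlier sketches:
lasso_like = elastic_loss(X, y, m, b, C=0.1, l1_ratio=1.0)  # pure L-1 penalty
ridge_like = elastic_loss(X, y, m, b, C=0.1, l1_ratio=0.0)  # pure (halved) squared L-2 penalty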
Example #5
import numpy as np

def test_sum():
    var1 = Var([2, 2, 2, 2, 2])
    var2 = ops.sum(var1)
    var2.backward()
    # the sum of five 2s is 10, and each element contributes a gradient of 1
    assert var2.val == 10.
    assert np.all(var2.grad(var1) == [1, 1, 1, 1, 1])
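A hedged companion test in the same style (not from the source; it assumes the MSE function above is in scope and that grad also works for a scalar Var such as b):
def test_mse_grad_b():
    X = [[1.0], [2.0]]
    y = [1.0, 2.0]
    m = Var([0.0])
    b = Var(0.0)
    loss = MSE(X, y, m, b)
    loss.backward()
    # dMSE/db = (2 / n) * sum(m.x_i + b - y_i) = (2 / 2) * ((0 - 1) + (0 - 2)) = -3
    assert np.allclose(loss.grad(b), -3.0)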