Example #1
def test_LinearRegressionModel_convergence():
    # Run until convergence
    # Assert that the model can converge
    model = LinearRegressionModel(2, 2)
    # NOTE: We use and instead of xor, because xor is non-linear
    dataset = datasets.get_and()

    model.train(*dataset)
    # NOTE: This linear model cannot achieve 0 MSE
    assert validation.get_error(model, *dataset) <= 0.1
Example #2
def test_LinearRegressionModel():
    # Run for a couple of iterations
    # Assert that the new error is less than the original error
    model = LinearRegressionModel(2, 2)
    # NOTE: We use and instead of xor, because xor is non-linear
    dataset = datasets.get_and()

    error = validation.get_error(model, *dataset)
    model.train(*dataset, iterations=10)
    assert validation.get_error(model, *dataset) < error
Example #3
def test_LinearRegressionModel_get_obj_equals_get_obj_jac_l2_penalty():
    _check_get_obj_equals_get_obj_jac(lambda a, o: LinearRegressionModel(
        a,
        o,
        penalty_func=error.L2Penalty(penalty_weight=random.uniform(0.0, 2.0))))
Example #4
def test_LinearRegressionModel_get_obj_equals_get_obj_jac():
    _check_get_obj_equals_get_obj_jac(lambda a, o: LinearRegressionModel(a, o))
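The _check_get_obj_equals_get_obj_jac helper is not shown in these examples. The sketch below illustrates the underlying idea under the assumption (inferred only from the test name) that the model exposes one accessor returning the objective value and another returning the objective together with its jacobian; the helper name and the toy quadratic objective are placeholders, not part of the learning API.

import numpy


def _check_obj_matches_obj_jac_sketch(get_obj, get_obj_jac, parameters):
    # The objective computed on its own should equal the objective
    # returned alongside the jacobian
    obj_only = get_obj(parameters)
    obj_with_jac, _ = get_obj_jac(parameters)
    assert numpy.isclose(obj_only, obj_with_jac)


# Usage with a toy quadratic objective standing in for the model's error
parameters = numpy.random.random(4)
_check_obj_matches_obj_jac_sketch(
    lambda p: numpy.sum(p**2),
    lambda p: (numpy.sum(p**2), 2.0 * p),
    parameters)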
Example #5
def test_LinearRegressionModel_jacobian_l2_penalty():
    _check_jacobian(lambda a, o: LinearRegressionModel(
        a,
        o,
        penalty_func=error.L2Penalty(penalty_weight=random.uniform(0.0, 2.0))))
Example #6
def test_LinearRegressionModel_jacobian():
    _check_jacobian(lambda a, o: LinearRegressionModel(a, o))
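Similarly, _check_jacobian is left undefined in these snippets. A common way to implement such a check, shown here as a hedged, self-contained sketch (the helper name and toy objective are illustrative, not the learning implementation), is to compare the analytic jacobian against central finite differences:

import numpy


def _check_jacobian_sketch(get_obj, get_jac, parameters, epsilon=1e-6):
    # Analytic jacobian reported by the model
    analytic = get_jac(parameters)

    # Central finite-difference approximation, one parameter at a time
    numeric = numpy.zeros_like(parameters)
    for i in range(parameters.size):
        step = numpy.zeros_like(parameters)
        step[i] = epsilon
        numeric[i] = (get_obj(parameters + step)
                      - get_obj(parameters - step)) / (2.0 * epsilon)

    assert numpy.allclose(analytic, numeric, atol=1e-4)


# Usage with a toy quadratic objective, whose jacobian is 2 * p
parameters = numpy.random.random(4)
_check_jacobian_sketch(lambda p: numpy.sum(p**2), lambda p: 2.0 * p, parameters)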
Example #7
import functools

from learning import datasets, validation, LinearRegressionModel
from ill import ILL, get_neighborhood_k_nearest

# Grab the popular iris dataset from 'learning'
dataset = datasets.get_iris()

# Make an underlying model for ILL
# See 'learning' library for more details
underlying_model = LinearRegressionModel(4, 3)

# Make an ILL ensemble of our underlying model
# See code for more options
model = ILL(
    underlying_model,
    grid_spacing=0.5,
    neighborhood_func=functools.partial(
        get_neighborhood_k_nearest, k_nearest=5))

# Let's train our ILL
# First, we'll split our dataset into training and testing sets
# Our training set will contain 30 samples from each class
training_set, testing_set = validation.make_train_test_sets(
    *dataset, train_per_class=30)

# We could customize training and stopping criteria through
# the arguments of train, but the defaults should be sufficient here
model.train(*training_set)

# Our ILL should converge in a few moments
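
# NOTE: The lines below are not part of the original snippet; they are a
# hedged sketch of a natural next step, reusing validation.get_error (seen
# in the tests above) to score the trained ILL on the held-out testing set
print('Testing error:', validation.get_error(model, *testing_set))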