Example #1
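This test checks that cloning a model is idempotent with respect to training: `model.clone()` copies the hyperparameters but none of the learned state, so training the clone on the same data stream must reproduce the original model's predictions step for step.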
from river import datasets, linear_model, optim, preprocessing


def test_clone_idempotent():

    model = preprocessing.StandardScaler() | linear_model.LogisticRegression(
        optimizer=optim.Adam(), l2=0.1)

    # Train the model, recording its pre-update prediction at each step.
    trace = []
    for x, y in datasets.Phishing():
        trace.append(model.predict_proba_one(x))
        model.learn_one(x, y)

    # The clone keeps the hyperparameters but none of the learned state, so
    # replaying the same stream must reproduce the recorded predictions.
    clone = model.clone()
    for i, (x, y) in enumerate(datasets.Phishing()):
        assert clone.predict_proba_one(x) == trace[i]
        clone.learn_one(x, y)
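For reference, a minimal sketch of the `clone()` contract the test relies on: hyperparameters are copied, learned state is not. The `weights` attribute checked below is an assumption about how river's linear models expose their coefficients; adjust if the public API differs.

from river import linear_model, optim

model = linear_model.LogisticRegression(optimizer=optim.Adam(), l2=0.1)
model.learn_one({'x': 1.0}, True)  # the model now holds a learned weight for 'x'

fresh = model.clone()
assert fresh.l2 == model.l2  # hyperparameters are carried over
assert fresh.weights == {}   # learned state is discarded (assumed attribute)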
Example #2
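This test validates the analytical gradient of each linear model against a numerical estimate, across a grid of optimizers and weight initializers. The finite-difference idea: nudge a weight by ±ε and check that (loss(w + ε) - loss(w - ε)) / 2ε agrees with the computed gradient. As a primer, here is a minimal standalone sketch of that check on the logistic loss with a single weight (illustrative only, not river's implementation):

import math


def loss(w, x, y):
    # Logistic loss of one example under a one-dimensional weight.
    p = 1 / (1 + math.exp(-w * x))
    return -(y * math.log(p) + (1 - y) * math.log(1 - p))


def gradient(w, x, y):
    # Analytical gradient of the logistic loss: (sigmoid(w * x) - y) * x.
    p = 1 / (1 + math.exp(-w * x))
    return (p - y) * x


w, x, y, eps = 0.3, 1.5, 1.0, 1e-6
numeric = (loss(w + eps, x, y) - loss(w - eps, x, y)) / (2 * eps)
assert math.isclose(numeric, gradient(w, x, y), rel_tol=1e-4)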
import copy
import itertools

import pytest

from river import datasets, optim
from river import linear_model as lm


@pytest.mark.parametrize(
    'lm, dataset',
    [
        pytest.param(
            # The comprehension rebinds `lm` from the module alias to each
            # model class in turn; the module is only read in the iterable.
            lm(optimizer=copy.deepcopy(optimizer), initializer=initializer, l2=0),
            dataset,
            id=f'{lm.__name__} - {optimizer} - {initializer}',
        )
        for lm, dataset in [
            (lm.LinearRegression, datasets.TrumpApproval()),
            (lm.LogisticRegression, datasets.Bananas()),
        ]
        for optimizer, initializer in itertools.product(
            [
                optim.AdaBound(),
                optim.AdaDelta(),
                optim.AdaGrad(),
                optim.AdaMax(),
                optim.Adam(),
                optim.AMSGrad(),
                # TODO: check momentum optimizers
                # optim.Momentum(),
                # optim.NesterovMomentum(),
                optim.RMSProp(),
                optim.SGD(),
            ],
            [
                optim.initializers.Zeros(),
                optim.initializers.Normal(mu=0, sigma=1, seed=42),
            ],
        )
    ],
)
@pytest.mark.slow
def test_finite_differences(lm, dataset):
    """Checks the gradient of a linear model via finite differences.