Example #1
# Assumed imports: the snippet appears to come from the test suite of the
# river (formerly creme) online machine learning library.
import copy
import itertools

import pytest

from river import datasets, linear_model as lm, optim


@pytest.mark.parametrize(
    'lm, dataset',
    [
        pytest.param(
            lm(optimizer=copy.deepcopy(optimizer), initializer=initializer, l2=0),
            dataset,
            id=f'{lm.__name__} - {optimizer} - {initializer}'
        )
        for lm, dataset in [
            (lm.LinearRegression, datasets.TrumpApproval()),
            (lm.LogisticRegression, datasets.Bananas())
        ]
        for optimizer, initializer in itertools.product(
            [
                optim.AdaBound(),
                optim.AdaDelta(),
                optim.AdaGrad(),
                optim.AdaMax(),
                optim.Adam(),
                optim.AMSGrad(),
                # TODO: check momentum optimizers
                # optim.Momentum(),
                # optim.NesterovMomentum(),
                optim.RMSProp(),
                optim.SGD()
            ],
            [
                optim.initializers.Zeros(),
                optim.initializers.Normal(mu=0, sigma=1, seed=42)
            ]
        )
    ]
)
# (the decorated test function is cut off in the original snippet)
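For completeness, here is a minimal sketch of a test body the decorator could apply to, assuming the river API, where a dataset iterates as (features, target) pairs and models expose learn_one / predict_one. The function name and its logic are hypothetical, since the original function is truncated.

# Hypothetical test body (not part of the original snippet): stream a bounded
# number of samples through the parametrized model and check that learning
# and prediction run without raising.
def test_optimizer_runs(lm, dataset):
    for x, y in itertools.islice(iter(dataset), 100):
        lm.learn_one(x, y)
        lm.predict_one(x)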
Example #2
# Unpack the hyperparameters for the selected optimizer from the config dict.
l2 = config['l2']
opt = config['optimizer']
lr = config['lr']
AdaBound_lr = config['AdaBound_lr']
rho = config['rho']
beta_1 = config['beta_1']
beta_2 = config['beta_2']
eps = config['eps']
gamma = config['gamma']
final_lr = config['final_lr']
alpha = config['alpha']
FTRL_l1 = config['FTRL_l1']
FTRL_l2 = config['FTRL_l2']

if (opt == "AdaBound"):
    optimizer = optim.AdaBound(lr, beta_1, beta_2, eps, gamma, final_lr)
elif (opt == "AdaDelta"):
    optimizer = optim.AdaDelta(rho, eps)
elif (opt == "AdaGrad"):
    optimizer = optim.AdaGrad(lr, eps)
elif (opt == "Adam"):
    optimizer = optim.Adam(lr, beta_1, beta_2, eps)
elif (opt == "FTRLProximal"):
    optimizer = optim.FTRLProximal(alpha, beta, l1, l2)
elif (opt == "Momentum"):
    optimizer = optim.Momentum(lr, rho)
elif (opt == "RMSProp"):
    optimizer = optim.RMSProp(lr, rho, eps)
elif (opt == "VanillaSGD"):
    optimizer = optim.VanillaSGD(lr)
elif (opt == "NesterovMomentum"):