Example #1
0
def test_bfgs_lr():
    """Run BFGS on a logistic-regression problem and assert it solves it."""
    problem = LogisticRegression()
    batches = itertools.repeat(((problem.X, problem.Z), {}))
    optimizer = Bfgs(problem.pars, problem.f, problem.fprime, args=batches)
    # Give the optimizer at most 52 steps to converge.
    for _ in itertools.islice(optimizer, 52):
        pass
    assert problem.solved(), 'did not find solution'
Example #2
0
def test_xnes_lr():
    """Run xNES on a seeded logistic-regression problem and check success."""
    problem = LogisticRegression(seed=10101)
    batches = itertools.repeat(((problem.X, problem.Z), {}))
    optimizer = Xnes(problem.pars, problem.f, args=batches)
    steps = 0
    for _ in optimizer:
        steps += 1
        # Stop after 102 optimizer steps.
        if steps > 101:
            break
    assert problem.solved(), "did not find solution"
Example #3
0
def test_smd_lr():
    """Run SMD on a seeded logistic-regression problem and check success."""
    problem = LogisticRegression(seed=10101)
    batches = itertools.repeat(((problem.X, problem.Z), {}))
    optimizer = Smd(problem.pars, problem.f, problem.fprime, problem.f_Hp,
                    args=batches, eta0=0.1)
    # Give the optimizer at most 152 steps to converge.
    for _ in itertools.islice(optimizer, 152):
        pass
    assert problem.solved(), 'did not find solution'
Example #4
0
def test_nesterov_lr():
    """Run Nesterov momentum on logistic regression and check success."""
    problem = LogisticRegression()
    batches = itertools.repeat(((problem.X, problem.Z), {}))
    optimizer = Nesterov(problem.pars, problem.fprime, steprate=0.1,
                         args=batches)
    # Give the optimizer at most 752 steps to converge.
    for _ in itertools.islice(optimizer, 752):
        pass
    assert problem.solved(), 'did not find solution'
Example #5
0
def test_ncg_lr():
    """Run nonlinear CG on logistic regression and check success."""
    problem = LogisticRegression()
    batches = itertools.repeat(((problem.X, problem.Z), {}))
    optimizer = NonlinearConjugateGradient(problem.pars, problem.f,
                                           problem.fprime, args=batches)
    # Give the optimizer at most 52 steps to converge.
    for _ in itertools.islice(optimizer, 52):
        pass
    assert problem.solved(), 'did not find solution'
Example #6
0
def test_xnes_lr():
    """Run xNES on a seeded logistic-regression problem and check success."""
    problem = LogisticRegression(seed=10101)
    batches = itertools.repeat(((problem.X, problem.Z), {}))
    optimizer = Xnes(problem.pars, problem.f, args=batches)
    # Give the optimizer at most 102 steps to converge.
    for _ in itertools.islice(optimizer, 102):
        pass
    assert problem.solved(), 'did not find solution'
Example #7
0
def test_asgd_lr():
    """Run averaged SGD on logistic regression and check success."""
    problem = LogisticRegression()
    batches = itertools.repeat(((problem.X, problem.Z), {}))
    optimizer = Asgd(problem.pars, problem.fprime, eta0=0.2, lmbd=1e-2,
                     t0=0.1, args=batches)
    # Give the optimizer at most 3002 steps; accept tolerance 0.15.
    for _ in itertools.islice(optimizer, 3002):
        pass
    assert problem.solved(0.15), 'did not find solution'
Example #8
0
def test_asgd_lr():
    """Run averaged SGD on logistic regression and check success."""
    problem = LogisticRegression()
    batches = itertools.repeat(((problem.X, problem.Z), {}))
    optimizer = Asgd(problem.pars, problem.fprime, eta0=0.2, lmbd=1e-2,
                     t0=0.1, args=batches)
    steps = 0
    for _ in optimizer:
        steps += 1
        # Stop after 3002 optimizer steps.
        if steps > 3001:
            break
    assert problem.solved(0.15), 'did not find solution'
Example #9
0
def test_sbfgs_lr():
    """Run sBFGS on a logistic-regression problem and check success."""
    problem = LogisticRegression()
    batches = itertools.repeat(((problem.X, problem.Z), {}))
    optimizer = Sbfgs(problem.pars, problem.f, problem.fprime, args=batches)
    # Give the optimizer at most 52 steps to converge.
    for _ in itertools.islice(optimizer, 52):
        pass
    assert problem.solved(), 'did not find solution'
Example #10
0
def test_ncg_lr():
    """Run nonlinear CG on logistic regression and check success."""
    problem = LogisticRegression()
    batches = itertools.repeat(((problem.X, problem.Z), {}))
    optimizer = NonlinearConjugateGradient(problem.pars, problem.f,
                                           problem.fprime, args=batches)
    steps = 0
    for _ in optimizer:
        steps += 1
        # Stop after 52 optimizer steps.
        if steps > 51:
            break
    assert problem.solved(), 'did not find solution'
Example #11
0
def test_gd_lr():
    """Run momentum gradient descent on logistic regression, check success."""
    problem = LogisticRegression()
    batches = itertools.repeat(((problem.X, problem.Z), {}))
    optimizer = GradientDescent(problem.pars,
                                problem.fprime,
                                step_rate=0.01,
                                momentum=.9,
                                args=batches)
    # Give the optimizer at most 502 steps to converge.
    for _ in itertools.islice(optimizer, 502):
        pass
    assert problem.solved(), 'did not find solution'
Example #12
0
def test_rprop_lr():
    """Run Rprop on a logistic-regression problem and check success."""
    problem = LogisticRegression()
    batches = itertools.repeat(((problem.X, problem.Z), {}))
    optimizer = Rprop(problem.pars,
                      problem.f,
                      problem.fprime,
                      step_shrink=0.1,
                      step_grow=1.2,
                      min_step=1e-6,
                      max_step=0.1,
                      args=batches)
    # Give the optimizer at most 502 steps to converge.
    for _ in itertools.islice(optimizer, 502):
        pass
    assert problem.solved(), 'did not find solution'
Example #13
0
def test_adadelta_lr():
    """Run Adadelta on a logistic-regression problem and check success.

    Fixed: removed a leftover debug ``print`` of the loss that ran on
    every one of the ~3000 iterations, flooding stdout and slowing the
    test (the sibling Adadelta test has no such print).
    """
    obj = LogisticRegression()
    args = itertools.repeat(((obj.X, obj.Z), {}))
    opt = Adadelta(obj.pars, obj.fprime, 0.9, args=args)
    for i, info in enumerate(opt):
        if i > 3000:
            break
    # 0.15 is the acceptance tolerance passed to solved(), matching the
    # other stochastic-optimizer tests in this file.
    assert obj.solved(0.15), 'did not find solution'
Example #14
0
def test_gd_lr():
    """Run momentum gradient descent on logistic regression, check success."""
    problem = LogisticRegression()
    batches = itertools.repeat(((problem.X, problem.Z), {}))
    optimizer = GradientDescent(
        problem.pars, problem.fprime, step_rate=0.01, momentum=.9,
        args=batches)
    steps = 0
    for _ in optimizer:
        steps += 1
        # Stop after 502 optimizer steps.
        if steps > 501:
            break
    assert problem.solved(), 'did not find solution'
Example #15
0
def test_rprop_lr():
    """Run Rprop (gradient only) on logistic regression and check success."""
    problem = LogisticRegression()
    batches = itertools.repeat(((problem.X, problem.Z), {}))
    optimizer = Rprop(
        problem.pars, problem.fprime, step_shrink=0.1, step_grow=1.2,
        min_step=1e-6, max_step=0.1, args=batches)
    # Give the optimizer at most 502 steps to converge.
    for _ in itertools.islice(optimizer, 502):
        pass
    assert problem.solved(), 'did not find solution'
Example #16
0
def test_rmsprop_continue():
    """Exercise RmsProp through the shared `continuation` check."""
    problem = LogisticRegression(n_inpt=2, n_classes=2)
    batches = itertools.repeat(((problem.X, problem.Z), {}))
    optimizer = RmsProp(problem.pars,
                        problem.fprime,
                        step_rate=0.01,
                        momentum=.9,
                        decay=0.9,
                        args=batches)

    continuation(optimizer)
Example #17
0
def test_gd_continue():
    """Exercise Nesterov-momentum GD through the shared `continuation` check."""
    problem = LogisticRegression(n_inpt=2, n_classes=2)
    batches = itertools.repeat(((problem.X, problem.Z), {}))
    optimizer = GradientDescent(
        problem.pars, problem.fprime, step_rate=0.01, momentum=.9,
        momentum_type='nesterov', args=batches)

    continuation(optimizer)
Example #18
0
def test_rprop_continue():
    """Exercise Rprop through the shared `continuation` check."""
    problem = LogisticRegression(n_inpt=2, n_classes=2)
    batches = itertools.repeat(((problem.X, problem.Z), {}))
    optimizer = Rprop(
        problem.pars, problem.fprime, step_shrink=0.1, step_grow=1.2,
        min_step=1e-6, max_step=0.1, args=batches)

    continuation(optimizer)
Example #19
0
def test_adadelta_continue():
    """Exercise Adadelta through the shared `continuation` check."""
    problem = LogisticRegression(n_inpt=2, n_classes=2)
    batches = itertools.repeat(((problem.X, problem.Z), {}))
    optimizer = Adadelta(problem.pars, problem.fprime, 0.9, args=batches)

    continuation(optimizer)
Example #20
0
def test_radagrad_lr():
    """Compare Radagrad with Adadelta, Adagrad and AdagradFull on one
    logistic-regression problem and plot the four loss curves.

    NOTE(review): this function asserts nothing and ends in a blocking
    ``plt.show()`` call, so it is really a manual comparison script, not
    an automated test.

    Fixed: the AdagradFull optimizer was fed ``obj_rada``'s data and its
    loss was evaluated with ``obj_rada.f`` (copy-paste slips); it now uses
    ``obj_fada`` consistently.  Leftover Python-2 debug prints and large
    spans of commented-out dead code were removed.
    """
    seed = 54
    random.seed(seed)
    n_samples = 100
    n_dim = 40
    n_classes = 3

    # Four identically seeded problem instances, so every optimizer starts
    # from the same data and the same initial parameters.
    obj_rada = LogisticRegression(n_samples=n_samples, n_inpt=n_dim,
                                  n_classes=n_classes, seed=seed)
    obj_ada = LogisticRegression(n_samples=n_samples, n_inpt=n_dim,
                                 n_classes=n_classes, seed=seed)
    obj_dada = LogisticRegression(n_samples=n_samples, n_inpt=n_dim,
                                  n_classes=n_classes, seed=seed)
    obj_fada = LogisticRegression(n_samples=n_samples, n_inpt=n_dim,
                                  n_classes=n_classes, seed=seed)

    # One random permutation of the sample indices, shared by all four
    # optimizers so they see the data in the same order.
    ridx = random.sample(xrange(obj_rada.X.shape[0]), obj_rada.X.shape[0])

    eta = 0.5
    eta_rada = 0.5
    delta = 0.001
    # Sketch size for Radagrad — chosen as ~sqrt(n_dim); presumably the
    # rank of the low-rank approximation — TODO confirm against Radagrad.
    k = int(np.sqrt(n_dim) + 1)

    opt_rada = Radagrad(
        obj_rada.pars, obj_rada.fprime, eta_rada, 0.001, delta, k,
        n_classes=n_classes,
        args=itertools.repeat(((obj_rada.X[ridx], obj_rada.Z[ridx]), {})))
    opt_ada = Adadelta(
        obj_ada.pars, obj_ada.fprime, 0.9,
        args=itertools.repeat(((obj_ada.X[ridx], obj_ada.Z[ridx]), {})))
    opt_dada = Adagrad(
        obj_dada.pars, obj_dada.fprime, eta, delta,
        args=itertools.repeat(((obj_dada.X[ridx], obj_dada.Z[ridx]), {})))
    # Bug fix: originally fed obj_rada's data; use obj_fada consistently
    # (the data is identical because of the shared seed, but the mismatch
    # was clearly unintended).
    opt_fada = AdagradFull(
        obj_fada.pars, obj_fada.fprime, eta, 0.001, delta,
        n_classes=n_classes,
        args=itertools.repeat(((obj_fada.X[ridx], obj_fada.Z[ridx]), {})))

    max_iters = n_samples * 6
    rada_loss, ada_loss, dada_loss, fada_loss = [], [], [], []

    # Record the full-batch loss after every update of each optimizer.
    for i, info in enumerate(opt_ada):
        ada_loss.append(obj_ada.f(opt_ada.wrt, obj_ada.X, obj_ada.Z))
        if i > max_iters:
            break

    for i, info in enumerate(opt_dada):
        dada_loss.append(obj_dada.f(opt_dada.wrt, obj_dada.X, obj_dada.Z))
        if i > max_iters:
            break

    for i, info in enumerate(opt_rada):
        rada_loss.append(obj_rada.f(opt_rada.wrt, obj_rada.X, obj_rada.Z))
        if i > max_iters:
            break

    # Bug fix: loss was computed with obj_rada.f; use obj_fada's own loss.
    for i, info in enumerate(opt_fada):
        fada_loss.append(obj_fada.f(opt_fada.wrt, obj_fada.X, obj_fada.Z))
        if i > max_iters:
            break

    plt.plot(rada_loss, '-r')
    plt.plot(ada_loss, '-b')
    plt.plot(dada_loss, '-g')
    plt.plot(fada_loss, '-k')

    # NOTE(review): blocks until the plot window is closed.
    plt.show()