Пример #1
0
def test_admm_with_large_lamduh(N, p, nchunks):
    """With a very large L1 penalty, ADMM should drive every coefficient to zero."""
    X = da.random.random((N, p), chunks=(N // nchunks, p))
    true_coef = np.random.random(p)
    y = make_y(X, beta=np.array(true_coef), chunks=(N // nchunks, ))

    X, y = persist(X, y)
    fitted = admm(X, y, reg=L1, lamduh=1e4, rho=20, max_iter=500)

    # Heavy regularization: the solution should be (numerically) all zeros.
    assert np.allclose(fitted, np.zeros(p), atol=1e-4)
Пример #2
0
def test_admm_with_large_lamduh(N, p, nchunks, is_cupy):
    """A huge L1 penalty should shrink the ADMM solution to (near) zero,
    on both numpy-backed and cupy-backed dask arrays."""
    X = da.random.random((N, p), chunks=(N // nchunks, p))
    coef = np.random.random(p)
    y = make_y(X, beta=np.array(coef), chunks=(N // nchunks, ))

    if is_cupy:
        # Skip cleanly when cupy is not installed.
        cupy = pytest.importorskip('cupy')
        X, y = to_dask_cupy_array_xy(X, y, cupy)

    X, y = persist(X, y)
    solution = admm(X, y, regularizer=L1(), lamduh=1e5, rho=20, max_iter=500)

    assert np.allclose(solution, np.zeros(p), atol=1e-4)
Пример #3
0
def test_basic_unreg_descent(func, kwargs, N, nchunks, family):
    """The optimizer's solution should beat a random vector on pointwise loss."""
    true_coef = np.random.normal(size=2)
    n_features = len(true_coef)
    X = da.random.random((N, n_features), chunks=(N // nchunks, n_features))
    y = make_y(X, beta=np.array(true_coef), chunks=(N // nchunks, ))

    X, y = persist(X, y)

    estimate = func(X, y, family=family, **kwargs)
    random_vec = np.random.normal(size=2)

    loss_at_estimate = family.pointwise_loss(estimate, X, y).compute()
    loss_at_random = family.pointwise_loss(random_vec, X, y).compute()

    # A fitted solution must have strictly lower loss than a random guess.
    assert loss_at_estimate < loss_at_random
Пример #4
0
def make_intercept_data(N, p, seed=20009):
    """Create random logistic-regression data with an intercept column.

    Given the desired number of observations (N) and the desired number
    of variables (p), creates random logistic data to test on.  Columns
    are normalized to sum to one, and the final column of X is all ones,
    acting as the intercept term.

    Parameters
    ----------
    N : int
        Number of observations (rows).
    p : int
        Number of non-intercept variables; X has ``p + 1`` columns.
    seed : int, optional
        Seed for both the dask and numpy RNGs, for reproducibility.

    Returns
    -------
    X : dask.array of shape (N, p + 1)
    y : labels produced by ``make_y``
    """
    # set the seeds
    da.random.seed(seed)
    np.random.seed(seed)

    X = np.random.random((N, p + 1))
    col_sums = X.sum(axis=0)
    X = X / col_sums[None, :]
    X[:, p] = 1  # intercept column
    # BUGFIX: chunk sizes must be integers; ``N / 5`` is a float under
    # Python 3 true division, so use floor division instead.
    X = da.from_array(X, chunks=(N // 5, p + 1))
    y = make_y(X, beta=np.random.random(p + 1))

    return X, y
Пример #5
0
def test_basic_reg_descent(func, kwargs, N, nchunks, family, lam, reg):
    """A regularized fit should achieve lower penalized loss than a random vector."""
    true_coef = np.random.normal(size=2)
    n_cols = len(true_coef)
    X = da.random.random((N, n_cols), chunks=(N // nchunks, n_cols))
    y = make_y(X, beta=np.array(true_coef), chunks=(N // nchunks, ))

    X, y = persist(X, y)

    fitted = func(X, y, family=family, lamduh=lam, regularizer=reg, **kwargs)
    random_vec = np.random.normal(size=2)

    # Build the penalized objective: pointwise loss plus the regularizer term.
    penalized_loss = reg.add_reg_f(family.pointwise_loss, lam)

    loss_at_fit = penalized_loss(fitted, X, y).compute()
    loss_at_random = penalized_loss(random_vec, X, y).compute()

    assert loss_at_fit < loss_at_random
Пример #6
0
def test_basic_unreg_descent(func, kwargs, N, nchunks, family, is_cupy):
    """Optimizer output should beat a random vector on pointwise loss,
    on both numpy-backed and cupy-backed dask arrays."""
    true_coef = np.random.normal(size=2)
    n_features = len(true_coef)
    X = da.random.random((N, n_features), chunks=(N // nchunks, n_features))
    y = make_y(X, beta=np.array(true_coef), chunks=(N // nchunks, ))

    if is_cupy:
        # Skip cleanly when cupy is not installed.
        cupy = pytest.importorskip('cupy')
        X, y = to_dask_cupy_array_xy(X, y, cupy)

    X, y = persist(X, y)

    estimate = func(X, y, family=family, **kwargs)
    random_vec = np.random.normal(size=2)
    # Move the comparison vector to the same backend (cupy or numpy) as X.
    random_vec = maybe_to_cupy(random_vec, X)

    loss_at_estimate = family.pointwise_loss(estimate, X, y).compute()
    loss_at_random = family.pointwise_loss(random_vec, X, y).compute()

    assert loss_at_estimate < loss_at_random
Пример #7
0
# Synthetic logistic-regression fixture shared by the tests below.
N = 100000  # number of observations
S = 2  # target standard deviation of the linear predictor (used by make_y)

# NOTE(review): ``M`` (number of features) is not defined in this snippet —
# presumably set elsewhere in the original module; confirm before running.
X = np.random.randn(N, M)
X[:, 1] = 1.0  # constant column acting as an intercept term
beta0 = np.random.randn(M)  # true coefficients; rescaled in place by make_y


def make_y(X, beta0=beta0):
    """Generate binary labels from ``X`` and coefficients ``beta0``,
    rescaled so the linear predictor has standard deviation ``S``.

    Returns ``(y, z0)``: boolean labels and the rescaled linear predictor.

    NOTE(review): ``beta0 *= scl`` mutates the module-level default array
    in place, which keeps the global ``beta0`` consistent with the
    returned ``z0``.  Repeated calls rescale it again (roughly idempotent,
    since the std is already ~S after the first call) — confirm this side
    effect is intentional before refactoring.
    """
    N, M = X.shape
    z0 = X.dot(beta0)
    z0 = da.compute(z0)[0]  # ensure z0 is a numpy array
    scl = S / z0.std()
    beta0 *= scl  # in-place: rescales the caller's / module-level array
    z0 *= scl
    # ``logit`` is applied here as a probability transform; presumably it is
    # a sigmoid/expit despite its name — verify against its import.
    y = np.random.rand(N) < logit(z0)
    return y, z0


y, z0 = make_y(X)
L0 = N * math.log(2.0)  # null-model log-loss: N observations at p = 1/2


# BUGFIX: dask chunk sizes must be integers; ``N / 10`` yields a float
# under Python 3 true division, so use floor division instead.
dX = da.from_array(X, chunks=(N // 10, M))
dy = da.from_array(y, chunks=(N // 10,))


@pytest.mark.parametrize('X,y', [(dX, dy)])
def test_gradient(X, y):
    # Smoke test: gradient() should run without raising on dask inputs.
    # (No assertion on the result — presumably correctness is covered elsewhere.)
    gradient(X, y)