Example #1
def check_the_grad(nos=100, idim=30, hdim=10, eps=1e-8, verbose=False):
    """
    Check gradient computation for Neural Networks.
    """
    #
    from opt import check_grad
    from misc import sigmoid
    from losses import ssd, mia
    # number of input samples (nos),
    # each of dimension idim
    ins = np.random.randn(nos, idim)
    structure = dict()
    structure["hdim"] = hdim
    structure["af"] = sigmoid
    structure["score"] = ssd
    structure["beta"] = 0.7
    structure["rho"] = 0.01
    
    # flat parameter vector: weight matrix (idim*hdim), hidden bias (hdim),
    # visible bias (idim); only the weight matrix gets small random values
    weights = np.zeros(idim*hdim + hdim + idim)
    weights[:idim*hdim] = 0.001 * np.random.randn(idim*hdim)
    
    args = dict()
    args["inputs"] = ins
    args["structure"] = structure
    #
    delta = check_grad(true_score, grad, weights, args, eps=eps, verbose=verbose)
    assert delta < 1e-4, "[sae.py] check_the_grad FAILED. Delta is %f" % delta
    return True
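Every example delegates the numerical comparison to opt.check_grad, which is not shown on this page; only its call signature check_grad(f, fprime, x0, args, eps=..., verbose=...) and the returned scalar delta are visible. A minimal central-difference sketch, assuming f and fprime take the entries of args as keyword arguments (the real helper in opt.py may well differ):

import numpy as np

def check_grad(f, fprime, x0, args, eps=1e-8, verbose=False):
    """Return the largest absolute difference between the analytic
    gradient fprime(x0, **args) and a central finite-difference estimate."""
    analytic = fprime(x0, **args)
    numeric = np.zeros_like(x0)
    for i in range(x0.size):
        step = np.zeros_like(x0)
        step[i] = eps
        numeric[i] = (f(x0 + step, **args) - f(x0 - step, **args)) / (2 * eps)
    delta = np.max(np.abs(analytic - numeric))
    if verbose:
        print("max |analytic - numeric| = %e" % delta)
    return delta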
Example #2
def check_the_grad(nos=1, ind=30, outd=5, bxe=False,
        eps=1e-6, verbose=False):
    """
    """
    from opt import check_grad
    #
    weights = 0.1 * np.random.randn(ind*outd + outd)
    ins = np.random.randn(nos, ind)

    if bxe:
        # binary cross-entropy: independent 0/1 targets per output unit
        score = score_mia
        grad = grad_mia
        outs = 1.*(np.random.rand(nos, outd) > 0.5)
    else:
        # multinomial cross-entropy: integer class labels in [0, outd)
        outs = np.random.randint(outd, size=nos)
        score = score_xe
        grad = grad_xe

    cg = dict()
    cg["inputs"] = ins
    cg["targets"] = outs
    #
    delta = check_grad(f=score, fprime=grad, x0=weights,
            args=cg, eps=eps, verbose=verbose)
    assert delta < 1e-4, "[logreg.py] check_the_grad FAILED. Delta is %f" % delta
    return True
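The flat parameter vector in this example packs a weight matrix followed by a per-class bias; the slicing below is inferred from its size ind*outd + outd, not taken from logreg.py:

def unpack_logreg(weights, ind, outd):
    """Split the flat logistic-regression parameters (inferred layout)."""
    W = weights[:ind * outd].reshape(ind, outd)  # input-to-class weights
    b = weights[ind * outd:]                     # per-class bias
    return W, b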
Example #3
def check_the_grad(nos=1, ind=30, outd=5, bxe=False, eps=1e-6, verbose=False):
    """
    """
    from opt import check_grad
    #
    weights = 0.1 * np.random.randn(ind * outd + outd)
    ins = np.random.randn(nos, ind)

    if bxe:
        # binary cross-entropy: independent 0/1 targets per output unit
        score = score_mia
        grad = grad_mia
        outs = 1. * (np.random.rand(nos, outd) > 0.5)
    else:
        # multinomial cross-entropy: integer class labels in [0, outd)
        outs = np.random.randint(outd, size=nos)
        score = score_xe
        grad = grad_xe

    cg = dict()
    cg["inputs"] = ins
    cg["targets"] = outs
    #
    delta = check_grad(f=score,
                       fprime=grad,
                       x0=weights,
                       args=cg,
                       eps=eps,
                       verbose=verbose)
    assert delta < 1e-4, "[logreg.py] check_the_grad FAILED. Delta is %f" % delta
    return True
Example #4
def check_the_grad(nos=100, ind=30, outd=10, eps=1e-8, verbose=False):
    """
    Check gradient computation.

    _nos_: number of samples
    _ind_: dimension of one sample
    _outd_: number of filters
    """
    from opt import check_grad
    from misc import sqrtsqr

    ins = np.random.randn(nos, ind)
    weights = 0.001 * np.random.randn(ind, outd).ravel()

    structure = dict()
    structure["af"] = sqrtsqr
    structure["eps"] = 1e-8

    args = dict()
    args["inputs"] = ins
    args["structure"] = structure

    delta = check_grad(score, grad, weights, args, eps=eps, verbose=verbose)
    assert delta < 1e-4, "[sf.py] check_the_grad FAILED. Delta is %f" % delta
    return True
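misc.sqrtsqr is imported above but not defined on this page. A plausible reading, and only an assumption, is the smooth absolute value often used as the sparse-filtering activation, with structure["eps"] as the smoothing constant:

import numpy as np

def sqrtsqr(x, eps=1e-8):
    """Smooth |x|: sqrt(x**2 + eps), differentiable everywhere."""
    return np.sqrt(x ** 2 + eps)

def sqrtsqr_prime(x, eps=1e-8):
    """Derivative of the smooth absolute value."""
    return x / np.sqrt(x ** 2 + eps)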
Example #5
File: nn.py Project: waytai/utils
def check_the_grad(regression=True, nos=1, ind=30,
        outd=3, bxe=False, eps=1e-8, verbose=False):
    """
    Check gradient computation for Neural Networks.
    """
    #
    from opt import check_grad
    from misc import sigmoid
    from losses import xe, ssd, mia
    # number of input samples (nos)
    # with dimension ind each
    ins = np.random.randn(nos, ind)
    #
    structure = dict()
    if regression:
        # Regression
        # Network with one hidden layer
        structure["layers"] = [(ind, 15), (15, outd)]
        structure["activs"] = [np.tanh]
        structure["score"] = ssd 
        outs = np.random.randn(nos, outd)
    else:
        # Classification
        # _outd_ is interpreted as number of classes
        structure["layers"] = [(ind, 15), (15, outd)]
        structure["activs"] = [sigmoid]
        if bxe:
            structure["score"] = mia
            outs = 1.*(np.random.rand(nos, outd) > 0.5)
        else:
            structure["score"] = xe
            outs = np.random.randint(outd, size=nos)
    # weight decay
    structure["l2"] = 0.1
    weights = init_weights(structure)
    cg = dict()
    cg["inputs"] = ins
    cg["targets"] = outs
    cg["structure"] = structure
    #
    delta = check_grad(score, grad, weights, cg, eps=eps, verbose=verbose)
    assert delta < 1e-4, "[nn.py] check_the_grad FAILED. Delta is %f" % delta
    return True
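init_weights is defined elsewhere in nn.py. A minimal sketch of what it might compute, assuming the flat vector packs a (weight matrix, bias) pair per entry of structure["layers"] and mirrors the small-Gaussian-weights, zero-biases initialization seen in the other examples; the real function may differ:

import numpy as np

def init_weights(structure, scale=0.001):
    """Allocate a flat parameter vector for the given layer sizes."""
    size = sum(ind * outd + outd for ind, outd in structure["layers"])
    weights = np.zeros(size)
    pos = 0
    for ind, outd in structure["layers"]:
        # small random weight matrix, bias left at zero
        weights[pos:pos + ind * outd] = scale * np.random.randn(ind * outd)
        pos += ind * outd + outd
    return weights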
Example #6
def check_the_grad(loss,
                   nos=1,
                   idim=30,
                   hdim=10,
                   odim=5,
                   eps=1e-8,
                   verbose=False):
    """
    Check gradient computation for Neural Networks.
    """
    #
    from opt import check_grad
    from misc import sigmoid
    from losses import ssd, mia, xe
    # number of input samples (nos),
    # each of dimension idim
    ins = np.random.rand(nos, idim)
    if loss is ssd:
        outs = np.random.randn(nos, odim)
    elif loss is mia:
        outs = 1. * (np.random.rand(nos, odim) > 0.5)
    elif loss is xe:
        outs = np.random.randint(odim, size=nos)
    structure = dict()
    structure["hdim"] = hdim
    structure["odim"] = odim
    structure["af"] = sigmoid
    structure["score"] = loss
    structure["l2"] = 1e-2

    # flat parameter vector: W1 (idim*hdim), b1 (hdim), W2 (hdim*odim), b2 (odim);
    # only the weight matrices get small random values, biases start at zero
    weights = np.zeros(idim * hdim + hdim + hdim * odim + odim)
    weights[:idim * hdim] = 0.001 * np.random.randn(idim * hdim)
    weights[idim * hdim + hdim:-odim] = 0.001 * np.random.randn(hdim * odim)

    args = dict()
    args["inputs"] = ins
    args["targets"] = outs
    args["structure"] = structure
    #
    delta = check_grad(score, grad, weights, args, eps=eps, verbose=verbose)
    assert delta < 1e-4, "[flann.py] check_the_grad FAILED. Delta is %f" % delta
    return True
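The slicing above implies the following layout for the flat parameter vector of this one-hidden-layer network; the helper below only makes that inference explicit and is not part of flann.py:

def unpack_flann(weights, idim, hdim, odim):
    """Split the flat parameter vector according to the inferred layout."""
    W1 = weights[:idim * hdim].reshape(idim, hdim)              # input-to-hidden
    b1 = weights[idim * hdim:idim * hdim + hdim]                # hidden bias
    W2 = weights[idim * hdim + hdim:-odim].reshape(hdim, odim)  # hidden-to-output
    b2 = weights[-odim:]                                        # output bias
    return W1, b1, W2, b2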
Example #7
def check_the_grad(loss, nos=1, idim=30, hdim=10, odim=5, eps=1e-8, verbose=False):
    """
    Check gradient computation for Neural Networks.
    """
    #
    from opt import check_grad
    from misc import sigmoid
    from losses import ssd, mia, xe
    # number of input samples (nos),
    # each of dimension idim
    ins = np.random.rand(nos, idim)
    if loss is ssd:
        outs = np.random.randn(nos, odim)
    elif loss is mia:
        outs = 1.*(np.random.rand(nos, odim) > 0.5)
    elif loss is xe:
        outs = np.random.randint(odim, size=nos)
    structure = dict()
    structure["hdim"] = hdim
    structure["odim"] = odim
    structure["af"] = sigmoid
    structure["score"] = loss
    structure["l2"] = 1e-2

    weights = np.zeros(idim*hdim + hdim + hdim*odim + odim)
    weights[:idim*hdim] = 0.001 * np.random.randn(idim*hdim)
    weights[idim*hdim+hdim:-odim] = 0.001 * np.random.randn(hdim*odim)
    
    args = dict()
    args["inputs"] = ins
    args["targets"] = outs
    args["structure"] = structure
    #
    delta = check_grad(score, grad, weights, args, eps=eps, verbose=verbose)
    assert delta < 1e-4, "[flann.py] check_the_grad FAILED. Delta is %f" % delta
    return True
Example #8
def check_the_grad(nos=1, ind=30, outd=10, eps=1e-8, verbose=False):
    """
    Check gradient computation.
    """
    from opt import check_grad
    from misc import logcosh, sqrtsqr
    # number of input samples (nos)
    # with dimension ind each
    ins = np.random.randn(nos, ind)
    
    weights = 0.001 * np.random.randn(ind*outd)

    structure = dict()
    structure["l1"] = sqrtsqr
    structure["lmbd"] = 1 

    args = dict()
    args["inputs"] = ins
    args["structure"] = structure

    delta = check_grad(score, grad, weights, args, eps=eps, verbose=verbose)
    
    assert delta < 1e-4, "[ica.py] check_the_grad FAILED. Delta is %f" % delta
    return True
Example #9
def check_the_grad(nos=1, ind=30, outd=10, eps=1e-8, verbose=False):
    """
    Check gradient computation.
    """
    from opt import check_grad
    from misc import logcosh, sqrtsqr
    # number of input samples (nos)
    # with dimension ind each
    ins = np.random.randn(nos, ind)

    weights = 0.001 * np.random.randn(ind * outd)

    structure = dict()
    structure["l1"] = sqrtsqr
    structure["lmbd"] = 1

    args = dict()
    args["inputs"] = ins
    args["structure"] = structure

    delta = check_grad(score, grad, weights, args, eps=eps, verbose=verbose)

    assert delta < 1e-4, "[ica.py] check_the_grad FAILED. Delta is %f" % delta
    return True
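All of the examples follow the same pattern, so each module can exercise its own gradient check directly, for instance (a usage sketch, not taken from the repositories):

if __name__ == "__main__":
    # Raises AssertionError if the analytic gradient and the
    # finite-difference estimate disagree by more than 1e-4.
    check_the_grad(verbose=True)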