Example #1
File: nn.py  Project: pombredanne/utils-1
def check_the_grad(regression=True, nos=1, ind=30, outd=3, bxe=False, eps=1e-8, verbose=False):
    """
    Check gradient computation for Neural Networks.
    """
    #
    from misc import sigmoid, identy, check_grad
    from losses import xe, ssd, mia

    # number of input samples (nos)
    # with dimension ind each
    ins = np.random.randn(nos, ind)
    #
    structure = dict()
    if regression:
        # Regression
        # Network with one hidden layer
        structure["layers"] = [(ind, 15), (15, outd)]
        # the last layer also needs to be specified.
        # it should be a linear layer (see score functions in losses.py).
        structure["activs"] = [np.tanh, identy]
        structure["score"] = ssd
        outs = np.random.randn(nos, outd)
    else:
        # Classification
        # _outd_ is interpreted as number of classes
        structure["layers"] = [(ind, 15), (15, outd)]
        # the last layer also needs to be specified.
        structure["activs"] = [sigmoid, identy]
        if bxe:
            structure["score"] = mia
            outs = 1.0 * (np.random.rand(nos, outd) > 0.5)
        else:
            structure["score"] = xe
            # random_integers is deprecated/removed in recent NumPy;
            # randint(outd) draws the same labels from [0, outd)
            outs = np.random.randint(outd, size=nos)
    # weight decay
    structure["l2"] = 0.1
    weights = init_weights(structure)
    cg = dict()
    cg["inputs"] = ins
    cg["targets"] = outs
    cg["structure"] = structure
    #
    delta = check_grad(score, grad, weights, cg, eps=eps, verbose=verbose)
    assert delta < 1e-4, "[nn.py] check_the_grad FAILED. Delta is %f" % delta
    return True
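The check_grad helper imported from misc compares the analytic gradient returned by grad against a finite-difference estimate of score; np, init_weights, score and grad are evidently module-level names in nn.py. A minimal sketch of such a checker is given below, assuming the hypothetical calling convention score(weights, **cg) -> scalar loss and grad(weights, **cg) -> flat gradient (the project's actual misc.check_grad may differ):

import numpy as np

def check_grad(f, fprime, wts, args, eps=1e-8, verbose=False):
    # Hypothetical finite-difference gradient check: f(wts, **args) returns
    # the scalar loss, fprime(wts, **args) the flat analytic gradient.
    analytic = fprime(wts, **args)
    numeric = np.zeros_like(wts)
    for i in range(wts.size):
        w_plus, w_minus = wts.copy(), wts.copy()
        w_plus[i] += eps
        w_minus[i] -= eps
        # central difference approximation of d(loss)/d(wts[i])
        numeric[i] = (f(w_plus, **args) - f(w_minus, **args)) / (2 * eps)
    # relative error between analytic and numeric gradients
    delta = np.linalg.norm(analytic - numeric) / (
        np.linalg.norm(analytic) + np.linalg.norm(numeric) + 1e-12)
    if verbose:
        print("check_grad: relative error %g" % delta)
    return delta

With eps around 1e-8 and a correct analytic gradient, the relative error should sit well below the 1e-4 threshold asserted above.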
Example #2
def check_the_grad(eps=1e-6, verbose=False):
    """
    """
    from misc import check_grad, identy
    from losses import ssd

    structure = {}
    structure['layers'] = [(1, 1, 2, 2), (1, 1, 2, 2)]
    structure['pools'] = [(2, 2), (2, 2)]
    structure['activs'] = [identy, identy]
    structure['score'] = ssd
    structure['l2'] = 0.

    weights = init_weights(structure)
    inpts = np.random.randn(2, 1, 7, 7)
    trgts = np.random.randn(2).reshape(2, 1)
    cg = dict()
    cg["inputs"] = inpts
    cg["targets"] = trgts
    cg["structure"] = structure
    #
    delta = check_grad(score, grad, weights, cg, eps=eps, verbose=verbose)
    assert delta < 1e-4, "check_the_grad FAILED. Delta is %f" % delta
    return True
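The 4-tuples in structure['layers'] presumably encode channel counts and filter sizes. Under that assumption (channels_in, channels_out, filter_h, filter_w), with 'valid' convolutions and non-overlapping pooling, a quick shape walk-through shows why a (2, 1, 7, 7) input ends up with one value per sample, matching trgts of shape (2, 1):

def output_shape(inpt_shape, layers, pools):
    # Hypothetical helper, not part of the project: track the spatial shape
    # through 'valid' convolutions followed by non-overlapping pooling.
    n, c, h, w = inpt_shape
    for (c_in, c_out, fh, fw), (ph, pw) in zip(layers, pools):
        h, w = h - fh + 1, w - fw + 1   # valid convolution
        h, w = h // ph, w // pw         # pooling
        c = c_out
    return (n, c, h, w)

# (2, 1, 7, 7) -> (2, 1, 3, 3) -> (2, 1, 1, 1): one scalar per sample.
print(output_shape((2, 1, 7, 7), [(1, 1, 2, 2), (1, 1, 2, 2)], [(2, 2), (2, 2)]))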