Code example #1
File: CFTest.py Project: rohrdr/DenseNetworks
def test_suite():
    """
    Runs all the tests available:
        - Cross Entropy (cost and derivative)
        
    """

    res = True
    n = 15
    m = 13
    
    costfunction = cf.CrossEntropy()
    
    y = np.abs(np.random.randn(n, m)) * 0.1
    yhat = np.abs(np.random.randn(n, m)) * 0.1
    
    cost = costfunction.get_loss(yhat, y)  # batch loss; not asserted directly, the loop below checks its derivative column by column

    ders = []    
    for i in range(m):
        new_yhat = yhat[:, i].reshape(n, 1)
        new_y = y[:, i].reshape(n, 1)
        der = costfunction.get_loss_der(new_yhat, new_y)
        num = grad_num(new_yhat, costfunction.get_loss, new_y)
        err = eval_err(num, der, "error in the " + str(i) + "-th column of Y")
        if not err:
            res = False
            print("iteration " + str(i))
            print("analytical derivative")
            print(der)
            print("numerical derivative")
            print(num)
        
        ders.append(der)

    errmsg = "error between derivative at once and one by one"
    der = costfunction.get_loss_der(yhat, y)
    ders = np.squeeze(ders).T
        
    err = eval_err(ders, der, errmsg)
            
    if not err:
        res = False
        print("all at once")
        print(der)
        print("one by one")
        print(ders)

    if res: print('All tests on Cost Functions ran successfully')

    return res
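The test relies on two helpers, grad_num and eval_err, that live elsewhere in the project and are not shown in this excerpt. A minimal sketch of what they plausibly do, assuming a central-difference gradient and an np.allclose comparison (the eps and tol defaults here are assumptions, not the project's actual values):

import numpy as np

def grad_num(x, func, *args, eps=1e-6):
    # Central-difference approximation of the derivative of func w.r.t. each
    # entry of the column vector x. Summing the output makes this work both
    # for scalar losses (get_loss) and for elementwise activations.
    grad = np.zeros_like(x)
    for i in range(x.shape[0]):
        x_plus, x_minus = x.copy(), x.copy()
        x_plus[i, 0] += eps
        x_minus[i, 0] -= eps
        grad[i, 0] = (np.sum(func(x_plus, *args)) - np.sum(func(x_minus, *args))) / (2.0 * eps)
    return grad

def eval_err(expected, actual, errmsg, tol=1e-6):
    # Compare two arrays within tolerance; report errmsg on mismatch.
    ok = np.allclose(expected, actual, atol=tol)
    if not ok:
        print(errmsg)
    return ok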
Code example #2
def test_suite():

    np.random.seed(1)

    # Each layer spec is [n_inputs, n_units, activation].
    firstlayer = [4, 3, af.ReLU()]
    secondlayer = [3, 1, af.Sigmoid()]
    layers = [firstlayer, secondlayer]
    
    my_dn = dn.DenseNN(layers, cf.CrossEntropy())
    x = np.random.randn(4, 2)
    y = np.array([1.0, 0.0]).reshape(1, 2)
    loss = my_dn.get_loss(x, y)  # initial loss before training; computed but not asserted
    
    my_dn.train_dn(x, y, maxiter=1000, print_frequency=10000, print_flag=False)

    y_target = np.array([[0.98678318, 0.16073323]])  # expected output for np.random.seed(1) and this exact setup
    yhat = my_dn.forward_propagation(x)

    res = eval_err(y_target, yhat, errmsg='error in train_dn')

    if res: print('All tests on Dense Networks ran successfully')
    
    return res
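To make the architecture above concrete: the two layer specs describe a 4-to-3 ReLU layer followed by a 3-to-1 sigmoid layer. A plain-NumPy sketch of the forward pass this implies, assuming column-wise samples and the usual (n_out, n_in) weight shapes (DenseNN's internals are not shown in this excerpt):

import numpy as np

def forward_sketch(x, W1, b1, W2, b2):
    # Layer 1: 4 -> 3 with ReLU; W1 is (3, 4), b1 is (3, 1).
    a1 = np.maximum(0.0, W1 @ x + b1)
    # Layer 2: 3 -> 1 with Sigmoid; W2 is (1, 3), b2 is (1, 1).
    z2 = W2 @ a1 + b2
    return 1.0 / (1.0 + np.exp(-z2))  # shape (1, samples), comparable to yhat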
Code example #3
    def test_inverse_derivative(x, l_re_l):
        y = l_re_l.get_inverse_activation_der(x)
        z = np.where(x <= 0.0, 100.0, 1.0)

        errmsg = err1 + "_der function of the inverse LeakyRelu" + err2

        res = eval_err(z, y, errmsg)

        return res
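The expected values follow from inverting the leaky ReLU branch-wise: the forward slope of 0.01 on the negative side becomes 1 / 0.01 = 100 in the inverse, while the positive side keeps slope 1. A standalone check of that reasoning:

import numpy as np

def leaky_relu(x, alpha=0.01):
    return np.where(x <= 0.0, alpha * x, x)

def leaky_relu_inverse(y, alpha=0.01):
    # Invert each branch: y = alpha * x for x <= 0, y = x otherwise.
    return np.where(y <= 0.0, y / alpha, y)

x = np.array([-2.0, -0.5, 0.3, 1.7])
# The round trip recovers x exactly.
assert np.allclose(leaky_relu_inverse(leaky_relu(x)), x)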
Code example #4
    def test_derivative(x, sig):

        y = sig.get_activation_der(x)
        z = np.exp(x) / np.power(1.0 + np.exp(x), 2)

        errmsg = err1 + "_der function of the Sigmoid" + err2

        res = eval_err(z, y, errmsg)

        return res
Code example #5
    def test_activation(x, sig):

        y = sig.get_activation(x)
        z = np.exp(x) / (1.0 + np.exp(x))

        errmsg = err1 + " function of the Sigmoid" + err2

        res = eval_err(z, y, errmsg)

        return res
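The reference formula np.exp(x) / (1.0 + np.exp(x)) is fine for inputs drawn from randn, but it overflows for large positive x (np.exp(1000) is inf, and inf / inf is nan). A numerically stable variant only ever exponentiates a non-positive number:

import numpy as np

def sigmoid_stable(x):
    # exp(-|x|) never overflows; the two branches are algebraically equal
    # to 1 / (1 + exp(-x)) for x >= 0 and exp(x) / (1 + exp(x)) for x < 0.
    e = np.exp(-np.abs(x))
    return np.where(x >= 0.0, 1.0 / (1.0 + e), e / (1.0 + e))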
Code example #6
    def test_derivative(x, l_re_l):

        y = l_re_l.get_activation_der(x)
        z = np.exp(x) / (1.0 + np.exp(x))

        errmsg = err1 + "_der function of the Softplus" + err2

        res = eval_err(z, y, errmsg)

        return res
Code example #7
    def test_activation(x, l_re_l):

        y = l_re_l.get_activation(x)
        z = np.log(1.0 + np.exp(x))

        errmsg = err1 + " function of the Softplus" + err2

        res = eval_err(z, y, errmsg)

        return res
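np.log(1.0 + np.exp(x)) overflows as soon as np.exp(x) does. NumPy's np.logaddexp computes log(exp(a) + exp(b)) stably, so softplus can be written as np.logaddexp(0, x):

import numpy as np

x = np.array([-50.0, 0.0, 50.0, 800.0])
naive = np.log(1.0 + np.exp(x))   # overflows to inf at x = 800
stable = np.logaddexp(0.0, x)     # log(exp(0) + exp(x)) = log(1 + exp(x))
print(naive)
print(stable)                     # finite everywhere, approaches x for large x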
Code example #8
    def test_activation(x, l_re_l):

        y = l_re_l.get_activation(x)
        z = np.where(x <= 0.0, 0.01 * x, x)

        errmsg = err1 + " function of the LeakyRelu" + err2

        res = eval_err(z, y, errmsg)

        return res
Code example #9
    def test_derivative(x, re_l):

        y = re_l.get_activation_der(x)
        z = np.where(x <= 0.0, 0.0, 1.0)

        errmsg = err1 + "_der function of the ReLU" + err2

        res = eval_err(z, y, errmsg)

        return res
Code example #10
    def test_derivative(x, tan):

        y = tan.get_activation_der(x)
        z = 1.0 / np.power(np.cosh(x), 2)

        errmsg = err1 + "_der function of the TanH" + err2

        res = eval_err(z, y, errmsg)

        return res
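The reference 1 / cosh(x)^2 is sech²(x), which equals 1 - tanh²(x); the latter form is common in backpropagation code because it reuses the already-computed forward activation. A quick consistency check:

import numpy as np

x = np.random.randn(5, 3)
t = np.tanh(x)
assert np.allclose(1.0 / np.cosh(x) ** 2, 1.0 - t ** 2)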
Code example #11
    def test_activation(x, tan):

        y = tan.get_activation(x)
        z = np.sinh(x) / np.cosh(x)

        errmsg = err1 + " function of the TanH" + err2

        res = eval_err(z, y, errmsg)

        return res
Code example #12
def num_inv_derivative(x, actfunc):
    y = actfunc.get_inverse_activation_der(x)

    nx = x.shape[0]
    ny = x.shape[1]

    new_x = x.reshape((nx * ny, 1))

    z = grad_num(new_x, actfunc.get_inverse_activation)

    new_z = z.reshape((nx, ny))

    errmsg = "error in the numerical gradient of inverse " + str(actfunc)

    res = eval_err(new_z, y, errmsg)

    return res
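The identity behind this test: for an invertible activation f, the inverse function theorem gives (f⁻¹)'(y) = 1 / f'(f⁻¹(y)), so the numerical gradient of the inverse must match the analytic inverse derivative. A self-contained illustration with tanh (whose inverse is arctanh), independent of the project's classes:

import numpy as np

y = np.array([-0.9, -0.3, 0.2, 0.8])
x = np.arctanh(y)                 # f^{-1}(y) for f = tanh
f_prime = 1.0 / np.cosh(x) ** 2   # f'(f^{-1}(y))
inv_prime = 1.0 / (1.0 - y ** 2)  # analytic derivative of arctanh
assert np.allclose(inv_prime, 1.0 / f_prime)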
Code example #13
File: LayersTest.py Project: rohrdr/DenseNetworks
def test_suite():
    """
    Runs all the tests available:
        - checks the derivative dA
        - checks the derivative dW
        - checks the derivative db
        ...
        all of the above for the Activation Functions
        - Sigmoid
        - TanH
        - ReLU
        - LeakyRelu
        - Softplus
    """
    n_x = 7
    n_y = 4
    samples = 3

    afs = [
        afuncs.Sigmoid(),
        afuncs.TanH(),
        afuncs.ReLU(),
        afuncs.LeakyRelu(),
        afuncs.Softplus()
    ]

    res = True  # must persist across activation functions, not be reset per iteration

    for af in afs:

        lay = Layer(n_x, n_y, af)

        x = np.random.randn(n_x, samples)
        y = lay.get_y(x)

        d_x = np.ones((n_y, samples))
        d_a, d_w, d_b = lay.get_grad(d_x)

        new_x = x.reshape(n_x * samples, 1)
        num = grad_num(new_x, test_x, lay)
        err = eval_err(num, d_a.reshape(n_x * samples, 1), "error in X")

        if not err:
            res = False
            print("dA:     " + str(d_a.shape))
            print(d_a)
            print("num dA: " + str(num.reshape(n_x, samples).shape))
            print(num.reshape(n_x, samples))

        new_w = lay.W.reshape(n_x * n_y, 1)
        num = grad_num(new_w, testd_w, lay) / samples
        err = eval_err(num, d_w.reshape(n_x * n_y, 1), "error in W")

        if not err:
            res = False
            print("dW:     " + str(d_w.shape))
            print(d_w)
            print("num dW: " + str(num.reshape(n_y, n_x).shape))
            print(num.reshape(n_y, n_x))

        new_b = lay.b
        num = grad_num(new_b, testd_b, lay) / samples
        err = eval_err(num, d_b, "error in b")

        if not err:
            res = False
            print("db:     " + str(d_b.shape))
            print(d_b)
            print("num dA: " + str(num.shape))
            print(num)

    if res: print('All tests on Layers ran successfully')

    return res
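The test compares the analytic gradients from get_grad against finite differences routed through the test_x, testd_w, and testd_b wrappers (defined elsewhere in LayersTest.py); the division by samples in the W and b checks suggests the layer averages gradients over the batch. For a layer y = g(W x + b) with upstream gradient dY, the standard backprop formulas look like this (a sketch under those assumptions, not the project's actual implementation):

import numpy as np

def layer_grads(x, W, b, dY, g_prime):
    z = W @ x + b                           # pre-activation, shape (n_y, samples)
    dZ = dY * g_prime(z)                    # chain rule through the activation
    m = x.shape[1]                          # batch size
    dW = dZ @ x.T / m                       # shape (n_y, n_x), batch-averaged
    db = dZ.sum(axis=1, keepdims=True) / m  # shape (n_y, 1)
    dX = W.T @ dZ                           # gradient w.r.t. the layer input
    return dX, dW, db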