# Example no. 1
    # --- Forward pass: dense -> swish1 -> dense2 -> swish2 ---
    # NOTE(review): z1, swish1, dense, dense2, swish2, loss_fn and y_true
    # are defined earlier in the file (outside this fragment) — verify.
    sw1 = swish1.forward(z1)
    sw2 = dense2.forward(sw1)
    y_pre = swish2.forward(sw2)

    # --- Backward pass: propagate the loss gradient layer by layer,
    # mirroring the forward order in reverse ---
    d1 = loss_fn.gradient(y_true, y_pre)  # dL/dy_pred
    d2 = swish2.backward(d1)              # dL/dz2
    d3 = dense2.backward(d2)              # dL/da1
    d4 = swish1.backward(d3)              # dL/dz1
    d5 = dense.backward(d4)               # gradient w.r.t. the first layer's input
    print(d2)
    # Dense -> Activation -> Dense -> Activation -> y_pred

    # --- Forward pass ---
    z1 = dense.forward(x)
    a1 = activation1.forward(z1)
    print("Activation Value:", a1)

    z2 = dense2.forward(a1)
    a2 = activation2.forward(z2)
    y_pred = a2

    # NOTE(review): this section uses `loss_func` while the section above
    # uses `loss_fn` — confirm both objects exist or unify the name.
    loss = loss_func.loss(y_true, y_pred)

    print("Individual Loss:", loss)
    total_loss = np.mean(loss)
    print("Total Loss:", total_loss)

    # --- Backward propagation ---
    dLdy_pred = loss_func.gradient(y_true, y_pred)
    print("dLdy:", dLdy_pred)

    dLdz2 = activation2.backward(dLdy_pred)
    dLda1 = dense2.backward(dLdz2)
    # NOTE(review): the forward pass used `activation1`, but this backward
    # step calls `sigmoid` — presumably the same object; verify against the
    # definitions earlier in the file.
    dLdz1 = sigmoid.backward(dLda1)
    dLdw = dense.backward(dLdz1)