def f1():
    # backward pass through the combined softmax activation and cross-entropy loss
    softmax_loss = Activation_Softmax_Loss_CategoricalCrossentropy()
    softmax_loss.backward(softmax_output, class_targets)
    dvalues1 = softmax_loss.dinputs
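
For context, the combined backward pass above reduces to a very simple
expression: the gradient of categorical cross-entropy with respect to the
softmax inputs is the predicted probabilities minus the one-hot targets,
averaged over the batch. A minimal standalone sketch of that computation in
plain NumPy (the function name is hypothetical; only softmax_output and
class_targets match the snippet above):

import numpy as np

def combined_backward_sketch(softmax_output, class_targets):
    # number of samples in the batch
    samples = len(softmax_output)
    # start from the predicted probabilities
    dinputs = softmax_output.copy()
    # subtract 1 at the correct-class index of each sample
    # (equivalent to subtracting the one-hot target row)
    dinputs[range(samples), class_targets] -= 1
    # normalize so the gradient is a mean over the batch
    return dinputs / samples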
Example #2
import nnfs
from nnfs.datasets import spiral_data
from libraries import Layer_Dense, Activation_ReLU
from libraries import Activation_Softmax_Loss_CategoricalCrossentropy

# seed the RNG and set the default float type
nnfs.init()

# Create dataset
x, y = spiral_data(samples=100, classes=3)

# Create Dense Layer with 2 input features and 3 output values
dense1 = Layer_Dense(2, 3)

# Create ReLU activation (to be used with Dense Layer)
activation1_relu = Activation_ReLU()

# Create second Dense layer with 3 input features (it takes the
# output of the previous layer here) and 3 output values
dense2 = Layer_Dense(3, 3)

# Create a softmax classifier's combined loss and activation
loss_activation = Activation_Softmax_Loss_CategoricalCrossentropy()

# Perform a forward pass of our training data through this layer
dense1.forward(x)

# Perform a forward pass through activation function
# takes the output of first dense layer here
activation1_relu.forward(dense1.output)

# Perform a forward pass through second Dense layer takes
# output of activation function of first layer as inputs
dense2.forward(activation1_relu.output)

# Perform a forward pass through the activation/loss function
# takes the output of the second dense layer here and returns the loss
loss = loss_activation.forward(dense2.output, y)
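
For reference, the combined object's forward step is presumably a softmax over
the layer outputs followed by the mean negative log-likelihood of the correct
classes. A minimal sketch under that assumption (standalone NumPy; the
function name is hypothetical, not this page's class):

import numpy as np

def softmax_crossentropy_forward_sketch(logits, class_targets):
    # numerically stable softmax: shift by the row maximum before exponentiating
    exp_values = np.exp(logits - logits.max(axis=1, keepdims=True))
    probs = exp_values / exp_values.sum(axis=1, keepdims=True)
    # predicted probability of the correct class for each sample,
    # clipped to avoid log(0)
    correct = np.clip(probs[range(len(probs)), class_targets], 1e-7, 1 - 1e-7)
    # categorical cross-entropy: mean negative log-probability
    return probs, np.mean(-np.log(correct))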
# Compare the gradient of the combined softmax/cross-entropy backward step
# with the gradients of the separate backward steps
import numpy as np
import nnfs
from libraries import Activation_Softmax_Loss_CategoricalCrossentropy
from libraries import Activation_Softmax
from libraries import Loss_CategoricalCrossentropy
from timeit import timeit

# seed the RNG and set the default float type
nnfs.init()

# dummy output from the softmax activation
softmax_output = np.array([[0.7, 0.1, 0.2],
                           [0.1, 0.5, 0.4],
                           [0.02, 0.9, 0.08]])

class_targets = np.array([0, 1, 1])
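
class_targets above are sparse integer labels. For illustration only, the
equivalent one-hot encoding of the same labels (the variable below is
hypothetical and not used by the rest of the snippet) would be:

# one-hot encoding of the sparse labels above
one_hot_targets = np.eye(3)[class_targets]
# [[1. 0. 0.]
#  [0. 1. 0.]
#  [0. 1. 0.]]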

# backward pass through the combined softmax activation and loss
softmax_loss = Activation_Softmax_Loss_CategoricalCrossentropy()
softmax_loss.backward(softmax_output, class_targets)

dvalues1 = softmax_loss.dinputs

# now compute the same gradient with separate activation and loss objects
activation = Activation_Softmax()
activation.output = softmax_output

# backward pass through the loss on its own
loss = Loss_CategoricalCrossentropy()
loss.backward(softmax_output, class_targets)

# backward pass through the softmax activation, using the gradient from the loss
activation.backward(loss.dinputs)
dvalues2 = activation.dinputs

print('Gradient: combined loss and activation:')
print(dvalues1)
print('Gradient: separate loss and activation:')
print(dvalues2)
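
Since both paths compute the same gradient, a quick sanity check (a usage
sketch, assuming the variables above) is to compare the two results directly:

# the combined and the separate backward passes should agree
# up to floating-point error
print(np.allclose(dvalues1, dvalues2))  # expected: True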