Example #1
import nnfs
from nnfs.datasets import spiral_data

from libraries import Activation_Softmax
from libraries import Loss_CategoricalCrossentropy
from libraries import Activation_ReLU
from libraries import Layer_Dense
from libraries import Activation_Softmax_Loss_CategoricalCrossentropy

# initialize nnfs dataset
nnfs.init()

# Create dataset
x, y = spiral_data(samples=100, classes=3)

# Create Dense Layer with 2 input features and 3 output values
dense1 = Layer_Dense(2, 3)

# Create ReLU activation (to be used with Dense Layer)
activation1_relu = Activation_ReLU()

# Create second Dense layer with 3 input features (as we take the output
# of the previous layer here) and 3 output values
dense2 = Layer_Dense(3, 3)

# Create a Softmax classifier's combined loss and activation
loss_activation = Activation_Softmax_Loss_CategoricalCrossentropy()

# Perform a forward pass of our training data through this layer
dense1.forward(x)

# Perform a forward pass through the activation function;
# it takes the output of the first dense layer here
activation1_relu.forward(dense1.output)
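
# Continue the forward pass: take the ReLU output through the second Dense layer,
# then through the combined softmax activation and loss object.
# Assumption: loss_activation.forward(inputs, y_true) applies softmax and returns
# the loss value, as in the standard NNFS combined implementation.
dense2.forward(activation1_relu.output)
loss = loss_activation.forward(dense2.output, y)

# Inspect the first few softmax outputs and the loss value
print(loss_activation.output[:5])
print('loss:', loss)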
Example #2
import nnfs
from nnfs.datasets import spiral_data

from libraries import Layer_Dense
from libraries import Activation_ReLU
from libraries import Activation_Softmax


# Initializes NNFS
nnfs.init()

# Create dataset
x, y = spiral_data(samples=100, classes=3)

# Create Dense layer with 2 input features and 3 output values
dense1 = Layer_Dense(2, 3)

# Create ReLU activation (to be used with Dense Layer)
activation_relu = Activation_ReLU()

# Create second Dense layer with 3 input features (as we take output of previous layer here)
# and 3 output values
dense2 = Layer_Dense(3, 3)

# Create Softmax activation (to be used with Dense layer):
activation_softmax = Activation_Softmax()

# Make a forward pass of our training data through this layer
dense1.forward(x)

# Make a forward pass through the activation function;
# it takes the output of the first dense layer here
activation_relu.forward(dense1.output)
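
# Continue the forward pass: take the ReLU output through the second Dense layer,
# then through the softmax activation to turn its outputs into class probabilities
dense2.forward(activation_relu.output)
activation_softmax.forward(dense2.output)

# Inspect the first few probability rows (each row should sum to 1)
print(activation_softmax.output[:5])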