Example #1
0
def apply_activation_fun(data, activation="relu"):
    """Apply the named activation function to *data*.

    Args:
        data: Input passed straight through to the selected activation
            (presumably a tensor/array — whatever ``A``'s functions accept).
        activation: One of "relu", "softmax", "tanh", "softplus",
            "swish", or "sigmoid". Defaults to "relu".

    Returns:
        The result of the selected activation applied to *data*.

    Raises:
        ValueError: If *activation* is not one of the supported names.
            (The original if/elif chain silently returned ``None`` here,
            which hides typos from callers.)
    """
    # A dispatch table replaces the long if/elif chain: one lookup,
    # and unknown names fail loudly instead of falling off the end.
    dispatch = {
        "relu": A.relu,
        "softmax": A.softmax,
        "tanh": A.tanh,
        "softplus": A.softplus,
        "swish": A.swish,
        "sigmoid": A.sigmoid,
    }
    try:
        fn = dispatch[activation]
    except KeyError:
        raise ValueError(
            "unsupported activation: {!r}".format(activation)
        ) from None
    return fn(data)
Example #2
0
File: layer.py  Project: polowis/neatevo
    def feed_forward(self, layer):
        """Propagate this layer's node values forward into *layer*.

        Resets the target node values, accumulates each source node's
        weighted contribution, adds the bias weights, then squashes
        every target value through tanh.
        """
        targets = layer.nodes

        # Reset one target value per bias weight before accumulating.
        for idx in range(len(self.bias.weights)):
            targets[idx].value = 0

        # Each source node contributes value * weight to every target
        # it connects to.
        for node in self.nodes:
            for idx, w in enumerate(node.weights):
                targets[idx].value += node.value * w

        # Fold in the bias term for each target.
        for idx, b in enumerate(self.bias.weights):
            targets[idx].value += b

        # Squash with tanh as the activation function.
        for node in targets:
            node.value = Activation.tanh(node.value)