    def train_single(self, input_vector, target_vector):
        """
        Forward propagation
            input:  input_vector [784, 1], target_vector [10, 1]
            hidden: output_vector1 [100, 1], weights_in_hidden [100, 784],
                    output_hidden [100, 1]
            output: output_vector2 [10, 1], weights_hidden_output [10, 100],
                    output_network [10, 1]
            loss:   scalar (about 2.3 at the start for 10 classes)

        Backward propagation
            gradient [10, 1], derived1 [10, 1], tmp2 [10, 1],
            hidden_errors [100, 1], tmp5 [100, 1]
        """
        input_vector = np.array(input_vector, ndmin=2).T
        target_vector = np.array(target_vector, ndmin=2).T

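        # hidden layer: linear transform, then leaky ReLU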
        output_vector1 = np.dot(self.weights_in_hidden, input_vector)
        output_hidden = Activation.leakyReLU(output_vector1)

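        # output layer: linear transform, then leaky ReLU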
        output_vector2 = np.dot(self.weights_hidden_output, output_hidden)
        output_network = Activation.leakyReLU(output_vector2)

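        # scalar loss and its gradient w.r.t. the network output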
        loss = Cross_Entropy.calc(output_network, target_vector)
        gradient = Cross_Entropy.derived_calc(output_network, target_vector)
        # output-layer delta: chain the loss gradient through the leaky ReLU
        # derivative, evaluated at the pre-activation values
        derived1 = Derivative.leakyReLU(output_vector2)
        tmp2 = gradient * derived1
        # propagate the output error back to the hidden layer:
        hidden_errors = np.dot(self.weights_hidden_output.T, tmp2)
        tmp5 = hidden_errors * Derivative.leakyReLU(output_vector1)
        # gradient descent: step against the gradient (derived_calc is assumed
        # to return dL/d(output_network), hence the subtraction)
        self.weights_hidden_output -= self.learning_rate * np.dot(
            tmp2, output_hidden.T)
        self.weights_in_hidden -= self.learning_rate * np.dot(
            tmp5, input_vector.T)
        # return the scalar loss so callers can monitor training
        return loss

    def run(self, input_vector):
        # input_vector can be tuple, list or ndarray
        input_vector = np.array(input_vector, ndmin=2).T
        # 1st layer
        output_vector = np.dot(self.weights_in_hidden, input_vector)
        output_vector = Activation.leakyReLU(output_vector)
        # 2nd layer
        output_vector = np.dot(self.weights_hidden_output, output_vector)
        output_vector = Activation.leakyReLU(output_vector)

        return output_vector
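

# The helpers used above (Activation, Derivative, Cross_Entropy) are defined
# elsewhere in this repo. Below is a minimal sketch of what they are assumed
# to look like, consistent with how they are called here; the alpha and eps
# values are assumptions, not the original implementation.

import numpy as np


class Activation:
    @staticmethod
    def leakyReLU(x, alpha=0.01):
        # pass positive values through unchanged, scale negatives by alpha
        return np.where(x > 0, x, alpha * x)


class Derivative:
    @staticmethod
    def leakyReLU(x, alpha=0.01):
        # slope of the leaky ReLU: 1 for positive inputs, alpha otherwise
        return np.where(x > 0, 1.0, alpha)


class Cross_Entropy:
    @staticmethod
    def calc(output, target, eps=1e-12):
        # scalar cross-entropy; clip to avoid log(0)
        return -np.sum(target * np.log(np.clip(output, eps, 1.0)))

    @staticmethod
    def derived_calc(output, target, eps=1e-12):
        # dL/d(output_network); the subtraction in train_single assumes
        # this sign convention
        return -target / np.clip(output, eps, 1.0)


# Hypothetical usage (the constructor is not shown in this section; the
# attribute shapes follow the docstring above):
#
#     net = NeuralNetwork(learning_rate=0.01)  # sets weights_in_hidden
#                                              # [100, 784] and
#                                              # weights_hidden_output [10, 100]
#     loss = net.train_single(image_784, one_hot_10)
#     prediction = net.run(image_784)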