Example #1
    def forward(self, X):
        """
        Performs the forward pass of the model.

        :param X: Input data of shape N x D. Each X[i] is a training sample.
        :return: Predicted value for the data in X, shape N x 1
                 1-dimensional array of length N with the classification scores.
        """

        self.cache = {}
        self.reg = {}
        X = X.reshape(X.shape[0], -1)
        # Unpack variables from the params dictionary
        for i in range(self.num_layer - 1):
            W, b = self.params['W' + str(i + 1)], self.params['b' + str(i + 1)]

            # Forward i_th layer
            X, cache_affine = affine_forward(X, W, b)
            self.cache["affine" + str(i + 1)] = cache_affine

            # Activation function
            X, cache_sigmoid = self.activation.forward(X)
            self.cache["sigmoid" + str(i + 1)] = cache_sigmoid

            # Store the reg for the current W
            self.reg['W' + str(i + 1)] = np.sum(W ** 2) * self.reg_strength

        # The last layer has no activation function
        W, b = self.params['W' + str(self.num_layer)],\
               self.params['b' + str(self.num_layer)]
        y, cache_affine = affine_forward(X, W, b)
        self.cache["affine" + str(self.num_layer)] = cache_affine
        self.reg['W' + str(self.num_layer)] = np.sum(W ** 2) * self.reg_strength

        return y
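
Every example on this page delegates the linear step to an affine_forward helper that is not shown here. A minimal sketch of what such a helper might look like, assuming the input is already flattened to shape (N, D) and the cache simply stores the inputs the backward pass would need (the exact cache layout is an assumption, not the reference implementation):

import numpy as np

def affine_forward(X, W, b):
    # Fully connected layer: out = X @ W + b.
    # X: (N, D) inputs, W: (D, M) weights, b: (M,) bias.
    out = X @ W + b
    # The backward pass needs the original inputs, so keep them in the cache.
    cache = (X, W, b)
    return out, cache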
Example #2
    def forward(self, X):
        ########################################################################
        # TODO:  Your forward here                                             #
        ########################################################################
        self.cache = {}
        self.reg = {}
        X = X.reshape(X.shape[0], -1)
        for i in range(self.num_layer - 1):
            W, b = self.params['W' + str(i + 1)], self.params['b' + str(i + 1)]

            X, cache_affine = affine_forward(X, W, b)
            self.cache['affine' + str(i + 1)] = cache_affine

            X, cache_Relu = self.activation.forward(X)
            self.cache['Relu' + str(i + 1)] = cache_Relu
            # X, cache_Tanh = self.activation.forward(X)
            # self.cache['Tanh' + str(i + 1)] = cache_Tanh
            # X, cache_LRelu = self.activation.forward(X)
            # self.cache['LeakyRelu' + str(i + 1)] = cache_LRelu

            self.reg['W' + str(i + 1)] = np.sum(W ** 2) * self.reg_strength

        W, b = self.params['W' + str(self.num_layer)], self.params['b' + str(self.num_layer)]
        y, cache_affine = affine_forward(X, W, b)
        self.cache['affine' + str(self.num_layer)] = cache_affine
        self.reg['W' + str(self.num_layer)] = np.sum(W ** 2) * self.reg_strength

        ########################################################################
        #                           END OF YOUR CODE                           #
        ########################################################################
        return y
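
Example #2 selects the non-linearity through self.activation.forward, with Tanh and LeakyReLU left as commented-out alternatives. A minimal sketch of a ReLU activation object following the same (out, cache) convention as the affine layer (the class name and cache contents are assumptions):

import numpy as np

class Relu:
    def forward(self, X):
        # Element-wise max(0, x); the input is cached for the backward pass.
        out = np.maximum(0, X)
        cache = X
        return out, cache

    def backward(self, dout, cache):
        # Gradient passes through only where the input was positive.
        X = cache
        return dout * (X > 0)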
Example #3
    def forward(self, X):
        """
        Performs the forward pass of the model.

        :param X: Input data of shape N x D. Each X[i] is a training sample.
        :return: Predicted value for the data in X, shape N x 1
                 1-dimensional array of length N with housing prices.
        """
        # Unpack variables from the params dictionary
        W1, b1 = self.params['W1'], self.params['b1']
        W2, b2 = self.params['W2'], self.params['b2']

        cache_affine1 = None
        cache_sigmoid = None
        cache_affine2 = None
        y = None

        ########################################################################
        # TODO                                                                 #
        # Implement the forward pass using the layers you implemented.         #
        # It consists of 3 steps:                                              #
        #   1. Forward the first affine layer                                  #
        #   2. Forward the sigmoid layer                                       #
        #   3. Forward the second affine layer                                 #
        # (Don't forget the caches)                                            #
        ########################################################################

        # Forward first layer
        h, cache_affine1 = affine_forward(X, W1, b1)

        # Activation function
        h_, cache_sigmoid = sigmoid_forward(h)

        # Forward second layer
        y, cache_affine2 = affine_forward(h_, W2, b2)

        ########################################################################
        #                           END OF YOUR CODE                           #
        ########################################################################

        self.cache = {
            'affine1': cache_affine1,
            'sigmoid': cache_sigmoid,
            'affine2': cache_affine2
        }

        # Estimate the number of operations and the memory used by the forward pass
        batch_size = X.shape[0]
        self.num_operation = batch_size * self.input_size * self.hidden_size + \
            batch_size * self.hidden_size + batch_size * self.hidden_size * 1
        self.memory_forward = sys.getsizeof(cache_affine1) + sys.getsizeof(
            cache_affine2) + sys.getsizeof(cache_sigmoid)
        self.memory = self.memory_forward

        return y
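
Example #3 also calls a sigmoid_forward helper that is not shown. A minimal sketch, assuming it caches its own output, which is all the backward pass needs because d/dx sigmoid(x) = sigmoid(x) * (1 - sigmoid(x)):

import numpy as np

def sigmoid_forward(X):
    # Element-wise logistic sigmoid.
    out = 1.0 / (1.0 + np.exp(-X))
    # Caching the output is sufficient for the gradient computation.
    cache = out
    return out, cache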
Example #4
    def forward(self, X):
        out = None
        ########################################################################
        # TODO:  Your forward here                                             #
        ########################################################################
        self.cache = {}
        self.reg = {}
        X = X.reshape(X.shape[0], -1)
        # Unpack variables from the params dictionary
        for i in range(self.num_layer - 1):
            W, b = self.params['W' + str(i + 1)], self.params['b' + str(i + 1)]

            # Forward i_th layer
            X, cache_affine = affine_forward(X, W, b)
            self.cache["affine" + str(i + 1)] = cache_affine

            # Activation function
            X, cache_sigmoid = self.activation.forward(X)
            self.cache["sigmoid" + str(i + 1)] = cache_sigmoid

            # Store the reg for the current W
            self.reg['W' + str(i + 1)] = np.sum(W ** 2) * self.reg_strength

        # The last layer has no activation function
        W, b = self.params['W' + str(self.num_layer)],\
               self.params['b' + str(self.num_layer)]
        y, cache_affine = affine_forward(X, W, b)
        self.cache["affine" + str(self.num_layer)] = cache_affine
        self.reg['W' + str(self.num_layer)] = np.sum(W ** 2) * self.reg_strength

        ########################################################################
        #                           END OF YOUR CODE                           #
        ########################################################################
        return y
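
All of the examples above store a per-layer L2 penalty of the form np.sum(W ** 2) * self.reg_strength in self.reg. A small, self-contained sketch of how such terms are typically combined with the data loss during training (the variable names here are illustrative, not taken from the course code):

import numpy as np

reg_strength = 0.1
params = {'W1': np.random.randn(4, 3), 'W2': np.random.randn(3, 1)}

# One L2 term per weight matrix, as computed in the forward passes above.
reg = {name: np.sum(W ** 2) * reg_strength for name, W in params.items()}

# The regularization contribution that would be added to the data loss.
total_reg = sum(reg.values())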