# Example no. 1
    def create_forward_flow(self):
        """
        Build the model's forward graph with the Keras functional API:

            [CONV2D -> BATCHNORM -> ACTIVATION -> (MAXPOOL)] * len(self.conv_layer_params)
            -> FLATTEN -> [DENSE] * len(self.fc_layer_params) -> DENSE (output)

        Layer hyper-parameters come from ``self.conv_layer_params`` and
        ``self.fc_layer_params`` (lists of dicts); optional keys fall back
        to defaults via ``dict.get``.

        Returns:
            tuple: ``(X_input, X, model)`` where
                X_input -- the Input tensor of shape ``self.input_dims``
                X       -- the output tensor of the final Dense layer
                model   -- the assembled Keras Model named ``self.name``

        Raises:
            NotImplementedError: if a conv layer requests a pooling op
                other than ``'max'``.
        """
        X_input = Input(self.input_dims)
        # Keras tensors are immutable graph nodes: plain assignment is the
        # functional-API idiom. (Original called X_input.copy(), which Keras
        # tensors do not provide.)
        X = X_input

        for i, d_params in enumerate(self.conv_layer_params):
            X = Conv2D(filters=d_params['n'],
                       kernel_size=d_params['kernel_size'],
                       strides=d_params.get('strides', (1, 1)),
                       padding=d_params.get('padding', 'SAME'),
                       name='conv{}'.format(i))(X)
            # BUG FIX: name was 'bn{}'.format(X) -- interpolating the tensor
            # yields an invalid, non-unique layer name. Use the layer index,
            # consistent with conv{i}/pool{i}.
            X = BatchNormalization(axis=3, name='bn{}'.format(i))(X)
            X = Activation(d_params.get('activation', 'relu'))(X)

            # Pooling is optional per conv layer (default: enabled).
            if not d_params.get('has_pool', True):
                continue

            if d_params.get('pool_op', 'max') == 'max':
                X = MaxPooling2D(pool_size=d_params.get('pool_size', (4, 4)),
                                 strides=d_params.get('strides_pool', (1, 1)),
                                 padding=d_params.get('padding_pool', 'same'),
                                 name='pool{}'.format(i))(X)
            else:
                # Only max pooling is supported so far.
                raise NotImplementedError

        # Flatten result of convolution layers
        X = Flatten()(X)

        # Add hidden layers of fully connected part
        for i, d_params in enumerate(self.fc_layer_params):
            X = Dense(d_params['nhn'],
                      activation=d_params.get('activation', 'relu'),
                      name='fc{}'.format(i))(X)

        # Add output layer.
        # NOTE(review): fc_layer_params[-1] was already consumed by the loop
        # above (as a hidden layer keyed on 'nhn') and is reused here keyed on
        # 'ny'/'activation' -- confirm the intended config schema carries both
        # key sets on the last entry.
        X = Dense(self.fc_layer_params[-1]['ny'],
                  activation=self.fc_layer_params[-1]['activation'],
                  name='output_layer')(X)

        model = Model(inputs=X_input, outputs=X, name=self.name)

        return X_input, X, model