Code Example #1
File: SOD_DenseNet.py Project: stmutasa/SODKit
    def transition_layer_3d(self, X, scope):
        """
        3D transition layer for DenseNet: uses strided convolutions
        :param X: input
        :param scope: scope
        :return: The downsampled feature map
        """

        with tf.name_scope(scope):

            # BN and ReLU first since we're using a convolution to downsample
            conv = tf.nn.relu(
                self.batch_normalization(X, self.phase_train, None))

            # First downsample the Z axis; double the filters to prevent a bottleneck
            conv = self.convolution_3d(scope + '_down',
                                       conv, [2, 1, 1],
                                       2 * self.filters,
                                       1,
                                       'VALID',
                                       self.phase_train,
                                       BN=False,
                                       relu=False)

            # Now do the average pool to downsample the X and Y planes
            conv = tf.nn.avg_pool3d(conv, [1, 1, 2, 2, 1], [1, 1, 2, 2, 1],
                                    'SAME')

            return conv
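To make the shape arithmetic concrete, here is a minimal stand-alone sketch of the same downsampling pattern in plain TensorFlow. The sizes are illustrative only, and tf.nn.conv3d with a random kernel stands in for the class's convolution_3d helper:

import tensorflow as tf

# Toy 5D input: [batch, depth, height, width, channels].
x = tf.random.normal([1, 4, 32, 32, 8])

# The Z-axis step: a 2x1x1 kernel with stride 1 and VALID padding trims the
# depth axis by one slice (4 -> 3) while the channel count doubles (8 -> 16).
w = tf.random.normal([2, 1, 1, 8, 16])
conv = tf.nn.conv3d(x, w, strides=[1, 1, 1, 1, 1], padding='VALID')

# The average pool then halves the in-plane dimensions (32 -> 16).
conv = tf.nn.avg_pool3d(conv, [1, 1, 2, 2, 1], [1, 1, 2, 2, 1], 'SAME')
print(conv.shape)  # (1, 3, 16, 16, 16)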
Code Example #2
File: SOD_DenseNet.py Project: stmutasa/SODKit
    def dense_block_3d(self, input_x, nb_layers, layer_name, keep_prob=None):
        """
        Creates a dense block
        :param input_x: The input to this dense block (output of the prior downsample operation)
        :param nb_layers: how many layers desired
        :param layer_name: base name of this block
        :param keep_prob: The dropout keep probability (None disables dropout)
        :return: The last layer's output and the squeezed 2D projection of the full concatenation
        """

        with tf.name_scope(layer_name):

            # Running list of layer outputs for dense connectivity
            layers_concat = [input_x]

            # The first layer of this block
            conv = self.bottleneck_layer_3d(input_x,
                                            (layer_name + '_denseN_0'),
                                            keep_prob)

            # Append the first layer's output
            layers_concat.append(conv)

            # Loop through the number of layers desired
            for z in range(nb_layers):

                # Concatenate all prior layers to form this layer's input
                conv = tf.concat(layers_concat, axis=-1)

                # Create a new layer
                conv = self.bottleneck_layer_3d(
                    conv, (layer_name + '_denseN_' + str(z + 1)), keep_prob)

                # Append this layer to the running list of densely connected layers
                layers_concat.append(conv)

            # The final output must be 2D, so first retrieve the depth (Z) and channel (K) sizes
            concat = tf.concat(layers_concat, -1)
            shape = concat.get_shape().as_list()
            Fz, Ch = shape[1], shape[-1]

            # Project across the full depth axis, collapsing Z to a single slice
            concat = self.convolution_3d(layer_name + '_projection',
                                         concat, [Fz, 1, 1],
                                         Ch,
                                         1,
                                         'VALID',
                                         self.phase_train,
                                         BN=False)

            return conv, tf.squeeze(concat)
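The distinguishing step here is the final projection: a kernel whose depth equals the full Z extent, applied with VALID padding, collapses the depth axis to 1, so the squeezed result is effectively 2D. A minimal sketch with illustrative shapes (Fz = 4, Ch = 24):

import tensorflow as tf

# Suppose the block's concatenated output has depth Fz=4 and Ch=24 channels.
concat = tf.random.normal([1, 4, 16, 16, 24])

# A [Fz, 1, 1] kernel with VALID padding spans the entire depth axis, so the
# output depth is 1; tf.squeeze then drops the singleton dimensions.
w = tf.random.normal([4, 1, 1, 24, 24])
proj = tf.nn.conv3d(concat, w, [1, 1, 1, 1, 1], 'VALID')
print(proj.shape)               # (1, 1, 16, 16, 24)
print(tf.squeeze(proj).shape)   # (16, 16, 24) -- effectively 2D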
Code Example #3
File: SOD_DenseNet.py Project: stmutasa/SODKit
    def dense_block(self,
                    input_x,
                    nb_layers,
                    layer_name,
                    keep_prob=None,
                    downsample=False):
        """
        Creates a dense block connecting all same-sized filters
        :param input_x: The input to this dense block (output of prior downsample operation)
        :param nb_layers: how many layers desired
        :param layer_name: base name of this block
        :param keep_prob: The dropout keep probability (None disables dropout)
        :param downsample: whether to include a downsample (transition layer) at the end
        :return: The concatenated block output
        """

        with tf.name_scope(layer_name):

            # Running list of layer outputs for dense connectivity
            layers_concat = [input_x]

            # The first layer of this block
            conv = self.bottleneck_layer(input_x, (layer_name + '_denseN_0'),
                                         keep_prob)

            # Append the first layer's output
            layers_concat.append(conv)

            # Loop through the number of layers desired
            for z in range(nb_layers):

                # Concatenate all prior layers to form this layer's input
                conv = tf.concat(layers_concat, axis=-1)

                # Create a new layer
                conv = self.bottleneck_layer(
                    conv, (layer_name + '_denseN_' + str(z + 1)), keep_prob)

                # Append this layer to the running list of densely connected layers
                layers_concat.append(conv)

            # Combine the layers
            conv = tf.concat(layers_concat, axis=-1)

            # Downsample if requested
            if downsample:
                conv = self.transition_layer(conv,
                                             (layer_name + '_Downsample'),
                                             keep_prob)

            return conv
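The channel bookkeeping of the loop above is worth spelling out: every iteration concatenates all prior outputs, so channels grow linearly with the number of layers. A toy sketch in plain TensorFlow, where toy_layer is an illustrative stand-in for bottleneck_layer and all sizes are made up:

import tensorflow as tf

def toy_layer(x, growth):
    # Stand-in for bottleneck_layer: any op emitting `growth` channels.
    w = tf.random.normal([3, 3, x.shape[-1], growth])
    return tf.nn.relu(tf.nn.conv2d(x, w, 1, 'SAME'))

x = tf.random.normal([2, 32, 32, 16])
layers_concat = [x, toy_layer(x, 8)]
for z in range(4):  # nb_layers = 4
    layers_concat.append(toy_layer(tf.concat(layers_concat, -1), 8))

out = tf.concat(layers_concat, -1)
print(out.shape)  # (2, 32, 32, 56): 16 input + 8 * (4 + 1) new channels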
Code Example #4
    def residual_block_3d(self,
                          input_x,
                          nb_layers,
                          layer_name,
                          K,
                          F=3,
                          padding='SAME',
                          downsample=2,
                          stanford=False):
        """
        Implements a block of residual layers at the same spatial dimension in 3 dimensions
        :param input_x: Input, either from the last conv layer or the images
        :param nb_layers: number of residual layers
        :param layer_name: the baseline name of this block
        :param K: number of output feature maps
        :param F: filter (kernel) size
        :param padding: SAME or VALID
        :param downsample: 0 = none, 1 = traditional strided conv, 2 = 2.5D downsample
        :param stanford: whether to use Stanford-style layers
        :return: The block output
        """

        with tf.name_scope(layer_name):

            # The first layer of this block
            conv = self.residual_layer_3d((layer_name + '_res_0'), input_x, F,
                                          K, 1, padding, self.phase_train)

            # Loop through the number of layers desired
            for z in range(nb_layers):

                # Perform the desired operations
                conv = self.residual_layer_3d(
                    (layer_name + '_res_' + str(z + 1)), conv, F, K, 1,
                    padding, self.phase_train)

            # Perform a downsample. 0 = None, 1 = Traditional, 2 = 2.5D downsample
            if downsample == 1:
                conv = self.convolution_3d((layer_name + '_res_down_'), conv,
                                           F, K * 2, 2, padding,
                                           self.phase_train)

            elif downsample == 2:
                conv = self.convolution_3d((layer_name + '_res_down_'), conv,
                                           [2, 3, 3], K * 2, [1, 2, 2],
                                           'VALID', self.phase_train)

            return conv
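A quick shape check of the 2.5D branch (downsample == 2), with illustrative sizes: a [2, 3, 3] kernel with strides [1, 2, 2] and VALID padding trims the depth axis while roughly halving height and width.

import tensorflow as tf

x = tf.random.normal([1, 4, 32, 32, 16])   # K = 16 feature maps

# Depth: (4 - 2) + 1 = 3; height/width: (32 - 3) // 2 + 1 = 15.
w = tf.random.normal([2, 3, 3, 16, 32])    # K * 2 output maps
conv = tf.nn.conv3d(x, w, strides=[1, 1, 2, 2, 1], padding='VALID')
print(conv.shape)  # (1, 3, 15, 15, 32)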
Code Example #5
    def residual_block(self,
                       input_x,
                       nb_layers,
                       layer_name,
                       K,
                       F=3,
                       padding='SAME',
                       downsample=True,
                       stanford=False):
        """
        Implements a block of residual layers at the same spatial dimension
        :param input_x: Input, either from the last conv layer or the images
        :param nb_layers: number of residual layers
        :param layer_name: the baseline name of this block
        :param K: number of output feature maps
        :param F: filter (kernel) size
        :param padding: SAME or VALID
        :param downsample: Whether to downsample at the end
        :param stanford: whether to use Stanford-style layers
        :return: The block output
        """

        with tf.name_scope(layer_name):

            # The first layer of this block
            conv = self.residual_layer((layer_name + '_res_0'), input_x, F, K,
                                       1, padding, self.phase_train)

            # Loop through the number of layers desired
            for z in range(nb_layers):

                # Perform the desired operations
                conv = self.residual_layer((layer_name + '_res_' + str(z + 1)),
                                           conv, F, K, 1, padding,
                                           self.phase_train)

            # Downsample if requested
            if downsample:
                conv = self.residual_layer((layer_name + '_res_down_'), conv,
                                           F, K * 2, 2, padding,
                                           self.phase_train)

            return conv
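The residual_layer helper itself is not shown in this listing; for orientation, a generic residual step has the familiar identity-plus-branch form. A minimal sketch of that idea (not the repo's exact layer; shapes and filter counts are illustrative):

import tensorflow as tf

def toy_residual(x, filters):
    # Two 3x3 convs on a branch, then add the identity shortcut back in.
    w1 = tf.random.normal([3, 3, x.shape[-1], filters])
    w2 = tf.random.normal([3, 3, filters, filters])
    branch = tf.nn.relu(tf.nn.conv2d(x, w1, 1, 'SAME'))
    branch = tf.nn.conv2d(branch, w2, 1, 'SAME')
    return tf.nn.relu(x + branch)  # assumes x already has `filters` channels

x = tf.random.normal([2, 32, 32, 16])
print(toy_residual(x, 16).shape)  # (2, 32, 32, 16)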
Code Example #6
File: SOD_DenseNet.py Project: stmutasa/SODKit
    def bottleneck_layer(self, X, scope, keep_prob=None):
        """
        Implements a bottleneck layer: BN --> ReLU --> 1x1 conv --> BN/ReLU --> 3x3 conv
        :param X: input
        :param scope: scope of the operations
        :param keep_prob: The dropout keep probability (None disables dropout)
        :return: results
        """

        with tf.name_scope(scope):

            # Batch norm first
            conv = self.batch_normalization(X, self.phase_train, scope)

            # ReLU
            conv = tf.nn.relu(conv)

            # 1x1 conv: note BN and ReLU are applied by default
            conv = self.convolution(scope, conv, 1, self.filters, 1, 'SAME',
                                    self.phase_train)

            # Dropout (note this is after BN and ReLU in this case)
            if keep_prob and self.phase_train:
                conv = tf.nn.dropout(conv, keep_prob)

            # 3x3 conv; don't apply BN and ReLU
            conv = self.convolution(scope + '_2',
                                    conv,
                                    3,
                                    self.filters,
                                    1,
                                    'SAME',
                                    self.phase_train,
                                    BN=False,
                                    relu=False)

            # Dropout (note that this is before BN and ReLU)
            if keep_prob and self.phase_train:
                conv = tf.nn.dropout(conv, keep_prob)

            return conv
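For comparison, the same BN -> ReLU -> 1x1 conv -> dropout -> 3x3 conv chain can be expressed with Keras layers. This is a simplified equivalent, not the repo's implementation; the filter count and dropout rate are illustrative, and note that Keras takes a drop rate where TF1's tf.nn.dropout took keep_prob:

import tensorflow as tf

bottleneck = tf.keras.Sequential([
    tf.keras.layers.BatchNormalization(),
    tf.keras.layers.ReLU(),
    tf.keras.layers.Conv2D(16, 1, padding='same'),  # 1x1 compression
    tf.keras.layers.Dropout(0.2),                   # rate = 1 - keep_prob
    tf.keras.layers.Conv2D(16, 3, padding='same'),  # 3x3 feature conv
])

x = tf.random.normal([2, 32, 32, 8])
print(bottleneck(x, training=True).shape)  # (2, 32, 32, 16)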
Code Example #7
File: SOD_DenseNet.py Project: stmutasa/SODKit
    def transition_layer(self, X, scope, keep_prob=None):
        """
        Standard (non-wide) transition layer for DenseNet: 1x1 convolution then average pooling
        :param X: input
        :param scope: scope
        :param keep_prob: The dropout keep probability (None disables dropout)
        :return: The downsampled feature map
        """

        with tf.name_scope(scope):

            # BN first
            conv = self.batch_normalization(X, self.phase_train, scope)

            # ReLU
            conv = tf.nn.relu(conv)

            # Conv 1x1
            conv = self.convolution(scope,
                                    conv,
                                    1,
                                    self.filters,
                                    1,
                                    'SAME',
                                    self.phase_train,
                                    BN=False,
                                    relu=False)

            # Dropout
            if keep_prob and self.phase_train:
                conv = tf.nn.dropout(conv, keep_prob)

            # Average pool
            conv = tf.nn.avg_pool(conv, [1, 2, 2, 1], [1, 2, 2, 1], 'VALID')

            return conv
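The net effect of the transition is easy to verify with plain ops: a 1x1 convolution sets the channel count, then a 2x2 average pool halves height and width. A stand-alone sketch with illustrative sizes:

import tensorflow as tf

x = tf.random.normal([2, 32, 32, 48])   # e.g. a concatenated dense-block output

# 1x1 conv compresses channels (48 -> 16); avg pool halves H and W (32 -> 16).
w = tf.random.normal([1, 1, 48, 16])
conv = tf.nn.conv2d(x, w, 1, 'SAME')
conv = tf.nn.avg_pool(conv, [1, 2, 2, 1], [1, 2, 2, 1], 'VALID')
print(conv.shape)  # (2, 16, 16, 16)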