def _resnet_small(self,
                   inputs,
                   num_classes=None,
                   is_training=True,
                   global_pool=True,
                   output_stride=None,
                   include_root_block=True,
                   reuse=None,
                   scope='resnet_v2_small'):
     """A shallow and thin ResNet v2 for faster tests."""
     bottleneck = resnet_v2.bottleneck
     blocks = [
         resnet_utils.Block('block1', bottleneck,
                            [(4, 1, 1)] * 2 + [(4, 1, 2)]),
         resnet_utils.Block('block2', bottleneck,
                            [(8, 2, 1)] * 2 + [(8, 2, 2)]),
         resnet_utils.Block('block3', bottleneck,
                            [(16, 4, 1)] * 2 + [(16, 4, 2)]),
         resnet_utils.Block('block4', bottleneck, [(32, 8, 1)] * 2)
     ]
     return resnet_v2.resnet_v2(inputs,
                                blocks,
                                num_classes,
                                is_training=is_training,
                                global_pool=global_pool,
                                output_stride=output_stride,
                                include_root_block=include_root_block,
                                reuse=reuse,
                                scope=scope)

def testEndPointsV2(self):
     """Test the end points of a tiny v2 bottleneck network."""
     bottleneck = resnet_v2.bottleneck
     blocks = [
         resnet_utils.Block('block1', bottleneck, [(4, 1, 1), (4, 1, 2)]),
         resnet_utils.Block('block2', bottleneck, [(8, 2, 1), (8, 2, 1)])
     ]
     inputs = create_test_input(2, 32, 16, 3)
     with slim.arg_scope(resnet_utils.resnet_arg_scope()):
         _, end_points = self._resnet_plain(inputs, blocks, scope='tiny')
     expected = [
         'tiny/block1/unit_1/bottleneck_v2/shortcut',
         'tiny/block1/unit_1/bottleneck_v2/conv1',
         'tiny/block1/unit_1/bottleneck_v2/conv2',
         'tiny/block1/unit_1/bottleneck_v2/conv3',
         'tiny/block1/unit_2/bottleneck_v2/conv1',
         'tiny/block1/unit_2/bottleneck_v2/conv2',
         'tiny/block1/unit_2/bottleneck_v2/conv3',
         'tiny/block2/unit_1/bottleneck_v2/shortcut',
         'tiny/block2/unit_1/bottleneck_v2/conv1',
         'tiny/block2/unit_1/bottleneck_v2/conv2',
         'tiny/block2/unit_1/bottleneck_v2/conv3',
         'tiny/block2/unit_2/bottleneck_v2/conv1',
         'tiny/block2/unit_2/bottleneck_v2/conv2',
         'tiny/block2/unit_2/bottleneck_v2/conv3'
     ]
     self.assertItemsEqual(expected, end_points)
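
# The test above calls a _resnet_plain helper that is not shown in this excerpt.
# A minimal sketch of such a helper, assuming slim's outputs_collections
# mechanism is used to gather the end points, might look like:
def _resnet_plain(self, inputs, blocks, output_stride=None, scope=None):
    """A plain stack of ResNet blocks, without a root block or logits layer."""
    with tf.variable_scope(scope, values=[inputs]):
        with slim.arg_scope([slim.conv2d], outputs_collections='end_points'):
            net = resnet_utils.stack_blocks_dense(inputs, blocks, output_stride)
            end_points = slim.utils.convert_collection_to_dict('end_points')
    return net, end_points
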
def resnet_v1_200(inputs,
                  num_classes=None,
                  is_training=True,
                  global_pool=True,
                  output_stride=None,
                  reuse=None,
                  scope='resnet_v1_200'):
    """ResNet-200 model of [2]. See resnet_v1() for arg and return description."""
    blocks = [
        resnet_utils.Block('block1', bottleneck,
                           [(256, 64, 1)] * 2 + [(256, 64, 2)]),
        resnet_utils.Block('block2', bottleneck,
                           [(512, 128, 1)] * 23 + [(512, 128, 2)]),
        resnet_utils.Block('block3', bottleneck,
                           [(1024, 256, 1)] * 35 + [(1024, 256, 2)]),
        resnet_utils.Block('block4', bottleneck, [(2048, 512, 1)] * 3)
    ]
    return resnet_v1(inputs,
                     blocks,
                     num_classes,
                     is_training,
                     global_pool=global_pool,
                     output_stride=output_stride,
                     include_root_block=True,
                     reuse=reuse,
                     scope=scope)
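
# Illustrative usage (not from the snippet above): building ResNet-200 for
# inference under the standard resnet arg scope. The placeholder shape and
# num_classes are assumptions for the example; tf and slim are assumed to be
# imported by the surrounding module.
images = tf.placeholder(tf.float32, [None, 224, 224, 3])
with slim.arg_scope(resnet_utils.resnet_arg_scope()):
    net, end_points = resnet_v1_200(images, num_classes=1000, is_training=False)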

def _atrousValues(self, bottleneck):
    """Verify the values of dense feature extraction by atrous convolution.

    Make sure that dense feature extraction by stack_blocks_dense() followed by
    subsampling gives identical results to feature extraction at the nominal
    network output stride using the simple self._stack_blocks_nondense() above.

    Args:
      bottleneck: The bottleneck function.
    """
    blocks = [
        resnet_utils.Block('block1', bottleneck, [(4, 1, 1), (4, 1, 2)]),
        resnet_utils.Block('block2', bottleneck, [(8, 2, 1), (8, 2, 2)]),
        resnet_utils.Block('block3', bottleneck, [(16, 4, 1), (16, 4, 2)]),
        resnet_utils.Block('block4', bottleneck, [(32, 8, 1), (32, 8, 1)])
    ]
    nominal_stride = 8

    # Test both odd and even input dimensions.
    height = 30
    width = 31
    with slim.arg_scope(resnet_utils.resnet_arg_scope()):
        with slim.arg_scope([slim.batch_norm], is_training=False):
            for output_stride in [1, 2, 4, 8, None]:
                with tf.Graph().as_default():
                    with self.test_session() as sess:
                        tf.set_random_seed(0)
                        inputs = create_test_input(1, height, width, 3)
                        # Dense feature extraction followed by subsampling.
                        output = resnet_utils.stack_blocks_dense(
                            inputs, blocks, output_stride)
                        if output_stride is None:
                            factor = 1
                        else:
                            factor = nominal_stride // output_stride

                        output = resnet_utils.subsample(output, factor)
                        # Make the two networks use the same weights.
                        tf.get_variable_scope().reuse_variables()
                        # Feature extraction at the nominal network rate.
                        expected = self._stack_blocks_nondense(inputs, blocks)
                        sess.run(tf.initialize_all_variables())
                        output, expected = sess.run([output, expected])
                        self.assertAllClose(output, expected,
                                            atol=1e-4, rtol=1e-4)
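
# Two supporting pieces referenced above are not included in this excerpt. First,
# a sketch of what self._stack_blocks_nondense() plausibly looks like: it stacks
# the same blocks at the nominal stride, with no atrous (rate) adjustment, so the
# dense-plus-subsample path can be compared against it. The exact scoping is an
# assumption; each unit tuple is unpacked as (depth, depth_bottleneck, stride).
def _stack_blocks_nondense(self, net, blocks):
    """Stacks blocks at the nominal output stride, without dilation tricks."""
    for block in blocks:
        with tf.variable_scope(block.scope, 'block', values=[net]):
            for i, unit in enumerate(block.args):
                depth, depth_bottleneck, stride = unit
                with tf.variable_scope('unit_%d' % (i + 1), values=[net]):
                    net = block.unit_fn(net,
                                        depth=depth,
                                        depth_bottleneck=depth_bottleneck,
                                        stride=stride,
                                        rate=1)
    return net

# Second, a hedged example of how _atrousValues() is typically invoked from a
# concrete test case (the test name is an assumption, not part of this excerpt):
def testAtrousValuesBottleneck(self):
    self._atrousValues(resnet_v2.bottleneck)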