Example #1
  def testModelHasExpectedNumberOfParameters(self):
    batch_size = 5
    height, width = 224, 224
    inputs = random_ops.random_uniform((batch_size, height, width, 3))
    with arg_scope(inception_v1.inception_v1_arg_scope()):
      inception_v1.inception_v1_base(inputs)
    total_params, _ = model_analyzer.analyze_vars(
        variables_lib.get_model_variables())
    # The InceptionV1 base (up to Mixed_5c) should contain 5,607,184 parameters.
    self.assertAlmostEqual(5607184, total_params)
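The test snippets on this page are taken from the TF-Slim InceptionV1 unit tests and omit their module-level imports. A minimal sketch of the imports the examples assume, based on the TensorFlow 1.x tf.contrib layout (exact module paths can differ between 1.x releases); each testXxx method is meant to be defined on a test.TestCase subclass:

# Assumed TF 1.x contrib imports for the test snippets on this page.
from tensorflow.contrib.framework.python.ops import arg_scope
from tensorflow.contrib.framework.python.ops import variables as variables_lib
from tensorflow.contrib.slim.python.slim import model_analyzer
from tensorflow.contrib.slim.python.slim.nets import inception_v1
from tensorflow.python.framework import ops
from tensorflow.python.ops import random_ops
from tensorflow.python.platform import test  # provides test.TestCase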
Example #2
  def testBuildAndCheckAllEndPointsUptoMixed5c(self):
    batch_size = 5
    height, width = 224, 224

    inputs = random_ops.random_uniform((batch_size, height, width, 3))
    _, end_points = inception_v1.inception_v1_base(
        inputs, final_endpoint='Mixed_5c')
    endpoints_shapes = {
        'Conv2d_1a_7x7': [5, 112, 112, 64],
        'MaxPool_2a_3x3': [5, 56, 56, 64],
        'Conv2d_2b_1x1': [5, 56, 56, 64],
        'Conv2d_2c_3x3': [5, 56, 56, 192],
        'MaxPool_3a_3x3': [5, 28, 28, 192],
        'Mixed_3b': [5, 28, 28, 256],
        'Mixed_3c': [5, 28, 28, 480],
        'MaxPool_4a_3x3': [5, 14, 14, 480],
        'Mixed_4b': [5, 14, 14, 512],
        'Mixed_4c': [5, 14, 14, 512],
        'Mixed_4d': [5, 14, 14, 512],
        'Mixed_4e': [5, 14, 14, 528],
        'Mixed_4f': [5, 14, 14, 832],
        'MaxPool_5a_2x2': [5, 7, 7, 832],
        'Mixed_5b': [5, 7, 7, 832],
        'Mixed_5c': [5, 7, 7, 1024]
    }

    self.assertItemsEqual(endpoints_shapes.keys(), end_points.keys())
    for endpoint_name in endpoints_shapes:
      expected_shape = endpoints_shapes[endpoint_name]
      self.assertTrue(endpoint_name in end_points)
      self.assertListEqual(end_points[endpoint_name].get_shape().as_list(),
                           expected_shape)
Example #3
  def testHalfSizeImages(self):
    batch_size = 5
    height, width = 112, 112

    inputs = random_ops.random_uniform((batch_size, height, width, 3))
    mixed_5c, _ = inception_v1.inception_v1_base(inputs)
    self.assertTrue(mixed_5c.op.name.startswith('InceptionV1/Mixed_5c'))
    self.assertListEqual(mixed_5c.get_shape().as_list(),
                         [batch_size, 4, 4, 1024])
Example #4
def inception():
    # Build the InceptionV1 base in inference mode and average-pool the
    # 7x7x1024 Mixed_5c output down to a 1x1x1024 feature tensor.
    image = tf.placeholder(tf.float32, [None, 224, 224, 3], 'image')
    with slim.arg_scope(inception_arg_scope(is_training=False)):
        with variable_scope.variable_scope(
                'InceptionV1', 'InceptionV1', [image, 1000], reuse=None) as scope:
            # Keep batch norm and dropout in inference mode.
            with arg_scope(
                    [layers_lib.batch_norm, layers_lib.dropout], is_training=False):
                net, end_points = inception_v1_base(image, scope=scope)
                with variable_scope.variable_scope('Logits'):
                    # The op is an average pool despite the 'MaxPool' scope name
                    # (kept as in the source).
                    net_conv = layers_lib.avg_pool2d(
                        net, [7, 7], stride=1, scope='MaxPool_0a_7x7')
    print(net_conv.shape)

    return net_conv, image
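A short usage sketch for the function above, assuming TensorFlow 1.x and a locally available InceptionV1 checkpoint ('inception_v1.ckpt' is a placeholder path): restore the pretrained variables and run the pooled feature extractor on a dummy batch.

import numpy as np
import tensorflow as tf

net_conv, image = inception()

# Restore the InceptionV1 weights from a pretrained checkpoint
# ('inception_v1.ckpt' is a placeholder path).
inception_vars = tf.get_collection(tf.GraphKeys.GLOBAL_VARIABLES, scope='InceptionV1')
saver = tf.train.Saver(inception_vars)

with tf.Session() as sess:
    saver.restore(sess, 'inception_v1.ckpt')
    dummy_batch = np.zeros((1, 224, 224, 3), dtype=np.float32)  # stand-in for real images
    features = sess.run(net_conv, feed_dict={image: dummy_batch})
    print(features.shape)  # averaging the 7x7x1024 Mixed_5c output gives (1, 1, 1, 1024)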
Example #5
  def testBuildBaseNetwork(self):
    batch_size = 5
    height, width = 224, 224

    inputs = random_ops.random_uniform((batch_size, height, width, 3))
    mixed_5c, end_points = inception_v1.inception_v1_base(inputs)
    self.assertTrue(mixed_5c.op.name.startswith('InceptionV1/Mixed_5c'))
    self.assertListEqual(mixed_5c.get_shape().as_list(),
                         [batch_size, 7, 7, 1024])
    expected_endpoints = [
        'Conv2d_1a_7x7', 'MaxPool_2a_3x3', 'Conv2d_2b_1x1', 'Conv2d_2c_3x3',
        'MaxPool_3a_3x3', 'Mixed_3b', 'Mixed_3c', 'MaxPool_4a_3x3', 'Mixed_4b',
        'Mixed_4c', 'Mixed_4d', 'Mixed_4e', 'Mixed_4f', 'MaxPool_5a_2x2',
        'Mixed_5b', 'Mixed_5c'
    ]
    self.assertItemsEqual(end_points.keys(), expected_endpoints)
Example #6
  def testBuildOnlyUptoFinalEndpoint(self):
    batch_size = 5
    height, width = 224, 224
    endpoints = [
        'Conv2d_1a_7x7', 'MaxPool_2a_3x3', 'Conv2d_2b_1x1', 'Conv2d_2c_3x3',
        'MaxPool_3a_3x3', 'Mixed_3b', 'Mixed_3c', 'MaxPool_4a_3x3', 'Mixed_4b',
        'Mixed_4c', 'Mixed_4d', 'Mixed_4e', 'Mixed_4f', 'MaxPool_5a_2x2',
        'Mixed_5b', 'Mixed_5c'
    ]
    for index, endpoint in enumerate(endpoints):
      with ops.Graph().as_default():
        inputs = random_ops.random_uniform((batch_size, height, width, 3))
        out_tensor, end_points = inception_v1.inception_v1_base(
            inputs, final_endpoint=endpoint)
        self.assertTrue(
            out_tensor.op.name.startswith('InceptionV1/' + endpoint))
        self.assertItemsEqual(endpoints[:index + 1], end_points)
Example #7
 def build_graph(self):
     with arg_scope(inception_v1.inception_v1_arg_scope()):
         #with variable_scope.variable_scope(None, 'InceptionV1', [self.image, 21], reuse=None) as scope:
         with arg_scope([layers_lib.batch_norm, layers_lib.dropout],
                        is_training=True):
             logit, endpoints = inception_v1.inception_v1_base(self.image)
             net = endpoints['Mixed_3c']
             self.feature_map.append(
                 conv_layer(net, [3, 3, 480, 4 * (class_num + 4)],
                            [1, 1, 1, 1], 'FeatureMap_1', 'SSD',
                            self.is_training, self.reuse))
             net = endpoints['Mixed_4f']
             net = conv_layer(net, [3, 3, 832, 1024], [1, 1, 1, 1], 'conv6',
                              'SSD', self.is_training, self.reuse)
             net = conv_layer(net, [1, 1, 1024, 1024], [1, 1, 1, 1],
                              'conv7', 'SSD', self.is_training, self.reuse)
             self.feature_map.append(
                 conv_layer(net, [3, 3, 1024, 6 * (class_num + 4)],
                            [1, 1, 1, 1], 'FeatureMap_2', 'SSD',
                            self.is_training, self.reuse))
             net = conv_layer(net, [1, 1, 1024, 256], [1, 1, 1, 1],
                              'conv8_1', 'SSD', self.is_training,
                              self.reuse)
             net = conv_layer(net, [3, 3, 256, 512], [1, 2, 2, 1],
                              'conv8_2', 'SSD', self.is_training,
                              self.reuse)
             self.feature_map.append(
                 conv_layer(net, [3, 3, 512, 6 * (class_num + 4)],
                            [1, 1, 1, 1], 'FeatureMap_3', 'SSD',
                            self.is_training, self.reuse))
             net = conv_layer(net, [1, 1, 512, 128], [1, 1, 1, 1],
                              'conv9_1', 'SSD', self.is_training,
                              self.reuse)
             net = conv_layer(net, [3, 3, 128, 256], [1, 2, 2, 1],
                              'conv9_2', 'SSD', self.is_training,
                              self.reuse)
             self.feature_map.append(
                 conv_layer(net, [3, 3, 256, 6 * (class_num + 4)],
                            [1, 1, 1, 1], 'FeatureMap_4', 'SSD',
                            self.is_training, self.reuse))
             net = conv_layer(net, [1, 1, 256, 128], [1, 1, 1, 1],
                              'conv10_1', 'SSD', self.is_training,
                              self.reuse)
             net = conv_layer(net, [3, 3, 128, 256], [1, 1, 1, 1],
                              'conv10_2',
                              'SSD',
                              self.is_training,
                              self.reuse,
                              padding='VALID')
             self.feature_map.append(
                 conv_layer(net, [3, 3, 256, 6 * (class_num + 4)],
                            [1, 1, 1, 1], 'FeatureMap_5', 'SSD',
                            self.is_training, self.reuse))
             net = conv_layer(net, [1, 1, 256, 128], [1, 1, 1, 1],
                              'conv11_1', 'SSD', self.is_training,
                              self.reuse)
             net = conv_layer(net, [3, 3, 128, 256], [1, 1, 1, 1],
                              'conv11_2',
                              'SSD',
                              self.is_training,
                              self.reuse,
                              padding='VALID')
             self.feature_map.append(
                 conv_layer(net, [1, 1, 256, 6 * (class_num + 4)],
                            [1, 1, 1, 1], 'FeatureMap_6', 'SSD',
                            self.is_training, self.reuse))
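The example above builds SSD detection heads on top of inception_v1_base, taking Mixed_3c and Mixed_4f as the first two feature maps and stacking extra convolutions for the remaining scales. The conv_layer helper is project-specific and not shown; the following is a hypothetical sketch inferred only from the call sites (kernel shape, strides, layer name, outer scope, is_training, reuse, optional padding), not the original implementation.

import tensorflow as tf

def conv_layer(inputs, kernel_shape, strides, name, scope,
               is_training, reuse, padding='SAME'):
    # Hypothetical helper matching the call sites above:
    # convolution + batch norm + ReLU under scope/name.
    with tf.variable_scope(scope, reuse=reuse):
        with tf.variable_scope(name):
            weights = tf.get_variable(
                'weights', kernel_shape,
                initializer=tf.glorot_uniform_initializer())
            net = tf.nn.conv2d(inputs, weights, strides=strides, padding=padding)
            net = tf.layers.batch_normalization(net, training=is_training)
            return tf.nn.relu(net)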