Example 1
 def test_resnet_50_classification(self):
     tf.logging.set_verbosity(tf.logging.INFO)
     path = './image/butterfly.jpeg'
     with self.test_session() as sess:
         img = cv2.imread(path)
         img = cv2.resize(img, (224, 224))
         img = img[..., ::-1]  # BGR -> RGB (OpenCV reads images as BGR)
         img = img[np.newaxis, ...]  # add a batch dimension
         img = tf.convert_to_tensor(img)
         image = tf.image.convert_image_dtype(img, dtype=tf.float32)
         image = tf.subtract(image, 0.5)  # map [0, 1] to [-0.5, 0.5] ...
         image = tf.multiply(image, 2.0)  # ... then to [-1, 1]
         with lib.arg_scope(lib.research.resnet_beta_arg_scope()):
             model = lib.research.ResNet_V1_50_beta((224, 224, 3),
                                                    is_training=False,
                                                    global_pool=True,
                                                    num_classes=1001)
         outputs = model(image)
         model.load_weights(
             '../research/resnet_v1_beta/checkpoint/resnet_v1_50/model.ckpt'
         )
         self.assertListEqual(outputs.get_shape().as_list(), [1, 1001])
         sess.run(tf.global_variables_initializer())
         predict = sess.run(outputs)[0]
         print(np.argmax(predict))
         # writer = tf.summary.FileWriter("D:/GeekGank/workspace/graph", tf.get_default_graph())
         # writer.close()
         self.assertEqual(np.argmax(predict), 323)
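These snippets omit their module-level imports. Running them assumes roughly the following (TensorFlow 1.x API: tf.logging, tf.summary.FileWriter, tf.global_variables_initializer); the module imported under the alias `lib` is not shown in the examples:

import time

import cv2
import numpy as np
import tensorflow as tf

# plus the project package that provides `lib.Input`, `lib.arg_scope`,
# `lib.research`, `lib.layers`, etc., imported under the alias `lib`
# (its actual module name is not shown in these snippets)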
Example 2
 def test_mobilenet_v2_forward(self):
     start = time.time()
     inputs = lib.Input((224, 224, 3))
     with lib.arg_scope(lib.research.resnet_beta_arg_scope()):
         model = lib.research.MobileNetV2(3,
                                          num_classes=1001,
                                          multiplier=0.25)
     outputs = model(inputs)
     print(time.time() - start)
     self.assertListEqual(outputs.get_shape().as_list(), [1, 1001])
     writer = tf.summary.FileWriter(
         "D:/GeekGank/workspace/graph/model_graph", tf.get_default_graph())
     writer.close()
Example 3
 def test_network_weights(self):
     inputs = lib.Input((224, 224, 3), batch_size=2)
     with lib.arg_scope(lib.research.resnet_beta_arg_scope()):
         net = lib.research.ResNet_V1_50_beta(num_classes=1001,
                                              global_pool=True)
     # net = lib.research.MobileNetV2(3, 10)
     net.train()
     _ = net(inputs)
     weights = set()
     for w in net.weights:
         if w in weights:
             print('repeat', w)
         else:
             weights.add(w)
             print(w)
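If the goal is to fail the test on duplicate registrations rather than eyeballing the printed output, the same setup can end with an assertion instead; a minimal variant, assuming `net.weights` yields hashable variable objects as above:

 def test_network_weights_unique(self):
     inputs = lib.Input((224, 224, 3), batch_size=2)
     with lib.arg_scope(lib.research.resnet_beta_arg_scope()):
         net = lib.research.ResNet_V1_50_beta(num_classes=1001,
                                              global_pool=True)
     net.train()
     _ = net(inputs)
     # a duplicate registration would make the set strictly smaller than the list
     self.assertEqual(len(net.weights), len(set(net.weights)))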
Example 4
 def test_resnet(self):
     start = time.time()
     inputs = lib.Input((224, 224, 3))
     with lib.arg_scope(lib.research.resnet_beta_arg_scope()):
         model = lib.research.ResNet_V1_101_beta((224, 224, 3),
                                                 is_training=False,
                                                 global_pool=True,
                                                 num_classes=1001)
     outputs = model(inputs)
     print(time.time() - start)
     node = getattr(outputs, '_anchor')[0]
     self.assertEqual(node.layer.name, 'resnet_v1_101/')
     self.assertListEqual(outputs.get_shape().as_list(), [1, 1001])
     writer = tf.summary.FileWriter(
         "D:/GeekGank/workspace/graph/model_graph", tf.get_default_graph())
     writer.close()
Example 5
    def __init__(self,
                 in_channels,
                 num_classes=1001,
                 endpoints=None,
                 prediction_fn=None,
                 base_only=False,
                 output_stride=None,
                 explicit_padding=False,
                 min_depth=None,
                 divisible_by=None,
                 multiplier=1.0,
                 block_def=None,
                 **kwargs):
        super(MobileNetV2, self).__init__(**kwargs)
        self.bottlenecks = lib.LayerList()
        self.num_classes = num_classes
        self.prediction_fn = prediction_fn
        if endpoints is not None:
            if not isinstance(endpoints, list):
                raise TypeError(
                    "Expected `endpoints` to be a list, but got %s" %
                    str(endpoints))
        self.endpoints = endpoints
        self._endpoints = {}

        if not base_only and output_stride is not None and output_stride != 32:
            raise ValueError(
                "`output_stride` can only be 32 when `base_only` is False, "
                "but %d was given." % output_stride)
        self.base_only = base_only

        if output_stride is not None and output_stride not in [8, 16, 32]:
            raise ValueError(
                'Only allowed output_stride values are 8, 16, 32.')

        depth_args = {}
        if min_depth is not None:
            depth_args['min_depth'] = min_depth
        if divisible_by is not None:
            depth_args['divisible_by'] = divisible_by
        if multiplier <= 0:
            raise ValueError('`multiplier` must be greater than zero.')

        if output_stride is not None:
            if output_stride == 0 or (output_stride > 1 and output_stride % 2):
                raise ValueError(
                    'Output stride must be None, 1 or a multiple of 2.')

        current_stride = 1
        rate = 1
        block_def = block_def if block_def else BLOCK_DEF
        # `in_channels` is updated in the loop below so each bottleneck
        # consumes the previous block's output width
        with lib.arg_scope([lib.layers.BatchNorm], trainable=self.trainable):
            for block in block_def:
                for i, unit in enumerate(block.args):
                    params = dict(unit)
                    block_stride = params.get('stride', 1)
                    params['in_channels'] = in_channels
                    params['out_channels'] = params.pop('multiplier_func')(
                        multiplier, **depth_args)
                    in_channels = params["out_channels"]
                    if output_stride is not None and current_stride == output_stride:
                        layer_stride = 1
                        layer_rate = rate
                        rate *= block_stride
                    else:
                        layer_stride = block_stride
                        layer_rate = rate
                        current_stride *= block_stride
                    params['stride'] = layer_stride
                    if layer_rate > 1:
                        if tuple(params.get('kernel_size', [])) != (1, 1):
                            params['rate'] = layer_rate
                    if explicit_padding:
                        params['explicit_padding'] = explicit_padding
                    self.bottlenecks.append(block.object(**params))
        self.logit_layer = lib.layers.Conv2D(out_channels=self.num_classes,
                                             kernel_size=(1, 1),
                                             activation=None,
                                             name="conv2d_1x1")