Example #1
 def _resnet_small(self,
                   inputs,
                   num_classes=None,
                   is_training=True,
                   global_pool=True,
                   output_stride=None,
                   include_root_block=True,
                   spatial_squeeze=True,
                   reuse=None,
                   scope='resnet_v1_small'):
     """A shallow and thin ResNet v1 for faster tests."""
     block = resnet_v1.resnet_v1_block
     blocks = [
         block('block1', base_depth=1, num_units=3, stride=2),
         block('block2', base_depth=2, num_units=3, stride=2),
         block('block3', base_depth=4, num_units=3, stride=2),
         block('block4', base_depth=8, num_units=2, stride=1),
     ]
     return resnet_v1.resnet_v1(inputs,
                                blocks,
                                num_classes,
                                is_training=is_training,
                                global_pool=global_pool,
                                output_stride=output_stride,
                                include_root_block=include_root_block,
                                spatial_squeeze=spatial_squeeze,
                                reuse=reuse,
                                scope=scope)
  def GetResnet50Subnetwork(self,
                            images,
                            is_training=False,
                            global_pool=False,
                            reuse=None):
    """Constructs resnet_v1_50 part of the DELF model.

    Args:
      images: A tensor of size [batch, height, width, channels].
      is_training: Whether or not the model is in training mode.
      global_pool: If True, perform global average pooling after feature
        extraction. This may be useful for DELF's descriptor fine-tuning stage.
      reuse: Whether or not the layer and its variables should be reused.

    Returns:
      net: A rank-4 tensor of size [batch, height_out, width_out, channels_out].
        If global_pool is True, height_out = width_out = 1.
      end_points: A set of activations for external use.
    """
    block = resnet_v1.resnet_v1_block
    blocks = [
        block('block1', base_depth=64, num_units=3, stride=2),
        block('block2', base_depth=128, num_units=4, stride=2),
        block('block3', base_depth=256, num_units=6, stride=2),
    ]
    if self._target_layer_type == 'resnet_v1_50/block4':
      blocks.append(block('block4', base_depth=512, num_units=3, stride=1))
    net, end_points = resnet_v1.resnet_v1(
        images,
        blocks,
        is_training=is_training,
        global_pool=global_pool,
        reuse=reuse,
        scope='resnet_v1_50')
    return net, end_points
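The method above reads instance state (self._target_layer_type), so it cannot be called on its own. Below is a minimal stand-alone sketch of the same truncated backbone, assuming TF 1.x and that the slim resnet_v1 module from tensorflow/models (research/slim) is importable as nets.resnet_v1; the function name resnet_50_conv4 is made up for illustration.

import tensorflow as tf
from nets import resnet_v1  # assumption: research/slim is on PYTHONPATH

slim = tf.contrib.slim

def resnet_50_conv4(images, is_training=False, global_pool=False, reuse=None):
  """Hypothetical stand-alone variant: resnet_v1_50 truncated after block3."""
  block = resnet_v1.resnet_v1_block
  blocks = [
      block('block1', base_depth=64, num_units=3, stride=2),
      block('block2', base_depth=128, num_units=4, stride=2),
      block('block3', base_depth=256, num_units=6, stride=2),
  ]
  return resnet_v1.resnet_v1(images, blocks, is_training=is_training,
                             global_pool=global_pool, reuse=reuse,
                             scope='resnet_v1_50')

images = tf.placeholder(tf.float32, [1, 321, 321, 3])
with slim.arg_scope(resnet_v1.resnet_arg_scope()):
  net, end_points = resnet_50_conv4(images)
# net is the block3 output, a rank-4 feature map of depth 4 * 256 = 1024.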
 def _resnet_small(self,
                   inputs,
                   num_classes=None,
                   is_training=True,
                   global_pool=True,
                   output_stride=None,
                   include_root_block=True,
                   reuse=None,
                   scope='resnet_v1_small'):
     """A shallow and thin ResNet v1 for faster tests."""
     bottleneck = resnet_v1.bottleneck
     blocks = [
         resnet_utils.Block('block1', bottleneck,
                            [(4, 1, 1)] * 2 + [(4, 1, 2)]),
         resnet_utils.Block('block2', bottleneck,
                            [(8, 2, 1)] * 2 + [(8, 2, 2)]),
         resnet_utils.Block('block3', bottleneck,
                            [(16, 4, 1)] * 2 + [(16, 4, 2)]),
         resnet_utils.Block('block4', bottleneck, [(32, 8, 1)] * 2)
     ]
     return resnet_v1.resnet_v1(inputs,
                                blocks,
                                num_classes,
                                is_training=is_training,
                                global_pool=global_pool,
                                output_stride=output_stride,
                                include_root_block=include_root_block,
                                reuse=reuse,
                                scope=scope)
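The two _resnet_small variants above describe the same architecture: resnet_v1_block is shorthand that expands into (num_units - 1) stride-1 bottleneck units followed by one unit carrying the block's stride, each with depth 4 * base_depth. The explicit (depth, depth_bottleneck, stride) tuples belong to an older resnet_utils revision; newer slim versions pass the unit arguments as dicts. A quick check of that expansion, assuming the current slim resnet_v1 is importable as nets.resnet_v1:

from nets import resnet_v1  # assumption: research/slim is on PYTHONPATH

blk = resnet_v1.resnet_v1_block('block1', base_depth=1, num_units=3, stride=2)
print(blk.scope, len(blk.args), blk.args[-1])
# Under the dict-based API the last unit should read
# {'depth': 4, 'depth_bottleneck': 1, 'stride': 2}, i.e. the (4, 1, 2) tuple
# used with resnet_utils.Block in the variant directly above.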
def resnet_v1_50_C6(inputs,
                    num_classes=None,
                    is_training=True,
                    global_pool=True,
                    output_stride=None,
                    spatial_squeeze=True,
                    reuse=None,
                    scope='resnet_v1_50',
                    initial_conv_name='conv1'):
    """Unlike the slim default we use a stride of 2 in the last block."""
    blocks = [
        resnet_v1_block('block1', base_depth=64, num_units=3, stride=1),
        resnet_v1_block('block2', base_depth=128, num_units=4, stride=2),
        resnet_v1_block('block3', base_depth=256, num_units=6, stride=2),
        resnet_v1_block('block4', base_depth=512, num_units=3, stride=2),
        resnet_v1_block('block5', base_depth=512, num_units=3, stride=2)
    ]
    return resnet_v1.resnet_v1(inputs,
                               blocks,
                               num_classes,
                               is_training,
                               global_pool=global_pool,
                               output_stride=output_stride,
                               include_root_block=True,
                               spatial_squeeze=spatial_squeeze,
                               reuse=reuse,
                               scope=scope,
                               initial_conv_name=initial_conv_name)
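Rough output-stride bookkeeping for the variant above, assuming the usual slim semantics (the root block subsamples by 4 and each block's stride is applied in its last unit). Compared with the stock resnet_v1_50 blocks, block1 keeps stride 1 while block4 and the extra block5 subsample, so the C6 feature map ends up at 1/64 of the input resolution; note that initial_conv_name is a keyword of the modified resnet_v1 used here, not of stock slim.

root_stride = 4                  # conv1 (stride 2) followed by the 3x3 max pool (stride 2)
block_strides = [1, 2, 2, 2, 2]  # block1 .. block5 as defined above
output_stride = root_stride
for s in block_strides:
  output_stride *= s
print(output_stride)             # 64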