Example #1
0
        def _call(inputs):
            """Forward pass of this block: optional expansion, projection,
            and an optional identity skip connection.

            Args:
                inputs: input feature tensor for the block.

            Returns:
                The block's output tensor.
            """
            # NOTE(review): normalized the log message to a single space so it
            # matches the formatting used by the other block implementations.
            logging.info('Block %s input shape: %s', self.name, inputs.shape)
            if self._block_args.expand_ratio != 1:
                # 1x1 expansion conv followed by BN + activation.
                x = self._relu_fn(
                    self._bn0(self._expand_conv(inputs), training=training))
            else:
                x = inputs
            logging.info('Expand shape: %s', x.shape)

            self.endpoints = {'expansion_output': x}

            # Projection conv + BN; intentionally no activation (linear bottleneck).
            x = self._bn1(self._project_conv(x), training=training)
            # Add identity so that quantization-aware training can insert quantization
            # ops correctly.
            x = tf.identity(x)
            if self._clip_projection_output:
                x = tf.clip_by_value(x, -6, 6)

            if self._block_args.id_skip:
                # Skip connection only applies with stride 1 and matching channels.
                if all(
                        s == 1 for s in self._block_args.strides
                ) and self._block_args.input_filters == self._block_args.output_filters:
                    # Apply only if skip connection presents.
                    if survival_prob:
                        x = utils.drop_connect(x, training, survival_prob)
                    x = tf.add(x, inputs)
            logging.info('Project shape: %s', x.shape)
            return x
def box_net(images,
            level,
            num_anchors,
            num_filters,
            is_training,
            act_type,
            repeats=4,
            separable_conv=True,
            survival_prob=None,
            strategy=None,
            data_format='channels_last'):
  """Box regression network."""
  # Pick the convolution op once; every layer below shares the same factory.
  if separable_conv:
    conv_op = functools.partial(
        tf.layers.separable_conv2d,
        depth_multiplier=1,
        data_format=data_format,
        pointwise_initializer=tf.initializers.variance_scaling(),
        depthwise_initializer=tf.initializers.variance_scaling())
  else:
    conv_op = functools.partial(
        tf.layers.conv2d,
        data_format=data_format,
        kernel_initializer=tf.random_normal_initializer(stddev=0.01))

  for rep in range(repeats):
    skip = images
    images = conv_op(
        images,
        num_filters,
        kernel_size=3,
        activation=None,
        bias_initializer=tf.zeros_initializer(),
        padding='same',
        name='box-%d' % rep)
    images = utils.batch_norm_act(
        images,
        is_training,
        act_type=act_type,
        init_zero=False,
        strategy=strategy,
        data_format=data_format,
        name='box-%d-bn-%d' % (rep, level))

    # Residual connection with stochastic depth for every layer after the first.
    if rep > 0 and survival_prob:
      images = utils.drop_connect(images, is_training, survival_prob)
      images = images + skip

  # Final prediction layer: 4 regression targets per anchor.
  return conv_op(
      images,
      4 * num_anchors,
      kernel_size=3,
      bias_initializer=tf.zeros_initializer(),
      padding='same',
      name='box-predict')
Example #3
0
 def _call(image):
     """Conv + BN (+ optional activation) with a stochastic-depth residual."""
     skip = image
     image = conv_op(image)
     image = bn(image, training=training)
     if self.act_type:
         image = utils.activation_fn(image, act_type)
     # Residual with drop-connect for every layer after the first.
     if i > 0 and self.survival_prob:
         image = utils.drop_connect(image, training, self.survival_prob)
         image = image + skip
     return image
Example #4
0
        def _call(inputs):
            """Block forward pass: optional super-pixel transform, fused conv
            or expand+depthwise conv, optional SE, projection, optional skip.
            """
            logging.info('Block %s input shape: %s', self.name, inputs.shape)
            y = inputs

            # creates conv 2x2 kernel
            if self.super_pixel:
                y = self.super_pixel(y, training)
                logging.info('SuperPixel %s: %s', self.name, y.shape)

            if self._block_args.fused_conv:
                # If use fused mbconv, skip expansion and use regular conv.
                y = self._relu_fn(
                    self._bn1(self._fused_conv(y), training=training))
                logging.info('Conv2D shape: %s', y.shape)
            else:
                # Otherwise, first apply expansion and then apply depthwise conv.
                if self._block_args.expand_ratio != 1:
                    y = self._relu_fn(
                        self._bn0(self._expand_conv(y), training=training))
                    logging.info('Expand shape: %s', y.shape)

                y = self._relu_fn(
                    self._bn1(self._depthwise_conv(y), training=training))
                logging.info('DWConv shape: %s', y.shape)

            # Squeeze-and-excitation, when configured.
            if self._se:
                y = self._se(y)

            self.endpoints = {'expansion_output': y}

            # Projection conv + BN; no activation (linear bottleneck).
            y = self._bn2(self._project_conv(y), training=training)
            # Add identity so that quantization-aware training can insert quantization
            # ops correctly.
            y = tf.identity(y)
            if self._clip_projection_output:
                y = tf.clip_by_value(y, -6, 6)
            if self._block_args.id_skip:
                # Skip connection requires stride 1 and matching channel counts.
                stride_one = all(s == 1 for s in self._block_args.strides)
                same_filters = (
                    self._block_args.input_filters ==
                    self._block_args.output_filters)
                if stride_one and same_filters:
                    # Apply only if skip connection presents.
                    if survival_prob:
                        y = utils.drop_connect(y, training, survival_prob)
                    y = tf.add(y, inputs)
            logging.info('Project shape: %s', y.shape)
            return y