Example #1
def build_backbone(features, config):
    backbone_name = config.backbone_name
    is_training_bn = config.is_training_bn
    if 'efficientnet' in backbone_name:
        override_params = {
            'relu_fn': tf.nn.swish,
            'batch_norm': utils.batch_norm_class(is_training_bn),
        }
        if 'b0' in backbone_name:
            override_params['survival_prob'] = 0.0
        if config.backbone_config is not None:
            override_params['blocks_args'] = (
                efficientnet_builder.BlockDecoder().encode(
                    config.backbone_config.blocks))
        _, endpoints = efficientnet_builder.build_model_base(
            features,
            backbone_name,
            training=is_training_bn,
            override_params=override_params)
        u2 = endpoints['reduction_2']
        u3 = endpoints['reduction_3']
        u4 = endpoints['reduction_4']
        u5 = endpoints['reduction_5']
    else:
        raise ValueError(
            'backbone model {} is not supported.'.format(backbone_name))
    return {2: u2, 3: u3, 4: u4, 5: u5}
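A minimal call-site sketch for this variant. It assumes the snippet's module-level imports (tensorflow as tf, utils, efficientnet_builder) are in place and that the repo's hparams_config module is importable; depending on the repo version, this TF1-style graph code may need to run inside a tf.Graph/session rather than eagerly.

import tensorflow as tf
import hparams_config  # assumed importable from the EfficientDet repo root

config = hparams_config.get_efficientdet_config('efficientdet-d0')  # backbone_name: efficientnet-b0
images = tf.ones([1, 512, 512, 3])  # dummy NHWC batch; shape is illustrative only
feats = build_backbone(images, config)
# feats is {2: u2, 3: u3, 4: u4, 5: u5}, with strides 4, 8, 16 and 32.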
Example #2
def build_backbone(features, config):
    """Builds backbone model.

  Args:
    features: input tensor.
    config: config for the backbone, such as is_training_bn and backbone name.

  Returns:
    A list of feature maps from the backbone, one entry per level from
    config.min_level to config.max_level.

  Raises:
    ValueError: if backbone_name is not supported.
  """
    backbone_name = config.backbone_name
    is_training = config.is_training_bn
    if 'efficientnet' in backbone_name:
        override_params = {
            'batch_norm':
            utils.batch_norm_class(is_training, config.strategy),
            'relu_fn':
            functools.partial(utils.activation_fn, act_type=config.act_type),
        }
        if 'b0' in backbone_name:
            override_params['survival_prob'] = 0.0
        if config.backbone_config is not None:
            override_params['blocks_args'] = (
                efficientnet_builder.BlockDecoder().encode(
                    config.backbone_config.blocks))
        override_params['data_format'] = config.data_format
        model_builder = backbone_factory.get_model_builder(backbone_name)
        _, endpoints = model_builder.build_model_base(
            features,
            backbone_name,
            training=is_training,
            override_params=override_params)

        all_feats = [
            features,
            endpoints['reduction_1'],
            endpoints['reduction_2'],
            endpoints['reduction_3'],
            endpoints['reduction_4'],
            endpoints['reduction_5'],
        ]
    else:
        raise ValueError(
            'backbone model {} is not supported.'.format(backbone_name))

    # Only return features within the expected levels.
    return all_feats[config.min_level:config.max_level + 1]
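The final slice is what distinguishes this variant: instead of a level-keyed dict it returns a plain list covering config.min_level through config.max_level. A small, self-contained illustration of the indexing (min_level=3 and max_level=7 are the usual EfficientDet defaults, assumed here):

# Stand-ins for the six entries of all_feats; all_feats[i] has stride 2**i.
all_feats = ['input', 'reduction_1', 'reduction_2', 'reduction_3', 'reduction_4', 'reduction_5']
min_level, max_level = 3, 7  # assumed defaults
print(all_feats[min_level:max_level + 1])  # ['reduction_3', 'reduction_4', 'reduction_5']
# Levels 6 and 7 are not produced by the backbone; the FPN adds them later by downsampling.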
Example #3
def build_backbone(features, config):
    """Builds backbone model.

  Args:
   features: input tensor.
   config: config for backbone, such as is_training_bn and backbone name.

  Returns:
    A dict from levels (0-5) to feature maps from the backbone model, with
    strides of 1, 2, 4, 8, 16 and 32.

  Raises:
    ValueError: if backbone_name is not supported.
  """
    backbone_name = config.backbone_name
    is_training_bn = config.is_training_bn
    if 'efficientnet' in backbone_name:
        override_params = {
            'batch_norm':
            utils.batch_norm_class(is_training_bn, config.strategy),
            'relu_fn':
            functools.partial(utils.activation_fn, act_type=config.act_type),
        }
        if 'b0' in backbone_name:
            override_params['survival_prob'] = 0.0
        if config.backbone_config is not None:
            override_params['blocks_args'] = (
                efficientnet_builder.BlockDecoder().encode(
                    config.backbone_config.blocks))
        override_params['data_format'] = config.data_format
        model_builder = backbone_factory.get_model_builder(backbone_name)
        _, endpoints = model_builder.build_model_base(
            features,
            backbone_name,
            training=is_training_bn,
            override_params=override_params)
        # Debug output: inspect the available endpoint names for this backbone.
        print(endpoints.keys())
        print(backbone_name)
        u1 = endpoints['reduction_1']
        u2 = endpoints['reduction_2']
        u3 = endpoints['reduction_3']
        u4 = endpoints['reduction_4']
        u5 = endpoints['reduction_5']
    else:
        raise ValueError(
            'backbone model {} is not supported.'.format(backbone_name))
    return {0: features, 1: u1, 2: u2, 3: u3, 4: u4, 5: u5}
Example #4
def get_model_params(model_name, override_params):
  """Get the block args and global params for a given model."""
  if model_name.startswith('efficientnet-lite'):
    width_coefficient, depth_coefficient, _, dropout_rate = (
        efficientnet_lite_params(model_name))
    global_params = efficientnet_lite(
        width_coefficient, depth_coefficient, dropout_rate)
  else:
    raise NotImplementedError('model name is not pre-defined: %s' % model_name)

  if override_params:
    # ValueError will be raised here if override_params has fields not included
    # in global_params.
    global_params = global_params._replace(**override_params)

  decoder = efficientnet_builder.BlockDecoder()
  blocks_args = decoder.decode(global_params.blocks_args)

  logging.info('global_params= %s', global_params)
  return blocks_args, global_params
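A short usage sketch for this helper, assuming 'efficientnet-lite0' is one of the pre-defined lite model names and that data_format is a field of the GlobalParams namedtuple (both taken from the surrounding repo, not from this excerpt):

# Override keys must be existing GlobalParams fields, otherwise _replace() raises ValueError.
blocks_args, global_params = get_model_params(
    'efficientnet-lite0', override_params={'data_format': 'channels_last'})
print(global_params.data_format)  # 'channels_last'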
Example #5
def build_backbone(features, config):
    """Builds backbone model.

  Args:
   features: input tensor.
   config: config for backbone, such as is_training_bn and backbone name.

  Returns:
    A dict from levels (2-5) to feature maps from the backbone model, with
    strides of 4, 8, 16 and 32.

  Raises:
    ValueError: if backbone_name is not supported.
  """
    backbone_name = config.backbone_name
    is_training_bn = config.is_training_bn
    if 'efficientnet' in backbone_name:
        override_params = {
            'relu_fn': utils.backbone_relu_fn,
            'batch_norm': utils.batch_norm_class(is_training_bn),
        }
        if 'b0' in backbone_name:
            override_params['survival_prob'] = 0.0
        if config.backbone_config is not None:
            override_params['blocks_args'] = (
                efficientnet_builder.BlockDecoder().encode(
                    config.backbone_config.blocks))
        _, endpoints = efficientnet_builder.build_model_base(
            features,
            backbone_name,
            training=is_training_bn,
            override_params=override_params)
        u2 = endpoints['reduction_2']
        u3 = endpoints['reduction_3']
        u4 = endpoints['reduction_4']
        u5 = endpoints['reduction_5']
    else:
        raise ValueError(
            'backbone model {} is not supported.'.format(backbone_name))
    return {2: u2, 3: u3, 4: u4, 5: u5}
Example #6
  def __init__(self, model_name=None, config=None, name=''):
    """Initialize model."""
    super().__init__(name=name)

    config = config or hparams_config.get_efficientdet_config(model_name)
    self.config = config

    # Backbone.
    backbone_name = config.backbone_name
    is_training_bn = config.is_training_bn
    if 'efficientnet' in backbone_name:
      override_params = {
          'batch_norm':
              utils.batch_norm_class(is_training_bn, config.strategy),
          'relu_fn':
              functools.partial(utils.activation_fn, act_type=config.act_type),
      }
      if 'b0' in backbone_name:
        override_params['survival_prob'] = 0.0
      if config.backbone_config is not None:
        override_params['blocks_args'] = (
            efficientnet_builder.BlockDecoder().encode(
                config.backbone_config.blocks))
      override_params['data_format'] = config.data_format
      self.backbone = backbone_factory.get_model(
          backbone_name, override_params=override_params)

    # Feature network.
    self.resample_layers = []  # additional resampling layers.
    for level in range(6, config.max_level + 1):
      # Adds a coarser level by downsampling the last feature map.
      self.resample_layers.append(
          ResampleFeatureMap(
              feat_level=(level - config.min_level),
              target_num_channels=config.fpn_num_filters,
              apply_bn=config.apply_bn_for_resampling,
              is_training_bn=config.is_training_bn,
              conv_after_downsample=config.conv_after_downsample,
              strategy=config.strategy,
              data_format=config.data_format,
              name='resample_p%d' % level,
          ))
    self.fpn_cells = FPNCells(config)

    # class/box output prediction network.
    num_anchors = len(config.aspect_ratios) * config.num_scales
    num_filters = config.fpn_num_filters
    for head in config.heads:
      if head == 'object_detection':
        self.class_net = ClassNet(
            num_classes=config.num_classes,
            num_anchors=num_anchors,
            num_filters=num_filters,
            min_level=config.min_level,
            max_level=config.max_level,
            is_training_bn=config.is_training_bn,
            act_type=config.act_type,
            repeats=config.box_class_repeats,
            separable_conv=config.separable_conv,
            survival_prob=config.survival_prob,
            strategy=config.strategy,
            data_format=config.data_format)

        self.box_net = BoxNet(
            num_anchors=num_anchors,
            num_filters=num_filters,
            min_level=config.min_level,
            max_level=config.max_level,
            is_training_bn=config.is_training_bn,
            act_type=config.act_type,
            repeats=config.box_class_repeats,
            separable_conv=config.separable_conv,
            survival_prob=config.survival_prob,
            strategy=config.strategy,
            data_format=config.data_format)

      if head == 'segmentation':
        self.seg_head = SegmentationHead(
            num_classes=config.seg_num_classes,
            num_filters=num_filters,
            min_level=config.min_level,
            max_level=config.max_level,
            is_training_bn=config.is_training_bn,
            act_type=config.act_type,
            strategy=config.strategy,
            data_format=config.data_format)
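The class that owns this constructor is not shown in the excerpt; in the Keras reimplementation it is a tf.keras.Model subclass. A minimal instantiation sketch, with the class name EfficientDetNet assumed and the input shape purely illustrative:

import tensorflow as tf

model = EfficientDetNet(model_name='efficientdet-d0')  # assumed class name
images = tf.ones([1, 512, 512, 3])  # dummy NHWC batch
outputs = model(images, training=False)  # per-level class and box predictions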