Example No. 1
0
    def _pnasnet(self,
                 images,
                 backbone,
                 num_classes,
                 is_training=True,
                 output_stride=16,
                 final_endpoint=None):
        """Build the small PNASNet backbone and delegate to the NAS base builder.

        Args:
            images: Input image tensor fed to the network.
            backbone: Sequence describing the cell layout; only its length is
                used here (passed to PNASCell as the total cell count).
            num_classes: Number of output classes forwarded to the base builder.
            is_training: Whether dropout/batch-norm run in training mode.
            output_stride: NOTE(review): accepted but never read in this body —
                the caller appears to encode the stride in `backbone` instead;
                confirm before removing.
            final_endpoint: Optional endpoint name at which to stop building.

        Returns:
            Whatever `nas_network._build_nas_base` returns (network outputs /
            endpoints dict — defined outside this file).
        """
        # Fixed hyper-parameters for the small PNASNet variant.
        pnas_hparams = tf.contrib.training.HParams(
            filter_scaling_rate=2.0,
            num_conv_filters=10,
            drop_path_keep_prob=1.0,
            total_training_steps=200000,
        )
        # Drop-path must be disabled at inference time. The constructed value
        # is already 1.0, so this is a defensive reset.
        if not is_training:
            pnas_hparams.set_hparam('drop_path_keep_prob', 1.0)

        pnas_cell = nas_genotypes.PNASCell(
            pnas_hparams.num_conv_filters,
            pnas_hparams.drop_path_keep_prob,
            len(backbone),
            pnas_hparams.total_training_steps)
        # Route the training flag into dropout and batch-norm layers.
        with arg_scope([slim.dropout, slim.batch_norm],
                       is_training=is_training):
            return nas_network._build_nas_base(images,
                                               cell=pnas_cell,
                                               backbone=backbone,
                                               num_classes=num_classes,
                                               hparams=pnas_hparams,
                                               reuse=tf.AUTO_REUSE,
                                               scope='pnasnet_small',
                                               final_endpoint=final_endpoint)
Example No. 2
0
def pnasnet(images,
            num_classes,
            is_training=True,
            global_pool=False,
            output_stride=16,
            nas_architecture_options=None,
            nas_training_hyper_parameters=None,
            reuse=None,
            scope='pnasnet',
            final_endpoint=None,
            sync_batch_norm_method='None'):
    """Builds the PNASNet model.

    Args:
        images: Input image tensor.
        num_classes: Number of output classes.
        is_training: Whether dropout/batch-norm run in training mode.
        global_pool: Forwarded to the base builder.
        output_stride: One of 8, 16 or 32; selects the backbone cell strides.
        nas_architecture_options: Required dict; must provide
            'nas_stem_output_num_conv_filters', 'nas_use_classification_head'
            and 'nas_remove_os32_stride'.
        nas_training_hyper_parameters: Optional dict overriding
            'drop_path_keep_prob' and 'total_training_steps'.
        reuse: Variable-reuse flag forwarded to the base builder.
        scope: Variable scope name.
        final_endpoint: Optional endpoint name at which to stop building.
        sync_batch_norm_method: Selects the batch-norm implementation via
            `utils.get_batch_norm_fn`.

    Returns:
        Whatever `_build_nas_base` returns (defined outside this block).

    Raises:
        ValueError: If `nas_architecture_options` is None or `output_stride`
            is not one of 8, 16, 32.
    """
    if nas_architecture_options is None:
        raise ValueError(
            'Using NAS model variants. nas_architecture_options cannot be None.'
        )
    model_hparams = config(num_conv_filters=nas_architecture_options[
        'nas_stem_output_num_conv_filters'])
    # Training hyper-parameters, when supplied, override the config defaults.
    if nas_training_hyper_parameters:
        for key in ('drop_path_keep_prob', 'total_training_steps'):
            model_hparams.set_hparam(key, nas_training_hyper_parameters[key])
    # Drop-path must be disabled at inference time.
    if not is_training:
        tf.logging.info('During inference, setting drop_path_keep_prob = 1.0.')
        model_hparams.set_hparam('drop_path_keep_prob', 1.0)
    tf.logging.info(model_hparams)
    # Per-cell stride multipliers for each supported output stride.
    stride_to_backbone = {
        8: [1] * 12,
        16: [1] * 4 + [2] * 8,
        32: [1] * 4 + [2] * 4 + [3] * 4,
    }
    if output_stride not in stride_to_backbone:
        raise ValueError('Unsupported output_stride ', output_stride)
    backbone = stride_to_backbone[output_stride]
    norm_fn = utils.get_batch_norm_fn(sync_batch_norm_method)
    pnas_cell = nas_genotypes.PNASCell(model_hparams.num_conv_filters,
                                       model_hparams.drop_path_keep_prob,
                                       len(backbone),
                                       model_hparams.total_training_steps,
                                       batch_norm_fn=norm_fn)
    # Route the training flag into dropout and batch-norm layers.
    with arg_scope([slim.dropout, norm_fn], is_training=is_training):
        return _build_nas_base(
            images,
            cell=pnas_cell,
            backbone=backbone,
            num_classes=num_classes,
            hparams=model_hparams,
            global_pool=global_pool,
            output_stride=output_stride,
            nas_use_classification_head=nas_architecture_options[
                'nas_use_classification_head'],
            reuse=reuse,
            scope=scope,
            final_endpoint=final_endpoint,
            batch_norm_fn=norm_fn,
            nas_remove_os32_stride=nas_architecture_options[
                'nas_remove_os32_stride'])
Example No. 3
0
def pnasnet(images,
            num_classes,
            is_training=True,
            global_pool=False,
            output_stride=16,
            nas_stem_output_num_conv_filters=20,
            nas_training_hyper_parameters=None,
            reuse=None,
            scope='pnasnet',
            final_endpoint=None):
  """Builds the PNASNet model.

  Args:
    images: Input image tensor.
    num_classes: Number of output classes.
    is_training: Whether dropout/batch-norm run in training mode.
    global_pool: Forwarded to the base builder.
    output_stride: One of 8, 16 or 32; selects the backbone cell strides.
    nas_stem_output_num_conv_filters: Number of stem output filters fed
      into `config`.
    nas_training_hyper_parameters: Optional dict overriding
      'drop_path_keep_prob' and 'total_training_steps'.
    reuse: Variable-reuse flag forwarded to the base builder.
    scope: Variable scope name.
    final_endpoint: Optional endpoint name at which to stop building.

  Returns:
    Whatever `_build_nas_base` returns (defined outside this block).

  Raises:
    ValueError: If `output_stride` is not one of 8, 16, 32.
  """
  model_hparams = config(num_conv_filters=nas_stem_output_num_conv_filters)
  # Training hyper-parameters, when supplied, override the config defaults.
  if nas_training_hyper_parameters:
    model_hparams.set_hparam(
        'drop_path_keep_prob',
        nas_training_hyper_parameters['drop_path_keep_prob'])
    model_hparams.set_hparam(
        'total_training_steps',
        nas_training_hyper_parameters['total_training_steps'])
  # Drop-path must be disabled at inference time.
  if not is_training:
    tf.logging.info('During inference, setting drop_path_keep_prob = 1.0.')
    model_hparams.set_hparam('drop_path_keep_prob', 1.0)
  tf.logging.info(model_hparams)
  # Per-cell stride multipliers for each supported output stride.
  backbones = {
      8: [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1],
      16: [1, 1, 1, 1, 2, 2, 2, 2, 2, 2, 2, 2],
      32: [1, 1, 1, 1, 2, 2, 2, 2, 3, 3, 3, 3],
  }
  try:
    backbone = backbones[output_stride]
  except KeyError:
    raise ValueError('Unsupported output_stride ', output_stride)
  pnas_cell = nas_genotypes.PNASCell(model_hparams.num_conv_filters,
                                     model_hparams.drop_path_keep_prob,
                                     len(backbone),
                                     model_hparams.total_training_steps)
  # Route the training flag into dropout and batch-norm layers.
  with arg_scope([slim.dropout, slim.batch_norm], is_training=is_training):
    return _build_nas_base(
        images,
        cell=pnas_cell,
        backbone=backbone,
        num_classes=num_classes,
        hparams=model_hparams,
        global_pool=global_pool,
        reuse=reuse,
        scope=scope,
        final_endpoint=final_endpoint)