Example #1
0
def retinanet_head_generator(params):
    """Build a RetinaNet head from the given config `params`.

    Reads the head hyperparameters directly off the top-level params
    object and returns a `heads.RetinanetHead` instance.
    """
    # Build the normalization layer factory first so the constructor
    # call below stays a flat argument list.
    norm_op = batch_norm_relu_generator(params.batch_norm)
    return heads.RetinanetHead(params.min_level,
                               params.max_level,
                               params.num_classes,
                               params.anchors_per_location,
                               params.retinanet_head_num_convs,
                               params.retinanet_head_num_filters,
                               batch_norm_relu=norm_op)
Example #2
0
def retinanet_head_generator(params):
    """Build a RetinaNet head from the `params.retinanet_head` section.

    Returns a `heads.RetinanetHead` configured from the nested
    `retinanet_head` config plus the shared batch-norm/activation config.
    """
    cfg = params.retinanet_head
    # Normalization+activation factory is built from the top-level
    # (shared) config, not the head-specific section.
    bn_act = batch_norm_activation_generator(params.batch_norm_activation)
    return heads.RetinanetHead(
        cfg.min_level,
        cfg.max_level,
        cfg.num_classes,
        cfg.anchors_per_location,
        cfg.retinanet_head_num_convs,
        cfg.retinanet_head_num_filters,
        cfg.use_separable_conv,
        cfg.use_batch_norm,
        batch_norm_activation=bn_act)
Example #3
0
def retinanet_head_generator(params):
    """Build a RetinaNet head; anchors-per-location is derived, not configured.

    The number of anchors per location is computed from the anchor config
    (aspect ratios x scales). If the deprecated
    `retinanet_head.anchors_per_location` field is still set, a
    deprecation notice is logged and the value is ignored.
    """
    cfg = params.retinanet_head
    arch = params.architecture
    if cfg.anchors_per_location:
        logging.info('[Deprecation]: `retinanet_head.anchors_per_location` '
                     'is no longer used.')
    # One anchor per (aspect ratio, scale) combination.
    num_anchors = len(params.anchor.aspect_ratios) * params.anchor.num_scales
    bn_act = batch_norm_activation_generator(params.batch_norm_activation)
    return heads.RetinanetHead(
        arch.min_level,
        arch.max_level,
        arch.num_classes,
        num_anchors,
        cfg.num_convs,
        cfg.num_filters,
        cfg.use_separable_conv,
        params.batch_norm_activation.activation,
        cfg.use_batch_norm,
        batch_norm_activation=bn_act)