Example #1
0
def _deep_fusion_fc_layers(num_layers, layer_sizes, input_rois, input_weights,
                           fusion_method, l2_weight_decay, keep_prob,
                           num_final_classes, box_rep, is_training):
    """Builds deep-fusion fully connected layers.

    Fuses the input ROI features once up front, then at every layer depth
    runs a separate fc + dropout branch per input and re-fuses the branch
    outputs, before building the final output layers.

    Args:
        num_layers: Number of fused fc layer depths to build.
        layer_sizes: List of fc layer sizes; length must equal num_layers.
        input_rois: List of ROI feature tensors, one per branch.
        input_weights: Weights used by the feature fusion.
        fusion_method: Fusion method passed to feature_fusion.
        l2_weight_decay: L2 regularization strength; <= 0 disables it.
        keep_prob: Dropout keep probability.
        num_final_classes: Number of classes for the output layers.
        box_rep: Box representation passed to build_output_layers.
        is_training: Whether the network is in training mode.

    Returns:
        Output layers from build_output_layers.

    Raises:
        ValueError: If num_layers does not match len(layer_sizes).
    """
    # Same sanity check as the other fc-layer builders in this file
    if num_layers != len(layer_sizes):
        raise ValueError('num_layers does not match length of layer_sizes')

    if l2_weight_decay > 0:
        weights_regularizer = slim.l2_regularizer(l2_weight_decay)
    else:
        weights_regularizer = None

    # Apply fusion on the raw input ROIs first
    fusion_layer = avod_fc_layer_utils.feature_fusion(fusion_method,
                                                      input_rois,
                                                      input_weights,
                                                      is_training)
    fusion_layer = slim.flatten(fusion_layer, scope='flatten')

    with slim.arg_scope([slim.fully_connected],
                        weights_regularizer=weights_regularizer):
        # Build layers
        for layer_idx in range(num_layers):
            # Scope names start at fc6 by convention
            fc_name_idx = 6 + layer_idx

            all_branches = []
            for branch_idx in range(len(input_rois)):
                fc_layer = slim.fully_connected(fusion_layer,
                                                layer_sizes[layer_idx],
                                                scope='br{}_fc{}'.format(
                                                    branch_idx, fc_name_idx))
                fc_drop = slim.dropout(fc_layer,
                                       keep_prob=keep_prob,
                                       is_training=is_training,
                                       scope='br{}_fc{}_drop'.format(
                                           branch_idx, fc_name_idx))

                all_branches.append(fc_drop)

            # Re-fuse the per-branch outputs at this depth
            fusion_layer = avod_fc_layer_utils.feature_fusion(
                fusion_method, all_branches, input_weights, is_training)

        # Output layers
        output_layers = build_output_layers(fusion_layer, num_final_classes,
                                            box_rep)
    return output_layers
Example #2
0
def _basic_fc_layers(num_layers, layer_sizes, input_rois, input_weights,
                     fusion_method, l2_weight_decay, keep_prob,
                     num_final_classes, box_rep, is_training):
    """Builds separate fc branches for classification, offsets, and angles.

    Fuses the input ROI features once, then for each output head ('cls',
    'off', 'ang') stacks num_layers fc + dropout layers on the flattened
    fused features and builds that head's output layers.

    Args:
        num_layers: Number of fc layers per output head.
        layer_sizes: List of fc layer sizes; length must equal num_layers.
        input_rois: List of ROI feature tensors to fuse.
        input_weights: Weights used by the feature fusion.
        fusion_method: Fusion method passed to feature_fusion.
        l2_weight_decay: L2 regularization strength; <= 0 disables it.
        keep_prob: Dropout keep probability.
        num_final_classes: Number of classes for the output layers.
        box_rep: Box representation passed to build_output_layers.
        is_training: Whether the network is in training mode.

    Returns:
        Tuple of (cls_logits, offsets, angles) output layers.

    Raises:
        ValueError: If num_layers does not match len(layer_sizes).
    """
    if num_layers != len(layer_sizes):
        raise ValueError('num_layers does not match length of layer_sizes')

    if l2_weight_decay > 0:
        weights_regularizer = slim.l2_regularizer(l2_weight_decay)
    else:
        weights_regularizer = None

    # Feature fusion
    fused_features = avod_fc_layer_utils.feature_fusion(
        fusion_method, input_rois, input_weights)
    output_names = ['cls', 'off', 'ang']
    cls_logits = None
    offsets = None
    angles = None

    with slim.arg_scope([slim.fully_connected],
                        weights_regularizer=weights_regularizer):
        for output in output_names:
            # Flatten
            fc_drop = slim.flatten(fused_features, scope=output + '_flatten')
            for layer_idx in range(num_layers):
                # Scope names start at fc6 by convention
                fc_name_idx = 6 + layer_idx

                fc_layer = slim.fully_connected(fc_drop,
                                                layer_sizes[layer_idx],
                                                scope=output +
                                                '_fc{}'.format(fc_name_idx))

                fc_drop = slim.dropout(fc_layer,
                                       keep_prob=keep_prob,
                                       is_training=is_training,
                                       scope=output +
                                       '_fc{}_drop'.format(fc_name_idx))

            if output == 'cls':
                cls_logits = build_output_layers(fc_drop, num_final_classes,
                                                 box_rep, output)
            elif output == 'off':
                offsets = build_output_layers(fc_drop, num_final_classes,
                                              box_rep, output)
            elif output == 'ang':
                angles = build_output_layers(fc_drop, num_final_classes,
                                             box_rep, output)

    return cls_logits, offsets, angles
Example #3
0
def _late_fusion_fc_layers(num_layers, layer_sizes, input_rois, input_weights,
                           fusion_method, l2_weight_decay, keep_prob,
                           num_final_classes, box_rep, is_training):
    """Builds late-fusion fully connected layers.

    Runs an independent stack of fc + dropout layers per input branch,
    fuses the branch outputs once at the end, then builds the output
    layers on the fused features.

    Args:
        num_layers: Number of fc layers per branch.
        layer_sizes: List of fc layer sizes; length must equal num_layers.
        input_rois: List of ROI feature tensors, one per branch.
        input_weights: Weights used by the feature fusion.
        fusion_method: Fusion method passed to feature_fusion.
        l2_weight_decay: L2 regularization strength; <= 0 disables it.
        keep_prob: Dropout keep probability.
        num_final_classes: Number of classes for the output layers.
        box_rep: Box representation passed to build_output_layers.
        is_training: Whether the network is in training mode.

    Returns:
        Output layers from build_output_layers.

    Raises:
        ValueError: If num_layers does not match len(layer_sizes).
    """
    # Same sanity check as the other fc-layer builders in this file
    if num_layers != len(layer_sizes):
        raise ValueError('num_layers does not match length of layer_sizes')

    if l2_weight_decay > 0:
        weights_regularizer = slim.l2_regularizer(l2_weight_decay)
    else:
        weights_regularizer = None

    # Build fc layers, one branch per input
    num_branches = len(input_rois)
    branch_outputs = []

    with slim.arg_scope([slim.fully_connected],
                        weights_regularizer=weights_regularizer):
        for branch_idx in range(num_branches):

            # Branch feature ROIs
            branch_rois = input_rois[branch_idx]
            fc_drop = slim.flatten(branch_rois,
                                   scope='br{}_flatten'.format(branch_idx))

            for layer_idx in range(num_layers):
                # Scope names start at fc6 by convention
                fc_name_idx = 6 + layer_idx

                fc_layer = slim.fully_connected(fc_drop,
                                                layer_sizes[layer_idx],
                                                scope='br{}_fc{}'.format(
                                                    branch_idx, fc_name_idx))

                fc_drop = slim.dropout(fc_layer,
                                       keep_prob=keep_prob,
                                       is_training=is_training,
                                       scope='br{}_fc{}_drop'.format(
                                           branch_idx, fc_name_idx))

            branch_outputs.append(fc_drop)

        # Feature fusion
        fused_features = avod_fc_layer_utils.feature_fusion(
            fusion_method, branch_outputs, input_weights)

        # Output layers
        output_layers = build_output_layers(fused_features, num_final_classes,
                                            box_rep)
    return output_layers
Example #4
0
def _early_fusion_fc_layers(num_layers, layer_sizes, input_rois, input_weights,
                            fusion_method, l2_weight_decay, keep_prob,
                            num_final_classes, box_rep, is_training):
    """Builds early-fusion fully connected layers.

    Fuses the input ROI features once up front, then stacks num_layers
    fc + dropout layers on the flattened fused features and builds the
    output layers on top.

    Args:
        num_layers: Number of fc layers to build.
        layer_sizes: List of fc layer sizes; length must equal num_layers.
        input_rois: List of ROI feature tensors to fuse.
        input_weights: Weights used by the feature fusion.
        fusion_method: Fusion method passed to feature_fusion.
        l2_weight_decay: L2 regularization strength; <= 0 disables it.
        keep_prob: Dropout keep probability.
        num_final_classes: Number of classes for the output layers.
        box_rep: Box representation passed to build_output_layers.
        is_training: Whether the network is in training mode.

    Returns:
        Output layers from build_output_layers.

    Raises:
        ValueError: If num_layers does not match len(layer_sizes).
    """
    if num_layers != len(layer_sizes):
        raise ValueError('num_layers does not match length of layer_sizes')

    if l2_weight_decay > 0:
        weights_regularizer = slim.l2_regularizer(l2_weight_decay)
    else:
        weights_regularizer = None

    # Feature fusion
    fused_features = avod_fc_layer_utils.feature_fusion(
        fusion_method, input_rois, input_weights)

    # Flatten
    fc_drop = slim.flatten(fused_features)

    with slim.arg_scope([slim.fully_connected],
                        weights_regularizer=weights_regularizer):

        for layer_idx in range(num_layers):
            # Scope names start at fc6 by convention
            fc_name_idx = 6 + layer_idx

            fc_layer = slim.fully_connected(fc_drop,
                                            layer_sizes[layer_idx],
                                            scope='fc{}'.format(fc_name_idx))

            fc_drop = slim.dropout(fc_layer,
                                   keep_prob=keep_prob,
                                   is_training=is_training,
                                   scope='fc{}_drop'.format(fc_name_idx))

        output_layers = build_output_layers(fc_drop, num_final_classes,
                                            box_rep)
    return output_layers
Example #5
0
def _build_head_fc_layers(fc_drop, layer_sizes, keep_prob,
                          is_training, output):
    """Stacks fc + dropout layers (scoped <output>_fc6, _fc7, ...) for one
    output head and returns the final dropout tensor."""
    for layer_idx, layer_size in enumerate(layer_sizes):
        # Scope names start at fc6 by convention
        fc_name_idx = 6 + layer_idx

        fc_layer = slim.fully_connected(
            fc_drop, layer_size,
            scope=output + '_fc{}'.format(fc_name_idx))

        fc_drop = slim.dropout(
            fc_layer,
            keep_prob=keep_prob,
            is_training=is_training,
            scope=output +
            '_fc{}_drop'.format(fc_name_idx))
    return fc_drop


def _basic_fc_layers(cls_layer_sizes,
                     off_layer_sizes,
                     ang_layer_sizes,
                     input_rois, input_weights,
                     fusion_method,
                     l2_weight_decay, keep_prob,
                     num_final_classes, box_rep,
                     is_training):
    """Builds separate fc branches for classification, offsets, and angles,
    each with its own layer sizes.

    Fuses the input ROIs when there are two of them (otherwise uses the
    single input directly), then builds an independent fc + dropout stack
    and output layers per head.

    Args:
        cls_layer_sizes: List of fc layer sizes for the 'cls' head.
        off_layer_sizes: List of fc layer sizes for the 'off' head.
        ang_layer_sizes: List of fc layer sizes for the 'ang' head.
        input_rois: List of ROI feature tensors (1 or 2 entries).
        input_weights: Weights used by the feature fusion.
        fusion_method: Fusion method passed to feature_fusion.
        l2_weight_decay: L2 regularization strength; <= 0 disables it.
        keep_prob: Dropout keep probability.
        num_final_classes: Number of classes for the output layers.
        box_rep: Box representation passed to build_output_layers.
        is_training: Whether the network is in training mode.

    Returns:
        Tuple of (cls_logits, offsets, angles) output layers.
    """
    if l2_weight_decay > 0:
        weights_regularizer = slim.l2_regularizer(l2_weight_decay)
    else:
        weights_regularizer = None

    if len(input_rois) == 2:
        # Feature fusion
        fused_features = avod_fc_layer_utils.feature_fusion(fusion_method,
                                                            input_rois,
                                                            input_weights)
    else:
        fused_features = input_rois[0]

    # Per-head fc layer sizes, in build order
    head_layer_sizes = {
        'cls': cls_layer_sizes,
        'off': off_layer_sizes,
        'ang': ang_layer_sizes,
    }
    head_outputs = {}

    with slim.arg_scope(
            [slim.fully_connected],
            weights_regularizer=weights_regularizer):
        for output in ['cls', 'off', 'ang']:
            # Flatten
            fc_drop = slim.flatten(fused_features,
                                   scope=output + '_flatten')

            fc_drop = _build_head_fc_layers(fc_drop,
                                            head_layer_sizes[output],
                                            keep_prob,
                                            is_training,
                                            output)

            head_outputs[output] = build_output_layers(fc_drop,
                                                       num_final_classes,
                                                       box_rep,
                                                       output)

    return head_outputs['cls'], head_outputs['off'], head_outputs['ang']