def __init__(self, config, name='fpn_cell'):
  """A single BiFPN cell.

  Resolves the FPN node topology — either the explicitly supplied
  ``config.fpn_config`` or one derived from ``config.fpn_name`` — and
  instantiates one FNode per topology entry.
  """
  super().__init__(name=name)
  self.config = config
  # An explicit fpn_config wins; otherwise derive the topology by name.
  self.fpn_config = config.fpn_config or fpn_configs.get_fpn_config(
      config.fpn_name, config.min_level, config.max_level,
      config.fpn_weight_method)
  self.fnodes = []
  for index, node_cfg in enumerate(self.fpn_config.nodes):
    logging.info('fnode %d : %s', index, node_cfg)
    self.fnodes.append(
        FNode(
            node_cfg['feat_level'] - self.config.min_level,
            node_cfg['inputs_offsets'],
            config.fpn_num_filters,
            config.apply_bn_for_resampling,
            config.is_training_bn,
            config.conv_after_downsample,
            config.conv_bn_act_pattern,
            config.separable_conv,
            config.act_type,
            strategy=config.strategy,
            weight_method=self.fpn_config.weight_method,
            data_format=config.data_format,
            name='fnode%d' % index))
def __init__(self, config, name='fpn_cells'):
  """A stack of repeated FPN cells.

  Builds ``config.fpn_cell_repeats`` FPNCell layers sharing the same
  resolved FPN topology.
  """
  super().__init__(name=name)
  self.config = config
  # An explicit fpn_config wins; otherwise derive the topology by name.
  self.fpn_config = config.fpn_config or fpn_configs.get_fpn_config(
      config.fpn_name, config.min_level, config.max_level,
      config.fpn_weight_method)
  self.cells = [
      FPNCell(self.config, name='cell_%d' % cell_index)
      for cell_index in range(self.config.fpn_cell_repeats)
  ]
def build_bifpn_layer(feats, feat_sizes, config):
  """Builds a feature pyramid given previous feature pyramid and config.

  Fix: removed the `num_output_connections` list — it was initialized,
  incremented, and extended but never read, so it was dead bookkeeping.
  `feats` growth alone drives the scope/name suffixes via `len(feats)`.

  Args:
    feats: list of feature tensors, one per existing pyramid node; fused
      nodes are appended to this list in place as they are built.
    feat_sizes: per-level mapping with 'height' and 'width' entries,
      indexed by feature level.
    config: the network config (fields such as fpn_name, min_level,
      max_level, fpn_num_filters, etc.).

  Returns:
    A dict mapping each level in [min_level, max_level] to the most
    recently built node for that level.
  """
  p = config  # use p to denote the network config.
  if p.fpn_config:
    fpn_config = p.fpn_config
  else:
    fpn_config = fpn_configs.get_fpn_config(p.fpn_name, p.min_level,
                                            p.max_level, p.fpn_weight_method)

  for i, fnode in enumerate(fpn_config.nodes):
    with tf.variable_scope('fnode{}'.format(i)):
      logging.info('fnode %d : %s', i, fnode)
      new_node_height = feat_sizes[fnode['feat_level']]['height']
      new_node_width = feat_sizes[fnode['feat_level']]['width']
      nodes = []
      for idx, input_offset in enumerate(fnode['inputs_offsets']):
        input_node = feats[input_offset]
        # Resample each input to the target node's spatial size and
        # channel count before fusion. The name embeds len(feats) so it
        # stays unique as the pyramid grows.
        input_node = resample_feature_map(
            input_node, '{}_{}_{}'.format(idx, input_offset, len(feats)),
            new_node_height, new_node_width, p.fpn_num_filters,
            p.apply_bn_for_resampling, p.is_training_bn,
            p.conv_after_downsample,
            strategy=p.strategy,
            data_format=config.data_format)
        nodes.append(input_node)

      # Fuse all resampled inputs (e.g. fast/softmax weighted sum).
      new_node = fuse_features(nodes, fpn_config.weight_method)

      with tf.variable_scope('op_after_combine{}'.format(len(feats))):
        # With conv_bn_act_pattern the activation happens inside
        # batch_norm_act below; otherwise apply it up front.
        if not p.conv_bn_act_pattern:
          new_node = utils.activation_fn(new_node, p.act_type)

        if p.separable_conv:
          conv_op = functools.partial(
              tf.layers.separable_conv2d, depth_multiplier=1)
        else:
          conv_op = tf.layers.conv2d

        new_node = conv_op(
            new_node,
            filters=p.fpn_num_filters,
            kernel_size=(3, 3),
            padding='same',
            use_bias=not p.conv_bn_act_pattern,
            data_format=config.data_format,
            name='conv')

        new_node = utils.batch_norm_act(
            new_node,
            is_training_bn=p.is_training_bn,
            act_type=None if not p.conv_bn_act_pattern else p.act_type,
            data_format=config.data_format,
            strategy=p.strategy,
            name='bn')

      feats.append(new_node)

  # For each output level, pick the last node built at that level
  # (scan the topology back-to-front; feats[-1] is the newest node).
  output_feats = {}
  for l in range(p.min_level, p.max_level + 1):
    for i, fnode in enumerate(reversed(fpn_config.nodes)):
      if fnode['feat_level'] == l:
        output_feats[l] = feats[-1 - i]
        break
  return output_feats