def fpn(feats, fpn_filters, strides):
    # lateral connections (1x1 conv)
    feats = [Conv_BN(i, fpn_filters, kernel_size=1, strides=1, activation=None) for i in feats]
    C3, C4, C5 = feats
    # top-down connections (nearest-neighbor upsampling + add)
    P5 = C5
    P5_up = UpSampling2D(size=2, interpolation='nearest')(P5)
    P4 = add([C4, P5_up])
    P4_up = UpSampling2D(size=2, interpolation='nearest')(P4)
    P3 = add([C3, P4_up])
    # extra pyramid levels P6/P7 via strided 3x3 convs
    P6 = Conv_BN(P5, fpn_filters, 3, strides=2, activation='relu')
    P7 = Conv_BN(P6, fpn_filters, 3, strides=2, activation='relu')
    # pick output levels by stride (P3=8, ..., P7=128)
    feature_dict = {8: P3, 16: P4, 32: P5, 64: P6, 128: P7}
    return [feature_dict[s] for s in strides]
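# Every snippet here leans on a Conv_BN helper that is not shown. A minimal
# sketch under assumed defaults (the project's own version may differ, e.g.
# the resample_feature_map comment below suggests a swish default there):
from keras.layers import Conv2D, BatchNormalization, Activation, LeakyReLU

def Conv_BN(x, n_filters, kernel_size=3, strides=1, activation='relu'):
    x = Conv2D(n_filters, kernel_size, strides=strides,
               padding='same', use_bias=False)(x)  # bias folded into BN
    x = BatchNormalization()(x)
    if activation == 'leaky':
        x = LeakyReLU(alpha=0.1)(x)
    elif activation:                               # activation=None skips it
        x = Activation(activation)(x)
    return x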
def build_bifpn(feats, config):
    # Node ids start from the input features and increase monotonically
    # whenever a new node is added. Example for levels P3 - P7:
    #   P7 (4)              P7" (12)
    #   P6 (3)    P6' (5)   P6" (11)
    #   P5 (2)    P5' (6)   P5" (10)
    #   P4 (1)    P4' (7)   P4" (9)
    #   P3 (0)              P3" (8)
    fpn_nodes = [
        {'feat_level': 6, 'inputs_offsets': [3, 4]},      # for P6'
        {'feat_level': 5, 'inputs_offsets': [2, 5]},      # for P5'
        {'feat_level': 4, 'inputs_offsets': [1, 6]},      # for P4'
        {'feat_level': 3, 'inputs_offsets': [0, 7]},      # for P3"
        {'feat_level': 4, 'inputs_offsets': [1, 7, 8]},   # for P4"
        {'feat_level': 5, 'inputs_offsets': [2, 6, 9]},   # for P5"
        {'feat_level': 6, 'inputs_offsets': [3, 5, 10]},  # for P6"
        {'feat_level': 7, 'inputs_offsets': [4, 11]},     # for P7"
    ]
    for node in fpn_nodes:
        # target spatial size comes from the input feature at the same level
        new_node_h, new_node_w = feats[node['feat_level'] - config['min_level']]._keras_shape[1:3]
        nodes_in = []
        for inputs_offset in node['inputs_offsets']:
            input_node = feats[inputs_offset]
            input_node = resample_feature_map(input_node, new_node_h, new_node_w,
                                              config['fpn_num_filters'])
            nodes_in.append(input_node)
        # weighted fusion of the resampled inputs, then a conv to refine
        new_node = Lambda(fuse_features, arguments={'weight_method': 'fast'})(nodes_in)
        new_node = Conv_BN(new_node, config['fpn_num_filters'], activation=None)
        feats.append(new_node)
    fpn_feats = feats[-5:]
    return fpn_feats  # [P3", P4", P5", P6", P7"]
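# fuse_features is not shown above. As a sketch: EfficientDet's 'fast' weight
# method is fast normalized fusion, out = sum(w_i * x_i) / (sum(w_i) + eps)
# with each w_i kept non-negative via relu. Since a Lambda cannot own trainable
# weights, a custom layer (FastFusion is a hypothetical name) expresses it more
# cleanly:
import tensorflow as tf
from keras.layers import Layer

class FastFusion(Layer):
    def __init__(self, epsilon=1e-4, **kwargs):
        super(FastFusion, self).__init__(**kwargs)
        self.epsilon = epsilon

    def build(self, input_shape):
        # one scalar weight per incoming feature map
        self.w = self.add_weight(name='fusion_w', shape=(len(input_shape),),
                                 initializer='ones', trainable=True)
        super(FastFusion, self).build(input_shape)

    def call(self, inputs):
        w = tf.nn.relu(self.w)  # keep fusion weights >= 0
        fused = tf.add_n([w[i] * x for i, x in enumerate(inputs)])
        return fused / (tf.reduce_sum(w) + self.epsilon)

    def compute_output_shape(self, input_shape):
        return input_shape[0]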
def shared_box_head(n_filters, n_anchors):
    # one head, shared across all pyramid levels (RetinaNet-style)
    inpt = Input((None, None, n_filters))
    x = inpt
    for _ in range(4):
        x = Conv_BN(x, n_filters, 3, strides=1, activation='relu')
    # 4 box-regression offsets per anchor
    x = Conv2D(4 * n_anchors, 3, strides=1, padding='same')(x)
    model = Model(inpt, x, name='shared_box_head')
    return model
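# Hypothetical wiring of the shared head over the pyramid outputs: the same
# Model instance (hence the same weights) runs on every level, and the
# per-level predictions are flattened and concatenated. Reshape with -1
# assumes static spatial shapes, i.e. a fixed input size.
from keras.layers import Reshape, Concatenate

box_head = shared_box_head(n_filters=64, n_anchors=9)
box_outputs = []
for feat in fpn_feats:                # e.g. [P3", P4", P5", P6", P7"]
    pred = box_head(feat)             # (h, w, 4 * n_anchors)
    box_outputs.append(Reshape((-1, 4))(pred))
box_preds = Concatenate(axis=1)(box_outputs)  # (total_anchors, 4)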
def resample_feature_map(x, target_h, target_w, target_c):
    # 1x1 conv (conv-bn-swish) to match the target channel count
    x = Conv_BN(x, target_c, kernel_size=1, strides=1)
    # resize: max-pool to downsample, nearest-neighbor to upsample
    h, w = x._keras_shape[1:3]
    if h > target_h and w > target_w:
        # ceil division so the pooled map is never larger than the target
        h_stride = int((h - 1) // target_h + 1)
        w_stride = int((w - 1) // target_w + 1)
        x = MaxPooling2D(pool_size=(h_stride + 1, w_stride + 1),
                         strides=(h_stride, w_stride), padding='same')(x)
    elif h <= target_h and w <= target_w:
        x = Lambda(tf.image.resize_nearest_neighbor,
                   arguments={'size': [target_h, target_w]})(x)
    else:
        raise ValueError("Incompatible target feature map size")
    return x
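# Worked example of the downsampling arithmetic: shrinking a 64x64 map to a
# 32x32 target (plain Python, illustrative numbers).
h, target_h = 64, 32
h_stride = (h - 1) // target_h + 1  # ceil(64 / 32) = 2
pool_size = h_stride + 1            # 3
# With padding='same', the output length is ceil(h / stride) = ceil(64 / 2) = 32,
# so MaxPooling2D(pool_size=3, strides=2, padding='same') hits the target exactly.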
def fpn_node(x, n_filters, up_filters, out_filters):
    # shared conv stack: alternating 1x1 / 3x3 convs
    x = Conv_BN(x, n_filters, 1, strides=1, activation='leaky')
    x = Conv_BN(x, n_filters * 2, 3, strides=1, activation='leaky')
    x = Conv_BN(x, n_filters, 1, strides=1, activation='leaky')
    x = Conv_BN(x, n_filters * 2, 3, strides=1, activation='leaky')
    x = Conv_BN(x, n_filters, 1, strides=1, activation='leaky')
    # up branch: 1x1 conv (pass-through when up_filters is falsy)
    up = Conv_BN(x, up_filters, 1, strides=1, activation='leaky') if up_filters else x
    # out branch: 3x3 conv + 1x1 conv head
    out = Conv_BN(x, n_filters * 2, 3, strides=1, activation='leaky')
    out = Conv2D(out_filters, 1, strides=1, padding='same')(out)
    return up, out
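# Hypothetical top-down wiring of fpn_node, YOLOv3-style (yolo_fpn and the
# filter counts are illustrative, not the project's exact configuration).
from keras.layers import UpSampling2D, Concatenate

def yolo_fpn(C3, C4, C5, n_classes, n_anchors=3):
    head_ch = n_anchors * (5 + n_classes)  # 4 box + 1 objectness + classes
    up5, out5 = fpn_node(C5, 512, up_filters=256, out_filters=head_ch)
    x = Concatenate()([UpSampling2D(2)(up5), C4])
    up4, out4 = fpn_node(x, 256, up_filters=128, out_filters=head_ch)
    x = Concatenate()([UpSampling2D(2)(up4), C3])
    _, out3 = fpn_node(x, 128, up_filters=None, out_filters=head_ch)
    return [out5, out4, out3]  # coarse to fine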