def forward(self, net, batched_inputs):
    del batched_inputs
    cls_net = wnnl.non_local_blockv4(net,
                                     scope="NonLocalROIHeadsHook_clsv2",
                                     normalizer_fn=wnnl.evo_norm_s0,
                                     activation_fn=None,
                                     n_head=4,
                                     weighed_sum=False)
    reg_net = wnnl.non_local_blockv4(net,
                                     scope="NonLocalROIHeadsHook_regv2",
                                     normalizer_fn=wnnl.evo_norm_s0,
                                     n_head=4,
                                     activation_fn=None,
                                     weighed_sum=False)
    return cls_net, reg_net
def forward(self, features, batched_inputs):
    del batched_inputs
    res = OrderedDict()
    normalizer_fn, normalizer_params = odt.get_norm("evo_norm_s0",
                                                    is_training=self.is_training)
    normalizer_params['G'] = 8
    with tf.variable_scope("NonLocalBackboneHookV3"):
        for k, v in features.items():
            if k[0] not in ["C", "P"]:
                continue
            level = int(k[1:])
            if level <= 2:
                res[k] = v
                continue
            h = self.base_size // (2 ** level)
            w = self.base_size // (2 ** level)
            v = wnnl.non_local_blockv4(v,
                                       inner_dims=[128, 128, 128],
                                       normalizer_fn=normalizer_fn,
                                       normalizer_params=normalizer_params,
                                       n_head=2,
                                       activation_fn=None,
                                       weighed_sum=False,
                                       scope=f"non_localv4_{level}",
                                       size=[h, w])
            res[k] = v
    return res
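# --- Illustrative sketch (not part of the hook above) ---
# A minimal, framework-free example of the level/size bookkeeping the backbone
# hook performs: feature keys such as "P3".."P5" are parsed for their pyramid
# level, levels <= 2 are passed through unchanged, and the attention map size
# is base_size // 2**level. The key names and base_size value used here are
# assumptions for illustration only.
def _example_level_sizes(feature_keys=("P2", "P3", "P4", "P5"), base_size=512):
    sizes = {}
    for k in feature_keys:
        if k[0] not in ("C", "P"):
            continue
        level = int(k[1:])
        if level <= 2:
            sizes[k] = None  # passed through without a non-local block
            continue
        sizes[k] = (base_size // (2 ** level), base_size // (2 ** level))
    return sizes
# _example_level_sizes() -> {'P2': None, 'P3': (64, 64), 'P4': (32, 32), 'P5': (16, 16)}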
def forward(self, net, batched_inputs, reuse=None): del batched_inputs net = wnnl.non_local_blockv4(net, scope=f"NonLocalROIHeadsHookV2", inner_dims_multiplier=[1, 1, 1], normalizer_fn=wnnl.evo_norm_s0, activation_fn=None, weighed_sum=False, n_head=4) return net
def forward(self, net, batched_inputs): del batched_inputs cls_net = wnnl.non_local_blockv4(net, scope=f"NonLocalROIHeadsHook_clsv3", inner_dims_multiplier=[2, 2, 2], normalizer_fn=wnnl.evo_norm_s0, activation_fn=None, n_head=2, weighed_sum=False) return cls_net, net
def forward(self, net, batched_inputs, reuse=None): with tf.variable_scope("AddBBoxesSizeInfo"): reg_net = net shape = wmlt.combined_static_and_dynamic_shape(net) C = shape[-1] K = 4 with tf.variable_scope("pos_embedding"): pos_embs_shape = [1, shape[1], shape[2], K * C] pos_embedding = tf.get_variable( "pos_embs", shape=pos_embs_shape, dtype=tf.float32, initializer=tf.random_normal_initializer(stddev=0.02)) bboxes = self.parent.t_proposal_boxes with tf.name_scope("trans_bboxes"): _, H, W, _ = btf.combined_static_and_dynamic_shape( batched_inputs[IMAGE]) bboxes = odb.tfrelative_boxes_to_absolutely_boxes(bboxes, W, H) bymin, bxmin, bymax, bxmax = tf.unstack(bboxes, axis=-1) bh = bymax - bymin bw = bxmax - bxmin br0 = bh / (bw + 1e-8) br1 = bw / (bh + 1e-8) bboxes = tf.stack([bh, bw, br0, br1], axis=-1) B, BN, BC = btf.combined_static_and_dynamic_shape(bboxes) bboxes = tf.reshape(bboxes, [B * BN, BC]) bboxes = tf.stop_gradient(bboxes) bboxes = slim.fully_connected(bboxes, C, activation_fn=self.activation_fn, normalizer_fn=self.normalizer_fn, normalizer_params=self.norm_params) bboxes = slim.fully_connected(bboxes, K * C, activation_fn=tf.nn.sigmoid, normalizer_fn=None) pos_embedding = tf.reshape(bboxes, [B * BN, 1, 1, K * C]) * pos_embedding pos_embedding = tf.layers.dense( pos_embedding, C, kernel_initializer=tf.truncated_normal_initializer( stddev=0.02)) cls_net = wnnl.non_local_blockv4(net, scope=f"non_local", normalizer_fn=wnnl.evo_norm_s0, activation_fn=None, n_head=4, weighed_sum=False, pos_embedding=pos_embedding) return cls_net, reg_net
def forward(self, net, batched_inputs):
    del batched_inputs
    if isinstance(net, (list, tuple)):
        cls_x = net[0]
        box_x = net[1]
    else:
        cls_x = net
        box_x = net
    cls_x = wnnl.non_local_blockv4(cls_x,
                                   scope="NonLocalROIHeadsHook_clsv2",
                                   normalizer_fn=wnnl.evo_norm_s0,
                                   activation_fn=None,
                                   n_head=4,
                                   weighed_sum=False)
    return cls_x, box_x