def forward(self, net, batched_inputs):
    del batched_inputs
    # Two independent non-local refinements: one branch for classification,
    # one for box regression.
    cls_net = wnnl.non_local_blockv1(net,
                                     scope="NonLocalROIHeadsHook_cls",
                                     normalizer_fn=wnnl.evo_norm_s0,
                                     activation_fn=None,
                                     weighed_sum=False)
    reg_net = wnnl.non_local_blockv1(net,
                                     scope="NonLocalROIHeadsHook_reg",
                                     normalizer_fn=wnnl.evo_norm_s0,
                                     activation_fn=None,
                                     weighed_sum=False)
    return cls_net, reg_net
def forward(self, net, batched_inputs, reuse=None):
    del batched_inputs
    net = wnnl.non_local_blockv1(net,
                                 scope="NonLocalROIHeadsHook",
                                 normalizer_fn=wnnl.evo_norm_s0,
                                 activation_fn=None,
                                 weighed_sum=False)
    return net
def forward(self, features, batched_inputs):
    del batched_inputs
    normalizer_fn, normalizer_params = odt.get_norm("evo_norm_s0",
                                                    is_training=self.is_training)
    res = OrderedDict()
    with tf.variable_scope("BalanceNonLocalBackboneHook"):
        ref_index = 1
        end_points = list(features.items())
        k0, v0 = end_points[ref_index]
        mfeatures = []
        # Fusion: bring every level to the reference level's resolution,
        # average them, and smooth the result with a 3x3 conv.
        with tf.name_scope("fusion"):
            shape0 = wmlt.combined_static_and_dynamic_shape(v0)
            for i, (k, v) in enumerate(end_points):
                if i == ref_index:
                    net = v
                else:
                    net = tf.image.resize_bilinear(v, shape0[1:3], name=f"resize{i}")
                mfeatures.append(net)
            net = tf.add_n(mfeatures) / float(len(mfeatures))
            net = slim.conv2d(net, net.get_shape().as_list()[-1], [3, 3],
                              activation_fn=None,
                              normalizer_fn=normalizer_fn,
                              normalizer_params=normalizer_params,
                              scope="smooth")
        # Redistribute: resize the smoothed fusion back to each level, add it as a
        # residual, and refine with a non-local block (level 0 is not refined).
        # A separate per-level name is used so every level is merged with the
        # same fused feature.
        for i, (k, v) in enumerate(end_points):
            with tf.variable_scope(f"merge{i}"):
                shape = wmlt.combined_static_and_dynamic_shape(v)
                fused = tf.image.resize_bilinear(net, shape[1:3])
                out = v + fused
                if i > 0:
                    out = wnnl.non_local_blockv1(out,
                                                 inner_dims_multiplier=[1, 1, 1],
                                                 normalizer_fn=normalizer_fn,
                                                 normalizer_params=normalizer_params,
                                                 activation_fn=None,
                                                 weighed_sum=False)
                res[k] = out
    return res
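# --- Illustrative sketch (not part of the hook above) ---------------------------
# The forward above fuses all pyramid levels at a reference resolution, smooths
# the average, and redistributes it as a residual before the non-local
# refinement. The helper below is a stripped-down sketch of just the
# fuse-and-redistribute pattern (no smoothing conv, no non-local); the name
# `balance_levels_sketch` is illustrative and not part of the repo.
def balance_levels_sketch(features, ref_index=1):
    # features: list of [N, Hi, Wi, C] maps sharing the same channel count.
    ref_shape = wmlt.combined_static_and_dynamic_shape(features[ref_index])
    # Gather every level at the reference resolution and average them.
    resized = [tf.image.resize_bilinear(f, ref_shape[1:3]) for f in features]
    fused = tf.add_n(resized) / float(len(resized))
    # Resize the fused map back to each level and add it as a residual.
    outs = []
    for f in features:
        shape = wmlt.combined_static_and_dynamic_shape(f)
        outs.append(f + tf.image.resize_bilinear(fused, shape[1:3]))
    return outs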
def forward(self, x, batched_inputs, reuse=None):
    del batched_inputs
    if isinstance(x, (list, tuple)) and len(x) == 2:
        # Cross non-local: queries come from x[0], keys/values from x[1].
        # The first two outputs pass x[0] through unchanged; only the IoU
        # branch receives the refined feature.
        iou_x = wnnl.non_local_blockv3(x[0], x[1], x[1],
                                       inner_dims_multiplier=[1, 1, 1],
                                       scope="NonLocalROIHeadsHook_iou",
                                       normalizer_fn=wnnl.evo_norm_s0,
                                       activation_fn=None,
                                       weighed_sum=False,
                                       skip_connect=False)
        return x[0], x[0], iou_x
    else:
        iou_x = wnnl.non_local_blockv1(x,
                                       scope="NonLocalROIHeadsHook_iou",
                                       normalizer_fn=wnnl.evo_norm_s0,
                                       activation_fn=None,
                                       weighed_sum=False)
        return x, x, iou_x
def forward(self, features, batched_inputs):
    del batched_inputs
    res = OrderedDict()
    normalizer_fn, normalizer_params = odt.get_norm("evo_norm_s0",
                                                    is_training=self.is_training)
    with tf.variable_scope("NonLocalBackboneHook"):
        for k, v in features.items():
            # Keys that are not backbone/FPN levels (e.g. not "C4"/"P5") are
            # dropped; levels at or below 3 are passed through unchanged.
            if k[0] not in ["C", "P"]:
                continue
            level = int(k[1:])
            if level <= 3:
                res[k] = v
                continue
            res[k] = wnnl.non_local_blockv1(v,
                                            inner_dims_multiplier=[1, 1, 1],
                                            normalizer_fn=normalizer_fn,
                                            normalizer_params=normalizer_params,
                                            activation_fn=None,
                                            weighed_sum=False)
    return res
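# --- Illustrative sketch (not part of the hooks above) --------------------------
# The hooks delegate the actual refinement to wnnl.non_local_blockv1/v3. The
# helper below is a minimal sketch of the embedded-Gaussian non-local operation
# such blocks are presumably built around: queries come from `x`, keys/values
# from `kv` (defaulting to `x`, the self-attention case of v1; v3 passes
# separate tensors). The function and variable names here are illustrative
# assumptions and do not mirror wnnl's real implementation; channel counts are
# assumed statically known.
def non_local_sketch(x, kv=None, inner_dim=None, scope="non_local_sketch"):
    kv = x if kv is None else kv
    with tf.variable_scope(scope):
        x_shape = wmlt.combined_static_and_dynamic_shape(x)
        kv_shape = wmlt.combined_static_and_dynamic_shape(kv)
        channels = x_shape[-1]
        inner = inner_dim or channels
        # 1x1 projections producing query/key/value embeddings.
        theta = slim.conv2d(x, inner, [1, 1], activation_fn=None, scope="theta")
        phi = slim.conv2d(kv, inner, [1, 1], activation_fn=None, scope="phi")
        g = slim.conv2d(kv, inner, [1, 1], activation_fn=None, scope="g")
        theta = tf.reshape(theta, [x_shape[0], x_shape[1] * x_shape[2], inner])
        phi = tf.reshape(phi, [kv_shape[0], kv_shape[1] * kv_shape[2], inner])
        g = tf.reshape(g, [kv_shape[0], kv_shape[1] * kv_shape[2], inner])
        # Affinity between every query position and every key/value position.
        affinity = tf.nn.softmax(tf.matmul(theta, phi, transpose_b=True), axis=-1)
        y = tf.matmul(affinity, g)
        y = tf.reshape(y, [x_shape[0], x_shape[1], x_shape[2], inner])
        # Project back to the input width and add the residual connection
        # (roughly what a skip_connect option would control).
        y = slim.conv2d(y, channels, [1, 1], activation_fn=None, scope="out")
        return x + y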