Example #1
0
 def conv_block(self, xx, i, j):
     """Apply one convolution layer of a residual block: conv2d + bias,
     batch norm, then leaky ReLU (slope 0.1).

     :param xx: input tensor for the i-th block
     :param i: index of the residual block (0-based)
     :param j: index of the convolution layer within the block (0-based)
     :return: output tensor of the j-th conv layer of the i-th block
     """
     block_tag = str(i + 1)
     layer_tag = str(j + 1)
     # Per-task weights/bias are looked up by name, e.g. "res1conv_w1".
     weights = self.task_parameter["res" + block_tag + "conv_w" + layer_tag]
     bias = self.task_parameter["res" + block_tag + "conv_bias" + layer_tag]
     pre_act = tf.add(
         tf.nn.conv2d(xx, weights, self.no_stride, "SAME"),
         bias,
     )
     normed = tf.contrib.layers.batch_norm(
         pre_act,
         activation_fn=None,
         variables_collections=self.var_collections,
         scope="scope" + block_tag + layer_tag,
         reuse=self.reuse,
     )
     return network_utils.leaky_relu(normed, 0.1)
Example #2
0
 def conv_block(xx):
     """3x3 convolution with batch norm (via normalizer_fn), then leaky ReLU.

     NOTE(review): relies on the enclosing scope for `n_filters`, `self`,
     `tcl` and `network_utils` — presumably defined inside a method.
     """
     normed = tcl.conv2d(
         xx,
         n_filters,
         3,
         activation_fn=None,
         normalizer_fn=tcl.batch_norm,
         variables_collections=self.var_coll,
     )
     return network_utils.leaky_relu(normed, 0.1)
Example #3
0
 def conv_block(xx, i, j):
     """Conv2d (no bias) + unscoped batch norm + leaky ReLU for layer (i, j).

     NOTE(review): reads `self.task_parameter` and `self.no_stride` from the
     enclosing scope — presumably a nested helper inside a method.
     """
     kernel = self.task_parameter["res" + str(i + 1) + "conv_w" + str(j + 1)]
     conv_out = tf.nn.conv2d(xx, kernel, self.no_stride, "SAME")
     normed = tf.contrib.layers.batch_norm(conv_out, activation_fn=None)
     return network_utils.leaky_relu(normed, 0.1)
Example #4
0
 def conv_block(self, xx, i, j):
     """Run xx through the j-th conv layer of the i-th residual block.

     Pipeline: conv2d + bias add, scoped (reusable) batch norm, then
     leaky ReLU with slope 0.1.

     :param xx: input tensor for the i-th block
     :param i: residual-block index (0-based)
     :param j: conv-layer index within the block (0-based)
     :return: activated output tensor
     """
     # Parameter keys are 1-based, e.g. "res1conv_w1" / "res1conv_bias1".
     w = self.task_parameter["res" + str(i + 1) + "conv_w" + str(j + 1)]
     b = self.task_parameter["res" + str(i + 1) + "conv_bias" + str(j + 1)]
     linear = tf.add(tf.nn.conv2d(xx, w, self.no_stride, "SAME"), b)
     normalized = tf.contrib.layers.batch_norm(
         linear,
         activation_fn=None,
         variables_collections=self.var_collections,
         scope="scope" + str(i + 1) + str(j + 1),
         reuse=self.reuse,
     )
     return network_utils.leaky_relu(normalized, 0.1)