def __resBlock(self, x, channels=64, kernel_size=(3, 3), scale=1, layer=0):
    """Build one residual block as defined in the paper (EDSR-style):
    conv -> relu -> conv, residual scaling, then an elementwise add with
    the block input. More on this inside model.py.

        x
        |\
        | \
        |  conv2d
        |  relu
        |  conv2d
        |  scale
        | /
        |/
        + (addition here)
        |
        result

    :param x: input layer passed through the residual block
    :param channels: number of feature channels in both convolutions
    :param kernel_size: convolution filter size (height, width)
    :param scale: multiplier applied to the residual branch before the add
    :param layer: block index, used only to build unique layer names
    :return: the layer produced by the residual addition
    """
    # Residual branch: two convolutions, ReLU only after the first.
    nn = tl.Conv2d(x, channels, kernel_size, act=tf.nn.relu, name='res%d/c1' % (layer))
    nn = tl.Conv2d(nn, channels, kernel_size, act=None, name='res%d/c2' % (layer))
    # Scale the residual branch before the add (stabilizes deep residual stacks).
    nn = ScaleLayer(nn, scale, name='res%d/scale' % (layer))
    # Skip connection: add the untouched input back in.
    n = tl.ElementwiseLayer([x, nn], tf.add, name='res%d/res_add' % (layer))
    return n
def build_model(self, n_features=256, n_res_blocks=36, scale=8, max_to_keep=100, cycle_size=48):
    """Build the CycleSR graph: entry conv -> residual stack -> global skip
    -> deconv upsampling -> bottleneck, then loss, session, and saver.

    :param n_features: feature depth used throughout the residual stack
    :param n_res_blocks: number of residual blocks to stack
    :param scale: super-resolution upscaling factor for the upsample module
    :param max_to_keep: number of checkpoints retained by the tf.train.Saver
    :param cycle_size: spatial size of the bicubically resized cycle image
        (defaults to the original hard-coded 48)
    """
    print("Building CycleSR...")
    norm_input = utils.normalize_color_tf(self.input)
    norm_target = utils.normalize_color_tf(self.target)
    x = tl.InputLayer(norm_input, name='input_layer')

    # One convolution before res blocks and to convert to required feature depth
    x = tl.Conv2d(x, n_features, (3, 3), name='c')

    # Store the output of the first convolution for the global skip connection.
    conv_1 = x

    scaling_factor = 0.1
    with tf.variable_scope("res_blocks"):
        for i in range(n_res_blocks):
            # BUG FIX: the helper is named __resBlock, not _res_block — the
            # old call `self._res_block(...)` raised AttributeError at build
            # time. Name mangling resolves this to the private method.
            x = self.__resBlock(x, n_features, (3, 3), scale=scaling_factor, layer=i)
        x = tl.Conv2d(x, n_features, (3, 3), name='res_c')
        x = tl.ElementwiseLayer([conv_1, x], tf.add, name='res_add')

    with tf.variable_scope("upscale_module"):
        x = utils.deconv_upsample(x, n_features, kernel=(5, 5), scale=scale)

    output = tl.Conv2d(x, self.n_channels, (1, 1), act=tf.nn.relu, name='bottleneck')
    # Normalized output image, clipped to [0, 1].
    self.output = tf.clip_by_value(output.outputs, 0.0, 1.0, name='output')
    # Low-resolution cycle image; the +0.5 acts as rounding when values are
    # later truncated to integer pixels.
    # NOTE(review): assumes the cycle path expects a 0-255 range — confirm.
    self.cycle_x = tf.image.resize_bicubic(self.output * 255.0 + 0.5, [cycle_size, cycle_size])

    self.calculate_loss(norm_target, self.output)

    conf = tf.ConfigProto(allow_soft_placement=True, log_device_placement=False)
    self.sess = tf.Session(config=conf)
    self.saver = tf.train.Saver(max_to_keep=max_to_keep)
    print("Done building!")
def __resBlock(self, x, channels=64, kernel_size=[3, 3], scale=1, layer=0):
    """Residual block: conv-relu-conv on a side branch, scale that branch,
    then add it back onto the input.

    NOTE(review): this duplicates an earlier `__resBlock` definition in the
    same class; the later definition wins at class-creation time.

    :param x: input layer fed through the block
    :param channels: feature channels for both convolutions
    :param kernel_size: convolution filter size
    :param scale: residual-branch multiplier applied before the skip add
    :param layer: block index used to namespace the layer names
    :return: the layer resulting from the residual addition
    """
    branch = tl.Conv2d(x, channels, kernel_size, act=tf.nn.relu, name='res%d/c1' % (layer))
    branch = tl.Conv2d(branch, channels, kernel_size, act=None, name='res%d/c2' % (layer))
    branch = ScaleLayer(branch, scale, name='res%d/scale' % (layer))
    merged = tl.ElementwiseLayer([x, branch], tf.add, name='res%d/res_add' % (layer))
    return merged