# Test: TimeDistributedLayer applies a DenseLayer to every timestep of a
# (batch, time, feature) tensor, and variable reuse works under a scope.
# NOTE(review): this script uses `tf` (TensorFlow 1.x API) with no visible
# `import tensorflow as tf` — presumably imported earlier; confirm.
from tensorlayer.layers import InputLayer, TimeDistributedLayer, DenseLayer

sess = tf.InteractiveSession()

batch_size = 32
timestep = 20
input_dim = 100

## no reuse
x = tf.placeholder(dtype=tf.float32, shape=[batch_size, timestep, input_dim], name="encode_seqs")
net = InputLayer(x, name='input')
net = TimeDistributedLayer(net, layer_class=DenseLayer, args={'n_units': 50, 'name': 'dense'}, name='time_dense')

# Dense is applied per-timestep: (32, 20, 100) -> (32, 20, 50)
if net.outputs.get_shape().as_list() != [32, 20, 50]:
    raise Exception("shape do not match")

net.print_params(False)
# One shared Dense: 100 * 50 weights + 50 biases = 5050 parameters.
if net.count_params() != 5050:
    raise Exception("params do not match")


## reuse
def model(x, is_train=True, reuse=False):
    """Build the same time-distributed dense network under a reusable scope."""
    with tf.variable_scope("model", reuse=reuse):
        net = InputLayer(x, name='input')
        net = TimeDistributedLayer(net, layer_class=DenseLayer, args={'n_units': 50, 'name': 'dense'}, name='time_dense')
    return net


net_train = model(x, is_train=True, reuse=False)
net_test = model(x, is_train=False, reuse=True)
# Fragment of a SubpixelConv2d test.
# NOTE(review): `n`, `x`, `tf`, InputLayer, Conv2d and SubpixelConv2d are all
# defined earlier in the original file (not visible here); confirm context.

# NOTE(review): the assertion below expects [10, 200, 16], so the historical
# "(10, 200, 2)" shape annotation looked stale — verify against the 1D setup.
print(n.outputs.shape)
n.print_layers()
n.print_params(False)

shape = n.outputs.get_shape().as_list()
if shape != [10, 200, 16]:
    raise Exception("shape dont match")
if len(n.all_layers) != 2:
    raise Exception("layers dont match")
if len(n.all_params) != 2:
    raise Exception("params dont match")
if n.count_params() != 416:
    raise Exception("params dont match")

## 2D
x = tf.placeholder('float32', [10, 100, 100, 3], name='x')
n = InputLayer(x, name='in')
n = Conv2d(n, 32, (3, 2), (1, 1), padding='SAME', name='conv2d')
# scale=2 rearranges channels into space: (10, 100, 100, 32) -> (10, 200, 200, 8)
n = SubpixelConv2d(n, scale=2, name='subpixel2d')

print(n.outputs.shape)
n.print_layers()
n.print_params(False)

shape = n.outputs.get_shape().as_list()
if shape != [10, 200, 200, 8]:
    raise Exception("shape dont match")
# Test: TimeDistributedLayer wraps DenseLayer so it runs on each timestep of
# a (batch, time, feature) input; also checks scope-based variable reuse.
# NOTE(review): near-duplicate of the script above, with slightly different
# exception messages ("dont" vs "do not") — kept byte-for-byte as found.
# NOTE(review): `tf` (TensorFlow 1.x API) has no visible import — presumably
# imported earlier; confirm.
from tensorlayer.layers import InputLayer, TimeDistributedLayer, DenseLayer

sess = tf.InteractiveSession()

batch_size = 32
timestep = 20
input_dim = 100

## no reuse
x = tf.placeholder(dtype=tf.float32, shape=[batch_size, timestep, input_dim], name="encode_seqs")
net = InputLayer(x, name='input')
net = TimeDistributedLayer(net, layer_class=DenseLayer, args={'n_units': 50, 'name': 'dense'}, name='time_dense')

# Per-timestep dense: (32, 20, 100) -> (32, 20, 50)
if net.outputs.get_shape().as_list() != [32, 20, 50]:
    raise Exception("shape dont match")

net.print_params(False)
# Shared Dense weights: 100 * 50 + 50 = 5050 parameters.
if net.count_params() != 5050:
    raise Exception("params dont match")


## reuse
def model(x, is_train=True, reuse=False):
    """Build the identical network inside a reusable variable scope."""
    with tf.variable_scope("model", reuse=reuse):
        net = InputLayer(x, name='input')
        net = TimeDistributedLayer(net, layer_class=DenseLayer, args={'n_units': 50, 'name': 'dense'}, name='time_dense')
    return net


net_train = model(x, is_train=True, reuse=False)
net_test = model(x, is_train=False, reuse=True)