Example #1
    def __get_network__(self,
                        model_name,
                        encode_seqs,
                        reuse=False,
                        is_train=True):
        # network architecture: parallel 1-D convolutions with global max-pooling over time
        with tf.variable_scope(model_name, reuse=reuse):
            # tl.layers.set_name_reuse(reuse)
            net_in = InputLayer(inputs=encode_seqs, name="in_word_embed")

            filter_length = [3, 4, 5]
            n_filter = 200
            net_cnn_list = list()
            for fsz in filter_length:
                net_cnn = Conv1d(net_in,
                                 n_filter=n_filter,
                                 filter_size=fsz,
                                 stride=1,
                                 act=tf.nn.relu,
                                 name="cnn%d" % fsz)
                net_cnn.outputs = tf.reduce_max(net_cnn.outputs,
                                                axis=1,
                                                name="global_maxpool%d" % fsz)
                net_cnn_list.append(net_cnn)

            net_cnn = ConcatLayer(net_cnn_list, concat_dim=-1)
            net_fc = DenseLayer(net_cnn,
                                n_units=300,
                                act=tf.nn.relu,
                                name="fc_1")

            net_fc = DenseLayer(net_fc,
                                n_units=1,
                                act=tf.nn.sigmoid,
                                name="fc_2")
        return net_fc, net_cnn
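
A minimal usage sketch for the method above, assuming it lives on a class instance `model` and that `encode_seqs` carries pre-embedded word sequences; the shapes (batch 64, length 100, embedding dim 200), the target placeholder, and the Adam optimizer are illustrative assumptions, not part of the original source. Because fc_2 already applies a sigmoid, a plain binary cross-entropy on its outputs fits:

import tensorflow as tf
import tensorlayer as tl

encode_seqs = tf.placeholder(tf.float32, [64, 100, 200], name="encode_seqs")  # illustrative shape
targets = tf.placeholder(tf.float32, [64, 1], name="targets")                 # 0/1 labels

net_fc, net_cnn = model.__get_network__("cnn_clf", encode_seqs, reuse=False, is_train=True)

cost = tl.cost.binary_cross_entropy(net_fc.outputs, targets, name="bce")
train_op = tf.train.AdamOptimizer(1e-4).minimize(cost)

with tf.Session() as sess:
    sess.run(tf.global_variables_initializer())
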
Example #2
import tensorflow as tf
from tensorlayer.layers import SubpixelConv1d, SubpixelConv2d, InputLayer, Conv1d, Conv2d

## 1D
t_signal = tf.placeholder('float32', [10, 100, 4], name='x')
n = InputLayer(t_signal, name='in')
n = Conv1d(n, 32, 3, 1, padding='SAME', name='conv1d')
n = SubpixelConv1d(n, scale=2, name='subpixel')
print(n.outputs.shape)
# ... (10, 200, 16)
n.print_layers()
n.print_params(False)

shape = n.outputs.get_shape().as_list()
if shape != [10, 200, 16]:
    raise Exception("shape dont match")

if len(n.all_layers) != 2:
    raise Exception("layers dont match")

if len(n.all_params) != 2:
    raise Exception("params dont match")

if n.count_params() != 416:
    raise Exception("params dont match")

## 2D
x = tf.placeholder('float32', [10, 100, 100, 3], name='x')
n = InputLayer(x, name='in')
n = Conv2d(n, 32, (3, 2), (1, 1), padding='SAME', name='conv2d')
n = SubpixelConv2d(n, scale=2, name='subpixel2d')
print(n.outputs.shape)
# ... (10, 200, 200, 8)
Example #3
    def __get_network__(self,
                        model_name,
                        encode_seqs,
                        reuse=False,
                        is_train=True):
        with tf.variable_scope(model_name, reuse=reuse):
            tl.layers.set_name_reuse(reuse)

            net_in = InputLayer(inputs=encode_seqs, name="in_word_embed")
            '''
            net_in = ReshapeLayer(
                net_in,
                (-1, self.max_length, self.word_embedding_dim, 1),
                name="reshape"
            )
            '''

            filter_length = [2, 4, 8]
            n_filter = 600

            net_cnn_list = list()

            for fsz in filter_length:

                net_cnn = Conv1d(net_in,
                                 n_filter=n_filter,
                                 filter_size=fsz,
                                 stride=1,
                                 act=tf.nn.relu,
                                 name="cnn%d" % fsz)
                net_cnn.outputs = tf.reduce_max(net_cnn.outputs,
                                                axis=1,
                                                name="global_maxpool%d" % fsz)
                net_cnn_list.append(net_cnn)

            net_cnn = ConcatLayer(net_cnn_list, concat_dim=-1)
            '''
            net_cnn = Conv1d(net_in, 400, 8, act=tf.nn.relu, name="cnn_1")
            net_cnn = MaxPool1d(net_cnn, 2, 2, padding="valid", name="maxpool_1")

            net_cnn = Conv1d(net_cnn, 600, 4, act=tf.nn.relu, name="cnn_2")
            net_cnn = MaxPool1d(net_cnn, 2, 2, padding="valid", name="maxpool_2")

            net_cnn = Conv1d(net_cnn, 600, 2, act=tf.nn.relu, name="cnn_3")
            net_cnn = MaxPool1d(net_cnn, 2, 2, padding="valid", name="maxpool_3")

            net_cnn = FlattenLayer(net_cnn, name="flatten")
            '''
            '''
            net_cnn = Conv2d(net_in, 64, (8, 8), act=tf.nn.relu, name="cnn_1")
            net_cnn = MaxPool2d(net_cnn, (2, 2), padding="valid", name="maxpool_1")

            net_cnn = Conv2d(net_cnn, 32, (4, 4), act=tf.nn.relu, name="cnn_2")
            net_cnn = MaxPool2d(net_cnn, (2, 4), padding="valid", name="maxpool_2")

            net_cnn = Conv2d(net_cnn, 8, (2, 2), act=tf.nn.relu, name="cnn_3")
            net_cnn = MaxPool2d(net_cnn, (2, 2), padding="valid", name="maxpool_3")

            net_cnn = FlattenLayer(net_cnn, name="flatten")
            '''

            net_cnn = DropoutLayer(net_cnn,
                                   keep=0.5,
                                   is_fix=True,
                                   is_train=is_train,
                                   name='drop1')

            net_fc = DenseLayer(net_cnn,
                                n_units=400,
                                act=tf.nn.relu,
                                name="fc_1")

            net_fc = DropoutLayer(net_fc,
                                  keep=0.5,
                                  is_fix=True,
                                  is_train=is_train,
                                  name='drop2')

            net_fc = DenseLayer(net_fc,
                                n_units=100,
                                act=tf.nn.relu,
                                name="fc_2")

            net_fc = DropoutLayer(net_fc,
                                  keep=0.5,
                                  is_fix=True,
                                  is_train=is_train,
                                  name='drop3')

            net_fc = DenseLayer(net_fc,
                                n_units=self.number_of_seen_classes,
                                act=tf.nn.relu,
                                name="fc_3")

        return net_fc
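
A hedged training sketch for this seen-class classifier, again assuming a class instance `model` plus illustrative shapes and optimizer; note that fc_3 keeps the ReLU from the source even though its outputs are consumed as logits here:

import tensorflow as tf
import tensorlayer as tl

encode_seqs = tf.placeholder(tf.float32, [64, 100, 200], name="encode_seqs")  # illustrative
labels = tf.placeholder(tf.int64, [64], name="labels")  # integer class ids

net_fc = model.__get_network__("cnn_seen", encode_seqs, reuse=False, is_train=True)

# Softmax cross-entropy over the seen classes, treating fc_3 outputs as logits.
cost = tl.cost.cross_entropy(net_fc.outputs, labels, name="xe")
train_op = tf.train.AdamOptimizer(1e-4).minimize(cost)
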
Example #4
    def __get_network__(self, model_name, encode_seqs, class_label_seqs, kg_vector, reuse=False, is_train=True):
        with tf.variable_scope(model_name, reuse=reuse):
            tl.layers.set_name_reuse(reuse)

            net_word_embed = InputLayer(
                inputs=encode_seqs,
                name="in_word_embed"
            )

            net_class_label_embed = InputLayer(
                inputs=class_label_seqs,
                name="in_class_label_embed"
            )

            net_kg = InputLayer(
                inputs=kg_vector,
                name='in_kg'
            )

            net_kg = ReshapeLayer(
                net_kg,
                shape=(-1, self.kg_embedding_dim),
                name="reshape_kg_1"
            )

            net_kg = ReshapeLayer(
                net_kg,
                shape=(-1, self.max_length, self.kg_embedding_dim),
                name="reshape_kg_2"
            )

            if config.model == "vwvcvkg":
                # dbpedia and 20news
                net_in = ConcatLayer(
                    [net_word_embed, net_class_label_embed, net_kg],
                    concat_dim=-1,
                    name='concat_vw_vwc_vc'
                )
            elif config.model == "vwvc":
                net_in = ConcatLayer(
                    [net_word_embed, net_class_label_embed],
                    concat_dim=-1,
                    name='concat_vw_vc'
                )
            elif config.model == "vwvkg":
                net_in = ConcatLayer(
                    [net_word_embed, net_kg],
                    concat_dim=-1,
                    name='concat_vw_vwc'
                )
            elif config.model == "vcvkg":
                net_in = ConcatLayer(
                    [net_class_label_embed, net_kg],
                    concat_dim=-1,
                    name='concat_vc_vwc'
                )
            elif config.model == "kgonly":
                net_in = ConcatLayer(
                    [net_kg],
                    concat_dim=-1,
                    name='concat_vwc'
                )
            else:
                raise Exception("config.model value error")

            filter_length = [2, 4, 8]
            # dbpedia
            n_filter = 600
            # n_filter = 200

            net_cnn_list = list()

            for fsz in filter_length:

                net_cnn = Conv1d(
                    net_in,
                    n_filter=n_filter,
                    filter_size=fsz,
                    stride=1,
                    act=tf.nn.relu,
                    name="cnn%d" % fsz
                )
                net_cnn.outputs = tf.reduce_max(net_cnn.outputs, axis=1, name="global_maxpool%d" % fsz)
                net_cnn_list.append(net_cnn)

            '''
            if config.model == "vwvcvkg":
                net_class_label_embed.outputs = tf.slice(
                    net_class_label_embed.outputs,
                    [0, 0, 0],
                    [config.batch_size, 1, self.word_embedding_dim],
                    name="slice_word"
                )
                net_class_label_embed.outputs = tf.squeeze(
                    net_class_label_embed.outputs,
                    name="squeeze_word"
                )
                net_cnn = ConcatLayer(net_cnn_list + [net_class_label_embed], concat_dim=-1)
            else:
                net_cnn = ConcatLayer(net_cnn_list, concat_dim=-1)
            '''
            net_cnn = ConcatLayer(net_cnn_list, concat_dim=-1)

            net_fc = DropoutLayer(net_cnn, keep=0.5, is_fix=True, is_train=is_train, name='drop1')

            net_fc = DenseLayer(
                net_fc,
                n_units=400,
                act=tf.nn.relu,
                name="fc_1"
            )

            net_fc = DropoutLayer(net_fc, keep=0.5, is_fix=True, is_train=is_train, name='drop2')

            # dbpedia
            net_fc = DenseLayer(
                net_fc,
                n_units=100,
                act=tf.nn.relu,
                name="fc_2"
            )
            net_fc = DropoutLayer(net_fc, keep=0.5, is_fix=True, is_train=is_train, name='drop3')

            net_fc = DenseLayer(
                net_fc,
                n_units=1,
                act=tf.nn.sigmoid,
                name="fc_3"
            )
        return net_fc, net_cnn
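
A hedged sketch of scoring one batch with the network above, assuming config.model == "vwvcvkg", an open session `sess`, and that `encode_seqs`, `class_label_seqs`, and `kg_vector` are the placeholders fed into __get_network__; every shape below is an illustrative assumption:

import numpy as np

feed = {
    encode_seqs: np.zeros((64, 100, 200), np.float32),       # v_w: word embeddings
    class_label_seqs: np.zeros((64, 100, 200), np.float32),  # v_c: class-label embeddings
    kg_vector: np.zeros((64, 100, 10), np.float32),          # v_{w,c}: KG relation vectors
}
scores = sess.run(net_fc.outputs, feed_dict=feed)  # (64, 1) sigmoid match scores
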
Example #5
    def __get_network_cnnfc__(self, model_name, encode_seqs, class_label_seqs, reuse=False, is_train=True):
        with tf.variable_scope(model_name, reuse=reuse):
            tl.layers.set_name_reuse(reuse)

            net_word_embed = InputLayer(
                inputs=encode_seqs,
                name="in_word_embed"
            )

            net_class_label_embed = InputLayer(
                inputs=class_label_seqs,
                name="in_class_label_embed"
            )

            net_class_label_embed.outputs = tf.slice(
                net_class_label_embed.outputs,
                [0, 0, 0],
                [config.batch_size, 1, self.word_embedding_dim],
                name="slice_word"
            )

            net_class_label_embed.outputs = tf.squeeze(
                net_class_label_embed.outputs,
                name="squeeze_word"
            )

            net_in = ConcatLayer(
                [net_word_embed],
                concat_dim=-1,
                name='concat_vw'
            )

            filter_length = [2, 4, 8]
            # dbpedia
            n_filter = 600
            # n_filter = 200

            net_cnn_list = list()

            for fsz in filter_length:

                net_cnn = Conv1d(
                    net_in,
                    n_filter=n_filter,
                    filter_size=fsz,
                    stride=1,
                    act=tf.nn.relu,
                    name="cnn%d" % fsz
                )
                net_cnn.outputs = tf.reduce_max(net_cnn.outputs, axis=1, name="global_maxpool%d" % fsz)
                net_cnn_list.append(net_cnn)

            net_cnn = ConcatLayer(net_cnn_list + [net_class_label_embed], concat_dim=-1)

            net_fc = DropoutLayer(net_cnn, keep=0.5, is_fix=True, is_train=is_train, name='drop1')

            net_fc = DenseLayer(
                net_fc,
                n_units=400,
                act=tf.nn.relu,
                name="fc_1"
            )

            net_fc = DropoutLayer(net_fc, keep=0.5, is_fix=True, is_train=is_train, name='drop2')

            # dbpedia
            net_fc = DenseLayer(
                net_fc,
                n_units=100,
                act=tf.nn.relu,
                name="fc_2"
            )
            net_fc = DropoutLayer(net_fc, keep=0.5, is_fix=True, is_train=is_train, name='drop3')

            net_fc = DenseLayer(
                net_fc,
                n_units=1,
                act=tf.nn.sigmoid,
                name="fc_3"
            )
        return net_fc
Example #6
    def __get_network__(self,
                        encode_seq,
                        neighbour_seq,
                        decode_seq,
                        features,
                        features_full,
                        is_train=True,
                        reuse=False):
        w_init = tf.random_normal_initializer(stddev=0.02)
        g_init = tf.random_normal_initializer(1., 0.02)

        with tf.variable_scope(self.model_name + "_spatial",
                               reuse=reuse) as vs:
            tl.layers.set_name_reuse(reuse)
            inputs_x_root = InputLayer(encode_seq, name='in_root')
            inputs_x_nbor = InputLayer(neighbour_seq, name="in_neighbour")

            # encoding neighbour graph information
            n = ReshapeLayer(inputs_x_nbor,
                             (config.batch_size * config.in_seq_length,
                              config.num_neighbour), "reshape1")
            n.outputs = tf.expand_dims(n.outputs, axis=-1)
            n = Conv1d(n,
                       4,
                       4,
                       1,
                       act=tf.identity,
                       padding='SAME',
                       W_init=w_init,
                       name='conv1')
            n = BatchNormLayer(n,
                               act=tf.nn.relu,
                               is_train=is_train,
                               gamma_init=g_init,
                               name='bn1')
            n = MaxPool1d(n, 2, 2, padding='valid', name='maxpool1')
            n = FlattenLayer(n, name="flatten1")
            n = ReshapeLayer(n, (config.batch_size, config.in_seq_length, -1),
                             name="reshape1_back")

            net_encode = ConcatLayer([inputs_x_root, n],
                                     concat_dim=-1,
                                     name="encode")
            net_decode = InputLayer(decode_seq, name="decode")

            net_rnn = Seq2Seq(
                net_encode,
                net_decode,
                cell_fn=tf.contrib.rnn.BasicLSTMCell,
                n_hidden=config.dim_hidden,
                initializer=tf.random_uniform_initializer(-0.1, 0.1),
                encode_sequence_length=tl.layers.retrieve_seq_length_op(
                    net_encode.outputs),
                decode_sequence_length=tl.layers.retrieve_seq_length_op(
                    net_decode.outputs),
                initial_state_encode=None,
                # dropout=(0.8 if is_train else None),
                dropout=None,
                n_layer=1,
                return_seq_2d=True,
                name='seq2seq')
            net_rnn_seq2seq = net_rnn

            net_spatial_out = DenseLayer(net_rnn,
                                         n_units=1,
                                         act=tf.identity,
                                         name='dense2')
            if is_train:
                net_spatial_out = ReshapeLayer(
                    net_spatial_out,
                    (config.batch_size, config.out_seq_length + 1, 1),
                    name="reshape_out")
            else:
                net_spatial_out = ReshapeLayer(net_spatial_out,
                                               (config.batch_size, 1, 1),
                                               name="reshape_out")

        with tf.variable_scope(self.model_name + "_wide", reuse=reuse) as vs:
            tl.layers.set_name_reuse(reuse)
            # Features
            net_features = InputLayer(features, name="in_features")
            net_features_full = InputLayer(features_full,
                                           name="in_features_full")
            net_features_full = ReshapeLayer(
                net_features_full,
                (config.batch_size *
                 (config.out_seq_length + 1), config.dim_features),
                name="reshape_feature_full_1")
            if is_train:
                net_features = ReshapeLayer(
                    net_features,
                    (config.batch_size *
                     (config.out_seq_length + 1), config.dim_features),
                    name="reshape_feature_1")
            else:
                net_features = ReshapeLayer(net_features,
                                            (config.batch_size *
                                             (1), config.dim_features),
                                            name="reshape_feature_1")

            self.net_features_dim = 32
            net_features = DenseLayer(net_features,
                                      n_units=self.net_features_dim,
                                      act=tf.nn.relu,
                                      name='dense_features')
            net_features_full = DenseLayer(net_features_full,
                                           n_units=self.net_features_dim,
                                           act=tf.nn.relu,
                                           name='dense_features_full')
            # self.net_features = net_features

            net_wide_out = ConcatLayer([net_rnn_seq2seq, net_features],
                                       concat_dim=-1,
                                       name="concat_features")
            net_wide_out = DenseLayer(net_wide_out,
                                      n_units=1,
                                      act=tf.identity,
                                      name='dense2')

            if is_train:
                net_wide_out = ReshapeLayer(
                    net_wide_out,
                    (config.batch_size, config.out_seq_length + 1, 1),
                    name="reshape_out")
            else:
                net_wide_out = ReshapeLayer(net_wide_out,
                                            (config.batch_size, 1, 1),
                                            name="reshape_out")

        with tf.variable_scope(self.model_name + "_query", reuse=reuse) as vs:
            tl.layers.set_name_reuse(reuse)

            net_decode_query = InputLayer(self.query_decode_seq,
                                          name="decode_query")

            net_rnn_query = RNNLayer(
                net_decode_query,
                cell_fn=tf.contrib.rnn.BasicLSTMCell,
                cell_init_args={"forget_bias": 1.0},
                n_hidden=config.query_dim_hidden,
                initializer=tf.random_uniform_initializer(-0.1, 0.1),
                n_steps=config.out_seq_length,
                return_last=True,
                # return_last=False,
                # return_seq_2d=True,
                name="rnn_query")
            '''
            net_rnn_query = DynamicRNNLayer(
                net_decode_query,
                cell_fn=tf.contrib.rnn.BasicLSTMCell,
                cell_init_args={"forget_bias": 1.0},
                # n_hidden=config.query_dim_hidden,
                n_hidden=32,
                initializer=tf.random_uniform_initializer(-0.1, 0.1),
                return_last=True,
                # dropout=0.8,
                sequence_length=tl.layers.retrieve_seq_length_op(net_decode_query.outputs),
                # return_last=False,
                # return_seq_2d=True,
                name="rnn_query_dynamic"
            )
            '''

            net_rnn_query = ExpandDimsLayer(net_rnn_query,
                                            axis=1,
                                            name="rnn_query_expand")
            net_rnn_query = TileLayer(net_rnn_query,
                                      [1, config.out_seq_length, 1],
                                      name="rnn_query_tile")
            net_rnn_query = ReshapeLayer(
                net_rnn_query, (config.batch_size * config.out_seq_length,
                                config.query_dim_hidden),
                name="rnn_query_reshape")
            # net_rnn_query = ReshapeLayer(net_rnn_query, (config.batch_size * config.out_seq_length, 32), name="rnn_query_reshape")

            # self.net_rnn_query = net_rnn_query

            net_traffic_state = InputLayer(self.traffic_state,
                                           name="in_traffic_state")
            '''
            if is_train:
                net_rnn_traffic = ReshapeLayer(net_rnn_seq2seq, (config.batch_size, config.out_seq_length + 1, config.dim_hidden), name="reshape_traffic_q1")
                net_rnn_traffic.outputs = tf.slice(net_rnn_traffic.outputs, [0, 0, 0], [config.batch_size, config.out_seq_length, config.dim_hidden], name="slice_traffic_q")
                net_rnn_traffic = ReshapeLayer(net_rnn_traffic, (config.batch_size * config.out_seq_length, config.dim_hidden), name="reshape_traffic_q2")

                net_features_traffic = ReshapeLayer(net_features, (config.batch_size, config.out_seq_length + 1, self.net_features_dim), name="reshape_features_q1")
                net_features_traffic.outputs = tf.slice(net_features_traffic.outputs, [0, 0, 0], [config.batch_size, config.out_seq_length, self.net_features_dim], name="slice_features_q")
                net_features_traffic = ReshapeLayer(net_features_traffic, (config.batch_size * config.out_seq_length, self.net_features_dim), name="reshape_features_q2")

                net_query_out = ConcatLayer([net_rnn_traffic, net_features_traffic, net_rnn_query], concat_dim=-1, name="concat_traffic_query1")
                # net_query_out = ConcatLayer([net_rnn_traffic, net_rnn_query], concat_dim=-1, name="concat_traffic_query1")
            else:
            '''
            net_features_traffic = ReshapeLayer(
                net_features_full,
                (config.batch_size, config.out_seq_length + 1,
                 self.net_features_dim),
                name="reshape_features_q1")
            net_features_traffic.outputs = tf.slice(
                net_features_traffic.outputs, [0, 0, 0], [
                    config.batch_size, config.out_seq_length,
                    self.net_features_dim
                ],
                name="slice_features_q")
            net_features_traffic = ReshapeLayer(
                net_features_traffic,
                (config.batch_size * config.out_seq_length,
                 self.net_features_dim),
                name="reshape_features_q2")

            net_query_out = ConcatLayer(
                [net_traffic_state, net_features_traffic, net_rnn_query],
                concat_dim=-1,
                name="concat_traffic_query1")
            # net_rnn_traffic = ReshapeLayer(net_rnn_seq2seq, (config.batch_size, config.out_seq_length + 1, config.dim_hidden), name="reshape_traffic_q1")
            # net_rnn_traffic.outputs = tf.slice(net_rnn_traffic.outputs, [0, 0, 0], [config.batch_size, config.out_seq_length, config.dim_hidden], name="slice_traffic_q")
            # net_rnn_traffic = ReshapeLayer(net_rnn_traffic, (config.batch_size * config.out_seq_length, config.dim_hidden), name="reshape_traffic_q2")
            # net_query_out = ConcatLayer([net_rnn_traffic, net_features_traffic, net_rnn_query], concat_dim=-1, name="concat_traffic_query1")

            # net_out = DenseLayer(net_out, n_units=128, act=tf.nn.relu, name="dense_query1")
            # net_out = DenseLayer(net_out, n_units=64, act=tf.nn.relu, name="dense_query2")
            # net_query_out = DropoutLayer(net_query_out, keep=0.8, is_fix=True, is_train=is_train, name='drop_query3')
            net_query_out = DenseLayer(net_query_out,
                                       n_units=1,
                                       act=tf.identity,
                                       name="dense_query3")
            # net_out = ReshapeLayer(net_out, (config.batch_size, config.out_seq_length + 1, 1), name="reshape_out")
            # if is_train:
            net_query_out = ReshapeLayer(
                net_query_out, (config.batch_size, config.out_seq_length, 1),
                name="reshape_out")
            # else:
            #    net_out = ReshapeLayer(net_out, (config.batch_size, 1, 1), name="reshape_out")

            # TODO residual net
            '''
            if is_train:
                net_query_out.outputs = tf.add(
                    net_query_out.outputs,
                    tf.slice(net_wide_out.outputs, [0, 0, 0], [config.batch_size, config.out_seq_length, 1]),
                    name="res_add"
                )
            else:
            '''
            net_base_pred = InputLayer(self.base_pred, name="in_net_base_pred")
            net_query_out.outputs = tf.add(net_query_out.outputs,
                                           net_base_pred.outputs,
                                           name="res_add")

        return net_rnn_seq2seq, net_spatial_out, net_wide_out, net_rnn_query, net_query_out
Example #7
    def __get_network__(self,
                        encode_seq,
                        neighbour_seq,
                        decode_seq,
                        is_train=True,
                        reuse=False):
        w_init = tf.random_normal_initializer(stddev=0.02)
        g_init = tf.random_normal_initializer(1., 0.02)

        with tf.variable_scope(self.model_name, reuse=reuse) as vs:
            tl.layers.set_name_reuse(reuse)
            inputs_x_root = InputLayer(encode_seq, name='in_root')
            inputs_x_nbor = InputLayer(neighbour_seq, name="in_neighbour")

            # encoding neighbour graph information
            n = ReshapeLayer(inputs_x_nbor,
                             (config.batch_size * config.in_seq_length,
                              config.num_neighbour), "reshape1")
            n.outputs = tf.expand_dims(n.outputs, axis=-1)
            n = Conv1d(n,
                       4,
                       4,
                       1,
                       act=tf.identity,
                       padding='SAME',
                       W_init=w_init,
                       name='conv1')
            n = BatchNormLayer(n,
                               act=tf.nn.relu,
                               is_train=is_train,
                               gamma_init=g_init,
                               name='bn1')
            n = MaxPool1d(n, 2, 2, padding='valid', name='maxpool1')
            n = FlattenLayer(n, name="flatten1")
            n = ReshapeLayer(n, (config.batch_size, config.in_seq_length, -1),
                             name="reshape1_back")

            net_encode = ConcatLayer([inputs_x_root, n],
                                     concat_dim=-1,
                                     name="encode")
            net_decode = InputLayer(decode_seq, name="decode")

            net_rnn = Seq2Seq(
                net_encode,
                net_decode,
                cell_fn=tf.contrib.rnn.BasicLSTMCell,
                n_hidden=config.dim_hidden,
                initializer=tf.random_uniform_initializer(-0.1, 0.1),
                encode_sequence_length=tl.layers.retrieve_seq_length_op(
                    net_encode.outputs),
                decode_sequence_length=tl.layers.retrieve_seq_length_op(
                    net_decode.outputs),
                initial_state_encode=None,
                # dropout=(0.8 if is_train else None),
                dropout=None,
                n_layer=1,
                return_seq_2d=True,
                name='seq2seq')
            # net_out = DenseLayer(net_rnn, n_units=64, act=tf.identity, name='dense1')
            net_out = DenseLayer(net_rnn,
                                 n_units=1,
                                 act=tf.identity,
                                 name='dense2')
            if is_train:
                net_out = ReshapeLayer(
                    net_out, (config.batch_size, config.out_seq_length + 1, 1),
                    name="reshape_out")
            else:
                net_out = ReshapeLayer(net_out, (config.batch_size, 1, 1),
                                       name="reshape_out")

            self.net_rnn = net_rnn

            return net_out
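
A hedged sketch of a training objective for this forecaster, reusing `config` and the returned `net_out` from above and assuming targets shaped like the training-time reshape, (batch_size, out_seq_length + 1, 1); the MSE loss and learning rate are illustrative:

target_seq = tf.placeholder(tf.float32,
                            [config.batch_size, config.out_seq_length + 1, 1],
                            name="target_seq")
cost = tl.cost.mean_squared_error(net_out.outputs, target_seq, is_mean=True)
train_op = tf.train.AdamOptimizer(learning_rate=1e-3).minimize(cost)
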