Example #1
    def __init__(
            self,
            output_dim,
            hidden_sizes,
            hidden_nonlinearity,
            output_nonlinearity,
            name=None,
            hidden_w_init=ly.XavierUniformInitializer(),
            hidden_b_init=tf.zeros_initializer(),
            output_w_init=ly.XavierUniformInitializer(),
            output_b_init=tf.zeros_initializer(),
            input_var=None,
            input_layer=None,
            input_shape=None,
            batch_normalization=False,
            weight_normalization=False,
    ):

        Serializable.quick_init(self, locals())

        with tf.variable_scope(name, "MLP"):
            if input_layer is None:
                l_in = ly.InputLayer(
                    shape=(None, ) + input_shape,
                    input_var=input_var,
                    name="input")
            else:
                l_in = input_layer
            self._layers = [l_in]
            l_hid = l_in
            if batch_normalization:
                l_hid = ly.batch_norm(l_hid)
            for idx, hidden_size in enumerate(hidden_sizes):
                l_hid = ly.DenseLayer(
                    l_hid,
                    num_units=hidden_size,
                    nonlinearity=hidden_nonlinearity,
                    name="hidden_%d" % idx,
                    w=hidden_w_init,
                    b=hidden_b_init,
                    weight_normalization=weight_normalization)
                if batch_normalization:
                    l_hid = ly.batch_norm(l_hid)
                self._layers.append(l_hid)
            l_out = ly.DenseLayer(
                l_hid,
                num_units=output_dim,
                nonlinearity=output_nonlinearity,
                name="output",
                w=output_w_init,
                b=output_b_init,
                weight_normalization=weight_normalization)
            if batch_normalization:
                l_out = ly.batch_norm(l_out)
            self._layers.append(l_out)
            self._l_in = l_in
            self._l_out = l_out

            LayersPowered.__init__(self, l_out)
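For reference, a minimal construction sketch; the class name `MLP`, its import, and all concrete dimensions are assumptions for illustration, since only the constructor appears above, and every keyword argument below is taken from that signature:

    import tensorflow as tf

    # Hypothetical instantiation: a 2-hidden-layer MLP over a 16-dimensional
    # flat input with a linear 4-dimensional output. The constructor builds
    # the layer stack and stores it on self._layers / self._l_in / self._l_out.
    mlp = MLP(
        output_dim=4,
        hidden_sizes=(64, 64),
        hidden_nonlinearity=tf.nn.relu,
        output_nonlinearity=None,   # linear output head
        input_shape=(16, ),
        name='my_mlp',
    )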
Example #2
    def __init__(
        self,
        env_spec,
        name='GaussianGRUPolicy',
        hidden_dim=32,
        hidden_nonlinearity=tf.tanh,
        recurrent_nonlinearity=tf.nn.sigmoid,
        recurrent_w_x_init=L.XavierUniformInitializer(),
        recurrent_w_h_init=L.OrthogonalInitializer(),
        output_nonlinearity=None,
        output_w_init=L.XavierUniformInitializer(),
        feature_network=None,
        state_include_action=True,
        gru_layer_cls=L.GRULayer,
        learn_std=True,
        init_std=1.0,
        std_share_network=False,
    ):
        """
        :param env_spec: A spec for the env.
        :param hidden_dim: dimension of hidden layer
        :param hidden_nonlinearity: nonlinearity used for each hidden layer
        :return:
        """
        assert isinstance(env_spec.action_space, Box)

        self._mean_network_name = 'mean_network'
        self._std_network_name = 'std_network'

        with tf.variable_scope(name, 'GaussianGRUPolicy'):
            Serializable.quick_init(self, locals())
            super(GaussianGRUPolicy, self).__init__(env_spec)

            obs_dim = env_spec.observation_space.flat_dim
            action_dim = env_spec.action_space.flat_dim

            if state_include_action:
                input_dim = obs_dim + action_dim
            else:
                input_dim = obs_dim

            l_input = L.InputLayer(shape=(None, None, input_dim), name='input')

            if feature_network is None:
                feature_dim = input_dim
                l_flat_feature = None
                l_feature = l_input
            else:
                feature_dim = feature_network.output_layer.output_shape[-1]
                l_flat_feature = feature_network.output_layer
                l_feature = L.OpLayer(
                    l_flat_feature,
                    extras=[l_input],
                    name='reshape_feature',
                    op=lambda flat_feature, input: tf.reshape(
                        flat_feature,
                        tf.stack([
                            tf.shape(input)[0],
                            tf.shape(input)[1], feature_dim
                        ])),
                    shape_op=lambda _, input_shape:
                    (input_shape[0], input_shape[1], feature_dim))

            if std_share_network:
                mean_network = GRUNetwork(
                    input_shape=(feature_dim, ),
                    input_layer=l_feature,
                    output_dim=2 * action_dim,
                    hidden_dim=hidden_dim,
                    hidden_nonlinearity=hidden_nonlinearity,
                    recurrent_nonlinearity=recurrent_nonlinearity,
                    recurrent_w_x_init=recurrent_w_x_init,
                    recurrent_w_h_init=recurrent_w_h_init,
                    output_nonlinearity=output_nonlinearity,
                    output_w_init=output_w_init,
                    gru_layer_cls=gru_layer_cls,
                    name='gru_mean_network')

                l_mean = L.SliceLayer(mean_network.output_layer,
                                      slice(action_dim),
                                      name='mean_slice')

                l_step_mean = L.SliceLayer(mean_network.step_output_layer,
                                           slice(action_dim),
                                           name='step_mean_slice')

                l_log_std = L.SliceLayer(mean_network.output_layer,
                                         slice(action_dim, 2 * action_dim),
                                         name='log_std_slice')

                l_step_log_std = L.SliceLayer(mean_network.step_output_layer,
                                              slice(action_dim,
                                                    2 * action_dim),
                                              name='step_log_std_slice')
            else:
                mean_network = GRUNetwork(
                    input_shape=(feature_dim, ),
                    input_layer=l_feature,
                    output_dim=action_dim,
                    hidden_dim=hidden_dim,
                    hidden_nonlinearity=hidden_nonlinearity,
                    recurrent_nonlinearity=recurrent_nonlinearity,
                    recurrent_w_x_init=recurrent_w_x_init,
                    recurrent_w_h_init=recurrent_w_h_init,
                    output_nonlinearity=output_nonlinearity,
                    output_w_init=output_w_init,
                    gru_layer_cls=gru_layer_cls,
                    name='gru_mean_network')

                l_mean = mean_network.output_layer

                l_step_mean = mean_network.step_output_layer

                l_log_std = L.ParamLayer(
                    mean_network.input_layer,
                    num_units=action_dim,
                    param=tf.constant_initializer(np.log(init_std)),
                    name='output_log_std',
                    trainable=learn_std,
                )

                l_step_log_std = L.ParamLayer(
                    mean_network.step_input_layer,
                    num_units=action_dim,
                    param=l_log_std.param,
                    name='step_output_log_std',
                    trainable=learn_std,
                )

            self.mean_network = mean_network
            self.feature_network = feature_network
            self.l_input = l_input
            self.state_include_action = state_include_action

            flat_input_var = tf.placeholder(dtype=tf.float32,
                                            shape=(None, input_dim),
                                            name='flat_input')
            if feature_network is None:
                feature_var = flat_input_var
            else:
                feature_var = L.get_output(
                    l_flat_feature,
                    {feature_network.input_layer: flat_input_var})

            with tf.name_scope(self._mean_network_name):
                out_step_mean, out_step_hidden_mean = L.get_output(
                    [l_step_mean, mean_network.step_hidden_layer],
                    {mean_network.step_input_layer: feature_var})
                out_step_mean = tf.identity(out_step_mean, 'step_mean')
                out_step_hidden_mean = tf.identity(out_step_hidden_mean,
                                                   'step_hidden_mean')

            with tf.name_scope(self._std_network_name):
                out_step_log_std = L.get_output(
                    l_step_log_std,
                    {mean_network.step_input_layer: feature_var})
                out_step_log_std = tf.identity(out_step_log_std,
                                               'step_log_std')

            self.f_step_mean_std = tensor_utils.compile_function([
                flat_input_var,
                mean_network.step_prev_state_layer.input_var,
            ], [out_step_mean, out_step_log_std, out_step_hidden_mean])

            self.l_mean = l_mean
            self.l_log_std = l_log_std

            self.input_dim = input_dim
            self.action_dim = action_dim
            self.hidden_dim = hidden_dim

            self.prev_actions = None
            self.prev_hiddens = None
            self.dist = RecurrentDiagonalGaussian(action_dim)
            self.name = name

            out_layers = [l_mean, l_log_std, l_step_log_std]
            if feature_network is not None:
                out_layers.append(feature_network.output_layer)

            LayersPowered.__init__(self, out_layers)
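A minimal usage sketch under the same caveat: every argument below appears in the signature above, while `env` itself is an assumed garage environment whose action space is a Box, as the assert above requires:

    # Hypothetical instantiation of the policy defined above.
    policy = GaussianGRUPolicy(
        env_spec=env.spec,
        hidden_dim=32,
        learn_std=True,             # make the log-std a trainable parameter
        init_std=1.0,
        state_include_action=True,  # append the previous action to the input
    )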
Example #3
File: network.py Project: xht033/garage
    def __init__(self,
                 input_shape,
                 output_dim,
                 conv_filters,
                 conv_filter_sizes,
                 conv_strides,
                 conv_pads,
                 hidden_sizes,
                 hidden_nonlinearity,
                 output_nonlinearity,
                 name=None,
                 hidden_w_init=ly.XavierUniformInitializer(),
                 hidden_b_init=tf.zeros_initializer(),
                 output_w_init=ly.XavierUniformInitializer(),
                 output_b_init=tf.zeros_initializer(),
                 input_var=None,
                 input_layer=None,
                 batch_normalization=False,
                 weight_normalization=False):
        """
        A network composed of several convolution layers followed by some
        fully-connected (fc) layers.
        input_shape: (width, height, channel)
            HOWEVER, network inputs are assumed flattened. This network will
            first unflatten the inputs and then apply the standard convolutions
            and so on.
        conv_filters: a list of the numbers of convolution kernels (output
            channels), one per conv layer
        conv_filter_sizes: a list of sizes (int) of the convolution kernels
        conv_strides: a list of strides (int) of the conv kernels
        conv_pads: a list of pad formats (either 'SAME' or 'VALID')
        hidden_nonlinearity: a nonlinearity from tf.nn, shared by all conv and
            fc layers
        hidden_sizes: a list of numbers of hidden units for all fc layers
        """
        Serializable.quick_init(self, locals())
        with tf.variable_scope(name, 'ConvNetwork'):
            if input_layer is not None:
                l_in = input_layer
                l_hid = l_in
            elif len(input_shape) == 3:
                l_in = ly.InputLayer(shape=(None, np.prod(input_shape)),
                                     input_var=input_var,
                                     name='input')
                l_hid = ly.reshape(l_in, ([0], ) + input_shape,
                                   name='reshape_input')
            elif len(input_shape) == 2:
                l_in = ly.InputLayer(shape=(None, np.prod(input_shape)),
                                     input_var=input_var,
                                     name='input')
                input_shape = (1, ) + input_shape
                l_hid = ly.reshape(l_in, ([0], ) + input_shape,
                                   name='reshape_input')
            else:
                l_in = ly.InputLayer(shape=(None, ) + input_shape,
                                     input_var=input_var,
                                     name='input')
                l_hid = l_in

            if batch_normalization:
                l_hid = ly.batch_norm(l_hid)
            for idx, conv_filter, filter_size, stride, pad in zip(
                    range(len(conv_filters)),
                    conv_filters,
                    conv_filter_sizes,
                    conv_strides,
                    conv_pads,
            ):
                l_hid = ly.Conv2DLayer(
                    l_hid,
                    num_filters=conv_filter,
                    filter_size=filter_size,
                    stride=(stride, stride),
                    pad=pad,
                    nonlinearity=hidden_nonlinearity,
                    name='conv_hidden_%d' % idx,
                    weight_normalization=weight_normalization,
                )
                if batch_normalization:
                    l_hid = ly.batch_norm(l_hid)

            if output_nonlinearity == ly.spatial_expected_softmax:
                assert not hidden_sizes
                assert output_dim == conv_filters[-1] * 2
                l_hid.nonlinearity = tf.identity
                l_out = ly.SpatialExpectedSoftmaxLayer(l_hid)
            else:
                l_hid = ly.flatten(l_hid, name='conv_flatten')
                for idx, hidden_size in enumerate(hidden_sizes):
                    l_hid = ly.DenseLayer(
                        l_hid,
                        num_units=hidden_size,
                        nonlinearity=hidden_nonlinearity,
                        name='hidden_%d' % idx,
                        w=hidden_w_init,
                        b=hidden_b_init,
                        weight_normalization=weight_normalization,
                    )
                    if batch_normalization:
                        l_hid = ly.batch_norm(l_hid)
                l_out = ly.DenseLayer(
                    l_hid,
                    num_units=output_dim,
                    nonlinearity=output_nonlinearity,
                    name='output',
                    w=output_w_init,
                    b=output_b_init,
                    weight_normalization=weight_normalization,
                )
                if batch_normalization:
                    l_out = ly.batch_norm(l_out)
            self._l_in = l_in
            self._l_out = l_out
            # self._input_var = l_in.input_var

        LayersPowered.__init__(self, l_out)
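A hedged construction sketch using only arguments from the signature above; the class name `ConvNetwork` is inferred from the variable scope, and all dimensions are illustrative:

    import tensorflow as tf

    # Hypothetical instantiation: two conv layers, then one fc layer. Inputs
    # are fed flattened; the constructor reshapes them back to
    # (width, height, channel) before the convolutions, per the docstring.
    conv_net = ConvNetwork(
        input_shape=(32, 32, 3),
        output_dim=4,
        conv_filters=(16, 32),          # output channels per conv layer
        conv_filter_sizes=(3, 3),
        conv_strides=(1, 2),
        conv_pads=('SAME', 'SAME'),
        hidden_sizes=(64, ),
        hidden_nonlinearity=tf.nn.relu,
        output_nonlinearity=None,
    )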
Example #4
File: network.py Project: xht033/garage
    def __init__(self,
                 input_shape,
                 extra_input_shape,
                 output_dim,
                 hidden_sizes,
                 conv_filters,
                 conv_filter_sizes,
                 conv_strides,
                 conv_pads,
                 name=None,
                 extra_hidden_sizes=None,
                 hidden_w_init=ly.XavierUniformInitializer(),
                 hidden_b_init=tf.zeros_initializer(),
                 output_w_init=ly.XavierUniformInitializer(),
                 output_b_init=tf.zeros_initializer(),
                 hidden_nonlinearity=tf.nn.relu,
                 output_nonlinearity=None,
                 input_var=None,
                 input_layer=None):
        Serializable.quick_init(self, locals())

        if extra_hidden_sizes is None:
            extra_hidden_sizes = []

        with tf.variable_scope(name, 'ConvMergeNetwork'):

            input_flat_dim = np.prod(input_shape)
            extra_input_flat_dim = np.prod(extra_input_shape)
            total_input_flat_dim = input_flat_dim + extra_input_flat_dim

            if input_layer is None:
                l_in = ly.InputLayer(shape=(None, total_input_flat_dim),
                                     input_var=input_var,
                                     name='input')
            else:
                l_in = input_layer

            l_conv_in = ly.reshape(ly.SliceLayer(l_in,
                                                 indices=slice(input_flat_dim),
                                                 name='conv_slice'),
                                   ([0], ) + input_shape,
                                   name='conv_reshaped')
            l_extra_in = ly.reshape(ly.SliceLayer(l_in,
                                                  indices=slice(
                                                      input_flat_dim, None),
                                                  name='extra_slice'),
                                    ([0], ) + extra_input_shape,
                                    name='extra_reshaped')

            l_conv_hid = l_conv_in
            for idx, conv_filter, filter_size, stride, pad in zip(
                    range(len(conv_filters)),
                    conv_filters,
                    conv_filter_sizes,
                    conv_strides,
                    conv_pads,
            ):
                l_conv_hid = ly.Conv2DLayer(
                    l_conv_hid,
                    num_filters=conv_filter,
                    filter_size=filter_size,
                    stride=(stride, stride),
                    pad=pad,
                    nonlinearity=hidden_nonlinearity,
                    name='conv_hidden_%d' % idx,
                )

            l_extra_hid = l_extra_in
            for idx, hidden_size in enumerate(extra_hidden_sizes):
                l_extra_hid = ly.DenseLayer(
                    l_extra_hid,
                    num_units=hidden_size,
                    nonlinearity=hidden_nonlinearity,
                    name='extra_hidden_%d' % idx,
                    w=hidden_w_init,
                    b=hidden_b_init,
                )

            l_joint_hid = ly.concat(
                [ly.flatten(l_conv_hid, name='conv_hidden_flat'), l_extra_hid],
                name='joint_hidden')

            for idx, hidden_size in enumerate(hidden_sizes):
                l_joint_hid = ly.DenseLayer(
                    l_joint_hid,
                    num_units=hidden_size,
                    nonlinearity=hidden_nonlinearity,
                    name='joint_hidden_%d' % idx,
                    w=hidden_w_init,
                    b=hidden_b_init,
                )
            l_out = ly.DenseLayer(
                l_joint_hid,
                num_units=output_dim,
                nonlinearity=output_nonlinearity,
                name='output',
                w=output_w_init,
                b=output_b_init,
            )
            self._l_in = l_in
            self._l_out = l_out

            LayersPowered.__init__(self, [l_out], input_layers=[l_in])
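A hedged construction sketch; `ConvMergeNetwork` is inferred from the variable scope, and the dimensions are illustrative:

    # Hypothetical instantiation: one flat input vector holds both a flattened
    # 32x32x3 image and an 8-dimensional extra input; the constructor slices
    # it into the conv branch and the dense "extra" branch, then merges them.
    net = ConvMergeNetwork(
        input_shape=(32, 32, 3),
        extra_input_shape=(8, ),
        output_dim=4,
        hidden_sizes=(64, ),
        conv_filters=(16, 32),
        conv_filter_sizes=(3, 3),
        conv_strides=(1, 2),
        conv_pads=('SAME', 'SAME'),
        extra_hidden_sizes=(32, ),
    )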
Example #5
File: network.py Project: xht033/garage
    def __init__(self,
                 input_shape,
                 output_dim,
                 hidden_dim,
                 name=None,
                 hidden_nonlinearity=tf.nn.relu,
                 output_w_init=ly.XavierUniformInitializer(),
                 recurrent_nonlinearity=tf.nn.sigmoid,
                 recurrent_w_x_init=ly.XavierUniformInitializer(),
                 recurrent_w_h_init=ly.OrthogonalInitializer(),
                 lstm_layer_cls=ly.LSTMLayer,
                 output_nonlinearity=None,
                 input_var=None,
                 input_layer=None,
                 forget_bias=1.0,
                 use_peepholes=False,
                 layer_args=None):
        with tf.variable_scope(name, 'LSTMNetwork'):
            if input_layer is None:
                l_in = ly.InputLayer(shape=(None, None) + input_shape,
                                     input_var=input_var,
                                     name='input')
            else:
                l_in = input_layer
            l_step_input = ly.InputLayer(shape=(None, ) + input_shape,
                                         name='step_input')
            # contains previous hidden and cell state
            l_step_prev_state = ly.InputLayer(shape=(None, hidden_dim * 2),
                                              name='step_prev_state')
            if layer_args is None:
                layer_args = dict()
            l_lstm = lstm_layer_cls(l_in,
                                    num_units=hidden_dim,
                                    hidden_nonlinearity=hidden_nonlinearity,
                                    gate_nonlinearity=recurrent_nonlinearity,
                                    hidden_init_trainable=False,
                                    name='lstm_layer',
                                    forget_bias=forget_bias,
                                    cell_init_trainable=False,
                                    w_x_init=recurrent_w_x_init,
                                    w_h_init=recurrent_w_h_init,
                                    use_peepholes=use_peepholes,
                                    **layer_args)
            l_lstm_flat = ly.ReshapeLayer(l_lstm,
                                          shape=(-1, hidden_dim),
                                          name='lstm_flat')
            l_output_flat = ly.DenseLayer(l_lstm_flat,
                                          num_units=output_dim,
                                          nonlinearity=output_nonlinearity,
                                          w=output_w_init,
                                          name='output_flat')
            l_output = ly.OpLayer(
                l_output_flat,
                op=lambda flat_output, l_input: tf.reshape(
                    flat_output,
                    tf.stack(
                        (tf.shape(l_input)[0], tf.shape(l_input)[1], -1))),
                shape_op=lambda flat_output_shape, l_input_shape:
                (l_input_shape[0], l_input_shape[1], flat_output_shape[-1]),
                extras=[l_in],
                name='output')
            l_step_state = l_lstm.get_step_layer(l_step_input,
                                                 l_step_prev_state,
                                                 name='step_state')
            l_step_hidden = ly.SliceLayer(l_step_state,
                                          indices=slice(hidden_dim),
                                          name='step_hidden')
            l_step_cell = ly.SliceLayer(l_step_state,
                                        indices=slice(hidden_dim, None),
                                        name='step_cell')
            l_step_output = ly.DenseLayer(l_step_hidden,
                                          num_units=output_dim,
                                          nonlinearity=output_nonlinearity,
                                          w=l_output_flat.w,
                                          b=l_output_flat.b,
                                          name='step_output')

            self._l_in = l_in
            self._hid_init_param = l_lstm.h0
            self._cell_init_param = l_lstm.c0
            self._l_lstm = l_lstm
            self._l_out = l_output
            self._l_step_input = l_step_input
            self._l_step_prev_state = l_step_prev_state
            self._l_step_hidden = l_step_hidden
            self._l_step_cell = l_step_cell
            self._l_step_state = l_step_state
            self._l_step_output = l_step_output
            self._hidden_dim = hidden_dim
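A hedged construction sketch; `LSTMNetwork` is inferred from the variable scope, and the dimensions are illustrative:

    # Hypothetical instantiation: a single LSTM layer over variable-length
    # sequences of 16-dimensional inputs. Note that step_prev_state above is
    # hidden_dim * 2 wide because it concatenates hidden and cell state.
    lstm_net = LSTMNetwork(
        input_shape=(16, ),
        output_dim=4,
        hidden_dim=32,
        forget_bias=1.0,        # bias added to the forget gate
        use_peepholes=False,
    )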
Example #6
File: network.py Project: xht033/garage
    def __init__(self,
                 input_shape,
                 output_dim,
                 hidden_dim,
                 name=None,
                 hidden_nonlinearity=tf.nn.relu,
                 output_w_init=ly.XavierUniformInitializer(),
                 recurrent_nonlinearity=tf.nn.sigmoid,
                 recurrent_w_x_init=ly.XavierUniformInitializer(),
                 recurrent_w_h_init=ly.OrthogonalInitializer(),
                 gru_layer_cls=ly.GRULayer,
                 output_nonlinearity=None,
                 input_var=None,
                 input_layer=None,
                 layer_args=None):
        with tf.variable_scope(name, 'GRUNetwork'):
            if input_layer is None:
                l_in = ly.InputLayer(shape=(None, None) + input_shape,
                                     input_var=input_var,
                                     name='input')
            else:
                l_in = input_layer
            l_step_input = ly.InputLayer(shape=(None, ) + input_shape,
                                         name='step_input')
            l_step_prev_state = ly.InputLayer(shape=(None, hidden_dim),
                                              name='step_prev_state')
            if layer_args is None:
                layer_args = dict()
            l_gru = gru_layer_cls(l_in,
                                  num_units=hidden_dim,
                                  hidden_nonlinearity=hidden_nonlinearity,
                                  gate_nonlinearity=recurrent_nonlinearity,
                                  hidden_init_trainable=False,
                                  w_x_init=recurrent_w_x_init,
                                  w_h_init=recurrent_w_h_init,
                                  name='gru',
                                  **layer_args)
            l_gru_flat = ly.ReshapeLayer(l_gru,
                                         shape=(-1, hidden_dim),
                                         name='gru_flat')
            l_output_flat = ly.DenseLayer(l_gru_flat,
                                          num_units=output_dim,
                                          nonlinearity=output_nonlinearity,
                                          w=output_w_init,
                                          name='output_flat')
            l_output = ly.OpLayer(
                l_output_flat,
                op=lambda flat_output, l_input: tf.reshape(
                    flat_output,
                    tf.stack(
                        (tf.shape(l_input)[0], tf.shape(l_input)[1], -1))),
                shape_op=lambda flat_output_shape, l_input_shape:
                (l_input_shape[0], l_input_shape[1], flat_output_shape[-1]),
                extras=[l_in],
                name='output')
            l_step_state = l_gru.get_step_layer(l_step_input,
                                                l_step_prev_state,
                                                name='step_state')
            l_step_hidden = l_step_state
            l_step_output = ly.DenseLayer(l_step_hidden,
                                          num_units=output_dim,
                                          nonlinearity=output_nonlinearity,
                                          w=l_output_flat.w,
                                          b=l_output_flat.b,
                                          name='step_output')

            self._l_in = l_in
            self._hid_init_param = l_gru.h0
            self._l_gru = l_gru
            self._l_out = l_output
            self._l_step_input = l_step_input
            self._l_step_prev_state = l_step_prev_state
            self._l_step_hidden = l_step_hidden
            self._l_step_state = l_step_state
            self._l_step_output = l_step_output
            self._hidden_dim = hidden_dim
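A hedged construction sketch; `GRUNetwork` is inferred from the variable scope (it matches the class used by the policies in examples #2 and #7), and the dimensions are illustrative:

    import tensorflow as tf

    # Hypothetical instantiation: unlike the LSTM variant, the GRU carries a
    # single hidden state, so step_prev_state above is only hidden_dim wide.
    gru_net = GRUNetwork(
        input_shape=(16, ),
        output_dim=4,
        hidden_dim=32,
        hidden_nonlinearity=tf.nn.relu,
        output_nonlinearity=None,
    )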
Example #7
    def __init__(self,
                 env_spec,
                 name='CategoricalGRUPolicy',
                 hidden_dim=32,
                 hidden_nonlinearity=tf.nn.tanh,
                 recurrent_nonlinearity=tf.nn.sigmoid,
                 recurrent_w_x_init=L.XavierUniformInitializer(),
                 recurrent_w_h_init=L.OrthogonalInitializer(),
                 output_nonlinearity=tf.nn.softmax,
                 output_w_init=L.XavierUniformInitializer(),
                 feature_network=None,
                 state_include_action=True,
                 gru_layer_cls=L.GRULayer):
        """
        :param env_spec: A spec for the env.
        :param hidden_dim: dimension of hidden layer
        :param hidden_nonlinearity: nonlinearity used for each hidden layer
        :return:
        """
        assert isinstance(env_spec.action_space, akro.Discrete)

        self._prob_network_name = 'prob_network'
        with tf.variable_scope(name, 'CategoricalGRUPolicy'):
            Serializable.quick_init(self, locals())
            super(CategoricalGRUPolicy, self).__init__(env_spec)

            obs_dim = env_spec.observation_space.flat_dim
            action_dim = env_spec.action_space.flat_dim

            if state_include_action:
                input_dim = obs_dim + action_dim
            else:
                input_dim = obs_dim

            l_input = L.InputLayer(shape=(None, None, input_dim), name='input')

            if feature_network is None:
                feature_dim = input_dim
                l_flat_feature = None
                l_feature = l_input
            else:
                feature_dim = feature_network.output_layer.output_shape[-1]
                l_flat_feature = feature_network.output_layer
                l_feature = L.OpLayer(
                    l_flat_feature,
                    extras=[l_input],
                    name='reshape_feature',
                    op=lambda flat_feature, input: tf.reshape(
                        flat_feature,
                        tf.stack([
                            tf.shape(input)[0],
                            tf.shape(input)[1], feature_dim
                        ])),
                    shape_op=lambda _, input_shape:
                    (input_shape[0], input_shape[1], feature_dim))

            prob_network = GRUNetwork(
                input_shape=(feature_dim, ),
                input_layer=l_feature,
                output_dim=env_spec.action_space.n,
                hidden_dim=hidden_dim,
                hidden_nonlinearity=hidden_nonlinearity,
                recurrent_nonlinearity=recurrent_nonlinearity,
                recurrent_w_x_init=recurrent_w_x_init,
                recurrent_w_h_init=recurrent_w_h_init,
                output_nonlinearity=output_nonlinearity,
                output_w_init=output_w_init,
                gru_layer_cls=gru_layer_cls,
                name=self._prob_network_name)

            self.prob_network = prob_network
            self.feature_network = feature_network
            self.l_input = l_input
            self.state_include_action = state_include_action

            flat_input_var = tf.placeholder(
                dtype=tf.float32, shape=(None, input_dim), name='flat_input')
            if feature_network is None:
                feature_var = flat_input_var
            else:
                with tf.name_scope('feature_network', values=[flat_input_var]):
                    feature_var = L.get_output(
                        l_flat_feature,
                        {feature_network.input_layer: flat_input_var})

            with tf.name_scope(self._prob_network_name, values=[feature_var]):
                out_prob_step, out_prob_hidden = L.get_output(
                    [
                        prob_network.step_output_layer,
                        prob_network.step_hidden_layer
                    ], {prob_network.step_input_layer: feature_var})
                out_prob_step = tf.identity(out_prob_step, 'prob_step_output')
                out_prob_hidden = tf.identity(out_prob_hidden,
                                              'prob_step_hidden')

            self.f_step_prob = tensor_utils.compile_function(
                [flat_input_var, prob_network.step_prev_state_layer.input_var],
                [out_prob_step, out_prob_hidden])

            self.input_dim = input_dim
            self.action_dim = action_dim
            self.hidden_dim = hidden_dim
            self.name = name

            self.prev_actions = None
            self.prev_hiddens = None
            self.dist = RecurrentCategorical(env_spec.action_space.n)

            out_layers = [prob_network.output_layer]
            if feature_network is not None:
                out_layers.append(feature_network.output_layer)

            LayersPowered.__init__(self, out_layers)
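A minimal usage sketch, analogous to example #2; `env` is an assumed garage environment and only arguments from the signature above are used:

    # Hypothetical instantiation; env.spec must have an akro.Discrete action
    # space, as the assert above requires.
    policy = CategoricalGRUPolicy(
        env_spec=env.spec,
        hidden_dim=32,
        state_include_action=True,  # append the previous action to the input
    )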