def __init__(self,
             env_spec,
             name='GaussianGRUPolicy',
             hidden_dim=32,
             hidden_nonlinearity=tf.tanh,
             recurrent_nonlinearity=tf.nn.sigmoid,
             recurrent_w_x_init=L.XavierUniformInitializer(),
             recurrent_w_h_init=L.OrthogonalInitializer(),
             output_nonlinearity=None,
             output_w_init=L.XavierUniformInitializer(),
             feature_network=None,
             state_include_action=True,
             gru_layer_cls=L.GRULayer,
             learn_std=True,
             init_std=1.0,
             std_share_network=False):
    """
    :param env_spec: A spec for the env.
    :param hidden_dim: dimension of hidden layer
    :param hidden_nonlinearity: nonlinearity used for each hidden layer
    """
    assert isinstance(env_spec.action_space, Box)

    self._mean_network_name = 'mean_network'
    self._std_network_name = 'std_network'

    with tf.variable_scope(name, 'GaussianGRUPolicy'):
        Serializable.quick_init(self, locals())
        super(GaussianGRUPolicy, self).__init__(env_spec)

        obs_dim = env_spec.observation_space.flat_dim
        action_dim = env_spec.action_space.flat_dim

        if state_include_action:
            input_dim = obs_dim + action_dim
        else:
            input_dim = obs_dim

        l_input = L.InputLayer(shape=(None, None, input_dim), name='input')

        if feature_network is None:
            feature_dim = input_dim
            l_flat_feature = None
            l_feature = l_input
        else:
            feature_dim = feature_network.output_layer.output_shape[-1]
            l_flat_feature = feature_network.output_layer
            l_feature = L.OpLayer(
                l_flat_feature,
                extras=[l_input],
                name='reshape_feature',
                op=lambda flat_feature, input: tf.reshape(
                    flat_feature,
                    tf.stack([
                        tf.shape(input)[0],
                        tf.shape(input)[1], feature_dim
                    ])),
                shape_op=lambda _, input_shape:
                (input_shape[0], input_shape[1], feature_dim))

        if std_share_network:
            # A single network outputs both heads; its 2 * action_dim
            # output is split into mean and log std slices below.
            mean_network = GRUNetwork(
                input_shape=(feature_dim, ),
                input_layer=l_feature,
                output_dim=2 * action_dim,
                hidden_dim=hidden_dim,
                hidden_nonlinearity=hidden_nonlinearity,
                recurrent_nonlinearity=recurrent_nonlinearity,
                recurrent_w_x_init=recurrent_w_x_init,
                recurrent_w_h_init=recurrent_w_h_init,
                output_nonlinearity=output_nonlinearity,
                output_w_init=output_w_init,
                gru_layer_cls=gru_layer_cls,
                name='gru_mean_network')

            l_mean = L.SliceLayer(mean_network.output_layer,
                                  slice(action_dim),
                                  name='mean_slice')
            l_step_mean = L.SliceLayer(mean_network.step_output_layer,
                                       slice(action_dim),
                                       name='step_mean_slice')
            l_log_std = L.SliceLayer(mean_network.output_layer,
                                     slice(action_dim, 2 * action_dim),
                                     name='log_std_slice')
            l_step_log_std = L.SliceLayer(mean_network.step_output_layer,
                                          slice(action_dim, 2 * action_dim),
                                          name='step_log_std_slice')
        else:
            mean_network = GRUNetwork(
                input_shape=(feature_dim, ),
                input_layer=l_feature,
                output_dim=action_dim,
                hidden_dim=hidden_dim,
                hidden_nonlinearity=hidden_nonlinearity,
                recurrent_nonlinearity=recurrent_nonlinearity,
                recurrent_w_x_init=recurrent_w_x_init,
                recurrent_w_h_init=recurrent_w_h_init,
                output_nonlinearity=output_nonlinearity,
                output_w_init=output_w_init,
                gru_layer_cls=gru_layer_cls,
                name='gru_mean_network')

            l_mean = mean_network.output_layer
            l_step_mean = mean_network.step_output_layer

            # The log std is a standalone (optionally trainable) parameter,
            # shared between the full-sequence and step graphs.
            l_log_std = L.ParamLayer(
                mean_network.input_layer,
                num_units=action_dim,
                param=tf.constant_initializer(np.log(init_std)),
                name='output_log_std',
                trainable=learn_std)
            l_step_log_std = L.ParamLayer(
                mean_network.step_input_layer,
                num_units=action_dim,
                param=l_log_std.param,
                name='step_output_log_std',
                trainable=learn_std)

        self.mean_network = mean_network
        self.feature_network = feature_network
        self.l_input = l_input
        self.state_include_action = state_include_action

        flat_input_var = tf.placeholder(dtype=tf.float32,
                                        shape=(None, input_dim),
                                        name='flat_input')
        if feature_network is None:
            feature_var = flat_input_var
        else:
            feature_var = L.get_output(
                l_flat_feature,
                {feature_network.input_layer: flat_input_var})

        with tf.name_scope(self._mean_network_name):
            out_step_mean, out_step_hidden_mean = L.get_output(
                [l_step_mean, mean_network.step_hidden_layer],
                {mean_network.step_input_layer: feature_var})
            out_step_mean = tf.identity(out_step_mean, 'step_mean')
            out_step_hidden_mean = tf.identity(out_step_hidden_mean,
                                               'step_hidden_mean')

        with tf.name_scope(self._std_network_name):
            out_step_log_std = L.get_output(
                l_step_log_std,
                {mean_network.step_input_layer: feature_var})
            out_step_log_std = tf.identity(out_step_log_std, 'step_log_std')

        self.f_step_mean_std = tensor_utils.compile_function(
            [
                flat_input_var,
                mean_network.step_prev_state_layer.input_var,
            ], [out_step_mean, out_step_log_std, out_step_hidden_mean])

        self.l_mean = l_mean
        self.l_log_std = l_log_std

        self.input_dim = input_dim
        self.action_dim = action_dim
        self.hidden_dim = hidden_dim

        self.prev_actions = None
        self.prev_hiddens = None
        self.dist = RecurrentDiagonalGaussian(action_dim)
        self.name = name

        out_layers = [l_mean, l_log_std, l_step_log_std]
        if feature_network is not None:
            out_layers.append(feature_network.output_layer)

        LayersPowered.__init__(self, out_layers)
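A minimal usage sketch for the policy above, assuming a garage-style env wrapper (`TfEnv`) and a Gym environment with a `Box` action space; the environment name and session setup are illustrative, not part of this module:

import gym
import tensorflow as tf

# Hypothetical setup: 'Pendulum-v0' stands in for any continuous-control env.
env = TfEnv(gym.make('Pendulum-v0'))
policy = GaussianGRUPolicy(env_spec=env.spec,
                           hidden_dim=32,
                           state_include_action=True)

with tf.Session() as sess:
    sess.run(tf.global_variables_initializer())
    policy.reset()      # clears prev_actions and prev_hiddens
    obs = env.reset()
    # One recurrent step: internally evaluates f_step_mean_std and samples
    # from the diagonal Gaussian parameterized by (mean, log_std).
    action, agent_info = policy.get_action(obs)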
def __init__(self,
             input_shape,
             output_dim,
             hidden_dim,
             name=None,
             hidden_nonlinearity=tf.nn.relu,
             output_w_init=ly.XavierUniformInitializer(),
             recurrent_nonlinearity=tf.nn.sigmoid,
             recurrent_w_x_init=ly.XavierUniformInitializer(),
             recurrent_w_h_init=ly.OrthogonalInitializer(),
             gru_layer_cls=ly.GRULayer,
             output_nonlinearity=None,
             input_var=None,
             input_layer=None,
             layer_args=None):
    with tf.variable_scope(name, 'GRUNetwork'):
        if input_layer is None:
            l_in = ly.InputLayer(shape=(None, None) + input_shape,
                                 input_var=input_var,
                                 name='input')
        else:
            l_in = input_layer
        l_step_input = ly.InputLayer(shape=(None, ) + input_shape,
                                     name='step_input')
        l_step_prev_state = ly.InputLayer(shape=(None, hidden_dim),
                                          name='step_prev_state')
        if layer_args is None:
            layer_args = dict()
        l_gru = gru_layer_cls(l_in,
                              num_units=hidden_dim,
                              hidden_nonlinearity=hidden_nonlinearity,
                              gate_nonlinearity=recurrent_nonlinearity,
                              hidden_init_trainable=False,
                              w_x_init=recurrent_w_x_init,
                              w_h_init=recurrent_w_h_init,
                              name='gru',
                              **layer_args)
        l_gru_flat = ly.ReshapeLayer(l_gru,
                                     shape=(-1, hidden_dim),
                                     name='gru_flat')
        l_output_flat = ly.DenseLayer(l_gru_flat,
                                      num_units=output_dim,
                                      nonlinearity=output_nonlinearity,
                                      w=output_w_init,
                                      name='output_flat')
        l_output = ly.OpLayer(
            l_output_flat,
            op=lambda flat_output, l_input: tf.reshape(
                flat_output,
                tf.stack((tf.shape(l_input)[0], tf.shape(l_input)[1], -1))),
            shape_op=lambda flat_output_shape, l_input_shape:
            (l_input_shape[0], l_input_shape[1], flat_output_shape[-1]),
            extras=[l_in],
            name='output')
        l_step_state = l_gru.get_step_layer(l_step_input,
                                            l_step_prev_state,
                                            name='step_state')
        l_step_hidden = l_step_state
        l_step_output = ly.DenseLayer(l_step_hidden,
                                      num_units=output_dim,
                                      nonlinearity=output_nonlinearity,
                                      w=l_output_flat.w,
                                      b=l_output_flat.b,
                                      name='step_output')

        self._l_in = l_in
        self._hid_init_param = l_gru.h0
        self._l_gru = l_gru
        self._l_out = l_output
        self._l_step_input = l_step_input
        self._l_step_prev_state = l_step_prev_state
        self._l_step_hidden = l_step_hidden
        self._l_step_state = l_step_state
        self._l_step_output = l_step_output
        self._hidden_dim = hidden_dim
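GRUNetwork builds two weight-sharing graphs: a full-sequence graph (`output_layer`) over inputs shaped (batch, time, features), and a single-step graph (`step_output_layer`, `step_hidden_layer`) that the policies above drive during rollouts. A sketch of wiring the step graph, with placeholder tensors as assumed stand-ins:

import tensorflow as tf

net = GRUNetwork(input_shape=(4, ), output_dim=2, hidden_dim=8,
                 name='gru_net_demo')

step_obs = tf.placeholder(tf.float32, shape=(None, 4))
prev_h = tf.placeholder(tf.float32, shape=(None, 8))

# The step graph maps (observation, previous hidden) to (output, new hidden).
step_out, step_hidden = ly.get_output(
    [net.step_output_layer, net.step_hidden_layer],
    {net.step_input_layer: step_obs,
     net.step_prev_state_layer: prev_h})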
def __init__(self,
             input_shape,
             output_dim,
             hidden_dim,
             name=None,
             hidden_nonlinearity=tf.nn.relu,
             output_w_init=ly.XavierUniformInitializer(),
             recurrent_nonlinearity=tf.nn.sigmoid,
             recurrent_w_x_init=ly.XavierUniformInitializer(),
             recurrent_w_h_init=ly.OrthogonalInitializer(),
             lstm_layer_cls=ly.LSTMLayer,
             output_nonlinearity=None,
             input_var=None,
             input_layer=None,
             forget_bias=1.0,
             use_peepholes=False,
             layer_args=None):
    with tf.variable_scope(name, 'LSTMNetwork'):
        if input_layer is None:
            l_in = ly.InputLayer(shape=(None, None) + input_shape,
                                 input_var=input_var,
                                 name='input')
        else:
            l_in = input_layer
        l_step_input = ly.InputLayer(shape=(None, ) + input_shape,
                                     name='step_input')
        # contains previous hidden and cell state
        l_step_prev_state = ly.InputLayer(shape=(None, hidden_dim * 2),
                                          name='step_prev_state')
        if layer_args is None:
            layer_args = dict()
        l_lstm = lstm_layer_cls(l_in,
                                num_units=hidden_dim,
                                hidden_nonlinearity=hidden_nonlinearity,
                                gate_nonlinearity=recurrent_nonlinearity,
                                hidden_init_trainable=False,
                                name='lstm_layer',
                                forget_bias=forget_bias,
                                cell_init_trainable=False,
                                w_x_init=recurrent_w_x_init,
                                w_h_init=recurrent_w_h_init,
                                use_peepholes=use_peepholes,
                                **layer_args)
        l_lstm_flat = ly.ReshapeLayer(l_lstm,
                                      shape=(-1, hidden_dim),
                                      name='lstm_flat')
        l_output_flat = ly.DenseLayer(l_lstm_flat,
                                      num_units=output_dim,
                                      nonlinearity=output_nonlinearity,
                                      w=output_w_init,
                                      name='output_flat')
        l_output = ly.OpLayer(
            l_output_flat,
            op=lambda flat_output, l_input: tf.reshape(
                flat_output,
                tf.stack((tf.shape(l_input)[0], tf.shape(l_input)[1], -1))),
            shape_op=lambda flat_output_shape, l_input_shape:
            (l_input_shape[0], l_input_shape[1], flat_output_shape[-1]),
            extras=[l_in],
            name='output')
        l_step_state = l_lstm.get_step_layer(l_step_input,
                                             l_step_prev_state,
                                             name='step_state')
        l_step_hidden = ly.SliceLayer(l_step_state,
                                      indices=slice(hidden_dim),
                                      name='step_hidden')
        l_step_cell = ly.SliceLayer(l_step_state,
                                    indices=slice(hidden_dim, None),
                                    name='step_cell')
        l_step_output = ly.DenseLayer(l_step_hidden,
                                      num_units=output_dim,
                                      nonlinearity=output_nonlinearity,
                                      w=l_output_flat.w,
                                      b=l_output_flat.b,
                                      name='step_output')

        self._l_in = l_in
        self._hid_init_param = l_lstm.h0
        self._cell_init_param = l_lstm.c0
        self._l_lstm = l_lstm
        self._l_out = l_output
        self._l_step_input = l_step_input
        self._l_step_prev_state = l_step_prev_state
        self._l_step_hidden = l_step_hidden
        self._l_step_cell = l_step_cell
        self._l_step_state = l_step_state
        self._l_step_output = l_step_output
        self._hidden_dim = hidden_dim
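Unlike the GRU case, the LSTM step state packs the hidden and cell states into one tensor, so `step_prev_state` is `2 * hidden_dim` wide and the two `SliceLayer`s above split it back apart. A small illustration of that packing convention (shapes assumed from the constructor):

import numpy as np

hidden_dim = 8
# Initial step state: [h, c] concatenated along the feature axis.
prev_state = np.zeros((1, 2 * hidden_dim), dtype=np.float32)
h = prev_state[:, :hidden_dim]   # what l_step_hidden selects
c = prev_state[:, hidden_dim:]   # what l_step_cell selects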
def __init__(self,
             env_spec,
             name='CategoricalGRUPolicy',
             hidden_dim=32,
             hidden_nonlinearity=tf.nn.tanh,
             recurrent_nonlinearity=tf.nn.sigmoid,
             recurrent_w_x_init=L.XavierUniformInitializer(),
             recurrent_w_h_init=L.OrthogonalInitializer(),
             output_nonlinearity=tf.nn.softmax,
             output_w_init=L.XavierUniformInitializer(),
             feature_network=None,
             state_include_action=True,
             gru_layer_cls=L.GRULayer):
    """
    :param env_spec: A spec for the env.
    :param hidden_dim: dimension of hidden layer
    :param hidden_nonlinearity: nonlinearity used for each hidden layer
    """
    assert isinstance(env_spec.action_space, akro.Discrete)

    self._prob_network_name = 'prob_network'

    with tf.variable_scope(name, 'CategoricalGRUPolicy'):
        Serializable.quick_init(self, locals())
        super(CategoricalGRUPolicy, self).__init__(env_spec)

        obs_dim = env_spec.observation_space.flat_dim
        action_dim = env_spec.action_space.flat_dim

        if state_include_action:
            input_dim = obs_dim + action_dim
        else:
            input_dim = obs_dim

        l_input = L.InputLayer(shape=(None, None, input_dim), name='input')

        if feature_network is None:
            feature_dim = input_dim
            l_flat_feature = None
            l_feature = l_input
        else:
            feature_dim = feature_network.output_layer.output_shape[-1]
            l_flat_feature = feature_network.output_layer
            l_feature = L.OpLayer(
                l_flat_feature,
                extras=[l_input],
                name='reshape_feature',
                op=lambda flat_feature, input: tf.reshape(
                    flat_feature,
                    tf.stack([
                        tf.shape(input)[0],
                        tf.shape(input)[1], feature_dim
                    ])),
                shape_op=lambda _, input_shape:
                (input_shape[0], input_shape[1], feature_dim))

        # A single GRU network outputs the per-action probabilities.
        prob_network = GRUNetwork(
            input_shape=(feature_dim, ),
            input_layer=l_feature,
            output_dim=env_spec.action_space.n,
            hidden_dim=hidden_dim,
            hidden_nonlinearity=hidden_nonlinearity,
            recurrent_nonlinearity=recurrent_nonlinearity,
            recurrent_w_x_init=recurrent_w_x_init,
            recurrent_w_h_init=recurrent_w_h_init,
            output_nonlinearity=output_nonlinearity,
            output_w_init=output_w_init,
            gru_layer_cls=gru_layer_cls,
            name=self._prob_network_name)

        self.prob_network = prob_network
        self.feature_network = feature_network
        self.l_input = l_input
        self.state_include_action = state_include_action

        flat_input_var = tf.placeholder(dtype=tf.float32,
                                        shape=(None, input_dim),
                                        name='flat_input')
        if feature_network is None:
            feature_var = flat_input_var
        else:
            with tf.name_scope('feature_network', values=[flat_input_var]):
                feature_var = L.get_output(
                    l_flat_feature,
                    {feature_network.input_layer: flat_input_var})

        with tf.name_scope(self._prob_network_name, values=[feature_var]):
            out_prob_step, out_prob_hidden = L.get_output(
                [
                    prob_network.step_output_layer,
                    prob_network.step_hidden_layer
                ], {prob_network.step_input_layer: feature_var})
            out_prob_step = tf.identity(out_prob_step, 'prob_step_output')
            out_prob_hidden = tf.identity(out_prob_hidden,
                                          'prob_step_hidden')

        self.f_step_prob = tensor_utils.compile_function(
            [flat_input_var, prob_network.step_prev_state_layer.input_var],
            [out_prob_step, out_prob_hidden])

        self.input_dim = input_dim
        self.action_dim = action_dim
        self.hidden_dim = hidden_dim
        self.name = name

        self.prev_actions = None
        self.prev_hiddens = None
        self.dist = RecurrentCategorical(env_spec.action_space.n)

        out_layers = [prob_network.output_layer]
        if feature_network is not None:
            out_layers.append(feature_network.output_layer)

        LayersPowered.__init__(self, out_layers)
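The discrete analogue of the usage sketch above, assuming a Gym env with a `Discrete` action space wrapped in a garage-style `TfEnv` (names illustrative):

import gym
import tensorflow as tf

# Hypothetical setup: 'CartPole-v1' stands in for any discrete-action env.
env = TfEnv(gym.make('CartPole-v1'))
policy = CategoricalGRUPolicy(env_spec=env.spec, hidden_dim=32)

with tf.Session() as sess:
    sess.run(tf.global_variables_initializer())
    policy.reset()
    obs = env.reset()
    # One step: f_step_prob returns per-action probabilities and the new
    # hidden state; the action is sampled from the categorical distribution.
    action, agent_info = policy.get_action(obs)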