Code Example #1
    @classmethod
    def get_variables(cls, all_units, vf_share_layers=False, name=None):
        # Assumes `import tensorflow as tf` and the project's Dense layer
        # and normc_initializer helper are in scope.
        name = name or "keras_mlp_policy"
        # A policy network's final layer gets a small kernel init (0.01);
        # a value network's final layer gets 1.0.
        if "policy" in name:
            last_kernel_init_value = 0.01
        elif "vf" in name:
            last_kernel_init_value = 1.0
        else:
            raise NotImplementedError
        variables = {}
        with tf.name_scope(name):
            # all_units includes the input layer, so consecutive pairs give
            # each dense layer's (size_in, size_out).
            for i, (size_in, size_out) in \
                    enumerate(zip(all_units, all_units[1:])):
                name = f"dense_{i}"
                variables[name] = \
                    Dense.get_variables(
                        size_in,
                        size_out,
                        name=name,
                        kernel_initializer=(
                            normc_initializer(1.0) if i < len(all_units) - 2
                            else normc_initializer(last_kernel_init_value)))
            if vf_share_layers:
                # The value head branches off the last hidden layer; after
                # the loop, size_in is all_units[-2], that layer's size.
                name = "dense_vf"
                variables[name] = \
                    Dense.get_variables(
                        size_in,
                        1,
                        name=name,
                        kernel_initializer=normc_initializer(1.0))
        # tricky to remove the name count of the dummy instance
        # since the default name is used here
        # graph = tf.get_default_graph()
        # K.PER_GRAPH_LAYER_NAME_UIDS[graph][
        #     ("", dummy_instance.name)] -= 1
        return variables
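
A minimal usage sketch, assuming the enclosing class is named KerasMLP (the class name is not shown in the snippet, so it is an assumption):

# Hypothetical usage: an MLP with an 8-unit input, two 64-unit hidden
# layers, and a 4-unit policy output; all_units includes the input layer.
all_units = [8, 64, 64, 4]
variables = KerasMLP.get_variables(
    all_units, vf_share_layers=True, name="keras_mlp_policy")
# variables maps "dense_0" .. "dense_2" (plus "dense_vf" when layers are
# shared) to the variable containers returned by Dense.get_variables.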
Code Example #2
    @classmethod
    def get_variables(
            cls,
            latent_units,
            projection_kernel_init_value,
            # mlp_input_units,
            # mlp_units,
            # vf_share_layers=False
            name=None):
        name = name or "keras_maesn_policy"
        variables = {}
        with tf.name_scope(name):
            # A trainable latent embedding, initialized with the project's
            # normc_initializer at the given scale.
            variables["latent_z"] = tf.Variable(
                initial_value=normc_initializer(projection_kernel_init_value)(
                    shape=(latent_units, )),
                trainable=True,
                dtype=tf.float32,
                name="latent_z")
            # variables["cell"] = GRUCell.get_variables(
            #     rnn_input_units, rnn_units)
            # variables["projection"] = Dense.get_variables(
            #     rnn_units,
            #     rnn_output_units,
            #     name="dense_projection",
            #     kernel_initializer=normc_initializer(projection_kernel_init_value))
            # variables["mlp"] = KerasMLP.get_dummy_variables(
            #     [mlp_input_units + rnn_output_units] + mlp_units,
            #     vf_share_layers=vf_share_layers, **mlp_kwargs)

        # graph = tf.get_default_graph()
        # K.PER_GRAPH_LAYER_NAME_UIDS[graph][
        #     ("", dummy_instance.name)] -= 1
        return variables
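
A short sketch of how the returned dictionary might be used, assuming the enclosing class is named KerasMAESNPolicy (an assumption; the snippet does not show it):

# Hypothetical usage: create a 4-dimensional trainable latent vector.
variables = KerasMAESNPolicy.get_variables(
    latent_units=4, projection_kernel_init_value=0.01)
latent_z = variables["latent_z"]  # trainable tf.Variable of shape (4,)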
Code Example #3
File: keras_tesp.py  Project: zstbackcourt/tesp
    @classmethod
    def get_variables(cls,
                      rnn_input_units,
                      rnn_units,
                      rnn_output_units,
                      projection_kernel_init_value,
                      # mlp_input_units,
                      # mlp_units,
                      # vf_share_layers=False
                      name=None):
        name = name or "keras_tesp_policy"
        variables = {}
        with tf.name_scope(name):
            # GRU cell variables plus a dense projection from the RNN state
            # to the policy output space.
            variables["cell"] = GRUCell.get_variables(
                rnn_input_units, rnn_units)
            variables["projection"] = Dense.get_variables(
                rnn_units,
                rnn_output_units,
                name="dense_projection",
                kernel_initializer=normc_initializer(projection_kernel_init_value))
            # variables["mlp"] = KerasMLP.get_dummy_variables(
            #     [mlp_input_units + rnn_output_units] + mlp_units,
            #     vf_share_layers=vf_share_layers, **mlp_kwargs)

        # graph = tf.get_default_graph()
        # K.PER_GRAPH_LAYER_NAME_UIDS[graph][
        #     ("", dummy_instance.name)] -= 1
        return variables
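
A usage sketch under the same caveat; KerasTESPPolicy is an assumed class name:

# Hypothetical usage: GRU variables for 10-dim inputs and a 32-unit state,
# projected down to 4 outputs.
variables = KerasTESPPolicy.get_variables(
    rnn_input_units=10,
    rnn_units=32,
    rnn_output_units=4,
    projection_kernel_init_value=0.01)
# variables["cell"] holds the GRUCell variables; variables["projection"]
# holds the dense projection's kernel and bias.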
Code Example #4
    def __init__(self,
                 layer_units_exclude_first,
                 activation,
                 custom_params=None,
                 vf_share_layers=False,
                 name=None):
        """
            layer_units: list, a list of the number of units of all layers
                except the input layer
        """
        name = name or "keras_mlp_policy"
        if "policy" in name:
            last_kernel_init_value = 0.01
        elif "vf" in name:
            last_kernel_init_value = 0.01
        else:
            raise NotImplementedError
        # keras_models is assumed to be tensorflow.keras (the import is not
        # shown in the snippet).
        keras_models.Model.__init__(self, name=name)

        custom_params = custom_params or {}
        for i, size in enumerate(layer_units_exclude_first):
            name = f"dense_{i}"
            layer = Dense(
                size,
                custom_params=custom_params.get(name),
                activation=(activation if
                            i < len(layer_units_exclude_first) - 1 else None),
                kernel_initializer=(normc_initializer(1.0) if
                                    i < len(layer_units_exclude_first) - 1 else
                                    normc_initializer(last_kernel_init_value)),
                name=name)
            setattr(self, name, layer)
        if vf_share_layers:
            name = f"dense_vf"
            layer = Dense(1,
                          custom_params=custom_params.get(name),
                          activation=None,
                          kernel_initializer=normc_initializer(1.0),
                          name=name)
            setattr(self, name, layer)
        self._vf_share_layers = vf_share_layers
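
A construction sketch, assuming this constructor belongs to the KerasMLP model whose get_variables appears in Code Example #1 (the class name is an assumption):

# Hypothetical usage: two 64-unit tanh hidden layers, a 4-unit policy
# output, and a shared one-unit value head.
model = KerasMLP(
    layer_units_exclude_first=[64, 64, 4],
    activation="tanh",
    vf_share_layers=True,
    name="keras_mlp_policy")
# model.dense_0, model.dense_1, model.dense_2, and model.dense_vf are the
# Dense layers attached via setattr above.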