Example no. 1
    def __init__(
        self, spec: EnvSpec, feats: FeatureStack, init_param_kwargs: Optional[dict] = None, use_cuda: bool = False
    ):
        """
        Constructor

        :param spec: specification of environment
        :param feats: list of feature functions
        :param init_param_kwargs: additional keyword arguments for the policy parameter initialization
        :param use_cuda: `True` to move the module to the GPU, `False` (default) to use the CPU
        """
        # Reject anything that is not a FeatureStack before doing any base-class setup
        if not isinstance(feats, FeatureStack):
            raise pyrado.TypeErr(given=feats, expected_type=FeatureStack)

        super().__init__(spec, use_cuda)  # Policy's constructor

        self._feats = feats
        self.num_active_feat = feats.get_num_feat(spec.obs_space.flat_dim)
        # Bias-free linear layer mapping the active features to the action dimensions
        self.net = nn.Linear(self.num_active_feat, spec.act_space.flat_dim, bias=False)

        # Custom initialization runs after PyTorch's default network parameter initialization
        if init_param_kwargs is None:
            init_param_kwargs = {}
        self.init_param(None, **init_param_kwargs)
        self.to(self.device)
Example no. 2
    def __init__(
        self,
        spec: EnvSpec,
        feats: FeatureStack,
        init_param_kwargs: Optional[dict] = None,
        use_cuda: bool = False,
    ):
        """
        Constructor

        :param spec: specification of environment
        :param feats: list of feature functions
        :param init_param_kwargs: additional keyword arguments for the policy parameter initialization
        :param use_cuda: `True` to move the module to the GPU, `False` (default) to use the CPU
        """
        # Validate the features before running any base-class setup, so a bad argument fails fast
        if not isinstance(feats, FeatureStack):
            raise pyrado.TypeErr(given=feats, expected_type=FeatureStack)

        # Call Policy's constructor
        super().__init__(spec, use_cuda)

        # Store inputs
        self._num_act = spec.act_space.flat_dim
        self._num_obs = spec.obs_space.flat_dim

        self._feats = feats
        self.num_active_feat = feats.get_num_feat(self._num_obs)
        # Bias-free linear layer mapping the active features to the action dimensions
        self.net = nn.Linear(self.num_active_feat, self._num_act, bias=False)

        # Call custom initialization function after PyTorch network parameter initialization
        init_param_kwargs = init_param_kwargs if init_param_kwargs is not None else dict()
        self.init_param(None, **init_param_kwargs)
        self.to(self.device)