def __init__(self, ModelCls=None, model_kwargs=None, initial_model_state_dict=None): """ Arguments are saved but no model initialization occurs. Args: ModelCls: The model class to be used. model_kwargs (optional): Any keyword arguments to pass when instantiating the model. initial_model_state_dict (optional): Initial model parameter values. """ save__init__args(locals()) self.model = None # type: torch.nn.Module self.shared_model = None self.distribution = None self.device = torch.device("cpu") self._mode = None if self.model_kwargs is None: self.model_kwargs = dict() # The rest only for async operations: self._rw_lock = RWLock() self._send_count = mp.RawValue("l", 0) self._recv_count = 0
def __init__(self, ModelCls=None, model_kwargs=None, initial_model_state_dict=None): save__init__args(locals()) self.model = None # type: torch.nn.Module self.shared_model = None self.distribution = None self.device = torch.device("cpu") # 指定在CPU上运行 self._mode = None if self.model_kwargs is None: self.model_kwargs = dict() # The rest only for async operations: self._rw_lock = RWLock() # 读写锁 self._send_count = mp.RawValue("l", 0) # RawValue(typecode_or_type, *args) 返回从共享内存中分配的ctypes对象,l代表ctypes.c_long self._recv_count = 0
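A minimal, self-contained illustration of the mp.RawValue behavior described in the comment above; the worker function and counter name are made up for this sketch and are not part of the source.

import multiprocessing as mp


def _worker(counter):
    # RawValue gives a raw ctypes object in shared memory: no built-in lock,
    # so parent and child processes see the same underlying c_long.
    counter.value += 1


if __name__ == "__main__":
    counter = mp.RawValue("l", 0)  # "l" -> ctypes.c_long, initialized to 0.
    p = mp.Process(target=_worker, args=(counter,))
    p.start()
    p.join()
    print(counter.value)  # 1 -- the child's increment is visible to the parent.
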
def __init__(self, ModelCls=None, model_kwargs=None, initial_model_state_dict=None): save__init__args(locals()) self.model = None # type: torch.nn.Module self.shared_model = None self.distribution = None self.device = torch.device("cpu") self._mode = None if self.model_kwargs is None: self.model_kwargs = dict() # The rest only for async operations: self._rw_lock = RWLock() self._send_count = mp.RawValue("l", 0) self._recv_count = 0
def __init__(self, ModelCls=None, model_kwargs=None, initial_model_state_dict=None): """ Parameters ---------- param ModelCls: the constructor to build self.model which serves as policy network. """ save__init__args(locals()) self.model = None # type: torch.nn.Module self.shared_model = None self.distribution = None self.device = torch.device("cpu") self._mode = None if self.model_kwargs is None: self.model_kwargs = dict() # The rest only for async operations: self._rw_lock = RWLock() self._send_count = mp.RawValue("l", 0) self._recv_count = 0
def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) self.async_t = mp.RawValue("l") # Type c_long. self.rw_lock = RWLock() self._async_buffer_full = mp.RawValue(ctypes.c_bool, False)
def __init__(self, *args, **kwargs): kwargs.pop("shared_memory") super().__init__(*args, shared_memory=True, **kwargs) self.async_t = mp.RawValue("l") # Type c_long. self.rw_lock = RWLock()