import functools

from pyro.params.param_store import _MODULE_NAMESPACE_DIVIDER, ParamStoreDict  # noqa: F401

# the global pyro stack
_PYRO_STACK = []

# the global ParamStore
_PYRO_PARAM_STORE = ParamStoreDict()


class _DimAllocator(object):
    """
    Dimension allocator for internal use by :class:`plate`.
    There is a single global instance.

    Note that dimensions are indexed from the right, e.g. -1, -2.
    """
    def __init__(self):
        self._stack = []  # in reverse orientation of log_prob.shape

    def allocate(self, name, dim):
        """
        Allocate a dimension to an :class:`plate` with given name.
        Dim should be either None for automatic allocation or a negative
        integer for manual allocation.
        """
        if name in self._stack:
            raise ValueError('duplicate plate "{}"'.format(name))
        if dim is None:
            # Automatically designate the rightmost available dim for allocation.
            dim = -1
            while -dim <= len(self._stack) and self._stack[-1 - dim] is not None:
                dim -= 1
        elif dim >= 0:
            raise ValueError('Expected dim < 0 to index from the right, actual {}'.format(dim))

        # Allocate the requested dim, growing the stack with empty slots as needed.
        while dim < -len(self._stack):
            self._stack.append(None)
        if self._stack[-1 - dim] is not None:
            raise ValueError('plates "{}" and "{}" collide at dim={}'.format(
                name, self._stack[-1 - dim], dim))
        self._stack[-1 - dim] = name
        return dim
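
# Illustrative sketch (not part of the original module): dims are handed out
# from the right, skipping dims that are already taken. The plate names below
# are hypothetical.
#
#     _demo = _DimAllocator()
#     _demo.allocate("outer", None)  # -> -1, the rightmost free dim
#     _demo.allocate("inner", None)  # -> -2, the next free dim to the left
#     _demo.allocate("fixed", -5)    # -> -5, manual allocation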
from inspect import isclass

import torch
from torch.autograd import Variable
from torch.nn import Parameter

import pyro
from pyro.optim.optim import PyroOptim
from pyro.params.param_store import ParamStoreDict

# global map of params for now
_param_store = ParamStoreDict()

# set pyro.param function to _param_store.get_param
param = _param_store.get_param

# set global tensor type (cpu vs. gpu); cpu by default
_global_tensor_type = 'cpu'


def set_cuda():
    global _global_tensor_type
    _global_tensor_type = 'cuda'
    torch.set_default_tensor_type('torch.cuda.FloatTensor')


def set_cpu():
    global _global_tensor_type
    _global_tensor_type = 'cpu'
    torch.set_default_tensor_type('torch.FloatTensor')


def device(x):
    """
    Move a tensor or Variable onto the globally selected device:
    returns ``x.cuda()`` when the global tensor type is 'cuda' and
    CUDA is available, otherwise ``x.cpu()``.
    """
    if torch.cuda.is_available() and _global_tensor_type == 'cuda':
        return x.cuda()
    return x.cpu()
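
# Illustrative sketch (not part of the original module): routing tensors
# through the global device switch. `torch.zeros(3)` is only an example input.
#
#     set_cuda()                   # select GPU tensors (if CUDA is available)
#     x = device(torch.zeros(3))   # -> CUDA tensor when enabled, else CPU
#     set_cpu()                    # switch back to CPU tensors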