Esempio n. 1
0
 def __init__(self, mu, C):
     """
     :param mu: mean; np.array of shape (Ndim,)
     :param C: covariance; np.array of shape (Ndim, Ndim)
     :raises ParameterError: if C is non-square or incompatible with mu
     """
     if C.shape[0] != C.shape[1]:
         raise ParameterError("C has non-square shape {}".format(C.shape))
     if C.shape[1] != mu.shape[0]:
         raise ParameterError(
             "mu and C have incompatible shapes {}, {}".format(
                 mu.shape, C.shape))
     # BUG fix: N was never defined (NameError); it is the problem dimension.
     N = mu.shape[0]
     self.Ndim = N
     # Integer count of free parameters: N means + N(N+1)/2 covariance
     # entries.  N*(N+1) is always even, so // is exact.
     self.Npars = N + N * (N + 1) // 2
     # BUG fix: np.zeros/np.eye take the shape positionally; there is no
     # `size` keyword (both calls raised TypeError).  The attributes are
     # initialized to defaults (zero mean, identity covariance) rather
     # than to the validated mu/C -- presumably filled in later by an
     # unpack() step; kept as in the original.
     self.mu = np.zeros((N, ))
     self.C = np.eye(N)
     self.L = np.sqrt(self.C)
     self.theta_cached = None
Esempio n. 2
0
    def __init__(self, model, proposal, theta0, betas=None, Pswap=0.1):
        """
        :param model: Model class instance to sample
        :param proposal: Proposal class instance to use within chains
        :param theta0: initial parameter guess
        :param betas: optional np.array of shape (Ntemps,)
        :param Pswap: probability per unit time of proposing a swap
        :raises ParameterError: if betas or Pswap is invalid
        """

        # Initialize betas.  If the user doesn't provide a temperature
        # ladder, initialize a default geometric ladder.
        if betas is None:
            self.betas = 0.5**np.arange(5)
        # BUG fix: np.array is a factory function, not a type --
        # isinstance(x, np.array) raised TypeError.  Test against
        # np.ndarray instead.
        elif isinstance(betas, np.ndarray) and len(betas.shape) == 1:
            if betas.shape[0] > 20:
                print("PTSampler: warning -- more than 20 temperatures"
                      " -- watch out, this could be *really* slow")
            # BUG fix: the sorted ladder was computed but never used; use
            # it (descending, so the cold beta=1 chain comes first).
            sorted_betas = np.array(sorted(betas))[::-1]
            print("Initializing temperature ladder with betas =",
                  sorted_betas)
            self.betas = sorted_betas
        else:
            # BUG fix: previously fell through silently, leaving
            # self.betas unset and crashing later with AttributeError.
            raise ParameterError("betas must be None or a 1-d np.ndarray")

        # Other sanity checks
        if not (Pswap > 0 and Pswap < 1):
            raise ParameterError("Pswap must be a number between 0 and 1")
        self.Pswap = Pswap

        # Set up a ladder of Samplers with different Models
        print("PTSampler: betas =", self.betas)
        self.samplers = [ ]
        for beta in self.betas:
            submodel = TemperedModel(model, beta)
            self.samplers.append(Sampler(submodel, proposal, theta0))
Esempio n. 3
0
 def unpack(self, theta):
     """
     Unpack the flat parameter vector into (mu, sigma) attributes,
     skipping the work if theta matches the cached value.

     :param theta: np.array of shape (2,)
     :raises ParameterError: if theta does not have shape (2,)
     """
     # BUG fix: `theta == self.theta_cached` is an elementwise array
     # comparison, so using it directly in `if` raises ValueError for
     # arrays of length > 1.  Reduce with np.all(), matching the other
     # unpack() implementation in this file.
     if np.all(theta == self.theta_cached):
         return
     if theta.shape != (2, ):
         raise ParameterError("theta should have shape (2,)")
     self.mu, self.sigma = theta
     self.theta_cached = theta
Esempio n. 4
0
 def __init__(self, mu, C):
     """
     :param mu: mean; np.array of shape (Ndim,)
     :param C: covariance; np.array of shape (Ndim, Ndim)
     :raises ParameterError: if C is non-square or incompatible with mu
     """
     # Promote scalars / nested lists to proper arrays before validating
     mu = np.atleast_1d(mu)
     C = np.atleast_2d(C)
     if C.shape[0] != C.shape[1]:
         raise ParameterError("C has non-square shape {}".format(C.shape))
     if C.shape[1] != mu.shape[0]:
         raise ParameterError("mu and C have incompatible shapes {}, {}"
                              .format(mu.shape, C.shape))
     self.Ndim = len(mu)
     self.mu = mu
     # Cache the Cholesky factor of C and log|C| for later density
     # evaluations; log|C| = 2 * sum(log(diag(L))).
     chol = np.linalg.cholesky(C)
     self.L = chol
     self.logdetC = 2 * np.log(np.diag(chol)).sum()
Esempio n. 5
0
 def __init__(self, base_model, beta):
     """
     :param base_model: Model class instance to wrap
     :param beta: initial beta for this model
     :raises ParameterError: if beta lies outside [0, 1]
     """
     # Accept only inverse temperatures in the closed interval [0, 1]
     if 0 <= beta <= 1:
         self.base_model = base_model
         self.beta = beta
     else:
         raise ParameterError("beta = {} must be a number between 0 and 1".format(beta))
Esempio n. 6
0
 def __init__(self, mu, C, data):
     """
     :param mu: init guess mean; np.array of shape (Ndim,)
     :param C: init guess covariance; np.array of shape (Ndim, Ndim)
     :param data: observations; np.array with shape (Nsamples, Ndim)
     :raises ParameterError: if the shapes of mu, C, data are inconsistent
     """
     # Sanity checks for input
     mu = np.atleast_1d(mu)
     C = np.atleast_2d(C)
     self.Ndim = N = mu.shape[0]
     if C.shape[0] != C.shape[1]:
         raise ParameterError("C has non-square shape {}".format(C.shape))
     if C.shape[1] != N:
         raise ParameterError("mu and C have incompatible shapes {}, {}"
                              .format(mu.shape, C.shape))
     if data.shape[1] != self.Ndim:
         # BUG fix: self.mu is never assigned in this constructor, so
         # formatting self.mu.shape raised AttributeError instead of the
         # intended ParameterError; format the local mu instead.
         raise ParameterError("data and mu have incompatible shapes {}, {}"
                              .format(data.shape, mu.shape))

     # Integer count of free parameters (N*(N+1) is even, so // is exact)
     self.Npars = N + N*(N+1)//2
     self._theta_cached = None       # for lazy evaluation
     self.data = data
Esempio n. 7
0
 def unpack(self, theta):
     """
     Unpack the flat parameter vector theta into mean self.mu and
     (symmetric) covariance self.C, and cache a Cholesky factor self.L.
     Skips the work if theta matches the cached value.

     theta layout: Ndim mean entries, then the lower triangle of C
     stored column by column.

     :param theta: np.array of shape (Ndim + Ndim*(Ndim+1)/2,)
     :raises ParameterError: if theta has the wrong length
     """
     # This will work, technically, but autograd won't like it.
     # BUG fix: the bare array comparison in `if` raises ValueError for
     # arrays; reduce with np.all() as the other unpack() here does.
     if np.all(theta == self.theta_cached):
         return
     n = self.Ndim
     # BUG fix: the original compared the shape *tuple* against a number,
     # which is never equal, so the check always raised.
     if len(theta) != n + n*(n+1)//2:
         raise ParameterError("theta, mu and C have incompatible shapes")
     self.mu = theta[:n]
     # BUG fix: bare `Ndim` and `C` were NameErrors; qualify with self.
     # Allocate C fresh so unpack() does not depend on a pre-existing
     # self.C attribute.
     self.C = np.zeros((n, n))
     k = n
     for i in range(n):
         self.C[i, i:] = self.C[i:, i] = theta[k:k + (n - i)]
         k += n - i
     # BUG fix: np.eye takes the matrix dimension, not a float scale.
     # Build a small diagonal jitter proportional to the typical variance
     # to keep the Cholesky factorization stable.
     eps = 1e-10 * np.median(np.diag(self.C)) * np.eye(n)
     self.L = np.linalg.cholesky(self.C + eps)
     self.theta_cached = theta
Esempio n. 8
0
 def unpack(self, theta):
     """
     Refresh mu, the Cholesky factor L, the covariance C and derived
     log-quantities from the flat parameter vector theta.  A cached copy
     of theta lets repeated calls with the same vector return early.

     :param theta: np.array of shape (Ndim + Ndim*(Ndim+1)/2,)
     :raises ParameterError: if theta has the wrong length
     """
     # This will work, technically, but autograd probably won't like it
     if np.all(theta == self._theta_cached):
         return
     dim = self.Ndim
     if len(theta) != dim + dim*(dim+1)/2:
         raise ParameterError("theta, mu and C have incompatible shapes")
     # The covariance is carried as its lower-triangular Cholesky factor,
     # in the hopes of improving numerical stability.  theta stores the
     # columns of L in order; filling row r of the transpose is the same
     # as filling column r of L, so build the transpose and flip it.
     self.mu = theta[:dim]
     upper = np.zeros((dim, dim))
     offset = dim
     for row in range(dim):
         count = dim - row
         upper[row, row:] = theta[offset:offset + count]
         offset += count
     self.L = upper.T
     self.C = np.dot(self.L, self.L.T)
     self.logdetC = 2*np.sum(np.log(np.diag(self.L)))
     self.logNd2pi = dim*np.log(2*np.pi)
     self._theta_cached = theta
Esempio n. 9
0
 def load_data(self, data):
     """
     Attach observations to this model.

     :param data: np.array of shape (Nsamples,)
     :raises ParameterError: if data is not 1-dimensional
     """
     shape = data.shape
     if len(shape) != 1:
         raise ParameterError("data needs to be 1-dimensional")
     self.data = data
Esempio n. 10
0
 def propose(self, theta):
     """
     Propose a Gaussian random-walk step: theta' = theta + L @ xi with
     xi ~ N(0, I).

     :param theta: current state; promoted to np.array of shape (Ndim,)
     :return: (proposed theta, 0.0) -- the 0.0 is presumably the log
         proposal-density correction, which vanishes for a symmetric
         walk (TODO confirm against the Sampler that consumes it).
     :raises ParameterError: if theta's length does not match L
     """
     theta = np.atleast_1d(theta)
     ncols = self.L.shape[1]
     if ncols != theta.shape[0]:
         raise ParameterError("theta and L have incompatible shapes")
     xi = np.random.normal(size=theta.shape)
     step = np.dot(self.L, xi)
     return theta + step, 0.0
Esempio n. 11
0
 def load_data(self, data):
     """
     Attach observations to this model.

     :param data: np.array whose leading dimension must equal self.Ndim
     :raises ParameterError: if data's leading dimension != self.Ndim
     """
     if data.shape[0] != self.Ndim:
         # BUG fix: the message formatted the undefined local name `mu`,
         # so the error path raised NameError instead of ParameterError.
         # Report the expected shape via self.Ndim, which this method
         # already reads.
         raise ParameterError(
             "data and mu have incompatible shapes {}, {}".format(
                 data.shape, (self.Ndim, )))
     self.data = data