def update_displacements(self):
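     """
     Update the subject-wise displacement fields by Metropolis-Hastings.
     For each subject, the displacement blocks are visited in random
     order and updated with the proposal kernel selected by
     self.proposal ('prior', 'rand_walk', or a 'fixed' Gaussian proposal
     otherwise). The per-voxel count of mapped observations (self.N) is
     then recomputed.
     """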
     n, p = self.data.shape
     B = len(self.D.block)
     if self.proposal == 'prior':
         for i in xrange(n):
             for b in np.random.permutation(range(B)):
                 block = self.D.block[b]
                 A = self.update_block(i, b, 'prior', self.std)
     elif self.proposal == 'rand_walk':
         if np.isscalar(self.proposal_std):
             for i in xrange(n):
                 for b in np.random.permutation(range(B)):
                     block = self.D.block[b]
                     A = self.update_block(i, b, 'rand_walk', self.proposal_std)
         else:
             for i in xrange(n):
                 for b in np.random.permutation(range(B)):
                     block = self.D.block[b]
                     A = self.update_block(i, b, 'rand_walk', self.proposal_std[:, i, b])
     else:
         for i in xrange(n):
             for b in np.random.permutation(range(B)):
                 block = self.D.block[b]
                 A = self.update_block(i, b, 'fixed', self.proposal_std[:, i, b], self.proposal_mean[:, i, b])
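     # Recompute self.N, the number of observations mapped onto each
     # voxel after displacement.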
     self.N *= 0
     ones = np.ones((p, 1), float)
     for i in xrange(n):
         Ii = self.D.I[i]
         add_lines(ones, self.N.reshape(p, 1), Ii)
     if self.verbose:
         print "mean rejected displacements :", self.R.mean(axis=0)
def update_mean_effect(self, T=1.0):
     """
     Sample the voxel-wise mean effect self.m from its Gaussian
     conditional posterior given the individual effects and the
     region-level prior (self.m_mean, self.m_var). T is a temperature
     that inflates the conditional variance for simulated annealing
     (T=1 recovers the exact conditional posterior).
     """
     n, p = self.data.shape
     X_sum = np.zeros(p, float)
     if self.std is None:
         X_sum = self.X.sum(axis=0)
     else:
         # Accumulate each subject's effects onto their displaced voxel indices
         for i in xrange(n):
             Ii = self.D.I[i]
             XI = self.X[i].reshape(p, 1)
             add_lines(XI, X_sum.reshape(p, 1), Ii)
     for j in xrange(len(self.network)):
         L = np.where(self.labels == j)[0]
         # Tempered prior and residual variances for region j
         m_var = self.m_var[j] * T
         v = self.v[j] * T
         if self.std is None:
             tot_var = v + m_var * n
         else:
             tot_var = v + m_var * self.N[L]
         # Conjugate Gaussian conditional posterior of the mean effect
         cond_mean = (X_sum[L] * m_var + v * self.m_mean[j]) / tot_var
         cond_std = np.sqrt(v * m_var / tot_var)
         self.m[L] = cond_mean + np.random.randn(len(L)) * cond_std
def update_summary_statistics(self, w=1.0, update_spatial=True, mode='saem'):
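     """
     Compute the current sufficient statistics of the model (s1: sum of
     squared residuals, s2: sum of squared mean effects, s3: sum of mean
     effects in active regions, s4: squared norm of the displacements,
     s5: number of mapped observations, s6: region sizes) and, when
     mode == 'saem', update their running averages S1..S6 with
     stochastic-approximation step w. The posterior scale of the
     region-level mean variance is updated accordingly.
     """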
     n, p = self.data.shape
     if self.std is None:
         m = self.m
     else:
         m = self.m[self.D.I]
         if update_spatial:
             self.s4 = np.square(self.D.U).sum()
             if mode == 'saem':
                 self.S4 += w * (self.s4 - self.S4)
     # Voxel-wise squared residuals
     if self.vardata is None:
         SS = np.square(self.data - m)
     else:
         SS = np.square(self.X - m)
     if self.std is None:
         SS_sum = SS.sum(axis=0)
     else:
         SS_sum = np.zeros(p, float)
         for i in xrange(n):
             Ii = self.D.I[i]
             SSi = SS[i].reshape(p, 1)
             add_lines(SSi, SS_sum.reshape(p, 1), Ii)
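     # Per-region sufficient statistics: s1 = sum of squared residuals,
     # s2 = sum of squared mean effects, s3 = sum of mean effects
     # (active regions only), s5 = number of mapped observations,
     # s6 = region size.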
     for j in xrange(len(self.network)):
         L = np.where(self.labels == j)[0]
         self.s1[j] = SS_sum[L].sum()
         if self.labels_prior is not None:
             self.s6[j] = len(L)
         self.s2[j] = np.square(self.m[L]).sum()
         if self.network[j] == 1:
             self.s3[j] = self.m[L].sum()
         if update_spatial and self.std is not None:
             self.s5[j] = self.N[L].sum()
             if mode == 'saem':
                 self.S5 += w * (self.s5 - self.S5)
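     # Stochastic approximation (SAEM) update of the running statistics:
     # S <- S + w * (s - S), where w is the step size.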
     if mode == 'saem':
         self.S1 += w * (self.s1 - self.S1)
         self.S2 += w * (self.s2 - self.S2)
         self.S3 += w * (self.s3 - self.S3)
         if self.labels_prior is not None:
             self.S6 += w * (self.s6 - self.S6)
         size = self.S6
         sum_sq = self.S2
         m_sum = self.S3
     else:
         size = self.S6
         sum_sq = self.s2
         m_sum = self.s3
     # Update the posterior scale of the region-level mean variance
     # (m_var); it is used both to update the parameters and to compute
     # the conditional posterior.
     rate = self.m_mean_rate
     shape = self.m_var_shape
     scale = self.m_var_scale
     J = self.network == 1
     N1 = J.sum()
     if N1 > 0:
         post_rate = rate[J] + size[J]
         self.m_var_post_scale[J] = scale[J] + 0.5 * (sum_sq[J] - np.square(m_sum[J]) / post_rate)
     if N1 < len(self.network):
         self.m_var_post_scale[~J] = scale[~J] + 0.5 * sum_sq[~J]
def compute_log_voxel_likelihood(self, v=None, m_mean=None, m_var=None, return_SS=False):
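     """
     Compute the per-voxel marginal log-likelihood of the data, with the
     voxel mean effect integrated out under its Gaussian prior
     N(m_mean[label], m_var[label]). The parameters v, m_mean and m_var
     default to the current model values; if return_SS is True, the
     intermediate sufficient statistics are returned as well.
     """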
     if v is None:
         v = self.v
     if m_mean is None:
         m_mean = self.m_mean
     if m_var is None:
         m_var = self.m_var
     n, p = self.data.shape
     if self.std is None:
         N = n
         v_labels = v[self.labels]
         Z = self.data - m_mean[self.labels]
     else:
         N = self.N
         I = self.D.I
         v_labels = v[self.labels[I]]
         Z = self.data - m_mean[self.labels[I]]
     if self.vardata is None:
         tot_var = v_labels + np.zeros(self.data.shape, float)
     else:
         tot_var = v_labels + self.vardata
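     # Per-voxel sufficient statistics, summed over the observations
     # mapped onto each voxel: SS1 = sum 1/tot_var, SS2 = sum log(tot_var),
     # SS3 = sum Z**2/tot_var, SS4 = sum Z/tot_var.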
     if self.std is None:
         SS1 = (1 / tot_var).sum(axis=0)
         SS2 = np.log(tot_var).sum(axis=0)
         SS3 = (Z**2 / tot_var).sum(axis=0)
         SS4 = (Z / tot_var).sum(axis=0)
     else:
         SS1 = np.zeros(p, float)
         SS2 = np.zeros(p, float)
         SS3 = np.zeros(p, float)
         SS4 = np.zeros(p, float)
         for i in xrange(n):
             Ii = self.D.I[i]
             add_lines((1 / tot_var[i]).reshape(p, 1), SS1.reshape(p, 1), Ii)
             add_lines(np.log(tot_var[i]).reshape(p, 1), SS2.reshape(p, 1), Ii)
             add_lines((Z[i]**2 / tot_var[i]).reshape(p, 1), SS3.reshape(p, 1), Ii)
             add_lines((Z[i] / tot_var[i]).reshape(p, 1), SS4.reshape(p, 1), Ii)
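     # Gaussian marginal over the voxel mean effect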
     LL = - 0.5 * (N * np.log(2 * np.pi) + np.log(1 + m_var[self.labels] * SS1)
                   + SS2 + SS3 - SS4**2 / (1 / m_var[self.labels] + SS1))
     if return_SS:
         return LL, Z, tot_var, SS1, SS2, SS3, SS4
     else:
         return LL
def update_block_SA(self, i, b, T=1.0, proposal_std=None, verbose=False, reject_override=False, proposal='rand_walk', proposal_mean=None):
     """
     Metropolis update of displacement block b for subject (field) i
     within a simulated annealing scheme at temperature T, using a
     random-walk proposal kernel by default. Returns the log acceptance
     ratio.
     """
     if proposal_std is None:
         proposal_std = self.std
     block = self.D.block[b]
     if verbose:
         print 'sampling field', i, 'block', b
     # Propose new displacement
     U, V, L, W, I = self.D.sample(i, b, proposal, proposal_std * T, proposal_mean=proposal_mean)
     Uc = self.D.U[:, i, b].copy()
     p = self.data.shape[1]
     pL = len(L)
     if pL > 0:
         Ic = self.D.I[i, L].copy()
         J = np.unique(np.concatenate((I, Ic)))
         q = len(J)
         IJ = np.searchsorted(J, I)
         IJc = np.searchsorted(J, Ic)
         N = self.N[J].copy()
         Zc = self.Z[i,L].copy()
         tot_varc = self.tot_var[i,L].copy()
         SS1 = self.SS1[J].copy()
         SS2 = self.SS2[J].copy()
         SS3 = self.SS3[J].copy()
         SS4 = self.SS4[J].copy()
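         # Incrementally update the per-voxel counts and sufficient
         # statistics: remove the contribution of the currently displaced
         # voxels (indices IJc) and add that of the proposed ones (IJ).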
         ones = np.ones((len(L), 1), float)
         add_lines(-ones, N.reshape(q, 1), IJc)
         add_lines(ones, N.reshape(q, 1), IJ)
         Z = self.data[i,L] - self.m_mean[self.labels[I]]
         if self.vardata is None:
             tot_var = self.v[self.labels[I]] + np.zeros(len(L), float)
         else:
             tot_var = self.v[self.labels[I]] + self.vardata[i,L]
         add_lines(-(1.0 / tot_varc).reshape(pL, 1), SS1.reshape(q, 1), IJc)
         add_lines((1.0 / tot_var).reshape(pL, 1), SS1.reshape(q, 1), IJ)
         add_lines(-np.log(tot_varc).reshape(pL, 1), SS2.reshape(q, 1), IJc)
         add_lines(np.log(tot_var).reshape(pL, 1), SS2.reshape(q, 1), IJ)
         add_lines(-(Zc**2 / tot_varc).reshape(pL, 1), SS3.reshape(q, 1), IJc)
         add_lines((Z**2 / tot_var).reshape(pL, 1), SS3.reshape(q, 1), IJ)
         add_lines(-(Zc / tot_varc).reshape(pL, 1), SS4.reshape(q, 1), IJc)
         add_lines((Z / tot_var).reshape(pL, 1), SS4.reshape(q, 1), IJ)
         fc = self.log_voxel_likelihood[J]
         f = - 0.5 * (N * np.log(2 * np.pi)
                      + np.log(1 + self.m_var[self.labels[J]] * SS1)
                      + SS2 + SS3
                      - SS4**2 / (1 / self.m_var[self.labels[J]] + SS1))
     else:
         f = np.zeros(1)
         fc = np.zeros(1)
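     # Log acceptance ratio: change in the voxel-wise marginal
     # log-likelihood plus the Gaussian prior term on the block
     # displacement.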
     A = (f - fc).sum() + 0.5 * (Uc**2 - U**2).sum() / self.std**2
     self.R[i, b] = np.random.uniform() > np.exp(A / T)
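     # If the move is accepted, commit the proposed displacement and the
     # updated statistics.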
     if self.R[i, b] == 0 and not reject_override:
         self.D.U[:, i, b] = U
         self.D.V[:, i, block] = V
         if len(L) > 0:
             self.D.W[:, i, L] = W
             self.D.I[i, L] = I
             self.N[J] = N
             self.Z[i,L] = Z
             self.tot_var[i,L] = tot_var
             self.SS1[J] = SS1
             self.SS2[J] = SS2
             self.SS3[J] = SS3
             self.SS4[J] = SS4
             self.log_voxel_likelihood[J] = f
     return A