def _get_mc_stats(self, op):
    loc = _np.empty(self._samples.shape[0:2], dtype=_np.complex128)
    for i, sample in enumerate(self._samples):
        _local_values(op, self._machine, sample, out=loc[i])

    # notice that loc.T is passed to statistics, since that function assumes
    # that the first index is the batch index.
    return loc, _statistics(loc.T)
def _get_mc_superop_stats(self, op):
    samples_r = self._samples.reshape((-1, self._samples.shape[-1]))
    loc = _local_values(op, self._machine, samples_r).reshape(
        self._samples.shape[0:2]
    )

    # notice that loc.T is passed to statistics, since that function assumes
    # that the first index is the batch index.
    return loc, _statistics(abs(loc.T) ** 2)
def _get_mc_obs_stats(self, op):
    if not self._obs_samples_valid:
        self.sweep_diagonal()

    samples_r = self._samples_obs.reshape((-1, self._samples_obs.shape[-1]))
    loc = _local_values(op, self._machine, samples_r).reshape(
        self._samples_obs.shape[0:2]
    )

    # notice that loc.T is passed to statistics, since that function assumes
    # that the first index is the batch index.
    return loc, _statistics(loc.T)
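# The three helpers above all rely on the convention that the statistics
# routine expects its input as (batch, samples), i.e. each row is one Markov
# chain.  The sketch below only illustrates that convention; it is not the
# actual `_statistics` implementation, and the shapes and the crude
# error-of-mean are assumptions made for the example.
import numpy as np


def _toy_statistics(loc_batched):
    """Mean and a rough error-of-mean for local values of shape (n_chains, n_samples)."""
    chain_means = loc_batched.mean(axis=1)  # one estimate per chain
    mean = chain_means.mean()
    error_of_mean = chain_means.std() / np.sqrt(loc_batched.shape[0])
    return mean, error_of_mean


# `loc` above is filled as (n_samples_per_chain, n_chains), hence the transpose:
# mean, err = _toy_statistics(loc.T)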
def estimate(op):
    lvs = _local_values(op, psi, samples)
    stats = _statistics(lvs.T)

    if compute_gradients:
        samples_r = samples.reshape((-1, samples.shape[-1]))
        eloc_r = (lvs - _mean(lvs)).reshape(-1, 1)
        grad = sampler.machine.vector_jacobian_prod(
            samples_r,
            eloc_r / n_samples,
        )
        return stats, grad
    else:
        return stats
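# For orientation, a minimal sketch of the quantity the centered
# vector-Jacobian product above estimates.  The explicit log-derivative matrix
# O and the helper below are hypothetical and only materialize what is
# otherwise done matrix-free: O[i, k] = d log(psi(sample_i)) / d p_k, and the
# gradient of the expectation value is approximated by the centered estimator
#     grad_k ~ (1/N) * sum_i conj(O[i, k]) * (E_loc[i] - mean(E_loc)).
import numpy as np


def toy_gradient(log_derivatives, local_values):
    """Centered gradient estimator from an explicit Jacobian of log psi.

    log_derivatives: complex array of shape (n_samples, n_parameters)
    local_values:    complex array of shape (n_samples,)
    """
    n_samples = local_values.shape[0]
    centered = local_values - local_values.mean()
    # Up to layout and conjugation conventions, this plays the role of
    # sampler.machine.vector_jacobian_prod(samples_r, eloc_r / n_samples).
    return log_derivatives.conj().T @ (centered / n_samples)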