def __init__(self, mu=0.0, sigmasq=1.0, lb=-np.inf, ub=np.inf):
    """
    Gaussian truncated to the interval [lb, ub].

    All arguments are broadcast against one another, so scalars and
    arrays of compatible shapes may be freely mixed.

    :param mu:      mean(s) of the underlying Gaussian
    :param sigmasq: variance(s); every entry must be nonnegative
    :param lb:      lower truncation bound(s)
    :param ub:      upper truncation bound(s)
    """
    # Bug fix: the original `assert np.all(sigmasq) >= 0` compared the
    # *boolean* result of np.all(...) to 0, which is always true.  The
    # intent is to validate every variance entry individually.
    assert np.all(np.asarray(sigmasq) >= 0)

    # Broadcast arrays to be of the same shape
    self.mu, self.sigmasq, self.lb, self.ub = \
        np.broadcast_arrays(mu, sigmasq, lb, ub)

    # Precompute the normalizers: the standardized bounds and Z, the
    # probability mass the untruncated Gaussian assigns to [lb, ub].
    self.zlb = (self.lb - self.mu) / np.sqrt(self.sigmasq)
    self.zub = (self.ub - self.mu) / np.sqrt(self.sigmasq)
    self.Z = normal_cdf(self.zub) - normal_cdf(self.zlb)
def mf_expected_log_notp(self):
    """
    Compute the expected log probability of no connection under Z.

    :return: log Phi(0; mf_mu_Z, 1), the log probability that the
             latent variable falls below zero at the mean-field mean.
    """
    phi0 = normal_cdf(0, mu=self.mf_mu_Z, sigma=1.0)
    return np.log(phi0)
def mf_expected_log_p(self):
    """
    Compute the expected log probability of a connection under Z.

    :return: log(1 - Phi(0; mf_mu_Z, 1))
    """
    mu = self.mf_mu_Z
    # Evaluate log(1 - Phi(0)) via log1p for numerical stability when
    # Phi(0) is tiny; this matches the log1p treatment already used in
    # mf_expected_log_p_mc and replaces a plain log(1 - u) that could
    # lose precision.
    return np.log1p(-normal_cdf(0, mu=mu, sigma=1.0))
def mf_expected_log_p_mc(self, N_samples=100):
    """
    Monte Carlo estimate of E[log p] and E[log(1-p)] under the
    mean-field posterior over mu.

    Samples mu from a Gaussian with the mean-field mean and variance,
    then averages the log connection probabilities over samples.

    :param N_samples: number of Monte Carlo samples (default 100;
                      previously hard-coded)
    :return: tuple (E[log p], E[log(1-p)]), each of shape (N, N)
    """
    E_mu = self.mf_expected_mu()[None, :, :]
    std_mu = np.sqrt(self.mf_variance_mu()[None, :, :])
    mus = E_mu + std_mu * np.random.randn(N_samples, self.N, self.N)

    # p = 1 - Phi(0; mu, 1).  Work in terms of u = Phi(0; mu, 1) so both
    # logs stay stable when p ~= 1 (i.e. u ~= 0):
    #   log p     = log(1 - u) -> log1p(-u)
    #   log(1 - p) = log u
    # Bug fix: the original computed log u as log1p(-1 + u); forming
    # -1 + u discards u's low-order bits and underflows to -inf once
    # u < eps/2, whereas np.log(u) is exact for all positive u.
    u = normal_cdf(0, mu=mus, sigma=1.0)
    log_ps = np.log1p(-u)
    log_notps = np.log(u)

    return log_ps.mean(0), log_notps.mean(0)