Example #1
import numpy as np
from scipy.stats import lognorm, norm


def update_hyper(n, pi, alpha, sigma, tau, u, modelparam, mcmcparam):
    abs_pi = pi.size

    if modelparam['estimate_alpha']:
        alpha_a = modelparam['alpha_a']
        alpha_b = modelparam['alpha_b']
        alpha = np.random.gamma(alpha_a + abs_pi,
                                1. / (alpha_b + ((u + tau) ** sigma
                                                 - tau ** sigma) / sigma))

    if modelparam['estimate_sigma']:
        # Metropolis-Hastings update for sigma with a lognormal random-walk
        # proposal on (1 - sigma)
        # std = np.sqrt(1. / 4.)
        std = 0.1
        prop_sigma = 1 - np.random.lognormal(np.log(1 - sigma), std)

        log_rate = log_density_sigma(alpha, prop_sigma, tau, u, abs_pi, n, pi) \
                   + lognorm.logpdf(1 - sigma, std, scale=(1 - prop_sigma)) \
                   - log_density_sigma(alpha, sigma, tau, u, abs_pi, n, pi) \
                   - lognorm.logpdf(1 - prop_sigma, std, scale=(1 - sigma))

        if np.isnan(log_rate):
            log_rate = -np.inf

        if np.isinf(prop_sigma):
            log_rate = -np.inf

        rate = min(1, np.exp(log_rate))

        if np.random.random() < rate:
            sigma = prop_sigma

    if modelparam['estimate_tau']:
        # Metropolis-Hastings update for tau with a Gaussian random walk on log(tau)
        tau_a = modelparam['tau_a']
        tau_b = modelparam['tau_b']
        std = np.sqrt(1. / 4.)
        logtau = np.log(tau)
        prop_logtau = np.random.normal(logtau, std)

        # compute acceptance probability
        log_rate = log_density_tau(prop_logtau, alpha, sigma, u, n, abs_pi, tau_a,
                                   tau_b) + norm.logpdf(logtau, prop_logtau, std) \
                   - log_density_tau(logtau, alpha, sigma, u, n, abs_pi, tau_a,
                                     tau_b) - norm.logpdf(prop_logtau, logtau, std)

        if np.isnan(log_rate):
            log_rate = -np.inf

        if np.isinf(np.exp(prop_logtau)) or np.isnan(np.exp(prop_logtau)):
            log_rate = -np.inf

        rate = min(1, np.exp(log_rate))

        if np.random.random() < rate:
            tau = np.exp(prop_logtau)

    return alpha, sigma, tau
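
A minimal sketch of how update_hyper might be driven from an outer sampler; the modelparam keys and starting values below are assumptions for illustration. With estimate_sigma and estimate_tau switched off, the helpers log_density_sigma and log_density_tau (defined elsewhere in the original module) are never called, so only the conjugate gamma update for alpha is exercised.

import numpy as np

# Hypothetical configuration; only the alpha update runs with these settings.
modelparam = {'estimate_alpha': True, 'alpha_a': 1.0, 'alpha_b': 1.0,
              'estimate_sigma': False, 'estimate_tau': False}
mcmcparam = {}

pi = np.ones(20)                  # current cluster sizes / weights
n = int(pi.sum())
alpha, sigma, tau, u = 1.0, 0.5, 1.0, 1.0

for _ in range(1000):
    alpha, sigma, tau = update_hyper(n, pi, alpha, sigma, tau, u,
                                     modelparam, mcmcparam)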
Example #2
def generate(max_time, n_sequences, filename='stationary_renewal'):
    times, nll = [], []

    for _ in range(n_sequences):
        # lognormal inter-event times with unit mean and standard deviation 6
        s = np.sqrt(np.log(6 * 6 + 1))
        mu = -s * s / 2
        tau = lognorm.rvs(s=s, scale=np.exp(mu), size=1000)

        lpdf = lognorm.logpdf(tau, s=s, scale=np.exp(mu))
        T = tau.cumsum()

        T = T[T < max_time]
        lpdf = lpdf[:len(T)]

        score = -np.sum(lpdf)

        times.append(T)
        nll.append(score)

    if filename is not None:
        mean_number_items = sum(len(t) for t in times) / len(times)
        nll = [n / mean_number_items for n in nll]
        # dataset_dir is a module-level output directory in the original code
        np.savez(f'{dataset_dir}/{filename}.npz', arrival_times=times, nll=nll,
                 t_max=max_time, mean_number_items=mean_number_items)
    else:
        return times
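
Passing filename=None avoids the module-level dataset_dir dependency and simply returns the simulated sequences; a minimal usage sketch:

import numpy as np
from scipy.stats import lognorm

sequences = generate(max_time=100.0, n_sequences=5, filename=None)
print(len(sequences), [len(t) for t in sequences])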
Example #3
def nll(arrival_times, std=6):
    """Negative log-likelihood of a renewal process.

    Conditional density f*(t) is lognormal with given std.

    """
    s = np.sqrt(np.log(std**2 + 1))
    mu = -0.5 * s * s
    inter_times = get_inter_times(arrival_times)
    log_probs = lognorm.logpdf(inter_times, s=s, scale=np.exp(mu))
    return -np.mean(log_probs)
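
nll relies on a get_inter_times helper that is not shown here. One plausible minimal version (an assumption, not the original implementation) takes the first arrival time followed by successive differences:

import numpy as np

def get_inter_times(arrival_times):
    # Inter-event times: the first arrival plus successive differences.
    arrival_times = np.asarray(arrival_times)
    return np.diff(arrival_times, prepend=0.0)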
Example #4
def log_liklihood(rich, M, a, b, sigma):
    p = 0

    #p-= np.sum(((b*np.log(M)+np.log(a)-np.log(rich))**2)/(2*sigma**2)+np.log(sigma*rich))
    #redefine A to be intercept at center rather than 0
    #p-= np.sum(((b*(np.log(M)-13.5)+np.log(a)-np.log(rich))**2)/(2*sigma**2)+np.log(sigma*rich))
    #p-= np.sum(((b*np.log(M)+a-np.log(rich))**2)/(2*sigma**2)+np.log(sigma*rich))#See Above
    #p-= np.sum(((b*np.log(M-13.5)+a-np.log(rich))**2)/(2*sigma**2)+np.log(sigma*rich))#See Above

    # lognormal likelihood for richness given mass: log(rich) has mean
    # b*(log(M) - offset) + log(a) and standard deviation sigma
    p += np.sum(lognorm.logpdf(rich, sigma,
                               scale=np.exp(b * (np.log(M) - offset) + np.log(a))))

    return p
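
offset is a module-level constant in the original source; the commented-out variants above suggest a pivot around 13.5. A hedged usage sketch with that assumption and made-up mass/richness values:

import numpy as np
from scipy.stats import lognorm

offset = 13.5                          # assumed pivot, following the variants above
M = np.array([5e13, 1e14, 3e14])       # made-up cluster masses
rich = np.array([30.0, 45.0, 80.0])    # made-up richness values
print(log_liklihood(rich, M, a=30.0, b=0.8, sigma=0.25))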
Example #5
def lnprior_lognorm(p):
    """
    Log-normal prior
    """
    # note - for parameter definitions see 
    # http://nbviewer.ipython.org/url/xweb.geos.ed.ac.uk/~jsteven5/blog/lognormal_distributions.ipynb
    mu = 7.3 # Mean of log(X)
    sigma = 0.9 # Standard deviation of log(X)
    shape = sigma # Scipy's shape parameter
    scale = np.exp(mu) # Scipy's scale parameter
    ret = lognorm.logpdf(p, shape, loc=0, scale=scale)
    ret = ret.sum()
    return ret
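
A quick sanity check of the prior at a few strictly positive values:

import numpy as np
from scipy.stats import lognorm

p = np.array([500.0, 1500.0, 3000.0])   # example parameter vector
print(lnprior_lognorm(p))               # sum of element-wise lognormal log-densities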
Example #6
def mcmc(tree, max_steps, chain):
    ne = 1
    # build the list of waiting times during which k lineages are present
    time_linage = list()
    internal_nodes_heights(tree.get_root(), time_linage)
    time_linage.append(0)
    time_linage.sort()
    for i in range(1, len(time_linage)):
        time_linage[i] = time_linage[i] - time_linage[i - 1]
    del time_linage[0]
    time_linage.reverse()
    for i in range(max_steps):
        u = np.random.uniform(-10, 10)  # random-walk proposal step
        ne2 = ne + u
        if ne2 > 0:  # negative proposals are handled by rejection
            likelihood1 = 0
            likelihood2 = 0
            # log prior densities (lognormal prior on the effective population size)
            prior1 = lognorm.logpdf(ne, s=1.25, scale=np.exp(3))
            prior2 = lognorm.logpdf(ne2, s=1.25, scale=np.exp(3))
            for k in range(2, len(tree.get_leaves())):  # time interval with k lineages
                tk = time_linage[k - 2]
                b = (k * (k - 1) * tk) / (2 * ne)
                c = (k * (k - 1) * tk) / (2 * ne2)
                # coalescent log-likelihood contribution for this interval
                likelihood1 += -np.log(ne) - b
                likelihood2 += -np.log(ne2) - c
            # unnormalized log posteriors and their difference
            post1 = likelihood1 + prior1
            post2 = likelihood2 + prior2
            post = post2 - post1
            # compute acceptance probability; reject extreme values to avoid overflow
            if -800 < post < 700:
                a = min(1, np.exp(post))
                v = np.random.uniform(0, 1)
                if a > v:
                    ne = ne2
        chain.append(ne)  # add current state to the MCMC chain
Example #7
def lnprior_Y0(Y0):
    """
    Prior on detector state at t=0
    """
    if Y0.min() <= 0.0:
        return -np.inf
    # note - for parameter definitions see
    # http://nbviewer.ipython.org/url/xweb.geos.ed.ac.uk/~jsteven5/blog/lognormal_distributions.ipynb
    sigma = np.log(2.0)  # Standard deviation of log(X) - factor of two
    shape = sigma  # Scipy's shape parameter
    scale = Y0_mu_prior  # Scipy's scale parameter = np.exp( mean of log(X) )
    ret = lognorm.logpdf(Y0, shape, loc=0, scale=scale)
    ret = ret.sum()
    return ret
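
Y0_mu_prior is a module-level quantity in the original code; a usage sketch that assumes a value for it:

import numpy as np
from scipy.stats import lognorm

Y0_mu_prior = 100.0   # assumed prior median of the detector state at t=0
Y0 = np.array([80.0, 120.0, 95.0])
print(lnprior_Y0(Y0))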
Example #8
    def logprob(self, x):
        """

        :param x: np.array
        :return: float
        """

        if np.any(x <= 0):
            return -np.inf

        x = np.sqrt(x)
        dy_dx = 2.0 * x

        # subtract the log-Jacobian of the transformation, summed over elements
        return np.sum(lognorm.logpdf(x, s=self.scale,
                                     scale=self.param)) - np.sum(np.log(dy_dx))
Example #9
def lnprior_lognorm_from_data(p, observed_data, n_window=10, sigma_factor=2, _cache=[]):
    """
    Log-normal prior where the distribution parameters vary with time
    """
    # the mutable default argument _cache memoizes (mu, sigma) across calls;
    # it is recomputed whenever the parameter vector changes length
    if len(_cache) == 0 or len(_cache[0]) != len(p):
        mu, sigma = running_lognormal_parameters(p, observed_data,
                                                 n_window=n_window, sigma_factor=sigma_factor)
        while len(_cache) > 0:
            _cache.pop()
        _cache.extend((mu, sigma))
    else:
        mu, sigma = _cache
    shape = sigma # Scipy's shape parameter
    scale = np.exp(mu) # Scipy's scale parameter    
    ret = lognorm.logpdf(p, shape, loc=0, scale=scale)
    ret = ret.sum()
    return ret
Example #10
    def logprob(self, x):
        """
        Log-probability of x under independent per-dimension lognormal priors.

        :param x: np.array (n x m)
        :return: float
        """

        if len(x.shape) == 1:
            x = x.reshape(len(x), 1)

        if np.any(x < 0):
            return -np.inf

        llh = 0
        for dim in range(self.dimension):
            llh += np.sum(lognorm.logpdf(x[:, dim], s=self.scale[dim], scale=np.exp(self.mu[dim])))
        return llh
Example #11
def sampleproposal(B, D, K):
    # sample mixture weights pi from Dirichlet(0.1, ..., 0.1)
    pi_alpha = 0.1
    pi_pr = np.random.dirichlet(pi_alpha * np.ones(K), size=B)
    logpidensity = dirichlet.logpdf(np.transpose(pi_pr), pi_alpha * np.ones(K))

    # sample means mu from Normal(0, 5**2)
    mu_std = 5
    mu_pr = mu_std * np.random.randn(B, K, D)
    logmudensity = np.sum(norm.logpdf(mu_pr, scale=mu_std), axis=(1, 2))

    # sample standard deviations sigma from LogNormal(0, 1)
    sigma_pr = np.exp(np.random.randn(B, K, D))
    logsigmadensity = np.sum(lognorm.logpdf(sigma_pr, s=1), axis=(1, 2))

    # total log-density of each batch element under the proposal
    logpropdensity = logpidensity + logmudensity + logsigmadensity

    # log-density of the same draws under the prior: Dirichlet(1, ..., 1) for pi,
    # Normal(0, 1) for mu, and the same LogNormal(0, 1) for sigma
    logpipriordensity = dirichlet.logpdf(np.transpose(pi_pr), np.ones(K))
    logpriordensity = logpipriordensity + np.sum(norm.logpdf(mu_pr, scale=1), axis=(1, 2)) + logsigmadensity

    return pi_pr, mu_pr, sigma_pr, logpriordensity, logpropdensity
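
A short sketch of drawing a batch of proposals and forming importance weights from the returned log-densities; the prior/proposal weight formula is an illustration, not part of the original snippet:

import numpy as np
from scipy.stats import dirichlet, norm, lognorm

B, D, K = 4, 2, 3   # batch size, data dimension, number of mixture components
pi_pr, mu_pr, sigma_pr, logprior, logprop = sampleproposal(B, D, K)
log_weights = logprior - logprop        # importance weights: prior over proposal
print(pi_pr.shape, mu_pr.shape, log_weights.shape)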
Example #12
def sigma_loglikelihood(sigma):
    # negative lognormal log-likelihood of the observed wp given the predicted
    # medians wp_pred_exp; wp and wp_pred_exp come from the enclosing scope
    wp_loglikelihood = lognorm.logpdf(wp, s=sigma, scale=wp_pred_exp)
    return -np.sum(wp_loglikelihood)
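
sigma_loglikelihood closes over wp and wp_pred_exp from its enclosing scope; a sketch that supplies stand-in values for those names and minimizes the negative log-likelihood over sigma:

import numpy as np
from scipy.stats import lognorm
from scipy.optimize import minimize_scalar

# Stand-in data (assumptions): observed values and their predicted medians.
wp = np.array([1.2, 0.8, 2.5, 1.7])
wp_pred_exp = np.array([1.0, 1.0, 2.0, 1.5])

res = minimize_scalar(sigma_loglikelihood, bounds=(1e-3, 5.0), method='bounded')
print(res.x)   # maximum-likelihood estimate of sigma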