Example #1
import numpy as np
import numpy.random as npr
from scipy.stats import poisson

def test_poisson_logpdf(T=100, K=4, D=10):
    # Compare the batched Poisson log pdf against scipy,
    # broadcasting T datapoints against K sets of rates.
    # poisson_logpdf is the implementation under test (defined in the host library).
    x = npr.poisson(1, size=(T, D))
    lambdas = np.exp(npr.randn(K, D))
    ll1 = poisson_logpdf(x[:, None, :], lambdas)
    ll2 = np.sum(poisson.logpmf(x[:, None, :], lambdas[None, :, :]), axis=-1)
    assert np.allclose(ll1, ll2)
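For reference, a minimal sketch of what a batched poisson_logpdf computes (the name and signature follow the test above; the host library's actual implementation may differ):

from scipy.special import gammaln

def poisson_logpdf(x, lambdas):
    # log Poisson pmf summed over the last (data) dimension:
    # sum_d [ x_d * log(lambda_d) - lambda_d - log(x_d!) ]
    return np.sum(x * np.log(lambdas) - lambdas - gammaln(x + 1), axis=-1)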
Example #2
def sample_x(self, z, xhist, input=None, tag=None, with_noise=True):
    assert self.D == 1, "InputDrivenObservations written for D = 1!"
    # if input is a vector of size self.M (one time point), expand dims to (1, M)
    if input.ndim == 1 and input.shape == (self.M,):
        input = np.expand_dims(input, axis=0)
    lambdas = np.exp(self.Wk @ input.T)
    return npr.poisson(lambdas[z])  # y = Poisson(exp(W @ x))
Example #3
def simulate_ramping(beta=np.linspace(-0.02, 0.02, 5),
                     w2=3e-3,
                     x0=0.5,
                     C=40,
                     T=100,
                     bin_size=0.01):

    NC = 5  # number of trial types (coherences)
    cohs = np.arange(NC)
    trial_cohs = np.repeat(cohs, int(T / NC))
    tr_lengths = npr.randint(50, size=T) + 50  # trial lengths between 50 and 99 bins
    us = []
    xs = []
    zs = []
    ys = []
    for t in range(T):
        tr_coh = trial_cohs[t]
        betac = beta[tr_coh]

        tr_length = tr_lengths[t]
        x = np.zeros(tr_length)
        z = np.zeros(tr_length)
        x[0] = x0 + np.sqrt(w2) * npr.randn()
        z[0] = 0
        for i in range(1, tr_length):
            if x[i - 1] >= 1.0:
                x[i] = 1.0
                z[i] = 1
            else:
                x[i] = np.min(
                    (1.0, x[i - 1] + betac + np.sqrt(w2) * npr.randn()))
                if x[i] >= 1.0:
                    z[i] = 1
                else:
                    z[i] = 0

        y = npr.poisson(np.log1p(np.exp(C * x)) * bin_size)  # spikes ~ Poisson(softplus(C * x) * bin_size)

        u = np.tile(one_hot(tr_coh, NC), (tr_length, 1))
        us.append(u)
        xs.append(x.reshape((tr_length, 1)))
        zs.append(z.reshape((tr_length, 1)))
        ys.append(y.reshape((tr_length, 1)))

    return ys, xs, zs, us, tr_lengths, trial_cohs
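The snippet above calls a one_hot helper that it does not define; a minimal sketch of the assumed behavior (integer class index in, length-K indicator vector out):

def one_hot(z, K):
    # indicator vector with a 1 at index z and 0 elsewhere
    vec = np.zeros(K)
    vec[int(z)] = 1.0
    return vec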
Example #4
def sample_x(self, z, xhist, input=None, tag=None, with_noise=True):
    lambdas = np.exp(self.log_lambdas)  # one rate vector per discrete state
    return npr.poisson(lambdas[z])
Example #5
def sample(self, z, x, input=None, tag=None):
    T = z.shape[0]
    z = np.zeros_like(z, dtype=int) if self.single_subspace else z
    lambdas = self.mean(self.forward(x, input, tag))
    y = npr.poisson(lambdas[np.arange(T), z, :])
    return y
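Note the indexing pattern: lambdas[np.arange(T), z, :] pairs each time step t with its discrete state z[t], yielding one rate vector per time step rather than a full T x K grid.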
Example #6
import autograd.numpy as np
import autograd.numpy.random as npr
from autograd import grad, hessian


def softplus(x):
    return np.log1p(np.exp(x))


# generate fake data
N = 10  # number of observations
D = 2  # number of covariates
T = 100  # number of time points

x = npr.randn(T, D)
C = npr.randn(N, D)
d = npr.randn(N)

lambdas = softplus(np.dot(x, C.T) + d)
y = npr.poisson(lambdas)


# compute Hessian wrt x of log-likelihood using autograd
def obj(x):
    lambdas = softplus(np.dot(x, C.T) + d)
    obj = np.sum(y * np.log(lambdas)) - np.sum(lambdas)  # + const
    return obj


g = grad(obj)
hess = hessian(obj)
hessian_autograd = hess(x).reshape((T * D), (T * D))

# compute Hessian wrt x of log-likelihood analytically
# use lambdas from above
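A sketch completing the analytic computation announced above, assuming the same softplus mean (a derivation, not from the source): the Hessian is block-diagonal across time because each x[t] only affects lambdas[t].

def sigmoid(a):
    return 1 / (1 + np.exp(-a))

eta = np.dot(x, C.T) + d   # (T, N) linear predictor; lambdas = softplus(eta)
dlam = sigmoid(eta)        # d lambda / d eta
d2lam = dlam * (1 - dlam)  # d^2 lambda / d eta^2
# second derivative of y * log(lambda) - lambda with respect to eta
h = (y / lambdas - 1) * d2lam - y * dlam**2 / lambdas**2
# per-time-step D x D blocks: C.T @ diag(h[t]) @ C
blocks = np.einsum('tn,ni,nj->tij', h, C, C)
hessian_analytic = np.zeros((T, D, T, D))
for t in range(T):
    hessian_analytic[t, :, t, :] = blocks[t]
hessian_analytic = hessian_analytic.reshape(T * D, T * D)
print(np.allclose(hessian_autograd, hessian_analytic))  # expect True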
Example #7
    # print(w, b)
    # print(what, bhat)
    # print("")

    # print("poisson / softplus")
    # y = npr.poisson(np.log1p(np.exp(u)))
    # what, bhat = fit_scalar_glm(X, y, model="poisson", mean_function="softplus")
    # print("true: ", w, b)
    # print("inf:  ", what, bhat)
    # print("")

    # r = 3
    # print("negative_binomial / logistic; r=", r)
    # y = npr.negative_binomial(r, 1 - 1 / (1 + np.exp(-u)))
    # what, bhat = fit_scalar_glm(X, y, model="negative_binomial", mean_function="exp", model_hypers=dict(r=r))
    # print("true: ", w, b)
    # print("inf:  ", what, bhat)
    # print("")

    print("poisson / softplus with uncertain data")
    y = npr.poisson(np.log1p(np.exp(u)))
    what, bhat = fit_scalar_glm(X,
                                y,
                                model="poisson",
                                mean_function="softplus",
                                X_variances=np.tile(0.5 * np.eye(p)[None, ...],
                                                    (n, 1, 1)))
    print("true: ", w, b)
    print("inf:  ", what, bhat)
    print("")
Example #8
# S = np.arange(1, D_in+1)
# R = np.linalg.svd(npr.randn(D_in, D_in))[0] * S
# A = R.dot(A0).dot(np.linalg.inv(R))
# b =  np.zeros(D_in)
# true_lds.dynamics.As[0] = A
# true_lds.dynamics.bs[0] = b
# true_lds.dynamics.Sigmas = true_lds.dynamics.Sigmas / np.max(true_lds.dynamics.Sigmas[0]) * 0.5
# x, y = true_lds.sample(T)
# x = x / np.max(x) * 5.0

# Xmat = np.hstack((np.ones((T,1)), x))

# Simulate spike response
# Xmat, w, and the nonlinearity nlfun come from setup elided above
Xproj = Xmat @ w
R, _, _, _ = nlfun(Xproj)  # first return value is the firing rate
Ysps = npr.poisson(R)
print("Max number of spikes: ", np.max(Ysps))

# Generate Ca data: AR(1) calcium dynamics driven by spikes, with
# jump size alpha per spike and decay time constant tau (in bins)
Yobs = np.zeros(T)
Yobs[0] = alpha * Ysps[0] + np.sqrt(sig2) * npr.randn()
for t in range(1, T):
    Yobs[t] = (alpha * Ysps[t]
               + np.exp(-1.0 / tau) * Yobs[t - 1]
               + np.sqrt(sig2) * npr.randn())

# add in some random measurement noise (not part of generative model)
Yobs = Yobs + 0.2 * npr.randn(*Yobs.shape)

# train/test split (note: as excerpted, Yobs has length T, so the test
# slices below are empty unless the elided setup simulated more data)
Yobs_test = Yobs[T:]
Xmat_test = Xmat[T:]
Yobs = Yobs[1:T]
Xmat = Xmat[:T]
Example #9
def negbin_sample(r, p, size):
    # a negative binomial is a gamma-compound-Poisson:
    # lambda ~ Gamma(r, scale=p / (1 - p)), y ~ Poisson(lambda)
    return npr.poisson(npr.gamma(r, p / (1 - p), size=size))
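A quick sanity check on the gamma-Poisson construction (a sketch; the parameters and sample size are arbitrary): the compound distribution is negative binomial with mean r * p / (1 - p).

samples = negbin_sample(r=3.0, p=0.4, size=100000)
print(samples.mean())           # should be close to the NB mean
print(3.0 * 0.4 / (1 - 0.4))    # = 2.0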
Example #11
def sample_y(self, z, x, input=None, tag=None):
    T = z.shape[0]
    z = np.zeros_like(z, dtype=int) if self.single_subspace else z
    lambdas = self.mean(self.compute_mus(x))
    # pick the rate vector for each time step's discrete state, as in Example #5
    return npr.poisson(lambdas[np.arange(T), z, :])