Example 1
import numpy as np
from scipy.special import digamma, gammaln
from scipy.stats import multivariate_normal as mvn
from scipy.stats import gamma as gamma_dist


def objective(X, Y, C, mu, a, b, e, f, a0, b0, e0, f0):
  log2pi = np.log(2*np.pi)
  N, D = X.shape

  # E[ln x] = digamma(shape) - ln(rate) for x ~ Gamma(shape, rate)
  E_ln_lambda = digamma(e) - np.log(f)
  E_ln_alpha = digamma(a) - np.log(b)

  # expected log-likelihood of the data
  total = (N/2.0)*(E_ln_lambda - log2pi)
  data_total = 0
  for i in range(N):
    delta = Y[i] - X[i].dot(mu)
    data_total += delta*delta + X[i].dot(C).dot(X[i])
  total -= (e/f)/2.0 * data_total

  # expected log-prior of the weights w
  total -= (D/2.0)*log2pi
  for k in range(D):
    total += 0.5*(E_ln_alpha[k] - (a[k]/b[k])*(C[k,k] + mu[k]*mu[k]))

  # expected log-prior of the noise precision lambda
  # (gammaln avoids the overflow of np.log(gamma(.)) for large arguments)
  total += e0*np.log(f0) - gammaln(e0) + (e0 - 1)*E_ln_lambda - f0*(e/f)

  # expected log-prior of each ARD precision alpha_k
  for k in range(D):
    total += a0*np.log(b0) - gammaln(a0) + (a0 - 1)*E_ln_alpha[k] - b0*(a[k]/b[k])

  # entropy of q(w) = N(mu, C); depends only on C:
  # mvn.entropy(cov=C) == 0.5*np.log(np.linalg.det(2*np.pi*np.e*C))
  total += mvn.entropy(cov=C)

  # entropy of q(lambda) = Gamma(e, f); scipy parameterizes by scale = 1/rate
  total += gamma_dist.entropy(e, scale=1.0/f)

  # entropy of each q(alpha_k) = Gamma(a_k, b_k)
  for k in range(D):
    total += gamma_dist.entropy(a[k], scale=1.0/b[k])
  return total
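
For context, a minimal usage sketch follows. All data and parameter values here are invented for illustration and are not from the original source: it draws synthetic regression data and evaluates the ELBO once, with unit Gamma variational parameters and broad hyperpriors.

# Hypothetical usage sketch; the data and values below are invented.
import numpy as np

np.random.seed(0)
N, D = 50, 3
X = np.random.randn(N, D)
Y = X.dot(np.array([1.0, 0.0, -2.0])) + 0.1*np.random.randn(N)

mu = np.zeros(D)           # mean of q(w)
C = np.eye(D)              # covariance of q(w)
a = np.ones(D)             # shapes of q(alpha_k)
b = np.ones(D)             # rates of q(alpha_k)
e, f = 1.0, 1.0            # shape/rate of q(lambda)
a0 = b0 = e0 = f0 = 1e-2   # broad Gamma hyperpriors

print("ELBO:", objective(X, Y, C, mu, a, b, e, f, a0, b0, e0, f0))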
Example 2
  # entropy terms (tail of objective(), as in Example 1)
  # entropy of q(w) = N(mu, C)
  total += mvn.entropy(cov=C)

  # entropy of q(lambda) = Gamma(e, f)
  total += gamma_dist.entropy(e, scale=1.0/f)

  # entropy of each q(alpha_k) = Gamma(a_k, b_k)
  for k in range(D):
    total += gamma_dist.entropy(a[k], scale=1.0/b[k])
  return total


import pandas as pd


def run(num=1, T=500):
  # T is presumably the number of VB iterations; this excerpt is truncated
  # before the training loop. pandas removed .as_matrix(); use .to_numpy().
  X = pd.read_csv('X_set%s.csv' % num, header=None).to_numpy()
  Y = pd.read_csv('y_set%s.csv' % num, header=None).to_numpy().flatten()
  Z = pd.read_csv('z_set%s.csv' % num, header=None).to_numpy().flatten()
  N, D = X.shape
  print(X.shape, Y.shape, Z.shape)
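
The excerpt above stops after loading the data. For context, here is a hedged sketch of the standard mean-field coordinate-ascent (CAVI) updates for this model, which objective() would be used to monitor; the function name fit and the hyperparameter defaults are assumptions, not the original author's code.

# Hedged sketch of the standard CAVI updates for Bayesian linear
# regression with ARD priors; invented for illustration, not taken
# from the original source.
import numpy as np


def fit(X, Y, a0=1e-2, b0=1e-2, e0=1e-2, f0=1e-2, T=500):
  N, D = X.shape
  a = np.full(D, a0 + 0.5)  # Gamma shape updates are constant
  b = np.full(D, b0)
  e = e0 + N/2.0
  f = f0
  for t in range(T):
    # update q(w) = N(mu, C)
    C = np.linalg.inv(np.diag(a/b) + (e/f)*X.T.dot(X))
    mu = (e/f)*C.dot(X.T).dot(Y)
    # update q(alpha_k) = Gamma(a_k, b_k)
    b = b0 + 0.5*(mu**2 + np.diag(C))
    # update q(lambda) = Gamma(e, f)
    resid = Y - X.dot(mu)
    f = f0 + 0.5*(resid.dot(resid) + np.sum(X.dot(C)*X))
  return mu, C, a, b, e, f

Calling objective(X, Y, C, mu, a, b, e, f, a0, b0, e0, f0) once per iteration should show the ELBO increasing monotonically, which is the usual sanity check for updates of this kind.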
Example 3
  def entropy(self, n, p):
    # delegate to the base class's entropy implementation
    ent = gamma.entropy(self, n, p)
    return ent
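
The fragment above calls a base class's entropy through the class rather than through super(). A hypothetical reconstruction of that pattern (the surrounding class is not shown in the original, so both class names here are invented):

# Hypothetical reconstruction; class names are invented for illustration.
class gamma:
  def entropy(self, n, p):
    return 0.0  # placeholder for the base implementation


class CustomGamma(gamma):
  def entropy(self, n, p):
    # explicit unbound call, as in the fragment above;
    # super().entropy(n, p) is the idiomatic Python 3 equivalent
    ent = gamma.entropy(self, n, p)
    return ent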