Example #1
  # Fragment of a larger evaluation routine: the loop that plots the
  # incremental-SVD (iSVD) reconstruction curves is elided above this line.
  # Assumes matplotlib.pyplot as plt, numpy as np and scipy.linalg are
  # imported, and that train, u, s, vT, single_dot, svdY, orthoX, orthoY
  # and incr_ortho are defined by the enclosing code.
    plt.plot(X, Y, label='iSVD u={}'.format(num))

  # Disabled sanity check: the full SVD must reconstruct train exactly.
  """
  print('Testing raw SVD => exact reconstruction')
  svT = scipy.linalg.diagsvd(s, u.shape[0], vT.shape[1]).dot(vT)
  for y in range(train.shape[0]):
    for x in range(train.shape[1]):
      colU = u[y, :]
      rowV = svT[:, x]
      assert np.allclose(train[y, x], single_dot(u, svT, x, y))
  """

  # Reconstruction error (Frobenius norm) against the approximation rank k.
  plt.title('SVD reconstruction error on {}x{} matrix'.format(*train.shape))
  plt.xlabel('Low rank approximation (k)')
  plt.ylabel('Frobenius norm')
  plt.ylim(0, max(svdY))
  plt.legend(loc='best')
  plt.savefig('reconstruct_fro_{}x{}.pdf'.format(*train.shape))
  plt.show(block=True)

  # Orthogonality error of the factors against the approximation rank k.
  plt.plot(orthoX, orthoY, label="SVD", color='black', linewidth=2, linestyle='--')
  for label, X, Y in incr_ortho:
    plt.plot(X, Y, label=label)
  plt.title('SVD orthogonality error on {}x{} matrix'.format(*train.shape))
  plt.xlabel('Low rank approximation (k)')
  plt.ylabel('Deviation from orthogonality')
  plt.semilogy()
  #plt.ylim(0, max(orthoY))
  plt.legend(loc='best')
  plt.savefig('reconstruct_ortho_{}x{}.pdf'.format(*train.shape))
  plt.show(block=True)
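The disabled check above relies on a helper named single_dot that is not shown in this excerpt. From the assert and the colU/rowV lines around it, it evidently reconstructs one entry of train from a row of u and a column of s.dot(vT); a minimal sketch under that assumption (the body below is inferred, not the original implementation):

import numpy as np

def single_dot(u, svT, x, y):
    # Reconstruct entry (y, x) of the original matrix from its factors:
    # train[y, x] ~= u[y, :] @ svT[:, x], matching the assert in Example #1.
    return np.dot(u[y, :], svT[:, x])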
Example #2
# Fits Bayesian ridge regressions in log-linear and log-log space for two
# quantities ("Size" and "Duration") and plots the data with the fitted curves.
# Assumes xs, ys, lxs, lys (size data and its logs) and xd (duration data)
# are defined earlier, with the X arrays shaped as sklearn expects.
import numpy as np
import matplotlib.pyplot as plt
from sklearn import linear_model

# Rank-style y values for the duration data, plus their logs.
yd = np.arange(len(xd))[::-1] + 1
lxd = np.log(xd)
lyd = np.log(yd)

# One regressor per (quantity, scale) combination.
loglins = linear_model.BayesianRidge()  # size, log-linear
loglogs = linear_model.BayesianRidge()  # size, log-log
loglind = linear_model.BayesianRidge()  # duration, log-linear
loglogd = linear_model.BayesianRidge()  # duration, log-log

loglins.fit(xs, lys)
loglogs.fit(lxs, lys)
loglind.fit(xd, lyd)
loglogd.fit(lxd, lyd)

plt.subplot(221)
plt.semilogy(xs, ys, "k.", ms=12)
plt.semilogy(xs, np.exp(loglins.predict(xs)), linewidth=3)
plt.title("Size")
plt.ylabel("Log-Linear")

plt.subplot(222)
plt.semilogy(xd, yd, "k.", ms=12)
plt.semilogy(xd, np.exp(loglind.predict(xd)), linewidth=3)
plt.title("Duration")

plt.subplot(223)
plt.loglog(xs, ys, "k.", ms=12)
plt.loglog(np.exp(lxs), np.exp(loglogs.predict(lxs)), linewidth=3)
plt.ylabel("Log-Log")

plt.subplot(224)
# (fourth panel continues beyond this excerpt)
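The regressions above assume xs, ys, lxs and lys already exist. A hypothetical setup for the size data, mirroring the rank-style construction the snippet uses for yd (the generated values are placeholders, not the original data set):

import numpy as np

# Placeholder size samples; the real script loads its own data.
sizes = np.sort(np.random.lognormal(mean=3.0, sigma=1.0, size=200))
xs = sizes.reshape(-1, 1)            # column vector, as sklearn expects
ys = np.arange(len(xs))[::-1] + 1    # descending rank, same recipe as yd
lxs = np.log(xs)
lys = np.log(ys)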
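Because the targets are log-transformed, the fitted models live in log space: the log-linear fits correspond to y ≈ exp(a·x + b) and the log-log fits to a power law y ≈ exp(b)·x^a, which is why the predictions are wrapped in np.exp before plotting. The parameters can be read off the fitted sklearn models directly, for example (a sketch, assuming the fits above have run):

import numpy as np

a = loglogs.coef_[0]            # slope in log-log space = power-law exponent
b = np.exp(loglogs.intercept_)  # prefactor
print('size vs. rank roughly follows y = {:.3g} * x**{:.3g}'.format(b, a))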