Example #1
0
#eps = -0.01
#reconstruction = numpy.reshape(ae.reconstruct(pt + eps), (28,-1))
#grid[num + 1].imshow(reconstruction)
#eps = 0.05
#reconstruction = numpy.reshape(ae.reconstruct(pt + eps), (28,-1))
#grid[num + 2].imshow(reconstruction)
#eps = -0.05
#reconstruction = numpy.reshape(ae.reconstruct(pt + eps), (28,-1))
#grid[num + 3].imshow(reconstruction)
#plt.show()


# Show the contraction by putting a sphere of radius eps around the target
# and measure the variation in encoding between the target and new point
print "Epsilon, Variation in Encoding"
encoded = ae.encode(pt)
vnum = 10
v = []
for j in range(0, vnum):
  vdir = [random.uniform(-1,1) for i in range(0, len(pt))]
  vlen = pnorm(2, vdir)
  v.append([i/vlen for i in vdir])
for h in range(30, 0, -2):
  h_contr = 0
  for j in range(0, vnum):
    encoded_v = ae.encode([i*h for i in v[j]])
    h_contr += pnorm(2, encoded-encoded_v)/h
  print h, h_contr/vnum

#eps = -0.1
#while eps >= 0.1:
Example #2
0
          W=W,
          c=c,
          b=b,
          learning_rate=learning_rate,
          jacobi_penalty=jacobi_penalty,
          batch_size=batch_size,
          epochs=epochs,
          schatten_p=schatten_p )

# 2) Load the training and testing datasets (such as MNIST).
# read_amat_file (defined in helper_functions) returns a two-element
# sequence — presumably (points, labels); verify against its definition.
rX, rY = read_amat_file(training_file_name, training_sample_size)
tX, tY = read_amat_file(testing_file_name, testing_sample_size)

# Pass every training point through the autoencoder's encoder.
encoded_rX = [ae.encode(point) for point in rX]

# 3) Measure the accuracy of the KNN classifier on the test set:
# each testing point is encoded and compared against the encoded training set.
correct = 0    # count of test points the classifier gets right
incorrect = 0  # count of test points the classifier gets wrong
total = 0      # number of test points processed so far
j = 0          # index into tX for the scan loop below
while j < len(tX):
  x = tX[j]
  encoded_x = ae.encode(x)
  # Find the closest training point
  distances = {}
  i = 0