-
Notifications
You must be signed in to change notification settings - Fork 0
/
ShapeStatistics.py
executable file
·688 lines (621 loc) · 29.6 KB
/
ShapeStatistics.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
458
459
460
461
462
463
464
465
466
467
468
469
470
471
472
473
474
475
476
477
478
479
480
481
482
483
484
485
486
487
488
489
490
491
492
493
494
495
496
497
498
499
500
501
502
503
504
505
506
507
508
509
510
511
512
513
514
515
516
517
518
519
520
521
522
523
524
525
526
527
528
529
530
531
532
533
534
535
536
537
538
539
540
541
542
543
544
545
546
547
548
549
550
551
552
553
554
555
556
557
558
559
560
561
562
563
564
565
566
567
568
569
570
571
572
573
574
575
576
577
578
579
580
581
582
583
584
585
586
587
588
589
590
591
592
593
594
595
596
597
598
599
600
601
602
603
604
605
606
607
608
609
610
611
612
613
614
615
616
617
618
619
620
621
622
623
624
625
626
627
628
629
630
631
632
633
634
635
636
637
638
639
640
641
642
643
644
645
646
647
648
649
650
651
652
653
654
655
656
657
658
659
660
661
662
663
664
665
666
667
668
669
670
671
672
673
674
675
676
677
678
679
680
681
682
683
684
685
686
687
688
#Purpose: To implement a suite of 3D shape statistics and to use them for point
#cloud classification
#TODO: Fill in all of this code for group assignment 2
import sys
sys.path.append("S3DGLPy")
from Primitives3D import *
from PolyMesh import *
import numpy as np
import matplotlib.pyplot as plt
import math
POINTCLOUD_CLASSES = ['biplane', 'desk_chair', 'dining_chair', 'fighter_jet', 'fish', 'flying_bird', 'guitar', 'handgun', 'head', 'helicopter', 'human', 'human_arms_out', 'potted_plant', 'race_car', 'sedan', 'shelves', 'ship', 'sword', 'table', 'vase']
NUM_PER_CLASS = 10
#########################################################
## UTILITY FUNCTIONS ##
#########################################################
#Purpose: Export a sampled point cloud into the JS interactive point cloud viewer
#Inputs: Ps (3 x N array of points), Ns (3 x N array of estimated normals),
#filename: Output filename
def exportPointCloud(Ps, Ns, filename):
    #Write a sampled point cloud for the JS interactive viewer: one
    #"x y z nx ny nz" line per point.
    #Ps: 3 x N point positions; Ns: 3 x N matching normals
    numPoints = Ps.shape[1]
    lineFormat = "%g" + " %g"*5 + "\n"
    with open(filename, "w") as fout:
        for col in range(numPoints):
            #position first, then the normal, on a single line
            entries = np.concatenate((Ps[:, col], Ns[:, col]))
            fout.write(lineFormat % tuple(entries.tolist()))
#Purpose: To sample a point cloud, center it on its centroid, and
#then scale all of the points so that the RMS distance to the origin is 1
def samplePointCloud(mesh, N):
    #Sample N points (with normals) from a mesh, translate the cloud so its
    #centroid is at the origin, and scale it so the root-mean-square distance
    #of the points to the origin is exactly 1.
    #Returns (Ps, Ns): 3 x N positions and 3 x N normals.
    (points, normals) = mesh.randomlySamplePoints(N)
    #remove translation: subtract the per-row (x/y/z) mean from every point
    points = points - np.mean(points, 1, keepdims=True)
    #remove scale: with sum of squared coordinates S, choosing
    #s = sqrt(N / S) makes sum(|s*p|^2) = N, i.e. RMS radius 1
    sumSq = np.sum(points ** 2)
    scale = (float(N) / sumSq) ** 0.5
    return (points * scale, normals)
#Purpose: To sample the unit sphere as evenly as possible. The higher
#res is, the more samples are taken on the sphere (in an exponential
#relationship with res). By default, samples 66 points
def getSphereSamples(res = 2):
    #Build a unit-sphere mesh at subdivision level res (higher res gives
    #exponentially more vertices; res = 2 yields 66 samples).
    #NOTE(review): getSphereMesh comes from the S3DGLPy star-imports at the
    #top of the file -- confirm its exact signature there.
    m = getSphereMesh(1, res)
    #VPos is V x 3 vertex positions; transpose to the 3 x V convention
    #used by the rest of this file
    return m.VPos.T
#Purpose: To compute PCA on a point cloud
#Inputs: X (3 x N array representing a point cloud)
#Returns: (eigs, V) where eigs are the eigenvalues sorted in decreasing order and
#V is a 3x3 matrix with each row being the eigenvector corresponding to the eigenvalue
#of the same index in eigs
def doPCA(X):
    #Compute PCA on a point cloud.
    #Inputs: X (3 x N array representing a point cloud)
    #Returns: (eigs, V) where eigs are the eigenvalues sorted in decreasing
    #order and V is a 3x3 array whose ROWS are the eigenvectors, in the same
    #order as eigs.
    X = np.asarray(X)
    if X.size == 0:
        #empty cloud (e.g. an empty spherical shell): degenerate answer
        return (np.zeros(3), np.eye(3))
    A = np.dot(X, X.T)  #3x3 scatter matrix, symmetric PSD
    #eigh (not eig) guarantees real, non-complex eigenvalues for the
    #symmetric scatter matrix and is numerically more stable
    (eigs, V) = np.linalg.eigh(A)
    order = np.argsort(eigs)[::-1]  #decreasing eigenvalue order
    eigs = eigs[order]
    V = V.T[order]  #columns of eigh's V are eigenvectors; make them rows
    return (eigs, V)
#########################################################
## SHAPE DESCRIPTORS ##
#########################################################
#Purpose: To compute a shape histogram, counting points distributed in concentric spherical shells centered at the origin
#Inputs: Ps (3 x N point cloud), Ns (3 x N array of normals) (not needed here
#but passed along for consistency)
#NShells (number of shells), RMax (maximum radius)
#Returns: hist (histogram of length NShells)
def getShapeHistogram(Ps, Ns, NShells, RMax):
    #Shape histogram: count points in NShells concentric spherical shells of
    #equal thickness between radius 0 and RMax, centered on the centroid.
    #Ns is unused (kept for a uniform descriptor signature).
    #Returns a length-NShells count array; points beyond RMax are ignored.
    centered = Ps - np.mean(Ps, 1, keepdims=True)  #recenter on the centroid
    radii = np.sqrt(np.sum(centered ** 2, axis=0))  #distance of each point to origin
    counts, _ = np.histogram(radii, bins=int(NShells), range=[0, float(RMax)])
    return counts
#Purpose: To create shape histogram with concentric spherical shells and
#sectors within each shell, sorted in decreasing order of number of points
#Inputs: Ps (3 x N point cloud), Ns (3 x N array of normals) (not needed here
#but passed along for consistency), NShells (number of shells),
#RMax (maximum radius), SPoints: A 3 x S array of points sampled evenly on
#the unit sphere (get these with the function "getSphereSamples")
def getShapeShellHistogram(Ps, Ns, NShells, RMax, SPoints):
    #Shell-and-sector histogram: bin points into NShells radial shells and,
    #within each shell, into one sector per sphere sample direction; sector
    #counts inside every shell are sorted in decreasing order so the
    #descriptor is rotation-invariant. Ns is unused (uniform signature).
    nSec = SPoints.shape[1]  #one sector per sampled sphere direction
    centered = Ps - np.mean(Ps, 1, keepdims=True)
    radii = np.sqrt(np.sum(centered ** 2, axis=0))
    #shell index by integer division of the radius by the shell thickness;
    #indices >= NShells (points past RMax) fall outside histogram2d's range
    #and are silently dropped
    shellIdx = radii // (float(RMax) / NShells)
    #sector = sphere direction with the largest dot product
    secIdx = np.argmax(np.dot(centered.T, SPoints), axis=1)
    H, _, _ = np.histogram2d(shellIdx, secIdx,
                             bins=[int(NShells), int(nSec)],
                             range=[[0.0, float(NShells)], [0.0, float(nSec)]])
    #sort ascending along each shell row, then flip to decreasing order
    return np.fliplr(np.sort(H)).flatten()
#Purpose: To create shape histogram with concentric spherical shells and to
#compute the PCA eigenvalues in each shell
#Inputs: Ps (3 x N point cloud), Ns (3 x N array of normals) (not needed here
#but passed along for consistency), NShells (number of shells),
#RMax (maximum radius)
def getShapeHistogramPCA(Ps, Ns, NShells, RMax):
    #Shell-PCA histogram: partition the centered cloud into NShells radial
    #shells of thickness RMax/NShells and record the 3 PCA eigenvalues of the
    #points inside each shell. Ns is unused (uniform signature).
    #Returns an NShells x 3 array of eigenvalues (decreasing per shell).
    hist = np.zeros((NShells, 3))
    centroid = np.mean(Ps, 1)[:, None]
    Ps_centered = Ps - centroid
    interval = float(RMax) / NShells  #shell thickness
    Ps_organized = [[] for _ in range(NShells)]
    for point in Ps_centered.T:
        pos = int(np.linalg.norm(point) // interval)
        #Points at radius >= RMax would index past the last shell and crash;
        #drop them, matching the out-of-range clipping of the other shell
        #histograms.
        if pos >= NShells:
            continue
        Ps_organized[pos].append(point)
    for i in range(NShells):
        #doPCA handles an empty shell by returning zero eigenvalues
        (eigs, V) = doPCA(np.transpose(Ps_organized[i]))
        hist[i] = eigs
    return hist
#Purpose: To create shape histogram of the pairwise Euclidean distances between
#randomly sampled points in the point cloud
#Inputs: Ps (3 x N point cloud), Ns (3 x N array of normals) (not needed here
#but passed along for consistency), DMax (Maximum distance to consider),
#NBins (number of histogram bins), NSamples (number of pairs of points sample
#to compute distances)
def getD2Histogram(Ps, Ns, DMax, NBins, NSamples):
    #D2 descriptor: histogram of Euclidean distances between NSamples random
    #pairs of points, over NBins bins in [0, DMax]. Ns is unused (uniform
    #signature). Returns a length-NBins count array.
    N = Ps.shape[1]
    #size must be a tuple of ints -- the original passed (NSamples, 2.),
    #which raises TypeError on modern NumPy
    pairs = np.random.randint(N, size=(NSamples, 2))
    diffs = Ps[:, pairs[:, 0]] - Ps[:, pairs[:, 1]]
    dists = np.linalg.norm(diffs, axis=0)
    hist, _ = np.histogram(dists, bins=int(NBins), range=[0, float(DMax)])
    return hist
#Purpose: To create shape histogram of the angles between randomly sampled triples of points
#Inputs: Ps (3 x N point cloud), Ns (3 x N array of normals) (not needed here
#but passed along for consistency), NBins (number of histogram bins),
#NSamples (number of triples of points sample to compute angles)
def getA3Histogram(Ps, Ns, NBins, NSamples):
    #A3 descriptor: histogram of the angle at the middle point of NSamples
    #random point triples, over NBins bins in [0, pi]. Ns is unused (uniform
    #signature). Returns a length-NBins count array.
    N = Ps.shape[1]
    #size must be a tuple of ints -- the original passed (NSamples, 3.),
    #which raises TypeError on modern NumPy
    triples = np.random.randint(N, size=(NSamples, 3))
    A = Ps[:, triples[:, 0]]
    B = Ps[:, triples[:, 1]]
    C = Ps[:, triples[:, 2]]
    U = A - B  #vectors from the vertex point B to the two other points
    V = C - B
    #cos(angle) = (U . V) / (|U| |V|); degenerate triples (zero-length edge)
    #produce 0/0 -> nan, which nan_to_num maps to angle 0 as before
    with np.errstate(divide='ignore', invalid='ignore'):
        cosines = np.sum(U * V, axis=0) / (np.linalg.norm(U, axis=0) * np.linalg.norm(V, axis=0))
    angles = np.nan_to_num(np.arccos(cosines))
    hist, _ = np.histogram(angles, bins=int(NBins), range=[0, float(np.pi)])
    return hist
#Purpose: To create the Extended Gaussian Image by binning normals to
#sphere directions after rotating the point cloud to align with its principal axes
#Inputs: Ps (3 x N point cloud) (use to compute PCA), Ns (3 x N array of normals),
#SPoints: A 3 x S array of points sampled evenly on the unit sphere used to
#bin the normals
def getEGIHistogram(Ps, Ns, SPoints):
    #Extended Gaussian Image: rotate the normals into the cloud's PCA frame,
    #then bin each normal to the nearest sampled sphere direction.
    #Ps is used only for the PCA alignment; returns one count per direction.
    numDirs = SPoints.shape[1]
    (eigvals, axes) = doPCA(Ps)
    alignedNs = np.dot(axes, Ns)  #express normals in the principal-axis frame
    #nearest direction = sphere sample with the largest dot product
    nearest = np.argmax(np.dot(alignedNs.T, SPoints), axis=1)
    counts, edges = np.histogram(nearest, bins=int(numDirs), range=[0, float(numDirs)])
    return counts
#Purpose: To create an image which stores the amalgamation of rotating
#a bunch of planes around the largest principal axis of a point cloud and
#projecting the points on the minor axes onto the image.
#Inputs: Ps (3 x N point cloud), Ns (3 x N array of normals, not needed here),
#NAngles: The number of angles between 0 and 2*pi through which to rotate
#the plane, Extent: The extent of each axis, Dim: The number of pixels along
#each minor axis
def getSpinImage(Ps, Ns, NAngles, Extent, Dim):
    #Spin image: rotate the PCA-aligned cloud through NAngles angles about its
    #largest principal axis, project each rotation onto the two minor axes,
    #bin it into a Dim x Dim image over [-Extent, Extent]^2, and sum the
    #images. Ns is unused (uniform signature). Returns the flattened image.
    hist = np.zeros((Dim, Dim)) # create a 2D histogram for an image
    (eigs, V) = doPCA(Ps) # eigVals in decreasing order w. corresponding eigVecs in V
    Ps_aligned = np.dot(V, Ps) # project point cloud onto PCA axes
    # rotate the point cloud around axis of greatest variation (x-axis)
    angles = np.linspace(0, 2*np.pi, NAngles+1)
    #drop the duplicate endpoint (angle 2*pi == angle 0); rounding to 3
    #decimals keeps the rotation entries tidy
    cosA = np.around(np.cos(angles[:len(angles)-1:1]), 3)
    sinA = np.around(np.sin(angles[:len(angles)-1:1]), 3)
    #r2/r3 hold, per angle, the row vectors that extract the rotated y and z
    #coordinates (column 0 is zero: the major axis drops out of the projection)
    r2 = np.zeros((NAngles, 3))
    r2[:, 1] = np.transpose(cosA)
    r2[:, 2] = np.transpose(-sinA)
    r3 = np.zeros((NAngles, 3))
    r3[:, 1] = np.transpose(sinA)
    r3[:, 2] = np.transpose(cosA)
    #y_rot/z_rot: NAngles x N minor-axis coordinates of every point at every
    #rotation angle
    y_rot = np.dot(r2, Ps_aligned)
    z_rot = np.dot(r3, Ps_aligned)
    for i in range(NAngles):
        #stack the i-th rotation's y and z rows into a 2 x N projected cloud
        p = np.append(y_rot[i, :], z_rot[i, :])
        p = np.reshape(p, (2, len(Ps.T)))
        # Bin the point cloud projected onto the other two axes
        H, xedges, yedges = np.histogram2d(p[0,:], p[1,:], bins=Dim, range = [[-Extent, Extent],[-Extent, Extent]])
        hist = hist + H # sum images
    #print hist
    #fig1 = plt.figure()
    #plt.pcolormesh(xedges, yedges, hist)
    #plt.show() # display spin image
    return hist.flatten()
#Purpose: To create a histogram of spherical harmonic magnitudes in concentric
#spheres after rasterizing the point cloud to a voxel grid
#Inputs: Ps (3 x N point cloud), Ns (3 x N array of normals, not used here),
#VoxelRes: The number of voxels along each axis (for instance, if 30, then rasterize
#to 30x30x30 voxels), Extent: The number of units along each axis (if 2, then
#rasterize in the box [-1, 1] x [-1, 1] x [-1, 1]), NHarmonics: The number of spherical
#harmonics, NSpheres, the number of concentric spheres to take
def getSphericalHarmonicMagnitudes(Ps, Ns, VoxelRes, Extent, NHarmonics, NSpheres):
    #Placeholder: meant to rasterize Ps to a VoxelRes^3 grid spanning Extent
    #units per axis and record NHarmonics spherical-harmonic magnitudes on
    #NSpheres concentric spheres; currently returns an all-zero descriptor
    #of length NSpheres * NHarmonics.
    hist = np.zeros((NSpheres, NHarmonics))
    #TODO: Finish this
    return hist.flatten()
#Purpose: Utility function for wrapping around the statistics functions.
#Inputs: PointClouds (a python list of N point clouds), Normals (a python
#list of the N corresponding normals), histFunction (a function
#handle for one of the above functions), *args (additional arguments
#that the descriptor function needs)
#Returns: AllHists (A KxN matrix of all descriptors, where K is the length
#of each descriptor)
def makeAllHistograms(PointClouds, Normals, histFunction, *args):
    #Wrapper around the descriptor functions above.
    #Inputs: PointClouds (list of N point clouds), Normals (list of the N
    #corresponding normals), histFunction (one of the descriptor functions),
    #*args (extra arguments forwarded to histFunction).
    #Returns: AllHists, a K x N matrix where column i is the length-K
    #descriptor of point cloud i.
    N = len(PointClouds)
    #Call on first cloud to learn the descriptor length K
    h0 = histFunction(PointClouds[0], Normals[0], *args)
    K = h0.size
    AllHists = np.zeros((K, N))
    #flatten so multi-dimensional descriptors (e.g. getShapeHistogramPCA's
    #NShells x 3 output) fit the column without a shape-mismatch error
    AllHists[:, 0] = np.asarray(h0).flatten()
    for i in range(1, N):
        #parenthesized print works under both Python 2 and Python 3
        print("Computing histogram %i of %i..." % (i + 1, N))
        AllHists[:, i] = np.asarray(histFunction(PointClouds[i], Normals[i], *args)).flatten()
    return AllHists
#########################################################
## HISTOGRAM COMPARISONS ##
#########################################################
#Purpose: helper method to normalize histograms by mass
#Inputs: hist, a 1D array of length K with the values of the histogram
# h'[i] = h[i] / sum (from k = 1 to K) h[k]
def normalizeHist(hist):
    #Mass-normalize a histogram: h'[i] = h[i] / sum_k h[k], so the entries
    #sum to 1. The float() cast forces true division under Python 2.
    total = float(np.sum(hist))
    return hist / total
#Purpose: To compute the euclidean distance between a set
#of histograms
#Inputs: AllHists (K x N matrix of histograms, where K is the length
#of each histogram and N is the number of point clouds)
#Returns: D (An N x N matrix, where the ij entry is the Euclidean
#distance between the histogram for point cloud i and point cloud j)
def compareHistsEuclidean(AllHists):
    #Euclidean distance between all pairs of mass-normalized histograms.
    #Inputs: AllHists (K x N matrix; column i is point cloud i's histogram)
    #Returns: D (N x N matrix; D[i][j] = ||h_i' - h_j'||_2)
    H = np.asarray(AllHists, dtype=float)
    N = H.shape[1]
    #Normalize every histogram exactly once (each column sums to 1); the
    #naive version re-normalized column j inside the double loop, doing
    #O(N^2) normalization passes instead of O(N)
    normed = H / np.sum(H, 0)
    D = np.zeros((N, N))
    for i in range(N):
        for j in range(N):
            D[i, j] = np.linalg.norm(normed[:, i] - normed[:, j])
    return D
#Purpose: To compute the cosine distance between a set
#of histograms
#Inputs: AllHists (K x N matrix of histograms, where K is the length
#of each histogram and N is the number of point clouds)
#Returns: D (An N x N matrix, where the ij entry is the cosine
#distance between the histogram for point cloud i and point cloud j)
def compareHistsCosine(AllHists):
    #Cosine distance between all pairs of mass-normalized histograms.
    #Inputs: AllHists (K x N matrix; column i is point cloud i's histogram)
    #Returns: D (N x N matrix; D[i][j] = 1 - cos(h_i', h_j'))
    H = np.asarray(AllHists, dtype=float)
    N = H.shape[1]
    #normalize once up front instead of O(N^2) times inside the double loop
    normed = H / np.sum(H, 0)
    #Euclidean length of each normalized histogram, also hoisted out
    lens = np.sqrt(np.sum(normed ** 2, 0))
    D = np.zeros((N, N))
    for i in range(N):
        for j in range(N):
            #cosine similarity = (v_i . v_j) / (|v_i| |v_j|)
            cosSim = np.dot(normed[:, i], normed[:, j]) / (lens[i] * lens[j])
            D[i, j] = 1 - cosSim
    return D
#Purpose: To compute the chi squared distance between a set
#of histograms
#Inputs: AllHists (K x N matrix of histograms, where K is the length
#of each histogram and N is the number of point clouds)
#Returns: D (An N x N matrix, where the ij entry is the chi squared
#distance between the histogram for point cloud i and point cloud j)
def compareHistsChiSquared(AllHists):
    #Chi-squared distance between all pairs of mass-normalized histograms:
    #D[i][j] = 0.5 * sum_k (h_i'[k] - h_j'[k])^2 / (h_i'[k] + h_j'[k]).
    #NOTE: a bin that is zero in BOTH histograms yields 0/0 = nan (same
    #caveat as the original: meaningful only for non-zero bins).
    H = np.asarray(AllHists, dtype=float)
    N = H.shape[1]
    #normalize once up front instead of O(N^2) times inside the double loop
    normed = H / np.sum(H, 0)
    D = np.zeros((N, N))
    for i in range(N):
        for j in range(N):
            hi = normed[:, i]
            hj = normed[:, j]
            D[i, j] = 0.5 * np.sum((hi - hj) ** 2 / (hi + hj))
    return D
#Purpose: To compute the 1D Earth mover's distance between a set
#of histograms (note that this only makes sense for 1D histograms)
#Inputs: AllHists (K x N matrix of histograms, where K is the length
#of each histogram and N is the number of point clouds)
#Returns: D (An N x N matrix, where the ij entry is the earth mover's
#distance between the histogram for point cloud i and point cloud j)
def compareHistsEMD1D(AllHists):
    #1D Earth Mover's Distance between all pairs of mass-normalized
    #histograms (only meaningful for 1D histograms):
    #D[i][j] = sum_k |CDF_i[k] - CDF_j[k]|.
    H = np.asarray(AllHists, dtype=float)
    N = H.shape[1]
    normed = H / np.sum(H, 0)  #each column sums to 1
    #cumulative distribution of every histogram, computed once; the naive
    #version recomputed both cumsums inside the double loop
    cdfs = np.cumsum(normed, 0)
    D = np.zeros((N, N))
    for i in range(N):
        for j in range(N):
            D[i, j] = np.sum(np.abs(cdfs[:, i] - cdfs[:, j]))
    return D
#########################################################
## CLASSIFICATION CONTEST ##
#########################################################
#Purpose: To implement your own custom distance matrix between all point
#clouds for the point cloud classification contest
#Inputs: PointClouds, an array of point cloud matrices, Normals: an array
#of normal matrices
#Returns: D: A N x N matrix of distances between point clouds based
#on your metric, where Dij is the distance between point cloud i and point cloud j
def getMyShapeDistances(PointClouds, Normals):
    #Contest metric placeholder: currently just the Euclidean distance
    #between D2 histograms (DMax = 3.0, 30 bins, 100k sampled pairs).
    #Returns an N x N distance matrix over the N point clouds.
    #TODO: Finish this
    #This is just an example, but you should experiment to find which features
    #work the best, and possibly come up with a weighted combination of
    #different features
    HistsD2 = makeAllHistograms(PointClouds, Normals, getD2Histogram, 3.0, 30, 100000)
    DEuc = compareHistsEuclidean(HistsD2)
    return DEuc
#########################################################
## EVALUATION ##
#########################################################
#Purpose: To return an average precision recall graph for a collection of
#shapes given the similarity scores of all pairs of histograms.
#Inputs: D (An N x N matrix, where the ij entry is the earth mover's distance
#between the histogram for point cloud i and point cloud j). It is assumed
#that the point clouds are presented in contiguous chunks of classes, and that
#there are "NPerClass" point clouds per each class (for the dataset provided
#there are 10 per class so that's the default argument). So the program should
#return a precision recall graph that has 9 elements
#Returns PR, an (NPerClass-1) length array of average precision values for all
#recalls
def getPrecisionRecall(D, NPerClass = 10):
    #Average precision-recall over all queries.
    #Inputs: D (N x N distance matrix; clouds appear in contiguous chunks of
    #NPerClass per class, so row r belongs to class r // NPerClass).
    #Returns: PR, length NPerClass-1; PR[m-1] is the average precision at the
    #rank where the m-th same-class shape is retrieved.
    PR = np.zeros(NPerClass - 1)
    for rIn in range(len(D)):
        classval = rIn // NPerClass  #class of the query shape
        sortRow = np.argsort(D[rIn])  #indices of shapes, nearest first
        count = 1    #rank: how many shapes examined so far (excluding self)
        correct = 1  #1 + number of same-class shapes found so far
        for entry in sortRow:
            if rIn == entry:
                continue  #skip the query shape itself
            if entry // NPerClass == classval:
                #float() guards against Python 2 integer division, which
                #floored every precision below 1.0 down to 0
                precision = float(correct) / count
                PR[correct - 1] += precision
                correct += 1
            count += 1
            #all NPerClass-1 same-class shapes are found once correct reaches
            #NPerClass; the original broke at NPerClass-1 -- one match too
            #early -- so PR's last entry was always left at zero
            if correct >= NPerClass:
                break
    #average the per-query precisions over all N queries
    PR = PR / len(D)
    return PR
#########################################################
## MAIN TESTS ##
#########################################################
#sys.exit("Not executing rest of code")
if __name__ == '__main__':
    #Driver script: smoke-test the descriptors on one mesh, then load the
    #full dataset, compute every descriptor, and plot precision-recall curves.
    #NOTE(review): uses Python 2 print statements and plt.hold() (removed in
    #Matplotlib 3.0), so it requires Python 2 with an older Matplotlib.
    m = PolyMesh()
    m.loadFile("models_off/biplane0.off") #Load a mesh
    (Ps, Ns) = samplePointCloud(m, 5) #Sample 5 points and associated normals (raise N for real runs)
    exportPointCloud(Ps, Ns, "biplane.pts") #Export point cloud
    #TESTING GET-SHAPE-HISTOGRAM
    #histogram1 = getShapeHistogram(Ps, Ns, 21, 3)
    #print histogram1
    #print bins1
    #plt.bar(bins1, histogram1, width=3.0/21*0.9)
    #plt.show()
    #TESTING GET-2D-HISTOGRAM
    #DMax = 4
    #NBins = 20
    #NSamples = 5
    #histogram = getD2Histogram(Ps, Ns, DMax, NBins, NSamples)
    #TESTING GET-A3-HISTOGRAM
    #NBins = 2
    #NSamples = 10
    #histogram = getA3Histogram(Ps, Ns, NBins, NSamples)
    #plt.bar(bins, histogram, width= math.pi / NBins * 0.9)
    #plt.show()
    #TESTING GET-SHAPE-SHELL-HISTOGRAM
    #NShells = 4
    #RMax = 2
    #SPoints = getSphereSamples() # res is auto-set to 2 (66 sample points)
    #histogram = getShapeShellHistogram(Ps, Ns, NShells, RMax, SPoints)
    #print histogram
    #print bins
    #plt.bar(bins, histogram, width = float(RMax)/NShells/SPoints.shape[1]*0.9)
    #plt.show()
    #TESTING GET-SPIN-IMAGE
    # NAngles = 4#720
    # Extent = 2#2
    # Dim = 2#1000
    # histogram = getSpinImage(Ps, Ns, NAngles, Extent, Dim)
    #
    NRandSamples = 10000 #You can tweak this number
    #np.random.seed(100) #For repeatable results randomly sampling
    #Load in and sample all meshes
    PointClouds = []
    Normals = []
    for i in range(len(POINTCLOUD_CLASSES)):
        print "LOADING CLASS %i of %i..."%(i, len(POINTCLOUD_CLASSES))
        PCClass = [] #NOTE(review): never used below -- candidate for removal
        for j in range(NUM_PER_CLASS):
            m = PolyMesh()
            filename = "models_off/%s%i.off"%(POINTCLOUD_CLASSES[i], j)
            print "Loading ", filename
            m.loadOffFileExternal(filename)
            #sample, recenter, and RMS-normalize each cloud before comparing
            (Ps, Ns) = samplePointCloud(m, NRandSamples)
            PointClouds.append(Ps)
            Normals.append(Ns)
    SPoints = getSphereSamples(2) #66 sphere directions for sector/EGI binning
    ##Graph for all Descriptor methods
    #
    #Compute every descriptor for every cloud (K x N matrices)
    HistsSpin = makeAllHistograms(PointClouds, Normals, getSpinImage, 360, 1.8, 180) #100, 2, 40
    HistsEGI = makeAllHistograms(PointClouds, Normals, getEGIHistogram, SPoints)
    HistsA3 = makeAllHistograms(PointClouds, Normals, getA3Histogram, 30, 100000)
    HistsD2 = makeAllHistograms(PointClouds, Normals, getD2Histogram, 3.0, 30, 100000)
    HistShape = makeAllHistograms(PointClouds, Normals, getShapeHistogram, 1000, 3)
    HistShapeSect = makeAllHistograms(PointClouds, Normals, getShapeShellHistogram, 1000, 3, SPoints)
    #HistShapePCA = makeAllHistograms(PointClouds, Normals, getShapeHistogramPCA, 10000, 3)
    #print "here -2"
    #Pairwise Euclidean distance matrices for each descriptor
    DSpin = compareHistsEuclidean(HistsSpin)
    #print "here -1"
    DEGI = compareHistsEuclidean(HistsEGI)
    #print "here 0"
    DA3 = compareHistsEuclidean(HistsA3)
    #print "here 00"
    DD2 = compareHistsEuclidean(HistsD2)
    #print "here 1"
    DShp = compareHistsEuclidean(HistShape)
    #print "here 2"
    DShpSct = compareHistsEuclidean(HistShapeSect)
    #print "here 2a"
    #DShpPCA = compareHistsEuclidean(HistShapePCA)
    #print "here 3"
    #Average precision-recall curve per descriptor (9 recall levels)
    PRSpin = getPrecisionRecall(DSpin)
    PREGI = getPrecisionRecall(DEGI)
    PRA3 = getPrecisionRecall(DA3)
    PRD2 = getPrecisionRecall(DD2)
    #print "here 4"
    PRShp = getPrecisionRecall(DShp)
    PRShpSct = getPrecisionRecall(DShpSct)
    #PRShpPCA = getPrecisionRecall(DShpPCA)
    #print "here 5"
    recalls = np.linspace(1.0/9.0, 1.0, 9)
    plt.plot(recalls, PREGI, 'c', label='EGI')
    plt.hold(True) #NOTE(review): plt.hold was removed in Matplotlib 3.0
    plt.plot(recalls, PRSpin, 'b', label='Spin')
    plt.plot(recalls, PRA3, 'k', label='A3')
    plt.plot(recalls, PRD2, 'r', label='D2')
    plt.plot(recalls, PRShp, 'm', label='ShapeShell')
    plt.plot(recalls, PRShpSct, 'y', label='ShapeShellSector')
    #plt.plot(recalls, PRShpPCA, 'g', label='ShapeShellPCA')
    plt.xlabel('Recall')
    plt.ylabel('Precision')
    plt.title('Recall Graph for all Descriptors (random seeds = 100)')
    plt.legend()
    plt.show()
#print "end"
## Graph for Basic Shell Histogram
#Create precision recall graphs which show the effect of choosing
#different numbers of bins for the basic shell histogram
# HistBasic10 = makeAllHistograms(PointClouds, Normals, getShapeHistogram, 10, 5)
# HistBasic100 = makeAllHistograms(PointClouds, Normals, getShapeHistogram, 100, 5)
# HistBasic1000 = makeAllHistograms(PointClouds, Normals, getShapeHistogram, 1000, 5)
# HistBasic10000 = makeAllHistograms(PointClouds, Normals, getShapeHistogram, 10000, 5)
#
# DHistBasic10 = compareHistsEuclidean(HistBasic10)
# DHistBasic100 = compareHistsEuclidean(HistBasic100)
# DHistBasic1000 = compareHistsEuclidean(HistBasic1000)
# DHistBasic10000 = compareHistsEuclidean(HistBasic10000)
#
# PRHistBasic10 = getPrecisionRecall(DHistBasic10)
# PRHistBasic100 = getPrecisionRecall(DHistBasic100)
# PRHistBasic1000 = getPrecisionRecall(DHistBasic1000)
# PRHistBasic10000 = getPrecisionRecall(DHistBasic10000)
#
#
# recalls = np.linspace(1.0/9.0, 1.0, 9)
# plt.plot(recalls, PRHistBasic10, 'c', label='10 Shells')
# plt.hold(True)
# plt.plot(recalls, PRHistBasic100, 'k', label='100 Shells')
# plt.plot(recalls, PRHistBasic1000, 'r', label='1000 Shells')
# plt.plot(recalls, PRHistBasic10000, 'b', label='10000 Shells')
# plt.xlabel('Recall')
# plt.ylabel('Precision')
# plt.title('getShapeHistogram (Basic) Different number of Shells/Bins')
# plt.legend()
# plt.show()
## Graph for D2 Histogram
# HistsD22 = makeAllHistograms(PointClouds, Normals, getD2Histogram, 3.0, 30, 100)
# HistsD23 = makeAllHistograms(PointClouds, Normals, getD2Histogram, 3.0, 30, 1000)
# HistsD24 = makeAllHistograms(PointClouds, Normals, getD2Histogram, 3.0, 30, 10000)
# HistsD25 = makeAllHistograms(PointClouds, Normals, getD2Histogram, 3.0, 30, 100000)
#
#
# DHistsD22 = compareHistsEuclidean(HistsD22)
# DHistsD23 = compareHistsEuclidean(HistsD23)
# DHistsD24 = compareHistsEuclidean(HistsD24)
# DHistsD25 = compareHistsEuclidean(HistsD25)
#
# PRHistD22 = getPrecisionRecall(DHistsD22)
# PRHistD23 = getPrecisionRecall(DHistsD23)
# PRHistD24 = getPrecisionRecall(DHistsD24)
# PRHistD25 = getPrecisionRecall(DHistsD25)
#
#
# recalls = np.linspace(1.0/9.0, 1.0, 9)
# plt.plot(recalls, PRHistD22, 'c', label='100 Samples')
# plt.hold(True)
# plt.plot(recalls, PRHistD23, 'k', label='1000 Samples')
# plt.plot(recalls, PRHistD24, 'r', label='10000 Samples')
# plt.plot(recalls, PRHistD25, 'b', label='100000 Samples')
# plt.xlabel('Recall')
# plt.ylabel('Precision')
# plt.title('getD2Histogram Varying number of Distance Points Sampled (Random Seed = 100)')
# plt.legend()
# plt.show()
## Graph for Distance Metrics D2Histograms
# HistD2 = makeAllHistograms(PointClouds, Normals, getD2Histogram, 3.0, 30, 100000)
#
# DEucl = compareHistsEuclidean(HistD2)
# DCos = compareHistsCosine(HistD2)
# DChi = compareHistsChiSquared(HistD2)
# DEMD1D = compareHistsEMD1D(HistD2)
#
# PRDEucl = getPrecisionRecall(DEucl)
# PRDCos = getPrecisionRecall(DCos)
# PRDChi = getPrecisionRecall(DChi)
# PRDEMD1D = getPrecisionRecall(DEMD1D)
#
#
# recalls = np.linspace(1.0/9.0, 1.0, 9)
# plt.plot(recalls, PRDEucl, 'c', label='Euclidean')
# plt.hold(True)
# plt.plot(recalls, PRDCos, 'k', label='Cosine')
# plt.plot(recalls, PRDChi, 'r', label='Chi Squared')
# plt.plot(recalls, PRDEMD1D, 'b', label='DEMD1D')
# plt.xlabel('Recall')
# plt.ylabel('Precision')
# plt.title('getD2Histogram with Varying Distance Metrics')
# plt.legend()
# plt.show()
# # Graph for Distance Metrics EGI
# HistEG = makeAllHistograms(PointClouds, Normals, getEGIHistogram, getSphereSamples(2))
#
# DEucl = compareHistsEuclidean(HistEG)
# DCos = compareHistsCosine(HistEG)
# DChi = compareHistsChiSquared(HistEG)
# DEMD1D = compareHistsEMD1D(HistEG)
#
# PRDEucl = getPrecisionRecall(DEucl)
# PRDCos = getPrecisionRecall(DCos)
# PRDChi = getPrecisionRecall(DChi)
# PRDEMD1D = getPrecisionRecall(DEMD1D)
#
#
# recalls = np.linspace(1.0/9.0, 1.0, 9)
# plt.plot(recalls, PRDEucl, 'c', label='Euclidean')
# plt.hold(True)
# plt.plot(recalls, PRDCos, 'k', label='Cosine')
# plt.plot(recalls, PRDChi, 'r', label='Chi Squared')
# plt.plot(recalls, PRDEMD1D, 'b', label='DEMD1D')
# plt.xlabel('Recall')
# plt.ylabel('Precision')
# plt.title('getEGIHistogram (res 2) with Varying Distance Metrics')
# plt.legend()
# plt.show()
## Recall Graph for EGI
# HistsEG1 = makeAllHistograms(PointClouds, Normals, getEGIHistogram, getSphereSamples(2))
# HistsEG2 = makeAllHistograms(PointClouds, Normals, getEGIHistogram, getSphereSamples(3))
# HistsEG3 = makeAllHistograms(PointClouds, Normals, getEGIHistogram, getSphereSamples(4))
# HistsEG4 = makeAllHistograms(PointClouds, Normals, getEGIHistogram, getSphereSamples(5))
#
#
# DHistsEG1 = compareHistsEuclidean(HistsEG1)
# DHistsEG2 = compareHistsEuclidean(HistsEG2)
# DHistsEG3 = compareHistsEuclidean(HistsEG3)
# DHistsEG4 = compareHistsEuclidean(HistsEG4)
#
# PRHistEG1 = getPrecisionRecall(DHistsEG1)
# PRHistEG2 = getPrecisionRecall(DHistsEG2)
# PRHistEG3 = getPrecisionRecall(DHistsEG3)
# PRHistEG4 = getPrecisionRecall(DHistsEG4)
#
#
# recalls = np.linspace(1.0/9.0, 1.0, 9)
# plt.plot(recalls, PRHistEG1, 'c', label='res = 2')
# plt.hold(True)
# plt.plot(recalls, PRHistEG2, 'k', label='res = 3')
# plt.plot(recalls, PRHistEG3, 'r', label='res = 4')
# plt.plot(recalls, PRHistEG4, 'b', label='res = 5')
# plt.xlabel('Recall')
# plt.ylabel('Precision')
# plt.title('Extended Gaussian Image Varying Sphere (random seed = 100)')
# plt.legend()
# plt.show()
#TODO: Finish this, run experiments. Also in the above code, you might
#just want to load one point cloud and test your histograms on that first
#so you don't have to wait for all point clouds to load when making
#minor tweaks