def cluster_train(clear_data): global cc, ccnames, kat, ax, fig1, wall_cart, TP, FP, TN, FN, annotations_checked, fig3 hogs=[] surfaces=[] ann=[] Eps, cluster_labels= dbscan(clear_data,3) # DB SCAN print len(clear_data),' points in ', np.amax(cluster_labels),'clusters' #print 'Eps = ', Eps, ', outliers=' ,len(np.where(cluster_labels==-1)) max_label=int(np.amax(cluster_labels)) human=np.zeros(len(clear_data)) [xi,yi,zi] = [clear_data[:,0] , clear_data[:,1] , clear_data[:,2]] fig1.clear() kat.clear() kat.plot(wall_cart[:,0],wall_cart[:,1]) for k in range(1,max_label+1) : filter=np.where(cluster_labels==k) if len(filter[0])>timewindow : ax.scatter(xi[filter],yi[filter], zi[filter], 'z', 30,c=cc[k%12]) fig1.add_axes(ax) #kat.scatter(xi[filter],yi[filter],s=20, c=cc[k-1]) kat.scatter(xi[filter],yi[filter],s=20, c=cc[k%12]) grid=gridfit(yi[filter], zi[filter], xi[filter], 16, 16) #extract surface grid=grid-np.amin(grid) #build surface grid fig3.clear() ax3 = fig3.add_subplot(1,1,1, projection='3d') X, Y = np.mgrid[:16, :16] surf = ax3.plot_surface(X, Y, grid, rstride=1, cstride=1, cmap=cm.gray, linewidth=0, antialiased=False) surfaces.append(grid) hogs.append(hog(grid)) #extract features plt.pause(0.0001) #print ccnames[k-1],' cluster size :',len(filter[0]), 'Is',ccnames[k-1],'human? ' print ccnames[k%12],' cluster size :',len(filter[0]), 'Is',ccnames[k%12],'human? ' while True: ha = raw_input() if RepresentsInt(timewindow) and (int(ha)==1 or int(ha)==0): #print timewindow ha = int(ha) break else: print 'Try again, 1 for human or 0 for obstacle' if ha == classifier_annotations[0,annotations_checked]: if ha == 1: TP+=1 print 'TP' print TP else: TN+=1 print 'TN' print TN else: if classifier_annotations[0,annotations_checked] == 1: FP+=1 print 'FP' print FP else: FN+=1 print 'FN' print FN annotations_checked+=1 human[filter]=ha ann.append(ha) return cluster_labels,human,hogs,ann,surfaces
def main_cb(cloud_msg):
    """ROS callback for an incoming PointCloud2 message.

    Converts the cloud to an XYZ array, drops points with zero X or Y,
    clusters with DBSCAN, extracts a surface + HOG features, and appends the
    results to the module-level accumulators. In mode 0 the accumulated HOGs
    and human labels are appended to disk; in mode 1 a fresh SVM is trained
    from the previously saved files.

    Parameters:
        cloud_msg: sensor_msgs/PointCloud2 message.

    Side effects: mutates the module-level lists (final_data, all_hogs,
    all_surf, labels, human_detection), writes/reads the train_*.txt files,
    and increments slot_count.
    """
    global slot_count
    global final_data
    global all_hogs
    global train_surfaces
    global surfacesX
    global all_surf
    global labels
    # CONVERT TO XYZ
    rospy.loginfo('converting pointcloud %d to XYZ array ', slot_count)
    raw_data = pointclouds.pointcloud2_to_xyz_array(cloud_msg, remove_nans=True)
    # mode=0 -->COLLECT DATA  mode=1 --> TRAIN AND TEST
    # NOTE(review): 'mode' is never assigned in this function — presumably a
    # module-level global set elsewhere in the file; confirm it exists, else
    # the dbscan call below raises NameError.
    # BUILD CLUSTER
    filter_zeros = np.where(raw_data[:, 0] != 0)[0]
    clear_data = raw_data[filter_zeros, :]   # ignore the zeros on X
    filter_zeros = np.where(clear_data[:, 1] != 0)[0]
    clear_data = clear_data[filter_zeros, :]  # ignore the zeros on Y
    cluster_labels = np.zeros((len(clear_data), 1), int)
    # TRAINING
    eps = 0.5
    min_points = 5
    rospy.loginfo('call DBscan ')
    [core_samples, cluster_labels, n_clusters, human, surfacesX] = scikit_dbscan.dbscan(
        clear_data, eps, min_points, mode, False)
    clear_data = clear_data[core_samples, :]
    # SURFACE & HOG Features EXTRACTION
    all_surf.append(surfacesX)
    rospy.loginfo('Done.')
    # EXTRACT AND SAVE HOGS FEATURES
    rospy.loginfo('extract hogs for timeslot %d', slot_count)
    [hogs, hog_image] = myhog.hog(surfacesX)
    final_data.append(clear_data)
    labels.append(cluster_labels)
    human_detection.append(human)
    all_hogs.append(hogs)
    rospy.loginfo('all_hogs length %d', len(all_hogs))
    rospy.loginfo('final_data length %d', len(final_data))
    rospy.loginfo('human_detection length %d', len(human_detection))
    if mode == 0:
        # NOTE(review): appending ("a") a full JSON dump on every callback
        # yields concatenated JSON documents that simplejson.load (mode 1)
        # cannot parse — consider overwriting ("w") instead; left unchanged.
        f = open("train_hogs.txt", "a")
        simplejson.dump(all_hogs, f)
        f.close()
        f = open("train_classifications.txt", "a")
        # BUGFIX: was 'all_human_detection', a name that exists nowhere
        # (NameError); the accumulated list is 'human_detection' above.
        simplejson.dump(human_detection, f)
        f.close()
    if mode == 1:
        with open("train_hogs.txt") as f:
            train_hogs = simplejson.load(f)
        with open("train_classifications.txt") as f2:
            train_labels = simplejson.load(f2)
        X = array(train_hogs)
        y = array(train_labels)
        clf = svm.SVC()
        clf.fit(X, y)
        #[p0V,p1V,pAb]= trainNB0(array(train_hogs),array(train_labels))
        #print testEntry,'classified as: ',classifyNB(array(hogs),p0V,p1V,pAb)
    slot_count = slot_count + 1