def cluster_procedure(self): cl_array = [] try: for i,scan in enumerate(self.points): if len(scan) < self.num_c: continue Eps, cluster_labels= mt.dbscan(scan, self.num_c, eps=self.eps) max_label=int(np.amax(cluster_labels)) #(-1 is for outliers) #for every cluster/label (-> denotes a new cluster) for k in range(1,max_label+1) : filter = np.where(cluster_labels==k) if len(filter) == 0: continue #get the x,y points of the specific cluster and create a <ScanCluster> instance if (len(filter[0]) >= self.num_c): x_ = zip(*scan)[0] xCl = np.array(x_)[filter] y_ = zip(*scan)[1] yCl = np.array(y_)[filter] cluster_ = ScanCluster(xCl, yCl, i) cl_array.append(cluster_) if not (len(cl_array) == 0): self.combine_clusters(cl_array) cl_array = [] print 'total number of clusters = ',len(self.clusters.boxes) return self.clusters except Exception as ex: print 'Exception in cluster procedure ',ex raise ex
def clustering_procedure(buffer):
    """Cluster buffered (x, y, z) points and publish them as a ClustersMsg.

    Two modes, selected by the global ``use_overlap``:
      * overlap: plain ``mt.dbscan`` over the whole buffer, keeping every
        cluster with more than 40 points;
      * online:  ``oncl.onlineDBscan`` incremental clustering, flattening
        each returned cluster's points into one array.

    An empty buffer publishes an empty message so that listeners skip the
    data loop. When ``publish_cluster_labels`` is set, the raw per-point
    labels are published as a ClusterLabelsMsg as well.
    """
    global num_c, clusters_publisher, frame_id, publish_cluster_labels, cluster_labels_publisher
    if len(buffer.x) == 0:
        # the area is empty!
        clustersmsg = ClustersMsg()
        clustersmsg.header.stamp = rospy.Time.now()
        clustersmsg.header.frame_id = frame_id
        clustersmsg.x = []
        clustersmsg.y = []
        clustersmsg.z = []
        # empty array_sizes means that anyone listening to this message
        # won't loop through the data
        clustersmsg.array_sizes = []
        clustersmsg.num_clusters = []
        clusters_publisher.publish(clustersmsg)
    else:
        scan_time = buffer.scan_time
        # Pack the buffered coordinates into an (N, 3) array for DBSCAN.
        clear_data = np.zeros((len(buffer.x), 3))
        for i in range(0, len(buffer.x)):
            clear_data[i] = ([buffer.x[i], buffer.y[i], buffer.z[i]])
        arr_sz = []
        x_ = []
        y_ = []
        z_ = []
        num_clusters = []
        if use_overlap:
            Eps, cluster_labels = mt.dbscan(clear_data, num_c)
            max_label = int(np.amax(cluster_labels))
            for k in range(1, max_label + 1):
                members = np.where(cluster_labels == k)
                # keep only clusters with more than 40 points
                if len(members[0]) > 40:
                    xk = np.array(buffer.x)[members]
                    yk = np.array(buffer.y)[members]
                    zk = np.array(buffer.z)[members]
                    for i in range(0, len(xk)):
                        x_.append(xk[i])
                        y_.append(yk[i])
                        z_.append(zk[i])
                    arr_sz.append(len(xk))
        else:
            # BUGFIX: bind the labels to `cluster_labels`; the original
            # bound `cluster_label`, leaving `cluster_labels` undefined
            # (or stale) for the publish_cluster_labels branch below.
            prev_clusters, cluster_labels = oncl.onlineDBscan(clear_data, num_c)
            for cl in prev_clusters:
                # accumulate every cluster's points into one flat array
                # kept at index 0
                if len(x_) == 0:
                    x_.append(cl.getPoints()[:, 0])
                    y_.append(cl.getPoints()[:, 1])
                    z_.append(cl.getPoints()[:, 2])
                else:
                    x_[0] = np.append(x_[0], cl.getPoints()[:, 0], axis=0)
                    y_[0] = np.append(y_[0], cl.getPoints()[:, 1], axis=0)
                    z_[0] = np.append(z_[0], cl.getPoints()[:, 2], axis=0)
                if len(num_clusters) == 0:
                    num_clusters = np.array(cl.getSizes())
                    arr_sz = [cl.getNumPts()]
                else:
                    num_clusters = np.append(num_clusters, cl.getSizes(), axis=0)
                    arr_sz.append(cl.getNumPts())
            # drop empty clusters reported by the online algorithm
            if 0 in arr_sz:
                arr_sz = [i for i in arr_sz if i != 0]
            # BUGFIX: guard the unwrap; the original indexed x_[0]
            # unconditionally and raised IndexError when onlineDBscan
            # returned no clusters.
            if len(x_) > 0:
                x_ = x_[0]
                y_ = y_[0]
                z_ = z_[0]
        clustersmsg = ClustersMsg()
        clustersmsg.header.stamp = rospy.Time.now()
        clustersmsg.header.frame_id = frame_id
        clustersmsg.x = x_
        clustersmsg.y = y_
        clustersmsg.z = z_
        clustersmsg.array_sizes = arr_sz
        clustersmsg.num_clusters = num_clusters
        clustersmsg.scan_time = scan_time
        clusters_publisher.publish(clustersmsg)
        if publish_cluster_labels:
            clusterlabelsmsg = ClusterLabelsMsg()
            clusterlabelsmsg.header = clustersmsg.header
            clusterlabelsmsg.cluster_labels = cluster_labels
            cluster_labels_publisher.publish(clusterlabelsmsg)
def clustering_procedure(buffer):
    """Cluster buffered (x, y, z) points with DBSCAN and publish them.

    Runs ``mt.dbscan`` on the whole buffer, keeps every cluster with more
    than 40 points, concatenates the surviving points and publishes one
    ClustersMsg whose ``array_sizes`` holds the per-cluster point counts.
    An empty buffer publishes an empty message so that listeners skip the
    data loop. When ``publish_cluster_labels`` is set, the raw per-point
    labels are published as a ClusterLabelsMsg as well.
    """
    global num_c, clusters_publisher, frame_id, publish_cluster_labels, cluster_labels_publisher
    if len(buffer.x) == 0:
        # the area is empty!
        clustersmsg = ClustersMsg()
        clustersmsg.header.stamp = rospy.Time.now()
        clustersmsg.header.frame_id = frame_id
        clustersmsg.x = []
        clustersmsg.y = []
        clustersmsg.z = []
        # empty array_sizes means that anyone listening to this message
        # won't loop through the data
        clustersmsg.array_sizes = []
        clustersmsg.num_clusters = []
        clusters_publisher.publish(clustersmsg)
    else:
        scan_time = buffer.scan_time
        # Convert the coordinate lists once; the original rebuilt
        # np.array(buffer.x/y/z) inside the loop for every cluster.
        bx = np.array(buffer.x)
        by = np.array(buffer.y)
        bz = np.array(buffer.z)
        clear_data = np.zeros((len(buffer.x), 3))
        clear_data[:, 0] = bx
        clear_data[:, 1] = by
        clear_data[:, 2] = bz
        Eps, cluster_labels = mt.dbscan(clear_data, num_c)
        max_label = int(np.amax(cluster_labels))
        arr_sz = []
        x_ = []
        y_ = []
        z_ = []
        for k in range(1, max_label + 1):
            members = np.where(cluster_labels == k)
            # keep only clusters with more than 40 points
            if len(members[0]) > 40:
                xk = bx[members]
                yk = by[members]
                zk = bz[members]
                x_.extend(xk)
                y_.extend(yk)
                z_.extend(zk)
                arr_sz.append(len(xk))
        clustersmsg = ClustersMsg()
        clustersmsg.header.stamp = rospy.Time.now()
        clustersmsg.header.frame_id = frame_id
        clustersmsg.x = x_
        clustersmsg.y = y_
        clustersmsg.z = z_
        clustersmsg.array_sizes = arr_sz
        clustersmsg.num_clusters = []
        clustersmsg.scan_time = scan_time
        clusters_publisher.publish(clustersmsg)
        if publish_cluster_labels:
            clusterlabelsmsg = ClusterLabelsMsg()
            clusterlabelsmsg.header = clustersmsg.header
            clusterlabelsmsg.cluster_labels = cluster_labels
            cluster_labels_publisher.publish(clusterlabelsmsg)
def cluster_train(clear_data): global cc, ccnames, kat, ax, fig1, wall_cart, fig3, hogs_temp global annotated_humans, annotated_obstacles, cc, point_clouds hogs=[] surfaces=[] ann=[] cluster_points=[] Eps, cluster_labels= dbscan(clear_data,3) # DB SCAN print 'eps = ',Eps,' , ',len(clear_data),' points in ', np.amax(cluster_labels),'clusters' #print 'Eps = ', Eps, ', outliers=' ,len(np.where(cluster_labels==-1)) max_label=int(np.amax(cluster_labels)) human=np.zeros(len(clear_data)) [xi,yi,zi] = [clear_data[:,0] , clear_data[:,1] , clear_data[:,2]] fig1.clear() kat.clear() kat.plot(wall_cart[:,0],wall_cart[:,1]) for k in range(1,max_label+1) : filter=np.where(cluster_labels==k) if len(filter[0])>timewindow : ax.scatter(xi[filter],yi[filter], zi[filter], 'z', 30,c=cc[k%12]) fig1.add_axes(ax) #kat.scatter(xi[filter],yi[filter],s=20, c=cc[k-1]) kat.scatter(xi[filter],yi[filter],s=20, c=cc[k%12]) [xk,yk,zk] = [xi[filter],yi[filter], zi[filter]] point_clouds.append([xk,yk,zk]) grid=gridfit(yi[filter], zi[filter], xi[filter], 16, 16) #extract surface grid=grid-np.amin(grid) #build surface grid fig3.clear() ax3 = fig3.add_subplot(1,1,1, projection='3d') X, Y = np.mgrid[:16, :16] surf = ax3.plot_surface(X, Y, grid, rstride=1, cstride=1, cmap=cm.gray, linewidth=0, antialiased=False) surfaces.append(grid) hogs.append(hog(grid)) #extract features #list_dist=euclidean_distance(hogs_temp, hog(grid)) plt.pause(0.0001) #print ccnames[k-1],' cluster size :',len(filter[0]), 'Is',ccnames[k-1],'human? ' print ccnames[k%12],' cluster size :',len(filter[0]), 'Is',ccnames[k%12],'human? 
' while True: ha = raw_input() if RepresentsInt(timewindow) and (int(ha)==1 or int(ha)==0): #print timewindow ha = int(ha) if ha == 1: annotated_humans = annotated_humans + 1 else : annotated_obstacles = annotated_obstacles + 1 break else: print 'Try again, 1 for human or 0 for obstacle' #grid=gridfit(yi[filter], zi[filter], xi[filter], 16, 16) #extract surface #grid=grid-np.amin(grid) #build surface grid #surfaces.append(grid) #hogs.append(hog(grid)) #extract features human[filter]=ha ann.append(ha) hogs_temp = np.array(np.array(hogs)) return cluster_labels,human,hogs,ann,surfaces,cluster_points
def clustering_procedure(clear_data, num_c): global cc, ccnames, z, z_scale, _3d_figure global all_clusters,all_hogs,all_gridfit,all_orthogonal global tot_results, all_annotations, metrics global pause hogs=[] colors=[] align_cl=[] #contains the aligned data clouds of each cluster vcl=[] #Valid Cluster Labels valid_flag=0 #this flag is only set if we have at least one valid cluster grids=[] cls = [] Eps, cluster_labels= mt.dbscan(clear_data,3) # DB SCAN max_label=int(np.amax(cluster_labels)) [xi,yi,zi] = [clear_data[:,0] , clear_data[:,1] , clear_data[:,2]] #for every created cluster - its data points for k in range(1,max_label+1) : filter=np.where(cluster_labels==k) if len(filter[0])>40 : valid_flag=1 #points of every cluster at each timewindow-frame [xk,yk,zk]=[xi[filter],yi[filter],zi[filter]] speed(xk,yk,zk) trans_matrix =[[xk,yk,zk]] all_clusters.append([xk,yk,zk]) #we get U by applying svd to the covariance matrix. U represents the rotation matrix of each cluster based on the variance of each dimension. 
U,s,V=np.linalg.svd(np.cov([xk,yk,zk]), full_matrices=False) #translate each cluster to the beginning of the axis and then do the rotation [xnew,ynew,znew]=translate_cluster(xk,yk,zk) #(traslation matrix) x (rotation matrix) = alignemt of cluster alignment_result=[[sum(a*b for a,b in zip(X_row,Y_col)) for X_row in zip(*[xnew,ynew,znew])] for Y_col in U] alignment_result=multiply_array(xnew,ynew,znew, V) #steps2(xk,yk,zk) cls.append([xk,yk,zk]) align_cl.append(alignment_result) all_orthogonal.append(alignment_result) vcl.append(k) colors.append(ccnames[k%12]) grid=gridfit(alignment_result[0], alignment_result[1], alignment_result[2], 16, 16) #extract surface - y,z,x alignment_result[1] all_gridfit.append(grid) grid=grid-np.amin(grid) grids.append(grid) features=hog(grid) f=hog(grid, orientations=6, pixels_per_cell=(8, 8), cells_per_block=(1, 1), visualise=False) all_hogs.append(f) hogs.append(f) #extract hog features if valid_flag != 0: overlap_trace(cls) #3d_figure.show() if pause_function: print '\033[93m '+ str(pause) + ' \033[0m' if not pause: #_3d_figure.clear() if num_of_diagrams > 1: ax.clear() ax.set_title("3D view") ax.set_xlabel('X - Distance') ax.set_ylabel('Y - Robot') ax.set_zlabel('Z - Time') update_plots(valid_flag,hogs,xi,yi,zi,cluster_labels,vcl, align_cl, grids)
def clustering_procedure(clear_data, num_c): global cc, ccnames, z, z_scale, _3d_figure global all_clusters, all_hogs, all_gridfit, all_orthogonal global tot_results, all_annotations, metrics global pause hogs = [] colors = [] align_cl = [] #contains the aligned data clouds of each cluster vcl = [] #Valid Cluster Labels valid_flag = 0 #this flag is only set if we have at least one valid cluster grids = [] cls = [] Eps, cluster_labels = mt.dbscan(clear_data, 3) # DB SCAN max_label = int(np.amax(cluster_labels)) [xi, yi, zi] = [clear_data[:, 0], clear_data[:, 1], clear_data[:, 2]] #for every created cluster - its data points for k in range(1, max_label + 1): filter = np.where(cluster_labels == k) if len(filter[0]) > 40: valid_flag = 1 #points of every cluster at each timewindow-frame [xk, yk, zk] = [xi[filter], yi[filter], zi[filter]] speed(xk, yk, zk) trans_matrix = [[xk, yk, zk]] all_clusters.append([xk, yk, zk]) #we get U by applying svd to the covariance matrix. U represents the rotation matrix of each cluster based on the variance of each dimension. 
U, s, V = np.linalg.svd(np.cov([xk, yk, zk]), full_matrices=False) #translate each cluster to the beginning of the axis and then do the rotation [xnew, ynew, znew] = translate_cluster(xk, yk, zk) #(traslation matrix) x (rotation matrix) = alignemt of cluster alignment_result = [[ sum(a * b for a, b in zip(X_row, Y_col)) for X_row in zip(*[xnew, ynew, znew]) ] for Y_col in U] alignment_result = multiply_array(xnew, ynew, znew, V) #steps2(xk,yk,zk) cls.append([xk, yk, zk]) align_cl.append(alignment_result) all_orthogonal.append(alignment_result) vcl.append(k) colors.append(ccnames[k % 12]) grid = gridfit(alignment_result[0], alignment_result[1], alignment_result[2], 16, 16) #extract surface - y,z,x alignment_result[1] all_gridfit.append(grid) grid = grid - np.amin(grid) grids.append(grid) features = hog(grid) f = hog(grid, orientations=6, pixels_per_cell=(8, 8), cells_per_block=(1, 1), visualise=False) all_hogs.append(f) hogs.append(f) #extract hog features if valid_flag != 0: overlap_trace(cls) #3d_figure.show() if pause_function: print '\033[93m ' + str(pause) + ' \033[0m' if not pause: #_3d_figure.clear() if num_of_diagrams > 1: ax.clear() ax.set_title("3D view") ax.set_xlabel('X - Distance') ax.set_ylabel('Y - Robot') ax.set_zlabel('Z - Time') update_plots(valid_flag, hogs, xi, yi, zi, cluster_labels, vcl, align_cl, grids)
def cluster_train(clear_data): global cc, ccnames, kat, ax, fig1, wall_cart, TP, FP, TN, FN, annotations_checked, fig3 hogs=[] surfaces=[] ann=[] Eps, cluster_labels= dbscan(clear_data,3) # DB SCAN print len(clear_data),' points in ', np.amax(cluster_labels),'clusters' #print 'Eps = ', Eps, ', outliers=' ,len(np.where(cluster_labels==-1)) max_label=int(np.amax(cluster_labels)) human=np.zeros(len(clear_data)) [xi,yi,zi] = [clear_data[:,0] , clear_data[:,1] , clear_data[:,2]] fig1.clear() kat.clear() kat.plot(wall_cart[:,0],wall_cart[:,1]) for k in range(1,max_label+1) : filter=np.where(cluster_labels==k) if len(filter[0])>timewindow : ax.scatter(xi[filter],yi[filter], zi[filter], 'z', 30,c=cc[k%12]) fig1.add_axes(ax) #kat.scatter(xi[filter],yi[filter],s=20, c=cc[k-1]) kat.scatter(xi[filter],yi[filter],s=20, c=cc[k%12]) grid=gridfit(yi[filter], zi[filter], xi[filter], 16, 16) #extract surface grid=grid-np.amin(grid) #build surface grid fig3.clear() ax3 = fig3.add_subplot(1,1,1, projection='3d') X, Y = np.mgrid[:16, :16] surf = ax3.plot_surface(X, Y, grid, rstride=1, cstride=1, cmap=cm.gray, linewidth=0, antialiased=False) surfaces.append(grid) hogs.append(hog(grid)) #extract features plt.pause(0.0001) #print ccnames[k-1],' cluster size :',len(filter[0]), 'Is',ccnames[k-1],'human? ' print ccnames[k%12],' cluster size :',len(filter[0]), 'Is',ccnames[k%12],'human? ' while True: ha = raw_input() if RepresentsInt(timewindow) and (int(ha)==1 or int(ha)==0): #print timewindow ha = int(ha) break else: print 'Try again, 1 for human or 0 for obstacle' if ha == classifier_annotations[0,annotations_checked]: if ha == 1: TP+=1 print 'TP' print TP else: TN+=1 print 'TN' print TN else: if classifier_annotations[0,annotations_checked] == 1: FP+=1 print 'FP' print FP else: FN+=1 print 'FN' print FN annotations_checked+=1 human[filter]=ha ann.append(ha) return cluster_labels,human,hogs,ann,surfaces