def interact_plot_1d(sample_name, catname, pr1, pr2, pr3, histdd, edgesdd):

  figs = [None, None, None]
  axes = [None, None, None]
  for d in range(0, 3):
    figs[d], axes[d] = create_subplot((1, 1))

  subplot_idx = 0
  ylbls = [pr + ' in ' + os.path.basename(sample_name)
    for pr in (pr1, pr2, pr3)]

  # Don't normalize the 1D hists: if the magnitudes are drastically
  #   different, normalizing will hide the differences. Debug info should be
  #   as true as possible.
  plot_hist_dd_given(histdd, edgesdd, figs, axes, subplot_idx, ylbls,
    normalize_1d=False, strict_lims=True)

  img_suff = [catname + '_' +
    os.path.splitext(os.path.basename(sample_name))[0] + '_' + pr
    for pr in (pr1, pr2, pr3)]

  show_plot(figs, img_suff, ndims=3, show=True, save=True)
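# A minimal usage sketch for interact_plot_1d(). The data and file name below
#   are hypothetical; np.histogramdd() is only used here to produce a 3D
#   histogram and its bin edges in the shape this function expects:
#
#   samples = np.random.rand(500, 3)                  # n x 3 triangle params
#   histdd, edgesdd = np.histogramdd(samples, bins=(10, 10, 10))
#   interact_plot_1d('/tmp/mug_0001.pcd', 'mug', 'l0', 'l1', 'a0',
#     histdd, edgesdd)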
        break

      # ROS loop control
      try:
        wait_rate.sleep()
      except rospy.exceptions.ROSInterruptException as err:
        break

    # Check termination again in outer loop
    if thisNode.doSkip:
      thisNode.doSkip = False
      continue

    if thisNode.doTerminate:
      break

    # ROS loop control
    try:
      wait_rate.sleep()
    except rospy.exceptions.ROSInterruptException as err:
      break

  # Show matplotlib plot and save figure to file
  show_plot(figs, ylbls)


if __name__ == '__main__':
  main()
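# The loop fragment above sits inside a standard rospy polling loop. A
#   minimal sketch of the assumed enclosing structure (node class name and
#   loop body are hypothetical, not verbatim from this file):
#
#   thisNode = SampleObjPlotter()        # hypothetical node class
#   wait_rate = rospy.Rate(10)
#   while not rospy.is_shutdown():
#     if thisNode.doTerminate:
#       break
#     thisNode.plot()
#     try:
#       wait_rate.sleep()
#     except rospy.exceptions.ROSInterruptException:
#       break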
  def plot(self):

    # If haven't received any data to plot, nothing to do.
    # Check obj_seq, don't check whether l1 is None, because the terminating
    #   msg -1 will contain l1 = None! Then this would make us return, and
    #   no plot would ever be saved or shown!
    if self.obj_seq == self.INIT_VAL:
      return

    # If haven't received data for ALL objects yet, do not plot yet. Cannot
    #   plot confusion matrix style plot without having all objects!
    if self.obj_seq != self.END_CONDITION:
      return

    # Publisher is signaling to us that it's done.
    # This conditional must be before the two below, else self.l1 gets
    #   reverted to 0.
    # If already shown, nothing to do.
    if self.plot_shown:
      self.doTerminate = True
      return

    #####
    # Print for user to make sure we got correct info from sample_pcl.cpp
    #####

    print('%sSampling density info received:%s' % (
      ansi_colors.OKCYAN, ansi_colors.ENDC))
    print('%snSamples: %d, nSamplesRatio: %f%s' % (
      ansi_colors.OKCYAN, self.nSamples, self.nSamplesRatio,
      ansi_colors.ENDC))

    #####
    # Find range for histogram bins, so that the same range can be used for
    #   ALL objects. This is a requirement for computing histogram
    #   intersections btw each possible pair of objects.
    # Default range by np.histogram() is simply (min(data), max(data)). So
    #   we just need to find min and max of data from ALL objects.
    #####

    nObjs = len(self.tri_params)

    print('[%d, %d, %d] bins for the 3 dimensions' % (
      #HistP.bins3D[0], HistP.bins3D[1], HistP.bins3D[2]))
      self.nbins[0], self.nbins[1], self.nbins[2]))

    bin_range, bin_range3D, header, row = find_data_range_for_hist(
      self.tri_params, self.decimeter, self.nbins,
      (self.pr1_idx, self.pr2_idx, self.pr3_idx))

    #####
    # Save configs of 3D histogram to a separate .csv file
    #####

    if self.save3D:
      # Write just one row, with headers to say what each column is
      (conf_outfile_name, conf_outfile, conf_writer, _) = \
        self.create_output_file('hist_conf', self.HIST3D, header)
      conf_writer.writerow(dict(zip(header, row)))
      conf_outfile.close()
      print('Outputted histogram configs to ' + conf_outfile_name)

    #####
    # Save configs of raw triangles file to a separate .csv file
    #####

    # Write 3 rows. Each row is a string saying what the raw triangle .csv
    #   file's corresponding row is.
    if self.saveRaw:
      (tri_conf_outfile_name, tri_conf_outfile, tri_conf_writer, _) = \
        self.create_output_file('tri_conf', self.TRI)
      # Each row is a single string
      for i in range(0, len(self.TRI_TITLES)):
        tri_conf_writer.writerow([self.TRI_TITLES[i]])
      tri_conf_outfile.close()
      print('Outputted raw triangles configs to ' + tri_conf_outfile_name)

    #####
    # Plot confusion matrix style graphs -
    #   graph in axes (i, i) is histogram intersection of histogram[i] with
    #   itself, so it's just the histogram itself (100% intersection).
    #####

    if self.doPlot:
      xlbl_suffix = ' (Meters)'
      if self.decimeter:
        xlbl_suffix = ' (Decimeters)'

      figs, success = plot_conf_mat_hist_inter(self.tri_params, self.bins,
        bin_range, self.nDims, nObjs, self.plotMinus, self.suptitles,
        self.obj_names, xlbl_suffix=xlbl_suffix)

      if not success:
        self.doTerminate = True
        return
      else:
        # Show the plot
        print('Showing figure...')
        show_plot(self.figs, self.file_suff, 1)

    #####
    # Save data to files
    #####

    firstLoop = True
    # Seconds. Time how long real processing takes
    start_time = time.time()

    # Loop through each object and save files
    for i in range(0, nObjs):

      print('Calculating for object %d' % i)

      # File base name without extension
      file_name = os.path.splitext(os.path.basename(self.obj_names[i]))[0]
      # Immediate subdir that the file is in, the category name
      cat_name = get_meta_cat_name(self.obj_names[i])

      # Create the 3D data
      if self.save3D or self.saveKDE or self.testKDE:

        #print (self.tri_params [0].shape)

        # n x 3
        tri_params_3D = np.asarray([
          self.tri_params[i][:, self.pr1_idx],
          self.tri_params[i][:, self.pr2_idx],
          self.tri_params[i][:, self.pr3_idx]]).T
        #print (tri_params_3D.shape)

        # Rescale the lengths by *10, to use decimeters, so KDE smoothing
        #   can work better.
        if self.decimeter:
          # NOTE ATTENTION: Must make sure you are rescaling the LENGTHS,
          #   NOT the angles!! So if you change PR#, must check here
          if self.pr1 == HistP.L0 or self.pr1 == HistP.L1 or self.pr1 == HistP.L2:
            tri_params_3D[:, 0] *= 10
          if self.pr2 == HistP.L0 or self.pr2 == HistP.L1 or self.pr2 == HistP.L2:
            tri_params_3D[:, 1] *= 10
          if self.pr3 == HistP.L0 or self.pr3 == HistP.L1 or self.pr3 == HistP.L2:
            tri_params_3D[:, 2] *= 10

      #####
      # Save 3D histograms to .csv file
      #####

      if self.save3D:

        (outfile_name, outfile, writer, _) = self.create_output_file(
          file_name, self.HIST3D, cat_name=cat_name)

        # histdd is a nbins[0] x nbins[1] x nbins[2] 3D matrix
        histdd, _, hist_linear = write_hist_3d_csv(writer, tri_params_3D,
          #bins=HistP.bins3D, bin_range=bin_range3D, normed=True)
          bins=self.nbins, bin_range=bin_range3D, normed=True)

        outfile.close()

        print('Histogram sum should be same for all objects this run: %f' %
          np.sum(hist_linear))
        print('%d nonzero values' % len(hist_linear[np.nonzero(hist_linear)]))
        print('Outputted 3D histogram of ' + self.obj_names[i] + ' to ' +
          outfile_name)

      #####
      # Save concatenated 1D histograms to file
      #####

      if self.save1D:

        hist1d_pr1, _ = np.histogram(self.tri_params[i][:, self.pr1_idx],
          bins=self.bins[self.pr1_idx], range=bin_range[self.pr1_idx],
          normed=True)
        hist1d_pr2, _ = np.histogram(self.tri_params[i][:, self.pr2_idx],
          bins=self.bins[self.pr2_idx], range=bin_range[self.pr2_idx],
          normed=True)
        hist1d_pr3, _ = np.histogram(self.tri_params[i][:, self.pr3_idx],
          bins=self.bins[self.pr3_idx], range=bin_range[self.pr3_idx],
          normed=True)

        # Concatenate the three 1D histograms
        row = []
        row.extend(hist1d_pr1.tolist())
        row.extend(hist1d_pr2.tolist())
        row.extend(hist1d_pr3.tolist())

        (outfile_name_1d, outfile_1d, writer_1d, _) = self.create_output_file(
          file_name, self.HIST1D, cat_name=cat_name)
          #file_name + '_1d', self.HIST1D)
        writer_1d.writerow(row)
        outfile_1d.close()

        print('Outputted 1D histogram of ' + self.obj_names[i] + ' to ' +
          outfile_name_1d)

      #####
      # Save raw triangle 3 params' data, to give to Ani
      #####

      if self.saveRaw:

        (raw_outfile_name, raw_outfile, raw_writer, _) = \
          self.create_output_file(file_name, self.TRI, cat_name=cat_name)

        # Write the 3 chosen triangle parameters. One parameter per row
        #raw_writer.writerow (self.tri_params [i] [:, self.pr1_idx].tolist ())
        #raw_writer.writerow (self.tri_params [i] [:, self.pr2_idx].tolist ())
        #raw_writer.writerow (self.tri_params [i] [:, self.pr3_idx].tolist ())

        # Write all 6 triangle parameters. One parameter per row.
        # Ordering in TRI_PARAMS_IDX is same as TRI_PARAMS, which is the
        #   TRI_TITLES we wrote to tri_conf.csv. They need to correspond, so
        #   reader knows what each row is!
        raw_writer.writerow(
          self.tri_params[i][:, HistP.TRI_PARAMS_IDX[0]].tolist())
        raw_writer.writerow(
          self.tri_params[i][:, HistP.TRI_PARAMS_IDX[1]].tolist())
        raw_writer.writerow(
          self.tri_params[i][:, HistP.TRI_PARAMS_IDX[2]].tolist())
        raw_writer.writerow(
          self.tri_params[i][:, HistP.TRI_PARAMS_IDX[3]].tolist())
        raw_writer.writerow(
          self.tri_params[i][:, HistP.TRI_PARAMS_IDX[4]].tolist())
        raw_writer.writerow(
          self.tri_params[i][:, HistP.TRI_PARAMS_IDX[5]].tolist())

        raw_outfile.close()

        print('Outputted raw triangles of ' + self.obj_names[i] + ' to ' +
          raw_outfile_name)

      #####
      # Save kernel density estimation (KDE) smoothed histograms to file
      #####

      if self.saveKDE or self.testKDE:

        (kde_name, kde_file, kde_writer, datapath) = self.create_output_file(
          file_name, self.KDE, cat_name=cat_name)

        # If first loop, save kde config file
        config_path = None
        if firstLoop:
          config_path = datapath

        histdd, edgesdd, density_linear = write_hist_3d_csv(kde_writer,
          tri_params_3D,
          #bins=HistP.bins3D, bin_range=bin_range3D, normed=True, kde=True,
          bins=self.nbins, bin_range=bin_range3D, normed=True, kde=True,
          obj_name=file_name, debug=self.testKDE, config_path=config_path)

        kde_file.close()

        print('Outputted 3D kernel density estimated (KDE) histogram of ' +
          self.obj_names[i] + ' to ' + kde_name)

      #####
      # Save probabilities data
      #####

      # Create a new IOProbs object for each object, so its data can be
      #   cleared out each time, and don't have to worry about resetting it,
      #   `.` each object will get saved to a separate file.
      # Discretize params copied from sample_gazebo.py.
      # This results in 2389 absolute poses, which n x n is 4 million
      #   entries! Too big. Rounding more.
      #self.io_probs = IOProbs ('_pcd', discretize_m_q_tri=(2, 3, 2))
      # (0.08, 0.3, 2) gets 172 poses, which is more right. But the numbers
      #   look too rounded. Most positions are some combination of 0.08 and
      #   0... how's that going to capture any important information...
      #   Quaternion is way too rough too.
      # (0.1, 0.5, 2) gets 42 poses, this number looks more right, but the
      #   values in abs_poses matrix are junk... All positions are 0.1, all
      #   quats are 0.5... Just different combos of 0.1, 0.5, 0. This is so
      #   useless!
      # (0.1, 0.5, 0.05) gets 42 poses. Now file is smaller though since I
      #   made the observations' resolution coarser, 102 MB.
      # TODO: I should visualize the abs_poses that end up getting collected
      #   in io_probs.py. See if they're any good at all. If they're too
      #   sparse, then the data is useless!
      # (0.06, 0.4, 0.05) with mean_quaternion=True, gets 32 poses, 5599
      #   triangles. Wrist poses look good in RViz with cube object.
      self.io_probs = IOProbs('_pcd', obj_center=(0.0, 0.0, 0.0),
        discretize_m_q_tri=(0.06, 0.05, 0.08))

      # Set file name for costs and probs data. Else file won't be saved!
      # Ref: http://stackoverflow.com/questions/13890935/timestamp-python
      timestamp = time.time()
      timestring = datetime.datetime.fromtimestamp(timestamp).strftime(
        '%Y-%m-%d-%H-%M-%S')
      self.io_probs.set_costs_probs_filenames(timestring)

      # Simulate a fake wrist pose for each triangle sampled, to train
      #   probabilities data.
      # Ret val is a list of 3-tuple wrist positions, and a list of 4-tuple
      #   wrist orientations.
      wrist_ps, wrist_qs = simulate_wrist_poses(self.tri_pts0[i],
        self.tri_pts1[i], self.tri_pts2[i],
        np.array((self.obj_center.x, self.obj_center.y, self.obj_center.z)),
        (self.obj_radii.x, self.obj_radii.y, self.obj_radii.z),
        self.vis_arr_pub)
      if rospy.is_shutdown():
        break

      # Use of io_probs.py functions copied from sample_gazebo.py.
      # For each sampled triangle, add it to probs matrix
      for tri_i in range(0, self.tri_pts0[i].shape[0]):

        # Concatenate p and q to make a 7-tuple.
        # There's only 1 triangle observed. This is by nature of pt cloud
        #   sampling, since I only sample 3 pts at a time.
        # Reshape the triangle row into a 2D array with np.reshape (1, -1),
        #   `.` fn wants 2D.
        #   http://stackoverflow.com/questions/12575421/convert-a-1d-array-to-a-2d-array-in-numpy
        self.io_probs.add_abs_pose_and_obs(
          wrist_ps[tri_i, :].tolist() + wrist_qs[tri_i, :].tolist(),
          np.reshape(self.tri_params[i][tri_i, :], (1, -1)))

        #uinput = raw_input ('[DEBUG] Press enter or q: ')
        #if uinput == 'q':
        if rospy.is_shutdown():
          break

      # Take mean of quaternions, to reduce number of poses
      if self.mean_quaternion:
        self.io_probs.take_mean_quaternion()
      self.io_probs.visualize_abs_poses(self.vis_arr_pub)

      # Write probs and costs to file.
      # TODO: Does object name really need to be full path? This is partial
      #   path right now. If something doesn't work, then need to make
      #   sample_pcl.cpp publish the full file path in obj_name field of msg.
      self.io_probs.compute_costs_probs(self.obj_cats[i], self.obj_names[i],
        np.array([self.obj_center.x, self.obj_center.y, self.obj_center.z]))
      self.io_probs.write_costs_probs()

      # TODO: Check if time delay btw 2 triangles in sample_pcl.cpp is
      #   enough for this probs I/O to finish! Maybe it's too short, then
      #   will need to add manual wait time in sample_pcl.cpp. Unless you
      #   want to do it the ACK way, which is more messy - I don't recommend
      #   it! Waste of time. I spent forever on that for the GSK GUI / state
      #   machine / C++ ACKing!

      # For next iteration
      firstLoop = False

    # end for nObjs

    # Copied from triangles_reader.py.
    # Print out running time in seconds
    end_time = time.time()
    print('Total time for %d objects: %f seconds.' % (
      nObjs, end_time - start_time))
    print('Average %f seconds per object.\n' % (
      (end_time - start_time) / nObjs))

    #####
    # Tell main() thread we're done
    #####

    self.plot_shown = True
    self.doTerminate = True
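# The confusion matrix style plots above compare objects by histogram
#   intersection. A minimal sketch of that metric (not the
#   plot_conf_mat_hist_inter() implementation), assuming both histograms
#   were computed with the SAME bin edges - which is exactly why a common
#   bin range is found across all objects above:
#
#   def hist_intersection(hist_a, hist_b):
#     # Sum of element-wise minima. For normalized histograms this lies in
#     #   [0, 1], and equals 1 when the two histograms are identical.
#     return np.sum(np.minimum(hist_a, hist_b))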
  def plot(self):

    # If haven't received any data to plot, nothing to do.
    # Check obj_seq, don't check l1 is None, because the terminating msg -1
    #   will contain l1 = None! Then this makes it return, and no plot will
    #   ever be saved or shown!
    if self.obj_seq == self.INIT_VAL:
      return

    # Publisher signaling to us that it's done.
    # This conditional must be before the two below, else self.l1 gets
    #   reverted to 0.
    if self.obj_seq == self.END_CONDITION:

      # If already shown, nothing to do.
      if self.plot_shown:
        self.doTerminate = True
        return

      self.plot_shown = True
      self.doTerminate = True

      # Show the plot
      print('Showing figure...')
      show_plot(self.figs, self.ylbls, 3)
      show_plot(self.figs1D, self.ylbls1D, 1)

      return

    # If already plotted the msg in store, nothing new to plot
    elif self.prev_plotted_seq >= self.obj_seq:
      return
    else:
      self.prev_plotted_seq = self.obj_seq

    #####
    # Plot 3D histograms.
    # NOTE Flaw in this file: Unlike sample_pcl_calc_hist.py, this file did
    #   not find a common bin range for all objects. To be able to do that,
    #   you have to save all objects' histograms in memory, until end of all
    #   objects, then find min and max across all histograms. At the time
    #   this file was written, I only needed to plot the hists, not do any
    #   real calculation (like saving hists to .csv file for real training,
    #   where the meaning of a bin should be SAME across all objects).
    #
    # So this file's 3D histograms are only good for viewing purposes.
    #   To perform calculations on 3D histograms, use
    #   sample_pcl_calc_hist.py.
    #####

    # Convert list of lists to np.array, this gives a 3 x n array. Transpose
    #   to get n x 3, which histogramdd() wants.
    tri_params = np.asarray([
      [i for i in self.l1],
      [i for i in self.l2],
      [i for i in self.a1]]).T

    hist, _ = plot_hist_dd(tri_params, self.bins, self.figs, self.axes,
      self.obj_seq, self.ylbls)

    #####
    # Plot 1D histograms for debugging
    #####

    single_param = np.zeros([len(self.l0), 6])
    single_param[:, 0] = np.asarray([i for i in self.l0])
    single_param[:, 1] = np.asarray([i for i in self.l1])
    single_param[:, 2] = np.asarray([i for i in self.l2])
    single_param[:, 3] = np.asarray([i for i in self.a0])
    single_param[:, 4] = np.asarray([i for i in self.a1])
    single_param[:, 5] = np.asarray([i for i in self.a2])

    for i in range(0, 6):
      plot_hist_dd(single_param[:, i], [self.bins1D[i]], [self.figs1D[i]],
        [self.axes1D[i]], self.obj_seq, [self.ylbls1D[i]])

    print('Plotted for obj_seq %d' % self.obj_seq)
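# np.histogramdd() expects its samples as an (n, D) array, one row per
#   sample, which is why the 3 x n array above is transposed to n x 3. A
#   quick standalone sketch with made-up data:
#
#   data = np.random.rand(500, 3)                        # n samples, 3 dims
#   hist, edges = np.histogramdd(data, bins=(10, 10, 10))
#   # hist.shape == (10, 10, 10); edges is a list of 3 arrays of 11 edges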
def main():

  #####
  # Parse cmd line args
  #####

  arg_parser = argparse.ArgumentParser()

  arg_parser.add_argument('histSubdirParam1', type=str,
    help='Used to create directory name to read from.\n' +
      'For point cloud, nSamples used when triangles were sampled.\n' +
      'For real robot data, specify the sampling density you want to classify real objects with, e.g. 10, will be used to load histogram bin configs.\n' +
      'For Gazebo, omit this (pass in like 0) and specify --prs instead. Triangle params to use for 3D histogram, with no spaces, e.g. l0,l1,a0')
  arg_parser.add_argument('histSubdirParam2', type=str,
    help='Used to create directory name to read from.\n' +
      'For point cloud, nSamplesRatio used when triangles were sampled.\n' +
      'For real robot data, specify the sampling density you want to classify real objects with, e.g. 0.95, will be used to load histogram bin configs.\n' +
      'For Gazebo, omit this (pass in like 0) and specify --nbins instead. Number of bins in 3D histogram, with no spaces, e.g. 10,10,10. This will be outputted to hist_conf.csv for all subsequent files in classification to use.')

  arg_parser.add_argument('--pcd', action='store_true', default=False,
    help='Boolean flag, no args. Run on synthetic data in csv_tri_lists/ from point cloud')
  arg_parser.add_argument('--gazebo', action='store_true', default=False,
    help='Boolean flag, no args. Run on synthetic data in csv_tri/ from Gazebo. nSamples and nSamplesRatio do not make sense currently, so just always enter same thing so data gets saved to same folder, like 0 0')
  arg_parser.add_argument('--real', action='store_true', default=False,
    help='Boolean flag. Run on real-robot data.')

  # Custom subdir under csv_tri, that doesn't have nSamples_nSamplesRatio
  #   style, nor triparams_nbins style.
  # Used for plotting 1D hist intersection plots for simple known shapes.
  arg_parser.add_argument('--out_tri_subdir', type=str, default='',
    help='String. Subdirectory name of output directory under csv_tri/. This overwrites --gazebo flag. Do not specify both.')
  arg_parser.add_argument('--long_csv_path', action='store_true', default=False,
    help='Boolean flag, no args. Specify it to read the full path in config file, as opposed to just using the base name in config file to read csv file from csv_tri or csv_gz_tri. Useful for comparing pcl and Gazebo data, which are outputted to different csv_*_tri paths.')

  # Meters. Though histograms use decimeters, raw triangles recorded from
  #   PCL, real robot, and Gazebo are all in meters.
  arg_parser.add_argument('--thresh_l', type=float, default=0.5,
    help='Length threshold in meters, above which to throw away a triangle.')
  arg_parser.add_argument('--no_prune', action='store_true', default=False,
    help='Specify this if you want no pruning to occur, i.e. keep all triangles.')

  # Number of histogram bins. Used to systematically test a range of
  #   different numbers of bins, to plot a graph of how the number of bins
  #   affects SVM classification accuracy. For paper.
  arg_parser.add_argument('--nbins', type=str,
    default='%d,%d,%d' % (HistP.bins3D[0], HistP.bins3D[1], HistP.bins3D[2]),
    help='Number of histogram bins. Same number for all 3 triangle parameter dimensions. This will be outputted to hist_conf.csv for all subsequent files in classification to use.')
  arg_parser.add_argument('--prs', type=str,
    default='%s,%s,%s' % (HistP.PR1, HistP.PR2, HistP.PR3),
    help='Triangle parameters to use for 3D histogram, e.g. l0,l1,a0, no spaces.')
  arg_parser.add_argument('--plot_hist_inter', action='store_true', default=False,
    help='Plot confusion matrix style histogram minus histogram intersection (only enable if have very few objects in meta list!!!! Plot is nObjs x nObjs big!)')
  # This looks ugly. I'm not using this. Would need to look up how to resize
  #   subplots to like 4 x 3 or something. Now it's all stretched out. Also
  #   not as useful as plot_hist_inter to spot differences.
  arg_parser.add_argument('--plot_hists', action='store_true', default=False,
    help='Plot histograms of all objects side by side, in one row. Only enable if you have very few objects in meta list! Else plots will be very small to fit on screen.')
  arg_parser.add_argument('--save_ind_subplots', action='store_true', default=False,
    help='Save each histogram intersection subplot to an individual file. Only in effect if --plot_hist_inter or --plot_hists is specified.')

  #arg_parser.add_argument ('--meta', type=str, default='models_active_test.txt',
  arg_parser.add_argument('--meta', type=str, default='models_gazebo_csv.txt',
    help='String. Base name of meta list file in triangle_sampling/config directory')

  # Set to True to upload to ICRA. (You can't view the plot in OS X Preview.)
  # Set to False if want to see the plot for debugging.
  arg_parser.add_argument('--truetype', action='store_true', default=False,
    help='Tell matplotlib to generate TrueType 42 font, instead of rasterized Type 3 font. Specify this flag for uploading to ICRA.')
  arg_parser.add_argument('--black_bg', action='store_true', default=False,
    help='Boolean flag. Plot with black background, useful for black presentation slides.')

  args = arg_parser.parse_args()

  if args.out_tri_subdir and args.long_csv_path:
    print('Both --out_tri_subdir and --long_csv_path are specified. These are used to construct subdirectory name under csv_tri/<out_tri_subdir> or an explicit path in config file. Cannot have both paths as input. Pick only one, and try again.')
    return

  # Construct input dir name
  out_tri_subdir = args.out_tri_subdir
  long_csv_path = args.long_csv_path

  csv_suffix = ''
  if args.gazebo:
    sampling_subpath, bins3D = get_triparams_nbins_subpath(args.prs, args.nbins)
    csv_suffix = 'gz_'
  elif args.real:
    sampling_subpath, bins3D = get_triparams_nbins_subpath(args.prs, args.nbins)
    csv_suffix = 'bx_'
  # Default to pcd mode
  #   (though I haven't tried this since adding the other modes...)
  else:
    # Sampling subpath to save different number of samples, for quick
    #   accessing without having to rerun sample_pcl.cpp.
    nSamples = int(args.histSubdirParam1)
    nSamplesRatio = float(args.histSubdirParam2)
    sampling_subpath = get_sampling_subpath(nSamples, nSamplesRatio)

    bins3D = get_ints_from_comma_string(args.nbins)

  if args.no_prune:
    print('%sNo pruning%s' % (ansi_colors.OKCYAN, ansi_colors.ENDC))
  else:
    thresh_l = args.thresh_l
    print('Length threshold (meters) above which to throw away: %g' % thresh_l)

    # No decimeters when pruning! All files should be saved in meters, so
    #   that when triangles_reader() runs, which calls read_triangles(), the
    #   decimeters aren't double counted!!! All data files on disk should be
    #   in meters!!
    #if HistP.decimeter:
    #  thresh_l *= 10

  # Parse the chosen parameters, to get a list of strings,
  #   e.g. ['l0', 'l1', 'a0']
  prs = args.prs.split(',')

  # Figure out the index
  prs_idx = []
  for i in range(0, len(prs)):
    if prs[i] == HistP.A0:
      prs_idx.append(HistP.A0_IDX)
    elif prs[i] == HistP.A1:
      prs_idx.append(HistP.A1_IDX)
    elif prs[i] == HistP.A2:
      prs_idx.append(HistP.A2_IDX)
    elif prs[i] == HistP.L0:
      prs_idx.append(HistP.L0_IDX)
    elif prs[i] == HistP.L1:
      prs_idx.append(HistP.L1_IDX)
    elif prs[i] == HistP.L2:
      prs_idx.append(HistP.L2_IDX)

  plot_hist_inter = args.plot_hist_inter
  plot_hists = args.plot_hists

  save_ind_subplots = False
  if plot_hist_inter:
    print('%splot_hist_inter is set to true. Make sure your meta file has no more than 6 objects uncommented!!! Else you may run out of memory, trying to plot nObjs x nObjs plots at the end.%s' % (
      ansi_colors.OKCYAN, ansi_colors.ENDC))
  if plot_hist_inter or plot_hists:
    # Only in effect if --plot_hist_inter or --plot_hists is specified, else
    #   ignore it
    save_ind_subplots = args.save_ind_subplots

  # Background color of bar graph. If black, text will be set to white via
  #   matplotlib_util.py black_background in call from plot_hist_dd.py.
  if args.black_bg:
    bg_color = 'black'
    fg_color = (0.0, 1.0, 1.0)
  else:
    bg_color = 'white'
    fg_color = 'g'

  # Init node to read triangle csv file
  triReaderNode = TrianglesOnRobotToHists(sampling_subpath,
    csv_suffix=csv_suffix)
  triReaderNode.default_config()

  # Read meta list file
  rospack = rospkg.RosPack()
  meta_name = os.path.join(get_recog_meta_path(), args.meta)
  meta_list = read_meta_file(meta_name)
  print('%sReading meta file from %s%s' % (
    ansi_colors.OKCYAN, meta_name, ansi_colors.ENDC))

  # Init output dir
  if not args.no_prune:
    pruned_dir = triReaderNode.tri_path.replace('_tri', '_pruned_tri')
    print('Pruned files will be outputted to %s' % pruned_dir)
    if not os.path.exists(pruned_dir):
      os.makedirs(pruned_dir)

  if long_csv_path:
    # This should give the train/ directory
    train_path = tactile_config.config_paths('custom', '')

  # For ICRA PDF font compliance. No Type 3 font (rasterized) allowed.
  # Ref: http://phyletica.org/matplotlib-fonts/
  # You can do this in code, or edit matplotlibrc. But problem with
  #   matplotlibrc is that it's permanent. When you export EPS using
  #   TrueType (42), Mac OS X cannot convert to PDF. So you won't be able to
  #   view the file you outputted! Better to do it in code therefore.
  #     >>> import matplotlib
  #     >>> print matplotlib.matplotlib_fname()
  # Ref: http://matplotlib.1069221.n5.nabble.com/Location-matplotlibrc-file-on-my-Mac-td24960.html
  if args.truetype:
    matplotlib.rcParams['pdf.fonttype'] = 42
    matplotlib.rcParams['ps.fonttype'] = 42

  nDims = -1
  min_vals = None
  max_vals = None

  # Copied from sample_pcl_calc_hist.py.
  # tri_params is a Python list of NumPy 2D arrays.
  #   tri_params [i] [:, dim] gives data for object i, dimension dim.
  tri_params = []
  obj_names = []

  #####
  # Prune outlier triangles (NOT IN USE ANYMORE. Do NOT prune! It was a
  #   wrong solution for a bug I later found the real solution to (duplicate
  #   threshold in triangles_collect.py was too big).)
  #####

  for line in meta_list:

    #####
    # Read triangle .csv file
    #####

    line = line.strip()

    # Full path to triangle csv file.
    # Construct csv file name from object model base name in meta file
    base = os.path.basename(line)
    if out_tri_subdir:
      base_tri = os.path.splitext(base)[0] + '_robo.csv'
      tri_name = os.path.join(triReaderNode.tri_path, out_tri_subdir, base_tri)
    elif long_csv_path:
      # Read path from csv file, instead of just the base name
      base_tri = base
      tri_name = os.path.join(train_path, line)
      if not tri_name.endswith('.csv'):
        print('%sLine in meta file does not end with .csv. Fix it and try again: %s%s' % (
          ansi_colors.FAIL, line, ansi_colors.ENDC))
        return
    else:
      base_tri = os.path.splitext(base)[0] + '_robo.csv'
      tri_name = os.path.join(triReaderNode.tri_path, base_tri)
    print(tri_name)

    # Ret val is a Python list of 3 n-item lists, each list storing one
    #   parameter for all n triangles in the file.
    tris, param_names = triReaderNode.read_tri_csv(tri_name,
      read_all_params=True)
    if tris is None:
      print('%sread_tri_csv() encountered error. Terminating...%s' % (
        ansi_colors.FAIL, ansi_colors.ENDC))
      return

    # Only true in first iteration. Initialize nDims and arrays
    if nDims < 0:
      nDims = len(param_names)
      min_vals = [1000] * nDims
      max_vals = [-1000] * nDims

    #####
    # Prune triangles
    #####

    # Ret val is a list of 6 lists of nTriangles floats, now converted to a
    #   NumPy array of 6 x nTriangles.
    if not args.no_prune:
      pruned = prune(tris, param_names, thresh_l)
    else:
      pruned = tris

    # Reshape ret val from prune() to a format accepted by
    #   plot_conf_mat_hist_inter(), for later plotting.
    # Do a sanity check, for if there are any triangles at all
    if (plot_hist_inter or plot_hists) and np.shape(pruned)[0] > 0:

      nTriangles = np.shape(pruned)[1]

      # tri_params is a Python list of 6 NumPy 2D arrays.
      #   tri_params [i] [:, dim] gives data for object i, dimension dim.
      tri_params.append(np.zeros([nTriangles, nDims]))

      if HistP.decimeter:
        pruned_deci = scale_tris_to_decimeters(pruned, True)
      else:
        pruned_deci = pruned

      for dim in range(0, nDims):
        tri_params[len(tri_params) - 1][:, dim] = pruned_deci[dim, :]

      obj_names.append(os.path.splitext(base)[0])

    #####
    # Write triangles to CSV file.
    # Code copied from triangles_collect.py. Decided not to refactor that
    #   file, `.` that file needs to run fast.
    #####

    if not args.no_prune:

      pruned_name = os.path.join(pruned_dir, base_tri)
      print('Pruned triangle data will be outputted to %s' % (pruned_name))

      pruned_file = open(pruned_name, 'wb')

      # Column names (triangle parameters) are defined in param_names, a
      #   list of 6 strings.
      pruned_writer = csv.DictWriter(pruned_file, fieldnames=param_names,
        restval='-1')
      pruned_writer.writeheader()

      # Write each row. Each row is a triangle, represented by 6 floats
      for t_idx in range(0, np.shape(pruned)[1]):
        # Each row is a dictionary. Keys are column titles (triangle
        #   parameters in strings), values are floats.
        row = dict()
        for p_idx in range(0, len(param_names)):
          row[param_names[p_idx]] = pruned[p_idx, t_idx]
        pruned_writer.writerow(row)

      # Close the file so the pruned rows are flushed to disk
      pruned_file.close()

    #####
    # Find min and max in triangle data, for histogram min max edges.
    # Like find_data_range_for_hist() in find_data_range_for_hist.py, but we
    #   don't want to store all objects' data, so we don't call that
    #   function, just call each of the two functions separately.
    #####

    min_vals, max_vals = find_data_range_in_one_obj(pruned.T, min_vals,
      max_vals)

  # end for each line in meta list file

  print('')

  # If meta file was empty (or all commented out)
  if (plot_hist_inter or plot_hists) and not obj_names:
    print('%sNothing was loaded from meta file. Did you specify the correct one? Did you uncomment at least one line? Terminating...%s' % (
      ansi_colors.FAIL, ansi_colors.ENDC))
    return

  # Debug
  #print ('min and max vals:')
  #print (min_vals)
  #print (max_vals)

  #####
  # Save hist_conf.csv using min max from all objects.
  # Copied from sample_pcl_calc_hist.py.
  #####

  # Pass in decimeter=False, `.` above, when read_tri_csv, already read the
  #   triangles data in as decimeters! Don't need to do another
  #   multiplication by 10 here, it'd be doing it twice!! Then it'd become
  #   centimeter scale!
  # Pass in bins3D to pick bin sizes to write to hist_conf.csv.
  # Pass in prs_idx to pick the 3 triangle parameters to write to header
  #   string of hist_conf.csv, and pick the appropriate bin ranges out of 6.
  bin_range, bin_range3D, header, row = make_bin_ranges_from_min_max(
    min_vals, max_vals, decimeter=False,
    # Parameters set at top
    bins3D=bins3D, prs_idx=prs_idx)

  if not args.no_prune:
    conf_path = tactile_config.config_paths('custom',
      os.path.join('triangle_sampling',
        'csv_' + csv_suffix + 'pruned_hists/', sampling_subpath))
  else:
    # eh... just use csv_gz_pruned_hists, I don't want it to mess up my good
    #   files in csv_gz_hists!!!
    conf_path = tactile_config.config_paths('custom',
      os.path.join('triangle_sampling',
        'csv_' + csv_suffix + 'hists/', sampling_subpath))

  # Create output file
  conf_outfile_name = os.path.join(conf_path, 'hist_conf.csv')
  conf_outfile = open(conf_outfile_name, 'wb')
  conf_writer = csv.DictWriter(conf_outfile, fieldnames=header)

  conf_writer.writeheader()
  conf_writer.writerow(dict(zip(header, row)))

  conf_outfile.close()
  print('Outputted histogram configs to ' + conf_outfile_name)

  #####
  # Plot confusion matrix style histogram minus histogram intersection,
  #   for debugging.
  #####

  tick_rot = 0

  if plot_hist_inter or plot_hists:

    nObjs = len(tri_params)
    #print ('nDims %d, nObjs %d' % (nDims, nObjs))

    xlbl_suffix = ' (Meters)'
    # When plotting, do mind decimeter mode, because classification will
    #   take this mode into account. Runtime = heed decimeter; files on
    #   disk = no decimeters.
    if HistP.decimeter:
      xlbl_suffix = ' (Decimeters)'

      # Scale bin ranges to decimeters too
      bin_range_deci, _ = scale_bin_range_to_decimeter(bin_range, bin_range3D)
    else:
      bin_range_deci = deepcopy(bin_range)

    if plot_hist_inter:

      tick_rot = 45

      # Copied from sample_pcl_calc_hist.py.
      # This gives you a detailed look
      #bins = [30, 30, 30, 30, 30, 30]
      # These are bins that are actually passed to classifier
      bins = [10, 10, 10, 10, 10, 10]

      plotMinus = True

      suptitles = deepcopy(HistP.TRI_PARAMS)
      file_suff = deepcopy(HistP.TRI_PARAMS)
      if plotMinus:
        suptitles.extend(suptitles)
        file_suff.extend([i + '_minusHist' for i in file_suff])

      # Make histogram intersection confusion matrix plots
      figs, success = plot_conf_mat_hist_inter(tri_params, bins,
        bin_range_deci, nDims, nObjs, plotMinus, suptitles, obj_names,
        xlbl_suffix=xlbl_suffix, bg_color=bg_color, fg_color=fg_color,
        tick_rot=tick_rot)

      nRows = nObjs
      nCols = nObjs

    elif plot_hists:

      tick_rot = 45

      bins = [10, 10, 10]

      suptitles = (HistP.TRI_PARAMS[prs_idx[0]],
        HistP.TRI_PARAMS[prs_idx[1]], HistP.TRI_PARAMS[prs_idx[2]])
      file_suff = deepcopy(suptitles)

      tri_params_3only = []
      for i in range(0, len(tri_params)):
        # Append columns of the 3 chosen parameters
        tri_params_3only.append(
          tri_params[i][:, (prs_idx[0], prs_idx[1], prs_idx[2])])

      # For each object, make 3 titles.
      # List of nHists (nObjs) lists of nDims (3) strings
      xlbls = []
      #for i in range (0, len (obj_names)):
        # Include triangle parameter name
        #xlbls.append ([])
        #xlbls [len (xlbls) - 1].append (suptitles[0] + ' in ' + obj_names [i])
        #xlbls [len (xlbls) - 1].append (suptitles[1] + ' in ' + obj_names [i])
        #xlbls [len (xlbls) - 1].append (suptitles[2] + ' in ' + obj_names [i])

        # Just object name
        #xlbls.append ([obj_names [i] [0 : min(20, len(obj_names[i]))]] * 3)

      # Pass in explicit edges, so all hists have the same edges! Else
      #   np.histogramdd() finds a different range for each object!
      # bins[i]+1: the +1 is `.` there are n+1 edges for n bins.
      edges = []
      edges.append(
        np.linspace(bin_range3D[0][0], bin_range3D[0][1], bins[0] + 1))
      edges.append(
        np.linspace(bin_range3D[1][0], bin_range3D[1][1], bins[1] + 1))
      edges.append(
        np.linspace(bin_range3D[2][0], bin_range3D[2][1], bins[2] + 1))

      figs, success = plot_hists_side_by_side(tri_params_3only, edges,
        suptitles, xlbls, bg_color=bg_color, fg_color=fg_color,
        tick_rot=tick_rot)

      nCols = nObjs
      nRows = 1

    if success:

      # Show the plot
      print('Saving figures...')
      show_plot(figs, file_suff, 1, show=False, bg_color=bg_color)

      if save_ind_subplots:
        if plot_hist_inter:
          #save_individual_subplots (figs, file_suff, 'l0_minusHist',
          save_individual_subplots(figs, file_suff, 'l2_minusHist',
            nRows, nCols, scale_xmin=1.15, scale_x=1.3, scale_y=1.3,
            ndims=1, show=False, bg_color=bg_color, tick_rot=tick_rot)
        elif plot_hists:
          desired_plot = 'l0'
          save_individual_subplots(figs, file_suff, desired_plot,
            nRows, nCols, scale_xmin=1.1, scale_x=1.2, scale_y=1.15,
            ndims=1, show=False, bg_color=bg_color)
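# Example invocations (the script name is hypothetical; the flags are the
#   ones defined in main() above):
#
#   # Gazebo data: positional args unused (pass 0 0), pick triangle params
#   #   and bins explicitly
#   python prune_triangles.py 0 0 --gazebo --prs l0,l1,a0 --nbins 10,10,10 --plot_hists
#
#   # Point cloud data: nSamples=300, nSamplesRatio=0.95, keep all triangles
#   python prune_triangles.py 300 0.95 --no_prune --plot_hist_inter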