def generate_persistence_diagram(pcpds_manager, file, filtration):
    # Load the PCPDS object from the collection's current directory.
    file_path = os.path.join(pcpds_manager.get_path_manager().get_full_cur_dir(), file)
    pcpds_obj = file_manager.load(file_path)
    # TODO: Add capability to select the filtration method via an abstract function.
    result = filtration(pcpds_obj)
    file_manager.save(result, pcpds_manager.get_path_manager().get_full_cur_dir(),
                      pcpds_obj.get_cellID())
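# Hedged usage sketch (assumption, not in the original source): drive
# generate_persistence_diagram over every .json file in the current collection.
# Any callable that maps a PCPDS object to one carrying a persistence diagram
# (e.g. filtration.get_rips_diagram, used elsewhere in this repo) fits the
# filtration_fn slot.
def generate_all_persistence_diagrams(pcpds_manager, filtration_fn):
    """Apply filtration_fn to every .json pcpds file in the current collection."""
    cur_dir = pcpds_manager.get_path_manager().get_full_cur_dir()
    for file in os.listdir(cur_dir):
        if file.endswith('.json'):
            generate_persistence_diagram(pcpds_manager, file, filtration_fn)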
def main():
    pfm = PCPDS_Manager()
    number_of_data = 200  # Max 256 columns when saving to .xls
    num_partitions_to_slide = 3
    # Will need the filtration method for new point cloud filtering later.
    filt_method = None
    leading_zeros = 0
    dir_name = ""
    pfm.get_path_manager().set_cur_dir("")
    valid = False

    print("Please enter a collection that has already been filtered:")
    # If not a valid directory, ask again saying it is invalid
    while not valid:
        if not pfm.get_collection_dir():
            print("Invalid collection name:",
                  pfm.get_path_manager().get_cur_dir(), "try again.")
            dir_name = menu.get_input("Directory: ")
            pfm.get_path_manager().set_cur_dir(dir_name)
            valid = pfm.get_collection_dir()

        # Check whether the first pcpds object in this directory has a persistence diagram
        pcpds_temp = None
        for file in os.listdir(pfm.get_path_manager().get_full_cur_dir_var(dir_name)):
            file_path = os.path.join(pfm.get_path_manager().get_full_cur_dir(), file)
            pcpds_temp = file_manager.load(file_path)
            break

        if pcpds_temp is not None:
            if pcpds_temp.get_persistance_diagram() is not None:
                print("Valid Directory Chosen:", valid)
                # Store the filtration method used to form the persistence diagram for later use.
                filt_method = pcpds_temp.get_filtration_used()
                break
            else:
                valid = False
                print("\nNo persistence diagram present for files @ collection:",
                      pfm.get_path_manager().get_full_cur_dir() + ".\n")
                print("Please either enter a directory that has been filtrated for persistence "
                      "diagrams or run 'generate_persistance_diagrams.py' on the collection.")
        else:
            print("Problem loading pcpds file, it loaded as None.")

    wb = Workbook()
    excel_sheet = wb.add_sheet('Sheet 2')

    # Derive leading_zeros from the cell_ID length of a random pcpds.
    tmp_cellID = pfm.get_random_pcpds().get_cellID()
    leading_zeros = int((len(str(tmp_cellID)) - 1) / 3)
    print("LEADING ZEROS:", leading_zeros)

    for n in range(number_of_data):
        pcpds = None
        valid_idx = False
        while not valid_idx:
            # Grab a random pcpds from the currently selected directory.
            pcpds = pfm.get_random_pcpds()
            (X, Y, Z) = pcpds.get_xyz()
            print("XYZ of random pcpds: X:", X, "Y:", Y, "Z:", Z)
            # Check the lower bound to avoid errors from negative values.
            if X < 1 or Y < 1:
                print("Invalid XYZ")
                continue

            slide_left_X = pfm.gen_idx(X - 1, Y, leading_zeros)
            slide_right_X = pfm.gen_idx(X + 1, Y, leading_zeros)
            slide_up_Y = pfm.gen_idx(X, Y + 1, leading_zeros)
            slide_down_Y = pfm.gen_idx(X, Y - 1, leading_zeros)
            slide_left_down = pfm.gen_idx(X - 1, Y - 1, leading_zeros)
            slide_right_down = pfm.gen_idx(X + 1, Y - 1, leading_zeros)
            slide_right_up = pfm.gen_idx(X + 1, Y + 1, leading_zeros)
            slide_left_up = pfm.gen_idx(X - 1, Y + 1, leading_zeros)

            # Only accept the random cell if all four axis-aligned neighbours exist.
            if (pfm.get_path_manager().validate_file(os.path.join(pfm.get_collection_dir(), str(slide_left_X) + ".json"))
                    and pfm.get_path_manager().validate_file(os.path.join(pfm.get_collection_dir(), str(slide_right_X) + ".json"))
                    and pfm.get_path_manager().validate_file(os.path.join(pfm.get_collection_dir(), str(slide_up_Y) + ".json"))
                    and pfm.get_path_manager().validate_file(os.path.join(pfm.get_collection_dir(), str(slide_down_Y) + ".json"))):
                valid_idx = True

        # Get the random pcpds's details
        idx = pcpds.get_cellID()
        print("Random IDX chosen:", str(idx))
        (dimX, dimY, dimZ) = pcpds.get_dimensions()
        bounds = pcpds.get_bounds()
        # Grab the persistence diagram of the random idx.
        test_pd = pcpds.get_persistance_diagram()

        # TODO: Change how validation of these slid idx values is done?
        slide_left_X = pfm.get_pcpds(slide_left_X)
        slide_right_X = pfm.get_pcpds(slide_right_X)
        slide_up_Y = pfm.get_pcpds(slide_up_Y)
        slide_down_Y = pfm.get_pcpds(slide_down_Y)

        num_slides = 10
        num_directions = 4
        # results = [0] * (num_slides * num_partitions_to_slide)
        excel_sheet.write(0, n, str(idx))

        # Apply the transform to the point cloud and generate a persistence diagram
        # to compare for bottleneck distances.
        print("num_slides * num_partitions_to_slide:", num_slides * num_partitions_to_slide)
        for overlay in range(1, num_slides * num_partitions_to_slide):
            # Left
            bounds_left_X = menu.transform(bounds, dimX, -1, True, overlay, num_slides)
            left_X_pcpds = menu.within_point_cloud(pcpds, slide_left_X, bounds_left_X)
            # Right
            bounds_right_X = menu.transform(bounds, dimX, 1, True, overlay, num_slides)
            right_X_pcpds = menu.within_point_cloud(pcpds, slide_right_X, bounds_right_X)
            # Up
            bounds_up_Y = menu.transform(bounds, dimY, 1, False, overlay, num_slides)
            up_Y_pcpds = menu.within_point_cloud(pcpds, slide_up_Y, bounds_up_Y)
            # Down
            bounds_down_Y = menu.transform(bounds, dimY, -1, False, overlay, num_slides)
            down_Y_pcpds = menu.within_point_cloud(pcpds, slide_down_Y, bounds_down_Y)

            overlay_avg = -1
            num_dir = 0
            dist_sum = 0
            try:
                left_X_pcpds = filt_method(left_X_pcpds)
                left_X_pd = left_X_pcpds.get_persistance_diagram()
                dist_sum += bottleneck_distances.get_distances(left_X_pd, test_pd)
                num_dir += 1
            except Exception:
                print("ERROR LEFT")
            try:
                right_X_pcpds = filt_method(right_X_pcpds)
                right_X_pd = right_X_pcpds.get_persistance_diagram()
                dist_sum += bottleneck_distances.get_distances(right_X_pd, test_pd)
                num_dir += 1
            except Exception:
                print("ERROR RIGHT")
            try:
                up_Y_pcpds = filt_method(up_Y_pcpds)
                up_Y_pd = up_Y_pcpds.get_persistance_diagram()
                dist_sum += bottleneck_distances.get_distances(up_Y_pd, test_pd)
                num_dir += 1
            except Exception:
                print("ERROR UP")
            try:
                down_Y_pcpds = filt_method(down_Y_pcpds)
                down_Y_pd = down_Y_pcpds.get_persistance_diagram()
                dist_sum += bottleneck_distances.get_distances(down_Y_pd, test_pd)
                num_dir += 1
            except Exception:
                print("ERROR DOWN")

            # Average the bottleneck distances over the directions that succeeded;
            # -1 marks an overlay where every direction failed.
            if num_dir != 0:
                overlay_avg = dist_sum / num_dir
            else:
                overlay_avg = -1
            excel_sheet.write(overlay, n, str(overlay_avg))

        menu.progress(n, number_of_data, "Processing random grid: " + str(idx) + "...")

    menu.progress(1, 1, "Processing complete.")
    # Write results to an .xls file
    wb.save(dir_name + '.xls')
    print("Job done.")
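# Hedged refactoring sketch (assumption, not in the original source): the four
# try/except blocks in the overlay loop above repeat the same filter-then-compare
# step per direction. A helper like this could collapse them; filt_method and
# bottleneck_distances.get_distances are the same callables used above.
def average_bottleneck_distance(filt_method, test_pd, neighbour_pcpds_list):
    """Average bottleneck distance to test_pd over the neighbours that filter cleanly."""
    dist_sum = 0
    num_dir = 0
    for neighbour in neighbour_pcpds_list:
        try:
            filtered = filt_method(neighbour)
            dist_sum += bottleneck_distances.get_distances(
                filtered.get_persistance_diagram(), test_pd)
            num_dir += 1
        except Exception as err:
            print("Skipping direction after error:", err)
    return dist_sum / num_dir if num_dir else -1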
# If not a valid directory, ask again saying it is invalid
while not valid:
    if not pcpds_manager.get_collection_dir():
        print("Invalid collection name:",
              pcpds_manager.get_path_manager().get_cur_dir(), "try again.")
        collection = menu.get_input("Directory: ")
        pcpds_manager.get_path_manager().set_cur_dir(collection)
        valid = pcpds_manager.get_collection_dir()

    # Check whether the first pcpds object in this directory has a persistence diagram
    pcpds_temp = None
    for file in os.listdir(pcpds_manager.get_path_manager().get_full_cur_dir_var(collection)):
        file_path = os.path.join(pcpds_manager.get_path_manager().get_full_cur_dir(), file)
        pcpds_temp = file_manager.load(file_path)
        break

    if pcpds_temp is not None:
        if pcpds_temp.get_persistance_diagram() is not None:
            print("Valid Directory Chosen:", valid)
            break
        else:
            valid = False
            print("\nNo persistence diagram present for files @ collection:",
                  pcpds_manager.get_path_manager().get_full_cur_dir() + ".\n")
            print("Please either enter a directory that has been filtrated for persistence "
                  "diagrams or run 'generate_persistance_diagrams.py' on the collection.")
    else:
        print("Problem loading pcpds file, it loaded as None.")
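# Hedged refactoring sketch (assumption, not in the original source): this
# prompt-and-validate loop is repeated in several scripts here. A shared helper
# such as the one below could replace those copies; the PCPDS_Manager,
# file_manager, and menu calls are the same ones used above.
def prompt_for_filtered_collection(pcpds_manager):
    """Keep asking for a collection until one with persistence diagrams is found."""
    while True:
        collection = menu.get_input("Directory: ")
        pcpds_manager.get_path_manager().set_cur_dir(collection)
        if not pcpds_manager.get_collection_dir():
            print("Invalid collection name:", collection, "try again.")
            continue
        cur_dir = pcpds_manager.get_path_manager().get_full_cur_dir()
        json_files = [f for f in os.listdir(cur_dir) if f.endswith('.json')]
        if not json_files:
            print("No pcpds files found in", cur_dir)
            continue
        first = file_manager.load(os.path.join(cur_dir, json_files[0]))
        if first is None:
            print("Problem loading pcpds file, it loaded as None.")
            continue
        if first.get_persistance_diagram() is None:
            print("No persistence diagram present; run 'generate_persistance_diagrams.py' first.")
            continue
        return collection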
def compute_bottle_neck_dist():
    # Computes the bottleneck distance using a pre-processed/filtrated collection.
    pcpds_manager = PCPDS_Manager()

    print("Collections:")
    collections_string = ""
    collections = os.listdir(pcpds_manager.get_path_manager().get_collections_path())
    collections.sort()
    for directory in collections:
        collections_string += directory + " \t"
    print(collections_string)

    print("Please enter a collection that has already been filtrated:")
    # Loop here for a valid directory
    collection = menu.get_input("Directory: ")
    pcpds_manager.get_path_manager().set_cur_dir(collection)
    valid = pcpds_manager.get_collection_dir()

    while True:
        # If not a valid directory, ask again saying it is invalid
        while not valid:
            if not pcpds_manager.get_collection_dir():
                print("Invalid collection name:",
                      pcpds_manager.get_path_manager().get_cur_dir(), "try again.")
                collection = menu.get_input("Directory: ")
                pcpds_manager.get_path_manager().set_cur_dir(collection)
                valid = pcpds_manager.get_collection_dir()

        # Check whether the first pcpds object in this directory has a persistence diagram
        pcpds_temp = None
        for file in os.listdir(pcpds_manager.get_path_manager().get_full_cur_dir_var(collection)):
            file_path = os.path.join(pcpds_manager.get_path_manager().get_full_cur_dir(), file)
            pcpds_temp = file_manager.load(file_path)
            break

        if pcpds_temp is not None:
            if pcpds_temp.get_persistance_diagram() is not None:
                print("Valid Directory Chosen:", valid)
                break
            else:
                valid = False
                print("\nNo persistence diagram present for files @ collection:",
                      pcpds_manager.get_path_manager().get_full_cur_dir() + ".\n")
                print("Please either enter a directory that has been filtrated for persistence "
                      "diagrams or run 'generate_persistance_diagrams.py' on the collection.")
        else:
            print("Problem loading pcpds file, it loaded as None.")

    print("Ready to process, how many n_nearest results would you like?")
    # TODO: Validate that n_results is a valid number for the current dataset.
    n_results = menu.get_int_input()

    # Choose a modifier and apply it here
    pcpds = choose_pcpds(pcpds_manager)
    print("PCPDS Selected:", pcpds.get_cellID())
    pcpds, mods = modify_pcpds(pcpds)

    # Calculate the closest n matching bottleneck distances.
    closest_matches = bottleneck_distances.search_distances(
        n_results, pcpds.get_persistance_diagram(), valid)

    wb = Workbook()
    excel_sheet = wb.add_sheet('Bottle_Neck_Distance_Comparison')
    excel_sheet.write(0, 0, "Closest_" + str(n_results) + "_BD_Matches")
    excel_sheet.write(0, 1, "Bottle_Neck_Distance")
    excel_sheet.write(0, 2, "Cell_ID_Compared_Against:")
    excel_sheet.write(1, 2, pcpds.get_cellID())
    if len(mods) > 0:
        excel_sheet.write(0, 3, str(pcpds.get_cellID()) + " Modifications")
        iter = 1
        for mod in mods:
            excel_sheet.write(iter, 3, mod)
            iter += 1

    # Write results to the .xls file; each match is (file_name, distance).
    iter = 1
    for idx in closest_matches:
        excel_sheet.write(iter, 0, idx[0][:-5])
        excel_sheet.write(iter, 1, idx[1])
        iter = iter + 1

    # Add a tag to make the file name more unique and avoid mindlessly overwriting data.
    file_end_tag = str(pcpds.get_cellID())
    if len(mods) > 0:
        file_end_tag += ":" + mods[0]
    wb.save(os.path.join("results", pcpds_manager.get_path_manager().get_cur_dir())
            + "-" + file_end_tag + ":" + pcpds.get_filtration_used_name() + '.xls')
    print("Results saved as Excel file.")
# TODO: Make it able to take in a PCPDS object and visualize the point cloud
import Classes.file_manager as fm
from Classes.PCPDS_manager import PCPDS_Manager as pm
import pptk
import numpy as np
import os.path
import laspy

path_manager = pm()
dir = path_manager.get_path_manager().get_full_cur_dir_var("small_25/")
file_name = "1121601.json"
pcpds_obj = fm.load(os.path.join(dir, file_name))

point_cloud = pcpds_obj.get_point_cloud()
for p in point_cloud:
    print('X: ' + str(p[0]) + ', Y: ' + str(p[1]) + ', Z: ' + str(p[2]))

print("Showing Visualizer:")
P = np.random.rand(100, 3)
v = pptk.viewer(point_cloud)
v.set(point_size=.0001)
delay = input("Press any key to exit")

# header = laspy.header.Header()
# outfile = laspy.file.File("output.las", mode="w", header=header)
# outfile.X = [1, 2, 3]
# outfile.Y = [0, 0, 0]
# outfile.Z = [10, 10, 11]
# outfile.close()
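# Hedged sketch toward the TODO above (assumption, not in the original source):
# a small helper that accepts an already-loaded PCPDS object instead of a
# hard-coded file path. It only reuses calls already present in this script
# (get_point_cloud, pptk.viewer, viewer.set).
def visualize_pcpds(pcpds_obj, point_size=.0001):
    """Open a pptk viewer for the point cloud stored in a PCPDS object."""
    cloud = pcpds_obj.get_point_cloud()
    viewer = pptk.viewer(cloud)
    viewer.set(point_size=point_size)
    return viewer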
def get_random_pcpds(self):
    dir = self.path_manager.get_full_cur_dir()
    file_name = fm.get_random_file(dir, '.json')
    random_pcpds = fm.load(os.path.join(dir, file_name))
    return random_pcpds
def get_random_pcpds_idx(self, random_idx):
    # TODO: have a check for None and index out of bounds in here
    dir = self.path_manager.get_full_cur_dir()
    random_pcpds = fm.load(os.path.join(dir, str(random_idx) + '.json'))
    return random_pcpds
def get_pcpds(self, cell_ID):
    dir = self.path_manager.get_full_cur_dir()
    pcpds = fm.load(os.path.join(dir, str(cell_ID) + '.json'))
    return pcpds
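# Hedged usage sketch (assumption, not in the original source): how the accessors
# above are typically combined by the analysis scripts; only methods defined on
# PCPDS_Manager and its path manager are used.
# manager = PCPDS_Manager()
# manager.get_path_manager().set_cur_dir("some_collection")
# random_pcpds = manager.get_random_pcpds()                  # any cell in the collection
# same_cell = manager.get_pcpds(random_pcpds.get_cellID())   # look a cell up by ID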
def main():
    pfm = PCPDS_Manager()
    number_of_data = 400

    print("Please enter a collection that has already been filtered:")
    # TODO: list collections
    # Loop here for a valid directory
    collection = menu.get_input("Directory: ")
    pfm.get_path_manager().set_cur_dir(collection)
    valid = pfm.get_collection_dir()

    # If not a valid directory, ask again saying it is invalid
    while not valid:
        if not pfm.get_collection_dir():
            print("Invalid collection name:",
                  pfm.get_path_manager().get_cur_dir(), "try again.")
            collection = menu.get_input("Directory: ")
            pfm.get_path_manager().set_cur_dir(collection)
            valid = pfm.get_collection_dir()

        # Check whether the first pcpds object in this directory has a persistence diagram
        pcpds_temp = None
        for file in os.listdir(pfm.get_path_manager().get_full_cur_dir_var(collection)):
            file_path = os.path.join(pfm.get_path_manager().get_full_cur_dir(), file)
            pcpds_temp = file_manager.load(file_path)
            break

        if pcpds_temp is not None:
            if pcpds_temp.get_persistance_diagram() is not None:
                print("Valid Directory Chosen:", valid)
                break
            else:
                valid = False
                print("\nNo persistence diagram present for files @ collection:",
                      pfm.get_path_manager().get_full_cur_dir() + ".\n")
                print("Please either enter a directory that has been filtrated for persistence "
                      "diagrams or run 'generate_persistance_diagrams.py' on the collection.")
        else:
            print("Problem loading pcpds file, it loaded as None.")

    cur_dir = pfm.get_path_manager().get_full_cur_dir()
    wb = Workbook()
    excel_sheet = wb.add_sheet('Sheet 1')

    for n in range(number_of_data):
        # Find a random valid index whose four sliding neighbours also exist.
        valid_idx = False
        while not valid_idx:
            # Find a valid center pcpds
            test_idx = file_manager.get_random_file(cur_dir, '.json')[:-5]
            while not pfm.get_path_manager().validate_file(os.path.join(cur_dir, test_idx + ".json")):
                test_idx = file_manager.get_random_file(cur_dir, '.json')[:-5]
            test_pcpds = pfm.get_pcpds(test_idx)
            (X, Y, Z) = test_pcpds.get_xyz(str(test_idx))

            # Find the sliding-direction neighbour indices
            slide_left_X = las_obj.find_index(X - 1, Y)
            slide_right_X = las_obj.find_index(X + 1, Y)
            slide_up_Y = las_obj.find_index(X, Y + 1)
            slide_down_Y = las_obj.find_index(X, Y - 1)

            if (pfm.get_path_manager().validate_file(os.path.join(cur_dir, str(slide_left_X) + ".json"))
                    and pfm.get_path_manager().validate_file(os.path.join(cur_dir, str(slide_right_X) + ".json"))
                    and pfm.get_path_manager().validate_file(os.path.join(cur_dir, str(slide_up_Y) + ".json"))
                    and pfm.get_path_manager().validate_file(os.path.join(cur_dir, str(slide_down_Y) + ".json"))):
                valid_idx = True

        print("VALID RANDOM ID: ", test_idx)
        # Get the random pcpds's details
        print('COORDINATES: ' + 'X:' + str(X) + ' Y:' + str(Y) + ' Z:' + str(Z))
        (dimX, dimY, dimZ) = test_pcpds.get_dimensions()
        bounds = test_pcpds.get_bounds(str(test_idx))
        test_pcpds = filtration.get_rips_diagram(test_pcpds)
        test_pd = test_pcpds.get_persistance_diagram()

        results = [0] * 11
        num_dir = 4
        slide_left_X = pfm.get_pcpds(slide_left_X)
        slide_right_X = pfm.get_pcpds(slide_right_X)
        slide_up_Y = pfm.get_pcpds(slide_up_Y)
        slide_down_Y = pfm.get_pcpds(slide_down_Y)

        # Slide the frame in 10% steps in each direction
        for overlay in range(1, 10):
            # Left
            bounds_left_X = menu.transform(bounds, dimX, -1, True, overlay)
            left_X_pcpds = menu.within_point_cloud(test_pcpds, slide_left_X, bounds_left_X)
            left_X_pcpds = filtration.get_rips_diagram(left_X_pcpds)
            left_X_pd = left_X_pcpds.get_persistance_diagram()
            # Right
            bounds_right_X = menu.transform(bounds, dimX, 1, True, overlay)
            right_X_pcpds = menu.within_point_cloud(test_pcpds, slide_right_X, bounds_right_X)
            right_X_pcpds = filtration.get_rips_diagram(right_X_pcpds)
            right_X_pd = right_X_pcpds.get_persistance_diagram()
            # Up
            bounds_up_Y = menu.transform(bounds, dimY, 1, False, overlay)
            up_Y_pcpds = menu.within_point_cloud(test_pcpds, slide_up_Y, bounds_up_Y)
            up_Y_pcpds = filtration.get_rips_diagram(up_Y_pcpds)
            up_Y_pd = up_Y_pcpds.get_persistance_diagram()
            # Down
            bounds_down_Y = menu.transform(bounds, dimY, -1, False, overlay)
            down_Y_pcpds = menu.within_point_cloud(test_pcpds, slide_down_Y, bounds_down_Y)
            down_Y_pcpds = filtration.get_rips_diagram(down_Y_pcpds)
            down_Y_pd = down_Y_pcpds.get_persistance_diagram()

            # Find the average bottleneck distance at each overlay percentage.
            results[overlay - 1] = bottleneck_distances.get_distances(left_X_pd, test_pd)
            results[overlay - 1] += bottleneck_distances.get_distances(right_X_pd, test_pd)
            results[overlay - 1] += bottleneck_distances.get_distances(up_Y_pd, test_pd)
            results[overlay - 1] += bottleneck_distances.get_distances(down_Y_pd, test_pd)
            results[overlay - 1] /= num_dir

        # Write results to the .xls file
        num = 1
        excel_sheet.write(n, 0, str(test_idx))
        for overlay_avg in results:
            excel_sheet.write(n, num, str(overlay_avg))
            num = num + 1
        wb.save(collection + '.xls')
        menu.progress(n, number_of_data, "Processing random grid: " + str(test_idx) + "...")

    print("Job done.")
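# Hedged refactoring sketch (assumption, not in the original source): the four
# direction blocks in the overlay loop above differ only in dimension, sign, and
# neighbour. A table-driven helper like this could replace them; all calls
# (menu.transform, menu.within_point_cloud, filtration.get_rips_diagram,
# bottleneck_distances.get_distances) are the ones already used above.
def overlay_average(test_pcpds, test_pd, bounds, dimX, dimY, neighbours, overlay):
    """neighbours: dict with keys 'left', 'right', 'up', 'down' mapping to pcpds objects."""
    directions = [
        ('left', dimX, -1, True),
        ('right', dimX, 1, True),
        ('up', dimY, 1, False),
        ('down', dimY, -1, False),
    ]
    total = 0
    for name, dim, sign, is_x in directions:
        shifted_bounds = menu.transform(bounds, dim, sign, is_x, overlay)
        merged = menu.within_point_cloud(test_pcpds, neighbours[name], shifted_bounds)
        merged = filtration.get_rips_diagram(merged)
        total += bottleneck_distances.get_distances(merged.get_persistance_diagram(), test_pd)
    return total / len(directions)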