def initialise_everything():
    """Interactively run the full preprocessing chain over the shape database.

    Steps (each skipped when its output already exists):
      1. Confirm with the user (the run can take ~5 hours total).
      2. Read dataset/feature paths from ``config.json``.
      3. Verify the PSB dataset directory is non-empty, then let the user
         review classification settings via ``prompt_for_class_files``.
      4. Run the normalisation pipeline if no normalised dataset file exists.
      5. Run feature extraction if no feature file exists.

    Returns early (back to the caller's menu) when the user declines at
    either prompt or when no dataset is found.
    """
    print('''This procedure can take up to hours to finish. The program will now run: - Normalisation pipeline over the shape database. (~3hrs) - Feature extraction over shape database. (~2hrs)\n Are you sure you want to continue (y/n)?\n ''')
    choice = input(">> ")
    if choice == "n" or choice == "no":
        return
    with open('config.json') as f:
        data = json.load(f)
    path_psd = data["DATA_PATH_PSB"]
    path_normed = data["DATA_PATH_NORMED"]
    path_feature = data["FEATURE_DATA_FILE"]
    db = PSBDataset()
    if len(os.listdir(path_psd)) == 0:
        print("No valid dataset found.\nPoint to a valid dataset.")
        return
    else:
        prompt_for_class_files(path_psd)
        choice = input(
            "Do you wish to go back to the menu to change the current classification settings? (y/n)\n>> "
        )
        # BUG FIX: the original returned on "n", which inverted the prompt —
        # answering "n" (do NOT go back) aborted the run, while "y" (go back)
        # ran the pipeline anyway. Return only when the user asks to go back.
        if choice == "y":
            return
    if not os.path.isfile(path_normed):
        print("No valid normalised dataset found.\nRunning normalisation.")
        norm = Normalizer(db)
        norm.run_full_pipeline()
    if not os.path.isfile(path_feature):
        # Message fixed ("Run" -> "Running") for consistency with the
        # normalisation branch above — extraction starts immediately below.
        print("No valid feature file found.\nRunning feature extraction.")
        FE = FeatureExtractor(db)
        FE.run_full_pipeline()
# Smoke-test the three pipeline stages end to end on the debug subset:
# dataset reader -> normalizer -> feature extractor.
SEP = "=" * 10
DONE_BANNER = "======================================= Done! ==========================================="

# --- Dataset reader: full pipeline plus statistics/outlier/polydata passes ---
print(f"{SEP}Testing full pipeline for dataset reader{SEP}")
dataset = PSBDataset(DATA_PATH_DEBUG, class_file_path=CLASS_FILE)
dataset.run_full_pipeline()
dataset.compute_shape_statistics()
dataset.detect_outliers()
dataset.convert_all_to_polydata()
dataset.save_statistics("./trash", "stats_test.csv")
print(DONE_BANNER)

# --- Normalizer: writes its output to the debug subset target path ---
print(f"{SEP}Testing full pipeline for normalizer{SEP}")
init_dataset = PSBDataset(DATA_PATH_DEBUG, class_file_path=CLASS_FILE)
norm = Normalizer(init_dataset)
norm.target_path = DATA_PATH_NORMED_SUBSET
normed_data = norm.run_full_pipeline()
print(DONE_BANNER)

# --- Feature extractor: reads the normalised subset, writes a throwaway jsonl ---
print(f"{SEP}Testing full pipeline for feature extractor{SEP}")
normed_dataset = PSBDataset(search_path=DATA_PATH_NORMED_SUBSET, class_file_path=CLASS_FILE)
FE = FeatureExtractor(normed_dataset, target_file="./trash/feat_test.jsonl")
features = FE.run_full_pipeline()
print(DONE_BANNER)