def test_disjointPath(self):
    """Exercise Nacre's disjoint-path search between two hosts, logging each path found."""
    cfg.k_Nacre = 20
    cfg.defaultBackupStrategy = BackupStrategy.TOR_TO_TOR

    fabric = Nacre()
    fabric.generate()

    source = 'h_1_A_1_1'
    dest = 'h_3_B_2_1'

    # Keep requesting a path disjoint from everything collected so far,
    # until the search comes back empty-handed.
    found = []
    candidate = fabric.findDisjointPath(source, dest, 0, found)
    while candidate is not None:
        found.append(candidate)
        globals.simulatorLogger.info(str(candidate))
        candidate = fabric.findDisjointPath(source, dest, 0, found)

    globals.simulatorLogger.info("Total disjoint paths found %s" % len(found))
    return True
## distributing tasks accross nodes ## from mpi4py import MPI comm = MPI.COMM_WORLD rank = comm.Get_rank() size = comm.Get_size() print(rank) #assuming mp3 for now. TODO: generalize candidate_files = sorted(data_path.glob('**/*' + feature_name + '.npy'), key=lambda path: path.parent.__str__()) tasks = distribute_tasks(candidate_files, rank, size) for i in tasks: path = candidate_files[i] feature_file = path.__str__() if new_feature_name is None: if keep_feature_name: new_feature_name = feature_name else: new_feature_name = feature_name + "_applied_" + transform_name base_filename = feature_file[:-(len(feature_name) + 4)] new_feature_file = base_filename + new_feature_name + ".npy" if replace_existing or not os.path.isfile(new_feature_file): features = np.load(feature_file) transform = pickle.load( open( data_path.joinpath(feature_name + '_' + transform_name + '.pkl'), "rb")) features = transform.transform(features) if transform_name == "pca_transform":
def _is_valid_file(path):
    """Return True iff *path* refers to an existing regular file.

    Any value that cannot be interpreted as a filesystem path — wrong type,
    illegal characters such as embedded NUL bytes, or OS-level failures while
    stat-ing — is reported as invalid instead of propagating an exception.
    """
    try:
        candidate = Path(str(path))
        return candidate.exists() and candidate.is_file()
    # TypeError: value not stringifiable into a path; ValueError: e.g. an
    # embedded null byte; OSError: filesystem errors from exists()/is_file().
    # (The sibling definition of this helper already catches this full tuple;
    # catching only TypeError here let ValueError/OSError escape.)
    except (TypeError, OSError, ValueError):
        return False
def _is_valid_file(path):
    """Report whether *path* can be resolved to an existing regular file.

    Inputs that are not path-like, contain illegal characters, or trigger
    OS errors during the check are simply treated as invalid.
    """
    try:
        candidate = Path(str(path))
        ok = candidate.exists() and candidate.is_file()
    except (TypeError, OSError, ValueError):
        ok = False
    return ok
# Per-rank slice of the work: each MPI rank takes a contiguous block of
# num_tasks_per_job songs, and ranks below the remainder pick up one extra.
rank = comm.Get_rank()
size = comm.Get_size()
print(rank)
print("creating tensorblocks")
#assuming egg sound format, as used in new BeatSaber format
candidate_audio_files = sorted(data_path.glob('**/*.egg'), key=lambda path: path.parent.__str__())
num_tasks = len(candidate_audio_files)
num_tasks_per_job = num_tasks//size
tasks = list(range(rank*num_tasks_per_job,(rank+1)*num_tasks_per_job))
# Hand the num_tasks % size leftover files out one apiece to the lowest ranks.
if rank < num_tasks%size:
    tasks.append(size*num_tasks_per_job+rank)
for i in tasks:
    path = candidate_audio_files[i]
    song_file_path = path.__str__()
    # feature files are going to be saved as numpy files
    features_file = song_file_path+"_"+feature_name+"_"+str(feature_size)+".npy"
    # blocks_reduced_file = song_file_path+"_"+difficulties+"_blocks_reduced_.npy"
    blocks_reduced_classes_file = song_file_path+difficulties+"_blocks_reduced_classes_.npy"
    # NOTE(review): unlike the commented-out line above, there is no "_" between
    # song_file_path and difficulties here — confirm this filename is intended.
    level_file_found = False
    # find level files with target difficulties that exist
    for diff in difficulties.split(","):
        if Path(path.parent.__str__()+"/"+diff+".dat").is_file():
            # NOTE(review): each matching difficulty overwrites level/info_file,
            # so the last matching difficulty in the list wins.
            level = list(path.parent.glob('./'+diff+'.dat'))[0]
            level = level.__str__()
            info_file = list(path.parent.glob('./info.dat'))[0]
            info_file = info_file.__str__()
            level_file_found = True
    if not level_file_found:
        # (chunk truncated here — the no-level handling lies outside this view)
def get_features(motion_data):
    """Assemble per-frame motion features.

    Concatenates the rotation-matrix representation of the SMPL joint
    angles with the global root translation along the feature axis.
    """
    joint_angle_feats = get_rot_matrices_from_axis_angle((motion_data['smpl_poses']))
    return np.concatenate([joint_angle_feats, motion_data['smpl_trans']], 1)


## distributing tasks across nodes ##
# Each MPI rank converts its share of the .pkl motion files into resampled
# joint-angle feature arrays saved next to the source file.
from mpi4py import MPI
comm = MPI.COMM_WORLD
rank = comm.Get_rank()
size = comm.Get_size()
print(rank)

candidate_motion_files = sorted(data_path.glob('**/*.pkl'),
                                key=lambda path: path.parent.__str__())
tasks = distribute_tasks(candidate_motion_files, rank, size)
for i in tasks:
    path = candidate_motion_files[i]
    motion_file_path = path.__str__()
    features_file = motion_file_path + "_" + "joint_angles_mats" + ".npy"
    # Skip work that is already on disk unless a rebuild was requested.
    if replace_existing or not os.path.isfile(features_file):
        # Use a context manager so the pickle handle is closed deterministically
        # (the original `pickle.load(open(path, "rb"))` leaked the file object).
        # NOTE: pickle.load is only safe on trusted, locally-produced files.
        with open(path, "rb") as motion_fh:
            motion_data = pickle.load(motion_fh)
        features = get_features(motion_data)
        print(features.shape)
        # Upsample to double the original frame count via linear resampling.
        features = ResampleLinear1D(features, features.shape[0] * 2)
        print(features.shape)
        np.save(features_file, features)