def main(train_index):
    """Train one Interaction ProMP per task on a selected subset of demos.

    Args:
        train_index: iterable of demonstration indices; the SAME index list
            is used to pick the training demos for every task.

    Returns:
        list of trained ``ipromps_lib.IProMP`` objects, one per task.
    """
    # load the preprocessed data from pkl
    datasets_norm_preproc = joblib.load(datasets_norm_preproc_path)
    min_max_scaler = joblib.load(min_max_scaler_path)
    noise_cov = joblib.load(noise_cov_path)

    # create one iProMP per task
    ipromps_set = [
        ipromps_lib.IProMP(num_joints=num_joints,
                           num_obs_joints=num_obs_joints,
                           num_basis=num_basis,
                           sigma_basis=sigma_basis,
                           num_samples=len_norm,
                           sigmay=noise_cov,
                           min_max_scaler=min_max_scaler,
                           num_alpha_candidate=num_alpha_candidate)
        for _ in datasets_norm_preproc
    ]

    # pick the training demos per task; train_index applies to every task
    # (the original built this by enumerating [train_index] * n_tasks)
    train_set = [[task_demos[i] for i in train_index]
                 for task_demos in datasets_norm_preproc]

    # add the selected demonstrations to each IProMP
    for ipromp, demos in zip(ipromps_set, train_set):
        for demo in demos:
            demo_temp = np.hstack([demo['left_hand'], demo['left_joints']])
            ipromp.add_demonstration(demo_temp)  # spatial variance demo
            ipromp.add_alpha(demo['alpha'])      # temporal variance demo
    return ipromps_set
def main():
    """Train one Interaction ProMP per task on all preprocessed demos,
    then persist the trained models to ``ipromps_set.pkl``.
    """
    # load the data from pkl
    task_name = joblib.load(task_name_path)
    datasets_norm_preproc = joblib.load(datasets_norm_preproc_path)
    min_max_scaler = joblib.load(min_max_scaler_path)
    noise_cov = joblib.load(noise_cov_path)

    # create one iProMP per task
    ipromps_set = [
        ipromps_lib.IProMP(num_joints=num_joints,
                           num_obs_joints=num_obs_joints,
                           num_basis=num_basis,
                           sigma_basis=sigma_basis,
                           num_samples=len_norm,
                           sigmay=noise_cov,
                           min_max_scaler=min_max_scaler,
                           num_alpha_candidate=num_alpha_candidate)
        for _ in datasets_norm_preproc
    ]

    # feed every preprocessed demo of each task into its IProMP
    for idx, ipromp in enumerate(ipromps_set):
        print('Training the IProMP for task: %s...' % task_name[idx])
        for demo in datasets_norm_preproc[idx]:
            demo_temp = np.hstack([demo['left_hand'], demo['left_joints']])
            ipromp.add_demonstration(demo_temp)  # spatial variance demo
            ipromp.add_alpha(demo['alpha'])      # temporal variance demo

    # save the trained models
    print('Saving the trained models...')
    joblib.dump(ipromps_set,
                os.path.join(datasets_pkl_path, 'ipromps_set.pkl'))
    print('Trained the IProMPs successfully!!!')
dataset_tape_hold_norm = joblib.load('./pkl/dataset_tape_hold_norm.pkl') ################################# # Interaction ProMPs train ################################# # the measurement noise cov matrix imu_meansurement_noise_cov = np.eye(4) * imu_noise emg_meansurement_noise_cov = np.eye(8) * emg_noise pose_meansurement_noise_cov = np.eye(7) * pose_noise meansurement_noise_cov_full = scipy.linalg.block_diag( imu_meansurement_noise_cov, emg_meansurement_noise_cov, pose_meansurement_noise_cov) # create a 3 tasks iProMP ipromp_aluminum_hold = ipromps_lib.IProMP(num_joints=num_joints, num_basis=num_basis, sigma_basis=sigma_basis, num_samples=num_samples, num_obs_joints=num_obs_joints, sigmay=meansurement_noise_cov_full) ipromp_spanner_handover = ipromps_lib.IProMP( num_joints=num_joints, num_basis=num_basis, sigma_basis=sigma_basis, num_samples=num_samples, num_obs_joints=num_obs_joints, sigmay=meansurement_noise_cov_full) ipromp_tape_hold = ipromps_lib.IProMP(num_joints=num_joints, num_basis=num_basis, sigma_basis=sigma_basis, num_samples=num_samples, num_obs_joints=num_obs_joints, sigmay=meansurement_noise_cov_full)