def __getitem__(self, index):
    """Return a data point and its metadata information.

    Parameters:
        index (int) -- a random integer for data indexing

    Returns a dictionary that contains A, B, A_paths and B_paths
        A (tensor)    -- an image in the input domain
        B (tensor)    -- its corresponding image in the target domain
        A_paths (str) -- image paths
        B_paths (str) -- image paths
    """
    A_path = self.A_paths[index % self.A_size]  # make sure the index is within the range
    if self.opt.serial_batches:  # use fixed pairs when serial_batches is set
        index_B = index % self.B_size
    else:  # randomize the index for domain B to avoid fixed pairs
        index_B = random.randint(0, self.B_size - 1)
    B_path = self.B_paths[index_B]
    A_img = np.array(Image.open(A_path).convert('RGB'))
    A_img = self.stack(A_img)
    # Load domain B with the HSI loader; for regular RGB images, load with
    # Image.open as done for domain A instead.
    try:
        B_img = self.hsi_loader(B_path)
    except KeyError:
        print(B_path)
        raise  # re-raise so a failed load does not surface later as a NameError
    B = normalize(B_img, max_=4096)
    A = normalize(A_img, max_=1)
    A = adaptive_instance_normalization(A, B)
    del A_img, B_img
    return {'A': A, 'B': B, 'A_paths': A_path, 'B_paths': B_path}
def gen_normal_control_point(self, start, end):
    # Mid-edge normal control point of the PN triangle: average the end-point
    # normals, then reflect the average across the plane perpendicular to the edge.
    p_s = self.positionv3[start]
    p_e = self.positionv3[end]
    n_s = self.normalv3[start]
    n_e = self.normalv3[end]
    n = n_s + n_e
    v = p_e - p_s
    if all(abs(v) < ZERO):
        # Degenerate (zero-length) edge: fall back to the averaged normal.
        return normalize(n)
    else:
        return normalize(n - 2 * np.dot(n, v) / np.dot(v, v) * v)
def evaluate_crbi_model_using_tf(tf_model, b, l, r, input_mean, input_std,
                                 output_mean, output_std):
    # Feed the base-relative foot positions through the CRBI model and map the
    # network output back to physical units.
    inp1 = l - b
    inp2 = r - b
    inp = np.concatenate([inp1, inp2], axis=0)
    normalized_inp = np.expand_dims(
        util.normalize(inp, input_mean, input_std), axis=0)
    output = tf_model(normalized_inp)
    d_output = util.denormalize(np.squeeze(output), output_mean, output_std)
    return d_output, output
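# util.normalize / util.denormalize are defined elsewhere; a minimal sketch,
# assuming they perform plain z-score scaling with the provided per-dimension
# mean and standard deviation (distinct from the max-based normalize() used in
# the dataset __getitem__ above):
def normalize(x, mean, std):
    # Scale raw values to zero mean and unit variance.
    return (x - mean) / std


def denormalize(x, mean, std):
    # Invert the z-score scaling back to physical units.
    return x * std + mean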
def get_normal_in_pn_triangle(self, parameter: np.ndarray):
    # Evaluate the quadratic normal patch of the PN triangle at the given
    # barycentric coordinates using the degree-2 Bernstein basis.
    result = np.array([0] * 3, dtype='f4')
    ctrl_point_index = 0
    for i in range(2, -1, -1):
        for j in range(2 - i, -1, -1):
            k = 2 - i - j
            n = 2.0 * pow(parameter[0], i) * pow(parameter[1], j) * pow(parameter[2], k) \
                / factorial(i) / factorial(j) / factorial(k)
            result += self._pn_triangle_n[ctrl_point_index] * n
            ctrl_point_index += 1
    return np.append(normalize(result), 0)
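# Hypothetical usage of the method above, assuming `parameter` holds barycentric
# coordinates (u, v, w) with u + v + w = 1 and `tri` is an instance of the class
# that owns get_normal_in_pn_triangle; at the centroid of the patch:
#
#     centroid_normal = tri.get_normal_in_pn_triangle(np.array([1 / 3, 1 / 3, 1 / 3]))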
def compute_inertia_ellipsoids(base_lin, lf_lin, rf_lin, base_ang, tf_model,
                               input_mean, input_std, output_mean, output_std):
    # For each sample, estimate the local inertia from the base-relative foot
    # positions, rotate it into the world frame, and build the ellipsoid.
    n_data = base_lin.shape[0]
    ret_x, ret_y, ret_z = [None] * n_data, [None] * n_data, [None] * n_data
    for i in range(n_data):
        inp1 = lf_lin[i] - base_lin[i]
        inp2 = rf_lin[i] - base_lin[i]
        inp = np.concatenate([inp1, inp2], axis=0)
        normalized_inp = np.expand_dims(
            util.normalize(inp, input_mean, input_std), axis=0)
        output = tf_model(normalized_inp)
        d_output = util.denormalize(np.squeeze(output), output_mean, output_std)
        local_I = inertia_from_one_hot_vec(d_output)
        rot_w_base = euler_to_rot(base_ang[i])
        global_I = np.dot(np.dot(rot_w_base, local_I), rot_w_base.transpose())
        ret_x[i], ret_y[i], ret_z[i] = get_ellipsoid(global_I, base_lin[i])
    return ret_x, ret_y, ret_z
sensor_data["joint_vel"], True) rot_world_base = util.quat_to_rot(sensor_data['base_com_quat']) world_I = robot_sys.Ig[0:3, 0:3] local_I = np.dot(np.dot(rot_world_base.transpose(), world_I), rot_world_base) rf_iso = pybullet_util.get_link_iso(robot, link_id["r_sole"]) lf_iso = pybullet_util.get_link_iso(robot, link_id["l_sole"]) denormalized_output, output = evaluate_crbi_model_using_tf( crbi_model, sensor_data["base_com_pos"], lf_iso[0:3, 3], rf_iso[0:3, 3], input_mean, input_std, output_mean, output_std) local_I_est = inertia_from_one_hot_vec(denormalized_output) if b_ik: data_saver.add('gt_inertia', inertia_to_one_hot_vec(local_I)) data_saver.add( 'gt_inertia_normalized', util.normalize(inertia_to_one_hot_vec(local_I), output_mean, output_std)) data_saver.add('est_inertia_normalized', np.copy(np.squeeze(output))) data_saver.add('est_inertia', np.copy(denormalized_output)) data_saver.advance() # Disable forward step # p.stepSimulation() time.sleep(dt) t += dt count += 1