def _make_continuous_partitions(
    shuffled_results: ShuffledResults, score_threshold: float, frame_rate: float
) -> _PartitionedResults:
    time_window = max(1, int(frame_rate * CONTINUOUS_SEGMENT_TIME_WINDOW_SEC))
    min_segment_size = max(1, int(frame_rate * MIN_SEGMENT_SIZE_SEC))

    partitioned_results = _PartitionedResults(shuffled_results)

    # discard low score frames early (use the maximum value of both scores for now)
    good_score_frames = np.where(
        ma.greater_equal(ma.max(shuffled_results.scores, axis=1), score_threshold)
    )[0]

    for frame_index in good_score_frames:
        prev_theta = partitioned_results.theta[
            frame_index - min(time_window, frame_index) : frame_index, 0
        ]

        # if there is a gap bigger than time_window, start a new partition with an arbitrary choice (0)
        if np.all(np.any(prev_theta.mask, axis=1)):
            partitioned_results.set_partition(frame_index=frame_index, partition=0, new_partition=True)
        # otherwise, find the most recent valid frame within the window and continue its
        # partition as long as the angle values stay continuous
        else:
            last_valid_index = np.where(~np.any(prev_theta.mask, axis=1))[0][-1]
            dists = [
                angle_distance(
                    shuffled_results.theta[frame_index, k, :],
                    prev_theta[last_valid_index],
                )
                for k in range(2)
            ]
            partition = int(np.argmin(dists))
            if dists[partition] < CONTINUOUS_ANGLES_DIST_THRESHOLD:
                partitioned_results.set_partition(frame_index=frame_index, partition=partition)

    # discard segments shorter than min_segment_size
    for cur_partition_indices in partitioned_results.get_segments():
        if len(cur_partition_indices) < min_segment_size:
            partitioned_results.mask(cur_partition_indices)

    return partitioned_results
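
# NOTE: `_PartitionedResults` is not defined in this excerpt. The class below is a
# hypothetical sketch of the interface used above, assuming `theta` is a numpy
# masked array of shape (num_frames, 2, num_angles); all names and internals here
# are assumptions, not the project's actual implementation.
import numpy as np
import numpy.ma as ma


class _PartitionedResultsSketch:
    def __init__(self, shuffled_results):
        # Start fully masked; frames become visible once a partition is assigned.
        self.theta = ma.masked_all(shuffled_results.theta.shape)
        self._source_theta = shuffled_results.theta

    def set_partition(self, frame_index: int, partition: int, new_partition: bool = False):
        # Keep the chosen head/tail solution in slot 0 and its alternative in slot 1;
        # `new_partition` marks the start of a fresh segment after a gap.
        self.theta[frame_index, 0] = self._source_theta[frame_index, partition]
        self.theta[frame_index, 1] = self._source_theta[frame_index, 1 - partition]

    def get_segments(self):
        # Recover segments as runs of consecutive unmasked frames.
        valid = np.where(~np.any(self.theta.mask[:, 0], axis=-1))[0]
        if len(valid) == 0:
            return []
        breaks = np.where(np.diff(valid) > 1)[0] + 1
        return np.split(valid, breaks)

    def mask(self, frame_indices):
        # Discard the given frames entirely (e.g. segments that are too short).
        self.theta[frame_indices] = ma.masked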
def test_angle_distance():
    dims = 10
    input_outputs = [
        (np.zeros(dims, dtype=float), np.zeros(dims, dtype=float), 0),
        (np.ones(dims, dtype=float) * np.pi / 6, np.zeros(dims, dtype=float), np.pi / 6),
        (np.ones(dims, dtype=float), np.ones(dims, dtype=float), 0),
        (np.ones(dims, dtype=float) * np.pi / 2, np.ones(dims, dtype=float) * np.pi, np.pi / 2),
        (np.zeros(dims, dtype=float), np.ones(dims, dtype=float) * 2 * np.pi, 0),
        (np.ones(dims, dtype=float) * np.pi / 4, np.ones(dims, dtype=float) * np.pi / 8 + 2 * np.pi, np.pi / 8),
    ]
    for input_a, input_b, expected_output in input_outputs:
        output = angle_distance(input_a, input_b)
        assert np.allclose(output, expected_output, atol=1e-5)
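
# The test above pins down the behavior of `angle_distance`: it must be invariant
# to 2*pi shifts and reduce element-wise angle differences to a scalar. A minimal
# implementation consistent with all six cases (mean absolute difference, wrapped
# to [-pi, pi]) is sketched below; the project's actual code may differ.
import numpy as np


def angle_distance_sketch(theta_a: np.ndarray, theta_b: np.ndarray) -> float:
    # arctan2(sin(d), cos(d)) wraps each difference d into [-pi, pi],
    # so a full-turn offset (e.g. b + 2*pi) contributes zero distance.
    diff = theta_a - theta_b
    wrapped = np.arctan2(np.sin(diff), np.cos(diff))
    return float(np.mean(np.abs(wrapped)))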
def evaluate(dataset_path: str, **kwargs):
    """
    Evaluate a trained model by predicting on synthetic data
    and recording the image similarity

    :param dataset_path: Root path of the dataset containing videos of worm
    """
    args = _parse_arguments(dataset_path, kwargs)

    mp.set_start_method("spawn", force=True)

    random.seed(args.random_seed)
    np.random.seed(args.random_seed)

    results_dir = os.path.join(args.experiment_dir, "evaluation")
    os.makedirs(results_dir, exist_ok=True)

    config = load_config(args.config)
    eigenworms_matrix = load_eigenworms_matrix(args.eigenworms_matrix_path)

    dataset = load_dataset(
        dataset_loader=config.dataset_loader,
        dataset_path=dataset_path,
        selected_video_names=args.video_names,
        resize_options=ResizeOptions(resize_factor=config.resize_factor),
        **{WORM_IS_LIGHTER: config.worm_is_lighter},
    )

    pkl_filenames = _generate_synthetic_data(
        dataset,
        args.num_process,
        args.num_samples,
        args.postures_generation,
        args.temp_dir,
        args.random_seed,
    )

    keras_model = tf.keras.models.load_model(args.model_path, compile=False)

    tf_dataset = tf.data.Dataset.from_generator(
        partial(_eval_data_gen, pkl_filenames),
        tf.float32,
        tf.TensorShape(dataset.image_shape + (1,)),
    ).batch(args.batch_size)

    network_predictions = keras_model.predict(tf_dataset)[: args.num_samples]
    shuffled_results = ShuffledResults(random_theta=network_predictions)

    ResultsScoring(
        frame_preprocessing=dataset.frame_preprocessing,
        num_process=args.num_process,
        temp_dir=args.temp_dir,
        image_shape=dataset.image_shape,
    )(
        results=shuffled_results,
        scoring_data_manager=_ScoringDataManager(pkl_filenames),
    )
    # Keep the maximum score of the two head/tail options for this evaluation
    image_scores = np.max(shuffled_results.scores, axis=1)

    # Now calculate the angle error and mode error
    angle_error = []
    modes_error = []
    theta_predictions = []
    _, theta_labels = _load_templates(pkl_filenames)
    for theta_label, theta_results in zip(theta_labels, shuffled_results.theta):
        dists = [angle_distance(theta_result, theta_label) for theta_result in theta_results]
        closest_index = int(np.argmin(dists))
        closest_theta = theta_results[closest_index]
        theta_predictions.append(closest_theta)
        angle_error.append(dists[closest_index])
        if eigenworms_matrix is not None:
            modes_label = theta_to_modes(theta_label, eigenworms_matrix)
            modes_prediction = theta_to_modes(closest_theta, eigenworms_matrix)
            mode_error = np.abs(modes_label - modes_prediction)
            modes_error.append(mode_error)

    np.savetxt(os.path.join(results_dir, "image_score.txt"), image_scores)
    np.savetxt(os.path.join(results_dir, "angle_error.txt"), angle_error)
    np.savetxt(os.path.join(results_dir, "theta_labels.txt"), theta_labels)
    np.savetxt(os.path.join(results_dir, "theta_predictions.txt"), theta_predictions)
    if eigenworms_matrix is not None:
        np.savetxt(os.path.join(results_dir, "modes_error.txt"), modes_error)

    logger.info(
        f"Evaluated model with synthetic data,"
        f" average image similarity: {np.mean(image_scores):.4f},"
        f" average angle error (degrees): {np.rad2deg(np.mean(angle_error)):.2f}"
    )
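
# `theta_to_modes` is used above to compare postures in the low-dimensional
# eigenworm space. A plausible sketch, assuming `eigenworms_matrix` has shape
# (num_angles, num_modes) and that the mean angle (overall orientation) is
# removed before projection; the project's exact convention may differ.
import numpy as np


def theta_to_modes_sketch(theta: np.ndarray, eigenworms_matrix: np.ndarray) -> np.ndarray:
    # Subtracting the mean angle removes rigid rotation, so only the
    # worm's shape is projected onto the eigenworm basis.
    return (theta - np.mean(theta)).dot(eigenworms_matrix)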
MODE_THRESHOLD = 12

all_mode_errors = []
for index, (theta_wp, theta_onno) in enumerate(zip(thetas_wp, thetas_onno)):
    m_wp, t_wp = convert(theta_wp)
    m_onno, t_onno = convert(theta_onno)

    # skip frames where the third mode amplitude is below the threshold
    # for both head/tail options
    if np.abs(m_wp[0][2]) < MODE_THRESHOLD and np.abs(m_wp[1][2]) < MODE_THRESHOLD:
        continue

    # compare the first WormPose option against both head/tail options
    # of the other method and keep the closest pairing
    options = [(0, 0), (0, 1)]
    dists = [angle_distance(t_wp[x], t_onno[y]) for x, y in options]
    min_dist = int(np.argmin(dists))

    chosen_theta_wp = t_wp[options[min_dist][0]]
    chosen_theta_onno = t_onno[options[min_dist][1]]
    chosen_modes_wp = m_wp[options[min_dist][0]]
    chosen_modes_onno = m_onno[options[min_dist][1]]

    mode_errors = mode_dist(chosen_modes_wp, chosen_modes_onno)
    all_mode_errors.append(mode_errors)

all_mode_errors = np.array(all_mode_errors)


def plot_all_modes(no_text=False):
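
# `convert` and `mode_dist` are not defined in this excerpt. From their use above,
# `convert` plausibly returns (modes, thetas) for both head/tail orientations of a
# posture, and `mode_dist` a per-mode absolute error. The helpers below are a
# hypothetical sketch under those assumptions, including the assumed convention
# that flipping head and tail reverses the angle sequence and shifts it by pi;
# `theta_to_modes` and a module-level `eigenworms_matrix` are assumed available.
import numpy as np


def _flip_head_tail(theta: np.ndarray) -> np.ndarray:
    # Assumed convention: traversing the body backwards reverses the order of
    # the tangent angles and rotates each by pi.
    return np.flip(theta) + np.pi


def convert_sketch(theta: np.ndarray):
    # Return modes and thetas for both head/tail options of one posture.
    thetas = [theta, _flip_head_tail(theta)]
    modes = [theta_to_modes(t, eigenworms_matrix) for t in thetas]
    return modes, thetas


def mode_dist_sketch(modes_a, modes_b) -> np.ndarray:
    # Per-mode absolute error between two mode vectors.
    return np.abs(np.asarray(modes_a) - np.asarray(modes_b))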