def test_construction_and_operations02(self):
    """Verify the C++ and Python 2D hierarchical optimizers agree.

    Builds one optimizer per implementation language with identical shared
    parameters, runs both on the same canonical/live SDF field pair, and
    asserts that the resulting warp fields and warped live fields match
    within tolerance.
    """
    dataset_to_use = dataset.PredefinedDatasetEnum.REAL3D_SNOOPY_SET00
    generation_method = ho_cpp.tsdf.FilteringMethod.EWA_VOXEL_SPACE_INCLUSIVE

    # NOTE: removed an unused camera-intrinsic-matrix local that was never
    # passed to any call in this test.
    canonical_field, live_field = dataset.datasets[
        dataset_to_use].generate_2d_sdf_fields(generation_method,
                                               use_cpp=True)

    shared_parameters = build_opt.HierarchicalOptimizer2dSharedParameters()
    shared_parameters.maximum_warp_update_threshold = 0.01
    shared_parameters.maximum_iteration_count = 2

    # Python-specific parameters
    verbosity_parameters_py = ho_py.HierarchicalOptimizer2d.VerbosityParameters()
    visualization_parameters_py = hov_py.HierarchicalOptimizer2dVisualizer.Parameters()
    visualization_parameters_py.out_path = "out"

    # C++-specific parameters
    verbosity_parameters_cpp = ho_cpp.HierarchicalOptimizer2d.VerbosityParameters()
    logging_parameters_cpp = ho_cpp.HierarchicalOptimizer2d.LoggingParameters(
        collect_per_level_convergence_reports=True,
        collect_per_level_iteration_data=False)
    resampling_strategy = ho_cpp.HierarchicalOptimizer2d.ResamplingStrategy.NEAREST_AND_AVERAGE

    optimizer_cpp = build_opt.make_hierarchical_optimizer2d(
        implementation_language=build_opt.ImplementationLanguage.CPP,
        shared_parameters=shared_parameters,
        verbosity_parameters_cpp=verbosity_parameters_cpp,
        logging_parameters_cpp=logging_parameters_cpp,
        verbosity_parameters_py=verbosity_parameters_py,
        visualization_parameters_py=visualization_parameters_py,
        resampling_strategy_cpp=resampling_strategy)

    warp_field_cpp = optimizer_cpp.optimize(canonical_field, live_field)
    warped_live_cpp = resampling.warp_field(live_field, warp_field_cpp)

    optimizer_py = build_opt.make_hierarchical_optimizer2d(
        implementation_language=build_opt.ImplementationLanguage.PYTHON,
        shared_parameters=shared_parameters,
        verbosity_parameters_cpp=verbosity_parameters_cpp,
        logging_parameters_cpp=logging_parameters_cpp,
        verbosity_parameters_py=verbosity_parameters_py,
        visualization_parameters_py=visualization_parameters_py)

    warp_field_py = optimizer_py.optimize(canonical_field, live_field)
    warped_live_py = resampling.warp_field(live_field, warp_field_py)

    # atol=10e-6 (i.e. 1e-5) matches the cross-language tolerance used by the
    # other tests in this file.
    self.assertTrue(np.allclose(warp_field_cpp, warp_field_py, atol=10e-6))
    self.assertTrue(np.allclose(warped_live_cpp, warped_live_py, atol=10e-6))
    def test_cpp_iteration_data(self):
        """Exercise per-level iteration-data collection in the C++ optimizer.

        Runs the C++ optimizer with telemetry collection enabled, then checks
        a recorded intermediate warp field and the final results against the
        stored test data.
        """
        verbosity_params = ho_cpp.HierarchicalOptimizer2d.VerbosityParameters()
        logging_params = ho_cpp.HierarchicalOptimizer2d.LoggingParameters(
            collect_per_level_convergence_reports=True,
            collect_per_level_iteration_data=True)
        strategy = ho_cpp.HierarchicalOptimizer2d.ResamplingStrategy.NEAREST_AND_AVERAGE

        optimizer = ho_cpp.HierarchicalOptimizer2d(
            tikhonov_term_enabled=False,
            gradient_kernel_enabled=False,
            maximum_chunk_size=8,
            rate=0.2,
            maximum_iteration_count=100,
            maximum_warp_update_threshold=0.001,
            data_term_amplifier=1.0,
            tikhonov_strength=0.0,
            kernel=sob.generate_1d_sobolev_kernel(size=7, strength=0.1),
            resampling_strategy=strategy,
            verbosity_parameters=verbosity_params,
            logging_parameters=logging_params)

        resulting_warp_field = optimizer.optimize(test_data.canonical_field,
                                                  test_data.live_field)
        warped_live = resampling.warp_field(test_data.live_field,
                                            resulting_warp_field)

        # Inspect the warp-field snapshots recorded at pyramid level 3.
        per_level_data = optimizer.get_per_level_iteration_data()
        level_warp_fields = per_level_data[3].get_warp_fields()

        self.assertTrue(
            np.allclose(level_warp_fields[50],
                        test_data.iteration50_warp_field,
                        atol=1e-6))
        self.assertTrue(
            np.allclose(resulting_warp_field, test_data.warp_field,
                        atol=10e-6))
        self.assertTrue(
            np.allclose(warped_live, test_data.final_live_field, atol=10e-6))
    def test_construction_and_operation01(self):
        """Run the Python and C++ optimizers and compare both to stored data.

        The Python result is checked with default tolerances; the C++ result
        is checked with atol=10e-6 to allow for cross-language float drift.
        """
        # --- Python implementation ---
        py_optimizer = ho_py.HierarchicalOptimizer2d(
            rate=0.2,
            data_term_amplifier=1.0,
            maximum_warp_update_threshold=0.001,
            maximum_iteration_count=100,
            tikhonov_term_enabled=False,
            kernel=None,
            verbosity_parameters=ho_py.HierarchicalOptimizer2d.VerbosityParameters(
                print_max_warp_update=False))
        computed_warp_field = py_optimizer.optimize(test_data.canonical_field,
                                                    test_data.live_field)
        warped_live = resampling.warp_field(test_data.live_field,
                                            computed_warp_field)

        self.assertTrue(np.allclose(computed_warp_field, test_data.warp_field))
        self.assertTrue(np.allclose(warped_live, test_data.final_live_field))

        # --- C++ implementation with equivalent settings ---
        cpp_optimizer = ho_cpp.HierarchicalOptimizer2d(
            tikhonov_term_enabled=False,
            gradient_kernel_enabled=False,
            maximum_chunk_size=8,
            rate=0.2,
            maximum_iteration_count=100,
            maximum_warp_update_threshold=0.001,
            data_term_amplifier=1.0)

        computed_warp_field = cpp_optimizer.optimize(test_data.canonical_field,
                                                     test_data.live_field)
        warped_live = resampling.warp_field(test_data.live_field,
                                            computed_warp_field)

        self.assertTrue(
            np.allclose(computed_warp_field, test_data.warp_field, atol=10e-6))
        self.assertTrue(
            np.allclose(warped_live, test_data.final_live_field, atol=10e-6))
# ---- Example #4 ----
def main():
    """Run the 2D hierarchical optimizer on one predefined dataset.

    Generates a canonical/live SDF field pair, optimizes the warp field, and
    (for the C++ implementation) prints per-level convergence reports and
    converts the collected per-iteration telemetry into video output.

    :return: EXIT_CODE_SUCCESS on completion
    """
    chosen_dataset = ds.PredefinedDatasetEnum.REAL3D_SNOOPY_SET05
    # tsdf_generation_method = tsdf.GenerationMethod.EWA_TSDF_INCLUSIVE_CPP
    tsdf_generation_method = tsdf.GenerationMethod.BASIC
    implementation_language = build_opt.ImplementationLanguage.CPP
    visualize_initial_and_final = False
    generate_test_data = False
    out_path = "output/ho/single"

    if not os.path.exists(out_path):
        os.makedirs(out_path)

    sampling.set_focus_coordinates(0, 0)

    live_field, canonical_field = ds.datasets[chosen_dataset].generate_2d_sdf_fields(
        method=tsdf_generation_method, smoothing_coefficient=0.5)

    view_scaling_factor = 1024 // ds.datasets[chosen_dataset].field_size

    if visualize_initial_and_final:
        viz.visualize_and_save_initial_fields(canonical_field, live_field,
                                              out_path, view_scaling_factor)

    if generate_test_data:
        # crop a small patch when producing fixture data for the unit tests
        live_field = live_field[36:52, 21:37].copy()
        canonical_field = canonical_field[36:52, 21:37].copy()

    shared_parameters = build_opt.HierarchicalOptimizer2dSharedParameters()
    shared_parameters.maximum_warp_update_threshold = 0.01
    shared_parameters.maximum_iteration_count = 100

    verbosity_parameters_py = \
        build_opt.make_common_hierarchical_optimizer2d_py_verbosity_parameters()
    verbosity_parameters_cpp = ho_cpp.HierarchicalOptimizer2d.VerbosityParameters(
        print_max_warp_update=True,
        print_iteration_mean_tsdf_difference=True,
        print_iteration_std_tsdf_difference=True,
        print_iteration_data_energy=True,
        print_iteration_tikhonov_energy=True,
    )
    visualization_parameters_py = \
        build_opt.make_common_hierarchical_optimizer2d_visualization_parameters()
    visualization_parameters_py.out_path = out_path
    logging_parameters_cpp = ho_cpp.HierarchicalOptimizer2d.LoggingParameters(
        collect_per_level_convergence_reports=True,
        collect_per_level_iteration_data=True)
    resampling_strategy = ho_cpp.HierarchicalOptimizer2d.ResamplingStrategy.NEAREST_AND_AVERAGE
    #resampling_strategy = ho_cpp.HierarchicalOptimizer2d.ResamplingStrategy.LINEAR

    optimizer = build_opt.make_hierarchical_optimizer2d(
        implementation_language=implementation_language,
        shared_parameters=shared_parameters,
        verbosity_parameters_cpp=verbosity_parameters_cpp,
        logging_parameters_cpp=logging_parameters_cpp,
        verbosity_parameters_py=verbosity_parameters_py,
        visualization_parameters_py=visualization_parameters_py,
        resampling_strategy_cpp=resampling_strategy)

    warp_field = optimizer.optimize(canonical_field, live_field)

    if implementation_language == build_opt.ImplementationLanguage.CPP:
        print(
            "==================================================================================="
        )
        print_convergence_reports(optimizer.get_per_level_convergence_reports())
        telemetry_log = optimizer.get_per_level_iteration_data()
        metadata = viz_ho.get_telemetry_metadata(telemetry_log)
        frame_count = viz_ho.get_number_of_frames_to_save_from_telemetry_logs(
            [telemetry_log])
        progress_bar = progressbar.ProgressBar(max_value=frame_count)
        viz_ho.convert_cpp_telemetry_logs_to_video(telemetry_log, metadata,
                                                   canonical_field, live_field,
                                                   out_path,
                                                   progress_bar=progress_bar)

    warped_live = resampling.warp_field(live_field, warp_field)

    if visualize_initial_and_final:
        viz.visualize_final_fields(canonical_field, warped_live,
                                   view_scaling_factor)

    return EXIT_CODE_SUCCESS
# ---- Example #5 ----
    def __optimize_level(self, canonical_pyramid_level, live_pyramid_level,
                         live_gradient_x_level, live_gradient_y_level,
                         warp_field):
        """Run gradient-descent warp optimization on one pyramid level.

        Repeatedly warps the live field toward the canonical field, updating
        warp_field in place each iteration until
        __termination_conditions_reached signals convergence.

        :param canonical_pyramid_level: canonical field at this pyramid level
        :param live_pyramid_level: live field at this pyramid level
        :param live_gradient_x_level: x-gradient of the live field
        :param live_gradient_y_level: y-gradient of the live field
        :param warp_field: starting warp field for this level; mutated in place
        :return: the optimized warp field (same array object as warp_field)
        """

        # start above any threshold so the loop body runs at least once
        maximum_warp_update_length = np.finfo(np.float32).max
        iteration_count = 0

        gradient = np.zeros_like(warp_field)
        normalized_tikhonov_energy = 0
        data_gradient = None
        tikhonov_gradient = None

        while not self.__termination_conditions_reached(
                maximum_warp_update_length, iteration_count):
            # resample the live & gradients using current warps
            resampled_live = resampling.warp_field(live_pyramid_level,
                                                   warp_field)
            # out-of-bounds gradient samples are replaced with 0.0
            resampled_live_gradient_x = resampling.warp_field_replacement(
                live_gradient_x_level, warp_field, 0.0)
            resampled_live_gradient_y = resampling.warp_field_replacement(
                live_gradient_y_level, warp_field, 0.0)

            # see how badly our sampled values correspond to the canonical values at the same locations
            # data_gradient = (warped_live - canonical) * warped_gradient(live)
            diff = (resampled_live - canonical_pyramid_level)
            data_gradient_x = diff * resampled_live_gradient_x
            data_gradient_y = diff * resampled_live_gradient_y
            # this results in the data term gradient
            data_gradient = np.dstack((data_gradient_x, data_gradient_y))

            if self.tikhonov_term_enabled:
                # calculate tikhonov regularizer (laplacian of the previous update)
                laplace_u = scipy.ndimage.laplace(gradient[:, :, 0],
                                                  mode='nearest')
                laplace_v = scipy.ndimage.laplace(gradient[:, :, 1],
                                                  mode='nearest')
                tikhonov_gradient = np.stack((laplace_u, laplace_v), axis=2)

                if self.verbosity_parameters.print_iteration_tikhonov_energy:
                    # energy is the mean squared spatial derivative of the
                    # previous update, scaled by 1e6 for readable output
                    warp_gradient_u_x, warp_gradient_u_y = np.gradient(
                        gradient[:, :, 0])
                    warp_gradient_v_x, warp_gradient_v_y = np.gradient(
                        gradient[:, :, 1])
                    gradient_aggregate = \
                        warp_gradient_u_x ** 2 + warp_gradient_v_x ** 2 + \
                        warp_gradient_u_y ** 2 + warp_gradient_v_y ** 2
                    normalized_tikhonov_energy = 1000000 * 0.5 * gradient_aggregate.mean(
                    )

                # combine data term and (negated) regularizer into the update
                gradient = self.data_term_amplifier * data_gradient - self.tikhonov_strength * tikhonov_gradient
            else:
                gradient = self.data_term_amplifier * data_gradient

            if self.gradient_kernel_enabled:
                # smooth the combined gradient with the configured kernel
                # (appears to modify `gradient` in place -- no return value used)
                convolution.convolve_with_kernel(gradient,
                                                 self.gradient_kernel)

            # apply gradient-based update to existing warps
            warp_field -= self.rate * gradient

            # perform termination condition updates
            update_lengths = np.linalg.norm(gradient, axis=2)
            max_at = np.unravel_index(np.argmax(update_lengths),
                                      update_lengths.shape)
            maximum_warp_update_length = update_lengths[max_at]

            # print output to stdout / log
            if self.verbosity_parameters.print_per_iteration_info:
                print("%s[ITERATION %d COMPLETED]%s" %
                      (printing.BOLD_LIGHT_CYAN, iteration_count,
                       printing.RESET),
                      end="")
                if self.verbosity_parameters.print_max_warp_update:
                    print(" max upd. l.: %f" % maximum_warp_update_length,
                          end="")
                if self.verbosity_parameters.print_iteration_data_energy:
                    normalized_data_energy = 1000000 * (diff**2).mean()
                    print(" norm. data energy: %f" % normalized_data_energy,
                          end="")
                if self.verbosity_parameters.print_iteration_tikhonov_energy and self.tikhonov_term_enabled:
                    print(" norm. tikhonov energy: %f" %
                          normalized_tikhonov_energy,
                          end="")
                print()
            # visualizer expects the regularizer with its sign flipped
            inverse_tikhonov_gradient = None if tikhonov_gradient is None else -tikhonov_gradient

            # save & show per-iteration visualizations
            self.visualizer.generate_per_iteration_visualizations(
                self.hierarchy_level,
                iteration_count,
                canonical_pyramid_level,
                resampled_live,
                warp_field,
                data_gradient=data_gradient,
                inverse_tikhonov_gradient=inverse_tikhonov_gradient)
            iteration_count += 1

        return warp_field
def main():
    """Batch-run hierarchical optimization over frame-pair TSDF data.

    Driven entirely by command-line ``Arguments``: optionally generates
    canonical/live TSDF field pairs from raw frames (or loads previously
    generated pairs from disk), optimizes each pair, saves and/or converts
    per-pair telemetry, and post-processes convergence reports into
    Excel/pickle files plus analysis output.

    :return: EXIT_CODE_SUCCESS on completion
    """
    args = process_arguments(
        Arguments,
        "Runs 2D hierarchical optimizer on TSDF inputs generated from frame-pairs "
        "& random pixel rows from these. Alternatively, generates the said data or "
        "loads it from a folder from further re-use.")
    post_process_enum_args(args, for_3d=True)
    perform_optimization = not Arguments.skip_optimization.v

    # derive input/output locations from filter settings & dataset number
    filter_method_name_substring, filter_smoothing_substring = get_filter_substrings(
    )
    data_subfolder = "tsdf_pairs_128_{:s}{:s}_{:02d}".format(
        filter_method_name_substring, filter_smoothing_substring,
        Arguments.dataset_number.v)
    data_path = os.path.join(pu.get_reconstruction_data_directory(),
                             "real_data/snoopy", data_subfolder)
    experiment_name = build_experiment_name(filter_method_name_substring,
                                            filter_smoothing_substring)

    print("Running experiment " + experiment_name)

    if Arguments.series_result_subfolder.v is None:
        out_path = os.path.join(args.output_path, experiment_name)
    else:
        out_path = os.path.join(args.output_path,
                                Arguments.series_result_subfolder.v,
                                experiment_name)

    convergence_reports_pickle_path = os.path.join(out_path,
                                                   "convergence_reports.pk")

    df = None
    if not args.analyze_only:
        create_folder_if_necessary(out_path)
        if args.generate_data:
            create_or_clear_folder(data_path)
        initial_fields = []
        frame_numbers_and_rows = []
        if args.generate_data:
            # === phase 1a: generate field pairs from raw frames ===
            datasets = esr.prepare_datasets_for_2d_frame_pair_processing(
                calibration_path=os.path.join(
                    pu.get_reconstruction_data_directory(),
                    "real_data/snoopy/snoopy_calib.txt"),
                frame_directory=os.path.join(
                    pu.get_reconstruction_data_directory(),
                    "real_data/snoopy/frames"),
                output_directory=out_path,
                y_range=(214, 400),
                replace_empty_rows=True,
                use_masks=True,
                input_case_file=Arguments.generation_case_file.v,
                offset=np.array([-64, -64, 128]),
                field_size=128,
            )

            datasets = datasets[args.start_from_index:min(
                len(datasets), args.stop_before_index)]

            print("Generating initial fields...")
            initial_fields_folder = os.path.join(data_path, "images")
            if args.save_initial_fields_during_generation:
                create_folder_if_necessary(initial_fields_folder)

            for dataset in progressbar.progressbar(datasets):
                canonical_field, live_field = dataset.generate_3d_sdf_fields(
                    args.generation_method, args.smoothing_coefficient)
                initial_fields.append((canonical_field, live_field))
                if args.generate_data:
                    canonical_frame = infer_frame_number_from_filename(
                        dataset.first_frame_path)
                    pixel_row = dataset.image_pixel_row
                    frame_numbers_and_rows.append((canonical_frame, pixel_row))
                    # persist the pair for later re-use
                    np.savez(os.path.join(
                        data_path,
                        "data_{:d}_{:d}".format(canonical_frame, pixel_row)),
                             canonical=canonical_field,
                             live=live_field)
                    if args.save_initial_fields_during_generation:
                        live_frame = canonical_frame + 1
                        canonical_image_path = os.path.join(
                            initial_fields_folder,
                            "tsdf_frame_{:06d}.png".format(canonical_frame))
                        viz.save_field(canonical_field, canonical_image_path,
                                       1024 // dataset.field_size)
                        live_image_path = os.path.join(
                            initial_fields_folder,
                            "tsdf_frame_{:06d}.png".format(live_frame))
                        viz.save_field(live_field, live_image_path,
                                       1024 // dataset.field_size)

                sys.stdout.flush()
        else:
            # === phase 1b: load previously-generated field pairs from disk ===
            files = os.listdir(data_path)
            files.sort()
            # the "images" subfolder sorts last; exclude it from the data files
            if files[len(files) - 1] == "images":
                files = files[:-1]
            print("Loading initial fields from {:s}...".format(data_path))
            for file in files:
                frame_numbers_and_rows.append(
                    infer_frame_number_and_pixel_row_from_filename(file))
            if Arguments.optimization_case_file.v is not None:
                files, frame_numbers_and_rows = \
                    filter_files_based_on_case_file(Arguments.optimization_case_file.v, frame_numbers_and_rows, files)
            for file in progressbar.progressbar(files):
                archive = np.load(os.path.join(data_path, file))
                initial_fields.append((archive["canonical"], archive["live"]))

        # limit ranges
        frame_numbers_and_rows = frame_numbers_and_rows[
            args.start_from_index:min(len(frame_numbers_and_rows), args.
                                      stop_before_index)]
        initial_fields = initial_fields[args.start_from_index:min(
            len(initial_fields), args.stop_before_index)]

        telemetry_logs = []
        telemetry_folder = os.path.join(out_path, "telemetry")
        if perform_optimization:
            # === phase 2: optimize every field pair ===
            optimizer = cpp_module.HierarchicalOptimizer3d(
                tikhonov_term_enabled=Arguments.tikhonov_term_enabled.v,
                gradient_kernel_enabled=Arguments.gradient_kernel_enabled.v,
                maximum_chunk_size=8,
                rate=Arguments.rate.v,
                maximum_iteration_count=Arguments.maximum_iteration_count.v,
                maximum_warp_update_threshold=Arguments.
                maximum_warp_update_threshold.v,
                data_term_amplifier=Arguments.data_term_amplifier.v,
                tikhonov_strength=Arguments.tikhonov_strength.v,
                kernel=sob.generate_1d_sobolev_kernel(
                    Arguments.kernel_size.v, Arguments.kernel_strength.v),
                resampling_strategy=Arguments.resampling_strategy.v,
                verbosity_parameters=cpp_module.HierarchicalOptimizer3d.
                VerbosityParameters(),
                logging_parameters=cpp_module.HierarchicalOptimizer3d.
                LoggingParameters(collect_per_level_convergence_reports=True,
                                  collect_per_level_iteration_data=Arguments.
                                  save_telemetry.v))

            convergence_report_sets = []
            if Arguments.save_initial_and_final_fields.v or Arguments.save_telemetry.v:
                create_folder_if_necessary(telemetry_folder)

            if args.save_telemetry:
                # make all the necessary subfolders
                for frame_number, pixel_row in frame_numbers_and_rows:
                    telemetry_subfolder = get_telemetry_subfolder_path(
                        telemetry_folder, frame_number, pixel_row)
                    create_folder_if_necessary(telemetry_subfolder)

            print("Optimizing...")
            i_pair = 0
            for (canonical_field,
                 live_field) in progressbar.progressbar(initial_fields):
                (frame_number, pixel_row) = frame_numbers_and_rows[i_pair]
                # keep a pristine copy: the optimizer may mutate live_field
                live_copy = live_field.copy()
                warp_field_out = optimizer.optimize(canonical_field,
                                                    live_field)

                if args.save_telemetry:
                    if args.implementation_language == build_opt.ImplementationLanguage.CPP:
                        telemetry_logs.append(
                            optimizer.get_per_level_iteration_data())
                    else:
                        # Python implementation writes telemetry itself
                        optimizer.visualization_parameters.out_path = \
                            get_telemetry_subfolder_path(telemetry_folder, frame_number, pixel_row)
                if Arguments.save_initial_and_final_fields.v:
                    # choose flat vs per-pair subfolder naming for the images
                    if not Arguments.save_telemetry.v:
                        frame_file_prefix = "pair_{:d}-{:d}_{:d}".format(
                            frame_number, frame_number + 1, pixel_row)
                        final_live_path = os.path.join(
                            telemetry_folder,
                            frame_file_prefix + "_final_live.png")
                        canonical_path = os.path.join(
                            telemetry_folder,
                            frame_file_prefix + "_canonical.png")
                        initial_live_path = os.path.join(
                            telemetry_folder,
                            frame_file_prefix + "_initial_live.png")
                    else:
                        telemetry_subfolder = get_telemetry_subfolder_path(
                            telemetry_folder, frame_number, pixel_row)
                        final_live_path = os.path.join(telemetry_subfolder,
                                                       "final_live.png")
                        canonical_path = os.path.join(telemetry_subfolder,
                                                      "canonical.png")
                        initial_live_path = os.path.join(
                            telemetry_subfolder, "live.png")
                    final_live_resampled = resampling.warp_field(
                        live_field, warp_field_out)
                    scale = 1024 // final_live_resampled.shape[0]
                    viz.save_field(final_live_resampled, final_live_path,
                                   scale)
                    viz.save_field(canonical_field, canonical_path, scale)
                    viz.save_field(live_copy, initial_live_path, scale)

                convergence_reports = optimizer.get_per_level_convergence_reports(
                )
                convergence_report_sets.append(convergence_reports)
                i_pair += 1

            print("Post-processing convergence reports...")
            df = post_process_convergence_report_sets(convergence_report_sets,
                                                      frame_numbers_and_rows)
            reports_file_name = "convergence_reports"
            if Arguments.optimization_case_file.v is not None:
                reports_file_name = "case_convergence_reports"
            df.to_excel(
                os.path.join(out_path, "{:s}.xlsx".format(reports_file_name)))
            df.to_pickle(
                os.path.join(out_path, "{:s}.pk".format(reports_file_name)))

        # === phase 3: save C++ telemetry collected during optimization ===
        if Arguments.save_telemetry.v and \
                Arguments.implementation_language.v == build_opt.ImplementationLanguage.CPP and \
                len(telemetry_logs) > 0:
            print("Saving C++-based telemetry (" + telemetry_folder + ")...")
            i_pair = 0
            telemetry_metadata = ho_viz.get_telemetry_metadata(
                telemetry_logs[0])
            for telemetry_log in progressbar.progressbar(telemetry_logs):
                (frame_number, pixel_row) = frame_numbers_and_rows[i_pair]
                telemetry_subfolder = get_telemetry_subfolder_path(
                    telemetry_folder, frame_number, pixel_row)
                ho_viz.save_telemetry_log(telemetry_log, telemetry_metadata,
                                          telemetry_subfolder)
                i_pair += 1

        # === phase 4: convert telemetry logs to per-pair videos ===
        if Arguments.convert_telemetry.v and \
                Arguments.implementation_language.v == build_opt.ImplementationLanguage.CPP:
            # TODO: attempt to load telemetry if the array is empty
            if len(telemetry_logs) == 0:
                print("Loading C++-based telemetry (" + telemetry_folder +
                      ")...")
                for frame_number, pixel_row in progressbar.progressbar(
                        frame_numbers_and_rows):
                    telemetry_subfolder = get_telemetry_subfolder_path(
                        telemetry_folder, frame_number, pixel_row)
                    telemetry_log = ho_viz.load_telemetry_log(
                        telemetry_subfolder)
                    telemetry_logs.append(telemetry_log)

            print("Converting C++-based telemetry to videos (" +
                  telemetry_folder + ")...")
            i_pair = 0
            total_frame_count = ho_viz.get_number_of_frames_to_save_from_telemetry_logs(
                telemetry_logs)
            bar = progressbar.ProgressBar(max_value=total_frame_count)
            telemetry_metadata = ho_viz.get_telemetry_metadata(
                telemetry_logs[0])
            for telemetry_log in telemetry_logs:
                canonical_field, live_field = initial_fields[i_pair]
                (frame_number, pixel_row) = frame_numbers_and_rows[i_pair]
                telemetry_subfolder = get_telemetry_subfolder_path(
                    telemetry_folder, frame_number, pixel_row)
                ho_viz.convert_cpp_telemetry_logs_to_video(
                    telemetry_log, telemetry_metadata, canonical_field,
                    live_field, telemetry_subfolder, bar)
                i_pair += 1

    else:
        # analysis-only mode: reuse previously pickled convergence reports
        df = pd.read_pickle(convergence_reports_pickle_path)

    if df is not None:
        analyze_convergence_data(df, out_path)
        if not Arguments.optimization_case_file.v:
            save_bad_cases(df, out_path)
            save_all_cases(df, out_path)

    print()

    return EXIT_CODE_SUCCESS
# ---- Example #7 ----
 def test_warp_field01(self):
     """Resampling fixture field A with warp field A matches the stored result."""
     resampled = ipt.warp_field(fixtures.field_A_16x16,
                                fixtures.warp_field_A_16x16)
     self.assertTrue(
         np.allclose(resampled, fixtures.fA_resampled_with_wfA))