    def test_libsgm_zncc(self):
        """
        Test pandora + plugin_libsgm if ZNCC measure is used
        """

        # Prepare the configuration
        user_cfg = pandora.read_config_file("tests/conf/sgm_zncc.json")

        # Import pandora plugins
        pandora.import_plugin()

        # Instantiate machine
        pandora_machine = PandoraMachine()

        # Run the pandora pipeline
        left, right = pandora.run(pandora_machine, self.left, self.right, -60,
                                  0, user_cfg["pipeline"])

        # Compares the calculated left disparity map with the ground truth
        # If the disparity maps are not equal, raise an error

        if common.strict_error(left["disparity_map"].data[61:-61, 61:-61],
                               self.disp_left_zncc[61:-61, 61:-61]) > 0:
            raise AssertionError

        # Compares the calculated right disparity map with the ground truth
        # If the disparity maps are not equal, raise an error
        if common.strict_error(right["disparity_map"].data,
                               self.disp_right_zncc) > 0:
            raise AssertionError
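The strict_error helper comes from the tests' common module and is not shown here. Below is a minimal sketch of the comparison it is assumed to perform, namely counting the pixels whose computed disparity differs from the ground truth; the helper name and return convention are assumptions inferred from the calls above.

import numpy as np

def strict_error_sketch(computed, ground_truth):
    # Count the pixels where the computed disparity map and the ground truth differ.
    # The test above expects this count to be exactly 0 on the cropped maps.
    return np.count_nonzero(computed != ground_truth)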
    def test_libsgm_zncc(self):
        """
        Test pandora + plugin_libsgm if ZNCC measure is used
        """

        # Prepare the configuration
        user_cfg = pandora.read_config_file("tests/conf/sgm_zncc_python.json")

        # Import pandora plugins
        pandora.import_plugin()

        # Instantiate machine
        pandora_machine = PandoraMachine()

        # Run the pandora pipeline
        left, right = pandora.run(pandora_machine, self.left, self.right, -60,
                                  0, user_cfg["pipeline"])

        # Compares the calculated left disparity map with the ground truth
        # If the disparity maps are not equal, raise an error
        np.testing.assert_allclose(left["disparity_map"].data,
                                   self.disp_left_zncc,
                                   rtol=1e-04)

        # Compares the calculated right disparity map with the ground truth
        # If the disparity maps are not equal, raise an error
        np.testing.assert_allclose(right["disparity_map"].data,
                                   self.disp_right_zncc,
                                   rtol=1e-04)
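This variant replaces the strict pixel-equality check with np.testing.assert_allclose, which raises an AssertionError when |actual - desired| exceeds atol + rtol * |desired| (atol defaults to 0). A small illustration of the rtol=1e-04 tolerance used above, with made-up disparity values:

import numpy as np

# Passes: the difference (0.0005) is below rtol * |desired| = 1e-04 * 12.0 = 0.0012.
np.testing.assert_allclose(np.array([-12.0005]), np.array([-12.0]), rtol=1e-04)
# A difference larger than 0.0012 (e.g. -12.002 vs -12.0) would raise an AssertionError.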
    def test_libsgm(self):
        """
        Test pandora + plugin_libsgm

        """
        user_cfg = pandora.read_config_file(
            "tests/conf/sgm_python_parall.json")

        # Instantiate machine
        pandora_machine = PandoraMachine()

        # Import pandora plugins
        pandora.import_plugin()

        # Run the pandora pipeline
        left, right = pandora.run(pandora_machine, self.left, self.right, -60,
                                  0, user_cfg)

        # Compares the calculated left disparity map with the ground truth
        # If the percentage of pixel errors is > 0.20, raise an error
        if common.error(left["disparity_map"].data, self.disp_left, 1) > 0.20:
            raise AssertionError

        # Compares the calculated left disparity map with the ground truth
        # If the percentage of pixel errors (a pixel is in error when |ground truth - calculated| > 2) is > 0.15, raise an error
        if common.error(left["disparity_map"].data, self.disp_left, 2) > 0.15:
            raise AssertionError

        # Check the cross-checking bits of the left validity mask (bits 8 and 9)
        # Compares the calculated validity mask with the ground truth (occlusion mask)
        occlusion = np.ones(
            (left["validity_mask"].shape[0], left["validity_mask"].shape[1]))
        occlusion[left["validity_mask"].data >= 512] = 0

        # If the percentage of errors is > 0.15, raise an error
        if common.error_mask(occlusion, self.occlusion) > 0.15:
            raise AssertionError

        # Compares the calculated right disparity map with the ground truth
        # If the percentage of pixel errors is > 0.20, raise an error
        if common.error(-1 * right["disparity_map"].data, self.disp_right,
                        1) > 0.20:
            raise AssertionError

        # Compares the calculated right disparity map with the ground truth
        # If the percentage of pixel errors (a pixel is in error when |ground truth - calculated| > 2) is > 0.15, raise an error
        if common.error(-1 * right["disparity_map"].data, self.disp_right,
                        2) > 0.15:
            raise AssertionError
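The error and error_mask helpers also come from the tests' common module. The sketch below shows the metric error is assumed to compute: the fraction of pixels whose absolute difference from the ground truth exceeds a threshold (the name and the handling of invalid pixels are assumptions; the real helper may also exclude no-data areas). Note that both right-map checks negate the disparity map before comparing, presumably because the computed right disparities carry the opposite sign convention to the ground truth.

import numpy as np

def error_rate_sketch(computed, ground_truth, threshold):
    # Fraction of finite ground-truth pixels whose disparity error exceeds the threshold.
    valid = np.isfinite(ground_truth) & np.isfinite(computed)
    wrong = np.abs(ground_truth[valid] - computed[valid]) > threshold
    return wrong.sum() / max(valid.sum(), 1)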
    def test_libsgm_positive_disparities(self):
        """
        Test pandora + plugin_libsgm, with positive disparities

        """
        user_cfg = pandora.read_config_file(
            "tests/conf/sgm_python_parall.json")

        # Import pandora plugins
        pandora.import_plugin()

        # Instantiate machine
        pandora_machine = PandoraMachine()

        right, left = pandora.run(pandora_machine, self.right, self.left, 1,
                                  60, user_cfg)

        # Compares the calculated left disparity map with the ground truth
        # If the percentage of pixel errors is > 0.20, raise an error
        if common.error(left["disparity_map"].data, self.disp_left, 1) > 0.20:
            raise AssertionError

        # Compares the calculated left disparity map with the ground truth
        # If the percentage of pixel errors (a pixel is in error when |ground truth - calculated| > 2) is > 0.15, raise an error
        if common.error(left["disparity_map"].data, self.disp_left, 2) > 0.15:
            raise AssertionError

        # Compares the calculated right disparity map with the ground truth
        # If the percentage of pixel errors is > 0.20, raise an error
        if common.error(-1 * right["disparity_map"].data, self.disp_right,
                        1) > 0.20:
            raise AssertionError

        # Compares the calculated right disparity map with the ground truth
        # If the percentage of pixel errors (a pixel is in error when |ground truth - calculated| > 2) is > 0.15, raise an error
        if common.error(-1 * right["disparity_map"].data, self.disp_right,
                        2) > 0.15:
            raise AssertionError
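These methods belong to a unittest.TestCase whose setUp (not shown) provides self.left, self.right and the ground-truth arrays. Below is a hypothetical skeleton of such fixtures using placeholder data in pandora's xarray layout (an 'im' variable over 'row'/'col'); the real suite reads the stereo pair and reference disparity maps from files under tests/, so every value here is illustrative only.

import unittest

import numpy as np
import xarray as xr


class TestPluginLibsgmSketch(unittest.TestCase):
    """Hypothetical fixtures for the tests above; all data are placeholders."""

    def setUp(self):
        # Placeholder image datasets; the real setUp loads the stereo pair from disk.
        img = np.zeros((375, 450), dtype=np.float32)
        self.left = xr.Dataset(
            {"im": (["row", "col"], img)},
            coords={"row": np.arange(img.shape[0]), "col": np.arange(img.shape[1])},
        )
        self.right = self.left.copy(deep=True)

        # Ground-truth maps and occlusion mask compared against in the tests.
        self.disp_left = np.zeros_like(img)
        self.disp_right = np.zeros_like(img)
        self.disp_left_zncc = np.zeros_like(img)
        self.disp_right_zncc = np.zeros_like(img)
        self.occlusion = np.ones_like(img)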