Code example #1
File: live_point_picker.py, Project: harnix/pdc-ros
def pick_point_from_image1(event, x, y, flags, param):
    global img1
    # On a left click, mark the clicked pixel and dump its dense descriptor.
    if event == cv2.EVENT_LBUTTONDOWN:
        u = x
        v = y
        draw_reticle(img1, u, v, (0, 255, 0))
        print u, v
        print res_a[v, u, :]
        print "is your descriptor"
        # Persist the descriptor at (v, u) for later lookup.
        save_descriptor_to_yaml(res_a[v, u, :])
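This callback only fires once it is registered on an OpenCV window. A minimal sketch of the surrounding setup, assuming img1 has already been loaded and res_a computed elsewhere (the window name and image path below are placeholders):

import cv2

img1 = cv2.imread("scene.png")  # placeholder path; res_a is assumed to come from a network forward pass

cv2.namedWindow("source")
cv2.setMouseCallback("source", pick_point_from_image1)

while True:
    # Refresh the window until 'q' is pressed; clicks are handled by the callback.
    cv2.imshow("source", img1)
    if cv2.waitKey(30) & 0xFF == ord('q'):
        break
cv2.destroyAllWindows()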
Code example #2
    def find_best_match(self, event, u, v, flags, param):
        """
        For each network, find the best match in the target image to the point
        highlighted with the reticle in the source image. Displays the result.
        :return:
        :rtype:
        """

        img_1_with_reticle = np.copy(self.img1)
        draw_reticle(img_1_with_reticle, u, v, self._reticle_color)
        cv2.imshow("source", img_1_with_reticle)

        alpha = self._config["blend_weight_original_image"]
        beta = 1 - alpha

        img_2_with_reticle = np.copy(self.img2)

        print "\n\n"

        self._res_uv = dict()

        # self._res_a_uv = dict()
        # self._res_b_uv = dict()

        for network_name in self._dcn_dict:
            res_a = self._res_a[network_name]
            res_b = self._res_b[network_name]
            best_match_uv, best_match_diff, norm_diffs = \
                DenseCorrespondenceNetwork.find_best_match((u, v), res_a, res_b)
            print "\n\n"
            print "network_name:", network_name
            self._res_uv[network_name] = dict()
            self._res_uv[network_name]['source'] = res_a[v, u, :].tolist()
            self._res_uv[network_name]['target'] = res_b[v, u, :].tolist()

            # print "res_a[v, u, :]:", res_a[v, u, :]
            # print "res_b[v, u, :]:", res_b[v, u, :]

            print "%s best match diff: %.3f" % (network_name, best_match_diff)

            threshold = self._config["norm_diff_threshold"]
            if network_name in self._config["norm_diff_threshold_dict"]:
                threshold = self._config["norm_diff_threshold_dict"][
                    network_name]

            heatmap = self.scale_norm_diffs_to_make_heatmap(
                norm_diffs, threshold)

            reticle_color = self._network_reticle_color[network_name]
            draw_reticle(heatmap, best_match_uv[0], best_match_uv[1],
                         reticle_color)
            draw_reticle(img_2_with_reticle, best_match_uv[0],
                         best_match_uv[1], reticle_color)
            blended = cv2.addWeighted(self.img2_gray, alpha, heatmap, beta, 0)
            cv2.imshow(network_name, blended)

        cv2.imshow("target", img_2_with_reticle)
        if event == cv2.EVENT_LBUTTONDOWN:
            utils.saveToYaml(self._res_uv, 'clicked_point.yaml')
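DenseCorrespondenceNetwork.find_best_match is the core step here: it looks up the descriptor at the clicked source pixel and finds its nearest neighbour, in descriptor space, over all pixels of the target descriptor image. A rough numpy sketch of that idea, assuming res_a and res_b are H x W x D descriptor arrays (an illustration only, not the project's actual implementation):

import numpy as np

def find_best_match_sketch(uv, res_a, res_b):
    u, v = uv
    # Descriptor at the clicked source pixel (note the (row, col) = (v, u) indexing).
    descriptor = res_a[v, u, :]
    # L2 distance from that descriptor to every pixel of the target descriptor image.
    norm_diffs = np.linalg.norm(res_b - descriptor, axis=2)
    # Pixel with the smallest distance, returned as (u, v), plus the full distance map.
    best_v, best_u = np.unravel_index(np.argmin(norm_diffs), norm_diffs.shape)
    return (best_u, best_v), norm_diffs[best_v, best_u], norm_diffs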
Code example #3
    def find_best_match(self, event, u, v, flags, param):
        """
        For each network, find the best match in the target image to the point
        highlighted with the reticle in the source image. Displays the result.
        :return:
        :rtype:
        """

        img_1_with_reticle = np.copy(self.img1)
        draw_reticle(img_1_with_reticle, u, v, self._reticle_color)
        cv2.imshow("source", img_1_with_reticle)

        alpha = self._config["blend_weight_original_image"]
        beta = 1 - alpha

        img_2_with_reticle = np.copy(self.img2)


        print "\n\n"

        self._res_uv = dict()

        # self._res_a_uv = dict()
        # self._res_b_uv = dict()

        for network_name in self._dcn_dict:
            res_a = self._res_a[network_name]
            res_b = self._res_b[network_name]
            best_match_uv, best_match_diff, norm_diffs = \
                DenseCorrespondenceNetwork.find_best_match((u, v), res_a, res_b)
            print "\n\n"
            print "network_name:", network_name
            self._res_uv[network_name] = dict()
            self._res_uv[network_name]['source'] = res_a[v, u, :].tolist()
            self._res_uv[network_name]['target'] = res_b[v, u, :].tolist()

            # print "res_a[v, u, :]:", res_a[v, u, :]
            # print "res_b[v, u, :]:", res_b[v, u, :]

            print "%s best match diff: %.3f" %(network_name, best_match_diff)

            threshold = self._config["norm_diff_threshold"]
            if network_name in self._config["norm_diff_threshold_dict"]:
                threshold = self._config["norm_diff_threshold_dict"][network_name]

            heatmap = self.scale_norm_diffs_to_make_heatmap(norm_diffs, threshold)

            reticle_color = self._network_reticle_color[network_name]
            draw_reticle(heatmap, best_match_uv[0], best_match_uv[1], reticle_color)
            draw_reticle(img_2_with_reticle, best_match_uv[0], best_match_uv[1], reticle_color)
            blended = cv2.addWeighted(self.img2_gray, alpha, heatmap, beta, 0)
            cv2.imshow(network_name, blended)

        cv2.imshow("target", img_2_with_reticle)
        if event == cv2.EVENT_LBUTTONDOWN:
            utils.saveToYaml(self._res_uv, 'clicked_point.yaml')
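The helper scale_norm_diffs_to_make_heatmap used above is not shown in these examples. One plausible sketch, assuming it clips the descriptor distances at the threshold and maps small distances to bright values, so the result can be blended with the grayscale target image via cv2.addWeighted:

import numpy as np

def scale_norm_diffs_to_make_heatmap_sketch(norm_diffs, threshold):
    # Distances at or above the threshold become 0 (dark); a perfect match becomes 255 (bright).
    scaled = 1.0 - np.clip(norm_diffs / threshold, 0.0, 1.0)
    # Single-channel uint8 image, same shape as the target, so it blends with img2_gray.
    return (scaled * 255).astype(np.uint8)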
Code example #4
    def find_best_match(self, event, u, v, flags, param):
        """
        For each network, find the best match in the target image to the point
        highlighted with the reticle in the source image. Displays the result.
        :return:
        :rtype:
        """

        img_1_with_reticle = np.copy(self.img1)
        draw_reticle(img_1_with_reticle, u, v, self._reticle_color)
        source = img_1_with_reticle

        alpha = self._config["blend_weight_original_image"]
        beta = 1 - alpha

        img_2_with_reticle = np.copy(self.img2)

        self._res_uv = dict()

        for network_name in self._dcn_dict:
            res_a = self._res_a[network_name]
            res_b = self._res_b[network_name]
            best_match_uv, best_match_diff, norm_diffs = \
                DenseCorrespondenceNetwork.find_best_match((u, v), res_a, res_b)
            print "network_name:", network_name
            self._res_uv[network_name] = dict()
            self._res_uv[network_name]['source'] = res_a[v, u, :].tolist()
            self._res_uv[network_name]['target'] = res_b[v, u, :].tolist()

            print "%s best match diff: %.3f" % (network_name, best_match_diff)

            threshold = self._config["norm_diff_threshold"]
            if network_name in self._config["norm_diff_threshold_dict"]:
                threshold = self._config["norm_diff_threshold_dict"][
                    network_name]

            heatmap = self.scale_norm_diffs_to_make_heatmap(
                norm_diffs, threshold)

            reticle_color = self._network_reticle_color[network_name]
            draw_reticle(heatmap, best_match_uv[0], best_match_uv[1],
                         reticle_color)
            draw_reticle(img_2_with_reticle, best_match_uv[0],
                         best_match_uv[1], reticle_color)
            blended = cv2.addWeighted(self.img2_gray, alpha, heatmap, beta, 0)

        target = img_2_with_reticle
        return (source, blended, target, [best_match_uv[0], best_match_uv[1]])
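Unlike the previous variants, this version returns the annotated images instead of showing them; note that blended is overwritten on every loop iteration, so only the last network's heatmap blend is returned. A hypothetical caller (the visualizer name, query pixel, and window titles are placeholders) might display the result like this:

import cv2

u, v = 320, 240  # hypothetical query pixel in the source image
source, blended, target, best_uv = visualizer.find_best_match(
    cv2.EVENT_MOUSEMOVE, u, v, None, None)
cv2.imshow("source", source)
cv2.imshow("heatmap blend", blended)
cv2.imshow("target", target)
cv2.waitKey(1)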
Code example #5
def draw_points(img, img_points_picked):
    for index, img_point in enumerate(img_points_picked):
        # Cycle through the palette so any number of picked points gets a color.
        color = label_colors[index % len(label_colors)]
        draw_reticle(img, int(img_point["u"]), int(img_point["v"]), color)
Code example #6
File: live_point_picker.py, Project: harnix/pdc-ros
def draw_points(img, img_points_picked):
    for index, img_point in enumerate(img_points_picked):
        draw_reticle(img, int(img_point["u"]), int(img_point["v"]),
                     label_colors[index])
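Compared with Code example #5, this variant indexes label_colors directly, so it raises an IndexError once more points are picked than there are entries in the palette; the modulo indexing in the earlier example avoids that.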