def query(self, picture: Picture) -> List[Picture]:
    """Return up to 10 database pictures ranked by SIFT match count.

    Text regions are masked out before feature extraction; database
    entries with zero matches surviving the ratio test are discarded.
    """
    mask, _ = detect_text(picture.get_image())
    _, des = self.sift.detectAndCompute(picture.get_image(), mask)

    scored = (
        seq(self.db)
        # entry = (picture, keypoints?, descriptors) — match db descriptors vs query
        .map(lambda entry: (entry[0], self.bf.knnMatch(entry[2], des, k=2)))
        .map(lambda entry: (entry[0], len(self._ratio_test(entry[1]))))
        .filter(lambda entry: entry[1] > 0)
        .sorted(lambda entry: entry[1], reverse=True)
    )
    return scored.map(lambda entry: entry[0]).take(10).to_list()
# --- Example 2 ---
    def query(self, picture: Picture) -> List[Picture]:
        """Return up to 10 database pictures ranked by SURF match count.

        Matches are kept only when their distance is strictly below
        max(THRESHOLD, best distance) — so if even the best match is at
        or above THRESHOLD, nothing survives for that entry.
        """
        mask, _ = detect_text(picture.get_image())
        _, des = self.surf.detectAndCompute(picture.get_image(), mask)

        def close_matches(matches):
            # Strict '<' against the cutoff: an entry whose best distance
            # already reaches THRESHOLD ends up with an empty list.
            cutoff = max(THRESHOLD, seq(matches).map(lambda m: m.distance).min())
            return seq(matches).filter(lambda m: m.distance < cutoff).to_list()

        return (
            seq(self.db)
            .map(lambda entry: (entry[0], close_matches(self.bf.match(entry[2], des))))
            .map(lambda entry: (entry[0], len(entry[1])))
            .filter(lambda entry: entry[1] > 4)
            .sorted(lambda entry: entry[1], reverse=True)
            .map(lambda entry: entry[0])
            .take(10)
            .to_list()
        )
# --- Example 3 ---
 def query(self, picture: Picture) -> List[Picture]:
     """Return up to 10 database pictures ranked by FLANN-matched SIFT features.

     Fix: the original built a local ``cv2.FlannBasedMatcher`` (with the
     KD-tree index params) but then called ``self.flann`` instead, so the
     constructed matcher was dead code. The local matcher is now actually
     used, as the explicit construction clearly intended.
     """
     mask, rec = detect_text(picture.get_image())
     kp, des = self.sift.detectAndCompute(picture.get_image(), mask)
     FLANN_INDEX_KDTREE = 1  # cv2 flann algorithm id selecting a KD-tree index
     flann = cv2.FlannBasedMatcher(
         dict(algorithm=FLANN_INDEX_KDTREE, trees=5), {})
     return (seq(self.db)
             # FLANN requires float32 descriptors; convert both sides.
             .map(lambda p: (p[0],
                             flann.knnMatch(np.asarray(p[2], np.float32),
                                            np.asarray(des, np.float32), 2)))
             .map(lambda p: (p[0], len(self._ratio_test(p[1]))))
             .filter(lambda p: p[1] > 4)
             .sorted(lambda p: p[1], reverse=True)
             .map(lambda p: p[0])
             .take(10)
             .to_list())
    def query(self, picture: Picture) -> List[Picture]:
        """Return up to 10 database pictures whose ORB matches pass both
        the ratio test and a homography validation (each requiring more
        than MIN_MATCH_COUNT survivors)."""
        mask, rec = detect_text(picture.get_image())
        kp, des = self.orb.detectAndCompute(picture.get_image(), mask)

        candidates = seq(self.db).map(
            lambda entry: (entry[0], entry[1],
                           self._ratio_test(self.bf.knnMatch(des, entry[2], k=2))))
        verified = (candidates
                    .filter(lambda entry: len(entry[2]) > MIN_MATCH_COUNT)
                    .map(lambda entry: (entry[0],
                                        self._homography(kp, entry[1], entry[2])))
                    .filter(lambda entry: len(entry[1]) > MIN_MATCH_COUNT))
        # NOTE(review): the sort key is the homography result itself rather
        # than its length (the filter above uses len) — presumably meant to
        # be len(entry[1]); confirm before changing.
        return (verified
                .sorted(lambda entry: entry[1], reverse=True)
                .map(lambda entry: entry[0])
                .take(10)
                .to_list())
# --- Example 5 ---
    def query(self, picture: Picture) -> List[Picture]:
        """Return up to 10 database pictures ranked by a homography score
        computed from distance-thresholded ORB brute-force matches."""
        mask, rec = detect_text(picture.get_image())
        kp, des = self.orb.detectAndCompute(picture.get_image(), mask)

        def close_matches(matches):
            # Strict '<' against max(THRESHOLD, best distance): when even
            # the best match reaches THRESHOLD, the entry keeps nothing.
            cutoff = max(THRESHOLD, seq(matches).map(lambda m: m.distance).min())
            return seq(matches).filter(lambda m: m.distance < cutoff).to_list()

        return (
            seq(self.db)
            .map(lambda entry: (entry[0], entry[1],
                                close_matches(self.bf.match(des, entry[2]))))
            .filter(lambda entry: len(entry[2]) > MIN_MATCH_COUNT)
            # _homography here also receives both images (db entry + query).
            .map(lambda entry: (entry[0],
                                self._homography(kp, entry[1], entry[2],
                                                 entry[0].get_image(),
                                                 picture.get_image())))
            .filter(lambda entry: entry[1] > MIN_MATCH_COUNT)
            .sorted(lambda entry: entry[1], reverse=True)
            .map(lambda entry: entry[0])
            .take(10)
            .to_list()
        )
    def query(self, picture: Picture) -> List[Picture]:
        """Return up to 10 database pictures ranked by the number of
        FLANN-matched ORB descriptors surviving the ratio test (more than
        4 required)."""
        mask, rec = detect_text(picture.get_image())
        kp, des = self.orb.detectAndCompute(picture.get_image(), mask)

        # FLANN needs float32; the query-side conversion is loop-invariant.
        query_des = np.asarray(des, np.float32)
        scored = seq(self.db).map(
            lambda entry: (entry[0],
                           len(self._ratio_test(
                               self.flann.knnMatch(
                                   np.asarray(entry[2], np.float32),
                                   query_des, 2)))))
        return (scored
                .filter(lambda entry: entry[1] > 4)
                .sorted(lambda entry: entry[1], reverse=True)
                .map(lambda entry: entry[0])
                .take(10)
                .to_list())
# --- Example 7 ---
 def query(self, picture: Picture) -> List[Picture]:
     """Return the K database pictures whose histograms best match the
     query picture's.

     Per-entry histogram comparison results are collapsed to one scalar
     (Euclidean distance to the origin). For HISTCMP_HELLINGER the value
     is a distance (smaller is better → ascending sort); every other
     method yields a similarity (larger is better → descending sort).

     Fix: replaced the ``False if X == Y else True`` anti-idiom with the
     equivalent ``X != Y`` and passed it as an explicit ``reverse=``
     keyword; behavior is unchanged.
     """
     hist = self._get_histogram(picture.get_image())
     # Hellinger is a distance metric; everything else is a similarity.
     descending = self.histogram_comparison_method != cv2.HISTCMP_HELLINGER
     return (seq(self.db)
             # Calculate histogram similarity per entry
             .map(lambda entry:
                  (entry[0], self._compare_histograms_full(hist, entry[1])))
             # Collapse the comparison vector to a single scalar
             .map(lambda entry:
                  (entry[0], self._euclidean_distance_to_origin(entry[1])))
             # Order best-first according to the metric's direction
             .sorted(lambda entry: entry[1], reverse=descending)
             .map(lambda entry: entry[0])
             # Take first K
             .take(K)
             .to_list())
# --- Example 8 ---
    def query(self, picture: Picture, frame: Frame = None) -> List[Picture]:
        """Return up to 10 database pictures ranked by ORB match count.

        When a valid frame is supplied, the picture is first rectified:
        the framed quadrilateral is warped onto a square whose side is
        80% of the square root of the frame area.
        """
        im = picture.get_image()
        if frame and frame.is_valid():
            side = int(math.sqrt(frame.get_area()) * 0.8)
            # Target square corners, in the order get_perspective_matrix expects.
            corners = np.array([[0, side - 1], [side - 1, side - 1],
                                [side - 1, 0], [0, 0]])
            im = cv2.warpPerspective(im, frame.get_perspective_matrix(corners),
                                     (side, side))

        kp, des = self.orb.detectAndCompute(im, None)

        def close_matches(matches):
            # Strict '<' against max(THRESHOLD, best distance): if even the
            # best match reaches THRESHOLD, the entry keeps no matches.
            cutoff = max(THRESHOLD, seq(matches).map(lambda m: m.distance).min())
            return seq(matches).filter(lambda m: m.distance < cutoff).to_list()

        return (seq(self.db)
                .map(lambda entry: (entry[0],
                                    close_matches(self.bf.match(entry[2], des))))
                .map(lambda entry: (entry[0], len(entry[1])))
                .filter(lambda entry: entry[1] > 4)
                .sorted(lambda entry: entry[1], reverse=True)
                .map(lambda entry: entry[0])
                .take(10)
                .to_list())
# --- Example 9 ---
    def query(self, picture: Picture) -> (List[Picture], Frame):
        """Detect the picture's frame from straight lines, then delegate
        retrieval to the ORB matcher; return (matches, frame).

        NOTE(review): the return annotation should be
        ``Tuple[List[Picture], Frame]`` — a bare parenthesized pair is not
        a valid typing annotation, though it is harmless at runtime. Left
        unchanged because the file's import block is not visible here.
        """
        detected_frame = get_frame_with_lines(picture.get_image())
        matches = self.orb.query(picture)
        return matches, detected_frame