    def query(self, picture: Picture) -> List[Picture]:
        mask, rec = detect_text(picture.get_image())
        kp, des = self.sift.detectAndCompute(picture.get_image(), mask)

        return (
            seq(self.db)
                .map(lambda p: (p[0], self.bf.knnMatch(p[2], des, k=2)))
                .map(lambda p: (p[0], self._ratio_test(p[1])))
                .map(lambda p: (p[0], len(p[1])))
                .filter(lambda p: p[1] > 0)
                .sorted(lambda p: p[1], reverse=True)
                .map(lambda p: p[0])
                .take(10)
                .to_list()
        )
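Several of these query variants pass the knnMatch pairs through a `_ratio_test` helper that is not shown on this page. A minimal sketch of what such a helper typically looks like follows; it applies Lowe's ratio test, and the 0.75 threshold is an assumption, not taken from the original code:

    def _ratio_test(self, matches, ratio=0.75):
        # Keep only matches whose best candidate is clearly closer than the
        # second-best candidate (Lowe's ratio test). The 0.75 ratio is assumed.
        good = []
        for pair in matches:
            if len(pair) == 2 and pair[0].distance < ratio * pair[1].distance:
                good.append(pair[0])
        return good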
    def query(self, picture: Picture) -> List[Picture]:
        mask, rec = detect_text(picture.get_image())
        kp, des = self.surf.detectAndCompute(picture.get_image(), mask)

        return (
            seq(self.db)
                .map(lambda p: (p[0], self.bf.match(p[2], des)))
                .map(lambda p: (p[0], seq(p[1])
                     .filter(lambda d: d.distance < max(
                         THRESHOLD, seq(p[1]).map(lambda m: m.distance).min()))
                     .to_list()))
                .map(lambda p: (p[0], len(p[1])))
                .filter(lambda p: p[1] > 4)
                .sorted(lambda p: p[1], reverse=True)
                .map(lambda p: p[0])
                .take(10)
                .to_list()
        )
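The SIFT/SURF examples assume a detector, a brute-force matcher, and the `db` list created elsewhere in the class. One plausible constructor is sketched below; the attribute names mirror the snippets, but the creation parameters are assumptions, and SURF requires an opencv-contrib build:

    def __init__(self):
        self.sift = cv2.SIFT_create()                 # cv2.xfeatures2d.SIFT_create() on older OpenCV builds
        self.surf = cv2.xfeatures2d.SURF_create(400)  # 400 is an assumed Hessian threshold
        self.bf = cv2.BFMatcher(cv2.NORM_L2)          # L2 norm suits float SIFT/SURF descriptors
        self.db = []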
Example #3
    def train(self, images: List[Picture], use_mask=True) -> List[Rectangle]:
        bounding_texts = []
        for image in tqdm(images, file=sys.stdout, desc='Training orb'):
            mask, bounding_text = detect_text(image.get_image())
            if use_mask:
                kp, des = self.orb.detectAndCompute(image.get_image(),
                                                    mask=mask)
            else:
                kp, des = self.orb.detectAndCompute(image.get_image(), None)

            self.db.append((image, kp, des))
            bounding_texts.append(bounding_text)
        return bounding_texts
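A hypothetical end-to-end use of the train/query pair above; the class name `OrbRetriever` and the picture variables are placeholders, not part of the original code:

retriever = OrbRetriever()
rectangles = retriever.train(database_pictures)   # fills self.db and returns the detected text rectangles
results = retriever.query(query_picture)          # up to 10 best-matching Pictures, best first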
    def query(self, picture: Picture) -> List[Picture]:
        mask, rec = detect_text(picture.get_image())
        kp, des = self.sift.detectAndCompute(picture.get_image(), mask)
        FLANN_INDEX_KDTREE = 1
        flann_params = dict(algorithm=FLANN_INDEX_KDTREE, trees=5)
        flann = cv2.FlannBasedMatcher(flann_params, {})

        return (
            seq(self.db)
                .map(lambda p: (p[0], flann.knnMatch(np.asarray(p[2], np.float32),
                                                     np.asarray(des, np.float32), 2)))
                .map(lambda p: (p[0], self._ratio_test(p[1])))
                .map(lambda p: (p[0], len(p[1])))
                .filter(lambda p: p[1] > 4)
                .sorted(lambda p: p[1], reverse=True)
                .map(lambda p: p[0])
                .take(10)
                .to_list()
        )
    def query(self, picture: Picture) -> List[Picture]:
        mask, rec = detect_text(picture.get_image())
        kp, des = self.orb.detectAndCompute(picture.get_image(), mask)

        return (
            seq(self.db)
                .map(lambda p: (p[0], p[1], self.bf.knnMatch(des, p[2], k=2)))
                .map(lambda p: (p[0], p[1], self._ratio_test(p[2])))
                .filter(lambda p: len(p[2]) > MIN_MATCH_COUNT)
                .map(lambda p: (p[0], self._homography(kp, p[1], p[2])))
                .filter(lambda p: len(p[1]) > MIN_MATCH_COUNT)
                .sorted(lambda p: p[1], reverse=True)
                .map(lambda p: p[0])
                .take(10)
                .to_list()
        )
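The homography-based variants call a `_homography` helper whose body is not shown. One plausible shape, returning the RANSAC inlier matches so that `len(...)` can serve as a geometric score, is sketched below; the 5.0 reprojection threshold and the argument names are assumptions:

    def _homography(self, kp_query, kp_db, good_matches):
        # Fit a homography with RANSAC and keep the matches that survive as inliers.
        if len(good_matches) < 4:
            return []
        src = np.float32([kp_query[m.queryIdx].pt for m in good_matches]).reshape(-1, 1, 2)
        dst = np.float32([kp_db[m.trainIdx].pt for m in good_matches]).reshape(-1, 1, 2)
        _, inlier_mask = cv2.findHomography(src, dst, cv2.RANSAC, 5.0)
        if inlier_mask is None:
            return []
        return [m for m, keep in zip(good_matches, inlier_mask.ravel()) if keep]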
Example #6
    def query(self, picture: Picture) -> List[Picture]:
        mask, rec = detect_text(picture.get_image())
        kp, des = self.orb.detectAndCompute(picture.get_image(), mask)

        return (
            seq(self.db)
                .map(lambda p: (p[0], p[1], self.bf.match(des, p[2])))
                .map(lambda p: (p[0], p[1], seq(p[2])
                     .filter(lambda d: d.distance < max(
                         THRESHOLD, seq(p[2]).map(lambda m: m.distance).min()))
                     .to_list()))
                .filter(lambda p: len(p[2]) > MIN_MATCH_COUNT)
                .map(lambda p: (p[0], self._homography(
                    kp, p[1], p[2], p[0].get_image(), picture.get_image())))
                .filter(lambda p: p[1] > MIN_MATCH_COUNT)
                .sorted(lambda p: p[1], reverse=True)
                .map(lambda p: p[0])
                .take(10)
                .to_list()
        )
    def query(self, picture: Picture) -> List[Picture]:
        mask, rec = detect_text(picture.get_image())
        kp, des = self.orb.detectAndCompute(picture.get_image(), mask)

        return (
            seq(self.db)
                .map(lambda p: (p[0], self.flann.knnMatch(
                    np.asarray(p[2], np.float32), np.asarray(des, np.float32), 2)))
                .map(lambda p: (p[0], self._ratio_test(p[1])))
                .map(lambda p: (p[0], len(p[1])))
                .filter(lambda p: p[1] > 4)
                .sorted(lambda p: p[1], reverse=True)
                .map(lambda p: p[0])
                .take(10)
                .to_list()
        )
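Converting the binary ORB descriptors to float32 so they fit a KD-tree index works, but FLANN's LSH index is the configuration normally used for binary descriptors. A sketch with assumed parameter values follows; `db_descriptors` and `query_descriptors` are placeholders:

FLANN_INDEX_LSH = 6
flann = cv2.FlannBasedMatcher(
    dict(algorithm=FLANN_INDEX_LSH, table_number=6, key_size=12, multi_probe_level=1),
    dict(checks=50))
matches = flann.knnMatch(db_descriptors, query_descriptors, k=2)  # LSH may return fewer than 2 candidates per query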
Example #8
    def train(self, images: List[Picture]) -> None:
        for image in images:
            mask, bounding_text = detect_text(image.get_image())
            # ORB features restricted to the text-detection mask.
            kp, des = self.orb.detectAndCompute(image.get_image(), mask)
            self.db.append((image, kp, des, bounding_text))
Example #9
    def train(self, images: List[Picture]) -> None:
        for image in images:
            mask, bounding_text = detect_text(image.get_image())
            # STAR detects keypoints; BRIEF then computes their descriptors.
            kp = self.star.detect(image.get_image(), mask)
            kp, des = self.brief.compute(image.get_image(), kp)
            self.db.append((image, kp, des, bounding_text))
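STAR and BRIEF both ship with opencv-contrib (`cv2.xfeatures2d`). A possible initialization for this last variant, with default parameters and attribute names taken from the snippet, is:

    def __init__(self):
        self.star = cv2.xfeatures2d.StarDetector_create()
        self.brief = cv2.xfeatures2d.BriefDescriptorExtractor_create()
        self.db = []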