Example #1
    def findSimilarImages(self):
        self.allResults = []
        qrCodeDetector = cv2.QRCodeDetector()
        similarFile = open(self.similarImageFile, "w")
        uniqueFile = open(self.uniqueImageFile, "w")

        print("Finding similar images and decoding QR codes if any.... ")
        for (i, imagePath) in enumerate(self.imagePaths):
            # load the image
            image = cv2.imread(imagePath)
            # paths of all images whose hash falls within range of the query
            arr = []
            # convert image to hash
            queryHash = dhash(image)
            queryHash = convert_hash(queryHash)
            results = self.treePickle.get_all_in_range(queryHash, 21)

            for (d, h) in results:
                # grab the image paths stored for this hash; default to an
                # empty list so a missing hash cannot raise an IndexError
                resultPaths = self.hashPickle.get(h, [])
                if resultPaths:
                    arr.append(resultPaths[0])

            # a plain "\n" is correct for files opened in text mode; writing
            # os.linesep would be translated to "\r\r\n" on Windows
            if len(arr) > 1:
                similarFile.write(", ".join(arr) + "\n")

            elif len(arr) == 1:
                uniqueFile.write(arr[0] + "\n")

        similarFile.close()
        uniqueFile.close()

    def formHashes(self):
        for (i, imagePath) in enumerate(self.imagePaths):
            # load the input image
            print("[INFO] processing image {}/{} {}".format(
                i + 1, len(self.imagePaths), imagePath))
            image = cv2.imread(imagePath)

            # compute the hash for the image and convert it
            h = dhash(image)
            h = convert_hash(h)

            # update the hashes dictionary
            tmp = self.hashes.get(h, [])
            tmp.append(imagePath)
            self.hashes[h] = tmp
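
Both methods above rely on dhash and convert_hash helpers that the snippet does not define, and the later examples also pass a hamming metric to the VP-Tree. A minimal sketch of what those helpers typically look like is given below; the hashSize default, the gradient direction, and the exact conversion are assumptions for illustration, not something the examples confirm.

import cv2
import numpy as np

def dhash(image, hashSize=8):
    # convert to grayscale and resize to (hashSize + 1, hashSize) so that
    # adjacent columns can be compared to form a horizontal gradient
    gray = cv2.cvtColor(image, cv2.COLOR_BGR2GRAY)
    resized = cv2.resize(gray, (hashSize + 1, hashSize))

    # mark which pixels are brighter than their left neighbor
    diff = resized[:, 1:] > resized[:, :-1]

    # pack the boolean gradient into a single integer hash
    return sum([2 ** i for (i, v) in enumerate(diff.flatten()) if v])

def convert_hash(h):
    # push the hash through NumPy's 64-bit float and back to a Python int
    # so every hash ends up in one consistent, hashable numeric type
    return int(np.array(h, dtype="float64"))

def hamming(a, b):
    # Hamming distance between two integer hashes: XOR them, count set bits
    return bin(int(a) ^ int(b)).count("1")
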
Example #3
                default=10,
                help="maximum hamming distance")
args = vars(ap.parse_args())

# load the VP-Tree and hashes dictionary
print("[INFO] loading VP-Tree and hashes...")
tree = pickle.loads(open(args["tree"], "rb").read())
hashes = pickle.loads(open(args["hashes"], "rb").read())

# load the input query image
image = cv2.imread(args["query"])
cv2.imshow("Query", image)

# compute the hash for the query image, then convert it
queryHash = dhash(image)
queryHash = convert_hash(queryHash)

# perform the search
print("[INFO] performing search...")
start = time.time()
results = tree.get_all_in_range(queryHash, args["distance"])
results = sorted(results)
end = time.time()
print("[INFO] search took {} seconds".format(end - start))

# loop over the results
for (d, h) in results:
    # grab all image paths in our dataset with the same hash
    resultPaths = hashes.get(h, [])
    print("[INFO] {} total image(s) with d: {}, h: {}".format(
        len(resultPaths), d, h))
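
The loop above only prints how many indexed images share each matching hash. A common extension, continuing the same script and assuming the stored paths still point at readable image files on disk, is to load and display every match; this also supplies the cv2.waitKey call that cv2.imshow needs before any window (including the "Query" window shown earlier) is actually rendered. This is a sketch, not part of the original script.

# loop over the results again and display every matching image
for (d, h) in results:
    for resultPath in hashes.get(h, []):
        # load the matched image and show it until a key is pressed
        result = cv2.imread(resultPath)
        cv2.imshow("Result", result)
        cv2.waitKey(0)
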
Example #4
args = vars(ap.parse_args())

# grab the paths to the input images and initialize the dictionary
# of hashes
imagePaths = list(paths.list_images(args["images"]))
hashes = {}

# loop over the image paths
for (i, imagePath) in enumerate(imagePaths):
    # load the input image
    print("[INFO] processing image {}/{}".format(i + 1, len(imagePaths)))
    image = cv2.imread(imagePath)

    # compute the hash for the image and convert it
    h = dhash(image)
    h = convert_hash(h)

    # update the hashes dictionary
    p = hashes.get(h, [])
    p.append(imagePath)
    hashes[h] = p

# build the VP-Tree
print("[INFO] building VP-Tree...")
points = list(hashes.keys())
tree = vptree.VPTree(points, hamming)

# serialize the VP-Tree to disk
print("[INFO] serializing VP-Tree...")
f = open(args["tree"], "wb")
f.write(pickle.dumps(tree))
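
The search script in Example #3 loads both a VP-Tree pickle and a hashes pickle, so this indexing script presumably serializes the hashes dictionary as well; the snippet here stops right after writing the tree and leaves the file handle open. A sketch of the remaining steps, assuming an args["hashes"] output path that mirrors the one read in Example #3, is:

# close the tree pickle, then serialize the hashes dictionary so the search
# script can map a matching hash back to its image paths
f.close()

print("[INFO] serializing hashes...")
f = open(args["hashes"], "wb")  # assumed output path, mirroring Example #3
f.write(pickle.dumps(hashes))
f.close()
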