Example #1
File: views.py Project: jness/MTG-Toolbox
def get_context_data(self, **kwargs):
    self.create_context(**kwargs)

    # Collect every stored card whose hash is within 15 bits of the
    # query hash held in the view context.
    matches = []
    for h in MTGHash.objects.all():
        s = pHash.hamming_distance(long(self.context['hash']), long(h.hash))
        if s <= 15:
            matches.append((h.card, s))

    self.context['matches'] = matches
    return self.context
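For context, the lookup above assumes the MTGHash table has already been filled with one perceptual hash per card. A minimal sketch of that indexing step, assuming a Django model MTGHash with card and hash fields as used above; the import path and the index_card helper name are hypothetical:

import pHash
from myapp.models import MTGHash  # hypothetical import path for the model used above

def index_card(card, image_path):
    # Compute the 64-bit DCT perceptual hash and store it as a string,
    # mirroring the long(h.hash) conversion in the view above.
    h = pHash.imagehash(image_path)
    MTGHash.objects.create(card=card, hash=str(h))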
Example #2
#!/usr/bin/env python
# coding: utf-8

import sys

import pHash

if __name__ == "__main__":
    if len(sys.argv) != 3:
        print "usage: %s src dst" % sys.argv[0]
        sys.exit(1)

    # Radial-variance digest comparison: sigma, gamma and number of angles.
    d1 = pHash.image_digest(sys.argv[1], 1.0, 1.0, 180)
    d2 = pHash.image_digest(sys.argv[2], 1.0, 1.0, 180)
    print 'digest', pHash.crosscorr(d1, d2)[1]

    # DCT hash comparison: a lower Hamming distance means more similar images.
    h1 = pHash.imagehash(sys.argv[1])
    h2 = pHash.imagehash(sys.argv[2])
    print 'hash', pHash.hamming_distance(h1, h2)

Example #3
File: test.py Project: anty-zhang/mypy


# logo_hash = pHash.imagehash('video_wp.png')
# logo1_hash = pHash.imagehash('logo1.jpg')
# logo2_hash = pHash.imagehash('logo2.jpg')
# logo3_hash = pHash.imagehash('logo3.jpg')
# logo4_hash = pHash.imagehash('logo4.jpg')
#
# print 'Hamming distance: %d (%08x / %08x)' % (pHash.hamming_distance(logo_hash, logo1_hash), logo_hash, logo1_hash)
# print 'Hamming distance: %d (%08x / %08x)' % (pHash.hamming_distance(logo_hash, logo2_hash), logo_hash, logo2_hash)
# print 'Hamming distance: %d (%08x / %08x)' % (pHash.hamming_distance(logo_hash, logo3_hash), logo_hash, logo3_hash)
# print 'Hamming distance: %d (%08x / %08x)' % (pHash.hamming_distance(logo_hash, logo4_hash), logo_hash, logo4_hash)
#
#
# print 'Hamming distance: %d (%08x / %08x)' % (pHash.hamming_distance(logo1_hash, logo3_hash), logo1_hash, logo3_hash)
#




o4n_hash = pHash.imagehash('frame_o4n_03.jpg')
wp_hash = pHash.imagehash('frame_wp_003.jpg')
wpc_hash = pHash.imagehash('frame_wpc_0003.jpg')
wpstar_hash = pHash.imagehash('frame_wpstar_000056.jpg')

print 'Hamming distance: %d (%08x / %08x)' % (pHash.hamming_distance(o4n_hash, wp_hash), o4n_hash, wp_hash)
print 'Hamming distance: %d (%08x / %08x)' % (pHash.hamming_distance(o4n_hash, wpc_hash), o4n_hash, wpc_hash)
print 'Hamming distance: %d (%08x / %08x)' % (pHash.hamming_distance(wp_hash, wpc_hash), wp_hash, wpc_hash)
print 'Hamming distance: %d (%08x / %08x)' % (pHash.hamming_distance(wpstar_hash, wpc_hash), wpstar_hash, wpc_hash)
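The frame comparisons above only print raw distances. The examples in this collection use different cut-offs when turning a distance into a same/different decision (15 in Examples #1 and #4, 8 in Example #6); a small helper sketch that makes the threshold explicit — frames_match and its default of 15 are assumptions, not part of any project above:

import pHash

def frames_match(path1, path2, threshold=15):
    # Two images are treated as the same frame when their DCT hashes
    # differ in at most `threshold` bits.
    h1 = pHash.imagehash(path1)
    h2 = pHash.imagehash(path2)
    return pHash.hamming_distance(h1, h2) <= threshold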
Example #4
            hash1 = pHash.imagehash(temp.name)

            # Estimate image quality from the blur-detection tool's density output.
            command = 'blur-detection ' + temp.name
            output = commands.getoutput(command)
            p = re.compile('.*density: (\d+\.\d+)')

            image_quality = float(p.match(output).group(1))

            images = conn.get_images(user_id=user['_id'], is_duplicate=False)

            is_duplicate = False
            group = 1
            is_unique = True
            for image in images:
                # A Hamming distance below 15 means the new image duplicates a stored one;
                # whichever copy has the lower quality ends up marked as the duplicate.
                if pHash.hamming_distance(hash1, long(image['hash'])) < 15:
                    is_unique = False
                    if image_quality < image['quality']:
                        group = image['group']
                        is_duplicate = True
                        break
                    elif not image['is_duplicate']:
                        group = image['group']
                        conn.mark_image_duplicate(image["_id"])
                        break
            if is_unique:
                highest_group += 1
                group = highest_group
            print "adding image for user " + str(user) + "  --  " + data["path"] + "  quality  " + str(image_quality)
            conn.insert_image(user["_id"], data["path"], [latitude, longitude], "xx", hash1,
                              timestamp, group, image_quality, is_duplicate=is_duplicate)
Example #5


class CompressionSimilarityError(Exception):
    pass

def _images_are_similar(filename1, filename2):
    try:
        import pHash
    except ImportError:
        logging.info("Could not determine similarity - missing pHash module")
        return True

    hash1 = pHash.imagehash(filename1)
    hash2 = pHash.imagehash(filename2)
    hd = pHash.hamming_distance(hash1, hash2)
    logging.info('Hamming distance: %d (%08x / %08x)' % (hd, hash1, hash2))

    if hd <= SIMILARITY_THRESHOLD:
        return True

    return False

    """
    digest1 = pHash.image_digest(filename1, 1.0, 1.0, 180)
    digest2 = pHash.image_digest(filename2, 1.0, 1.0, 180)
    print 'Cross-correlation: %d' % (pHash.crosscorr(digest1, digest2))
    """


def _images_are_equal(filename1, filename2):
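Note that _images_are_similar above relies on a module-level SIMILARITY_THRESHOLD constant that is not shown in the excerpt. A hedged guess at the missing definition together with a call site, assuming logging is already imported as the excerpt suggests; the value 15 is chosen purely for illustration:

SIMILARITY_THRESHOLD = 15  # hypothetical value; the real constant is outside this excerpt

if _images_are_similar('before.png', 'after.png'):
    logging.info('treating the pair as visually similar')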
Example #6
            urllib.urlretrieve(albumPhoto["images"][-1]["source"], photoName)
            compare_photos.append(photoName)
            compare_hashes.append(pHash.imagehash(photoName))
            print "Saved %s" % (photoName)

response = graph.request("search", {"q": profile["name"], "fields": "id,name,picture", "type": "user"})
next = response["paging"]["next"].replace("https://graph.facebook.com/v1.0/", "")

print "Hunt commencing!"
while next:
    for user in response["data"]:
        urllib.urlretrieve(user["picture"]["data"]["url"], "compared.jpg")
        compared_hash = pHash.imagehash("compared.jpg")
        compare_count += 1
        for compare_hash in compare_hashes:
            hamming_distance = pHash.hamming_distance(compare_hash, compared_hash)
            # A distance below 8 is treated as a likely copy of one of the profile photos.
            if hamming_distance < 8:
                print 'Potential scammer: http://graph.facebook.com/%s Hamming distance: %d (%08x / %08x)' % (user["id"], hamming_distance, compare_hash, compared_hash)
            elif DEBUG:
                print 'http://graph.facebook.com/%s Hamming distance: %d' % (user["id"], hamming_distance)

    # Fetch the next page of results once every profile on this page has been checked.
    response = graph.request(next)
    if "next" in response["paging"]:
        next = response["paging"]["next"].replace("https://graph.facebook.com/v1.0/", "")
    else:
        next = None

print "Compared %d profiles!" % (compare_count)
Example #7
def get_phash_similarity(self, other):
    # Hamming distance between the two objects' precomputed perceptual hashes.
    return pHash.hamming_distance(self.phash, other.phash)
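get_phash_similarity compares two precomputed phash attributes. A minimal sketch of a class that would carry such an attribute, assuming the hash is computed from an image path at construction time; the HashedImage name and its constructor are hypothetical:

import pHash

class HashedImage(object):
    def __init__(self, path):
        self.path = path
        self.phash = pHash.imagehash(path)  # 64-bit perceptual hash of the file

    def get_phash_similarity(self, other):
        return pHash.hamming_distance(self.phash, other.phash)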
Example #8
File: scrape.py Project: watson-app/watson
            hash1 = pHash.imagehash(temp.name)

            command = 'blur-detection ' + temp.name
            output = commands.getoutput(command)
            p = re.compile('.*density: (\d+\.\d+)')

            image_quality = float(p.match(output).group(1))

            images = conn.get_images(user_id=user['_id'], is_duplicate=False)

            is_duplicate = False
            group = 1
            is_unique = True
            for image in images:
                if pHash.hamming_distance(hash1, long(image['hash'])) < 15:
                    is_unique = False
                    if image_quality < image['quality']:
                        group = image['group']
                        is_duplicate = True
                        break
                    elif not image['is_duplicate']:
                        group = image['group']
                        conn.mark_image_duplicate(image["_id"])
                        break
            if is_unique:
                highest_group += 1
                group = highest_group
            print "adding image for user " + str(user) + "  --  " + data["path"] + "  quality  " + str(image_quality)
            conn.insert_image(user["_id"], data["path"], [latitude, longitude], "xx", hash1, timestamp, group, image_quality, is_duplicate=is_duplicate)
            os.unlink(temp.name)
Example #9
def ia896_capstone_calculate_dist(img1, img2):
    # Treat a missing or empty hash as "unknown" and return a large sentinel distance.
    if not img1['phash'] or not img2['phash']:
        return 9999
    hash1 = long(img1['phash'])
    hash2 = long(img2['phash'])
    return pHash.hamming_distance(hash1, hash2)
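A possible call site for the distance helper above, assuming records are dicts whose phash field holds the hash as a string; the values below are made up for illustration:

img_a = {'phash': '9432713930646727114'}   # hypothetical stored hash
img_b = {'phash': '9432713930646727115'}   # hypothetical stored hash
print ia896_capstone_calculate_dist(img_a, img_b)          # small distance suggests near-duplicates
print ia896_capstone_calculate_dist({'phash': ''}, img_b)  # missing hash -> sentinel 9999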
Example #10
import pHash
import sys

# DCT-based hash: similar images have a small Hamming distance.
hash1 = pHash.imagehash(sys.argv[1])
hash2 = pHash.imagehash(sys.argv[2])
print 'Hamming distance: %d (%08x / %08x)' % (pHash.hamming_distance(hash1, hash2), hash1, hash2)

# Radial-variance digest: similar images have a high cross-correlation.
digest1 = pHash.image_digest(sys.argv[1], 1.0, 1.0, 180)
digest2 = pHash.image_digest(sys.argv[2], 1.0, 1.0, 180)
print(pHash.crosscorr(digest1, digest2))
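Example #2 above indexes the result of pHash.crosscorr with [1], which suggests the binding returns a pair whose second element is the peak cross-correlation. On that assumption, a small hypothetical helper (compare_files is not part of any project above) that reports both metrics for two files:

import pHash

def compare_files(path1, path2):
    # Hamming distance over DCT hashes (lower = more similar) and
    # peak cross-correlation over radial-variance digests (higher = more similar).
    h1, h2 = pHash.imagehash(path1), pHash.imagehash(path2)
    d1 = pHash.image_digest(path1, 1.0, 1.0, 180)
    d2 = pHash.image_digest(path2, 1.0, 1.0, 180)
    return pHash.hamming_distance(h1, h2), pHash.crosscorr(d1, d2)[1]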