def do_vhash(i, image):
    """Compute the visual hash for one image and persist it.

    i     -- 1-based position of this image in the run; used only for the
             progress line (printed as "(i/total_images-i)" -- NOTE(review):
             the second field looks like it was meant to be a "remaining"
             counter; behavior kept as-is).
    image -- project image object; must expose ``short_hash``, ``get_data``,
             a writable ``vhash`` attribute and ``save()``.

    Relies on module-level ``total_images``, ``context`` and ``compute``.
    Errors are logged per-image and swallowed so one bad image does not
    abort the whole batch.
    """
    import sys  # local import: keeps the block self-contained

    # sys.stdout.write instead of the py2 "print x," trailing-comma form:
    # suppresses the newline and is valid on both Python 2 and 3.
    sys.stdout.write('\n[compute vhash] (%s/%s)\t%s'
                     % (i, total_images - i, image.short_hash))
    try:
        # Fetch moved inside the try: a failed download is now handled by
        # the same per-image boundary as a failed hash computation, instead
        # of crashing the caller's loop.
        image_data = context.create_partial(image.get_data)()
        image.vhash = str(compute(image_data))
        print('\t' + image.vhash)
        image.save()
    except Exception as ex:
        # Best-effort boundary: record which image failed and carry on.
        print('\n%s:: %s' % (image.short_hash, ex))
def image_data(short_hash, get_Image, context):
    """Resolve *short_hash* to its image and redirect to the image's
    public URL, caching resolved URLs in the module-level ``image_urls``.

    short_hash -- lookup key for the image
    get_Image  -- factory returning an object with ``get_one(short_hash=...)``
    context    -- project context providing ``create_partial``

    Returns a redirect response, or the string 'not found' when no image
    matches.
    """
    global image_urls
    image = get_Image().get_one(short_hash=short_hash)
    if not image:
        return 'not found'

    # go directly to the source
    if image.short_hash in image_urls:
        print('cached url')
        # BUG FIX: the original called redirect() here but discarded the
        # response and fell through to re-fetch the public URL, so the
        # cache never avoided any work. The response must be returned.
        return redirect(image_urls.get(image.short_hash))

    url = context.create_partial(image.get_public_url)()
    if image.short_hash not in image_urls:
        # NOTE(review): stored under ``short_hash`` but looked up via
        # ``image.short_hash`` -- these should be equal since the image was
        # fetched by short_hash; kept as-is, confirm they cannot diverge.
        image_urls[short_hash] = url
    return redirect(url)

    # TODO: stream -- unreachable stub kept from the original, intended as
    # a future direct-streaming path instead of redirecting to the source.
    fn = context.create_partial(image.get_data)
    data_stream = fn(stream=True)
    return Response(data_stream, mimetype='image')
# Thread-pool bootstrap for a prefix-sharded download run. Left byte-identical
# (it is collapsed onto one physical line, and the try:/for bodies continue
# past this chunk, so it cannot be reformatted into a complete unit here).
#
# NOTE(review):
# - DOWNLOADER_COUNT is used but not defined in this chunk -- presumably a
#   module-level constant alongside FINDER_COUNT; verify.
# - download_queue is bounded at 10 slots per downloader; prefix_queue is
#   unbounded. Both queues plus the stopper Event are published to the
#   shared ``context`` for the downloader/finder thread targets.
# - The inner ``for prefix in itertools.combinations(...)`` rebinds the outer
#   ``prefix`` that was just compared to 'ALL'. Also, combinations() yields
#   only ordered, repetition-free 3-tuples (e.g. never 'aaa' or 'ba0'), so
#   for "all objects" this probably should be itertools.product -- confirm
#   against how the prefixes are consumed downstream.
# - Python-2-only constructs (xrange, print statement) match the rest of the
#   file.
FINDER_COUNT = 5 finder_threads = set() downloader_threads = set() download_queue = Queue(DOWNLOADER_COUNT * 10) prefix_queue = Queue() stopper = Event() context.update(download_queue=download_queue, prefix_queue=prefix_queue, stopper=stopper) try: # the downloaders for i in xrange(DOWNLOADER_COUNT): # create and start our threads downloader_thread = Thread(target=context.create_partial(downloader)) downloader_thread.start() downloader_threads.add(downloader_thread) # and the finders for i in xrange(FINDER_COUNT): # create and start our threads finder_thread = Thread(target=context.create_partial(finder)) finder_thread.start() finder_threads.add(finder_thread) if prefix == 'ALL': print 'Downloading all objects' for prefix in itertools.combinations('abcdef0123456789', 3): if context.stopper.is_set(): break