def _count_tiles(self, image): count_key = 'count_all_tiles:%s' % image['fileid'] count = self.redis.get(count_key) if count is None: count = count_all_tiles(image['fileid'], self.application.settings['static_path']) self.redis.setex(count_key, count, 60 * 5) return count
def _count_tiles(self, image): count_key = 'count_all_tiles:%s' % image['fileid'] count = self.redis.get(count_key) if count is None: count = count_all_tiles( image['fileid'], self.application.settings['static_path'] ) self.redis.setex( count_key, count, 60 * 5 ) return count
def get(self, fileid):
    """Render the image viewing page for ``fileid``.

    Metadata is looked up in Redis first and falls back to MongoDB (via
    motor), re-priming the cache on a miss.  Zoom ranges are taken from
    stored metadata or derived from the image width.  Images older than
    one hour without a CDN domain get background jobs enqueued (rq) to
    upload their tiles and original to S3.

    Raises tornado.web.HTTPError(404) when the file is unknown.
    """
    # fileid is mapped to a 1/2/rest directory layout,
    # e.g. 'abcdef' -> 'a/bc/def'.
    image_filename = (
        fileid[:1] + '/' +
        fileid[1:3] + '/' +
        fileid[3:]
    )
    # we might want to read from a database what the most
    # appropriate numbers should be here.
    ranges = [self.DEFAULT_RANGE_MIN, self.DEFAULT_RANGE_MAX]
    default_zoom = self.DEFAULT_ZOOM
    metadata_key = 'metadata:%s' % fileid
    # Raw JSON string (or None on a cache miss).
    metadata = self.redis.get(metadata_key)
    # Debug toggle: uncomment to force a cache miss and clear the upload lock.
    #metadata=None;self.redis.delete('uploading:%s' % fileid)
    # The two checks below are SUBSTRING tests against the raw JSON text,
    # used to invalidate cache entries written before these keys existed.
    if metadata and 'width' not in metadata:
        # legacy
        metadata = None
    if metadata and 'date_timestamp' not in metadata:
        # legacy
        metadata = None
    if metadata is not None:
        # Cache hit: everything we need is in the JSON payload.
        metadata = json.loads(metadata)
        content_type = metadata['content_type']
        owner = metadata['owner']
        title = metadata['title']
        date_timestamp = metadata['date_timestamp']
        width = metadata['width']
        cdn_domain = metadata.get('cdn_domain')
    else:
        # Cache miss: load the document from MongoDB and re-prime Redis.
        logging.info("Meta data cache miss (%s)" % fileid)
        document = yield motor.Op(
            self.db.images.find_one,
            {'fileid': fileid}
        )
        if not document:
            raise tornado.web.HTTPError(404, "File not found")
        content_type = document['contenttype']
        owner = document['user']
        title = document.get('title', '')
        width = document['width']
        cdn_domain = document.get('cdn_domain', None)
        # Mongo stores a datetime; flatten to a Unix timestamp for caching.
        date_timestamp = time.mktime(document['date'].timetuple())
        metadata = {
            'content_type': content_type,
            'owner': owner,
            'title': title,
            'date_timestamp': date_timestamp,
            'width': width,
            'cdn_domain': cdn_domain,
        }
        if document.get('ranges'):
            metadata['ranges'] = document['ranges']
        # Legacy redis-py argument order: setex(key, value, seconds).
        self.redis.setex(
            metadata_key,
            json.dumps(metadata),
            60 * 60  # * 24
        )
    # Age of the image in seconds, both timestamps taken as UTC.
    now = time.mktime(datetime.datetime.utcnow().timetuple())
    age = now - date_timestamp
    if metadata.get('ranges'):
        ranges = metadata.get('ranges')
    else:
        # Derive zoom levels from the image width: each level doubles the
        # 256px tile grid, stopping once it covers the full width (or the
        # configured maximum is reached).
        ranges = []
        _range = self.DEFAULT_RANGE_MIN
        while True:
            ranges.append(_range)
            range_width = 256 * (2 ** _range)
            if range_width > width or _range >= self.DEFAULT_RANGE_MAX:
                break
            _range += 1
    can_edit = self.get_current_user() == owner
    if content_type == 'image/jpeg':
        extension = 'jpg'
    elif content_type == 'image/png':
        extension = 'png'
    else:
        print "Guessing extension :("
        extension = self.DEFAULT_EXTENSION
    # The query string may override the extension, but only to known values.
    extension = self.get_argument('extension', extension)
    assert extension in ('png', 'jpg'), extension
    if age > 60 * 60 and not cdn_domain:
        # it might be time to upload this to S3
        # The lock key prevents concurrent requests from enqueueing the
        # same upload work twice; it expires after an hour.
        lock_key = 'uploading:%s' % fileid
        if self.redis.get(lock_key):
            print "AWS uploading is locked"
        else:
            # we're ready to upload it
            _no_tiles = count_all_tiles(
                fileid,
                self.application.settings['static_path']
            )
            # Legacy redis-py argument order: setex(key, value, seconds).
            self.redis.setex(lock_key, time.time(), 60 * 60)
            # In debug we want uploads processed promptly; in production
            # they go on the low-priority queue.
            priority = (
                self.application.settings['debug'] and 'default' or 'low'
            )
            q = Queue(priority, connection=self.redis)
            logging.info("About to upload %s tiles" % _no_tiles)
            # bulk the queue workers with 100 each
            for i in range(_no_tiles / 100 + 1):
                q.enqueue(
                    upload_tiles,
                    fileid,
                    self.application.settings['static_path'],
                    max_count=100
                )
            # upload the original
            q.enqueue(
                upload_original,
                fileid,
                extension,
                self.application.settings['static_path'],
                settings.ORIGINALS_BUCKET_ID
            )
    og_image_url = None
    # if the image is old enough to have been given a chance to generate a
    # thumbnail, then set that
    if age > 60:
        og_image_url = self.make_thumbnail_url(
            fileid, 100, extension=extension, absolute_url=True,
        )
    self.render(
        'image.html',
        page_title=title or '/%s' % fileid,
        image_filename=image_filename,
        ranges=ranges,
        default_zoom=default_zoom,
        extension=extension,
        can_edit=can_edit,
        age=age,
        og_image_url=og_image_url,
        # Serve assets from the CDN (protocol-relative) once one is assigned.
        prefix=cdn_domain and '//' + cdn_domain or '',
    )