def bulk_dereference_events(posts):
    """Bulk dereference the primary image for a list of posts.

    Fetches all referenced image entities in a single ``ndb.get_multi``
    call instead of one datastore round trip per post, to increase
    performance for rss and rest collections.

    :param posts: iterable of post entities exposing a
        ``primary_image_resource_id`` attribute
    :return: the same ``posts``, with ``PRIMARY_IMAGE_PROP`` set on each
        post to the dereferenced entity, or None when absent/missing
    """
    # Step 1: Collect all the resource keys we need and prep the map
    entity_map = {}
    for post in posts:
        # Default the dereferenced prop placeholder
        setattr(post, PRIMARY_IMAGE_PROP, None)

        # Collect properties we want to dereference
        if post.primary_image_resource_id:
            entity_map[get_key_from_resource_id(
                post.primary_image_resource_id)] = None

    # Step 2: Fetch all of the entities we want to deref in one batch
    entities = ndb.get_multi(entity_map.keys())

    # Repopulate the map keyed by resource_id rather than ndb key.
    # NOTE: get_multi returns None for keys that no longer exist; skip
    # those so `entity.key` below cannot raise AttributeError (the
    # original iterated without this guard and would crash on a stale
    # primary_image_resource_id).
    for entity in entities:
        if entity is not None:
            entity_map[get_resource_id_from_key(entity.key)] = entity

    # Step 3: Iterate over posts and link up the dereferenced props
    for post in posts:
        if post.primary_image_resource_id:
            e = entity_map.get(post.primary_image_resource_id, None)
            setattr(post, PRIMARY_IMAGE_PROP, e)

    return posts
def from_resource(self, obj, field):
    """Output a field to dict value: the entity's key as a resource id.

    :param obj: entity expected to expose an ndb ``key`` attribute
    :param field: unused; present to satisfy the field-renderer interface
    :return: resource id string, or None when ``obj`` is not an ndb entity
    """
    try:
        resource_id = get_resource_id_from_key(obj.key)
    except Exception:
        # Narrowed from a bare `except:` so SystemExit/KeyboardInterrupt
        # are not swallowed; serialization stays best-effort (returns None).
        logging.error(
            'Attempting to get ResourceID for a non ndb Entity...')
        logging.error(obj)
        resource_id = None
    return resource_id
def _get(self):
    """Get a collection of Written Posts.

    Supports lookup of a single post by slug (``get_by_slug``) or a
    filtered, cursor-paginated listing. Listing results are cached per
    parameter set under a 'written_resources_*' key.
    """
    # Check if we want to get a post by its slug
    get_by_slug = self.cleaned_params.get('get_by_slug', None)
    if get_by_slug:
        return self._get_by_slug_or_404(get_by_slug)

    # Get a list of all posts
    limit = self.cleaned_params.get('limit', None)
    cursor = self.cleaned_params.get('cursor', None)
    start_date = self.cleaned_params.get('start_date', None)
    category_slug = self.cleaned_params.get('category_slug', None)

    optional_params = {}
    # Only forward is_published when the caller explicitly sent it
    if 'is_published' in self.params:
        optional_params['is_published'] = self.cleaned_params['is_published']
    if start_date:
        optional_params['start_date'] = start_date

    if category_slug:
        category = posts_api.get_post_category_by_slug(category_slug)
        if not category:
            self.serve_404('Category with slug %s Not Found' % category_slug)
            return
        optional_params['category_resource_id'] = get_resource_id_from_key(
            category.key)

    # TODO: If you are not admin, default is_published to True...

    # sort_keys keeps the cache key stable regardless of dict ordering;
    # without it identical requests could hash to different keys.
    key_stamp = str(hash(json.dumps(self.params, sort_keys=True)))
    cache_key = 'written_resources_%s' % key_stamp

    cached_result = ubercache.cache_get(cache_key)
    # Fixed: the original guarded this with `if False and cached_result:`
    # (debug leftover), which disabled cache reads while still paying for
    # cache writes below.
    if cached_result:
        results, cursor, more = cached_result
    else:
        # Posts were not cached for this set of properties
        entities, cursor, more = posts_api.get_posts(limit=limit,
                                                     cursor=cursor,
                                                     **optional_params)
        posts_api.bulk_dereference_posts(entities)

        # Create a set of results based upon this result set - iterator??
        results = [Resource(e, REST_RULES).to_dict() for e in entities]

        if cursor:
            cursor = cursor.urlsafe()

        # Store in cache
        ubercache.cache_set(cache_key, (results, cursor, more),
                            category='written')

    self.serve_success(results, {'cursor': cursor, 'more': more})
def test_multiple_pair(self):
    """A multi-pair key round-trips to the expected resource id."""
    multi_pair_key = ndb.Key('Parent', 123, 'Child', 'el-ni\u2099o')
    self.assertEqual(
        utils.get_resource_id_from_key(multi_pair_key),
        'UGFyZW50Hh8xMjMeQ2hpbGQeZWwtbmlcdTIwOTlv')
def from_resource(self, obj, field):
    """Output a field to dict value: the entity rendered as a URL."""
    resource_id = get_resource_id_from_key(obj.key)
    return self.url_template % resource_id
def test_single_pair(self):
    """A single-pair key round-trips to the expected resource id."""
    single_pair_key = ndb.Key('Parent', 123)
    self.assertEqual(
        utils.get_resource_id_from_key(single_pair_key),
        'UGFyZW50Hh8xMjM')
def test_multiple_pair(self, m_encode):
    """The encoder receives the flattened multi-pair key string."""
    target_key = ndb.Key('Parent', 123, 'Child', 'el-ni\u2099o')
    utils.get_resource_id_from_key(target_key)
    expected_arg = u'Parent\x1e\x1f123\x1eChild\x1eel-ni\\u2099o'
    m_encode.assert_called_once_with(expected_arg)
def test_single_pair(self, m_encode):
    """The encoder receives the flattened single-pair key string."""
    target_key = ndb.Key('Parent', 123)
    utils.get_resource_id_from_key(target_key)
    m_encode.assert_called_once_with(u'Parent\x1e\x1f123')
def post(self):
    """ Callback for a successful upload... keep this lightweight

    Reads the uploaded file back from GCS, creates resized image
    renditions via create_image, serves the new FileContainer as a JSON
    rest resource, and optionally attaches it to another entity named by
    the 'attach_to_resource' request param.
    """
    fs = Filesystem(BUCKET_NAME)
    has_files = fs.get_uploads(self.request, 'the_file', populate_post=True)
    if has_files:
        # Only the first uploaded file is processed
        file_info = has_files[0]
        original_filename = file_info.filename
        # NOTE(review): content_type, size, and blob_key below are
        # computed but never used — candidates for removal; confirm no
        # side effect is relied upon before deleting.
        content_type = file_info.content_type
        size = file_info.size
        gs_object_name = file_info.gs_object_name

        # We could urlfetch this, but file not public
        blob_key = blobstore.create_gs_key(gs_object_name)
        # Strip the '/gs' prefix to get the filesystem-relative path
        data = fs.read(gs_object_name.replace('/gs', ''))

        # What we want to do now is create a copy of the file with our own info
        dest_filename = 'juniper/%s' % original_filename

        # Prep the file object (creates renditions and puts the entity)
        file_obj = self.create_image(fs, data, dest_filename)
        file_obj_key = file_obj.key
        resource_id = get_resource_id_from_key(file_obj_key)

        # Finally delete the tmp file
        #data = fs.delete(gs_object_name.replace('/gs', ''))

        # "Return" a rest resource of sorts
        payload = {
            'status': 200,
            'messages': [],
            'results': Resource(file_obj, REST_RESOURCE_RULES).to_dict()
        }
        self.response.set_status(200)
        self.response.headers['Content-Type'] = 'application/json'
        self.response.write(json.dumps(payload))

        # Handle Attachment to Resource
        # Do this in a deferred task?
        attach_to_resource_id = self.request.get('attach_to_resource', None)

        # TODO: This should be done in a txn - especially when there are
        # multiple uploads
        if attach_to_resource_id:
            attachment_resource_key = get_key_from_resource_id(
                attach_to_resource_id)
            attachment_resource = attachment_resource_key.get()

            # Raised after the response was already written: the file
            # stays uploaded even though attachment failed.
            if not attachment_resource:
                raise Exception(
                    'Resource with key %s not found. File was uploaded...' %
                    attach_to_resource_id)

            if not attachment_resource.attachment_resources:
                attachment_resource.attachment_resources = []

            # Update attachments
            attachment_resource.attachment_resources.append(resource_id)

            # Optionally also pin the new file to a named property
            target_property = self.request.get('target_property', None)
            if target_property:
                setattr(attachment_resource, target_property, resource_id)

            attachment_resource.put()
    return
def create_image(self, fs, temp_file_data, dest_filename):
    """Create image renditions on Cloud Storage and persist a FileContainer.

    Currently only CARD_SMALL and THUMB renditions are generated (PNG);
    FULL, SIZED and CARD_LARGE remain empty placeholders. Target sizes:

        FULL:       no crop, scaled to max 1500 width ("original")  [TODO]
        SIZED:      no crop, scaled to 700 width ("sized")          [TODO]
        CARD_LARGE: 1200 x 631                                      [TODO]
        CARD_SMALL: 600 x 312
        THUMB:      160 x 160

    :param fs: Filesystem wrapper used to write rendition files
    :param temp_file_data: raw bytes of the uploaded source image
    :param dest_filename: filename recorded on the FileContainer entity
    :return: the persisted FileContainer entity
    """
    # Placeholders for every version; unimplemented ones stay ''
    versions_data = {
        VERSIONS.FULL[KEY]: '',
        VERSIONS.SIZED[KEY]: '',
        VERSIONS.CARD_LARGE[KEY]: '',
        VERSIONS.CARD_SMALL[KEY]: '',
        VERSIONS.THUMB[KEY]: ''
    }

    # Pre-allocate the entity id so the GCS folder can be named after
    # the resource id before the entity itself is written.
    ids = FileContainer.allocate_ids(size=1)
    file_obj_key = ndb.Key('FileContainer', ids[0])
    resource_id = get_resource_id_from_key(file_obj_key)
    dest_folder_name = 'file_container/%s/' % (resource_id)

    # CARD_SMALL rendition (center-cropped PNG)
    card_small_data, card_small_height, card_small_width = rescale(
        temp_file_data,
        VERSIONS.CARD_SMALL[WIDTH],
        VERSIONS.CARD_SMALL[HEIGHT],
        halign='middle',
        valign='middle')
    card_small_filename = fs.write(dest_folder_name + 'card_small.png',
                                   card_small_data,
                                   MIME_PNG)  # TODO: filename

    # THUMB rendition (center-cropped PNG)
    thumb_data, thumb_height, thumb_width = rescale(
        temp_file_data,
        VERSIONS.THUMB[WIDTH],
        VERSIONS.THUMB[HEIGHT],
        halign='middle',
        valign='middle')
    thumb_filename = fs.write(dest_folder_name + 'thumb.png',
                              thumb_data,
                              MIME_PNG)  # TODO: filename

    # Public URL prefix differs between appspot and the local dev server.
    # (Fixed: original also assigned a dead, typo'd `url_prefx = ''`.)
    if is_appspot():
        url_prefix = 'http://%s/' % BUCKET_NAME
    else:
        url_prefix = 'http://%s/_ah/gcs/%s/' % (get_domain(), BUCKET_NAME)

    card_small_url = url_prefix + card_small_filename
    thumb_url = url_prefix + thumb_filename

    versions_data[VERSIONS.CARD_SMALL[KEY]] = {
        'url': card_small_url,
        'height': card_small_height,
        'width': card_small_width
    }
    versions_data[VERSIONS.THUMB[KEY]] = {
        'url': thumb_url,
        'height': thumb_height,
        'width': thumb_width
    }

    # Create Datastore entity
    file_obj = FileContainer(key=file_obj_key,
                             filename=dest_filename,
                             gcs_filename=dest_filename,
                             versions=versions_data,
                             file_type='image')
    file_obj.put()
    return file_obj