def post(self, filename):
    """Delete the UserImage named `filename`, its stored blobs, and detach it
    from the Post on the same date (if any). Responds with a JSON status.
    """
    self.response.headers['Content-Type'] = "application/json"
    img = UserImage.query(UserImage.filename == filename).get()
    if not img:
        # Fixed typo in the user-facing message ("exit" -> "exist").
        return self.response.write(json.dumps({"status" : "error", "message" : "Image does not exist"}))
    post = Post.query(Post.date == img.date).get()
    #Remove it from the post
    if post:
        # Membership check replaces the former bare `except: pass` around
        # list.remove(), and also survives post.images being None
        # (the old `len(post.images)` would have crashed on None).
        if post.images and filename in post.images:
            post.images.remove(filename)
        if not post.images:
            post.has_images = False
        post.put()
    # Remove both stored renditions, then the datastore entity itself.
    filestore.delete(img.serving_size_key)
    filestore.delete(img.original_size_key)
    img.key.delete()
    self.response.write(json.dumps({"status" : "ok"}))
def post(self, filename):
    """Delete the UserImage named `filename`, its stored blobs, and scrub the
    image reference from the Post on the same date (if any). Responds with a
    JSON status.
    """
    self.response.headers['Content-Type'] = "application/json"
    img = UserImage.query(UserImage.filename == filename).get()
    if not img:
        # Fixed typo in the user-facing message ("exit" -> "exist").
        return self.response.write(json.dumps({"status" : "error", "message" : "Image does not exist"}))
    post = Post.query(Post.date == img.date).get()
    #Remove it from the post
    if post:
        # Membership check replaces the former bare `except: pass` around
        # list.remove(), and also survives post.images being None
        # (the old `len(post.images)` would have crashed on None).
        if post.images and filename in post.images:
            post.images.remove(filename)
        # Strip the inline image marker from the post body and collapse the
        # blank run it leaves behind.
        post.text = post.text.replace('$IMG:' + filename, '').replace('\n\n\n\n', '\n\n')
        if not post.images:
            post.has_images = False
        post.put()
    # Remove both stored renditions, then the datastore entity itself.
    filestore.delete(img.serving_size_key)
    filestore.delete(img.original_size_key)
    img.key.delete()
    self.response.write(json.dumps({"status" : "ok"}))
def post(self):
    """Handle an image upload: store the blob as a UserImage for the supplied
    date and, if a Post exists on that date, attach the image to it.
    Responds with JSON containing the generated filename.
    """
    upload = self.get_file_infos()[0]
    self.response.headers['Content-Type'] = "application/json"
    # Build the target date from the individual form fields.
    date = datetime.datetime(int(self.request.get('year')),
                             int(self.request.get('month')),
                             int(self.request.get('day')))
    allowed_types = ('image/jpeg', 'image/jpg', 'image/png', 'image/gif', 'image/bmp')
    if upload.content_type.lower() not in allowed_types:
        return self.response.write(json.dumps({"status" : "error", "message" : "Unsupported content type: " + upload.content_type}))
    payload = filestore.read(upload.gs_object_name)
    # Pick a name that doesn't collide with images already stored on this date.
    taken_names = [u.filename for u in UserImage.query(UserImage.date == date).fetch()]
    filename = UserImage.create_image_name(upload.filename, date, taken_names)
    img = UserImage()
    img.import_image(filename, upload.filename, payload, date)
    img.put()
    # The temporary upload object is no longer needed once imported.
    filestore.delete(upload.gs_object_name)
    #If there's a post here we should add the image...
    post = Post.query(Post.date == date).get()
    if post:
        post.has_images = True
        if post.images is None:
            post.images = []
        post.images.append(filename)
        post.put()
    self.response.write(json.dumps({"status" : "ok", "filename" : filename}))
def post(self):
    """Handle an image upload: store the blob as a UserImage for the supplied
    date and, if a Post exists on that date, attach the image to it.
    Responds with JSON containing the generated filename.
    """
    upload = self.get_file_infos()[0]
    self.response.headers['Content-Type'] = "application/json"
    # Build the target date from the individual form fields.
    date = datetime.datetime(int(self.request.get('year')),
                             int(self.request.get('month')),
                             int(self.request.get('day')))
    allowed_types = ('image/jpeg', 'image/jpg', 'image/png', 'image/gif', 'image/bmp')
    if upload.content_type.lower() not in allowed_types:
        return self.response.write(json.dumps({"status" : "error", "message" : "Unsupported content type: " + upload.content_type}))
    payload = filestore.read(upload.gs_object_name)
    # Pick a name that doesn't collide with images already stored on this date.
    taken_names = [u.filename for u in UserImage.query(UserImage.date == date).fetch()]
    filename = UserImage.create_image_name(upload.filename, date, taken_names)
    img = UserImage()
    img.import_image(filename, upload.filename, payload, date, None)
    img.put()
    # The temporary upload object is no longer needed once imported.
    filestore.delete(upload.gs_object_name)
    #If there's a post here we should add the image...
    post = Post.query(Post.date == date).get()
    if post:
        post.has_images = True
        if post.images is None:
            post.images = []
        post.images.append(filename)
        post.put()
    self.response.write(json.dumps({"status" : "ok", "filename" : filename}))
def cleanup_old_export_tasks(self):
    """Delete every finished or failed ExportTask entity along with its
    exported blob, logging how many were removed.
    """
    #Lets delete any old export tasks hanging around...
    old_deleted = 0
    for ex in ExportTask.query().fetch():
        if ex.status in ('finished', 'failed'):
            try:
                filestore.delete(ex.filename)
            except Exception:
                # Best effort: the blob may already be gone. Narrowed from a
                # bare `except:` so SystemExit/KeyboardInterrupt still propagate.
                pass
            ex.key.delete()
            old_deleted += 1
    if old_deleted > 0:
        logging.info('Deleted %s old export tasks' % old_deleted)
def post(self):
    """Delete the ExportTask identified by the url-safe 'task' parameter,
    removing its exported blob first. Safe to call twice: a missing task is
    logged and ignored.
    """
    export_task_key = ndb.Key(urlsafe=self.request.get('task'))
    export_task = export_task_key.get()
    if not export_task:
        logging.info('Export task already deleted')
        return
    try:
        filestore.delete(export_task.filename)
    except Exception:
        # Narrowed from a bare `except:` so process-level exceptions
        # (SystemExit/KeyboardInterrupt) are not swallowed.
        logging.info('Failed to delete export blob')
    export_task.key.delete()
    logging.info('Deleted export task')
def delete_post(self, post):
    """Permanently delete a post, its images (entities plus both stored
    renditions), and any raw emails captured on the same date; then decrement
    the monthly post counter.
    """
    images = UserImage.query(UserImage.date == post.date).fetch()
    for img in images:
        filestore.delete(img.serving_size_key)
        filestore.delete(img.original_size_key)
        img.key.delete()
    emails = RawMail.query(RawMail.date == post.date).fetch()
    for email in emails:
        # BUG FIX: ndb entities are deleted through their key --
        # `email.delete()` raises AttributeError on an ndb Model. This also
        # matches the sibling delete_post implementation in this file.
        email.key.delete()
    post.key.delete()
    # Keep the per-month post counter in sync with the datastore.
    PostCounter.get().decrement(post.date.year, post.date.month)
    logging.info('Deleted %s images, %s emails and 1 post from %s' % (len(images), len(emails), post.date.strftime('%Y-%m-%d')))
def delete_post(self, post):
    """Permanently delete a post, its images (entities plus both stored
    renditions), and any raw emails captured on the same date; then decrement
    the monthly post counter.
    """
    # Purge stored image blobs (both renditions) and their entities.
    related_images = UserImage.query(UserImage.date == post.date).fetch()
    for image in related_images:
        for blob_key in (image.serving_size_key, image.original_size_key):
            filestore.delete(blob_key)
        image.key.delete()
    # Drop any raw emails recorded for the same day.
    related_mails = RawMail.query(RawMail.date == post.date).fetch()
    for mail in related_mails:
        mail.key.delete()
    post.key.delete()
    # Keep the per-month post counter in sync with the datastore.
    PostCounter.get().decrement(post.date.year, post.date.month)
    logging.info('Deleted %s images, %s emails and 1 post from %s' % (len(related_images), len(related_mails), post.date.strftime('%Y-%m-%d')))
def post(self):
    """Run an OhLife zip import identified by the url-safe 'task' parameter.

    Unpacks the uploaded zip, creates Post and UserImage entities for entries
    that don't already exist, keeps the PostCounter and the ImportTask's
    progress message up to date, and deletes the uploaded zip when done.
    On any failure the task is marked 'failed' and the error is logged.
    """
    import_task_key = ndb.Key(urlsafe=self.request.get('task'))
    import_task = import_task_key.get()
    import_task.update('Unpacking zip file...', status='inprogress')
    logging.info('Starting import ...')
    counter = PostCounter.get()
    try:
        posts, images = self.read_zip_file(import_task.uploaded_file)
        import_task.update('Importing...', total_photos=len(images), total_posts=len(posts))
        logging.info('Importing %s posts, %s images' % (len(posts), len(images)))
        # filter_posts drops entries that already exist in the datastore.
        posts = self.filter_posts(posts)
        for date, text in posts:
            str_date = date.strftime('%Y-%m-%d')
            p = Post(date=date, source='ohlife', text=text.decode('utf-8'))
            p.images = []
            p.has_images = False
            # Zip image names embed the post date, e.g. img_2014-01-02.jpeg.
            post_images = [(k, images[k]) for k in images.keys() if str_date in k]
            if len(post_images):
                logging.info('Importing %s images for date %s' % (len(post_images), str_date))
                p.images = []
                p.has_images = True
                for name, bytes in post_images:
                    user_image = UserImage()
                    img_name = name.replace('img_', '').replace('.jpeg', '.jpg')
                    user_image.import_image(img_name, name, bytes, date)
                    p.images.append(img_name)
                    import_task.imported_photos += 1
                    user_image.put()
            p.put()
            counter.increment(p.date.year, p.date.month, False)
            import_task.imported_posts += 1
            # Flush progress every 10 posts to keep datastore writes bounded.
            if import_task.imported_posts % 10 == 0:
                import_task.update('Imported %s/%s post, %s/%s photos...' % (import_task.imported_posts, import_task.total_posts, import_task.imported_photos, import_task.total_photos))
                logging.info(import_task.message)
                counter.put()
        counter.put()
        skipped_posts = import_task.total_posts - import_task.imported_posts
        skipped_photos = import_task.total_photos - import_task.imported_photos
        msg = 'Imported %s posts and %s photos.' % (import_task.imported_posts, import_task.imported_photos)
        if skipped_posts or skipped_photos:
            msg += ' %s posts and %s photos already existed and were skipped.' % (skipped_posts, skipped_photos)
        import_task.update(msg, status='finished')
        logging.info(import_task.message)
        filestore.delete(import_task.uploaded_file)
    except Exception as ex:
        # `as` form works on Python 2.6+ and 3.x (was `except Exception, ex`).
        try:
            filestore.delete(import_task.uploaded_file)
        except Exception:
            # Best effort cleanup; narrowed from a bare `except:`.
            pass
        try:
            counter.put()
        except Exception:
            # Persist whatever counts succeeded; narrowed from a bare `except:`.
            pass
        import_task.update('Failed to import: %s' % ex, status='failed')
        log_error('Failed import', traceback.format_exc(6))
def post(self):
    """Run an OhLife zip import identified by the url-safe 'task' parameter.

    Unpacks the uploaded zip, creates Post and UserImage entities for entries
    that don't already exist, keeps the PostCounter and the ImportTask's
    progress message up to date, and deletes the uploaded zip when done.
    On any failure the task is marked 'failed' and the error is logged.
    """
    import_task_key = ndb.Key(urlsafe=self.request.get('task'))
    import_task = import_task_key.get()
    import_task.update('Unpacking zip file...', status='inprogress')
    logging.info('Starting import ...')
    counter = PostCounter.get()
    try:
        posts, images = self.read_zip_file(import_task.uploaded_file)
        import_task.update('Importing...', total_photos=len(images), total_posts=len(posts))
        logging.info('Importing %s posts, %s images' % (len(posts), len(images)))
        # filter_posts drops entries that already exist in the datastore.
        posts = self.filter_posts(posts)
        for date, text in posts:
            str_date = date.strftime('%Y-%m-%d')
            p = Post(date=date, source='ohlife', text=text.decode('utf-8'))
            p.images = []
            p.has_images = False
            # Zip image names embed the post date, e.g. img_2014-01-02.jpeg.
            post_images = [(k, images[k]) for k in images.keys() if str_date in k]
            if len(post_images):
                logging.info('Importing %s images for date %s' % (len(post_images), str_date))
                p.images = []
                p.has_images = True
                for name, bytes in post_images:
                    user_image = UserImage()
                    img_name = name.replace('img_', '').replace('.jpeg', '.jpg')
                    user_image.import_image(img_name, name, bytes, date)
                    p.images.append(img_name)
                    import_task.imported_photos += 1
                    user_image.put()
            p.put()
            counter.increment(p.date.year, p.date.month, False)
            import_task.imported_posts += 1
            # Flush progress every 10 posts to keep datastore writes bounded.
            if import_task.imported_posts % 10 == 0:
                import_task.update('Imported %s/%s post, %s/%s photos...' % (import_task.imported_posts, import_task.total_posts, import_task.imported_photos, import_task.total_photos))
                logging.info(import_task.message)
                counter.put()
        counter.put()
        skipped_posts = import_task.total_posts - import_task.imported_posts
        skipped_photos = import_task.total_photos - import_task.imported_photos
        msg = 'Imported %s posts and %s photos.' % (import_task.imported_posts, import_task.imported_photos)
        if skipped_posts or skipped_photos:
            msg += ' %s posts and %s photos already existed and were skipped.' % (skipped_posts, skipped_photos)
        import_task.update(msg, status='finished')
        logging.info(import_task.message)
        filestore.delete(import_task.uploaded_file)
    except Exception as ex:
        # `as` form works on Python 2.6+ and 3.x (was `except Exception, ex`).
        try:
            filestore.delete(import_task.uploaded_file)
        except Exception:
            # Best effort cleanup; narrowed from a bare `except:`.
            pass
        try:
            counter.put()
        except Exception:
            # Persist whatever counts succeeded; narrowed from a bare `except:`.
            pass
        import_task.update('Failed to import: %s' % ex, status='failed')
        log_error('Failed import', traceback.format_exc(6))