def post(self):
    """Handle an image upload for a specific date.

    Reads year/month/day from the request, validates the uploaded file's
    content type, stores the bytes as a UserImage entity and, if a Post
    already exists for that date, attaches the image filename to it.
    Responds with a JSON status object either way.
    """
    file_info = self.get_file_infos()[0]
    self.response.headers['Content-Type'] = "application/json"
    year = self.request.get('year')
    month = self.request.get('month')
    day = self.request.get('day')
    date = datetime.datetime(int(year), int(month), int(day))
    if file_info.content_type.lower() not in ('image/jpeg', 'image/jpg', 'image/png', 'image/gif', 'image/bmp'):
        return self.response.write(json.dumps({"status" : "error", "message" : "Unsupported content type: " + file_info.content_type}))
    # Renamed from `bytes` so the builtin is not shadowed.
    image_bytes = filestore.read(file_info.gs_object_name)
    # Existing names for this date are needed to generate a unique filename.
    existing_images = [u.filename for u in UserImage.query(UserImage.date == date).fetch()]
    filename = UserImage.create_image_name(file_info.filename, date, existing_images)
    img = UserImage()
    img.import_image(filename, file_info.filename, image_bytes, date)
    img.put()
    # The temporary upload blob is no longer needed once the image is stored.
    filestore.delete(file_info.gs_object_name)
    #If there's a post here we should add the image...
    post = Post.query(Post.date == date).get()
    if post:
        post.has_images = True
        if post.images is None:
            post.images = []
        post.images.append(filename)
        post.put()
    self.response.write(json.dumps({"status" : "ok", "filename" : filename}))
def post(self, filename):
    """Delete the image with the given filename.

    Removes the UserImage entity and both stored blobs, and detaches the
    filename from the Post for the image's date (if any).
    """
    self.response.headers['Content-Type'] = "application/json"
    img = UserImage.query(UserImage.filename == filename).get()
    if not img:
        # Fixed typo in the user-facing message ("exit" -> "exist").
        return self.response.write(json.dumps({"status" : "error", "message" : "Image does not exist"}))
    post = Post.query(Post.date == img.date).get()
    #Remove it from the post
    if post:
        try:
            post.images.remove(filename)
        except (ValueError, AttributeError):
            # ValueError: filename not in the list; AttributeError: images is
            # None. Either way there is nothing to detach.
            pass
        if len(post.images) == 0:
            post.has_images = False
        post.put()
    filestore.delete(img.serving_size_key)
    filestore.delete(img.original_size_key)
    img.key.delete()
    self.response.write(json.dumps({"status" : "ok"}))
def post(self, filename):
    """Delete the image with the given filename.

    Removes the UserImage entity and both stored blobs, detaches the
    filename from the Post for the image's date (if any), and scrubs the
    inline $IMG reference from the post text.
    """
    self.response.headers['Content-Type'] = "application/json"
    img = UserImage.query(UserImage.filename == filename).get()
    if not img:
        # Fixed typo in the user-facing message ("exit" -> "exist").
        return self.response.write(json.dumps({"status" : "error", "message" : "Image does not exist"}))
    post = Post.query(Post.date == img.date).get()
    #Remove it from the post
    if post:
        try:
            post.images.remove(filename)
            # Strip the inline image marker and collapse the blank lines it
            # leaves behind.
            post.text = post.text.replace('$IMG:' + filename, '').replace('\n\n\n\n', '\n\n')
        except (ValueError, AttributeError):
            # ValueError: filename not in the list; AttributeError: images or
            # text is None. Either way there is nothing to detach.
            pass
        if len(post.images) == 0:
            post.has_images = False
        post.put()
    filestore.delete(img.serving_size_key)
    filestore.delete(img.original_size_key)
    img.key.delete()
    self.response.write(json.dumps({"status" : "ok"}))
def post(self):
    """Handle an image upload for a specific date.

    Reads year/month/day from the request, validates the uploaded file's
    content type, stores the bytes as a UserImage entity and, if a Post
    already exists for that date, attaches the image filename to it.
    Responds with a JSON status object either way.
    """
    file_info = self.get_file_infos()[0]
    self.response.headers['Content-Type'] = "application/json"
    year = self.request.get('year')
    month = self.request.get('month')
    day = self.request.get('day')
    date = datetime.datetime(int(year), int(month), int(day))
    if file_info.content_type.lower() not in ('image/jpeg', 'image/jpg', 'image/png', 'image/gif', 'image/bmp'):
        return self.response.write(json.dumps({"status" : "error", "message" : "Unsupported content type: " + file_info.content_type}))
    # Renamed from `bytes` so the builtin is not shadowed.
    image_bytes = filestore.read(file_info.gs_object_name)
    # Existing names for this date are needed to generate a unique filename.
    existing_images = [u.filename for u in UserImage.query(UserImage.date == date).fetch()]
    filename = UserImage.create_image_name(file_info.filename, date, existing_images)
    img = UserImage()
    img.import_image(filename, file_info.filename, image_bytes, date, None)
    img.put()
    # The temporary upload blob is no longer needed once the image is stored.
    filestore.delete(file_info.gs_object_name)
    #If there's a post here we should add the image...
    post = Post.query(Post.date == date).get()
    if post:
        post.has_images = True
        if post.images is None:
            post.images = []
        post.images.append(filename)
        post.put()
    self.response.write(json.dumps({"status" : "ok", "filename" : filename}))
def post(self):
    """Task-queue handler that migrates stored images to GCS.

    Progress is recorded on the task entity referenced by the 'task'
    request parameter; any failure marks the task as failed and logs it.
    """
    task_key = ndb.Key(urlsafe=self.request.get('task'))
    task = task_key.get()
    task.update('Starting migration...', status='inprogress')
    logging.info('Starting migration ...')
    try:
        # NOTE(review): assumes an already-migrated image has filename equal
        # to its original_size_key -- TODO confirm against migrate_to_gcs.
        images = [i for i in UserImage.query() if i.filename != i.original_size_key]
        task.update('Migrating...', total_images=len(images))
        logging.info('Migrating %s images' % len(images))
        for img in images:
            img.migrate_to_gcs()
            task.migrated_images += 1
            # Persist progress every few images to limit datastore writes.
            if task.migrated_images % 3 == 0:
                task.update('Migrated %s/%s images' % (task.migrated_images, task.total_images))
                logging.info(task.message)
                task.put()
        task.update('Finished migrating images. Have a nice day :)', status='finished')
        logging.info(task.message)
    # 'as' form is valid on Python 2.6+ and required on Python 3.
    except Exception as ex:
        task.update('Failed to migrate: %s' % ex, status='failed')
        log_error('Failed migrate images', traceback.format_exc(6))
def post(self):
    """Task-queue handler that migrates stored images to GCS.

    Progress is recorded on the task entity referenced by the 'task'
    request parameter; any failure marks the task as failed and logs it.
    """
    task_key = ndb.Key(urlsafe=self.request.get('task'))
    task = task_key.get()
    task.update('Starting migration...', status='inprogress')
    logging.info('Starting migration ...')
    try:
        # NOTE(review): assumes an already-migrated image has filename equal
        # to its original_size_key -- TODO confirm against migrate_to_gcs.
        images = [i for i in UserImage.query() if i.filename != i.original_size_key]
        task.update('Migrating...', total_images=len(images))
        logging.info('Migrating %s images' % len(images))
        for img in images:
            img.migrate_to_gcs()
            task.migrated_images += 1
            # Persist progress every few images to limit datastore writes.
            if task.migrated_images % 3 == 0:
                task.update('Migrated %s/%s images' % (task.migrated_images, task.total_images))
                logging.info(task.message)
                task.put()
        task.update('Finished migrating images. Have a nice day :)', status='finished')
        logging.info(task.message)
    # 'as' form is valid on Python 2.6+ and required on Python 3.
    except Exception as ex:
        task.update('Failed to migrate: %s' % ex, status='failed')
        log_error('Failed migrate images', traceback.format_exc(6))
def get(self, filename):
    """Serve a stored image; full size if 'fullsize' is requested.

    Bug fix: the original read image.original_size_key/serving_size_key
    BEFORE checking whether the query found anything, so a missing image
    raised AttributeError on None instead of returning a 404.
    """
    image = UserImage.query(UserImage.filename == filename).get()
    if not image:
        self.error(404)
        return
    if self.request.get('fullsize'):
        key = image.original_size_key
    else:
        key = image.serving_size_key
    self.send_blob(filestore.get_blob_key(key))
def add_images_to_zip(self, export_task, archive):
    """Write every stored image into the zip archive, reporting progress.

    Progress messages are written to export_task as the images are added.
    """
    export_task.update('Fetching image information...')
    # fetch() already returns a list; the comprehension was a pointless copy.
    images = UserImage.query().order(UserImage.filename).fetch()
    export_task.update('Found %s images...' % len(images))
    for i, img in enumerate(images):
        img_data = filestore.read(img.original_size_key)
        # .jpg entries are renamed to .jpeg inside the archive.
        archive.writestr('/img_%s' % img.filename.replace('.jpg', '.jpeg'), img_data)
        # Update progress every five images to limit datastore writes.
        if i % 5 == 0:
            export_task.update('Added %s of %s images to zip... ' % (i+1,len(images)))
    export_task.update('Finished adding images...')
def delete_post(self, post):
    """Delete a post together with its images and raw emails for that date."""
    images = UserImage.query(UserImage.date == post.date).fetch()
    for img in images:
        filestore.delete(img.serving_size_key)
        filestore.delete(img.original_size_key)
        img.key.delete()
    emails = RawMail.query(RawMail.date == post.date).fetch()
    for email in emails:
        # Bug fix: ndb entities are deleted through their Key; the model
        # instance itself has no delete() method (the original called
        # email.delete() and would raise AttributeError).
        email.key.delete()
    post.key.delete()
    PostCounter.get().decrement(post.date.year, post.date.month)
    logging.info('Deleted %s images, %s emails and 1 post from %s' % (len(images), len(emails), post.date.strftime('%Y-%m-%d')))
def delete_post(self, post):
    """Remove a post plus every image and raw email stored for its date."""
    date = post.date
    images = UserImage.query(UserImage.date == date).fetch()
    emails = RawMail.query(RawMail.date == date).fetch()
    # Blobs first, then the datastore entities that reference them.
    for image in images:
        filestore.delete(image.serving_size_key)
        filestore.delete(image.original_size_key)
        image.key.delete()
    for mail in emails:
        mail.key.delete()
    post.key.delete()
    PostCounter.get().decrement(date.year, date.month)
    logging.info('Deleted %s images, %s emails and 1 post from %s' % (len(images), len(emails), date.strftime('%Y-%m-%d')))
def get(self):
    """Render the settings page, resolving the blobstore-migration flag first."""
    # Check whether the migration is done so we can decide whether the
    # Blobstore Migration section should still be shown.
    settings = Settings.get()
    if not settings.blobstore_migration_done:
        finished_task = MigrateTask.query(MigrateTask.status == 'finished').get()
        if finished_task:
            settings.blobstore_migration_done = True
            settings.put()
        elif not UserImage.query().get():
            # A fresh user with no images has nothing in the blobstore to
            # migrate, so the flag can be set immediately.
            settings.blobstore_migration_done = True
            settings.put()
    self._render(settings)
def add_images_to_zip(self, export_task, archive):
    """Write every stored image into the zip archive, reporting progress.

    Progress messages are written to export_task as the images are added.
    """
    export_task.update('Fetching image information...')
    # fetch() already returns a list; the comprehension was a pointless copy.
    images = UserImage.query().order(UserImage.filename).fetch()
    export_task.update('Found %s images...' % len(images))
    for i, img in enumerate(images):
        img_data = filestore.read(img.original_size_key)
        # .jpg entries are renamed to .jpeg inside the archive.
        archive.writestr('/img_%s' % img.filename.replace('.jpg', '.jpeg'), img_data)
        # Update progress every five images to limit datastore writes.
        if i % 5 == 0:
            export_task.update('Added %s of %s images to zip... ' % (i + 1, len(images)))
    export_task.update('Finished adding images...')
def post(self, kind, year, month, day):
    """Save or delete the post for the given date.

    `kind` mirrors the URL segment; the action actually taken is driven by
    the 'action' request parameter ('save' or 'delete').
    """
    date = datetime.datetime(int(year),int(month),int(day)).date()
    post = Post.query(Post.date == date).get()
    is_new = False
    if not post:
        # No post for this date yet; create one from the web form.
        post = Post(date=date, source='web',images=[])
        is_new = True
    post.text = self.request.get('text')
    save = self.request.get('action') == 'save'
    delete = self.request.get('action') == 'delete'
    if save and delete:
        # 'action' is a single parameter, so both being true should be impossible.
        raise Exception('Something weird happened...')
    if save:
        if is_new:
            # Attach any images already uploaded for this date to the new post.
            post.images = [u.filename for u in UserImage.query(UserImage.date == date).fetch()]
            post.images.sort()
            post.has_images = True
        post.put()
        if is_new:
            # Only count the post once, on first save.
            PostCounter.get().increment(post.date.year, post.date.month)
        self.redirect_to_date(post.date)
    elif delete:
        self.delete_post(post)
        # Prefer landing on the next post in the same month, if there is one.
        next_post = Post.query(Post.date > date).order(Post.date).get()
        if next_post and next_post.date.month == date.month:
            return self.redirect_to_date(next_post.date)
        #No way, we'll have to just redirect to the empty month
        self.redirect('/past/%s' % date.strftime('%Y-%m'))
    else:
        raise Exception('How the hell did we get here...?')
def get(self, kind, year, month, day):
    """Render the write/edit page for a date, redirecting to the proper mode."""
    date = datetime.datetime(int(year), int(month), int(day)).date()
    post = Post.query(Post.date == date).get()
    # A date that already has a post is edited, not written -- and vice versa.
    if kind == 'write' and post:
        return self.redirect('/edit/%s' % date.strftime('%Y-%m-%d'))
    if kind == 'edit' and not post:
        return self.redirect('/write/%s' % date.strftime('%Y-%m-%d'))
    data = {"date": date, "text": "", "page": "write", "kind": kind}
    if post:
        data["page"] = "edit"
        data["text"] = post.text
        data["images"] = post.images
    else:
        # No post yet: show whatever images were already uploaded for the date.
        data["images"] = [u.filename for u in UserImage.query(UserImage.date == date).fetch()]
    self.response.write(get_template('edit.html').render(data))
def get(self):
    """Back up all posts and images to the user's Dropbox.

    Posts are concatenated into a single MyLife.txt file; images that are
    not yet marked as backed up (or are missing from Dropbox) are uploaded
    one by one. Quota exhaustion is logged and reported, not re-raised, so
    the next run can continue where this one stopped.
    """
    images_total = 0
    images_backed_up = 0
    try:
        self.response.headers['Content-Type'] = 'text/plain'
        settings = Settings.get()
        if not settings.dropbox_access_token:
            self.log('No access token available, no backup will be performed.')
            return
        posts = [p for p in Post.query().order(Post.date).fetch()]
        self.log('Backing up %s posts to Dropbox' % len(posts))
        post_text = StringIO()
        for p in posts:
            post_text.write(p.date.strftime('%Y-%m-%d'))
            post_text.write('\r\n\r\n')
            # Normalize all line endings to CRLF for the exported text file.
            post_text.write(p.text.replace('\r\n', '\n').replace('\n', '\r\n').rstrip())
            post_text.write('\r\n\r\n')
        result = self.put_file(settings.dropbox_access_token, 'MyLife.txt', post_text.getvalue().encode('utf-8'))
        post_text.close()
        self.log('Backed up posts. Revision: %s' % result['rev'])
        self.log('Fetching Dropbox file list')
        files_in_dropbox = self.get_dropbox_filelist(settings.dropbox_access_token)
        self.log('Got %s files from Dropbox' % len(files_in_dropbox))
        self.log('Fetching images...')
        images = [i for i in UserImage.query().order(UserImage.date).fetch()]
        self.log('Total images in MyLife: %s' % len(images))
        not_backed_up = [i for i in images if not i.backed_up_in_dropbox]
        not_in_dropbox = [i for i in images if not i.filename in files_in_dropbox]
        self.log('\nFiles not backed up: \n\n' + '\n'.join([i.filename for i in not_backed_up]))
        self.log('\nFiles marked as backed up, but not in Dropbox: \n\n' + '\n'.join([i.filename for i in not_in_dropbox]))
        images = not_backed_up + not_in_dropbox
        images_total = len(images)
        self.log('Found %s images that need to be backed up in Dropbox' % images_total)
        for img in images:
            self.log('Backing up %s' % img.filename)
            # Renamed from `bytes` so the builtin is not shadowed.
            img_bytes = filestore.read(img.original_size_key)
            result = self.put_file(settings.dropbox_access_token, img.filename, img_bytes)
            self.log('Backed up %s. Revision: %s' % (img.filename, result['rev']))
            img.backed_up_in_dropbox = True
            img.put()
            images_backed_up += 1
        settings.dropbox_last_backup = datetime.datetime.now()
        settings.put()
        self.log('Finished backup successfully')
    # 'as' form is valid on Python 2.6+ and required on Python 3.
    except apiproxy_errors.OverQuotaError as ex:
        self.log(ex)
        log_error('Error backing up to Dropbox, quota exceeded',
                  'The backup operation did not complete because it ran out of quota. ' +
                  'The next time it runs it will continue backing up your posts and images.' +
                  '%s images out of %s were backed up before failing' % (images_backed_up, images_total))
def get(self):
    """Back up all posts and images to the user's Dropbox.

    Posts are concatenated into a single MyLife.txt file; images that are
    not yet marked as backed up (or are missing from Dropbox) are uploaded
    one by one. Quota exhaustion is logged and reported, not re-raised, so
    the next run can continue where this one stopped.
    """
    images_total = 0
    images_backed_up = 0
    try:
        self.response.headers['Content-Type'] = 'text/plain'
        settings = Settings.get()
        if not settings.dropbox_access_token:
            self.log('No access token available, no backup will be performed.')
            return
        posts = [p for p in Post.query().order(Post.date).fetch()]
        self.log('Backing up %s posts to Dropbox' % len(posts))
        post_text = StringIO()
        for p in posts:
            post_text.write(p.date.strftime('%Y-%m-%d'))
            post_text.write('\r\n\r\n')
            # Normalize all line endings to CRLF for the exported text file.
            post_text.write(p.text.replace('\r\n', '\n').replace('\n', '\r\n').rstrip())
            post_text.write('\r\n\r\n')
        result = self.put_file(settings.dropbox_access_token, 'MyLife.txt', post_text.getvalue().encode('utf-8'))
        post_text.close()
        self.log('Backed up posts. Revision: %s' % result['rev'])
        self.log('Fetching Dropbox file list')
        files_in_dropbox = self.get_dropbox_filelist(settings.dropbox_access_token)
        self.log('Got %s files from Dropbox' % len(files_in_dropbox))
        self.log('Fetching images...')
        images = [i for i in UserImage.query().order(UserImage.date).fetch()]
        self.log('Total images in MyLife: %s' % len(images))
        not_backed_up = [i for i in images if not i.backed_up_in_dropbox]
        not_in_dropbox = [i for i in images if not i.filename in files_in_dropbox]
        self.log('\nFiles not backed up: \n\n' + '\n'.join([i.filename for i in not_backed_up]))
        self.log('\nFiles marked as backed up, but not in Dropbox: \n\n' + '\n'.join([i.filename for i in not_in_dropbox]))
        images = not_backed_up + not_in_dropbox
        images_total = len(images)
        self.log('Found %s images that need to be backed up in Dropbox' % images_total)
        for img in images:
            self.log('Backing up %s' % img.filename)
            # Renamed from `bytes` so the builtin is not shadowed.
            img_bytes = filestore.read(img.original_size_key)
            result = self.put_file(settings.dropbox_access_token, img.filename, img_bytes)
            self.log('Backed up %s. Revision: %s' % (img.filename, result['rev']))
            img.backed_up_in_dropbox = True
            img.put()
            images_backed_up += 1
        settings.dropbox_last_backup = datetime.datetime.now()
        settings.put()
        self.log('Finished backup successfully')
    # 'as' form is valid on Python 2.6+ and required on Python 3.
    except apiproxy_errors.OverQuotaError as ex:
        self.log(ex)
        log_error('Error backing up to Dropbox, quota exceeded',
                  'The backup operation did not complete because it ran out of quota. ' +
                  'The next time it runs it will continue backing up your posts and images.' +
                  '%s images out of %s were backed up before failing' % (images_backed_up, images_total))