def post(self):
    """Handle an image upload for a specific date.

    Reads year/month/day from the request, validates the uploaded file's
    content type, imports the bytes from the blobstore staging object into
    a new UserImage, and attaches the generated filename to the Post for
    that date if one exists.  Responds with a JSON status object.
    """
    file_info = self.get_file_infos()[0]
    self.response.headers['Content-Type'] = "application/json"

    year = self.request.get('year')
    month = self.request.get('month')
    day = self.request.get('day')
    date = datetime.datetime(int(year), int(month), int(day))

    # Only raster image types we know how to process are accepted.
    if file_info.content_type.lower() not in ('image/jpeg', 'image/jpg', 'image/png', 'image/gif', 'image/bmp'):
        return self.response.write(json.dumps({"status" : "error", "message" : "Unsupported content type: " + file_info.content_type}))

    # Renamed from 'bytes' to avoid shadowing the builtin.
    image_data = filestore.read(file_info.gs_object_name)

    # Names already used on this date, so create_image_name can pick a unique one.
    existing_images = [u.filename for u in UserImage.query(UserImage.date == date).fetch()]
    filename = UserImage.create_image_name(file_info.filename, date, existing_images)

    img = UserImage()
    img.import_image(filename, file_info.filename, image_data, date, None)
    img.put()

    # The staged upload object is no longer needed once imported.
    filestore.delete(file_info.gs_object_name)

    # If there's a post here we should add the image...
    post = Post.query(Post.date == date).get()
    if post:
        post.has_images = True
        if post.images is None:
            post.images = []
        post.images.append(filename)
        post.put()

    self.response.write(json.dumps({"status" : "ok", "filename" : filename}))
def post(self):
    """Upload handler: stores one image for a given date and links it to that date's Post.

    Replies with a JSON object describing success or the rejection reason.
    """
    self.response.headers['Content-Type'] = "application/json"
    file_info = self.get_file_infos()[0]

    # Build the target date from the individual request fields.
    date = datetime.datetime(int(self.request.get('year')),
                             int(self.request.get('month')),
                             int(self.request.get('day')))

    accepted_types = ('image/jpeg', 'image/jpg', 'image/png', 'image/gif', 'image/bmp')
    if file_info.content_type.lower() not in accepted_types:
        return self.response.write(json.dumps({"status" : "error", "message" : "Unsupported content type: " + file_info.content_type}))

    image_data = filestore.read(file_info.gs_object_name)

    # Choose a filename that does not collide with images already stored for this date.
    taken_names = [u.filename for u in UserImage.query(UserImage.date == date).fetch()]
    filename = UserImage.create_image_name(file_info.filename, date, taken_names)

    img = UserImage()
    img.import_image(filename, file_info.filename, image_data, date)
    img.put()

    # Drop the staging object now that the image has been imported.
    filestore.delete(file_info.gs_object_name)

    # If there's a post here we should add the image...
    post = Post.query(Post.date == date).get()
    if post:
        post.has_images = True
        if post.images is None:
            post.images = []
        post.images.append(filename)
        post.put()

    self.response.write(json.dumps({"status" : "ok", "filename" : filename}))
def note_page_GET():
    """Render a single note page requested via the GET 'note' parameter."""
    # Input via GET requesting a specific note
    requested_note = form.getvalue('note')
    contents = filestore.read(requested_note)
    page_url = base_url + "/" + requested_note
    renderer(contents, page_url, requested_note)
def read_zip_file(self, uploaded_file):
    """Parse an uploaded backup zip into posts and images.

    Returns (posts, images): posts is a list of (datetime.date, text)
    tuples; images maps image filenames to raw bytes.

    Raises Exception when the archive holds more than one text file,
    no text file at all, or files of an unrecognised type.
    """
    zip_data = filestore.read(uploaded_file)
    archive = zipfile.ZipFile(StringIO(zip_data))  # renamed from 'zip' to avoid shadowing the builtin
    images = {}

    # macOS zips carry metadata under __MACOSX; ignore those entries entirely.
    good_names = [n for n in archive.namelist() if not '__MACOSX' in n]
    text_files = [n for n in good_names if n.endswith('.txt')]
    image_files = [n for n in good_names if re.search(r'\.(jpe?g|bmp|png|gif|tiff)$', n, re.I)]

    if len(text_files) > 1:
        raise Exception('More than one possible text files in zip file: %s' % ','.join(text_files))
    if not text_files:
        # Bug fix: this case previously crashed with a bare IndexError below.
        raise Exception('No text file found in zip file')
    other_files = [n for n in good_names if not n in text_files + image_files]
    if len(other_files) > 0:
        raise Exception('Got files that we don\'t know how to handle: %s' % ','.join(other_files))

    text = archive.read(text_files[0])
    for name in image_files:
        images[re.sub('^/', '', name)] = archive.read(name)

    # Normalize newlines, then split into per-day posts.  A post starts at a
    # line containing only YYYY-MM-DD with blank lines on both sides.
    text = text.replace('\r\n', '\n').strip()
    lines = text.split('\n')

    posts = []
    prev_line_empty = True
    current_date, current_text = None, ''
    for i, line in enumerate(lines):
        next_line_empty = i == len(lines) - 1 or lines[i + 1] == ''
        m = re.match(r'^(\d\d\d\d)-(\d\d)-(\d\d)$', line)
        if m and prev_line_empty and next_line_empty:
            if current_date:
                posts.append((current_date, current_text.rstrip()))
            current_text = ''
            current_date = datetime.date(int(m.group(1)), int(m.group(2)), int(m.group(3)))
        else:
            current_text += line + '\r\n'
        prev_line_empty = line == ''

    # Emit the trailing post only when a date header was actually seen.
    if current_text.strip() and current_date:
        posts.append((current_date, current_text.strip()))
    return posts, images
def add_images_to_zip(self, export_task, archive):
    """Append every stored UserImage to the export zip archive.

    export_task -- progress reporter exposing update(message)
    archive -- open zipfile.ZipFile to write entries into
    """
    export_task.update('Fetching image information...')
    # list(...) instead of a copying comprehension; same result, clearer intent.
    images = list(UserImage.query().order(UserImage.filename).fetch())
    export_task.update('Found %s images...' % len(images))
    for i, img in enumerate(images):
        img_data = filestore.read(img.original_size_key)
        # NOTE(review): replace() hits every '.jpg' substring, not just the
        # extension -- assumes it appears at most once in a filename.
        archive.writestr('/img_%s' % img.filename.replace('.jpg', '.jpeg'), img_data)
        # Report progress only every 5th image to keep task updates cheap.
        if i % 5 == 0:
            export_task.update('Added %s of %s images to zip... ' % (i+1, len(images)))
    export_task.update('Finished adding images...')
def add_images_to_zip(self, export_task, archive):
    """Write every user image into the given zip archive, reporting progress."""
    export_task.update('Fetching image information...')
    all_images = [i for i in UserImage.query().order(UserImage.filename).fetch()]
    total = len(all_images)
    export_task.update('Found %s images...' % total)
    for index, image in enumerate(all_images):
        payload = filestore.read(image.original_size_key)
        entry_name = '/img_%s' % image.filename.replace('.jpg', '.jpeg')
        archive.writestr(entry_name, payload)
        # Progress update on every 5th image.
        if index % 5 == 0:
            export_task.update('Added %s of %s images to zip... ' % (index + 1, total))
    export_task.update('Finished adding images...')
def read_zip_file(self, uploaded_file):
    """Parse an uploaded backup zip into posts and images.

    Returns (posts, images): posts is a list of (datetime.date, text)
    tuples; images maps image filenames to raw bytes.

    Raises Exception when the archive contains no text file.
    """
    zip_data = filestore.read(uploaded_file)
    archive = zipfile.ZipFile(StringIO(zip_data))  # renamed from 'zip' to avoid shadowing the builtin
    text = None
    images = {}
    for name in archive.namelist():
        # Bug fix: skip macOS metadata entries (consistent with the other
        # importer); previously they were treated as real text/image files.
        if '__MACOSX' in name:
            continue
        if name.endswith('.txt'):
            text = archive.read(name)
        else:
            images[re.sub('^/', '', name)] = archive.read(name)

    if text is None:
        # Bug fix: previously fell through to an AttributeError on None.
        raise Exception('No text file found in zip file')

    # Normalize newlines, then split into per-day posts.  A post starts at a
    # line containing only YYYY-MM-DD with blank lines on both sides.
    text = text.replace('\r\n', '\n').strip()
    lines = text.split('\n')

    posts = []
    prev_line_empty = True
    current_date, current_text = None, ''
    for i, line in enumerate(lines):
        next_line_empty = i == len(lines) - 1 or lines[i + 1] == ''
        m = re.match(r'^(\d\d\d\d)-(\d\d)-(\d\d)$', line)
        if m and prev_line_empty and next_line_empty:
            if current_date:
                posts.append((current_date, current_text.rstrip()))
            current_text = ''
            current_date = datetime.date(int(m.group(1)), int(m.group(2)), int(m.group(3)))
        else:
            current_text += line + '\r\n'
        prev_line_empty = line == ''

    # Bug fix: only emit the trailing post when a date header was seen;
    # the old code could append a (None, text) tuple.
    if current_text.strip() and current_date:
        posts.append((current_date, current_text.rstrip()))
    return posts, images
def read_zip_file(self, uploaded_file):
    """Split an uploaded zip backup into (posts, images).

    posts is a list of (datetime.date, text) tuples; images maps image
    names to raw bytes.  Raises when the archive holds more than one text
    file or any file of a type we do not recognise.
    """
    raw = filestore.read(uploaded_file)
    archive = zipfile.ZipFile(StringIO(raw))
    images = {}

    # Ignore macOS metadata entries entirely.
    usable = [entry for entry in archive.namelist() if '__MACOSX' not in entry]
    texts = [entry for entry in usable if entry.endswith('.txt') and '__MACOSX' not in entry]
    pictures = [entry for entry in usable if re.search(r'\.(jpe?g|bmp|png|gif|tiff)$', entry, re.I)]

    if len(texts) > 1:
        raise Exception('More than one possible text files in zip file: %s' % ','.join(texts))
    leftovers = [entry for entry in usable if entry not in texts + pictures]
    if leftovers:
        raise Exception('Got files that we don\'t know how to handle: %s' % ','.join(leftovers))

    text = archive.read(texts[0])
    for entry in pictures:
        images[re.sub('^/', '', entry)] = archive.read(entry)

    # Posts are delimited by a line holding only YYYY-MM-DD, surrounded by
    # blank lines (or the ends of the file).
    lines = text.replace('\r\n', '\n').strip().split('\n')
    posts = []
    prev_blank = True
    current_date, current_text = None, ''
    for idx, line in enumerate(lines):
        next_blank = idx == len(lines) - 1 or lines[idx + 1] == ''
        header = re.match(r'^(\d\d\d\d)-(\d\d)-(\d\d)$', line)
        if header and prev_blank and next_blank:
            if current_date:
                posts.append((current_date, current_text.rstrip()))
            current_text = ''
            current_date = datetime.date(int(header.group(1)),
                                         int(header.group(2)),
                                         int(header.group(3)))
        else:
            current_text += line + '\r\n'
        prev_blank = line == ''

    if current_text.strip() and current_date:
        posts.append((current_date, current_text.strip()))
    return posts, images
def landing_page():
    """Render the site's landing ("about") page at the base URL."""
    contents = filestore.read(about)
    page_url = base_url
    renderer(contents, page_url)
def get(self): images_total = 0 images_backed_up = 0 try: self.response.headers['Content-Type'] = 'text/plain' settings = Settings.get() if not settings.dropbox_access_token: self.log('No access token available, no backup will be performed.') return posts = [p for p in Post.query().order(Post.date).fetch()] self.log('Backing up %s posts to Dropbox' % len(posts)) post_text = StringIO() for p in posts: post_text.write(p.date.strftime('%Y-%m-%d')) post_text.write('\r\n\r\n') post_text.write(p.text.replace('\r\n', '\n').replace('\n', '\r\n').rstrip()) post_text.write('\r\n\r\n') result = self.put_file(settings.dropbox_access_token, 'MyLife.txt', post_text.getvalue().encode('utf-8')) post_text.close() self.log('Backed up posts. Revision: %s' % result['rev']) self.log('Fetching Dropbox file list') files_in_dropbox = self.get_dropbox_filelist(settings.dropbox_access_token) self.log('Got %s files from Dropbox' % len(files_in_dropbox)) self.log('Fetching images...') images = [i for i in UserImage.query().order(UserImage.date).fetch()] self.log('Total images in MyLife: %s' % len(images)) not_backed_up = [i for i in images if not i.backed_up_in_dropbox] not_in_dropbox = [i for i in images if not i.filename in files_in_dropbox] self.log('\nFiles not backed up: \n\n' + '\n'.join([i.filename for i in not_backed_up])) self.log('\nFiles marked as backed up, but not in Dropbox: \n\n' + '\n'.join([i.filename for i in not_in_dropbox])) images = not_backed_up + not_in_dropbox images_total = len(images) self.log('Found %s images that need to be backed up in Dropbox' % images_total) for img in images: self.log('Backing up %s' % img.filename) bytes = filestore.read(img.original_size_key) result = self.put_file(settings.dropbox_access_token, img.filename, bytes) self.log('Backed up %s. 
Revision: %s' % (img.filename, result['rev'])) img.backed_up_in_dropbox = True img.put() images_backed_up += 1 settings.dropbox_last_backup = datetime.datetime.now() settings.put() self.log('Finished backup successfully') except apiproxy_errors.OverQuotaError, ex: self.log(ex) log_error('Error backing up to Dropbox, quota exceeded', 'The backup operation did not complete because it ran out of quota. ' + 'The next time it runs it will continue backing up your posts and images.' + '%s images out of %s were backed up before failing' % (images_backed_up, images_total))
def get(self):
    """Back up all posts and images to Dropbox.

    Serializes every post into one MyLife.txt upload, then uploads each
    image that is either not flagged as backed up or missing from the
    Dropbox file list.  Logs progress as plain text; an over-quota error
    aborts the run and reports how many images were uploaded before it.
    """
    images_total = 0
    images_backed_up = 0
    try:
        self.response.headers['Content-Type'] = 'text/plain'
        settings = Settings.get()
        if not settings.dropbox_access_token:
            # Dropbox not linked: nothing to do.
            self.log(
                'No access token available, no backup will be performed.')
            return
        posts = [p for p in Post.query().order(Post.date).fetch()]
        self.log('Backing up %s posts to Dropbox' % len(posts))
        # Serialize posts as "YYYY-MM-DD\r\n\r\n<text>\r\n\r\n" records with
        # CRLF line endings throughout.
        post_text = StringIO()
        for p in posts:
            post_text.write(p.date.strftime('%Y-%m-%d'))
            post_text.write('\r\n\r\n')
            post_text.write(
                p.text.replace('\r\n', '\n').replace('\n', '\r\n').rstrip())
            post_text.write('\r\n\r\n')
        result = self.put_file(settings.dropbox_access_token, 'MyLife.txt',
                               post_text.getvalue().encode('utf-8'))
        post_text.close()
        self.log('Backed up posts. Revision: %s' % result['rev'])
        self.log('Fetching Dropbox file list')
        files_in_dropbox = self.get_dropbox_filelist(
            settings.dropbox_access_token)
        self.log('Got %s files from Dropbox' % len(files_in_dropbox))
        self.log('Fetching images...')
        images = [
            i for i in UserImage.query().order(UserImage.date).fetch()
        ]
        self.log('Total images in MyLife: %s' % len(images))
        not_backed_up = [i for i in images if not i.backed_up_in_dropbox]
        not_in_dropbox = [
            i for i in images if not i.filename in files_in_dropbox
        ]
        self.log('\nFiles not backed up: \n\n' +
                 '\n'.join([i.filename for i in not_backed_up]))
        self.log('\nFiles marked as backed up, but not in Dropbox: \n\n' +
                 '\n'.join([i.filename for i in not_in_dropbox]))
        # NOTE(review): an image can satisfy both conditions and appear twice
        # in this concatenation, which would upload it twice -- confirm
        # whether de-duplication is needed here.
        images = not_backed_up + not_in_dropbox
        images_total = len(images)
        self.log('Found %s images that need to be backed up in Dropbox' %
                 images_total)
        for img in images:
            self.log('Backing up %s' % img.filename)
            bytes = filestore.read(img.original_size_key)
            result = self.put_file(settings.dropbox_access_token,
                                   img.filename, bytes)
            self.log('Backed up %s. \nRevision: %s' % (img.filename, result['rev']))
            # Mark each image as soon as it is uploaded so a later
            # over-quota failure does not lose this progress.
            img.backed_up_in_dropbox = True
            img.put()
            images_backed_up += 1
        settings.dropbox_last_backup = datetime.datetime.now()
        settings.put()
        self.log('Finished backup successfully')
    except apiproxy_errors.OverQuotaError, ex:
        # Quota exhausted mid-run: log how far we got; the next run resumes
        # from the per-image backed_up_in_dropbox flags.
        self.log(ex)
        log_error(
            'Error backing up to Dropbox, quota exceeded',
            'The backup operation did not complete because it ran out of quota. '
            +
            'The next time it runs it will continue backing up your posts and images.'
            + '%s images out of %s were backed up before failing' %
            (images_backed_up, images_total))