def delete_post(self, post):
    """Delete a post plus every image and raw email stored for its date.

    For each UserImage both filestore renditions are removed before the
    datastore entity, then the RawMail entities, then the Post itself,
    and finally the per-month post counter is decremented.
    """
    images = UserImage.query(UserImage.date == post.date).fetch()
    for img in images:
        # Remove both stored renditions from the filestore before
        # deleting the datastore record that references them.
        filestore.delete(img.serving_size_key)
        filestore.delete(img.original_size_key)
        img.key.delete()
    emails = RawMail.query(RawMail.date == post.date).fetch()
    for email in emails:
        # BUG FIX: ndb entities are deleted through their key; the entity
        # itself has no delete() method.  The original `email.delete()`
        # raised AttributeError (the sibling delete_post does it right).
        email.key.delete()
    post.key.delete()
    PostCounter.get().decrement(post.date.year, post.date.month)
    logging.info('Deleted %s images, %s emails and 1 post from %s' %
                 (len(images), len(emails), post.date.strftime('%Y-%m-%d')))
def get(self, year, month, day, type):
    """Render the front page post relative to a date.

    ``type`` is 'prev', 'next' or 'random'.  Renders with post=None when
    nothing matches (e.g. navigating past either end of the archive).
    """
    date = datetime.date(int(year), int(month), int(day))
    min_date, max_date = Post.min_date(), Post.max_date()
    posts = []  # default so an unexpected `type` cannot raise NameError
    if type == 'prev':
        posts = Post.query(Post.date < date).order(-Post.date).fetch(1)
    elif type == 'next':
        posts = Post.query(Post.date > date).order(Post.date).fetch(1)
    elif type == 'random':
        count = PostCounter.get().count
        # BUG FIX: randint(0, -1) raises ValueError on an empty archive.
        if count:
            posts = Post.query().fetch(1, offset=random.randint(0, count - 1))
    post = posts[0] if posts else None
    self.response.write(
        get_template('frontpagepost.html').render({
            "page": "frontpage",
            "post": post,
            # BUG FIX: post can be None (no neighbouring post exists);
            # the original dereferenced post.date unconditionally and
            # raised AttributeError.
            "is_newest": post is not None and post.date == max_date,
            "is_oldest": post is not None and post.date == min_date
        }))
def delete_post(self, post):
    """Remove a post together with its images and raw source emails."""
    # Blow away the stored image blobs first, then their datastore rows.
    images = UserImage.query(UserImage.date == post.date).fetch()
    for image in images:
        filestore.delete(image.serving_size_key)
        filestore.delete(image.original_size_key)
        image.key.delete()
    # Raw emails live only in the datastore.
    emails = RawMail.query(RawMail.date == post.date).fetch()
    for mail in emails:
        mail.key.delete()
    # Finally the post itself, plus the monthly counter bookkeeping.
    post.key.delete()
    PostCounter.get().decrement(post.date.year, post.date.month)
    logging.info('Deleted %s images, %s emails and 1 post from %s' %
                 (len(images), len(emails), post.date.strftime('%Y-%m-%d')))
def post(self, kind, year, month, day):
    """Handle the web editor form: save or delete the post for a date."""
    date = datetime.datetime(int(year), int(month), int(day)).date()
    post = Post.query(Post.date == date).get()
    is_new = post is None
    if is_new:
        post = Post(date=date, source='web', images=[])
    post.text = self.request.get('text')
    save = self.request.get('action') == 'save'
    delete = self.request.get('action') == 'delete'
    # Both at once should be impossible coming from the form.
    if save and delete:
        raise Exception('Something weird happened...')
    if save:
        if is_new:
            # First save for this date: attach any already-uploaded images.
            uploaded = UserImage.query(UserImage.date == date).fetch()
            post.images = sorted(u.filename for u in uploaded)
            post.has_images = True
        post.put()
        if is_new:
            PostCounter.get().increment(post.date.year, post.date.month)
        self.redirect_to_date(post.date)
    elif delete:
        self.delete_post(post)
        # Prefer landing on the next post within the same month.
        next_post = Post.query(Post.date > date).order(Post.date).get()
        if next_post and next_post.date.month == date.month:
            return self.redirect_to_date(next_post.date)
        #No way, we'll have to just redirect to the empty month
        self.redirect('/past/%s' % date.strftime('%Y-%m'))
    else:
        raise Exception('How the hell did we get here...?')
def get_old_post(self, today):
    """Find a memorable older post to show alongside today's entry.

    Tries, in order: one year ago, one month ago (same day-of-month,
    when last month had that day), one week ago, and finally a random
    post.  Returns (post, type) where type is 'year'/'month'/'week'/
    'random', or (None, None) when nothing was found.
    """
    #Lets try to put in an old post...
    old_post = None
    old_type = ''
    #First try a year ago...
    if today.day == 29 and today.month == 2:
        # Feb 29 has no counterpart one year back.
        old_post = None
    else:
        year_ago = datetime.date(today.year - 1, today.month, today.day)
        old_post = Post.query(Post.date == year_ago).get()
        old_type = 'year'
    if not old_post:
        #lets try a month ago...
        last_day_of_last_month = datetime.date(today.year, today.month, 1) + datetime.timedelta(days=-1)
        # Only when last month actually had this day-of-month.
        if last_day_of_last_month.day >= today.day:
            month_ago = datetime.date(last_day_of_last_month.year, last_day_of_last_month.month, today.day)
            old_post = Post.query(Post.date == month_ago).get()
            old_type = 'month'
    if not old_post:
        # lets try a week ago
        week_ago = today + datetime.timedelta(days=-7)
        old_post = Post.query(Post.date == week_ago).get()
        old_type = 'week'
    if not old_post:
        # lets try a completely random post
        count = PostCounter.get().count
        # BUG FIX: on an empty archive randint(0, -1) raised ValueError
        # and old_list[0] raised IndexError; guard both.
        if count > 0:
            old_list = Post.query().fetch(1, offset=random.randint(0, count - 1))
            if old_list:
                old_post = old_list[0]
                old_type = 'random'
    if not old_post:
        logging.info('Looked for but didnt find old_post %s' % (old_post))
        return None, None
    else:
        logging.info('Found and returning old_post')
        return old_post, old_type
def get(self, year, month, day, type):
    """Serve the single-post front page navigated relative to ``date``.

    ``type`` selects the previous, next or a random post.  When no post
    matches, the template is rendered with post=None.
    """
    date = datetime.date(int(year), int(month), int(day))
    min_date, max_date = Post.min_date(), Post.max_date()
    # Initialise so an unrecognised `type` falls through to post=None
    # instead of raising NameError.
    posts = []
    if type == 'prev':
        posts = Post.query(Post.date < date).order(-Post.date).fetch(1)
    elif type == 'next':
        posts = Post.query(Post.date > date).order(Post.date).fetch(1)
    elif type == 'random':
        count = PostCounter.get().count
        # BUG FIX: avoid randint(0, -1) when there are no posts at all.
        if count:
            posts = Post.query().fetch(1, offset=random.randint(0, count - 1))
    post = None
    if posts:
        post = posts[0]
    self.response.write(get_template('frontpagepost.html').render(
        {
            "page": "frontpage",
            "post": post,
            # BUG FIX: the original evaluated post.date even when post
            # was None (stepping past either end of the archive).
            "is_newest": post is not None and post.date == max_date,
            "is_oldest": post is not None and post.date == min_date
        }))
def receive(self, mail_message):
    """Ingest an incoming email and turn it into (part of) a Post.

    Looks up the posting slug embedded in the address, archives the raw
    mail, extracts a plain-text body (preferring stripped HTML), trims
    reply cruft, appends to any existing Post for that date, and stores
    attachments.  Any failure is logged via log_error rather than raised.
    """
    try:
        id = self.get_id(mail_message)
        if not id:
            # No recognisable slug id in the address -- silently ignore.
            return
        slug = Slug.query(Slug.slug == id).get()
        if not slug:
            log_error('Invalid slug', 'Found no slug for id %s', id)
            return
        body_text, body_html = self.get_bodies(mail_message)
        # Always archive the raw mail, even if parsing below fails.
        raw_mail = RawMail(
            subject=mail_message.subject,
            sender=mail_message.sender,
            slug=id,
            date=slug.date,
            text=body_text,
            html=body_html
        )
        raw_mail.put()
        post = Post.query(Post.date == slug.date).get()
        is_new_post = post is None
        if is_new_post:
            post = Post(
                date=slug.date,
                source='email',
                has_images=False
            )
        #Now let's try parsing it into a good post...
        if body_html:
            post_text = strip_html(body_html)
            #Prefer html because then we don't get linebreak issues
            logging.info('Parsing post from html')
        else:
            post_text = body_text
            logging.info('Parsing post from plain text')
        if not post_text:
            raise Exception('No plain text body in email, html body can\'t be parsed yet!')
        try:
            # Cut everything from the quoted reply-to address onwards,
            # then drop the (partial) line that contained it.
            email_index = post_text.index('post+%s@' % id)
            post_text = post_text[:email_index]
            newline_index = post_text.rstrip().rindex('\n')
            post_text = post_text[:newline_index].strip()
        except:
            # Best-effort cleanup; keep whatever text we have.
            logging.info('Failed to remove all crap from post')
        #Strip 'Sent from my iPhone' if it's there. There are probably endless other Sent from
        #we could handle, but hey, I have an iPhone so that's the one I care about...
        post_text = re.sub('\s*Sent from my iPhone\s*$', '', post_text)
        post_text = post_text.rstrip()
        if post.text:
            # A post already exists for this date: append, separated by
            # the standard separator.  (`seperator` is the model's
            # spelling -- do not "fix" it here.)
            post.text = post.text + '\r\n\r\n' + Post.seperator + '\r\n\r\n' + post_text
        else:
            post.text = post_text
        self.process_attachments(mail_message, post)
        post.put()
        if is_new_post:
            # Only bump the monthly counter for genuinely new posts.
            counter = PostCounter.get()
            counter.increment(post.date.year, post.date.month)
    except:
        log_error('Failed to parse incoming email', traceback.format_exc(6))
def receive(self, mail_message):
    """Process an inbound posting email into a Post for the slug's date.

    Validates the slug id, stores the raw mail, derives plain text from
    the HTML body when available, strips reply/signature cruft, merges
    with an existing Post for the same date, and saves attachments.
    Errors are reported through log_error instead of propagating.
    """
    try:
        id = self.get_id(mail_message)
        if not id:
            # Address carried no usable slug id.
            return
        slug = Slug.query(Slug.slug == id).get()
        if not slug:
            log_error('Invalid slug', 'Found no slug for id %s', id)
            return
        body_text, body_html = self.get_bodies(mail_message)
        # Keep the raw mail regardless of whether parsing succeeds.
        raw_mail = RawMail(subject=mail_message.subject,
                           sender=mail_message.sender,
                           slug=id,
                           date=slug.date,
                           text=body_text,
                           html=body_html)
        raw_mail.put()
        post = Post.query(Post.date == slug.date).get()
        is_new_post = post is None
        if is_new_post:
            post = Post(date=slug.date, source='email', has_images=False)
        #Now let's try parsing it into a good post...
        if body_html:
            post_text = strip_html(body_html)
            #Prefer html because then we don't get linebreak issues
            logging.info('Parsing post from html')
        else:
            post_text = body_text
            logging.info('Parsing post from plain text')
        if not post_text:
            raise Exception('No plain text body in email, html body can\'t be parsed yet!')
        try:
            # Trim the quoted reply-address and the line it sat on.
            email_index = post_text.index('post+%s@' % id)
            post_text = post_text[:email_index]
            newline_index = post_text.rstrip().rindex('\n')
            post_text = post_text[:newline_index].strip()
        except:
            # Cleanup is best-effort only.
            logging.info('Failed to remove all crap from post')
        #Strip 'Sent from my iPhone' if it's there. There are probably endless other Sent from
        #we could handle, but hey, I have an iPhone so that's the one I care about...
        post_text = re.sub('\s*Sent from my iPhone\s*$', '', post_text)
        post_text = post_text.rstrip()
        if post.text:
            # Append to an existing same-day post using the model's
            # separator (spelled `seperator` in the model).
            post.text = post.text + '\r\n\r\n' + Post.seperator + '\r\n\r\n' + post_text
        else:
            post.text = post_text
        self.process_attachments(mail_message, post)
        post.put()
        if is_new_post:
            # Counter is only incremented for brand-new posts.
            counter = PostCounter.get()
            counter.increment(post.date.year, post.date.month)
    except:
        log_error('Failed to parse incoming email', traceback.format_exc(6))
def post(self):
    """Run an OhLife zip import task: create Posts and UserImages.

    Loads the ImportTask referenced by the request, unpacks its uploaded
    zip, creates a Post per entry (attaching matching images by date),
    keeps progress on the task entity, and maintains the PostCounter.
    On failure the uploaded file is cleaned up and the task is marked
    failed.  Python 2 syntax (`except Exception, ex`).
    """
    import_task_key = ndb.Key(urlsafe=self.request.get('task'))
    import_task = import_task_key.get()
    import_task.update('Unpacking zip file...', status='inprogress')
    logging.info('Starting import ...')
    counter = PostCounter.get()
    try:
        posts, images = self.read_zip_file(import_task.uploaded_file)
        import_task.update('Importing...', total_photos=len(images), total_posts=len(posts))
        logging.info('Importing %s posts, %s images' % (len(posts), len(images)))
        # Drop entries that already exist (per filter_posts).
        posts = self.filter_posts(posts)
        for date, text in posts:
            str_date = date.strftime('%Y-%m-%d')
            p = Post(date=date, source='ohlife', text=text.decode('utf-8'))
            p.images = []
            p.has_images = False
            # Image archive keys embed the date, so substring-match on it.
            post_images = [(k, images[k]) for k in images.keys() if str_date in k]
            if len(post_images):
                logging.info('Importing %s images for date %s' % (len(post_images), str_date))
                p.images = []
                p.has_images = True
                for name, bytes in post_images:
                    user_image = UserImage()
                    # Normalise archive names to the stored filename form.
                    img_name = name.replace('img_', '').replace('.jpeg', '.jpg')
                    user_image.import_image(img_name, name, bytes, date)
                    p.images.append(img_name)
                    import_task.imported_photos += 1
                    user_image.put()
            p.put()
            # Defer the counter put (False) and flush it periodically below.
            counter.increment(p.date.year, p.date.month, False)
            import_task.imported_posts += 1
            if import_task.imported_posts % 10 == 0:
                # Progress checkpoint every 10 posts.
                import_task.update('Imported %s/%s post, %s/%s photos...' % (import_task.imported_posts, import_task.total_posts, import_task.imported_photos, import_task.total_photos))
                logging.info(import_task.message)
                counter.put()
        counter.put()
        skipped_posts = import_task.total_posts - import_task.imported_posts
        skipped_photos = import_task.total_photos - import_task.imported_photos
        msg = 'Imported %s posts and %s photos.' % (import_task.imported_posts, import_task.imported_photos)
        if skipped_posts or skipped_photos:
            msg += ' %s posts and %s photos already existed and were skipped.' % (skipped_posts, skipped_photos)
        import_task.update(msg, status='finished')
        logging.info(import_task.message)
        filestore.delete(import_task.uploaded_file)
    except Exception, ex:
        # Best-effort cleanup; never mask the original failure.
        try:
            filestore.delete(import_task.uploaded_file)
        except:
            pass
        try:
            counter.put()
        except:
            pass
        import_task.update('Failed to import: %s' % ex, status='failed')
        log_error('Failed import', traceback.format_exc(6))
def get(self, year, month):
    """Render the archive ('past') page for a single month of posts."""
    Settings.get() #Force email address update...
    now = datetime.datetime.now()
    if year:
        year, month = int(year), int(month)
    else:
        # No month in the URL: default to the newest post's month, or
        # the current month when the journal is empty.
        last_post = Post.query().order(-Post.date).get()
        if last_post:
            year, month = last_post.date.year, last_post.date.month
        else:
            year, month = now.year, now.month
    # Half-open range [first of this month, first of next month).
    from_date = datetime.date(year, month, 1)
    if month == 12:
        to_date = datetime.date(year + 1, 1, 1)
    else:
        to_date = datetime.date(year, month + 1, 1)
    posts = list(Post.query(
        ndb.AND(Post.date >= from_date, Post.date < to_date)).order(
            -Post.date).fetch())
    month_name = from_date.strftime('%B %Y')
    #Get month list
    months = PostCounter.get().months[:]
    # Chronological order: by year, then by month within the year.
    months.sort(lambda a, b: cmp(a.year, b.year) or cmp(a.month, b.month))
    archive = []
    next_link, prev_link = None, None
    for i, m in enumerate(months):
        first = datetime.date(m.year, m.month, 1)
        descr = '%s, %s posts' % (first.strftime('%B %Y'), m.count)
        selected = m.year == year and m.month == month
        archive.append((first.strftime('%Y-%m'), descr, selected))
        if selected:
            # Neighbouring months drive the prev/next navigation links.
            if i > 0:
                prev_m = months[i - 1]
                prev_link = '/past/%s' % datetime.date(
                    prev_m.year, prev_m.month, 1).strftime('%Y-%m')
            if i + 1 < len(months):
                next_m = months[i + 1]
                next_link = '/past/%s' % datetime.date(
                    next_m.year, next_m.month, 1).strftime('%Y-%m')
    if not archive:
        archive.append(('', '%s, 0 posts' % now.strftime('%B %Y'), False))
    data = {
        "page": "past",
        "posts": posts,
        "month": month_name,
        "archive": archive,
        "next": next_link,
        "prev": prev_link
    }
    self.response.write(get_template('past.html').render(data))
def get(self, year, month): Settings.get() #Force email address update... now = datetime.datetime.now() if not year: last_post = Post.query().order(-Post.date).get() if last_post: year, month = last_post.date.year, last_post.date.month else: year, month = now.year, now.month else: year, month = int(year), int(month) from_date = datetime.date(year, month, 1) to_month = month + 1 to_year = year if to_month == 13: to_month = 1 to_year += 1 to_date = datetime.date(to_year, to_month, 1) posts = [p for p in Post.query(ndb.AND(Post.date >= from_date, Post.date < to_date)).order(-Post.date).fetch()] month_name = from_date.strftime('%B %Y') #Get month list months = PostCounter.get().months[:] def cmp_months(a,b): if a.year != b.year: return cmp(a.year, b.year) else: return cmp(a.month, b.month) months.sort(cmp_months) archive = [] next_link, prev_link = None, None for i, m in enumerate(months): date = datetime.date(m.year, m.month,1) descr = '%s, %s posts' % (date.strftime('%B %Y'), m.count) value = date.strftime('%Y-%m') archive.append((value,descr, m.year == year and m.month == month)) if m.year == year and m.month == month: if i != 0: prev_link = '/past/%s' % datetime.date(months[i-1].year, months[i-1].month, 1).strftime('%Y-%m') if i < len(months)-1: next_link = '/past/%s' % datetime.date(months[i+1].year, months[i+1].month, 1).strftime('%Y-%m') if not archive: archive.append(('', '%s, 0 posts' % now.strftime('%B %Y'), False)) data = { "page" : "past", "posts" : posts, "month" : month_name, "archive" : archive, "next" : next_link, "prev" : prev_link } self.response.write(get_template('past.html').render(data))
def post(self):
    """Execute an OhLife zip import: one Post per entry plus its images.

    Reads the ImportTask from the request, unpacks the uploaded zip,
    filters out already-imported entries, writes Posts/UserImages with
    periodic progress updates, and keeps the PostCounter in sync.  On
    any failure the upload is removed and the task is marked failed.
    Python 2 syntax (`except Exception, ex`).
    """
    import_task_key = ndb.Key(urlsafe=self.request.get('task'))
    import_task = import_task_key.get()
    import_task.update('Unpacking zip file...', status='inprogress')
    logging.info('Starting import ...')
    counter = PostCounter.get()
    try:
        posts, images = self.read_zip_file(import_task.uploaded_file)
        import_task.update('Importing...', total_photos=len(images), total_posts=len(posts))
        logging.info('Importing %s posts, %s images' % (len(posts), len(images)))
        # Skip entries that already exist in the datastore.
        posts = self.filter_posts(posts)
        for date, text in posts:
            str_date = date.strftime('%Y-%m-%d')
            p = Post(date=date, source='ohlife', text=text.decode('utf-8'))
            p.images = []
            p.has_images = False
            # Zip member names embed the date; match on substring.
            post_images = [(k, images[k]) for k in images.keys() if str_date in k]
            if len(post_images):
                logging.info('Importing %s images for date %s' % (len(post_images), str_date))
                p.images = []
                p.has_images = True
                for name, bytes in post_images:
                    user_image = UserImage()
                    # Normalise the archive filename to the stored form.
                    img_name = name.replace('img_', '').replace('.jpeg', '.jpg')
                    user_image.import_image(img_name, name, bytes, date)
                    p.images.append(img_name)
                    import_task.imported_photos += 1
                    user_image.put()
            p.put()
            # Increment without an immediate put (False); flushed below.
            counter.increment(p.date.year, p.date.month, False)
            import_task.imported_posts += 1
            if import_task.imported_posts % 10 == 0:
                # Checkpoint progress (and the counter) every 10 posts.
                import_task.update('Imported %s/%s post, %s/%s photos...' % (import_task.imported_posts, import_task.total_posts, import_task.imported_photos, import_task.total_photos))
                logging.info(import_task.message)
                counter.put()
        counter.put()
        skipped_posts = import_task.total_posts - import_task.imported_posts
        skipped_photos = import_task.total_photos - import_task.imported_photos
        msg = 'Imported %s posts and %s photos.' % (import_task.imported_posts, import_task.imported_photos)
        if skipped_posts or skipped_photos:
            msg += ' %s posts and %s photos already existed and were skipped.' % (skipped_posts, skipped_photos)
        import_task.update(msg, status='finished')
        logging.info(import_task.message)
        filestore.delete(import_task.uploaded_file)
    except Exception, ex:
        # Best-effort cleanup of the upload and counter state.
        try:
            filestore.delete(import_task.uploaded_file)
        except:
            pass
        try:
            counter.put()
        except:
            pass
        import_task.update('Failed to import: %s' % ex, status='failed')
        log_error('Failed import', traceback.format_exc(6))