def get_num_posts(self, user):
    """Return how many ChoosiePosts *user* has, caching the count on the entity.

    On the first call for a user (num_posts is None) the count is computed
    with a datastore query, stored on the user entity via put(), and then
    returned; subsequent calls hit the cached value.
    """
    # Fast path: the count was already computed and persisted.
    if user.num_posts is not None:
        return user.num_posts
    # Cache miss — count this user's posts and persist the result.
    query = ChoosiePost.all()
    query.filter("user_fb_id =", user.fb_uid)
    user.num_posts = query.count()
    user.put()
    return user.num_posts
def get(self):
    """Scrape Facebook comments and votes for recently published posts.

    Queries ChoosiePosts that were posted to Facebook within the last day
    and scrapes comments/votes for each. A failure on one post is logged
    (with traceback) and does not abort the rest of the batch.
    """
    logging.info("Scraping comments...")
    q = (ChoosiePost.all()
         .filter("posted_to_fb = ", True)
         .filter("created_at > ",
                 datetime.datetime.now() - datetime.timedelta(1)))
    # NOTE(review): posts_found is set but never read in this handler —
    # confirm nothing else relies on it before removing.
    posts_found = False
    for p in q.run():
        try:
            posts_found = True
            choosie_post_key = str(p.key())
            logging.info(choosie_post_key)
            comments, votes, error = ScrapeCommentsHandler.scrape_comments_and_votes_from_facebook(choosie_post_key)
            if error:
                # logging.warn is a deprecated alias; warning() is canonical.
                logging.warning("Error scraping post from FB. Error = %s", error)
        except Exception:
            # Fixes the Python-2-only `except Exception, e:` syntax (e was
            # unused anyway) and uses logging.exception so the traceback of
            # the unexpected error is recorded instead of silently dropped.
            logging.exception(
                "Unexpected error while scraping Facebook comments for post [%s] (%s).",
                p.key(), p.question)
def get_feed_and_cursor(cursor, limit=10, timestamp=None):
    """Fetch one page of ChoosiePosts, newest first.

    Args:
        cursor: opaque datastore cursor to resume a previous page, or
            falsy to start from the top.
        limit: maximum number of posts to return; falsy values fall back
            to 10, and string values are coerced with int().
        timestamp: optional ISO-format datetime string; only posts created
            strictly after it are returned.

    Returns:
        A (posts, new_cursor) tuple. The fetched posts are also written to
        the cache via CacheController.set_multi_models.
    """
    if not limit:
        limit = 10
    limit = int(limit)
    # Lazy %-args: message is only formatted if INFO logging is enabled.
    logging.info('Retrieving %d posts from db', limit)
    posts = ChoosiePost.all()
    if cursor:
        posts.with_cursor(cursor)
    if timestamp:
        created_after = Utils.parse_iso_format_datetime(timestamp)
        posts.filter('created_at >', created_after)
    posts.order("-created_at")
    # list() replaces the manual append loop over the query iterator.
    posts_result = list(posts.run(limit=limit))
    new_cursor = posts.cursor()
    CacheController.set_multi_models(posts_result)
    return (posts_result, new_cursor)
def get_feed_and_cursor(cursor, limit=10, timestamp=None):
    """Return (posts, cursor): a newest-first page of ChoosiePosts.

    Resumes from *cursor* when given, optionally restricts to posts created
    after the ISO-format *timestamp*, caches the fetched posts, and hands
    back the datastore cursor for the next page.

    NOTE(review): this looks like a duplicate of another get_feed_and_cursor
    definition in this file — confirm and consolidate.
    """
    # Falsy limit falls back to the default page size of 10.
    effective_limit = int(limit) if limit else 10
    logging.info('Retrieving %d posts from db' % effective_limit)
    query = ChoosiePost.all()
    if cursor:
        query.with_cursor(cursor)
    if timestamp:
        query.filter('created_at >', Utils.parse_iso_format_datetime(timestamp))
    query.order("-created_at")
    fetched = []
    for entry in query.run(limit=effective_limit):
        fetched.append(entry)
    new_cursor = query.cursor()
    CacheController.set_multi_models(fetched)
    return (fetched, new_cursor)