def more_like_this(request):
    """Return stories similar to a given story, via Elasticsearch more-like-this.

    Reads from the GET/POST payload (whichever matches ``request.method``):
      - ``story_hash``: the reference story (format ``<feed_id>:<guid_hash>``)
      - ``order``: ``'newest'`` (default) or anything else for oldest-first
      - ``page`` / ``limit``: pagination, 1-based page, default 10 per page

    Returns a dict with a single ``"stories"`` key holding formatted stories.
    """
    user = get_user(request)
    # Pull params from request.GET or request.POST depending on the verb.
    get_post = getattr(request, request.method)
    order = get_post.get('order', 'newest')
    page = int(get_post.get('page', 1))
    limit = int(get_post.get('limit', 10))
    offset = limit * (page - 1)
    story_hash = get_post.get('story_hash')
    # BUGFIX(review): the original code first built feed_ids from the user's
    # UserSubscription rows, then immediately overwrote it with the feed id
    # parsed out of story_hash — a dead DB query. The dead query is removed
    # here, preserving the effective behavior. NOTE(review): it is possible
    # the original intent was the opposite (search across all subscribed
    # feeds rather than only the story's own feed) — confirm with the
    # SearchStory.more_like_this contract before widening the scope.
    feed_ids, _ = MStory.split_story_hash(story_hash)
    story_ids = SearchStory.more_like_this([feed_ids], story_hash, order,
                                           offset=offset, limit=limit)
    # Re-fetch the matched stories from Mongo and order by date to honor the
    # requested sort, since ES returns them in relevance order.
    stories_db = MStory.objects(story_hash__in=story_ids).order_by(
        '-story_date' if order == "newest" else 'story_date')
    stories = Feed.format_stories(stories_db)
    return {
        "stories": stories,
    }
def handle(self, *args, **options):
    """Management-command entry point: (re)index stories into Elasticsearch.

    Selects a set of feeds according to the command options, then walks every
    MStory belonging to each selected feed and indexes it via SearchStory.

    Options used:
      - ``daemonize``: fork into the background before doing any work.
      - ``domain``: only feeds whose address contains this substring.
      - ``id``: a single feed by primary key.
      - ``force``: all feeds with more than 2 subscribers.
      - (default): feeds whose next scheduled update is due and are active.
    """
    if options['daemonize']:
        daemonize()
    settings.LOG_TO_STREAM = True
    # Added by Xinyan Lu: domain based feed refresh
    if options['domain']:
        feeds = Feed.objects.filter(feed_address__contains=options['domain'])
    elif options['id']:
        feeds = Feed.objects.filter(id=options['id'])
    elif options['force']:
        # feeds = Feed.objects.all()
        feeds = Feed.objects.filter(num_subscribers__gt=2)
    else:
        # NOTE(review): `now` is not defined in this method — presumably a
        # module-level timestamp set when the command starts; confirm it is
        # current enough for the scheduling comparison.
        feeds = Feed.objects.filter(next_scheduled_update__lte=now, active=True)
    # Shuffle so concurrent workers don't all start on the same feeds.
    feeds = feeds.order_by('?')
    num_feeds = len(feeds)
    i=0
    for feed in feeds:
        start = time.time()
        i += 1
        stories = MStory.objects(story_feed_id=feed.pk)
        for story in stories:
            # Story bodies are stored zlib-compressed; fall back to empty
            # content when the compressed field is missing.
            if story.story_content_z:
                story_content = zlib.decompress(story.story_content_z)
            else:
                story_content = ''
            SearchStory.index(story_id=story.story_guid,
                              story_title=story.story_title,
                              story_content=story_content,
                              story_author=story.story_author_name,
                              story_date=story.story_date,
                              db_id=str(story.id))
        delta = time.time() - start
        # The ~FY/~SB sequences are presumably the project's terminal color
        # codes for its logging helper — leave the format string untouched.
        done_msg = (u'---> [%-30s] ~FYProcessed in ~FM~SB%.4ss~FY~SN ~FB%d~FY[%d]' % (
            feed.feed_title[:30],delta, num_feeds,i,))
        logging.debug(done_msg)
    print 'Index fetch done!'
def forwards(self, orm):
    """Apply the migration: install the Elasticsearch mapping for stories."""
    create_mapping = SearchStory.create_elasticsearch_mapping
    create_mapping()