def video_channels(self, video_id, cache_age=3600):
    """Return the channels containing the given video, most-populated first.

    404s when the video id matches nothing in the index.
    """
    search = VideoSearch(self.get_locale())
    search.add_term('video.id', video_id)
    # Rank channels by how many instances they hold.
    search.add_sort('child_instance_count')
    search.set_paging(*self.get_page(default_size=5))
    matches = search.videos(with_channels=True)
    if not matches:
        abort(404)
    channel_items = [match['channel'] for match in matches]
    return {'channels': {'items': channel_items, 'total': search.total}}
def _search_es(query): vs = VideoSearch(self.get_locale()) # Split the term so that the search phrase is # over each individual word, and not the phrase # as a whole - the index will have tokenised # each word and without splitting we won't get # any results back (standard indexer on video title) if not app.config.get("DOLLY"): vs.add_term("title", query.split()) else: # Snowball analyzer is on the Dolly mapping # so we can do a proper search here vs.search_terms(sub_string(query)) vs.add_term("most_influential", True, occurs=MUST) # also clear any favs from search start, size = self.get_page() vs.set_paging(offset=start, limit=size) total = vs.total return total, vs.videos(add_tracking=_add_tracking)
def test_video_comments(self):
    """End-to-end comment lifecycle: create channel, add videos, post a
    comment, verify the comment count in ES, delete it, verify the count
    drops back to zero.

    Fix: the DELETE request's status code was never asserted, so a failed
    delete would surface only as a confusing ES count mismatch later.
    """
    with app.test_request_context():
        user_id = self.create_test_user().id
    with self.app.test_client() as client:
        # create new channel
        r = client.post(
            '/ws/{}/channels/'.format(user_id),
            data=json.dumps(dict(
                title='test',
                description='test',
                category='',
                cover='',
                public=True,
            )),
            content_type='application/json',
            headers=[get_auth_header(user_id)]
        )
        self.assertEquals(r.status_code, 201)
        channel_id = json.loads(r.data)['id']

        # add videos
        r = client.put(
            '/ws/{}/channels/{}/videos/'.format(user_id, channel_id),
            data=json.dumps([
                VideoInstanceData.video_instance1.id,
                VideoInstanceData.video_instance2.id,
            ]),
            content_type='application/json',
            headers=[get_auth_header(user_id)]
        )
        self.assertEquals(r.status_code, 204)

        # add comment
        instance_data = dict(userid=user_id, channelid=channel_id)
        r = client.get(
            '/ws/{userid}/channels/{channelid}/'.format(**instance_data),
            content_type='application/json',
            headers=[get_auth_header(user_id)])
        instance_data['videoid'] = json.loads(r.data)['videos']['items'][0]['id']
        r = client.post(
            '/ws/{userid}/channels/{channelid}/videos/{videoid}/comments/'.format(**instance_data),
            data=json.dumps(dict(comment="this is a comment")),
            content_type='application/json',
            headers=[get_auth_header(user_id)]
        )
        self.assertEquals(r.status_code, 201)

        self.wait_for_es()
        v = VideoSearch('en-gb')
        v.add_id(instance_data['videoid'])
        instance = v.videos()[0]
        self.assertEquals(instance['comments']['total'], 1)

        # delete comment
        r = client.get(
            '/ws/{userid}/channels/{channelid}/videos/{videoid}/comments/'.format(**instance_data),
            content_type='application/json',
            headers=[get_auth_header(user_id)]
        )
        comment_id = json.loads(r.data)['comments']['items'][0]['id']
        instance_data.update({'commentid': comment_id})
        r = client.delete(
            '/ws/{userid}/channels/{channelid}/videos/{videoid}/comments/{commentid}/'.format(**instance_data),
            content_type='application/json',
            headers=[get_auth_header(user_id)]
        )
        # Previously unchecked: make sure the delete itself succeeded
        # before re-querying ES for the updated comment count.
        self.assertEquals(r.status_code, 204)

        self.wait_for_es()
        v = VideoSearch('en-gb')
        v.add_id(instance_data['videoid'])
        instance = v.videos()[0]
        self.assertEquals(instance['comments']['total'], 0)
def search_video(videoid):
    """Fetch a single video by id from the 'en-us' search index.

    Raises IndexError if the id matches nothing.
    """
    lookup = VideoSearch('en-us')
    lookup.add_id(videoid)
    hits = lookup.videos()
    return hits[0]
def channel_videos(self, locale, channelid):
    """Admin ranking view: render all videos of a channel with per-locale
    view/star counts and the ES scoring explanation for each hit.
    """
    # NOTE(review): request.args values are strings when present, so
    # `offset`/`limit` may be str here (int only when defaulted) --
    # presumably set_paging / ES coerces them; confirm, or pass type=int.
    offset, limit = request.args.get('start', 0), request.args.get('size', 20)
    order_by_position = request.args.get('position', 'f')
    vs = VideoSearch(locale)
    vs.add_term('channel', [channelid])
    # NOTE(review): branch nesting reconstructed from a flattened source --
    # the date sorts are treated as unconditional tie-breakers; confirm
    # against the original indentation.
    if not order_by_position == 't':
        vs.add_sort('position', 'asc')
    vs.date_sort('desc')
    vs.add_sort('video.date_published', 'desc')
    vs.set_paging(offset, limit)
    ctx = {
        'videos': [],
        'image_cdn': app.config['IMAGE_CDN'],
        'referrer': request.args.get('referrer', request.referrer),
        'url': request.url,
        'path': request.path,
        'position': order_by_position,
    }
    for video in vs.results():
        c = {}
        c['id'] = video.id
        c['title'] = video.title
        # date_added may be a string (slice it) or a date-like object
        # (isoformat first); both paths end with a YYYY-MM-DD prefix.
        try:
            c['date_added'] = video.date_added[:10]
        except TypeError:
            c['date_added'] = video.date_added.isoformat()[:10]
        c['thumbnail_url'] = video.video.thumbnail_url
        # Raw ES score explanation, pulled from the hit's _meta payload.
        c['explanation'] = video.__dict__['_meta']['explanation']
        c['duration'] = video.video.duration
        c['source'] = Source.id_to_label(video.video.source)
        c['source_id'] = video.video.source_id
        c['subscriber_count'] = video.subscriber_count
        # Per-locale engagement counts shown side by side in the template.
        c['gbcount'] = video.locales['en-gb']['view_count']
        c['uscount'] = video.locales['en-us']['view_count']
        c['gbstarcount'] = video.locales['en-gb']['star_count']
        c['usstarcount'] = video.locales['en-us']['star_count']
        ctx['videos'].append(c)
    # Separate lookup for the channel metadata itself.
    cs = ChannelSearch(locale)
    cs.add_id(channelid)
    channel = cs.channels()[0]
    ctx['channel'] = channel
    ctx['video_count'] = vs.total
    return self.render('admin/ranking.html', **ctx)
def video_list(self):
    """List videos for the current locale, paged and optionally filtered
    by category, with country-based availability filtering.

    Returns ``{'videos': {'items': [...], 'total': N}}``.

    Fix: the ElasticSearch branch previously returned ``total`` at the top
    level (``{'videos': {'items': ...}, 'total': N}``) while the local-DB
    branch nested it under ``videos`` -- the two code paths now produce
    the same response shape.
    """
    if not use_elasticsearch():
        data, total = get_local_videos(self.get_locale(), self.get_page(),
                                       star_order=True, **request.args)
        return dict(videos=dict(items=data, total=total))

    date_order = request.args.get('date_order')
    if app.config.get('DOLLY'):
        date_order = 'desc'

    # Category must look like an integer id if supplied.
    category = request.args.get('category')
    if category:
        try:
            int(category)
        except ValueError:
            abort(400)

    vs = VideoSearch(self.get_locale())
    offset, limit = self.get_page()
    vs.set_paging(offset, limit)
    vs.filter_category(category)
    if app.config.get('DOLLY'):
        # Filter by tagged/added date
        vs.add_filter(filters.date_tagged_sort())
        vs.add_sort('_score', order='desc')
        # exclude favs
        f = pyes.TermFilter(field='is_favourite', value=False)
        vs._exclusion_filters.append(f)
    else:
        vs.star_order_sort(request.args.get('star_order'))
        vs.date_sort(date_order)

    # Hide videos not allowed in the requester's country, when known.
    location = self.get_location()
    if location:
        vs.check_country_allowed(location)

    videos = vs.videos(with_channels=True)
    total = vs.total
    return dict(videos=dict(items=videos, total=total))