Exemplo n.º 1
0
def subscription_video_viewed(request, subscription):
    """Mark a video inside *subscription* as watched by the current user.

    Expects the video's id in ``request.POST["id"]``; responds 404 when it
    is missing. Updates the subscription's last-watched marker and flags the
    video as viewed inside a single cross-group transaction.
    """
    if "id" not in request.POST:
        raise Http404
    key = create_composite_key(str(request.user.pk), request.POST["id"])

    with transaction.atomic(xg=True):
        sub = get_object_or_404(Subscription, id=subscription)
        video = get_object_or_404(
            Video.objects.from_subscription(user=request.user,
                                            subscription=subscription),
            id=key)

        sub.last_watched_video = video.ordering_key
        sub.save()

        video.viewed = True
        video.save()
    return JsonResponse({})
Exemplo n.º 2
0
def bucket_video_viewed(request, bucket):
    """Mark a video inside *bucket* as watched by the current user.

    *bucket* must parse as an int and ``request.POST["id"]`` must be
    present; otherwise a 404 is raised. Updates the bucket's last-watched
    marker and flags the video as viewed inside one cross-group
    transaction.
    """
    try:
        bucket_pk = int(bucket)
    except ValueError:
        raise Http404

    if "id" not in request.POST:
        raise Http404
    key = create_composite_key(str(request.user.pk), request.POST["id"])

    with transaction.atomic(xg=True):
        bkt = get_object_or_404(Bucket, id=bucket_pk)
        video = get_object_or_404(
            Video.objects.from_bucket(user=request.user, bucket=bucket_pk),
            id=key)

        bkt.last_watched_video = video.ordering_key
        bkt.save()

        video.viewed = True
        video.save()
    return JsonResponse({})
Exemplo n.º 3
0
    def test_import_videos(self, service_mock, defer_mock):
        """import_videos fetches one playlist page, resolves snippets, and
        stores both videos with composite ordering keys — without deferring
        a follow-up task."""
        playlist_list = service_mock.return_value.playlistItems.return_value.list
        video_list = service_mock.return_value.videos.return_value.list

        published = '1997-07-16T19:20:30.45Z'
        playlist_list.return_value.execute.return_value = {
            'items': [
                {'contentDetails': {'videoId': 'video123'}},
                {'contentDetails': {'videoId': 'video456'}},
            ],
        }
        video_list.return_value.execute.return_value = {
            'items': [
                {
                    'id': 'video123',
                    'snippet': {
                        'title': 'my video',
                        'description': 'this is my video',
                        'thumbnails': {},
                        'publishedAt': published,
                    },
                },
                {
                    'id': 'video456',
                    'snippet': {
                        'title': 'my other video',
                        'description': 'this is my other video',
                        'thumbnails': {},
                        'publishedAt': published,
                    },
                },
            ],
        }

        user = get_user_model().objects.create(username='******')
        OauthToken.objects.create(user=user, data={})
        subscription = Subscription.objects.create(user=user, channel_id="123", last_update=timezone.now())
        bucket = BucketFactory(user=user, subs=[subscription])

        import_videos(user.id, subscription.id, "upload123", [bucket.id])

        # one playlist page, one batched video lookup, no deferred retry
        self.assertEqual(playlist_list.call_count, 1)
        self.assertEqual(video_list.call_count, 1)
        self.assertEqual(defer_mock.call_count, 0)

        self.assertEqual(playlist_list.call_args, (
            (),
            {'playlistId': 'upload123', 'part': 'contentDetails',
             'fields': 'items(contentDetails(videoId))', 'maxResults': API_MAX_RESULTS, 'pageToken': None}
        ))
        self.assertEqual(video_list.call_args, (
            (),
            {'id': 'video123,video456', 'part': 'snippet',
             'fields': 'items(snippet(publishedAt,thumbnails))', 'maxResults': API_MAX_RESULTS}
        ))

        # both videos persisted with parsed timestamps and composite keys
        expected_dt = datetime(1997, 7, 16, 19, 20, 30, 450000, tzinfo=UTC)
        self.assertEqual(Video.objects.count(), 2)
        for youtube_id in ("video123", "video456"):
            stored = Video.objects.get(youtube_id=youtube_id)
            self.assertEqual(stored.published_at, expected_dt)
            self.assertEqual(stored.ordering_key,
                             create_composite_key(str(expected_dt), youtube_id))
Exemplo n.º 4
0
    def test_default_video_ordering(self):
        """Videos are returned sorted by ordering_key, i.e. by published_at
        with youtube_id breaking ties."""
        def ts(month):
            # same timestamp throughout, varying only the month
            return datetime(1997, month, 16, 19, 20, 30, 450000, tzinfo=UTC)

        # created deliberately out of order to exercise the default ordering
        for month, youtube_id in ((8, "1"), (6, "4"), (7, "2"), (7, "3")):
            VideoFactory(published_at=ts(month), youtube_id=youtube_id)

        videos = Video.objects.all()

        self.assertEqual([v.published_at for v in videos],
                         [ts(6), ts(7), ts(7), ts(8)])

        self.assertEqual([v.ordering_key for v in videos], [
            create_composite_key(str(ts(6)), "4"),
            create_composite_key(str(ts(7)), "2"),
            create_composite_key(str(ts(7)), "3"),
            create_composite_key(str(ts(8)), "1"),
        ])
Exemplo n.º 5
0
def import_videos(user_id,
                  subscription_id,
                  playlist,
                  bucket_ids,
                  page_token=None,
                  only_first_page=False):
    """Import videos from a YouTube playlist into the datastore.

    Pages through *playlist* via the YouTube API, looks up snippet data for
    each video id, and get-or-creates a Video per (user_id, youtube_id)
    composite key. Stops early once a previously imported video is seen, or
    after one page when only_first_page is set. If the task runtime is
    exceeded, the remaining work is re-deferred from the current page token.
    """
    if page_token is not None and only_first_page:
        # initial import to show some videos, we don't need to do a full import of every video
        return
    try:
        _log.info("Adding videos to buckets: %s", bucket_ids)
        try:
            youtube = get_service(user_id)
        except OauthToken.DoesNotExist:
            # user has no OAuth credentials stored; nothing we can fetch
            return

        while True:
            # TODO: consider
            # https://developers.google.com/youtube/v3/docs/activities/list it
            # has things like "publishedAfter" which could be a better way of
            # working out what we have and have not imported
            playlistitem_list = youtube.playlistItems().list(
                playlistId=playlist,
                part=PLAYLIST_PARTS,
                fields=PLAYLIST_FIELDS,
                pageToken=page_token,
                maxResults=API_MAX_RESULTS).execute()
            ids_from_playlist = [
                item['contentDetails']['videoId']
                for item in playlistitem_list['items']
            ]

            # batch-resolve snippet data for the whole page in one call
            video_list = youtube.videos().list(
                id=','.join(ids_from_playlist),
                part=VIDEO_PARTS,
                fields=VIDEO_FIELDS,
                maxResults=API_MAX_RESULTS).execute()
            ids_from_video = [video['id'] for video in video_list['items']]

            # the two endpoints can disagree (e.g. deleted/private videos);
            # log the discrepancy but carry on with the intersection
            missing_videos = set(ids_from_playlist) - set(ids_from_video)
            extra_videos = set(ids_from_video) - set(ids_from_playlist)
            _log.info("Missing these IDs from the video list endpoint: %s",
                      missing_videos)
            _log.info("Extra IDs from the video list endpoint: %s",
                      extra_videos)

            seen_before = False

            for video in video_list['items']:
                if video['id'] not in ids_from_playlist:
                    # extra id not requested for this page; skip it
                    continue

                data = dict(
                    subscription_id=subscription_id,
                    user_id=user_id,
                    published_at=parse_datetime(
                        video['snippet']['publishedAt']),
                    thumbnails={
                        size: value.get('url', '')
                        for size, value in video['snippet']
                        ['thumbnails'].items()
                    },
                    youtube_id=video['id'],
                    buckets_ids=bucket_ids,
                )
                # composite key makes the video unique per user
                key = create_composite_key(str(user_id), video['id'])
                obj, created = Video.objects.get_or_create(id=key,
                                                           defaults=data)
                _log.debug("Video %s%s created", obj.id,
                           "" if created else " not")
                if not created:
                    # we've seen this video before, therefore we've already imported it
                    seen_before = True

            # continue paging only while everything on this page was new
            if 'nextPageToken' in playlistitem_list and not seen_before and not only_first_page:
                page_token = playlistitem_list['nextPageToken']
            else:
                break
    except RuntimeExceededError:
        # App Engine task deadline hit: re-defer from where we left off
        deferred.defer(import_videos,
                       user_id,
                       subscription_id,
                       playlist,
                       bucket_ids,
                       page_token=page_token,
                       only_first_page=only_first_page)
Exemplo n.º 6
0
def subscriptions(user_id, page_token=None):
    """Import new subscriptions into the system

    Loops over subscription data from API, adding new suscriptions and
    updating old ones. For each (non-missing) channel, a video import task
    is deferred: a quick first-page import for newly created subscriptions,
    a full import into the subscription's existing buckets otherwise. If the
    task runtime is exceeded, the remaining pages are re-deferred.
    """
    try:
        try:
            youtube = get_service(user_id)
        except OauthToken.DoesNotExist:
            # user has no OAuth credentials stored; nothing we can fetch
            return

        while True:
            subscription_data = {}
            subscription_list = youtube.subscriptions().list(
                mine=True,
                part=SUBSCRIPTION_PARTS,
                fields=SUBSCRIPTION_FIELDS,
                maxResults=API_MAX_RESULTS,
                pageToken=page_token).execute()

            # collect the page keyed by channel id; upload_playlist is
            # filled in from the channels endpoint below
            for item in subscription_list['items']:
                channel_id = item['snippet']['resourceId']['channelId']

                subscription_data[channel_id] = dict(
                    id=create_composite_key(str(user_id), channel_id),
                    user_id=user_id,
                    last_update=timezone.now(),
                    channel_id=channel_id,
                    thumbnails={
                        size: value.get('url', '')
                        for size, value in item['snippet']
                        ['thumbnails'].items()
                    },
                    upload_playlist=
                    None,  # must fetch this from the channel data
                )

            ids_from_sub = sorted(subscription_data.keys())

            # batch-resolve channel data for the whole page in one call
            channel_list = youtube.channels().list(
                id=','.join(ids_from_sub),
                part=CHANNEL_PARTS,
                fields=CHANNEL_FIELDS,
                maxResults=API_MAX_RESULTS).execute()
            ids_from_chan = [
                channel['id'] for channel in channel_list['items']
            ]

            # there are times when a subscription has a channel id, but there
            # isn't channel data for whatever reason, e.g. I'm subscribed to
            # UCMzNCTNmDMBO9oueVWpuOMg but there's no data from the channel API
            missing_channels = set(ids_from_sub) - set(ids_from_chan)
            extra_channels = set(ids_from_chan) - set(ids_from_sub)
            _log.info("Missing these IDs from the channel list endpoint: %s",
                      missing_channels)
            _log.info("Extra IDs from the channel list endpoint: %s",
                      extra_channels)

            for chn in channel_list['items']:
                if chn['id'] in ids_from_sub:
                    subscription_data[chn['id']]['upload_playlist'] = \
                            chn['contentDetails']['relatedPlaylists']['uploads']

            for data in subscription_data.itervalues():
                if data['channel_id'] in missing_channels:
                    # no channel data, so no upload playlist to import from
                    continue

                # 'id' is the model pk, not a model field; pop it for defaults
                key = data.pop('id')
                obj, created = Subscription.objects.update_or_create(
                    id=key, defaults=data)
                _log.debug("Subscription %s%s created", obj.id,
                           "" if created else " not")
                bucket_ids = []
                if not created:
                    # existing subscription: import into its current buckets
                    bucket_ids = Bucket.objects.order_by("pk").filter(
                        subs__contains=obj).values_list('pk', flat=True)
                    bucket_ids = list(bucket_ids)
                # new subscriptions get a fast first-page import only
                deferred.defer(import_videos,
                               user_id,
                               key,
                               obj.upload_playlist,
                               bucket_ids,
                               only_first_page=created)

            if 'nextPageToken' in subscription_list:
                page_token = subscription_list['nextPageToken']
            else:
                break
    except RuntimeExceededError:
        # App Engine task deadline hit: re-defer from the current page
        deferred.defer(subscriptions, user_id, page_token)