def test_original_video():
    """Tests that the original_video property returns the VideoFile with 'original' encoding"""
    video = VideoFactory(key="8494dafc-3665-4960-8e00-9790574ec93a")
    original = VideoFileFactory(
        video=video, s3_object_key="original.mp4", encoding=EncodingNames.ORIGINAL
    )
    # A transcoded file should not be returned as the original
    VideoFileFactory(
        video=video, s3_object_key="transcoded.hls", encoding=EncodingNames.HLS
    )
    assert video.original_video == original
def test_transcoded_hls_video():
    """Tests that Video.transcoded_videos returns transcoded HLS videofile"""
    video = VideoFactory()
    # The original file should be excluded from the transcoded set
    VideoFileFactory(
        video=video, s3_object_key="original.mp4", encoding=EncodingNames.ORIGINAL
    )
    hls_file = VideoFileFactory(
        video=video, s3_object_key="video.m3u8", encoding=EncodingNames.HLS
    )
    transcoded = video.transcoded_videos
    assert len(transcoded) == 1
    assert transcoded[0] == hls_file
def test_upload_video(mocker):
    """Test that the upload_video task calls the YouTube API execute method"""
    videofile = VideoFileFactory()
    youtube_id = "M6LymW_8qVk"
    expected_response = {
        "id": youtube_id,
        "kind": "youtube#video",
        "snippet": {"description": "Testing description", "title": "Testing123"},
        "status": {"uploadStatus": "uploaded"},
    }
    mock_build = mocker.patch("cloudsync.youtube.build")
    mock_next_chunk = mock_build().videos.return_value.insert.return_value.next_chunk
    # First chunk reports upload still in progress; second returns the final response
    mock_next_chunk.side_effect = [
        (None, None),
        (None, expected_response),
    ]
    assert YouTubeApi().upload_video(videofile.video) == expected_response
def test_video_sources_mp4():
    """Tests that the video sources property returns the expected sorted results for MP4"""
    video = VideoFactory(key="8494dafc-3665-4960-8e00-9790574ec93a")
    # Create the files in a deliberately unsorted order
    mp4_files = {}
    for label, key in (
        (EncodingNames.MEDIUM, "medium.mp4"),
        (EncodingNames.SMALL, "small.mp4"),
        (EncodingNames.LARGE, "large.mp4"),
        (EncodingNames.BASIC, "basic.mp4"),
        (EncodingNames.HD, "hd.mp4"),
    ):
        mp4_files[label] = VideoFileFactory(
            video=video, s3_object_key=key, encoding=label
        )
    # Sources should come back ordered from highest to lowest quality
    expected_labels = [
        EncodingNames.HD,
        EncodingNames.LARGE,
        EncodingNames.MEDIUM,
        EncodingNames.BASIC,
        EncodingNames.SMALL,
    ]
    assert video.sources == [
        {
            "src": mp4_files[label].cloudfront_url,
            "label": label,
            "type": "video/mp4",
        }
        for label in expected_labels
    ]
def test_transcode_job(mocker, status, expected_status):
    """ Test that video status is updated properly after a transcode job is successfully created """
    video = VideoFactory.create(status=status)
    videofile = VideoFileFactory.create(video=video)
    # Retranscode jobs write their outputs under a dedicated folder prefix
    prefix = RETRANSCODE_FOLDER if status == VideoStatus.RETRANSCODE_SCHEDULED else ""
    preset = {
        "Key": f"{prefix}transcoded/" + video.hexkey + "/video_1351620000001-000040",
        "PresetId": "1351620000001-000040",
        "SegmentDuration": "10.0",
    }
    # Thumbnails are only requested on an initial transcode, not a retranscode
    if status != VideoStatus.RETRANSCODE_SCHEDULED:
        preset["ThumbnailPattern"] = ("thumbnails/" + video.hexkey + "/video_thumbnail_{count}")
    mocker.patch.multiple(
        "cloudsync.tasks.settings",
        ET_PRESET_IDS=("1351620000001-000040", "1351620000001-000020"),
        AWS_REGION="us-east-1",
        ET_PIPELINE_ID="foo",
        ENVIRONMENT="test",
    )
    mock_encoder = mocker.patch("cloudsync.api.VideoTranscoder.encode")
    mock_delete_objects = mocker.patch("cloudsync.api.delete_s3_objects")
    mocker.patch("ui.models.tasks")
    api.transcode_video(video, videofile)  # pylint: disable=no-value-for-parameter
    # The encoder must be called once with both presets and an HLS playlist
    mock_encoder.assert_called_once_with(
        {"Key": videofile.s3_object_key},
        [
            preset,
            {
                "Key": f"{prefix}transcoded/" + video.hexkey + "/video_1351620000001-000020",
                "PresetId": "1351620000001-000020",
                "SegmentDuration": "10.0",
            },
        ],
        Playlists=[{
            "Format": "HLSv3",
            "Name": f"{prefix}transcoded/" + video.hexkey + "/video__index",
            "OutputKeys": [
                f"{prefix}transcoded/" + video.hexkey + "/video_1351620000001-000040",
                f"{prefix}transcoded/" + video.hexkey + "/video_1351620000001-000020",
            ],
        }],
        UserMetadata={"pipeline": "odl-video-service-test"},
    )
    assert len(video.encode_jobs.all()) == 1
    # Prior outputs are deleted only when rerunning a scheduled retranscode
    assert mock_delete_objects.call_count == (
        1 if status == VideoStatus.RETRANSCODE_SCHEDULED else 0)
    assert Video.objects.get(id=video.id).status == expected_status
def test_post_hls_to_edx_wrong_type(mocker):
    """
    post_hls_to_edx should raise an exception if the given video file is not
    configured correctly for posting to edX
    """
    # An ORIGINAL-encoded file is not eligible for posting to edX
    non_hls_file = VideoFileFactory.create(encoding=EncodingNames.ORIGINAL)
    with pytest.raises(Exception):
        api.post_hls_to_edx(non_hls_file)
def test_s3_object_uniqueness(videofile):
    """
    Test that a videoFile with duplicate s3_object_key value should raise an
    IntegrityError on save
    """
    duplicate_kwargs = {
        "video": videofile.video,
        "s3_object_key": videofile.s3_object_key,
    }
    with pytest.raises(IntegrityError):
        VideoFileFactory(**duplicate_kwargs)
def test_video_sources_hls():
    """Tests that the video sources property returns the expected result for HLS"""
    video = VideoFactory(key="8494dafc-3665-4960-8e00-9790574ec93a")
    hls_file = VideoFileFactory(video=video, encoding=EncodingNames.HLS)
    expected_source = {
        "src": hls_file.cloudfront_url,
        "label": EncodingNames.HLS,
        "type": "application/x-mpegURL",
    }
    assert video.sources == [expected_source]
def test_post_hls_to_edx_no_endpoints(mocker):
    """post_hls_to_edx should log an error if no endpoints are configured for some video's collection"""
    mock_log_error = mocker.patch("ui.api.log.error")
    hls_file = VideoFileFactory.create(
        encoding=EncodingNames.HLS,
        video__collection__edx_course_id="some-course-id",
    )
    result = api.post_hls_to_edx(hls_file)
    # No endpoints configured: the failure is logged and no responses are returned
    mock_log_error.assert_called_once()
    assert result == {}
def test_download_mp4(encodings, download):
    """Tests that video.download returns the most appropriate file for download"""
    video = VideoFactory()
    for name in encodings:
        VideoFileFactory(video=video, s3_object_key=f"{name}.mp4", encoding=name)
    if download:
        assert video.download.encoding == download
    else:
        # No downloadable encoding available
        assert video.download is None
def test_upload_errors_retryable(mocker, error, retryable):
    """Test that uploads are retried 10x for retryable exceptions"""
    mock_build = mocker.patch("cloudsync.youtube.build")
    # Patch out time so retry backoff does not actually sleep
    mocker.patch("cloudsync.youtube.time")
    videofile = VideoFileFactory()
    mock_build().videos.return_value.insert.return_value.next_chunk.side_effect = error
    with pytest.raises(Exception) as exc_info:
        YouTubeApi().upload_video(videofile.video)
    # Retryable errors surface only after 10 attempts; others fail immediately
    assert str(exc_info.value).startswith("Retried YouTube upload 10x") == retryable
def test_transcoded_mp4_video():
    """Tests that Video.transcoded_videos returns transcoded MP4 videos in the correct order"""
    video = VideoFactory()
    # The original should not appear among the transcoded videos
    VideoFileFactory(
        video=video, s3_object_key="original.mp4", encoding=EncodingNames.ORIGINAL
    )
    small = VideoFileFactory(
        video=video, s3_object_key="small.mp4", encoding=EncodingNames.SMALL
    )
    basic = VideoFileFactory(
        video=video, s3_object_key="basic.mp4", encoding=EncodingNames.BASIC
    )
    hd = VideoFileFactory(
        video=video, s3_object_key="HD.mp4", encoding=EncodingNames.HD
    )
    # Expected order: highest quality first
    assert list(video.transcoded_videos) == [hd, basic, small]
def test_upload_video_no_id(mocker):
    """Test that the upload_video task fails if the response contains no id"""
    videofile = VideoFileFactory()
    mock_build = mocker.patch("cloudsync.youtube.build")
    mock_next_chunk = mock_build().videos.return_value.insert.return_value.next_chunk
    # Simulate a completed upload whose response body lacks an "id" key
    mock_next_chunk.return_value = (None, {})
    with pytest.raises(YouTubeUploadException):
        YouTubeApi().upload_video(videofile.video)
def test_edx_video_file_signal(mocker):
    """When a VideoFile is created with the right properties, a task to add the video to edX should be called"""
    mock_edx_task = mocker.patch("ui.signals.ovs_tasks.post_hls_to_edx.delay")
    collections = CollectionFactory.create_batch(
        3, edx_course_id=factory.Iterator(["courseid", None, "courseid"])
    )
    # Only the first file is HLS-encoded AND in a collection with an edX course id
    video_files = VideoFileFactory.create_batch(
        3,
        encoding=factory.Iterator(
            [EncodingNames.HLS, EncodingNames.HLS, "other-encoding"]
        ),
        video__collection=factory.Iterator(collections),
    )
    mock_edx_task.assert_called_once_with(video_files[0].id)
def edx_api_scenario():
    """Fixture that provides a VideoFile with the correct properties to post to edX"""
    course_id = "course-v1:abc"
    hls_video_file = VideoFileFactory.create(
        encoding=EncodingNames.HLS,
        video__title="My Video",
        video__collection__edx_course_id=course_id,
    )
    endpoint_link = CollectionEdxEndpointFactory(
        collection=hls_video_file.video.collection
    )
    return SimpleNamespace(
        video_file=hls_video_file,
        course_id=course_id,
        collection_endpoint=endpoint_link.edx_endpoint,
    )
def test_upload_video_long_fields(mocker):
    """Test that the upload_youtube_video task truncates title and description if too long"""
    # Build fields just past the limits asserted below
    long_title = "".join(random.choice(string.ascii_lowercase) for _ in range(105))
    long_desc = "".join(random.choice(string.ascii_lowercase) for _ in range(5005))
    video = VideoFactory.create(
        title=long_title,
        description=long_desc,
        is_public=True,
        status=VideoStatus.COMPLETE,
    )
    VideoFileFactory(video=video)
    mocker.patch("cloudsync.youtube.resumable_upload")
    mock_build = mocker.patch("cloudsync.youtube.build")
    mock_insert = mock_build().videos.return_value.insert
    YouTubeApi().upload_video(video)
    _, insert_kwargs = mock_insert.call_args
    assert insert_kwargs["body"]["snippet"]["title"] == long_title[:100]
    assert insert_kwargs["body"]["snippet"]["description"] == long_desc[:5000]
def test_video_sources_youtube(youtube_status, is_public, stream_source):
    """Tests that a public video can play from cloudfront if a youtube video does not exist"""
    public_video = VideoFactory.create(
        key="8494dafc-3665-4960-8e00-9790574ec93a",
        is_public=is_public,
        collection=CollectionFactory(stream_source=stream_source),
    )
    hd_file = VideoFileFactory(
        video=public_video, s3_object_key="hd.mp4", encoding=EncodingNames.HD
    )
    if youtube_status is not None:
        YouTubeVideoFactory.create(video=public_video, status=youtube_status)
    # A public YouTube-sourced video with a processed YouTube copy should not
    # expose any cloudfront sources; otherwise the MP4 source is returned.
    served_from_youtube = (
        youtube_status == YouTubeStatus.PROCESSED
        and is_public
        and stream_source == StreamSource.YOUTUBE
    )
    if served_from_youtube:
        assert public_video.sources == []
    else:
        assert public_video.sources == [
            {
                "src": hd_file.cloudfront_url,
                "label": EncodingNames.HD,
                "type": "video/mp4",
            }
        ]
def videofile():
    """Fixture that provides a saved VideoFile"""
    video_file = VideoFileFactory()
    return video_file
def test_video_file_can_add_to_edx(encoding, edx_course_id, expected):
    """Test that VideoFile.can_add_to_edx returns True under the right conditions"""
    # Single file (the original local name `video_files` was misleadingly plural)
    video_file = VideoFileFactory.create(
        encoding=encoding,
        video__collection__edx_course_id=edx_course_id,
    )
    assert video_file.can_add_to_edx is expected
def test_sort_transcoded_m3u8_files(mocker):  # pylint: disable=too-many-locals
    """ Test that sort_transcoded_m3u8_files changes the m3u8 file on s3 if it needs to be sorted """
    s3 = boto3.resource("s3")
    s3c = boto3.client("s3")
    bucket_name = "MYBUCKET"
    s3c.create_bucket(Bucket=bucket_name)
    bucket = s3.Bucket(bucket_name)
    mocker.patch("cloudsync.tasks.settings.VIDEO_S3_TRANSCODE_BUCKET", bucket_name)
    # Playlist whose variant streams are NOT in the expected order: should be rewritten
    file_key = "key"
    file_body = """
#EXTM3U
#EXT-X-STREAM-INF:PROGRAM-ID=1,BANDWIDTH=2723000,RESOLUTION=1280x720,CODECS="avc1.4d001f,mp4a.40.2"
video_1504127981867-06dkm6.m3u8
#EXT-X-STREAM-INF:PROGRAM-ID=1,BANDWIDTH=4881000,RESOLUTION=1920x1080,CODECS="avc1.4d001f,mp4a.40.2"
video_1504127981921-c2jlwt.m3u8
#EXT-X-STREAM-INF:PROGRAM-ID=1,BANDWIDTH=2723000,RESOLUTION=1920x1080,CODECS="avc1.4d001f,mp4a.40.2"
video_1504127981921-c2jlwt.m3u8
"""
    s3c.put_object(Body=file_body, Bucket=bucket_name, Key=file_key)
    VideoFileFactory(s3_object_key=file_key, encoding="HLS")
    # Playlist that is already in sorted order: should be left untouched
    already_sorted_file_key = "already_sorted"
    already_sorted_file_body = """
#EXTM3U
#EXT-X-STREAM-INF:PROGRAM-ID=1,BANDWIDTH=4881000,RESOLUTION=1920x1080,CODECS="avc1.4d001f,mp4a.40.2"
video_1604127981921-c2jlwt.m3u8
#EXT-X-STREAM-INF:PROGRAM-ID=1,BANDWIDTH=2723000,RESOLUTION=1280x720,CODECS="avc1.4d001f,mp4a.40.2"
video_1604127981867-06dkm6.m3u8
"""
    s3c.put_object(Body=already_sorted_file_body, Bucket=bucket_name, Key=already_sorted_file_key)
    VideoFileFactory(s3_object_key=already_sorted_file_key, encoding="HLS")
    # File without a valid #EXTM3U header: should be left untouched
    invalid_header_file_key = "invalid_header"
    invalid_header_file_body = """
invalid_header
#EXT-X-STREAM-INF:PROGRAM-ID=1,BANDWIDTH=2723000,RESOLUTION=1280x720,CODECS="avc1.4d001f,mp4a.40.2"
video_1504127981867-06dkm6.m3u8
"""
    s3c.put_object(Body=invalid_header_file_body, Bucket=bucket_name, Key=invalid_header_file_key)
    VideoFileFactory(s3_object_key=invalid_header_file_key, encoding="HLS")
    # File with unparseable stream-info lines: should be left untouched
    invalid_content_file_key = "invalid_content"
    invalid_content_file_body = """
#EXTM3U
#EXT-X-STREAM-INF: No
#EXT-X-STREAM-INF: RESOLUTIONS
"""
    s3c.put_object(Body=invalid_content_file_body, Bucket=bucket_name, Key=invalid_content_file_key)
    VideoFileFactory(s3_object_key=invalid_content_file_key, encoding="HLS")
    # The task should not raise an error if a VideoFile has an s3_object_key without a corresponding
    # file on s3
    VideoFileFactory(s3_object_key="not a valid key", encoding="HLS")
    sort_transcoded_m3u8_files()
    expected_file_body = """
#EXTM3U
#EXT-X-STREAM-INF:PROGRAM-ID=1,BANDWIDTH=4881000,RESOLUTION=1920x1080,CODECS="avc1.4d001f,mp4a.40.2"
video_1504127981921-c2jlwt.m3u8
#EXT-X-STREAM-INF:PROGRAM-ID=1,BANDWIDTH=2723000,RESOLUTION=1920x1080,CODECS="avc1.4d001f,mp4a.40.2"
video_1504127981921-c2jlwt.m3u8
#EXT-X-STREAM-INF:PROGRAM-ID=1,BANDWIDTH=2723000,RESOLUTION=1280x720,CODECS="avc1.4d001f,mp4a.40.2"
video_1504127981867-06dkm6.m3u8
"""
    updated_file = s3c.get_object(Bucket=bucket_name, Key=file_key)
    assert updated_file["Body"].read().decode() == expected_file_body
    already_sorted_file = s3c.get_object(Bucket=bucket_name, Key=already_sorted_file_key)
    assert already_sorted_file["Body"].read().decode(
    ) == already_sorted_file_body
    invalid_header_file = s3c.get_object(Bucket=bucket_name, Key=invalid_header_file_key)
    assert invalid_header_file["Body"].read().decode(
    ) == invalid_header_file_body
    invalid_content_file = s3c.get_object(Bucket=bucket_name, Key=invalid_content_file_key)
    assert invalid_content_file["Body"].read().decode(
    ) == invalid_content_file_body
def test_post_hls_to_edx(mocker):
    """post_hls_to_edx task should load a VideoFile and call an internal API function to post to edX"""
    mock_api_method = mocker.patch("ui.tasks.ovs_api.post_hls_to_edx")
    video_file = VideoFileFactory.create()
    tasks.post_hls_to_edx.delay(video_file.id)
    # The task should resolve the id back to the VideoFile instance
    mock_api_method.assert_called_once_with(video_file)
def test_transcode_job_failure(mocker, status, error_status):
    """ Test that video status is updated properly after a transcode or retranscode job creation fails """
    mocker.patch("cloudsync.api.delete_s3_objects")
    video = VideoFactory.create(status=status)
    videofile = VideoFileFactory.create(video=video)
    # Error payload shaped like an Elastic Transcoder failure response
    job_result = {
        "Job": {
            "Id": "1498220566931-qtmtcu",
            "Status": "Error"
        },
        "Error": {
            "Code": 200,
            "Message": "FAIL"
        },
    }
    mocker.patch.multiple(
        "cloudsync.tasks.settings",
        ET_PRESET_IDS=("1351620000001-000020", ),
        AWS_REGION="us-east-1",
        ET_PIPELINE_ID="foo",
        ENVIRONMENT="test",
    )
    mocker.patch("ui.models.tasks")
    # Make job creation raise so the failure path is exercised
    mock_encoder = mocker.patch(
        "cloudsync.api.VideoTranscoder.encode",
        side_effect=ClientError(error_response=job_result, operation_name="ReadJob"),
    )
    with pytest.raises(ClientError):
        api.transcode_video(video, videofile)
    # Retranscodes write under RETRANSCODE_FOLDER; initial transcodes do not
    prefix = "" if status == VideoStatus.TRANSCODING else RETRANSCODE_FOLDER
    preset = {
        "Key": f"{prefix}transcoded/" + video.hexkey + "/video_1351620000001-000020",
        "PresetId": "1351620000001-000020",
        "SegmentDuration": "10.0",
    }
    # Thumbnails are only requested on an initial transcode
    if status == VideoStatus.TRANSCODING:
        preset["ThumbnailPattern"] = ("thumbnails/" + video.hexkey + "/video_thumbnail_{count}")
    mock_encoder.assert_called_once_with(
        {"Key": videofile.s3_object_key},
        [preset],
        Playlists=[{
            "Format": "HLSv3",
            "Name": f"{prefix}transcoded/" + video.hexkey + "/video__index",
            "OutputKeys": [
                f"{prefix}transcoded/" + video.hexkey + "/video_1351620000001-000020"
            ],
        }],
        UserMetadata={"pipeline": "odl-video-service-test"},
    )
    # The failed job is still recorded, and the video moves to the error status
    assert len(video.encode_jobs.all()) == 1
    assert Video.objects.get(id=video.id).status == error_status
def test_process_transcode_results(mocker, status):
    """
    Verify that a videofile object is created for each output in the job JSON, and a thumbnail is
    created for each S3 object in the appropriate bucket virtual subfolder.
    """
    mock_move_s3_objects = mocker.patch("cloudsync.api.move_s3_objects")
    video = VideoFactory.create(status=status)
    VideoFileFactory.create(video=video)
    # We need to create the thumbnail bucket since this is all in the Moto virtual AWS account
    conn = boto3.resource("s3", region_name="us-east-1")
    bucket = conn.create_bucket(Bucket=settings.VIDEO_S3_THUMBNAIL_BUCKET)
    # Retranscode outputs live under RETRANSCODE_FOLDER
    p = RETRANSCODE_FOLDER if status == VideoStatus.RETRANSCODING else ""
    # Throw a fake thumbnail in the bucket:
    data = io.BytesIO(b"00000001111111")
    bucket.upload_fileobj(
        data, "thumbnails/1/05a06f21-7625-4c20-b416-ae161f31722a/lastjedi_00001.jpg")
    # Simulated Elastic Transcoder job result: four outputs in one HLS playlist,
    # with a thumbnail pattern on the first output only
    job = {
        "Id": "1498765896748-e0p0qr",
        "Input": {
            "Key": "1/05a06f21-7625-4c20-b416-ae161f31722a/lastjedi.mp4"
        },
        "Inputs": [{
            "Key": "1/05a06f21-7625-4c20-b416-ae161f31722a/lastjedi.mp4"
        }],
        "Output": {
            "Id": "1",
            "Key": f"{p}transcoded/1/05a06f21-7625-4c20-b416-ae161f31722a/lastjedi_1498700489769-iyi2t4",
            "PresetId": "1498700489769-iyi2t4",
            "SegmentDuration": "10.0",
            "Status": "Complete",
        },
        "Outputs": [
            {
                "Id": "1",
                "Key": f"{p}transcoded/1/05a06f21-7625-4c20-b416-ae161f31722a/lastjedi_1498700489769-iyi2t4",
                "PresetId": "1498700489769-iyi2t4",
                "SegmentDuration": "10.0",
                "Status": "Complete",
                "ThumbnailPattern": "thumbnails/1/05a06f21-7625-4c20-b416-ae161f31722a/lastjedi_{count}",
                "Watermarks": [],
                "Width": 1280,
            },
            {
                "Id": "2",
                "Key": f"{p}transcoded/1/05a06f21-7625-4c20-b416-ae161f31722a/lastjedi_1498700403561-zc5oo5",
                "PresetId": "1498700403561-zc5oo5",
                "SegmentDuration": "10.0",
                "Status": "Complete",
                "Watermarks": [],
                "Width": 1280,
            },
            {
                "Id": "3",
                "Key": f"{p}transcoded/1/05a06f21-7625-4c20-b416-ae161f31722a/lastjedi_1498700578799-qvvjor",
                "PresetId": "1498700578799-qvvjor",
                "SegmentDuration": "10.0",
                "Status": "Complete",
                "Watermarks": [],
                "Width": 854,
            },
            {
                "Id": "4",
                "Key": f"{p}transcoded/1/05a06f21-7625-4c20-b416-ae161f31722a/lastjedi_1498700649488-6t9m3h",
                "PresetId": "1498700649488-6t9m3h",
                "SegmentDuration": "10.0",
                "Status": "Complete",
                "Watermarks": [],
                "Width": 640,
            },
        ],
        "PipelineId": "1497455687488-evsuze",
        "Playlists": [{
            "Format": "HLSv4",
            "Name": f"{p}transcoded/1/05a06f21-7625-4c20-b416-ae161f31722a/lastjedi__index",
            "OutputKeys": [
                f"{p}transcoded/1/05a06f21-7625-4c20-b416-ae161f31722a/lastjedi_1498700489769-iyi2t4",
                f"{p}transcoded/1/05a06f21-7625-4c20-b416-ae161f31722a/lastjedi_1498700403561-zc5oo5",
                f"{p}transcoded/1/05a06f21-7625-4c20-b416-ae161f31722a/lastjedi_1498700578799-qvvjor",
                f"{p}transcoded/1/05a06f21-7625-4c20-b416-ae161f31722a/lastjedi_1498700649488-6t9m3h",
            ],
            "Status": "Complete",
        }],
        "Status": "Complete",
    }
    MockClientET.preset = {
        "Preset": {
            "Thumbnails": {
                "MaxHeight": 190,
                "MaxWidth": 100
            }
        }
    }
    mocker.patch("ui.utils.get_transcoder_client", return_value=MockClientET())
    api.process_transcode_results(video, job)
    # 2 videofiles total: the pre-existing one plus one created from the job
    # (presumably for the playlist — TODO confirm against process_transcode_results)
    assert len(video.videofile_set.all()) == 2
    assert len(video.videothumbnail_set.all()) == 1
    # Retranscode results are moved out of the retranscode folder on success
    assert mock_move_s3_objects.call_count == (
        1 if status == VideoStatus.RETRANSCODING else 0)
def video_with_file():
    """Fixture to create a video with an original videofile"""
    original_file = VideoFileFactory(
        video__is_public=True,
        encoding=EncodingNames.ORIGINAL,
    )
    return original_file.video