def test_deserialize(self):
    source = Source('/source.jpg')
    destination = Destination('/path.png', None, ACL.private)

    data = {
        'job': {
            'status': 'pending',
            'dateCreated': '2001-12-25T00:00:00Z',
            'sources': [],
            'result': None,
            'id': 'group-id_job-key',
            'issuer': 'urn:member:xxx',
            'specification': {
                'source': source.serialize(),
                'destination': destination.serialize(),
                'extractedFilesReport': {
                    'destination': destination.serialize(),
                    'format': 'json'
                }
            },
            'groupId': 'group-id',
            'flowId': None,
            'dateUpdated': '2001-12-25T00:00:00Z',
            'type': 'urn:job:archive.extract'
        },
        'attachment': {
            'dog': 'bull'
        }
    }

    payload = JobCallbackPayload.deserialize(data)

    assert_that(payload.attachment, is_({'dog': 'bull'}))
    assert_that(payload.job.id, is_('group-id_job-key'))
def test_serialize(self):
    destination = Destination('/fish.jpg')

    assert_that(
        destination.serialize(),
        is_({
            'directory': None,
            'path': '/fish.jpg',
            'lifecycle': None,
            'acl': 'public',
            'bucket': None
        }))
def test_copy_file_request(self):
    response_body = RestResult(
        0, 'OK',
        FileDescriptor('/file.copy.txt', 'file-new-id', FileType.file,
                       'text/plain', 123).serialize())
    httpretty.register_uri(httpretty.POST,
                           'https://fish.barrel/_api/copy/file',
                           body=json.dumps(response_body.serialize()))

    file_descriptor = self.file_service.copy_file_request().set_source(
        Source('/file.txt')).set_destination(
            Destination('/file.copy.txt')).execute()

    assert_that(file_descriptor, instance_of(FileDescriptor))
    assert_that(file_descriptor.path, is_('/file.copy.txt'))
    assert_that(
        json.loads(httpretty.last_request().body),
        is_({
            'source': {
                'path': '/file.txt',
                'fileId': None
            },
            'destination': {
                'directory': None,
                'path': '/file.copy.txt',
                'lifecycle': None,
                'acl': 'public',
                'bucket': None
            }
        }))
def deserialize(cls, data):
    # type: (dict) -> TranscodeSpecification
    destination = Destination.deserialize(data['destination'])

    video_data = data.get('video')
    video = StreamSpecification.deserialize(video_data) if video_data else None

    audio_data = data.get('audio')
    audio = StreamSpecification.deserialize(audio_data) if audio_data else None

    quality_range_data = data.get('qualityRange')
    quality_range = VideoQualityRange.deserialize(
        quality_range_data) if quality_range_data else None

    quality = data.get('quality')

    clipping_data = data.get('clipping')
    clipping = Clipping.deserialize(clipping_data) if clipping_data else None

    return TranscodeSpecification(destination, video, audio, quality_range,
                                  quality, clipping)
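# Illustrative usage sketch (not part of the SDK tests): it mirrors the
# serialized shape exercised by the transcode tests in this repo. The
# 'quality' value below is a placeholder string, since deserialize() passes
# it through unchanged; every key other than 'destination' is optional
# (read via .get()).
#
#     specification = TranscodeSpecification.deserialize({
#         'destination': {'directory': '/', 'path': None, 'lifecycle': None,
#                         'acl': 'public', 'bucket': None},
#         'quality': 'placeholder-quality'
#     })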
def test_create_archive_manifest_request(self):
    payload = FileDescriptor('/m.zip', 'file-id', FileType.file,
                             'application/vnd.wix-media.zip', 123).serialize()
    response_body = RestResult(0, 'OK', payload)
    httpretty.register_uri(
        httpretty.POST,
        'https://fish.appspot.com/_api/archive/create/manifest',
        body=json.dumps(response_body.serialize()))

    file_descriptor = self.archive_service.create_archive_manifest_request(
    ).add_sources(Source('/video.mp4')).set_destination(
        Destination('/m.zip')).set_algorithm(
            ZipAlgorithm.store).set_name('archive.zip').execute()

    assert_that(file_descriptor, instance_of(FileDescriptor))
    assert_that(file_descriptor.path, is_('/m.zip'))
    assert_that(
        json.loads(httpretty.last_request().body),
        is_({
            'sources': [{
                'path': '/video.mp4',
                'fileId': None
            }],
            'destination': {
                'directory': None,
                'path': '/m.zip',
                'lifecycle': None,
                'acl': 'public',
                'bucket': None
            },
            'name': 'archive.zip',
            'algorithm': 'store'
        }))
def test_image_operation_request(self):
    payload = FileDescriptor('/pony.png', 'file-id', FileType.file,
                             'image/png', 123).serialize()
    response = RestResult(0, 'OK', payload)
    httpretty.register_uri(httpretty.POST,
                           'https://fish.barrel/_api/images/operations',
                           body=json.dumps(response.serialize()))

    features = self.image_service.image_operation_request().set_source(
        Source('/omg.png')).set_specification(
            ImageOperationSpecification('/v1/fit/w_200,h_100',
                                        Destination('/pony.png'))).execute()

    assert_that(features, instance_of(FileDescriptor))
    assert_that(
        json.loads(httpretty.last_request().body),
        is_({
            'source': {
                'path': '/omg.png',
                'fileId': None
            },
            'specification': {
                'destination': {
                    'directory': None,
                    'path': '/pony.png',
                    'lifecycle': None,
                    'acl': 'public',
                    'bucket': None
                },
                'command': '/v1/fit/w_200,h_100'
            }
        }))
def test_invoke_flow_with_add_sources(self):
    self._register_invoke_flow_response(invoke_flow_with_add_sources_response)

    extra_metadata = AudioExtraMetadata(
        'track_name', 'artist', 'album_name', 'track_number', 'genre',
        'composer', 'year',
        Image('image_url', 'mime_type', 'image_description'),
        Lyrics('text', 'eng', 'lyrics_description'))

    flow_state = self.flow_control_service.invoke_flow_request(
    ).set_invocation(
        Invocation(['addSources1', 'addSources2'], [])
    ).set_flow(
        Flow().add_component(
            'addSources1',
            Component(ComponentType.add_sources, ['metadata1'],
                      AddSourcesSpecification([Source('/source/path.mp3')]))
        ).add_component(
            'addSources2',
            Component(ComponentType.add_sources, ['metadata2'],
                      AddSourcesSpecification([Source('/source/path2.mp3')]))
        ).add_component(
            'metadata1',
            Component(
                ComponentType.replace_extra_metadata, [],
                ReplaceAudioExtraMetadataSpecification(
                    Destination('/destination/path.mp3', None, ACL.private),
                    extra_metadata))
        ).add_component(
            'metadata2',
            Component(
                ComponentType.replace_extra_metadata, [],
                ReplaceAudioExtraMetadataSpecification(
                    Destination('/destination/path2.mp3', None, ACL.private),
                    extra_metadata)))
    ).execute()

    self._assert_flow(invoke_flow_with_add_sources_request, flow_state)
def test_subset_font_request(self):
    payload = {
        'groupId': 'g',
        'jobs': [{
            'type': 'urn:job:text.font.subset',
            'id': 'g_1',
            'groupId': 'g',
            'status': 'pending',
            'issuer': 'urn:app:app-id',
            'sources': [{
                'path': '/font.ttf'
            }],
            'specification': {
                'destination': {
                    'path': '/font.en.ttf',
                    'acl': 'public'
                },
                'languageCode': 'en'
            },
            'callback': {
                'url': 'https://i.will.be.back/'
            },
            'dateUpdated': '2017-05-22T07:17:44Z',
            'dateCreated': '2017-05-22T07:17:44Z'
        }]
    }
    response = RestResult(0, 'OK', payload)
    httpretty.register_uri(httpretty.POST,
                           'https://fish.appspot.com/_api/fonts/subset',
                           body=json.dumps(response.serialize()))

    job_group = self.text_service.subset_font_request().set_source(
        Source('/font.ttf')).set_specification(
            SubsetFontSpecification(Destination('/font.en.ttf'),
                                    'en')).execute()

    assert_that(job_group.group_id, is_('g'))
    assert_that(
        json.loads(httpretty.last_request().body),
        is_({
            'source': {
                'path': '/font.ttf',
                'fileId': None
            },
            'specification': {
                'destination': {
                    'directory': None,
                    'path': '/font.en.ttf',
                    'bucket': None,
                    'lifecycle': None,
                    'acl': 'public'
                },
                'languageCode': 'en'
            },
            'jobCallback': None
        }))
def deserialize(cls, data):
    # type: (dict) -> ExtractStoryboardSpecification
    destination = Destination.deserialize(data['destination'])

    return ExtractStoryboardSpecification(destination, data['columns'],
                                          data['rows'], data.get('tileWidth'),
                                          data.get('tileHeight'),
                                          data.get('format'),
                                          data.get('segmentDuration'))
def deserialize(cls, data):
    # type: (dict) -> CreateArchiveSpecification
    sources_data = data.get('sources')
    sources = [
        ArchiveSource.deserialize(source_info)
        for source_info in sources_data if source_info
    ]

    return CreateArchiveSpecification(
        sources, Destination.deserialize(data['destination']),
        data['archiveType'])
def test_deserialize(self):
    data = {
        'directory': None,
        'path': '/fish.jpg',
        'lifecycle': None,
        'acl': 'public',
        'bucket': None
    }

    destination = Destination.deserialize(data)

    assert_that(destination.serialize(), is_(data))
def test_invoke_flow_with_group_wait(self):
    self._register_invoke_flow_response(invoke_flow_group_wait_response)

    flow_state = self.flow_control_service.invoke_flow_request(
    ).set_invocation(
        Invocation(['copy1', 'copy2'], [Source('/source/path.txt')])
    ).set_flow(
        Flow().add_component(
            'copy1',
            Component(ComponentType.copy_file, ['group-wait'],
                      CopyFileSpecification(
                          Destination('/destination/path1.txt')))
        ).add_component(
            'copy2',
            Component(ComponentType.copy_file, ['group-wait'],
                      CopyFileSpecification(
                          Destination('/destination/path2.txt')))
        ).add_component('group-wait',
                        Component(ComponentType.group_wait, []))
    ).execute()

    self._assert_flow(invoke_flow_group_wait_request, flow_state)
def test_invoke_flow_copy_file(self):
    self._register_invoke_flow_response(invoke_flow_copy_file_response)

    flow_state = self.flow_control_service.invoke_flow_request(
    ).set_invocation(
        Invocation(['copyfile1'], [Source('/source/path.txt')])
    ).set_flow(
        Flow().add_component(
            'copyfile1',
            Component(ComponentType.copy_file, [],
                      CopyFileSpecification(
                          Destination('/destination/path.txt'))))
    ).execute()

    self._assert_flow(invoke_flow_copy_file_request, flow_state)
def deserialize(cls, data):
    # type: (dict) -> ExtractArchiveSpecification
    extraction_report_data = data.get('extractedFilesReport')
    if extraction_report_data:
        extraction_report = ExtractionReport.deserialize(
            extraction_report_data)
    else:
        extraction_report = None

    source_data = data.get('source')
    source = Source.deserialize(source_data) if source_data else None

    return ExtractArchiveSpecification(
        source, Destination.deserialize(data['destination']),
        extraction_report)
def test_transcode_request(self):
    self._register_transcode_request(transcode1_response)

    group = self.transcode_service.transcode_request().add_sources(
        Source('/video.mp4')).add_specifications(
            TranscodeSpecification(
                Destination(directory='/'),
                quality_range=VideoQualityRange(
                    minimum=VideoQuality.res_480p,
                    maximum=VideoQuality.res_1080p,
                ))).execute()

    assert_that(group.jobs[0], instance_of(TranscodeJob))
    assert_that(group.group_id, is_('g'))
    assert_that(json.loads(httpretty.last_request().body),
                is_(transcode1_request))
def test_invoke_flow_convert_font(self):
    self._register_invoke_flow_response(invoke_flow_convert_font_response)

    flow_state = self.flow_control_service.invoke_flow_request(
    ).set_invocation(
        Invocation(['convert-font'], [Source('/source/font.ttf')])
    ).set_flow(
        Flow().add_component(
            'convert-font',
            Component(ComponentType.convert_font, [],
                      ConvertFontSpecification(
                          Destination('/destination/font.woff', None,
                                      ACL.private), FontType.woff)))
    ).execute()

    self._assert_flow(invoke_flow_convert_font_request, flow_state)
def test_import_file_request(self):
    payload = {
        'status': 'pending',
        'specification': {
            'sourceUrl': 'http://source.url/filename.txt',
            'destination': {
                'directory': '/fish',
                'acl': 'public'
            }
        },
        'dateCreated': '2017-05-23T08:34:43Z',
        'sources': [],
        'result': None,
        'id': '71f0d3fde7f348ea89aa1173299146f8_19e137e8221b4a709220280b432f947f',
        'dateUpdated': '2017-05-23T08:34:43Z',
        'type': 'urn:job:import.file',
        'groupId': '71f0d3fde7f348ea89aa1173299146f8',
        'issuer': 'urn:app:app-id-1'
    }
    response_body = RestResult(0, 'OK', payload)
    httpretty.register_uri(httpretty.POST,
                           'https://fish.barrel/_api/import/file',
                           body=json.dumps(response_body.serialize()))

    import_file_job = self.file_service.import_file_request(
    ).set_destination(
        Destination('/img.png')).set_source_url('source-url').execute()

    assert_that(
        import_file_job.id,
        is_('71f0d3fde7f348ea89aa1173299146f8_19e137e8221b4a709220280b432f947f'))
    assert_that(
        json.loads(httpretty.last_request().body),
        is_({
            'sourceUrl': 'source-url',
            'destination': {
                'directory': None,
                'path': '/img.png',
                'lifecycle': None,
                'acl': 'public',
                'bucket': None
            },
            'externalAuthorization': None
        }))
def test_serialize(self):
    data = {
        'second': 0.12,
        'percentage': None,
        'destination': {
            'path': '/poster.png',
            'acl': 'public',
            'directory': None,
            'lifecycle': None,
            'bucket': None
        },
        'format': 'png'
    }

    specification = ExtractPosterSpecification(
        0.12, Destination('/poster.png', acl=ACL.public), 'png')

    assert_that(specification.serialize(), is_(data))
def test_transcode__audio_clipping(self):
    self._register_transcode_request(transcode_clip_response)

    group = self.transcode_service.transcode_request().add_sources(
        Source('/audio.mp3')).add_specifications(
            TranscodeSpecification(
                Destination(directory='/'),
                quality=AudioQuality.aac_128,
                clipping=Clipping(start=3,
                                  duration=6,
                                  fade_in_duration=1,
                                  fade_out_duration=2,
                                  fade_in_offset=4,
                                  fade_out_offset=5))).execute()

    assert_that(group.jobs[0], instance_of(TranscodeJob))
    self.assertEqual(group.group_id, 'g')
    self.assertEqual(json.loads(httpretty.last_request().body),
                     transcode_clip_request)
def test_invoke_flow1_request(self):
    self._register_invoke_flow_response(invoke_flow1_response)

    transcode_specification = TranscodeSpecification(
        Destination(directory='/deliverables/'),
        quality_range=VideoQualityRange(VideoQuality.res_720p,
                                        VideoQuality.res_1080p))

    flow_state = self.flow_control_service.invoke_flow_request(
    ).set_invocation(Invocation(['import'])).set_flow(
        Flow().add_component(
            'import',
            Component(ComponentType.import_file, ['transcode'],
                      import_file_specification)
        ).add_component(
            'transcode',
            Component(ComponentType.transcode, ['playlist'],
                      transcode_specification)
        ).add_component('playlist',
                        Component(ComponentType.playlist, []))
    ).execute()

    self._assert_flow(invoke_flow1_request, flow_state)
def test_serialize(self):
    data = {
        'columns': 1,
        'rows': 2,
        'tileWidth': 100,
        'tileHeight': 200,
        'format': 'png',
        'segmentDuration': 12.3,
        'destination': {
            'directory': None,
            'path': '/poster.png',
            'lifecycle': None,
            'acl': 'public',
            'bucket': None
        },
    }

    specification = ExtractStoryboardSpecification(
        Destination('/poster.png', acl=ACL.public), 1, 2, 100, 200, 'png',
        12.3)

    assert_that(specification.serialize(), is_(data))
def test_transcode_request__custom_video_settings(self):
    self._register_transcode_request(transcode2_response)

    group = self.transcode_service.transcode_request().add_sources(
        Source('/video.mp4')).add_specifications(
            TranscodeSpecification(
                Destination(path='/video.720.mp4'),
                video=StreamSpecification(
                    StreamType.video,
                    VideoSpecification(
                        VideoCodec('h264', 'main', '3.1', 25, 10000,
                                   GOP(0, 30, 30, 2, 0, 0, 3), 'faster'),
                        Resolution(256, 144, ImageScaling('lanczos'), '1:1'),
                        30.0,
                        [ImageFilter('unsharp', {'value': '5:5:0.5:3:3:0.0'})],
                        '30000/1001')))).execute()

    assert_that(group.jobs[0], instance_of(TranscodeJob))
    assert_that(group.group_id, is_('g'))
    self.assertEqual(transcode2_request,
                     json.loads(httpretty.last_request().body))
def deserialize(cls, data):
    # type: (dict) -> ConvertFontSpecification
    destination = Destination.deserialize(data['destination'])

    return ConvertFontSpecification(destination, data.get('fontType'),
                                    data.get('fontSet'))
def deserialize(cls, data):
    # type: (dict) -> ImageOperationSpecification
    destination = Destination.deserialize(data['destination'])

    return ImageOperationSpecification(data['command'], destination)
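# Round-trip sketch (illustrative only): the dict below matches the
# 'specification' object asserted in test_image_operation_request, so
# deserialize() followed by serialize() should reproduce the same shape.
#
#     specification = ImageOperationSpecification.deserialize({
#         'command': '/v1/fit/w_200,h_100',
#         'destination': {'directory': None, 'path': '/pony.png',
#                         'lifecycle': None, 'acl': 'public', 'bucket': None}
#     })
#     assert specification.serialize()['command'] == '/v1/fit/w_200,h_100'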
def test_create_archive_request(self):
    payload = {
        'type': 'urn:job:archive.create',
        'id': 'g_1',
        'groupId': 'g',
        'status': 'pending',
        'issuer': 'urn:app:app-id',
        'sources': [{
            'path': '/video.mp4'
        }],
        'specification': {
            'sources': [{
                'path': '/video.mp4'
            }],
            'destination': {
                'path': '/video.tar',
                'acl': 'public'
            },
            'archiveType': 'tar'
        },
        'callback': {
            'url': 'https://call.me.back/'
        },
        'dateUpdated': '2017-05-22T07:17:44Z',
        'dateCreated': '2017-05-22T07:17:44Z'
    }
    response = RestResult(0, 'OK', payload)
    httpretty.register_uri(httpretty.POST,
                           'https://fish.appspot.com/_api/archive/create',
                           body=json.dumps(response.serialize()))

    job = self.archive_service.create_archive_request().add_sources(
        Source('/video.mp4')).set_destination(
            Destination('/video.tar')).set_archive_type(
                ArchiveType.tar).set_callback(
                    Callback('https://call.me.back/')).execute()

    assert_that(job, instance_of(CreateArchiveJob))
    assert_that(job.group_id, is_('g'))
    assert_that(
        json.loads(httpretty.last_request().body),
        is_({
            'archiveType': 'tar',
            'sources': [{
                'path': '/video.mp4',
                'fileId': None
            }],
            'destination': {
                'directory': None,
                'path': '/video.tar',
                'lifecycle': None,
                'acl': 'public',
                'bucket': None
            },
            'jobCallback': {
                'url': 'https://call.me.back/',
                'headers': None,
                'attachment': None,
                'passthrough': False
            }
        }))
def test_extract_archive_request(self):
    payload = {
        'id': 'g_1',
        'groupId': 'g',
        'type': 'urn:job:archive.extract',
        'issuer': 'urn:app:app-id',
        'status': 'success',
        'sources': [{
            'path': '/video.zip',
            'fileId': 'file id'
        }],
        'specification': {
            'source': {
                'path': '/video.zip',
                'fileId': 'file id'
            },
            'destination': {
                'directory': '/video',
                'path': None,
                'acl': 'public',
                'bucket': None
            },
            'extractedFilesReport': {
                'destination': {
                    'directory': None,
                    'path': '/video.report.json',
                    'acl': 'public'
                },
                'format': 'json'
            }
        },
        'result': {
            'message': 'OK',
            'code': 0,
            'payload': {
                'reportFileDescriptor': {
                    'path': '/video.report.json',
                    'id': 'report file id',
                    'acl': 'public',
                    'mimeType': 'application/json',
                    'size': 1718,
                    'hash': None,
                    'type': '-',
                    'dateUpdated': '2017-07-30T12:46:39Z',
                    'dateCreated': '2017-07-30T12:46:39Z',
                }
            }
        },
        'dateCreated': '2017-07-30T12:46:31Z',
        'dateUpdated': '2017-07-30T12:46:40Z',
    }
    response = RestResult(0, 'OK', payload)
    httpretty.register_uri(httpretty.POST,
                           'https://fish.appspot.com/_api/archive/extract',
                           body=json.dumps(response.serialize()))

    job = self.archive_service.extract_archive_request().set_source(
        Source('/video.zip')).set_destination(
            Destination(directory='/video')).set_report(
                ExtractionReport(Destination(path='/video.report.json'),
                                 ExtractionReportFormat.json)).execute()

    assert_that(job, instance_of(ExtractArchiveJob))
    assert_that(job.group_id, is_('g'))
    assert_that(
        json.loads(httpretty.last_request().body),
        is_({
            'source': {
                'path': '/video.zip',
                'fileId': None
            },
            'destination': {
                'directory': '/video',
                'path': None,
                'lifecycle': None,
                'acl': 'public',
                'bucket': None
            },
            'extractedFilesReport': {
                'destination': {
                    'directory': None,
                    'path': '/video.report.json',
                    'lifecycle': None,
                    'acl': 'public',
                    'bucket': None
                },
                'format': 'json'
            },
            'jobCallback': None
        }))
from unittest import TestCase

from media_platform.job.extract_archive_job import ExtractArchiveJob, ExtractArchiveSpecification, ExtractionReport, \
    ExtractionReportFormat
from media_platform.lang import datetime_serialization
from media_platform.service.destination import Destination
from media_platform.service.file_descriptor import ACL
from media_platform.job.job import Job
from media_platform.service.source import Source

source = Source('/source.jpg')
destination = Destination('/path.png', acl=ACL.private)

data = {
    'status': 'pending',
    'dateCreated': '2001-12-25T00:00:00Z',
    'sources': [source.serialize()],
    'result': None,
    'id': 'group-id_job-key',
    'issuer': 'urn:member:xxx',
    'specification': {
        'source': source.serialize(),
        'destination': destination.serialize(),
        'extractedFilesReport': {
            'destination': destination.serialize(),
            'format': 'json'
        }
    },
    'groupId': 'group-id',
    'flowId': None,
    'dateUpdated': '2001-12-25T00:00:00Z',
    'type': 'urn:job:archive.extract'
}
def test_extract_poster_request(self):
    payload = {
        'groupId': 'g',
        'jobs': [{
            'id': 'g_1',
            'type': 'urn:job:av.poster',
            'groupId': 'g',
            'status': 'pending',
            'specification': {
                'second': 20,
                'percentage': None,
                'destination': {
                    'path': '/video.poster.jpg',
                    'directory': '/',
                    'acl': 'public'
                },
                'format': 'jpg'
            },
            'sources': [{
                'path': '/video.mp4',
                'fileId': '123'
            }],
            'result': None,
            'issuer': 'urn:app:app-id-1',
            'dateUpdated': '2017-05-23T08:34:43Z',
            'dateCreated': '2017-05-23T08:34:43Z',
        }]
    }
    response = RestResult(0, 'OK', payload)
    httpretty.register_uri(httpretty.POST,
                           'https://fish.barrel/_api/av/poster',
                           body=json.dumps(response.serialize()))

    group = self.video_service.extract_poster_request().add_sources(
        Source('/video.mp4')).add_specifications(
            ExtractPosterSpecification(20, Destination('/video.poster.jpg'),
                                       'jpg')).execute()

    assert_that(group.jobs[0], instance_of(ExtractPosterJob))
    assert_that(group.group_id, is_('g'))
    assert_that(
        json.loads(httpretty.last_request().body),
        is_({
            'specifications': [{
                'second': 20,
                'percentage': None,
                'destination': {
                    'directory': None,
                    'path': '/video.poster.jpg',
                    'lifecycle': None,
                    'acl': 'public',
                    'bucket': None
                },
                'format': 'jpg'
            }],
            'sources': [{
                'path': '/video.mp4',
                'fileId': None
            }]
        }))
def test_extract_storyboard_request(self):
    payload = {
        'groupId': 'g',
        'jobs': [{
            'id': 'g_1',
            'type': 'urn:job:av.storyboard',
            'groupId': 'g',
            'status': 'pending',
            'specification': {
                'columns': 5,
                'rows': 6,
                'destination': {
                    'path': '/video.story.jpg',
                    'directory': '/',
                    'acl': 'public'
                },
                'format': 'jpg'
            },
            'sources': [{
                'path': '/video.mp4',
                'fileId': '123'
            }],
            'result': None,
            'issuer': 'urn:app:app-id-1',
            'dateUpdated': '2017-05-23T08:34:43Z',
            'dateCreated': '2017-05-23T08:34:43Z',
        }]
    }
    response = RestResult(0, 'OK', payload)
    httpretty.register_uri(httpretty.POST,
                           'https://fish.barrel/_api/av/storyboard',
                           body=json.dumps(response.serialize()))

    group = self.video_service.extract_storyboard_request().add_sources(
        Source('/video.mp4')).add_specifications(
            ExtractStoryboardSpecification(Destination('/video.story.jpg'), 5,
                                           6, 256, 512, 'jpg',
                                           12.25)).execute()

    assert_that(group.jobs[0], instance_of(ExtractStoryboardJob))
    assert_that(group.group_id, is_('g'))
    assert_that(
        json.loads(httpretty.last_request().body),
        is_({
            'specifications': [{
                'rows': 6,
                'tileHeight': 512,
                'format': 'jpg',
                'tileWidth': 256,
                'destination': {
                    'directory': None,
                    'path': '/video.story.jpg',
                    'lifecycle': None,
                    'acl': 'public',
                    'bucket': None
                },
                'segmentDuration': 12.25,
                'columns': 5
            }],
            'sources': [{
                'path': '/video.mp4',
                'fileId': None
            }],
            'jobCallback': None
        }))
from tests.service.flow_control_service.test_flows.invoke.flow1.response import invoke_flow1_response
from tests.service.flow_control_service.test_flows.invoke.flow_callback.request import invoke_flow_callback_request
from tests.service.flow_control_service.test_flows.invoke.flow_callback.response import invoke_flow_callback_response
from tests.service.flow_control_service.test_flows.invoke.group_wait.request import invoke_flow_group_wait_request
from tests.service.flow_control_service.test_flows.invoke.group_wait.response import invoke_flow_group_wait_response
from tests.service.flow_control_service.test_flows.invoke.operation_callback.request import \
    invoke_flow_operation_callback_request
from tests.service.flow_control_service.test_flows.invoke.operation_callback.response import \
    invoke_flow_operation_callback_response
from tests.service.flow_control_service.test_flows.invoke.replace_extra_metadata.request import \
    invoke_flow_replace_extra_metadata_request
from tests.service.flow_control_service.test_flows.invoke.replace_extra_metadata.response import \
    invoke_flow_replace_extra_metadata_response

import_file_specification = ImportFileSpecification(
    'http://movs.me/video.mp4', Destination('/imports/video.mp4'))


class TestFlowControlService(unittest.TestCase):
    authenticator = None  # type: AppAuthenticator
    authenticated_http_client = None  # type: AuthenticatedHTTPClient
    flow_control_service = None  # type: FlowControlService

    @classmethod
    def setUpClass(cls):
        cls.authenticator = AppAuthenticator('app', 'secret')
        cls.authenticated_http_client = AuthenticatedHTTPClient(
            cls.authenticator)
        cls.flow_control_service = FlowControlService(
            'fish.barrel', cls.authenticated_http_client)