def test_deserialize(self):
    """A callback payload dict deserializes into a JobCallbackPayload
    exposing both the parsed job and the raw attachment dict."""
    source = Source('/source.jpg')
    destination = Destination('/path.png', None, ACL.private)
    # Wire-format payload as delivered to a job callback endpoint:
    # a 'job' envelope plus an opaque 'attachment' supplied by the caller.
    data = {
        'job': {
            'status': 'pending',
            'dateCreated': '2001-12-25T00:00:00Z',
            'sources': [],
            'result': None,
            'id': 'group-id_job-key',
            'issuer': 'urn:member:xxx',
            'specification': {
                'source': source.serialize(),
                'destination': destination.serialize(),
                'extractedFilesReport': {
                    'destination': destination.serialize(),
                    'format': 'json'
                }
            },
            'groupId': 'group-id',
            'flowId': None,
            'dateUpdated': '2001-12-25T00:00:00Z',
            'type': 'urn:job:archive.extract'
        },
        'attachment': {
            'dog': 'bull'
        }
    }
    payload = JobCallbackPayload.deserialize(data)
    # The attachment passes through untouched; the job id is preserved.
    assert_that(payload.attachment, is_({'dog': 'bull'}))
    assert_that(payload.job.id, is_('group-id_job-key'))
def test_serialize(self):
    """A path-only Source serializes to a dict with a null fileId."""
    expected = {
        'path': '/fish.jpg',
        'fileId': None,
    }
    assert_that(Source('/fish.jpg').serialize(), is_(expected))
def test_create_archive_manifest_request(self):
    """Building and executing a create-archive-manifest request posts the
    expected body and parses the returned FileDescriptor."""
    # Stub the service response: a successful RestResult wrapping the
    # descriptor of the manifest file the server would create.
    payload = FileDescriptor('/m.zip', 'file-id', FileType.file,
                             'application/vnd.wix-media.zip', 123).serialize()
    response_body = RestResult(0, 'OK', payload)
    httpretty.register_uri(
        httpretty.POST,
        'https://fish.appspot.com/_api/archive/create/manifest',
        body=json.dumps(response_body.serialize()))
    # Fluent builder: sources + destination + algorithm + archive name.
    file_descriptor = self.archive_service.create_archive_manifest_request(
    ).add_sources(Source('/video.mp4')).set_destination(
        Destination('/m.zip')).set_algorithm(
            ZipAlgorithm.store).set_name('archive.zip').execute()
    assert_that(file_descriptor, instance_of(FileDescriptor))
    assert_that(file_descriptor.path, is_('/m.zip'))
    # Verify the exact JSON body the builder serialized onto the wire.
    assert_that(
        json.loads(httpretty.last_request().body),
        is_({
            'sources': [{
                'path': '/video.mp4',
                'fileId': None
            }],
            'destination': {
                'directory': None,
                'path': '/m.zip',
                'lifecycle': None,
                'acl': 'public',
                'bucket': None
            },
            'name': 'archive.zip',
            'algorithm': 'store'
        }))
def test_image_operation_request(self):
    """Image operation request posts command + destination and parses the
    resulting FileDescriptor."""
    result_payload = FileDescriptor('/pony.png', 'file-id', FileType.file,
                                    'image/png', 123).serialize()
    httpretty.register_uri(
        httpretty.POST,
        'https://fish.barrel/_api/images/operations',
        body=json.dumps(RestResult(0, 'OK', result_payload).serialize()))

    specification = ImageOperationSpecification('/v1/fit/w_200,h_100',
                                                Destination('/pony.png'))
    features = self.image_service.image_operation_request() \
        .set_source(Source('/omg.png')) \
        .set_specification(specification) \
        .execute()

    assert_that(features, instance_of(FileDescriptor))

    expected_body = {
        'source': {
            'path': '/omg.png',
            'fileId': None
        },
        'specification': {
            'destination': {
                'directory': None,
                'path': '/pony.png',
                'lifecycle': None,
                'acl': 'public',
                'bucket': None
            },
            'command': '/v1/fit/w_200,h_100'
        }
    }
    assert_that(json.loads(httpretty.last_request().body), is_(expected_body))
def deserialize(cls, data):
    # type: (dict) -> Job
    """Build a Job (or Job subclass) instance from its wire-format dict.

    Subclasses that declare a ``specification_type`` get a typed
    specification object; otherwise the raw dict is kept as-is.
    """
    callback_payload = data.get('callback')
    result_payload = data.get('result')

    raw_specification = data['specification']
    if cls.specification_type:
        parsed_specification = cls.specification_type.deserialize(
            raw_specification)
    else:
        parsed_specification = raw_specification

    # todo: deserialize result payload as specific type
    parsed_result = RestResult.deserialize(
        result_payload) if result_payload else None
    parsed_callback = Callback.deserialize(
        callback_payload) if callback_payload else None

    job = cls(data['id'],
              data['issuer'],
              data['status'],
              parsed_specification,
              [Source.deserialize(s) for s in data['sources']],
              parsed_callback,
              data.get('flowId'),
              parsed_result,
              datetime_serialization.deserialize(data['dateCreated']),
              datetime_serialization.deserialize(data['dateUpdated']))
    # 'type' is not a constructor argument; it is attached afterwards.
    job.type = data['type']
    return job
def test_copy_file_request(self):
    """Copy-file request posts source + destination and returns the copied
    file's descriptor."""
    copied = FileDescriptor('/file.copy.txt', 'file-new-id', FileType.file,
                            'text/plain', 123)
    httpretty.register_uri(
        httpretty.POST,
        'https://fish.barrel/_api/copy/file',
        body=json.dumps(RestResult(0, 'OK', copied.serialize()).serialize()))

    file_descriptor = self.file_service.copy_file_request() \
        .set_source(Source('/file.txt')) \
        .set_destination(Destination('/file.copy.txt')) \
        .execute()

    assert_that(file_descriptor, instance_of(FileDescriptor))
    assert_that(file_descriptor.path, is_('/file.copy.txt'))

    expected_body = {
        'source': {
            'path': '/file.txt',
            'fileId': None
        },
        'destination': {
            'directory': None,
            'path': '/file.copy.txt',
            'lifecycle': None,
            'acl': 'public',
            'bucket': None
        }
    }
    assert_that(json.loads(httpretty.last_request().body), is_(expected_body))
def test_invoke_flow_with_add_sources(self):
    """Flow with two parallel add-sources entry points, each feeding a
    replace-extra-metadata component writing to a private destination."""
    self._register_invoke_flow_response(
        invoke_flow_with_add_sources_response)
    # Shared audio metadata reused by both metadata components.
    extra_metadata = AudioExtraMetadata(
        'track_name', 'artist', 'album_name', 'track_number', 'genre',
        'composer', 'year',
        Image('image_url', 'mime_type', 'image_description'),
        Lyrics('text', 'eng', 'lyrics_description'))
    # Graph: addSources1 -> metadata1, addSources2 -> metadata2.
    # The invocation lists both add-sources components as entry points
    # and supplies no invocation-level sources.
    flow_state = self.flow_control_service.invoke_flow_request(
    ).set_invocation(Invocation(
        ['addSources1', 'addSources2'], []
    )).set_flow(
        Flow().add_component(
            'addSources1',
            Component(ComponentType.add_sources, ['metadata1'],
                      AddSourcesSpecification([Source('/source/path.mp3')]))
        ).add_component(
            'addSources2',
            Component(ComponentType.add_sources, ['metadata2'],
                      AddSourcesSpecification([Source('/source/path2.mp3')]))
        ).add_component(
            'metadata1',
            Component(ComponentType.replace_extra_metadata, [],
                      ReplaceAudioExtraMetadataSpecification(
                          Destination('/destination/path.mp3', None,
                                      ACL.private), extra_metadata))
        ).add_component(
            'metadata2',
            Component(ComponentType.replace_extra_metadata, [],
                      ReplaceAudioExtraMetadataSpecification(
                          Destination('/destination/path2.mp3', None,
                                      ACL.private), extra_metadata))
        )
    ).execute()
    self._assert_flow(invoke_flow_with_add_sources_request, flow_state)
def test_subset_font_request(self):
    """Subset-font request posts source + language specification and
    parses the returned job group."""
    # Stub response: a one-job group in 'pending' state.
    payload = {
        'groupId': 'g',
        'jobs': [{
            'type': 'urn:job:text.font.subset',
            'id': 'g_1',
            'groupId': 'g',
            'status': 'pending',
            'issuer': 'urn:app:app-id',
            'sources': [{
                'path': '/font.ttf'
            }],
            'specification': {
                'destination': {
                    'path': '/font.en.ttf',
                    'acl': 'public'
                },
                'languageCode': 'en'
            },
            'callback': {
                'url': 'https://i.will.be.back/'
            },
            'dateUpdated': '2017-05-22T07:17:44Z',
            'dateCreated': '2017-05-22T07:17:44Z'
        }]
    }
    response = RestResult(0, 'OK', payload)
    httpretty.register_uri(httpretty.POST,
                           'https://fish.appspot.com/_api/fonts/subset',
                           body=json.dumps(response.serialize()))
    job_group = self.text_service.subset_font_request().set_source(
        Source('/font.ttf')).set_specification(
            SubsetFontSpecification(Destination('/font.en.ttf'),
                                    'en')).execute()
    assert_that(job_group.group_id, is_('g'))
    # No callback was set on the request, so jobCallback serializes to None.
    assert_that(
        json.loads(httpretty.last_request().body),
        is_({
            'source': {
                'path': '/font.ttf',
                'fileId': None
            },
            'specification': {
                'destination': {
                    'directory': None,
                    'path': '/font.en.ttf',
                    'bucket': None,
                    'lifecycle': None,
                    'acl': 'public'
                },
                'languageCode': 'en'
            },
            'jobCallback': None
        }))
def test_deserialize(self):
    """Deserializing then re-serializing a Source dict is the identity."""
    serialized_form = {
        'fileId': None,
        'path': '/fish.jpg',
    }
    round_tripped = Source.deserialize(serialized_form).serialize()
    assert_that(round_tripped, is_(serialized_form))
def deserialize(cls, data):
    # type: (dict) -> Invocation
    """Create an Invocation from its wire-format dict.

    'callback' and 'errorStrategy' are optional keys; a missing callback
    yields None rather than an empty Callback.
    """
    raw_callback = data.get('callback')
    if raw_callback:
        callback = Callback.deserialize(raw_callback)
    else:
        callback = None
    return cls(data['entryPoints'],
               [Source.deserialize(item) for item in data['sources']],
               callback,
               data.get('errorStrategy'))
def deserialize(cls, data):
    # type: (dict) -> Component
    """Create a Component from its wire-format dict.

    The specification parser is looked up by component type in
    _SPECIFICATIONS; types mapped to a falsy entry keep specification=None.
    """
    spec_cls = _SPECIFICATIONS[data['type']]
    specification = spec_cls.deserialize(
        data['specification']) if spec_cls else None

    raw_callback = data.get('callback')
    callback = None
    if raw_callback:
        callback = Callback.deserialize(raw_callback)

    return cls(data['type'],
               data.get('successors', []),
               specification,
               data.get('deleteSources', False),
               callback,
               [Source.deserialize(item) for item in data.get('sources', [])])
def test_invoke_flow_copy_file(self):
    """Single-component flow: copy one invocation source to a destination."""
    self._register_invoke_flow_response(invoke_flow_copy_file_response)

    copy_component = Component(
        ComponentType.copy_file, [],
        CopyFileSpecification(Destination('/destination/path.txt')))
    invocation = Invocation(['copyfile1'], [Source('/source/path.txt')])

    flow_state = self.flow_control_service.invoke_flow_request() \
        .set_invocation(invocation) \
        .set_flow(Flow().add_component('copyfile1', copy_component)) \
        .execute()

    self._assert_flow(invoke_flow_copy_file_request, flow_state)
def deserialize(cls, data):
    # type: (dict) -> ExtractArchiveSpecification
    """Create an ExtractArchiveSpecification from its wire-format dict.

    'source' and 'extractedFilesReport' are optional; only 'destination'
    is required.
    """
    report_payload = data.get('extractedFilesReport')
    report = ExtractionReport.deserialize(
        report_payload) if report_payload else None

    raw_source = data.get('source')
    if raw_source:
        source = Source.deserialize(raw_source)
    else:
        source = None

    return ExtractArchiveSpecification(
        source, Destination.deserialize(data['destination']), report)
def test_transcode_request(self):
    """Transcode with a quality range posts the expected request body."""
    self._register_transcode_request(transcode1_response)

    quality_range = VideoQualityRange(minimum=VideoQuality.res_480p,
                                      maximum=VideoQuality.res_1080p)
    specification = TranscodeSpecification(Destination(directory='/'),
                                           quality_range=quality_range)
    group = self.transcode_service.transcode_request() \
        .add_sources(Source('/video.mp4')) \
        .add_specifications(specification) \
        .execute()

    assert_that(group.jobs[0], instance_of(TranscodeJob))
    assert_that(group.group_id, is_('g'))
    assert_that(json.loads(httpretty.last_request().body),
                is_(transcode1_request))
def test_invoke_flow_convert_font(self):
    """Single-component flow: convert a TTF source to a private WOFF."""
    self._register_invoke_flow_response(invoke_flow_convert_font_response)

    convert_component = Component(
        ComponentType.convert_font, [],
        ConvertFontSpecification(
            Destination('/destination/font.woff', None, ACL.private),
            FontType.woff))
    invocation = Invocation(['convert-font'], [Source('/source/font.ttf')])

    flow_state = self.flow_control_service.invoke_flow_request() \
        .set_invocation(invocation) \
        .set_flow(Flow().add_component('convert-font', convert_component)) \
        .execute()

    self._assert_flow(invoke_flow_convert_font_request, flow_state)
def test_transcode__audio_clipping(self):
    """Audio transcode with clipping (start/duration plus fades) posts the
    expected request body."""
    self._register_transcode_request(transcode_clip_response)

    clipping = Clipping(start=3,
                        duration=6,
                        fade_in_duration=1,
                        fade_out_duration=2,
                        fade_in_offset=4,
                        fade_out_offset=5)
    specification = TranscodeSpecification(Destination(directory='/'),
                                           quality=AudioQuality.aac_128,
                                           clipping=clipping)
    group = self.transcode_service.transcode_request() \
        .add_sources(Source('/audio.mp3')) \
        .add_specifications(specification) \
        .execute()

    assert_that(group.jobs[0], instance_of(TranscodeJob))
    self.assertEqual(group.group_id, 'g')
    self.assertEqual(json.loads(httpretty.last_request().body),
                     transcode_clip_request)
def test_invoke_flow_with_group_wait(self):
    """Two parallel copy components converge on a group-wait component."""
    self._register_invoke_flow_response(invoke_flow_group_wait_response)
    # Graph: copy1 -> group-wait <- copy2. Both copy components name
    # 'group-wait' as their successor; the group_wait component itself
    # has no successors. Both entry points share one invocation source.
    flow_state = self.flow_control_service.invoke_flow_request(
    ).set_invocation(
        Invocation(['copy1', 'copy2'], [Source('/source/path.txt')])
    ).set_flow(
        Flow().add_component(
            'copy1',
            Component(ComponentType.copy_file, ['group-wait'],
                      CopyFileSpecification(
                          Destination('/destination/path1.txt')))
        ).add_component(
            'copy2',
            Component(ComponentType.copy_file, ['group-wait'],
                      CopyFileSpecification(
                          Destination('/destination/path2.txt')))
        ).add_component(
            'group-wait',
            Component(ComponentType.group_wait, [])
        )
    ).execute()
    self._assert_flow(invoke_flow_group_wait_request, flow_state)
def test_transcode_request__custom_video_settings(self):
    """Transcode with a fully explicit video stream specification
    (codec, GOP, resolution, scaling, filters, frame rate)."""
    self._register_transcode_request(transcode2_response)
    # The expected serialized form of this spec tree lives in
    # transcode2_request (asserted below).
    group = self.transcode_service.transcode_request().add_sources(
        Source('/video.mp4')).add_specifications(
            TranscodeSpecification(
                Destination(path='/video.720.mp4'),
                video=StreamSpecification(
                    StreamType.video,
                    VideoSpecification(
                        # codec name, profile, level, crf, bitrate,
                        # GOP structure, preset — assumed positional
                        # meaning; confirm against VideoCodec signature.
                        VideoCodec('h264', 'main', '3.1', 25, 10000,
                                   GOP(0, 30, 30, 2, 0, 0, 3), 'faster'),
                        Resolution(256, 144, ImageScaling('lanczos'),
                                   '1:1'),
                        30.0,
                        [
                            ImageFilter('unsharp',
                                        {'value': '5:5:0.5:3:3:0.0'})
                        ],
                        '30000/1001')))).execute()
    assert_that(group.jobs[0], instance_of(TranscodeJob))
    assert_that(group.group_id, is_('g'))
    self.assertEqual(transcode2_request,
                     json.loads(httpretty.last_request().body))
def test_extract_poster_request(self):
    """Extract-poster request posts second/format specification and parses
    the returned job group."""
    # Stub response: a one-job group in 'pending' state.
    payload = {
        'groupId': 'g',
        'jobs': [{
            'id': 'g_1',
            'type': 'urn:job:av.poster',
            'groupId': 'g',
            'status': 'pending',
            'specification': {
                'second': 20,
                'percentage': None,
                'destination': {
                    'path': '/video.poster.jpg',
                    'directory': '/',
                    'acl': 'public'
                },
                'format': 'jpg'
            },
            'sources': [{
                'path': '/video.mp4',
                'fileId': '123'
            }],
            'result': None,
            'issuer': 'urn:app:app-id-1',
            'dateUpdated': '2017-05-23T08:34:43Z',
            'dateCreated': '2017-05-23T08:34:43Z',
        }]
    }
    response = RestResult(0, 'OK', payload)
    httpretty.register_uri(httpretty.POST,
                           'https://fish.barrel/_api/av/poster',
                           body=json.dumps(response.serialize()))
    group = self.video_service.extract_poster_request().add_sources(
        Source('/video.mp4')).add_specifications(
            ExtractPosterSpecification(20,
                                       Destination('/video.poster.jpg'),
                                       'jpg')).execute()
    assert_that(group.jobs[0], instance_of(ExtractPosterJob))
    assert_that(group.group_id, is_('g'))
    # The request source carries no fileId (only a path was supplied).
    assert_that(
        json.loads(httpretty.last_request().body),
        is_({
            'specifications': [{
                'second': 20,
                'percentage': None,
                'destination': {
                    'directory': None,
                    'path': '/video.poster.jpg',
                    'lifecycle': None,
                    'acl': 'public',
                    'bucket': None
                },
                'format': 'jpg'
            }],
            'sources': [{
                'path': '/video.mp4',
                'fileId': None
            }]
        }))
def test_create_archive_request(self):
    """Create-archive request with a callback posts the expected body and
    parses the returned CreateArchiveJob."""
    # Stub response: a single pending archive-creation job.
    payload = {
        'type': 'urn:job:archive.create',
        'id': 'g_1',
        'groupId': 'g',
        'status': 'pending',
        'issuer': 'urn:app:app-id',
        'sources': [{
            'path': '/video.mp4'
        }],
        'specification': {
            'sources': [{
                'path': '/video.mp4'
            }],
            'destination': {
                'path': '/video.tar',
                'acl': 'public'
            },
            'archiveType': 'tar'
        },
        'callback': {
            'url': 'https://call.me.back/'
        },
        'dateUpdated': '2017-05-22T07:17:44Z',
        'dateCreated': '2017-05-22T07:17:44Z'
    }
    response = RestResult(0, 'OK', payload)
    httpretty.register_uri(httpretty.POST,
                           'https://fish.appspot.com/_api/archive/create',
                           body=json.dumps(response.serialize()))
    job = self.archive_service.create_archive_request().add_sources(
        Source('/video.mp4')).set_destination(
            Destination('/video.tar')).set_archive_type(
                ArchiveType.tar).set_callback(
                    Callback('https://call.me.back/')).execute()
    assert_that(job, instance_of(CreateArchiveJob))
    assert_that(job.group_id, is_('g'))
    # A url-only Callback serializes with null headers/attachment and
    # passthrough defaulting to False.
    assert_that(
        json.loads(httpretty.last_request().body),
        is_({
            'archiveType': 'tar',
            'sources': [{
                'path': '/video.mp4',
                'fileId': None
            }],
            'destination': {
                'directory': None,
                'path': '/video.tar',
                'lifecycle': None,
                'acl': 'public',
                'bucket': None
            },
            'jobCallback': {
                'url': 'https://call.me.back/',
                'headers': None,
                'attachment': None,
                'passthrough': False
            }
        }))
def test_extract_archive_request(self):
    """Extract-archive request with an extraction report posts the expected
    body and parses the returned ExtractArchiveJob."""
    # Stub response: a completed extraction job whose result payload
    # carries the report file's descriptor.
    payload = {
        'id': 'g_1',
        'groupId': '8c9063175f214bd78f8f6391dbc49a93',
        'type': 'urn:job:archive.extract',
        'issuer': 'urn:app:app-id',
        'status': 'success',
        'sources': [{
            'path': '/video.zip',
            'fileId': 'file id'
        }],
        'specification': {
            'source': {
                'path': '/video.zip',
                'fileId': 'file id'
            },
            'destination': {
                'directory': '/video',
                'path': None,
                'acl': 'public',
                'bucket': None
            },
            'extractedFilesReport': {
                'destination': {
                    'directory': None,
                    'path': '/video.report.json',
                    'acl': 'public'
                },
                'format': 'json'
            }
        },
        'result': {
            'message': 'OK',
            'code': 0,
            'payload': {
                'reportFileDescriptor': {
                    'path': '/video.report.json',
                    'id': 'report file id',
                    'acl': 'public',
                    'mimeType': 'application/json',
                    'size': 1718,
                    'hash': None,
                    'type': '-',
                    'dateUpdated': '2017-07-30T12:46:39Z',
                    'dateCreated': '2017-07-30T12:46:39Z',
                }
            }
        },
        'dateCreated': '2017-07-30T12:46:31Z',
        'dateUpdated': '2017-07-30T12:46:40Z',
    }
    response = RestResult(0, 'OK', payload)
    httpretty.register_uri(httpretty.POST,
                           'https://fish.appspot.com/_api/archive/extract',
                           body=json.dumps(response.serialize()))
    job = self.archive_service.extract_archive_request().set_source(
        Source('/video.zip')).set_destination(
            Destination(directory='/video')).set_report(
                ExtractionReport(Destination(path='/video.report.json'),
                                 ExtractionReportFormat.json)).execute()
    assert_that(job, instance_of(ExtractArchiveJob))
    # NOTE(review): the payload's groupId is a hash, yet 'g' is asserted —
    # presumably group_id is derived from the job id 'g_1', not from the
    # groupId field; confirm against Job's group_id implementation.
    assert_that(job.group_id, is_('g'))
    assert_that(
        json.loads(httpretty.last_request().body),
        is_({
            'source': {
                'path': '/video.zip',
                'fileId': None
            },
            'destination': {
                'directory': '/video',
                'path': None,
                'lifecycle': None,
                'acl': 'public',
                'bucket': None
            },
            'extractedFilesReport': {
                'destination': {
                    'directory': None,
                    'path': '/video.report.json',
                    'lifecycle': None,
                    'acl': 'public',
                    'bucket': None
                },
                'format': 'json'
            },
            'jobCallback': None
        }))
from unittest import TestCase from media_platform.job.extract_archive_job import ExtractArchiveJob, ExtractArchiveSpecification, ExtractionReport, \ ExtractionReportFormat from media_platform.lang import datetime_serialization from media_platform.service.destination import Destination from media_platform.service.file_descriptor import ACL from media_platform.job.job import Job from media_platform.service.source import Source source = Source('/source.jpg') destination = Destination('/path.png', acl=ACL.private) data = { 'status': 'pending', 'dateCreated': '2001-12-25T00:00:00Z', 'sources': [source.serialize()], 'result': None, 'id': 'group-id_job-key', 'issuer': 'urn:member:xxx', 'specification': { 'source': source.serialize(), 'destination': destination.serialize(), 'extractedFilesReport': { 'destination': destination.serialize(), 'format': 'json' } }, 'groupId': 'group-id', 'flowId': None, 'dateUpdated': '2001-12-25T00:00:00Z',
def test_extract_storyboard_request(self):
    """Extract-storyboard request posts the full tiling specification
    (columns/rows/tile size/segment duration) and parses the job group."""
    # Stub response: a one-job group in 'pending' state.
    payload = {
        'groupId': 'g',
        'jobs': [{
            'id': 'g_1',
            'type': 'urn:job:av.storyboard',
            'groupId': 'g',
            'status': 'pending',
            'specification': {
                'columns': 5,
                'rows': 6,
                'destination': {
                    'path': '/video.story.jpg',
                    'directory': '/',
                    'acl': 'public'
                },
                'format': 'jpg'
            },
            'sources': [{
                'path': '/video.mp4',
                'fileId': '123'
            }],
            'result': None,
            'issuer': 'urn:app:app-id-1',
            'dateUpdated': '2017-05-23T08:34:43Z',
            'dateCreated': '2017-05-23T08:34:43Z',
        }]
    }
    response = RestResult(0, 'OK', payload)
    httpretty.register_uri(httpretty.POST,
                           'https://fish.barrel/_api/av/storyboard',
                           body=json.dumps(response.serialize()))
    group = self.video_service.extract_storyboard_request().add_sources(
        Source('/video.mp4')).add_specifications(
            ExtractStoryboardSpecification(Destination('/video.story.jpg'),
                                           5, 6, 256, 512, 'jpg',
                                           12.25)).execute()
    assert_that(group.jobs[0], instance_of(ExtractStoryboardJob))
    assert_that(group.group_id, is_('g'))
    assert_that(
        json.loads(httpretty.last_request().body),
        is_({
            'specifications': [{
                'rows': 6,
                'tileHeight': 512,
                'format': 'jpg',
                'tileWidth': 256,
                'destination': {
                    'directory': None,
                    'path': '/video.story.jpg',
                    'lifecycle': None,
                    'acl': 'public',
                    'bucket': None
                },
                'segmentDuration': 12.25,
                'columns': 5
            }],
            'sources': [{
                'path': '/video.mp4',
                'fileId': None
            }],
            'jobCallback': None
        }))