def test_async_complete_multipart_upload(self, ensure_async_mock, handle_async_errors_mock, retrieve_results_mock):
    """
    Completing a multipart upload must first drain the async machinery
    (workers started, errors surfaced, pending results collected) and
    then enqueue a completion job carrying the up-to-date parts list.
    """
    cloud_interface = CloudInterface(url='s3://bucket/path/to/dir', encryption=None)
    cloud_interface.queue = mock.MagicMock()
    cloud_interface.parts_db = {"key": ["part", "list"]}

    # Simulate a part result that is only collected during the
    # _retrieve_results() call, so we can verify the completion job is
    # built AFTER results have been refreshed.
    def simulate_late_result():
        cloud_interface.parts_db["key"].append("complete")

    retrieve_results_mock.side_effect = simulate_late_result

    cloud_interface.async_complete_multipart_upload('mpu', 'key', 3)

    ensure_async_mock.assert_called_once_with()
    handle_async_errors_mock.assert_called_once_with()
    retrieve_results_mock.assert_called_once_with()
    # The enqueued job must include the part appended during retrieval
    cloud_interface.queue.put.assert_called_once_with({
        "job_type": "complete_multipart_upload",
        "mpu": "mpu",
        "key": "key",
        "parts": ["part", "list", "complete"],
    })
def test_async_upload_part(self, ensure_async_mock, handle_async_errors_mock, temp_file_mock):
    """
    Uploading a part asynchronously must spool the body to a temporary
    file and enqueue an "upload_part" job referencing that file's name.

    Fix: the CloudInterface constructor is called with ``url=`` in every
    other test in this file; ``destination_url=`` was inconsistent and
    is brought in line with the siblings.
    """
    temp_name = "tmp_file"
    # The mocked NamedTemporaryFile context manager yields a stream whose
    # .name is what the job must reference
    temp_stream = temp_file_mock.return_value.__enter__.return_value
    temp_stream.name = temp_name

    interface = CloudInterface(url='s3://bucket/path/to/dir', encryption=None)
    interface.queue = Queue()

    interface.async_upload_part('mpu', 'test/key', BytesIO(b'test'), 1)

    ensure_async_mock.assert_called_once_with()
    handle_async_errors_mock.assert_called_once_with()
    # Exactly one job queued, pointing at the spooled temp file
    assert not interface.queue.empty()
    assert interface.queue.get() == {
        "job_type": "upload_part",
        "mpu": "mpu",
        "key": "test/key",
        "body": temp_name,
        "part_number": 1,
    }
def test_worker_process_main(self, worker_process_execute_job_mock):
    """
    The worker main loop must execute every queued job, acknowledge each
    queue item (including the None stop marker), and route execution
    errors to the errors queue without aborting the loop.
    """
    # Three real jobs followed by the None stop marker
    job_collection = [
        {"job_id": job_id, "job_type": "upload_part"}
        for job_id in (1, 2, 3)
    ]
    job_collection.append(None)

    interface = CloudInterface(url='s3://bucket/path/to/dir', encryption=None)
    interface.queue = mock.MagicMock()
    interface.errors_queue = Queue()
    interface.queue.get.side_effect = job_collection

    interface.worker_process_main(0)

    # All four items (3 jobs + stop marker) are fetched and acknowledged,
    # but only the 3 real jobs reach worker_process_execute_job
    assert interface.queue.get.call_count == 4
    assert worker_process_execute_job_mock.call_count == 3
    assert interface.queue.task_done.call_count == 4
    assert interface.errors_queue.empty()

    # Second run: job 2 blows up; the worker must capture the error in
    # errors_queue and keep processing the remaining jobs.
    def raise_on_job_two(job, process_number):
        if job["job_id"] == 2:
            raise Boto3Error("Something is gone wrong")

    interface.queue.reset_mock()
    worker_process_execute_job_mock.reset_mock()
    worker_process_execute_job_mock.side_effect = raise_on_job_two
    interface.queue.get.side_effect = job_collection

    interface.worker_process_main(0)

    assert interface.queue.get.call_count == 4
    # Still 3 executions: the stop marker never reaches the executor
    assert worker_process_execute_job_mock.call_count == 3
    assert interface.queue.task_done.call_count == 4
    # Exactly one error captured, then the queue is drained
    assert interface.errors_queue.get() == "Something is gone wrong"
    assert interface.errors_queue.empty()
def test_retrieve_results(self):
    """
    _retrieve_results() must drain the result queue, store each file's
    parts in parts_db sorted by part number, and record per-part end
    times in the upload statistics.
    """
    interface = CloudInterface(url='s3://bucket/path/to/dir', encryption=None)
    interface.queue = Queue()
    interface.done_queue = Queue()
    interface.result_queue = Queue()
    interface.errors_queue = Queue()

    # With nothing queued, the parts DB stays empty
    interface._retrieve_results()
    assert len(interface.parts_db) == 0

    # Preset the upload statistics, to avoid a random start_date
    for name in ["test/file", "test/another_file"]:
        interface.upload_stats[name] = FileUploadStatistics(
            status='uploading',
            start_time=datetime.datetime(2016, 3, 30, 17, 1, 0),
        )

    # Queue the mock results deliberately out of order; after the refresh
    # the result queue must be empty and parts_db ordered by part number
    pending_results = [
        {
            "key": "test/file",
            "part_number": 2,
            "end_time": datetime.datetime(2016, 3, 30, 17, 2, 20),
            "part": {
                "ETag": "becb2f30c11b6a2b5c069f3c8a5b798c",
                "PartNumber": "2"
            }
        },
        {
            "key": "test/file",
            "part_number": 1,
            "end_time": datetime.datetime(2016, 3, 30, 17, 1, 20),
            "part": {
                "ETag": "27960aa8b7b851eb0277f0f3f5d15d68",
                "PartNumber": "1"
            }
        },
        {
            "key": "test/file",
            "part_number": 3,
            "end_time": datetime.datetime(2016, 3, 30, 17, 3, 20),
            "part": {
                "ETag": "724a0685c99b457d4ddd93814c2d3e2b",
                "PartNumber": "3"
            }
        },
        {
            "key": "test/another_file",
            "part_number": 1,
            "end_time": datetime.datetime(2016, 3, 30, 17, 5, 20),
            "part": {
                "ETag": "89d4f0341d9091aa21ddf67d3b32c34a",
                "PartNumber": "1"
            }
        },
    ]
    for result in pending_results:
        interface.result_queue.put(result)

    interface._retrieve_results()

    assert interface.result_queue.empty()
    assert interface.parts_db == {
        "test/file": [{
            "ETag": "27960aa8b7b851eb0277f0f3f5d15d68",
            "PartNumber": "1"
        }, {
            "ETag": "becb2f30c11b6a2b5c069f3c8a5b798c",
            "PartNumber": "2"
        }, {
            "ETag": "724a0685c99b457d4ddd93814c2d3e2b",
            "PartNumber": "3"
        }],
        "test/another_file": [{
            "ETag": "89d4f0341d9091aa21ddf67d3b32c34a",
            "PartNumber": "1"
        }]
    }
    # Each part's end_time must be folded into the upload statistics
    assert interface.upload_stats == {
        'test/another_file': {
            'start_time': datetime.datetime(2016, 3, 30, 17, 1, 0),
            'status': 'uploading',
            'parts': {
                1: {
                    'end_time': datetime.datetime(2016, 3, 30, 17, 5, 20),
                    'part_number': 1,
                },
            },
        },
        'test/file': {
            'start_time': datetime.datetime(2016, 3, 30, 17, 1, 0),
            'status': 'uploading',
            'parts': {
                1: {
                    'end_time': datetime.datetime(2016, 3, 30, 17, 1, 20),
                    'part_number': 1,
                },
                2: {
                    'end_time': datetime.datetime(2016, 3, 30, 17, 2, 20),
                    'part_number': 2,
                },
                3: {
                    'end_time': datetime.datetime(2016, 3, 30, 17, 3, 20),
                    'part_number': 3,
                },
            },
        },
    }