def test_zero_data(sample_data):
    """An empty stream must abort the upload and cancel the multipart."""
    bucket = FakeBucket()
    supervisor = UploadSupervisor(
        StreamHandler(StringIO()), 'test', bucket=bucket)
    # Uploading zero bytes is an error; the multipart must be rolled back.
    with pytest.raises(UploadException):
        supervisor.main_loop(worker_class=DummyWorker)
    assert bucket._multipart._canceled is True
def test_supervisor_loop(sample_data):
    """A normal upload completes the multipart and yields the expected etag."""
    bucket = FakeBucket()
    supervisor = UploadSupervisor(
        StreamHandler(sample_data), 'test', bucket=bucket)
    result_etag = supervisor.main_loop(worker_class=DummyWorker)
    # Composite etag: md5-of-part-md5s followed by the part count.
    assert result_etag == '"d229c1fc0e509475afe56426c89d2724-2"'
    assert bucket._multipart._completed
def test_integration(sample_data):
    """End-to-end upload against a real S3 bucket taken from the config.

    Verifies the computed etag matches what S3 reports and that the
    metadata headers survive the round trip.
    """
    cfg = get_config()
    handler = StreamHandler(sample_data)
    conn = boto.connect_s3(cfg['S3_KEY_ID'], cfg['S3_SECRET'])
    bucket = conn.get_bucket(cfg['BUCKET'])
    # Timestamped key so repeated runs don't collide.
    key_name = "z3_test_" + datetime.now().strftime("%Y%m%d_%H-%M-%S")
    supervisor = UploadSupervisor(
        handler,
        key_name,
        bucket=bucket,
        headers=parse_metadata(["ana=are+mere", "dana=are=pere"]),
    )
    computed_etag = supervisor.main_loop()
    uploaded = bucket.get_key(key_name)
    assert computed_etag == '"d229c1fc0e509475afe56426c89d2724-2"'
    assert computed_etag == uploaded.etag
    assert uploaded.metadata == {"ana": "are+mere", "dana": "are=pere"}
def test_handle_results():
    """_handle_results drains the inbox and records (index, md5) pairs."""
    supervisor = UploadSupervisor(None, None, None)
    supervisor.inbox = Queue()
    supervisor._pending_chunks = 3
    # Results arrive out of order; the supervisor just collects them.
    supervisor.inbox.put(Result(success=True, traceback=None, index=1, md5='a'))
    supervisor.inbox.put(Result(success=True, traceback=None, index=3, md5='c'))
    supervisor.inbox.put(Result(success=True, traceback=None, index=2, md5='b'))
    supervisor._handle_results()
    assert sorted(supervisor.results) == [(1, 'a'), (2, 'b'), (3, 'c')]
    # Every queued result was accounted for.
    assert supervisor._pending_chunks == 0
def test_supervisor_loop_with_worker_crash(sample_data):
    """A worker that dies mid-upload surfaces as WorkerCrashed."""
    supervisor = UploadSupervisor(
        StreamHandler(sample_data), 'test', bucket=FakeBucket())
    with pytest.raises(WorkerCrashed):
        supervisor.main_loop(worker_class=ErrorWorker)