async def test_putUrl_upload_fails_retried_succeeds(randbytes):
    # Fixed docstring: this test verifies the retry-then-succeed path, not
    # that an exception is raised (that wording was copied from the failure
    # test). Also use `uploadFromBuf`, matching the name every sibling test
    # calls on the same module.
    "When a putUrl upload's PUT fails with a 500, it is retried and succeeds"
    data = randbytes(10240)  # >8k to avoid using dataInline
    attempts = 0

    class Server(httptest.Handler):
        def do_PUT(self):
            nonlocal attempts
            attempts += 1
            if attempts > 2:
                # succeed on the third attempt
                self.send_response(200)
                self.end_headers()
            else:
                # fail the first two attempts with a retriable 500
                self.send_response(500)
                self.end_headers()
                self.wfile.write(b'uhoh')

    with httptest.Server(Server) as ts:
        objectService = FakeObject(ts)
        await upload.uploadFromBuf(
            projectId="taskcluster",
            expires=taskcluster.fromNow('1 hour'),
            contentType="text/plain",
            contentLength=len(data),
            name="some/object",
            data=data,
            objectService=objectService)
        # two failures plus the final successful attempt
        assert attempts == 3
def test_download_object_artifact(randbytes, monkeypatch):
    "Download an object artifact"
    # note: other artifact types are tested in tests/aio; this just
    # validates the sync wrappers
    payload = randbytes(1024)

    class Handler(httptest.Handler):
        def do_GET(self):
            self.send_response(200)
            self.send_header('content-type', 'text/plain')
            self.send_header('content-length', str(len(payload)))
            self.end_headers()
            self.wfile.write(payload)

    with httptest.Server(Handler) as server:
        # the wrapped async download will create an _async_ Object client,
        # so we use the fake from the async tests
        def make_fake_async_object(options):
            from aio.test_upload_download import FakeObject
            assert options["credentials"] == {"clientId": "c", "accessToken": "a"}
            assert options["rootUrl"] == "https://tc-testing.example.com"
            return FakeObject(server)

        monkeypatch.setattr(taskcluster.aio.download, "Object", make_fake_async_object)

        queue = FakeQueue("object", server)
        result, ctype = download.downloadArtifactToBuf(
            taskId='task-id',
            runId=1,
            name="public/test.data",
            queueService=queue)

    assert result == payload
    assert ctype == 'text/plain'
async def test_simple_download_fails_retried_succeeds(randbytes):
    "When a simple download's GET fails with a 500, it is retried successfully"
    attempts = 0
    payload = randbytes(1024)

    class Handler(httptest.Handler):
        def do_GET(self):
            nonlocal attempts
            attempts += 1
            if attempts <= 2:
                # the first two requests fail with a retriable server error
                self.send_response(500)
                self.end_headers()
                self.wfile.write(b'uhoh')
            else:
                # the third request serves the payload
                self.send_response(200)
                self.send_header('content-type', 'text/plain')
                self.send_header('content-length', str(len(payload)))
                self.end_headers()
                self.wfile.write(payload)

    with httptest.Server(Handler) as server:
        buf, content_type = await download.downloadToBuf(
            name="some/object",
            objectService=FakeObject(server))

    assert attempts == 3
    assert buf == payload
    assert content_type == 'text/plain'
def test_putUrl_upload_fails_retried(randbytes):
    "When a putUrl upload's PUT fails with a 500, an exception is raised"
    payload = randbytes(10240)  # >8k to avoid using dataInline
    attempts = 0

    class Handler(httptest.Handler):
        def do_PUT(self):
            nonlocal attempts
            attempts += 1
            # drain the request body before replying with an error
            self.rfile.read(len(payload))
            self.send_response(500)
            self.end_headers()
            self.wfile.write(b'uhoh')

    with httptest.Server(Handler) as server:
        with pytest.raises(aiohttp.ClientResponseError):
            upload.uploadFromBuf(
                projectId="taskcluster",
                expires=taskcluster.fromNow('1 hour'),
                contentType="text/plain",
                contentLength=len(payload),
                name="some/object",
                data=payload,
                objectService=FakeObject(server))

    assert attempts == 6  # one try plus five retries
def test_putUrl_upload(randbytes):
    """Test that upload works with a custom sync reader factory."""
    payload = randbytes(10240)  # >8k to avoid using dataInline
    received = b''

    class Handler(httptest.Handler):
        def do_PUT(self):
            nonlocal received
            received = self.rfile.read(len(payload))
            self.send_response(200)
            self.end_headers()

    def make_reader():
        # each call yields a fresh readable positioned at the start
        return io.BytesIO(payload)

    with httptest.Server(Handler) as server:
        upload.upload(
            projectId="taskcluster",
            expires=taskcluster.fromNow('1 hour'),
            contentType="text/plain",
            contentLength=len(payload),
            name="some/object",
            readerFactory=make_reader,
            objectService=FakeObject(server))

    assert received == payload
async def run(self, ctx):
    """Start an HTTP server for the command and record its port mapping."""
    # Port 8000 is http.server's default when no port argument is given
    given_port = self.cmd[-1] if self.cmd[-1].isdigit() else "8000"
    # Use the CGI handler when requested, otherwise serve files directly
    if "--cgi" in self.cmd:
        handler_class = http.server.CGIHTTPRequestHandler
    else:
        handler_class = http.server.SimpleHTTPRequestHandler
    # Serve from --directory when supplied, else from the context's cwd
    directory = ctx["cwd"]
    if "--directory" in self.cmd:
        directory = self.cmd[self.cmd.index("--directory") + 1]
    # Bind the handler to the chosen directory
    handler_class = functools.partial(handler_class, directory=directory)
    # Enter the server context manually; it listens on a random free port
    self.ts = httptest.Server(handler_class).__enter__()
    # Map the port the command asked for to the port actually in use
    ctx.setdefault("HTTP_SERVER", {})
    ctx["HTTP_SERVER"][given_port] = self.ts.server_port
    return self
def test_download(randbytes):
    """Test that download works with a custom sync writer factory."""
    payload = randbytes(1024)

    class Handler(httptest.Handler):
        def do_GET(self):
            self.send_response(200)
            self.end_headers()
            self.wfile.write(payload)

    sink = None

    def make_writer():
        # keep a handle on the buffer so the test can inspect it afterwards
        nonlocal sink
        sink = io.BytesIO()
        return sink

    with httptest.Server(Handler) as server:
        download.download(
            name="some/object",
            writerFactory=make_writer,
            objectService=FakeObject(server))

    assert bytes(sink.getbuffer()) == payload
async def test_download_object_artifact(randbytes, monkeypatch):
    "Download an object artifact"
    payload = randbytes(1024)

    class Handler(httptest.Handler):
        def do_GET(self):
            self.send_response(200)
            self.send_header('content-type', 'text/plain')
            self.send_header('content-length', str(len(payload)))
            self.end_headers()
            self.wfile.write(payload)

    with httptest.Server(Handler) as server:
        def make_fake_object(options):
            # the artifact fetch should build its Object client from the
            # caller's credentials and rootUrl
            assert options["credentials"] == {
                "clientId": "c",
                "accessToken": "a"
            }
            assert options["rootUrl"] == "https://tc-testing.example.com"
            return FakeObject(server)

        monkeypatch.setattr(taskcluster.aio.download, "Object", make_fake_object)

        buf, content_type = await download.downloadArtifactToBuf(
            taskId='task-id',
            runId=1,
            name="public/test.data",
            queueService=FakeQueue("object", server))

    assert buf == payload
    assert content_type == 'text/plain'
def test_simple_download_fails():
    "When a simple download's GET fails with a 400, an exception is raised and no retries occur"
    getcount = 0

    class Handler(httptest.Handler):
        def do_GET(self):
            nonlocal getcount
            getcount += 1
            self.send_response(400)
            self.end_headers()
            self.wfile.write(b'uhoh')

    with httptest.Server(Handler) as server:
        with pytest.raises(requests.RequestException):
            download.downloadToBuf(name="some/object", objectService=FakeObject(server))

    # client errors (4xx) must not be retried
    assert getcount == 1
def test_simple_download_fails_retried():
    "When a simple download's GET fails with a 500, an exception is raised after five retries"
    attempts = 0

    class Handler(httptest.Handler):
        def do_GET(self):
            nonlocal attempts
            attempts += 1
            # always fail so the client exhausts its retries
            self.send_response(500)
            self.end_headers()
            self.wfile.write(b'uhoh')

    with httptest.Server(Handler) as server:
        with pytest.raises(requests.RequestException):
            download.downloadToBuf(name="some/object", objectService=FakeObject(server))

    assert attempts == 6  # one try plus five retries
async def test_putUrl_upload_fails(randbytes):
    "When a putUrl upload's PUT fails with a 400, an exception is raised"
    payload = randbytes(10240)  # >8k to avoid using dataInline

    class Handler(httptest.Handler):
        def do_PUT(self):
            # a client error is terminal: no retries expected
            self.send_response(400)
            self.end_headers()
            self.wfile.write(b'uhoh')

    with httptest.Server(Handler) as server:
        with pytest.raises(aiohttp.ClientResponseError):
            await upload.uploadFromBuf(
                projectId="taskcluster",
                expires=taskcluster.fromNow('1 hour'),
                contentType="text/plain",
                contentLength=len(payload),
                name="some/object",
                data=payload,
                objectService=FakeObject(server))
async def test_download_s3_artifact(randbytes):
    "Download an S3 artifact"
    payload = randbytes(1024)

    class Handler(httptest.Handler):
        def do_GET(self):
            self.send_response(200)
            self.send_header('content-type', 'text/plain')
            self.send_header('content-length', str(len(payload)))
            self.end_headers()
            self.wfile.write(payload)

    with httptest.Server(Handler) as server:
        buf, content_type = await download.downloadArtifactToBuf(
            taskId='task-id',
            runId=1,
            name="public/test.data",
            queueService=FakeQueue("s3", server))

    assert buf == payload
    assert content_type == 'text/plain'