def test_ctor(self) -> None:
    """The constructor stores code, message and headers exactly as given."""
    exc = http_exceptions.HttpProcessingError(
        code=500,
        message="Internal error",
        headers={},
    )
    assert exc.code == 500
    assert exc.message == "Internal error"
    assert exc.headers == {}
def _download_file(url, filepath, validator, LOGGER):
    """Synchronously download *url* into *filepath* using ``requests``.

    Parameters:
        url: source URL to GET.
        filepath: destination path, written in binary mode.
        validator: optional zero-argument callable run after the write
            (presumably verifies the downloaded file — confirm with callers).
        LOGGER: logger used for debug messages.

    Returns:
        ``(filepath, (status_code, headers_items_tuple))`` on success.

    Raises:
        web.HTTPNotFound: on a 404 response.
        http_exceptions.HttpProcessingError: on any other non-200 status.
    """
    try:
        with requests.get(url) as response:
            LOGGER.debug("downloading {0} ... {1} ...".format(url, filepath))
            if response.status_code == 404:
                LOGGER.debug("error downloading {0} ... {1} ".format(
                    url, filepath))
                raise web.HTTPNotFound()
            if response.status_code != 200:
                LOGGER.debug("error downloading {0} ... {1} ".format(
                    url, filepath))
                raise http_exceptions.HttpProcessingError(
                    code=response.status_code,
                    message=response.reason,
                    headers=response.headers,
                )
            # Open (and truncate) the output file only after we know the
            # request succeeded; the original opened it before the status
            # check and left an empty file behind on every error response.
            with open(filepath, "wb") as f:
                f.write(response.content)
            LOGGER.debug("validating {0} ...".format(filepath))
            if validator is not None:
                validator()
            LOGGER.debug("download done {0}".format(url))
            return filepath, (response.status_code,
                              tuple(response.headers.items()))
    except Exception:
        LOGGER.debug("error downloading {0} ... {1} ".format(url, filepath))
        raise  # bare raise keeps the original exception and traceback intact
def test_pickle(self) -> None:
    """The exception (including extra attributes) round-trips through
    every supported pickle protocol."""
    original = http_exceptions.HttpProcessingError(
        code=500, message='Internal error', headers={})
    original.foo = 'bar'
    for protocol in range(pickle.HIGHEST_PROTOCOL + 1):
        restored = pickle.loads(pickle.dumps(original, protocol))
        assert restored.code == 500
        assert restored.message == 'Internal error'
        assert restored.headers == {}
        assert restored.foo == 'bar'
def get_datafile(data_idx, base_url):
    """Fetch ``base_url + data_idx`` and return the raw response body.

    Legacy generator-based coroutine (``yield from`` style).

    Raises:
        web.HTTPNotFound: on a 404 response.
        http_exceptions.HttpProcessingError: on any other non-200 status.
    """
    url = base_url + data_idx
    with aiohttp.ClientSession() as session:
        response = yield from session.request('GET', url)
        if response.status == 404:
            raise web.HTTPNotFound()
        if response.status != 200:
            raise http_exceptions.HttpProcessingError(
                code=response.status,
                message=response.reason,
                headers=response.headers,
            )
        payload = yield from response.read()
        return payload
async def _download_file(url, filepath, session, loop, validator, semaphore, LOGGER):
    """Asynchronously download *url* into *filepath* in 8 KiB chunks.

    Parameters:
        url: source URL to GET.
        filepath: destination path, written in binary mode.
        session: aiohttp client session used for the request.
        loop: event loop, consulted only to suppress logging after shutdown.
        validator: optional zero-argument callable run after the write
            (presumably verifies the downloaded file — confirm with callers).
        semaphore: concurrency limiter held for the duration of the request.
        LOGGER: logger used for debug/error messages.

    Returns:
        ``(filepath, (status, headers_items_tuple))`` on success.

    Raises:
        web.HTTPNotFound: on a 404 response.
        http_exceptions.HttpProcessingError: on any other non-200 status.
        asyncio.TimeoutError: when the 600 s request timeout elapses.
    """
    try:
        async with semaphore, await session.get(url, timeout=600, ssl=False) as response:
            LOGGER.debug("downloading {0} ... {1} ...".format(url, filepath))
            if response.status == 404:
                LOGGER.debug("error downloading {0} ... {1} ".format(
                    url, filepath))
                raise web.HTTPNotFound()
            if response.status != 200:
                LOGGER.debug("error downloading {0} ... {1} ".format(
                    url, filepath))
                raise http_exceptions.HttpProcessingError(
                    code=response.status,
                    message=response.reason,
                    headers=response.headers,
                )
            # Open (and truncate) the output file only after a 200; the
            # original opened it before the status check and left an empty
            # file behind on every error response.
            with open(filepath, "wb") as f:
                while True:
                    chunk = await response.content.read(8192)
                    if not chunk:
                        break
                    f.write(chunk)
            LOGGER.debug("validating {0} ...".format(filepath))
            if validator is not None:
                validator()
            LOGGER.debug("download done {0}".format(url))
            return filepath, (response.status, tuple(response.headers.items()))
    except asyncio.TimeoutError:
        LOGGER.error("request timed out: {0}".format(url))
        raise  # bare raise keeps the original traceback
    except Exception:
        # Skip logging once the loop is closed (logger handlers may be gone),
        # but always propagate the error instead of silently swallowing it.
        if not loop.is_closed():
            LOGGER.debug("error downloading {0} ... {1} ".format(
                url, filepath))
        raise
def test_repr(self) -> None:
    """repr() includes the class name, status code and message."""
    exc = http_exceptions.HttpProcessingError(
        code=500, message='Internal error', headers={})
    expected = "<HttpProcessingError: 500, message='Internal error'>"
    assert repr(exc) == expected
def test_str(self) -> None:
    """str() renders the status code followed by the message."""
    exc = http_exceptions.HttpProcessingError(
        code=500, message='Internal error', headers={})
    assert str(exc) == "500, message='Internal error'"
def function2392():
    """HttpProcessingError exposes the code and message it was built with."""
    error = http_exceptions.HttpProcessingError(
        code=500, message='Internal error')
    assert error.code == 500
    assert error.message == 'Internal error'
def test_http_error_exception() -> None:
    """A freshly constructed HttpProcessingError keeps its code and message."""
    error = http_exceptions.HttpProcessingError(
        code=500, message='Internal error')
    assert error.code == 500
    assert error.message == 'Internal error'