def setUp(self, timeout_seconds=5):
    """Serve a temporary directory over a local http.server and wait for it to accept connections.

    Everything entered here is recorded in self.contexts so tearDown can unwind it.
    """
    self.contexts = []
    self.tempdir = tempfile.TemporaryDirectory()
    self.contexts.append(self.tempdir)
    self.port = unused_tcp_port()
    server_process = ContextualChildProcess(
        [
            "python",
            "-m",
            "http.server",
            str(self.port),
            "--bind",
            "127.0.0.1",
        ],
        cwd=self.tempdir.name,
    ).__enter__()
    self.contexts.append(server_process)

    # Poll until the server answers, or re-raise the connection error once
    # the deadline has passed.
    deadline = time.time() + timeout_seconds
    while True:
        try:
            requests.get("http://127.0.0.1:{port}".format(port=self.port))
            break
        except requests.ConnectionError:
            if time.time() > deadline:
                raise

    self.http_backend = HttpBackend(
        "http://127.0.0.1:{port}".format(port=self.port))
def test_retry(monkeypatch, http_server):
    """
    Verifies that the retry logic is reasonable.  Since we normally only retry
    on http connectivity issues, or 50x errors, we monkeypatch the list of HTTP
    status codes we retry on to {404}, and induce a 404 error.  We also start a
    thread that creates the file we are looking for.  Then we attempt to fetch
    the file.  It should fail a few times, and then successfully return the
    file.
    """
    tempdir, port = http_server
    http_backend = HttpBackend("http://127.0.0.1:{port}".format(port=port))

    def sleep_and_make_file():
        # Delay so the first few fetch attempts 404 before the file appears.
        time.sleep(5.0)
        data = os.urandom(1024)
        # BUGFIX: os.urandom returns bytes, so the file must be opened in
        # binary mode -- mode "w" would raise TypeError on write.
        with open(os.path.join(tempdir, "tileset.json"), "wb") as fh:
            fh.write(data)
            fh.flush()

    thread = threading.Thread(target=sleep_and_make_file)
    # Thread.setDaemon() is deprecated (since Python 3.10); set the attribute.
    thread.daemon = True
    thread.start()

    with monkeypatch.context() as mc:
        mc.setattr(_http, "RETRY_STATUS_CODES", frozenset({404}))

        # Should fail with 404 a few times, retry, then succeed once the
        # background thread has written the file.
        with http_backend.read_contextmanager("tileset.json") as cm:
            cm.read()
def test_checksum_good(http_server):
    """A read through the http backend succeeds when the expected checksum matches the data."""
    tempdir, port = http_server
    backend = HttpBackend("http://127.0.0.1:{port}".format(port=port))
    with _test_checksum_setup(tempdir) as setupdata:
        filename, data, expected_checksum = setupdata
        with backend.read_contextmanager(filename, expected_checksum) as cm:
            assert cm.read() == data
def test_error(self):
    """
    Verifies that we raise an exception when we fail to find a file.
    """
    # NOTE(review): the two assertRaises contexts are nested.  If the missing
    # "tileset.json" produces an HTTPError, the inner context (which expects
    # ChecksumValidationError) lets it propagate and the outer context catches
    # it, so the test passes.  If only ChecksumValidationError were raised,
    # the outer context would fail with "HTTPError not raised".  Convoluted --
    # confirm the nesting is intentional rather than an accidental paste.
    with self.assertRaises(HTTPError):
        backend = HttpBackend(
            "http://127.0.0.1:{port}".format(port=self.port))
        with self.assertRaises(ChecksumValidationError):
            with backend.read_contextmanager("tileset.json") as cm:
                cm.read()
def test_error(http_server):
    """
    Verifies that we raise an exception when we fail to find a file.
    """
    tempdir, port = http_server
    http_backend = HttpBackend("http://127.0.0.1:{port}".format(port=port))
    # NOTE(review): nested pytest.raises mirrors the unittest variant of this
    # test.  An HTTPError raised by read()/read_contextmanager propagates
    # through the inner context (wrong type) and satisfies the outer one; a
    # ChecksumValidationError alone would make the outer context fail.
    # Confirm the nesting is intentional.
    with pytest.raises(HTTPError):
        with pytest.raises(ChecksumValidationError):
            with http_backend.read_contextmanager("tileset.json") as cm:
                cm.read()
def test_checksum_bad(http_server):
    """A read through the http backend raises ChecksumValidationError for a non-matching checksum."""
    tempdir, port = http_server
    backend = HttpBackend("http://127.0.0.1:{port}".format(port=port))
    with _test_checksum_setup(tempdir) as setupdata:
        filename, data, expected_checksum = setupdata

        # Swap in a checksum that cannot match: the sha256 of the empty
        # string, plus one.
        expected_checksum = "{:x}".format(
            int(hashlib.sha256().hexdigest(), 16) + 1)

        with pytest.raises(ChecksumValidationError):
            with backend.read_contextmanager(filename, expected_checksum) as cm:
                assert cm.read() == data
def test_reentrant(http_server):
    """Two read contextmanagers over the same file can be interleaved without corrupting either read."""
    tempdir, port = http_server
    backend = HttpBackend("http://127.0.0.1:{port}".format(port=port))
    with _test_checksum_setup(tempdir) as setupdata:
        filename, data, expected_checksum = setupdata
        with backend.read_contextmanager(filename, expected_checksum) as outer_cm:
            # Read one byte, run a complete inner read, then finish the outer.
            partial = outer_cm.read(1)
            with backend.read_contextmanager(filename, expected_checksum) as inner_cm:
                inner_result = inner_cm.read()
            outer_result = partial + outer_cm.read()
        assert data == outer_result
        assert data == inner_result
def infer_backend(baseurl, backend_config=None):
    """
    Guess the backend based on the format of `baseurl`, the consistent part of the URL or file path.

    The backend_config dictionary can contain flexible parameters for the different backends.

    Caching parameter keys include:

    - ["caching"]["directory"] (default: None which disables caching)
    - ["caching"]["debug"] (default: False)
    - ["caching"]["size_limit"] (default: SIZE_LIMIT)

    :raises ValueError: if the scheme of `baseurl` is not file/http/https/s3.
    """
    config = {} if backend_config is None else backend_config
    parsed = urllib.parse.urlparse(baseurl)
    scheme = parsed.scheme

    # Local files never go through the cache layer; return immediately.
    if scheme == "file":
        local_path = get_path_from_parsed_file_url(parsed)
        return DiskBackend(fspath(local_path))

    if scheme in ("http", "https"):
        backend = HttpBackend(baseurl)
    elif scheme == "s3":
        backend = S3Backend(baseurl, config.get("s3", {}))
    else:
        raise ValueError(
            "Unable to infer backend for url {}, please verify that baseurl points to a valid "
            "directory or web address".format(baseurl))

    # these backends might use a cache.
    cache_config = config.get("caching", {})
    cache_dir = cache_config.get("directory", None)
    if cache_dir is not None:
        cache_dir = os.path.expanduser(cache_dir)
        size_limit = cache_config.get("size_limit", SIZE_LIMIT)
        if size_limit > 0:
            if cache_config.get("debug", False):
                print("> caching {} to {} (size_limit: {})".format(
                    baseurl, cache_dir, size_limit))
            backend = CachingBackend(cache_dir, backend, size_limit)
    return backend
class TestHttpBackend(unittest.TestCase):
    """unittest flavor of the http backend tests: serves a temp directory over http.server."""

    def setUp(self, timeout_seconds=5):
        """Start the server, wait for it to accept connections, and build the backend under test."""
        self.contexts = []
        self.tempdir = tempfile.TemporaryDirectory()
        self.contexts.append(self.tempdir)
        self.port = unused_tcp_port()
        server_process = ContextualChildProcess(
            [
                "python",
                "-m",
                "http.server",
                str(self.port),
                "--bind",
                "127.0.0.1",
            ],
            cwd=self.tempdir.name,
        ).__enter__()
        self.contexts.append(server_process)

        # Poll until the server answers, or re-raise once the deadline passes.
        deadline = time.time() + timeout_seconds
        while True:
            try:
                requests.get("http://127.0.0.1:{port}".format(port=self.port))
                break
            except requests.ConnectionError:
                if time.time() > deadline:
                    raise

        self.http_backend = HttpBackend(
            "http://127.0.0.1:{port}".format(port=self.port))

    def tearDown(self):
        """Unwind every context entered in setUp, most recent first."""
        for context in reversed(self.contexts):
            context.__exit__(*sys.exc_info())

    def test_checksum_good(self):
        """Reading succeeds when the expected checksum matches the data."""
        with self._test_checksum_setup(self.tempdir.name) as setupdata:
            filename, data, expected_checksum = setupdata
            with self.http_backend.read_contextmanager(
                    filename, expected_checksum) as cm:
                self.assertEqual(cm.read(), data)

    def test_checksum_bad(self):
        """Reading raises ChecksumValidationError for a non-matching checksum."""
        with self._test_checksum_setup(self.tempdir.name) as setupdata:
            filename, data, expected_checksum = setupdata
            # Swap in a checksum that cannot match: the sha256 of the empty
            # string, plus one.
            expected_checksum = "{:x}".format(
                int(hashlib.sha256().hexdigest(), 16) + 1)
            with self.assertRaises(ChecksumValidationError):
                with self.http_backend.read_contextmanager(
                        filename, expected_checksum) as cm:
                    self.assertEqual(cm.read(), data)

    def test_reentrant(self):
        """Two read contextmanagers over the same file can be interleaved."""
        with self._test_checksum_setup(self.tempdir.name) as setupdata:
            filename, data, expected_checksum = setupdata
            with self.http_backend.read_contextmanager(
                    filename, expected_checksum) as cm0:
                data0 = cm0.read(1)
                with self.http_backend.read_contextmanager(
                        filename, expected_checksum) as cm1:
                    data1 = cm1.read()
                data0 = data0 + cm0.read()
            self.assertEqual(data, data0)
            self.assertEqual(data, data1)

    @staticmethod
    @contextlib.contextmanager
    def _test_checksum_setup(tempdir):
        """
        Write some random data to a temporary file and yield its path, the
        data, and the checksum of the data.
        """
        # write the file
        data = os.urandom(1024)
        expected_checksum = hashlib.sha256(data).hexdigest()
        with tempfile.NamedTemporaryFile(dir=tempdir, delete=False) as tfh:
            tfh.write(data)
            yield os.path.basename(tfh.name), data, expected_checksum

    def test_error(self):
        """
        Verifies that we raise an exception when we fail to find a file.
        """
        with self.assertRaises(HTTPError):
            backend = HttpBackend(
                "http://127.0.0.1:{port}".format(port=self.port))
            with self.assertRaises(ChecksumValidationError):
                with backend.read_contextmanager("tileset.json") as cm:
                    cm.read()