def is_service_enabled(service_name):
    """Return whether the service with the given name (e.g., "lambda") is available.

    Best-effort: any failure (unknown service, unreachable endpoint) yields False.
    """
    try:
        url = get_local_service_url(service_name)
        # explicit check instead of `assert` - asserts are stripped under `python -O`,
        # which would let a falsy URL slip through to is_port_open()
        if not url:
            return False
        return is_port_open(url, http_path="/", expect_success=False)
    except Exception:
        # deliberately swallow errors: an unresolvable service is simply "not enabled"
        return False
def test_run_and_stop_server(self):
    """Start an async server, wait until it accepts connections, then stop it and verify the port closes."""
    port = get_free_tcp_port()
    host = "127.0.0.1"

    LOG.info("%.2f starting server on port %d", time.time(), port)
    server = run_server(port=port, bind_address=host, asynchronous=True)
    try:
        base_url = f"http://{host}:{port}"
        came_up = poll_condition(lambda: is_port_open(base_url, http_path="/"), timeout=15)
        assert came_up, f"gave up waiting for port {port}"
    finally:
        LOG.info("%.2f stopping server on port %d", time.time(), port)
        server.stop()
        LOG.info("%.2f waiting on server to shut down", time.time())
        server.join(timeout=15)
        assert not is_port_open(port), "port is still open after stop"
        LOG.info("%.2f port stopped %d", time.time(), port)
def test_start_and_stop(self, monkeypatch):
    """Start a service proxy on a free port, verify it comes up, then stop it and verify the port closes."""
    monkeypatch.setattr(config, "FORWARD_EDGE_INMEM", False)

    proxy_port = get_free_tcp_port()
    backend_port = get_free_tcp_port()

    proxy = start_proxy_for_service(
        "myservice",
        proxy_port,
        backend_port,
        update_listener=None,
        quiet=True,
    )
    assert proxy
    try:
        port_open = poll_condition(lambda: is_port_open(proxy_port), timeout=15)
        assert port_open
    finally:
        proxy.stop()
        proxy.join(timeout=15)
        assert not is_port_open(proxy_port)
def start_runtime_components():
    """Start the LocalStack runtime (edge proxy + internal APIs) and return the edge server thread.

    Raises TimeoutError if the edge server does not open its HTTP port in time.
    """
    from localstack.services.edge import start_edge
    from localstack.services.internal import LocalstackResourceHandler, get_internal_apis

    # serve internal APIs through the generic proxy
    ProxyListener.DEFAULT_LISTENERS.append(LocalstackResourceHandler(get_internal_apis()))

    # TODO: we want a composable LocalStack runtime (edge proxy, service manager, dns, ...)
    edge_thread = start_thread(start_edge, quiet=False)

    # TODO: properly encapsulate starting/stopping of edge server in a class
    def _edge_is_up():
        # re-read the port on every poll, matching the polling semantics of the check
        return is_port_open(config.get_edge_port_http())

    if not poll_condition(_edge_is_up, timeout=15, interval=0.3):
        if LOG.isEnabledFor(logging.DEBUG):
            # make another call with quiet=False to print detailed error logs
            is_port_open(config.get_edge_port_http(), quiet=False)
        raise TimeoutError(
            f"gave up waiting for edge server on {config.EDGE_BIND_HOST}:{config.EDGE_PORT}"
        )
    return edge_thread
def test_run_and_stop_server_from_different_threads(self):
    """Like test_run_and_stop_server, but invokes stop() from a separate thread.

    Uses plain asserts and f-strings for consistency with the sibling
    test_run_and_stop_server (the original mixed self.assertTrue/assertFalse
    with a %-formatted message that carried a stray trailing space).
    """
    port = get_free_tcp_port()
    host = "127.0.0.1"
    LOG.info("%.2f starting server on port %d", time.time(), port)
    thread = run_server(port=port, bind_address=host, asynchronous=True)
    try:
        url = f"http://{host}:{port}"
        assert poll_condition(
            lambda: is_port_open(url, http_path="/"), timeout=15
        ), f"gave up waiting for port {port}"
    finally:
        LOG.info("%.2f stopping server on port %d", time.time(), port)
        # stop from a different thread than the one that started the server
        threading.Thread(target=thread.stop).start()
        LOG.info("%.2f waiting on server to shut down", time.time())
        thread.join(timeout=15)
        assert not is_port_open(port), "port is still open after stop"
        LOG.info("%.2f port stopped %d", time.time(), port)
def proxy_server(proxy_listener, host="127.0.0.1", port=None) -> str:
    """
    Create a temporary proxy server on a random port (or the specified port) with the given
    proxy listener for the duration of the context manager.

    Yields the base URL of the proxy; the server is always stopped on exit, even if the
    managed block raises or the server fails to come up.
    """
    from localstack.services.generic_proxy import start_proxy_server

    port = port or get_free_tcp_port()
    thread = start_proxy_server(port, bind_address=host, update_listener=proxy_listener)
    url = f"http://{host}:{port}"
    try:
        # original called thread.stop() after the yield without a finally, leaking the
        # server whenever the with-block (or the startup assert) raised
        assert poll_condition(
            lambda: is_port_open(port), timeout=5
        ), f"server on port {port} did not start"
        yield url
    finally:
        thread.stop()
def aws_cmd(service, env):
    """Build the shell command line for invoking the aws CLI against the given service/env."""
    # TODO: use boto3 instead of running aws-cli commands here!
    env = aws_stack.get_environment(env)
    cmd = '{ test `which aws` || . .venv/bin/activate; }; aws'
    # point the CLI at the local endpoint only when targeting the local region
    endpoint = aws_stack.get_local_service_url(service) if env.region == REGION_LOCAL else None
    if endpoint:
        if endpoint.startswith('https://'):
            cmd += ' --no-verify-ssl'
        cmd = '%s --endpoint-url="%s"' % (cmd, endpoint)
        if not is_port_open(endpoint):
            raise socket.error()
    cmd = '%s %s' % (cmd, service)
    return cmd
def aws_cmd(service, env):
    """Assemble the aws CLI invocation string for a service, wiring in the local endpoint if applicable."""
    # TODO: use boto3 instead of running aws-cli commands here!
    env = aws_stack.get_environment(env)
    base = '{ test `which aws` || . .venv/bin/activate; }; aws'
    if env.region != REGION_LOCAL:
        # non-local region: plain CLI call, no endpoint override
        return '%s %s' % (base, service)
    endpoint_url = aws_stack.get_local_service_url(service)
    if endpoint_url:
        if endpoint_url.startswith('https://'):
            base += ' --no-verify-ssl'
        base = '%s --endpoint-url="%s"' % (base, endpoint_url)
        if not is_port_open(endpoint_url):
            raise socket.error()
    return '%s %s' % (base, service)
def start_dns_server(asynchronous=False):
    # Start the local DNS server, if available. Best-effort: the DNS server lives
    # in the localstack_ext package, so if it is not installed (or anything else
    # fails) this function silently becomes a no-op.
    try:
        # start local DNS server, if present
        from localstack_ext import config as config_ext
        from localstack_ext.services import dns_server
        if config_ext.DNS_ADDRESS in config.FALSE_STRINGS:
            # DNS support explicitly disabled via configuration
            return
        if is_port_open(PORT_DNS):
            # something already listens on the DNS port - assume a server is running
            return
        if is_root():
            # running as root: we can bind the (privileged) DNS port in-process
            result = dns_server.start_servers()
            if not asynchronous:
                # synchronous mode: block the calling thread indefinitely
                sleep_forever()
            return result
        # not root: escalate via sudo in a separate process
        # note: running in a separate process breaks integration with Route53 (to be fixed for local dev mode!)
        return run_process_as_sudo("dns", PORT_DNS, asynchronous=asynchronous)
    except Exception:
        # deliberate best-effort: missing localstack_ext or startup errors are ignored
        pass
def http_server(handler, host="127.0.0.1", port=None) -> str:
    """
    Create a temporary http server on a random port (or the specified port) with the given
    handler for the duration of the context manager.

    Example usage:

        def handler(request, data):
            print(request.method, request.path, data)

        with testutil.http_server(handler) as url:
            requests.post(url, json={"message": "hello"})

    Yields the base URL of the server; the server is always stopped on exit, even if the
    managed block raises or the server fails to come up.
    """
    from localstack.utils.server.http2_server import run_server

    port = port or get_free_tcp_port()
    thread = run_server(port, host, handler=handler, asynchronous=True)
    url = f"http://{host}:{port}"
    try:
        # original called thread.stop() after the yield without a finally, leaking the
        # server whenever the with-block (or the startup assert) raised
        assert poll_condition(
            lambda: is_port_open(port), timeout=5
        ), f"server on port {port} did not start"
        yield url
    finally:
        thread.stop()
def get_service_status(service, port=None):
    """Return the status of a service: 'disabled', 'running', or 'stopped'."""
    port = port or config.parse_service_ports().get(service)
    if (port or 0) <= 0:
        # no (positive) port configured for this service
        return 'disabled'
    return 'running' if is_port_open(port) else 'stopped'
def get_service_status(service, port=None):
    """Look up the service's port (falling back to the configured port map) and report its status."""
    resolved = port or config.parse_service_ports().get(service)
    if not resolved or resolved <= 0:
        # no usable port -> the service is not enabled at all
        return "disabled"
    if is_port_open(resolved):
        return "running"
    return "stopped"
def health(self):
    """
    Run a health check against this server.

    The default implementation simply probes the server's URL with is_port_open.
    """
    endpoint = self.url
    return is_port_open(endpoint)