def start_tcp_proxy(src, dst, handler, **kwargs):
    """Run a simple TCP proxy (tunneling raw connections from src to dst), using a message
    handler that can be used to intercept messages and return predefined responses for
    certain requests.

    Arguments:
    src -- Source IP address and port string. I.e.: '127.0.0.1:8000'
    dst -- Destination IP address and port. I.e.: '127.0.0.1:8888'
    handler -- a handler function to intercept requests (returns tuple (forward_value, response_value))
    """
    src = "%s:%s" % (BIND_HOST, src) if is_number(src) else src
    dst = "%s:%s" % (LOCALHOST_IP, dst) if is_number(dst) else dst
    thread = kwargs.get("_thread")

    def ip_to_tuple(ip):
        # split 'host:port' into the (host, int(port)) tuple socket.bind/connect expect
        ip, port = ip.split(":")
        return ip, int(port)

    s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    s.bind(ip_to_tuple(src))
    s.listen(1)
    # timeout lets the accept loop periodically re-check thread.running
    s.settimeout(10)

    def handle_request(s_src, thread):
        # tunnel a single client connection (s_src) to a fresh upstream connection (s_dst)
        s_dst = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        s_dst.connect(ip_to_tuple(dst))
        sockets = [s_src, s_dst]
        try:
            while thread.running:
                s_read, _, _ = select.select(sockets, [], [])
                for s in s_read:
                    data = s.recv(BUFFER_SIZE)
                    # empty read means the peer closed the connection
                    if data in [b"", "", None]:
                        return
                    if s == s_src:
                        forward, response = data, None
                        if handler:
                            # handler may rewrite the request, or short-circuit with a response
                            forward, response = handler(data)
                        if forward is not None:
                            s_dst.sendall(forward)
                        elif response is not None:
                            # predefined response: answer the client directly and close
                            s_src.sendall(response)
                            return
                    elif s == s_dst:
                        s_src.sendall(data)
        finally:
            run_safe(s_src.close)
            run_safe(s_dst.close)

    try:
        while thread.running:
            try:
                src_socket, _ = s.accept()
                # NOTE: bind the accepted socket as a lambda default - a bare closure would
                # late-bind `src_socket` and could see a newer connection by the time the
                # worker thread actually runs
                start_worker_thread(
                    lambda *args, _thread, _sock=src_socket: handle_request(_sock, _thread)
                )
            except socket.timeout:
                pass
    finally:
        # close the listener socket when the proxy loop terminates
        run_safe(s.close)
def _start(self):
    """Flip the started flag and launch startup in the background (idempotent)."""
    with self._startup_mutex:
        if not self._started:
            self._started = True
            # startup has to run async, otherwise first call to handle() could block a long time.
            start_worker_thread(self._do_start_retry)
def init_async(cls):
    """Prepare the Terraform working dir in a background thread (init + plan)."""

    def _prepare(*_):
        with INIT_LOCK:
            base_dir = cls.get_base_dir()
            tf_dir = os.path.join(base_dir, '.terraform')
            if not os.path.exists(tf_dir):
                run('cd %s; terraform init -input=false' % base_dir)
            run('cd %s; terraform plan -out=tfplan -input=false' % base_dir)

    start_worker_thread(_prepare)
def init_async(cls):
    """
    Installs the default elasticsearch version in a worker thread.
    Used by conftest.py to make sure elasticsearch is downloaded once the tests arrive here.
    """

    def _install(*_):
        with INIT_LOCK:
            LOG.info("installing elasticsearch")
            install_elasticsearch()
            LOG.info("done installing elasticsearch")

    start_worker_thread(_install)
def init_async(cls):
    """Asynchronously run terraform init/plan, skipping if the terraform CLI is absent."""
    if not is_command_available('terraform'):
        return

    def _prepare(*_):
        with INIT_LOCK:
            base_dir = cls.get_base_dir()
            tf_dir = os.path.join(base_dir, '.terraform')
            if not os.path.exists(tf_dir):
                run('cd %s; terraform init -input=false' % base_dir)
            run('cd %s; terraform plan -out=tfplan -input=false' % base_dir)

    start_worker_thread(_prepare)
def init_async(cls):
    """Initialize Terraform and pre-compute a plan in a background worker thread."""
    available, _ = check_terraform_version()
    if not available:
        return

    def _prepare(*_):
        with INIT_LOCK:
            base_dir = cls.get_base_dir()
            plugins_dir = os.path.join(base_dir, '.terraform', 'plugins')
            if not os.path.exists(plugins_dir):
                run('cd %s; terraform init -input=false' % base_dir)
            # remove any cache files from previous runs
            for stale in ('tfplan', 'terraform.tfstate', 'terraform.tfstate.backup'):
                rm_rf(os.path.join(base_dir, stale))
            # create TF plan
            run('cd %s; terraform plan -out=tfplan -input=false' % base_dir)

    start_worker_thread(_prepare)
def submit(self, fn, *args, **kwargs):
    """Submit a callable to the pool.

    If an idle pool thread is available, delegate to the regular pool submission;
    otherwise spin up an ad-hoc worker thread so the caller does not queue behind
    busy pool threads.

    Returns a future for the result of ``fn(*args, **kwargs)``.
    """
    # if idle threads are available, don't spin new threads
    if self.has_idle_threads():
        # modern zero-arg super() (Python 3) instead of super(AdaptiveThreadPool, self)
        return super().submit(fn, *args, **kwargs)

    def _run(*_):
        return fn(*args, **kwargs)

    thread = start_worker_thread(_run)
    return thread.result_future
def init_async(cls):
    """Install Terraform and prepare the test plan asynchronously."""

    def _prepare(*_):
        with INIT_LOCK:
            install_terraform()
            base_dir = get_base_dir()
            plugins_dir = os.path.join(base_dir, ".terraform", "plugins")
            if not os.path.exists(plugins_dir):
                run(f"cd {base_dir}; {TERRAFORM_BIN} init -input=false")
            # remove any cache files from previous runs
            for stale_file in ("tfplan", "terraform.tfstate", "terraform.tfstate.backup"):
                rm_rf(os.path.join(base_dir, stale_file))
            # create TF plan
            run(f"cd {base_dir}; {TERRAFORM_BIN} plan -out=tfplan -input=false")

    start_worker_thread(_prepare)
def install_async():
    """
    Installs the default elasticsearch version in a worker thread. Used by conftest.py
    to make sure elasticsearch is downloaded once the tests arrive here.
    """
    if installed.is_set():
        return

    def _do_install(*_):
        with INIT_LOCK:
            # double-checked: another thread may have finished while we waited on the lock
            if installed.is_set():
                return
            LOG.info("installing elasticsearch default version")
            install_elasticsearch()
            LOG.info("done installing elasticsearch default version")
            LOG.info("installing opensearch default version")
            install_opensearch()
            LOG.info("done installing opensearch default version")
            installed.set()

    start_worker_thread(_do_install)
def init_async(cls):
    """Run terraform init and create a plan in a background thread.

    Skips entirely (with a console notice) if the installed Terraform version
    fails the compatibility check.
    """
    available, ver_string = check_terraform_version()
    if not available:
        print(
            "Skipping Terraform test init as version check failed (version: '%s')"
            % ver_string
        )
        return

    def _prepare(*_):
        with INIT_LOCK:
            base_dir = cls.get_base_dir()
            plugins_dir = os.path.join(base_dir, ".terraform", "plugins")
            if not os.path.exists(plugins_dir):
                run("cd %s; terraform init -input=false" % base_dir)
            # remove any cache files from previous runs
            for stale in ("tfplan", "terraform.tfstate", "terraform.tfstate.backup"):
                rm_rf(os.path.join(base_dir, stale))
            # create TF plan
            run("cd %s; terraform plan -out=tfplan -input=false" % base_dir)

    start_worker_thread(_prepare)