class Context(object):
    """Tracks test objects created during a run and tears them down afterwards."""

    logger = get_logger("context")

    def __init__(self):
        # Objects are appended by tests and consumed (popped) by cleanup().
        self.transfers = []
        self.test_objects = []

    def _cleanup_test_objects(self, object_list: list):
        """Pop and clean up every object on the list; log (don't raise) expected failures."""
        while object_list:
            obj = object_list.pop()
            try:
                self.logger.info("cleaning: {}".format(obj))
                obj.cleanup()
            except (UnexpectedResponseError, AssertionError, CommandExecutionException) as e:
                # Best-effort teardown: record the failure and keep going.
                self.logger.warning("Error while deleting {}: {}".format(obj, e))

    def cleanup(self):
        """Remove data sets derived from tracked transfers, then the tracked objects themselves."""
        titles = [transfer.title for transfer in self.transfers]
        if titles:
            # Data sets are created server-side from transfers, so they are
            # looked up by transfer title rather than tracked directly.
            matching_data_sets = DataSet.api_get_matching_to_transfer_list(titles)
            self._cleanup_test_objects(matching_data_sets)
        self._cleanup_test_objects(self.transfers)
        self._cleanup_test_objects(self.test_objects)
class JumpTunnel(JumpClient):
    """SSH SOCKS tunnel (-D) opened through a jumpbox host.

    open() starts the ssh process and blocks until the local proxy port
    accepts connections; close() kills the ssh process.
    """

    logger = get_logger(__name__)

    def __init__(self, host, port, remote_username, remote_host, remote_key_path):
        self._host = host
        self._port = port
        self._tunnel = None  # subprocess.Popen handle, set by open()
        super().__init__(remote_username, remote_host, remote_key_path)

    @retry(ConnectionRefusedError, tries=20, delay=5)
    def _check_tunnel_established(self):
        """Probe the proxy endpoint; retried until ssh starts listening."""
        sock = socket.create_connection((self._host, self._port))
        sock.close()

    def open(self):
        """Start the ssh SOCKS proxy and wait for it to become reachable.

        Raises whatever _check_tunnel_established ultimately raises if the
        tunnel never comes up; the ssh process is killed first.
        """
        # BUG FIX: self._port must be stringified — subprocess argv entries and
        # " ".join below both require str, and callers pass the port as an int
        # (see the socket.create_connection usage above).
        tunnel_command = ["ssh"] + self._auth_options + [
            "-N", "-D", str(self._port)
        ] + self._user_at_hostname
        self.logger.info("Opening tunnel {}".format(" ".join(tunnel_command)))
        self._tunnel = subprocess.Popen(tunnel_command)
        try:
            self.logger.info("Wait until tunnel is established")
            self._check_tunnel_established()
        except BaseException:
            # Same semantics as the previous bare `except:` (also covers
            # KeyboardInterrupt), but the cleanup-and-reraise intent is explicit:
            # never leave an orphaned ssh process behind.
            self.close()
            raise

    def close(self):
        """Terminate the ssh tunnel process."""
        self._tunnel.kill()
class Context(object):
    """Collects objects created by tests and deletes them in dependency order."""

    logger = get_logger("context")

    def __init__(self):
        # Each list is filled by tests and drained (popped) during cleanup().
        self.orgs = []
        self.users = []
        self.invitations = []
        self.transfers = []
        self.apps = []
        self.service_instances = []

    def _cleanup_test_objects(self, object_list: list):
        """Pop and clean every object; a 404 means it is already gone and is ignored."""
        while object_list:
            obj = object_list.pop()
            try:
                self.logger.info("cleaning: {}".format(obj))
                obj.cleanup()
            except UnexpectedResponseError as e:
                if e.status != HttpStatus.CODE_NOT_FOUND:
                    # Anything other than "not found" is worth surfacing,
                    # but teardown stays best-effort.
                    self.logger.warning("Error while deleting {}: {}".format(obj, e))

    def cleanup(self):
        """Delete tracked objects, dependents first, orgs last."""
        self._cleanup_test_objects(self.users)
        self._cleanup_test_objects(self.invitations)
        titles = [transfer.title for transfer in self.transfers]
        if titles:
            # Data sets are produced from transfers server-side; find them by title.
            matching_data_sets = DataSet.api_get_matching_to_transfer_list(titles)
            self._cleanup_test_objects(matching_data_sets)
        self._cleanup_test_objects(self.transfers)
        self._cleanup_test_objects(self.service_instances)
        self._cleanup_test_objects(self.apps)
        self._cleanup_test_objects(self.orgs)
class SimpleJumpTunnel(object):
    """Local SOCKS proxy (ssh -D) tunnelled through the jumpbox.

    open() launches ssh and blocks until the local port accepts connections,
    then flags Config.TUNNEL_AVAILABLE; close() kills the ssh process.
    """

    _logger = get_logger(__name__)
    _LOCALHOST = "localhost"

    def __init__(self):
        self._jump_client = JumpClient(config.ng_jump_user)
        self._local_port = config.ng_socks_proxy_port
        self._tunnel = None  # subprocess.Popen handle, set by open()

    @property
    def _tunnel_command(self):
        # -D: dynamic (SOCKS) forwarding on the local port; -N: run no remote command.
        return self._jump_client.ssh_command + [
            "-D", str(self._local_port), "-N"
        ]

    def open(self):
        """Start the tunnel and wait until the local proxy port is reachable."""
        self._logger.info("Open tunnel with '{}'".format(" ".join(
            self._tunnel_command)))
        self._tunnel = subprocess.Popen(self._tunnel_command)
        try:
            self._logger.info("Wait until tunnel is established")
            self._check_tunnel_established()
            Config.TUNNEL_AVAILABLE = True
        except BaseException:
            # IDIOM FIX: was a bare `except:`. BaseException keeps identical
            # semantics (KeyboardInterrupt included) while making the
            # cleanup-and-reraise intent explicit — no orphaned ssh process.
            self.close()
            raise

    def close(self):
        """Terminate the local ssh tunnel process."""
        self._logger.debug("Close local ssh tunnel")
        self._tunnel.kill()

    @retry((ConnectionRefusedError, TimeoutError), tries=20, delay=5)
    def _check_tunnel_established(self):
        """Probe localhost:<port>; retried by @retry until ssh is listening."""
        sock = socket.create_connection((self._LOCALHOST, self._local_port))
        sock.close()
# # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # from bson import ObjectId from pymongo import MongoClient from modules.tap_logger import get_logger logger = get_logger(__name__) class DBClient(object): def __init__(self, uri: str): """ uri: mongodb://<host>:<port>/<dbname> """ client = MongoClient(host=uri) database_name = uri.split("/")[-1] self.database = client[database_name] logger.debug("Connected to {} database on {}:{}".format( database_name, client.HOST, client.PORT)) def insert(self, collection_name, document) -> ObjectId: result = self.database[collection_name].insert_one(document)
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # import os from time import sleep import paramiko from modules.constants import LoggerType from modules.tap_logger import get_logger from .config import Config from .gatling_runner_parameters import GatlingRunnerParameters logger = get_logger(LoggerType.GATLING_RUNNER) class GatlingSshConnector(object): """Gatling remote host ssh connector.""" SSH_POLICY = paramiko.AutoAddPolicy() PATH_TO_LOG_FILE = "simulation.log" PATH_TO_RESULT_FILE = os.path.join("js", "global_stats.json") PATH_TO_RESULTS = "results" WAIT_AFTER_SIMULATION_START = 2 # in seconds def __init__(self, parameters: GatlingRunnerParameters): self._parameters = parameters self._client = paramiko.SSHClient() self._client.set_missing_host_key_policy(self.SSH_POLICY)
class JumpClient(object):
    """Builds and runs ssh/scp commands targeting the configured jumpbox host.

    The ssh private key is either taken from configuration or, lazily, cloned
    from the ilab-deploy repository on first access of `key_path`.
    """

    # Poll interval (seconds) and attempt count used when waiting for a
    # spawned ssh/scp process to finish (total wait ≈ RETRIES * DELAY).
    _SSH_RETRY_DELAY = 30
    _SSH_RETRIES = 5
    _logger = get_logger(__name__)

    def __init__(self, username):
        self._username = username
        self._host = config.ng_jump_ip
        assert self._host is not None, "Missing jumpbox hostname configuration"
        # Path of the cloned key repository, set only if the key was downloaded.
        self._ilab_deploy_path = None
        self._key_path = config.ng_jump_key_path
        # Non-interactive options: skip host-key checks and keep the session alive.
        self.auth_options = ["-o UserKnownHostsFile=/dev/null",
                             "-o StrictHostKeyChecking=no",
                             "-o GSSAPIAuthentication=no",
                             "-o ServerAliveInterval=30"]
        # NOTE: `self.key_path` (the property) may download the key here.
        ssh_options = ["-i", self.key_path] + self.auth_options + ["{}@{}".format(self._username, self._host)]
        scp_options = ["-r", "-i", self.key_path] + self.auth_options
        if config.verbose_ssh:
            ssh_options = ["-vvv"] + ssh_options
            scp_options = ["-v"] + scp_options
        # Base argv prefixes; callers append remote commands or scp endpoints.
        self.ssh_command = ["ssh"] + ssh_options
        self.scp_command = ["scp"] + scp_options

    @property
    def key_path(self):
        """ If key path was not set in configuration, download key. Download is executed at most once. """
        if self._key_path is None:
            self._logger.info("Download repository with ssh key")
            ilab_deploy = AppSources.get_repository(repo_name=TapGitHub.ilab_deploy, repo_owner=TapGitHub.intel_data)
            self._ilab_deploy_path = ilab_deploy.path
            self._key_path = os.path.join(self._ilab_deploy_path, RelativeRepositoryPaths.ilab_jump_key)
            # ssh refuses keys with permissive modes; restrict to owner rw.
            os.chmod(self._key_path, stat.S_IRUSR | stat.S_IWUSR)
        self._key_path = os.path.expanduser(self._key_path)
        assert os.path.isfile(self._key_path), "No such file {}".format(self._key_path)
        return self._key_path

    def ssh(self, remote_command):
        """Run *remote_command* (str or list) on the jumpbox via cli_command.run."""
        if not isinstance(remote_command, list):
            remote_command = [remote_command]
        command = self.ssh_command + remote_command
        # Uses cli_command.run (not _run_command) — presumably to capture
        # output; confirm against cli_command's contract.
        return cli_command.run(command)

    def scp_from_remote(self, source_path, target_path):
        """Copy *source_path* from the jumpbox to local *target_path* (recursive)."""
        command = self.scp_command + ["{}@{}:{}".format(self._username, self._host, source_path), target_path]
        return self._run_command(command)

    def scp_to_remote(self, source_path, target_path):
        """Copy local *source_path* to *target_path* on the jumpbox (recursive)."""
        command = self.scp_command + [source_path, "{}@{}:{}".format(self._username, self._host, target_path)]
        return self._run_command(command)

    def cleanup(self):
        """Remove the downloaded key repository, if one was cloned."""
        if self._ilab_deploy_path is not None and os.path.exists(self._ilab_deploy_path):
            self._logger.debug("Remove directory {}".format(self._ilab_deploy_path))
            shutil.rmtree(self._ilab_deploy_path)

    def _run_command(self, command):
        """Spawn *command*, wait for it (polling with retries), return its exit code.

        Raises subprocess.TimeoutExpired (after killing the process) if it does
        not finish within the retry budget.
        """
        self._logger.info("Executing command {}".format(" ".join(command)))
        process = subprocess.Popen(command)
        try:
            self._logger.info("Wait for command {} to finish".format(" ".join(command)))
            self._ensure_process_finished(process, command)
        except subprocess.TimeoutExpired:
            # Retry budget exhausted: kill the straggler and propagate.
            self._logger.info("Killing {}".format(" ".join(command)))
            process.kill()
            raise
        return process.returncode

    @retry(subprocess.TimeoutExpired, tries=_SSH_RETRIES, delay=_SSH_RETRY_DELAY)
    def _ensure_process_finished(self, process, command):
        # poll() is None while the process is still running; raising here makes
        # @retry re-check after _SSH_RETRY_DELAY seconds, up to _SSH_RETRIES times.
        if process.poll() is None:
            raise subprocess.TimeoutExpired(command, self._SSH_RETRY_DELAY)
class NestedJumpTunnel(SimpleJumpTunnel):
    """Two-hop SOCKS tunnel for when the target is not reachable from the jumpbox directly.

    A local `-L` forward to a port on the jumpbox is chained with a remote
    `ssh -D` started on the jumpbox toward master-0.
    """

    _logger = get_logger(__name__)
    _LOCALHOST = "localhost"
    _KEY_COPIED = False
    # First candidate port to try on the jumpbox for the remote -D proxy.
    _DEFAULT_JUMP_PORT = 5555
    _PORT_OPTION = "-D"

    def __init__(self):
        super().__init__()
        self._username = config.ng_jump_user
        self._jump_client = JumpClient(self._username)
        self._home_directory_on_jump = os.path.join("/home", self._username)
        self._ssh_command = self._jump_client.ssh_command
        self._auth_options = self._jump_client.auth_options
        self._key_path = self._jump_client.key_path
        self._local_port = config.ng_socks_proxy_port
        self._master_0_host = config.master_0_hostname
        # Port on the jumpbox chosen lazily by _get_available_jump_port().
        self.__jump_port = None
        # Remote command prefix; the chosen port is appended later. This exact
        # string is also used to recognise our tunnel processes on the jumpbox.
        self._remote_tunnel_command_no_port = "ssh {} {} -N -D ".format(
            " ".join(self._auth_options), self._master_0_host)
        self._local_tunnel_command = None

    @property
    def _remote_tunnel_command(self):
        """
        Before remote tunnel command is built, available port on jumpbox is
        first established.
        """
        return self._remote_tunnel_command_no_port + str(self.__jump_port)

    @property
    def _tunnel_command(self):
        """Full argv: local -L forward to the jumpbox port, then the remote -D command."""
        self._logger.warning("Kubernetes not available directly from jumpbox.")
        self._get_available_jump_port()
        self._local_tunnel_command = [
            "-L", "{0}:{1}:{2}".format(self._local_port, self._LOCALHOST,
                                       self.__jump_port)
        ]
        return self._ssh_command + self._local_tunnel_command + [
            self._remote_tunnel_command
        ]

    def close(self):
        """
        First, close tunnel process started on jumpbox.
        Second, close tunnel process started locally.
        """
        self._logger.debug("Close ssh tunnel on jumpbox")
        tunnel_process_on_jump = self._find_tunnel_process_on_jump()
        if tunnel_process_on_jump is None:
            self._logger.warning(
                "Tunnel process does not seem to exist on jumpbox")
        else:
            tunnel_process_on_jump.kill()
        super().close()

    @classmethod
    def _get_port_from_remote_tunnel_command(cls, actual_command):
        """Extract the -D port number from a process command string.

        Raises ValueError when no parsable port follows the -D option.
        """
        assert cls._PORT_OPTION in actual_command, "Process command does not include '{}'".format(
            cls._PORT_OPTION)
        # str.index gives a character offset; the port is the token right
        # after "-D" in the remainder of the string.
        index = actual_command.index(cls._PORT_OPTION)
        try:
            port = actual_command[index:].split(" ")[1]
            return int(port, 10)
        except (ValueError, IndexError):
            raise ValueError(
                "Did not find port in command '{}'".format(actual_command))

    def _get_available_jump_port(self):
        """
        Search for an available port on jumpbox - only on the first call.
        This method is suboptimal, it looks only for processes started by this
        class. We'll see if it works.
        """
        if self.__jump_port is None:
            jump_processes = RemoteProcess.get_list(
                ssh_client=self._jump_client)
            busy_ports = []
            for process in jump_processes:
                # Only our own tunnel commands are considered "busy" ports.
                if self._remote_tunnel_command_no_port in process.command:
                    port = self._get_port_from_remote_tunnel_command(
                        process.command)
                    self._logger.warning(
                        "Port {} on jumpbox is busy".format(port))
                    busy_ports.append(port)
            # Scan a window of 100 ports above the default.
            # NOTE(review): if all 100 are busy, __jump_port stays None.
            for port in range(self._DEFAULT_JUMP_PORT,
                              self._DEFAULT_JUMP_PORT + 100):
                if port not in busy_ports:
                    self.__jump_port = port
                    break
        return self.__jump_port

    def _find_tunnel_process_on_jump(self):
        """Return our tunnel process on the jumpbox, or None if not found."""
        jump_processes = RemoteProcess.get_list(ssh_client=self._jump_client)
        for process in jump_processes:
            # Match both the exact remote command (with port) and the owner.
            if self._remote_tunnel_command in process.command and self._username in process.user:
                return process