def docker_lvmpy_install(env):
    """Install docker-lvmpy: sync its repo, prepare the filestorage
    mapping and env file, then run the install script with *env*.
    """
    sync_docker_lvmpy_repo(env)
    ensure_filestorage_mapping()
    update_docker_lvmpy_env(env)
    install_cmd = f'sudo -H -E {DOCKER_LVMPY_PATH}/scripts/install.sh'
    run_cmd(cmd=install_cmd.split(), env=env)
    logger.info('docker-lvmpy installed')
def test_create_logs_dump(backup_func, skale_container, removed_containers_folder):
    """Dump logs, unpack the archive and check its expected layout/content."""
    archive_path = create_logs_dump(G_CONF_HOME)
    safe_mkdir(TEST_ARCHIVE_FOLDER_PATH)
    run_cmd(shlex.split(f'tar xf {archive_path} -C {TEST_ARCHIVE_FOLDER_PATH}'))

    container_log_path = os.path.join(
        TEST_ARCHIVE_FOLDER_PATH, 'containers', f'{TEST_SKALE_NAME}.log')
    with open(container_log_path) as log_file:
        log_lines = log_file.readlines()
    assert log_lines == [
        'Hello, SKALE!\n',
        '================================================================================\n',  # noqa
        'Hello, SKALE!\n'
    ]

    # Archive must contain the expected top-level folders
    for folder in ('removed_containers', 'cli', 'containers'):
        assert os.path.exists(os.path.join(TEST_ARCHIVE_FOLDER_PATH, folder))
    # CLI log files must be present inside the 'cli' folder
    for log_name in ('debug-node-cli.log', 'node-cli.log'):
        assert os.path.isfile(
            os.path.join(TEST_ARCHIVE_FOLDER_PATH, 'cli', log_name))
def docker_lvmpy_update(env):
    """Update docker-lvmpy: sync its repo, prepare the filestorage
    mapping and env file, then run the update script with *env*.
    """
    sync_docker_lvmpy_repo(env)
    ensure_filestorage_mapping()
    logger.info('Running docker-lvmpy update script')
    update_docker_lvmpy_env(env)
    update_cmd = f'sudo -H -E {DOCKER_LVMPY_PATH}/scripts/update.sh'
    run_cmd(cmd=update_cmd.split(), env=env)
    logger.info('docker-lvmpy update done')
def restart_docker_service(
        docker_service_name: str = 'docker') -> DockerConfigResult:
    """Reload systemd units and restart the given docker service.

    Returns DockerConfigResult.CHANGED unconditionally.
    """
    steps = (
        ('Executing daemon-reload', ['systemctl', 'daemon-reload']),
        ('Restarting docker service', ['systemctl', 'restart', docker_service_name]),
    )
    for message, command in steps:
        logger.info(message)
        run_cmd(command)
    return DockerConfigResult.CHANGED
def run_simple_openssl_server(certfile, keyfile, port=DEFAULT_SSL_CHECK_PORT):
    """Start ``openssl s_server`` serving files over HTTPS on *port*
    using the given certificate/key pair, requiring client verification.
    """
    run_cmd([
        'openssl', 's_server',
        '-cert', certfile,
        '-key', keyfile,
        '-WWW',
        '-port', str(port),
        '-verify_return_error',
        '-Verify', '1'
    ])
def compose_rm(env=None):
    """Stop and remove the docker-compose containers.

    Args:
        env: Optional mapping of environment variables passed to
            docker-compose. Defaults to an empty environment.
    """
    # Fix: the default was a mutable dict literal (env={}), which is shared
    # across calls; use a None sentinel instead (backward-compatible).
    if env is None:
        env = {}
    logger.info('Removing compose containers')
    run_cmd(cmd=(
        'docker-compose',
        '-f', COMPOSE_PATH,
        'down',
        '-t', str(COMPOSE_SHUTDOWN_TIMEOUT),
    ), env=env)
    logger.info('Compose containers removed')
def cert_key_pair():
    """Generate a self-signed cert/key pair, yield their paths, then
    remove both files after the consumer resumes the generator.
    """
    cert_path = os.path.abspath('ssl-test-cert')
    key_path = os.path.abspath('ssl-test-key')
    run_cmd([
        'openssl', 'req',
        '-newkey', 'rsa:4096',
        '-x509', '-sha256',
        '-days', '365',
        '-nodes',
        '-subj', '/',
        '-out', cert_path,
        '-keyout', key_path
    ])
    yield cert_path, key_path
    # Cleanup: remove whichever of the two files still exists
    for path in (cert_path, key_path):
        if os.path.isfile(path):
            pathlib.Path(path).unlink()
def compose_up(env):
    """Start the base compose containers, plus monitoring and Telegram
    notification containers when the corresponding env vars enable them.
    """
    logger.info('Running base set of containers')
    env.setdefault('SGX_CERTIFICATES_DIR_NAME', SGX_CERTIFICATES_DIR_NAME)
    run_cmd(cmd=get_up_compose_cmd(BASE_COMPOSE_SERVICES), env=env)

    if str_to_bool(env.get('MONITORING_CONTAINERS', '')):
        logger.info('Running monitoring containers')
        run_cmd(cmd=get_up_compose_cmd(MONITORING_COMPOSE_SERVICES), env=env)

    # Telegram notifications need both the API key and the chat id
    if 'TG_API_KEY' in env and 'TG_CHAT_ID' in env:
        logger.info('Running containers for Telegram notifications')
        run_cmd(cmd=get_up_compose_cmd(NOTIFICATION_COMPOSE_SERVICES), env=env)
def docker_compose(self) -> CheckResult:
    """Check that docker-compose is installed and at least the required
    version (taken from ``self.requirements['docker-compose']``).

    Returns:
        CheckResult produced via self._ok / self._failed.
    """
    name = 'docker-compose'
    if shutil.which('docker-compose') is None:
        info = 'No such command: "docker-compose"'
        return self._failed(name=name, info=info)

    v_cmd_result = run_cmd(['docker-compose', '-v'], check_code=False)
    output = v_cmd_result.stdout.decode('utf-8').rstrip()
    if v_cmd_result.returncode != 0:
        # Fix: the composed message was built but the raw output was
        # passed to _failed instead; pass the informative message.
        info = f'Checking docker-compose version failed with: {output}'
        return self._failed(name=name, info=info)

    # Version string looks like "docker-compose version 1.x.y, build ..."
    actual_version = output.split(',')[0].split()[-1].strip()
    expected_version = self.requirements['docker-compose']
    info = f'Expected docker-compose version {expected_version}, actual {actual_version}'  # noqa
    if version_parse(actual_version) < version_parse(expected_version):
        return self._failed(name=name, info=info)
    return self._ok(name=name, info=info)
def _check_apt_package(self, package_name: str,
                       version: str = None) -> CheckResult:
    """Check that an apt package is installed and not older than the
    version listed in ``self.requirements``.
    """
    # TODO: check versions
    dpkg_result = run_cmd(['dpkg', '-s', package_name], check_code=False)
    output = dpkg_result.stdout.decode('utf-8').strip()
    if dpkg_result.returncode != 0:
        return self._failed(name=package_name, info=output)

    actual_version = self._version_from_dpkg_output(output)
    expected_version = self.requirements[package_name]
    info = {
        'expected_version': expected_version,
        'actual_version': actual_version,
    }
    # version_compare returns -1 when actual < expected
    older = debian_support.version_compare(
        actual_version, expected_version) == -1
    if older:
        return self._failed(name=package_name, info=info)
    return self._ok(name=package_name, info=info)
def compose_build():
    """Build the images declared in the compose file."""
    logger.info('Building compose containers')
    build_cmd = ('docker-compose', '-f', COMPOSE_PATH, 'build')
    run_cmd(cmd=build_cmd, env={'SKALE_DIR': SKALE_DIR})
def compose_pull():
    """Pull the images declared in the compose file."""
    logger.info('Pulling compose containers')
    pull_cmd = ('docker-compose', '-f', COMPOSE_PATH, 'pull')
    run_cmd(cmd=pull_cmd, env={'SKALE_DIR': SKALE_DIR})
def save_iptables_rules_state():
    """Dump the current iptables rules (via ``iptables-save``) into the
    state file at IPTABLES_RULES_STATE_FILEPATH.
    """
    rules_text = run_cmd(['iptables-save']).stdout.decode('utf-8').rstrip()
    with open(IPTABLES_RULES_STATE_FILEPATH, 'w') as state_file:
        state_file.write(rules_text)
def check_is_partition(disk_path):
    """Return True if ``blkid`` reports a PARTUUID for *disk_path*,
    i.e. the device looks like a partition rather than a whole disk.
    """
    res = run_cmd(['blkid', disk_path])
    # Idiom fix: return the boolean expression directly instead of the
    # `if ...: return True / return False` pattern.
    return 'PARTUUID' in str(res.stdout)
def get_disk_size(disk_device: str) -> int:
    """Return the size of *disk_device* as an integer, as reported by the
    command built via ``construct_disk_size_cmd``.
    """
    size_cmd = construct_disk_size_cmd(disk_device)
    raw_output = run_cmd(size_cmd).stdout
    return int(raw_output.decode('utf-8'))
def create_archive(archive_path, source_path):
    """Pack the contents of *source_path* into a gzipped tar archive at
    *archive_path* (paths inside the archive are relative to source_path).
    """
    tar_cmd = ['tar', '-czvf', archive_path, '-C', source_path, '.']
    run_cmd(tar_cmd)