def test_list_files_files_only(mock_execute, files_only, cmd, tmpdir):
    root_dir = tmpdir.mkdir('foo')
    mock_execute.return_value = '', ''
    ssh = SshClient()
    ssh.list_files(root_dir, files_only=files_only, recursive=True)
    mock_execute.assert_called_once_with(cmd.format(root=str(root_dir)))
def __init__(self, kwargs):
    ssh_kwargs = {}
    for arg in ['ssh_host', 'ssh_port', 'ssh_user', 'ssh_key']:
        if arg in kwargs:
            ssh_kwargs[arg.replace('ssh_', '')] = kwargs.pop(arg)
    self._ssh_client = SshClient(**ssh_kwargs)
    super(RemoteMySQLSource, self).__init__(**kwargs)
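For illustration, this is what the loop above does to the ssh_-prefixed kwargs before they reach SshClient; the values below are made up, and the leftover key is a hypothetical pass-through argument for MySQLSource, not taken from the source.

kwargs = {
    'ssh_host': 'master1',
    'ssh_port': 22,
    'ssh_user': 'root',
    'ssh_key': '/root/.ssh/id_rsa',
    'mysql_connect_info': None,  # hypothetical pass-through kwarg
}

ssh_kwargs = {}
for arg in ['ssh_host', 'ssh_port', 'ssh_user', 'ssh_key']:
    if arg in kwargs:
        ssh_kwargs[arg.replace('ssh_', '')] = kwargs.pop(arg)

# ssh_kwargs == {'host': 'master1', 'port': 22, 'user': 'root',
#                'key': '/root/.ssh/id_rsa'}
# kwargs    == {'mysql_connect_info': None}  (forwarded to MySQLSource)
print(ssh_kwargs, kwargs)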
def __init__(self, remote_path,
             ssh_connect_info=SshConnectInfo(),
             hostname=socket.gethostname()):
    super(Ssh, self).__init__(remote_path)
    self._ssh_client = SshClient(ssh_connect_info)
    self.status_path = "{remote_path}/{hostname}/status".format(
        remote_path=self.remote_path,
        hostname=hostname
    )
    self.status_tmp_path = self.status_path + ".tmp"
def test_list_files_recursive(mock_execute, recursive, cmd, tmpdir):
    root_dir = tmpdir.mkdir('foo')
    mock_execute.return_value = '', ''
    ssh = SshClient()
    ssh.list_files(root_dir, recursive=recursive)
    mock_execute.assert_called_once_with(
        cmd.format(root=str(root_dir))
    )
def __init__(self, remote_path, **kwargs):
    super(Ssh, self).__init__(remote_path)
    self._ssh_client = SshClient(
        host=kwargs.get('ssh_host', '127.0.0.1'),
        port=kwargs.get('ssh_port', 22),
        user=kwargs.get('ssh_user', 'root'),
        key=kwargs.get('ssh_key', '/root/.ssh/id_rsa')
    )
    self._hostname = kwargs.get('hostname', socket.gethostname())
def test_list_files(mock_execute, exec_return, expected, tmpdir):
    root_dir = tmpdir.mkdir('foo')
    mock_execute.return_value = (exec_return.format(root=str(root_dir)), '')
    ssh = SshClient()
    check_result = []
    for x in expected:
        check_result.append(x.format(root=str(root_dir)))
    assert ssh.list_files(root_dir) == check_result
def __init__(self, ssh_connect_info=SshConnectInfo(),
             remote_path=None,
             hostname=socket.gethostname()):
    super(Ssh, self).__init__()
    if remote_path:
        self.remote_path = remote_path.rstrip('/')
    self._ssh_client = SshClient(ssh_connect_info)
    self.status_path = "{remote_path}/{hostname}/status".format(
        remote_path=self.remote_path,
        hostname=hostname
    )
def test_list_files_files_only_with_result(mock_execute, exec_return,
                                            expected, tmpdir):
    root_dir = tmpdir.mkdir('foo')
    mock_execute.return_value = (
        exec_return.format(root=str(root_dir)),
        ''
    )
    ssh = SshClient()
    check_result = []
    for x in expected:
        check_result.append(x.format(root=str(root_dir)))
    assert ssh.list_files(root_dir, files_only=True) == check_result
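The mocked tests above take fixtures and parameters (mock_execute, files_only, recursive, cmd, exec_return, expected) that are defined outside these snippets. Below is a minimal sketch of the assumed scaffolding for one of them, using pytest parametrization and a patched SshClient.execute; the import path and the command strings are assumptions and would have to match whatever SshClient.list_files() actually builds.

import mock  # or: from unittest import mock
import pytest

from twindb_backup.ssh.client import SshClient  # assumed import path


@pytest.mark.parametrize('files_only, cmd', [
    # Hypothetical command strings; the real values must match the
    # shell command that SshClient.list_files() constructs.
    (True, 'find {root} -type f'),
    (False, 'find {root}'),
])
@mock.patch.object(SshClient, 'execute')
def test_list_files_files_only(mock_execute, files_only, cmd, tmpdir):
    root_dir = tmpdir.mkdir('foo')
    mock_execute.return_value = '', ''
    ssh = SshClient()
    ssh.list_files(root_dir, files_only=files_only, recursive=True)
    mock_execute.assert_called_once_with(cmd.format(root=str(root_dir)))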
def test_list_files(tmpdir):
    ssh = SshClient(
        host='192.168.36.250',
        key='/vagrant/.vagrant/machines/master1/virtualbox/private_key',
        user='******'
    )
    root_dir = tmpdir.mkdir('foo')
    print(str(root_dir))
    files = ssh.list_files(str(root_dir))
    print(files)
    with open(str(root_dir.join('bar.txt')), 'w') as fp:
        fp.write('xxx')
    print(ssh.list_files(str(root_dir)))
    print('blah')
    print(ssh.list_files('blah'))
    subdir = root_dir.mkdir('subdir')
    with open(str(subdir.join('sub_bar.txt')), 'w') as fp:
        fp.write('xxx')
    print('subdir')
    print(ssh.list_files(str(root_dir), recursive=True))
class Ssh(BaseDestination):
    """
    SSH destination class

    :param ssh_connect_info: SSH connection info
    :type ssh_connect_info: SshConnectInfo
    :param remote_path: Path to store backup
    :param hostname: Hostname
    """
    def __init__(self, ssh_connect_info=SshConnectInfo(),
                 remote_path=None, hostname=socket.gethostname()):
        super(Ssh, self).__init__()

        if remote_path:
            self.remote_path = remote_path.rstrip('/')

        self._ssh_client = SshClient(ssh_connect_info)

        self.status_path = "{remote_path}/{hostname}/status".format(
            remote_path=self.remote_path,
            hostname=hostname
        )

    def save(self, handler, name):
        """
        Read from handler and save it on the remote SSH server.

        :param name: Relative path to a file to store the backup copy.
        :param handler: Stream with content of the backup.
        """
        remote_name = self.remote_path + '/' + name
        self._mkdirname_r(remote_name)
        try:
            cmd = "cat - > %s" % remote_name
            with self._ssh_client.get_remote_handlers(cmd) \
                    as (cin, _, _):
                with handler as file_obj:
                    cin.write(file_obj.read())
            return True
        except SshClientException:
            return False

    def _mkdir_r(self, path):
        """
        Create directory on the remote server.

        :param path: Remote directory.
        :type path: str
        """
        cmd = 'mkdir -p "%s"' % path
        self.execute_command(cmd)

    def list_files(self, prefix, recursive=False):
        """
        Get list of files by prefix.

        :param prefix: Path.
        :param recursive: If True, list files recursively.
        :type prefix: str
        :type recursive: bool
        :return: List of files.
        :rtype: list
        """
        ls_options = ""

        if recursive:
            ls_options = "-R"

        ls_cmd = "ls {ls_options} {prefix}".format(
            ls_options=ls_options,
            prefix=prefix
        )
        with self._ssh_client.get_remote_handlers(ls_cmd) as (_, cout, _):
            return sorted(cout.read().split())

    def find_files(self, prefix, run_type):
        """
        Find files by prefix.

        :param prefix: Path.
        :param run_type: Run type for search.
        :type prefix: str
        :type run_type: str
        :return: List of files.
        :rtype: list
        """
        cmd = "find {prefix}/*/{run_type} -type f".format(
            prefix=prefix,
            run_type=run_type
        )
        with self._ssh_client.get_remote_handlers(cmd) as (_, cout, _):
            return sorted(cout.read().split())

    def delete(self, obj):
        """
        Delete file by path.

        :param obj: Path to a remote file.
        """
        cmd = "rm %s" % obj
        self.execute_command(cmd)

    def get_stream(self, path):
        """
        Get a PIPE handler with content of the backup copy streamed
        from the destination.

        :param path: Path to file.
        :type path: str
        :return: Standard output.
        """
        cmd = "cat %s" % path
        with self._ssh_client.get_remote_handlers(cmd) as (_, cout, _):
            yield cout

    def _write_status(self, status):
        """
        Write status.

        :param status: Status to write.
        :type status: str
        """
        raw_status = base64.b64encode(json.dumps(status))
        cmd = "cat - > %s" % self.status_path
        with self._ssh_client.get_remote_handlers(cmd) as (cin, _, _):
            cin.write(raw_status)

    def _read_status(self):
        """
        Read status.

        :return: Status in JSON format, if it exists.
        """
        if self._status_exists():
            cmd = "cat %s" % self.status_path
            with self._ssh_client.get_remote_handlers(cmd) as (_, stdout, _):
                return json.loads(base64.b64decode(stdout.read()))
        else:
            return self._empty_status

    def _status_exists(self):
        """
        Check if the status file exists.

        :return: True if the status exists.
        :rtype: bool
        :raise SshDestinationError: if any error.
        """
        cmd = "bash -c 'if test -s %s; " \
              "then echo exists; " \
              "else echo not_exists; " \
              "fi'" % self.status_path
        _, cout, _ = self._ssh_client.execute(cmd)
        status = cout.read()
        if status.strip() == 'exists':
            return True
        elif status.strip() == 'not_exists':
            return False
        else:
            raise SshDestinationError('Unrecognized response: %s' % status)

    def execute_command(self, cmd, quiet=False):
        """Execute ssh command.

        :param cmd: Command for execution.
        :type cmd: str
        :param quiet: If True don't print errors.
        :return: Handlers of stdin, stdout and stderr.
        :rtype: tuple
        """
        LOG.debug('Executing: %s', cmd)
        return self._ssh_client.execute(cmd, quiet=quiet)

    @property
    def client(self):
        """Return client."""
        return self._ssh_client

    @property
    def host(self):
        """IP address of the destination."""
        return self._ssh_client.ssh_connect_info.host

    def _mkdirname_r(self, remote_name):
        """Create directory for a given file on the destination.

        For example, for a given file '/foo/bar/xyz' it would create
        directory '/foo/bar/'.

        :param remote_name: Full path to a file.
        :type remote_name: str
        """
        return self._mkdir_r(os.path.dirname(remote_name))

    def netcat(self, command, port=9990):
        """
        Run netcat on the destination and pipe it to a given command.
        """
        try:
            return self.execute_command('nc -l %d | %s' % (port, command))
        except SshDestinationError as err:
            LOG.error(err)
class RemoteMySQLSource(MySQLSource): """Remote MySQLSource class""" def __init__(self, kwargs): ssh_kwargs = {} for arg in ['ssh_host', 'ssh_port', 'ssh_user', 'ssh_key']: if arg in kwargs: ssh_kwargs[arg.replace('ssh_', '')] = kwargs.pop(arg) self._ssh_client = SshClient(**ssh_kwargs) super(RemoteMySQLSource, self).__init__(**kwargs) @contextmanager def get_stream(self): raise NotImplementedError("Method get_stream not implemented") def clone(self, dest_host, port, compress=False): """ Send backup to destination host :param dest_host: Destination host :type dest_host: str :param port: Port to sending backup :type port: int :param compress: If True compress stream :type compress: bool :raise RemoteMySQLSourceError: if any error """ retry = 1 retry_time = 2 error_log = "/tmp/{src}_{src_port}-{dst}_{dst_port}.log".format( src=self._ssh_client.host, src_port=self._ssh_client.port, dst=dest_host, dst_port=port ) if compress: compress_cmd = "| gzip -c - " else: compress_cmd = "" cmd = "bash -c \"sudo %s " \ "--stream=xbstream " \ "--host=127.0.0.1 " \ "--backup " \ "--target-dir ./ 2> %s" \ " %s | ncat %s %d --send-only\"" \ % (self._xtrabackup, error_log, compress_cmd, dest_host, port) while retry < 3: try: return self._ssh_client.execute(cmd) except SshClientException as err: LOG.warning(err) LOG.info('Will try again in after %d seconds', retry_time) time.sleep(retry_time) retry_time *= 2 retry += 1 def clone_config(self, dst): """ Clone config to destination server :param dst: Destination server :type dst: Ssh """ cfg_path = self._get_root_my_cnf() LOG.debug("Root my.cnf is: %s", cfg_path) self._save_cfg(dst, cfg_path) def _find_all_cnf(self, root_path): """ Return list of embed cnf files""" files = [root_path] cfg_content = self._ssh_client.get_text_content(root_path) for line in cfg_content.splitlines(): if '!includedir' in line: path = line.split()[1] file_list = self._ssh_client.list_files( path, recursive=False, files_only=True ) for sub_file in file_list: files.extend( self._find_all_cnf( sub_file ) ) elif '!include' in line: files.extend( self._find_all_cnf( line.split()[1] ) ) return files @staticmethod def _find_server_id_by_path(cfg): """Find path with server_id""" options = ["server_id", "server-id"] for option in options: try: if cfg.has_option("mysqld", option): return option except ConfigParser.Error: pass return None def _save_cfg(self, dst, root_cfg): """Save configs on destination recursively""" files = self._find_all_cnf(root_cfg) server_id = self._get_server_id(dst.host) is_server_id_set = False valid_cfg = [] for path in files: try: cfg = self._get_config(path) option = self._find_server_id_by_path(cfg) if option: cfg.set('mysqld', option, value=str(server_id)) is_server_id_set = True dst.client.write_config(path, cfg) valid_cfg.append(path) except ConfigParser.ParsingError: cfg_content = self._ssh_client.get_text_content(path) dst.client.write_content(path, cfg_content) if not is_server_id_set: for path in valid_cfg: cfg = self._get_config(path) if cfg.has_section("mysqld"): cfg.set('mysqld', "server_id", value=str(server_id)) dst.client.write_config(path, cfg) return def _get_root_my_cnf(self): """Return root my.cnf path""" for cfg_path in MY_CNF_COMMON_PATHS: try: self._ssh_client.get_text_content(cfg_path) return cfg_path except SshClientException: continue except IOError as err: if err.errno == ENOENT: continue else: raise raise OSError("Root my.cnf not found") def _get_config(self, cfg_path): """ Return parsed config :param cfg_path: Path to config :type cfg_path: 
str :return: Path and config :rtype: ConfigParser.ConfigParser """ cfg = ConfigParser.ConfigParser(allow_no_value=True) try: cmd = "cat %s" % cfg_path with self._ssh_client.get_remote_handlers(cmd) as (_, cout, _): cfg.readfp(cout) except ConfigParser.ParsingError as err: LOG.error(err) raise return cfg def setup_slave(self, master_info): # noqa # pylint: disable=too-many-arguments """ Change master :param master_info: Master details. :type master_info: MySQLMasterInfo """ try: with self._cursor() as cursor: query = "CHANGE MASTER TO " \ "MASTER_HOST = '{master}', " \ "MASTER_USER = '******', " \ "MASTER_PORT = {port}, " \ "MASTER_PASSWORD = '******', " \ "MASTER_LOG_FILE = '{binlog}', " \ "MASTER_LOG_POS = {binlog_pos}"\ .format( master=master_info.host, user=master_info.user, password=master_info.password, binlog=master_info.binlog, binlog_pos=master_info.binlog_position, port=master_info.port ) cursor.execute(query) cursor.execute("START SLAVE") return True except pymysql.Error as err: LOG.debug(err) return False def apply_backup(self, datadir): """ Apply backup of destination server :param datadir: Path to datadir :return: Binlog file name and position :rtype: tuple :raise RemoteMySQLSourceError: if any error. """ try: use_memory = "--use-memory %d" % int(self._mem_available() / 2) except OSError: use_memory = "" logfile_path = "/tmp/xtrabackup-apply-log.log" cmd = "sudo {xtrabackup} --prepare --apply-log-only " \ "--target-dir {target_dir} {use_memory} " \ "> {logfile} 2>&1" \ "".format( xtrabackup=self._xtrabackup, target_dir=datadir, use_memory=use_memory, logfile=logfile_path ) try: self._ssh_client.execute(cmd) self._ssh_client.execute("sudo chown -R mysql %s" % datadir) return self._get_binlog_info(datadir) except SshClientException as err: LOG.debug("Logfile is:") LOG.debug(self._ssh_client.get_text_content(logfile_path)) raise RemoteMySQLSourceError(err) def _mem_available(self): """ Get available memory size :return: Size of available memory in bytes :rtype: int :raise OSError: if can' detect memory """ # https://git.kernel.org/pub/scm/linux/kernel/git/torvalds/linux.git/commit/?id=34e431b0a stdout_, _ = self._ssh_client.execute( "awk -v low=$(grep low /proc/zoneinfo | " "awk '{k+=$2}END{print k}') " "'{a[$1]=$2}END{m=" "a[\"MemFree:\"]" "+a[\"Active(file):\"]" "+a[\"Inactive(file):\"]" "+a[\"SReclaimable:\"]; " "print a[\"MemAvailable:\"]}' " "/proc/meminfo" ) mem = stdout_.strip() if not mem: raise OSError("Cant get available mem") free_mem = int(mem) * 1024 return free_mem @staticmethod def _get_server_id(host): """Determinate server id""" try: server_id = struct.unpack("!I", socket.inet_aton(host))[0] except socket.error: server_ip = socket.gethostbyname(host) server_id = struct.unpack("!I", socket.inet_aton(server_ip))[0] return server_id def _get_binlog_info(self, backup_path): """Get binlog coordinates from an xtrabackup_binlog_info. :param backup_path: Path where to look for xtrabackup_binlog_info. :type backup_path: str :return: Tuple with binlog coordinates - (file_name, pos) :rtype: tuple """ stdout_, _ = self._ssh_client.execute( 'sudo cat %s/xtrabackup_binlog_info' % backup_path ) binlog_info = re.split(r'\t+', stdout_.rstrip()) return binlog_info[0], int(binlog_info[1])
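_get_server_id() above derives the replica's server_id by unpacking the destination's IPv4 address as a 32-bit big-endian integer, so every distinct IP maps to a distinct id. For example:

import socket
import struct

# '10.0.0.5' -> bytes 0x0A 0x00 0x00 0x05 -> 167772165
server_id = struct.unpack("!I", socket.inet_aton('10.0.0.5'))[0]
print(server_id)  # 167772165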
class Ssh(BaseDestination): """ The SSH destination class represents a destination backup storage with running SSH demon. :param remote_path: Path to store backups. :type remote_path: str :param kwargs: Keyword arguments. See below. :type kwargs: dict * **hostname** (str): Hostname of the host where backup is taken from. * **ssh_host** (str): Hostname for SSH connection. Default ``127.0.0.1``. * **ssh_user** (str): Username for SSH connection. Default ``root``. * **ssh_port** (int): TCP port for SSH connection. Default 22. * **ssh_key** (str): File with an rsa/dsa key for SSH authentication. Default ``/root/.ssh/id_rsa``. """ def __init__(self, remote_path, **kwargs): super(Ssh, self).__init__(remote_path) self._ssh_client = SshClient(host=kwargs.get('ssh_host', '127.0.0.1'), port=kwargs.get('ssh_port', 22), user=kwargs.get('ssh_user', 'root'), key=kwargs.get('ssh_key', '/root/.ssh/id_rsa')) self._hostname = kwargs.get('hostname', socket.gethostname()) @property def client(self): """ :return: SSH client. :rtype: SshClient """ return self._ssh_client @property def host(self): """ :return: IP address of the destination. :rtype: str """ return self._ssh_client.host @property def port(self): """ :return: TCP port of the destination. :rtype: int """ return self._ssh_client.port @property def user(self): """ :return: SSH user. :rtype: str """ return self._ssh_client.user def delete(self, path): """ Delete file by path. The path is a relative to the ``self.remote_path``. :param path: Path to a remote file. :type path: str """ cmd = "rm %s" % path self.execute_command(cmd) def ensure_tcp_port_listening(self, port, wait_timeout=10): """ Check that tcp port is open and ready to accept connections. Keep checking up to ``wait_timeout`` seconds. :param port: TCP port that is supposed to be listening. :type port: int :param wait_timeout: Wait this many seconds until the port is ready. :type wait_timeout: int :return: ``True`` if the TCP port is listening. :rtype: bool """ stop_waiting_at = time.time() + wait_timeout while time.time() < stop_waiting_at: try: cmd = "netstat -ln | grep -w 0.0.0.0:%d 2>&1 " \ "> /dev/null" % port cout, cerr = self.execute_command(cmd) LOG.debug('stdout: %s', cout) LOG.debug('stderr: %s', cerr) return True except SshClientException as err: LOG.debug(err) time.sleep(1) return False def execute_command(self, cmd, quiet=False, background=False): """Execute ssh command on the remote destination. :param cmd: Command to execute. :type cmd: str :param quiet: If ``True`` don't print errors. :type quiet: bool :param background: If ``True`` don't wait until the command exits. :type background: bool :return: stdin, stdout and stderr handlers. :rtype: tuple """ LOG.debug('Executing: %s', cmd) return self._ssh_client.execute(cmd, quiet=quiet, background=background) @contextmanager def get_stream(self, copy): """ Get a PIPE handler with content of the backup copy streamed from the destination. :param copy: Backup copy. :type copy: BaseCopy :return: Standard output. 
:rtype: file """ path = "%s/%s" % (self.remote_path, copy.key) cmd = "cat %s" % path def _read_write_chunk(channel, write_fd, size=1024): while channel.recv_ready(): chunk = channel.recv(size) LOG.debug('read %d bytes', len(chunk)) if chunk: os.write(write_fd, chunk) def _write_to_pipe(read_fd, write_fd): try: os.close(read_fd) with self._ssh_client.session() as channel: LOG.debug('Executing %s', cmd) channel.exec_command(cmd) while not channel.exit_status_ready(): _read_write_chunk(channel, write_fd) LOG.debug('closing channel') _read_write_chunk(channel, write_fd) channel.recv_exit_status() except KeyboardInterrupt: return read_process = None try: read_pipe, write_pipe = os.pipe() read_process = Process(target=_write_to_pipe, args=(read_pipe, write_pipe), name='_write_to_pipe') read_process.start() os.close(write_pipe) yield read_pipe os.close(read_pipe) read_process.join() if read_process.exitcode: raise SshDestinationError('Failed to download %s' % path) LOG.debug('Successfully streamed %s', path) finally: if read_process: read_process.join() def netcat(self, command, port=9990): """ Run ``netcat`` on the destination pipe it to a given command:: ncat -l <port> --recv-only | <command> :param command: Command that would accept ``netcat``'s output. :type command: str :param port: TCP port to run ``netcat`` on. Default 9999. :type port: int """ try: return self.execute_command("ncat -l %d --recv-only | " "%s" % (port, command)) except SshDestinationError as err: LOG.error(err) def read(self, filepath): try: return self._ssh_client.get_text_content( osp.join(self.remote_path, filepath)) except IOError as err: if err.errno == ENOENT: raise FileNotFound('File %s does not exist' % filepath) else: raise def save(self, handler, filepath): """ Read from the handler and save it on the remote ssh server in a file ``filepath``. :param filepath: Relative path to a file to store the backup copy. :type filepath: str :param handler: Stream with content of the backup. :type handler: file """ remote_name = osp.join(self.remote_path, filepath) self._mkdir_r(osp.dirname(remote_name)) cmd = "cat - > %s" % remote_name with self._ssh_client.get_remote_handlers(cmd) \ as (cin, _, _): with handler as file_obj: while True: chunk = file_obj.read(1024) if chunk: cin.write(chunk) else: break def write(self, content, filepath): remote_name = osp.join(self.remote_path, filepath) self._ssh_client.write_content(remote_name, content) def _list_files(self, prefix=None, recursive=False, files_only=False): return self._ssh_client.list_files(prefix, recursive=recursive, files_only=files_only) def _mkdir_r(self, path): """ Create directory on the remote server. :param path: Remote directory. :type path: str """ cmd = 'mkdir -p "%s"' % path self.execute_command(cmd) def _move_file(self, source, destination): cmd = 'yes | cp -rf %s %s' % (source, destination) self.execute_command(cmd) def __str__(self): return "Ssh(ssh://%s@%s:%d%s)" % ( self.user, self.host, self.port, self.remote_path, )
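get_stream() above yields a raw pipe file descriptor that a helper process keeps feeding; the sketch below shows how a caller could drain it into a local file. Here dst is assumed to be an Ssh instance and copy a BaseCopy-like object whose key points at an existing remote file.

import os

with dst.get_stream(copy) as read_fd:
    with open('/tmp/restored-copy', 'wb') as out:
        while True:
            chunk = os.read(read_fd, 1024)
            if not chunk:  # EOF: the writer process closed its end
                break
            out.write(chunk)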
def __init__(self, kwargs):
    self._ssh_client = SshClient(kwargs.pop('ssh_connection_info'))
    super(RemoteMySQLSource, self).__init__(**kwargs)
class RemoteMySQLSource(MySQLSource): """Remote MySQLSource class""" def __init__(self, kwargs): self._ssh_client = SshClient(kwargs.pop('ssh_connection_info')) super(RemoteMySQLSource, self).__init__(**kwargs) @contextmanager def get_stream(self): raise NotImplementedError("Method get_stream not implemented") def clone(self, dest_host, port, compress=False): """ Send backup to destination host :param dest_host: Destination host :type dest_host: str :param port: Port to sending backup :type port: int :param compress: If True compress stream :type compress: bool :raise RemoteMySQLSourceError: if any error """ retry = 1 retry_time = 2 error_log = "/tmp/{src}_{src_port}-{dst}_{dst_port}.log".format( src=self._ssh_client.ssh_connect_info.host, src_port=self._ssh_client.ssh_connect_info.port, dst=dest_host, dst_port=port ) if compress: compress_cmd = "| gzip -c - " else: compress_cmd = "" cmd = "bash -c \"sudo innobackupex --stream=xbstream ./ 2> %s" \ " %s | nc %s %d\"" \ % (error_log, compress_cmd, dest_host, port) while retry < 3: try: return self._ssh_client.execute(cmd) except SshClientException as err: LOG.warning(err) LOG.info('Will try again in after %d seconds', retry_time) time.sleep(retry_time) retry_time *= 2 retry += 1 def clone_config(self, dst): """ Clone config to destination server :param dst: Destination server :type dst: Ssh """ cfg_path = self._get_root_my_cnf() self._save_cfg(dst, cfg_path) def _save_cfg(self, dst, path): """Save configs on destination recursively""" cfg = self._get_config(path) server_id = self._get_server_id(dst.host) cfg.set('mysqld', 'server_id', value=str(server_id)) for option in cfg.options('mysqld'): val = cfg.get('mysqld', option) if '!includedir' in option: val = val.split()[1] ls_cmd = 'ls %s' % val with self._ssh_client.get_remote_handlers(ls_cmd) \ as (_, cout, _): file_list = sorted(cout.read().split()) for sub_file in file_list: self._save_cfg(dst, val + "/" + sub_file) elif '!include' in option: self._save_cfg(dst, val.split()[1]) with dst.client.get_remote_handlers("cat - > %s" % path) \ as (cin, _, _): cfg.write(cin) def _get_root_my_cnf(self): """Return root my.cnf path""" for cfg_path in MY_CNF_COMMON_PATHS: try: cmd = "cat %s" % cfg_path with self._ssh_client.get_remote_handlers(cmd): return cfg_path except SshDestinationError: continue raise OSError("Root my.cnf not found") def _get_config(self, cfg_path): """ Return parsed config :param cfg_path: Path to config :type cfg_path: str :return: Path and config :rtype: ConfigParser.ConfigParser """ cfg = ConfigParser.ConfigParser(allow_no_value=True) try: cmd = "cat %s" % cfg_path with self._ssh_client.get_remote_handlers(cmd) as (_, cout, _): cfg.readfp(cout) except ConfigParser.ParsingError as err: LOG.error(err) exit(1) return cfg def setup_slave(self, host, user, password, binlog, binlog_position): # noqa # pylint: disable=too-many-arguments """ Change master :param host: Master host name. :type host: str :param user: Replication user. 
:param password: Replication password :param binlog: Binlog file on the master :param binlog_position: Binlog position """ try: with self._cursor() as cursor: query = "CHANGE MASTER TO " \ "MASTER_HOST = '{master}', " \ "MASTER_USER = '******', " \ "MASTER_PASSWORD = '******', " \ "MASTER_LOG_FILE = '{binlog}', " \ "MASTER_LOG_POS = {binlog_pos}"\ .format( master=host, user=user, password=password, binlog=binlog, binlog_pos=binlog_position) cursor.execute(query) cursor.execute("START SLAVE") return True except pymysql.Error as err: LOG.debug(err) return False def apply_backup(self, datadir): """ Apply backup of destination server :param datadir: Path to datadir :return: Binlog file name and position :rtype: tuple :raise TwinDBBackupError: if binary positions is different """ try: self._ssh_client.execute( 'sudo innobackupex --apply-log %s --use-memory %d ' '> /tmp/innobackupex-apply-log.log 2>&1' % (datadir, self._mem_available() / 2) ) except OSError: self._ssh_client.execute( 'sudo innobackupex --apply-log %s ' '> /tmp/innobackupex-apply-log.log 2>&1' % datadir ) self._ssh_client.execute("sudo chown -R mysql %s" % datadir) _, stdout_, _ = self._ssh_client.execute( 'sudo cat %s/xtrabackup_binlog_pos_innodb' % datadir ) binlog_pos = stdout_.read().strip() _, stdout_, _ = self._ssh_client.execute( 'sudo cat %s/xtrabackup_binlog_info' % datadir ) binlog_info = stdout_.read().strip() if binlog_pos in binlog_info: return tuple(binlog_info.split()) raise RemoteMySQLSourceError("Invalid backup") def _mem_available(self): """ Get available memory size :return: Size of available memory in bytes :raise OSError: if can' detect memory """ # https://git.kernel.org/pub/scm/linux/kernel/git/torvalds/linux.git/commit/?id=34e431b0a _, stdout_, _ = self._ssh_client.execute( "awk -v low=$(grep low /proc/zoneinfo | " "awk '{k+=$2}END{print k}') " "'{a[$1]=$2}END{m=" "a[\"MemFree:\"]" "+a[\"Active(file):\"]" "+a[\"Inactive(file):\"]" "+a[\"SReclaimable:\"]; " "print a[\"MemAvailable:\"]}' " "/proc/meminfo" ) mem = stdout_.read().strip() if not mem: raise OSError("Cant get available mem") free_mem = int(mem) * 1024 return free_mem @staticmethod def _get_server_id(host): """Determinate server id""" try: server_id = struct.unpack("!I", socket.inet_aton(host))[0] except socket.error: server_ip = socket.gethostbyname(host) server_id = struct.unpack("!I", socket.inet_aton(server_ip))[0] return server_id
class Ssh(BaseDestination): """ SSH destination class :param remote_path: Path to store backup :param kwargs: Keyword arguments. See below :param kwargs: dict :**hostname**(str): Hostname of the host where backup is taken from. :**ssh_host**(str): Hostname for SSH connection. Default '127.0.0.1'. :**ssh_user**(str): Username for SSH connection. Default 'root'. :**ssh_port**(int): TCP port for SSH connection. Default 22. :**ssh_key**(str): File with an rsa/dsa key for SSH authentication. Default '/root/.ssh/id_rsa'. """ def __init__(self, remote_path, **kwargs): super(Ssh, self).__init__(remote_path) self._ssh_client = SshClient(host=kwargs.get('ssh_host', '127.0.0.1'), port=kwargs.get('ssh_port', 22), user=kwargs.get('ssh_user', 'root'), key=kwargs.get('ssh_key', '/root/.ssh/id_rsa')) self._hostname = kwargs.get('hostname', socket.gethostname()) def __str__(self): return "Ssh(ssh://%s@%s:%d%s)" % ( self.user, self.host, self.port, self.remote_path, ) def status_path(self, cls=MySQLStatus): """Path on the destination where status file will be stored.""" return "{remote_path}/{hostname}/{basename}".format( remote_path=self.remote_path, hostname=self._hostname, basename=cls().basename) def save(self, handler, name): """ Read from handler and save it on remote ssh server :param name: relative path to a file to store the backup copy. :param handler: stream with content of the backup. """ remote_name = osp.join(self.remote_path, name) self._mkdir_r(osp.dirname(remote_name)) cmd = "cat - > %s" % remote_name with self._ssh_client.get_remote_handlers(cmd) \ as (cin, _, _): with handler as file_obj: while True: chunk = file_obj.read(1024) if chunk: cin.write(chunk) else: break def _mkdir_r(self, path): """ Create directory on the remote server :param path: remote directory :type path: str """ cmd = 'mkdir -p "%s"' % path self.execute_command(cmd) def _list_files(self, path, recursive=False, files_only=False): return self._ssh_client.list_files(path, recursive=recursive, files_only=files_only) def delete(self, obj): """ Delete file by path :param obj: path to a remote file. """ cmd = "rm %s" % obj self.execute_command(cmd) @contextmanager def get_stream(self, copy): """ Get a PIPE handler with content of the backup copy streamed from the destination :param copy: Backup copy :type copy: BaseCopy :return: Standard output. 
""" path = "%s/%s" % (self.remote_path, copy.key) cmd = "cat %s" % path def _read_write_chunk(channel, write_fd, size=1024): while channel.recv_ready(): chunk = channel.recv(size) LOG.debug('read %d bytes', len(chunk)) if chunk: os.write(write_fd, chunk) def _write_to_pipe(read_fd, write_fd): try: os.close(read_fd) with self._ssh_client.session() as channel: LOG.debug('Executing %s', cmd) channel.exec_command(cmd) while not channel.exit_status_ready(): _read_write_chunk(channel, write_fd) LOG.debug('closing channel') _read_write_chunk(channel, write_fd) channel.recv_exit_status() except KeyboardInterrupt: return read_process = None try: read_pipe, write_pipe = os.pipe() read_process = Process(target=_write_to_pipe, args=(read_pipe, write_pipe), name='_write_to_pipe') read_process.start() os.close(write_pipe) yield read_pipe os.close(read_pipe) read_process.join() if read_process.exitcode: raise SshDestinationError('Failed to download %s' % path) LOG.debug('Successfully streamed %s', path) finally: if read_process: read_process.join() def _read_status(self, cls=MySQLStatus): if self._status_exists(cls=cls): cmd = "cat %s" % self.status_path(cls=cls) with self._ssh_client.get_remote_handlers(cmd) as (_, stdout, _): return cls(content=stdout.read()) else: return cls() def _write_status(self, status, cls=MySQLStatus): cmd = "cat - > %s" % self.status_path(cls=cls) with self._ssh_client.get_remote_handlers(cmd) as (cin, _, _): cin.write(status.serialize()) def _status_exists(self, cls=MySQLStatus): """ Check, if status exist :return: Exist status :rtype: bool :raise SshDestinationError: if any error. """ cmd = "bash -c 'if test -s %s; " \ "then echo exists; " \ "else echo not_exists; " \ "fi'" % self.status_path(cls=cls) status, cerr = self._ssh_client.execute(cmd) if status.strip() == 'exists': return True elif status.strip() == 'not_exists': return False else: LOG.error(cerr) msg = 'Unrecognized response: %s' % status if status: raise SshDestinationError(msg) else: raise SshDestinationError( 'Empty response from SSH destination') def execute_command(self, cmd, quiet=False, background=False): """Execute ssh command :param cmd: Command for execution :type cmd: str :param quiet: If True don't print errors :param background: Don't wait until the command exits. :type background: bool :return: Handlers of stdin, stdout and stderr :rtype: tuple """ LOG.debug('Executing: %s', cmd) return self._ssh_client.execute(cmd, quiet=quiet, background=background) @property def client(self): """Return client""" return self._ssh_client @property def host(self): """IP address of the destination.""" return self._ssh_client.host @property def port(self): """TCP port of the destination.""" return self._ssh_client.port @property def user(self): """SSH user.""" return self._ssh_client.user def netcat(self, command, port=9990): """ Run netcat on the destination pipe it to a given command. """ try: return self.execute_command("ncat -l %d --recv-only | " "%s" % (port, command)) except SshDestinationError as err: LOG.error(err) def ensure_tcp_port_listening(self, port, wait_timeout=10): """ Check that tcp port is open and ready to accept connections. Keep checking up to wait_timeout seconds. :param port: TCP port that is supposed to be listening. :type port: int :param wait_timeout: wait this many seconds until the port is ready. :type wait_timeout: int :return: True if the TCP port is listening. 
:rtype: bool """ stop_waiting_at = time.time() + wait_timeout while time.time() < stop_waiting_at: try: cmd = "netstat -ln | grep -w 0.0.0.0:%d 2>&1 " \ "> /dev/null" % port cout, cerr = self.execute_command(cmd) LOG.debug('stdout: %s', cout) LOG.debug('stderr: %s', cerr) return True except SshClientException as err: LOG.debug(err) time.sleep(1) return False def _get_file_content(self, path): cmd = "cat %s" % path with self._ssh_client.get_remote_handlers(cmd) as (_, stdout, _): return stdout.read() def _move_file(self, source, destination): cmd = 'yes | cp -rf %s %s' % (source, destination) self.execute_command(cmd)
class RemoteMySQLSource(MySQLSource): """Remote MySQLSource class""" def __init__(self, kwargs): ssh_kwargs = {} for arg in ['ssh_host', 'ssh_port', 'ssh_user', 'ssh_key']: if arg in kwargs: ssh_kwargs[arg.replace('ssh_', '')] = kwargs.pop(arg) self._ssh_client = SshClient(**ssh_kwargs) super(RemoteMySQLSource, self).__init__(**kwargs) @contextmanager def get_stream(self): raise NotImplementedError("Method get_stream not implemented") def clone(self, dest_host, port, compress=False): """ Send backup to destination host :param dest_host: Destination host :type dest_host: str :param port: Port to sending backup :type port: int :param compress: If True compress stream :type compress: bool :raise RemoteMySQLSourceError: if any error """ retry = 1 retry_time = 2 error_log = "/tmp/{src}_{src_port}-{dst}_{dst_port}.log".format( src=self._ssh_client.host, src_port=self._ssh_client.port, dst=dest_host, dst_port=port) if compress: compress_cmd = "| gzip -c - " else: compress_cmd = "" cmd = "bash -c \"sudo %s " \ "--stream=xbstream " \ "--host=127.0.0.1 " \ "--backup " \ "--target-dir ./ 2> %s" \ " %s | ncat %s %d --send-only\"" \ % (self._xtrabackup, error_log, compress_cmd, dest_host, port) while retry < 3: try: return self._ssh_client.execute(cmd) except SshClientException as err: LOG.warning(err) LOG.info('Will try again in after %d seconds', retry_time) time.sleep(retry_time) retry_time *= 2 retry += 1 def clone_config(self, dst): """ Clone config to destination server :param dst: Destination server :type dst: Ssh """ cfg_path = self._get_root_my_cnf() LOG.debug("Root my.cnf is: %s", cfg_path) self._save_cfg(dst, cfg_path) def _find_all_cnf(self, root_path): """ Return list of embed cnf files""" files = [root_path] cfg_content = self._ssh_client.get_text_content(root_path) for line in cfg_content.splitlines(): if '!includedir' in line: path = line.split()[1] file_list = self._ssh_client.list_files(path, recursive=False, files_only=True) for sub_file in file_list: files.extend(self._find_all_cnf(sub_file)) elif '!include' in line: files.extend(self._find_all_cnf(line.split()[1])) return files @staticmethod def _find_server_id_by_path(cfg): """Find path with server_id""" options = ["server_id", "server-id"] for option in options: try: if cfg.has_option("mysqld", option): return option except ConfigParser.Error: pass return None def _save_cfg(self, dst, root_cfg): """Save configs on destination recursively""" files = self._find_all_cnf(root_cfg) server_id = self._get_server_id(dst.host) is_server_id_set = False valid_cfg = [] for path in files: try: cfg = self._get_config(path) option = self._find_server_id_by_path(cfg) if option: cfg.set('mysqld', option, value=str(server_id)) is_server_id_set = True dst.client.write_config(path, cfg) valid_cfg.append(path) except ConfigParser.ParsingError: cfg_content = self._ssh_client.get_text_content(path) dst.client.write_content(path, cfg_content) if not is_server_id_set: for path in valid_cfg: cfg = self._get_config(path) if cfg.has_section("mysqld"): cfg.set('mysqld', "server_id", value=str(server_id)) dst.client.write_config(path, cfg) return def _get_root_my_cnf(self): """Return root my.cnf path""" for cfg_path in MY_CNF_COMMON_PATHS: try: self._ssh_client.get_text_content(cfg_path) return cfg_path except SshClientException: continue raise OSError("Root my.cnf not found") def _get_config(self, cfg_path): """ Return parsed config :param cfg_path: Path to config :type cfg_path: str :return: Path and config :rtype: ConfigParser.ConfigParser """ cfg = 
ConfigParser.ConfigParser(allow_no_value=True) try: cmd = "cat %s" % cfg_path with self._ssh_client.get_remote_handlers(cmd) as (_, cout, _): cfg.readfp(cout) except ConfigParser.ParsingError as err: LOG.error(err) raise return cfg def setup_slave(self, master_info): # noqa # pylint: disable=too-many-arguments """ Change master :param master_info: Master details. :type master_info: MySQLMasterInfo """ try: with self._cursor() as cursor: query = "CHANGE MASTER TO " \ "MASTER_HOST = '{master}', " \ "MASTER_USER = '******', " \ "MASTER_PORT = {port}, " \ "MASTER_PASSWORD = '******', " \ "MASTER_LOG_FILE = '{binlog}', " \ "MASTER_LOG_POS = {binlog_pos}"\ .format( master=master_info.host, user=master_info.user, password=master_info.password, binlog=master_info.binlog, binlog_pos=master_info.binlog_position, port=master_info.port ) cursor.execute(query) cursor.execute("START SLAVE") return True except pymysql.Error as err: LOG.debug(err) return False def apply_backup(self, datadir): """ Apply backup of destination server :param datadir: Path to datadir :return: Binlog file name and position :rtype: tuple :raise RemoteMySQLSourceError: if any error. """ try: use_memory = "--use-memory %d" % int(self._mem_available() / 2) except OSError: use_memory = "" logfile_path = "/tmp/xtrabackup-apply-log.log" cmd = "sudo {xtrabackup} --prepare --apply-log-only " \ "--target-dir {target_dir} {use_memory} " \ "> {logfile} 2>&1" \ "".format( xtrabackup=self._xtrabackup, target_dir=datadir, use_memory=use_memory, logfile=logfile_path ) try: self._ssh_client.execute(cmd) self._ssh_client.execute("sudo chown -R mysql %s" % datadir) return self._get_binlog_info(datadir) except SshClientException as err: LOG.debug("Logfile is:") LOG.debug(self._ssh_client.get_text_content(logfile_path)) raise RemoteMySQLSourceError(err) def _mem_available(self): """ Get available memory size :return: Size of available memory in bytes :rtype: int :raise OSError: if can' detect memory """ # https://git.kernel.org/pub/scm/linux/kernel/git/torvalds/linux.git/commit/?id=34e431b0a stdout_, _ = self._ssh_client.execute( "awk -v low=$(grep low /proc/zoneinfo | " "awk '{k+=$2}END{print k}') " "'{a[$1]=$2}END{m=" "a[\"MemFree:\"]" "+a[\"Active(file):\"]" "+a[\"Inactive(file):\"]" "+a[\"SReclaimable:\"]; " "print a[\"MemAvailable:\"]}' " "/proc/meminfo") mem = stdout_.strip() if not mem: raise OSError("Cant get available mem") free_mem = int(mem) * 1024 return free_mem @staticmethod def _get_server_id(host): """Determinate server id""" try: server_id = struct.unpack("!I", socket.inet_aton(host))[0] except socket.error: server_ip = socket.gethostbyname(host) server_id = struct.unpack("!I", socket.inet_aton(server_ip))[0] return server_id def _get_binlog_info(self, backup_path): """Get binlog coordinates from an xtrabackup_binlog_info. :param backup_path: Path where to look for xtrabackup_binlog_info. :type backup_path: str :return: Tuple with binlog coordinates - (file_name, pos) :rtype: tuple """ stdout_, _ = self._ssh_client.execute( 'sudo cat %s/xtrabackup_binlog_info' % backup_path) binlog_info = re.split(r'\t+', stdout_.rstrip()) return binlog_info[0], int(binlog_info[1])
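_get_binlog_info() above splits the tab-separated contents of xtrabackup_binlog_info into a (file name, position) pair; for example:

import re

stdout_ = "mysql-bin.000002\t53458\n"  # sample xtrabackup_binlog_info content
binlog_info = re.split(r'\t+', stdout_.rstrip())
print(binlog_info[0], int(binlog_info[1]))  # mysql-bin.000002 53458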
class Ssh(BaseDestination): """ SSH destination class :param ssh_connect_info: SSH connection info :type ssh_connect_info: SshConnectInfo :param remote_path: Path to store backup :param hostname: Hostname """ def __init__(self, remote_path, ssh_connect_info=SshConnectInfo(), hostname=socket.gethostname()): super(Ssh, self).__init__(remote_path) self._ssh_client = SshClient(ssh_connect_info) self.status_path = "{remote_path}/{hostname}/status".format( remote_path=self.remote_path, hostname=hostname) self.status_tmp_path = self.status_path + ".tmp" def save(self, handler, name): """ Read from handler and save it on remote ssh server :param name: relative path to a file to store the backup copy. :param handler: stream with content of the backup. """ remote_name = self.remote_path + '/' + name try: self._mkdirname_r(remote_name) except SshClientException as err: LOG.error('Failed to create directory for %s: %s', remote_name, err) return False try: cmd = "cat - > %s" % remote_name with self._ssh_client.get_remote_handlers(cmd) \ as (cin, _, _): with handler as file_obj: while True: chunk = file_obj.read(1024) if chunk: cin.write(chunk) else: break return True except SshClientException: return False def _mkdir_r(self, path): """ Create directory on the remote server :param path: remote directory :type path: str """ cmd = 'mkdir -p "%s"' % path self.execute_command(cmd) def list_files(self, prefix, recursive=False): """ Get list of file by prefix :param prefix: Path :param recursive: Recursive return list of files :type prefix: str :type recursive: bool :return: List of files :rtype: list """ return sorted(self._ssh_client.list_files(prefix, recursive)) def find_files(self, prefix, run_type): """ Find files by prefix :param prefix: Path :param run_type: Run type for search :type prefix: str :type run_type: str :return: List of files :rtype: list """ cmd = "find {prefix}/ -wholename '*/{run_type}/*' -type f".format( prefix=prefix, run_type=run_type) cout, _ = self._ssh_client.execute(cmd) return sorted(cout.split()) def delete(self, obj): """ Delete file by path :param obj: path to a remote file. """ cmd = "rm %s" % obj self.execute_command(cmd) @contextmanager def get_stream(self, path): """ Get a PIPE handler with content of the backup copy streamed from the destination :param path: Path to file :type path: str :return: Standard output. 
""" cmd = "cat %s" % path def _read_write_chunk(channel, write_fd, size=1024): while channel.recv_ready(): chunk = channel.recv(size) LOG.debug('read %d bytes', len(chunk)) if chunk: os.write(write_fd, chunk) def _write_to_pipe(read_fd, write_fd): try: os.close(read_fd) with self._ssh_client.session() as channel: LOG.debug('Executing %s', cmd) channel.exec_command(cmd) while not channel.exit_status_ready(): _read_write_chunk(channel, write_fd) LOG.debug('closing channel') _read_write_chunk(channel, write_fd) channel.recv_exit_status() except KeyboardInterrupt: return read_process = None try: read_pipe, write_pipe = os.pipe() read_process = Process(target=_write_to_pipe, args=(read_pipe, write_pipe), name='_write_to_pipe') read_process.start() os.close(write_pipe) yield read_pipe os.close(read_pipe) read_process.join() if read_process.exitcode: raise SshDestinationError('Failed to download %s' % path) LOG.debug('Successfully streamed %s', path) finally: if read_process: read_process.join() def _read_status(self): if self._status_exists(): cmd = "cat %s" % self.status_path with self._ssh_client.get_remote_handlers(cmd) as (_, stdout, _): return MySQLStatus(content=stdout.read()) else: return MySQLStatus() def _write_status(self, status): cmd = "cat - > %s" % self.status_path with self._ssh_client.get_remote_handlers(cmd) as (cin, _, _): cin.write(status.serialize()) def _status_exists(self): """ Check, if status exist :return: Exist status :rtype: bool :raise SshDestinationError: if any error. """ cmd = "bash -c 'if test -s %s; " \ "then echo exists; " \ "else echo not_exists; " \ "fi'" % self.status_path status, cerr = self._ssh_client.execute(cmd) if status.strip() == 'exists': return True elif status.strip() == 'not_exists': return False else: LOG.error(cerr) msg = 'Unrecognized response: %s' % status if status: raise SshDestinationError(msg) else: raise SshDestinationError('Empty response from ' 'SSH destination') def execute_command(self, cmd, quiet=False, background=False): """Execute ssh command :param cmd: Command for execution :type cmd: str :param quiet: If True don't print errors :param background: Don't wait until the command exits. :type background: bool :return: Handlers of stdin, stdout and stderr :rtype: tuple """ LOG.debug('Executing: %s', cmd) return self._ssh_client.execute(cmd, quiet=quiet, background=background) @property def client(self): """Return client""" return self._ssh_client @property def host(self): """IP address of the destination.""" return self._ssh_client.ssh_connect_info.host def _mkdirname_r(self, remote_name): """Create directory for a given file on the destination. For example, for a given file '/foo/bar/xyz' it would create directory '/foo/bar/'. :param remote_name: Full path to a file :type remote_name: str """ return self._mkdir_r(os.path.dirname(remote_name)) def netcat(self, command, port=9990): """ Run netcat on the destination pipe it to a given command. """ try: return self.execute_command("ncat -l %d --recv-only | " "%s" % (port, command)) except SshDestinationError as err: LOG.error(err) def ensure_tcp_port_listening(self, port, wait_timeout=10): """ Check that tcp port is open and ready to accept connections. Keep checking up to wait_timeout seconds. :param port: TCP port that is supposed to be listening. :type port: int :param wait_timeout: wait this many seconds until the port is ready. :type wait_timeout: int :return: True if the TCP port is listening. 
:rtype: bool """ stop_waiting_at = time.time() + wait_timeout while time.time() < stop_waiting_at: try: cmd = "netstat -ln | grep -w 0.0.0.0:%d 2>&1 " \ "> /dev/null" % port cout, cerr = self.execute_command(cmd) LOG.debug('stdout: %s', cout) LOG.debug('stderr: %s', cerr) return True except SshClientException as err: LOG.debug(err) time.sleep(1) return False def _get_file_content(self, path): cmd = "cat %s" % path with self._ssh_client.get_remote_handlers(cmd) as (_, stdout, _): return stdout.read() def _move_file(self, source, destination): cmd = 'yes | cp -rf %s %s' % (source, destination) self.execute_command(cmd)