def test__get_connection_raises_mysql_source_error(mock_connect):
    """A failing connect must surface as MySQLSourceError from get_connection()."""
    mock_connect.side_effect = OperationalError
    src = MySQLSource(MySQLConnectInfo(None, hostname=None), 'daily', 'full')
    with pytest.raises(MySQLSourceError):
        with src.get_connection():
            pass
def test_last_full_lsn(full_backup, run_type, status, tmpdir):
    """parent_lsn is read from the status stored on the destination."""
    dst = mock.Mock()
    dst.status.return_value = status
    source = MySQLSource(
        MySQLConnectInfo('/foo/bar'), run_type, full_backup, dst
    )
    assert source.parent_lsn == 19629412
def test_mysql_raises_on_wrong_run_type(run_type):
    """An unknown run type must be rejected at construction time."""
    with pytest.raises(MySQLSourceError):
        MySQLSource(
            MySQLConnectInfo('/foo/bar'),
            'foo',
            'full',
            dst=mock.Mock(),
        )
def test_full_copy_exists(run_type, full_backup, status, expected):
    """_full_copy_exists() reflects whether the status holds a full copy."""
    dst = mock.Mock()
    dst.status.return_value = status
    source = MySQLSource(
        MySQLConnectInfo('/foo/bar'), run_type, full_backup, dst
    )
    assert source._full_copy_exists() == expected
def test_mysql_source_has_methods():
    """A freshly built MySQLSource exposes the expected attributes."""
    source = MySQLSource(
        MySQLConnectInfo('/foo/bar'), 'hourly', 'full', dst=mock.Mock()
    )
    assert source._connect_info.defaults_file == '/foo/bar'
    assert source.run_type == 'hourly'
    assert source.suffix == 'xbstream'
    assert source._media_type == 'mysql'
def test_get_backup_type(full_backup, run_type, backup_type, status, tmpdir):
    """_get_backup_type() decides full vs incremental from stored status."""
    dst = mock.Mock()
    dst.status.return_value = status
    source = MySQLSource(
        MySQLConnectInfo('/foo/bar'), run_type, full_backup, dst
    )
    assert source._get_backup_type() == backup_type
def test_get_name(mock_time, mock_socket):
    """get_name() builds <host>/<run_type>/mysql/mysql-<timestamp>.xbstream."""
    source = MySQLSource(
        MySQLConnectInfo('/foo/bar'), 'daily', 'hourly', mock.Mock()
    )
    mock_socket.gethostname.return_value = 'some-host'
    mock_time.strftime.return_value = '2017-02-13_15_40_29'
    expected = "some-host/daily/mysql/mysql-2017-02-13_15_40_29.xbstream"
    assert source.get_name() == expected
def test__save_cfg(mock_all_cnf, mock_get_config, mock_get_content, tmpdir,
                   root, files, writing_config_count, writing_text_count):
    """_save_cfg() writes every discovered my.cnf to the destination.

    The fixture ``files`` maps relative config paths to their text. The
    test materialises them under ``tmpdir``, re-keys the dict to absolute
    paths, and serves config/content through the mocked helpers.
    """
    root_file = "%s/%s" % (str(tmpdir), root)
    # Prepare steps (writing config files with contents).
    # Iterate over a snapshot of the keys: the loop body re-keys the dict
    # (files[new_key] = files.pop(key)); mutating a dict while iterating
    # its live keys view raises RuntimeError on Python 3.
    for key in list(files.keys()):
        path = key.split('/')
        if len(path) > 1:
            try:
                tmpdir.mkdir('', path[0])
            except Exception:
                # directory may already exist from a previous key
                pass
        cfg_file = tmpdir.join(key)
        if '!includedir' in files[key]:
            # resolve the {tmp_dir} placeholder to the real tmpdir
            files[key] = files[key].format(tmp_dir=str(tmpdir))
        cfg_file.write(files[key])
        new_key = "%s/%s" % (str(tmpdir), key)
        files[new_key] = files.pop(key)
    mock_all_cnf.return_value = files.keys()

    def get_config(value):
        # Build a ConfigParser from the in-memory file content.
        cfg = ConfigParser.ConfigParser(allow_no_value=True)
        cfg.readfp(StringIO.StringIO(files[value]))
        return cfg

    def get_content(value):
        return files[value]

    mock_get_config.side_effect = get_config
    rmt_sql = RemoteMySQLSource({
        "run_type": INTERVALS[0],
        "backup_type": 'full',
        "mysql_connect_info": MySQLConnectInfo("/"),
        "ssh_connection_info": None
    })
    mock_cin = mock.MagicMock()
    mock_cin.channel.recv_exit_status.return_value = 0
    dst = mock.MagicMock()
    dst.host = "localhost"
    mock_get_content.side_effect = get_content
    rmt_sql._save_cfg(dst, root_file)
    mock_all_cnf.assert_called_once_with(root_file)
    assert dst.client.write_config.call_count == writing_config_count
    assert mock_get_content.call_count == writing_text_count
def test__get_binlog_info_parses_file():
    """_get_binlog_info() splits the tab-separated status into (file, int pos)."""
    ssh = mock.Mock()
    ssh.execute.return_value = ("mysql-bin.000002\t1054", None)
    remote = RemoteMySQLSource({
        "run_type": INTERVALS[0],
        "backup_type": "full",
        "mysql_connect_info": MySQLConnectInfo("/"),
        "ssh_connection_info": None,
    })
    remote._ssh_client = ssh
    assert remote._get_binlog_info("foo") == ("mysql-bin.000002", 1054)
def test__is_galera_returns_true_on_str_higher_wsrep_on(mock_connect):
    """wsrep_on reported as the string 'ON' means the node is Galera."""
    logging.basicConfig()
    cursor = mock.MagicMock()
    cursor.fetchone.return_value = {'wsrep_on': 'ON'}
    # Wire the mocked cursor through the two context managers
    # (connection and cursor) that is_galera() enters.
    mock_connect.return_value.__enter__.return_value \
        .cursor.return_value.__enter__.return_value = cursor
    source = MySQLSource(MySQLConnectInfo(None), 'daily', 'daily', None)
    assert source.is_galera() is True
def test__is_galera_returns_false_on_int_wsrep_on(mock_connect):
    """wsrep_on reported as integer 0 means the node is not Galera."""
    logging.basicConfig()
    cursor = mock.MagicMock()
    cursor.fetchone.return_value = {'wsrep_on': 0}
    # Route the mocked cursor through both context managers.
    mock_connect.return_value.__enter__.return_value \
        .cursor.return_value.__enter__.return_value = cursor
    source = MySQLSource(MySQLConnectInfo(None), 'daily', 'full')
    assert source.is_galera() is False
def test__enable_wsrep_desync_sets_wsrep_desync_to_on(mock_connect):
    """enable_wsrep_desync() issues SET GLOBAL wsrep_desync=ON."""
    logging.basicConfig()
    cursor = mock.MagicMock()
    mock_connect.return_value.__enter__.return_value \
        .cursor.return_value.__enter__.return_value = cursor
    source = MySQLSource(MySQLConnectInfo(None), 'daily', 'full')
    source.enable_wsrep_desync()
    cursor.execute.assert_called_with("SET GLOBAL wsrep_desync=ON")
def test__clone_config(mock_get_root, mock_save):
    """clone_config() saves the discovered root my.cnf to the destination."""
    mock_get_root.return_value = "/etc/my.cnf"
    destination = Ssh()
    remote = RemoteMySQLSource({
        "run_type": INTERVALS[0],
        "full_backup": INTERVALS[0],
        "mysql_connect_info": MySQLConnectInfo("/"),
        "ssh_connection_info": None
    })
    remote.clone_config(destination)
    mock_get_root.assert_called_with()
    mock_save.assert_called_with(destination, "/etc/my.cnf")
def test___mem_available():
    """_mem_available() converts the remote kB figure to bytes."""
    ssh = mock.Mock()
    ssh.execute.return_value = ("100500", None)
    remote = RemoteMySQLSource({
        "run_type": INTERVALS[0],
        "backup_type": "full",
        "mysql_connect_info": MySQLConnectInfo("/"),
        "ssh_connection_info": None,
    })
    remote._ssh_client = ssh
    assert remote._mem_available() == 100500 * 1024
def test__is_galera_returns_true_on_galera_node(mock_connect):
    """An unknown 'wsrep_on' variable means the server is not Galera.

    NOTE(review): the function name says "returns_true" but the assertion
    (correctly, for an InternalError on the wsrep_on lookup) expects
    False — the name looks stale and is worth renaming separately.
    """
    logging.basicConfig()
    cursor = mock.MagicMock()
    cursor.execute.side_effect = InternalError(
        1193, "Unknown system variable "
              "'wsrep_on'")
    mock_connect.return_value.__enter__.return_value \
        .cursor.return_value.__enter__.return_value = cursor
    source = MySQLSource(MySQLConnectInfo(None), 'daily', 'full')
    assert source.is_galera() is False
def test__clone(dest, port):
    """clone() streams an innobackupex backup to dest:port through nc."""
    expected_cmd = (
        'bash -c "sudo innobackupex --stream=xbstream ./'
        ' | gzip -c - | nc %s %d"' % (dest, port)
    )
    ssh = mock.Mock()
    remote = RemoteMySQLSource({
        "run_type": INTERVALS[0],
        "full_backup": INTERVALS[0],
        "mysql_connect_info": MySQLConnectInfo("/"),
        "ssh_connection_info": None
    })
    remote._ssh_client = ssh
    remote.clone(dest, port)
    ssh.execute.assert_called_with(expected_cmd)
def test__mem_available_raise_exception():
    """_mem_available() raises OSError when the remote reports nothing."""
    ssh = mock.Mock()
    ssh.execute.return_value = ("", None)
    remote = RemoteMySQLSource({
        "run_type": INTERVALS[0],
        "backup_type": "full",
        "mysql_connect_info": MySQLConnectInfo("/"),
        "ssh_connection_info": None,
    })
    remote._ssh_client = ssh
    with pytest.raises(OSError):
        remote._mem_available()
def test__wsrep_provider_version_returns_correct_version(mock_connect):
    """wsrep_provider_version strips the build hash from the reported value."""
    logging.basicConfig()
    cursor = mock.MagicMock()
    cursor.fetchall.return_value = [
        {
            'Variable_name': 'wsrep_provider_version',
            'Value': '3.19(rb98f92f)'
        },
    ]
    mock_connect.return_value.__enter__.return_value \
        .cursor.return_value.__enter__.return_value = cursor
    source = MySQLSource(MySQLConnectInfo(None), 'daily', 'full')
    assert source.wsrep_provider_version == '3.19'
def test_apply_retention_policy(mock_get_files_to_delete,
                                mock_delete_local_files,
                                mock_get_prefix, tmpdir):
    """apply_retention_policy() prunes local files and lists remote copies."""
    mock_get_files_to_delete.return_value = []
    mock_get_prefix.return_value = 'master.box/hourly'
    defaults_file = tmpdir.join('my.cnf')
    config = mock.Mock()
    source = MySQLSource(
        MySQLConnectInfo(str(defaults_file)), 'hourly', 'daily', mock.Mock()
    )
    dst = mock.Mock()
    dst.remote_path = '/foo/bar'
    source.apply_retention_policy(dst, config, 'hourly', mock.Mock())
    mock_delete_local_files.assert_called_once_with('mysql', config)
    dst.list_files.assert_called_once_with(
        '/foo/bar/master.box/hourly/mysql/mysql-'
    )
def test__disable_wsrep_desync_sets_wsrep_desync_to_off(mock_connect):
    """disable_wsrep_desync() waits for an empty recv queue, then sets OFF."""
    logging.basicConfig()
    cursor = mock.MagicMock()
    cursor.fetchall.return_value = [
        {'Variable_name': 'wsrep_local_recv_queue', 'Value': '0'},
    ]
    mock_connect.return_value.__enter__.return_value \
        .cursor.return_value.__enter__.return_value = cursor
    source = MySQLSource(MySQLConnectInfo(None), 'daily', 'full')
    source.disable_wsrep_desync()
    cursor.execute.assert_any_call(
        "SHOW GLOBAL STATUS LIKE 'wsrep_local_recv_queue'"
    )
    cursor.execute.assert_called_with("SET GLOBAL wsrep_desync=OFF")
def backup_mysql(run_type, config):
    """Take backup of local MySQL instance

    :param run_type: Run type
    :type run_type: str
    :param config: Tool configuration
    :type config: ConfigParser.ConfigParser
    :return: None
    """
    # MySQL backups are opt-in: a missing or false 'backup_mysql' option
    # makes this a silent no-op (both paths funnel into the same except).
    try:
        if not config.getboolean('source', 'backup_mysql'):
            raise TwinDBBackupError('MySQL backups are not enabled in config')
    except (ConfigParser.NoOptionError, TwinDBBackupError) as err:
        LOG.debug(err)
        LOG.debug('Not backing up MySQL')
        return
    dst = get_destination(config)
    # 'daily' is the default interval for full backups when unconfigured.
    try:
        full_backup = config.get('mysql', 'full_backup')
    except ConfigParser.NoOptionError:
        full_backup = 'daily'
    backup_start = time.time()
    src = MySQLSource(
        MySQLConnectInfo(config.get('mysql', 'mysql_defaults_file')),
        run_type, full_backup, dst)
    callbacks = []
    # Stream the backup to the destination; callbacks collect deferred
    # post-upload actions (e.g. local-copy housekeeping).
    src_name = _backup_stream(config, src, dst, callbacks)
    status = prepare_status(dst, src, run_type, src_name, backup_start)
    status = src.apply_retention_policy(dst, config, run_type, status)
    backup_duration = \
        status[run_type][src_name]['backup_finished'] - \
        status[run_type][src_name]['backup_started']
    # Export the duration metric before persisting the updated status.
    export_info(config, data=backup_duration,
                category=ExportCategory.mysql,
                measure_type=ExportMeasureType.backup)
    dst.status(status)
    LOG.debug('Callbacks are %r', callbacks)
    # Run deferred callbacks only after the backup and status are saved.
    for callback in callbacks:
        callback[0].callback(**callback[1])
def test_apply_retention_policy_remove(mock_get_files_to_delete,
                                       mock_delete_local_files, tmpdir):
    """Copies scheduled for deletion are also removed from the status."""
    mock_get_files_to_delete.return_value = ['key-foo']
    defaults_file = tmpdir.join('my.cnf')
    config = mock.Mock()
    source = MySQLSource(
        MySQLConnectInfo(str(defaults_file)), 'hourly', 'full',
        dst=mock.Mock()
    )
    dst = mock.Mock()
    dst.remote_path = '/foo/bar'
    dst.basename.return_value = 'key-foo'
    status = mock.Mock()
    # noinspection PyTypeChecker
    source.apply_retention_policy(dst, config, 'hourly', status)
    status.remove.assert_called_once_with('key-foo')
def test___find_all_cnf(mock_get_text_content, mock_list, tmpdir, root, files):
    """_find_all_cnf() follows !includedir directives from the root my.cnf."""
    root_file = "%s/%s" % (str(tmpdir), root)
    # Prepare steps (writing config files with contents).
    # Iterate over a snapshot of the keys: the loop re-keys the dict
    # (files[...] = files.pop(key)); mutating a dict while iterating its
    # live keys view raises RuntimeError on Python 3.
    for key in list(files.keys()):
        path = key.split('/')
        if len(path) > 1:
            try:
                tmpdir.mkdir('', path[0])
            except Exception:
                # directory may already exist from a previous key
                pass
        cfg_file = tmpdir.join(key)
        if '!includedir' in files[key]:
            # resolve the {tmp_dir} placeholder to the real tmpdir
            files[key] = files[key].format(tmp_dir=str(tmpdir))
        cfg_file.write(files[key])
        files["%s/%s" % (str(tmpdir), key)] = files.pop(key)

    # Callback returning raw config text from the in-memory dict
    def get_text_content(value):
        return files[value]

    def get_list(value):
        return os.listdir(value)

    mock_get_text_content.side_effect = get_text_content
    mock_list.side_effect = get_list
    rmt_sql = RemoteMySQLSource({
        "run_type": INTERVALS[0],
        "backup_type": 'full',
        "mysql_connect_info": MySQLConnectInfo("/"),
        "ssh_connection_info": None
    })
    assert sorted(rmt_sql._find_all_cnf(root_file)) == sorted(files.keys())
def test___find_all_cnf(mock_get_text_content, mock_list, tmpdir, mycnfs,
                        expected_result_template):
    """_find_all_cnf() discovers every my.cnf reachable from the root file.

    ``mycnfs`` maps relative paths to config text; the expected result is
    the template joined onto the tmpdir root.
    """
    mycnf_root = Path(tmpdir)
    # Prepare steps (writing config files with content)
    for key in mycnfs.keys():
        mycnf_root.joinpath(key).parent.mkdir(exist_ok=True)
        with open(str(mycnf_root.joinpath(key)), "w") as fp:
            fp.write(mycnfs[key])

    # mock helper functions
    def get_text_content(full_path):
        LOG.debug("Getting content of %s", full_path)
        # cut mycnf_root prefix from the full path and look the content
        # up in the mycnfs dictionary.
        return mycnfs["/".join(
            PurePath(full_path).parts[len(mycnf_root.parts):])]

    def get_list(path, recursive=False, files_only=True):
        return os.listdir(path)

    mock_get_text_content.side_effect = get_text_content
    mock_list.side_effect = get_list
    rmt_sql = RemoteMySQLSource({
        "run_type": INTERVALS[0],
        "backup_type": 'full',
        "mysql_connect_info": MySQLConnectInfo("/"),
        "ssh_connection_info": None
    })
    expected_result = sorted(
        osp.join(str(mycnf_root), item)
        for item in expected_result_template
    )
    actual_result = sorted(rmt_sql._find_all_cnf(
        mycnf_root.joinpath("my.cnf")))
    # The original used ``assert x == y, LOG.error(...)`` which logged the
    # error even when the assertion passed and attached None (LOG.error's
    # return value) as the failure message. Use a plain message instead.
    assert actual_result == expected_result, (
        "Expected: %s\nActual: %s"
        % (pformat(expected_result), pformat(actual_result))
    )
def clone_mysql(
        cfg, source, destination,  # pylint: disable=too-many-arguments
        replication_user, replication_password,
        netcat_port=9990,
        compress=False):
    """Clone mysql backup of remote machine and stream it to slave

    :param cfg: tool configuration
    :param source: master address as ``host[:port]``
    :param destination: slave address as ``host[:port]``
    :param replication_user: user for CHANGE MASTER
    :param replication_password: password for CHANGE MASTER
    :param netcat_port: TCP port used for the netcat stream
    :param compress: whether the stream is compressed in transit
    """
    try:
        LOG.debug('Remote MySQL Source: %s', split_host_port(source)[0])
        LOG.debug('MySQL defaults: %s',
                  cfg.get('mysql', 'mysql_defaults_file'))
        LOG.debug('SSH username: %s', cfg.get('ssh', 'ssh_user'))
        LOG.debug('SSH key: %s', cfg.get('ssh', 'ssh_key'))
        # Source side: the master we take the streaming backup from.
        src = RemoteMySQLSource({
            "ssh_host": split_host_port(source)[0],
            "ssh_user": cfg.get('ssh', 'ssh_user'),
            "ssh_key": cfg.get('ssh', 'ssh_key'),
            "mysql_connect_info": MySQLConnectInfo(
                cfg.get('mysql', 'mysql_defaults_file'),
                hostname=split_host_port(source)[0]),
            "run_type": INTERVALS[0],
            "backup_type": 'full'
        })
        xbstream_binary = cfg.get('mysql', 'xbstream_binary')
        LOG.debug('SSH destination: %s', split_host_port(destination)[0])
        LOG.debug('SSH username: %s', cfg.get('ssh', 'ssh_user'))
        LOG.debug('SSH key: %s', cfg.get('ssh', 'ssh_key'))
        dst = Ssh(
            '/tmp',
            ssh_host=split_host_port(destination)[0],
            ssh_user=cfg.get('ssh', 'ssh_user'),
            ssh_key=cfg.get('ssh', 'ssh_key'))
        datadir = src.datadir
        LOG.debug('datadir: %s', datadir)
        # Refuse to clone into a non-empty datadir — it would be clobbered.
        if dst.list_files(datadir):
            LOG.error("Destination datadir is not empty: %s", datadir)
            exit(1)
        # Stream the backup from master to slave through netcat.
        _run_remote_netcat(
            compress,
            datadir,
            destination,
            dst,
            netcat_port,
            src,
            xbstream_binary)
        LOG.debug('Copying MySQL config to the destination')
        src.clone_config(dst)
        LOG.debug('Remote MySQL destination: %s',
                  split_host_port(destination)[0])
        LOG.debug('MySQL defaults: %s',
                  cfg.get('mysql', 'mysql_defaults_file'))
        LOG.debug('SSH username: %s', cfg.get('ssh', 'ssh_user'))
        LOG.debug('SSH key: %s', cfg.get('ssh', 'ssh_key'))
        # Destination side: prepare the copied backup and read the binlog
        # coordinates that replication will start from.
        dst_mysql = RemoteMySQLSource({
            "ssh_host": split_host_port(destination)[0],
            "ssh_user": cfg.get('ssh', 'ssh_user'),
            "ssh_key": cfg.get('ssh', 'ssh_key'),
            "mysql_connect_info": MySQLConnectInfo(
                cfg.get('mysql', 'mysql_defaults_file'),
                hostname=split_host_port(destination)[0]),
            "run_type": INTERVALS[0],
            "backup_type": 'full'
        })
        binlog, position = dst_mysql.apply_backup(datadir)
        LOG.debug('Binlog coordinates: (%s, %d)', binlog, position)
        try:
            LOG.debug('Starting MySQL on the destination')
            _mysql_service(dst, action='start')
            LOG.debug('MySQL started')
        except TwinDBBackupError as err:
            LOG.error(err)
            exit(1)
        LOG.debug('Setting up replication.')
        LOG.debug('Master host: %s', source)
        LOG.debug('Replication user: %s', replication_user)
        LOG.debug('Replication password: %s', replication_password)
        dst_mysql.setup_slave(
            MySQLMasterInfo(
                host=split_host_port(source)[0],
                port=split_host_port(source)[1],
                user=replication_user,
                password=replication_password,
                binlog=binlog,
                binlog_pos=position))
    except (ConfigParser.NoOptionError, OperationalError) as err:
        LOG.error(err)
        exit(1)
def test_clone(runner, master1, slave, docker_client, config_content_clone):
    """End-to-end test: clone a master into a slave and verify replication.

    Runs ``twindb-backup clone mysql`` inside the runner container against
    two dockerized MySQL servers and polls the slave until both IO and SQL
    replication threads report running.
    """
    twindb_config_dir = get_twindb_config_dir(docker_client, runner['Id'])
    twindb_config_host = "%s/twindb-backup-1.cfg" % twindb_config_dir
    twindb_config_guest = '/etc/twindb/twindb-backup-1.cfg'
    my_cnf_path = "%s/my.cnf" % twindb_config_dir
    private_key_host = "%s/private_key" % twindb_config_dir
    private_key_guest = "/etc/twindb/private_key"
    # MySQL client credentials used by the tool inside the containers.
    contents = """
[client]
user=dba
password=qwerty
"""
    with open(my_cnf_path, "w") as my_cnf:
        my_cnf.write(contents)
    # Throwaway RSA key for container-to-container SSH (test-only secret).
    private_key = """-----BEGIN RSA PRIVATE KEY-----
MIIEoAIBAAKCAQEAyXxAjPShNGAedbaEtltFI6A7RlsyI+4evxTq6uQrgbJ6Hm+p
HBXshXQYXDyVjvytaM+6GKF+r+6+C+6Wc5Xz4lLO/ZiSCdPbyEgqw1JoHrgPNpc6
wmCtjJExxjzvpwSVgbZg3xOdqW1y+TyqeUkXEg/Lm4VZhN1Q/KyGCgBlWuAXoOYR
GhaNWqcnr/Wn5YzVHAx2yJNrurtKLVYVMIkGcN/6OUaPpWqKZLaXiK/28PSZ5GdT
DmxRg4W0pdyGEYQndpPlpLF4w5gNUEhVZM8hWVE29+DIW3XXVYGYchxmkhU7wrGx
xZR+k5AT+7g8VspVS8zNMXM9Z27w55EQuluNMQIBIwKCAQAzz35QIaXLo7APo/Y9
hS8JKTPQQ1YJPTsbMUO4vlRUjPrUoF6vc1oTsCOFbqoddCyXS1u9MNdvEYFThn51
flSn6WhtGJqU0BPxrChA2q0PNqTThfkqqyVQCBQdCFrhzfqPEaPhl1RtZUlzSh01
IWxVGgEn/bfu9xTTQk5aV9+MZQ2XKe4BGzpOZMI/B7ivRCcthEwMTx92opr52bre
4t7DahVLN/2Wu4lxajDzCaKXpjMuL76lFov0mZZN7S8whH5xSx1tpapHqsCAwfLL
k49lDdR8aN6oqoeK0e9w//McIaKxN2FVxD4bcuXiQTjihx+QwQOLmlHSRDKhTsYg
4Q5bAoGBAOgVZM2eqC8hNl5UH//uuxOeBKqwz7L/FtGemNr9m0XG8N9yE/K7A5iX
6EDvDyVI51IlIXdxfK8re5yxfbJ4YevenwdEZZ2O8YRrVByJ53PV9CcVeWL4p6f/
I56sYyDfXcnDTEOVYY0mCfYUfUcSb1ExpuIU4RvuQJg6tvbdxD9FAoGBAN4/pVCT
krRd6PJmt6Dbc2IF6N09OrAnLB3fivGztF5cp+RpyqZK4ve+akLoe1laTg7vNtnF
l/PZtM9v/VT45hb70MFEHO+sKvGa5Yimxkb6YCriJOcLxTysSgFHKz7v+8BqqoHi
qY4fORGwPVDv28I8jKRvcuNHendV/Rdcuk79AoGAd1t1q5NscAJzu3u4r4IXEWc1
mZzClpHROJq1AujTgviZInUu1JqxZGthgHrx2KkmggR3nIOB86/2bdefut7TRhq4
L5+Et24VzxKgSTD6sJnrR0zfV3iQvMxbdizFRBsaSoGyMWLEdHn2fo4xzMem9o6Q
VwNsdMOsMQhA1rsxuiMCgYBr8wcnIxte68jqxC1OIXKOsmnKi3RG7nSDidXF2vE1
JbCiJMGD+Hzeu5KyyLDw4rgzI7uOWKjkJ+obnMuBCy3t6AZPPlcylXPxsaKwFn2Q
MHfaUJWUyzPqRQ4AnukekdINAJv18cAR1Kaw0fHle9Ej1ERP3lxfw6HiMRSHsLJD
nwKBgCIXVhXCDaXOOn8M4ky6k27bnGJrTkrRjHaq4qWiQhzizOBTb+7MjCrJIV28
8knW8+YtEOfl5R053SKQgVsmRjjDfvCirGgqC4kSAN4A6MD+GNVXZVUUjAUBVUbU
8Wt4BxW6kFA7+Su7n8o4DxCqhZYmK9ZUhNjE+uUhxJCJaGr4
-----END RSA PRIVATE KEY-----
"""
    with open(private_key_host, "w") as key_fd:
        key_fd.write(private_key)
    with open(twindb_config_host, 'w') as fp:
        content = config_content_clone.format(
            PRIVATE_KEY=private_key_guest,
            MY_CNF='/etc/twindb/my.cnf'
        )
        fp.write(content)
    cmd = '/usr/sbin/sshd'
    # Run SSH daemon on master1_1
    ret, cout = docker_execute(docker_client, master1['Id'], cmd)
    print(cout)
    cmd = ['twindb-backup', '--debug',
           '--config', twindb_config_guest,
           'clone',
           'mysql',
           "%s:3306" % master1['ip'],
           "%s:3306" % slave['ip']]
    ret, cout = docker_execute(docker_client, runner['Id'], cmd)
    print(cout)
    assert ret == 0
    sql_master_2 = RemoteMySQLSource({
        "ssh_host": slave['ip'],
        "ssh_user": '******',
        "ssh_key": private_key_guest,
        "mysql_connect_info": MySQLConnectInfo(
            my_cnf_path,
            hostname=slave['ip']
        ),
        "run_type": INTERVALS[0],
        "backup_type": 'full'
    })
    # Poll the slave for up to 30 seconds until replication is running.
    timeout = time.time() + 30
    while time.time() < timeout:
        with sql_master_2.get_connection() as conn:
            with conn.cursor() as cursor:
                cursor.execute('SHOW SLAVE STATUS')
                row = cursor.fetchone()
                if row['Slave_IO_Running'] == 'Yes' \
                        and row['Slave_SQL_Running'] == 'Yes':
                    LOG.info('Relication is up and running')
                    return
    # Only reached when the poll loop never saw both threads running.
    LOG.error('Replication is not running after 30 seconds timeout')
    assert False
def test_suffix():
    """suffix starts as 'xbstream' and can be extended in place."""
    source = MySQLSource(MySQLConnectInfo('/foo/bar'), INTERVALS[0], 'full')
    assert source.suffix == 'xbstream'
    source.suffix += '.gz'
    assert source.suffix == 'xbstream.gz'
def backup_mysql(run_type, config):
    """Take backup of local MySQL instance

    :param run_type: Run type
    :type run_type: str
    :param config: Tool configuration
    :type config: TwinDBBackupConfig
    """
    if config.backup_mysql is False:
        LOG.debug("Not backing up MySQL")
        return
    dst = config.destination()
    # 'daily' is the default full-backup interval when unconfigured.
    try:
        full_backup = config.mysql.full_backup
    except configparser.NoOptionError:
        full_backup = "daily"
    backup_start = time.time()
    status = MySQLStatus(dst=dst)
    kwargs = {
        "backup_type": status.next_backup_type(full_backup, run_type),
        "dst": dst,
        "xtrabackup_binary": config.mysql.xtrabackup_binary,
    }
    parent = status.candidate_parent(run_type)
    # Incremental backups need the parent's LSN to diff against.
    if kwargs["backup_type"] == "incremental":
        kwargs["parent_lsn"] = parent.lsn
    LOG.debug("Creating source %r", kwargs)
    src = MySQLSource(
        MySQLConnectInfo(config.mysql.defaults_file), run_type, **kwargs
    )
    callbacks = []
    try:
        _backup_stream(config, src, dst, callbacks=callbacks)
    except (DestinationError, SourceError, SshClientException) as err:
        # Wrap transport/source failures in the tool's operational error.
        raise OperationError(err)
    LOG.debug("Backup copy name: %s", src.get_name())
    # Metadata recorded in the status for this copy.
    kwargs = {
        "type": src.type,
        "binlog": src.binlog_coordinate[0],
        "position": src.binlog_coordinate[1],
        "lsn": src.lsn,
        "backup_started": backup_start,
        "backup_finished": time.time(),
        "config_files": my_cnfs(MY_CNF_COMMON_PATHS),
    }
    if src.incremental:
        kwargs["parent"] = parent.key
    backup_copy = MySQLCopy(src.host, run_type, src.basename, **kwargs)
    status.add(backup_copy)
    status = src.apply_retention_policy(dst, config, run_type, status)
    LOG.debug("status after apply_retention_policy():\n%s", status)
    backup_duration = backup_copy.duration
    export_info(
        config,
        data=backup_duration,
        category=ExportCategory.mysql,
        measure_type=ExportMeasureType.backup,
    )
    # Persist the updated status before running deferred callbacks.
    status.save(dst)
    LOG.debug("Callbacks are %r", callbacks)
    for callback in callbacks:
        callback[0].callback(**callback[1])
def clone_mysql(
        cfg, source, destination,  # pylint: disable=too-many-arguments
        replication_user, replication_password,
        netcat_port=9990):
    """Clone mysql backup of remote machine and stream it to slave

    :param cfg: tool configuration
    :param source: master address as ``host[:port]``
    :param destination: slave address as ``host[:port]``
    :param replication_user: user for CHANGE MASTER
    :param replication_password: password for CHANGE MASTER
    :param netcat_port: TCP port used for the netcat stream
    """
    try:
        LOG.debug('Remote MySQL Source: %s', split_host_port(source)[0])
        LOG.debug('MySQL defaults: %s',
                  cfg.get('mysql', 'mysql_defaults_file'))
        LOG.debug('SSH username: %s', cfg.get('ssh', 'ssh_user'))
        LOG.debug('SSH key: %s', cfg.get('ssh', 'ssh_key'))
        # Source side: the master we take the streaming backup from.
        src = RemoteMySQLSource({
            "ssh_connection_info": SshConnectInfo(
                host=split_host_port(source)[0],
                user=cfg.get('ssh', 'ssh_user'),
                key=cfg.get('ssh', 'ssh_key')),
            "mysql_connect_info": MySQLConnectInfo(
                cfg.get('mysql', 'mysql_defaults_file'),
                hostname=split_host_port(source)[0]),
            "run_type": INTERVALS[0],
            "full_backup": INTERVALS[0],
        })
        LOG.debug('SSH destination: %s', split_host_port(destination)[0])
        LOG.debug('SSH username: %s', cfg.get('ssh', 'ssh_user'))
        LOG.debug('SSH key: %s', cfg.get('ssh', 'ssh_key'))
        dst = Ssh(
            ssh_connect_info=SshConnectInfo(
                host=split_host_port(destination)[0],
                user=cfg.get('ssh', 'ssh_user'),
                key=cfg.get('ssh', 'ssh_key')),
        )
        datadir = src.datadir
        LOG.debug('datadir: %s', datadir)
        # Refuse to clone into a non-empty datadir — it would be clobbered.
        if dst.list_files(datadir):
            LOG.error("Destination datadir is not empty: %s", datadir)
            exit(1)
        try:
            LOG.debug('Stopping MySQL on the destination')
            _mysql_service(dst, action='stop')
        except TwinDBBackupError as err:
            LOG.error(err)
            exit(1)
        # Receiver process: netcat listens on the destination and unpacks
        # the gzipped xbstream into the datadir.
        proc_netcat = Process(
            target=dst.netcat,
            args=(
                "gunzip -c - | xbstream -x -C {datadir}".format(
                    datadir=datadir),
            ),
            kwargs={'port': netcat_port})
        proc_netcat.start()
        LOG.debug('Starting netcat on the destination')
        # Sender: the master streams its backup into the waiting netcat.
        src.clone(
            dest_host=split_host_port(destination)[0],
            port=netcat_port)
        proc_netcat.join()
        LOG.debug('Copying MySQL config to the destination')
        src.clone_config(dst)
        LOG.debug('Remote MySQL destination: %s',
                  split_host_port(destination)[0])
        LOG.debug('MySQL defaults: %s',
                  cfg.get('mysql', 'mysql_defaults_file'))
        LOG.debug('SSH username: %s', cfg.get('ssh', 'ssh_user'))
        LOG.debug('SSH key: %s', cfg.get('ssh', 'ssh_key'))
        # Destination side: prepare the copied backup and read the binlog
        # coordinates that replication will start from.
        dst_mysql = RemoteMySQLSource({
            "ssh_connection_info": SshConnectInfo(
                host=split_host_port(destination)[0],
                user=cfg.get('ssh', 'ssh_user'),
                key=cfg.get('ssh', 'ssh_key')),
            "mysql_connect_info": MySQLConnectInfo(
                cfg.get('mysql', 'mysql_defaults_file'),
                hostname=split_host_port(destination)[0]),
            "run_type": INTERVALS[0],
            "full_backup": INTERVALS[0],
        })
        binlog, position = dst_mysql.apply_backup(datadir)
        LOG.debug('Binlog coordinates: (%s, %d)', binlog, position)
        try:
            LOG.debug('Starting MySQL on the destination')
            _mysql_service(dst, action='start')
        except TwinDBBackupError as err:
            LOG.error(err)
            exit(1)
        LOG.debug('Setting up replication.')
        LOG.debug('Master host: %s', source)
        LOG.debug('Replication user: %s', replication_user)
        LOG.debug('Replication password: %s', replication_password)
        dst_mysql.setup_slave(source, replication_user,
                              replication_password, binlog, position)
    except (ConfigParser.NoOptionError, OperationalError) as err:
        LOG.error(err)
        exit(1)
def backup_mysql(run_type, config):
    """Take backup of local MySQL instance

    :param run_type: Run type
    :type run_type: str
    :param config: Tool configuration
    :type config: ConfigParser.ConfigParser
    :return: None
    """
    # MySQL backups are opt-in: a missing or false 'backup_mysql' option
    # makes this a silent no-op (both paths funnel into the same except).
    try:
        if not config.getboolean('source', 'backup_mysql'):
            raise TwinDBBackupError('MySQL backups are not enabled in config')
    except (ConfigParser.NoOptionError, TwinDBBackupError) as err:
        LOG.debug(err)
        LOG.debug('Not backing up MySQL')
        return
    dst = get_destination(config)
    # 'daily' is the default interval for full backups when unconfigured.
    try:
        full_backup = config.get('mysql', 'full_backup')
    except ConfigParser.NoOptionError:
        full_backup = 'daily'
    backup_start = time.time()
    src = MySQLSource(
        MySQLConnectInfo(config.get('mysql', 'mysql_defaults_file')),
        run_type, full_backup, dst)
    callbacks = []
    stream = src.get_stream()
    src_name = src.get_name()
    # Gzip modifier — the stream is always compressed.
    stream = Gzip(stream).get_stream()
    src_name += '.gz'
    # KeepLocal modifier — optional local copy; its upload callback is
    # deferred until the remote save succeeds.
    try:
        keep_local_path = config.get('destination', 'keep_local_path')
        kl_modifier = KeepLocal(stream,
                                os.path.join(keep_local_path, src_name))
        stream = kl_modifier.get_stream()
        callbacks.append((kl_modifier, {
            'keep_local_path': keep_local_path,
            'dst': dst
        }))
    except ConfigParser.NoOptionError:
        LOG.debug('keep_local_path is not present in the config file')
    # GPG modifier — optional encryption; a modifier failure downgrades
    # to an unencrypted backup rather than aborting.
    try:
        stream = Gpg(stream,
                     config.get('gpg', 'recipient'),
                     config.get('gpg', 'keyring')).get_stream()
        src_name += '.gpg'
    except (ConfigParser.NoSectionError, ConfigParser.NoOptionError):
        pass
    except ModifierException as err:
        LOG.warning(err)
        LOG.warning('Will skip encryption')
    if not dst.save(stream, src_name):
        LOG.error('Failed to save backup copy %s', src_name)
        exit(1)
    status = prepare_status(dst, src, run_type, src_name, backup_start)
    src.apply_retention_policy(dst, config, run_type, status)
    dst.status(status)
    LOG.debug('Callbacks are %r', callbacks)
    # Run deferred callbacks only after the backup and status are saved.
    for callback in callbacks:
        callback[0].callback(**callback[1])