Example #1
0
def test_serialize_is_valid(status_raw_content):
    """Serialized status must be JSON carrying the three top-level keys."""
    serialized = MySQLStatus(content=status_raw_content).serialize()
    payload = json.loads(serialized)
    for field in ("status", "version", "md5"):
        assert field in payload
Example #2
0
def test_add(status_raw_empty, tmpdir):
    """Adding a full daily copy must populate status.daily with its fields."""
    status = MySQLStatus(status_raw_empty)
    assert status.valid

    mycnf_1 = tmpdir.join('my-1.cnf')
    mycnf_1.write('some_content_1')
    mycnf_2 = tmpdir.join('my-2.cnf')
    mycnf_2.write('some_content_2')

    backup_copy = MySQLCopy(
        'master1', 'daily', 'foo.txt',
        binlog='binlog1',
        position=101,
        type='full',
        lsn=1230,
        backup_started=123,
        backup_finished=456,
        config_files=[str(mycnf_1), str(mycnf_2)],
    )
    status.add(backup_copy)
    assert len(status.daily) == 1

    stored = status.daily[backup_copy.key]
    expectations = {
        'binlog': 'binlog1',
        'position': 101,
        'type': 'full',
        'lsn': 1230,
        'backup_started': 123,
        'backup_finished': 456,
        'duration': 333,
    }
    for attr, value in expectations.items():
        assert getattr(stored, attr) == value
    assert {str(mycnf_1): 'some_content_1'} in stored.config
    assert {str(mycnf_2): 'some_content_2'} in stored.config
Example #3
0
def test_get_my_cnf_2_cnf(tmpdir):
    """get_my_cnf() must yield every config file stored with the copy."""
    status = MySQLStatus()
    mycnf_1 = tmpdir.join('my-1.cnf')
    mycnf_1.write('some_content_1')
    mycnf_2 = tmpdir.join('my-2.cnf')
    mycnf_2.write('some_content_2')

    backup_copy = MySQLCopy(
        'master1', 'daily', 'foo.txt',
        binlog='binlog1',
        position=101,
        type='full',
        lsn=1230,
        backup_started=123,
        backup_finished=456,
        config_files=[str(mycnf_1), str(mycnf_2)],
    )
    status.add(backup_copy)

    # Pop each yielded path; whatever is left at the end was never yielded.
    remaining = {
        str(mycnf_1): 'some_content_1',
        str(mycnf_2): 'some_content_2',
    }
    for path, content in get_my_cnf(status, backup_copy.key):
        assert path in remaining
        assert content == remaining.pop(path)

    assert remaining == {}
Example #4
0
 def _read_status(self):
     """
     Fetch the status file from the remote host over SSH.

     :return: status parsed from the remote file's content, or an empty
         status if the file doesn't exist on the remote side.
     :rtype: MySQLStatus
     """
     if self._status_exists():
         # Read the remote file by cat-ing it through the SSH channel.
         cmd = "cat %s" % self.status_path
         with self._ssh_client.get_remote_handlers(cmd) as (_, stdout, _):
             return MySQLStatus(content=stdout.read())
     else:
         return MySQLStatus()
Example #5
0
def test_get_my_cnf_2_cnf(tmpdir):
    """Both stored config files must come back through get_my_cnf()."""
    status = MySQLStatus()
    mycnf_1 = tmpdir.join('my-1.cnf')
    mycnf_1.write('some_content_1')
    mycnf_2 = tmpdir.join('my-2.cnf')
    mycnf_2.write('some_content_2')

    backup_copy = MySQLCopy(
        'master1',
        'daily',
        'foo.txt',
        binlog='binlog1',
        position=101,
        type='full',
        lsn=1230,
        backup_started=123,
        backup_finished=456,
        config_files=[str(mycnf_1), str(mycnf_2)],
    )
    status.add(backup_copy)

    pending = {
        str(mycnf_1): 'some_content_1',
        str(mycnf_2): 'some_content_2',
    }
    for path, content in get_my_cnf(status, backup_copy.key):
        assert path in pending
        assert content == pending.pop(path)

    # Every expected file must have been yielded exactly once.
    assert pending == {}
Example #6
0
    def _read_status(self, cls=MySQLStatus):
        """
        Load the status from its file on disk.

        :param cls: Status class used to resolve the status path.
        :return: status parsed from the file, or an empty status when
            no status file exists yet.
        :rtype: MySQLStatus
        """
        if not self._status_exists(cls=cls):
            return MySQLStatus()

        with open(self.status_path(cls=cls)) as status_descriptor:
            return MySQLStatus(content=status_descriptor.read())
Example #7
0
def test_add(status_raw_empty, tmpdir):
    """A freshly added copy must be retrievable with all its attributes."""
    status = MySQLStatus(status_raw_empty)
    mycnf_1 = tmpdir.join('my-1.cnf')
    mycnf_1.write('some_content_1')
    mycnf_2 = tmpdir.join('my-2.cnf')
    mycnf_2.write('some_content_2')

    backup_copy = MySQLCopy(
        'master1',
        'daily',
        'foo.txt',
        binlog='binlog1',
        position=101,
        type='full',
        lsn=1230,
        backup_started=123,
        backup_finished=456,
        config_files=[str(mycnf_1), str(mycnf_2)],
    )
    status.add(backup_copy)
    assert len(status.daily) == 1

    stored = status.daily[backup_copy.key]
    assert stored.binlog == 'binlog1'
    assert stored.position == 101
    assert stored.type == 'full'
    assert stored.lsn == 1230
    assert stored.backup_started == 123
    assert stored.backup_finished == 456
    assert stored.duration == 333
    assert stored.config[str(mycnf_1)] == 'some_content_1'
    assert stored.config[str(mycnf_2)] == 'some_content_2'
Example #8
0
def test_serialize_is_valid(status_raw_content):
    """serialize() output must decode as JSON with status/version/md5."""
    status = MySQLStatus(content=status_raw_content)
    decoded = json.loads(status.serialize())
    assert "status" in decoded
    assert "version" in decoded
    assert "md5" in decoded
Example #9
0
def test_full_copy_exists(run_type, full_backup, status, expected):
    """full_copy_exists() must report whether a full copy is present.

    Fix: json.dumps() returns str, but b64encode() requires bytes on
    Python 3 — encode to utf-8 first, as the other status fixtures
    in this suite do.
    """
    istatus = MySQLStatus(
        content=b64encode(
            json.dumps(status).encode("utf-8")
        )
    )
    assert istatus.full_copy_exists(run_type) == expected
Example #10
0
    def _read_status(self):
        """
        Download and parse the status object from S3.

        :return: status read from the bucket, or an empty status when
            no status object exists yet.
        :rtype: MySQLStatus
        """
        if not self._status_exists():
            return MySQLStatus()

        response = self.s3_client.get_object(
            Bucket=self.bucket,
            Key=self.status_path
        )
        self.validate_client_response(response)
        return MySQLStatus(content=response['Body'].read())
Example #11
0
def test_serialize_new(status_raw_content):
    """A status must survive a serialize/deserialize round trip."""
    original = MySQLStatus(content=status_raw_content)
    print('\nOriginal status:\n%s' % original)

    raw = original.serialize()
    print('Serialized status:\n%s' % raw)

    restored = MySQLStatus(content=raw)
    print('Deserialized status:\n%s' % restored)

    assert original == restored
Example #12
0
def test_serialize_new(status_raw_content):
    """Round-tripping through serialize() must reproduce an equal status."""
    status_before = MySQLStatus(content=status_raw_content)
    print('\nOriginal status:\n%s' % status_before)

    serialized = status_before.serialize()
    print('Serialized status:\n%s' % serialized)

    status_after = MySQLStatus(content=serialized)
    print('Deserialized status:\n%s' % status_after)

    assert status_before == status_after
Example #13
0
def verify_mysql_backup(twindb_config, dst_path, backup_file, hostname=None):
    """
    Restore mysql backup and measure time

    :param hostname: Host the backup belongs to.
    :param backup_file: Copy name to verify, or ``latest``.
    :param dst_path: Path to restore the copy into.
    :param twindb_config: tool configuration
    :type twindb_config: TwinDBBackupConfig
    :return: JSON string with ``backup_copy``, ``restore_time`` and
        ``success`` fields.
    """
    dst = twindb_config.destination(backup_source=hostname)
    status = MySQLStatus(dst=dst)
    copy = None

    if backup_file == "latest":
        copy = status.latest_backup
    else:
        # Bug fix: the original reused ``copy`` as the loop variable,
        # so when nothing matched, ``copy`` silently held the LAST
        # copy in the status and the failure path below was dead code.
        for candidate in status:
            if backup_file.endswith(candidate.key):
                copy = candidate
                break
    if copy is None:
        return json.dumps(
            {
                'backup_copy': backup_file,
                'restore_time': 0,
                'success': False
            },
            indent=4,
            sort_keys=True)
    start_restore_time = time.time()
    success = True
    tmp_dir = tempfile.mkdtemp()

    try:

        LOG.debug('Verifying backup copy in %s', tmp_dir)
        restore_from_mysql(twindb_config, copy, dst_path, tmp_dir)
        edit_backup_my_cnf(dst_path)

    except (TwinDBBackupError, OSError, IOError) as err:

        LOG.error(err)
        LOG.debug(traceback.format_exc())
        success = False

    finally:
        # Always drop the scratch directory, even on failure.
        shutil.rmtree(tmp_dir, ignore_errors=True)

    end_restore_time = time.time()
    restore_time = end_restore_time - start_restore_time
    return json.dumps(
        {
            'backup_copy': copy.key,
            'restore_time': restore_time,
            'success': success
        },
        indent=4,
        sort_keys=True)
Example #14
0
def test_init_creates_empty():
    """A default-constructed status is valid, current-version, and empty."""
    status = MySQLStatus()
    assert status.version == STATUS_FORMAT_VERSION
    assert all(getattr(status, interval) == {} for interval in INTERVALS)
    assert status.valid
Example #15
0
def test_get_item_returns_copy_by_basename(deprecated_status_raw_content):
    """Indexing a status by full key must return the matching MySQLCopy."""
    key = "master1/hourly/mysql/mysql-2018-03-28_04_11_16.xbstream.gz"
    copy = MySQLStatus(deprecated_status_raw_content)[key]
    assert type(copy) == MySQLCopy
    assert copy.run_type == 'hourly'
    assert copy.host == 'master1'
    assert copy.name == 'mysql-2018-03-28_04_11_16.xbstream.gz'
Example #16
0
def test_get_item_returns_copy_by_basename(deprecated_status_raw_content):
    """as_dict() of a fetched copy must equal the raw decoded status entry.

    Fix: replaced the Python-2-only ``cmp()`` builtin with a direct
    equality check — equivalent for dicts and Python-3 compatible.
    """
    status = MySQLStatus(deprecated_status_raw_content)
    key = "master1/hourly/mysql/mysql-2018-03-28_04_11_16.xbstream.gz"
    backup_copy = status[key]
    decoded_status = json.loads(
        b64decode(deprecated_status_raw_content))["hourly"][key]
    assert backup_copy.as_dict() == decoded_status
Example #17
0
def test_get_stream(gs):
    """get_stream() must yield readable, non-empty copy content."""
    copy_key = 'master1/daily/mysql/mysql-2019-04-04_05_29_05.xbstream.gz'
    copy = MySQLStatus(dst=gs)[copy_key]

    with gs.get_stream(copy) as stream:
        LOG.debug('starting reading from pipe')
        content = stream.read()
        LOG.debug('finished reading from pipe')

    assert len(content), 'Failed to read from GS'
    LOG.info('Read %d bytes', len(content))
Example #18
0
def test_str_is_as_json(deprecated_status_raw_content):
    """str(status) must equal the pretty-printed JSON of the raw payload."""
    status = MySQLStatus(content=deprecated_status_raw_content)
    expected_json = json.dumps(
        json.loads(b64decode(deprecated_status_raw_content)),
        indent=4,
        sort_keys=True
    )
    assert expected_json == str(status)
Example #19
0
def test_get_my_cnf():
    status = MySQLStatus(content="""{
    "md5": "1939bce689ef7d070beae0860c885caf", 
    "status": "eyJtb250aGx5Ijoge30sICJob3VybHkiOiB7fSwgInllYXJseSI6IHt9LCAiZGFpbHkiOiB7Im1hc3RlcjFfMS9kYWlseS9teXNxbC9teXNxbC0yMDE4LTA4LTA1XzAyXzMyXzE0Lnhic3RyZWFtLmd6LmdwZyI6IHsiZ2FsZXJhIjogZmFsc2UsICJiaW5sb2ciOiAibXlzcWwtYmluLjAwMDAwMiIsICJydW5fdHlwZSI6ICJkYWlseSIsICJuYW1lIjogIm15c3FsLTIwMTgtMDgtMDVfMDJfMzJfMTQueGJzdHJlYW0uZ3ouZ3BnIiwgInBhcmVudCI6IG51bGwsICJsc24iOiAyNTUxMTg1LCAidHlwZSI6ICJmdWxsIiwgImJhY2t1cF9maW5pc2hlZCI6IDE1MzM0MzYzMzgsICJ3c3JlcF9wcm92aWRlcl92ZXJzaW9uIjogbnVsbCwgImhvc3QiOiAibWFzdGVyMV8xIiwgImJhY2t1cF9zdGFydGVkIjogMTUzMzQzNjMzMywgInBvc2l0aW9uIjogMTA1NCwgImNvbmZpZyI6IFt7Ii9ldGMvbXkuY25mIjogIlcyMTVjM0ZzWkYwS1pHRjBZV1JwY2owdmRtRnlMMnhwWWk5dGVYTnhiQXB6YjJOclpYUTlMM1poY2k5c2FXSXZiWGx6Y1d3dmJYbHpjV3d1YzI5amF3cDFjMlZ5UFcxNWMzRnNDaU1nUkdsellXSnNhVzVuSUhONWJXSnZiR2xqTFd4cGJtdHpJR2x6SUhKbFkyOXRiV1Z1WkdWa0lIUnZJSEJ5WlhabGJuUWdZWE56YjNKMFpXUWdjMlZqZFhKcGRIa2djbWx6YTNNS2MzbHRZbTlzYVdNdGJHbHVhM005TUFvS2MyVnlkbVZ5WDJsa1BURXdNQXBzYjJjdFltbHVQVzE1YzNGc0xXSnBiZ3BzYjJjdGMyeGhkbVV0ZFhCa1lYUmxjd29LVzIxNWMzRnNaRjl6WVdabFhRcHNiMmN0WlhKeWIzSTlMM1poY2k5c2IyY3ZiWGx6Y1d4a0xteHZad3B3YVdRdFptbHNaVDB2ZG1GeUwzSjFiaTl0ZVhOeGJHUXZiWGx6Y1d4a0xuQnBaQW89In1dfX0sICJ3ZWVrbHkiOiB7fX0=", 
    "version": 1}
    """)
    print(status)
    key = 'master1_1/daily/mysql/mysql-2018-08-05_02_32_14.xbstream.gz.gpg'
    for path, content in get_my_cnf(status, key):
        assert path == "/etc/my.cnf"
        assert content == """[mysqld]
Example #20
0
def test_serialize_doesnt_change_orignal(status_raw_content,
                                         deprecated_status_raw_content):
    """serialize() must not mutate the status it is called on."""
    # Check the deprecated format first, then the new one.
    for content in (deprecated_status_raw_content, status_raw_content):
        status = MySQLStatus(content=content)
        snapshot = deepcopy(status)
        assert status == snapshot
        status.serialize()
        assert status == snapshot
Example #21
0
def test_remove(status_raw_empty):
    """remove() must delete a previously added copy from the status."""
    status = MySQLStatus(status_raw_empty)
    copy = MySQLCopy('foo', 'daily', 'some_file.txt', type='full')
    status.add(copy)
    assert len(status.daily) == 1
    status.remove(copy.key)
    assert not status.daily
Example #22
0
def test_init_raises_on_wrong_key():
    """A weekly copy stored under a malformed key must be rejected."""
    raw = json.dumps({
        u'daily': {},
        u'hourly': {},
        u'monthly': {},
        u'weekly': {
            u'foo/weekly/some_file.txt': {
                u'type': u'full'
            }
        },
        u'yearly': {}
    }).encode("utf-8")
    with pytest.raises(CorruptedStatus):
        MySQLStatus(content=b64encode(raw))
Example #23
0
def restore_mysql(ctx, dst, backup_copy, cache):
    """Restore from mysql backup

    :param ctx: click context carrying ``twindb_config``.
    :param dst: Directory to restore into. Must be empty.
    :param backup_copy: Name (or unique suffix) of the copy to restore.
    :param cache: Optional path to a local cache directory.
    """
    LOG.debug('mysql: %r', ctx.obj['twindb_config'])

    if not backup_copy:
        LOG.info('No backup copy specified. Choose one from below:')
        list_available_backups(ctx.obj['twindb_config'])
        exit(1)

    try:
        ensure_empty(dst)

        incomplete_copy = MySQLCopy(path=backup_copy)
        dst_storage = ctx.obj['twindb_config'].destination(
            backup_source=incomplete_copy.host)
        mysql_status = MySQLStatus(dst=dst_storage,
                                   status_directory=incomplete_copy.host)

        # The given name must match exactly one copy in the status.
        copies = [cp for cp in mysql_status if backup_copy.endswith(cp.name)]
        try:
            copy = copies.pop(0)
        except IndexError:
            raise TwinDBBackupError(
                'Can not find copy %s in MySQL status. '
                'Inspect output of `twindb-backup status` and verify '
                'that correct copy is specified.' % backup_copy)
        if copies:
            # Bug fix: the %s placeholder was never interpolated.
            raise TwinDBBackupError(
                'Multiple copies match pattern %s. Make sure you give unique '
                'copy name for restore.' % backup_copy)

        if cache:
            restore_from_mysql(ctx.obj['twindb_config'],
                               copy,
                               dst,
                               cache=Cache(cache))
        else:
            restore_from_mysql(ctx.obj['twindb_config'], copy, dst)

    # Both handlers were byte-identical; merged into one.
    except (TwinDBBackupError, CacheException, OSError, IOError) as err:
        LOG.error(err)
        LOG.debug(traceback.format_exc())
        exit(1)
Example #24
0
def test_init_example_1():
    """A status with a single weekly copy must expose exactly that copy."""
    key = 'foo/weekly/mysql/some_file.txt'
    raw = json.dumps({
        'hourly': {},
        'daily': {},
        'weekly': {
            key: {
                "type": "full"
            }
        },
        'monthly': {},
        'yearly': {}
    }).encode("utf-8")
    status = MySQLStatus(content=b64encode(raw))
    assert len(status.weekly) == 1
    assert len(status) == 1
    assert type(status[key]) == MySQLCopy
def test_init_example_0():
    """A status with a single hourly copy must expose exactly that copy.

    Fix: json.dumps() returns str — encode it so b64encode() receives
    bytes on Python 3, consistently with test_init_example_1.
    """
    content = b64encode(
        json.dumps({
            'hourly': {
                'foo/hourly/mysql/some_file.txt': {
                    'type': 'full'
                }
            },
            'daily': {},
            'weekly': {},
            'monthly': {},
            'yearly': {}
        }).encode("utf-8"))
    status = MySQLStatus(content=content)
    assert len(status.hourly) == 1
    assert len(status) == 1
    assert type(status['foo/hourly/mysql/some_file.txt']) == MySQLCopy
Example #26
0
def test_init_weekly_only():
    """Only the weekly interval must be populated from this payload.

    Fix: json.dumps() returns str — encode it so b64encode() receives
    bytes on Python 3, as the other status fixtures in this suite do.
    """
    status = MySQLStatus(content=b64encode(
        json.dumps({
            u'daily': {},
            u'hourly': {},
            u'monthly': {},
            u'weekly': {
                u'foo/weekly/some_file.txt': {
                    u'type': u'full'
                }
            },
            u'yearly': {}
        }).encode("utf-8")))
    assert not status.daily
    assert not status.hourly
    assert not status.monthly
    assert status.weekly
    assert not status.yearly
Example #27
0
def test_serialize_doesnt_change_orignal(
        status_raw_content,
        deprecated_status_raw_content):
    """Calling serialize() must leave the status object untouched."""
    deprecated = MySQLStatus(content=deprecated_status_raw_content)
    before = deepcopy(deprecated)
    assert deprecated == before
    deprecated.serialize()
    assert deprecated == before

    current = MySQLStatus(content=status_raw_content)
    before = deepcopy(current)
    assert current == before
    current.serialize()
    assert current == before
Example #28
0
def test_init_creates_instance_from_new(status_raw_content):
    """A status parsed from new-format content must contain the known copy."""
    status = MySQLStatus(status_raw_content)
    assert status.version == STATUS_FORMAT_VERSION

    key = 'master1/hourly/mysql/mysql-2018-03-28_04_11_16.xbstream.gz'
    my_cnf = """[mysqld]
datadir=/var/lib/mysql
socket=/var/lib/mysql/mysql.sock
user=mysql
# Disabling symbolic-links is recommended to prevent assorted security risks
symbolic-links=0

server_id=100
gtid_mode=ON
log-bin=mysql-bin
log-slave-updates
enforce-gtid-consistency

[mysqld_safe]
log-error=/var/log/mysqld.log
pid-file=/var/run/mysqld/mysqld.pid
"""
    expected_copy = MySQLCopy(
        'master1',
        'hourly',
        'mysql-2018-03-28_04_11_16.xbstream.gz',
        backup_started=1522210276,
        backup_finished=1522210295,
        binlog='mysql-bin.000001',
        parent='master1/daily/mysql/mysql-2018-03-28_04_09_53.xbstream.gz',
        lsn=19903207,
        config={'/etc/my.cnf': my_cnf},
        position=46855,
        type='incremental')

    assert key in status.hourly
    LOG.debug("Copy %s: %r", expected_copy.key, expected_copy)
    LOG.debug("Copy from status %s: %r", key, status[key])
    assert status[key] == expected_copy
Example #29
0
def test_init_with_new_format_with_wrong_checksum(
        status_raw_content_with_invalid_hash):
    """Content whose checksum doesn't match must raise CorruptedStatus."""
    corrupted = status_raw_content_with_invalid_hash
    with pytest.raises(CorruptedStatus):
        MySQLStatus(corrupted)
Example #30
0
def test_init_with_new_format(status_raw_content):
    """New-format content must parse with format version 1."""
    assert MySQLStatus(status_raw_content).version == 1
Example #31
0
def test_init_invalid_json(invalid_deprecated_status_raw_content):
    """Malformed JSON payloads must raise CorruptedStatus."""
    raw = invalid_deprecated_status_raw_content
    with pytest.raises(CorruptedStatus):
        MySQLStatus(raw)
Example #32
0
 def callback(self, **kwargs):
     """
     Save a copy of the status to the local destination.

     :param kwargs: Expects ``keep_local_path`` (root path for the
         local destination) and ``dst`` (destination to read the
         status from).
     """
     local_dst = Local(kwargs['keep_local_path'])
     status = MySQLStatus(dst=kwargs['dst'])
     status.save(local_dst)
Example #33
0
def test_remove_raises(status_raw_empty):
    """Removing an unknown key must raise StatusKeyNotFound."""
    status = MySQLStatus(status_raw_empty)
    with pytest.raises(StatusKeyNotFound):
        status.remove("foo")
Example #34
0
def test_write_status(gs):
    """A fresh status must save to the GS destination without errors."""
    MySQLStatus().save(gs)
Example #35
0
def backup_mysql(run_type, config):
    """Take backup of local MySQL instance

    :param run_type: Run type
    :type run_type: str
    :param config: Tool configuration
    :type config: TwinDBBackupConfig
    """
    if config.backup_mysql is False:
        LOG.debug('Not backing up MySQL')
        return

    dst = config.destination()

    # Run type at which full (non-incremental) backups are taken;
    # defaults to daily when the option is absent from the config.
    try:
        full_backup = config.mysql.full_backup
    except ConfigParser.NoOptionError:
        full_backup = 'daily'
    backup_start = time.time()

    status = MySQLStatus(dst=dst)

    kwargs = {
        'backup_type': status.next_backup_type(full_backup, run_type),
        'dst': dst,
        'xtrabackup_binary': config.mysql.xtrabackup_binary
    }
    parent = status.candidate_parent(run_type)

    # An incremental backup needs the parent's LSN to know where to start.
    if kwargs['backup_type'] == 'incremental':
        kwargs['parent_lsn'] = parent.lsn

    LOG.debug('Creating source %r', kwargs)
    src = MySQLSource(
        MySQLConnectInfo(config.mysql.defaults_file),
        run_type,
        **kwargs
    )

    # _backup_stream() may append (callback, kwargs) pairs to run after
    # the status has been saved.
    callbacks = []
    try:
        _backup_stream(config, src, dst, callbacks=callbacks)
    except (DestinationError, SourceError, SshClientException) as err:
        raise OperationError(err)
    LOG.debug('Backup copy name: %s', src.get_name())

    # Metadata describing the copy that was just streamed.
    kwargs = {
        'type': src.type,
        'binlog': src.binlog_coordinate[0],
        'position': src.binlog_coordinate[1],
        'lsn': src.lsn,
        'backup_started': backup_start,
        'backup_finished': time.time(),
        'config_files': my_cnfs(MY_CNF_COMMON_PATHS)
    }
    if src.incremental:
        kwargs['parent'] = parent.key

    backup_copy = MySQLCopy(
        src.host,
        run_type,
        src.basename,
        **kwargs
    )
    status.add(backup_copy)

    # Retention policy may drop expired copies; it returns the updated status.
    status = src.apply_retention_policy(dst, config, run_type, status)
    LOG.debug('status after apply_retention_policy():\n%s', status)

    backup_duration = backup_copy.duration
    export_info(
        config,
        data=backup_duration,
        category=ExportCategory.mysql,
        measure_type=ExportMeasureType.backup
    )

    status.save(dst)

    LOG.debug('Callbacks are %r', callbacks)
    for callback in callbacks:
        callback[0].callback(**callback[1])
Example #36
0
def test_backup_duration(deprecated_status_raw_content):
    """get_latest_backup() must return the key of the newest copy."""
    status = MySQLStatus(deprecated_status_raw_content)
    latest = 'master1/hourly/mysql/mysql-2018-03-28_04_11_16.xbstream.gz'
    assert status.get_latest_backup() == latest
Example #37
0
def test_backup_duration(deprecated_status_raw_content):
    """latest_backup must point at the newest hourly copy."""
    expected = 'master1/hourly/mysql/mysql-2018-03-28_04_11_16.xbstream.gz'
    status = MySQLStatus(deprecated_status_raw_content)
    assert status.latest_backup.key == expected
Example #38
0
def backup_mysql(run_type, config):
    """Take backup of local MySQL instance

    :param run_type: Run type
    :type run_type: str
    :param config: Tool configuration
    :type config: TwinDBBackupConfig
    """
    if config.backup_mysql is False:
        LOG.debug("Not backing up MySQL")
        return

    dst = config.destination()

    # Run type at which full (non-incremental) backups are taken;
    # defaults to daily when the option is absent from the config.
    try:
        full_backup = config.mysql.full_backup
    except configparser.NoOptionError:
        full_backup = "daily"
    backup_start = time.time()

    status = MySQLStatus(dst=dst)

    kwargs = {
        "backup_type": status.next_backup_type(full_backup, run_type),
        "dst": dst,
        "xtrabackup_binary": config.mysql.xtrabackup_binary,
    }
    parent = status.candidate_parent(run_type)

    # An incremental backup needs the parent's LSN to know where to start.
    if kwargs["backup_type"] == "incremental":
        kwargs["parent_lsn"] = parent.lsn

    LOG.debug("Creating source %r", kwargs)
    src = MySQLSource(MySQLConnectInfo(config.mysql.defaults_file), run_type,
                      **kwargs)

    # _backup_stream() may append (callback, kwargs) pairs to run after
    # the status has been saved.
    callbacks = []
    try:
        _backup_stream(config, src, dst, callbacks=callbacks)
    except (DestinationError, SourceError, SshClientException) as err:
        raise OperationError(err)
    LOG.debug("Backup copy name: %s", src.get_name())

    # Metadata describing the copy that was just streamed.
    kwargs = {
        "type": src.type,
        "binlog": src.binlog_coordinate[0],
        "position": src.binlog_coordinate[1],
        "lsn": src.lsn,
        "backup_started": backup_start,
        "backup_finished": time.time(),
        "config_files": my_cnfs(MY_CNF_COMMON_PATHS),
    }
    if src.incremental:
        kwargs["parent"] = parent.key

    backup_copy = MySQLCopy(src.host, run_type, src.basename, **kwargs)
    status.add(backup_copy)

    # Retention policy may drop expired copies; it returns the updated status.
    status = src.apply_retention_policy(dst, config, run_type, status)
    LOG.debug("status after apply_retention_policy():\n%s", status)

    backup_duration = backup_copy.duration
    export_info(
        config,
        data=backup_duration,
        category=ExportCategory.mysql,
        measure_type=ExportMeasureType.backup,
    )

    status.save(dst)

    LOG.debug("Callbacks are %r", callbacks)
    for callback in callbacks:
        callback[0].callback(**callback[1])
Example #39
0
def test_str_old(deprecated_status_raw_content):
    """str() of a deprecated-format status must equal the raw JSON payload."""
    status = MySQLStatus(deprecated_status_raw_content)
    # Expected value: the exact decoded JSON of the fixture payload,
    # including the base64-encoded my.cnf content inside each copy.
    expected = '{"monthly": {}, "hourly": {"master1/hourly/mysql/mysql-2018-03-28_04_11_16.xbstream.gz": {"galera": false, "binlog": "mysql-bin.000001", "run_type": "hourly", "name": "mysql-2018-03-28_04_11_16.xbstream.gz", "parent": "master1/daily/mysql/mysql-2018-03-28_04_09_53.xbstream.gz", "lsn": 19903207, "type": "incremental", "backup_finished": 1522210295, "wsrep_provider_version": null, "host": "master1", "backup_started": 1522210276, "position": 46855, "config": [{"/etc/my.cnf": "W215c3FsZF0KZGF0YWRpcj0vdmFyL2xpYi9teXNxbApzb2NrZXQ9L3Zhci9saWIvbXlzcWwvbXlzcWwuc29jawp1c2VyPW15c3FsCiMgRGlzYWJsaW5nIHN5bWJvbGljLWxpbmtzIGlzIHJlY29tbWVuZGVkIHRvIHByZXZlbnQgYXNzb3J0ZWQgc2VjdXJpdHkgcmlza3MKc3ltYm9saWMtbGlua3M9MAoKc2VydmVyX2lkPTEwMApndGlkX21vZGU9T04KbG9nLWJpbj1teXNxbC1iaW4KbG9nLXNsYXZlLXVwZGF0ZXMKZW5mb3JjZS1ndGlkLWNvbnNpc3RlbmN5CgpbbXlzcWxkX3NhZmVdCmxvZy1lcnJvcj0vdmFyL2xvZy9teXNxbGQubG9nCnBpZC1maWxlPS92YXIvcnVuL215c3FsZC9teXNxbGQucGlkCg=="}]}}, "yearly": {}, "daily": {"master1/daily/mysql/mysql-2018-03-28_04_09_53.xbstream.gz": {"galera": false, "binlog": "mysql-bin.000001", "run_type": "daily", "name": "mysql-2018-03-28_04_09_53.xbstream.gz", "parent": null, "lsn": 19903199, "type": "full", "backup_finished": 1522210200, "wsrep_provider_version": null, "host": "master1", "backup_started": 1522210193, "position": 46855, "config": [{"/etc/my.cnf": "W215c3FsZF0KZGF0YWRpcj0vdmFyL2xpYi9teXNxbApzb2NrZXQ9L3Zhci9saWIvbXlzcWwvbXlzcWwuc29jawp1c2VyPW15c3FsCiMgRGlzYWJsaW5nIHN5bWJvbGljLWxpbmtzIGlzIHJlY29tbWVuZGVkIHRvIHByZXZlbnQgYXNzb3J0ZWQgc2VjdXJpdHkgcmlza3MKc3ltYm9saWMtbGlua3M9MAoKc2VydmVyX2lkPTEwMApndGlkX21vZGU9T04KbG9nLWJpbj1teXNxbC1iaW4KbG9nLXNsYXZlLXVwZGF0ZXMKZW5mb3JjZS1ndGlkLWNvbnNpc3RlbmN5CgpbbXlzcWxkX3NhZmVdCmxvZy1lcnJvcj0vdmFyL2xvZy9teXNxbGQubG9nCnBpZC1maWxlPS92YXIvcnVuL215c3FsZC9teXNxbGQucGlkCg=="}]}}, "weekly": {}}'

    assert str(status) == expected
Example #40
0
def restore_from_mysql(twindb_config, copy, dst_dir,
                       tmp_dir=None,
                       cache=None,
                       hostname=None):
    """
    Restore MySQL datadir in a given directory

    :param twindb_config: tool configuration
    :type twindb_config: TwinDBBackupConfig
    :param copy: Backup copy instance.
    :type copy: MySQLCopy
    :param dst_dir: Destination directory. Must exist and be empty.
    :type dst_dir: str
    :param tmp_dir: Path to temp directory
    :type tmp_dir: str
    :param cache: Local cache object.
    :type cache: Cache
    :param hostname: Hostname
    :type hostname: str

    """
    LOG.info('Restoring %s in %s', copy, dst_dir)
    mkdir_p(dst_dir)

    dst = None
    restore_start = time.time()
    # Prefer the local copy if one was kept on this host.
    keep_local_path = twindb_config.keep_local_path
    if keep_local_path and osp.exists(osp.join(keep_local_path, copy.key)):
        dst = Local(twindb_config.keep_local_path)

    if not dst:
        if not hostname:
            hostname = copy.host
            if not hostname:
                raise DestinationError(
                    'Failed to get hostname from %s'
                    % copy
                )
        dst = twindb_config.destination(backup_source=hostname)

    key = copy.key
    status = MySQLStatus(dst=dst)

    stream = dst.get_stream(copy)

    if status[key].type == "full":

        cache_key = os.path.basename(key)
        if cache:
            if cache_key in cache:
                # restore from cache
                cache.restore_in(cache_key, dst_dir)
            else:
                restore_from_mysql_full(
                    stream,
                    dst_dir,
                    twindb_config,
                    redo_only=False
                )
                cache.add(dst_dir, cache_key)
        else:
            restore_from_mysql_full(
                stream,
                dst_dir,
                twindb_config,
                redo_only=False)

    else:
        # Incremental copy: restore the full parent first (redo_only),
        # then apply the incremental on top of it.
        full_copy = status.candidate_parent(
            copy.run_type
        )
        full_stream = dst.get_stream(full_copy)
        LOG.debug("Full parent copy is %s", full_copy.key)
        cache_key = os.path.basename(full_copy.key)

        if cache:
            if cache_key in cache:
                # restore from cache
                cache.restore_in(cache_key, dst_dir)
            else:
                restore_from_mysql_full(
                    full_stream,
                    dst_dir,
                    twindb_config,
                    redo_only=True
                )
                cache.add(dst_dir, cache_key)
        else:
            restore_from_mysql_full(
                full_stream,
                dst_dir,
                twindb_config,
                redo_only=True
            )

        restore_from_mysql_incremental(
            stream,
            dst_dir,
            twindb_config,
            tmp_dir
        )

    config_dir = os.path.join(dst_dir, "_config")

    # Re-create the saved MySQL config files under <dst_dir>/_config.
    for path, content in get_my_cnf(status, key):
        config_sub_dir = os.path.join(
            config_dir,
            os.path.dirname(path).lstrip('/')
        )
        # Fix: 0755 is Python-2-only octal syntax (SyntaxError on py3);
        # 0o755 is equivalent and valid on both.
        mkdir_p(config_sub_dir, mode=0o755)

        with open(os.path.join(config_sub_dir,
                               os.path.basename(path)), 'w') as mysql_config:
            mysql_config.write(content)

    update_grastate(dst_dir, status, key)
    export_info(twindb_config, data=time.time() - restore_start,
                category=ExportCategory.mysql,
                measure_type=ExportMeasureType.restore)
    LOG.info('Successfully restored %s in %s.', copy.key, dst_dir)
    LOG.info('Now copy content of %s to MySQL datadir: '
             'cp -R %s /var/lib/mysql/', dst_dir, osp.join(dst_dir, '*'))
    LOG.info('Fix permissions: chown -R mysql:mysql /var/lib/mysql/')
    LOG.info('Make sure innodb_log_file_size and innodb_log_files_in_group '
             'in %s/backup-my.cnf and in /etc/my.cnf are same.', dst_dir)

    if osp.exists(config_dir):
        LOG.info('Original my.cnf is restored in %s.', config_dir)

    LOG.info('Then you can start MySQL normally.')