Example #1
def get_destination(config, hostname=socket.gethostname()):
    """
    Read config and return instance of Destination class.

    :param config: Tool configuration.
    :type config: ConfigParser.ConfigParser
    :param hostname: Local hostname.
    :type hostname: str
    :return: Instance of destination class.
    :rtype: BaseDestination
    """
    destination = None
    try:
        destination = config.get('destination', 'backup_destination')
        LOG.debug('Destination in the config %s', destination)
        destination = destination.strip('"\'')
    except (ConfigParser.NoOptionError, ConfigParser.NoSectionError):
        LOG.critical("Backup destination must be specified "
                     "in the config file")
        exit(-1)

    if destination == "ssh":
        host = config.get('ssh', 'backup_host')
        try:
            port = int(config.get('ssh', 'port'))
        except ConfigParser.NoOptionError:
            port = 22
        try:
            ssh_key = config.get('ssh', 'ssh_key')
        except ConfigParser.NoOptionError:
            ssh_key = '/root/.ssh/id_rsa'
            LOG.debug('ssh_key is not defined in config. '
                      'Will use default %s', ssh_key)
        user = config.get('ssh', 'ssh_user')
        remote_path = config.get('ssh', 'backup_dir')
        return Ssh(
            remote_path,
            SshConnectInfo(
                host=host,
                port=port,
                user=user,
                key=ssh_key),
            hostname=hostname)

    elif destination == "s3":
        bucket = config.get('s3', 'BUCKET').strip('"\'')
        access_key_id = config.get('s3', 'AWS_ACCESS_KEY_ID').strip('"\'')
        secret_access_key = config.get('s3',
                                       'AWS_SECRET_ACCESS_KEY').strip('"\'')
        default_region = config.get('s3', 'AWS_DEFAULT_REGION').strip('"\'')

        return S3(bucket, AWSAuthOptions(access_key_id,
                                         secret_access_key,
                                         default_region=default_region),
                  hostname=hostname)

    else:
        LOG.critical('Destination %s is not supported', destination)
        exit(-1)
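
For reference, a minimal config that drives get_destination() down the ssh branch could look like the sketch below. Only the section and option names are taken from the config.get() calls above; the host and paths are placeholder values, and the Python 2 ConfigParser/StringIO modules match the era of the code.

# Minimal sketch, assuming Python 2 (matching the ConfigParser usage above).
# Section/option names come from get_destination(); values are placeholders.
import ConfigParser
import StringIO

SAMPLE_CONFIG = """\
[destination]
backup_destination = ssh

[ssh]
backup_host = db01.example.com
ssh_user = root
backup_dir = /var/backups
"""

config = ConfigParser.ConfigParser()
config.readfp(StringIO.StringIO(SAMPLE_CONFIG))
# get_destination(config) would return an Ssh destination; the omitted
# 'port' and 'ssh_key' options fall back to 22 and /root/.ssh/id_rsa.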
Example #2
def test__delete_bucket_deletes_the_bucket():
    s3 = S3('test-bucket', AWSAuthOptions('access_key', 'secret_key'))
    s3.create_bucket()

    s3.delete_bucket()

    with pytest.raises(ClientError):
        s3.s3_client.head_bucket(Bucket='test-bucket')
Example #3
def test__create_bucket_creates_the_bucket():
    s3 = S3('test-bucket',
            AWSAuthOptions('access_key',
                           'secret_key')
            )
    s3.create_bucket()

    assert s3.s3_client.head_bucket(Bucket='test-bucket')
Example #4
def test__s3_find_files_returns_sorted(master1, docker_client, s3_client,
                                       config_content_mysql_only,
                                       client_my_cnf):
    # cleanup the bucket first
    s3_client.delete_all_objects()

    twindb_config_dir = get_twindb_config_dir(docker_client, master1["Id"])

    twindb_config_host = "%s/twindb-backup-1.cfg" % twindb_config_dir
    twindb_config_guest = "/etc/twindb/twindb-backup-1.cfg"
    my_cnf_path = "%s/my.cnf" % twindb_config_dir

    with open(my_cnf_path, "w") as my_cnf:
        my_cnf.write(client_my_cnf)

    with open(twindb_config_host, "w") as fp:
        content = config_content_mysql_only.format(
            AWS_ACCESS_KEY_ID=os.environ["AWS_ACCESS_KEY_ID"],
            AWS_SECRET_ACCESS_KEY=os.environ["AWS_SECRET_ACCESS_KEY"],
            BUCKET=s3_client.bucket,
            daily_copies=5,
            hourly_copies=2,
            MY_CNF="/etc/twindb/my.cnf",
        )
        fp.write(content)

    cmd = [
        "twindb-backup",
        "--debug",
        "--config",
        twindb_config_guest,
        "backup",
        "daily",
    ]

    n_runs = 3
    for x in range(n_runs):
        ret, cout = docker_execute(docker_client, master1["Id"], cmd)
        print(cout)
        assert ret == 0
    hostname = "master1_1"
    dst = S3(
        bucket=s3_client.bucket,
        aws_access_key_id=os.environ["AWS_ACCESS_KEY_ID"],
        aws_secret_access_key=os.environ["AWS_SECRET_ACCESS_KEY"],
    )

    for x in range(10):
        result = dst.list_files(dst.remote_path, pattern="/daily/")
        assert len(result) == n_runs
        assert result == sorted(result)
        prefix = "{remote_path}/{hostname}/{run_type}/mysql/mysql-".format(
            remote_path=dst.remote_path, hostname=hostname, run_type="daily")
        files = dst.list_files(prefix)
        assert len(files) == n_runs
        assert files == sorted(files)
Example #5
def test__list_files_returns_sorted_list():
    s3 = S3('test-bucket', AWSAuthOptions('access_key', 'secret_key'))
    s3.create_bucket()

    s3.s3_client.put_object(Body='hello world',
                            Bucket='test-bucket',
                            Key='object_1')

    files_list = s3.list_files(prefix='')
    assert len(files_list) == 1
Example #6
def test__s3_find_files_returns_sorted(master1, docker_client, s3_client,
                                       config_content_mysql_only):
    # cleanup the bucket first
    s3_client.delete_all_objects()

    twindb_config_dir = get_twindb_config_dir(docker_client, master1['Id'])

    twindb_config_host = "%s/twindb-backup-1.cfg" % twindb_config_dir
    twindb_config_guest = '/etc/twindb/twindb-backup-1.cfg'
    my_cnf_path = "%s/my.cnf" % twindb_config_dir

    contents = """
[client]
user=dba
password=qwerty
"""

    with open(my_cnf_path, "w") as my_cnf:
        my_cnf.write(contents)

    with open(twindb_config_host, 'w') as fp:
        content = config_content_mysql_only.format(
            AWS_ACCESS_KEY_ID=os.environ['AWS_ACCESS_KEY_ID'],
            AWS_SECRET_ACCESS_KEY=os.environ['AWS_SECRET_ACCESS_KEY'],
            BUCKET=s3_client.bucket,
            daily_copies=5,
            hourly_copies=2,
            MY_CNF='/etc/twindb/my.cnf')
        fp.write(content)

    cmd = [
        'twindb-backup', '--debug', '--config', twindb_config_guest, 'backup',
        'daily'
    ]

    n_runs = 3
    for x in xrange(n_runs):
        ret, cout = docker_execute(docker_client, master1['Id'], cmd)
        print(cout)
        assert ret == 0
    hostname = 'master1_1'
    dst = S3(
        s3_client.bucket,
        AWSAuthOptions(os.environ['AWS_ACCESS_KEY_ID'],
                       os.environ['AWS_SECRET_ACCESS_KEY']))

    for x in xrange(10):
        result = dst.find_files(dst.remote_path, 'daily')
        assert len(result) == n_runs
        assert result == sorted(result)
        prefix = "{remote_path}/{hostname}/{run_type}/mysql/mysql-".format(
            remote_path=dst.remote_path, hostname=hostname, run_type='daily')
        files = dst.list_files(prefix)
        assert len(files) == n_runs
        assert files == sorted(files)
Example #7
def s3_client(bucket_name):
    LOG.debug('Bucket: %s', bucket_name)
    client = S3(
        bucket_name,
        AWSAuthOptions(os.environ['AWS_ACCESS_KEY_ID'],
                       os.environ['AWS_SECRET_ACCESS_KEY']))
    assert client.create_bucket()

    yield client

    client.delete_bucket(force=True)
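
The yield makes this a generator-style pytest fixture (the @pytest.fixture decorator was presumably stripped during extraction): the test receives the live S3 client, and the bucket is force-deleted on teardown. A hypothetical consumer:

# Hypothetical test consuming the fixture above; pytest injects the yielded
# client by argument name. head_bucket() raises ClientError if the bucket
# does not exist, so a truthy response proves the fixture created it.
def test__fixture_provides_live_bucket(s3_client):
    assert s3_client.s3_client.head_bucket(Bucket=s3_client.bucket)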
Example #8
def test__list_files(prefix, keys, result):
    s3 = S3('test-bucket', AWSAuthOptions('access_key', 'secret_key'))
    s3.create_bucket()

    for key in keys:
        s3.s3_client.put_object(Body='hello world',
                                Bucket='test-bucket',
                                Key=key)

    files_list = s3.list_files(prefix=prefix)
    assert files_list == result
Example #9
def test__delete_can_delete_an_object(path, key):
    twindb_s3 = S3('test-bucket', AWSAuthOptions('access_key', 'secret_key'))
    twindb_s3.create_bucket()

    twindb_s3.s3_client.put_object(Body='hello world',
                                   Bucket='test-bucket',
                                   Key=key)

    assert twindb_s3.delete(path)
    with pytest.raises(ClientError):
        twindb_s3.s3_client.head_object(Bucket='test-bucket', Key=key)
Example #10
def test_get_status_empty(mock_status_exists):
    mock_status_exists.return_value = False

    dst = S3('a', AWSAuthOptions('b', 'c'))
    assert dst.status() == {
        'hourly': {},
        'daily': {},
        'weekly': {},
        'monthly': {},
        'yearly': {}
    }
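
mock_status_exists is presumably a fixture that patches the destination's check for an existing status file; with it returning False, status() falls back to the empty per-run-type structure asserted above. A sketch of such a fixture; the patch target is a guess based on the fixture's name:

# Hypothetical fixture; '_status_exists' is an assumed attribute name,
# the real test may patch a different one.
@pytest.fixture
def mock_status_exists():
    with mock.patch.object(S3, '_status_exists') as mocked:
        yield mocked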
Example #11
def test_get_file_content():
    s3 = S3('test-bucket', AWSAuthOptions('access_key', 'secret_key'))
    mock_body = mock.Mock()
    s3.s3_client = mock.Mock()
    s3.s3_client.get_object.return_value = {'Body': mock_body}
    s3.validate_client_response = mock.Mock()

    # noinspection PyProtectedMember
    s3._get_file_content('foo')
    s3.s3_client.get_object.assert_called_once_with(Bucket='test-bucket',
                                                    Key='foo')
    s3.validate_client_response.assert_called_once_with({'Body': mock_body})
    mock_body.read.assert_called_once_with()
Example #12
def s3_client(bucket_name):
    LOG.debug('Bucket: %s', bucket_name)
    client = S3(bucket=bucket_name,
                aws_access_key_id=os.environ['AWS_ACCESS_KEY_ID'],
                aws_secret_access_key=os.environ['AWS_SECRET_ACCESS_KEY'])
    try:
        assert client.create_bucket()

        yield client

    finally:
        client.delete_bucket(force=True)
Example #13
def test__find_files_returns_sorted_list_of_files():
    s3 = S3('test-bucket', AWSAuthOptions('access_key', 'secret_key'))
    s3.create_bucket()

    s3.s3_client.put_object(Body='hello world',
                            Bucket='test-bucket',
                            Key='test_server/hourly/file1.txt')
    s3.s3_client.put_object(Body='hello world',
                            Bucket='test-bucket',
                            Key='test_server/hourly/file2.txt')
    s3.s3_client.put_object(Body='hello world',
                            Bucket='test-bucket',
                            Key='test_server/daily/file1.txt')

    files_list = s3.find_files(prefix='', run_type='hourly')
    assert len(files_list) == 2
    assert files_list[0] == 's3://test-bucket/test_server/hourly/file1.txt'
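
Two additional assertions, consistent with the existing ones and the test's name, would make the filtering and ordering explicit (hypothetical, not part of the original test):

# Hypothetical follow-up assertions: the daily/ key is excluded when
# run_type='hourly', and the result comes back sorted.
assert 's3://test-bucket/test_server/daily/file1.txt' not in files_list
assert files_list == sorted(files_list)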
Example #14
def test_get_file_content_retry(mock_sleep):
    s3 = S3('test-bucket', AWSAuthOptions('access_key', 'secret_key'))
    mock_body = mock.Mock()
    s3.s3_client = mock.Mock()

    mock_error_response = {'ResponseMetadata': {'MaxAttemptsReached': True}}
    s3.s3_client.get_object.side_effect = [
        ClientError(mock_error_response, 'GetObject'),
        ClientError(mock_error_response, 'GetObject'),
        {'Body': mock_body},
    ]
    s3.validate_client_response = mock.Mock()

    # noinspection PyProtectedMember
    s3._get_file_content('foo')
    assert s3.s3_client.get_object.call_count == 3
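
mock_sleep presumably patches the delay call inside the retry loop around get_object. Assuming the implementation sleeps once per failed attempt, the two ClientErrors imply two sleeps; a hedged follow-up assertion:

# Hypothetical assertion; assumes one sleep per failed attempt in the
# retry loop that mock_sleep patches.
assert mock_sleep.call_count == 2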
Example #15
def test__list_files_with_pattern():
    s3 = S3('test-bucket', AWSAuthOptions('access_key', 'secret_key'))
    s3.create_bucket()

    s3.s3_client.put_object(Body='hello world',
                            Bucket='test-bucket',
                            Key='object_1')
    s3.s3_client.put_object(Body='hello world',
                            Bucket='test-bucket',
                            Key='object/foo/bar')
    s3.s3_client.put_object(Body='hello world',
                            Bucket='test-bucket',
                            Key='object_2')

    files_list = s3.list_files(prefix='', pattern='_2')
    assert files_list == ['s3://test-bucket/object_2']

    files_list = s3.list_files(prefix='', pattern='.*/foo/.*')
    assert files_list == ['s3://test-bucket/object/foo/bar']
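
The second assertion shows that pattern is applied as a regular expression against the full s3:// URI ('_2' would also match as a plain substring, but '.*/foo/.*' only matches as a regex). A minimal, self-contained re-implementation of that filter under this assumption:

import re

# Sketch of the filtering the assertions imply; not the library's code.
uris = ['s3://test-bucket/object_1',
        's3://test-bucket/object/foo/bar',
        's3://test-bucket/object_2']

assert [u for u in uris if re.search('_2', u)] == ['s3://test-bucket/object_2']
assert [u for u in uris if re.search('.*/foo/.*', u)] == \
    ['s3://test-bucket/object/foo/bar']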
Example #16
    def destination(self, backup_source=socket.gethostname()):
        """
        :param backup_source: Hostname of the host where backup is taken from.
        :type backup_source: str
        :return: Backup destination instance
        :rtype: BaseDestination
        """
        try:
            backup_destination = self.__cfg.get("destination",
                                                "backup_destination")
            if backup_destination == "ssh":
                return Ssh(
                    self.ssh.path,
                    hostname=backup_source,
                    ssh_host=self.ssh.host,
                    ssh_port=self.ssh.port,
                    ssh_user=self.ssh.user,
                    ssh_key=self.ssh.key,
                )
            elif backup_destination == "s3":
                return S3(
                    bucket=self.s3.bucket,
                    aws_access_key_id=self.s3.aws_access_key_id,
                    aws_secret_access_key=self.s3.aws_secret_access_key,
                    aws_default_region=self.s3.aws_default_region,
                    hostname=backup_source,
                )
            elif backup_destination == "gcs":
                return GCS(
                    bucket=self.gcs.bucket,
                    gc_credentials_file=self.gcs.gc_credentials_file,
                    gc_encryption_key=self.gcs.gc_encryption_key,
                    hostname=backup_source,
                )

            else:
                raise ConfigurationError("Unsupported destination '%s'" %
                                         backup_destination)
        except NoSectionError as err:
            raise ConfigurationError(
                "%s is missing required section 'destination'" %
                self._config_file) from err
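
A hedged usage sketch for the method above. The TwinDBBackupConfig class name and its config_file keyword are assumptions, inferred from self._config_file and the twindb-backup naming used elsewhere in this listing:

# Hypothetical usage; the class name and constructor keyword are assumptions.
cfg = TwinDBBackupConfig(config_file='/etc/twindb/twindb-backup.cfg')
dst = cfg.destination(backup_source='db01.example.com')  # e.g. an S3 instance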
Example #17
def test__s3_find_files_returns_sorted(s3_client, config_content_mysql_only,
                                       tmpdir):
    # cleanup the bucket first
    s3_client.delete_all_objects()

    config = tmpdir.join('twindb-backup.cfg')
    content = config_content_mysql_only.format(
        AWS_ACCESS_KEY_ID=os.environ['AWS_ACCESS_KEY_ID'],
        AWS_SECRET_ACCESS_KEY=os.environ['AWS_SECRET_ACCESS_KEY'],
        BUCKET=s3_client.bucket,
        daily_copies=5,
        hourly_copies=2)
    config.write(content)

    cmd = [
        'twindb-backup', '--debug', '--config',
        str(config), 'backup', 'daily'
    ]
    n_runs = 3
    for x in xrange(n_runs):
        assert call(cmd) == 0

    dst = S3(
        s3_client.bucket,
        AWSAuthOptions(os.environ['AWS_ACCESS_KEY_ID'],
                       os.environ['AWS_SECRET_ACCESS_KEY']))
    for x in xrange(10):
        result = dst.find_files(dst.remote_path, 'daily')
        assert len(result) == n_runs
        assert result == sorted(result)
        prefix = "{remote_path}/{hostname}/{run_type}/mysql/mysql-".format(
            remote_path=dst.remote_path,
            hostname=socket.gethostname(),
            run_type='daily')
        files = dst.list_files(prefix)
        assert len(files) == n_runs
        assert files == sorted(files)
Example #18
def s3():
    return S3(bucket='test-bucket',
              aws_access_key_id='access_key',
              aws_secret_access_key='secret_key')
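
Like Examples #7 and #12, this is presumably wrapped in @pytest.fixture in the source, serving a pre-built S3 destination to unit tests. A hypothetical consumer:

# Hypothetical test; pytest would inject the S3 object by argument name.
def test__s3_fixture_defaults(s3):
    assert s3.bucket == 'test-bucket'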
Example #19
def test_basename():
    dst = S3('bucket', AWSAuthOptions('b', 'c'))
    assert dst.basename('s3://bucket/some_dir/some_file.txt') == \
        'some_dir/some_file.txt'
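
A minimal sketch of the contract this assertion pins down: basename() strips the s3://<bucket>/ prefix from a full URI and returns the object key. This is a reconstruction for illustration, not the actual method body:

# Reconstruction of the behavior the test describes; hypothetical helper.
def basename_sketch(uri, bucket='bucket'):
    return uri.replace('s3://%s/' % bucket, '', 1)

assert basename_sketch('s3://bucket/some_dir/some_file.txt') == \
    'some_dir/some_file.txt'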