def binlogs_to_backup(cursor, last_binlog=None):
    """
    Find the list of binlogs to copy.

    Returns the binlogs newer than *last_binlog* up to, but excluding,
    the current one (the binlog the server is still writing to).
    If binary logging is not enabled on the server the function
    returns an empty list.

    :param cursor: MySQL cursor
    :param last_binlog: Name of the last copied binlog.
    :return: list of binlogs to backup.
    :rtype: list
    :raise OperationError: if the server fails with anything other
        than "no binary logging" (MySQL error 1381).
    """
    binlogs = []
    try:
        cursor.execute("SHOW BINARY LOGS")
        for row in cursor.fetchall():
            binlog = row["Log_name"]
            # Binlog names sort lexicographically, so a plain string
            # comparison selects the logs written after last_binlog.
            if not last_binlog or binlog > last_binlog:
                binlogs.append(binlog)
        # Drop the last entry: it is the binlog currently being written.
        return binlogs[:-1]
    except InternalError as err:
        if err.args and err.args[0] == 1381:  # ER_NO_BINARY_LOGGING
            return binlogs
        # Chain the original exception so the traceback is preserved.
        raise OperationError(err) from err
def _mysql_service(dst, action):
    """Start or stop the MySQL service on a remote server.

    :param dst: Destination server
    :type dst: Ssh
    :param action: string start or stop
    :type action: str
    """
    # Try the known init-script names in order; first one that works wins.
    for candidate in ('mysqld', 'mysql'):
        cmd = "PATH=$PATH:/sbin sudo service %s %s" % (candidate, action)
        try:
            return dst.execute_command(cmd, quiet=True)
        except SshClientException as err:
            LOG.debug(err)

    # Neither init script worked: fall back to driving mysqld directly.
    try:
        LOG.warning(
            'Failed to %s MySQL with an init script. '
            'Will try to %s mysqld.', action, action)
        if action == "start":
            result = dst.execute_command(
                "PATH=$PATH:/sbin sudo bash -c 'nohup mysqld &'",
                background=True)
            # Give mysqld a moment to come up before returning.
            time.sleep(10)
            return result
        elif action == "stop":
            return dst.execute_command(
                "PATH=$PATH:/sbin sudo kill $(pidof mysqld)")
    except SshClientException as err:
        LOG.error(err)
        raise OperationError('Failed to %s MySQL on %r' % (action, dst))
def ensure_empty(path):
    """
    Check that a given directory is empty, raising if it is not.

    A missing directory is treated the same as an empty one -- the
    restore can safely proceed into it.

    :param path: path to directory
    :type path: str
    :raise OperationError: if the directory exists and is not empty.
    """
    try:
        if os.listdir(path):
            msg = ('You asked to restore backup copy in directory "%s". '
                   "But it is not empty." % path)
            raise OperationError(msg)
    except FileNotFoundError:
        # Previously spelled as ``err.errno == 2``; FileNotFoundError is
        # the ENOENT subclass of OSError, so other OSErrors still propagate.
        pass
def share(self, s3_url):
    """
    Make an S3 object publicly readable and return its public link.

    :param s3_url: S3 url
    :type s3_url: str
    :return: Public url
    :rtype: str
    :raise OperationError: if the object is not found in this destination.
    """
    # The run type (daily/weekly/...) is the 5th component of the URL:
    # s3://bucket/host/<run_type>/...
    run_type = s3_url.split('/')[4]
    known_urls = self.list_files(
        self.remote_path,
        pattern="/%s/" % run_type)
    if s3_url not in known_urls:
        raise OperationError("File not found via url: %s" % s3_url)
    self._set_file_access(S3FileAccess.public_read, s3_url)
    return self._get_file_url(s3_url)
def _get_file_content(self, path):
    """Fetch an object from S3, retrying with exponential backoff.

    Reads s3://<bucket>/<path> and returns its raw content.  On a
    client error the read is retried after 2, 4, ... seconds, doubling
    each time, for up to 10 attempts in total.

    :param path: key of the object inside the bucket
    :return: object content
    :raise OperationError: if the object still cannot be read after
        all attempts.
    """
    attempts = 10  # up to 1024 seconds
    max_sleep = 2 ** attempts
    sleep_time = 2
    while sleep_time <= max_sleep:
        try:
            response = self.s3_client.get_object(
                Bucket=self._bucket,
                Key=path)
            self.validate_client_response(response)
            return response['Body'].read()
        except ClientError as err:
            LOG.warning('Failed to read s3://%s/%s', self._bucket, path)
            LOG.warning(err)
            LOG.info('Will try again in %d seconds', sleep_time)
            time.sleep(sleep_time)
            sleep_time *= 2
    raise OperationError(
        'Failed to read s3://%s/%s after %d attempts'
        % (self._bucket, path, attempts))
def backup_files(run_type, config):
    """Backup local directories

    :param run_type: Run type
    :type run_type: str
    :param config: Configuration
    :type config: TwinDBBackupConfig
    """
    started_at = time.time()
    try:
        for path in config.backup_dirs:
            LOG.debug('copying %s', path)
            source = FileSource(path, run_type)
            destination = config.destination()
            _backup_stream(config, source, destination)
            source.apply_retention_policy(destination, config, run_type)
    except (DestinationError, SourceError, SshClientException) as err:
        raise OperationError(err)
    # Record how long the whole files backup took.
    export_info(
        config,
        data=time.time() - started_at,
        category=ExportCategory.files,
        measure_type=ExportMeasureType.backup)
def backup_mysql(run_type, config):
    """Take backup of local MySQL instance

    :param run_type: Run type
    :type run_type: str
    :param config: Tool configuration
    :type config: TwinDBBackupConfig
    """
    if config.backup_mysql is False:
        LOG.debug("Not backing up MySQL")
        return

    dst = config.destination()

    # Fall back to daily fulls when the option is absent from the config.
    try:
        full_backup = config.mysql.full_backup
    except configparser.NoOptionError:
        full_backup = "daily"

    backup_start = time.time()
    status = MySQLStatus(dst=dst)

    src_kwargs = {
        "backup_type": status.next_backup_type(full_backup, run_type),
        "dst": dst,
        "xtrabackup_binary": config.mysql.xtrabackup_binary,
    }
    parent = status.candidate_parent(run_type)
    if src_kwargs["backup_type"] == "incremental":
        src_kwargs["parent_lsn"] = parent.lsn

    LOG.debug("Creating source %r", src_kwargs)
    src = MySQLSource(
        MySQLConnectInfo(config.mysql.defaults_file),
        run_type,
        **src_kwargs)

    callbacks = []
    try:
        _backup_stream(config, src, dst, callbacks=callbacks)
    except (DestinationError, SourceError, SshClientException) as err:
        raise OperationError(err)

    LOG.debug("Backup copy name: %s", src.get_name())
    copy_kwargs = {
        "type": src.type,
        "binlog": src.binlog_coordinate[0],
        "position": src.binlog_coordinate[1],
        "lsn": src.lsn,
        "backup_started": backup_start,
        "backup_finished": time.time(),
        "config_files": my_cnfs(MY_CNF_COMMON_PATHS),
    }
    if src.incremental:
        copy_kwargs["parent"] = parent.key

    backup_copy = MySQLCopy(src.host, run_type, src.basename, **copy_kwargs)
    status.add(backup_copy)
    status = src.apply_retention_policy(dst, config, run_type, status)
    LOG.debug("status after apply_retention_policy():\n%s", status)

    export_info(
        config,
        data=backup_copy.duration,
        category=ExportCategory.mysql,
        measure_type=ExportMeasureType.backup,
    )
    status.save(dst)

    LOG.debug("Callbacks are %r", callbacks)
    for callback in callbacks:
        callback[0].callback(**callback[1])
def backup_mysql(run_type, config):
    """Take backup of local MySQL instance

    :param run_type: Run type
    :type run_type: str
    :param config: Tool configuration
    :type config: TwinDBBackupConfig
    """
    if config.backup_mysql is False:
        LOG.debug('Not backing up MySQL')
        return

    dst = config.destination()

    # Default to daily full backups when the option is not configured.
    try:
        full_backup = config.mysql.full_backup
    except ConfigParser.NoOptionError:
        full_backup = 'daily'

    backup_start = time.time()
    status = MySQLStatus(dst=dst)

    source_args = {
        'backup_type': status.next_backup_type(full_backup, run_type),
        'dst': dst,
        'xtrabackup_binary': config.mysql.xtrabackup_binary
    }
    parent = status.candidate_parent(run_type)
    if source_args['backup_type'] == 'incremental':
        source_args['parent_lsn'] = parent.lsn

    LOG.debug('Creating source %r', source_args)
    src = MySQLSource(
        MySQLConnectInfo(config.mysql.defaults_file),
        run_type,
        **source_args)

    callbacks = []
    try:
        _backup_stream(config, src, dst, callbacks=callbacks)
    except (DestinationError, SourceError, SshClientException) as err:
        raise OperationError(err)

    LOG.debug('Backup copy name: %s', src.get_name())
    copy_args = {
        'type': src.type,
        'binlog': src.binlog_coordinate[0],
        'position': src.binlog_coordinate[1],
        'lsn': src.lsn,
        'backup_started': backup_start,
        'backup_finished': time.time(),
        'config_files': my_cnfs(MY_CNF_COMMON_PATHS)
    }
    if src.incremental:
        copy_args['parent'] = parent.key

    backup_copy = MySQLCopy(src.host, run_type, src.basename, **copy_args)
    status.add(backup_copy)
    status = src.apply_retention_policy(dst, config, run_type, status)
    LOG.debug('status after apply_retention_policy():\n%s', status)

    export_info(
        config,
        data=backup_copy.duration,
        category=ExportCategory.mysql,
        measure_type=ExportMeasureType.backup)
    status.save(dst)

    LOG.debug('Callbacks are %r', callbacks)
    for callback in callbacks:
        callback[0].callback(**callback[1])
def get_destination(config, hostname=None):
    """
    Read config and return instance of Destination class.

    :param config: Tool configuration.
    :type config: ConfigParser.ConfigParser
    :param hostname: Local hostname. If None (the default), the current
        hostname is resolved at call time.
    :type hostname: str
    :return: Instance of destination class.
    :rtype: BaseDestination
    :raise OperationError: if the configured destination is unsupported.
    """
    # The previous signature used ``hostname=socket.gethostname()``,
    # which evaluated the default once at import time; resolve per call.
    if hostname is None:
        hostname = socket.gethostname()

    try:
        destination = config.get('destination', 'backup_destination')
        LOG.debug('Destination in the config %s', destination)
        destination = destination.strip('"\'')
    except (ConfigParser.NoOptionError, ConfigParser.NoSectionError):
        LOG.critical("Backup destination must be specified "
                     "in the config file")
        exit(-1)

    if destination == "ssh":
        host = config.get('ssh', 'backup_host')
        try:
            port = int(config.get('ssh', 'port'))
        except ConfigParser.NoOptionError:
            port = 22
        try:
            ssh_key = config.get('ssh', 'ssh_key')
        except ConfigParser.NoOptionError:
            ssh_key = '/root/.ssh/id_rsa'
            LOG.debug(
                'ssh_key is not defined in config. '
                'Will use default %s', ssh_key)
        user = config.get('ssh', 'ssh_user')
        remote_path = config.get('ssh', 'backup_dir')
        return Ssh(
            remote_path,
            hostname=hostname,
            ssh_host=host,
            ssh_port=port,
            ssh_user=user,
            ssh_key=ssh_key,
        )
    elif destination == "s3":
        bucket = config.get('s3', 'BUCKET').strip('"\'')
        access_key_id = config.get('s3', 'AWS_ACCESS_KEY_ID').strip('"\'')
        secret_access_key = \
            config.get('s3', 'AWS_SECRET_ACCESS_KEY').strip('"\'')
        default_region = \
            config.get('s3', 'AWS_DEFAULT_REGION').strip('"\'')
        return S3(
            bucket,
            AWSAuthOptions(
                access_key_id,
                secret_access_key,
                default_region=default_region),
            hostname=hostname)
    else:
        LOG.critical('Destination %s is not supported', destination)
        raise OperationError('Unsupported destination')