def __init__(self, name, config, target_directory, dry_run=False):
    """Set up a mysqldump backup run.

    :param name: name of this backupset
    :param config: holland config object for this backupset
    :param target_directory: directory where backup artifacts are written
    :param dry_run: when True the plugin should avoid real side effects
    :raises ValidationError: when config does not satisfy CONFIGSPEC
    """
    self.name = name
    self.config = config
    self.target_directory = target_directory
    self.dry_run = dry_run
    self.config.validate_config(self.CONFIGSPEC)  # -> ValidationError

    # Build a schema discovery object.  Nothing is scanned here; the
    # estimate/backup phases drive iteration via schema.refresh().
    self.schema = MySQLSchema()
    section = self.config["mysqldump"]
    filter_table = (
        (self.schema.add_database_filter, include_glob, "databases"),
        (self.schema.add_database_filter, exclude_glob, "exclude-databases"),
        (self.schema.add_table_filter, include_glob_qualified, "tables"),
        (self.schema.add_table_filter, exclude_glob_qualified,
         "exclude-tables"),
        (self.schema.add_engine_filter, include_glob, "engines"),
        (self.schema.add_engine_filter, exclude_glob, "exclude-engines"),
    )
    # Attach include/exclude glob filters in the same order as the
    # original explicit calls.
    for attach, make_glob, option in filter_table:
        attach(make_glob(*section[option]))

    self.mysql_config = build_mysql_config(self.config["mysql:client"])
    self.client = connect(self.mysql_config["client"])
    self.mock_env = None
def __init__(self, name, config, target_directory, dry_run=False):
    """Construct the plugin: validate config, prepare schema filters,
    and open a MySQL client connection object.

    :param name: backupset name
    :param config: holland config object
    :param target_directory: destination directory for backup files
    :param dry_run: when True the run should have no real side effects
    :raises ValidationError: if config fails CONFIGSPEC validation
    """
    self.name = name
    self.config = config
    self.target_directory = target_directory
    self.dry_run = dry_run
    self.config.validate_config(self.CONFIGSPEC)  # -> ValidationError

    # Discovery shell; populated later when schema.refresh() runs
    # during the estimate or backup phase.
    self.schema = MySQLSchema()

    dump_cfg = self.config["mysqldump"]
    include_db = include_glob(*dump_cfg["databases"])
    exclude_db = exclude_glob(*dump_cfg["exclude-databases"])
    self.schema.add_database_filter(include_db)
    self.schema.add_database_filter(exclude_db)

    include_tbl = include_glob_qualified(*dump_cfg["tables"])
    exclude_tbl = exclude_glob_qualified(*dump_cfg["exclude-tables"])
    self.schema.add_table_filter(include_tbl)
    self.schema.add_table_filter(exclude_tbl)

    self.schema.add_engine_filter(include_glob(*dump_cfg["engines"]))
    self.schema.add_engine_filter(exclude_glob(*dump_cfg["exclude-engines"]))

    self.mysql_config = build_mysql_config(self.config["mysql:client"])
    self.client = connect(self.mysql_config["client"])
class MySQLDumpPlugin(object):
    """MySQLDump Backup Plugin interface for Holland"""

    # configspec validated against self.config on instantiation
    CONFIGSPEC = CONFIGSPEC

    def __init__(self, name, config, target_directory, dry_run=False):
        """Initialize the plugin.

        :param name: backupset name
        :param config: holland config object
        :param target_directory: directory backup files are written to
        :param dry_run: when True, avoid real side effects
        :raises ValidationError: if config fails CONFIGSPEC validation
        """
        self.name = name
        self.config = config
        self.target_directory = target_directory
        self.dry_run = dry_run
        self.config.validate_config(self.CONFIGSPEC)  # -> ValidationError
        # Setup a discovery shell to find schema items
        # This will iterate over items during the estimate
        # or backup phase, which will call schema.refresh()
        self.schema = MySQLSchema()
        config = self.config['mysqldump']
        self.schema.add_database_filter(include_glob(*config['databases']))
        self.schema.add_database_filter(
            exclude_glob(*config['exclude-databases']))
        self.schema.add_table_filter(include_glob_qualified(*config['tables']))
        self.schema.add_table_filter(
            exclude_glob_qualified(*config['exclude-tables']))
        self.schema.add_engine_filter(include_glob(*config['engines']))
        self.schema.add_engine_filter(exclude_glob(*config['exclude-engines']))
        self.mysql_config = build_mysql_config(self.config['mysql:client'])
        self.client = connect(self.mysql_config['client'])

    def estimate_backup_size(self):
        """Estimate the size of the backup this plugin will generate.

        :returns: estimated size in bytes (float)
        :raises BackupError: on an invalid estimate-method or MySQL error
        """
        LOG.info("Estimating size of mysqldump backup")
        estimate_method = self.config['mysqldump']['estimate-method']
        if estimate_method.startswith('const:'):
            try:
                # strip the 'const:' prefix and parse a literal size
                return parse_size(estimate_method[6:])
            # BUG FIX: was Python 2 'except ValueError, exc' syntax
            except ValueError as exc:
                raise BackupError(str(exc))
        if estimate_method != 'plugin':
            raise BackupError("Invalid estimate-method '%s'" % estimate_method)
        try:
            db_iter = DatabaseIterator(self.client)
            tbl_iter = MetadataTableIterator(self.client)
            try:
                self.client.connect()
                self.schema.refresh(db_iter=db_iter, tbl_iter=tbl_iter)
            # BUG FIX: was Python 2 'except MySQLError, exc' syntax
            except MySQLError as exc:
                LOG.error("Failed to estimate backup size")
                LOG.error("[%d] %s", *exc.args)
                raise BackupError("MySQL Error [%d] %s" % exc.args)
            return float(sum([db.size for db in self.schema.databases]))
        finally:
            # BUG FIX: the outer try had no handler/finalizer (SyntaxError);
            # always release the connection, matching sibling revisions.
            self.client.disconnect()
class MySQLDumpPlugin(object):
    """MySQLDump Backup Plugin interface for Holland"""

    # configspec validated against self.config on instantiation
    CONFIGSPEC = CONFIGSPEC

    def __init__(self, name, config, target_directory, dry_run=False):
        # name: backupset name; config: holland config object;
        # target_directory: where backup files are written;
        # dry_run: when True, run without real side effects
        self.name = name
        self.config = config
        self.target_directory = target_directory
        self.dry_run = dry_run
        self.config.validate_config(self.CONFIGSPEC)  # -> ValidationError
        # Setup a discovery shell to find schema items
        # This will iterate over items during the estimate
        # or backup phase, which will call schema.refresh()
        self.schema = MySQLSchema()
        # local rebind: 'config' now refers to the [mysqldump] section
        config = self.config['mysqldump']
        self.schema.add_database_filter(include_glob(*config['databases']))
        self.schema.add_database_filter(
            exclude_glob(*config['exclude-databases']))
        self.schema.add_table_filter(include_glob_qualified(*config['tables']))
        self.schema.add_table_filter(
            exclude_glob_qualified(*config['exclude-tables']))
        self.schema.add_engine_filter(include_glob(*config['engines']))
        self.schema.add_engine_filter(exclude_glob(*config['exclude-engines']))
        self.mysql_config = build_mysql_config(self.config['mysql:client'])
        self.client = connect(self.mysql_config['client'])

    def estimate_backup_size(self):
        """Estimate the size of the backup this plugin will generate"""
        LOG.info("Estimating size of mysqldump backup")
        estimate_method = self.config['mysqldump']['estimate-method']
        # 'const:<size>' short-circuits with a user supplied constant
        if estimate_method.startswith('const:'):
            try:
                # skip the 6-char 'const:' prefix
                return parse_size(estimate_method[6:])
            except ValueError as exc:
                raise BackupError(str(exc))
        if estimate_method != 'plugin':
            raise BackupError("Invalid estimate-method '%s'" % estimate_method)
        try:
            db_iter = DatabaseIterator(self.client)
            tbl_iter = MetadataTableIterator(self.client)
            try:
                self.client.connect()
            except Exception as ex:
                # connection failure is surfaced as a BackupError
                LOG.error("Failed to connect to database")
                LOG.error("%s", ex)
                raise BackupError("MySQL Error %s" % ex)
            try:
                self.schema.refresh(db_iter=db_iter, tbl_iter=tbl_iter)
            except MySQLError as exc:
                LOG.error("Failed to estimate backup size")
                LOG.error("[%d] %s", *exc.args)
                raise BackupError("MySQL Error [%d] %s" % exc.args)
            # total size of every discovered database, as a float
            return float(sum([db.size for db in self.schema.databases]))
        finally:
            # always release the connection opened above
            self.client.disconnect()

    def _fast_refresh_schema(self):
        # determine if we can skip expensive table metadata lookups entirely
        # and just worry about finding database names
        # However, with lock-method=auto-detect we must look at table engines
        # to determine what lock method to use
        config = self.config['mysqldump']
        fast_iterate = config['lock-method'] != 'auto-detect' and \
            not config['exclude-invalid-views']
        try:
            db_iter = DatabaseIterator(self.client)
            tbl_iter = SimpleTableIterator(self.client, record_engines=True)
            try:
                self.client.connect()
                self.schema.refresh(db_iter=db_iter,
                                    tbl_iter=tbl_iter,
                                    fast_iterate=fast_iterate)
            except MySQLError as exc:
                LOG.debug("MySQLdb error [%d] %s", exc_info=True, *exc.args)
                raise BackupError("MySQL Error [%d] %s" % exc.args)
        finally:
            self.client.disconnect()

    def backup(self):
        """Run a MySQL backup"""
        if self.schema.timestamp is None:
            # schema was never refreshed; do a cheap discovery pass now
            self._fast_refresh_schema()
        mock_env = None
        if self.dry_run:
            # replace the process environment so no real work is done
            mock_env = MockEnvironment()
            mock_env.replace_environment()
            LOG.info("Running in dry-run mode.")
        try:
            if self.config['mysqldump']['stop-slave']:
                # fresh connection used for slave control statements
                self.client = connect(self.mysql_config['client'])
                slave_status = self.client.show_slave_status()
                if slave_status is None:
                    raise BackupError("stop-slave enabled, but 'show slave "
                                      "status' failed")
                elif slave_status['slave_sql_running'] != 'Yes':
                    raise BackupError("stop-slave enabled, but replication is "
                                      "not running")
                _stop_slave(self.client, self.config)
            elif self.config['mysqldump']['bin-log-position']:
                # record master binlog coordinates in the config so they
                # are persisted with the backup
                self.config['mysql:replication'] = {}
                repl_cfg = self.config['mysql:replication']
                try:
                    master_info = self.client.show_master_status()
                    if master_info:
                        repl_cfg['master_log_file'] = master_info['file']
                        repl_cfg['master_log_pos'] = master_info['position']
                except MySQLError as exc:
                    raise BackupError(
                        "Failed to acquire master status [%d] %s" % exc.args)
            self._backup()
        finally:
            # restart replication only if _stop_slave recorded replication
            # state under 'mysql:replication' — TODO confirm against
            # _stop_slave's implementation
            if self.config['mysqldump']['stop-slave'] and \
                    'mysql:replication' in self.config:
                _start_slave(self.client, self.config['mysql:replication'])
            if mock_env:
                mock_env.restore_environment()

    def _backup(self):
        """Real backup method. May raise BackupError exceptions"""
        config = self.config['mysqldump']
        # setup defaults_file with ignore-table exclusions
        defaults_file = os.path.join(self.target_directory, 'my.cnf')
        write_options(self.mysql_config, defaults_file)
        if config['exclude-invalid-views']:
            LOG.info("* Finding and excluding invalid views...")
            definitions_path = os.path.join(self.target_directory,
                                            'invalid_views.sql')
            exclude_invalid_views(self.schema, self.client, definitions_path)
        add_exclusions(self.schema, defaults_file)
        # find the path to the mysqldump command
        mysqldump_bin = find_mysqldump(path=config['mysql-binpath'])
        LOG.info("Using mysqldump executable: %s", mysqldump_bin)
        # setup the mysqldump environment
        extra_defaults = config['extra-defaults']
        try:
            mysqldump = MySQLDump(defaults_file,
                                  mysqldump_bin,
                                  extra_defaults=extra_defaults)
        except MySQLDumpError as exc:
            raise BackupError(str(exc))
        except Exception as ex:
            # NOTE(review): swallowing this leaves 'mysqldump' unbound on
            # the next line — confirm whether this branch is reachable
            LOG.warning(ex)
        LOG.info("mysqldump version %s",
                 '.'.join([str(digit) for digit in mysqldump.version]))
        options = collect_mysqldump_options(config, mysqldump, self.client)
        validate_mysqldump_options(mysqldump, options)
        os.mkdir(os.path.join(self.target_directory, 'backup_data'))
        if self.config['compression']['method'] != 'none' and \
                self.config['compression']['level'] > 0:
            try:
                cmd, ext = lookup_compression(
                    self.config['compression']['method'])
            except OSError as exc:
                raise BackupError(
                    "Unable to load compression method '%s': %s" %
                    (self.config['compression']['method'], exc))
            LOG.info("Using %s compression level %d with args %s",
                     self.config['compression']['method'],
                     self.config['compression']['level'],
                     self.config['compression']['options'])
        else:
            LOG.info("Not compressing mysqldump output")
            # 'cmd' is assigned but unused below; only 'ext' feeds start()
            cmd = ''
            ext = ''
        try:
            start(mysqldump=mysqldump,
                  schema=self.schema,
                  lock_method=config['lock-method'],
                  file_per_database=config['file-per-database'],
                  open_stream=self._open_stream,
                  compression_ext=ext)
        except MySQLDumpError as exc:
            raise BackupError(str(exc))

    def _open_stream(self, path, mode, method=None):
        """Open a stream through the holland compression api, relative to
        this instance's target directory
        """
        path = str(os.path.join(self.target_directory, 'backup_data', path))
        # 'method' overrides the configured compression method when given
        compression_method = method or self.config['compression']['method']
        compression_level = self.config['compression']['level']
        compression_options = self.config['compression']['options']
        compression_inline = self.config['compression']['inline']
        stream = open_stream(path, mode,
                             compression_method,
                             compression_level,
                             extra_args=compression_options,
                             inline=compression_inline)
        return stream

    def info(self):
        """Summarize information about this backup"""
        import textwrap
        return textwrap.dedent("""
        lock-method = %s
        file-per-database = %s

        Options used:
        flush-logs = %s
        flush-privileges = %s
        routines = %s
        events = %s

        Schema Filters:
        databases = %s
        exclude-databases = %s
        tables = %s
        exclude-tables = %s
        """).strip() % (self.config['mysqldump']['lock-method'],
                        self.config['mysqldump']['file-per-database']
                        and 'yes' or 'no',
                        self.config['mysqldump']['flush-logs'],
                        self.config['mysqldump']['flush-privileges'],
                        self.config['mysqldump']['dump-routines'],
                        self.config['mysqldump']['dump-events'],
                        ','.join(
                            self.config['mysqldump']['databases']),
                        ','.join(
                            self.config['mysqldump']['exclude-databases']),
                        ','.join(self.config['mysqldump']['tables']),
                        ','.join(
                            self.config['mysqldump']['exclude-tables']))
class MySQLDumpPlugin(object):
    """MySQLDump Backup Plugin interface for Holland"""

    # configspec validated against self.config on instantiation
    CONFIGSPEC = CONFIGSPEC

    def __init__(self, name, config, target_directory, dry_run=False):
        # name: backupset name; config: holland config object;
        # target_directory: where backup files are written;
        # dry_run: when True, run via a mock environment (see backup())
        self.name = name
        self.config = config
        self.target_directory = target_directory
        self.dry_run = dry_run
        self.config.validate_config(self.CONFIGSPEC)  # -> ValidationError
        # Setup a discovery shell to find schema items
        # This will iterate over items during the estimate
        # or backup phase, which will call schema.refresh()
        self.schema = MySQLSchema()
        # local rebind: 'config' now refers to the [mysqldump] section
        config = self.config["mysqldump"]
        self.schema.add_database_filter(include_glob(*config["databases"]))
        self.schema.add_database_filter(
            exclude_glob(*config["exclude-databases"]))
        self.schema.add_table_filter(include_glob_qualified(*config["tables"]))
        self.schema.add_table_filter(
            exclude_glob_qualified(*config["exclude-tables"]))
        self.schema.add_engine_filter(include_glob(*config["engines"]))
        self.schema.add_engine_filter(exclude_glob(*config["exclude-engines"]))
        self.mysql_config = build_mysql_config(self.config["mysql:client"])
        self.client = connect(self.mysql_config["client"])
        # set by backup() when dry_run is True
        self.mock_env = None

    def estimate_backup_size(self):
        """Estimate the size of the backup this plugin will generate"""
        LOG.info("Estimating size of mysqldump backup")
        estimate_method = self.config["mysqldump"]["estimate-method"]
        # 'const:<size>' short-circuits with a user supplied constant
        if estimate_method.startswith("const:"):
            try:
                # skip the 6-char 'const:' prefix
                return parse_size(estimate_method[6:])
            except ValueError as exc:
                raise BackupError(str(exc))
        if estimate_method != "plugin":
            raise BackupError("Invalid estimate-method '%s'" % estimate_method)
        try:
            db_iter = DatabaseIterator(self.client)
            tbl_iter = MetadataTableIterator(self.client)
            try:
                self.client.connect()
            except Exception as ex:
                # connection failure is surfaced as a BackupError
                LOG.error("Failed to connect to database")
                LOG.debug("%s", ex)
                raise BackupError("MySQL Error %s" % ex)
            try:
                self.schema.refresh(db_iter=db_iter, tbl_iter=tbl_iter)
            except MySQLError as exc:
                LOG.error("Failed to estimate backup size")
                LOG.debug("[%d] %s", *exc.args)
                raise BackupError("MySQL Error [%d] %s" % exc.args)
            # total size of every discovered database, as a float
            return float(sum([db.size for db in self.schema.databases]))
        finally:
            # always release the connection opened above
            self.client.disconnect()

    def _fast_refresh_schema(self):
        # determine if we can skip expensive table metadata lookups entirely
        # and just worry about finding database names
        # However, with lock-method=auto-detect we must look at table engines
        # to determine what lock method to use
        config = self.config["mysqldump"]
        fast_iterate = (config["lock-method"] != "auto-detect" and
                        not config["exclude-invalid-views"])
        try:
            db_iter = DatabaseIterator(self.client)
            tbl_iter = SimpleTableIterator(self.client, record_engines=True)
            try:
                self.client.connect()
                self.schema.refresh(db_iter=db_iter,
                                    tbl_iter=tbl_iter,
                                    fast_iterate=fast_iterate)
            except MySQLError as exc:
                LOG.debug("MySQLdb error [%d] %s", exc_info=True, *exc.args)
                raise BackupError("MySQL Error [%d] %s" % exc.args)
        finally:
            self.client.disconnect()

    def backup(self):
        """Run a MySQL backup"""
        if self.schema.timestamp is None:
            # schema was never refreshed; do a cheap discovery pass now
            self._fast_refresh_schema()
        try:
            self.client = connect(self.mysql_config["client"])
        except Exception as ex:
            LOG.debug("%s", ex)
            # NOTE(review): stray trailing quote inside this message
            raise BackupError("Failed connecting to database'")
        if self.dry_run:
            # replace the process environment so no real work is done
            self.mock_env = MockEnvironment()
            self.mock_env.replace_environment()
            LOG.info("Running in dry-run mode.")
        # sanity check the connection before doing anything destructive
        status = self.client.show_databases()
        if not status:
            raise BackupError("Failed to run 'show databases'")
        try:
            if self.config["mysqldump"]["stop-slave"]:
                slave_status = self.client.show_slave_status()
                if not slave_status:
                    raise BackupError("stop-slave enabled, but 'show slave "
                                      "status' failed")
                if slave_status and slave_status["slave_sql_running"] != "Yes":
                    raise BackupError("stop-slave enabled, but replication is "
                                      "not running")
                # replication is only really stopped outside dry-run mode
                if not self.dry_run:
                    _stop_slave(self.client, self.config)
            elif self.config["mysqldump"]["bin-log-position"]:
                # record master binlog coordinates in the config so they
                # are persisted with the backup
                self.config["mysql:replication"] = {}
                repl_cfg = self.config["mysql:replication"]
                try:
                    master_info = self.client.show_master_status()
                    if master_info:
                        repl_cfg["master_log_file"] = master_info["file"]
                        repl_cfg["master_log_pos"] = master_info["position"]
                except MySQLError as exc:
                    raise BackupError(
                        "Failed to acquire master status [%d] %s" % exc.args)
            self._backup()
        finally:
            # restart replication only if replication state was recorded
            # under 'mysql:replication' — TODO confirm _stop_slave sets it
            if self.config["mysqldump"][
                    "stop-slave"] and "mysql:replication" in self.config:
                _start_slave(self.client, self.config["mysql:replication"])
            if self.mock_env:
                self.mock_env.restore_environment()

    def _backup(self):
        """Real backup method. May raise BackupError exceptions"""
        config = self.config["mysqldump"]
        # setup defaults_file with ignore-table exclusions
        defaults_file = os.path.join(self.target_directory, "my.cnf")
        write_options(self.mysql_config, defaults_file)
        if config["exclude-invalid-views"]:
            LOG.info("* Finding and excluding invalid views...")
            definitions_path = os.path.join(self.target_directory,
                                            "invalid_views.sql")
            exclude_invalid_views(self.schema, self.client, definitions_path)
        add_exclusions(self.schema, defaults_file)
        # find the path to the mysqldump command
        mysqldump_bin = find_mysqldump(path=config["mysql-binpath"])
        LOG.info("Using mysqldump executable: %s", mysqldump_bin)
        # setup the mysqldump environment
        extra_defaults = config["extra-defaults"]
        try:
            mysqldump = MySQLDump(defaults_file,
                                  mysqldump_bin,
                                  extra_defaults=extra_defaults,
                                  mock_env=self.mock_env)
        except MySQLDumpError as exc:
            raise BackupError(str(exc))
        except Exception as ex:  # pylint: disable=W0703
            # NOTE(review): swallowing this leaves 'mysqldump' unbound
            # below — confirm this branch is unreachable in practice
            LOG.warning(ex)
        mysqldump_version = ".".join(
            [str(digit) for digit in mysqldump.version])
        LOG.info("mysqldump version %s", mysqldump_version)
        options = collect_mysqldump_options(config, mysqldump, self.client)
        validate_mysqldump_options(mysqldump, options)
        os.mkdir(os.path.join(self.target_directory, "backup_data"))
        if (self.config["compression"]["method"] != "none" and
                self.config["compression"]["level"] > 0):
            try:
                # only the file extension is needed here
                _, ext = lookup_compression(
                    self.config["compression"]["method"])
            except OSError as exc:
                raise BackupError(
                    "Unable to load compression method '%s': %s"
                    % (self.config["compression"]["method"], exc))
            LOG.info(
                "Using %s compression level %d with args %s",
                self.config["compression"]["method"],
                self.config["compression"]["level"],
                self.config["compression"]["options"],
            )
        else:
            LOG.info("Not compressing mysqldump output")
            ext = ""
        try:
            start(
                mysqldump=mysqldump,
                schema=self.schema,
                lock_method=config["lock-method"],
                file_per_database=config["file-per-database"],
                open_stream=self._open_stream,
                compression_ext=ext,
                arg_per_database=config["arg-per-database"],
            )
        except MySQLDumpError as exc:
            raise BackupError(str(exc))

    def _open_stream(self, path, mode, method=None):
        """Open a stream through the holland compression api, relative to
        this instance's target directory
        """
        path = str(os.path.join(self.target_directory, "backup_data", path))
        # copy so the override below never mutates the live config
        config = deepcopy(self.config["compression"])
        if method:
            config["method"] = method
        stream = open_stream(path, mode, **config)
        return stream

    def info(self):
        """Summarize information about this backup"""
        return (textwrap.dedent("""
        lock-method = %s
        file-per-database = %s

        Options used:
        flush-logs = %s
        flush-privileges = %s
        routines = %s
        events = %s

        Schema Filters:
        databases = %s
        exclude-databases = %s
        tables = %s
        exclude-tables = %s
        """).strip() % (
            self.config["mysqldump"]["lock-method"],
            self.config["mysqldump"]["file-per-database"] and "yes" or "no",
            self.config["mysqldump"]["flush-logs"],
            self.config["mysqldump"]["flush-privileges"],
            self.config["mysqldump"]["dump-routines"],
            self.config["mysqldump"]["dump-events"],
            ",".join(self.config["mysqldump"]["databases"]),
            ",".join(self.config["mysqldump"]["exclude-databases"]),
            ",".join(self.config["mysqldump"]["tables"]),
            ",".join(self.config["mysqldump"]["exclude-tables"]),
        ))
class MySQLDumpPlugin(object):
    """MySQLDump Backup Plugin interface for Holland"""

    # configspec validated against self.config on instantiation
    CONFIGSPEC = CONFIGSPEC

    def __init__(self, name, config, target_directory, dry_run=False):
        # name: backupset name; config: holland config object;
        # target_directory: where backup files are written;
        # dry_run: when True, run via a mock environment (see backup())
        self.name = name
        self.config = config
        self.target_directory = target_directory
        self.dry_run = dry_run
        self.config.validate_config(self.CONFIGSPEC)  # -> ValidationError
        # Setup a discovery shell to find schema items
        # This will iterate over items during the estimate
        # or backup phase, which will call schema.refresh()
        self.schema = MySQLSchema()
        # local rebind: 'config' now refers to the [mysqldump] section
        config = self.config["mysqldump"]
        self.schema.add_database_filter(include_glob(*config["databases"]))
        self.schema.add_database_filter(exclude_glob(*config["exclude-databases"]))
        self.schema.add_table_filter(include_glob_qualified(*config["tables"]))
        self.schema.add_table_filter(exclude_glob_qualified(*config["exclude-tables"]))
        self.schema.add_engine_filter(include_glob(*config["engines"]))
        self.schema.add_engine_filter(exclude_glob(*config["exclude-engines"]))
        self.mysql_config = build_mysql_config(self.config["mysql:client"])
        self.client = connect(self.mysql_config["client"])
        # set by backup() when dry_run is True
        self.mock_env = None

    def estimate_backup_size(self):
        """Estimate the size of the backup this plugin will generate"""
        LOG.info("Estimating size of mysqldump backup")
        estimate_method = self.config["mysqldump"]["estimate-method"]
        # 'const:<size>' short-circuits with a user supplied constant
        if estimate_method.startswith("const:"):
            try:
                # skip the 6-char 'const:' prefix
                return parse_size(estimate_method[6:])
            except ValueError as exc:
                raise BackupError(str(exc))
        if estimate_method != "plugin":
            raise BackupError("Invalid estimate-method '%s'" % estimate_method)
        try:
            db_iter = DatabaseIterator(self.client)
            tbl_iter = MetadataTableIterator(self.client)
            try:
                self.client.connect()
            except Exception as ex:
                # connection failure is surfaced as a BackupError
                LOG.error("Failed to connect to database")
                LOG.debug("%s", ex)
                raise BackupError("MySQL Error %s" % ex)
            try:
                self.schema.refresh(db_iter=db_iter, tbl_iter=tbl_iter)
            except MySQLError as exc:
                LOG.error("Failed to estimate backup size")
                LOG.debug("[%d] %s", *exc.args)
                raise BackupError("MySQL Error [%d] %s" % exc.args)
            # total size of every discovered database, as a float
            return float(sum([db.size for db in self.schema.databases]))
        finally:
            # always release the connection opened above
            self.client.disconnect()

    def _fast_refresh_schema(self):
        # determine if we can skip expensive table metadata lookups entirely
        # and just worry about finding database names
        # However, with lock-method=auto-detect we must look at table engines
        # to determine what lock method to use
        config = self.config["mysqldump"]
        fast_iterate = (
            config["lock-method"] != "auto-detect"
            and not config["exclude-invalid-views"]
        )
        try:
            db_iter = DatabaseIterator(self.client)
            tbl_iter = SimpleTableIterator(self.client, record_engines=True)
            try:
                self.client.connect()
                self.schema.refresh(db_iter=db_iter,
                                    tbl_iter=tbl_iter,
                                    fast_iterate=fast_iterate)
            except MySQLError as exc:
                LOG.debug("MySQLdb error [%d] %s", exc_info=True, *exc.args)
                raise BackupError("MySQL Error [%d] %s" % exc.args)
        finally:
            self.client.disconnect()

    def backup(self):
        """Run a MySQL backup"""
        if self.schema.timestamp is None:
            # schema was never refreshed; do a cheap discovery pass now
            self._fast_refresh_schema()
        try:
            self.client = connect(self.mysql_config["client"])
        except Exception as ex:
            LOG.debug("%s", ex)
            # NOTE(review): stray trailing quote inside this message
            raise BackupError("Failed connecting to database'")
        if self.dry_run:
            # replace the process environment so no real work is done
            self.mock_env = MockEnvironment()
            self.mock_env.replace_environment()
            LOG.info("Running in dry-run mode.")
        # sanity check the connection before doing anything destructive
        status = self.client.show_databases()
        if not status:
            raise BackupError("Failed to run 'show databases'")
        try:
            if self.config["mysqldump"]["stop-slave"]:
                slave_status = self.client.show_slave_status()
                if not slave_status:
                    raise BackupError("stop-slave enabled, but 'show slave "
                                      "status' failed")
                if slave_status and slave_status["slave_sql_running"] != "Yes":
                    raise BackupError("stop-slave enabled, but replication is "
                                      "not running")
                # replication is only really stopped outside dry-run mode
                if not self.dry_run:
                    _stop_slave(self.client, self.config)
            elif self.config["mysqldump"]["bin-log-position"]:
                # record master binlog coordinates in the config so they
                # are persisted with the backup
                self.config["mysql:replication"] = {}
                repl_cfg = self.config["mysql:replication"]
                try:
                    master_info = self.client.show_master_status()
                    if master_info:
                        repl_cfg["master_log_file"] = master_info["file"]
                        repl_cfg["master_log_pos"] = master_info["position"]
                except MySQLError as exc:
                    raise BackupError("Failed to acquire master status [%d] %s" % exc.args)
            self._backup()
        finally:
            # restart replication only if replication state was recorded
            # under 'mysql:replication' — TODO confirm _stop_slave sets it
            if self.config["mysqldump"]["stop-slave"] and "mysql:replication" in self.config:
                _start_slave(self.client, self.config["mysql:replication"])
            if self.mock_env:
                self.mock_env.restore_environment()

    def _backup(self):
        """Real backup method. May raise BackupError exceptions"""
        config = self.config["mysqldump"]
        # setup defaults_file with ignore-table exclusions
        defaults_file = os.path.join(self.target_directory, "my.cnf")
        write_options(self.mysql_config, defaults_file)
        if config["exclude-invalid-views"]:
            LOG.info("* Finding and excluding invalid views...")
            definitions_path = os.path.join(self.target_directory, "invalid_views.sql")
            exclude_invalid_views(self.schema, self.client, definitions_path)
        add_exclusions(self.schema, defaults_file)
        # find the path to the mysqldump command
        mysqldump_bin = find_mysqldump(path=config["mysql-binpath"])
        LOG.info("Using mysqldump executable: %s", mysqldump_bin)
        # setup the mysqldump environment
        extra_defaults = config["extra-defaults"]
        try:
            mysqldump = MySQLDump(
                defaults_file, mysqldump_bin, extra_defaults=extra_defaults, mock_env=self.mock_env
            )
        except MySQLDumpError as exc:
            raise BackupError(str(exc))
        except Exception as ex:  # pylint: disable=W0703
            # NOTE(review): swallowing this leaves 'mysqldump' unbound
            # below — confirm this branch is unreachable in practice
            LOG.warning(ex)
        mysqldump_version = ".".join([str(digit) for digit in mysqldump.version])
        LOG.info("mysqldump version %s", mysqldump_version)
        options = collect_mysqldump_options(config, mysqldump, self.client)
        validate_mysqldump_options(mysqldump, options)
        os.mkdir(os.path.join(self.target_directory, "backup_data"))
        if (
            self.config["compression"]["method"] != "none"
            and self.config["compression"]["level"] > 0
        ):
            try:
                # only the file extension is needed here
                _, ext = lookup_compression(self.config["compression"]["method"])
            except OSError as exc:
                raise BackupError(
                    "Unable to load compression method '%s': %s"
                    % (self.config["compression"]["method"], exc)
                )
            LOG.info(
                "Using %s compression level %d with args %s",
                self.config["compression"]["method"],
                self.config["compression"]["level"],
                self.config["compression"]["options"],
            )
        else:
            LOG.info("Not compressing mysqldump output")
            ext = ""
        try:
            start(
                mysqldump=mysqldump,
                schema=self.schema,
                lock_method=config["lock-method"],
                file_per_database=config["file-per-database"],
                open_stream=self._open_stream,
                compression_ext=ext,
                arg_per_database=config["arg-per-database"],
            )
        except MySQLDumpError as exc:
            raise BackupError(str(exc))

    def _open_stream(self, path, mode, method=None):
        """Open a stream through the holland compression api, relative to
        this instance's target directory
        """
        path = str(os.path.join(self.target_directory, "backup_data", path))
        # copy so the override below never mutates the live config
        config = deepcopy(self.config["compression"])
        if method:
            config["method"] = method
        stream = open_stream(path, mode, **config)
        return stream

    def info(self):
        """Summarize information about this backup"""
        import textwrap

        return (
            textwrap.dedent(
                """
        lock-method = %s
        file-per-database = %s

        Options used:
        flush-logs = %s
        flush-privileges = %s
        routines = %s
        events = %s

        Schema Filters:
        databases = %s
        exclude-databases = %s
        tables = %s
        exclude-tables = %s
        """
            ).strip()
            % (
                self.config["mysqldump"]["lock-method"],
                self.config["mysqldump"]["file-per-database"] and "yes" or "no",
                self.config["mysqldump"]["flush-logs"],
                self.config["mysqldump"]["flush-privileges"],
                self.config["mysqldump"]["dump-routines"],
                self.config["mysqldump"]["dump-events"],
                ",".join(self.config["mysqldump"]["databases"]),
                ",".join(self.config["mysqldump"]["exclude-databases"]),
                ",".join(self.config["mysqldump"]["tables"]),
                ",".join(self.config["mysqldump"]["exclude-tables"]),
            )
        )
class MySQLDumpPlugin(object):
    """MySQLDump Backup Plugin interface for Holland"""

    # configspec validated against self.config on instantiation
    CONFIGSPEC = CONFIGSPEC

    def __init__(self, name, config, target_directory, dry_run=False):
        """Initialize the plugin.

        :param name: backupset name
        :param config: holland config object
        :param target_directory: directory backup files are written to
        :param dry_run: when True, avoid real side effects
        :raises ValidationError: if config fails CONFIGSPEC validation
        """
        self.name = name
        self.config = config
        self.target_directory = target_directory
        self.dry_run = dry_run
        self.config.validate_config(self.CONFIGSPEC)  # -> ValidationError
        # Setup a discovery shell to find schema items
        # This will iterate over items during the estimate
        # or backup phase, which will call schema.refresh()
        self.schema = MySQLSchema()
        config = self.config["mysqldump"]
        self.schema.add_database_filter(include_glob(*config["databases"]))
        self.schema.add_database_filter(
            exclude_glob(*config["exclude-databases"]))
        self.schema.add_table_filter(include_glob_qualified(*config["tables"]))
        self.schema.add_table_filter(
            exclude_glob_qualified(*config["exclude-tables"]))
        self.schema.add_engine_filter(include_glob(*config["engines"]))
        self.schema.add_engine_filter(exclude_glob(*config["exclude-engines"]))
        self.mysql_config = build_mysql_config(self.config["mysql:client"])
        self.client = connect(self.mysql_config["client"])

    def estimate_backup_size(self):
        """Estimate the size of the backup this plugin will generate.

        :returns: estimated size in bytes (float)
        :raises BackupError: on an invalid estimate-method or MySQL error
        """
        LOG.info("Estimating size of mysqldump backup")
        estimate_method = self.config["mysqldump"]["estimate-method"]
        if estimate_method.startswith("const:"):
            try:
                # strip the 'const:' prefix and parse a literal size
                return parse_size(estimate_method[6:])
            # BUG FIX: was Python 2 'except ValueError, exc' syntax
            except ValueError as exc:
                raise BackupError(str(exc))
        if estimate_method != "plugin":
            raise BackupError("Invalid estimate-method '%s'" % estimate_method)
        try:
            db_iter = DatabaseIterator(self.client)
            tbl_iter = MetadataTableIterator(self.client)
            try:
                self.client.connect()
                self.schema.refresh(db_iter=db_iter, tbl_iter=tbl_iter)
            # BUG FIX: was Python 2 'except MySQLError, exc' syntax
            except MySQLError as exc:
                LOG.error("Failed to estimate backup size")
                LOG.error("[%d] %s", *exc.args)
                raise BackupError("MySQL Error [%d] %s" % exc.args)
            # float(...) for consistency with the sibling revisions of
            # this method elsewhere in the file
            return float(sum([db.size for db in self.schema.databases]))
        finally:
            # BUG FIX: the outer try had no handler/finalizer (SyntaxError);
            # always release the connection, matching sibling revisions.
            self.client.disconnect()