def check_database_dump(client, filepath):
    """
    Validate a database dump file by checking that its last line carries
    the mysqldump completion marker "-- Dump completed on".
    Exits the program when the marker is missing; skipped entirely unless
    the 'check_dump' option is enabled.
    :param client: String
    :param filepath: String
    :return:
    """
    if not system.config['check_dump']:
        return
    _last_line = mode.run_command(
        f'{helper.get_command(client, "tail")} -n 1 {filepath}',
        client,
        True,
        skip_dry_run=True
    )
    # No output (e.g. dry run) — nothing to validate
    if not _last_line:
        return
    if "-- Dump completed on" in _last_line:
        output.message(
            output.host_to_subject(client),
            'Dump file is valid',
            verbose_only=True
        )
    else:
        sys.exit(
            output.message(
                output.Subject.ERROR,
                'Dump file is corrupted',
                do_print=False
            )
        )
def truncate_tables():
    """
    Truncate the configured tables on the target system before the import.
    Wildcard entries ('*') are expanded against the existing target tables.
    :return: String
    """
    # Workaround for config naming
    if 'truncate_table' in system.config:
        system.config['truncate_tables'] = system.config['truncate_table']
    if 'truncate_tables' not in system.config:
        return
    output.message(
        output.Subject.TARGET,
        'Truncating tables before import',
        True
    )
    for _table in system.config['truncate_tables']:
        if '*' in _table:
            # Expand the wildcard against the actual tables on the target
            _wildcard_tables = get_database_tables_like(mode.Client.TARGET,
                                                        _table.replace('*', '%'))
            for _wildcard_table in _wildcard_tables or []:
                _truncate_table(_wildcard_table)
        else:
            _truncate_table(_table)


def _truncate_table(table):
    """
    Run the truncate statement for a single table on the target client.
    :param table: String
    :return:
    """
    # NOTE(review): 'TRUNCATE TABLE IF EXISTS' is not standard MySQL syntax —
    # kept byte-identical to preserve behavior, but verify against the target DBMS
    _sql_command = f'TRUNCATE TABLE IF EXISTS {table}'
    run_database_command(mode.Client.TARGET, _sql_command, True)
def check_configuration(client):
    """
    Verify the Drupal database configuration via Drush and store the
    parsed credentials in the client configuration.
    :param client: String
    :return:
    """
    _root = system.config[client]['path']
    _drush = helper.get_command(client, "drush")
    # Check Drush version
    _version_output = mode.run_command(
        f'{_drush} status --fields=drush-version --format=string '
        f'-r {_root}',
        client,
        True
    )
    output.message(
        output.host_to_subject(client),
        f'Drush version: {_version_output}',
        True
    )
    # Query the database connection details as a machine-readable payload
    _status_output = mode.run_command(
        f'{_drush} core-status --pipe '
        f'--fields=db-hostname,db-username,db-password,db-name,db-port '
        f'-r {_root}',
        client,
        True
    )
    system.config[client]['db'] = parse_database_credentials(
        json.loads(_status_output)
    )
def run_script(client=None, script='before'):
    """
    Execute a configured script hook (e.g. 'before', 'after', 'error')
    for the given client; defaults to the local client.
    :param client: String
    :param script: String
    :return:
    """
    if client is None:
        _config = system.config
        _subject = output.Subject.LOCAL
        client = mode.Client.LOCAL
    else:
        _config = system.config[client]
        _subject = output.host_to_subject(client)
    if 'scripts' not in _config:
        return
    if script in _config['scripts']:
        output.message(
            _subject,
            # Bug fix: report which script hook runs (previously printed the
            # client name, which the subject prefix already conveys)
            f'Running script {script}',
            True
        )
        mode.run_command(
            _config['scripts'][script],
            client
        )
def automatic_type_detection():
    """
    Detect the framework type by the provided path using the default mapping.
    Skips detection when a type or database credentials are already configured.
    :return:
    """
    if 'type' in system.config or 'db' in system.config['origin'] or 'db' in system.config[
        'target']:
        return
    # Renamed from 'type'/'file' to avoid shadowing the builtins
    _type = None
    _file = None
    for _client in [mode.Client.ORIGIN, mode.Client.TARGET]:
        if 'path' in system.config[_client]:
            _file = helper.get_file_from_path(system.config[_client]['path'])
            for _key, _files in mapping.items():
                if _file in _files:
                    _type = _key
    if _type:
        output.message(
            output.Subject.LOCAL,
            f'Automatic framework type detection '
            f'{output.CliFormat.BLACK}{_file}{output.CliFormat.ENDC}',
            verbose_only=True
        )
        system.config['type'] = _type
def load_parser(client, parser):
    """
    Load the framework parser and validate the database configuration,
    establishing the SSH connection first when the client is remote.
    :param client:
    :param parser:
    :return:
    """
    _path = system.config[client]['path']
    output.message(
        output.host_to_subject(client),
        f'Checking database configuration {output.CliFormat.BLACK}{_path}{output.CliFormat.ENDC}',
        True
    )
    _is_origin = client == mode.Client.ORIGIN
    _is_remote = mode.is_origin_remote() if _is_origin else mode.is_target_remote()
    if _is_remote:
        if _is_origin:
            remote_client.load_ssh_client_origin()
        else:
            remote_client.load_ssh_client_target()
    else:
        helper.run_script(client, 'before')
    # Check only if database configuration is a file
    if not helper.check_file_exists(client, _path) and _path[-1] != '/':
        sys.exit(
            output.message(
                output.Subject.ERROR,
                f'Database configuration for {client} not found: {_path}',
                False
            )
        )
    parser.check_configuration(client)
def get_password_by_user(client):
    """
    Prompt the user for the SSH password, re-asking until a non-empty
    value is entered.
    :param client: String
    :return: String password
    """
    def _prompt():
        # output.message with do_print=False renders the prompt text only
        return getpass.getpass(
            output.message(
                output.Subject.INFO,
                'SSH password ' + helper.get_ssh_host_name(client, True) + ': ',
                False
            )
        )

    _password = _prompt()
    while _password.strip() == '':
        output.message(
            output.Subject.WARNING,
            'Password seems to be empty. Please enter a valid password.',
            True
        )
        _password = _prompt()
    return _password
def remove_temporary_data_dir():
    """
    Remove the temporary data directory used for storing database dump
    files, if it exists.
    :return:
    """
    if not os.path.exists(system.default_local_sync_path):
        return
    output.message(
        output.Subject.LOCAL,
        'Cleaning up',
        True
    )
    shutil.rmtree(system.default_local_sync_path)
def print_footer():
    """
    Print the console footer after a successful file synchronisation.
    :return:
    """
    output.message(
        output.Subject.INFO,
        'Successfully synchronized files',
        True,
        True
    )
def prepare_target_database_dump():
    """
    Unpack the transferred .tar.gz database dump on the target system.
    :return:
    """
    output.message(output.Subject.TARGET, 'Extracting database dump', True)
    _dump_dir = helper.get_dump_dir(mode.Client.TARGET)
    _extract_command = (
        helper.get_command('target', 'tar') + ' xzf ' + _dump_dir +
        database_utility.database_dump_file_name + '.tar.gz -C ' +
        _dump_dir + ' > /dev/null'
    )
    mode.run_command(
        _extract_command,
        mode.Client.TARGET,
        skip_dry_run=True
    )
def remove_temporary_dir():
    """
    Remove the temporary directory used during the proxy file transfer,
    if it exists.
    :return:
    """
    # Removed needless 'global temp_data_dir': the name is only read here,
    # never assigned, so the global statement had no effect
    if os.path.exists(temp_data_dir):
        shutil.rmtree(temp_data_dir)
        output.message(
            output.Subject.LOCAL,
            'Cleaning up',
            True
        )
def check_rsync_version():
    """
    Determine and report the locally installed rsync version.
    :return:
    """
    _version = parse_version(
        mode.run_command(
            'rsync --version',
            mode.Client.LOCAL,
            True
        )
    )
    output.message(
        output.Subject.LOCAL,
        f'rsync version {_version}'
    )
def transfer_files():
    """
    Transfer the configured files between the clients, honoring the
    selected sync mode (proxy, remote-to-remote or direct).
    :return:
    """
    if 'files' not in system.config:
        # Bug fix: this warning was previously a bare f-string expression
        # with no effect, so the user never saw it
        output.message(
            output.Subject.WARNING,
            'No file sync configuration provided',
            True
        )
        return
    for config in system.config['files']['config']:
        output.message(
            output.Subject.INFO,
            'Starting rsync file transfer'
        )
        if 'exclude' not in config:
            config['exclude'] = []
        if mode.get_sync_mode() == mode.SyncMode.PROXY:
            # Proxy mode: Transferring from origin to local and from local to target
            utility.generate_temp_dir_name()
            helper.check_and_create_dump_dir(mode.Client.LOCAL, utility.temp_data_dir)
            synchronize(
                origin_path=config[mode.Client.ORIGIN],
                target_path=utility.temp_data_dir,
                exclude=config['exclude'],
                pseudo_client=mode.Client.ORIGIN
            )
            synchronize(
                origin_path=f'{utility.temp_data_dir}/*',
                target_path=config[mode.Client.TARGET],
                exclude=config['exclude'],
                pseudo_client=mode.Client.TARGET
            )
            utility.remove_temporary_dir()
        elif mode.get_sync_mode() == mode.SyncMode.SYNC_REMOTE:
            # Remote-to-remote: run rsync on the origin host, forced remote
            synchronize(
                origin_path=config[mode.Client.ORIGIN],
                target_path=config[mode.Client.TARGET],
                exclude=config['exclude'],
                client=mode.Client.ORIGIN,
                force_remote=True
            )
        else:
            synchronize(
                origin_path=config[mode.Client.ORIGIN],
                target_path=config[mode.Client.TARGET],
                exclude=config['exclude']
            )
def remove_target_database_dump():
    """
    Remove the database dump files from the target system, optionally
    copying the dump to the local sync path first when 'keep_dump' is set.
    :return:
    """
    _file_path = helper.get_dump_dir(mode.Client.TARGET) + database_utility.database_dump_file_name
    #
    # Move dump to specified directory
    #
    if system.config['keep_dump']:
        helper.create_local_temporary_data_dir()
        _keep_dump_path = system.default_local_sync_path + database_utility.database_dump_file_name
        mode.run_command(
            helper.get_command('target', 'cp') + ' ' + _file_path + ' ' + _keep_dump_path,
            mode.Client.TARGET
        )
        output.message(
            output.Subject.INFO,
            f'Database dump file is saved to: {_keep_dump_path}',
            True,
            True
        )
    #
    # Clean up
    #
    # Skip removal in dump/import-only modes — the files are the result there
    if not mode.is_dump() and not mode.is_import():
        output.message(
            output.Subject.TARGET,
            'Cleaning up',
            True
        )
        # Dry run: announce the step but touch nothing
        if system.config['dry_run']:
            return
        if mode.is_target_remote():
            # Remove both the raw dump and its archive over SFTP
            sftp = remote_client.ssh_client_target.open_sftp()
            sftp.remove(_file_path)
            sftp.remove(f'{_file_path}.tar.gz')
            sftp.close()
        else:
            # Local target: guard each removal since either file may be absent
            if os.path.isfile(_file_path):
                os.remove(_file_path)
            if os.path.isfile(f'{_file_path}.tar.gz'):
                os.remove(f'{_file_path}.tar.gz')
def read_stats(stats):
    """
    Parse the rsync stats output and print a short summary of the
    transferred size.
    :param stats: String
    :return:
    """
    if system.config['verbose']:
        print(f'{output.Subject.DEBUG}{output.CliFormat.BLACK}{stats}{output.CliFormat.ENDC}')
    _file_size = parse_string(stats, r'Total transferred file size:\s*([\d.]+[MKG]?)')
    if not _file_size:
        return
    output.message(
        output.Subject.INFO,
        f'Status: {unit_converter(_file_size[0])} transferred'
    )
def parse_database_credentials(db_credentials):
    """
    Parse database credentials from the Drush status payload into the
    internally used dictionary format.
    Expected embedded URL format:
    DATABASE_URL=mysql://db-user:1234@db-host:3306/db-name
    :param db_credentials: Dictionary
    :return: Dictionary
    """
    db_credentials = str(db_credentials).replace('\\n\'', '')
    _matches = re.findall(r"\/{2}(.+):(.+)@(.+):(\d+)\/(.+)", db_credentials)
    # Bug fix: guard the no-match case — the previous unconditional [0]
    # access raised an IndexError before the credential check could run
    if not _matches or len(_matches[0]) != 5:
        sys.exit(
            output.message(
                output.Subject.ERROR,
                'Mismatch of expected database credentials',
                False
            )
        )
    db_credentials = _matches[0]
    _db_config = {
        'name': db_credentials[4],
        'host': db_credentials[2],
        'password': db_credentials[1],
        'port': db_credentials[3],
        'user': db_credentials[0],
    }
    return _db_config
def check_updates():
    """
    Check PyPI for a newer version of the db_sync_tool and print a hint
    when one is available. Best effort: any failure is silently ignored.
    :return:
    """
    try:
        response = requests.get(f'{info.__pypi_package_url__}/json')
        latest_version = response.json()['info']['version']
        if semantic_version.Version(info.__version__) < semantic_version.Version(latest_version):
            output.message(
                output.Subject.WARNING,
                f'A new version {output.CliFormat.BOLD}v{latest_version}{output.CliFormat.ENDC} is '
                f'available for the db-sync-tool: {info.__pypi_package_url__}',
                True
            )
    except Exception:
        # Bug fix: the previous 'finally: return' swallowed *every* exception,
        # including SystemExit and KeyboardInterrupt. Catching Exception keeps
        # the best-effort behavior without masking interpreter exits.
        return
def reverse_hosts():
    """
    Swap the origin and target host configuration when the reverse
    option is enabled.
    :return:
    """
    if not config['reverse']:
        return
    # Tuple swap instead of the temporary-variable dance
    config[mode.Client.ORIGIN], config[mode.Client.TARGET] = \
        config[mode.Client.TARGET], config[mode.Client.ORIGIN]
    output.message(
        output.Subject.INFO,
        'Reverse origin and target hosts',
        True
    )
def read_stats(stats):
    """
    Parse the rsync stats output and report the number of transferred
    files and the transferred size.
    :param stats: String
    :return:
    """
    if system.config['verbose']:
        print(f'{output.Subject.DEBUG}{output.CliFormat.BLACK}{stats}{output.CliFormat.ENDC}')
    _file_number = parse_string(stats, r'Number of regular files transferred:\s*([\d.]+)')
    _file_size = parse_string(stats, r'Total transferred file size:\s*([\d.]+[MKG]?)')
    # Only report when both values could be extracted
    if not (_file_number and _file_size):
        return
    output.message(
        output.Subject.INFO,
        f'Status: {_file_number[0]} file(s) transferred {output.CliFormat.BLACK}({_file_size[0]}Bytes){output.CliFormat.ENDC}'
    )
def check_authorizations():
    """
    Check the SSH authorization for both clients when sshpass is in use,
    or suggest installing sshpass when multiple password prompts are to
    be expected (proxy mode or several file transfer configs).
    :return:
    """
    if system.config['use_sshpass']:
        # When using sshpass, check for passwords
        system.check_authorization(mode.Client.ORIGIN)
        system.check_authorization(mode.Client.TARGET)
    elif 'ssh_key' not in system.config[mode.Client.ORIGIN] and \
            'ssh_key' not in system.config[mode.Client.TARGET] and \
            (mode.get_sync_mode() == mode.SyncMode.PROXY or
             len(system.config['files']['config']) > 1):
        # Suggest to install sshpass
        output.message(
            output.Subject.INFO,
            f'Suggestion: Install {output.CliFormat.BOLD}sshpass{output.CliFormat.ENDC} to '
            f'avoid multiple input of ssh passwords'
        )
def create_origin_database_dump():
    """
    Create the database dump on the origin system via mysqldump and hand
    it over to compression. Skipped entirely in import mode.
    :return:
    """
    if not mode.is_import():
        parser.get_database_configuration(mode.Client.ORIGIN)
        database_utility.generate_database_dump_filename()
        helper.check_and_create_dump_dir(mode.Client.ORIGIN,
                                         helper.get_dump_dir(mode.Client.ORIGIN))
        _dump_file_path = helper.get_dump_dir(
            mode.Client.ORIGIN) + database_utility.database_dump_file_name
        _database_version = database_utility.get_database_version(mode.Client.ORIGIN)
        output.message(
            output.Subject.ORIGIN,
            f'Creating database dump {output.CliFormat.BLACK}{_dump_file_path}{output.CliFormat.ENDC}',
            True
        )
        _mysqldump_options = '--no-tablespaces '
        # Remove --no-tablespaces option for mysql < 5.6
        # (the flag is unknown to older mysqldump binaries)
        # @ToDo: Better option handling
        if not _database_version is None:
            if _database_version[0] == database_utility.DatabaseSystem.MYSQL and \
                    semantic_version.Version(_database_version[1]) < \
                    semantic_version.Version('5.6.0'):
                _mysqldump_options = ''
        # Run mysql dump command, e.g.
        # mysqldump --no-tablespaces -u'db' -p'db' -h'db1' -P'3306' 'db' > /tmp/_db_08-10-2021_07-00.sql
        mode.run_command(
            helper.get_command(mode.Client.ORIGIN, 'mysqldump') + ' ' +
            _mysqldump_options +
            database_utility.generate_mysql_credentials(mode.Client.ORIGIN) + ' \'' +
            system.config[mode.Client.ORIGIN]['db']['name'] + '\' ' +
            database_utility.generate_ignore_database_tables() +
            database_utility.get_database_tables() +
            ' > ' + _dump_file_path,
            mode.Client.ORIGIN,
            skip_dry_run=True
        )
        # Validate the dump and report the number of exported tables
        database_utility.check_database_dump(mode.Client.ORIGIN, _dump_file_path)
        database_utility.count_tables(mode.Client.ORIGIN, _dump_file_path)
        prepare_origin_database_dump()
def prepare_origin_database_dump():
    """
    Compress the origin database dump file into a .tar.gz archive.
    :return:
    """
    output.message(
        output.Subject.ORIGIN,
        'Compressing database dump',
        True
    )
    _dump_dir = helper.get_dump_dir(mode.Client.ORIGIN)
    _dump_name = database_utility.database_dump_file_name
    mode.run_command(
        helper.get_command(mode.Client.ORIGIN, 'tar') + ' cfvz ' + _dump_dir +
        _dump_name + '.tar.gz -C ' + _dump_dir + ' ' + _dump_name + ' > /dev/null',
        mode.Client.ORIGIN,
        skip_dry_run=True
    )
def check_sshpass_version():
    """
    Probe for an installed sshpass binary; when available, report its
    version and enable sshpass usage in the configuration.
    :return: True when sshpass is available, otherwise None
    """
    _raw_version = mode.run_command(
        'sshpass -V',
        mode.Client.LOCAL,
        force_output=True,
        allow_fail=True
    )
    _version = parse_version(_raw_version)
    # sshpass not installed (or unparsable output) — leave config untouched
    if not _version:
        return
    output.message(
        output.Subject.LOCAL,
        f'sshpass version {_version}'
    )
    system.config['use_sshpass'] = True
    return True
def count_tables(client, filepath):
    """
    Count the 'CREATE TABLE' statements in the dump file to report how
    many tables were exported.
    :param client: String
    :param filepath: String
    :return:
    """
    _reference = 'CREATE TABLE'
    _count = mode.run_command(
        f'{helper.get_command(client, "grep")} -ao "{_reference}" {filepath} | wc -l | xargs',
        client,
        True,
        skip_dry_run=True
    )
    # No output (e.g. dry run) — nothing to report
    if not _count:
        return
    output.message(
        output.host_to_subject(client),
        f'{int(_count)} table(s) exported'
    )
def run_ssh_command(command, ssh_client=None, client=None):
    """
    Run a command over the given SSH connection and handle its exit status.
    A non-zero exit with stderr output triggers the 'error' script hook and
    terminates the program; stderr with a zero exit is printed as a warning.
    :param command: String
    :param ssh_client: paramiko client; defaults to the origin connection
    :param client: String
    :return: stdout stream of the executed command
    """
    # Bug fix: the previous signature bound remote_client.ssh_client_origin
    # at import time, so a connection (re)established later was never used.
    # Resolve the default lazily at call time instead.
    if ssh_client is None:
        ssh_client = remote_client.ssh_client_origin
    stdin, stdout, stderr = ssh_client.exec_command(command)
    exit_status = stdout.channel.recv_exit_status()
    err = stderr.read().decode()
    if err and exit_status != 0:
        helper.run_script(client=client, script='error')
        sys.exit(output.message(output.Subject.ERROR, err, False))
    elif err:
        output.message(output.Subject.WARNING, err, True)
    return stdout
def put_origin_database_dump(origin_path):
    """
    Upload the compressed origin database dump to the target system,
    either via rsync or via SFTP.
    :param origin_path: String — local directory containing the dump archive
    :return:
    """
    # In proxy mode the upload originates from the local machine
    if mode.get_sync_mode() == mode.SyncMode.PROXY:
        _subject = output.Subject.LOCAL
    else:
        _subject = output.Subject.ORIGIN
    output.message(
        _subject,
        'Uploading database dump',
        True
    )
    helper.check_and_create_dump_dir(mode.Client.TARGET, helper.get_dump_dir(mode.Client.TARGET))
    if not system.config['dry_run']:
        _localpath = origin_path + database_utility.database_dump_file_name + '.tar.gz'
        _remotepath = helper.get_dump_dir(mode.Client.TARGET) + '/'
        if system.config['use_rsync']:
            rsync.run_rsync_command(
                remote_client=mode.Client.TARGET,
                origin_path=_localpath,
                target_path=_remotepath,
                target_ssh=system.config[mode.Client.TARGET]['user'] + '@' +
                           system.config[mode.Client.TARGET]['host']
            )
        else:
            #
            # Download speed problems
            # https://github.com/paramiko/paramiko/issues/60
            #
            # NOTE(review): 'client' here is presumably the remote client
            # module holding ssh_client_target — verify against the imports
            sftp = get_sftp_client(client.ssh_client_target)
            # upload_status is used as the progress callback for sftp.put
            sftp.put(origin_path + database_utility.database_dump_file_name + '.tar.gz',
                     helper.get_dump_dir(mode.Client.TARGET) +
                     database_utility.database_dump_file_name + '.tar.gz',
                     upload_status)
            sftp.close()
        # Terminate the progress output line unless muted
        if not system.config['mute']:
            print('')
def check_sync_mode():
    """
    Determine the sync mode from the given configuration, store it in the
    module-level 'sync_mode', print it, and run the protection check.
    :return: String subject
    """
    global sync_mode
    _description = ''
    # Human-readable description per mode, shown next to the mode name
    _modes = {
        SyncMode.RECEIVER: '(REMOTE ➔ LOCAL)',
        SyncMode.SENDER: '(LOCAL ➔ REMOTE)',
        SyncMode.PROXY: '(REMOTE ➔ LOCAL ➔ REMOTE)',
        SyncMode.DUMP_LOCAL: '(LOCAL, ONLY EXPORT)',
        SyncMode.DUMP_REMOTE: '(REMOTE, ONLY EXPORT)',
        SyncMode.IMPORT_LOCAL: '(REMOTE, ONLY IMPORT)',
        SyncMode.IMPORT_REMOTE: '(LOCAL, ONLY IMPORT)',
        SyncMode.SYNC_LOCAL: '(LOCAL ➔ LOCAL)',
        SyncMode.SYNC_REMOTE: '(REMOTE ➔ REMOTE)'
    }
    # Dispatch to SyncMode.is_<mode>() for each candidate; the last matching
    # mode wins, so later entries take precedence over earlier ones
    for _mode, _desc in _modes.items():
        if getattr(SyncMode, 'is_' + _mode.lower())():
            sync_mode = _mode
            _description = _desc
    if is_import():
        output.message(
            output.Subject.INFO,
            f'Import file {output.CliFormat.BLACK}{system.config["import"]}{output.CliFormat.ENDC}',
            True
        )
    # Remember whether origin and target point at the same host
    system.config['is_same_client'] = SyncMode.is_same_host()
    output.message(
        output.Subject.INFO,
        f'Sync mode: {sync_mode} {output.CliFormat.BLACK}{_description}{output.CliFormat.ENDC}',
        True
    )
    check_for_protection()
def check_for_protection():
    """
    Abort the run when the target host is marked as protected against
    the import of a database dump.
    :return: Boolean
    """
    _affected_modes = (SyncMode.RECEIVER, SyncMode.SENDER, SyncMode.PROXY,
                       SyncMode.SYNC_LOCAL, SyncMode.SYNC_REMOTE,
                       SyncMode.IMPORT_LOCAL, SyncMode.IMPORT_REMOTE)
    # Export-only modes never write to the target, so protection is irrelevant
    if sync_mode not in _affected_modes:
        return
    if 'protect' not in system.config[Client.TARGET]:
        return
    _host = helper.get_ssh_host_name(Client.TARGET)
    sys.exit(output.message(
        output.Subject.ERROR,
        f'The host {_host} is protected against the import of a database dump. Please '
        'check synchronisation target or adjust the host configuration.',
        False))
def remove_origin_database_dump(keep_compressed_file=False):
    """
    Remove the database dump files from the origin system; optionally keep
    the compressed archive (dump mode) and apply the retention policy.
    :param keep_compressed_file: Boolean
    :return:
    """
    output.message(
        output.Subject.ORIGIN,
        'Cleaning up',
        True
    )
    # Dry run: announce the step but touch nothing
    if system.config['dry_run']:
        return
    _file_path = helper.get_dump_dir(mode.Client.ORIGIN) + database_utility.database_dump_file_name
    if mode.is_origin_remote():
        # Remove over SFTP; the archive survives when it is to be kept
        sftp = remote_client.ssh_client_origin.open_sftp()
        sftp.remove(_file_path)
        if not keep_compressed_file:
            sftp.remove(f'{_file_path}.tar.gz')
        sftp.close()
    else:
        os.remove(_file_path)
        if not keep_compressed_file:
            os.remove(f'{_file_path}.tar.gz')
    if keep_compressed_file:
        # Apply the configured retention: prune old dumps beyond 'keep_dumps'
        if 'keep_dumps' in system.config[mode.Client.ORIGIN]:
            helper.clean_up_dump_dir(mode.Client.ORIGIN,
                                     helper.get_dump_dir(mode.Client.ORIGIN) + '*',
                                     system.config[mode.Client.ORIGIN]['keep_dumps'])
        output.message(
            output.Subject.INFO,
            f'Database dump file is saved to: {_file_path}.tar.gz',
            True,
            True
        )
def print_footer():
    """
    Print the closing console message matching the executed operation
    (dry run, import, synchronisation or dump creation).
    :return:
    """
    if system.config['dry_run']:
        _message = 'Successfully executed dry run'
    elif mode.is_import():
        _message = 'Successfully imported database dump'
    elif not system.config['keep_dump'] and not system.config['is_same_client']:
        _message = 'Successfully synchronized databases'
    else:
        _message = 'Successfully created database dump'
    output.message(
        output.Subject.INFO,
        _message,
        True,
        True
    )