def test_run_bash_with_nonactionable_error(remote_connection):
    response = libs_pb2.RunBashResponse()
    na_error = libs_pb2.NonActionableLibraryError()
    response.error.non_actionable_error.CopyFrom(na_error)

    with mock.patch('dlpx.virtualization._engine.libs.run_bash',
                    return_value=response, create=True):
        with pytest.raises(SystemExit):
            libs.run_bash(remote_connection, 'command')
def test_run_bash_bad_command(remote_connection):
    # Set the command to be an int instead of a string.
    command = 10
    variables = None
    use_login_shell = False

    with pytest.raises(IncorrectArgumentTypeError) as err_info:
        libs.run_bash(remote_connection, command, variables, use_login_shell)

    assert err_info.value.message == (
        "The function run_bash's argument 'command' was"
        " type 'int' but should be of type 'basestring'.")
def test_run_bash_bad_use_login_shell(remote_connection):
    command = 'command'
    variables = None
    # Set use_login_shell to be a string instead of a bool.
    use_login_shell = 'False'

    with pytest.raises(IncorrectArgumentTypeError) as err_info:
        libs.run_bash(remote_connection, command, variables, use_login_shell)

    assert err_info.value.message == (
        "The function run_bash's argument 'use_login_shell' was"
        " type 'str' but should be of type 'bool' if defined.")
def test_run_bash_variables_not_dict(remote_connection):
    command = 'command'
    # Set the variables to be a string instead of a dict.
    variables = 'not a dict'
    use_login_shell = False

    with pytest.raises(IncorrectArgumentTypeError) as err_info:
        libs.run_bash(remote_connection, command, variables, use_login_shell)

    assert err_info.value.message == (
        "The function run_bash's argument 'variables' was"
        " type 'str' but should be of"
        " type 'dict of basestring:basestring' if defined.")
def test_run_bash_bad_remote_connection():
    # Set the connection to be a string instead of a RemoteConnection.
    connection = 'BadRemoteConnection'
    command = 'command'
    variables = None
    use_login_shell = False

    with pytest.raises(IncorrectArgumentTypeError) as err_info:
        libs.run_bash(connection, command, variables, use_login_shell)

    assert err_info.value.message == (
        "The function run_bash's argument 'remote_connection' was"
        " type 'str' but should be of"
        " class 'dlpx.virtualization.common._common_classes.RemoteConnection'.")
def test_run_bash_with_actionable_error(remote_connection):
    expected_id = 15
    expected_message = 'Some message'

    response = libs_pb2.RunBashResponse()
    response.error.actionable_error.id = expected_id
    response.error.actionable_error.message = expected_message

    with mock.patch('dlpx.virtualization._engine.libs.run_bash',
                    return_value=response, create=True):
        with pytest.raises(LibraryError) as err_info:
            libs.run_bash(remote_connection, 'command')

    assert err_info.value._id == expected_id
    assert err_info.value.message == expected_message
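# The tests above all consume a `remote_connection` pytest fixture that is not
# shown in this section. A minimal sketch of such a fixture follows, assuming
# the RemoteConnection/RemoteEnvironment/RemoteHost/RemoteUser classes exposed
# by dlpx.virtualization.common; the names, references, and paths used here
# are illustrative placeholders, not values from the original test suite.
import pytest

from dlpx.virtualization.common import (RemoteConnection, RemoteEnvironment,
                                        RemoteHost, RemoteUser)


@pytest.fixture
def remote_connection():
    # Hypothetical host/environment/user values; only the object shape matters
    # to the run_bash tests above.
    host = RemoteHost(name='host-1', reference='UNIX_HOST-1',
                      binary_path='/usr/bin', scratch_path='/var/tmp')
    environment = RemoteEnvironment(name='env-1',
                                    reference='UNIX_HOST_ENVIRONMENT-1',
                                    host=host)
    user = RemoteUser(name='user-1', reference='HOST_USER-1')
    return RemoteConnection(environment=environment, user=user)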
def get_port_status(port, connection):
    myport = port
    status = Status.INACTIVE
    output = ""
    try:
        port_status_cmd = "ps -ef | grep -E \"[m]ysqld .*--port=" + myport + "\" | grep -v grep"
        result = libs.run_bash(connection, port_status_cmd, variables=None,
                               check=True)
        output = result.stdout.strip()
    except Exception as err:
        logger.debug("Port Check Failed: " + err.message)
    if output == "":
        # Fall back to matching the short-form port argument. Note the closing
        # quote for the grep pattern, which was missing in the original.
        port_status_cmd = "ps -ef | grep -E \"[m]ysqld .*-p.*" + myport + "\" | grep -v grep"
        try:
            result = libs.run_bash(connection, port_status_cmd,
                                   variables=None, check=True)
            output = result.stdout.strip()
        except Exception as err:
            logger.debug("Port Check Failed for second cmd: " + err.message)
    logger.debug("Port Status Response >")
    logger.debug(output)
    if output == "":
        logger.debug("MySQL DB is NOT RUNNING at Port:" + myport)
    else:
        logger.debug("A process is running at Port.")
        output = re.sub("\s\s+", " ", output)
        process_data = output.split(" ")
        process_id = process_data[1]
        bin_dir = process_data[7]
        data_dir = ""
        data_dir_attr = process_data[10]
        data_dir_list = data_dir_attr.split("=")
        logger.debug("process_id: " + process_id + " bin_dir: " + bin_dir +
                     " data_dir_attr: " + data_dir_attr)
        if len(data_dir_list) > 1:
            logger.debug("data_dir_list length is greater than 1")
            data_dir = data_dir_list[1]
            logger.debug("data_dir: " + data_dir)
        if (process_id != "" and bin_dir != "" and data_dir != ""):
            logger.debug("MySQL DB is running at PORT %s with PROCESS ID: %s"
                         % (myport, process_id))
            status = Status.ACTIVE
    return status
def test_run_bash_check_true_success_exitcode(remote_connection):
    expected_run_bash_response = libs_pb2.RunBashResponse()
    expected_run_bash_response.return_value.exit_code = 0
    expected_run_bash_response.return_value.stdout = "stdout"
    expected_run_bash_response.return_value.stderr = "stderr"

    expected_command = "command"
    expected_variables = None
    expected_use_login_shell = False

    def mock_run_bash(actual_run_bash_request):
        assert actual_run_bash_request.command == expected_command
        assert (actual_run_bash_request.use_login_shell ==
                expected_use_login_shell)
        assert (actual_run_bash_request.remote_connection.environment.name ==
                remote_connection.environment.name)
        assert (actual_run_bash_request.remote_connection.environment.
                reference == remote_connection.environment.reference)
        return expected_run_bash_response

    with mock.patch("dlpx.virtualization._engine.libs.run_bash",
                    side_effect=mock_run_bash, create=True):
        actual_run_bash_result = libs.run_bash(remote_connection,
                                               expected_command,
                                               expected_variables,
                                               expected_use_login_shell,
                                               check=True)

    assert (actual_run_bash_result.exit_code ==
            expected_run_bash_response.return_value.exit_code)
    assert (actual_run_bash_result.stdout ==
            expected_run_bash_response.return_value.stdout)
    assert (actual_run_bash_result.stderr ==
            expected_run_bash_response.return_value.stderr)
def start_mysql(installPath, baseDir, mountPath, port, serverId, connection):
    # This function will start the MySQL Database if it is not already running.
    logger.debug("Commence > start_mysql()")
    port_stat = get_port_status(port, connection)
    logger.debug("Port Status > " + port_stat.name)
    environment_vars = {}
    if (port_stat == Status.INACTIVE):
        logger.debug("DB is not running. Starting the MySQL DB")
        start_cmd = get_start_cmd(installPath, baseDir, mountPath, port,
                                  serverId)
        logger.debug("Startup Command: {}".format(start_cmd))
        result = libs.run_bash(connection, start_cmd, environment_vars,
                               check=True)
        output = result.stdout.strip()
        error = result.stderr.strip()
        exit_code = result.exit_code
        if exit_code != 0:
            logger.debug("There was an error trying to start the DB : " + error)
            raise MySQLStartupException(error)
        else:
            logger.debug("Output: " + output)
            time.sleep(25)
            if (Status.ACTIVE == get_port_status(port, connection)):
                logger.debug("DB Started Successfully")
            else:
                logger.debug("There was an issue starting the DB")
    else:
        logger.debug(" DB is already Running.")
def execute_bash(source_connection, command_name, callback_func=None,
                 environment_vars=None):
    """
    :param source_connection: Connection object for the source environment
    :param command_name: Command to look up in the dictionary of bash commands
    :param callback_func: Optional callback used while handling the exit code
    :param environment_vars: Environment variables required to execute the command
    :return: list of [command output, error string, exit code]
    """
    if source_connection is None:
        raise exceptions.PluginScriptError("Connection object cannot be empty")

    result = libs.run_bash(source_connection, command=command_name,
                           variables=environment_vars, use_login_shell=True)

    # Strip each part of the result to remove leading and trailing whitespace.
    output = result.stdout.strip()
    error = result.stderr.strip()
    exit_code = result.exit_code

    # Verify the exit code of the executed command. 0 means the command ran
    # successfully; any other code is a failure. For failed cases we decide
    # whether the plugin should abort or whether execution can continue.
    _handle_exit_code(exit_code, error, output, callback_func)
    return [output, error, exit_code]
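# A minimal usage sketch for execute_bash above. The helper name, command
# string, and environment variables are hypothetical; only the call shape and
# the returned [output, error, exit_code] list come from the snippet itself.
def get_source_os_release(source_connection):
    # Hypothetical helper: run a simple command on the source host and return
    # its stdout, aborting on a non-zero exit code.
    output, error, exit_code = execute_bash(
        source_connection,
        command_name="cat /etc/redhat-release",
        environment_vars={"LC_ALL": "C"})
    if exit_code != 0:
        raise exceptions.PluginScriptError(
            "Could not read OS release: {}".format(error))
    return output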
def start_slave(connection, installPath, port, connString, username, pwd,
                hostIp):
    start_slave_cmd = ""
    environment_vars = {}
    if (installPath == "" or port == "" or
            (connString == "" and username == "") or pwd == "" or
            hostIp == ""):
        logger.debug("One of the required parameters is empty. Cannot continue.")
        raise Exception("One of the required params for MySQL Connection is empty")
    else:
        start_slave_cmd = CommandFactory.start_replication(
            connection, installPath, port, connString, username, pwd, hostIp)
        logger.debug("Connection String with {}".format(start_slave_cmd))
        try:
            logger.debug("Starting Slave")
            result = libs.run_bash(connection, start_slave_cmd,
                                   environment_vars, check=True)
            output = result.stdout.strip()
            logger.debug("Start Slave Output: {}".format(output))
        except Exception as err:
            logger.debug("Starting Slave Failed: " + err.message)
            raise err
def linked_status(staged_source, repository, source_config):
    logger.debug("Checking status of Staging DB")
    library_script = pkgutil.get_data('resources', 'library.sh')
    binary_path = staged_source.staged_connection.environment.host.binary_path
    logger.debug(" Staging Port >>: " + staged_source.parameters.staging_port)
    environment_vars = {
        "DLPX_LIBRARY_SOURCE": library_script,
        "STAGINGPORT": staged_source.parameters.staging_port,
        "DLPX_BIN": binary_path
    }
    status_script = pkgutil.get_data('resources', 'statusStaged.sh')
    result = libs.run_bash(staged_source.staged_connection, status_script,
                           environment_vars, check=True)
    output = result.stdout.strip()
    error = result.stderr.strip()
    exit_code = result.exit_code
    if exit_code != 0:
        logger.debug("Exception while checking Staging DB Status : " + error)
        # ignore status?
    else:
        logger.debug("Staging Status Check: " + output)
    if output == "ACTIVE":
        return Status.ACTIVE
    else:
        return Status.INACTIVE
def stop_slave(connection, installPath, port, connString, username, pwd,
               hostIp):
    stop_slave_cmd = ""
    environment_vars = {}
    if (installPath == "" or port == "" or
            (connString == "" and username == "") or pwd == "" or
            hostIp == ""):
        logger.debug("One of the required parameters is empty. Cannot continue.")
        raise Exception("One of the required params for MySQL Connection is empty")
    else:
        stop_slave_cmd = CommandFactory.stop_replication(
            connection, installPath, port, connString, username, pwd, hostIp)
        logger.debug("Connection String with {}".format(stop_slave_cmd))
        try:
            logger.debug("Stopping Replication")
            result = libs.run_bash(connection, stop_slave_cmd,
                                   environment_vars, check=True)
            _output = result.stdout.strip()
            _bashErrMsg = result.stderr.strip()
            _bashErrCode = result.exit_code
            if _bashErrCode != 0:
                logger.debug("Stopping Slave was not successful")
                raise Exception(_bashErrMsg)
            logger.debug("Stop Slave Response: {}".format(_output))
        except Exception as err:
            logger.debug("Stop Replication Failed Due To: " + err.message)
            logger.debug("Ignoring and continuing")
def test_run_bash(remote_connection):
    expected_run_bash_response = libs_pb2.RunBashResponse()
    expected_run_bash_response.return_value.exit_code = 0
    expected_run_bash_response.return_value.stdout = 'stdout'
    expected_run_bash_response.return_value.stderr = 'stderr'

    expected_command = 'command'
    expected_variables = None
    expected_use_login_shell = False

    def mock_run_bash(actual_run_bash_request):
        assert actual_run_bash_request.command == expected_command
        assert (actual_run_bash_request.use_login_shell ==
                expected_use_login_shell)
        actual_environment = (
            actual_run_bash_request.remote_connection.environment)
        assert actual_environment.name == remote_connection.environment.name
        assert (actual_environment.reference ==
                remote_connection.environment.reference)
        return expected_run_bash_response

    with mock.patch('dlpx.virtualization._engine.libs.run_bash',
                    side_effect=mock_run_bash, create=True):
        actual_run_bash_result = libs.run_bash(remote_connection,
                                               expected_command,
                                               expected_variables,
                                               expected_use_login_shell)

    expected = expected_run_bash_response.return_value
    assert actual_run_bash_result.exit_code == expected.exit_code
    assert actual_run_bash_result.stdout == expected.stdout
    assert actual_run_bash_result.stderr == expected.stderr
def stop_mysql(port, connection, baseDir, vdbConn, pwd):
    # This function will stop a running MySQL Database.
    logger.debug("Commence > stop_mysql()")
    port_stat = get_port_status(port, connection)
    logger.debug("Port Status > " + port_stat.name)
    environment_vars = {}
    if (port_stat == Status.ACTIVE):
        logger.debug("DB is Running. Shutting down.")
        shutdown_cmd = "%s/bin/mysqladmin %s'%s' --protocol=TCP --port=%s shutdown" % (
            baseDir, vdbConn, pwd, port)
        logger.debug("Shutdown Command: {}".format(shutdown_cmd))
        result = libs.run_bash(connection, shutdown_cmd, environment_vars,
                               check=True)
        output = result.stdout.strip()
        error = result.stderr.strip()
        exit_code = result.exit_code
        if exit_code != 0:
            logger.debug("There was an error trying to shutdown the database : "
                         + error)
            raise MySQLShutdownException(error)
        else:
            logger.debug("Output: " + output)
            time.sleep(25)
            if (Status.ACTIVE == get_port_status(port, connection)):
                logger.debug("KILL")
                # TODO: Kill Process
    else:
        logger.debug(" DB is already down.")
def test_run_bash_bad_variables(remote_connection):
    command = 'command'
    #
    # Set the value inside the variables dict to be an int instead of a
    # string.
    #
    variables = {'test0': 'yes', 'test1': 10}
    use_login_shell = False

    with pytest.raises(IncorrectArgumentTypeError) as err_info:
        libs.run_bash(remote_connection, command, variables, use_login_shell)

    assert err_info.value.message == (
        "The function run_bash's argument 'variables' was"
        " a dict of {type 'str':type 'int', type 'str':type 'str'}"
        " but should be of"
        " type 'dict of basestring:basestring' if defined.")
def test_run_bash_with_check_true_failed_exitcode(remote_connection):
    expected_message = ('The script failed with exit code 1.'
                        ' stdout : stdout and stderr : stderr')

    response = libs_pb2.RunBashResponse()
    response.return_value.exit_code = 1
    response.return_value.stdout = "stdout"
    response.return_value.stderr = "stderr"

    with mock.patch("dlpx.virtualization._engine.libs.run_bash",
                    return_value=response, create=True):
        with pytest.raises(PluginScriptError) as info:
            response = libs.run_bash(remote_connection, "test_command",
                                     check=True)

    assert info.value.message == expected_message
def repository_discovery(source_connection):
    common.add_debug_heading_block("Start Repository Discovery")
    helpers._record_hook("repository_discovery", source_connection)
    env = {
        "DELPHIX_DIR": source_connection.environment.host.binary_path,
        "DLPX_PLUGIN_WORKFLOW": 'repoDiscovery',
        "DLPX_TOOLKIT_WORKFLOW": 'repoDiscovery'
    }
    logger.debug("env: {}".format(env))
    repositories = []
    script_content = pkgutil.get_data('resources', 'discover_repos.sh')
    # logger.debug("discover_repos_repository_script: {}".format(script_content))
    res = libs.run_bash(source_connection, script_content, env)
    logger.debug("res = {}".format(res))
    logger.debug("res.stdout = {}".format(res.stdout))
    repodiscovery = json.loads(res.stdout)
    logger.debug(repodiscovery)
    for item in repodiscovery:
        logger.debug("item:{}".format(item))
        repository = RepositoryDefinition(
            version=item['version'],
            mongo_install_path=item['mongo_install_path'],
            mongo_shell_path=item['mongo_shell_path'],
            pretty_name=item['pretty_name'])
        repositories.append(repository)

    # # Write library file for future use
    # env = {
    #     "DELPHIX_DIR": source_connection.environment.host.binary_path,
    #     "DLPX_PLUGIN_WORKFLOW": 'sourceConfigDiscovery',
    #     "MONGO_LIBRARY_SOURCE": pkgutil.get_data('resources', 'library.sh')
    # }
    # script_content = pkgutil.get_data('resources', 'write_library.sh')
    # res = libs.run_bash(source_connection, script_content, env)
    # data = json.loads(res.stdout)
    # logger.debug(data)

    common.add_debug_heading_block("End Repository Discovery")
    return repositories
def repository_discovery(source_connection):
    # This is an object generated from the repositoryDefinition schema.
    # In order to use it locally you must run the 'build -g' command provided
    # by the SDK tools from the plugin's root directory.
    repositories = []
    binary_path = source_connection.environment.host.binary_path
    library_script = pkgutil.get_data('resources', 'library.sh')
    environment_vars = {
        "DLPX_LIBRARY_SOURCE": library_script,
        "DLPX_BIN": binary_path
    }
    find_mysql_binary = pkgutil.get_data('resources', 'repoDiscovery.sh')
    result = libs.run_bash(source_connection, find_mysql_binary,
                           environment_vars, check=True)
    output = result.stdout.strip()
    error = result.stderr.strip()
    exit_code = result.exit_code
    if exit_code != 0:
        logger.debug("Error is : " + error)
        raise RepositoryDiscoveryError("Exception while discovering:" + error)
    else:
        logger.debug("Output: " + output)
        # Process the repository JSON and read the keys and values.
        repos_js = json.loads(output)
        for repo_js in repos_js:
            # logger.debug("Adding repository:" + repo_js + " to list")
            path = repo_js['installPath']
            version = repo_js['version']
            prettyName = repo_js['prettyName'].split("/bin")[1]
            repository = RepositoryDefinition(name=prettyName,
                                              install_path=path,
                                              version=version)
            repositories.append(repository)
        logger.debug("output:" + output)
    return repositories
def _record_hook(hook_name, connection):
    logger.info('Running %s', hook_name)
    libs.run_bash(
        connection, "echo '{}:{}' >> {}".format(
            datetime.now(), "Running {}".format(hook_name),
            posixpath.join("/var/tmp", "pythonStaging.log")))
def start_staging(staged_source, repository, source_config):
    logger.debug("plugin-operations > Starting Staged DB")
    binary_path = staged_source.staged_connection.environment.host.binary_path
    staging_ip = "localhost"
    stagingConn = build_lua_connect_string(
        staged_source.parameters.source_user, staging_ip)
    logger.debug("Binary Path in start_staging:" + binary_path)

    if staged_source.parameters.d_source_type == "Replication":
        logger.debug("dSourceType is Replication")
        library_script = pkgutil.get_data('resources', 'library.sh')
        mount_path = staged_source.parameters.mount_path
        if staged_source.parameters.log_sync == True:
            log_sync = "true"
        else:
            log_sync = "false"
        logger.debug("LogSync> " + log_sync)
        environment_vars = {
            "DLPX_LIBRARY_SOURCE": library_script,
            "DLPX_BIN": binary_path,
            "MYSQLD": repository.install_path,
            "STAGINGSERVERID": staged_source.parameters.server_id,
            "STAGINGPORT": staged_source.parameters.staging_port,
            "STAGINGCONN": stagingConn,
            "STAGINGPASS": staged_source.parameters.staging_pass,
            "LOGSYNC": log_sync,
            "STAGINGDATADIR": mount_path
        }
        start_staging_script = pkgutil.get_data('resources', 'startStagedDB.sh')
        result = libs.run_bash(staged_source.staged_connection,
                               start_staging_script, environment_vars,
                               check=True)
        output = result.stdout.strip()
        error = result.stderr.strip()
        logger.debug("output:" + output)
        logger.debug("error:" + error)
        exit_code = result.exit_code
        if exit_code != 0:
            logger.debug("There was an error> " + error)
            raise LinkingException("Exception while Starting Stage:" + error)
        else:
            logger.debug("Start Staging - Successful")
    # elif staged_source.parameters.d_source_type == "Manual Backup Ingestion":
    else:
        logger.debug("dSourceType is Manual Backup Ingestion.")
        library_script = pkgutil.get_data('resources', 'library.sh')
        mount_path = staged_source.parameters.mount_path
        environment_vars = {
            "DLPX_LIBRARY_SOURCE": library_script,
            "DLPX_BIN": binary_path,
            "MYSQLD": repository.install_path,
            "STAGINGSERVERID": staged_source.parameters.server_id,
            "STAGINGPORT": staged_source.parameters.staging_port,
            "STAGINGCONN": stagingConn,
            "STAGINGPASS": staged_source.parameters.staging_pass,
            "STAGINGDATADIR": mount_path
        }
        start_staging_script = pkgutil.get_data('resources', 'startStagedDB.sh')
        result = libs.run_bash(staged_source.staged_connection,
                               start_staging_script, environment_vars,
                               check=True)
        output = result.stdout.strip()
        error = result.stderr.strip()
        logger.debug("output:" + output)
        logger.debug("error:" + error)
        exit_code = result.exit_code
        if exit_code != 0:
            logger.debug("Error is : " + error)
            raise LinkingException("Exception while Starting Stage:" + error)
        else:
            logger.debug("Start Staging - Successful")
def linked_pre_snapshot(staged_source, repository, source_config,
                        snapshot_parameters):
    logger.debug("plugin_operations.linked_pre_snapshot > Start ")
    dSourceType = staged_source.parameters.d_source_type
    staging_ip = "localhost"

    # Check if performing re-sync
    if int(snapshot_parameters.resync) == 1:
        # Setting defaults
        logsync = "true"
        resync_staging_user = "******"
        is_delphix_managed = True
        backup_options = ""

        if not staged_source.parameters.log_sync:
            logsync = "false"

        # Are backups managed by Delphix?
        backup_path = staged_source.parameters.backup_path
        if backup_path is not None and backup_path.strip() != "":
            is_delphix_managed = False
        logger.debug("Delphix Managed Backups? " + str(is_delphix_managed))

        # Building backup options
        if is_delphix_managed:
            dbs = utils.parse_db_list(staged_source.parameters.database_list)
            logger.debug("List of databases to backup >" + dbs)
            backup_options = utils.create_backup_options(logsync, dbs, logger)
            logger.debug("Backup Options for restore.sh >" + backup_options)

        # Create & copy backup file to the staging host
        logger.debug("Resync found > Performing Resync > Starting with Backup")
        binary_path = staged_source.staged_connection.environment.host.binary_path
        library_script = pkgutil.get_data('resources', 'library.sh')
        mount_path = staged_source.parameters.mount_path

        # Building connection strings for hybrid code
        sourceConn = build_lua_connect_string(
            staged_source.parameters.source_user,
            staged_source.parameters.sourceip)
        stagingConn = build_lua_connect_string(resync_staging_user, staging_ip)
        logger.debug("source_connection > " + sourceConn)
        logger.debug("staging_connection > " + stagingConn)

        if dSourceType == "Replication":
            logger.debug("Inside linked_pre_snapshot() > resync() > "
                         "dSourceType is Replication")
            environment_vars = {
                "DLPX_LIBRARY_SOURCE": library_script,
                "DLPX_BIN": binary_path,
                "MYSQLD": repository.install_path,
                "MYSQLVER": repository.version,
                "SOURCEDATADIR": source_config.data_dir,
                "SOURCEBASEDIR": source_config.base_dir,
                "SOURCEPORT": source_config.port,
                "SOURCEIP": staged_source.parameters.sourceip,
                "BACKUP_PATH": staged_source.parameters.backup_path,
                "SOURCECONN": sourceConn,
                "SOURCEUSER": staged_source.parameters.source_user,
                "SOURCEPASS": staged_source.parameters.source_pass,
                "REPLICATION_USER": staged_source.parameters.replication_user,
                "REPLICATION_PASS": staged_source.parameters.replication_pass,
                "STAGINGSERVERID": staged_source.parameters.server_id,
                "STAGINGPORT": staged_source.parameters.staging_port,
                "STAGINGCONN": stagingConn,
                "STAGINGPASS": staged_source.parameters.staging_pass,
                "LOGSYNC": logsync,
                "STAGINGDATADIR": mount_path,
                "STAGINGHOSTIP": staging_ip,
                "BACKUP_OPTIONS": backup_options,
            }
            logger.debug("Taking Source BackUp")
            backup_script = pkgutil.get_data('resources', 'restore.sh')
            result = libs.run_bash(staged_source.staged_connection,
                                   backup_script, environment_vars,
                                   check=False)
            output = result.stdout.strip()
            std_err = result.stderr.strip()
            exit_code = result.exit_code
            if exit_code != 0:
                logger.debug("Error is : " + std_err)
                logger.debug("There was an error while taking source backup."
                             " Check error.log for details.")
                err = utils.process_exit_codes(exit_code, "DBLINK", std_err)
                logger.error(err)
                raise err
            else:
                logger.debug("Pre-Snapshot/Restore successful " + output)

            logger.debug("Restoring Backup to Stage")
            restore_script = pkgutil.get_data('resources', 'restore_stage.sh')
            result = libs.run_bash(staged_source.staged_connection,
                                   restore_script, environment_vars,
                                   check=False)
            logger.debug(result)
            output = result.stdout.strip()
            std_err = result.stderr.strip()
            exit_code = result.exit_code
            logger.debug(std_err)
            logger.debug(exit_code)
            if exit_code == 0:
                logger.debug("Creation of Staging DB(Pre-Snapshot) successful."
                             + output)
            else:
                logger.debug("There was an error while creating the staging DB."
                             " Check error.log for details.")
                err = utils.process_exit_codes(exit_code, "DBLINK", std_err)
                logger.error(err)
                raise err

        elif dSourceType == "Manual Backup Ingestion":
            logger.debug("dSourceType is Manual Backup Ingestion")
            logger.debug("Inside linked_pre_snapshot() > resync() > "
                         "dSourceType is Manual Backup Ingestion")
            environment_vars = {
                "DLPX_LIBRARY_SOURCE": library_script,
                "DLPX_BIN": binary_path,
                "MYSQLD": repository.install_path,
                "MYSQLVER": repository.version,
                "SOURCEUSER": staged_source.parameters.source_user,
                "SOURCEPASS": staged_source.parameters.source_pass,
                "STAGINGSERVERID": staged_source.parameters.server_id,
                "STAGINGPORT": staged_source.parameters.staging_port,
                "STAGINGCONN": stagingConn,
                "STAGINGPASS": staged_source.parameters.staging_pass,
                "STAGINGDATADIR": mount_path,
                "SOURCEBASEDIR": source_config.base_dir,
                "STAGINGHOSTIP": staging_ip
            }
            logger.debug("Initializing Seed DB")
            restore_script = pkgutil.get_data('resources', 'restore_stage_bi.sh')
            result = libs.run_bash(staged_source.staged_connection,
                                   restore_script, environment_vars,
                                   check=False)
            output = result.stdout.strip()
            std_err = result.stderr.strip()
            exit_code = result.exit_code
            if exit_code != 0:
                logger.debug("There was an error while creating the seed"
                             " database. Check error logs for more info.")
                logger.error("Error is : " + std_err)
                err = utils.process_exit_codes(exit_code, "DBLINK", std_err)
                logger.error(err)
                raise err
            else:
                logger.debug("Pre-Snapshot/Restore_DB successful " + output)

        else:
            # Simple Tablespace Option is hidden from the plugin. This section
            # will not get triggered until the option gets added back in
            # schema.json.
            logger.debug("dSourceType is Simple Tablespace Copy")
            environment_vars = {
                "DLPX_LIBRARY_SOURCE": library_script,
                "DLPX_BIN": binary_path,
                "MYSQLD": repository.install_path,
                "MYSQLVER": repository.version,
                "SOURCEDATADIR": source_config.data_dir,
                "SOURCEBASEDIR": source_config.base_dir,
                "SOURCEPORT": source_config.port,
                "SOURCEIP": staged_source.parameters.sourceip,
                "SOURCECONN": sourceConn,
                "SOURCEUSER": staged_source.parameters.source_user,
                "SOURCEPASS": staged_source.parameters.source_pass,
                "SOURCEDATABASE": staged_source.parameters.source_database,
                "SOURCETABLES": staged_source.parameters.source_tables,
                "STAGINGSERVERID": staged_source.parameters.server_id,
                "STAGINGPORT": staged_source.parameters.staging_port,
                "STAGINGCONN": stagingConn,
                "STAGINGPASS": staged_source.parameters.staging_pass,
                "SCPUSER": staged_source.parameters.scp_user,
                "SCPPASS": staged_source.parameters.scp_pass,
                "STAGINGDATADIR": mount_path,
                "STAGINGHOSTIP": staging_ip,
                "STAGINGBASEDIR": staged_source.parameters.staging_basedir
            }
            restore_script = pkgutil.get_data('resources', 'restore_stage_si.sh')
            result = libs.run_bash(staged_source.staged_connection,
                                   restore_script, environment_vars,
                                   check=True)
            output = result.stdout.strip()
            error = result.stderr.strip()
            exit_code = result.exit_code
            if exit_code != 0:
                logger.debug("There was an error while resync : " + error)
                raise LinkingException("Exception in pre-snapshot/restore_db:"
                                       + error)
            else:
                logger.debug("Pre-Snapshot/Restore_DB successful " + output)

        # Simple Tablespace Option is hidden from the plugin.
        # This section will not get triggered until the option gets added back
        # in schema.json
        # if dSourceType == "Simple (Tablespace Backup)":
        #     library_script = pkgutil.get_data('resources', 'library.sh')
        #     binary_path = staged_source.staged_connection.environment.host.binary_path
        #     mount_path = staged_source.parameters.mount_path
        #     # Building connection strings for hybrid code
        #     sourceConn = build_lua_connect_string(staged_source.parameters.source_user, staged_source.parameters.sourceip)
        #     stagingConn = build_lua_connect_string(staged_source.parameters.staging_user, staging_ip)
        #     logger.debug("PreSnapshot for Simple Tablespace Copy")
        #     environment_vars = {
        #         "DLPX_LIBRARY_SOURCE": library_script,
        #         "DLPX_BIN": binary_path,
        #         "MYSQLD": repository.install_path,
        #         "MYSQLVER": repository.version,
        #         "SOURCEDATADIR": source_config.data_dir,
        #         "SOURCEBASEDIR": source_config.base_dir,
        #         "SOURCEPORT": source_config.port,
        #         "SOURCEIP": staged_source.parameters.sourceip,
        #         "SOURCECONN": sourceConn,
        #         "SOURCEUSER": staged_source.parameters.source_user,
        #         "SOURCEPASS": staged_source.parameters.source_pass,
        #         "SOURCEDATABASE": staged_source.parameters.source_database,
        #         "SOURCETABLES": staged_source.parameters.source_tables,
        #         "STAGINGSERVERID": staged_source.parameters.server_id,
        #         "STAGINGPORT": staged_source.parameters.staging_port,
        #         "STAGINGCONN": stagingConn,
        #         "STAGINGPASS": staged_source.parameters.staging_pass,
        #         "SCPUSER": staged_source.parameters.scp_user,
        #         "SCPPASS": staged_source.parameters.scp_pass,
        #         "STAGINGDATADIR": mount_path,
        #         "STAGINGHOSTIP": staging_ip,
        #         "STAGINGBASEDIR": staged_source.parameters.staging_basedir
        #     }
        #     tbsp_script = pkgutil.get_data('resources', 'tablespaces.sh')
        #     result = libs.run_bash(staged_source.staged_connection, tbsp_script, environment_vars, check=True)
        #     output = result.stdout.strip()
        #     error = result.stderr.strip()
        #     exit_code = result.exit_code
        #     if exit_code != 0:
        #         logger.debug("There was an error while copying tablespace : " + error)
        #         raise LinkingException("Exception in pre-snapshot/tablespace copy:" + error)
        #     else:
        #         logger.debug("Pre-Snapshot/Restore_DB successful " + output)

    # Stopping DB prior to snapshot
    stop_staging(staged_source, repository, source_config)
    logger.debug(" linked_pre_snapshot > End ")
def stop_staging(staged_source, repository, source_config):
    logger.debug("plugin_operations.stop_staging > Stopping Staged DB")
    staging_ip = "localhost"
    stagingConn = build_lua_connect_string(
        staged_source.parameters.source_user, staging_ip)

    if staged_source.parameters.d_source_type == "Replication":
        library_script = pkgutil.get_data('resources', 'library.sh')
        binary_path = staged_source.staged_connection.environment.host.binary_path
        environment_vars = {
            "DLPX_LIBRARY_SOURCE": library_script,
            "DLPX_BIN": binary_path,
            "MYSQLD": repository.install_path,
            "STAGINGPORT": staged_source.parameters.staging_port,
            "STAGINGCONN": stagingConn,
            "STAGINGPASS": staged_source.parameters.staging_pass
        }
        stop_staging_script = pkgutil.get_data('resources', 'stopStagedDB.sh')
        result = libs.run_bash(staged_source.staged_connection,
                               stop_staging_script, environment_vars,
                               check=True)
        output = result.stdout.strip()
        error = result.stderr.strip()
        exit_code = result.exit_code
        if exit_code != 0:
            logger.debug("Error is : " + error)
            raise LinkingException("Exception while stopping staging:" + error)
        else:
            logger.debug("Stop Staging - Successful: " + output)
    elif staged_source.parameters.d_source_type == "Manual Backup Ingestion":
        logger.debug("plugin_operations.stop_staging > Manual Backup Ingestion")
        library_script = pkgutil.get_data('resources', 'library.sh')
        binary_path = staged_source.staged_connection.environment.host.binary_path
        environment_vars = {
            "DLPX_LIBRARY_SOURCE": library_script,
            "DLPX_BIN": binary_path,
            "MYSQLD": repository.install_path,
            "STAGINGPORT": staged_source.parameters.staging_port,
            "STAGINGCONN": stagingConn,
            "STAGINGPASS": staged_source.parameters.staging_pass,
        }
        stop_staging_script = pkgutil.get_data('resources', 'stopStagedDB.sh')
        result = libs.run_bash(staged_source.staged_connection,
                               stop_staging_script, environment_vars,
                               check=True)
        output = result.stdout.strip()
        error = result.stderr.strip()
        exit_code = result.exit_code
        if exit_code != 0:
            logger.debug("Error is : " + error)
            raise LinkingException("Exception while stopping staging:" + error)
        else:
            logger.debug("Stop Staging - Successful: " + output)
    else:
        logger.debug("dSourceType is Simple Tablespace Backup. ")
        library_script = pkgutil.get_data('resources', 'library.sh')
        binary_path = staged_source.staged_connection.environment.host.binary_path
        environment_vars = {
            "DLPX_LIBRARY_SOURCE": library_script,
            "DLPX_BIN": binary_path,
            "MYSQLD": repository.install_path,
            "STAGINGPORT": staged_source.parameters.staging_port,
            "STAGINGCONN": stagingConn,
            "STAGINGPASS": staged_source.parameters.staging_pass
        }
        stop_staging_script = pkgutil.get_data('resources', 'stopStagedDB.sh')
        result = libs.run_bash(staged_source.staged_connection,
                               stop_staging_script, environment_vars,
                               check=True)
        output = result.stdout.strip()
        error = result.stderr.strip()
        exit_code = result.exit_code
        if exit_code != 0:
            logger.debug("Error is : " + error)
            raise LinkingException("Exception while stopping staging:" + error)
        else:
            logger.debug("Stop Staging - Successful: " + output)
def post_snapshot(repository, source_config, virtual_source):
    helpers._record_hook("virtual post snapshot", virtual_source.connection)
    logger.info("In Post snapshot...")
    virtual_source.mongo_install_path = repository.mongo_install_path
    virtual_source.mongo_shell_path = repository.mongo_shell_path

    # Define variables
    mount_path = virtual_source.parameters.mount_path
    start_portpool = virtual_source.parameters.start_portpool
    cfgfile = "{}/.delphix/.tgt_config.txt".format(mount_path)

    cmd = "cat {}|grep DSOURCE_TYPE|awk -F: '{{ print $2 }}'".format(cfgfile)
    d_source_type = common.execute_bash_cmd(virtual_source.connection, cmd, {})

    if d_source_type == "nonshardedsource":
        virtual_source.parameters.mongos_port = start_portpool
    mongos_port = virtual_source.parameters.mongos_port

    shard_count = 0
    if d_source_type == "shardedsource":
        cmd = "cat {}|grep SHARD_COUNT|awk -F: '{{ print $2 }}'".format(cfgfile)
        shard_count = common.execute_bash_cmd(virtual_source.connection, cmd, {})
        if not isinstance(shard_count, int):
            shard_count = int(shard_count)

    cmd = "cat {}|grep DSOURCE_TYPE|awk -F: '{{ print $2 }}'".format(cfgfile)
    d_source_type = common.execute_bash_cmd(virtual_source.connection, cmd, {})
    if d_source_type == "shardedsource":
        source_sharded = True
    else:
        source_sharded = False

    cmd = "cat {}|grep SOURCE_ENCRYPTED|awk -F: '{{ print $2 }}'".format(cfgfile)
    source_encrypted = common.execute_bash_cmd(virtual_source.connection, cmd, {})

    cmd = "cat {}|grep ENCRYPTION_METHOD|awk -F: '{{ print $2 }}'".format(cfgfile)
    encryption_method = common.execute_bash_cmd(virtual_source.connection, cmd, {})

    if encryption_method == "KeyFile":
        encryption_keyfile = ".delphix/.dlpx_enckeyfile"
    else:
        encryption_keyfile = ""
    logger.debug("encryption_keyfile = {}".format(encryption_keyfile))

    dateTimeObj = datetime.now()
    timestampStr = dateTimeObj.strftime("%m%d%Y-%H%M%S.%f")

    script_content = 'echo "$(uname):$(uname -p):$(cat /etc/redhat-release)"'
    res = libs.run_bash(virtual_source.connection, script_content)
    logger.debug("res = {}".format(res))
    output = res.stdout.strip().split(":")
    logger.debug("output = {}".format(output))

    snapshot = SnapshotDefinition(validate=False)
    snapshot.toolkit_version = _version.Version
    snapshot.timestamp = timestampStr
    snapshot.architecture = output[1]
    snapshot.os_type = output[0]
    snapshot.os_version = re.sub(r".*\s(\d)", r'\1', output[2]).split(" ")[0]
    snapshot.mongo_version = repository.version
    snapshot.delphix_mount = virtual_source.parameters.mount_path
    snapshot.storage_engine = "WiredTiger"
    snapshot.user_auth_mode = virtual_source.parameters.user_auth_mode
    snapshot.keyfile_path = virtual_source.parameters.keyfile_path
    snapshot.replica_set = "N/A"
    snapshot.journal_interval = virtual_source.parameters.journal_interval
    snapshot.oplog_size = virtual_source.parameters.oplog_size
    snapshot.d_source_type = d_source_type
    snapshot.append_db_path = "N/A"

    cmd = "cat {}|grep MONGO_DB_USER|awk -F: '{{ print $2 }}'".format(cfgfile)
    mongo_db_user = common.execute_bash_cmd(virtual_source.connection, cmd, {})
    snapshot.mongo_db_user = mongo_db_user
    snapshot.mongo_db_password = virtual_source.parameters.mongo_db_password

    logger.debug("source_sharded = {}".format(source_sharded))
    # logger.debug("source_sharded = {}".format(ast.literal_eval(source_sharded)))
    if source_sharded == 1 or source_sharded == True or source_sharded == "True":
        snapshot.source_sharded = True
    else:
        snapshot.source_sharded = False
    snapshot.shard_count = shard_count

    logger.debug("source_encrypted = {}".format(source_encrypted))
    # logger.debug("source_encrypted = {}".format(ast.literal_eval(source_encrypted)))
    if source_encrypted == 1 or source_encrypted == True or source_encrypted == "True":
        snapshot.source_encrypted = True
    else:
        snapshot.source_encrypted = False

    snapshot.cluster_auth_mode = virtual_source.parameters.cluster_auth_mode
    snapshot.encryption_method = encryption_method
    snapshot.encryption_keyfile = encryption_keyfile
    snapshot.kmip_params = []

    # Unlock Freeze
    # logger.debug("Virtual Post Snapshot - Unfreeze IO")
    # common.fsync_unlock_sharded_mongo(virtual_source, 'Virtual')
    # logger.debug("Virtual Post Snapshot - Unfreeze IO - done")

    mask_snap = copy.deepcopy(snapshot)
    mask_snap.mongo_db_password = '******'
    logger.debug("snapshot schema: {}".format(mask_snap))
    common.add_debug_heading_block("End Virtual Post Snapshot")
    return snapshot
def _set_stopped(connection, guid):
    libs.run_bash(connection, "rm -f /var/tmp/running-{}".format(guid))
def runbash(connection, command, environmentVars):
    logger.debug("operations.runbash() >>")
    return libs.run_bash(connection, command, variables=environmentVars,
                         check=True)
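# Brief usage sketch for the runbash wrapper above. The function name, mount
# path, and environment variable are placeholders; the result fields (stdout,
# stderr, exit_code) match how run_bash results are used throughout this
# section, and check=True inside runbash raises on a non-zero exit code.
def log_mount_usage(connection):
    # Hypothetical example: report disk usage of a mount point on the remote host.
    result = runbash(connection, "df -h /mnt/provision | tail -1",
                     {"LC_ALL": "C"})
    logger.debug("Mount usage: {}".format(result.stdout.strip()))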
def _set_running(connection, guid):
    libs.run_bash(
        connection,
        "echo {} >> /var/tmp/running-{}".format(random.random(), guid))
def configure(virtual_source, snapshot, repository):
    logger.debug("virtual.configure")
    binary_path = virtual_source.connection.environment.host.binary_path
    library_script = pkgutil.get_data('resources', 'library.sh')
    mount_path = virtual_source.mounts[0].mount_path
    vdbConn = build_lua_connect_string(virtual_source.parameters.vdb_user,
                                       "localhost")
    logger.debug("Mount Path:" + mount_path)
    logger.debug("Snapshot Settings:")
    logger.debug(snapshot)
    logger.debug("Snapshot_id" + snapshot.snapshot_id)
    logger.debug("Config Settings: ")
    config_settings_prov = virtual_source.parameters.config_settings_prov
    logger.debug(config_settings_prov)

    config_params = ""
    ###################################################################
    # TODO: Operation fails if there are config settings. Must revisit.
    ###################################################################
    if len(config_settings_prov) > 0:
        for config_setting in config_settings_prov:
            logger.debug("PropertyName")
            logger.debug(config_setting['propertyName'])
            logger.debug("Value")
            logger.debug(config_setting['value'])
            config_params += config_setting['propertyName']
            config_params += "="
            config_params += config_setting['value']
            config_params += "\n"
            logger.debug("config_params:" + config_params)
        logger.debug("config_params:" + config_params)
    ###################################################################

    environment_vars = {
        "DLPX_LIBRARY_SOURCE": library_script,
        "DLPX_DATA_DIRECTORY": mount_path,
        "DLPX_BIN": binary_path,
        "MYSQLD": repository.install_path,
        "MYSQLVER": repository.version,
        "VDBCONN": vdbConn,
        "VDBPASS": virtual_source.parameters.vdb_pass,
        "MYBASEDIR": virtual_source.parameters.base_dir,
        "PORT": virtual_source.parameters.port,
        "SERVERID": virtual_source.parameters.server_id,
        "MYCONFIG": config_params,
        # "STAGED_HOST": snapshot.snap_host,
        "STAGED_PORT": snapshot.snap_port,
        "STAGED_DATADIR": snapshot.snap_data_dir,
        "CONFIG_BASEDIR": snapshot.snap_base_dir,
        "STAGED_ROOT_PASS": snapshot.snap_pass,
        "STAGED_BACKUP": snapshot.snap_backup_path
    }
    configure_script = pkgutil.get_data('resources', 'provision.sh')
    result = libs.run_bash(virtual_source.connection, configure_script,
                           environment_vars, check=False)
    logger.debug(result)
    output = result.stdout.strip()
    std_err = result.stderr.strip()
    exit_code = result.exit_code
    if exit_code == 0:
        logger.debug("Pre-Snapshot/Restore_DB successful " + output)
    else:
        err = utils.process_exit_codes(exit_code, "PROVISION", std_err)
        logger.debug("There was an error while provisioning."
                     " Check error.log for details.")
        logger.error(err)
        raise err

    return SourceConfigDefinition(db_name=output,
                                  base_dir=virtual_source.parameters.base_dir,
                                  port=virtual_source.parameters.port,
                                  data_dir=mount_path)