import getpass
import os

# Config and ShellUtility are project-local helpers; their import lines are
# not part of this extract.


class Deploy(object):
    """Base class for a single deployment step. Subclasses override run()."""

    def __init__(self, ip_list, user_name, password):
        self.config = Config()
        self.ip_list = ip_list
        self.user_name = user_name
        self.password = password
        self.current_user_name = getpass.getuser()
        self.master_ip = self.config.get_value("spark_master_ip")
        self.master_hostname = self.config.get_value("master_hostname")
        self.local_password = self.config.get_value("local_password")
        self.shell_utility = ShellUtility(user_name, password,
                                          self.local_password)

    def run(self):
        pass

    def write_to_config_file(self, file_path, contents):
        # Write each entry in `contents` as one line of the target file.
        with open(file_path, "w+", encoding="utf-8") as config_file:
            for content in contents:
                config_file.write(content)
                config_file.write("\n")
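# Usage sketch (illustrative, not part of the original source): concrete
# steps are expected to subclass Deploy and override run(). The subclass
# name and the file path below are hypothetical.
class ExampleConfDeploy(Deploy):
    def run(self):
        # Render a small config file locally from values the base class
        # already pulled out of Config.
        self.write_to_config_file(
            "/tmp/example.conf",
            ["master_ip={}".format(self.master_ip),
             "master_hostname={}".format(self.master_hostname)])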
class DistributionFramework(object):
    def __init__(self):
        self.config = Config()
        self.framework_folder_root_path = self.config.get_value(
            "framework_folder_root_path")
        self.local_tar_files_folder_path = self.config.get_value(
            "local_tar_files_folder_path")
        self.shell_utility = ShellUtility()
        self.init_framework_environment()
        self.check_framework_environment()

    def init_framework_environment(self):
        self.shell_utility.call_shell_command("mkdir -p {}".format(
            self.local_tar_files_folder_path))

    def check_framework_environment(self):
        if not os.path.exists(
                self.framework_folder_root_path.format(
                    username=getpass.getuser())):
            raise FileNotFoundError("{} not found.".format(
                self.framework_folder_root_path))
        if not os.path.exists(self.local_tar_files_folder_path):
            raise FileNotFoundError("{} not found.".format(
                self.local_tar_files_folder_path))
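# Interface sketch (assumption, not the project's real code): Config and
# ShellUtility are defined elsewhere in the project; the classes in this
# extract rely only on the methods stubbed below.
class _ConfigStub(object):
    def get_value(self, key):
        """Return the configuration value stored under `key`."""
        raise NotImplementedError


class _ShellUtilityStub(object):
    # ShellUtility is constructed both with credentials (Deploy) and
    # without (DistributionFramework), so every argument must default.
    def __init__(self, user_name=None, password=None, local_password=None):
        self.user_name = user_name
        self.password = password
        self.local_password = local_password

    def call_shell_command(self, command):
        """Run `command` in a local shell."""
        raise NotImplementedError

    def run_remote_sudo_command(self, ip, command):
        """Run `command` with sudo on the remote host at `ip`."""
        raise NotImplementedError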
class FrameworkDeploy(object):
    def __init__(self, ip_list, machine_user_account, machine_user_password):
        self.ip_list = ip_list
        self.machine_user_account = machine_user_account
        self.machine_user_password = machine_user_password
        self.config = Config()

    def deploy(self):
        # The SSH step also covers the Spark master; the remaining steps use
        # the worker IP list as-is.
        ip_for_ssh_list = list(self.ip_list)
        ip_for_ssh_list.append(self.config.get_value("spark_master_ip"))
        print(ip_for_ssh_list)
        deploys = [
            SSHDeploy(ip_for_ssh_list, self.machine_user_account,
                      self.machine_user_password),
            EtcHostsDeploy(self.ip_list, self.machine_user_account,
                           self.machine_user_password),
            HadoopDeploy(self.ip_list, self.machine_user_account,
                         self.machine_user_password),
            SparkDeploy(self.ip_list, self.machine_user_account,
                        self.machine_user_password),
        ]
        for deploy in deploys:
            deploy.run()
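# Usage sketch (illustrative): wiring FrameworkDeploy up the same way the
# __main__ block of the EtcHostsDeploy module does, so the whole pipeline
# (SSH, /etc/hosts, Hadoop, Spark) runs against the configured machines.
def _example_full_deploy():
    config = Config()
    deploy = FrameworkDeploy(config.get_value("test_machines_ip_list"),
                             config.get_value("os_user_name"),
                             config.get_value("os_password"))
    deploy.deploy()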
command = "echo \"{}\" | sudo tee --append /etc/hosts".format( line) self.shell_utility.run_remote_sudo_command(ip, command) def generate_hosts_default_file_content(self): self.hosts_default_content = [ "127.0.0.1\tlocalhost", "::1\tip6-localhost ip6-loopback", "fe00::0\tip6-localnet", "ff00::0\tip6-mcastprefix", "ff02::1\tip6-allnodes", "ff02::2\tip6-allrouters" ] def generate_hosts_client_info_file_content(self): i = 1 self.hosts_client_info_content = [] print('-' * 50) print(self.ip_list) print('-' * 50) for ip in self.ip_list: self.hosts_client_info_content.append("{}\t{}".format( ip, "{}{}".format(self.client_name_prefix, i))) i = i + 1 if __name__ == "__main__": config = Config() ip_list = config.get_value("test_machines_ip_list") user_name = config.get_value("os_user_name") password = config.get_value("os_password") deploy = EtcHostsDeploy(ip_list, user_name, password) deploy.run()