def create_k8s_dashboard(self):
    """Deploy the kubernetes dashboard (service + deployment) to the cluster.

    Renders the dashboard service and deployment yaml templates with the
    cluster configuration, applies each with kubectl, and removes the
    generated files afterwards. The service apply is retried (the apiserver
    may still be settling); the deployment apply keeps the original
    single-attempt behavior.
    """
    self.logger.info(
        "Create kubernetes dashboard deployment for kuberentes cluster.")

    self.logger.info("Create dashboard service.")
    self._render_and_apply(
        "deployment/k8sPaiLibrary/template/dashboard-service.yaml.template",
        "dashboard-service.yaml",
        "Failed to create dashboard-service",
        retry_count=5)

    self.logger.info("Create dashboard deployment.")
    self._render_and_apply(
        "deployment/k8sPaiLibrary/template/dashboard-deployment.yaml.template",
        "dashboard-deployment.yaml",
        "Failed to create dashboard-deployment")

def _render_and_apply(self, template_path, output_name, error_message,
                      retry_count=None):
    """Render a yaml template, kubectl-apply it, and clean up.

    Renders *template_path* with the cluster configuration, writes the
    result to *output_name*, applies it with ``kubectl apply``, then
    deletes the generated file.

    When *retry_count* is given the apply is retried that many times on
    failure; otherwise a single attempt is made.
    """
    template_data = common.read_template(template_path)
    dict_map = {"clusterconfig": self.cluster_config['clusterinfo']}
    generated_data = common.generate_from_template_dict(
        template_data, dict_map)
    common.write_generated_file(generated_data, output_name)
    cmd = "kubectl apply --overwrite=true -f {0}".format(output_name)
    if retry_count is not None:
        common.execute_shell_retry(cmd, error_message, retry_count)
    else:
        common.execute_shell(cmd, error_message)
    # Generated manifest is a temp artifact; remove it once applied.
    os.remove(output_name)
def run(self):
    """Deploy kubernetes onto every configured node, then finish cluster setup.

    For each role in the remote-deployment config, prepares and executes the
    per-node deployment job (optionally cleaning the package and the remote
    host afterwards when ``self.clean_flag`` is set). Once all nodes are
    done, installs kubectl, waits for the apiserver to register resources,
    creates kube-proxy and the dashboard, and stores the node configuration
    in the ``pai-node-config`` configmap.
    """
    self.logger.warning(
        "Begin to deploy a new cluster to your machine or vm.")
    for role in self.cluster_config["remote_deployment"]:
        listname = self.cluster_config["remote_deployment"][role][
            "listname"]
        if listname not in self.cluster_config:
            continue
        # Loop-invariant per role: the job name used for prepare/execute/clean.
        job_name = "{0}-deployment".format(role)
        for node_key in self.cluster_config[listname]:
            node_config = self.cluster_config[listname][node_key]
            self.logger.info(
                "Begin to deploy k8s on host {0}, the node role is [ {1} ]"
                .format(node_config["hostip"], role))
            self.prepare_package(node_config, job_name)
            self.job_executer(node_config, job_name)
            if self.clean_flag:
                self.logger.info(
                    " package cleaner is working on the folder of {0}!".
                    format(node_config["hostip"]))
                self.delete_packege(node_config)
                self.logger.info(" package cleaner's work finished! ")
                self.logger.info(
                    " remote host cleaner is working on the host of {0}!".
                    format(node_config["hostip"]))
                self.remote_host_cleaner(node_config, job_name)
                self.logger.info(" remote host cleaning job finished! ")
    kubectl_install_instance = kubectl_install.kubectl_install(
        self.cluster_config)
    kubectl_install_instance.run()
    # Check that the apiserver answers and the api resources are registered.
    common.execute_shell_retry("kubectl api-resources",
                               "kubectl command failed!", 5)
    # Create kube-proxy only after the daemonset resource is registered.
    common.execute_shell_retry(
        "kubectl api-resources | grep -q daemonsets",
        "Controller manager hasn't create daemonset object!", 5)
    self.create_kube_proxy()
    self.create_k8s_dashboard()
    self.logger.info(
        "Update node configuration into configmap in the namespace [ kube-system ] as the name [ pai-node-config ] "
    )
    self.update_node_config()
    self.logger.info("The kubernetes deployment is finished!")
def create_kube_proxy(self):
    """Create the kube-proxy daemon for the kubernetes cluster.

    Renders the kube-proxy yaml template with the cluster configuration,
    applies it with kubectl (retrying on failure), and removes the
    generated file afterwards.
    """
    self.logger.info("Create kube-proxy daemon for kuberentes cluster.")
    template_path = "deployment/k8sPaiLibrary/template/kube-proxy.yaml.template"
    output_file = "kube-proxy.yaml"
    raw_template = common.read_template(template_path)
    rendered = common.generate_from_template_dict(
        raw_template, {"clusterconfig": self.cluster_config['clusterinfo']})
    common.write_generated_file(rendered, output_file)
    max_attempts = 5
    common.execute_shell_retry(
        "kubectl apply --overwrite=true -f kube-proxy.yaml",
        "Failed to create kube-proxy", max_attempts)
    # The rendered manifest is only needed for the apply; clean it up.
    os.remove(output_file)