def execute(self, parameters=None):
    """Install a Kubernetes worker node on this machine.

    Looks up the cluster join command previously stored on the NFS
    share for the chosen master node and runs it.

    Args:
        parameters: optional dict; may carry the master node IP under
            ``self.PARAMETER_MASTER_IP``. When absent, the IP is read
            interactively from stdin.

    Returns:
        bool: True when the join command ran, False on failure.
    """
    logger = XprLogger()
    if not linux_utils.check_root():
        logger.fatal("Please run this as root")
    self.cleanup()
    logger.info("Initialising Kubernetes worker node...")
    try:
        if parameters and self.PARAMETER_MASTER_IP in parameters:
            master_ip = parameters[self.PARAMETER_MASTER_IP]
        else:
            master_ip = input("Enter the IP address of the master"
                              " node you want to join:")
        # The master setup step drops the "kubeadm join" command here.
        join_file = '/mnt/nfs/data/k8/k8_clusters/{}/{}.txt'.format(
            master_ip, master_ip)
        with open(join_file, "r") as join_fs:
            cluster_join_command = join_fs.read()
        # Run the stored command to attach this node to the cluster.
        self.executor.execute(cluster_join_command)
    except CommandExecutionFailedException as join_error:
        logger.error("Failed to setup worker node. \n{}".format(
            str(join_error)))
        return False
    return True
def __init__(self, user_json=None):
    """Build a User object from the provided JSON dictionary.

    Args:
        user_json: dict of user fields; forwarded to the base class,
            which stores it as ``self.data``.
    """
    # Fix: the docstring previously appeared AFTER the first statement,
    # making it a no-op string literal rather than the method docstring.
    self.logger = XprLogger()
    self.logger.debug(f"User constructor called with {user_json}")
    super().__init__(user_json)
    self.logger.info(f"user info : {self.data}")
    # These are mandatory fields that need to be provided in user_json
    self.mandatory_fields = [
        "uid", "pwd", "firstName", "lastName", "email", "primaryRole"
    ]
    # primaryRole of a user has to be one of these
    self.valid_values = {"primaryRole": ["Dev", "PM", "DH", "Admin", "Su"]}
    # fields that cannot be modified
    self.unmodifiable_fields = ["pwd"]
    # fields that should be displayed in the output
    self.display_fields = [
        "uid", "firstName", "lastName", "email", "primaryRole", "nodes",
        "activationStatus"
    ]
    self.logger.debug("User constructed successfully")
def __init__(self, config_path=XprConfigParser.DEFAULT_CONFIG_PATH):
    """Load the pachyderm config section and connect to the cluster.

    Args:
        config_path: path to the xpresso configuration file.

    Raises:
        ValueError: if connecting to pachyderm fails.
    """
    self.logger = XprLogger()
    self.config = XprConfigParser(config_path)[self.PACHYDERM_CONFIG]
    try:
        self.pachyderm_client = self.connect_to_pachyderm()
    except PachydermOperationException as err:
        # Chain the original exception so the root cause is preserved
        # in tracebacks instead of being silently replaced.
        raise ValueError(err.message) from err
def execute(self):
    """Install kubeadm, kubelet and kubectl on this machine.

    Returns:
        bool: True when all install steps succeed, False otherwise.
    """
    logger = XprLogger()
    if not linux_utils.check_root():
        logger.fatal("Please run this as root")
    logger.info("Installing Kubeadm...")
    try:
        # Kubernetes requires swap to be disabled.
        self.executor.execute('swapoff -a')  # turns swap off
        # Add Google's apt signing key for the kubernetes repository.
        self.executor.execute('curl -s '
                              'https://packages.cloud.google.com/apt/doc/apt-key.gpg '
                              '| apt-key add -')
        # Register the kubernetes apt repository.
        linux_utils.write_to_file('deb https://apt.kubernetes.io/ '
                                  'kubernetes-xenial main',
                                  '/etc/apt/sources.list.d/kubernetes.list',
                                  "a")
        # Install the toolchain, then pin it so apt upgrades do not
        # move the cluster to an unexpected version.
        self.executor.execute('apt-get update && apt-get install '
                              '-y kubelet kubeadm kubectl')
        self.executor.execute('apt-mark hold kubelet kubeadm kubectl')
    except CommandExecutionFailedException as install_error:
        logger.error("Failed to install Kubeadm. \n{}".format(
            str(install_error)))
        return False
    return True
def __init__(self):
    """Create the CLI wrapper and populate its command registry."""
    # Client used to talk to the xpresso controller service.
    self.controller_client = ControllerClient()
    # Parsed command state; filled in when a command is executed.
    self.arguments = {}
    self.command = None
    self.SUPPORTED_COMMANDS = {}
    self.initialize_commands()
    self.logger = XprLogger()
def __init__(self, config_path=XprConfigParser.DEFAULT_CONFIG_PATH):
    """Build the package dependency graph from the dependency config.

    Args:
        config_path: path to the xpresso configuration file.

    Raises:
        PackageFailedException: if the dependency config file is
            missing or unreadable, or if the graph contains a cycle.
    """
    super().__init__()
    self.config = XprConfigParser(config_path)["packages_setup"]
    self.logger = XprLogger()
    dependency_config_file = self.config[self.DEPENDENCY_SECTION][
        self.DEPENDENCY_CONFIG_FILE]
    if not os.path.exists(dependency_config_file):
        # Fixed message: the implicit concatenation previously produced
        # "dependency jsfile" (missing space between fragments).
        self.logger.error(("Unable to find the dependency json "
                           "file at the mentioned path"))
        raise PackageFailedException("Invalid dependency config file")
    try:
        with open(dependency_config_file) as config_fs:
            dependency_config = json.load(config_fs)
    except EnvironmentError as err:
        self.logger.fatal(err)
        # Chain the cause so the original I/O error is not lost.
        raise PackageFailedException("Invalid config file") from err
    self.graph = nx.DiGraph()
    # One directed edge per (package -> dependency) pair.
    edges = [(key, value)
             for key in dependency_config
             for value in dependency_config[key]]
    self.graph.add_edges_from(edges)
    if not nx.is_directed_acyclic_graph(self.graph):
        self.logger.fatal(("Unable to handle dependencies due to cyclic "
                           "loop"))
        self.graph = None
        raise PackageFailedException("Cyclic Dependency Found")
def __init__(self, cfg: XprConfigParser):
    """Initialise Jenkins configuration, console logging and connection.

    Args:
        cfg: parsed xpresso configuration object.
    """
    self.config = cfg[self.JENKINS_SECTION]
    self.logger = XprLogger()
    # Mirror log records to the console and capture everything.
    self.logger.addHandler(logging.StreamHandler())
    self.logger.setLevel(logging.DEBUG)
    self.jenkins_server = self.init_connection()
def __init__(self):
    """Read the mongo section of the config and open a DB connection."""
    self.config = XprConfigParser(self.config_path)
    # Hoist the mongo section instead of repeating the lookup per key.
    mongo_cfg = self.config[self.MONGO_SECTION]
    self.db_utils = Utils(url=mongo_cfg[self.URL],
                          db=mongo_cfg[self.DB],
                          uid=mongo_cfg[self.UID],
                          pwd=mongo_cfg[self.PWD],
                          w=mongo_cfg[self.W])
    self.logger = XprLogger()
def __init__(
        self,
        executor: CommandExecutor = None,
        config_path: XprConfigParser = XprConfigParser.DEFAULT_CONFIG_PATH
):
    """Remember the shell executor and config path for later steps.

    Args:
        executor: command executor used to run shell commands.
        config_path: path to the xpresso configuration file.
    """
    self.executor = executor
    self.config_path = config_path
    self.logger = XprLogger()
def __init__(self, token):
    """Capture the session token and reset login/expiry state.

    Args:
        token: opaque session token for this user.
    """
    self.logger = XprLogger()
    self.config = XprConfigParser(XprConfigParser.DEFAULT_CONFIG_PATH)
    self.token = token
    # No expiry information is known until the token is validated.
    self.token_expiry = None
    self.login_expiry = None
    self.login_status = False
def __init__(self, config_path=XprConfigParser.DEFAULT_CONFIG_PATH):
    """Resolve client paths and the controller server URL from config.

    Args:
        config_path: path to the xpresso configuration file.
    """
    self.logger = XprLogger()
    self.config = XprConfigParser(config_path)
    controller_cfg = self.config[self.CONTROLLER_SECTION]
    # Client working directory lives under the user's home folder.
    self.path = os.path.join(os.path.expanduser('~'),
                             controller_cfg[self.CLIENT_PATH])
    # File holding the currently active session token.
    self.token_file = '{}.current'.format(self.path)
    self.server_path = controller_cfg[self.SERVER_URL]
def __init__(self, persistence_manager):
    """Set up kubeflow utilities, config and the pipeline folder.

    Args:
        persistence_manager: persistence backend handed to KubeflowUtils.
    """
    self.kubeflow_utils = KubeflowUtils(persistence_manager)
    self.logger = XprLogger()
    self.executor = LocalShellExecutor()
    self.config = XprConfigParser(XprConfigParser.DEFAULT_CONFIG_PATH)
    # Folder holding declarative pipeline definitions for projects.
    self.declarative_pipeline_folder = self.config[PROJECTS_SECTION][
        DECLARATIVE_PIPELINE_FOLDER]
    self.content = self.declarative_pipeline_folder_check()
def __init__(self, config_path=XprConfigParser.DEFAULT_CONFIG_PATH):
    """Load gateway settings from config and initialise the gateway.

    Args:
        config_path: path to the xpresso configuration file.
    """
    self.config = XprConfigParser(config_file_path=config_path)
    self.api_gateway = None
    # Hoist the gateway section instead of repeating the lookup.
    gateway_cfg = self.config[GatewayManager.CONFIG_GATEWAY_KEY]
    self.initialize_gateway(
        gateway_provider=gateway_cfg[GatewayManager.CONFIG_GATEWAY_PROVIDER],
        admin_url=gateway_cfg[GatewayManager.CONFIG_GATEWAY_ADMIN],
        proxy_url=gateway_cfg[GatewayManager.CONFIG_GATEWAY_PROXY],
        config_path=config_path)
    self.logger = XprLogger()
def __init__(self): self.logger = XprLogger() # script is supposed to be run on the VM itself, so host is localhost client = MongoClient('localhost', replicaset='rs0') self.db = client.xprdb self.db.authenticate('xprdb_admin', 'xprdb@Abz00ba') config_path = XprConfigParser.DEFAULT_CONFIG_PATH config = XprConfigParser(config_path) MONGO = 'mongodb' FILEPATH = 'formats_file' self.path = config[MONGO][FILEPATH] with open(self.path, 'r') as file: self.data = json.loads(file.read())
def __init__(self, persistence_manager):
    """Load deployment configuration and ensure the files folder exists.

    Args:
        persistence_manager: persistence backend used by this manager.
    """
    self.persistence_manager = persistence_manager
    self.logger = XprLogger()
    config_path = XprConfigParser.DEFAULT_CONFIG_PATH
    self.config = XprConfigParser(config_path)
    PROJECTS_SECTION = 'projects'
    DEPLOYMENT_FILES_FOLDER = 'deployment_files_folder'
    self.deployment_files_folder = self.config[PROJECTS_SECTION][
        DEPLOYMENT_FILES_FOLDER]
    # exist_ok avoids the check-then-create race of the previous
    # isdir() + makedirs() pair (another process could create the
    # directory between the two calls).
    os.makedirs(self.deployment_files_folder, 0o755, exist_ok=True)
def __init__(self, hostname, username=None, password=None, private_key=None,
             passphrase=None, port=22):
    """Store SSH connection settings and prepare a paramiko client.

    Args:
        hostname: host to connect to.
        username: login user, if any.
        password: login password, if any.
        private_key: key used for key-based auth, if any.
        passphrase: passphrase protecting the private key, if any.
        port: SSH port, defaults to 22.
    """
    self.logger = XprLogger()
    self.ssh_client = paramiko.SSHClient()
    # Accept unknown host keys automatically instead of failing.
    self.ssh_client.set_missing_host_key_policy(paramiko.AutoAddPolicy())
    self.hostname = hostname
    self.username = username
    self.password = password
    self.private_key = private_key
    self.passphrase = passphrase
    self.port = port
def cli_options(**kwargs):
    """Run a CLI command and render its response for the user.

    Prints the formatted response and a green "Success" on success; a
    red error message is printed for known CLI/controller failures or
    for any unexpected error (which is also logged).
    """
    logger = XprLogger()
    xprctl = XpressoControllerCLI()
    try:
        response = xprctl.execute(**kwargs)
        if response:
            click.echo(CLIResponseFormatter(data=response).get_str())
        click.secho("Success", fg="green")
    except (ControllerClientResponseException,
            CLICommandFailedException) as cli_error:
        click.secho(f"Error: {cli_error.message}", err=True, fg="red")
    except Exception as e:
        # Log the unexpected exception before showing a generic message.
        logger.error(e)
        # Fixed: was an f-string with no placeholders.
        click.secho("Unknown Failure", err=True, fg="red")
def __init__(self, node_json=None):
    """Build a Node object from the provided JSON dictionary.

    Args:
        node_json: dict of node fields; forwarded to the base class,
            which stores it as ``self.data``.
    """
    # Fix: the docstring previously appeared AFTER the first statement,
    # making it a no-op string literal rather than the method docstring.
    self.logger = XprLogger()
    self.logger.debug(f"Node constructor called with {node_json}")
    super().__init__(node_json)
    # Fixed debug leftover: print(self.data) replaced with a logger
    # call, consistent with the sibling User constructor.
    self.logger.debug(f"node info : {self.data}")
    # These are mandatory fields that need to be provided in node_json
    self.mandatory_fields = ["address", "name"]
    self.provision_fields = ["address", "nodetype"]
    # fields that should be displayed in the output
    self.display_fields = ["address", "name", "nodetype", "provisionStatus",
                           "activationStatus"]
    self.logger.debug("Node constructed successfully")
def execute(self, **kwargs):
    """Mount the NFS share matching this VM's subnet and persist it.

    Finds the NFS server whose configured subnet pattern matches this
    machine's IP, mounts it, and appends a corresponding /etc/fstab
    entry so the mount survives reboots.

    Returns:
        bool: True on success, False when no NFS matches or the mount
        command fails.
    """
    logger = XprLogger()
    if not linux_utils.check_root():
        logger.fatal("Please run this as root")
    logger.info("Mounting NFS File")
    subnet_to_nfs_map = self.config[self.NFS_SECTION][self.SUBNET_MAP_KEY]
    ip_address = linux_utils.get_ip_address()
    matched_nfs = None
    for nfs, subnet in subnet_to_nfs_map.items():
        logger.info("Matching {} {}".format(subnet, ip_address))
        check = re.match(subnet, ip_address)
        # Fixed debug leftover: print(check) replaced with logger call.
        logger.debug(check)
        if check:
            matched_nfs = nfs
            break
    if not matched_nfs:
        logger.info("Could not determine nfs value")
        return False
    mount_location = self.config[self.NFS_SECTION][self.MOUNT_LOCATION_KEY]
    nfs_location = self.config[self.NFS_SECTION][self.NFS_LOCATION_KEY]
    mount_script = "mount {}:{} {}".format(matched_nfs, nfs_location,
                                           mount_location)
    logger.info("Mounting {}".format(mount_script))
    try:
        linux_utils.create_directory(mount_location, 0o755)
        self.executor.execute(mount_script)
        # Fixed typo in log message ("Succesful").
        logger.info("Mount Successful")
        logger.info("Updating fstab file")
        with open(self.FSTAB_FILE, "a+") as f:
            fstab_statement = "{}:{} {} nfs " \
                              "auto,nofail,noatime,nolock,intr,tcp," \
                              "actimeo=1800 0 0" \
                .format(matched_nfs, nfs_location, mount_location)
            logger.info(
                "Updating fstab file with {}".format(fstab_statement))
            # Newline-terminate the entry: the previous bare write would
            # merge lines in /etc/fstab on repeated runs.
            f.write(fstab_statement + "\n")
        logger.info("Update Successful")
    except CommandExecutionFailedException as e:
        logger.error("Script Failed to run = {}\n{}".format(
            mount_script, str(e)))
        return False
    return True
def __init__(self, config_path=XprConfigParser.DEFAULT_CONFIG_PATH):
    """Snapshot interpreter/host metadata and set up dependency handling.

    Args:
        config_path: path to the xpresso configuration file.
    """
    self.logger = XprLogger()
    # NOTE: sys.version.split('\n') yields a LIST of version lines.
    self.python_version = sys.version.split('\n')
    # Host platform metadata, captured once at construction time.
    self.system = platform.system()
    self.machine = platform.machine()
    self.platform = platform.platform()
    self.uname = platform.uname()
    self.version = platform.version()
    self.arch = platform.architecture()
    self.config_path = config_path
    self.package_dependency = PackageDependency(config_path=config_path)
def __init__(self):
    """Load bitbucket credentials and default REST API payload templates."""
    self.logger = XprLogger()
    self.config = XprConfigParser(self.config_path)
    # Hoist the bitbucket section instead of repeating the lookup.
    bitbucket_cfg = self.config['bitbucket']
    self.defaulturl = bitbucket_cfg['restapi']
    self.teamname = bitbucket_cfg['teamname']
    self.username = bitbucket_cfg['username']
    self.password = bitbucket_cfg['password']
    # Following project format provided for bitbucket RESTAPI
    self.defaultprojectbody = {
        "name": "",
        "description": "",
        "key": "",
        "is_private": False
    }
    # Following repo format provided for bitbucket RESTAPI
    self.defaultrepobody = {"scm": "git", "project": {"key": ""}}
def __init__(self, config_path=XprConfigParser.DEFAULT_CONFIG_PATH,
             executor=None):
    """Initialise apt package-setup configuration and signing state.

    Args:
        config_path: path to the xpresso configuration file.
        executor: command executor; defaults to a LocalShellExecutor.
    """
    super().__init__(executor=executor or LocalShellExecutor())
    self.config = XprConfigParser(config_path)["packages_setup"]
    self.logger = XprLogger()
    self.apt_config = self.config[self.APT_SECTION]
    self.public_key = self.apt_config[self.APT_PUBLIC_KEY]
    self.private_key = self.apt_config[self.APT_PRIVATE_KEY]
    self.hosted_package_folder = self.apt_config[
        self.APT_HOSTED_PACKGE_KEY]
    # Signing details are provided later, before packages are signed.
    self.sign_paraphrase = None
    self.sign_key_id = None
    self.home_folder = os.getcwd()
def __init__(self, project_json=None):
    """Build a Project object from the provided JSON dictionary.

    Args:
        project_json: dict of project fields; forwarded to the base
            class, which stores it as ``self.data``.
    """
    # Fix: the docstring previously appeared AFTER the first statement,
    # making it a no-op string literal rather than the method docstring.
    self.logger = XprLogger()
    self.logger.debug(f"Project constructor called with {project_json}")
    super().__init__(project_json)
    # List of all the fields project can contain
    self.complete_fields = self.project_config["project_key_list"]
    # These are mandatory fields that need to be provided in project_json
    self.mandatory_fields = ["name", "description", "owner"]
    # primaryRole of a user has to be one of these
    self.valid_values = {"primaryRole": ["Dev", "PM", "DH", "Admin", "Su"]}
    # fields that cannot be modified
    self.unmodifiable_fields = ["name"]
    # fields that should be displayed in the output
    self.display_fields = self.project_config["project_output_fields"]
    # Fixed copy/paste defect: message previously said "User".
    self.logger.debug("Project constructed successfully")
def execute(self):
    """Install the Kubernetes dashboard on the machine.

    Deploys the official dashboard manifests, exposes the service on a
    fixed NodePort (30252), applies a pre-provisioned access config and
    enables skip-login.

    Returns:
        bool: True on success, False if any kubectl step fails.
    """
    logger = XprLogger()
    if not linux_utils.check_root():
        # NOTE(review): fatal() logs but execution continues here —
        # confirm whether XprLogger.fatal is expected to abort.
        logger.fatal("Please run this as root")
    logger.info("Setting up the Kubernetes dashboard...")
    try:
        # Deploy the official dashboard manifests.
        deploy_dashboard = 'kubectl create -f https://raw.githubusercontent' \
                           '.com/kubernetes/dashboard/master/aio/deploy' \
                           '/recommended/kubernetes-dashboard.yaml'
        self.executor.execute(deploy_dashboard)  # creates deployment
        # Expose the dashboard service outside the cluster as a NodePort.
        nodeport = """kubectl -n kube-system patch service \
kubernetes-dashboard --type='json' -p \
'[{"op":"replace","path":"/spec/type","value":"NodePort"}]'"""
        self.executor.execute(nodeport)  # exposes dashboard
        # Pin the NodePort to 30252 so the dashboard URL stays stable.
        constant_port = """kubectl -n kube-system patch service \
kubernetes-dashboard --type='json' -p \
'[{"op":"replace","path":"/spec/ports/0/nodePort","value":30252}]'"""
        self.executor.execute(constant_port)  # sets constant port
        # Copy the pre-provisioned access (RBAC) definition into place
        # and apply it.
        content_path = '/opt/xpresso.ai/config/kubernetes-dashboard-access.yaml'
        with open(content_path, "r") as f:
            content = f.read()
        path = '/etc/kubernetes/kube-dashboard-access.yaml'
        linux_utils.write_to_file(content, path, "w+")
        dashboard_access = 'kubectl create -f {}'.format(path)
        self.executor.execute(dashboard_access)  # grants permission
        # Allow opening the dashboard without a token/kubeconfig login.
        skip_login = """kubectl patch deployment -n kube-system \
kubernetes-dashboard --type='json' -p='[{"op": "add", "path": \
"/spec/template/spec/containers/0/args/1", \
"value":"--enable-skip-login" }]'"""
        self.executor.execute(skip_login)  # enables skip login
    except CommandExecutionFailedException as e:
        logger.error("Failed to setup dashboard. \n{}".format(str(e)))
        return False
    return True
def __init__(self, executor=None):
    """Store the executor used to manage the mongod systemd service.

    Args:
        executor: command executor; defaults to a LocalShellExecutor.
    """
    # Bug fix: a caller-supplied executor was previously discarded —
    # self.executor was only assigned inside the `if not executor`
    # branch, leaving the attribute unset (AttributeError later) when
    # an executor was passed in.
    self.executor = executor if executor else LocalShellExecutor()
    self.logger = XprLogger()
    # systemd unit file controlling the mongod service.
    self.service_path = '/lib/systemd/system/mongod.service'
def __init__(self):
    """Initialise the logger and a default empty HTTP 500 response."""
    self.logger = XprLogger()
    # Fallback response used when a call produces nothing useful.
    self.empty_response = HTTPResponse(500, {}, {})
def __init__(self, objjson=None):
    """Store the raw JSON payload describing this xpresso object.

    Args:
        objjson: dict describing the object; kept as-is in self.data.
    """
    self.logger = XprLogger()
    self.logger.debug("Inside XprObject constructor")
    self.data = objjson
    self.logger.debug("Done")
from xpresso.ai.admin.controller.external.jenkins_manager import JenkinsManager
from xpresso.ai.core.utils.xpr_config_parser import XprConfigParser
from xpresso.ai.core.logging.xpr_log import XprLogger
from xpresso.ai.admin.controller.exceptions.xpr_exceptions import *

# Module-level setup: bitbucket credentials and the skeleton repo URL.
# NOTE(review): `urllib`, `os` and `bitbucketapi` are used below but not
# imported in this view — presumably imported elsewhere in the file;
# confirm.
config_path = XprConfigParser.DEFAULT_CONFIG_PATH
config = XprConfigParser(config_path)
username = config['bitbucket']['username']
password = config['bitbucket']['password']
# The password may contain URL-reserved characters; escape it for the
# clone URL.
escape_password = urllib.parse.quote(password)
skeletonpath = f"http://{username}:{escape_password}@bitbucket.org/"\
               f"abzooba-screpo/skeleton-build.git"
bitbucket = bitbucketapi.bitbucketapi()
logger = XprLogger()


def replace_string(key, replacement, direc):
    # Walks `direc` recursively, reading every file and replacing `key`
    # with `replacement` in its contents.
    # NOTE(review): the modified string `s` is never written back to the
    # file in this view — confirm whether the write-back happens in code
    # outside this chunk; otherwise this function has no lasting effect.
    print(key)
    print(replacement)
    print(direc)
    for dname, dirs, files in os.walk(direc):
        print(files)
        for fname in files:
            fpath = os.path.join(dname, fname)
            with open(fpath) as f:
                s = f.read()
            s = s.replace(key, replacement)
            print(key)
            print(replacement)
def __init__(self):
    """Create the instance logger."""
    self.logger = XprLogger()
def __init__(self):
    """Load the default xpresso configuration and create the logger."""
    self.xpr_config = XprConfigParser(
        config_file_path=XprConfigParser.DEFAULT_CONFIG_PATH)
    self.logger = XprLogger()