class TokenInfo(object):
    """
    This class represents an access token
    """

    def __init__(self, token):
        self.logger = XprLogger()

        config_path = XprConfigParser.DEFAULT_CONFIG_PATH
        self.config = XprConfigParser(config_path)
        self.token = token
        self.token_expiry = None
        self.login_expiry = None
        self.login_status = False

    def generate(self):
        self.logger.debug('Generating token')
        self.token = secrets.token_hex(32)
        self.token_expiry = \
            time() + int(self.config[CONTROLLER_SECTION][TOKEN_EXPIRY])
        self.login_expiry = \
            time() + int(self.config[CONTROLLER_SECTION][LOGIN_EXPIRY])
        self.login_status = True

    def has_expired(self):
        return (self.token_expiry < time() or
                self.login_expiry < time())

    def revalidate(self):
        self.token_expiry = \
            time() + int(self.config[CONTROLLER_SECTION][TOKEN_EXPIRY])
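
# --- Usage sketch for TokenInfo (illustrative; not part of the source) ---
# Assumes the config at XprConfigParser.DEFAULT_CONFIG_PATH provides integer
# expiry values (in seconds) under the controller section.
#
#   info = TokenInfo(token=None)
#   info.generate()                # fresh 32-byte hex token, both timers set
#   info.has_expired()             # False until either window lapses
#   info.revalidate()              # extends token_expiry only; login_expiry
#                                  # still enforces the hard session limit
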
class Cluster(XprObject):
    """
    This class represents a cluster
    """

    def __init__(self, cluster=None):
        self.logger = XprLogger()
        self.logger.debug("Cluster constructor called with {}".format(cluster))
        super().__init__(cluster)
        self.mandatory_fields = ['name']
class User(XprObject):
    """
    This class represents a User
    """
    def __init__(self, user_json=None):
        self.logger = XprLogger()
        self.logger.debug(f"User constructor called with {user_json}")
        super().__init__(user_json)
        self.logger.info(f"user info : {self.data}")
        # These are mandatory fields that need to be provided in user_json
        self.mandatory_fields = [
            "uid", "pwd", "firstName", "lastName", "email", "primaryRole"
        ]

        # primaryRole of a user has to be one of these
        self.valid_values = {"primaryRole": ["Dev", "PM", "DH", "Admin", "Su"]}

        # fields that cannot be modified
        self.unmodifiable_fields = ["pwd"]

        # fields that should be displayed in the output
        self.display_fields = [
            "uid", "firstName", "lastName", "email", "primaryRole", "nodes",
            "activationStatus"
        ]
        self.logger.debug("User constructed successfully")

    def get_token_info(self):
        token_info = TokenInfo(self.get("token"))
        token_info.token_expiry = self.get("tokenExpiry")
        token_info.login_expiry = self.get("loginExpiry")
        return token_info

    @staticmethod
    def check_password(password):
        if len(password) < 6:
            raise PasswordStrengthException("Password is too short")
        reg_exp = "^(((?=.*[a-z])(?=.*[A-Z]))|((?=.*[a-z])(?=.*[0-9]))|((?=.*[A-Z])(?=.*[0-9])))(?=.{6,})"
        match = re.search(reg_exp, password)
        if not match:
            raise PasswordStrengthException(
                "Password is weak. Choose a strong password")
class PromotheusMetricReportGenerator:
    """ Takes list of metrics and generate a promotheus compatible report """

    def __init__(self):
        self.logger = XprLogger()

    def generate_report(self, metrics):
        self.logger.info("converting metrics to promotheus report")
        report_list = []
        for metric in metrics:
            try:
                label = f"xpresso_ctrl_{metric['label']}"
                data = metric['data']
                if isinstance(data, list):
                    for data_point in data:
                        report_list.append(f'{label}{{value="{data_point}"}} 1')
                elif isinstance(data, (int, str)):
                    report_list.append(f'{label} {data}')
            except KeyError:
                self.logger.debug("Metric missing 'label' or 'data'; ignoring")
        self.logger.info("Report generation complete")
        return '\n'.join(report_list)
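
# --- Example input/output for generate_report (illustrative) ---
#   metrics = [
#       {"label": "active_users", "data": 42},
#       {"label": "nodes", "data": ["10.0.0.1", "10.0.0.2"]},
#   ]
# would yield the Prometheus exposition lines:
#   xpresso_ctrl_active_users 42
#   xpresso_ctrl_nodes{value="10.0.0.1"} 1
#   xpresso_ctrl_nodes{value="10.0.0.2"} 1
# Metrics missing either key are silently skipped via the KeyError handler.
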
class XprProjectManager:

    LINUX_UID_KEY = "linux_uid"
    MIN_VALID_LINUX_UID = 1001

    def __init__(self, persistence_manager):
        self.logger = XprLogger()
        self.persistence_manager = persistence_manager

    def create_project(self, project_json: dict) -> object:
        """
        creates a new project in the persistence and bitbucket

        Creates a new project and repo on bitbucket.
        Then setup necessary nfs mount. Then adds the
        project json to persistence.

        Parameters:
            project_json: json with project information

        Return:
            returns xprresponse object
        """
        # checks if the project_json has the complete info required
        self.logger.info("checking the provided project information")

        # Updating next linux uid
        project_json[self.LINUX_UID_KEY] = self.get_next_linux_uid()
        new_project = Project(project_json)
        new_project.project_info_check(self.persistence_manager)
        new_project.complete_project_info()
        self.logger.debug(f"Updated project info is: {new_project.data}")

        self.logger.info("calling setup_project to complete the setup")
        setup = setup_project(new_project.data)
        setup_code = setup['status']
        self.logger.info(f"setup_project status code is {setup_code}")
        if setup_code != 200:
            self.logger.error("Project setup failed")
            return XprResponse('failure', setup_code,
                               {"message": "Project setup failed"})
        self.logger.info("project setup completed")
        self.logger.info(
            "Adding the project with complete info to persistence")
        self.persistence_manager.insert("projects", setup['project_json'],
                                        False)
        self.logger.info("project successfully added to persistence")

        # allocate required environments
        self.logger.info("Allocating project environments")
        env_manager = EnvManager()
        env_manager.allocate_env(project_json["name"],
                                 project_json["environments"])
        self.logger.info("Allocated project environments")

        new_project.filter_display_fields()
        return XprResponse("success", None, new_project.data)

    def get_next_linux_uid(self):
        """
        Checks the database and finds the next linux uid which needs to be
        assigned to a project
        Returns:
            A valid UID
        """
        all_projects = self.persistence_manager.find("projects", {})
        new_linux_uid = max([self.MIN_VALID_LINUX_UID] + [
            project[self.LINUX_UID_KEY]
            for project in all_projects if self.LINUX_UID_KEY in project
        ]) + 1
        return new_linux_uid
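    # --- Worked example for get_next_linux_uid (illustrative data) ---
    # With projects carrying linux_uid values 1001 and 1003 in persistence,
    # max([1001] + [1001, 1003]) + 1 == 1004, so the next project gets
    # UID 1004. With no projects registered yet the result is 1002;
    # MIN_VALID_LINUX_UID acts as a floor and is never assigned itself.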

    def get_projects(self,
                     filter_json: dict,
                     apply_display_filter=True) -> object:
        """
            Calls the persistence with input filters to fetch the list of projects.

            Parameters:
                filter_json [json] : json with filter key & value pairs

            Return:
                returns the (optionally display-filtered) list of projects
        """
        self.logger.info("retrieving the list of projects from persistence")
        projects = self.persistence_manager.find("projects", filter_json)
        self.logger.info("calling filter_project_output")
        self.logger.debug(f"projects are: {projects}")
        if apply_display_filter:
            filtered_projects = []
            for project_json in projects:
                temp_project = Project(project_json)
                temp_project.filter_display_fields()
                filtered_projects.append(temp_project.data)
            projects = filtered_projects
        self.logger.debug(f"\n Filtered projects are: {projects}")
        # find() retrieves the full project info from persistence;
        # the display filter above trims it to the displayable fields
        return projects

    def modify_project(self, changes_json: dict):
        """
            Modifies a project in persistence and on bitbucket

            Parameters:
                changes_json: project information that needs to be modified

            Return:
                returns a xprresponse object
        """
        if 'name' not in changes_json:
            raise IncompleteProjectInfoException(
                "Project name needs to be provided for modify_project")

        uid_json = {'name': changes_json['name']}
        self.logger.info("checking if the project is already present")
        projects = self.persistence_manager.find("projects", uid_json)
        if len(projects) == 0:
            self.logger.error("cannot modify a project which doesn't exist.")
            raise NoProjectException("Cannot Modify unregistered project")
        self.logger.info("calling modify_info_check to validate the info")
        new_project = Project(projects[0])
        new_project.modify_info_check(changes_json, self.persistence_manager)
        self.logger.info("modify_project_locally has been called")
        modify_status = modify_project_locally(projects[0], changes_json)
        if modify_status != 200:
            self.logger.error("modify_project_locally failed")
            return XprResponse('failure', modify_status,
                               {"message": "Modify project failed"})

        self.logger.info(
            "project info is being modified before updating @persistence")
        update_json = new_project.modify_project_info(changes_json)
        self.persistence_manager.update("projects", uid_json, update_json)
        self.logger.info("Project modified successfully")

        # allocate required environments
        self.logger.info("Allocating project environments")
        env_manager = EnvManager()
        env_manager.allocate_env(changes_json["name"],
                                 changes_json["environments"])
        self.logger.info("Allocated project environments")

    def deactivate_project(self, uid_json: dict):
        """
            Deactivates a project. updates the appropriate flags in persistence

            Parameters:
                uid [str] : uid of the project

            Return:
                returns xprresponse object
        """
        # deletes the project from persistence
        self.logger.info("Checking if the project actually exists")
        projects = self.persistence_manager.find("projects", uid_json)
        if len(projects) == 0:
            raise NoProjectException()
        elif 'activationStatus' not in projects[0]:
            projects[0]['activationStatus'] = True

        if projects[0]['currentlyDeployed']:
            raise ProjectDeactivationException(
                "Project Currently deployed. Undeploy first.")
        elif not projects[0]['activationStatus']:
            raise ProjectDeactivationException("Project already deactivated")

        active_flag_json = {"activationStatus": False}
        self.persistence_manager.update("projects", uid_json, active_flag_json)

        # remove allocated environments
        self.logger.info("Removing project environments")
        env_manager = EnvManager()
        env_manager.remove_env(uid_json["name"])
        self.logger.info("Removed project environments")
class SSHClient:
    """
    It connects to the ssh service using username/passs or sshkeys.
    It is mainly used to run packages on remote ssl based servers
    """
    def __init__(self,
                 hostname,
                 username=None,
                 password=None,
                 private_key=None,
                 passphrase=None,
                 port=22):

        self.logger = XprLogger()

        self.ssh_client = paramiko.SSHClient()
        self.ssh_client.set_missing_host_key_policy(paramiko.AutoAddPolicy())

        self.hostname = hostname
        self.username = username
        self.password = password
        self.private_key = private_key
        self.passphrase = passphrase
        self.port = port

    def connect(self):
        """
        Create SSH connection object
        """
        try:
            if self.private_key:
                self.ssh_client.connect(hostname=self.hostname,
                                        port=self.port,
                                        pkey=self.private_key,
                                        passphrase=self.passphrase)

            elif self.username:
                self.ssh_client.connect(hostname=self.hostname,
                                        port=self.port,
                                        username=self.username,
                                        password=self.password)
            else:
                self.logger.error("Empty credentials provided")

        except (paramiko.BadHostKeyException, paramiko.SSHException,
                paramiko.AuthenticationException) as e:
            self.logger.error(
                "Could not connect to Remote Server {}".format(e))

    def execute(self, command, stream=True):
        """
        Execute a command on the remote server
        Args:
            command (str): command to run
            stream (bool): print the output of the command as
                           a continuous stream

        Returns:
             tuple: (success, stdout, stderr)

        """
        if not self.ssh_client:
            return False, None, None

        try:
            stdin, stdout, stderr = self.ssh_client.exec_command(command)
            if stream:
                stdin.close()
                for line in iter(lambda: stdout.readline(2048), ""):
                    self.logger.debug(line)
        except paramiko.SSHException as e:
            self.logger.error("Command {} failed to run with exception {}"
                              .format(command, e))
            return False, e, e
        return True, stdout, stderr
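
# --- Usage sketch for SSHClient (illustrative; host and credentials are
# placeholders) ---
#   ssh = SSHClient("10.0.0.5", username="xpresso", password="secret")
#   ssh.connect()
#   ok, stdout, stderr = ssh.execute("uname -a")
#   # on paramiko failure, execute() returns (False, exception, exception)
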
class DeploymentFactory:
    """
    Factory class which generates the specific deployment object
    depending on the project json provided.
    It takes a component json and returns an object of relevant deployment
    object
    """

    JOB_COMPONENTS = ["job", "ds_job"]

    def __init__(self):
        self.config = XprConfigParser()
        self.logger = XprLogger()

    def generate_deployment(self, input_component_json, project_name,
                            component_name, component_type) -> Deployment:
        """
        Parses the input project json and create a deployment object

        Args:
            component_name: Name of the component
            component_type: Component type described during project creation
            project_name: Name of the project
            input_component_json: input json for the component
        Returns:
            Deployment: Object Deployment
        """
        self.logger.debug('Deploying {}'.format(component_name))
        if component_type in self.JOB_COMPONENTS:
            deployment = JobDeployment(project_name, component_name)
        else:
            deployment = ServiceDeployment(project_name, component_name)

        deployment.replicas = self.get_value(
            input_component_json, 'replicas',
            ComponentsSpecifiedIncorrectlyException)

        # verify build version
        deployment.build_version = self.get_value(
            input_component_json,
            'build_version',
            exception=InvalidBuildVersionException)

        deployment.environment = self.get_value(input_component_json,
                                                'environment',
                                                exception=None,
                                                default=[])

        deployment.persistence = []
        try:
            deployment.persistence = input_component_json['persistence']
            if deployment.persistence:
                deployment.volume_size = deployment.persistence[0]["size"]
                deployment.volume_mount_path = deployment.persistence[0][
                    "mount_path"]
        except (IndexError, KeyError, XprExceptions):
            self.logger.error('Persistence specified incorrectly. Ignoring')

        if deployment.is_job():

            deployment.job_type = self.get_value(
                input_component_json,
                'type',
                exception=InvalidJobTypeException)

            deployment.schedule = self.get_value(
                input_component_json,
                'cron_schedule',
                exception=InvalidCronScheduleException)

            deployment.commands = self.get_value(
                input_component_json,
                'commands',
                exception=InvalidJobCommandsException)
        elif deployment.is_service() or deployment.is_database():
            # verify service ports
            deployment.ports = self.get_value(
                input_component_json,
                'ports',
                exception=ComponentsSpecifiedIncorrectlyException)
            if not deployment.ports:
                self.logger.error('Ports are empty')
                raise ComponentsSpecifiedIncorrectlyException
            # Verify if external service required
            deployment.is_external = self.get_value(input_component_json,
                                                    'is_external',
                                                    exception=None,
                                                    default=False)
        return deployment

    def get_value(self, input_component_json, key, exception, default=None):
        """ Fetch value from the input component json for a given key.
        If not found raise the given exception.
        If exception is None, return the default value"""
        try:
            return input_component_json[key]
        except (IndexError, KeyError, XprExceptions):
            error_msg = f'Components specified incorrectly. {key} invalid'
            if not exception:
                self.logger.warning(error_msg)
                return default
            self.logger.error(error_msg)
            raise exception(message=error_msg)
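
# --- get_value semantics (illustrative; factory and SomeException are
# placeholders) ---
#   factory.get_value({"replicas": 2}, "replicas", SomeException)   # -> 2
#   factory.get_value({}, "replicas", SomeException)    # raises SomeException
#   factory.get_value({}, "environment", exception=None, default=[])
#                                                       # -> [] (warns only)
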
class KubernetesManager:
    def __init__(self, persistence_manager):

        self.persistence_manager = persistence_manager
        self.kubernetes_deploy = KubernetesDeploy(persistence_manager)
        self.logger = XprLogger()

    def set_api_config(self, master_node):
        """
        Sets the kubernetes API config
        :param master_node: IP of master node of the cluster on which
        project is to be deployed
        :return: nothing
        """
        # get the master node of the cluster
        master_info = self.persistence_manager.find('nodes',
                                                    {"address": master_node})
        # get the kubernetes bearer token from master
        try:
            token = master_info[0]['token']
        except (IndexError, KeyError):
            self.logger.error("Cluster is not valid. No valid node exist")
            raise ClusterNotFoundException("Cluster is invalid")
        self.logger.debug('Bearer token retrieved from master node')
        # kubernetes API configurations
        configuration = client.Configuration()
        configuration.host = 'https://{}:6443'.format(master_node)
        configuration.verify_ssl = False
        configuration.debug = True
        configuration.api_key = {"authorization": "Bearer " + token}
        client.Configuration.set_default(configuration)
        self.logger.debug('API configurations set.')

    def check_for_namespace(self, project):
        """
        Check if a namespace exists for the given project.  If not, creates it.
        :param project: project to be deployed
        :return: nothing
        """
        # check if namespace exists for the project
        self.logger.debug('checking for existing namespace')
        namespaces = client.CoreV1Api().list_namespace()
        flag = False
        project_name = project['name']
        for ns in namespaces.items:
            if ns.metadata.name == \
                    project_utils.modify_string_for_deployment(project_name):
                flag = True
                self.logger.debug('Namespace for project already exists.')
        if not flag:  # if project_name not in namespaces
            self.logger.debug('creating namespace for the project')
            ns_path = self.kubernetes_deploy.namespace_yaml_generator(
                project_name)
            # create namespace for project
            self.kubernetes_deploy.create_namespace_client(ns_path)

    def kube_deploy_job(self, deployment: Deployment):
        """
        Deploys a job/cronjob component.
        Args:
            deployment: Job deployment object
        """
        if not isinstance(deployment, JobDeployment):
            raise ComponentsSpecifiedIncorrectlyException(
                "Job component is invalid")
        self.logger.info('entering kube_deploy_job')
        self.logger.debug('running job steps')
        self.kubernetes_deploy.run_job_steps(deployment)

    def kube_deploy_service(self, deployment: Deployment):
        """
        Deploys a service component.
        Args:
            deployment: Service deployment object
        Returns:
            str: IP of the hosted service
        """
        if not isinstance(deployment, ServiceDeployment):
            raise ComponentsSpecifiedIncorrectlyException(
                "Service component is invalid")
        self.logger.debug('running deployment steps')
        self.kubernetes_deploy.run_deployment_steps(deployment)
        service_ip = self.kubernetes_deploy.get_service_ip(deployment)
        return service_ip

    def kube_undeploy(self, project_name):
        """
        Undeploys a project
        :param project_name: project to be undeployed
        :return: nothing
        """
        try:
            self.logger.debug('Deleting namespace to undeploy project')
            k8s_beta = client.CoreV1Api()
            resp = k8s_beta.delete_namespace(
                project_utils.modify_string_for_deployment(project_name))
            self.logger.debug("Namespace deleted. Details : {}".format(
                str(resp)))
        except ApiException as e:
            if e.status == 404:  # Not found
                self.logger.error('Project is not deployed currently.')
                raise CurrentlyNotDeployedException
            raise
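
# --- Deployment flow sketch for KubernetesManager (illustrative; assumes a
# provisioned cluster whose master node is registered in persistence) ---
#   manager = KubernetesManager(persistence_manager)
#   manager.set_api_config("10.0.0.10")           # master node address
#   manager.check_for_namespace(project_json)     # create namespace if absent
#   ip = manager.kube_deploy_service(deployment)  # ServiceDeployment only
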
class NodeManager:
    config_path = XprConfigParser.DEFAULT_CONFIG_PATH

    def __init__(self, persistence_manager):
        self.config = XprConfigParser(self.config_path)
        self.logger = XprLogger()
        self.persistence_manager = persistence_manager

    def register_node(self, node_json):
        """
            registers a new node in the database if the server is available

            checks if the node already exists and then checks if the server
            with node ip_address is available. Then adds the node to database

            Parameters:
                node_json [json]: json with node information

            Return:
                Success -> 'OK' [str] : returns 'OK' as response
                Failure -> [str] : returns appropriate failure response
        """
        self.logger.info("registering a new node")
        self.logger.debug(f"node info provided is : {node_json}")
        new_node = Node(node_json)
        self.logger.info('checking if the mandatory fields are provided')
        new_node.validate_mandatory_fields()
        self.logger.info('checking if the address of node is valid')
        new_node.validate_node_address()
        new_node.set('provisionStatus', False)
        new_node.set('activationStatus', True)

        self.logger.info("provisionStatus and activationStatus fields are set")

        self.logger.info("adding node to the database")

        self.persistence_manager.insert("nodes", new_node.data, False)
        self.logger.info("node successfully added to the persistence")

    def get_nodes(self, filter_json, apply_display_filter=True):
        """
            Gets the list of nodes in the database

            Calls the persistence with input filters to fetch the list of nodes
            After fetching, the nodes list is filtered before sending
            as output in order to send relevant information only

            Parameters:
                filter_json [json] : json with filter key & value pairs

            Return:
                Success -> [list] : returns list of nodes
                Failure -> [str] : returns persistence failure response
        """
        self.logger.debug(f"filter_json is : {filter_json}")
        self.logger.info("getting the list of nodes from persistence")
        nodes = self.persistence_manager.find("nodes", filter_json)
        self.logger.info("filtering nodes before sending as output")
        if apply_display_filter:
            filtered_nodes = []
            for node_json in nodes:
                temp_node = Node(node_json)
                temp_node.filter_display_fields()
                filtered_nodes.append(temp_node.data)
            nodes = filtered_nodes
        self.logger.debug("Output of Nodes sent : ", nodes)
        return nodes

    def provision_node(self, provision_json):
        """
            provisions a node either for deployment or development

            checks if node is available and then connects to the server
            through ssh and runs appropriate packages on the server

            Parameters:
                node_id [str]: id i.e. name of the node
                provision_json [json] : json with node provision info

            Return:
                Success -> 'OK' [str] : returns OK if provision_node succeeds
                Failure -> [str] : returns appropriate failure response
        """
        self.logger.debug(f"provision_node info is: {provision_json}")
        self.logger.info("provision of a node is requested")
        new_node = Node(provision_json)
        new_node.provision_info_check()
        address = provision_json["address"]
        node_id_json = {"address": address}
        node_type = provision_json['nodetype']
        self.logger.info("checking persistence if the node is registered")
        node = self.persistence_manager.find("nodes", node_id_json)
        if len(node) == 0:
            self.logger.error("Node not found")
            raise NodeNotFoundException("Node not found to provision")

        for (key, val) in node[0].items():
            new_node.set(key, val)

        self.logger.debug(new_node.data)
        new_node.provision_node_check(provision_json, self.persistence_manager)
        self.logger.debug("provision_node_check passed")
        # checks if ip address of the node and its type is provided or not
        if node_type != 'DEVVM':
            self.logger.debug("updating cluster")
            new_node.update_cluster(provision_json, self.persistence_manager)
        self.logger.info("provision node in progress")
        provision_status = new_node.provision_node_setup()
        if provision_status == 1:
            self.logger.info("provision of node is successful")
            update_json = {"provisionStatus": True, "nodetype": node_type}
            if node_type == 'CLUSTER_MASTER':
                update_json["cluster"] = provision_json["cluster"]
            elif node_type == 'CLUSTER_WORKER':
                update_json["masterip"] = provision_json["masterip"]
            else:
                update_json["flavor"] = provision_json["flavor"].lower()

            self.persistence_manager.update("nodes", node_id_json, update_json)
        elif provision_status == 0:
            self.logger.error("provision failed: kubernetes error")
            raise ProvisionKubernetesException("Provision Failed")
        else:
            self.logger.error('given provision node data is invalid')
            raise InvalidProvisionInfoException("Provision data is invalid")

    def deactivate_node(self, node_id):
        """
            Deactivates a node in persistence

            Deletes all the installed packages of the node on server
            then deactivates the node in database

            Parameters:
                node_id [str] : name of the node

            Return:
                returns an XprResponse object on success
        """
        self.logger.info(f"request received for deactivating node {node_id}")
        node_id_json = {"address": node_id}
        self.logger.info("checking persistence if node is present or not")
        nodes = self.persistence_manager.find("nodes", node_id_json)

        if not nodes:
            raise NodeNotFoundException("Node not found for deactivation")

        if 'activationStatus' in nodes[0] and not nodes[0]['activationStatus']:
            self.logger.error("This node is already deactivated")
            raise NodeDeactivatedException()

        new_node = Node(nodes[0])

        # deletes all the packages installed on the node
        self.logger.info("deleting all packages on the node")
        node_deprovision = 1
        if new_node.data["provisionStatus"]:
            # deprovision shall be called only on provisioned nodes
            node_deprovision = new_node.deprovision_node()
        if node_deprovision == 1:
            self.logger.info("deleted all of the packages on node")
            # deletes the node entry from the database
            self.logger.info('deactivating node from persistence')
            deactivate_json = {
                "activationStatus": False,
                "provisionStatus": False
            }
            self.persistence_manager.update("nodes", node_id_json,
                                            deactivate_json)
            return XprResponse('success', '', {})
        else:
            self.logger.error('Node deletion failed: kubernetes error')
            raise ProvisionKubernetesException("Deactivation Failed")

    def assign_node(self, assign_json):
        """
            assigns a node to a user

            assigns a node with development vm type to a user

            Parameters:
                assign_json [json] : Json with assignation info

            Return:
                returns appropriate output
        """
        if 'user' not in assign_json or 'node' not in assign_json:
            self.logger.error("Incomplete information in assign_json")
            raise IncompleteNodeInfoException("user and node info is required")
        elif not len(assign_json['user']) or not len(assign_json['node']):
            self.logger.error("Incomplete information in assign_json")
            raise IncompleteNodeInfoException(
                "user & node info shouldn't be empty")

        uid_json = {"address": assign_json['node']}
        user = assign_json['user']
        users = UserManager(self.persistence_manager).get_users({"uid": user})
        nodes = self.persistence_manager.find('nodes', uid_json)
        if len(users) == 0:
            raise UserNotFoundException("User not found")
        elif len(nodes) == 0:
            raise NodeNotFoundException("Node not found")
        else:
            if 'provisionStatus' not in nodes[0]:
                raise UnProvisionedNodeException("Node is not provisioned")
            elif not nodes[0]['provisionStatus']:
                raise UnProvisionedNodeException("Node is not provisioned")
            elif nodes[0]['nodetype'] != 'DEVVM':
                raise NodeTypeException(
                    "Assign only works for nodes of type DEVVM")

            user_nodes = []
            for node_dict in users[0]['nodes']:
                user_nodes.append(node_dict['address'])
            if assign_json['node'] in user_nodes:
                raise NodeAlreadyAssignedException()

        new_node = Node(nodes[0])
        out, err = new_node.assign_node_to_user(user)
        try:
            if not len(out.readlines()) and len(err.readlines()):
                self.logger.error("Assignation failed because of errors")
                raise NodeAssignException(
                    "Assignation failed due to internal error")
            else:
                temp_node = {'address': nodes[0]['address']}
                nodes = [] if 'nodes' not in users[0] else users[0]['nodes']
                nodes.append(temp_node)
                self.persistence_manager.update('users', {"uid": user},
                                                {"nodes": nodes})
        except Exception:
            self.logger.error("caught exception while assigning node")
            raise NodeAssignException(
                "Assignation failed due to internal error")

    def modify_node(self, changes_json):
        """
            modify_node updates the node info in the persistence

            checks if node is available and then updates
            the info as per changes_json

            Parameters:
                changes_json [json] : json with node changes info

            Return:
                returns xprresponse object
        """
        if 'address' not in changes_json:
            raise IncompleteNodeInfoException("Node address not provided")

        uid_json = {"address": changes_json['address']}
        self.logger.info(f"Modifying node information of {uid_json}")
        self.logger.debug(f"Info provided to be modified is {changes_json}")
        # checks if the user is present in persistence
        self.logger.info("Checking if the node is present in the persistence")
        node = self.persistence_manager.find("nodes", uid_json)
        if len(node) == 0:
            self.logger.error(
                f"node {uid_json['address']} not found in the persistence")
            raise NodeNotFoundException()

        if 'activationStatus' in changes_json and \
                not changes_json['activationStatus']:
            raise CallDeactivateNodeException()

        self.logger.info("updating the user information")
        self.persistence_manager.update("nodes", uid_json, changes_json)

    def delete_node(self, node_id):
        """
            deletes the node from persistence

            Deletes all the installed packages of the node on server
            then deletes the node from database

            Parameters:
                node_id [str] : name of the node

            Return:
                nothing; raises an exception on failure
        """
        self.logger.info(f"request received for deactivating node {node_id}")
        node_id_json = {"address": node_id}
        self.logger.info("checking persistence if node is present or not")
        nodes = self.persistence_manager.find("nodes", node_id_json)

        if nodes:
            self.logger.info("deleting all packages on the node")
            new_node = Node(nodes[0])
            node_deletion = new_node.deprovision_node()
            if node_deletion == 1:
                self.logger.info("deleted all of the packages on node")
                # deletes the node entry from the database
                self.logger.info('deleting node from persistence')
                self.persistence_manager.delete("nodes", node_id_json)
            else:
                self.logger.error('Node deletion failed: kubernetes error')
                raise NodeDeletionKubernetesException()
        else:
            raise NodeNotFoundException()

    def update_all_nodes(self, filter_json=None, branch_name="master"):
        """
        Update the xpresso project in all the nodes
        Args:
            filter_json: dictionary to select the specific set of nodes
                         to update
            branch_name: name of the branch to which the xpresso project
                         will be updated

        Returns:
            (list, list): list of updated nodes and list of non-updated nodes
        """

        if filter_json is None:
            filter_json = {}
        filtered_node_list = self.get_nodes(filter_json=filter_json)
        updated_list = []
        non_updated_list = []

        update_cmd = (
            f"cd {self.config['general']['package_path']} && "
            f"python3 xpresso/ai/admin/infra/clients/xpr_pkg.py "
            f"--conf config/common.json "
            f"--type install "
            f"--package UpdateLocalXpressoPackage "
            f"--parameters '{{\"branch_name\": \"{branch_name}\"}}' && "
            f"cp config/common_{self.config['env']}.json "
            f"config/common.json ")
        self.logger.debug(update_cmd)
        for node in filtered_node_list:
            node_address = node["address"]
            ssh_client = SSHUtils(node_address)

            if ssh_client.client is None:
                self.logger.warning(
                    f"unable to login to server: {node_address}")
                non_updated_list.append(node_address)
                continue
            std_response = ssh_client.exec(update_cmd)
            self.logger.debug(f"\n\n STDERR : \n{std_response['stderr']}\n")
            if std_response['status'] == 0:
                updated_list.append(node_address)
            else:
                non_updated_list.append(node_address)
            ssh_client.close()
        return updated_list, non_updated_list
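
# --- Usage sketch for update_all_nodes (illustrative) ---
#   updated, failed = node_manager.update_all_nodes(
#       filter_json={"nodetype": "DEVVM"}, branch_name="develop")
# Nodes that cannot be reached over SSH, or where the update command exits
# non-zero, end up in the second list.
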
class KubeflowUtils:
    def __init__(self, persistence_manager):
        self.persistence_manager = persistence_manager
        self.kubernetes_manager = KubernetesManager(persistence_manager)
        self.logger = XprLogger()

    def install_kubeflow(self, master_node, namespace):
        """
        installs kubeflow on a given node, in a given namespace
        Args:
            master_node: master node of the cluster
            namespace: namespace in which kubeflow is to be installed

        Returns: nothing

        """
        pass

    def fetch_ambassador_port(self, master_node, namespace):
        """
        Fetches the port on which ambassador is running
        Args:
            master_node: master node IP of the cluster
            namespace: namespace on which ambassador is deployed

        Returns: ambassador nodePort

        """
        self.logger.info('entering fetch_ambassador_port method')
        self.kubernetes_manager.set_api_config(master_node)
        k8s_beta = client.CoreV1Api()
        try:
            s = k8s_beta.read_namespaced_service(name='ambassador',
                                                 namespace=namespace)
            ambassador_port = s.spec.ports[0].node_port
        except ApiException as e:
            self.logger.error(f'Ambassador port fetching failed. Details : '
                              f'{e.status, e.body}')
            raise AmbassadorPortFetchException(
                'Failed to fetch ambassador port.')
        self.logger.info('exiting fetch_ambassador_port method')
        return ambassador_port

    def set_kubeflow_api_config(self, master_node, ambassador_port):
        """
        sets the Kubeflow API config
        Args:
            ambassador_port: ambassador's service nodePort
            master_node: address of the master node

        Returns: nothing

        """
        self.logger.info('entering set_kubeflow_api_config method')
        try:
            master_info = self.persistence_manager.find(
                'nodes', {"address": master_node})
            token = master_info[0]['token']
        except (IndexError, KeyError):
            self.logger.error("Token retrieval from master node failed.")
            raise IncorrectTokenException(
                "Token retrieval from master node failed.")
        config = kfp_server_api.configuration.Configuration()
        config.verify_ssl = False
        config.debug = True
        config.host = f'http://{master_node}:{ambassador_port}/pipeline'
        config.api_key = {"authorization": "Bearer " + token}
        self.logger.info('exiting set_kubeflow_api_config method')
        return config

    def upload_pipeline_to_kubeflow(self, master_node, namespace,
                                    pipeline_zip):
        """
        uploads given kubeflow pipeline on the given cluster
        Args:
            namespace: namespace on which kubeflow is installed
            master_node: master node IP of the cluster
            pipeline_zip: zip file containing the pipeline yaml

        Returns: ambassador nodePort

        """
        self.logger.info('entering upload_pipeline_to_kubeflow method')
        ambassador_port = self.fetch_ambassador_port(master_node, namespace)
        self.logger.debug('fetched ambassador port')
        config = self.set_kubeflow_api_config(master_node, ambassador_port)
        api_client = kfp_server_api.api_client.ApiClient(config)
        try:
            upload_client = kfp_server_api.api.PipelineUploadServiceApi(
                api_client)
            upload_client.upload_pipeline(pipeline_zip)
        except KFApiException as e:
            if e.status == 500:
                self.logger.error('Trying to upload already existing pipeline')
                raise PipelineUploadFailedException(
                    'Pipeline already exists. Please choose a different name.')
            else:
                self.logger.error(f'Pipeline upload failed. Reason : {e.body}')
                raise PipelineUploadFailedException(e.body)
        return ambassador_port
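
# --- Usage sketch for upload_pipeline_to_kubeflow (illustrative; addresses
# and paths are placeholders) ---
#   utils = KubeflowUtils(persistence_manager)
#   port = utils.upload_pipeline_to_kubeflow(
#       "10.0.0.10", "kubeflow", "/tmp/pipeline.zip")
# The returned ambassador nodePort is where the Kubeflow dashboard serves
# the uploaded pipeline.
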
class DeclarativePipelineBuilder:

    # all the pipeline reference variables are stored in this list as they
    # are discovered, so that any faulty reference (one not present in the
    # list) can be detected later
    reference_array = []

    def __init__(self, persistence_manager):
        self.kubeflow_utils = KubeflowUtils(persistence_manager)
        self.logger = XprLogger()
        self.executor = LocalShellExecutor()
        config_path = XprConfigParser.DEFAULT_CONFIG_PATH
        self.config = XprConfigParser(config_path)
        self.declarative_pipeline_folder = self.config[PROJECTS_SECTION][
            DECLARATIVE_PIPELINE_FOLDER]
        self.content = self.declarative_pipeline_folder_check()

    def declarative_pipeline_folder_check(self):
        """
        checks whether declarative pipeline folder is present
        Returns: contents of template

        """
        if not os.path.isdir(self.declarative_pipeline_folder):
            os.makedirs(self.declarative_pipeline_folder, permission_755)
        kubeflow_template = self.config[PROJECTS_SECTION][KUBEFLOW_TEMPLATE]
        try:
            with open(kubeflow_template, 'r') as f:
                template_content = f.read()
                return template_content
        except FileNotFoundError:
            self.logger.error('kubeflow template file not found')

    def prevalidate_declarative_json(self, pipeline_info):
        """
        Validates (with dummy data) if the pipeline yaml file is being created
        properly before adding pipeline as a part of project.
        Args:
            pipeline_info: declarative JSON file

        Returns: nothing

        """
        self.logger.info('entering prevalidate_declarative_json')
        temp_component_images = {}
        self.logger.info('creating dict with temporary component images')
        for component in pipeline_info['components']:
            self.validate_component_keys(component.keys())
            temp_component_images[
                component['xpresso_reference']] = "temp_image"
        self.generate_pipeline_file(pipeline_info, temp_component_images, 0)
        self.logger.info('Pipeline validated.')

    def check_for_reference(self, value):
        """
        Checks if the provided value has any faulty reference.
        Args:
            value: value to be checked

        Returns: raises exception if reference is not found

        """
        self.logger.info(f'entering check_for_reference to '
                         f'validate {value}')
        if '.output' in value:
            reference = value.split('.')[0]
            self.check_for_reference(reference)
            if open_parenthesis in reference:
                # in case of typecasting
                reference = reference.split(open_parenthesis)[1]
            if reference not in self.reference_array:
                self.logger.error(f'Reference "{reference}" not found.')
                raise ReferenceNotFoundException(
                    f'Reference "{reference}" not '
                    f'found in declarative JSON')
        self.logger.info('Reference validated. Exiting.')

    def modify_for_function_parameters(self, func_params):
        """
        modifies a string (json key-value pair) to be used as a function's
        parameters
        Args:
            func_params: json key-value pair string (in xpresso defined format)

        Returns: modified string, fit for using as a function's parameters

        """
        self.logger.info('entering modify_for_function_parameters')
        param_list = []
        for key, value in func_params.items():
            modified_key = key.replace(variable_indicator, "")
            if variable_indicator not in str(value):
                if double_quote in str(value):
                    value = value.replace(double_quote, escape_quote)
                modified_value = f'"{value}"'
            else:
                modified_value = value.replace(variable_indicator, "")
            # check for any faulty reference
            self.check_for_reference(modified_value)
            param_list.append(f'{modified_key}={modified_value}')
            self.reference_array.append(modified_key)
        result = ', '.join(param_list)
        self.logger.info(f'exiting modify_for_function_parameters with '
                         f'output {result}')
        return result
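    # --- Worked example (illustrative; assumes variable_indicator == "$$",
    # as the '$$name$$' key used elsewhere in this class suggests) ---
    # An input of {"$$epochs$$": "10", "$$data$$": "$$loader.output$$"}
    # produces the parameter string:
    #   epochs="10", data=loader.output
    # Literal values are quoted; indicator-wrapped values are treated as
    # references and validated against reference_array.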

    def modify_for_function_variables(self, func_vars):
        """
        modifies a string (json key-value pair) to be used as a function's
        variables
        Args:
            func_vars: json key-value pair string (in xpresso defined format)

        Returns: modified string, fit for use as a function's variables

        """
        self.logger.info('entering modify_for_function_variables')
        result = """"""
        for key, value in func_vars.items():
            modified_key = key.replace(variable_indicator, "")
            if variable_indicator not in value:
                if double_quote in value:
                    value = value.replace(double_quote, escape_quote)
                modified_value = f'"{value}"'
            else:
                modified_value = value.replace(variable_indicator, "")
            # check for any faulty reference
            self.check_for_reference(modified_value)
            self.reference_array.append(modified_key)
            result = result + f'{modified_key} = {modified_value}\n\t'
        self.logger.info(f'exiting modify_for_function_variables with '
                         f'output {result}')
        return result

    def validate_declarative_json(self, pipeline_info):
        """
        validates the mandatory fields in the provided declarative json
        Args:
            pipeline_info: contents of the json file

        Returns: Raises exception in case of inconsistency

        """
        self.logger.info('entering validate_declarative_json method')
        if not pipeline_info:
            self.logger.error('Declarative JSON empty.')
            raise IncorrectDeclarativeJSONDefinitionException(
                'Declarative JSON empty.')
        pipeline_fields = [
            'name', 'description', 'pvc_name', 'components', 'main_func_params'
        ]
        for field in pipeline_fields:
            if field not in pipeline_info.keys():
                self.logger.error(f'Field "{field}" not present in '
                                  f'declarative JSON')
                raise IncorrectDeclarativeJSONDefinitionException(
                    f'Field "{field}" not present in declarative JSON')

    def validate_component_keys(self, component_keys):
        """
        Validates if the component has all default keys present
        Args:
            component_keys: keys present in the component

        Returns: nothing

        """
        default_keys = [
            'name', 'xpresso_reference', 'description', 'inputs',
            'input_values', 'implementation'
        ]
        for key in default_keys:
            if key not in component_keys:
                self.logger.error(f'Key "{key}" is missing from one or more '
                                  f'components in pipeline JSON')
                raise ComponentsSpecifiedIncorrectlyException(
                    f'Key "{key}" is missing from one or more components '
                    f'in pipeline JSON')

    def generate_pipeline_file(self, pipeline_info, component_images,
                               pipeline_deploy_id):
        """
        generates a python dsl pipeline file using the provided declarative
        json, executes it and uploads the pipeline to kubeflow.
        Args:
            component_images: dict of pipeline components and their
            corresponding docker images
            pipeline_info: declarative json file containing info
            about pipeline
            pipeline_deploy_id : deploy version id of pipeline fetched from
                                database
        Returns: ambassador port to view the pipeline on dashboard

        """
        self.logger.info('entering generate_python_file method')
        self.logger.debug('reading declarative json')

        # check for mandatory fields
        self.validate_declarative_json(pipeline_info)

        # generate code to load pipeline component objects
        components_info = self.generate_pipeline_component_objects(
            pipeline_info)

        # populate the pipeline name and description
        self.populate_name_and_description(pipeline_info)

        # populate main function's parameters
        self.populate_main_func_parameters(pipeline_info)

        # populate main function's variables, if any
        self.populate_main_func_variables(pipeline_info)

        # populate container op, if present
        self.populate_container_op(pipeline_info)

        # generate and populate component definitions with inputs
        self.populate_component_definitions(pipeline_info, components_info)

        # update pipeline yaml location
        pipeline_yaml_location = self.update_pipeline_yaml_location(
            pipeline_deploy_id, pipeline_info)

        # finally, populate and generate the python file
        self.generate_pipeline_python_file(pipeline_deploy_id, pipeline_info)

        # create yaml file for the generated python file to read components from
        self.create_pipeline_yaml(component_images, pipeline_info,
                                  pipeline_yaml_location)

        # run the generated python file to generate the zip file
        self.logger.debug('running generated python file')
        pipeline_file = f'{self.declarative_pipeline_folder}' \
            f'/{pipeline_info["name"]}--declarative_pipeline' \
            f'_{pipeline_deploy_id}.py'
        run_pipeline_python = f'python {pipeline_file}'
        status = self.executor.execute(run_pipeline_python)
        if status:
            raise IncorrectDeclarativeJSONDefinitionException(
                "Failed to run pipeline dsl file. "
                "Please re-check the declarative JSON file.")
        pipeline_zip = f'{pipeline_file}.zip'
        return pipeline_zip

    def create_pipeline_yaml(self, component_images, pipeline_info,
                             pipeline_yaml_location):
        """
        creates yaml file for dsl code to read components from
        Args:
            component_images: dict of pipeline components and their
            corresponding docker images
            pipeline_info: pipeline info from declarative json
            pipeline_yaml_location: location where the file is to be generated

        Returns: nothing

        """
        self.logger.debug('creating yaml for generated python file')
        temp_pipeline = deepcopy(pipeline_info)
        modified_components = temp_pipeline['components']
        for component in modified_components:
            component['implementation']['container']['image'] \
                = component_images[component['xpresso_reference']]
            del component['xpresso_reference']
            del component['input_values']
        data_to_insert = {"components": modified_components}
        with open(pipeline_yaml_location, 'w+') as f:
            f.write(yaml.dump(data_to_insert))

    def generate_pipeline_python_file(self, pipeline_deploy_id, pipeline_info):
        """
        generates pipeline python file
        Args:
            pipeline_deploy_id: deploy version id of pipeline fetched from
                                database
            pipeline_info: pipeline info from declarative json

        Returns: nothing

        """
        self.logger.debug('generating python file')
        with open(
                f'{self.declarative_pipeline_folder}/{pipeline_info["name"]}'
                f'--declarative_pipeline_{pipeline_deploy_id}.py', 'w+') as f:
            f.write(self.content)

    def update_pipeline_yaml_location(self, pipeline_deploy_id, pipeline_info):
        """
        updates location where pipeline yaml will be generated
        Args:
            pipeline_deploy_id: deploy version id of pipeline fetched from
                                database
            pipeline_info: pipeline info from declarative json

        Returns: yaml location

        """
        pipeline_yaml_location = f"{self.declarative_pipeline_folder}" \
            f"/{pipeline_info['name']}--pipeline_components_file_" \
            f"{pipeline_deploy_id}.yaml"
        self.content = self.content.replace('%pipeline_yaml_location%',
                                            f"'{pipeline_yaml_location}'")
        return pipeline_yaml_location

    def populate_container_op(self, pipeline_info):
        """
        populates container op
        Args:
            pipeline_info: pipeline info from declarative json

        Returns: nothing

        """
        if 'container_op' not in pipeline_info.keys():
            self.logger.debug('container op not present')
            self.content = self.content.replace('%container_op%', '')
        else:
            self.logger.debug('populating container op')
            checkout = f"""\t{str(pipeline_info['container_op'][
                                      '$$name$$'])} = dsl.ContainerOp({self.modify_for_function_parameters(
                pipeline_info['container_op'])})"""
            if 'after_dependencies' in pipeline_info and \
                    'checkout' in pipeline_info['after_dependencies']:
                checkout = checkout + f"""\n\n\tcheckout.after({
                pipeline_info['after_dependencies']['checkout']})"""
            self.reference_array.append('checkout')
            self.content = self.content.replace('%container_op%', checkout)

    def populate_main_func_variables(self, pipeline_info):
        """
        populates main function variables
        Args:
            pipeline_info: pipeline info from declarative json

        Returns: nothing

        """
        if 'main_func_variables' in pipeline_info.keys():
            self.logger.debug("populating main function's variables")
            main_variables = "\t" + self.modify_for_function_variables(
                pipeline_info['main_func_variables'])
            self.content = self.content.replace('%main_function_variables%',
                                                main_variables)
        else:
            self.logger.debug('No variables found for main function')
            self.content = self.content.replace('%main_function_variables%',
                                                '')

    def generate_pipeline_component_objects(self, pipeline_info):
        """
        generates code to load pipeline component objects
        Args:
            pipeline_info: pipeline info from declarative json

        Returns: components info

        """
        self.logger.info('generating code to load pipeline component objects')
        pipeline_comps = ""
        components_info = pipeline_info['components']
        self.reference_array.extend([comp['name'] for comp in components_info])
        for index, component in enumerate(components_info):
            self.validate_component_keys(component.keys())
            pipeline_comps = pipeline_comps + f"{component['name']}_ = " \
                f"components.load_component_from_text(str(" \
                f"component_info[{index}]))\n"
        self.content = self.content.replace('%load_components%',
                                            pipeline_comps)
        return components_info
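
    # For a component named "train" at index 0, the line generated above reads
    # (modulo whitespace):
    #   train_ = components.load_component_from_text(str(component_info[0]))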

    def populate_name_and_description(self, pipeline_info):
        """
        populates the pipeline name and description
        Args:
            pipeline_info: pipeline info from declarative json

        Returns: nothing

        """
        self.logger.debug('populating the pipeline name and description')
        self.content = self.content.replace("%pipeline_name%",
                                            f"'{pipeline_info['name']}'")
        self.content = self.content.replace(
            '%pipeline_description%', f"'{pipeline_info['description']}'")

    def populate_main_func_parameters(self, pipeline_info):
        """
        populates main function parameters
        Args:
            pipeline_info: pipeline info from declarative json

        Returns: nothing

        """
        self.logger.debug("populate main function's parameters")
        main_params = self.modify_for_function_parameters(
            pipeline_info['main_func_params'])
        self.content = self.content.replace('%main_function_params%',
                                            main_params)

    def populate_component_definitions(self, pipeline_info, components_info):
        """
        populates component definitions
        Args:
            pipeline_info: pipeline info from declarative json
            components_info: components info in declarative json

        Returns: nothing

        """
        self.logger.debug('populating component definitions with inputs')
        component_definitions = ""
        for index, component in enumerate(components_info):
            if index == 0:
                add_pvc = \
                    f"add_volume(k8s_client.V1Volume(name='pipeline-nfs', " \
                    f"persistent_volume_claim=k8s_client." \
                    f"V1PersistentVolumeClaimVolumeSource(claim_name=" \
                    f"'{pipeline_info['pvc_name']}'))).add_volume_mount(" \
                    f"k8s_client.V1VolumeMount(" \
                    f"mount_path='/data', name='pipeline-nfs'))"
            else:
                add_pvc = "add_volume_mount(k8s_client.V1VolumeMount(" \
                          "mount_path='/data', name='pipeline-nfs'))"
            component_definitions = \
                component_definitions + \
                f"\t{component['name']} = {component['name']}_(" \
                f"{self.modify_for_function_parameters(component['input_values'])}).{add_pvc}\n\n"

            if 'after_dependencies' in pipeline_info.keys():
                if component['name'] in pipeline_info[
                        'after_dependencies'].keys():
                    component_definitions = \
                        component_definitions + \
                        f"\t{component['name']}.after({pipeline_info['after_dependencies'][component['name']]})\n\n"
        self.content = self.content.replace('%component_definitions%',
                                            component_definitions)
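
    # For the first component, named e.g. "train", this emits roughly:
    #   train = train_(<input values>).add_volume(...).add_volume_mount(...)
    # Later components only re-mount the already-created 'pipeline-nfs'
    # volume, and an optional .after(...) call encodes ordering dependencies.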
Example #12
class UserManager:

    CONTROLLER_SECTION = 'controller'
    TOKEN_EXPIRY = 'soft_expiry'
    LOGIN_EXPIRY = 'hard_expiry'
    AUTHENTICATION_TYPE = "authentication_type"

    def __init__(self, persistence_manager):
        self.logger = XprLogger()
        self.config = XprConfigParser()
        self.persistence_manager = persistence_manager
        self.ldapmanager = LdapManager()

    def register_user(self, user_json):
        """
        register a new user in the persistence

        checks if the user already exists and then adds to persistence

        Parameters:
            user_json [json]: json with user information

        Return:
            Success -> 'OK' [str] : returns 'OK' as response
            Failure -> [str] : returns appropriate failure response
        """
        self.logger.debug(f"Entered register_user with {user_json}")
        # create user object
        new_user = User(user_json)
        # run validations
        new_user.validate_mandatory_fields()
        new_user.validate_field_values()
        # valid inputs - exception would have been raised in case of missing /
        # invalid info

        # now, set other fields as required
        # Password should not be saved as plain text in database.
        # Encrypting the password before saving it to database
        self.logger.info("Registering a new user")
        user_pwd = user_json["pwd"]
        new_user.set('pwd', sha512_crypt.encrypt(user_json['pwd']))
        new_user.set('loginStatus', False)
        new_user.set('activationStatus', True)
        new_user.set('nodes', [])
        self.logger.info("adding user to the database")

        self.persistence_manager.insert("users", new_user.data, False)
        self.logger.info("user successfully added to the persistence")

        try:
            self.ldapmanager.add(user_json["uid"], user_pwd)
        except Exception as e:
            self.logger.error("Unable to add user")
            print("unable to add user to ldap server : ", e)
            return XprResponse("failure", None, str(e))

        # NFS User directory changes
        self.logger.info("Setting up NFS for the user")
        nfs_manager = NFSUserManager(config=self.config)
        nfs_manager.setup_user_folder(user=user_json['uid'])
        self.logger.info("NFS set up")

        return XprResponse("success", None, None)

    def modify_user(self, filter_json, changes_json):
        """
            modify_user updates the user info in the persistence

            checks if user is available and then updates
            the info as per changes_json

            Parameters:
                filter_json: filter to find user
                changes_json: json with user changes info

            Return:
                Success -> 'OK' [str] : returns 'OK' if the update succeeds
                Failure -> [str] : returns appropriate failure response
        """

        self.logger.debug(
            f"Modifying user information of {filter_json} to {changes_json}")
        # checks if the user is present in database
        self.logger.info("Checking if the user is present in the database")
        users = self.persistence_manager.find("users", filter_json)
        if not users:
            self.logger.error(f"user {filter_json} not found in the database")
            raise UserNotFoundException()

        # checks if the user password is also present in changes_json
        temp_user = User(changes_json)
        temp_user.validate_field_values()
        temp_user.validate_modifiable_fields()

        self.logger.info("updating the user information")
        self.persistence_manager.update("users", filter_json, changes_json)
        return XprResponse('success', '', {})

    def deactivate_user(self, uid):
        """
            Deactivates a user

            Marks the user's activationStatus as False in the database

            Parameters:
                uid [str] : uid of the user

            Return:
                returns appropriate output
        """
        uid_json = {"uid": uid}
        # deletes the user from persistence

        del_users = self.persistence_manager.find("users", uid_json)
        if del_users:
            self.logger.info(f"deactivating the user {uid_json['uid']}")
            if 'activationStatus' in del_users[0] and \
                    del_users[0]['activationStatus']:
                self.persistence_manager.update("users", uid_json,
                                                {"activationStatus": False})
                self.logger.info(
                    f"user {uid_json['uid']} successfully deactivated")
                return XprResponse('success', '', {})
            else:
                raise DeactivatedUserException
        else:
            raise UserNotFoundException()

    def get_users(self, filter_json, apply_display_filter=True):
        """
            Calls the persistence with input filters to fetch the list of users.
            After fetching, the users list is filtered before sending
            as output in order to send relevant information only

            Parameters:
                filter_json [json] : json with filter key & value pairs

            Return:
                Success -> [list] : returns list of users
                Failure -> [str] : returns persistence failure response
        """
        self.logger.info("getting all the users in the persistence")
        self.logger.debug(f"filter_json is : {filter_json}")
        users = self.persistence_manager.find("users", filter_json)

        # filter user fields before sending the output
        if apply_display_filter:
            new_json = []
            for user_json in users:
                user = User(user_json)
                user.filter_display_fields()
                new_json.append(user.data)
            users = new_json
        # get users call retrieves whole user info from persistence
        # Filtering the data that needs to be shown as output
        self.logger.debug(f'Output of users sent: {users}')
        return users

    def update_password(self, password_json):
        """
        Updates user password

        Checks the password and updates the password on ldap and database

        :param password_json:
            contains the uid, old password & new password
        :return:
            raises exception in case of error
        """
        # uid is mandatory
        if "uid" not in password_json:
            self.logger.info("uid not provided for update password")
            raise IncompleteUserInfoException("User 'uid' not provided")
        uid_json = {"uid": password_json["uid"]}
        # fetches the user information
        users = self.persistence_manager.find("users", uid_json)
        if not len(users):
            self.logger.info("User not found for updating password")
            raise UserNotFoundException()
        # creates user object
        new_user = User(users[0])
        old_password_hash = users[0]["pwd"]
        old_password = password_json["old_pwd"]
        new_password = password_json["new_pwd"]
        # checks if the old password provided is same as the one saved in db
        if not sha512_crypt.verify(old_password, old_password_hash):
            raise InvalidPasswordException("Current password is incorrect")
        # Current and new password should not be same
        if old_password == new_password:
            raise InvalidPasswordException("Current and new password is same.")
        # checks if the password is valid and secure enough
        new_user.check_password(password_json["new_pwd"])
        # updates the password on ldap server
        self.ldapmanager.update_password(password_json["uid"], old_password,
                                         new_password)
        hashed_pwd = sha512_crypt.encrypt(new_password)
        update_json = {"pwd": hashed_pwd}
        self.persistence_manager.update("users", uid_json, update_json)
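

# A minimal usage sketch for UserManager (hypothetical: `persistence` stands
# in for whichever persistence manager implementation the deployment wires
# in; the payload carries the User mandatory fields):
#
#   manager = UserManager(persistence)
#   manager.register_user({
#       "uid": "jdoe", "pwd": "S3cret1", "firstName": "Jane",
#       "lastName": "Doe", "email": "jdoe@example.com", "primaryRole": "Dev"
#   })
#   manager.update_password({"uid": "jdoe", "old_pwd": "S3cret1",
#                            "new_pwd": "N3wS3cret2"})
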
class KubernetesDeploy:
    """
    class containing methods to deploy a project in Kubernetes
    """
    def __init__(self, persistence_manager):

        self.persistence_manager = persistence_manager
        self.logger = XprLogger()
        config_path = XprConfigParser.DEFAULT_CONFIG_PATH
        self.config = XprConfigParser(config_path)
        PROJECTS_SECTION = 'projects'
        DEPLOYMENT_FILES_FOLDER = 'deployment_files_folder'
        self.deployment_files_folder = self.config[PROJECTS_SECTION][
            DEPLOYMENT_FILES_FOLDER]

        if not os.path.isdir(self.deployment_files_folder):
            os.makedirs(self.deployment_files_folder, 0o755)

    def deployment_yaml_generator(self, deployment):
        """
        generates yaml for creating deployment in kubernetes
        Args:
            deployment: Service Deployment
        Returns:
            str: path of yaml generated

        """
        self.logger.info('entering deployment_yaml_generator')
        # converting any special characters to '-'
        project_name = project_utils.modify_string_for_deployment(
            deployment.project_name)
        component = project_utils.modify_string_for_deployment(
            deployment.component_name)
        # this will be the name used in the deployment file
        deployment_name = '{}--{}'.format(project_name, component)
        # reading contents from the standard xpresso deployment yaml file
        with open("config/kubernetes-deployfile.yaml", "r") as f:
            content = f.read()
        yaml_content = self.populate_yaml_content(content, deployment,
                                                  deployment_name)

        filename = "{}/deployfile--{}.yaml".format(
            self.deployment_files_folder, deployment_name)
        with open(filename, "w+") as f:
            yaml.safe_dump(yaml_content, f)
        self.logger.info('exiting deployment_yaml_generator')
        return filename

    def persistent_volume_yaml_generator(self, deployment, persistence_type):
        """
        generates yaml for creating persistent volume
        Args:
            deployment: Any Deployment
            persistence_type: type of persistence yaml to generate
                              ("volume" or "volume-claim")
        Returns:
            str: path of yaml generated

        """
        self.logger.info('entering persistent_volume_yaml_generator')

        # converting any special characters to '-'
        project_name = project_utils.modify_string_for_deployment(
            deployment.project_name)
        component = project_utils.modify_string_for_deployment(
            deployment.component_name)
        # this will be the name used in the deployment file
        deployment_name = '{}--{}'.format(project_name, component)

        # reading contents from the standard xpresso deployment yaml file
        with open(f"config/kubernetes-persistent-{persstence_type}.yaml",
                  "r") as f:
            content = f.read()

        content = content.replace("K8_XPRESSO_COMPONENT_NAME",
                                  str(deployment_name))
        content = content.replace("K8_XPRESSO_PERSISTENT_STORAGE_SIZE",
                                  str(deployment.volume_size))
        content = content.replace("K8_XPRESSO_PROJECT_NAME", str(project_name))
        yaml_content = yaml.safe_load(content)

        filename = (
            f"{self.deployment_files_folder}"
            f"/persistent-{persstence_type}-file--{deployment_name}.yaml")
        with open(filename, "w+") as f:
            yaml.safe_dump(yaml_content, f)
        self.logger.info('exiting persistent_volume_yaml_generator')
        return filename

    def populate_yaml_content(self, content, deployment, deployment_name):
        """
        replaces the placeholder tokens in the deployfile template with
        deployment-specific values and returns the parsed yaml content
        """
        content = content.replace("K8_XPRESSO_COMPONENT_NAME",
                                  str(deployment_name))
        content = content.replace("K8_XPRESSO_COMPONENT_REPLICAS",
                                  str(deployment.replicas))
        content = content.replace("K8_XPRESSO_COMPONENT_IMAGE_NAME",
                                  str(deployment.docker_image))
        content = content.replace("K8_XPRESSO_COMPONENT_ENVIRONMENT_LIST",
                                  str(deployment.environment))
        content = content.replace("K8_XPRESSO_PROJECT_LINUX_UID",
                                  str(deployment.project_linux_uid))
        if deployment.need_persistence():
            content = content.replace("K8_XPRESSO_COMPONENT_VOLUME_MOUNT_PATH",
                                      str(deployment.volume_mount_path))

        yaml_content = yaml.safe_load(content)

        # Remove persistence if not required
        if not deployment.need_persistence():
            try:
                del yaml_content["spec"]["template"]["spec"]["volumes"]
                del yaml_content["spec"]["template"]["spec"]["containers"][0][
                    "volumeMounts"]
            except (IndexError, KeyError):
                self.logger.warning("spec.template.spec.volumes not found")
        return yaml_content
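
    # The deployfile template is expected to carry literal placeholder tokens
    # that the replacements above substitute; an illustrative fragment (not
    # the actual config/kubernetes-deployfile.yaml) might read:
    #
    #   metadata:
    #     name: K8_XPRESSO_COMPONENT_NAME
    #   spec:
    #     replicas: K8_XPRESSO_COMPONENT_REPLICAS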

    def service_yaml_generator(self, project_name, component, port):
        """
        generates yaml for creating service in kubernetes
        Args:
            project_name: project to be deployed
            component: component for which this yaml is generated
            port: array containing info of ports to be opened
        Returns: path of yaml generated

        """
        self.logger.info('entering service_yaml_generator')
        # reading contents from the standard xpresso service yaml file
        with open("config/kubernetes-servicefile.yaml", "r") as f:
            content = f.read()
        # converting any special characters to '-'
        project_name = project_utils.modify_string_for_deployment(project_name)
        component = project_utils.modify_string_for_deployment(component)
        ports = []
        for i in port:
            temp = str(i)
            fixed_port = project_utils.modify_string_for_deployment(
                temp).replace("'", '"')
            ports.append(json.loads(fixed_port))
        # this will be the name used in the service file
        service_name = '{}--{}'.format(project_name, component)
        content = content.format(service_name, ports, service_name)
        yaml_content = yaml.safe_load(content)
        filename = "{}/servicefile--{}.yaml".format(
            self.deployment_files_folder, service_name)
        with open(filename, "w+") as f:
            yaml.safe_dump(yaml_content, f)
        self.logger.info('exiting service_yaml_generator')
        return filename

    def namespace_yaml_generator(self, project_name):
        """
        generates yaml file to create a new namespace
        Args:
            project_name: name of the project to be deployed

        Returns: path of the yaml generated

        """
        self.logger.info('entering namespace_yaml_generator')
        with open("config/kubernetes-namespacefile.yaml", "r") as f:
            content = f.read()
        # converting any special characters to '-'
        project_name = project_utils.modify_string_for_deployment(project_name)
        content = content.format(project_name)
        yaml_content = yaml.safe_load(content)
        filename = "{}/namespacefile--{}.yaml".format(
            self.deployment_files_folder, project_name)
        with open(filename, "w+") as f:
            yaml.safe_dump(yaml_content, f)
        self.logger.info('exiting namespace_yaml_generator')
        return filename

    def job_yaml_generator(self, deployment):
        """
        generates yaml file to create a job
         Args:
            deployment: Any Deployment
        Returns:
            str: path of yaml generated
        """
        self.logger.info('entering job_yaml_generator')
        # reading contents from the standard xpresso job yaml file

        # converting any special characters to '-'
        project_name = project_utils.modify_string_for_deployment(
            deployment.project_name)
        component = project_utils.modify_string_for_deployment(
            deployment.component_name)
        # this will be the name used in the job file
        job_name = '{}--{}'.format(project_name, component)

        with open("config/kubernetes-jobfile.yaml", "r") as f:
            content = f.read()

        content = content.replace("K8_XPRESSO_COMPONENT_NAME", str(job_name))
        content = content.replace("K8_XPRESSO_COMPONENT_IMAGE_NAME",
                                  str(deployment.docker_image))
        content = content.replace("K8_XPRESSO_COMPONENT_ENVIRONMENT_LIST",
                                  str(deployment.environment))
        content = content.replace("K8_XPRESSO_COMPONENT_COMMAND",
                                  str(deployment.commands))
        content = content.replace("K8_XPRESSO_COMPONENT_REPLICAS",
                                  str(deployment.replicas))

        if deployment.need_persistence():
            content = content.replace("K8_XPRESSO_COMPONENT_VOLUME_MOUNT_PATH",
                                      str(deployment.volume_mount_path))
        yaml_content = yaml.safe_load(content)
        # Remove persistence if not required
        if not deployment.need_persistence():
            try:
                del yaml_content["spec"]["template"]["spec"]["volumes"]
                del yaml_content["spec"]["template"]["spec"]["containers"][0][
                    "volumeMounts"]
            except (IndexError, KeyError):
                self.logger.warning("spec.template.spec.volumes not found")
        filename = "{}/jobfile--{}.yaml".format(self.deployment_files_folder,
                                                job_name)
        with open(filename, "w+") as f:
            yaml.safe_dump(yaml_content, f)
        self.logger.info('exiting job_yaml_generator')
        return filename

    def cronjob_yaml_generator(self, project_name, component, schedule, image,
                               environment, args):
        """
        generates yaml file to create a cronjob
        :param environment: environment
        :param project_name: project name
        :param component: component name
        :param schedule: Cron Job schedule in standard Cron format
        :param image: docker image
        :param args: array of args to run
        :return: path of yaml generated
        """
        self.logger.info('entering cronjob_yaml_generator')
        if not project_utils.validate_cronjob_format(schedule):
            self.logger.error('Invalid cron schedule provided. Exiting.')
            raise InvalidCronScheduleException
        # reading contents from the standard xpresso cronjob yaml file
        with open("config/kubernetes-cronjobfile.yaml", "r") as f:
            content = f.read()
        # converting any special characters to '-'
        project_name = project_utils.modify_string_for_deployment(project_name)
        component = project_utils.modify_string_for_deployment(component)
        # this will be the name used in the job file
        cronjob_name = '{}--{}'.format(project_name, component)
        content = content.format(cronjob_name, schedule, cronjob_name, image,
                                 environment, args)
        yaml_content = yaml.safe_load(content)
        filename = "{}/cronjobfile--{}.yaml".format(
            self.deployment_files_folder, cronjob_name)
        with open(filename, "w+") as f:
            yaml.safe_dump(yaml_content, f)
        self.logger.info('exiting cronjob_yaml_generator')
        return filename
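
    # For instance, a standard five-field schedule such as "*/5 * * * *"
    # (every five minutes) would be expected to pass
    # project_utils.validate_cronjob_format above.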

    def patch_deployment_client(self, path, project_name):
        """
        helper function to patch deployment for project as a given yaml file on
        Kubernetes via the Kubernetes API
        Args:
            path: path of the yaml to be deployed
            project_name: project to be deployed (needed for namespace)
        :return: status of patching (True/Error Code)
        """
        self.logger.info('entering patch_deployment_client')
        try:
            with open(path) as f:
                dep = yaml.safe_load(f)
                k8s_beta = client.ExtensionsV1beta1Api()
                # collecting response from API
                r = k8s_beta.patch_namespaced_deployment(
                    name=dep['metadata']['name'],
                    body=dep,
                    namespace=project_utils.modify_string_for_deployment(
                        project_name))
                self.logger.debug("Deployment patched. Details : {}".format(
                    str(r)))
            self.logger.info('exiting patch_deployment_client')
            return True
        except ApiException as e:
            self.logger.error('Patching deployment failed. '
                              'Error info : {}.'.format(e))
            raise DeploymentCreationFailedException

    def deploy_client(self, path, project_name):
        """
        helper function to create deployment for a given yaml file on
        Kubernetes via the Kubernetes API
        Args:
            path: path of the yaml to be deployed
            project_name: project to be deployed (needed for namespace)

        Returns: status of deployment (True/Error Code)

        """
        self.logger.info('entering deploy_client')
        try:
            with open(path) as f:
                dep = yaml.safe_load(f)
                k8s_beta = client.ExtensionsV1beta1Api()
                # collecting response from API
                r = k8s_beta.create_namespaced_deployment(
                    body=dep,
                    namespace=project_utils.modify_string_for_deployment(
                        project_name))
                self.logger.debug("Deployment created. Details : {}".format(
                    str(r)))
            self.logger.info('exiting deploy_client')
            return True
        except ApiException as e:
            if e.status == 409:  # in case of conflict, patch the deployment
                self.patch_deployment_client(path, project_name)
                return True
            self.logger.error('Creation of deployment failed. Exiting.')
            raise DeploymentCreationFailedException
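
    # The create-then-patch idiom above (falling back to a patch on HTTP 409
    # Conflict) makes redeployment idempotent: re-running a deployment updates
    # the existing Kubernetes object instead of failing on a duplicate name.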

    def patch_service_client(self, path, project_name):
        """
                helper function to patch service for project as a given yaml
                file on Kubernetes via the Kubernetes API
                Args:
                    path: path of the yaml to be deployed
                    project_name: project to be deployed (needed for namespace)

                Returns: status of service patching (True/Error code)

                """
        self.logger.info('entering patch_service_client')
        try:
            with open(path) as f:
                dep = yaml.safe_load(f)
                k8s_beta = client.CoreV1Api()
                # collecting response from API
                r = k8s_beta.patch_namespaced_service(
                    namespace=project_utils.modify_string_for_deployment(
                        project_name),
                    body=dep,
                    name=dep['metadata']['name'])
                self.logger.debug("Service patched. Details : {}".format(
                    str(r)))
            self.logger.info('exiting patch_service_client')
            return True
        except ApiException as e:
            self.logger.error('Patching service failed. Error details : '
                              '{}'.format(e))
            if e.status == 422:  # Unprocessable Entity
                self.logger.error("Can't patch service port.")
                raise PortPatchingAttemptedException
            raise ServiceCreationFailedException

    def create_service_client(self, path, project_name):
        """
        helper function to create service for a given yaml file on
        Kubernetes via the Kubernetes API
        Args:
            path: path of the yaml to be deployed
            project_name: project to be deployed (needed for namespace)

        Returns: status of service creation (True/Error code)

        """
        self.logger.info('entering create_service_client')
        try:
            with open(path) as f:
                dep = yaml.safe_load(f)
                k8s_beta = client.CoreV1Api()
                # collecting response from API
                r = k8s_beta.create_namespaced_service(
                    namespace=project_utils.modify_string_for_deployment(
                        project_name),
                    body=dep)
                self.logger.debug("Service created. Details : {}".format(
                    str(r)))
            self.logger.info('exiting create_service_client')
            return True
        except ApiException as e:
            if e.status == 409:
                self.patch_service_client(path, project_name)
                return True
            self.logger.error('Creation of service failed. Exiting.')
            raise ServiceCreationFailedException

    def create_namespace_client(self, path):
        """
        helper function to create namespace for a given yaml file on
        Kubernetes via the Kubernetes API
        Args:
            path: path of the yaml

        Returns: status of namespace creation (True/Error Code)

        """
        self.logger.info('entering create_namespace_client')
        try:
            with open(path) as f:
                dep = yaml.safe_load(f)
                k8s_beta = client.CoreV1Api()
                r = k8s_beta.create_namespace(body=dep)
                self.logger.debug("Namespace created. Details : {}".format(
                    str(r)))
            self.logger.info('exiting create_namespace_client')
            return True
        except Exception:
            self.logger.error('Failed to create namespace. Exiting.')
            raise NamespaceCreationFailedException

    def patch_job_client(self, path, project_name):
        """
        helper function to patch a job for a given yaml file on
        Kubernetes via the Kubernetes API
        :param path: path of the yaml to be deployed
        :param project_name: project to be deployed (needed for namespace)
        :return: status of patching (True/Error Code)
        """
        self.logger.info('entering patch_job_client')
        try:
            with open(path) as f:
                dep = yaml.safe_load(f)
                k8s_beta = client.BatchV1Api()
                # collecting response from API
                r = k8s_beta.patch_namespaced_job(
                    name=dep['metadata']['name'],
                    body=dep,
                    namespace=project_utils.modify_string_for_deployment(
                        project_name))
                self.logger.debug("Job patched. Details : {}".format(str(r)))
            self.logger.info('exiting patch_job_client')
            return True
        except ApiException as e:
            self.logger.error('Patching job failed. '
                              'Error info : {}.'.format(e))
            raise JobCreationFailedException

    def create_job_client(self, path, project_name):
        """
        method to create a job in kubernetes
        :param path: path of the yaml file
        :param project_name: project name of which the job is a part
        :return: status (True/Error code)
        """
        self.logger.info('Entering create_job_client')
        try:
            with open(path) as f:
                dep = yaml.safe_load(f)
                k8s_beta = client.BatchV1Api()
                # collecting response from API
                r = k8s_beta.create_namespaced_job(
                    namespace=project_utils.modify_string_for_deployment(
                        project_name),
                    body=dep)
                self.logger.debug("Job created. Details : {}".format(str(r)))
            self.logger.info('exiting create_job_client')
            return True
        except ApiException as e:
            if e.status == 409:  # in case of conflict, patch the job
                self.patch_job_client(path, project_name)
                return True
            self.logger.error('Creation of job failed. Exiting.')
            raise JobCreationFailedException

    def patch_cronjob_client(self, path, project_name):
        """
        helper function to patch a cronjob for a given yaml file on
        Kubernetes via the Kubernetes API
        :param path: path of the yaml to be deployed
        :param project_name: project to be deployed (needed for namespace)
        :return: status of patching (True/Error Code)
        """
        self.logger.info('entering patch_cronjob_client')
        try:
            with open(path) as f:
                dep = yaml.safe_load(f)
                k8s_beta = client.BatchV1beta1Api()
                # collecting response from API
                r = k8s_beta.patch_namespaced_cron_job(
                    name=dep['metadata']['name'],
                    body=dep,
                    namespace=project_utils.modify_string_for_deployment(
                        project_name))
                self.logger.debug("CronJob patched. Details : {}".format(
                    str(r)))
            self.logger.info('exiting patch_cronjob_client')
            return True
        except ApiException as e:
            self.logger.error('Patching cronjob failed. '
                              'Error info : {}.'.format(e))
            raise CronjobCreationFailedException

    def create_cronjob_client(self, path, project_name):
        """
                method to create a cronjob in kubernetes
                :param path: path of the yaml file
                :param project_name: project name of which the cronjob is a part
                :return: status (True/Error code)
                """
        self.logger.info('Entering create_cronjob_client')
        try:
            with open(path) as f:
                dep = yaml.safe_load(f)
                k8s_beta = client.BatchV1beta1Api()
                # collecting response from API
                r = k8s_beta.create_namespaced_cron_job(
                    namespace=project_utils.modify_string_for_deployment(
                        project_name),
                    body=dep)
                self.logger.debug("Cron Job created. Details : {}".format(
                    str(r)))
            self.logger.info('exiting create_cronjob_client')
            return True
        except ApiException as e:
            if e.status == 409:  # in case of conflict, patch the cronjob
                self.patch_cronjob_client(path, project_name)
                return True
            self.logger.error('Creation of cron job failed. Exiting.')
            raise CronjobCreationFailedException

    def get_service_ip(self, deployment: ServiceDeployment):
        """
        method to get the list of IP addresses for services of a component
        Args:
            deployment: Service Deployment Object

        Returns: array of service IPs

        """
        self.logger.info('Entering get_service_ip method')
        service_name = '{}--{}'.format(
            project_utils.modify_string_for_deployment(
                deployment.project_name),
            project_utils.modify_string_for_deployment(
                deployment.component_name))
        k8s_beta = client.CoreV1Api()
        r = k8s_beta.read_namespaced_service(
            name=service_name,
            namespace=project_utils.modify_string_for_deployment(
                deployment.project_name))

        service_ips = []
        for port in r.spec.ports:
            service_ips.append('{}:{}'.format(deployment.master_node,
                                              port.node_port))
        self.logger.info('Exiting get_service_ip method')
        return service_ips

    def patch_persistence_volume(self, pv):
        """
        Helper function to patch persistence volume
        Args:
            pv: persistence volume yaml file
        :return: status of patching (True/Error Code)
        """
        self.logger.info('entering patch_persistence_volume')
        try:
            with open(pv) as f:
                dep = yaml.safe_load(f)
                k8s_beta = client.CoreV1Api()
                # collecting response from API
                r = k8s_beta.patch_persistent_volume(
                    name=dep['metadata']['name'], body=dep)
                self.logger.debug(
                    "Persistence volume patched. Details : {}".format(str(r)))
            self.logger.info('exiting patch_persistence_volume')
            return True
        except ApiException as e:
            self.logger.error('Patching PV failed. '
                              'Error info : {}.'.format(e))
            raise DeploymentCreationFailedException

    def patch_persistence_volume_claim(self, pv, pvc, project_name):
        """
        Helper function to patch persistence volume claim
        Args:
            pv: persistent volume yaml file (unused)
            pvc: persistent volume claim yaml file
            project_name: project to be deployed (needed for namespace)
        :return: status of patching (True/Error Code)
        """
        self.logger.info('entering patch_persistence_volume_claim')
        try:
            with open(pvc) as f:
                dep = yaml.safe_load(f)
                k8s_beta = client.CoreV1Api()
                # collecting response from API
                r = k8s_beta.patch_namespaced_persistent_volume_claim(
                    name=dep['metadata']['name'],
                    body=dep,
                    namespace=project_utils.modify_string_for_deployment(
                        project_name))
                self.logger.debug(
                    "Persistence volume patched. Details : {}".format(str(r)))
            self.logger.info('exiting patch_persistence_volume_claim')
            return True
        except ApiException as e:
            self.logger.error('Patching PVC failed. '
                              'Error info : {}.'.format(e))
            raise DeploymentCreationFailedException

    def create_persistence_if_required(self, deployment):
        """ Check if persistence is required, If yes then create one"""
        self.logger.debug("Checking for persistence")
        if not deployment.need_persistence():
            self.logger.debug("Persistence not needed.")
            return False

        self.logger.info("Persistence is needed")
        pv = self.persistent_volume_yaml_generator(deployment,
                                                   persistence_type="volume")
        pvc = self.persistent_volume_yaml_generator(
            deployment, persistence_type="volume-claim")
        try:
            with open(pv) as f:
                dep = yaml.safe_load(f)
                k8s_beta = client.CoreV1Api()
                # collecting response from API
                r = k8s_beta.create_persistent_volume(body=dep)
                self.logger.debug(
                    f"Persistence Volume created. Details : {str(r)}")
        except ApiException as e:
            if e.status == 409:  # in case of conflict, patch the deployment
                self.patch_persistence_volume(pv)
                return True
            self.logger.error('Creation of PV failed. Exiting.')
            raise DeploymentCreationFailedException

        try:
            with open(pvc) as f:
                dep = yaml.safe_load(f)
                k8s_beta = client.CoreV1Api()
                # collecting response from API
                r = k8s_beta.create_namespaced_persistent_volume_claim(
                    body=dep,
                    namespace=project_utils.modify_string_for_deployment(
                        deployment.project_name))
                self.logger.debug(
                    f"Persistence Volume Claim created. Details : {str(r)}")

            self.logger.info('exiting create_persistence_if_required')
            return True
        except ApiException as e:
            if e.status == 409:  # in case of conflict, patch the deployment
                self.patch_persistence_volume_claim(pv, pvc,
                                                    deployment.project_name)
                return True
            self.logger.error('Creation of PVC failed. Exiting.')
            raise DeploymentCreationFailedException

    def run_deployment_steps(self, deployment: ServiceDeployment):
        try:
            self.create_persistence_if_required(deployment)
            deployment_yaml = self.deployment_yaml_generator(deployment)
            self.deploy_client(deployment_yaml, deployment.project_name)
            self.logger.debug(f'Deployment created for '
                              f'{deployment.component_name}. '
                              f'Now creating service.')
            service_yaml = self.service_yaml_generator(
                deployment.project_name, deployment.component_name,
                deployment.ports)
            self.create_service_client(service_yaml, deployment.project_name)
            self.logger.debug(f'Service created for '
                              f'{deployment.component_name}')
            return True
        except XprExceptions:
            self.logger.error('Error while running deployment steps. '
                              'Deployment failed.')
            raise ProjectDeploymentFailedException

    def run_job_steps(self, deployment: JobDeployment):
        if deployment.is_base_job():
            try:
                self.create_persistence_if_required(deployment)
                job_yaml = self.job_yaml_generator(deployment)
                self.create_job_client(job_yaml, deployment.project_name)
                self.logger.debug(f'Job created for '
                                  f'{deployment.component_name}')
            except XprExceptions:
                self.logger.error('Error while running job steps. '
                                  'Job creation failed.')
                raise JobCreationFailedException
        elif deployment.is_cronjob():
            try:
                self.create_persistence_if_required(deployment)
                cronjob_yaml = self.cronjob_yaml_generator(deployment)
                self.create_cronjob_client(cronjob_yaml,
                                           deployment.project_name)
                self.logger.debug(f'Cronjob created for '
                                  f'{deployment.component_name}')
            except XprExceptions:
                self.logger.error('Error while running job steps. '
                                  'Cronjob creation failed.')
                raise CronjobCreationFailedException
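
# A minimal usage sketch (hypothetical: `service_deployment` is assumed to be
# a ServiceDeployment built elsewhere with project, component, image and port
# details filled in):
#
#   deployer = KubernetesDeploy(persistence_manager)
#   deployer.run_deployment_steps(service_deployment)
#   endpoints = deployer.get_service_ip(service_deployment)
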
Example #14
class Node(XprObject):
    """
    This class represents a Node
    """
    package_root_path = "/opt/xpresso.ai"

    def __init__(self, node_json=None):
        self.logger = XprLogger()
        """
        Constructor:
        """
        self.logger.debug(f"Node constructor called with {node_json}")
        super().__init__(node_json)
        print(self.data)
        # These are mandatory fields that need to be provided in node_json
        self.mandatory_fields = ["address", "name"]
        self.provision_fields = ["address", "nodetype"]
        # fields that should be displayed in the output
        self.display_fields = ["address", "name", "nodetype",
                               "provisionStatus", "activationStatus"]

        self.logger.debug("Node constructed successfully")

    def validate_node_address(self):
        """
        Check if node is accessible.

        Parameters:
            node_json [json]: json with the node info
        """
        # checks if the server address is valid
        # creates a ssh connection
        node = SSHUtils(self.data['address'])
        # checks if the SSHClient has connected to server
        if node.client is None:
            return False
        node.close()
        return True

    def provision_info_check(self):
        """
        Validates the provision info in self.data, raising
        Incomplete/InvalidProvisionInfoException on bad or missing fields.
        """
        nodetypes = ['DEVVM', 'CLUSTER_MASTER', 'CLUSTER_WORKER']
        if 'nodetype' not in self.data or 'address' not in self.data:
            raise IncompleteProvisionInfoException(
                "Please provide both nodetype and address for provision_node"
            )
        elif self.data['nodetype'] not in nodetypes:
            print("invalid node type")
            raise InvalidProvisionInfoException("Invalid node type")
        elif not len(self.data['address']):
            print("Invalid address")
            raise InvalidProvisionInfoException("Invalid node address")

        if self.data['nodetype'] == 'CLUSTER_MASTER':
            if 'cluster' not in self.data:
                raise IncompleteProvisionInfoException(
                    "Cluster info is required for provision of master node"
                )
        elif self.data['nodetype'] == 'CLUSTER_WORKER':
            if 'masterip' not in self.data:
                raise IncompleteProvisionInfoException(
                    "ip address of master node is required"
                    " for provision of worker node"
                )
        elif self.data['nodetype'] == 'DEVVM':
            if 'flavor' not in self.data:
                raise InvalidProvisionInfoException(
                    "Invalid flavor provided for provision of dev environment"
                )
            elif (self.data['flavor'].lower() != 'python') and\
                    (self.data['flavor'].lower() != 'java'):
                raise InvalidProvisionInfoException(
                    "Unknown flavor provided."
                    "Only Python & Java are supported currently"
                )
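
    # Illustrative payloads that pass this check (hypothetical addresses):
    #   {"nodetype": "CLUSTER_MASTER", "address": "10.0.0.10",
    #    "cluster": "demo-cluster"}
    #   {"nodetype": "CLUSTER_WORKER", "address": "10.0.0.12",
    #    "masterip": "10.0.0.10"}
    #   {"nodetype": "DEVVM", "address": "10.0.0.20", "flavor": "python"}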

    def provision_node_check(self, provision_json, persistence_manager):
        node_type = provision_json['nodetype']
        if 'provisionStatus' in self.data and self.data['provisionStatus']:
            raise NodeReProvisionException("Node is already Provisioned")

        if node_type == 'CLUSTER_WORKER':
            masterip = provision_json['masterip']
            master_node = persistence_manager.find(
                'nodes', {"address": masterip}
                )
            if len(master_node) == 0:
                raise InvalidMasterException("Master Node not found")
            elif master_node[0]['nodetype'] != 'CLUSTER_MASTER':
                raise InvalidMasterException(
                    "Provided master info is incorrect"
                    )
            elif not master_node[0]['provisionStatus']:
                raise MasterNotProvisionedException(
                    "Master node not yet provisioned"
                    )

    def provision_node_setup(self):
        """
        Updates the node to a master/worker on kubernetes
        """
        server = self.data['address']
        nodetype = self.data['nodetype']
        node_client = SSHUtils(server)

        if node_client.client is None:
            print("unable to login to server")
            return 0

        if nodetype == 'CLUSTER_MASTER':
            cmd = ("cd {} && git fetch && git reset --hard FETCH_HEAD && "
                   "python3 xpresso/ai/admin/infra/clients/xpr_pkg.py "
                   "--conf config/common.json "
                   "--type install "
                   "--package KubeadmDashboardPackage"
                   .format(self.package_root_path))
            print(cmd)
            std_response = node_client.exec(cmd)
            print(f"\n\n STDERR : \n{std_response['stderr']}\n\n\n")

            provision_status = 1

            if std_response['status'] != 0:
                provision_status = 0
            elif not len(std_response['stdout']) and\
                    len(std_response['stderr']):
                provision_status = 0

            node_client.close()
            return provision_status
        elif nodetype == 'CLUSTER_WORKER':
            masterip = {"master_ip": self.data['masterip']}

            cmd = (
                "cd {} && git fetch && git reset --hard FETCH_HEAD && "
                "python3 xpresso/ai/admin/infra/clients/xpr_pkg.py "
                "--conf config/common.json "
                "--type install --package KubeadmNodePackage --parameters '{}'"
                .format(self.package_root_path, json.dumps(masterip)))

            std_response = node_client.exec(cmd)
            print(f"\n\n STDERR : \n{std_response['stderr']}\n\n\n")
            provision_status = 1
            if std_response['status'] != 0:
                provision_status = 0
            elif not len(std_response['stdout']) and\
                    len(std_response['stderr']):
                provision_status = 0

            node_client.close()
            return provision_status
        else:
            print("entered devvm case")
            if 'flavor' in self.data:
                if self.data['flavor'].lower() == 'python':
                    flavor_package = 'DevelopmentPythonVMPackage'
                elif self.data['flavor'].lower() == 'java':
                    flavor_package = 'DevelopmentJavaVMPackage'
                else:
                    node_client.close()
                    return 0
            else:
                node_client.close()
                return 0

            print("flavor_package is ", flavor_package)

            cmd = ("cd {} && git fetch && git reset --hard FETCH_HEAD && "
                   "python3 xpresso/ai/admin/infra/clients/xpr_pkg.py  "
                   "--conf config/common.json "
                   "--type install "
                   "--package {}"
                   .format(self.package_root_path, flavor_package))

        print("cmd is ", cmd)
        std_response = node_client.exec(cmd)
        print(f"\n\n STDERR : \n{std_response['stderr']}\n\n\n")
        provision_status = 1
        if std_response['status'] != 0:
            provision_status = 0
        elif not len(std_response['stdout']) and len(std_response['stderr']):
            provision_status = 0

        node_client.close()
        return provision_status

    def deprovision_node(self):
        """
        Uninstalls the provisioned packages from the node over ssh.

        Returns:
            1 on success (or if the node was never provisioned), 0 on failure
        """
        server = self.data['address']
        if "provisionStatus" in self.data and not self.data["provisionStatus"]:
            return 1
        node_client = SSHUtils(server)
        if node_client.client is None:
            print("Unable to login to server.")
            return 0
        node_type = self.data['nodetype']

        if node_type == 'DEVVM':
            flavor = self.data['flavor']
            if flavor.lower() == 'python':
                flavor_package = 'DevelopmentPythonVMPackage'
            elif flavor.lower() == 'java':
                flavor_package = 'DevelopmentJavaVMPackage'
            else:
                node_client.close()
                return 0
            command = (
                "cd {} &&  "
                "python3 xpresso/ai/admin/infra/clients/xpr_pkg.py  "
                "--conf config/common.json "
                "--type uninstall "
                "--package {}".format(self.package_root_path, flavor_package)
            )
        else:
            command = (
                "cd {} && python3 xpresso/ai/admin/infra/clients/xpr_pkg.py "
                "--conf config/common.json "
                "--type uninstall --package KubeadmNodePackage"
                .format(self.package_root_path)
            )

        std_response = node_client.exec(command)
        print(f"\n\n\n {std_response['stderr']} \n\n\n")
        if not len(std_response['stdout']) and len(std_response['stderr']):
            node_client.close()
            return 0

        node_client.close()
        return 1

    def assign_node_to_user(self, user):
        connection = SSHUtils(self.data["address"])
        command = "sudo -S adduser " + user
        password = "******"
        new_user_password = "******"
        full_name = "\n"
        room_number = "\n"
        work_phone = "\n"
        home_phone = "\n"
        other = "\n"
        info_correct = "Y"
        stdin, stdout, stderr = connection.exec_client(
            command=command, password=password,
            newuserpassword=new_user_password,
            newuserpassword_confirm=new_user_password,
            Fullname=full_name, RoomNumber=room_number,
            WorkPhone=work_phone, HomePhone=home_phone,
            Other=other, InfoCorrect=info_correct)
        connection.close()
        return stdout, stderr

    @staticmethod
    def update_cluster(provision_info, persistence_manager):
        xpr_cluster = XprClusters(persistence_manager)
        node_type = provision_info['nodetype']
        if node_type == 'CLUSTER_MASTER':
            cluster_info = {
                "name": provision_info["cluster"]
            }
            clusters = xpr_cluster.get_clusters(cluster_info)
            if len(clusters) == 0:
                raise ClusterNotFoundException("This Cluster not found")

            if 'master_nodes' not in clusters[0]:
                master_nodes = {"address": ""}
            elif 'address' not in clusters[0]['master_nodes']:
                master_nodes = {"address": ""}
            else:
                master_nodes = clusters[0]['master_nodes']

            if master_nodes['address'] == provision_info['address']:
                return
            else:
                master_nodes['address'] = provision_info['address']
            # updates persistence if the master node is not present in clusters
            print("updating cluster info for master_node")
            persistence_manager.update("clusters", cluster_info,
                                       {"master_nodes": master_nodes})
        elif node_type == 'CLUSTER_WORKER':
            worker_node = {
                "address": provision_info["address"]
            }
            master_ip = provision_info['masterip']
            master = persistence_manager.find("nodes", {"address": master_ip})
            cluster_name = master[0]["cluster"]
            cluster = xpr_cluster.get_clusters({"name": cluster_name})
            worker_nodes_list = [] if 'worker_nodes' not in cluster[0] else\
                cluster[0]["worker_nodes"]
            for node in worker_nodes_list:
                if node["address"] == worker_node["address"]:
                    return
            # updates persistence if the worker node is not present in clusters already
            worker_nodes_list.append(worker_node)
            persistence_manager.update("clusters", {"name": cluster_name},
                                       {"worker_nodes": worker_nodes_list})
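
# A minimal usage sketch (assuming a reachable node address, valid ssh
# credentials and a configured persistence manager):
#
#   node = Node({"name": "worker-1", "address": "10.0.0.12",
#                "nodetype": "CLUSTER_WORKER", "masterip": "10.0.0.10"})
#   node.provision_info_check()
#   if node.validate_node_address():
#       node.provision_node_setup()
#       Node.update_cluster(node.data, persistence_manager)
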
class Project(XprObject):
    """
    This class represents a Project
    """
    project_config_path = "/opt/xpresso.ai/config/project_config.json"
    try:
        with open(project_config_path, "r", encoding="utf-8") as config_file:
            project_config = json.load(config_file)
    except (FileNotFoundError, json.JSONDecodeError):
        raise ProjectConfigException

    def __init__(self, project_json=None):
        self.logger = XprLogger()
        """
        Constructor:
        """
        self.logger.debug(f"Project constructor called with {project_json}")
        super().__init__(project_json)

        # List of all the fields project can contain
        self.complete_fields = self.project_config["project_key_list"]
        # These are mandatory fields that need to be provided in project_json
        self.mandatory_fields = ["name", "description", "owner"]

        # primaryRole of a user has to be one of these
        self.valid_values = {"primaryRole": ["Dev", "PM", "DH", "Admin", "Su"]}

        # fields that cannot be modified
        self.unmodifiable_fields = ["name"]

        # fields that should be displayed in the output
        self.display_fields = self.project_config["project_output_fields"]
        self.logger.debug("User constructed successfully")

    def check_fields(self) -> int:
        """
            checks all the input fields in project information json

            checks if the provided fields type and format is same and in
            accordance with project format.
            Checks if the mandatory fields are provided.
        """
        # checks if the type of the input fields is valid
        for (field, field_type) in self.project_config["project_field_types"].items():
            if field in self.data:
                if type(self.data[field]).__name__ != field_type:
                    raise ProjectFieldsException(
                        f"type of '{field}' in project info is invalid"
                    )

        # checks if the mandatory fields are provided and if they are not empty
        for (field, field_flag) in self.project_config["project_field_flags"].items():
            if field_flag:
                if field in self.data and len(self.data[field]):
                    continue
                else:
                    raise IncompleteProjectInfoException()
        return 200

    @staticmethod
    def check_duplicate_component(component_list):
        """
            checks if any component is specified/used more than once
        """
        component_names = []
        for component in component_list:
            if component["name"] not in component_names:
                component_names.append(component["name"])
            else:
                component_name = component["name"]
                raise DuplicateComponentException(
                    f"Component/Pipeline with name '{component_name}' "
                    f"already exists in this project"
                )

    def component_info_check(self, component_list):
        """
            checks if the components are specified correctly

            checks if the type, flavor and name for each of the component
            is specified correctly in input json
        """
        component_field_set = self.project_config["component_fields"]
        for component in component_list:
            # check if each of type, name & flavor are specified correctly
            for (key, val) in component.items():
                # checks if only allowed keys are present in the component spec
                if key not in component_field_set:
                    raise ComponentFieldException(
                        f"'{key}' is not an allowed field in the component "
                        f"information"
                    )
                elif type(val).__name__ != component_field_set[key]:
                    raise ComponentFieldException(
                        f"Invalid field type of '{key}'"
                    )
                elif not len(val):
                    raise ComponentFieldException(f"'{key}' field is empty")

            # checks if all the mandatory fields are provided in the components
            for field in component_field_set:
                if field not in component:
                    raise ComponentFieldException(
                        f"Mandatory field '{field}' is missing in the "
                        f"component information"
                    )

            temp_type = component["type"]
            temp_flavor = component["flavor"]
            if temp_type not in self.project_config["component_flavors"]:
                raise ComponentFieldException(
                    f"Component type '{temp_type}' is not valid"
                )
            elif temp_flavor not in\
                    self.project_config["component_flavors"][temp_type]:
                raise ComponentFieldException(
                    f"'{temp_flavor}' flavor unavailable for {temp_type}"
                )

    def check_owner(self, persistence_manager, owner=None):
        owner_json = owner if owner is not None else self.data["owner"]
        if 'uid' not in owner_json:
            raise ProjectOwnerException("Owner uid needs to be provided")
        elif not len(owner_json["uid"]):
            raise ProjectOwnerException("Owner uid is empty")
        else:
            uid_json = {'uid': owner_json["uid"]}
        owner_record = persistence_manager.find("users", uid_json)
        if not len(owner_record):
            raise ProjectOwnerException(
                "User not found with provided owner uid"
            )

    def check_developers(self, persistence_manager, devs=None):
        if not devs:
            developers = self.data["developers"]
        else:
            developers = devs
        for dev_name in developers:
            if type(dev_name).__name__ != 'str':
                raise ProjectDeveloperException(
                    "Developer name should be a string"
                )
            if not len(dev_name):
                raise ProjectDeveloperException(
                    "Developer name should not be empty"
                )
            else:
                developer = persistence_manager.find(
                    "users", {"uid": dev_name}
                    )
                if not len(developer):
                    raise ProjectDeveloperException(
                        f"Developer {dev_name} not found"
                    )

    def check_pipelines(self, pipelines, persistence_manager,
                        project_components):
        """
        validates the pipeline format provided and checks for consistency,
        duplicity.
        Args:
            pipelines: input pipeline info
            persistence_manager: persistence manager
            project_components : list of names of project components

        Returns:

        """
        default_pipeline_keys = self.project_config['pipeline_fields'].keys()
        keys_without_version = list(self.project_config['pipeline_fields'].keys())
        keys_without_version.remove('deploy_version_id')
        declarative_pipeline_builder = DeclarativePipelineBuilder(
            persistence_manager)
        for pipeline in pipelines:
            if set(default_pipeline_keys) != set(pipeline.keys()) and \
                    set(keys_without_version) != set(pipeline.keys()):
                self.logger.error("pipeline keys do not match the expected "
                                  "key set")
                raise ProjectPipelinesException
            for component in pipeline['components']:
                if component not in project_components:
                    self.logger.error('pipeline component not found in '
                                      'project components')
                    raise ComponentsSpecifiedIncorrectlyException(
                        f'Pipeline component "{component}" not '
                        f'found in project components.')
            declarative_pipeline_builder.prevalidate_declarative_json(
                pipeline['declarative_json'])
        self.check_duplicate_component(pipelines)
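
        # For reference, a pipeline entry that would pass the key check
        # above might look like the sketch below; the exact key set comes
        # from pipeline_fields in project_config.json, so these names are
        # an assumption:
        #
        # pipeline = {
        #     "name": "train_pipeline",
        #     "components": ["prep", "train"],  # names of project components
        #     "declarative_json": {},           # contents of the file
        #     "deploy_version_id": 1            # optional; defaulted to 1
        # }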

    def project_info_check(self, persistence_manager):
        """
        checks if the information provided for project creation is valid

        checks all the mandatory fields and its format. checks the
        components, developers and owner information.
        """
        keys = self.data.keys()
        # checks if the input fields provided are valid and present
        # TODO: tolerate case mistakes in field names in the next release
        for key in keys:
            if key not in self.project_config["project_key_list"]:
                raise ProjectFormatException(
                    f"Invalid field '{key}' in project json"
                    )

        # checks if all fields are specified correctly
        self.check_fields()
        # checks if the components are provided with info and validates format
        if "components" in self.data:
            self.component_info_check(self.data["components"])
            self.check_duplicate_component(self.data["components"])

        self.check_owner(persistence_manager)
        if "pipelines" in self.data:
            project_component_names = \
                [component['name'] for component in self.data['components']]
            self.check_pipelines(self.data['pipelines'], persistence_manager,
                                 project_component_names)
        # checks the developers format is valid if specified
        if "developers" in self.data and len(self.data["developers"]):
            self.check_developers(persistence_manager)

    def complete_project_info(self):
        out_json = deepcopy(self.project_config["sample_json"])
        name = self.data["name"]
        for key, val in self.data.items():
            if key != "components":
                out_json[key] = val
        if "components" in self.data:
            for component in self.data["components"]:
                component_name = component["name"]
                out_component = deepcopy(self.project_config["sample_component"])
                for key, val in component.items():
                    if key in self.project_config["sample_component"]:
                        out_component[key] = val
                out_component["dockerPrefix"] = (
                    f"dockerregistry.xpresso.ai/xprops/{name}/{component_name}--"
                )
                out_json["components"].append(out_component)
        self.data = out_json
        if "pipelines" in self.data:
            for pipeline in self.data["pipelines"]:
                if "deploy_version_id" not in pipeline.keys():
                    pipeline['deploy_version_id'] = 1

    def modify_info_check(self, changes_json, persistence_manager):
        if "activationStatus" in changes_json and \
                not changes_json["activationStatus"]:
            raise ProjectNotFoundException("Project is currently not active.")

        keys = changes_json.keys()
        # TODO: tolerate case mistakes in field names in the next release
        for key in keys:
            if key not in self.project_config["project_key_list"]:
                raise ProjectFieldsException(
                    f"Invalid field '{key}' provided for modify_project"
                )

        for (field, field_type) in\
                self.project_config["project_field_types"].items():
            if field in changes_json:
                if type(changes_json[field]).__name__ != field_type:
                    raise ProjectFieldsException(
                        f"Invalid type of field '{field}'"
                    )
        if "owner" in changes_json and len(changes_json["owner"]):
            self.check_owner(changes_json["owner"])

        if "developers" in changes_json and len(changes_json["developers"]):
            self.check_developers(
                persistence_manager, devs=changes_json["developers"]
                )

        if "components" in changes_json and len(changes_json["components"]):
            old_components = self.data["components"]
            new_components = changes_json["components"]
            self.component_info_check(new_components)
            self.check_duplicate_component(old_components + new_components)

        if "pipelines" in changes_json and len(changes_json["pipelines"]):
            new_pipeline = changes_json['pipelines']
            project_components = \
                [component['name']
                 for component in self.data.get('components', [])]
            if "components" in changes_json.keys():
                for component in changes_json['components']:
                    project_components.append(component['name'])
            self.check_pipelines(new_pipeline,
                                 persistence_manager, project_components)

    def modify_project_info(self, changes_json):
        name = self.data["name"]
        out_json = deepcopy(changes_json)
        out_json["components"] = self.data["components"]
        if "components" in changes_json:
            for component in changes_json["components"]:
                component_name = component["name"]
                out_component = deepcopy(self.project_config["sample_component"])
                for key, val in component.items():
                    if key in self.project_config["sample_component"]:
                        out_component[key] = val
                out_component["dockerPrefix"] = (
                    f"dockerregistry.xpresso.ai/xprops/{name}/{component_name}--"
                )
                out_json["components"].append(out_component)
        if 'pipelines' in changes_json:
            for pipeline in changes_json['pipelines']:
                if "deploy_version_id" not in pipeline.keys():
                    pipeline['deploy_version_id'] = 1
            if 'pipelines' not in self.data.keys():
                out_json['pipelines'] = changes_json['pipelines']
            else:
                out_json['pipelines'] = changes_json['pipelines'] + \
                                        self.data['pipelines']

        return out_json
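
# A hedged usage sketch of the validation flow above, assuming a
# persistence_manager exposing find() over a "users" collection; the
# component type and flavor values are illustrative only:
#
# project = Project({
#     "name": "demo",
#     "description": "Demo project",
#     "owner": {"uid": "admin"},
#     "components": [{"name": "svc", "type": "service", "flavor": "python"}]
# })
# project.project_info_check(persistence_manager)  # raises on invalid input
# project.complete_project_info()  # fills defaults from sample_json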
Example #16
class bitbucketapi():
    """
        bitbucketapi class defines methods to work with bitbucket repos

        This class is used in creating and updating repos on bitbucket.
        bitbucket standard RestAPI 2.0 has been used for creating project
        and repository. Standard git commands through subprocess are used
        in cloning & updating the repository

        ....

        Methods
        -------
            exec_command()
                executes a linux command through subprocess

            create_bitbucket_project()
                creates a project on bitbucket using RESTAPI

            create_bitbucket_repo()
                creates a repo on bitbucket using RESTAPI

            clone_bitbucket_repo()
                clones a repo on bitbucket using git command
                through subprocess

            push_bitbucket_repo()
                pushes updated code to bitbucket using git
                command through subprocess
    """
    config_path = XprConfigParser.DEFAULT_CONFIG_PATH
    logger = XprLogger()

    def __init__(self):
        self.logger = XprLogger()
        self.config = XprConfigParser(self.config_path)
        self.defaulturl = self.config['bitbucket']['restapi']
        self.teamname = self.config['bitbucket']['teamname']
        self.username = self.config['bitbucket']['username']
        self.password = self.config['bitbucket']['password']

        # Project body format expected by the bitbucket REST API
        self.defaultprojectbody = {
            "name": "",
            "description": "",
            "key": "",
            "is_private": False
        }
        # Repo body format expected by the bitbucket REST API
        self.defaultrepobody = {"scm": "git", "project": {"key": ""}}

    def exec_command(self, command: list, inputflag: bool, inputcmd: str,
                     wd: str) -> bool:
        """
            exec_command executes an input command through subprocess

            Takes a command and a working directory and executes the
            command there. Also takes arguments to answer an input
            prompt when one is expected.

            ....

            Parameters
            ----------
                command -> input command to be executed
                inputflag -> flag to specify if an input prompt is present
                inputcmd -> input for the prompt, in case it is required
                wd -> working directory where the command needs to be
                        executed

            Returns
            -------
                returns True or False based on the execution status
        """
        # executes the command at the specified working directory path
        self.logger.info(f"Execute Command: {command} @ {wd}")
        process = subprocess.Popen(command,
                                   cwd=wd,
                                   stdin=subprocess.PIPE,
                                   stdout=subprocess.PIPE,
                                   stderr=subprocess.PIPE)

        # In case an input prompt is expected and provided
        if inputflag and inputcmd:
            # subprocess.Popen exposes stdin as a byte stream, so the
            # input string has to be converted to bytes before
            # writing to the stream
            inputbytes = str.encode(inputcmd + '\n')
            # Providing input, e.g. a password, to the stdin stream
            process.stdin.write(inputbytes)
            process.stdin.flush()

        # returncode will be None until the process is complete
        # provide timeout case to prevent forever loop
        while process.returncode is None:
            self.logger.info("Waiting for the command execution to end")
            process.wait()

        # once the process is complete, returncode will be 0 on success
        if process.returncode != 0:
            stderror = process.stderr.readlines()
            self.logger.error(f"\n Error in command execution: \n {stderror}")
            return False
        else:
            self.logger.info("Command successfully executed")
            return True
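
    # For instance, a prompt-driven command can be answered through the
    # stdin stream; the command, input and path below are illustrative only:
    #
    # api = bitbucketapi()
    # ok = api.exec_command(["git", "push", "origin", "master"],
    #                       inputflag=True, inputcmd="my-password",
    #                       wd="/tmp/demo_sc")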

    def create_bitbucket_project(self, projectjson: dict) -> dict:
        """
            creates a project on bitbucket

            creates a project on bitbucket through RESTAPI.
            using bitbucket api v2.0. Project info is provided
            as body in post request

            ....
            Parameters
            ----------
                projectjson
                    information on project to be created

            Returns
            -------
                returns the status code of post request

        """
        body = deepcopy(self.defaultprojectbody)
        body['name'] = projectjson['name']
        body['description'] = projectjson['projectDescription']
        body['key'] = projectjson['name'] + '_xpr'
        # Team name should be provided.
        create_project_url = self.defaulturl + teams + self.teamname + projects
        self.logger.debug(f"New project creation url: {create_project_url}")
        self.logger.debug(f"project body : {body}")
        projectcreation = requests.post(create_project_url,
                                        json=body,
                                        auth=(self.username, self.password))
        print("projectcreation is ", projectcreation.text)
        self.logger.info(f"projectcreation response is {projectcreation.text}")
        return projectcreation.json()
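
    # Note: teams and projects above (and repos in the next method) are
    # module-level URL fragments defined outside this snippet. Against
    # Bitbucket REST API 2.0 they would plausibly look like the following,
    # though these exact values are an assumption:
    #
    # teams = "teams/"          # .../2.0/teams/<team>/projects/
    # projects = "/projects/"
    # repos = "repositories/"   # .../2.0/repositories/<team><repo>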

    def create_bitbucket_repo(self, projectjson: dict) -> dict:
        """
            creates a repo on bitbucket

            creates a repository on bitbucket through RESTAPI.
            using bitbucket api v2.0. Project info is provided
            as body in post request

            ....
            Parameters
            ----------
                projectjson
                    same project information is used in creating
                    the repository

            Returns
            -------
                returns the response json of repo creation. The json
                contains links & info for the repository.
        """
        body = deepcopy(self.defaultrepobody)
        reponame = projectjson['name'] + '_sc'
        body['project']['key'] = projectjson['name'] + '_xpr'
        create_repo_url = self.defaulturl + repos + self.teamname + reponame
        self.logger.debug(f"New repo creation url: {create_repo_url}")
        self.logger.debug(f"repo body : {body}")
        repocreation = requests.post(create_repo_url,
                                     json=body,
                                     auth=(self.username, self.password))
        print("\n repocreation is : ", repocreation.text)
        self.logger.info(f"repocreation response is {repocreation.text}")
        return repocreation.json()

    def clone_bitbucket_repo(self, clone_link: str,
                             local_clone_path: str) -> None:
        """
            Clones a repo from bitbucket to the local system.

            clones a repository to the path specified in the
            input argument. uses git commands through subprocess
            to clone the repo.

            ....
            Parameters
            ----------
                clone_link
                    bitbucket link to the repository

                local_clone_path
                    path on the local server where the repo
                    needs to be cloned
            Returns
            -------
                returns nothing; raises BitbucketCloneException
                if the clone fails
        """
        clone_command = ['git', 'clone', clone_link, local_clone_path]
        self.logger.info(f"Cloning {clone_link} to {local_clone_path}")
        # exec_command internally calls subprocess to clone the repo
        clone_repo_status = self.exec_command(clone_command, False, None, None)
        if not clone_repo_status:
            self.logger.error("Cloning failed")
            raise BitbucketCloneException("Cloning failed.")

    def push_repo_to_bitbucket(self, remotepath: str) -> bool:
        """
            pushes the repository to bitbucket

            After updating the code, the repository is pushed
            to bitbucket using git commands

            ....
            Parameters
            ----------
                remotepath
                    path of the repository on the local server

            Returns
            -------
                returns True if the push succeeds, False otherwise

        """
        # reduce add and commit to single call
        gitaddcmd = ["git", "add", "-A"]
        gitcommitcmd = ["git", "commit", "-m", "Initial commit"]
        gitpushcmd = ["git", "push", "-u", "origin", "master"]
        for gitcmd in [gitaddcmd, gitcommitcmd, gitpushcmd]:
            gitstatus = self.exec_command(gitcmd, False, "", wd=remotepath)
            if not gitstatus:
                return False
            self.logger.info(f"{' '.join(gitcmd)} : Done")
        return True
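
    # Putting the pieces together, a typical create-clone-push sequence
    # might look like this sketch; the response layout used to read the
    # clone link follows the Bitbucket 2.0 repository resource but is an
    # assumption here:
    #
    # api = bitbucketapi()
    # api.create_bitbucket_project({"name": "demo",
    #                               "projectDescription": "Demo project"})
    # repo = api.create_bitbucket_repo({"name": "demo"})
    # clone_link = repo["links"]["clone"][0]["href"]  # assumed layout
    # api.clone_bitbucket_repo(clone_link, "/tmp/demo_sc")
    # api.push_repo_to_bitbucket("/tmp/demo_sc")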

    def delete_bitbucket_repo(self, repo):
        pass
Example #17
class LocalShellExecutor(CommandExecutor):
    """ It is used to run shell commands locally on a linux environment"""

    DEFAULT_EXECUTOR = "/bin/bash"

    def __init__(self):
        super().__init__()
        self.logger = XprLogger()

    def execute_with_output(self, command: str, executor=DEFAULT_EXECUTOR):
        """ It runs a linux shell command on the local server and returns
        the output

        Args:
            command(str): command for execution

        Returns:
            tuple: (response code: int, stdout: str, stderr: str)
        """
        self.logger.debug("Running command {}".format(command))
        try:
            status = subprocess.run(command, capture_output=True, shell=True,
                                    executable=executor)
            self.logger.debug("Command successful")
        except (subprocess.CalledProcessError, FileNotFoundError) as e:
            self.logger.warning("Command failed")
            raise CommandExecutionFailedException(str(e))
        return status.returncode, status.stdout, status.stderr

    def execute(self, command: str, executor=DEFAULT_EXECUTOR):
        """ It runs a linux shell command on the local server

        Args:
            command(str): command for execution
        Returns:
            int: response code
        """
        self.logger.debug("Running command {}".format(command))
        try:
            status = subprocess.run(command, shell=True, executable=executor)
            self.logger.debug("Command successful")
        except (subprocess.CalledProcessError, FileNotFoundError) as e:
            self.logger.warning("Command failed")
            raise CommandExecutionFailedException(str(e))
        return status.returncode

    def execute_same_shell(self, command: str, executor=DEFAULT_EXECUTOR):
        """ It runs a linux shell command on the local server without
        spawning an intermediate shell (shell=False) and returns the
        output

        Args:
            command(str): command for execution

        Returns:
            tuple: (response code: int, stdout: str, stderr: str)
        """
        self.logger.debug("Running command {}".format(command))
        try:
            status = subprocess.run(command, capture_output=True, shell=False,
                                    executable=executor)
            self.logger.debug("Command successful")
        except (subprocess.CalledProcessError, FileNotFoundError) as e:
            self.logger.warning("Command failed")
            raise CommandExecutionFailedException(str(e))
        return status.returncode, status.stdout, status.stderr
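
# A quick usage sketch; note that with capture_output the streams come
# back as bytes:
#
# executor = LocalShellExecutor()
# code, out, err = executor.execute_with_output("ls -l /tmp")
# if code != 0:
#     print(err.decode())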
class xpruser():
    config_path = XprConfigParser.DEFAULT_CONFIG_PATH

    CONTROLLER_SECTION = 'controller'
    TOKEN_EXPIRY = 'soft_expiry'
    LOGIN_EXPIRY = 'hard_expiry'
    MONGO_SECTION = 'mongodb'
    URL = 'mongo_url'
    DB = 'database'
    UID = 'mongo_uid'
    PWD = 'mongo_pwd'
    W = 'w'

    def __init__(self):
        self.config = XprConfigParser(self.config_path)
        self.db_utils = Utils(url=self.config[self.MONGO_SECTION][self.URL],
                              db=self.config[self.MONGO_SECTION][self.DB],
                              uid=self.config[self.MONGO_SECTION][self.UID],
                              pwd=self.config[self.MONGO_SECTION][self.PWD],
                              w=self.config[self.MONGO_SECTION][self.W])
        self.logger = XprLogger()

    def registeruser(self, user_json):
        """
        registers a new user in the db

        checks if the user already exists and then adds to db

        Parameters:
            user_json [json]: json with user information

        Return:
            Success -> 'OK' [str] : returns 'OK' as response
            Failure -> [str] : returns appropriate failure response
        """
        self.logger.debug(f"user info provided is {user_json}")
        info_check = userinfocheck(user_json)
        # user info_check checks if the user_json has sufficient info
        if info_check == -1:
            errcode = error_codes.incomplete_user_information
            self.logger.error("Insufficient information to create a new user")
            return xprresponse('failure', errcode, {})
        elif info_check == 0:
            errcode = error_codes.incorrect_primaryRole
            self.logger.error("Incorrect primaryRole has been provided")
            return xprresponse('failure', errcode, {})

        # Password should not be saved as plain text in the db.
        # Hashing (not encrypting) the password before saving it;
        # passlib's hash() is the current name for the deprecated encrypt()
        password = sha512_crypt.hash(user_json['pwd'])
        user_json['pwd'] = password
        # checks if the user is already present in the db
        self.logger.info("Registering a new user")
        uid_json = {'uid': user_json['uid']}
        self.logger.info("Checking the db if user is already present")
        user = self.db_utils.find("users", uid_json)
        if len(user) != 0:
            errcode = error_codes.user_exists
            return xprresponse('failure', errcode, {})

        user_json['loginStatus'] = False
        user_json['activationStatus'] = True
        user_json['nodes'] = []
        self.logger.info("adding user to the db")
        add_user = self.db_utils.insert("users", user_json, False)
        if add_user == -1:
            errcode = error_codes.username_already_exists
            self.logger.error("username already exists in the db")
            return xprresponse('failure', errcode, {})

        self.logger.info("user successfully added to the db")

        # NFS User directory changes
        nfs_manager = NFSUserManager(config=self.config)
        nfs_manager.setup_user_folder(user=user_json['uid'])
        return xprresponse('success', '', {})
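
    # For example, a registration payload shaped to satisfy userinfocheck
    # might look like this; the exact required fields are enforced by that
    # helper, so treat this as a sketch:
    #
    # result = xpruser().registeruser({
    #     "uid": "jdoe",
    #     "pwd": "S3cretPwd",
    #     "firstName": "Jane",
    #     "lastName": "Doe",
    #     "email": "jdoe@example.com",
    #     "primaryRole": "Dev"
    # })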

    def modifyuser(self, token, changesjson):
        """
            modifyuser updates the user info in the db

            checks if the user is available and then updates
            the info as per changesjson

            Parameters:
                token [str]: access token of the requesting user (unused here)
                changesjson [json] : json with user changes info

            Return:
                Success -> 'OK' [str] : returns OK if the update succeeds
                Failure -> [str] : returns appropriate failure response
        """
        check = modify_user_check(changesjson)
        if check != 200:
            return xprresponse('failure', check, {})

        uidjson = {"uid": changesjson['uid']}
        self.logger.info(f"Modifying user information of {uidjson}")
        self.logger.debug(f"Info provided to be modified is {changesjson}")
        # checks if the user is present in db
        self.logger.info("Checking if the user is present in the db")
        user = self.db_utils.find("users", uidjson)
        if len(user) == 0:
            errcode = error_codes.user_not_found
            self.logger.error(f"user {uidjson['uid']} not found in the db")
            return xprresponse('failure', errcode, {})

        self.logger.info("updating the user information")
        self.db_utils.update("users", uidjson, changesjson)
        return xprresponse('success', '', {})

    def deactivateuser(self, uid):
        """
            Deletes an user and his info from the db

            Deletes the user from database

            Parameters:
                uid [str] : uid of the user

            Return:
                returns appropriate output
        """
        uidjson = {"uid": uid}
        # deletes the user from db

        deluser = self.db_utils.find("users", uidjson)
        if len(deluser) != 0:
            self.logger.info(f"deactivating the user {uidjson['uid']}")
            if 'activationStatus' in deluser[0] and \
                deluser[0]['activationStatus']:
                self.db_utils.update("users", uidjson,
                                     {"activationStatus": False})
                self.logger.info(f"user {uidjson['uid']} successfully deleted")
                return xprresponse('success', '', {})
            else:
                errcode = error_codes.user_already_deactivated
                return xprresponse('failure', errcode, {})
        else:
            errcode = error_codes.user_not_found
            self.logger.info("user not found")
            return xprresponse('failure', errcode, {})

    def getusers(self, filterjson):
        """
            Calls the db with input filters to fetch the list of users.
            After fetching, the users list is filtered before sending
            as output in order to send relevant information only

            Parameters:
                filterjson [json] : json with filter key & value pairs

            Return:
                Success -> [list] : returns list of users
                Failure -> [str] : returns db failure response
        """
        self.logger.info("getting all the users in the db")
        self.logger.debug(f"filterjson is : {filterjson}")
        users = self.db_utils.find("users", filterjson)
        # get users call retrieves whole user info from db
        # Filtering the data that needs to be shown as output
        self.logger.info("filtering the users before sending output")
        users = filteruseroutput(users)
        self.logger.debug(f'Output of users sent: {users}')
        return xprresponse('success', '', users)
Example #19
class ControllerClient:
    CONTROLLER_SECTION = 'controller'
    SERVER_URL = 'server_url'
    CLIENT_PATH = 'client_path'
    JENKINS_SECTION = 'jenkins'
    JENKINS_HOST = 'master_host'
    relogin_response = {
        "outcome": "failure",
        "error_code": "106",
        "results": {}
    }

    API_JSON_OUTCOME = "outcome"
    API_JSON_RESULTS = "results"
    API_JSON_ERROR_CODE = "error_code"
    API_JSON_SUCCESS = "success"
    API_JSON_FAILURE = "failure"

    def __init__(self, config_path=XprConfigParser.DEFAULT_CONFIG_PATH):
        self.logger = XprLogger()
        self.config = XprConfigParser(config_path)
        self.path = os.path.join(
            os.path.expanduser('~'),
            self.config[self.CONTROLLER_SECTION][self.CLIENT_PATH])
        self.token_file = '{}.current'.format(self.path)
        self.server_path = self.config[self.CONTROLLER_SECTION][
            self.SERVER_URL]

    def sso_login(self):
        """ It performs Single Sign-On authentication for the client.
        It follows following steps
        1. Check if token exists
        2. If exists: Send to the server for validation
            2.1 If token is validated then login is successful
            2.2 If token is not validated, assume token does not exist and go
            to point 3
        3. If no token exists:
            3.1 Print the SSO authentication url for user to login
            3.2 Send request to server every few seconds to check if user
            signed in successful. Wait for 60 seconds. Throw error if not
            logged in
            3.3 When user logged in, fetch the token and save

        """
        self.logger.info('CLIENT : Entering SSO Login Method')

        # Check if token exists:
        try:
            token = self.get_token()
        except ControllerClientResponseException:
            self.logger.info("No Token found")
            token = None

        # If a token exists, try to validate it with the server first
        if token:
            url = f"{self.server_path}/sso/token_login"
            self.logger.debug('CLIENT : Making post request to server')
            data = {"token": token}
            try:
                response = self.send_http_request(url=url,
                                                  header=data,
                                                  http_method=HTTPMethod.POST,
                                                  data=data)
                return response
            except ControllerClientResponseException as e:
                self.logger.info("Assuming logging request failed")
                self.logger.info(e.message)

        url = f"{self.server_path}/sso/get_authentication_url"
        self.logger.debug('CLIENT : Making post request to server')
        response = self.send_http_request(url=url, http_method=HTTPMethod.GET)
        return response

    def sso_validate(self, validation_token):
        """
        Check whether SSO authentication is completed and successful
        Args:
            validation_token: sso validation token which is used to check if
                              a user has logged in or not.
        Returns:
        """
        # We keep polling the sso server to check whether the user has
        # logged in
        interval_second = 2
        wait_second = 60
        start_time = time.time()
        while time.time() - start_time < wait_second:
            self.logger.debug('CLIENT : Making post request to server')
            url = f"{self.server_path}/sso/validate"
            data = {"validation_token": validation_token}
            try:

                response = self.send_http_request(url=url,
                                                  http_method=HTTPMethod.POST,
                                                  data=data)
                self.logger.info("Token validated")
                self.save_token(response["token"])
                return {"message": "SSO Login Successfull"}
            except ControllerClientResponseException:
                time.sleep(interval_second)
        self.logger.info('CLIENT : Exiting SSO Login Method')
        raise ControllerClientResponseException("Session over without login",
                                                error_codes.server_error)
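
    # Taken together, the client-side SSO flow is roughly the sketch below;
    # the response keys authentication_url and validation_token are
    # assumptions about the server payload:
    #
    # client = ControllerClient()
    # response = client.sso_login()
    # if "authentication_url" in response:
    #     print("Open in a browser:", response["authentication_url"])
    #     client.sso_validate(response["validation_token"])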

    def login(self, username, password):
        """Sends request to Controller server and
        get the status on login request"""
        self.logger.info('CLIENT : entering login method')
        if not os.path.isdir(self.path):
            os.makedirs(self.path, 0o755)
        if os.path.isfile(self.token_file):
            os.remove(self.token_file)

        if not username:
            self.logger.error('CLIENT : Empty username passed. Exiting.')
            raise ControllerClientResponseException("Username can't be empty",
                                                    error_codes.empty_uid)
        if not password:
            self.logger.error('CLIENT : Empty password passed. Exiting.')
            raise ControllerClientResponseException("Password can't be empty",
                                                    error_codes.empty_uid)

        url = f"{self.server_path}/auth"
        credentials = {"uid": username, "pwd": password}
        self.logger.debug('CLIENT : Making post request to server')
        response = self.send_http_request(url=url,
                                          http_method=HTTPMethod.POST,
                                          data=credentials)

        self.save_token(token=response['access_token'])
        if 'relogin' in response and response['relogin']:
            self.logger.debug('CLIENT : already logged in. Saving new token.')
            return {"message": f"You are already logged in"}
        elif 'relogin' in response and not response['relogin']:
            self.logger.info(
                'CLIENT : Login successful. Writing token to file.')
            return {"message": f"Welcome, {username}!"}
        return response

    def save_token(self, token):
        """Token is saved in the local file system for """
        file = open(self.token_file, 'w+')
        file.write(token)
        file.close()
        self.logger.info('CLIENT : Token written to file. Exiting.')

    def get_token(self):
        """Token is saved in the local file system for """
        token = None
        try:
            with open(self.token_file, "r") as f:
                token = f.read()
        except FileNotFoundError:
            self.logger.error("No Token Found. Need to Relogin")
            raise ControllerClientResponseException(
                "No Session found. Login again", error_codes.expired_token)
        return token

    def logout(self):
        self.logger.info('CLIENT : entering logout method')
        url = f'{self.server_path}/auth'
        token = self.get_token()
        headers = {'token': token}
        self.logger.debug('CLIENT : Making delete request to server')
        self.send_http_request(url=url,
                               http_method=HTTPMethod.DELETE,
                               header=headers)
        os.remove(self.token_file)
        self.logger.info('CLIENT : Logout successful. Exiting.')
        return {"message": "Successfully logged out"}

    def get_clusters(self, argument):
        self.logger.info(f'CLIENT : entering get_clusters method '
                         f'with arguments {argument}')
        url = f'{self.server_path}/clusters'
        headers = {"token": self.get_token()}
        self.logger.debug('CLIENT : Making get request to server')
        response = self.send_http_request(url=url,
                                          http_method=HTTPMethod.GET,
                                          header=headers,
                                          data=argument)
        self.logger.info('CLIENT : Get request successful. Exiting.')
        return response

    def deactivate_cluster(self, argument):

        self.logger.info('CLIENT : Entering deactivate_cluster method')
        if not argument:
            self.logger.error('CLIENT : No input arguments provided. Exiting.')
            raise ControllerClientResponseException(
                f"Please provide some input arguments ===",
                error_codes.incomplete_cluster_info)
        url = f'{self.server_path}/clusters'
        headers = {"token": self.get_token()}
        self.send_http_request(url=url,
                               http_method=HTTPMethod.DELETE,
                               header=headers,
                               data=argument)
        self.logger.info('CLIENT : Deactivation successful. Exiting.')
        return {"message": "Cluster deactivated."}

    def register_cluster(self, argument):
        self.logger.info('CLIENT : Entering register_cluster '
                         'with arguments {}'.format(argument))
        if not argument:
            self.logger.error('CLIENT : No input arguments provided. Exiting.')
            raise ControllerClientResponseException(
                f"Please provide some input arguments ===",
                error_codes.incomplete_cluster_info)
        url = f'{self.server_path}/clusters'
        headers = {"token": self.get_token()}
        response = self.send_http_request(url=url,
                                          http_method=HTTPMethod.POST,
                                          header=headers,
                                          data=argument)
        self.logger.info('CLIENT : Cluster registration successful. Exiting.')
        return {
            "message": f"Cluster successfully registered with ID {response}"
        }

    def register_user(self, user_json):
        url = f"{self.server_path}/users"
        response = self.send_http_request(url=url,
                                          http_method=HTTPMethod.POST,
                                          header={"token": self.get_token()},
                                          data=user_json)
        return response

    def get_users(self, filter_json):
        url = f"{self.server_path}/users"
        response = self.send_http_request(url=url,
                                          http_method=HTTPMethod.GET,
                                          header={"token": self.get_token()},
                                          data=filter_json)
        return response

    def modify_user(self, changes_json):
        url = f"{self.server_path}/users"
        response = self.send_http_request(url=url,
                                          http_method=HTTPMethod.PUT,
                                          header={"token": self.get_token()},
                                          data=changes_json)
        return response

    def update_password(self, password_json):
        url = f"{self.server_path}/user/pwd"
        response = self.send_http_request(url=url,
                                          http_method=HTTPMethod.PUT,
                                          header={"token": self.get_token()},
                                          data=password_json)
        return response

    def deactivate_user(self, uid_json):
        url = f"{self.server_path}/users"
        response = self.send_http_request(url=url,
                                          http_method=HTTPMethod.DELETE,
                                          header={"token": self.get_token()},
                                          data=uid_json)
        return response

    def register_node(self, node_json):
        url = f"{self.server_path}/nodes"
        response = self.send_http_request(url=url,
                                          http_method=HTTPMethod.POST,
                                          header={"token": self.get_token()},
                                          data=node_json)
        return response

    def get_nodes(self, filter_json):
        url = f"{self.server_path}/nodes"
        response = self.send_http_request(url=url,
                                          http_method=HTTPMethod.GET,
                                          header={"token": self.get_token()},
                                          data=filter_json)
        return response

    def provision_node(self, changes_json):
        url = f"{self.server_path}/nodes"
        response = self.send_http_request(url=url,
                                          http_method=HTTPMethod.PUT,
                                          header={"token": self.get_token()},
                                          data=changes_json)
        return response

    def deactivate_node(self, node_json):
        url = f"{self.server_path}/nodes"
        response = self.send_http_request(url=url,
                                          http_method=HTTPMethod.DELETE,
                                          header={"token": self.get_token()},
                                          data=node_json)
        return response

    def assign_node(self, assign_json):
        url = f"{self.server_path}/assign_node"
        response = self.send_http_request(url=url,
                                          http_method=HTTPMethod.PUT,
                                          header={"token": self.get_token()},
                                          data=assign_json)
        return response

    def check_for_declarative_json(self, project_json):
        """
        Checks if the provided declarative json exists and replaces that field
        with the contents of declarative json.
        Args:
            project_json: input file from user

        Returns: modified project_json

        """
        for pipeline in project_json['pipelines']:
            if not os.path.isfile(pipeline['declarative_json']):
                self.logger.error("declarative json not found")
                raise FileNotFoundException('Declarative JSON not found.')
            with open(pipeline['declarative_json'], 'r') as f:
                declarative_json_data = json.load(f)
                pipeline['declarative_json'] = declarative_json_data
        return project_json
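
    # So a project JSON passed to create_project can reference declarative
    # JSON files by path, which are inlined before the request is sent;
    # the names and path here are illustrative:
    #
    # project_json = {
    #     "name": "demo",
    #     "pipelines": [
    #         {"name": "train",
    #          "components": ["prep", "train"],
    #          "declarative_json": "/tmp/train_pipeline.json"}
    #     ]
    # }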

    def create_project(self, project_json):
        if 'pipelines' in project_json:
            project_json = self.check_for_declarative_json(project_json)
        url = f"{self.server_path}/projects/manage"
        response = self.send_http_request(url=url,
                                          http_method=HTTPMethod.POST,
                                          header={"token": self.get_token()},
                                          data=project_json)
        return response

    def get_project(self, filter_json):
        url = f"{self.server_path}/projects/manage"
        response = self.send_http_request(url=url,
                                          http_method=HTTPMethod.GET,
                                          header={"token": self.get_token()},
                                          data=filter_json)
        return response

    def modify_project(self, changes_json):
        if 'pipelines' in changes_json:
            changes_json = self.check_for_declarative_json(changes_json)
        url = f"{self.server_path}/projects/manage"
        response = self.send_http_request(url=url,
                                          http_method=HTTPMethod.PUT,
                                          header={"token": self.get_token()},
                                          data=changes_json)
        return response

    def deactivate_project(self, project_json):
        url = f"{self.server_path}/projects/manage"
        response = self.send_http_request(url=url,
                                          http_method=HTTPMethod.DELETE,
                                          header={"token": self.get_token()},
                                          data=project_json)
        print("response is ", response)
        return response

    def build_project(self, argument):
        self.logger.info(f'CLIENT : Entering build_project '
                         f'with arguments {argument}')
        if not argument:
            self.logger.error('CLIENT : No input arguments provided. Exiting.')
            raise ControllerClientResponseException(
                f"Please provide some input arguments ===",
                error_codes.incomplete_cluster_info)
        url = f'{self.server_path}/projects/build'
        response = self.send_http_request(url=url,
                                          http_method=HTTPMethod.POST,
                                          header={"token": self.get_token()},
                                          data=argument)
        self.logger.info('CLIENT : Project build successful. Exiting.')
        return {
            "message": "Project build successful!",
            "Build IDS": response,
            "Jenkins Pipeline":
                f"{self.config[self.JENKINS_SECTION][self.JENKINS_HOST]}"
                f"/blue/pipelines"
        }

    def get_build_version(self, argument):
        self.logger.info(f'CLIENT : entering get_build_version method '
                         f'with arguments {argument}')
        url = f'{self.server_path}/projects/build'
        self.logger.debug('CLIENT : Making get request to server')
        response = self.send_http_request(url=url,
                                          http_method=HTTPMethod.GET,
                                          header={"token": self.get_token()},
                                          data=argument)
        return response

    def deploy_project(self, argument):
        self.logger.info(f'CLIENT : Entering deploy_project '
                         f'with arguments {argument}')
        if not argument:
            self.logger.error('CLIENT : No input arguments provided. Exiting.')

            raise ControllerClientResponseException(
                f"Please provide some input arguments ===",
                error_codes.incomplete_cluster_info)
        url = f'{self.server_path}/projects/deploy'
        response = self.send_http_request(url=url,
                                          http_method=HTTPMethod.POST,
                                          header={"token": self.get_token()},
                                          data=argument)
        self.logger.info('CLIENT : Project deployed successfully. Exiting.')
        return {
            "message": "Project deployed successfully on the below IPs!",
            "Output": response
        }

    def undeploy_project(self, argument):
        self.logger.info(f'CLIENT : Entering undeploy_project '
                         f'with arguments {argument}')
        if not argument:
            self.logger.error('CLIENT : No input arguments provided. Exiting.')
            raise ControllerClientResponseException(
                f"Please provide some input arguments ===",
                error_codes.incomplete_cluster_info)

        url = f'{self.server_path}/projects/deploy'
        response = self.send_http_request(url=url,
                                          http_method=HTTPMethod.DELETE,
                                          header={"token": self.get_token()},
                                          data=argument)
        self.logger.info('CLIENT : Project undeployed successfully. Exiting.')
        return {"message": "Project undeployed successfully!"}

    def update_xpresso(self):
        """
        Update xpresso project to the latest commit
        """
        # Send request to update server
        server_update_is_success = False
        url = f'{self.server_path}/update_xpresso'
        try:
            self.send_http_request(url, HTTPMethod.POST)
            server_update_is_success = True
        except ControllerClientResponseException as e:
            self.logger.error(e)

        # Update local
        package_manager = PackageManager()
        package_manager.run(package_to_install="UpdateLocalXpressoPackage",
                            execution_type=ExecutionType.INSTALL)
        response = {"client": "Updated"}
        if server_update_is_success:
            response["server"] = "Updated"
        else:
            response["server"] = "Update Failed"
        return response

    def fetch_version(self):
        """
        Fetches server version and client version, convert to a dict and
        returns.
        """
        url = f'{self.server_path}/version'
        json_response = self.send_http_request(url, HTTPMethod.GET)
        server_version = "None"
        if "version" in json_response:
            server_version = json_response["version"]
        client_version = get_version()
        return {
            "client_version": client_version,
            "server_version": server_version
        }

    def send_http_request(self,
                          url: str,
                          http_method: HTTPMethod,
                          data=None,
                          header: dict = None):
        request = HTTPRequest(method=http_method,
                              url=url,
                              headers=header,
                              data=data)
        handler = HTTPHandler()
        try:
            response = handler.send_request(request)
            json_response = response.get_data_as_json()
            if not json_response:
                raise ControllerClientResponseException(
                    "Request Failed", error_codes.server_error)
            elif (json_response[self.API_JSON_OUTCOME] == self.API_JSON_SUCCESS
                  and self.API_JSON_RESULTS in json_response):
                return json_response[self.API_JSON_RESULTS]
            elif (json_response[self.API_JSON_OUTCOME] == self.API_JSON_SUCCESS
                  and self.API_JSON_RESULTS not in json_response):
                return {}
            elif (self.API_JSON_RESULTS in json_response
                  and self.API_JSON_ERROR_CODE in json_response):
                raise ControllerClientResponseException(
                    json_response[self.API_JSON_RESULTS],
                    json_response[self.API_JSON_ERROR_CODE])
            elif self.API_JSON_ERROR_CODE in json_response:
                raise ControllerClientResponseException(
                    "Request Failed", json_response[self.API_JSON_ERROR_CODE])
            raise ControllerClientResponseException("Request Failed", -1)
        except (HTTPRequestFailedException, HTTPInvalidRequestException) as e:
            self.logger.error(str(e))
            raise ControllerClientResponseException("Server is not accessible",
                                                    error_codes.server_error)
        except JSONDecodeError as e:
            self.logger.error(str(e))
            raise ControllerClientResponseException(
                "Invalid response from server", error_codes.server_error)

    def create_repo(self, repo_json):
        """
        creates a repo on pachyderm cluster

        :param repo_json:
            information of repo i.e. name and description
        :return:
            returns operation status
        """
        url = f"{self.server_path}/repo"
        response = self.send_http_request(url=url,
                                          http_method=HTTPMethod.POST,
                                          header={"token": self.get_token()},
                                          data=repo_json)
        return response

    def create_branch(self, branch_json):
        """
        creates a branch in a repo

        :param branch_json:
            information of branch i.e. repo and branch names
        :return:
            operation status
        """
        url = f"{self.server_path}/repo"
        response = self.send_http_request(url=url,
                                          http_method=HTTPMethod.PUT,
                                          header={"token": self.get_token()},
                                          data=branch_json)
        return response

    def push_dataset(self, dataset_json):
        """
        pushes a dataset into pachyderm cluster

        :param dataset_json:
            information of dataset
        :return:
            operation status
        """
        url = f"{self.server_path}/dataset/manage"
        self.send_http_request(url=url,
                               http_method=HTTPMethod.PUT,
                               header={"token": self.get_token()},
                               data=dataset_json)
        manager = repo_manager.PachydermRepoManager()
        try:
            commit_id = manager.push_files(dataset_json)
            return {
                "message": f"Dataset push successful. commit id: {commit_id}"
            }
        except XprExceptions as err:
            return err.message

    def pull_dataset(self, dataset_json):
        """
        pulls a dataset from pachyderm cluster

        :param dataset_json:
            info of the dataset on pachyderm cluster
        :return:
            path of the dataset on user system
        """
        url = f"{self.server_path}/dataset/manage"
        self.send_http_request(url=url,
                               http_method=HTTPMethod.GET,
                               header={"token": self.get_token()},
                               data=dataset_json)
        manager = repo_manager.PachydermRepoManager()
        try:
            dataset_path = manager.manage_xprctl_dataset('pull', dataset_json)
            return {
                "message": f"Pull Successful, find the files at {dataset_path}"
            }
        except XprExceptions as err:
            return err.message

    def list_dataset(self, filter_json):
        """
        lists datasets saved on pachyderm cluster as per filter specs

        :param filter_json:
            info to filter required dataset
        :return:
            list of all the files and their props as per filter specs
        """
        url = f"{self.server_path}/dataset/list"
        self.send_http_request(url=url,
                               http_method=HTTPMethod.GET,
                               header={"token": self.get_token()},
                               data=filter_json)
        manager = repo_manager.PachydermRepoManager()
        try:
            dataset_list = manager.manage_xprctl_dataset('list', filter_json)
            return dataset_list
        except XprExceptions as err:
            return err.message
class XprObject(object):
    """
    This class serves as the superclass of all objects managed by the controller
    e.g., User, Node, Cluster, etc.
    """
    def __init__(self, objjson=None):
        self.logger = XprLogger()
        self.logger.debug("Inside XprObject constructor")
        self.data = objjson
        self.logger.debug("Done")

    def set(self, key, value):
        self.data[key] = value

    def get(self, key):
        return self.data[key]

    def validate_mandatory_fields(self):
        """
        checks if the mandatory fields for an object have been specified
        """
        self.logger.debug("Validating mandatory fields")
        for f in self.mandatory_fields:
            self.logger.debug("Validating field {}".format(f))
            if f not in self.data:
                raise MissingFieldException("Field '{}' missing in "
                                            "input".format(f))
            elif not len(self.data[f]):
                raise BlankFieldException(
                    "Field '{}' blank in input".format(f))

    def validate_field_values(self):
        """
        checks that every field with a restricted value set holds one of
        its valid values
        """
        for field in self.valid_values:
            if field in self.data:
                if self.data[field] not in self.valid_values[field]:
                    raise InvalidValueException(
                        "Value {} invalid for field {} in input".format(
                            self.data[field], field))

    def validate_modifiable_fields(self):
        """
        checks that none of the unmodifiable fields is being modified
        """
        self.logger.debug("Validating modifiable fields")
        for f in self.unmodifiable_fields:
            self.logger.debug("Validating field {}".format(f))
            if f in self.data:
                raise IllegalModificationException(
                    "Field {} cannot be modified".format(f))

    def filter_display_fields(self):
        """
        trims self.data down to the fields listed in display_fields
        """
        filtered_data = {}
        for field in self.display_fields:
            if field in self.data:
                filtered_data[field] = self.data[field]
        self.data = filtered_data
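
# Usage sketch (illustrative): a minimal XprObject subclass exercising the
# validation helpers above. `Widget` and its fields are hypothetical; the
# exceptions are the ones raised by the methods above.
class Widget(XprObject):
    def __init__(self, data=None):
        super().__init__(data)
        self.mandatory_fields = ["name"]
        self.valid_values = {"size": ["S", "M", "L"]}
        self.unmodifiable_fields = ["id"]
        self.display_fields = ["name", "size"]

# w = Widget({"name": "demo", "size": "M"})
# w.validate_mandatory_fields()  # MissingFieldException if "name" absent
# w.validate_field_values()      # InvalidValueException on a bad "size"
# w.filter_display_fields()      # w.data -> {"name": "demo", "size": "M"}
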
class XprClusters:
    """
    This class provides methods for Xpresso cluster management.
    """
    def __init__(self, persistence_manager):
        self.logger = XprLogger()
        self.persistence_manager = persistence_manager

    def get_clusters(self, cluster):
        """
        Retrieves info about specified cluster.
        Args:
            cluster: cluster name

        Returns: dictionary object with cluster info

        """
        self.logger.info('entering get_clusters method with'
                         ' input {}'.format(cluster))
        if cluster == {}:
            self.logger.debug('getting all clusters')
            clusters = self.persistence_manager.find('clusters', {})
            all_clusters = []
            for current_cluster in clusters:
                try:
                    temp = dict(
                        name=current_cluster['name'],
                        activationStatus=current_cluster['activationStatus'],
                        master_nodes=current_cluster['master_nodes'],
                        worker_nodes=current_cluster['worker_nodes'])

                    all_clusters.append(temp)
                except KeyError:
                    self.logger.error(
                        f"Invalid cluster format {current_cluster}")
            self.logger.info(
                'exiting get_clusters method with list of all clusters')
            return all_clusters
        else:
            self.logger.debug('getting specific cluster(s)')
            info = self.persistence_manager.find('clusters', cluster)
            if not info:
                self.logger.info('exiting get_clusters method with empty list')
                return []
            for item in info:
                if "_id" in item:
                    del item["_id"]
            self.logger.info(
                'exiting get_clusters method with required cluster(s)')
            return info

    def deactivate_cluster(self, cluster):
        """
        marks the specified cluster as deactivated in the database
        Args:
            cluster: cluster name

        Returns: True on success

        """
        self.logger.info('entering deactivate_cluster method '
                         'with input {}'.format(cluster))
        if 'name' not in cluster:
            self.logger.error('Cluster name not provided.')
            raise IncompleteClusterInfoException
        cluster_name = cluster['name']
        self.logger.debug('Checking for already existing cluster.')
        check = self.persistence_manager.find('clusters',
                                              {"name": cluster_name})
        if not check or not check[0]['activationStatus']:
            self.logger.error('Cluster does not exist')
            raise ClusterNotFoundException
        self.persistence_manager.update('clusters', {"name": cluster['name']},
                                        {"activationStatus": False})
        # let environment manager know that the cluster has been deactivated
        EnvManager().deactivate_cluster(cluster)
        self.logger.info('exiting deactivate_cluster method.')
        return True

    def register_cluster(self, cluster):
        """
        registers a new cluster in database
        Args:
            cluster: cluster to be registered

        Returns: cluster's id

        """
        self.logger.info('entering register_cluster method '
                         'with input {}'.format(cluster))
        new_cluster = Cluster(cluster)
        new_cluster.validate_mandatory_fields()
        if not cluster['name']:
            self.logger.error('Cluster name cannot be blank. Exiting.')
            raise ClusterNameBlankException
        check = self.persistence_manager.find('clusters',
                                              {"name": cluster['name']})
        if check and not check[0]["activationStatus"]:
            self.persistence_manager.update('clusters',
                                            {"name": cluster['name']},
                                            {"activationStatus": True})
            return str(check[0]['_id'])
        if 'master_nodes' in cluster:
            new_cluster.set('master_nodes', cluster['master_nodes'])
        else:
            new_cluster.set('master_nodes', [])
        if 'worker_nodes' in cluster:
            new_cluster.set('worker_nodes', cluster['worker_nodes'])
        else:
            new_cluster.set('worker_nodes', [])
        new_cluster.set('activationStatus', True)
        try:
            # new_cluster.data aliases the input dict, so the fields set
            # above are part of the document being inserted
            inserted_id = self.persistence_manager.insert(
                'clusters', new_cluster.data, False)
            self.logger.info('exiting register_cluster method '
                             'with insert ID {}'.format(str(inserted_id)))
            return str(inserted_id)
        except UnsuccessfulOperationException:
            self.logger.error('Cluster already exists. Exiting.')
            raise ClusterAlreadyExistsException('cluster already exists')
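
# Usage sketch (illustrative): driving XprClusters end to end. `pm` is a
# hypothetical persistence-manager instance exposing find/insert/update;
# the concrete class is not part of this excerpt.
#
# clusters = XprClusters(pm)
# cluster_id = clusters.register_cluster({"name": "demo-cluster"})
# print(clusters.get_clusters({}))               # {} lists every cluster
# clusters.deactivate_cluster({"name": "demo-cluster"})
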
Example #22
class XprDbSetup:
    """
    Class that provides tools to set up mongodb on a node
    """

    def __init__(self, executor=None):
        # fall back to a local shell executor when none is supplied
        self.executor = executor if executor else LocalShellExecutor()
        self.logger = XprLogger()
        self.service_path = '/lib/systemd/system/mongod.service'

    def install_mongo(self):
        """
        installs mongodb on the system
        """
        self.logger.info('entering install_mongo method')
        if not linux_utils.check_root():
            self.logger.fatal("Please run this as root")
            # abort instead of running apt commands that would fail
            raise PermissionError("install_mongo must be run as root")
        import_key = 'sudo apt-key adv --keyserver ' \
                     'hkp://keyserver.ubuntu.com:80 --recv ' \
                     '9DA31620334BD75D9DCB49F368818C72E52529D4'
        self.executor.execute(import_key)
        create_list = 'echo "deb [ arch=amd64 ] https://repo.mongodb.org/' \
                      'apt/ubuntu bionic/mongodb-org/4.0 multiverse" | ' \
                      'sudo tee /etc/apt/sources.list.d/mongodb-org-4.0.list'
        self.executor.execute(create_list)
        reload_packages = 'sudo apt-get update'
        self.executor.execute(reload_packages)
        self.logger.debug('installing mongo')
        install_mongo = 'sudo apt-get install -y mongodb-org'
        self.executor.execute(install_mongo)
        hold = """echo "mongodb-org hold" | sudo dpkg --set-selections
                  echo "mongodb-org-server hold" | sudo dpkg --set-selections
                  echo "mongodb-org-shell hold" | sudo dpkg --set-selections
                  echo "mongodb-org-mongos hold" | sudo dpkg --set-selections
                  echo "mongodb-org-tools hold" | sudo dpkg --set-selections"""
        self.executor.execute(hold)
        self.logger.info('exiting install_mongo')

    def initial_setup(self, db):
        """
        sets up the initial users and collections in the db
        :param db: database against which the setup is to be done
        :return: nothing
        """
        self.logger.info('entering initial_setup method')
        # initiate users collection
        users = db.users
        self.insert_default_users(users)
        db.users.create_index([('uid', ASCENDING)], unique=True)
        self.logger.debug('created index for users collection')

        # initiate nodes collection
        nodes = db.nodes
        self.logger.debug('setting up initial node')
        initial_node = {
            "name": "initial_node",
            "address": ""
        }
        nodes.insert_one(initial_node)
        nodes.create_index([('address', ASCENDING)], unique=True)
        self.logger.debug('created index for nodes collection')
        nodes.delete_one({"name": "initial_node"})

        # initiate clusters collection
        clusters = db.clusters
        self.logger.debug('setting up initial cluster')
        initial_cluster = {
            "name": "initial_cluster",
            "activationStatus": True,
            "master_nodes": [],
            "worker_nodes": []
        }
        clusters.insert_one(initial_cluster)
        clusters.create_index([('name', ASCENDING)], unique=True)
        self.logger.debug('created index for clusters collection')
        clusters.delete_one({"name": "initial_cluster"})

        # initiate projects collection
        projects = db.projects
        self.logger.debug('setting up initial project')
        initial_project = {
            "name": "initial_project",
            "projectDescription": "Initiates the collection",
            "owner": {},
            "developers": [],
            "components": []
        }
        projects.insert_one(initial_project)
        projects.create_index([('name', ASCENDING)], unique=True)
        self.logger.debug('created index for projects collection')
        projects.delete_one({"name": "initial_project"})

        # create xprdb_admin user in mongo
        self.logger.debug('creating xprdb user in mongo')
        db.command("createUser", "xprdb_admin", pwd="xprdb@Abz00ba",
                   roles=[{"role": "root", "db": "admin"}])
        self.logger.info('exiting initial_setup method')

    def insert_default_users(self, users):
        self.logger.debug('setting up default users')
        admin_user = {
            "uid": "xprdb_admin",
            "firstName": "Xpresso",
            "lastName": "Admin",
            "pwd": sha512_crypt.hash('xprdb@Abz00ba'),
            "email": "*****@*****.**",
            "primaryRole": "Admin",
            "activationStatus": True,
            "loginStatus": False
        }
        users.insert_one(admin_user)
        superuser = {
            "uid": "superuser1",
            "firstName": "superuser1",
            "lastName": "superuser1",
            "pwd": sha512_crypt.hash('superuser1'),
            "email": "*****@*****.**",
            "primaryRole": "Su",
            "activationStatus": True,
            "loginStatus": False
        }
        users.insert_one(superuser)
        admin1_user = {
            "uid": "admin1",
            "firstName": "admin1",
            "lastName": "admin1",
            "pwd": sha512_crypt.hash('admin1'),
            "email": "*****@*****.**",
            "primaryRole": "Admin",
            "activationStatus": True,
            "loginStatus": False
        }
        users.insert_one(admin1_user)

    def enable_replication(self):
        """
        installs replica set for the database
        :return: nothing
        """
        self.logger.info('entering enable_replication method')
        path = '/srv/mongodb/rs0-0'
        linux_utils.create_directory(path, 0o777)
        self.logger.debug('created directory for replica set')
        ip = linux_utils.get_ip_address()
        start = 'mongod --replSet rs0 --port 27017 --bind_ip localhost,' \
                '{} --dbpath /srv/mongodb/rs0-0 --fork ' \
                '--logpath /var/log/mongodb/mongod.log'.format(ip)
        self.executor.execute(start)
        self.logger.debug('mongo daemon started')
        client = MongoClient('localhost', replicaset='rs0')
        db = client.xprdb
        client.admin.command("replSetInitiate")
        self.logger.debug('Replica set initiated')
        time.sleep(5)
        self.initial_setup(db)
        # stop mongo to restart with auth
        stop_mongod = 'pgrep mongod | xargs kill'
        self.executor.execute(stop_mongod)
        self.logger.debug('stopping mongo daemon to restart with auth')
        time.sleep(10)
        restart = 'mongod --replSet rs0 --port 27017 --bind_ip localhost,{} ' \
                  '--dbpath /srv/mongodb/rs0-0 --auth --fork --logpath ' \
                  '/var/log/mongodb/mongod.log'.format(ip)
        config = configparser.ConfigParser()
        # systemd unit keys are case-sensitive; stop configparser from
        # lower-casing ExecStart when writing the unit file back
        config.optionxform = str
        config.read(self.service_path)
        config['Service']['ExecStart'] = restart
        with open(self.service_path, 'w') as f:
            config.write(f)
        restart_mongod = 'systemctl restart mongod'
        self.executor.execute(restart_mongod)
        self.logger.debug('db setup complete, exiting enable_replication')
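
# Usage sketch (illustrative): the expected setup order on a fresh Ubuntu
# node. install_mongo needs root; enable_replication then starts mongod,
# seeds the collections via initial_setup, and restarts mongod with --auth.
#
# setup = XprDbSetup()
# setup.install_mongo()
# setup.enable_replication()
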
Example #23
class AuthenticationManager:
    """
    This class provides user authentication functionality for Xpresso Controller
    """

    AUTHENTICATION_TYPE = "authentication_type"

    def __init__(self, persistence_manager):
        self.persistence_manager = persistence_manager
        self.logger = XprLogger()
        self.ldapmanager = LdapManager()
        config_path = XprConfigParser.DEFAULT_CONFIG_PATH
        self.config = XprConfigParser(config_path)

    def login(self, credentials):
        """
        Authentication method for user login.
        Args:
            credentials: object containing uid and pwd

        Returns: dictionary object consisting of access token

        """
        uid = None
        try:
            # validate data
            authentication_context = AuthenticationContext(credentials)
            uid = authentication_context.get('uid')
            self.logger.debug('entering login method '
                              'with uid {}'.format(uid))
            check = self.validate_credentials(authentication_context)
            if check == error_codes.already_logged_in:
                self.logger.debug('Relogin request from {}.'.format(uid))
                relogin = True
            else:
                relogin = False
            self.logger.debug('Providing new token to {}.'.format(uid))
            token_info = self.generate_token()
            self.save_token({"uid": uid}, token_info)
            credentials = {
                'access_token': token_info.token,
                'relogin': relogin
            }
            response = XprResponse('success', None, credentials)
            self.logger.debug('Login successful. Exiting.')
            return response

        except AuthenticationFailedException as e:
            self.logger.error('Authentication failed for user {}'.format(uid))
            return XprResponse("failure", e.error_code,
                               {"message": "Authentication failed"})

    def validate_credentials(self, authentication_context):
        """
        checks the supplied credentials against the user store
        """
        uid = authentication_context.get('uid')
        pwd = authentication_context.get('pwd')
        self.logger.debug('validating credentials for {}'.format(uid))

        users = UserManager(self.persistence_manager).get_users({"uid": uid},
                                                                False)
        if not users:
            self.logger.error("User {} not found".format(uid))
            raise UserNotFoundException(f"User {uid} not found")
        hashed_pwd = users[0]['pwd']
        if not self.authenticate_password(uid, pwd, hashed_pwd):
            self.logger.error("Wrong password entered by {}".format(uid))
            raise WrongPasswordException("Wrong password")
        elif not users[0]["activationStatus"]:
            self.logger.error("User {} has been deactivated".format(uid))
            raise DeactivatedUserException("This user is deactivated. "
                                           "Please reactivate first")
        elif users[0]['loginStatus']:
            self.logger.debug("User '{}' is already logged in.".format(uid))
            return error_codes.already_logged_in
        self.logger.debug('credentials validated for {}'.format(uid))
        return True

    def authenticate_password(self, uid, pwd, hashed_pwd):
        """
        Uses different authentication methods (LDAP or MongoDB) to check
        if credentials are valid
        """
        if self.config[self.AUTHENTICATION_TYPE] == "ldap":
            try:
                self.ldapmanager.authenticate(uid, pwd)
                return True
            except ldap.INVALID_CREDENTIALS as e:
                self.logger.error("Wrong password entered by {}".format(uid))
                raise WrongPasswordException(str(e))
            except ldap.LDAPException as e:
                self.logger.error("Invalid credentials {}".format(uid))
                raise AuthenticationFailedException(str(e))
        elif self.config[self.AUTHENTICATION_TYPE] == "mongodb":
            if not sha512_crypt.verify(pwd, hashed_pwd):
                self.logger.error("Wrong password entered by {}".format(uid))
                raise WrongPasswordException("Wrong Password")
            return True
        return False

    def generate_token(self):
        self.logger.debug('Generating token')
        token_info = TokenInfo(None)
        token_info.generate()
        return token_info

    def save_token(self, search_filter, token_info):
        UserManager(self.persistence_manager).modify_user(
            search_filter, {
                "token": token_info.token,
                "loginStatus": token_info.login_status,
                "loginExpiry": token_info.login_expiry,
                "tokenExpiry": token_info.token_expiry
            })

    def logout(self, token):
        """
        Authentication method for user logout
        Args:
            token: access token

        Returns: Deletion status (True/False)

        """
        self.logger.info('entering logout method')
        status = self.delete_token(token)
        self.logger.debug(
            'exiting logout method with status {}'.format(status))
        return XprResponse("success", None, {})

    def delete_token(self, token):
        # overwrite the stored token with an empty TokenInfo, clearing the
        # token fields and resetting loginStatus
        token_info = TokenInfo(None)
        self.save_token({"token": token}, token_info)
        return True

    def modify_user_access(self, token, uid):
        users = UserManager(self.persistence_manager).get_users(
            {"token": token}, False)
        if not users:
            return False
        return uid == users[0]['uid'] or users[0]['primaryRole'] == 'Admin'

    def validate_token(self, token, access_level):
        users = UserManager(self.persistence_manager).get_users(
            {"token": token}, False)
        if not users:
            self.logger.debug('Tokens do not match. Re-login needed.')
            raise IncorrectTokenException
        else:
            user = User(users[0])
            token_info = user.get_token_info()
            if token_info.has_expired():
                self.logger.debug('Token expired for {}.'
                                  'Logging out.'.format(user.get('uid')))
                token_info = TokenInfo(None)
                self.save_token({"token": token}, token_info)
                raise ExpiredTokenException
            elif not self.check_access(user.get('primaryRole'), access_level):
                self.logger.debug('User access check failed! Exiting.')
                raise PermissionDeniedException("Permission Denied")
        self.logger.info('revalidating token')
        self.revalidate_token(token_info)
        self.logger.info('exiting validate_token method')
        return True

    def validate_build_deploy_token(self, token, project):
        users = UserManager(self.persistence_manager).get_users(
            {"token": token}, False)
        if not users:
            self.logger.debug('Tokens do not match. Re-login needed.')
            raise IncorrectTokenException
        else:
            user = User(users[0])
            token_info = user.get_token_info()
            if token_info.has_expired():
                self.logger.debug('Token expired for {}.'
                                  'Logging out.'.format(users[0]['uid']))
                token_info = TokenInfo(None)
                self.save_token({"token": token}, token_info)
                raise ExpiredTokenException

        if "name" not in project:
            raise ProjectNotFoundException("Project name is empty")

        search_filter = {"name": project["name"]}
        project_info = self.persistence_manager.find('projects', search_filter)
        if not project_info:
            self.logger.error('No such project found.')
            raise ProjectNotFoundException("No such Project Found")
        if users[0]['uid'] != project_info[0]['owner']['uid'] and \
                levels[users[0]['primaryRole']] < levels['Admin']:
            self.logger.debug('User access check failed! Exiting.')
            raise PermissionDeniedException
        self.logger.info('revalidating token')
        self.revalidate_token(token_info)
        self.logger.info('exiting validate_build_deploy_token method')
        return project_info

    def revalidate_token(self, token_info):
        token_info.revalidate()
        self.save_token({"token": token_info.token}, token_info)
        self.logger.info('token revalidated.')

    def check_access(self, primary_role, access_level):
        self.logger.debug('Checking access')
        return levels[access_level] <= levels[primary_role]
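
# Usage sketch (illustrative): the token lifecycle implemented above. `pm`
# is a hypothetical persistence-manager instance, and reading the access
# token out of the XprResponse payload is an assumption about its layout.
#
# auth = AuthenticationManager(pm)
# resp = auth.login({"uid": "admin1", "pwd": "admin1"})
# token = resp.data["access_token"]   # assumed XprResponse attribute
# auth.validate_token(token, "Dev")   # raises on expiry or low privilege
# auth.logout(token)

# Standalone sketch of the password check used by authenticate_password
# when AUTHENTICATION_TYPE is "mongodb" (passlib's sha512_crypt):
from passlib.hash import sha512_crypt

hashed = sha512_crypt.hash("s3cretPwd")          # salted SHA-512 crypt string
assert sha512_crypt.verify("s3cretPwd", hashed)  # correct password verifies
assert not sha512_crypt.verify("wrong", hashed)  # wrong password does not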