def purge_snapshots_by_volume(volume, purge_limit):
    """
    Purge snapshots per volume
    @return: count of snapshots purged
    """
    config = helpers.get_db_config()
    ontap = OntapService(config['ontap_api'], config['ontap_apiuser'], config['ontap_apipass'],
                         config['ontap_svm_name'], config['ontap_aggr_name'], config['ontap_data_ip'])
    ontap_snapshot_list = ontap.get_snapshot_list(volume)

    if ontap_snapshot_list is None:
        return 0

    delete_count = len(ontap_snapshot_list) - purge_limit

    if delete_count <= 0:
        return 0

    database = helpers.connect_db()

    sorted_by_timestamp = sorted(ontap_snapshot_list, key=lambda snap: snap['timestamp'])
    delete_snapshot_list = sorted_by_timestamp[:delete_count]
    for snap in delete_snapshot_list:
        status = ontap.delete_snapshot(volume, snap['snapshot_name'])
        if helpers.verify_successful_response(status):
            # delete snapshot document from db
            doc = Database.get_document_by_name(database, snap['snapshot_name'])
            if not doc:  # if snapshot to be deleted is not found in DB
                logging.info("Purge: snapshot document not found for %s", snap['snapshot_name'])
            else:
                database.delete(doc)
                logging.info("Purge: snapshot deleted from DB and ONTAP: %s",
                             snap['snapshot_name'])
    return delete_count
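# A minimal usage sketch for the purge helper above (the volume name and the
# retention limit are hypothetical): keep only the 10 newest snapshots.
purged_count = purge_snapshots_by_volume('pipeline_vol', purge_limit=10)
logging.info("Purged %d snapshot(s) from pipeline_vol", purged_count)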
Example #2
def purge_inconsistent_snapshots(volume):
    """
    Snapshot consistency check - ONTAP vs DB
    Purge inconsistent snapshot documents from DB
    i.e. snapshots in DB that do not exist in ONTAP
    @return: count of snapshots deleted from DB
    """
    config = helpers.get_db_config()
    database = helpers.connect_db()
    snapshots_in_db = Database.get_snapshots_by_volume(database, volume=volume)
    ontap = OntapService(config['ontap_api'], config['ontap_apiuser'],
                         config['ontap_apipass'], config['ontap_svm_name'],
                         config['ontap_aggr_name'], config['ontap_data_ip'])
    ontap_snapshot_data = ontap.get_snapshot_list(volume)
    if not ontap_snapshot_data:
        # purge all snapshots from DB and return:
        return purge_snapshots_from_db(snapshots_ontap=[],
                                       snapshots_db=snapshots_in_db)
    if not snapshots_in_db:
        # return if there are no snapshot documents in db
        return 0

    ontap_snapshots = [snap['snapshot_name'] for snap in ontap_snapshot_data]
    return purge_snapshots_from_db(snapshots_ontap=ontap_snapshots,
                                   snapshots_db=snapshots_in_db)
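# purge_snapshots_from_db() is referenced above but not included in this
# example. A minimal sketch of what it might look like, assuming each DB
# snapshot document carries a 'name' key and the same helpers/Database API
# used elsewhere in these examples:
def purge_snapshots_from_db(snapshots_ontap, snapshots_db):
    """Delete DB snapshot documents whose snapshots no longer exist in ONTAP."""
    database = helpers.connect_db()
    purged = 0
    for snapshot in snapshots_db:
        if snapshot['name'] not in snapshots_ontap:
            doc = Database.get_document_by_name(database, snapshot['name'])
            if doc:
                database.delete(doc)
                purged += 1
                logging.info("Purge: deleted inconsistent snapshot %s from DB",
                             snapshot['name'])
    return purged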
Example #3
    def create_pv_and_pvc_and_pod(self, workspace, size, namespace, ontap_cluster_data_lif):
        ''' Create the PV, PVC, pod and service backing the workspace clone volume '''
        kb_clone_name = workspace['kb_clone_name']
        statuses = self.create_pv_and_pvc(
            kb_clone_name, size, namespace, ontap_cluster_data_lif)
        body = self.create_pod_config(workspace)
        service_body = self.create_service_config(workspace)

        try:
            self.api.create_namespaced_pod(namespace, body)
            self.api.create_namespaced_service(namespace, service_body)
            pod_status = OntapService.set_status(
                201, "Pod", body['metadata']['name'])
        except ApiException as exc:
            if self.parse_exception(exc) == "AlreadyExists":
                pod_status = OntapService.set_status(
                    200, "Pod", body['metadata']['name'])
            else:
                error_message = "Exception when calling CoreV1Api->create_namespaced_pod: %s\n" % exc
                pod_status = OntapService.set_status(
                    400, "Pod", body['metadata']['name'], error_message)

        statuses.append(pod_status)

        return statuses
Example #4
    def create_pvc_clone(self, pvc_clone_name, pvc_source, size, namespace, storage_class):
        '''
        Create a PVC clone from a source PVC.
        For use with Trident where Trident creates an ONTAP clone and a k8s PV and maps it to the PVC
        :param pvc_clone_name: name of the PVC clone to create
        :param pvc_source: name of the source PVC to clone from
        :param size: size of the clone PVC in MB
        :param namespace: Kube namespace
        :param storage_class: Kubernetes storage class (must match the class managed by Trident)
        :return: Status of creation
        '''
        body = self.create_pvc_clone_config(pvc_clone_name, pvc_source, size, storage_class)

        try:
            self.api.create_namespaced_persistent_volume_claim(namespace, body)
            status = OntapService.set_status(
                201, "PVC", body['metadata']['name'])
        except ApiException as exc:
            if self.parse_exception(exc) == "AlreadyExists":
                status = OntapService.set_status(
                    200, "PVC", body['metadata']['name'])
            else:
                err = "Exception calling CoreV1Api->create_namespaced_persistent_volume_claim: %s\n" % exc
                status = OntapService.set_status(
                    400, "PVC", body['metadata']['name'], err)

        return status
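# create_pvc_clone_config() is not shown in this example. A sketch of the body
# it might build with the kubernetes python client, assuming Trident's
# 'trident.netapp.io/cloneFromPVC' clone annotation (verify the annotation
# against the Trident version in use):
from kubernetes import client

def create_pvc_clone_config(pvc_clone_name, pvc_source, size, storage_class):
    return client.V1PersistentVolumeClaim(
        metadata=client.V1ObjectMeta(
            name=pvc_clone_name,
            annotations={'trident.netapp.io/cloneFromPVC': pvc_source}),
        spec=client.V1PersistentVolumeClaimSpec(
            access_modes=['ReadWriteOnce'],
            storage_class_name=storage_class,
            resources=client.V1ResourceRequirements(
                requests={'storage': '%sMi' % size})))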
Example #5
def snapshot_list(volume_name):
    """
    List all snapshots
    ---
    tags:
      - snapshot
    parameters:
      - in: path
        name: volume_name
        required: true
        description: parent volume name to list snapshots
        type: string
    responses:
      200:
        description: list of snapshots on the volume

    """
    database = helpers.connect_db()
    config_document = helpers.get_db_config()
    if not config_document:
        raise GenericException(
            500,
            "Customer config doc not found, please contact your administrator",
            "Database Exception")
    ontap = OntapService(config_document['ontap_api'],
                         config_document['ontap_apiuser'],
                         config_document['ontap_apipass'],
                         config_document['ontap_svm_name'],
                         config_document['ontap_aggr_name'],
                         config_document['ontap_data_ip'])
    snapshots = ontap.get_snapshot_list(volume_name)
    return jsonify(snapshots)
Example #6
def purge_old_workspaces():
    """
    Purge workspaces older than X days
    @return: (count of workspaces deleted, list of deleted workspace names)
    """
    database = helpers.connect_db()
    config = helpers.get_db_config()
    projects_in_db = Database.get_documents_by_type(database, doc_type='project')
    if not projects_in_db:
        return 0
    count = 0
    deleted_workspaces = list()
    for project in projects_in_db:
        workspaces_in_project = Database.get_workspaces_by_project(database,
                                                                   project=project['name'])
        for workspace in workspaces_in_project:
            # ONTAP doesn't provide a last-access timestamp for volumes, so
            # snapdiff the latest snapshot against one X days older to find
            # out whether the workspace is still active
            ontap = OntapService(config['ontap_api'], config['ontap_apiuser'],
                                 config['ontap_apipass'], config['ontap_svm_name'],
                                 config['ontap_aggr_name'], config['ontap_data_ip'])
            deleted, error = ontap.get_snapdiff_and_delete(
                volume_name=workspace.value,
                count=project['workspace_purge_limit'])

            # delete inconsistent or old workspace that exceeded purge limit
            if error is not None or deleted is True:
                workspace_doc = Database.get_document_by_name(database, workspace.value)
                database.delete(workspace_doc)
                deleted_workspaces.append(workspace.value)
                logging.info("Purge: deleted workspace %s from DB",
                			          workspace.value)
                count += 1
    return count, deleted_workspaces
Example #7
def modify_ssl_for_volume(volume, ssl):
    """
        Apply ssl to volume
    """
    config_document = get_db_config()
    ontap_instance = OntapService(config_document['ontap_api'],
                                  config_document['ontap_apiuser'],
                                  config_document['ontap_apipass'],
                                  config_document['ontap_svm_name'],
                                  config_document['ontap_aggr_name'],
                                  config_document['ontap_data_ip'])
    ontap_instance.modify_volume_ssl(volume, ssl)
Example #8
    def create_pvc(self, vol_name, pvc_size, namespace):
        ''' Create PVC with name 'vol_name' and size 'pvc_size' '''
        body = self.create_pvc_config(vol_name, pvc_size)

        try:
            self.api.create_namespaced_persistent_volume_claim(namespace, body)
            status = OntapService.set_status(
                201, "PVC", body['metadata']['name'])
        except ApiException as exc:
            if self.parse_exception(exc) == "AlreadyExists":
                status = OntapService.set_status(
                    200, "PVC", body['metadata']['name'])
            else:
                err = "Exception calling CoreV1Api->create_namespaced_persistent_volume_claim: %s\n" % exc
                status = OntapService.set_status(
                    400, "PVC", body['metadata']['name'], err)

        return status
Example #9
    def create_pv(self, vol_name, pv_size, ontap_cluster_data_lif):
        ''' Create PV with name 'vol_name' and size 'pv_size' '''
        body = self.create_pv_config(
            vol_name, pv_size, ontap_cluster_data_lif)  # V1PersistentVolume

        try:
            # api_response = self.api.create_persistent_volume(body)
            self.api.create_persistent_volume(body)
            status = OntapService.set_status(
                201, "PV", body['metadata']['name'])
        except ApiException as exc:
            if self.parse_exception(exc) == "AlreadyExists":
                status = OntapService.set_status(
                    200, "PV", body['metadata']['name'])
            else:
                error_message = "Exception when calling CoreV1Api->create_persistent_volume: %s\n" % exc
                status = OntapService.set_status(
                    400, "PV", body['metadata']['name'], error_message)

        return status
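# create_pv_config() is not shown in this example. A sketch of an NFS-backed
# PV body using the kubernetes python client, assuming the ONTAP volume is
# exported at the junction path '/<vol_name>' on the data LIF:
from kubernetes import client

def create_pv_config(vol_name, pv_size, ontap_cluster_data_lif):
    return client.V1PersistentVolume(
        metadata=client.V1ObjectMeta(name=vol_name),
        spec=client.V1PersistentVolumeSpec(
            capacity={'storage': '%sMi' % pv_size},
            access_modes=['ReadWriteMany'],
            nfs=client.V1NFSVolumeSource(
                server=ontap_cluster_data_lif,
                path='/' + vol_name)))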
Example #10
def delete_project(name):
    '''
    Delete all elements associated with a given project/pipeline (ONTAP volume, Jenkins job and DB document)
    '''
    try:
        config = get_db_config()
        db = connect_db()
        project = Database.get_document_by_name(db, name)
        volume = project['volume']
        ontap = OntapService(config['ontap_api'], config['ontap_apiuser'],
                             config['ontap_apipass'], config['ontap_svm_name'],
                             config['ontap_aggr_name'],
                             config['ontap_data_ip'])
        ontap.delete_volume(volume)
        db.delete(project)
        jenkins = connect_jenkins()
        jenkins.delete_job(name)

    except Exception as e:
        logging.error("Unable to delete project!: %s" % traceback.format_exc())
        raise
Example #11
    def test_set_status_for_failure(self):
        '''Test helper to create status dictionary'''
        expected_status = {
            'resource': 'Volume',
            'resource_name': 'test-vol',
            'code': 400,
            'status': 'FAILED',
            'message': '',
            'error_message': 'Error creating PV'
        }

        attempted_status = ontap.set_status(400, 'Volume', 'test-vol', 'Error creating PV')
        self.assertEqual(expected_status, attempted_status)
Example #12
    def test_set_status(self):
        '''Test helper to create status dictionary'''
        expected_status = {
            'resource': 'Volume',
            'resource_name': 'test-vol',
            'code': 200,
            'status': 'SUCCESS',
            'message': 'Volume test-vol already exists',
            'error_message': ''
        }

        attempted_status = ontap.set_status(200, 'Volume', 'test-vol')
        self.assertEqual(expected_status, attempted_status)
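# Together, the two set_status tests above pin down the shape of the status
# dictionary. A sketch of OntapService.set_status() consistent with them (the
# wording of the 201 message is an assumption):
    @staticmethod
    def set_status(code, resource, resource_name, error_message=''):
        success = code in (200, 201)
        if code == 200:
            message = "%s %s already exists" % (resource, resource_name)
        elif code == 201:
            message = "%s %s created successfully" % (resource, resource_name)
        else:
            message = ''
        return {
            'resource': resource,
            'resource_name': resource_name,
            'code': code,
            'status': 'SUCCESS' if success else 'FAILED',
            'message': message,
            'error_message': '' if success else error_message
        }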
Example #13
def delete_workspace(name):
    '''
    Delete all elements associated with a workspace (ONTAP clone volume, DB document and Kube pod)
    '''
    try:
        config = get_db_config()
        db = connect_db()
        ontap = OntapService(config['ontap_api'], config['ontap_apiuser'],
                             config['ontap_apipass'], config['ontap_svm_name'],
                             config['ontap_aggr_name'],
                             config['ontap_data_ip'])
        ontap.delete_volume(name)
        workspace = Database.get_document_by_name(db, name)
        pod_name = workspace['pod_name']
        db.delete(workspace)
        kube = KubernetesAPI()
        kube.delete_pod(pod_name)

    except Exception as e:
        logging.error("Unable to delete workspace!: %s" %
                      traceback.format_exc())
        raise
Example #14
def snapshot_delete():
    """
    Delete snapshot
    ---
    tags:
      - snapshot
    parameters:
      - in: body
        name: snapshot_name
        required: true
        description: name of the snapshot to delete
        type: string
      - in: body
        name: volume_name
        required: true
        description: name of the volume the snapshot belongs to
        type: string
    responses:
      200:
        description: snapshot was deleted successfully
    """
    database = helpers.connect_db()
    config_document = helpers.get_db_config()
    if not config_document:
        raise GenericException(
            500,
            "Customer config doc not found, please contact your administrator",
            "Database Exception")
    ontap = OntapService(config_document['ontap_api'],
                         config_document['ontap_apiuser'],
                         config_document['ontap_apipass'],
                         config_document['ontap_svm_name'],
                         config_document['ontap_aggr_name'],
                         config_document['ontap_data_ip'])
    status = ontap.delete_snapshot(request.form['volume_name'],
                                   request.form['snapshot_name'])
    return jsonify(status)
Example #15
    def create_pvc_clone_and_pod(self, workspace, merge=False):
        """
        Create a Kube PVC (clone), Pod and a Service representing the user workspace
        Once the PVC clone is created, Trident assigns an ONTAP clone and a PV
        :param workspace: workspace details dict()
        :param merge: whether the source workspace volume should also be attached for a merge
        :return: statuses of PVC clone, Pod and Service creation
        """
        logging.debug("Received workspace details:: %s" % str(workspace))
        workspace['pvc'] = self.get_kube_resource_name(workspace['name'], 'pvc')
        workspace['source_pvc'] = self.get_kube_resource_name(workspace['build_name'], 'pvc')
        workspace['pipeline_pvc'] = self.get_kube_resource_name(workspace['pipeline'], 'pvc')
        workspace['pod'] = self.get_kube_resource_name(workspace['name'], 'pod')
        workspace['service'] = self.get_kube_resource_name(workspace['name'], 'service')
        logging.debug("KUBE workspace PVC:: %s" % workspace['pvc'])
        logging.debug("KUBE workspace POD:: %s" % workspace['pod'])
        logging.debug("KUBE workspace SERVICE:: %s" % workspace['service'])
        logging.debug("KUBE workspace PIPELINE PVC:: %s" % workspace['pipeline_pvc'])
        logging.debug("KUBE workspace SOURCE (BUILD) PVC:: %s" % workspace['source_pvc'])
        clone_response = self.create_pvc_clone_resource(clone=workspace['pvc'],
                                                        source=workspace['source_pvc'])
        workspace['clone_name'] = self.get_volume_name_from_pvc(workspace['pvc'])
        workspace['pv_name'] = self.get_pv_name_from_pvc(workspace['pvc'])
        if merge:
            workspace['source_workspace_pvc'] = self.get_kube_resource_name(workspace['source_workspace_name'], 'pvc')
            workspace['source_workspace_pv'] = self.get_pv_name_from_pvc(workspace['source_workspace_pvc'])
            logging.debug("KUBE source workspace PVC:: %s" % workspace['source_workspace_pvc'])
            logging.debug("KUBE source workspace PV:: %s" % workspace['source_workspace_pv'])
        workspace['temp_pod_name'] = 'temp-pod-for-uid-gid' + workspace['name']
        temp_pod = self.create_temporary_pod_to_change_uid_gid(workspace)
        body = self.create_pod_config(workspace)
        service_body = self.create_service_config(workspace)
        logging.debug("WORKSPACE DETAILS:::: %s" % str(workspace))
        try:
            # create a temporary pod to set UID GID For workspace
            self.api.create_namespaced_pod(self.namespace, temp_pod)
            logging.info("Changing UID and GID for the workspace clone volume")
            sleep(10)   # TODO: Change this to wait on pod status
            # delete the temp pod
            self.delete_pod(workspace['temp_pod_name'])
            self.api.create_namespaced_pod(self.namespace, body)
            self.api.create_namespaced_service(self.namespace, service_body)
            # TODO: move set_status to helper?
            pod_status = OntapService.set_status(201, "Pod", body['metadata']['name'])
            service_status = OntapService.set_status(201, "Service", service_body['metadata']['name'])
        except ApiException as exc:
            if self.parse_exception(exc) == "AlreadyExists":
                pod_status = OntapService.set_status(200, "Pod", body['metadata']['name'])
                service_status = OntapService.set_status(200, "Service", service_body['metadata']['name'])
            else:
                error_message = "Exception when calling create_namespaced_pod or create_namespaced_service: %s\n" % exc
                pod_status = OntapService.set_status(400, "Pod", body['metadata']['name'], error_message)
                service_status = OntapService.set_status(400, "Service", service_body['metadata']['name'], error_message)

        return [clone_response, pod_status, service_status]
Example #16
    def create_pvc_and_pod(self, workspace, merge=False, namespace='default'):
        """
        Create a Kube PVC (clone), Pod and a Service representing the user workspace
        Once the PVC clone is created, Trident assigns an ONTAP clone and a PV
        :param workspace: workspace details dict()
        :param merge: whether the source workspace volume should also be attached for a merge
        :param namespace: Kube namespace
        :return: statuses of PVC clone, Pod and Service creation
        """
        logging.debug("Received workspace details:: %s" % str(workspace))
        workspace['pvc'] = self.get_kube_resource_name(workspace['name'], 'pvc')
        workspace['source_pvc'] = self.get_kube_resource_name(workspace['build_name'], 'pvc')
        workspace['pod'] = self.get_kube_resource_name(workspace['name'], 'pod')
        workspace['service'] = self.get_kube_resource_name(workspace['name'], 'service')
        logging.debug("KUBE workspace PVC:: %s" % workspace['pvc'])
        logging.debug("KUBE workspace POD:: %s" % workspace['pod'])
        logging.debug("KUBE workspace SERVICE:: %s" % workspace['service'])
        logging.debug("KUBE workspace SOURCE PVC:: %s" % workspace['source_pvc'])
        clone_response = self.create_pvc_clone_resource(clone=workspace['pvc'],
                                                        source=workspace['source_pvc'],
                                                        namespace=namespace)
        workspace['clone_name'] = self.get_volume_name_from_pvc(workspace['pvc'])
        workspace['pv_name'] = self.get_pv_name_from_pvc(workspace['pvc'])
        if merge:
            workspace['source_workspace_pvc'] = self.get_kube_resource_name(workspace['source_workspace_name'], 'pvc')
            workspace['source_workspace_pv'] = self.get_pv_name_from_pvc(workspace['source_workspace_pvc'])
            logging.debug("KUBE source workspace PVC:: %s" % workspace['source_workspace_pvc'])
            logging.debug("KUBE source workspace PV:: %s" % workspace['source_workspace_pv'])
        body = self.create_pod_config(workspace)
        service_body = self.create_service_config(workspace)
        logging.debug("WORKSPACE DETAILS:::: %s" % str(workspace))

        try:
            self.api.create_namespaced_pod(namespace, body)
            self.api.create_namespaced_service(namespace, service_body)
            # TODO: move set_status to helper?
            pod_status = OntapService.set_status(201, "Pod", body['metadata']['name'])
            service_status = OntapService.set_status(201, "Service", service_body['metadata']['name'])
        except ApiException as exc:
            if self.parse_exception(exc) == "AlreadyExists":
                pod_status = OntapService.set_status(200, "Pod", body['metadata']['name'])
                service_status = OntapService.set_status(200, "Service", service_body['metadata']['name'])
            else:
                error_message = "Exception when calling create_namespaced_pod or create_namespaced_service: %s\n" % exc
                pod_status = OntapService.set_status(400, "Pod", body['metadata']['name'], error_message)
                service_status = OntapService.set_status(400, "Service", service_body['metadata']['name'], error_message)

        return [clone_response, pod_status, service_status]
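# get_kube_resource_name() is used throughout these examples but not shown.
# A minimal sketch, assuming it only has to strip characters Kubernetes
# rejects (underscores, upper case) and append the '-pvc'/'-pod'/'-service'
# suffix convention seen elsewhere in this file:
    def get_kube_resource_name(self, name, resource_type):
        ''' Build a Kubernetes-safe resource name such as "<name>-pvc" '''
        kube_safe_name = name.replace('_', '-').lower()
        return "%s-%s" % (kube_safe_name, resource_type)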
Example #17
def workspace_create():
    """
    create developer workspace pod
    ---
    tags:
      - workspace
    parameters:
      - in: path
        name: workspace-name
        required: true
        description: Name of the workspace being created
        type: string
      - in: path
        name: build-name
        required: true
        description: build name (e.g. snapshot) from which clone should be created
        type: string
      - in: path
        name: username
        required: false
        description: username
        type: string
      - in: path
        name: project-name
        required: true
        description: the project/pipeline name
        type: string
    responses:
      200:
        description: workspace created successfully

    """
    # Retrieve customer configuration document from database
    try:
        database = helpers.connect_db()
        config_document = helpers.get_db_config()
    except Exception as e:
        raise GenericException(
            500,
            "Customer configuration document not found, please contact your administrator",
            "Database Exception")
    if not config_document:
        raise GenericException(
            500,
            "Customer configuration document not found, please contact your administrator",
            "Database Exception")

    expected_keys = [
        'workspace-name', 'build-name', 'username', 'project-name'
    ]
    if not helpers.request_validator(request.form, expected_keys):
        raise GenericException(
            400,
            "workspace-name, build-name, project-name and username are required"
        )

    username = request.form['username']
    try:
        user_doc = helpers.get_db_user_document(username)
        uid = user_doc['uid']
        gid = user_doc['gid']
        email = user_doc['email']
    except Exception:
        raise GenericException(
            500, "Error retrieving user information from database",
            "Database Exception")

    exceeded, workspaces = False, []
    try:
        exceeded, workspaces = workspace_obj.exceeded_workspace_count_for_user(
            uid, config_document['user_workspace_limit'])
    except Exception as exc:
        logging.warning(
            "WARNING: Unable to check user workspace limit (%s)  " %
            traceback.format_exc())
    if exceeded is True:
        raise GenericException(
            401, "Please delete one or more workspace(s) from %s and re-try" %
            workspaces)
    # populate the workspace details
    namespace = 'default'
    workspace = dict()
    workspace['project'] = request.form['project-name']
    workspace['snapshot'] = request.form['build-name']
    volume_name = request.form['volume-name']
    workspace['clone'] = volume_name + \
        "_workspace" + helpers.return_random_string(4)
    workspace['kb_clone_name'] = helpers.replace_kube_invalid_characters(
        workspace['clone'])
    workspace['uid'] = uid
    workspace['gid'] = gid
    workspace['username'] = username
    workspace['clone_size_mb'] = "900"
    workspace['pod_image'] = config_document['workspace_pod_image']
    workspace['clone_mount'] = "/mnt/" + workspace['kb_clone_name']
    workspace['build_cmd'] = "No build commands have been specified for this project"
    workspace['service_type'] = config_document['service_type']

    try:
        ontap_instance = OntapService(config_document['ontap_api'],
                                      config_document['ontap_apiuser'],
                                      config_document['ontap_apipass'],
                                      config_document['ontap_svm_name'],
                                      config_document['ontap_aggr_name'],
                                      config_document['ontap_data_ip'])
        ontap_data_ip = ontap_instance.data_ip
        status, vol_size = ontap_instance.create_clone(volume_name,
                                                       workspace['uid'],
                                                       workspace['gid'],
                                                       workspace['clone'],
                                                       workspace['snapshot'])
    except Exception as exc:
        logging.error("Unable to create ontap workspace clone volume: %s" %
                      traceback.format_exc())
        raise GenericException(
            500, "Unable to create ontap workspace clone volume")

    if not helpers.verify_successful_response(status):
        logging.error("ONTAP Clone Creation Error: %s", repr(status))
        return render_template('error.html',
                               error="Workspace clone creation error"), 400
    try:
        kube = KubernetesAPI()
    except Exception as exc:
        logging.error("Unable to connect to Kubernetes: %s" %
                      traceback.format_exc())
        raise GenericException(500, "Unable to connect to Kubernetes")
    try:

        kube_pv_pvc_pod_response = kube.create_pv_and_pvc_and_pod(
            workspace, vol_size, 'default', ontap_data_ip)
    except Exception as exc:
        logging.error("Unable to create Kubernetes Workspace PV/PVC/Pod: %s" %
                      traceback.format_exc())
        raise GenericException(
            500, "Unable to create Kubernetes Workspace PV/PVC/Pod")

    for response in kube_pv_pvc_pod_response:
        status.append(response)

    if not helpers.verify_successful_response(status):
        logging.error("Unable to create Kubernetes Workspace PV/PVC/Pod: %s" %
                      response)
        raise GenericException(
            500, "Unable to create Kubernetes Workspace PV/PVC/Pod")

    workspace_pod = workspace['kb_clone_name'] + "-pod"

    # Record new workspace in database
    try:
        new_ws_document = Workspace(name=workspace['clone'],
                                    project=workspace['project'],
                                    username=workspace['username'],
                                    uid=workspace['uid'],
                                    gid=workspace['gid'],
                                    parent_snapshot=workspace['snapshot'],
                                    pod_name=workspace_pod)
        new_ws_document.store(database)
    except Exception:
        raise GenericException(
            500, "Error recording new workspace in the DB, "
                 "please contact your administrator",
            "Database Exception")
    # Wait for pod to be ready before executing any commands
    time.sleep(15)
    # Set git user.email and user.name , we don't care if the command fails
    git_user_cmd = ['git', 'config', '--global', 'user.name', username]
    git_email_cmd = ['git', 'config', '--global', 'user.email', email]
    try:
        response = kube.execute_command_in_pod(workspace_pod, namespace,
                                               git_user_cmd)
        response = kube.execute_command_in_pod(workspace_pod, namespace,
                                               git_email_cmd)
    except Exception:
        logging.warning(
            "WARNING: Unable to configure GIT Username/Email on behalf of user: %s"
            % traceback.format_exc())
    # Wait for IDE to be ready before returning
    try:
        time.sleep(60)
        workspace_ide = kube.get_service_url(workspace['kb_clone_name'] +
                                             "-service")
    except Exception:
        workspace_ide = "NA"
        logging.warning("WARNING: Unable to retrieve workspace URL")
    message = "Workspace created successfully!"
    return render_template('workspace_details.html',
                           message=message,
                           ontap_data_ip=ontap_data_ip,
                           ontap_volume_name=workspace['clone'],
                           workspace_ide=workspace_ide), 200
Example #18
def snapshot_create():
    """
    Create snapshot from volume
    ---
    tags:
      - snapshot
    parameters:
      - in: body
        name: snapshot_name
        required: true
        description: name of the snapshot being created
        type: string
      - in: body
        name: volume_name
        required: true
        description: name of the volume that needs to be snapshot
        type: string
      - in: body
        name: build_status
        required: false
        description: specifies whether this snapshot is of a successful or failed build
        type: string
    responses:
      200:
        description: snapshot was created successfully

    """
    database = helpers.connect_db()
    config_document = helpers.get_db_config()
    if not config_document:
        raise GenericException(
            500,
            "Customer config doc not found, please contact your administrator",
            "Database Exception")
    ontap = OntapService(config_document['ontap_api'],
                         config_document['ontap_apiuser'],
                         config_document['ontap_apipass'],
                         config_document['ontap_svm_name'],
                         config_document['ontap_aggr_name'],
                         config_document['ontap_data_ip'])
    build_status = request.form.get('build_status') or 'N/A'
    if build_status not in ["passed", "failed", "N/A"]:
        raise GenericException(
            406,
            "Invalid build_status type parameter: accepted values - 'passed', 'failed', 'N/A'"
        )
    status = ontap.create_snapshot(request.form['volume_name'],
                                   request.form['snapshot_name'])
    # record snapshot in db
    db_connect = helpers.connect_db()
    if not db_connect:
        raise GenericException(
            500,
            "Database connection failure, please contact your administrator",
            "Database Exception")
    snapshot_doc = Snapshot(name=request.form['snapshot_name'],
                            volume=request.form['volume_name'],
                            jenkins_build=request.form['jenkins_build'],
                            build_status=build_status)
    snapshot_doc.store(db_connect)
    return jsonify(status)
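# A hypothetical client call against the snapshot_create endpoint above; the
# route URL is not shown in these examples, so the path below is an assumption.
import requests

response = requests.post(
    'http://web-service.example.com/snapshot/create',  # assumed endpoint path
    data={
        'volume_name': 'pipeline_vol',
        'snapshot_name': 'build_42_snapshot',
        'jenkins_build': '42',
        'build_status': 'passed',  # accepted values: 'passed', 'failed', 'N/A'
    })
print(response.json())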
Example #19
    def setUp(self):
        api_credentials = {'api_server': 'ip-address.com', 'username': '******', 'password': '******'}
        self.ontap = OntapService(api_credentials, 'vserver-test', 'aggregate-test')
Example #20
class TestONTAPService(unittest.TestCase):
    ''' Test ONTAP Service '''
    def setUp(self):
        api_credentials = {'api_server': 'ip-address.com', 'username': '******', 'password': '******'}
        self.ontap = OntapService(api_credentials, 'vserver-test', 'aggregate-test')

    @patch('web_service.ontap.ontap_service.Volume.delete_snapshot')
    def test_delete_snapshot(self, mock_delete_snapshot):
        mock_delete_snapshot.return_value = 'COMPLETED', ''

        logging.basicConfig(level='INFO')
        volume_name = 'test_volume_for_ontap_services'
        [response] = self.ontap.delete_snapshot(volume_name, 'test_snapshot')

        self.assertEqual(response['code'], 201)

    @patch('logging.warning')
    @patch('web_service.ontap.ontap_service.Volume.delete_snapshot')
    def test_delete_snapshot_fail(self, mock_delete_snapshot, mock_logger):
        ''' Log ONTAP delete error if snapshot is active '''
        mock_delete_snapshot.return_value = 'FAILED', 'snapshot has not expired or is locked'

        logging.basicConfig(level='WARNING')
        volume_name = 'test_volume_for_ontap_services'
        [response] = self.ontap.delete_snapshot(volume_name, 'test_snapshot')

        mock_logger.assert_called_with(
            'Failed to delete snapshot %s. Most likely clone is in use. error: %s',
            'test_snapshot', 'snapshot has not expired or is locked'
        )
        self.assertEqual(response['code'], 400)

    @patch('logging.error')
    @patch('web_service.ontap.ontap_service.Volume.delete_snapshot')
    def test_delete_snapshot_fail_other(self, mock_delete_snapshot, mock_logger):
        ''' Log ONTAP delete error for other reasons '''
        mock_delete_snapshot.return_value = 'FAILED', 'other error'

        logging.basicConfig(level='WARNING')
        volume_name = 'test_volume_for_ontap_services'
        [response] = self.ontap.delete_snapshot(volume_name, 'test_snapshot')

        mock_logger.assert_called_with(
            'Failed to delete snapshot %s, unexpected error: %s', 'test_snapshot', 'other error'
        )
        self.assertEqual(response['code'], 400)

    @patch('web_service.ontap.ontap_service.OntapService.delete_volume')
    @patch('web_service.ontap.ontap_service.Volume.get_snapdiff')
    @patch('web_service.ontap.ontap_service.OntapService.get_oldest_and_latest_snapshots')
    def test_get_snapdiff_and_delete(self, mock_get_oldest_latest_snapshots, mock_get_snapdiff, mock_delete_volume):
        mock_delete_volume.return_value = mocks.CREATE_VOL_RETURN_VAL
        mock_get_snapdiff.return_value = 0
        mock_get_oldest_latest_snapshots.return_value = ('weekly.5678', '1503002079'), ('weekly.1234', '1503002065')

        deleted, message = self.ontap.get_snapdiff_and_delete('test', 100)
        self.assertEqual(deleted, True)
        self.assertTrue("test has been inactive for" in message)

    @patch('web_service.ontap.ontap_service.Volume.get_snapdiff')
    @patch('web_service.ontap.ontap_service.OntapService.get_oldest_and_latest_snapshots')
    def test_get_snapdiff_and_delete_active(self, mock_get_oldest_latest_snapshots, mock_get_snapdiff):
        mock_get_snapdiff.return_value = 1
        mock_get_oldest_latest_snapshots.return_value = ('weekly.5678', '1503002079'), ('weekly.1234', '1503002065')

        deleted, message = self.ontap.get_snapdiff_and_delete('test', 2)
        self.assertEqual(deleted, False)
        self.assertTrue("test is active" in message)

    @patch('web_service.ontap.ontap_service.OntapService.get_oldest_and_latest_snapshots')
    def test_get_snapdiff_and_delete_new(self, mock_get_oldest_latest_snapshots):
        mock_get_oldest_latest_snapshots.return_value = None, None
        deleted, message = self.ontap.get_snapdiff_and_delete('test', 2)
        self.assertEqual(deleted, False)
        self.assertTrue("Workspace is less than" in message)

    @patch('web_service.ontap.ontap_service.OntapService.get_snapshot_list')
    def test_get_oldest_and_latest_snapshots(self, mock_get_snapshot_list):
        today = datetime.now()
        two_days_old_epoch = (today - timedelta(days=2)).strftime('%s')
        today_epoch = today.strftime('%s')
        two_days_snap = ('two_days_old', two_days_old_epoch)
        today_snap = ('today', today_epoch)
        mock_get_snapshot_list.return_value = [
            two_days_snap,
            today_snap,
        ], ""

        recent, old = self.ontap.get_oldest_and_latest_snapshots('test', 1)
        self.assertEqual(two_days_snap, old)
        self.assertEqual(today_snap, recent)

    @patch('web_service.ontap.ontap_service.OntapService.get_snapshot_list')
    def test_get_oldest_and_latest_snapshots_none(self, mock_get_snapshot_list):
        today = datetime.now()
        one_day_old_epoch = (today - timedelta(days=1)).strftime('%s')
        today_epoch = today.strftime('%s')
        one_day_snap = ('one_day_old', one_day_old_epoch)
        today_snap = ('today', today_epoch)
        mock_get_snapshot_list.return_value = [
            one_day_snap,
            today_snap,
        ], ""

        recent, old = self.ontap.get_oldest_and_latest_snapshots('test', 1)
        self.assertEqual(None, old)
        self.assertEqual(today_snap, recent)

    @patch('web_service.ontap.ontap_service.OntapService.get_snapshot_list')
    def test_get_oldest_and_latest_snapshots_empty(self, mock_get_snapshot_list):
        today = datetime.now()
        one_day_old_epoch = (today - timedelta(days=1)).strftime('%s')
        today_epoch = today.strftime('%s')
        one_day_snap = ('one_day_old', one_day_old_epoch)
        today_snap = ('today', today_epoch)
        mock_get_snapshot_list.return_value = None, "some error message"

        recent, old = self.ontap.get_oldest_and_latest_snapshots('test', 1)
        self.assertIsNone(old)
        self.assertIsNone(recent)
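# The three snapshot-list tests above fix the contract of
# get_oldest_and_latest_snapshots(): get_snapshot_list() yields a
# ([(name, epoch), ...], error) tuple, and the method returns
# (latest_snapshot, snapshot_older_than_n_days_or_None), or (None, None) when
# no snapshot list is available. A sketch consistent with those tests:
    def get_oldest_and_latest_snapshots(self, volume, days):
        snapshots, _error = self.get_snapshot_list(volume)
        if not snapshots:
            return None, None
        by_age = sorted(snapshots, key=lambda snap: int(snap[1]))
        oldest, latest = by_age[0], by_age[-1]
        if int(latest[1]) - int(oldest[1]) > days * 24 * 60 * 60:
            return latest, oldest
        return latest, None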
Example #21
def project_create():
    """
    create project
    ---
    tags:
      - project
    parameters:
      - in: path
        name: scm-url
        required: true
        description: git url for this project
        type: string
      - in: path
        name: scm-branch
        required: true
        description: git branch for this project
        type: string
      - in: path
        name: export-policy
        required: false
        description: export-policy for this project
        type: string
    responses:
      200:
        description: project was created successfully

    """
    # Retrieve customer configuration document from database

    try:
        database = helpers.connect_db()
        config_document = helpers.get_db_config()
    except Exception as e:
        raise GenericException(
            500,
            "Customer configuration document not found, please contact your administrator",
            "Database Exception")
    if not config_document:
        raise GenericException(
            500,
            "Customer configuration document not found, please contact your administrator",
            "Database Exception")
    expected_keys = ['scm-branch', 'scm-url']
    if not helpers.request_validator(request.form, expected_keys):
        raise GenericException(400, "SCM URL and SCM Branch are required")

    scm_project_url = helpers.sanitize_scm_url(request.form['scm-url'])

    if scm_project_url is None:
        raise GenericException(406, "Invalid SCM URL provided")

    project_name = helpers.extract_name_from_git_url(request.form['scm-url'])
    project_name += "-" + request.form['scm-branch']
    # Kubernetes does not like _
    project_name = helpers.replace_kube_invalid_characters(project_name)
    # ONTAP does not like -
    project_name_no_dashes = helpers.replace_ontap_invalid_char(project_name)

    ontap_instance = OntapService(config_document['ontap_api'],
                                  config_document['ontap_apiuser'],
                                  config_document['ontap_apipass'],
                                  config_document['ontap_svm_name'],
                                  config_document['ontap_aggr_name'],
                                  config_document['ontap_data_ip'])
    ontap_data_ip = ontap_instance.data_ip
    vol_uid = "0"
    vol_gid = "0"
    vol_size = "10000"
    if 'export-policy' in request.form:
        vol_export_policy = request.form['export-policy']
    else:
        vol_export_policy = 'default'
    try:
        status, vol_size = ontap_instance.create_volume(
            project_name_no_dashes, vol_size, vol_uid, vol_gid,
            vol_export_policy)
    except Exception as e:
        error_message = "Unable to create backing ontap volume for pipeline"
        logging.error(
            "Unable to create backing ontap volume for pipeline:\n %s" %
            traceback.format_exc())
        raise GenericException(500, error_message)

    if not helpers.verify_successful_response(status):
        error_message = "Unable to create backing ontap volume for pipeline: "
        try:
            error = status[0]['error_message'].split('(', 1)[0]
        except KeyError:
            error = ''
        error_message = error_message + error
        raise GenericException(500, error_message)

    # if volume creation is successful, generate an autosupport log;
    # display a warning if this step fails, we don't want to exit out
    try:
        pass
        # helpers.autosupport(project_name_no_dashes, vol_size)
    except Exception as e:
        logging.warning("WARNING: Unable to generate autosupport log (%s)  " %
                        str(e))

    kube_namespace = 'default'
    pv_and_pvc_responses = KubernetesAPI().create_pv_and_pvc(
        project_name, vol_size, kube_namespace, ontap_data_ip)

    for response in pv_and_pvc_responses:
        status.append(response)

    if not helpers.verify_successful_response(status):
        raise GenericException(500, "Kubernetes PV/PVC Error")

    try:
        jenkins = JenkinsAPI(config_document['jenkins_url'],
                             config_document['jenkins_user'],
                             config_document['jenkins_pass'])
    except Exception as exc:
        raise GenericException(500, "Jenkins connection error: %s" % str(exc))
    params = dict()
    params['type'] = 'ci-pipeline'
    params['volume_name'] = project_name_no_dashes
    params['git_volume'] = config_document['git_volume']
    params['service_username'] = config_document['service_username']
    params['service_password'] = config_document['service_password']
    params['broker_url'] = config_document['web_service_url']
    params['container_registry'] = config_document['container_registry']

    try:
        jenkins.create_job(project_name, params, request.form)
    except Exception as exc:
        raise GenericException(500,
                               "Jenkins Job Creation Error: %s" % str(exc))

    jenkins_url = config_document['jenkins_url'] + "job/" + project_name
    # Record new project in database
    try:
        new_project_document = Project(name=project_name,
                                       volume=project_name_no_dashes,
                                       export_policy=vol_export_policy,
                                       scm_url=scm_project_url,
                                       jenkins_url=jenkins_url)
        new_project_document.store(database)
    except Exception as exc:
        raise GenericException(
            500, "Error recording new project in the DB, "
                 "please contact your administrator",
            "Database Exception" + str(exc))
    # create trigger-purge jenkins job if not already done
    jenkins_account = dict()
    jenkins_account['url'] = config_document['jenkins_url']
    jenkins_account['username'] = config_document['jenkins_user']
    jenkins_account['password'] = config_document['jenkins_pass']

    try:
        helpers.create_purge_jenkins_job(job='purge_policy_enforcer',
                                         account=jenkins_account)
    except RuntimeError as exc:
        raise GenericException(
            500, "Jenkins Job Creation Error: 'purge_policy_enforcer' ")
    # need not return project_volume once we start storing volume info in DB
    return jsonify({
        'project_name': project_name,
        'project_volume': project_name_no_dashes
    }), 200