Example #1
def check_son_validity(project_path: str):
    """
    Checks if the given project path is a valid son project, otherwise it raises an exception. Valid means, it has
    a consistent son file structure, so no semantics will be tested.

    :param project_path: the path of the cloned project
    """
    missing_files = []

    files = os.listdir(project_path)
    logger.warning('Files in {}:'.format(project_path))
    for f in files:
        logger.warning('{}'.format(f))

    for file in REQUIRED_SON_PROJECT_FILES:
        if not os.path.isfile(os.path.join(project_path, file)):
            missing_files.append(file)

    missing_files_count = len(missing_files)
    # If the project seems to be valid.
    if missing_files_count == 0:
        return
    elif missing_files_count == 1:
        result = "The project has no '{}' file".format(missing_files[0])
    else:
        result = "The project has the following missing files: '{}'".format(",".join(missing_files))

    # Delete project, if there are missing files.
    shutil.rmtree(project_path)

    raise InvalidArgument(result)
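
A minimal usage sketch for the example above; the required file name and the temporary directory are illustrative assumptions, only check_son_validity and InvalidArgument come from the code itself.

import os
import tempfile

clone_dir = tempfile.mkdtemp()  # stands in for a freshly cloned project
# 'project.yml' is a hypothetical entry of REQUIRED_SON_PROJECT_FILES.
open(os.path.join(clone_dir, 'project.yml'), 'w').close()

try:
    check_son_validity(clone_dir)
    print('Project structure is valid')
except InvalidArgument as e:
    # On failure the project directory has already been removed.
    print('Invalid project: {}'.format(e))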
def pull(ws_id: int, project_id: int):
    """
    Pulls data from the given project_id.
    :param ws_id: Workspace of the project
    :param project_id: Project to pull.
    :return: a dictionary containing the result of the operation
    """
    project = get_project(ws_id, project_id)

    project_full_path = os.path.join(project.workspace.path, PROJECT_REL_PATH,
                                     project.rel_path)

    # Error handling
    if not os.path.isdir(project_full_path):
        raise Exception(
            "Could not find project directory {}".format(project_full_path))

    if not project.repo_url:
        raise InvalidArgument(
            "Project with id {} is missing the repo attribute".format(
                project_id))

    # Pull in project directory
    # If url in GitHub domain, access by token
    out, err, exitcode = git_command(['pull', project.repo_url],
                                     cwd=project_full_path)

    if exitcode != 0:
        return create_info_dict(err=err, exitcode=exitcode)
    return create_info_dict(out=out)
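
The git_command helper itself is not shown in this example; a minimal sketch of the (out, err, exitcode) contract it is assumed to provide, using a plain subprocess call and ignoring the GitHub token handling mentioned in the comment:

import subprocess

def git_command(args, cwd):
    # Run 'git <args>' in the given working directory and return
    # (stdout, stderr, exit code), mirroring how pull() unpacks the result.
    proc = subprocess.run(['git'] + args, cwd=cwd,
                          capture_output=True, text=True)
    return proc.stdout, proc.stderr, proc.returncode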
Example #3
def save_image_file(ws_id, project_id, function_id, file):
    """
    Saves the vnf image file into the vnfs folder
    
    :param ws_id: The workspace ID 
    :param project_id: The project ID 
    :param function_id: The function ID
    :param file: The image file
    :return: A success message
    """
    if file.filename == '':
        raise InvalidArgument("No file attached!")
    if file:
        filename = secure_filename(file.filename)
        session = db_session()
        function = session.query(Function). \
            join(Project). \
            join(Workspace). \
            filter(Workspace.id == ws_id). \
            filter(Project.id == project_id). \
            filter(Function.id == function_id).first()
        if function is not None:
            file_path = get_file_path("vnf", function)
            file_path = file_path.replace(get_file_name(function), filename)
            file.save(file_path)
            return "File {} successfully uploaded!".format(filename)
        else:
            raise NotFound("Function with id {} does not exist".format(
                function_id))
Example #4
def delete(ws_id: int, project_id: int, remote_repo_name: str, organization_name: str = None):
    """
    Deletes given project on remote repository

    :param project_id: The project ID
    :param ws_id: Workspace of the project
    :param remote_repo_name: Remote repository name
    :param organization_name: Optional parameter to specify the organization / login
    :return: a dictionary containing the result of the operation
    """
    if organization_name is None:
        owner = session['user_data']['login']
    else:
        owner = organization_name
    sql_session = db_session()
    project = get_project(ws_id, project_id, sql_session)
    url_decode = parse.urlparse(project.repo_url)
    if _repo_name_from_url(url_decode) == remote_repo_name:
        result = _do_delete(owner, remote_repo_name)
        if result.status_code == 204:
            project.repo_url = None
            sql_session.commit()
            return create_info_dict("Successfully deleted")
        else:
            sql_session.rollback()
            return create_info_dict(result.text, exitcode=1)
    raise InvalidArgument("The given repo name does not correspond to the remote repository name")
def create_service(ws_id: int, project_id: int, service_data: dict) -> dict:
    """
    Creates a service in the given project
    :param ws_id: The Workspace of the project
    :param project_id: The Project of the Service
    :param service_data: the service data containing the "descriptor" and "meta"
    :return: The created service descriptor
    """
    session = db_session()
    project = session.query(Project).filter_by(id=project_id).first()

    if project:
        # Retrieve post parameters
        try:
            service_name = shlex.quote(service_data['descriptor']["name"])
            vendor_name = shlex.quote(service_data['descriptor']["vendor"])
            version = shlex.quote(service_data['descriptor']["version"])
        except KeyError as ke:
            raise InvalidArgument("Missing key {} in service data".format(
                str(ke)))

        existing_services = list(
            session.query(Service).join(Project).join(Workspace).filter(
                Workspace.id == ws_id).filter(
                    Service.project == project).filter(
                        Service.name == service_name).filter(
                            Service.vendor == vendor_name).filter(
                                Service.version == version))
        if len(existing_services) > 0:
            raise NameConflict(
                "A service with this name/vendor/version already exists")

        # validate service descriptor
        workspace = session.query(Workspace).filter(
            Workspace.id == ws_id).first()
        validate_service_descriptor(workspace.ns_schema_index,
                                    service_data["descriptor"])

        # Create db object
        service = Service(name=service_name,
                          vendor=vendor_name,
                          version=version,
                          project=project,
                          descriptor=json.dumps(service_data["descriptor"]),
                          meta=json.dumps(service_data["meta"]))
        session.add(service)
        try:
            write_ns_vnf_to_disk("nsd", service)
        except:
            logger.exception("Could not create service:")
            session.rollback()
            raise
        session.commit()
        return service.as_dict()

    else:
        session.rollback()
        raise NotFound("Project with id '{}' not found".format(project_id))
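
The shape of the service_data payload implied by the code above (the 'descriptor' keys are read and validated, 'meta' is stored verbatim); the field values are illustrative only.

service_data = {
    'descriptor': {
        'name': 'example-service',   # illustrative values, not real descriptors
        'vendor': 'eu.example',
        'version': '0.1',
    },
    'meta': {},
}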
def update_function(ws_id: int, prj_id: int, func_id: int, func_data: dict) -> dict:
    """
    Update the function descriptor
    :param ws_id: The Workspace ID
    :param prj_id: The Project ID
    :param func_id: The function ID
    :param func_data: The function data for updating
    :return: The updated function descriptor
    """
    session = db_session()

    ws = session.query(Workspace).filter(Workspace.id == ws_id).first()
    validate_vnf(ws.vnf_schema_index, func_data)

    # test if ws Name exists in database
    function = session.query(Function). \
        join(Project). \
        join(Workspace). \
        filter(Workspace.id == ws_id). \
        filter(Project.id == prj_id). \
        filter(Function.id == func_id).first()
    if function is None:
        session.rollback()
        raise NotFound("Function with id {} does not exist".format(func_id))
    function.descriptor = json.dumps(func_data)

    old_file_name = get_file_path("vnf", function)
    old_folder_path = old_file_name.replace(get_file_name(function), "")
    try:
        function.name = shlex.quote(func_data["name"])
        function.vendor = shlex.quote(func_data["vendor"])
        function.version = shlex.quote(func_data["version"])
    except KeyError as ke:
        session.rollback()
        raise InvalidArgument("Missing key {} in function data".format(str(ke)))

    try:
        new_file_name = get_file_path("vnf", function)
        new_folder_path = new_file_name.replace(get_file_name(function), "")
        if old_folder_path != new_folder_path:
            # move old files to new location
            os.makedirs(new_folder_path)
            for file in os.listdir(old_folder_path):
                if not old_file_name == os.path.join(old_folder_path, file):  # don't move descriptor yet
                    shutil.move(os.path.join(old_folder_path, file), os.path.join(new_folder_path, file))
        if not new_file_name == old_file_name:
            shutil.move(old_file_name, new_file_name)
        if old_folder_path != new_folder_path:
            # cleanup old folder
            shutil.rmtree(old_folder_path)
        write_ns_vnf_to_disk("vnf", function)
    except:
        session.rollback()
        logger.exception("Could not update descriptor file:")
        raise
    session.commit()
    return function.as_dict()
def update_service(ws_id, project_id, service_id, service_data):
    """
    Update the service using the service data from the request
    :param ws_id: The Workspace ID
    :param project_id: The project ID
    :param service_id: The service ID
    :param service_data: The service data containing the "descriptor" and optionally some "meta" data
    :return: The updated service data
    """
    session = db_session()
    service = session.query(Service). \
        join(Project). \
        join(Workspace). \
        filter(Workspace.id == ws_id). \
        filter(Project.id == project_id). \
        filter(Service.id == service_id).first()
    if service:
        old_file_name = get_file_path("nsd", service)
        # Parse parameters and update record
        if 'descriptor' in service_data:
            # validate service descriptor
            workspace = session.query(Workspace).filter(
                Workspace.id == ws_id).first()
            validate_service_descriptor(workspace.ns_schema_index,
                                        service_data["descriptor"])
            service.descriptor = json.dumps(service_data["descriptor"])
            try:
                service.name = shlex.quote(service_data["descriptor"]["name"])
                service.vendor = shlex.quote(
                    service_data["descriptor"]["vendor"])
                service.version = shlex.quote(
                    service_data["descriptor"]["version"])
            except KeyError as ke:
                raise InvalidArgument("Missing key {} in service data".format(
                    str(ke)))

        if 'meta' in service_data:
            service.meta = json.dumps(service_data["meta"])

        new_file_name = get_file_path("nsd", service)
        try:
            if not old_file_name == new_file_name:
                shutil.move(old_file_name, new_file_name)
            write_ns_vnf_to_disk("nsd", service)
        except:
            logger.exception("Could not update descriptor file:")
            raise
        session.commit()
        return service.as_dict()
    else:
        raise NotFound(
            "Could not update service '{}', because no record was found".
            format(service_id))
def validate_service_descriptor(schema_index: int, descriptor: dict) -> None:
    """
    Validates the given descriptor with the schema loaded from the configuration

    :param schema_index: the schema index of the workspace
    :param descriptor: the service descriptor
    :raises: InvalidArgument: if the validation fails
    """
    schema = get_schema(schema_index, SCHEMA_ID_NS)
    try:
        jsonschema.validate(descriptor, schema)
    except ValidationError as ve:
        raise InvalidArgument("Validation failed: <br/> Path: {} <br/> Error: {}".format(list(ve.path), ve.message))
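
An illustrative sketch of the jsonschema behaviour relied on here: validate() raises ValidationError, whose path and message end up in the InvalidArgument text. The schema below is a made-up minimal example, not the real NS schema.

import jsonschema
from jsonschema import ValidationError

schema = {
    'type': 'object',
    'required': ['name', 'vendor', 'version'],
}

try:
    jsonschema.validate({'vendor': 'eu.example', 'version': '0.1'}, schema)
except ValidationError as ve:
    # ve.path is empty here because the error concerns the top-level object.
    print(list(ve.path), ve.message)   # [] "'name' is a required property"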
Example #9
def create_function(ws_id: int, project_id: int, function_data: dict) -> dict:
    """
    Creates a new vnf in the project

    :param ws_id: The workspace ID
    :param project_id: The Project ID
    :param function_data: The function data to create
    :return: The created function as a dict
    """
    try:
        function_name = shlex.quote(function_data['descriptor']["name"])
        vendor_name = shlex.quote(function_data['descriptor']["vendor"])
        version = shlex.quote(function_data['descriptor']["version"])
    except KeyError as ke:
        raise InvalidArgument("Missing key {} in function data".format(
            str(ke)))

    session = db_session()

    ws = session.query(Workspace).filter(
        Workspace.id == ws_id).first()  # type: Workspace
    validate_vnf(ws.schema_index, function_data['descriptor'])

    # test if function Name exists in database
    existing_functions = list(
        session.query(Function).join(Project).join(Workspace).filter(
            Workspace.id == ws_id).filter(
                Function.project_id == project_id).filter(
                    Function.vendor == vendor_name).filter(
                        Function.name == function_name).filter(
                            Function.version == version))
    if len(existing_functions) > 0:
        raise NameConflict("Function with name " + function_name +
                           " already exists")
    project = session.query(Project).filter(Project.id == project_id).first()
    if project is None:
        raise NotFound("No project with id {} was found".format(project_id))
    function = Function(name=function_name,
                        project=project,
                        vendor=vendor_name,
                        version=version,
                        descriptor=json.dumps(function_data['descriptor']))
    session.add(function)
    try:
        write_ns_vnf_to_disk("vnf", function)
    except:
        logger.exception("Could not write data to disk:")
        session.rollback()
        raise
    session.commit()
    return function.as_dict()
Example #10
def validate_vnf(schema_index: int, descriptor: dict) -> None:
    """
    Validates the VNF against the VNF schema
    
    :param schema_index: The index of the schema repository
    :param descriptor: The descriptor to validate
    :return: Nothing if the descriptor is valid
    :raises InvalidArgument: if the descriptor is not valid
    """
    schema = get_schema(schema_index, SCHEMA_ID_VNF)
    try:
        jsonschema.validate(descriptor, schema)
    except ValidationError as ve:
        raise InvalidArgument(
            "Validation failed: <br/> Path: {} <br/> Error: {}".format(
                list(ve.path), ve.message))
Example #11
def create_service_on_platform(ws_id, platform_id, service_data):
    """
    Deploys the service on the referenced Platform

    :param ws_id: The workspace ID
    :param platform_id: The platform ID
    :param service_data: The service descriptor data
    :return: A message if the service was deployed successfully
    """
    # TODO test this!
    service_id = int(service_data['id'])
    session = db_session()
    try:
        workspace = session.query(Workspace).filter(
            Workspace.id == ws_id).first()
        project = session.query(Project). \
            join(Service). \
            filter(Project.services.any(Service.id == service_id)). \
            filter(Project.workspace == workspace). \
            first()  # type: Project
        if not len(project.services) == 1:
            raise InvalidArgument(
                "Project must have exactly one service "
                "to push to platform. Number of services: {}".format(
                    len(project.services)))

        platform = session.query(Platform).filter(Platform.id == platform_id). \
            filter(Platform.workspace == workspace).first()
        package_path = pack_project(project)
        service_uuid = push_to_platform(package_path, platform.workspace)
        logger.info("Pushed to platform: " + str(service_uuid))
        # deploy to private catalogue
        service = project.services[0].as_dict()
        publish_private_nsfs(ws_id, service["descriptor"], is_vnf=False)
        publish_referenced_functions(ws_id, project.id, service["descriptor"])
        return {
            'message': 'Deployed successfully: {}'.format(str(service_uuid))
        }
    finally:
        session.commit()
def publish_private_nsfs(ws_id: int, descriptor: dict, is_vnf: bool):
    """
    Publishes a function or service to the private catalogue repository
    :param ws_id: The workspace ID
    :param descriptor: The function or service descriptor to publish
    :param is_vnf: True if the descriptor describes a VNF, False if it describes a service
    :return:
    """
    try:
        name = descriptor['name']
        vendor = descriptor['vendor']
        version = descriptor['version']
    except KeyError as ke:
        raise InvalidArgument("Missing key {} in descriptor data".format(
            str(ke)))

    try:
        session = db_session()
        # create or update descriptor in database
        model = query_private_nsfs(ws_id, vendor, name, version,
                                   is_vnf)  # type: PrivateDescriptor
        if model is None:
            if is_vnf:
                model = PrivateFunction()
            else:
                model = PrivateService()

            model.__init__(ws_id, vendor, name, version)
            session.add(model)
        model.descriptor = json.dumps(descriptor)
        workspace = session.query(Workspace).filter(
            Workspace.id == ws_id).first()
        if workspace is not None:
            write_private_descriptor(workspace.path, is_vnf, descriptor)
            session.commit()
            return
    except:
        session.rollback()
        raise
Example #13
def pull(ws_id: int, project_id: int):
    """
    Pulls data from the given project_id.
    :param ws_id: Workspace of the project
    :param project_id: Project to pull.
    :return: a dictionary containing the result of the operation
    """
    dbsession = db_session()
    project = get_project(ws_id, project_id, session=dbsession)

    project_full_path = os.path.join(project.workspace.path, PROJECT_REL_PATH, project.rel_path)

    # Error handling
    if not os.path.isdir(project_full_path):
        raise Exception("Could not find project directory {}".format(project_full_path))

    if not project.repo_url:
        raise InvalidArgument("Project with id {} is missing the repo attribute".format(project_id))

    # Pull in project directory
    # If url in GitHub domain, access by token
    out, err, exitcode = git_command(['pull', project.repo_url], cwd=project_full_path)

    # Return error if pull failed.
    if exitcode != 0:
        return create_info_dict(err=err, exitcode=exitcode)

    # Rescan project
    try:
        sync_project_descriptor(project)
        dbsession.add(project)
        scan_project_dir(project_full_path, project)
        dbsession.commit()
    except:
        dbsession.rollback()
        raise Exception("Could not scan the project after pull.")

    return create_info_dict(out=out)
Example #14
def update_function(ws_id: int, prj_id: int, func_id: int,
                    func_data: dict) -> dict:
    """
    Update the function descriptor

    :param ws_id: The Workspace ID
    :param prj_id: The Project ID
    :param func_id: The function ID
    :param func_data: The function data for updating
    :return: The updated function descriptor
    """
    session = db_session()

    ws = session.query(Workspace).filter(Workspace.id == ws_id).first()
    validate_vnf(ws.schema_index, func_data['descriptor'])
    edit_mode = func_data['edit_mode']

    # test if function exists in database
    function = session.query(Function). \
        join(Project). \
        join(Workspace). \
        filter(Workspace.id == ws_id). \
        filter(Project.id == prj_id). \
        filter(Function.id == func_id).first()
    if function is None:
        session.rollback()
        raise NotFound("Function with id {} does not exist".format(func_id))

    old_file_name = get_file_path("vnf", function)
    old_folder_path = old_file_name.replace(get_file_name(function), "")
    old_uid = get_uid(function.vendor, function.name, function.version)
    try:
        new_name = shlex.quote(func_data['descriptor']["name"])
        new_vendor = shlex.quote(func_data['descriptor']["vendor"])
        new_version = shlex.quote(func_data['descriptor']["version"])
    except KeyError as ke:
        session.rollback()
        raise InvalidArgument("Missing key {} in function data".format(
            str(ke)))

    # check if new name already exists
    function_dup = session.query(Function). \
        join(Project). \
        join(Workspace). \
        filter(Workspace.id == ws_id). \
        filter(Project.id == prj_id). \
        filter(Function.name == new_name). \
        filter(Function.vendor == new_vendor). \
        filter(Function.version == new_version). \
        filter(Function.id != func_id).first()
    if function_dup:
        session.rollback()
        raise NameConflict(
            "A function with that name, vendor and version already exists")

    new_uid = get_uid(new_vendor, new_name, new_version)
    refs = get_references(function, session)
    if old_uid != new_uid:
        if refs:
            if edit_mode == "create_new":
                function = Function(project=function.project)
                session.add(function)
            else:
                replace_function_refs(refs, function.vendor, function.name,
                                      function.version, new_vendor, new_name,
                                      new_version)
        function.vendor = new_vendor
        function.name = new_name
        function.version = new_version
        function.uid = new_uid
    function.descriptor = json.dumps(func_data['descriptor'])

    try:
        if old_uid != new_uid:
            new_file_name = get_file_path("vnf", function)
            new_folder_path = new_file_name.replace(get_file_name(function),
                                                    "")

            if old_folder_path != new_folder_path:
                # move old files to new location
                os.makedirs(new_folder_path)
                for file in os.listdir(old_folder_path):
                    if not file.endswith(".yml"):  # don't move descriptor yet
                        if refs and edit_mode == "create_new":
                            if os.path.isdir(
                                    os.path.join(old_folder_path, file)):
                                shutil.copytree(
                                    os.path.join(old_folder_path, file),
                                    os.path.join(new_folder_path, file))
                            else:
                                shutil.copy(
                                    os.path.join(old_folder_path, file),
                                    os.path.join(new_folder_path, file))
                        else:
                            shutil.move(os.path.join(old_folder_path, file),
                                        os.path.join(new_folder_path, file))
                if refs and edit_mode == "create_new":
                    shutil.copy(old_file_name, new_file_name)
                else:
                    shutil.move(old_file_name, new_file_name)
            if old_folder_path != new_folder_path and not (refs and edit_mode
                                                           == "create_new"):
                # cleanup old folder if no other descriptor exists
                if not os.listdir(old_folder_path):
                    shutil.rmtree(old_folder_path)
        write_ns_vnf_to_disk("vnf", function)
        if refs and old_uid != new_uid and edit_mode == 'replace_refs':
            for service in refs:
                write_ns_vnf_to_disk("ns", service)
    except:
        session.rollback()
        logger.exception("Could not update descriptor file:")
        raise
    session.commit()
    return function.as_dict()
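
get_uid is only used here to detect whether the vendor/name/version triple changed; a hedged sketch of the naming convention it presumably implements (the real format used by the project may differ):

def get_uid(vendor, name, version):
    # Assumed convention: one string that is unique per
    # vendor/name/version triple, e.g. 'eu.example.my-vnf.0.1'.
    return '{}.{}.{}'.format(vendor, name, version)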
Example #15
def post(ws_id, parent_id, vnf_id):
    if 'image' not in request.files:
        raise InvalidArgument("No file attached!")
    file = request.files['image']
    return prepare_response(
        functionsimpl.save_image_file(ws_id, parent_id, vnf_id, file))
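
A hypothetical client-side call for this endpoint; only the multipart field name 'image' is taken from the code above, the route path and host are assumptions.

import requests

with open('vnf-image.qcow2', 'rb') as f:
    resp = requests.post(
        'http://localhost:5000/workspaces/1/projects/2/functions/3/image',  # assumed route
        files={'image': f})
print(resp.status_code, resp.text)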
def validate_vnf(schema_index: int, descriptor: dict) -> None:
    schema = get_schema(schema_index, SCHEMA_ID_VNF)
    try:
        jsonschema.validate(descriptor, schema)
    except ValidationError as ve:
        raise InvalidArgument("Validation failed: <br/> Path: {} <br/> Error: {}".format(list(ve.path), ve.message))
Example #17
def update_workspace(workspace_data, wsid):
    """
    Updates the workspace with the given workspace data

    :param workspace_data: The new workspace configuration
    :param wsid: the workspace ID
    :return: The updated workspace
    """
    session = db_session()
    workspace = session.query(Workspace).filter(
        Workspace.id == int(wsid)).first()  # type: Workspace
    if workspace is None:
        raise NotFound("Workspace with id {} could not be found".format(wsid))

    # Update name
    if 'name' in workspace_data:
        if path.exists(workspace.path):
            new_name = workspace_data['name']
            old_path = workspace.path
            # only update if name has changed
            if new_name != workspace.name:
                new_path = rreplace(workspace.path, workspace.name, new_name,
                                    1)

                if path.exists(new_path):
                    raise NameConflict(
                        "Invalid name parameter, workspace '{}' already exists"
                        .format(new_name))

                # Do not allow move directories outside of the workspaces_dir
                if not new_path.startswith(WORKSPACES_DIR):
                    raise Exception(
                        "Invalid path parameter, you are not allowed to break out of {}"
                        .format(WORKSPACES_DIR))
                else:
                    # Move the directory
                    shutil.move(old_path, new_path)
                    workspace.name = new_name
                    workspace.path = new_path
    for platform in workspace.platforms:
        deleted = True
        if 'platforms' in workspace_data:
            for updated_platform in workspace_data['platforms']:
                if 'id' in updated_platform and platform.id == updated_platform[
                        'id']:
                    deleted = False
                    break
        if deleted:
            session.delete(platform)
    if 'platforms' in workspace_data:
        for updated_platform in workspace_data['platforms']:
            platform = None
            if 'id' in updated_platform:
                platform = session.query(Platform). \
                    filter(Platform.id == updated_platform['id']). \
                    filter(Platform.workspace == workspace). \
                    first()
            if platform:
                # update existing
                test_url(updated_platform['name'],
                         updated_platform['url'] + "/api/v2/packages")
                platform.name = updated_platform['name']
                platform.url = updated_platform['url']
                if 'token' in updated_platform:
                    platform.token_path = create_token_file(
                        updated_platform['token'])
            else:
                # create new
                test_url(updated_platform['name'], updated_platform['url'] +
                         "/api/v2/packages")  # TODO test this!
                new_platform = Platform(updated_platform['name'],
                                        updated_platform['url'], True,
                                        workspace)
                session.add(new_platform)
    for catalogue in workspace.catalogues:
        deleted = True
        if 'catalogues' in workspace_data:
            for updated_catalogue in workspace_data['catalogues']:
                if 'id' in updated_catalogue and catalogue.id == updated_catalogue[
                        'id']:
                    deleted = False
                    break
        if deleted:
            # check if catalogue is still referenced
            for project in workspace.projects:
                if catalogue.name in project.publish_to:
                    raise InvalidArgument(
                        "Cannot delete catalogue '{}' because it is still used in project '{}'!"
                        .format(catalogue.name, project.name))
            session.delete(catalogue)
    if 'catalogues' in workspace_data:
        for updated_catalogue in workspace_data['catalogues']:
            catalogue = None
            if 'id' in updated_catalogue:
                catalogue = session.query(Catalogue). \
                    filter(Catalogue.id == updated_catalogue['id']). \
                    filter(Catalogue.workspace == workspace). \
                    first()
            if catalogue:
                # update existing
                test_url(updated_catalogue['name'], updated_catalogue['url'])
                catalogue.name = updated_catalogue['name']
                catalogue.url = updated_catalogue['url']
            else:
                # create new
                test_url(updated_catalogue['name'], updated_catalogue['url'])
                new_catalogue = Catalogue(updated_catalogue['name'],
                                          updated_catalogue['url'], True,
                                          workspace)
                session.add(new_catalogue)
    update_workspace_descriptor(workspace)
    session.commit()
    return workspace.as_dict()
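
rreplace (used above to rename only the last path component of the workspace path) is assumed to follow the usual "replace the right-most occurrence" recipe:

def rreplace(s, old, new, count):
    # Replace the last `count` occurrences of `old` in `s` with `new`.
    return new.join(s.rsplit(old, count))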
def update_service(ws_id, project_id, service_id, service_data):
    """
    Update the service using the service data from the request
    
    Will also check for references by other services and create a copy if so

    :param ws_id: The Workspace ID
    :param project_id: The project ID
    :param service_id: The service ID
    :param service_data: The service data containing the "descriptor" and optionally some "meta" data
    :return: The updated service data
    """
    session = db_session()
    project = session.query(Project). \
        filter(Project.id == project_id).first()
    service = session.query(Service). \
        join(Project). \
        join(Workspace). \
        filter(Workspace.id == ws_id). \
        filter(Service.project == project). \
        filter(Service.id == service_id).first()
    if service:
        refs = get_references(service, session)
        old_file_name = get_file_path("nsd", service)
        old_uid = get_uid(service.vendor, service.name, service.version)
        # Parse parameters and update record
        if 'descriptor' in service_data:
            # validate service descriptor
            workspace = session.query(Workspace).filter(Workspace.id == ws_id).first()
            validate_service_descriptor(workspace.schema_index, service_data["descriptor"])
            try:
                newName = shlex.quote(service_data["descriptor"]["name"])
                newVendor = shlex.quote(service_data["descriptor"]["vendor"])
                newVersion = shlex.quote(service_data["descriptor"]["version"])
            except KeyError as ke:
                raise InvalidArgument("Missing key {} in service data".format(str(ke)))
            new_uid = get_uid(newVendor, newName, newVersion)
            if old_uid != new_uid:
                if refs:
                    # keep old version and create new version in db
                    service = Service(newName, newVersion, newVendor, project=project)
                    session.add(service)
                else:
                    service.name = newName
                    service.vendor = newVendor
                    service.version = newVersion
            service.descriptor = json.dumps(service_data["descriptor"])

        if 'meta' in service_data:
            service.meta = json.dumps(service_data["meta"])

        if 'descriptor' in service_data and old_uid != new_uid:
            new_file_name = get_file_path("nsd", service)
            try:
                if not old_file_name == new_file_name:
                    if refs:
                        shutil.copy(old_file_name, new_file_name)
                    else:
                        shutil.move(old_file_name, new_file_name)
            except:
                logger.exception("Could not update descriptor file:")
                raise

        write_ns_vnf_to_disk("nsd", service)
        session.commit()
        return service.as_dict()
    else:
        raise NotFound("Could not update service '{}', because no record was found".format(service_id))