def create_workspace(login: str, workspace_data: dict) -> dict:
    """
    Creates a workspace (on disk and in the database) from the given
    workspace data.

    :param login: Login name of the owning user
    :param workspace_data: The workspace configuration data
    :return: The created workspace descriptor as a dict
    :raises NameConflict: if a workspace with that name already exists
    """
    wsName = shlex.quote(workspace_data["name"])
    session = db_session()

    # test if the workspace name already exists for this user
    user = get_user(login)
    existingWorkspaces = list(
        session.query(Workspace).filter(Workspace.owner == user).filter(
            Workspace.name == wsName))
    if len(existingWorkspaces) > 0:
        raise NameConflict("Workspace with name " + wsName + " already exists")

    wsPath = path.join(WORKSPACES_DIR, user.name, wsName)
    # prepare db insert
    try:
        ws = Workspace(name=wsName, path=wsPath, owner=user)
        session.add(ws)
        if 'platforms' in workspace_data:
            for platform in workspace_data['platforms']:
                session.add(Platform(platform['name'], platform['url'], ws))
                test_url(platform['name'], platform['url'] + "/packages")
        if 'catalogues' in workspace_data:
            for catalogue in workspace_data['catalogues']:
                session.add(Catalogue(catalogue['name'], catalogue['url'], ws))
                test_url(catalogue['name'], catalogue['url'])
    except Exception:
        # BUGFIX: logger.exception() requires a message argument; the old
        # bare call raised TypeError inside the handler and masked the
        # original error.
        logger.exception("Could not create workspace:")
        session.rollback()
        raise

    # create workspace on disk via the son-workspace CLI tool
    proc = Popen(['son-workspace', '--init', '--workspace', wsPath],
                 stdout=PIPE, stderr=PIPE)
    out, err = proc.communicate()
    exitcode = proc.returncode
    # son-workspace reports an already existing workspace on stdout
    workspace_exists = out.decode().find('existing') >= 0
    if exitcode == 0 and not workspace_exists:
        synchronize_workspace_descriptor(ws, session)
        session.commit()
        return ws.as_dict()
    else:
        session.rollback()
        if workspace_exists:
            raise NameConflict(out.decode())
        raise Exception(err, out)
def push_to_platform(package_path: str, platform: Platform) -> str:
    """
    Pushes the package located at the package_path to the specified Platform.

    :param package_path: the location of package to be pushed on disk
    :param platform: The platform to upload to
    :return: The uuid reported by the platform, or the raw son-push output
    :raises ExtNotReachable: if the platform could not be reached
    :raises NameConflict: if son-push reported an error
    """
    proc = Popen(['son-push', platform.url, '-U', package_path],
                 stdout=PIPE, stderr=PIPE)
    out, err = proc.communicate()
    out = out.decode()
    err = err.decode()
    logger.info("Out:" + out)
    logger.info("Error:" + err)
    exitcode = proc.returncode
    # as of now exitcode is 0 even if there is an error, so inspect output.
    # BUGFIX: the old checks used bare truthiness ("... in out or err"),
    # which fired whenever stderr was non-empty; test the actual substring
    # in stderr instead.
    if "ConnectionError" in out or "ConnectionError" in err:
        raise ExtNotReachable("Could not connect to platform.")
    elif "error" in out.lower() or "error" in err.lower():
        raise NameConflict("Out: " + out + "Error: " + err)
    elif "201" in out:
        message = out.split(":", 1)[1]  # remove son-push message
        message = message.strip()[1:-1]  # remove line break and outer quotes
        uuid = json.loads(message)
        return uuid
    else:
        return out
def update_catalogue(workspace_id, catalogue_id, catalogue_data):
    """
    Updates a specific catalogue by its id.

    The catalogue applies the given name and url from catalogue_data.

    :param workspace_id: The Workspace ID
    :param catalogue_id: The Catalogue ID
    :param catalogue_data: dict containing the new 'name' and 'url'
    :return: The updated Catalogue descriptor
    :raises NotFound: if workspace or catalogue does not exist
    :raises NameConflict: if the new name is already taken
    """
    catalogue_name = shlex.quote(catalogue_data['name'])
    catalogue_url = shlex.quote(catalogue_data['url'])
    session = db_session()
    workspace = session.query(Workspace).filter(
        Workspace.id == workspace_id).first()
    if workspace is None:
        raise NotFound("workspace with id {} could not be found".format(workspace_id))
    catalogue = session.query(Catalogue). \
        filter(Catalogue.workspace == workspace). \
        filter(Catalogue.id == catalogue_id). \
        first()
    if catalogue is None:
        raise NotFound("catalogue with id {} could not be found".format(catalogue_id))
    if catalogue_name != catalogue.name:
        # BUGFIX: query the Catalogue model class. The old code queried the
        # fetched *instance* ("session.query(catalogue)") and filtered on its
        # plain attribute values, which is not a valid SQLAlchemy query.
        existing_catalogues = session.query(Catalogue). \
            filter(Catalogue.workspace == workspace). \
            filter(Catalogue.name == catalogue_data['name']). \
            all()
        if len(existing_catalogues) > 0:
            raise NameConflict("catalogue with name {} already exists".format(catalogue_data['name']))
    catalogue.name = catalogue_name
    catalogue.url = catalogue_url
    session.commit()
    update_workspace_descriptor(catalogue.workspace)
    return catalogue.as_dict()
def create_catalogue(workspace_id: int, catalogue_data):
    """
    Creates a catalogue in the given workspace.

    A catalogue is defined by its name and url, both taken from the given
    json data.

    :param workspace_id: Workspace ID of the target workspace, where the
        catalogue should get created.
    :param catalogue_data: dict containing 'name' and 'url'
    :return: Catalogue descriptor
    :raises NotFound: if the workspace does not exist
    :raises NameConflict: if a catalogue with that name already exists
    """
    safe_name = shlex.quote(catalogue_data['name'])
    safe_url = shlex.quote(catalogue_data['url'])
    session = db_session()

    # the target workspace must exist
    target_ws = session.query(Workspace).filter(Workspace.id == workspace_id).first()
    if target_ws is None:
        raise NotFound("workspace with id {} could not be found".format(workspace_id))

    # the catalogue name must be unique within the workspace
    duplicates = session.query(Catalogue) \
        .filter(Catalogue.workspace == target_ws) \
        .filter(Catalogue.name == catalogue_data['name']) \
        .all()
    if duplicates:
        raise NameConflict("catalogue with name {} already exists".format(catalogue_data['name']))

    new_catalogue = Catalogue(name=safe_name, url=safe_url, workspace=target_ws)
    session.add(new_catalogue)
    session.commit()
    update_workspace_descriptor(new_catalogue.workspace)
    return new_catalogue.as_dict()
def push_to_platform(package_path: str, ws: Workspace) -> str:
    """
    Pushes the package located at the package_path via the workspace's
    default platform (using son-access).

    :param package_path: the location of package to be pushed on disk
    :param ws: The workspace whose configured platform is used
    :return: The uuid returned by the platform (currently a placeholder
        string, see note below)
    :raises ExtNotReachable: if the platform could not be reached
    :raises NameConflict: if son-access reported an error
    """
    # TODO use platform id instead of default platform for ws
    proc = Popen(['son-access', "push", "--workspace", ws.path,
                  '--upload', package_path], stdout=PIPE, stderr=PIPE)
    out, err = proc.communicate()
    out = out.decode()
    err = err.decode()
    logger.info("Out:" + out)
    logger.info("Error:" + err)
    # NOTE: son-access exits 0 even when there is an error, so the return
    # code is useless here; inspect stdout/stderr instead.
    if "ConnectionError" in out or "ConnectionError" in err:
        raise ExtNotReachable("Could not connect to platform.")
    elif "201" in out:
        # We still need a reliable way to extract the uuid from the
        # son-access output (only needed for log printing); until then a
        # placeholder tells the user to check in the platform.
        uuid = 'service_uuid: check in platform'
        return uuid
    elif "error" in out.lower() or "error" in err.lower():
        raise NameConflict("Out: " + out + "Error: " + err)
    else:
        return out
def clone(ws_id: int, url: str, name: str = None):
    """
    Clones a repository by url into given workspace.

    :param ws_id: Destination workspace to clone into
    :param url: URL of the source repository
    :param name: Optional name of the local repository, otherwise the
        remote name is taken
    :return: Info dict if successful, otherwise NameConflict is thrown
    :raises NameConflict: if a project with that name already exists
    :raises NotImplementedError: if the url is not a GitHub url
    """
    workspace = get_workspace(ws_id)
    url_decode = parse.urlparse(url)
    if is_github(url_decode.netloc):
        # Take the suffix of url as first name candidate
        github_project_name = name
        if github_project_name is None:
            github_project_name = _repo_name_from_url(url_decode)
        dbsession = db_session()
        pj = dbsession.query(Project).join(Workspace) \
            .filter(Workspace.id == workspace.id).filter(
            Project.name == github_project_name).first()
        dbsession.commit()
        # Error when the project name in given workspace already exists
        if pj is not None:
            raise NameConflict('A project with name {} already exists'.format(github_project_name))
        project_target_path = os.path.join(workspace.path, PROJECT_REL_PATH,
                                           github_project_name)
        logger.info('Cloning from github repo...')
        # If url in GitHub domain, access by token
        url_with_token = _get_repo_url(url_decode)
        out, err, exitcode = git_command(['clone', url_with_token,
                                          project_target_path])
        # BUGFIX: compare the exit code with "==", not "is" -- identity
        # checks against int literals are implementation-dependent.
        if exitcode == 0:
            setup_git_user_email(project_target_path)
            # Check if the project is a valid son project
            check_son_validity(project_target_path)
            # Create project and scan it.
            dbsession = db_session()
            try:
                pj = Project(github_project_name, github_project_name, workspace)
                pj.repo_url = url
                sync_project_descriptor(pj)
                dbsession.add(pj)
                scan_project_dir(project_target_path, pj)
                dbsession.commit()
                # Check if the project is valid
                result = create_info_dict(out=out)
                result["id"] = pj.id
                return result
            except Exception:
                # undo the clone on disk if the scan/DB insert failed
                dbsession.rollback()
                shutil.rmtree(project_target_path)
                raise Exception("Scan project failed")
        else:
            return create_info_dict(err=err, exitcode=exitcode)
    # BUGFIX: NotImplemented is a constant, not an exception class; raising
    # it is itself a TypeError. NotImplementedError is the correct type.
    raise NotImplementedError("Cloning from other is not implemented yet. Only github is supported for now.")
def create_platform(workspace_id: int, platform_data) -> dict:
    """
    Create a new platform entry.

    :param workspace_id: ID of the workspace the platform belongs to
    :param platform_data: dict containing 'name' and 'url'
    :return: The created platform as a dict
    :raises NotFound: if the workspace does not exist
    :raises NameConflict: if a platform with that name already exists
    """
    safe_name = shlex.quote(platform_data['name'])
    safe_url = shlex.quote(platform_data['url'])
    session = db_session()

    # the target workspace must exist
    ws = session.query(Workspace).filter(Workspace.id == workspace_id).first()
    if ws is None:
        raise NotFound(
            "workspace with id {} could not be found".format(workspace_id))

    # the platform name must be unique within the workspace
    duplicates = session.query(Platform) \
        .filter(Platform.workspace == ws) \
        .filter(Platform.name == platform_data['name']) \
        .all()
    if duplicates:
        raise NameConflict("Platform with name {} already exists".format(
            platform_data['name']))

    new_platform = Platform(name=safe_name, url=safe_url, workspace=ws)
    session.add(new_platform)
    update_workspace_descriptor(new_platform.workspace)
    session.commit()
    return new_platform.as_dict()
def update_project(project_data, project_id):
    """
    Update the Project with the given data.

    :param project_data: dict with updated project fields; may contain 'name'
    :param project_id: ID of the project to update
    :return: The updated project as a dict
    :raises NotFound: if the project does not exist
    :raises NameConflict: if the new name is already taken on disk
    """
    session = db_session()
    project = session.query(Project).filter(Project.id == project_id).first()
    if project is None:
        raise NotFound("Project with id {} could not be found".format(project_id))
    # Update name
    if 'name' in project_data and project_data['name'] != project.name:
        if os.path.exists(get_project_path(project.workspace.path, project.rel_path)):
            new_name = shlex.quote(project_data['name'])
            old_path = get_project_path(project.workspace.path, project.rel_path)
            new_path = rreplace(old_path, project.name, new_name, 1)
            if os.path.exists(new_path):
                # BUGFIX: the old message wrongly said "workspace"
                raise NameConflict("Invalid name parameter, project '{}' already exists".format(new_name))
            # Do not allow move directories outside of the workspaces_dir
            if not new_path.startswith(WORKSPACES_DIR):
                raise Exception("Invalid path parameter, you are not allowed to break out of {}".format(WORKSPACES_DIR))
            else:
                # Move the directory
                shutil.move(old_path, new_path)
                project.name = new_name
                project.rel_path = new_name
    set_data(project, project_data)
    sync_project_descriptor(project)
    # BUGFIX: commit on the session instance (like the rest of the module);
    # the old code called commit() on the db_session factory itself.
    session.commit()
    return project.as_dict()
def create_service(ws_id: int, project_id: int, service_data: dict) -> dict:
    """
    Creates a service in the given project.

    :param ws_id: The Workspace of the project
    :param project_id: The Project of the Service
    :param service_data: the service descriptor
    :return: The created service descriptor
    :raises InvalidArgument: if a mandatory descriptor key is missing
    :raises NameConflict: if the name/vendor/version is already taken
    :raises NotFound: if the project does not exist
    """
    session = db_session()
    project = session.query(Project).filter_by(id=project_id).first()
    if project:
        # Retrieve post parameters
        try:
            service_name = shlex.quote(service_data['descriptor']["name"])
            vendor_name = shlex.quote(service_data['descriptor']["vendor"])
            version = shlex.quote(service_data['descriptor']["version"])
        except KeyError as ke:
            raise InvalidArgument("Missing key {} in service data".format(
                str(ke)))

        existing_services = list(
            session.query(Service).join(Project).join(Workspace).filter(
                Workspace.id == ws_id).filter(
                Service.project == project).filter(
                Service.name == service_name).filter(
                Service.vendor == vendor_name).filter(
                Service.version == version))
        if len(existing_services) > 0:
            raise NameConflict(
                "A service with this name/vendor/version already exists")

        # validate service descriptor
        workspace = session.query(Workspace).filter(
            Workspace.id == ws_id).first()
        validate_service_descriptor(workspace.ns_schema_index,
                                    service_data["descriptor"])

        # Create db object
        service = Service(name=service_name,
                          vendor=vendor_name,
                          version=version,
                          project=project,
                          descriptor=json.dumps(service_data["descriptor"]),
                          meta=json.dumps(service_data["meta"]))
        session.add(service)
        try:
            write_ns_vnf_to_disk("nsd", service)
        except Exception:
            logger.exception("Could not create service:")
            session.rollback()
            raise
        session.commit()
        return service.as_dict()
    else:
        session.rollback()
        # BUGFIX: the old message mixed a straight and a curly quote ("'{}‘")
        raise NotFound("Project with id '{}' not found".format(project_id))
def create_commit_and_push(ws_id: int, project_id: int, remote_repo_name: str):
    """
    Creates a remote GitHub repository named remote_repo_name and pushes
    the given git project into it.

    :param ws_id: Workspace ID
    :param project_id: Project ID to create and push it
    :param remote_repo_name: Remote repository name
    :return: Result of commit_and_push on success
    :raises NameConflict: if the repository already exists on GitHub
    """
    database_session = db_session()
    try:
        project = get_project(ws_id, project_id, database_session)
        # Equivalent to:
        # curl -H "Authorization: token [TOKEN]" -X POST https://api.github.com/user/repos --data '{"name":"repo_name"}'
        repo_data = {'name': remote_repo_name}
        request = requests.post(Github.API_URL + Github.API_CREATE_REPO_REL,
                                json=repo_data,
                                headers=create_oauth_header())
        # Handle exceptions
        if request.status_code != 201:
            # 422 means the repository already exists
            if request.status_code == 422:
                raise NameConflict(
                    "Repository with name {} already exist on GitHub".format(
                        remote_repo_name))
            raise Exception("Unhandled status_code: {}\n{}".format(
                request.status_code, request.text))
        # Get git url and commit to db
        data = json.loads(request.text)
        git_url = data['svn_url']
        project.repo_url = git_url
        database_session.commit()
    except Exception:
        database_session.rollback()
        raise
    # Try to push project
    try:
        # Give github some time to see created repo
        # (dirty hack)
        time.sleep(0.5)
        return commit_and_push(ws_id, project_id, "Initial commit")
    except Exception:
        # Delete newly created repository if commit and push failed.
        # Best effort: the delete response is intentionally ignored
        # (the old unused "result" binding was dropped).
        requests.delete(build_github_delete(
            session['user_data']['login'], remote_repo_name),
            headers=create_oauth_header())
        # Reraise
        raise
def create_function(ws_id: int, project_id: int, function_data: dict) -> dict:
    """
    Creates a new vnf in the project.

    :param ws_id: The workspace ID
    :param project_id: The Project ID
    :param function_data: The function data to create
    :return: The created function as a dict
    :raises InvalidArgument: if a mandatory descriptor key is missing
    :raises NameConflict: if name/vendor/version is already taken
    :raises NotFound: if the project does not exist
    """
    try:
        function_name = shlex.quote(function_data['descriptor']["name"])
        vendor_name = shlex.quote(function_data['descriptor']["vendor"])
        version = shlex.quote(function_data['descriptor']["version"])
    except KeyError as ke:
        raise InvalidArgument("Missing key {} in function data".format(
            str(ke)))
    session = db_session()
    ws = session.query(Workspace).filter(
        Workspace.id == ws_id).first()  # type: Workspace
    validate_vnf(ws.schema_index, function_data['descriptor'])
    # test if function name already exists in database
    existing_functions = list(
        session.query(Function).join(Project).join(Workspace).filter(
            Workspace.id == ws_id).filter(
            Function.project_id == project_id).filter(
            Function.vendor == vendor_name).filter(
            Function.name == function_name).filter(
            Function.version == version))
    if len(existing_functions) > 0:
        raise NameConflict("Function with name " + function_name + " already exists")
    project = session.query(Project).filter(Project.id == project_id).first()
    if project is None:
        # BUGFIX: the old code concatenated the int project_id to a str,
        # which raised TypeError instead of the intended NotFound.
        raise NotFound("No project with id {} was found".format(project_id))
    function = Function(name=function_name,
                        project=project,
                        vendor=vendor_name,
                        version=version,
                        descriptor=json.dumps(function_data['descriptor']))
    session.add(function)
    try:
        write_ns_vnf_to_disk("vnf", function)
    except Exception:
        logger.exception("Could not write data to disk:")
        session.rollback()
        raise
    session.commit()
    return function.as_dict()
def deploy_on_platform(service_uuid: dict, platform: Platform) -> str:
    """
    Deploys the service identified by service_uuid on the specified platform.

    :param service_uuid: a dictionary with the service uuid on the platform
    :param platform: The platform to deploy on
    :return: The raw son-push output
    :raises ExtNotReachable: if the platform could not be reached
    :raises NameConflict: if son-push reported an error
    """
    proc = Popen(['son-push', platform.url, '-D',
                  str(service_uuid['service_uuid'])],
                 stdout=PIPE, stderr=PIPE)
    out, err = proc.communicate()
    out = out.decode()
    err = err.decode()
    logger.info("Out:" + out)
    logger.info("Error:" + err)
    exitcode = proc.returncode
    # as of now exitcode is 0 even if there is an error, so inspect output.
    # BUGFIX: the old checks used bare truthiness ("... in out or err"),
    # which fired whenever stderr was non-empty; test the actual substring
    # in stderr instead.
    if "ConnectionError" in out or "ConnectionError" in err:
        raise ExtNotReachable("Could not connect to platform.")
    elif "error" in out.lower() or "error" in err.lower():
        raise NameConflict("Out: " + out + "Error: " + err)
    else:
        return out
def update_function(ws_id: int, prj_id: int, func_id: int, func_data: dict) -> dict:
    """
    Update the function descriptor.

    Validates the new descriptor, then renames/moves the descriptor files on
    disk when vendor/name/version (the uid) changed. Depending on
    func_data['edit_mode'] the function is either duplicated ("create_new",
    when other services still reference the old uid) or the references are
    rewritten in place.

    :param ws_id: The Workspace ID
    :param prj_id: The Project ID
    :param func_id: The function ID
    :param func_data: The function data for updating (must contain
        'descriptor' and 'edit_mode')
    :return: The updated function descriptor
    :raises NotFound: if the function does not exist
    :raises InvalidArgument: if a mandatory descriptor key is missing
    :raises NameConflict: if the new name/vendor/version already exists
    """
    session = db_session()
    ws = session.query(Workspace).filter(Workspace.id == ws_id).first()
    validate_vnf(ws.schema_index, func_data['descriptor'])
    edit_mode = func_data['edit_mode']
    # test if function exists in database
    function = session.query(Function). \
        join(Project). \
        join(Workspace). \
        filter(Workspace.id == ws_id). \
        filter(Project.id == prj_id). \
        filter(Function.id == func_id).first()
    if function is None:
        session.rollback()
        raise NotFound("Function with id {} does not exist".format(func_id))
    # remember the current on-disk location and uid before applying changes
    old_file_name = get_file_path("vnf", function)
    old_folder_path = old_file_name.replace(get_file_name(function), "")
    old_uid = get_uid(function.vendor, function.name, function.version)
    try:
        new_name = shlex.quote(func_data['descriptor']["name"])
        new_vendor = shlex.quote(func_data['descriptor']["vendor"])
        new_version = shlex.quote(func_data['descriptor']["version"])
    except KeyError as ke:
        session.rollback()
        raise InvalidArgument("Missing key {} in function data".format(
            str(ke)))
    # check if new name already exists (on a different function)
    function_dup = session.query(Function). \
        join(Project). \
        join(Workspace). \
        filter(Workspace.id == ws_id). \
        filter(Project.id == prj_id). \
        filter(Function.name == new_name). \
        filter(Function.vendor == new_vendor). \
        filter(Function.version == new_version). \
        filter(Function.id != func_id).first()
    if function_dup:
        session.rollback()
        raise NameConflict(
            "A function with that name, vendor and version already exists")
    new_uid = get_uid(new_vendor, new_name, new_version)
    refs = get_references(function, session)
    if old_uid != new_uid:
        if refs:
            if edit_mode == "create_new":
                # keep the referenced original; create a fresh row that will
                # receive the new vendor/name/version below
                function = Function(project=function.project)
                session.add(function)
            else:
                # rewrite all referencing services to the new uid
                replace_function_refs(refs, function.vendor, function.name,
                                      function.version, new_vendor, new_name,
                                      new_version)
        function.vendor = new_vendor
        function.name = new_name
        function.version = new_version
        function.uid = new_uid
    function.descriptor = json.dumps(func_data['descriptor'])
    try:
        if old_uid != new_uid:
            new_file_name = get_file_path("vnf", function)
            new_folder_path = new_file_name.replace(get_file_name(function), "")
            if old_folder_path != new_folder_path:
                # move old files to new location
                os.makedirs(new_folder_path)
                for file in os.listdir(old_folder_path):
                    if not file.endswith(".yml"):  # don't move descriptor yet
                        if refs and edit_mode == "create_new":
                            # duplicating: copy auxiliary files instead of
                            # moving so the original keeps its files
                            if os.path.isdir(
                                    os.path.join(old_folder_path, file)):
                                shutil.copytree(
                                    os.path.join(old_folder_path, file),
                                    os.path.join(new_folder_path, file))
                            else:
                                shutil.copy(
                                    os.path.join(old_folder_path, file),
                                    os.path.join(new_folder_path, file))
                        else:
                            shutil.move(os.path.join(old_folder_path, file),
                                        os.path.join(new_folder_path, file))
            # now handle the descriptor file itself
            if refs and edit_mode == "create_new":
                shutil.copy(old_file_name, new_file_name)
            else:
                shutil.move(old_file_name, new_file_name)
            if old_folder_path != new_folder_path and not (refs and edit_mode == "create_new"):
                # cleanup old folder if no other descriptor exists
                if not os.listdir(old_folder_path):
                    shutil.rmtree(old_folder_path)
        write_ns_vnf_to_disk("vnf", function)
        if refs and old_uid != new_uid and edit_mode == 'replace_refs':
            # persist the rewritten references of all affected services
            for service in refs:
                write_ns_vnf_to_disk("ns", service)
    except:
        session.rollback()
        logger.exception("Could not update descriptor file:")
        raise
    session.commit()
    return function.as_dict()
def create_project(ws_id: int, project_data: dict) -> dict:
    """
    Create a new Project in this workspace.

    :param ws_id: The workspace ID
    :param project_data: dict with at least 'name'; an optional 'repo' url
        triggers a git clone instead of a plain project creation
    :return: The new project descriptor as a dict
    :raises NotFound: if the workspace does not exist
    :raises NameConflict: if a project with that name already exists
    """
    project_name = shlex.quote(project_data["name"])
    repo = None
    if "repo" in project_data:
        repo = project_data["repo"]
    if repo:
        # delegate to git clone when a remote repository is given
        return gitimpl.clone(ws_id, repo, project_name)

    session = db_session()
    # test if the project name already exists in this workspace
    workspace = session.query(Workspace). \
        filter(Workspace.id == ws_id).first()
    if workspace is None:
        raise NotFound("No workspace with id {} was found".format(ws_id))
    existing_projects = list(session.query(Project)
                             .filter(Project.workspace == workspace)
                             .filter(Project.name == project_name))
    if len(existing_projects) > 0:
        raise NameConflict("Project with name '{}' already exists in this workspace".format(project_name))

    # prepare db insert
    try:
        project = Project(name=project_name, rel_path=project_name, workspace=workspace)
        set_data(project, project_data)
        session.add(project)
    except Exception:
        session.rollback()
        raise

    # create project on disk via the son-workspace CLI tool
    proc = Popen(['son-workspace',
                  '--workspace', workspace.path,
                  '--project',
                  get_project_path(workspace.path, project_name)],
                 stdout=PIPE, stderr=PIPE)
    out, err = proc.communicate()
    exitcode = proc.returncode
    # son-workspace reports an already existing project on stderr
    project_exists = err.decode().find('exists') >= 0
    if exitcode == 0 and not project_exists:
        sync_project_descriptor(project)
        session.commit()
        scan_project_dir(get_project_path(workspace.path, project_name), project)
        return project.as_dict()
    else:
        session.rollback()
        if project_exists:
            raise NameConflict("Project with name '{}' already exists in this workspace".format(project_name))
        raise Exception(err.decode(), out.decode())
def update_workspace(workspace_data, wsid):
    """
    Updates the workspace with the given workspace data.

    Renames the workspace on disk if the name changed, then reconciles the
    platform and catalogue lists: entries missing from workspace_data are
    deleted, entries with an 'id' are updated, entries without one are
    created.

    :param workspace_data: The new workspace configuration
    :param wsid: the workspace ID
    :return: The updated workspace
    :raises NotFound: if the workspace does not exist
    :raises NameConflict: if the new name is already taken on disk
    :raises InvalidArgument: if a deleted catalogue is still referenced
    """
    session = db_session()
    workspace = session.query(Workspace).filter(
        Workspace.id == int(wsid)).first()  # type: Workspace
    if workspace is None:
        raise NotFound("Workspace with id {} could not be found".format(wsid))
    # Update name
    if 'name' in workspace_data:
        if path.exists(workspace.path):
            new_name = workspace_data['name']
            old_path = workspace.path
            # only update if name has changed
            if new_name != workspace.name:
                new_path = rreplace(workspace.path, workspace.name, new_name, 1)
                if path.exists(new_path):
                    raise NameConflict(
                        "Invalid name parameter, workspace '{}' already exists"
                        .format(new_name))
                # Do not allow move directories outside of the workspaces_dir
                if not new_path.startswith(WORKSPACES_DIR):
                    raise Exception(
                        "Invalid path parameter, you are not allowed to break out of {}"
                        .format(WORKSPACES_DIR))
                else:
                    # Move the directory
                    shutil.move(old_path, new_path)
                    workspace.name = new_name
                    workspace.path = new_path
    # delete platforms that are no longer present in workspace_data
    for platform in workspace.platforms:
        deleted = True
        if 'platforms' in workspace_data:
            for updated_platform in workspace_data['platforms']:
                if 'id' in updated_platform and platform.id == updated_platform['id']:
                    deleted = False
                    break
        if deleted:
            session.delete(platform)
    # update existing platforms / create new ones
    if 'platforms' in workspace_data:
        for updated_platform in workspace_data['platforms']:
            platform = None
            if 'id' in updated_platform:
                platform = session.query(Platform). \
                    filter(Platform.id == updated_platform['id']). \
                    filter(Platform.workspace == workspace). \
                    first()
            if platform:
                # update existing
                test_url(updated_platform['name'],
                         updated_platform['url'] + "/api/v2/packages")
                platform.name = updated_platform['name']
                platform.url = updated_platform['url']
                if 'token' in updated_platform:
                    platform.token_path = create_token_file(
                        updated_platform['token'])
            else:
                # create new
                test_url(updated_platform['name'],
                         updated_platform['url'] + "/api/v2/packages")
                # TODO test this!
                new_platform = Platform(updated_platform['name'],
                                        updated_platform['url'],
                                        True,
                                        workspace)
                session.add(new_platform)
    # delete catalogues that are no longer present in workspace_data
    for catalogue in workspace.catalogues:
        deleted = True
        if 'catalogues' in workspace_data:
            for updated_catalogue in workspace_data['catalogues']:
                if 'id' in updated_catalogue and catalogue.id == updated_catalogue['id']:
                    deleted = False
                    break
        if deleted:
            # check if catalogue is still referenced
            for project in workspace.projects:
                if catalogue.name in project.publish_to:
                    raise InvalidArgument(
                        "Cannot delete catalogue '{}' because it is still used in project '{}'!"
                        .format(catalogue.name, project.name))
            session.delete(catalogue)
    # update existing catalogues / create new ones
    if 'catalogues' in workspace_data:
        for updated_catalogue in workspace_data['catalogues']:
            catalogue = None
            if 'id' in updated_catalogue:
                catalogue = session.query(Catalogue). \
                    filter(Catalogue.id == updated_catalogue['id']). \
                    filter(Catalogue.workspace == workspace). \
                    first()
            if catalogue:
                # update existing
                test_url(updated_catalogue['name'], updated_catalogue['url'])
                catalogue.name = updated_catalogue['name']
                catalogue.url = updated_catalogue['url']
            else:
                # create new
                test_url(updated_catalogue['name'], updated_catalogue['url'])
                new_catalogue = Catalogue(updated_catalogue['name'],
                                          updated_catalogue['url'],
                                          True,
                                          workspace)
                session.add(new_catalogue)
    update_workspace_descriptor(workspace)
    # BUGFIX: commit on the session instance (like the rest of the module);
    # the old code called commit() on the db_session factory itself.
    session.commit()
    return workspace.as_dict()