def validate_machine_group(machine_group_details):
    """Validate a machine-group payload before create/update.

    Raises Exception when a mandatory field is missing, the group name
    contains a comma or fails name validation, any referenced machine
    does not exist, or flexible attributes fail validation.
    """
    keys_to_validate = ["group_name", "machine_id_list"]
    for key in keys_to_validate:
        if not machine_group_details.get(key):
            raise Exception("Mandatory Field: " +
                            str(key).replace('_', ' ').upper() +
                            " is missing")
    # A comma in the name would break comma-separated encodings elsewhere.
    if "," in machine_group_details.get("group_name"):
        raise Exception("Field: GROUP NAME cannot contain comma")
    # BUG FIX: removed the unreachable `(a and b) is None` re-check —
    # both keys were already verified non-falsy by the loop above.
    HelperServices.validate_name(machine_group_details.get("group_name"),
                                 "machine group name")
    if len(machine_group_details.get("machine_id_list", [])) < 1:
        raise Exception("atleast one machine is required in group")
    # Every referenced machine must exist in the database.
    for machine_id in machine_group_details["machine_id_list"]:
        if machineDB.GetMachine(machine_id) is None:
            raise Exception("Machine with _id : " + machine_id +
                            " not exists")
    if machine_group_details.get("flexible_attributes"):
        FlexibleAttributesHelper.validate_entity_value(
            "MachineGroup", machine_group_details.get("flexible_attributes"))
def verify_tool_set_data(toolsetData):
    """Verify ToolSetData.

    Validates the toolset name, requires at least two tool versions,
    resolves each referenced version against the database, and rebuilds
    tool_set entries as {version_id, tool_version, tool_id}. Normalizes
    tags to their ids. Raises ValueError on any missing or unknown
    reference. Returns the normalized toolsetData.
    """
    if toolsetData.get("name"):
        HelperServices.validate_name(toolsetData.get("name"), "toolset name")
    if toolsetData.get("tool_set") and len(toolsetData.get("tool_set")) > 1:
        tool_set = []
        for tool_version in toolsetData.get("tool_set"):
            if not tool_version.get("version_id"):
                raise ValueError("version_id was not found in request")
            if not tool_version.get("tool_version"):
                raise ValueError("tool_version was not found in request")
            versionDetails = versionsDB.get_version(
                tool_version.get("version_id"), False)
            if not versionDetails:
                raise ValueError("Version with version_id:" +
                                 tool_version.get("version_id") +
                                 " was not found in database")
            if not versionDetails.get("tool_id"):
                raise ValueError("Version with version_id:" +
                                 tool_version.get("version_id") +
                                 " in database has missing tool_id")
            # BUG FIX: "tool_version" previously copied version_id by
            # mistake, silently losing the version label.
            tool_set.append({
                "version_id": tool_version.get("version_id"),
                "tool_version": tool_version.get("tool_version"),
                "tool_id": versionDetails.get("tool_id")
            })
        toolsetData["tool_set"] = tool_set
    else:
        raise ValueError("Specify at least two tool versions in the ToolSet")
    # CHECK TAG
    if not toolsetData.get("tag"):
        toolsetData["tag"] = []
    else:
        toolsetData["tag"] = tagDB.get_tag_ids_from_given_ids_list(
            toolsetData["tag"])
    return toolsetData
def add_update_version(VersionsData, tool_id, version_id, allDetails=False):
    """Add or update a Version record for a tool.

    Copies validated/whitelisted fields from VersionsData into a fresh
    document, parses version_date (date part only), verifies the target
    repository exists, and inserts or updates via versionsDB. Fields
    jenkins_job/gitlab_repo/git_url are only copied when allDetails is
    True (i.e. from the edit-tools path, not from sync). Returns the
    resulting record id as a string; raises on parse/validation failure.
    """
    versionInsert = {}
    versionInsert['tool_id'] = tool_id
    if VersionsData.get("pre_requiests") is not None:
        versionInsert['pre_requiests'] = VersionsData['pre_requiests']
    if VersionsData.get("version_name") is not None:
        HelperServices.validate_name(VersionsData.get("version_name"),
                                     "version label")
        versionInsert['version_name'] = VersionsData['version_name']
    if VersionsData.get("version_date") is not None:
        try:
            # Only the date part (text before the first whitespace) is kept.
            versionInsert['version_date'] = datetime.strptime(
                str(VersionsData['version_date']).split()[0], "%Y-%m-%d")
        except Exception:  # catch *all* exceptions
            traceback.print_exc()
            # BUG FIX: the old message claimed %Y-%m-%d %H:%M:%S.%f was
            # required, but the code only parses the leading %Y-%m-%d.
            raise Exception(
                "Failed while parsing version_date. Expected a value "
                "starting with %Y-%m-%d . Example 2016-07-15 13:01:09.758000")
    if VersionsData.get("version_number") is not None:
        HelperServices.validate_name(VersionsData.get("version_number"),
                                     "version number")
        versionInsert['version_number'] = VersionsData['version_number']
    # Simple pass-through fields copied verbatim when present.
    for field in ("backward_compatible", "mps_certified", "release_notes",
                  "gitlab_branch", "branch_tag", "deployer_to_use"):
        if VersionsData.get(field) is not None:
            versionInsert[field] = VersionsData[field]
    if VersionsData.get("dependent_tools") is not None:
        versionInsert['dependent_tools'] = VersionsData['dependent_tools']
    else:
        versionInsert['dependent_tools'] = []
    if VersionsData.get("repository_to_use") is not None:
        if RepositoryHelperService.check_if_repo_exists_by_name(
                VersionsData.get("repository_to_use")):
            versionInsert['repository_to_use'] = \
                VersionsData['repository_to_use']
        else:
            raise Exception(VersionsData.get("repository_to_use") +
                            ": No such repository exists")
    if allDetails:
        # WE DONT WANT TO UPDATE THESE FIELDS WHILE USING SYNC,
        # ONLY WHEN EDITING TOOLS
        for field in ("jenkins_job", "gitlab_repo", "git_url"):
            if VersionsData.get(field) is not None:
                versionInsert[field] = VersionsData[field]
    if version_id:
        versionInsert["_id"] = {"oid": version_id}
        versionInsert["status"] = VersionsData["status"]
        result = versionsDB.update_version(versionInsert)
    else:
        # New versions always start in status "1" (active).
        versionInsert["status"] = "1"
        result = versionsDB.add_version(versionInsert)
    if result is None:
        raise Exception("Unable to add new version")
    return str(result)
def post(self):
    """Create a new user, mail the generated password, return an auth token.

    Enforces that only a SuperAdmin can create another SuperAdmin,
    validates mandatory fields and the referenced account/role, assigns
    the Guest role when none is supplied, and adds the user to the
    requested teams. Mail/permission refresh failures are best-effort.
    """
    user = request.get_json()
    HelperServices.validate_name(user.get("user"), "username")
    user_id = authService.get_userid_by_auth_token()
    if user_id is None:
        raise Exception("Token verification failed")
    loggedInUser = userdb.get_user_by_id(user_id, False)
    loggedInUserRole = roledb.get_role_by_id(loggedInUser["roleid"], True)
    if loggedInUserRole["name"].lower() != "superadmin":
        newUserRole = roledb.get_role_by_id(user.get("roleid"), False)
        if newUserRole["name"].lower() == "superadmin":
            raise ValueError("Only SuperAdmin can create a SuperAdmin user")
    # BUG FIX: the original `(a and b and c and d) is None` only detected
    # a missing field when the first falsy value was literally None;
    # check each mandatory key individually.
    if any(user.get(key) is None
           for key in ("employeeid", "user", "email", "accountid")):
        raise Exception("Mandatory fields to create a new user was not found.")
    if userdb.get_user(user.get("user"), False) is not None:
        raise Exception("User already exists")
    if accountDB.get_account(user.get("accountid")) is None:
        raise Exception("Account does not exists")
    addData = {"user": user.get("user").lower(), "status": "active"}
    if user.get("roleid") is None:
        # Default new users to the Guest role.
        addData["roleid"] = str(
            roledb.get_role_by_name('Guest', False)["_id"])
    else:
        if roledb.get_role_by_id(user.get("roleid"), False) is None:
            raise Exception("Role does not exists")
    user.update(addData)
    passw = genrate_random_key()
    user["password"] = passw
    result = userdb.add_user(user)
    if user.get("included_in"):
        for team_id in user["included_in"]:
            teamDB.add_user_to_team(team_id, str(result))
    try:
        # Best effort: notify the user and refresh team caches; failures
        # here must not fail the creation itself.
        systemdetails = systemDetailsDB.get_system_details_single()
        mailer.send_html_notification(
            user.get("email"), None, None, 14, {
                "name": user.get("user"),
                "password": passw,
                "machine_host": systemdetails.get("hostname")
            })
        teamService.generate_details()
    except Exception:
        traceback.print_exc()
    return {
        "result": "success",
        "message": "A new user was created..Token was generated",
        "data": {
            "Token": authService.generate_auth_token(str(result))
        }
    }, 200
def updatedu(self, du, full_sync_flag="false", directory_to_import_from=None):
    """Start Tool Update.

    Updates a local DeploymentUnit from a sync payload: applies DU data
    when operation == "update", creates/updates every build flagged
    to_process, deactivates all local builds not present in the payload,
    and applies deployment fields. Returns a result/message dict; all
    exceptions are caught and reported as a "failed" result.
    """
    dudata = du.get("du_data")
    builds = dudata.get('build')
    deployment_field = dudata.get('deployment_field')
    # Resolve the local DU by name to obtain its id.
    # NOTE(review): assumes the DU already exists locally — a missing DU
    # would make localDu None and raise AttributeError; confirm callers.
    localDu = self.deploymentunitDB.GetDeploymentUnitByName(dudata["name"])
    du_id = str(localDu.get("_id"))
    try:
        if dudata.get("operation").lower() == "update":
            DuHelperService.add_update_du(dudata, du_id, self.logo_path,
                                          directory_to_import_from,
                                          self.full_logo_path, False)
        # HANDLE BUILD
        if builds is not None and len(builds) > 0:
            builds_handled = []  # WE need to deactivate all other builds
            builds_not_to_process = []
            for build in builds:
                # to_process defaults to "true" when absent.
                if build.get("to_process", "true").lower() == "true":
                    # Strip the control flags before persisting the build.
                    if build.get("to_process"):
                        build.pop("to_process")
                    if build.get("to_process_reason"):
                        build.pop("to_process_reason")
                    BuildHelperService.add_update_build(
                        build, du_id,
                        join(
                            directory_to_import_from,
                            os.path.join("artifacts",
                                         dudata["repository_to_use"])))
                    builds_handled.append(build["build_number"])
                else:
                    builds_not_to_process.append(build["build_number"])
            # SUPPOSE THE ACCOUNT SENDS 2 BUILDS THAT ARE ACTIVE THEY WILL
            # BE HANDLED, BUT ALL OTHER BUILDS SHOULD BE MADE INACTIVE IN
            # LOCAL.
            for build in self.buildsDB.get_all_builds(du_id):
                if build["build_number"] not in builds_handled and build[
                        "build_number"] not in builds_not_to_process:
                    build_id = build.get("_id")
                    build["_id"] = {}
                    build["_id"]["oid"] = str(build_id)
                    build["status"] = "0"  # "0" marks the build inactive
                    self.buildsDB.update_build(build)
        # HANDLE DEPLOYMENT FIELDS
        if deployment_field is not None \
                and len(deployment_field) > 0:
            HelperServices.add_update_deployment_fields(
                deployment_field['fields'], du_id)
        return {
            "result": "success",
            "message": dudata["name"] + " was updated"
        }
    except Exception as e_value:  # catch *all* exceptions
        traceback.print_exc()
        return {"result": "failed", "message": str(e_value)}
def add_update_tool(tooldata, tool_id, logo_path, directory_to_import_from,
                    full_logo_path):
    """Create or update a Tool record and refresh cached team permissions.

    Builds a fresh document from the whitelisted fields of tooldata,
    normalizes tags and boolean-ish flags to strings, attaches the logo,
    and inserts or updates depending on whether tool_id is given.
    Returns the resulting record id as a string.
    """
    if not tooldata.get("name"):
        raise ValueError("Invalid Tool Name")
    HelperServices.validate_name(tooldata.get("name"), "tool name")

    tool_doc = {"name": tooldata["name"]}
    # CHECK TAG — normalize tags to their ids (empty list when none given).
    if tooldata.get("tag"):
        tool_doc["tag"] = tagDB.get_tag_ids_from_given_ids_list(
            tooldata["tag"])
    else:
        tool_doc["tag"] = []
    # Optional fields copied verbatim when present.
    for optional in ("support_details", "logo", "description",
                     "tool_creation_source"):
        if tooldata.get(optional):
            tool_doc[optional] = tooldata.get(optional)
    # allow_build_download is always stored, defaulting to 'False'.
    if tooldata.get("allow_build_download"):
        tool_doc["allow_build_download"] = str(
            tooldata.get("allow_build_download"))
    else:
        tool_doc["allow_build_download"] = 'False'
    # These flags are stored as strings only when supplied.
    for flag in ("is_tool_cloneable", "artifacts_only"):
        if tooldata.get(flag):
            tool_doc[flag] = str(tooldata.get(flag))
    tool_doc["status"] = tooldata.get("status", "1")
    tool_doc = HelperServices.add_update_logo(tool_doc, logo_path,
                                              full_logo_path,
                                              directory_to_import_from)
    # ADD UPDATE DATA
    if tool_id:
        tool_doc["_id"] = {"oid": tool_id}
        result = toolDB.update_tool(tool_doc)
    else:
        result = toolDB.add_tool(tool_doc)
    if result is None:
        raise Exception("Unable to create/update tool " + tool_doc["name"])
    # RELOAD TEAM PERMISSIONS
    teamService.generate_details()
    return str(result)
def put(self):
    """Update an existing prerequisite, looked up by its name."""
    payload = request.get_json()
    HelperServices.validate_name(payload.get("prerequisites_name"),
                                 "prerequisite name")
    existing = preRequisitesDB.get_pre_requisites(
        payload.get("prerequisites_name"))
    # Only proceed when a matching record with a name was found.
    if not (existing and existing.get("prerequisites_name")):
        raise Exception("Invalid input data")
    payload["_id"] = existing["_id"]
    preRequisitesDB.update_pre_requisites(payload)
    return {"result": "success",
            "message": "preRequisites updated successfully"}, 200
def post(self):
    """Create a new prerequisite.

    Validates the name, rejects duplicates, and defaults version_command
    to "--version" when it is absent or empty. Returns the new record id.
    """
    preRequisites = request.get_json()
    if preRequisites.get("prerequisites_name") is None:
        raise Exception("Mandatory fields prerequisites_name to create a new prerequisite was not found.")
    HelperServices.validate_name(preRequisites.get("prerequisites_name"),
                                 "prerequisite name")
    if preRequisitesDB.get_pre_requisites(
            preRequisites.get("prerequisites_name")) is not None:
        raise Exception("Prerequisite with name " +
                        preRequisites.get("prerequisites_name") +
                        " already exists")
    # BUG FIX: direct indexing raised KeyError when version_command was
    # missing from the payload entirely; default in that case too.
    if preRequisites.get("version_command", "") == "":
        preRequisites["version_command"] = "--version"
    new_ver_id = preRequisitesDB.add_pre_requisites(preRequisites)
    return {"result": "success",
            "message": "New preRequisites added successfully",
            "data": {"id": new_ver_id}}, 200
def put(self):
    """Update an existing team and refresh cached team permissions."""
    payload = request.get_json()
    if payload.get("team_name"):
        HelperServices.validate_name(payload.get("team_name"), "team name")
    teamDB.update_team(payload)
    teamService.generate_details()
    return {
        "result": "success",
        "message": "Team Group updated successfully"
    }, 200
def generate_new_state(details):
    """Create a new DU state with deployment fields, rolling back on failure.

    Returns the new state id. If anything fails after partial writes,
    the state and/or fields already created are deleted and the original
    exception is re-raised.
    """
    # details carries "deployment_field": {...}, handled below.
    state_id = None
    deployment_fields_id = None
    try:
        # Field values are verified inside add_update_state.
        new_state = {
            "type": "dustate",
            "build_id": details.get("build_id"),
            "name": details.get("name"),
            "parent_entity_id": details.get("parent_entity_id"),
            "approval_status": "Created",
        }
        fields_source = details.get("deployment_field")
        if not (fields_source and len(fields_source.get("fields", [])) > 0):
            # No usable fields supplied — fall back to the parent
            # entity's stored deployment fields.
            fields_source = deploymentFieldsDB.GetDeploymentFields(
                details.get("parent_entity_id"))
        state_id = add_update_state(new_state, None)
        if fields_source:
            deployment_fields_id = HelperServices.add_update_deployment_fields(
                fields_source.get("fields"), state_id)
        return state_id
    except Exception as e:  # catch *all* exceptions
        traceback.print_exc()
        # Undo any partial writes so no orphaned records remain.
        if state_id is not None:
            delete_state(state_id)
        if deployment_fields_id is not None:
            deploymentFieldsDB.DeleteDeploymentFields(deployment_fields_id)
        raise e
def download_build_files(build,parent_entity_id,directory_to_export_to, download_build_after,external_artifacts, **keyargs): """Download Build Files""" try: if not os.path.exists(directory_to_export_to): os.makedirs(directory_to_export_to) if download_build_after and "none" not in str(download_build_after).lower(): if build.get("build_date") and \ datetime.strptime( (str(str(build['build_date']).split(".")[0])), "%Y-%m-%d %H:%M:%S") < datetime.strptime( (str(str(download_build_after).split(".")[0])), "%Y-%m-%d %H:%M:%S"): return # SKIP DOWNLOADING THIS BUILD parent_details = HelperServices.get_details_of_parent_entity_id(parent_entity_id) if not parent_details.get("repository_to_use") : raise Exception("Missing key: repository_to_use in parent details") repo_details = repositoryDB.get_repository_by_name(parent_details.get("repository_to_use"), False) deployer_module="Plugins.repositoryPlugins."+repo_details.get("handler") class_obj=CustomClassLoaderService.get_class(deployer_module) method = getattr(class_obj(repo_details),"trnx_handler") # MEDHOD NAME keyargs.update({"transaction_type":"download","build_details":build,"directory_to_export_to":directory_to_export_to}) method(**keyargs) if external_artifacts and build.get("additional_artifacts"): ExternalRepoDownloadHelper.handle_request("download",build.get("additional_artifacts"),directory_to_export_to) except Exception: print "Unable to execute download_build_files : build:"+str(build)+" directory: "+directory_to_export_to traceback.print_exc()
def add_update_du_set(deploymentUnitSetData, deployment_unit_set_id=None,
                      logo_path=None, logo_full_path=None,
                      directory_to_import_from=None):
    """Add update DeploymentUnitSet data.

    Validates the payload, attaches the logo, trims extra data, resolves
    the target record (by explicit id or by name), and inserts or
    updates accordingly. Returns the resulting record id as a string.
    """
    # Mandatory keys must be present and non-empty.
    for mandatory in ("name", "du_set"):
        if not deploymentUnitSetData.get(mandatory):
            raise Exception("mandatory key: " + mandatory +
                            " is missing in DU details")
    HelperServices.validate_name(deploymentUnitSetData.get("name"),
                                 "deploymentunit package name")
    verify_du_set_data(deploymentUnitSetData)
    verify_du_and_du_set_data(deploymentUnitSetData)
    # ADD LOGO
    deploymentUnitSetData = HelperServices.add_update_logo(
        deploymentUnitSetData, logo_path, logo_full_path,
        directory_to_import_from)
    # TRIM NOT REQUIRED DATA
    deploymentUnitSetData = trim_du_duset_data(deploymentUnitSetData)
    # Resolve the target: an explicit id must exist; otherwise fall back
    # to a lookup by name.
    if deployment_unit_set_id:
        if not deploymentunitsetdb.GetDeploymentUnitSetById(
                deployment_unit_set_id, False):
            raise Exception("No such DeploymentUnit Set was found with _id: ")
    else:
        found = deploymentunitsetdb.GetDeploymentUnitSetByName(
            deploymentUnitSetData.get("name"), False)
        if found:
            deployment_unit_set_id = str(found["_id"])
    if deployment_unit_set_id:
        deploymentUnitSetData["_id"] = {"oid": deployment_unit_set_id}
        result = deploymentunitsetdb.UpdateDeploymentUnitSet(
            deploymentUnitSetData)
    else:
        add_missing_attributes_for_duset(deploymentUnitSetData)
        result = deploymentunitsetdb.AddNewDeploymentUnitSet(
            deploymentUnitSetData)
    if result is None:
        raise Exception("Unable to create/update DeploymentUnit Set")
    # RELOAD TEAM PERMISSIONS
    teamService.generate_details()
    return str(result)
def getAllToolSets():
    """List toolsets visible to the requesting user.

    Supports pagination (page/perpage), a case-insensitive name regex
    filter, and in-memory tag/toolname filters. When a name filter is
    given, pagination is disabled (skip = limit = 0). Returns a JSON
    payload with the filtered page and the unfiltered total count.
    """
    limit = int(request.args.get('perpage', '30'))
    page = int(request.args.get('page', "0"))
    skip = page * limit
    finalData = []
    tags_filter = []
    toolName_filter = []
    name_filter = None
    if request.args.get('tags', None):
        tags_filter = request.args.get('tags', None).split(",")
    if request.args.get('toolname', None):
        toolName_filter = request.args.get('toolname', None).split(",")
    # Only toolsets the user is permitted to see.
    id_list = teamService.get_user_permissions(
        authService.get_userid_by_auth_token())[
            "parent_entity_set_id_list"]  # TOOL SET IDS
    if request.args.get('name', None):
        name_filter = request.args.get('name', None)
    filter_required = {"_id": {"$in": id_list}}
    if name_filter:
        # Case-insensitive substring match on the toolset name.
        filter_required.update(
            {"name": {
                "$regex": str(name_filter),
                "$options": "i"
            }})
    # Total is counted over the permitted set, ignoring the name filter.
    total_count = len(
        list(toolsetdb.get_all_tool_set({"_id": {
            "$in": id_list
        }})))
    if name_filter:
        # Name searches return all matches in one page.
        skip = limit = 0
    toolsets = toolsetdb.get_all_tool_set(filter_required, skip, limit)
    for record in toolsets:
        # filter_handler returns truthy when the record should be skipped.
        if (HelperServices.filter_handler(record, tags_filter,
                                          record.get("tag", []), "tag")):
            continue
        if len(toolName_filter) > 0:
            tool_list = []
            for eachTool in record.get("tool_set"):
                if eachTool.get("tool_id"):
                    tool_list.append(str(eachTool["tool_id"]))
            # "any" in the filter disables tool matching entirely.
            if tool_list and "any" not in toolName_filter:
                # Keep the record only if it shares at least one tool id
                # with the filter.
                if toolName_filter and len(toolName_filter) > 0 and len(
                        list(set(toolName_filter) & set(tool_list))) < 1:
                    continue
        finalData.append(record)
    return jsonify(
        json.loads(
            dumps({
                "result": "success",
                "data": {
                    "data": finalData,
                    "page": page,
                    "total": total_count,
                    "page_total": len(finalData)
                }
            }))), 200
def put(self):
    """Update an existing user.

    Validates the username, role and account references, enforces that
    only a SuperAdmin may grant the SuperAdmin role, rebuilds the user's
    team memberships from included_in, and persists the update.
    """
    user_data = request.get_json()
    if user_data.get("user"):
        HelperServices.validate_name(user_data.get("user"), "username")
    # BUG FIX: without an _id the original code fell through and later
    # used an unbound `user_id` local (NameError); fail fast instead.
    if not user_data.get("_id"):
        raise Exception("Mandatory field: _id is missing in user details")
    user_id = user_data["_id"]["oid"]
    if user_data.get("roleid") is not None:
        if roledb.get_role_by_id(user_data.get("roleid"), False) is None:
            raise Exception("Role does not exists")
    # other way is to get account id from GUI
    if user_data.get("accountid") is not None:
        if accountDB.get_account(user_data.get("accountid")) is None:
            raise Exception("Account does not exists")
    auth_user_id = authService.get_userid_by_auth_token()
    if auth_user_id is None:
        raise Exception("Token verification failed")
    loggedInUser = userdb.get_user_by_id(auth_user_id, False)
    loggedInUserRole = roledb.get_role_by_id(loggedInUser["roleid"], True)
    if loggedInUserRole["name"].lower() != "superadmin":
        # NOTE(review): assumes roleid is present here — get_role_by_id
        # returning None would raise TypeError below; confirm upstream.
        newUserRole = roledb.get_role_by_id(user_data.get("roleid"), False)
        if newUserRole["name"].lower() == "superadmin":
            raise ValueError("Only SuperAdmin can update role to SuperAdmin")
    # Rebuild team memberships from scratch.
    for group in teamDB.get_team_by_user(user_id):
        teamDB.remove_user_from_team(str(group["_id"]), user_id)
    if user_data.get("included_in"):
        for group_id in user_data.get("included_in"):
            teamDB.add_user_to_team(group_id, user_id)
    updated = userdb.update_user(user_data)
    if updated == 1:
        teamService.generate_details()
        return {
            "result": "success",
            "message": "User was updated",
            "data": updated
        }, 200
    else:
        raise Exception("User was not updated.")
def delete_plugin(file_name, validation_indicator=True):
    """Start Plugin Deletion.

    Removes the deployment and sync plugin files named file_name plus
    any matching exit-point plugin DB record. When validation_indicator
    is truthy, first verifies no repository still references the plugin.
    """
    try:
        if validation_indicator:
            HelperServices.delete_plugin_repo_validation(
                str(file_name), "deployer_to_use", "Repository")
        # Remove the plugin file from both plugin directories, if present.
        for base_path in (deployment_plugin_full_path, sync_plugin_full_path):
            file_path = str(base_path + '/' + file_name + ".py")
            if os.path.isfile(file_path):
                os.remove(file_path)
        present_in_db = exitPointPlugins.get_by_plugin_name(file_name)
        if present_in_db:
            exitPointPlugins.delete(str(present_in_db.get("_id")))
        return {"result": "success", "message": "File was removed"}, 200
    except Exception:
        traceback.print_exc()
        # BUG FIX: re-raise the original exception instead of wrapping it
        # in a bare Exception, preserving its type and traceback.
        raise
def verify_deployment_request(req):
    """Delegate request verification to the parent entity's deployment plugin."""
    # CHECK IF parent_entity_id is valid
    parent_entity = HelperServices.get_details_of_parent_entity_id(
        req.get("parent_entity_id"))
    plugin_module = ("Plugins.deploymentPlugins." +
                     parent_entity.get("deployer_to_use"))
    plugin_cls = CustomClassLoaderService.get_class(plugin_module)
    # The verification hook is optional; call it only when defined.
    if "verify_deployment_request" in dir(plugin_cls):
        hook = getattr(plugin_cls(None), "verify_deployment_request")
        hook(**{"input_dep_request_dict": req})
def add_update_state(self, state, full_sync_flag="false",
                     directory_to_import_from=None):
    """Start State Addition/Update.

    Normalizes parent names to ids, resolves a DU state's build_id from
    its build number, converts package states, then creates or updates
    the state record (matched by name + parent_entity_id) and applies
    any deployment fields. Returns a result/message dict; all exceptions
    are caught and reported as a "failed" result.
    """
    deployment_fields_data = None
    try:
        state_data = state.get("state_data")
        StateHelperService.convert_parent_names_to_ids(state_data, True)
        # IF DU STATE — build_id arrives as a build number and must be
        # replaced with the local build's database id.
        if state_data.get("build_id"):
            build = self.buildsDB.get_build_by_number(
                state_data.get("parent_entity_id"),
                state_data.get("build_id"), False)
            if build:
                state_data["build_id"] = str(build.get("_id"))
            else:
                raise Exception("Build with number: "
                                + str(state_data.get("build_id"))
                                + " and parent_entity_id: "
                                + str(state_data.get("parent_entity_id"))
                                + " was not found in DB")
        # Deployment fields are applied after the state id is known.
        if state_data.get("deployment_field"):
            deployment_fields_data = state_data.get("deployment_field")
        # IF DU PACKAGE STATE
        if state_data.get("states"):
            StateHelperService.convert_parent_to_states(state_data)
        # Update in place when a state with this name already exists for
        # the parent; otherwise create a new one.
        existing_state = self.statedb.get_state_by_parent_entity_id_name(
            state_data.get("name"),
            state_data.get("parent_entity_id"), False)
        if existing_state:
            StateHelperService.add_update_state(
                state_data, str(existing_state.get("_id")))
            state_id = str(existing_state.get("_id"))
        else:
            state_id = str(
                StateHelperService.add_update_state(state_data, None))
        if deployment_fields_data:
            HelperServices.add_update_deployment_fields(
                deployment_fields_data.get("fields"), state_id)
        return {
            "result": "success",
            "message": state_data["name"] + " was handled"
        }
    except Exception as e_value:  # catch *all* exceptions
        traceback.print_exc()
        return {"result": "failed", "message": str(e_value)}
def put(self):
    """Update an existing Tag and refresh cached team permissions.

    Raises when the name is missing, invalid, or already present in the
    database, or when the update modified no record.
    """
    tag = request.get_json()
    if tag.get("name") is None:
        # BUG FIX: the message previously said "create a new Tag" even
        # though this is the update handler.
        raise Exception(
            "Mandatory fields name to update a Tag was not found.")
    # Validate the name before any database round trip.
    HelperServices.validate_name(tag.get("name"), "tag name")
    # NOTE(review): this rejects any already-existing name, which also
    # blocks updates that keep the tag's current name — confirm intent.
    if tagDB.get_tag_by_name(tag.get("name")) is not None:
        raise Exception("Tag with name " + tag.get("name") +
                        " already exists")
    is_updated = tagDB.update_tag(tag)
    if is_updated == 1:
        # RELOAD TEAM PERMISSIONS
        teamService.generate_details()
        return {
            "result": "success",
            "message": "The Tag is updated successfully"
        }, 200
    else:
        raise Exception("Tag was not updated")
def post(self):
    """Create a new Tag and refresh cached team permissions."""
    tag_payload = request.get_json()
    tag_name = tag_payload.get("name")
    if tag_name is None:
        raise Exception(
            "Mandatory fields name to create a new Tag was not found.")
    # Reject duplicates before validating the name format.
    if tagDB.get_tag_by_name(tag_name) is not None:
        raise Exception("Tag with name " + tag_name + " already exists")
    HelperServices.validate_name(tag_name, "tag name")
    tag_id = tagDB.add_tag(tag_payload)
    # RELOAD TEAM PERMISSIONS
    teamService.generate_details()
    return {"result": "success",
            "message": "The Tag is saved successfully",
            "data": {"id": tag_id}}, 200
def create_request(pt_details):
    """Build and validate a tool-creation request document.

    Fills in version defaults, attaches the first default repository,
    adds the logo, and runs the mandatory/existing/PK validations.
    Returns the completed document; raises when no default repository
    exists or any validation fails.
    """
    all_default_repos = list(
        RepositoryDB.get_all({"is_default_repo_ind": "true"}))
    if not all_default_repos:
        raise Exception("Default Repository could not be found")
    final_data = {
        "name": pt_details.get("name"),
        "tag": [],
        "support_details": pt_details.get("support_details"),
        "request_reason": pt_details.get("request_reason"),
        "description": pt_details.get("description"),
        "version": {
            "version_name": pt_details.get("version").get("version_name"),
            "version_date": str(datetime.now()),
            "version_number": pt_details.get("version").get("version_number"),
            "pre_requiests": [],
            "branch_tag": "Branch",
            "gitlab_repo": "",
            "gitlab_branch": "master",
            "jenkins_job": "",
            "backward_compatible": "no",
            "release_notes": "",
            "mps_certified": [],
            "deployer_to_use": "DefaultDeploymentPlugin",
            "dependent_tools": [],
            # The first configured default repository wins.
            "repository_to_use": all_default_repos[0].get("name")
        },
        "artifacts_only": "false",
        "is_tool_cloneable": "true",
        "allow_build_download": "false"
    }
    HelperServices.validate_name(final_data.get("name"), "tool name")
    final_data = HelperServices.add_update_logo(final_data, logo_path,
                                                logo_full_path, None)
    # BUG FIX: validate_mandatory_details was called twice; once suffices.
    validate_mandatory_details(final_data)
    validate_existing_tool(final_data)
    validate_pk_tool(final_data)
    return final_data
def verify_group_deployment_request(group_request): # CHECK IF parent_entity_id is valid parent_entity = HelperServices.get_details_of_parent_entity_id(group_request[0].get("parent_entity_id")) deployer_module="Plugins.deploymentPlugins."+parent_entity.get("deployer_to_use") class_obj=CustomClassLoaderService.get_class(deployer_module) if "verify_group_deployment_request" in dir(class_obj): method = getattr(class_obj(None),"verify_group_deployment_request") # MEDHOD NAME keyargs={"input_group_dep_request_dict":group_request} method(**keyargs) else: print "Skipping step : verify group deployment request as method doesn't exists in "+parent_entity.get("deployer_to_use")
def validate_machine_details(machine_details):
    """Validate and normalize a machine payload before create/update.

    Checks mandatory keys, normalizes machine_type / account_id / tags,
    derives machine_name as user@host when absent, and verifies that all
    referenced users, teams and machine groups exist. Raises Exception
    on the first problem found. Mutates machine_details in place.
    """
    keys_to_validate = ["username", "account_id", "ip", "host", "password",
                        "machine_type", "auth_type"]
    for key in keys_to_validate:
        if not machine_details.get(key):
            raise Exception("mandatory key: " + key +
                            " is missing in machine details")
    machine_details["machine_type"] = validate_machine_type(
        machine_details.get("machine_type"))
    machine_details["account_id"] = AccountHelperService.validate_account_id(
        machine_details.get("account_id"))
    if not machine_details.get("machine_name"):
        # Default the display name to user@host.
        machine_details["machine_name"] = str(
            machine_details.get("username")) + '@' + str(
                machine_details.get('host'))
    HelperServices.validate_name(machine_details.get("machine_name"),
                                 "machine name")
    if machine_details.get("tag"):
        machine_details["tag"] = tagDB.get_tag_ids_from_given_ids_list(
            machine_details.get("tag"))
    if machine_details.get('permitted_users'):
        for record in machine_details["permitted_users"]:
            # "all" is a wildcard and needs no lookup.
            # FIX: replaced the removed Python 2 `<>` operator with `!=`.
            if str(record).lower() != "all":
                if userdb.get_user_by_id(record, False) is None:
                    raise Exception(" user id: " + str(record) +
                                    " don't exist")
    if machine_details.get('permitted_teams'):
        for team in machine_details["permitted_teams"]:
            if teamsDb.get_team(team) is None:
                # FIX: added the missing space before "don't exist".
                raise Exception("team id: " + str(team) + " don't exist")
    if machine_details.get('included_in'):
        for include in machine_details["included_in"]:
            if machinegroupDB.get_machine_groups(include) is None:
                raise Exception("machine group: " + str(include) +
                                " don't exist")
    if machine_details.get("flexible_attributes"):
        FlexibleAttributesHelper.validate_entity_value(
            "Machine", machine_details.get("flexible_attributes"))
    if machine_details.get("environment_variables"):
        EnvironmentVariablesHelper.validate_env_vars(
            machine_details.get("environment_variables"))
def validate_build_structure(build):
    """Validate and normalize a build document in place.

    Ensures mandatory keys exist, strips server-managed keys, parses
    build_date into a datetime, and delegates repository-specific checks.
    Raises Exception when parent_entity_id is missing.
    """
    keys_to_validate = ["build_number", "status", "package_type"]
    keys_to_pop = ["state_details", "create_state_ind", "operation"]
    validate_keys_exists(build, "build", keys_to_validate)
    for key in keys_to_pop:
        if build.get(key):
            build.pop(key)
    # BUG FIX: a missing parent_entity_id previously left parent_details
    # as None and crashed with AttributeError at the last line; fail with
    # a clear message instead.
    if not build.get("parent_entity_id"):
        raise Exception(
            "mandatory key: parent_entity_id is missing in build details")
    parent_details = HelperServices.get_details_of_parent_entity_id(
        build.get("parent_entity_id"))
    if build.get("build_date"):
        # Drop fractional seconds before parsing.
        build["build_date"] = datetime.strptime(
            str(build["build_date"]).split(".")[0], "%Y-%m-%d %H:%M:%S")
    validate_by_repository_to_use(build,
                                  parent_details.get("repository_to_use"))
def post(self):
    """Create a new team.

    Verifies mandatory fields, rejects duplicate names, defaults all
    optional membership/permission lists to empty lists, persists the
    team, and refreshes cached team permissions.
    """
    teamDetails = request.get_json()
    # BUG FIX: the original `(a and b and c) is None` only detected a
    # missing field when the first falsy value was literally None.
    if any(teamDetails.get(key) is None
           for key in ("team_name", "users_id_list", "distribution_list")):
        raise Exception(
            "Mandatory fields to create a new team was not found.")
    if teamDB.get_team_by_name(teamDetails.get("team_name")):
        raise Exception("Team with name " + teamDetails.get("team_name") +
                        " already exists")
    HelperServices.validate_name(teamDetails.get("team_name"), "team name")
    # Default every optional list field to an empty list.
    for list_key in ("users_id_list", "tag_id_list", "machine_id_list",
                     "machine_group_id_list", "parent_entity_id_tool_list",
                     "parent_entity_id_du_list",
                     "parent_entity_tool_set_id_list",
                     "parent_entity_du_set_id_list",
                     "parent_entity_tag_list",
                     "parent_entity_set_tag_list"):
        if not teamDetails.get(list_key):
            teamDetails[list_key] = []
    new_team_id = teamDB.add_team(teamDetails)
    teamService.generate_details()
    return {
        "result": "success",
        "message": "The team is created successfully",
        "data": {
            "id": new_team_id
        }
    }, 200
def add_request(requests_list):
    """Add a group deployment request plus one deployment request per entry.

    Validates every request first (mandatory keys, plugin verification,
    build resolution), then persists them one by one; on any failure all
    already-persisted requests are cancelled and the exception is
    re-raised. Returns the id of the new group deployment request.
    """
    ids_list = []
    group_status_details = []
    try:
        # Check while adding new group deployment req to database
        verify_group_deployment_request(requests_list)
        for req in requests_list:
            for rec in ["requested_by", "parent_entity_id",
                        "deployment_type", "scheduled_date",
                        "request_type"]:
                if not rec in req.keys():
                    raise ValueError("Mandatory key: " + rec +
                                     " was not found in request")
            # ADD MISSING DEPLOYMENT ORDER
            # NOTE(review): presumably handle_dependent_and_order mutates
            # req in place and returns it, since req (not finalData) is
            # what gets persisted below — confirm.
            finalData = handle_dependent_and_order(req)
            # CHECK if build_id is valid or assign new build
            get_build_for_parent(finalData)
            # Check while adding new deployment req to database
            verify_deployment_request(req)
        # START ADDING
        for req in requests_list:
            try:
                dep_id = str(deploymentRequestDB.AddDeploymentRequest(req))
            except Exception as ex:
                # Enrich the error with the entity name when available.
                if req.get("parent_entity_id"):
                    parent_details = HelperServices.get_details_of_parent_entity_id(req.get("parent_entity_id"))
                    if parent_details:
                        raise Exception(
                            "For entity " +
                            str(parent_details.get("name", "Unknown")) +
                            " :" + str(ex))
                raise ex
            ids_list.append(dep_id)
            group_status_details.append({
                "deployment_id": dep_id,
                "machine_id": req.get("machine_id"),
                "status": "New",
                "status_message": "The request is accepted",
                "deployment_order": int(req.get("deployment_order")),
                "dependent": req.get("dependent")
            })
        # Build the group document; `req` still refers to the last
        # request of the loop above.
        newGroupDeploymentRequest = {
            "deployment_type": str(req.get("deployment_type")).lower(),
            "details": group_status_details,
            "requested_by": req.get("requested_by"),
            "name": "Deployment Group " + CounterDB.get_counter(),
            "scheduled_date": req.get("scheduled_date"),
            "request_type": str(req.get("request_type"))
        }
        if req.get("callback_url"):
            newGroupDeploymentRequest["callback"] = {
                "callback_url": str(req.get("callback_url"))
            }
        # ADD ADDITIONAL KEYS
        for key_to_add in ["parent_entity_set_id", "package_state_id",
                           "machine_group_id"]:
            if key_to_add in req.keys():
                newGroupDeploymentRequest[key_to_add] = str(
                    req.get(key_to_add))
        return deploymentRequestGroupDB.add_new_grp_depreq(
            newGroupDeploymentRequest)
    except Exception as e:  # catch *all* exceptions
        print "Error :" + str(e)
        traceback.print_exc()
        # REMOVE ALREADY ADDED REQUESTS
        for rec in ids_list:
            deploymentRequestDB.CancelDeploymentRequest(rec)
        raise e
def validate_if_file_is_present_in_repository(self, **keyargs): ''' Whenever we need to validate if artifact was uploaded properly ''' url = keyargs["build_details"]["file_path"] if (default_nexus_container_name in url): url = HelperServices.replace_hostname_with_actual( url, default_nexus_container_name, socket.gethostbyname(default_nexus_container_name)) ret = urllib2.urlopen(url) if ret.code == 200: print url + " is present in repository !!" return
def update_state():
    """Update an existing state (deployment fields and/or approval status).

    Validates the payload against the swagger schema, strips every key
    not in the update whitelist, applies the state update, then applies
    the deployment fields when supplied.
    """
    deployment_fields_data = None
    keys_allowed_to_update = ["deployment_field", "approval_status", "_id"]
    data = request.json
    validate(data, 'UpdateStateData',
             relative_path + '/swgger/StateAPI/updateState.yml')
    state = request.get_json()
    # BUG FIX: iterate over a snapshot of the keys — popping from the
    # dict while iterating its live keys view raises RuntimeError on
    # Python 3 and is fragile even where keys() returns a list.
    for key in list(state.keys()):
        if key not in keys_allowed_to_update:
            state.pop(key)
    if state.get("deployment_field") is not None:
        deployment_fields_data = state.get("deployment_field")
    StateHelperService.add_update_state(state, state.get("_id").get("oid"))
    if deployment_fields_data is not None:
        HelperServices.add_update_deployment_fields(
            deployment_fields_data.get("fields"),
            str(state.get("_id").get("oid")))
    return jsonify(
        json.loads(
            dumps({
                "result": "success",
                "message": "State was updated successfully"
            }))), 200
def update_build():
    """Update a build, resolving its _id from build_number when absent."""
    request_build_details = request.get_json()
    if request_build_details.get("_id"):
        # An explicit _id was supplied; just sanity-check the parent.
        if request_build_details.get("parent_entity_id"):
            HelperServices.get_details_of_parent_entity_id(
                request_build_details.get("parent_entity_id"))
    else:
        # Resolve the _id from the parent entity + build number.
        build_details = buildDB.get_build_by_number(
            str(request_build_details.get("parent_entity_id")),
            request_build_details.get("build_number"), True)
        if build_details is None:
            raise Exception(
                "Unable to find a build details for build number " +
                str(request_build_details.get("build_number")) +
                " and parent_entity_id " +
                str(request_build_details.get("parent_entity_id")))
        if not build_details.get("_id"):
            raise Exception(
                "Unable to find a build id for parent_entity_id" +
                str(request_build_details.get("parent_entity_id")))
        request_build_details["_id"] = {
            "oid": str(build_details.get("_id"))
        }
    result = BuildHelperService.add_update_build(
        request_build_details,
        request_build_details.get("parent_entity_id"), None)
    return jsonify(
        json.loads(
            dumps({
                "result": "success",
                "message": "Build updated successfully",
                "data": {
                    "id": result
                }
            }))), 200
def add_update_tool_set(toolSetData, tool_set_id=None, logo_path=None,
                        logo_full_path=None):
    """Create or update a ToolSet and refresh cached team permissions.

    Verifies/normalizes the payload, attaches the logo when both logo
    paths are supplied, then inserts or updates depending on whether
    tool_set_id is given. Returns the resulting record id as a string.
    """
    toolSetData = verify_tool_set_data(toolSetData)
    if logo_path and logo_full_path:
        toolSetData = HelperServices.add_update_logo(
            toolSetData, logo_path, logo_full_path, None)
    if tool_set_id:
        # Update in place when an id is supplied.
        toolSetData["_id"] = {"oid": tool_set_id}
        result = toolsetdb.update_tool_set(toolSetData)
    else:
        result = toolsetdb.add_new_tool_set(toolSetData)
    if result is None:
        raise Exception("Unable to create/update ToolSet ")
    # RELOAD TEAM PERMISSIONS
    teamService.generate_details()
    return str(result)
def remove_artifact(self, build_details):
    """Delete a build's artifact through its parent's repository plugin."""
    parent_details = HelperServices.get_details_of_parent_entity_id(
        build_details.get("parent_entity_id"))
    repo_name = parent_details.get("repository_to_use")
    if not repo_name:
        raise Exception("Missing key: repository_to_use in parent details")
    repo_details = self.repositoryDB.get_repository_by_name(repo_name, False)
    # Resolve the repository plugin class and call its transaction handler.
    plugin_cls = CustomClassLoaderService.get_class(
        "Plugins.repositoryPlugins." + repo_details.get("handler"))
    handler = getattr(plugin_cls(repo_details), "trnx_handler")
    handler(transaction_type="delete", build_details=build_details)