def put(self):
    """Update an existing ToolSet from the JSON request payload.

    The payload must carry the ToolSet id under _id.oid; raises when no
    ToolSet with that id exists.
    """
    payload = request.get_json()
    toolset_id = payload["_id"]["oid"]
    # Verify the ToolSet exists before touching it.
    existing = toolsetdb.get_tool_set(toolset_id)
    if not existing:
        raise Exception("No such ToolSet was found")
    ToolHelperService.add_update_tool_set(payload, toolset_id, logo_path,
                                          logo_full_path)
    response = {
        "result": "success",
        "message": "ToolSet was updated successfully"
    }
    return response, 200
def addCloneRequest():
    """Create a clone request for a machine.

    Validates the payload against the swagger schema, rejects duplicates
    for the same machine, and — when a tool_list is supplied — resolves
    each tool's clone order and dependent-tool names, failing if any
    dependency is missing from the request.
    """
    data = request.json
    # NOTE(review): 'swgger' looks like a typo in the schema path, but it
    # must match the actual directory on disk — confirm before renaming.
    validate(data, 'Clone',
             relative_path + '/swgger/CloneRequestAPI/addCloneRequest.yml')
    NewCloneRequest = request.get_json()
    if NewCloneRequest.get("machine_id") is None:
        raise Exception("machine_id was not found in request")
    if machineDB.GetMachine(NewCloneRequest["machine_id"]) is None:
        raise Exception("Machine was not found")
    # Only one pending clone request per machine is allowed.
    result = cloneRequestDB.GetCloneRequestsByMachineId(
        NewCloneRequest["machine_id"])
    if result.count() > 0:
        raise Exception("Request already exists for this machine")
    if NewCloneRequest.get("tool_list") and len(
            NewCloneRequest.get("tool_list")) > 0:
        # GET ORDER IN WHICH TOOLS SHOULD BE CLONED
        tool_list = []
        tool_names = []  # union of all dependent-tool names seen so far
        for req in NewCloneRequest.get("tool_list"):
            version_details = versionsDB.get_version(req.get("version_id"),
                                                     False)
            version_details["source_version_id"] = req.get("version_id")
            process_order, toolNames = ToolHelperService.get_ordered_tools(
                [version_details])
            req["clone_order"] = process_order
            tool_names = list(set(tool_names + toolNames))
            tool_list.append(req)
        NewCloneRequest["tool_list"] = tool_list
        # GET TOOL NAMES TO BE CLONED
        # Every requested tool is removed from tool_names; whatever is
        # left over is a dependency the caller forgot to include.
        tool_list = []
        for req in NewCloneRequest.get("tool_list"):
            tool_name = tooldb.get_tool_by_version(req.get("version_id"),
                                                   False)["name"]
            if tool_name in tool_names:
                tool_names.remove(tool_name)
            req["tool_name"] = tool_name
            tool_list.append(req)
        NewCloneRequest["tool_list"] = tool_list
        # IF size of tool_names >0 . Then we are missing tools to clone
        if len(tool_names) > 0:
            raise ValueError("Please add missing dependent tools: " +
                             ",".join(tool_names))
        # SET THEM UP IN ORDER
        NewCloneRequest.get("tool_list").sort(key=cloneRequestDB.clone_order,
                                              reverse=False)
    Clone_request_id = cloneRequestDB.AddCloneRequest(NewCloneRequest)
    return jsonify(
        json.loads(
            dumps({
                "result": "success",
                "message": "New clone request has been added successfully",
                "data": {
                    "id": Clone_request_id
                }
            }))), 200
def deleteToolSet(id):
    """Delete the ToolSet identified by *id* and report the outcome."""
    deletion_result = ToolHelperService.delete_tool_set(id)
    payload = {
        "result": "success",
        "message": "ToolSet was deleted",
        "data": deletion_result
    }
    # Serialise via dumps() and re-parse so the response matches the
    # JSON shape used by the other endpoints in this module.
    body = jsonify(json.loads(dumps(payload)))
    return body, 200
def post(self):
    """Create a new ToolSet from the JSON request payload.

    Raises ValueError when the name is missing or already taken.
    """
    payload = request.get_json()
    name = payload.get("name")
    # Name is both mandatory and unique across ToolSets.
    if not name:
        raise ValueError("ToolSet Name is mandatory")
    if toolsetdb.get_tool_set_by_group_name(name):
        raise ValueError("ToolSet with this name already exists")
    new_id = ToolHelperService.add_update_tool_set(payload, None, logo_path,
                                                   logo_full_path)
    return {
        "result": "success",
        "data": {
            "_id": new_id
        },
        "message": "ToolSet was created successfully"
    }, 200
def handle_extra_validations(self, record):
    """Run the type-specific validation for a sync record.

    Exactly one of tool_data / du_data / duset_data / state_data is
    expected on *record*; anything else raises.
    """
    tool_payload = record.get("tool_data")
    if tool_payload:
        ToolHelperService.validate_tool_data(tool_payload)
        # After validation, re-resolve each version's dependent tools
        # and write the refreshed list back onto the record.
        versions = tool_payload.get("versions")
        if versions:
            resolved = [
                ToolHelperService.set_dependend_tools(version, True)
                for version in versions
            ]
            record["tool_data"]["versions"] = resolved
    elif record.get("du_data"):
        DuHelperService.validate_du_data(record.get("du_data"))
    elif record.get("duset_data"):
        DuHelperService.validate_duset_data(record.get("duset_data"))
    elif record.get("state_data"):
        StateHelperService.check_state_mandate_fields(
            record.get("state_data"))
    else:
        raise Exception("Validations:Conditions to process were not found")
def handle_delete_operation(self, record, full_sync_flag,
                            directory_to_import_from):
    """Delete the entity a sync record refers to.

    Deletions only run when full_sync_flag == "true"; "false" is an
    explicit no-op. Any other flag value, or a record carrying none of
    the known payload keys, raises.
    """
    flag = full_sync_flag.lower()
    if flag == "false":
        # Deliberate skip: deletions are only performed on a full sync.
        return {
            "result": "success",
            "message": "Skipping as full_sync_flag is not true"
        }
    if flag == "true":
        # Dispatch on whichever payload key the record carries; the
        # first match wins, exactly like the original if/elif chain.
        handlers = (
            ("tool_data", ToolHelperService.delete_tool),
            ("du_data", DuHelperService.delete_du),
            ("duset_data", DuHelperService.delete_du_set),
            ("state_data", StateHelperService.delete_state),
        )
        for key, deleter in handlers:
            payload = record.get(key)
            if payload:
                return deleter(payload.get("_id"), False)
    raise Exception("Processing:Conditions to process were not found")
def updatetool(self, tool, full_sync_flag="false",
               directory_to_import_from=None):
    """Start Tool Update.

    Syncs a distributed tool record onto the local DB: updates the tool
    document itself when its "operation" is "update", then walks every
    version applying that version's "operation" (delete / update /
    insert) and re-syncing its builds, documents, deployment fields and
    media files. Returns a result/message dict; any exception is caught
    and reported as {"result": "failed"}.
    """
    tooldata = tool.get("tool_data")
    localTool = self.toolDB.get_tool_by_name(tooldata["name"])
    ToolHelperService.check_if_tool_data_is_valid(tooldata, localTool)
    tool_id = str(localTool.get("_id"))
    try:
        # Tool-level data only changes when the compare step flagged it.
        if tooldata.get("operation").lower() == "update":
            ToolHelperService.add_update_tool(tooldata, tool_id,
                                              self.logo_path,
                                              directory_to_import_from,
                                              self.full_logo_path)
        versions = tooldata.get("versions")
        for record in versions:
            # Versions with no pending operation are left untouched.
            if record.get("operation") not in [
                    "delete", "update", "insert"
            ]:
                continue
            VersionsData = record
            localVersion = self.versionsDB.get_version_by_tool_id_name_and_number(
                tool_id, VersionsData["version_name"],
                VersionsData["version_number"])
            if localVersion:
                version_id = str(localVersion["_id"])
            # NOTE(review): a "delete" for a version with no local match
            # would hit an unbound version_id below — presumably the
            # compare step never emits that combination; confirm.
            if record.get("operation").lower(
            ) == "delete" and full_sync_flag == "true":
                SyncHelperService.delete_version_and_related_builds(
                    version_id)
            else:
                # HANDLE VERSION
                # WE SEE THAT THE VERSION HAS TO BE BE UPDATED OR INSERTED
                # IF ITS A EXISTING VERSION WE WILL ALREADY HAVE VERSION_ID
                if record.get("operation").lower() == "update":
                    ToolHelperService.add_update_version(
                        VersionsData, tool_id, version_id, False)
                # IF ITS A NEW VERSION
                if record.get("operation").lower() == "insert":
                    version_id = ToolHelperService.add_update_version(
                        VersionsData, tool_id, None, False)
                # HANLDE BUILD
                if VersionsData.get('build') is not None and len(
                        VersionsData.get('build')) > 0:
                    builds_handled = [
                    ]  # WE need to deactivate all other builds
                    for build in VersionsData.get('build'):
                        # Build artifacts are read from
                        # <import dir>/artifacts/<repository_to_use>.
                        BuildHelperService.add_update_build(
                            build, version_id,
                            join(
                                directory_to_import_from,
                                os.path.join(
                                    "artifacts",
                                    VersionsData["repository_to_use"])))
                        builds_handled.append(build["build_number"])
                    # SUPPOSE THE ACCOUNT SENDS 2 BUILDS THAT ARE ACTIVE THEY WILL BE HANDLED
                    # BUT ALL OTHER BUILDS SHOULD BE MADE INACTIVE IN LOCAL
                    for build in self.buildsDB.get_all_builds(version_id):
                        if build["build_number"] not in builds_handled:
                            build_id = build.get("_id")
                            # Rewrap the raw ObjectId as {"oid": str} —
                            # the shape update_build expects.
                            build["_id"] = {}
                            build["_id"]["oid"] = str(build_id)
                            build["status"] = "0"  # mark inactive
                            self.buildsDB.update_build(build)
                # HANLDE DOCUMENT
                if VersionsData.get('document') is not None \
                        and len(VersionsData.get('document')) > 0:
                    HelperServices.add_update_documents(
                        VersionsData['document']['documents'], version_id)
                # HANLDE DEPLOYMENT FIELDS
                if VersionsData.get('deployment_field') is not None \
                        and len(VersionsData.get('deployment_field')) > 0:
                    HelperServices.add_update_deployment_fields(
                        VersionsData['deployment_field']['fields'],
                        version_id)
                # HANLDE MEDIA FILES
                if VersionsData.get('media_file') is not None \
                        and len(VersionsData.get('media_file')) > 0:
                    HelperServices.add_update_media_files(
                        VersionsData['media_file']['media_files'],
                        version_id, directory_to_import_from,
                        self.full_media_files_path, self.media_files_path)
        return {
            "result": "success",
            "message": tooldata["name"] + " was updated"
        }
    except Exception as e_value:  # catch *all* exceptions
        traceback.print_exc()
        return {"result": "failed", "message": str(e_value)}
def addtool(self, tool, full_sync_flag="false",
            directory_to_import_from=None):
    """Start Tool Addition.

    Inserts a brand-new tool plus all of its versions, deployment
    fields, media files, documents and builds. Every inserted id is
    remembered so that, if any step fails, everything inserted so far
    is rolled back before returning a {"result": "failed"} dict.
    """
    # MAINTAINING ARRAY TO MEMORISE INSERTED IDS
    inserted_tools_list = []
    inserted_build_list = []
    inserted_versions_list = []
    inserted_deployment_fields_list = []
    inserted_media_files_list = []
    inserted_documents_list = []
    tooldata = tool.get("tool_data")
    # Fails fast when a tool with this identity already exists.
    ToolHelperService.check_if_tool_exists(tooldata)
    try:
        tooldata = tool.get("tool_data")
        tool_inserted = ToolHelperService.add_update_tool(
            tool.get("tool_data"), None, self.logo_path,
            directory_to_import_from, self.full_logo_path)
        inserted_tools_list.append(tool_inserted)
        versions = tooldata.get("versions")
        if versions is None:
            raise Exception("versions is missing from tool_data")
        for VersionsData in versions:
            Versionresult = ToolHelperService.add_update_version(
                VersionsData, tool_inserted, None, False)
            inserted_versions_list.append(Versionresult)
            # preparing version data
            # preparing DeploymentFields data
            if VersionsData.get("deployment_field") is not None and len(
                    VersionsData.get("deployment_field")) > 0:
                inserted_deployment_fields_list.append(
                    HelperServices.add_update_deployment_fields(
                        VersionsData.get("deployment_field")["fields"],
                        Versionresult))
            if VersionsData.get('media_file') is not None \
                    and len(VersionsData.get('media_file')) > 0:
                inserted_media_files_list.append(
                    HelperServices.add_update_media_files(
                        VersionsData.get('media_file')['media_files'],
                        Versionresult, directory_to_import_from,
                        self.full_media_files_path, self.media_files_path))
            # preparing Document data
            if VersionsData.get("document") is not None and len(
                    VersionsData.get("document")) > 0:
                inserted_documents_list.append(
                    HelperServices.add_update_documents(
                        VersionsData.get("document")["documents"],
                        Versionresult))
            # preparing Build data
            if VersionsData.get("build") is not None and len(
                    VersionsData.get("build")):
                for build in VersionsData.get("build"):
                    # Build artifacts live under
                    # <import dir>/artifacts/<repository_to_use>.
                    inserted_build_list.append(
                        BuildHelperService.add_update_build(
                            build, Versionresult,
                            join(
                                directory_to_import_from,
                                os.path.join(
                                    "artifacts",
                                    VersionsData["repository_to_use"]))))
        return {
            "result": "success",
            "message": tooldata["name"] + " was inserted"
        }
    except Exception as e_value:  # catch *all* exceptions
        traceback.print_exc()
        # Roll back every record inserted before the failure.
        for rec in inserted_deployment_fields_list:
            self.deploymentFieldsDB.DeleteDeploymentFields(rec)
        for rec in inserted_media_files_list:
            self.mediaFilesDB.delete_media_file(rec)
        for rec in inserted_documents_list:
            self.documentsDB.DeleteDocuments(rec)
        for rec in inserted_versions_list:
            self.versionsDB.delete_version(rec)
        for rec in inserted_tools_list:
            self.toolDB.delete_tool(rec)
        for rec in inserted_build_list:
            self.buildsDB.delete_build(rec)
        return {"result": "failed", "message": str(e_value)}
def compare(self):
    """Comparing sync data.

    For every new distribution record, compares the distributed
    tool_data (and each of its versions) against the local DB, marks
    the record with the resulting operation ("insert", "update" or "")
    plus a changed_object changelog, and writes it back via
    UpdateDistribution. Per-tool failures are recorded on the record;
    afterwards a notification email is attempted.
    """
    try:
        print "compare: comparing sync data"
        tools = self.distributionSync.GetNewDistribution()
        if tools and tools.count() > 0:
            for tool in tools:
                tool["changed_object"] = []
                # Each tool is compared independently so one failure
                # does not abort the rest of the batch.
                try:
                    modified = 0
                    # Rewrap the raw ObjectId as {"oid": str}.
                    tool_id = {}
                    tool_id["oid"] = str(tool["_id"])
                    tool["_id"] = tool_id
                    distributedTool = tool.get("tool_data")
                    # VALIDATE THE TOOL DETAILS
                    ToolHelperService.validate_tool_data(distributedTool)
                    tool_name = distributedTool["name"]
                    ltooldata = self.toolDB.get_tool_by_name(tool_name)
                    if ltooldata is None:
                        # Unknown locally: schedule a full insert.
                        print "compare: tool " + tool_name + " was not found in local DB."
                        tool["operation"] = "insert"
                        tool["status"] = "compared"
                        tool["updated_time"] = datetime.now()
                        distributedTool["operation"] = "insert"
                        modified = 1
                        tool["tool_data"] = distributedTool
                        # updated=1
                        updated = self.distributionSync.UpdateDistribution(
                            tool)
                        if updated:
                            print "compare:This tool will be created while processing"
                        else:
                            print "compare: Unable to trigger tool creation while processing"
                        continue
                    else:
                        print "compare: tool " + tool_name + " was found in local DB."
                        # Deep copies keep the comparison from mutating
                        # either side.
                        sub = SyncHelperService.compare_tool_data(
                            copy.deepcopy(distributedTool),
                            copy.deepcopy(ltooldata))
                        if sub:
                            print "compare:tool_data of tool " + tool_name + " is required to be updated as the tool data has changed."
                            distributedTool["operation"] = "update"
                            tool["operation"] = "update"
                            toolchange = {}
                            toolchange["tool"] = str(sub)
                            tool["changed_object"].append(toolchange)
                            modified = 1
                        else:
                            print "compare: tool_data of tool " + tool_name + " is not required to be updated.As tool data has not changed"
                            distributedTool["operation"] = ""
                            modified = 0
                        # Now compare version by version.
                        syncversions = []
                        for version in distributedTool["versions"]:
                            localversion, isActive = list(
                                self.versionsDB.get_version_detail_and_status(
                                    str(ltooldata["_id"]),
                                    version["version_number"],
                                    version["version_name"], True))
                            if localversion:
                                ToolHelperService.get_dependend_tools(
                                    localversion, True)
                                if isActive:
                                    sub = SyncHelperService.compare_version_data(
                                        copy.deepcopy(version),
                                        copy.deepcopy(localversion))
                                    if sub:
                                        print "compare:versions of tool " + tool_name + " is required to be updated as the version has changed."
                                        version["operation"] = "update"
                                        modified = 1
                                        versionchange = {}
                                        versionchange["version"] = str(sub)
                                        tool["changed_object"].append(
                                            versionchange)
                                        # version["version_id"]= str(localversion["_id"])
                                    else:
                                        print "compare:versions of tool " + tool_name + " is not required to be updated.As versions has not changed"
                                        version["operation"] = ""
                                else:
                                    # Present locally but inactive:
                                    # force an update.
                                    version["operation"] = "update"
                                    modified = 1
                                    versionchange = {}
                                    versionchange["version"] = str(
                                        "Version is not active")
                                    tool["changed_object"].append(
                                        versionchange)
                            else:
                                # Missing locally: schedule an insert.
                                version["operation"] = "insert"
                                modified = 1
                                versionchange = {}
                                versionchange["version"] = str(
                                    "Version was not found")
                                tool["changed_object"].append(versionchange)
                            syncversions.append(version)
                        tool["status"] = "compared"
                        tool["updated_time"] = datetime.now()
                        distributedTool["versions"] = syncversions
                        tool["tool_data"] = distributedTool
                        # NOTE(review): this read of tool["operation"]
                        # assumes the record already carries that key
                        # when tool_data itself was unchanged — confirm
                        # against the distribution schema.
                        if modified == 1 and tool["operation"] == "":
                            tool["operation"] = "update"
                        elif modified == 0:
                            tool["operation"] = ""
                            tool["status"] = "success"
                            tool[
                                "status_message"] = "No difference was found while comparing"
                        updated = self.distributionSync.UpdateDistribution(
                            tool)
                except Exception as e_value:  # catch *all* exceptions
                    print 'DistributionSyncServices-Compare :' + str(
                        e_value)
                    traceback.print_exc()
                    self.distributionSync.UpdateDistributionStatus(
                        str(tool["_id"]["oid"]), "failed",
                        "Comparing failed with error :" + str(e_value))
            # SEND EMAIL ONLY IF WE HAVE NEW TOOLS
            # NOTE(review): relies on self.result having been populated
            # elsewhere — verify it is set before compare() runs.
            try:
                self.notifyCompared(self.result.get("distribution_list"))
            except Exception as e_value:  # catch *all* exceptions
                print 'compare : An email will not be sent due to error :' + str(
                    e_value)
    except Exception as e_value:  # catch *all* exceptions
        traceback.print_exc()
        print 'DistributionSyncServices-Compare :' + str(e_value)