def splitting(self, requestName):
    """
    _splitting_

    Fetch the current job splitting parameters for every task in the
    request's spec (performance data excluded), JSON-encode each task's
    parameters for the template, and render the splitting page.
    """
    self.validate(requestName)
    reqData = GetRequest.getRequestByName(requestName)
    workload = Utilities.loadWorkload(reqData)

    paramsByTask = workload.listJobSplittingParametersByTask(performance = False)
    sortedTasks = sorted(paramsByTask.keys())

    splitInfo = []
    for task in sortedTasks:
        taskParams = paramsByTask[task]
        splitInfo.append({"splitAlgo": taskParams["algorithm"],
                          "splitParams": JsonWrapper.dumps(taskParams),
                          "taskType": taskParams["type"],
                          "taskName": task})

    return self.templatepage("Splitting", requestName = requestName,
                             taskInfo = splitInfo, taskNames = sortedTasks)
def one(self, requestName):
    """ Assign a single request """
    self.validate(requestName)
    request = Utilities.prepareForTable(GetRequest.getRequestByName(requestName))
    requestType = request["RequestType"]

    # Current assignments may come back as a list or as a
    # {team: priority} dict; normalize to a list of team names.
    teams = ProdManagement.listTeams()
    assignments = GetRequest.getAssignmentsByName(requestName)
    if isinstance(assignments, dict):
        assignments = assignments.keys()

    helper = Utilities.loadWorkload(request)
    acqEra = helper.getAcquisitionEra()
    if acqEra is None:
        acqEra = ""
    procVer = helper.getProcessingVersion()
    if procVer is None:
        procVer = ""
    dashboardActivity = helper.getDashboardActivity()
    reqMergedBase, reqUnmergedBase = helper.getLFNBases()

    return self.templatepage("Assign", requests=[request], teams=teams,
                             assignments=assignments, sites=self.sites,
                             mergedLFNBases=self.mergedLFNBases[requestType],
                             reqMergedBase=reqMergedBase,
                             unmergedLFNBases=self.allUnmergedLFNBases,
                             reqUnmergedBase=reqUnmergedBase,
                             acqEra=acqEra, procVer=procVer,
                             dashboardActivity=dashboardActivity,
                             badRequests=[])
def splitting(self, requestName):
    """
    _splitting_

    Retrieve the job splitting parameters for all tasks in the spec,
    format them the way the splitting page expects (parameters
    JSON-encoded per task) and hand them to the template.
    """
    self.validate(requestName)
    request = GetRequest.getRequestByName(requestName)
    helper = Utilities.loadWorkload(request)

    splittingDict = helper.listJobSplittingParametersByTask()
    taskNames = sorted(splittingDict.keys())

    splitInfo = [{"splitAlgo": splittingDict[name]["algorithm"],
                  "splitParams": JsonWrapper.dumps(splittingDict[name]),
                  "taskType": splittingDict[name]["type"],
                  "taskName": name}
                 for name in taskNames]

    return self.templatepage("Splitting", requestName = requestName,
                             taskInfo = splitInfo, taskNames = taskNames)
def getMostRecentOutputForPrepID(self, prepID):
    """Return the datasets produced by the most recently submitted request with this prep ID"""
    # The most recently submitted request has the largest request ID.
    newestID = max(GetRequest.getRequestByPrepID(prepID))
    workload = Utilities.loadWorkload(GetRequest.getRequest(newestID))
    return workload.listOutputDatasets()
def getOutputForPrepID(self, prepID):
    """Return the datasets produced by this prep ID, as a dict of
    requestName -> list of output datasets."""
    result = {}
    for reqID in GetRequest.getRequestByPrepID(prepID):
        req = GetRequest.getRequest(reqID)
        workload = Utilities.loadWorkload(req)
        result[req["RequestName"]] = workload.listOutputDatasets()
    return result
def handleSplittingPage(self, requestName, splittingTask, splittingAlgo,
                        **submittedParams):
    """
    _handleSplittingPage_

    Parse job splitting parameters sent from the splitting parameter update
    page.  Pull down the request and modify the new spec applying the
    updated splitting parameters.
    """
    splitParams = {}
    # Each algorithm exposes a different set of HTML form fields; the
    # submitted values arrive as strings and must be coerced here.
    if splittingAlgo == "FileBased":
        splitParams["files_per_job"] = int(submittedParams["files_per_job"])
    elif splittingAlgo == "TwoFileBased":
        # The form uses a distinct field name to avoid clashing with FileBased.
        splitParams["files_per_job"] = int(submittedParams["two_files_per_job"])
    elif splittingAlgo == "LumiBased":
        splitParams["lumis_per_job"] = int(submittedParams["lumis_per_job"])
        # Checkboxes arrive as the string "True"/"False", not as booleans.
        if str(submittedParams["halt_job_on_file_boundaries"]) == "True":
            splitParams["halt_job_on_file_boundaries"] = True
        else:
            splitParams["halt_job_on_file_boundaries"] = False
    elif splittingAlgo == "EventAwareLumiBased":
        splitParams["events_per_job"] = int(submittedParams["avg_events_per_job"])
        splitParams["max_events_per_lumi"] = int(submittedParams["max_events_per_lumi"])
        if str(submittedParams["halt_job_on_file_boundaries_event_aware"]) == "True":
            splitParams["halt_job_on_file_boundaries"] = True
        else:
            splitParams["halt_job_on_file_boundaries"] = False
    elif splittingAlgo == "EventBased":
        splitParams["events_per_job"] = int(submittedParams["events_per_job"])
        # Optional fields: only present for some request types.
        if "events_per_lumi" in submittedParams:
            splitParams["events_per_lumi"] = int(submittedParams["events_per_lumi"])
        if "lheInputFiles" in submittedParams:
            if str(submittedParams["lheInputFiles"]) == "True":
                splitParams["lheInputFiles"] = True
            else:
                splitParams["lheInputFiles"] = False
    elif splittingAlgo == "Harvest":
        splitParams["periodic_harvest_interval"] = int(submittedParams["periodic_harvest_interval"])
    elif 'Merg' in splittingTask:
        # Merge tasks are matched by task name rather than algorithm name.
        for field in ['min_merge_size', 'max_merge_size', 'max_merge_events', 'max_wait_time']:
            splitParams[field] = int(submittedParams[field])
    # include_parents is applied independently of the algorithm branch above.
    if "include_parents" in submittedParams.keys():
        if str(submittedParams["include_parents"]) == "True":
            splitParams["include_parents"] = True
        else:
            splitParams["include_parents"] = False

    self.validate(requestName)
    request = GetRequest.getRequestByName(requestName)
    helper = Utilities.loadWorkload(request)
    logging.info("SetSplitting " + requestName + splittingTask + splittingAlgo + str(splitParams))
    helper.setJobSplittingParameters(splittingTask, splittingAlgo, splitParams)
    # Persist the modified spec back to the workload store.
    Utilities.saveWorkload(helper, request['RequestWorkflow'])
    return "Successfully updated splitting parameters for " + splittingTask \
           + " " + detailsBackLink(requestName)
def assignWorkload(self, requestName, kwargs):
    """ Make all the necessary changes in the Workload to reflect the new assignment """
    request = GetRequest.getRequestByName(requestName)
    helper = Utilities.loadWorkload(request)
    # AcquisitionEra/ProcessingVersion may be a plain value or a per-task
    # dict; validate every value in either case.
    for field in ["AcquisitionEra", "ProcessingVersion"]:
        if type(kwargs[field]) == dict:
            for value in kwargs[field].values():
                self.validate(value, field)
        else:
            self.validate(kwargs[field], field)
    # Set white list and black list
    whiteList = kwargs.get("SiteWhitelist", [])
    blackList = kwargs.get("SiteBlacklist", [])
    helper.setSiteWildcardsLists(siteWhitelist=whiteList, siteBlacklist=blackList,
                                 wildcardDict=self.wildcardSites)
    # Set ProcessingVersion and AcquisitionEra, which could be json encoded dicts
    helper.setProcessingVersion(kwargs["ProcessingVersion"])
    helper.setAcquisitionEra(kwargs["AcquisitionEra"])
    #FIXME not validated
    helper.setLFNBase(kwargs["MergedLFNBase"], kwargs["UnmergedLFNBase"])
    # Merge defaults: 2 GiB min, 4 GiB max, 50k events.
    helper.setMergeParameters(int(kwargs.get("MinMergeSize", 2147483648)),
                              int(kwargs.get("MaxMergeSize", 4294967296)),
                              int(kwargs.get("MaxMergeEvents", 50000)))
    helper.setupPerformanceMonitoring(
        int(kwargs.get("maxRSS", 2411724)),
        int(kwargs.get("maxVSize", 2411724)),
        int(kwargs.get("SoftTimeout", 167000)),
        int(kwargs.get("GracePeriod", 300)))
    # Check whether we should check location for the data
    # (presence of the key is all that matters, not its value)
    if "useSiteListAsLocation" in kwargs:
        helper.setLocationDataSourceFlag()
    # Set phedex subscription information
    custodialList = kwargs.get("CustodialSites", [])
    nonCustodialList = kwargs.get("NonCustodialSites", [])
    # AutoApprove (presence only) auto-approves all non-custodial sites.
    if "AutoApprove" in kwargs:
        autoApproveList = nonCustodialList
    else:
        autoApproveList = []
    priority = kwargs.get("Priority", "Low")
    if priority not in ["Low", "Normal", "High"]:
        raise cherrypy.HTTPError(400, "Invalid subscription priority")
    helper.setSubscriptionInformationWildCards(
        wildcardDict=self.wildcardSites,
        custodialSites=custodialList,
        nonCustodialSites=nonCustodialList,
        autoApproveSites=autoApproveList,
        priority=priority)
    helper.setDashboardActivity(kwargs.get("dashboard", ""))
    # Persist the modified workload and notify WMStats.
    Utilities.saveWorkload(helper, request['RequestWorkflow'], self.wmstatWriteURL)
def showWorkload(self, requestName):
    """ Displays the workload """
    self.validate(requestName)
    try:
        request = GetRequest.getRequestByName(requestName)
    except (Exception, RuntimeError) as ex:
        raise cherrypy.HTTPError(400, "Invalid request. %s" % str(ex))
    helper = Utilities.loadWorkload(Utilities.prepareForTable(request))
    # HTML-escape the raw spec text and keep line breaks visible.
    escaped = cgi.escape(str(helper.data))
    return escaped.replace("\n", "<br/>\n")
def showWorkload(self, requestName):
    """ Displays the workload """
    self.validate(requestName)
    try:
        request = GetRequest.getRequestByName(requestName)
    except (Exception, RuntimeError) as ex:
        # Respond with a generic 400; no internal detail is exposed.
        raise cherrypy.HTTPError(400, "Invalid request.")
    request = Utilities.prepareForTable(request)
    helper = Utilities.loadWorkload(request)
    # Escape for HTML and preserve newlines as <br/>.
    return cgi.escape(str(helper.data)).replace("\n", "<br/>\n")
def one(self, requestName):
    """ Assign a single request """
    self.validate(requestName)
    request = Utilities.prepareForTable(GetRequest.getRequestByName(requestName))
    requestType = request["RequestType"]

    # get assignments; might be a list, or a dict team:priority
    teams = ProdManagement.listTeams()
    assignments = GetRequest.getAssignmentsByName(requestName)
    if isinstance(assignments, dict):
        assignments = assignments.keys()

    helper = Utilities.loadWorkload(request)
    era = helper.getAcquisitionEra()
    acqEra = era if era is not None else ""
    version = helper.getProcessingVersion()
    procVer = version if version is not None else ""
    procString = helper.getProcessingString() or ""

    dashboardActivity = helper.getDashboardActivity()
    blockCloseMaxWaitTime = helper.getBlockCloseMaxWaitTime()
    blockCloseMaxFiles = helper.getBlockCloseMaxFiles()
    blockCloseMaxEvents = helper.getBlockCloseMaxEvents()
    blockCloseMaxSize = helper.getBlockCloseMaxSize()
    reqMergedBase, reqUnmergedBase = helper.getLFNBases()

    return self.templatepage("Assign", requests=[request], teams=teams,
                             assignments=assignments, sites=self.sites,
                             phedexNodes=self.phedexNodes,
                             mergedLFNBases=self.allMergedLFNBases,
                             reqMergedBase=reqMergedBase,
                             unmergedLFNBases=self.allUnmergedLFNBases,
                             reqUnmergedBase=reqUnmergedBase,
                             acqEra=acqEra, procVer=procVer,
                             procString=procString,
                             dashboardActivity=dashboardActivity,
                             badRequests=[],
                             blockCloseMaxWaitTime=blockCloseMaxWaitTime,
                             blockCloseMaxFiles=blockCloseMaxFiles,
                             blockCloseMaxSize=blockCloseMaxSize,
                             blockCloseMaxEvents=blockCloseMaxEvents)
def assignWorkload(self, requestName, kwargs):
    """ Make all the necessary changes in the Workload to reflect the new assignment """
    request = GetRequest.getRequestByName(requestName)
    helper = Utilities.loadWorkload(request)
    # AcquisitionEra/ProcessingVersion may be a plain value or a per-task
    # dict; validate every value in either case.
    for field in ["AcquisitionEra", "ProcessingVersion"]:
        if type(kwargs[field]) == dict:
            for value in kwargs[field].values():
                self.validate(value, field)
        else:
            self.validate(kwargs[field], field)
    # Set white list and black list
    whiteList = kwargs.get("SiteWhitelist", [])
    blackList = kwargs.get("SiteBlacklist", [])
    helper.setSiteWildcardsLists(siteWhitelist = whiteList, siteBlacklist = blackList,
                                 wildcardDict = self.wildcardSites)
    # Set ProcessingVersion and AcquisitionEra, which could be json encoded dicts
    helper.setProcessingVersion(kwargs["ProcessingVersion"])
    helper.setAcquisitionEra(kwargs["AcquisitionEra"])
    #FIXME not validated
    helper.setLFNBase(kwargs["MergedLFNBase"], kwargs["UnmergedLFNBase"])
    # Merge defaults: 2 GiB min, 4 GiB max, 50k events.
    helper.setMergeParameters(int(kwargs.get("MinMergeSize", 2147483648)),
                              int(kwargs.get("MaxMergeSize", 4294967296)),
                              int(kwargs.get("MaxMergeEvents", 50000)))
    helper.setupPerformanceMonitoring(int(kwargs.get("maxRSS", 2411724)),
                                      int(kwargs.get("maxVSize", 2411724)),
                                      int(kwargs.get("SoftTimeout", 171600)),
                                      int(kwargs.get("GracePeriod", 300)))
    # Check whether we should check location for the data
    # (presence of the key is all that matters, not its value)
    if "useSiteListAsLocation" in kwargs:
        helper.setLocationDataSourceFlag()
    # Set phedex subscription information
    custodialList = kwargs.get("CustodialSites", [])
    nonCustodialList = kwargs.get("NonCustodialSites", [])
    # AutoApprove (presence only) auto-approves all non-custodial sites.
    if "AutoApprove" in kwargs:
        autoApproveList = nonCustodialList
    else:
        autoApproveList = []
    priority = kwargs.get("Priority", "Low")
    if priority not in ["Low", "Normal", "High"]:
        raise cherrypy.HTTPError(400, "Invalid subscription priority")
    helper.setSubscriptionInformationWildCards(wildcardDict = self.wildcardSites,
                                               custodialSites = custodialList,
                                               nonCustodialSites = nonCustodialList,
                                               autoApproveSites = autoApproveList,
                                               priority = priority)
    helper.setDashboardActivity(kwargs.get("dashboard", ""))
    # Persist the modified workload and notify WMStats.
    Utilities.saveWorkload(helper, request['RequestWorkflow'], self.wmstatWriteURL)
def one(self, requestName):
    """ Assign a single request """
    self.validate(requestName)
    rawRequest = GetRequest.getRequestByName(requestName)
    request = Utilities.prepareForTable(rawRequest)

    # Existing assignments may be a list or a {team: priority} dict.
    teams = ProdManagement.listTeams()
    assignments = GetRequest.getAssignmentsByName(requestName)
    if isinstance(assignments, dict):
        assignments = assignments.keys()

    helper = Utilities.loadWorkload(request)

    # Template defaults: empty string when the spec has no value set.
    acqEra = ""
    procVer = ""
    procString = ""
    if helper.getAcquisitionEra() is not None:
        acqEra = helper.getAcquisitionEra()
    if helper.getProcessingVersion() is not None:
        procVer = helper.getProcessingVersion()
    if helper.getProcessingString():
        procString = helper.getProcessingString()

    dashboardActivity = helper.getDashboardActivity()
    blockCloseMaxWaitTime = helper.getBlockCloseMaxWaitTime()
    blockCloseMaxFiles = helper.getBlockCloseMaxFiles()
    blockCloseMaxEvents = helper.getBlockCloseMaxEvents()
    blockCloseMaxSize = helper.getBlockCloseMaxSize()
    reqMergedBase, reqUnmergedBase = helper.getLFNBases()

    return self.templatepage(
        "Assign",
        requests=[request],
        teams=teams,
        assignments=assignments,
        sites=self.sites,
        phedexNodes=self.phedexNodes,
        mergedLFNBases=self.allMergedLFNBases,
        reqMergedBase=reqMergedBase,
        unmergedLFNBases=self.allUnmergedLFNBases,
        reqUnmergedBase=reqUnmergedBase,
        acqEra=acqEra,
        procVer=procVer,
        procString=procString,
        dashboardActivity=dashboardActivity,
        badRequests=[],
        blockCloseMaxWaitTime=blockCloseMaxWaitTime,
        blockCloseMaxFiles=blockCloseMaxFiles,
        blockCloseMaxSize=blockCloseMaxSize,
        blockCloseMaxEvents=blockCloseMaxEvents,
    )
def getConfigIDs(self, prim, proc, tier):
    """
    _getConfigIDs_

    Get the ConfigIDs for the specified request
    """
    dataset = self.getDataset(prim, proc, tier)
    requests = GetRequest.getRequestsByCriteria("Datasets.GetRequestByInput", dataset)

    result = {}
    for req in requests:
        workload = Utilities.loadWorkload(req)
        result[req["RequestName"]] = workload.listAllCMSSWConfigCacheIDs()
    return result
def index(self, all=0): """ Main page """ # returns dict of name:id allRequests = Utilities.requestsWithStatus('assignment-approved') teams = ProdManagement.listTeams() procVer = "" acqEra = "" procString = "" dashboardActivity = None badRequestNames = [] goodRequests = [] reqMergedBase = None reqUnmergedBase = None blockCloseMaxWaitTime = 66400 blockCloseMaxFiles = 500 blockCloseMaxEvents = 250000000 blockCloseMaxSize = 5000000000000 for request in allRequests: # make sure there's a workload attached try: helper = Utilities.loadWorkload(request) except Exception, ex: logging.error("Assign error: %s " % str(ex)) badRequestNames.append(request["RequestName"]) else: # get defaults from the first good one if not goodRequests: # forget it if it fails. try: if helper.getAcquisitionEra() != None: acqEra = helper.getAcquisitionEra() if helper.getProcessingVersion() != None: procVer = helper.getProcessingVersion() if helper.getProcessingString() != None: procString = helper.getProcessingString() blockCloseMaxWaitTime = helper.getBlockCloseMaxWaitTime( ) blockCloseMaxFiles = helper.getBlockCloseMaxFiles() blockCloseMaxEvents = helper.getBlockCloseMaxEvents() blockCloseMaxSize = helper.getBlockCloseMaxSize() (reqMergedBase, reqUnmergedBase) = helper.getLFNBases() dashboardActivity = helper.getDashboardActivity() goodRequests.append(request) except Exception, ex: logging.error("Assign error: %s " % str(ex)) badRequestNames.append(request["RequestName"]) else:
def index(self, all=0): """ Main page """ # returns dict of name:id allRequests = Utilities.requestsWithStatus("assignment-approved") teams = ProdManagement.listTeams() procVer = "" acqEra = "" procString = "" dashboardActivity = None badRequestNames = [] goodRequests = [] reqMergedBase = None reqUnmergedBase = None blockCloseMaxWaitTime = 66400 blockCloseMaxFiles = 500 blockCloseMaxEvents = 250000000 blockCloseMaxSize = 5000000000000 for request in allRequests: # make sure there's a workload attached try: helper = Utilities.loadWorkload(request) except Exception, ex: logging.error("Assign error: %s " % str(ex)) badRequestNames.append(request["RequestName"]) else: # get defaults from the first good one if not goodRequests: # forget it if it fails. try: if helper.getAcquisitionEra() != None: acqEra = helper.getAcquisitionEra() if helper.getProcessingVersion() != None: procVer = helper.getProcessingVersion() if helper.getProcessingString() != None: procString = helper.getProcessingString() blockCloseMaxWaitTime = helper.getBlockCloseMaxWaitTime() blockCloseMaxFiles = helper.getBlockCloseMaxFiles() blockCloseMaxEvents = helper.getBlockCloseMaxEvents() blockCloseMaxSize = helper.getBlockCloseMaxSize() (reqMergedBase, reqUnmergedBase) = helper.getLFNBases() dashboardActivity = helper.getDashboardActivity() goodRequests.append(request) except Exception, ex: logging.error("Assign error: %s " % str(ex)) badRequestNames.append(request["RequestName"]) else:
def deleteRequest(self, requestName):
    """
    Deletes a request from the ReqMgr MySQL/Oracle database and also from CoucDB.
    """
    # 404 will be thrown automatically on a non-existing request
    request = self.getRequest(requestName)
    helper = Utilities.loadWorkload(request)
    # The CouchDB document id is the request name itself.
    helper.deleteCouch(self.couchUrl, self.workloadDBName, requestName)
    # #4289 - Request delete operation deletes the request from
    # MySQL/Oracle but not from CouchDB, fix here
    # Seangchan shall also fix here deleting such requests from WMStats (#4398)
    RequestAdmin.deleteRequest(requestName)
    return
def assignWorkload(self, requestName, kwargs):
    """ Make all the necessary changes in the Workload to reflect the new assignment """
    request = GetRequest.getRequestByName(requestName)
    helper = Utilities.loadWorkload(request)
    #Validate the different parts of the processed dataset
    processedDatasetParts = ["AcquisitionEra", "ProcessingVersion"]
    # ProcessingString is optional; only validate it when supplied.
    if kwargs.get("ProcessingString", None):
        processedDatasetParts.append("ProcessingString")
    for field in processedDatasetParts:
        # Values may be per-task dicts or plain strings.
        if type(kwargs[field]) == dict:
            for value in kwargs[field].values():
                self.validate(value, field)
        else:
            self.validate(kwargs[field], field)
    # Set white list and black list
    whiteList = kwargs.get("SiteWhitelist", [])
    blackList = kwargs.get("SiteBlacklist", [])
    # A single site arrives as a bare string; normalize both to lists.
    if type(whiteList) != list:
        whiteList = [whiteList]
    if type(blackList) != list:
        blackList = [blackList]
    helper.setSiteWildcardsLists(siteWhitelist=whiteList, siteBlacklist=blackList,
                                 wildcardDict=self.wildcardSites)
    # Set ProcessingVersion and AcquisitionEra, which could be json encoded dicts
    helper.setProcessingVersion(kwargs["ProcessingVersion"])
    helper.setAcquisitionEra(kwargs["AcquisitionEra"])
    helper.setProcessingString(kwargs.get("ProcessingString", None))
    # Now verify the output datasets (name format: /primary/processed/tier)
    outputDatasets = helper.listOutputDatasets()
    for dataset in outputDatasets:
        tokens = dataset.split("/")
        procds = tokens[2]
        try:
            WMCore.Lexicon.procdataset(procds)
        except AssertionError, ex:
            raise cherrypy.HTTPError(
                400, "Bad output dataset name, check the processed dataset.\n %s" % str(ex))
def assignWorkload(self, requestName, kwargs):
    """ Make all the necessary changes in the Workload to reflect the new assignment """
    request = GetRequest.getRequestByName(requestName)
    helper = Utilities.loadWorkload(request)
    for field in ["AcquisitionEra", "ProcessingVersion"]:
        self.validate(kwargs[field], field)
    # Site lists may arrive in several shapes; parseSite normalizes them.
    helper.setSiteWhitelist(Utilities.parseSite(kwargs,"SiteWhitelist"))
    helper.setSiteBlacklist(Utilities.parseSite(kwargs,"SiteBlacklist"))
    helper.setProcessingVersion(kwargs["ProcessingVersion"])
    helper.setAcquisitionEra(kwargs["AcquisitionEra"])
    #FIXME not validated
    helper.setLFNBase(kwargs["MergedLFNBase"], kwargs["UnmergedLFNBase"])
    # NOTE(review): these kwargs are required here (KeyError if absent),
    # unlike other variants of this method that fall back to defaults.
    helper.setMergeParameters(int(kwargs["MinMergeSize"]),
                              int(kwargs["MaxMergeSize"]),
                              int(kwargs["MaxMergeEvents"]))
    helper.setupPerformanceMonitoring(int(kwargs["maxRSS"]),
                                      int(kwargs["maxVSize"]))
    helper.setDashboardActivity(kwargs.get("dashboard", ""))
    # Persist the modified workload.
    Utilities.saveWorkload(helper, request['RequestWorkflow'])
def assignWorkload(self, requestName, kwargs):
    """ Make all the necessary changes in the Workload to reflect the new assignment """
    request = GetRequest.getRequestByName(requestName)
    helper = Utilities.loadWorkload(request)
    #Validate the different parts of the processed dataset
    processedDatasetParts = ["AcquisitionEra", "ProcessingVersion"]
    # ProcessingString is optional; only validate it when supplied.
    if kwargs.get("ProcessingString", None):
        processedDatasetParts.append("ProcessingString")
    for field in processedDatasetParts:
        # Values may be per-task dicts or plain strings.
        if type(kwargs[field]) == dict:
            for value in kwargs[field].values():
                self.validate(value, field)
        else:
            self.validate(kwargs[field], field)
    # Set white list and black list
    whiteList = kwargs.get("SiteWhitelist", [])
    blackList = kwargs.get("SiteBlacklist", [])
    # A single site arrives as a bare string; normalize both to lists.
    if type(whiteList) != list:
        whiteList = [whiteList]
    if type(blackList) != list:
        blackList = [blackList]
    helper.setSiteWildcardsLists(siteWhitelist = whiteList, siteBlacklist = blackList,
                                 wildcardDict = self.wildcardSites)
    # Set ProcessingVersion and AcquisitionEra, which could be json encoded dicts
    helper.setProcessingVersion(kwargs["ProcessingVersion"])
    helper.setAcquisitionEra(kwargs["AcquisitionEra"])
    helper.setProcessingString(kwargs.get("ProcessingString", None))
    # Now verify the output datasets (name format: /primary/processed/tier)
    outputDatasets = helper.listOutputDatasets()
    for dataset in outputDatasets:
        tokens = dataset.split("/")
        procds = tokens[2]
        try:
            WMCore.Lexicon.procdataset(procds)
        except AssertionError, ex:
            raise cherrypy.HTTPError(400, "Bad output dataset name, check the processed dataset.\n %s" % str(ex))
def getMostRecentOutputForPrepID(self, prepID):
    """
    Return the datasets produced by the most recently submitted request
    with this prep ID.

    Requests in a terminal failed/canceled state are skipped.  If every
    request for the prep ID is in such a state, or there are no requests
    at all, an empty list is returned.
    """
    requestIDs = sorted(GetRequest.getRequestByPrepID(prepID))
    # most recent will have the largest ID
    requestIDs.reverse()

    rejectList = ['aborted', 'failed', 'rejected', 'epic-failed']
    request = None
    # Go through each request in order from largest to smallest
    # looking for the first non-failed/non-canceled request
    for requestID in requestIDs:
        candidate = GetRequest.getRequest(requestID)
        # Missing status is treated as rejected.
        requestStatus = candidate.get("RequestStatus", 'aborted').lower()
        if requestStatus not in rejectList:
            # Bug fix: only keep a request that actually passed the status
            # filter.  Previously, when every request was rejected, the last
            # (rejected) one examined was used and its datasets returned.
            request = candidate
            break

    if request is not None:
        helper = Utilities.loadWorkload(request)
        return helper.listOutputDatasets()
    return []
def index(self, all=0): """ Main page """ # returns dict of name:id allRequests = Utilities.requestsWithStatus('assignment-approved') teams = ProdManagement.listTeams() procVer = "" acqEra = "" dashboardActivity = None badRequestNames = [] goodRequests = [] reqMergedBase = None reqUnmergedBase = None for request in allRequests: # make sure there's a workload attached try: helper = Utilities.loadWorkload(request) except: badRequestNames.append(request["RequestName"]) else: # get defaults from the first good one if not goodRequests: if helper.getAcquisitionEra() != None: acqEra = helper.getAcquisitionEra() if helper.getProcessingVersion() != None: procVer = helper.getProcessingVersion() (reqMergedBase, reqUnmergedBase) = helper.getLFNBases() dashboardActivity = helper.getDashboardActivity() goodRequests.append(request) return self.templatepage("Assign", all=all, requests=goodRequests, teams=teams, assignments=[], sites=self.sites, mergedLFNBases=self.allMergedLFNBases, reqMergedBase=reqMergedBase, unmergedLFNBases=self.allUnmergedLFNBases, reqUnmergedBase=reqUnmergedBase, acqEra = acqEra, procVer = procVer, dashboardActivity=dashboardActivity, badRequests=badRequestNames)
def assignWorkload(self, requestName, kwargs):
    """
    Make all the necessary changes in the Workload to reflect the new
    assignment.

    Validates the processed-dataset components and site lists, applies the
    assignment parameters to the workload spec, saves the spec, and finally
    mirrors the key fields into the request's CouchDB document.
    Raises cherrypy.HTTPError(400) on any invalid input.
    """
    request = GetRequest.getRequestByName(requestName)
    helper = Utilities.loadWorkload(request)
    # Validate the different parts of the processed dataset; any field not
    # supplied in kwargs is re-validated with its current spec value.
    processedDatasetParts = {"AcquisitionEra": helper.getAcquisitionEra(),
                             "ProcessingString": helper.getProcessingString(),
                             "ProcessingVersion": helper.getProcessingVersion()}
    for field, origValue in processedDatasetParts.iteritems():
        if field in kwargs and isinstance(kwargs[field], dict):
            # Per-task dict: validate each task's value.
            for value in kwargs[field].values():
                self.validate(value, field)
        else:
            # Bug fix: pass the field name so validation errors identify the
            # failing component (consistent with the dict branch above and
            # with the other variants of this method).
            self.validate(kwargs.get(field, origValue), field)
    # Set white list and black list; a single site may arrive as a bare
    # string, so normalize both to lists first.
    whiteList = kwargs.get("SiteWhitelist", [])
    blackList = kwargs.get("SiteBlacklist", [])
    if not isinstance(whiteList, list):
        whiteList = [whiteList]
    if not isinstance(blackList, list):
        blackList = [blackList]
    helper.setSiteWildcardsLists(siteWhitelist=whiteList, siteBlacklist=blackList,
                                 wildcardDict=self.wildcardSites)
    res = set(whiteList) & set(blackList)
    if len(res):
        raise cherrypy.HTTPError(400, "White and blacklist the same site is not allowed %s" % list(res))
    # Set AcquisitionEra, ProcessingString and ProcessingVersion
    # which could be json encoded dicts
    if "AcquisitionEra" in kwargs:
        helper.setAcquisitionEra(kwargs["AcquisitionEra"])
    if "ProcessingString" in kwargs:
        helper.setProcessingString(kwargs["ProcessingString"])
    if "ProcessingVersion" in kwargs:
        helper.setProcessingVersion(kwargs["ProcessingVersion"])
    # Now verify the output datasets (name format: /primary/processed/tier)
    datatier = []
    outputDatasets = helper.listOutputDatasets()
    for dataset in outputDatasets:
        tokens = dataset.split("/")
        procds = tokens[2]
        datatier.append(tokens[3])
        try:
            WMCore.Lexicon.procdataset(procds)
        except AssertionError as ex:
            raise cherrypy.HTTPError(400, "Bad output dataset name, check the processed dataset.\n %s" % str(ex))
    # Verify whether the output datatiers are available in DBS
    self.validateDatatier(datatier, dbsUrl=helper.getDbsUrl())
    # FIXME not validated
    helper.setLFNBase(kwargs["MergedLFNBase"], kwargs["UnmergedLFNBase"])
    # Merge defaults: 2 GiB min, 4 GiB max, 50k events.
    helper.setMergeParameters(int(kwargs.get("MinMergeSize", 2147483648)),
                              int(kwargs.get("MaxMergeSize", 4294967296)),
                              int(kwargs.get("MaxMergeEvents", 50000)))
    helper.setupPerformanceMonitoring(kwargs.get("MaxRSS", None),
                                      kwargs.get("MaxVSize", None),
                                      kwargs.get("SoftTimeout", None),
                                      kwargs.get("GracePeriod", None))
    # Check whether we should check location for the data
    helper.setTrustLocationFlag(inputFlag=strToBool(kwargs.get("TrustSitelists", False)),
                                pileupFlag=strToBool(kwargs.get("TrustPUSitelists", False)))
    helper.setAllowOpportunistic(allowOpport=strToBool(kwargs.get("AllowOpportunistic", False)))
    # Set phedex subscription information
    custodialList = kwargs.get("CustodialSites", [])
    nonCustodialList = kwargs.get("NonCustodialSites", [])
    autoApproveList = kwargs.get("AutoApproveSubscriptionSites", [])
    for site in autoApproveList:
        if site.endswith("_MSS"):
            raise cherrypy.HTTPError(400, "Auto-approval to MSS endpoint not allowed %s" % autoApproveList)
    subscriptionPriority = kwargs.get("SubscriptionPriority", "Low")
    if subscriptionPriority not in ["Low", "Normal", "High"]:
        raise cherrypy.HTTPError(400, "Invalid subscription priority %s" % subscriptionPriority)
    custodialType = kwargs.get("CustodialSubType", "Replica")
    if custodialType not in ["Move", "Replica"]:
        raise cherrypy.HTTPError(400, "Invalid custodial subscription type %s" % custodialType)
    nonCustodialType = kwargs.get("NonCustodialSubType", "Replica")
    if nonCustodialType not in ["Move", "Replica"]:
        raise cherrypy.HTTPError(400, "Invalid noncustodial subscription type %s" % nonCustodialType)
    if "CustodialGroup" in kwargs and not isinstance(kwargs["CustodialGroup"], basestring):
        raise cherrypy.HTTPError(400, "Invalid CustodialGroup format %s" % kwargs["CustodialGroup"])
    if "NonCustodialGroup" in kwargs and not isinstance(kwargs["NonCustodialGroup"], basestring):
        raise cherrypy.HTTPError(400, "Invalid NonCustodialGroup format %s" % kwargs["NonCustodialGroup"])
    if "DeleteFromSource" in kwargs and not isinstance(kwargs["DeleteFromSource"], bool):
        raise cherrypy.HTTPError(400, "Invalid DeleteFromSource format %s" % kwargs["DeleteFromSource"])
    helper.setSubscriptionInformationWildCards(wildcardDict=self.wildcardSites,
                                               custodialSites=custodialList,
                                               nonCustodialSites=nonCustodialList,
                                               autoApproveSites=autoApproveList,
                                               custodialSubType=custodialType,
                                               nonCustodialSubType=nonCustodialType,
                                               custodialGroup=kwargs.get("CustodialGroup", "DataOps"),
                                               nonCustodialGroup=kwargs.get("NonCustodialGroup", "DataOps"),
                                               priority=subscriptionPriority,
                                               deleteFromSource=kwargs.get("DeleteFromSource", False))
    # Block closing information; fall back to the spec's current values.
    blockCloseMaxWaitTime = int(kwargs.get("BlockCloseMaxWaitTime", helper.getBlockCloseMaxWaitTime()))
    blockCloseMaxFiles = int(kwargs.get("BlockCloseMaxFiles", helper.getBlockCloseMaxFiles()))
    blockCloseMaxEvents = int(kwargs.get("BlockCloseMaxEvents", helper.getBlockCloseMaxEvents()))
    blockCloseMaxSize = int(kwargs.get("BlockCloseMaxSize", helper.getBlockCloseMaxSize()))
    helper.setBlockCloseSettings(blockCloseMaxWaitTime, blockCloseMaxFiles,
                                 blockCloseMaxEvents, blockCloseMaxSize)
    helper.setMemoryAndCores(kwargs.get("Memory"), kwargs.get("Multicore"))
    helper.setDashboardActivity(kwargs.get("Dashboard", ""))
    helper.setTaskProperties(kwargs)
    Utilities.saveWorkload(helper, request["RequestWorkflow"], self.wmstatWriteURL)
    # update AcquisitionEra in the Couch document (#4380)
    # request object returned above from Oracle doesn't have information Couch
    # database
    reqDetails = Utilities.requestDetails(request["RequestName"])
    couchDb = Database(reqDetails["CouchWorkloadDBName"], reqDetails["CouchURL"])
    couchDb.updateDocument(request["RequestName"], "ReqMgr", "updaterequest",
                           fields={"AcquisitionEra": reqDetails["AcquisitionEra"],
                                   "ProcessingVersion": reqDetails["ProcessingVersion"],
                                   "CustodialSites": custodialList,
                                   "NonCustodialSites": nonCustodialList,
                                   "AutoApproveSubscriptionSites": autoApproveList,
                                   "SubscriptionPriority": subscriptionPriority,
                                   "CustodialSubType": custodialType,
                                   "NonCustodialSubType": nonCustodialType,
                                   "CustodialGroup": kwargs.get("CustodialGroup", "DataOps"),
                                   "NonCustodialGroup": kwargs.get("NonCustodialGroup", "DataOps"),
                                   "DeleteFromSource": kwargs.get("DeleteFromSource", False),
                                   "Teams": kwargs["Teams"],
                                   "OutputDatasets": outputDatasets,
                                   "SiteWhitelist": whiteList,
                                   "SiteBlacklist": blackList,
                                   "MergedLFNBase": kwargs["MergedLFNBase"],
                                   "UnmergedLFNBase": kwargs["UnmergedLFNBase"],
                                   "Dashboard": kwargs.get("Dashboard", ""),
                                   "TrustSitelists": kwargs.get("TrustSitelists", False),
                                   "TrustPUSitelists": kwargs.get("TrustPUSitelists", False),
                                   "AllowOpportunistic": kwargs.get("AllowOpportunistic", False)},
                           useBody=True)
def assignWorkload(self, requestName, kwargs):
    """
    Make all the necessary changes in the Workload to reflect the new assignment.

    Loads the workload spec for ``requestName``, validates the assignment
    arguments in ``kwargs`` (processed-dataset name parts, site white/black
    lists, PhEDEx subscription settings), applies them to the spec, saves the
    spec, and finally mirrors the assigned fields into the request's Couch
    document.

    :param requestName: name of the request being assigned
    :param kwargs: dict of assignment arguments (mostly web form fields)
    :raises cherrypy.HTTPError: 400 on any validation failure
    """
    request = GetRequest.getRequestByName(requestName)
    helper = Utilities.loadWorkload(request)
    # Validate the different parts of the processed dataset
    processedDatasetParts = ["AcquisitionEra", "ProcessingVersion"]
    if kwargs.get("ProcessingString", None):
        processedDatasetParts.append("ProcessingString")
    for field in processedDatasetParts:
        # each part may be a single value or a per-task dict of values
        if type(kwargs[field]) == dict:
            for value in kwargs[field].values():
                self.validate(value, field)
        else:
            self.validate(kwargs[field], field)
    # Set white list and black list
    whiteList = kwargs.get("SiteWhitelist", [])
    blackList = kwargs.get("SiteBlacklist", [])
    # a single site arrives as a bare string from the form; normalize to list
    if type(whiteList) != list:
        whiteList = [whiteList]
    if type(blackList) != list:
        blackList = [blackList]
    helper.setSiteWildcardsLists(siteWhitelist = whiteList, siteBlacklist = blackList,
                                 wildcardDict = self.wildcardSites)
    # a site may not appear in both lists at once
    res = set(whiteList) & set(blackList)
    if len(res):
        raise cherrypy.HTTPError(400, "White and blacklist the same site is not allowed %s" % list(res))
    # Set ProcessingVersion and AcquisitionEra, which could be json encoded dicts
    helper.setProcessingVersion(kwargs["ProcessingVersion"])
    helper.setAcquisitionEra(kwargs["AcquisitionEra"])
    helper.setProcessingString(kwargs.get("ProcessingString", None))
    # Now verify the output datasets
    outputDatasets = helper.listOutputDatasets()
    for dataset in outputDatasets:
        # dataset format: /primary/processed/tier -> index 2 is the processed name
        tokens = dataset.split("/")
        procds = tokens[2]
        try:
            WMCore.Lexicon.procdataset(procds)
        except AssertionError as ex:
            raise cherrypy.HTTPError(400, "Bad output dataset name, check the processed dataset.\n %s" % str(ex))
    #FIXME not validated
    helper.setLFNBase(kwargs["MergedLFNBase"], kwargs["UnmergedLFNBase"])
    helper.setMergeParameters(int(kwargs.get("MinMergeSize", 2147483648)),
                              int(kwargs.get("MaxMergeSize", 4294967296)),
                              int(kwargs.get("MaxMergeEvents", 50000)))
    helper.setupPerformanceMonitoring(kwargs.get("MaxRSS", None),
                                      kwargs.get("MaxVSize", None),
                                      kwargs.get("SoftTimeout", None),
                                      kwargs.get("GracePeriod", None))
    # Check whether we should check location for the data
    useAAA = strToBool(kwargs.get("useSiteListAsLocation", False))
    if useAAA:
        helper.setLocationDataSourceFlag(flag = useAAA)
    # Set phedex subscription information
    custodialList = kwargs.get("CustodialSites", [])
    nonCustodialList = kwargs.get("NonCustodialSites", [])
    autoApproveList = kwargs.get("AutoApproveSubscriptionSites", [])
    for site in autoApproveList:
        # tape (MSS) endpoints must never be auto-approved
        if site.endswith('_MSS'):
            raise cherrypy.HTTPError(400, "Auto-approval to MSS endpoint not allowed %s" % autoApproveList)
    subscriptionPriority = kwargs.get("SubscriptionPriority", "Low")
    if subscriptionPriority not in ["Low", "Normal", "High"]:
        raise cherrypy.HTTPError(400, "Invalid subscription priority %s" % subscriptionPriority)
    custodialType = kwargs.get("CustodialSubType", "Replica")
    if custodialType not in ["Move", "Replica"]:
        raise cherrypy.HTTPError(400, "Invalid custodial subscription type %s" % custodialType)
    nonCustodialType = kwargs.get("NonCustodialSubType", "Replica")
    if nonCustodialType not in ["Move", "Replica"]:
        raise cherrypy.HTTPError(400, "Invalid noncustodial subscription type %s" % nonCustodialType)
    helper.setSubscriptionInformationWildCards(wildcardDict = self.wildcardSites,
                                               custodialSites = custodialList,
                                               nonCustodialSites = nonCustodialList,
                                               autoApproveSites = autoApproveList,
                                               custodialSubType = custodialType,
                                               nonCustodialSubType = nonCustodialType,
                                               priority = subscriptionPriority)
    # Block closing information; form values override the spec's current settings
    blockCloseMaxWaitTime = int(kwargs.get("BlockCloseMaxWaitTime", helper.getBlockCloseMaxWaitTime()))
    blockCloseMaxFiles = int(kwargs.get("BlockCloseMaxFiles", helper.getBlockCloseMaxFiles()))
    blockCloseMaxEvents = int(kwargs.get("BlockCloseMaxEvents", helper.getBlockCloseMaxEvents()))
    blockCloseMaxSize = int(kwargs.get("BlockCloseMaxSize", helper.getBlockCloseMaxSize()))
    helper.setBlockCloseSettings(blockCloseMaxWaitTime, blockCloseMaxFiles,
                                 blockCloseMaxEvents, blockCloseMaxSize)
    helper.setDashboardActivity(kwargs.get("Dashboard", ""))
    # set Task properties if they are exist
    # TODO: need to define the task format (maybe kwargs["tasks"]?)
    helper.setTaskProperties(kwargs)
    Utilities.saveWorkload(helper, request['RequestWorkflow'], self.wmstatWriteURL)
    # update AcquisitionEra in the Couch document (#4380)
    # request object returned above from Oracle doesn't have information Couch
    # database
    reqDetails = Utilities.requestDetails(request["RequestName"])
    couchDb = Database(reqDetails["CouchWorkloadDBName"], reqDetails["CouchURL"])
    couchDb.updateDocument(request["RequestName"], "ReqMgr", "updaterequest",
                           fields={"AcquisitionEra": reqDetails["AcquisitionEra"],
                                   "ProcessingVersion": reqDetails["ProcessingVersion"],
                                   "CustodialSites": custodialList,
                                   "NonCustodialSites": nonCustodialList,
                                   "AutoApproveSubscriptionSites": autoApproveList,
                                   "SubscriptionPriority": subscriptionPriority,
                                   "CustodialSubType": custodialType,
                                   "NonCustodialSubType": nonCustodialType,
                                   "Teams": kwargs["Teams"],
                                   "OutputDatasets": outputDatasets,
                                   "SiteWhitelist": whiteList,
                                   "SiteBlacklist": blackList},
                           useBody = True)
def showWorkload(self, url): """ Displays the workload """ request = {'RequestWorkflow':url} helper = Utilities.loadWorkload(request) workloadText = str(helper.data) return cgi.escape(workloadText).replace("\n", "<br/>\n")
def getRequest(requestId, reverseTypes=None, reverseStatus=None):
    """
    _getRequest_

    Retrieve a request based on the request id and return a
    ReqMgr.DataStructs.Request instance containing the information.

    :param requestId: numeric Oracle id of the request
    :param reverseTypes: optional reverse lookup map for request types;
        fetched via reverseLookups() when not supplied
    :param reverseStatus: optional reverse lookup map for request status;
        fetched via reverseLookups() when not supplied
    :returns: populated Request instance
    """
    factory = DBConnect.getConnection()
    reqGet = factory(classname="Request.Get")
    reqData = reqGet.execute(requestId)
    requestName = reqData['request_name']
    if not reverseTypes or not reverseStatus:
        reverseTypes, reverseStatus = reverseLookups()
    getGroup = factory(classname="Group.GetGroupFromAssoc")
    groupData = getGroup.execute(reqData['requestor_group_id'])
    getUser = factory(classname="Requestor.GetUserFromAssoc")
    userData = getUser.execute(reqData['requestor_group_id'])
    request = Request()
    request["RequestName"] = requestName
    request["RequestType"] = reverseTypes[reqData['request_type']]
    request["RequestStatus"] = reverseStatus[reqData['request_status']]
    request["RequestPriority"] = reqData['request_priority']
    request["RequestWorkflow"] = reqData['workflow']
    request["RequestNumEvents"] = reqData['request_num_events']
    request["RequestSizeFiles"] = reqData['request_size_files']
    # there used to be RequestEventSize argument, but then SizePerEvent
    # got introduce and got adopted so this is replacing it, presenting
    # this nomenclature inconsistency on Oracle level
    request["SizePerEvent"] = reqData['request_event_size']
    request["PrepID"] = reqData['prep_id']
    request["Group"] = groupData['group_name']
    request["Requestor"] = userData['requestor_hn_name']
    updates = ChangeState.getProgress(requestName)
    request['percent_complete'], request['percent_success'] = percentages(
        updates)
    sqDeps = factory(classname="Software.GetByAssoc")
    swVers = sqDeps.execute(requestId)
    if swVers == {}:
        request['SoftwareVersions'] = ['DEPRECATED']
    else:
        request['SoftwareVersions'] = swVers.values()
    getDatasetsIn = factory(classname="Datasets.GetInput")
    getDatasetsOut = factory(classname="Datasets.GetOutput")
    datasetsIn = getDatasetsIn.execute(requestId)
    datasetsOut = getDatasetsOut.execute(requestId)
    request['InputDatasetTypes'] = datasetsIn
    request['InputDatasets'] = datasetsIn.keys()
    request['OutputDatasets'] = datasetsOut
    # fetch AcquisitionEra from spec, it's not stored in Oracle at all
    import WMCore.HTTPFrontEnd.RequestManager.ReqMgrWebTools as Utilities
    try:
        helper = Utilities.loadWorkload(request)
        request["AcquisitionEra"] = str(helper.getAcquisitionEra())
        # add ProcessingVersion and ProcessingString in the response (#4561)
        request["ProcessingVersion"] = str(helper.getProcessingVersion())
        request["ProcessingString"] = str(helper.getProcessingString())
    # `except E as ex` replaces the Python-2-only `except E, ex` form, matching
    # the exception style used elsewhere in this file; lazy %-args avoid eager
    # string formatting in logging
    except Exception as ex:
        logging.error("Could not check workload for %s, reason: %s",
                      request["RequestName"], ex)
    # bug fix: the Request instance was built but never returned, so callers
    # always received None despite the documented contract
    return request
def assignWorkload(self, requestName, kwargs):
    """
    Make all the necessary changes in the Workload to reflect the new assignment.

    Loads the workload spec for ``requestName``, runs the spec's own argument
    validation, validates processed-dataset name parts and site lists, checks
    the output datatiers against DBS, applies subscription / block-closing /
    resource settings from ``kwargs``, saves the spec, and mirrors the assigned
    fields into the request's Couch document.

    :param requestName: name of the request being assigned
    :param kwargs: dict of assignment arguments (mostly web form fields)
    :raises cherrypy.HTTPError: 400 on any validation failure
    """
    request = GetRequest.getRequestByName(requestName)
    helper = Utilities.loadWorkload(request)
    # spec-level validation first; map validation failures to HTTP 400
    try:
        helper.validateArgumentForAssignment(kwargs)
    except WMSpecFactoryException as ex:
        raise cherrypy.HTTPError(400, str(ex.message()))
    except Exception:
        msg = traceback.format_exc()
        raise cherrypy.HTTPError(400, "Unhandled error: %s" % msg)
    # Validate the different parts of the processed dataset
    processedDatasetParts = {"AcquisitionEra": kwargs.get("AcquisitionEra"),
                             "ProcessingString": kwargs.get("ProcessingString"),
                             "ProcessingVersion": kwargs.get("ProcessingVersion")
                             }
    for field, values in processedDatasetParts.iteritems():
        # each part may be a single value or a per-task dict of values
        if field in kwargs and isinstance(kwargs[field], dict):
            for value in kwargs[field].values():
                self.validate(value, field)
        else:
            self.validate(kwargs.get(field, values), field)
    # Set white list and black list
    whiteList = kwargs.get("SiteWhitelist", [])
    blackList = kwargs.get("SiteBlacklist", [])
    # a single site arrives as a bare string from the form; normalize to list
    if not isinstance(whiteList, list):
        whiteList = [whiteList]
    if not isinstance(blackList, list):
        blackList = [blackList]
    helper.setSiteWildcardsLists(siteWhitelist=whiteList, siteBlacklist=blackList,
                                 wildcardDict=self.wildcardSites)
    # a site may not appear in both lists at once
    res = set(whiteList) & set(blackList)
    if len(res):
        raise cherrypy.HTTPError(400, "White and blacklist the same site is not allowed %s" % list(res))
    helper.setAcquisitionEra(kwargs.get("AcquisitionEra", None))
    helper.setProcessingString(kwargs.get("ProcessingString", None))
    helper.setProcessingVersion(kwargs.get("ProcessingVersion", None))
    # Now verify the output datasets
    datatier = []
    outputDatasets = helper.listOutputDatasets()
    for dataset in outputDatasets:
        # dataset format: /primary/processed/tier
        tokens = dataset.split("/")
        procds = tokens[2]
        datatier.append(tokens[3])
        try:
            WMCore.Lexicon.procdataset(procds)
        except AssertionError as ex:
            raise cherrypy.HTTPError(400, "Bad output dataset name, check the processed dataset.\n %s" % str(ex))
    # Verify whether the output datatiers are available in DBS
    self.validateDatatier(datatier, dbsUrl=helper.getDbsUrl())
    # FIXME not validated
    helper.setLFNBase(kwargs["MergedLFNBase"], kwargs["UnmergedLFNBase"])
    helper.setMergeParameters(int(kwargs.get("MinMergeSize", 2147483648)),
                              int(kwargs.get("MaxMergeSize", 4294967296)),
                              int(kwargs.get("MaxMergeEvents", 50000)))
    helper.setupPerformanceMonitoring(kwargs.get("MaxRSS", None),
                                      kwargs.get("MaxVSize", None),
                                      kwargs.get("SoftTimeout", None),
                                      kwargs.get("GracePeriod", None))
    # Check whether we should check location for the data
    helper.setTrustLocationFlag(inputFlag=strToBool(kwargs.get("TrustSitelists", False)),
                                pileupFlag=strToBool(kwargs.get("TrustPUSitelists", False)))
    helper.setAllowOpportunistic(allowOpport=strToBool(kwargs.get("AllowOpportunistic", False)))
    # Set phedex subscription information
    custodialList = kwargs.get("CustodialSites", [])
    nonCustodialList = kwargs.get("NonCustodialSites", [])
    autoApproveList = kwargs.get("AutoApproveSubscriptionSites", [])
    subscriptionPriority = kwargs.get("SubscriptionPriority", "Low")
    custodialType = kwargs.get("CustodialSubType", "Replica")
    nonCustodialType = kwargs.get("NonCustodialSubType", "Replica")
    helper.setSubscriptionInformationWildCards(wildcardDict=self.wildcardSites,
                                               custodialSites=custodialList,
                                               nonCustodialSites=nonCustodialList,
                                               autoApproveSites=autoApproveList,
                                               custodialSubType=custodialType,
                                               nonCustodialSubType=nonCustodialType,
                                               custodialGroup=kwargs.get("CustodialGroup", "DataOps"),
                                               nonCustodialGroup=kwargs.get("NonCustodialGroup", "DataOps"),
                                               priority=subscriptionPriority,
                                               deleteFromSource=kwargs.get("DeleteFromSource", False))
    # Block closing information; form values override the spec's current settings
    blockCloseMaxWaitTime = int(kwargs.get("BlockCloseMaxWaitTime", helper.getBlockCloseMaxWaitTime()))
    blockCloseMaxFiles = int(kwargs.get("BlockCloseMaxFiles", helper.getBlockCloseMaxFiles()))
    blockCloseMaxEvents = int(kwargs.get("BlockCloseMaxEvents", helper.getBlockCloseMaxEvents()))
    blockCloseMaxSize = int(kwargs.get("BlockCloseMaxSize", helper.getBlockCloseMaxSize()))
    helper.setBlockCloseSettings(blockCloseMaxWaitTime, blockCloseMaxFiles,
                                 blockCloseMaxEvents, blockCloseMaxSize)
    helper.setMemory(kwargs.get("Memory"))
    helper.setCores(kwargs.get("Multicore"))
    helper.setDashboardActivity(kwargs.get("Dashboard", ""))
    helper.setTaskProperties(kwargs)
    Utilities.saveWorkload(helper, request['RequestWorkflow'], self.wmstatWriteURL)
    # update AcquisitionEra in the Couch document (#4380)
    # request object returned above from Oracle doesn't have information Couch
    # database
    reqDetails = Utilities.requestDetails(request["RequestName"])
    couchDb = Database(reqDetails["CouchWorkloadDBName"], reqDetails["CouchURL"])
    couchDb.updateDocument(request["RequestName"], "ReqMgr", "updaterequest",
                           fields={"AcquisitionEra": reqDetails["AcquisitionEra"],
                                   "ProcessingVersion": reqDetails["ProcessingVersion"],
                                   "CustodialSites": custodialList,
                                   "NonCustodialSites": nonCustodialList,
                                   "AutoApproveSubscriptionSites": autoApproveList,
                                   "SubscriptionPriority": subscriptionPriority,
                                   "CustodialSubType": custodialType,
                                   "NonCustodialSubType": nonCustodialType,
                                   "CustodialGroup": kwargs.get("CustodialGroup", "DataOps"),
                                   "NonCustodialGroup": kwargs.get("NonCustodialGroup", "DataOps"),
                                   "DeleteFromSource": kwargs.get("DeleteFromSource", False),
                                   "Teams": kwargs["Teams"],
                                   "OutputDatasets": outputDatasets,
                                   "SiteWhitelist": whiteList,
                                   "SiteBlacklist": blackList,
                                   "MergedLFNBase": kwargs["MergedLFNBase"],
                                   "UnmergedLFNBase": kwargs["UnmergedLFNBase"],
                                   "Dashboard": kwargs.get("Dashboard", ""),
                                   "TrustSitelists": kwargs.get("TrustSitelists", False),
                                   "TrustPUSitelists": kwargs.get("TrustPUSitelists", False),
                                   "AllowOpportunistic": kwargs.get("AllowOpportunistic", False)},
                           useBody=True)
def assignWorkload(self, requestName, kwargs):
    """
    Make all the necessary changes in the Workload to reflect the new assignment.

    Loads the workload spec for ``requestName``, validates processed-dataset
    name parts and site lists, checks the output datatiers against DBS,
    applies subscription and block-closing settings from ``kwargs``, saves the
    spec, and mirrors the assigned fields into the request's Couch document.

    :param requestName: name of the request being assigned
    :param kwargs: dict of assignment arguments (mostly web form fields)
    :raises cherrypy.HTTPError: 400 on any validation failure
    """
    request = GetRequest.getRequestByName(requestName)
    helper = Utilities.loadWorkload(request)
    # Validate the different parts of the processed dataset
    processedDatasetParts = ["AcquisitionEra", "ProcessingVersion"]
    if kwargs.get("ProcessingString", None):
        processedDatasetParts.append("ProcessingString")
    for field in processedDatasetParts:
        # each part may be a single value or a per-task dict of values
        if type(kwargs[field]) == dict:
            for value in kwargs[field].values():
                self.validate(value, field)
        else:
            self.validate(kwargs[field], field)
    # Set white list and black list
    whiteList = kwargs.get("SiteWhitelist", [])
    blackList = kwargs.get("SiteBlacklist", [])
    # a single site arrives as a bare string from the form; normalize to list
    if type(whiteList) != list:
        whiteList = [whiteList]
    if type(blackList) != list:
        blackList = [blackList]
    helper.setSiteWildcardsLists(siteWhitelist=whiteList, siteBlacklist=blackList,
                                 wildcardDict=self.wildcardSites)
    # a site may not appear in both lists at once
    res = set(whiteList) & set(blackList)
    if len(res):
        raise cherrypy.HTTPError(
            400, "White and blacklist the same site is not allowed %s" % list(res))
    # Set ProcessingVersion and AcquisitionEra, which could be json encoded dicts
    helper.setProcessingVersion(kwargs["ProcessingVersion"])
    helper.setAcquisitionEra(kwargs["AcquisitionEra"])
    helper.setProcessingString(kwargs.get("ProcessingString", None))
    # Now verify the output datasets
    datatier = []
    outputDatasets = helper.listOutputDatasets()
    for dataset in outputDatasets:
        # dataset format: /primary/processed/tier
        tokens = dataset.split("/")
        procds = tokens[2]
        datatier.append(tokens[3])
        try:
            WMCore.Lexicon.procdataset(procds)
        except AssertionError as ex:
            raise cherrypy.HTTPError(
                400, "Bad output dataset name, check the processed dataset.\n %s" % str(ex))
    # Verify whether the output datatiers are available in DBS
    self.validateDatatier(datatier, dbsUrl=helper.getDbsUrl())
    #FIXME not validated
    helper.setLFNBase(kwargs["MergedLFNBase"], kwargs["UnmergedLFNBase"])
    helper.setMergeParameters(int(kwargs.get("MinMergeSize", 2147483648)),
                              int(kwargs.get("MaxMergeSize", 4294967296)),
                              int(kwargs.get("MaxMergeEvents", 50000)))
    helper.setupPerformanceMonitoring(kwargs.get("MaxRSS", None),
                                      kwargs.get("MaxVSize", None),
                                      kwargs.get("SoftTimeout", None),
                                      kwargs.get("GracePeriod", None))
    # Check whether we should check location for the data
    useAAA = strToBool(kwargs.get("useSiteListAsLocation", False))
    if useAAA:
        helper.setLocationDataSourceFlag(flag=useAAA)
    # Set phedex subscription information
    custodialList = kwargs.get("CustodialSites", [])
    nonCustodialList = kwargs.get("NonCustodialSites", [])
    autoApproveList = kwargs.get("AutoApproveSubscriptionSites", [])
    for site in autoApproveList:
        # tape (MSS) endpoints must never be auto-approved
        if site.endswith('_MSS'):
            raise cherrypy.HTTPError(
                400, "Auto-approval to MSS endpoint not allowed %s" % autoApproveList)
    subscriptionPriority = kwargs.get("SubscriptionPriority", "Low")
    if subscriptionPriority not in ["Low", "Normal", "High"]:
        raise cherrypy.HTTPError(
            400, "Invalid subscription priority %s" % subscriptionPriority)
    custodialType = kwargs.get("CustodialSubType", "Replica")
    if custodialType not in ["Move", "Replica"]:
        raise cherrypy.HTTPError(
            400, "Invalid custodial subscription type %s" % custodialType)
    nonCustodialType = kwargs.get("NonCustodialSubType", "Replica")
    if nonCustodialType not in ["Move", "Replica"]:
        raise cherrypy.HTTPError(
            400, "Invalid noncustodial subscription type %s" % nonCustodialType)
    helper.setSubscriptionInformationWildCards(
        wildcardDict=self.wildcardSites,
        custodialSites=custodialList,
        nonCustodialSites=nonCustodialList,
        autoApproveSites=autoApproveList,
        custodialSubType=custodialType,
        nonCustodialSubType=nonCustodialType,
        priority=subscriptionPriority)
    # Block closing information; form values override the spec's current settings
    blockCloseMaxWaitTime = int(
        kwargs.get("BlockCloseMaxWaitTime", helper.getBlockCloseMaxWaitTime()))
    blockCloseMaxFiles = int(
        kwargs.get("BlockCloseMaxFiles", helper.getBlockCloseMaxFiles()))
    blockCloseMaxEvents = int(
        kwargs.get("BlockCloseMaxEvents", helper.getBlockCloseMaxEvents()))
    blockCloseMaxSize = int(
        kwargs.get("BlockCloseMaxSize", helper.getBlockCloseMaxSize()))
    helper.setBlockCloseSettings(blockCloseMaxWaitTime, blockCloseMaxFiles,
                                 blockCloseMaxEvents, blockCloseMaxSize)
    helper.setDashboardActivity(kwargs.get("Dashboard", ""))
    # set Task properties if they are exist
    # TODO: need to define the task format (maybe kwargs["tasks"]?)
    helper.setTaskProperties(kwargs)
    Utilities.saveWorkload(helper, request['RequestWorkflow'], self.wmstatWriteURL)
    # update AcquisitionEra in the Couch document (#4380)
    # request object returned above from Oracle doesn't have information Couch
    # database
    reqDetails = Utilities.requestDetails(request["RequestName"])
    couchDb = Database(reqDetails["CouchWorkloadDBName"], reqDetails["CouchURL"])
    couchDb.updateDocument(request["RequestName"], "ReqMgr", "updaterequest",
                           fields={
                               "AcquisitionEra": reqDetails["AcquisitionEra"],
                               "ProcessingVersion": reqDetails["ProcessingVersion"],
                               "CustodialSites": custodialList,
                               "NonCustodialSites": nonCustodialList,
                               "AutoApproveSubscriptionSites": autoApproveList,
                               "SubscriptionPriority": subscriptionPriority,
                               "CustodialSubType": custodialType,
                               "NonCustodialSubType": nonCustodialType,
                               "Teams": kwargs["Teams"],
                               "OutputDatasets": outputDatasets,
                               "SiteWhitelist": whiteList,
                               "SiteBlacklist": blackList
                           },
                           useBody=True)
def assignWorkload(self, requestName, kwargs):
    """
    Make all the necessary changes in the Workload to reflect the new assignment.

    Loads the workload spec for ``requestName``, validates processed-dataset
    name parts, applies site lists, merge/performance parameters, PhEDEx
    subscription and block-closing settings from ``kwargs``, saves the spec,
    and updates the AcquisitionEra in the request's Couch document.

    :param requestName: name of the request being assigned
    :param kwargs: dict of assignment arguments (mostly web form fields)
    :raises cherrypy.HTTPError: 400 on any validation failure
    """
    request = GetRequest.getRequestByName(requestName)
    helper = Utilities.loadWorkload(request)
    # Validate the different parts of the processed dataset
    processedDatasetParts = ["AcquisitionEra", "ProcessingVersion"]
    if kwargs.get("ProcessingString", None):
        processedDatasetParts.append("ProcessingString")
    for field in processedDatasetParts:
        # each part may be a single value or a per-task dict of values
        if type(kwargs[field]) == dict:
            for value in kwargs[field].values():
                self.validate(value, field)
        else:
            self.validate(kwargs[field], field)
    # Set white list and black list
    whiteList = kwargs.get("SiteWhitelist", [])
    blackList = kwargs.get("SiteBlacklist", [])
    helper.setSiteWildcardsLists(siteWhitelist=whiteList, siteBlacklist=blackList,
                                 wildcardDict=self.wildcardSites)
    # Set ProcessingVersion and AcquisitionEra, which could be json encoded dicts
    helper.setProcessingVersion(kwargs["ProcessingVersion"])
    helper.setAcquisitionEra(kwargs["AcquisitionEra"])
    helper.setProcessingString(kwargs.get("ProcessingString", None))
    # FIXME not validated
    helper.setLFNBase(kwargs["MergedLFNBase"], kwargs["UnmergedLFNBase"])
    helper.setMergeParameters(
        int(kwargs.get("MinMergeSize", 2147483648)),
        int(kwargs.get("MaxMergeSize", 4294967296)),
        int(kwargs.get("MaxMergeEvents", 50000)),
    )
    helper.setupPerformanceMonitoring(
        int(kwargs.get("maxRSS", 2411724)),
        int(kwargs.get("maxVSize", 2411724)),
        int(kwargs.get("SoftTimeout", 129600)),
        int(kwargs.get("GracePeriod", 300)),
    )
    # Check whether we should check location for the data
    if "useSiteListAsLocation" in kwargs:
        helper.setLocationDataSourceFlag()
    # Set phedex subscription information
    custodialList = kwargs.get("CustodialSites", [])
    nonCustodialList = kwargs.get("NonCustodialSites", [])
    autoApproveList = kwargs.get("AutoApproveSubscriptionSites", [])
    subscriptionPriority = kwargs.get("SubscriptionPriority", "Low")
    if subscriptionPriority not in ["Low", "Normal", "High"]:
        raise cherrypy.HTTPError(400, "Invalid subscription priority")
    subscriptionType = kwargs.get("CustodialSubType", "Move")
    if subscriptionType not in ["Move", "Replica"]:
        raise cherrypy.HTTPError(400, "Invalid custodial subscription type")
    helper.setSubscriptionInformationWildCards(
        wildcardDict=self.wildcardSites,
        custodialSites=custodialList,
        nonCustodialSites=nonCustodialList,
        autoApproveSites=autoApproveList,
        custodialSubType=subscriptionType,
        priority=subscriptionPriority,
    )
    # Block closing information; form values override the spec's current settings
    blockCloseMaxWaitTime = int(kwargs.get("BlockCloseMaxWaitTime", helper.getBlockCloseMaxWaitTime()))
    blockCloseMaxFiles = int(kwargs.get("BlockCloseMaxFiles", helper.getBlockCloseMaxFiles()))
    blockCloseMaxEvents = int(kwargs.get("BlockCloseMaxEvents", helper.getBlockCloseMaxEvents()))
    blockCloseMaxSize = int(kwargs.get("BlockCloseMaxSize", helper.getBlockCloseMaxSize()))
    helper.setBlockCloseSettings(blockCloseMaxWaitTime, blockCloseMaxFiles,
                                 blockCloseMaxEvents, blockCloseMaxSize)
    helper.setDashboardActivity(kwargs.get("dashboard", ""))
    Utilities.saveWorkload(helper, request["RequestWorkflow"], self.wmstatWriteURL)
    # update AcquisitionEra in the Couch document (#4380)
    # request object returned above from Oracle doesn't have information Couch
    # database
    reqDetails = Utilities.requestDetails(request["RequestName"])
    couchDb = Database(reqDetails["CouchWorkloadDBName"], reqDetails["CouchURL"])
    couchDb.updateDocument(
        request["RequestName"], "ReqMgr", "updaterequest",
        fields={"AcquisitionEra": reqDetails["AcquisitionEra"]}
    )
def getRequest(requestId, reverseTypes=None, reverseStatus=None):
    """
    _getRequest_

    Look up a request by its Oracle id and build a
    ReqMgr.DataStructs.Request instance from the database rows plus a few
    fields read out of the workload spec.

    reverseTypes/reverseStatus are the reverse lookup maps for request type
    and status codes; when either is missing both are fetched via
    reverseLookups().
    """
    dbFactory = DBConnect.getConnection()
    row = dbFactory(classname="Request.Get").execute(requestId)
    reqName = row['request_name']
    if not reverseTypes or not reverseStatus:
        reverseTypes, reverseStatus = reverseLookups()
    # group and requestor are both keyed off the requestor/group association id
    groupRow = dbFactory(classname="Group.GetGroupFromAssoc").execute(row['requestor_group_id'])
    userRow = dbFactory(classname="Requestor.GetUserFromAssoc").execute(row['requestor_group_id'])
    result = Request()
    result["RequestName"] = reqName
    result["RequestType"] = reverseTypes[row['request_type']]
    result["RequestStatus"] = reverseStatus[row['request_status']]
    result["RequestPriority"] = row['request_priority']
    result["RequestWorkflow"] = row['workflow']
    result["RequestNumEvents"] = row['request_num_events']
    result["RequestSizeFiles"] = row['request_size_files']
    # RequestEventSize was superseded by SizePerEvent, hence the name
    # mismatch against the Oracle column
    result["SizePerEvent"] = row['request_event_size']
    result["PrepID"] = row['prep_id']
    result["Group"] = groupRow['group_name']
    result["Requestor"] = userRow['requestor_hn_name']
    progress = getProgress(reqName)
    result['percent_complete'], result['percent_success'] = percentages(progress)
    versions = dbFactory(classname="Software.GetByAssoc").execute(requestId)
    # an empty mapping means the software association was never recorded
    result['SoftwareVersions'] = ['DEPRECATED'] if versions == {} else versions.values()
    inputs = dbFactory(classname="Datasets.GetInput").execute(requestId)
    outputs = dbFactory(classname="Datasets.GetOutput").execute(requestId)
    result['InputDatasetTypes'] = inputs
    result['InputDatasets'] = inputs.keys()
    result['OutputDatasets'] = outputs
    # AcquisitionEra / ProcessingVersion / ProcessingString live only in the
    # spec, not in Oracle, so pull them from the workload (#4561); imported
    # here to avoid a circular module dependency
    import WMCore.HTTPFrontEnd.RequestManager.ReqMgrWebTools as Utilities
    try:
        spec = Utilities.loadWorkload(result)
        result["AcquisitionEra"] = str(spec.getAcquisitionEra())
        result["ProcessingVersion"] = str(spec.getProcessingVersion())
        result["ProcessingString"] = str(spec.getProcessingString())
    except Exception as ex:
        logging.error("Could not check workload for %s, reason: %s",
                      result["RequestName"], ex)
    return result