def __init__(self, name=None, parent=None, obj=None):
    """Set up the FAX summary module and publish its query entry point.

    Registers the FAX counter variables, fetches the site/cloud maps and
    the jobsarchived4 schema, then publishes doQuery as the UI handler.
    """
    pmModule.__init__(self, name, parent, obj)
    # Per-site FAX transfer counters reported by this module.
    self._faxVariables = 'WithFAX WithoutFAX bytesWithFAX bytesWithoutFAX timeToCopy'.split()
    # Only the site list and the cloud map are kept; names/nicks are unused here.
    self._sites, self._cloud4sites, _names, _nicks = pmt.getSiteInfo('region')
    self._description, params = utils.normalizeDbSchema(pmt, 'jobsarchived4')
    self.publishUI(self.doQuery, params=params)
def __init__(self, name=None, parent=None, obj=None):
    """Set up the job-list module and publish its JSON entry point.

    Loads the jobsarchived4 schema, declares the job columns and summary
    categories, resolves site/region maps and error-code tables, then
    publishes doJson as the UI handler (must be the last init statement).
    """
    pmModule.__init__(self, name, parent, obj)
    self._description, params = utils.normalizeDbSchema(pmt, 'jobsarchived4')
    self._jobParametersName = 'jobParameters'
    self._allparams = ','.join(params.keys())
    # Columns selected from jobsarchived4.
    # FIX: the original used a backslash string continuation with no space
    # after 'parentID', fusing it with 'pilotErrorDiag' into one bad token;
    # adjacent-string concatenation below keeps every column name intact.
    self._colNames = ('jobStatus prodUserID prodUserName nInputFiles AtlasRelease transformation '
                      'homepackage prodDBlock destinationDBlock destinationSE computingSite creationTime startTime endTime '
                      'modificationTime jobName attemptNr prodSourceLabel jobDefinitionID jobsetID taskID pilotID '
                      'schedulerID pilotErrorCode workingGroup creationHost parentID '
                      'pilotErrorDiag ddmErrorCode ddmErrorDiag jobDispatcherErrorCode jobDispatcherErrorDiag taskBufferErrorCode '
                      'taskBufferErrorDiag brokerageErrorCode brokerageErrorDiag exeErrorCode exeErrorDiag supErrorCode supErrorDiag '
                      'transExitCode currentPriority cloud countryGroup processingType VO JediTaskID').split()
    # Summary categories as [display title, column name] pairs.
    # FIX: the original listed ['Jedi ID', 'JediTaskID'] twice; the
    # duplicate is removed here.
    self._jobSum = [
        ['States', 'jobStatus'],
        ['Users', 'prodUserName'],
        ['Releases', 'AtlasRelease'],
        ['Processing Types', 'processingType'],
        ['Job Types', 'prodSourceLabel'],
        ['Transformations', 'transformation'],
        ['Working Groups', 'workingGroup'],
        ['Creation Hosts', 'creationHost'],
        ['Sites', 'computingSite'],
        ['Clouds', 'cloud'],
        ['Jedi ID', 'JediTaskID'],
    ]
    self._errorFields, self._errorCodes, self._errorStages = errorcodes.getErrorCodes()
    # Derived timing fields computed per job, not stored in the table.
    self._extraFields = ['tostart', 'duration', 'endt', 'transfert']
    self._region4sites, self._site4regions = setutils.region4Sites(pmt)
    self._siteId, self._siteRegions, self._siteNames, self._siteNicks = pmt.getSiteInfo('region')
    # Parameter aliases whose values are masked in the UI.
    self._alias = {'username': '******'}
    for a in self._alias:
        params[a] = None
    # 'publishUI' must be the last init statement!
    self.publishUI(self.doJson, params=params)
def region4Sites(self, pmt):
    """Build the site<->region (cloud) lookup maps.

    Args:
        pmt: provider object whose getSiteInfo() returns a mapping of
             site key -> dict with at least 'sitename' and 'cloud' keys.

    Returns:
        (region4sites, site4regions) where region4sites maps each
        sitename to its cloud, and site4regions maps each cloud to the
        list of sitenames in it.
    """
    sites = pmt.getSiteInfo()
    region4sites = {}
    site4regions = {}
    # .items() works on both Python 2 and 3; the original .iteritems()
    # is Python-2-only and breaks under Python 3.
    for site, info in sites.items():
        sitename = info['sitename']
        cloud = info['cloud']
        region4sites[sitename] = cloud
        # setdefault avoids re-allocating the list on every append.
        site4regions.setdefault(cloud, []).append(sitename)
    return region4sites, site4regions
def __init__(self, name=None, parent=None, obj=None):
    """Set up the job-list module and publish its JSON entry point.

    Loads the jobsarchived4 schema, declares the job columns and summary
    categories, resolves site/region maps and error-code tables, then
    publishes doJson as the UI handler (must be the last init statement).
    """
    pmModule.__init__(self, name, parent, obj)
    self._description, params = utils.normalizeDbSchema(pmt, "jobsarchived4")
    self._jobParametersName = "jobParameters"
    self._allparams = ",".join(params.keys())
    # Columns selected from jobsarchived4.
    # FIX: the original used a backslash string continuation with no space
    # after 'parentID', fusing it with 'pilotErrorDiag' into one bad token;
    # adjacent-string concatenation below keeps every column name intact.
    self._colNames = ("jobStatus prodUserID prodUserName nInputFiles AtlasRelease transformation "
                      "homepackage prodDBlock destinationDBlock destinationSE computingSite creationTime startTime endTime "
                      "modificationTime jobName attemptNr prodSourceLabel jobDefinitionID jobsetID taskID pilotID "
                      "schedulerID pilotErrorCode workingGroup creationHost parentID "
                      "pilotErrorDiag ddmErrorCode ddmErrorDiag jobDispatcherErrorCode jobDispatcherErrorDiag taskBufferErrorCode "
                      "taskBufferErrorDiag brokerageErrorCode brokerageErrorDiag exeErrorCode exeErrorDiag supErrorCode supErrorDiag "
                      "transExitCode currentPriority cloud countryGroup processingType VO JediTaskID").split()
    # Summary categories as [display title, column name] pairs.
    # FIX: the original listed ["Jedi ID", "JediTaskID"] twice; the
    # duplicate is removed here.
    self._jobSum = [
        ["States", "jobStatus"],
        ["Users", "prodUserName"],
        ["Releases", "AtlasRelease"],
        ["Processing Types", "processingType"],
        ["Job Types", "prodSourceLabel"],
        ["Transformations", "transformation"],
        ["Working Groups", "workingGroup"],
        ["Creation Hosts", "creationHost"],
        ["Sites", "computingSite"],
        ["Clouds", "cloud"],
        ["Jedi ID", "JediTaskID"],
    ]
    self._errorFields, self._errorCodes, self._errorStages = errorcodes.getErrorCodes()
    # Derived timing fields computed per job, not stored in the table.
    self._extraFields = ["tostart", "duration", "endt", "transfert"]
    self._region4sites, self._site4regions = setutils.region4Sites(pmt)
    self._siteId, self._siteRegions, self._siteNames, self._siteNicks = pmt.getSiteInfo("region")
    # Parameter aliases whose values are masked in the UI.
    self._alias = {"username": "******"}
    for a in self._alias:
        params[a] = None
    # 'publishUI' must be the last init statement!
    self.publishUI(self.doJson, params=params)
def __init__(self, name=None, parent=None, obj=None):
    """Initialise the FAX summary module.

    Declares the FAX counter names, resolves the site/cloud mappings and
    the jobsarchived4 schema, then registers doQuery with the UI.
    """
    pmModule.__init__(self, name, parent, obj)
    # Counters describing transfers done with and without FAX.
    fax_counters = 'WithFAX WithoutFAX bytesWithFAX bytesWithoutFAX timeToCopy'
    self._faxVariables = fax_counters.split()
    site_info = pmt.getSiteInfo('region')
    # Only the first two elements (sites, cloud map) are retained.
    self._sites = site_info[0]
    self._cloud4sites = site_info[1]
    self._description, params = utils.normalizeDbSchema(pmt, 'jobsarchived4')
    self.publishUI(self.doQuery, params=params)