# Example #1
# 0
    def sendAccountingRecord(self, job, msg, classAdJob):
        """
        Send an accounting record for the failed job.

        :param job: job identifier (kept for interface compatibility; not used here)
        :param msg: final minor status message explaining the failure
        :param classAdJob: ClassAd object holding the job description attributes
        :return: result of committing the accounting record
        """
        accountingReport = AccountingJob()
        accountingReport.setStartTime()
        accountingReport.setEndTime()

        owner = classAdJob.getAttributeString("Owner")
        userGroup = classAdJob.getAttributeString("OwnerGroup")
        jobGroup = classAdJob.getAttributeString("JobGroup")
        jobType = classAdJob.getAttributeString("JobType")

        # Optional attributes fall back to defaults when absent from the ClassAd.
        jobClass = "unknown"
        if classAdJob.lookupAttribute("JobSplitType"):
            jobClass = classAdJob.getAttributeString("JobSplitType")
        processingType = "unknown"
        if classAdJob.lookupAttribute("ProcessingType"):
            processingType = classAdJob.getAttributeString("ProcessingType")
        inputData = []
        if classAdJob.lookupAttribute("InputData"):
            inputData = classAdJob.getListFromExpression("InputData")
        outputData = []
        if classAdJob.lookupAttribute("OutputData"):
            outputData = classAdJob.getListFromExpression("OutputData")

        # The job never ran, so all resource-usage figures are reported as zero.
        acData = {
            "User": owner,
            "UserGroup": userGroup,
            "JobGroup": jobGroup,
            "JobType": jobType,
            "JobClass": jobClass,
            "ProcessingType": processingType,
            "FinalMajorStatus": self.failedStatus,
            "FinalMinorStatus": msg,
            "CPUTime": 0.0,
            "NormCPUTime": 0.0,
            "ExecTime": 0.0,
            "InputDataSize": 0.0,
            "OutputDataSize": 0.0,
            "InputDataFiles": len(inputData),
            "OutputDataFiles": len(outputData),
            "DiskSpace": 0.0,
            "InputSandBoxSize": 0.0,
            "OutputSandBoxSize": 0.0,
            "ProcessedEvents": 0.0,
        }

        self.log.debug("Accounting Report is:")
        self.log.debug(acData)
        accountingReport.setValuesFromDict(acData)
        return accountingReport.commit()
# Example #2
# 0
    def sendAccountingRecord(self, job, msg, classAdJob):
        """Create and commit an accounting record describing a failed job."""
        report = AccountingJob()
        report.setStartTime()
        report.setEndTime()

        # Optional ClassAd attributes default to 'unknown' / empty lists.
        jobClass = 'unknown'
        if classAdJob.lookupAttribute('JobSplitType'):
            jobClass = classAdJob.getAttributeString('JobSplitType')
        processingType = 'unknown'
        if classAdJob.lookupAttribute('ProcessingType'):
            processingType = classAdJob.getAttributeString('ProcessingType')
        inputData = []
        if classAdJob.lookupAttribute('InputData'):
            inputData = classAdJob.getListFromExpression('InputData')
        outputData = []
        if classAdJob.lookupAttribute('OutputData'):
            outputData = classAdJob.getListFromExpression('OutputData')

        # All resource-usage figures are zero: the job failed before running.
        acData = {
            'User': classAdJob.getAttributeString('Owner'),
            'UserGroup': classAdJob.getAttributeString('OwnerGroup'),
            'JobGroup': classAdJob.getAttributeString('JobGroup'),
            'JobType': classAdJob.getAttributeString('JobType'),
            'JobClass': jobClass,
            'ProcessingType': processingType,
            'FinalMajorStatus': self.failedStatus,
            'FinalMinorStatus': msg,
            'CPUTime': 0.0,
            'NormCPUTime': 0.0,
            'ExecTime': 0.0,
            'InputDataSize': 0.0,
            'OutputDataSize': 0.0,
            'InputDataFiles': len(inputData),
            'OutputDataFiles': len(outputData),
            'DiskSpace': 0.0,
            'InputSandBoxSize': 0.0,
            'OutputSandBoxSize': 0.0,
            'ProcessedEvents': 0.0
        }

        self.log.debug('Accounting Report is:')
        self.log.debug(acData)
        report.setValuesFromDict(acData)
        return report.commit()
# Example #3
# 0
  def sendAccountingRecord(self, job, msg, classAdJob):
    """
    Send an accounting record for the failed job.

    :param job: job identifier (unused here, kept for interface compatibility)
    :param msg: final minor status message explaining the failure
    :param classAdJob: ClassAd with the job description attributes
    :return: result of committing the accounting record
    """
    accountingReport = AccountingJob()
    accountingReport.setStartTime()
    accountingReport.setEndTime()

    owner = classAdJob.getAttributeString('Owner')
    userGroup = classAdJob.getAttributeString('OwnerGroup')
    jobGroup = classAdJob.getAttributeString('JobGroup')
    jobType = classAdJob.getAttributeString('JobType')
    # Optional attributes: fall back to defaults when absent from the ClassAd
    jobClass = 'unknown'
    if classAdJob.lookupAttribute('JobSplitType'):
      jobClass = classAdJob.getAttributeString('JobSplitType')
    inputData = []
    processingType = 'unknown'
    if classAdJob.lookupAttribute('ProcessingType'):
      processingType = classAdJob.getAttributeString('ProcessingType')
    if classAdJob.lookupAttribute('InputData'):
      inputData = classAdJob.getListFromExpression('InputData')
    inputDataFiles = len(inputData)
    outputData = []
    if classAdJob.lookupAttribute('OutputData'):
      outputData = classAdJob.getListFromExpression('OutputData')
    outputDataFiles = len(outputData)

    # The job never ran, so all resource-usage figures are reported as zero
    acData = {
        'User': owner,
        'UserGroup': userGroup,
        'JobGroup': jobGroup,
        'JobType': jobType,
        'JobClass': jobClass,
        'ProcessingType': processingType,
        'FinalMajorStatus': self.failedStatus,
        'FinalMinorStatus': msg,
        'CPUTime': 0.0,
        'NormCPUTime': 0.0,
        'ExecTime': 0.0,
        'InputDataSize': 0.0,
        'OutputDataSize': 0.0,
        'InputDataFiles': inputDataFiles,
        'OutputDataFiles': outputDataFiles,
        'DiskSpace': 0.0,
        'InputSandBoxSize': 0.0,
        'OutputSandBoxSize': 0.0,
        'ProcessedEvents': 0.0
    }

    self.log.verbose('Accounting Report is:')
    self.log.verbose(acData)
    accountingReport.setValuesFromDict(acData)
    return accountingReport.commit()
    def sendJobAccounting(self, dataFromBDSoft, jobId):
        """
        Build and commit an accounting record for a finished job.

        :param dataFromBDSoft: dict of resource figures from the batch system
            (keys used: CPUTime [milliseconds], InputDataSize, OutputDataSize,
            InputDataFiles)
        :param jobId: id of the job whose attributes are read from the JobDB
        :return: result structure returned by AccountingJob.commit()
        """
        accountingReport = AccountingJob()
        accountingReport.setStartTime()

        # NOTE(review): result['OK'] is not checked before using result['Value'];
        # a failed JobDB query would raise here -- confirm this is acceptable.
        result = jobDB.getJobAttributes(jobId)
        getting = result["Value"]
        if dataFromBDSoft["CPUTime"] == 0:
            # No CPU time reported: approximate it with the wall-clock span
            # between submission and end of execution, when available.
            # timedelta.total_seconds() equals the former manual
            # (microseconds + (seconds + days*86400)*10**6)/1e6 computation.
            cpuTime = 0
            if getting["EndExecTime"] != "None":
                epoch = datetime(1970, 1, 1)
                fmt = "%Y-%m-%d %H:%M:%S"
                EndExecTime = (datetime.strptime(getting["EndExecTime"], fmt) - epoch).total_seconds()
                SubmissionTime = (datetime.strptime(getting["SubmissionTime"], fmt) - epoch).total_seconds()
                cpuTime = EndExecTime - SubmissionTime
        else:
            # The batch system reports CPU time in milliseconds.
            cpuTime = dataFromBDSoft["CPUTime"] / 1000

        acData = {
            "User": getting["Owner"],
            "UserGroup": getting["OwnerGroup"],
            # NOTE(review): site-specific hard-coded values -- confirm intended.
            "JobGroup": "cesga",
            "JobType": "User",
            "JobClass": "unknown",
            "ProcessingType": "unknown",
            "FinalMajorStatus": getting["Status"],
            "FinalMinorStatus": getting["MinorStatus"],
            "CPUTime": cpuTime,
            "Site": getting["Site"],
            # Based on the factor to convert raw CPU to Normalized units (based on the CPU Model)
            "NormCPUTime": 0,
            "ExecTime": cpuTime,
            "InputDataSize": dataFromBDSoft["InputDataSize"],
            "OutputDataSize": dataFromBDSoft["OutputDataSize"],
            "InputDataFiles": dataFromBDSoft["InputDataFiles"],
            "OutputDataFiles": len(self.fileList),
            "DiskSpace": 0,
            "InputSandBoxSize": 0,
            "OutputSandBoxSize": self.outputSandboxSize,
            "ProcessedEvents": 0,
        }
        accountingReport.setEndTime()
        accountingReport.setValuesFromDict(acData)
        self.log.debug("Info for accounting: ", acData)
        result = accountingReport.commit()
        self.log.debug("Accounting insertion: ", result)
        return result
# Example #5
# 0
class JobPlotter(BaseReporter):
    """
    Plot generator for the Job accounting type.

    Each `_reportX`/`_plotX` method pair implements one report: `_reportX`
    aggregates the bucketed accounting data, `_plotX` renders the plot.
    """

    # Accounting type handled by this plotter.
    _typeName = "Job"
    # Key-field names of the Job accounting type, used for grouping.
    _typeKeyFields = [dF[0] for dF in Job().definitionKeyFields]

    def _translateGrouping(self, grouping):
        """Map a logical grouping name to its SQL expression and field list."""
        if grouping == "Country":
            # Country code = suffix after the last dot of the site name
            # (searched within the final 4 characters).
            countryExpr = 'upper( substring( %s, locate( ".", %s, length( %s ) - 4 ) + 1 ) )'
            return (countryExpr, ['Site', 'Site', 'Site'], countryExpr)
        if grouping == "Grid":
            # Grid = leading component of the dotted site name.
            return ('substring_index( %s, ".", 1 )', ['Site'])
        return ("%s", [grouping])

    _reportCPUEfficiencyName = "CPU efficiency"

    def _reportCPUEfficiency(self, reportRequest):
        """
        Compute CPU efficiency (CPUTime vs ExecTime) per group over time.

        Fix: the grand-total query passed a misspelled 'scheckNone' option
        key, so the None-check was silently skipped for the 'Total' line;
        it now passes 'checkNone' like the per-group query.
        """
        selectFields = (
            self._getSelectStringForGrouping(reportRequest['groupingFields']) +
            ", %s, %s, SUM(%s), SUM(%s)", reportRequest['groupingFields'][1] +
            ['startTime', 'bucketLength', 'CPUTime', 'ExecTime'])

        retVal = self._getTimedData(
            reportRequest['startTime'], reportRequest['endTime'], selectFields,
            reportRequest['condDict'], reportRequest['groupingFields'], {
                'checkNone': True,
                'convertToGranularity': 'sum',
                'calculateProportionalGauges': False,
                'consolidationFunction': self._efficiencyConsolidation
            })
        if not retVal['OK']:
            return retVal
        dataDict, granularity = retVal['Value']
        self.stripDataField(dataDict, 0)
        if len(dataDict) > 1:
            # Add an aggregated 'Total' line when there is more than one group.
            selectFields = ("'Total', %s, %s, SUM(%s),SUM(%s)", [
                'startTime', 'bucketLength', 'CPUTime', 'ExecTime'
            ])

            retVal = self._getTimedData(
                reportRequest['startTime'], reportRequest['endTime'],
                selectFields, reportRequest['condDict'],
                reportRequest['groupingFields'], {
                    'checkNone': True,
                    'convertToGranularity': 'sum',
                    'calculateProportionalGauges': False,
                    'consolidationFunction': self._efficiencyConsolidation
                })
            if not retVal['OK']:
                return retVal
            totalDict = retVal['Value'][0]
            self.stripDataField(totalDict, 0)
            for key in totalDict:
                dataDict[key] = totalDict[key]
        return S_OK({'data': dataDict, 'granularity': granularity})

    def _plotCPUEfficiency(self, reportRequest, plotInfo, filename):
        """Render the CPU efficiency quality plot."""
        metadata = dict(
            title='Job CPU efficiency by %s' % reportRequest['grouping'],
            starttime=reportRequest['startTime'],
            endtime=reportRequest['endTime'],
            span=plotInfo['granularity'])
        return self._generateQualityPlot(filename, plotInfo['data'], metadata)

    _reportCPUUsedName = "Cumulative CPU time"

    def _reportCPUUsed(self, reportRequest):
        """Accumulate consumed CPU time per group over the requested period."""
        startT = reportRequest['startTime']
        endT = reportRequest['endTime']
        groupingFields = reportRequest['groupingFields']
        selectFields = (
            self._getSelectStringForGrouping(groupingFields) + ", %s, %s, SUM(%s)",
            groupingFields[1] + ['startTime', 'bucketLength', 'CPUTime'])
        result = self._getTimedData(startT, endT, selectFields,
                                    reportRequest['condDict'], groupingFields, {})
        if not result['OK']:
            return result
        data, granularity = result['Value']
        self.stripDataField(data, 0)
        # Pad empty buckets, then integrate into a cumulative curve.
        data = self._fillWithZero(granularity, startT, endT, data)
        data = self._accumulate(granularity, startT, endT, data)
        baseData, graphData, _maxValue, unitName = self._findSuitableUnit(
            data, self._getAccumulationMaxValue(data), "time")
        return S_OK({'data': baseData,
                     'graphDataDict': graphData,
                     'granularity': granularity,
                     'unit': unitName})

    def _plotCPUUsed(self, reportRequest, plotInfo, filename):
        """Render the cumulative CPU time plot."""
        metadata = dict(
            title='CPU used by %s' % reportRequest['grouping'],
            starttime=reportRequest['startTime'],
            endtime=reportRequest['endTime'],
            span=plotInfo['granularity'],
            ylabel=plotInfo['unit'],
            sort_labels='max_value')
        return self._generateCumulativePlot(filename, plotInfo['graphDataDict'], metadata)

    _reportCPUUsageName = "CPU time"

    def _reportCPUUsage(self, reportRequest):
        """Report the CPU time consumption rate per group over time."""
        startT = reportRequest['startTime']
        endT = reportRequest['endTime']
        groupingFields = reportRequest['groupingFields']
        selectFields = (
            self._getSelectStringForGrouping(groupingFields) + ", %s, %s, SUM(%s)",
            groupingFields[1] + ['startTime', 'bucketLength', 'CPUTime'])
        result = self._getTimedData(startT, endT, selectFields,
                                    reportRequest['condDict'], groupingFields, {})
        if not result['OK']:
            return result
        data, granularity = result['Value']
        self.stripDataField(data, 0)
        # Convert bucket sums to rates, then pad missing buckets with zeros.
        data, _maxValue = self._divideByFactor(data, granularity)
        data = self._fillWithZero(granularity, startT, endT, data)
        baseData, graphData, _maxValue, unitName = self._findSuitableRateUnit(
            data, self._getAccumulationMaxValue(data), "time")
        return S_OK({'data': baseData,
                     'graphDataDict': graphData,
                     'granularity': granularity,
                     'unit': unitName})

    def _plotCPUUsage(self, reportRequest, plotInfo, filename):
        """Render the CPU usage stacked-line plot."""
        metadata = dict(
            title='CPU usage by %s' % reportRequest['grouping'],
            starttime=reportRequest['startTime'],
            endtime=reportRequest['endTime'],
            span=plotInfo['granularity'],
            ylabel=plotInfo['unit'])
        return self._generateStackedLinePlot(filename, plotInfo['graphDataDict'], metadata)

    _reportNormCPUUsedName = "Cumulative Normalized CPU"

    def _reportNormCPUUsed(self, reportRequest):
        """Accumulate normalized CPU time per group over the requested period."""
        startT = reportRequest['startTime']
        endT = reportRequest['endTime']
        groupingFields = reportRequest['groupingFields']
        selectFields = (
            self._getSelectStringForGrouping(groupingFields) + ", %s, %s, SUM(%s)",
            groupingFields[1] + ['startTime', 'bucketLength', 'NormCPUTime'])
        result = self._getTimedData(startT, endT, selectFields,
                                    reportRequest['condDict'], groupingFields, {})
        if not result['OK']:
            return result
        data, granularity = result['Value']
        self.stripDataField(data, 0)
        # Pad empty buckets, then integrate into a cumulative curve.
        data = self._fillWithZero(granularity, startT, endT, data)
        data = self._accumulate(granularity, startT, endT, data)
        baseData, graphData, _maxValue, unitName = self._findSuitableUnit(
            data, self._getAccumulationMaxValue(data), "cpupower")
        return S_OK({'data': baseData,
                     'graphDataDict': graphData,
                     'granularity': granularity,
                     'unit': unitName})

    def _plotNormCPUUsed(self, reportRequest, plotInfo, filename):
        """Render the cumulative normalized CPU plot."""
        metadata = dict(
            title='Normalized CPU used by %s' % reportRequest['grouping'],
            starttime=reportRequest['startTime'],
            endtime=reportRequest['endTime'],
            span=plotInfo['granularity'],
            ylabel=plotInfo['unit'],
            sort_labels='max_value')
        return self._generateCumulativePlot(filename, plotInfo['graphDataDict'], metadata)

    _reportNormCPUUsageName = "Normalized CPU power"

    def _reportNormCPUUsage(self, reportRequest):
        """Report the normalized CPU power (rate) per group over time."""
        startT = reportRequest['startTime']
        endT = reportRequest['endTime']
        groupingFields = reportRequest['groupingFields']
        selectFields = (
            self._getSelectStringForGrouping(groupingFields) + ", %s, %s, SUM(%s)",
            groupingFields[1] + ['startTime', 'bucketLength', 'NormCPUTime'])
        result = self._getTimedData(startT, endT, selectFields,
                                    reportRequest['condDict'], groupingFields, {})
        if not result['OK']:
            return result
        data, granularity = result['Value']
        self.stripDataField(data, 0)
        # Convert bucket sums to rates, then pad missing buckets with zeros.
        data, _maxValue = self._divideByFactor(data, granularity)
        data = self._fillWithZero(granularity, startT, endT, data)
        baseData, graphData, _maxValue, unitName = self._findSuitableRateUnit(
            data, self._getAccumulationMaxValue(data), "cpupower")
        return S_OK({'data': baseData,
                     'graphDataDict': graphData,
                     'granularity': granularity,
                     'unit': unitName})

    def _plotNormCPUUsage(self, reportRequest, plotInfo, filename):
        """Render the normalized CPU usage stacked-line plot."""
        metadata = dict(
            title='Normalized CPU usage by %s' % reportRequest['grouping'],
            starttime=reportRequest['startTime'],
            endtime=reportRequest['endTime'],
            span=plotInfo['granularity'],
            ylabel=plotInfo['unit'])
        return self._generateStackedLinePlot(filename, plotInfo['graphDataDict'], metadata)

    _reportWallTimeName = "Wall time"

    def _reportWallTime(self, reportRequest):
        """Report the wall-clock time consumption rate per group over time."""
        startT = reportRequest['startTime']
        endT = reportRequest['endTime']
        groupingFields = reportRequest['groupingFields']
        selectFields = (
            self._getSelectStringForGrouping(groupingFields) + ", %s, %s, SUM(%s)",
            groupingFields[1] + ['startTime', 'bucketLength', 'ExecTime'])
        result = self._getTimedData(startT, endT, selectFields,
                                    reportRequest['condDict'], groupingFields, {})
        if not result['OK']:
            return result
        data, granularity = result['Value']
        self.stripDataField(data, 0)
        # Convert bucket sums to rates, then pad missing buckets with zeros.
        data, _maxValue = self._divideByFactor(data, granularity)
        data = self._fillWithZero(granularity, startT, endT, data)
        baseData, graphData, _maxValue, unitName = self._findSuitableRateUnit(
            data, self._getAccumulationMaxValue(data), "time")
        return S_OK({'data': baseData,
                     'graphDataDict': graphData,
                     'granularity': granularity,
                     'unit': unitName})

    def _plotWallTime(self, reportRequest, plotInfo, filename):
        """Render the wall time stacked-line plot."""
        metadata = dict(
            title='Wall Time by %s' % reportRequest['grouping'],
            starttime=reportRequest['startTime'],
            endtime=reportRequest['endTime'],
            span=plotInfo['granularity'],
            ylabel=plotInfo['unit'])
        return self._generateStackedLinePlot(filename, plotInfo['graphDataDict'], metadata)

    _reportRunningJobsName = "Running jobs"

    def _reportRunningJobs(self, reportRequest):
        """Estimate concurrently running jobs from ExecTime per bucket length."""
        startT = reportRequest['startTime']
        endT = reportRequest['endTime']
        groupingFields = reportRequest['groupingFields']
        selectFields = (
            self._getSelectStringForGrouping(groupingFields) + ", %s, %s, SUM(%s)",
            groupingFields[1] + ['startTime', 'bucketLength', 'ExecTime'])
        result = self._getTimedData(startT, endT, selectFields,
                                    reportRequest['condDict'], groupingFields, {})
        if not result['OK']:
            return result
        data, granularity = result['Value']
        self.stripDataField(data, 0)
        # Wall time per bucket divided by bucket length ~ number of running jobs.
        data, _maxValue = self._divideByFactor(data, granularity)
        data = self._fillWithZero(granularity, startT, endT, data)
        baseData, graphData, _maxValue, unitName = self._findSuitableUnit(
            data, self._getAccumulationMaxValue(data), "jobs")
        return S_OK({'data': baseData,
                     'graphDataDict': graphData,
                     'granularity': granularity,
                     'unit': unitName})

    def _plotRunningJobs(self, reportRequest, plotInfo, filename):
        """Render the running jobs stacked-line plot."""
        metadata = dict(
            title='Running jobs by %s' % reportRequest['grouping'],
            starttime=reportRequest['startTime'],
            endtime=reportRequest['endTime'],
            span=plotInfo['granularity'],
            ylabel=plotInfo['unit'])
        return self._generateStackedLinePlot(filename, plotInfo['graphDataDict'], metadata)

    _reportTotalCPUUsedName = "Pie plot of CPU used"

    def _reportTotalCPUUsed(self, reportRequest):
        """Total CPU consumed per group, in days, for a pie chart."""
        groupingFields = reportRequest['groupingFields']
        # 86400 seconds per day.
        selectFields = (
            self._getSelectStringForGrouping(groupingFields) + ", SUM(%s)/86400",
            groupingFields[1] + ['CPUTime'])
        result = self._getSummaryData(reportRequest['startTime'],
                                      reportRequest['endTime'], selectFields,
                                      reportRequest['condDict'],
                                      groupingFields, {})
        if not result['OK']:
            return result
        return S_OK({'data': result['Value']})

    def _plotTotalCPUUsed(self, reportRequest, plotInfo, filename):
        """Render the pie chart of total CPU days per group."""
        metadata = dict(
            title='CPU days used by %s' % reportRequest['grouping'],
            ylabel='CPU days',
            starttime=reportRequest['startTime'],
            endtime=reportRequest['endTime'])
        return self._generatePiePlot(filename, plotInfo['data'], metadata)

    _reportAccumulatedWallTimeName = "Cumulative wall time"

    def _reportAccumulatedWallTime(self, reportRequest):
        """Accumulate wall-clock time per group over the requested period."""
        startT = reportRequest['startTime']
        endT = reportRequest['endTime']
        groupingFields = reportRequest['groupingFields']
        selectFields = (
            self._getSelectStringForGrouping(groupingFields) + ", %s, %s, SUM(%s)",
            groupingFields[1] + ['startTime', 'bucketLength', 'ExecTime'])
        result = self._getTimedData(startT, endT, selectFields,
                                    reportRequest['condDict'], groupingFields, {})
        if not result['OK']:
            return result
        data, granularity = result['Value']
        self.stripDataField(data, 0)
        # Pad empty buckets, then integrate into a cumulative curve.
        data = self._fillWithZero(granularity, startT, endT, data)
        data = self._accumulate(granularity, startT, endT, data)
        baseData, graphData, _maxValue, unitName = self._findSuitableUnit(
            data, self._getAccumulationMaxValue(data), "time")
        return S_OK({'data': baseData,
                     'graphDataDict': graphData,
                     'granularity': granularity,
                     'unit': unitName})

    def _plotAccumulatedWallTime(self, reportRequest, plotInfo, filename):
        """Render the cumulative wall time plot."""
        metadata = dict(
            title='Cumulative wall time by %s' % reportRequest['grouping'],
            starttime=reportRequest['startTime'],
            endtime=reportRequest['endTime'],
            span=plotInfo['granularity'],
            ylabel=plotInfo['unit'],
            sort_labels='max_value')
        return self._generateCumulativePlot(filename, plotInfo['graphDataDict'], metadata)

    _reportTotalWallTimeName = "Pie plot of wall time usage"

    def _reportTotalWallTime(self, reportRequest):
        """Total wall time consumed per group, in days, for a pie chart."""
        groupingFields = reportRequest['groupingFields']
        # 86400 seconds per day.
        selectFields = (
            self._getSelectStringForGrouping(groupingFields) + ", SUM(%s)/86400",
            groupingFields[1] + ['ExecTime'])
        result = self._getSummaryData(reportRequest['startTime'],
                                      reportRequest['endTime'], selectFields,
                                      reportRequest['condDict'],
                                      groupingFields, {})
        if not result['OK']:
            return result
        return S_OK({'data': result['Value']})

    def _plotTotalWallTime(self, reportRequest, plotInfo, filename):
        """
        Render the pie chart of total wall time days per group.

        Fix: the y-axis label read 'CPU days' (copy-paste from the CPU pie
        plot) although this plot shows wall-time days.
        """
        metadata = {
            'title': 'Wall time days used by %s' % reportRequest['grouping'],
            'ylabel': 'Wall time days',
            'starttime': reportRequest['startTime'],
            'endtime': reportRequest['endTime']
        }
        return self._generatePiePlot(filename, plotInfo['data'], metadata)

##
#  Jobs
##

    _reportCumulativeNumberOfJobsName = "Cumulative executed jobs"

    def _reportCumulativeNumberOfJobs(self, reportRequest):
        """Accumulate the number of executed jobs per group over the period."""
        startT = reportRequest['startTime']
        endT = reportRequest['endTime']
        groupingFields = reportRequest['groupingFields']
        selectFields = (
            self._getSelectStringForGrouping(groupingFields) + ", %s, %s, SUM(%s)",
            groupingFields[1] + ['startTime', 'bucketLength', 'entriesInBucket'])
        result = self._getTimedData(startT, endT, selectFields,
                                    reportRequest['condDict'], groupingFields, {})
        if not result['OK']:
            return result
        data, granularity = result['Value']
        self.stripDataField(data, 0)
        # Pad empty buckets, then integrate into a cumulative curve.
        data = self._fillWithZero(granularity, startT, endT, data)
        data = self._accumulate(granularity, startT, endT, data)
        baseData, graphData, _maxValue, unitName = self._findSuitableUnit(
            data, self._getAccumulationMaxValue(data), "jobs")
        return S_OK({'data': baseData,
                     'graphDataDict': graphData,
                     'granularity': granularity,
                     'unit': unitName})

    def _plotCumulativeNumberOfJobs(self, reportRequest, plotInfo, filename):
        """Render the cumulative executed-jobs plot."""
        metadata = dict(
            title='Cumulative Jobs by %s' % reportRequest['grouping'],
            starttime=reportRequest['startTime'],
            endtime=reportRequest['endTime'],
            span=plotInfo['granularity'],
            ylabel=plotInfo['unit'],
            sort_labels='max_value')
        return self._generateCumulativePlot(filename, plotInfo['graphDataDict'], metadata)

    _reportNumberOfJobsName = "Job execution rate"

    def _reportNumberOfJobs(self, reportRequest):
        """Report the job execution rate per group over time."""
        startT = reportRequest['startTime']
        endT = reportRequest['endTime']
        groupingFields = reportRequest['groupingFields']
        selectFields = (
            self._getSelectStringForGrouping(groupingFields) + ", %s, %s, SUM(%s)",
            groupingFields[1] + ['startTime', 'bucketLength', 'entriesInBucket'])
        result = self._getTimedData(startT, endT, selectFields,
                                    reportRequest['condDict'], groupingFields, {})
        if not result['OK']:
            return result
        data, granularity = result['Value']
        self.stripDataField(data, 0)
        # Convert bucket counts to rates, then pad missing buckets with zeros.
        data, _maxValue = self._divideByFactor(data, granularity)
        data = self._fillWithZero(granularity, startT, endT, data)
        baseData, graphData, _maxValue, unitName = self._findSuitableRateUnit(
            data, self._getAccumulationMaxValue(data), "jobs")
        return S_OK({'data': baseData,
                     'graphDataDict': graphData,
                     'granularity': granularity,
                     'unit': unitName})

    def _plotNumberOfJobs(self, reportRequest, plotInfo, filename):
        """Render the job execution rate stacked-line plot."""
        metadata = dict(
            title='Jobs by %s' % reportRequest['grouping'],
            starttime=reportRequest['startTime'],
            endtime=reportRequest['endTime'],
            span=plotInfo['granularity'],
            ylabel=plotInfo['unit'])
        return self._generateStackedLinePlot(filename, plotInfo['graphDataDict'], metadata)

    _reportTotalNumberOfJobsName = "Pie plot of executed jobs"

    def _reportTotalNumberOfJobs(self, reportRequest):
        """Total number of executed jobs per group, for a pie chart."""
        groupingFields = reportRequest['groupingFields']
        selectFields = (
            self._getSelectStringForGrouping(groupingFields) + ", SUM(%s)",
            groupingFields[1] + ['entriesInBucket'])
        result = self._getSummaryData(reportRequest['startTime'],
                                      reportRequest['endTime'], selectFields,
                                      reportRequest['condDict'],
                                      groupingFields, {})
        if not result['OK']:
            return result
        return S_OK({'data': result['Value']})

    def _plotTotalNumberOfJobs(self, reportRequest, plotInfo, filename):
        """Render the pie chart of total executed jobs per group."""
        metadata = dict(
            title='Total Number of Jobs by %s' % reportRequest['grouping'],
            ylabel='Jobs',
            starttime=reportRequest['startTime'],
            endtime=reportRequest['endTime'])
        return self._generatePiePlot(filename, plotInfo['data'], metadata)

##
# Proc bw
##

    _reportProcessingBandwidthName = "Processing bandwidth"

    def _reportProcessingBandwidth(self, reportRequest):
        """Report the mean input-data processing bandwidth per group."""
        startT = reportRequest['startTime']
        endT = reportRequest['endTime']
        groupingFields = reportRequest['groupingFields']
        # Average of per-job InputDataSize/CPUTime over the entries in a bucket.
        selectFields = (
            self._getSelectStringForGrouping(groupingFields) +
            ", %s, %s, SUM((%s)/(%s))/SUM(%s)",
            groupingFields[1] + [
                'startTime', 'bucketLength', 'InputDataSize', 'CPUTime',
                'entriesInBucket'
            ])
        result = self._getTimedData(startT, endT, selectFields,
                                    reportRequest['condDict'], groupingFields, {})
        if not result['OK']:
            return result
        data, granularity = result['Value']
        self.stripDataField(data, 0)
        # Convert bucket sums to rates, then pad missing buckets with zeros.
        data, _maxValue = self._divideByFactor(data, granularity)
        data = self._fillWithZero(granularity, startT, endT, data)
        baseData, graphData, _maxValue, unitName = self._findSuitableRateUnit(
            data, self._getAccumulationMaxValue(data), "bytes")
        return S_OK({'data': baseData,
                     'graphDataDict': graphData,
                     'granularity': granularity,
                     'unit': unitName})

    def _plotProcessingBandwidth(self, reportRequest, plotInfo, filename):
        """Render the processing-bandwidth series as a stacked line plot."""
        metadata = {}
        metadata['title'] = 'Processing Bandwidth by %s' % reportRequest['grouping']
        metadata['starttime'] = reportRequest['startTime']
        metadata['endtime'] = reportRequest['endTime']
        metadata['span'] = plotInfo['granularity']
        metadata['ylabel'] = plotInfo['unit']
        return self._generateStackedLinePlot(filename, plotInfo['graphDataDict'],
                                             metadata)

##
# Data sizes
##

    _reportInputSandboxSizeName = "Input sandbox"

    def _reportInputSandboxSize(self, reportRequest):
        """Report the input sandbox size, grouped per the request."""
        fieldTuple = ("InputSandBoxSize", "Input sand box size")
        return self.__reportFieldSizeinMB(reportRequest, fieldTuple)

    # NOTE(review): "Ouput" typo kept as-is — report names are matched by
    # value elsewhere; confirm no saved reports reference it before fixing.
    _reportOutputSandboxSizeName = "Ouput sandbox"

    def _reportOutputSandboxSize(self, reportRequest):
        """Report the output sandbox size, grouped per the request."""
        fieldTuple = ("OutputSandBoxSize", "Output sand box size")
        return self.__reportFieldSizeinMB(reportRequest, fieldTuple)

    _reportDiskSpaceSizeName = "Disk space"

    def _reportDiskSpaceSize(self, reportRequest):
        """Report the used disk space, grouped per the request."""
        fieldTuple = ("DiskSpace", "Used disk space")
        return self.__reportFieldSizeinMB(reportRequest, fieldTuple)

    _reportInputDataSizeName = "Input data"

    def _reportInputDataSize(self, reportRequest):
        """Report the input data size, grouped per the request."""
        fieldTuple = ("InputDataSize", "Input data")
        return self.__reportFieldSizeinMB(reportRequest, fieldTuple)

    _reportOutputDataSizeName = "Output data"

    def _reportOutputDataSize(self, reportRequest):
        """Report the output data size, grouped per the request."""
        fieldTuple = ("OutputDataSize", "Output data")
        return self.__reportFieldSizeinMB(reportRequest, fieldTuple)

    def __reportFieldSizeinMB(self, reportRequest, fieldTuple):
        """Build a rate time series for one accounting size field.

        :param fieldTuple: (database field name, human-readable label)
        :return: S_OK with data, graphDataDict, granularity and unit keys
        """
        groupingFields = reportRequest['groupingFields']
        selectFields = (
            self._getSelectStringForGrouping(groupingFields) +
            ", %s, %s, SUM(%s)",
            groupingFields[1] + ['startTime', 'bucketLength', fieldTuple[0]])
        result = self._getTimedData(reportRequest['startTime'],
                                    reportRequest['endTime'], selectFields,
                                    reportRequest['condDict'],
                                    groupingFields, {})
        if not result['OK']:
            return result
        data, granularity = result['Value']
        self.stripDataField(data, 0)
        # Rates per bucket, zero-padded over the requested window,
        # scaled to a readable byte-rate unit.
        data, _ = self._divideByFactor(data, granularity)
        data = self._fillWithZero(granularity, reportRequest['startTime'],
                                  reportRequest['endTime'], data)
        baseData, graphData, _, unitName = self._findSuitableRateUnit(
            data, self._getAccumulationMaxValue(data), "bytes")
        return S_OK({'data': baseData,
                     'graphDataDict': graphData,
                     'granularity': granularity,
                     'unit': unitName})

    def _plotInputSandboxSize(self, reportRequest, plotInfo, filename):
        """Plot the input sandbox size series."""
        fieldTuple = ("InputSandBoxSize", "Input sand box size")
        return self.__plotFieldSizeinMB(reportRequest, plotInfo, filename,
                                        fieldTuple)

    def _plotOutputSandboxSize(self, reportRequest, plotInfo, filename):
        """Plot the output sandbox size series."""
        fieldTuple = ("OutputSandBoxSize", "Output sand box size")
        return self.__plotFieldSizeinMB(reportRequest, plotInfo, filename,
                                        fieldTuple)

    def _plotDiskSpaceSize(self, reportRequest, plotInfo, filename):
        """Plot the used disk space series."""
        fieldTuple = ("DiskSpace", "Used disk space")
        return self.__plotFieldSizeinMB(reportRequest, plotInfo, filename,
                                        fieldTuple)

    def _plotInputDataSize(self, reportRequest, plotInfo, filename):
        """Plot the input data size series."""
        fieldTuple = ("InputDataSize", "Input data")
        return self.__plotFieldSizeinMB(reportRequest, plotInfo, filename,
                                        fieldTuple)

    def _plotOutputDataSize(self, reportRequest, plotInfo, filename):
        """Plot the output data size series."""
        fieldTuple = ("OutputDataSize", "Output data")
        return self.__plotFieldSizeinMB(reportRequest, plotInfo, filename,
                                        fieldTuple)

    def __plotFieldSizeinMB(self, reportRequest, plotInfo, filename,
                            fieldTuple):
        """Render a size-field time series as a stacked line plot."""
        metadata = {}
        metadata['title'] = '%s by %s' % (fieldTuple[1], reportRequest['grouping'])
        metadata['starttime'] = reportRequest['startTime']
        metadata['endtime'] = reportRequest['endTime']
        metadata['span'] = plotInfo['granularity']
        metadata['ylabel'] = plotInfo['unit']
        return self._generateStackedLinePlot(filename, plotInfo['graphDataDict'],
                                             metadata)

##
#  Cumulative data sizes
##

    _reportCumulativeInputSandboxSizeName = "Cumulative Input sandbox"

    def _reportCumulativeInputSandboxSize(self, reportRequest):
        """Report the accumulated input sandbox size over time."""
        fieldTuple = ("InputSandBoxSize", "Input sand box size")
        return self.__reportCumulativeFieldSizeinMB(reportRequest, fieldTuple)

    # NOTE(review): "Ouput" typo kept as-is — report names are matched by
    # value elsewhere; confirm no saved reports reference it before fixing.
    _reportCumulativeOutputSandboxSizeName = "Cumulative Ouput sandbox"

    def _reportCumulativeOutputSandboxSize(self, reportRequest):
        """Report the accumulated output sandbox size over time."""
        fieldTuple = ("OutputSandBoxSize", "Output sand box size")
        return self.__reportCumulativeFieldSizeinMB(reportRequest, fieldTuple)

    _reportCumulativeDiskSpaceSizeName = "Cumulative Disk space"

    def _reportCumulativeDiskSpaceSize(self, reportRequest):
        """Report the accumulated used disk space over time."""
        fieldTuple = ("DiskSpace", "Used disk space")
        return self.__reportCumulativeFieldSizeinMB(reportRequest, fieldTuple)

    _reportCumulativeInputDataSizeName = "Cumulative Input data"

    def _reportCumulativeInputDataSize(self, reportRequest):
        """Report the accumulated input data size over time."""
        fieldTuple = ("InputDataSize", "Input data")
        return self.__reportCumulativeFieldSizeinMB(reportRequest, fieldTuple)

    _reportCumulativeOutputDataSizeName = "Cumulative Output data"

    def _reportCumulativeOutputDataSize(self, reportRequest):
        """Report the accumulated output data size over time."""
        fieldTuple = ("OutputDataSize", "Output data")
        return self.__reportCumulativeFieldSizeinMB(reportRequest, fieldTuple)

    def __reportCumulativeFieldSizeinMB(self, reportRequest, fieldTuple):
        """Build an accumulated (running total) series for one size field.

        :param fieldTuple: (database field name, human-readable label)
        :return: S_OK with data, graphDataDict, granularity and unit keys
        """
        groupingFields = reportRequest['groupingFields']
        selectFields = (
            self._getSelectStringForGrouping(groupingFields) +
            ", %s, %s, SUM(%s)",
            groupingFields[1] + ['startTime', 'bucketLength', fieldTuple[0]])
        result = self._getTimedData(reportRequest['startTime'],
                                    reportRequest['endTime'], selectFields,
                                    reportRequest['condDict'],
                                    groupingFields, {})
        if not result['OK']:
            return result
        data, granularity = result['Value']
        self.stripDataField(data, 0)
        # Zero-pad the window first, then accumulate into a running total
        # and pick a readable unit (absolute bytes, not a rate).
        data = self._fillWithZero(granularity, reportRequest['startTime'],
                                  reportRequest['endTime'], data)
        data = self._accumulate(granularity, reportRequest['startTime'],
                                reportRequest['endTime'], data)
        baseData, graphData, _, unitName = self._findSuitableUnit(
            data, self._getAccumulationMaxValue(data), "bytes")
        return S_OK({'data': baseData,
                     'graphDataDict': graphData,
                     'granularity': granularity,
                     'unit': unitName})

    def _plotCumulativeInputSandboxSize(self, reportRequest, plotInfo,
                                        filename):
        """Plot the accumulated input sandbox size."""
        fieldTuple = ("InputSandBoxSize", "Input sand box size")
        return self.__plotCumulativeFieldSizeinMB(reportRequest, plotInfo,
                                                  filename, fieldTuple)

    def _plotCumulativeOutputSandboxSize(self, reportRequest, plotInfo,
                                         filename):
        """Plot the accumulated output sandbox size."""
        fieldTuple = ("OutputSandBoxSize", "Output sand box size")
        return self.__plotCumulativeFieldSizeinMB(reportRequest, plotInfo,
                                                  filename, fieldTuple)

    def _plotCumulativeDiskSpaceSize(self, reportRequest, plotInfo, filename):
        """Plot the accumulated used disk space."""
        fieldTuple = ("DiskSpace", "Used disk space")
        return self.__plotCumulativeFieldSizeinMB(reportRequest, plotInfo,
                                                  filename, fieldTuple)

    def _plotCumulativeInputDataSize(self, reportRequest, plotInfo, filename):
        """Plot the accumulated input data size."""
        fieldTuple = ("InputDataSize", "Input data")
        return self.__plotCumulativeFieldSizeinMB(reportRequest, plotInfo,
                                                  filename, fieldTuple)

    def _plotCumulativeOutputDataSize(self, reportRequest, plotInfo, filename):
        """Plot the accumulated output data size."""
        fieldTuple = ("OutputDataSize", "Output data")
        return self.__plotCumulativeFieldSizeinMB(reportRequest, plotInfo,
                                                  filename, fieldTuple)

    def __plotCumulativeFieldSizeinMB(self, reportRequest, plotInfo, filename,
                                      fieldTuple):
        """Render an accumulated size-field series as a cumulative plot."""
        metadata = {
            'title': 'Cumulative %s by %s' % (fieldTuple[1],
                                              reportRequest['grouping']),
            'starttime': reportRequest['startTime'],
            'endtime': reportRequest['endTime'],
            'span': plotInfo['granularity'],
            'ylabel': plotInfo['unit'],
        }
        return self._generateCumulativePlot(filename, plotInfo['graphDataDict'],
                                            metadata)


##
#  Input/Ouput data files
##

    _reportInputDataFilesName = "Input data files"

    def _reportInputDataFiles(self, reportRequest):
        """Report the number of input data files, grouped per the request."""
        fieldTuple = ("InputDataFiles", "Input files")
        return self.__reportDataFiles(reportRequest, fieldTuple)

    # NOTE(review): "Ouput" misspelling in the identifiers kept — the
    # report machinery resolves handlers by method name; renaming would
    # change the published report name. Confirm before correcting.
    _reportOuputDataFilesName = "Output data files"

    def _reportOuputDataFiles(self, reportRequest):
        """Report the number of output data files, grouped per the request."""
        fieldTuple = ("OutputDataFiles", "Output files")
        return self.__reportDataFiles(reportRequest, fieldTuple)

    def __reportDataFiles(self, reportRequest, fieldTuple):
        """Build a rate time series for a file-count accounting field.

        :param fieldTuple: (database field name, human-readable label)
        :return: S_OK with data, graphDataDict, granularity and unit keys
        """
        groupingFields = reportRequest['groupingFields']
        selectFields = (
            self._getSelectStringForGrouping(groupingFields) +
            ", %s, %s, SUM(%s)",
            groupingFields[1] + ['startTime', 'bucketLength', fieldTuple[0]])
        result = self._getTimedData(reportRequest['startTime'],
                                    reportRequest['endTime'], selectFields,
                                    reportRequest['condDict'],
                                    groupingFields, {})
        if not result['OK']:
            return result
        data, granularity = result['Value']
        self.stripDataField(data, 0)
        # Per-second file rates, zero-padded, scaled to a "files" unit.
        data, _ = self._divideByFactor(data, granularity)
        data = self._fillWithZero(granularity, reportRequest['startTime'],
                                  reportRequest['endTime'], data)
        baseData, graphData, _, unitName = self._findSuitableRateUnit(
            data, self._getAccumulationMaxValue(data), "files")
        return S_OK({'data': baseData,
                     'graphDataDict': graphData,
                     'granularity': granularity,
                     'unit': unitName})

    def _plotInputDataFiles(self, reportRequest, plotInfo, filename):
        """Plot the input data files rate."""
        fieldTuple = ("InputDataFiles", "Input files")
        return self.__plotDataFiles(reportRequest, plotInfo, filename,
                                    fieldTuple)

    def _plotOuputDataFiles(self, reportRequest, plotInfo, filename):
        """Plot the output data files rate.

        NOTE(review): "Ouput" misspelling in the method name kept — the
        plot machinery resolves handlers by name; confirm before renaming.
        """
        fieldTuple = ("OutputDataFiles", "Output files")
        return self.__plotDataFiles(reportRequest, plotInfo, filename,
                                    fieldTuple)

    def __plotDataFiles(self, reportRequest, plotInfo, filename, fieldTuple):
        """Render a file-count time series as a stacked line plot."""
        metadata = {}
        metadata['title'] = '%s by %s' % (fieldTuple[1], reportRequest['grouping'])
        metadata['starttime'] = reportRequest['startTime']
        metadata['endtime'] = reportRequest['endTime']
        metadata['span'] = plotInfo['granularity']
        metadata['ylabel'] = plotInfo['unit']
        return self._generateStackedLinePlot(filename, plotInfo['graphDataDict'],
                                             metadata)

    _reportHistogramCPUUsedName = "Histogram CPU time"

    def _reportHistogramCPUUsed(self, reportRequest):
        """Collect raw per-bucket CPU time values for a histogram."""
        groupingFields = reportRequest['groupingFields']
        selectFields = (
            self._getSelectStringForGrouping(groupingFields) + ", %s",
            groupingFields[1] + ['CPUTime'])

        result = self._getBucketData(reportRequest['startTime'],
                                     reportRequest['endTime'],
                                     selectFields,
                                     reportRequest['condDict'])
        if not result['OK']:
            return result
        return S_OK({'data': result['Value']})

    def _plotHistogramCPUUsed(self, reportRequest, plotInfo, filename):
        """Render the collected CPU time values as a histogram."""
        metadata = {}
        metadata['title'] = 'CPU usage by %s' % reportRequest['grouping']
        metadata['starttime'] = reportRequest['startTime']
        metadata['endtime'] = reportRequest['endTime']
        return self._generateHistogram(filename, [plotInfo['data']], metadata)
Example #6
0
    def __sendAccounting(self, jobID):
        """
        Send WMS accounting data for the given (stalled) job.

        Builds a Job accounting record from the JobDB attributes, the
        logging info and the last heartbeat, commits it, and on success
        flags the job as accounted in the JobDB.

        :param jobID: id of the job to account
        :return: S_OK/S_ERROR from the accounting commit
        """
        try:
            accountingReport = Job()
            # Defaults so the except-clause log line is always well defined
            # even if we fail before these are computed.
            endTime = 'Unknown'
            lastHeartBeatTime = 'Unknown'

            result = self.jobDB.getJobAttributes(jobID)
            if not result['OK']:
                return result
            jobDict = result['Value']

            startTime, endTime = self.__checkLoggingInfo(jobID, jobDict)
            lastCPUTime, lastWallTime, lastHeartBeatTime = self.__checkHeartBeat(
                jobID, jobDict)
            lastHeartBeatTime = fromString(lastHeartBeatTime)
            # The last heartbeat may postdate the logging-derived end time.
            if lastHeartBeatTime is not None and lastHeartBeatTime > endTime:
                endTime = lastHeartBeatTime

            cpuNormalization = self.jobDB.getJobParameter(
                jobID, 'CPUNormalizationFactor')
            if not cpuNormalization['OK'] or not cpuNormalization['Value']:
                cpuNormalization = 0.0
            else:
                cpuNormalization = float(cpuNormalization['Value'])
        except Exception:
            self.log.exception(
                "Exception in __sendAccounting for job %s: endTime=%s, lastHBTime %s"
                % (str(jobID), str(endTime), str(lastHeartBeatTime)), '',
                False)
            return S_ERROR("Exception")
        processingType = self.__getProcessingType(jobID)

        accountingReport.setStartTime(startTime)
        accountingReport.setEndTime(endTime)
        # Fill the accounting data
        acData = {
            'Site': jobDict['Site'],
            'User': jobDict['Owner'],
            'UserGroup': jobDict['OwnerGroup'],
            'JobGroup': jobDict['JobGroup'],
            'JobType': jobDict['JobType'],
            'JobClass': jobDict['JobSplitType'],
            'ProcessingType': processingType,
            'FinalMajorStatus': 'Failed',
            'FinalMinorStatus': 'Stalled',
            'CPUTime': lastCPUTime,
            'NormCPUTime': lastCPUTime * cpuNormalization,
            'ExecTime': lastWallTime,
            'InputDataSize': 0.0,
            'OutputDataSize': 0.0,
            'InputDataFiles': 0,
            'OutputDataFiles': 0,
            'DiskSpace': 0.0,
            'InputSandBoxSize': 0.0,
            'OutputSandBoxSize': 0.0,
            'ProcessedEvents': 0
        }

        # For accidentally stopped jobs the wall-clock ExecTime can be unset
        # or smaller than the reported CPU time; clamp it so the record
        # stays internally consistent.
        if not acData['ExecTime'] or acData['ExecTime'] < acData['CPUTime']:
            acData['ExecTime'] = acData['CPUTime']

        self.log.verbose('Accounting Report is:')
        self.log.verbose(acData)
        accountingReport.setValuesFromDict(acData)

        result = accountingReport.commit()
        if result['OK']:
            self.jobDB.setJobAttribute(jobID, 'AccountedFlag', 'True')
        else:
            self.log.error(
                'Failed to send accounting report',
                'Job: %d, Error: %s' % (int(jobID), result['Message']))
        return result
Example #7
0
  def __sendAccounting( self, jobID ):
    """ Send WMS accounting data for the given job.

    Builds a Job accounting record from the JobDB attributes, the logging
    info and the last heartbeat, commits it, and on success flags the job
    as accounted in the JobDB. Returns S_OK/S_ERROR from the commit.
    """
    try:
      accountingReport = Job()
      # Defaults so the log line in the except clause is always defined,
      # even if we fail before these values are computed.
      endTime = 'Unknown'
      lastHeartBeatTime = 'Unknown'

      result = self.jobDB.getJobAttributes( jobID )
      if not result['OK']:
        return result
      jobDict = result['Value']

      startTime, endTime = self.__checkLoggingInfo( jobID, jobDict )
      lastCPUTime, lastWallTime, lastHeartBeatTime = self.__checkHeartBeat( jobID, jobDict )
      lastHeartBeatTime = fromString( lastHeartBeatTime )
      # The last heartbeat may postdate the logging-derived end time.
      if lastHeartBeatTime is not None and lastHeartBeatTime > endTime:
        endTime = lastHeartBeatTime

      cpuNormalization = self.jobDB.getJobParameter( jobID, 'CPUNormalizationFactor' )
      # Fall back to 0.0 when no normalization factor is stored for the job.
      if not cpuNormalization['OK'] or not cpuNormalization['Value']:
        cpuNormalization = 0.0
      else:
        cpuNormalization = float( cpuNormalization['Value'] )
    except Exception:
      self.log.exception( "Exception in __sendAccounting for job %s: endTime=%s, lastHBTime %s" % ( str( jobID ), str( endTime ), str( lastHeartBeatTime ) ), '' , False )
      return S_ERROR( "Exception" )
    processingType = self.__getProcessingType( jobID )

    accountingReport.setStartTime( startTime )
    accountingReport.setEndTime( endTime )
    # execTime = toEpoch( endTime ) - toEpoch( startTime )
    # Fill the accounting data
    acData = { 'Site' : jobDict['Site'],
               'User' : jobDict['Owner'],
               'UserGroup' : jobDict['OwnerGroup'],
               'JobGroup' : jobDict['JobGroup'],
               'JobType' : jobDict['JobType'],
               'JobClass' : jobDict['JobSplitType'],
               'ProcessingType' : processingType,
               'FinalMajorStatus' : 'Failed',
               'FinalMinorStatus' : 'Stalled',
               'CPUTime' : lastCPUTime,
               'NormCPUTime' : lastCPUTime * cpuNormalization,
               'ExecTime' : lastWallTime,
               'InputDataSize' : 0.0,
               'OutputDataSize' : 0.0,
               'InputDataFiles' : 0,
               'OutputDataFiles' : 0,
               'DiskSpace' : 0.0,
               'InputSandBoxSize' : 0.0,
               'OutputSandBoxSize' : 0.0,
               'ProcessedEvents' : 0
             }

    # For accidentally stopped jobs ExecTime can be not set, or smaller
    # than the reported CPU time; clamp it for a consistent record.
    if not acData['ExecTime']:
      acData['ExecTime'] = acData['CPUTime']
    elif acData['ExecTime'] < acData['CPUTime']:
      acData['ExecTime'] = acData['CPUTime']

    self.log.verbose( 'Accounting Report is:' )
    self.log.verbose( acData )
    accountingReport.setValuesFromDict( acData )

    result = accountingReport.commit()
    if result['OK']:
      # Mark the job so it is not accounted twice.
      self.jobDB.setJobAttribute( jobID, 'AccountedFlag', 'True' )
    else:
      self.log.error( 'Failed to send accounting report', 'Job: %d, Error: %s' % ( int( jobID ), result['Message'] ) )
    return result
    def monitoring( self, loop, parentthread, output ):

      self.initialTiming = os.times()
      accountingReport = AccountingJob()
      accountingReport.setStartTime()

      numberJobsFlag = True
      numberJobs = 0
      numberStartedJobsDict = {}
      numberEndingJobsDict = {}

      job_pattern = re.compile( 'Job =.*?,' )
      job_pattern_2 = re.compile( 'Job =.*?\n' )
      jobid = int( re.split( "_", re.split( "/", output )[int( len( re.split( "/", output ) ) - 1 )] )[0] )

      cmd = '/bin/chmod 555 ' + self.getinfo
      returned = self.commandLaunch( cmd )

      while parentthread.isAlive():
        time.sleep( loop )
        if numberJobsFlag:
          cmd = self.getinfo + ' -c step1'
          returned = self.commandLaunch( cmd )
          self.log.info( 'InteractiveJobMonitorThread:step1:numJobs:', returned )
          if returned != None:
            if ( returned['Value'][1] != "" ):
              if re.split( "=", returned['Value'][1] )[1].strip().isdigit():
                numberJobs = int( re.split( "=", returned['Value'][1] )[1] )
            if ( numberJobs != 0 ):
              numberJobsFlag = False
              BigDataDB.setJobStatus( jobid, "Running" )
        else:
          cmd = self.getinfo + ' -c step2'
          returned = self.commandLaunch( cmd )
          self.log.info( 'InteractiveJobMonitorThread:step2:startedJobs:', returned )
          if returned != "":
            if ( returned['Value'][1] != "" ):
              startedJobs = job_pattern.findall( returned['Value'][1] )
              self.log.info( 'step2:startedJobs:', startedJobs )
          cmd = self.getinfo + ' -c step3'
          returned = self.commandLaunch( cmd )
          self.log.info( 'InteractiveJobMonitorThread:step3:endedJobs:', returned )
          if returned != "":
            if ( returned['Value'][1] != "" ):
              finishedJobs = job_pattern_2.findall( returned['Value'][1] )
              self.log.info( 'step3:finishedJobs:', finishedJobs )
              if ( len( finishedJobs ) == numberJobs ):
                BigDataDB.setJobStatus( jobid, "Done" )
                BigDataDB.setHadoopID( jobid, finishedJobs )
                self.__updateSandBox( jobid, output )

                #Update Accounting                
                EXECUTION_RESULT = {}
                EXECUTION_RESULT['CPU'] = []
                finalStat = os.times()
                for i in range( len( finalStat ) ):
                  EXECUTION_RESULT['CPU'].append( finalStat[i] - self.initialTiming[i] )
                utime, stime, cutime, cstime, elapsed = EXECUTION_RESULT['CPU']
                cpuTime = utime + stime + cutime + cstime
                execTime = elapsed
                result = jobDB.getJobAttributes( jobid )
                getting = result['Value']
                acData = {
                        'User' : getting['Owner'],
                        'UserGroup' : getting['OwnerGroup'],
                        'JobGroup' : 'cesga',
                        'JobType' : 'User',
                        'JobClass' : 'unknown',
                        'ProcessingType' : 'unknown',
                        'FinalMajorStatus' : getting['Status'],
                        'FinalMinorStatus' : getting['MinorStatus'],
                        'CPUTime' : cpuTime,
                        'Site' : getting['Site'],
                        # Based on the factor to convert raw CPU to Normalized units (based on the CPU Model)
                        'NormCPUTime' : 0,
                        'ExecTime' : cpuTime,
                        'InputDataSize' : 0,
                        'OutputDataSize' : 0,
                        'InputDataFiles' : 0,
                        'OutputDataFiles' : 0,
                        'DiskSpace' : 0,
                        'InputSandBoxSize' : 0,
                        'OutputSandBoxSize' : 0,
                        'ProcessedEvents' : 0
                        }
                accountingReport.setEndTime()
                accountingReport.setValuesFromDict( acData )
                result = accountingReport.commit()
Example #9
0
    def _sendAccounting(self, jobID):
        """
        Send WMS accounting data for the given job.

        Run inside thread.

        Builds a Job accounting record from the JobDB attributes, the
        logging info and the last heartbeat, commits it, and on success
        flags the job as accounted in the JobDB.

        :param jobID: id of the job to account
        :return: S_OK/S_ERROR from the accounting commit
        """
        try:
            accountingReport = Job()
            # Defaults so the log line in the except clause is always
            # defined even if we fail before these values are computed.
            endTime = "Unknown"
            lastHeartBeatTime = "Unknown"

            result = self.jobDB.getJobAttributes(jobID)
            if not result["OK"]:
                return result
            jobDict = result["Value"]

            startTime, endTime = self._checkLoggingInfo(jobID, jobDict)
            lastCPUTime, lastWallTime, lastHeartBeatTime = self._checkHeartBeat(
                jobID, jobDict)
            lastHeartBeatTime = fromString(lastHeartBeatTime)
            # The last heartbeat may postdate the logging-derived end time.
            if lastHeartBeatTime is not None and lastHeartBeatTime > endTime:
                endTime = lastHeartBeatTime

            result = JobMonitoringClient().getJobParameter(
                jobID, "CPUNormalizationFactor")
            if not result["OK"] or not result["Value"]:
                self.log.error(
                    "Error getting Job Parameter CPUNormalizationFactor, setting 0",
                    result.get("Message", "No such value"),
                )
                cpuNormalization = 0.0
            else:
                # NOTE(review): if "Value" is truthy but lacks the key,
                # .get() returns None and float(None) raises TypeError —
                # caught by the except below, aborting the accounting.
                cpuNormalization = float(
                    result["Value"].get("CPUNormalizationFactor"))

        except Exception as e:
            self.log.exception(
                "Exception in _sendAccounting",
                "for job=%s: endTime=%s, lastHBTime=%s" %
                (str(jobID), str(endTime), str(lastHeartBeatTime)),
                lException=e,
            )
            return S_ERROR("Exception")
        processingType = self._getProcessingType(jobID)

        accountingReport.setStartTime(startTime)
        accountingReport.setEndTime(endTime)
        # execTime = toEpoch( endTime ) - toEpoch( startTime )
        # Fill the accounting data
        acData = {
            "Site": jobDict["Site"],
            "User": jobDict["Owner"],
            "UserGroup": jobDict["OwnerGroup"],
            "JobGroup": jobDict["JobGroup"],
            "JobType": jobDict["JobType"],
            "JobClass": jobDict["JobSplitType"],
            "ProcessingType": processingType,
            "FinalMajorStatus": JobStatus.FAILED,
            "FinalMinorStatus": JobMinorStatus.STALLED_PILOT_NOT_RUNNING,
            "CPUTime": lastCPUTime,
            "NormCPUTime": lastCPUTime * cpuNormalization,
            "ExecTime": lastWallTime,
            "InputDataSize": 0.0,
            "OutputDataSize": 0.0,
            "InputDataFiles": 0,
            "OutputDataFiles": 0,
            "DiskSpace": 0.0,
            "InputSandBoxSize": 0.0,
            "OutputSandBoxSize": 0.0,
            "ProcessedEvents": 0,
        }

        # For accidentally stopped jobs ExecTime can be not set, or smaller
        # than the reported CPU time; clamp it for a consistent record.
        if not acData["ExecTime"]:
            acData["ExecTime"] = acData["CPUTime"]
        elif acData["ExecTime"] < acData["CPUTime"]:
            acData["ExecTime"] = acData["CPUTime"]

        self.log.verbose("Accounting Report is:")
        self.log.verbose(acData)
        accountingReport.setValuesFromDict(acData)

        result = accountingReport.commit()
        if result["OK"]:
            # Mark the job so it is not accounted twice.
            self.jobDB.setJobAttribute(jobID, "AccountedFlag", "True")
        else:
            self.log.error(
                "Failed to send accounting report",
                "Job: %d, Error: %s" % (int(jobID), result["Message"]))
        return result
Example #10
0
"""
  This python class move the data from BigDataDB to AccountingJobDB
"""

__RCSID__ = "$Id: $"

from DIRAC.AccountingSystem.Client.Types.Job  import Job as AccountingJob
from DIRAC.Core.Utilities.File                import getGlobbedTotalSize, getGlobbedFiles

class MoveToAccounting:
  """Moves job data from BigDataDB to the accounting job database
  (per the module docstring above).
  """

  #############################################################################
  def __init__( self ):
    """ Standard constructor; no state is initialised here.
    """
accountingReport = AccountingJob()
accountingReport.setStartTime()

jobID = 489
status = "Done"
minorStatus = "Completed"
if status:
  wmsMajorStatus = status
if minorStatus:
  wmsMinorStatus = minorStatus

accountingReport.setEndTime()
#CPUTime and ExecTime
if not 'CPU' in EXECUTION_RESULT:
  # If the payload has not started execution (error with input data, SW, SB,...)
  # Execution result is not filled use initialTiming
Example #11
0
  def sendAccounting( self, jobID ):
    """ Send WMS accounting data for the given job.

        Builds a Failed/Stalled Job accounting record out of the job
        attributes, the logging history and the heartbeat data, commits it,
        and flags the job as accounted on success.

        :param jobID: id of the job to account
        :return: S_OK/S_ERROR structure returned by the accounting commit
    """

    accountingReport = Job()

    result = self.jobDB.getJobAttributes( jobID )
    if not result['OK']:
      return result
    jobDict = result['Value']

    # Fetch the logging history once; it is used both to recover a missing
    # start time and to locate the moment the job became Stalled
    result = self.logDB.getJobLoggingInfo( jobID )
    if not result['OK']:
      logList = []
    else:
      logList = result['Value']

    startTime = jobDict['StartExecTime']
    endTime = ''

    if not startTime or startTime == 'None':
      # No recorded execution start: fall back on the Running transition,
      # then on the submission time
      for status, minor, app, stime, source in logList:
        if status == 'Running':
          startTime = stime
          break
      for status, minor, app, stime, source in logList:
        if status == 'Stalled':
          endTime = stime
      if not startTime or startTime == 'None':
        startTime = jobDict['SubmissionTime']

    if isinstance( startTime, types.StringTypes ):
      startTime = fromString( startTime )

    # Reuse the logging info already fetched above instead of querying the
    # DB a second time; the first Stalled transition marks the end of the job
    for status, minor, app, stime, source in logList:
      if status == 'Stalled':
        endTime = stime
        break
    if not endTime:
      endTime = dateTime()

    if isinstance( endTime, types.StringTypes ):
      endTime = fromString( endTime )

    result = self.jobDB.getHeartBeatData( jobID )

    # Keep the largest CPU/wall-clock values and the latest heartbeat seen
    lastCPUTime = 0
    lastWallTime = 0
    lastHeartBeatTime = jobDict['StartExecTime']
    if result['OK']:
      for name, value, heartBeatTime in result['Value']:
        if 'CPUConsumed' == name:
          try:
            value = int( float( value ) )
            if value > lastCPUTime:
              lastCPUTime = value
          except ( ValueError, TypeError ):
            # Malformed heartbeat value: keep the best value seen so far
            pass
        if 'WallClockTime' == name:
          try:
            value = int( float( value ) )
            if value > lastWallTime:
              lastWallTime = value
          except ( ValueError, TypeError ):
            pass
        if heartBeatTime > lastHeartBeatTime:
          lastHeartBeatTime = heartBeatTime

    accountingReport.setStartTime( startTime )
    accountingReport.setEndTime()
    # execTime = toEpoch( endTime ) - toEpoch( startTime )
    #Fill the accounting data
    acData = { 'Site' : jobDict['Site'],
               'User' : jobDict['Owner'],
               'UserGroup' : jobDict['OwnerGroup'],
               'JobGroup' : jobDict['JobGroup'],
               'JobType' : jobDict['JobType'],
               'JobClass' : jobDict['JobSplitType'],
               'ProcessingType' : 'unknown',
               'FinalMajorStatus' : 'Failed',
               'FinalMinorStatus' : 'Stalled',
               'CPUTime' : lastCPUTime,
               'NormCPUTime' : 0.0,
               'ExecTime' : lastWallTime,
               'InputDataSize' : 0.0,
               'OutputDataSize' : 0.0,
               'InputDataFiles' : 0,
               'OutputDataFiles' : 0,
               'DiskSpace' : 0.0,
               'InputSandBoxSize' : 0.0,
               'OutputSandBoxSize' : 0.0,
               'ProcessedEvents' : 0
             }
    self.log.verbose( 'Accounting Report is:' )
    self.log.verbose( acData )
    accountingReport.setValuesFromDict( acData )

    result = accountingReport.commit()
    if result['OK']:
      self.jobDB.setJobAttribute( jobID, 'AccountedFlag', 'True' )
    else:
      self.log.warn( 'Failed to send accounting report for job %d' % int( jobID ) )
      self.log.error( result['Message'] )
    return result
Example #12
0
    def __sendAccounting(self, jobID):
        """Send the WMS accounting record for a stalled job.

        Gathers the job attributes, logging history and heartbeat data,
        builds a Failed/Stalled accounting record, commits it, and marks
        the job as accounted when the commit succeeds.

        :param jobID: id of the job being accounted
        :return: S_OK/S_ERROR structure from the accounting commit
        """
        try:
            report = Job()
            endTime = "Unknown"
            lastHeartBeatTime = "Unknown"

            res = self.jobDB.getJobAttributes(jobID)
            if not res["OK"]:
                return res
            jobDict = res["Value"]

            startTime, endTime = self.__checkLoggingInfo(jobID, jobDict)
            lastCPUTime, lastWallTime, lastHeartBeatTime = self.__checkHeartBeat(jobID, jobDict)
            lastHeartBeatTime = fromString(lastHeartBeatTime)
            # A heartbeat recorded after the logged end time extends the job lifetime
            if lastHeartBeatTime is not None and lastHeartBeatTime > endTime:
                endTime = lastHeartBeatTime

            res = self.jobDB.getJobParameter(jobID, "CPUNormalizationFactor")
            cpuNormalization = float(res["Value"]) if res["OK"] and res["Value"] else 0.0
        except Exception:
            self.log.exception(
                "Exception in __sendAccounting for job %s: endTime=%s, lastHBTime %s"
                % (str(jobID), str(endTime), str(lastHeartBeatTime)),
                "",
                False,
            )
            return S_ERROR("Exception")
        processingType = self.__getProcessingType(jobID)

        report.setStartTime(startTime)
        report.setEndTime(endTime)
        # execTime = toEpoch( endTime ) - toEpoch( startTime )
        # Fill the accounting data
        acData = {
            "Site": jobDict["Site"],
            "User": jobDict["Owner"],
            "UserGroup": jobDict["OwnerGroup"],
            "JobGroup": jobDict["JobGroup"],
            "JobType": jobDict["JobType"],
            "JobClass": jobDict["JobSplitType"],
            "ProcessingType": processingType,
            "FinalMajorStatus": "Failed",
            "FinalMinorStatus": "Stalled",
            "CPUTime": lastCPUTime,
            "NormCPUTime": lastCPUTime * cpuNormalization,
            "ExecTime": lastWallTime,
            "InputDataSize": 0.0,
            "OutputDataSize": 0.0,
            "InputDataFiles": 0,
            "OutputDataFiles": 0,
            "DiskSpace": 0.0,
            "InputSandBoxSize": 0.0,
            "OutputSandBoxSize": 0.0,
            "ProcessedEvents": 0,
        }

        # For accidentally stopped jobs ExecTime can be missing, or shorter
        # than the CPU time actually consumed
        if not acData["ExecTime"] or acData["ExecTime"] < acData["CPUTime"]:
            acData["ExecTime"] = acData["CPUTime"]

        self.log.verbose("Accounting Report is:")
        self.log.verbose(acData)
        report.setValuesFromDict(acData)

        res = report.commit()
        if res["OK"]:
            self.jobDB.setJobAttribute(jobID, "AccountedFlag", "True")
        else:
            self.log.error("Failed to send accounting report", "Job: %d, Error: %s" % (int(jobID), res["Message"]))
        return res
Example #13
0
  def __sendAccounting( self, jobID ):
    """ Send the WMS accounting record for a stalled job.

        Collects the job attributes, logging and heartbeat information,
        builds a Failed/Stalled accounting record and commits it, flagging
        the job as accounted on success.
    """

    report = Job()

    res = self.jobDB.getJobAttributes( jobID )
    if not res['OK']:
      return res
    jobDict = res['Value']

    startTime, endTime = self.__checkLoggingInfo( jobID, jobDict )

    lastCPUTime, lastWallTime, lastHeartBeatTime = self.__checkHeartBeat( jobID, jobDict )

    # A heartbeat recorded after the logged end time extends the job lifetime
    if lastHeartBeatTime:
      heartBeat = fromString( lastHeartBeatTime )
      if heartBeat > endTime:
        endTime = heartBeat

    res = self.jobDB.getJobParameter( jobID, 'CPUNormalizationFactor' )
    if res['OK'] and res['Value']:
      cpuNormalization = float( res['Value'] )
    else:
      cpuNormalization = 0.0

    processingType = self.__getProcessingType( jobID )

    report.setStartTime( startTime )
    report.setEndTime( endTime )
    # execTime = toEpoch( endTime ) - toEpoch( startTime )
    #Fill the accounting data
    acData = { 'Site' : jobDict['Site'],
               'User' : jobDict['Owner'],
               'UserGroup' : jobDict['OwnerGroup'],
               'JobGroup' : jobDict['JobGroup'],
               'JobType' : jobDict['JobType'],
               'JobClass' : jobDict['JobSplitType'],
               'ProcessingType' : processingType,
               'FinalMajorStatus' : 'Failed',
               'FinalMinorStatus' : 'Stalled',
               'CPUTime' : lastCPUTime,
               'NormCPUTime' : lastCPUTime * cpuNormalization,
               'ExecTime' : lastWallTime,
               'InputDataSize' : 0.0,
               'OutputDataSize' : 0.0,
               'InputDataFiles' : 0,
               'OutputDataFiles' : 0,
               'DiskSpace' : 0.0,
               'InputSandBoxSize' : 0.0,
               'OutputSandBoxSize' : 0.0,
               'ProcessedEvents' : 0
             }
    self.log.verbose( 'Accounting Report is:' )
    self.log.verbose( acData )
    report.setValuesFromDict( acData )

    res = report.commit()
    if res['OK']:
      self.jobDB.setJobAttribute( jobID, 'AccountedFlag', 'True' )
    else:
      self.log.error( 'Failed to send accounting report', 'Job: %d, Error: %s' % ( int( jobID ), res['Message'] ) )
    return res