Example #1
def getJSONMetric(metricNumber, hoursToRead, sitesStr, sitesVar, dateStart="2000-01-01", dateEnd=None):
    # evaluate "today" at call time; a def-time default would be frozen at import
    if dateEnd is None:
        dateEnd = datetime.now().strftime('%Y-%m-%d')
    urlstr = "http://dashb-ssb.cern.ch/dashboard/request.py/getplotdata?columnid=" + str(metricNumber) + "&time=" + str(hoursToRead) + "&dateFrom=" + dateStart + "&dateTo=" + dateEnd + "&site=" + sitesStr + "&sites=" + sitesVar + "&clouds=all&batch=1"
    try:
        print "Getting metric " + str(metricNumber) + ", url :" + urlstr
        metricData = url.read(urlstr)
        return dashboard.parseJSONMetric(metricData)
    except:
        print("Fetching URL failed, sleeping and retrying...")
        time.sleep(3)
        try:
            metricData = url.read(urlstr)
            return dashboard.parseJSONMetric(metricData)
        except:
            return None
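Note: the function above issues the same fetch-and-parse twice. A minimal sketch of how the retry could be factored out, assuming the same lib helpers (url.read, dashboard.parseJSONMetric) these examples use; fetchWithRetry is a hypothetical name, not part of the original code:

import time

def fetchWithRetry(urlstr, retries=2, delay=3):
    # try the URL a few times, sleeping between attempts; None on failure
    for attempt in range(retries):
        try:
            return dashboard.parseJSONMetric(url.read(urlstr))
        except Exception:
            if attempt + 1 < retries:
                time.sleep(delay)
    return None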
Example #2
def getJSONMetric(metricNumber, hoursToRead, sitesStr, sitesVar, dateStart="2000-01-01", dateEnd=None):
    # evaluate "today" at call time; a def-time default would be frozen at import
    if dateEnd is None:
        dateEnd = datetime.now().strftime('%Y-%m-%d')
    urlstr = "http://dashb-ssb.cern.ch/dashboard/request.py/getplotdata?columnid=" + str(metricNumber) + "&time=" + str(hoursToRead) + "&dateFrom=" + dateStart + "&dateTo=" + dateEnd + "&site=" + sitesStr + "&sites=" + sitesVar + "&clouds=all&batch=1"
    try:
        metricData = url.read(urlstr)
        return dashboard.parseJSONMetric(metricData)
    except:
        return None
Example #3
def check_federation_history(site_name):
    federationHistoryURL = "http://dashb-ssb.cern.ch/dashboard/request.py/getplotdata?columnid=233&time=336&dateFrom=&dateTo=&site=T0_CH_CERN&sites=all&clouds=all&batch=1"
    federationHistory = dashboard.parseJSONMetric(
        url.read(federationHistoryURL))
    for entry in federationHistory.getSiteEntries(site_name).values():
        if entry.value == "prod" or entry.value == "trans":
            return entry.value
Example #4
def getJSONMetric(metricNumber, hoursToRead, sitesStr, sitesVar, dateStart="2000-01-01", dateEnd=None):
    # evaluate "today" at call time; a def-time default would be frozen at import
    if dateEnd is None:
        dateEnd = datetime.now().strftime('%Y-%m-%d')
    urlstr = "http://dashb-ssb.cern.ch/dashboard/request.py/getplotdata?columnid=" + str(metricNumber) + "&time=" + str(hoursToRead) + "&dateFrom=" + dateStart + "&dateTo=" + dateEnd + "&site=" + sitesStr + "&sites=" + sitesVar + "&clouds=all&batch=1"
    print urlstr
    try:
        metricData = url.read(urlstr)
        return dashboard.parseJSONMetric(metricData)
    except:
        return None
Example #5
def getDowntimes(downtimeMetricURL, lowerBound = None):
    # get downtimes from dashboard and parse json
    dashboardDT  = json.loads(url.read(downtimeMetricURL))

    # downtime events to be inserted
    # structure: {1 : [entry1, entry2], 2 : [entry3, entry4, ..] ..}
    downtimeEvents = {}

    for i in dashboardDT['csvdata']:
        color = i['COLORNAME']
        site  = i['VOName']
        tier  = int(site[1])
        stat  = i['Status']

        # slot start and end times
        start = i['Time']    + 'Z'
        end   = i['EndTime'] + 'Z'

        # skip the entry if it is not downtime
        if color == 'green': continue
        # create google calendar entry summary
        summary = "%s %s [%s to %s]" % (site, stat,
                                        start.replace('T', ' ').replace('Z', ''),
                                        end.replace('T', ' ').replace('Z', ''))
        # if service partially down, put the hash mark before the event summary
        # (please, go to the dashboard::siteStatusBoard metric number 121 and
        # see the metric details to understand better)
        if color == 'yellow':
            summary = '# ' + summary

        downtimeEvent = {'summary' : summary,
            'start': {'dateTime': start, 'timeZone' : 'UTC'},
            'end' :  {'dateTime': end  , 'timeZone' : 'UTC'} }

        # check if the downtime entry is in the lower bound
        if lowerBound and isOldEntry(end, lowerBound):
            print '# skip: %s' % summary
            continue

        if tier not in downtimeEvents:
            downtimeEvents[tier] = []

        if downtimeEvent not in downtimeEvents[tier]:
            downtimeEvents[tier].append(downtimeEvent)

    return downtimeEvents
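getDowntimes relies on isOldEntry, which none of these snippets define. A minimal sketch of what it plausibly does, assuming both arguments are UTC timestamps in the '%Y-%m-%dT%H:%M:%SZ' form built above:

from datetime import datetime

def isOldEntry(end, lowerBound):
    # True when the downtime's end falls before the lower bound
    fmt = '%Y-%m-%dT%H:%M:%SZ'
    return datetime.strptime(end, fmt) < datetime.strptime(lowerBound, fmt)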
Example #6
try: import json
except ImportError: import simplejson as json
try: import xml.etree.ElementTree as ET
except ImportError: from elementtree import ElementTree as ET

if len(sys.argv) < 6:
    sys.stderr.write('not enough parameters!\n')
    sys.exit(1)

siteList     = sites.getSites()
ggusXMLFile  = fileOps.read(sys.argv[1])
ggus         = []
samAccessURL = sys.argv[2]
samAccess    = {}
hcURL        = sys.argv[3]
hammerCloud  = {}
federations  = json.loads(url.read(sys.argv[4]))
output       = sys.argv[5]

for site in siteList:
    samAccess[site] = 'n/a'
    hammerCloud[site] = 'n/a'

## parse ggus xml file
for ticket in ET.fromstring(ggusXMLFile).findall('ticket'):
    cmsSiteName  = ticket.find('cms_site').text
    realSiteName = ticket.find('affected_site').text

    # if you don't have CMS site name AND have real site name,
    # try to find its CMS name and add it to the ggus array
    if not cmsSiteName and realSiteName:
        for site in siteList:
Example #7
# this script generates IN/OUT waiting room statistics by sites

from lib import url, dashboard
try: import json
except ImportError: import simplejson as json
import time, sys

if len(sys.argv) < 2:
    sys.stderr.write('not enough parameters!\n')
    sys.exit(1)

data   = url.read(sys.argv[1])
wr     = dashboard.parseJSONMetric(data)
wrStat = {}

for site in wr.getSites():
    # initialize country statistics
    if not wrStat.has_key(site):
        wrStat[site] = {dashboard.green : 0, dashboard.red : 0}

    # to remember the parsed json metric data structure, please see dashboard.py
    entries = wr.getSiteEntries(site)
    for endTime in entries:
        entry = entries[endTime]
        diff  = endTime - entry.date
        if entry.color == dashboard.green or entry.color == dashboard.yellow:
            wrStat[site][dashboard.green] += diff
        elif entry.color == dashboard.red:
            wrStat[site][dashboard.red] += diff

sites = wrStat.keys()
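The snippet stops right after collecting the per-site totals. A hedged sketch of how they might be reported, assuming the diffs above are seconds between epoch timestamps and that red marks time spent in the waiting room:

for site in sorted(wrStat):
    inWR = wrStat[site][dashboard.red]
    outWR = wrStat[site][dashboard.green]
    total = inWR + outWR
    if total > 0:
        print site, 'IN %.1f%%' % (100.0 * inWR / total), 'OUT %.1f%%' % (100.0 * outWR / total)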
Example #8
def main():
    parser = OptionParser(usage="usage: %prog [options] filename",
                          version="%prog 1.0")
    parser.add_option("-d", "--date",
                      dest="inputDate",
                      help="date from which to fetch the results for HC in format %Y-%m-%dT%H:%M:%SZ ")
    parser.add_option("-o", "--outputDir",
                      dest="outputDir",
                      help="Directory in which to save the output")
    (options, args) = parser.parse_args()
    if options.inputDate is None:
        print "Please input a date with the --date option"
        exit(-1)
    else:
        try:
            datetmp = dateutil.parser.parse(options.inputDate, ignoretz=True)
        except:
            print "I couldn't recognize the date, please give me one like 2015-12-31T23:59:59Z"
            exit(-1)
    if options.outputDir is None:
        print "Please add a directory with option --outputDir"
        exit(-1)
    else:
        if not os.path.isdir(options.outputDir):
            print options.outputDir + " is not a valid directory or you don't have read permissions"
            exit(-1)
# Constants
    interval = 30
    dateFrom = datetmp - timedelta(minutes=datetmp.minute % interval,
                                   seconds=datetmp.second,
                                   microseconds=datetmp.microsecond)
    dateTo = dateFrom + timedelta(minutes=interval)
    dateFormat = "%Y-%m-%dT%H:%M:%SZ"
    dateFromStr = datetime.strftime(dateFrom, dateFormat)
    print dateFromStr
    dateToStr = datetime.strftime(dateTo, dateFormat)
    OUTPUT_FILE_NAME = os.path.join(options.outputDir,"sam.txt")
    print "Getting SAM Score from " + str(dateFrom) + " to " + str(dateTo)
    samUrl = "http://wlcg-sam-cms.cern.ch/dashboard/request.py/getstatsresultsmin?profile_name=CMS_CRITICAL_FULL&plot_type=quality&start_time=%s&end_time=%s&granularity=single&view=siteavl" % (dateFromStr, dateToStr)    
    print samUrl
    # Download the url or die
    try:
        print "Fetching url : " + samUrl
        jsonStr = url.read(samUrl)
        samInfo = json.loads(jsonStr)
    except:
        exit(100)
    print "Data retrieved!"
    sitesfromDashboard = []
    for samSummary in samInfo['data']:
        sitesfromDashboard.append(samSummary['name'])
    print sitesfromDashboard
    samScoreSites = []
    print"Getting SAM for all sites"
    for site in sitesfromDashboard:
        for samSummary in samInfo['data']:
            if samSummary['name'] == site:
                try:
                    siteOK = float(samSummary['data'][0]['OK'])
                    siteCritical = float(samSummary['data'][0]['CRIT'])
                    siteSched = float(samSummary['data'][0]['SCHED'])
                    if (siteOK + siteCritical + siteSched) > 0.0:
                        siteAvailabilityNum = (float(siteOK) / (float(siteOK + siteCritical + siteSched)))*100.0
                        siteAvailability = int(siteAvailabilityNum)
                        if siteAvailabilityNum > 89.9:
                            siteColor = "c*k"
                        elif (sites.getTier(site) == 2 or sites.getTier(site) == 3) and siteAvailabilityNum > 79.9:
                            siteColor = "c*k" 
                        else:
                            siteColor = "cNotOk"
                    else:
                        siteAvailability = "n/a"
                        siteAvailabilityNum = None
                        siteColor = "cNA"
                except:
                    siteAvailability = "Error"
                    siteAvailabilityNum = None
                    siteColor = "cError"
                print site + "  OK " + str(siteOK) + " CRIT " + str(siteCritical) + " SCHED " + str(siteSched) + " SCORE : " + str(siteAvailability) 
                samScoreSites.append(dashboard.entry(date = dateFrom.strftime("%Y-%m-%d %H:%M:%S"), name = site, value = siteAvailability, color = siteColor, url = getSuccessrateUrl (site, dateFrom, dateTo), nvalue=siteAvailabilityNum))
    print str(samScoreSites)
    if len(samScoreSites) > 1:
        OutputFile = open(OUTPUT_FILE_NAME, 'w')
        for site in samScoreSites:
            if site.name != "unknown":
                OutputFile.write(str(site) + '\n')
        print "\n--SAM Score output written to %s" % OUTPUT_FILE_NAME
        OutputFile.close()
    else:
        print "There's no data, I quit!"
Example #9
def check_federation_history(site_name):
    federationHistoryURL = "http://dashb-ssb.cern.ch/dashboard/request.py/getplotdata?columnid=233&time=336&dateFrom=&dateTo=&site=T0_CH_CERN&sites=all&clouds=all&batch=1"
    federationHistory = dashboard.parseJSONMetric(url.read(federationHistoryURL))
    for entry in federationHistory.getSiteEntries(site_name).values():
        if entry.value == "prod" or entry.value == "trans":
            return entry.value
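A hedged usage sketch: the function returns 'prod' or 'trans' for a site whose federation history contains one of those states, and falls through to None otherwise (the site name below is hypothetical):

state = check_federation_history('T2_XX_Example')
if state is None:
    print 'site not in a prod/trans federation during the window'
else:
    print 'federation state:', state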
Example #10
try:
    import json
except ImportError:
    import simplejson as json
try:
    import xml.etree.ElementTree as ET
except ImportError:
    from elementtree import ElementTree as ET

# input: config file, input/output file
if len(sys.argv) < 3:
    print >> sys.stderr, 'Error: not enough parameters.'
    sys.exit(1)

# read the config file and parse it
conf = json.loads(fileOps.read(sys.argv[1]))

# get list of announced cmssw releases and parse it
cmssw = ET.fromstring(url.read(conf['urlRelList']))
# get list of documented cmssw releases and parse it
docCMSSW = ET.fromstring(url.read(conf['urlDocList']))


def isDocNeeded(relName):
    if re.match(conf['pattern'], relName): return True
    else: return False


# cmssw XML structure
class CMSSW:
    def __init__(self, name=None, arch=None, type=None, state=None):
        self.name = name.strip()
        self.arch = arch
        self.type = type
Example #11
if len(sys.argv) < 4:
    sys.stderr.write('not enough parameters!\n')
    sys.exit(1)

siteList  = sites.getSites()
config    = json.loads(fileOps.read(sys.argv[1]))
fieldList = config.keys()
fieldList.sort()
data      = {}
# add field names
data['fields'] = fieldList

# load all fields from dashboard
fields    = {}
for field in fieldList:
    fields[field] = dashboard.parseMetric(url.read(config[field]))
    print field, 'done...'

for site in siteList:
    data[site] = []
    for field in fieldList:
        if not fields[field].hasSite(site):
            data[site].append('black')
            continue
        data[site].append(fields[field].getSiteEntry(site).color)

template  = fileOps.read(sys.argv[2])
template  = template.replace('@DATE@', time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(time.time())))
template  = template.replace('@DATA@', json.dumps(data));

fileOps.write(sys.argv[3], template)
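For reference, a sketch of the structure handed to @DATA@ before json.dumps, with hypothetical field and site names; each site maps to one color per sorted field:

data = {
    'fields': ['fieldA', 'fieldB'],       # hypothetical field names
    'T2_XX_Example': ['green', 'black'],  # one color per field, in order
}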
Example #12
siteList = sites.getSites()

# prepare result array. note that for SAM tests
# we don't filter sites by tier number because some
# T3s have SAM tests.
results  = {}
for site in siteList:
    results[site] = 'n/a'

now      = time.time()
start    = date.format(time.strftime("T00:00:00Z", time.localtime(now - 24*60*60)))
# note: in urllib.quote '/' is safe by default (quote_plus would escape
# it too); plain quote is enough here since only ':' needs escaping.
start    = urllib.quote(start)
end      = date.format(time.strftime("T23:00:00Z", time.localtime(now - 24*60*60)))
end      = urllib.quote(end)

print 'SAM test time range:', start, end

# start, end, site
for site in results:
    source = samURL.format(start, end, site)
    data   = json.loads(url.read(source))
    if not (data.has_key('data') and len(data['data']) and data['data'][0].has_key('data')): continue
    data   = data['data'][0]['data'][0]
    if not data['OK'] + data['CRIT'] + data['SCHED'] > 0: continue
    result = data['OK'] / (data['OK'] + data['CRIT'] + data['SCHED']) * 100.0
    results[site] = round(result, 3)

fileOps.write("{0}/{1}.json".format(out, int(time.time())), json.dumps(results, indent = 2))
Example #13
try:
    import xml.etree.ElementTree as ET
except ImportError:
    from elementtree import ElementTree as ET

if len(sys.argv) < 6:
    sys.stderr.write('not enough parameters!\n')
    sys.exit(1)

siteList = sites.getSites()
ggusXMLFile = fileOps.read(sys.argv[1])
ggus = []
samAccessURL = sys.argv[2]
samAccess = {}
hcURL = sys.argv[3]
hammerCloud = {}
federations = json.loads(url.read(sys.argv[4]))
output = sys.argv[5]

for site in siteList:
    samAccess[site] = 'n/a'
    hammerCloud[site] = 'n/a'

## parse ggus xml file
for ticket in ET.fromstring(ggusXMLFile).findall('ticket'):
    cmsSiteName = ticket.find('cms_site').text
    realSiteName = ticket.find('affected_site').text

    # if you don't have CMS site name AND have real site name,
    # try to find its CMS name and add it to the ggus array
    if not cmsSiteName and realSiteName:
        for site in siteList:
Example #14
# this script generates IN/OUT waiting room statistics by sites

from lib import url, dashboard
try:
    import json
except ImportError:
    import simplejson as json
import time, sys

if len(sys.argv) < 2:
    sys.stderr.write('not enough parameters!\n')
    sys.exit(1)

data = url.read(sys.argv[1])
wr = dashboard.parseJSONMetric(data)
wrStat = {}

for site in wr.getSites():
    # initialize country statistics
    if not wrStat.has_key(site):
        wrStat[site] = {dashboard.green: 0, dashboard.red: 0}

    # to remember the parsed json metric data structure, please see dashboard.py
    entries = wr.getSiteEntries(site)
    for endTime in entries:
        entry = entries[endTime]
        diff = endTime - entry.date
        if entry.color == dashboard.green or entry.color == dashboard.yellow:
            wrStat[site][dashboard.green] += diff
        elif entry.color == dashboard.red:
            wrStat[site][dashboard.red] += diff
Example #15
results = {}
for site in siteList:
    results[site] = 'n/a'

now = time.time()
start = date.format(
    time.strftime("T00:00:00Z", time.localtime(now - 24 * 60 * 60)))
# note: in urllib.quote '/' is safe by default (quote_plus would escape
# it too); plain quote is enough here since only ':' needs escaping.
start = urllib.quote(start)
end = date.format(
    time.strftime("T23:00:00Z", time.localtime(now - 24 * 60 * 60)))
end = urllib.quote(end)

print 'SAM test time range:', start, end

# start, end, site
for site in results:
    source = samURL.format(start, end, site)
    data = json.loads(url.read(source))
    if not (data.has_key('data') and len(data['data'])
            and data['data'][0].has_key('data')):
        continue
    data = data['data'][0]['data'][0]
    if not data['OK'] + data['CRIT'] + data['SCHED'] > 0: continue
    result = data['OK'] / (data['OK'] + data['CRIT'] + data['SCHED']) * 100.0
    results[site] = round(result, 3)

fileOps.write("{0}/{1}.json".format(out, int(time.time())),
              json.dumps(results, indent=2))
Example #16
try:
    import xml.etree.ElementTree as ET
except ImportError:
    from elementtree import ElementTree as ET

if len(sys.argv) < 9:
    sys.stderr.write("not enough parameters!\n")
    sys.exit(1)

siteList = sites.getSites()
ggusXMLFile = fileOps.read(sys.argv[1])
ggus = {}
samAccessURL = sys.argv[2]
samAccess = {}
hcURL = sys.argv[3]
hammerCloud = {}
downTimesURL = sys.argv[4]
downTimes = dashboard.parseJSONMetric(url.read(downTimesURL))
siteDownTimes = {}
federations = json.loads(url.read(sys.argv[5]))
reportFile = sys.argv[6]
reportURL = sys.argv[7]
output = sys.argv[8]
report = {}


def check_federation_history(site_name):
    federationHistoryURL = "http://dashb-ssb.cern.ch/dashboard/request.py/getplotdata?columnid=233&time=336&dateFrom=&dateTo=&site=T0_CH_CERN&sites=all&clouds=all&batch=1"
    federationHistory = dashboard.parseJSONMetric(url.read(federationHistoryURL))
    for entry in federationHistory.getSiteEntries(site_name).values():
        if entry.value == "prod" or entry.value == "trans":
            return entry.value
Example #17
def main():
    parser = OptionParser(usage="usage: %prog [options] filename",
                          version="%prog 1.0")
    parser.add_option("-d", "--date",
                      dest="inputDate",
                      help="date from which to fetch the results for HC in format %Y-%m-%dT%H:%M:%SZ ")
    parser.add_option("-o", "--outputDir",
                      dest="outputDir",
                      help="Directory in which to save the output")
    (options, args) = parser.parse_args()
    if options.inputDate is None:
        print "Please input a date with the --date option"
        exit(-1)
    else:
        try:
            datetmp = dateutil.parser.parse(options.inputDate, ignoretz=True)
        except:
            print "I couldn't recognize the date, please give me one like 2015-12-31T23:59:59Z"
            exit(-1)
    if options.outputDir is None:
        print "Please add a directory with option --outputDir"
        exit(-1)
    else:
        if not os.path.isdir(options.outputDir):
            print options.outputDir + " is not a valid directory or you don't have read permissions"
            exit(-1)
# Constants:
    # Dashboard API for Hammercloud
    # replace (site, startTimeStamp, endTimeStamp)
    interval = 30
    dateFrom = datetmp - timedelta(minutes=datetmp.minute % interval,
                                   seconds=datetmp.second,
                                   microseconds=datetmp.microsecond)
    dateTo = dateFrom + timedelta(minutes=interval)
    dateFormat = "%Y-%m-%d+%H%%3A%M"
    dateFromStr = datetime.strftime(dateFrom, dateFormat)
    dateToStr = datetime.strftime(dateTo, dateFormat)
    OUTPUT_FILE_NAME = os.path.join(options.outputDir,"hammercloud.txt")
    print "Calcuating Hammercloud Score from " + str(dateFrom) + " to " + str(dateTo)
    urlHC = "http://dashb-cms-job.cern.ch/dashboard/request.py/jobsummary-plot-or-table2?user=&site=&submissiontool=&application=&activity=hctest&status=&check=terminated&tier=&sortby=site&ce=&rb=&grid=&jobtype=&submissionui=&dataset=&submissiontype=&task=&subtoolver=&genactivity=&outputse=&appexitcode=&accesstype=&inputse=&cores=&date1=%s&date2=%s&prettyprint" % (dateFromStr, dateToStr)    
    # Download the url or die
    try:
        print "Fetching url : " + urlHC
        jsonStr = url.read(urlHC)
        hcInfo = json.loads(jsonStr)
    except:
        exit(100)
    print "Data retrieved!"
    print json.dumps(hcInfo, sort_keys=True, indent=1, separators=(',', ': '))
    sitesfromDashboard = []
    for hcSummary in hcInfo['summaries']:
        sitesfromDashboard.append(hcSummary['name'])
    
    hcScoreSites = []
    noNa = 0
    print"Calculating HammerCloud scores"
    for site in sitesfromDashboard:
        for hcSummary in hcInfo['summaries']: 
            if hcSummary['name'] == site and site != "unknown":
                siteTerminated = hcSummary['terminated']
                siteSuccesful = hcSummary['app-succeeded']
                siteUnsuccesful = hcSummary['unsuccess']
                siteCancelled = hcSummary['cancelled']
                siteUnk = hcSummary['allunk']
                siteScore = -1.0 
                siteColor = "white"
                if (siteTerminated - siteCancelled - siteUnk) > 0:
                    siteScore = (float(siteSuccesful - siteUnsuccesful) / float(siteTerminated - siteCancelled - siteUnk)) * 100.0
                    siteColor = "red"
                    if (sites.getTier(site) > 1) and siteScore > 79.9:
                        siteColor = "yellow"
                    if siteScore > 89.9:
                        siteColor = "green"
                    #print site + " (" + str(siteSuccesful) + " - " + str(siteUnsuccesful) + ")/(" +str(siteTerminated)+" - "+str(siteCancelled)+" - "+str(siteUnk)+") =" + str(siteScore)
                if siteScore is not None:
                    print site + "\t" + str(siteScore) + "\t" + siteColor
                    hcScoreSites.append(dashboard.entry(date=dateFrom.strftime("%Y-%m-%d %H:%M:%S"), name=site, value='%.1f' % siteScore, color=siteColor, url=getSuccessrateUrl(site, dateFromStr, dateToStr)))
    #print str(hcScoreSites)
    if len(hcScoreSites) > noNa:
        OutputFile = open(OUTPUT_FILE_NAME, 'w')
        for site in hcScoreSites:
            if site.name != "unknown":
                OutputFile.write(str(site) + '\n')
        print "\n--HC Score output written to %s" % OUTPUT_FILE_NAME
        OutputFile.close()
    else:
        print "There's no data, I quit!"
Example #18
def main():
    parser = OptionParser(usage="usage: %prog [options] filename",
                          version="%prog 1.0")
    parser.add_option(
        "-d",
        "--date",
        dest="inputDate",
        help=
        "date from which to fetch the results for HC in format %Y-%m-%dT%H:%M:%SZ "
    )
    parser.add_option("-o",
                      "--outputDir",
                      dest="outputDir",
                      help="Directory in which to save the output")
    (options, args) = parser.parse_args()
    if options.inputDate is None:
        print "Please input a date with the --date option"
        exit(-1)
    else:
        try:
            datetmp = dateutil.parser.parse(options.inputDate, ignoretz=True)
        except:
            print "I couldn't recognize the date, please give me one like 2015-12-31T23:59:59Z"
            exit(-1)
    if options.outputDir is None:
        print "Please add a directory with option --outputDir"
        exit(-1)
    else:
        if not os.path.isdir(options.outputDir):
            print options.outputDir + " is not a valid directory or you don't have read permissions"
            exit(-1)


# Constants:
# Dashboard API for Hammercloud
# replace (site, startTimeStamp, endTimeStamp)
    interval = 15
    dateFrom = datetmp - timedelta(minutes=datetmp.minute % interval,
                                   seconds=datetmp.second,
                                   microseconds=datetmp.microsecond)
    dateTo = dateFrom + timedelta(minutes=interval)
    dateFormat = "%Y-%m-%d+%H%%3A%M"
    dateFromStr = datetime.strftime(dateFrom, dateFormat)
    dateToStr = datetime.strftime(dateTo, dateFormat)
    OUTPUT_FILE_NAME = os.path.join(options.outputDir, "hammercloud.txt")
    print "Calcuating Hammercloud Score from " + str(dateFrom) + " to " + str(
        dateTo)
    urlHC = "http://dashb-cms-job.cern.ch/dashboard/request.py/jobsummary-plot-or-table2?user=&site=&submissiontool=&application=&activity=hctest&status=&check=terminated&tier=&sortby=site&ce=&rb=&grid=&jobtype=&submissionui=&dataset=&submissiontype=&task=&subtoolver=&genactivity=&outputse=&appexitcode=&accesstype=&inputse=&cores=&date1=%s&date2=%s&prettyprint" % (
        dateFromStr, dateToStr)
    # Download the url or die
    try:
        print "Fetching url : " + urlHC
        jsonStr = url.read(urlHC)
        hcInfo = json.loads(jsonStr)
    except:
        exit(100)
    print "Data retrieved!"
    sitesfromDashboard = []
    for hcSummary in hcInfo['summaries']:
        sitesfromDashboard.append(hcSummary['name'])

    hcScoreSites = []
    noNa = 0
    print "Calculating HammerCloud scores"
    for site in sitesfromDashboard:
        for hcSummary in hcInfo['summaries']:
            if hcSummary['name'] == site and site != "unknown":
                siteTerminated = hcSummary['terminated']
                siteSuccesful = hcSummary['app-succeeded']
                siteUnsuccesful = hcSummary['unsuccess']
                siteCancelled = hcSummary['cancelled']
                siteUnk = hcSummary['allunk']
                siteScore = None
                if (siteTerminated - siteCancelled - siteUnk) > 0:
                    siteScore = (float(siteSuccesful - siteUnsuccesful) /
                                 float(siteTerminated - siteCancelled -
                                       siteUnk)) * 100.0
                    siteColor = "cNotOk"
                    if (sites.getTier(site) == 2
                            or sites.getTier(site) == 3) and siteScore > 79.9:
                        siteColor = "c*k"
                    if sites.getTier(site) == 1 and siteScore > 89.9:
                        siteColor = "c*k"
                    print site + " (" + str(siteSuccesful) + " - " + str(
                        siteUnsuccesful) + ")/(" + str(
                            siteTerminated) + " - " + str(
                                siteCancelled) + " - " + str(
                                    siteUnk) + ") =" + str(siteScore)
                elif siteTerminated > 0 or siteCancelled > 0 or siteUnk > 0 or siteUnsuccesful > 0 or siteSuccesful > 0:
                    siteScore = "Error"
                    noNa += 1
                    siteColor = "cError"
                if siteScore is not None:
                    hcScoreSites.append(
                        dashboard.entry(
                            date=dateFrom.strftime("%Y-%m-%d %H:%M:%S"),
                            name=site,
                            value=siteScore,
                            color=siteColor,
                            url=getSuccessrateUrl(site, dateFromStr,
                                                  dateToStr)))
    #print str(hcScoreSites)
    if len(hcScoreSites) > noNa:
        OutputFile = open(OUTPUT_FILE_NAME, 'w')
        for site in hcScoreSites:
            if site.name != "unknown":
                OutputFile.write(str(site) + '\n')
        print "\n--HC Score output written to %s" % OUTPUT_FILE_NAME
        OutputFile.close()
    else:
        print "There's no data, I quit!"
Example #19
# this script provides data for the 'usable sites - manual changes' metric,
# which is created to control the 'usable sites' metric by hand, and creates
# a closed loop for the metric. when someone changes a value in the 
# 'usable sites - manual changes' metric by using dashboard web interface,
# the script reflects this change to the input text file of the metric.

if len(sys.argv) < 3:
    print 'not enough parameters!'
    sys.exit(1)

# output path
output        = sys.argv[2]

# get the source metric url
metricURL     = sys.argv[1]
# get the entries of the metric
metric        = dashboard.parseJSONMetric(url.read(metricURL))
updatedMetric = dashboard.metric()

for i in sites.getSites():
    # if the site is not in the list, add it (this is the
    # case that happens when a new site is created
    # in the site db)
    if not metric.hasSite(i):
        updatedMetric.append(dashboard.entry(None, i, 'ready', dashboard.green, metricURL))
    else:
        latestEntry = metric.getLatestEntry(i)
        updatedMetric.append(dashboard.entry(None, i, latestEntry.value, latestEntry.color, metricURL))

fileOps.write(output, str(updatedMetric))
Example #20
import sys
try: import json
except ImportError: import simplejson as json
from lib import fileOps, url, dashboard, sites

if len(sys.argv) < 3:
    sys.stderr.write('not enough parameters!\n')
    sys.exit(1)

federationSource = sys.argv[1]
metricOutput     = sys.argv[2]

federations = json.loads(url.read(federationSource))

federationMetric = dashboard.metric()
for fedName in federations:
    for site in federations[fedName]:
        if fedName == 'prod':
            color = dashboard.green
        elif fedName == 'trans':
            color = dashboard.cyan
        elif fedName == 'nowhere':
            color = dashboard.gray
        else:
            # this state should be impossible given the current federation
            # names, but it is handled in case a new name appears --
            # any such change must also be reflected in the metric.
            color = dashboard.white
        entry = dashboard.entry(None, site, fedName, color, federationSource)
        federationMetric.append(entry)
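The snippet ends before the metric is persisted. A plausible closing step, assuming the fileOps helper already imported above and the metricOutput path bound from sys.argv[2] (this line is a sketch, not part of the original):

fileOps.write(metricOutput, str(federationMetric))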
Example #21
try: import xml.etree.ElementTree as ET
except ImportError: from elementtree import ElementTree as ET

if len(sys.argv) < 9:
    sys.stderr.write('not enough parameters!\n')
    sys.exit(1)

siteList     = sites.getSites()
ggusXMLFile  = fileOps.read(sys.argv[1])
ggus         = {}
samAccessURL = sys.argv[2]
samAccess    = {}
hcURL        = sys.argv[3]
hammerCloud  = {}
downTimesURL = sys.argv[4]
downTimes    = dashboard.parseJSONMetric(url.read(downTimesURL))
siteDownTimes = {}
federations  = json.loads(url.read(sys.argv[5]))
reportFile   = sys.argv[6]
reportURL    = sys.argv[7]
output       = sys.argv[8]
report       = {}

def check_federation_history(site_name):
    federationHistoryURL="http://dashb-ssb.cern.ch/dashboard/request.py/getplotdata?columnid=233&time=336&dateFrom=&dateTo=&site=T0_CH_CERN&sites=all&clouds=all&batch=1"
    federationHistory=dashboard.parseJSONMetric(url.read(federationHistoryURL))
    for entry in federationHistory.getSiteEntries(site_name).values():
        if entry.value == "prod" or entry.value == "trans":
            return entry.value
              
for site in siteList:
Example #22
# please have a look at the site support team metric script
# development documentation:
# https://twiki.cern.ch/twiki/bin/view/CMSPublic/SiteSupportMonitoringScripts

import sys, time
from lib import sites, dashboard, url, fileOps
try: import json
except ImportError: import simplejson as json

if len(sys.argv) < 7:
    print 'not enough parameters!'
    sys.exit(1)

# manually controlled usable sites list
usableSitesMC = dashboard.parseMetric(url.read(sys.argv[1]))
# morgue list
morgue        = dashboard.parseMetric(url.read(sys.argv[2]))
# prepare hammercloud metric url for last 3 days!
hcURL         = sys.argv[3] % (time.strftime("%Y-%m-%d", time.localtime(time.time()-3*24*60*60)),
                               time.strftime("%Y-%m-%d", time.localtime(time.time())))
hammerCloud   = dashboard.parseJSONMetric(url.read(hcURL))
# get the url stamp for the dashboard input file
urlStamp      = sys.argv[4]
# text output file location
txtOutput     = sys.argv[5]
# json output file location
jsonOutput    = sys.argv[6]

# create new metric object
metricHeader = {'twiki' : 'https://twiki.cern.ch/twiki/bin/view/CMSPublic/UsableSitesForAnalysis'}
metric = dashboard.metric(header = metricHeader)
Example #23
# this script provides data for the 'usable sites - manual changes' metric,
# which is created to control the 'usable sites' metric by hand, and creates
# a closed loop for the metric. when someone changes a value in the
# 'usable sites - manual changes' metric by using dashboard web interface,
# the script reflects this change to the input text file of the metric.

if len(sys.argv) < 3:
    print 'not enough parameters!'
    sys.exit(1)

# output path
output = sys.argv[2]

# get the source metric url
metricURL = sys.argv[1]
# get the entries of the metric
metric = dashboard.parseJSONMetric(url.read(metricURL))
updatedMetric = dashboard.metric()

# merge sites from the VO-feed and the manual control metric.
siteList = sites.getSites()
for site in metric.getSites():
    if site not in siteList:
        siteList[site] = {}

for i in siteList:
    # if the site is not in the list, add it (this is the
    # case that happens when a new site is created
    # in the site db)
    if not metric.hasSite(i):
        updatedMetric.append(
            dashboard.entry(None, i, 'ready', dashboard.green, metricURL))
Example #24
def main():
    parser = OptionParser(usage="usage: %prog [options] filename",
                          version="%prog 1.0")
    parser.add_option(
        "-d",
        "--date",
        dest="inputDate",
        help=
        "date from which to fetch the results for HC in format %Y-%m-%dT%H:%M:%SZ "
    )
    parser.add_option("-o",
                      "--outputDir",
                      dest="outputDir",
                      help="Directory in which to save the output")
    (options, args) = parser.parse_args()
    if options.inputDate is None:
        print "Please input a date with the --date option"
        exit(-1)
    else:
        try:
            datetmp = dateutil.parser.parse(options.inputDate, ignoretz=True)
        except:
            print "I couldn't recognize the date, please give me one like 2015-12-31T23:59:59Z"
            exit(-1)
    if options.outputDir is None:
        print "Please add a directory with option --outputDir"
        exit(-1)
    else:
        if not os.path.isdir(options.outputDir):
            print options.outputDir + " is not a valid directory or you don't have read permissions"
            exit(-1)


# Constants
    interval = 1439
    dateFrom = datetmp - timedelta(minutes=datetmp.minute % interval,
                                   seconds=datetmp.second,
                                   microseconds=datetmp.microsecond)
    dateTo = dateFrom + timedelta(minutes=interval)
    dateFormat = "%Y-%m-%dT%H:%M:%SZ"
    dateFromStr = datetime.strftime(dateFrom, dateFormat)
    print dateFromStr
    dateToStr = datetime.strftime(dateTo, dateFormat)
    OUTPUT_FILE_NAME = os.path.join(options.outputDir, "site_avail_sum.txt")
    OUTPUT_FILE_CORRECTIONS = os.path.join(options.outputDir,
                                           "site_avail_sum_POST_REQUEST.txt")
    SAM_COLUMN_NUMBER = "126"
    print "Getting SAM Score from " + str(dateFrom) + " to " + str(dateTo)
    samUrl = "http://wlcg-sam-cms.cern.ch/dashboard/request.py/getstatsresultsmin?profile_name=CMS_CRITICAL_FULL&plot_type=quality&start_time=%s&end_time=%s&granularity=single&view=siteavl" % (
        dateFromStr, dateToStr)
    print samUrl
    # Download the url or die
    try:
        print "Fetching url : " + samUrl
        jsonStr = url.read(samUrl)
        samInfo = json.loads(jsonStr)
    except:
        exit(100)
    print "Data retrieved!"
    sitesfromDashboard = []
    for samSummary in samInfo['data']:
        sitesfromDashboard.append(samSummary['name'])
    print sitesfromDashboard
    samScoreSites = []
    print "Getting SAM for all sites"
    for site in sitesfromDashboard:
        for samSummary in samInfo['data']:
            if samSummary['name'] == site:
                try:
                    siteOK = float(samSummary['data'][0]['OK'])
                    siteCritical = float(samSummary['data'][0]['CRIT'])
                    siteSched = float(samSummary['data'][0]['SCHED'])
                    if (siteOK + siteCritical + siteSched) > 0.0:
                        siteAvailabilityNum = (
                            float(siteOK) /
                            (float(siteOK + siteCritical + siteSched))) * 100.0
                        siteAvailability = int(siteAvailabilityNum)
                        if siteAvailabilityNum > 89.9:
                            siteColor = "green"
                        elif (sites.getTier(site) == 2 or sites.getTier(site)
                              == 3) and siteAvailabilityNum > 79.9:
                            siteColor = "green"
                        else:
                            siteColor = "red"
                    else:
                        siteAvailability = "n/a"
                        siteAvailabilityNum = None
                        siteColor = "white"
                except:
                    siteAvailability = "Error"
                    siteAvailabilityNum = None
                    siteColor = "white"
                print site + "  OK " + str(siteOK) + " CRIT " + str(
                    siteCritical) + " SCHED " + str(
                        siteSched) + " SCORE : " + str(siteAvailability)
                samScoreSites.append(
                    dashboard.entry(date=dateTo.strftime("%Y-%m-%d %H:%M:%S"),
                                    name=site,
                                    value=siteAvailability,
                                    color=siteColor,
                                    url=getSuccessrateUrl(
                                        site, dateFrom, dateTo),
                                    nvalue=siteAvailabilityNum))
    print str(samScoreSites)
    if len(samScoreSites) > 1:
        OutputFile = open(OUTPUT_FILE_NAME, 'w')
        correctionOutputFile = open(OUTPUT_FILE_CORRECTIONS, 'a')
        startDateStr = (dateFrom + timedelta(days=1)).replace(
            hour=0, minute=0, second=1,
            microsecond=0).strftime("%Y-%m-%d %H:%M:%S")
        endDateStr = (dateFrom + timedelta(days=1)).replace(
            hour=23, minute=59, second=59,
            microsecond=0).strftime("%Y-%m-%d %H:%M:%S")
        for site in samScoreSites:
            if site.name != "unknown":
                OutputFile.write(str(site) + '\n')
                correctionOutputFile.write(("\t".join([
                    startDateStr, endDateStr,
                    str(SAM_COLUMN_NUMBER), site.name,
                    str(site.value), site.color, site.url, "nvalue=0"
                ])) + "\n")
        print "\n--SAM Score output written to %s" % OUTPUT_FILE_NAME
        OutputFile.close()
        correctionOutputFile.close()
    else:
        print "There's no data, I quit!"
Example #25
from lib import url, fileOps
try: import json
except ImportError: import simplejson as json
try: import xml.etree.ElementTree as ET
except ImportError: from elementtree import ElementTree as ET

# input: config file, input/output file
if len(sys.argv) < 3:
    print >> sys.stderr, 'Error: not enough parameters.'
    sys.exit(1)

# read the config file and parse it
conf     = json.loads(fileOps.read(sys.argv[1]))

# get list of announced cmssw releases and parse it
cmssw    = ET.fromstring(url.read(conf['urlRelList']))
# get list of documented cmssw releases and parse it
docCMSSW = ET.fromstring(url.read(conf['urlDocList']))

def isDocNeeded(relName):
    if re.match(conf['pattern'], relName): return True
    else: return False

# cmssw XML structure
class CMSSW:
    def __init__(self, name = None, arch = None, type = None,
                 state = None):
        self.name     = name.strip()
        self.arch     = arch
        self.type     = type
        self.state    = state