from datetime import datetime
import time
from lib import url, dashboard

def getJSONMetric(metricNumber, hoursToRead, sitesStr, sitesVar, dateStart="2000-01-01", dateEnd=datetime.now().strftime('%Y-%m-%d')):
    # fetch one SSB dashboard column as JSON and parse it; retry once after a
    # short sleep and return None if both attempts fail
    urlstr = "http://dashb-ssb.cern.ch/dashboard/request.py/getplotdata?columnid=" + str(metricNumber) + "&time=" + str(hoursToRead) + "&dateFrom=" + dateStart + "&dateTo=" + dateEnd + "&site=" + sitesStr + "&sites=" + sitesVar + "&clouds=all&batch=1"
    try:
        print "Getting metric " + str(metricNumber) + ", url: " + urlstr
        metricData = url.read(urlstr)
        return dashboard.parseJSONMetric(metricData)
    except:
        print "Fetching URL failed, sleeping and retrying..."
        time.sleep(3)
        try:
            metricData = url.read(urlstr)
            return dashboard.parseJSONMetric(metricData)
        except:
            return None
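# Example use (a sketch, not part of the original function): fetch the same
# federation column (233) that check_federation_history reads, over the last
# 336 hours. The variable names and argument choices below are illustrative
# assumptions; getSites/getLatestEntry come from the dashboard lib as used in
# the other snippets.
federationMetric = getJSONMetric(233, 336, "T0_CH_CERN", "all")
if federationMetric is not None:
    for site in federationMetric.getSites():
        latest = federationMetric.getLatestEntry(site)
        print site + " -> " + latest.value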
def check_federation_history(site_name):
    # read the federation-state column (233) for the last 336 hours and return
    # "prod" or "trans" if the site appears with one of those states
    federationHistoryURL = "http://dashb-ssb.cern.ch/dashboard/request.py/getplotdata?columnid=233&time=336&dateFrom=&dateTo=&site=T0_CH_CERN&sites=all&clouds=all&batch=1"
    federationHistory = dashboard.parseJSONMetric(url.read(federationHistoryURL))
    for entry in federationHistory.getSiteEntries(site_name).values():
        if entry.value == "prod" or entry.value == "trans":
            return entry.value
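# Sketch of how check_federation_history might be called (the site name is a
# hypothetical example): it returns "prod" or "trans" when the site shows up
# in the federation column with one of those states, and None otherwise.
state = check_federation_history("T1_DE_KIT")
if state:
    print "site is federated: " + state
else:
    print "no federation history found"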
def getJSONMetric(metricNumber, hoursToRead, sitesStr, sitesVar, dateStart="2000-01-01", dateEnd=datetime.now().strftime('%Y-%m-%d')):
    urlstr = "http://dashb-ssb.cern.ch/dashboard/request.py/getplotdata?columnid=" + str(metricNumber) + "&time=" + str(hoursToRead) + "&dateFrom=" + dateStart + "&dateTo=" + dateEnd + "&site=" + sitesStr + "&sites=" + sitesVar + "&clouds=all&batch=1"
    try:
        metricData = url.read(urlstr)
        return dashboard.parseJSONMetric(metricData)
    except:
        return None
def getJSONMetric(metricNumber, hoursToRead, sitesStr, sitesVar, dateStart="2000-01-01", dateEnd=datetime.now().strftime('%Y-%m-%d')):
    urlstr = "http://dashb-ssb.cern.ch/dashboard/request.py/getplotdata?columnid=" + str(metricNumber) + "&time=" + str(hoursToRead) + "&dateFrom=" + dateStart + "&dateTo=" + dateEnd + "&site=" + sitesStr + "&sites=" + sitesVar + "&clouds=all&batch=1"
    print urlstr
    try:
        metricData = url.read(urlstr)
        return dashboard.parseJSONMetric(metricData)
    except:
        return None
# this script generates IN/OUT waiting room statistics by sites
from lib import url, dashboard
try:
    import json
except ImportError:
    import simplejson as json
import time, sys

if len(sys.argv) < 2:
    sys.stderr.write('not enough parameter!\n')
    sys.exit(1)

data = url.read(sys.argv[1])
wr = dashboard.parseJSONMetric(data)
wrStat = {}
for site in wr.getSites():
    # initialize site statistics
    if not wrStat.has_key(site):
        wrStat[site] = {dashboard.green: 0, dashboard.red: 0}
    # to remember the parsed json metric data structure, please see dashboard.py
    entries = wr.getSiteEntries(site)
    for endTime in entries:
        entry = entries[endTime]
        diff = endTime - entry.date
        if entry.color == dashboard.green or entry.color == dashboard.yellow:
            wrStat[site][dashboard.green] += diff
        elif entry.color == dashboard.red:
            wrStat[site][dashboard.red] += diff
sites = wrStat.keys()
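# A minimal reporting sketch (not part of the original script), assuming the
# entry times are unix timestamps so the accumulated differences are seconds:
# print per-site IN (red) / OUT (green and yellow) waiting room totals in days.
for site in sites:
    inDays = wrStat[site][dashboard.red] / 86400.0
    outDays = wrStat[site][dashboard.green] / 86400.0
    print "%s: %.1f day(s) IN, %.1f day(s) OUT" % (site, inDays, outDays)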
from lib import sites, dashboard, url, fileOps
try:
    import json
except ImportError:
    import simplejson as json
import time, sys

if len(sys.argv) < 7:
    print 'not enough parameter!'
    sys.exit(1)

# manually controlled usable sites list
usableSitesMC = dashboard.parseMetric(url.read(sys.argv[1]))
# morgue list
morgue = dashboard.parseMetric(url.read(sys.argv[2]))
# prepare hammercloud metric url for the last 3 days!
hcURL = sys.argv[3] % (time.strftime("%Y-%m-%d", time.localtime(time.time() - 3*24*60*60)),
                       time.strftime("%Y-%m-%d", time.localtime(time.time())))
hammerCloud = dashboard.parseJSONMetric(url.read(hcURL))
# get the url stamp for the dashboard input file
urlStamp = sys.argv[4]
# text output file location
txtOutput = sys.argv[5]
# json output file location
jsonOutput = sys.argv[6]

# create new metric object
metricHeader = {'twiki': 'https://twiki.cern.ch/twiki/bin/view/CMSPublic/UsableSitesForAnalysis'}
metric = dashboard.metric(header=metricHeader)

def hasBadHistory(siteName):
    # if the site is not in the hc metric, return False
    # (you don't have any idea about the site, you cannot
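    # -- the original snippet is cut off above; the rest of this function body
    # is an assumed sketch, built only from the metric API already used in
    # these scripts (hasSite, getSiteEntries, entry.color), and may differ from
    # the real implementation --
    if not hammerCloud.hasSite(siteName):
        return False
    # flag the site if any hammercloud entry in the 3-day window is red
    for entry in hammerCloud.getSiteEntries(siteName).values():
        if entry.color == dashboard.red:
            return True
    return False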
def check_federation_history(site_name):
    federationHistoryURL = "http://dashb-ssb.cern.ch/dashboard/request.py/getplotdata?columnid=233&time=336&dateFrom=&dateTo=&site=T0_CH_CERN&sites=all&clouds=all&batch=1"
    federationHistory = dashboard.parseJSONMetric(url.read(federationHistoryURL))
    for entry in federationHistory.getSiteEntries(site_name).values():
        if entry.value == "prod" or entry.value == "trans":
            return entry.value
try:
    import xml.etree.ElementTree as ET
except ImportError:
    from elementtree import ElementTree as ET

# the script reads sys.argv[1] .. sys.argv[8], so require at least 9 arguments
if len(sys.argv) < 9:
    sys.stderr.write("not enough parameter!\n")
    sys.exit(1)

siteList = sites.getSites()
ggusXMLFile = fileOps.read(sys.argv[1])
ggus = {}
samAccessURL = sys.argv[2]
samAccess = {}
hcURL = sys.argv[3]
hammerCloud = {}
downTimesURL = sys.argv[4]
downTimes = dashboard.parseJSONMetric(url.read(downTimesURL))
siteDownTimes = {}
federations = json.loads(url.read(sys.argv[5]))
reportFile = sys.argv[6]
reportURL = sys.argv[7]
output = sys.argv[8]
report = {}

def check_federation_history(site_name):
    federationHistoryURL = "http://dashb-ssb.cern.ch/dashboard/request.py/getplotdata?columnid=233&time=336&dateFrom=&dateTo=&site=T0_CH_CERN&sites=all&clouds=all&batch=1"
    federationHistory = dashboard.parseJSONMetric(url.read(federationHistoryURL))
    for entry in federationHistory.getSiteEntries(site_name).values():
        if entry.value == "prod" or entry.value == "trans":
            return entry.value
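# Illustrative sketch (not from the original script): one way the parsed
# downTimes metric could be folded into siteDownTimes, using only the
# parsed-metric API seen in these snippets (hasSite, getSiteEntries,
# entry.color). Treating a red entry as an active downtime is an assumption.
for site in siteList:
    siteDownTimes[site] = False
    if downTimes.hasSite(site):
        for entry in downTimes.getSiteEntries(site).values():
            if entry.color == dashboard.red:
                siteDownTimes[site] = True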
# this script provides data for the 'usable sites - manual changes' metric,
# which is created to control the 'usable sites' metric by hand, and creates
# a closed loop for the metric. when someone changes a value in the
# 'usable sites - manual changes' metric by using the dashboard web interface,
# the script reflects this change to the input text file of the metric.
if len(sys.argv) < 3:
    print 'not enough parameter!'
    sys.exit(1)
# output path
output = sys.argv[2]
# get the source metric url
metricURL = sys.argv[1]
# get the entries of the metric
metric = dashboard.parseJSONMetric(url.read(metricURL))
updatedMetric = dashboard.metric()
# merge sites from the vo-feed and the manual control metric
siteList = sites.getSites()
for site in metric.getSites():
    if not site in siteList:
        siteList[site] = {}
for i in siteList:
    # if the site is not in the metric, add it (this is the case that happens
    # when a new site is created in the site db)
    if not metric.hasSite(i):
        updatedMetric.append(dashboard.entry(None, i, 'ready', dashboard.green, metricURL))
from lib import sites, dashboard, url, fileOps
try:
    import json
except ImportError:
    import simplejson as json
import sys
try:
    import xml.etree.ElementTree as ET
except ImportError:
    from elementtree import ElementTree as ET

# the script reads sys.argv[1] .. sys.argv[8], so require at least 9 arguments
if len(sys.argv) < 9:
    sys.stderr.write('not enough parameter!\n')
    sys.exit(1)

siteList = sites.getSites()
ggusXMLFile = fileOps.read(sys.argv[1])
ggus = {}
samAccessURL = sys.argv[2]
samAccess = {}
hcURL = sys.argv[3]
hammerCloud = {}
downTimesURL = sys.argv[4]
downTimes = dashboard.parseJSONMetric(url.read(downTimesURL))
siteDownTimes = {}
federations = json.loads(url.read(sys.argv[5]))
reportFile = sys.argv[6]
reportURL = sys.argv[7]
output = sys.argv[8]
report = {}

def check_federation_history(site_name):
    federationHistoryURL = "http://dashb-ssb.cern.ch/dashboard/request.py/getplotdata?columnid=233&time=336&dateFrom=&dateTo=&site=T0_CH_CERN&sites=all&clouds=all&batch=1"
    federationHistory = dashboard.parseJSONMetric(url.read(federationHistoryURL))
    for entry in federationHistory.getSiteEntries(site_name).values():
        if entry.value == "prod" or entry.value == "trans":
            return entry.value

for site in siteList:
# this script provides data for the 'usable sites - manual changes' metric,
# which is created to control the 'usable sites' metric by hand, and creates
# a closed loop for the metric. when someone changes a value in the
# 'usable sites - manual changes' metric by using the dashboard web interface,
# the script reflects this change to the input text file of the metric.
if len(sys.argv) < 3:
    print 'not enough parameter!'
    sys.exit(1)
# output path
output = sys.argv[2]
# get the source metric url
metricURL = sys.argv[1]
# get the entries of the metric
metric = dashboard.parseJSONMetric(url.read(metricURL))
updatedMetric = dashboard.metric()
for i in sites.getSites():
    # if the site is not in the metric, add it (this is the case that happens
    # when a new site is created in the site db)
    if not metric.hasSite(i):
        updatedMetric.append(dashboard.entry(None, i, 'ready', dashboard.green, metricURL))
    else:
        latestEntry = metric.getLatestEntry(i)
        updatedMetric.append(dashboard.entry(None, i, latestEntry.value, latestEntry.color, metricURL))
fileOps.write(output, str(updatedMetric))
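# Hypothetical invocation (script name, URL and path are placeholders, not part
# of the original): argv[1] is the JSON URL of the 'usable sites - manual
# changes' column on the dashboard, argv[2] is the text file that feeds the
# metric back in.
#
#   python manualChanges.py "<manual-changes metric JSON url>" /path/to/usableSitesManualChanges.txt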
# this script generates IN/OUT waiting room statistics by sites
from lib import url, dashboard
try:
    import json
except ImportError:
    import simplejson as json
import time, sys

if len(sys.argv) < 2:
    sys.stderr.write('not enough parameter!\n')
    sys.exit(1)

data = url.read(sys.argv[1])
wr = dashboard.parseJSONMetric(data)
wrStat = {}
for site in wr.getSites():
    # initialize site statistics
    if not wrStat.has_key(site):
        wrStat[site] = {dashboard.green: 0, dashboard.red: 0}
    # to remember the parsed json metric data structure, please see dashboard.py
    entries = wr.getSiteEntries(site)
    for endTime in entries:
        entry = entries[endTime]
        diff = endTime - entry.date
        if entry.color == dashboard.green or entry.color == dashboard.yellow:
            wrStat[site][dashboard.green] += diff
        elif entry.color == dashboard.red:
            wrStat[site][dashboard.red] += diff