Example #1
    parsed[cmsSite][id] = subject

# generate output for twiki meeting page
ticketURL = "https://ggus.eu/?mode=ticket_info&ticket_id="
twikiTable = "\n| *CMS Site* | *Number of Tickets* | *Tickets* |\n"
sum = 0
for site in parsed:
    url = ""
    sum = sum + len(parsed[site])
    for id in parsed[site]:
        url = url + "[[%s][%s]] " % (ticketURL + id, id)
    twikiTable = twikiTable + "| %s | %d | %s |\n" % (site, len(
        parsed[site]), url)
dateStamp = time.strftime("%d/%b/%Y %H:%M:%S (GMT)", time.gmtime())
twikiTable = twikiTable + "| *<i>generated on %s</i>, Total number of tickets: %s* |||" % (
    dateStamp, sum)
fileOps.write(sys.argv[2], twikiTable)

# generate text file for the dashboard metric
metric = dashboard.metric()
allSites = sites.getSites().keys()
url = "https://ggus.eu/?mode=ticket_search&cms_site=%s&timeframe=any&status=open&search_submit=GO%%21"
for site in parsed:
    value = len(parsed[site])
    metric.append(dashboard.entry(None, site, value, dashboard.red,
                                  url % site))
for site in allSites:
    if site in parsed.keys(): continue
    metric.append(dashboard.entry(None, site, 0, dashboard.green, url % site))
fileOps.write(sys.argv[3], str(metric))
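
All of these snippets lean on a small fileOps helper imported from the
project's lib package (see Example #17), but its implementation is never
shown. A minimal sketch, assuming plain whole-file read/write semantics
with no locking or encoding handling:

# hypothetical sketch of lib.fileOps -- not the project's actual code
def read(path):
    f = open(path)
    try:
        return f.read()
    finally:
        f.close()

def write(path, content):
    f = open(path, 'w')
    try:
        f.write(content)
    finally:
        f.close()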
Example #2
results = {}
for site in siteList:
    results[site] = 'n/a'

now = time.time()
start = date.format(
    time.strftime("T00:00:00Z", time.localtime(now - 24 * 60 * 60)))
# note: urllib.quote leaves '/' unescaped by default; quote_plus would
# also escape it, but these timestamps contain no '/', so quote is enough.
start = urllib.quote(start)
end = date.format(
    time.strftime("T23:00:00Z", time.localtime(now - 24 * 60 * 60)))
end = urllib.quote(end)

print 'SAM test time range:', start, end

# start, end, site
for site in results:
    source = samURL.format(start, end, site)
    data = json.loads(url.read(source))
    if not (data.has_key('data') and len(data['data'])
            and data['data'][0].has_key('data')):
        continue
    data = data['data'][0]['data'][0]
    if not data['OK'] + data['CRIT'] + data['SCHED'] > 0: continue
    # multiply by 100.0 first: with Python 2 ints, OK / total would floor to 0
    result = 100.0 * data['OK'] / (data['OK'] + data['CRIT'] + data['SCHED'])
    results[site] = round(result, 3)

fileOps.write("{0}/{1}.json".format(out, int(time.time())),
              json.dumps(results, indent=2))
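
The availability formula in this loop is where Python 2 integer division
would bite: if OK, CRIT, and SCHED arrive as ints, data['OK'] / total
floors to 0 before the scaling. Multiplying by 100.0 first, as in the
corrected line above, keeps the whole expression in floating point:

# the Python 2 division pitfall, isolated:
ok, crit, sched = 7, 2, 1
print ok / (ok + crit + sched) * 100.0    # 0.0  -- 7 / 10 floors to 0 first
print 100.0 * ok / (ok + crit + sched)    # 70.0 -- the float propagates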
Example #3
data         = {}
for i in os.listdir(inOutPath):
    if not i.endswith('.json'): continue
    date     =  time.strftime("%B %d, %Y %H:%M:%S", time.localtime(int(i.replace('.json', ''))))
    data[date]  = json.loads(fileOps.read("{0}/{1}".format(inOutPath, i)))

jsData       = {}
for date in data:
    for site in data[date]:
        if not site in jsData:
            jsData[site] = {}
        if data[date][site] == 'n/a': data[date][site] = -1.0
        jsData[site][date] = data[date][site]

def allTheSame(data):
    oldVal = None
    val    = None
    for i in data:
        val = data[i]
        if oldVal is not None and val != oldVal: return False
        oldVal = data[i]
    return True

for site in jsData.keys():
    if allTheSame(jsData[site]): del jsData[site]

report = htmlTemplate.replace('@DATA@', json.dumps(jsData))
report = report.replace('@INFO@', json.dumps(info))
fileOps.write("{0}/samObservationReport.html".format(inOutPath), report)
Example #4
    if not cmsSite: continue
    if not parsed.has_key(cmsSite):
        parsed[cmsSite] = {}
    parsed[cmsSite][id] = subject

# generate output for twiki meeting page 
ticketURL  = "https://ggus.eu/?mode=ticket_info&ticket_id="
twikiTable = "\n| *CMS Site* | *Number of Tickets* | *Tickets* |\n"
sum        = 0
for site in parsed:
    url = ""
    sum = sum + len(parsed[site])
    for id in parsed[site]:
        url = url + "[[%s][%s]] " % (ticketURL + id, id)
    twikiTable = twikiTable + "| %s | %d | %s |\n" % (site, len(parsed[site]), url)
dateStamp = time.strftime("%d/%b/%Y %H:%M:%S (GMT)", time.gmtime())
twikiTable = twikiTable + "| *<i>generated on %s</i>, Total number of tickets: %s* |||" % (dateStamp, sum)
fileOps.write(sys.argv[2], twikiTable)

# generate text file for the dashboard metric
metric    = dashboard.metric()
allSites  = sites.getSites().keys()
url       = "https://ggus.eu/?mode=ticket_search&cms_site=%s&timeframe=any&status=open&search_submit=GO%%21"
for site in parsed:
    value = len(parsed[site])
    metric.append(dashboard.entry(None, site, value, dashboard.red, url % site))
for site in allSites:
    if site in parsed.keys(): continue
    metric.append(dashboard.entry(None, site, 0, dashboard.green, url % site))
fileOps.write(sys.argv[3], str(metric))
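
The dashboard module does most of the bookkeeping in these scripts but is
never shown. A rough sketch of what entry and metric would have to look like
for the code above to work, assuming entry is a plain record and metric a
list with a line-per-entry text form (the field layout is an assumption):

# hypothetical sketch of lib.dashboard, inferred from its usage above
red, green = 'red', 'green'   # color constants; actual values unknown

class entry(object):
    def __init__(self, date, name, value, color, url):
        # date=None presumably lets the dashboard stamp the entry itself
        self.date = date
        self.name = name
        self.value = value
        self.color = color
        self.url = url

    def __str__(self):
        return '\t'.join([str(self.date), self.name, str(self.value),
                          self.color, self.url])

class metric(list):
    def __str__(self):
        return '\n'.join(str(e) for e in self)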
Example #5
transitional = dashboard.metric()

for site in siteList:
    badSiteFlag = False
    errMsg      = 'bad'

    # conditions to mark a site as bad
    if samAccess[site] < 50.0:
        badSiteFlag = True
        errMsg = errMsg + '_SAM(%s)' % round(samAccess[site], 2)
    if hammerCloud[site] < 80.0:
        badSiteFlag = True
        errMsg = errMsg + '_HC(%s)' % round(hammerCloud[site], 2)
    if site in ggus:
        badSiteFlag = True
    if 'n/a' in [hammerCloud[site], samAccess[site]]:
        badSiteFlag = True  # was 'basSiteFlag', a typo that left the flag unset
        if hammerCloud[site] == 'n/a': errMsg = errMsg + '_HC(n/a)'
        else: errMsg = errMsg + '_SAM(n/a)'

    if badSiteFlag:
        entry = dashboard.entry(None, site, errMsg, dashboard.red, '#')
    else:
        entry = dashboard.entry(None, site, 'on', dashboard.green, '#')

    if site in federations["prod"]: production.append(entry)
    elif site in federations["trans"]: transitional.append(entry)

fileOps.write('%s/aaaProd.txt' % output, str(production))
fileOps.write('%s/aaaTrans.txt' % output, str(transitional))
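
The flag-plus-suffix bookkeeping above is easy to get wrong (the original
even had a basSiteFlag typo that silently left the flag unset). Collecting
the failed checks in a list and deriving both the flag and the message from
it is one way to make the same logic harder to break; a sketch with the same
thresholds (unlike the original, it also labels the GGUS case):

def siteStatus(site, samAccess, hammerCloud, ggus):
    # collect every failed check; the site is bad iff the list is non-empty
    problems = []
    if samAccess[site] == 'n/a':
        problems.append('SAM(n/a)')
    elif samAccess[site] < 50.0:
        problems.append('SAM(%s)' % round(samAccess[site], 2))
    if hammerCloud[site] == 'n/a':
        problems.append('HC(n/a)')
    elif hammerCloud[site] < 80.0:
        problems.append('HC(%s)' % round(hammerCloud[site], 2))
    if site in ggus:
        problems.append('GGUS')
    if problems:
        return 'bad_' + '_'.join(problems)
    return 'on'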
Example #6
# note: the loop below also pulls some sites from the 'manual changes'
# metric, because some sites are not listed in sites.getSites
siteList = sites.getSites()
for site in usableSitesMC.getSites():
    if not site in siteList:
        siteList[site] = {}

for i in siteList:
    badSiteFlag = False
    ## detect bad sites!
    # site has bad hammercloud history
    if sites.getTier(i) == 2 and hasBadHistory(i):
        print i + " hasBadHistory"
        badSiteFlag = True
    # site is in the morgue
    elif morgue.hasSite(i) and morgue.getSiteEntry(i).value == 'Morgue':
        print i + " is in the Morgue"
        badSiteFlag = True
    # site has been blocked
    elif usableSitesMC.hasSite(i) and usableSitesMC.getSiteEntry(i).color == dashboard.red:
        print i + " usableSitesMC.getSiteEntry color is red"
        badSiteFlag = True

    if badSiteFlag:
        metric.append(dashboard.entry(None, i, 'not_usable', dashboard.red, urlStamp))
    else:
        metric.append(dashboard.entry(None, i, 'usable', dashboard.green, urlStamp))

fileOps.write(txtOutput, str(metric))
fileOps.write(jsonOutput, json.dumps(metric.__list__(), indent=2))
Example #7
transitional = dashboard.metric()

for site in siteList:
    badSiteFlag = False
    errMsg = 'bad'

    # conditions to mark a site as bad
    if samAccess[site] < 50.0:
        badSiteFlag = True
        errMsg = errMsg + '_SAM(%s)' % round(samAccess[site], 2)
    if hammerCloud[site] < 80.0:
        badSiteFlag = True
        errMsg = errMsg + '_HC(%s)' % round(hammerCloud[site], 2)
    if site in ggus:
        badSiteFlag = True
    if 'n/a' in [hammerCloud[site], samAccess[site]]:
        badSiteFlag = True  # was 'basSiteFlag', a typo that left the flag unset
        if hammerCloud[site] == 'n/a': errMsg = errMsg + '_HC(n/a)'
        else: errMsg = errMsg + '_SAM(n/a)'

    if badSiteFlag:
        entry = dashboard.entry(None, site, errMsg, dashboard.red, '#')
    else:
        entry = dashboard.entry(None, site, 'on', dashboard.green, '#')

    if site in federations["prod"]: production.append(entry)
    elif site in federations["trans"]: transitional.append(entry)

fileOps.write('%s/aaaProd.txt' % output, str(production))
fileOps.write('%s/aaaTrans.txt' % output, str(transitional))
Example #8
# altunda - [email protected]

import sys
from lib import fileOps
try: import json
except ImportError: import simplejson as json

# input: release list, release, new documentation status
if len(sys.argv) < 4:
    print >> sys.stderr, 'Error: not enough parameters.'
    sys.exit(1)

# read release list to find one to document.
relList = json.loads(fileOps.read(sys.argv[1]))

print "## documentation status will be updated for %s, %s -> %s" % (sys.argv[2],
      relList[sys.argv[2]]['status'], sys.argv[3])
# update status
relList[sys.argv[2]]['status'] = sys.argv[3]

# write the updated file
fileOps.write(sys.argv[1], json.dumps(relList, indent=2))
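
A toy run of the same update with hypothetical file contents; the release
name and architecture below are made up, but the shape matches the diff
files built in Examples #11 and #20:

import json

# hypothetical contents of the release list file (sys.argv[1])
relList = json.loads('{"CMSSW_7_1_0": {"status": "undocumented", '
                     '"arch": "slc6_amd64_gcc481"}}')
relList['CMSSW_7_1_0']['status'] = 'documented'   # sys.argv[2] -> sys.argv[3]
print json.dumps(relList, indent=2)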
Example #9
# this script provides data for the 'usable sites - manual changes' metric,
# which was created to control the 'usable sites' metric by hand and forms
# a closed loop for it: when someone changes a value in the
# 'usable sites - manual changes' metric through the dashboard web interface,
# the script writes that change back into the metric's input text file.

if len(sys.argv) < 3:
    print 'not enough parameters!'
    sys.exit(1)

# output path
output        = sys.argv[2]

# get the source metric url
metricURL     = sys.argv[1]
# get the entries of the metric
metric        = dashboard.parseJSONMetric(url.read(metricURL))
updatedMetric = dashboard.metric()

for i in sites.getSites():
    # if the site is not in the metric yet, add it (this is the
    # case that happens when a new site is created in the
    # site DB)
    if not metric.hasSite(i):
        updatedMetric.append(dashboard.entry(None, i, 'ready', dashboard.green, metricURL))
    else:
        latestEntry = metric.getLatestEntry(i)
        updatedMetric.append(dashboard.entry(None, i, latestEntry.value, latestEntry.color, metricURL))

fileOps.write(output, str(updatedMetric))
Example #10
    sys.stderr.write('not enough parameters!\n')
    sys.exit(1)

siteList  = sites.getSites()
config    = json.loads(fileOps.read(sys.argv[1]))
fieldList = config.keys()
fieldList.sort()
data      = {}
# add field names
data['fields'] = fieldList

# load all fields from dashboard
fields    = {}
for field in fieldList:
    fields[field] = dashboard.parseMetric(url.read(config[field]))
    print field, 'done...'

for site in siteList:
    data[site] = []
    for field in fieldList:
        if not fields[field].hasSite(site):
            data[site].append('black')
            continue
        data[site].append(fields[field].getSiteEntry(site).color)

template  = fileOps.read(sys.argv[2])
template  = template.replace('@DATE@', time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(time.time())))
template  = template.replace('@DATA@', json.dumps(data))

fileOps.write(sys.argv[3], template)
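
Isolated from the dashboard objects, what this builds is a per-site list of
colors aligned with data['fields']; a toy illustration with made-up metric
and site entries:

# toy version of the color matrix above
fieldList = sorted(['HammerCloud', 'SAM'])
colors = {'HammerCloud': {'T2_CH_CERN': 'green'},
          'SAM': {'T2_CH_CERN': 'green', 'T3_US_JHU': 'red'}}
data = {'fields': fieldList}
for site in ['T2_CH_CERN', 'T3_US_JHU']:
    # 'black' marks a site absent from a metric, as in the original
    data[site] = [colors[f].get(site, 'black') for f in fieldList]
# data == {'fields': ['HammerCloud', 'SAM'],
#          'T2_CH_CERN': ['green', 'green'], 'T3_US_JHU': ['black', 'red']}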
Example #11
diff = {}
for i in cmsswArray:
    # append the undocumented release if its name matches the pattern.
    # the pattern is used to skip special releases that we don't need
    # to document. Note that someone might still want documentation for
    # such a special release; in that case it has to be generated by hand.
    if i not in cmsswDocArray and isDocNeeded(i.name):
        # make it easy to parse
        diff[i.name] = {'status': 'undocumented', 'arch': i.arch}

try:
    # try to read & parse input file which is old diff file
    oldDiff = json.loads(fileOps.read(sys.argv[2]))
except IOError:
    # file not found, that means this is the first time
    # that the script has been run
    oldDiff = {}

# update the diff file
for i in diff:
    if not i in oldDiff: oldDiff[i] = diff[i]

# updated diff
out = json.dumps(oldDiff, indent=2, sort_keys=True)
print out
fileOps.write(sys.argv[2], out)

sys.exit(0)
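
isDocNeeded is defined earlier in the script and not shown here. A plausible
sketch of such a filter, assuming it keeps ordinary and patch releases while
skipping special builds; the regular expression is an assumption, not the
project's actual pattern:

import re

# hypothetical filter: document releases like CMSSW_7_1_0 and
# CMSSW_7_1_0_patch1, skip everything else
_release = re.compile(r'^CMSSW_\d+_\d+_\d+(_patch\d+)?$')

def isDocNeeded(name):
    return _release.match(name) is not None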
Example #12
    if not i.endswith('.json'): continue
    date = time.strftime("%B %d, %Y %H:%M:%S",
                         time.localtime(int(i.replace('.json', ''))))
    data[date] = json.loads(fileOps.read("{0}/{1}".format(inOutPath, i)))

jsData = {}
for date in data:
    for site in data[date]:
        if not site in jsData:
            jsData[site] = {}
        if data[date][site] == 'n/a': data[date][site] = -1.0
        jsData[site][date] = data[date][site]


def allTheSame(data):
    oldVal = None
    val = None
    for i in data:
        val = data[i]
        if oldVal is not None and val != oldVal: return False
        oldVal = data[i]
    return True


for site in jsData.keys():
    if allTheSame(jsData[site]): del jsData[site]

report = htmlTemplate.replace('@DATA@', json.dumps(jsData))
report = report.replace('@INFO@', json.dumps(info))
fileOps.write("{0}/samObservationReport.html".format(inOutPath), report)
Example #13
# merge current report with old ones (delete test results older than 2 weeks!)
tmpData = copy.deepcopy(report['data'])
for site in tmpData:
    for test in tests:
        for timeStamp in tmpData[site][test]:
            # delete value if it is older than 2 weeks
            if time.time() - float(timeStamp) > 60 * 60 * 24 * 14:
                del report['data'][site][test][timeStamp]

# update time stamp
report['lastUpdate'] = time.time()
report['timeOffset'] = time.timezone

htmlTemplate = htmlTemplate.replace(
    '@date@', time.strftime("%Y-%m-%d at %H:%M:%S",
                            time.localtime(time.time())))

# write files and exit
fileOps.write(reportFile, json.dumps(report))
for site in data:
    siteReport = {
        'lastUpdate': report['lastUpdate'],
        'timeOffset': report['timeOffset'],
        'data': {
            site: report['data'][site]
        }
    }
    siteReport = htmlTemplate.replace('@report@', json.dumps(siteReport))
    fileOps.write('%s/%s_report.html' % (htmlOutputPath, site), siteReport)
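
The two-week cutoff above iterates over a deep copy so the original dict can
be modified safely while looping. The same pruning rule, pulled out into a
helper that returns a filtered copy instead of deleting in place:

import time

TWO_WEEKS = 60 * 60 * 24 * 14

def prune(series, now=None):
    # keep only the timestamped values from the last two weeks
    if now is None:
        now = time.time()
    return dict((ts, val) for ts, val in series.items()
                if now - float(ts) <= TWO_WEEKS)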
Example #14
siteList = sites.getSites()

# prepare result array. note that for SAM tests
# we don't filter sites by tier number because some
# T3s have SAM tests.
results  = {}
for site in siteList:
    results[site] = 'n/a'

now      = time.time()
start    = date.format(time.strftime("T00:00:00Z", time.localtime(now - 24*60*60)))
# note: urllib.quote leaves '/' unescaped by default; quote_plus would
# also escape it, but these timestamps contain no '/', so quote is enough.
start    = urllib.quote(start)
end      = date.format(time.strftime("T23:00:00Z", time.localtime(now - 24*60*60)))
end      = urllib.quote(end)

print 'SAM test time range:', start, end

# start, end, site
for site in results:
    source = samURL.format(start, end, site)
    data   = json.loads(url.read(source))
    if not (data.has_key('data') and len(data['data']) and data['data'][0].has_key('data')): continue
    data   = data['data'][0]['data'][0]
    if not data['OK'] + data['CRIT'] + data['SCHED'] > 0: continue
    # multiply by 100.0 first: with Python 2 ints, OK / total would floor to 0
    result = 100.0 * data['OK'] / (data['OK'] + data['CRIT'] + data['SCHED'])
    results[site] = round(result, 3)

fileOps.write("{0}/{1}.json".format(out, int(time.time())), json.dumps(results, indent = 2))
Example #15
                dashboard.entry(
                    None, site, "site lost subscription to transitional federation", "blue", reportURL % site
                )
            )
        elif historic_federation == "trans" and siteDownTimeFlag == True:
            transitional.append(
                dashboard.entry(None, site, "site is on downtime", siteDownTimes[site], reportURL % site)
            )
        elif historic_federation == "prod" and siteDownTimeFlag == False:
            production.append(
                dashboard.entry(None, site, "site lost subscription to prod federation", "blue", reportURL % site)
            )
        elif historic_federation == "prod" and siteDownTimeFlag == True:
            production.append(dashboard.entry(None, site, "site is on downtime", siteDownTimes[site], reportURL % site))


report["lastUpdate"] = time.time()
report["data"] = {}
for site in siteList:
    report["data"][site] = {}
    if samAccess.has_key(site):
        report["data"][site]["sam"] = samAccess[site]
    if hammerCloud.has_key(site):
        report["data"][site]["hc"] = hammerCloud[site]
    if ggus.has_key(site):
        report["data"][site]["ggus"] = ggus[site]

fileOps.write(reportFile, json.dumps(report))
fileOps.write("%s/aaaProd.txt" % output, str(production))
fileOps.write("%s/aaaTrans.txt" % output, str(transitional))
Example #16
        entry = dashboard.entry(None, site, 'on', dashboard.green, reportURL % site)

    if site in federations["prod"]: production.append(entry)
    elif site in federations["trans"]: transitional.append(entry)
    else:
        historic_federation = check_federation_history(site)
        if historic_federation == "trans" and siteDownTimeFlag == False:
            transitional.append(dashboard.entry(None, site, 'site lost subscription to transitional federation', 'blue', reportURL % site))
        elif historic_federation == "trans" and siteDownTimeFlag == True:
            transitional.append(dashboard.entry(None, site, 'site is on downtime', siteDownTimes[site], reportURL % site))
        elif historic_federation == "prod" and siteDownTimeFlag == False:
            production.append(dashboard.entry(None, site, 'site lost subscription to prod federation', 'blue', reportURL % site))
        elif historic_federation == "prod" and siteDownTimeFlag == True:
            production.append(dashboard.entry(None, site, 'site is on downtime', siteDownTimes[site], reportURL % site))
        

report['lastUpdate'] = time.time()
report['data']       = {}
for site in siteList:
    report['data'][site] = {}
    if samAccess.has_key(site):
        report['data'][site]['sam']  = samAccess[site]
    if hammerCloud.has_key(site):
        report['data'][site]['hc']   = hammerCloud[site]
    if ggus.has_key(site):
        report['data'][site]['ggus'] = ggus[site]

fileOps.write(reportFile, json.dumps(report))
fileOps.write('%s/aaaProd.txt' % output, str(production))
fileOps.write('%s/aaaTrans.txt' % output, str(transitional))
Example #17
except ImportError: import simplejson as json
from lib import fileOps, url, dashboard, sites

if len(sys.argv) < 3:
    sys.stderr.write('not enough parameters!\n')
    sys.exit(1)

federationSource = sys.argv[1]
metricOutput     = sys.argv[2]

federations = json.loads(url.read(federationSource))

federationMetric = dashboard.metric()
for fedName in federations:
    for site in federations[fedName]:
        if fedName == 'prod':
            color = dashboard.green
        elif fedName == 'trans':
            color = dashboard.cyan
        elif fedName == 'nowhere':
            color = dashboard.gray
        else:
            # basically, this state is impossible given the current federation
            # names, but it is handled anyway in case a new name is added
            # --and such a change must also be reflected in this metric.
            color = dashboard.white
        entry = dashboard.entry(None, site, fedName, color, federationSource)
        federationMetric.append(entry)

fileOps.write(metricOutput, str(federationMetric))
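
The if/elif chain above is a fixed name-to-color mapping with dashboard.white
as the "impossible state" fallback; written as a lookup table, it reads the
way the comment describes it:

# equivalent lookup-table form of the color selection above
fedColors = {
    'prod':    dashboard.green,
    'trans':   dashboard.cyan,
    'nowhere': dashboard.gray,
}
# dashboard.white still flags an unexpected federation name
color = fedColors.get(fedName, dashboard.white)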
Example #18
for i in siteList:
    # if the site is not in the metric yet, add it (this is the
    # case that happens when a new site is created in the
    # site DB)
    if not metric.hasSite(i):
        updatedMetric.append(
            dashboard.entry(None, i, 'ready', dashboard.green, metricURL))
    else:
        latestEntry = metric.getLatestEntry(i)
        updatedMetric.append(
            dashboard.entry(None, i, latestEntry.value, latestEntry.color,
                            metricURL))
        print latestEntry.value + " " + i

#######################
blist = [
    'T2_RU_RRC_KI', 'T3_BY_NCPHEP', 'T3_CH_PSI', 'T3_CN_PKU', 'T3_ES_Oviedo',
    'T3_IN_PUHEP', 'T3_IR_IPM', 'T3_KR_UOS', 'T3_UK_London_RHUL',
    'T3_UK_London_UCL', 'T3_UK_ScotGrid_ECDF', 'T3_US_FNALLPC',
    'T3_US_FNALXEN', 'T3_US_FSU', 'T3_US_JHU', 'T3_US_Kansas', 'T3_US_MIT',
    'T3_US_NU', 'T3_US_Princeton', 'T3_US_Princeton_ICSE', 'T3_US_Rice',
    'T3_BG_UNI_SOFIA'
]
for bsite in blist:
    updatedMetric.append(
        dashboard.entry(None, bsite, 'blocked', dashboard.red, metricURL))
#######################

fileOps.write(output, str(updatedMetric))
Example #19
        report['data'][site] = {}
    for test in tests:
        if not report['data'][site].has_key(test):
            report['data'][site][test] = {}
        if not data[site].has_key(test):
            continue
        report['data'][site][test][currentReport['lastUpdate']] = data[site][test]

# merge current report with old ones (delete test results older than 2 weeks!)
tmpData = copy.deepcopy(report['data'])
for site in tmpData:
    for test in tests:
        for timeStamp in tmpData[site][test]:
            # delete value if it is older than 2 weeks
            if time.time() - float(timeStamp) > 60*60*24*14:
                del report['data'][site][test][timeStamp]

# update time stamp
report['lastUpdate'] = time.time()
report['timeOffset'] = time.timezone

htmlTemplate = htmlTemplate.replace('@date@', time.strftime("%Y-%m-%d at %H:%M:%S", time.localtime(time.time())))

# write files and exit
fileOps.write(reportFile, json.dumps(report))
for site in data:
    siteReport = {'lastUpdate' : report['lastUpdate'], 'timeOffset' : report['timeOffset'],
                  'data' : {site : report['data'][site]}}
    siteReport = htmlTemplate.replace('@report@', json.dumps(siteReport))
    fileOps.write('%s/%s_report.html' % (htmlOutputPath, site), siteReport)
Example #20
diff = {}
for i in cmsswArray:
    # append the undocumented release if its name matches the pattern.
    # the pattern is used to skip special releases that we don't need
    # to document. Note that someone might still want documentation for
    # such a special release; in that case it has to be generated by hand.
    if i not in cmsswDocArray and isDocNeeded(i.name):
        # make it easy to parse
        diff[i.name] = {'status': 'undocumented', 'arch': i.arch}

try:
    # try to read & parse input file which is old diff file
    oldDiff = json.loads(fileOps.read(sys.argv[2]))
except IOError:
    # file not found, that means this is the first time
    # that the script has been run
    oldDiff = {}

# update the diff file
for i in diff:
    if not i in oldDiff: oldDiff[i] = diff[i]

# updated diff
out = json.dumps(oldDiff, indent=2, sort_keys=True)
print out
fileOps.write(sys.argv[2], out)

sys.exit(0)