#Check HC for the last 3 days
    for entry in siteHC:
        if entry.color == HAMMERCLOUD_OK_COLOR or not useHC:
            flagGoodHC = True
    if flagGoodHC and not flagBadLifeStatus:
        newCrabStatus = ENABLED_STATUS
    else:
        newCrabStatus = DISABLED_STATUS
    print site + " flagGoodHC: " + str(flagGoodHC) + " flagBadLifeStatus: " + str(flagBadLifeStatus) + " new crabStatus: " + newCrabStatus
    if newCrabStatus != 'unknown':
        allsitesMetric.append(
            dashboard.entry(
                date=datetime.utcnow().strftime("%Y-%m-%d %H:%M:%S"),
                name=site,
                value=newCrabStatus,
                color=COLORS.get(newCrabStatus, 'white'),
                url=URL_ENTRY))

if len(allsitesMetric) > 1:
    outputFileP = open(OUTPUT_P_FILE_NAME, 'w')
    outputFileP.write(
        dashboard.printHeader(scriptName="LifeStatus", documentationUrl=""))
    for site in allsitesMetric:
        outputFileP.write(str(site) + '\n')
    print "\n--Output written to %s" % OUTPUT_P_FILE_NAME
    outputFileP.close()
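# A compact, self-contained restatement of the decision rule above (a sketch;
# the 'enabled'/'disabled' strings stand in for ENABLED_STATUS/DISABLED_STATUS):
# CRAB is enabled for a site only if it has at least one good HammerCloud
# entry (or HC checking is turned off) and its life status is not bad.
def decideCrabStatus(flagGoodHC, flagBadLifeStatus):
    return 'enabled' if (flagGoodHC and not flagBadLifeStatus) else 'disabled'

assert decideCrabStatus(True, False) == 'enabled'
assert decideCrabStatus(True, True) == 'disabled'
assert decideCrabStatus(False, False) == 'disabled'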
                    except:
                        pass
        elif siteTier == 3:
            pass  # Tier-3 sites: nothing extra to check in this fragment
        else:
            pass
        if dailyMetric == NOT_READY:
            for key, entry in filteredSiteMaintenance.iteritems():
                try:
                    if "OUTAGE SCHEDULED" in entry.value:
                        dailyMetric = DOWNTIME
                except:
                    pass 
        allsitesMetric.append([site, dailyMetric, hammerCloudNaFlag, samNaFlag])
else:
    print "I couldn't find any Sam, Hammercloud, and Maintenance data, Something's wrong, I'm dead Jim"

dailyMetricEntries = []
for metric in allsitesMetric:
    #print metric[1]
    if not (metric[3] and metric[2]):
        dailyMetricEntries.append(dashboard.entry(date = atMidnight, name = metric[0], value = metric[1], color = OUTPUT_COLORS[metric[1]], url = dashboardUrl % metric[0]))

if len(dailyMetricEntries) > 1:
    outputFile = open(OUTPUT_FILE_NAME, 'w')
    for site in dailyMetricEntries:
        outputFile.write(str(site) + '\n')
    print "\n--Output written to %s" % OUTPUT_FILE_NAME
    outputFile.close()

Example #3
            pass
        if dailyMetric == NOT_READY:
            for key, entry in filteredSiteMaintenance.iteritems():
                try:
                    if entry.color == 'saddlebrown':
                        dailyMetric = DOWNTIME
                except:
                    pass
        allsitesMetric.append(
            [site, dailyMetric, hammerCloudNaFlag, samNaFlag])
else:
    print "I couldn't find any Sam, Hammercloud, and Maintenance data, Something's wrong, I'm dead Jim"

dailyMetricEntries = []
for metric in allsitesMetric:
    #print metric[1]
    if not (metric[3] and metric[2]):
        dailyMetricEntries.append(
            dashboard.entry(date=atMidnight,
                            name=metric[0],
                            value=metric[1],
                            color=OUTPUT_COLORS[metric[1]],
                            url=dashboardUrl % metric[0]))

if len(dailyMetricEntries) > 1:
    outputFile = open(OUTPUT_FILE_NAME, 'w')
    for site in dailyMetricEntries:
        outputFile.write(str(site) + '\n')
    print "\n--Output written to %s" % OUTPUT_FILE_NAME
    outputFile.close()
Example #4
                newProdStatus = ON_STATUS
        if siteCurrentProd_Status is not None and siteCurrentProd_Status.value == DRAIN_STATUS:
            if not flagDowntime and not flagLifeStatus and readinessScore2day > 0.9:
                newProdStatus = ON_STATUS
            else:
                newProdStatus = DRAIN_STATUS
        if siteCurrentProd_Status is None and newProdStatus == 'unknown':
            if (flagDowntime or flagLifeStatus) or readinessScore < 0.6:
                newProdStatus = DRAIN_STATUS
            elif readinessScore2day > 0.9:
                newProdStatus = ON_STATUS
            else:
                newProdStatus = DRAIN_STATUS
        if newProdStatus != 'unknown':
            allsitesMetric.append(
                dashboard.entry(
                    date=datetime.now().strftime("%Y-%m-%d %H:%M:%S"),
                    name=site,
                    value=newProdStatus,
                    color=COLORS.get(newProdStatus, 'white'),
                    url='https://twiki.cern.ch/twiki/bin/view/CMS/SiteSupportSiteStatusSiteReadiness'))

if len(allsitesMetric) > 1:
    outputFileP = open(OUTPUT_P_FILE_NAME, 'w')
    for site in allsitesMetric:
        outputFileP.write(str(site) + '\n')
    print "\n--Output written to %s" % OUTPUT_P_FILE_NAME
    outputFileP.close()
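# In short: a site already in DRAIN returns to ON only with no downtime, no
# bad life status, and a 2-day readiness score above 0.9; a site with no
# previous prod status starts in DRAIN unless it clears that same 0.9 bar
# unflagged, with a readiness score below 0.6 forcing DRAIN outright.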
Example #6
def main():
    parser = OptionParser(usage="usage: %prog [options] filename",
                          version="%prog 1.0")
    parser.add_option("-d", "--date",
                      dest="inputDate",
                      help="date from which to fetch the results for HC in format %Y-%m-%dT%H:%M:%SZ ")
    parser.add_option("-o", "--outputDir",
                      dest="outputDir",
                      help="Directory in which to save the output")
    (options, args) = parser.parse_args()
    if options.inputDate is None:
        print "Please input a date with the --date option"
        exit(-1)
    else:
        try:
            datetmp = dateutil.parser.parse(options.inputDate, ignoretz=True)
        except:
            print "I couldn't recognize the date, please give me one like 2015-12-31T23:59:59Z"
            exit(-1)
    if options.outputDir is None:
        print "Please add a directory with option --outputDir"
        exit(-1)
    else:
        if not os.path.isdir(options.outputDir):
            print options.outputDir + " is not a valid directory or you don't have read permissions"
            exit(-1)
    # Constants
    interval = 30
    dateFrom = datetmp - timedelta(minutes=datetmp.minute % interval,
                                   seconds=datetmp.second,
                                   microseconds=datetmp.microsecond)
    dateTo = dateFrom + timedelta(minutes=interval)
    dateFormat = "%Y-%m-%dT%H:%M:%SZ"
    dateFromStr = datetime.strftime(dateFrom, dateFormat)
    print dateFromStr
    dateToStr = datetime.strftime(dateTo, dateFormat)
    OUTPUT_FILE_NAME = os.path.join(options.outputDir, "sam.txt")
    print "Getting SAM Score from " + str(dateFrom) + " to " + str(dateTo)
    samUrl = "http://wlcg-sam-cms.cern.ch/dashboard/request.py/getstatsresultsmin?profile_name=CMS_CRITICAL_FULL&plot_type=quality&start_time=%s&end_time=%s&granularity=single&view=siteavl" % (dateFromStr, dateToStr)    
    print samUrl
    # Download the url or die
    try:
        print "Fetching url : " + samUrl
        jsonStr = url.read(samUrl)
        samInfo = json.loads(jsonStr)
    except:
        # any failure fetching or parsing the URL is fatal
        exit(100)
    print "Data retrieved!"
    sitesfromDashboard = []
    for samSummary in samInfo['data']:
        sitesfromDashboard.append(samSummary['name'])
    print sitesfromDashboard
    samScoreSites = []
    print"Getting SAM for all sites"
    for site in sitesfromDashboard:
        for samSummary in samInfo['data']:
            if samSummary['name'] == site:
                try:
                    siteOK = float(samSummary['data'][0]['OK'])
                    siteCritical = float(samSummary['data'][0]['CRIT'])
                    siteSched = float(samSummary['data'][0]['SCHED'])
                    if (siteOK + siteCritical + siteSched) > 0.0:
                        siteAvailabilityNum = (siteOK / (siteOK + siteCritical + siteSched)) * 100.0
                        siteAvailability = int(siteAvailabilityNum)
                        if siteAvailabilityNum > 89.9:
                            siteColor = "cOk"
                        elif (sites.getTier(site) == 2 or sites.getTier(site) == 3) and siteAvailabilityNum > 79.9:
                            siteColor = "cOk"
                        else:
                            siteColor = "cNotOk"
                    else:
                        siteAvailability = "n/a"
                        siteAvailabilityNum = None
                        siteColor = "cNA"
                except:
                    siteAvailability = "Error"
                    siteAvailabilityNum = None
                    siteColor = "cError"
                print site + "  OK " + str(siteOK) + " CRIT " + str(siteCritical) + " SCHED " + str(siteSched) + " SCORE : " + str(siteAvailability) 
                samScoreSites.append(dashboard.entry(date = dateFrom.strftime("%Y-%m-%d %H:%M:%S"), name = site, value = siteAvailability, color = siteColor, url = getSuccessrateUrl (site, dateFrom, dateTo), nvalue=siteAvailabilityNum))
    print str(samScoreSites)
    if len(samScoreSites) > 1:
        OutputFile = open(OUTPUT_FILE_NAME, 'w')
        for site in samScoreSites:
            if site.name != "unknown":
                OutputFile.write(str(site) + '\n')
        print "\n--SAM Score output written to %s" % OUTPUT_FILE_NAME
        OutputFile.close()
    else:
        print "There's no data, I quit!"
    print site
    tier = sites.getTier(site)
    siteCurrentLifeStatus = lfStatus.getLatestEntry(site)
    flagBadLifeStatus = False
    flagGoodHC = False
    newCrabStatus = 'unknown'
    if siteCurrentLifeStatus is not None and (siteCurrentLifeStatus.value in BAD_LIFESTATUS):
        flagBadLifeStatus = True
    siteHC = hcStatus.getSiteEntries(site).values()
    #Check HC for the last 3 days
    for entry in siteHC:
        if entry.color == HAMMERCLOUD_OK_COLOR or not useHC:
            flagGoodHC = True
    if flagGoodHC and not flagBadLifeStatus:
        newCrabStatus = ENABLED_STATUS
    else:
        newCrabStatus = DISABLED_STATUS
    print site + " flagGoodHC: " + str(flagGoodHC) + " flagBadLifeStatus: " + str(flagBadLifeStatus) + " new crabStatus: " + newCrabStatus
    if newCrabStatus != 'unknown':
        allsitesMetric.append(dashboard.entry(date = datetime.utcnow().strftime("%Y-%m-%d %H:%M:%S"), name = site, value = newCrabStatus, color = COLORS.get(newCrabStatus, 'white'), url = URL_ENTRY))

if len(allsitesMetric) > 1:
    outputFileP = open(OUTPUT_P_FILE_NAME, 'w')
    outputFileP.write(dashboard.printHeader(scriptName = "LifeStatus", documentationUrl=""))
    for site in allsitesMetric:
        outputFileP.write(str(site) + '\n')
    print "\n--Output written to %s" % OUTPUT_P_FILE_NAME
    outputFileP.close()
Example #8
def main():
    parser = OptionParser(usage="usage: %prog [options] filename",
                          version="%prog 1.0")
    parser.add_option("-d",
                      "--date",
                      dest="inputDate",
                      help="date from which to fetch the results for HC in format %Y-%m-%dT%H:%M:%SZ")
    parser.add_option("-o",
                      "--outputDir",
                      dest="outputDir",
                      help="Directory in which to save the output")
    (options, args) = parser.parse_args()
    if options.inputDate is None:
        print "Please input a date with the --date option"
        exit(-1)
    else:
        try:
            datetmp = dateutil.parser.parse(options.inputDate, ignoretz=True)
        except:
            print "I couldn't recognize the date, please give me one like 2015-12-31T23:59:59Z"
            exit(-1)
    if options.outputDir is None:
        print "Please add a directory with option --outputDir"
        exit(-1)
    else:
        if not os.path.isdir(options.outputDir):
            print options.outputDir + " is not a valid directory or you don't have read permissions"
            exit(-1)


    # Constants:
    # Dashboard API for Hammercloud
    # replace (site, startTimeStamp, endTimeStamp)
    interval = 15
    dateFrom = datetmp - timedelta(minutes=datetmp.minute % interval,
                                   seconds=datetmp.second,
                                   microseconds=datetmp.microsecond)
    dateTo = dateFrom + timedelta(minutes=interval)
    dateFormat = "%Y-%m-%d+%H%%3A%M"
    dateFromStr = datetime.strftime(dateFrom, dateFormat)
    dateToStr = datetime.strftime(dateTo, dateFormat)
    OUTPUT_FILE_NAME = os.path.join(options.outputDir, "hammercloud.txt")
    print "Calcuating Hammercloud Score from " + str(dateFrom) + " to " + str(
        dateTo)
    urlHC = "http://dashb-cms-job.cern.ch/dashboard/request.py/jobsummary-plot-or-table2?user=&site=&submissiontool=&application=&activity=hctest&status=&check=terminated&tier=&sortby=site&ce=&rb=&grid=&jobtype=&submissionui=&dataset=&submissiontype=&task=&subtoolver=&genactivity=&outputse=&appexitcode=&accesstype=&inputse=&cores=&date1=%s&date2=%s&prettyprint" % (
        dateFromStr, dateToStr)
    # Download the url or die
    try:
        print "Fetching url : " + urlHC
        jsonStr = url.read(urlHC)
        hcInfo = json.loads(jsonStr)
    except:
        # any failure fetching or parsing the URL is fatal
        exit(100)
    print "Data retrieved!"
    sitesfromDashboard = []
    for hcSummary in hcInfo['summaries']:
        sitesfromDashboard.append(hcSummary['name'])

    hcScoreSites = []
    noNa = 0
    print "Calculating HammerCloud scores"
    for site in sitesfromDashboard:
        for hcSummary in hcInfo['summaries']:
            if hcSummary['name'] == site and site != "unknown":
                siteTerminated = hcSummary['terminated']
                siteSuccesful = hcSummary['app-succeeded']
                siteUnsuccesful = hcSummary['unsuccess']
                siteCancelled = hcSummary['cancelled']
                siteUnk = hcSummary['allunk']
                siteScore = None
                if (siteTerminated - siteCancelled - siteUnk) > 0:
                    siteScore = (float(siteSuccesful - siteUnsuccesful) /
                                 float(siteTerminated - siteCancelled -
                                       siteUnk)) * 100.0
                    siteColor = "cNotOk"
                    if (sites.getTier(site) == 2
                            or sites.getTier(site) == 3) and siteScore > 79.9:
                        siteColor = "c*k"
                    if sites.getTier(site) == 1 and siteScore > 89.9:
                        siteColor = "c*k"
                    print site + " (" + str(siteSuccesful) + " - " + str(
                        siteUnsuccesful) + ")/(" + str(
                            siteTerminated) + " - " + str(
                                siteCancelled) + " - " + str(
                                    siteUnk) + ") =" + str(siteScore)
                elif siteTerminated > 0 or siteCancelled > 0 or siteUnk > 0 or siteUnsuccesful > 0 or siteSuccesful > 0:
                    siteScore = "Error"
                    noNa += 1
                    siteColor = "cError"
                if siteScore is not None:
                    hcScoreSites.append(
                        dashboard.entry(
                            date=dateFrom.strftime("%Y-%m-%d %H:%M:%S"),
                            name=site,
                            value=siteScore,
                            color=siteColor,
                            url=getSuccessrateUrl(site, dateFromStr,
                                                  dateToStr)))
    #print str(hcScoreSites)
    if len(hcScoreSites) > noNa:
        OutputFile = open(OUTPUT_FILE_NAME, 'w')
        for site in hcScoreSites:
            if site.name != "unknown":
                OutputFile.write(str(site) + '\n')
        print "\n--HC Score output written to %s" % OUTPUT_FILE_NAME
        OutputFile.close()
    else:
        print "There's no data, I quit!"
Example #9
def main():
    #Make a pretty printer
    print "--------------------------\nStarting at " +str(datetime.now()) 
    pp = pprint.PrettyPrinter()
    OUTPUT_FILE_NAME = os.path.join(sys.argv[2],"xfers.txt")
    OUTPUT_FILE_CORRECTIONS = os.path.join(sys.argv[2],"xfers_POSTREQUEST.txt")
    #Get transfer history
    binwidth = 6*60*60
    datetmp = dateutil.parser.parse(sys.argv[1], ignoretz=True)
    print datetmp
    endtime = (int(calendar.timegm(datetmp.timetuple()))/binwidth)*binwidth
    starttime = endtime-binwidth
    starttime_str = (datetime.fromtimestamp(starttime)).strftime("%Y-%m-%d %H:%M:%S")
    endtime_str = (datetime.fromtimestamp(endtime)).strftime("%Y-%m-%d %H:%M:%S")
    params = {'binwidth': binwidth, 'starttime': starttime , 'endtime': endtime}
    Morgue_Sites = []
    lifeStatus = getJSONMetricforAllSites(235, 24)
    sites = lifeStatus.getSites()
    for site in sites:
        site_status = lifeStatus.getLatestEntry(site)
        if site_status.value != "enabled":
            Morgue_Sites.append(site)
    url = 'https://cmsweb.cern.ch/phedex/datasvc/json/%s/TransferHistory'
    print url+"?"+urllib.urlencode(params)
    context = None #ssl._create_unverified_context()
    f_debug = urllib.urlopen(url % 'debug', data=urllib.urlencode(params))
    items_debug = json.load(f_debug)
    f_prod = urllib.urlopen(url % 'prod', data=urllib.urlencode(params))
    items_prod = json.load(f_prod)
    timeslots = set()
    xferdata = {}
    noTransfersToFrom = {}
    noTransfersToFrom2 = {}
    for item in items_debug["phedex"]["link"] + items_prod["phedex"]["link"]:
        from_site = item['from'] #.replace('_MSS','').replace('_Buffer', '').replace('_Disk','').replace('_Export','')
        to_site= item['to'] #.replace('_MSS','').replace('_Buffer', '').replace('_Disk','').replace('_Export','')
        if to_site == from_site:
            continue 
        to_tier = int(to_site[1])
        from_tier = int(from_site[1])
        to_tier = 1 if to_tier==0 else to_tier
        from_tier = 1 if from_tier==0 else from_tier
        for transferslot in item['transfer']:
            try:
                quality = float(transferslot['quality'])
            except:
                quality = 0.0
            done_files = int(transferslot['done_files'])
            fail_files = int(transferslot['fail_files'])
            done_bytes = int(transferslot['done_bytes'])
            fail_bytes = int(transferslot['fail_bytes'])
            try_files = int(transferslot['try_files'])
            timeslot = int(transferslot['timebin'])
            if from_site not in Morgue_Sites and to_site not in Morgue_Sites and try_files > 0:
                noTransfersToFrom[from_site][timeslot] = noTransfersToFrom.setdefault(from_site,{}).setdefault(timeslot,0) + try_files
                noTransfersToFrom[to_site][timeslot] = noTransfersToFrom.setdefault(to_site,{}).setdefault(timeslot,0) + try_files
                noTransfersToFrom2[from_site][timeslot] = noTransfersToFrom2.setdefault(from_site,{}).setdefault(timeslot,0) + 1
                noTransfersToFrom2[to_site][timeslot] = noTransfersToFrom2.setdefault(to_site,{}).setdefault(timeslot,0) + 1 
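                # The setdefault chains above both create the nested
                # {site: {timeslot: count}} structure on first access and
                # accumulate into it, so no separate initialisation pass
                # is needed.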
 
            timeslots.add(timeslot)
            if to_tier < 3 and from_tier < 3 and (done_bytes > 0 or fail_bytes > 0) and try_files > 0:
                xferdata.setdefault(from_site,{}).setdefault(timeslot,{}).setdefault("to", {}).setdefault(to_tier,[]).append([quality, done_files, fail_files, done_bytes, fail_bytes, to_site])
                xferdata.setdefault(to_site,{}).setdefault(timeslot,{}).setdefault("from", {}).setdefault(from_tier,[]).append([quality, done_files, fail_files, done_bytes, fail_bytes, from_site])
            #xferdate[site][from][tier][site][donefile, failfiles, donebytes, failbytes]
            #pp.pprint([from_site, to_site, quality, done_files, fail_files])
    del items_debug
    siteScores = {}
    
    for site, bincontent in xferdata.iteritems():
        for timeslot, xfers in bincontent.iteritems():
            print site
            for toFrom in ['from', 'to']:
                xfers_tofrom = xfers.get(toFrom, {})
                for tier, xfers_from_tier in xfers_tofrom.iteritems():
                    print "----------"
                    print "Tier" + str(tier)
                    print "Original sorted"
                    xfers_from_tier.sort(key=lambda x: x[2])
                    pp.pprint(len(xfers_from_tier))
                    pp.pprint(xfers_from_tier)
                    # Remove up to [1 for T1, ]
                    # cap the number of vetoes by the entries actually present
                    for _ in range(0, min(len(xfers_from_tier), perTierVeto[tier])):
                        xfers_from_tier.sort(key=lambda x: x[2])
                        if len(xfers_from_tier) > 0 and xfers_from_tier[-1][2] > 1:
                            #print "  deleted for highest failure files " + str(xfers_from_tier[-1])
                            xfers_from_tier.remove(xfers_from_tier[-1])
                        xfers_from_tier.sort(key=lambda x: x[0], reverse=True)
                        if len(xfers_from_tier) > 0 and xfers_from_tier[-1][0] < 0.9:
                            #print "  deleted for highest failure rate " + str(xfers_from_tier[-1])
                            xfers_from_tier.remove(xfers_from_tier[-1])
                    print "After deletion"
                    pp.pprint(len(xfers_from_tier))
                    pp.pprint(xfers_from_tier)
                    print "----------"
                    for xfer in xfers_from_tier:
                        siteScores.setdefault(site,{}).setdefault(timeslot,{}).setdefault(toFrom,{}).setdefault(tier,{}).setdefault('done',0)
                        siteScores[site][timeslot][toFrom][tier]['done'] += xfer[1] 
                        siteScores.setdefault(site,{}).setdefault(timeslot,{}).setdefault(toFrom,{}).setdefault(tier,{}).setdefault('failed',0)
                        siteScores[site][timeslot][toFrom][tier]['failed'] += xfer[2]
    #pp.pprint(timeslots)
    slotList = (list(timeslots))
    slotList.sort()
    emptylist = ['n'] * len(slotList)
    outputFile = open('output.csv', 'w')
    excelDate = lambda x: (float(x) / 86400.0) + 25569.0
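    # excelDate converts a Unix timestamp into an Excel serial date (days
    # since the Excel epoch); 25569 is the serial number of 1970-01-01,
    # so excelDate(0) == 25569.0.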
    outputFile.write("Site," + ",".join([str(excelDate(x)) for x in slotList])+"\n")
    for site, scoresPerSlot in siteScores.iteritems():
        siteList = list(emptylist)
        for i in range(0, len(slotList)):
            if slotList[i] in scoresPerSlot:                
                scores = scoresPerSlot[slotList[i]]
                score = 10.0
                for tag in ['to', 'from']:
                    for tier in [0, 1, 2]:
                        try:
                            score = min(score, float(scores.get(tag,{}).get(tier,{}).get('done',0)) / float(scores.get(tag,{}).get(tier,{}).get('done',0) + scores.get(tag,{}).get(tier,{}).get('failed',0)))
                        except:
                            continue
                        #print tag + " " + str(tier) + ", score: " + str(score)
                siteList[i] = score
        #print site + " " + str(siteList)
        outputFile.write(site+ "," + ",".join([str(x) for x in siteList])+"\n")
    xferMetricEntries = []
    
    for site, timeslotObj in siteScores.iteritems():
        score = 666  # sentinel: rendered as "n/a" below if no slot yields a score
        for timeslot, scores in timeslotObj.iteritems():
            for x in slotList:
                print site + "," + str(timeslot)
                for tag in ['to', 'from']:
                    for tier in [0, 1, 2]:
                        try:
                            print "tier: " + str(tier) + " to/from: " + tag + " score: " + str(float(scores.get(tag,{}).get(tier,{}).get('done',0)) / float(scores.get(tag,{}).get(tier,{}).get('done',0) + scores.get(tag,{}).get(tier,{}).get('failed',0)))
                            score = min(score, float(scores.get(tag,{}).get(tier,{}).get('done',0)) / float(scores.get(tag,{}).get(tier,{}).get('done',0) + scores.get(tag,{}).get(tier,{}).get('failed',0)))
                        except:
                            continue
                site_value = "%.1f" % (score*100)
                site_nvalue = score*100
                if score == 666:
                    site_color = "white"
                    site_value = "n/a"
                elif score > 0.6:
                    site_color = "green"
                else:
                    site_color = "red"
                justSite = site.replace("_Disk","").replace("_Buffer","").replace("_Export","").replace("_MSS","") 
                if site != justSite:
                    addnew = True
                    # iterate over a copy: entries may be removed mid-loop
                    for entry in list(xferMetricEntries):
                        if entry.name == justSite and entry.nvalue < site_nvalue:
                            addnew = False
                            break
                        if entry.name == justSite and entry.nvalue > site_nvalue:
                            xferMetricEntries.remove(entry)
                            addnew = True

                    if (addnew):
                        xferMetricEntries.append(dashboard.entry(date = starttime_str, name = justSite, value = site_value, color = site_color, url = makelink(site, starttime, binwidth, endtime), nvalue = site_nvalue))

                xferMetricEntries.append(dashboard.entry(date = starttime_str, name = site, value = site_value, color = site_color, url = makelink(site, starttime, binwidth, endtime), nvalue = site_nvalue))
    if len(xferMetricEntries) > 1:
        outputFile = open(OUTPUT_FILE_NAME, 'w')
        correctionOutputFile = open(OUTPUT_FILE_CORRECTIONS, 'a')
        for site in xferMetricEntries:
            outputFile.write(str(site) + '\n')
            correctionOutputFile.write(("\t".join([starttime_str, endtime_str, str(XFERS_COLUMN_NUMBER), site.name, site.value, site.color, site.url, "nvalue=0"])) + "\n")
        print "\n--Output written to %s" % OUTPUT_FILE_NAME
        outputFile.close()
        correctionOutputFile.close()
    print "--------------------------\nFinished at " +str(datetime.now()) 
def main():
    parser = OptionParser(usage="usage: %prog [options] filename",
                          version="%prog 1.0")
    parser.add_option("-d", "--date",
                      dest="inputDate",
                      help="date from which to fetch the results for HC in format %Y-%m-%dT%H:%M:%SZ ")
    parser.add_option("-o", "--outputDir",
                      dest="outputDir",
                      help="Directory in which to save the output")
    (options, args) = parser.parse_args()
    if options.inputDate is None:
        print "Please input a date with the --date option"
        exit(-1)
    else:
        try:
            datetmp = dateutil.parser.parse(options.inputDate, ignoretz=True)
        except:
            print "I couldn't recognize the date, please give me one like 2015-12-31T23:59:59Z"
            exit(-1)
    if options.outputDir is None:
        print "Please add a directory with option --outputDir"
        exit(-1)
    else:
        if not os.path.isdir(options.outputDir):
            print options.outputDir + " is not a valid directory or you don't have read permissions"
            exit(-1)
    # Constants:
    # Dashboard API for Hammercloud
    # replace (site, startTimeStamp, endTimeStamp)
    interval = 30
    dateFrom = datetmp - timedelta(minutes=datetmp.minute % interval,
                                   seconds=datetmp.second,
                                   microseconds=datetmp.microsecond)
    dateTo = dateFrom + timedelta(minutes=interval)
    dateFormat = "%Y-%m-%d+%H%%3A%M"
    dateFromStr = datetime.strftime(dateFrom, dateFormat)
    dateToStr = datetime.strftime(dateTo, dateFormat)
    OUTPUT_FILE_NAME = os.path.join(options.outputDir, "hammercloud.txt")
    print "Calculating Hammercloud Score from " + str(dateFrom) + " to " + str(dateTo)
    urlHC = "http://dashb-cms-job.cern.ch/dashboard/request.py/jobsummary-plot-or-table2?user=&site=&submissiontool=&application=&activity=hctest&status=&check=terminated&tier=&sortby=site&ce=&rb=&grid=&jobtype=&submissionui=&dataset=&submissiontype=&task=&subtoolver=&genactivity=&outputse=&appexitcode=&accesstype=&inputse=&cores=&date1=%s&date2=%s&prettyprint" % (dateFromStr, dateToStr)    
    # Download the url or die
    try:
        print "Fetching url : " + urlHC
        jsonStr = url.read(urlHC)
        hcInfo = json.loads(jsonStr)
    except:
        # any failure fetching or parsing the URL is fatal
        exit(100)
    print "Data retrieved!"
    print json.dumps(hcInfo, sort_keys=True, indent=1, separators=(',', ': '))
    sitesfromDashboard = []
    for hcSummary in hcInfo['summaries']:
        sitesfromDashboard.append(hcSummary['name'])
    
    hcScoreSites = []
    noNa = 0
    print"Calculating HammerCloud scores"
    for site in sitesfromDashboard:
        for hcSummary in hcInfo['summaries']: 
            if hcSummary['name'] == site and site != "unknown":
                siteTerminated = hcSummary['terminated']
                siteSuccesful = hcSummary['app-succeeded']
                siteUnsuccesful = hcSummary['unsuccess']
                siteCancelled = hcSummary['cancelled']
                siteUnk = hcSummary['allunk']
                siteScore = -1.0 
                siteColor = "white"
                if (siteTerminated - siteCancelled - siteUnk) > 0:
                    siteScore = (float(siteSuccesful - siteUnsuccesful) / float(siteTerminated - siteCancelled - siteUnk)) * 100.0
                    siteColor = "red"
                    if (sites.getTier(site) > 1) and siteScore > 79.9:
                        siteColor = "yellow"
                    if siteScore > 89.9:
                        siteColor = "green"
                    #print site + " (" + str(siteSuccesful) + " - " + str(siteUnsuccesful) + ")/(" +str(siteTerminated)+" - "+str(siteCancelled)+" - "+str(siteUnk)+") =" + str(siteScore)
                if siteScore is not None:
                    print site + "\t" + str(siteScore) + "\t" + siteColor
                    hcScoreSites.append(dashboard.entry(date = dateFrom.strftime("%Y-%m-%d %H:%M:%S"), name = site, value = '%.1f' % siteScore, color = siteColor, url = getSuccessrateUrl(site, dateFromStr, dateToStr)))
    #print str(hcScoreSites)
    if len(hcScoreSites) > noNa:
        OutputFile = open(OUTPUT_FILE_NAME, 'w')
        for site in hcScoreSites:
            if site.name != "unknown":
                OutputFile.write(str(site) + '\n')
        print "\n--HC Score output written to %s" % OUTPUT_FILE_NAME
        OutputFile.close()
    else:
        print "There's no data, I quit!"
        waitingRoomColor = "red"
        morgueValue = "in"
        waitingRoomValue = "in"
    elif newlifeStatus == STATUS_WAITING_ROOM:
        siteColor = COLOR_WAITING_ROOM
        morgueColor = "green"
        waitingRoomColor = "red"
        morgueValue = "out"
        waitingRoomValue = "in"
    elif newlifeStatus == STATUS_OK:
        siteColor = COLOR_OK
        morgueColor = "green"
        waitingRoomColor = "green"
        morgueValue = "out"
        waitingRoomValue = "out"
    if newlifeStatus is not None:
        lifeStatusEntries.append(dashboard.entry(date = todayAtMidnight, name = siteInfo.name, value = newlifeStatus, color = siteColor, url = LOGFILE_URL))
        morgueEntries.append(dashboard.entry(date = todayAtMidnight, name = siteInfo.name, value = morgueValue, color = morgueColor, url = LOGFILE_URL))
        waitingRoomEntries.append(dashboard.entry(date = todayAtMidnight, name = siteInfo.name, value = waitingRoomValue, color = waitingRoomColor, url = LOGFILE_URL))

logOutputFile = open(OUTPUT_FILE_LOGFILE, 'a')
#logOutputFile.write(LOG_HEADER+'\n')
#lifeStatusOutputFile = open(OUTPUT_FILE_LIFESTATUS_NAME, 'w')
lifeStatusPOutputFile = open(OUTPUT_FILE_LIFESTATUSPRIMAL_NAME, 'w')
#lifeStatusOutputFile.write(dashboard.printHeader(scriptName = "LifeStatus", documentationUrl=""))
lifeStatusPOutputFile.write(dashboard.printHeader(scriptName = "LifeStatus", documentationUrl=""))
for site in lifeStatusEntries:
    #lifeStatusOutputFile.write(str(site) + '\n')
    lifeStatusPOutputFile.write(str(site) + '\n')
    if str(allSitesInfo.get(site.name, "")) != "":
        logline = str(allSitesInfo.get(site.name, ""))
        logOutputFile.write(logline + '\n')
        #Logic to calculate new prod status
        newProdStatus = 'unknown'
        if siteCurrentProd_Status is not None and siteCurrentProd_Status.value == 'tier0':
            newProdStatus = 'tier0'
        if siteCurrentProd_Status is not None and siteCurrentProd_Status.value == 'down':
            newProdStatus = 'down'
        if siteCurrentProd_Status is not None and siteCurrentProd_Status.value == 'on':
            if flagDowntime or flagLifeStatus:
                newProdStatus = 'drain'
            else:
                newProdStatus = 'on'
        if siteCurrentProd_Status is not None and siteCurrentProd_Status.value == 'drain':
            if not flagDowntime and not flagLifeStatus and readinessScore > 0.6:
                newProdStatus = 'on'
            else:
                newProdStatus = 'drain'
        allsitesMetric.append(
            dashboard.entry(
                date=now.strftime("%Y-%m-%d %H:%M:%S"),
                name=site,
                value=newProdStatus,
                color=COLORS.get(newProdStatus, 'white'),
                url='https://cmst1.web.cern.ch/CMST1/SST/drain_log.txt'))
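        # In short: existing 'tier0' and 'down' states are sticky; an 'on'
        # site is drained when a downtime or a bad life status is flagged;
        # a drained site goes back to 'on' only with no flags and a
        # readiness score above 0.6.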

if len(allsitesMetric) > 1:
    outputFileP = open(OUTPUT_P_FILE_NAME, 'w')
    for site in allsitesMetric:
        outputFileP.write(str(site) + '\n')
    print "\n--Output written to %s" % OUTPUT_P_FILE_NAME
    outputFileP.close()
Example #14
    parsed[cmsSite][id] = subject

# generate output for twiki meeting page
ticketURL = "https://ggus.eu/?mode=ticket_info&ticket_id="
twikiTable = "\n| *CMS Site* | *Number of Tickets* | * Tickets* |\n"
sum = 0
for site in parsed:
    url = ""
    sum = sum + len(parsed[site])
    for id in parsed[site]:
        url = url + "[[%s][%s]] " % (ticketURL + id, id)
    twikiTable = twikiTable + "| %s | %d | %s |\n" % (site, len(
        parsed[site]), url)
dateStamp = time.strftime("%d/%b/%Y %H:%M:%S (GMT)", time.gmtime())
twikiTable = twikiTable + "| *<i>generated on %s</i>, Total number of tickets: %s* |||" % (
    dateStamp, sum)
fileOps.write(sys.argv[2], twikiTable)
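# For illustration, a site with two open tickets would produce TWiki markup
# like the following (site name and ticket ids are made up):
#
# | *CMS Site* | *Number of Tickets* | *Tickets* |
# | T2_XX_Example | 2 | [[https://ggus.eu/?mode=ticket_info&ticket_id=123][123]] [[https://ggus.eu/?mode=ticket_info&ticket_id=456][456]] |
# | *<i>generated on 01/Jan/2016 00:00:00 (GMT)</i>, Total number of tickets: 2* |||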

# generate text file for the dashboard metric
metric = dashboard.metric()
allSites = sites.getSites().keys()
url = "https://ggus.eu/?mode=ticket_search&cms_site=%s&timeframe=any&status=open&search_submit=GO%%21"
for site in parsed:
    value = len(parsed[site])
    metric.append(dashboard.entry(None, site, value, dashboard.red,
                                  url % site))
for site in allSites:
    if site in parsed.keys(): continue
    metric.append(dashboard.entry(None, site, 0, dashboard.green, url % site))
fileOps.write(sys.argv[3], str(metric))
Example #15
    # the time range), return False
    slots = hammerCloud.getSiteEntries(siteName).values()
    for slot in slots:
        if slot.color == dashboard.green: return False
    return True


for i in sites.getSites():
    badSiteFlag = False
    ## detect bad sites!
    # site has bad hammercloud history
    if sites.getTier(i) == 2 and hasBadHistory(i):
        badSiteFlag = True
    # site is in the morgue
    elif morgue.hasSite(i) and morgue.getSiteEntry(i).color == dashboard.red:
        badSiteFlag = True
    # site has been blocked
    elif usableSitesMC.hasSite(i) and usableSitesMC.getSiteEntry(
            i).color == dashboard.red:
        badSiteFlag = True

    if badSiteFlag:
        metric.append(
            dashboard.entry(None, i, 'not_usable', dashboard.red, urlStamp))
    else:
        metric.append(
            dashboard.entry(None, i, 'usable', dashboard.green, urlStamp))

fileOps.write(txtOutput, str(metric))
fileOps.write(jsonOutput, json.dumps(metric.__list__(), indent=2))
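# Summary of the rule above: a site is marked 'not_usable' (red) if it is a
# Tier-2 with a bad HammerCloud history, sits in the morgue, or has been
# blocked through the manual-changes metric; every other site is 'usable'
# (green).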
Example #16
            val = round(samAccess[site], 2)
        errMsg = errMsg + "_SAM(%s)" % val
    if (hammerCloud[site] < 70.0 or hammerCloud[site] == "n/a") and sites.getTier(site) != 3:
        badSiteFlag = True
        if hammerCloud[site] == "n/a":
            val = hammerCloud[site]
        else:
            val = round(hammerCloud[site], 2)
        errMsg = errMsg + "_HC(%s)" % val
    if site in ggus.keys() and len(ggus[site]):
        badSiteFlag = True
        errMsg = errMsg + "_GGUS(%s)" % str(ggus[site])
    if siteDownTimes[site] in downTimeColors:
        siteDownTimeFlag = True
    if badSiteFlag:
        entry = dashboard.entry(None, site, errMsg, dashboard.red, reportURL % site)
        if siteDownTimeFlag:
            entry = dashboard.entry(None, site, "site is on downtime", siteDownTimes[site], reportURL % site)
    else:
        entry = dashboard.entry(None, site, "on", dashboard.green, reportURL % site)

    if site in federations["prod"]:
        production.append(entry)
    elif site in federations["trans"]:
        transitional.append(entry)
    else:
        historic_federation = check_federation_history(site)
        if historic_federation == "trans" and not siteDownTimeFlag:
            transitional.append(
                dashboard.entry(
                    None, site, "site lost subscription to transitional federation", "blue", reportURL % site))
# this script provides data for the 'usable sites - manual changes' metric,
# which is created to control the 'usable sites' metric by hand, and creates
# a closed loop for the metric. when someone changes a value in the 
# 'usable sites - manual changes' metric by using dashboard web interface,
# the script reflects this change to the input text file of the metric.

if len(sys.argv) < 3:
    print 'not enough parameters!'
    sys.exit(1)

# output path
output        = sys.argv[2]

# get the source metric url
metricURL     = sys.argv[1]
# get the entries of the metric
metric        = dashboard.parseJSONMetric(url.read(metricURL))
updatedMetric = dashboard.metric()

for i in sites.getSites():
    # if the site is not in the list add it (this is the
    # case that will happen when they create a new site
    # in the site db)
    if not metric.hasSite(i):
        updatedMetric.append(dashboard.entry(None, i, 'ready', dashboard.green, metricURL))
    else:
        latestEntry = metric.getLatestEntry(i)
        updatedMetric.append(dashboard.entry(None, i, latestEntry.value, latestEntry.color, metricURL))

fileOps.write(output, str(updatedMetric))
try: import json
except ImportError: import simplejson as json
from lib import fileOps, url, dashboard, sites

if len(sys.argv) < 3:
    sys.stderr.write('not enough parameters!\n')
    sys.exit(1)

federationSource = sys.argv[1]
metricOutput     = sys.argv[2]

federations = json.loads(url.read(federationSource))

federationMetric = dashboard.metric()
for fedName in federations:
    for site in federations[fedName]:
        if fedName == 'prod':
            color = dashboard.green
        elif fedName == 'trans':
            color = dashboard.cyan
        elif fedName == 'nowhere':
            color = dashboard.gray
        else:
            # basically, this is an impossible state considering possible
            # federation names, but I wanted to handle it in case of
            # a change --and this change must be reflected in the metric.
            color = dashboard.white
        entry = dashboard.entry(None, site, fedName, color, federationSource)
        federationMetric.append(entry)

fileOps.write(metricOutput, str(federationMetric))
Example #19
        badSiteFlag = True
        if samAccess[site] == 'n/a': val = samAccess[site]
        else: val = round(samAccess[site], 2)
        errMsg = errMsg + '_SAM(%s)' % val
    if (hammerCloud[site] < 70.0 or hammerCloud[site] == 'n/a') and sites.getTier(site) != 3:
        badSiteFlag = True
        if hammerCloud[site] == 'n/a': val = hammerCloud[site]
        else: val = round(hammerCloud[site], 2)
        errMsg = errMsg + '_HC(%s)' % val
    if site in ggus.keys() and len(ggus[site]):
        badSiteFlag = True
        errMsg = errMsg + '_GGUS(%s)' % str(ggus[site])
    if site in siteDownTimes and siteDownTimes[site] in downTimeColors:
        siteDownTimeFlag = True
    if badSiteFlag:
        entry = dashboard.entry(None, site, errMsg, dashboard.red, reportURL % site)
        if siteDownTimeFlag:
            entry = dashboard.entry(None, site, 'site is on downtime', siteDownTimes[site], reportURL % site)
    else:
        entry = dashboard.entry(None, site, 'on', dashboard.green, reportURL % site)

    if site in federations["prod"]: production.append(entry)
    elif site in federations["trans"]: transitional.append(entry)
    else:
        historic_federation = check_federation_history(site)
        if historic_federation == "trans" and siteDownTimeFlag == False:
            transitional.append(dashboard.entry(None, site, 'site lost subscription to transitional federation', 'blue', reportURL % site))
        elif historic_federation == "trans" and siteDownTimeFlag == True:
            transitional.append(dashboard.entry(None, site, 'site is on downtime', siteDownTimes[site], reportURL % site))
        elif historic_federation == "prod" and siteDownTimeFlag == False:
            production.append(dashboard.entry(None, site, 'site lost subscription to prod federation', 'blue', reportURL % site))
metric = dashboard.parseJSONMetric(url.read(metricURL))
updatedMetric = dashboard.metric()

# merge sites from the vo-feed and the manual control metric.
siteList = sites.getSites()
for site in metric.getSites():
    if not site in siteList:
        siteList[site] = {}

for i in siteList:
    # if the site is not in the metric, add it (this is the case when a
    # new site has just been created in SiteDB)
    if not metric.hasSite(i):
        updatedMetric.append(
            dashboard.entry(None, i, 'ready', dashboard.green, metricURL))
    else:
        latestEntry = metric.getLatestEntry(i)
        updatedMetric.append(
            dashboard.entry(None, i, latestEntry.value, latestEntry.color,
                            metricURL))
        print latestEntry.value + " " + i

#######################
blist = [
    'T2_RU_RRC_KI', 'T3_BY_NCPHEP', 'T3_CH_PSI', 'T3_CN_PKU', 'T3_ES_Oviedo',
    'T3_IN_PUHEP', 'T3_IR_IPM', 'T3_KR_UOS', 'T3_UK_London_RHUL',
    'T3_UK_London_UCL', 'T3_UK_ScotGrid_ECDF', 'T3_US_FNALLPC',
    'T3_US_FNALXEN', 'T3_US_FSU', 'T3_US_JHU', 'T3_US_Kansas', 'T3_US_MIT',
    'T3_US_NU', 'T3_US_Princeton', 'T3_US_Princeton_ICSE', 'T3_US_Rice',
    'T3_BG_UNI_SOFIA'
]
Example #22
# --note the loop below: we also pick up sites from the manual changes
# metric, because some of them are not listed in sites.getSites()
siteList = sites.getSites()
for site in usableSitesMC.getSites():
    if not site in siteList:
        siteList[site] = {}

for i in siteList:
    badSiteFlag = False
    ## detect bad sites!
    # site has bad hammercloud history
    if sites.getTier(i) == 2 and hasBadHistory(i):
        print i + " hasBadHistory"
        badSiteFlag = True
    # site is in the morgue
    elif morgue.hasSite(i) and morgue.getSiteEntry(i).value == 'Morgue':
        print i + " is in the Morgue"
        badSiteFlag = True
    # site has been blocked
    elif usableSitesMC.hasSite(i) and usableSitesMC.getSiteEntry(i).color == dashboard.red:
        print i + " usableSitesMC.getSiteEntry color is red"
        badSiteFlag = True

    if badSiteFlag:
        metric.append(dashboard.entry(None, i, 'not_usable', dashboard.red, urlStamp))
    else:
        metric.append(dashboard.entry(None, i, 'usable', dashboard.green, urlStamp))

fileOps.write(txtOutput, str(metric))
fileOps.write(jsonOutput, json.dumps(metric.__list__(), indent=2))
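
# hasBadHistory() above is defined earlier in the script; a rough sketch of
# the idea -- a tier-2 site has a "bad history" if none of its recent
# HammerCloud entries is green. The getSiteEntries() accessor and the
# 3-day window are assumptions for illustration only:
def hasBadHistory(site, days=3):
    entries = hammerCloud.getSiteEntries(site)  # assumed: {date: dashboard.entry}
    recent = sorted(entries.keys())[-days:]
    return all(entries[d].color != dashboard.green for d in recent)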
Example #23
    # parsed maps a CMS site name to {ticket id: ticket subject}
    if not cmsSite: continue
    if not parsed.has_key(cmsSite):
        parsed[cmsSite] = {}
    parsed[cmsSite][id] = subject

# generate output for twiki meeting page 
ticketURL  = "https://ggus.eu/?mode=ticket_info&ticket_id="
twikiTable = "\n| *CMS Site* | *Number of Tickets* | *Tickets* |\n"
total      = 0
for site in parsed:
    url = ""
    total = total + len(parsed[site])
    for id in parsed[site]:
        url = url + "[[%s][%s]] " % (ticketURL + id, id)
    twikiTable = twikiTable + "| %s | %d | %s |\n" % (site, len(parsed[site]), url)
dateStamp = time.strftime("%d/%b/%Y %H:%M:%S (GMT)", time.gmtime())
twikiTable = twikiTable + "| *<i>generated on %s</i>, Total number of tickets: %s* |||" % (dateStamp, total)
fileOps.write(sys.argv[2], twikiTable)
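
# For reference, one row of the generated twiki table looks like this
# (site name and ticket id illustrative):
# | T2_XX_Example | 1 | [[https://ggus.eu/?mode=ticket_info&ticket_id=101][101]] |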

# generate text file for the dashboard metric
metric    = dashboard.metric()
allSites  = sites.getSites().keys()
url       = "https://ggus.eu/?mode=ticket_search&cms_site=%s&timeframe=any&status=open&search_submit=GO%%21"
for site in parsed:
    value = len(parsed[site])
    metric.append(dashboard.entry(None, site, value, dashboard.red, url % site))
for site in allSites:
    if site in parsed: continue
    metric.append(dashboard.entry(None, site, 0, dashboard.green, url % site))
fileOps.write(sys.argv[3], str(metric))
Example #24
    siteColor = COLOR_WAITING_ROOM
    morgueColor = "green"
    waitingRoomColor = "red"
    morgueValue = "out"
    waitingRoomValue = "in"
elif newlifeStatus == STATUS_OK:
    siteColor = COLOR_OK
    morgueColor = "green"
    waitingRoomColor = "green"
    morgueValue = "out"
    waitingRoomValue = "out"
if newlifeStatus is not None:
    lifeStatusEntries.append(
        dashboard.entry(date=todayAtMidnight,
                        name=siteInfo.name,
                        value=newlifeStatus,
                        color=siteColor,
                        url=LOGFILE_URL))
    morgueEntries.append(
        dashboard.entry(date=todayAtMidnight,
                        name=siteInfo.name,
                        value=morgueValue,
                        color=morgueColor,
                        url=LOGFILE_URL))
    waitingRoomEntries.append(
        dashboard.entry(date=todayAtMidnight,
                        name=siteInfo.name,
                        value=waitingRoomValue,
                        color=waitingRoomColor,
                        url=LOGFILE_URL))
Example #25
transitional = dashboard.metric()

for site in siteList:
    badSiteFlag = False
    errMsg      = 'bad'

    # conditions to mark a site as bad
    if samAccess[site] < 50.0:
        badSiteFlag = True
        errMsg = errMsg + '_SAM(%s)' % round(samAccess[site], 2)
    if hammerCloud[site] < 80.0:
        badSiteFlag = True
        errMsg = errMsg + '_HC(%s)' % round(hammerCloud[site], 2)
    if site in ggus:
        badSiteFlag = True
    if 'n/a' in [hammerCloud[site], samAccess[site]]:
        badSiteFlag = True
        if hammerCloud[site] == 'n/a': errMsg = errMsg + '_HC(n/a)'
        else: errMsg = errMsg + '_SAM(n/a)'

    if badSiteFlag:
        entry = dashboard.entry(None, site, errMsg, dashboard.red, '#')
    else:
        entry = dashboard.entry(None, site, 'on', dashboard.green, '#')

    if site in federations["prod"]: production.append(entry)
    elif site in federations["trans"]: transitional.append(entry)

fileOps.write('%s/aaaProd.txt' % output, str(production))
fileOps.write('%s/aaaTrans.txt' % output, str(transitional))
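
# The flag chain above can be collapsed into a single predicate -- a sketch
# under the same assumptions (samAccess / hammerCloud map a site to a float
# score or 'n/a', ggus is a collection of sites with open tickets); the
# errMsg bookkeeping is left out:
def isBadSite(site):
    return (samAccess[site] == 'n/a' or samAccess[site] < 50.0
            or hammerCloud[site] == 'n/a' or hammerCloud[site] < 80.0
            or site in ggus)
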
def main():
    parser = OptionParser(usage="usage: %prog [options] filename",
                          version="%prog 1.0")
    parser.add_option(
        "-d",
        "--date",
        dest="inputDate",
        help="date from which to fetch the results for HC, in the format %Y-%m-%dT%H:%M:%SZ")
    parser.add_option("-o",
                      "--outputDir",
                      dest="outputDir",
                      help="Directory in which to save the output")
    (options, args) = parser.parse_args()
    if options.inputDate is None:
        print "Please input a date with the --date option"
        exit(-1)
    else:
        try:
            datetmp = dateutil.parser.parse(options.inputDate, ignoretz=True)
        except:
            print "I couldn't recognize the date, please give me one like 2015-12-31T23:59:59Z"
            exit(-1)
    if options.outputDir is None:
        print "Please add a directory with option --outputDir"
        exit(-1)
    else:
        if not os.path.isdir(options.outputDir):
            print options.outputDir + " is not a valid directory or you don't have read permissions"
            exit(-1)


    # Constants
    interval = 1439  # minutes: a 23 h 59 min window
    dateFrom = datetmp - timedelta(minutes=datetmp.minute % interval,
                                   seconds=datetmp.second,
                                   microseconds=datetmp.microsecond)
    dateTo = dateFrom + timedelta(minutes=interval)
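    # e.g. --date 2015-12-31T23:59:59Z floors dateFrom to the hour:
    # dateFrom = 2015-12-31 23:00:00 and dateTo = dateFrom + 1439 min
    #        = 2016-01-01 22:59:00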
    dateFormat = "%Y-%m-%dT%H:%M:%SZ"
    dateFromStr = datetime.strftime(dateFrom, dateFormat)
    print dateFromStr
    dateToStr = datetime.strftime(dateTo, dateFormat)
    OUTPUT_FILE_NAME = os.path.join(options.outputDir, "site_avail_sum.txt")
    OUTPUT_FILE_CORRECTIONS = os.path.join(options.outputDir,
                                           "site_avail_sum_POST_REQUEST.txt")
    SAM_COLUMN_NUMBER = "126"
    print "Getting SAM Score from " + str(dateFrom) + " to " + str(dateTo)
    samUrl = "http://wlcg-sam-cms.cern.ch/dashboard/request.py/getstatsresultsmin?profile_name=CMS_CRITICAL_FULL&plot_type=quality&start_time=%s&end_time=%s&granularity=single&view=siteavl" % (
        dateFromStr, dateToStr)
    print samUrl
    # Download the url or die
    try:
        print "Fetching url : " + samUrl
        jsonStr = url.read(samUrl)
        samInfo = json.loads(jsonStr)
    except:
        # could not fetch or parse the SAM data; nothing to report
        exit(100)
    print "Data retrieved!"
    sitesfromDashboard = []
    for samSummary in samInfo['data']:
        sitesfromDashboard.append(samSummary['name'])
    print sitesfromDashboard
    samScoreSites = []
    print "Getting SAM for all sites"
    for site in sitesfromDashboard:
        for samSummary in samInfo['data']:
            if samSummary['name'] == site:
                # defaults so the summary print below never hits unbound names
                siteOK = siteCritical = siteSched = 0.0
                try:
                    siteOK = float(samSummary['data'][0]['OK'])
                    siteCritical = float(samSummary['data'][0]['CRIT'])
                    siteSched = float(samSummary['data'][0]['SCHED'])
                    if (siteOK + siteCritical + siteSched) > 0.0:
                        siteAvailabilityNum = (
                            float(siteOK) /
                            (float(siteOK + siteCritical + siteSched))) * 100.0
                        siteAvailability = int(siteAvailabilityNum)
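                        # e.g. OK=45, CRIT=5, SCHED=0 gives 45/50*100 = 90.0,
                        # which passes the 89.9 threshold below and turns green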
                        if siteAvailabilityNum > 89.9:
                            siteColor = "green"
                        elif (sites.getTier(site) == 2 or sites.getTier(site)
                              == 3) and siteAvailabilityNum > 79.9:
                            siteColor = "green"
                        else:
                            siteColor = "red"
                    else:
                        siteAvailability = "n/a"
                        siteAvailabilityNum = None
                        siteColor = "white"
                except:
                    # missing or malformed OK/CRIT/SCHED data for this site
                    siteAvailability = "Error"
                    siteAvailabilityNum = None
                    siteColor = "white"
                print site + "  OK " + str(siteOK) + " CRIT " + str(
                    siteCritical) + " SCHED " + str(
                        siteSched) + " SCORE : " + str(siteAvailability)
                samScoreSites.append(
                    dashboard.entry(date=dateTo.strftime("%Y-%m-%d %H:%M:%S"),
                                    name=site,
                                    value=siteAvailability,
                                    color=siteColor,
                                    url=getSuccessrateUrl(
                                        site, dateFrom, dateTo),
                                    nvalue=siteAvailabilityNum))
    print str(samScoreSites)
    if len(samScoreSites) > 1:
        OutputFile = open(OUTPUT_FILE_NAME, 'w')
        correctionOutputFile = open(OUTPUT_FILE_CORRECTIONS, 'a')
        startDateStr = (dateFrom + timedelta(days=1)).replace(
            hour=0, minute=0, second=1,
            microsecond=0).strftime("%Y-%m-%d %H:%M:%S")
        endDateStr = (dateFrom + timedelta(days=1)).replace(
            hour=23, minute=59, second=59,
            microsecond=0).strftime("%Y-%m-%d %H:%M:%S")
        for site in samScoreSites:
            if site.name != "unknown":
                OutputFile.write(str(site) + '\n')
                correctionOutputFile.write(("\t".join([
                    startDateStr, endDateStr,
                    str(SAM_COLUMN_NUMBER), site.name,
                    str(site.value), site.color, site.url, "nvalue=0"
                ])) + "\n")
        print "\n--SAM Score output written to %s" % OUTPUT_FILE_NAME
        OutputFile.close()
        correctionOutputFile.close()
    else:
        print "There's no data, I quit!"
Example #27