Example #1
import subprocess
import dashing

# MOUNT, terabyte and sizeof_fmt are assumed to be defined at module level.

def main():
    # Create the client once so it is available to every section below.
    dash = dashing.DashingImport('viz.unl.edu', auth_token='542221b1-b765-4cd9-a9e6-0c0727870375')
    p = subprocess.Popen(["df", "-P", MOUNT], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    (stdoutdata, stderrdata) = p.communicate()
    for line in stdoutdata.split("\n"):
        split_line = line.split()
        if len(split_line) < 6:  # a "df -P" data row has six fields
            continue
        if split_line[5] == MOUNT:
            send_dict = {
                'min': 0,
                'max': float("%.1f" % (float(split_line[1]) / terabyte)),
                'value': float("%.1f" % (float(split_line[2]) / terabyte)),
                'moreinfo': "Capacity: %s" % sizeof_fmt(int(split_line[1]))
            }
            dash.SendEvent('CraneStorage', send_dict)
            dash.SendEvent('HCCAmazonPrice', {'craneStorage': send_dict['value']})


    # Send the number of jobs running
    command = "squeue -t R -O numcpus,account -h".split(" ")
    p = subprocess.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    (stdout, stderr) = p.communicate()
    sum_running_cores = 0
    per_user_cores = {}
    for line in stdout.split("\n"):
        try:
            (cores, username) = line.split()
            sum_running_cores += int(cores)
            username = username.strip()  # strip() returns a new string; keep it
            if username not in per_user_cores:
                per_user_cores[username] = 0
            per_user_cores[username] += int(cores)
        except ValueError:
            print "Error parsing line: %s" % line
    dash.SendEvent('CraneRunning', {'current': sum_running_cores, 'last': sum_running_cores})
    dash.SendEvent('HCCAmazonPrice', {'CraneCores': sum_running_cores})
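
The helper sizeof_fmt is called here and again in Example #4 but never shown. Judging by its use, it renders a raw block count as a human-readable size string; a minimal sketch along the lines of the usual recipe (the exact units and formatting are an assumption, not the original code):

def sizeof_fmt(num, suffix='B'):
    # Hypothetical implementation: climb the binary prefixes until the
    # value fits, then format with one decimal place.
    for unit in ['', 'Ki', 'Mi', 'Gi', 'Ti']:
        if abs(num) < 1024.0:
            return "%3.1f%s%s" % (num, unit, suffix)
        num /= 1024.0
    return "%.1f%s%s" % (num, 'Pi', suffix)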
Example #2
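# "SQL" is evidently pymysql imported under an alias (e.g. import pymysql as SQL),
# given the pymysql.cursors.DictCursor reference below; sys and dashing are
# assumed to be imported at module level.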
def getUNOTimeData(timeDelta, SQLItems, scoreBox, numEntries):
    xdmoddb = SQL.connect(host=SQLItems["xdmodmysql_host"],
                          user=SQLItems["xdmodmysql_username"],
                          password=SQLItems["xdmodmysql_pass"],
                          db=SQLItems["xdmodmysql_db"],
                          cursorclass=pymysql.cursors.DictCursor)
    with open(sys.argv[2], 'r') as file:
        auth_key = file.read().strip()
    dash = dashing.DashingImport('viz.unl.edu', auth_token=auth_key)
    # Dashboard Data Structure
    # Array of dict with keys of label, value, dept, campus
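    # e.g. {"label": "jdoe", "value": "1520", "dept": "Computer Science",
    #       "campus": "UNO"}   (values hypothetical)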

    dataToDash = []
    with xdmoddb:
        cur = xdmoddb.cursor()
        stmt = "SELECT j.person_id,username, SUM(cpu_time), group_name,department,campus FROM jobfact as j INNER JOIN modw.systemaccount as h ON j.person_id=h.person_id INNER JOIN mod_shredder.ldapGroups as s ON j.group_name=s.GroupName WHERE campus not like 'UNL' and campus not like 'IANR' and start_time_ts > " + timeDelta + " GROUP BY j.person_id order by SUM(cpu_time) DESC LIMIT " + numEntries + ";"
        print(stmt)
        cur.execute(stmt)
        result = cur.fetchall()
        print(result)
        for i in result:
            print(i["SUM(cpu_time)"])
            cpuHour = str(round(int(i["SUM(cpu_time)"]) / 3600))
            if i["department"] == i["campus"]:
                i["campus"] = ""
            dataToDash.append({
                "label": i["username"],
                "value": cpuHour,
                "dept": i["department"].replace("College of", ""),
                "campus": i["campus"]
            })
        print(dataToDash)
        ### Push the assembled scoreboard rows to the dashboard
        dash.SendEvent(scoreBox, {'items': dataToDash})
        cur.close()
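
Note that getUNOTimeData splices timeDelta and numEntries straight into the SQL string. If either value ever comes from outside the script, a parameterized variant is safer; a sketch, assuming both can be coerced to integers:

stmt = ("SELECT j.person_id, username, SUM(cpu_time), group_name, department, campus "
        "FROM jobfact AS j "
        "INNER JOIN modw.systemaccount AS h ON j.person_id = h.person_id "
        "INNER JOIN mod_shredder.ldapGroups AS s ON j.group_name = s.GroupName "
        "WHERE campus NOT LIKE 'UNL' AND campus NOT LIKE 'IANR' "
        "AND start_time_ts > %s "
        "GROUP BY j.person_id ORDER BY SUM(cpu_time) DESC LIMIT %s")
# pymysql escapes and substitutes the values itself; ints are emitted
# unquoted, so the LIMIT clause stays valid.
cur.execute(stmt, (int(timeDelta), int(numEntries)))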
Example #3
import urllib.request
import re
import sys
import dashing
import json

### Import SQL Login
SQLItems = {}
with open(sys.argv[1], 'r') as f:
    for i in f:
        key, value = i.split(" ", 1)
        SQLItems[key] = value.strip()
        print(key, SQLItems[key])
with open('key.txt', 'r') as file:
    auth_key = file.read().strip()
dash = dashing.DashingImport('viz.unl.edu', auth_token=auth_key)

### Cloud Service Pricing

###AWS -- 4 Core Instance with 8GB Memory for 1 hour
#Storage: per GB Hour // 0.045 per GB/Month
#Network: per GB
###GCP -- Retrieved from Google Cloud Platform
#CPU:  per CPU Hour
#Storage: per GB Hour
#Memory: per GB Hour // 0.040 per GB/Month
#Network: per GB
###Azure -- 4 Core Instance with 8GB Memory for 1 hour
#Storage: per GB Hour // 0.048 per GB/Month
#Network: per GB
###DO -- 4 Core Instance with 8GB Memory for 1 hour
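
The AWS and Azure comments above give concrete storage rates (0.045 and 0.048 USD per GB/month; the GCP 0.040 figure is for memory, not storage). Those two numbers are enough to sketch the kind of price comparison the dashboard presumably renders; a hypothetical illustration with the rates hard-coded from the comments:

# Rates copied from the comments above (USD per GB per month); illustrative only.
STORAGE_RATES = {'AWS': 0.045, 'Azure': 0.048}

def monthly_storage_cost(used_gb):
    # Flat per-GB pricing: cost = rate * gigabytes stored.
    return {provider: round(rate * used_gb, 2)
            for provider, rate in STORAGE_RATES.items()}

print(monthly_storage_cost(1024))  # one TB: {'AWS': 46.08, 'Azure': 49.15}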
Example #4
import json
import operator
import os
import re
import subprocess
import time
import urllib.request

import pymysql
import pymysql as SQL  # the code below reaches the module through both names

import dashing

# MOUNT, CLUSTER, terabyte and sizeof_fmt are assumed to be defined at module level.

def main():
    with open('key.txt', 'r') as file:
        auth_key = file.read().strip()
    # Create the client once so every section below can use it.
    dash = dashing.DashingImport('viz.unl.edu',
                                 port=3030,
                                 auth_token=auth_key)

    p = subprocess.Popen(["df", "-P", MOUNT],
                         stdout=subprocess.PIPE,
                         stderr=subprocess.PIPE,
                         universal_newlines=True)
    (stdoutdata, stderrdata) = p.communicate()
    for line in stdoutdata.split("\n"):
        split_line = line.split()
        if len(split_line) < 6:  # a "df -P" data row has six fields
            continue
        if split_line[5] == MOUNT:
            send_dict = {
                'min': 0,
                'max': float("%.1f" % (float(split_line[1]) / terabyte)),
                'value': float("%.1f" % (float(split_line[2]) / terabyte)),
                'moreinfo': "Capacity: %s" % sizeof_fmt(int(split_line[1]))
            }
            dash.SendEvent(CLUSTER + 'Storage', send_dict)
            dash.SendEvent('HCCAmazonPrice',
                           {CLUSTER.lower() + 'Storage': send_dict['value']})

    # Send the number of jobs running
    command = "squeue -t R -O numcpus,account -h".split(" ")
    p = subprocess.Popen(command,
                         stdout=subprocess.PIPE,
                         stderr=subprocess.PIPE,
                         universal_newlines=True)
    (stdout, stderr) = p.communicate()
    sum_running_cores = 0
    per_user_cores = {}
    for line in stdout.split("\n"):
        if line == "":
            break
        try:
            (cores, username) = line.split()
            sum_running_cores += int(cores)
            username = username.strip()  # strip() returns a new string; keep it
            if username not in per_user_cores:
                per_user_cores[username] = 0
            per_user_cores[username] += int(cores)
        except ValueError:
            print("Error parsing line: " + line)

    if os.path.isfile('dashing.txt') and os.access('dashing.txt', os.R_OK):
        date = os.path.getmtime('dashing.txt')
        with open('dashing.txt', 'r') as file:
            last_running_cores = int(file.read())
    else:
        print("Error reading from dashing.txt")
        date = time.time() + 3600
        last_running_cores = sum_running_cores
    with open('dashing.txt', 'w') as file:
        file.write(str(sum_running_cores))
    date = time.strftime('%m-%d-%Y %H:%M:%S', time.localtime(date))
    dash.SendEvent(
        CLUSTER + 'Running', {
            'current': sum_running_cores,
            'last': last_running_cores,
            'last_period': date
        })
    dash.SendEvent('HCCAmazonPrice', {CLUSTER + 'Cores': sum_running_cores})

    # send number of completed jobs
    current_time = time.strftime('%m/%d/%y-%H:%M:%S',
                                 time.localtime(time.time()))
    start_time = time.strftime('%m/%d/%y', time.localtime(time.time()))
    command = "sacct -a -E " + current_time + " -S " + start_time + " -s CD -o JobID -X"
    command = command.split(" ")
    p = subprocess.Popen(command,
                         stdout=subprocess.PIPE,
                         stderr=subprocess.PIPE,
                         universal_newlines=True)
    (stdout, stderr) = p.communicate()
    jobs_completed = len(stdout.split())
    path = '/common/swanson/.dashing/'
    files = ['crane_jobs.txt', 'rhino_jobs.txt']
    # Write this cluster's count; the loop below sums every cluster's file.
    with open(path + CLUSTER.lower() + '_jobs.txt', 'w') as file:
        file.write(str(jobs_completed))
    total_jobs = 0
    for filename in files:
        filename = path + filename
        if os.path.isfile(filename) and os.access(filename, os.R_OK):
            with open(filename, 'r') as file:
                total_jobs += int(file.read())
        else:
            print("Error reading from " + filename)

    dash.SendEvent('JobsCompleted', {'current': total_jobs})

    #Send number of CPU Hours for Today
    command = "sacct -a -o CPUTimeRaw -n -T"
    command = command.split(" ")
    p = subprocess.Popen(command,
                         stdout=subprocess.PIPE,
                         stderr=subprocess.PIPE,
                         universal_newlines=True)
    (stdout, stderr) = p.communicate()
    # Integer division keeps the later int(file.read()) round-trip valid.
    hours_completed = sum(map(int, stdout.split())) // 3600
    files = ['crane_hours.txt', 'rhino_hours.txt']
    with open(path + CLUSTER.lower() + '_hours.txt', 'w') as file:
        file.write(str(hours_completed))
    total_hours = 0
    for filename in files:
        filename = path + filename
        if os.path.isfile(filename) and os.access(filename, os.R_OK):
            with open(filename, 'r') as file:
                total_hours += int(file.read())
        else:
            print("Error reading from " + filename)

    dash.SendEvent('HoursToday', {'current': total_hours})

    # Send Anvil Information
    ## Time Delay is to allow dashing to keep up with POST

    f = urllib.request.urlopen("http://anvil-beta.unl.edu:8123/")
    rawData = json.load(f)
    dash.SendEvent('AnvilTile', {'current_vm': rawData["vm_count"]})
    time.sleep(1)
    dash.SendEvent('AnvilTile', {'current_cores': rawData["core_count"]})
    time.sleep(1)
    dash.SendEvent('AnvilTile', {
        'current_mem':
        str(round(int(rawData["mem_count"]) / (1024.0**2), 2))
    })
    time.sleep(1)
    dash.SendEvent(
        'AnvilTile',
        {'current_vol': str(round(int(rawData["volume_gb"]) / 1024.0, 2))})
    time.sleep(1)
    dash.SendEvent(
        'AnvilTile',
        {'current_disk': str(round(int(rawData["disk_gb"]) / 1024.0, 2))})

    # Red Storage
    redT2 = urllib.request.urlopen("http://t2.unl.edu:8088/dfshealth.jsp")
    redData = re.findall(r"\d+\.\d+", redT2.read().decode())
    dash.SendEvent(
        'RedStorage', {
            'min': 0,
            'max': float(redData[9]) * 1024,
            'value': float(redData[10]) * 1024,
            'Capacity': redData[9] + " PB"
        })
    dash.SendEvent('HCCAmazonPrice', {'redStorage': float(redData[10]) * 1024})

    # Top Users UNL
    SQLItems = {}
    with open('db.yml', 'r') as dbFile:
        for i in dbFile:
            key, value = i.split(" ", 1)
            SQLItems[key] = value.strip()

    rcfdb = SQL.connect(host=SQLItems["rcfmysql_host"],
                        user=SQLItems["rcfmysql_username"],
                        passwd=SQLItems["rcfmysql_pass"],
                        db=SQLItems["rcfmysql_db"],
                        cursorclass=pymysql.cursors.DictCursor)

    ## Grab this clusters squeue

    command = "squeue -h -t R -o '%u %C'"
    p = subprocess.Popen(command,
                         shell=True,
                         stdout=subprocess.PIPE,
                         stderr=subprocess.PIPE,
                         universal_newlines=True)
    (stdout, stderr) = p.communicate()
    with open(path + CLUSTER.lower() + '_users.txt', 'w') as file:
        file.write(stdout)

    ## Pull top users from every cluster's file
    files = ['crane_users.txt', 'rhino_users.txt']
    topUsers = {}
    for file in files:
        filename = path + file
        if os.path.isfile(filename) and os.access(filename, os.R_OK):
            with open(filename, 'r') as userFile:
                for line in userFile:
                    (user, cores) = line.split()
                    if user in topUsers:
                        topUsers[user] += int(cores)
                    else:
                        topUsers[user] = int(cores)
    topUsers25 = sorted(topUsers.items(),
                        key=operator.itemgetter(1),
                        reverse=True)[:25]

    ## The real magic of SQL begins
    dataToDash = []
    cur = rcfdb.cursor()
    for k, v in topUsers25:
        # Parameterized query; pymysql escapes the login itself.
        stmt = "SELECT Department, Campus FROM Personal WHERE LoginID = %s;"
        cur.execute(stmt, (k,))
        result = cur.fetchall()[0]
        if result["Department"] is None:
            result["Department"] = ""
        if result["Campus"] is None:
            result["Campus"] = ""
        dataToDash.append({
            "label": k[:9],
            "value": v,
            "dept": result["Department"][:14],
            "campus": result["Campus"]
        })
    dash.SendEvent('BiggestUsers', {'items': dataToDash})
    cur.close()
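
Every example funnels its data through dashing.DashingImport and its SendEvent method, neither of which is shown here. Judging from the host, the port 3030 default, and the auth_token argument, it most likely wraps the standard Dashing/Smashing widget API, which takes one JSON POST per widget update; a hypothetical minimal equivalent:

import json
import urllib.request

class DashingImportSketch:
    # Hypothetical stand-in for dashing.DashingImport, reconstructed from usage.
    def __init__(self, host, port=3030, auth_token=None):
        self.base = "http://%s:%d/widgets/" % (host, port)
        self.auth_token = auth_token

    def SendEvent(self, widget, payload):
        # Dashing widgets accept a JSON POST whose body carries the
        # dashboard's auth_token alongside the widget data.
        body = dict(payload, auth_token=self.auth_token)
        req = urllib.request.Request(self.base + widget,
                                     data=json.dumps(body).encode(),
                                     headers={"Content-Type": "application/json"})
        urllib.request.urlopen(req).close()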
Example #5
import rrdtool as rt
import sys
from time import sleep, gmtime, strftime
from subprocess import call
from sortedcontainers import SortedDict
import dashing
with open("key.txt", 'r') as f:
    TOKEN = f.readline().strip()
dash = dashing.DashingImport('viz.unl.edu', auth_token=TOKEN)


def rrdToArrayPoint(rrdFile):
    # rrdtool.fetch returns ((start, end, step), legend, rows).
    result = rt.fetch(rrdFile, "AVERAGE")
    start, end, step = result[0]
    legend = result[1]
    rows = result[2]
    endTime = end
    points = []
    try:
        # Take the 20 samples ending three rows before the last (the newest
        # rows are often still unfilled), summing both data sources and
        # multiplying by 8 to turn bytes into bits.
        for i in range(20, 0, -1):
            row = rows[len(rows) - (3 + i)]
            points.append({
                "y": int(row[0] + row[1]) * 8,
                "x": endTime
            })
            last_point = row[0] + row[1]
            endTime = endTime - 300