Example #1
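  # Runs SYMCLI symdisk to list failed disks, caches the XML report to
  # disk, parses out per-disk details, and records newly failed spindles.
  # Assumes os, xml.dom.minidom, and the module's Files helper are
  # imported elsewhere in this class's module.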
  def get_failed_disks(self):
    SYMCLI = '/opt/emc/SYMCLI/bin/'
 
    cmd = SYMCLI + 'symdisk -sid ' + self.sid  + ' list -failed -output xml_e'

    # Exec the sym command...
    output = os.popen(cmd)

    # Write the XML to an XML file so that it can be passed to the parser
    XML = '/var/www/html/XML/' + self.sid + '_failed_disks.xml'
    f = open(XML, 'w')
    for line in output:
      #line = line.strip()
      f.write(line)
    f.close()

    # Now parse the XML file 
    doc = minidom.parse(XML)
    for node in doc.getElementsByTagName('Disk'):
      ident = ""
      da = ""
      spindle_id = ""
      tech = ""
      speed = ""
      size = ""      
      vendor = ""
      failed = ""            
      for disk in node.getElementsByTagName('Disk_Info'):
        for child in disk.childNodes:
          if child.nodeType == 1:
            if child.nodeName == "ident":
              ident = child.firstChild.nodeValue
            if child.nodeName == "da_number":
              da = str(child.firstChild.nodeValue)
            if child.nodeName == "spindle_id":
              spindle_id = child.firstChild.nodeValue
            if child.nodeName == "technology":
              tech = str(child.firstChild.nodeValue)     
            if child.nodeName == "speed":
              speed = child.firstChild.nodeValue
            if child.nodeName == "vendor":
              vendor = str(child.firstChild.nodeValue)
            if child.nodeName == "rated_gigabytes":
              size = child.firstChild.nodeValue
            if child.nodeName == "failed_disk":
              failed = str(child.firstChild.nodeValue)   
                    
      data = str(ident) + " " + str(da) + " " + str(spindle_id) + " " + str(tech)
      data += " " + str(speed) + " " + str(vendor) + " " + str(size) + " " + str(failed)
      self.FailedDisks[str(spindle_id)] = data
      diskname = 'SID=' + self.sid + '__SPINDLE=' + str(spindle_id)
      if diskname not in self.Known_Failures:
        self.Known_Failures[diskname] = self.now
        self.New_Failures[diskname] = self.now
        
    out = []
    for disk in self.Known_Failures:
      out.append( disk + "  " + str(self.Known_Failures[disk]) + '\n')
    f = Files()
    f.write_file(self.failed_disks,out)
Example #2
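# Builds the SRDF status report (HTML + CSV) and emails an alert for any
# replication pair whose state is not 'Consistent'. Assumes the module's
# HTML, Files, EMail, and DateString helpers plus `time` are available.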
def createReports(srdfObj,site,datacenter):
    Alerts = {}
    alert = 0
    www = HTML()
    f = Files()
    mailObj = EMail()
    dates = DateString()
    page = []
    csvpage = []
    htmlfile = '/www/SRDF.html'
    csvfile = '/www/SRDF.csv'
    reportDate =  str(time.strftime("%c")) 
    page.append(www.start_html('SRDF Report'))
    html = '<p>Report Date: ' + reportDate + '<br><br>'
    html += 'SRDF Status Report<br><br>'
    page.append(html)
    for group in srdfObj.SRDF_Groups:
      # Print header table
      page.append('<p><br><table align=center  border=3>\n')
      row = ['Group ID','SymID','RemoteSymID','Tracks Pending','Delta Time']
      page.append(www.th_list(row))  
      csvpage.append(', '.join(row) + '\n')
      info = srdfObj.SRDF_Groups_Info[group]
      row = [  group, info['symid'],info['remote_symid'],info['tracks'],info['delta']  ]
      page.append(www.tr_list(row)) 
      page.append(www.end_table)
      csvpage.append(', '.join(row) + '\n')
      # Print Pair data
      page.append('<p><br><table align=center  border=3>\n')
      row = ['Source','Target','Link State','Mode','Replication State']
      page.append(www.th_list(row)) 
      csvpage.append(', '.join(row) + '\n')     
      pairs = srdfObj.SRDF_Groups_Pairs[group]
      for pair in pairs:
        fields = pair.split(',')
        if fields[4] != 'Consistent':
          bgcolor="#B0B3AF"
          alert += 1
          Alerts[group] = "SRDF is not in a consistent state for " + group
        else:
          bgcolor="#69F24E"
        page.append(www.tr_list(fields,bgcolor))
        csvpage.append(', '.join(fields) + '\n')
      page.append(www.end_table)
     
    
    page.append(www.end_html)
    f.write_file(htmlfile,page)
    f.write_file(csvfile,csvpage)

    if alert > 0:
      alertMessage = "The Following SRDF Groups are not Consistent\n\n"
      for groupalert in Alerts:
        alertMessage += Alerts[groupalert] + '\n'
      mailObj.subject = "VMAX SRDF Alert " + site + ', ' + datacenter
      mailObj.message = alertMessage
      mailObj.send_mail()   
Example #3
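# Renders the Open Replicator (OR) port-performance table from a per-SID
# rcopyperf log and appends the raw session list held on symObj. Assumes
# the module's HTML and Files helpers are available.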
def createReports(symObj,site,datacenter,sid):
    #
    www = HTML()
    # Create directories if they don't exist
    f = Files()
    f.dir = "/www/" + site + "/OR/"
    f.mkdir()
    htmlfile = '/www/' + site + '/OR/index.html'
    perf = '/opt/EMC/Scripts/shell/rcopyperf_' + sid + '.log'
    f.read_file(perf)
    page = []
    page.append(www.start_html_refresh('OR Sessions'))
    page.append(www.EMC_Header(site,datacenter))
    page.append(www.start_table(1,"OR Port Performance"))
    header = ["FA Port","MB/s","Ceiling","Number of Devices"]
    page.append(www.th_list(header))
    perftotal = 0
    for line in f.readfile:
      line = line.strip()
      fields = line.split()
      if len(fields) == 5:
        fields = fields[1:5]
      perftotal += int(fields[1])
      page.append(www.tr_list(fields))
    page.append(www.tr + www.th + "Total OR MB/s" + www.th + str(perftotal))
    page.append(www.end_table)  
    page.append('<br><br><pre>\n')
    for line in symObj.or_list:
      page.append(line)
    f.write_file(htmlfile,page)
Example #4
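# Dumps the thin-device (TDEV) id/size/allocation table held on sgObj to
# a per-array CSV. Assumes the module's Files helper is available.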
def createReports(sgObj,site,datacenter,sid):
  #
  f = Files()
  csv = []
  csv.append('LUN_ID,SIZE,ALLOCATED\n')
  csvfile = '/www/download/ThinLUN_Report_' + sid + '.csv'

  for key in sgObj.TDEVs.keys(): 
      lun_id = key
      size = sgObj.TDEVs[key][0]
      allocated = sgObj.TDEVs[key][1]
      csvline = str(lun_id) + ',' + str(size) + ',' + str(allocated) + '\n'
      csv.append(csvline)
  f.write_file(csvfile,csv)      
Example #5
def createReports(sgObj, site, datacenter, sid):
    #
    f = Files()
    csv = []
    csv.append('LUN_ID,SIZE,ALLOCATED\n')
    csvfile = '/www/download/ThinLUN_Report_' + sid + '.csv'

    for key in sgObj.TDEVs.keys():
        lun_id = key
        size = sgObj.TDEVs[key][0]
        allocated = sgObj.TDEVs[key][1]
        csvline = str(lun_id) + ',' + str(size) + ',' + str(allocated) + '\n'
        csv.append(csvline)
    f.write_file(csvfile, csv)
Example #6
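 # Loads the configured SRDF group names from a conf file, skipping
 # comment lines. Assumes the module's Files helper is available.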
 def __init__(self):
   
   self.SRDF_Groups = {}
   self.SRDF_Groups_Info = {}
   # self.SRDF_Groups_Pairs[group] = pairs (pairs is a dict with keys like [src tgt state])
   self.SRDF_Groups_Pairs = {}
   
   # Load group file
   self.groupfile = '/opt/EMC/Scripts/SRDF/etc/srdf_groups.conf'
   g = Files()
   g.read_file(self.groupfile)
   for line in g.readfile:
     # Strip whitespace/newlines so group names make clean dict keys
     line = line.strip()
     if line and not line.startswith("#"):
       self.SRDF_Groups[line] = 1
Example #7
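# Runs symcfg to list thin devices and writes LUN id, size, and allocated
# GB for PROD_FCR1 devices to a CSV. Assumes `os` and the module's Files
# helper are imported elsewhere.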
def get_thin_luns(sid):
  #
  f = Files()
  csv = []
  csv.append('LUN_ID,SIZE,ALLOCATED\n')
  csvfile = '/www/download/ThinLUN_Report_' + sid + '.csv'
  cmd =  '/emc/symcfg -sid ' + sid  + ' list -tdev -GB '
  output = os.popen(cmd)
  for line in output: 
    if 'PROD_FCR1' in line:
      #line.strip()
      fields = line.split()
      lun_id = fields[0]
      size = fields[3] 
      allocated = fields[4]
      
      csvline = str(lun_id) + ',' + str(size) + ',' + str(allocated) + '\n'
      #print csvline
      csv.append(csvline)
  f.write_file(csvfile,csv)      
Example #8
def get_thin_luns(sid):
    #
    f = Files()
    csv = []
    csv.append('LUN_ID,SIZE,ALLOCATED\n')
    csvfile = '/www/download/ThinLUN_Report_' + sid + '.csv'
    cmd = '/emc/symcfg -sid ' + sid + ' list -tdev -GB '
    output = os.popen(cmd)
    for line in output:
        if 'PROD_FCR1' in line:
            #line.strip()
            fields = line.split()
            lun_id = fields[0]
            size = fields[3]
            allocated = fields[4]

            csvline = str(lun_id) + ',' + str(size) + ',' + str(
                allocated) + '\n'
            #print csvline
            csv.append(csvline)
    f.write_file(csvfile, csv)
Example #9
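 # Loads the known failed-disk list, keeping only entries that failed
 # within the last DAYS_BETWEEN_INC days so older failures age out and
 # can raise a new incident. Assumes `time` and the module's Files
 # helper are imported elsewhere.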
 def __init__(self, sid):
   self.sid = sid
   self.FailedDisks = {}
   self.Known_Failures = {}
   self.New_Failures = {}
   # Below code loads the current known failed disks
   self.failed_disks = '/opt/EMC/Scripts/Python/failed_disks.dat'
   self.now = int(time.time())
   DAYS_BETWEEN_INC = 10
   DAY = ( 60 * 60 * 24 )
   h = Files()
   h.read_file(self.failed_disks)
   for line in h.readfile:
     if not line.startswith("#"):
       fields = line.split()
       if len(fields) == 2:
         diskname = fields[0]
         failtime = int(fields[1])
         deltatime = (self.now - failtime)
         if deltatime < ( DAYS_BETWEEN_INC * DAY):
           # Keep the disk on the current list if it failed less than
           # DAYS_BETWEEN_INC (10) days ago. Otherwise, drop it from the
           # list so a new INC gets created. Key/value = diskname/time
           self.Known_Failures[diskname] = failtime
Example #10
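# Writes the FAST-VP per-tier demand figures held on fpObj to an HTML
# table and a CSV download. Assumes the module's HTML and Files helpers
# are available.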
def createReports(fpObj,site,datacenter):
  htmlpage = []
  csvpage = []
  www = HTML()
  f = Files()
  html_index = '/www/' + site + '/fast_demand.html'
  fastCSV = '/www/' + site + '/CSV/' + site + '_fast_demand.csv'
  
  htmlpage.append(www.start_html('MV Summary'))
  linkpath = "/" + site + "/CSV/"  + site + '_fast_demand.csv'
  fpSummLink = '<a href=' + linkpath + '>FAST CSV</a>\n'
  htmlpage.append(www.EMC_Header(site,datacenter,'',fpSummLink))
  csvpage.append('FAST-VP Report for ' + site + '\n\n')
  
  htmlpage.append(www.start_table(3,'FAST-VP Demand report'))
  heading = ['SG_Name']
  for tier in fpObj.tiernames:
    heading.append(tier)
  htmlpage.append(www.th_list(heading))
  csvpage.append(', '.join(heading) + '\n')

  for sg in fpObj.tiers:
    # 
    line = [str(sg)]
    for tiername in fpObj.tiernames:
      if tiername in fpObj.tiers[sg]:
        line.append(fpObj.tiers[sg][tiername])
      else:
        line.append('0')
    
    csvpage.append(', '.join(line) + '\n')
    htmlpage.append(www.tr_list(line))
  htmlpage.append(www.end_table)  
  htmlpage.append(www.end_html)  
  f.write_file(html_index,htmlpage)
  f.write_file(fastCSV,csvpage)    
Example #11
 def __init__(self, sid):
     self.sid = sid
     self.FailedDisks = {}
     self.Known_Failures = {}
     self.New_Failures = {}
     # Below code loads the current known failed disks
     self.failed_disks = '/opt/EMC/Scripts/Python/failed_disks.dat'
     self.now = int(time.time())
     DAYS_BETWEEN_INC = 10
     DAY = (60 * 60 * 24)
     h = Files()
     h.read_file(self.failed_disks)
     for line in h.readfile:
         if not line.startswith("#"):
             fields = line.split()
             if len(fields) == 2:
                 diskname = fields[0]
                 failtime = int(fields[1])
                 deltatime = (self.now - failtime)
                 if deltatime < (DAYS_BETWEEN_INC * DAY):
                     # Keep the disk on the current list if it failed less
                     # than DAYS_BETWEEN_INC (10) days ago. Otherwise, drop
                     # it so a new INC gets created. Key/value = diskname/time
                     self.Known_Failures[diskname] = failtime
Example #12
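# Pulls each array's masking-view CSV over HTTP with curl and merges the
# rows, prefixed with the array name, into one global CSV. Assumes `os`
# and the module's Files helper are imported elsewhere.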
def getViews():
    globalViews = []
    curl = '/usr/bin/curl'
    URLS = {}
    URLS[
        'VMAX40K_1794'] = 'http://chapdt3util01.ops.tiaa-cref.org/VMAX40K_1794/CSV/VMAX40K_1794_views.csv'
    URLS[
        'VMAX10K_0957'] = 'http://chatst3utsan01.ops.tiaa-cref.org/VMAX10K_0957/CSV/VMAX10K_0957_views.csv'
    URLS[
        'VMAX40K_1852'] = 'http://dendrt3util01.ops.tiaa-cref.org/VMAX40K_1852/CSV/VMAX40K_1852_views.csv'

    for url in URLS:
        cmd = curl + ' ' + URLS[url]
        output = os.popen(cmd)
        for line in output:
            line = line.strip()
            line = url + ', ' + line + '\n'
            globalViews.append(line)

    outfile = '/var/www/html/CSV/global_views.csv'
    f = Files()
    f.dir = '/var/www/html/CSV'
    f.mkdir()
    f.write_file(outfile, globalViews)
Example #13
def getViews():
  globalViews = []
  curl = '/usr/bin/curl' 
  URLS = {}
  URLS['VMAX40K_1794']='http://chapdt3util01.ops.tiaa-cref.org/VMAX40K_1794/CSV/VMAX40K_1794_views.csv'
  URLS['VMAX10K_0957']='http://chatst3utsan01.ops.tiaa-cref.org/VMAX10K_0957/CSV/VMAX10K_0957_views.csv'
  URLS['VMAX40K_1852']='http://dendrt3util01.ops.tiaa-cref.org/VMAX40K_1852/CSV/VMAX40K_1852_views.csv'
    
  for url in URLS:
    cmd = curl + ' ' + URLS[url] 
    output = os.popen(cmd)
    for line in output:
      line = line.strip()
      line = url + ', ' + line + '\n'
      globalViews.append(line)
      
  outfile = '/var/www/html/CSV/global_views.csv'
  f = Files()
  f.dir = '/var/www/html/CSV'
  f.mkdir()
  f.write_file(outfile,globalViews)  
Example #14
def createReports(fpObj, site, datacenter):
    htmlpage = []
    csvpage = []
    www = HTML()
    f = Files()
    html_index = '/www/' + site + '/fast_demand.html'
    fastCSV = '/www/' + site + '/CSV/' + site + '_fast_demand.csv'

    htmlpage.append(www.start_html('MV Summary'))
    linkpath = "/" + site + "/CSV/" + site + '_fast_demand.csv'
    fpSummLink = '<a href=' + linkpath + '>FAST CSV</a>\n'
    htmlpage.append(www.EMC_Header(site, datacenter, '', fpSummLink))
    csvpage.append('FAST-VP Report for ' + site + '\n\n')

    htmlpage.append(www.start_table(3, 'FAST-VP Demand report'))
    heading = ['SG_Name']
    for tier in fpObj.tiernames:
        heading.append(tier)
    htmlpage.append(www.th_list(heading))
    csvpage.append(', '.join(heading) + '\n')

    for sg in fpObj.tiers:
        #
        line = [str(sg)]
        for tiername in fpObj.tiernames:
            if tiername in fpObj.tiers[sg]:
                line.append(fpObj.tiers[sg][tiername])
            else:
                line.append('0')

        csvpage.append(', '.join(line) + '\n')
        htmlpage.append(www.tr_list(line))
    htmlpage.append(www.end_table)
    htmlpage.append(www.end_html)
    f.write_file(html_index, htmlpage)
    f.write_file(fastCSV, csvpage)
Example #15
    def get_failed_disks(self):
        SYMCLI = '/opt/emc/SYMCLI/bin/'

        cmd = SYMCLI + 'symdisk -sid ' + self.sid + ' list -failed -output xml_e'

        # Exec the sym command...
        output = os.popen(cmd)

        # Write the XML to an XML file so that it can be passed to the parser
        XML = '/var/www/html/XML/' + self.sid + '_failed_disks.xml'
        f = open(XML, 'w')
        for line in output:
            #line = line.strip()
            f.write(line)
        f.close()

        # Now parse the XML file
        doc = minidom.parse(XML)
        for node in doc.getElementsByTagName('Disk'):
            ident = ""
            da = ""
            spindle_id = ""
            tech = ""
            speed = ""
            size = ""
            vendor = ""
            failed = ""
            for disk in node.getElementsByTagName('Disk_Info'):
                for child in disk.childNodes:
                    if child.nodeType == 1:
                        if child.nodeName == "ident":
                            ident = child.firstChild.nodeValue
                        if child.nodeName == "da_number":
                            da = str(child.firstChild.nodeValue)
                        if child.nodeName == "spindle_id":
                            spindle_id = child.firstChild.nodeValue
                        if child.nodeName == "technology":
                            tech = str(child.firstChild.nodeValue)
                        if child.nodeName == "speed":
                            speed = child.firstChild.nodeValue
                        if child.nodeName == "vendor":
                            vendor = str(child.firstChild.nodeValue)
                        if child.nodeName == "rated_gigabytes":
                            size = child.firstChild.nodeValue
                        if child.nodeName == "failed_disk":
                            failed = str(child.firstChild.nodeValue)

            data = str(ident) + " " + str(da) + " " + str(
                spindle_id) + " " + str(tech)
            data += " " + str(speed) + " " + str(vendor) + " " + str(
                size) + " " + str(failed)
            self.FailedDisks[str(spindle_id)] = data
            diskname = 'SID=' + self.sid + '__SPINDLE=' + str(spindle_id)
            if diskname not in self.Known_Failures:
                self.Known_Failures[diskname] = self.now
                self.New_Failures[diskname] = self.now

        out = []
        for disk in self.Known_Failures:
            out.append(disk + "  " + str(self.Known_Failures[disk]) + '\n')
        f = Files()
        f.write_file(self.failed_disks, out)
Example #16
def createReports(sgObj, site, datacenter):
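    # Builds the failed-disk report (current + dated history HTML, CSV)
    # and emails a warning when any failed disks are present; new
    # failures are also appended to a log file.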
    www = HTML()
    mailObj = EMail()

    dates = DateString()
    page = []
    csvpage = []
    temppage = []
    today = dates.today
    yesterday = dates.yesterday
    tomorrow = dates.tomorrow
    pagedirs = ['download', 'history']
    subdirs = [today, tomorrow, yesterday]
    f = Files()
    for dir in pagedirs:
        f.dir = '/var/www/html/' + dir
        f.mkdir()
        for sub in subdirs:
            f.dir = '/var/www/html/' + dir + '/' + sub
            f.mkdir()
    csvfile = '/www/download/' + today + '/' + datacenter + site + '_failed_disks.csv'
    csvfileperm = '/www/download/' + datacenter + site + '_failed_disks.csv'
    csvlink = '<a href=/download/' + today + '/' + datacenter + site + '_failed_disks.csv>CSV</a>'
    yesterdaylink = '<a href=/history/' + yesterday + '/' + datacenter + site + '_Failed_disks.html>' + yesterday + '</a>'
    tomorrowlink = '<a href=/history/' + tomorrow + '/' + datacenter + site + '_Failed_disks.html>' + tomorrow + '</a>'
    htmlfile1 = '/www/history/' + today + '/' + datacenter + site + '_Failed_disks.html'
    tempfile = '/www/history/' + tomorrow + '/' + datacenter + site + '_Failed_disks.html'
    htmlfile2 = '/www/' + datacenter + site + '_Failed_disks.html'
    logfile = '/www/' + site + '_Failed_disks.log'
    log = []

    reportDate = str(time.strftime("%c"))

    page.append(www.start_html('Failed Disk Report for ' + site))
    page.append(www.EMC_Header(site, datacenter, '', csvlink))

    page.append('<p><br><table align=center  border=3>\n')
    row = ['Failed Disk Report', tomorrowlink, yesterdaylink]
    page.append(www.th_list(row))
    page.append(www.end_table)

    page.append(www.start_table(3, site))
    heading = [
        'Disk_ID', 'DA_Port', 'Spindle_ID', 'Tech', 'Speed', 'Vendor', 'Size',
        'Failed'
    ]
    page.append(www.th_list(heading))
    csvpage.append(', '.join(heading) + '\n')

    alertMessage = 'Disk Error...\n\n'
    alertMessage += ', '.join(heading) + '\n\n'
    alerts = 0

    for disk in sgObj.FailedDisks:
        alerts += 1
        info = sgObj.FailedDisks[disk].split()
        row = [
            info[0], info[1], info[2], info[3], info[4], info[5], info[6],
            info[7]
        ]
        alertMessage += ', '.join(row) + '\n\n'
        csvpage.append(', '.join(row) + '\n')
        page.append(www.tr_list(row))

    page.append(www.end_table)
    page.append(www.end_html)
    f.write_file(htmlfile2, page)
    f.write_file(htmlfile1, page)
    f.write_file(csvfile, csvpage)
    f.write_file(csvfileperm, csvpage)
    temppage.append(www.start_html())
    temppage.append(
        www.Not_Found_Header('Report not created yet for ' + tomorrow))
    temppage.append(www.end_html)
    f.write_file(tempfile, temppage)
    if alerts > 0:

        mailObj.subject = "WARNING: DISK ERROR - VMAX Failed Disk Alert " + site + ', ' + datacenter
        mailObj.message = alertMessage
        mailObj.send_mail()

        if len(sgObj.New_Failures.keys()) > 0:
            reportDate = str(time.strftime("%x - %X"))
            for failed_disk in sgObj.New_Failures.keys():
                log.append(reportDate + " :" + mailObj.subject + " " +
                           failed_disk + "\n")
            f.write_file(logfile, log)
Example #17
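# Builds the thin-pool capacity report (HTML + CSV, with dated history
# copies) and emails an alert for any pool at or above its per-tier
# percent-used threshold. Assumes the module's HTML, EMail, DateString,
# and Files helpers plus `time` are available.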
def createReports(sgObj, site, datacenter):
    www = HTML()
    mailObj = EMail()

    dates = DateString()
    tiers = ['EFD', 'FC', 'SATA']
    alertLimits = {}
    alertLimits['EFD'] = 99
    alertLimits['FC'] = 80
    alertLimits['SATA'] = 85
    page = []
    csvpage = []
    temppage = []
    today = dates.today
    yesterday = dates.yesterday
    tomorrow = dates.tomorrow
    pagedirs = ['download', 'history']
    subdirs = [today, tomorrow, yesterday]
    f = Files()
    for dir in pagedirs:
        f.dir = '/var/www/html/' + dir
        f.mkdir()
        for sub in subdirs:
            f.dir = '/var/www/html/' + dir + '/' + sub
            f.mkdir()
    csvfile = '/www/download/' + today + '/' + site + '_pools.csv'
    csvfileperm = '/www/download/' + site + '_pools.csv'
    csvlink = '<a href=/download/' + today + '/' + site + '_pools.csv>CSV</a>'
    yesterdaylink = '<a href=/history/' + yesterday + '/' + datacenter + site + '_EMC_Summary.html>' + yesterday + '</a>'
    tomorrowlink = '<a href=/history/' + tomorrow + '/' + datacenter + site + '_EMC_Summary.html>' + tomorrow + '</a>'
    htmlfile1 = '/www/history/' + today + '/' + datacenter + site + '_EMC_Summary.html'
    tempfile = '/www/history/' + tomorrow + '/' + datacenter + site + '_EMC_Summary.html'
    htmlfile2 = '/www/' + datacenter + site + '_EMC_Summary.html'

    reportDate = str(time.strftime("%c"))

    page.append(www.start_html('Thin Pool Report for ' + site))
    page.append(www.EMC_Header(site, datacenter, '', csvlink))

    page.append('<p><br><table align=center  border=3>\n')
    row = ['EMC Storage Summary Report', tomorrowlink, yesterdaylink]
    page.append(www.th_list(row))
    page.append(www.end_table)

    page.append(www.start_table(3, site))
    heading = [
        'Pool', 'Capacity', 'Used', 'Percent Used', 'Free', 'Provisioned',
        'Subscription', 'Subscription Limit', 'PRC', 'Technology', 'Protection'
    ]
    page.append(www.th_list(heading))
    total_usable = 0
    total_provisioned = 0
    total_used = 0
    total_free = 0
    #csvpage.append('Thin Pool Report for ' + site + '\n\n')
    csvpage.append(', '.join(heading) + '\n')

    alertMessage = 'The following pools exceed set thresholds...\n\n'
    alerts = 0
    for tier in tiers:
        for pool in sgObj.pools.keys():
            comparepool = str(pool).upper()
            if tier in comparepool:
                ##########
                compression = sgObj.pooldetails[pool]['COMPRESSION']
                subscription_limit = sgObj.pooldetails[pool][
                    'SUBSCRIPTION_LIMIT']
                PRC = sgObj.pooldetails[pool]['PRC']
                tech = sgObj.pooldetails[pool]['TECH']
                protection = sgObj.pooldetails[pool]['LAYOUT']
                ##########
                usable = float(sgObj.pools[pool]['total_usable_tracks_gb'])
                used = float(sgObj.pools[pool]['total_used_tracks_gb'])
                percent_used = sgObj.pools[pool]['percent_full']
                free = float(sgObj.pools[pool]['total_free_tracks_gb'])
                subscription = sgObj.pools[pool]['subs_percent']
                usable = int(round(usable, 0))
                used = int(round(used, 0))
                free = int(round(free, 0))
                if int(subscription) == 0:
                    provisioned = 0
                else:
                    provisioned = usable * (float(subscription) / 100)
                total_provisioned += provisioned
                total_usable += usable
                total_used += used
                total_free += free
                html = www.tr
                if int(percent_used) >= alertLimits[tier]:
                    html = www.alerttr
                    alertMessage += "Thin pool " + comparepool + " is " + str(
                        percent_used) + "% used.\n"
                    alertMessage += str(free) + " GB free remain in the pool\n"
                    alertMessage += " The threshold for " + comparepool + ' is set to ' + str(
                        alertLimits[tier]) + '%\n\n'
                    alerts += 1
                row = [
                    pool,
                    str(usable),
                    str(used),
                    str(percent_used),
                    str(free),
                    str(provisioned), subscription, subscription_limit, PRC,
                    tech, protection
                ]
                csvpage.append(', '.join(row) + '\n')

                page.append(www.tr_list(row))

    total_pct_used = int((int(total_used) / float(total_usable)) * 100)
    total_subscription = int(
        (int(total_provisioned) / float(total_usable)) * 100)
    row = [
        'Totals',
        str(total_usable),
        str(total_used),
        str(total_pct_used),
        str(total_free),
        str(total_provisioned),
        str(total_subscription), '___', '___', '___', '___'
    ]

    page.append(www.tr_list(row))
    page.append(www.end_table)
    page.append(www.end_html)
    f.write_file(htmlfile2, page)
    f.write_file(htmlfile1, page)
    f.write_file(csvfile, csvpage)
    f.write_file(csvfileperm, csvpage)
    temppage.append(www.start_html())
    temppage.append(
        www.Not_Found_Header('Report not created yet for ' + tomorrow))
    temppage.append(www.end_html)
    f.write_file(tempfile, temppage)
    if alerts > 0:
        mailObj.subject = "Storage Capacity Alert, " + site + ', ' + datacenter
        mailObj.message = alertMessage
        mailObj.send_mail()
Example #18
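# Maps each FA port's WWN to its Brocade switch connection via the BRCD
# helper and reports port, WWN, device count, and switch details as HTML
# and CSV. Assumes the HTML, EMail, BRCD, DateString, and Files helpers
# are available.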
def createReports(sgObj,site,datacenter):
    www = HTML()
    mailObj = EMail()
    swObj = BRCD()
    swObj.get_connection_map()    
    dates = DateString()

    page = []
    csvpage = []
    temppage = []    
    today =  dates.today
    yesterday = dates.yesterday
    tomorrow = dates.tomorrow
    pagedirs = ['download','history']
    subdirs = [today,tomorrow,yesterday]
    f = Files()
    for dir in pagedirs:
      f.dir = '/var/www/html/' + dir
      f.mkdir() 
      for sub in subdirs:
        f.dir = '/var/www/html/' + dir + '/' + sub
        f.mkdir() 
    csvfile = '/www/download/' + today + '/' + site + '_FA.csv'
    csvlink = '<a href=/download/' + today + '/' + site + '_FA.csv>CSV</a>'
    htmlfile = '/www/' + site + '/FA_Ports.html'

        
    reportDate =  str(time.strftime("%c")) 

    page.append(www.start_html('FA Port Report for ' + site))
    page.append(www.EMC_Header(site,datacenter,'',csvlink))
    
     
    page.append(www.start_table(3,site))
    heading = ['FA Port','WWN','Device Count', 'Switch', 'Portname', 'Port']
    page.append(www.th_list(heading))
 
    csvpage.append('FA Port Report for ' + site + '\n\n')
    csvpage.append(', '.join(heading) + '\n')
    
 
    for FAPORT in sorted(sgObj.FA2WWNmap):
      port = str(FAPORT)

      wwn = str(sgObj.FA2WWNmap[FAPORT])
      if wwn.lower() in swObj.connectionmap:
        fields = swObj.connectionmap[wwn.lower()]
        switchname = fields[0] 
        switchportname =   fields[1]
        switchport =   fields[2]
      else:
        switchname = 'Not_Connected' 
        switchportname =   'Not_Connected' 
        switchport =   'Not_Connected'       
      if 'Total' in port:
        color = '#C7C5C6'
        switchname = '---' 
        switchportname =   '---' 
        switchport =   '---'       
      else:
        color = '#DEF2FA'              
      dev_count  = str(sgObj.FA_Device_Count[FAPORT])
      row = [port,wwn, dev_count, switchname, switchportname, switchport]
      csvpage.append(', '.join(row) + '\n')
      page.append(www.tr_list(row,color))
    page.append(www.end_table)
    page.append(www.end_html)
    f.write_file(htmlfile,page)
    f.write_file(csvfile,csvpage)
Example #19
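# Produces the FAST-VP demand report: per-SG policy, per-tier usage, and
# total used/provisioned capacity, written to the site pages plus dated
# history copies. Assumes the module's HTML, Files, and DateString
# helpers plus `time` are available.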
def createDemandReport(fpObj, sgObj, site, datacenter):
    dates = DateString()
    htmlpage = []
    csvpage = []
    temppage = []
    www = HTML()
    f = Files()

    today = dates.today
    yesterday = dates.yesterday
    tomorrow = dates.tomorrow
    pagedirs = ['download', 'history']
    subdirs = [today, tomorrow, yesterday]
    f = Files()
    for dir in pagedirs:
        f.dir = '/var/www/html/' + dir
        f.mkdir()
        for sub in subdirs:
            f.dir = '/var/www/html/' + dir + '/' + sub
            f.mkdir()

    csvfile = '/www/download/' + today + '/' + site + '_fast_demand.csv'
    csvfileperm = '/www/download/' + site + '_fast_demand.csv'
    csvlink = '<a href=/download/' + today + '/' + site + '_fast_demand.csv>CSV</a>'
    yesterdaylink = '<a href=/history/' + yesterday + '/' + datacenter + '_fast_demand.html>' + yesterday + '</a>'
    tomorrowlink = '<a href=/history/' + tomorrow + '/' + datacenter + '_fast_demand.html>' + tomorrow + '</a>'
    htmlfile1 = '/www/history/' + today + '/' + datacenter + '_fast_demand.html'
    tempfile = '/www/history/' + tomorrow + '/' + datacenter + '_fast_demand.html'
    htmlfile2 = '/www/' + datacenter + '_fast_demand.html'

    html_index = '/www/' + site + '/fast_demand.html'
    fastCSV = '/www/' + site + '/CSV/' + site + '_fast_demand.csv'

    htmlpage.append(www.start_html('FAST Demand'))
    htmlpage.append(www.EMC_Header(site, datacenter, '', csvlink))

    demandReportPath = '/' + site + '/fast_demand.html'
    DemandReportLink = '<a href=' + demandReportPath + '>FAST-VP Demand Report</a>\n'
    fastReportPath = '/' + site + '/fast.html'
    fastReportLink = '<a href=' + fastReportPath + '>FAST-VP Policy Report</a>\n'

    htmlpage.append('<p><br><table align=center  border=3>\n')
    row = [fastReportLink, DemandReportLink]
    htmlpage.append(www.th_list(row))
    htmlpage.append(www.end_table)

    htmlpage.append('<p><br><table align=center  border=3>\n')
    row = ['FAST Demand Report', tomorrowlink, yesterdaylink]
    htmlpage.append(www.th_list(row))
    htmlpage.append(www.end_table)

    csvpage.append('FAST-VP Report for ' + site + '\n\n')

    htmlpage.append(www.start_table(3, 'FAST-VP Demand report'))
    heading = ['SG_Name', 'Policy']
    for tier in fpObj.tiernames:
        heading.append(tier)
    heading.append('SG_USED')
    heading.append('SG_PROVISIONED')
    htmlpage.append(www.th_list(heading))
    csvpage.append(', '.join(heading) + '\n')

    for sg in fpObj.tiers:
        policy = fpObj.Associations[sg]
        GB = float(sgObj.SG2Capacity[sg]) / 1024
        ProvisionedGB = "%.2f" % GB
        sg_provisioned = str(ProvisionedGB) + ' GB'
        #
        sglinkpath = "/" + site + "/SG/" + sg + ".html"
        sglink = '<a href=' + sglinkpath + '>' + str(sg) + '</a>'
        line = [sglink, policy]
        line2 = [sg, policy]
        for tiername in fpObj.tiernames:
            if tiername in fpObj.tiers[sg]:
                line.append(fpObj.tiers[sg][tiername])
                line2.append(fpObj.tiers[sg][tiername])
            else:
                line.append('0')
                line2.append('0')
        # The next 5 lines add total used and provisioned columns...
        sg_total = str(fpObj.tiers[sg]['SG_USED']) + ' GB'
        line.append(sg_total)
        line2.append(sg_total)
        line.append(sg_provisioned)
        line2.append(sg_provisioned)
        #
        csvpage.append(', '.join(line2) + '\n')
        htmlpage.append(www.tr_list(line))
    htmlpage.append(www.end_table)
    htmlpage.append(www.end_html)
    f.write_file(html_index, htmlpage)
    f.write_file(fastCSV, csvpage)
    f.write_file(htmlfile2, htmlpage)
    f.write_file(htmlfile1, htmlpage)
    f.write_file(csvfile, csvpage)
    f.write_file(csvfileperm, csvpage)
    temppage.append(www.start_html())
    temppage.append(
        www.Not_Found_Header('Report not created yet for ' + tomorrow))
    temppage.append(www.end_html)
    f.write_file(tempfile, temppage)
Example #20
def createReports(sgObj, site, datacenter):
    www = HTML()
    mailObj = EMail()
    swObj = BRCD()
    swObj.get_connection_map()
    dates = DateString()

    page = []
    csvpage = []
    temppage = []
    today = dates.today
    yesterday = dates.yesterday
    tomorrow = dates.tomorrow
    pagedirs = ['download', 'history']
    subdirs = [today, tomorrow, yesterday]
    f = Files()
    for dir in pagedirs:
        f.dir = '/var/www/html/' + dir
        f.mkdir()
        for sub in subdirs:
            f.dir = '/var/www/html/' + dir + '/' + sub
            f.mkdir()
    csvfile = '/www/download/' + today + '/' + site + '_FA.csv'
    csvlink = '<a href=/download/' + today + '/' + site + '_FA.csv>CSV</a>'
    htmlfile = '/www/' + site + '/FA_Ports.html'

    reportDate = str(time.strftime("%c"))

    page.append(www.start_html('FA Port Report for ' + site))
    page.append(www.EMC_Header(site, datacenter, '', csvlink))

    page.append(www.start_table(3, site))
    heading = ['FA Port', 'WWN', 'Device Count', 'Switch', 'Portname', 'Port']
    page.append(www.th_list(heading))

    csvpage.append('FA Port Report for ' + site + '\n\n')
    csvpage.append(', '.join(heading) + '\n')

    for FAPORT in sorted(sgObj.FA2WWNmap):
        port = str(FAPORT)

        wwn = str(sgObj.FA2WWNmap[FAPORT])
        if wwn.lower() in swObj.connectionmap:
            fields = swObj.connectionmap[wwn.lower()]
            switchname = fields[0]
            switchportname = fields[1]
            switchport = fields[2]
        else:
            switchname = 'Not_Connected'
            switchportname = 'Not_Connected'
            switchport = 'Not_Connected'
        if 'Total' in port:
            color = '#C7C5C6'
            switchname = '---'
            switchportname = '---'
            switchport = '---'
        else:
            color = '#DEF2FA'
        dev_count = str(sgObj.FA_Device_Count[FAPORT])
        row = [port, wwn, dev_count, switchname, switchportname, switchport]
        csvpage.append(', '.join(row) + '\n')
        page.append(www.tr_list(row, color))
    page.append(www.end_table)
    page.append(www.end_html)
    f.write_file(htmlfile, page)
    f.write_file(csvfile, csvpage)
Example #21
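# Writes the storage-group summary index plus a per-SG detail page and
# CSV, grouping SGs by FAST policy; SGs with no policy or no masking
# view are highlighted. Assumes `os`, `subprocess`, `time`, and the
# module's HTML and Files helpers are available.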
def createReports(sgObj, site, datacenter):
    #
    # Create directories if they don't exist
    orange = '#F7EE6F'
    green = '#69F24E'
    dir = "/www/" + site + "/SG/"
    if not os.path.isdir(dir):
        subprocess.call(["mkdir", dir])
    dir = '/www/' + site + '/CSV'
    if not os.path.isdir(dir):
        subprocess.call(["mkdir", dir])
    #
    #
    reportDate = str(time.strftime("%c"))
    GB = float(sgObj.ProvisionedCapacity) / 1024
    ProvisionedGB = "%.2f" % GB
    info1 = 'Total Provisioned(GB)=' + str(ProvisionedGB)

    www = HTML()
    html_index = '/www/' + site + '/index.html'
    sgSummCSV = '/www/' + site + '/CSV/' + site + '_SG_Summary.csv'
    f_index = open(html_index, 'w')
    f_SummCSV = open(sgSummCSV, 'w')
    f_SummCSV.write(
        "Storage Group,FAST Policy,Capacity GB,Masking View,Max IOPS,Max MB/s\n"
    )
    linkpath = "/" + site + "/CSV/" + site + '_SG_Summary.csv'
    sgSummLink = '<a href=' + linkpath + '>Summary CSV</a>\n'
    f_index.write(www.start_html('SG Summary'))
    f_index.write(www.EMC_Header(site, datacenter, info1, sgSummLink))
    f_index.write(www.start_table())
    f_index.write(www.end_table)

    # Dictionary: key = policy name and value is a list of html formatted lines, one line per SG with that policy.
    fpTables = {}
    # Dictionary of policies. Used to iterate over the above var.
    policies = {}
    demandpage = '/www/' + site + '/fast_demand.html'
    f = Files()
    f.read_file(demandpage)
    dp = f.readfile[:]
    for sg in sgObj.SGs:
        #
        #
        found = False
        demandtable = ''
        for txt in dp:
            if 'SG_Name' in txt:
                hr = txt
            if str(sg) in txt:
                found = True
                rowdata = txt
        if found:
            demandtable = '<br><p><table border=3 align=center><caption>FAST-VP Demand</caption>\n'
            demandtable += hr
            demandtable += '\n'
            demandtable += rowdata
            demandtable += '\n'
            demandtable += '\n</table>'

        color = ''
        if sg in sgObj.Associations:
            fp = sgObj.Associations[sg]
        else:
            fp = 'NO_FAST_POLICY'
            color = orange
        #
        policies[fp] = 1
        if not fp in fpTables:
            fpTables[fp] = []
        #
        link = "/" + site + "/SG/" + sg + ".html"
        sgGB = float(sgObj.SG2Capacity[sg]) / 1024
        sgProvisionedGB = "%.2f" % sgGB
        MVs = sgObj.SGinMV[sg]
        IOPS_LIMIT = str(sgObj.SG_IOPS_Limit[sg])
        MB_LIMIT = str(sgObj.SG_MB_Limit[sg])
        if MVs.upper() == 'NO':
            color = orange

        rowlist = [
            "<a href=" + link + '>' + sg + "</a>", fp,
            str(sgProvisionedGB), MVs, IOPS_LIMIT, MB_LIMIT
        ]
        SGhtml = www.tr_list(rowlist, color)
        fpTables[fp].append(SGhtml)
        f_SummCSV.write(sg + ',' + fp + ',' + str(sgProvisionedGB) + ',' +
                        MVs + ',' + IOPS_LIMIT + ',' + MB_LIMIT + "\n")
        # go ahead and write out the sg detail HTML page.
        sgfile = "/www/" + site + "/SG/" + sg + ".html"
        sgCSV = "/www/" + site + "/CSV/" + site + '_' + sg + ".csv"
        linkpath = "/" + site + "/CSV/" + site + '_' + sg + ".csv"
        link1 = '<a href=' + linkpath + '>SG CSV</a>\n'
        f_sg = open(sgfile, 'w')
        f_sgCSV = open(sgCSV, 'w')
        f_sgCSV.write('Storage Group Report for ' + sg + '\n\n\n')
        f_sg.write(www.start_html('SG Detail'))
        f_sg.write(www.EMC_Header(site, datacenter, '', link1))
        html = "<p><H3><center>Detailed Storage Report for " + sg + " SSC_VMAX40K_1794</center></H3>\n"
        f_sg.write(html)
        f_sg.write(www.start_table())
        f_sg.write(www.tr + www.th +
                   "Storage Group<th>FAST<TH>Capacity GB</tr>\n")
        html = www.tr + www.td + sg + www.td + fp + www.td + str(
            sgProvisionedGB) + "\n"
        f_sg.write(html)
        f_sg.write(www.end_table)
        f_sg.write(demandtable)
        f_sg.write(www.start_table(3, 'Device List'))
        f_sg.write(www.tr + www.th + "Device<th>Capacity GB</tr>\n")
        f_sgCSV.write("Volume,Capacity GB\n")
        for device in sgObj.SGs[sg]:
            devGB = float(sgObj.SGs[sg][device]) / 1024
            dev_formatted = "%.2f" % devGB
            html = www.tr + www.td + device + www.td + str(
                dev_formatted) + "\n"
            f_sg.write(html)
            f_sgCSV.write(device + ',' + str(dev_formatted) + '\n')
        f_sg.write(www.end_table)
        f_sg.write(www.end_html)
        f_sg.close()
        f_sgCSV.close()

    for fp in policies:
        f_index.write(
            www.start_table(
                3, 'Groups with FAST VP ' + fp + ' Policy Association'))
        f_index.write(
            "<tr><th>Storage Group<th>FAST<TH>Capacity GB<TH>Masking View<TH>IOPS Limit<TH>MB/s Limit</tr>\n"
        )
        for line in fpTables[fp]:
            f_index.write(line)
        f_index.write(www.end_table)
    f_index.write(www.end_html)
    f_index.close()
    f_SummCSV.close()
Example #22
def createReports(sgObj,site,datacenter):
    www = HTML()
    mailObj = EMail()
    
    dates = DateString()
    page = []
    csvpage = []
    temppage = []    
    today =  dates.today
    yesterday = dates.yesterday
    tomorrow = dates.tomorrow
    pagedirs = ['download','history']
    subdirs = [today,tomorrow,yesterday]
    f = Files()
    for dir in pagedirs:
      f.dir = '/var/www/html/' + dir
      f.mkdir() 
      for sub in subdirs:
        f.dir = '/var/www/html/' + dir + '/' + sub
        f.mkdir() 
    csvfile = '/www/download/' + today + '/' + site + '_failed_disks.csv'
    csvfileperm = '/www/download/' + site + '_failed_disks.csv'
    csvlink = '<a href=/download/' + today + '/' + site + '_failed_disks.csv>CSV</a>'
    yesterdaylink = '<a href=/history/' + yesterday + '/' + datacenter + '_Failed_disks.html>' + yesterday + '</a>'  
    tomorrowlink = '<a href=/history/' + tomorrow + '/' + datacenter + '_Failed_disks.html>' + tomorrow + '</a>' 
    htmlfile1 = '/www/history/' + today + '/' + datacenter + '_Failed_disks.html'
    tempfile = '/www/history/' + tomorrow + '/' + datacenter + '_Failed_disks.html'
    htmlfile2 = '/www/' + datacenter + site + '_Failed_disks.html'
    logfile = '/www/' + site + '_Failed_disks.log'
    log = []
        
    reportDate =  str(time.strftime("%c")) 

    page.append(www.start_html('Failed Disk Report for ' + site))
    page.append(www.EMC_Header(site,datacenter,'',csvlink))
    
    page.append('<p><br><table align=center  border=3>\n')
    row = ['Failed Disk Report',tomorrowlink,yesterdaylink]
    page.append(www.th_list(row)) 
    page.append(www.end_table)
    
    
    page.append(www.start_table(3,site))
    heading = ['Disk_ID','DA_Port','Spindle_ID','Tech','Speed','Vendor','Size','Failed']
    page.append(www.th_list(heading))
    csvpage.append(', '.join(heading) + '\n')

    
    alertMessage = 'Disk Error...\n\n'
    alertMessage += ', '.join(heading) + '\n\n'
    alerts = 0

    for disk in sgObj.FailedDisks:
      alerts += 1
      info = sgObj.FailedDisks[disk].split()
      row = [info[0],info[1],info[2],info[3],info[4],info[5],info[6],info[7]]
      alertMessage += ', '.join(row) + '\n\n'
      csvpage.append(', '.join(row) + '\n')
      page.append(www.tr_list(row))
          

    page.append(www.end_table)
    page.append(www.end_html)
    f.write_file(htmlfile2,page)
    f.write_file(htmlfile1,page)
    f.write_file(csvfile,csvpage)
    f.write_file(csvfileperm,csvpage)
    temppage.append(www.start_html())
    temppage.append(www.Not_Found_Header('Report not created yet for '+tomorrow))
    temppage.append(www.end_html)
    f.write_file(tempfile,temppage)
    if alerts > 0:
      
      mailObj.subject = "WARNING: DISK ERROR - VMAX Failed Disk Alert " + site + ', ' + datacenter
      mailObj.message = alertMessage
      mailObj.send_mail()

      if len(sgObj.New_Failures.keys()) > 0:
        reportDate =  str(time.strftime("%x - %X"))
        for failed_disk in sgObj.New_Failures.keys():
          log.append(reportDate + " :" + mailObj.subject + " " + failed_disk + "\n")      
        f.write_file(logfile,log) 
Example #23
def createReports(sgObj,site,datacenter):
    www = HTML()
    mailObj = EMail()
    
    dates = DateString()
    tiers = ['EFD','FC','SATA']
    alertLimits = {}
    alertLimits['EFD'] = 99
    alertLimits['FC'] = 80
    alertLimits['SATA'] = 85
    page = []
    csvpage = []
    temppage = []    
    today =  dates.today
    yesterday = dates.yesterday
    tomorrow = dates.tomorrow
    pagedirs = ['download','history']
    subdirs = [today,tomorrow,yesterday]
    f = Files()
    for dir in pagedirs:
      f.dir = '/var/www/html/' + dir
      f.mkdir() 
      for sub in subdirs:
        f.dir = '/var/www/html/' + dir + '/' + sub
        f.mkdir() 
    csvfile = '/www/download/' + today + '/' + site + '_pools.csv'
    csvfileperm = '/www/download/' + site + '_pools.csv'
    csvlink = '<a href=/download/' + today + '/' + site + '_pools.csv>CSV</a>'
    yesterdaylink = '<a href=/history/' + yesterday + '/' + datacenter + site + '_EMC_Summary.html>' + yesterday + '</a>'  
    tomorrowlink = '<a href=/history/' + tomorrow + '/' + datacenter + site + '_EMC_Summary.html>' + tomorrow + '</a>' 
    htmlfile1 = '/www/history/' + today + '/' + datacenter + site + '_EMC_Summary.html'
    tempfile = '/www/history/' + tomorrow + '/' + datacenter + site + '_EMC_Summary.html'
    htmlfile2 = '/www/' + datacenter + site + '_EMC_Summary.html'

        
    reportDate =  str(time.strftime("%c")) 

    page.append(www.start_html('Thin Pool Report for ' + site))
    page.append(www.EMC_Header(site,datacenter,'',csvlink))
    
    page.append('<p><br><table align=center  border=3>\n')
    row = ['EMC Storage Summary Report',tomorrowlink,yesterdaylink]
    page.append(www.th_list(row)) 
    page.append(www.end_table)
    
    
    page.append(www.start_table(3,site))
    heading = ['Pool','Capacity','Used','Percent Used','Free','Provisioned','Subscription','Subscription Limit','PRC','Technology','Protection']
    page.append(www.th_list(heading))
    total_usable = 0 
    total_provisioned = 0 
    total_used = 0
    total_free = 0
    #csvpage.append('Thin Pool Report for ' + site + '\n\n')
    csvpage.append(', '.join(heading) + '\n')

    
    alertMessage = 'The following pools exceed set thresholds...\n\n'
    alerts = 0
    for tier in tiers:
      for pool in sgObj.pools.keys():
        comparepool = str(pool).upper()
        if tier in comparepool:
          ##########
          compression = sgObj.pooldetails[pool]['COMPRESSION']
          subscription_limit = sgObj.pooldetails[pool]['SUBSCRIPTION_LIMIT']
          PRC = sgObj.pooldetails[pool]['PRC']
          tech = sgObj.pooldetails[pool]['TECH']
          protection = sgObj.pooldetails[pool]['LAYOUT']
          ##########
          usable = float(sgObj.pools[pool]['total_usable_tracks_gb'])
          used = float(sgObj.pools[pool]['total_used_tracks_gb'])
          percent_used = sgObj.pools[pool]['percent_full']
          free = float(sgObj.pools[pool]['total_free_tracks_gb'])
          subscription = sgObj.pools[pool]['subs_percent']
          usable = int(round(usable,0))
          used = int(round(used,0))
          free = int(round(free,0))
          if int(subscription) == 0:
            provisioned = 0
          else:
            provisioned = usable * ( float(subscription) / 100)
          total_provisioned += provisioned
          total_usable += usable
          total_used += used
          total_free += free
          html = www.tr
          if int(percent_used) >= alertLimits[tier]:
            html = www.alerttr
            alertMessage += "Thin pool " + comparepool + " is " + str(percent_used) + "% used.\n"
            alertMessage += str(free) + " GB free remain in the pool\n"
            alertMessage +=  " The threshold for " + comparepool + ' is set to ' + str(alertLimits[tier]) + '%\n\n'
            alerts += 1
          row = [pool,str(usable),str(used),str(percent_used),str(free),str(provisioned),subscription,subscription_limit,PRC,tech,protection]
          csvpage.append(', '.join(row) + '\n')
          
          page.append(www.tr_list(row))
          
    total_pct_used = int((int(total_used) / float(total_usable)) * 100)
    total_subscription = int((int(total_provisioned) / float(total_usable)) * 100)
    row = ['Totals',str(total_usable),str(total_used),str(total_pct_used),str(total_free),str(total_provisioned),str(total_subscription),'___','___','___','___']
    
    page.append(www.tr_list(row))  
    page.append(www.end_table)
    page.append(www.end_html)
    f.write_file(htmlfile2,page)
    f.write_file(htmlfile1,page)
    f.write_file(csvfile,csvpage)
    f.write_file(csvfileperm,csvpage)
    temppage.append(www.start_html())
    temppage.append(www.Not_Found_Header('Report not created yet for '+tomorrow))
    temppage.append(www.end_html)
    f.write_file(tempfile,temppage)
    if alerts > 0:
      mailObj.subject = "Storage Capacity Alert, " + site + ', ' + datacenter
      mailObj.message = alertMessage
      mailObj.send_mail()   
Example #24
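# Appends newly seen failed-disk entries from the unfiltered log to the
# master failed-disk log, recording each decision in a monitor log.
# Assumes the module's Files helper is available.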
def LogErrors():
  log = []
  unfiltered = []
  adds = []
  failed_disks_log = '/opt/EMC/log/failed_disks.log'
  failed_disks_unfiltered = '/opt/EMC/log/unfiltered.log'
  mylog = '/opt/EMC/log/monitor.log'
  h = Files()
  h.write_log(mylog,"Starting logger..." + "\n")
  h.read_file(failed_disks_log)
  for line in h.readfile:
    if not line.startswith("#"):
      log.append(line)
  h.read_file(failed_disks_unfiltered)
  for line in h.readfile:
    if not line.startswith("#"):
      unfiltered.append(line)  
  for line in unfiltered:
    if line not in log:
      h.write_log(mylog,"New Failed Disk!: " + line + "\n")
      adds.append(line + '\n')
    else:
      h.write_log(mylog,"Already in the log: " + line + "\n")

  if len(adds) > 0:
    h.write_file_append(failed_disks_log,adds)
Example #25
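# Reports masking views per port group, writes a page per initiator
# group, then records each WWPN login in the `vmax` MySQL database,
# updating known logins and inserting new ones. Assumes `os`, `re`,
# `time`, `datetime`, `subprocess`, a MySQLdb-style `mysql` module, and
# the module's HTML and Files helpers are available.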
def createReports(mvObj,site,datacenter):
    reportDate =  str(time.strftime("%c"))
    GB =  float(mvObj.mvCapTotal)/1024
    ProvisionedGB = "%.2f" % GB    
    info1 = 'Total Provisioned=' + str(ProvisionedGB)
    www = HTML()
    f = Files()
    html_index = '/www/' + site + '/views.html'
    viewsCSV = '/www/' + site + '/CSV/' + site + '_views.csv'
    htmlpage = []
    csvpage = []
    
    htmlpage.append(www.start_html('MV Summary'))
    linkpath = "/" + site + "/CSV/"  + site + '_views.csv'
    mvSummLink = '<a href=' + linkpath + '>Views CSV</a>\n'
    htmlpage.append(www.EMC_Header(site,datacenter,info1,mvSummLink))
    csvpage.append('MV Report for ' + site + '\n\n')

    # Write out a table for Masking Views

    csvpage.append("View Name,SG,IG,PG,Capacity\n")
    for pg in mvObj.portgroups:
      htmlpage.append(www.start_table(3,'Masking Views on PG, ' + pg ))
      htmlpage.append("<tr><th>View Name<th>SG<th>IG<th>PG<th>Capacity</tr>\n")
      for mv in mvObj.portgroups[pg]:
        viewname = mv['MV']
        ig = mv['IG']
        pg = mv['PG']
        sg = mv['SG']
        gb = float(mv['MB'])/1024
        fmtgb =  "%.2f" % gb
        gb = str(fmtgb)
        sglink = "<a href=/" + site + "/SG/" + sg + ".html>" + sg + "</a>\n"
        iglink = "<a href=/" + site + "/IG/" + ig + ".html>" + ig + "</a>\n"
        row = [viewname,sglink,iglink,pg,gb]      
        htmlpage.append(www.tr_list(row))    
        csrow = [viewname,sg,ig,pg,gb]
        csvpage.append(', '.join(csrow) + '\n')
      htmlpage.append(www.end_table)    
    htmlpage.append(www.end_html)
    f.write_file(html_index,htmlpage)
    f.write_file(viewsCSV,csvpage)

    dir = "/www/" + site + "/IG/"
    if not os.path.isdir(dir):
      subprocess.call(["mkdir", dir]) 
    
    for IG in mvObj.HostInitiatorGroups:
      igPage = '/www/' + site + '/IG/' + IG + '.html'
      f_ig = open(igPage,'w')
      f_ig.write(www.start_html('Initiator Group ' + IG))
      f_ig.write(www.EMC_Header(site,datacenter))
      # Write out a table for Masking Views
      f_ig.write(www.start_table(3,'Initiator Group ' + IG))
      f_ig.write("<tr><th>HBA Initiators</tr>\n")
     
      for wwpn in mvObj.HostInitiatorGroups[IG]:
        f_ig.write("<tr><th>" + wwpn + "</tr>\n")
      f_ig.write(www.end_table) 
      f_ig.write(www.end_html)
      f_ig.close()      

    for IG in mvObj.ClusterInitiatorGroups:
      igPage = '/www/' + site + '/IG/' + IG + '.html'
      f_ig = open(igPage,'w')
      f_ig.write(www.start_html('Cluster Initiator Group ' + IG))
      f_ig.write(www.EMC_Header(site,datacenter))
      # Write out a table for Masking Views
      f_ig.write(www.start_table(3,'Initiator Group ' + IG))
      f_ig.write("<tr><th>Cluster Nodes</tr>\n")
     
      for wwpn in mvObj.ClusterInitiatorGroups[IG]:
        link = '<a href=/' + site + '/IG/' + wwpn + '.html>' + wwpn + '</a>\n'
        f_ig.write("<tr><th>" + link + "</tr>\n")
      f_ig.write(www.end_table) 
      f_ig.write(www.end_html)
      f_ig.close()            

    # Database section
    # First, grab a list of known WWPN logins
    vmaxdb=mysql.connect(host="chatst3utsan01",user="******",passwd="emc",db="vmax")
    cur = vmaxdb.cursor()
    query = "SELECT wwpn,ig FROM initiatorgroups"
    results  = cur.execute(query)
    rows = cur.fetchall()
    # Now create a simple list of logins. We use this to check whether we've seen a login before.
    # If we know this login, we just update the time seen and array/port.
    knownwwpns = []
    p = re.compile(r'\w+')
    for row in rows:
      wwpn = row[0]
      ig = row[1]

      key = ''.join(p.findall(wwpn + ig)).lower()    
      knownwwpns.append(key)
    # Now get the current date and time.
    today = datetime.date.today()
    formatteddate = today.strftime('%Y-%m-%d')
    now = datetime.datetime.now().time()
    formattedtime = now.strftime('%H:%M:%S')      
    
    for IG in mvObj.HostInitiatorGroups:
      for wwpn in mvObj.HostInitiatorGroups[IG]:
        wwpn = wwpn.lower()
        ig = str(IG)
        ig = ig.lower()
        
        insertquery = 'insert into initiatorgroups(wwpn,array_name,ig,record_date,record_time) '
        insertquery += " values('" + wwpn + "','" + site + "','"
        insertquery += ig + "',NOW(),NOW() );"

        updatequery = 'update initiatorgroups SET array_name = %s, ig = %s, record_date = %s, record_time = %s '
        updatequery += ' WHERE wwpn = %s AND ig = %s'

        wwpn_ig = ''.join(p.findall(wwpn + ig))
        
        if wwpn_ig in knownwwpns:
          
          results  = cur.execute(updatequery, (site,ig,formatteddate,formattedtime,wwpn,ig))
        if wwpn_ig in knownwwpns:
          
          results  = cur.execute(updatequery, (site,ig,formatteddate,formattedtime,wwpn,ig))
        else:
          results  = cur.execute(insertquery)

    # Commit so the inserts/updates persist; MySQLdb-style connections
    # do not autocommit by default (assumption about the mysql module).
    vmaxdb.commit()
Example #26
def createReports(sgObj,site,datacenter):
    #
    # Create directories if they don't exist
    orange = '#F7EE6F'
    green =  '#69F24E'       
    dir = "/www/" + site + "/SG/"
    if not os.path.isdir(dir):
      subprocess.call(["mkdir", dir]) 
    dir = '/www/' + site + '/CSV'      
    if not os.path.isdir(dir):
      subprocess.call(["mkdir", dir]) 
    #
    #    
    reportDate =  str(time.strftime("%c"))
    GB =  float(sgObj.ProvisionedCapacity)/1024
    ProvisionedGB = "%.2f" % GB
    info1 = 'Total Provisioned(GB)=' + str(ProvisionedGB)

    www = HTML()
    html_index = '/www/' + site + '/index.html'
    sgSummCSV = '/www/' + site + '/CSV/' + site + '_SG_Summary.csv'
    f_index = open(html_index,'w')
    f_SummCSV = open(sgSummCSV,'w')
    f_SummCSV.write("Storage Group,FAST Policy,Capacity GB,Masking View,Max IOPS,Max MB/s\n")
    linkpath = "/" + site + "/CSV/"  + site + '_SG_Summary.csv'
    sgSummLink = '<a href=' + linkpath + '>Summary CSV</a>\n'
    f_index.write(www.start_html('SG Summary'))
    f_index.write(www.EMC_Header(site,datacenter,info1,sgSummLink))
    f_index.write(www.start_table())
    f_index.write(www.end_table)
    
    # Dictionary: key = policy name and value is a list of html formatted lines, one line per SG with that policy.
    fpTables = {}
    # Dictionary of policies. Used to iterate over the above var.
    policies = {}
    demandpage = '/www/' + site + '/fast_demand.html'
    f = Files()
    f.read_file(demandpage)
    dp = f.readfile[:]
    for sg in sgObj.SGs:
      #
      #
      found = False 
      demandtable = ''
      for txt in dp:
        if 'SG_Name' in txt:
          hr = txt
        if str(sg) in txt:
          found = True
          rowdata = txt
      if found:
        demandtable = '<br><p><table border=3 align=center><caption>FAST-VP Demand</caption>\n' 
        demandtable += hr
        demandtable += '\n'
        demandtable += rowdata
        demandtable += '\n'
        demandtable += '\n</table>'
      
       
      color = ''      
      if sg in sgObj.Associations:
        fp = sgObj.Associations[sg]
      else:
        fp = 'NO_FAST_POLICY'
        color = orange
      #
      policies[fp] = 1
      if not fp in fpTables:
        fpTables[fp] = []
      #
      link = "/" + site + "/SG/" + sg + ".html"
      sgGB = float(sgObj.SG2Capacity[sg])/1024
      sgProvisionedGB = "%.2f" % sgGB
      MVs = sgObj.SGinMV[sg]
      IOPS_LIMIT = str(sgObj.SG_IOPS_Limit[sg])
      MB_LIMIT = str(sgObj.SG_MB_Limit[sg])
      if MVs.upper() == 'NO':
        color = orange
      
      rowlist = ["<a href=" + link + '>' + sg + "</a>",fp, str(sgProvisionedGB),MVs,IOPS_LIMIT,MB_LIMIT]
      SGhtml = www.tr_list(rowlist,color)
      fpTables[fp].append(SGhtml)
      f_SummCSV.write(sg + ',' + fp + ',' + str(sgProvisionedGB)   + ',' + MVs + ',' + IOPS_LIMIT + ',' + MB_LIMIT + "\n")
      # go ahead and write out the sg detail HTML page.
      sgfile = "/www/" + site + "/SG/" + sg + ".html"
      sgCSV = "/www/" + site + "/CSV/" + site + '_' + sg + ".csv"
      linkpath = "/" + site + "/CSV/" + site + '_' + sg + ".csv"
      link1 = '<a href=' + linkpath + '>SG CSV</a>\n'
      f_sg = open(sgfile,'w')
      f_sgCSV = open(sgCSV,'w')
      f_sgCSV.write('Storage Group Report for ' + sg + '\n\n\n')
      f_sg.write(www.start_html('SG Detail'))
      f_sg.write(www.EMC_Header(site,datacenter,'',link1))
      html = "<p><H3><center>Detailed Storage Report for " + sg + " SSC_VMAX40K_1794</center></H3>\n"
      f_sg.write(html)
      f_sg.write(www.start_table())
      f_sg.write(www.tr + www.th + "Storage Group<th>FAST<TH>Capacity GB</tr>\n") 
      html = www.tr + www.td +  sg + www.td + fp + www.td + str(sgProvisionedGB) + "\n"
      f_sg.write(html)
      f_sg.write(www.end_table)  
      f_sg.write(demandtable)  
      f_sg.write(www.start_table(3,'Device List'))
      f_sg.write(www.tr + www.th + "Device<th>Capacity GB</tr>\n") 
      f_sgCSV.write("Volume,Capacity GB\n")
      for device in sgObj.SGs[sg]:
        devGB = float(sgObj.SGs[sg][device])/1024
        dev_formatted = "%.2f" % devGB
        html = www.tr + www.td + device + www.td + str(dev_formatted) +"\n"
        f_sg.write(html)
        f_sgCSV.write(device + ',' + str(dev_formatted) + '\n')
      f_sg.write(www.end_table)
      f_sg.write(www.end_html)
      f_sg.close()
      f_sgCSV.close()
      
    for fp in policies:
      f_index.write(www.start_table(3,'Groups with FAST VP ' + fp + ' Policy Association'))
      f_index.write("<tr><th>Storage Group<th>FAST<TH>Capacity GB<TH>Masking View<TH>IOPS Limit<TH>MB/s Limit</tr>\n")   
      for line in fpTables[fp]:
        f_index.write(line)
      f_index.write(www.end_table)    
    f_index.write(www.end_html)  
    f_index.close()
    f_SummCSV.close()
Example #27
def createReports(mvObj, site, datacenter):
    reportDate = str(time.strftime("%c"))
    GB = float(mvObj.mvCapTotal) / 1024
    ProvisionedGB = "%.2f" % GB
    info1 = 'Total Provisioned=' + str(ProvisionedGB)
    www = HTML()
    f = Files()
    html_index = '/www/' + site + '/views.html'
    viewsCSV = '/www/' + site + '/CSV/' + site + '_views.csv'
    htmlpage = []
    csvpage = []

    htmlpage.append(www.start_html('MV Summary'))
    linkpath = "/" + site + "/CSV/" + site + '_views.csv'
    mvSummLink = '<a href=' + linkpath + '>Views CSV</a>\n'
    htmlpage.append(www.EMC_Header(site, datacenter, info1, mvSummLink))
    csvpage.append('MV Report for ' + site + '\n\n')

    # Write out a table for Masking Views

    csvpage.append("View Name,SG,IG,PG,Capacity\n")
    for pg in mvObj.portgroups:
        htmlpage.append(www.start_table(3, 'Masking Views on PG, ' + pg))
        htmlpage.append(
            "<tr><th>View Name<th>SG<th>IG<th>PG<th>Capacity</tr>\n")
        for mv in mvObj.portgroups[pg]:
            viewname = mv['MV']
            ig = mv['IG']
            pg = mv['PG']
            sg = mv['SG']
            gb = float(mv['MB']) / 1024
            fmtgb = "%.2f" % gb
            gb = str(fmtgb)
            sglink = "<a href=/" + site + "/SG/" + sg + ".html>" + sg + "</a>\n"
            iglink = "<a href=/" + site + "/IG/" + ig + ".html>" + ig + "</a>\n"
            row = [viewname, sglink, iglink, pg, gb]
            htmlpage.append(www.tr_list(row))
            csrow = [viewname, sg, ig, pg, gb]
            csvpage.append(', '.join(csrow) + '\n')
        htmlpage.append(www.end_table)
    htmlpage.append(www.end_html)
    f.write_file(html_index, htmlpage)
    f.write_file(viewsCSV, csvpage)

    dir = "/www/" + site + "/IG/"
    if not os.path.isdir(dir):
        subprocess.call(["mkdir", dir])

    for IG in mvObj.HostInitiatorGroups:
        igPage = '/www/' + site + '/IG/' + IG + '.html'
        f_ig = open(igPage, 'w')
        f_ig.write(www.start_html('Initiator Group ' + IG))
        f_ig.write(www.EMC_Header(site, datacenter))
        # Write out a table for Masking Views
        f_ig.write(www.start_table(3, 'Initiator Group ' + IG))
        f_ig.write("<tr><th>HBA Initiators</tr>\n")

        for wwpn in mvObj.HostInitiatorGroups[IG]:
            f_ig.write("<tr><th>" + wwpn + "</tr>\n")
        f_ig.write(www.end_table)
        f_ig.write(www.end_html)
        f_ig.close()

    for IG in mvObj.ClusterInitiatorGroups:
        igPage = '/www/' + site + '/IG/' + IG + '.html'
        f_ig = open(igPage, 'w')
        f_ig.write(www.start_html('Cluster Initiator Group ' + IG))
        f_ig.write(www.EMC_Header(site, datacenter))
        # Write out a table for Masking Views
        f_ig.write(www.start_table(3, 'Initiator Group ' + IG))
        f_ig.write("<tr><th>Cluster Nodes</tr>\n")

        for wwpn in mvObj.ClusterInitiatorGroups[IG]:
            link = '<a href=/' + site + '/IG/' + wwpn + '.html>' + wwpn + '</a>\n'
            f_ig.write("<tr><th>" + link + "</tr>\n")
        f_ig.write(www.end_table)
        f_ig.write(www.end_html)
        f_ig.close()

    # Database section
    # First, grab a list of known WWPN logins
    vmaxdb = mysql.connect(host="chatst3utsan01",
                           user="******",
                           passwd="emc",
                           db="vmax")
    cur = vmaxdb.cursor()
    query = "SELECT wwpn,ig FROM initiatorgroups"
    results = cur.execute(query)
    rows = cur.fetchall()
    # Now create a simple list of logins. We use this to check whether we've seen a login before.
    # If we know this login, we just update the time seen and array/port.
    knownwwpns = []
    p = re.compile(r'\w+')
    for row in rows:
        wwpn = row[0]
        ig = row[1]

        key = ''.join(p.findall(wwpn + ig)).lower()
        knownwwpns.append(key)
    # Now get the current date and time.
    today = datetime.date.today()
    formatteddate = today.strftime('%Y-%m-%d')
    now = datetime.datetime.now().time()
    formattedtime = now.strftime('%H:%M:%S')

    for IG in mvObj.HostInitiatorGroups:
        for wwpn in mvObj.HostInitiatorGroups[IG]:
            wwpn = wwpn.lower()
            ig = str(IG)
            ig = ig.lower()

            insertquery = 'insert into initiatorgroups(wwpn,array_name,ig,record_date,record_time) '
            insertquery += " values('" + wwpn + "','" + site + "','"
            insertquery += ig + "',NOW(),NOW() );"

            updatequery = 'update initiatorgroups SET array_name = %s, ig = %s, record_date = %s, record_time = %s '
            updatequery += ' WHERE wwpn = %s AND ig = %s'

            wwpn_ig = ''.join(p.findall(wwpn + ig))

            if wwpn_ig in knownwwpns:

                results = cur.execute(
                    updatequery,
                    (site, ig, formatteddate, formattedtime, wwpn, ig))
            else:
                results = cur.execute(insertquery)

    # Commit so the inserts/updates persist; MySQLdb-style connections
    # do not autocommit by default (assumption about the mysql module).
    vmaxdb.commit()