def sum_hds():
    """Aggregate raw HDS drive capacity per storage system.

    Joins drive_status with drive_vendor on (Storage, HDU), then sums the
    per-drive sizes into RawTotal / RawData / RawSpare buckets.

    Returns:
        dict: {storage: {'RawTotal', 'RawData', 'RawSpare',
                         'RawAllocated', 'RawFree'}}; the last two stay 0
        here and are filled in by other steps.
    """
    capD = {}
    status_records = load_data(os.path.join(JSONDIR, 'hds', 'drive_status'), [])
    vendor_records = load_data(os.path.join(JSONDIR, 'hds', 'drive_vendor'), [])
    # Index vendor data by (Storage, HDU) instead of the previous O(n*m)
    # nested-loop join; as with sequential update(), a later duplicate wins.
    vendor_by_drive = {(r['Storage'], r['HDU']): r for r in vendor_records}
    for record in status_records:
        extra = vendor_by_drive.get((record['Storage'], record['HDU']))
        if extra:
            record.update(extra)
    for record in status_records:
        storage = record['Storage']
        # 0.9313 = 1000*1000*1000/1024/1024/1024/1000 (vendor GB -> TiB)
        drive_size = int(record['Capacity'].replace('GB', '')) * 0.9313 / 1024
        if storage not in capD:
            capD[storage] = {'RawTotal': 0, 'RawData': 0, 'RawSpare': 0,
                             'RawAllocated': 0, 'RawFree': 0}
        capD[storage]['RawTotal'] += drive_size
        if record['Type'] == 'Data':
            capD[storage]['RawData'] += drive_size
        # Only standby spares count as spare capacity.
        if record['Type'] == 'Spare' and record['Status'] == 'Standby':
            capD[storage]['RawSpare'] += drive_size
    return capD
def main():
    """Merge port, portshow and swport_alias data into port_common records."""
    records = []
    port_records = load_data(os.path.join(JSONDIR, 'port'), [])
    portshow_records = load_data(os.path.join(JSONDIR, 'portshow'), [])
    alias_records = load_data(os.path.join(JSONDIR, 'swport_alias'), [])
    for record0 in port_records:
        switch = record0['Switch']
        uPort = record0['uPort']
        Index = record0['Index']
        record = {key: record0[key] for key in
                  ['Switch', 'uPort', 'Index', 'Speed', 'State', 'Type']}
        # Enrich with portshow details matched on (Switch, uPort).
        for record1 in portshow_records:
            if switch == record1['Switch'] and uPort == record1['uPort']:
                record['portWwn_of_devices_connected'] = record1['portWwn_of_devices_connected']
                record['portName'] = record1['portName']
        # Enrich with aliases matched on "Switch Index".
        for record2 in alias_records:
            if record2['Swport'] == '%s %s' % (switch, Index):
                record['Aliases'] = record2['Aliases']
        records.append(record)
    dump_data(os.path.join(JSONDIR, 'port_common'), records)
    # BUG FIX: the log label previously said 'path' although this function
    # writes the 'port_common' file.
    logging.info('%s | %s records' % ('port_common', len(records)))
    return
def main():
    """Build the consolidated capacity report across 3PAR, EVA and HDS arrays."""
    filepath = os.path.join(JSONDIR, 'models')
    models = load_data(filepath, [])
    # Raw capacity per storage, merged from all three vendor summarizers.
    RawCapD = {}
    RawCapD.update(sum_3par())
    RawCapD.update(sum_eva())
    RawCapD.update(sum_hds())
    FormCapD = sum_form_cap()
    Form3parAvailD = sum_form_3par_avail()
    FormHdsAvailD = sum_form_hds_cap()
    records = []
    for storage in RawCapD:
        record = {'Storage': storage}
        record.update(RawCapD[storage])
        # NOTE(review): unlike the two .get() lookups below, a storage
        # missing from FormCapD raises KeyError here — confirm intended.
        record.update(FormCapD[storage])
        record.update(Form3parAvailD.get(storage, {}))
        record.update(FormHdsAvailD.get(storage, {}))
        if storage in models.get('eva', []):
            # EVA: available is half the raw free space — presumably to
            # account for mirroring; confirm with the array docs.
            record['FormattedAvailable'] = record['RawFree']/2
        elif storage in models.get('hds', []):
            # HDS: derive raw allocated/free from formatted figures via the
            # raw-to-formatted ratio. NOTE(review): divides by zero when
            # FormattedUsed + FormattedAvailable == 0 — confirm impossible.
            rate = record['RawData']/(record['FormattedUsed'] + record['FormattedAvailable'])
            record['RawAllocated'] = record['FormattedUsed']*rate
            record['RawFree'] = record['FormattedAvailable']*rate
            record['FormattedTotal'] = record['FormattedUsed'] + record['FormattedAvailable']
        records.append(record)
    sorted_systems = load_data(os.path.join(JSONDIR, 'sorted_systems'), [])
    records = sort_storage_records(records, sorted_systems)
    filepath = os.path.join(JSONDIR, 'capacity')
    dump_data(filepath, records)
    return
def sum_f_ports(cdicts):
    """Sum per-counter values over F-Ports that are not ISL link ports.

    Args:
        cdicts: {counter: {"<switch> <index>": [values, ...]}} mapping.

    Returns:
        dict: {counter: [column-wise sums over the selected swports]}.
    """
    # Removed a block of commented-out code (an older NPIV/Trunk filter).
    recs = load_data(os.path.join(BASEDIR, 'data/fc/json/port'), [])
    # Sets give O(1) membership tests in the selection loop below
    # (previously lists, i.e. O(n) per lookup).
    f_ports = {'{} {}'.format(r['Switch'], r['Index'])
               for r in recs if r['Type'] == 'F-Port'}
    recs = load_data(os.path.join(BASEDIR, 'data/fc/json/link'), [])
    link_ports = {'{} {}'.format(r['Switch1'], r['Port1']) for r in recs}
    fdicts = {}
    for counter, swportvalues in cdicts.items():
        fdicts.setdefault(counter, [])
        for swport, values in swportvalues.items():
            if swport in f_ports and swport not in link_ports:
                fdicts[counter].append(values)
    # Collapse the collected value rows into column-wise sums.
    for counter, values in fdicts.items():
        fdicts[counter] = [sum(x) for x in zip(*values)]
    return fdicts
def main():
    """Build the fabric graph from link and port records and persist it."""
    links = load_data(os.path.join(JSONDIR, "link"), [])
    ports = load_data(os.path.join(JSONDIR, "port"), [])
    graph = f_graph(links, ports)
    dump_data(os.path.join(JSONDIR, "graph"), graph)
    logging.info("%s | %s records" % ("graph", len(graph)))
    return
def sum_form_cap():
    """Sum formatted volume capacity per storage system.

    Returns a dict keyed by storage with Formatted* buckets; Used,
    Presented and NotPresented are accumulated from volume sizes and
    divided by 1024 at the end (units assumed GiB -> TiB — confirm).
    """
    records = load_data(os.path.join(JSONDIR, 'volumes'), [])
    totals = {}
    for rec in records:
        storage = rec['Storage']
        size = rec['Size']
        if storage not in totals:
            totals[storage] = {
                'FormattedTotal': 0,
                'FormattedUsed': 0,
                'FormattedPresented': 0,
                'FormattedNotPresented': 0,
                'FormattedAvailable': 0,
            }
        entry = totals[storage]
        entry['FormattedUsed'] += size
        # A volume with hosts attached counts as presented capacity.
        if rec['Hosts']:
            entry['FormattedPresented'] += size
        else:
            entry['FormattedNotPresented'] += size
    for entry in totals.values():
        for key in ('FormattedUsed', 'FormattedPresented', 'FormattedNotPresented'):
            entry[key] = round(entry[key]/1024, 2)
    return totals
def run():
    """SNMP connectivity test: query sysName from every configured switch."""
    logger = get_logger(logfile, "sanscript.fc")
    logger.info("START")
    dirpath = os.path.dirname(os.path.realpath(__file__))
    connections = load_data(os.path.join(dirpath, "SwitchConnection.json"), [])
    for connection in connections:
        systemname = connection["name"]
        address = connection["address"]
        logger.info("Trying to connect to %s (SNMP)." % systemname)
        generator = cmdgen.CommandGenerator()
        errorIndication, errorStatus, errorIndex, varBinds = generator.getCmd(
            cmdgen.CommunityData("public", mpModel=0),
            cmdgen.UdpTransportTarget((address, 161)),
            cmdgen.MibVariable("SNMPv2-MIB", "sysName", 0),
            lookupNames=True,
            lookupValues=True,
        )
        # Log whichever failure the SNMP layer reported, or the values.
        if errorIndication:
            logger.warning(errorIndication)
        elif errorStatus:
            logger.warning(errorStatus)
        else:
            for name, val in varBinds:
                logger.info("%s = %s" % (name.prettyPrint(), val.prettyPrint()))
        if errorIndication or errorStatus:
            logger.warning("%s test failure." % systemname)
        else:
            logger.info("%s test successful." % systemname)
    logger.info("FINISH")
    return
def sort_records(records):
    """Sort switch records in the order given by the sorted_systems file.

    Records whose Switch is not listed sort first (default index 0).
    Empty input or non-dict records are returned unchanged.
    """
    # isinstance() instead of the previous `type(x) == dict` anti-pattern.
    if records and isinstance(records[0], dict) and 'Switch' in records[0]:
        names = load_data(os.path.join(JSONDIR, 'sorted_systems'), [])
        indexes = {name: num for num, name in enumerate(names)}
        records.sort(key=lambda rec: indexes.get(rec['Switch'], 0))
    return records
def main():
    """Parse collected text command output for this tribe into JSON records."""
    directory = os.path.join(JSONDIR, TRIBE)
    if not os.path.exists(directory):
        os.makedirs(directory)
    models = load_data(os.path.join(JSONDIR, 'models')).get(TRIBE, [])
    commandout = {}
    for filename in os.listdir(TEXTDIR):
        filepath = os.path.join(TEXTDIR, filename)
        # partition() is robust to filenames with zero or multiple dots,
        # where the previous `system, command = filename.split('.')` raised
        # ValueError. Well-formed "system.command" names behave identically.
        system, _, command = filename.partition('.')
        if system not in models:
            continue
        with open(filepath) as f:
            lines = f.readlines()
        # First matching parser wins (preserves the original break).
        for parser_command, parser in PARSERS:
            if command == parser_command:
                if parser not in commandout:
                    commandout[parser] = []
                function = getattr(defs_parsers, 'p_' + parser)
                commandout[parser] += function(system, lines)
                break
    for command, records in commandout.items():
        filepath = os.path.join(JSONDIR, TRIBE, command)
        logging.info('%s | %s records' % (command, len(records)))
        dump_data(filepath, records)
    return
def main():
    """Collect zoning configs from all fabrics via SSH and persist a snapshot."""
    if not os.path.exists(CONFIGSDIR):
        os.makedirs(CONFIGSDIR)
    connections = load_data(fabrics_connections_path, [])
    fields = ['name', 'address', 'username', 'password']
    out = {}
    for conn in connections:
        args = [conn[field] for field in fields]
        args.append([['zoneshow', 'zoneshow'],])
        systemname, outs, errs, exception = ssh_run(args)
        if exception:
            continue
        lines = outs['zoneshow'].split('\n')
        out[systemname] = {
            'aliases': parse_aliases(lines),
            'zones': parse_zones(lines),
            'config': parse_config(lines),
        }
    # Timestamp of the previous snapshot (None when none exists yet).
    if os.path.isfile(oldconfigpath):
        from_dt = datetime.fromtimestamp(os.path.getmtime(oldconfigpath))
    else:
        from_dt = None
    # Rotate: the current snapshot becomes "old" before writing the new one.
    if os.path.isfile(newconfigpath):
        os.rename(newconfigpath, oldconfigpath)
    till_dt = datetime.now()
    dump_data(newconfigpath, out)
    dump_data(os.path.join(JSONDIR, 'changes_dts'),
              {'From': str(from_dt) if from_dt else None, 'Till': str(till_dt)})
    logging.info('%s | %s records' % (newconfigpath, len(out)))
    return
def main():
    """Diff the previous and current zoning configs and store change records."""
    old_config = load_data(oldconfigpath, {})
    new_config = load_data(newconfigpath, {})
    dts = load_data(os.path.join(JSONDIR, 'changes_dts'), {})
    if not old_config:
        logging.warning('no config in %s' % oldconfigpath)
        logging.info('run collect_configs script again')
    if not new_config:
        logging.warning('no config in %s' % newconfigpath)
    # Only diff when both snapshots are present.
    if old_config and new_config:
        records = compare_configs(old_config, new_config)
        # Stamp every change record with the From/Till window.
        for record in records:
            record.update(dts)
        filepath = os.path.join(JSONDIR, 'changes')
        dump_data(filepath, records)
        logging.info('%s | %s records' % (filepath, len(records)))
    return
def get_f_link_ports():
    """Map port Address -> full port record for every F-Port."""
    records = load_data(os.path.join(JSONDIR, 'port'), [])
    return {rec['Address']: rec for rec in records if rec['Type'] == 'F-Port'}
def get_n_trunk_ports():
    """Map Comment[2:8] -> port record for trunked N-Ports."""
    records = load_data(os.path.join(JSONDIR, 'port'), [])
    return {rec['Comment'][2:8]: rec
            for rec in records
            if rec['Type'] == 'N-Port' and 'Trunk' in rec['Comment']}
def main():
    """Enrich server records with CPU/RAM details fetched from OA enclosures."""
    username = '******'
    password = '******'
    servers = load_data(os.path.join(JSONDIR, 'servers'))
    # Group occupied bay numbers by enclosure name.
    enc_bays = {}
    for server in servers:
        enc_name = server['Enclosure_Name']
        bay_number = server['Server_Bay']
        enc_bays.setdefault(enc_name, []).append(bay_number)
    new_info = {}
    encurls = load_data(os.path.join(JSONDIR, 'encurls'))
    for enc_name, enc_fqdn in encurls.items():
        bays = enc_bays.get(enc_name, [])
        # Narrowed from a bare `except:`, which also swallowed
        # KeyboardInterrupt/SystemExit; only real errors should be skipped.
        try:
            oaSessionKey = get_session(enc_fqdn, username, password)
            print('SessionKey: {}'.format(oaSessionKey))
        except Exception:
            print(datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S"),
                  'Connection failed:', enc_fqdn)
            continue
        for bay_number in bays:
            ram_size, cpu_count, cpu_type, cpu_cores = get_blade_info(
                enc_fqdn, oaSessionKey, bay_number)
            new_info['{}.{}'.format(enc_name, bay_number)] = {
                'CPU_type': cpu_type,
                'CPU_cores': cpu_cores,
                'CPU_count': cpu_count,
                'RAM_size': ram_size,
            }
    # Merge the fetched details back onto the server records.
    for server in servers:
        key = '{}.{}'.format(server['Enclosure_Name'], server['Server_Bay'])
        ext = new_info.get(key)
        if ext:
            server.update(ext)
    dump_data(os.path.join(JSONDIR, 'servers2'), servers)
def get_domains():
    """Map switchDomain -> Switch name, skipping records without a domain."""
    records = load_data(os.path.join(JSONDIR, 'switch'), [])
    return {rec['switchDomain']: rec['Switch']
            for rec in records if rec.get('switchDomain')}
def sum_form_3par_avail():
    """Map Storage -> {'FormattedAvailable': FREE} from the 3PAR capacity file."""
    records = load_data(os.path.join(JSONDIR, 'capacity_3par'), [])
    return {rec['Storage']: {'FormattedAvailable': rec['FREE']}
            for rec in records}
def do_temp(snmp_values):
    """Swap temp-file contents: return the previous state, write the new one.

    Returns:
        tuple: (mtime of the previous temp file or None if absent,
                previous values dict, {} if absent).
    """
    previous_dt = None
    if os.path.exists(TEMPFILE):
        previous_dt = os.path.getmtime(TEMPFILE)
    previous_values = load_data(TEMPFILE, {})
    dump_data(TEMPFILE, snmp_values)
    return previous_dt, previous_values
def get_n_link_ports():
    """Return port records for non-trunked N-Ports."""
    records = load_data(os.path.join(JSONDIR, 'port'), [])
    # Removed a leftover debug `print(records)` that spammed stdout.
    return [rec for rec in records
            if rec['Type'] == 'N-Port' and 'Trunk' not in rec['Comment']]
def get_addresses():
    """Map 'Switch.Index' -> port Address."""
    records = load_data(os.path.join(JSONDIR, 'port'), [])
    return {'%s.%s' % (rec['Switch'], rec['Index']): rec['Address']
            for rec in records}
def get_speeds():
    """Map 'Switch.Index' -> port speed with the 'G'/'N' markers stripped."""
    records = load_data(os.path.join(JSONDIR, 'port'), [])
    speeds = {}
    for rec in records:
        uid = '%s.%s' % (rec['Switch'], rec['Index'])
        speeds[uid] = rec['Speed'].replace('G', '').replace('N', '')
    return speeds
def do_temp(snmp_values):
    """Swap temp-file contents: return the previous state, write the new one.

    Creates TEMP_DIR on first use. Returns (previous mtime or None,
    previous values dict, {} if the file did not exist).
    """
    if not os.path.exists(TEMP_DIR):
        os.makedirs(TEMP_DIR)
    filepath = os.path.join(TEMP_DIR, 'values.json')
    previous_dt = None
    if os.path.exists(filepath):
        previous_dt = os.path.getmtime(filepath)
    previous_values = load_data(filepath, {})
    dump_data(filepath, snmp_values)
    return previous_dt, previous_values
def main():
    """Delete collected text files whose system is no longer configured."""
    connections = load_data(CONNECTIONS, [])
    known_names = [conn['name'] for conn in connections]
    for filename in os.listdir(TEXTDIR):
        # Only 'system.command'-style files are candidates for removal.
        if '.' not in filename:
            continue
        systemname = filename.split('.')[0]
        if systemname not in known_names:
            filepath = os.path.join(TEXTDIR, filename)
            os.remove(filepath)
            logging.info('removed: {}'.format(filepath))
    return
def get_hds_volume_hosts():
    """Map 'Storage LUN' -> sorted list of host-group names.

    NOTE(review): assumes every Group value contains a ':' separator —
    records without one raise IndexError; confirm against the data.
    """
    records = load_data(os.path.join(JSONDIR, 'hds/hgmap'), [])
    hosts_by_uid = {}
    for rec in records:
        uid = '%s %s' % (rec['Storage'], rec['LUN'])
        hosts_by_uid.setdefault(uid, set()).add(rec['Group'].split(':')[1])
    return {uid: sorted(hosts) for uid, hosts in hosts_by_uid.items()}
def get_tpar_volume_hosts():
    """Map VV_WWN -> sorted list of host names from the 3PAR vlun records."""
    records = load_data(os.path.join(JSONDIR, '3par/vlun'), [])
    hosts_by_wwn = {}
    for rec in records:
        hosts_by_wwn.setdefault(rec['VV_WWN'], set()).add(rec['HostName'])
    return {wwn: sorted(hosts) for wwn, hosts in hosts_by_wwn.items()}
def _dump_and_log(name, records):
    """Write records to JSONDIR/<name> and log the record count."""
    dump_data(os.path.join(JSONDIR, name), records)
    logging.info('%s | %s records' % (name, len(records)))


def main():
    """Derive alias/port relation files from zoning and port data.

    Decomposed: the dump+log boilerplate (repeated three times) moved into
    _dump_and_log; also removes the loop variable that shadowed the loaded
    `aliases` list.
    """
    zones = load_data(os.path.join(JSONDIR, 'zone'), [])
    aliases = load_data(os.path.join(JSONDIR, 'alias'), [])
    ports = load_data(os.path.join(JSONDIR, 'port'), [])
    portshow = load_data(os.path.join(JSONDIR, 'portshow'), [])
    alirelations, swport_alias, alias_swport = form_rels(zones, aliases, ports, portshow)
    swportrelations = form_swport_rels(alirelations, alias_swport)
    # Alias entries look like "<something> <name>"; keep only the name part.
    _dump_and_log('swport_alias',
                  [{'Swport': swport,
                    'Aliases': [a.split()[1] for a in alias_list]}
                   for swport, alias_list in swport_alias.items()])
    _dump_and_log('alias_swport',
                  [{'Alias': alias, 'Swports': list(swports)}
                   for alias, swports in alias_swport.items()])
    _dump_and_log('port_relation',
                  [{'Port': port, 'Relation': relation}
                   for port, relation in swportrelations.items()])
    # Raw relation mapping, dumped without a log line (as before).
    dump_data(os.path.join(JSONDIR, 'rels'), swportrelations)
def sum_form_hds_cap():
    """Sum formatted available HDS capacity per storage system.

    Combines DP-pool free space (Total - Consumed) with raid-group free
    capacity; accumulated totals are divided by 1024 at the end
    (units assumed GiB -> TiB — confirm).
    """
    dppool_records = load_data(os.path.join(JSONDIR, 'hds/dppool'), [])
    rgref_records = load_data(os.path.join(JSONDIR, 'hds/rgref'), [])
    totals = {}

    def _add(storage, free):
        # Accumulate free capacity per storage, creating the bucket lazily.
        totals.setdefault(storage, {'FormattedAvailable': 0})
        totals[storage]['FormattedAvailable'] += free

    for rec in dppool_records:
        _add(rec['Storage'],
             float(rec['Total_Capacity']) - float(rec['Consumed_Capacity']))
    for rec in rgref_records:
        _add(rec['Storage'], float(rec['Free_Capacity']))
    for stordict in totals.values():
        for key, val in stordict.items():
            stordict[key] = float(val)/1024
    return totals
def main():
    """Print groups of host records that share an identical WWN set."""
    hosts = load_data(os.path.join(JSONDIR, "hosts"), [])
    grouped = {}
    for host in hosts:
        # Sorted, space-joined WWNs make an order-independent group key.
        wwns_uid = " ".join(sorted(host["WWNs"]))
        grouped.setdefault(wwns_uid, []).append(host)
    for duplicates in grouped.values():
        if len(duplicates) > 1:
            print(duplicates)
            print("")
def tpar_form_volumes(volume_hosts):
    """Normalize 3PAR vv records into common volume dicts.

    Args:
        volume_hosts: {VV_WWN: [host names]} mapping for enrichment.
    """
    records = load_data(os.path.join(JSONDIR, '3par/vv'), [])
    return [
        {
            'Storage': rec['Storage'],
            'Uid': rec['VV_WWN'],
            'Name': rec['Name'],
            # VSize_MB divided by 1024 (MB -> GB-scale figure).
            'Size': int(rec['VSize_MB'])/1024,
            'Hosts': volume_hosts.get(rec['VV_WWN'], []),
        }
        for rec in records
    ]
def main():
    """Compute inter-switch paths for every related port pair and persist them."""
    graph = load_data(os.path.join(JSONDIR, 'graph'), {})
    link_records = load_data(os.path.join(JSONDIR, 'link'), {})
    swports_rels = load_data(os.path.join(JSONDIR, 'rels'), {})
    linksD = {'%s %s %s %s' % (r['Switch1'], r['Port1'], r['Switch2'], r['Port2']): r
              for r in link_records}
    records = []
    for swport1, swports in swports_rels.items():
        for swport2 in swports:
            sw_pair = '%s %s' % (swport1.split()[0], swport2.split()[0])
            # sw_treads appears to be a module-level cache of precomputed
            # switch-to-switch treads — TODO confirm; fall back to a full
            # graph walk when the pair is not cached.
            if sw_pair in sw_treads:
                treads = [[swport1] + tread + [swport2]
                          for tread in sw_treads[sw_pair]]
            else:
                treads = walk_graph(graph, swport1, swport2)
            nodes = form_nodes(treads)
            tread_links = form_links(treads, linksD)
            records.append({'Node1': swport1, 'Node2': swport2,
                            'Treads': treads, 'Nodes': list(nodes.items()),
                            'Links': tread_links})
    dump_data(os.path.join(JSONDIR, 'path'), records)
    logging.info('%s | %s records' % ('path', len(records)))
    return
def hds_form_volumes(volume_hosts):
    """Normalize HDS luref records into common volume dicts.

    Args:
        volume_hosts: {'Storage LU': [host names]} mapping for enrichment.
    """
    records = load_data(os.path.join(JSONDIR, 'hds/luref'), [])
    volumes = []
    for rec in records:
        uid = '%s %s' % (rec['Storage'], rec['LU'])
        volumes.append({
            'Storage': rec['Storage'],
            'Uid': rec['LU'],
            'Name': '',
            'Size': float(rec['Capacity']),
            'Hosts': volume_hosts.get(uid, []),
        })
    return volumes
def sum_udiffs(dirpath):
    """Collect per-uid counter diffs from every snapshot file in dirpath.

    Filenames are expected as '<prefix>.<time-parts>.<delta>'.

    Returns:
        tuple: (udiffs {uid: [one value per file, zero-filled for files
                where the uid is absent]},
                xtimes [time parts split on '-', per file],
                deltas [int delta per file]).
    """
    filenames = os.listdir(dirpath)
    # One zero slot per snapshot file; copied for each new uid.
    template = [0] * len(filenames)
    udiffs, xtimes, deltas = {}, [], []
    for num, filename in enumerate(sorted(filenames)):
        _, xtime, delta = filename.split('.')
        xtimes.append(xtime.split('-'))
        deltas.append(int(delta))
        data = load_data(os.path.join(dirpath, filename), {})
        for uid, value in data.items():
            if uid not in udiffs:
                udiffs[uid] = template[:]
            udiffs[uid][num] = check_and_fix(uid, value)
    return udiffs, xtimes, deltas