def main():
    """Merge 'port', 'portshow' and 'swport_alias' data into 'port_common'.

    For every port record, attach the connected-device WWNs/port name from
    'portshow' and the alias list from 'swport_alias', then dump the merged
    records to JSONDIR/port_common.
    """
    records = []
    records0 = load_data(os.path.join(JSONDIR, 'port'), [])
    records1 = load_data(os.path.join(JSONDIR, 'portshow'), [])
    records2 = load_data(os.path.join(JSONDIR, 'swport_alias'), [])
    # Index the secondary data sets once instead of rescanning them for every
    # port record. Later entries overwrite earlier ones, which preserves the
    # last-match-wins behavior of the original nested loops.
    portshow_by_key = {(r['Switch'], r['uPort']): r for r in records1}
    aliases_by_swport = {r['Swport']: r for r in records2}
    for record0 in records0:
        switch = record0['Switch']
        uPort = record0['uPort']
        Index = record0['Index']
        record = {}
        for key in ['Switch', 'uPort', 'Index', 'Speed', 'State', 'Type']:
            record[key] = record0[key]
        record1 = portshow_by_key.get((switch, uPort))
        if record1 is not None:
            record['portWwn_of_devices_connected'] = record1['portWwn_of_devices_connected']
            record['portName'] = record1['portName']
        record2 = aliases_by_swport.get('%s %s' % (switch, Index))
        if record2 is not None:
            record['Aliases'] = record2['Aliases']
        records.append(record)
    dump_data(os.path.join(JSONDIR, 'port_common'), records)
    # BUG FIX: this was logged under the misleading label 'path'; use the
    # actual output file name like the other collectors do.
    logging.info('%s | %s records' % ('port_common', len(records)))
    return
def main():
    """Parse collected text outputs for TRIBE systems into per-command JSON files."""
    directory = os.path.join(JSONDIR, TRIBE)
    if not os.path.exists(directory):
        os.makedirs(directory)
    # restrict to systems that belong to this tribe's model list
    models = load_data(os.path.join(JSONDIR, 'models')).get(TRIBE, [])
    commandout = {}
    for filename in os.listdir(TEXTDIR):
        system, command = filename.split('.')
        if system not in models:
            continue
        with open(os.path.join(TEXTDIR, filename)) as f:
            lines = f.readlines()
        # dispatch to the first parser registered for this command
        for parser_command, parser in PARSERS:
            if command != parser_command:
                continue
            function = getattr(defs_parsers, 'p_' + parser)
            commandout.setdefault(parser, []).extend(function(system, lines))
            break
    for command, records in commandout.items():
        filepath = os.path.join(JSONDIR, TRIBE, command)
        logging.info('%s | %s records' % (command, len(records)))
        dump_data(filepath, records)
    return
def main():
    """Collect zoning configuration from every fabric over SSH and rotate
    the old/new config snapshots, recording the change interval."""
    if not os.path.exists(CONFIGSDIR):
        os.makedirs(CONFIGSDIR)
    connections = load_data(fabrics_connections_path, [])
    fields = ['name', 'address', 'username', 'password']
    out = {}
    for connection in connections:
        args = [connection[k] for k in fields]
        args.append([['zoneshow', 'zoneshow'], ])
        systemname, outs, errs, exception = ssh_run(args)
        if exception:
            # best-effort: skip unreachable fabrics
            continue
        lines = outs['zoneshow'].split('\n')
        out[systemname] = {
            'aliases': parse_aliases(lines),
            'zones': parse_zones(lines),
            'config': parse_config(lines),
        }
    # 'From' is the mtime of the previous snapshot, if one exists
    from_dt = None
    if os.path.isfile(oldconfigpath):
        from_dt = datetime.fromtimestamp(os.path.getmtime(oldconfigpath))
    if os.path.isfile(newconfigpath):
        os.rename(newconfigpath, oldconfigpath)
    till_dt = datetime.now()
    dump_data(newconfigpath, out)
    dump_data(
        os.path.join(JSONDIR, 'changes_dts'),
        {'From': str(from_dt) if from_dt else None, 'Till': str(till_dt)},
    )
    logging.info('%s | %s records' % (newconfigpath, len(out)))
    return
def main():
    """Aggregate raw and formatted capacity figures per storage into 'capacity'.

    Raw capacities come from the 3PAR/EVA/HDS summarizers; formatted figures
    are merged in per storage, with model-specific derivations for EVA and HDS.
    """
    filepath = os.path.join(JSONDIR, 'models')
    # BUG FIX: the fallback default must be a dict — models.get(...) below
    # raised AttributeError on the old list default ([]) when the file was
    # missing (the sibling collector loads this file as a dict).
    models = load_data(filepath, {})
    RawCapD = {}
    RawCapD.update(sum_3par())
    RawCapD.update(sum_eva())
    RawCapD.update(sum_hds())
    FormCapD = sum_form_cap()
    Form3parAvailD = sum_form_3par_avail()
    FormHdsAvailD = sum_form_hds_cap()
    records = []
    for storage in RawCapD:
        record = {'Storage': storage}
        record.update(RawCapD[storage])
        record.update(FormCapD[storage])
        record.update(Form3parAvailD.get(storage, {}))
        record.update(FormHdsAvailD.get(storage, {}))
        if storage in models.get('eva', []):
            # EVA: raw free space halved — presumably mirroring; TODO confirm
            record['FormattedAvailable'] = record['RawFree'] / 2
        elif storage in models.get('hds', []):
            # HDS: derive raw allocation from the raw/formatted ratio
            rate = record['RawData'] / (record['FormattedUsed'] + record['FormattedAvailable'])
            record['RawAllocated'] = record['FormattedUsed'] * rate
            record['RawFree'] = record['FormattedAvailable'] * rate
        record['FormattedTotal'] = record['FormattedUsed'] + record['FormattedAvailable']
        records.append(record)
    sorted_systems = load_data(os.path.join(JSONDIR, 'sorted_systems'), [])
    records = sort_storage_records(records, sorted_systems)
    filepath = os.path.join(JSONDIR, 'capacity')
    dump_data(filepath, records)
    return
def main():
    """Collect host records from all storage families into 'hosts'.

    BUG FIX: each *_form_hosts() helper used to be called twice — once with
    the result discarded and once for the concatenation — doubling the work
    (and any side effects). Each helper is now called exactly once.
    """
    hosts = tpar_form_hosts() + hds_form_hosts() + eva_form_hosts()
    filepath = os.path.join(JSONDIR, 'hosts')
    dump_data(filepath, hosts)
def main():
    """Record the application's last-update marker and push it to the 'sa' store."""
    records = [{'appname': APPNAME}]
    dump_data(os.path.join(JSONDIR, 'last_update'), records)
    # replace any previous marker for this app before inserting the new one
    spec = ['last_update', 'AppDataLastUpdate', {'before_delete': {'appname': APPNAME}}]
    save('sa', [spec])
def main():
    """Build the combined 'volumes' data set from all storage families."""
    volumes = tpar_form_volumes(get_tpar_volume_hosts())
    volumes += hds_form_volumes(get_hds_volume_hosts())
    volumes += get_eva_form_volumes()
    dump_data(os.path.join(JSONDIR, 'volumes'), volumes)
def do_temp(snmp_values):
    """Return the previous temp file's (mtime, values) and overwrite it
    with *snmp_values*.

    Returns (None, {}) for the timestamp/values when no temp file exists yet.
    """
    previous_dt = None
    if os.path.exists(TEMPFILE):
        previous_dt = os.path.getmtime(TEMPFILE)
    previous_values = load_data(TEMPFILE, {})
    dump_data(TEMPFILE, snmp_values)
    return previous_dt, previous_values
def main():
    """Merge switch, serial, version and fabric data into 'switch_common'."""
    # NOTE(review): the 'sorted_switchnames' file was loaded here but never
    # referenced; the dead read has been removed.
    records = []
    swi_records = load_data(os.path.join(JSONDIR, 'switch'), [])
    ser_records = load_data(os.path.join(JSONDIR, 'serial'), [])
    ver_records = load_data(os.path.join(JSONDIR, 'version'), [])
    fsw_records = load_data(os.path.join(JSONDIR, 'fswitch'), [])
    ags_records = load_data(os.path.join(JSONDIR, 'agswitch'), [])
    for swi_record in swi_records:
        switch = swi_record['Switch']
        record = {
            'Switch': swi_record['Switch'],
            'switchType': swi_record['switchType'],
            'switchMode': swi_record['switchMode'],
            # idiom: dict.get replaces the 'x if k in d else default' form
            'switchRole': swi_record.get('switchRole', ''),
        }
        if record['switchMode'] == 'Access Gateway Mode':
            record['switchMode'] = 'AG'
        # fabric membership may come from either the fabric or AG data set;
        # the last match wins, as before
        for records2 in [fsw_records, ags_records]:
            for record2 in records2:
                if switch == record2['Switch']:
                    record['Fabric'] = record2['Fabric']
        for ser_record in ser_records:
            if switch == ser_record['Switch']:
                record.update({
                    'Part_Num': ser_record['Part_Num'],
                    'Serial_Num': ser_record['Serial_Num'],
                })
        for ver_record in ver_records:
            if switch == ver_record['Switch']:
                record.update({'Fabric_OS': ver_record['Fabric_OS']})
        records.append(record)
    dump_data(os.path.join(JSONDIR, 'switch_common'), records)
    logging.info('%s | %s records' % (__name__, len(records)))
    return
def do_temp(snmp_values):
    """Return the previous temp values as (mtime, data) and store
    *snmp_values* as the new temp file, creating TEMP_DIR on first use."""
    if not os.path.exists(TEMP_DIR):
        os.makedirs(TEMP_DIR)
    filepath = os.path.join(TEMP_DIR, 'values.json')
    previous_dt = None
    if os.path.exists(filepath):
        previous_dt = os.path.getmtime(filepath)
    previous_values = load_data(filepath, {})
    dump_data(filepath, snmp_values)
    return previous_dt, previous_values
def main(): filepath = os.path.join(JSONDIR, "link") links = load_data(filepath, []) filepath = os.path.join(JSONDIR, "port") ports = load_data(filepath, []) data = f_graph(links, ports) filepath = os.path.join(JSONDIR, "graph") dump_data(filepath, data) logging.info("%s | %s records" % ("graph", len(data))) return
def main():
    """Group configured connection names by storage model into 'models'."""
    models = {}
    for connection in CONNECTIONS:
        model = connection['model'].lower()
        models.setdefault(model, []).append(connection['name'])
    logging.info(models)
    dump_data(os.path.join(JSONDIR, 'models'), models)
    return
def main():
    """Rotate the volumes snapshot (new -> old) and record the change interval."""
    newpath = os.path.join(CONFIGSDIR, 'volumes_new')
    oldpath = os.path.join(CONFIGSDIR, 'volumes_old')
    # 'From' is the mtime of the previous snapshot, if there was one
    from_dt = None
    if os.path.isfile(newpath):
        from_dt = datetime.fromtimestamp(os.path.getmtime(newpath))
        copyfile(newpath, oldpath)
    copyfile(os.path.join(JSONDIR, 'volumes'), newpath)
    till_dt = datetime.fromtimestamp(os.path.getmtime(newpath))
    dump_data(
        os.path.join(JSONDIR, 'volumes_changes_dts'),
        {'From': str(from_dt) if from_dt else None, 'Till': str(till_dt)},
    )
def main():
    """Compare the old and new zoning configs and store the differences."""
    old_config = load_data(oldconfigpath, {})
    new_config = load_data(newconfigpath, {})
    dts = load_data(os.path.join(JSONDIR, 'changes_dts'), {})
    if not old_config:
        logging.warning('no config in %s' % oldconfigpath)
        logging.info('run collect_configs script again')
    if not new_config:
        logging.warning('no config in %s' % newconfigpath)
    # nothing to diff unless both snapshots are present
    if not (old_config and new_config):
        return
    records = compare_configs(old_config, new_config)
    for record in records:
        record.update(dts)
    filepath = os.path.join(JSONDIR, 'changes')
    dump_data(filepath, records)
    logging.info('%s | %s records' % (filepath, len(records)))
    return
def main():
    """Parse OA XML dumps into enclosures/servers/mezzanines JSON files and
    record each enclosure's management URL in 'encurls'.

    Cleanups: removed a dead 'ks' max-field-width computation whose result
    was never used, and a redundant recomputation of the output path.
    """
    all_enclosures = []
    all_servers = []
    all_mezzanines = []
    dirpath = os.path.dirname(os.path.realpath(__file__))
    CONNECTIONS = load_data(os.path.join(dirpath, 'Connection.json'), [])
    name_addr = {con['name']: con['address'] for con in CONNECTIONS}
    encurls = {}
    for filename in os.listdir(TEXTDIR):
        filepath = os.path.join(TEXTDIR, filename)
        system, command = filename.split('.')
        with open(filepath) as f:
            content = f.read()
        data = xmltodict.parse(content)
        enclosure, servers, mezzanines = parse(data)
        all_enclosures.append(enclosure)
        all_servers += servers
        all_mezzanines += mezzanines
        # map the enclosure name back to its management address, if known
        encurls[enclosure['Enclosure_Name']] = name_addr.get(system)
    print(encurls)  # NOTE(review): debug output — consider logging.debug
    for name, records in (
        ('enclosures', all_enclosures),
        ('servers', all_servers),
        ('mezzanines', all_mezzanines),
    ):
        filepath = os.path.join(JSONDIR, name)
        dump_data(filepath, records)
        logging.info('%s | %s records' % (name, len(records)))
    dump_data(os.path.join(JSONDIR, 'encurls'), encurls)
def main():
    """Enrich server records with CPU/RAM details fetched from each OA and
    dump the result to 'servers2'."""
    username = '******'
    password = '******'
    servers = load_data(os.path.join(JSONDIR, 'servers'))
    # bay numbers per enclosure, taken from the existing server records
    enc_bays = {}
    for server in servers:
        enc_bays.setdefault(server['Enclosure_Name'], []).append(server['Server_Bay'])
    new_info = {}
    encurls = load_data(os.path.join(JSONDIR, 'encurls'))
    for enc_name, enc_fqdn in encurls.items():
        bays = enc_bays.get(enc_name, [])
        try:
            oaSessionKey = get_session(enc_fqdn, username, password)
            print('SessionKey: {}'.format(oaSessionKey))
        except Exception:
            # BUG FIX: was a bare 'except:' that also swallowed
            # KeyboardInterrupt/SystemExit; keep the best-effort skip, but
            # only for real errors.
            print(datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S"),
                  'Connection failed:', enc_fqdn)
            continue
        for bay_number in bays:
            ram_size, cpu_count, cpu_type, cpu_cores = get_blade_info(enc_fqdn, oaSessionKey, bay_number)
            new_info['{}.{}'.format(enc_name, bay_number)] = {
                'CPU_type': cpu_type,
                'CPU_cores': cpu_cores,
                'CPU_count': cpu_count,
                'RAM_size': ram_size,
            }
    for server in servers:
        key = '{}.{}'.format(server['Enclosure_Name'], server['Server_Bay'])
        ext = new_info.get(key)
        if ext:
            server.update(ext)
    dump_data(os.path.join(JSONDIR, 'servers2'), servers)
def main():
    """Sum volume sizes per (storage, host-set) and dump 'hosts_capacity'.

    BUG FIX: the grouping key was a '+'-joined string that was later taken
    apart again with split('+') / split(); a storage name containing '+' (or
    a host name containing whitespace) corrupted the grouping. A tuple key
    needs no string round-trip and is exact.
    """
    storhosts = {}
    volumes = load_data(os.path.join(JSONDIR, 'volumes'), [])
    for volume in volumes:
        key = (volume['Storage'], tuple(sorted(volume['Hosts'])))
        storhosts[key] = storhosts.get(key, 0) + float(volume['Size'])
    records = []
    for (storage, hosts), size in storhosts.items():
        records.append({
            'Storage': storage,
            'Hosts': list(hosts),
            'Size': size,
        })
    dump_data(os.path.join(JSONDIR, 'hosts_capacity'), records)
def main():
    """Run every registered parser over the collected text outputs and dump
    one sorted JSON file per parser."""
    commandout = {}
    for filename in os.listdir(TEXTDIR):
        system, command = filename.split('.')
        with open(os.path.join(TEXTDIR, filename)) as f:
            lines = f.readlines()
        # a single command may feed several parsers
        for name in PARSERS.get(command, []):
            parse = getattr(defs_parsers, 'p_' + name)
            commandout.setdefault(name, []).extend(parse(system, lines))
    for command, records in commandout.items():
        records = sort_records(records)
        filepath = os.path.join(JSONDIR, command)
        dump_data(filepath, records)
        logging.info('%s | %s records' % (command, len(records)))
    return
def main():
    """Compute inter-switch paths ('treads') for every related port pair
    and dump them, with their nodes and links, to 'path'."""
    graph = load_data(os.path.join(JSONDIR, 'graph'), {})
    all_links = load_data(os.path.join(JSONDIR, 'link'), {})
    swports_rels = load_data(os.path.join(JSONDIR, 'rels'), {})
    linksD = {
        '%s %s %s %s' % (r['Switch1'], r['Port1'], r['Switch2'], r['Port2']): r
        for r in all_links
    }
    records = []
    for swport1, swports in swports_rels.items():
        for swport2 in swports:
            # swports are '<switch> <port>' strings; key the tread cache by
            # the switch pair
            sw1 = swport1.split()[0]
            sw2 = swport2.split()[0]
            sws = '%s %s' % (sw1, sw2)
            if sws not in sw_treads:
                treads = walk_graph(graph, swport1, swport2)
            else:
                # cached switch-level treads: expand back to full port paths
                treads = [[swport1] + tread + [swport2] for tread in sw_treads[sws]]
            nodes = form_nodes(treads)
            # BUG FIX (latent): the result used to shadow the loaded 'links'
            # list; renamed so the source data stays intact.
            path_links = form_links(treads, linksD)
            records.append({
                'Node1': swport1,
                'Node2': swport2,
                'Treads': treads,
                'Nodes': list(nodes.items()),
                'Links': path_links,
            })
    filepath = os.path.join(JSONDIR, 'path')
    dump_data(filepath, records)
    logging.info('%s | %s records' % ('path', len(records)))
    return
def main(): """ main function """ pdata = {} udata = multisnmpwalk(counters, connections, PROCESSES) for uid, value in udata.items(): switch, port, counter = uid.split() uport = '{} {}'.format(switch, port) if not uport in pdata: pdata[uport] = {} if counter == 'connUnitPortType': pdata[uport]['porttype'] = value elif counter == 'connUnitPortName': pdata[uport]['portname'] = value records = [] for uport, record in pdata.items(): switch, port = uport.split() record['switchname'] = switch record['portindex'] = port records.append(record) names = [x['name'] for x in connections] records.sort(key=lambda x: (names.index(x['switchname']), int(x['portindex']))) dump_data(os.path.join(JSONDIR, 'configs'), records) """ for counter, xdict in cdata.items(): xdict = sort_uports(xdict) dump_data(os.path.join(JSONDIR, counter), xdict) records.append({'counter': counter, 'values': xdict, 'datetime': str(dt)}) """ return
def main():
    """Derive alias/port relation files (swport_alias, alias_swport,
    port_relation, rels) from zone, alias and port data."""
    zones = load_data(os.path.join(JSONDIR, 'zone'), [])
    aliases = load_data(os.path.join(JSONDIR, 'alias'), [])
    ports = load_data(os.path.join(JSONDIR, 'port'), [])
    portshow = load_data(os.path.join(JSONDIR, 'portshow'), [])
    alirelations, swport_alias, alias_swport = form_rels(zones, aliases, ports, portshow)
    swportrelations = form_swport_rels(alirelations, alias_swport)

    # alias entries are '<prefix> <name>' strings; keep only the name part
    records = [
        {'Swport': swport, 'Aliases': [a.split()[1] for a in alias_list]}
        for swport, alias_list in swport_alias.items()
    ]
    dump_data(os.path.join(JSONDIR, 'swport_alias'), records)
    logging.info('%s | %s records' % ('swport_alias', len(records)))

    records = [
        {'Alias': alias, 'Swports': list(swports)}
        for alias, swports in alias_swport.items()
    ]
    dump_data(os.path.join(JSONDIR, 'alias_swport'), records)
    logging.info('%s | %s records' % ('alias_swport', len(records)))

    records = [
        {'Port': port, 'Relation': relation}
        for port, relation in swportrelations.items()
    ]
    dump_data(os.path.join(JSONDIR, 'port_relation'), records)
    logging.info('%s | %s records' % ('port_relation', len(records)))

    dump_data(os.path.join(JSONDIR, 'rels'), swportrelations)
def main():
    """Incrementally parse portlogdump/fabriclog text files into JSON logs,
    remembering the last processed timestamp per file."""
    # default lower bounds when a file has no recorded last date yet
    p_null_dt = datetime.now() - timedelta(hours=1)
    f_null_dt = datetime.now() - timedelta(days=365)
    last_dates = load_data(os.path.join(TEMPDIR, 'last_dates'), {})
    portlog = []
    fabriclog = []
    for filename in os.listdir(TEXTDIR):
        system, command = filename.split('.')
        if command not in ('portlogdump', 'fabriclog'):
            continue
        with open(os.path.join(TEXTDIR, filename)) as f:
            lines = f.readlines()
        last_dt_str = last_dates.get(filename)
        if last_dt_str is None:
            last_dt = p_null_dt if command == 'portlogdump' else f_null_dt
        else:
            last_dt = datetime.strptime(last_dt_str, "%Y-%m-%d %H:%M:%S")
        xdate = None
        if command == 'portlogdump':
            records, xdate = to_items(system, lines[2:], last_dt)
            portlog += records
        else:
            # fabriclog: drop the two-line header and trailer
            records, xdate = to_items_fab(system, lines[2:-2], last_dt)
            fabriclog += records
        dt_str = None if xdate is None else xdate.strftime("%Y-%m-%d %H:%M:%S")
        last_dates['{}.{}'.format(system, command)] = dt_str
    print(len(portlog))
    print(len(fabriclog))
    dump_data(os.path.join(TEMPDIR, 'last_dates'), last_dates)
    dump_data(os.path.join(JSONDIR, 'portlog'), portlog)
    dump_data(os.path.join(JSONDIR, 'fabriclog'), fabriclog)
    return
def main():
    """Assemble all link records (E-trunks, F-trunks, F-links) into 'link'."""
    link_records = f_etrunk() + f_ftrunk() + form_f_links()
    dump_data(os.path.join(JSONDIR, 'link'), link_records)
    logging.info('%s | %s records' % ('link', len(link_records)))
def main():
    """Store the configured system names, in connection order, as 'sorted_systems'."""
    connections = load_data(CONNECTIONS, [])
    names = [connection['name'] for connection in connections]
    dump_data(os.path.join(JSONDIR, 'sorted_systems'), names)
    logging.info(str(names))
    return
def main():
    """Store switch names, in connection order, as 'sorted_switchnames'."""
    dirpath = os.path.dirname(os.path.realpath(__file__))
    connections = load_data(os.path.join(dirpath, 'SwitchConnection.json'), [])
    names = [c['name'] for c in connections]
    # BUG FIX: the dump target carried a stray '.json' suffix while the
    # consumer loads plain 'sorted_switchnames' (and every other dump_data
    # call in this project uses extension-less names) — the file names
    # never matched.
    dump_data(os.path.join(JSONDIR, 'sorted_switchnames'), names)
    return
def main():
    # Build per-storage 3PAR capacity figures from the '3par/sys' and
    # '3par/vv' data sets and dump them (converted to TB) to 'capacity_3par'.
    records = []
    filepath = os.path.join(JSONDIR, '3par/sys')
    sys_data = load_data(filepath)
    # raw total/allocated capacity per storage; the *_MB fields below suggest
    # these are MB too — TODO confirm units
    raw_total_sizes = {r["Storage"]: int(r["TotalCap"]) for r in sys_data}
    raw_alloc_sizes = {r["Storage"]: int(r["AllocCap"]) for r in sys_data}
    filepath = os.path.join(JSONDIR, '3par/vv')
    data = load_data(filepath)
    sizes = {}
    # First pass: accumulate virtual-volume sizes per storage and
    # provisioning type ('full', 'cpvv', 'tpvv', 'snp').
    for record in data:
        storage = record['Storage']
        if not storage in sizes:
            sizes[storage] = {'full': 0, 'cpvv': 0, 'tpvv': 0, 'snp': 0, 'tpvv_used': 0, 'tpvv_free': 0, 'copy': 0}
        prov = record['Prov']
        size = int(record['VSize_MB'])
        # '--' marks a missing value in the source data
        used_size = int(record['Usr_Used_MB']) if record['Usr_Used_MB'] != '--' else 0
        # volumes with both a snapshot CPG and a user CPG count as 'copy' space
        copy_size = int(record['VSize_MB']) if record['SnpCPG'] != '--' and record['UsrCPG'] != '--' else 0
        sizes[storage][prov] += size
        # thin volumes: track actually-used space separately
        sizes[storage]['tpvv_used'] += used_size if prov == 'tpvv' else 0
        sizes[storage]['copy'] += copy_size
    # Second pass: derive totals, free space and reserve usage per storage.
    for storage, stordict in sizes.items():
        raw_total = raw_total_sizes.get(storage)
        raw_alloc = raw_alloc_sizes.get(storage)
        # raw space is halved (presumably mirroring — TODO confirm); 5% of
        # the result is kept aside as reserve
        TOTAL = raw_total/2*0.95
        RESERVE = raw_total/2*0.05
        USED = stordict['full'] + stordict['cpvv'] + stordict['tpvv'] + stordict['copy']
        FREE = TOTAL - USED
        # REAL: space actually consumed (thin volumes count only used MB)
        REAL = stordict['full'] + stordict['cpvv'] + stordict['tpvv_used']
        reserve_used = raw_alloc/2 - REAL
        reserve_overused = 0
        print(RESERVE,reserve_used)
        if reserve_used < 0:
            # NOTE(review): a negative 'reserve_used' is stored as the
            # overuse (which *increases* FREE via the subtraction below) and
            # reserve_used is pinned to the full reserve — confirm intended.
            reserve_overused = reserve_used
            reserve_used = RESERVE
            reserve_free = 0
        elif RESERVE > reserve_used:
            reserve_free = RESERVE - reserve_used
        else:
            reserve_overused = reserve_used - RESERVE
            reserve_free = 0
        FREE = FREE - reserve_overused
        OVERPROVISIONED = 0
        if FREE < 0:
            # more space provisioned than physically available: clamp FREE
            # at zero and report the shortfall separately
            OVERPROVISIONED = -FREE
            USED += FREE
            FREE = 0
        # store derived figures back into stordict (sizes[storage] is the
        # same dict object) so they are unit-converted together below
        sizes[storage]['TOTAL'] = TOTAL
        sizes[storage]['USED'] = USED
        sizes[storage]['FREE'] = FREE
        sizes[storage]['OVERPROVISIONED'] = OVERPROVISIONED
        sizes[storage]['RESERVE'] = RESERVE
        sizes[storage]['RESERVE_OVERUSED'] = reserve_overused
        sizes[storage]['tpvv_free'] = stordict['tpvv'] - stordict['tpvv_used']
        sizes[storage]['reserve_used'] = reserve_used
        sizes[storage]['reserve_free'] = reserve_free
        # MB -> TB for the output record
        record = {k: round(v/1024.0/1024, 2) for k, v in stordict.items()}
        record['Storage'] = storage
        records.append(record)
        logging.info(storage)
    sorted_systems = load_data(os.path.join(JSONDIR, 'sorted_systems'), [])
    records = sort_storage_records(records, sorted_systems)
    filepath = os.path.join(JSONDIR, 'capacity_3par')
    dump_data(filepath, records)