def backups_integrate(todaydate, datedmy):
    """Build a JSON summary of the backup files collected for *todaydate*.

    Walks /anonymised/path/<todaydate>, treating each subdirectory as an
    environment and each file inside it as one host backup. The hostname is
    taken from the second dot-separated token of the filename — assumes the
    '<ip>.<hostname>.<ext>' naming convention (TODO confirm upstream).

    Args:
        todaydate: date string naming the collection directory.
        datedmy:   date string recorded against every host entry.

    Side effects:
        Writes the summary to /anonymised/path.json via jsoncommands.
    """
    rootdir = '/anonymised/path/' + todaydate
    endloc = '/anonymised/path.json'
    backupjson = {}
    for envdir in os.listdir(rootdir):  # renamed: 'dir' shadowed the builtin
        environment = envdir.upper()
        environmentpath = rootdir + os.sep + envdir
        backupjson[environment] = [
            # second dot-separated token of the filename is the hostname
            {'date': datedmy, 'host': backup.split('.')[1].upper()}
            for backup in os.listdir(environmentpath)
        ]
    jsoncommands.writejson(backupjson, endloc)
def online_status(todaydate, datedmy):
    """Parse the daily gather report into a per-region JSON structure.

    Reads /anonymised/path/<todaydate>_gather.txt (CSV, first line is a
    header and is skipped) and groups rows by the first column; for each row
    the next three columns are stored, upper-cased and stripped.

    Args:
        todaydate: date string used in the report filename.
        datedmy:   poll timestamp stored under the top-level 'polltime' key.

    Side effects:
        Writes the result to /anonymised/path.json via jsoncommands.
    """
    reportfileloc = '/anonymised/path/' + todaydate + '_gather.txt'
    endloc = '/anonymised/path.json'
    endjson = {}
    with open(reportfileloc, 'r') as reportfile:
        report = reportfile.readlines()
    for line in report[1:]:  # skip the header line
        fields = [field.upper().strip() for field in line.split(',')]
        # NOTE(review): rows with fewer than 4 columns would raise
        # IndexError — assumes the gather report is always well-formed.
        endjson.setdefault(fields[0], []).append([fields[1], fields[2], fields[3]])
    endjson['polltime'] = datedmy
    jsoncommands.writejson(endjson, endloc)
def createwebdata(todaydate, datadate):
    """Merge today's successful gathers into the stored backup JSON.

    Reads the gather report for *todaydate* and collects hosts whose fourth
    CSV column is 'SUCCESS', grouped by region (first column). Each success
    is then folded into the backup JSON: an existing host has its 'date'
    refreshed to *datadate*, a new host is appended, and a region not yet in
    the backup JSON is copied over wholesale.

    Side effects:
        Rewrites /anonymised/path.json in place via jsoncommands.
    """
    backupdata = '/anonymised/path.json'
    backupjson = jsoncommands.readjson(backupdata)
    rootfolder = '/anonymised/path'
    reportfolder = rootfolder + '/anonymised/path'
    reportdest = reportfolder + '/' + todaydate + '_gather.txt'

    # region -> [{'host': ..., 'date': ...}, ...] for successful gathers only
    successdict = {}
    with open(reportdest, 'r') as reportfile:
        report = reportfile.readlines()
    for line in report:
        fields = line.split(',')
        if fields[3].strip() == 'SUCCESS':
            region = fields[0].upper()
            hostname = fields[2].upper()
            successdict.setdefault(region, []).append(
                {'host': hostname, 'date': datadate})

    for region, successes in successdict.items():
        if region not in backupjson:
            # region never seen before: adopt today's successes wholesale
            backupjson[region] = successes
            continue
        existing = backupjson[region]
        for success in successes:
            # refresh the date of the first matching host, else append
            for item in existing:
                if item['host'] == success['host']:
                    item['date'] = success['date']
                    break
            else:
                existing.append(
                    {'host': success['host'], 'date': success['date']})
    jsoncommands.writejson(backupjson, backupdata)
def dashboard_integrate():
    """Fold the latest LCM inventory into each dashboard JSON file.

    For every source in the LCM JSON, loads the matching dashboard file,
    updates categories on hosts already present (skipping 'hostname' and any
    empty or 'UNKNOWN' LCM value), appends hosts the dashboard has never
    seen, stamps the result with today's date and writes it to the
    intermediate directory. Finally sets the 'neo' flag so downstream
    processing picks up the fresh data.
    """
    # globals
    lcmpath = '/anonymised/path.json'
    datadir = '/anonymised/path'
    interdir = '/anonymised/path/intermediate'
    lcmdict = jsoncommands.readjson(lcmpath)
    additions = 0
    changes = 0

    for lcmsource in lcmdict:
        lcmrecords = lcmdict[lcmsource]
        lcmhosts = [record['hostname'] for record in lcmrecords]
        existingdatasrc = datadir + os.sep + 'dashboard_' + lcmsource + '.json'
        existingdict = jsoncommands.readjson(existingdatasrc)

        # update hosts the dashboard already knows about
        existinghosts = []
        for existingdata in existingdict['data']:
            existinghost = existingdata['hostname']
            existinghosts.append(existinghost)
            if existinghost not in lcmhosts:
                continue
            # first LCM record wins if a hostname appears more than once
            lcmdata = lcmrecords[lcmhosts.index(existinghost)]
            for category, value in lcmdata.items():
                if category == 'hostname' or not value or value == 'UNKNOWN':
                    continue
                # .get(): a missing category always differs from the (truthy)
                # LCM value, so it is set and counted — same effect as the
                # original try/except KeyError
                if existingdata.get(category) != value:
                    existingdata[category] = value
                    changes += 1

        # append hosts the dashboard has never seen
        for lcmindex, lcmhost in enumerate(lcmhosts):
            if lcmhost not in existinghosts:
                existingdict['data'].append(lcmrecords[lcmindex])
                additions += 1

        # outputting
        existingdict['polltime'] = datetime.datetime.today().strftime('%d/%m/%Y')
        outputpath = interdir + os.sep + 'dashboard_' + lcmsource + '.json'
        jsoncommands.writejson(existingdict, outputpath)

    print(additions)
    print(changes)
    # raise the flag that tells downstream consumers fresh data is available
    flagjson = jsoncommands.readjson('/anonymised/path/intermediate/flags.json')
    flagjson['neo'] = 1
    jsoncommands.writejson(flagjson, '/anonymised/path/intermediate/flags.json')
def parse(todaydate):
    """Detect asymmetric traffic flows in the session dumps for *todaydate*.

    Each file in /anonymised/path/<todaydate>/ is expected to hold lines of
    the form 'ip@port(intf)->ip@port(intf)'. Flow pairs (A->B matched with
    B->A) are classified as symmetric (same ports both ways) or asymmetric
    (different return port); asymmetric flows are written to a per-file
    report and per-file counters are accumulated into a summary JSON.

    Side effects: deletes each processed data file, removes the (then empty)
    input directory, and writes the summary to asymmetry_data.json.
    """
    # important variables
    rootdir = '/anonymised/path/' + todaydate + '/'
    reportroot = '/anonymised/path/'
    # captures: src ip, src port, src interface name, dst ip, dst port, dst
    # interface name. NOTE(review): non-raw string — '\d'/'\w' are invalid
    # string escapes (DeprecationWarning on modern Python).
    relevantpattern = '(\d{1,3}[.]\d{1,3}[.]\d{1,3}[.]\d{1,3})[@](\d+)[(]?([\w._-]*)[)]?->(\d{1,3}[.]\d{1,3}[.]\d{1,3}[.]\d{1,3})[@](\d+)[(]?([\w._-]*)[)]?'
    datadict = {}
    outputloc = '/anonymised/path/asymmetry_data.json'
    datafiles = os.listdir(rootdir)
    for datafile in datafiles:
        hostvdom = datafile.replace('.txt', '')
        datapath = rootdir + datafile
        reportpath = reportroot + hostvdom + '-asym.txt'
        with open(datapath, 'r') as testfile:
            testarray = testfile.readlines()
        matcharray = []        # parsed 6-tuples still awaiting a pairing pass
        matchesprocessed = []  # ipA+ipB keys already handled (dupe detection)
        asymmetricarray = []   # human-readable asymmetric flow descriptions
        asymmetric = 0
        symmetric = 0
        duplicates = 0
        unpaired = 0
        blanks = 0
        print('FILE: ' + datafile)
        # first pass: extract every flow line into a 6-element list
        for testline in testarray:
            regextest = re.search(relevantpattern, testline, re.IGNORECASE)
            if regextest:
                matchadd = []
                for x in range(1, 7):
                    # ip, port (@n), intf (human name) ~ ip, port (@n), intf (human name)
                    matchadd.append(regextest.group(x))
                matcharray.append(matchadd)
        # second pass: repeatedly pop the last flow and look for its reverse
        while len(matcharray) > 0:
            # using while instead of for as we remove elements from the array as we go on, so length needs to be
            # recalculated each loop, until the array is empty
            # removed elements are most current (highest index) and any matched pair (lower index)
            #print('{0} elements to go...'.format(len(matcharray)))
            matchfound = False
            blankinterface_relevant = False
            relevantarray = matcharray.pop()
            relevantipa = relevantarray[0]
            relevantporta = relevantarray[1]
            relevantintfa = relevantarray[2]
            if not relevantintfa:
                relevantintfa = "(BLANK)"
                blankinterface_relevant = True
            relevantipb = relevantarray[3]
            relevantportb = relevantarray[4]
            relevantintfb = relevantarray[5]
            if not relevantintfb:
                relevantintfb = "(BLANK)"
                blankinterface_relevant = True
            # ip pair acts as the flow's identity; repeats count as duplicates
            characteristicreference = relevantipa + relevantipb
            if characteristicreference in matchesprocessed:
                duplicates += 1
                continue
            for checkindex, checkarray in enumerate(matcharray):
                blankinterface_test = False
                testipa = checkarray[0]
                testporta = checkarray[1]
                testintfa = checkarray[2]
                if not testintfa:
                    testintfa = "(BLANK)"
                    blankinterface_test = True
                testipb = checkarray[3]
                testportb = checkarray[4]
                testintfb = checkarray[5]
                if not testintfb:
                    testintfb = "(BLANK)"
                    blankinterface_test = True
                # candidate is the reverse direction of the popped flow
                if relevantipa == testipb and relevantipb == testipa:
                    matchfound = True
                    if relevantporta == testportb and relevantportb == testporta:
                        # symmetric
                        symmetric += 1
                        del matcharray[checkindex]
                    else:
                        if blankinterface_relevant or blankinterface_test:
                            # can't name the path reliably with a blank intf
                            blanks += 1
                        elif relevantportb != testporta:
                            # straightforward asymmetric
                            asymmetricstring = '{0} [interface {1} @{2}] -> {3}'.format(
                                relevantipa, relevantintfa, relevantporta, relevantipb)
                            asymmetricstring += ' [interface {0} @{1}], returning on [interface {2} @{3}]'.format(
                                relevantintfb, relevantportb, testintfa, testporta)
                        elif relevantporta != testportb:
                            # reverse asymmetric
                            asymmetricstring = '{0} [interface {1} @{2}] -> {3}'.format(
                                relevantipb, relevantintfb, relevantportb, testipb)
                            asymmetricstring += ' [interface {0} @{1}], returning on [interface {2} @{3}]'.format(
                                testintfb, testportb, relevantintfa, relevantporta)
                        else:
                            # NOTE(review): unreachable in practice (both
                            # port-equality branches false implies the
                            # symmetric branch above); if it ever fired,
                            # asymmetricstring could be stale/unbound below.
                            print('ERROR')
                        if not blankinterface_relevant and not blankinterface_test:
                            # de-duplicate identical flow descriptions
                            if not asymmetricstring in asymmetricarray:
                                asymmetric += 1
                                asymmetricarray.append(asymmetricstring)
                            else:
                                duplicates += 1
                        del matcharray[checkindex]
                    break
            if not matchfound:
                unpaired += 1
            matchesprocessed.append(characteristicreference)
        # write the per-host asymmetry report only when something was found
        if asymmetric > 0:
            with open(reportpath, 'w') as resultfile:
                for asymindex, asymmetriclines in enumerate(asymmetricarray):
                    linetowrite = "FLOW " + str(
                        asymindex + 1) + ": " + asymmetriclines + "\n"
                    print(linetowrite)
                    resultfile.write(linetowrite)
        print('ASYMMETRIC: ' + str(asymmetric))
        print('SYMMETRIC: ' + str(symmetric))
        print('DUPLICATES: ' + str(duplicates))
        print('UNPAIRED: ' + str(unpaired))
        print('BLANKS: ' + str(blanks))
        datadict[hostvdom] = {
            'asymmetric': asymmetric,
            'symmetric': symmetric,
            'duplicates': duplicates,
            'unpaired': unpaired,
            'blank': blanks
        }
        # input file is consumed once processed
        os.remove(datapath)
    # directory should now be empty; rmdir raises if anything remains
    os.rmdir(rootdir)
    datadict['polltime'] = datetime.datetime.today().strftime('%d/%m/%Y')
    jsoncommands.writejson(datadict, outputloc)
def getlcminfo(todaydate, backupdate):
    """Extract lifecycle-management info from the day's device captures.

    Scans /anonymised/path/<todaydate>/<source>/<file> command output and
    pulls software type/version, hardware model, serial number and uptime
    out of each capture using the regex tables below. Filenames are assumed
    to follow '<ip_with_underscores>.<hostname>.<ext>' (TODO confirm).

    Args:
        todaydate:  date string naming the capture directory.
        backupdate: date recorded as each host's 'last_backup'.

    Side effects:
        Writes the collected inventory to /anonymised/path.json and prints
        found / not-found counters.
    """
    # globals
    rootdir = '/anonymised/path/' + todaydate
    endpath = '/anonymised/path.json'
    # RegEx tables — raw strings so '\d', '\w', '\s' are real regex escapes
    # rather than invalid string escapes (DeprecationWarning otherwise).
    softwarepatterns = ['IOS', 'software, version']  # IOS, catOS
    versionpatterns = [r'IOS.*Version ([\w.()]+),?',
                       r'Version NmpSW: ([\w.()]+)']
    hardwarepatterns = [r'Cisco ([\w./-]+) .* processor',
                        r'Model: ([\w./-]+)',
                        r'Cisco ([\w./-]+) .* bytes of memory']
    specifichardwarepatterns = [r'Cisco [\w.\/-]+ [(](.*)[)] processor']
    serialpatterns = [r'System serial number\s*: (\w+)',
                      r'Processor board ID (\w+)',
                      r'Hardware Version: .* Model: .* Serial #: (\w+)']
    lastrebootpatterns = [r'uptime is (.*)', r'Uptime is (.*)']

    newjson = {}
    found = 0
    notfound = 0
    for source in os.listdir(rootdir):
        sourcedir = rootdir + os.sep + source
        newjson[source] = []
        for filename in os.listdir(sourcedir):  # renamed: 'file' shadowed the builtin
            found += 1  # provisionally counted; decremented if info missing
            hostname = filename.split('.')[1].upper()
            ipaddr = filename.split('.')[0].replace('_', '.')
            newdata = {'hostname': hostname,
                       'software_type': '',
                       'software_version': '',
                       'ip_address': ipaddr,
                       'hardware': '',
                       'hardware_specific': '',
                       'serial_number': '',
                       'last_reboot': '',
                       'uptime': -1,
                       'last_backup': backupdate,
                       'days_elapsed': 0}
            dataloc = sourcedir + os.sep + filename
            with open(dataloc, 'r') as datafile:
                datalines = datafile.readlines()
            for dataline in datalines:
                # software type: first pattern that hits wins, then stop looking
                if not newdata['software_type']:
                    if re.search(softwarepatterns[0], dataline, re.IGNORECASE):
                        newdata['software_type'] = 'IOS'
                    elif re.search(softwarepatterns[1], dataline, re.IGNORECASE):
                        newdata['software_type'] = 'CATOS'
                checkregex(versionpatterns, dataline, 'software_version', newdata)
                checkregex(hardwarepatterns, dataline, 'hardware', newdata)
                checkregex(specifichardwarepatterns, dataline, 'hardware_specific', newdata)
                checkregex(serialpatterns, dataline, 'serial_number', newdata)
                checkregex(lastrebootpatterns, dataline, 'last_reboot', newdata)

            # handling information: convert 'X years, Y weeks, ...' into an
            # approximate day count and a dd/mm/YYYY last-reboot date
            extractnum = r'(\d+)'
            if newdata['last_reboot']:
                fulluptime = newdata['last_reboot'].split(',')
                newdata['last_reboot'] = ''  # just in case it somehow fails
                totaluptime = 0
                for uptimepart in fulluptime:
                    nummatch = re.search(extractnum, uptimepart)
                    if not nummatch:
                        # a part with no digits previously crashed on .group();
                        # skip it instead
                        continue
                    magnitude = int(nummatch.group(1))
                    if magnitude:
                        part = uptimepart.lower()
                        # approximations: month ~ 30 days, year ~ 365 days
                        if 'year' in part:
                            totaluptime += magnitude * 365
                        if 'month' in part:
                            totaluptime += magnitude * 30
                        if 'week' in part:
                            totaluptime += magnitude * 7
                        if 'day' in part:
                            totaluptime += magnitude
                newdata['last_reboot'] = (
                    datetime.datetime.now()
                    - datetime.timedelta(days=totaluptime)).strftime('%d/%m/%Y')
                newdata['uptime'] = totaluptime

            if not newdata['software_version'] or not newdata['hardware'] \
                    or not newdata['serial_number'] or not newdata['last_reboot']:
                print(hostname + ' did not find all info @ ' + source)
                print(newdata)
                found -= 1
                notfound += 1
            # incomplete records are still kept, just counted as not-found
            newjson[source].append(newdata)
    jsoncommands.writejson(newjson, endpath)
    print('Found: ' + str(found))
    print('Not Found: ' + str(notfound))