def convert_single_alexa_file(file_location):
    """Convert a single Alexa XML file into a dict and return it.

    :param file_location: filename inside the 'alexa_data/' directory
    :returns: the parsed dict, or False when the file cannot be parsed
    """
    with open('alexa_data/' + file_location) as fh:
        try:
            # BUG FIX: was a Python 2 `print` statement, a SyntaxError under
            # the Python 3 this file otherwise requires (f-strings elsewhere);
            # also no longer shadows the file handle with the parse result.
            parsed = xmlparse(fh)
        except Exception:
            # best-effort: report the bad file and signal failure to the caller
            print("Could not parse: " + file_location)
            return False
    return parsed
def __init__(self, filename='anime-titles.xml.gz'):
    """Load the gzipped anime-titles dump and open a database session."""
    self.anidb_data = xmlparse(GzipFile(filename))
    self.engine = create_engine('postgresql://*****:*****@db-server/studyzero')
    # make sure all mapped tables exist before handing out sessions
    Base.metadata.create_all(self.engine)
    session_factory = sessionmaker(bind=self.engine)
    self.session = session_factory()
def fromnmapxml(self, filename):
    """extract IPs from nmap xml scan log

    Registers every scanned host address in this mapping under the
    'Miscellaneous' category.

    :param filename: path to an nmap XML scan log
    :raises Exception: if the scan contains no hosts
    """
    # context manager: the original leaked the open file handle
    with open(filename) as fh:
        nmapxml = xmlparse(fh.read(), force_list=xmlforced)
    if 'host' not in nmapxml['nmaprun']:
        # BUG FIX: the message formatted 'hostspath' (a name from another
        # scope) instead of the file actually parsed here
        raise Exception('No hosts scanned\n{}\n'.format(filename))
    for host in nmapxml['nmaprun']['host']:
        # nmap may emit several <address> elements; keep the first
        if isinstance(host['address'], list):
            host['address'] = host['address'][0]
        addr = host['address']['@addr']  # XXX: ensure ipv4 addr
        self[addr] = 'Miscellaneous'  # TODO: add global/config setting
def __init__(self, file_str):
    """Parse the given file contents.

    Stores the raw text, then runs the internal pipeline in order:
    _convert_newlines(), _parse_header(), an XML parse of the body when
    major_version() == 2, then _parse_signon() and _parse_profile().

    :param file_str: entire file contents as a string
    """
    self._file_str = file_str
    self._convert_newlines()
    self._parse_header()
    if self.major_version() == 2:
        # NOTE(review): this parses the original file_str argument, not
        # self._file_str — if _convert_newlines() rewrites the stored copy,
        # the XML parse sees the un-converted text. Confirm this is intended.
        self._v2_dict = xmlparse(file_str)
    self._parse_signon()
    self._parse_profile()
async def findStream(stream):
    """Look up an Icecast stream's currently-playing track stats.

    Fetches '<stream>.xspf' and returns the track mapping with an added
    'stream' key when a titled track is playing, otherwise None.
    """
    uri = '%s.xspf' % stream
    data = xmlparse(await getIcecast(uri))
    playlist = data.get('playlist') or {}
    tracklist = playlist.get('trackList') or {}
    # BUG FIX: the original unconditionally did stats['stream'] = stream,
    # raising "TypeError: list indices must be integers" on the initial []
    # whenever the playlist/trackList data was missing.
    if not isinstance(tracklist, dict) or not tracklist.get('track'):
        return None
    stats = tracklist['track']
    # NOTE(review): assumes a single <track> element (a mapping); a list of
    # tracks would fail here exactly as in the original — confirm upstream.
    stats['stream'] = stream
    return stats if 'title' in stats else None
def create_json():
    """Creates 2 gigantic json payloads from the xml data, one formatted
    nicely (data_formatted.json) and one with no extra whitespace
    (data.json)"""
    result = {}
    for fn in listdir('alexa_data/'):
        with open('alexa_data/' + fn) as f:
            try:
                result[fn.replace(".txt", "")] = xmlparse(f.read())
            except Exception:
                # BUG FIX: was a Python 2 `print` statement (SyntaxError
                # under Python 3); best-effort — note the bad file, keep going
                print(fn)
    # BUG FIX: the formatted/compact dumps were swapped relative to the
    # filenames and the docstring.
    with open('data_formatted.json', 'w') as f:
        dump(result, f, indent=4, sort_keys=True)
    with open('data.json', 'w') as f:
        dump(result, f)
def visfromtracexml(xmlfile, format='svg', outpath=os.curdir, metadata=None):
    """takes a file descriptor of an XML file generated by trace.py and
    graphs each network in it as an SVG

    :param xmlfile: file descriptor of trace.py XML output
    :param format: output image format / file extension
    :param outpath: directory the images are written to
    :param metadata: optional mapping passed through to svg_from_nxgraph
    :returns: list of filenames written
    """
    if metadata is None:  # avoid the shared mutable-default pitfall
        metadata = {}
    xmldata = xmlparse(xmlfile, force_list=xmlforced)
    # generate graph data from trace.xml
    graph_dump = []
    for net in xmldata['networks']['network']:
        G = nx.Graph()
        labels = {}  # dict mapping nodes to labels
        # construct new graph
        # I opted to use continue's because the depth gets
        # unwieldy for proper if statements.
        if net['hosts'] is None:
            continue
        for host in net['hosts']['host']:
            # construct the topology for the relevant hosts
            labels['PUBLIC'] = 'PUBLIC'  # root of all traces
            last = 'PUBLIC'
            if host['trace'] is None:
                continue
            for hop in host['trace']['hop']:
                G.add_edge(last, hop['address'])
                # BUG FIX: the continuation string lacked the f-prefix, so
                # the literal text '{hop["address"]}' appeared in labels
                labels[hop['address']] = f'{hop["hostname"]}\n{hop["address"]}'
                last = hop['address']
            G.add_edge(last, host['address'])
            # BUG FIX: same missing f-prefix on the continuation line
            labels[host['address']] = f'{host["hostname"]}\n{host["address"]}'
        graph_dump.append((G, net['networkname'], labels))
    # convert each to the appropriate format
    files = []
    for G, netname, labels in graph_dump:
        icons, types = choose_icons_and_types(labels)
        pos = hierarchy_pos(G, 'PUBLIC')
        filename = path.join(outpath, f'{netname}.{format}')
        svg_from_nxgraph(G, pos, icons, types, labels, filename,
                         f'{netname} Network', metadata)
        files.append(filename)
        print(filename)
    # TODO: add exception handling for bad graphs
    print('finished successfully')
    return files
def run(xml=None, output=tablecsv):
    """constructs a semi-colon delimited csv file from trace data

    :param xml: raw trace XML; read from tracepath when None
    :param output: writable file object, or a path string to open
    """
    # NOTE(review): csv.writer is used with the default (comma) delimiter,
    # which contradicts the docstring above — confirm which is intended.
    if xml is None:
        # context manager: the original leaked the open file handle
        with open(tracepath, 'rb') as fh:
            xml = fh.read()
    x = xmlparse(xml, force_list=trace.xmlforced)
    if isinstance(output, str):
        output = open(output, 'w')
    try:
        # use to separate network categories/add headings
        csvfile = csv.writer(output, lineterminator='\n')
        for network in x['networks']['network']:
            csvfile.writerow((network['networkname'],))
            csvfile.writerow(('hostname', 'address', 'type', 'trace'))
            if network['hosts'] is None:
                continue
            for host in network['hosts']['host']:
                csvfile.writerow((host['hostname'], host['address'],))
                csvfile.writerow(
                    (None, None, None, 'index', 'hostname', 'address'))
                if host['trace'] is None:
                    continue
                for hop in host['trace']['hop']:
                    csvfile.writerow(
                        (None, None, None, hop['index'], hop['hostname'],
                         hop['address']))
                    # typedct.get(host['address']))
                    # host['traceblob'])
    finally:
        # the original closed unconditionally on success; also close on error
        output.close()
def parse_result(output=sys.stdout):
    """Aggregate nmap scan results into per-network XML and dump it.

    Reads the scan at nmapxmlpath, groups hosts by the network assigned in
    the module-level iplist mapping, rebuilds hostname/address/trace data
    per host, and writes the resulting document to *output* via xmldump.

    :param output: writable stream for the XML dump
    :raises FileNotFoundError: if the scan output is missing
    :raises Exception: if the scan contains no hosts or no traces
    """
    print('Parsing nmap results if they exist from scan...')
    try:
        # context manager: the original leaked the open file handle
        with open(nmapxmlpath, 'rb') as fh:
            nmapxml = xmlparse(fh, force_list=xmlforced)
    except FileNotFoundError:
        print('The results of the scan do not exist or have been moved!')
        raise
    scans = (nmapxml, )  # , nmapxml2)
    newxml = odict()
    newxml['networks'] = odict()
    newxml['networks']['network'] = []
    # extract and aggregate data from scans
    # TODO: remove terrible naming (referring to previous var)
    for nmapxml in scans:
        for network in sorted(list(set(iplist.values()))):
            # TODO: custom exception class
            if 'host' not in nmapxml['nmaprun']:
                raise Exception('No hosts scanned\n{}\n{}\n'.format(
                    nmapxml, hostspath))
            for host in nmapxml['nmaprun'][
                    'host']:  # XXX: isolate ipv4 address directly
                if isinstance(host['address'], list):
                    host['address'] = host['address'][0]
            hosts = (n for n in nmapxml['nmaprun']['host']
                     if iplist[n['address']['@addr']] == network)
            nethosts = odict()
            newxml['networks']['network'].append(nethosts)
            nethosts['networkname'] = network
            nethosts['hosts'] = odict()
            nethosts['hosts']['host'] = []
            for host in hosts:
                # do all the nasty xml creation
                # if you're unfamiliar with the '@keys' convention,
                # odict use and etc, you should read the xmltodict
                # module docs
                newhost = odict()
                newhost['address'] = host['address']['@addr']
                # in case hostnames is set to none or the tag doesn't even exist
                if host.get('hostnames') is not None:
                    newhost['hostname'] = host['hostnames']['hostname'][
                        '@name']
                else:
                    newhost['hostname'] = host['address']['@addr']
                # newhost['networkname'] = iplist[host['address']['@addr']]
                newhost['trace'] = odict()
                newhost['trace']['hop'] = []
                traceblob = []
                try:
                    for hop in host['trace']['hop']:
                        # TODO: Add a black hole for unknown trace nodes if possible
                        newhop = odict()
                        newhop['index'] = hop['@ttl']
                        # in case hostname is set to none or the tag doesn't even exist
                        if hop.get('@host') is None:
                            newhop['hostname'] = hop['@ipaddr']
                        else:
                            newhop['hostname'] = hop.get('@host')
                        newhop['address'] = hop['@ipaddr']
                        newhost['trace']['hop'].append(newhop)
                        traceblob.append(
                            (newhop['index'], newhop['hostname']
                             if newhop['hostname'] is not None else '',
                             newhop['address']))
                except KeyError as e:
                    # BUG FIX: the original compared str(e) == 'trace', which
                    # never matches because str(KeyError('trace')) includes
                    # the repr quotes ("'trace'"); compare the key itself.
                    if e.args[0] == 'trace':
                        raise Exception(
                            "It's likely that no traces were run, please inspect the nmap output"
                        ) from e
                newhost['traceblob'] = str(traceblob)
                nethosts['hosts']['host'].append(newhost)
            # sort hosts by hostname then address for stable output
            nethosts['hosts']['host'].sort(
                key=lambda t: (t['hostname']
                               if t['hostname'] is not None else '',
                               t.get('address', '')))
    xmldump(newxml, output, pretty=True)
if 'AliasInfo' in data.keys(): if type(data['AliasInfo']) is list: aliases = [d.get('Name') for d in data['AliasInfo']] elif type(data['AliasInfo']) is dict: aliases = [data['AliasInfo'].get('Name', '')] data['AliasInfo'] = aliases return data if __name__ == '__main__': parser = ArgumentParser(description='read SEGGER XML file') parser.add_argument('infile', help='the SEGGER XML file') args = parser.parse_args() with open(args.infile, 'r') as ifh: data = xmlparse(ifh.read()) vendors = data['DeviceDatabase']['VendorInfo'] devices = [] for vendor in vendors: vendorname = vendor['@Name'] deviceinfos = vendor['DeviceInfo'] if type(deviceinfos) is OrderedDict: device = {} device['vendor'] = vendorname device.update(clean_dict(deviceinfos)) device = clean_aliasinfo(device) devices.append(device) else: for deviceinfo in deviceinfos: device = {}