Example #1
def parse(input_file=None,
          output_directory=None,
          plugin_outputs=False,
          disable_color_output=None,
          *args,
          **kwargs):

    if disable_color_output:
        color = False
    else:
        from termcolor import colored
        color = True

    # build output directory
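    # (handle_output_directory is assumed to create the output directory,
    # handling any pre-existing path, and to return its absolute path,
    # since the code chdir's into it below)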
    bo = base_output_path = helpers.handle_output_directory(output_directory)

    # Load the Nessus file
    sprint('Loading Nessus file')
    tree = ET.parse(input_file)
    os.chdir(bo)

    os.mkdir('additional_info')
    os.chdir('additional_info')

    # Dump target ip addresses
    sprint('Dumping target information (all scanned addresses)')
    with open('target_ips.txt', 'w') as of:

        # dump all targets to disk
        for pref in tree.findall('.//preference'):

            name = pref.find('./name')

            if name.text == 'TARGET':

                value = pref.find('./value')
                of.write('\n'.join(value.text.split(',')))
                break

    # Dump responsive ips
    sprint('Dumping responsive ip addresses')
    with open('responsive_ips.txt', 'w') as of:

        cache = []

        for tag in tree.xpath('//tag[@name="host-ip"]'):
            ip = tag.text
            if ip not in cache:
                cache.append(ip)

        of.write('\n'.join(sorted(cache)))

    # Dump additional hostnames to disk
    for a in ['netbios-name', 'host-fqdn', 'host-rdns']:

        if a[-1] != 's': fname = a + 's'
        else: fname = a
        fname += '.txt'
        sprint(f'Dumping {a} values to {fname}')

        if tree.xpath(f'//tag[@name="{a}"]'):

            with open(fname.replace('-', '_'), 'w') as outfile:

                values = []
                for ele in tree.xpath(f'//tag[@name="{a}"]'):
                    if ele.text not in values:
                        values.append(ele.text)
                        outfile.write(ele.text + '\n')

    # Dump open ports
    sprint('Dumping open ports')
    with open('open_ports.txt', 'w') as of:

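        # Collect unique ports in numeric order; the trailing 'if p' drops
        # port 0, which Nessus uses for host-level (portless) findings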
        ports = [
            str(p)
            for p in sorted(set([int(e) for e in tree.xpath('//@port')])) if p
        ]

        of.write('\n'.join(ports))

    os.chdir('..')

    # =====================================
    # BEGIN DUMPING THE REPORT BY PLUGIN ID
    # =====================================

    # Dump plugin outputs
    sprint('Dumping report items\n')
    finding_index = {
        'NONE': [],
        'LOW': [],
        'MEDIUM': [],
        'HIGH': [],
        'CRITICAL': []
    }

    color_lookup = {
        'none': 'blue',
        'low': 'green',
        'medium': 'yellow',
        'high': 'red',
        'critical': 'magenta'
    }

    # ============================================
    # GET LONGEST PID LENGTH FOR OUTPUT FORMATTING
    # ============================================

    pid_len = max(
        (len(pid) for pid in set(tree.xpath('//@pluginID'))), default=0) + 2

    # =================
    # PARSE EACH PLUGIN
    # =================

    header = 'Risk       ' \
          'Exploitable    ' \
          'Plugin ID   ' \
          'Plugin Name'

    print(header)
    print('-' * len(header))

    for plugin_id in set(tree.xpath('//@pluginID')):

        rhosts = {}
        protocols = []
        alert = True
        pid = plugin_id

        # ==========================================================
        # EXTRACT PLUGIN IDS, PROTOCOLS, AND INITIALIZE REPORT HOSTS
        # ==========================================================

        for eri in tree.xpath(f'//ReportItem[@pluginID="{plugin_id}"]'):
            ri = FromXML.report_item(eri)

            if ri.protocol not in protocols:
                protocols.append(ri.protocol)

            if alert:
                alert = False

                if color:
                    rf = colored(ri.risk_factor.upper(),
                                 color_lookup[ri.risk_factor])
                else:
                    rf = ri.risk_factor.upper()

                if len(ri.risk_factor) < 11:
                    rf += ' ' * (11 - len(ri.risk_factor))

                if ri.exploitable:
                    rf += colored('True ', 'red') if color else 'True '
                else:
                    rf += 'False'

                rf += '      '

                if len(pid) < pid_len:
                    pid += ' ' * (pid_len - len(pid))
                    pid += '    '

                rf += '    ' + pid
                rf += ri.plugin_name

                print(rf)

            parent = eri.getparent()
            name = parent.get('name')

            if name in rhosts:

                rh = rhosts[name]
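                # rh.ports is assumed to be a filterable collection whose
                # .get(attribute, value) call returns the ports matching
                # that attribute/value pair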
                ports = rh.ports.get('number', ri.port.number) \
                    .get('protocol', ri.protocol)
                if not ports:
                    rh.append_port(ri.port)
                else:
                    port = ports[0]

            else:

                rh = FromXML.report_host(parent)
                rh.append_port(ri.port)
                rhosts[name] = rh

            if ri.plugin_output:
                ri.port.plugin_outputs.append_output(plugin_id,
                                                     ri.plugin_output)

        # Handle finding index item
        sev = ri.risk_factor.upper()
        prefix = f'[{sev}] [{plugin_id}] '
        suffix = ' '
        if ri.exploit_available:
            suffix += '[EXPLOITABLE]'
        if ri.exploit_frameworks:
            fws = ','.join([fw.upper() for fw in ri.exploit_frameworks])
            suffix += f'[EXPLOIT FRAMEWORKS: {fws}]'
        finding_index[sev].append(prefix + ri.plugin_name + suffix)

        # ================================
        # BUILD REPORT ITEM DIRECTORY NAME
        # ================================

        ri_dir = re.sub(pname_re, '_', ri.plugin_name).lower().strip('_')

        # =========================
        # BUILD DIRECTORY STRUCTURE
        # =========================

        if not Path(ri.risk_factor).exists():
            os.mkdir(ri.risk_factor)
        os.chdir(ri.risk_factor)

        if not Path(ri_dir).exists():
            os.mkdir(ri_dir)
        os.chdir(ri_dir)

        # =====================
        # WRITE CONTENT TO DISK
        # =====================

        # Additional information
        with open('additional_info.txt', 'w') as of:
            of.write(ri.additional_info())

        for protocol in protocols:

            # Address Lists
            ips = []
            sockets = []
            fqdns = []
            fsockets = []

            # Unique ports affected
            ports = []

            try:

                if plugin_outputs:

                    plugin_outputs_file = open(
                        f'{protocol}_plugin_outputs.txt', 'w')

                for rhost in rhosts.values():

                    plist = getattr(rhost, protocol + '_ports')
                    if plist:

                        for addr in rhost.to_addresses(fqdns=True):

                            if re.match(ipv4_re, addr) or \
                                    re.match(ipv6_re, addr):
                                ips.append(addr)
                            elif re.match(fqdn_re, addr):
                                fqdns.append(addr)

                        for number, port in plist.items():

                            socket = None
                            fsocket = None

                            if number > 0:
                                ports.append(number)

                            for ip in ips:
                                if number > 0:
                                    socket = f'{ip}:{port.number}'
                                    sockets.append(socket)

                            for fqdn in fqdns:
                                if number > 0:
                                    fsocket = f'{fqdn}:{port.number}'
                                    fsockets.append(fsocket)

                            if not socket: continue

                            header = socket
                            if fsocket: header = header + ',' + fsocket + ':'
                            ban = '=' * len(header)
                            header = f'{ban}{header}{ban}'

                            if plugin_outputs and plugin_id in port.plugin_outputs:

                                plugin_output = f'{header}\n\n' + '\n'.join(
                                    port.plugin_outputs[plugin_id])

                                plugin_outputs_file.write('\n\n' +
                                                          plugin_output)

            finally:

                if plugin_outputs:
                    plugin_outputs_file.close()

            # =====================
            # HANDLE IPv4 ADDRESSES
            # =====================
            '''

            IPs are now properly sorted before being written to disk.

            1. convert each ipv4 string to an ipaddress.ip_address object
            2. sort the ip_address objects
            3. convert each ip_address object back to a string
            '''

            ips = [
                str(ip)
                for ip in sorted(set(ipaddress.ip_address(ip) for ip in ips))
            ]

            # ===================
            # HANDLE IPv4 SOCKETS
            # ===================
            '''

            Sockets are now properly sorted before being written to disk.

            1. unique string sockets
            2. map each string ip to a list of ports
            3. convert each string ip to an ipaddress.ip_address object
            4. sort the ip_address objects
            5. create a new list of sockets
            '''

            sockets = set(sockets)
            smap = {}

            for s in sockets:
                ip, port = s.split(':')
                if ip not in smap:
                    smap[ip] = [port]
                elif port not in smap[ip]:
                    smap[ip].append(port)

            sips = [
                str(ip) for ip in sorted(
                    ipaddress.ip_address(ip) for ip in smap)
            ]
            sockets = []
            for sip in sips:
                # ports are stored as strings; sort them numerically
                for p in sorted(smap[sip], key=int):
                    s = f'{sip}:{p}'
                    if s not in sockets: sockets.append(s)

            # ============
            # HANDLE PORTS
            # ============

            ports = sorted(set(ports))
            if ports:

                # write a list of unique ports to disk
                with open(f'{protocol}_ports.txt', 'w') as outfile:
                    outfile.write('\n'.join([str(p) for p in ports]) + '\n')

            # ============
            # HANDLE FQDNS
            # ============

            fqdns = sorted(set(fqdns))
            fsockets = sorted(set(fsockets))

            # write address lists to disk
            for fmt, lst in {
                    'ips': ips,
                    'sockets': sockets,
                    'fqdns': fqdns,
                    'fqdn_sockets': fsockets
            }.items():

                if not lst: continue

                fname = f'{protocol}_{fmt}.list'

                with open(fname, 'w') as outfile:

                    outfile.write('\n'.join(lst) + '\n')

        os.chdir('../../')

    os.chdir('additional_info')

    print()
    sprint('Writing report item index')
    with open('report_item_index.txt', 'w') as outfile:

        outfile.write('[Risk Factor] [Plugin ID] Plugin Name [Exploitable]' \
                ' [Exploit Frameworks]\n')

        for k in ['CRITICAL', 'HIGH', 'MEDIUM', 'LOW', 'NONE']:

            if finding_index[k]:
                outfile.write('\n'.join(finding_index[k]) + '\n')

    print()
    return 0
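
# A minimal invocation sketch (assumptions: the function is importable from
# its module and 'scan.nessus' exists; the output directory is handled by
# helpers.handle_output_directory):
#
#     parse(input_file='scan.nessus',
#           output_directory='nessus_output',
#           plugin_outputs=True,
#           disable_color_output=False)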
Example #2
def parse(input_file=None,
          output_directory=None,
          plugin_outputs=False,
          disable_color_output=None,
          debug=None,
          create_port_splits=False,
          risk_factors=RISK_FACTORS,
          *args,
          **kwargs):
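
    # RISK_FACTORS is assumed to be a module-level default such as
    # ['none', 'low', 'medium', 'high', 'critical'] (cf. the severity
    # lookup below)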

    port_splits = create_port_splits

    if disable_color_output:
        color = False
    else:
        from termcolor import colored
        color = True

    if debug:
        logger.setLevel(logging.DEBUG)

    # build output directory
    bo = base_output_path = helpers.handle_output_directory(output_directory)

    # Load the Nessus file
    sprint('Loading Nessus file')
    tree = ET.parse(input_file)
    os.chdir(bo)

    os.mkdir('additional_info')
    os.chdir('additional_info')

    # Dump target ip addresses
    sprint('Dumping target information (all scanned addresses)')
    with open('target_ips.txt', 'w') as of:

        # dump all targets to disk
        for pref in tree.findall('.//preference'):

            name = pref.find('./name')

            if name.text == 'TARGET':

                value = pref.find('./value').text.split(',')
                logger.debug(f'Total target count: {len(value)}')
                of.write('\n'.join(value))
                break

    # Dump responsive ips
    sprint('Dumping responsive ip addresses')
    with open('responsive_ips.txt', 'w') as of:

        cache = []

        for tag in tree.xpath('//tag[@name="host-ip"]'):
            ip = tag.text
            if ip not in cache:
                cache.append(ip)

        count = 0
        for value in sorted(cache):
            count += 1
            of.write(value + '\n')

        logger.debug(f'Total responsive IPs: {count}')

    # Dump additional hostnames to disk
    for a in ['netbios-name', 'host-fqdn', 'host-rdns']:

        if a[-1] != 's': fname = a + 's'
        else: fname = a
        fname += '.txt'
        sprint(f'Dumping {a} values to {fname}')

        if tree.xpath(f'//tag[@name="{a}"]'):

            with open(fname.replace('-', '_'), 'w') as outfile:

                values, count = [], 0
                for ele in tree.xpath(f'//tag[@name="{a}"]'):
                    if ele.text not in values:
                        count += 1
                        values.append(ele.text)
                        outfile.write(ele.text + '\n')

                logger.debug(f'Total of {a} values: {count}')

    # Dump open ports
    sprint('Dumping open ports')
    with open('open_ports.txt', 'w') as of:

        ports = [
            str(p)
            for p in sorted(set([int(e) for e in tree.xpath('//@port')])) if p
        ]

        of.write('\n'.join(ports))

        logger.debug(f'Total count of ports: {len(ports)}')

    os.chdir('..')

    # =====================================
    # BEGIN DUMPING THE REPORT BY PLUGIN ID
    # =====================================

    # Dump plugin outputs
    sprint('Dumping report items\n')
    finding_index = {
        'NONE': {},
        'LOW': {},
        'MEDIUM': {},
        'HIGH': {},
        'CRITICAL': {}
    }

    color_lookup = {
        'none': 'blue',
        'low': 'green',
        'medium': 'yellow',
        'high': 'red',
        'critical': 'magenta'
    }

    # =================
    # PARSE EACH PLUGIN
    # =================

    header = 'Risk       ' \
          'Exploitable    ' \
          'Plugin ID   ' \
          'Plugin Name'

    print(header)
    print('-' * len(header))

    # ============================
    # GET PLUGIN ID BY RISK FACTOR
    # ============================

    plugin_ids = []

    severity_lookup = {
        'none': 0,
        'low': 1,
        'medium': 2,
        'high': 3,
        'critical': 4
    }

    for risk_factor in risk_factors:

        severity = severity_lookup.get(risk_factor)
        if severity is None:
            continue

        plugin_ids += set(
            tree.xpath(f'//ReportItem[@severity="{severity}"]/@pluginID'))

    # ============================================
    # GET LONGEST PID LENGTH FOR OUTPUT FORMATTING
    # ============================================

    pid_len = max((len(pid) for pid in plugin_ids), default=0) + 2

    # ==============================
    # PARSE REPORT ITEM BY PLUGIN_ID
    # ==============================

    alerted = []
    for plugin_id in plugin_ids:

        # Report hosts
        rhosts = {}
        protocols = []
        pid = plugin_id

        # ==========================================================
        # EXTRACT PLUGIN IDS, PROTOCOLS, AND INITIALIZE REPORT HOSTS
        # ==========================================================

        for eri in tree.xpath(f'//ReportItem[@pluginID="{plugin_id}"]'):
            ri = FromXML.report_item(eri)

            if ri.risk_factor not in risk_factors: continue

            if ri.protocol not in protocols:
                if ri.protocol.lower() not in ReportHost.PORT_PROTOCOLS:
                    esprint(
                        'Unknown protocol provided. Skipping: {}' \
                        .format(ri.protocol)
                    )
                    continue
                protocols.append(ri.protocol)

            if plugin_id not in alerted:
                alerted.append(plugin_id)

                if color:
                    rf = colored(ri.risk_factor.capitalize(),
                                 color_lookup[ri.risk_factor])
                else:
                    rf = ri.risk_factor.upper()

                if len(ri.risk_factor) < 11:
                    rf += ' ' * (11 - len(ri.risk_factor))

                if ri.exploitable:
                    rf += colored('True ', 'red') if color else 'True '
                else:
                    rf += 'False'

                rf += '      '

                if len(pid) < pid_len:
                    pid += ' ' * (pid_len - len(pid))
                    pid += '    '

                rf += '    ' + pid
                rf += ri.plugin_name

                print(rf)

            # ===================================================
            # CREATE/UPDATE THE OWNER HOST WITH THE AFFECTED PORT
            # ===================================================
            '''
            - Report items (ri) are child elements of hosts
            - The parent of the report item is a host element
            '''

            # Get the host element
            parent = eri.getparent()

            # Get the name of the host
            name = parent.get('name')

            host_ips = parent.xpath(
                './HostProperties/tag[@name="host-ip"]/text()')

            for host_ip in host_ips:

                rh = rhosts.get(host_ip)

                # Check if the host is already being tracked in rhosts
                if rh:

                    # ==================
                    # UPDATE KNOWN RHOST
                    # ==================

                    # update the ports list of the target host with the port
                    # of the current report item

                    if not ri.port in rh.ports \
                            .get('number', ri.port.number) \
                            .get('protocol', ri.protocol):
                        rh.append_port(ri.port)

                else:

                    # ================
                    # CREATE NEW RHOST
                    # ================

                    rh = FromXML.report_host(parent)
                    rh.append_port(ri.port)
                    rhosts[host_ip] = rh

                # ====================
                # HANDLE PLUGIN OUTPUT
                # ====================

                if ri.plugin_output and plugin_outputs:

                    ri.port.plugin_outputs.append_output(
                        plugin_id, ri.plugin_output)

        # =============================
        # HANDLE THE FINDING INDEX ITEM
        # =============================
        '''
        - this is dumped to the findings index in additional_info
        '''

        sev = ri.risk_factor.upper()
        prefix = f'[{sev}] [{plugin_id}] [{len(rhosts.keys())}] '
        suffix = ' '

        exploitable, fws = 'false', 'n/a'

        if ri.exploit_available:
            exploitable = 'true'

        if ri.exploit_frameworks:
            fws = ','.join([fw.upper() for fw in ri.exploit_frameworks])

        finding_index[sev][ri.plugin_name] = {
            'plugin_name': ri.plugin_name,
            'plugin_id': plugin_id,
            'severity': sev,
            'count': len(rhosts.keys()),
            'exploitable': exploitable,
            'exploit_frameworks': fws
        }

        # ================================
        # BUILD REPORT ITEM DIRECTORY NAME
        # ================================

        ri_dir = re.sub(pname_re, '_', ri.plugin_name).lower().strip('_')

        # =========================
        # BUILD DIRECTORY STRUCTURE
        # =========================

        out_dir = Path(ri.risk_factor) / str(ri_dir)[:250]
        out_dir.mkdir(parents=True, exist_ok=True)

        # =====================
        # WRITE CONTENT TO DISK
        # =====================

        # Write additional info
        with (out_dir / 'additional_info.txt').open('w') as of:
            of.write(ri.additional_info())

        # Iterate over each protocol
        # These were captured while collecting plugin ids
        for protocol in protocols:

            # Address Lists
            ips = []
            sockets = []
            fqdns = []
            fsockets = []

            # Unique ports affected
            ports = []

            try:

                # Prepare to handle plugin outputs
                if plugin_outputs:

                    plugin_outputs_file = (
                        out_dir / f'{protocol}_plugin_outputs.txt').open('w')

                for rhost in rhosts.values():
                    host_ips, host_fqdns = [], []

                    plist = rhost.ports
                    if not plist: continue

                    # ====================
                    # CAPTURE IP ADDRESSES
                    # ====================

                    for addr in rhost.to_addresses(fqdns=True):

                        try:

                            ip = ipaddress.ip_address(addr)
                            host_ips.append(ip)

                            if ip not in ips: ips.append(ip)
                        except ValueError:
                            # not an IP address; try treating it as an FQDN
                            if re.match(fqdn_re, addr):
                                host_fqdns.append(addr)
                                if addr not in fqdns: fqdns.append(addr)
                            else:
                                logger.debug(
                                    f'Failed to handle address: {addr}')
                                continue

                    # ===============
                    # CAPTURE SOCKETS
                    # ===============

                    for port in sorted(plist):

                        # Reset per-port so values from a previous iteration
                        # don't leak into this port's plugin output header
                        socket = fsocket = None

                        if port.number > 0:

                            if port.number not in ports:
                                ports.append(port.number)

                            for ip in host_ips:
                                socket = f'{ip}:{port.number}'
                                sockets.append(socket)

                            for fqdn in host_fqdns:
                                fsocket = f'{fqdn}:{port.number}'
                                fsockets.append(fsocket)

                        if plugin_outputs and plugin_id in port.plugin_outputs:

                            # Skip the output header when no address resolved
                            if not socket: continue

                            header = socket
                            if fsocket: header = header + ',' + fsocket + ':'
                            ban = '=' * len(header)
                            header = f'{ban}{header}{ban}'

                            plugin_output = f'{header}\n\n' + '\n'.join(
                                port.plugin_outputs[plugin_id])

                            plugin_outputs_file.write('\n\n' + plugin_output)

            except Exception as e:

                logger.debug(f'Unhandled exception occurred: {e}')
                raise e

            finally:

                if plugin_outputs: plugin_outputs_file.close()

            # =====================
            # HANDLE IPv4 ADDRESSES
            # =====================

            ips = [str(ip) for ip in sorted(set(ips))]
            finding_index[sev][ri.plugin_name]['ip_count'] = len(ips)

            # ===================
            # HANDLE IPv4 SOCKETS
            # ===================

            sorted_sockets = []
            for ip in ips:
                for s in [s for s in sockets if s.startswith(ip + ':')]:
                    if s not in sorted_sockets: sorted_sockets.append(s)
            sockets = sorted_sockets

            finding_index[sev][ri.plugin_name]['socket_count'] = len(sockets)

            # ============
            # HANDLE PORTS
            # ============

            ports = sorted(set(ports))
            if ports:

                # write a list of unique ports to disk
                with (out_dir / f'{protocol}_ports.txt').open('w') as outfile:
                    outfile.write('\n'.join([str(p) for p in ports]) + '\n')

            # ============
            # HANDLE FQDNS
            # ============

            fqdns = sorted(set(fqdns))
            fsockets = sorted(set(fsockets))

            finding_index[sev][ri.plugin_name]['fqdn_count'] = len(fqdns)
            finding_index[sev][ri.plugin_name]['fqdn_socket_count'] = len(
                fsockets)

            logger.debug(
                f'{ri.plugin_name}: ip_count({len(ips)}) ' \
                f'socket_count({len(sockets)}) fqdn_count({len(fqdns)}) ' \
                f'fqdn_socket_count({len(fsockets)})'
            )

            # write address lists to disk
            for fmt, lst in {
                    'ips': ips,
                    'sockets': sockets,
                    'fqdns': fqdns,
                    'fqdn_sockets': fsockets
            }.items():

                if not lst: continue

                fname = f'{protocol}_{fmt}.list'

                with (out_dir / fname).open('a') as outfile:

                    outfile.write('\n'.join(lst) + '\n')

            # ==================
            # HANDLE PORT SPLITS
            # ==================
            '''
            Creates a new directory that will contain a series of files named
            like "<proto>_<port.number>.list". This is useful when passing the
            list to Metasploit, which doesn't support sockets.
            '''

            if port_splits:

                psplits_dir = out_dir / 'port_splits'
                fpsplits_dir = out_dir / 'fqdn_port_splits'

                psplits_dir.mkdir(parents=True, exist_ok=True)
                fpsplits_dir.mkdir(parents=True, exist_ok=True)

                for port in ports:

                    port = str(port)
                    with (psplits_dir /
                          f'{protocol}_{port}_ips.list').open('a') as outfile:

                        for socket in sockets:
                            addr, sport = socket.split(':')
                            if port == sport: outfile.write(addr + '\n')

                    with (fpsplits_dir /
                          f'{protocol}_{port}_fqdns.list').open('a') as outfile:

                        for socket in fsockets:
                            addr, sport = socket.split(':')
                            if port == sport: outfile.write(addr + '\n')

    # The index belongs in the top-level additional_info directory created
    # earlier (the cwd is the base output path at this point)
    adinfo_dir = Path('additional_info')
    adinfo_dir.mkdir(parents=True, exist_ok=True)

    print()

    sprint('Writing report item index')
    with (adinfo_dir / 'report_item_index.txt').open('w+') as outfile:

        rows = [[
            'Risk Factor', 'Plugin ID', 'Count IPs', 'Count Sockets',
            'Count FQDNs', 'Count FQDN Sockets', 'Exploitable',
            'Exploit Frameworks', 'Plugin Name'
        ]]

        for k in ['CRITICAL', 'HIGH', 'MEDIUM', 'LOW', 'NONE']:

            if finding_index[k]:

                for plugin_name in sorted(list(finding_index[k].keys())):

                    dct = finding_index[k][plugin_name]

                    rows.append([
                        dct.get('severity'),
                        dct.get('plugin_id'),
                        dct.get('ip_count'),
                        dct.get('socket_count'),
                        dct.get('fqdn_count'),
                        dct.get('fqdn_socket_count'),
                        dct.get('exploitable'),
                        dct.get('exploit_frameworks'),
                        dct.get('plugin_name'),
                    ])

        outfile.write(tabulate(rows, headers='firstrow') + '\n')

    print()
    return 0
Example #3
def parse(input_file=None,
          no_url=False,
          output_directory=None,
          no_headers=False,
          no_beautify_js=False,
          huge_tree=False,
          **kwargs):

    # Invert flags
    write_url = (not no_url)
    write_headers = (not no_headers)
    beautify_js = (not no_beautify_js)

    esprint(f'Parsing input file: {input_file}')

    # parse the input file as XML
    parser = ET.XMLParser(huge_tree=huge_tree)

    try:
        tree = ET.parse(input_file, parser=parser)
    except Exception as e:
        # lxml reports "Huge input lookup" when the document exceeds the
        # default parser limits; anything else is re-raised
        if 'Huge input lookup' in str(e):
            esprint(
                '\nWARNING: ' \
                'Large input file selected. Include --huge-tree ' \
                'to continue parsing the target file. Exiting.',
                suf='[!]'
            )
            exit()
        raise

    bo = base_output_path = helpers.handle_output_directory(output_directory)
    os.chdir(bo)

    counter = 0

    for item in tree.xpath('//item'):

        try:

            item = Item.from_lxml(item)

        except Exception as e:

            esprint(f'Failed to parse item #{counter}: {e}')
            continue

        # ==================
        # HANDLE THE REQUEST
        # ==================

        with open(str(counter) + '.req', 'wb') as outfile:

            if write_url:
                outfile.write(
                    bytify(f'URL: {item.url}\r\n{item.request.firstline}\r\n'))

            for k, v in item.request.headers.items():

                if beautify_js \
                        and re.match('content-type', k, re.I) \
                        and re.search('json', v, re.I):
                    try:
                        item.request.sbody = jsbeautifier.beautify(
                            item.request.sbody)
                    except Exception as e:
                        esprint(f'Failed to beautify JSON: {e}')

                if write_headers: outfile.write(bytify(f'{k}: {v}\r\n'))

            if write_headers: outfile.write(b'\r\n')

            if item.request.body and not item.request.sbody:
                outfile.write(item.request.body)
            else:
                outfile.write(bytify(item.request.sbody))

        if item.mimetype: mimetype = item.mimetype.lower()
        else: mimetype = 'no_mimetype'

        # ===================
        # HANDLE THE RESPONSE
        # ===================

        with open(str(counter) + '.resp.' + mimetype, 'wb') as outfile:

            # Write the first line
            if write_url:
                outfile.write(
                    bytify(
                        f'URL: {item.url}\r\n{item.response.firstline}\r\n'))

            # Handle response headers
            for k, v in item.response.headers.items():

                # Beautify JavaScript/JSON content
                if beautify_js \
                        and re.match('content-type', k, re.I) \
                        and re.search('java|json', v, re.I):
                    try:
                        item.response.sbody = jsbeautifier.beautify(
                            item.response.sbody)
                    except Exception as e:
                        esprint(f'Failed to beautify JavaScript/JSON: {e}')

                # Write headers to the output file
                if write_headers: outfile.write(bytify(f'{k}: {v}\r\n'))

            # Write newlines
            if write_headers: outfile.write(b'\r\n')

            # Write response body to disk
            if item.response.body and not item.response.sbody:
                outfile.write(item.response.body)
            else:
                outfile.write(bytify(item.response.sbody))

        counter += 1

    return 0
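
# bytify is not defined in this example; a plausible helper (an assumption,
# not the source's definition), since it is applied to str values before they
# are written to files opened in binary mode:
#
#     def bytify(value, encoding='utf-8'):
#         # pass bytes through untouched; encode str
#         return value if isinstance(value, bytes) else value.encode(encoding)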
Example #4
def parse(input_file=None, output_directory=None, tcpwrapped=None, **kwargs):

    bo = base_output_path = helpers.handle_output_directory(output_directory)

    # parse the input file
    tree = ET.parse(input_file)

    os.chdir(bo)
    services = set(tree.xpath('//service/@name'))
    sprint(f'Parsing {len(services)} services...\n')

    hcache = []
    for sname in services:

        # skip tcpwrapped services unless specified
        if sname == 'tcpwrapped' and not tcpwrapped:
            continue

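        # Climb from each matching service element to its host element
        # (service -> port -> ports -> host), keeping only hosts whose
        # status child reports state="up"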
        hosts = tree.findall(
            f'.//service[@name="{sname}"]/../../../status[@state="up"]/..')

        if hosts:
            os.mkdir(sname)
            os.chdir(sname)
        else:
            continue

        print(f'- {sname}')
        '''
        {
            protocol:{
                'addresses':[],
                'sockets':[],
                'fqdns':[],
                'fsockets':[],
            }
        }
        '''
        to_dump = {}

        # Iterate over a set of unique protocol/port combinations
        # associated with a given service. Each item of the set will
        # be a tuple in the following form: (protocol,port)
        for tup in set([(
                p.get('protocol'),
                p.get('portid'),
        ) for p in tree.xpath(f'//service[@name="{sname}"]/..')]):

            protocol, port = tup

            if protocol not in to_dump:

                to_dump[protocol] = {
                    'addresses': [],
                    'sockets': [],
                    'fqdns': [],
                    'fsockets': []
                }

            dct = to_dump[protocol]

            # Find every up host exposing this service on this protocol/port
            for ehost in tree.xpath(
                    f'.//service[@name="{sname}"]/../../../status[@state=' \
                    f'"up"]/../ports/port[@protocol="{protocol}" and ' \
                    f'@portid="{port}"]/../..'
                ):

                try:
                    host = hcache[hcache.index(ehost.get('addr'))]
                except ValueError:
                    host = NH.FromXML.host(ehost)

                if host.ipv4_address:
                    dct['addresses'].append(host.ipv4_address)
                    dct['sockets'].append(host.ipv4_address + f':{port}')

                if host.ipv6_address:
                    dct['addresses'].append(host.ipv6_address)
                    dct['sockets'].append(f'[{host.ipv6_address}]:{port}')

                dct['fqdns'] += host.hostnames

                for hn in host.hostnames:
                    dct['fsockets'].append(hn + f':{port}')

        # =======================================
        # DUMP OUTPUT TO DISK FOR CURRENT SERVICE
        # =======================================

        for proto, output in to_dump.items():

            for tpe, lst in output.items():

                if not lst: continue

                with open(f'{proto}_{tpe}.txt', 'w') as outfile:

                    outfile.write('\n'.join(sorted(set(lst))))

        # Change back to main output directory
        os.chdir('..')

    return 0
Example #5
def parse(input_files, output_directory, *args, **kwargs):

    helpers.handle_output_directory(output_directory)

    groups = GroupList()

    signatures = [
        '[+] Getting builtin group memberships:',
        '[+] Getting local group memberships:',
        '[+] Getting domain group memberships:'
    ]

    # ==========================
    # PARSE EACH ENUM4LINUX FILE
    # ==========================

    sprint('Parsing files. This may take some time...')

    domain = None
    for infile in input_files:

        sprint(f'Parsing: {infile}')

        with open(infile) as f:

            group_type = None

            for line in f:

                line = line.strip()

                # Entering a group type section
                if line in signatures:
                    group_type = line.split(' ')[2]

                # Following 'Getting' signature, each line will begin with 'Group '
                # blank lines indicate that all groups have been parsed. We reset
                # group_type to None to indicate continuation until the next type
                # of group is identified.
                elif not line.startswith('Group '):
                    group_type = None

                # Parse out the Group and Member
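                # Typical enum4linux membership line (assumed shape):
                #   Group 'Domain Admins' (RID: 512) has member: EXAMPLE\alice
                # Group.REG is expected to expose the named captures
                # group, group_rid, domain, and username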
                elif line.startswith('Group ') and group_type:
                    gd = re.match(Group.REG, line).groupdict()
                    group = gd['group']
                    group_rid = gd['group_rid']

                    if not domain and gd['domain']:
                        domain = gd['domain']

                    member = gd['username']

                    # Append the new group and member.
                    # Append method handles logic regarding duplicate values
                    groups.append(group=group,
                                  group_type=group_type,
                                  member=member)

    sprint('Dumping output to disk')

    os.chdir(output_directory)

    # ====================
    # WRITE DOMAIN TO DISK
    # ====================

    if domain:
        with open('domain.txt', 'w') as outfile:
            outfile.write(domain + '\n')

    # =============================
    # DUMP EACH DETECTED GROUP TYPE
    # =============================

    for k in ['builtin', 'local', 'domain']:

        # ==========================
        # EXTRACT APPROPRIATE GROUPS
        # ==========================

        cgroups = groups.find('type', k)

        # ==========
        # BEGIN DUMP
        # ==========

        if cgroups:

            os.mkdir(k)
            os.chdir(k)

            # ==================================
            # DUMP MANIFESTS OF GROUPS AND USERS
            # ==================================

            written_groups = []
            written_members = []

            groups_file = open('groups.txt', 'w')
            members_file = open('members.txt', 'w')

            sprint(f'Dumping {k} groups...')
            for group in cgroups:

                if group.value not in written_groups:
                    groups_file.write(group.value + '\n')
                    written_groups.append(group.value)

                for member in group.members:
                    if member.value not in written_members:
                        members_file.write(member.value + '\n')
                        written_members.append(member.value)

            groups_file.close()
            members_file.close()

            # ===================
            # DUMP USERS BY GROUP
            # ===================

            os.mkdir('members_by_group')
            os.chdir('members_by_group')

            for group in cgroups:

                with open(group.normalized + '.users', 'w') as outfile:

                    for member in group.members:

                        outfile.write(member.value + '\n')

            os.chdir('..')

            # ===================
            # DUMP GROUPS BY USER
            # ===================

            os.mkdir('groups_by_member')
            os.chdir('groups_by_member')

            for member in written_members:

                with open(Normalized.normalize(member) + '.groups',
                          'w') as outfile:

                    for group in cgroups:

                        if group.members.find('value', member):

                            outfile.write(group.value + '\n')

            os.chdir('../..')

    return 0
Example #6
def parse(input_file=None, output_directory=None,
          huge_tree=False, **kwargs):

    esprint(f'Parsing input file: {input_file}')

    # parse the input file as XML
    parser = ET.XMLParser(huge_tree=huge_tree)

    try:
        tree = ET.parse(input_file, parser=parser)
    except Exception as e:
        # lxml reports "Huge input lookup" when the document exceeds the
        # default parser limits; anything else is re-raised
        if 'Huge input lookup' in str(e):
            esprint(
                '\nWARNING: ' \
                'Large input file selected. Include --huge-tree ' \
                'to continue parsing the target file. Exiting.',
                suf='[!]'
            )
            exit()
        raise
    
    bo = base_output_path = helpers.handle_output_directory(
        output_directory
    )
    os.chdir(bo)

    counter = 0

    for item in tree.xpath('//item'):

        try:

            item = Item.from_lxml(item)

        except Exception as e:

            esprint(f'Failed to parse item #{counter}: {e}')
            continue

        with open(str(counter) + '.req', 'w') as outfile:

            outfile.write(
                f'URL: {item.url}\r\n{item.request.firstline}\r\n'
            )

            for k, v in item.request.headers.items():
                outfile.write(
                    f'{k}: {v}\r\n'
                )

            outfile.write('\r\n\r\n')
            outfile.write(item.request.sbody)
        
        if item.mimetype: mimetype = item.mimetype.lower()
        else: mimetype = 'no_mimetype'

        with open(str(counter) + '.resp.' + mimetype, 'w') as outfile:
            
            outfile.write(
                f'URL: {item.url}\r\n{item.response.firstline}\r\n'
            )

            for k, v in item.response.headers.items():
                outfile.write(
                    f'{k}: {v}\r\n'
                )

            outfile.write('\r\n\r\n')
            outfile.write(item.response.sbody)
        
        counter += 1

    return 0