Example #1
def get_host_record(argument):
    """
    Returns a t_hosts record based on the argument. If the argument is an
    IPv4/IPv6 address, look it up and return the matching record.
    """

    if argument is None:
        return None

    from gluon.validators import IS_IPADDRESS
    db = current.globalenv['db']
    cache = current.globalenv['cache']

    record = db.t_hosts(argument) or None
    if record:
        return record
    else:
        if IS_IPADDRESS(is_ipv4=True)(argument)[1] is None:
            host_rec = db(db.t_hosts.f_ipv4 == argument).select().first()
            if host_rec:
                record = db.t_hosts(host_rec['id'])
            else:
                record = None
        elif IS_IPADDRESS(is_ipv6=True)(argument)[1] is None:
            host_rec = db(
                db.t_hosts.f_ipv6 == argument).select().first()
            if host_rec:
                record = db.t_hosts(host_rec['id'])
            else:
                record = None
        else:
            record = None

    return record
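A quick usage sketch (hypothetical values; it assumes the f_ipv4/f_ipv6 schema used above): the argument may be a t_hosts row id or an IPv4/IPv6 address string.

rec = get_host_record(42)           # hypothetical id: lookup by primary key
rec = get_host_record('10.0.0.5')   # lookup via the f_ipv4 column
rec = get_host_record('fe80::1')    # lookup via the f_ipv6 column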
Example #2
def ip_4_6_api_kludge(ipaddress):
    """Takes an IP address and returns a list with ipv4 and ipv6 set accordingly.

    This is a kludge for api.py JSONRPC functions.

    >>> ip_4_6_api_kludge('192.168.1.1')
    ('192.168.1.1', None)
    >>> ip_4_6_api_kludge('fdf8:f53b:82e4::53')
    (None, 'fdf8:f53b:82e4::53')

    :param ipaddress: IP Address
    :return ipv4: if ipaddress is ipv4, this is ipaddress, otherwise None
    :return ipv6: if ipaddress is ipv6, this is ipaddress, otherwise None
    """
    from gluon.validators import IS_IPADDRESS

    if IS_IPADDRESS(is_ipv4=True)(ipaddress)[1] is None:
        ipv4 = ipaddress
    else:
        ipv4 = None

    if IS_IPADDRESS(is_ipv6=True)(ipaddress)[1] is None:
        ipv6 = ipaddress
    else:
        ipv6 = None

    return ipv4, ipv6
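The "[1] is None" idiom used throughout these examples relies on web2py validators returning a (value, error) pair, where error is None on success; a minimal sketch:

from gluon.validators import IS_IPADDRESS

value, error = IS_IPADDRESS(is_ipv4=True)('192.168.1.1')
# error is None, so '192.168.1.1' passed IPv4 validation
value, error = IS_IPADDRESS(is_ipv4=True)('not-an-ip')
# error now holds the validator's failure message, so the value is rejected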
Example #3
def get_host_record(argument):
    """
    Returns a t_hosts record based on the argument. If the argument is an
    IPv4/IPv6 address, look it up and return the matching record.
    """

    if argument is None:
        return None

    from gluon.validators import IS_IPADDRESS
    db = current.globalenv['db']

    if isinstance(argument, int):
        argument = str(argument)

    record = db.t_hosts(argument) or None
    if record:
        return record
    else:
        if IS_IPADDRESS()(argument)[1] is None:
            host_rec = db(db.t_hosts.f_ipaddr == argument).select().first()
            if host_rec:
                record = db.t_hosts(host_rec['id'])
            else:
                record = None
        else:
            record = None

    return record
Example #4
def get_or_create_record(argument, **defaults):
    """
    Returns a t_hosts record based on the argument. If the argument is an IPv4/IPv6 address, it is looked up; if it
    is an integer, the matching record is returned. If no record exists and the argument is a valid IPv4/IPv6
    address, a new record is created using the defaults provided.

    :param argument: ip address or db.t_hosts.id
    :param defaults: dictionary of db.t_hosts fields, validated before inserting
    :returns: Row with id

    >>> get_or_create_record('2.2.2.2')
    <Row {'f_confirmed': False, 'f_followup': None, 'f_macaddr': None, 'f_longitude': None, 'f_vuln_count': 0L, 'f_asset_group': 'undefined', 'f_accessed': False, 'id': 1L, 'f_vuln_graph': '0,0,0,0,0,0,0,0,0,0', 'f_engineer': 1L, 'f_exploit_count': 0L, 'f_hostname': None, 'f_ipaddr': '2.2.2.2', 'f_city': None, 'f_country': None, 'f_latitude': None, 'f_netbios_name': None, 'f_service_count': 0L}>

    >>> get_or_create_record('9.9.9.9', f_engineer=9999)
    None

    >>> get_or_create_record(1)
    <Row {'f_confirmed': False, 'f_followup': None, 'f_macaddr': None, 'f_longitude': None, 'f_vuln_count': 0L, 'f_asset_group': 'undefined', 'f_accessed': False, 'id': 1L, 'f_vuln_graph': '0,0,0,0,0,0,0,0,0,0', 'f_engineer': 1L, 'f_exploit_count': 0L, 'f_hostname': None, 'f_ipaddr': '2.2.2.2', 'f_city': None, 'f_country': None, 'f_latitude': None, 'f_netbios_name': None, 'f_service_count': 0L}>

    >>> get_or_create_record(9999)
    None
    """
    if argument is None:
        return None

    from gluon.validators import IS_IPADDRESS
    db = current.globalenv['db']
    auth = current.globalenv['auth']

    record = get_host_record(argument)
    if not record:
        fields = {}
        for k in list(defaults.keys()):
            if k in db.t_hosts.fields:
                fields[k] = defaults[k]

        # set defaults for assetgroup/engineer if not set
        if 'f_asset_group' not in fields:
            fields['f_asset_group'] = 'undefined'
        if 'f_engineer' not in fields:
            fields['f_engineer'] = auth.user_id or 1

        if IS_IPADDRESS()(argument)[1] is None:
            fields['f_ipaddr'] = argument
        else:
            # invalid ip address, clear the fields
            fields = None

        if fields:
            host_rec = db.t_hosts.validate_and_insert(**fields)
            if host_rec.errors:
                log("Error creating host record: %s" % host_rec.errors,
                    logging.ERROR)
            else:
                db.commit()
                record = db.t_hosts(host_rec.get('id'))

    return record
Example #5
def process_file(filename=None, asset_group=None, engineer=None):

    # Upload and process hping Scan file
    from skaldship.hosts import get_host_record, do_host_status, add_or_update
    from gluon.validators import IS_IPADDRESS

    log(" [*] Processing hping scan file %s" % filename)

    hoststats = 0
    nodefields = {'f_engineer': engineer, 'f_asset_group': asset_group, 'f_confirmed': False}

    svc_db = db.t_services

    host_ip = None
    ICMP_type = ''
    answer_ip = ''

    with open(filename) as f:
        for line in f:
            if "IP: " in line:
                host_ip = line.split()[1]
                if IS_IPADDRESS()(host_ip)[1] is None:
                    nodefields['f_ipaddr'] = host_ip
                    db.t_hosts.update_or_insert(**nodefields)
                    db.commit()
                    hoststats += 1
                else:
                    log(" [!] ERROR: Not a valid IP Address (%s)" % host_ip, logging.ERROR)
            if "[*] " in line:
                ICMP_type = line.split()[1]
            if "ip=" in line:
                ip = line.split('=')[2]
                answer_ip = ip.split()[0]
            if "transmitted" in line:
                packets = line.split()
                if packets[0] == packets[3]:
                    if answer_ip != host_ip:
                        response = "No"
                    else:
                        response = "Yes"
                else:
                    response = "No"
                get_id = get_host_record(host_ip)
                svc_db.update_or_insert(
                    f_hosts_id=get_id.id, f_proto='ICMP', f_number='0', f_status=response, f_name=ICMP_type
                )
                db.commit()
Example #6
def launch_terminal(record=None, launch_cmd=None):
    """
    Opens a terminal on the Web Server. This only works if the
    web2py server is running on the user's workstation.

    The command to execute is stored in the user's settings db
    under auth_user.f_launch_cmd. Variables translated:

       _IP_      -- The current IP Address (v4 by default, v6 if exists)
       _LOGFILE_ -- Session logfile name (we prepend the path)

    If an IPv6 address is used then ':' is changed to '_'

    Example:

    xterm -sb -sl 1500 -vb -T 'manual hacking: _IP_' -n 'manual hacking: _IP_' -e script _LOGFILE_
    """

    record = get_host_record(record)

    # only execute launch on requests from localhost!
    if request.env['remote_addr'] != '127.0.0.1':
        logger.error("Can only launch from localhost! remote_addr = %s" %
                     (request.env['remote_addr']))
        return "Can only launch from localhost"

    if record is None:
        return "No record found"

    import string, os, subprocess
    import time
    from gluon.validators import IS_IPADDRESS

    # if no launch command use the default
    if not launch_cmd:
        launch_cmd = "xterm -sb -sl 1500 -vb -T 'manual hacking: _IP_' -n 'manual hacking: _IP_' -e 'script _LOGFILE_'"

    # check ip address
    ip = record.f_ipaddr
    logip = ip
    if IS_IPADDRESS(is_ipv6=True)(ip)[1] is None:
        logip = ip.replace(":", "_")

    logdir = "session-logs"
    logfilename = "%s-%s.log" % (
        logip, time.strftime("%Y%m%d%H%M%S", time.localtime(time.time())))
    logfile = os.path.join(logdir, logfilename)
    launch_cmd = launch_cmd.replace("_IP_", ip)
    launch_cmd = launch_cmd.replace("_LOGFILE_", logfile)

    from skaldship.general import check_datadir
    # Check to see if data directories exist, create otherwise
    check_datadir(request.folder)
    datadir = os.path.join(os.getcwd(), request.folder, "data")

    # chdir to datadir!
    launch_cmd = launch_cmd.replace("_DATADIR_", datadir)
    os.chdir(datadir)

    # set environment variables
    os.environ['IP'] = ip
    os.environ['HOSTNAME'] = record.f_hostname or ""
    os.environ['DATADIR'] = datadir

    try:
        logger.info("Spawning: %s\n" % (launch_cmd))
        print("Spawning: %s" % (launch_cmd))
        subprocess.Popen(launch_cmd,
                         shell=True)  #, stdout=None, stdin=None, stderr=None)
    except Exception as e:
        logger.error("Error spawning launch cmd (%s): %s\n" % (launch_cmd, e))
        print("Error spawning launch cmd (%s): %s\n" % (launch_cmd, e))
Example #7
    def parse_host(self, host):
        """
        Parse an XML host data from ShodanHQ results
        """
        from gluon.validators import IS_IPADDRESS
        db = current.globalenv['db']

        hostfields = {}
        ipaddr = host.get('ip')

        if self.ip_only and ipaddr not in self.ip_only:
            log(" [-] %s is not in the only list... skipping" % (ipaddr))
            #sys.stderr.write(msg)
            self.stats['hosts_skipped'] += 1
            return

        if ipaddr in self.ip_exclude:
            log(" [-] %s is in exclude list... skipping" % (ipaddr))
            self.stats['hosts_skipped'] += 1
            return

        if IS_IPADDRESS(is_ipv4=True)(ipaddr)[1] is None:
            # address is IPv4:
            hostfields['f_ipv4'] = ipaddr
        elif IS_IPADDRESS(is_ipv6=True)(ipaddr)[1] is None:
            hostfields['f_ipv6'] = ipaddr
        else:
            log(" [!] Invalid IP Address in report: %s" % (ipaddr))
            return

        hostname = host.findtext('hostnames')
        if hostname:
            hostfields['f_hostname'] = hostname

        # check to see if IP exists in DB already
        if 'f_ipv4' in hostfields:
            host_rec = db(
                db.t_hosts.f_ipv4 == hostfields['f_ipv4']).select().first()
        else:
            host_rec = db(
                db.t_hosts.f_ipv6 == hostfields['f_ipv6']).select().first()

        if host_rec is None:
            hostfields['f_asset_group'] = self.asset_group
            hostfields['f_engineer'] = self.engineer
            host_id = db.t_hosts.insert(**hostfields)
            db.commit()
            self.stats['hosts_added'] += 1
            log(" [-] Adding IP: %s" % (ipaddr))

        elif host_rec is not None:
            db.commit()
            if 'f_ipv4' in hostfields:
                host_id = db(db.t_hosts.f_ipv4 == hostfields['f_ipv4']).update(
                    **hostfields)
                db.commit()
                host_id = get_host_record(hostfields['f_ipv4'])
                host_id = host_id.id
                self.stats['hosts_updated'] += 1
                log(" [-] Updating IP: %s" % (hostfields['f_ipv4']))
            else:
                host_id = db(db.t_hosts.f_ipv6 == hostfields['f_ipv6']).update(
                    **hostfields)
                db.commit()
                host_id = get_host_record(hostfields['f_ipv6'])
                host_id = host_id.id
                self.stats['hosts_updated'] += 1
                log(" [-] Updating IP: %s" % (hostfields['f_ipv6']))

        else:
            self.stats['hosts_skipped'] += 1
            db.commit()
            log(" [-] Skipped IP: %s" % (ipaddr))
            return

        # process the service / data
        f_number = host.get('port')
        if f_number == '161':
            # only udp provided by shodanhq is snmp
            f_proto = 'udp'
        else:
            f_proto = 'tcp'

        f_status = 'open'
        f_name = ''
        addl_fields = {}

        # extract the data field for processing
        port_data = host.findtext('data')

        # for ftp, ssh, telnet and smtp, throw the data into the banner
        if f_number == '21':
            f_banner = "\n".join(self.SMTP_FTP_220.findall(port_data))
            f_name = 'FTP'
            addl_fields = {
                'ftp.banner': port_data,
            }
        elif f_number == '22':
            f_banner = port_data
            f_name = 'SSH'
            addl_fields = {
                'ssh.banner': port_data,
            }
        elif f_number == '23':
            f_banner = port_data
            f_name = 'Telnet'
        elif f_number == '25':
            f_banner = "\n".join(self.SMTP_FTP_220.findall(port_data))
            f_name = 'SMTP'
            addl_fields = {
                'smtp.banner': port_data,
            }
        elif f_number in HTTP_PORTS:
            # TODO: parse HTTP headers.. ugly
            f_banner = port_data
            f_name = 'HTTP'
            addl_fields = {
                'http.banner': port_data,
            }
        elif f_number == '1900':
            f_banner = port_data
            f_name = 'UPNP'
            addl_fields = {
                'upnp.banner': port_data,
            }
        else:
            f_banner = port_data

        query = (db.t_services.f_proto == f_proto) & (
            db.t_services.f_number == f_number) & (db.t_services.f_hosts_id
                                                   == host_id)
        svc_row = db(query).select().first()
        if svc_row:
            # we found a service record! Check for similar status, names and banners
            do_update = False
            if svc_row.f_status != f_status:
                svc_row.f_status = f_status
                do_update = True
            if svc_row.f_name != f_name:
                svc_row.f_name = f_name
                do_update = True
            if svc_row.f_banner != f_banner:
                svc_row.f_banner = f_banner
                do_update = True

            svc_id = svc_row.id
            if do_update:
                svc_row.update_record()
                db.commit()
                didwhat = "Updated"
                self.stats['services_updated'] += 1
            else:
                didwhat = "Unaltered"
        else:
            # we have a new service!
            svc_id = db.t_services.insert(f_proto=f_proto,
                                          f_number=f_number,
                                          f_status=f_status,
                                          f_name=f_name,
                                          f_banner=f_banner,
                                          f_hosts_id=host_id)
            db.commit()
            didwhat = "Added"
            self.stats['services_added'] += 1

        log(" [-] %s service: (%s) %s/%s" %
            (didwhat, ipaddr, f_proto, f_number))

        for k, v in addl_fields.items():
            # add additional field entries as service_info records
            db.t_service_info.update_or_insert(
                f_services_id=svc_id,
                f_name=k,
                f_text=v,
            )
            db.commit()
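Based only on the accessors above (host.get('ip'), host.findtext('hostnames'), host.get('port'), host.findtext('data')), a ShodanHQ host element would look roughly like the following sketch; the values and the shodan_parser instance name are hypothetical:

from lxml import etree

host = etree.fromstring(
    '<host ip="203.0.113.10" port="22">'
    '<hostnames>server.example.com</hostnames>'
    '<data>SSH-2.0-OpenSSH_7.4</data>'
    '</host>'
)
shodan_parser.parse_host(host)  # shodan_parser: an instance of the class above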
Example #8
    for host in hosts:
        didwhat = "Unknown"
        if host.findtext('state') != "alive":
            stats['hosts_skipped'] += 1
            continue

        hostfields = {}
        ipaddr = host.findtext('address')

        if len(ip_only) > 0 and ipaddr not in ip_only:
            log(" [-] Node is not in the only list... skipping")
            stats['hosts_skipped'] += 1
            continue

        if IS_IPADDRESS()(ipaddr)[1] is not None:
            logger.error("Invalid IP Address in report: %s" % ipaddr)
            log(" [!] Invalid IP Address in report: %s" % ipaddr)
            continue

        macaddr = host.findtext('mac')
        if macaddr:
            hostfields['f_macaddr'] = macaddr

        hostname = host.findtext('name')
        if hostname:
            hostfields['f_hostname'] = hostname

        # check to see if IP exists in DB already
        hostfields['f_asset_group'] = asset_group
        hostfields['f_engineer'] = engineer
Example #9
        nodefields = {}

        if len(ip_only) > 0 and node.attrib['address'] not in ip_only:
            log(" [-] Node is not in the only list... skipping")
            hoststats['skipped'] += 1
            continue

        # we'll just take the last hostname in the names list since it'll usually be the full dns name
        names = node.findall("names/name")
        for name in names:
            nodefields['f_hostname'] = name.text

        ip = node.attrib['address']

        if IS_IPADDRESS()(ip)[1] is None:
            nodefields['f_ipaddr'] = ip
        else:
            log(" [!] Invalid IP Address: %s" % ip, logging.ERROR)

        nodefields['f_engineer'] = user_id
        nodefields['f_asset_group'] = asset_group
        nodefields['f_confirmed'] = False

        if 'hardware-address' in node.attrib:
            nodefields['f_macaddr'] = node.attrib['hardware-address']
        if node.find('names/name') is not None:
            # XXX: for now just take the first hostname
            nodefields['f_hostname'] = node.find('names/name').text

        # check to see if IP exists in DB already
Example #10
    def parse(self, host_properties):
        """
        Parse out the <HostProperties> xml content or CSV line.

        There can be a number of <tag> entries that are either useful to us in
        t_hosts or other areas. These are processed and returned as dictionary
        entries in 'hostdata'

        Args:
            host_properties: A <HostProperties> section from .nessus or a CSV line

        Returns:
            t_hosts.id, { hostdata }
        """
        from gluon.validators import IS_IPADDRESS
        hostdata = {}
        if etree.iselement(host_properties):
            for tag in host_properties.findall('tag'):
                hostdata[tag.get('name')] = tag.text
            ipaddr = hostdata.get('host-ip')
        else:
            # with CSV each line has all the hostdata fields so we set them here for use later
            ipaddr = host_properties.get('IP Address')
            if not ipaddr:
                # Scanner CSV, use Host
                ipaddr = host_properties.get('Host')
            hostdata['mac-address'] = host_properties.get('MAC Address', '')
            hostdata['host-fqdn'] = host_properties.get('DNS Name', '')
            hostdata['netbios-name'] = host_properties.get('NetBIOS Name', '')

        if (ipaddr not in self.ip_include
                and self.ip_include) or (ipaddr in self.ip_exclude):
            log("Host in exclude or not in include list, skipping")
            self.stats['skipped'] += 1
            return None, {}

        host_id = get_host_record(ipaddr)
        if host_id and not self.update_hosts:
            return host_id, hostdata

        # new host found, pull what we need for t_hosts
        hostfields = {}
        hostfields['f_engineer'] = self.engineer
        hostfields['f_asset_group'] = self.asset_group
        hostfields['f_confirmed'] = False

        # check ipv4/ipv6 and set hostfields accordingly
        if IS_IPADDRESS(is_ipv4=True)(ipaddr)[1] is None:
            hostfields['f_ipv4'] = ipaddr
        elif IS_IPADDRESS(is_ipv6=True)(ipaddr)[1] is None:
            hostfields['f_ipv6'] = ipaddr
        else:
            log("Invalid IP Address in HostProperties: %s" % ipaddr,
                logging.ERROR)
            return None, {}

        # pull out relevant hostfields
        for (k, v) in hostdata.items():
            if k == 'mac-address':
                # multiple mac addrs may appear wildly, just pull the first
                hostfields['f_macaddr'] = v.split('\n')[0]
            elif k == 'host-fqdn':
                hostfields['f_hostname'] = v
            elif k == 'netbios-name':
                hostfields['f_netbios_name'] = v

        if not self.update_hosts and not host_id:
            result = self.db.t_hosts.validate_and_insert(**hostfields)
            if not result.id:
                log("Error adding host to DB: %s" % result.errors,
                    logging.ERROR)
                return None, {}
            self.stats['added'] += 1
            host_id = result.id
            log(" [-] Adding host: %s" % ipaddr)
        elif self.update_hosts:
            if hostfields.get('f_ipv4'):
                host_id = self.db(
                    self.db.t_hosts.f_ipv4 == hostfields['f_ipv4']).update(
                        **hostfields)
                self.db.commit()
                host_id = get_host_record(hostfields['f_ipv4'])
                if host_id:
                    host_id = host_id.id
                log(" [-] Updating IP: %s" % (hostfields['f_ipv4']))
            else:
                host_id = self.db(
                    self.db.t_hosts.f_ipv6 == hostfields['f_ipv6']).update(
                        **hostfields)
                self.db.commit()
                host_id = get_host_record(hostfields['f_ipv6'])
                host_id = host_id.id
                log(" [-] Updating IP: %s" % (hostfields['f_ipv6']))
            self.stats['updated'] += 1

        return host_id, hostfields
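To make the expected input concrete, here is a minimal sketch of the <HostProperties> layout this parser consumes (tag names follow the .nessus v2 convention read by the code above; the values and the nessus_parser instance name are hypothetical):

from lxml import etree

host_properties = etree.fromstring(
    '<HostProperties>'
    '<tag name="host-ip">10.0.0.5</tag>'
    '<tag name="host-fqdn">host.example.com</tag>'
    '<tag name="mac-address">00:11:22:33:44:55</tag>'
    '<tag name="netbios-name">HOST01</tag>'
    '</HostProperties>'
)
host_id, hostdata = nessus_parser.parse(host_properties)  # nessus_parser: an instance of the class above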
Example #11
        nodefields = {}

        if len(ip_only) > 0 and node.attrib['address'] not in ip_only:
            log(" [-] Node is not in the only list... skipping")
            hoststats['skipped'] += 1
            continue

        # we'll just take the last hostname in the names list since it'll usually be the full dns name
        names = node.findall("names/name")
        for name in names:
            nodefields['f_hostname'] = name.text

        ip = node.attrib['address']

        if IS_IPADDRESS(is_ipv4=True)(ip)[1] is None:
            nodefields['f_ipv4'] = ip
        elif IS_IPADDRESS(is_ipv6=True)(ip)[1] is None:
            nodefields['f_ipv6'] = ip
        else:
            log(" [!] Invalid IP Address: %s" % ip, logging.ERROR)

        nodefields['f_engineer'] = user_id
        nodefields['f_asset_group'] = asset_group
        nodefields['f_confirmed'] = False

        if 'hardware-address' in node.attrib:
            nodefields['f_macaddr'] = node.attrib['hardware-address']
        if node.find('names/name') is not None:
            # XXX: for now just take the first hostname
            nodefields['f_hostname'] = node.find('names/name').text
Example #12
    for host in hosts:
        didwhat = "Unknown"
        if host.findtext('state') != "alive":
            stats['hosts_skipped'] += 1
            continue

        hostfields = {}
        ipaddr = host.findtext('address')

        if len(ip_only) > 0 and ipaddr not in ip_only:
            print(" [-] Node is not in the only list... skipping")
            #sys.stderr.write(msg)
            stats['hosts_skipped'] += 1
            continue

        if IS_IPADDRESS(is_ipv4=True)(ipaddr)[1] is None:
            # address is IPv4:
            hostfields['f_ipv4'] = ipaddr
        elif IS_IPADDRESS(is_ipv6=True)(ipaddr)[1] is None:
            hostfields['f_ipv6'] = ipaddr
        else:
            logger.error("Invalid IP Address in report: %s" % (ipaddr))
            print(" [!] Invalid IP Address in report: %s" % (ipaddr))
            continue

        macaddr = host.findtext('mac')
        if macaddr:
            hostfields['f_macaddr'] = macaddr

        hostname = host.findtext('name')
        if hostname:
Example #13
def process_xml(
    filename=None,
    asset_group=None,
    engineer=None,
    msf_settings={},
    ip_ignore_list=None,
    ip_include_list=None,
    update_hosts=False,
):
    # Upload and process Nexpose XML Scan file

    from skaldship.cpe import lookup_cpe
    from skaldship.hosts import get_host_record
    from gluon.validators import IS_IPADDRESS
    import os

    db = current.globalenv['db']
    session = current.globalenv['session']

    parser = html.parser.HTMLParser()
    user_id = db.auth_user(engineer)

    # build the hosts only/exclude list
    ip_exclude = []
    if ip_ignore_list:
        ip_exclude = ip_ignore_list.split('\r\n')
        # TODO: check for ip subnet/range and break it out to individuals
    ip_only = []
    if ip_include_list:
        ip_only = ip_include_list.split('\r\n')
        # TODO: check for ip subnet/range and break it out to individuals

    log(" [*] Processing Nexpose scan file %s" % filename)

    try:
        nexpose_xml = etree.parse(filename)
    except etree.ParseError as e:
        msg = " [!] Invalid Nexpose XML file (%s): %s " % (filename, e)
        log(msg, logging.ERROR)
        return msg

    root = nexpose_xml.getroot()

    existing_vulnids = db(db.t_vulndata()).select(
        db.t_vulndata.id, db.t_vulndata.f_vulnid).as_dict(key='f_vulnid')
    log(" [*] Found %d vulnerabilities in the database already." %
        len(existing_vulnids))

    # start with the vulnerability details
    vulns_added = 0
    vulns_skipped = 0
    vulns = root.findall("VulnerabilityDefinitions/vulnerability")
    log(" [*] Parsing %d vulnerabilities" % len(vulns))
    for vuln in vulns:

        # nexpose identifiers are always lower case in kvasir. UPPER CASE IS FOR SHOUTING!!!
        vulnid = vuln.attrib['id'].lower()
        if vulnid in existing_vulnids:
            #log(" [-] Skipping %s - It's in the db already" % vulnid)
            vulns_skipped += 1
        else:
            # add the vulnerability to t_vulndata - any duplicates are errored out
            (vulnfields, references) = vuln_parse(vuln, fromapi=False)
            try:
                vulnid = db.t_vulndata.update_or_insert(**vulnfields)
                if not vulnid:
                    vulnid = db(db.t_vulndata.f_vulnid ==
                                vulnfields['f_vulnid']).select().first().id
                vulns_added += 1
                db.commit()
            except Exception as e:
                log(
                    " [!] Error inserting %s to vulndata: %s" %
                    (vulnfields['f_vulnid'], e), logging.ERROR)
                vulnid = None
                db.commit()
                continue

            # add the references
            if vulnid is not None:
                for reference in references:
                    # check to see if reference exists first
                    ref_id = db(db.t_vuln_refs.f_text == reference[1])
                    if ref_id.count() == 0:
                        # add because it doesn't
                        ref_id = db.t_vuln_refs.insert(f_source=reference[0],
                                                       f_text=reference[1])
                        db.commit()
                    else:
                        # pick the first reference as the ID
                        ref_id = ref_id.select()[0].id

                    # make many-to-many relationship with t_vuln_data
                    res = db.t_vuln_references.insert(f_vuln_ref_id=ref_id,
                                                      f_vulndata_id=vulnid)
                    db.commit()

    log(" [*] %d Vulnerabilities added, %d skipped" %
        (vulns_added, vulns_skipped))

    # re-make the existing_vulnids dict() since we've updated the system
    existing_vulnids = db(db.t_vulndata()).select(
        db.t_vulndata.id, db.t_vulndata.f_vulnid).as_dict(key='f_vulnid')

    # parse the nodes now
    nodes = root.findall("nodes/node")
    log(" [-] Parsing %d nodes" % len(nodes))
    hoststats = {'added': 0, 'skipped': 0, 'updated': 0, 'errored': 0}
    hosts = []  # array of host_id fields
    for node in nodes:
        log(" [-] Node %s status is: %s" %
            (node.attrib['address'], node.attrib['status']))
        #sys.stderr.write(msg)
        if node.attrib['status'] != "alive":
            hoststats['skipped'] += 1
            continue

        if node.attrib['address'] in ip_exclude:
            log(" [-] Node is in exclude list... skipping")
            hoststats['skipped'] += 1
            continue

        nodefields = {}

        if len(ip_only) > 0 and node.attrib['address'] not in ip_only:
            log(" [-] Node is not in the only list... skipping")
            hoststats['skipped'] += 1
            continue

        # we'll just take the last hostname in the names list since it'll usually be the full dns name
        names = node.findall("names/name")
        for name in names:
            nodefields['f_hostname'] = name.text

        ip = node.attrib['address']

        if IS_IPADDRESS()(ip)[1] is None:
            nodefields['f_ipaddr'] = ip
        else:
            log(" [!] Invalid IP Address: %s" % ip, logging.ERROR)

        nodefields['f_engineer'] = user_id
        nodefields['f_asset_group'] = asset_group
        nodefields['f_confirmed'] = False

        if 'hardware-address' in node.attrib:
            nodefields['f_macaddr'] = node.attrib['hardware-address']
        if node.find('names/name') is not None:
            # XXX: for now just take the first hostname
            nodefields['f_hostname'] = node.find('names/name').text

        # check to see if IP exists in DB already
        query = (db.t_hosts.f_ipaddr == ip)
        host_rec = db(query).select().first()
        if host_rec is None:
            host_id = db.t_hosts.insert(**nodefields)
            db.commit()
            hoststats['added'] += 1
            log(" [-] Adding IP: %s" % ip)
        elif update_hosts:
            db.commit()
            db(db.t_hosts.f_ipaddr == nodefields['f_ipaddr']).update(
                **nodefields)
            db.commit()
            host_id = get_host_record(nodefields['f_ipaddr'])
            host_id = host_id.id
            hoststats['updated'] += 1
            log(" [-] Updating IP: %s" % ip)
        else:
            hoststats['skipped'] += 1
            db.commit()
            log(" [-] Skipped IP: %s" % ip)
            continue
        hosts.append(host_id)

        # tests that aren't specific to any port we wrap up into a meta service
        # called "INFO"
        tests = node.findall("tests/test")
        if len(tests) > 0:
            svc_id = db.t_services.update_or_insert(f_proto="info",
                                                    f_number="0",
                                                    f_status="info",
                                                    f_hosts_id=host_id)
            db.commit()

        for test in tests:
            d = {}
            vulnid = test.get('id').lower()

            # we may have valid username.
            if "cifs-acct-" in vulnid:
                username = test.get('key')
                if username is not None:
                    d['f_services_id'] = svc_id
                    d['f_username'] = username
                    d['f_active'] = True
                    d['f_source'] = vulnid
                    query = (db.t_accounts.f_services_id == d['f_services_id']) &\
                            (db.t_accounts.f_username == d['f_username'])
                    db.t_accounts.update_or_insert(query, **d)
                    db.commit()

            if test.attrib['status'] in (
                    'vulnerable-exploited', 'potential',
                    'exception-vulnerable-exploited',
                    'exception-vulnerable-version',
                    'exception-vulnerable-potential',
                    'vulnerable-version'):
                if vulnid in existing_vulnids:
                    vuln_id = existing_vulnids[vulnid]['id']
                else:
                    continue

                if vulnid == 'cifs-nt-0001':
                    # Windows users, local groups, and global groups
                    infotext = nx_xml_to_html(
                        StringIO(etree.tostring(test, xml_declaration=False)))
                    try:
                        unames = re.search(
                            "Found user\(s\): (?P<unames>.+?) </li>",
                            infotext).group('unames')
                    except AttributeError as e:
                        # regex not found
                        continue
                    for uname in unames.split():
                        # add account
                        d['f_username'] = uname
                        d['f_services_id'] = svc_id
                        d['f_source'] = 'cifs-nt-0001'
                        db.t_accounts.update_or_insert(**d)
                        db.commit()

                test_str = etree.tostring(test,
                                          xml_declaration=False,
                                          encoding=str)
                test_str = test_str.encode('ascii', 'xmlcharrefreplace')
                proof = nx_xml_to_html(StringIO(test_str))
                proof = html_to_markmin(proof)

                if vulnid == 'cifs-insecure-acct-lockout-limit':
                    d['f_hosts_id'] = host_id
                    try:
                        d['f_lockout_limit'] = re.search(
                            "contains: (?P<l>\d+)", proof).group('l')
                    except AttributeError:
                        d['f_lockout_limit'] = 0
                    query = (db.t_netbios.f_hosts_id == host_id)
                    db.t_netbios.update_or_insert(query, **d)
                    db.commit()

                # Check for CIFS uid/pw
                if "cifs-" in vulnid:
                    try:
                        uid = re.search("uid\[(?P<u>.*?)\]", proof).group('u')
                        pw = re.search("pw\[(?P<p>.*?)\]", proof).group('p')
                        realm = re.search("realm\[(?P<r>.*?)\]",
                                          proof).group('r')
                        d = {
                            'f_services_id': svc_id,
                            'f_username': uid,
                            'f_password': pw,
                            'f_description': realm,
                            'f_active': True,
                            'f_compromised': True,
                            'f_source': vulnid
                        }
                        query = (db.t_accounts.f_services_id
                                 == svc_id) & (db.t_accounts.f_username == uid)
                        db.t_accounts.update_or_insert(query, **d)
                        db.commit()
                    except AttributeError:
                        db.commit()
                    except Exception as e:
                        log("Error inserting account (%s): %s" % (uid, e),
                            logging.ERROR)
                    db.commit()

                # solaris-kcms-readfile shadow file
                if vulnid.lower() == "rpc-solaris-kcms-readfile":
                    # funky chicken stuff, if they mess with this output then we've got to
                    # change this around as well. thems the breaks, maynard!
                    shadow = parser.unescape(proof)
                    for line in shadow.split("<br />")[1:-1]:
                        user, pw, uid = line.split(':')[0:3]
                        d['f_services_id'] = svc_id
                        d['f_username'] = user
                        d['f_hash1'] = pw
                        d['f_hash1_type'] = "crypt"
                        d['f_uid'] = uid
                        d['f_source'] = "shadow"
                        d['f_active'] = True
                        d['f_source'] = "rpc-solaris-kcms-readfile"
                        query = (db.t_accounts.f_services_id == svc_id) & (
                            db.t_accounts.f_username == user)
                        db.t_accounts.update_or_insert(query, **d)
                        db.commit()

                db.t_service_vulns.update_or_insert(
                    f_services_id=svc_id,
                    f_status=test.attrib['status'],
                    f_proof=proof,
                    f_vulndata_id=vuln_id)

                if "cisco-default-http-account" in vulnid.lower():
                    d['f_services_id'] = svc_id
                    d['f_username'] = vulnid.split('-')[4]
                    d['f_password'] = vulnid.split('-')[6]
                    d['f_source'] = "cisco-default-http-account"
                    query = (db.t_accounts.f_services_id == svc_id) & (
                        db.t_accounts.f_username == d['f_username'])
                    db.t_accounts.update_or_insert(query, **d)
                    db.commit()

        # add services (ports) and resulting vulndata
        for endpoint in node.findall("endpoints/endpoint"):
            f_proto = endpoint.attrib['protocol']
            f_number = endpoint.attrib['port']
            f_status = endpoint.attrib['status']

            query = (db.t_services.f_hosts_id == host_id) \
                    & (db.t_services.f_proto == f_proto) \
                    & (db.t_services.f_number == f_number)
            svc_id = db.t_services.update_or_insert(query,
                                                    f_proto=f_proto,
                                                    f_number=f_number,
                                                    f_status=f_status,
                                                    f_hosts_id=host_id)
            if not svc_id:
                svc_id = db(query).select().first().id

            for service in endpoint.findall("services/service"):
                d = {}
                if 'name' in service.attrib:
                    db.t_services[svc_id] = dict(f_name=service.attrib['name'])

                for test in service.findall("tests/test"):
                    vulnid = test.get('id').lower()

                    if test.attrib['status'] in (
                            'vulnerable-exploited', 'potential',
                            'exception-vulnerable-exploited',
                            'exception-vulnerable-version',
                            'exception-vulnerable-potential',
                            'vulnerable-version'):
                        if vulnid in existing_vulnids:
                            vuln_id = existing_vulnids[vulnid]['id']
                        else:
                            log(
                                " [!] Unknown vulnid, Skipping! (id: %s)" %
                                vulnid, logging.ERROR)
                            continue

                        test_str = etree.tostring(test,
                                                  xml_declaration=False,
                                                  encoding=str)
                        test_str = test_str.encode('ascii',
                                                   'xmlcharrefreplace')
                        proof = nx_xml_to_html(StringIO(test_str))
                        proof = html_to_markmin(proof)

                        # Check for SNMP strings
                        if "snmp-read-" in vulnid:
                            snmpstring = re.search("pw\[(?P<pw>.*?)\]",
                                                   proof).group('pw')
                            db.t_snmp.update_or_insert(f_hosts_id=host_id,
                                                       f_community=snmpstring,
                                                       f_access="READ",
                                                       f_version="v1")
                            db.commit()

                        if "snmp-write" in vulnid:
                            snmpstring = re.search("pw\[(?P<pw>.*?)\]",
                                                   proof).group('pw')
                            db.t_snmp.update_or_insert(f_hosts_id=host_id,
                                                       f_community=snmpstring,
                                                       f_access="WRITE",
                                                       f_version="v1")
                            db.commit()

                        # TODO: account names

                        # Dell DRAC root/calvin
                        if vulnid == "http-drac-default-login":
                            d['f_services_id'] = svc_id
                            d['f_username'] = 'root'
                            d['f_password'] = 'calvin'
                            d['f_active'] = True
                            d['f_compromised'] = True
                            d['f_source'] = vulnid
                            query = (db.t_accounts.f_services_id == svc_id) & (
                                db.t_accounts.f_username == 'root')
                            db.t_accounts.update_or_insert(query, **d)
                            db.commit()

                        # Check for uid/pw
                        if "ftp-iis-" in vulnid or \
                           "telnet-" in vulnid or \
                           "cifs-" in vulnid or \
                           "tds-" in vulnid or \
                           "oracle-" in vulnid or \
                           "-default-" in vulnid or \
                           "ftp-generic-" in vulnid:
                            try:
                                uid = re.search("uid\[(?P<u>.*?)\]",
                                                proof).group('u')
                                pw = re.search("pw\[(?P<p>.*?)\]",
                                               proof).group('p')
                                realm = re.search("realm\[(?P<r>.*?)\]",
                                                  proof).group('r')
                                d['f_services_id'] = svc_id
                                d['f_username'] = uid
                                d['f_password'] = pw
                                d['f_description'] = realm
                                d['f_active'] = True
                                d['f_compromised'] = True
                                d['f_source'] = vulnid
                                query = (db.t_accounts.f_services_id
                                         == svc_id) & (db.t_accounts.f_username
                                                       == uid)
                                db.t_accounts.update_or_insert(query, **d)
                                db.commit()
                            except AttributeError:
                                db.commit()
                            except Exception as e:
                                log(
                                    "Error inserting account (%s): %s" %
                                    (uid, e), logging.ERROR)
                            db.commit()

                        # cisco default http login accounts
                        if "cisco-default-http-account" in vulnid.lower():
                            d['f_services_id'] = svc_id
                            d['f_username'] = vulnid.split('-')[4]
                            d['f_password'] = vulnid.split('-')[6]
                            d['f_source'] = "cisco-default-http-account"
                            query = (db.t_accounts.f_services_id == svc_id) \
                                    & (db.t_accounts.f_username == d['f_username'])
                            db.t_accounts.update_or_insert(query, **d)
                            db.commit()

                        db.t_service_vulns.update_or_insert(
                            f_services_id=svc_id,
                            f_status=test.attrib['status'],
                            f_proof=proof,
                            f_vulndata_id=vuln_id)
                        db.commit()

                for config in service.findall("configuration/config"):
                    db.t_service_info.update_or_insert(
                        f_services_id=svc_id,
                        f_name=config.attrib['name'],
                        f_text=config.text)
                    db.commit()
                    if re.match('\w+.banner$', config.attrib['name']):
                        db.t_services[svc_id] = dict(f_banner=config.text)
                        db.commit()
                    if config.attrib['name'] == 'mac-address':
                        # update the mac address of the host
                        db.t_hosts[host_id] = dict(f_macaddr=config.text)
                        db.commit()
                    if "advertised-name" in config.attrib['name']:
                        # netbios computer name
                        d = config.text.split(" ")[0]
                        if "Computer Name" in config.text:
                            data = {'f_netbios_name': d}
                            # if hostname isn't defined then lowercase netbios name and put it in
                            if db.t_hosts[host_id].f_hostname is None:
                                data['f_hostname'] = d.lower()
                            db(db.t_hosts.id == host_id).update(**data)
                            db.commit()
                        elif "Domain Name" in config.text:
                            query = (db.t_netbios.f_hosts_id == host_id)
                            db.t_netbios.update_or_insert(query,
                                                          f_hosts_id=host_id,
                                                          f_domain=d)
                        db.commit()

        for os_rec in node.findall('fingerprints/os'):
            """
            <os  certainty="1.00" device-class="Workstation" vendor="Microsoft" family="Windows" product="Windows 2000 Professional" version="SP4" arch="x86"/>

            if using SCAP output the os line looks like:

            <os  certainty="0.66" device-class="General" vendor="Microsoft" family="Windows" product="Windows XP" arch="x86" cpe="cpe:/o:microsoft:windows_xp::sp3"/>
            """

            if 'cpe' in os_rec.attrib:
                # we have a cpe entry from xml! hooray!
                cpe_name = os_rec.attrib['cpe'].replace('cpe:/o:', '')
                os_id = lookup_cpe(cpe_name)
            else:
                # no cpe attribute in xml, go through our messy lookup
                os_id = guess_cpe_os(os_rec)

            if os_id is not None:
                db.t_host_os_refs.update_or_insert(
                    f_certainty=os_rec.attrib['certainty'],
                    f_family=os_rec.get('family', 'Unknown'),
                    f_class=os_rec.get('device-class', 'Other'),
                    f_hosts_id=host_id,
                    f_os_id=os_id)
                db.commit()
            else:
                log(
                    " [!] os_rec could not be parsed: %s" %
                    etree.tostring(os_rec), logging.ERROR)

        db.commit()

    if msf_settings.get('workspace'):
        try:
            # check to see if we have a Metasploit RPC instance configured and talking
            from MetasploitProAPI import MetasploitProAPI
            msf_api = MetasploitProAPI(host=msf_settings.get('url'),
                                       apikey=msf_settings.get('key'))
            working_msf_api = msf_api.login()
        except Exception as error:
            log(" [!] Unable to authenticate to MSF API: %s" % str(error),
                logging.ERROR)
            working_msf_api = False

        try:
            scan_data = open(filename, "r+").readlines()
        except Exception as error:
            log(
                " [!] Error loading scan data to send to Metasploit: %s" %
                str(error), logging.ERROR)
            scan_data = None

        if scan_data and working_msf_api:
            task = msf_api.pro_import_data(
                msf_settings.get('workspace'),
                "".join(scan_data),
                {
                    #'preserve_hosts': form.vars.preserve_hosts,
                    'blacklist_hosts': "\n".join(ip_ignore_list)
                },
            )

            msf_workspace_num = session.msf_workspace_num or 'unknown'
            msfurl = os.path.join(msf_settings.get('url'), 'workspaces',
                                  msf_workspace_num, 'tasks', task['task_id'])
            log(" [*] Added file to MSF Pro: %s" % msfurl)

    # any new nexpose vulns need to be checked against exploits table and connected
    log(" [*] Connecting exploits to vulns and performing do_host_status")
    connect_exploits()
    do_host_status(asset_group=asset_group)

    msg = " [*] Import complete: hosts: %s added, %s skipped, %s errors - vulns: %s added, %s skipped" % (
        hoststats['added'], hoststats['skipped'], hoststats['errored'],
        vulns_added, vulns_skipped)
    log(msg)
    return msg
Example #14
def process_report_xml(
    filename=None,
    ip_ignore_list=None,
    ip_include_list=None,
    engineer=1,
    asset_group="Metasploit Import",
    update_hosts=True,
):
    """
    Processes a Metasploit XML Export for the following data and adds to the db:

    - Hosts and services
    - Credentials

    Generate the XML report by using db_export -t xml filename.xml or through WebUI

    TODO: Auto-exploits successful exploit attempts if matching CVE/VulnDB entry found
    """
    from gluon.validators import IS_IPADDRESS
    from skaldship.passwords.utils import lookup_hash
    from skaldship.hosts import get_host_record, get_or_create_record
    from skaldship.services import Services
    services = Services()

    db = current.globalenv['db']
    #cache = current.globalenv['cache']

    try:
        from lxml import etree
    except ImportError:
        try:
            import xml.etree.cElementTree as etree
        except ImportError:
            try:
                import xml.etree.ElementTree as etree
            except ImportError:
                raise Exception("Unable to find valid ElementTree module.")

    # build the hosts only/exclude list
    ip_exclude = []
    if ip_ignore_list:
        ip_exclude = ip_ignore_list.split('\r\n')
        # TODO: check for ip subnet/range and break it out to individuals
    ip_only = []
    if ip_include_list:
        ip_only = ip_include_list
        # TODO: check for ip subnet/range and break it out to individuals

    log(" [*] Processing Metasploit Pro report file: %s" % (filename))

    try:
        xml = etree.parse(filename)
    except etree.ParseError as e:
        raise Exception(" [!] Invalid XML file (%s): %s " % (filename, e))

    root = xml.getroot()

    # parse the hosts now
    hosts = root.findall("hosts/host")
    log(" [-] Parsing %d hosts" % (len(hosts)))
    stats = {}
    stats['hosts_added'] = 0
    stats['hosts_skipped'] = 0
    stats['hosts_updated'] = 0
    stats['services_added'] = 0
    stats['services_updated'] = 0
    stats['accounts_added'] = 0
    stats['accounts_updated'] = 0

    for host in hosts:
        didwhat = "Unknown"
        if host.findtext('state') != "alive":
            stats['hosts_skipped'] += 1
            continue

        hostfields = {}
        ipaddr = host.findtext('address')

        if len(ip_only) > 0 and ipaddr not in ip_only:
            log(" [-] Node is not in the only list... skipping")
            stats['hosts_skipped'] += 1
            continue

        if IS_IPADDRESS()(ipaddr)[1] is not None:
            logger.error("Invalid IP Address in report: %s" % ipaddr)
            log(" [!] Invalid IP Address in report: %s" % ipaddr)
            continue

        macaddr = host.findtext('mac')
        if macaddr:
            hostfields['f_macaddr'] = macaddr

        hostname = host.findtext('name')
        if hostname:
            hostfields['f_hostname'] = hostname

        # check to see if IP exists in DB already
        hostfields['f_asset_group'] = asset_group
        hostfields['f_engineer'] = engineer

        if update_hosts:
            # update or add, doesn't matter which
            host_rec = get_or_create_record(ipaddr, **hostfields)
            stats['hosts_added'] += 1
        else:
            # update_hosts is False: look up the host record; create it only if it doesn't exist, otherwise skip it
            host_rec = get_host_record(ipaddr)
            if not host_rec:
                host_rec = get_or_create_record(ipaddr, **hostfields)
                stats['hosts_added'] += 1
                log(" [-] Adding IP: %s" % (ipaddr))
            else:
                stats['hosts_skipped'] += 1
                log(" [-] Skipped IP: %s" % (ipaddr))
                continue

        # add the <info> and <comments> as a note to the host
        info_note = host.findtext('info') or None
        if info_note and info_note.startswith('Domain controller for '):
            db.t_netbios.update_or_insert(f_hosts_id=host_rec.id,
                                          f_type="PDC",
                                          f_domain=info_note[22:].upper())
        elif info_note:
            db.t_host_notes.update_or_insert(
                f_hosts_id=host_rec.id,
                f_note=info_note,
            )
        db.commit()
        for comment in host.findall('comments/comment'):
            db.t_host_notes.update_or_insert(
                f_hosts_id=host_rec.id,
                f_note=comment.text,
            )

        # process the services, adding any new
        for svc in host.findall('services/service'):
            svc_fields = {
                'f_number': svc.findtext('port'),
                'f_proto': svc.findtext('proto'),
                'f_status': svc.findtext('state'),
                'f_name': svc.findtext('name') or '',
                'f_banner': svc.findtext('info') or '',
                'f_hosts_id': host_rec.id,
            }

            if svc_fields['f_name'] in ['http', 'https']:
                svc_fields['f_name'] = svc_fields['f_name'].upper()

            svc_rec = services.get_record(create_or_update=True, **svc_fields)

        for cred in host.findall('creds/cred'):
            # handle credential data
            f_password = None
            f_compromised = False

            cred_type = cred.findtext('ptype')
            if cred_type == "smb_hash":
                # add smb hashes to info/0 service
                svc_fields = {
                    'f_number': '0',
                    'f_proto': 'info',
                    'f_hosts_id': host_rec.id,
                }
                svc_rec = services.get_record(create_or_update=True,
                                              **svc_fields)

                pwhash = cred.findtext('pass')
                f_password = lookup_hash(pwhash)
                (lm, nt) = pwhash.split(':')
                user = cred.findtext('user')
                query = (db.t_accounts.f_services_id == svc_rec.id) & (
                    db.t_accounts.f_username.upper() == user.upper())
                acct_row = db(query).select().first()
                if acct_row:
                    # we have an account already, lets see if the hashes are in there
                    h1 = acct_row.f_hash1
                    if isinstance(h1, str):
                        if acct_row.f_hash1.upper() != lm.upper():
                            acct_row.f_hash1 = lm.upper()
                            acct_row.f_hash1_type = "LM"
                            acct_row.f_hash2 = nt.upper()
                            acct_row.f_hash2_type = "NT"
                            if f_password:
                                acct_row.f_compromised = True
                                acct_row.f_password = f_password
                            if not acct_row.f_source:
                                acct_row.f_source = "Metasploit Import"
                            acct_row.update_record()
                            db.commit()
                            stats['accounts_updated'] += 1
                            didwhat = "Updated"
                else:
                    # add a new account record
                    if f_password:
                        f_compromised = True
                    else:
                        f_compromised = False
                    acct_data = dict(f_services_id=svc_rec.id,
                                     f_username=user,
                                     f_password=f_password,
                                     f_compromised=f_compromised,
                                     f_hash1=lm.upper(),
                                     f_hash1_type='LM',
                                     f_hash2=nt.upper(),
                                     f_hash2_type='NT',
                                     f_source="Metasploit Import")
                    acct_id = db.t_accounts.insert(**acct_data)
                    db.commit()
                    stats['accounts_added'] += 1
                    didwhat = "Added"

            elif cred_type == 'smb_challenge':
                # add smb challenge hashes to info/0 service
                svc_fields = {
                    'f_number': '0',
                    'f_proto': 'info',
                    'f_hosts_id': host_rec.id,
                }
                svc_rec = services.get_record(create_or_update=True,
                                              **svc_fields)

                user = cred.findtext('user')
                pwhash = cred.findtext('pass')
                f_password = lookup_hash(pwhash)
                query = (db.t_accounts.f_services_id == svc_rec.id) & (
                    db.t_accounts.f_username.upper() == user.upper())
                acct_row = db(query).select().first()
                if acct_row:
                    # we have an account already; let's see if the hashes are in there
                    h1 = acct_row.f_hash1
                    if isinstance(h1, str):
                        if acct_row.f_hash1.upper() != pwhash.upper():
                            acct_row.f_password = f_password
                            acct_row.f_hash1 = pwhash.upper()
                            acct_row.f_hash1_type = 'NTCHALLENGE'
                            acct_row.f_domain = cred.findtext('proof')
                            if not acct_row.f_source:
                                acct_row.f_source = "Metasploit Capture"
                            acct_row.update_record()
                            db.commit()
                            stats['accounts_updated'] += 1
                            didwhat = "Updated"
                else:
                    # new account record
                    f_password = lookup_hash(pwhash)
                    if f_password:
                        f_compromised = True
                    else:
                        f_compromised = False
                    acct_data = dict(f_services_id=svc_rec.id,
                                     f_username=user,
                                     f_password=f_password,
                                     f_compromised=f_compromised,
                                     f_hash1=pwhash.upper(),
                                     f_hash1_type='NTCHALLENGE',
                                     f_source="Metasploit Capture")
                    acct_id = db.t_accounts.insert(**acct_data)
                    db.commit()
                    stats['accounts_added'] += 1
                    didwhat = "Added"

            elif cred_type == 'rakp_hmac_sha1_hash':
                # IPMI 2.0 RAKP Remote SHA1 Hashes

                f_hash1 = cred.findtext('pass')
                f_hash1_type = cred.findtext('ptype')
                user = cred.findtext('user')
                svcname = cred.findtext('sname')
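                # NOTE: svc_rec here is whatever service record was processed
                # last above; the cred's own sname/port are not used to look
                # up a service for RAKP hashes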

                query = (db.t_accounts.f_services_id == svc_rec.id) & (
                    db.t_accounts.f_username.upper() == user.upper())
                acct_row = db(query).select().first()
                f_source = "Metasploit Import"
                if acct_row:
                    # we have an account already; let's see if the hashes are in there
                    if acct_row.f_hash1 != f_hash1:
                        acct_row.f_hash1 = f_hash1
                        acct_row.f_hash1_type = f_hash1_type
                        if not acct_row.f_source:
                            acct_row.f_source = f_source
                        acct_row.update_record()
                        db.commit()
                        stats['accounts_updated'] += 1
                        didwhat = "Updated"
                else:
                    # new account record
                    acct_data = dict(f_services_id=svc_rec.id,
                                     f_username=user,
                                     f_hash1=f_hash1,
                                     f_hash1_type=f_hash1_type,
                                     f_source=f_source,
                                     f_compromised=True)
                    acct_id = db.t_accounts.insert(**acct_data)
                    db.commit()
                    stats['accounts_added'] += 1
                    didwhat = "Added"

            else:
                # for cred_type == 'password' or 'exploit':
                # add regular password
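                # SMB creds (port 445) are stored against the info/0 service,
                # consistent with the hash-handling branches above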
                if svc_fields['f_number'] == '445':
                    svc_fields['f_proto'] = 'info'
                    svc_fields['f_number'] = '0'

                svc_rec = services.get_record(create_or_update=True,
                                              **svc_fields)

                f_password = cred.findtext('pass')
                if f_password == "*BLANK PASSWORD*":
                    f_password = ''

                user = cred.findtext('user')
                svcname = cred.findtext('sname')

                # do some case mangling for known variations we want in all upper case
                if svcname == "vnc":
                    user = "VNC"

                query = (db.t_accounts.f_services_id == svc_rec.id) & (
                    db.t_accounts.f_username.upper() == user.upper())
                acct_row = db(query).select().first()
                f_source = cred.findtext('type')
                if f_source == 'captured':
                    f_source = "Metasploit Capture"
                else:
                    f_source = "Metasploit Import"
                if acct_row:
                    # we have an account already; let's see if the hashes are in there
                    if acct_row.f_password != f_password:
                        acct_row.f_password = f_password
                        acct_row.f_compromised = True
                        if not acct_row.f_source:
                            acct_row.f_source = f_source
                        acct_row.update_record()
                        db.commit()
                        stats['accounts_updated'] += 1
                        didwhat = "Updated"
                else:
                    # new account record
                    acct_data = dict(f_services_id=svc_rec.id,
                                     f_username=user,
                                     f_password=f_password,
                                     f_source=f_source,
                                     f_compromised=True)
                    acct_id = db.t_accounts.insert(**acct_data)
                    db.commit()
                    stats['accounts_added'] += 1
                    didwhat = "Added"

            if didwhat:
                log(" [-] Account %s: (%s) %s" % (didwhat, ipaddr, user))

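    # recompute host status/summary information now that hosts, services and
    # accounts have changed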
    do_host_status()

    msg = " [*] Import complete: hosts: (%s/A, %s/U, %s/S) - services: (%s/A, %s/U), creds: (%s/A, %s/U)"\
        % (
            stats['hosts_added'],
            stats['hosts_updated'],
            stats['hosts_skipped'],
            stats['services_added'],
            stats['services_updated'],
            stats['accounts_added'],
            stats['accounts_updated']
        )

    log(msg)
    return msg