Example #1
import select
import sys

# misc.eprint, read_properties_b3lyp and read_properties_g4mp2 are
# project-local helpers assumed to be defined elsewhere in the module.


def readlines_log(debug=False):
    """
    Read SDF file names from stdin, if any, and yield
    (name, properties) tuples parsed from each file.
    """

    # Only loop while stdin has data ready (non-blocking select check)
    while sys.stdin in select.select([sys.stdin], [], [], 0)[0]:

        line = sys.stdin.readline()

        if not line:
            # End of input
            break

        line = line.strip()

        # Derive the molecule name from the path: drop the directory
        # part and the file extension
        name = line.split("/")[-1].split(".")[0]

        try:
            if "b3lyp" in line:
                properties = read_properties_b3lyp(line)
            elif "g4mp2" in line:
                properties = read_properties_g4mp2(line)
            else:
                # Unrecognised method in the file name; skip this entry
                misc.eprint(name, "fail")
                continue

            if debug:
                print(name)

            yield name, properties

        except Exception:
            misc.eprint(name, "fail")
            continue
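
A minimal sketch of how this generator might be driven, assuming SDF paths are piped in on stdin; the script name, the results dict and the __main__ wrapper are illustrative and not part of the original example:

# Hypothetical driver, e.g.:  ls data/*.sdf | python collect_properties.py
if __name__ == "__main__":
    results = {}
    for name, properties in readlines_log(debug=True):
        results[name] = properties
    print(len(results), "entries read")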
Example #2
import argparse
import os
import sys
from shutil import copy2

# Logger, get_search_outname, check_app, check_multi, eprint, color and
# report_output are project-local helpers/globals assumed to be defined
# elsewhere in the module.


def main():
    global args, searchterm

    parser = argparse.ArgumentParser(
        description='Search captured traffic for a pattern')
    parser.add_argument('app_or_capture',
                        metavar='appname',
                        help='Application name or network capture file')
    parser.add_argument('searchterm', type=str, help='String to search for')
    parser.add_argument('-v',
                        '--verbose',
                        dest='verbose',
                        action='store_true',
                        help='display packet contents')
    parser.add_argument('-m',
                        '--multi',
                        dest='multi',
                        action='store_true',
                        help='search multiple encodings')

    args = parser.parse_args()

    app = args.app_or_capture
    searchterm = args.searchterm
    appdir = os.path.join('results', app)
    search_output = get_search_outname(searchterm)
    if os.path.isdir(appdir):
        sys.stdout = Logger(os.path.join(appdir, search_output))

    if args.app_or_capture:
        # Check only one app
        # Option to use full packets perhaps specified
        if args.multi:
            check_multi(app, searchterm)
        else:
            check_app(app, searchterm)
    else:
        # Check all apps tested.  Note: this branch is reachable only if the
        # app_or_capture positional argument is made optional (nargs='?').
        for entry in os.listdir('results'):
            if os.path.isdir(os.path.join('results', entry)):
                app = entry
                if args.multi:
                    check_multi(app, searchterm)
                else:
                    check_app(app, searchterm)
            elif os.path.isdir(os.path.join('results', entry.lower())):
                app = entry
                if args.multi:
                    check_multi(app, searchterm)
                else:
                    check_app(app, searchterm)
    print('')
    # Flush stdout log file
    sys.stdout = sys.__stdout__
    # Copy log file to universally-named one
    copy2(os.path.join(appdir, search_output),
          os.path.join(appdir, report_output))
    eprint(color.bright('Done!'))
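
These examples temporarily replace sys.stdout with a Logger so that report output goes both to the terminal and to a file under results/. The project's actual Logger class is not shown here; the following is a minimal tee-style sketch, assuming only the behaviour implied by how it is used above:

import sys


class Logger(object):
    """Minimal stdout tee: write to the terminal and to a log file."""

    def __init__(self, filename):
        self.terminal = sys.__stdout__
        self.logfile = open(filename, 'w')

    def write(self, message):
        self.terminal.write(message)
        self.logfile.write(message)

    def flush(self):
        self.terminal.flush()
        self.logfile.flush()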
Example #3
import argparse

# getapps, runreports, eprint and color are project-local helpers assumed
# to be defined elsewhere in the module.


def main():

    parser = argparse.ArgumentParser(
        description='Run all reports for one or more tested applications')
    parser.add_argument('app_or_capture', metavar='appname', nargs='?',
                        help='Application name or network capture file')
    args = parser.parse_args()

    app = args.app_or_capture

    if args.app_or_capture:
        # Check only one app
        runreports(app)
    else:
        # Check all apps tested
        for app in getapps():
            runreports(app)

    eprint(color.bright('Done!'))
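
getapps() is not shown in these examples, but Example #2 discovers tested apps by listing the subdirectories of results/, so a plausible sketch (an assumption, not the project's actual implementation) is:

import os


def getapps():
    """Yield the names of all tested apps found under results/."""
    for entry in os.listdir('results'):
        if os.path.isdir(os.path.join('results', entry)):
            yield entry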
Example #4
import os
import pickle
import sys

import pyshark

# Logger, eprint, color, net, generate_report and report_output are
# project-local helpers/globals assumed to be defined elsewhere.


def check_app(app, force=False):
    '''
    Check application based on app name in Tapioca results
    '''

    dnscacheloaded = False
    largewarned = False

    # Get pcap file location
    if app.endswith('.pcap'):
        pcapfile = app
        if os.path.exists(pcapfile):
            sys.stdout = Logger('%s.%s' % (pcapfile, report_output))
    else:
        pcapfile = os.path.join('results', app, 'tcpdump.pcap')
        if os.path.exists(pcapfile):
            sys.stdout = Logger(os.path.join('results', app, report_output))

    if os.path.exists(pcapfile):

        pcapdir = os.path.dirname(pcapfile)
        dnspkl = os.path.join(pcapdir, '.dnsmap.pkl')

        eprint(color.bright('Checking app %s...' % color.cyan(app)))

        if os.path.exists(dnspkl) and not force:
            eprint('Loading cached DNS info...')
            with open(dnspkl, 'rb') as pklhandle:
                try:
                    net.dnsmap = pickle.load(pklhandle)
                    dnscacheloaded = True
                except Exception:
                    # Corrupt or incompatible cache; fall back to rebuilding
                    pass

        if not dnscacheloaded:
            if os.path.getsize(pcapfile) > 100000000:
                # Over 100MB
                eprint(
                    color.bright(
                        color.yellow(
                            'Warning: capture size is large. Please be patient.'
                        )))
                largewarned = True

            # Get captured DNS info for IP addresses
            eprint('Getting DNS info...')
            dnspackets = pyshark.FileCapture(pcapfile,
                                             keep_packets=False,
                                             display_filter='dns')
            dnspackets.apply_on_packets(net.get_dns_info, timeout=1000)
            with open(dnspkl, 'wb') as pklhandle:
                pickle.dump(net.dnsmap,
                            pklhandle,
                            protocol=pickle.HIGHEST_PROTOCOL)

        if os.path.getsize(pcapfile) > 100000000 and not largewarned:
            # Over 100MB
            eprint(
                color.bright(
                    color.yellow(
                        'Warning: capture size is large. Please be patient.')))
            largewarned = True

        sslpackets = pyshark.FileCapture(pcapfile,
                                         keep_packets=False,
                                         display_filter='ssl')

        eprint('Getting SSL info from capture...')
        # get_indexed_ssl_info(cap)
        sslpackets.apply_on_packets(net.get_ssl_info, timeout=1000)

        dtlspackets = pyshark.FileCapture(pcapfile,
                                          keep_packets=False,
                                          display_filter='dtls')

        eprint('Getting DTLS info from capture...')
        dtlspackets.apply_on_packets(net.get_dtls_info, timeout=1000)

        # Print report
        generate_report(app, pcapfile=pcapfile)

        # Reset globals
        net.clear()
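
check_app() accepts either an app name whose capture lives under results/<appname>/tcpdump.pcap or a path to a .pcap file directly; a usage sketch (the names below are illustrative only):

# Report on an app captured with Tapioca
check_app('com.example.app')

# Analyse a standalone capture file, forcing the cached DNS map to be rebuilt
check_app('session.pcap', force=True)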
Example #5
# Imports and project-local helpers as in Example #4
# (os, sys, pickle, pyshark, Logger, eprint, color, net,
#  generate_report, report_output).


def check_app(app, fullpacket=False, force=False):
    '''
    Check application based on app name in Tapioca results
    '''

    dnscacheloaded = False
    targetscacheloaded = False
    largewarned = False

    # load local network from config
    net.set_local()

    # Get pcap file location
    if app.endswith('.pcap'):
        pcapfile = app
        if os.path.exists(pcapfile):
            sys.stdout = Logger('%s.%s' % (pcapfile, report_output))
    else:
        pcapfile = os.path.join('results', app, 'tcpdump.pcap')
        if os.path.exists(pcapfile):
            sys.stdout = Logger(os.path.join('results', app, report_output))

    if os.path.exists(pcapfile):

        pcapdir = os.path.dirname(pcapfile)
        dnspkl = os.path.join(pcapdir, '.dnsmap.pkl')
        targetspkl = os.path.join(pcapdir, '.targets.pkl')

        eprint(color.bright('Checking app %s...' % color.cyan(app)))

        if os.path.exists(dnspkl) and not force:
            eprint('Loading cached DNS info...')
            with open(dnspkl, 'rb') as pklhandle:
                try:
                    net.dnsmap = pickle.load(pklhandle)
                    net.dnsreqs = pickle.load(pklhandle)
                    dnscacheloaded = True
                except Exception:
                    # Corrupt or incompatible cache; fall back to rebuilding
                    pass

        if not dnscacheloaded:
            if os.path.getsize(pcapfile) > 100000000:
                # Over 100MB
                eprint(
                    color.bright(
                        color.yellow(
                            'Warning: capture size is large. Please be patient.'
                        )))
                largewarned = True
            # Get captured DNS info for IP addresses
            eprint('Getting DNS info...')
            dnspackets = pyshark.FileCapture(pcapfile,
                                             keep_packets=False,
                                             display_filter='dns')
            dnspackets.apply_on_packets(net.get_dns_info, timeout=1000)
            with open(dnspkl, 'wb') as pklhandle:
                pickle.dump(net.dnsmap,
                            pklhandle,
                            protocol=pickle.HIGHEST_PROTOCOL)
                pickle.dump(net.dnsreqs,
                            pklhandle,
                            protocol=pickle.HIGHEST_PROTOCOL)

#        if os.path.exists(targetspkl) and not force:
#            eprint('Loading cached targets...')
#            with open(targetspkl, 'rb') as pklhandle:
#                try:
#                    net.targets = pickle.load(pklhandle)
#                    targetscacheloaded = True
#                except:
#                    pass

        if not targetscacheloaded:
            if fullpacket:
                packets = pyshark.FileCapture(pcapfile, keep_packets=False)
                # Get hosts contacted
                eprint('Getting hosts contacted...')
                packets.apply_on_packets(net.get_hosts_contacted_fullpacket,
                                         timeout=1000)
            else:
                packets = pyshark.FileCapture(pcapfile,
                                              keep_packets=False,
                                              only_summaries=True)
                # Get hosts contacted
                eprint('Getting hosts contacted...')
                packets.apply_on_packets(net.get_hosts_contacted, timeout=1000)


#                with open(targetspkl, 'wb') as pklhandle:
#                    pickle.dump(net.targets, pklhandle,
#                                protocol=pickle.HIGHEST_PROTOCOL)

        # Print report
        generate_report(app, fullpacket=fullpacket, pcapfile=pcapfile)

        # Reset globals
        net.clear()
Example #6
import argparse
import os

# check_app, eprint, color and the ssl_failed / ssl_notest / ssl_passed
# lists are project-local helpers/globals assumed to be defined elsewhere.


def main():

    parser = argparse.ArgumentParser(
        description=
        'Verify SSL certificate validation for one or more tested applications')
    parser.add_argument('app_or_capture',
                        metavar='appname',
                        nargs='?',
                        help='Application name or network capture file')
    args = parser.parse_args()

    app = args.app_or_capture

    if args.app_or_capture:
        check_app(app)
    else:
        for entry in os.listdir('results'):
            if os.path.isdir(os.path.join('results', entry)):
                app = entry
                check_app(app)

        eprint('')
        eprint(color.bright('SSL test summary:'))
        eprint(color.bright(color.red('Failed:')))
        for app in ssl_failed:
            eprint(app)
        if ssl_notest:
            eprint(color.bright('Not tested:'))
            for app in ssl_notest:
                eprint(app)
        eprint(color.bright(color.green('Passed:')))
        for app in ssl_passed:
            eprint(app)
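
All of these examples report progress via eprint() rather than print() so that status messages go to stderr and stay out of the redirected stdout report log; a minimal sketch of that helper, assuming the usual idiom rather than the project's exact code:

import sys


def eprint(*args, **kwargs):
    """Print to stderr so status output stays out of the stdout report."""
    print(*args, file=sys.stderr, **kwargs)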