Example #1
File: arp2db.py Project: sree789/ivre-1
def main():
    """Update the flow database from ARP requests in PCAP files"""
    parser, use_argparse = utils.create_argparser(__doc__, extraargs="files")
    if use_argparse:
        parser.add_argument("files",
                            nargs='*',
                            metavar='FILE',
                            help="PCAP files")
    parser.add_argument("-v",
                        "--verbose",
                        help="verbose mode",
                        action="store_true")
    args = parser.parse_args()

    if args.verbose:
        config.DEBUG = True

    bulk = db.flow.start_bulk_insert()
    query_cache = db.flow.add_flow(["Flow"], ('proto', ))
    for fname in args.files:
        for pkt in reader(fname):
            rec = {
                "dst": pkt.pdst,
                "src": pkt.psrc,
                "start_time": datetime.fromtimestamp(pkt.time),
                "end_time": datetime.fromtimestamp(pkt.time),
                "proto": "arp"
            }
            if rec["dst"] != "0.0.0.0" and rec["src"] != "0.0.0.0":
                bulk.append(query_cache, rec)
    bulk.close()
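Example #1 depends on a reader callable that is never shown. A minimal sketch of what it could be, assuming scapy's PcapReader filtered down to ARP packets (an assumption; the project may ship its own reader):

from datetime import datetime

from scapy.all import ARP, PcapReader  # assumed dependency for this sketch


def arp_records(fname):
    """Yield records shaped like the ones bulk-inserted above."""
    with PcapReader(fname) as pcap:
        for pkt in pcap:
            if ARP not in pkt:
                continue
            when = datetime.fromtimestamp(float(pkt.time))
            yield {"src": pkt[ARP].psrc, "dst": pkt[ARP].pdst,
                   "start_time": when, "end_time": when, "proto": "arp"}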
Example #2
File: ipdata.py Project: xx-zhang/ivre
def main():
    parser, use_argparse = utils.create_argparser(__doc__, extraargs='ip')
    torun = []
    parser.add_argument('--download', action='store_true',
                        help='Fetch all data files.')
    parser.add_argument('--import-all', action='store_true',
                        help='Create all CSV files for reverse lookups.')
    parser.add_argument('--quiet', "-q", action='store_true',
                        help='Quiet mode.')
    if use_argparse:
        parser.add_argument('ip', nargs='*', metavar='IP',
                            help='Display results for specified IP addresses.')
    args = parser.parse_args()
    if args.download:
        geoiputils.download_all(verbose=not args.quiet)
        db.data.reload_files()
    if args.import_all:
        torun.append((db.data.build_dumps, [], {}))
    for function, fargs, fkargs in torun:
        function(*fargs, **fkargs)
    for addr in args.ip:
        if addr.isdigit():
            addr = int(addr)
        print(addr)
        for info in [db.data.as_byip(addr), db.data.location_byip(addr)]:
            if info:
                for key, value in viewitems(info):
                    print('    %s %s' % (key, value))
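Note that addr.isdigit() lets the same positional argument take an IP address either as a dotted quad or as its 32-bit integer form. A quick self-contained check of that equivalence:

import socket
import struct

# 192.168.1.1 as a 32-bit big-endian integer is 3232235777
assert struct.unpack("!I", socket.inet_aton("192.168.1.1"))[0] == 3232235777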
Example #3
def main():
    """Parses the arguments and call worker()"""
    # Set the signal handler
    for s in [signal.SIGINT, signal.SIGTERM]:
        signal.signal(s, shutdown)
        signal.siginterrupt(s, False)
    parser, _ = utils.create_argparser(__doc__)
    parser.add_argument(
        '--sensor', metavar='SENSOR[:SENSOR]',
        help='sensor to check, optionally with a long name, defaults to all.',
    )
    parser.add_argument(
        '--directory', metavar='DIR',
        help='base directory (defaults to /ivre/passiverecon/).',
        default="/ivre/passiverecon/",
    )
    parser.add_argument(
        '--progname', metavar='PROG',
        help='Program to run (defaults to ivre passiverecon2db).',
        default="ivre passiverecon2db",
    )
    args = parser.parse_args()
    if args.sensor is not None:
        SENSORS.update(dict([args.sensor.split(':', 1)
                             if ':' in args.sensor
                             else [args.sensor, args.sensor]]))
        sensor = args.sensor.split(':', 1)[0]
    else:
        sensor = None
    worker(args.progname, args.directory, sensor=sensor)
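The shutdown handler registered on SIGINT and SIGTERM is not shown. A minimal sketch, assuming it merely flags the worker loop to exit cleanly (the real handler may do more):

WANTDOWN = False  # assumed to be polled by worker() between files (illustrative)


def shutdown(signum, _frame):
    """Signal handler: remember that we were asked to stop."""
    global WANTDOWN
    WANTDOWN = True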
Example #4
def main():
    """Parses the arguments and call worker()"""
    # Set the signal handler
    for s in [signal.SIGINT, signal.SIGTERM]:
        signal.signal(s, shutdown)
        signal.siginterrupt(s, False)
    parser, _ = utils.create_argparser(__doc__)
    parser.add_argument(
        '--sensor',
        metavar='SENSOR[:SENSOR]',
        help='sensor to check, optionally with a long name, defaults to all.',
    )
    parser.add_argument(
        '--directory',
        metavar='DIR',
        help='base directory (defaults to /ivre/passiverecon/).',
        default="/ivre/passiverecon/",
    )
    parser.add_argument(
        '--progname',
        metavar='PROG',
        help='Program to run (defaults to ivre passiverecon2db).',
        default="ivre passiverecon2db",
    )
    args = parser.parse_args()
    if args.sensor is not None:
        SENSORS.update(
            dict([
                args.sensor.split(':', 1)
                if ':' in args.sensor else [args.sensor, args.sensor]
            ]))
        sensor = args.sensor.split(':', 1)[0]
    else:
        sensor = None
    worker(args.progname, args.directory, sensor=sensor)
Example #5
def main():
    """Update the flow database from Airodump CSV files"""
    parser, use_argparse = utils.create_argparser(__doc__, extraargs='files')
    if use_argparse:
        parser.add_argument('files', nargs='*', metavar='FILE',
                            help='Airodump CSV files')
    parser.add_argument("-v", "--verbose", help="verbose mode",
                        action="store_true")
    args = parser.parse_args()

    if args.verbose:
        config.DEBUG = True

    bulk = db.flow.start_bulk_insert()
    for fname in args.files:
        with Airodump(fname) as fdesc:
            for line in fdesc:
                if "Station MAC" in line:
                    if line["BSSID"] == "(not associated)":
                        continue
                    line["src"] = line.pop("Station MAC")
                    line["dst"] = line.pop("BSSID")
                    # TODO FIX list
                    del line["Probed ESSIDs"]
                    line["start_time"] = line.pop("First time seen")
                    line["end_time"] = line.pop("Last time seen")
                    line["packets"] = line.pop('# packets')
                    # TODO FIX MEAN (in addition to MAX and MEAN)
                    db.flow.add_flow(
                        line, "WLAN", {}, counters=["packets"],
                        srcnode=("Intel:Mac", {"addr": "{src}"}),
                        dstnode=("Intel:Wlan", {"addr": "{dst}"}),
                    )
                else:
                    line["start_time"] = line.pop("First time seen")
                    line["end_time"] = line.pop("Last time seen")
                    line["lan_ip"] = line.pop("LAN IP")
                    query = [
                        "MERGE (wlan:Intel:Wlan {addr: {BSSID}})",
                        "ON CREATE SET wlan.essid = {ESSID}, "
                        "wlan.firstseen = {start_time}, "
                        "wlan.lastseen = {end_time}, "
                        "wlan.channel = {channel}, wlan.speed = {Speed}, "
                        "wlan.privacy = {Privacy}, wlan.cipher = {Cipher}, "
                        "wlan.authentication = {Authentication}, "
                        "wlan.ip = {lan_ip}",
                        "ON MATCH SET wlan.essid = {ESSID}, "
                        "wlan.firstseen = CASE WHEN "
                        "wlan.firstseen > {start_time} THEN {start_time} "
                        "ELSE wlan.firstseen END, wlan.lastseen = CASE WHEN "
                        "wlan.lastseen < {end_time} THEN {end_time} ELSE "
                        "wlan.lastseen END, wlan.channel = {channel}, "
                        "wlan.speed = {Speed}, wlan.privacy = {Privacy}, "
                        "wlan.cipher = {Cipher}, "
                        "wlan.authentication = {Authentication}, "
                        "wlan.ip = {lan_ip}",
                    ]
                    bulk.append("\n".join(query), line)
    bulk.close()
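The {BSSID}, {ESSID}, {start_time}, ... placeholders in the Cypher statements above are query parameters, bound from the line dict when the batched statement runs, so CSV values are never spliced into the query text. A sketch of the execution step, assuming a Neo4j driver session and the legacy {param} parameter syntax (newer servers use $param):

def run_wlan_merge(session, query, line):
    """Run one MERGE statement with the CSV row as its parameters."""
    # session is assumed to be a Neo4j driver session; the row's values
    # are bound as parameters, never interpolated into the statement.
    session.run(query, parameters=dict(line))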
Example #6
def parse_args():
    """Imports the available module to parse the arguments and return
    the parsed arguments.

    """
    parser, _ = create_argparser(__doc__)
    parser.add_argument('--bind-address', '-b',
                        help='(IP) Address to bind the server to (defaults '
                        'to 127.0.0.1).',
                        default="127.0.0.1")
    parser.add_argument('--port', '-p', type=int, default=80,
                        help='(TCP) Port to use (defaults to 80)')
    return parser.parse_args()
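Every example here unpacks parser, use_argparse = create_argparser(...), and positional arguments are only declared when use_argparse is true. A plausible sketch of that helper, assuming it falls back to optparse (which cannot declare positionals) on old Pythons without argparse; the real ivre implementation may differ:

def create_argparser(doc, extraargs=None):
    """Return (parser, use_argparse)."""
    try:
        import argparse
    except ImportError:
        import optparse
        parser = optparse.OptionParser(description=doc)
        parser.add_argument = parser.add_option
        orig_parse_args = parser.parse_args

        def parse_args():
            # optparse returns (values, args): graft the leftover
            # positional arguments onto the attribute argparse would use.
            values, leftover = orig_parse_args()
            if extraargs is not None:
                setattr(values, extraargs, leftover)
            return values

        parser.parse_args = parse_args
        return parser, False
    return argparse.ArgumentParser(description=doc), True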
Example #7
File: bro2db.py Project: sree789/ivre-1
def main():
    """Update the flow database from Bro logs"""
    parser, use_argparse = utils.create_argparser(__doc__,
                                                  extraargs="logfiles")
    if use_argparse:
        parser.add_argument("logfiles",
                            nargs='*',
                            metavar='FILE',
                            help="Bro log files")
    parser.add_argument("-v",
                        "--verbose",
                        help="verbose mode",
                        action="store_true")
    parser.add_argument("-C",
                        "--no-cleanup",
                        help="avoid port cleanup heuristics",
                        action="store_true")
    args = parser.parse_args()

    if args.verbose:
        config.DEBUG = True

    for fname in args.logfiles:
        if not os.path.exists(fname):
            utils.LOGGER.error("File %r does not exist", fname)
            continue
        with BroFile(fname) as brof:
            bulk = db.flow.start_bulk_insert()
            utils.LOGGER.debug(
                "Parsing %s\n\t%s", fname,
                "Fields:\n%s\n" % "\n".join("%s: %s" % (f, t)
                                            for f, t in brof.field_types))
            if brof.path in FUNCTIONS:
                func = FUNCTIONS[brof.path]
            elif brof.path in flow.META_DESC:
                func = any2flow(brof.path)
            else:
                utils.LOGGER.debug("Log format not (yet) supported for %r",
                                   fname)
                continue
            for line in brof:
                if not line:
                    continue
                func(bulk, _bro2flow(line))
            db.flow.bulk_commit(bulk)
            if brof.path == "conn" and not args.no_cleanup:
                db.flow.cleanup_flows()
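FUNCTIONS maps a Bro/Zeek log path ("conn", "dns", ...) to a dedicated insert function, while any2flow() builds a generic fallback for log types that only have declarative metadata. A self-contained sketch of that dispatch shape (handler names and bodies are illustrative assumptions):

def conn2flow(bulk, rec):
    """Dedicated handler for conn.log records (illustrative stub)."""
    bulk.append("conn", rec)


def any2flow(path):
    """Build a generic handler for a log type without a dedicated one."""
    def handler(bulk, rec):
        bulk.append(path, rec)
    return handler


FUNCTIONS = {"conn": conn2flow}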
Example #8
def parse_args():
    """Imports the available module to parse the arguments and return
    the parsed arguments.

    """
    parser, _ = create_argparser(__doc__)
    parser.add_argument('--bind-address',
                        '-b',
                        help='(IP) Address to bind the server to (defaults '
                        'to 127.0.0.1).',
                        default="127.0.0.1")
    parser.add_argument('--port',
                        '-p',
                        type=int,
                        default=80,
                        help='(TCP) Port to use (defaults to 80)')
    return parser.parse_args()
Example #9
def main():
    """Update the flow database from Bro logs"""
    parser, use_argparse = utils.create_argparser(__doc__,
                                                  extraargs="logfiles")
    if use_argparse:
        parser.add_argument('logfiles', nargs='*', metavar='FILE',
                            help='Bro log files')
    parser.add_argument("-v", "--verbose", help="verbose mode",
                        action="store_true")
    parser.add_argument("-C", "--no-cleanup",
                        help="avoid port cleanup heuristics",
                        action="store_true")
    args = parser.parse_args()

    if args.verbose:
        config.DEBUG = True

    for fname in args.logfiles:
        if not os.path.exists(fname):
            utils.LOGGER.error("File %r does not exist", fname)
            continue
        with BroFile(fname) as brof:
            bulk = db.flow.start_bulk_insert()
            utils.LOGGER.debug("Parsing %s\n\t%s", fname,
                               "Fields:\n%s\n" % "\n".join(
                                   "%s: %s" % (f, t)
                                   for f, t in brof.field_types
                               ))
            if brof.path in FUNCTIONS:
                func = FUNCTIONS[brof.path]
            elif brof.path in ALL_DESCS:
                func = any2neo(ALL_DESCS[brof.path])
            else:
                utils.LOGGER.debug("Log format not (yet) supported for %r",
                                   fname)
                continue
            for line in brof:
                if not line:
                    continue
                func(bulk, _bro2neo(line))
            bulk.commit()
            if brof.path == "conn" and not args.no_cleanup:
                db.flow.cleanup_flows()
Example #10
File: flow2db.py Project: sree789/ivre-1
def main():
    """Update the flow database from log files"""
    parser, use_argparse = utils.create_argparser(__doc__, extraargs='files')
    if use_argparse:
        parser.add_argument('files', nargs='*', metavar='FILE',
                            help='Files to import in the flow database')
    parser.add_argument("-v", "--verbose", help="verbose mode",
                        action="store_true")
    parser.add_argument("-t", "--type", help="file type",
                        choices=list(PARSERS_CHOICE))
    parser.add_argument("-f", "--pcap-filter",
                        help="pcap filter to apply (when supported)")
    parser.add_argument("-C", "--no-cleanup",
                        help="avoid port cleanup heuristics",
                        action="store_true")
    args = parser.parse_args()

    if args.verbose:
        config.DEBUG = True

    for fname in args.files:
        try:
            fileparser = PARSERS_CHOICE[args.type]
        except KeyError:
            with utils.open_file(fname) as fdesc:
                try:
                    fileparser = PARSERS_MAGIC[fdesc.read(4)]
                except KeyError:
                    utils.LOGGER.warning(
                        'Cannot find the appropriate parser for file %r',
                        fname,
                    )
                    continue
        bulk = db.flow.start_bulk_insert()
        with fileparser(fname, args.pcap_filter) as fdesc:
            for rec in fdesc:
                if not rec:
                    continue
                db.flow.flow2flow(bulk, rec)
        db.flow.bulk_commit(bulk)

    if not args.no_cleanup:
        db.flow.cleanup_flows()
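When --type is not given, the parser is chosen from the file's first four bytes through PARSERS_MAGIC. A self-contained sketch of that dispatch, assuming the classic libpcap magic number (the project's real table and parser classes will differ):

import struct


class PcapParser(object):  # stand-in for the project's real parser class
    def __init__(self, fname, pcap_filter=None):
        self.fname, self.pcap_filter = fname, pcap_filter


PARSERS_MAGIC = {
    # libpcap magic number, both byte orders
    struct.pack("<I", 0xA1B2C3D4): PcapParser,
    struct.pack(">I", 0xA1B2C3D4): PcapParser,
}


def parser_for(fname):
    """Return the parser class matching the file's magic bytes."""
    with open(fname, "rb") as fdesc:
        return PARSERS_MAGIC[fdesc.read(4)]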
Example #11
def main():
    """Update the flow database from ARP requests in PCAP files"""
    parser, use_argparse = utils.create_argparser(__doc__, extraargs='files')
    if use_argparse:
        parser.add_argument('files', nargs='*', metavar='FILE',
                            help='PCAP files')
    parser.add_argument("-v", "--verbose", help="verbose mode",
                        action="store_true")
    args = parser.parse_args()

    if args.verbose:
        config.DEBUG = True

    bulk = db.flow.start_bulk_insert()
    query_cache = db.flow.add_flow(["Flow"], ('proto',))
    for fname in args.files:
        for pkt in reader(fname):
            rec = {"dst": pkt.pdst, "src": pkt.psrc,
                   "start_time": datetime.fromtimestamp(pkt.time),
                   "end_time": datetime.fromtimestamp(pkt.time),
                   "proto": "arp"}
            if rec["dst"] != "0.0.0.0" and rec["src"] != "0.0.0.0":
                bulk.append(query_cache, rec)
    bulk.close()
Example #12
def main():
    parser, _ = utils.create_argparser(__doc__)
    parser.add_argument('--init',
                        '--purgedb',
                        action='store_true',
                        help='Purge or create and initialize the database.')
    parser.add_argument('--ensure-indexes',
                        action='store_true',
                        help='Create missing indexes (will lock the '
                        'database).')
    parser.add_argument('--node-filters',
                        '-n',
                        nargs="+",
                        metavar="FILTER",
                        help='Filter the results with a list of ivre specific '
                        'node textual filters (see WebUI doc in FLOW.md).')
    parser.add_argument('--flow-filters',
                        '-f',
                        nargs="+",
                        metavar="FILTER",
                        help='Filter the results with a list of ivre specific '
                        'flow textual filters (see WebUI doc in FLOW.md).')
    parser.add_argument('--json',
                        '-j',
                        action='store_true',
                        help='Outputs the full json records of results.')
    parser.add_argument('--count',
                        '-c',
                        action='store_true',
                        help='Only return the count of the results.')
    parser.add_argument('--limit',
                        '-l',
                        type=int,
                        help='Output at most LIMIT results.')
    parser.add_argument('--skip',
                        type=int,
                        default=0,
                        help='Skip first SKIP results.')
    parser.add_argument('--orderby',
                        '-o',
                        help='Order of results ("src", "dst" or "flow")')
    parser.add_argument('--separator', '-s', help="Separator string.")
    parser.add_argument('--top',
                        '-t',
                        nargs="+",
                        help='Top flows for a given set of fields, e.g. '
                        '"--top src.addr dport".')
    parser.add_argument('--collect',
                        '-C',
                        nargs="+",
                        help='When using --top, also collect these '
                        'properties.')
    parser.add_argument('--sum',
                        '-S',
                        nargs="+",
                        help='When using --top, sum on these properties to '
                        'order the result.')
    parser.add_argument('--mode',
                        '-m',
                        help="Query special mode (flow_map, talk_map...)")
    parser.add_argument('--timeline',
                        '-T',
                        action="store_true",
                        help='Retrieves the timeline of each flow')
    parser.add_argument('--flow-daily',
                        action="store_true",
                        help="Flow count per times of the day")
    parser.add_argument('--plot',
                        action="store_true",
                        help="Plot data when possible (requires matplotlib).")
    parser.add_argument('--fields',
                        nargs='+',
                        help="Display these fields for each entry.")
    args = parser.parse_args()

    out = sys.stdout

    if args.plot and plt is None:
        utils.LOGGER.critical("Matplotlib is required for --plot")
        sys.exit(-1)

    if args.init:
        if os.isatty(sys.stdin.fileno()):
            out.write('This will remove any flow result in your database. '
                      'Proceed? [y/N] ')
            ans = input()
            if ans.lower() != 'y':
                sys.exit(-1)
        db.flow.init()
        sys.exit(0)

    if args.ensure_indexes:
        if os.isatty(sys.stdin.fileno()):
            out.write('This will lock your database. Proceed? [y/N] ')
            ans = input()
            if ans.lower() != 'y':
                sys.exit(-1)
        db.flow.ensure_indexes()
        sys.exit(0)

    filters = {
        "nodes": args.node_filters or [],
        "edges": args.flow_filters or []
    }

    query = db.flow.from_filters(filters,
                                 limit=args.limit,
                                 skip=args.skip,
                                 orderby=args.orderby,
                                 mode=args.mode,
                                 timeline=args.timeline)
    sep = args.separator or ' | '
    coma = ';' if args.separator else '; '
    coma2 = ',' if args.separator else ', '
    if args.count:
        count = db.flow.count(query)
        out.write('%(clients)d clients\n%(servers)d servers\n'
                  '%(flows)d flows\n' % count)

    elif args.top:
        top = db.flow.top(query, args.top, args.collect, args.sum)
        for rec in top:
            sys.stdout.write(
                "%s%s%s%s%s\n" %
                (coma.join(str(elt)
                           for elt in rec["fields"]), sep, rec["count"], sep,
                 coma.join(
                     str(coma2.join(str(val) for val in elt))
                     for elt in rec["collected"]) if rec["collected"] else ""))

    elif args.flow_daily:
        # FIXME? fully in-memory
        if args.plot:
            plot_data = {}
        for rec in db.flow.flow_daily(query):
            out.write(
                sep.join([
                    rec["flow"], rec["time_in_day"].strftime("%T.%f"),
                    str(rec["count"])
                ]))
            out.write("\n")

            if args.plot:
                plot_data.setdefault(rec["flow"], [[], []])
                plot_data[rec["flow"]][0].append(rec["time_in_day"])
                plot_data[rec["flow"]][1].append(rec["count"])
        if args.plot:
            for flow, points in viewitems(plot_data):
                plt.plot(points[0], points[1], label=flow)
            plt.legend(loc='best')
            plt.show()

    else:
        fmt = '%%s%s%%s%s%%s' % (sep, sep)
        node_width = len('XXX.XXX.XXX.XXX')
        flow_width = len('tcp/XXXXX')
        for res in db.flow.to_iter(query):
            if args.json:
                out.write('%s\n' % res)
            else:
                elts = {}
                for elt in ["src", "flow", "dst"]:
                    elts[elt] = res[elt]['label']
                    if args.fields:
                        elts[elt] = "%s%s%s" % (
                            elts[elt], coma,
                            coma.join(
                                str(res[elt]['data'].get(field, ""))
                                for field in args.fields))
                src, flow, dst = elts["src"], elts["flow"], elts["dst"]
                node_width = max(node_width, len(src), len(dst))
                flow_width = max(flow_width, len(flow))
                if not args.separator:
                    fmt = ('%%-%ds%s%%-%ds%s%%-%ds' %
                           (node_width, sep, flow_width, sep, node_width))
                out.write(fmt % (src, flow, dst))
                if args.timeline:
                    out.write(sep)
                    out.write(
                        coma.join(
                            str(elt) for elt in sorted(res['flow']['data']
                                                       ['meta']['times'])))
                out.write('\n')
Example #13
def main():
    """Update the flow database from Airodump CSV files"""
    parser, use_argparse = utils.create_argparser(__doc__, extraargs='files')
    if use_argparse:
        parser.add_argument('files',
                            nargs='*',
                            metavar='FILE',
                            help='Airodump CSV files')
    parser.add_argument("-v",
                        "--verbose",
                        help="verbose mode",
                        action="store_true")
    args = parser.parse_args()

    if args.verbose:
        config.DEBUG = True

    bulk = db.flow.start_bulk_insert()
    for fname in args.files:
        with Airodump(fname) as fdesc:
            for line in fdesc:
                if "Station MAC" in line:
                    if line["BSSID"] == "(not associated)":
                        continue
                    line["src"] = line.pop("Station MAC")
                    line["dst"] = line.pop("BSSID")
                    # TODO FIX list
                    del line["Probed ESSIDs"]
                    line["start_time"] = line.pop("First time seen")
                    line["end_time"] = line.pop("Last time seen")
                    line["packets"] = line.pop('# packets')
                    # TODO FIX MEAN (in addition to MAX and MEAN)
                    db.flow.add_flow(
                        line,
                        "WLAN",
                        {},
                        counters=["packets"],
                        srcnode=("Intel:Mac", {
                            "addr": "{src}"
                        }),
                        dstnode=("Intel:Wlan", {
                            "addr": "{dst}"
                        }),
                    )
                else:
                    line["start_time"] = line.pop("First time seen")
                    line["end_time"] = line.pop("Last time seen")
                    line["lan_ip"] = line.pop("LAN IP")
                    query = [
                        "MERGE (wlan:Intel:Wlan {addr: {BSSID}})",
                        "ON CREATE SET wlan.essid = {ESSID}, "
                        "wlan.firstseen = {start_time}, "
                        "wlan.lastseen = {end_time}, "
                        "wlan.channel = {channel}, wlan.speed = {Speed}, "
                        "wlan.privacy = {Privacy}, wlan.cipher = {Cipher}, "
                        "wlan.authentication = {Authentication}, "
                        "wlan.ip = {lan_ip}",
                        "ON MATCH SET wlan.essid = {ESSID}, "
                        "wlan.firstseen = CASE WHEN "
                        "wlan.firstseen > {start_time} THEN {start_time} "
                        "ELSE wlan.firstseen END, wlan.lastseen = CASE WHEN "
                        "wlan.lastseen < {end_time} THEN {end_time} ELSE "
                        "wlan.lastseen END, wlan.channel = {channel}, "
                        "wlan.speed = {Speed}, wlan.privacy = {Privacy}, "
                        "wlan.cipher = {Cipher}, "
                        "wlan.authentication = {Authentication}, "
                        "wlan.ip = {lan_ip}",
                    ]
                    bulk.append("\n".join(query), line)
    bulk.close()
Example #14
def main():
    """Update the flow database from log files"""
    parser, use_argparse = utils.create_argparser(__doc__, extraargs='files')
    if use_argparse:
        parser.add_argument('files',
                            nargs='*',
                            metavar='FILE',
                            help='Files to import in the flow database')
    parser.add_argument("-v",
                        "--verbose",
                        help="verbose mode",
                        action="store_true")
    parser.add_argument("-t",
                        "--type",
                        help="file type",
                        choices=list(PARSERS_CHOICE))
    parser.add_argument("-f",
                        "--pcap-filter",
                        help="pcap filter to apply (when supported)")
    parser.add_argument("-C",
                        "--no-cleanup",
                        help="avoid port cleanup heuristics",
                        action="store_true")
    args = parser.parse_args()

    if args.verbose:
        config.DEBUG = True

    query_cache = {}
    for fname in args.files:
        try:
            fileparser = PARSERS_CHOICE[args.type]
        except KeyError:
            with utils.open_file(fname) as fdesc:
                try:
                    fileparser = PARSERS_MAGIC[fdesc.read(4)]
                except KeyError:
                    utils.LOGGER.warning(
                        'Cannot find the appropriate parser for file %r',
                        fname,
                    )
                    continue
        bulk = db.flow.start_bulk_insert()
        with fileparser(fname, args.pcap_filter) as fdesc:
            for rec in fdesc:
                if not rec:
                    continue
                linkattrs = ('proto', )
                accumulators = {}
                for (fields, sp_linkattrs,
                     sp_accumulators) in FIELD_REQUEST_EXT:
                    if all(field in rec for field in fields):
                        linkattrs = sp_linkattrs
                        accumulators = sp_accumulators
                        break
                if linkattrs not in query_cache:
                    query_cache[linkattrs] = db.flow.add_flow(
                        ["Flow"],
                        linkattrs,
                        counters=COUNTERS,
                        accumulators=accumulators)
                bulk.append(query_cache[linkattrs], rec)
        bulk.close()

    if not args.no_cleanup:
        db.flow.cleanup_flows()
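FIELD_REQUEST_EXT, iterated above, is assumed to be an ordered list of (required_fields, linkattrs, accumulators) triples: the first entry whose fields are all present in the record decides how the flow is keyed. A sketch of its shape, with purely illustrative entries:

FIELD_REQUEST_EXT = [
    # Records carrying ports are keyed on (proto, dport), accumulating
    # up to 5 distinct source ports (names and values illustrative).
    (("sport", "dport"), ("proto", "dport"), {"sports": ("{sport}", 5)}),
    # ICMP-style records are keyed on type/code instead.
    (("type", "code"), ("proto", "type", "code"), {}),
]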
Example #15
def main():
    parser, _ = utils.create_argparser(__doc__)
    parser.add_argument('--init',
                        '--purgedb',
                        action='store_true',
                        help='Purge or create and initialize the database.')
    parser.add_argument('--ensure-indexes',
                        action='store_true',
                        help='Create missing indexes (will lock the '
                        'database).')
    parser.add_argument('--node-filters',
                        '-n',
                        nargs="+",
                        metavar="FILTER",
                        help='Filter the results with a list of ivre specific '
                        'node textual filters (see WebUI doc in FLOW.md).')
    parser.add_argument('--flow-filters',
                        '-f',
                        nargs="+",
                        metavar="FILTER",
                        help='Filter the results with a list of ivre specific '
                        'flow textual filters (see WebUI doc in FLOW.md).')
    parser.add_argument('--json',
                        '-j',
                        action='store_true',
                        help='Outputs the full json records of results.')
    parser.add_argument('--count',
                        '-c',
                        action='store_true',
                        help='Only return the count of the results.')
    parser.add_argument('--limit',
                        '-l',
                        type=int,
                        default=None,
                        help='Output at most LIMIT results.')
    parser.add_argument('--skip',
                        type=int,
                        default=0,
                        help='Skip first SKIP results.')
    parser.add_argument('--orderby',
                        '-o',
                        help='Order of results ("src", "dst" or "flow")')
    parser.add_argument('--separator', '-s', help="Separator string.")
    parser.add_argument('--top',
                        '-t',
                        nargs="+",
                        help='Top flows for a given set of fields, e.g. '
                        '"--top src.addr dport".')
    parser.add_argument('--collect',
                        '-C',
                        nargs="+",
                        help='When using --top, also collect these '
                        'properties.',
                        default=[])
    parser.add_argument('--sum',
                        '-S',
                        nargs="+",
                        help='When using --top, sum on these properties to '
                        'order the result.',
                        default=[])
    parser.add_argument('--least',
                        '-L',
                        action='store_true',
                        help='When using --top, sort records by least')
    parser.add_argument('--mode',
                        '-m',
                        help="Query special mode (flow_map, talk_map...)")
    parser.add_argument('--timeline',
                        '-T',
                        action="store_true",
                        help='Retrieves the timeline of each flow')
    parser.add_argument('--flow-daily',
                        action="store_true",
                        help="Flow count per times of the day. If --precision "
                        "is absent, it will be based on FLOW_TIME_PRECISION "
                        "(%d)" % config.FLOW_TIME_PRECISION)
    parser.add_argument('--plot',
                        action="store_true",
                        help="Plot data when possible (requires matplotlib).")
    parser.add_argument('--fields',
                        nargs='+',
                        help="Display these fields for each entry.")
    parser.add_argument('--reduce-precision',
                        type=int,
                        metavar="NEW_PRECISION",
                        help="Only with MongoDB backend. "
                        "Reduce precision to NEW_PRECISION for flows "
                        "timeslots. Uses precision, before, after and "
                        "filters.")
    parser.add_argument("--after",
                        "-a",
                        type=str,
                        help="Only with MongoDB "
                        "backend. Get only flows seen after this date. "
                        "Date format: YEAR-MONTH-DAY HOUR:MINUTE. "
                        "Based on timeslots precision. If the given date is "
                        "in the middle of a timeslot, flows start at the next "
                        "timeslot.")
    parser.add_argument("--before",
                        "-b",
                        type=str,
                        help="Only with MongoDB "
                        "backend. Get only flows seen before this date. "
                        "Date format: YEAR-MONTH-DAY HOUR:MINUTE. "
                        "Based on timeslots precision. If the given date is "
                        "in the middle of a timeslot, the whole period is "
                        "kept even if theoretically some flows may have been "
                        "seen after the given date.")
    parser.add_argument('--precision',
                        nargs='?',
                        default=None,
                        const=0,
                        help="Only With MongoDB backend. If PRECISION is "
                        "specified, get only flows with one timeslot of "
                        "the given precision. Otherwise, list "
                        "precisions.",
                        type=int)
    args = parser.parse_args()

    out = sys.stdout

    if args.plot and plt is None:
        utils.LOGGER.critical("Matplotlib is required for --plot")
        sys.exit(-1)

    if args.init:
        if os.isatty(sys.stdin.fileno()):
            out.write('This will remove any flow result in your database. '
                      'Proceed? [y/N] ')
            ans = input()
            if ans.lower() != 'y':
                sys.exit(-1)
        db.flow.init()
        sys.exit(0)

    if args.ensure_indexes:
        if os.isatty(sys.stdin.fileno()):
            out.write('This will lock your database. Proceed? [y/N] ')
            ans = input()
            if ans.lower() != 'y':
                sys.exit(-1)
        db.flow.ensure_indexes()
        sys.exit(0)

    if args.precision == 0:
        # Get precisions list
        out.writelines('%d\n' % precision
                       for precision in db.flow.list_precisions())
        sys.exit(0)

    filters = {
        "nodes": args.node_filters or [],
        "edges": args.flow_filters or []
    }

    time_args = ['before', 'after']
    time_values = {}
    args_dict = vars(args)
    for arg in time_args:
        time_values[arg] = (datetime.datetime.strptime(args_dict[arg],
                                                       "%Y-%m-%d %H:%M")
                            if args_dict[arg] is not None else None)

    query = db.flow.from_filters(filters,
                                 limit=args.limit,
                                 skip=args.skip,
                                 orderby=args.orderby,
                                 mode=args.mode,
                                 timeline=args.timeline,
                                 after=time_values['after'],
                                 before=time_values['before'],
                                 precision=args.precision)

    if args.reduce_precision:
        if os.isatty(sys.stdin.fileno()):
            out.write('This will permanently reduce the precision of your '
                      'database. Proceed? [y/N] ')
            ans = input()
            if ans.lower() != 'y':
                sys.exit(-1)
        new_precision = args.reduce_precision
        db.flow.reduce_precision(new_precision,
                                 flt=query,
                                 before=time_values['before'],
                                 after=time_values['after'],
                                 current_precision=args.precision)
        sys.exit(0)

    sep = args.separator or ' | '
    coma = ' ;' if args.separator else ' ; '
    coma2 = ',' if args.separator else ', '
    if args.count:
        count = db.flow.count(query)
        out.write('%(clients)d clients\n%(servers)d servers\n'
                  '%(flows)d flows\n' % count)

    elif args.top:
        top = db.flow.topvalues(query,
                                args.top,
                                collect_fields=args.collect,
                                sum_fields=args.sum,
                                topnbr=args.limit,
                                skip=args.skip,
                                least=args.least)
        for rec in top:
            sys.stdout.write(
                "%s%s%s%s%s\n" %
                ('(' + coma2.join(str(val) for val in rec["fields"]) + ')',
                 sep, rec["count"], sep,
                 coma.join(
                     str('(' + coma2.join(str(val) for val in collected) + ')')
                     for collected in rec["collected"])
                 if rec["collected"] else ""))

    elif args.flow_daily:
        precision = (args.precision if args.precision is not None else
                     config.FLOW_TIME_PRECISION)
        plot_data = {}
        for rec in db.flow.flow_daily(precision,
                                      query,
                                      after=time_values['after'],
                                      before=time_values['before']):
            out.write(
                sep.join([
                    rec["time_in_day"].strftime("%T.%f"), " ; ".join([
                        '(' + x[0] + ', ' + str(x[1]) + ')'
                        for x in rec["flows"]
                    ])
                ]))
            out.write("\n")

            if args.plot:
                for flw in rec["flows"]:
                    t = rec["time_in_day"]
                    # pyplot needs datetime objects
                    dt = datetime.datetime(1970,
                                           1,
                                           1,
                                           hour=t.hour,
                                           minute=t.minute,
                                           second=t.second)
                    plot_data.setdefault(flw[0], {})
                    plot_data[flw[0]][dt] = flw[1]
        if args.plot and plot_data:
            t = datetime.datetime(1970, 1, 1, 0, 0, 0)
            t += datetime.timedelta(seconds=config.FLOW_TIME_BASE % precision)
            times = []
            while t < datetime.datetime(1970, 1, 2):
                times.append(t)
                t = t + datetime.timedelta(seconds=precision)
            ax = plt.subplots()[1]
            fmt = matplotlib.dates.DateFormatter('%H:%M:%S')
            for flow, data in viewitems(plot_data):
                values = [(data[ti] if ti in data else 0) for ti in times]
                plt.step(times, values, '.-', where='post', label=flow)
            plt.legend(loc='best')
            ax.xaxis.set_major_formatter(fmt)
            plt.gcf().autofmt_xdate()
            plt.show()

    else:
        fmt = '%%s%s%%s%s%%s' % (sep, sep)
        node_width = len('XXXX:XXXX:XXXX:XXXX:XXXX:XXXX')
        flow_width = len('tcp/XXXXX')
        for res in db.flow.to_iter(query,
                                   limit=args.limit,
                                   skip=args.skip,
                                   orderby=args.orderby,
                                   mode=args.mode,
                                   timeline=args.timeline):
            if args.json:
                out.write('%s\n' % res)
            else:
                elts = {}
                for elt in ["src", "flow", "dst"]:
                    elts[elt] = res[elt]['label']
                    if args.fields:
                        elts[elt] = "%s%s%s" % (
                            elts[elt], coma,
                            coma.join(
                                str(res[elt]['data'].get(field, ""))
                                for field in args.fields))
                src, flow, dst = elts["src"], elts["flow"], elts["dst"]
                node_width = max(node_width, len(src), len(dst))
                flow_width = max(flow_width, len(flow))
                if not args.separator:
                    fmt = ('%%-%ds%s%%-%ds%s%%-%ds' %
                           (node_width, sep, flow_width, sep, node_width))
                out.write(fmt % (src, flow, dst))
                if args.timeline:
                    out.write(sep)
                    # Print '?' instead of failing if meta.times does not exist
                    try:
                        out.write(
                            coma.join(
                                str(elt) for elt in sorted(res['flow']['data']
                                                           ['meta']['times'])))
                    except KeyError:
                        out.write("?")
                out.write('\n')
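The --precision, --before and --after options, as well as the plotting code above, all build on fixed timeslots: a slot of a given precision is assumed to start at FLOW_TIME_BASE + k * precision seconds, which is why the first plotted tick is offset by config.FLOW_TIME_BASE % precision. A small sketch of the slot arithmetic under that assumption:

FLOW_TIME_BASE = 0  # stand-in for config.FLOW_TIME_BASE (assumption)


def timeslot_start(timestamp, precision):
    """Start (in epoch seconds) of the timeslot containing timestamp."""
    return timestamp - ((timestamp - FLOW_TIME_BASE) % precision)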
Example #16
def main():
    parser, _ = utils.create_argparser(__doc__)
    parser.add_argument('--init', '--purgedb', action='store_true',
                        help='Purge or create and initialize the database.')
    parser.add_argument('--ensure-indexes', action='store_true',
                        help='Create missing indexes (will lock the '
                        'database).')
    parser.add_argument('--node-filters', '-n', nargs="+", metavar="FILTER",
                        help='Filter the results with a list of ivre specific '
                        'node textual filters (see WebUI doc in FLOW.md).')
    parser.add_argument('--flow-filters', '-f', nargs="+", metavar="FILTER",
                        help='Filter the results with a list of ivre specific '
                        'flow textual filters (see WebUI doc in FLOW.md).')
    parser.add_argument('--json', '-j', action='store_true',
                        help='Outputs the full json records of results.')
    parser.add_argument('--count', '-c', action='store_true',
                        help='Only return the count of the results.')
    parser.add_argument('--limit', '-l', type=int,
                        help='Output at most LIMIT results.')
    parser.add_argument('--skip', type=int, default=0,
                        help='Skip first SKIP results.')
    parser.add_argument('--orderby', '-o',
                        help='Order of results ("src", "dst" or "flow")')
    parser.add_argument('--separator', '-s', help="Separator string.")
    parser.add_argument('--top', '-t', nargs="+",
                        help='Top flows for a given set of fields, e.g. '
                        '"--top src.addr dport".')
    parser.add_argument('--collect', '-C', nargs="+",
                        help='When using --top, also collect these '
                        'properties.')
    parser.add_argument('--sum', '-S', nargs="+",
                        help='When using --top, sum on these properties to '
                        'order the result.')
    parser.add_argument('--mode', '-m',
                        help="Query special mode (flow_map, talk_map...)")
    parser.add_argument('--timeline', '-T', action="store_true",
                        help='Retrieves the timeline of each flow')
    parser.add_argument('--flow-daily', action="store_true",
                        help="Flow count per times of the day")
    parser.add_argument('--plot', action="store_true",
                        help="Plot data when possible (requires matplotlib).")
    parser.add_argument('--fields', nargs='+',
                        help="Display these fields for each entry.")
    args = parser.parse_args()

    out = sys.stdout

    if args.plot and plt is None:
        utils.LOGGER.critical("Matplotlib is required for --plot")
        sys.exit(-1)

    if args.init:
        if os.isatty(sys.stdin.fileno()):
            out.write(
                'This will remove any flow result in your database. '
                'Proceed? [y/N] ')
            ans = input()
            if ans.lower() != 'y':
                sys.exit(-1)
        db.flow.init()
        sys.exit(0)

    if args.ensure_indexes:
        if os.isatty(sys.stdin.fileno()):
            out.write(
                'This will lock your database. '
                'Proceed? [y/N] ')
            ans = input()
            if ans.lower() != 'y':
                sys.exit(-1)
        db.flow.ensure_indexes()
        sys.exit(0)

    filters = {"nodes": args.node_filters or [],
               "edges": args.flow_filters or []}

    query = db.flow.from_filters(filters, limit=args.limit, skip=args.skip,
                                 orderby=args.orderby, mode=args.mode,
                                 timeline=args.timeline)
    sep = args.separator or ' | '
    coma = ';' if args.separator else '; '
    coma2 = ',' if args.separator else ', '
    if args.count:
        count = db.flow.count(query)
        out.write('%(clients)d clients\n%(servers)d servers\n'
                  '%(flows)d flows\n' % count)

    elif args.top:
        top = db.flow.top(query, args.top, args.collect, args.sum)
        for rec in top:
            sys.stdout.write("%s%s%s%s%s\n" % (
                coma.join(str(elt) for elt in rec["fields"]),
                sep,
                rec["count"],
                sep,
                coma.join(str(coma2.join(str(val) for val in elt))
                          for elt in rec["collected"])
                if rec["collected"] else ""
            ))

    elif args.flow_daily:
        # FIXME? fully in-memory
        if args.plot:
            plot_data = {}
        for rec in db.flow.flow_daily(query):
            out.write(sep.join([rec["flow"],
                                rec["time_in_day"].strftime("%T.%f"),
                                str(rec["count"])]))
            out.write("\n")

            if args.plot:
                plot_data.setdefault(rec["flow"], [[], []])
                plot_data[rec["flow"]][0].append(rec["time_in_day"])
                plot_data[rec["flow"]][1].append(rec["count"])
        if args.plot:
            for flow, points in viewitems(plot_data):
                plt.plot(points[0], points[1], label=flow)
            plt.legend(loc='best')
            plt.show()

    else:
        fmt = '%%s%s%%s%s%%s' % (sep, sep)
        node_width = len('XXX.XXX.XXX.XXX')
        flow_width = len('tcp/XXXXX')
        for res in db.flow.to_iter(query):
            if args.json:
                out.write('%s\n' % res)
            else:
                elts = {}
                for elt in ["src", "flow", "dst"]:
                    elts[elt] = res[elt]['label']
                    if args.fields:
                        elts[elt] = "%s%s%s" % (
                            elts[elt],
                            coma,
                            coma.join(
                                str(res[elt]['data'].get(field, ""))
                                for field in args.fields
                            )
                        )
                src, flow, dst = elts["src"], elts["flow"], elts["dst"]
                node_width = max(node_width, len(src), len(dst))
                flow_width = max(flow_width, len(flow))
                if not args.separator:
                    fmt = ('%%-%ds%s%%-%ds%s%%-%ds' %
                           (node_width, sep, flow_width, sep, node_width))
                out.write(fmt % (src, flow, dst))
                if args.timeline:
                    out.write(sep)
                    out.write(coma.join(
                        str(elt) for elt in sorted(
                            res['flow']['data']['meta']['times']
                        )
                    ))
                out.write('\n')