Example #1
    def run(self):
        """Run the KafkaRouter with all of the registered logic routes"""
        with signal_utils.signal_catcher(self.exit_program):

            # Now let's process our Kafka messages
            for message in self.input_pipe:
                topic = message.topic
                data = message.value
                for route in self.routes[topic]:
                    out_topic = route(data)
                    if out_topic:
                        self.output_pipe.send(out_topic, data)
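The run() loop above hands each message payload to the route callables registered for its topic and re-publishes on whatever topic a route returns. For context, a route is just a function that takes the payload dict and returns an output topic (or None to drop the message); a minimal sketch of such a callable, with a hypothetical name and topic not found in the excerpt:

# Hypothetical route callable: takes the message payload dict and returns an
# output topic name or None. It would be registered under self.routes['dns'].
def risky_dns_route(message):
    """Forward DNS queries ending in .xyz to a (hypothetical) 'risky_dns' topic."""
    if message.get('query', '').endswith('.xyz'):
        return 'risky_dns'
    return None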
Example #2
    # Check for unknown args
    if commands:
        print('Unrecognized args: %s' % commands)
        sys.exit(1)

    # If no args just call help
    if len(sys.argv) == 1:
        parser.print_help()
        print('\nNote: Download the yara repo and give the index file as an arg')
        print('$ git clone https://github.com/Yara-Rules/rules')
        print('$ python yara_matches -r /path/to/rules/index.yar -e /path/to/bro/extract_files')
        sys.exit(1)

    # Sanity check that the args exist and are what we expect
    if not os.path.isfile(args.rule_index):
        print('--rule-index file not found... should be /full/path/to/yara/rules/index.yar')
        sys.exit(1)
    if not os.path.isdir(args.extract_dir):
        print('--extract-dir directory not found... should be /full/path/to/bro/extract_files')
        sys.exit(1)

    # Load/compile the yara rules
    my_rules = yara.compile(args.rule_index)

    # Create DirWatcher and start watching the Zeek extract_files directory
    print('Watching Extract Files Directory: {:s}'.format(args.extract_dir))
    dir_watcher.DirWatcher(args.extract_dir, callback=yara_match, rules=my_rules)

    # Okay so just wait around for files to be dropped by Zeek or someone hits Ctrl-C
    with signal_utils.signal_catcher(my_exit):
        while True:
            time.sleep(.5)
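The yara_match callback handed to DirWatcher above is not part of this excerpt. A minimal sketch of what it could look like, assuming DirWatcher invokes it with the path of each new file plus the keyword arguments it was constructed with (an assumption, not confirmed by the excerpt):

# Hypothetical sketch of the yara_match callback (the real one is not shown above).
def yara_match(file_path, rules):
    """Run the compiled YARA rules against a newly extracted file."""
    matches = rules.match(file_path)  # yara-python returns a list of matches
    if matches:
        print('YARA matches for {:s}: {!r}'.format(file_path, matches))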
Example #3
        # Load the VirusTotal query cache if it exists, otherwise create a new one
        try:
            vtq = pickle.load(open('vtq.pkl', 'rb'))
            print('Opening VirusTotal Query Cache (cache_size={:d})...'.format(vtq.size))
        except IOError:
            vtq = vt_query.VTQuery(max_cache_time=60 * 24 * 7)  # One week cache

        # See our 'Risky Domains' Notebook for the analysis and
        # statistical methods used to compute this risky set of TLDs
        risky_tlds = set([
            'info', 'tk', 'xyz', 'online', 'club', 'ru', 'website', 'in', 'ws',
            'top', 'site', 'work', 'biz', 'name', 'tech', 'loan', 'win', 'pro'
        ])

        # Launch long lived process with signal catcher
        with signal_utils.signal_catcher(save_vtq):

            # Run the zeek reader on the dns.log file looking for risky TLDs
            reader = zeek_log_reader.ZeekLogReader(args.zeek_log)
            for row in reader.readrows():

                # Pull out the TLD
                query = row['query']
                tld = tldextract.extract(query).suffix

                # Check if the TLD is in the risky group
                if tld in risky_tlds:
                    # Show the risky dns
                    print('Making VT query for {:s}...'.format(query))

                    # Make the VT query
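The save_vtq callback given to signal_catcher above is also outside this excerpt. Given the pickle.load of 'vtq.pkl' at the top, a plausible sketch (an assumption, not the original function) simply persists the query cache before exiting:

# Hypothetical exit callback: persist the VirusTotal query cache on Ctrl-C/SIGTERM.
def save_vtq():
    print('Saving VirusTotal Query Cache...')
    pickle.dump(vtq, open('vtq.pkl', 'wb'))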
Example #4
    parser.add_argument('--topics', type=lambda s: s.split(','), default='all',
                        help='Specify the Kafka topics, comma-separated with no spaces (e.g. dns or dns,http,conn); defaults to all')
    args, commands = parser.parse_known_args()

    # Check for unknown args
    if commands:
        print('Unrecognized args: %s' % commands)
        sys.exit(1)

    # Create a Kafka Consumer and subscribe to the topics
    all_topics = ['capture_loss', 'dns', 'http', 'ssl', 'weird', 'conn', 'files', 'x509']
    kserver = args.server
    topics = args.topics if args.topics != ['all'] else all_topics
    print('Subscribing to: {!r}'.format(topics))
    try:
        consumer = KafkaConsumer(*topics, bootstrap_servers=[kserver],
                                 value_deserializer=lambda x: json.loads(x.decode('utf-8')))
    except NoBrokersAvailable:
        print('Could not connect to Kafka server: {:s}'.format(args.server))
        sys.exit(-1)

    # Launch long lived process with signal catcher
    with signal_utils.signal_catcher(exit_program):

        # Now let's process our Kafka messages
        for message in consumer:
            topic = message.topic
            data = message.value
            print('\n{:s}'.format(topic.upper()))
            pprint(data)
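All four examples share the same pattern: a long-lived loop wrapped in signal_utils.signal_catcher so a cleanup callback runs when the process receives Ctrl-C or a termination signal. A minimal self-contained sketch of that pattern (the zat.utils import path is an assumption; the excerpts above do not show their imports):

import sys
import time
from zat.utils import signal_utils  # assumed import path for the signal_utils module

def exit_program():
    """Cleanup callback invoked when the process is signalled."""
    print('Exiting...')
    sys.exit()

# Run a long-lived loop; the callback handles Ctrl-C / SIGTERM cleanly.
with signal_utils.signal_catcher(exit_program):
    while True:
        time.sleep(0.5)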