Example #1
    def __init__(self, configuration):
        Process.__init__(self)
        self.configuration = configuration
        self.model = Model(self.configuration)
        self.feeds = {}
        self.threads = {}
        self.global_thread = None
Example #2
    def __init__(self, setup):
        super(SnifferEngine, self).__init__()
        self.setup = setup
        sys.stderr.write("[+] Starting sniffer...\n")

        # check if sniffer directory exists
        if not os.path.isdir(self.setup['SNIFFER_DIR']):
            sys.stderr.write("Could not load directory specified in sniffer_dir: {}\n".format(self.setup['SNIFFER_DIR']))
            sys.exit(1)

        sys.stderr.write("[+] Successfully loaded sniffer directory: {}\n".format(self.setup['SNIFFER_DIR']))

        if setup['TLS_PROXY_PORT'] > 0:
            from Malcom.sniffer.tlsproxy.tlsproxy import MalcomTLSProxy
            sys.stderr.write("[+] Starting TLS proxy on port {}\n".format(setup['TLS_PROXY_PORT']))
            self.tls_proxy = MalcomTLSProxy(setup['TLS_PROXY_PORT'])
            self.tls_proxy.engine = self
            self.tls_proxy.start()
        else:
            self.tls_proxy = None

        self.sessions = {}

        self.model = Model(self.setup)
        self.db_lock = threading.Lock()

        self.messenger = SnifferMessenger()
        self.messenger.snifferengine = self
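
For orientation, a minimal sketch of how this constructor might be driven. The setup keys come straight from the snippet above; the values, and the assumption that Model and SnifferMessenger are importable, are mine:

# Hypothetical wiring: SNIFFER_DIR must exist on disk, and TLS_PROXY_PORT = 0
# takes the else branch, so MalcomTLSProxy is never imported or started.
setup = {'SNIFFER_DIR': '/tmp/malcom-sniffer', 'TLS_PROXY_PORT': 0}
engine = SnifferEngine(setup)
assert engine.tls_proxy is None  # no proxy requested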
Example #3
    def __init__(self, max_threads=4):
        self.data = Model()
        # self.max_threads = threading.Semaphore(app.config['THREADS'])
        self.active = False
        self.status = "Inactive"
        self.websocket = None
        self.thread = None
        self.websocket_lock = threading.Lock()
        self.stack_lock = threading.Lock()
        self.progress = 0
        self.total = 0

        self.max_threads = threading.Semaphore(max_threads)  # honor the constructor argument instead of hardcoding 4
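
The semaphore presumably caps how many analysis workers run at once. A minimal sketch of the acquire/release pattern it implies; process_element and its arguments are hypothetical:

# At most max_threads of these run concurrently against one Analytics instance.
def process_element(analytics, element):
    with analytics.max_threads:  # acquire a slot; released on exit, even on error
        pass                     # per-element analysis would go here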
Example #4
	def __init__(self):
		self.data = Model()
		self.max_threads = Malcom.config.get('MAX_THREADS', 4)
		self.active = False
		self.status = "Inactive"
		self.websocket = None
		self.thread = None
		self.websocket_lock = threading.Lock()
		self.stack_lock = threading.Lock()
		self.progress = 0
		self.total = 0

		self.max_threads = threading.Semaphore(self.max_threads)
		self.worker_threads = {}
Example #5
class SnifferEngine(object):
    """docstring for SnifferEngine"""

    def __init__(self, setup):
        super(SnifferEngine, self).__init__()
        self.setup = setup
        sys.stderr.write("[+] Starting sniffer...\n")

        # check if sniffer directory exists
        if not os.path.isdir(self.setup['SNIFFER_DIR']):
            sys.stderr.write("Could not load directory specified in sniffer_dir: {}\n".format(self.setup['SNIFFER_DIR']))
            sys.exit(1)

        sys.stderr.write("[+] Successfully loaded sniffer directory: {}\n".format(self.setup['SNIFFER_DIR']))

        if setup['TLS_PROXY_PORT'] > 0:
            from Malcom.sniffer.tlsproxy.tlsproxy import MalcomTLSProxy
            sys.stderr.write("[+] Starting TLS proxy on port {}\n".format(setup['TLS_PROXY_PORT']))
            self.tls_proxy = MalcomTLSProxy(setup['TLS_PROXY_PORT'])
            self.tls_proxy.engine = self
            self.tls_proxy.start()
        else:
            self.tls_proxy = None

        self.sessions = {}

        self.model = Model(self.setup)
        self.db_lock = threading.Lock()

        self.messenger = SnifferMessenger()
        self.messenger.snifferengine = self


    def fetch_sniffer_session(self, session_id):
        try:
            debug_output("Fetching session {} from memory".format(session_id))
            session = self.sessions.get(ObjectId(session_id))
        except Exception as e:
            debug_output("An {} error occurred when fetching session '{}': {}".format(type(e).__name__, session_id, e), 'error')
            return

        # if not found, recreate it from the DB
        if not session:
            debug_output("Fetching session {} from DB".format(session_id))
            s = self.model.get_sniffer_session(session_id)
            if not s:
                return None

            # TLS interception is only possible if the PCAP hasn't been generated yet
            intercept_tls = s['intercept_tls'] and not s['pcap']

            session = SnifferSession(s['name'],
                                     None,
                                     None,
                                     self,
                                     id=s['_id'],
                                     filter_restore=s['filter'],
                                     intercept_tls=intercept_tls)
            session.pcap = s['pcap']
            session.public = s['public']
            session.date_created = s['date_created']
            self.sessions[session.id] = session
            session_data = bson_loads(s['session_data'])
            session.nodes = session_data['nodes']
            session.edges = session_data['edges']
            session.packet_count = s['packet_count']
            session.flows = {}
            for flow in session_data['flows']:
                f = Flow.load_flow(flow)
                session.flows[f.fid] = f

        return session

    def new_session(self, params):
        session_name = params['session_name']
        remote_addr = params['remote_addr']
        filter = params['filter']
        intercept_tls = params['intercept_tls']

        sniffer_session = SnifferSession(session_name, remote_addr, filter, self, None, intercept_tls)
        sniffer_session.pcap = params['pcap']
        sniffer_session.public = params['public']

        return self.model.save_sniffer_session(sniffer_session)

    def delete_session(self, session_id):
        session = self.fetch_sniffer_session(session_id)

        if not session:
            return 'notfound'

        if session.status():
            return "running"

        else:
            self.model.del_sniffer_session(session, self.setup['SNIFFER_DIR'])
            return "removed"

    def commit_to_db(self, session):
        with self.db_lock:
            session.save_pcap()
            self.model.save_sniffer_session(session)
        debug_output("[+] Sniffing session {} saved".format(session.name))
        return True
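
A sketch of how these methods chain together. The params keys mirror what new_session() reads above; the values, and the assumption that save_sniffer_session() returns the new session's id, are mine:

params = {'session_name': 'demo', 'remote_addr': '127.0.0.1',
          'filter': 'port 80', 'intercept_tls': False,
          'pcap': False, 'public': False}
session_id = engine.new_session(params)             # assuming this returns the session id
session = engine.fetch_sniffer_session(session_id)  # memory first, then rebuilt from the DB
if engine.delete_session(session_id) == 'running':
    pass  # a running capture must be stopped before it can be deleted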
Example #6
class Analytics:
    def __init__(self, max_threads=4):
        self.data = Model()
        # self.max_threads = threading.Semaphore(app.config['THREADS'])
        self.active = False
        self.status = "Inactive"
        self.websocket = None
        self.thread = None
        self.websocket_lock = threading.Lock()
        self.stack_lock = threading.Lock()
        self.progress = 0
        self.total = 0

        self.max_threads = threading.Semaphore(max_threads)  # honor the constructor argument instead of hardcoding 4

    def add_text(self, text, tags=None):
        tags = tags or []  # avoid a shared mutable default argument
        added = []
        for t in text:
            elt = None
            if t.strip() != "":
                if is_url(t):
                    elt = Url(is_url(t), [])
                elif is_hostname(t):
                    elt = Hostname(is_hostname(t), [])
                elif is_ip(t):
                    elt = Ip(is_ip(t), [])
                if elt:
                    added.append(self.save_element(elt, tags))

        if len(added) == 1:
            return added[0]
        else:
            return added

    def save_element(self, element, tags=None, with_status=False):
        element.upgrade_tags(tags or [])  # avoid a shared mutable default argument
        return self.data.save(element, with_status=with_status)

    # graph function

    def add_artifacts(self, data, tags=None):
        tags = tags or []  # avoid a shared mutable default argument
        artifacts = find_artifacts(data)

        added = []
        for url in artifacts["urls"]:
            added.append(self.save_element(url, tags))

        for hostname in artifacts["hostnames"]:
            added.append(self.save_element(hostname, tags))

        for ip in artifacts["ips"]:
            added.append(self.save_element(ip, tags))

        return added

    # elements analytics

    def bulk_asn(self, items=1000):

        last_analysis = {
            "$or": [
                {"last_analysis": {"$lt": datetime.datetime.utcnow() - datetime.timedelta(days=7)}},
                {"last_analysis": None},
            ]
        }

        nobgp = {"$or": [{"bgp": None}, last_analysis]}

        total = self.data.elements.find({"$and": [{"type": "ip"}, nobgp]}).count()
        done = 0
        results = [r for r in self.data.elements.find({"$and": [{"type": "ip"}, nobgp]})[:items]]

        while len(results) > 0:

            ips = []
            debug_output("(getting ASNs for %s IPs - %s/%s done)" % (len(results), done, total), type="analytics")

            for r in results:
                ips.append(r)

            as_info = {}

            try:
                as_info = get_net_info_shadowserver(ips)
            except Exception as e:
                debug_output("Could not get AS for IPs: %s" % e)

            if as_info == {}:
                debug_output("as_info empty", "error")
                return

            for ip in as_info:

                _as = as_info[ip]
                _ip = self.data.find_one({"value": ip})

                if not _ip:
                    return

                del _as["ip"]
                for key in _as:
                    if key not in ["type", "value", "tags"]:
                        _ip[key] = _as[key]
                del _as["bgp"]

                _as = As.from_dict(_as)

                # commit any changes to DB
                _as = self.save_element(_as)
                _ip["last_analysis"] = datetime.datetime.utcnow()  # stamp the analysis time so the query above eventually stops matching this IP
                _ip = self.save_element(_ip)

                if _as and _ip:
                    self.data.connect(_ip, _as, "net_info")
            done += len(results)
            results = [r for r in self.data.elements.find({"$and": [{"type": "ip"}, nobgp]})[:items]]
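
A hypothetical call illustrating add_text()'s return convention, which is easy to miss: a single recognized artifact comes back as a bare element, several come back as a list.

analytics = Analytics()
one = analytics.add_text(['8.8.8.8'], tags=['demo'])                   # single match: bare element
many = analytics.add_text(['http://example.com/a', 'example.org'],
                          tags=['demo'])                               # several matches: a list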
Example #7
class FeedEngine(Process):
    """Feed engine. This object will load and update feeds"""
    def __init__(self, configuration):
        Process.__init__(self)
        self.configuration = configuration
        self.model = Model(self.configuration)
        self.feeds = {}
        self.threads = {}
        self.global_thread = None
        # self.messenger = FeedsMessenger(self)

    def run_feed(self, feed_name):
        # Check if feed exists in list
        if not self.feeds.get(feed_name):
            return False

        # if feed is not already running
        if not (self.threads.get(feed_name) and self.threads[feed_name].is_alive()):
            self.threads[feed_name] = threading.Thread(target=self.feeds[feed_name].run)
            self.threads[feed_name].start()

        return True

    def run_all_feeds(self, block=False):
        debug_output("Running all feeds")
        for feed_name in [f for f in self.feeds if self.feeds[f].enabled]:
            debug_output('Starting thread for feed %s...' % feed_name)
            self.run_feed(feed_name)

        if block:
            for t in self.threads:
                if self.threads[t].is_alive():
                    self.threads[t].join()

    def stop_all_feeds(self):
        self.shutdown = True
        for t in self.threads:
            if self.threads[t].is_alive():
                self.threads[t]._Thread__stop()  # CPython-private API; unreliable, and removed in Python 3

    def run_scheduled_feeds(self):
        for f in self.feeds:
            if self.feeds[f].next_run < datetime.utcnow() and self.feeds[f].enabled:
                debug_output('Starting thread for feed %s...' % self.feeds[f].name)
                self.run_feed(self.feeds[f].name)

        for t in self.threads:
            if self.threads[t].is_alive():
                self.threads[t].join()

    def run(self):
        self.messenger = FeedsMessenger(self)
        self.shutdown = False
        while not self.shutdown:
            try:
                debug_output("FeedEngine heartbeat")
                if self.scheduler:
                    self.run_scheduled_feeds()
                time.sleep(self.period)  # run a new thread every period seconds
            except KeyboardInterrupt:
                self.shutdown = True

    def load_feeds(self, feed_directories):
        debug_output("Loading feeds in {}...".format(feed_directories))

        for d, subd, files in os.walk(feed_directories):
            if not d.endswith('core'):
                for f in files:
                    if f.endswith(".py") and f != "__init__.py":
                        full_filename = os.path.join(d, f)
                        module = imp.load_source(f.split('.')[0], full_filename)

                        for name, obj in module.__dict__.items():
                            try:
                                if issubclass(obj, Feed) and obj != Feed:
                                    feed = module.__dict__.get(name)()
                                    feed.model = self.model
                                    feed.engine = self
                                    feed.tags = list(set(feed.tags + [d]))
                                    self.feeds[name] = feed
                                    debug_output("Loaded feed {}".format(name))
                                    break
                            except TypeError:
                                pass
                        else:
                            # for/else: reached only when no Feed subclass triggered the break above
                            debug_output("Something went wrong parsing {}".format(full_filename), type='error')

        self.load_feed_status()

    def load_feed_status(self):
        feed_status = self.model.get_feed_progress([f for f in self.feeds])
        for status in feed_status:
            name = status['name']
            self.feeds[name].last_run = status['last_run']
            self.feeds[name].next_run = status['last_run'] + self.feeds[name].run_every
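
A sketch of how the engine might be started. Note that run() reads self.scheduler and self.period, neither of which is set in __init__, so the caller presumably configures them; the path and values below are assumptions:

engine = FeedEngine(configuration)      # configuration as expected by Model
engine.load_feeds('/opt/malcom/feeds')  # walks the tree and instantiates Feed subclasses
engine.scheduler = True                 # read by run(); not set in __init__
engine.period = 60                      # seconds between scheduler heartbeats
engine.start()                          # Process.start() invokes run() in a child process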
Example #8
class Analytics:
    def __init__(self):
        self.data = Model()
        self.max_threads = Malcom.config.get('MAX_THREADS', 4)
        self.active = False
        self.status = "Inactive"
        self.websocket = None
        self.thread = None
        self.websocket_lock = threading.Lock()
        self.stack_lock = threading.Lock()
        self.progress = 0
        self.total = 0

        self.max_threads = threading.Semaphore(self.max_threads)
        self.worker_threads = {}

    def add_text(self, text, tags=None):
        tags = tags or []  # avoid a shared mutable default argument
        added = []
        for t in text:
            elt = None
            if t.strip() != "":
                if is_url(t):
                    elt = Url(is_url(t), [])
                elif is_hostname(t):
                    elt = Hostname(is_hostname(t), [])
                elif is_ip(t):
                    elt = Ip(is_ip(t), [])
                if elt:
                    added.append(self.save_element(elt, tags))

        if len(added) == 1:
            return added[0]
        else:
            return added

    def save_element(self, element, tags=None, with_status=False):
        element.upgrade_tags(tags or [])  # avoid a shared mutable default argument
        return self.data.save(element, with_status=with_status)

    # graph function
    def add_artifacts(self, data, tags=None):
        tags = tags or []  # avoid a shared mutable default argument
        artifacts = find_artifacts(data)

        added = []
        for url in artifacts['urls']:
            added.append(self.save_element(url, tags))

        for hostname in artifacts['hostnames']:
            added.append(self.save_element(hostname, tags))

        for ip in artifacts['ips']:
            added.append(self.save_element(ip, tags))

        return added

    # elements analytics

    def bulk_asn(self, items=1000):

        last_analysis = {
            '$or': [
                {'last_analysis': {"$lt": datetime.datetime.utcnow() - datetime.timedelta(days=7)}},
                {'last_analysis': None},
            ]
        }

        nobgp = {"$or": [{'bgp': None}, last_analysis]}

        total = self.data.elements.find({"$and": [{'type': 'ip'}, nobgp]}).count()
        done = 0
        results = [r for r in self.data.elements.find({"$and": [{'type': 'ip'}, nobgp]})[:items]]

        while len(results) > 0:

            debug_output("(getting ASNs for %s IPs - %s/%s done)" % (len(results), done, total), type='analytics')

            ips = list(results)

            as_info = {}

            try:
                as_info = get_net_info_shadowserver(ips)
            except Exception as e:
                debug_output("Could not get AS for IPs: %s" % e)

            if as_info == {}:
                debug_output("as_info empty", 'error')
                return

            for ip in as_info:

                _as = as_info[ip]
                _ip = self.data.find_one({'value': ip})

                if not _ip:
                    return

                del _as['ip']
                for key in _as:
                    if key not in ['type', 'value', 'tags']:
                        _ip[key] = _as[key]
                del _as['bgp']

                _as = As.from_dict(_as)

                # commit any changes to DB
                _as = self.save_element(_as)
                _ip['last_analysis'] = datetime.datetime.utcnow()  # UTC, to match the cutoff in the query above
                _ip = self.save_element(_ip)

                if _as and _ip:
                    self.data.connect(_ip, _as, 'net_info')
            done += len(results)
            results = [r for r in self.data.elements.find({"$and": [{'type': 'ip'}, nobgp]})[:items]]
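
Stamping last_analysis before saving is what lets the while loop terminate: each pass re-runs the same query, and a processed IP stops matching both branches of nobgp (it now carries bgp and a fresh last_analysis). A condensed sketch of that invariant, reusing the names from the method above:

query = {"$and": [{'type': 'ip'}, nobgp]}
while True:
    batch = list(self.data.elements.find(query)[:items])
    if not batch:
        break  # every remaining IP has been annotated; the query drains to empty
    # ... resolve AS info and save each element, stamping last_analysis ...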