Example #1
    def get_iplists(self):
        ip_lists = {}
        # Get all IPs except those on the tor list
        q = {
            'query': {
                'bool': {
                    'must_not': [{
                        'match': {
                            'iplist.name': 'tor'
                        }
                    }]
                }
            }
        }
        res = rawSearch(q, index='redelk-iplist-*')

        if not res:
            return ip_lists

        for ipdoc in res['hits']['hits']:
            ip = getValue('_source.iplist.ip', ipdoc)
            iplist_name = getValue('_source.iplist.name', ipdoc)
            # This list already has at least one IP, append to it
            if iplist_name in ip_lists:
                ip_lists[iplist_name].append(ip)
            # First IP for this list, create the list
            else:
                ip_lists[iplist_name] = [ip]

        return ip_lists
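The helpers rawSearch and getValue used throughout these examples come from the RedELK helpers module. A minimal sketch of the dotted-path lookup that getValue is assumed to perform (illustrative only; the real helper may behave differently):

    def getValue(path, source):
        # Walk a dotted path such as '_source.iplist.ip' through nested dicts,
        # returning None as soon as a key is missing (assumed behaviour)
        value = source
        for key in path.split('.'):
            if isinstance(value, dict) and key in value:
                value = value[key]
            else:
                return None
        return value

With that in place, get_iplists returns a mapping from iplist name to the IPs on that list, e.g. {'redteam': ['203.0.113.10/32'], ...} (values made up for illustration).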
Example #2
    def get_alarmed_ips(self):
        query = {
            'sort': [{'@timestamp': {'order': 'desc'}}],
            'query': {
                'bool': {
                    'filter': [
                        {
                            'range': {
                                '@timestamp': {
                                    'gte': 'now-1y'
                                }
                            }
                        },
                        {'match': {'tags': info['submodule']}}
                    ]
                }
            }
        }
        res = rawSearch(query, index='redirtraffic-*')
        if res is None:
            alarmed = []
        else:
            alarmed = res['hits']['hits']

        # Create a dict grouped by IP address (from source.ip)
        ips = {}
        for al in alarmed:
            ip = getValue('_source.source.ip', al)
            if ip in ips:
                ips[ip].append(al)
            else:
                ips[ip] = [al]

        return ips
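The group-by-source.ip pattern above recurs in Examples #3, #6 and #9. An equivalent, slightly more compact formulation using collections.defaultdict (a sketch, not the project's code):

    from collections import defaultdict

    ips = defaultdict(list)
    for al in alarmed:
        ip = getValue('_source.source.ip', al)
        ips[ip].append(al)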
Example #3
    def enrich_beacon_data(self):
        # Get all lines in rtops that have not been enriched yet (for CS)
        query = ('implant.id:* AND c2.program: cobaltstrike '
                 'AND NOT c2.log.type:implant_newimplant '
                 'AND NOT tags:%s' % info['submodule'])
        notEnriched = getQuery(query, size=10000, index='rtops-*')

        # Create a dict grouped by implant ID
        implantIds = {}
        for ne in notEnriched:
            implantId = getValue('_source.implant.id', ne)
            if implantId in implantIds:
                implantIds[implantId].append(ne)
            else:
                implantIds[implantId] = [ne]

        hits = []
        # For each implant ID, get the initial beacon line
        for iID in implantIds:
            initialBeaconDoc = self.get_initial_beacon_doc(iID)

            # If no initial beacon line is found, skip this implant ID
            if not initialBeaconDoc:
                continue

            for doc in implantIds[iID]:
                # Fields to copy: host.*, implant.*, process.*, user.*
                res = self.copy_data_fields(
                    initialBeaconDoc, doc,
                    ['host', 'implant', 'user', 'process'])
                if res:
                    hits.append(res)

        return hits
Example #4
    def get_last_sync(self):
        # Get the @timestamp of the most recent tor iplist document in ES (i.e. the last sync)
        q = {
            "size": 1,
            "sort": [{
                "@timestamp": {
                    "order": "desc"
                }
            }],
            "query": {
                "bool": {
                    "filter": [{
                        "term": {
                            "iplist.name": "tor"
                        }
                    }]
                }
            }
        }

        res = rawSearch(q, index='redelk-*')

        self.logger.debug(res)

        # Return the timestamp of the latest hit, or the epoch if nothing was found
        if res and len(res['hits']['hits']) > 0:
            dt_str = getValue('_source.@timestamp', res['hits']['hits'][0])
            dt = datetime.datetime.strptime(dt_str, '%Y-%m-%dT%H:%M:%S.%f')
            return dt
        else:
            return datetime.datetime.fromtimestamp(0)
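Note that the strptime format string only matches timestamps without a timezone suffix; a quick illustration with a hypothetical value:

    import datetime

    dt_str = '2021-03-01T12:34:56.789'  # assumed format: no 'Z' or UTC-offset suffix
    dt = datetime.datetime.strptime(dt_str, '%Y-%m-%dT%H:%M:%S.%f')
    # A value like '2021-03-01T12:34:56.789Z' would raise ValueError with this format string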
Example #5
    def get_es_tor_exitnodes(self):
        q = {'query': {'bool': {'filter': {'term': {'iplist.name': 'tor'}}}}}
        res = rawSearch(q, index='redelk-*')

        if not res:
            return []

        iplist = []
        for ipdoc in res['hits']['hits']:
            ip = getValue('_source.iplist.ip', ipdoc)
            iplist.append(ip)

        return iplist
Example #6
    def alarm_check(self, alarmed_ips):
        # This check queries for IPs that are not tagged with any iplist_* but do talk to c2-* backends on the redirectors
        query = {
            'sort': [{'@timestamp': {'order': 'desc'}}],
            'query': {
                'bool': {
                    'filter': [
                        {'match': {'tags': 'enrich_iplists'}}
                    ],
                    'must': {
                        'query_string': {
                            'fields': ['redir.backend.name'],
                            'query': 'c2-*'
                        }
                    },
                    'must_not': [{
                            'query_string': {
                                'fields': ['tags'],
                                'query': 'iplist_*'
                            }
                        },
                        {'match': {'tags': info['submodule']}}
                    ]
                }
            }
        }
        res = rawSearch(query, index='redirtraffic-*')
        if res is None:
            notEnriched = []
        else:
            notEnriched = res['hits']['hits']

        # Create a dict grouped by IP address (from source.ip)
        ips = {}
        for ne in notEnriched:
            ip = getValue('_source.source.ip', ne)
            if ip in ips:
                ips[ip].append(ne)
            else:
                ips[ip] = [ne]

        hits = []

        # Check whether each IP has already been alarmed within the timeframe defined in the config
        for ip in ips:
            # Not alarmed yet, process it
            if ip not in alarmed_ips:
                hits += ips[ip]

        # Return the list of new documents to be alarmed
        return hits
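Combined with get_alarmed_ips from Example #2, the intended call pattern inside the alarm module is presumably along these lines (a sketch, not taken verbatim from the source):

    alarmed_ips = self.get_alarmed_ips()
    new_hits = self.alarm_check(alarmed_ips)
    # new_hits now only contains documents whose source.ip has not been alarmed before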
Example #7
    def send_alarm(self, alarm):

        tmsg = pymsteams.connectorcard(
            config.notifications['msteams']['webhook_url'])
        description = alarm['info']['description']
        if len(alarm['groupby']) > 0:
            description += '\n *Please note that the items below have been grouped by: %s*' % pprint(
                alarm['groupby'])
        tmsg.text(description)
        tmsg.color('red')
        try:
            for hit in alarm['hits']['hits']:
                tcs = pymsteams.cardsection()
                tcs.disableMarkdown()
                i = 0
                title = hit['_id']
                while i < len(alarm['groupby']):
                    if i == 0:
                        title = getValue('_source.%s' % alarm['groupby'][i],
                                         hit)
                    else:
                        title = '%s / %s' % (
                            title,
                            getValue('_source.%s' % alarm['groupby'][i], hit))
                    i += 1
                tcs.activityTitle('Alarm on item: %s' % title)
                # tcs.activitySubtitle(alarm['info']['description'])
                for field in alarm['fields']:
                    val = getValue('_source.%s' % field, hit)
                    tcs.addFact(field, pprint(val))
                tmsg.addSection(tcs)
        except Exception as e:
            self.logger.exception(e)

        tmsg.title('Alarm from %s [%s hits]' %
                   (alarm['info']['name'], alarm['hits']['total']))
        tmsg.send()
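send_alarm only dereferences a handful of keys from the alarm dict; a minimal illustrative shape (all values here are hypothetical):

    alarm = {
        'info': {'name': 'dummy_alarm', 'description': 'Example alarm description'},
        'groupby': ['source.ip'],
        'fields': ['source.ip', 'redir.backend.name'],
        'hits': {
            'total': 1,
            'hits': [{
                '_id': 'abc123',
                '_source': {
                    'source': {'ip': '203.0.113.7'},
                    'redir': {'backend': {'name': 'c2-https'}}
                }
            }]
        }
    }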
Example #8
    def enrich_tor(self, iplist):
        # Get all lines in redirtraffic that have not been enriched with 'enrich_iplist' or 'enrich_tor'
        # Only consider documents created before the last run of enrich_iplists (to avoid a race condition)
        iplist_lastrun = getLastRun('enrich_iplists')
        query = {
            'sort': [{
                '@timestamp': {
                    'order': 'desc'
                }
            }],
            'query': {
                'bool': {
                    'filter': [{
                        'range': {
                            '@timestamp': {
                                'lte': iplist_lastrun.isoformat()
                            }
                        }
                    }],
                    'must_not': [{
                        'match': {
                            'tags': info['submodule']
                        }
                    }]
                }
            }
        }
        res = rawSearch(query, index='redirtraffic-*')
        if res is None:
            notEnriched = []
        else:
            notEnriched = res['hits']['hits']

        # For each IP, check if it is in tor exit node data
        hits = []
        for ne in notEnriched:
            ip = getValue('_source.source.ip', ne)
            if ip in iplist:
                hits.append(ne)

        return hits
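get_es_tor_exitnodes from Example #5 presumably supplies the iplist argument here, along the lines of (a sketch):

    iplist = self.get_es_tor_exitnodes()
    if iplist:
        hits = self.enrich_tor(iplist)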
Example #9
    def enrich_greynoise(self):
        # Get all lines in redirtraffic that have not been enriched with 'enrich_greynoise'
        # Only consider documents created before the last run of enrich_iplists (to avoid a race condition)
        iplist_lastrun = getLastRun('enrich_iplists')
        query = {
            'sort': [{
                '@timestamp': {
                    'order': 'desc'
                }
            }],
            'query': {
                'bool': {
                    'filter': [{
                        'range': {
                            '@timestamp': {
                                'lte': iplist_lastrun.isoformat()
                            }
                        }
                    }],
                    'must_not': [{
                        'match': {
                            'tags': info['submodule']
                        }
                    }]
                }
            }
        }
        res = rawSearch(query, index='redirtraffic-*')
        if res is None:
            notEnriched = []
        else:
            notEnriched = res['hits']['hits']

        # Create a dict grouped by IP address (from source.ip)
        ips = {}
        for ne in notEnriched:
            ip = getValue('_source.source.ip', ne)
            if ip in ips:
                ips[ip].append(ne)
            else:
                ips[ip] = [ne]

        hits = []
        # For each IP, get the greynoise data
        for ip in ips:
            # Get data from redirtraffic if within interval
            lastESData = self.get_last_es_data(ip)

            if not lastESData:
                greynoiseData = self.get_greynoise_data(ip)
            else:
                greynoiseData = getValue('_source.greynoise', lastESData)

            # If no greynoise data found, skip the IP
            if not greynoiseData:
                continue

            for doc in ips[ip]:
                # Fields to copy: greynoise.*
                res = self.add_greynoise_data(doc, greynoiseData)
                if res:
                    hits.append(res)

        return hits
Example #10
    def sync_iplist(self, iplist='redteam'):
        # Get data from config file iplist
        cfg_iplist = []
        fname = '/etc/redelk/iplist_%s.conf' % iplist

        # Check first if the local config file exists; if not, skip the sync
        if not os.path.isfile(fname):
            self.logger.warning(
                'File %s doesn\'t exist, skipping IP list sync for this one.' %
                fname)
            return

        with open(fname, 'r') as f:
            content = f.readlines()

        for line in content:
            m = re.match(IP_CIDR_RE, line)
            if m:
                cfg_iplist.append((m.group(1), m.group(len(m.groups()))))
            else:
                m = re.match(IP_RE, line)
                if m:
                    cfg_iplist.append(
                        ('%s/32' % m.group(1), m.group(len(m.groups()))))

        # Get data from ES iplist
        query = 'iplist.name:%s' % iplist
        es_iplist_docs = getQuery(query, size=10000, index='redelk-*')

        # Build the list of (ip, doc) tuples currently stored in ES for this iplist
        es_iplist = []
        for doc in es_iplist_docs:
            ip = getValue('_source.iplist.ip', doc)
            if ip:
                es_iplist.append((ip, doc))

        for ipc, comment in cfg_iplist:
            found = [item for item in es_iplist if ipc in item]
            if not found:
                self.logger.debug('IP not found in ES: %s' % ipc)
                # if not, add it
                self.add_es_ip(ipc, iplist, comment)

        toadd = []
        for ipe, doc in es_iplist:
            # Check if ES IP is in config file
            found = [item for item in cfg_iplist if ipe in item]
            if not found:
                # if not, check if source = config_file
                if getValue('_source.iplist.source', doc) == 'config_file':
                    # if yes, remove IP from ES
                    self.remove_es_ip(doc, iplist)
                else:
                    # if not, add it
                    comment = getValue('_source.iplist.comment', doc)
                    if comment:
                        ipa = '%s # From ES -- %s' % (ipe, comment)
                    else:
                        ipa = '%s # From ES' % ipe
                    toadd.append(ipa)

        self.add_cfg_ips(toadd, iplist)

        return toadd
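The config file read at the top of sync_iplist is assumed to hold one IP or CIDR per line, optionally followed by a '#' comment (the actual IP_RE and IP_CIDR_RE patterns are defined elsewhere in the project). For illustration, /etc/redelk/iplist_redteam.conf might contain:

    203.0.113.10        # single host, stored in ES as 203.0.113.10/32
    198.51.100.0/24     # CIDR range, stored as-is
    192.0.2.99 # From ES -- added back by sync_iplist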