def process(self, i, router):
    """Resolve CNAME records for a FQDN indicator and submit each target
    as a new, slightly lower-confidence FQDN indicator."""
    if i.itype != 'fqdn':
        return

    try:
        r = resolve_ns(i.indicator, t='CNAME')
    except Timeout:
        self.logger.info('timeout trying to resolve: {}'.format(i.indicator))
        r = []

    for rr in r:
        # wildcard CNAMEs are legal; strip the leading '*.' and trailing dot
        # http://serverfault.com/questions/44618/is-a-wildcard-cname-dns-record-valid
        rr = str(rr).rstrip('.').lstrip('*.')
        if rr in ['', 'localhost']:
            continue

        fqdn = Indicator(**i.__dict__())
        fqdn.indicator = rr
        fqdn.lasttime = arrow.utcnow()

        try:
            resolve_itype(fqdn.indicator)
        except InvalidIndicator as e:
            self.logger.error(fqdn)
            self.logger.error(e)
            # NOTE(review): this aborts the remaining CNAME records;
            # `continue` may have been intended -- confirm
            return

        fqdn.itype = 'fqdn'
        fqdn.confidence = (fqdn.confidence - 1)
        router.indicators_create(fqdn)
def _log_search(self, t, data): if not data.get('indicator'): return if data.get('nolog') in ['1', 'True', 1, True]: return if '*' in data.get('indicator'): return if '%' in data.get('indicator'): return ts = arrow.utcnow().format('YYYY-MM-DDTHH:mm:ss.SSZ') s = Indicator( indicator=data['indicator'], tlp='amber', confidence=10, tags='search', provider=t['username'], firsttime=ts, lasttime=ts, reporttime=ts, group=t['groups'][0], count=1, ) self.store.indicators.upsert(t, [s.__dict__()])
def clean_indicator(self, i, rule):
    """Undo feed de-fanging and fill in missing indicator fields.

    Applies the rule's per-field character replacements, normalizes the
    itype, and defaults firsttime/reporttime/group when absent.
    """
    # check for de-fang'd feed: apply per-field replacement maps
    if rule.replace:
        for field in i:
            replacements = rule.replace.get(field)
            if not replacements:
                continue
            for old, new in replacements.items():
                i[field] = i[field].replace(old, new)

    i = normalize_itype(i)
    if isinstance(i, dict):
        i = Indicator(**i)

    if not i.firsttime:
        i.firsttime = i.lasttime

    if not i.reporttime:
        i.reporttime = arrow.utcnow().datetime

    if not i.group:
        i.group = 'everyone'

    return i
def test_indicator_url():
    """A URL indicator must type as 'url' (not 'fqdn') and keep its tags."""
    i = Indicator('http://example.org', tags='botnet,malware')
    assert i.is_private() is False
    assert i.indicator == 'http://example.org'
    # use equality, not identity: `is` on str literals relies on CPython
    # interning and raises SyntaxWarning on modern interpreters
    assert i.itype != 'fqdn'
    assert i.itype == 'url'
    assert 'botnet' in i.tags
    assert 'malware' in i.tags
def start(self):
    """Gatherer main loop: pull indicator batches off the router, run every
    gatherer plugin over each indicator, and push the enriched batch back."""
    self._init_plugins()
    context = zmq.Context()
    pull_s = context.socket(zmq.PULL)
    push_s = context.socket(zmq.PUSH)
    # bound sends so a stalled router can't block us forever
    push_s.SNDTIMEO = SNDTIMEO
    logger.debug('connecting to sockets...')
    pull_s.connect(self.pull)
    push_s.connect(self.push)
    logger.debug('starting Gatherer')
    poller = zmq.Poller()
    poller.register(pull_s)
    while not self.exit.is_set():
        try:
            # 1s poll so the exit flag is re-checked regularly
            s = dict(poller.poll(1000))
        except Exception as e:
            self.logger.error(e)
            break
        if pull_s in s:
            id, token, mtype, data = Msg().recv(pull_s)
            data = json.loads(data)
            # normalize a single indicator into a one-element batch
            if isinstance(data, dict):
                data = [data]
            rv = []
            start = time.time()
            for d in data:
                i = Indicator(**d)
                for g in self.gatherers:
                    try:
                        g.process(i)
                    except Exception as e:
                        # a failing gatherer shouldn't kill the loop
                        from pprint import pprint
                        pprint(i)
                        logger.error('gatherer failed: %s' % g)
                        logger.error(e)
                        traceback.print_exc()
                rv.append(i.__dict__())
            data = json.dumps(rv)
            logger.debug('sending back to router: %f' % (time.time() - start))
            Msg(id=id, mtype=mtype, token=token, data=data).send(push_s)
    logger.info('shutting down gatherer..')
def test_firsttime_only():
    """Setting only firsttime must not implicitly populate lasttime."""
    stamp = arrow.utcnow().strftime("%Y-%m-%dT%H:%M:%S.%fZ")

    ind = Indicator('192.168.1.1', firsttime=stamp)
    assert ind.firsttime == arrow.get(stamp).datetime

    serialized = json.loads(str(ind))
    assert serialized.get('lasttime') is None
def process(self, i, router):
    """For a URL indicator, derive a half-confidence FQDN indicator from
    its network location and hand it to the router."""
    if i.itype != 'url':
        return

    u = urlparse(i.indicator)
    if not u.netloc:
        return

    fqdn = Indicator(**i.__dict__)
    fqdn.indicator = u.netloc
    fqdn.itype = 'fqdn'
    fqdn.confidence = (int(fqdn.confidence) / 2)
    fqdn.rdata = i.indicator

    self.logger.debug('sending to router..')
    router.indicators_create(fqdn)
def start(self):
    """Gatherer main loop (raw multipart variant): pull indicator batches,
    run every gatherer plugin over each indicator, and push the enriched
    batch back preserving the routing envelope."""
    self._init_plugins()
    context = zmq.Context()
    pull_s = context.socket(zmq.PULL)
    push_s = context.socket(zmq.PUSH)
    # bound sends so a stalled router can't block us forever
    push_s.SNDTIMEO = SNDTIMEO
    logger.debug("connecting to sockets...")
    pull_s.connect(self.pull)
    push_s.connect(self.push)
    logger.debug("starting Gatherer")
    poller = zmq.Poller()
    poller.register(pull_s)
    while not self.exit.is_set():
        try:
            # 1s poll so the exit flag is re-checked regularly
            s = dict(poller.poll(1000))
        except Exception as e:
            self.logger.error(e)
            break
        if pull_s in s:
            m = pull_s.recv_multipart()
            logger.debug(m)
            # envelope: [identity, null-delimiter, message type, token, payload]
            id, null, mtype, token, data = m
            data = json.loads(data)
            # normalize a single indicator into a one-element batch
            if isinstance(data, dict):
                data = [data]
            rv = []
            for d in data:
                i = Indicator(**d)
                for g in self.gatherers:
                    try:
                        g.process(i)
                    except Exception as e:
                        # a failing gatherer shouldn't kill the loop
                        logger.error("gatherer failed: %s" % g)
                        logger.error(e)
                        traceback.print_exc()
                rv.append(i.__dict__())
            data = json.dumps(rv)
            logger.debug("sending back to router...")
            push_s.send_multipart([id, null, mtype, token, data.encode("utf-8")])
    logger.info("shutting down gatherer..")
def process(self, i, router):
    """Check a FQDN against the Spamhaus DBL; on a hit, submit a white-TLP
    indicator describing the listing."""
    # skip non-FQDNs and Spamhaus' own data to avoid feedback loops
    if i.itype == 'fqdn' and i.provider != 'spamhaus.org':
        try:
            r = self._resolve(i.indicator)
            try:
                # map the DBL return code to a (tags, description) record
                r = CODES.get(str(r), None)
            except Exception as e:
                # https://www.spamhaus.org/faq/section/DNSBL%20Usage
                self.logger.error(e)
                self.logger.info('check spamhaus return codes')
                r = None

            if r:
                confidence = CONFIDENCE
                # "abused legit" style listings are less certain
                if ' legit ' in r['description']:
                    confidence = 6

                f = Indicator(**i.__dict__())
                f.tags = [r['tags']]
                f.description = r['description']
                f.confidence = confidence
                f.provider = PROVIDER
                f.reference_tlp = 'white'
                f.reference = 'http://www.spamhaus.org/query/dbl?domain={}'.format(f.indicator)
                f.lasttime = arrow.utcnow()

                x = router.indicators_create(f)
                self.logger.debug(x)
        except KeyError as e:
            self.logger.error(e)
def process(self, i, router):
    """Check an IP indicator against the Spamhaus block list; on a hit,
    submit a white-TLP indicator describing the listing."""
    if i.itype != 'ipv4' and i.itype != 'ipv6':
        return

    # for Spamhaus' own data, only re-check ipv4 network indicators
    if i.provider == 'spamhaus.org' and not is_ipv4_net(i.indicator):
        return

    try:
        r = self._resolve(i.indicator)
        try:
            # map the return code to a (tags, description) record
            r = CODES.get(str(r), None)
        except Exception as e:
            # https://www.spamhaus.org/faq/section/DNSBL%20Usage
            self.logger.error(e)
            self.logger.info('check spamhaus return codes')
            r = None

        if r:
            f = Indicator(**i.__dict__())
            f.tags = [r['tags']]
            f.description = r['description']
            f.confidence = CONFIDENCE
            f.provider = PROVIDER
            f.reference_tlp = 'white'
            f.reference = 'http://www.spamhaus.org/query/bl?ip={}'.format(f.indicator)
            f.lasttime = arrow.utcnow()

            x = router.indicators_create(f)
    except Exception as e:
        self.logger.error(e)
        import traceback
        traceback.print_exc()
def text_to_list(text, known_only=True):
    """Parse delimiter-separated feed text into a list of Indicators.

    text       -- raw feed body
    known_only -- when True, raise SystemError if the detected separator
                  isn't in KNOWN_SEPERATORS

    Each non-comment line is split on the detected separator; cells are
    classified as indicator, timestamp or tag (tags come from the feed's
    most common tokens).
    """
    separator = find_seperator(text)
    t_tokens = top_tokens(text)

    # up to the 9 most common tokens; slicing guards against short feeds
    # (the previous fixed range(0, 9) raised IndexError on <9 tokens)
    top = set(t_tokens[:9])

    if known_only and separator not in KNOWN_SEPERATORS:
        pprint(top)
        raise SystemError('separator not in known list: {}'.format(separator))

    ret = []
    for l in text.split("\n"):
        if l == '':
            continue

        # skip comment lines
        if l.startswith('#') or l.startswith(';'):
            continue

        cols = [x.strip() for x in l.split(separator)]

        indicator = Indicator()
        for e in cols:
            if not e:
                continue

            try:
                itype = resolve_itype(e)
                if itype:
                    indicator.indicator = e
                    indicator.itype = itype
            except NotImplementedError:
                pass

            try:
                ts = arrow.get(e)
                if ts:
                    indicator.lasttime = ts.datetime
            except (arrow.parser.ParserError, UnicodeDecodeError):
                pass

            # frequent tokens are treated as tags
            if e in top:
                indicator.tags = [e]

        if indicator.itype and indicator.indicator:
            ret.append(indicator)

    return ret
def test_get_set():
    """Re-assigning .indicator must re-resolve itype; invalid values raise."""
    i = Indicator('localhost.com')

    # the setter must reject 'localhost'; previously a silent non-raise
    # would make this test pass vacuously
    try:
        i.indicator = 'localhost'
        assert False, 'expected InvalidIndicator for "localhost"'
    except InvalidIndicator:
        pass

    i.indicator = 'localhost.org'
    assert i.itype == 'fqdn'

    i.indicator = 'https://192.168.1.1'
    assert i.itype == 'url'

    assert str(i)
    print(i)
def process(self, i, router):
    """Check a FQDN against the Spamhaus DBL; on a hit, submit a white-TLP
    indicator describing the listing."""
    # skip non-FQDNs and Spamhaus' own data to avoid feedback loops
    if i.itype == 'fqdn' and i.provider != 'spamhaus.org':
        try:
            r = self._resolve(i.indicator)
            try:
                # map the DBL return code to a (tags, description) record;
                # an unknown code raises here and is treated as a miss
                r = CODES[r]
            except Exception as e:
                # https://www.spamhaus.org/faq/section/DNSBL%20Usage
                self.logger.error(e)
                self.logger.info('check spamhaus return codes')
                r = None

            if r:
                f = Indicator(**i.__dict__)
                f.tags = [r['tags']]
                f.description = r['description']
                f.confidence = CONFIDENCE
                f.provider = PROVIDER
                f.reference_tlp = 'white'
                f.reference = 'http://www.spamhaus.org/query/dbl?domain={}'.format(f.indicator)

                x = router.indicators_create(f)
                self.logger.debug(x)
        except KeyError as e:
            self.logger.error(e)
        except dns.resolver.NoAnswer:
            self.logger.info('no answer...')
        except dns.resolver.NXDOMAIN:
            self.logger.info('nxdomain...')
        except EmptyLabel:
            self.logger.error('empty label: {}'.format(i.indicator))
def process(self, i, router):
    """Check an IP indicator against the Spamhaus block list; on a hit,
    submit a white-TLP indicator describing the listing."""
    # skip non-IPs and Spamhaus' own data to avoid feedback loops
    if (i.itype == 'ipv4' or i.itype == 'ipv6') and i.provider != 'spamhaus.org':
        try:
            r = self._resolve(i.indicator)
            try:
                # map the return code to a (tags, description) record
                r = CODES.get(str(r), None)
            except Exception as e:
                # https://www.spamhaus.org/faq/section/DNSBL%20Usage
                self.logger.error(e)
                self.logger.info('check spamhaus return codes')
                r = None

            if r:
                f = Indicator(**i.__dict__)
                f.tags = [r['tags']]
                f.description = r['description']
                f.confidence = CONFIDENCE
                f.provider = PROVIDER
                f.reference_tlp = 'white'
                f.reference = 'http://www.spamhaus.org/query/bl?ip={}'.format(f.indicator)

                x = router.indicators_create(f)
                self.logger.debug(x)
        except dns.resolver.NoAnswer:
            self.logger.info('no answer...')
        except dns.resolver.NXDOMAIN:
            self.logger.info('nxdomain...')
        except Exception as e:
            self.logger.error(e)
            import traceback
            traceback.print_exc()
def process(self, i, router):
    """Derive a half-confidence FQDN indicator from a URL's hostname."""
    if i.itype != 'url':
        return

    u = urlparse(i.indicator)
    if not u.hostname:
        return

    try:
        resolve_itype(u.hostname)
    except InvalidIndicator as e:
        # hostname isn't a usable indicator; log and drop it
        self.logger.error(u.hostname)
        self.logger.error(e)
        return

    fqdn = Indicator(**i.__dict__())
    fqdn.indicator = u.hostname
    fqdn.itype = 'fqdn'
    fqdn.confidence = (int(fqdn.confidence) / 2)
    fqdn.rdata = i.indicator

    self.logger.debug('sending to router..')
    router.indicators_create(fqdn)
def process(self, i, router):
    """For whitelisted IPv4 addresses, also whitelist the enclosing /24.

    Only IPv4 is handled: the prefix below is built by dotted-quad string
    surgery, which would produce garbage for IPv6 indicators (the previous
    itype check also let ipv6 through).
    """
    if i.itype != 'ipv4':
        return

    if 'whitelist' not in i.tags:
        return

    # replace the last octet with 0/24 to cover the surrounding network
    prefix = i.indicator.split('.')
    prefix = prefix[:3]
    prefix.append('0/24')
    prefix = '.'.join(prefix)

    ii = Indicator(**i.__dict__())
    ii.lasttime = arrow.utcnow()
    ii.indicator = prefix
    ii.tags = ['whitelist']
    # a derived network is less certain than the host observation
    ii.confidence = (ii.confidence - 2) if ii.confidence >= 2 else 0

    router.indicators_create(ii)
def process(self, i, router):
    """Resolve A records for a FQDN and submit each address as a
    lower-confidence ipv4 indicator."""
    if i.itype != 'fqdn':
        return

    # don't hunt on logged searches
    if 'search' in i.tags:
        return

    try:
        answers = resolve_ns(i.indicator)
    except Timeout:
        self.logger.info('timeout trying to resolve: {}'.format(i.indicator))
        return

    for answer in answers:
        if str(answer).rstrip('.') in ["", 'localhost']:
            continue

        ip = Indicator(**i.__dict__())
        ip.indicator = str(answer)
        ip.lasttime = arrow.utcnow()

        try:
            resolve_itype(ip.indicator)
        except InvalidIndicator as e:
            self.logger.error(ip)
            self.logger.error(e)
            continue

        ip.itype = 'ipv4'
        ip.rdata = i.indicator
        ip.confidence = (ip.confidence - 4) if ip.confidence >= 4 else 0
        router.indicators_create(ip)
def process(self, i, router):
    """When a FQDN is a subdomain, submit its parent domain as a
    lower-confidence indicator."""
    if i.itype != 'fqdn' or 'search' in i.tags:
        return

    parent = i.is_subdomain()
    if not parent:
        return

    fqdn = Indicator(**i.__dict__())
    fqdn.indicator = parent
    fqdn.lasttime = arrow.utcnow()

    try:
        resolve_itype(fqdn.indicator)
    except InvalidIndicator as e:
        # parent isn't a usable indicator; log and drop it
        self.logger.error(fqdn)
        self.logger.error(e)
        return

    fqdn.confidence = (fqdn.confidence - 3) if fqdn.confidence >= 3 else 0
    router.indicators_create(fqdn)
def process(self, i, router):
    """Resolve A records for a FQDN; submit each as a reduced-confidence
    ipv4 indicator plus a high-confidence passive-dns ('pdns') record."""
    if i.itype != 'fqdn':
        return

    # don't hunt on logged searches
    if 'search' in i.tags:
        return

    try:
        answers = resolve_ns(i.indicator)
    except Timeout:
        self.logger.info('timeout trying to resolve: {}'.format(
            i.indicator))
        return

    for answer in answers:
        addr = str(answer)
        if addr in ["", 'localhost']:
            continue

        ip = Indicator(**i.__dict__())
        ip.indicator = addr

        try:
            resolve_itype(ip.indicator)
        except InvalidIndicator as e:
            self.logger.error(ip)
            self.logger.error(e)
            continue

        ip.itype = 'ipv4'
        ip.rdata = i.indicator
        ip.confidence = (int(ip.confidence) / 4)
        router.indicators_create(ip)

        # also create a passive dns tag
        ip.tags = 'pdns'
        ip.confidence = 10
        router.indicators_create(ip)
def process(i):
    """Derive a probability-0, confidence-2 FQDN indicator from a URL
    indicator's hostname; returns None when nothing applies."""
    if i.itype != 'url':
        return

    u = urlparse(i.indicator)
    if not u.hostname:
        return

    try:
        resolve_itype(u.hostname)
    except TypeError as e:
        logger.error(u.hostname)
        logger.error(e)
        return

    fqdn = Indicator(**i.__dict__())
    fqdn.lasttime = arrow.utcnow()
    fqdn.indicator = u.hostname
    fqdn.itype = 'fqdn'
    fqdn.confidence = 2
    fqdn.rdata = i.indicator
    fqdn.probability = 0

    return fqdn
def process(self):
    """Fetch the feed, map raw source columns onto indicator fields, and
    create any indicators not already archived.

    Returns the list of indicators_create() responses.
    """
    defaults = self._defaults()
    map = self.rule.feeds[self.feed]['map']
    values = self.rule.feeds[self.feed]['values']
    rv = []
    for l in self.fetcher.process():
        # NOTE(review): this deepcopy is immediately discarded by the
        # `i = {}` below -- defaults appear to be unused; confirm intent
        i = copy.deepcopy(defaults)
        l = json.loads(l)
        for e in l:
            i = {}
            # map configured source columns onto indicator field names
            for x, c in enumerate(map):
                i[values[x]] = e[c]
            try:
                self.logger.debug(i)
                i = normalize_itype(i)
                i = Indicator(**i)
            except NotImplementedError as e:
                self.logger.error(e)
                self.logger.info('skipping: {}'.format(i['indicator']))
            else:
                # skip anything already sent in a previous run
                if self.is_archived(i.indicator, i.provider, i.group, i.tags, i.firsttime, i.lasttime):
                    self.logger.info('skipping: {}/{}'.format(i.provider, i.indicator))
                else:
                    r = self.client.indicators_create(i)
                    self.archive(i.indicator, i.provider, i.group, i.tags, i.firsttime, i.lasttime)
                    rv.append(r)
            if self.limit:
                self.limit -= 1
                if self.limit == 0:
                    self.logger.debug('limit reached...')
                    # NOTE(review): only exits the row loop; the outer
                    # fetch loop keeps going -- confirm intent
                    break
    return rv
def process(self, i, router):
    """Resolve MX records for a FQDN and submit each mail host as a
    reduced-confidence FQDN indicator."""
    if i.itype != 'fqdn':
        return

    # don't hunt on logged searches
    if 'search' in i.tags:
        return

    try:
        r = resolve_ns(i.indicator, t='MX')
    except Timeout:
        self.logger.info('timeout trying to resolve MX for: {}'.format(
            i.indicator))
        return

    for rr in r:
        # MX answers look like "10 mail.example.org." -- drop the preference
        rr = re.sub(r'^\d+ ', '', str(rr))
        rr = str(rr).rstrip('.')

        if rr in ["", 'localhost', '0.0.0.0']:
            continue

        fqdn = Indicator(**i.__dict__())
        fqdn.indicator = rr.rstrip('.')
        fqdn.lasttime = arrow.utcnow()

        # 10
        # NOTE(review): a purely numeric answer aborts ALL remaining MX
        # records; `continue` may have been intended -- confirm
        if re.match('^\d+$', rr):
            return

        try:
            resolve_itype(fqdn.indicator)
        except InvalidIndicator as e:
            self.logger.info(fqdn)
            self.logger.info(e)
        else:
            fqdn.itype = 'fqdn'
            fqdn.rdata = i.indicator
            fqdn.confidence = (fqdn.confidence - 5) if fqdn.confidence >= 5 else 0
            router.indicators_create(fqdn)
def handle_indicators_search(self, token, data):
    """Proxy an indicators search through the store pipe and, on success,
    log the search itself as a 'search' indicator.

    Returns the raw store response.
    """
    # need to send searches through the _submission pipe
    self.store.send_multipart(['indicators_search', token, data])
    x = self.store.recv()
    data = json.loads(data)
    xx = json.loads(x)
    if xx.get('status') == 'success':
        # only log when the client didn't opt out via nolog
        if data.get('indicator') and data.get('nolog') == 'False':
            self.logger.debug('creating search')
            i = Indicator(
                indicator=data['indicator'],
                tlp='green',
                confidence=10,
                tags=['search'],
            )
            self.logger.debug('creating indicator')
            r = self.handle_indicators_create(token, str(i))
            if r:
                self.logger.info('search logged')
    return x
def process(self, i, router):
    """Derive a half-confidence FQDN indicator from a URL indicator's
    network location."""
    if i.itype != 'url':
        return

    u = urlparse(i.indicator)
    if not u.netloc:
        return

    try:
        resolve_itype(u.netloc)
    except InvalidIndicator as e:
        # netloc isn't a usable indicator; log and drop it
        self.logger.error(u.netloc)
        self.logger.error(e)
        return

    fqdn = Indicator(**i.__dict__())
    fqdn.indicator = u.netloc
    fqdn.itype = 'fqdn'
    fqdn.confidence = (int(fqdn.confidence) / 2)
    fqdn.rdata = i.indicator

    self.logger.debug('sending to router..')
    router.indicators_create(fqdn)
def process(i):
    """Resolve CNAMEs for a FQDN indicator; return the derived FQDN
    indicators (probability 0, roughly halved confidence) or None."""
    if not ENABLED:
        return

    if i.itype != 'fqdn':
        return

    try:
        r = resolve_ns(i.indicator, t='CNAME')
        if not r:
            return
    except Timeout:
        return

    rv = []
    for rr in r:
        # http://serverfault.com/questions/44618/is-a-wildcard-cname-dns-record-valid
        rr = str(rr).rstrip('.').lstrip('*.')
        if rr in ['', 'localhost']:
            continue

        fqdn = Indicator(**i.__dict__())
        fqdn.probability = 0
        fqdn.indicator = rr
        fqdn.lasttime = arrow.utcnow()

        try:
            resolve_itype(fqdn.indicator)
        except Exception:
            # was a bare `except:`, which also swallowed KeyboardInterrupt
            # and SystemExit; Exception keeps the intended quiet give-up
            return

        fqdn.itype = 'fqdn'

        # keep avoid recursive cname lookups
        fqdn.confidence = int(fqdn.confidence / 2) if fqdn.confidence >= 2 else 0

        rv.append(fqdn)

    return rv
def _parse_taxii_content(self, taxii_content):
    """Parse TAXII content blocks into Indicator objects (generator).

    Each block is parsed as a STIX package; observables from all blocks
    are merged by key before being yielded as Indicators.
    """
    indicators_to_add = {}
    for stix_obj in taxii_content:
        try:
            stix_parsed = STIXPackage.from_xml(
                lxml_fromstring(stix_obj.content))
        except NotImplementedError as e:
            # AIS marking support lives in an optional stix extension;
            # importing it registers the handler, then the parse is retried
            if str(e).endswith("AISMarkingStructure'"):
                import stix.extensions.marking.ais
                stix_parsed = STIXPackage.from_xml(
                    lxml_fromstring(stix_obj.content))
            else:
                raise
        except Exception as e:
            # skip unparseable blocks rather than failing the whole feed
            logger.error('Error parsing STIX object: {}'.format(e))
            continue
        try:
            tmp = _parse_stix_package(stix_parsed)
        except NotImplementedError as e:
            # same AIS retry as above
            if str(e).endswith("AISMarkingStructure'"):
                import stix.extensions.marking.ais
                tmp = _parse_stix_package(stix_parsed)
            else:
                raise
        # merge per-observable attributes across content blocks
        for obs_key, value in tmp.items():
            if obs_key in indicators_to_add:
                indicators_to_add[obs_key].update(value)
            else:
                indicators_to_add[obs_key] = value
    for i_dict in indicators_to_add.values():
        if i_dict.get('indicator'):
            logger.debug('adding indicator {}'.format(i_dict['indicator']))
            yield Indicator(**i_dict)
def handle_message_gatherer(self, s, e):
    """Receive an enriched indicator back from a gatherer and fan it out:
    peers and hunters when confident enough, and always the store."""
    self.logger.debug('received message from gatherer')
    m = s.recv_multipart()
    self.logger.debug(m)
    # envelope: [identity, null-delimiter, message type, token, payload]
    id, null, mtype, token, data = m
    data = json.loads(data)
    i = Indicator(**data)
    data = json.dumps(data)
    # only forward sufficiently confident indicators to peers/hunters
    if i.confidence >= HUNTER_MIN_CONFIDENCE:
        if self.p2p:
            self.logger.info('sending to peers...')
            self.p2p.send(data.encode('utf-8'))
        self.logger.debug('sending to hunters...')
        self.hunters_s.send(data)
    self.logger.debug('sending to store')
    self.store_s.send_multipart([id, '', 'indicators_create', token, data])
    self.logger.debug('done')
def start(self):
    """Hunter main loop (blocking recv): pull indicators off the hunters
    socket and run every hunter plugin over each one."""
    # TODO - convert this to an async socket
    router = Client(remote=self.router, token=self.token, nowait=True)
    plugins = self._load_plugins()
    socket = zmq.Context().socket(zmq.PULL)
    socket.SNDTIMEO = SNDTIMEO
    socket.set_hwm(ZMQ_HWM)
    logger.debug('connecting to {}'.format(self.hunters))
    socket.connect(self.hunters)
    logger.debug('starting hunter')
    try:
        while True:
            logger.debug('waiting...')
            data = socket.recv()
            logger.debug(data)
            data = json.loads(data)
            # normalize a single indicator into a one-element batch
            if isinstance(data, dict):
                data = [data]
            for d in data:
                d = Indicator(**d)
                for p in plugins:
                    try:
                        p.process(d, router)
                    except Exception as e:
                        # a failing plugin shouldn't kill the hunter
                        logger.error(e)
                        traceback.print_exc()
                        logger.error('giving up on: {}'.format(d))
    except KeyboardInterrupt:
        logger.info('shutting down hunter...')
        return
def process(i):
    """If a FQDN indicator is a subdomain, return its parent domain as a
    probability-0, confidence-1 indicator; otherwise return None."""
    if i.itype != 'fqdn':
        return

    if not i.is_subdomain():
        return

    fqdn = Indicator(**i.__dict__())
    fqdn.probability = 0
    fqdn.indicator = i.is_subdomain()
    fqdn.lasttime = arrow.utcnow()

    try:
        resolve_itype(fqdn.indicator)
    except Exception:
        # was a bare `except:`, which also swallowed KeyboardInterrupt and
        # SystemExit; Exception keeps the intended quiet give-up
        return

    fqdn.confidence = 1
    return fqdn
def process(self, i, router):
    """Derive a half-confidence FQDN indicator from a URL's hostname."""
    if i.itype != 'url':
        return

    hostname = urlparse(i.indicator).hostname
    if not hostname:
        return

    try:
        resolve_itype(hostname)
    except InvalidIndicator as e:
        # hostname isn't a usable indicator; log and drop it
        self.logger.error(hostname)
        self.logger.error(e)
        return

    fqdn = Indicator(**i.__dict__())
    fqdn.lasttime = arrow.utcnow()
    fqdn.indicator = hostname
    fqdn.itype = 'fqdn'
    fqdn.confidence = (int(fqdn.confidence) / 2)
    fqdn.rdata = i.indicator

    self.logger.debug('sending to router: {}'.format(fqdn))
    router.indicators_create(fqdn)
def process(self, i, router, **kwargs):
    """Resolve CNAME records for a FQDN and submit each target as a
    'hunter'-tagged FQDN indicator at slightly reduced confidence."""
    if i.itype != 'fqdn':
        return

    # don't hunt on logged searches
    if 'search' in i.tags:
        return

    try:
        r = resolve_ns(i.indicator, t='CNAME')
    except Timeout:
        self.logger.info('timeout trying to resolve: {}'.format(
            i.indicator))
        r = []

    for rr in r:
        # wildcard CNAMEs are legal; strip the '*.' and trailing dot
        # http://serverfault.com/questions/44618/is-a-wildcard-cname-dns-record-valid
        rr = str(rr).rstrip('.').lstrip('*.')
        if rr in ['', 'localhost', '0.0.0.0']:
            continue

        fqdn = Indicator(**i.__dict__())
        fqdn.indicator = rr
        fqdn.lasttime = fqdn.reporttime = arrow.utcnow()

        try:
            resolve_itype(fqdn.indicator)
        except InvalidIndicator as e:
            self.logger.error(fqdn)
            self.logger.error(e)
            # NOTE(review): aborts the remaining CNAME records; `continue`
            # may have been intended -- confirm
            return

        fqdn.itype = 'fqdn'
        fqdn.rdata = '{} cname'.format(i.indicator)

        # mark as hunter-derived so recursion can be filtered upstream
        if 'hunter' not in fqdn.tags:
            fqdn.tags.append('hunter')

        # step confidence down, capping derived indicators at 7
        if fqdn.confidence < 8:
            fqdn.confidence -= 1
        else:
            fqdn.confidence = 7

        router.indicators_create(fqdn)
        self.logger.debug("FQDN CNAME Hunter: {}".format(fqdn))
def test_hunter_plugins():
    """Smoke-test hunter plugins against a handful of indicators."""
    plugins = load_plugins(cif.hunter.__path__)

    count = 0

    indicators = [Indicator(indicator=fake.domain_name(), tags=['malware'])]
    indicators.append(Indicator('csirtgadgets.com', tags=['botnet']))
    indicators.append(Indicator('gfycat.com', tags=['exploit']))
    indicators.append(Indicator('http://csirtgadgets.com', tags=['botnet']))

    for p in plugins:
        # NOTE(review): next(...) only ever feeds the FIRST indicator to
        # each plugin -- looping over all of them may be intended
        rv = p.process(next(i for i in indicators))

        # plugins may return None for indicators they don't handle; guard
        # BEFORE materializing (list(None) raises TypeError)
        if not rv:
            continue

        rv = [i.__dict__() for i in list(rv)]
        count += len(rv)
def process(self, i, router):
    """Resolve MX records for a FQDN and submit each mail host as a
    reduced-confidence FQDN indicator."""
    if i.itype != 'fqdn':
        return

    # don't hunt on logged searches
    if 'search' in i.tags:
        return

    try:
        r = resolve_ns(i.indicator, t='MX')
    except Timeout:
        self.logger.info('timeout trying to resolve MX for: {}'.format(i.indicator))
        return

    for rr in r:
        # MX answers look like "10 mail.example.org." -- drop the preference
        rr = re.sub(r'^\d+ ', '', str(rr))
        rr = str(rr).rstrip('.')

        if rr in ["", 'localhost']:
            continue

        fqdn = Indicator(**i.__dict__())
        fqdn.indicator = rr.rstrip('.')
        fqdn.lasttime = arrow.utcnow()

        # 10
        # NOTE(review): a purely numeric answer aborts ALL remaining MX
        # records; `continue` may have been intended -- confirm
        if re.match('^\d+$', rr):
            return

        try:
            resolve_itype(fqdn.indicator)
        except InvalidIndicator as e:
            self.logger.info(fqdn)
            self.logger.info(e)
        else:
            fqdn.itype = 'fqdn'
            fqdn.rdata = i.indicator
            fqdn.confidence = (fqdn.confidence - 5) if fqdn.confidence >= 5 else 0
            router.indicators_create(fqdn)
def test_indicator_fqdn():
    """A bare public domain must be typed as fqdn and not private."""
    ind = Indicator('example.org')

    assert ind.is_private() is False
    assert ind.indicator == 'example.org'
    assert ind.itype == 'fqdn'
def _kv_to_indicator(self, kv):
    """Convert a dict of indicator fields into an Indicator instance."""
    return Indicator(**kv)
def main():
    """csirtg-cef entry point: tail a logfile (or a docker container's
    logs), parse each line into an indicator and forward it to the
    configured client, de-duplicating within an aggregation window."""
    p = get_argument_parser()
    p = ArgumentParser(
        description=textwrap.dedent('''\
        Env Variables:
            CSIRTG_RUNTIME_PATH

        example usage:
            $ csirtg-cef -f /var/log/foo.log
            $ ZYRE_GROUP=honeynet csirtg-cef -d -f /var/log/foo.log --client zyre
            $ csirtg-cef -f /var/log/foo.log --client csirtg --user wes --feed scanners -d
        '''),
        formatter_class=RawDescriptionHelpFormatter,
        prog='csirtg-cef',
        parents=[p],
    )
    p.add_argument('--no-verify-ssl', help='turn TLS/SSL verification OFF', action='store_true')
    p.add_argument('-f', '--file')
    p.add_argument('--client', default='stdout')
    p.add_argument('--user')
    p.add_argument('--feed')
    p.add_argument('--format', default='csv')
    p.add_argument('--tags', help='specify indicator tags [default %(default)s', default='scanner')
    p.add_argument('--provider', help='specify provider [default %(default)s]', default=PROVIDER)
    p.add_argument('--aggregate', help='specify how many seconds to aggregate batches before sending to client '
                                       '[default %(default)s]', default=60)
    p.add_argument('--tail-docker')

    args = p.parse_args()

    # setup logging
    setup_logging(args)

    verify_ssl = True
    if args.no_verify_ssl:
        verify_ssl = False

    if args.file:
        logger.debug('starting on: {}'.format(args.file))
        data_source = tail(args.file)
    elif args.tail_docker:
        logger.debug('starting on container: {}'.format(args.tail_docker))
        #data_source = subprocess.Popen(["docker", "logs", "-f", "--tail", "0", args.tail_docker], bufsize=1, stdout=subprocess.PIPE).stdout
        client = docker.from_env(version='auto')
        container = client.containers.get(args.tail_docker)
        data_source = container.logs(stream=True, follow=True, tail=0)
    else:
        logger.error('Missing --file or --tail-docker flag')
        raise SystemExit

    logger.info('sending data as: %s' % args.provider)

    s = Smrt(client=args.client, username=args.user, feed=args.feed, verify_ssl=verify_ssl)

    # rolling de-dup bucket, reset at every aggregation-window boundary
    bucket = set()
    last_t = round_time(round=int(args.aggregate))

    try:
        for line in data_source:
            i = parse_line(line)
            if not i:
                logger.debug('skipping line')
                continue

            i = Indicator(**i)
            logger.debug(i)

            i.provider = args.provider
            i.tags = args.tags

            if args.aggregate:
                t = round_time(dt=datetime.now(), round=int(args.aggregate))
                # new window: clear the de-dup bucket
                if t != last_t:
                    bucket = set()
                last_t = t
                if i.indicator in bucket:
                    logger.info('skipping send {}'.format(i.indicator))
                    continue
                bucket.add(i.indicator)

            if args.client == 'stdout':
                print(FORMATS[args.format](data=[i]))
            else:
                try:
                    s.client.indicators_create(i)
                    logger.info('indicator created: {}'.format(i.indicator))
                except Exception as e:
                    logger.error(e)
    except Exception as e:
        logger.error(e)
    except KeyboardInterrupt:
        logger.info('SIGINT caught... stopping')
        if args.client != 'stdout':
            s.client.stop()

    logger.info('exiting...')
def process(self, i, router):
    """For a FQDN indicator, fan out CNAME/A/NS/MX lookups plus the parent
    domain, submitting each result at progressively lower confidence."""
    if i.itype == 'fqdn':
        # CNAME targets (confidence / 2)
        try:
            r = resolve_ns(i.indicator, t='CNAME')
        except Timeout:
            self.logger.info('timeout trying to resolve: {}'.format(i.indicator))
            r = []

        for rr in r:
            fqdn = Indicator(**i.__dict__())
            fqdn.indicator = str(rr).rstrip('.')

            try:
                resolve_itype(fqdn.indicator)
            except InvalidIndicator as e:
                self.logger.error(fqdn)
                self.logger.error(e)
            else:
                fqdn.itype = 'fqdn'
                fqdn.confidence = (int(fqdn.confidence) / 2)
                router.indicators_create(fqdn)

        # parent domain (confidence / 3)
        if i.is_subdomain():
            fqdn = Indicator(**i.__dict__())
            fqdn.indicator = i.is_subdomain()

            try:
                resolve_itype(fqdn.indicator)
            except InvalidIndicator as e:
                self.logger.error(fqdn)
                self.logger.error(e)
            else:
                fqdn.confidence = (int(fqdn.confidence) / 3)
                router.indicators_create(fqdn)

        # A records (confidence / 4)
        try:
            r = resolve_ns(i.indicator)
        except Timeout:
            self.logger.info('timeout trying to resolve: {}'.format(i.indicator))
            r = []

        for rr in r:
            ip = Indicator(**i.__dict__())
            ip.indicator = str(rr)

            try:
                resolve_itype(ip.indicator)
            except InvalidIndicator as e:
                self.logger.error(ip)
                self.logger.error(e)
            else:
                ip.itype = 'ipv4'
                ip.rdata = i.indicator
                ip.confidence = (int(ip.confidence) / 4)
                router.indicators_create(ip)

        # NS records (confidence / 5)
        try:
            r = resolve_ns(i.indicator, t='NS')
        except Timeout:
            self.logger.info('timeout trying to resolve NS for: {}'.format(i.indicator))
            r = []

        for rr in r:
            fqdn = Indicator(**i.__dict__())
            fqdn.indicator = str(rr).rstrip('.')

            try:
                resolve_itype(fqdn.indicator)
            except InvalidIndicator as e:
                self.logger.error(fqdn)
                self.logger.error(e)
            else:
                fqdn.itype = 'fqdn'
                fqdn.rdata = i.indicator
                fqdn.confidence = (int(fqdn.confidence) / 5)
                router.indicators_create(fqdn)

        # MX records (confidence / 6)
        try:
            r = resolve_ns(i.indicator, t='MX')
        except Timeout:
            self.logger.info('timeout trying to resolve MX for: {}'.format(i.indicator))
            r = []

        for rr in r:
            # MX answers look like "10 mail.example.org." -- drop the preference
            rr = re.sub(r'^\d+ ', '', str(rr))
            fqdn = Indicator(**i.__dict__())
            fqdn.indicator = rr.rstrip('.')

            try:
                resolve_itype(fqdn.indicator)
            except InvalidIndicator as e:
                # 'localhost' MX targets are expected noise; only log others
                if not str(e).startswith('unknown itype for "localhost"'):
                    self.logger.error(fqdn)
                    self.logger.error(e)
            else:
                fqdn.itype = 'fqdn'
                fqdn.rdata = i.indicator
                fqdn.confidence = (int(fqdn.confidence) / 6)
                router.indicators_create(fqdn)
def main():
    """csirtg-cef entry point (tailer variant): follow a logfile, parse
    each line into an indicator and forward it to the configured client."""
    p = get_argument_parser()
    p = ArgumentParser(
        description=textwrap.dedent('''\
        Env Variables:
            CSIRTG_RUNTIME_PATH

        example usage:
            $ csirtg-cef -f /var/log/foo.log
            $ ZYRE_GROUP=honeynet csirtg-cef -d -f /var/log/foo.log --client zyre
            $ csirtg-cef -f /var/log/foo.log --client csirtg --user wes --feed scanners -d
        '''),
        formatter_class=RawDescriptionHelpFormatter,
        prog='csirtg-cef',
        parents=[p],
    )
    p.add_argument('--no-verify-ssl', help='turn TLS/SSL verification OFF', action='store_true')
    p.add_argument('-f', '--file')
    p.add_argument('--client', default='stdout')
    p.add_argument('--user')
    p.add_argument('--feed')
    p.add_argument('--format', default='csv')
    p.add_argument('--tags', help='specify indicator tags [default %(default)s', default='scanner')
    p.add_argument('--provider', help='specify provider [default %(default)s]', default=PROVIDER)
    p.add_argument('--tail-docker')

    args = p.parse_args()

    if not args.provider:
        raise RuntimeError('Missing --provider flag')

    if not args.file:
        raise RuntimeError('Missing --file flag')

    # setup logging
    setup_logging(args)

    logger.debug('starting on: {}'.format(args.file))

    verify_ssl = True
    if args.no_verify_ssl:
        verify_ssl = False

    f = open(args.file)

    from csirtg_smrt import Smrt
    s = Smrt(client=args.client, username=args.user, feed=args.feed, verify_ssl=verify_ssl)

    try:
        # follow the file like `tail -f`
        for line in tailer.follow(f):
            i = parse_line(line)
            if not i:
                logger.debug('skipping line')
                continue

            i = Indicator(**i)
            logger.debug(i)

            i.provider = args.provider
            i.tags = args.tags

            if args.client == 'stdout':
                print(FORMATS[args.format](data=[i]))
            else:
                s.client.indicators_create(i)
                logger.info('indicator created: {}'.format(i.indicator))
    except KeyboardInterrupt:
        logger.info('SIGINT caught... stopping')
        if args.client != 'stdout':
            s.client.stop()

    logger.info('exiting...')
def test_eq():
    """Two indicators with the same value and uuid must compare equal."""
    first = Indicator(indicator='192.168.1.1')
    second = Indicator(indicator='192.168.1.1')

    second.uuid = first.uuid
    assert first == second
def process(self, i, router):
    """For a FQDN indicator, fan out CNAME/A/NS/MX lookups plus the parent
    domain, submitting each result at progressively lower confidence.

    NOTE(review): unlike sibling implementations, the lookups here have no
    Timeout handling and results aren't validated with resolve_itype --
    confirm that's intended.
    """
    if i.itype == 'fqdn':
        # CNAME targets (confidence / 2)
        r = resolve_ns(i.indicator, t='CNAME')
        self.logger.debug('CNAME: {}'.format(r))
        for rr in r:
            fqdn = Indicator(**i.__dict__)
            fqdn.indicator = str(rr).rstrip('.')
            fqdn.itype = 'fqdn'
            fqdn.confidence = (int(fqdn.confidence) / 2)
            x = router.indicators_create(fqdn)
            self.logger.debug(x)

        # parent domain (confidence / 3)
        if i.is_subdomain():
            fqdn = Indicator(**i.__dict__)
            fqdn.indicator = i.is_subdomain()
            fqdn.confidence = (int(fqdn.confidence) / 3)
            x = router.indicators_create(fqdn)
            self.logger.debug(x)

        # A records (confidence / 4)
        r = resolve_ns(i.indicator)
        self.logger.debug(r)
        for rr in r:
            ip = Indicator(**i.__dict__)
            ip.indicator = str(rr)
            ip.itype = 'ipv4'
            ip.confidence = (int(ip.confidence) / 4)
            x = router.indicators_create(ip)
            self.logger.debug(x)

        # NS records (confidence / 5)
        r = resolve_ns(i.indicator, t='NS')
        self.logger.debug('NS: {}'.format(r))
        for rr in r:
            ip = Indicator(**i.__dict__)
            ip.indicator = str(rr).rstrip('.')
            ip.itype = 'fqdn'
            ip.confidence = (int(ip.confidence) / 5)
            x = router.indicators_create(ip)
            self.logger.debug(x)

        # MX records (confidence / 6)
        r = resolve_ns(i.indicator, t='MX')
        self.logger.debug('MX: {}'.format(r))
        for rr in r:
            ip = Indicator(**i.__dict__)
            ip.indicator = str(rr).rstrip('.')
            ip.itype = 'fqdn'
            ip.confidence = (int(ip.confidence) / 6)
            x = router.indicators_create(ip)
            self.logger.debug(x)
def test_fqdn_random():
    """100 randomly generated domain names must all be typed as fqdn."""
    for _ in range(100):
        assert Indicator(indicator=fake.domain_name()).itype == 'fqdn'
def indicator():
    """Return a canned example.com Indicator (test fixture helper)."""
    return Indicator(indicator='example.com', tags='botnet', provider='csirtg.io', group='everyone')
def start(self):
    """Hunter loop: poll the hunters socket, normalize incoming search
    payloads and run each plugin over every indicator."""
    router = Client(remote=self.router, token=self.token, nowait=True)
    plugins = self._load_plugins()

    socket = zmq.Context().socket(zmq.PULL)
    socket.SNDTIMEO = SNDTIMEO
    socket.set_hwm(ZMQ_HWM)

    logger.debug('connecting to {}'.format(self.hunters))
    socket.connect(self.hunters)
    logger.debug('starting hunter')

    poller = zmq.Poller()
    poller.register(socket, zmq.POLLIN)

    while not self.exit.is_set():
        try:
            s = dict(poller.poll(1000))
        except (SystemExit, KeyboardInterrupt):
            # BUG FIX: was `except SystemExit or KeyboardInterrupt:` --
            # that expression evaluates to SystemExit alone, so
            # KeyboardInterrupt was never caught here
            break

        if socket not in s:
            continue

        data = socket.recv_multipart()
        logger.debug(data)

        data = json.loads(data[0])

        if isinstance(data, dict):
            if not data.get('indicator'):
                continue

            # bare search terms arrive without an itype; wrap them as a
            # 'search' indicator
            if not data.get('itype'):
                try:
                    data = Indicator(
                        indicator=data['indicator'],
                        tags='search',
                        confidence=10,
                        group='everyone',
                        tlp='amber',
                    ).__dict__()
                except InvalidIndicator:
                    logger.debug('skipping invalid indicator: {}'.format(data['indicator']))
                    continue

            if not data.get('tags'):
                data['tags'] = []

            data = [data]

        for d in data:
            d = Indicator(**d)

            if d.indicator in ["", 'localhost', 'example.com']:
                continue

            if self.exclude.get(d.provider):
                for t in d.tags:
                    if t in self.exclude[d.provider]:
                        logger.debug('skipping: {}'.format(d.indicator))
                        # NOTE(review): only continues the tag loop -- the
                        # indicator is still processed below; confirm
                        continue

            for p in plugins:
                if p.is_advanced:
                    if not HUNTER_ADVANCED:
                        continue

                try:
                    p.process(d, router)
                except Exception as e:
                    logger.error(e)
                    logger.error('[{}] giving up on: {}'.format(p, d))
def get_indicator(l, hints=None):
    """Heuristically classify the cells of a feed row and assemble an
    Indicator from them.

    l     -- a single cell value or a list of cell values
    hints -- optional list of strings that mark description columns

    Returns an Indicator populated with whatever was detected:
    indicator/reference, country code, timestamps, ports, ASN, tags.
    """
    i = OrderedDict()

    if not isinstance(l, list):
        l = [l]

    # step 1, detect datatypes
    for e in l:
        if not isinstance(e, (str, bytes)):
            continue

        e = e.rstrip()
        e = e.lstrip()

        # exactly two letters -> treat as a country code
        if re.match('^[a-zA-Z]{2}$', e):
            i[e] = 'CC'
            continue

        t = None
        try:
            t = resolve_itype(e.rstrip('/'))
            # 25553.0 ASN formats trip up FQDN resolve itype
            if t and not (t == 'fqdn' and re.match('^\d+\.[0-9]$', e)):
                i[e] = 'indicator'
                continue
        except Exception:
            pass

        if isinstance(e, int):
            i[e] = 'int'
            continue

        if isinstance(e, float) or re.match('^\d+\.[0-9]$', e):
            i[e] = 'float'
            continue

        if is_timestamp(e):
            i[e] = 'timestamp'
            continue

        if isinstance(e, (str, bytes)):
            if hints:
                # compare against at most 25 hint strings
                for ii in range(0, 25):
                    if len(hints) == ii:
                        break

                    if e.lower() == hints[ii].lower():
                        i[e] = 'description'
                        break

            if not i.get(e):
                i[e] = 'string'

    # step 2: map the detected cell types onto Indicator fields
    i2 = Indicator()
    timestamps = []
    ports = []
    for e in i:
        if i[e] == 'CC':
            i2.cc = e
            continue

        if i[e] == 'indicator':
            # first indicator-looking cell is the indicator, second the reference
            if i2.indicator:
                i2.reference = e
            else:
                i2.indicator = e
            continue

        if i[e] == 'timestamp':
            timestamps.append(parse_timestamp(e))
            continue

        if i[e] == 'float':
            i2.asn = e
            continue

        if i[e] == 'int':
            ports.append(e)
            continue

        if i[e] == 'description':
            i2.description = e
            continue

        if i[e] == 'string':
            # a free-form string following an ASN is its description
            if re.match(r'[0-9A-Za-z\.\s\/]+', e) and i2.asn:
                i2.asn_desc = e
                continue

            # short single words look like tags (but not itype names)
            if 4 <= len(e) <= 10 and re.match('[a-z-A-Z]+,?', e) and e not in ['ipv4', 'fqdn', 'url', 'ipv6']:
                i2.tags = [e]
                continue

            # multi-word strings become the description
            if ' ' in e and 5 <= len(e) and not i2.asn_desc:
                i2.description = e
                continue

    # newest timestamp becomes last_at, next newest first_at
    timestamps = sorted(timestamps, reverse=True)
    if len(timestamps) > 0:
        i2.last_at = timestamps[0]

    if len(timestamps) > 1:
        i2.first_at = timestamps[1]

    if len(ports) > 0:
        if len(ports) == 1:
            i2.portlist = ports[0]
        else:
            # the larger number is assumed to be the source port
            if ports[0] > ports[1]:
                i2.portlist = ports[0]
                i2.dest_portlist = ports[1]
            else:
                i2.portlist = ports[1]
                i2.dest_portlist = ports[0]

    return i2
def test_urls_ok():
    """Every known-good URL must be typed as 'url'."""
    for candidate in GOOD:
        # equality, not identity: `is` on str literals relies on interning
        # and raises SyntaxWarning on modern CPython
        assert Indicator(candidate).itype == 'url'
def start(self):
    """Main hunter loop: pull indicators off the hunters socket, normalize
    them, and run each through the loaded plugins.

    Runs until ``self.exit`` is set; results are sent via ``router``.
    """
    router = Client(remote=self.router, token=self.token, nowait=True)
    plugins = self._load_plugins()

    socket = zmq.Context().socket(zmq.PULL)
    socket.SNDTIMEO = SNDTIMEO
    socket.set_hwm(ZMQ_HWM)

    logger.debug('connecting to {}'.format(self.hunters))
    socket.connect(self.hunters)
    logger.debug('starting hunter')

    poller = zmq.Poller()
    poller.register(socket, zmq.POLLIN)

    while not self.exit.is_set():
        try:
            s = dict(poller.poll(1000))
        # BUG FIX: `except SystemExit or KeyboardInterrupt:` evaluated the
        # `or` first and only ever caught SystemExit; a tuple catches both.
        except (SystemExit, KeyboardInterrupt):
            break

        if socket not in s:
            continue

        data = socket.recv_multipart()
        logger.debug(data)

        data = json.loads(data[0])

        if isinstance(data, dict):
            if not data.get('indicator'):
                continue

            if not data.get('itype'):
                # BUG FIX: an un-typed, invalid indicator used to raise out
                # of the loop and kill the hunter; guard like the sibling
                # implementations do.
                try:
                    data = Indicator(
                        indicator=data['indicator'],
                        tags='search',
                        confidence=10,
                        group='everyone',
                        tlp='amber',
                    ).__dict__()
                except InvalidIndicator:
                    logger.debug('skipping invalid indicator: {}'.format(data['indicator']))
                    continue

            if not data.get('tags'):
                data['tags'] = []

            data = [data]

        for d in data:
            d = Indicator(**d)

            if d.indicator in ["", 'localhost', 'example.com']:
                continue

            # BUG FIX: the original only logged 'skipping' inside the tag
            # loop and then processed the indicator anyway; actually skip it.
            if self.exclude.get(d.provider) and \
                    any(t in self.exclude[d.provider] for t in d.tags):
                logger.debug('skipping: {}'.format(d.indicator))
                continue

            for p in plugins:
                # advanced plugins only run when explicitly enabled
                if p.is_advanced:
                    if not HUNTER_ADVANCED:
                        continue
                try:
                    p.process(d, router)
                except Exception as e:
                    logger.error(e)
                    logger.error('[{}] giving up on: {}'.format(p, d))
def test_fqdn_ok():
    """Every indicator in GOOD should be detected as an FQDN."""
    for d in GOOD:
        d = Indicator(d)
        # BUG FIX: `is 'fqdn'` compared object identity and only worked by
        # accident of string interning (SyntaxWarning on modern Pythons);
        # equality is the correct comparison.
        assert d.itype == 'fqdn'
def start(self):
    """Main hunter loop (tokenized variant): receive framed messages,
    normalize each indicator, apply confidence/recursion/exclusion gates,
    and run the surviving indicators through the loaded plugins.

    Runs until ``self.exit`` is set; results are sent via ``router``.
    """
    router = Client(remote=self.router, token=self.token, nowait=True, autoclose=False)
    plugins = self._load_plugins()

    socket = zmq.Context().socket(zmq.PULL)
    socket.SNDTIMEO = SNDTIMEO
    socket.set_hwm(ZMQ_HWM)

    logger.debug('connecting to {}'.format(self.hunters))
    socket.connect(self.hunters)
    logger.debug('starting hunter')

    poller = zmq.Poller()
    poller.register(socket, zmq.POLLIN)

    while not self.exit.is_set():
        try:
            s = dict(poller.poll(1000))
        # BUG FIX: `except SystemExit or KeyboardInterrupt:` evaluated the
        # `or` first and only ever caught SystemExit; a tuple catches both.
        except (SystemExit, KeyboardInterrupt):
            break

        if socket not in s:
            continue

        id, token, mtype, data = Msg().recv(socket)

        data = json.loads(data)

        if isinstance(data, dict):
            if not data.get('indicator'):
                continue

            # bare searches arrive without an itype; wrap them as a low
            # confidence 'search' indicator so plugins can process them
            if not data.get('itype'):
                try:
                    data = Indicator(
                        indicator=data['indicator'],
                        tags='search',
                        confidence=10,
                        group='everyone',
                        tlp='amber',
                    ).__dict__()
                except InvalidIndicator:
                    logger.debug('skipping invalid indicator: {}'.format(
                        data['indicator']))
                    continue

            if not data.get('tags'):
                data['tags'] = []

            data = [data]

        token = json.loads(token)

        for d in data:
            d = Indicator(**d)

            if d.confidence < HUNTER_MIN_CONFIDENCE:
                continue

            # prevent hunter recursion if disabled
            if not HUNTER_RECURSION and d.tags and 'hunter' in d.tags:
                continue

            if d.indicator in ["", 'localhost', 'example.com']:
                continue

            # BUG FIX: the original `continue` sat inside the tag loop, so
            # excluded indicators were logged but still processed; skip the
            # indicator itself when any of its tags is excluded.
            if self.exclude.get(d.provider) and \
                    any(t in self.exclude[d.provider] for t in d.tags):
                logger.debug('skipping: {}'.format(d.indicator))
                continue

            for p in plugins:
                # advanced plugins only run when explicitly enabled
                if p.is_advanced:
                    if not HUNTER_ADVANCED:
                        continue
                try:
                    p.process(i=d, router=router, user_token=token)
                except Exception as e:
                    logger.error(e)
                    logger.error('[{}] giving up on: {}'.format(p, d))
def indicator_ipv6():
    """Fixture: a sample IPv6 indicator (Google public DNS)."""
    attrs = {
        'indicator': '2001:4860:4860::8888',
        'tags': 'botnet',
        'provider': 'csirtg.io',
        'group': 'everyone',
    }
    return Indicator(**attrs)
def _kv_to_indicator(kv):
    """Construct an Indicator from a dict of its keyword fields."""
    indicator = Indicator(**kv)
    return indicator
def test_format_indicator():
    """format_keys() should substitute the indicator value into altid."""
    indicator = Indicator('example.com')
    indicator.altid = 'https://csirtg.io/search?q={indicator}'
    formatted = indicator.format_keys()
    assert formatted.altid == 'https://csirtg.io/search?q=example.com'
def get_indicator(l):
    """Heuristically build an Indicator from a list of raw values.

    Two passes: each element is classified ('int', 'indicator',
    'timestamp', 'string'), then the classified values are folded into a
    single Indicator (ports, timestamps, tags, description).

    :param l: list of values pulled from one input row
    :return: a populated Indicator instance
    """
    i = {}
    # step 1, detect datatypes
    for e in l:
        if isinstance(e, int):
            i[e] = 'int'
            continue

        t = None
        try:
            t = resolve_itype(e)
            if t:
                i[e] = 'indicator'
                continue
        except Exception:
            # resolve_itype raises on non-indicators; fall through
            pass

        if is_timestamp(e):
            i[e] = 'timestamp'
            continue

        # BUG FIX: `basestring` is Python 2 only and raises NameError on
        # Python 3; use (str, bytes) as the sibling implementation does.
        if isinstance(e, (str, bytes)):
            i[e] = 'string'

    # step 2, fold classified values into an Indicator
    i2 = Indicator()
    timestamps = []
    ports = []
    for e in i:
        if i[e] == 'indicator':
            i2.indicator = e
            continue

        if i[e] == 'timestamp':
            timestamps.append(e)
            continue

        if i[e] == 'int':
            # bare integers are assumed to be ports — TODO confirm
            ports.append(e)
            continue

        if i[e] == 'string':
            # multi-word strings become the description, short single
            # words become a tag
            if ' ' in e:
                i2.description = e
                continue
            if len(e) < 10:
                i2.tags = [e]
                continue

    # newest timestamp becomes lasttime, the next newest firsttime
    timestamps = sorted(timestamps, reverse=True)
    if len(timestamps) > 0:
        i2.lasttime = timestamps[0]
    if len(timestamps) > 1:
        i2.firsttime = timestamps[1]

    if len(ports) > 0:
        if len(ports) == 1:
            i2.portlist = ports[0]
        else:
            # with two ports, the larger is assumed to be the source port
            # — TODO confirm against the feeds this parses
            if ports[0] > ports[1]:
                i2.portlist = ports[0]
                i2.dest_portlist = ports[1]
            else:
                i2.portlist = ports[1]
                i2.dest_portlist = ports[0]

    return i2
def process(self, i, router):
    """Expand an FQDN indicator into related indicators via DNS.

    For an 'fqdn' indicator this resolves CNAME, A, NS and MX records and
    submits each result to the router as a new indicator with a reduced
    confidence (divided by 2/3/4/5/6 depending on the relationship).
    Non-fqdn indicators are ignored.

    NOTE(review): this variant spreads `i.__dict__` directly (not called),
    unlike siblings that call `i.__dict__()` — verify which the Indicator
    class actually provides.
    """
    if i.itype == 'fqdn':
        # CNAME targets => new fqdn indicators at confidence/2
        try:
            r = resolve_ns(i.indicator, t='CNAME')
        except Timeout:
            self.logger.info('timeout trying to resolve: {}'.format(i.indicator))
            r = []

        for rr in r:
            fqdn = Indicator(**i.__dict__)
            fqdn.indicator = str(rr).rstrip('.')
            fqdn.itype = 'fqdn'
            fqdn.confidence = (int(fqdn.confidence) / 2)
            x = router.indicators_create(fqdn)

        # if it's a subdomain, also submit the parent domain at confidence/3
        if i.is_subdomain():
            fqdn = Indicator(**i.__dict__)
            fqdn.indicator = i.is_subdomain()
            fqdn.confidence = (int(fqdn.confidence) / 3)
            x = router.indicators_create(fqdn)

        # A records => ipv4 indicators at confidence/4, rdata = source fqdn
        try:
            r = resolve_ns(i.indicator)
        except Timeout:
            self.logger.info('timeout trying to resolve: {}'.format(i.indicator))
            r = []

        for rr in r:
            ip = Indicator(**i.__dict__)
            ip.indicator = str(rr)
            ip.itype = 'ipv4'
            ip.rdata = i.indicator
            ip.confidence = (int(ip.confidence) / 4)
            x = router.indicators_create(ip)
            self.logger.debug(x)

        # NS records => fqdn indicators at confidence/5
        try:
            r = resolve_ns(i.indicator, t='NS')
        except Timeout:
            self.logger.info('timeout trying to resolve NS for: {}'.format(i.indicator))
            r = []

        for rr in r:
            ip = Indicator(**i.__dict__)
            ip.indicator = str(rr).rstrip('.')
            ip.itype = 'fqdn'
            ip.rdata = i.indicator
            ip.confidence = (int(ip.confidence) / 5)
            x = router.indicators_create(ip)
            self.logger.debug(x)

        # MX records => fqdn indicators at confidence/6
        try:
            r = resolve_ns(i.indicator, t='MX')
        except Timeout:
            self.logger.info('timeout trying to resolve MX for: {}'.format(i.indicator))
            r = []

        for rr in r:
            ip = Indicator(**i.__dict__)
            ip.indicator = str(rr).rstrip('.')
            ip.itype = 'fqdn'
            ip.rdata = i.indicator
            ip.confidence = (int(ip.confidence) / 6)
            x = router.indicators_create(ip)
            self.logger.debug(x)
def main():
    """csirtg-ufw entry point: tail a UFW log, parse blocked SYN scans into
    indicators, de-duplicate within an aggregation window, and send them to
    the configured client (or print to stdout)."""
    p = get_argument_parser()
    p = ArgumentParser(
        description=textwrap.dedent('''\
        Env Variables:
            CSIRTG_RUNTIME_PATH

        example usage:
            $ csirtg-ufw -f /var/log/ufw.log
            $ ZYRE_GROUP=honeynet csirtg-ufw -d -f /var/log/ufw.log --client zyre
            $ csirtg-ufw -f /var/log/ufw.log --client csirtg --user wes --feed scanners -d
        '''),
        formatter_class=RawDescriptionHelpFormatter,
        prog='csirtg-ufw',
        parents=[p],
    )

    p.add_argument('--no-verify-ssl', help='turn TLS/SSL verification OFF', action='store_true')
    p.add_argument('-f', '--file', default=FILENAME)
    p.add_argument('--client', default='stdout')
    p.add_argument('--user')
    p.add_argument('--feed')
    p.add_argument('--format', default='csv')
    p.add_argument('--provider', help='specify provider [default %(default)s]', default=PROVIDER)
    p.add_argument('--ignore-client-errors', help='skip when client errors out (eg: HTTP 5XX, etc)',
                   action='store_true')
    p.add_argument('--aggregate', help='specify how many seconds to aggregate batches before sending to client '
                                       '[default %(default)s]', default=60)

    args = p.parse_args()

    if not args.provider:
        raise RuntimeError('Missing --provider flag')

    # setup logging
    setup_logging(args)

    logger.debug('starting on: {}'.format(args.file))

    verify_ssl = True
    if args.no_verify_ssl:
        verify_ssl = False

    # imported here so the CLI can start without the client installed
    from csirtg_smrt import Smrt
    s = Smrt(client=args.client, username=args.user, feed=args.feed, verify_ssl=verify_ssl)

    # de-dup bucket, reset at each aggregation-window boundary
    bucket = set()
    last_t = round_time(round=int(args.aggregate))

    try:
        for line in tail(args.file):
            # skip our own log noise and anything that isn't a blocked SYN
            if 'csirtg-ufw' in line:
                continue
            if '[UFW BLOCK]' not in line:
                continue
            if ' SYN ' not in line:
                continue

            logger.debug(line)

            try:
                i = parse_line(line)
            except AttributeError:
                logger.debug("line not matched: \n{}".format(line))
                continue

            i = Indicator(**i)
            i.provider = args.provider

            # key used for de-duplication: ip:port/protocol
            u_indicator = ':'.join([i.indicator, '/'.join([i.portlist, i.protocol])])

            if args.aggregate:
                t = round_time(dt=datetime.now(), round=int(args.aggregate))
                # new window: clear the de-dup bucket
                if t != last_t:
                    bucket = set()
                last_t = t
                if u_indicator in bucket:
                    logger.info('skipping send {}'.format(u_indicator))
                    continue
                bucket.add(u_indicator)

            if args.client == 'stdout':
                print(FORMATS[args.format](data=[i]))
                continue

            try:
                s.client.indicators_create(i)
                logger.info('indicator created: {}'.format(u_indicator))
            except Exception as e:
                # best-effort: optionally tolerate client-side failures
                logger.error(e)
                if args.ignore_client_errors:
                    pass

    except KeyboardInterrupt:
        logger.info('SIGINT caught... stopping')
        if args.client != 'stdout':
            try:
                s.client.stop()
            except AttributeError:
                # not every client implements stop()
                pass

    logger.info('exiting...')
def test_geo_ok():
    """Geo/FQDN resolution should yield an ASN for every OK indicator."""
    for value in OK:
        resolved = Indicator(value, resolve_geo=True, resolve_fqdn=True)
        assert resolved.asn is not None
def _not(data):
    """Assert that none of the given values are typed as URLs."""
    for value in data:
        indicator = Indicator(value)
        assert indicator.itype != 'url'
def test_indicator_ipv4():
    """A private IPv4 address should be typed and flagged correctly."""
    indicator = Indicator('192.168.1.1')
    assert indicator.is_private()
    assert indicator.indicator == '192.168.1.1'
    assert indicator.itype == 'ipv4'