def process(i):
    if not ENABLED:
        return

    if i.itype != 'url':
        return

    if i.probability:
        return

    for t in i.tags:
        if t == 'predicted':
            return

    if not predict(i.indicator):
        return

    i = Indicator(**i.__dict__())

    i.lasttime = arrow.utcnow()
    i.confidence = 4
    i.probability = 84
    i.provider = 'csirtgadgets.com'
    i.reference = 'https://github.com/csirtgadgets/csirtg-urlsml-py' + '#' + VERSION

    tags = set(i.tags)
    tags.add('predicted')
    i.tags = list(tags)

    return i
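# Usage sketch, not part of the plugin: the function above depends on the
# module-level ENABLED flag, VERSION string and predict() helper from the
# csirtg-urlsml hunter; the indicator value below is purely illustrative.
from csirtg_indicator import Indicator

candidate = Indicator('http://example.com/login.php', tags=['phishing'])
enriched = process(candidate)
if enriched:
    # a copy comes back with probability=84, confidence=4 and the 'predicted'
    # tag added; process() returns None when the indicator is skipped
    print(enriched.probability, enriched.tags)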
def _log_search(self, t, data):
    if not data.get('indicator'):
        return

    if data.get('nolog') in ['1', 'True', 1, True]:
        return

    if '*' in data.get('indicator'):
        return

    if '%' in data.get('indicator'):
        return

    ts = arrow.utcnow().format('YYYY-MM-DDTHH:mm:ss.SSZ')
    s = Indicator(
        indicator=data['indicator'],
        tlp='amber',
        confidence=10,
        tags='search',
        provider=t['username'],
        firsttime=ts,
        lasttime=ts,
        reporttime=ts,
        group=t['groups'][0],
        count=1,
    )
    self.store.indicators.upsert(t, [s.__dict__()])
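# Call sketch (assumptions): `t` is the decoded auth token, a dict carrying at
# least 'username' and 'groups', and `data` is the search filter; both shapes
# are inferred from the lookups above. `store` stands in for whatever instance
# defines _log_search. Wildcard ('*'/'%') and nolog searches are never logged.
token = {'username': 'example-user', 'groups': ['everyone']}
store._log_search(token, {'indicator': 'example.com'})                # upserted as a 'search' indicator
store._log_search(token, {'indicator': 'example.com', 'nolog': '1'})  # skipped
store._log_search(token, {'indicator': 'example*'})                   # skipped (wildcard)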
def resolve(i):
    plugins = load_plugins(hunters.__path__)

    try:
        i = Indicator(i, resolve_geo=True, resolve_fqdn=True)
    except dns.resolver.NoNameservers as e:
        logger.error(e)
        i = Indicator(i)

    data = [i.__dict__()]

    for p in plugins:
        try:
            indicators = p.process(i)
            indicators = [i2.__dict__() for i2 in indicators]
            data += indicators
        except (KeyboardInterrupt, SystemExit):
            break
        except Exception as e:
            if 'SERVFAIL' in str(e):
                continue

            logger.error(e, exc_info=True)
            continue

    return data
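# Usage sketch (assumption: 'csirtgadgets.com' is only an example value).
# resolve() returns plain dicts: the geo/fqdn-enriched original first,
# followed by anything the hunter plugins produced for it.
results = resolve('csirtgadgets.com')
for r in results:
    print(r['indicator'], r.get('itype'))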
def start(self):
    self._init_plugins()

    context = zmq.Context()
    pull_s = context.socket(zmq.PULL)
    push_s = context.socket(zmq.PUSH)

    push_s.SNDTIMEO = SNDTIMEO

    logger.debug('connecting to sockets...')
    pull_s.connect(self.pull)
    push_s.connect(self.push)
    logger.debug('starting Gatherer')

    poller = zmq.Poller()
    poller.register(pull_s)

    while not self.exit.is_set():
        try:
            s = dict(poller.poll(1000))
        except Exception as e:
            logger.error(e)
            break

        if pull_s in s:
            id, token, mtype, data = Msg().recv(pull_s)

            data = json.loads(data)
            if isinstance(data, dict):
                data = [data]

            rv = []
            start = time.time()

            for d in data:
                i = Indicator(**d)
                for g in self.gatherers:
                    try:
                        g.process(i)
                    except Exception as e:
                        from pprint import pprint
                        pprint(i)
                        logger.error('gatherer failed: %s' % g)
                        logger.error(e)
                        traceback.print_exc()

                rv.append(i.__dict__())

            data = json.dumps(rv)
            logger.debug('sending back to router: %f' % (time.time() - start))
            Msg(id=id, mtype=mtype, token=token, data=data).send(push_s)

    logger.info('shutting down gatherer..')
def start(self):
    self._init_plugins()

    context = zmq.Context()
    pull_s = context.socket(zmq.PULL)
    push_s = context.socket(zmq.PUSH)

    push_s.SNDTIMEO = SNDTIMEO

    logger.debug("connecting to sockets...")
    pull_s.connect(self.pull)
    push_s.connect(self.push)
    logger.debug("starting Gatherer")

    poller = zmq.Poller()
    poller.register(pull_s)

    while not self.exit.is_set():
        try:
            s = dict(poller.poll(1000))
        except Exception as e:
            logger.error(e)
            break

        if pull_s in s:
            m = pull_s.recv_multipart()
            logger.debug(m)

            id, null, mtype, token, data = m

            data = json.loads(data)
            if isinstance(data, dict):
                data = [data]

            rv = []
            for d in data:
                i = Indicator(**d)
                for g in self.gatherers:
                    try:
                        g.process(i)
                    except Exception as e:
                        logger.error("gatherer failed: %s" % g)
                        logger.error(e)
                        traceback.print_exc()

                rv.append(i.__dict__())

            data = json.dumps(rv)
            logger.debug("sending back to router...")
            push_s.send_multipart([id, null, mtype, token, data.encode("utf-8")])

    logger.info("shutting down gatherer..")
def process(self, **kwargs):
    map = copy.deepcopy(self.rule.feeds[self.feed]['map'])
    for p in map:
        map[p]['pattern'] = re.compile(map[p]['pattern'])

    itype = None
    if self.rule.feeds[self.feed].get('itype'):
        itype = self.rule.feeds[self.feed].get('itype')

    feed = []
    count = 0

    with open(self.cache, 'rb') as cache:
        for l in cache.readlines():
            l = l.decode('utf-8')
            feed.append(l)

    feed = "\n".join(feed)

    try:
        feed = feedparser.parse(feed)
    except Exception as e:
        self.logger.error('Error parsing feed: {}'.format(e))
        raise e

    for e in feed.entries:
        i = Indicator()
        self.set_defaults(i)

        for k in e:
            if not map.get(k):
                continue

            try:
                m = map[k]['pattern'].search(e[k]).groups()
            except AttributeError:
                continue

            for idx, c in enumerate(map[k]['values']):
                s = m[idx]
                if c == 'indicator' and itype == 'url' and not m[idx].startswith('http'):
                    s = 'http://%s' % s
                setattr(i, c, s)

        logger.debug(i)
        yield i.__dict__()

        count += 1
        if self.limit and int(self.limit) == count:
            return
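# Shape of the per-feed 'map' consumed above (field names and patterns are
# illustrative, not taken from a real rule). Keys are feedparser entry
# attributes; each regex group is assigned, in order, to the indicator
# attribute listed at the same position under 'values'. With itype set to
# 'url', bare hostnames captured into 'indicator' get an http:// prefix.
example_feed = {
    'itype': 'url',
    'map': {
        'title': {
            'pattern': r'^(\S+) reported as (\S+)$',
            'values': ['indicator', 'description'],
        },
    },
}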
def start(self):
    self._init_plugins()

    context = zmq.Context()
    pull_s = context.socket(zmq.PULL)
    push_s = context.socket(zmq.PUSH)

    push_s.SNDTIMEO = SNDTIMEO

    logger.debug('connecting to sockets...')
    pull_s.connect(self.pull)
    push_s.connect(self.push)
    logger.debug('starting Gatherer')

    try:
        while True:
            m = pull_s.recv_multipart()
            logger.debug(m)

            id, null, mtype, token, data = m

            data = json.loads(data)
            if isinstance(data, dict):
                data = [data]

            rv = []
            for d in data:
                i = Indicator(**d)
                for g in self.gatherers:
                    try:
                        g.process(i)
                    except Exception as e:
                        logger.error('gatherer failed: %s' % g)
                        logger.error(e)
                        traceback.print_exc()

                rv.append(i.__dict__())

            data = json.dumps(rv)
            logger.debug('sending back to router...')
            push_s.send_multipart([id, null, mtype, token, data.encode('utf-8')])

    except KeyboardInterrupt:
        logger.info('shutting down gatherer..')
        return
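# Wire-format sketch for the loop above (the endpoint, mtype and payload are
# illustrative assumptions, not the project's actual values). The gatherer
# expects five frames per message: routing id, an empty delimiter, the message
# type, the auth token and a JSON body with one indicator dict or a list of them.
import json
import zmq

context = zmq.Context()
router_push = context.socket(zmq.PUSH)
router_push.bind('ipc:///tmp/gatherer.ipc')  # the gatherer connect()s its PULL socket here

router_push.send_multipart([
    b'client-1',                                               # id
    b'',                                                       # null delimiter
    b'indicators_create',                                      # mtype
    b'TOKEN',                                                  # token
    json.dumps({'indicator': 'example.com'}).encode('utf-8'),  # data
])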
def test_hunter_plugins():
    plugins = load_plugins(cif.hunter.__path__)

    count = 0
    indicators = []
    for d in range(0, 1):
        i = Indicator(indicator=fake.domain_name(), tags=['malware'])
        indicators.append(i)

    indicators.append(Indicator('csirtgadgets.com', tags=['botnet']))
    indicators.append(Indicator('gfycat.com', tags=['exploit']))
    indicators.append(Indicator('http://csirtgadgets.com', tags=['botnet']))

    for p in plugins:
        # exercise every test indicator against each hunter, not just the first one
        for i in indicators:
            rv = p.process(i)
            if not rv:
                continue

            rv = [i2.__dict__() for i2 in rv]
            count += len(rv)