def clean_indicator(self, i, rule):
    """Normalize a raw indicator dict into an Indicator object.

    Applies the rule's de-fang replacement table to every field,
    normalizes the itype, and back-fills firsttime/reporttime/group
    when the feed did not supply them.

    :param i: raw indicator dict parsed from the feed
    :param rule: rule object; ``rule.replace`` maps field -> {old: new}
    :return: Indicator instance
    """
    # undo de-fang'ing (e.g. hxxp -> http) per the rule's replace table
    if rule.replace:
        for field in i:
            subs = rule.replace.get(field)
            if not subs:
                continue
            for old, new in subs.items():
                i[field] = i[field].replace(old, new)

    i = normalize_itype(i)
    if isinstance(i, dict):
        i = Indicator(**i)

    # back-fill defaults the feed didn't provide
    if not i.firsttime:
        i.firsttime = i.lasttime
    if not i.reporttime:
        i.reporttime = arrow.utcnow().datetime
    if not i.group:
        i.group = 'everyone'

    return i
def process(self):
    """Parse a JSON feed and submit each mapped indicator to the client.

    Each row's columns listed in the rule's ``map`` are copied onto the
    indicator fields listed in ``values``, on top of the rule defaults.

    :return: list of responses from ``client.indicators_create``
    """
    defaults = self._defaults()
    map = self.rule.feeds[self.feed]['map']
    values = self.rule.feeds[self.feed]['values']

    data = []
    for l in self.fetcher.process():
        l = json.loads(l)
        for e in l:
            # seed each row from a fresh copy of the rule defaults
            # (the original deep-copied defaults once and then discarded
            # them with i = {} for every row), then overlay the mapped
            # columns
            i = copy.deepcopy(defaults)
            for x, c in enumerate(map):
                i[values[x]] = e[c]

            try:
                self.logger.debug(i)
                i = normalize_itype(i)
                i = Indicator(**i)
                r = self.client.indicators_create(i)
                data.append(r)
            except (InvalidIndicator, NotImplementedError) as e:
                # skip malformed rows instead of aborting the whole feed;
                # normalize_itype raises InvalidIndicator on bad data
                self.logger.error(e)
                self.logger.info('skipping: {}'.format(i['indicator']))

    return data
def process(self):
    """Parse an RSS/Atom feed with per-field regex patterns and submit
    each extracted indicator to the client.

    :return: list of responses from ``client.indicators_create``
    """
    defaults = self._defaults()

    # compile the rule's regexes once, up front
    patterns = copy.deepcopy(self.rule.feeds[self.feed]['pattern'])
    for p in patterns:
        patterns[p]['pattern'] = re.compile(patterns[p]['pattern'])

    feed = []
    for l in self.fetcher.process():
        feed.append(l)
    feed = "\n".join(feed)

    try:
        feed = feedparser.parse(feed)
    except Exception as e:
        self.logger.error('Error parsing feed: {}'.format(e))
        self.logger.error(defaults['remote'])
        raise e

    rv = []
    for e in feed.entries:
        i = copy.deepcopy(defaults)
        for k in e:
            # feedparser exposes the <description> element as 'summary'
            if k == 'summary' and patterns.get('description'):
                try:
                    m = patterns['description']['pattern'].search(e[k]).groups()
                except AttributeError:
                    # pattern didn't match this entry's field
                    continue
                for idx, c in enumerate(patterns['description']['values']):
                    i[c] = m[idx]
            elif patterns.get(k):
                try:
                    m = patterns[k]['pattern'].search(e[k]).groups()
                except AttributeError:
                    continue
                for idx, c in enumerate(patterns[k]['values']):
                    i[c] = m[idx]

        if not i.get('indicator'):
            self.logger.error('missing indicator: {}'.format(e[k]))
            continue

        try:
            i = normalize_itype(i)
            i = Indicator(**i)
            self.logger.debug(i)
            r = self.client.indicators_create(i)
            rv.append(r)
        except (InvalidIndicator, NotImplementedError) as e:
            # skip malformed entries instead of aborting the whole feed;
            # normalize_itype raises InvalidIndicator on bad data
            self.logger.error(e)
            self.logger.info('skipping: {}'.format(i['indicator']))

    return rv
def is_valid(self, i, rule):
    """Return True if the raw indicator dict normalizes cleanly.

    Applies the rule's de-fang replacement table first (mutating ``i``),
    then attempts ``normalize_itype``. In DEBUG mode, unless
    ``skip_invalid`` is set, an InvalidIndicator is re-raised instead of
    being swallowed.

    :param i: raw indicator dict parsed from the feed
    :param rule: rule object; ``rule.replace`` maps field -> {old: new}
    :return: bool
    """
    # guard: nothing to validate without an indicator value
    # (consistent with the sibling is_valid implementation)
    if not i.get('indicator') or i['indicator'] == '':
        return False

    # check for de-fang'd feed
    if rule.replace:
        for e in i:
            if not rule.replace.get(e):
                continue
            for k, v in rule.replace[e].items():
                i[e] = i[e].replace(k, v)

    try:
        i = normalize_itype(i)
        return True
    except InvalidIndicator as e:
        if logger.getEffectiveLevel() == logging.DEBUG:
            if not self.skip_invalid:
                raise e
        return False
def is_valid(self, i, rule):
    """Check whether the raw indicator dict can be normalized.

    De-fangs field values per the rule's replace table (mutating ``i``),
    then tries ``normalize_itype``. Invalid indicators yield False,
    except in DEBUG mode without ``skip_invalid``, where the error
    propagates to the caller.

    :param i: raw indicator dict parsed from the feed
    :param rule: rule object; ``rule.replace`` maps field -> {old: new}
    :return: bool
    """
    # nothing to validate without an indicator value
    if not i.get('indicator') or i['indicator'] == '':
        return False

    # check for de-fang'd feed
    if rule.replace:
        for field in i:
            table = rule.replace.get(field)
            if table:
                for old, new in table.items():
                    i[field] = i[field].replace(old, new)

    try:
        normalize_itype(i)
    except InvalidIndicator as e:
        if logger.getEffectiveLevel() == logging.DEBUG and not self.skip_invalid:
            raise e
        return False
    return True
def process(self):
    """Parse a JSON feed, de-duplicate against the archive, and submit
    new indicators to the client.

    Honors ``self.limit``: processing stops entirely once the limit is
    exhausted. Indicators already present in the archive are skipped.

    :return: list of responses from ``client.indicators_create``
    """
    defaults = self._defaults()
    map = self.rule.feeds[self.feed]['map']
    values = self.rule.feeds[self.feed]['values']

    rv = []
    for l in self.fetcher.process():
        l = json.loads(l)
        for e in l:
            # seed each row from a fresh copy of the rule defaults
            # (the original deep-copied defaults once and then discarded
            # them with i = {} for every row), then overlay the mapped
            # columns
            i = copy.deepcopy(defaults)
            for x, c in enumerate(map):
                i[values[x]] = e[c]

            try:
                self.logger.debug(i)
                i = normalize_itype(i)
                i = Indicator(**i)
            except (InvalidIndicator, NotImplementedError) as e:
                # skip malformed rows instead of aborting the whole feed
                self.logger.error(e)
                self.logger.info('skipping: {}'.format(i['indicator']))
            else:
                if self.is_archived(i.indicator, i.provider, i.group, i.tags, i.firsttime, i.lasttime):
                    self.logger.info('skipping: {}/{}'.format(i.provider, i.indicator))
                else:
                    r = self.client.indicators_create(i)
                    self.archive(i.indicator, i.provider, i.group, i.tags, i.firsttime, i.lasttime)
                    rv.append(r)

                if self.limit:
                    self.limit -= 1
                    if self.limit == 0:
                        self.logger.debug('limit reached...')
                        # return (not break): a bare break only exited the
                        # inner per-line loop, and with limit now 0 (falsy)
                        # all remaining lines were processed unbounded
                        return rv

    return rv
def process(self):
    """Parse an RSS/Atom feed with per-field regexes, de-duplicate
    against the archive, and submit new indicators to the client.

    Honors ``self.limit``: the entry loop stops once the limit is
    exhausted. Indicators already present in the archive are skipped.

    :return: list of responses from ``client.indicators_create``
    """
    defaults = self._defaults()

    # compile the rule's regexes once, up front
    patterns = copy.deepcopy(self.rule.feeds[self.feed]['pattern'])
    for name in patterns:
        patterns[name]['pattern'] = re.compile(patterns[name]['pattern'])

    raw = []
    for line in self.fetcher.process():
        raw.append(line)
    feed = "\n".join(raw)

    try:
        feed = feedparser.parse(feed)
    except Exception as e:
        self.logger.error('Error parsing feed: {}'.format(e))
        self.logger.error(defaults['remote'])
        raise e

    rv = []
    for e in feed.entries:
        i = copy.deepcopy(defaults)
        for k in e:
            # feedparser exposes the <description> element as 'summary';
            # fall through to a literal 'summary' pattern otherwise
            if k == 'summary' and patterns.get('description'):
                key = 'description'
            elif patterns.get(k):
                key = k
            else:
                continue

            match = patterns[key]['pattern'].search(e[k])
            if match is None:
                # pattern didn't hit this entry's field
                continue
            groups = match.groups()
            for idx, field in enumerate(patterns[key]['values']):
                i[field] = groups[idx]

        if not i.get('indicator'):
            self.logger.error('missing indicator: {}'.format(e[k]))
            continue

        try:
            i = normalize_itype(i)
            i = Indicator(**i)
        except InvalidIndicator as e:
            # skip malformed entries instead of aborting the whole feed
            self.logger.error(e)
            self.logger.info('skipping: {}'.format(i['indicator']))
        else:
            if self.is_archived(i.indicator, i.provider, i.group, i.tags, i.firsttime, i.lasttime):
                self.logger.info('skipping: {}/{}'.format(i.provider, i.indicator))
            else:
                r = self.client.indicators_create(i)
                self.archive(i.indicator, i.provider, i.group, i.tags, i.firsttime, i.lasttime)
                rv.append(r)

            if self.limit:
                self.limit -= 1
                if self.limit == 0:
                    self.logger.debug('limit reached...')
                    break

    return rv