Example #1
0
    def process(self, rule, feed, limit=None, data=None, filters=None):
        """Parse one feed and lazily yield the first format key of each
        surviving indicator, archiving every indicator as it is emitted.

        The indicator stream is filtered in order: validity, cleanup,
        age (only when ``self.goback`` is set), then already-archived.
        Survivors are sent in batches of ``FIREBALL_SIZE`` inside an
        archiver begin/commit transaction.
        """
        parser = self.load_parser(rule, feed,
                                  limit=limit,
                                  data=data,
                                  filters=filters)

        indicators = parser.process()

        # no explicit limit given -> fall back to the per-feed limit, if any
        if not limit:
            limit = rule.feeds[feed].get('limit')

        if limit:
            indicators = itertools.islice(indicators, int(limit))

        # lazy filter/map pipeline; nothing is pulled until batching below
        indicators = filter(lambda x: self.is_valid(x, rule), indicators)
        indicators = map(lambda x: self.clean_indicator(x, rule), indicators)

        # drop indicators that are too old, when a look-back window is set
        if self.goback:
            indicators = filter(lambda x: not self.is_old(x), indicators)

        indicators = filter(lambda x: not self.is_archived_with_log(x),
                            indicators)

        for batch in chunk(indicators, int(FIREBALL_SIZE)):
            self.archiver.begin()
            self.send_indicators(batch)

            for indicator in batch:
                # archive status is re-checked per item before emitting
                if self.is_archived_with_log(indicator):
                    continue

                # TODO- this affects a lot of tests
                # converted i.format_keys to generator in indicator-0.0.0b0
                yield list(indicator.format_keys())[0]
                self.archive(indicator)

            self.archiver.commit()

        if limit:
            self.logger.debug("limit reached...")
Example #2
0
    def _process(self, rule, feed, limit=None):
        """Fetch a feed, resolve its parser plugin, and return the parsed
        output.

        The parser name comes from the rule (or ``PARSER_DEFAULT``); when
        the plain name cannot be loaded, a 'z'-prefixed variant is tried
        before giving up with ``SystemError``.
        """
        fetcher = Fetcher(rule, feed)

        name = rule.parser or PARSER_DEFAULT
        search_path = csirtg_smrt.parser.__path__[0]

        parser_cls = load_plugin(search_path, name)
        if parser_cls is None:
            # fall back to the 'z'-prefixed plugin variant
            self.logger.info('trying z{}'.format(name))
            parser_cls = load_plugin(search_path, 'z{}'.format(name))

        if parser_cls is None:
            raise SystemError('Unable to load parser: {}'.format(name))

        self.logger.debug("loading parser: {}".format(parser_cls))

        parser = parser_cls(self.client, fetcher, rule, feed, limit=limit)
        return parser.process()
Example #3
0
    def process(self, rule, feed, limit=None, data=None, filters=None):
        """Parse one feed and lazily yield ``format_keys()`` for each
        surviving indicator, archiving each indicator as it is emitted.

        Indicators pass through validity, cleanup, age (only when
        ``self.goback`` is set) and already-archived filters, then are
        sent in ``FIREBALL_SIZE`` batches inside an archiver
        begin/commit transaction.
        """
        parser = self.load_parser(rule, feed, limit=limit, data=data, filters=filters)

        raw_stream = parser.process()

        # no explicit limit -> fall back to the per-feed limit, if any
        if not limit:
            limit = rule.feeds[feed].get('limit')

        if limit:
            raw_stream = itertools.islice(raw_stream, int(limit))

        def _screen(stream):
            # apply the filters lazily, one indicator at a time, in the
            # same order as before: valid -> clean -> age -> archived
            check_age = self.goback
            for candidate in stream:
                if not self.is_valid(candidate, rule):
                    continue
                candidate = self.clean_indicator(candidate, rule)
                if check_age and self.is_old(candidate):
                    continue
                if self.is_archived_with_log(candidate):
                    continue
                yield candidate

        for batch in chunk(_screen(raw_stream), int(FIREBALL_SIZE)):
            self.archiver.begin()
            self.send_indicators(batch)

            for indicator in batch:
                # archive status is re-checked per item before emitting
                if self.is_archived_with_log(indicator):
                    continue

                yield indicator.format_keys()
                self.archive(indicator)

            self.archiver.commit()

        if limit:
            self.logger.debug("limit reached...")