def init(self):
    """Verify the 'rt' dependency and pre-parse 'search_not_older_than'.

    Sets ``self.not_older_than_type`` to 'absolute', 'relative' or False
    and stores the parsed value in ``self.not_older_than`` or
    ``self.not_older_than_relative`` accordingly.
    """
    if rt is None:
        self.logger.error('Could not import rt. Please install it.')
        self.stop()

    self.set_request_parameters()

    search_limit = getattr(self.parameters, 'search_not_older_than', None)
    if not search_limit:
        self.not_older_than_type = False
        return

    try:
        # First attempt: absolute timestamp, e.g. '2020-01-01T00:00'.
        self.not_older_than = parser.parse(search_limit)
        self.not_older_than_type = 'absolute'
    except ValueError:
        try:
            # Fallback: relative timespan such as '3 days'.
            self.not_older_than_relative = timedelta(
                minutes=parse_relative(search_limit))
        except ValueError:
            self.logger.error(
                "Parameter 'search_not_older_than' could not be parsed. "
                "Check your configuration.")
            raise
        self.not_older_than_type = 'relative'
def init(self):
    """Configure the API-key header and the file/time filters.

    ``self.time_match`` becomes a timedelta (relative filter), an aware
    UTC datetime (absolute filter) or None (no time filtering).
    """
    self.http_header['Ocp-Apim-Subscription-Key'] = self.api_key

    self.file_match = re.compile(self.file_match) if self.file_match else None

    if not self.not_older_than:
        self.time_match = None
        return

    try:
        # Relative specification, e.g. '2 days'.
        self.time_match = timedelta(minutes=parse_relative(self.not_older_than))
    except ValueError:
        # Absolute timestamp; normalize to UTC.
        self.time_match = parser.parse(self.not_older_than).astimezone(pytz.utc)
        self.logger.info("Filtering files absolute %r.", self.time_match)
        self.check_ttl_time()
    else:
        self.logger.info("Filtering files relative %r.", self.time_match)
        # A TTL shorter than the filter window would re-process old data.
        if timedelta(seconds=self.redis_cache_ttl) < self.time_match:
            raise ValueError(
                "The cache's TTL must be higher than 'not_older_than', "
                "otherwise the bot is processing the same data over and over again."
            )
def init(self):
    """Set up request parameters, API-key header, filters and the cache."""
    self.set_request_parameters()
    self.http_header['Ocp-Apim-Subscription-Key'] = self.parameters.api_key

    self.file_match = (re.compile(self.parameters.file_match)
                       if self.parameters.file_match else None)

    if self.parameters.not_older_than:
        try:
            # Relative timespan, e.g. '2 days'.
            self.time_match = timedelta(
                minutes=parse_relative(self.parameters.not_older_than))
        except ValueError:
            # Absolute timestamp.
            # NOTE(review): parsed without timezone normalization here —
            # other variants of this bot convert to UTC; confirm intended.
            self.time_match = parser.parse(self.parameters.not_older_than)
            self.logger.info("Filtering files absolute %r.", self.time_match)
        else:
            self.logger.info("Filtering files relative %r.", self.time_match)
    else:
        self.time_match = None

    self.cache = Cache(self.parameters.redis_cache_host,
                       self.parameters.redis_cache_port,
                       self.parameters.redis_cache_db,
                       self.parameters.redis_cache_ttl,
                       getattr(self.parameters, "redis_cache_password", None))
def init(self):
    """Configure request parameters, filters and the Redis-backed cache."""
    self.set_request_parameters()
    self.http_header['Ocp-Apim-Subscription-Key'] = self.parameters.api_key

    if self.parameters.file_match:
        self.file_match = re.compile(self.parameters.file_match)
    else:
        self.file_match = None

    # Default: no time filtering; overwritten below when configured.
    self.time_match = None
    if self.parameters.not_older_than:
        try:
            # Relative timespan such as '2 days'.
            self.time_match = timedelta(
                minutes=parse_relative(self.parameters.not_older_than))
            self.logger.info("Filtering files relative %r.", self.time_match)
        except ValueError:
            # Absolute timestamp.
            self.time_match = parser.parse(self.parameters.not_older_than)
            self.logger.info("Filtering files absolute %r.", self.time_match)

    self.cache = Cache(self.parameters.redis_cache_host,
                       self.parameters.redis_cache_port,
                       self.parameters.redis_cache_db,
                       self.parameters.redis_cache_ttl,
                       getattr(self.parameters, "redis_cache_password", None))
def parse_timeattr(self, time_attr):
    """Parse a relative or absolute time specification.

    Tries the relative form (e.g. '10 hours') first, because
    dateutil.parser.parse accepts such strings and would misinterpret
    them as absolute times, see
    https://github.com/certtools/intelmq/issues/1523

    Returns a timedelta for relative input, a datetime for absolute input.
    Raises whatever parser.parse raises when neither form matches.
    """
    try:
        relative = timedelta(minutes=parse_relative(time_attr))
    except ValueError:
        absolute = parser.parse(time_attr)
        self.logger.info("Filtering out events to (absolute time) %r.", absolute)
        return absolute
    else:
        self.logger.info("Filtering out events to (relative time) %r.", relative)
        return relative
def process(self):
    """Look up abuse contacts and notification settings for one event.

    Events without source.ip/source.fqdn or without the mandatory
    classification/feed fields are passed through unchanged.
    """
    event = self.receive_message()

    if "source.ip" not in event and "source.fqdn" not in event:
        self.send_message(event)
        self.acknowledge_message()
        return

    try:
        query = {
            "classification_taxonomy": event["classification.taxonomy"],
            "classification_type": event["classification.type"],
            "feed_provider": event["feed.provider"],
            "feed_name": event["feed.name"],
            "feed_status": "production",
        }
    except KeyError as exc:
        self.logger.debug('Skipping event because of missing field: %s.', exc)
        self.send_message(event)
        self.acknowledge_message()
        return

    if "source.ip" in event:
        query["ip"] = event["source.ip"]
    if "source.fqdn" in event:
        query["domain"] = event["source.fqdn"]

    response = self.session.get(self.url, params=query).json()
    self.logger.debug('Received response %r.', response)

    if response.get("suppress", False):
        event["extra.notify"] = False
    else:
        if 'interval' not in response:
            # Empty response: nothing to annotate.
            self.send_message(event)
            self.acknowledge_message()
            return
        elif response['interval']['unit'] == 'immediate':
            event["extra.ttl"] = 0
        else:
            # parse_relative yields minutes; TTL is stored in seconds.
            event["extra.ttl"] = parse_relative(f"{response['interval']['length']} {response['interval']['unit']}") * 60

    destinations = (response.get('ip', {'destinations': []})['destinations'] +
                    response.get('domain', {'destinations': []})['destinations'])
    emails = [contact['email']
              for destination in destinations
              for contact in destination["contacts"]]
    event.add('source.abuse_contact', ','.join(emails), overwrite=self.overwrite)

    self.send_message(event)
    self.acknowledge_message()
def init(self):
    """Validate dependencies; set up filters, HTTP session and cache.

    ``self.time_match`` is a timedelta (relative filter), an aware UTC
    datetime (absolute filter) or None.
    """
    if requests is None:
        raise ValueError('Could not import requests. Please install it.')

    self.set_request_parameters()
    self.http_header['Ocp-Apim-Subscription-Key'] = self.parameters.api_key

    self.file_match = (re.compile(self.parameters.file_match)
                       if self.parameters.file_match else None)

    if self.parameters.not_older_than:
        try:
            # Relative timespan such as '2 days'.
            self.time_match = timedelta(
                minutes=parse_relative(self.parameters.not_older_than))
        except ValueError:
            # Absolute timestamp.
            if sys.version_info >= (3, 6):
                self.time_match = parser.parse(
                    self.parameters.not_older_than).astimezone(pytz.utc)
            else:
                # Python < 3.6: "astimezone() cannot be applied to a naive
                # datetime", so force an explicit UTC offset instead.
                if '+' not in self.parameters.not_older_than:
                    self.parameters.not_older_than += '+00:00'
                self.time_match = parser.parse(self.parameters.not_older_than)
            self.logger.info("Filtering files absolute %r.", self.time_match)
            self.check_ttl_time()
        else:
            self.logger.info("Filtering files relative %r.", self.time_match)
            # A TTL shorter than the filter window would re-process old data.
            if timedelta(seconds=self.parameters.redis_cache_ttl) < self.time_match:
                raise ValueError(
                    "The cache's TTL must be higher than 'not_older_than', "
                    "otherwise the bot is processing the same data over and over again."
                )
    else:
        self.time_match = None

    self.session = create_request_session_from_bot(self)
    self.cache = Cache(self.parameters.redis_cache_host,
                       self.parameters.redis_cache_port,
                       self.parameters.redis_cache_db,
                       self.parameters.redis_cache_ttl,
                       getattr(self.parameters, "redis_cache_password", None))
def parse_timeattr(self, time_attr):
    """Parse a relative or absolute time specification.

    The decision is made by checking whether the string contains a known
    timespan identifier, because dateutil.parser.parse would accept
    strings like '10 hours' as absolute times.
    See also https://github.com/certtools/intelmq/issues/1523

    Returns a timedelta for relative input, a datetime for absolute input.
    """
    is_relative = any(timespan in time_attr for timespan in TIMESPANS.keys())
    if is_relative:
        relative = timedelta(minutes=parse_relative(time_attr))
        self.logger.info("Filtering out events to (relative time) %r.", relative)
        return relative
    absolute = parser.parse(time_attr)
    self.logger.info("Filtering out events to (absolute time) %r.", absolute)
    return absolute
def init(self):
    """Check dependencies, parse 'search_not_older_than' and set up the
    session and attachment/download extraction.

    Honors the deprecated 'unzip_attachment' parameter as a fallback for
    'extract_attachment'.
    """
    if requests is None:
        raise MissingDependencyError("requests")
    if rt is None:
        raise MissingDependencyError("rt")

    if getattr(self.parameters, 'search_not_older_than', None):
        try:
            # First attempt: absolute timestamp.
            self.not_older_than = parser.parse(
                self.parameters.search_not_older_than)
            self.not_older_than_type = 'absolute'
        except ValueError:
            try:
                # Fallback: relative timespan such as '3 days'.
                self.not_older_than_relative = timedelta(
                    minutes=parse_relative(
                        self.parameters.search_not_older_than))
            except ValueError:
                self.logger.error(
                    "Parameter 'search_not_older_than' could not be parsed. "
                    "Check your configuration.")
                raise
            self.not_older_than_type = 'relative'
    else:
        self.not_older_than_type = False

    self.set_request_parameters()
    self.session = create_request_session(self)

    self._parse_extract_file_parameter('extract_attachment')
    self._parse_extract_file_parameter('extract_download')

    if hasattr(self.parameters, 'unzip_attachment'):
        self.logger.warning(
            "The parameter 'unzip_attachment' is deprecated and "
            "will be removed in version 3.0 in favor of the "
            "more generic and powerful 'extract_attachment'. "
            "Look at the Bots documentation for more details.")
        if not self.extract_attachment:
            self.extract_attachment = self.parameters.unzip_attachment
        else:
            # Logger.warn is a deprecated alias of Logger.warning; use the
            # canonical name (same output).
            self.logger.warning(
                "Both 'extract_attachment' and the deprecated "
                "'unzip_attachment' parameter are in use. Ignoring "
                "the latter one.")
def init(self):
    """Set up the MISP organisation, output directory, event interval and
    resume state from a previously written '.current' file."""
    if MISPEvent is None and import_fail_reason == 'syntax':
        raise MissingDependencyError("pymisp",
                                     version='>=2.4.117.3',
                                     additional_text="Python versions below 3.6 are "
                                                     "only supported by pymisp <= 2.4.119.1.")
    elif MISPEvent is None:
        raise MissingDependencyError('pymisp', version='>=2.4.117.3')

    self.current_event = None

    self.misp_org = MISPOrganisation()
    self.misp_org.name = self.misp_org_name
    self.misp_org.uuid = self.misp_org_uuid

    self.output_dir = Path(self.output_dir)
    MISPFeedOutputBot.check_output_dir(self.output_dir)

    # Default interval is one hour when not configured.
    if self.interval_event is None:
        self.timedelta = datetime.timedelta(hours=1)
    else:
        self.timedelta = datetime.timedelta(
            minutes=parse_relative(self.interval_event))

    current_marker = self.output_dir / '.current'
    if not current_marker.exists():
        # Fresh start: open a new time window from now.
        self.min_time_current = datetime.datetime.now()
        self.max_time_current = self.min_time_current + self.timedelta
        return

    # Resume from the last event file referenced by the marker.
    with current_marker.open() as f:
        self.current_file = Path(f.read())
    self.current_event = MISPEvent()
    self.current_event.load_file(self.current_file)

    # The event info carries the time window as ISO timestamps.
    window_start, window_end = re.findall('IntelMQ event (.*) - (.*)',
                                          self.current_event.info)[0]
    window_start = datetime.datetime.strptime(window_start,
                                              '%Y-%m-%dT%H:%M:%S.%f')
    window_end = datetime.datetime.strptime(window_end,
                                            '%Y-%m-%dT%H:%M:%S.%f')

    if window_end < datetime.datetime.now():
        # Previous window already elapsed: start a new one.
        self.min_time_current = datetime.datetime.now()
        self.max_time_current = self.min_time_current + self.timedelta
        self.current_event = None
    else:
        self.min_time_current = window_start
        self.max_time_current = window_end
def init(self):
    """Verify the 'rt' dependency and pre-parse 'search_not_older_than'.

    ``self.not_older_than_type`` ends up as 'absolute', 'relative' or
    False depending on how the parameter parses.
    """
    if rt is None:
        self.logger.error('Could not import rt. Please install it.')
        self.stop()

    self.set_request_parameters()

    if getattr(self.parameters, 'search_not_older_than', None):
        try:
            # Absolute timestamp first.
            self.not_older_than = parser.parse(
                self.parameters.search_not_older_than)
            self.not_older_than_type = 'absolute'
        except ValueError:
            try:
                # Then a relative timespan such as '3 days'.
                self.not_older_than_relative = timedelta(
                    minutes=parse_relative(
                        self.parameters.search_not_older_than))
            except ValueError:
                self.logger.error(
                    "Parameter 'search_not_older_than' could not be parsed. "
                    "Check your configuration.")
                raise
            self.not_older_than_type = 'relative'
    else:
        self.not_older_than_type = False
def init(self):
    """Validate the requests dependency; set up filters, session and cache."""
    if requests is None:
        raise MissingDependencyError("requests")

    self.set_request_parameters()
    self.http_header['Ocp-Apim-Subscription-Key'] = self.parameters.api_key

    self.file_match = (re.compile(self.parameters.file_match)
                       if self.parameters.file_match else None)

    if self.parameters.not_older_than:
        try:
            # Relative timespan such as '2 days'.
            self.time_match = timedelta(
                minutes=parse_relative(self.parameters.not_older_than))
        except ValueError:
            # Absolute timestamp, normalized to UTC.
            self.time_match = parser.parse(
                self.parameters.not_older_than).astimezone(pytz.utc)
            self.logger.info("Filtering files absolute %r.", self.time_match)
            self.check_ttl_time()
        else:
            self.logger.info("Filtering files relative %r.", self.time_match)
            # A TTL shorter than the filter window would re-process old data.
            if timedelta(seconds=self.parameters.redis_cache_ttl) < self.time_match:
                raise ValueError(
                    "The cache's TTL must be higher than 'not_older_than', "
                    "otherwise the bot is processing the same data over and over again."
                )
    else:
        self.time_match = None

    self.session = create_request_session(self)
    self.cache = Cache(self.parameters.redis_cache_host,
                       self.parameters.redis_cache_port,
                       self.parameters.redis_cache_db,
                       self.parameters.redis_cache_ttl,
                       getattr(self.parameters, "redis_cache_password", None))
def init(self):
    """Validate dependencies; configure file/time filters and the cache.

    ``self.time_match`` is a timedelta (relative filter), a datetime
    (absolute filter) or None.
    """
    if requests is None:
        raise ValueError('Could not import requests. Please install it.')

    self.set_request_parameters()
    self.http_header['Ocp-Apim-Subscription-Key'] = self.parameters.api_key

    if self.parameters.file_match:
        self.file_match = re.compile(self.parameters.file_match)
    else:
        self.file_match = None

    if self.parameters.not_older_than:
        try:
            # Relative timespan such as '2 days'.
            self.time_match = timedelta(
                minutes=parse_relative(self.parameters.not_older_than))
        except ValueError:
            # Absolute timestamp.
            if sys.version_info >= (3, 6):
                self.time_match = parser.parse(
                    self.parameters.not_older_than).astimezone(pytz.utc)
            else:
                # Python < 3.6: "astimezone() cannot be applied to a naive
                # datetime", so append an explicit UTC offset instead.
                if '+' not in self.parameters.not_older_than:
                    self.parameters.not_older_than += '+00:00'
                self.time_match = parser.parse(self.parameters.not_older_than)
            self.logger.info("Filtering files absolute %r.", self.time_match)
            self.check_ttl_time()
        else:
            self.logger.info("Filtering files relative %r.", self.time_match)
            # A TTL shorter than the filter window would re-process old data.
            if timedelta(seconds=self.parameters.redis_cache_ttl) < self.time_match:
                raise ValueError(
                    "The cache's TTL must be higher than 'not_older_than', "
                    "otherwise the bot is processing the same data over and over again.")
    else:
        self.time_match = None

    self.cache = Cache(self.parameters.redis_cache_host,
                       self.parameters.redis_cache_port,
                       self.parameters.redis_cache_db,
                       self.parameters.redis_cache_ttl,
                       getattr(self.parameters, "redis_cache_password", None))
def init(self):
    """Check the 'rt' dependency, pre-parse 'search_not_older_than' and
    prepare the attachment/download extraction parameters."""
    if rt is None:
        raise MissingDependencyError("rt")

    if self.search_not_older_than is None:
        self.not_older_than_type = False
    else:
        try:
            # Absolute timestamp first.
            self.not_older_than = parser.parse(self.search_not_older_than)
            self.not_older_than_type = 'absolute'
        except ValueError:
            try:
                # Then a relative timespan such as '3 days'.
                self.not_older_than_relative = timedelta(
                    minutes=parse_relative(self.search_not_older_than))
            except ValueError:
                self.logger.error(
                    "Parameter 'search_not_older_than' could not be parsed. "
                    "Check your configuration.")
                raise
            self.not_older_than_type = 'relative'

    self._parse_extract_file_parameter('extract_attachment')
    self._parse_extract_file_parameter('extract_download')
def init(self):
    """Set up the MISP organisation, output directory, event interval and
    resume state from a previously written '.current' file."""
    if MISPEvent is None:
        raise MissingDependencyError('pymisp', version='>=2.4.117.3')

    self.current_event = None

    self.misp_org = MISPOrganisation()
    self.misp_org.name = self.parameters.misp_org_name
    self.misp_org.uuid = self.parameters.misp_org_uuid

    self.output_dir = Path(self.parameters.output_dir)
    MISPFeedOutputBot.check_output_dir(self.output_dir)

    # Default interval is one hour when not configured.
    if not hasattr(self.parameters, 'interval_event'):
        self.timedelta = datetime.timedelta(hours=1)
    else:
        self.timedelta = datetime.timedelta(
            minutes=parse_relative(self.parameters.interval_event))

    current_marker = self.output_dir / '.current'
    if not current_marker.exists():
        # Fresh start: open a new time window from now.
        self.min_time_current = datetime.datetime.now()
        self.max_time_current = self.min_time_current + self.timedelta
        return

    # Resume from the last event file referenced by the marker.
    with current_marker.open() as f:
        self.current_file = Path(f.read())
    self.current_event = MISPEvent()
    self.current_event.load_file(self.current_file)

    # The event info carries the time window as ISO timestamps.
    window_start, window_end = re.findall('IntelMQ event (.*) - (.*)',
                                          self.current_event.info)[0]
    window_start = datetime.datetime.strptime(window_start,
                                              '%Y-%m-%dT%H:%M:%S.%f')
    window_end = datetime.datetime.strptime(window_end,
                                            '%Y-%m-%dT%H:%M:%S.%f')

    if window_end < datetime.datetime.now():
        # Previous window already elapsed: start a new one.
        self.min_time_current = datetime.datetime.now()
        self.max_time_current = self.min_time_current + self.timedelta
        self.current_event = None
    else:
        self.min_time_current = window_start
        self.max_time_current = window_end
def test_parse_relative(self):
    """Tests if parse_relative returns the correct timespan in minutes."""
    self.assertEqual(utils.parse_relative('2\tyears'), 1051200)
    self.assertEqual(utils.parse_relative('1 hour'), 60)
def test_parse_relative_raises(self):
    """Tests if parse_relative correctly raises ValueError on bad input."""
    with self.assertRaises(ValueError):
        utils.parse_relative('1 minute')
    with self.assertRaises(ValueError):
        utils.parse_relative('1 hou')
def compute_basic_math(self, action, event) -> str:
    """Shift a datetime event field by the action's relative timespan.

    The operator symbol is resolved through ``self._basic_math_op_map``;
    the result is returned as an ISO-format string.
    """
    base = DateTime.parse_utc_isoformat(event[action.key], True)
    offset = datetime.timedelta(minutes=parse_relative(action.value))
    operation = self._basic_math_op_map[action.operator]
    return operation(base, offset).isoformat()
def compute_basic_math(action, event):
    """Shift a datetime event field by a relative timespan.

    Supports the '+=' and '-=' operators; any other operator falls
    through and implicitly returns None.
    """
    date = DateTime.parse_utc_isoformat(event[action.key], True)
    if action.operator == '+=':
        delta = datetime.timedelta(minutes=parse_relative(action.value))
        return (date + delta).isoformat()
    if action.operator == '-=':
        delta = datetime.timedelta(minutes=parse_relative(action.value))
        return (date - delta).isoformat()
def init(self):
    """Parse the timespan, normalize the comma-separated field list and
    purge stale entries."""
    self.__timespan = parse_relative(self.timespan)
    self.fields = set(map(str.strip, self.fields.split(',')))
    self.cleanup()