def init(self):
    """Validate required credentials/parameters and set up the HTTP session and Redis cache.

    Raises ValueError when api_key, secret or country is not configured.
    """
    self.apikey = getattr(self.parameters, "api_key", None)
    if self.apikey is None:
        raise ValueError('No api_key provided.')
    self.secret = getattr(self.parameters, "secret", None)
    if self.secret is None:
        raise ValueError('No secret provided.')
    self.country = getattr(self.parameters, "country", None)
    if self.country is None:
        raise ValueError('No country provided.')
    self.types = getattr(self.parameters, 'types', None)
    # Accept a comma-separated string as well as a list.
    if isinstance(self.types, str):
        self.types = self.types.split(',')
    # Opening fragment of the JSON request body ('{{' is an escaped literal
    # brace); presumably completed elsewhere before sending — TODO confirm.
    self.preamble = '{{ "apikey": "{}" '.format(self.apikey)
    self.set_request_parameters()
    self.session = create_request_session(self)
    self.cache = Cache(self.parameters.redis_cache_host,
                       self.parameters.redis_cache_port,
                       self.parameters.redis_cache_db,
                       getattr(self.parameters, 'redis_cache_ttl', 864000),  # 10 days
                       getattr(self.parameters, "redis_cache_password", None))
def init(self):
    """Check dependencies and configure the Twitter timeline collector.

    Raises MissingDependencyError when 'requests' or 'twitter' is unavailable.
    """
    if requests is None:
        raise MissingDependencyError("requests")
    if twitter is None:
        raise MissingDependencyError("twitter")
    self.current_time_in_seconds = int(time.time())
    self.target_timelines = []
    # Comma-separated screen names of timelines to fetch.
    if getattr(self.parameters, "target_timelines", '') != '':
        self.target_timelines.extend(self.parameters.target_timelines.split(','))
    self.tweet_count = int(getattr(self.parameters, "tweet_count", 20))
    self.follow_urls = []
    # Comma-separated screen names whose tweet URLs should be followed.
    if getattr(self.parameters, "follow_urls", '') != '':
        self.follow_urls.extend(self.parameters.follow_urls.split(','))
    self.include_rts = getattr(self.parameters, "include_rts", False)
    self.exclude_replies = getattr(self.parameters, "exclude_replies", False)
    # Maximum age of tweets to process, in seconds (default: one day).
    self.timelimit = int(getattr(self.parameters, "timelimit", 24 * 60 * 60))
    self.api = twitter.Api(consumer_key=self.parameters.consumer_key,
                           consumer_secret=self.parameters.consumer_secret,
                           access_token_key=self.parameters.access_token_key,
                           access_token_secret=self.parameters.access_token_secret,
                           tweet_mode="extended")
    self.set_request_parameters()
    self.session = create_request_session(self)
def process(self):
    """Probe the URL stored in ``self.field`` and add 'status' online/offline to the event."""
    event = self.receive_message()
    if self.field in event:
        self.set_request_parameters()
        session = create_request_session(self)
        try:
            response = session.get(event[self.field])
            # If success_status_codes are configured, we use those
            # to check the success of the request, otherwise we
            # rely on the boolean value of the response
            if (self.success_status_codes and response.status_code in self.success_status_codes) or (response):
                event.add('status', "online", overwrite=self.overwrite)
            else:
                event.add('status', 'offline', overwrite=self.overwrite)
                event.add('extra.reason', response.reason)
        except Exception as exc:
            # Connection errors, timeouts, invalid URLs etc. all count as offline.
            event.add('status', 'offline', overwrite=self.overwrite)
            event.add('extra.reason', str(exc))
    else:
        self.logger.debug('Field %s was not part of the message.', self.field)
    self.send_message(event)
    self.acknowledge_message()
def init(self):
    """Verify the 'requests' dependency, reset the error counter and open the HTTP session."""
    if requests is None:
        raise MissingDependencyError("requests")
    # Counter for consecutive failures; reset on (re-)initialisation.
    self.__error_count = 0
    self.set_request_parameters()
    self.session = create_request_session(self)
def init(self):
    """Validate the Splunk connection and search configuration; prepare the HTTP session.

    Raises ConfigurationError for missing settings or for contradictory
    result-handling option combinations.
    """
    if requests is None:
        raise MissingDependencyError("requests")
    self.retry_interval = getattr(self.parameters, "retry_interval", 5)
    self.url = getattr(self.parameters, "url", None)
    if not self.url:
        raise ConfigurationError("Connection", "No Splunk API URL specified")
    self.auth_token = getattr(self.parameters, "auth_token", None)
    if not self.auth_token:
        raise ConfigurationError(
            "Connection", "No Splunk API authorization token specified")
    self.saved_search = getattr(self.parameters, "saved_search", None)
    if not self.saved_search:
        raise ConfigurationError("Search", "No Splunk saved search specified")
    self.search_parameters = getattr(self.parameters, "search_parameters", {})
    self.result_fields = getattr(self.parameters, "result_fields", {})
    # How to react when the search returns nothing (list of flags).
    self.not_found = getattr(self.parameters, "not_found", ["warn", "send"])
    if "send" in self.not_found and "drop" in self.not_found:
        raise ConfigurationError(
            "Processing",
            "Cannot both drop and send messages without search results")
    # How to react when the search returns more than one result (list of flags).
    self.multiple_result_handling = getattr(self.parameters,
                                            "multiple_result_handling",
                                            ["warn", "use_first", "send"])
    if "limit" in self.multiple_result_handling and len(
            self.multiple_result_handling) != 1:
        raise ConfigurationError(
            "Processing",
            "Search results limited to one, no processing of multiple results possible"
        )
    if "send" in self.multiple_result_handling and "drop" in self.multiple_result_handling:
        raise ConfigurationError(
            "Processing",
            "Cannot both drop and send messages with multiple search results"
        )
    if "ignore" in self.multiple_result_handling and "use_first" in self.multiple_result_handling:
        raise ConfigurationError(
            "Processing",
            "Cannot both ignore and use multiple search results")
    self.overwrite = getattr(self.parameters, "overwrite", None)
    self.set_request_parameters()
    self.http_header.update(
        {"Authorization": "Bearer {}".format(self.auth_token)})
    self.session = utils.create_request_session(self)
    # Splunk searches can take long; do not keep connections alive in between.
    self.session.keep_alive = False
def init(self):
    """Run the parent initialisation, then set up the HTTP output session.

    Optional chunking parameters allow large payloads to be split into
    multiple requests.
    """
    super().init()
    if requests is None:
        raise MissingDependencyError("requests")
    # Build request
    self.set_request_parameters()
    self.session = create_request_session(self)
    params = self.parameters
    self.chunk_size = getattr(params, 'chunk_size', None)
    self.chunk_replicate_header = getattr(params, 'chunk_replicate_header', None)
def init(self):
    """Prepare the HTTP session and optional PGP signature verification.

    Raises MissingDependencyError when 'requests' (always) or 'gnupg'
    (only if verification is enabled) is unavailable.
    """
    if requests is None:
        raise MissingDependencyError("requests")
    self.set_request_parameters()
    self.session = create_request_session(self)
    self.use_gpg = getattr(self.parameters, "verify_pgp_signatures", False)
    if self.use_gpg:
        if gnupg is None:
            raise MissingDependencyError("gnupg")
        # Fix: previously the dangling else logged "verification is active"
        # even when verify_pgp_signatures was False; log only when enabled.
        self.logger.info('PGP signature verification is active.')
def update_database(cls, verbose=False):
    """Download the latest public suffix list and install it for all configured bots.

    Exits the process with a message on configuration or download failure.
    """
    bots = {}
    runtime_conf = get_bots_settings()
    try:
        for bot in runtime_conf:
            if runtime_conf[bot]["module"] == __name__:
                bots[bot] = runtime_conf[bot]["parameters"]["suffix_file"]
    except KeyError as e:
        sys.exit(
            "Database update failed. Your configuration of {0} is missing key {1}."
            .format(bot, e))
    if not bots:
        if verbose:
            print(
                "Database update skipped. No bots of type {0} present in runtime.conf."
                .format(__name__))
        sys.exit(0)
    # NOTE(review): the original comment here mentioned asn_lookup bots and a
    # late import, but nothing is imported below — looks copied from a sibling
    # updater; verify against the other update_database implementations.
    try:
        session = create_request_session()
        url = "https://publicsuffix.org/list/public_suffix_list.dat"
        if verbose:
            print("Downloading the latest database update...")
        response = session.get(url)
        if not response.ok:
            sys.exit("Database update failed. Server responded: {0}.\n"
                     "URL: {1}".format(response.status_code, response.url))
    except requests.exceptions.RequestException as e:
        sys.exit("Database update failed. Connection Error: {0}".format(e))
    for database_path in set(bots.values()):
        database_dir = pathlib.Path(database_path).parent
        database_dir.mkdir(parents=True, exist_ok=True)
        with open(database_path, "wb") as database:
            database.write(response.content)
    if verbose:
        print("Database updated. Reloading affected bots.")
    ctl = IntelMQController()
    for bot in bots.keys():
        ctl.bot_reload(bot)
def init(self):
    """Set up the HTTP session with header- or basic-auth and JSON content type."""
    if requests is None:
        raise MissingDependencyError("requests")
    self.set_request_parameters()
    if self.parameters.auth_token_name and self.parameters.auth_token:
        if self.parameters.auth_type == 'http_header':
            # Token sent as a custom request header.
            self.http_header.update(
                {self.parameters.auth_token_name: self.parameters.auth_token})
        elif self.parameters.auth_type == 'http_basic_auth':
            # Token name/value used as basic-auth user/password pair.
            self.auth = self.parameters.auth_token_name, self.parameters.auth_token
    self.http_header.update({"Content-Type": "application/json; charset=utf-8"})
    self.session = utils.create_request_session(self)
    self.session.keep_alive = False
def update_database(cls):
    """Download the current Tor exit-address list and install it for all configured bots.

    Exits the process with a message on configuration or download failure.
    """
    bots = {}
    runtime_conf = load_configuration(RUNTIME_CONF_FILE)
    try:
        for bot in runtime_conf:
            if runtime_conf[bot]["module"] == __name__:
                bots[bot] = runtime_conf[bot]["parameters"]["database"]
    except KeyError as e:
        sys.exit(
            "Database update failed. Your configuration of {0} is missing key {1}."
            .format(bot, e))
    if not bots:
        print(
            "Database update skipped. No bots of type {0} present in runtime.conf."
            .format(__name__))
        sys.exit(0)
    try:
        print("Downloading the latest database update...")
        session = create_request_session()
        response = session.get(
            "https://check.torproject.org/exit-addresses")
    except requests.exceptions.RequestException as e:
        sys.exit("Database update failed. Connection Error: {0}".format(e))
    if response.status_code != 200:
        sys.exit("Database update failed. Server responded: {0}.\n"
                 "URL: {1}".format(response.status_code, response.url))
    # Keep only the IP addresses from the 'ExitAddress <ip> <timestamp>' lines.
    pattern = re.compile(r"ExitAddress ([^\s]+)")
    tor_exits = "\n".join(pattern.findall(response.text))
    for database_path in set(bots.values()):
        database_dir = pathlib.Path(database_path).parent
        database_dir.mkdir(parents=True, exist_ok=True)
        with open(database_path, "w") as database:
            database.write(tor_exits)
    print("Database updated. Reloading affected bots.")
    ctl = IntelMQController()
    for bot in bots.keys():
        ctl.bot_reload(bot)
def init(self):
    """Check dependencies and configure the RT (Request Tracker) collector.

    Parses 'search_not_older_than' either as an absolute timestamp or as a
    relative time span; raises if it can be parsed as neither.
    """
    if requests is None:
        raise MissingDependencyError("requests")
    if rt is None:
        raise MissingDependencyError("rt")
    if getattr(self.parameters, 'search_not_older_than', None):
        try:
            # Absolute timestamp, e.g. '2020-01-01'.
            self.not_older_than = parser.parse(
                self.parameters.search_not_older_than)
            self.not_older_than_type = 'absolute'
        except ValueError:
            try:
                # Relative time span, e.g. '3 days', converted to minutes.
                self.not_older_than_relative = timedelta(
                    minutes=parse_relative(
                        self.parameters.search_not_older_than))
            except ValueError:
                self.logger.error(
                    "Parameter 'search_not_older_than' could not be parsed. "
                    "Check your configuration.")
                raise
            self.not_older_than_type = 'relative'
    else:
        self.not_older_than_type = False
    self.set_request_parameters()
    self.session = create_request_session(self)
    self._parse_extract_file_parameter('extract_attachment')
    self._parse_extract_file_parameter('extract_download')
    if hasattr(self.parameters, 'unzip_attachment'):
        self.logger.warning(
            "The parameter 'unzip_attachment' is deprecated and "
            "will be removed in version 3.0 in favor of the "
            "more generic and powerful 'extract_attachment'. "
            "Look at the Bots documentation for more details.")
        if not self.extract_attachment:
            self.extract_attachment = self.parameters.unzip_attachment
        else:
            # Fix: use logger.warning() — logger.warn() is deprecated since
            # Python 3.3 and inconsistent with the call just above.
            self.logger.warning(
                "Both 'extract_attachment' and the deprecated "
                "'unzip_attachment' parameter are in use. Ignoring "
                "the latter one.")
def init(self):
    """Prepare an authenticated JSON API session with retries on HTTP 502."""
    if requests is None:
        raise ValueError("Library 'requests' could not be loaded. Please install it.")
    self.set_request_parameters()
    # %s placeholder is filled with the CIDR to look up.
    self.url = self.portal_url + '/api/1.0/ripe/contact?cidr=%s'
    self.http_header.update({
        "Content-Type": "application/json",
        "Accept": "application/json",
        "API-Authorization": self.portal_api_key
    })
    self.session = utils.create_request_session(self)
    # Retry only on 502 Bad Gateway, up to http_timeout_max_tries attempts.
    retries = requests.urllib3.Retry.from_int(self.http_timeout_max_tries)
    retries.status_forcelist = [502]
    adapter = requests.adapters.HTTPAdapter(max_retries=retries)
    self.session.mount('http://', adapter)
    self.session.mount('https://', adapter)
def init(self):
    """Validate Splunk configuration attributes and prepare the HTTP session.

    Unlike the getattr-based variant, this expects the attributes to be
    populated already and only validates them.  Raises ConfigurationError
    for missing settings or contradictory result-handling combinations.
    """
    if requests is None:
        raise MissingDependencyError("requests")
    if self.url is None:
        raise ConfigurationError("Connection", "No Splunk API URL specified")
    if self.auth_token is None:
        raise ConfigurationError(
            "Connection", "No Splunk API authorization token specified")
    if self.saved_search is None:
        raise ConfigurationError("Search", "No Splunk saved search specified")
    if "send" in self.not_found and "drop" in self.not_found:
        raise ConfigurationError(
            "Processing",
            "Cannot both drop and send messages without search results")
    if "limit" in self.multiple_result_handling and len(
            self.multiple_result_handling) != 1:
        raise ConfigurationError(
            "Processing",
            "Search results limited to one, no processing of multiple results possible"
        )
    if "send" in self.multiple_result_handling and "drop" in self.multiple_result_handling:
        raise ConfigurationError(
            "Processing",
            "Cannot both drop and send messages with multiple search results"
        )
    if "ignore" in self.multiple_result_handling and "use_first" in self.multiple_result_handling:
        raise ConfigurationError(
            "Processing",
            "Cannot both ignore and use multiple search results")
    self.set_request_parameters()
    self.http_header.update(
        {"Authorization": "Bearer {}".format(self.auth_token)})
    self.session = utils.create_request_session(self)
    # Splunk searches can take long; do not keep connections alive in between.
    self.session.keep_alive = False
def update_database(cls, verbose=False):
    """Download the latest IANA TLD list and install it for all configured bots.

    Exits the process with a message on configuration or download failure.
    """
    bots = {}
    runtime_conf = get_bots_settings()
    try:
        for bot in runtime_conf:
            if runtime_conf[bot]["module"] == __name__:
                bots[bot] = runtime_conf[bot]["parameters"]["tlds_domains_list"]
    except KeyError as e:
        sys.exit("Database update failed. Your configuration of {0} is missing key {1}.".format(bot, e))
    if not bots:
        if verbose:
            print("Database update skipped. No bots of type {0} present in runtime.conf.".format(__name__))
        sys.exit(0)
    try:
        session = create_request_session()
        url = "https://data.iana.org/TLD/tlds-alpha-by-domain.txt"
        if verbose:
            print("Downloading the latest database update...")
        response = session.get(url)
        if not response.ok:
            sys.exit("Database update failed. Server responded: {0}.\n"
                     "URL: {1}".format(response.status_code, response.url))
    except requests.exceptions.RequestException as e:
        sys.exit("Database update failed. Connection Error: {0}".format(e))
    for database_path in set(bots.values()):
        database_dir = pathlib.Path(database_path).parent
        database_dir.mkdir(parents=True, exist_ok=True)
        with open(database_path, "wb") as database:
            database.write(response.content)
    if verbose:
        print("Database updated. Reloading affected bots.")
    ctl = IntelMQController()
    for bot in bots.keys():
        ctl.bot_reload(bot)
def init(self):
    """Configure the API key header, optional filename/time filters and the cache.

    'not_older_than' is parsed as a relative time span first and, failing
    that, as an absolute timestamp (converted to UTC).
    """
    if requests is None:
        raise MissingDependencyError("requests")
    self.set_request_parameters()
    self.http_header['Ocp-Apim-Subscription-Key'] = self.parameters.api_key
    if self.parameters.file_match:
        self.file_match = re.compile(self.parameters.file_match)
    else:
        self.file_match = None
    if self.parameters.not_older_than:
        try:
            # Relative time span, e.g. '2 hours'.
            self.time_match = timedelta(
                minutes=parse_relative(self.parameters.not_older_than))
        except ValueError:
            # Not relative: treat as an absolute timestamp in UTC.
            self.time_match = parser.parse(
                self.parameters.not_older_than).astimezone(pytz.utc)
            self.logger.info("Filtering files absolute %r.", self.time_match)
            self.check_ttl_time()
        else:
            self.logger.info("Filtering files relative %r.", self.time_match)
            # A TTL shorter than the filter window would cause reprocessing.
            if timedelta(seconds=self.parameters.redis_cache_ttl
                         ) < self.time_match:
                raise ValueError(
                    "The cache's TTL must be higher than 'not_older_than', "
                    "otherwise the bot is processing the same data over and over again."
                )
    else:
        self.time_match = None
    self.session = create_request_session(self)
    self.cache = Cache(self.parameters.redis_cache_host,
                       self.parameters.redis_cache_port,
                       self.parameters.redis_cache_db,
                       self.parameters.redis_cache_ttl,
                       getattr(self.parameters, "redis_cache_password", None))
def init(self):
    """Build the TLD -> RDAP-service directory from IANA data plus configured overrides."""
    if requests is None:
        raise MissingDependencyError("requests")
    self.set_request_parameters()
    self.__session = create_request_session(self)
    # get overall rdap data from iana
    resp = self.__session.get('https://data.iana.org/rdap/dns.json')
    self.__session.close()
    resp = resp.json()
    for service in resp['services']:
        # service[0] is the TLD list, service[1] the base-URL list.
        for tld in service[0]:
            self.__rdap_directory[tld] = {"url": service[1][0]}
    # get bootstrapped servers (user-configured overrides)
    for service in self.rdap_bootstrapped_servers:
        entry = self.rdap_bootstrapped_servers[service]
        if isinstance(entry, str):
            # A plain URL string is wrapped into the standard directory format.
            self.__rdap_directory[service] = {"url": entry}
        elif isinstance(entry, dict):
            # Fix: the original tested type(self.rdap_bootstrapped_servers) —
            # the container, which is always a dict — instead of the entry,
            # so any non-str value was copied unchecked.  Also replaced
            # 'type(...) is' checks with isinstance().
            self.__rdap_directory[service] = entry
def __initialize_http_session(self):
    """Create the bot's HTTP session from the configured request parameters."""
    self.set_request_parameters()
    self.http_session = utils.create_request_session(self)
def init(self):
    """Prepare HTTP request parameters and open the session."""
    self.set_request_parameters()
    self.__session = create_request_session(self)
def update_database(cls, verbose=False):
    """Download the latest Recorded Future IP risk list and install it for all configured bots.

    Exits the process with a message on configuration, authentication or
    download failure.
    """
    bots = {}
    api_token = None
    runtime_conf = get_bots_settings()
    try:
        for bot in runtime_conf:
            if runtime_conf[bot]["module"] == __name__:
                api_token = runtime_conf[bot]["parameters"]["api_token"]
                bots[bot] = runtime_conf[bot]["parameters"]["database"]
    except KeyError as e:
        sys.exit(
            "Database update failed. Your configuration of {0} is missing key {1}."
            .format(bot, e))
    if not bots:
        if verbose:
            print(
                "Database update skipped. No bots of type {0} present in runtime.conf."
                .format(__name__))
        sys.exit(0)
    try:
        if verbose:
            print("Downloading the latest database update...")
        session = create_request_session()
        response = session.get(
            "https://api.recordedfuture.com/v2/ip/risklist",
            params={
                "format": "csv/splunk",
                "gzip": "true",
                "list": "large"
            },
            headers={"X-RFToken": api_token})
    except requests.exceptions.RequestException as e:
        sys.exit("Database update failed. Connection Error: {0}".format(e))
    if response.status_code == 401:
        sys.exit("Database update failed. Your API token is invalid.")
    if response.status_code != 200:
        sys.exit("Database update failed. Server responded: {0}.\n"
                 "URL: {1}".format(response.status_code, response.url))
    database_data = None
    with tarfile.open(fileobj=io.BytesIO(response.content),
                      mode='r:gz') as archive:
        for member in archive.getmembers():
            if "rfiprisk.dat" in member.name:
                # Fix: TarFile.extract() writes to disk and returns None, so
                # database_data was always falsy and the update always aborted.
                # Read the member's contents in memory instead; decoded to str
                # because the file is written in text mode below.
                database_data = archive.extractfile(member).read().decode()
                break
    if not database_data:
        sys.exit(
            "Database update failed. Could not locate file 'rfiprisk.dat' in the downloaded archive."
        )
    for database_path in set(bots.values()):
        database_dir = pathlib.Path(database_path).parent
        database_dir.mkdir(parents=True, exist_ok=True)
        with open(database_path, "w") as database:
            database.write(database_data)
    if verbose:
        print("Database updated. Reloading affected bots.")
    ctl = IntelMQController()
    for bot in bots.keys():
        ctl.bot_reload(bot)
def update_database(cls):
    """Locate, download and parse the latest RouteViews RIB dump into pyasn databases.

    Exits the process with a message on configuration or download failure.
    (NOTE(review): the sys.exit string in the RequestException handler was
    split across lines by formatting damage; rejoined here without changing
    its tokens.)
    """
    bots = {}
    runtime_conf = load_configuration(RUNTIME_CONF_FILE)
    try:
        for bot in runtime_conf:
            if runtime_conf[bot]["module"] == __name__:
                bots[bot] = runtime_conf[bot]["parameters"]["database"]
    except KeyError as e:
        sys.exit(
            "Database update failed. Your configuration of {0} is missing key {1}."
            .format(bot, e))
    if not bots:
        print(
            "Database update skipped. No bots of type {0} present in runtime.conf."
            .format(__name__))
        sys.exit(0)
    # we only need to import now. If there are no asn_lookup bots, this dependency does not need to be installed
    if pyasn is None:
        raise MissingDependencyError("pyasn")
    try:
        print("Searching for the latest database update...")
        session = create_request_session()
        url = "http://archive.routeviews.org/route-views4/bgpdata/"
        response = session.get(url)
        # The directory listing links per-month folders like "2021.05/".
        pattern = re.compile(r"href=\"(\d{4}\.\d{2})/\"")
        months = pattern.findall(response.text)
        months.sort(reverse=True)
        if not months:
            sys.exit(
                "Database update failed. Couldn't find the latest database update."
            )
        url += str(months[0]) + "/RIBS/"
        response = session.get(url)
        # RIB dumps are named like "rib.20210501.0000.bz2".
        pattern = re.compile(r"href=\"(rib\.\d{8}\.\d{4}\.bz2)\"")
        days = pattern.findall(response.text)
        days.sort(reverse=True)
        if not days:
            sys.exit(
                "Database update failed. Couldn't find the latest database update."
            )
        print("Downloading the latest database update...")
        url += days[0]
        response = session.get(url)
        if response.status_code != 200:
            sys.exit("Database update failed. Server responded: {0}.\n"
                     "URL: {1}".format(response.status_code, response.url))
    except requests.exceptions.RequestException as e:
        sys.exit("Database update failed. Connection Error: {0}".format(e))
    with bz2.open(io.BytesIO(response.content)) as archive:
        print("Parsing the latest database update...")
        prefixes = pyasn.mrtx.parse_mrt_file(archive,
                                             print_progress=False,
                                             skip_record_on_error=True)
    for database_path in set(bots.values()):
        database_dir = pathlib.Path(database_path).parent
        database_dir.mkdir(parents=True, exist_ok=True)
        pyasn.mrtx.dump_prefixes_to_file(prefixes, database_path)
    print("Database updated. Reloading affected bots.")
    ctl = IntelMQController()
    for bot in bots.keys():
        ctl.bot_reload(bot)
def process(self):
    """Look up the RDAP abuse contact for the event's source.fqdn and cache the result.

    The FQDN is shortened to registrable-domain + matching directory suffix,
    the responsible RDAP server is queried, and the first email found in the
    configured role order is added as 'source.abuse_contact'.
    """
    event = self.receive_message()
    if 'source.fqdn' in event:
        url = event.get('source.fqdn')
        cache_key = "rdap_%s" % (url)
        result = self.cache_get(cache_key)
        if result:
            # Cache hit: reuse the previously resolved abuse contact.
            event.add('source.abuse_contact', result, overwrite=self.overwrite)
        else:
            self.__session = create_request_session(self)
            # Find the longest suffix of the FQDN present in the RDAP directory
            # by stripping leading labels one at a time.
            domain_parts = url.split('.')
            domain_suffix = None
            while domain_suffix is None:
                if ".".join(domain_parts) in self.__rdap_directory:
                    domain_suffix = ".".join(domain_parts)
                else:
                    if len(domain_parts) == 0:
                        break
                    domain_parts.pop(0)
            # Reduce the query to <registrable-label>.<suffix>.
            url_without_domain_suffix = url.replace(
                ".%s" % (domain_suffix), "")
            url = "%s.%s" % (url_without_domain_suffix.split(".")[-1],
                             domain_suffix)
            if domain_suffix in self.__rdap_directory:
                service = self.__rdap_directory[domain_suffix]
                if 'auth' in service:
                    # Only JWT bearer authentication is supported so far.
                    if service['auth']['type'] == 'jwt':
                        self.__session.headers[
                            'Authorization'] = "Bearer %s" % (
                                service['auth']['token'])
                    else:
                        raise NotImplementedError(
                            "Authentication type %r (configured for service %r) is not implemented"
                            % (service['auth'], domain_suffix))
                resp = self.__session.get("{0}domain/{1}".format(
                    service['url'], url))
                if resp.status_code < 200 or resp.status_code > 299:
                    if resp.status_code == 404:
                        # 404 means the registry has no data; pass the event on.
                        self.logger.debug(
                            'Treating server response 404 as no data.')
                        self.send_message(event)
                        self.acknowledge_message()
                        return
                    self.logger.debug(
                        "RDAP Server '%s' responded with '%d' for domain '%s'.",
                        service['url'], resp.status_code, url)
                    raise ValueError(
                        f"Unable to process server's response, the returned status-code was {resp.status_code}. Enable debug logging to see more details."
                    )
                try:
                    resp = resp.json()
                except ValueError:
                    self.logger.debug("Server response: %r", resp.text)
                    raise ValueError(
                        "Unable to parse server response as JSON. Enable debug logging to see more details."
                    )
                # Collect emails by role from entities and their sub-entities.
                for entity in resp['entities']:
                    if not isinstance(entity, dict):
                        self.logger.warning(
                            "Invalid type '%s' in entities of response for domain '%s' found.",
                            type(entity), url)
                        continue
                    if 'removed' in entity['roles']:
                        continue
                    for entrole in entity['roles']:
                        if 'entities' in entity:
                            for subentity in entity['entities']:
                                if not isinstance(subentity, dict):
                                    self.logger.warning(
                                        "Invalid type '%s' in entities of response for domain '%s' found.",
                                        type(subentity), url)
                                    continue
                                for subentrole in subentity['roles']:
                                    if 'vcardArray' in subentity:
                                        entity_data = self.parse_entities(
                                            subentity['vcardArray'])
                                        self.__rdap_order_dict[
                                            subentrole] = {
                                                'email':
                                                entity_data[0]
                                                if len(entity_data) > 0
                                                else None
                                            }
                        if 'vcardArray' in entity:
                            entity_data = self.parse_entities(
                                entity['vcardArray'])
                            self.__rdap_order_dict[entrole] = {
                                'email':
                                entity_data[0]
                                if len(entity_data) > 0 else None
                            }
                # First role in the configured order with an email wins.
                for role in self.rdap_order:
                    if role in self.__rdap_order_dict:
                        if self.__rdap_order_dict[role][
                                'email'] is not None:
                            self.cache_set(
                                cache_key,
                                self.__rdap_order_dict[role]['email'],
                                self.redis_cache_ttl)
                            event.add(
                                'source.abuse_contact',
                                self.__rdap_order_dict[role]['email'],
                                overwrite=self.overwrite)
                            break
                self.__session.close()
    self.send_message(event)
    self.acknowledge_message()
def update_database(cls):
    """Download the latest GeoLite2-City database and install it for all configured bots.

    Requires a MaxMind license key in the bot configuration.  Exits the
    process with a message on configuration, authentication or download
    failure.
    """
    bots = {}
    license_key = None
    runtime_conf = load_configuration(RUNTIME_CONF_FILE)
    try:
        for bot in runtime_conf:
            if runtime_conf[bot]["module"] == __name__:
                license_key = runtime_conf[bot]["parameters"][
                    "license_key"]
                bots[bot] = runtime_conf[bot]["parameters"]["database"]
    except KeyError as e:
        error = "Database update failed. Your configuration of {0} is missing key {1}.".format(
            bot, e)
        if str(e) == "'license_key'":
            # Point users at the mandatory (free) license registration.
            error += "\n"
            error += "Since December 30, 2019 you need to register for a free license key to access GeoLite2 database.\n"
            error += "https://blog.maxmind.com/2019/12/18/significant-changes-to-accessing-and-using-geolite2-databases/"
        # Fix: both branches called sys.exit(error); merged into one call.
        sys.exit(error)
    if not bots:
        print(
            "Database update skipped. No bots of type {0} present in runtime.conf."
            .format(__name__))
        sys.exit(0)
    # we only need to import now, if there are no maxmind_geoip bots, this dependency does not need to be installed
    try:
        import maxminddb
    except ImportError:
        raise MissingDependencyError(
            'maxminddb',
            additional_text="Package maxminddb should be present because it "
            "is a dependency for the required geoip2 package.")
    try:
        print("Downloading the latest database update...")
        session = create_request_session()
        response = session.get(
            "https://download.maxmind.com/app/geoip_download",
            params={
                "license_key": license_key,
                "edition_id": "GeoLite2-City",
                "suffix": "tar.gz"
            })
    except requests.exceptions.RequestException as e:
        sys.exit("Database update failed. Connection Error: {0}".format(e))
    if response.status_code == 401:
        sys.exit("Database update failed. Your license key is invalid.")
    if response.status_code != 200:
        sys.exit("Database update failed. Server responded: {0}.\n"
                 "URL: {1}".format(response.status_code, response.url))
    database_data = None
    try:
        with tarfile.open(fileobj=io.BytesIO(response.content),
                          mode='r:gz') as archive:
            for member in archive.getmembers():
                if "GeoLite2-City.mmdb" in member.name:
                    database_data = maxminddb.open_database(
                        database=archive.extractfile(member),
                        mode=maxminddb.MODE_FD)
                    break
    except maxminddb.InvalidDatabaseError:
        sys.exit("Database update failed. Database file invalid.")
    if not database_data:
        # Fix: error message referred to 'GeoLite2-City.mmbd' (typo) although
        # the file searched for above is 'GeoLite2-City.mmdb'.
        sys.exit(
            "Database update failed. Could not locate file 'GeoLite2-City.mmdb' in the downloaded archive."
        )
    for database_path in set(bots.values()):
        database_dir = pathlib.Path(database_path).parent
        database_dir.mkdir(parents=True, exist_ok=True)
        with open(database_path, "wb") as database:
            # MODE_FD keeps the raw database bytes in ._buffer (private
            # attribute of maxminddb) — write them out verbatim.
            database.write(database_data._buffer)
    print("Database updated. Reloading affected bots.")
    ctl = IntelMQController()
    for bot in bots.keys():
        ctl.bot_reload(bot)
def init(self):
    """Open a bearer-token-authenticated session and build the lookup endpoint URL."""
    self.set_request_parameters()
    session = create_request_session(self)
    session.headers["Authorization"] = f"Bearer {self.authentication_token}"
    self.session = session
    self.url = f"{self.url}intelmq/lookup"