def check_version(suricata_version):
    """Warn the user when the installed Suricata version is EOL or outdated.

    Looks up the installed version in the source index's version table and
    logs an appropriate message. Development ("dev") builds are skipped.
    May call sys.exit(1) if the recommended version cannot be parsed.
    """
    # Development builds cannot be meaningfully compared; skip entirely.
    if "dev" in suricata_version.full:
        logger.warning(
            "Development version of Suricata found: %s. "
            "Skipping version check.", suricata_version.full)
        return

    idx_path = sources.get_index_filename()
    if not os.path.exists(idx_path):
        logger.warning("No index exists, will use bundled index.")
        logger.warning("Please run suricata-update update-sources.")
    # NOTE(review): Index is constructed even when the file is absent —
    # presumably it falls back to a bundled copy; confirm in sources.Index.
    versions = sources.Index(idx_path).get_versions()

    recommended = engine.parse_version(versions["suricata"]["recommended"])
    if not recommended:
        logger.error("Recommended version was not parsed properly")
        sys.exit(1)

    # The local install may be newer than a stale index knows about;
    # nothing to report in that case.
    if float(suricata_version.short) > float(recommended.short):
        return

    # Find the upgrade target for the installed major.minor line; a
    # missing entry means that release line has reached end-of-life.
    target = versions["suricata"].get(suricata_version.short)
    if not target:
        logger.warning(
            "Suricata version %s has reached EOL. Please upgrade to %s.",
            suricata_version.full, recommended.full)
        return

    if suricata_version.full == target:
        logger.info("Suricata version %s is up to date", suricata_version.full)
    elif target == recommended.full:
        logger.warning(
            "Suricata version %s is outdated. Please upgrade to %s.",
            suricata_version.full, recommended.full)
    else:
        logger.warning(
            "Suricata version %s is outdated. Please upgrade to %s or %s.",
            suricata_version.full, target, recommended.full)
def load_sources(suricata_version):
    """Resolve every configured rule source and download it.

    Builds a list of (url, http_header, checksum) tuples from --url
    arguments, enabled index sources, and legacy "sources" config
    entries (defaulting to ET/Open when nothing is configured), fetches
    each one, then loads any configured local rule files.

    Returns the dict of files populated by Fetch().run()/load_local().
    Raises exceptions.ApplicationError when an index source is enabled
    but no index is available, and InvalidConfigurationError for a
    non-string legacy source URL.
    """
    files = {}
    urls = []

    # Defaults for sources that do not specify their own header/checksum.
    # BUG FIX: these used to be plain locals reassigned inside the source
    # loop, so a later source could silently inherit the http_header or
    # checksum of a previously processed one.
    default_http_header = None
    default_checksum = True

    # Add any URLs added with the --url command line parameter.
    if config.args().url:
        for url in config.args().url:
            urls.append((url, default_http_header, default_checksum))

    # Get the new style sources.
    enabled_sources = sources.get_enabled_sources()

    # Convert the Suricata version to a version string.
    version_string = "%d.%d.%d" % (
        suricata_version.major, suricata_version.minor,
        suricata_version.patch)

    # Construct the URL replacement parameters that are internal to
    # suricata-update.
    internal_params = {"__version__": version_string}

    # If we have new sources, we also need to load the index.
    if enabled_sources:
        index_filename = sources.get_index_filename()
        if not os.path.exists(index_filename):
            logger.warning("No index exists, will use bundled index.")
            logger.warning("Please run suricata-update update-sources.")
        if os.path.exists(index_filename) and time.time() - \
                os.stat(index_filename).st_mtime > INDEX_EXPIRATION_TIME:
            logger.warning(
                "Source index is older than 2 weeks. "
                "Please update with suricata-update update-sources.")
        index = sources.Index(index_filename)

        for (name, source) in enabled_sources.items():
            params = source["params"] if "params" in source else {}
            params.update(internal_params)
            if "url" in source:
                # No need to go off to the index.
                http_header = source.get("http_header")
                checksum = source.get("checksum")
                url = (source["url"] % params, http_header, checksum)
                logger.debug("Resolved source %s to URL %s.", name, url[0])
            else:
                if not index:
                    raise exceptions.ApplicationError(
                        "Source index is required for source %s; "
                        "run suricata-update update-sources" % (
                            source["source"]))
                source_config = index.get_source_by_name(name)
                # BUG FIX: was a bare "except:" that swallowed every
                # error; a missing key simply means "verify checksums".
                checksum = source_config.get("checksum", default_checksum)
                url = (index.resolve_url(name, params), default_http_header,
                       checksum)
                logger.debug("Resolved source %s to URL %s.", name, url)
            urls.append(url)

    if config.get("sources"):
        for url in config.get("sources"):
            if not isinstance(url, str):
                raise exceptions.InvalidConfigurationError(
                    "Invalid datatype for source URL: %s" % (str(url)))
            url = (url % internal_params, default_http_header,
                   default_checksum)
            logger.debug("Adding source %s.", url)
            urls.append(url)

    # If --etopen is on the command line, make sure its added. Or if
    # there are no URLs, default to ET/Open.
    if config.get("etopen") or not urls:
        if not config.args().offline and not urls:
            logger.info(
                "No sources configured, will use Emerging Threats Open")
        urls.append((sources.get_etopen_url(internal_params),
                     default_http_header, default_checksum))

    # Converting the URLs to a set removes dupes.
    urls = set(urls)

    # Now download each URL.
    for url in urls:
        Fetch().run(url, files)

    # Now load local rules.
    if config.get("local") is not None:
        for local in config.get("local"):
            load_local(local, files)

    return files
def load_sources(suricata_version):
    """Resolve every configured rule source URL and download it.

    Plain-URL variant: builds a list of URL strings from --url
    arguments, enabled index sources, and legacy "sources" config
    entries (defaulting to ET/Open when nothing is configured), fetches
    each one, then loads any configured local rule files.

    Returns the dict of files populated by Fetch().run()/load_local().

    NOTE(review): this redefines load_sources and shadows the earlier
    definition in this file; it looks like a leftover duplicate of an
    older revision — confirm which version is intended to survive.
    """
    files = {}
    urls = []

    # Add any URLs added with the --url command line parameter.
    if config.args().url:
        for url in config.args().url:
            urls.append(url)

    # Get the new style sources.
    enabled_sources = sources.get_enabled_sources()

    # Convert the Suricata version to a version string.
    version_string = "%d.%d.%d" % (
        suricata_version.major, suricata_version.minor,
        suricata_version.patch)

    # Construct the URL replacement parameters that are internal to
    # suricata-update.
    internal_params = {"__version__": version_string}

    # If we have new sources, we also need to load the index.
    if enabled_sources:
        index_filename = os.path.join(
            config.get_cache_dir(), sources.SOURCE_INDEX_FILENAME)
        if os.path.exists(index_filename):
            index = sources.Index(index_filename)
        else:
            index = None

        for (name, source) in enabled_sources.items():
            params = source["params"] if "params" in source else {}
            params.update(internal_params)
            if "url" in source:
                # No need to go off to the index.
                url = source["url"] % params
            else:
                if not index:
                    raise exceptions.ApplicationError(
                        "Source index is required for source %s; "
                        "run suricata-update update-sources" % (
                            source["source"]))
                url = index.resolve_url(name, params)
            logger.debug("Resolved source %s to URL %s.", name, url)
            urls.append(url)

    if config.get("sources"):
        for url in config.get("sources"):
            # FIX: use isinstance instead of comparing type() objects.
            if not isinstance(url, str):
                raise exceptions.InvalidConfigurationError(
                    "Invalid datatype for source URL: %s" % (str(url)))
            url = url % internal_params
            logger.debug("Adding source %s.", url)
            urls.append(url)

    # If --etopen is on the command line, make sure its added. Or if
    # there are no URLs, default to ET/Open.
    if config.get("etopen") or not urls:
        if not urls:
            logger.info(
                "No sources configured, will use Emerging Threats Open")
        urls.append(sources.get_etopen_url(internal_params))

    # Converting the URLs to a set removes dupes.
    urls = set(urls)

    # Now download each URL.
    for url in urls:
        Fetch().run(url, files)

    # Now load local rules.
    # FIX: guard against "local" being unset (None), matching the other
    # load_sources variant; iterating None would raise TypeError.
    if config.get("local") is not None:
        for local in config.get("local"):
            load_local(local, files)

    return files