def update_sources():
    """Download the remote source index into an in-memory buffer and
    hand it to write_and_compare() for caching.

    Side effect: records the index path in the module-level
    ``local_index_filename``. Returns 1 if the cache directory cannot
    be created; raises ApplicationError if the download fails.
    """
    global local_index_filename
    local_index_filename = sources.get_index_filename()
    initial_content = get_initial_content()
    with io.BytesIO() as download_buf:
        index_url = sources.get_source_index_url()
        logger.info("Downloading %s", index_url)
        try:
            net.get(index_url, download_buf)
        except Exception as err:
            raise exceptions.ApplicationError(
                "Failed to download index: %s: %s" % (index_url, err))
        # Make sure the cache directory exists before writing into it.
        cache_dir = config.get_cache_dir()
        if not os.path.exists(cache_dir):
            try:
                os.makedirs(cache_dir)
            except Exception as err:
                logger.error(
                    "Failed to create directory %s: %s", cache_dir, err)
                return 1
        write_and_compare(
            initial_content=initial_content, fileobj=download_buf)
def update_sources():
    """Fetch the remote source index and save it to the local cache.

    Returns 1 if the cache directory cannot be created; raises
    ApplicationError if the download fails.
    """
    target_path = sources.get_index_filename()
    with io.BytesIO() as download_buf:
        index_url = sources.get_source_index_url()
        logger.info("Downloading %s", index_url)
        try:
            net.get(index_url, download_buf)
        except Exception as err:
            raise exceptions.ApplicationError(
                "Failed to download index: %s: %s" % (index_url, err))
        # Make sure the cache directory exists before writing into it.
        cache_dir = config.get_cache_dir()
        if not os.path.exists(cache_dir):
            try:
                os.makedirs(cache_dir)
            except Exception as err:
                logger.error(
                    "Failed to create directory %s: %s", cache_dir, err)
                return 1
        with open(target_path, "wb") as out:
            out.write(download_buf.getvalue())
        logger.info("Saved %s", target_path)
def check_version(suricata_version):
    """Compare the installed Suricata version against the source index.

    Logs whether the installed version is current, outdated, or past
    EOL. Skips the check for development builds. Exits the process if
    the index's recommended version cannot be parsed.
    """
    # Development builds are not tracked in the index; nothing to check.
    if "dev" in suricata_version.full:
        logger.warning(
            "Development version of Suricata found: %s. "
            "Skipping version check.", suricata_version.full)
        return

    index_filename = sources.get_index_filename()
    if not os.path.exists(index_filename):
        logger.warning("No index exists, will use bundled index.")
        logger.warning("Please run suricata-update update-sources.")

    versions = sources.Index(index_filename).get_versions()
    recommended = engine.parse_version(versions["suricata"]["recommended"])
    if not recommended:
        logger.error("Recommended version was not parsed properly")
        sys.exit(1)

    # In case index is out of date
    if float(suricata_version.short) > float(recommended.short):
        return

    # Evaluate if the installed version is present in index
    upgrade_version = versions["suricata"].get(suricata_version.short)
    if not upgrade_version:
        logger.warning(
            "Suricata version %s has reached EOL. Please upgrade to %s.",
            suricata_version.full, recommended.full)
        return

    if suricata_version.full == upgrade_version:
        logger.info(
            "Suricata version %s is up to date", suricata_version.full)
        return
    if upgrade_version == recommended.full:
        logger.warning(
            "Suricata version %s is outdated. Please upgrade to %s.",
            suricata_version.full, recommended.full)
        return
    logger.warning(
        "Suricata version %s is outdated. Please upgrade to %s or %s.",
        suricata_version.full, upgrade_version, recommended.full)
def enable_source():
    """Enable the rule source named on the command line.

    Re-enables a previously disabled source when one exists, otherwise
    resolves the source from the index, collects any required
    parameters (from key=val command line params or interactive
    prompts), and writes the source configuration. Also enables the
    default source unless the new source is, or replaces, it, and
    removes any enabled sources the new source replaces.

    Returns 1 on error, 0 on a simple re-enable, None otherwise.
    """
    name = config.args().name

    # Check if source is already enabled.
    enabled_source_filename = sources.get_enabled_source_filename(name)
    if os.path.exists(enabled_source_filename):
        logger.error("The source %s is already enabled.", name)
        return 1

    # First check if this source was previously disabled and then just
    # re-enable it.
    disabled_source_filename = sources.get_disabled_source_filename(name)
    if os.path.exists(disabled_source_filename):
        logger.info("Re-enabling previous disabled source for %s.", name)
        os.rename(disabled_source_filename, enabled_source_filename)
        return 0

    if not os.path.exists(sources.get_index_filename()):
        logger.warning("Source index does not exist, will use bundled one.")
        logger.warning("Please run suricata-update update-sources.")

    source_index = sources.load_source_index(config)
    if name not in source_index.get_sources():
        logger.error("Unknown source: %s", name)
        return 1

    # Parse key=val options.
    opts = {}
    for param in config.args().params:
        key, val = param.split("=", 1)
        opts[key] = val

    source = source_index.get_sources()[name]
    if "subscribe-url" in source:
        print("The source %s requires a subscription. Subscribe here:" % (name))
        print(" %s" % source["subscribe-url"])

    # Collect required parameters: use command line values when given,
    # otherwise prompt interactively until a non-empty value is entered.
    params = {}
    if "parameters" in source:
        for param in source["parameters"]:
            if param in opts:
                params[param] = opts[param]
            else:
                prompt = source["parameters"][param]["prompt"]
                while True:
                    # NOTE(review): raw_input is the Python 2 name;
                    # presumably aliased to input() elsewhere for
                    # Python 3 — confirm.
                    r = raw_input("%s (%s): " % (prompt, param)).strip()
                    if r:
                        break
                params[param] = r

    new_source = sources.SourceConfiguration(name, params=params)

    # If the source directory does not exist, create it. Also create
    # the default rule-source of et/open, unless the source being
    # enabled replaces it.
    source_directory = sources.get_source_directory()
    if not os.path.exists(source_directory):
        try:
            logger.info("Creating directory %s", source_directory)
            os.makedirs(source_directory)
        except Exception as err:
            logger.error(
                "Failed to create directory %s: %s", source_directory, err)
            return 1

    if "replaces" in source and default_source in source["replaces"]:
        logger.debug(
            "Not enabling default source as selected source replaces it")
    elif new_source.name == default_source:
        logger.debug(
            "Not enabling default source as selected source is the default")
    else:
        logger.info("Enabling default source %s", default_source)
        if not source_index.get_source_by_name(default_source):
            logger.error("Default source %s not in index", default_source)
        else:
            default_source_config = sources.SourceConfiguration(
                default_source)
            write_source_config(default_source_config, True)

    write_source_config(new_source, True)
    logger.info("Source %s enabled", new_source.name)

    # Remove any enabled sources that the new source replaces.
    if "replaces" in source:
        for replaces in source["replaces"]:
            filename = sources.get_enabled_source_filename(replaces)
            if os.path.exists(filename):
                logger.info(
                    "Removing source %s as its replaced by %s",
                    replaces, new_source.name)
                logger.debug("Deleting %s", filename)
                os.unlink(filename)
def load_sources(suricata_version):
    """Resolve all configured rule sources to URLs and fetch them.

    Builds (url, http_header, checksum) tuples from the --url command
    line options, the enabled sources (resolved through the source
    index when the source has no direct URL), the "sources" list in
    the configuration, and the ET/Open default; de-duplicates them,
    downloads each, and finally loads any local rule files.

    Returns the dict of loaded files, populated by Fetch/load_local.
    """
    files = {}
    urls = []
    http_header = None
    checksum = True

    # Add any URLs added with the --url command line parameter.
    if config.args().url:
        for url in config.args().url:
            urls.append((url, http_header, checksum))

    # Get the new style sources.
    enabled_sources = sources.get_enabled_sources()

    # Convert the Suricata version to a version string.
    version_string = "%d.%d.%d" % (
        suricata_version.major, suricata_version.minor,
        suricata_version.patch)

    # Construct the URL replacement parameters that are internal to
    # suricata-update.
    internal_params = {"__version__": version_string}

    # If we have new sources, we also need to load the index.
    if enabled_sources:
        index_filename = sources.get_index_filename()
        if not os.path.exists(index_filename):
            logger.warning("No index exists, will use bundled index.")
            logger.warning("Please run suricata-update update-sources.")
        if os.path.exists(index_filename) and time.time() - \
                os.stat(index_filename).st_mtime > INDEX_EXPIRATION_TIME:
            logger.warning(
                "Source index is older than 2 weeks. "
                "Please update with suricata-update update-sources.")
        index = sources.Index(index_filename)

        for (name, source) in enabled_sources.items():
            params = source["params"] if "params" in source else {}
            params.update(internal_params)
            if "url" in source:
                # No need to go off to the index.
                # NOTE(review): http_header/checksum assigned here carry
                # over into later iterations and the "sources" loop
                # below — confirm this is intended.
                http_header = source.get("http_header")
                checksum = source.get("checksum")
                url = (source["url"] % params, http_header, checksum)
                logger.debug("Resolved source %s to URL %s.", name, url[0])
            else:
                if not index:
                    # NOTE(review): message formats source["source"];
                    # may KeyError if that key is absent — confirm.
                    raise exceptions.ApplicationError(
                        "Source index is required for source %s; "
                        "run suricata-update update-sources" % (
                            source["source"]))
                source_config = index.get_source_by_name(name)
                try:
                    checksum = source_config["checksum"]
                except Exception:
                    # Was a bare except; narrowed so it no longer
                    # swallows KeyboardInterrupt/SystemExit. Default to
                    # verifying checksums when the index entry has no
                    # checksum field (or the lookup returned nothing).
                    checksum = True
                url = (index.resolve_url(name, params), http_header, checksum)
                logger.debug("Resolved source %s to URL %s.", name, url)
            urls.append(url)

    if config.get("sources"):
        for url in config.get("sources"):
            if not isinstance(url, str):
                raise exceptions.InvalidConfigurationError(
                    "Invalid datatype for source URL: %s" % (str(url)))
            url = (url % internal_params, http_header, checksum)
            logger.debug("Adding source %s.", url)
            urls.append(url)

    # If --etopen is on the command line, make sure its added. Or if
    # there are no URLs, default to ET/Open.
    if config.get("etopen") or not urls:
        if not config.args().offline and not urls:
            logger.info(
                "No sources configured, will use Emerging Threats Open")
        urls.append(
            (sources.get_etopen_url(internal_params), http_header, checksum))

    # Converting the URLs to a set removes dupes.
    urls = set(urls)

    # Now download each URL.
    for url in urls:
        Fetch().run(url, files)

    # Now load local rules.
    if config.get("local") is not None:
        for local in config.get("local"):
            load_local(local, files)

    return files