    def run(self, rule):
        modified_rule = self.pattern.sub(self.repl, rule.format())
        parsed = suricata.update.rule.parse(modified_rule, rule.group)
        if parsed is None:
            logger.error(
                "Modification of rule %s results in invalid rule: %s",
                rule.idstr, modified_rule)
            return rule
        return parsed
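    # Worked example (pattern, replacement and rule text are assumed for
    # illustration only; they are not taken from the original source): with
    #   self.pattern = re.compile(r"\$EXTERNAL_NET") and self.repl = "any",
    # a rule formatted as
    #   'alert tcp $EXTERNAL_NET any -> $HOME_NET 22 (msg:"SSH"; sid:1;)'
    # is rewritten to
    #   'alert tcp any any -> $HOME_NET 22 (msg:"SSH"; sid:1;)'
    # and then re-parsed. If the rewritten text no longer parses as a valid
    # rule, the original rule is returned unchanged.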
    def run(self, rule):
        new_rule_string = re.sub(
            r";\s*\)$",
            "; metadata: {} {};)".format(self.key, self.val),
            rule.format())
        new_rule = suricata.update.rule.parse(new_rule_string, rule.group)
        if not new_rule:
            logger.error(
                "Rule is not valid after adding metadata: [{}]: {}".format(
                    rule.idstr, new_rule_string))
            return rule
        return new_rule
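    # Worked example (key, value and rule text are assumed for illustration):
    # with self.key = "created_at" and self.val = "2018_01_01", a rule ending
    # in
    #   '... (msg:"TEST"; sid:1;)'
    # has its trailing ";)" replaced so that it ends in
    #   '... (msg:"TEST"; sid:1; metadata: created_at 2018_01_01;)'
    # before being re-parsed; an invalid result falls back to the original
    # rule.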
    def process(self, filein, fileout, rulemap):
        count = 0
        for line in filein:
            line = line.rstrip()
            if not line or line.startswith("#"):
                print(line, file=fileout)
                continue
            pattern = self.extract_pattern(line)
            if not pattern:
                print(line, file=fileout)
            else:
                for rule in rulemap.values():
                    if rule.enabled:
                        if pattern.search(rule.format()):
                            count += 1
                            print("# %s" % (rule.brief()), file=fileout)
                            print(self.replace(line, rule), file=fileout)
                            print("", file=fileout)
        logger.info("Generated %d thresholds to %s." % (count, fileout.name))
def build_report(prev_rulemap, rulemap):
    """Build a report of changes between 2 rulemaps.

    Returns a dict with the following keys that each contain a list
    of rules:
    - added
    - removed
    - modified
    """
    report = {
        "added": [],
        "removed": [],
        "modified": [],
    }

    for key in rulemap:
        rule = rulemap[key]
        if rule.id not in prev_rulemap:
            report["added"].append(rule)
        elif rule.format() != prev_rulemap[rule.id].format():
            report["modified"].append(rule)

    for key in prev_rulemap:
        rule = prev_rulemap[key]
        if rule.id not in rulemap:
            report["removed"].append(rule)

    return report
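# Minimal, self-contained sketch of how build_report() classifies rules.
# _FakeRule and _example_build_report are illustrative stand-ins introduced
# here only for demonstration; they are not part of the original module.
# _FakeRule exposes just the attributes build_report() touches: "id" and
# format().
class _FakeRule(object):

    def __init__(self, rule_id, text):
        self.id = rule_id
        self.text = text

    def format(self):
        return self.text


def _example_build_report():
    # Previous ruleset: sids 1 and 2. Current ruleset: sid 2 (rewritten)
    # and sid 3 (new). Rulemaps are keyed by rule id, as build_report()
    # expects.
    prev = {
        1: _FakeRule(1, 'alert tcp any any -> any any (sid:1;)'),
        2: _FakeRule(2, 'alert tcp any any -> any any (sid:2;)'),
    }
    curr = {
        2: _FakeRule(2, 'drop tcp any any -> any any (sid:2;)'),
        3: _FakeRule(3, 'alert tcp any any -> any any (sid:3;)'),
    }
    report = build_report(prev, curr)
    assert [r.id for r in report["added"]] == [3]
    assert [r.id for r in report["modified"]] == [2]
    assert [r.id for r in report["removed"]] == [1]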
def _main():
    global args

    global_parser = argparse.ArgumentParser(add_help=False)
    global_parser.add_argument("-v", "--verbose", action="store_true",
                               default=None,
                               help="Be more verbose")
    global_parser.add_argument(
        "-q", "--quiet", action="store_true", default=False,
        help="Be quiet, warning and error messages only")
    global_parser.add_argument(
        "-D", "--data-dir", metavar="<directory>", dest="data_dir",
        help="Data directory (default: /var/lib/suricata)")
    global_parser.add_argument(
        "-c", "--config", metavar="<filename>",
        help="configuration file (default: /etc/suricata/update.yaml)")
    global_parser.add_argument(
        "--suricata-conf", metavar="<filename>",
        help="configuration file (default: /etc/suricata/suricata.yaml)")
    global_parser.add_argument("--suricata", metavar="<path>",
                               help="Path to Suricata program")
    global_parser.add_argument("--suricata-version", metavar="<version>",
                               help="Override Suricata version")
    global_parser.add_argument("--user-agent", metavar="<user-agent>",
                               help="Set custom user-agent string")
    global_parser.add_argument(
        "--no-check-certificate", action="store_true", default=None,
        help="Disable server SSL/TLS certificate verification")

    global_args, rem = global_parser.parse_known_args()

    if not rem or rem[0].startswith("-"):
        rem.insert(0, "update")

    parser = argparse.ArgumentParser()
    subparsers = parser.add_subparsers(dest="subcommand", metavar="<command>")

    # The "update" (default) sub-command parser.
    update_parser = subparsers.add_parser(
        "update", add_help=False, parents=[global_parser])

    update_parser.add_argument("-o", "--output", metavar="<directory>",
                               dest="output",
                               help="Directory to write rules to")
    update_parser.add_argument(
        "-f", "--force", action="store_true", default=False,
        help="Force operations that might otherwise be skipped")
    update_parser.add_argument("--yaml-fragment", metavar="<filename>",
                               help="Output YAML fragment for rule inclusion")
    update_parser.add_argument(
        "--url", metavar="<url>", action="append", default=[],
        help="URL to use instead of auto-generating one "
             "(can be specified multiple times)")
    update_parser.add_argument(
        "--local", metavar="<path>", action="append", default=[],
        help="Local rule files or directories "
             "(can be specified multiple times)")
    update_parser.add_argument("--sid-msg-map", metavar="<filename>",
                               help="Generate a sid-msg.map file")
    update_parser.add_argument("--sid-msg-map-2", metavar="<filename>",
                               help="Generate a v2 sid-msg.map file")
    update_parser.add_argument("--disable-conf", metavar="<filename>",
                               help="Filename of rule disable filters")
    update_parser.add_argument("--enable-conf", metavar="<filename>",
                               help="Filename of rule enable filters")
    update_parser.add_argument("--modify-conf", metavar="<filename>",
                               help="Filename of rule modification filters")
    update_parser.add_argument("--drop-conf", metavar="<filename>",
                               help="Filename of drop rules filters")
    update_parser.add_argument(
        "--ignore", metavar="<pattern>", action="append", default=[],
        help="Filenames to ignore (can be specified multiple times; "
             "default: *deleted.rules)")
    update_parser.add_argument("--no-ignore", action="store_true",
                               default=False,
                               help="Disables the ignore option.")
    update_parser.add_argument(
        "--threshold-in", metavar="<filename>",
        help="Filename of rule thresholding configuration")
    update_parser.add_argument(
        "--threshold-out", metavar="<filename>",
        help="Output of processed threshold configuration")
    update_parser.add_argument(
        "--dump-sample-configs", action="store_true", default=False,
        help="Dump sample config files to current directory")
    update_parser.add_argument("--etopen", action="store_true",
                               help="Use ET-Open rules (default)")
    update_parser.add_argument("--reload-command", metavar="<command>",
                               help="Command to run after update if modified")
    update_parser.add_argument("--no-reload", action="store_true",
                               default=False,
                               help="Disable reload")
    update_parser.add_argument("-T", "--test-command", metavar="<command>",
                               help="Command to test Suricata configuration")
    update_parser.add_argument("--no-test", action="store_true", default=False,
                               help="Disable testing rules with Suricata")
    update_parser.add_argument("-V", "--version", action="store_true",
                               default=False,
                               help="Display version")
    update_parser.add_argument(
        "--no-merge", action="store_true", default=False,
        help="Do not merge the rules into a single file")

    update_parser.add_argument("-h", "--help", action="store_true")

    # Hidden argument: --now bypasses the time-based check that might
    # otherwise skip updating a ruleset.
    update_parser.add_argument(
        "--now", default=False, action="store_true", help=argparse.SUPPRESS)

    # The Python 2.7 argparse module does prefix matching which can be
    # undesirable. Reserve some names here that would match existing
    # options to prevent prefix matching.
    update_parser.add_argument("--disable", default=False,
                               help=argparse.SUPPRESS)
    update_parser.add_argument("--enable", default=False,
                               help=argparse.SUPPRESS)
    update_parser.add_argument("--modify", default=False,
                               help=argparse.SUPPRESS)
    update_parser.add_argument("--drop", default=False,
                               help=argparse.SUPPRESS)

    commands.listsources.register(
        subparsers.add_parser("list-sources", parents=[global_parser]))
    commands.listenabledsources.register(
        subparsers.add_parser("list-enabled-sources", parents=[global_parser]))
    commands.addsource.register(
        subparsers.add_parser("add-source", parents=[global_parser]))
    commands.updatesources.register(
        subparsers.add_parser("update-sources", parents=[global_parser]))
    commands.enablesource.register(
        subparsers.add_parser("enable-source", parents=[global_parser]))
    commands.disablesource.register(
        subparsers.add_parser("disable-source", parents=[global_parser]))
    commands.removesource.register(
        subparsers.add_parser("remove-source", parents=[global_parser]))

    args = parser.parse_args(rem)

    # Merge global args into args.
    for arg in vars(global_args):
        if not hasattr(args, arg):
            setattr(args, arg, getattr(global_args, arg))
        elif hasattr(args, arg) and getattr(args, arg) is None:
            setattr(args, arg, getattr(global_args, arg))

    # Go verbose or quiet sooner than later.
    if args.verbose:
        logger.setLevel(logging.DEBUG)
    if args.quiet:
        logger.setLevel(logging.WARNING)

    config.init(args)

    # Error out if any reserved/unimplemented arguments were set.
    unimplemented_args = [
        "disable",
        "enable",
        "modify",
        "drop",
    ]
    for arg in unimplemented_args:
        if hasattr(args, arg) and getattr(args, arg):
            logger.error("--%s not implemented", arg)
            return 1

    logger.debug("This is suricata-update version %s (rev: %s); Python: %s" % (
        version, revision, sys.version.replace("\n", "- ")))

    suricata_path = config.get("suricata")

    # Now parse the Suricata version. If provided on the command line,
    # use that, otherwise attempt to get it from Suricata.
    if args.suricata_version:
        # The Suricata version was passed on the command line, parse it.
        suricata_version = suricata.update.engine.parse_version(
            args.suricata_version)
        if not suricata_version:
            logger.error("Failed to parse provided Suricata version: %s" % (
                args.suricata_version))
            return 1
        logger.info("Forcing Suricata version to %s." % (
            suricata_version.full))
    elif suricata_path:
        suricata_version = suricata.update.engine.get_version(suricata_path)
        if suricata_version:
            logger.info("Found Suricata version %s at %s." % (
                str(suricata_version.full), suricata_path))
        else:
            logger.error("Failed to get Suricata version.")
            return 1
    else:
        logger.info("Using default Suricata version of %s",
                    DEFAULT_SURICATA_VERSION)
        suricata_version = suricata.update.engine.parse_version(
            DEFAULT_SURICATA_VERSION)

    # Provide the Suricata version to the net module to add to the
    # User-Agent.
    suricata.update.net.set_user_agent_suricata_version(suricata_version.full)

    # Load custom user-agent string.
    user_agent = config.get("user-agent")
    if user_agent:
        logger.info("Using user-agent: %s.", user_agent)
        suricata.update.net.set_custom_user_agent(user_agent)

    if args.subcommand:
        if hasattr(args, "func"):
            return args.func()
        elif args.subcommand != "update":
            logger.error("Unknown command: %s", args.subcommand)
            return 1

    if args.dump_sample_configs:
        return dump_sample_configs()

    if args.version:
        print("suricata-update version %s (rev: %s)" % (version, revision))
        return 0

    if args.help:
        print(update_parser.format_help())
        print("""other commands:
    update-sources             Update the source index
    list-sources               List available sources
    enable-source              Enable a source from the index
    disable-source             Disable an enabled source
    remove-source              Remove an enabled or disabled source
    list-enabled-sources       List all enabled sources
    add-source                 Add a new source by URL
""")
        return 0

    # If --no-ignore was provided, clear any ignores provided in the
    # config.
    if args.no_ignore:
        config.set(config.IGNORE_KEY, [])

    file_tracker = FileTracker()

    disable_matchers = []
    enable_matchers = []
    modify_filters = []
    drop_filters = []

    # Load user provided disable filters.
    disable_conf_filename = config.get("disable-conf")
    if disable_conf_filename and os.path.exists(disable_conf_filename):
        logger.info("Loading %s.", disable_conf_filename)
        disable_matchers += load_matchers(disable_conf_filename)

    # Load user provided enable filters.
    enable_conf_filename = config.get("enable-conf")
    if enable_conf_filename and os.path.exists(enable_conf_filename):
        logger.info("Loading %s.", enable_conf_filename)
        enable_matchers += load_matchers(enable_conf_filename)

    # Load user provided modify filters.
    modify_conf_filename = config.get("modify-conf")
    if modify_conf_filename and os.path.exists(modify_conf_filename):
        logger.info("Loading %s.", modify_conf_filename)
        modify_filters += load_filters(modify_conf_filename)

    # Load user provided drop filters.
    drop_conf_filename = config.get("drop-conf")
    if drop_conf_filename and os.path.exists(drop_conf_filename):
        logger.info("Loading %s.", drop_conf_filename)
        drop_filters += load_drop_filters(drop_conf_filename)

    if os.path.exists(config.get("suricata-conf")) and \
       suricata_path and os.path.exists(suricata_path):
        logger.info("Loading %s", config.get("suricata-conf"))
        suriconf = suricata.update.engine.Configuration.load(
            config.get("suricata-conf"), suricata_path=suricata_path)
        for key in suriconf.keys():
            if key.startswith("app-layer.protocols") and \
               key.endswith(".enabled"):
                if not suriconf.is_true(key, ["detection-only"]):
                    proto = key.split(".")[2]
                    logger.info("Disabling rules with proto %s", proto)
                    disable_matchers.append(ProtoRuleMatcher(proto))

    # Check that the cache directory exists and is writable.
    if not os.path.exists(config.get_cache_dir()):
        try:
            os.makedirs(config.get_cache_dir(), mode=0o770)
        except Exception as err:
            logger.warning(
                "Cache directory does not exist and could not be created. "
                "/var/tmp will be used instead.")
            config.set_cache_dir("/var/tmp")

    files = load_sources(suricata_version)

    load_dist_rules(files)

    # Remove ignored files.
    for filename in list(files.keys()):
        if ignore_file(config.get("ignore"), filename):
            logger.info("Ignoring file %s" % (filename))
            del files[filename]

    rules = []
    for filename in files:
        if not filename.endswith(".rules"):
            continue
        logger.debug("Parsing %s." % (filename))
        rules += suricata.update.rule.parse_fileobj(
            io.BytesIO(files[filename]), filename)

    rulemap = build_rule_map(rules)
    logger.info("Loaded %d rules." % (len(rules)))

    # Counts of user enabled and modified rules.
    enable_count = 0
    modify_count = 0
    drop_count = 0

    # List of rules disabled by user. Used for counting, and to log
    # rules that are re-enabled to meet flowbit requirements.
    disabled_rules = []

    for key, rule in rulemap.items():

        for matcher in disable_matchers:
            if rule.enabled and matcher.match(rule):
                logger.debug("Disabling: %s" % (rule.brief()))
                rule.enabled = False
                disabled_rules.append(rule)

        for matcher in enable_matchers:
            if not rule.enabled and matcher.match(rule):
                logger.debug("Enabling: %s" % (rule.brief()))
                rule.enabled = True
                enable_count += 1

        for filter in drop_filters:
            if filter.match(rule):
                rulemap[rule.id] = filter.filter(rule)
                drop_count += 1

    # Apply modify filters.
    for fltr in modify_filters:
        for key, rule in rulemap.items():
            if fltr.match(rule):
                new_rule = fltr.filter(rule)
                if new_rule and new_rule.format() != rule.format():
                    rulemap[rule.id] = new_rule
                    modify_count += 1

    logger.info("Disabled %d rules." % (len(disabled_rules)))
    logger.info("Enabled %d rules." % (enable_count))
    logger.info("Modified %d rules." % (modify_count))
    logger.info("Dropped %d rules." % (drop_count))

    # Fixup flowbits.
    resolve_flowbits(rulemap, disabled_rules)

    # Check that output directory exists.
    if not os.path.exists(config.get_output_dir()):
        try:
            os.makedirs(config.get_output_dir(), mode=0o770)
        except Exception as err:
            logger.error(
                "Output directory does not exist and could not be created: %s",
                config.get_output_dir())
            return 1

    # Check that output directory is writable.
    if not os.access(config.get_output_dir(), os.W_OK):
        logger.error("Output directory is not writable: %s",
                     config.get_output_dir())
        return 1

    # Backup the output directory.
    logger.info("Backing up current rules.")
    backup_directory = util.mktempdir()
    shutil.copytree(config.get_output_dir(),
                    os.path.join(backup_directory, "backup"),
                    ignore=copytree_ignore_backup)

    if not args.no_merge:
        # The default, write out a merged file.
        output_filename = os.path.join(
            config.get_output_dir(), DEFAULT_OUTPUT_RULE_FILENAME)
        file_tracker.add(output_filename)
        write_merged(output_filename, rulemap)
    else:
        for filename in files:
            file_tracker.add(
                os.path.join(
                    config.get_output_dir(), os.path.basename(filename)))
        write_to_directory(config.get_output_dir(), files, rulemap)

    if args.yaml_fragment:
        file_tracker.add(args.yaml_fragment)
        write_yaml_fragment(args.yaml_fragment, files)

    if args.sid_msg_map:
        write_sid_msg_map(args.sid_msg_map, rulemap, version=1)

    if args.sid_msg_map_2:
        write_sid_msg_map(args.sid_msg_map_2, rulemap, version=2)

    if args.threshold_in and args.threshold_out:
        file_tracker.add(args.threshold_out)
        threshold_processor = ThresholdProcessor()
        threshold_processor.process(
            open(args.threshold_in), open(args.threshold_out, "w"), rulemap)

    if not args.force and not file_tracker.any_modified():
        logger.info("No changes detected, exiting.")
        return 0

    if not test_suricata(suricata_path):
        logger.error("Suricata test failed, aborting.")
        logger.error("Restoring previous rules.")
        copytree(
            os.path.join(backup_directory, "backup"), config.get_output_dir())
        return 1

    if not config.args().no_reload and config.get("reload-command"):
        logger.info("Running %s." % (config.get("reload-command")))
        rc = subprocess.Popen(config.get("reload-command"), shell=True).wait()
        if rc != 0:
            logger.error("Reload command exited with error: %d", rc)

    logger.info("Done.")

    return 0
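# Purely illustrative: _main() above relies only on FileTracker exposing
# add() and any_modified(). The real FileTracker is defined elsewhere in
# this module; the sketch below shows one way such a tracker could work
# (remember a checksum for each added path, then re-checksum on demand).
# It is an assumption for clarity, not the project's actual implementation.
class _ChecksumFileTracker(object):

    def __init__(self):
        self.checksums = {}

    def _checksum(self, path):
        # Local import so this sketch stays self-contained.
        import hashlib
        if not os.path.exists(path):
            return None
        with open(path, "rb") as fileobj:
            return hashlib.md5(fileobj.read()).hexdigest()

    def add(self, path):
        # Record the file's checksum (or None if it does not exist yet).
        self.checksums[path] = self._checksum(path)

    def any_modified(self):
        # True if any tracked file's content changed since it was added.
        return any(
            self._checksum(path) != checksum
            for path, checksum in self.checksums.items())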
def main():
    global args

    suricata_path = suricata.update.engine.get_path()

    # Support the Python argparse style of configuration file.
    parser = argparse.ArgumentParser(fromfile_prefix_chars="@")

    parser.add_argument("-v", "--verbose", action="store_true", default=False,
                        help="Be more verbose")
    parser.add_argument("-c", "--config", metavar="<filename>",
                        help="Configuration file")
    parser.add_argument("-o", "--output", metavar="<directory>", dest="output",
                        default="/var/lib/suricata/rules",
                        help="Directory to write rules to")
    parser.add_argument("--cache-dir", default="/var/lib/suricata/cache",
                        metavar="<directory>",
                        help="Set the cache directory")
    parser.add_argument("--suricata", metavar="<path>",
                        help="Path to Suricata program")
    parser.add_argument("--suricata-version", metavar="<version>",
                        help="Override Suricata version")
    parser.add_argument(
        "-f", "--force", action="store_true", default=False,
        help="Force operations that might otherwise be skipped")
    parser.add_argument("--yaml-fragment", metavar="<filename>",
                        help="Output YAML fragment for rule inclusion")
    parser.add_argument(
        "--url", metavar="<url>", action="append", default=[],
        help="URL to use instead of auto-generating one "
             "(can be specified multiple times)")
    parser.add_argument(
        "--local", metavar="<path>", action="append", default=[],
        help="Local rule files or directories "
             "(can be specified multiple times)")
    parser.add_argument("--sid-msg-map", metavar="<filename>",
                        help="Generate a sid-msg.map file")
    parser.add_argument("--sid-msg-map-2", metavar="<filename>",
                        help="Generate a v2 sid-msg.map file")
    parser.add_argument("--disable-conf", metavar="<filename>",
                        help="Filename of rule disable filters")
    parser.add_argument("--enable-conf", metavar="<filename>",
                        help="Filename of rule enable filters")
    parser.add_argument("--modify-conf", metavar="<filename>",
                        help="Filename of rule modification filters")
    parser.add_argument("--drop-conf", metavar="<filename>",
                        help="Filename of drop rules filters")
    parser.add_argument(
        "--ignore", metavar="<pattern>", action="append", default=[],
        help="Filenames to ignore (can be specified multiple times; "
             "default: *deleted.rules)")
    parser.add_argument("--no-ignore", action="store_true", default=False,
                        help="Disables the ignore option.")
    parser.add_argument("--threshold-in", metavar="<filename>",
                        help="Filename of rule thresholding configuration")
    parser.add_argument("--threshold-out", metavar="<filename>",
                        help="Output of processed threshold configuration")
    parser.add_argument("--dump-sample-configs", action="store_true",
                        default=False,
                        help="Dump sample config files to current directory")
    parser.add_argument("--etpro", metavar="<etpro-code>",
                        help="Use ET-Pro rules with provided ET-Pro code")
    parser.add_argument("--etopen", action="store_true",
                        help="Use ET-Open rules (default)")
    parser.add_argument("-q", "--quiet", action="store_true", default=False,
                        help="Be quiet, warning and error messages only")
    parser.add_argument("--reload-command", metavar="<command>",
                        help="Command to run after update if modified")
    parser.add_argument("--no-reload", action="store_true", default=False,
                        help="Disable reload")
    parser.add_argument("-T", "--test-command", metavar="<command>",
                        help="Command to test Suricata configuration")
    parser.add_argument("--no-test", action="store_true", default=False,
                        help="Disable testing rules with Suricata")
    parser.add_argument("-V", "--version", action="store_true", default=False,
                        help="Display version")
    parser.add_argument("--no-merge", action="store_true", default=False,
                        help="Do not merge the rules into a single file")

    # The Python 2.7 argparse module does prefix matching which can be
    # undesirable. Reserve some names here that would match existing
    # options to prevent prefix matching.
    parser.add_argument("--disable", default=False, help=argparse.SUPPRESS)
    parser.add_argument("--enable", default=False, help=argparse.SUPPRESS)
    parser.add_argument("--modify", default=False, help=argparse.SUPPRESS)
    parser.add_argument("--drop", default=False, help=argparse.SUPPRESS)

    args = parser.parse_args()

    # Error out if any reserved/unimplemented arguments were set.
    unimplemented_args = [
        "disable",
        "enable",
        "modify",
        "drop",
    ]
    for arg in unimplemented_args:
        if getattr(args, arg):
            logger.error("--%s not implemented", arg)
            return 1

    if args.version:
        print("suricata-update version %s" % suricata.update.version)
        return 0

    if args.verbose:
        logger.setLevel(logging.DEBUG)
    if args.quiet:
        logger.setLevel(logging.WARNING)

    logger.debug("This is suricata-update version %s; Python: %s" % (
        suricata.update.version, sys.version.replace("\n", "- ")))

    if args.dump_sample_configs:
        return dump_sample_configs()

    config = Config(args)
    try:
        config.load()
    except Exception as err:
        logger.error("Failed to load configuration: %s" % (err))
        return 1

    # If --no-ignore was provided, make sure args.ignore is
    # empty. Otherwise if no ignores are provided, set a sane default.
    if args.no_ignore:
        config.set("ignore", [])
    elif not config.get("ignore"):
        config.set("ignore", ["*deleted.rules"])

    # Check for Suricata binary...
    if args.suricata:
        if not os.path.exists(args.suricata):
            logger.error("Specified path to suricata does not exist: %s",
                         args.suricata)
            return 1
        suricata_path = args.suricata
    else:
        suricata_path = suricata.update.engine.get_path()
        if not suricata_path:
            logger.warning("No suricata application binary found on path.")

    if args.suricata_version:
        # The Suricata version was passed on the command line, parse it.
        suricata_version = suricata.update.engine.parse_version(
            args.suricata_version)
        if not suricata_version:
            logger.error("Failed to parse provided Suricata version: %s" % (
                args.suricata_version))
            return 1
        logger.info("Forcing Suricata version to %s." % (
            suricata_version.full))
    elif suricata_path:
        suricata_version = suricata.update.engine.get_version(suricata_path)
        if suricata_version:
            logger.info("Found Suricata version %s at %s." % (
                str(suricata_version.full), suricata_path))
        else:
            logger.error("Failed to get Suricata version.")
            return 1
    else:
        logger.info("Using default Suricata version of %s",
                    DEFAULT_SURICATA_VERSION)
        suricata_version = suricata.update.engine.parse_version(
            DEFAULT_SURICATA_VERSION)

    file_tracker = FileTracker()

    disable_matchers = []
    enable_matchers = []
    modify_filters = []
    drop_filters = []

    # Load user provided disable filters.
    disable_conf_filename = config.get("disable-conf")
    if disable_conf_filename and os.path.exists(disable_conf_filename):
        logger.info("Loading %s.", disable_conf_filename)
        disable_matchers += load_matchers(disable_conf_filename)

    # Load user provided enable filters.
    enable_conf_filename = config.get("enable-conf")
    if enable_conf_filename and os.path.exists(enable_conf_filename):
        logger.info("Loading %s.", enable_conf_filename)
        enable_matchers += load_matchers(enable_conf_filename)

    # Load user provided modify filters.
    modify_conf_filename = config.get("modify-conf")
    if modify_conf_filename and os.path.exists(modify_conf_filename):
        logger.info("Loading %s.", modify_conf_filename)
        modify_filters += load_filters(modify_conf_filename)

    # Load user provided drop filters.
    drop_conf_filename = config.get("drop-conf")
    if drop_conf_filename and os.path.exists(drop_conf_filename):
        logger.info("Loading %s.", drop_conf_filename)
        drop_filters += load_drop_filters(drop_conf_filename)

    # Check that the cache directory exists and is writable.
    if not os.path.exists(args.cache_dir):
        try:
            os.makedirs(args.cache_dir, mode=0o770)
        except Exception as err:
            logger.warning(
                "Cache directory does not exist and could not be created. "
                "/var/tmp will be used instead.")
            args.cache_dir = "/var/tmp"

    files = load_sources(config, suricata_version)

    load_dist_rules(files)

    # Remove ignored files.
    for filename in list(files.keys()):
        if ignore_file(config.get("ignore"), filename):
            logger.info("Ignoring file %s" % (filename))
            del files[filename]

    rules = []
    for filename in files:
        if not filename.endswith(".rules"):
            continue
        logger.debug("Parsing %s." % (filename))
        rules += suricata.update.rule.parse_fileobj(
            io.BytesIO(files[filename]), filename)

    rulemap = build_rule_map(rules)
    logger.info("Loaded %d rules." % (len(rules)))

    # Counts of user enabled and modified rules.
    enable_count = 0
    modify_count = 0
    drop_count = 0

    # List of rules disabled by user. Used for counting, and to log
    # rules that are re-enabled to meet flowbit requirements.
    disabled_rules = []

    for key, rule in rulemap.items():

        for matcher in disable_matchers:
            if rule.enabled and matcher.match(rule):
                logger.debug("Disabling: %s" % (rule.brief()))
                rule.enabled = False
                disabled_rules.append(rule)

        for matcher in enable_matchers:
            if not rule.enabled and matcher.match(rule):
                logger.debug("Enabling: %s" % (rule.brief()))
                rule.enabled = True
                enable_count += 1

        for filter in drop_filters:
            if filter.match(rule):
                rulemap[rule.id] = filter.filter(rule)
                drop_count += 1

    # Apply modify filters.
    for fltr in modify_filters:
        for key, rule in rulemap.items():
            if fltr.match(rule):
                new_rule = fltr.filter(rule)
                if new_rule and new_rule.format() != rule.format():
                    rulemap[rule.id] = new_rule
                    modify_count += 1

    logger.info("Disabled %d rules." % (len(disabled_rules)))
    logger.info("Enabled %d rules." % (enable_count))
    logger.info("Modified %d rules." % (modify_count))
    logger.info("Dropped %d rules." % (drop_count))

    # Fixup flowbits.
    resolve_flowbits(rulemap, disabled_rules)

    # Don't allow an empty output directory.
    if not args.output:
        logger.error("No output directory provided.")
        return 1

    # Check that output directory exists.
    if not os.path.exists(args.output):
        try:
            os.makedirs(args.output, mode=0o770)
        except Exception as err:
            logger.error(
                "Output directory does not exist and could not be created: %s",
                args.output)
            return 1

    # Check that output directory is writable.
    if not os.access(args.output, os.W_OK):
        logger.error("Output directory is not writable: %s", args.output)
        return 1

    # Backup the output directory.
    logger.info("Backing up current rules.")
    backup_directory = util.mktempdir()
    shutil.copytree(args.output, os.path.join(backup_directory, "backup"))

    if not args.no_merge:
        # The default, write out a merged file.
        output_filename = os.path.join(
            args.output, DEFAULT_OUTPUT_RULE_FILENAME)
        file_tracker.add(output_filename)
        write_merged(output_filename, rulemap)
    else:
        for filename in files:
            file_tracker.add(
                os.path.join(args.output, os.path.basename(filename)))
        write_to_directory(args.output, files, rulemap)

    if args.yaml_fragment:
        file_tracker.add(args.yaml_fragment)
        write_yaml_fragment(args.yaml_fragment, files)

    if args.sid_msg_map:
        write_sid_msg_map(args.sid_msg_map, rulemap, version=1)

    if args.sid_msg_map_2:
        write_sid_msg_map(args.sid_msg_map_2, rulemap, version=2)

    if args.threshold_in and args.threshold_out:
        file_tracker.add(args.threshold_out)
        threshold_processor = ThresholdProcessor()
        threshold_processor.process(
            open(args.threshold_in), open(args.threshold_out, "w"), rulemap)

    if not args.force and not file_tracker.any_modified():
        logger.info("No changes detected, exiting.")
        return 0

    if not test_suricata(config, suricata_path):
        logger.error("Suricata test failed, aborting.")
        logger.error("Restoring previous rules.")
        copytree(os.path.join(backup_directory, "backup"), args.output)
        return 1

    if not args.no_reload and config.get("reload-command"):
        logger.info("Running %s." % (config.get("reload-command")))
        rc = subprocess.Popen(config.get("reload-command"), shell=True).wait()
        if rc != 0:
            logger.error("Reload command exited with error: %d", rc)

    logger.info("Done.")

    return 0
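# Example invocations, using only options defined above (paths are
# illustrative, and the installed command name is assumed to be
# "suricata-update"):
#
#   suricata-update
#   suricata-update --no-test --no-reload -o /var/lib/suricata/rules
#   suricata-update --disable-conf disable.conf --modify-conf modify.conf
#
# A conventional entry-point guard, assuming this module may also be run
# directly; the packaged console script may instead call main() or _main(),
# so this is a sketch rather than the project's actual entry point.
if __name__ == "__main__":
    sys.exit(main())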