Example #1
 def fetch(self, url):
     tmp_filename = self.get_tmp_filename(url)
     if not config.args().force and os.path.exists(tmp_filename):
         if not config.args().now and \
            time.time() - os.stat(tmp_filename).st_mtime < (60 * 15):
             logger.info(
                 "Last download less than 15 minutes ago. Not downloading %s.",
                 url)
             return self.extract_files(tmp_filename)
         if self.check_checksum(tmp_filename, url):
             logger.info("Remote checksum has not changed. Not fetching.")
             return self.extract_files(tmp_filename)
     if not os.path.exists(config.get_cache_dir()):
         os.makedirs(config.get_cache_dir(), mode=0o770)
     logger.info("Fetching %s." % (url))
     try:
         tmp_fileobj = tempfile.NamedTemporaryFile()
         suricata.update.net.get(url,
                                 tmp_fileobj,
                                 progress_hook=self.progress_hook)
         shutil.copyfile(tmp_fileobj.name, tmp_filename)
         tmp_fileobj.close()
     except URLError as err:
         if os.path.exists(tmp_filename):
             logger.warning(
                 "Failed to fetch %s, "
                 "will use latest cached version: %s", url, err)
             return self.extract_files(tmp_filename)
         raise err
     except Exception:
         raise
     if not config.args().quiet:
         self.progress_hook_finish()
     logger.info("Done.")
     return self.extract_files(tmp_filename)
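
The core caching decision above can be isolated: skip the download when the
cached copy is less than 15 minutes old. A minimal, self-contained sketch of
that freshness check, with a hypothetical cache path standing in for
get_tmp_filename():

import os
import time

def is_fresh(cached_path, max_age=60 * 15):
    # True when the file exists and was modified within the last max_age seconds.
    return (os.path.exists(cached_path)
            and time.time() - os.stat(cached_path).st_mtime < max_age)

if is_fresh("/var/lib/suricata/update/cache/example.rules.tar.gz"):
    print("Last download less than 15 minutes ago. Not downloading.")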
Example #2
 def fetch(self, url):
     net_arg = url
     checksum = url[2]
     url = url[0]
     tmp_filename = self.get_tmp_filename(url)
     if config.args().offline:
         if config.args().force:
             logger.warning("Running offline, skipping download of %s", url)
         logger.info("Using latest cached version of rule file: %s", url)
         if not os.path.exists(tmp_filename):
             logger.error("Can't proceed offline, "
                          "source %s has not yet been downloaded.", url)
             sys.exit(1)
         return self.extract_files(tmp_filename)
     if not config.args().force and os.path.exists(tmp_filename):
         if not config.args().now and \
            time.time() - os.stat(tmp_filename).st_mtime < (60 * 15):
             logger.info(
                 "Last download less than 15 minutes ago. Not downloading %s.",
                 url)
             return self.extract_files(tmp_filename)
         if checksum:
             if self.check_checksum(tmp_filename, url):
                 logger.info("Remote checksum has not changed. "
                             "Not fetching.")
                 return self.extract_files(tmp_filename)
     if not os.path.exists(config.get_cache_dir()):
         os.makedirs(config.get_cache_dir(), mode=0o770)
     logger.info("Fetching %s." % (url))
     try:
         tmp_fileobj = tempfile.NamedTemporaryFile()
         net.get(
             net_arg,
             tmp_fileobj,
             progress_hook=self.progress_hook)
         shutil.copyfile(tmp_fileobj.name, tmp_filename)
         tmp_fileobj.close()
     except URLError as err:
         if os.path.exists(tmp_filename):
             logger.warning(
                 "Failed to fetch %s, "
                 "will use latest cached version: %s", url, err)
             return self.extract_files(tmp_filename)
         raise err
     except IOError as err:
         self.progress_hook_finish()
         logger.error("Failed to copy file: %s", err)
         sys.exit(1)
     except Exception:
         raise
     self.progress_hook_finish()
     logger.info("Done.")
     return self.extract_files(tmp_filename)
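
Unlike Example #1, this variant receives a tuple rather than a bare URL:
element 0 is the URL itself and element 2 a flag that controls the checksum
check, as the indexing at the top of fetch() shows. A hedged sketch of the
assumed shape (the middle element is unused in this excerpt, and the values
are hypothetical):

net_arg = ("https://rules.example.com/rules.tar.gz", None, True)
url, _, checksum = net_arg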
Example #3
def update_sources():
    global local_index_filename
    local_index_filename = sources.get_index_filename()
    initial_content = get_initial_content()
    with io.BytesIO() as fileobj:
        url = sources.get_source_index_url()
        logger.info("Downloading %s", url)
        try:
            net.get(url, fileobj)
        except Exception as err:
            raise exceptions.ApplicationError(
                "Failed to download index: %s: %s" % (url, err))
        if not os.path.exists(config.get_cache_dir()):
            try:
                os.makedirs(config.get_cache_dir())
            except Exception as err:
                logger.error("Failed to create directory %s: %s",
                             config.get_cache_dir(), err)
                return 1
        write_and_compare(initial_content=initial_content, fileobj=fileobj)
Example #4
def update_sources():
    local_index_filename = sources.get_index_filename()
    with io.BytesIO() as fileobj:
        url = sources.get_source_index_url()
        logger.info("Downloading %s", url)
        try:
            net.get(url, fileobj)
        except Exception as err:
            raise exceptions.ApplicationError(
                "Failed to download index: %s: %s" % (url, err))
        if not os.path.exists(config.get_cache_dir()):
            try:
                os.makedirs(config.get_cache_dir())
            except Exception as err:
                logger.error("Failed to create directory %s: %s",
                             config.get_cache_dir(), err)
                return 1
        with open(local_index_filename, "wb") as outobj:
            outobj.write(fileobj.getvalue())
        logger.info("Saved %s", local_index_filename)
Example #5
def _main():
    global args
    args = parsers.parse_arg()

    # Go verbose or quiet sooner than later.
    if args.verbose:
        logger.setLevel(logging.DEBUG)
    if args.quiet:
        logger.setLevel(logging.WARNING)

    logger.debug("This is suricata-update version %s (rev: %s); Python: %s" %
                 (version, revision, sys.version.replace("\n", "- ")))

    config.init(args)

    # Error out if any reserved/unimplemented arguments were set.
    unimplemented_args = [
        "disable",
        "enable",
        "modify",
        "drop",
    ]
    for arg in unimplemented_args:
        if hasattr(args, arg) and getattr(args, arg):
            logger.error("--%s not implemented", arg)
            return 1

    suricata_path = config.get("suricata")

    # Now parse the Suricata version. If provided on the command line,
    # use that, otherwise attempt to get it from Suricata.
    if args.suricata_version:
        # The Suricata version was passed on the command line, parse it.
        suricata_version = engine.parse_version(args.suricata_version)
        if not suricata_version:
            logger.error("Failed to parse provided Suricata version: %s" %
                         (args.suricata_version))
            return 1
        logger.info("Forcing Suricata version to %s." %
                    (suricata_version.full))
    elif suricata_path:
        suricata_version = engine.get_version(suricata_path)
        if suricata_version:
            logger.info("Found Suricata version %s at %s." %
                        (str(suricata_version.full), suricata_path))
        else:
            logger.error("Failed to get Suricata version.")
            return 1
    else:
        logger.info("Using default Suricata version of %s",
                    DEFAULT_SURICATA_VERSION)
        suricata_version = engine.parse_version(DEFAULT_SURICATA_VERSION)

    # Provide the Suricata version to the net module to add to the
    # User-Agent.
    net.set_user_agent_suricata_version(suricata_version.full)

    if args.subcommand:
        if args.subcommand == "check-versions" and hasattr(args, "func"):
            return args.func(suricata_version)
        elif hasattr(args, "func"):
            return args.func()
        elif args.subcommand != "update":
            logger.error("Unknown command: %s", args.subcommand)
            return 1

    if args.dump_sample_configs:
        return dump_sample_configs()

    # If --no-ignore was provided, clear any ignores provided in the
    # config.
    if args.no_ignore:
        config.set(config.IGNORE_KEY, [])

    file_tracker = FileTracker()

    disable_matchers = []
    enable_matchers = []
    modify_filters = []
    drop_filters = []

    # Load user provided disable filters.
    disable_conf_filename = config.get("disable-conf")
    if disable_conf_filename and os.path.exists(disable_conf_filename):
        logger.info("Loading %s.", disable_conf_filename)
        disable_matchers += load_matchers(disable_conf_filename)

    # Load user provided enable filters.
    enable_conf_filename = config.get("enable-conf")
    if enable_conf_filename and os.path.exists(enable_conf_filename):
        logger.info("Loading %s.", enable_conf_filename)
        enable_matchers += load_matchers(enable_conf_filename)

    # Load user provided modify filters.
    modify_conf_filename = config.get("modify-conf")
    if modify_conf_filename and os.path.exists(modify_conf_filename):
        logger.info("Loading %s.", modify_conf_filename)
        modify_filters += load_filters(modify_conf_filename)

    # Load user provided drop filters.
    drop_conf_filename = config.get("drop-conf")
    if drop_conf_filename and os.path.exists(drop_conf_filename):
        logger.info("Loading %s.", drop_conf_filename)
        drop_filters += load_drop_filters(drop_conf_filename)

    # Load the Suricata configuration if we can.
    suriconf = None
    if config.get("suricata-conf") and \
       os.path.exists(config.get("suricata-conf")) and \
       suricata_path and os.path.exists(suricata_path):
        logger.info("Loading %s", config.get("suricata-conf"))
        try:
            suriconf = engine.Configuration.load(config.get("suricata-conf"),
                                                 suricata_path=suricata_path)
        except subprocess.CalledProcessError:
            return 1

    # Disable rules for app-layer protocols that are not enabled.
    if suriconf:
        for key in suriconf.keys():
            m = re.match(r"app-layer\.protocols\.([^\.]+)\.enabled", key)
            if m:
                proto = m.group(1)
                if not suriconf.is_true(key, ["detection-only"]):
                    logger.info("Disabling rules for protocol %s", proto)
                    disable_matchers.append(
                        matchers_mod.ProtoRuleMatcher(proto))
                elif proto == "smb" and suriconf.build_info:
                    # Special case for SMB rules. For versions less
                    # than 5, disable smb rules if Rust is not
                    # available.
                    if suriconf.build_info["version"].major < 5:
                        if not "RUST" in suriconf.build_info["features"]:
                            logger.info(
                                "Disabling rules for protocol {}".format(
                                    proto))
                            disable_matchers.append(
                                matchers_mod.ProtoRuleMatcher(proto))

    # Check that the cache directory exists and is writable.
    if not os.path.exists(config.get_cache_dir()):
        try:
            os.makedirs(config.get_cache_dir(), mode=0o770)
        except Exception as err:
            logger.warning(
                "Cache directory does not exist and could not be created. "
                "/var/tmp will be used instead.")
            config.set_cache_dir("/var/tmp")

    files = load_sources(suricata_version)

    load_dist_rules(files)

    # Remove ignored files.
    for filename in list(files.keys()):
        if ignore_file(config.get("ignore"), filename):
            logger.info("Ignoring file %s" % (filename))
            del files[filename]

    rules = []
    for filename in sorted(files):
        if not filename.endswith(".rules"):
            continue
        logger.debug("Parsing %s." % (filename))
        rules += rule_mod.parse_fileobj(io.BytesIO(files[filename]), filename)

    rulemap = build_rule_map(rules)
    logger.info("Loaded %d rules." % (len(rules)))

    # Counts of user enabled and modified rules.
    enable_count = 0
    modify_count = 0
    drop_count = 0

    # List of rules disabled by user. Used for counting, and to log
    # rules that are re-enabled to meet flowbit requirements.
    disabled_rules = []

    for key, rule in rulemap.items():

        for matcher in disable_matchers:
            if rule.enabled and matcher.match(rule):
                logger.debug("Disabling: %s" % (rule.brief()))
                rule.enabled = False
                disabled_rules.append(rule)

        for matcher in enable_matchers:
            if not rule.enabled and matcher.match(rule):
                logger.debug("Enabling: %s" % (rule.brief()))
                rule.enabled = True
                enable_count += 1

        for fltr in drop_filters:
            if fltr.match(rule):
                rulemap[rule.id] = fltr.run(rule)
                drop_count += 1

    # Apply modify filters.
    for fltr in modify_filters:
        for key, rule in rulemap.items():
            if fltr.match(rule):
                new_rule = fltr.run(rule)
                if new_rule and new_rule.format() != rule.format():
                    rulemap[rule.id] = new_rule
                    modify_count += 1

    # Check if we should disable ja3 rules.
    try:
        disable_ja3(suriconf, rulemap, disabled_rules)
    except Exception as err:
        logger.error("Failed to dynamically disable ja3 rules: %s" % (err))

    # Check rule vars, disabling rules that use unknown vars.
    check_vars(suriconf, rulemap)

    logger.info("Disabled %d rules." % (len(disabled_rules)))
    logger.info("Enabled %d rules." % (enable_count))
    logger.info("Modified %d rules." % (modify_count))
    logger.info("Dropped %d rules." % (drop_count))

    # Fixup flowbits.
    resolve_flowbits(rulemap, disabled_rules)

    # Check that output directory exists, creating it if needed.
    check_output_directory(config.get_output_dir())

    # Check that output directory is writable.
    if not os.access(config.get_output_dir(), os.W_OK):
        logger.error("Output directory is not writable: %s",
                     config.get_output_dir())
        return 1

    # Backup the output directory.
    logger.info("Backing up current rules.")
    backup_directory = util.mktempdir()
    shutil.copytree(config.get_output_dir(),
                    os.path.join(backup_directory, "backup"),
                    ignore=copytree_ignore_backup)

    if not args.no_merge:
        # The default, write out a merged file.
        output_filename = os.path.join(config.get_output_dir(),
                                       DEFAULT_OUTPUT_RULE_FILENAME)
        file_tracker.add(output_filename)
        write_merged(os.path.join(output_filename), rulemap)
    else:
        for filename in files:
            file_tracker.add(
                os.path.join(config.get_output_dir(),
                             os.path.basename(filename)))
        write_to_directory(config.get_output_dir(), files, rulemap)

    if args.yaml_fragment:
        file_tracker.add(args.yaml_fragment)
        write_yaml_fragment(args.yaml_fragment, files)

    if args.sid_msg_map:
        write_sid_msg_map(args.sid_msg_map, rulemap, version=1)
    if args.sid_msg_map_2:
        write_sid_msg_map(args.sid_msg_map_2, rulemap, version=2)

    if args.threshold_in and args.threshold_out:
        file_tracker.add(args.threshold_out)
        threshold_processor = ThresholdProcessor()
        threshold_processor.process(open(args.threshold_in),
                                    open(args.threshold_out, "w"), rulemap)

    if not args.force and not file_tracker.any_modified():
        logger.info("No changes detected, exiting.")
        notes.dump_notes()
        return 0

    # Set these containers to None to free the memory before testing Suricata,
    # which may consume a lot of memory by itself. Ideally we should refactor
    # this large function into multiple methods so these go out of scope and
    # get removed automatically.
    rulemap = None
    rules = None
    files = None

    if not test_suricata(suricata_path):
        logger.error("Suricata test failed, aborting.")
        logger.error("Restoring previous rules.")
        copytree(os.path.join(backup_directory, "backup"),
                 config.get_output_dir())
        return 1

    if not config.args().no_reload and config.get("reload-command"):
        logger.info("Running %s." % (config.get("reload-command")))
        rc = subprocess.Popen(config.get("reload-command"), shell=True).wait()
        if rc != 0:
            logger.error("Reload command exited with error: %d", rc)

    logger.info("Done.")

    notes.dump_notes()

    return 0
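
The disable/enable pass in the middle of this function walks every rule once
and applies each matcher list to it. A minimal sketch of that pass with
hypothetical stand-ins for the Rule and matcher classes (the real ones live
in the suricata.update modules):

class Rule:
    def __init__(self, sid, enabled=True):
        self.sid, self.enabled = sid, enabled

class SidMatcher:
    # Hypothetical matcher that matches on an exact sid.
    def __init__(self, sid):
        self.sid = sid

    def match(self, rule):
        return rule.sid == self.sid

rulemap = {1: Rule(1), 2: Rule(2)}
disable_matchers = [SidMatcher(2)]
disabled_rules = []

for rule in rulemap.values():
    for matcher in disable_matchers:
        if rule.enabled and matcher.match(rule):
            rule.enabled = False
            disabled_rules.append(rule)

print(len(disabled_rules))  # 1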
Example #6
 def get_tmp_filename(self, url):
     url_hash = hashlib.md5(url.encode("utf-8")).hexdigest()
     return os.path.join(config.get_cache_dir(),
                         "%s-%s" % (url_hash, self.url_basename(url)))
Example #7
def _main():
    global args

    global_parser = argparse.ArgumentParser(add_help=False)
    global_parser.add_argument("-v",
                               "--verbose",
                               action="store_true",
                               default=None,
                               help="Be more verbose")
    global_parser.add_argument(
        "-q",
        "--quiet",
        action="store_true",
        default=False,
        help="Be quiet, warning and error messages only")
    global_parser.add_argument(
        "-D",
        "--data-dir",
        metavar="<directory>",
        dest="data_dir",
        help="Data directory (default: /var/lib/suricata)")
    global_parser.add_argument(
        "-c",
        "--config",
        metavar="<filename>",
        help="configuration file (default: /etc/suricata/update.yaml)")
    global_parser.add_argument(
        "--suricata-conf",
        metavar="<filename>",
        help="configuration file (default: /etc/suricata/suricata.yaml)")
    global_parser.add_argument("--suricata",
                               metavar="<path>",
                               help="Path to Suricata program")
    global_parser.add_argument("--suricata-version",
                               metavar="<version>",
                               help="Override Suricata version")
    global_parser.add_argument("--user-agent",
                               metavar="<user-agent>",
                               help="Set custom user-agent string")
    global_parser.add_argument(
        "--no-check-certificate",
        action="store_true",
        default=None,
        help="Disable server SSL/TLS certificate verification")

    global_args, rem = global_parser.parse_known_args()

    if not rem or rem[0].startswith("-"):
        rem.insert(0, "update")

    parser = argparse.ArgumentParser()
    subparsers = parser.add_subparsers(dest="subcommand", metavar="<command>")

    # The "update" (default) sub-command parser.
    update_parser = subparsers.add_parser("update",
                                          add_help=False,
                                          parents=[global_parser])

    update_parser.add_argument("-o",
                               "--output",
                               metavar="<directory>",
                               dest="output",
                               help="Directory to write rules to")
    update_parser.add_argument(
        "-f",
        "--force",
        action="store_true",
        default=False,
        help="Force operations that might otherwise be skipped")
    update_parser.add_argument("--yaml-fragment",
                               metavar="<filename>",
                               help="Output YAML fragment for rule inclusion")
    update_parser.add_argument(
        "--url",
        metavar="<url>",
        action="append",
        default=[],
        help=
        "URL to use instead of auto-generating one (can be specified multiple times)"
    )
    update_parser.add_argument(
        "--local",
        metavar="<path>",
        action="append",
        default=[],
        help="Local rule files or directories (can be specified multiple times)"
    )
    update_parser.add_argument("--sid-msg-map",
                               metavar="<filename>",
                               help="Generate a sid-msg.map file")
    update_parser.add_argument("--sid-msg-map-2",
                               metavar="<filename>",
                               help="Generate a v2 sid-msg.map file")

    update_parser.add_argument("--disable-conf",
                               metavar="<filename>",
                               help="Filename of rule disable filters")
    update_parser.add_argument("--enable-conf",
                               metavar="<filename>",
                               help="Filename of rule enable filters")
    update_parser.add_argument("--modify-conf",
                               metavar="<filename>",
                               help="Filename of rule modification filters")
    update_parser.add_argument("--drop-conf",
                               metavar="<filename>",
                               help="Filename of drop rules filters")

    update_parser.add_argument(
        "--ignore",
        metavar="<pattern>",
        action="append",
        default=[],
        help=
        "Filenames to ignore (can be specified multiple times; default: *deleted.rules)"
    )
    update_parser.add_argument("--no-ignore",
                               action="store_true",
                               default=False,
                               help="Disables the ignore option.")

    update_parser.add_argument(
        "--threshold-in",
        metavar="<filename>",
        help="Filename of rule thresholding configuration")
    update_parser.add_argument(
        "--threshold-out",
        metavar="<filename>",
        help="Output of processed threshold configuration")

    update_parser.add_argument(
        "--dump-sample-configs",
        action="store_true",
        default=False,
        help="Dump sample config files to current directory")
    update_parser.add_argument("--etopen",
                               action="store_true",
                               help="Use ET-Open rules (default)")
    update_parser.add_argument("--reload-command",
                               metavar="<command>",
                               help="Command to run after update if modified")
    update_parser.add_argument("--no-reload",
                               action="store_true",
                               default=False,
                               help="Disable reload")
    update_parser.add_argument("-T",
                               "--test-command",
                               metavar="<command>",
                               help="Command to test Suricata configuration")
    update_parser.add_argument("--no-test",
                               action="store_true",
                               default=False,
                               help="Disable testing rules with Suricata")
    update_parser.add_argument("-V",
                               "--version",
                               action="store_true",
                               default=False,
                               help="Display version")

    update_parser.add_argument(
        "--no-merge",
        action="store_true",
        default=False,
        help="Do not merge the rules into a single file")

    update_parser.add_argument("-h", "--help", action="store_true")

    # Hidden argument: --now bypasses the time-based check that skips
    # re-downloading a recently fetched ruleset.
    update_parser.add_argument("--now",
                               default=False,
                               action="store_true",
                               help=argparse.SUPPRESS)

    # The Python 2.7 argparse module does prefix matching which can be
    # undesirable. Reserve some names here that would match existing
    # options to prevent prefix matching.
    update_parser.add_argument("--disable",
                               default=False,
                               help=argparse.SUPPRESS)
    update_parser.add_argument("--enable",
                               default=False,
                               help=argparse.SUPPRESS)
    update_parser.add_argument("--modify",
                               default=False,
                               help=argparse.SUPPRESS)
    update_parser.add_argument("--drop", default=False, help=argparse.SUPPRESS)

    commands.listsources.register(
        subparsers.add_parser("list-sources", parents=[global_parser]))
    commands.listenabledsources.register(
        subparsers.add_parser("list-enabled-sources", parents=[global_parser]))
    commands.addsource.register(
        subparsers.add_parser("add-source", parents=[global_parser]))
    commands.updatesources.register(
        subparsers.add_parser("update-sources", parents=[global_parser]))
    commands.enablesource.register(
        subparsers.add_parser("enable-source", parents=[global_parser]))
    commands.disablesource.register(
        subparsers.add_parser("disable-source", parents=[global_parser]))
    commands.removesource.register(
        subparsers.add_parser("remove-source", parents=[global_parser]))

    args = parser.parse_args(rem)

    # Merge global args into args.
    for arg in vars(global_args):
        if not hasattr(args, arg):
            setattr(args, arg, getattr(global_args, arg))
        elif hasattr(args, arg) and getattr(args, arg) is None:
            setattr(args, arg, getattr(global_args, arg))

    # Go verbose or quiet sooner than later.
    if args.verbose:
        logger.setLevel(logging.DEBUG)
    if args.quiet:
        logger.setLevel(logging.WARNING)

    config.init(args)

    # Error out if any reserved/unimplemented arguments were set.
    unimplemented_args = [
        "disable",
        "enable",
        "modify",
        "drop",
    ]
    for arg in unimplemented_args:
        if hasattr(args, arg) and getattr(args, arg):
            logger.error("--%s not implemented", arg)
            return 1

    logger.debug("This is suricata-update version %s (rev: %s); Python: %s" %
                 (version, revision, sys.version.replace("\n", "- ")))

    suricata_path = config.get("suricata")

    # Now parse the Suricata version. If provided on the command line,
    # use that, otherwise attempt to get it from Suricata.
    if args.suricata_version:
        # The Suricata version was passed on the command line, parse it.
        suricata_version = suricata.update.engine.parse_version(
            args.suricata_version)
        if not suricata_version:
            logger.error("Failed to parse provided Suricata version: %s" %
                         (args.suricata_version))
            return 1
        logger.info("Forcing Suricata version to %s." %
                    (suricata_version.full))
    elif suricata_path:
        suricata_version = suricata.update.engine.get_version(suricata_path)
        if suricata_version:
            logger.info("Found Suricata version %s at %s." %
                        (str(suricata_version.full), suricata_path))
        else:
            logger.error("Failed to get Suricata version.")
            return 1
    else:
        logger.info("Using default Suricata version of %s",
                    DEFAULT_SURICATA_VERSION)
        suricata_version = suricata.update.engine.parse_version(
            DEFAULT_SURICATA_VERSION)

    # Provide the Suricata version to the net module to add to the
    # User-Agent.
    suricata.update.net.set_user_agent_suricata_version(suricata_version.full)

    # Load the custom user-agent string.
    user_agent = config.get("user-agent")
    if user_agent:
        logger.info("Using user-agent: %s.", user_agent)
        suricata.update.net.set_custom_user_agent(user_agent)

    if args.subcommand:
        if hasattr(args, "func"):
            return args.func()
        elif args.subcommand != "update":
            logger.error("Unknown command: %s", args.subcommand)
            return 1

    if args.dump_sample_configs:
        return dump_sample_configs()

    if args.version:
        print("suricata-update version %s (rev: %s)" % (version, revision))
        return 0

    if args.help:
        print(update_parser.format_help())
        print("""other commands:
    update-sources             Update the source index
    list-sources               List available sources
    enable-source              Enable a source from the index
    disable-source             Disable an enabled source
    remove-source              Remove an enabled or disabled source
    list-enabled-sources       List all enabled sources
    add-source                 Add a new source by URL
""")
        return 0

    # If --no-ignore was provided, clear any ignores provided in the
    # config.
    if args.no_ignore:
        config.set(config.IGNORE_KEY, [])

    file_tracker = FileTracker()

    disable_matchers = []
    enable_matchers = []
    modify_filters = []
    drop_filters = []

    # Load user provided disable filters.
    disable_conf_filename = config.get("disable-conf")
    if disable_conf_filename and os.path.exists(disable_conf_filename):
        logger.info("Loading %s.", disable_conf_filename)
        disable_matchers += load_matchers(disable_conf_filename)

    # Load user provided enable filters.
    enable_conf_filename = config.get("enable-conf")
    if enable_conf_filename and os.path.exists(enable_conf_filename):
        logger.info("Loading %s.", enable_conf_filename)
        enable_matchers += load_matchers(enable_conf_filename)

    # Load user provided modify filters.
    modify_conf_filename = config.get("modify-conf")
    if modify_conf_filename and os.path.exists(modify_conf_filename):
        logger.info("Loading %s.", modify_conf_filename)
        modify_filters += load_filters(modify_conf_filename)

    # Load user provided drop filters.
    drop_conf_filename = config.get("drop-conf")
    if drop_conf_filename and os.path.exists(drop_conf_filename):
        logger.info("Loading %s.", drop_conf_filename)
        drop_filters += load_drop_filters(drop_conf_filename)

    if config.get("suricata-conf") and \
       os.path.exists(config.get("suricata-conf")) and \
       suricata_path and os.path.exists(suricata_path):
        logger.info("Loading %s", config.get("suricata-conf"))
        suriconf = suricata.update.engine.Configuration.load(
            config.get("suricata-conf"), suricata_path=suricata_path)
        for key in suriconf.keys():
            if key.startswith("app-layer.protocols") and \
               key.endswith(".enabled"):
                if not suriconf.is_true(key, ["detection-only"]):
                    proto = key.split(".")[2]
                    logger.info("Disabling rules with proto %s", proto)
                    disable_matchers.append(ProtoRuleMatcher(proto))

    # Check that the cache directory exists and is writable.
    if not os.path.exists(config.get_cache_dir()):
        try:
            os.makedirs(config.get_cache_dir(), mode=0o770)
        except Exception as err:
            logger.warning(
                "Cache directory does not exist and could not be created. "
                "/var/tmp will be used instead.")
            config.set_cache_dir("/var/tmp")

    files = load_sources(suricata_version)

    load_dist_rules(files)

    # Remove ignored files.
    for filename in list(files.keys()):
        if ignore_file(config.get("ignore"), filename):
            logger.info("Ignoring file %s" % (filename))
            del files[filename]

    rules = []
    for filename in files:
        if not filename.endswith(".rules"):
            continue
        logger.debug("Parsing %s." % (filename))
        rules += suricata.update.rule.parse_fileobj(
            io.BytesIO(files[filename]), filename)

    rulemap = build_rule_map(rules)
    logger.info("Loaded %d rules." % (len(rules)))

    # Counts of user enabled and modified rules.
    enable_count = 0
    modify_count = 0
    drop_count = 0

    # List of rules disabled by user. Used for counting, and to log
    # rules that are re-enabled to meet flowbit requirements.
    disabled_rules = []

    for key, rule in rulemap.items():

        for matcher in disable_matchers:
            if rule.enabled and matcher.match(rule):
                logger.debug("Disabling: %s" % (rule.brief()))
                rule.enabled = False
                disabled_rules.append(rule)

        for matcher in enable_matchers:
            if not rule.enabled and matcher.match(rule):
                logger.debug("Enabling: %s" % (rule.brief()))
                rule.enabled = True
                enable_count += 1

        for fltr in drop_filters:
            if fltr.match(rule):
                rulemap[rule.id] = fltr.filter(rule)
                drop_count += 1

    # Apply modify filters.
    for fltr in modify_filters:
        for key, rule in rulemap.items():
            if fltr.match(rule):
                new_rule = fltr.filter(rule)
                if new_rule and new_rule.format() != rule.format():
                    rulemap[rule.id] = new_rule
                    modify_count += 1

    logger.info("Disabled %d rules." % (len(disabled_rules)))
    logger.info("Enabled %d rules." % (enable_count))
    logger.info("Modified %d rules." % (modify_count))
    logger.info("Dropped %d rules." % (drop_count))

    # Fixup flowbits.
    resolve_flowbits(rulemap, disabled_rules)

    # Check that output directory exists.
    if not os.path.exists(config.get_output_dir()):
        try:
            os.makedirs(config.get_output_dir(), mode=0o770)
        except Exception as err:
            logger.error(
                "Output directory does not exist and could not be created: %s",
                config.get_output_dir())
            return 1

    # Check that output directory is writable.
    if not os.access(config.get_output_dir(), os.W_OK):
        logger.error("Output directory is not writable: %s",
                     config.get_output_dir())
        return 1

    # Backup the output directory.
    logger.info("Backing up current rules.")
    backup_directory = util.mktempdir()
    shutil.copytree(config.get_output_dir(),
                    os.path.join(backup_directory, "backup"),
                    ignore=copytree_ignore_backup)

    if not args.no_merge:
        # The default, write out a merged file.
        output_filename = os.path.join(config.get_output_dir(),
                                       DEFAULT_OUTPUT_RULE_FILENAME)
        file_tracker.add(output_filename)
        write_merged(os.path.join(output_filename), rulemap)
    else:
        for filename in files:
            file_tracker.add(
                os.path.join(config.get_output_dir(),
                             os.path.basename(filename)))
        write_to_directory(config.get_output_dir(), files, rulemap)

    if args.yaml_fragment:
        file_tracker.add(args.yaml_fragment)
        write_yaml_fragment(args.yaml_fragment, files)

    if args.sid_msg_map:
        write_sid_msg_map(args.sid_msg_map, rulemap, version=1)
    if args.sid_msg_map_2:
        write_sid_msg_map(args.sid_msg_map_2, rulemap, version=2)

    if args.threshold_in and args.threshold_out:
        file_tracker.add(args.threshold_out)
        threshold_processor = ThresholdProcessor()
        threshold_processor.process(open(args.threshold_in),
                                    open(args.threshold_out, "w"), rulemap)

    if not args.force and not file_tracker.any_modified():
        logger.info("No changes detected, exiting.")
        return 0

    if not test_suricata(suricata_path):
        logger.error("Suricata test failed, aborting.")
        logger.error("Restoring previous rules.")
        copytree(os.path.join(backup_directory, "backup"),
                 config.get_output_dir())
        return 1

    if not config.args().no_reload and config.get("reload-command"):
        logger.info("Running %s." % (config.get("reload-command")))
        rc = subprocess.Popen(config.get("reload-command"), shell=True).wait()
        if rc != 0:
            logger.error("Reload command exited with error: %d", rc)

    logger.info("Done.")

    return 0
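
The argument handling at the top of this example is a two-stage argparse
pattern: global options are parsed first with parse_known_args(), the
subcommand defaults to "update" when the remainder does not name one, and
each subparser inherits the global options via parents=[...]. A minimal,
self-contained sketch of that flow (option names abbreviated):

import argparse

global_parser = argparse.ArgumentParser(add_help=False)
global_parser.add_argument("-v", "--verbose", action="store_true",
                           default=None)

# First pass: pull out global options, leave the rest.
global_args, rem = global_parser.parse_known_args(["-v", "--force"])
if not rem or rem[0].startswith("-"):
    rem.insert(0, "update")  # default subcommand when none was given

parser = argparse.ArgumentParser()
subparsers = parser.add_subparsers(dest="subcommand")
update = subparsers.add_parser("update", add_help=False,
                               parents=[global_parser])
update.add_argument("-f", "--force", action="store_true", default=False)

args = parser.parse_args(rem)
print(args.subcommand, args.force)  # update True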
Example #8
def load_sources(suricata_version):
    files = {}

    urls = []

    # Add any URLs added with the --url command line parameter.
    if config.args().url:
        for url in config.args().url:
            urls.append(url)

    # Get the new style sources.
    enabled_sources = sources.get_enabled_sources()

    # Convert the Suricata version to a version string.
    version_string = "%d.%d.%d" % (
        suricata_version.major, suricata_version.minor, suricata_version.patch)

    # Construct the URL replacement parameters that are internal to
    # suricata-update.
    internal_params = {"__version__": version_string}

    # If we have new sources, we also need to load the index.
    if enabled_sources:
        index_filename = os.path.join(config.get_cache_dir(),
                                      sources.SOURCE_INDEX_FILENAME)
        if os.path.exists(index_filename):
            index = sources.Index(index_filename)
        else:
            index = None

        for (name, source) in enabled_sources.items():
            params = source["params"] if "params" in source else {}
            params.update(internal_params)
            if "url" in source:
                # No need to go off to the index.
                url = source["url"] % params
            else:
                if not index:
                    raise exceptions.ApplicationError(
                        "Source index is required for source %s; "
                        "run suricata-update update-sources" %
                        (source["source"]))
                url = index.resolve_url(name, params)
            logger.debug("Resolved source %s to URL %s.", name, url)
            urls.append(url)

    if config.get("sources"):
        for url in config.get("sources"):
            if not isinstance(url, str):
                raise exceptions.InvalidConfigurationError(
                    "Invalid datatype for source URL: %s" % (str(url)))
            url = url % internal_params
            logger.debug("Adding source %s.", url)
            urls.append(url)

    # If --etopen is on the command line, make sure it's added. Or if
    # there are no URLs, default to ET/Open.
    if config.get("etopen") or not urls:
        if not urls:
            logger.info(
                "No sources configured, will use Emerging Threats Open")
        urls.append(sources.get_etopen_url(internal_params))

    # Converting the URLs to a set removes duplicates.
    urls = set(urls)

    # Now download each URL.
    for url in urls:
        Fetch().run(url, files)

    # Now load local rules.
    for local in config.get("local"):
        load_local(local, files)

    return files
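
load_sources() expands internal parameters such as __version__ into each
configured URL with Python %-formatting (url = source["url"] % params above).
A small illustration with a hypothetical URL template:

internal_params = {"__version__": "6.0.10"}
template = "https://rules.example.com/suricata-%(__version__)s/rules.tar.gz"
print(template % internal_params)
# https://rules.example.com/suricata-6.0.10/rules.tar.gz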
Example #9
def get_index_filename():
    return os.path.join(config.get_cache_dir(), SOURCE_INDEX_FILENAME)