Example #1
0
 def fetch(self, url):
     """Download *url* into the cache and extract its files.

     Skips the download when a cached copy is fresh (less than 15
     minutes old, unless --now) or the remote checksum is unchanged;
     falls back to the cached copy if the download fails.
     """
     tmp_filename = self.get_tmp_filename(url)
     if not config.args().force and os.path.exists(tmp_filename):
         if not config.args().now and \
            time.time() - os.stat(tmp_filename).st_mtime < (60 * 15):
             logger.info(
                 "Last download less than 15 minutes ago. Not downloading %s.",
                 url)
             return self.extract_files(tmp_filename)
         if self.check_checksum(tmp_filename, url):
             logger.info("Remote checksum has not changed. Not fetching.")
             return self.extract_files(tmp_filename)
     if not os.path.exists(config.get_cache_dir()):
         os.makedirs(config.get_cache_dir(), mode=0o770)
     # Lazy %-style logging args instead of eager string formatting.
     logger.info("Fetching %s.", url)
     try:
         # Context manager guarantees the temporary file is cleaned up
         # even if the download or the copy fails.
         with tempfile.NamedTemporaryFile() as tmp_fileobj:
             suricata.update.net.get(url,
                                     tmp_fileobj,
                                     progress_hook=self.progress_hook)
             shutil.copyfile(tmp_fileobj.name, tmp_filename)
     except URLError as err:
         if os.path.exists(tmp_filename):
             logger.warning(
                 "Failed to fetch %s, "
                 "will use latest cached version: %s", url, err)
             return self.extract_files(tmp_filename)
         # Bare raise keeps the original traceback intact.
         raise
     if not config.args().quiet:
         self.progress_hook_finish()
     logger.info("Done.")
     return self.extract_files(tmp_filename)
Example #2
0
 def fetch(self, url):
     """Download a rule source and extract its files.

     ``url`` is a (url, http-header, checksum-flag) tuple as built by
     load_sources(); the whole tuple is handed to net.get() while the
     bare URL is used for cache naming and log messages.
     """
     net_arg = url
     # Unpack the (url, http-header, checksum) tuple.
     url, _, checksum = net_arg
     tmp_filename = self.get_tmp_filename(url)
     if config.args().offline:
         if config.args().force:
             logger.warning("Running offline, skipping download of %s", url)
         logger.info("Using latest cached version of rule file: %s", url)
         if not os.path.exists(tmp_filename):
             logger.error("Can't proceed offline, "
                          "source %s has not yet been downloaded.", url)
             sys.exit(1)
         return self.extract_files(tmp_filename)
     if not config.args().force and os.path.exists(tmp_filename):
         if not config.args().now and \
            time.time() - os.stat(tmp_filename).st_mtime < (60 * 15):
             logger.info(
                 "Last download less than 15 minutes ago. Not downloading %s.",
                 url)
             return self.extract_files(tmp_filename)
         # Only consult the remote checksum for sources that have
         # checksums enabled.
         if checksum and self.check_checksum(tmp_filename, url):
             logger.info("Remote checksum has not changed. "
                         "Not fetching.")
             return self.extract_files(tmp_filename)
     if not os.path.exists(config.get_cache_dir()):
         os.makedirs(config.get_cache_dir(), mode=0o770)
     # Lazy %-style logging args instead of eager string formatting.
     logger.info("Fetching %s.", url)
     try:
         # Context manager guarantees the temporary file is cleaned up
         # even if the download or the copy fails.
         with tempfile.NamedTemporaryFile() as tmp_fileobj:
             net.get(
                 net_arg,
                 tmp_fileobj,
                 progress_hook=self.progress_hook)
             shutil.copyfile(tmp_fileobj.name, tmp_filename)
     except URLError as err:
         if os.path.exists(tmp_filename):
             logger.warning(
                 "Failed to fetch %s, "
                 "will use latest cached version: %s", url, err)
             return self.extract_files(tmp_filename)
         # Bare raise keeps the original traceback intact.
         raise
     except IOError as err:
         self.progress_hook_finish()
         logger.error("Failed to copy file: %s", err)
         sys.exit(1)
     self.progress_hook_finish()
     logger.info("Done.")
     return self.extract_files(tmp_filename)
def add_source():
    """Register a new rule source, prompting for any missing values."""
    args = config.args()

    # Use the command-line value when given, otherwise prompt until a
    # non-empty value is entered.
    name = args.name
    while not name:
        name = raw_input("Name of source: ").strip()

    if sources.source_name_exists(name):
        logger.error("A source with name %s already exists.", name)
        return 1

    url = args.url
    while not url:
        url = raw_input("URL: ").strip()

    sources.save_source_config(sources.SourceConfiguration(name, url=url))
Example #4
0
def list_sources():
    """Print every source in the index, optionally free sources only."""

    def field(label, value, indent="  "):
        # One colorized "label: value" output line.
        print("%s%s: %s" % (indent, util.bright_cyan(label),
                            util.bright_magenta(value)))

    free_only = config.args().free
    if not sources.source_index_exists(config):
        logger.info("No source index found, running update-sources")
        try:
            update_sources()
        except exceptions.ApplicationError as err:
            logger.warning("%s: will use bundled index.", err)
    index = sources.load_source_index(config)
    for name, source in index.get_sources().items():
        # A subscribe-url marks a source that requires a subscription.
        is_not_free = source.get("subscribe-url")
        if free_only and is_not_free:
            continue
        field("Name", name, indent="")
        field("Vendor", source["vendor"])
        field("Summary", source["summary"])
        field("License", source["license"])
        # Optional list-valued fields, rendered comma separated.
        for key, label in (("tags", "Tags"), ("replaces", "Replaces"),
                           ("parameters", "Parameters")):
            if key in source:
                field(label, ", ".join(source[key]))
        if "subscribe-url" in source:
            field("Subscription", source["subscribe-url"])
Example #5
0
def add_source():
    """Register a new rule source, prompting for any missing values."""
    args = config.args()

    # Use the command-line value when given, otherwise prompt until a
    # non-empty value is entered.
    name = args.name
    while not name:
        name = input("Name of source: ").strip()

    if sources.source_name_exists(name):
        logger.error("A source with name %s already exists.", name)
        return 1

    url = args.url
    while not url:
        url = input("URL: ").strip()

    # presumably --no-checksum is a store_false flag, making this the
    # effective "verify checksum" setting -- TODO confirm in the parser.
    checksum = args.no_checksum

    header = args.http_header if args.http_header else None

    new_config = sources.SourceConfiguration(
        name, header=header, url=url, checksum=checksum)
    sources.save_source_config(new_config)
def disable_source():
    """Disable an enabled source by renaming its file to *.disabled."""
    name = config.args().name
    filename = sources.get_enabled_source_filename(name)
    if not os.path.exists(filename):
        logger.debug("Filename %s does not exist.", filename)
        logger.warning("Source %s is not enabled.", name)
        return 0
    disabled_filename = "%s.disabled" % (filename)
    logger.debug("Renaming %s to %s.disabled.", filename, filename)
    os.rename(filename, disabled_filename)
    logger.info("Source %s has been disabled", name)
Example #7
0
 def progress_hook(self, content_length, bytes_read):
     """Render in-place download progress on the terminal.

     Does nothing when --quiet is set or stdout is not a tty.
     """
     if config.args().quiet or not self.istty:
         return
     if content_length:
         percent = int((bytes_read / float(content_length)) * 100)
     else:
         # Unknown or zero content length: no percentage can be shown.
         percent = 0
     buf = " %3d%% - %-30s" % (percent, "%d/%d" %
                               (bytes_read, content_length))
     sys.stdout.write(buf)
     sys.stdout.flush()
     # Backspace over exactly what was written so the next update
     # overwrites this line in place.
     sys.stdout.write("\b" * len(buf))
Example #8
0
def enable_source():
    """Enable a named source from the source index.

    Re-enables a previously disabled source when one exists; otherwise
    resolves the source from the index, collects any required
    parameters (from --param key=val options or interactive prompts),
    and writes a new source configuration. Returns 1 on error.
    """
    name = config.args().name

    # Check if source is already enabled.
    enabled_source_filename = sources.get_enabled_source_filename(name)
    if os.path.exists(enabled_source_filename):
        logger.error("The source %s is already enabled.", name)
        return 1

    # First check if this source was previous disabled and then just
    # re-enable it.
    disabled_source_filename = sources.get_disabled_source_filename(name)
    if os.path.exists(disabled_source_filename):
        logger.info("Re-enabling previous disabled source for %s.", name)
        os.rename(disabled_source_filename, enabled_source_filename)
        return 0

    if not os.path.exists(sources.get_index_filename()):
        logger.warning("Source index does not exist, will use bundled one.")
        logger.warning("Please run suricata-update update-sources.")

    source_index = sources.load_source_index(config)

    if not name in source_index.get_sources():
        logger.error("Unknown source: %s", name)
        return 1

    # Parse key=val options.
    opts = {}
    for param in config.args().params:
        key, val = param.split("=", 1)
        opts[key] = val

    source = source_index.get_sources()[name]

    if "subscribe-url" in source:
        print("The source %s requires a subscription. Subscribe here:" %
              (name))
        print("  %s" % source["subscribe-url"])

    # Collect values for the parameters the source declares, preferring
    # command-line supplied values and prompting for the rest.
    params = {}
    if "parameters" in source:
        for param in source["parameters"]:
            if param in opts:
                params[param] = opts[param]
            else:
                prompt = source["parameters"][param]["prompt"]
                # Keep prompting until a non-empty value is entered.
                while True:
                    r = raw_input("%s (%s): " % (prompt, param))
                    r = r.strip()
                    if r:
                        break
                params[param] = r.strip()
    new_source = sources.SourceConfiguration(name, params=params)

    # If the source directory does not exist, create it. Also create
    # the default rule-source of et/open, unless the source being
    # enabled replaces it.
    source_directory = sources.get_source_directory()
    if not os.path.exists(source_directory):
        try:
            logger.info("Creating directory %s", source_directory)
            os.makedirs(source_directory)
        except Exception as err:
            logger.error("Failed to create directory %s: %s", source_directory,
                         err)
            return 1

        # NOTE: this default-source bootstrapping only runs on first
        # use, when the source directory had to be created above.
        if "replaces" in source and default_source in source["replaces"]:
            logger.debug(
                "Not enabling default source as selected source replaces it")
        elif new_source.name == default_source:
            logger.debug(
                "Not enabling default source as selected source is the default"
            )
        else:
            logger.info("Enabling default source %s", default_source)
            if not source_index.get_source_by_name(default_source):
                logger.error("Default source %s not in index", default_source)
            else:
                default_source_config = sources.SourceConfiguration(
                    default_source)
                write_source_config(default_source_config, True)

    write_source_config(new_source, True)
    logger.info("Source %s enabled", new_source.name)

    # Remove any enabled sources that the new source declares it
    # replaces.
    if "replaces" in source:
        for replaces in source["replaces"]:
            filename = sources.get_enabled_source_filename(replaces)
            if os.path.exists(filename):
                logger.info("Removing source %s as its replaced by %s",
                            replaces, new_source.name)
                logger.debug("Deleting %s", filename)
                os.unlink(filename)
Example #9
0
def _main():
    """Entry point for the default "update" operation.

    Parses arguments, determines the Suricata version, loads and
    filters rule sources, writes the output rules, tests them with
    Suricata, and optionally runs the reload command. Returns a
    process exit code.
    """
    global args
    args = parsers.parse_arg()

    # Go verbose or quiet sooner than later.
    if args.verbose:
        logger.setLevel(logging.DEBUG)
    if args.quiet:
        logger.setLevel(logging.WARNING)

    logger.debug("This is suricata-update version %s (rev: %s); Python: %s" %
                 (version, revision, sys.version.replace("\n", "- ")))

    config.init(args)

    # Error out if any reserved/unimplemented arguments were set.
    unimplemented_args = [
        "disable",
        "enable",
        "modify",
        "drop",
    ]
    for arg in unimplemented_args:
        if hasattr(args, arg) and getattr(args, arg):
            logger.error("--%s not implemented", arg)
            return 1

    suricata_path = config.get("suricata")

    # Now parse the Suricata version. If provided on the command line,
    # use that, otherwise attempt to get it from Suricata.
    if args.suricata_version:
        # The Suricata version was passed on the command line, parse it.
        suricata_version = engine.parse_version(args.suricata_version)
        if not suricata_version:
            logger.error("Failed to parse provided Suricata version: %s" %
                         (args.suricata_version))
            return 1
        logger.info("Forcing Suricata version to %s." %
                    (suricata_version.full))
    elif suricata_path:
        suricata_version = engine.get_version(suricata_path)
        if suricata_version:
            logger.info("Found Suricata version %s at %s." %
                        (str(suricata_version.full), suricata_path))
        else:
            logger.error("Failed to get Suricata version.")
            return 1
    else:
        logger.info("Using default Suricata version of %s",
                    DEFAULT_SURICATA_VERSION)
        suricata_version = engine.parse_version(DEFAULT_SURICATA_VERSION)

    # Provide the Suricata version to the net module to add to the
    # User-Agent.
    net.set_user_agent_suricata_version(suricata_version.full)

    if args.subcommand:
        if args.subcommand == "check-versions" and hasattr(args, "func"):
            return args.func(suricata_version)
        elif hasattr(args, "func"):
            return args.func()
        elif args.subcommand != "update":
            logger.error("Unknown command: %s", args.subcommand)
            return 1

    if args.dump_sample_configs:
        return dump_sample_configs()

    # If --no-ignore was provided, clear any ignores provided in the
    # config.
    if args.no_ignore:
        config.set(config.IGNORE_KEY, [])

    file_tracker = FileTracker()

    disable_matchers = []
    enable_matchers = []
    modify_filters = []
    drop_filters = []

    # Load user provided disable filters.
    disable_conf_filename = config.get("disable-conf")
    if disable_conf_filename and os.path.exists(disable_conf_filename):
        logger.info("Loading %s.", disable_conf_filename)
        disable_matchers += load_matchers(disable_conf_filename)

    # Load user provided enable filters.
    enable_conf_filename = config.get("enable-conf")
    if enable_conf_filename and os.path.exists(enable_conf_filename):
        logger.info("Loading %s.", enable_conf_filename)
        enable_matchers += load_matchers(enable_conf_filename)

    # Load user provided modify filters.
    modify_conf_filename = config.get("modify-conf")
    if modify_conf_filename and os.path.exists(modify_conf_filename):
        logger.info("Loading %s.", modify_conf_filename)
        modify_filters += load_filters(modify_conf_filename)

    # Load user provided drop filters.
    drop_conf_filename = config.get("drop-conf")
    if drop_conf_filename and os.path.exists(drop_conf_filename):
        logger.info("Loading %s.", drop_conf_filename)
        drop_filters += load_drop_filters(drop_conf_filename)

    # Load the Suricata configuration if we can.
    suriconf = None
    if config.get("suricata-conf") and \
       os.path.exists(config.get("suricata-conf")) and \
       suricata_path and os.path.exists(suricata_path):
        logger.info("Loading %s", config.get("suricata-conf"))
        try:
            suriconf = engine.Configuration.load(config.get("suricata-conf"),
                                                 suricata_path=suricata_path)
        except subprocess.CalledProcessError:
            return 1

    # Disable rule that are for app-layers that are not enabled.
    if suriconf:
        for key in suriconf.keys():
            # Raw string: "\." in a plain string literal is an invalid
            # escape sequence in modern Python.
            m = re.match(r"app-layer\.protocols\.([^\.]+)\.enabled", key)
            if m:
                proto = m.group(1)
                if not suriconf.is_true(key, ["detection-only"]):
                    logger.info("Disabling rules for protocol %s", proto)
                    disable_matchers.append(
                        matchers_mod.ProtoRuleMatcher(proto))
                elif proto == "smb" and suriconf.build_info:
                    # Special case for SMB rules. For versions less
                    # than 5, disable smb rules if Rust is not
                    # available.
                    if suriconf.build_info["version"].major < 5:
                        if "RUST" not in suriconf.build_info["features"]:
                            logger.info(
                                "Disabling rules for protocol {}".format(
                                    proto))
                            disable_matchers.append(
                                matchers_mod.ProtoRuleMatcher(proto))

    # Check that the cache directory exists and is writable.
    if not os.path.exists(config.get_cache_dir()):
        try:
            os.makedirs(config.get_cache_dir(), mode=0o770)
        except Exception:
            logger.warning(
                "Cache directory does not exist and could not be created. "
                "/var/tmp will be used instead.")
            config.set_cache_dir("/var/tmp")

    files = load_sources(suricata_version)

    load_dist_rules(files)

    # Remove ignored files.
    for filename in list(files.keys()):
        if ignore_file(config.get("ignore"), filename):
            logger.info("Ignoring file %s" % (filename))
            del files[filename]

    rules = []
    for filename in sorted(files):
        if not filename.endswith(".rules"):
            continue
        logger.debug("Parsing %s." % (filename))
        rules += rule_mod.parse_fileobj(io.BytesIO(files[filename]), filename)

    rulemap = build_rule_map(rules)
    logger.info("Loaded %d rules." % (len(rules)))

    # Counts of user enabled and modified rules.
    enable_count = 0
    modify_count = 0
    drop_count = 0

    # List of rules disabled by user. Used for counting, and to log
    # rules that are re-enabled to meet flowbit requirements.
    disabled_rules = []

    for key, rule in rulemap.items():

        for matcher in disable_matchers:
            if rule.enabled and matcher.match(rule):
                logger.debug("Disabling: %s" % (rule.brief()))
                rule.enabled = False
                disabled_rules.append(rule)

        for matcher in enable_matchers:
            if not rule.enabled and matcher.match(rule):
                logger.debug("Enabling: %s" % (rule.brief()))
                rule.enabled = True
                enable_count += 1

        for fltr in drop_filters:
            if fltr.match(rule):
                rulemap[rule.id] = fltr.run(rule)
                drop_count += 1

    # Apply modify filters.
    for fltr in modify_filters:
        for key, rule in rulemap.items():
            if fltr.match(rule):
                new_rule = fltr.run(rule)
                if new_rule and new_rule.format() != rule.format():
                    rulemap[rule.id] = new_rule
                    modify_count += 1

    # Check if we should disable ja3 rules.
    try:
        disable_ja3(suriconf, rulemap, disabled_rules)
    except Exception as err:
        logger.error("Failed to dynamically disable ja3 rules: %s" % (err))

    # Check rule vars, disabling rules that use unknown vars.
    check_vars(suriconf, rulemap)

    logger.info("Disabled %d rules." % (len(disabled_rules)))
    logger.info("Enabled %d rules." % (enable_count))
    logger.info("Modified %d rules." % (modify_count))
    logger.info("Dropped %d rules." % (drop_count))

    # Fixup flowbits.
    resolve_flowbits(rulemap, disabled_rules)

    # Check that output directory exists, creating it if needed.
    check_output_directory(config.get_output_dir())

    # Check that output directory is writable.
    if not os.access(config.get_output_dir(), os.W_OK):
        logger.error("Output directory is not writable: %s",
                     config.get_output_dir())
        return 1

    # Backup the output directory.
    logger.info("Backing up current rules.")
    backup_directory = util.mktempdir()
    shutil.copytree(config.get_output_dir(),
                    os.path.join(backup_directory, "backup"),
                    ignore=copytree_ignore_backup)

    if not args.no_merge:
        # The default, write out a merged file.
        output_filename = os.path.join(config.get_output_dir(),
                                       DEFAULT_OUTPUT_RULE_FILENAME)
        file_tracker.add(output_filename)
        write_merged(os.path.join(output_filename), rulemap)
    else:
        for filename in files:
            file_tracker.add(
                os.path.join(config.get_output_dir(),
                             os.path.basename(filename)))
        write_to_directory(config.get_output_dir(), files, rulemap)

    if args.yaml_fragment:
        file_tracker.add(args.yaml_fragment)
        write_yaml_fragment(args.yaml_fragment, files)

    if args.sid_msg_map:
        write_sid_msg_map(args.sid_msg_map, rulemap, version=1)
    if args.sid_msg_map_2:
        write_sid_msg_map(args.sid_msg_map_2, rulemap, version=2)

    if args.threshold_in and args.threshold_out:
        file_tracker.add(args.threshold_out)
        threshold_processor = ThresholdProcessor()
        threshold_processor.process(open(args.threshold_in),
                                    open(args.threshold_out, "w"), rulemap)

    if not args.force and not file_tracker.any_modified():
        logger.info("No changes detected, exiting.")
        notes.dump_notes()
        return 0

    # Set these containers to None to free the memory before testing Suricata
    # which may consume a lot of memory by itself. Ideally we should refactor
    # this large function into multiple methods so these go out of scope and
    # get removed automatically.
    rulemap = None
    rules = None
    files = None

    if not test_suricata(suricata_path):
        logger.error("Suricata test failed, aborting.")
        logger.error("Restoring previous rules.")
        copytree(os.path.join(backup_directory, "backup"),
                 config.get_output_dir())
        return 1

    if not config.args().no_reload and config.get("reload-command"):
        logger.info("Running %s." % (config.get("reload-command")))
        rc = subprocess.Popen(config.get("reload-command"), shell=True).wait()
        if rc != 0:
            logger.error("Reload command exited with error: %d", rc)

    logger.info("Done.")

    notes.dump_notes()

    return 0
Example #10
0
def load_sources(suricata_version):
    """Resolve all configured rule sources to URLs and download them.

    Returns a dict mapping filenames to file contents, populated by
    Fetch().run() for each remote source, plus any local rule files.
    """
    files = {}

    # Each entry is a (url, http-header, checksum) tuple.
    urls = []

    http_header = None
    checksum = True

    # Add any URLs added with the --url command line parameter.
    if config.args().url:
        for url in config.args().url:
            urls.append((url, http_header, checksum))

    # Get the new style sources.
    enabled_sources = sources.get_enabled_sources()

    # Convert the Suricata version to a version string.
    version_string = "%d.%d.%d" % (
        suricata_version.major, suricata_version.minor, suricata_version.patch)

    # Construct the URL replacement parameters that are internal to
    # suricata-update.
    internal_params = {"__version__": version_string}

    # If we have new sources, we also need to load the index.
    if enabled_sources:
        index_filename = sources.get_index_filename()
        if not os.path.exists(index_filename):
            logger.warning("No index exists, will use bundled index.")
            logger.warning("Please run suricata-update update-sources.")
        if os.path.exists(index_filename) and time.time() - \
                os.stat(index_filename).st_mtime > INDEX_EXPIRATION_TIME:
            logger.warning(
                "Source index is older than 2 weeks. "
                "Please update with suricata-update update-sources.")
        index = sources.Index(index_filename)

        for (name, source) in enabled_sources.items():
            params = source["params"] if "params" in source else {}
            params.update(internal_params)
            if "url" in source:
                # No need to go off to the index.
                http_header = source.get("http_header")
                checksum = source.get("checksum")
                url = (source["url"] % params, http_header, checksum)
                logger.debug("Resolved source %s to URL %s.", name, url[0])
            else:
                if not index:
                    raise exceptions.ApplicationError(
                        "Source index is required for source %s; "
                        "run suricata-update update-sources" %
                        (source["source"]))
                source_config = index.get_source_by_name(name)
                try:
                    checksum = source_config["checksum"]
                except Exception:
                    # Sources without an explicit checksum setting
                    # default to checksum verification enabled. A bare
                    # except here would also swallow KeyboardInterrupt.
                    checksum = True
                # NOTE(review): http_header may carry over from a
                # previously processed "url"-style source here --
                # confirm whether index-resolved sources should reset
                # it to None.
                url = (index.resolve_url(name, params), http_header, checksum)
                logger.debug("Resolved source %s to URL %s.", name, url)
            urls.append(url)

    if config.get("sources"):
        for url in config.get("sources"):
            if not isinstance(url, str):
                raise exceptions.InvalidConfigurationError(
                    "Invalid datatype for source URL: %s" % (str(url)))
            url = (url % internal_params, http_header, checksum)
            logger.debug("Adding source %s.", url)
            urls.append(url)

    # If --etopen is on the command line, make sure its added. Or if
    # there are no URLs, default to ET/Open.
    if config.get("etopen") or not urls:
        if not config.args().offline and not urls:
            logger.info(
                "No sources configured, will use Emerging Threats Open")
        urls.append(
            (sources.get_etopen_url(internal_params), http_header, checksum))

    # Converting the URLs to a set removes dupes.
    urls = set(urls)

    # Now download each URL.
    for url in urls:
        Fetch().run(url, files)

    # Now load local rules.
    if config.get("local") is not None:
        for local in config.get("local"):
            load_local(local, files)

    return files
Example #11
0
 def progress_hook_finish(self):
     """Terminate the in-place progress line with a newline."""
     if self.istty and not config.args().quiet:
         sys.stdout.write("\n")
         sys.stdout.flush()
Example #12
0
def _main():
    """Entry point for suricata-update.

    Parses command line arguments, dispatches to a sub-command if one
    was given, otherwise performs the default "update" action: load
    rule sources, apply user filters, write the output rules, test the
    Suricata configuration and optionally run the reload command.

    Returns a process exit code: 0 on success, 1 on error.
    """
    global args

    # Global options live on their own parser so they can be attached
    # (via parents=) to every sub-command parser below.
    global_parser = argparse.ArgumentParser(add_help=False)
    global_parser.add_argument("-v",
                               "--verbose",
                               action="store_true",
                               default=None,
                               help="Be more verbose")
    global_parser.add_argument(
        "-q",
        "--quiet",
        action="store_true",
        default=False,
        help="Be quiet, warning and error messages only")
    global_parser.add_argument(
        "-D",
        "--data-dir",
        metavar="<directory>",
        dest="data_dir",
        help="Data directory (default: /var/lib/suricata)")
    global_parser.add_argument(
        "-c",
        "--config",
        metavar="<filename>",
        help="configuration file (default: /etc/suricata/update.yaml)")
    global_parser.add_argument(
        "--suricata-conf",
        metavar="<filename>",
        help="configuration file (default: /etc/suricata/suricata.yaml)")
    global_parser.add_argument("--suricata",
                               metavar="<path>",
                               help="Path to Suricata program")
    global_parser.add_argument("--suricata-version",
                               metavar="<version>",
                               help="Override Suricata version")
    global_parser.add_argument("--user-agent",
                               metavar="<user-agent>",
                               help="Set custom user-agent string")
    global_parser.add_argument(
        "--no-check-certificate",
        action="store_true",
        default=None,
        help="Disable server SSL/TLS certificate verification")

    global_args, rem = global_parser.parse_known_args()

    # No sub-command given (or the first remaining token is an option):
    # default to the "update" sub-command.
    if not rem or rem[0].startswith("-"):
        rem.insert(0, "update")

    parser = argparse.ArgumentParser()
    subparsers = parser.add_subparsers(dest="subcommand", metavar="<command>")

    # The "update" (default) sub-command parser.
    update_parser = subparsers.add_parser("update",
                                          add_help=False,
                                          parents=[global_parser])

    update_parser.add_argument("-o",
                               "--output",
                               metavar="<directory>",
                               dest="output",
                               help="Directory to write rules to")
    update_parser.add_argument(
        "-f",
        "--force",
        action="store_true",
        default=False,
        help="Force operations that might otherwise be skipped")
    update_parser.add_argument("--yaml-fragment",
                               metavar="<filename>",
                               help="Output YAML fragment for rule inclusion")
    update_parser.add_argument(
        "--url",
        metavar="<url>",
        action="append",
        default=[],
        help=
        "URL to use instead of auto-generating one (can be specified multiple times)"
    )
    update_parser.add_argument(
        "--local",
        metavar="<path>",
        action="append",
        default=[],
        help="Local rule files or directories (can be specified multiple times)"
    )
    update_parser.add_argument("--sid-msg-map",
                               metavar="<filename>",
                               help="Generate a sid-msg.map file")
    update_parser.add_argument("--sid-msg-map-2",
                               metavar="<filename>",
                               help="Generate a v2 sid-msg.map file")

    update_parser.add_argument("--disable-conf",
                               metavar="<filename>",
                               help="Filename of rule disable filters")
    update_parser.add_argument("--enable-conf",
                               metavar="<filename>",
                               help="Filename of rule enable filters")
    update_parser.add_argument("--modify-conf",
                               metavar="<filename>",
                               help="Filename of rule modification filters")
    update_parser.add_argument("--drop-conf",
                               metavar="<filename>",
                               help="Filename of drop rules filters")

    update_parser.add_argument(
        "--ignore",
        metavar="<pattern>",
        action="append",
        default=[],
        help=
        "Filenames to ignore (can be specified multiple times; default: *deleted.rules)"
    )
    update_parser.add_argument("--no-ignore",
                               action="store_true",
                               default=False,
                               help="Disables the ignore option.")

    update_parser.add_argument(
        "--threshold-in",
        metavar="<filename>",
        help="Filename of rule thresholding configuration")
    update_parser.add_argument(
        "--threshold-out",
        metavar="<filename>",
        help="Output of processed threshold configuration")

    update_parser.add_argument(
        "--dump-sample-configs",
        action="store_true",
        default=False,
        help="Dump sample config files to current directory")
    update_parser.add_argument("--etopen",
                               action="store_true",
                               help="Use ET-Open rules (default)")
    update_parser.add_argument("--reload-command",
                               metavar="<command>",
                               help="Command to run after update if modified")
    update_parser.add_argument("--no-reload",
                               action="store_true",
                               default=False,
                               help="Disable reload")
    update_parser.add_argument("-T",
                               "--test-command",
                               metavar="<command>",
                               help="Command to test Suricata configuration")
    update_parser.add_argument("--no-test",
                               action="store_true",
                               default=False,
                               help="Disable testing rules with Suricata")
    update_parser.add_argument("-V",
                               "--version",
                               action="store_true",
                               default=False,
                               help="Display version")

    update_parser.add_argument(
        "--no-merge",
        action="store_true",
        default=False,
        help="Do not merge the rules into a single file")

    update_parser.add_argument("-h", "--help", action="store_true")

    # Hidden argument, --now to bypass the timebased bypass of
    # updating a ruleset.
    update_parser.add_argument("--now",
                               default=False,
                               action="store_true",
                               help=argparse.SUPPRESS)

    # The Python 2.7 argparse module does prefix matching which can be
    # undesirable. Reserve some names here that would match existing
    # options to prevent prefix matching.
    update_parser.add_argument("--disable",
                               default=False,
                               help=argparse.SUPPRESS)
    update_parser.add_argument("--enable",
                               default=False,
                               help=argparse.SUPPRESS)
    update_parser.add_argument("--modify",
                               default=False,
                               help=argparse.SUPPRESS)
    update_parser.add_argument("--drop", default=False, help=argparse.SUPPRESS)

    # Register the source-management sub-commands; each module's
    # register() wires its own arguments and sets args.func.
    commands.listsources.register(
        subparsers.add_parser("list-sources", parents=[global_parser]))
    commands.listenabledsources.register(
        subparsers.add_parser("list-enabled-sources", parents=[global_parser]))
    commands.addsource.register(
        subparsers.add_parser("add-source", parents=[global_parser]))
    commands.updatesources.register(
        subparsers.add_parser("update-sources", parents=[global_parser]))
    commands.enablesource.register(
        subparsers.add_parser("enable-source", parents=[global_parser]))
    commands.disablesource.register(
        subparsers.add_parser("disable-source", parents=[global_parser]))
    commands.removesource.register(
        subparsers.add_parser("remove-source", parents=[global_parser]))

    args = parser.parse_args(rem)

    # Merge global args into args.
    # A global value fills in any attribute the sub-parser did not set,
    # or one it left as None (the global-only defaults above use None
    # so an unset value is distinguishable from an explicit False).
    for arg in vars(global_args):
        if not hasattr(args, arg):
            setattr(args, arg, getattr(global_args, arg))
        elif hasattr(args, arg) and getattr(args, arg) is None:
            setattr(args, arg, getattr(global_args, arg))

    # Go verbose or quiet sooner than later.
    if args.verbose:
        logger.setLevel(logging.DEBUG)
    if args.quiet:
        logger.setLevel(logging.WARNING)

    config.init(args)

    # Error out if any reserved/unimplemented arguments were set.
    unimplemented_args = [
        "disable",
        "enable",
        "modify",
        "drop",
    ]
    for arg in unimplemented_args:
        if hasattr(args, arg) and getattr(args, arg):
            logger.error("--%s not implemented", arg)
            return 1

    logger.debug("This is suricata-update version %s (rev: %s); Python: %s" %
                 (version, revision, sys.version.replace("\n", "- ")))

    suricata_path = config.get("suricata")

    # Now parse the Suricata version. If provided on the command line,
    # use that, otherwise attempt to get it from Suricata.
    if args.suricata_version:
        # The Suricata version was passed on the command line, parse it.
        suricata_version = suricata.update.engine.parse_version(
            args.suricata_version)
        if not suricata_version:
            logger.error("Failed to parse provided Suricata version: %s" %
                         (args.suricata_version))
            return 1
        logger.info("Forcing Suricata version to %s." %
                    (suricata_version.full))
    elif suricata_path:
        suricata_version = suricata.update.engine.get_version(suricata_path)
        if suricata_version:
            logger.info("Found Suricata version %s at %s." %
                        (str(suricata_version.full), suricata_path))
        else:
            logger.error("Failed to get Suricata version.")
            return 1
    else:
        logger.info("Using default Suricata version of %s",
                    DEFAULT_SURICATA_VERSION)
        suricata_version = suricata.update.engine.parse_version(
            DEFAULT_SURICATA_VERSION)

    # Provide the Suricata version to the net module to add to the
    # User-Agent.
    suricata.update.net.set_user_agent_suricata_version(suricata_version.full)

    # Load custom user-agent-string.
    user_agent = config.get("user-agent")
    if user_agent:
        logger.info("Using user-agent: %s.", user_agent)
        suricata.update.net.set_custom_user_agent(user_agent)

    # Dispatch to a registered sub-command; "update" falls through to
    # the default flow below.
    if args.subcommand:
        if hasattr(args, "func"):
            return args.func()
        elif args.subcommand != "update":
            logger.error("Unknown command: %s", args.subcommand)
            return 1

    if args.dump_sample_configs:
        return dump_sample_configs()

    if args.version:
        print("suricata-update version %s (rev: %s)" % (version, revision))
        return 0

    if args.help:
        print(update_parser.format_help())
        print("""other commands:
    update-sources             Update the source index
    list-sources               List available sources
    enable-source              Enable a source from the index
    disable-source             Disable an enabled source
    remove-source              Remove an enabled or disabled source
    list-enabled-sources       List all enabled sources
    add-source                 Add a new source by URL
""")
        return 0

    # If --no-ignore was provided, clear any ignores provided in the
    # config.
    if args.no_ignore:
        config.set(config.IGNORE_KEY, [])

    file_tracker = FileTracker()

    disable_matchers = []
    enable_matchers = []
    modify_filters = []
    drop_filters = []

    # Load user provided disable filters.
    disable_conf_filename = config.get("disable-conf")
    if disable_conf_filename and os.path.exists(disable_conf_filename):
        logger.info("Loading %s.", disable_conf_filename)
        disable_matchers += load_matchers(disable_conf_filename)

    # Load user provided enable filters.
    enable_conf_filename = config.get("enable-conf")
    if enable_conf_filename and os.path.exists(enable_conf_filename):
        logger.info("Loading %s.", enable_conf_filename)
        enable_matchers += load_matchers(enable_conf_filename)

    # Load user provided modify filters.
    modify_conf_filename = config.get("modify-conf")
    if modify_conf_filename and os.path.exists(modify_conf_filename):
        logger.info("Loading %s.", modify_conf_filename)
        modify_filters += load_filters(modify_conf_filename)

    # Load user provided drop filters.
    drop_conf_filename = config.get("drop-conf")
    if drop_conf_filename and os.path.exists(drop_conf_filename):
        logger.info("Loading %s.", drop_conf_filename)
        drop_filters += load_drop_filters(drop_conf_filename)

    # NOTE(review): assumes config.get("suricata-conf") is never None
    # here — os.path.exists(None) would raise TypeError; confirm config
    # supplies a default value.
    if os.path.exists(config.get("suricata-conf")) and \
       suricata_path and os.path.exists(suricata_path):
        logger.info("Loading %s", config.get("suricata-conf"))
        suriconf = suricata.update.engine.Configuration.load(
            config.get("suricata-conf"), suricata_path=suricata_path)
        # Disable rules for app-layer protocols that Suricata itself has
        # disabled ("detection-only" still counts as enabled here).
        for key in suriconf.keys():
            if key.startswith("app-layer.protocols") and \
               key.endswith(".enabled"):
                if not suriconf.is_true(key, ["detection-only"]):
                    proto = key.split(".")[2]
                    logger.info("Disabling rules with proto %s", proto)
                    disable_matchers.append(ProtoRuleMatcher(proto))

    # Check that the cache directory exists and is writable.
    if not os.path.exists(config.get_cache_dir()):
        try:
            os.makedirs(config.get_cache_dir(), mode=0o770)
        except Exception as err:
            logger.warning(
                "Cache directory does not exist and could not be created. "
                "/var/tmp will be used instead.")
            config.set_cache_dir("/var/tmp")

    files = load_sources(suricata_version)

    load_dist_rules(files)

    # Remove ignored files.
    for filename in list(files.keys()):
        if ignore_file(config.get("ignore"), filename):
            logger.info("Ignoring file %s" % (filename))
            del (files[filename])

    rules = []
    for filename in files:
        if not filename.endswith(".rules"):
            continue
        logger.debug("Parsing %s." % (filename))
        rules += suricata.update.rule.parse_fileobj(
            io.BytesIO(files[filename]), filename)

    rulemap = build_rule_map(rules)
    logger.info("Loaded %d rules." % (len(rules)))

    # Counts of user enabled and modified rules.
    enable_count = 0
    modify_count = 0
    drop_count = 0

    # List of rules disabled by user. Used for counting, and to log
    # rules that are re-enabled to meet flowbit requirements.
    disabled_rules = []

    # Apply disable, enable and drop filters in a single pass over the
    # rule map.
    for key, rule in rulemap.items():

        for matcher in disable_matchers:
            if rule.enabled and matcher.match(rule):
                logger.debug("Disabling: %s" % (rule.brief()))
                rule.enabled = False
                disabled_rules.append(rule)

        for matcher in enable_matchers:
            if not rule.enabled and matcher.match(rule):
                logger.debug("Enabling: %s" % (rule.brief()))
                rule.enabled = True
                enable_count += 1

        for filter in drop_filters:
            if filter.match(rule):
                rulemap[rule.id] = filter.filter(rule)
                drop_count += 1

    # Apply modify filters.
    # NOTE(review): rulemap entries are replaced while iterating
    # items(); keys are only overwritten (never added/removed), which
    # presumably keeps the iteration valid — confirm.
    for fltr in modify_filters:
        for key, rule in rulemap.items():
            if fltr.match(rule):
                new_rule = fltr.filter(rule)
                if new_rule and new_rule.format() != rule.format():
                    rulemap[rule.id] = new_rule
                    modify_count += 1

    logger.info("Disabled %d rules." % (len(disabled_rules)))
    logger.info("Enabled %d rules." % (enable_count))
    logger.info("Modified %d rules." % (modify_count))
    logger.info("Dropped %d rules." % (drop_count))

    # Fixup flowbits.
    resolve_flowbits(rulemap, disabled_rules)

    # Check that output directory exists.
    if not os.path.exists(config.get_output_dir()):
        try:
            os.makedirs(config.get_output_dir(), mode=0o770)
        except Exception as err:
            logger.error(
                "Output directory does not exist and could not be created: %s",
                config.get_output_dir())
            return 1

    # Check that output directory is writable.
    if not os.access(config.get_output_dir(), os.W_OK):
        logger.error("Output directory is not writable: %s",
                     config.get_output_dir())
        return 1

    # Backup the output directory.
    logger.info("Backing up current rules.")
    backup_directory = util.mktempdir()
    shutil.copytree(config.get_output_dir(),
                    os.path.join(backup_directory, "backup"),
                    ignore=copytree_ignore_backup)

    if not args.no_merge:
        # The default, write out a merged file.
        output_filename = os.path.join(config.get_output_dir(),
                                       DEFAULT_OUTPUT_RULE_FILENAME)
        file_tracker.add(output_filename)
        write_merged(os.path.join(output_filename), rulemap)
    else:
        for filename in files:
            file_tracker.add(
                os.path.join(config.get_output_dir(),
                             os.path.basename(filename)))
        write_to_directory(config.get_output_dir(), files, rulemap)

    if args.yaml_fragment:
        file_tracker.add(args.yaml_fragment)
        write_yaml_fragment(args.yaml_fragment, files)

    if args.sid_msg_map:
        write_sid_msg_map(args.sid_msg_map, rulemap, version=1)
    if args.sid_msg_map_2:
        write_sid_msg_map(args.sid_msg_map_2, rulemap, version=2)

    if args.threshold_in and args.threshold_out:
        file_tracker.add(args.threshold_out)
        threshold_processor = ThresholdProcessor()
        # NOTE(review): both file objects are opened without an explicit
        # close; consider "with" blocks so the output is flushed
        # deterministically.
        threshold_processor.process(open(args.threshold_in),
                                    open(args.threshold_out, "w"), rulemap)

    if not args.force and not file_tracker.any_modified():
        logger.info("No changes detected, exiting.")
        return 0

    # On test failure, restore the backup taken above before bailing.
    if not test_suricata(suricata_path):
        logger.error("Suricata test failed, aborting.")
        logger.error("Restoring previous rules.")
        copytree(os.path.join(backup_directory, "backup"),
                 config.get_output_dir())
        return 1

    # The reload command is user-supplied config, run via the shell by
    # design.
    if not config.args().no_reload and config.get("reload-command"):
        logger.info("Running %s." % (config.get("reload-command")))
        rc = subprocess.Popen(config.get("reload-command"), shell=True).wait()
        if rc != 0:
            logger.error("Reload command exited with error: %d", rc)

    logger.info("Done.")

    return 0
Exemple #13
0
def load_sources(suricata_version):
    """Resolve, download and load all configured rule sources.

    Collects URLs from --url arguments, enabled sources (resolved
    through the source index when needed), and the "sources" config
    key; falls back to ET/Open when nothing is configured. Each unique
    URL is fetched, then local rule files are loaded on top.

    :param suricata_version: parsed Suricata version object (provides
        major/minor/patch) used for URL parameter substitution.
    :returns: dict mapping filenames to file contents.
    :raises exceptions.ApplicationError: if an enabled source needs the
        source index and it has not been downloaded.
    :raises exceptions.InvalidConfigurationError: if a configured
        source URL is not a string.
    """
    files = {}

    urls = []

    # Add any URLs added with the --url command line parameter.
    if config.args().url:
        for url in config.args().url:
            urls.append(url)

    # Get the new style sources.
    enabled_sources = sources.get_enabled_sources()

    # Convert the Suricata version to a version string.
    version_string = "%d.%d.%d" % (
        suricata_version.major, suricata_version.minor, suricata_version.patch)

    # Construct the URL replacement parameters that are internal to
    # suricata-update.
    internal_params = {"__version__": version_string}

    # If we have new sources, we also need to load the index.
    if enabled_sources:
        index_filename = os.path.join(config.get_cache_dir(),
                                      sources.SOURCE_INDEX_FILENAME)
        if os.path.exists(index_filename):
            index = sources.Index(index_filename)
        else:
            index = None

        for (name, source) in enabled_sources.items():
            params = source["params"] if "params" in source else {}
            params.update(internal_params)
            if "url" in source:
                # No need to go off to the index.
                url = source["url"] % params
            else:
                if not index:
                    raise exceptions.ApplicationError(
                        "Source index is required for source %s; "
                        "run suricata-update update-sources" %
                        (source["source"]))
                url = index.resolve_url(name, params)
            logger.debug("Resolved source %s to URL %s.", name, url)
            urls.append(url)

    if config.get("sources"):
        for url in config.get("sources"):
            # Reject non-string entries early with a clear error.
            if not isinstance(url, str):
                raise exceptions.InvalidConfigurationError(
                    "Invalid datatype for source URL: %s" % (str(url)))
            url = url % internal_params
            logger.debug("Adding source %s.", url)
            urls.append(url)

    # If --etopen is on the command line, make sure its added. Or if
    # there are no URLs, default to ET/Open.
    if config.get("etopen") or not urls:
        if not urls:
            logger.info(
                "No sources configured, will use Emerging Threats Open")
        urls.append(sources.get_etopen_url(internal_params))

    # Converting the URLs to a set removed dupes.
    urls = set(urls)

    # Now download each URL.
    for url in urls:
        Fetch().run(url, files)

    # Now load local rules. Guard against "local" being unset (None) in
    # the configuration, which would otherwise raise a TypeError.
    if config.get("local") is not None:
        for local in config.get("local"):
            load_local(local, files)

    return files
Exemple #14
0
def list_sources():
    enabled = config.args().enabled or \
        config.args().subcommand == "list-enabled-sources"

    if enabled:
        found = False

        # First list sources from the main config.
        config_sources = config.get("sources")
        if config_sources:
            found = True
            print("From %s:" % (config.filename))
            for source in config_sources:
                print("  - %s" % (source))

        # And local files.
        local = config.get("local")
        if local:
            found = True
            print("Local files/directories:")
            for filename in local:
                print("  - %s" % (filename))

        enabled_sources = sources.get_enabled_sources()
        if enabled_sources:
            found = True
            print("Enabled sources:")
            for source in enabled_sources.values():
                print("  - %s" % (source["source"]))

        # If no enabled sources were found, log it.
        if not found:
            logger.warning("No enabled sources.")
        return 0

    free_only = config.args().free
    if not sources.source_index_exists(config):
        logger.info("No source index found, running update-sources")
        try:
            update_sources()
        except exceptions.ApplicationError as err:
            logger.warning("%s: will use bundled index.", err)
    index = sources.load_source_index(config)
    for name, source in index.get_sources().items():
        is_not_free = source.get("subscribe-url")
        if free_only and is_not_free:
            continue
        if not config.args().all:
            if source.get("deprecated") is not None or \
               source.get("obsolete") is not None:
                continue
        print("%s: %s" % (util.bright_cyan("Name"), util.bright_magenta(name)))
        print("  %s: %s" % (util.bright_cyan("Vendor"),
                            util.bright_magenta(source["vendor"])))
        print("  %s: %s" % (util.bright_cyan("Summary"),
                            util.bright_magenta(source["summary"])))
        print("  %s: %s" % (util.bright_cyan("License"),
                            util.bright_magenta(source["license"])))
        if "tags" in source:
            print("  %s: %s" %
                  (util.bright_cyan("Tags"),
                   util.bright_magenta(", ".join(source["tags"]))))
        if "replaces" in source:
            print("  %s: %s" %
                  (util.bright_cyan("Replaces"),
                   util.bright_magenta(", ".join(source["replaces"]))))
        if "parameters" in source:
            print("  %s: %s" %
                  (util.bright_cyan("Parameters"),
                   util.bright_magenta(", ".join(source["parameters"]))))
        if "subscribe-url" in source:
            print("  %s: %s" % (util.bright_cyan("Subscription"),
                                util.bright_magenta(source["subscribe-url"])))
        if "deprecated" in source:
            print("  %s: %s" % (util.orange("Deprecated"),
                                util.bright_magenta(source["deprecated"])))
        if "obsolete" in source:
            print("  %s: %s" % (util.orange("Obsolete"),
                                util.bright_magenta(source["obsolete"])))