Example #1
def list_enabled_sources():

    found = False

    # First list sources from the main config.
    config_sources = config.get("sources")
    if config_sources:
        found = True
        print("From %s:" % (config.filename))
        for source in config_sources:
            print("  - %s" % (source))

    # And local files.
    local = config.get("local")
    if local:
        found = True
        print("Local files/directories:")
        for filename in local:
            print("  - %s" % (filename))

    enabled_sources = sources.get_enabled_sources()
    if enabled_sources:
        found = True
        print("Enabled sources:")
        for source in enabled_sources.values():
            print("  - %s" % (source["source"]))

    # If no enabled sources were found, log it.
    if not found:
        logger.warning("No enabled sources.")
Example #2
def build_user_agent():
    has_custom_user_agent = config.has("user-agent")
    if has_custom_user_agent:
        user_agent = config.get("user-agent")
        if user_agent is None or len(user_agent.strip()) == 0:
            logger.debug("Suppressing HTTP User-Agent header")
            return None
        return user_agent

    params = []
    try:
        params.append("OS: {}".format(platform.system()))
    except Exception as err:
        logger.error("Failed to set user-agent OS: {}".format(str(err)))
    try:
        params.append("CPU: {}".format(osinfo.arch()))
    except Exception as err:
        logger.error("Failed to set user-agent architecture: {}".format(
            str(err)))
    try:
        params.append("Python: {}".format(platform.python_version()))
    except Exception as err:
        logger.error("Failed to set user-agent python version: {}".format(
            str(err)))
    try:
        params.append("Dist: {}".format(osinfo.dist()))
    except Exception as err:
        logger.error("Failed to set user-agent distribution: {}".format(
            str(err)))

    params.append("Suricata: %s" % (user_agent_suricata_verison))

    return "Suricata-Update/%s (%s)" % (version, "; ".join(params))
Example #3
def get(url, fileobj, progress_hook=None):
    """ Perform a GET request against a URL writing the contents into
    the provideded file like object.

    :param url: The URL to fetch
    :param fileobj: The fileobj to write the content to
    :param progress_hook: The function to call with progress updates

    :returns: Returns a tuple containing the number of bytes read and
      the result of the info() function from urllib2.urlopen().

    :raises: Exceptions from urllib2.urlopen() and writing to the
      provided fileobj may occur.
    """

    user_agent = build_user_agent()

    try:
        # Wrap in a try as Python versions prior to 2.7.9 don't have
        # create_default_context, but some distros have backported it.
        ssl_context = ssl.create_default_context()
        if config.get("no-check-certificate"):
            logger.debug("Disabling SSL/TLS certificate verification.")
            ssl_context.check_hostname = False
            ssl_context.verify_mode = ssl.CERT_NONE
        opener = build_opener(HTTPSHandler(context=ssl_context))
    except:
        opener = build_opener()

    if user_agent:
        logger.debug("Setting HTTP User-Agent to %s", user_agent)
        opener.addheaders = [
            ("User-Agent", user_agent),
        ]
    else:
        opener.addheaders = [(header, value)
                             for header, value in opener.addheaders
                             if header.lower() != "user-agent"]
    remote = opener.open(url)
    info = remote.info()
    try:
        content_length = int(info["content-length"])
    except:
        content_length = 0
    bytes_read = 0
    while True:
        buf = remote.read(GET_BLOCK_SIZE)
        if not buf:
            # EOF
            break
        bytes_read += len(buf)
        fileobj.write(buf)
        if progress_hook:
            progress_hook(content_length, bytes_read)
    remote.close()
    fileobj.flush()
    return bytes_read, info
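
A minimal usage sketch for get() above, assuming it is callable from the same module; the URL is a placeholder and the progress hook simply prints the counters it receives.

# Usage sketch only; the URL is a placeholder.
import io

def print_progress(content_length, bytes_read):
    # content_length is 0 when no valid Content-Length header was received.
    print("read %d of %d bytes" % (bytes_read, content_length))

buf = io.BytesIO()
bytes_read, info = get("https://example.com/emerging.rules.tar.gz", buf,
                       progress_hook=print_progress)
print("downloaded %d bytes, content-type: %s" % (
    bytes_read, info.get("content-type")))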
Example #4
def load_dist_rules(files):
    """Load the rule files provided by the Suricata distribution."""

    # In the future hopefully we can just pull in all files from
    # /usr/share/suricata/rules, but for now pull in the set of files
    # known to have been provided by the Suricata source.
    filenames = [
        "app-layer-events.rules",
        "decoder-events.rules",
        "dhcp-events.rules",
        "dnp3-events.rules",
        "dns-events.rules",
        "files.rules",
        "http-events.rules",
        "ipsec-events.rules",
        "kerberos-events.rules",
        "modbus-events.rules",
        "nfs-events.rules",
        "ntp-events.rules",
        "smb-events.rules",
        "smtp-events.rules",
        "stream-events.rules",
        "tls-events.rules",
    ]

    dist_rule_path = config.get(config.DIST_RULE_DIRECTORY_KEY)
    if not dist_rule_path:
        logger.warning("No distribution rule directory found.")
        return

    if not os.path.exists(dist_rule_path):
        logger.warning("Distribution rule directory not found: %s",
                       dist_rule_path)
        return

    if not os.access(dist_rule_path, os.R_OK):
        logger.warning("Distribution rule path not readable: %s",
                       dist_rule_path)
        return

    for filename in filenames:
        path = os.path.join(dist_rule_path, filename)
        if not os.path.exists(path):
            continue
        if not os.access(path, os.R_OK):
            logger.warning("Distribution rule file not readable: %s",
                           path)
            continue
        logger.info("Loading distribution rule file %s", path)
        try:
            with open(path, "rb") as fileobj:
                files.append(SourceFile(path, fileobj.read()))
        except Exception as err:
            logger.error("Failed to open %s: %s" % (path, err))
            sys.exit(1)
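
A minimal usage sketch: for every readable distribution rule file, the function above appends a SourceFile (path plus raw contents) to the list it is given, so a caller only needs to pass in a list and inspect what was collected.

# Usage sketch only.
files = []
load_dist_rules(files)
print("Loaded %d distribution rule files" % (len(files)))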
Example #5
def test_suricata(suricata_path):
    if not suricata_path:
        logger.info("No suricata application binary found, skipping test.")
        return True

    if config.get("no-test"):
        logger.info("Skipping test, disabled by configuration.")
        return True

    if config.get("test-command"):
        test_command = config.get("test-command")
        logger.info("Testing Suricata configuration with: %s" % (test_command))
        env = {
            "SURICATA_PATH": suricata_path,
            "OUTPUT_DIR": config.get("output"),
        }
        if not config.get("no-merge"):
            env["OUTPUT_FILENAME"] = os.path.join(
                config.get("output"), DEFAULT_OUTPUT_RULE_FILENAME)
        rc = subprocess.Popen(test_command, shell=True, env=env).wait()
        if rc != 0:
            return False
    else:
        logger.info("Testing with suricata -T.")
        suricata_conf = config.get("suricata-conf")
        if not config.get("no-merge"):
            if not suricata.update.engine.test_configuration(
                    suricata_path, suricata_conf,
                    os.path.join(config.get_output_dir(),
                                 DEFAULT_OUTPUT_RULE_FILENAME)):
                return False
        else:
            if not suricata.update.engine.test_configuration(
                    suricata_path, suricata_conf):
                return False

    return True
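
When a custom test-command is configured, it is run with shell=True and an environment containing SURICATA_PATH, OUTPUT_DIR and, unless no-merge is set, OUTPUT_FILENAME. A hypothetical command that leans on those variables (an assumption, not taken from the source) might look like:

# Hypothetical test-command value; the shell expands these variables from the
# env dict built in test_suricata() above.
test_command = '"$SURICATA_PATH" -T -c /etc/suricata/suricata.yaml -S "$OUTPUT_FILENAME"'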
Example #6
def build_user_agent():
    params = []
    has_custom_user_agent = config.has("user-agent")
    if has_custom_user_agent:
        user_agent = config.get("user-agent")
        if user_agent is None or len(user_agent.strip()) == 0:
            logger.debug("Suppressing HTTP User-Agent header")
            return None
        return user_agent
    uname_system = platform.uname()[0]

    params.append("OS: %s" % (uname_system))
    params.append("CPU: %s" % (platform.machine()))
    params.append("Python: %s" % (platform.python_version()))

    if uname_system == "Linux":
        # Note: platform.linux_distribution() was removed in Python 3.8; the
        # newer variant of this function (Example #2) uses osinfo.dist() instead.
        distribution = platform.linux_distribution()
        params.append("Dist: %s/%s" %
                      (str(distribution[0]), str(distribution[1])))

    params.append("Suricata: %s" % (user_agent_suricata_verison))

    return "Suricata-Update/%s (%s)" % (version, "; ".join(params))
Example #7
def _main():
    global args
    args = parsers.parse_arg()

    # Go verbose or quiet sooner than later.
    if args.verbose:
        logger.setLevel(logging.DEBUG)
    if args.quiet:
        logger.setLevel(logging.WARNING)

    logger.debug("This is suricata-update version %s (rev: %s); Python: %s" %
                 (version, revision, sys.version.replace("\n", "- ")))

    config.init(args)

    # Error out if any reserved/unimplemented arguments were set.
    unimplemented_args = [
        "disable",
        "enable",
        "modify",
        "drop",
    ]
    for arg in unimplemented_args:
        if hasattr(args, arg) and getattr(args, arg):
            logger.error("--%s not implemented", arg)
            return 1

    suricata_path = config.get("suricata")

    # Now parse the Suricata version. If provided on the command line,
    # use that, otherwise attempt to get it from Suricata.
    if args.suricata_version:
        # The Suricata version was passed on the command line, parse it.
        suricata_version = engine.parse_version(args.suricata_version)
        if not suricata_version:
            logger.error("Failed to parse provided Suricata version: %s" %
                         (args.suricata_version))
            return 1
        logger.info("Forcing Suricata version to %s." %
                    (suricata_version.full))
    elif suricata_path:
        suricata_version = engine.get_version(suricata_path)
        if suricata_version:
            logger.info("Found Suricata version %s at %s." %
                        (str(suricata_version.full), suricata_path))
        else:
            logger.error("Failed to get Suricata version.")
            return 1
    else:
        logger.info("Using default Suricata version of %s",
                    DEFAULT_SURICATA_VERSION)
        suricata_version = engine.parse_version(DEFAULT_SURICATA_VERSION)

    # Provide the Suricata version to the net module to add to the
    # User-Agent.
    net.set_user_agent_suricata_version(suricata_version.full)

    if args.subcommand:
        if args.subcommand == "check-versions" and hasattr(args, "func"):
            return args.func(suricata_version)
        elif hasattr(args, "func"):
            return args.func()
        elif args.subcommand != "update":
            logger.error("Unknown command: %s", args.subcommand)
            return 1

    if args.dump_sample_configs:
        return dump_sample_configs()

    # If --no-ignore was provided, clear any ignores provided in the
    # config.
    if args.no_ignore:
        config.set(config.IGNORE_KEY, [])

    file_tracker = FileTracker()

    disable_matchers = []
    enable_matchers = []
    modify_filters = []
    drop_filters = []

    # Load user provided disable filters.
    disable_conf_filename = config.get("disable-conf")
    if disable_conf_filename and os.path.exists(disable_conf_filename):
        logger.info("Loading %s.", disable_conf_filename)
        disable_matchers += load_matchers(disable_conf_filename)

    # Load user provided enable filters.
    enable_conf_filename = config.get("enable-conf")
    if enable_conf_filename and os.path.exists(enable_conf_filename):
        logger.info("Loading %s.", enable_conf_filename)
        enable_matchers += load_matchers(enable_conf_filename)

    # Load user provided modify filters.
    modify_conf_filename = config.get("modify-conf")
    if modify_conf_filename and os.path.exists(modify_conf_filename):
        logger.info("Loading %s.", modify_conf_filename)
        modify_filters += load_filters(modify_conf_filename)

    # Load user provided drop filters.
    drop_conf_filename = config.get("drop-conf")
    if drop_conf_filename and os.path.exists(drop_conf_filename):
        logger.info("Loading %s.", drop_conf_filename)
        drop_filters += load_drop_filters(drop_conf_filename)

    # Load the Suricata configuration if we can.
    suriconf = None
    if config.get("suricata-conf") and \
       os.path.exists(config.get("suricata-conf")) and \
       suricata_path and os.path.exists(suricata_path):
        logger.info("Loading %s", config.get("suricata-conf"))
        try:
            suriconf = engine.Configuration.load(config.get("suricata-conf"),
                                                 suricata_path=suricata_path)
        except subprocess.CalledProcessError:
            return 1

    # Disable rules for app-layer protocols that are not enabled.
    if suriconf:
        for key in suriconf.keys():
            m = re.match(r"app-layer\.protocols\.([^\.]+)\.enabled", key)
            if m:
                proto = m.group(1)
                if not suriconf.is_true(key, ["detection-only"]):
                    logger.info("Disabling rules for protocol %s", proto)
                    disable_matchers.append(
                        matchers_mod.ProtoRuleMatcher(proto))
                elif proto == "smb" and suriconf.build_info:
                    # Special case for SMB rules. For versions less
                    # than 5, disable smb rules if Rust is not
                    # available.
                    if suriconf.build_info["version"].major < 5:
                        if not "RUST" in suriconf.build_info["features"]:
                            logger.info(
                                "Disabling rules for protocol {}".format(
                                    proto))
                            disable_matchers.append(
                                matchers_mod.ProtoRuleMatcher(proto))

    # Check that the cache directory exists and is writable.
    if not os.path.exists(config.get_cache_dir()):
        try:
            os.makedirs(config.get_cache_dir(), mode=0o770)
        except Exception as err:
            logger.warning(
                "Cache directory does not exist and could not be created. "
                "/var/tmp will be used instead.")
            config.set_cache_dir("/var/tmp")

    files = load_sources(suricata_version)

    load_dist_rules(files)

    # Remove ignored files.
    for filename in list(files.keys()):
        if ignore_file(config.get("ignore"), filename):
            logger.info("Ignoring file %s" % (filename))
            del files[filename]

    rules = []
    for filename in sorted(files):
        if not filename.endswith(".rules"):
            continue
        logger.debug("Parsing %s." % (filename))
        rules += rule_mod.parse_fileobj(io.BytesIO(files[filename]), filename)

    rulemap = build_rule_map(rules)
    logger.info("Loaded %d rules." % (len(rules)))

    # Counts of user enabled and modified rules.
    enable_count = 0
    modify_count = 0
    drop_count = 0

    # List of rules disabled by user. Used for counting, and to log
    # rules that are re-enabled to meet flowbit requirements.
    disabled_rules = []

    for key, rule in rulemap.items():

        for matcher in disable_matchers:
            if rule.enabled and matcher.match(rule):
                logger.debug("Disabling: %s" % (rule.brief()))
                rule.enabled = False
                disabled_rules.append(rule)

        for matcher in enable_matchers:
            if not rule.enabled and matcher.match(rule):
                logger.debug("Enabling: %s" % (rule.brief()))
                rule.enabled = True
                enable_count += 1

        for fltr in drop_filters:
            if fltr.match(rule):
                rulemap[rule.id] = fltr.run(rule)
                drop_count += 1

    # Apply modify filters.
    for fltr in modify_filters:
        for key, rule in rulemap.items():
            if fltr.match(rule):
                new_rule = fltr.run(rule)
                if new_rule and new_rule.format() != rule.format():
                    rulemap[rule.id] = new_rule
                    modify_count += 1

    # Check if we should disable ja3 rules.
    try:
        disable_ja3(suriconf, rulemap, disabled_rules)
    except Exception as err:
        logger.error("Failed to dynamically disable ja3 rules: %s" % (err))

    # Check rule vars, disabling rules that use unknown vars.
    check_vars(suriconf, rulemap)

    logger.info("Disabled %d rules." % (len(disabled_rules)))
    logger.info("Enabled %d rules." % (enable_count))
    logger.info("Modified %d rules." % (modify_count))
    logger.info("Dropped %d rules." % (drop_count))

    # Fixup flowbits.
    resolve_flowbits(rulemap, disabled_rules)

    # Check that output directory exists, creating it if needed.
    check_output_directory(config.get_output_dir())

    # Check that output directory is writable.
    if not os.access(config.get_output_dir(), os.W_OK):
        logger.error("Output directory is not writable: %s",
                     config.get_output_dir())
        return 1

    # Backup the output directory.
    logger.info("Backing up current rules.")
    backup_directory = util.mktempdir()
    shutil.copytree(config.get_output_dir(),
                    os.path.join(backup_directory, "backup"),
                    ignore=copytree_ignore_backup)

    if not args.no_merge:
        # The default, write out a merged file.
        output_filename = os.path.join(config.get_output_dir(),
                                       DEFAULT_OUTPUT_RULE_FILENAME)
        file_tracker.add(output_filename)
        write_merged(os.path.join(output_filename), rulemap)
    else:
        for filename in files:
            file_tracker.add(
                os.path.join(config.get_output_dir(),
                             os.path.basename(filename)))
        write_to_directory(config.get_output_dir(), files, rulemap)

    if args.yaml_fragment:
        file_tracker.add(args.yaml_fragment)
        write_yaml_fragment(args.yaml_fragment, files)

    if args.sid_msg_map:
        write_sid_msg_map(args.sid_msg_map, rulemap, version=1)
    if args.sid_msg_map_2:
        write_sid_msg_map(args.sid_msg_map_2, rulemap, version=2)

    if args.threshold_in and args.threshold_out:
        file_tracker.add(args.threshold_out)
        threshold_processor = ThresholdProcessor()
        threshold_processor.process(open(args.threshold_in),
                                    open(args.threshold_out, "w"), rulemap)

    if not args.force and not file_tracker.any_modified():
        logger.info("No changes detected, exiting.")
        notes.dump_notes()
        return 0

    # Set these containers to None to free the memory before testing Suricata,
    # which may consume a lot of memory by itself. Ideally this large function
    # would be refactored into smaller methods so these go out of scope and are
    # freed automatically.
    rulemap = None
    rules = None
    files = None

    if not test_suricata(suricata_path):
        logger.error("Suricata test failed, aborting.")
        logger.error("Restoring previous rules.")
        copytree(os.path.join(backup_directory, "backup"),
                 config.get_output_dir())
        return 1

    if not config.args().no_reload and config.get("reload-command"):
        logger.info("Running %s." % (config.get("reload-command")))
        rc = subprocess.Popen(config.get("reload-command"), shell=True).wait()
        if rc != 0:
            logger.error("Reload command exited with error: %d", rc)

    logger.info("Done.")

    notes.dump_notes()

    return 0
Example #8
def load_sources(suricata_version):
    files = {}

    urls = []

    http_header = None
    checksum = True

    # Add any URLs added with the --url command line parameter.
    if config.args().url:
        for url in config.args().url:
            urls.append((url, http_header, checksum))

    # Get the new style sources.
    enabled_sources = sources.get_enabled_sources()

    # Convert the Suricata version to a version string.
    version_string = "%d.%d.%d" % (
        suricata_version.major, suricata_version.minor, suricata_version.patch)

    # Construct the URL replacement parameters that are internal to
    # suricata-update.
    internal_params = {"__version__": version_string}

    # If we have new sources, we also need to load the index.
    if enabled_sources:
        index_filename = sources.get_index_filename()
        if not os.path.exists(index_filename):
            logger.warning("No index exists, will use bundled index.")
            logger.warning("Please run suricata-update update-sources.")
        if os.path.exists(index_filename) and time.time() - \
                os.stat(index_filename).st_mtime > INDEX_EXPIRATION_TIME:
            logger.warning(
                "Source index is older than 2 weeks. "
                "Please update with suricata-update update-sources.")
        index = sources.Index(index_filename)

        for (name, source) in enabled_sources.items():
            params = source["params"] if "params" in source else {}
            params.update(internal_params)
            if "url" in source:
                # No need to go off to the index.
                http_header = source.get("http_header")
                checksum = source.get("checksum")
                url = (source["url"] % params, http_header, checksum)
                logger.debug("Resolved source %s to URL %s.", name, url[0])
            else:
                if not index:
                    raise exceptions.ApplicationError(
                        "Source index is required for source %s; "
                        "run suricata-update update-sources" %
                        (source["source"]))
                source_config = index.get_source_by_name(name)
                try:
                    checksum = source_config["checksum"]
                except:
                    checksum = True
                url = (index.resolve_url(name, params), http_header, checksum)
                logger.debug("Resolved source %s to URL %s.", name, url)
            urls.append(url)

    if config.get("sources"):
        for url in config.get("sources"):
            if not isinstance(url, str):
                raise exceptions.InvalidConfigurationError(
                    "Invalid datatype for source URL: %s" % (str(url)))
            url = (url % internal_params, http_header, checksum)
            logger.debug("Adding source %s.", url)
            urls.append(url)

    # If --etopen is on the command line, make sure it's added. Or if
    # there are no URLs, default to ET/Open.
    if config.get("etopen") or not urls:
        if not config.args().offline and not urls:
            logger.info(
                "No sources configured, will use Emerging Threats Open")
        urls.append(
            (sources.get_etopen_url(internal_params), http_header, checksum))

    # Converting the URLs to a set removes duplicates.
    urls = set(urls)

    # Now download each URL.
    for url in urls:
        Fetch().run(url, files)

    # Now load local rules.
    if config.get("local") is not None:
        for local in config.get("local"):
            load_local(local, files)

    return files
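
Each entry appended to urls above is a (url, http_header, checksum) tuple, which is what Fetch().run() is handed. For illustration (placeholder values, not real sources):

# Placeholder tuples only; real values come from --url, the index or the config.
example_urls = [
    ("https://example.com/custom.rules", None, True),
    ("https://example.com/private.rules", "Authorization: Bearer TOKEN", True),
]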
Example #9
def get(url, fileobj, progress_hook=None):
    """ Perform a GET request against a URL writing the contents into
    the provideded file like object.

    :param url: The URL to fetch
    :param fileobj: The fileobj to write the content to
    :param progress_hook: The function to call with progress updates

    :returns: Returns a tuple containing the number of bytes read and
      the result of the info() function from urllib2.urlopen().

    :raises: Exceptions from urllib2.urlopen() and writing to the
      provided fileobj may occur.
    """

    user_agent = build_user_agent()

    try:
        # Wrap in a try as Python versions prior to 2.7.9 don't have
        # create_default_context, but some distros have backported it.
        ssl_context = ssl.create_default_context()
        if config.get("no-check-certificate"):
            logger.debug("Disabling SSL/TLS certificate verification.")
            ssl_context.check_hostname = False
            ssl_context.verify_mode = ssl.CERT_NONE
        opener = build_opener(HTTPSHandler(context=ssl_context))
    except:
        opener = build_opener()

    if user_agent:
        logger.debug("Setting HTTP User-Agent to %s", user_agent)
        http_headers = [("User-Agent", user_agent)]
    else:
        http_headers = [(header, value) for header, value in opener.addheaders
                        if header.lower() != "user-agent"]
    if isinstance(url, tuple):
        header = url[1].split(":") if url[1] is not None else None
        if header and is_header_clean(header=header):
            name, val = header[0].strip(), header[1].strip()
            logger.debug("Setting HTTP header %s to %s", name, val)
            http_headers.append((name, val))
        elif header:
            logger.error("Header not set as it does not meet the criteria")
        url = url[0]
    opener.addheaders = http_headers

    try:
        remote = opener.open(url, timeout=30)
    except ValueError as ve:
        logger.error(ve)
    else:
        info = remote.info()
        content_length = info.get("content-length")
        content_length = int(content_length) if content_length else 0
        bytes_read = 0
        while True:
            buf = remote.read(GET_BLOCK_SIZE)
            if not buf:
                # EOF
                break
            bytes_read += len(buf)
            fileobj.write(buf)
            if progress_hook:
                progress_hook(content_length, bytes_read)
        remote.close()
        fileobj.flush()
        return bytes_read, info
Example #10
def _main():
    global args

    global_parser = argparse.ArgumentParser(add_help=False)
    global_parser.add_argument("-v",
                               "--verbose",
                               action="store_true",
                               default=None,
                               help="Be more verbose")
    global_parser.add_argument(
        "-q",
        "--quiet",
        action="store_true",
        default=False,
        help="Be quiet, warning and error messages only")
    global_parser.add_argument(
        "-D",
        "--data-dir",
        metavar="<directory>",
        dest="data_dir",
        help="Data directory (default: /var/lib/suricata)")
    global_parser.add_argument(
        "-c",
        "--config",
        metavar="<filename>",
        help="configuration file (default: /etc/suricata/update.yaml)")
    global_parser.add_argument(
        "--suricata-conf",
        metavar="<filename>",
        help="configuration file (default: /etc/suricata/suricata.yaml)")
    global_parser.add_argument("--suricata",
                               metavar="<path>",
                               help="Path to Suricata program")
    global_parser.add_argument("--suricata-version",
                               metavar="<version>",
                               help="Override Suricata version")
    global_parser.add_argument("--user-agent",
                               metavar="<user-agent>",
                               help="Set custom user-agent string")
    global_parser.add_argument(
        "--no-check-certificate",
        action="store_true",
        default=None,
        help="Disable server SSL/TLS certificate verification")

    global_args, rem = global_parser.parse_known_args()

    if not rem or rem[0].startswith("-"):
        rem.insert(0, "update")

    parser = argparse.ArgumentParser()
    subparsers = parser.add_subparsers(dest="subcommand", metavar="<command>")

    # The "update" (default) sub-command parser.
    update_parser = subparsers.add_parser("update",
                                          add_help=False,
                                          parents=[global_parser])

    update_parser.add_argument("-o",
                               "--output",
                               metavar="<directory>",
                               dest="output",
                               help="Directory to write rules to")
    update_parser.add_argument(
        "-f",
        "--force",
        action="store_true",
        default=False,
        help="Force operations that might otherwise be skipped")
    update_parser.add_argument("--yaml-fragment",
                               metavar="<filename>",
                               help="Output YAML fragment for rule inclusion")
    update_parser.add_argument(
        "--url",
        metavar="<url>",
        action="append",
        default=[],
        help=
        "URL to use instead of auto-generating one (can be specified multiple times)"
    )
    update_parser.add_argument(
        "--local",
        metavar="<path>",
        action="append",
        default=[],
        help="Local rule files or directories (can be specified multiple times)"
    )
    update_parser.add_argument("--sid-msg-map",
                               metavar="<filename>",
                               help="Generate a sid-msg.map file")
    update_parser.add_argument("--sid-msg-map-2",
                               metavar="<filename>",
                               help="Generate a v2 sid-msg.map file")

    update_parser.add_argument("--disable-conf",
                               metavar="<filename>",
                               help="Filename of rule disable filters")
    update_parser.add_argument("--enable-conf",
                               metavar="<filename>",
                               help="Filename of rule enable filters")
    update_parser.add_argument("--modify-conf",
                               metavar="<filename>",
                               help="Filename of rule modification filters")
    update_parser.add_argument("--drop-conf",
                               metavar="<filename>",
                               help="Filename of drop rules filters")

    update_parser.add_argument(
        "--ignore",
        metavar="<pattern>",
        action="append",
        default=[],
        help=
        "Filenames to ignore (can be specified multiple times; default: *deleted.rules)"
    )
    update_parser.add_argument("--no-ignore",
                               action="store_true",
                               default=False,
                               help="Disables the ignore option.")

    update_parser.add_argument(
        "--threshold-in",
        metavar="<filename>",
        help="Filename of rule thresholding configuration")
    update_parser.add_argument(
        "--threshold-out",
        metavar="<filename>",
        help="Output of processed threshold configuration")

    update_parser.add_argument(
        "--dump-sample-configs",
        action="store_true",
        default=False,
        help="Dump sample config files to current directory")
    update_parser.add_argument("--etopen",
                               action="store_true",
                               help="Use ET-Open rules (default)")
    update_parser.add_argument("--reload-command",
                               metavar="<command>",
                               help="Command to run after update if modified")
    update_parser.add_argument("--no-reload",
                               action="store_true",
                               default=False,
                               help="Disable reload")
    update_parser.add_argument("-T",
                               "--test-command",
                               metavar="<command>",
                               help="Command to test Suricata configuration")
    update_parser.add_argument("--no-test",
                               action="store_true",
                               default=False,
                               help="Disable testing rules with Suricata")
    update_parser.add_argument("-V",
                               "--version",
                               action="store_true",
                               default=False,
                               help="Display version")

    update_parser.add_argument(
        "--no-merge",
        action="store_true",
        default=False,
        help="Do not merge the rules into a single file")

    update_parser.add_argument("-h", "--help", action="store_true")

    # Hidden argument: --now bypasses the time-based check that would
    # otherwise skip updating the ruleset.
    update_parser.add_argument("--now",
                               default=False,
                               action="store_true",
                               help=argparse.SUPPRESS)

    # The Python 2.7 argparse module does prefix matching which can be
    # undesirable. Reserve some names here that would match existing
    # options to prevent prefix matching.
    update_parser.add_argument("--disable",
                               default=False,
                               help=argparse.SUPPRESS)
    update_parser.add_argument("--enable",
                               default=False,
                               help=argparse.SUPPRESS)
    update_parser.add_argument("--modify",
                               default=False,
                               help=argparse.SUPPRESS)
    update_parser.add_argument("--drop", default=False, help=argparse.SUPPRESS)

    commands.listsources.register(
        subparsers.add_parser("list-sources", parents=[global_parser]))
    commands.listenabledsources.register(
        subparsers.add_parser("list-enabled-sources", parents=[global_parser]))
    commands.addsource.register(
        subparsers.add_parser("add-source", parents=[global_parser]))
    commands.updatesources.register(
        subparsers.add_parser("update-sources", parents=[global_parser]))
    commands.enablesource.register(
        subparsers.add_parser("enable-source", parents=[global_parser]))
    commands.disablesource.register(
        subparsers.add_parser("disable-source", parents=[global_parser]))
    commands.removesource.register(
        subparsers.add_parser("remove-source", parents=[global_parser]))

    args = parser.parse_args(rem)

    # Merge global args into args.
    for arg in vars(global_args):
        if not hasattr(args, arg):
            setattr(args, arg, getattr(global_args, arg))
        elif hasattr(args, arg) and getattr(args, arg) is None:
            setattr(args, arg, getattr(global_args, arg))

    # Go verbose or quiet sooner than later.
    if args.verbose:
        logger.setLevel(logging.DEBUG)
    if args.quiet:
        logger.setLevel(logging.WARNING)

    config.init(args)

    # Error out if any reserved/unimplemented arguments were set.
    unimplemented_args = [
        "disable",
        "enable",
        "modify",
        "drop",
    ]
    for arg in unimplemented_args:
        if hasattr(args, arg) and getattr(args, arg):
            logger.error("--%s not implemented", arg)
            return 1

    logger.debug("This is suricata-update version %s (rev: %s); Python: %s" %
                 (version, revision, sys.version.replace("\n", "- ")))

    suricata_path = config.get("suricata")

    # Now parse the Suricata version. If provided on the command line,
    # use that, otherwise attempt to get it from Suricata.
    if args.suricata_version:
        # The Suricata version was passed on the command line, parse it.
        suricata_version = suricata.update.engine.parse_version(
            args.suricata_version)
        if not suricata_version:
            logger.error("Failed to parse provided Suricata version: %s" %
                         (args.suricata_version))
            return 1
        logger.info("Forcing Suricata version to %s." %
                    (suricata_version.full))
    elif suricata_path:
        suricata_version = suricata.update.engine.get_version(suricata_path)
        if suricata_version:
            logger.info("Found Suricata version %s at %s." %
                        (str(suricata_version.full), suricata_path))
        else:
            logger.error("Failed to get Suricata version.")
            return 1
    else:
        logger.info("Using default Suricata version of %s",
                    DEFAULT_SURICATA_VERSION)
        suricata_version = suricata.update.engine.parse_version(
            DEFAULT_SURICATA_VERSION)

    # Provide the Suricata version to the net module to add to the
    # User-Agent.
    suricata.update.net.set_user_agent_suricata_version(suricata_version.full)

    # Load custom user-agent-string.
    user_agent = config.get("user-agent")
    if user_agent:
        logger.info("Using user-agent: %s.", user_agent)
        suricata.update.net.set_custom_user_agent(user_agent)

    if args.subcommand:
        if hasattr(args, "func"):
            return args.func()
        elif args.subcommand != "update":
            logger.error("Unknown command: %s", args.subcommand)
            return 1

    if args.dump_sample_configs:
        return dump_sample_configs()

    if args.version:
        print("suricata-update version %s (rev: %s)" % (version, revision))
        return 0

    if args.help:
        print(update_parser.format_help())
        print("""other commands:
    update-sources             Update the source index
    list-sources               List available sources
    enable-source              Enable a source from the index
    disable-source             Disable an enabled source
    remove-source              Remove an enabled or disabled source
    list-enabled-sources       List all enabled sources
    add-source                 Add a new source by URL
""")
        return 0

    # If --no-ignore was provided, clear any ignores provided in the
    # config.
    if args.no_ignore:
        config.set(config.IGNORE_KEY, [])

    file_tracker = FileTracker()

    disable_matchers = []
    enable_matchers = []
    modify_filters = []
    drop_filters = []

    # Load user provided disable filters.
    disable_conf_filename = config.get("disable-conf")
    if disable_conf_filename and os.path.exists(disable_conf_filename):
        logger.info("Loading %s.", disable_conf_filename)
        disable_matchers += load_matchers(disable_conf_filename)

    # Load user provided enable filters.
    enable_conf_filename = config.get("enable-conf")
    if enable_conf_filename and os.path.exists(enable_conf_filename):
        logger.info("Loading %s.", enable_conf_filename)
        enable_matchers += load_matchers(enable_conf_filename)

    # Load user provided modify filters.
    modify_conf_filename = config.get("modify-conf")
    if modify_conf_filename and os.path.exists(modify_conf_filename):
        logger.info("Loading %s.", modify_conf_filename)
        modify_filters += load_filters(modify_conf_filename)

    # Load user provided drop filters.
    drop_conf_filename = config.get("drop-conf")
    if drop_conf_filename and os.path.exists(drop_conf_filename):
        logger.info("Loading %s.", drop_conf_filename)
        drop_filters += load_drop_filters(drop_conf_filename)

    if config.get("suricata-conf") and \
       os.path.exists(config.get("suricata-conf")) and \
       suricata_path and os.path.exists(suricata_path):
        logger.info("Loading %s", config.get("suricata-conf"))
        suriconf = suricata.update.engine.Configuration.load(
            config.get("suricata-conf"), suricata_path=suricata_path)
        for key in suriconf.keys():
            if key.startswith("app-layer.protocols") and \
               key.endswith(".enabled"):
                if not suriconf.is_true(key, ["detection-only"]):
                    proto = key.split(".")[2]
                    logger.info("Disabling rules with proto %s", proto)
                    disable_matchers.append(ProtoRuleMatcher(proto))

    # Check that the cache directory exists and is writable.
    if not os.path.exists(config.get_cache_dir()):
        try:
            os.makedirs(config.get_cache_dir(), mode=0o770)
        except Exception as err:
            logger.warning(
                "Cache directory does not exist and could not be created. "
                "/var/tmp will be used instead.")
            config.set_cache_dir("/var/tmp")

    files = load_sources(suricata_version)

    load_dist_rules(files)

    # Remove ignored files.
    for filename in list(files.keys()):
        if ignore_file(config.get("ignore"), filename):
            logger.info("Ignoring file %s" % (filename))
            del files[filename]

    rules = []
    for filename in files:
        if not filename.endswith(".rules"):
            continue
        logger.debug("Parsing %s." % (filename))
        rules += suricata.update.rule.parse_fileobj(
            io.BytesIO(files[filename]), filename)

    rulemap = build_rule_map(rules)
    logger.info("Loaded %d rules." % (len(rules)))

    # Counts of user enabled and modified rules.
    enable_count = 0
    modify_count = 0
    drop_count = 0

    # List of rules disabled by user. Used for counting, and to log
    # rules that are re-enabled to meet flowbit requirements.
    disabled_rules = []

    for key, rule in rulemap.items():

        for matcher in disable_matchers:
            if rule.enabled and matcher.match(rule):
                logger.debug("Disabling: %s" % (rule.brief()))
                rule.enabled = False
                disabled_rules.append(rule)

        for matcher in enable_matchers:
            if not rule.enabled and matcher.match(rule):
                logger.debug("Enabling: %s" % (rule.brief()))
                rule.enabled = True
                enable_count += 1

        for fltr in drop_filters:
            if fltr.match(rule):
                rulemap[rule.id] = fltr.filter(rule)
                drop_count += 1

    # Apply modify filters.
    for fltr in modify_filters:
        for key, rule in rulemap.items():
            if fltr.match(rule):
                new_rule = fltr.filter(rule)
                if new_rule and new_rule.format() != rule.format():
                    rulemap[rule.id] = new_rule
                    modify_count += 1

    logger.info("Disabled %d rules." % (len(disabled_rules)))
    logger.info("Enabled %d rules." % (enable_count))
    logger.info("Modified %d rules." % (modify_count))
    logger.info("Dropped %d rules." % (drop_count))

    # Fixup flowbits.
    resolve_flowbits(rulemap, disabled_rules)

    # Check that output directory exists.
    if not os.path.exists(config.get_output_dir()):
        try:
            os.makedirs(config.get_output_dir(), mode=0o770)
        except Exception as err:
            logger.error(
                "Output directory does not exist and could not be created: %s",
                config.get_output_dir())
            return 1

    # Check that output directory is writable.
    if not os.access(config.get_output_dir(), os.W_OK):
        logger.error("Output directory is not writable: %s",
                     config.get_output_dir())
        return 1

    # Backup the output directory.
    logger.info("Backing up current rules.")
    backup_directory = util.mktempdir()
    shutil.copytree(config.get_output_dir(),
                    os.path.join(backup_directory, "backup"),
                    ignore=copytree_ignore_backup)

    if not args.no_merge:
        # The default, write out a merged file.
        output_filename = os.path.join(config.get_output_dir(),
                                       DEFAULT_OUTPUT_RULE_FILENAME)
        file_tracker.add(output_filename)
        write_merged(os.path.join(output_filename), rulemap)
    else:
        for filename in files:
            file_tracker.add(
                os.path.join(config.get_output_dir(),
                             os.path.basename(filename)))
        write_to_directory(config.get_output_dir(), files, rulemap)

    if args.yaml_fragment:
        file_tracker.add(args.yaml_fragment)
        write_yaml_fragment(args.yaml_fragment, files)

    if args.sid_msg_map:
        write_sid_msg_map(args.sid_msg_map, rulemap, version=1)
    if args.sid_msg_map_2:
        write_sid_msg_map(args.sid_msg_map_2, rulemap, version=2)

    if args.threshold_in and args.threshold_out:
        file_tracker.add(args.threshold_out)
        threshold_processor = ThresholdProcessor()
        threshold_processor.process(open(args.threshold_in),
                                    open(args.threshold_out, "w"), rulemap)

    if not args.force and not file_tracker.any_modified():
        logger.info("No changes detected, exiting.")
        return 0

    if not test_suricata(suricata_path):
        logger.error("Suricata test failed, aborting.")
        logger.error("Restoring previous rules.")
        copytree(os.path.join(backup_directory, "backup"),
                 config.get_output_dir())
        return 1

    if not config.args().no_reload and config.get("reload-command"):
        logger.info("Running %s." % (config.get("reload-command")))
        rc = subprocess.Popen(config.get("reload-command"), shell=True).wait()
        if rc != 0:
            logger.error("Reload command exited with error: %d", rc)

    logger.info("Done.")

    return 0
Example #11
def load_sources(suricata_version):
    files = {}

    urls = []

    # Add any URLs added with the --url command line parameter.
    if config.args().url:
        for url in config.args().url:
            urls.append(url)

    # Get the new style sources.
    enabled_sources = sources.get_enabled_sources()

    # Convert the Suricata version to a version string.
    version_string = "%d.%d.%d" % (
        suricata_version.major, suricata_version.minor, suricata_version.patch)

    # Construct the URL replacement parameters that are internal to
    # suricata-update.
    internal_params = {"__version__": version_string}

    # If we have new sources, we also need to load the index.
    if enabled_sources:
        index_filename = os.path.join(config.get_cache_dir(),
                                      sources.SOURCE_INDEX_FILENAME)
        if os.path.exists(index_filename):
            index = sources.Index(index_filename)
        else:
            index = None

        for (name, source) in enabled_sources.items():
            params = source["params"] if "params" in source else {}
            params.update(internal_params)
            if "url" in source:
                # No need to go off to the index.
                url = source["url"] % params
            else:
                if not index:
                    raise exceptions.ApplicationError(
                        "Source index is required for source %s; "
                        "run suricata-update update-sources" %
                        (source["source"]))
                url = index.resolve_url(name, params)
            logger.debug("Resolved source %s to URL %s.", name, url)
            urls.append(url)

    if config.get("sources"):
        for url in config.get("sources"):
            if not isinstance(url, str):
                raise exceptions.InvalidConfigurationError(
                    "Invalid datatype for source URL: %s" % (str(url)))
            url = url % internal_params
            logger.debug("Adding source %s.", url)
            urls.append(url)

    # If --etopen is on the command line, make sure it's added. Or if
    # there are no URLs, default to ET/Open.
    if config.get("etopen") or not urls:
        if not urls:
            logger.info(
                "No sources configured, will use Emerging Threats Open")
        urls.append(sources.get_etopen_url(internal_params))

    # Converting the URLs to a set removes duplicates.
    urls = set(urls)

    # Now download each URL.
    for url in urls:
        Fetch().run(url, files)

    # Now load local rules.
    for local in config.get("local"):
        load_local(local, files)

    return files
Example #12
def list_sources():
    enabled = config.args().enabled or \
        config.args().subcommand == "list-enabled-sources"

    if enabled:
        found = False

        # First list sources from the main config.
        config_sources = config.get("sources")
        if config_sources:
            found = True
            print("From %s:" % (config.filename))
            for source in config_sources:
                print("  - %s" % (source))

        # And local files.
        local = config.get("local")
        if local:
            found = True
            print("Local files/directories:")
            for filename in local:
                print("  - %s" % (filename))

        enabled_sources = sources.get_enabled_sources()
        if enabled_sources:
            found = True
            print("Enabled sources:")
            for source in enabled_sources.values():
                print("  - %s" % (source["source"]))

        # If no enabled sources were found, log it.
        if not found:
            logger.warning("No enabled sources.")
        return 0

    free_only = config.args().free
    if not sources.source_index_exists(config):
        logger.info("No source index found, running update-sources")
        try:
            update_sources()
        except exceptions.ApplicationError as err:
            logger.warning("%s: will use bundled index.", err)
    index = sources.load_source_index(config)
    for name, source in index.get_sources().items():
        is_not_free = source.get("subscribe-url")
        if free_only and is_not_free:
            continue
        if not config.args().all:
            if source.get("deprecated") is not None or \
               source.get("obsolete") is not None:
                continue
        print("%s: %s" % (util.bright_cyan("Name"), util.bright_magenta(name)))
        print("  %s: %s" % (util.bright_cyan("Vendor"),
                            util.bright_magenta(source["vendor"])))
        print("  %s: %s" % (util.bright_cyan("Summary"),
                            util.bright_magenta(source["summary"])))
        print("  %s: %s" % (util.bright_cyan("License"),
                            util.bright_magenta(source["license"])))
        if "tags" in source:
            print("  %s: %s" %
                  (util.bright_cyan("Tags"),
                   util.bright_magenta(", ".join(source["tags"]))))
        if "replaces" in source:
            print("  %s: %s" %
                  (util.bright_cyan("Replaces"),
                   util.bright_magenta(", ".join(source["replaces"]))))
        if "parameters" in source:
            print("  %s: %s" %
                  (util.bright_cyan("Parameters"),
                   util.bright_magenta(", ".join(source["parameters"]))))
        if "subscribe-url" in source:
            print("  %s: %s" % (util.bright_cyan("Subscription"),
                                util.bright_magenta(source["subscribe-url"])))
        if "deprecated" in source:
            print("  %s: %s" % (util.orange("Deprecated"),
                                util.bright_magenta(source["deprecated"])))
        if "obsolete" in source:
            print("  %s: %s" % (util.orange("Obsolete"),
                                util.bright_magenta(source["obsolete"])))